show axis units on ruler

This commit is contained in:
Rokas Puzonas 2025-03-16 13:38:53 +02:00
parent de2941c5bf
commit d8867cb3d6
4 changed files with 183 additions and 124 deletions

View File

@@ -60,6 +60,8 @@ pub const ChannelView = struct {
x_range: RangeF64,
y_range: RangeF64,
sample_rate: ?f64 = null,
unit: ?NIDaq.Unit = null,
source: union(enum) {
file: usize,
@@ -227,110 +229,32 @@ fn readSamplesFromFile(allocator: std.mem.Allocator, file: std.fs.File) ![]f64 {
return samples;
}
/// Loads all samples from the file at `path` and registers them as a new
/// channel view. The path is duplicated and owned (together with the sample
/// buffer) by the slot stored in `self.loaded_files`.
/// Errors: file open/read failures, allocation failure, or
/// `error.FileLimitReached` when no free file slot remains.
pub fn appendChannelFromFile(self: *App, path: []const u8) !void {
const path_dupe = try self.allocator.dupe(u8, path);
errdefer self.allocator.free(path_dupe);
const file = try std.fs.cwd().openFile(path, .{});
defer file.close();
const samples = try readSamplesFromFile(self.allocator, file);
errdefer self.allocator.free(samples);
// Scan for the min/max sample so the initial Y view spans the whole signal.
// Empty input leaves the degenerate range [0, 0].
var min_value: f64 = 0;
var max_value: f64 = 0;
if (samples.len > 0) {
min_value = samples[0];
max_value = samples[0];
for (samples) |sample| {
min_value = @min(min_value, sample);
max_value = @max(max_value, sample);
}
}
const loaded_file_index = findFreeSlot(FileChannel, &self.loaded_files) orelse return error.FileLimitReached;
self.loaded_files[loaded_file_index] = FileChannel{
.path = path_dupe,
.samples = samples
};
errdefer self.loaded_files[loaded_file_index] = null;
// X axis spans sample indices [0, len - 1]; clamped to 0 for empty input.
const from: f64 = 0;
const to = @max(@as(f64, @floatFromInt(samples.len)) - 1, 0);
self.channel_views.appendAssumeCapacity(ChannelView{
.view_rect = .{
// Y range is passed (max, min) so the axis is drawn top-down.
.x_range = RangeF64.init(from, to),
.y_range = RangeF64.init(max_value, min_value)
},
.x_range = RangeF64.init(from, to),
.y_range = RangeF64.init(max_value, min_value),
.source = .{ .file = loaded_file_index }
});
// NOTE(review): this errdefer is the last statement — no fallible call
// follows it, so it can never execute.
errdefer _ = self.channel_views.pop();
}
/// Registers a new channel view backed by a physical NI-DAQ device channel.
/// Returns silently when no NI-DAQ driver handle is available.
/// Errors: `error.DeviceChannelLimitReached` when all slots are in use,
/// `error.InvalidChannelName` when no device name can be parsed from
/// `channel_name`, plus driver query failures.
pub fn appendChannelFromDevice(self: *App, channel_name: []const u8) !void {
const ni_daq = &(self.ni_daq orelse return);
const device_channel_index = findFreeSlot(DeviceChannel, &self.device_channels) orelse return error.DeviceChannelLimitReached;
// Keep null-terminated copies of the names for the C driver API.
const name_buff = try DeviceChannel.Name.fromSlice(channel_name);
const channel_name_z = name_buff.buffer[0..name_buff.len :0];
const device = NIDaq.getDeviceNameFromChannel(channel_name) orelse return error.InvalidChannelName;
const device_buff = try NIDaq.BoundedDeviceName.fromSlice(device);
const device_z = device_buff.buffer[0..device_buff.len :0];
// Default Y range [0, 1]; replaced by the device's first reported range, if any.
var min_value: f64 = 0;
var max_value: f64 = 1;
// NOTE(review): AO (output) voltage ranges are queried here — confirm this
// matches the channel's intended direction.
const voltage_ranges = try ni_daq.listDeviceAOVoltageRanges(device_z);
if (voltage_ranges.len > 0) {
min_value = voltage_ranges[0].low;
max_value = voltage_ranges[0].high;
}
const max_sample_rate = try ni_daq.getMaxSampleRate(channel_name_z);
self.device_channels[device_channel_index] = DeviceChannel{
.name = name_buff,
// Fall back to the max rate when the min rate cannot be queried.
.min_sample_rate = ni_daq.getMinSampleRate(channel_name_z) catch max_sample_rate,
.max_sample_rate = max_sample_rate,
.min_value = min_value,
.max_value = max_value,
.samples = std.ArrayList(f64).init(self.allocator)
};
errdefer self.device_channels[device_channel_index] = null;
self.channel_views.appendAssumeCapacity(ChannelView{
.view_rect = .{
// Y range is passed (max, min) so the axis is drawn top-down.
.x_range = RangeF64.init(0, 0),
.y_range = RangeF64.init(max_value, min_value)
},
.x_range = RangeF64.init(0, 0),
.y_range = RangeF64.init(max_value, min_value),
.source = .{ .device = device_channel_index }
});
// NOTE(review): this errdefer is the last statement — no fallible call
// follows it, so it can never execute.
errdefer _ = self.channel_views.pop();
}
/// Returns a mutable slice over all currently registered channel views.
/// The slice aliases internal storage; do not hold it across appends.
pub fn listChannelViews(self: *App) []ChannelView {
return self.channel_views.slice();
}
pub fn tick(self: *App) !void {
var ui = &self.ui;
rl.clearBackground(srcery.black);
var ui = &self.ui;
{
ui.begin();
defer ui.end();
{
self.channel_mutex.lock();
defer self.channel_mutex.unlock();
try self.screen.tick();
for (self.listChannelViews()) |*channel_view| {
const device_channel = self.getChannelSourceDevice(channel_view) orelse continue;
const sample_count: f32 = @floatFromInt(device_channel.samples.items.len);
channel_view.x_range = RangeF64.init(0, sample_count);
}
{
ui.begin();
defer ui.end();
try self.screen.tick();
}
}
ui.draw();
@@ -390,12 +314,13 @@ pub fn startDeviceChannelReading(self: *App, channel_view: *ChannelView) void {
}
const channel_name = device_channel.name.buffer[0..device_channel.name.len :0];
const sample_rate = device_channel.max_sample_rate;
const task = self.task_pool.launchAIVoltageChannel(
ni_daq,
&device_channel.samples,
.{
.continous = .{ .sample_rate = device_channel.max_sample_rate }
.sample_rate = sample_rate
},
.{
.min_value = device_channel.min_value,
@@ -414,3 +339,96 @@ pub fn startDeviceChannelReading(self: *App, channel_view: *ChannelView) void {
device_channel.active_task = task;
}
/// Loads all samples from the file at `path` and registers them as a new
/// channel view. The path is duplicated and owned (together with the sample
/// buffer) by the slot stored in `self.loaded_files`.
/// File-backed views carry no `sample_rate` or `unit` — both stay null.
/// Errors: file open/read failures, allocation failure, or
/// `error.FileLimitReached` when no free file slot remains.
pub fn appendChannelFromFile(self: *App, path: []const u8) !void {
const path_dupe = try self.allocator.dupe(u8, path);
errdefer self.allocator.free(path_dupe);
const file = try std.fs.cwd().openFile(path, .{});
defer file.close();
const samples = try readSamplesFromFile(self.allocator, file);
errdefer self.allocator.free(samples);
// Scan for the min/max sample so the initial Y view spans the whole signal.
// Empty input leaves the degenerate range [0, 0].
var min_value: f64 = 0;
var max_value: f64 = 0;
if (samples.len > 0) {
min_value = samples[0];
max_value = samples[0];
for (samples) |sample| {
min_value = @min(min_value, sample);
max_value = @max(max_value, sample);
}
}
const loaded_file_index = findFreeSlot(FileChannel, &self.loaded_files) orelse return error.FileLimitReached;
self.loaded_files[loaded_file_index] = FileChannel{
.path = path_dupe,
.samples = samples
};
errdefer self.loaded_files[loaded_file_index] = null;
// X axis spans sample indices [0, len - 1]; clamped to 0 for empty input.
const from: f64 = 0;
const to = @max(@as(f64, @floatFromInt(samples.len)) - 1, 0);
self.channel_views.appendAssumeCapacity(ChannelView{
.view_rect = .{
// Y range is passed (max, min) so the axis is drawn top-down.
.x_range = RangeF64.init(from, to),
.y_range = RangeF64.init(max_value, min_value)
},
.x_range = RangeF64.init(from, to),
.y_range = RangeF64.init(max_value, min_value),
.source = .{ .file = loaded_file_index }
});
// NOTE(review): this errdefer is the last statement — no fallible call
// follows it, so it can never execute.
errdefer _ = self.channel_views.pop();
}
/// Registers a new channel view backed by a physical NI-DAQ device channel.
/// Device-backed views record the channel's `unit` (currently always Voltage)
/// and `sample_rate` so the ruler can label both axes.
/// Returns silently when no NI-DAQ driver handle is available.
/// Errors: `error.DeviceChannelLimitReached` when all slots are in use,
/// `error.InvalidChannelName` when no device name can be parsed from
/// `channel_name`, plus driver query failures.
pub fn appendChannelFromDevice(self: *App, channel_name: []const u8) !void {
const ni_daq = &(self.ni_daq orelse return);
const device_channel_index = findFreeSlot(DeviceChannel, &self.device_channels) orelse return error.DeviceChannelLimitReached;
// Keep null-terminated copies of the names for the C driver API.
const name_buff = try DeviceChannel.Name.fromSlice(channel_name);
const channel_name_z = name_buff.buffer[0..name_buff.len :0];
const device = NIDaq.getDeviceNameFromChannel(channel_name) orelse return error.InvalidChannelName;
const device_buff = try NIDaq.BoundedDeviceName.fromSlice(device);
const device_z = device_buff.buffer[0..device_buff.len :0];
// TODO: Add support for other measurement types
const unit = NIDaq.Unit.Voltage;
// Default Y range [0, 1]; replaced by the device's first reported range, if any.
var min_value: f64 = 0;
var max_value: f64 = 1;
// NOTE(review): AO (output) voltage ranges are queried here — confirm this
// matches the channel's intended direction.
const voltage_ranges = try ni_daq.listDeviceAOVoltageRanges(device_z);
if (voltage_ranges.len > 0) {
min_value = voltage_ranges[0].low;
max_value = voltage_ranges[0].high;
}
const max_sample_rate = try ni_daq.getMaxSampleRate(channel_name_z);
self.device_channels[device_channel_index] = DeviceChannel{
.name = name_buff,
// Fall back to the max rate when the min rate cannot be queried.
.min_sample_rate = ni_daq.getMinSampleRate(channel_name_z) catch max_sample_rate,
.max_sample_rate = max_sample_rate,
.min_value = min_value,
.max_value = max_value,
.samples = std.ArrayList(f64).init(self.allocator)
};
errdefer self.device_channels[device_channel_index] = null;
self.channel_views.appendAssumeCapacity(ChannelView{
.view_rect = .{
// Y range is passed (max, min) so the axis is drawn top-down.
.x_range = RangeF64.init(0, 0),
.y_range = RangeF64.init(max_value, min_value)
},
.x_range = RangeF64.init(0, 0),
.y_range = RangeF64.init(max_value, min_value),
.source = .{ .device = device_channel_index },
.sample_rate = max_sample_rate,
.unit = unit
});
// NOTE(review): this errdefer is the last statement — no fallible call
// follows it, so it can never execute.
errdefer _ = self.channel_views.pop();
}

View File

@@ -195,7 +195,7 @@ pub const Task = struct {
}
};
pub const AIMeasurementType = enum(i32) {
pub const Unit = enum(i32) {
Voltage = c.DAQmx_Val_Voltage,
VoltageRMS = c.DAQmx_Val_VoltageRMS,
Current = c.DAQmx_Val_Current,
@@ -227,7 +227,7 @@ pub const AIMeasurementType = enum(i32) {
Power = c.DAQmx_Val_Power,
_,
pub fn name(self: AIMeasurementType) []const u8 {
pub fn name(self: Unit) ?[]const u8 {
return switch (self) {
.Voltage => "Voltage",
.VoltageRMS => "Voltage RMS",
@@ -258,11 +258,49 @@ pub const AIMeasurementType = enum(i32) {
.TEDS_Sensor => "TEDS",
.Charge => "Charge",
.Power => "Power source",
_ => "Unknown"
_ => null
};
}
};
/// Analog-input measurement types supported by NI-DAQmx.
/// Each tag mirrors the integer value of the corresponding `Unit` variant, so
/// the two enums are interchangeable at the raw-integer level; this type names
/// the subset configurable on an AI channel. Non-exhaustive (`_`) because the
/// driver may report values newer than this binding.
pub const AIMeasurementType = enum(i32) {
    Voltage = @intFromEnum(Unit.Voltage),
    VoltageRMS = @intFromEnum(Unit.VoltageRMS),
    Current = @intFromEnum(Unit.Current),
    CurrentRMS = @intFromEnum(Unit.CurrentRMS),
    Voltage_CustomWithExcitation = @intFromEnum(Unit.Voltage_CustomWithExcitation),
    Bridge = @intFromEnum(Unit.Bridge),
    Freq_Voltage = @intFromEnum(Unit.Freq_Voltage),
    Resistance = @intFromEnum(Unit.Resistance),
    Temp_TC = @intFromEnum(Unit.Temp_TC),
    Temp_Thrmstr = @intFromEnum(Unit.Temp_Thrmstr),
    Temp_RTD = @intFromEnum(Unit.Temp_RTD),
    Temp_BuiltInSensor = @intFromEnum(Unit.Temp_BuiltInSensor),
    Strain_Gage = @intFromEnum(Unit.Strain_Gage),
    Rosette_Strain_Gage = @intFromEnum(Unit.Rosette_Strain_Gage),
    Position_LVDT = @intFromEnum(Unit.Position_LVDT),
    Position_RVDT = @intFromEnum(Unit.Position_RVDT),
    Position_EddyCurrentProximityProbe = @intFromEnum(Unit.Position_EddyCurrentProximityProbe),
    Accelerometer = @intFromEnum(Unit.Accelerometer),
    Acceleration_Charge = @intFromEnum(Unit.Acceleration_Charge),
    Acceleration_4WireDCVoltage = @intFromEnum(Unit.Acceleration_4WireDCVoltage),
    Velocity_IEPESensor = @intFromEnum(Unit.Velocity_IEPESensor),
    Force_Bridge = @intFromEnum(Unit.Force_Bridge),
    Force_IEPESensor = @intFromEnum(Unit.Force_IEPESensor),
    Pressure_Bridge = @intFromEnum(Unit.Pressure_Bridge),
    SoundPressure_Microphone = @intFromEnum(Unit.SoundPressure_Microphone),
    Torque_Bridge = @intFromEnum(Unit.Torque_Bridge),
    TEDS_Sensor = @intFromEnum(Unit.TEDS_Sensor),
    Charge = @intFromEnum(Unit.Charge),
    Power = @intFromEnum(Unit.Power),
    _,

    /// Human-readable name, or null when the raw driver value is unknown.
    pub fn name(self: AIMeasurementType) ?[]const u8 {
        // Fix: `Unit` and `AIMeasurementType` are distinct enum types and do
        // not coerce implicitly, so convert through the shared i32
        // representation. Safe because `Unit` is also non-exhaustive.
        const unit: Unit = @enumFromInt(@intFromEnum(self));
        return unit.name();
    }
};

/// Upper bound for measurement-type lists: one slot per named variant.
pub const max_ai_measurement_type_list_len = @typeInfo(AIMeasurementType).Enum.fields.len;
pub const AIMeasurementTypeList = std.BoundedArray(AIMeasurementType, max_ai_measurement_type_list_len);

View File

@@ -7,14 +7,9 @@ const log = std.log.scoped(.task_pool);
const TaskPool = @This();
const max_tasks = 32;
pub const Sampling = union(enum) {
finite: struct {
sample_rate: f64,
sample_count: u64
},
continous: struct {
sample_rate: f64
}
/// Sampling configuration for an acquisition task.
/// A null `sample_count` requests continuous acquisition at `sample_rate` Hz;
/// a non-null count requests a finite read of exactly that many samples.
pub const Sampling = struct {
sample_rate: f64,
sample_count: ?u64 = null
};
pub const Entry = struct {
@@ -80,13 +75,10 @@ fn readAnalog(task_pool: *TaskPool, entry: *Entry, timeout: f64) !void {
defer task_pool.mutex.unlock();
switch (entry.sampling) {
.finite => |args| {
try entry.samples.ensureTotalCapacity(args.sample_count);
},
.continous => |args| {
try entry.samples.ensureUnusedCapacity(@intFromFloat(@ceil(args.sample_rate)));
}
if (entry.sampling.sample_count) |sample_count| {
try entry.samples.ensureTotalCapacity(sample_count);
} else {
try entry.samples.ensureUnusedCapacity(@intFromFloat(@ceil(entry.sampling.sample_rate)));
}
const unused_capacity = entry.samples.unusedCapacitySlice();
@@ -144,13 +136,10 @@ pub fn launchAIVoltageChannel(
errdefer entry.in_use = false;
try task.createAIVoltageChannel(options);
switch (sampling) {
.continous => |args| {
try task.setContinousSampleRate(args.sample_rate);
},
.finite => |args| {
try task.setFiniteSampleRate(args.sample_rate, args.sample_count);
}
if (sampling.sample_count) |sample_count| {
try task.setFiniteSampleRate(sampling.sample_rate, sample_count);
} else {
try task.setContinousSampleRate(sampling.sample_rate);
}
samples.clearRetainingCapacity();

View File

@@ -275,6 +275,14 @@ fn showRulerTicks(self: *MainScreen, channel_view: *ChannelView, axis: UI.Axis)
return;
}
if (view_range.size() == 0) {
return;
}
if (full_range.size() == 0) {
return;
}
const ideal_pixels_per_division = 150;
var subdivisions: f32 = 20;
subdivisions = 20;
@@ -284,8 +292,6 @@ fn showRulerTicks(self: *MainScreen, channel_view: *ChannelView, axis: UI.Axis)
const pixels_per_division = step / view_range.size() * ruler_rect_size_along_axis;
assert(pixels_per_division > 0);
if (pixels_per_division > ideal_pixels_per_division*2) {
subdivisions *= 2;
} else if (pixels_per_division < ideal_pixels_per_division/2) {
@@ -392,13 +398,21 @@ fn showRuler(self: *MainScreen, ruler: *UI.Box, graph_box: *UI.Box, channel_view
is_zooming = axis_zoom.channel == channel_view and axis_zoom.axis == axis;
}
if (signal.hot) {
if (signal.hot and view_range.size() > 0) {
const mouse_tooltip = ui.mouseTooltip();
mouse_tooltip.beginChildren();
defer mouse_tooltip.endChildren();
if (channel_view.getSampleRange(axis).hasInclusive(mouse_position_on_graph)) {
_ = ui.label("{d:.3}", .{mouse_position_on_graph});
if (axis == .Y and channel_view.unit != null) {
const unit_name = channel_view.unit.?.name() orelse "Unknown";
_ = ui.label("{s}: {d:.3}", .{unit_name, mouse_position_on_graph});
} else if (axis == .X and channel_view.sample_rate != null) {
const sample_rate = channel_view.sample_rate.?;
_ = ui.label("{d:.3}s", .{mouse_position_on_graph / sample_rate});
} else {
_ = ui.label("{d:.3}", .{mouse_position_on_graph});
}
}
zoom_start = mouse_position_on_graph;
@@ -632,7 +646,7 @@ pub fn tick(self: *MainScreen) !void {
for (self.app.listChannelViews()) |*channel_view| {
const device_channel = self.app.getChannelSourceDevice(channel_view) orelse continue;
const sample_rate = device_channel.active_task.?.sampling.continous.sample_rate;
const sample_rate = device_channel.active_task.?.sampling.sample_rate;
const samples = device_channel.samples.items;
const sample_count: f32 = @floatFromInt(samples.len);