
Commit 5c83a17

Address comments and update nimax nce
DeborahOoi96 committed Jan 12, 2024
1 parent e2dbe41 commit 5c83a17
Showing 2 changed files with 87 additions and 3 deletions.
Binary file modified: source/tests/assets/grpc-device-daq-tests.nce (binary diff not shown)
90 changes: 87 additions & 3 deletions source/tests/system/nidaqmx_driver_api_tests.cpp
@@ -175,6 +175,72 @@ class NiDAQmxDriverApiTests : public Test {
return create_ai_voltage_chan(request, response);
}

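// Builds a CreateAIBridgeChanRequest for bridgeTester/ai0 with a full-bridge
// configuration, internal 2.5 V excitation, and a 350 ohm nominal bridge
// resistance; a non-empty custom_scale_name switches the units to
// BRIDGE_UNITS_FROM_CUSTOM_SCALE.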
CreateAIBridgeChanRequest create_ai_bridge_request(double min_val, double max_val, const std::string& custom_scale_name = "")
{
CreateAIBridgeChanRequest request;
set_request_session_name(request);
request.set_physical_channel("bridgeTester/ai0");
request.set_name_to_assign_to_channel("ai0");
request.set_min_val(min_val);
request.set_max_val(max_val);
if (custom_scale_name.empty()) {
request.set_units(BridgeUnits::BRIDGE_UNITS_VOLTS_PER_VOLT);
}
else {
request.set_custom_scale_name(custom_scale_name);
request.set_units(BridgeUnits::BRIDGE_UNITS_FROM_CUSTOM_SCALE);
}
request.set_bridge_config(BridgeConfiguration1::BRIDGE_CONFIGURATION1_FULL_BRIDGE);
request.set_voltage_excit_source(ExcitationSource::EXCITATION_SOURCE_INTERNAL);
request.set_voltage_excit_val(2.50);
request.set_nominal_bridge_resistance(350.00);
return request;
}

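// Issues the CreateAIBridgeChan RPC for the given request and raises on any
// gRPC or driver error; callers that do not need the response get a
// throwaway instance by default.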
::grpc::Status create_ai_bridge_chan(const CreateAIBridgeChanRequest& request, CreateAIBridgeChanResponse& response = ThrowawayResponse<CreateAIBridgeChanResponse>::response())
{
::grpc::ClientContext context;
auto status = stub()->CreateAIBridgeChan(&context, request, &response);
client::raise_if_error(status, context);
return status;
}

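// Convenience overload: builds a default bridge request for the given range
// and forwards it to the request-based wrapper above.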
::grpc::Status create_ai_bridge_chan(double min_val, double max_val, CreateAIBridgeChanResponse& response = ThrowawayResponse<CreateAIBridgeChanResponse>::response())
{
auto request = create_ai_bridge_request(min_val, max_val);
return create_ai_bridge_chan(request, response);
}

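// Builds a CreateAIThrmcplChanRequest for cDAQ1Mod2/ai0 using a J-type
// thermocouple, units of degrees Celsius, and a constant-value cold-junction
// compensation source fixed at 25.0 deg C.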
CreateAIThrmcplChanRequest create_ai_thrmcpl_request(double min_val, double max_val, const std::string& custom_scale_name = "")
{
CreateAIThrmcplChanRequest request;
set_request_session_name(request);
request.set_physical_channel("cDAQ1Mod2/ai0");
request.set_name_to_assign_to_channel("ai0");
request.set_min_val(min_val);
request.set_max_val(max_val);
request.set_units(TemperatureUnits::TEMPERATURE_UNITS_DEG_C);
request.set_thermocouple_type(ThermocoupleType1::THERMOCOUPLE_TYPE1_J_TYPE_TC);
request.set_cjc_val(25.0);
request.set_cjc_channel("");
request.set_cjc_source(CJCSource1::CJC_SOURCE1_CONST_VAL);
return request;
}

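// Issues the CreateAIThrmcplChan RPC for the given request and raises if the
// call or the driver reports an error.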
::grpc::Status create_ai_thrmcpl_chan(const CreateAIThrmcplChanRequest& request, CreateAIThrmcplChanResponse& response = ThrowawayResponse<CreateAIThrmcplChanResponse>::response())
{
::grpc::ClientContext context;
auto status = stub()->CreateAIThrmcplChan(&context, request, &response);
client::raise_if_error(status, context);
return status;
}

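// Convenience overload: builds a default thermocouple request for the given
// range and forwards it to the request-based wrapper above.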
::grpc::Status create_ai_thrmcpl_chan(double min_val, double max_val, CreateAIThrmcplChanResponse& response = ThrowawayResponse<CreateAIThrmcplChanResponse>::response())
{
auto request = create_ai_thrmcpl_request(min_val, max_val);
return create_ai_thrmcpl_chan(request, response);
}

CreateAOVoltageChanRequest create_ao_voltage_chan_request(double min_val, double max_val, const std::string& name = "ao0")
{
CreateAOVoltageChanRequest request;
@@ -887,19 +953,25 @@ class NiDAQmxDriverApiTests : public Test {
return status;
}

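// Performs the bridge offset nulling calibration over the given channel
// list; skip_unsupported_channels is forwarded on the request to control
// whether channels that do not support the calibration are skipped.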
::grpc::Status perform_bridge_offset_nulling_cal_ex(PerformBridgeOffsetNullingCalExResponse& response)
::grpc::Status perform_bridge_offset_nulling_cal_ex(std::string channel, bool skip_unsupported_channels, PerformBridgeOffsetNullingCalExResponse& response)
{
::grpc::ClientContext context;
PerformBridgeOffsetNullingCalExRequest request;
set_request_session_name(request);
request.set_channel(channel);
request.set_skip_unsupported_channels(skip_unsupported_channels);
auto status = stub()->PerformBridgeOffsetNullingCalEx(&context, request, &response);
client::raise_if_error(status, context);
return status;
}

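// Performs the thermocouple lead offset nulling calibration over the given
// channel list, forwarding skip_unsupported_channels on the request.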
::grpc::Status perform_thrmcpl_lead_offset_nulling_cal(PerformThrmcplLeadOffsetNullingCalResponse& response)
::grpc::Status perform_thrmcpl_lead_offset_nulling_cal(std::string channel, bool skip_unsupported_channels, PerformThrmcplLeadOffsetNullingCalResponse& response)
{
::grpc::ClientContext context;
PerformThrmcplLeadOffsetNullingCalRequest request;
set_request_session_name(request);
request.set_channel(channel);
request.set_skip_unsupported_channels(skip_unsupported_channels);
auto status = stub()->PerformThrmcplLeadOffsetNullingCal(&context, request, &response);
client::raise_if_error(status, context);
return status;
@@ -2085,14 +2157,26 @@ TEST_F(NiDAQmxDriverApiTests, LoadedVoltageTask_ReadAIData_ReturnsDataInExpected

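// Exercises PerformBridgeOffsetNullingCalEx on a bridge channel; an empty
// channel string is assumed here to apply the calibration to every channel
// in the task.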
TEST_F(NiDAQmxDriverApiTests, BridgeOffsetNullingCal_Succeeds)
{
const auto AI_MIN = 0.0;
const auto AI_MAX = 1.0;
create_ai_bridge_chan(AI_MIN, AI_MAX);

std::string channel = "";
bool skip_unsupported_channels = false;
auto response = PerformBridgeOffsetNullingCalExResponse{};
auto status = perform_bridge_offset_nulling_cal_ex(response);

auto status = perform_bridge_offset_nulling_cal_ex(channel, skip_unsupported_channels, response);
EXPECT_SUCCESS(status, response);
}

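// Exercises PerformThrmcplLeadOffsetNullingCal on a thermocouple channel
// created with the default request.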
TEST_F(NiDAQmxDriverApiTests, ThrmcplLeadOffsetNullingCal_Succeeds)
{
const auto AI_MIN = 0.0;
const auto AI_MAX = 1.0;
create_ai_thrmcpl_chan(AI_MIN, AI_MAX);

std::string channel = "";
bool skip_unsupported_channels = false;
auto response = PerformThrmcplLeadOffsetNullingCalResponse{};
auto status = perform_thrmcpl_lead_offset_nulling_cal(response);
