static void doPrepareModelShortcut(const sp<IDevice>& device, sp<IPreparedModel>* preparedModel) { ASSERT_NE(nullptr, preparedModel); Model model = createValidTestModel_1_0(); // see if service can handle model bool fullySupportsModel = false; Return<void> supportedOpsLaunchStatus = device->getSupportedOperations( model, [&fullySupportsModel](ErrorStatus status, const hidl_vec<bool>& supported) { ASSERT_EQ(ErrorStatus::NONE, status); ASSERT_NE(0ul, supported.size()); fullySupportsModel = std::all_of(supported.begin(), supported.end(), [](bool valid) { return valid; }); }); ASSERT_TRUE(supportedOpsLaunchStatus.isOk()); // launch prepare model sp<PreparedModelCallback> preparedModelCallback = new PreparedModelCallback(); ASSERT_NE(nullptr, preparedModelCallback.get()); Return<ErrorStatus> prepareLaunchStatus = device->prepareModel(model, preparedModelCallback); ASSERT_TRUE(prepareLaunchStatus.isOk()); ASSERT_EQ(ErrorStatus::NONE, static_cast<ErrorStatus>(prepareLaunchStatus)); // retrieve prepared model preparedModelCallback->wait(); ErrorStatus prepareReturnStatus = preparedModelCallback->getStatus(); *preparedModel = preparedModelCallback->getPreparedModel(); // The getSupportedOperations call returns a list of operations that are // guaranteed not to fail if prepareModel is called, and // 'fullySupportsModel' is true i.f.f. the entire model is guaranteed. // If a driver has any doubt that it can prepare an operation, it must // return false. So here, if a driver isn't sure if it can support an // operation, but reports that it successfully prepared the model, the test // can continue. if (!fullySupportsModel && prepareReturnStatus != ErrorStatus::NONE) { ASSERT_EQ(nullptr, preparedModel->get()); LOG(INFO) << "NN VTS: Early termination of test because vendor service cannot " "prepare model that it does not support."; std::cout << "[ ] Early termination of test because vendor service cannot " "prepare model that it does not support." 
<< std::endl; return; } ASSERT_EQ(ErrorStatus::NONE, prepareReturnStatus); ASSERT_NE(nullptr, preparedModel->get()); }
// supported operations negative test 2 TEST_F(NeuralnetworksHidlTest, SupportedOperationsNegativeTest2) { Model model = createInvalidTestModel2_1_0(); Return<void> ret = device->getSupportedOperations( model, [&](ErrorStatus status, const hidl_vec<bool>& supported) { EXPECT_EQ(ErrorStatus::INVALID_ARGUMENT, status); (void)supported; }); EXPECT_TRUE(ret.isOk()); }
// supported operations positive test TEST_F(NeuralnetworksHidlTest, SupportedOperationsPositiveTest) { Model model = createValidTestModel_1_0(); Return<void> ret = device->getSupportedOperations( model, [&](ErrorStatus status, const hidl_vec<bool>& supported) { EXPECT_EQ(ErrorStatus::NONE, status); EXPECT_EQ(model.operations.size(), supported.size()); }); EXPECT_TRUE(ret.isOk()); }
// initialization TEST_F(NeuralnetworksHidlTest, GetCapabilitiesTest) { Return<void> ret = device->getCapabilities([](ErrorStatus status, const Capabilities& capabilities) { EXPECT_EQ(ErrorStatus::NONE, status); EXPECT_LT(0.0f, capabilities.float32Performance.execTime); EXPECT_LT(0.0f, capabilities.float32Performance.powerUsage); EXPECT_LT(0.0f, capabilities.quantized8Performance.execTime); EXPECT_LT(0.0f, capabilities.quantized8Performance.powerUsage); }); EXPECT_TRUE(ret.isOk()); }
// Sets up the fast-message-queue plumbing used to read audio data from the HAL.
// Requests the command/data/status queues from the remote stream, validates
// them, creates the event flag used for reader/writer signaling, and boosts the
// HAL I/O thread's priority. Returns OK on success, NO_INIT if any queue or the
// event flag could not be obtained, or the translated HAL error otherwise.
status_t StreamInHalHidl::prepareForReading(size_t bufferSize) {
    std::unique_ptr<CommandMQ> tempCommandMQ;
    std::unique_ptr<DataMQ> tempDataMQ;
    std::unique_ptr<StatusMQ> tempStatusMQ;
    // Initialize explicitly: if the HIDL transaction itself fails, the
    // callback below never runs, and the previous uninitialized |retval|
    // (and thread ids) would be read as indeterminate values.
    Result retval = Result::NOT_INITIALIZED;
    pid_t halThreadPid = -1, halThreadTid = -1;
    Return<void> ret = mStream->prepareForReading(
            1, bufferSize,
            [&](Result r, const CommandMQ::Descriptor& commandMQ, const DataMQ::Descriptor& dataMQ,
                const StatusMQ::Descriptor& statusMQ, const ThreadInfo& halThreadInfo) {
                retval = r;
                if (retval == Result::OK) {
                    tempCommandMQ.reset(new CommandMQ(commandMQ));
                    tempDataMQ.reset(new DataMQ(dataMQ));
                    tempStatusMQ.reset(new StatusMQ(statusMQ));
                    if (tempDataMQ->isValid() && tempDataMQ->getEventFlagWord()) {
                        EventFlag::createEventFlag(tempDataMQ->getEventFlagWord(), &mEfGroup);
                    }
                    halThreadPid = halThreadInfo.pid;
                    halThreadTid = halThreadInfo.tid;
                }
            });
    if (!ret.isOk() || retval != Result::OK) {
        return processReturn("prepareForReading", ret, retval);
    }
    // All three queues and the event flag are required for operation.
    if (!tempCommandMQ || !tempCommandMQ->isValid() || !tempDataMQ || !tempDataMQ->isValid() ||
        !tempStatusMQ || !tempStatusMQ->isValid() || !mEfGroup) {
        ALOGE_IF(!tempCommandMQ, "Failed to obtain command message queue for writing");
        ALOGE_IF(tempCommandMQ && !tempCommandMQ->isValid(),
                 "Command message queue for writing is invalid");
        ALOGE_IF(!tempDataMQ, "Failed to obtain data message queue for reading");
        ALOGE_IF(tempDataMQ && !tempDataMQ->isValid(),
                 "Data message queue for reading is invalid");
        ALOGE_IF(!tempStatusMQ, "Failed to obtain status message queue for reading");
        ALOGE_IF(tempStatusMQ && !tempStatusMQ->isValid(),
                 "Status message queue for reading is invalid");
        ALOGE_IF(!mEfGroup, "Event flag creation for reading failed");
        return NO_INIT;
    }
    // Give the HAL's I/O thread elevated scheduling so reads keep up.
    requestHalThreadPriority(halThreadPid, halThreadTid);

    mCommandMQ = std::move(tempCommandMQ);
    mDataMQ = std::move(tempDataMQ);
    mStatusMQ = std::move(tempStatusMQ);
    mReaderClient = gettid();
    return OK;
}
// prepare simple model negative test 2 TEST_F(NeuralnetworksHidlTest, SimplePrepareModelNegativeTest2) { Model model = createInvalidTestModel2_1_0(); sp<PreparedModelCallback> preparedModelCallback = new PreparedModelCallback(); ASSERT_NE(nullptr, preparedModelCallback.get()); Return<ErrorStatus> prepareLaunchStatus = device->prepareModel(model, preparedModelCallback); ASSERT_TRUE(prepareLaunchStatus.isOk()); EXPECT_EQ(ErrorStatus::INVALID_ARGUMENT, static_cast<ErrorStatus>(prepareLaunchStatus)); preparedModelCallback->wait(); ErrorStatus prepareReturnStatus = preparedModelCallback->getStatus(); EXPECT_EQ(ErrorStatus::INVALID_ARGUMENT, prepareReturnStatus); sp<IPreparedModel> preparedModel = preparedModelCallback->getPreparedModel(); EXPECT_EQ(nullptr, preparedModel.get()); }
// execute simple graph negative test 2 TEST_F(NeuralnetworksHidlTest, SimpleExecuteGraphNegativeTest2) { sp<IPreparedModel> preparedModel; ASSERT_NO_FATAL_FAILURE(doPrepareModelShortcut(device, &preparedModel)); if (preparedModel == nullptr) { return; } Request request = createInvalidTestRequest2(); sp<ExecutionCallback> executionCallback = new ExecutionCallback(); ASSERT_NE(nullptr, executionCallback.get()); Return<ErrorStatus> executeLaunchStatus = preparedModel->execute(request, executionCallback); ASSERT_TRUE(executeLaunchStatus.isOk()); EXPECT_EQ(ErrorStatus::INVALID_ARGUMENT, static_cast<ErrorStatus>(executeLaunchStatus)); executionCallback->wait(); ErrorStatus executionReturnStatus = executionCallback->getStatus(); EXPECT_EQ(ErrorStatus::INVALID_ARGUMENT, executionReturnStatus); }
// execute simple graph positive test TEST_F(NeuralnetworksHidlTest, SimpleExecuteGraphPositiveTest) { std::vector<float> outputData = {-1.0f, -1.0f, -1.0f, -1.0f}; std::vector<float> expectedData = {6.0f, 8.0f, 10.0f, 12.0f}; const uint32_t OUTPUT = 1; sp<IPreparedModel> preparedModel; ASSERT_NO_FATAL_FAILURE(doPrepareModelShortcut(device, &preparedModel)); if (preparedModel == nullptr) { return; } Request request = createValidTestRequest(); auto postWork = [&] { sp<IMemory> outputMemory = mapMemory(request.pools[OUTPUT]); if (outputMemory == nullptr) { return false; } float* outputPtr = reinterpret_cast<float*>(static_cast<void*>(outputMemory->getPointer())); if (outputPtr == nullptr) { return false; } outputMemory->read(); std::copy(outputPtr, outputPtr + outputData.size(), outputData.begin()); outputMemory->commit(); return true; }; sp<ExecutionCallback> executionCallback = new ExecutionCallback(); ASSERT_NE(nullptr, executionCallback.get()); executionCallback->on_finish(postWork); Return<ErrorStatus> executeLaunchStatus = preparedModel->execute(request, executionCallback); ASSERT_TRUE(executeLaunchStatus.isOk()); EXPECT_EQ(ErrorStatus::NONE, static_cast<ErrorStatus>(executeLaunchStatus)); executionCallback->wait(); ErrorStatus executionReturnStatus = executionCallback->getStatus(); EXPECT_EQ(ErrorStatus::NONE, executionReturnStatus); EXPECT_EQ(expectedData, outputData); }
// status test TEST_F(NeuralnetworksHidlTest, StatusTest) { Return<DeviceStatus> status = device->getStatus(); ASSERT_TRUE(status.isOk()); EXPECT_EQ(DeviceStatus::AVAILABLE, static_cast<DeviceStatus>(status)); }