bool test_google_float_workload_cpu_images_classification::run()
{
    bool  run_ok = true;
    test_measurement_result   run_result;
    run_result.description = "RUN SUMMARY: " + test_description;

    C_time_control  run_timer;

    std::cout << "-> Testing: " << test_description << std::endl;

    try {
        if(!init()) throw std::runtime_error("init() returned false, cannot run test");
        run_timer.tick();   //start time measurement
        run_result << std::string("run test with " + current_tested_device->get_device_description());

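        // Repeat the test for several batch sizes (single image, small batch, full batch);
        // the workflow is recompiled for each batch size.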
        for(uint32_t batch :{1,8,48}) {

            C_time_control  loop_timer;

            // compiling workload
            nn_workload_t             *workload = nullptr;
            NN_WORKLOAD_DATA_TYPE  input_format = NN_WORKLOAD_DATA_TYPE_F32_ZXY_BATCH;
            NN_WORKLOAD_DATA_TYPE output_format = NN_WORKLOAD_DATA_TYPE_F32_1D_BATCH;

            auto status = di->workflow_compile_function(&workload, di->device, workflow, &input_format, &output_format, batch);
            if(!workload) throw std::runtime_error("workload compilation failed for batch = " + std::to_string(batch)
                                                   + " status: " + std::to_string(status));

            test_measurement_result local_result;
            local_result.description = "RUN PART: (batch " + std::to_string(batch)+") execution of " + test_description;
            bool local_ok = true;

            auto images_list_iterator = images_list.begin();
            auto images_list_end      = images_list.end();

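            // Walk the image list in chunks of at most batch images; the last chunk may be smaller.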
            while(images_list_iterator != images_list_end)
            {
                auto diff_itr = images_list_end - images_list_iterator < batch
                                ? images_list_end - images_list_iterator
                                : batch;

                std::vector< std::string >   batch_images(images_list_iterator,images_list_iterator + diff_itr);

                images_list_iterator += diff_itr;

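                // Decode and preprocess the chunk of images into a single 4D input buffer for the workload.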
                nn::data< float,4 > *images = nullptr;
                images = nn_data_load_from_image_list(&batch_images, img_size, image_process, batch, RGB_order);

                if(images) {
                    nn_data_t *input_array[1] ={images};
                    nn::data<float, 2> *workload_output = new nn::data<float, 2>(1000, batch);
                    if(workload_output == nullptr) throw std::runtime_error("unable to create workload_output for batch = " + std::to_string(batch));

                    nn::data<float> *output_array_cmpl[1] ={nn::data_cast<float,0>(workload_output)};

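                    // Execute the workload on the whole chunk; it writes 1000 scores per image into workload_output.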
                    di->workload_execute_function(workload,reinterpret_cast<void**>(input_array),reinterpret_cast<void**>(output_array_cmpl),&status);

                    float *value_cmpl = reinterpret_cast<float *>(workload_output->buffer);

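                    // For every image, sum the absolute differences against its reference file (<image>.txt)
                    // and compare that sum with the pass threshold.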
                    for(auto &image_filename : batch_images) {
                        std::ifstream reference_output_file(image_filename + ".txt", std::ifstream::in);
                        // Compare the workload output with the reference values for this image
                        float  difference = 0;
                        for(int index = 0; index < 1000; ++index) {

                            std::string reference_value_str;
                            std::getline(reference_output_file,reference_value_str);
                            float reference_value = std::stof(reference_value_str);
                            float delta = value_cmpl[index]-reference_value;

                            difference += std::fabs(delta);   // float overload, not the integer abs()

                        }
                        if(difference < threshold_to_pass_test)
                            local_result << std::string("note: " + image_filename + " difference = " + std::to_string(difference));
                        else {
                            local_result << std::string("error: image file: "
                                                        + image_filename
                                                        +" the difference exceeded the allowable threshold for compliance: "
                                                        + std::to_string(difference)
                                                        + " > "
                                                        + std::to_string(threshold_to_pass_test));
                            local_ok = false;
                            run_ok = false;
                        }

                        reference_output_file.close();
                        value_cmpl += 1000;
                    }

                    batch_images.clear();
                    delete images;            // delete on nullptr is a no-op, no guard needed
                    delete workload_output;

                }

            }

            // batch loop summary:
            local_result.passed = local_ok;

            loop_timer.tock();
            local_result.time_consumed = loop_timer.get_time_diff();
            local_result.clocks_consumed = loop_timer.get_clocks_diff();
            tests_results << local_result;
            if(workload != nullptr) di->workload_delete_function(workload);
        } // END: for(uint32_t batch :{1,8,48})
    }
    catch(std::runtime_error &error) {
        run_result << "error: " + std::string(error.what());
        run_ok = false;
    }
    catch(...) {
        run_result << "error: unknown";
        run_ok = false;
    }

    run_timer.tock();
    run_result.time_consumed = run_timer.get_time_diff();
    run_result.clocks_consumed = run_timer.get_clocks_diff();

    run_result.passed = run_ok;
    tests_results << run_result;
    if (!done()) run_ok=false;
    std::cout << "<- Test " << (run_ok ? "passed" : "failed") << std::endl;
    return run_ok;
}
Example #2
int main(int argc, char *argv[])
{
    try {
        if (argc <= 1) {
            const std::string path(argv[0]);
            std::cout << "usage: " << path.substr(path.find_last_of("/\\") + 1) <<
R"_help_( <parameters> input_dir

<parameters> include:
    --device=<name>
        name of the dynamic library (without suffix) containing the
        computational device used for the demo
    --batch=<value>
        number of images classified together; larger batch sizes
        give better throughput
    --model=<name>
        name of the network model used for classification;
        can be caffenet_float or caffenet_int16
    --input=<directory>
        path to the directory containing the images to be classified
    --config=<name>
        name of a config file with additional parameters;
        command-line parameters take priority over config-file ones

If the last parameter does not fit the --key=value format, it is assumed to be the --input value.
Instead of "--", "-" or "/" can also be used.
)_help_";
            return 0;
        }
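        // Example invocation (hypothetical binary name and image directory):
        //   classification_demo --device=device_cpu --batch=48 --model=caffenet_float ./images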

        // convert argc/argv to vector of arguments
        std::vector<std::string> arg;
        for(int n=1; n<argc; ++n) arg.push_back(argv[n]);

        // parse configuration (command line and from file)
        using config_t = std::map<std::string, std::string>;
        config_t config;
        parse_parameters(config, arg);
        { // find if config file name was given from command line
            config_t::iterator it;
            if((it = config.find("config"))!=std::end(config)){
                std::ifstream config_file(it->second);
                std::vector<std::string> config_lines;
                using istream_it = std::istream_iterator<std::string>;
                std::copy(istream_it(config_file), istream_it(), std::back_inserter(config_lines));
                parse_parameters(config, config_lines);
            }
        }
        { // validate & add default value for missing arguments
            auto not_found = std::end(config);
            if(config.find("device")==not_found) config["device"]="device_cpu";
            if(config.find("batch") ==not_found) config["batch"]="48";
            if(config.find("model") ==not_found) config["model"]="caffenet_float";
            if(config.find("input") ==not_found) throw std::runtime_error("missing input directory; run without arguments to get help");
            if(config.find("loops") ==not_found) config["loops"]="1";
        }

        // load images from input directory
        auto images_list = get_directory_contents(config["input"]);
        if(images_list.empty()) throw std::runtime_error(std::string("directory ")+config["input"]+" does not contain any images that can be processed");

        // RAII for loading library and device initialization
        scoped_library      library(config["device"]+dynamic_library_extension);
        scoped_device_0     device(library);

        auto workload = primitives_workload::instance().get(config["model"]);

        const int config_batch = std::stoi(config["batch"]);
        if(config_batch<=0) throw std::runtime_error("batch_size is 0 or negative");

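        // Initialize the selected model for the chosen device and batch size; the timing below measures this setup.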
        workload->init(device.primitives, device.device, config_batch);

        C_time_control timer;
        timer.tock();

        std::cout
            << "Workload initialized at "
            << timer.time_diff_string() <<" [" <<timer.clocks_diff_string() <<"]"
            << std::endl
            << "--------------------------------------------------------"
            << std::endl;

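        // Output buffer reused for every batch: 1000 class scores per image.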
        auto absolute_output_cmpl = new nn::data<float, 2>(1000, config_batch);

        std::vector<std::string>   batch_images;
        uint16_t                   image_counter = 0;     //numbering images within single batch
        bool                       start_batch = false;

        const std::string path(argv[0]);
        std::string appname(path.substr(path.find_last_of("/\\") + 1));

        C_report_maker report(appname,library.name, config["model"], config_batch);

        std::cout << "Please wait, trying to recognize " << images_list.size() << " images" << std::endl;


        auto images_list_iterator = images_list.begin();
        auto images_list_end = images_list.end();

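        // Classify the images in chunks of at most config_batch; the last chunk may be smaller.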
        while(images_list_iterator!=images_list_end) {

            auto diff_itr = images_list_end - images_list_iterator < config_batch
                          ? images_list_end - images_list_iterator
                          : config_batch;

            std::vector<std::string>   batch_images(images_list_iterator,images_list_iterator+diff_itr);

            images_list_iterator+=diff_itr;

            nn::data<float,4> *images = nullptr;
            images = nn_data_load_from_image_list(&batch_images,
                                                  workload->get_img_size(),
                                                  workload->image_process,
                                                  config_batch,
                                                  workload->RGB_order);

            if(images) {

                images_recognition_batch_t  temp_report_recognition_batch;

                {
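                    // Time the recognition; with --loops > 1 the reported time and clock counts are per-iteration averages.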
                    NN_API_STATUS  status;
                    C_time_control timer;
                    auto loops = std::stoi(config["loops"]);
                    for(size_t i=0; i <loops; ++i)
                    {
                        workload->execute(*images,*absolute_output_cmpl);
                    }
                    timer.tock();
                    temp_report_recognition_batch.time_of_recognizing = timer.get_time_diff()/loops;
                    temp_report_recognition_batch.clocks_of_recognizing = timer.get_clocks_diff()/loops;
                }

                delete images;

                float* value_cmpl = reinterpret_cast<float*>(absolute_output_cmpl->buffer);

                auto batch_images_iterator = batch_images.begin();

                for(auto b = 0u; (b < config_batch) && (b < batch_images.size()); ++b) {

                    image_recognition_item_t    temp_report_recognition_item;

                    recognition_state_t         temp_recognition_state;
                    std::map <float,int>       output_values;

                    temp_report_recognition_item.recognitions.clear();
                    temp_report_recognition_item.recognized_image = *batch_images_iterator++;

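                    // Index all 1000 scores by value in an ordered map so the top-5 recognitions can be read from its highest keys.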
                    for(int index = 0; index < 1000; ++index) {
                        output_values.insert(std::make_pair(value_cmpl[index],index));
                        temp_report_recognition_item.nnet_output.push_back(value_cmpl[index]);
                    }
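                    // The WordNet id (wwid) is assumed to be embedded in the file name right after a '['; otherwise a placeholder id is used.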
                    temp_report_recognition_item.wwid = temp_report_recognition_item.recognized_image.find('[') != std::string::npos
                        ? temp_report_recognition_item.recognized_image.substr(temp_report_recognition_item.recognized_image.find('[') + 1,9)
                        : "n000000000";
                    auto iterator = --output_values.end();
                    for(int i = 1; i < 6 && iterator != output_values.end(); i++)
                    {
                        temp_recognition_state.label    = workload->labels[iterator->second];
                        temp_recognition_state.wwid     = workload->wwids[iterator->second];
                        temp_recognition_state.accuracy = iterator->first;
                        temp_report_recognition_item.recognitions.push_back(temp_recognition_state);
                        --iterator;
                    }

                    temp_report_recognition_batch.recognized_images.push_back(temp_report_recognition_item);
                    output_values.clear();
                    value_cmpl += 1000;
                }
                batch_images.clear();
                report.recognized_batches.push_back(temp_report_recognition_batch);
                temp_report_recognition_batch.recognized_images.clear();
            }
        }
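        // Write the HTML report and open it with the platform's configured viewer command.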
        std::string html_filename="result_"+get_timestamp()+".html";

        report.print_to_html_file(html_filename, "Results of recognition");
        system((show_HTML_command+html_filename).c_str());
        return 0;
    }