Example #1
gnash::key::code
Lirc::getKey()
{
//    GNASH_REPORT_FUNCTION;
    key::code key = gnash::key::INVALID;
    
    byte_t buf[LIRC_PACKET_SIZE];
    memset(buf, 0, LIRC_PACKET_SIZE);
    
    // read the data if there is any
    readNet(buf, LIRC_PACKET_SIZE, TIMEOUT);
    
    string packet = reinterpret_cast<char *>(buf);
    string::size_type space1 = packet.find(" ") + 1;
    string::size_type space2 = packet.find(" ", space1) + 1;
    string::size_type space3 = packet.find(" ", space2) + 1;

    string code_str = packet.substr(0, space1);
    string count_str = packet.substr(space1, space2 - space1);
    string button_str = packet.substr(space2, space3 - space2);
    string control_str = packet.substr(space3);

    // Treat any uppercase letter as a printable key code ('A' through 'Z' inclusive).
    if (button_str[0] >= 'A' && button_str[0] <= 'Z') {
        std::cerr << "Character: " << button_str << std::endl;
        key = (gnash::key::code)button_str[0];
    }

    return key;
}
Example #2
TEST(readNet, Regression)
{
    Net net = readNet(findDataFile("dnn/squeezenet_v1.1.prototxt", false),
                      findDataFile("dnn/squeezenet_v1.1.caffemodel", false));
    EXPECT_FALSE(net.empty());
    net = readNet(findDataFile("dnn/opencv_face_detector.caffemodel", false),
                  findDataFile("dnn/opencv_face_detector.prototxt", false));
    EXPECT_FALSE(net.empty());
    net = readNet(findDataFile("dnn/openface_nn4.small2.v1.t7", false));
    EXPECT_FALSE(net.empty());
    net = readNet(findDataFile("dnn/tiny-yolo-voc.cfg", false),
                  findDataFile("dnn/tiny-yolo-voc.weights", false));
    EXPECT_FALSE(net.empty());
    net = readNet(findDataFile("dnn/ssd_mobilenet_v1_coco.pbtxt", false),
                  findDataFile("dnn/ssd_mobilenet_v1_coco.pb", false));
    EXPECT_FALSE(net.empty());
}
// inp = cv.imread('opencv_extra/testdata/cv/ximgproc/sources/08.png')
// inp = inp[:,:,[2, 1, 0]].astype(np.float32).reshape(1, 512, 512, 3)
// outs = sess.run([sess.graph.get_tensor_by_name('feature_fusion/Conv_7/Sigmoid:0'),
//                  sess.graph.get_tensor_by_name('feature_fusion/concat_3:0')],
//                 feed_dict={'input_images:0': inp})
// scores = np.ascontiguousarray(outs[0].transpose(0, 3, 1, 2))
// geometry = np.ascontiguousarray(outs[1].transpose(0, 3, 1, 2))
// np.save('east_text_detection.scores.npy', scores)
// np.save('east_text_detection.geometry.npy', geometry)
TEST_P(Test_TensorFlow_nets, EAST_text_detection)
{
    checkBackend();
#if defined(INF_ENGINE_RELEASE) && INF_ENGINE_RELEASE < 2018030000
    if (backend == DNN_BACKEND_INFERENCE_ENGINE && target == DNN_TARGET_MYRIAD)
        throw SkipTestException("Test is enabled starting from OpenVINO 2018R3");
#endif

    std::string netPath = findDataFile("dnn/frozen_east_text_detection.pb", false);
    std::string imgPath = findDataFile("cv/ximgproc/sources/08.png", false);
    std::string refScoresPath = findDataFile("dnn/east_text_detection.scores.npy", false);
    std::string refGeometryPath = findDataFile("dnn/east_text_detection.geometry.npy", false);

    Net net = readNet(netPath);

    net.setPreferableBackend(backend);
    net.setPreferableTarget(target);

    Mat img = imread(imgPath);
    Mat inp = blobFromImage(img, 1.0, Size(), Scalar(123.68, 116.78, 103.94), true, false);
    net.setInput(inp);

    std::vector<Mat> outs;
    std::vector<String> outNames(2);
    outNames[0] = "feature_fusion/Conv_7/Sigmoid";
    outNames[1] = "feature_fusion/concat_3";
    net.forward(outs, outNames);

    Mat scores = outs[0];
    Mat geometry = outs[1];

    // Scores are in range [0, 1]. Geometry values are in range [-0.23, 290]
    double l1_scores = default_l1, lInf_scores = default_lInf;
    double l1_geometry = default_l1, lInf_geometry = default_lInf;
    if (target == DNN_TARGET_OPENCL_FP16)
    {
        lInf_scores = 0.11;
        l1_geometry = 0.28; lInf_geometry = 5.94;
    }
    else if (target == DNN_TARGET_MYRIAD)
    {
        lInf_scores = 0.214;
        l1_geometry = 0.47; lInf_geometry = 15.34;
    }
    else
    {
        l1_geometry = 1e-4; lInf_geometry = 3e-3;
    }
    normAssert(scores, blobFromNPY(refScoresPath), "scores", l1_scores, lInf_scores);
    normAssert(geometry, blobFromNPY(refGeometryPath), "geometry", l1_geometry, lInf_geometry);
}
TEST(Test_TensorFlow, two_inputs)
{
    Net net = readNet(path("two_inputs_net.pbtxt"));
    net.setPreferableBackend(DNN_BACKEND_OPENCV);

    Mat firstInput(2, 3, CV_32FC1), secondInput(2, 3, CV_32FC1);
    randu(firstInput, -1, 1);
    randu(secondInput, -1, 1);

    net.setInput(firstInput, "first_input");
    net.setInput(secondInput, "second_input");
    Mat out = net.forward();

    normAssert(out, firstInput + secondInput);
}
    void testDarknetLayer(const std::string& name, bool hasWeights = false)
    {
        std::string cfg = findDataFile("dnn/darknet/" + name + ".cfg", false);
        std::string model = "";
        if (hasWeights)
            model = findDataFile("dnn/darknet/" + name + ".weights", false);
        Mat inp = blobFromNPY(findDataFile("dnn/darknet/" + name + "_in.npy", false));
        Mat ref = blobFromNPY(findDataFile("dnn/darknet/" + name + "_out.npy", false));

        checkBackend(&inp, &ref);

        Net net = readNet(cfg, model);
        net.setPreferableBackend(backend);
        net.setPreferableTarget(target);
        net.setInput(inp);
        Mat out = net.forward();
        normAssert(out, ref, "", default_l1, default_lInf);
    }
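For context, a helper like testDarknetLayer() above is normally driven from parameterized test cases. The following is a minimal sketch of such callers; the fixture name Test_Darknet_layers and the layer names are assumptions for illustration, not taken from the listing above.

// Hypothetical callers: each case feeds a layer name (and optionally weights)
// into the helper shown above. Fixture and layer names are illustrative only.
TEST_P(Test_Darknet_layers, shortcut)
{
    testDarknetLayer("shortcut");            // cfg-only layer, no .weights file
}

TEST_P(Test_Darknet_layers, convolutional)
{
    testDarknetLayer("convolutional", true); // layer that also loads weights
}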
bool
MPTreeMgr::readInput( const char * node , const char * pl , const char * net )
{
   Node * pNd1 , * pNd2;
   int arg1 , arg2 , move;

   cout << " >  readInput() : start reading input data\n";
   if ( !readNode     ( node ) )   return false;
   if ( !readPosition ( pl   ) )   return false;
   if ( !readNet      ( net  ) )   return false;
   buildInitMPTree();
   
   while ( !packMPTree() ) {
      pNd1 = pNd2 = NULL;
      arg1 = arg2 = -1;
      move = rand() % 4;
      perturbMPTree( &pNd1 , &pNd2 , &arg1 , &arg2 , move );
   }
   
   initCost();
   return true;
}
Example #7
const char *
Lirc::getButton()
{
//    GNASH_REPORT_FUNCTION;
 
    byte_t buf[LIRC_PACKET_SIZE];
    memset(buf, 0, LIRC_PACKET_SIZE);
    
    // read the data if there is any
    readNet(buf, LIRC_PACKET_SIZE, TIMEOUT);
    
    string packet = reinterpret_cast<char *>(buf);
    string::size_type space1 = packet.find(" ") + 1;
    string::size_type space2 = packet.find(" ", space1) + 1;
    string::size_type space3 = packet.find(" ", space2) + 1;
    
    string button_str = packet.substr(space2, space3-space2-1);

    memset(_button, 0, BUTTONSIZE);
    strncpy(_button, button_str.c_str(), BUTTONSIZE - 1);  // keep the NUL terminator left by memset()
    return _button;
}
    // Test object detection network from Darknet framework.
    void testDarknetModel(const std::string& cfg, const std::string& weights,
                          const std::vector<std::vector<int> >& refClassIds,
                          const std::vector<std::vector<float> >& refConfidences,
                          const std::vector<std::vector<Rect2d> >& refBoxes,
                          double scoreDiff, double iouDiff, float confThreshold = 0.24, float nmsThreshold = 0.4)
    {
        checkBackend();

        Mat img1 = imread(_tf("dog416.png"));
        Mat img2 = imread(_tf("street.png"));
        std::vector<Mat> samples(2);
        samples[0] = img1; samples[1] = img2;

        // Determine the test type: batched (two samples) or a single image.
        int batch_size = refClassIds.size();
        CV_Assert(batch_size == 1 || batch_size == 2);
        samples.resize(batch_size);

        Mat inp = blobFromImages(samples, 1.0/255, Size(416, 416), Scalar(), true, false);

        Net net = readNet(findDataFile("dnn/" + cfg, false),
                          findDataFile("dnn/" + weights, false));
        net.setPreferableBackend(backend);
        net.setPreferableTarget(target);
        net.setInput(inp);
        std::vector<Mat> outs;
        net.forward(outs, getOutputsNames(net));

        for (int b = 0; b < batch_size; ++b)
        {
            std::vector<int> classIds;
            std::vector<float> confidences;
            std::vector<Rect2d> boxes;
            for (int i = 0; i < outs.size(); ++i)
            {
                Mat out;
                if (batch_size > 1)
                {
                    // get the sample slice from 3D matrix (batch, box, classes+5)
                    Range ranges[3] = {Range(b, b+1), Range::all(), Range::all()};
                    out = outs[i](ranges).reshape(1, outs[i].size[1]);
                }
                else
                {
                    out = outs[i];
                }
                for (int j = 0; j < out.rows; ++j)
                {
                    Mat scores = out.row(j).colRange(5, out.cols);
                    double confidence;
                    Point maxLoc;
                    minMaxLoc(scores, 0, &confidence, 0, &maxLoc);

                    if (confidence > confThreshold) {
                        float* detection = out.ptr<float>(j);
                        double centerX = detection[0];
                        double centerY = detection[1];
                        double width = detection[2];
                        double height = detection[3];
                        boxes.push_back(Rect2d(centerX - 0.5 * width, centerY - 0.5 * height,
                                            width, height));
                        confidences.push_back(confidence);
                        classIds.push_back(maxLoc.x);
                    }
                }
            }

            // Apply non-maximum suppression (NMS) to the detected boxes.
            std::vector<int> indices;
            NMSBoxes(boxes, confidences, confThreshold, nmsThreshold, indices);

            std::vector<int> nms_classIds;
            std::vector<float> nms_confidences;
            std::vector<Rect2d> nms_boxes;

            for (size_t i = 0; i < indices.size(); ++i)
            {
                int idx = indices[i];
                Rect2d box = boxes[idx];
                float conf = confidences[idx];
                int class_id = classIds[idx];
                nms_boxes.push_back(box);
                nms_confidences.push_back(conf);
                nms_classIds.push_back(class_id);
            }

            normAssertDetections(refClassIds[b], refConfidences[b], refBoxes[b],
                                 nms_classIds, nms_confidences, nms_boxes,
                                 format("batch size %d, sample %d\n", batch_size, b).c_str(),
                                 confThreshold, scoreDiff, iouDiff);
        }
    }
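A caller of testDarknetModel() would supply per-sample reference detections alongside the tolerances. The sketch below only illustrates the argument layout; the fixture name Test_Darknet_nets, the file names, and the reference values are placeholders, not data taken from the listing above.

// Hypothetical caller illustrating the argument shapes only; the class ID,
// confidence and box below are placeholder values, not real reference data.
TEST_P(Test_Darknet_nets, TinyYoloVoc)
{
    std::vector<std::vector<int> >    refClassIds(1, std::vector<int>(1, 6));
    std::vector<std::vector<float> >  refConfidences(1, std::vector<float>(1, 0.9f));
    std::vector<std::vector<Rect2d> > refBoxes(1, std::vector<Rect2d>(1, Rect2d(0.1, 0.2, 0.3, 0.4)));
    testDarknetModel("tiny-yolo-voc.cfg", "tiny-yolo-voc.weights",
                     refClassIds, refConfidences, refBoxes,
                     /*scoreDiff*/ 8e-5, /*iouDiff*/ 3e-4);
}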