Example #1
static void runTorchNet(String prefix, int targetId = DNN_TARGET_CPU, String outLayerName = "",
                        bool check2ndBlob = false, bool isBinary = false)
{
    String suffix = (isBinary) ? ".dat" : ".txt";

    Net net = readNetFromTorch(_tf(prefix + "_net" + suffix), isBinary);
    ASSERT_FALSE(net.empty());

    net.setPreferableBackend(DNN_BACKEND_DEFAULT);
    net.setPreferableTarget(targetId);

    Mat inp, outRef;
    ASSERT_NO_THROW( inp = readTorchBlob(_tf(prefix + "_input" + suffix), isBinary) );
    ASSERT_NO_THROW( outRef = readTorchBlob(_tf(prefix + "_output" + suffix), isBinary) );

    if (outLayerName.empty())
        outLayerName = net.getLayerNames().back();

    net.setInput(inp, "0");
    std::vector<Mat> outBlobs;
    net.forward(outBlobs, outLayerName);
    normAssert(outRef, outBlobs[0]);

    if (check2ndBlob)
    {
        Mat out2 = outBlobs[1];
        Mat ref2 = readTorchBlob(_tf(prefix + "_output_2" + suffix), isBinary);
        normAssert(out2, ref2);
    }
}
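A minimal usage sketch for the helper above, assuming test data with a `net_conv` prefix (`net_conv_net.txt`, `net_conv_input.txt`, `net_conv_output.txt`) is available under the test data path; the prefix and the OpenCL target are assumptions made for illustration:

// Sketch only: "net_conv" and DNN_TARGET_OPENCL are illustrative assumptions.
TEST(Torch_Importer, run_convolution)
{
    runTorchNet("net_conv");                     // defaults: CPU target, last layer
    runTorchNet("net_conv", DNN_TARGET_OPENCL);  // same network on an OpenCL target
}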
Example #2
    void runTorchNet(const String& prefix, String outLayerName = "",
                     bool check2ndBlob = false, bool isBinary = false,
                     double l1 = 0.0, double lInf = 0.0)
    {
        String suffix = (isBinary) ? ".dat" : ".txt";

        Mat inp, outRef;
        ASSERT_NO_THROW( inp = readTorchBlob(_tf(prefix + "_input" + suffix), isBinary) );
        ASSERT_NO_THROW( outRef = readTorchBlob(_tf(prefix + "_output" + suffix), isBinary) );

        checkBackend(backend, target, &inp, &outRef);

        Net net = readNetFromTorch(_tf(prefix + "_net" + suffix), isBinary);
        ASSERT_FALSE(net.empty());

        net.setPreferableBackend(backend);
        net.setPreferableTarget(target);

        if (outLayerName.empty())
            outLayerName = net.getLayerNames().back();

        net.setInput(inp);
        std::vector<Mat> outBlobs;
        net.forward(outBlobs, outLayerName);
        l1 = l1 ? l1 : default_l1;
        lInf = lInf ? lInf : default_lInf;
        normAssert(outRef, outBlobs[0], "", l1, lInf);

        if (check2ndBlob && backend != DNN_BACKEND_INFERENCE_ENGINE)
        {
            Mat out2 = outBlobs[1];
            Mat ref2 = readTorchBlob(_tf(prefix + "_output_2" + suffix), isBinary);
            normAssert(out2, ref2, "", l1, lInf);
        }
    }
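A hedged sketch of how this fixture-style variant might be invoked from a parameterized test; the fixture name `Test_Torch_layers`, the `net_pool_max` prefix, and the tolerance values are assumptions for illustration:

// Sketch only: fixture name, prefix and tolerances are illustrative assumptions.
TEST_P(Test_Torch_layers, run_pool_max)
{
    // explicit tolerances; passing 0.0 falls back to default_l1 / default_lInf
    runTorchNet("net_pool_max", "", /*check2ndBlob=*/ true, /*isBinary=*/ false,
                /*l1=*/ 2e-5, /*lInf=*/ 2e-4);
}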
Example #3
static void runTorchNet(String prefix, String outLayerName = "",
                        bool check2ndBlob = false, bool isBinary = false)
{
    String suffix = (isBinary) ? ".dat" : ".txt";

    Net net;
    Ptr<Importer> importer = createTorchImporter(_tf(prefix + "_net" + suffix), isBinary);
    ASSERT_TRUE(importer != NULL);
    importer->populateNet(net);

    Blob inp, outRef;
    ASSERT_NO_THROW( inp = readTorchBlob(_tf(prefix + "_input" + suffix), isBinary) );
    ASSERT_NO_THROW( outRef = readTorchBlob(_tf(prefix + "_output" + suffix), isBinary) );

    net.setBlob(".0", inp);
    net.forward();
    if (outLayerName.empty())
        outLayerName = net.getLayerNames().back();
    Blob out = net.getBlob(outLayerName);

    normAssert(outRef, out);

    if (check2ndBlob)
    {
        Blob out2 = net.getBlob(outLayerName + ".1");
        Blob ref2 = readTorchBlob(_tf(prefix + "_output_2" + suffix), isBinary);
        normAssert(out2, ref2);
    }
}
Example #4
static std::vector<String> getOutputsNames(const Net& net)
{
    std::vector<String> names;
    std::vector<int> outLayers = net.getUnconnectedOutLayers();
    std::vector<String> layersNames = net.getLayerNames();
    names.resize(outLayers.size());
    // getUnconnectedOutLayers() returns layer ids starting at 1 (id 0 is
    // reserved for the network input), while getLayerNames() is 0-based,
    // hence the "- 1" offset.
    for (size_t i = 0; i < outLayers.size(); ++i)
        names[i] = layersNames[outLayers[i] - 1];
    return names;
}
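A short usage sketch: the helper is typically passed to `Net::forward()` to collect every unconnected output blob in one call. The model files, image name, and 416x416 input size below are placeholder assumptions:

// Sketch only: model/image names and the input size are placeholders.
Net net = readNet("model.weights", "model.cfg");
Mat frame = imread("input.jpg");
Mat blob = blobFromImage(frame, 1/255.0, Size(416, 416), Scalar(),
                         /*swapRB=*/ true, /*crop=*/ false);
net.setInput(blob);

std::vector<Mat> outs;
net.forward(outs, getOutputsNames(net));  // one Mat per unconnected output layer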
Example #5
    void expectNoFallbacks(Net& net)
    {
        // Check that all the layers are supported by the current backend and target.
        // Some layers might be fused, so their timings are zero.
        std::vector<double> timings;
        net.getPerfProfile(timings);
        std::vector<String> names = net.getLayerNames();
        CV_Assert(names.size() == timings.size());

        for (size_t i = 0; i < names.size(); ++i)
        {
            Ptr<dnn::Layer> l = net.getLayer(net.getLayerId(names[i]));
            bool fused = !timings[i];
            if ((!l->supportBackend(backend) || l->preferableTarget != target) && !fused)
                CV_Error(Error::StsNotImplemented, "Layer [" + l->name + "] of type [" +
                         l->type + "] is expected to have a backend implementation");
        }
    }
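A hedged sketch of how such a check might be used after a forward pass; the model and image file names are placeholders, and `backend`/`target` are assumed to be members of the surrounding test fixture, as in the method above:

// Sketch only: file names are placeholders; backend/target are fixture members.
Net net = readNetFromCaffe("squeezenet.prototxt", "squeezenet.caffemodel");
net.setPreferableBackend(backend);
net.setPreferableTarget(target);
net.setInput(blobFromImage(imread("cat.jpg"), 1.0, Size(227, 227)));
net.forward();            // getPerfProfile() has data only after a forward pass
expectNoFallbacks(net);   // fails if any layer ran without a backend implementation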
Example #6
TEST(Torch_Importer, ENet_accuracy)
{
    Net net;
    {
        Ptr<Importer> importer = createTorchImporter(_tf("Enet-model-best.net", false));
        ASSERT_TRUE(importer != NULL);
        importer->populateNet(net);
    }

    Mat sample = imread(_tf("street.png", false));
    cv::cvtColor(sample, sample, cv::COLOR_BGR2RGB);
    sample.convertTo(sample, CV_32F, 1/255.0);
    dnn::Blob inputBlob = dnn::Blob::fromImages(sample);

    net.setBlob("", inputBlob);
    net.forward();
    dnn::Blob out = net.getBlob(net.getLayerNames().back());

    Blob ref = blobFromNPY(_tf("torch_enet_prob.npy", false));
    normAssert(ref, out);
}