Example #1
void testInPlaceActivation(LayerParams& lp)
{
    EXPECT_FALSE(lp.name.empty());

    LayerParams pool;
    pool.set("pool", "ave");
    pool.set("kernel_w", 2);
    pool.set("kernel_h", 2);
    pool.set("stride_w", 2);
    pool.set("stride_h", 2);
    pool.type = "Pooling";

    // Build the network: input -> average pooling -> the activation layer under test.
    Net net;
    int poolId = net.addLayer(pool.name, pool.type, pool);
    net.connect(0, 0, poolId, 0);
    net.addLayerToPrev(lp.name, lp.type, lp);

    // Reference pass on the default backend.
    Mat input({1, kNumChannels, 10, 10}, CV_32F);
    randu(input, -1.0f, 1.0f);
    net.setInput(input);
    Mat outputDefault = net.forward(lp.name).clone();

    // The same input through the Halide backend must produce a matching output.
    net.setInput(input);
    net.setPreferableBackend(DNN_BACKEND_HALIDE);
    Mat outputHalide = net.forward(lp.name).clone();
    normAssert(outputDefault, outputHalide);
}
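
This helper is meant to be driven by a test that supplies the activation layer to be fused with the preceding pooling layer. A minimal sketch of such a caller (the test name, the ReLU layer parameters and the slope value are illustrative assumptions, not part of the listing above):

TEST(ReLU_InPlaceFusion, Accuracy)
{
    // Build an activation layer and push it through the shared in-place check.
    LayerParams lp;
    lp.set("negative_slope", 0.25f);  // illustrative slope value
    lp.type = "ReLU";
    lp.name = "testLayer";
    testInPlaceActivation(lp);
}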
Example #2
////////////////////////////////////////////////////////////////////////////
// Mixed backends
////////////////////////////////////////////////////////////////////////////
TEST(MixedBackends_Halide_Default_Halide, Accuracy)
{
    // Just a layer that supports the Halide backend.
    LayerParams lrn;
    lrn.type = "LRN";
    lrn.name = "testLRN";

    // A layer that does not support the Halide backend yet.
    LayerParams mvn;
    mvn.type = "MVN";
    mvn.name = "testMVN";

    // A Halide-supported layer again.
    LayerParams lrn2;
    lrn2.type = "LRN";
    lrn2.name = "testLRN2";

    Net net;
    int lrnId = net.addLayer(lrn.name, lrn.type, lrn);
    net.connect(0, 0, lrnId, 0);
    net.addLayerToPrev(mvn.name, mvn.type, mvn);
    net.addLayerToPrev(lrn2.name, lrn2.type, lrn2);

    Mat input({4, 3, 5, 6}, CV_32F);
    randu(input, -1.0f, 1.0f);
    net.setInput(input);
    Mat outputDefault = net.forward().clone();

    // Re-run with Halide preferred; the unsupported MVN layer falls back to the default implementation.
    net.setPreferableBackend(DNN_BACKEND_HALIDE);
    net.setInput(input);
    Mat outputHalide = net.forward().clone();
    normAssert(outputDefault, outputHalide);

    // Repeat the comparison on the OpenCL target of the Halide backend.
    net.setPreferableTarget(DNN_TARGET_OPENCL);
    net.setInput(input);
    outputHalide = net.forward().clone();
    normAssert(outputDefault, outputHalide);
}
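
All three listings compare outputs with normAssert. A rough sketch of what such a helper typically checks, assuming the OpenCV and GoogleTest headers already used by these tests (the name normAssertSketch and the default thresholds are illustrative, not the definition from the test suite):

static void normAssertSketch(const cv::Mat& ref, const cv::Mat& actual,
                             double l1 = 1e-5, double lInf = 1e-4)
{
    // Mean absolute difference per element and the largest single-element difference.
    double normL1 = cv::norm(ref, actual, cv::NORM_L1) / ref.total();
    double normInf = cv::norm(ref, actual, cv::NORM_INF);
    EXPECT_LE(normL1, l1);
    EXPECT_LE(normInf, lInf);
}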
Example #3
    void testONNXModels(const String& basename, const Extension ext = npy,
                        const double l1 = 0, const float lInf = 0, const bool useSoftmax = false,
                        bool checkNoFallbacks = true)
    {
        String onnxmodel = _tf("models/" + basename + ".onnx");
        Mat inp, ref;
        if (ext == npy) {
            inp = blobFromNPY(_tf("data/input_" + basename + ".npy"));
            ref = blobFromNPY(_tf("data/output_" + basename + ".npy"));
        }
        else if (ext == pb) {
            inp = readTensorFromONNX(_tf("data/input_" + basename + ".pb"));
            ref = readTensorFromONNX(_tf("data/output_" + basename + ".pb"));
        }
        else
            CV_Error(Error::StsUnsupportedFormat, "Unsupported extension");

        checkBackend(&inp, &ref);
        Net net = readNetFromONNX(onnxmodel);
        ASSERT_FALSE(net.empty());

        net.setPreferableBackend(backend);
        net.setPreferableTarget(target);

        net.setInput(inp);
        Mat out = net.forward("");

        // When requested, compare SoftMax outputs of both the result and the reference instead of raw scores.
        if (useSoftmax)
        {
            LayerParams lp;
            Net netSoftmax;
            netSoftmax.addLayerToPrev("softmaxLayer", "SoftMax", lp);
            netSoftmax.setPreferableBackend(DNN_BACKEND_OPENCV);

            netSoftmax.setInput(out);
            out = netSoftmax.forward();

            netSoftmax.setInput(ref);
            ref = netSoftmax.forward();
        }
        // Use per-test tolerances when given, otherwise the fixture defaults.
        normAssert(ref, out, "", l1 ? l1 : default_l1, lInf ? lInf : default_lInf);
        if (checkNoFallbacks)
            expectNoFallbacksFromIE(net);
    }
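
A caller of this helper only needs to name the model; thresholds and softmax handling fall back to defaults. A sketch of how such a test case might look, assuming it lives inside the same parameterized fixture that provides backend, target and the helper above (the fixture name Test_ONNX_layers and the model name "maxpooling" are placeholders):

TEST_P(Test_ONNX_layers, MaxPooling)
{
    // Compares models/maxpooling.onnx against data/input_maxpooling.npy and data/output_maxpooling.npy.
    testONNXModels("maxpooling");
}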