static void runTorchNet(String prefix, String outLayerName = "",
                        bool check2ndBlob = false, bool isBinary = false)
{
    String suffix = (isBinary) ? ".dat" : ".txt";

    // Import the serialized Torch net "<prefix>_net.{txt,dat}".
    Net net;
    Ptr<Importer> importer = createTorchImporter(_tf(prefix + "_net" + suffix), isBinary);
    ASSERT_TRUE(importer != NULL);
    importer->populateNet(net);

    // Load the reference input and output blobs stored alongside the net.
    Blob inp, outRef;
    ASSERT_NO_THROW( inp = readTorchBlob(_tf(prefix + "_input" + suffix), isBinary) );
    ASSERT_NO_THROW( outRef = readTorchBlob(_tf(prefix + "_output" + suffix), isBinary) );

    net.setBlob(".0", inp);
    net.forward();

    // By default, compare the output of the last layer.
    if (outLayerName.empty())
        outLayerName = net.getLayerNames().back();
    Blob out = net.getBlob(outLayerName);
    normAssert(outRef, out);

    // Optionally compare a second output blob exposed as "<layer>.1".
    if (check2ndBlob)
    {
        Blob out2 = net.getBlob(outLayerName + ".1");
        Blob ref2 = readTorchBlob(_tf(prefix + "_output_2" + suffix), isBinary);
        normAssert(out2, ref2);
    }
}
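// Illustrative usage of runTorchNet (a sketch, not part of the tests shown here):
// the "net_conv" and "net_pool_max" data prefixes are assumed names, standing in
// for whatever <prefix>_net / <prefix>_input / <prefix>_output fixtures exist in
// the test data directory.
TEST(Torch_Importer, run_convolution_example)
{
    runTorchNet("net_conv");
}

TEST(Torch_Importer, run_pool_max_example)
{
    // check2ndBlob = true additionally compares the layer's second output blob ("<layer>.1").
    runTorchNet("net_pool_max", /*outLayerName=*/"", /*check2ndBlob=*/true);
}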
TEST(Reproducibility_AlexNet, Accuracy)
{
    Net net;
    {
        const string proto = findDataFile("dnn/bvlc_alexnet.prototxt", false);
        const string model = findDataFile("dnn/bvlc_alexnet.caffemodel", false);
        Ptr<Importer> importer = createCaffeImporter(proto, model);
        ASSERT_TRUE(importer != NULL);
        importer->populateNet(net);
    }

    Mat sample = imread(_tf("grace_hopper_227.png"));
    ASSERT_TRUE(!sample.empty());

    Size inputSize(227, 227);
    if (sample.size() != inputSize)
        resize(sample, sample, inputSize);

    net.setBlob(".data", blobFromImage(sample, 1.));
    net.forward();

    Mat out = net.getBlob("prob");
    Mat ref = blobFromNPY(_tf("caffe_alexnet_prob.npy"));
    normAssert(ref, out);
}
TEST(Reproducibility_FCN, Accuracy)
{
    Net net;
    {
        const string proto = findDataFile("dnn/fcn8s-heavy-pascal.prototxt", false);
        const string model = findDataFile("dnn/fcn8s-heavy-pascal.caffemodel", false);
        Ptr<Importer> importer = createCaffeImporter(proto, model);
        ASSERT_TRUE(importer != NULL);
        importer->populateNet(net);
    }

    Mat sample = imread(_tf("street.png"));
    ASSERT_TRUE(!sample.empty());

    Size inputSize(500, 500);
    if (sample.size() != inputSize)
        resize(sample, sample, inputSize);

    net.setBlob(".data", blobFromImage(sample, 1.));
    net.forward();

    Mat out = net.getBlob("score");
    Mat ref = blobFromNPY(_tf("caffe_fcn8s_prob.npy"));
    normAssert(ref, out);
}
// Blob-based variant of the AlexNet reproducibility test; the sample image is
// converted from BGR to RGB before being fed to the network.
TEST(Reproducibility_AlexNet, Accuracy)
{
    Net net;
    {
        Ptr<Importer> importer = createCaffeImporter(_tf("bvlc_alexnet.prototxt"),
                                                     _tf("bvlc_alexnet.caffemodel"));
        ASSERT_TRUE(importer != NULL);
        importer->populateNet(net);
    }

    Mat sample = imread(_tf("grace_hopper_227.png"));
    ASSERT_TRUE(!sample.empty());
    cv::cvtColor(sample, sample, cv::COLOR_BGR2RGB);

    Size inputSize(227, 227);
    if (sample.size() != inputSize)
        resize(sample, sample, inputSize);

    net.setBlob(".data", dnn::Blob::fromImages(sample));
    net.forward();

    Blob out = net.getBlob("prob");
    Blob ref = blobFromNPY(_tf("caffe_alexnet_prob.npy"));
    normAssert(ref, out);
}
// Blob-based variant of the FCN reproducibility test, also converting BGR to RGB.
TEST(Reproducibility_FCN, Accuracy)
{
    Net net;
    {
        Ptr<Importer> importer = createCaffeImporter(_tf("fcn8s-heavy-pascal.prototxt"),
                                                     _tf("fcn8s-heavy-pascal.caffemodel"));
        ASSERT_TRUE(importer != NULL);
        importer->populateNet(net);
    }

    Mat sample = imread(_tf("street.png"));
    ASSERT_TRUE(!sample.empty());

    Size inputSize(500, 500);
    if (sample.size() != inputSize)
        resize(sample, sample, inputSize);
    cv::cvtColor(sample, sample, cv::COLOR_BGR2RGB);

    net.setBlob(".data", dnn::Blob::fromImages(sample));
    net.forward();

    Blob out = net.getBlob("score");
    Blob ref = blobFromNPY(_tf("caffe_fcn8s_prob.npy"));
    normAssert(ref, out);
}
TEST(Torch_Importer, ENet_accuracy)
{
    Net net;
    {
        Ptr<Importer> importer = createTorchImporter(_tf("Enet-model-best.net", false));
        ASSERT_TRUE(importer != NULL);
        importer->populateNet(net);
    }

    Mat sample = imread(_tf("street.png", false));
    ASSERT_TRUE(!sample.empty());
    cv::cvtColor(sample, sample, cv::COLOR_BGR2RGB);
    // Scale pixel values to [0, 1] before wrapping the image into a blob.
    sample.convertTo(sample, CV_32F, 1 / 255.0);
    dnn::Blob inputBlob = dnn::Blob::fromImages(sample);

    net.setBlob("", inputBlob);
    net.forward();

    dnn::Blob out = net.getBlob(net.getLayerNames().back());
    Blob ref = blobFromNPY(_tf("torch_enet_prob.npy", false));
    normAssert(ref, out);
}