// Basic UMat sanity checks: ROI alignment and clipping, clone equality,
// element metadata vs. the test parameters, release()/empty() semantics,
// submatrix detection, and n-dimensional construction with randomized
// extents and type.
TEST_P(UMatBasicTests, base)
{
    // Round the ROI origin down and its width up to a multiple of 4,
    // then clip the ROI to the full image bounds. (Height is deliberately
    // left unaligned.)
    const int align_mask = 3;
    roi.x &= ~align_mask;
    roi.y &= ~align_mask;
    roi.width = (roi.width + align_mask) & ~align_mask;
    roi &= Rect(0, 0, ua.cols, ua.rows);
    if(useRoi)
    {
        // Narrow 'ua' to the sub-matrix view for the ROI-enabled variant.
        ua = UMat(ua,roi);
    }
    // A clone must compare exactly equal (zero tolerance) to its source.
    UMat ub = ua.clone();
    EXPECT_MAT_NEAR(ub,ua,0);
    // Element metadata must match the fixture parameters and the
    // reference Mat 'a'.
    ASSERT_EQ(ua.channels(), cn);
    ASSERT_EQ(ua.depth(), depth);
    ASSERT_EQ(ua.type(), type);
    ASSERT_EQ(ua.elemSize(), a.elemSize());
    ASSERT_EQ(ua.elemSize1(), a.elemSize1());
    // empty() must agree with a zero-area matrix, and release() must
    // leave the matrix empty.
    ASSERT_EQ(ub.empty(), ub.cols*ub.rows == 0);
    ub.release();
    ASSERT_TRUE( ub.empty() );
    // isSubmatrix() is true exactly when a proper (smaller) ROI was taken.
    if(useRoi && a.size() != ua.size())
    {
        ASSERT_EQ(ua.isSubmatrix(), true);
    }
    else
    {
        ASSERT_EQ(ua.isSubmatrix(), false);
    }
    // Re-create 'ub' as an n-dimensional UMat with random extents and a
    // random type; total() must equal the product of the extents.
    int dims = randomInt(2,6);
    int sz[CV_MAX_DIM];
    size_t total = 1;
    for(int i = 0; i<dims; i++)
    {
        sz[i] = randomInt(1,45);
        total *= (size_t)sz[i];
    }
    int new_type = CV_MAKE_TYPE(randomInt(CV_8S,CV_64F),randomInt(1,4));
    ub = UMat(dims, sz, new_type);
    ASSERT_EQ(ub.total(), total);
}
// Performance test: multi-scale HOG pedestrian detection on a grayscale
// road scene, with a sanity check on the detected rectangles.
OCL_PERF_TEST(HOGFixture, HOG)
{
    // Load the test image as grayscale and make sure it is valid.
    UMat frame;
    imread(getDataPath("gpu/hog/road.png"), cv::IMREAD_GRAYSCALE).copyTo(frame);
    ASSERT_FALSE(frame.empty());

    // Detector configured with the stock people-detector SVM coefficients.
    HOGDescriptor hogDetector;
    hogDetector.setSVMDetector(hogDetector.getDefaultPeopleDetector());

    vector<cv::Rect> found_locations;
    declare.in(frame);

    OCL_TEST_CYCLE() hogDetector.detectMultiScale(frame, found_locations);

    // Sort so the regression data is independent of detection order.
    std::sort(found_locations.begin(), found_locations.end(), RectLess());
    SANITY_CHECK(found_locations, 3);
}
int main(int argc, char* argv[]) { if (argc==2) imread(argv[1]).copyTo(img); if (img.empty()) { cout << "File not found or empty image\n"; } imshow("Original",img); namedWindow( window_name, WINDOW_AUTOSIZE ); /// Create a Trackbar for user to enter threshold createTrackbar( "a:",window_name, &aa, 400, PaillouFilter ); createTrackbar( "w:", window_name, &ww, 400, PaillouFilter ); PaillouFilter(0,NULL); waitKey(); return 0; }
int main(int argc, char** argv) { const char* keys = "{ i input | ../data/pic1.png | specify input image }" "{ o output | squares_output.jpg | specify output save path}" "{ h help | | print help message }" "{ m cpu_mode | | run without OpenCL }"; CommandLineParser cmd(argc, argv, keys); if(cmd.has("help")) { cout << "Usage : squares [options]" << endl; cout << "Available options:" << endl; cmd.printMessage(); return EXIT_SUCCESS; } if (cmd.has("cpu_mode")) { ocl::setUseOpenCL(false); std::cout << "OpenCL was disabled" << std::endl; } string inputName = cmd.get<string>("i"); string outfile = cmd.get<string>("o"); int iterations = 10; namedWindow( wndname, WINDOW_AUTOSIZE ); vector<vector<Point> > squares; UMat image; imread(inputName, 1).copyTo(image); if( image.empty() ) { cout << "Couldn't load " << inputName << endl; cmd.printMessage(); return EXIT_FAILURE; } int j = iterations; int64 t_cpp = 0; //warm-ups cout << "warming up ..." << endl; findSquares(image, squares); do { int64 t_start = cv::getTickCount(); findSquares(image, squares); t_cpp += cv::getTickCount() - t_start; t_start = cv::getTickCount(); cout << "run loop: " << j << endl; } while(--j); cout << "average time: " << 1000.0f * (double)t_cpp / getTickFrequency() / iterations << "ms" << endl; UMat result = drawSquaresBoth(image, squares); imshow(wndname, result); imwrite(outfile, result); waitKey(0); return EXIT_SUCCESS; }
int main(int argc, const char** argv) { // declare capture engine that can read images from camera or file VideoCapture cap; // if no cmd-line arguments other than the app name then camera flag is raised bool camera = (1==argc); if(camera) // call open(int) method to init capture engine to read from camera // In case of many cameras the index of camera can be passed as argument. cap.open(0); else // call open(char*) method to init capture engine to read images from file // the argument is file name that will be opened for reading // it can be name of video file or still image cap.open(argv[1]); // check that capture engine open source (camera or file) successfully if (!cap.isOpened()) { printf("can not open %s\n",camera?"camera":argv[1]); printf("trying to open test.jpg\n"); // in case of fail try to open simple test file to be able check pipeline working cap.open("test.jpg"); if (!cap.isOpened()) { printf("can not open test.jpg\n"); return EXIT_FAILURE; } } // prepare for processing images // declare mat objects to store input, intermediate and output images // this is main loop over all input frames for (;;) { // get next frame from input stream //cap >> imgInp; imgInp = imread(cap, CV_LOAD_IMAGE_ANYDEPTH); // check read result // in case of reading from file the loop will be break after last frame is read and processed // in case of camera this condition is always false until something wrong with camera if (imgInp.empty()) { // wait until user press any key and the break the loop // we need to wait to ge waitKey(0); break; } // show the input image on the screen using opencv function // this call creates window named "Input" and draws imgInp inside the window imshow("Input", imgInp); // convert input image into intermediate grayscale image if (imgInp.channels() > 1) { printf("Preceding with blanks: %10d \n", imgInp.channels()); cvtColor(imgInp, imgGray, COLOR_BGR2GRAY); Mat spl; split(imgInp, spl); imshow("spl1", spl[0]);//b imshow("spl2", spl[1]);//g 
imshow("spl3", spl[2]);//r } else{ imgGray = imgInp; } /// Initialize values alpha_slider = 0; p2_slider = 0; // run canny processing on grayscale image //Canny(imgGray, imgOut, 50, 150); // show the result on the screen using opencv function // this call creates window named "Canny" and draw imgOut inside the window //imshow("Canny", imgOut); /// Create Windows namedWindow("Linear Blend",WINDOW_NORMAL); /// Create Trackbars char TrackbarName[50]; char TrackbarName2[50]; sprintf(TrackbarName, "Thresh#1 /n x %d", alpha_slider_max); sprintf(TrackbarName2, "Thresh#2 /n 2 x %d", alpha_slider_max); //createTrackbar(TrackbarName, "Linear Blend", &alpha_slider, alpha_slider_max, on_trackbar); createTrackbar(TrackbarName, "Linear Blend", &alpha_slider, alpha_slider_max, on_trackbar_Canny); createTrackbar(TrackbarName2, "Linear Blend", &p2_slider, p2_slider_max, on_trackbar_Canny); ps_array[0] = alpha_slider; ps_array[1] = p2_slider; /// Show some stuff on_trackbar_Canny(0,0); //on_trackbar(alpha_slider, 0); // the waitKey function is called for 2 reasons // 1. detect when ESC key is pressed // 2. to allow "Input" and "Canny" windows to plumb messages. It allows user to manipulate with "Input" and "Canny" windows // 10ms param is passed to spend only 10ms inside the waitKey function and then go to further processing int key = waitKey(100); //exit if ESC is pressed if (key == 27) break; } return EXIT_SUCCESS; }
// Runs the forward pass of the pooling layer on OpenCL.
//   bottom:   input blob (read-only device buffer).
//   top:      output blob, written by the kernel.
//   top_mask: MAX pooling only — optional buffer receiving the per-output
//             mask; must be empty for AVE and STO pooling (asserted).
// Returns false if the kernel failed to build or failed to enqueue.
bool OCL4DNNPool<Dtype>::Forward(const UMat& bottom, UMat& top, UMat& top_mask)
{
    bool ret = true;
    // Fixed 1-D launch configuration: 128*128 work-items in groups of 128.
    size_t global[] = { 128 * 128 };
    size_t local[] = { 128 };

    // support 2D case
    switch (pool_method_)
    {
    case LIBDNN_POOLING_METHOD_MAX:
        {
            // Pick the mask-producing kernel variant only when the caller
            // supplied a mask buffer.
            bool haveMask = !top_mask.empty();
            ocl::Kernel oclk_max_pool_forward(
                haveMask ? CL_KERNEL_SELECT("max_pool_forward_mask") : CL_KERNEL_SELECT("max_pool_forward"),
                ocl::dnn::ocl4dnn_pooling_oclsrc,
                // Pooling geometry is baked into the program via -D options,
                // so each geometry compiles its own specialized kernel.
                format("-D KERNEL_MAX_POOL=1 -D KERNEL_W=%d -D KERNEL_H=%d"
                       " -D STRIDE_W=%d -D STRIDE_H=%d"
                       " -D PAD_W=%d -D PAD_H=%d%s",
                       kernel_w_, kernel_h_, stride_w_, stride_h_, pad_w_, pad_h_,
                       haveMask ? " -D HAVE_MASK=1" : ""
                ));
            if (oclk_max_pool_forward.empty())
                return false;

            // Argument order must match the kernel signature exactly.
            oclk_max_pool_forward.args(
                count_,
                ocl::KernelArg::PtrReadOnly(bottom),
                channels_, height_, width_,
                pooled_height_, pooled_width_,
                ocl::KernelArg::PtrWriteOnly(top),
                ocl::KernelArg::PtrWriteOnly(top_mask)
            );

            ret = oclk_max_pool_forward.run(1, global, local, false);
        }
        break;
    case LIBDNN_POOLING_METHOD_AVE:
        {
            // Average pooling never produces a mask.
            CV_Assert(top_mask.empty());
            ocl::Kernel oclk_ave_pool_forward(CL_KERNEL_SELECT("ave_pool_forward"),
                ocl::dnn::ocl4dnn_pooling_oclsrc,
                format("-D KERNEL_AVE_POOL=1 -D KERNEL_W=%d -D KERNEL_H=%d"
                       " -D STRIDE_W=%d -D STRIDE_H=%d"
                       " -D PAD_W=%d -D PAD_H=%d%s",
                       kernel_w_, kernel_h_, stride_w_, stride_h_, pad_w_, pad_h_,
                       // AVE_POOL_PADDING_AREA presumably makes padded cells
                       // count toward the averaging divisor — see the kernel
                       // source in ocl4dnn_pooling_oclsrc to confirm.
                       avePoolPaddedArea ? " -D AVE_POOL_PADDING_AREA" : ""
                ));
            if (oclk_ave_pool_forward.empty())
                return false;

            oclk_ave_pool_forward.args(
                count_,
                ocl::KernelArg::PtrReadOnly(bottom),
                channels_, height_, width_,
                pooled_height_, pooled_width_,
                ocl::KernelArg::PtrWriteOnly(top)
            );

            ret = oclk_ave_pool_forward.run(1, global, local, false);
        }
        break;
    case LIBDNN_POOLING_METHOD_STO:
        {
            // Stochastic pooling ("test"/inference kernel variant); no mask.
            CV_Assert(top_mask.empty());
            ocl::Kernel oclk_sto_pool_forward(CL_KERNEL_SELECT("sto_pool_forward_test"),
                ocl::dnn::ocl4dnn_pooling_oclsrc,
                format("-D KERNEL_STO_POOL=1 -D KERNEL_W=%d -D KERNEL_H=%d"
                       " -D STRIDE_W=%d -D STRIDE_H=%d",
                       kernel_w_, kernel_h_, stride_w_, stride_h_
                ));
            if (oclk_sto_pool_forward.empty())
                return false;

            oclk_sto_pool_forward.args(
                count_,
                ocl::KernelArg::PtrReadOnly(bottom),
                channels_, height_, width_,
                pooled_height_, pooled_width_,
                ocl::KernelArg::PtrWriteOnly(top)
            );

            ret = oclk_sto_pool_forward.run(1, global, local, false);
        }
        break;
    default:
        {
            ret = false;
            LOG(FATAL)<< "Unknown pooling method.";
        }
    }
    return ret;
}
int main(int argc, char** argv) { CV_TRACE_FUNCTION(); cv::CommandLineParser parser(argc, argv, "{help h ? | | help message}" "{n | 100 | number of frames to process }" "{@video | 0 | video filename or cameraID }" ); if (parser.has("help")) { parser.printMessage(); return 0; } VideoCapture capture; std::string video = parser.get<string>("@video"); if (video.size() == 1 && isdigit(video[0])) capture.open(parser.get<int>("@video")); else capture.open(video); int nframes = 0; if (capture.isOpened()) { nframes = (int)capture.get(CAP_PROP_FRAME_COUNT); cout << "Video " << video << ": width=" << capture.get(CAP_PROP_FRAME_WIDTH) << ", height=" << capture.get(CAP_PROP_FRAME_HEIGHT) << ", nframes=" << nframes << endl; } else { cout << "Could not initialize video capturing...\n"; return -1; } int N = parser.get<int>("n"); if (nframes > 0 && N > nframes) N = nframes; cout << "Start processing..." << endl << "Press ESC key to terminate" << endl; UMat frame; for (int i = 0; N > 0 ? (i < N) : true; i++) { CV_TRACE_REGION("FRAME"); // OpenCV Trace macro for named "scope" region { CV_TRACE_REGION("read"); capture.read(frame); if (frame.empty()) { cerr << "Can't capture frame: " << i << std::endl; break; } // OpenCV Trace macro for NEXT named region in the same C++ scope // Previous "read" region will be marked complete on this line. // Use this to eliminate unnecessary curly braces. CV_TRACE_REGION_NEXT("process"); process_frame(frame); CV_TRACE_REGION_NEXT("delay"); if (waitKey(1) == 27/*ESC*/) break; } } return 0; }