Example #1
//! Character segmentation and sorting
int CCharsSegment::charsSegment(Mat input, vector<Mat>& resultVec) {

  if (!input.data) return 0x01;

  int w = input.cols;
  int h = input.rows;

  Mat tmpMat = input(Rect_<double>(w * 0.1, h * 0.1, w * 0.8, h * 0.8));

  // Determine the plate color, which decides the thresholding method
  Color plateType = getPlateType(tmpMat, true);

  Mat input_grey;
  cvtColor(input, input_grey, CV_BGR2GRAY);

  Mat img_threshold;

  // Binarization:
  // use a different thresholding strategy depending on the plate color
  // TODO: use MSER to extract these contours
  if (BLUE == plateType) {
    // cout << "BLUE" << endl;
    img_threshold = input_grey.clone();

    int w = input_grey.cols;
    int h = input_grey.rows;
    Mat tmp = input_grey(Rect_<double>(w * 0.1, h * 0.1, w * 0.8, h * 0.8));
    int thresholdV = ThresholdOtsu(tmp);
    // utils::imwrite("E:/img_inputgray2.jpg", input_grey);

    threshold(input_grey, img_threshold, thresholdV, 255, CV_THRESH_BINARY);
    // utils::imwrite("E:/img_threshold.jpg", img_threshold);

    // threshold(input_grey, img_threshold, 5, 255, CV_THRESH_OTSU +
    // CV_THRESH_BINARY);

  } else if (YELLOW == plateType) {
    // cout << "YELLOW" << endl;
    img_threshold = input_grey.clone();
    int w = input_grey.cols;
    int h = input_grey.rows;
    Mat tmp = input_grey(Rect_<double>(w * 0.1, h * 0.1, w * 0.8, h * 0.8));
    int thresholdV = ThresholdOtsu(tmp);
    utils::imwrite("resources/image/tmp/inputgray2.jpg", input_grey);

    threshold(input_grey, img_threshold, thresholdV, 255,
              CV_THRESH_BINARY_INV);

    // threshold(input_grey, img_threshold, 10, 255, CV_THRESH_OTSU +
    // CV_THRESH_BINARY_INV);
  } else if (WHITE == plateType) {
    // cout << "WHITE" << endl;
    /*img_threshold = input_grey.clone();
    int w = input_grey.cols;
    int h = input_grey.rows;
    Mat tmp = input_grey(Rect(w*0.1, h*0.1, w*0.8, h*0.8));
    int thresholdV = ThresholdOtsu(tmp);
    utils::imwrite("resources/image/tmp/inputgray2.jpg", input_grey);*/

    threshold(input_grey, img_threshold, 10, 255,
              CV_THRESH_OTSU + CV_THRESH_BINARY_INV);
  } else {
    // cout << "UNKNOWN" << endl;
    threshold(input_grey, img_threshold, 10, 255,
              CV_THRESH_OTSU + CV_THRESH_BINARY);
  }

  if (0) {
    imshow("threshold", img_threshold);
    waitKey(0);
    destroyWindow("threshold");
  }

  if (m_debug) {
    stringstream ss(stringstream::in | stringstream::out);
    ss << "resources/image/tmp/debug_char_threshold" << iTag << ".jpg";
    utils::imwrite(ss.str(), img_threshold);
  }

  // Remove interference such as the rivets along the top of the plate and the
  // horizontal lines along the bottom, and judge at the same time whether this
  // is really a plate, using the number of intensity jumps and the proportion
  // of character-colored pixels as the decision criteria.
  // If it is not a plate, return ErrorCode 0x02.
  if (!clearLiuDing(img_threshold)) return 0x02;

  if (m_debug) {
    stringstream ss(stringstream::in | stringstream::out);
    ss << "resources/image/tmp/debug_char_clearLiuDing" << iTag << ".jpg";
    utils::imwrite(ss.str(), img_threshold);
  }
  iTag++;

  // Extract contours from the binarized image
  Mat img_contours;
  img_threshold.copyTo(img_contours);

  vector<vector<Point> > contours;
  findContours(img_contours,
               contours,               // a vector of contours
               CV_RETR_EXTERNAL,       // retrieve the external contours
               CV_CHAIN_APPROX_NONE);  // all pixels of each contour

  vector<vector<Point> >::iterator itc = contours.begin();
  vector<Rect> vecRect;

  // Exclude character blocks that do not match the expected sizes
  while (itc != contours.end()) {
    Rect mr = boundingRect(Mat(*itc));
    Mat auxRoi(img_threshold, mr);

    if (verifyCharSizes(auxRoi)) vecRect.push_back(mr);
    ++itc;
  }

  // If no character blocks are found, return ErrorCode 0x03
  if (vecRect.size() == 0) return 0x03;

  // Sort the size-qualified blocks from left to right;
  // using std::sort directly is more efficient
  vector<Rect> sortedRect(vecRect);
  std::sort(sortedRect.begin(), sortedRect.end(),
            [](const Rect& r1, const Rect& r2) { return r1.x < r2.x; });

  size_t specIndex = 0;

  // Get the Rect of the special character, e.g. the "A" of "苏A"
  specIndex = GetSpecificRect(sortedRect);

  if (m_debug) {
    if (specIndex < sortedRect.size()) {
      Mat specMat(img_threshold, sortedRect[specIndex]);
      stringstream ss(stringstream::in | stringstream::out);
      ss << "resources/image/tmp/debug_specMat"
         << ".jpg";
      utils::imwrite(ss.str(), specMat);
    }
  }

  // Work leftwards from the special character's Rect to infer the Rect of the
  // Chinese character. This is needed because findContours can rarely capture
  // an accurate Rect for the Chinese character, so it can only be located by a
  // dedicated algorithm.
  Rect chineseRect;
  if (specIndex < sortedRect.size())
    chineseRect = GetChineseRect(sortedRect[specIndex]);
  else
    return -3;

  if (m_debug) {
    Mat chineseMat(img_threshold, chineseRect);
    stringstream ss(stringstream::in | stringstream::out);
    ss << "resources/image/tmp/debug_chineseMat"
       << ".jpg";
    utils::imwrite(ss.str(), chineseMat);
  }

  // Build a brand-new, ordered vector of Rects.
  // Add the Chinese character's Rect first, since it is certainly the leftmost;
  // then take only 6 more Rects in order, because a plate can hold only 7
  // characters. This avoids spurious "1" characters caused by shadows.
  vector<Rect> newSortedRect;
  newSortedRect.push_back(chineseRect);
  RebuildRect(sortedRect, newSortedRect, specIndex);

  if (newSortedRect.size() == 0) return -3;

  for (size_t i = 0; i < newSortedRect.size(); i++) {
    Rect mr = newSortedRect[i];
    Mat auxRoi(img_threshold, mr);

    if (1) {
      auxRoi = preprocessChar(auxRoi);
      if (m_debug) {
        stringstream ss(stringstream::in | stringstream::out);
        ss << "resources/image/tmp/debug_char_auxRoi_" << (i + staticIndex)
           << ".jpg";
        utils::imwrite(ss.str(), auxRoi);
      }
      resultVec.push_back(auxRoi);
    }
  }
  staticIndex += newSortedRect.size();

  return 0;
}
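A minimal usage sketch for the function above (not part of the original source: demoCharsSegment is a hypothetical caller, and the EasyPR/OpenCV headers and using-directives of the surrounding file are assumed):

void demoCharsSegment(const Mat& plate) {
  CCharsSegment segmenter;
  vector<Mat> chars;
  switch (segmenter.charsSegment(plate, chars)) {
    case 0x00: cout << "segmented " << chars.size() << " characters" << endl; break;
    case 0x01: cout << "empty input image" << endl; break;
    case 0x02: cout << "rejected by rivet/line cleanup: not a plate" << endl; break;
    case 0x03: cout << "no character blocks found" << endl; break;
    default:   cout << "special character Rect not located" << endl; break;  // the -3 paths
  }
}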
Example #2
int main(int argc, char *argv[])
{
    // get input arguments

    string configFile = "";
    string video_file_left = "", video_file_right = "", video_directory = "";
    int starting_frame_number = 0;
    bool enable_gamma = false;
    float random_results = -1.0;

    int last_frame_number = -1;

    int last_playback_frame_number = -2;

    ConciseArgs parser(argc, argv);
    parser.add(configFile, "c", "config", "Configuration file containing camera GUIDs, etc.", true);
    parser.add(show_display, "d", "show-display", "Enable for visual debugging display. Will reduce framerate significantly.");
    parser.add(show_display_wait, "w", "show-display-wait", "Optional argument to decrease framerate for lower network traffic when forwarding the display.");
    parser.add(show_unrectified, "u", "show-unrectified", "When displaying images, do not apply rectification.");
    parser.add(disable_stereo, "s", "disable-stereo", "Disable online stereo processing.");
    parser.add(force_brightness, "b", "force-brightness", "Force a brightness setting.");
    parser.add(force_exposure, "e", "force-exposure", "Force an exposure setting.");
    parser.add(quiet_mode, "q", "quiet", "Reduce text output.");
    parser.add(video_file_left, "l", "video-file-left", "Do not use cameras, instead use this video file (also requires a right video file).");
    parser.add(video_file_right, "t", "video-file-right", "Right video file, only for use with the -l option.");
    parser.add(video_directory, "i", "video-directory", "Directory to search for videos in (for playback).");
    parser.add(starting_frame_number, "f", "starting-frame", "Frame to start at when playing back videos.");
    parser.add(display_hud, "v", "hud", "Overlay HUD on display images.");
    parser.add(record_hud, "x", "record-hud", "Record the HUD display.");
    parser.add(file_frame_skip, "p", "skip", "Number of frames skipped in recording (for playback).");
    parser.add(enable_gamma, "g", "enable-gamma", "Turn gamma on for both cameras.");
    parser.add(random_results, "R", "random-results", "Number of random points to produce per frame.  Can be a float, in which case we'll take a random sample to decide whether to produce the last one.  Disables real stereo processing.  Only for debugging / analysis!");
    parser.add(publish_all_images, "P", "publish-all-images", "Publish all images to LCM");
    parser.parse();

    // parse the config file
    if (ParseConfigFile(configFile, &stereoConfig) != true)
    {
        fprintf(stderr, "Failed to parse configuration file, quitting.\n");
        return -1;
    }

    if (video_file_left.length() > 0
        && video_file_right.length() <= 0) {

        fprintf(stderr, "Error: for playback you must specify both "
            "a right and left video file. (Only got a left one.)\n");

        return -1;
    }

    if (video_file_left.length() <= 0
        && video_file_right.length() > 0) {

        fprintf(stderr, "Error: for playback you must specify both "
            "a right and left video file. (Only got a right one.)\n");

        return -1;
    }

    recording_manager.Init(stereoConfig);

    // attempt to load video files / directories
    if (video_file_left.length() > 0) {
        if (recording_manager.LoadVideoFiles(video_file_left, video_file_right) != true) {
            // don't have videos, bail out.
            return -1;
        }
    }

    if (video_directory.length() > 0) {
        if (recording_manager.SetPlaybackVideoDirectory(video_directory) != true) {
            // bail
            return -1;
        }
    }

    recording_manager.SetQuietMode(quiet_mode);
    recording_manager.SetPlaybackFrameNumber(starting_frame_number);



    uint64 guid = stereoConfig.guidLeft;
    uint64 guid2 = stereoConfig.guidRight;

    // start up LCM
    lcm_t * lcm;
    lcm = lcm_create (stereoConfig.lcmUrl.c_str());


    unsigned long elapsed;

    Hud hud;


    // --- setup control-c handling ---
    struct sigaction sigIntHandler;

    sigIntHandler.sa_handler = control_c_handler;
    sigemptyset(&sigIntHandler.sa_mask);
    sigIntHandler.sa_flags = 0;

    sigaction(SIGINT, &sigIntHandler, NULL);
    // --- end ctrl-c handling code ---

    dc1394error_t   err;
    dc1394error_t   err2;


    // tell opencv to use only one core so that we can manage our
    // own threading without a fight
    setNumThreads(1);

    if (recording_manager.UsingLiveCameras()) {
        d = dc1394_new ();
        if (!d)
            cerr << "Could not create dc1394 context" << endl;

        d2 = dc1394_new ();
        if (!d2)
            cerr << "Could not create dc1394 context for camera 2" << endl;

        camera = dc1394_camera_new (d, guid);
        if (!camera)
        {
            cerr << "Could not create dc1394 camera... quitting." << endl;
            exit(1);
        }

        camera2 = dc1394_camera_new (d2, guid2);
        if (!camera2)
            cerr << "Could not create dc1394 camera for camera 2" << endl;
        // reset the bus
        dc1394_reset_bus(camera);
        dc1394_reset_bus(camera2);

        // setup
        err = setup_gray_capture(camera, DC1394_VIDEO_MODE_FORMAT7_1);
        DC1394_ERR_CLN_RTN(err, cleanup_and_exit(camera), "Could not setup camera");

        err2 = setup_gray_capture(camera2, DC1394_VIDEO_MODE_FORMAT7_1);
        DC1394_ERR_CLN_RTN(err2, cleanup_and_exit(camera2), "Could not setup camera number 2");

        // enable camera
        err = dc1394_video_set_transmission(camera, DC1394_ON);
        DC1394_ERR_CLN_RTN(err, cleanup_and_exit(camera), "Could not start camera iso transmission");
        err2 = dc1394_video_set_transmission(camera2, DC1394_ON);
        DC1394_ERR_CLN_RTN(err2, cleanup_and_exit(camera2), "Could not start camera iso transmission for camera number 2");

        InitBrightnessSettings(camera, camera2, enable_gamma);
    }

    if (show_display) {

        namedWindow("Input", CV_WINDOW_AUTOSIZE | CV_WINDOW_KEEPRATIO);
        namedWindow("Input2", CV_WINDOW_AUTOSIZE | CV_WINDOW_KEEPRATIO);
        namedWindow("Stereo", CV_WINDOW_AUTOSIZE | CV_WINDOW_KEEPRATIO);

        namedWindow("Left Block", CV_WINDOW_AUTOSIZE | CV_WINDOW_KEEPRATIO);
        namedWindow("Right Block", CV_WINDOW_AUTOSIZE | CV_WINDOW_KEEPRATIO);

        namedWindow("Debug 1", CV_WINDOW_AUTOSIZE | CV_WINDOW_KEEPRATIO);
        namedWindow("Debug 2", CV_WINDOW_AUTOSIZE | CV_WINDOW_KEEPRATIO);



        setMouseCallback("Input", onMouse); // for drawing disparity lines
        setMouseCallback("Stereo", onMouseStereo, &hud); // for drawing disparity lines

        moveWindow("Input", stereoConfig.displayOffsetX + 100, stereoConfig.displayOffsetY + 100);
        moveWindow("Stereo", stereoConfig.displayOffsetX + 100, stereoConfig.displayOffsetY + 370);
        moveWindow("Input2", stereoConfig.displayOffsetX + 478, stereoConfig.displayOffsetY + 100);
        moveWindow("Left Block", stereoConfig.displayOffsetX + 900, stereoConfig.displayOffsetY + 100);
        moveWindow("Right Block", stereoConfig.displayOffsetX + 1400, stereoConfig.displayOffsetY + 100);

        moveWindow("Debug 1", stereoConfig.displayOffsetX + 900, stereoConfig.displayOffsetY + 670);
        moveWindow("Debug 2", stereoConfig.displayOffsetX + 1400, stereoConfig.displayOffsetY + 670);

    } // show display

    if (show_display || publish_all_images) {
        // if a channel exists, subscribe to it
        if (stereoConfig.stereo_replay_channel.length() > 0) {
            stereo_replay_sub = lcmt_stereo_subscribe(lcm, stereoConfig.stereo_replay_channel.c_str(), &stereo_replay_handler, &hud);
        }

        if (stereoConfig.pose_channel.length() > 0) {
            mav_pose_t_sub = mav_pose_t_subscribe(lcm, stereoConfig.pose_channel.c_str(), &mav_pose_t_handler, &hud);
        }

        if (stereoConfig.gps_channel.length() > 0) {
            mav_gps_data_t_sub = mav_gps_data_t_subscribe(lcm, stereoConfig.gps_channel.c_str(), &mav_gps_data_t_handler, &hud);
        }

        if (stereoConfig.baro_airspeed_channel.length() > 0) {
            baro_airspeed_sub = lcmt_baro_airspeed_subscribe(lcm, stereoConfig.baro_airspeed_channel.c_str(), &baro_airspeed_handler, &hud);
        }

        if (stereoConfig.servo_out_channel.length() > 0) {
            servo_out_sub = lcmt_deltawing_u_subscribe(lcm, stereoConfig.servo_out_channel.c_str(), &servo_out_handler, &hud);
        }

        if (stereoConfig.battery_status_channel.length() > 0) {
            battery_status_sub = lcmt_battery_status_subscribe(lcm, stereoConfig.battery_status_channel.c_str(), &battery_status_handler, &hud);
        }

        if (stereoConfig.cpu_info_channel1.length() > 0) {
            cpu_info_sub1 = lcmt_cpu_info_subscribe(lcm, stereoConfig.cpu_info_channel1.c_str(), &cpu_info_handler, &recording_manager);
            cpu_info_sub2 = lcmt_cpu_info_subscribe(lcm, stereoConfig.cpu_info_channel2.c_str(), &cpu_info_handler, &recording_manager);
            cpu_info_sub3 = lcmt_cpu_info_subscribe(lcm, stereoConfig.cpu_info_channel3.c_str(), &cpu_info_handler, &recording_manager);
        }

        if (stereoConfig.log_size_channel1.length() > 0) {
            log_size_sub1 = lcmt_log_size_subscribe(lcm, stereoConfig.log_size_channel1.c_str(), &log_size_handler, &hud);
            log_size_sub2 = lcmt_log_size_subscribe(lcm, stereoConfig.log_size_channel2.c_str(), &log_size_handler, &hud);
            log_size_sub3 = lcmt_log_size_subscribe(lcm, stereoConfig.log_size_channel3.c_str(), &log_size_handler, &hud);
        }

    } // end show_display || publish_all_images

    // load calibration
    OpenCvStereoCalibration stereoCalibration;

    if (LoadCalibration(stereoConfig.calibrationDir, &stereoCalibration) != true)
    {
        cerr << "Error: failed to read calibration files. Quitting." << endl;
        return -1;
    }

    int inf_disparity_tester, disparity_tester;
    disparity_tester = GetDisparityForDistance(10, stereoCalibration, &inf_disparity_tester);

    std::cout << "computed disparity is = " << disparity_tester << ", inf disparity = " << inf_disparity_tester << std::endl;

    // subscribe to the stereo control channel
    stereo_control_sub = lcmt_stereo_control_subscribe(lcm, stereoConfig.stereoControlChannel.c_str(), &lcm_stereo_control_handler, NULL);


    Mat imgDisp;
    Mat imgDisp2;

    // initialize default parameters
    //PushbroomStereoState state; // HACK

    state.disparity = stereoConfig.disparity;
    state.zero_dist_disparity = stereoConfig.infiniteDisparity;
    state.sobelLimit = stereoConfig.interestOperatorLimit;
    state.horizontalInvarianceMultiplier = stereoConfig.horizontalInvarianceMultiplier;
    state.blockSize = stereoConfig.blockSize;
    state.random_results = random_results;
    state.check_horizontal_invariance = true;

    if (state.blockSize > 10 || state.blockSize < 1)
    {
        fprintf(stderr, "Warning: block size is very large "
            "or small (%d).  Expect trouble.\n", state.blockSize);
    }

    state.sadThreshold = stereoConfig.sadThreshold;

    state.mapxL = stereoCalibration.mx1fp;
    state.mapxR = stereoCalibration.mx2fp;
    state.Q = stereoCalibration.qMat;
    state.show_display = show_display;

    state.lastValidPixelRow = stereoConfig.lastValidPixelRow;

    Mat matL, matR;
    bool quit = false;

    if (recording_manager.UsingLiveCameras()) {
        matL = GetFrameFormat7(camera);
        matR = GetFrameFormat7(camera2);

        if (recording_manager.InitRecording(matL, matR) != true) {
            // failed to init recording, things are going bad.  bail.
            return -1;
        }

        // before we start, turn the cameras on and set the brightness and exposure
        MatchBrightnessSettings(camera, camera2, true, force_brightness, force_exposure);

        // grab a few frames and send them over LCM for the user
        // to verify that everything is working
        if (!show_display && !publish_all_images) {
            printf("Sending init images over LCM... ");
            fflush(stdout);

            for (int i = 0; i < 5; i++) {

                matL = GetFrameFormat7(camera);
                SendImageOverLcm(lcm, "stereo_image_left", matL, 50);

                matR = GetFrameFormat7(camera2);
                SendImageOverLcm(lcm, "stereo_image_right", matR, 50);

                // don't send these too fast, otherwise we'll flood the ethernet link
                // and not actually be helpful

                // wait one second
                printf(".");
                fflush(stdout);

                sleep(1);
            }
            printf(" done.\n");
        }

    } // recording_manager.UsingLiveCameras()

    // spool up worker threads
    PushbroomStereo pushbroom_stereo;

    // start the framerate clock
    struct timeval start, now;
    gettimeofday( &start, NULL );

    while (quit == false) {

        // get the frames from the camera
        if (recording_manager.UsingLiveCameras()) {
            // we would like to match brightness every frame
            // but that would really hurt our framerate
            // match brightness every 10 frames instead
            if (numFrames % MATCH_BRIGHTNESS_EVERY_N_FRAMES == 0)
            {
                MatchBrightnessSettings(camera, camera2);
            }

            // capture images from the cameras
            matL = GetFrameFormat7(camera);
            matR = GetFrameFormat7(camera2);

            // record video
            recording_manager.AddFrames(matL, matR);


        } else {
            // using a video file -- get the next frame
            recording_manager.GetFrames(matL, matR);
        }

        cv::vector<Point3f> pointVector3d;
        cv::vector<uchar> pointColors;
        cv::vector<Point3i> pointVector2d; // for display
        cv::vector<Point3i> pointVector2d_inf; // for display

        // do the main stereo processing
        if (disable_stereo != true) {

            gettimeofday( &now, NULL );
            double before = now.tv_usec + now.tv_sec * 1000 * 1000;

            pushbroom_stereo.ProcessImages(matL, matR, &pointVector3d, &pointColors, &pointVector2d, state);

            gettimeofday( &now, NULL );
            double after = now.tv_usec + now.tv_sec * 1000 * 1000;

            timer_sum += after-before;
            timer_count ++;

        }

        // build an LCM message for the stereo data
        lcmt_stereo msg;


        if (recording_manager.UsingLiveCameras() || stereo_lcm_msg == NULL) {
            msg.timestamp = getTimestampNow();
        } else {
            // if we are replaying videos, preserve the timestamp of the original video
            msg.timestamp = stereo_lcm_msg->timestamp;

        }


        msg.number_of_points = (int)pointVector3d.size();

        float x[msg.number_of_points];
        float y[msg.number_of_points];
        float z[msg.number_of_points];
        uchar grey[msg.number_of_points];

        for (unsigned int i=0;i<pointVector3d.size();i++) {

            x[i] = pointVector3d[i].x / stereoConfig.calibrationUnitConversion;
            y[i] = pointVector3d[i].y / stereoConfig.calibrationUnitConversion;
            z[i] = pointVector3d[i].z / stereoConfig.calibrationUnitConversion;
            grey[i] = pointColors[i];
        }

        msg.x = x;
        msg.y = y;
        msg.z = z;
        msg.grey = grey;
        msg.frame_number = recording_manager.GetFrameNumber();

        if (recording_manager.UsingLiveCameras()) {
            msg.frame_number = msg.frame_number - 1;  // minus one since recording manager has
                                                      // already recorded this frame (above in
                                                      // AddFrames) but we haven't made a message
                                                      // for it yet
        }


        msg.video_number = recording_manager.GetRecVideoNumber();

        // publish the LCM message
        if (last_frame_number != msg.frame_number) {
            lcmt_stereo_publish(lcm, "stereo", &msg);
            last_frame_number = msg.frame_number;
        }

        if (publish_all_images) {
            if (recording_manager.GetFrameNumber() != last_playback_frame_number) {
                SendImageOverLcm(lcm, "stereo_image_left", matL, 80);
                SendImageOverLcm(lcm, "stereo_image_right", matR, 80);

                last_playback_frame_number = recording_manager.GetFrameNumber();
            }

            //process LCM until there are no more messages
            // this allows us to drop frames if we are behind
            while (NonBlockingLcm(lcm)) {}
        }

        Mat matDisp, remapL, remapR;

        if (show_display) {
            // we remap again here because this code path is display-only
            Mat remapLtemp(matL.rows, matL.cols, matL.depth());
            Mat remapRtemp(matR.rows, matR.cols, matR.depth());

            remapL = remapLtemp;
            remapR = remapRtemp;

            remap(matL, remapL, stereoCalibration.mx1fp, Mat(), INTER_NEAREST);
            remap(matR, remapR, stereoCalibration.mx2fp, Mat(), INTER_NEAREST);

            remapL.copyTo(matDisp);

            //process LCM until there are no more messages
            // this allows us to drop frames if we are behind
            while (NonBlockingLcm(lcm)) {}
        } // end show_display


        if (show_display) {

            for (unsigned int i=0;i<pointVector2d.size();i++) {
                int x2 = pointVector2d[i].x;
                int y2 = pointVector2d[i].y;
                //int sad = pointVector2d[i].z;
                rectangle(matDisp, Point(x2,y2), Point(x2+state.blockSize, y2+state.blockSize), 0,  CV_FILLED);
                rectangle(matDisp, Point(x2+1,y2+1), Point(x2+state.blockSize-1, y2-1+state.blockSize), 255);

            }

            // draw pixel blocks
            if (lineLeftImgPosition >= 0 && lineLeftImgPositionY > 1) {
                DisplayPixelBlocks(remapL, remapR, lineLeftImgPosition - state.blockSize/2, lineLeftImgPositionY - state.blockSize/2, state, &pushbroom_stereo);
            }

            // draw a line for the user to show disparity
            DrawLines(remapL, remapR, matDisp, lineLeftImgPosition, lineLeftImgPositionY, state.disparity, state.zero_dist_disparity);


            if (visualize_stereo_hits == true && stereo_lcm_msg != NULL) {

                // transform the points from 3D space back onto the image's 2D space
                vector<Point3f> lcm_points;
                Get3DPointsFromStereoMsg(stereo_lcm_msg, &lcm_points);

                // draw the points on the unrectified image (to see these
                // you must pass the -u flag)
                Draw3DPointsOnImage(matL, &lcm_points, stereoCalibration.M1, stereoCalibration.D1, stereoCalibration.R1, 128);

            }

            if (show_unrectified == false) {

                imshow("Input", remapL);
                imshow("Input2", remapR);
            } else {
                imshow("Input", matL);
                imshow("Input2", matR);
            }


            if (display_hud) {
                Mat with_hud;

                recording_manager.SetHudNumbers(&hud);

                hud.DrawHud(matDisp, with_hud);

                if (record_hud) {
                    // put this frame into the HUD recording
                    recording_manager.RecFrameHud(with_hud);

                }

                imshow("Stereo", with_hud);
            } else {
                imshow("Stereo", matDisp);
            }


            char key = waitKey(show_display_wait);

            if (key != 255 && key != -1)
            {
                cout << endl << key << endl;
            }

            switch (key)
            {
                case 'T':
                    state.disparity --;
                    break;
                case 'R':
                    state.disparity ++;
                    break;

                case 'w':
                    state.sobelLimit += 10;
                    break;

                case 's':
                    state.sobelLimit -= 10;
                    break;

                case 'd':
                    state.horizontalInvarianceMultiplier -= 0.1;
                    break;

                case 'D':
                    state.horizontalInvarianceMultiplier += 0.1;
                    break;

                case 'g':
                    state.blockSize ++;
                    break;

                case 'b':
                    state.blockSize --;
                    if (state.blockSize < 1) {
                        state.blockSize = 1;
                    }
                    break;

                case 'Y':
                    state.sadThreshold += 50;
                    break;

                case 'y':
                    state.sadThreshold ++;
                    break;

                case 'h':
                    state.sadThreshold --;
                    break;

                case 'H':
                    state.sadThreshold -= 50;
                    break;

                case 'm':
                    if (recording_manager.UsingLiveCameras()) {
                        MatchBrightnessSettings(camera, camera2, true, force_brightness, force_exposure);
                    }
                    break;

                case '1':
                    force_brightness --;
                    if (recording_manager.UsingLiveCameras()) {
                        MatchBrightnessSettings(camera, camera2, true, force_brightness, force_exposure);
                    }
                    break;

                case '2':
                    force_brightness ++;
                    if (recording_manager.UsingLiveCameras()) {
                        MatchBrightnessSettings(camera, camera2, true, force_brightness, force_exposure);
                    }
                    break;

                case '3':
                    force_exposure --;
                    if (recording_manager.UsingLiveCameras()) {
                        MatchBrightnessSettings(camera, camera2, true, force_brightness, force_exposure);
                    }
                    break;

                case '4':
                    force_exposure ++;
                    if (recording_manager.UsingLiveCameras()) {
                        MatchBrightnessSettings(camera, camera2, true, force_brightness, force_exposure);
                    }
                    break;

                case '5':
                    // to show SAD boxes
                    state.sobelLimit = 0;
                    state.sadThreshold = 255;
                    break;

                case 'I':
                    state.check_horizontal_invariance = !state.check_horizontal_invariance;
                    break;

                case '.':
                    recording_manager.SetPlaybackFrameNumber(recording_manager.GetFrameNumber() + 1);
                    break;

                case ',':
                    recording_manager.SetPlaybackFrameNumber(recording_manager.GetFrameNumber() - 1);
                    break;

                case '>':
                    recording_manager.SetPlaybackFrameNumber(recording_manager.GetFrameNumber() + 50);
                    break;

                case '<':
                    recording_manager.SetPlaybackFrameNumber(recording_manager.GetFrameNumber() - 50);
                    break;

                //case 'k':
                //    state.zero_dist_disparity ++;
                 //   break;

                case 'l':
                    state.zero_dist_disparity --;
                    break;

                case 'o':
                    inf_sad_add --;
                    break;

                case 'p':
                    inf_sad_add ++;
                    break;

                case '[':
                    y_offset --;
                    if (y_offset < 0) {
                        y_offset = 0;
                    }
                    break;

                case ']':
                    y_offset ++;
                    break;

                case 'v':
                    display_hud = !display_hud;
                    break;

                case 'c':
                    hud.SetClutterLevel(hud.GetClutterLevel() + 1);
                    break;

                case 'C':
                    hud.SetClutterLevel(hud.GetClutterLevel() - 1);
                    break;

                case '}':
                    hud.SetPitchRangeOfLens(hud.GetPitchRangeOfLens() + 1);
                    break;
                case '{':
                    hud.SetPitchRangeOfLens(hud.GetPitchRangeOfLens() - 1);
                    break;

                case 'S':
                    // take a screen cap of the left and right images
                    // useful for putting into a stereo tuner
                    printf("\nWriting left.ppm...");
                    imwrite("left.ppm", remapL);

                    printf("\nWriting right.ppm...");
                    imwrite("right.ppm", remapR);

                    printf("\ndone.");
                    break;

                case 'V':
                    // record the HUD
                    record_hud = true;
                    recording_manager.RestartRecHud();
                    break;

                    /*
                case 'j':
                    state.debugJ --;
                    break;

                case 'J':
                    state.debugJ ++;
                    break;

                case 'i':
                    state.debugI --;
                    break;

                case 'I':
                    state.debugI ++;
                    break;

                case 'k':
                    state.debugDisparity --;
                    break;

                case 'K':
                    state.debugDisparity ++;
                    break;

                    */

                case 'q':
                    quit = true;
                    break;
            }

            if (key != 255 && key != -1)
            {
                cout << "sadThreshold = " << state.sadThreshold << endl;
                cout << "sobelLimit = " << state.sobelLimit << endl;
                cout << "horizontalInvarianceMultiplier = " << state.horizontalInvarianceMultiplier << endl;
                cout << "brightness: " << force_brightness << endl;
                cout << "exposure: " << force_exposure << endl;
                cout << "disparity = " << state.disparity << endl;
                cout << "inf_disparity = " << state.zero_dist_disparity << endl;
                cout << "inf_sad_add = " << inf_sad_add << endl;
                cout << "blockSize = " << state.blockSize << endl;
                cout << "frame_number = " << recording_manager.GetFrameNumber() << endl;
                cout << "y offset = " << y_offset << endl;
                cout << "PitchRangeOfLens = " << hud.GetPitchRangeOfLens() << endl;
            }
        } // end show_display

        numFrames ++;

        // check for new LCM messages
        NonBlockingLcm(lcm);

        if (quiet_mode == false || numFrames % 100 == 0) {
            // compute framerate
            gettimeofday( &now, NULL );

            elapsed = (now.tv_usec / 1000 + now.tv_sec * 1000) -
            (start.tv_usec / 1000 + start.tv_sec * 1000);

            printf("\r%d frames (%lu ms) - %4.1f fps | %4.1f ms/frame, stereo: %f", numFrames, elapsed, (float)numFrames/elapsed * 1000, elapsed/(float)numFrames, timer_sum/(double)timer_count);
            fflush(stdout);
        }


    } // end main while loop

    printf("\n\n");

    destroyWindow("Input");
    destroyWindow("Input2");
    destroyWindow("Stereo");

    // close camera
    if (recording_manager.UsingLiveCameras()) {
        StopCapture(d, camera);
        StopCapture(d2, camera2);
    }

    return 0;
}
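The main loop above drains pending LCM traffic with NonBlockingLcm(), which this snippet references but does not define. A minimal sketch of such a helper, assuming the standard lcm C API (lcm_get_fileno() and lcm_handle() are real calls; the select()-based polling is this sketch's choice, not necessarily the original implementation):

#include <sys/select.h>
#include <lcm/lcm.h>

// Poll the LCM file descriptor with a zero timeout and dispatch at most one
// pending message; returns true while messages remain, so callers can loop:
// while (NonBlockingLcm(lcm)) {}
bool NonBlockingLcm(lcm_t *lcm) {
    int fd = lcm_get_fileno(lcm);

    fd_set fds;
    FD_ZERO(&fds);
    FD_SET(fd, &fds);

    struct timeval timeout = {0, 0};  // return immediately, never block

    if (select(fd + 1, &fds, NULL, NULL, &timeout) > 0 && FD_ISSET(fd, &fds)) {
        lcm_handle(lcm);  // dispatch exactly one message
        return true;
    }
    return false;
}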
Example #3
//--------------------------------------------------------------
// main entry
int WINAPI WinMain( 
  HINSTANCE hInstance,            // Instance
  HINSTANCE hPrevInstance,        // Previous Instance
  LPSTR lpCmdLine,                // Command Line Parameters
  int nCmdShow)                   // Window Show State
{
#ifdef DEBUG_MEMORY
  mgDebugMemoryInit();
#endif

  mgDebugReset();         // reset trace file
  mgOSInitTimer();       // performance timer
  
  // initialize random numbers (seeded with a fixed constant here, so runs
  // are reproducible; the commented-out variant seeds from the clock)
  time_t seed;
  time(&seed);
  srand(12123123); // srand(seed & 0xFFFF);

  mgOSFindWD("docs");

  // handle utility error messages
  m_errorTable = new mgUtilErrorTable();

  try
  {
    initWindow();
    createWindow();
    createBitmap();
    createUI();

    // create the terrain
    m_flatWorld = new FlatWorld();
    m_flatWorld->resize(m_windowWidth, m_windowHeight);
    m_ui->setValue(m_flatWorld->m_playerX, m_flatWorld->m_playerY);

    // check for screen update every 25 ms
    SetTimer(m_window, 123, 25, NULL);

    while (true)
    {
      MSG msg;      

      // if there is no input pending
      if (!PeekMessage(&msg, NULL, 0, 0, PM_NOREMOVE)) 
      {
        // update any movement keys still down
        updateMovement();

        // if the ui needs an update
        if (m_surface != NULL && m_surface->isDamaged())
        {
          // redraw ui at damaged area
          mgRectangle bounds;
          m_surface->getDamage(bounds);
          m_ui->m_top->surfacePaint(bounds);

          // copy bits from surface into bitmap
          m_surface->repair(bounds);

          // tell windows to redraw the updated area
          RECT rect;
          rect.left = bounds.m_x;
          rect.right = rect.left + bounds.m_width;
          rect.top = bounds.m_y;
          rect.bottom = rect.top + bounds.m_height;
          InvalidateRect(m_window, &rect, false);
        }
      }

      GetMessage(&msg, NULL, 0, 0);     

      // end on quit
      if (msg.message == WM_QUIT)       
        break;

      TranslateMessage(&msg);
      DispatchMessage(&msg);
    }

    // shutdown
    destroyUI();
    destroyBitmap();
    destroyWindow();
    termWindow();
  }
  catch (mgErrorMsg* e)
  {
    mgString msg;
    m_errorTable->msgText(msg, e);
    mgDebug("%s", (const char*) msg);

    MessageBox(m_window, msg, "Error", MB_OK | MB_ICONINFORMATION);
    return 0;
  }
  catch (mgException* e)
  {
    mgDebug("%s", (const char*) e->m_message);

    MessageBox(m_window, e->m_message, "Error", MB_OK | MB_ICONINFORMATION);
    return 0;
  }

  delete m_errorTable;
  m_errorTable = NULL;

#ifdef DEBUG_MEMORY
  // display all memory leaks
  mgDebugMemory();
#endif

  return 0;
}
 /*
  * hides the display window
  */
 bool fastViewer::hide() {
   destroyWindow();
   return true;
 }
Example #5
int _glfwPlatformCreateWindow(_GLFWwindow* window,
                              const _GLFWwndconfig* wndconfig,
                              const _GLFWctxconfig* ctxconfig,
                              const _GLFWfbconfig* fbconfig)
{
    int status;

    if (!createWindow(window, wndconfig))
        return GLFW_FALSE;

    if (ctxconfig->api != GLFW_NO_API)
    {
        if (!_glfwCreateContext(window, ctxconfig, fbconfig))
            return GLFW_FALSE;

#if defined(_GLFW_WGL)
        status = _glfwAnalyzeContext(window, ctxconfig, fbconfig);

        if (status == _GLFW_RECREATION_IMPOSSIBLE)
            return GLFW_FALSE;

        if (status == _GLFW_RECREATION_REQUIRED)
        {
            // Some window hints require us to re-create the context using WGL
            // extensions retrieved through the current context, as we cannot
            // check for WGL extensions or retrieve WGL entry points before we
            // have a current context (actually until we have implicitly loaded
            // the vendor ICD)

            // Yes, this is strange, and yes, this is the proper way on WGL

            // As Windows only allows you to set the pixel format once for
            // a window, we need to destroy the current window and create a new
            // one to be able to use the new pixel format

            // Technically, it may be possible to keep the old window around if
            // we're just creating an OpenGL 3.0+ context with the same pixel
            // format, but it's not worth the added code complexity

            // First we clear the current context (the one we just created)
            // This is usually done by glfwDestroyWindow, but as we're not doing
            // full GLFW window destruction, it's duplicated here
            _glfwPlatformMakeContextCurrent(NULL);

            // Next destroy the Win32 window and WGL context (without resetting
            // or destroying the GLFW window object)
            _glfwDestroyContext(window);
            destroyWindow(window);

            // ...and then create them again, this time with better APIs
            if (!createWindow(window, wndconfig))
                return GLFW_FALSE;
            if (!_glfwCreateContext(window, ctxconfig, fbconfig))
                return GLFW_FALSE;
        }
#endif // _GLFW_WGL
    }

    if (window->monitor)
    {
        _glfwPlatformShowWindow(window);
        if (!enterFullscreenMode(window))
            return GLFW_FALSE;
    }

    return GLFW_TRUE;
}
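For context (an addition, not part of the original snippet): _glfwPlatformCreateWindow() is normally reached through the public GLFW 3 API, and the hints set before creation feed the wndconfig/ctxconfig/fbconfig structs it receives. A minimal caller using only documented GLFW calls:

#include <GLFW/glfw3.h>

int main(void) {
    if (!glfwInit())
        return 1;

    // context hints end up in the ctxconfig passed to the platform layer
    glfwWindowHint(GLFW_CONTEXT_VERSION_MAJOR, 3);
    glfwWindowHint(GLFW_CONTEXT_VERSION_MINOR, 0);

    // a GLFW_FALSE return from the platform layer surfaces here as NULL
    GLFWwindow* window = glfwCreateWindow(640, 480, "demo", NULL, NULL);
    if (!window) {
        glfwTerminate();
        return 1;
    }

    glfwDestroyWindow(window);
    glfwTerminate();
    return 0;
}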
int CPlateDetect::plateDetectDeep(Mat src, vector<Mat>& resultVec, bool showDetectArea, int index)
{

	vector<Mat> resultPlates;

	vector<CPlate> color_Plates;
	vector<CPlate> sobel_Plates;
	vector<CPlate> color_result_Plates;
	vector<CPlate> sobel_result_Plates;

	vector<CPlate> all_result_Plates;

	// If the color-based search finds at least n plates (n inclusive), the
	// Sobel-based search is skipped.
	const int color_find_max = 4;

	Mat result;
	src.copyTo(result);

	m_plateLocate->plateColorLocate(src, color_Plates, index);
	m_plateJudge->plateJudge(color_Plates, color_result_Plates);

	
	for (size_t i = 0; i < color_result_Plates.size(); i++)
	{
		CPlate plate = color_result_Plates[i];

		RotatedRect minRect = plate.getPlatePos();
		Point2f rect_points[4];
		minRect.points(rect_points);
		for (int j = 0; j < 4; j++)
			line(result, rect_points[j], rect_points[(j + 1) % 4], Scalar(0, 255, 255), 2, 8);

		all_result_Plates.push_back(plate);
	}

	if (color_result_Plates.size() >= color_find_max)
	{
		// the color search already found enough plates; skip the Sobel search
	} 
	else
	{
		m_plateLocate->plateSobelLocate(src, sobel_Plates, index);
		m_plateJudge->plateJudge(sobel_Plates, sobel_result_Plates);

		for (size_t i = 0; i < sobel_result_Plates.size(); i++)
		{
			CPlate plate = sobel_result_Plates[i];

			RotatedRect minRect = plate.getPlatePos();
			Point2f rect_points[4]; 
			minRect.points( rect_points );
			for( int j = 0; j < 4; j++ )
				line(result, rect_points[j], rect_points[(j+1)%4], Scalar(0,0,255), 2, 8 );

			all_result_Plates.push_back(plate);
		}
	}

	for (size_t i = 0; i < all_result_Plates.size(); i++)
	{
		// Paste the cropped plate images one below another at the top-left corner
		CPlate plate = all_result_Plates[i];

		Mat plate_area = plate.getPlateMat();

		int height = m_plateLocate->HEIGHT;
		int width = m_plateLocate->WIDTH;

		assert(height*i + height < result.rows);

		Mat imageRoi = result(Rect(0, 0 + height*i, width, height));
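		// weights (0, 1) below make addWeighted() a plain copy of plate_area
		// into the ROI of the display image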
		addWeighted(imageRoi, 0, plate_area, 1, 0, imageRoi);

		resultVec.push_back(plate_area);
	}

	if (showDetectArea)
	{
		namedWindow("EasyPR", CV_WINDOW_AUTOSIZE);
		showResult(result);
		destroyWindow("EasyPR");
	}
	

	if (0)
	{
		stringstream ss(stringstream::in | stringstream::out);
		ss << "image/tmp/" << index << "_" << 9 <<"_result" << ".jpg";
		imwrite(ss.str(), result);
	}

	return 0;
}
Example #7
JNIEXPORT void JNICALL Java_org_lwjgl_opengl_LinuxDisplay_nDestroyWindow(JNIEnv *env, jclass clazz, jlong display, jlong window_ptr) {
	Display *disp = (Display *)(intptr_t)display;
	Window window = (Window)window_ptr;
	destroyWindow(env, disp, window);
}
void* RobotVideo::runThread(void* arg)
{

	cvNamedWindow("Robot", CV_WINDOW_AUTOSIZE);
	moveWindow("Robot", 0, 0);
	// The buffer in which to receive the hello from the server
	//unsigned char buff[12];
	unsigned char imgBuff[480*640];

	//Mat imgRecieve(480,640,CV_8UC1);
	// The size of the string the server sent
	size_t strSize;

	// The socket object
	ArSocket sock;
	unsigned char *ptr;
	// Initialize Aria.  It is especially important to do
	// this on Windows, because it will initialize Windows'
	// sockets system.
	// Aria::init();

	// Connect to the server
	//ArLog::log(ArLog::Normal, "socketClientExample: Connecting to localhost TCP port 7777...");

	if (sock.connect(server_ip, ROBOT_VIDEO_PORT, ArSocket::TCP))
		ArLog::log(ArLog::Normal, "Connected to server at %s TCP port %d.", server_ip, ROBOT_VIDEO_PORT);
	else
	{
		ArLog::log(ArLog::Terse, "Error connecting to server at %s TCP port %d: %s", server_ip, ROBOT_VIDEO_PORT, sock.getErrorStr().c_str());
		//return(-1);
	}

	//---------------------------VideoWriter---------------------------------------


	robotVideo.open("robot.avi"  , CV_FOURCC('M','J','P','G') /* CV_FOURCC('P','I','M','1') */, 20/*inputVideo.get(CV_CAP_PROP_FPS)*/,Size(640, 480), false);

	if (!robotVideo.isOpened())
	{
		cout  << "Could not open the output video for write: " /*<< source*/ << endl;
	}


	while(1)
	{
		ptr = &imgBuff[0];
		strSize = 0;
		while (strSize < 640 * 480)
		{
			//ArLog::log(ArLog::Normal, "socketClientExample: String Size: \"%d\"", strSize);
			// advance by the number of bytes actually read; read() may return
			// fewer than the 2 bytes requested (or 0/-1 on error)
			int n = (int) sock.read(ptr, 2);
			if (n <= 0)
				break;
			strSize += n;
			ptr += n;
		}

		ptr=robot_img.datastart;
		//ArLog::log(ArLog::Normal, "socketClientExample: String Size: \"%d\"", strSize);

		mutex_robotVideo.lock();
		for (int i = 0,k = 0; i < robot_img.rows*robot_img.cols; i++) 
		{
			*(ptr++) = imgBuff[k++];
		}


		imshow("Robot", robot_img);
		robotVideo << robot_img;


		mutex_robotVideo.unlock();
		waitKey(1); // originally: if (waitKey(0) == 27) break;
	}
	sock.close();
	//outputVideo.release();
	// Now close the connection to the server

	ArLog::log(ArLog::Normal, "Socket to server closed.");
	destroyWindow("Robot");
	// Uninitialize Aria and exit
	//Aria::exit(0);
	return NULL;
}
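How runThread() gets started lies outside this snippet. If RobotVideo derives from ARIA's ArASyncTask (an assumption suggested by the runThread(void*) signature, not confirmed by the source), launching the video thread would look roughly like:

RobotVideo video;   // hypothetical construction; the real ctor args are unknown
video.runAsync();   // ArASyncTask spawns a thread that invokes runThread()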
Example #9
void printDumpUpdatePanel( kdbgerUiProperty_t *pKdbgerUiProperty ) {

	s32 i, x, y, color;
	u8 valueBuf[ KDBGER_DUMP_VBUF_SZ + 1 ];
	u8 asciiBuf[ KDBGER_DUMP_ABUF_SZ + 1 ];
	u8 *vp = valueBuf, *ap = asciiBuf;
	u8 *dataPtr, *pDataPtr;
	kdbgerPciDev_t *pKdbgerPciDev;
	kdbgerPciConfig_t *pKdbgerPciConfig;
	s8 *mem = "Mem", *io = "I/O";

	switch( pKdbgerUiProperty->kdbgerHwFunc ) {

		case KHF_PCI:
			dataPtr = (u8 *)&pKdbgerUiProperty->pKdbgerCommPkt->kdbgerRspPciReadPkt.pciContent;
			break;

		case KHF_IO:
			dataPtr = (u8 *)&pKdbgerUiProperty->pKdbgerCommPkt->kdbgerRspIoReadPkt.ioContent;
			break;

		case KHF_IDE:
			dataPtr = (u8 *)&pKdbgerUiProperty->pKdbgerCommPkt->kdbgerRspIdeReadPkt.ideContent;
			break;

		case KHF_CMOS:
			dataPtr = (u8 *)&pKdbgerUiProperty->pKdbgerCommPkt->kdbgerRspCmosReadPkt.cmosContent;
			break;

		default:
		case KHF_MEM:
			dataPtr = (u8 *)&pKdbgerUiProperty->pKdbgerCommPkt->kdbgerRspMemReadPkt.memContent;
		break;
	}
	pDataPtr = dataPtr;

	// Terminate buffers
	valueBuf[ KDBGER_DUMP_VBUF_SZ ] = 0;
	asciiBuf[ KDBGER_DUMP_ABUF_SZ ] = 0;

	// Format data for value & ascii
	for( i = 0 ; i < KDBGER_DUMP_BYTE_PER_LINE ; i++ ) {
		 
		vp += sprintf( (s8 *)vp,
		"%2.2X %2.2X %2.2X %2.2X %2.2X %2.2X %2.2X %2.2X %2.2X %2.2X %2.2X %2.2X %2.2X %2.2X %2.2X %2.2X",
		dataPtr[ 0 ], dataPtr[ 1 ], dataPtr[ 2 ], dataPtr[ 3 ], dataPtr[ 4 ], dataPtr[ 5 ], dataPtr[ 6 ],
		dataPtr[ 7 ], dataPtr[ 8 ], dataPtr[ 9 ], dataPtr[ 10 ], dataPtr[ 11 ], dataPtr[ 12 ], dataPtr[ 13 ],
		dataPtr[ 14 ], dataPtr[ 15 ] );

		if( pKdbgerUiProperty->kdbgerHwFunc != KHF_PCI )
			ap += sprintf( (s8 *)ap,
			"%c%c%c%c%c%c%c%c%c%c%c%c%c%c%c%c",
			KDBGER_DUMP_ASCII_FILTER( dataPtr[ 0 ] ),
			KDBGER_DUMP_ASCII_FILTER( dataPtr[ 1 ] ),
			KDBGER_DUMP_ASCII_FILTER( dataPtr[ 2 ] ),
			KDBGER_DUMP_ASCII_FILTER( dataPtr[ 3 ] ),
			KDBGER_DUMP_ASCII_FILTER( dataPtr[ 4 ] ),
			KDBGER_DUMP_ASCII_FILTER( dataPtr[ 5 ] ),
			KDBGER_DUMP_ASCII_FILTER( dataPtr[ 6 ] ),
			KDBGER_DUMP_ASCII_FILTER( dataPtr[ 7 ] ),
			KDBGER_DUMP_ASCII_FILTER( dataPtr[ 8 ] ),
			KDBGER_DUMP_ASCII_FILTER( dataPtr[ 9 ] ),
			KDBGER_DUMP_ASCII_FILTER( dataPtr[ 10 ] ),
			KDBGER_DUMP_ASCII_FILTER( dataPtr[ 11 ] ),
			KDBGER_DUMP_ASCII_FILTER( dataPtr[ 12 ] ),
			KDBGER_DUMP_ASCII_FILTER( dataPtr[ 13 ] ),
			KDBGER_DUMP_ASCII_FILTER( dataPtr[ 14 ] ),
			KDBGER_DUMP_ASCII_FILTER( dataPtr[ 15 ] ) );

		// Move to next line
		dataPtr += KDBGER_DUMP_BYTE_PER_LINE;
	}

	if( pKdbgerUiProperty->kdbgerHwFunc == KHF_PCI ) {

		pKdbgerPciConfig = (kdbgerPciConfig_t *)pDataPtr;
		sprintf( (s8 *)ap,
			"VEN ID: %4.4Xh\n"
			"DEV ID: %4.4Xh\n\n"
			"Rev ID  : %2.2Xh\n"
			"Int Line: %2.2Xh\n"
			"Int Pin : %2.2Xh\n\n"
			"%s: %8.8Xh\n"
			"%s: %8.8Xh\n"
			"%s: %8.8Xh\n"
			"%s: %8.8Xh\n"
			"%s: %8.8Xh\n"
			"%s: %8.8Xh\n\n"
			"ROM: %8.8Xh\n",
			pKdbgerPciConfig->vendorId,
			pKdbgerPciConfig->deviceId,
			pKdbgerPciConfig->revisionId,
			pKdbgerPciConfig->intLine,
			pKdbgerPciConfig->intPin,

			(pKdbgerPciConfig->baseAddrReg0 & KDBGER_PCIBAR_IO) ? io : mem,
			(pKdbgerPciConfig->baseAddrReg0 & KDBGER_PCIBAR_IO) ?
				(pKdbgerPciConfig->baseAddrReg0 & KDBGER_PCIBAR_IOBA_MASK) :
				(pKdbgerPciConfig->baseAddrReg0 & KDBGER_PCIBAR_MEMBA_MASK),

			(pKdbgerPciConfig->baseAddrReg1 & KDBGER_PCIBAR_IO) ? io : mem,
			(pKdbgerPciConfig->baseAddrReg1 & KDBGER_PCIBAR_IO) ?
				(pKdbgerPciConfig->baseAddrReg1 & KDBGER_PCIBAR_IOBA_MASK) :
				(pKdbgerPciConfig->baseAddrReg1 & KDBGER_PCIBAR_MEMBA_MASK),

			(pKdbgerPciConfig->baseAddrReg2 & KDBGER_PCIBAR_IO) ? io : mem,
			(pKdbgerPciConfig->baseAddrReg2 & KDBGER_PCIBAR_IO) ?
				(pKdbgerPciConfig->baseAddrReg2 & KDBGER_PCIBAR_IOBA_MASK) :
				(pKdbgerPciConfig->baseAddrReg2 & KDBGER_PCIBAR_MEMBA_MASK),

			(pKdbgerPciConfig->baseAddrReg3 & KDBGER_PCIBAR_IO) ? io : mem,
			(pKdbgerPciConfig->baseAddrReg3 & KDBGER_PCIBAR_IO) ?
				(pKdbgerPciConfig->baseAddrReg3 & KDBGER_PCIBAR_IOBA_MASK) :
				(pKdbgerPciConfig->baseAddrReg3 & KDBGER_PCIBAR_MEMBA_MASK),

			(pKdbgerPciConfig->baseAddrReg4 & KDBGER_PCIBAR_IO) ? io : mem,
			(pKdbgerPciConfig->baseAddrReg4 & KDBGER_PCIBAR_IO) ?
				(pKdbgerPciConfig->baseAddrReg4 & KDBGER_PCIBAR_IOBA_MASK) :
				(pKdbgerPciConfig->baseAddrReg4 & KDBGER_PCIBAR_MEMBA_MASK),

			(pKdbgerPciConfig->baseAddrReg5 & KDBGER_PCIBAR_IO) ? io : mem,
			(pKdbgerPciConfig->baseAddrReg5 & KDBGER_PCIBAR_IO) ?
				(pKdbgerPciConfig->baseAddrReg5 & KDBGER_PCIBAR_IOBA_MASK) :
				(pKdbgerPciConfig->baseAddrReg5 & KDBGER_PCIBAR_MEMBA_MASK),

			pKdbgerPciConfig->expRomBaseAddr );
	}

	// Print value
	printWindowAt(
		pKdbgerUiProperty->kdbgerDumpPanel,
		value, 
		KDBGER_DUMP_BYTE_PER_LINE,
		KDBGER_DUMP_BUF_PER_LINE,
		KDBGER_DUMP_VALUE_LINE,
		KDBGER_DUMP_VALUE_COLUMN,
		WHITE_BLUE,
		"%s",
		valueBuf );

	// Print ASCII
	printWindowAt(
		pKdbgerUiProperty->kdbgerDumpPanel,
		ascii,
		KDBGER_DUMP_BYTE_PER_LINE,
		KDBGER_DUMP_BYTE_PER_LINE,
		KDBGER_DUMP_ASCII_LINE,
		KDBGER_DUMP_ASCII_COLUMN,
		WHITE_BLUE,
		"%s",
		asciiBuf );

	// Print Offset bar
	printWindowAt(
		pKdbgerUiProperty->kdbgerDumpPanel,
		offset, 
		KDBGER_STRING_NLINE,
		4,
		KDBGER_DUMP_OFF_LINE,
		KDBGER_DUMP_OFF_COLUMN,
		YELLOW_BLUE,
		"%4.4X",
		pKdbgerUiProperty->kdbgerDumpPanel.byteOffset );

	// Print base address & First/Second title
	switch( pKdbgerUiProperty->kdbgerHwFunc ) {

		default:
		case KHF_MEM:

			// Base address
			printWindowAt(
				pKdbgerUiProperty->kdbgerDumpPanel,
				baseaddr, 
				KDBGER_STRING_NLINE,
				20,
				KDBGER_DUMP_BASEADDR_LINE,
				strlen( pKdbgerUiProperty->kdbgerDumpPanel.infoStr ),
				WHITE_BLUE,
				KDBGER_INFO_MEMORY_BASE_FMT,
				(u32)(pKdbgerUiProperty->kdbgerDumpPanel.byteBase >> 32),
				(u32)(pKdbgerUiProperty->kdbgerDumpPanel.byteBase & 0xFFFFFFFFULL) );
			break;

		case KHF_IO:

			// Base address
			printWindowAt(
				pKdbgerUiProperty->kdbgerDumpPanel,
				baseaddr, 
				KDBGER_STRING_NLINE,
				5,
				KDBGER_DUMP_BASEADDR_LINE,
				strlen( pKdbgerUiProperty->kdbgerDumpPanel.infoStr ),
				WHITE_BLUE,
				KDBGER_INFO_IO_BASE_FMT,
				(u32)(pKdbgerUiProperty->kdbgerDumpPanel.byteBase & 0x0000FFFFULL) );
			break;

		case KHF_PCI:

			// Base address
			pKdbgerPciDev = getPciDevice( pKdbgerUiProperty, 
							pKdbgerUiProperty->kdbgerDumpPanel.byteBase );
			if( pKdbgerPciDev )
				printWindowAt(
					pKdbgerUiProperty->kdbgerDumpPanel,
					baseaddr, 
					KDBGER_STRING_NLINE,
					29,
					KDBGER_DUMP_BASEADDR_LINE,
					strlen( pKdbgerUiProperty->kdbgerDumpPanel.infoStr ),
					WHITE_BLUE,
					KDBGER_INFO_PCI_BASE_FMT,
					pKdbgerPciDev->bus, pKdbgerPciDev->dev, pKdbgerPciDev->fun );

			// PCI first/second title
			printWindowMove(
				pKdbgerUiProperty->kdbgerDumpPanel,
				ftitle, 
				KDBGER_STRING_NLINE,
				KDBGER_MAX_PCINAME,
				KDBGER_DUMP_FTITLE_LINE,
				KDBGER_DUMP_FTITLE_COLUMN,
				WHITE_BLUE,
				"%s: %s",
				KDBGER_FTITLE_PCI,
				(pKdbgerUiProperty->pKdbgerPciIds + pKdbgerUiProperty->kdbgerDumpPanel.byteBase)->venTxt );

			printWindowMove(
				pKdbgerUiProperty->kdbgerDumpPanel,
				stitle, 
				KDBGER_STRING_NLINE,
				KDBGER_MAX_PCINAME,
				KDBGER_DUMP_STITLE_LINE,
				KDBGER_DUMP_FTITLE_COLUMN,
				WHITE_BLUE,
				"%s: %s",
				KDBGER_STITLE_PCI,
				(pKdbgerUiProperty->pKdbgerPciIds + pKdbgerUiProperty->kdbgerDumpPanel.byteBase)->devTxt );
			break;

		case KHF_PCIL:
			break;

		case KHF_IDE:

			// Base address
			printWindowAt(
				pKdbgerUiProperty->kdbgerDumpPanel,
				baseaddr, 
				KDBGER_STRING_NLINE,
				20,
				KDBGER_DUMP_BASEADDR_LINE,
				strlen( pKdbgerUiProperty->kdbgerDumpPanel.infoStr ),
				WHITE_BLUE,
				KDBGER_INFO_IDE_BASE_FMT,
				(u32)(pKdbgerUiProperty->kdbgerDumpPanel.byteBase >> 32),
				(u32)(pKdbgerUiProperty->kdbgerDumpPanel.byteBase & 0xFFFFFFFFULL) );
			break;

		case KHF_CMOS:

			// Base address
			printWindowAt(
				pKdbgerUiProperty->kdbgerDumpPanel,
				baseaddr, 
				KDBGER_STRING_NLINE,
				20,
				KDBGER_DUMP_BASEADDR_LINE,
				strlen( pKdbgerUiProperty->kdbgerDumpPanel.infoStr ),
				WHITE_BLUE,
				KDBGER_INFO_CMOS_BASE_FMT,
				(u8)(pKdbgerUiProperty->kdbgerDumpPanel.byteBase & 0xFFULL) );
			break;
	}


	// Highlight & Editing
	y = (pKdbgerUiProperty->kdbgerDumpPanel.byteOffset / KDBGER_DUMP_BYTE_PER_LINE) + KDBGER_DUMP_VALUE_LINE;
	x = ((pKdbgerUiProperty->kdbgerDumpPanel.byteOffset % KDBGER_DUMP_BYTE_PER_LINE) * 3) + KDBGER_DUMP_VALUE_COLUMN;
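	// e.g. byteOffset 0x13 (19) lands on value row 19/16 = 1 and at column
	// offset (19 % 16) * 3 = 9, since each byte takes two hex digits plus a
	// space in the value panel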
	if( pKdbgerUiProperty->kdbgerDumpPanel.toggleEditing ) {

		color = (editorColorCount++ % 2) ? YELLOW_RED : YELLOW_BLACK;

		printWindowMove(
			pKdbgerUiProperty->kdbgerDumpPanel,
			highlight,
			KDBGER_STRING_NLINE,
			KDBGER_DUMP_HL_DIGITS,
			y,
			x,
			color,
			"%2.2X",
			pKdbgerUiProperty->kdbgerDumpPanel.editingBuf );
	}
	else {

		printWindowMove(
			pKdbgerUiProperty->kdbgerDumpPanel,
			highlight,
			KDBGER_STRING_NLINE,
			KDBGER_DUMP_HL_DIGITS,
			y,
			x,
			YELLOW_RED,
			"%2.2X",
			*(pDataPtr + pKdbgerUiProperty->kdbgerDumpPanel.byteOffset) );

		pKdbgerUiProperty->kdbgerDumpPanel.editingBuf = *(pDataPtr + pKdbgerUiProperty->kdbgerDumpPanel.byteOffset);
	}


	// Bits
	if( pKdbgerUiProperty->kdbgerDumpPanel.toggleBits ) {

		printWindowMove(
			pKdbgerUiProperty->kdbgerDumpPanel,
			bits,
			KDBGER_STRING_NLINE,
			KDBGER_DUMP_BITS_DIGITS,
			y + 1,
			x,
			WHITE_RED,
			"%d%d%d%d_%d%d%d%d",
			OLUX_GET_BIT( *(pDataPtr + pKdbgerUiProperty->kdbgerDumpPanel.byteOffset), 7 ),			
			OLUX_GET_BIT( *(pDataPtr + pKdbgerUiProperty->kdbgerDumpPanel.byteOffset), 6 ),
			OLUX_GET_BIT( *(pDataPtr + pKdbgerUiProperty->kdbgerDumpPanel.byteOffset), 5 ),
			OLUX_GET_BIT( *(pDataPtr + pKdbgerUiProperty->kdbgerDumpPanel.byteOffset), 4 ),
			OLUX_GET_BIT( *(pDataPtr + pKdbgerUiProperty->kdbgerDumpPanel.byteOffset), 3 ),
			OLUX_GET_BIT( *(pDataPtr + pKdbgerUiProperty->kdbgerDumpPanel.byteOffset), 2 ),
			OLUX_GET_BIT( *(pDataPtr + pKdbgerUiProperty->kdbgerDumpPanel.byteOffset), 1 ),
			OLUX_GET_BIT( *(pDataPtr + pKdbgerUiProperty->kdbgerDumpPanel.byteOffset), 0 ) );
	}
	else
		destroyWindow( pKdbgerUiProperty->kdbgerDumpPanel, bits );


	// Print ASCII highlight
	if( pKdbgerUiProperty->kdbgerHwFunc != KHF_PCI ) {
		y = (pKdbgerUiProperty->kdbgerDumpPanel.byteOffset / KDBGER_DUMP_BYTE_PER_LINE) + KDBGER_DUMP_ASCII_LINE;
		x = (pKdbgerUiProperty->kdbgerDumpPanel.byteOffset % KDBGER_DUMP_BYTE_PER_LINE) + KDBGER_DUMP_ASCII_COLUMN;
		printWindowMove(
			pKdbgerUiProperty->kdbgerDumpPanel,
			hlascii,
			KDBGER_STRING_NLINE,
			KDBGER_DUMP_HLA_DIGITS,
			y,
			x,
			YELLOW_RED,
			"%c",
			KDBGER_DUMP_ASCII_FILTER( *(pDataPtr + pKdbgerUiProperty->kdbgerDumpPanel.byteOffset) ) );
	}
}
	ApplicationWindow_WGL::~ApplicationWindow_WGL(void)
	{
		destroyWindow();
	}
	bool ApplicationWindow_WGL::createWindow( std::string title, Vector2 position, Vector2 size, bool fullscreen, const ParameterMap *parameters)
	{
		_renderer = Renderer::getSingletonPtr();

		HWND		parentWnd = nullptr;
		int			bpp = 32;
		int			PixelFormat;			// Holds The Results After Searching For A Match
		WNDCLASS	wc;						// Windows Class Structure
		DWORD		dwExStyle;				// Window Extended Style
		DWORD		dwStyle;				// Window Style

		dwExStyle = WS_EX_APPWINDOW | WS_EX_WINDOWEDGE;
		dwStyle = WS_OVERLAPPEDWINDOW;

		if(parameters != nullptr)
		{
			ParameterMap::const_iterator it;
			ParameterMap::const_iterator itEnd = parameters->end();
			if ( (it = parameters->find("parent_window")) != itEnd )
			{
				parentWnd = (HWND) atoi(it->second.c_str());
				dwStyle = WS_CHILD;
			}
		}

		Vector2 sceneSize = _renderer->_sceneViewMax - _renderer->_sceneViewMin;
		_aspectRatio = sceneSize.x / sceneSize.y;

		_fullscreen = fullscreen;			

		_hInstance			= GetModuleHandle(NULL);				// Grab An Instance For Our Window

		wc.style			= CS_HREDRAW | CS_VREDRAW | CS_OWNDC;	// Redraw On Size, And Own DC For Window.
		wc.lpfnWndProc		= &ApplicationWindow_WGL::initialWndProc;// WndProc Handles Messages
		wc.cbClsExtra		= 0;									// No Extra Window Data
		wc.cbWndExtra		= 0;									// No Extra Window Data
		wc.hInstance		= _hInstance;							// Set The Instance
		wc.hIcon			= LoadIcon(NULL, IDI_WINLOGO);			// Load The Default Icon
		wc.hCursor			= LoadCursor(NULL, IDC_ARROW);			// Load The Arrow Pointer
		wc.hbrBackground	= NULL;									// No Background Required For GL
		wc.lpszMenuName		= NULL;									// We Don't Want A Menu
		wc.lpszClassName	= SKETCHYDYNAMICS_WINDOW_CLASS_NAME;			// Set The Class Name

		if (!RegisterClass(&wc))									// Attempt To Register The Window Class
		{
			Logger::getSingletonPtr()->writeError("{ApplicationWindow_WGL}Failed To Register The Window Class");
			return FALSE;											// Return FALSE
		}

		if (fullscreen)												// Attempt Fullscreen Mode?
		{
			DEVMODE dmScreenSettings;								// Device Mode
			memset(&dmScreenSettings,0,sizeof(dmScreenSettings));	// Makes Sure Memory's Cleared
			dmScreenSettings.dmSize=sizeof(dmScreenSettings);		// Size Of The Devmode Structure
			dmScreenSettings.dmPelsWidth	= (long)size.x;			// Selected Screen Width
			dmScreenSettings.dmPelsHeight	= (long)size.y;			// Selected Screen Height
			dmScreenSettings.dmBitsPerPel	= bpp;					// Selected Bits Per Pixel
			dmScreenSettings.dmFields=DM_BITSPERPEL|DM_PELSWIDTH|DM_PELSHEIGHT;

			// Try To Set Selected Mode And Get Results.  NOTE: CDS_FULLSCREEN Gets Rid Of Start Bar.
			if (ChangeDisplaySettings(&dmScreenSettings,CDS_FULLSCREEN)!=DISP_CHANGE_SUCCESSFUL)
			{
				// If The Mode Fails, Use Windowed Mode.			
				fullscreen=FALSE;
				Logger::getSingletonPtr()->writeWarning("Fullscreen mode not supported");			
			}
		}

		if (fullscreen)												// Are We Still In Fullscreen Mode?
		{
			dwExStyle=WS_EX_APPWINDOW;								// Window Extended Style
			dwStyle=WS_POPUP;										// Windows Style
			//ShowCursor(FALSE);										// Hide Mouse Pointer
		}		

		RECT		WindowRect;				
		WindowRect.left	= (long)position.x;
		WindowRect.right = (long)(position.x + size.x);
		WindowRect.top = (long)position.y;
		WindowRect.bottom = (long)(position.y + size.y);

		AdjustWindowRectEx(&WindowRect, dwStyle, FALSE, dwExStyle);		// Adjust Window To True Requested Size

		// Create The Window
		if (!(_hWnd=CreateWindowEx(	dwExStyle,							// Extended Style For The Window
			SKETCHYDYNAMICS_WINDOW_CLASS_NAME,		// Class Name
			title.c_str(),						// Window Title
			dwStyle |							// Defined Window Style
			WS_CLIPSIBLINGS |					// Required Window Style
			WS_CLIPCHILDREN,					// Required Window Style
			(int)position.x, (int)position.y,	// Window Position
			WindowRect.right-WindowRect.left,	// Calculate Window Width
			WindowRect.bottom-WindowRect.top,	// Calculate Window Height
			parentWnd,							// Parent Window
			NULL,								// No Menu
			_hInstance,							// Instance
			this)))								// Pass To WM_CREATE
		{
			destroyWindow();								// Reset The Display
			Logger::getSingletonPtr()->writeError("{ApplicationWindow_WGL}Window Creation Error");
			return FALSE;								// Return FALSE
		}

		static	PIXELFORMATDESCRIPTOR pfd=				// pfd Tells Windows How We Want Things To Be
		{
			sizeof(PIXELFORMATDESCRIPTOR),				// Size Of This Pixel Format Descriptor
			1,											// Version Number
			PFD_DRAW_TO_WINDOW |						// Format Must Support Window
			PFD_SUPPORT_OPENGL |						// Format Must Support OpenGL
			PFD_DOUBLEBUFFER,							// Must Support Double Buffering
			PFD_TYPE_RGBA,								// Request An RGBA Format
			bpp,										// Select Our Color Depth
			0, 0, 0, 0, 0, 0,							// Color Bits Ignored
			0,											// No Alpha Buffer
			0,											// Shift Bit Ignored
			0,											// No Accumulation Buffer
			0, 0, 0, 0,									// Accumulation Bits Ignored
			16,											// 16Bit Z-Buffer (Depth Buffer)  
			0,											// No Stencil Buffer
			0,											// No Auxiliary Buffer
			PFD_MAIN_PLANE,								// Main Drawing Layer
			0,											// Reserved
			0, 0, 0										// Layer Masks Ignored
		};

		if (!(_hDC=GetDC(_hWnd)))							// Did We Get A Device Context?
		{
			destroyWindow();								// Reset The Display
			Logger::getSingletonPtr()->writeError("{ApplicationWindow_WGL}Can't Create A GL Device Context");
			return FALSE;								// Return FALSE
		}

		if (!(PixelFormat=ChoosePixelFormat(_hDC,&pfd)))	// Did Windows Find A Matching Pixel Format?
		{
			destroyWindow();								// Reset The Display
			Logger::getSingletonPtr()->writeError("{ApplicationWindow_WGL}Can't Find A Suitable PixelFormat");
			return FALSE;								// Return FALSE
		}

		if(!SetPixelFormat(_hDC,PixelFormat,&pfd))		// Are We Able To Set The Pixel Format?
		{
			destroyWindow();								// Reset The Display
			Logger::getSingletonPtr()->writeError("{ApplicationWindow_WGL}Can't Set The PixelFormat");
			return FALSE;								// Return FALSE
		}

		if (!(_hRC=wglCreateContext(_hDC)))				// Are We Able To Get A Rendering Context?
		{
			destroyWindow();								// Reset The Display
			Logger::getSingletonPtr()->writeError("{ApplicationWindow_WGL}Can't Create A GL Rendering Context");
			return FALSE;								// Return FALSE
		}

		if(!wglMakeCurrent(_hDC,_hRC))					// Try To Activate The Rendering Context
		{
			destroyWindow();								// Reset The Display
			Logger::getSingletonPtr()->writeError("{ApplicationWindow_WGL}Can't Activate The GL Rendering Context");
			return FALSE;								// Return FALSE
		}
				
		ShowWindow(_hWnd, SW_SHOW);						// Show The Window
		SetForegroundWindow(_hWnd);						// Slightly Higher Priority
		SetFocus(_hWnd);								// Sets Keyboard Focus To The Window
		resizeGLScene((int)size.x, (int)size.y);					// Set Up Our Perspective GL Screen

		if(!_renderer->initGL())
		{
			SKETCHYDYNAMICS_LOG_ERROR("OpenGL init failed");
			destroyWindow();
			return FALSE;
		}

		setVSync(false);

		return TRUE;									// Success
	}
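A minimal caller sketch for the routine above, assuming the class is default-constructible and Vector2 takes two coordinates; the title and geometry values are illustrative:

	// Usage sketch (assumptions noted above).
	ApplicationWindow_WGL window;
	if (!window.createWindow("Demo", Vector2(64, 64), Vector2(800, 600),
	                         false /*windowed*/, nullptr /*no extra parameters*/))
	{
		// createWindow has already logged the failure and torn the window down
	}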
void FrameAnalyser::analyseObjects(string filename)
{

    ausgabe.open(filename.c_str(), ios::out);

    int frames = 0;
    double zeit = 0;
    int time = 1;


    ColoredObjectDetector g(DObject::YellowBuoy);
    ColoredObjectDetector r(DObject::RedBuoy,time);
    namedWindow("Ausgabe",CV_WINDOW_FREERATIO);

    int d[8];
    for (int i = 0; i < 8; i++)
        d[i] = 0;

    for (int var = myStack->size()-1; var > 0; var-=time) {

        clock_t start, end;
        Frame f = myStack->getFrame(var);

        start = clock();
        g.getObjects(f);
        r.getObjects(f);
        end = clock();

        Mat im = f.getImage();


        imshow("Ausgabe",im);

        cout << endl << "1: boje zu sehen." << endl;
        cout << "2: boje erkannt" << endl;
        cout << "3: boje sicher erkannt" << endl;
        cout << "4: falsche boje erkannt" << endl << endl;

        char c = 0;
        while (c != 32) {
            c = waitKey(0);
            int k = ((int) c) - 48;
            switch(k){
            case 1:
                d[0] = d[0] +1;
                break;
            case 2:
                d[0] = d[0] +1;
                d[1] = d[1] +1;
                break;
            case 3:
                d[0] = d[0] +1;
                d[1] = d[1] +1;
                d[2] = d[2] +1;
                break;
            case 4:
                d[3] = d[3] +1;
            }

//           cout << k << ": " << d[k-1] << "\t";
        }

        cout << endl;

        frames++;

        zeit += (end-start);

        ausgabe << d[0] << "\t";
        ausgabe << d[1] << "\t";
        ausgabe << d[2] << "\t";
        ausgabe << d[3] << "\t";

        ausgabe << (end-start) << "\t";
        ausgabe << CLOCKS_PER_SEC << endl;
    }


    destroyWindow("Ausgabe");

    ausgabe.close();

    cout << "Frames: " << frames << endl;
    cout << "Zu sehen: " << d[0]  << endl;
    cout << "erkannt: " << d[1]  << endl;
    cout << "sicher erkannt: " << d[2]  << endl;
    cout << "falsch erkannt: " << d[3]  << endl;
    cout << "Clocks per second: " << zeit/frames << endl;
    cout << "Millisekunden: " << zeit/frames/CLOCKS_PER_SEC*1000 << endl;

}
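The two timing lines at the end convert the accumulated clock() ticks into an average per frame. The same conversion as a standalone helper:

#include <ctime>

// Converts accumulated clock() ticks to milliseconds,
// matching the per-frame statistics printed above.
double ticksToMilliseconds(double ticks) {
    return ticks / CLOCKS_PER_SEC * 1000.0;
}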
void FrameAnalyser::analyseHorizont(string filename, string label, bool print)
{

    HorizontDetector hDetector(1, 180, 100);

    vector<Horizont> horizontVector;
    fs.open(label, FileStorage::READ);
    if (!fs.isOpened())
        cout << "Ahhhhhhhhhh! Kann datei nicht öffnen: " << label << endl;
    else
        fs["HorizontVector"] >> horizontVector;

    ausgabe.open(filename.c_str(), ios::out);

    int index = 0;

    int frames = 0;
    int moeglich = 0;
    int richtig = 0;
    double abweichungr = 0;
    double abweichungf = 0;
    int entfernung = 0;
    int varianz = 0;
    double zeit = 0;


    for (int var = myStack->size()-1; var > 0; var--, index++) {

        clock_t start, end;
        start = clock();
        if(var < myStack->size()-1){
            Horizont a = myStack->getFrame(var+1).getHorizont();
            hDetector.getStableHorizont(myStack->getFrame(var),a);

            myStack->getFrame(var).calcFlow(myStack->getFrame(var+1));
        }else{
            hDetector.getStableHorizont(myStack->getFrame(var));
        }
        end = clock();

        Horizont b = myStack->getFrame(var).getHorizont();
        Horizont c = horizontVector[index];

        frames++;
        if(!c.empty()){
            moeglich++;
            if(b.dAngle(c) <=2 && b.dPoint(c) <= 100){
                richtig++;
                abweichungr += b.dAngle(c);
            }else
                abweichungf += b.dAngle(c);
            varianz += b.compareHorizont(c);
            entfernung += b.dPoint(c);
            zeit += (end - start);
        }


        ausgabe << b.dAngle(c) << "\t";
        ausgabe << b.dPoint(c) << "\t";
        ausgabe << b.dCenter() << "\t";
        ausgabe << b.compareHorizont(c) << "\t";
        ausgabe << b.angle << "\t";
        ausgabe << c.angle << "\t";
        ausgabe << (end - start) << "\t";
        ausgabe << myStack->getFrame(var).getMean().x << "\t";
        ausgabe << myStack->getFrame(var).getMean().y << "\t";
        if(myStack->getFrame(var).hasLog()){
            ausgabe << myStack->getFrame(var).getLog().kompass << "\t";
            ausgabe << myStack->getFrame(var).getLog().kraengung << "\t";
            ausgabe << myStack->getFrame(var).getLog().accY << "\t";
            ausgabe << myStack->getFrame(var).getLog().gyroY;
        }
        ausgabe << endl;

        if(print){
            Mat im = myStack->getFrame(var).getFrame();
            line(im, b.p1, b.p2, Scalar(0, 0, 255), 1, 8);
            line(im, c.p1, c.p2, Scalar(0, 255, 0), 1, 8);

            ostringstream oss;
            oss << b.dAngle(c) << " - " << b.dPoint(c ) << " - " << b.compareHorizont(c) << " - " << b.distance(c);
            string msg = oss.str();
            int baseLine = 0;
            Size textSize = getTextSize(msg, 1, 1, 1, &baseLine);
            Point textOrigin(im.cols - 2*textSize.width - 10, im.rows - 2*baseLine - 10);
            putText( im, msg, textOrigin, 1, 1, Scalar(255,255,255));

            imshow("Ausgabe",im);
            waitKey(0);
        }

    }
    if (print) {
        destroyWindow("Ausgabe");
    }
    ausgabe.close();

    double prozent = 100.0 * richtig / frames;

    cout << "Frames: " << frames << endl;
    cout << "Möglich: " << moeglich << endl;
    cout << "Erkannt: " << richtig << " , Prozent: " << prozent << endl;
    cout << "Abweichung R: " << abweichungr/moeglich << endl;
    cout << "Abweichung F: " << abweichungf/moeglich << endl;
    cout << "Entfernung: " << entfernung/moeglich << endl;
    cout << "Varianz: " << varianz/moeglich << endl;
    cout << "Clocks per second: " << zeit/moeglich << endl;
    cout << "Millisekunden: " << zeit/moeglich/CLOCKS_PER_SEC*1000 << endl;
}
void ColorImageProcessor::done()
{
	if(processedImage)
		delete processedImage;
	destroyWindow("Settings");
}
Beispiel #15
0
/*************************************************************************
	Destroy all Window objects
*************************************************************************/
void WindowManager::destroyAllWindows(void)
{
	while (!d_windowRegistry.empty())
		destroyWindow(*d_windowRegistry.begin());
}
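Note the loop shape: it re-reads d_windowRegistry.begin() on every pass instead of iterating, presumably because destroyWindow removes the window from the registry and would invalidate any iterator held across the call.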
Beispiel #16
0
int main(int argc, char **argv){

  int opcion;	// option for getopt
  int vflag=0, rflag=0, nflag=0, gflag=0, iflag=0, mflag=0, oflag=0;  // flags for getopt
  float r=0.5, g=1.0;
  int n=2;
  string nombreImagen;
  string nombreMascara;
  string nombreSalida = "output.png";
  Mat imagen, padded, complexImg, filter, filterAux, imagenSalida, filterSalida, imagenFrecuencias, imagenFrecuenciasSinOrden, imagenHSV;
  Mat complexAux;
  Mat salida;
  Mat imagenPasoBaja;
  Mat mascara;
  vector<Mat> canales;

  while((opcion=getopt(argc, argv, "vr:n:g:i:o:m:")) !=-1 ){

    switch(opcion){

      case 'v':
        vflag=1;
      break;

      case 'r':
        rflag=1;
        r=atof(optarg);
        if(r<0 || r>1){
          cout << "Valor de 'r' introducido invalido" << endl;
          exit(-1);
        }
      break;

      case 'n':
        nflag=1;
        n = atoi(optarg);
        if(n<0 || n>10){
          cout << "Valor de 'n' introducido invalido" << endl;
          exit(-1);
        }
      break;

      case 'g':
        gflag=1;
        g = atof(optarg);
        if(g<0.0 || g>5.0){
          cout << "Valor de 'g' introducido invalido" << endl;
          exit(-1);
        }
      break;

      case 'i':
        iflag=1;
        nombreImagen = optarg;
      break;

      case 'm':
        mflag=1;
        nombreMascara=optarg;
      break;

      case 'o':
        oflag=1;
        nombreSalida=optarg;
      break;
    	
    	
      case '?':
        // something went wrong
        help();
        exit(-1);
      break;

      default:
        help();
        exit(-1);
      break;
      }

   }

   // First, load the image

   if(iflag==1){
    imagen = imread(nombreImagen, CV_LOAD_IMAGE_ANYDEPTH);
    if(imagen.empty()){
      cout << "Imagen especificada invalida" << endl;
      exit(-1);
    }else{
      cout << "Imagen cargada con exito" << endl;
      if(vflag==1){
        namedWindow("Imagen", CV_WINDOW_AUTOSIZE);
        imshow("Imagen", imagen);
        waitKey(0);
        destroyWindow("Imagen");
      }
    }
  }else{
    cout << "La imagen es necesaria" << endl;
    exit(-1);
   }

   // Compute r as a fraction of half the image diagonal
   r=(r)*(sqrt(pow((imagen.rows),2.0)+pow((imagen.cols),2.0))/2);

   int M = getOptimalDFTSize(imagen.rows);
   int N = getOptimalDFTSize(imagen.cols);


   // Check whether a mask was provided and load it
  if(mflag==1){
    // Load the mask
    mascara = imread(nombreMascara, 0);
    if(mascara.empty()){
      cout << "Mascara especificada invalida" << endl;
      exit(-1);
    }else{
      cout << "Mask loaded successfully" << endl;
    }
  }


   // Now branch on the number of channels

   if(imagen.channels()==1){
    // Monochrome image

    imagen.convertTo(imagenPasoBaja,CV_32F, 1.0/255.0);
    copyMakeBorder(imagenPasoBaja, padded, 0, M-imagenPasoBaja.rows, 0, N - imagenPasoBaja.cols, BORDER_CONSTANT, Scalar::all(0));
    Mat planes[] = {Mat_<float>(padded), Mat::zeros(padded.size(), CV_32F)};
    merge(planes, 2, complexImg);

    dft(complexImg, complexImg);
    filter = complexImg.clone();
    filterAux = complexImg.clone();
    complexAux = complexImg.clone();
    shiftDFT(complexImg);
    shiftDFT(complexAux);

    butterworth(filter, r, n);
    butterworth(filterAux, r, 0);
    mulSpectrums(complexImg, filter, complexImg, 0);
    mulSpectrums(complexAux, filterAux, complexAux, 0);
    shiftDFT(complexImg);
    shiftDFT(complexAux);

    // Build the spectrum images for optional display
    imagenFrecuencias = create_spectrum(complexImg);
    imagenFrecuenciasSinOrden = create_spectrum(complexAux);

    // Inverse DFT
    idft(complexImg, complexImg, DFT_SCALE);
    split(complexImg, planes);
    normalize(planes[0], imagenSalida, 0, 1, CV_MINMAX);
    split(filter, planes);
    normalize(planes[0], filterSalida, 0, 1, CV_MINMAX);

    salida = imagenPasoBaja.clone();
    if(mflag==1){
      // With a mask, process pixel by pixel
      // Walk over the image
      for(int i=0; i<imagen.rows; i++){
        for(int j=0; j<imagen.cols;j++){
          if(mascara.at<uchar>(i,j)!=0){
            salida.at<float>(i,j) = (g+1)*(imagenPasoBaja.at<float>(i,j)) - (g*imagenSalida.at<float>(i,j));
          }
        }
      }
    }else{
      // Without a mask, apply the filter everywhere
      for(int i=0; i<imagen.rows; i++){
        for(int j=0; j<imagen.cols;j++){
            salida.at<float>(i,j) = ((g+1)*imagenPasoBaja.at<float>(i,j)) - (g*imagenSalida.at<float>(i,j));
        }
      }
    }

    salida.convertTo(salida, CV_8U, 255.0, 0.0);

    if(vflag==1){
      imshow("Imagen final", salida);
      imshow("Filtro Butterworth", filterSalida);
      imshow("Espectro", imagenFrecuencias);
      imshow("Espectro de imagen sin orden", imagenFrecuenciasSinOrden);
      waitKey(0);
    }

   }else{
    // Split the image into channels
    cvtColor(imagen, imagenHSV, CV_BGR2HSV);
    split(imagenHSV, canales);
    Mat temporal;
    canales[2].convertTo(imagenPasoBaja, CV_32F, 1.0/255.0);
    copyMakeBorder(imagenPasoBaja, padded, 0, M-imagenPasoBaja.rows, 0, N - imagenPasoBaja.cols, BORDER_CONSTANT, Scalar::all(0));
    Mat planes[] = {Mat_<float>(padded), Mat::zeros(padded.size(), CV_32F)};
    merge(planes, 2, complexImg);

    dft(complexImg, complexImg);

    filter = complexImg.clone();

    shiftDFT(complexImg);

    butterworth(filter, r, n);
    mulSpectrums(complexImg, filter, complexImg, 0);
    shiftDFT(complexImg);

    // Build the spectrum image for optional display
    imagenFrecuencias = create_spectrum(complexImg);

    // Inverse DFT
    idft(complexImg, complexImg, DFT_SCALE);
    split(complexImg, planes);
    normalize(planes[0], imagenSalida, 0, 1, CV_MINMAX);
    split(filter, planes);
    normalize(planes[0], filterSalida, 0, 1, CV_MINMAX);

    salida = imagen.clone();
    canales[2] = imagenPasoBaja.clone();
    if(mflag==1){
      // With a mask
      for(int i=0; i<canales[2].rows; i++){
        for(int j=0; j<canales[2].cols;j++){
          if(mascara.at<uchar>(i,j)!=0){
            canales[2].at<float>(i,j) = ((g+1)*imagenPasoBaja.at<float>(i,j)) - (g*imagenSalida.at<float>(i,j));
          }
        }
      }
    }else{
      // Without a mask
      for(int i=0; i<canales[2].rows; i++){
        for(int j=0; j<canales[2].cols;j++){
            canales[2].at<float>(i,j) = ((g+1)*imagenPasoBaja.at<float>(i,j)) - (g*imagenSalida.at<float>(i,j));
        }
      }
    }

    canales[2].convertTo(canales[2], CV_8U, 255.0, 0.0);
    merge(canales, salida);
    cvtColor(salida, salida, CV_HSV2BGR);

    if(vflag==1){
      imshow("Final image", salida);
      imshow("Butterworth filter", filterSalida);
      imshow("Spectrum", imagenFrecuencias);
      waitKey(0);
    }


   }
   // Finally, write the image to file
   imwrite(nombreSalida, salida);

return 0;

}
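Both the monochrome and the HSV branch apply the same per-pixel high-boost formula, out = (g + 1) * original - g * lowpass, where lowpass is the Butterworth-filtered image. As an isolated sketch, assuming float pixels normalized to [0, 1]:

// High-boost sharpening step used in both branches above.
static inline float highBoost(float original, float lowpass, float g) {
  return (g + 1.0f) * original - g * lowpass;
}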
void FindLargest_ProjectionVoxel(int ImageNum, vector<OctVoxel>& Octree, vector<cv::Mat>& Silhouette, Cpixel*** vertexII, CMatrix* ART){

	int thresh = 70;
	int max_thresh = 210;
	RNG rng(12345);

	Mat src_gray;
	Mat drawing;

	double scale(0.7);
	Size ssize;
	CVector M(4);		//Homogeneous coordinates of the vertices(x,y,z,1) world coordinate
	CVector m(4);		//That the image coordinates (normalized) expressed in homogeneous coordinates
	M[3] = 1.0;
	//8 vertices world coordinates of the voxel (x,y,z)
	CVector3d vertexW[8];

	ofstream fout("larget_boundingbox_contour.txt");

	int Boundingbox_line[12][2] = { { 0, 1 }, { 1, 2 }, { 2, 3 }, { 3, 0 },
	{ 0, 4 }, { 1, 5 }, { 2, 6 }, { 3, 7 },
	{ 4, 5 }, { 5, 6 }, { 6, 7 }, { 7, 4 } };

	//---------------------------------------------------------------	
	for (auto h(0); h < ImageNum; h++){
		//src_gray = Silhouette[h];         	
		Silhouette[h].copyTo(src_gray);
		cout << "Silhouette_" << h << endl;

		for (auto j(0); j < Octree.size(); j++){

			Octree[j].SetVertexWorld_Rotated(vertexW);
			for (int k = 0; k < 8; ++k){	//8 vertices of the voxel
				M[0] = vertexW[k].x;
				M[1] = vertexW[k].y;
				M[2] = vertexW[k].z;
				m = ART[h] * M;
				vertexII[h][j][k].setPixel_u_v((int)(m[0] / m[2]), (int)(m[1] / m[2]));  // normalize
			}

			//-------------------------------------- bounding box ------------------------
			for (auto k(0); k < 12; k++){
				//Draw 12 lines of the voxel in img.
				Start_point.x = vertexII[h][j][Boundingbox_line[k][0]].getPixel_u();
				Start_point.y = vertexII[h][j][Boundingbox_line[k][0]].getPixel_v();
				PointStart.push_back(Start_point);
				End_point.x = vertexII[h][j][Boundingbox_line[k][1]].getPixel_u();
				End_point.y = vertexII[h][j][Boundingbox_line[k][1]].getPixel_v();
				PointEnd.push_back(End_point);

				//line(src_gray, Start_point, End_point, Scalar(225, 225,255), 2.0, CV_AA);
			}
		}
		

		Mat canny_output;
		vector<vector<Point> > contours;
		vector<Vec4i> hierarchy;

		double max_contour_area(0.0);
		int largest_contour_index(0);
		
		/// Detect edges using canny
		Canny(src_gray, canny_output, thresh, max_thresh, 3);
		/// Find contours
		//findContours(canny_output, contours, hierarchy, CV_RETR_TREE, CV_CHAIN_APPROX_SIMPLE, Point(0, 0));
		findContours(canny_output, contours, hierarchy, CV_RETR_CCOMP, CV_CHAIN_APPROX_NONE, Point(0, 0));

		/// Draw contours
		drawing = Mat::zeros(canny_output.size(), CV_8UC3);

		for (auto n(0); n < PointEnd.size(); n++){
			line(drawing, PointStart[n], PointEnd[n], Scalar(225, 225, 225), 1.0, 1, 0);
		}

		/// Get the moments
		vector<Moments> mu(contours.size());
		for (int i = 0; i < contours.size(); i++)
		{
			mu[i] = moments(contours[i], false);
			//cout << "# of contour points: " << contours[i].size() << endl;
		}
		////  Get the mass centers:
		vector<Point2f> mc(contours.size());
		for (int i = 0; i < contours.size(); i++)
		{
			mc[i] = Point2f(mu[i].m10 / mu[i].m00, mu[i].m01 / mu[i].m00);
		}
		//// ----------- Find the convex hull object for each contour
		vector<vector<Point> > hull(contours.size());
		for (int i = 0; i < contours.size(); i++){
			convexHull(Mat(contours[i]), hull[i], false);
		}				
		
		// Calculate the area with the moments 00 and compare with the result of the OpenCV function
		//printf("\t Info: Area and Contour Length \n");

		//cout << "contours.size() " << contours.size() << endl;
		double countour_Area(0.0);
		double arc_Length(0.0);

		for (int i = 0; i < contours.size(); i++)
		{
			countour_Area = (double)contourArea(contours[i]);
			arc_Length = (double)arcLength(contours[i], true);

			//cout << "contourArea [" << i << "] " << ": Moment " << mu[i].m00 
			//	 << " OpenCV " << countour_Area << " arcLength " << arc_Length << endl;		
			//cout << "countour_Area "<< countour_Area << " " << endl;

			if (countour_Area > max_contour_area){
				max_contour_area = countour_Area;
				largest_contour_index = i;
			}

			//------- draw all contour ---------------
			//Scalar color = Scalar(rng.uniform(0, 255), rng.uniform(0, 255), rng.uniform(0, 255));
			//drawContours(drawing, contours, i, color, 2, 8, hierarchy, 0, Point());
			//circle(drawing, mc[i], 4, color, -1, 8, 0);
			//drawContours(drawing, hull, i, color, 1, 8, vector<Vec4i>(), 0, Point());
			//drawContours(drawing, contours, i, Scalar(255, 255, 255), 0.10, 8, hierarchy, 0, Point());

		}
		//------- draw largest contour ---------------
		Scalar color = Scalar(rng.uniform(0, 255), rng.uniform(0, 255), rng.uniform(0, 255));
		drawContours(drawing, contours, largest_contour_index, color, 2, 8, hierarchy, 0, Point());
		//circle(drawing, mc[largest_contour_index], 4, color, -1, 8, 0);		
		//drawContours(drawing, contours, largest_contour_index, Scalar(0, 255, 255), 2, 8, hierarchy, 0, Point());
		//drawContours(drawing, hull, largest_contour_index, color, 2, 8, vector<Vec4i>(), 0, Point());
		//drawContours(drawing, contours, largest_contour_index, Scalar(255, 255, 255), 1, 8, hierarchy, 0, Point());

		fout << max_contour_area << endl;
		cout << "max_contour_area " << max_contour_area << endl;	
		
		//----------------------- Show in a window --------------------------------------
		//resize(drawing, drawing, ssize, INTER_NEAREST);
		namedWindow("Contours", CV_WINDOW_AUTOSIZE);
		imshow("Contours", drawing);

		//output white boundary
		imwrite("../../data2016/input/newzebra/contour_voxel/contour_voxel" + to_string(h) + ".bmp", drawing);

		waitKey(0);
		destroyWindow("silhouette");

		PointStart.clear();
		PointStart.shrink_to_fit();
		PointEnd.clear();
		PointEnd.shrink_to_fit();
	}

	//getchar();
}
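The projection step above multiplies each homogeneous world vertex by the 3x4 camera matrix ART[h] and then divides by the third component to obtain pixel coordinates. The same perspective division in isolation:

// Perspective division: given projected homogeneous coordinates (m0, m1, m2),
// the pixel is (m0/m2, m1/m2). Assumes m2 != 0.
static void projectToPixel(double m0, double m1, double m2, int& u, int& v) {
	u = (int)(m0 / m2);
	v = (int)(m1 / m2);
}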
Beispiel #18
0
int CCharsSegment::charsSegment(Mat input, vector<Mat>& resultVec, Color color) {
  if (!input.data) return 0x01;

  Color plateType = color;

  Mat input_grey;
  cvtColor(input, input_grey, CV_BGR2GRAY);

  if (0) {
    imshow("plate", input_grey);
    waitKey(0);
    destroyWindow("plate");
  }

  Mat img_threshold;


  //if (BLUE == plateType) {
  //  // cout << "BLUE" << endl;
  //  img_threshold = input_grey.clone();

  //  int w = input_grey.cols;
  //  int h = input_grey.rows;
  //  Mat tmp = input_grey(Rect_<double>(w * 0.1, h * 0.1, w * 0.8, h * 0.8));
  //  int threadHoldV = ThresholdOtsu(tmp);
  //  threshold(input_grey, img_threshold, threadHoldV, 255, CV_THRESH_BINARY);

  //} else if (YELLOW == plateType) {
  //  // cout << "YELLOW" << endl;
  //  img_threshold = input_grey.clone();
  //  int w = input_grey.cols;
  //  int h = input_grey.rows;
  //  Mat tmp = input_grey(Rect_<double>(w * 0.1, h * 0.1, w * 0.8, h * 0.8));
  //  int threadHoldV = ThresholdOtsu(tmp);
  //  // utils::imwrite("resources/image/tmp/inputgray2.jpg", input_grey);

  //  threshold(input_grey, img_threshold, threadHoldV, 255,
  //            CV_THRESH_BINARY_INV);

  //} else if (WHITE == plateType) {
  //  // cout << "WHITE" << endl;

  //  threshold(input_grey, img_threshold, 10, 255,
  //            CV_THRESH_OTSU + CV_THRESH_BINARY_INV);
  //} else {
  //  // cout << "UNKNOWN" << endl;
  //  threshold(input_grey, img_threshold, 10, 255,
  //            CV_THRESH_OTSU + CV_THRESH_BINARY);
  //}

  img_threshold = input_grey.clone();
  spatial_ostu(img_threshold, 8, 2, plateType);

  if (0) {
    imshow("plate", img_threshold);
    waitKey(0);
    destroyWindow("plate");
  }

  // remove the rivets (liuding) and horizontal lines
  // and also judge whether this is a plate, using the jump count

  if (!clearLiuDing(img_threshold)) return 0x02;
  //clearLiuDing(img_threshold);


  Mat img_contours;
  img_threshold.copyTo(img_contours);

  vector<vector<Point> > contours;
  findContours(img_contours,
               contours,               // a vector of contours
               CV_RETR_EXTERNAL,       // retrieve the external contours
               CV_CHAIN_APPROX_NONE);  // all pixels of each contours

  vector<vector<Point> >::iterator itc = contours.begin();
  vector<Rect> vecRect;

  while (itc != contours.end()) {
    Rect mr = boundingRect(Mat(*itc));
    Mat auxRoi(img_threshold, mr);

    if (verifyCharSizes(auxRoi)) vecRect.push_back(mr);
    ++itc;
  }


  if (vecRect.size() == 0) return 0x03;

  vector<Rect> sortedRect(vecRect);
  std::sort(sortedRect.begin(), sortedRect.end(),
            [](const Rect& r1, const Rect& r2) { return r1.x < r2.x; });

  size_t specIndex = 0;

  specIndex = GetSpecificRect(sortedRect);

  Rect chineseRect;
  if (specIndex < sortedRect.size())
    chineseRect = GetChineseRect(sortedRect[specIndex]);
  else
    return 0x04;

  if (0) {
    rectangle(img_threshold, chineseRect, Scalar(255));
    imshow("plate", img_threshold);
    waitKey(0);
    destroyWindow("plate");
  }

  vector<Rect> newSortedRect;
  newSortedRect.push_back(chineseRect);
  RebuildRect(sortedRect, newSortedRect, specIndex);

  if (newSortedRect.size() == 0) return 0x05;

  bool useSlideWindow = true;
  bool useAdapThreshold = true;
  //bool useAdapThreshold = CParams::instance()->getParam1b();

  for (size_t i = 0; i < newSortedRect.size(); i++) {
    Rect mr = newSortedRect[i];

    // Mat auxRoi(img_threshold, mr);
    Mat auxRoi(input_grey, mr);
    Mat newRoi;

    if (i == 0) {
      if (useSlideWindow) {
        float slideLengthRatio = 0.1f;
        //float slideLengthRatio = CParams::instance()->getParam1f();
        if (!slideChineseWindow(input_grey, mr, newRoi, plateType, slideLengthRatio, useAdapThreshold))
          judgeChinese(auxRoi, newRoi, plateType);
      }
      else
        judgeChinese(auxRoi, newRoi, plateType);
    }
    else {
      if (BLUE == plateType) {  
        threshold(auxRoi, newRoi, 0, 255, CV_THRESH_BINARY + CV_THRESH_OTSU);
      }
      else if (YELLOW == plateType) {
        threshold(auxRoi, newRoi, 0, 255, CV_THRESH_BINARY_INV + CV_THRESH_OTSU);
      }
      else if (WHITE == plateType) {
        threshold(auxRoi, newRoi, 0, 255, CV_THRESH_OTSU + CV_THRESH_BINARY_INV);
      }
      else {
        threshold(auxRoi, newRoi, 0, 255, CV_THRESH_OTSU + CV_THRESH_BINARY);
      }

      newRoi = preprocessChar(newRoi);
    }
     
    if (0) {
      if (i == 0) {
        imshow("input_grey", input_grey);
        waitKey(0);
        destroyWindow("input_grey");
      }
      if (i == 0) {
        imshow("newRoi", newRoi);
        waitKey(0);
        destroyWindow("newRoi");
      }
    }

    resultVec.push_back(newRoi);
  }

  return 0;
}
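A hypothetical call site for this overload; plateMat (a located plate image) stands in for whatever the surrounding pipeline provides:

  // Usage sketch: segment a plate already classified as BLUE.
  CCharsSegment segmenter;
  vector<Mat> charMats;
  int err = segmenter.charsSegment(plateMat, charMats, BLUE);
  if (err == 0) {
    // each Mat in charMats is one character block ready for recognition
  }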
	findBlobs::~findBlobs() 
	{
		destroyWindow("labled centroids");
	}
Beispiel #20
0
int StWinHandles::glCreateContext(StWinHandles*    theSlave,
                                  const StRectI_t& theRect,
                                  const int        theDepthSize,
                                  const bool       theIsQuadStereo,
                                  const bool       theDebugCtx) {
    (void )theRect;
#ifdef _WIN32
    ThreadGL = StThread::getCurrentThreadId();
    ST_DEBUG_LOG("WinAPI, glCreateContext, ThreadGL= " + ThreadGL + ", ThreadWnd= " + ThreadWnd);
    hDC = GetDC(hWindowGl);
    ST_GL_ERROR_CHECK(hDC != NULL, STWIN_ERROR_WIN32_GLDC,
                      "WinAPI, Can't create Master GL Device Context");
    if(theSlave != NULL) {
        theSlave->ThreadGL = ThreadGL;
        theSlave->hDC      = GetDC(theSlave->hWindowGl);
        ST_GL_ERROR_CHECK(theSlave->hDC != NULL, STWIN_ERROR_WIN32_GLDC,
                          "WinAPI, Can't create Slave GL Device Context");
    }

    HGLRC aRendCtx = NULL;
    {
      PIXELFORMATDESCRIPTOR aPixFrmtDesc = THE_PIXELFRMT_DOUBLE;
      aPixFrmtDesc.cDepthBits = (BYTE )theDepthSize;
      if(theIsQuadStereo) {
          aPixFrmtDesc.dwFlags = PFD_DRAW_TO_WINDOW | PFD_SUPPORT_GDI | PFD_SUPPORT_OPENGL
                               | PFD_DOUBLEBUFFER | PFD_STEREO;
      }

      HMODULE aModule = GetModuleHandleW(NULL);
      hWinTmp = CreateWindowExW(WS_EX_TOOLWINDOW | WS_EX_WINDOWEDGE | WS_EX_NOACTIVATE,
                                ClassTmp.toCString(), L"TmpWnd",
                                WS_POPUP | WS_CLIPSIBLINGS | WS_CLIPCHILDREN | WS_DISABLED,
                                // always create temporary window on main screen
                                // to workaround sporadic bugs (access violation) in AMD Catalyst drivers
                                2, 2, 4, 4, //theRect.left() + 2, theRect.top() + 2, 4, 4,
                                NULL, NULL, aModule, NULL);
      ST_GL_ERROR_CHECK(hWinTmp != NULL, STWIN_ERROR_WIN32_GLDC,
                        "WinAPI, Temporary window creation error");

      HDC aDevCtxTmp = GetDC(hWinTmp);
      int aPixFrmtIdTmp = ChoosePixelFormat(aDevCtxTmp, &aPixFrmtDesc);
      ST_GL_ERROR_CHECK(aPixFrmtIdTmp != 0, STWIN_ERROR_WIN32_PIXELFORMATF,
                        "WinAPI, Can't find a suitable PixelFormat for Tmp");

      ST_GL_ERROR_CHECK(SetPixelFormat(aDevCtxTmp, aPixFrmtIdTmp, &aPixFrmtDesc),
                        STWIN_ERROR_WIN32_PIXELFORMATS, "WinAPI, Can't set the PixelFormat for Master");
      StWinGlrcH aRendCtxTmp = new StWinGlrc(aDevCtxTmp, NULL);
      ST_GL_ERROR_CHECK(aRendCtxTmp->isValid(),
                        STWIN_ERROR_WIN32_GLRC_CREATE, "WinAPI, Can't create GL Rendering Context");
      ST_GL_ERROR_CHECK(aRendCtxTmp->makeCurrent(aDevCtxTmp),
                        STWIN_ERROR_WIN32_GLRC_ACTIVATE, "WinAPI, Can't activate Tmp GL Rendering Context");

      StGLContext aCtx(false);
      ST_GL_ERROR_CHECK(aCtx.stglInit(),
                        STWIN_ERROR_WIN32_GLRC_ACTIVATE, "WinAPI, Broken Tmp GL Rendering Context");

      int aPixFrmtId = 0;
      if(aCtx.extAll->wglChoosePixelFormatARB != NULL) {
          const int aPixAttribs[] = {
              WGL_DRAW_TO_WINDOW_ARB, GL_TRUE,
              WGL_SUPPORT_OPENGL_ARB, GL_TRUE,
              WGL_DOUBLE_BUFFER_ARB,  GL_TRUE,
              WGL_STEREO_ARB,         theIsQuadStereo ? GL_TRUE : GL_FALSE,
              WGL_PIXEL_TYPE_ARB,     WGL_TYPE_RGBA_ARB,
              //WGL_SAMPLE_BUFFERS_ARB, 1,
              //WGL_SAMPLES_ARB,        8,
              // WGL_CONTEXT_ROBUST_ACCESS_BIT_ARB       0x00000004
              WGL_COLOR_BITS_ARB,     24,
              WGL_DEPTH_BITS_ARB,     theDepthSize,
              WGL_STENCIL_BITS_ARB,   0,
              0, 0,
          };
          unsigned int aFrmtsNb = 0;
          aCtx.extAll->wglChoosePixelFormatARB(hDC, aPixAttribs, NULL, 1, &aPixFrmtId, &aFrmtsNb);
          if(theSlave != NULL) {
              int aPixFrmtIdSlave = 0;
              aCtx.extAll->wglChoosePixelFormatARB(theSlave->hDC, aPixAttribs, NULL, 1, &aPixFrmtIdSlave, &aFrmtsNb);
              if(aPixFrmtIdSlave != aPixFrmtId) {
                  ST_ERROR_LOG("Slave window returns another pixel format! Try to ignore...");
              }
          }
      } else {
          aPixFrmtId = ChoosePixelFormat(hDC, &aPixFrmtDesc);
          if(theSlave != NULL
          && ChoosePixelFormat(theSlave->hDC, &aPixFrmtDesc) != aPixFrmtId) {
              ST_ERROR_LOG("Slave window returns another pixel format! Try to ignore...");
          }
      }
      ST_GL_ERROR_CHECK(aPixFrmtId != 0, STWIN_ERROR_WIN32_PIXELFORMATF,
                        "WinAPI, Can't find a suitable PixelFormat for Master");
      DescribePixelFormat(hDC, aPixFrmtId, sizeof(PIXELFORMATDESCRIPTOR), &aPixFrmtDesc);
      if(theIsQuadStereo) {
          if((aPixFrmtDesc.dwFlags & PFD_STEREO) == 0) {
              ST_ERROR_LOG("WinAPI, Quad Buffered stereo is not supported");
          }
      }
      ST_GL_ERROR_CHECK(SetPixelFormat(hDC, aPixFrmtId, &aPixFrmtDesc),
                        STWIN_ERROR_WIN32_PIXELFORMATS, "WinAPI, Can't set the PixelFormat for Master");
      ST_GL_ERROR_CHECK(theSlave == NULL || SetPixelFormat(theSlave->hDC, aPixFrmtId, &aPixFrmtDesc),
                        STWIN_ERROR_WIN32_PIXELFORMATS, "WinAPI, Can't set the PixelFormat for Slave");
      if(aCtx.extAll->wglCreateContextAttribsARB != NULL) {
          // Beware! NVIDIA drivers reject context creation when WGL_CONTEXT_PROFILE_MASK_ARB are specified
          // but not WGL_CONTEXT_MAJOR_VERSION_ARB/WGL_CONTEXT_MINOR_VERSION_ARB
          int aCtxAttribs[] = {
              //WGL_CONTEXT_MAJOR_VERSION_ARB, 3,
              //WGL_CONTEXT_MINOR_VERSION_ARB, 2,
              //WGL_CONTEXT_PROFILE_MASK_ARB,  WGL_CONTEXT_COMPATIBILITY_PROFILE_BIT_ARB, //WGL_CONTEXT_CORE_PROFILE_BIT_ARB,
              WGL_CONTEXT_FLAGS_ARB,         theDebugCtx ? WGL_CONTEXT_DEBUG_BIT_ARB : 0,
              0, 0
          };

          aRendCtx = aCtx.extAll->wglCreateContextAttribsARB(hDC, NULL, aCtxAttribs);
      }

      aRendCtxTmp.nullify();
      destroyWindow(hWinTmp);
    }

    hRC = new StWinGlrc(hDC, aRendCtx);
    ST_GL_ERROR_CHECK(hRC->isValid(),
                      STWIN_ERROR_WIN32_GLRC_CREATE, "WinAPI, Can't create GL Rendering Context");
    if(theSlave != NULL) {
        theSlave->hRC = hRC;
    }

    ST_GL_ERROR_CHECK(hRC->makeCurrent(hDC),
                      STWIN_ERROR_WIN32_GLRC_ACTIVATE, "WinAPI, Can't activate Master GL Rendering Context");
    return STWIN_INIT_SUCCESS;
#elif defined(__linux__)
    // create an OpenGL rendering context
#if defined(ST_HAVE_EGL) || defined(__ANDROID__)
    // GL context is created beforehand for EGL
    ST_GL_ERROR_CHECK(!hRC.isNull() && hRC->isValid(),
                      STWIN_ERROR_X_GLRC_CREATE, "EGL, could not create rendering context for Master");

#if defined(__ANDROID__)
    EGLint aFormat = 0;
    eglGetConfigAttrib(hRC->getDisplay(), hRC->getConfig(), EGL_NATIVE_VISUAL_ID, &aFormat);
    ANativeWindow_setBuffersGeometry(hWindowGl, 0, 0, aFormat);
#endif

    eglSurface = eglCreateWindowSurface(hRC->getDisplay(), hRC->getConfig(), hWindowGl, NULL);
    if(theSlave != NULL) {
        theSlave->hRC = hRC;
        theSlave->eglSurface = eglCreateWindowSurface(hRC->getDisplay(), hRC->getConfig(), theSlave->hWindowGl, NULL);

        // bind the rendering context to the window
        ST_GL_ERROR_CHECK(hRC->makeCurrent(theSlave->eglSurface),
                          STWIN_ERROR_X_GLRC_CREATE, "EGL, Can't activate Slave GL Rendering Context");
    }

    // bind the rendering context to the window
    ST_GL_ERROR_CHECK(hRC->makeCurrent(eglSurface),
                      STWIN_ERROR_X_GLRC_CREATE, "EGL, Can't activate Master GL Rendering Context");
    return STWIN_INIT_SUCCESS;
#else // GLX
    hRC = new StWinGlrc(stXDisplay, theDebugCtx);
    ST_GL_ERROR_CHECK(hRC->isValid(),
                      STWIN_ERROR_X_GLRC_CREATE, "GLX, could not create rendering context for Master");
    if(theSlave != NULL) {
        theSlave->hRC = hRC;

        // bind the rendering context to the window
        ST_GL_ERROR_CHECK(hRC->makeCurrent(theSlave->hWindowGl),
                          STWIN_ERROR_X_GLRC_CREATE, "GLX, Can't activate Slave GL Rendering Context");
    }

    // bind the rendering context to the window
    ST_GL_ERROR_CHECK(hRC->makeCurrent(hWindowGl),
                      STWIN_ERROR_X_GLRC_CREATE, "GLX, Can't activate Master GL Rendering Context");
    return STWIN_INIT_SUCCESS;
#endif // GLX or EGL
#endif
}
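The temporary-window dance in the _WIN32 branch is the usual WGL bootstrap: wglChoosePixelFormatARB can only be obtained once some context is current, and SetPixelFormat may be applied to a given window only once, so a throwaway window first gets a classic pixel format and context, the extension then picks the real format for the actual window, and the temporary window is destroyed.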
int CPlateDetect::plateDetect(Mat src, vector<CPlate>& resultVec,
                              bool showDetectArea, int index) {
  vector<Mat> resultPlates;

  vector<CPlate> color_Plates;
  vector<CPlate> sobel_Plates;
  vector<CPlate> color_result_Plates;
  vector<CPlate> sobel_result_Plates;

  vector<CPlate> all_result_Plates;

  // If the color search finds n or more plates, the Sobel search is no longer performed.
  const int color_find_max = m_maxPlates;

  m_plateLocate->plateColorLocate(src, color_Plates, index);
  m_plateJudge->plateJudge(color_Plates, color_result_Plates);

  // for (int i=0;i<color_Plates.size();++i)
  //{
  //	color_result_Plates.push_back(color_Plates[i]);
  //}

  for (size_t i = 0; i < color_result_Plates.size(); i++) {
    CPlate plate = color_result_Plates[i];

    plate.setPlateLocateType(COLOR);
    all_result_Plates.push_back(plate);
  }

  // Use both the color result and the edge (Sobel) closing-operation result
  {
    m_plateLocate->plateSobelLocate(src, sobel_Plates, index);
    m_plateJudge->plateJudge(sobel_Plates, sobel_result_Plates);

    /*for (int i=0;i<sobel_Plates.size();++i)
    {
            sobel_result_Plates.push_back(sobel_Plates[i]);
    }*/

    for (size_t i = 0; i < sobel_result_Plates.size(); i++) {
      CPlate plate = sobel_result_Plates[i];

      if (0) {
        imshow("plate_mat", plate.getPlateMat());
        waitKey(0);
        destroyWindow("plate_mat");
      }

      plate.bColored = false;
      plate.setPlateLocateType(SOBEL);

      all_result_Plates.push_back(plate);
    }
  }

  for (size_t i = 0; i < all_result_Plates.size(); i++) {
    // Collect the cropped plate images one by one
    CPlate plate = all_result_Plates[i];
    resultVec.push_back(plate);
  }
  return 0;
}
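Detection here is a two-stage pipeline run twice: plateColorLocate and plateSobelLocate propose candidate regions, plateJudge filters each candidate list, and the survivors of both locators are merged into all_result_Plates with their locate type (COLOR or SOBEL) recorded.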
Beispiel #22
0
bool StWinHandles::close() {
#ifdef _WIN32
    // NOTE - destroy functions will fail if called from another thread than created
    const size_t aThreadId = StThread::getCurrentThreadId();
    myMutex.lock();
    // ========= Release OpenGL resources =========
    if(aThreadId == ThreadGL && hWindowGl != NULL) {
        ST_DEBUG_LOG("WinAPI, close, aThreadId= " + aThreadId + ", ThreadGL= " + ThreadGL + ", ThreadWnd= " + ThreadWnd);

        // release Rendering Context
        hRC.nullify();

        // Release Device Context
        if(hDC != NULL && hWindowGl != NULL) {
            if(ReleaseDC(hWindowGl, hDC) == 0) {
                ST_DEBUG_LOG("WinAPI, FAILED to release DC");
                myMutex.unlock();
                return false;
            } else {
                ST_DEBUG_LOG("WinAPI, Released DC");
                hDC = NULL;
                ThreadGL = 0;
            }
        }
    }

    // release window resources
    if(aThreadId == ThreadWnd && hDC == NULL) {
        ST_DEBUG_LOG("WinAPI, close, aThreadId= " + aThreadId + ", ThreadGL= " + ThreadGL + ", ThreadWnd= " + ThreadWnd);

        // destroy windows
        if(!destroyWindow(hWindowGl)
        || !destroyWindow(hWindow)
        || !destroyWindow(hWinTmp)) {
            myMutex.unlock();
            return false;
        }

        // unregister window classes
        if(hWindowGl == NULL && hWindow == NULL) {
            if(!unregisterClass(ClassGL)
            || !unregisterClass(ClassBase)
            || !unregisterClass(ClassTmp)) {
                myMutex.unlock();
                return false;
            }
        }
    }
    myMutex.unlock();
#elif defined(__linux__)

#if defined(ST_HAVE_EGL) || defined(__ANDROID__)
    if(!hRC.isNull()) {
        if(eglSurface != EGL_NO_SURFACE) {
            hRC->makeCurrent(EGL_NO_SURFACE);
            eglDestroySurface(hRC->getDisplay(), eglSurface);
            eglSurface = EGL_NO_SURFACE;
        }
    }
#endif

    // release active context
    hRC.nullify();

#if defined(__ANDROID__)
    //
#else
    if(!stXDisplay.isNull()) {
        // close x-server windows
        if(hWindowGl != 0) {
            XUnmapWindow(stXDisplay->hDisplay, hWindowGl);
            XDestroyWindow(stXDisplay->hDisplay, hWindowGl);
            hWindowGl = 0;
        }
        if(hWindow != 0) {
            XUnmapWindow(stXDisplay->hDisplay, hWindow);
            XDestroyWindow(stXDisplay->hDisplay, hWindow);
            hWindow = 0;
        }
        if(iconImage != 0) {
            XFreePixmap(stXDisplay->hDisplay, iconImage);
            iconImage = 0;
        }
        if(iconShape != 0) {
            XFreePixmap(stXDisplay->hDisplay, iconShape);
            iconShape = 0;
        }

        // close x-server connection
        stXDisplay.nullify();
    }
#endif

#endif
    return true;
}
Beispiel #23
0
	LinuxWindow::~LinuxWindow() {
		destroyWindow();
	}
Beispiel #24
0
//! Character segmentation and sorting
int CCharsSegment::charsSegment(Mat input, vector<Mat>& resultVec, Color color) {
  if (!input.data) return 0x01;

  Mat input_grey;
  cvtColor(input, input_grey, CV_BGR2GRAY);

  if (0) {
    imshow("plate", input_grey);
    waitKey(0);
    destroyWindow("plate");
  }

  Mat img_threshold;

  // Binarization
  // Use a different thresholding method depending on the plate color
  // TODO: use MSER to extract these contours

  //if (BLUE == plateType) {
  //  // cout << "BLUE" << endl;
  //  img_threshold = input_grey.clone();

  //  int w = input_grey.cols;
  //  int h = input_grey.rows;
  //  Mat tmp = input_grey(Rect_<double>(w * 0.1, h * 0.1, w * 0.8, h * 0.8));
  //  int threadHoldV = ThresholdOtsu(tmp);
  //  threshold(input_grey, img_threshold, threadHoldV, 255, CV_THRESH_BINARY);

  //} else if (YELLOW == plateType) {
  //  // cout << "YELLOW" << endl;
  //  img_threshold = input_grey.clone();
  //  int w = input_grey.cols;
  //  int h = input_grey.rows;
  //  Mat tmp = input_grey(Rect_<double>(w * 0.1, h * 0.1, w * 0.8, h * 0.8));
  //  int threadHoldV = ThresholdOtsu(tmp);
  //  // utils::imwrite("resources/image/tmp/inputgray2.jpg", input_grey);

  //  threshold(input_grey, img_threshold, threadHoldV, 255,
  //            CV_THRESH_BINARY_INV);

  //} else if (WHITE == plateType) {
  //  // cout << "WHITE" << endl;

  //  threshold(input_grey, img_threshold, 10, 255,
  //            CV_THRESH_OTSU + CV_THRESH_BINARY_INV);
  //} else {
  //  // cout << "UNKNOWN" << endl;
  //  threshold(input_grey, img_threshold, 10, 255,
  //            CV_THRESH_OTSU + CV_THRESH_BINARY);
  //}

  Color plateType = color;

  img_threshold = input_grey.clone();
  spatial_ostu(img_threshold, 8, 2, plateType);

  if (0) {
    imshow("plate", img_threshold);
    waitKey(0);
    destroyWindow("plate");
  }

  // Remove interference such as the rivets above and the horizontal line below the plate
  // This also judges whether the image is a plate at all,
  // using the character jump count and the proportion of character-colored pixels as criteria
  // If it is not a plate, return ErrorCode=0x02

  if (!clearLiuDing(img_threshold)) return 0x02;
  //clearLiuDing(img_threshold);

  // Extract contours from the binarized image

  Mat img_contours;
  img_threshold.copyTo(img_contours);

  vector<vector<Point> > contours;
  findContours(img_contours,
               contours,               // a vector of contours
               CV_RETR_EXTERNAL,       // retrieve the external contours
               CV_CHAIN_APPROX_NONE);  // all pixels of each contours

  vector<vector<Point> >::iterator itc = contours.begin();
  vector<Rect> vecRect;

  // Discard character blocks that do not match the expected sizes

  while (itc != contours.end()) {
    Rect mr = boundingRect(Mat(*itc));
    Mat auxRoi(img_threshold, mr);

    if (verifyCharSizes(auxRoi)) vecRect.push_back(mr);
    ++itc;
  }

  // If no character blocks are found, return ErrorCode=0x03

  if (vecRect.size() == 0) return 0x03;

  // Sort the qualifying blocks from left to right;
  // using the STL sort directly is more efficient

  vector<Rect> sortedRect(vecRect);
  std::sort(sortedRect.begin(), sortedRect.end(),
            [](const Rect& r1, const Rect& r2) { return r1.x < r2.x; });

  size_t specIndex = 0;

  // Get the Rect of the special character, e.g. the "A" of 苏A

  specIndex = GetSpecificRect(sortedRect);

  // Infer the Chinese character's Rect by stepping left from the special Rect
  // The main reason is that findContours can rarely capture an accurate Rect
  // for the Chinese character, so it can only be fixed by a dedicated algorithm

  Rect chineseRect;
  if (specIndex < sortedRect.size())
    chineseRect = GetChineseRect(sortedRect[specIndex]);
  else
    return 0x04;

  if (0) {
    rectangle(img_threshold, chineseRect, Scalar(255));
    imshow("plate", img_threshold);
    waitKey(0);
    destroyWindow("plate");
  }

  // Build a brand-new sorted Rect list
  // Insert the Chinese character's Rect first, since it is certainly the leftmost
  // Take at most 6 more Rects in order; a plate can only have 7 characters!
  // This avoids spurious "1" characters caused by shadows

  vector<Rect> newSortedRect;
  newSortedRect.push_back(chineseRect);
  RebuildRect(sortedRect, newSortedRect, specIndex);

  if (newSortedRect.size() == 0) return 0x05;

  // Start cropping each character
  bool useSlideWindow = true;

  bool useAdapThreshold = true;
  //bool useAdapThreshold = CParams::instance()->getParam1b();

  for (size_t i = 0; i < newSortedRect.size(); i++) {
    Rect mr = newSortedRect[i];

    // Mat auxRoi(img_threshold, mr);

    // Crop each block from the grayscale image, then binarize it with Otsu thresholding

    Mat auxRoi(input_grey, mr);
    Mat newRoi;

    if (i == 0) {
      if (useSlideWindow) {
        float slideLengthRatio = 0.1f;
        //float slideLengthRatio = CParams::instance()->getParam1f();
        if (!slideChineseWindow(input_grey, mr, newRoi, plateType, slideLengthRatio, useAdapThreshold))
          judgeChinese(auxRoi, newRoi, plateType);
      }
      else
        judgeChinese(auxRoi, newRoi, plateType);
    }
    else {
      if (BLUE == plateType) {  
        threshold(auxRoi, newRoi, 0, 255, CV_THRESH_BINARY + CV_THRESH_OTSU);
      }
      else if (YELLOW == plateType) {
        threshold(auxRoi, newRoi, 0, 255, CV_THRESH_BINARY_INV + CV_THRESH_OTSU);
      }
      else if (WHITE == plateType) {
        threshold(auxRoi, newRoi, 0, 255, CV_THRESH_OTSU + CV_THRESH_BINARY_INV);
      }
      else {
        threshold(auxRoi, newRoi, 0, 255, CV_THRESH_OTSU + CV_THRESH_BINARY);
      }

      // Normalize the size
      newRoi = preprocessChar(newRoi);
    }
     
    if (0) {
      if (i == 0) {
        imshow("input_grey", input_grey);
        waitKey(0);
        destroyWindow("input_grey");
      }
      if (i == 0) {
        imshow("newRoi", newRoi);
        waitKey(0);
        destroyWindow("newRoi");
      }
    }

    // Each character block is passed on to the next stage for processing
    resultVec.push_back(newRoi);
  }

  return 0;
}
Beispiel #25
0
/*
**	Destructor: display score, destroy main window and tc window.
*/
Gui::~Gui()
{
  destroyWindow();
  endwin();
}
void AP_Win32Dialog_Latex::event_Close(void)
{
	m_answer = AP_Dialog_Latex::a_CANCEL;
	modeless_cleanup();
	destroyWindow();
}
Beispiel #27
0
void OpencvModule::CloseWindow()
{
    destroyWindow("Caremedia Kinect Viewer");
}
void FaceDetection()
{

	bool detected = false;
	Mat captureFrame;
	Mat bottomFrame;
	Mat grayscaleFrame;
	Mat colorImg;
	Mat grayImage;
	char kbCmd = ' ';

	CascadeClassifier faceCascade;
	faceCascade.load("haarcascade_frontalface_alt.xml");
	//setup video capture device and link it to the first capture device
	VideoCapture captureDevice;
	captureDevice.open(0);

	//create a loop to capture and find faces
	cout << "Press C to capture image..." << endl;
	while (!detected)
	{
		captureDevice >> captureFrame;
		bottomFrame = captureFrame.clone();

		//convert captured image to gray scale and equalize
		cvtColor(captureFrame, grayscaleFrame, CV_BGR2GRAY);
		equalizeHist(grayscaleFrame, grayscaleFrame);

		vector<Rect> detectedFaces;
		faceCascade.detectMultiScale(grayscaleFrame, detectedFaces, 1.1, 3, CV_HAAR_FIND_BIGGEST_OBJECT | CV_HAAR_SCALE_IMAGE, Size(30, 30));

		//draw a rectangle for all found faces in the vector array on the original image
		for (int i = 0; i < detectedFaces.size(); i++)
		{
			int x = detectedFaces[i].x;
			int y = detectedFaces[i].y;
			int w = detectedFaces[i].width;
			int h = detectedFaces[i].height;
			
			int xBound = w/2;
			int yBound = h/2;
			faceCenter = Point( xBound + w/2, yBound + h/2 );

			Point pt1(x + w, y + h);
			Point pt2(x, y);
			
			// Make sure the boundary in image range
			if (x - xBound > 0 && y - yBound > 0 && x - xBound + w * 2 < 640 && y - yBound + h * 2 < 480)
			{
				grayImage = grayscaleFrame( Rect(x - xBound, y - yBound, w * 2, h * 2) );
				colorImg = bottomFrame( Rect(x - xBound, y - yBound, w * 2, h * 2) );
			}
			//Draw rectangle
			rectangle(captureFrame, pt1, pt2, cvScalar(0, 255, 0, 0), 1, 8, 0);
		}

		//print the output
		imshow("Face Detecting...", captureFrame);
		waitKey(33);
		if (_kbhit())
			kbCmd = _getche();
		if (detectedFaces.size() == 0){
			kbCmd = ' ';
		}
		else {
			if (kbCmd == 'c')
				detected = true;
		}
	}
	
	destroyWindow("Face Detecting...");
	faceCenter *= 400.0 / colorImg.cols;
	resize(colorImg, colorImg, Size(400, 400));
	
	//Create a window to present the detecting results
	namedWindow("Portrait Region", CV_WINDOW_AUTOSIZE);
	imshow("Portrait Region", colorImg); waitKey(0);
	destroyWindow("Portrait Region");

	imwrite("colorImg.jpg", colorImg);
}
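Note the order of the final steps: faceCenter is rescaled by 400.0 / colorImg.cols before colorImg itself is resized to 400x400, so the stored center stays aligned with the resized portrait.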
Beispiel #29
0
void rcopy_runloop(char *server_host, uint16_t server_port, char *remote_file_name, char *output_file_name, uint32_t buf_size, uint32_t window_size)
{
	int output_fd = -1;
	int select_count = 0;
	int state = STATE_FILENAME;
	Window *window = NULL;

	while (state != STATE_DONE) {
		switch (state) {

			case STATE_FILENAME:
				if (connectToServer(server_host, server_port, &server)) {
					perror("ERROR: unable to connect to host");
					exit(1);
				}
				state = filename(remote_file_name, buf_size, window_size);
				if (state == STATE_FILENAME) {
					close(server.socket_num);
				}
				select_count++;
				if (select_count >= MAX_RETRIES) {
					fprintf(stderr, "Ten failed filename tries: unable to reach server\n");
					exit(1);
				}
				break;

			case STATE_FILE_OK:
				select_count = 0;
				if((output_fd = open(output_file_name, O_WRONLY|O_CREAT|O_TRUNC, 0600)) < 0 ) {
			    	perror("Open local_file");
			    	exit(1);
			    }
			    window = newWindowWithSizeAndBuffer(window_size, buf_size);
			    state = STATE_RECV_DATA;
			    break;

			case STATE_RECV_DATA:
				state = recv_data(window);
				break;

			case STATE_WINDOW_FULL:
				state = window_full(window, output_fd);
				clearWindow(window);
				window->base_seq_num += window->window_size;
				window->buffer_size = 0;
				break;

			case STATE_EOF:
				if (select_count == 0) {
					window_full(window, output_fd);
				}
				select_count++;
				if (select_count >= MAX_RETRIES) {
					fprintf(stderr, "Ten failed EOF acks: file is ok but server doesn't know\n");
					state = STATE_DONE;
				}
				else {
					state = recv_eof(window, output_fd);
				}
				break;

			case STATE_DONE:
			default:
				break;
		}
	}

	if (window != NULL) {
		destroyWindow(window);
		window = NULL;
	}

	close(output_fd);
}
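The loop is a small state machine for a sliding-window transfer: STATE_RECV_DATA fills the window buffer, STATE_WINDOW_FULL flushes it to the output file and advances base_seq_num by window_size, and STATE_EOF retries the final acknowledgement up to MAX_RETRIES times before giving up.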
/** This function builds the game window, the chess game area and the side panel, and attaches
 *	all the required logic and gui components to each other. When this method is done we can start processing events
 *	to get the game going.
 */
GuiWindow* createGameWindow(char board[BOARD_SIZE][BOARD_SIZE], bool isUserBlack)
{
	// Z indices under window
	short gameAreaPanelZIndex = 1;
	short sidePanelZIndex = 2;

	// Z indices under wooden panel
	short sidePanelImgZIndex = 0;
	short saveButtonZIndex = 1;
	short menuButtonZIndex = 2;
	short quitButtonZIndex = 3;

	GuiColorRGB bgcolor = WHITE;
	GuiWindow* gameWindow = createWindow(WIN_W, WIN_H, GAME_WINDOW_TITLE, bgcolor);

	if ((NULL == gameWindow) || g_guiError)
		return NULL; // Clean on errors

	// Set a custom destructor for the window
	gameWindow->generalProperties.destroy = destroyGameWindow;

	// Side panel creation

	Rectangle sidePanelBounds = { 0, 0, WOODPANEL_W, WOODPANEL_H };
	sidePanelBounds.x = BOARD_W; // Panel is to the right of the board
	GuiPanel* sidePanel = createPanel(gameWindow->generalProperties.wrapper, sidePanelBounds, sidePanelZIndex, GREEN);
	if ((NULL == sidePanel) || g_guiError)
	{ // Clean on errors
		destroyWindow(gameWindow);
		return NULL;
	}

	sidePanelBounds.x = 0; // Image is 0 relative to the panel
	GuiImage* sidePanelImg = createImage(sidePanel->generalProperties.wrapper, sidePanelBounds,
		sidePanelImgZIndex, SIDE_PANEL_IMG, GREEN);
	if ((NULL == sidePanelImg) || g_guiError)
	{ // Clean on errors
		destroyWindow(gameWindow);
		return NULL;
	}

	Rectangle btnBounds = { ((WOODPANEL_W - (BUTTON_W / 2)) / 2), 0, BUTTON_W, BUTTON_H };
	btnBounds.y = BESTMOVE_BUTTON_OFFSET_Y;
	GuiButton* bestMoveBtn = createButton(sidePanel->generalProperties.wrapper, btnBounds,
		saveButtonZIndex, BUTTON_BESTMOVE_IMG, BROWN, onBestMoveClick);
	if ((NULL == bestMoveBtn) || g_guiError)
	{ // Clean on errors
		destroyWindow(gameWindow);
		return NULL;
	}

	btnBounds.y = SAVE_BUTTON_OFFSET_Y;
	GuiButton* saveBtn = createButton(sidePanel->generalProperties.wrapper, btnBounds,
		saveButtonZIndex, BUTTON_SAVE_IMG, BROWN, onSaveClick);
	if ((NULL == saveBtn) || g_guiError)
	{ // Clean on errors
		destroyWindow(gameWindow);
		return NULL;
	}

	btnBounds.y = MENU_BUTTON_OFFSET_Y;
	GuiButton* mainMenuBtn = createButton(sidePanel->generalProperties.wrapper, btnBounds,
		menuButtonZIndex, BUTTON_MENU_IMG, BROWN, onMainMenuClick);
	if ((NULL == mainMenuBtn) || g_guiError)
	{ // Clean on errors
		destroyWindow(gameWindow);
		return NULL;
	}

	btnBounds.y = QUIT_BUTTON_OFFSET_Y;
	GuiButton* quitBtn = createButton(sidePanel->generalProperties.wrapper, btnBounds,
		quitButtonZIndex, BUTTON_QUIT_IMG, BROWN, onQuit);
	if ((NULL == quitBtn) || g_guiError)
	{ // Clean on errors
		destroyWindow(gameWindow);
		return NULL;
	}

	Rectangle gameAreaBounds = { 0, 0, BOARD_W, BOARD_H };
	GuiPanel* gameAreaPanel = createPanel(gameWindow->generalProperties.wrapper, gameAreaBounds, gameAreaPanelZIndex, GRAY);
	if ((NULL == gameAreaPanel) || g_guiError)
	{
		destroyWindow(gameWindow);
		return NULL;
	}

	// Create the game window extent
	GameWindowExtent* windowExtent = createGameWindowExtent(gameWindow, gameAreaPanel, bestMoveBtn, board);
	if (NULL == windowExtent)
	{
		gameWindow->generalProperties.destroy(gameWindow);
		return NULL;
	}
	gameWindow->generalProperties.extent = windowExtent;
	GameControl* gameControl = windowExtent->gameControl;

	refreshBoard(gameControl);
	if (g_memError)
	{ // Avoid errors
		gameWindow->generalProperties.destroy(gameWindow);
		return NULL;
	}

	saveBtn->generalProperties.extent = gameControl; // Save a reference to the game control / window in the button extents.
	// This makes the game control available on events.
	bestMoveBtn->generalProperties.extent = gameControl;
	mainMenuBtn->generalProperties.extent = gameWindow;
	quitBtn->generalProperties.extent = gameWindow;

	gameWindow->onShow = onGameWindowShow; // Set the onShow event, so when the window is drawn we can start playing

	return gameWindow;
}
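Every failure path above funnels through destroyWindow(gameWindow), or through the destroyGameWindow destructor installed on the window, which is expected to release the window together with any panels, images, and buttons already attached to it; that is what keeps the many early returns leak-free.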