Example #1
#include "Halide.h"
#include <iostream>

// Problem sizes and learning rate: the original snippet assumes these are
// defined elsewhere, so illustrative values are supplied here.
const int kHeight = 4;          // rows of the data matrix
const int kWidth = 4;           // columns of the data matrix
const int kRank = 2;            // rank of the factorization
const float step_size = 0.01f;  // gradient-descent step size

int main(int argc, char *argv[]) {

  Halide::Func data_mat;
  Halide::Var row, col, ki, iter;

  // initialize data matrix
  data_mat(row, col) = 1.0f;

  // initialize model matrix L, R
  Halide::Func L, R;
  L(row, ki) = 1.0f;
  R(ki, col) = 1.0f;

  Halide::Func approx_mat;
  approx_mat(row, col) = 0.0f;

  // accumulate the rank-k product: approx_mat = L * R
  Halide::RDom k(0, kRank);
  approx_mat(row, col) += L(row, k) * R(k, col);

  Halide::Func diff_mat;
  diff_mat(row, col) = approx_mat(row, col) - data_mat(row, col);

  // one gradient-descent step on each factor, accumulated over the reduction
  // domain (initializing from L and R so the updates sum rather than overwrite)
  Halide::Func Ln, Rn;
  Ln(row, ki) = L(row, ki);
  Rn(ki, col) = R(ki, col);
  Halide::RDom c(0, kWidth);
  Ln(row, ki) = Ln(row, ki) - step_size * diff_mat(row, c) * R(ki, c);
  Halide::RDom r(0, kHeight);
  Rn(ki, col) = Rn(ki, col) - step_size * diff_mat(r, col) * L(r, ki);

  // realize with one extent per Func dimension; newer Halide releases use
  // Halide::Buffer<float> in place of Halide::Image<float>
  Halide::Image<float> Ln_output = Ln.realize(kHeight, kRank);
  Halide::Image<float> Rn_output = Rn.realize(kRank, kWidth);

  // print the factors: the first image index is the first Func dimension,
  // so index with (j, i) when i iterates over height() and j over width()
  for (int i = 0; i < Ln_output.height(); i++) {
    std::cout << i << " - ";
    for (int j = 0; j < Ln_output.width(); j++) {
      std::cout << j << ":" << Ln_output(j, i) << " ";
    }
    std::cout << std::endl;
  }

  for (int i = 0; i < Rn_output.height(); i++) {
    std::cout << i << " - ";
    for (int j = 0; j < Rn_output.width(); j++) {
      std::cout << j << ":" << Rn_output(j, i) << " ";
    }
    std::cout << std::endl;
  }

  std::cout << "Success!" << std::endl;
  return 0;
}
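
The two update definitions above amount to one gradient-descent step on the
squared reconstruction error: Ln = L - step_size * (L*R - A) * R^T and
Rn = R - step_size * L^T * (L*R - A), summed over the reduction domains.
Below is a minimal plain-C++ sketch of the same step (explicit loop nests in
place of Halide Funcs, sizes matching the example constants above); it is a
reference to sanity-check the pipeline's output against, not part of the
original example.

#include <cstdio>
#include <vector>

int main() {
    const int H = 4, W = 4, K = 2;  // same shapes as the Halide example
    const float step = 0.01f;
    std::vector<float> A(H * W, 1.0f), L(H * K, 1.0f), R(K * W, 1.0f);
    std::vector<float> Ln(L), Rn(R);

    for (int i = 0; i < H; ++i) {
        for (int j = 0; j < W; ++j) {
            // diff = (L*R - A)(i, j)
            float approx = 0.0f;
            for (int k = 0; k < K; ++k) approx += L[i * K + k] * R[k * W + j];
            float diff = approx - A[i * W + j];
            // accumulate the gradient step for both factors
            for (int k = 0; k < K; ++k) {
                Ln[i * K + k] -= step * diff * R[k * W + j];
                Rn[k * W + j] -= step * diff * L[i * K + k];
            }
        }
    }
    std::printf("Ln[0] = %f, Rn[0] = %f\n", Ln[0], Rn[0]);  // 0.96 for all-ones input
    return 0;
}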
Example #2
bool InPatchGraphCut::prepareDataTerm(Segmentation::GraphCut& gc)
{
    data_.reset(new double[label_number_*pix_number_]);
    double* data = data_.get();
    // wrap the raw buffer without copying (copy_aux_mem=false, strict=true)
    arma::mat data_mat(data,label_number_,pix_number_,false,true);
    ObjModel& model = *objects_[ current_label_ - 1 ];
    // heuristic: treat the patch as outlier-free when the smallest distance
    // probability is at least 40% of the largest
    bool has_outlier = true;
    double minP = arma::min(model.DistP_);
    double maxP = arma::max(model.DistP_);
    if( minP / maxP > 0.4 ) has_outlier = false;
    DefaultMesh obj_mesh;
    if(!model.transform(obj_mesh,current_frame_))
    {
        std::cerr<<"no extracted object "<< current_label_ - 1 <<" for frame "<<current_frame_<<std::endl;
    }
    arma::vec score;
    // match the transformed object mesh against the current patch graph,
    // passing the configured color variance when one is available
    if(!config_->has("GC_color_var"))
        current_patch_graph_->match(obj_mesh,model.DistP_,model.NormP_,model.ColorP_,
                score,config_->getDouble("GC_distance_threshold"));
    else
        current_patch_graph_->match(obj_mesh,model.DistP_,model.NormP_,model.ColorP_,
                score,config_->getDouble("GC_distance_threshold"),
                config_->getDouble("GC_color_var"));
    // choose how many of the highest/lowest scores define the normalization range
    size_t N_max, N_min;
    if( config_->has("GC_gamma_soft") ) N_max = config_->getDouble("GC_gamma_soft")*score.size();
    else N_max = 0.1*score.size();
    N_max = std::max(size_t(10),N_max);
    N_max = std::min(N_max,score.size()-1);
    if( config_->has("GC_gamma_hard") ) N_min = config_->getDouble("GC_gamma_hard")*N_max;
    else N_min = 0.1*N_max;
    N_min = std::max(size_t(3),N_min);
    N_min = std::min(N_min,score.size()-1);
    // robust normalization: take the range from the means of the N_min lowest
    // and N_max highest scores, then clamp the result to [0,1]
    arma::vec sorted_score = arma::sort(score);
    double min_score = arma::mean( sorted_score.head(N_min) );
    double max_score = arma::mean( sorted_score.tail(N_max) );
    score -= min_score;
    score /= ( max_score - min_score );
    arma::uvec underflow = arma::find( score < 0 );
    score(underflow).fill(0.0);
    arma::uvec overflow = arma::find( score > 1.0 );
    score(overflow).fill(1.0);

    // two-label data term: row 0 holds the per-pixel match score,
    // row 1 its complement
    if(has_outlier) data_mat.row(0) = score.t();
    else data_mat.row(0).fill(1.0);
    data_mat.row(1).fill(1.0);
    data_mat.row(1) -= score.t();

    if(!data_mat.is_finite())
    {
        std::cerr<<"infinite in data of frame "<<current_frame_<<" patch "<<current_label_<<std::endl;
    }
    current_data_.reset(new DataCost(data_.get()));
    gc.inputDataTerm(current_data_);
    return true;
}
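
The heart of prepareDataTerm is the robust rescaling of the match scores:
instead of taking the absolute minimum and maximum, it averages the N_min
lowest and N_max highest scores to get a range that ignores stray extremes,
then clamps the normalized scores to [0,1]. A self-contained sketch of that
step, with random stand-in scores and arma::clamp replacing the find/fill
pair:

#include <armadillo>
#include <iostream>

int main() {
    arma::vec score = arma::randu<arma::vec>(100);  // stand-in match scores
    size_t N_min = 3, N_max = 10;                   // hard/soft gamma counts

    // range from the means of the lowest and highest scores
    arma::vec sorted = arma::sort(score);
    double lo = arma::mean(sorted.head(N_min));
    double hi = arma::mean(sorted.tail(N_max));

    // normalize to [0,1] and clamp, as prepareDataTerm does
    score = arma::clamp((score - lo) / (hi - lo), 0.0, 1.0);

    std::cout << "min " << score.min() << " max " << score.max() << std::endl;
    return 0;
}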
Example #3
    /******************************************************************************
    Description.: this thread worker grabs a frame and copies it to the global buffer
    Input Value.: unused
    Return Value: unused, always NULL
     ******************************************************************************/
    void *cam_thread(void *arg) {
        
        g_settings.init();
        setCameraExposure();
        
        CVideoFrame* pFrame = NULL;

#ifndef TEST_USE_JPEGS_NOT_CAMERA 
        int width = VIEW_PIXEL_X_WIDTH;
        int height = VIEW_PIXEL_Y_HEIGHT;
        IplImage * img = cvCreateImage(cvSize(width, height), IPL_DEPTH_8U, 3); // OpenCV image
#endif

        frameGrinder.init();

#ifdef TEST_USE_JPEGS_NOT_CAMERA 
        std::string sBasePath = "/home/";
        sBasePath += HOME_NAME;
        std::string sPath = sBasePath;
        sPath += "/0243-20150125-22-21-46.jpg";
        //sPath += "/0007-20150125-22-36-25.jpg";  
        cv::Mat frame1 = cv::imread(sPath.c_str(), CV_LOAD_IMAGE_COLOR);
        if (frame1.empty()) {
            dbgMsg_s("Failed to read image data from a file1\n");
        }

        sPath = sBasePath;
        sPath += "/0243-20150125-22-21-46.jpg";
        //sPath += "/0007-20150125-22-36-25.jpg";  
        cv::Mat frame2 = cv::imread(sPath.c_str(), CV_LOAD_IMAGE_COLOR);
        if (frame2.empty()) {
            dbgMsg_s("Failed to read image data from a file2\n");
        }
        bool toggle = false;
#endif

        context *pcontext = (context*) arg;
        pglobal = pcontext->pglobal;

        /* set cleanup handler to clean up allocated resources */
        pthread_cleanup_push(cam_cleanup, pcontext);

        while (!pglobal->stop) {
            while (pcontext->videoIn->streamingState == STREAMING_PAUSED) {
                usleep(1); // maybe not the best way so FIXME
            }

#ifdef TEST_USE_JPEGS_NOT_CAMERA 
            if (frameGrinder.safeGetFreeFrame(&pFrame)) {
                if (toggle) {
                    pFrame->m_frame = frame1;
                } else {
                    pFrame->m_frame = frame2;
                }
                toggle = (!toggle);
                if (!pFrame->m_frame.empty()) {
                    frameGrinder.safeAddTail(pFrame, CVideoFrame::FRAME_QUEUE_WAIT_FOR_BLOB_DETECT);
                } else {
                    dbgMsg_s("Frame is empty\n");
                    frameGrinder.safeAddTail(pFrame, CVideoFrame::FRAME_QUEUE_FREE);
                }
                frameGrinder.m_testMonitor.m_nTasksDone[CTestMonitor::TASK_DONE_CAMERA]++;
            }

#else
            /* grab a frame */
            if (uvcGrab(pcontext->videoIn) < 0) {
                IPRINT("Error grabbing frames\n");
                exit(EXIT_FAILURE);
            }

            DBG("received frame of size: %d from plugin: %d\n", pcontext->videoIn->buf.bytesused, pcontext->id);

            /*
             * Workaround for broken, corrupted frames:
             * under low-light conditions corrupted frames may be captured.
             * Fortunately, such frames are much smaller than regular pictures:
             * a VGA (640x480) webcam picture is normally >= 8 kB, whereas
             * corrupted frames are smaller.
             */
            if (pcontext->videoIn->buf.bytesused < minimum_size) {
                DBG("dropping too small frame, assuming it as broken\n");
                continue;
            }

            if (g_settings.isDynamicSettingsEnabled())
            {
                g_settings.getValueFromFile(CSetting::SETTING_EXPOSURE);
            }
            if(g_settings.isValueChanged(CSetting::SETTING_EXPOSURE))
            {
                setCameraExposure();
            }

#ifdef NO_CV_JUST_STREAM_THE_CAMERA

            /* copy JPG picture to global buffer */
            pthread_mutex_lock(&pglobal->in[pcontext->id].db);

            /*
             * If capturing in YUV mode, convert to JPEG now.
             * This compression requires many CPU cycles, so try to avoid the YUV
             * format. Getting JPEGs straight from the webcam is one of the major
             * advantages of Linux-UVC compatible devices.
             */
            if (pcontext->videoIn->formatIn == V4L2_PIX_FMT_YUYV) {
                DBG("compressing frame from input: %d\n", (int) pcontext->id);
                pglobal->in[pcontext->id].size = compress_yuyv_to_jpeg(pcontext->videoIn, pglobal->in[pcontext->id].buf, pcontext->videoIn->framesizeIn, gquality);
            } else {
                DBG("copying frame from input: %d\n", (int) pcontext->id);
                pglobal->in[pcontext->id].size = memcpy_picture(pglobal->in[pcontext->id].buf, pcontext->videoIn->tmpbuffer, pcontext->videoIn->buf.bytesused);
            }

            /* copy this frame's timestamp to user space */
            pglobal->in[pcontext->id].timestamp = pcontext->videoIn->buf.timestamp;

            /* signal fresh_frame */
            pthread_cond_broadcast(&pglobal->in[pcontext->id].db_update);
            pthread_mutex_unlock(&pglobal->in[pcontext->id].db);


#else // #ifndef NO_CV_JUST_STREAM_THE_CAMERA

            if (frameGrinder.safeGetFreeFrame(&pFrame)) {
                // tmpbuffer holds a compressed JPEG, so copy exactly the number
                // of bytes the driver reported rather than width * height
                std::vector<uchar> vectordata(pcontext->videoIn->tmpbuffer,
                        pcontext->videoIn->tmpbuffer + pcontext->videoIn->buf.bytesused);
                cv::Mat data_mat(vectordata, false);
                cv::Mat image(cv::imdecode(data_mat, 1)); // pass 0 instead of 1 for greyscale
                pFrame->m_frame = image;
                if (!pFrame->m_frame.empty()) {
                    frameGrinder.safeAddTail(pFrame, CVideoFrame::FRAME_QUEUE_WAIT_FOR_BLOB_DETECT);
                } else {
                    dbgMsg_s("Frame is empty\n");
                    frameGrinder.safeAddTail(pFrame, CVideoFrame::FRAME_QUEUE_FREE);
                }
                frameGrinder.m_testMonitor.m_nTasksDone[CTestMonitor::TASK_DONE_CAMERA]++;
            }

#endif  // #ifndef NO_CV_JUST_STREAM_THE_CAMERA

#endif   // TEST_USE_JPEGS_NOT_CAMERA
        }

        DBG("leaving input thread, calling cleanup function now\n");
        pthread_cleanup_pop(1);

        return NULL;
    }
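
The non-streaming branch shows the standard way to hand a compressed camera
frame to OpenCV: wrap the raw JPEG bytes in a cv::Mat header and let
cv::imdecode produce a BGR image. A minimal standalone sketch of that step
(the file name is illustrative, standing in for the camera buffer):

#include <opencv2/opencv.hpp>
#include <fstream>
#include <iostream>
#include <iterator>
#include <vector>

int main() {
    // read a JPEG file into memory to stand in for videoIn->tmpbuffer
    std::ifstream in("frame.jpg", std::ios::binary);
    std::vector<uchar> jpeg((std::istreambuf_iterator<char>(in)),
                            std::istreambuf_iterator<char>());

    cv::Mat data_mat(jpeg, false);             // wrap the bytes without copying
    cv::Mat image = cv::imdecode(data_mat, 1); // pass 0 instead of 1 for greyscale
    if (image.empty()) {
        std::cerr << "decode failed" << std::endl;
        return 1;
    }
    std::cout << image.cols << "x" << image.rows << std::endl;
    return 0;
}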