コード例 #1
0
void USSavingRecorderFixture::addVideoSource(int width, int height)
{
	// Create a uniquely named test video source of the requested size,
	// start it, and register it with the fixture.
	//
	// BUG FIX: the original ignored the width/height parameters and
	// hardcoded 80x40; the parameters are now passed to the source.
	size_t index = mVideo.size();
	cx::TestVideoSourcePtr videoSource(new cx::TestVideoSource(
											QString("videoSource%1").arg(index),
											QString("Video Source %1").arg(index),
											width, height));
	videoSource->start();
	mVideo.push_back(videoSource);
}
コード例 #2
0
bool HimppVpssGroup::enableObject()
{
    HI_S32 s32Ret = HI_FAILURE;

    // Configure the VPSS group to the resolution of the upstream video
    // source, with noise reduction on and DCI/IE/Hist/de-interlace off.
    ImageResolution res = videoSource()->getResolution();
    VPSS_GRP_ATTR_S attr;
    attr.u32MaxW = res.Width;
    attr.u32MaxH = res.Height;
    attr.bDciEn = HI_FALSE;
    attr.bIeEn = HI_FALSE;
    attr.bNrEn = HI_TRUE;
    attr.bHistEn = HI_FALSE;
    attr.enDieMode = VPSS_DIE_MODE_NODIE;
    attr.enPixFmt = HIMPP_PIXEL_FORMAT;
    if ((s32Ret = HI_MPI_VPSS_CreateGrp(_grpid, &attr)) != HI_SUCCESS) {
        HIMPP_PRINT("HI_MPI_VPSS_CreateGrp %d failed [%#x]\n",
                    _grpid, s32Ret);
        return false;
    }

    if ((s32Ret = HI_MPI_VPSS_StartGrp(_grpid)) != HI_SUCCESS) {
        // BUG FIX: this message previously said "HI_MPI_VPSS_CreateGrp"
        // (copy-paste error); it now names the call that actually failed.
        HIMPP_PRINT("HI_MPI_VPSS_StartGrp %d failed [%#x]\n",
                    _grpid, s32Ret);
        goto err_destroy_grp;
    }

    // Bind the upstream source to channel 0 of this VPSS group.
    MPP_CHN_S dst_chn;
    dst_chn.enModId = HI_ID_VPSS;
    dst_chn.s32DevId = _grpid;
    dst_chn.s32ChnId = 0;
    if ((s32Ret = HI_MPI_SYS_Bind(*source(), &dst_chn)) != HI_SUCCESS) {
        HIMPP_PRINT("HI_MPI_SYS_Bind %d failed [%#x]\n",
                    _grpid, s32Ret);
        goto err_stop_grp;
    }

    return true;

    // Unwind in reverse order of setup on failure.
err_stop_grp:
    HI_MPI_VPSS_StopGrp(_grpid);
err_destroy_grp:
    HI_MPI_VPSS_DestroyGrp(_grpid);

    return false;
}
コード例 #3
0
bool HimppVencChan::setResolution(ImageResolution &res)
{
    _resolution = res;

    // Derive a centered crop window that adapts the source aspect ratio
    // to the requested output aspect ratio. Cross-multiplication
    // (in.W * out.H vs out.W * in.H) compares the ratios without division.
    ImageResolution in = videoSource()->getResolution();
    RECT_S &rect = _crop_cfg.stRect;
    if (in.Width * res.Height > res.Width * in.Height) {
        // Source is wider than the target aspect: keep full height,
        // trim the sides symmetrically.
        _crop_cfg.bEnable = HI_TRUE;
        rect.u32Height = in.Height;
        rect.u32Width = res.Width * in.Height / res.Height;
        // X offset is masked to a multiple of 16 (hardware alignment).
        rect.s32X = ((in.Width - rect.u32Width) / 2) & 0xFFFFFFF0;
        rect.s32Y = 0;
    }
    else if (in.Width * res.Height < res.Width * in.Height) {
        // Source is taller than the target aspect: keep full width,
        // trim top and bottom symmetrically.
        _crop_cfg.bEnable = HI_TRUE;
        rect.u32Width = in.Width;
        rect.u32Height = res.Height * in.Width / res.Width;
        rect.s32X = 0;
        rect.s32Y = (in.Height - rect.u32Height) / 2;
    }
    else {
        // Aspect ratios already match; no crop required.
        _crop_cfg.bEnable = HI_FALSE;
    }

    // If the channel is live, bounce it so the new geometry takes effect.
    if (isEnabled()) {
        disableObject();
        enableObject();
    }

    return true;
}
コード例 #4
0
// Fill a HiMPP VENC channel attribute structure from this channel's
// configuration (_encoding, _resolution, _framerate, _gop, _bitrate,
// _rcmode, _h264profile). Returns false on an unsupported encoding or
// rate-control mode, true otherwise. Only writes to `attr`.
bool HimppVencChan::prepareVencChnAttr(VENC_CHN_ATTR_S &attr)
{
    // Rate statistics window in GOP units; clamp to at least 1.
    HI_U32 stattime;
    stattime = _gop / _framerate;
    stattime = stattime > 0 ? stattime : 1;

    switch (_encoding) {
    case H264:
        attr.stVeAttr.enType = PT_H264;
        attr.stVeAttr.stAttrH264e.u32MaxPicWidth = _resolution.Width;
        attr.stVeAttr.stAttrH264e.u32MaxPicHeight = _resolution.Height;
        // Stream buffer sized at 2 bytes per pixel.
        attr.stVeAttr.stAttrH264e.u32BufSize = _resolution.Width * _resolution.Height * 2;
        attr.stVeAttr.stAttrH264e.u32PicWidth = _resolution.Width;
        attr.stVeAttr.stAttrH264e.u32PicHeight = _resolution.Height;
        attr.stVeAttr.stAttrH264e.u32Profile = _h264profile;
        attr.stVeAttr.stAttrH264e.bByFrame = HI_FALSE;
        attr.stVeAttr.stAttrH264e.bField = HI_FALSE;
        attr.stVeAttr.stAttrH264e.bMainStream = HI_TRUE;
        attr.stVeAttr.stAttrH264e.u32Priority = 0;
        attr.stVeAttr.stAttrH264e.bVIField = HI_FALSE;
        // Rate Control Attribute
        switch (_rcmode) {
        case CBR:
            attr.stRcAttr.enRcMode = VENC_RC_MODE_H264CBRv2;
            attr.stRcAttr.stAttrH264Cbr.u32Gop = _gop;
            attr.stRcAttr.stAttrH264Cbr.u32StatTime = stattime;
            // Input rate comes from the source; target rate is ours.
            attr.stRcAttr.stAttrH264Cbr.u32ViFrmRate = videoSource()->getFramerate();
            attr.stRcAttr.stAttrH264Cbr.fr32TargetFrmRate = _framerate;
            attr.stRcAttr.stAttrH264Cbr.u32BitRate = _bitrate;
            attr.stRcAttr.stAttrH264Cbr.u32FluctuateLevel = 0;
            break;
        case VBR:
            attr.stRcAttr.enRcMode = VENC_RC_MODE_H264VBRv2;
            attr.stRcAttr.stAttrH264Vbr.u32Gop = _gop;
            attr.stRcAttr.stAttrH264Vbr.u32StatTime = stattime;
            attr.stRcAttr.stAttrH264Vbr.u32ViFrmRate = videoSource()->getFramerate();
            attr.stRcAttr.stAttrH264Vbr.fr32TargetFrmRate = _framerate;
            // Fixed QP bounds for VBR; _bitrate acts as the ceiling.
            attr.stRcAttr.stAttrH264Vbr.u32MinQp = 24;
            attr.stRcAttr.stAttrH264Vbr.u32MaxQp = 45;
            attr.stRcAttr.stAttrH264Vbr.u32MaxBitRate = _bitrate;
            break;
        case FIXQP:
            attr.stRcAttr.enRcMode = VENC_RC_MODE_H264FIXQP;
            attr.stRcAttr.stAttrH264FixQp.u32Gop = _gop;
            attr.stRcAttr.stAttrH264FixQp.u32ViFrmRate = videoSource()->getFramerate();
            attr.stRcAttr.stAttrH264FixQp.fr32TargetFrmRate = _framerate;
            // Hardcoded I/P-frame QPs for fixed-QP mode.
            attr.stRcAttr.stAttrH264FixQp.u32IQp = 20;
            attr.stRcAttr.stAttrH264FixQp.u32PQp = 23;
            break;
        default:
            HIMPP_PRINT("Unsupported RC mode[%d]\n", _rcmode);
            return false;
        }
        break;
    case MJPEG:
        attr.stVeAttr.enType = PT_MJPEG;
        attr.stVeAttr.stAttrMjpeg.u32MaxPicWidth = _resolution.Width;
        attr.stVeAttr.stAttrMjpeg.u32MaxPicHeight = _resolution.Height;
        attr.stVeAttr.stAttrMjpeg.u32BufSize = _resolution.Width * _resolution.Height * 2;
        attr.stVeAttr.stAttrMjpeg.u32PicWidth = _resolution.Width;
        attr.stVeAttr.stAttrMjpeg.u32PicHeight = _resolution.Height;
        attr.stVeAttr.stAttrMjpeg.bByFrame = HI_TRUE;
        attr.stVeAttr.stAttrMjpeg.bVIField = HI_FALSE;
        attr.stVeAttr.stAttrMjpeg.bMainStream = HI_TRUE;
        attr.stVeAttr.stAttrMjpeg.u32Priority = 0;
        // Rate Control Attribute
        switch (_rcmode) {
        case CBR:
            attr.stRcAttr.enRcMode = VENC_RC_MODE_MJPEGCBR;
            // NOTE: MJPEG uses a 1-second stat window, not `stattime`.
            attr.stRcAttr.stAttrMjpegeCbr.u32StatTime = 1;
            attr.stRcAttr.stAttrMjpegeCbr.u32ViFrmRate = videoSource()->getFramerate();
            attr.stRcAttr.stAttrMjpegeCbr.fr32TargetFrmRate = _framerate;
            attr.stRcAttr.stAttrMjpegeCbr.u32BitRate = _bitrate;
            attr.stRcAttr.stAttrMjpegeCbr.u32FluctuateLevel = 0;
            break;
        case VBR:
            attr.stRcAttr.enRcMode = VENC_RC_MODE_MJPEGVBR;
            attr.stRcAttr.stAttrMjpegeVbr.u32StatTime = 1;
            attr.stRcAttr.stAttrMjpegeVbr.u32ViFrmRate = videoSource()->getFramerate();
            attr.stRcAttr.stAttrMjpegeVbr.fr32TargetFrmRate = _framerate;
            // JPEG quality-factor bounds for VBR.
            attr.stRcAttr.stAttrMjpegeVbr.u32MinQfactor = 50;
            attr.stRcAttr.stAttrMjpegeVbr.u32MaxQfactor = 95;
            break;
        case FIXQP:
            attr.stRcAttr.enRcMode = VENC_RC_MODE_MJPEGFIXQP;
            attr.stRcAttr.stAttrMjpegeFixQp.u32ViFrmRate = videoSource()->getFramerate();
            attr.stRcAttr.stAttrMjpegeFixQp.fr32TargetFrmRate = _framerate;
            attr.stRcAttr.stAttrMjpegeFixQp.u32Qfactor = 90;
            break;
        default:
            HIMPP_PRINT("Unsupported RC mode[%d]\n", _rcmode);
            return false;
        }
        break;
    case JPEG:
        // Still-picture JPEG: no rate control block is filled in.
        attr.stVeAttr.enType = PT_JPEG;
        attr.stVeAttr.stAttrJpeg.u32PicWidth = _resolution.Width;
        attr.stVeAttr.stAttrJpeg.u32PicHeight = _resolution.Height;
        attr.stVeAttr.stAttrJpeg.u32BufSize = _resolution.Width * _resolution.Height * 2;
        attr.stVeAttr.stAttrJpeg.bByFrame = HI_TRUE;
        attr.stVeAttr.stAttrJpeg.bVIField = HI_FALSE;
        attr.stVeAttr.stAttrJpeg.u32Priority = 0;
        break;
    default:
        HIMPP_PRINT("Unsupported encoding code[%d]\n", _encoding);
        return false;
    }
    return true;
}
コード例 #5
0
// The group's effective frame rate is simply that of its upstream source.
uint32_t HimppVpssGroup::getFramerate()
{
    uint32_t framerate = videoSource()->getFramerate();
    return framerate;
}
コード例 #6
0
// The group's resolution is simply that of its upstream video source.
ImageResolution HimppVpssGroup::getResolution()
{
    ImageResolution resolution = videoSource()->getResolution();
    return resolution;
}
コード例 #7
0
//work offline on recorded sequence
int main(int argc, char** argv)
{
    if(argc < 5 || argc > 6)
    {
        print_usage(argv[0]);
        return 1;
    }

    std::string calibFilename = argv[1];
    std::string seqFilename = argv[2];
    std::string poseFilename = argv[3];
    std::string outFilename = argv[4];
    
    int offset = 0;
    if(argc >= 6)
        offset = std::stoi(argv[5]);


    int firstFrame = 2;
    tt::ThymioBlobModel mRobot;
    //allocate space to the surfaces to store the texture
    mRobot.allocateSurfaceLearning();

    //get sequence
    VideoSourceSeq videoSource(seqFilename.c_str(),firstFrame+offset);
    videoSource.resizeSource(0.5);

    //get calib
    tt::IntrinsicCalibration mCalibration(calibFilename);

    //for each image of the sequence, we have:
    std::vector<int> founds;//pose computed in the image or not (sometimes the board is not found)
    std::vector<cv::Vec3d> rvecs;//pose of the board (not the robot !)
    std::vector<cv::Vec3d> tvecs;

    //read the xml file which stores this values
    cv::FileStorage store(poseFilename, cv::FileStorage::READ);
    cv::FileNode n1 = store["founds"];
    cv::read(n1,founds);
    cv::FileNode n2 = store["rvecs"];
    cv::read(n2,rvecs);
    cv::FileNode n3 = store["tvecs"];
    cv::read(n3,tvecs);
    store.release();

    cv::namedWindow( window_name, cv::WINDOW_AUTOSIZE );
    
    cv::Mat outputImage;
    int cpt = firstFrame;

    
    while(!videoSource.isOver())
    {
        videoSource.grabNewFrame();
        cv::Mat inputImage = videoSource.getFramePointer();

        cv::Mat inputGray;
        cv::cvtColor(inputImage, inputGray, CV_RGB2GRAY);

        //use board pose to compute robots pose
        //here the robot was placed in the middle of the board
        cv::Affine3d boardPose = cv::Affine3d(rvecs[cpt],tvecs[cpt]);
        cv::Affine3d robotPose = boardPose*cv::Affine3d().translate(cv::Vec3d(0.,0.,-0.022))*cv::Affine3d().rotate(cv::Vec3d(0.,M_PI,0.));

        if(founds[cpt]==1)
        {
            mRobot.learnAppearance(inputGray, mCalibration, robotPose);
            mRobot.draw(inputImage, mCalibration, robotPose);
        }
            
        
        imshow(window_name, inputImage);
        cpt ++;
        

        auto key = cv::waitKey(5);
        if(key == 27 || key == 'q')
            break;
    }

    //save the model to an xml file
    mRobot.writeSurfaceLearned(outFilename);

    
    return 0;
}