void ocvColorQuantizeApp::setup()
{
    // Image copyright Eric J Paparatto
    // http://www.flickr.com/photos/ejpphoto/2633923684/

    mInputImage = ci::Surface8u( loadImage( loadResource( RES_IMAGE ) ) );
    setWindowSize( mInputImage.getWidth(), mInputImage.getHeight() );

    updateImage();
}
Example #2
//--------------------------------------------------------------
void testApp::setup(){
    ofEnableAlphaBlending();
    
    //load image file
    string path = ofToDataPath("logo.png");
    of_image.loadImage( path );
    ci_surface = ci::loadImage( path );
    
    //convert in both directions: ofImage -> Cinder texture, Cinder surface -> ofImage pixels
    ci_texture = ci::gl::Texture( ofxCi::toCi(of_image), GL_RGBA, of_image.getWidth(), of_image.getHeight() );
    of_image.setFromPixels(ofxCi::toOf(ci_surface), ci_surface.getWidth(), ci_surface.getHeight(), OF_IMAGE_COLOR_ALPHA);
    
}
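To check that the conversions above round-trip correctly, a minimal draw() can simply render the converted ofImage. This is a sketch only, using standard openFrameworks calls (ofSetColor, ofImage::draw, ofDrawBitmapString); the layout coordinates are illustrative.
//--------------------------------------------------------------
//Sketch only: verify the converted pixels on screen. Assumes of_image now
//holds the data copied back from ci_surface in setup().
void testApp::draw(){
    ofSetColor(255);
    of_image.draw(10, 10);
    ofDrawBitmapString("ofImage round-tripped through ci::Surface", 10, of_image.getHeight() + 30);
}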
//-----------------------------------------------------------------------------
bool HaarHandDetector::doUpdateDetection(ci::Surface pSurface)
{
	//Downscale factor applied to the input image to reduce processing time
	int lScale = 2;

	//Create a GrayScale copy of the input image
	cv::Mat	lGrayScaleImage(toOcv(pSurface, CV_8UC1));

	//Downscale the grayscale image by lScale
	cv::Mat lSmallGrayScaleImage(pSurface.getHeight() / lScale, pSurface.getWidth() / lScale, CV_8UC1);
	cv::resize(lGrayScaleImage, lSmallGrayScaleImage, lSmallGrayScaleImage.size(), 0,0,cv::INTER_LINEAR);

	//Equalize the histogram to improve contrast before detection
	cv::equalizeHist(lSmallGrayScaleImage, lSmallGrayScaleImage);

	//Clear out the previous Detected Closed Hands
	mClosedHands.clear();

	//Run the cascade classifier over the downscaled image
	std::vector<cv::Rect> lCVRectangles;
	mClosedHandCascade.detectMultiScale(lSmallGrayScaleImage, lCVRectangles, 1.2, 2, CV_HAAR_FIND_BIGGEST_OBJECT, cv::Size(24,24));

	//Map each detection back to full-resolution image coordinates
	for (std::vector<cv::Rect>::const_iterator lIter = lCVRectangles.begin(); lIter != lCVRectangles.end(); ++lIter)
	{
		ci::Rectf lClosedHand(ci::fromOcv(*lIter));
		lClosedHand *= lScale;
		mClosedHands.push_back(lClosedHand);
	}

	//Report whether any closed hands were detected
	return !mClosedHands.empty();
	
}
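The detections end up in mClosedHands, already mapped back to full-resolution coordinates. Below is a minimal overlay sketch, assuming mClosedHands is a std::vector<ci::Rectf> and that drawDetections() is a hypothetical helper (not part of the original class) called from the host app's draw().
//-----------------------------------------------------------------------------
//Sketch only: drawDetections() is a hypothetical helper that outlines each
//detected closed hand on top of the current frame.
void HaarHandDetector::drawDetections()
{
	ci::gl::color( ci::Color( 1.0f, 0.0f, 0.0f ) );
	for (std::vector<ci::Rectf>::const_iterator lIter = mClosedHands.begin(); lIter != mClosedHands.end(); ++lIter)
	{
		ci::gl::drawStrokedRect( *lIter );
	}
	ci::gl::color( ci::Color::white() );
}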
void ocvColorQuantizeApp::updateImage()
{
    const int colorCount = 32;
    const int sampleCount = mInputImage.getHeight() * mInputImage.getWidth();
    cv::Mat colorSamples( sampleCount, 1, CV_32FC3 );

    // build our matrix of samples
    Surface::ConstIter imageIt = mInputImage.getIter();
    cv::MatIterator_<cv::Vec3f> sampleIt = colorSamples.begin<cv::Vec3f>();
    while( imageIt.line() )
        while( imageIt.pixel() )
            *sampleIt++ = cv::Vec3f( imageIt.r(), imageIt.g(), imageIt.b() );

    // run k-means to compute colorCount representative colors (the reduced palette)
    cv::Mat labels, clusters;
    cv::kmeans( colorSamples, colorCount, labels, cv::TermCriteria( cv::TermCriteria::COUNT, 8, 0 ), 2, cv::KMEANS_RANDOM_CENTERS, &clusters );

    // convert each cluster center into an 8-bit color
    Color8u clusterColors[colorCount];
    for( int i = 0; i < colorCount; ++i )
        clusterColors[i] = Color8u( clusters.at<cv::Vec3f>(i,0)[0], clusters.at<cv::Vec3f>(i,0)[1], clusters.at<cv::Vec3f>(i,0)[2] );

    // build the quantized image: each pixel takes the color of its cluster center
    Surface result( mInputImage.getWidth(), mInputImage.getHeight(), false );
    Surface::Iter resultIt = result.getIter();
    cv::MatIterator_<int> labelIt = labels.begin<int>();
    while( resultIt.line() ) {
        while( resultIt.pixel() ) {
            resultIt.r() = clusterColors[*labelIt].r;
            resultIt.g() = clusterColors[*labelIt].g;
            resultIt.b() = clusterColors[*labelIt].b;
            ++labelIt;
        }
    }

    // draw the color palette across the bottom of the image
    const int swatchSize = 12;
    for( int i = 0; i < colorCount; ++i ) {
        ip::fill( &result, clusterColors[i], Area( i * swatchSize, result.getHeight() - swatchSize, ( i + 1 ) * swatchSize, result.getHeight() ) );
    }

    mTexture = gl::Texture( result );
}
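updateImage() leaves the quantized result in mTexture; the app's draw() only needs to display it. A minimal sketch, assuming the legacy gl::Texture API used above:
void ocvColorQuantizeApp::draw()
{
    gl::clear();
    if( mTexture )
        gl::draw( mTexture );
}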
Example #5
void ocvWarpApp::setup()
{
    mInputImage = ci::Surface8u( loadImage( loadAsset( "aus.jpg" ) ) );

    mRotationCenter = vec2( mInputImage.getSize() ) * 0.5f;
    mRotationAngle = 31.2f;
    mScale = 0.77f;

    mParams = params::InterfaceGl( "Parameters", ivec2( 200, 400 ) );
    mParams.addParam( "Rotation Center X", &mRotationCenter.x );
    mParams.addParam( "Rotation Center Y", &mRotationCenter.y );
    mParams.addParam( "Rotation Angle", &mRotationAngle );
    mParams.addParam( "Scale", &mScale, "step=0.01" );

    updateImage();
}
Example #6
void ocvWarpApp::setup()
{		
	mInputImage = ci::Surface8u( loadImage( loadResource( RES_IMAGE ) ) );

	mRotationCenter = mInputImage.getSize() * 0.5f;
	mRotationAngle = 31.2f;
	mScale = 0.77f;
	
	mParams = params::InterfaceGl( "Parameters", Vec2i( 200, 400 ) );
	mParams.addParam( "Rotation Center X", &mRotationCenter.x );
	mParams.addParam( "Rotation Center Y", &mRotationCenter.y );
	mParams.addParam( "Rotation Angle", &mRotationAngle );
	mParams.addParam( "Scale", &mScale, "step=0.1" );

	updateImage();
}
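Both warp setups hand off to updateImage(), which is not shown here. The following is a sketch of what it plausibly does, in the spirit of Cinder's ocvWarp sample: build a 2x3 affine matrix with cv::getRotationMatrix2D() from the center, angle (in degrees) and scale, then apply it with cv::warpAffine(). The interpolation flag and the mTexture member (a gl::Texture, as in the color-quantize example) are assumptions.
void ocvWarpApp::updateImage()
{
	//Sketch only: rebuild the warped texture from the current parameters
	cv::Mat input( toOcv( mInputImage ) ), output;

	//2x3 affine matrix: rotate by mRotationAngle degrees around mRotationCenter, scale by mScale
	cv::Mat warpMatrix = cv::getRotationMatrix2D( toOcv( mRotationCenter ), mRotationAngle, mScale );
	cv::warpAffine( input, output, warpMatrix, toOcv( mInputImage.getSize() ), cv::INTER_CUBIC );

	mTexture = gl::Texture( fromOcv( output ) );
}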