void setup() 
{
  size(1300, 1000);
  img = loadImage("pic4.jpg");

  /*//Use this to create your own pic of a specific colour to test individual RGB vals
   img = createImage(cols,rows,RGB); // 
   for(int i = 0; i < img.pixels.length; i++) {
   img.pixels[i] = color(0, 0, 255); 
   }
   */

  img.loadPixels();   // make img.pixels[] available before reading it
  println(img.pixels.length);
  String portName = "COM4";
  myPort = new Serial(this, portName, 9600);
  background(0);
  dataIn[0][0] = 0;

  //Send an ack byte every 2 seconds and wait for an ack back (see serialEvent()).
  //This begins the negotiation between the two machines; reception of the ack
  //is handled by the serial event method below.
  while (!ack) {
    myPort.write(88); 
    delay(2000);
  }
}//end setup
Example #2
GLuint ofxImGui::loadPixels(string imagePath)
{
    if(!engine) return -1;
    ofPixels pixels;
    ofLoadImage(pixels, imagePath);
    return loadPixels(pixels);
}
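The GLuint returned here is an OpenGL texture id, so it can be handed straight to Dear ImGui as an ImTextureID. A minimal usage sketch, assuming an addon instance named gui whose begin()/end() calls bracket the ImGui frame (the exact wrapper API differs between ofxImGui versions, and the image path is made up):

// Sketch only: 'gui', its begin()/end() and the image path are assumptions.
GLuint previewTex = gui.loadPixels("preview.jpg");   // OpenGL texture id

gui.begin();
ImGui::Begin("Preview");
// Dear ImGui wants an opaque ImTextureID, so cast the GL id.
ImGui::Image((ImTextureID)(uintptr_t)previewTex, ImVec2(256, 256));
ImGui::End();
gui.end();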
Example #3
void draw() {

  loadPixels();
  
  // Maximum number of iterations for each point on the complex plane
  int maxiterations = 100;

  // x goes from xmin to xmax
  double xmax = xmin + w;
  // y goes from ymin to ymax
  double ymax = ymin + h;
  
  // Calculate amount we increment x,y for each pixel
  double dx = (xmax - xmin) / (sketchWidth);
  double dy = (ymax - ymin) / (sketchHeight);

  // Start y
  double y = ymin;
  for(int j = 0; j < sketchHeight; j++) {
    // Start x
    double x = xmin;
    for(int i = 0;  i < sketchWidth; i++) {
      
      // Now we test: as we iterate z = z^2 + c, does z tend towards infinity?
      double a = x;
      double b = y;
      int n = 0;
      while (n < maxiterations) {
        double aa = a * a;
        double bb = b * b;
        double twoab = 2.0 * a * b;
        a = aa - bb + x;
        b = twoab + y;
        // Infinity in our finite world is simple: let's just consider it 16
        if(aa + bb > 16.0f) {
          break;  // Bail
        }
        n++;
      }
      
      // We color each pixel based on how long it takes to get to infinity
      // If we never got there, let's pick the color black
      if (n == maxiterations) setPixel(i, j, color(0, 0, 0));
      else {
        int cl = n*16 % 255;
        setPixel(i, j, color(cl, cl, cl));  // Gosh, we could make fancy colors here if we wanted
      }
      x += dx;
    }
    y += dy;
  }
  updatePixels();
  
  noLoop();
}
/**
 *	getIplImageFromIntArray
 *	Gets the pixel data held on the Java side and creates an IPL image from it.
 */
IplImage* getIplImageFromIntArray(JNIEnv* env, jintArray array_data,
		jint width, jint height) {
	int *pixels = env->GetIntArrayElements(array_data, 0);	// get a pointer to the elements of the int array array_data
	if (pixels == 0) {
		LOGE("Error getting int array of pixels.");
		return 0;
	}
	IplImage *image = loadPixels(pixels, width, height);	// create the IPL image
	env->ReleaseIntArrayElements(array_data, pixels, 0);	// copy the contents back to array_data and release the pixels buffer
	if (image == 0) {
		LOGE("Error loading pixel array.");
		return 0;
	}
	return image;
}
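The loadPixels(pixels, width, height) helper used in this and the following examples is not shown on this page. As a rough sketch of what such a helper could look like, assuming the ints are Android-style ARGB_8888 pixels and that a 3-channel BGR IplImage is wanted (which is what the later CV_BGR2GRAY / CV_BGR2HSV conversions expect):

// Illustrative sketch only -- the project's real helper lives elsewhere.
// Assumes 'pixels' holds width*height ints in ARGB order (Android Bitmap ARGB_8888).
IplImage* loadPixels(int* pixels, int width, int height) {
	IplImage* img = cvCreateImage(cvSize(width, height), IPL_DEPTH_8U, 3);
	if (img == 0) {
		return 0;
	}
	for (int y = 0; y < height; y++) {
		// widthStep accounts for any row padding OpenCV adds.
		unsigned char* row = (unsigned char*)(img->imageData + y * img->widthStep);
		for (int x = 0; x < width; x++) {
			int p = pixels[y * width + x];
			row[x * 3 + 0] = (unsigned char)( p        & 0xFF);  // B
			row[x * 3 + 1] = (unsigned char)((p >> 8)  & 0xFF);  // G
			row[x * 3 + 2] = (unsigned char)((p >> 16) & 0xFF);  // R
		}
	}
	return img;
}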
//--------------------------------------------------------------
ofPixels& ofxImageSegmentation::segment(ofPixels& image){

	if(!image.isAllocated()){
		ofLogError("ofxImageSegmentation::segment") << "input image must be allocated";
		return segmentedPixels;
	}
	
	if(!segmentedPixels.isAllocated() || 
		segmentedPixels.getWidth() != image.getWidth() ||
		segmentedPixels.getHeight() != image.getHeight() ||
		segmentedPixels.getImageType() != image.getImageType() )
	{
		segmentedPixels.allocate(image.getWidth(), image.getHeight(), OF_IMAGE_COLOR);
		segmentedMasks.clear();
	}

	image<rgb> *input = loadPixels(image);
	image<rgb> *seg;
	image<char> **masks;
	numSegments = segment_image(input, sigma, k, min, seg, masks); 
	memcpy(segmentedPixels.getPixels(),seg->data,segmentedPixels.getWidth()*segmentedPixels.getHeight()*segmentedPixels.getBytesPerPixel());
	
	//calculate segment masks
	if(numSegments > 0){
		while(segmentedMasks.size() < numSegments){
			segmentedMasks.push_back(ofPixels());
			segmentedMasks.back().allocate(image.getWidth(), image.getHeight(), OF_IMAGE_GRAYSCALE);
		}
		int bytesPerMask = segmentedMasks[0].getWidth()*segmentedMasks[0].getHeight()*segmentedMasks[0].getBytesPerPixel();
		for(int i = 0; i < numSegments; i++){
			memcpy(segmentedMasks[i].getPixels(),masks[i]->data,bytesPerMask);
		}
	}

	//This is really slow to do, find a way to preserve memory
	delete input;
	delete seg;
	for(int i = 0; i < numSegments; i++){
		delete masks[i];
	}
	delete [] masks;

	return segmentedPixels;
}
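For context, a minimal usage sketch of this segmenter inside an openFrameworks app. It assumes sigma, k, min and numSegments are public members (as the snippet suggests) and uses only standard oF calls; the addon's real interface may differ:

// Sketch only: member visibility, tuning values and the image path are assumptions.
ofxImageSegmentation segmenter;
segmenter.sigma = 0.5f;   // pre-smoothing before graph construction
segmenter.k     = 500;    // larger k favors larger segments
segmenter.min   = 50;     // minimum segment size in pixels

ofPixels input;
ofLoadImage(input, "scene.jpg");

ofPixels& result = segmenter.segment(input);  // also fills segmenter.numSegments
ofLogNotice("segment") << "found " << segmenter.numSegments << " segments";

ofImage preview;
preview.setFromPixels(result);                // wrap the segmented pixels for drawing
preview.draw(0, 0);                           // e.g. inside ofApp::draw()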
Example #7
GLuint ofxImGui::loadImage(string imagePath)
{
    return loadPixels(imagePath);
}
Example #8
GLuint ofxImGui::loadImage(ofImage& image)
{
    if(!engine) return -1;
    return loadPixels(image.getPixels());
}
Example #9
JNIEXPORT
jbooleanArray
JNICALL
Java_org_siprop_opencv_OpenCV_faceDetect(JNIEnv* env,
										jobject thiz,
										jintArray photo_data1,
										jintArray photo_data2,
										jint width,
										jint height) {
	LOGV("Load desp.");

	int i, x, y;
	int* pixels;
	IplImage *frameImage;
	
	IplImage *backgroundImage = cvCreateImage( cvSize(width, height), IPL_DEPTH_8U, 1 );
	IplImage *grayImage = cvCreateImage( cvSize(width, height), IPL_DEPTH_8U, 1 );
	IplImage *differenceImage = cvCreateImage( cvSize(width, height), IPL_DEPTH_8U, 1 );
	
	IplImage *hsvImage = cvCreateImage( cvSize(width, height), IPL_DEPTH_8U, 3 );
	IplImage *hueImage = cvCreateImage( cvSize(width, height), IPL_DEPTH_8U, 1 );
	IplImage *saturationImage = cvCreateImage( cvSize(width, height), IPL_DEPTH_8U, 1 );
	IplImage *valueImage = cvCreateImage( cvSize(width, height), IPL_DEPTH_8U, 1 );
	IplImage *thresholdImage1 = cvCreateImage( cvSize(width, height), IPL_DEPTH_8U, 1 );
	IplImage *thresholdImage2 = cvCreateImage( cvSize(width, height), IPL_DEPTH_8U, 1 );
	IplImage *thresholdImage3 = cvCreateImage( cvSize(width, height), IPL_DEPTH_8U, 1 );
	IplImage *faceImage = cvCreateImage( cvSize(width, height), IPL_DEPTH_8U, 1 );
	
	CvMoments moment;
	double m_00;
	double m_10;
	double m_01;
	int gravityX;
	int gravityY;

	jbooleanArray res_array;
	int imageSize;



	// Load Image
	pixels = env->GetIntArrayElements(photo_data1, 0);
	frameImage = loadPixels(pixels, width, height);
	if(frameImage == 0) {
		LOGV("Error loadPixels.");
		return 0;
	}
	
	
	cvCvtColor( frameImage, backgroundImage, CV_BGR2GRAY );
	
	
	pixels = env->GetIntArrayElements(photo_data2, 0);
	frameImage = loadPixels(pixels, width, height);
	if(frameImage == 0) {
		LOGV("Error loadPixels.");
		return 0;
	}
	cvCvtColor( frameImage, grayImage, CV_BGR2GRAY );
	cvAbsDiff( grayImage, backgroundImage, differenceImage );
	
	cvCvtColor( frameImage, hsvImage, CV_BGR2HSV );
	LOGV("Load cvCvtColor.");
	cvSplit( hsvImage, hueImage, saturationImage, valueImage, 0 );
	LOGV("Load cvSplit.");
	cvThreshold( hueImage, thresholdImage1, THRESH_BOTTOM, THRESHOLD_MAX_VALUE, CV_THRESH_BINARY );
	cvThreshold( hueImage, thresholdImage2, THRESH_TOP, THRESHOLD_MAX_VALUE, CV_THRESH_BINARY_INV );
	cvAnd( thresholdImage1, thresholdImage2, thresholdImage3, 0 );
	LOGV("Load cvAnd.");
	
	cvAnd( differenceImage, thresholdImage3, faceImage, 0 );
	
	cvMoments( faceImage, &moment, 0 );
	m_00 = cvGetSpatialMoment( &moment, 0, 0 );
	m_10 = cvGetSpatialMoment( &moment, 1, 0 );
	m_01 = cvGetSpatialMoment( &moment, 0, 1 );
	gravityX = m_10 / m_00;
	gravityY = m_01 / m_00;
	LOGV("Load cvMoments.");


	cvCircle( frameImage, cvPoint( gravityX, gravityY ), CIRCLE_RADIUS,
		 CV_RGB( 255, 0, 0 ), LINE_THICKNESS, LINE_TYPE, 0 );




	CvMat stub, *mat_image;
    int channels, ipl_depth;
    mat_image = cvGetMat( frameImage, &stub );
    channels = CV_MAT_CN( mat_image->type );

    ipl_depth = cvCvToIplDepth(mat_image->type);

	WLNonFileByteStream* m_strm = new WLNonFileByteStream();
    loadImageBytes(mat_image->data.ptr, mat_image->step, mat_image->width,
                             mat_image->height, ipl_depth, channels, m_strm);
	LOGV("Load loadImageBytes.");


	imageSize = m_strm->GetSize();
	res_array = env->NewBooleanArray(imageSize);
	LOGV("Load NewByteArray.");
    if (res_array == 0) {
        return 0;
    }
    env->SetBooleanArrayRegion(res_array, 0, imageSize, (jboolean*)m_strm->GetByte());
	LOGV("Load SetBooleanArrayRegion.");




	cvReleaseImage( &backgroundImage );
	cvReleaseImage( &grayImage );
	cvReleaseImage( &differenceImage );
	cvReleaseImage( &hsvImage );
	cvReleaseImage( &hueImage );
	cvReleaseImage( &saturationImage );
	cvReleaseImage( &valueImage );
	cvReleaseImage( &thresholdImage1 );
	cvReleaseImage( &thresholdImage2 );
	cvReleaseImage( &thresholdImage3 );
	cvReleaseImage( &faceImage );
	cvReleaseImage( &frameImage );
	m_strm->Close();
	SAFE_DELETE(m_strm);

	return res_array;

}
Example #10
// Grabs a frame (image) from a socket.
bool CVCapture_Socket::grabFrame()
{
	// First ensure that our addrinfo and read buffer are allocated.
	if (pAddrInfo == 0 || readBuf == 0)
	{
		LOGV("You haven't opened the socket capture yet!");
		return false;
	}
	
	// Establish the socket.
	int sockd = socket(pAddrInfo->ai_family, pAddrInfo->ai_socktype, pAddrInfo->ai_protocol);
	if (sockd < 0 || errno != 0)
	{
		char buffer[100];
		sprintf(buffer, "Failed to create socket, errno = %d", errno);
		LOGV(buffer);
		::close(sockd);
		return false;
	}
	
	// Now connect to the socket.
	if (connect(sockd, pAddrInfo->ai_addr, pAddrInfo->ai_addrlen) < 0 || errno != 0)
	{
		char buffer[100];
		sprintf(buffer, "socket connection errorno = %d", errno);
		LOGV(buffer);
		::close(sockd);
		return false;
	}

	// Release the image if it hasn't been already because we are going to overwrite it.
	if (frame)
	{
		cvReleaseImage( &frame );
		frame = 0;
	}
	
	// Read the socket until we have filled the data with the space allocated OR run
	// out of data which we treat as an error.
	long read_count, total_read = 0;
	while (total_read < readBufSize)
	{
		// Only ask for the bytes still missing so we never overrun readBuf.
		read_count = read(sockd, &readBuf[total_read], readBufSize - total_read);
		if (read_count <= 0 || errno != 0)
		{
			char buffer[100];
			sprintf(buffer, "socket read errorno = %d", errno);
			LOGV(buffer);
			break;
		}
		total_read += read_count;
	}
	
	// If we read all of the data we expected, we will load the frame from the pixels.
	if (total_read == readBufSize)
	{
		frame = loadPixels(readBuf, width, height);
	}
	else
	{
		LOGV("full read of pixels failed");
	}
	
	// Close the socket and return the frame!
	::close(sockd);
	
    return frame != 0;
}
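grabFrame() only decodes the data into the member frame; in the usual CvCapture grab/retrieve split, a separate call hands the image back. A plausible counterpart, sketched under the assumption that CVCapture_Socket follows that pattern:

// Sketch only: assumes the standard grab/retrieve split used by CvCapture backends.
IplImage* CVCapture_Socket::retrieveFrame()
{
	// grabFrame() already built the IplImage; hand it out without copying.
	return frame;
}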
bool TextureCube::load(const std::string &_path, bool _vFlip) {

    // Init
    glGenTextures(1, &m_id);

    glBindTexture(GL_TEXTURE_CUBE_MAP, m_id);
    glTexParameteri(GL_TEXTURE_CUBE_MAP, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
    glTexParameteri(GL_TEXTURE_CUBE_MAP, GL_TEXTURE_MIN_FILTER, GL_LINEAR_MIPMAP_LINEAR);
    glTexParameteri(GL_TEXTURE_CUBE_MAP, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
    glTexParameteri(GL_TEXTURE_CUBE_MAP, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
#ifndef PLATFORM_RPI
    glTexParameteri(GL_TEXTURE_CUBE_MAP, GL_TEXTURE_WRAP_R, GL_CLAMP_TO_EDGE);
#endif

    int sh_samples = 0;
    if (haveExt(_path,"png") || haveExt(_path,"PNG") ||
        haveExt(_path,"jpg") || haveExt(_path,"JPG") ||
        haveExt(_path,"jpeg") || haveExt(_path,"JPEG")) {

        unsigned char* data = loadPixels(_path, &m_width, &m_height, RGB, false);

        // LOAD FACES
        Face<unsigned char> **faces = new Face<unsigned char>*[6];

        if (m_height > m_width) {

            if (m_height/6 == m_width) {
                // Vertical Row
                splitFacesFromVerticalRow<unsigned char>(data, m_width, m_height, faces);
            }
            else {
                // Vertical Cross
                splitFacesFromVerticalCross<unsigned char>(data, m_width, m_height, faces);

                // adjust NEG_Z face
                if (_vFlip) {
                    faces[5]->flipHorizontal();
                    faces[5]->flipVertical();
                }
            }
            
        }
        else {
            if (m_width/2 == m_height) {
                // Equilateral
                splitFacesFromEquilateral<unsigned char>(data, m_width, m_height, faces);
            }
            else if (m_width/6 == m_height) {
                // Horizontal Row
                splitFacesFromHorizontalRow<unsigned char>(data, m_width, m_height, faces);
            }
            else {
                // Horizontal Cross
                splitFacesFromHorizontalCross<unsigned char>(data, m_width, m_height, faces);
            }
        }
        
        for (int i = 0; i < 6; i++) {
            faces[i]->upload();
            sh_samples += faces[i]->calculateSH(SH);
        }

        delete[] data;
        for(int i = 0; i < 6; ++i) {
            delete[] faces[i]->data;
            delete faces[i];
        }
        delete[] faces;

    }

    else if (haveExt(_path, "hdr") || haveExt(_path,"HDR")) {
        float* data = loadFloatPixels(_path, &m_width, &m_height, false);

        // LOAD FACES
        Face<float> **faces = new Face<float>*[6];

        if (m_height > m_width) {
            if (m_height/6 == m_width) {
                // Vertical Row
                splitFacesFromVerticalRow<float>(data, m_width, m_height, faces);
            }
            else {
                // Vertical Cross
                splitFacesFromVerticalCross<float>(data, m_width, m_height, faces);

                // adjust NEG_Z face
                if (_vFlip) {
                    faces[5]->flipHorizontal();
                    faces[5]->flipVertical();
                }
            }
        }
        else {

            if (m_width/2 == m_height)  {
                // Equilateral
                splitFacesFromEquilateral<float>(data, m_width, m_height, faces);
            }
            else if (m_width/6 == m_height) {
                // Horizontal Row
                splitFacesFromHorizontalRow<float>(data, m_width, m_height, faces);
            }
            else {
                // Horizontal Cross
                splitFacesFromHorizontalCross<float>(data, m_width, m_height, faces);
            }
        }

        for (int i = 0; i < 6; i++) {
            faces[i]->upload();
            sh_samples += faces[i]->calculateSH(SH);
        }

        delete[] data;
        for(int i = 0; i < 6; ++i) {
            delete[] faces[i]->data;
            delete faces[i];
        }
        delete[] faces;

    }

    for (int i = 0; i < 9; i++) {
        SH[i] = SH[i] * (32.0f / (float)sh_samples);
    }

    glGenerateMipmap(GL_TEXTURE_CUBE_MAP);
    glBindTexture(GL_TEXTURE_CUBE_MAP, 0);

    m_path = _path;             
    return true;
}
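The small haveExt() helper used above for extension matching is not shown here. A plain, case-sensitive suffix check along these lines would be enough (a sketch, not necessarily the project's real implementation):

// Sketch only: a case-sensitive suffix check, which is why the code above
// tests "jpg" and "JPG" (etc.) separately.
bool haveExt(const std::string& _file, const std::string& _ext) {
    return _file.size() > _ext.size() &&
           _file.compare(_file.size() - _ext.size() - 1, _ext.size() + 1, "." + _ext) == 0;
}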