/*
 * Walk the entry table that follows the main header inside an MBR-style
 * buffer, handing each valid entry to process_image().
 *
 * mbr    - base of the header buffer; entries live at
 *          mbr + offset + sizeof(mainheader) + i * sizeof(hdr_ent)
 * offset - extra byte offset of the header within the buffer
 * input  - backing stream passed through to process_image()
 *
 * Iteration stops when the next entry would not fit within MBR_HDR_LEN
 * bytes of the buffer, or at the first entry whose start sector is the
 * 0xffffffff terminator.
 */
void process_mbr(char * mbr, uint8_t offset, FILE *input)
{
  int i = 0;
  for (;;) {
    hdr_ent *e = (hdr_ent*) (mbr + offset + sizeof(mainheader) + sizeof(hdr_ent) * i++);
    /* BUG FIX: bounds-check BEFORE dereferencing.  The original read
     * e->start_s first and only then compared the entry's start offset
     * (not its end) against MBR_HDR_LEN, so the terminator test could
     * read past the header buffer. */
    if ((size_t)(((char*)e) - mbr) + sizeof(hdr_ent) > MBR_HDR_LEN)
      break;
    if (e->start_s == 0xffffffff)
      break;
    process_image(e, input);
  }
}
    // Take a numbered snapshot of the window driven by the interactor,
    // then hand the snapshot off for saving/post-processing.
    void vtkTimerCallbackSnapshot::process(vtkRenderWindowInteractor * iren, vtkActor * actor, vtkRenderer * renderer, int ino)
    {
      vtkRenderWindow *window = iren->GetRenderWindow();

      // Capture the current frame to a file and keep its name.
      const std::string snapshotFile = takeSnapshot(window, ino);

      // Persist/process the captured frame.
      process_image(snapshotFile, renderer, actor, ino);
    }
Example #3
0
/* used by poll interface.  lock is already held */
static int proc_video_handler (zebra_processor_t *proc,
                               int i)
{
    zebra_image_t *frame;
    int result;

    /* nothing to do for an inactive processor */
    if (!proc->active)
        return 0;

    /* not expected to block */
    frame = zebra_video_next_image(proc->video);
    if (frame == NULL)
        return -1;

    result = process_image(proc, frame);
    zebra_image_destroy(frame);
    return result;
}
Example #4
0
/* Unthreaded variant of the event wait: polls video/window input inline
 * on the calling thread until `event` is signalled or the deadline in
 * `abstime` passes.  `timeout` < 0 means wait forever; `abstime` is the
 * absolute deadline matching `timeout` (computed by the caller).
 * Returns 1 when the event fired, 0 on timeout, -1 on video error.
 * Called with the processor lock held. */
static inline int proc_event_wait_unthreaded (zebra_processor_t *proc,
                                              unsigned event,
                                              int timeout,
                                              struct timespec *abstime)
{
    /* "blocking" == video has no pollable fd: frames must be pulled
     * synchronously in this loop instead of via proc_poll_inputs() */
    int blocking = proc->active && zebra_video_get_fd(proc->video) < 0;
    proc->events &= ~event;
    /* unthreaded, poll here for window input */
    while(!(proc->events & event)) {
        if(blocking) {
            zebra_image_t *img = zebra_video_next_image(proc->video);
            if(!img)
                return(-1);
            process_image(proc, img);
            zebra_image_destroy(img);
        }
        /* recompute the remaining time before the absolute deadline */
        int reltime = timeout;
        if(reltime >= 0) {
            struct timespec now;
#if _POSIX_TIMERS > 0
            clock_gettime(CLOCK_REALTIME, &now);
#else
            /* no POSIX timers: synthesize a timespec from gettimeofday */
            struct timeval ustime;
            gettimeofday(&ustime, NULL);
            now.tv_nsec = ustime.tv_usec * 1000;
            now.tv_sec = ustime.tv_sec;
#endif
            reltime = ((abstime->tv_sec - now.tv_sec) * 1000 +
                       (abstime->tv_nsec - now.tv_nsec) / 1000000);
            /* deadline already passed: timed out */
            if(reltime <= 0)
                return(0);
        }
        /* cap the wait so blocking mode keeps pumping video frames */
        if(blocking && (reltime < 0 || reltime > 10))
            reltime = 10;
        if(proc->polling.num)
            proc_poll_inputs(proc, (poll_desc_t*)&proc->polling, reltime);
        else if(!blocking) {
            /* nothing to poll and video is pollable elsewhere: just
             * sleep out the interval with the lock released.
             * NOTE(review): this sleeps the original `timeout`, not the
             * recomputed `reltime` -- confirm that is intentional */
            proc_unlock(proc);
            struct timespec sleepns, remns;
            sleepns.tv_sec = timeout / 1000;
            sleepns.tv_nsec = (timeout % 1000) * 1000000;
            while(nanosleep(&sleepns, &remns) && errno == EINTR)
                sleepns = remns;
            (void)proc_lock(proc);
            return(0);
        }
    }
    return(1);
}
Example #5
0
/* Dequeue one frame from the capture device, hand it to process_image(),
 * and re-queue the buffer.  Returns 1 on success, 0 when no frame is
 * ready (EAGAIN); fatal ioctl errors terminate via errno_exit(). */
static int read_frame(void)
{
	struct v4l2_buffer buf;
	struct v4l2_plane planes[VIDEO_MAX_PLANES];
	int index;

	CLEAR(buf);
	buf.type = CAPTURE_BUFFER_TYPE;
	buf.memory = V4L2_MEMORY_MMAP;

	PR_IO(VIDIOC_DQBUF);
	if (-1 == xioctl(g_file_desc, VIDIOC_DQBUF, &buf)) {
		switch (errno) {
		case EAGAIN:
			/* no buffer ready yet */
			return 0;

		case EIO:
			/* Could ignore EIO, see spec. */

			/* fall through */

		default:
			errno_exit("VIDIOC_DQBUF");
		}
	}

	assert(buf.index < n_buffers);
	process_image(buffers[buf.index].addr[0]);

	/* remember the index: buf is cleared before re-queueing */
	index = buf.index;

	PR_IO(VIDIOC_QBUF);
	CLEAR(buf);
	buf.type = CAPTURE_BUFFER_TYPE;
	buf.memory = V4L2_MEMORY_MMAP;
	buf.index = index;
	if (buf.type == V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE) {
		/* multi-planar capture needs an explicit plane array */
		buf.m.planes = planes;
		buf.length = 1;
		buf.m.planes[0].bytesused = buffers[index].size[0];
	}

	if (-1 == xioctl(g_file_desc, VIDIOC_QBUF, &buf))
		errno_exit("VIDIOC_QBUF");

	return 1;
}
Example #6
0
/* Pull one DV frame out of the IEEE1394 object's capture buffer, decode
 * it to RGB into x->decodedbuf and forward it via process_image().
 * Does nothing when no frame is ready; returns 0 only when the decoder
 * is missing, otherwise 1. */
static int pdp_ieee1394_read_frame(t_pdp_ieee1394 *x)
{

  if (!x->x_decoder)return 0;
  if (!x->x_frame_ready) {
	//x->x_image.newimage = 0;
  }
  else {
    dv_parse_header(x->x_decoder, x->videobuf);
    dv_parse_packs (x->x_decoder, x->videobuf);
    if(dv_frame_changed(x->x_decoder)) {
      /* row pitch for the RGB output; planes 1/2 unused for RGB */
      int pitches[3] = {0,0,0};
      //      pitches[0]=x_decoder->width*3; // rgb
      //      pitches[0]=x_decoder->width*((x_reqFormat==GL_RGBA)?3:2);
      pitches[0]=x->x_decoder->width*3;
      x->x_height=x->x_decoder->height;
      x->x_width=x->x_decoder->width;
      
      /* decode the DV-data to something we can handle and that is similar to the wanted format */
      //      dv_report_video_error(x_decoder, videobuf);  // do we need this ?
      // gosh, this(e_dv_color_rgb) is expansive:: the decoding is done in software only...
      //      dv_decode_full_frame(x_decoder, videobuf, ((x_reqFormat==GL_RGBA)?e_dv_color_rgb:e_dv_color_yuv), &decodedbuf, pitches);
      dv_decode_full_frame(x->x_decoder, x->videobuf, e_dv_color_rgb, &x->decodedbuf, pitches);

      //     post("sampling %d", x_decoder->sampling);

      /* convert the colour-space to the one we want */
      /*
       * btw. shouldn't this be done in [pix_video] rather than here ?
       * no because [pix_video] knows nothing about the possible colourspaces in here
       */

      // letting the library do the conversion to RGB and then doing the conversion to RGBA
      // is really stupid.
      // let's do it all ourselfes:
      //      if (x_reqFormat==GL_RGBA)x_image.image.fromRGB(decodedbuf); else
      //x_image.image.fromYVYU(decodedbuf);
    	process_image (x);
	
    }

    /* frame consumed: wait for the capture thread to flag the next one */
    x->x_frame_ready = false;
  }
	
  return 1;
}
Example #7
0
// Dequeue one frame from the camera, splice the MJPEG Huffman (DHT)
// segment in after the frame header (many UVC cameras omit it), hand
// the patched frame to process_image(), and re-queue the buffer.
// Returns 1 on success, 0 when no frame could be dequeued.
int CameraManager::read_frame() {
    struct v4l2_buffer buf;
    
    CLEAR(buf);
    std::cout << "reading frame" << std::endl;

    buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    buf.memory = V4L2_MEMORY_MMAP;
    
    if (-1 == xioctl(fd, VIDIOC_DQBUF, &buf)) {
        switch (errno) {
            case EAGAIN:
                perror("No data available");
                return 0;
                
            case EIO:
                /* Could ignore EIO, see spec. */
                
                /* fall through */
                
            default:
                perror("VIDIOC_DQBUF");
                // BUG FIX: the original fell out of the switch after a
                // failed dequeue and went on to use an undefined
                // buf.index; bail out instead.
                return 0;
        }
    }
    
    assert(buf.index < n_buffers);
    
    std::cout << "Bytes used: " << buf.bytesused << std::endl;

    // BUG FIX: the original declared an array of POINTERS
    // (unsigned char *tmpbuffer[...]) -- wrong element type and 8x the
    // intended storage.  A plain byte array is what the memcpy calls
    // below expect.
    unsigned char tmpbuffer[(width * height << 1)];
    
    memcpy (tmpbuffer, buffers[buf.index].start, HEADERFRAME1);
    memcpy (tmpbuffer + HEADERFRAME1, dht_data, DHT_SIZE);
    memcpy (tmpbuffer + HEADERFRAME1 + DHT_SIZE, buffers[buf.index].start + HEADERFRAME1, (buf.bytesused - HEADERFRAME1));
    
    std::cout << "Done with memcpy" << std::endl;
    
    // NOTE(review): size passed excludes the DHT_SIZE bytes inserted
    // above -- confirm process_image() only needs the original length.
    process_image(tmpbuffer, buf.bytesused);
    
    if (-1 == xioctl(fd, VIDIOC_QBUF, &buf))
        perror("VIDIOC_QBUF");
    
    return 1;
}
Example #8
0
/* Dequeue one frame (mmap I/O only), pass its data, size and sequence
 * number to process_image(), and re-queue the buffer.  Returns 1 on
 * success (and, unchanged from before, also for unsupported I/O
 * methods), 0 when no frame is ready; fatal errors exit via
 * errno_exit(). */
static int read_frame	(void) {
	struct v4l2_buffer buf;
	switch (io) {
		case IO_METHOD_READ:
		case IO_METHOD_USERPTR:
		default:
			fprintf(stderr, "not supported, use mmap!\n");
			break;
		case IO_METHOD_MMAP:
			CLEAR (buf);

			buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
			buf.memory = V4L2_MEMORY_MMAP;

			if (-1 == xioctl (fd, VIDIOC_DQBUF, &buf)) {
				switch (errno) {
					case EAGAIN:
						/* no frame ready yet */
						return 0;

					case EIO:
						/* Could ignore EIO, see spec. */

						/* fall through */

					default:
						errno_exit ("VIDIOC_DQBUF");
				}
			}
			//xioctl (fd, S2253_VIDIOC_ECHO_TS, &buf.timestamp);

			assert (buf.index < n_buffers);

			process_image (buffers[buf.index].start,
					buf.bytesused,
					buf.sequence);

			/* return the buffer to the driver for refilling */
			if (-1 == xioctl (fd, VIDIOC_QBUF, &buf))
				errno_exit ("VIDIOC_QBUF");

			break;
	}

	return 1;
}
Example #9
0
/* Public one-shot entry point: scan a single image under the processor
 * lock.  If a display window exists it is resized to the image first;
 * the inter-frame result cache is disabled for the single scan and
 * re-enabled afterwards for active (streaming) processors.  Returns
 * the process_image() result, a negative value on lock failure, or the
 * resize error code. */
int zebra_process_image (zebra_processor_t *proc,
                         zebra_image_t *img)
{
    if(proc_lock(proc) < 0)
        return(-1);
    int rc = 0;
    if(img && proc->window)
        rc = _zebra_window_resize(proc,
                                  zebra_image_get_width(img),
                                  zebra_image_get_height(img));
    if(!rc) {
        /* one-shot scan: caching across frames makes no sense here */
        zebra_image_scanner_enable_cache(proc->scanner, 0);
        rc = process_image(proc, img);
        if(proc->active)
            zebra_image_scanner_enable_cache(proc->scanner, 1);
    }
    proc_unlock(proc);
    return(rc);
}
Example #10
0
File: car.c Project: engie/spycar
/*
 * Read an MJPEG-over-HTTP style stream from `sock`, split it on the
 * SEPERATOR boundary marker, and feed each complete blob (except the
 * leading HTTP headers) to process_image().  Never returns: exits on
 * EOF, socket error, or buffer overflow.
 */
void stream_images( int sock )
{
    char buffer[BUFFER_SIZE];
    int len = 0;
    int found_count = 0;
    int i;
    const int sep_len = (int)strlen(SEPERATOR);

    while (1)
    {
        int received = recv (sock, buffer+len, BUFFER_SIZE-len, 0);
        if (received == 0)
        {
            /* peer closed the connection */
            exit(0);
        }
        if (received < 0)
        {
            /* BUG FIX: the original ignored recv() errors, adding -1
             * to len and corrupting the buffer bookkeeping */
            perror ("recv");
            exit (EXIT_FAILURE);
        }
        len += received;

        if (len == BUFFER_SIZE)
        {
            perror ("overflow");
            exit (EXIT_FAILURE);
        }

        /* BUG FIX: the original bound was `i < len - strlen(SEPERATOR)`,
         * mixing a signed index with size_t -- when len < sep_len the
         * subtraction wrapped to a huge unsigned value and the loop read
         * past the buffered data. */
        for (i = 0; i + sep_len <= len; i++)
        {
            if (memcmp(buffer+i, SEPERATOR, sep_len) == 0)
            {
                printf ("Found length: %d\n", i);

                /* Drop the first blob: it is the HTTP headers */
                if (found_count++ > 0)
                {
                    process_image (buffer, i);
                }

                /* shift the remainder down over the consumed blob */
                len = len - i - sep_len;
                memmove (buffer, buffer + i + sep_len, len);

                /* BUG FIX: the buffer just shifted under us -- restart
                 * the scan from the front instead of continuing with a
                 * now-stale index */
                i = -1;
            }
        }
    }
}
Example #11
0
/**
 * @brief Dequeue one frame from the capture device, convert it from
 *        YUV422 to YUV420 and append it to the output file.
 *
 * @param fd /dev/video device file descriptor
 * @param fp output stream (raw YUV, despite the "264 file" naming)
 *
 * @return 1 on success; fatal ioctl failures exit the process
 */
int read_frame(int fd, FILE *fp)
{
    printf("read_frame\n");
    struct v4l2_buffer buf;
    unsigned int i;

    bzero(&buf,sizeof(buf));
    buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    buf.memory = V4L2_MEMORY_MMAP;
    /* dequeue a filled buffer from the driver's queue */
    if(-1 == ioctl(fd,VIDIOC_DQBUF,&buf))
    {
        perror("Fail to ioctl 'VIDIOC_DQBUF'");
        exit(EXIT_FAILURE);
    }

    assert(buf.index < n_buffer);
//    memset(user_buf[4].start, 0, 1024);

    /* user_buf[4] is the scratch buffer that receives the YUV420 data */
    YUV422To420(user_buf[buf.index].start, user_buf[4].start, 640, 480);

//    fwrite(user_buf[buf.index].start, user_buf[buf.index].length, 1, fp);
    /* NOTE(review): writes the YUV422 buffer *length* for YUV420 data;
     * the converted frame should be 3/4 that size -- confirm intended */
    fwrite(user_buf[4].start, user_buf[buf.index].length, 1, fp);
    //usleep(500);


    /* dump the mapped buffer directly to a file (disabled) */
#if 0
    process_image(user_buf[buf.index].start,user_buf[buf.index].length);
#endif

    /* after writing the frame, put the user buffer back on the queue
     * so the capture can refill it */
    if(-1 == ioctl(fd,VIDIOC_QBUF,&buf))
    {
        perror("Fail to ioctl 'VIDIOC_QBUF'");
        exit(EXIT_FAILURE);
    }

    return 1;
}
Example #12
0
/* Dequeue one frame, process it, and give the buffer back to the
 * driver.  Returns 1 on success, 0 when the outgoing queue is empty
 * (EAGAIN); other dequeue failures exit via errno_exit(). */
static int read_frame(void)
{
    struct v4l2_buffer buf;

    CLEAR (buf);
    buf.type 	 = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    buf.memory    = V4L2_MEMORY_MMAP;
		// take one frame's buffer off the driver's outgoing queue
    if (xioctl(fd, VIDIOC_DQBUF, &buf) == -1) {
       switch (errno) {	
            case EAGAIN:		// outgoing queue is empty
               return 0;
            default:
	        errno_exit("VIDIOC_DQBUF");
        }
    }
    assert(buf.index < n_buffers);
    process_image(buffers[buf.index].start); // process the frame
    // put the used buffer back on the incoming queue
    // NOTE(review): this QBUF result is unchecked -- confirm acceptable
    xioctl(fd, VIDIOC_QBUF, &buf);
	
    return 1;
}
Example #13
0
/* Dequeue one frame from the device, hand the mapped buffer to
 * process_image(), then re-queue it.  Returns 1; fatal ioctl errors
 * exit the process. */
int read_frame(int fd)
{

	struct v4l2_buffer buf;
	//	unsigned int i;
	bzero(&buf,sizeof(buf));
	buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
	buf.memory = V4L2_MEMORY_MMAP;
	// dequeue a filled buffer from the driver's queue
	if(-1 == ioctl(fd,VIDIOC_DQBUF,&buf)){
		perror("Fail to ioctl 'VIDIOC_DQBUF'");
		exit(EXIT_FAILURE);
	}
	assert(buf.index < n_buffer);
	// forward the mapped frame (process address space) for processing
	process_image(user_buf[buf.index].start,user_buf[buf.index].length);
	// hand the buffer back so the driver can refill it
	if(-1 == ioctl(fd,VIDIOC_QBUF,&buf)){
		perror("Fail to ioctl 'VIDIOC_QBUF'");
		exit(EXIT_FAILURE);
	}
	return 1;
}
Example #14
0
/*
 * Dequeue one frame from the V4L2 device `fd`, pass the mmap'ed buffer
 * to process_image(), then hand the buffer back to the driver.
 *
 * Returns 1 on success; fatal ioctl failures terminate the process.
 * Relies on the globals usr_buf[] (mapped buffers) and n_buffer.
 */
int read_frame(int fd)
{
    struct v4l2_buffer buf;

    memset(&buf, 0, sizeof(buf));
    buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    buf.memory = V4L2_MEMORY_MMAP;

    /* take a filled buffer off the driver's outgoing queue */
    if(-1 == ioctl(fd, VIDIOC_DQBUF,&buf)){
        perror("Fail to ioctl 'VIDIOC_DQBUF'");
        exit(EXIT_FAILURE);
    }
 
    assert(buf.index < n_buffer);

    /* hand the mapped frame to the consumer */
    process_image(usr_buf[buf.index].start, usr_buf[buf.index].length);

    /* give the buffer back so the driver can refill it */
    if(-1 == ioctl(fd, VIDIOC_QBUF,&buf)){
        perror("Fail to ioctl 'VIDIOC_QBUF'");
        exit(EXIT_FAILURE);
    }
    return 1;
}
Example #15
0
// Build a tiled font atlas: record the tile-sheet parameters, map each
// character to its glyph index, then let process_image() cut the sheet.
TiledFont::TiledFont(Image* img, Size tile_size, unsigned int bg_key, int fontsize, bool monosized, char* charset) {
	// Cache the construction parameters on the instance.
	m_img = img;
	m_tilesize = tile_size;
	m_monosize = monosized;
	m_fontsize = fontsize;
	m_bg_key = bg_key;

	if (charset) {
		// Explicit charset: glyph i in the sheet renders charset[i].
		for (const char *p = charset; *p; ++p) {
			m_char_index[*p] = (int)(p - charset);
		}
	} else {
		// No charset given: identity mapping over all 256 byte values.
		for (int code = 0; code < 256; ++code) {
			m_char_index[code] = code;
		}
	}

	process_image();

	// Normalize the stored font size against the 13px reference.
	m_fontsize /= 13.0f;
}
Example #16
0
/* Dequeue one frame, process it, and re-queue the buffer.  Returns 0
 * on success, -1 when no frame is ready (EAGAIN) or whatever
 * errno_report() returns on other ioctl failures. */
static int read_frame(void) {
    struct v4l2_buffer buf;

    CLEAR(buf);
    buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    buf.memory = V4L2_MEMORY_MMAP;

    /* note: any nonzero xioctl return is treated as failure here */
    if (xioctl(cam_fd, VIDIOC_DQBUF, &buf)) {
        switch (errno) {
        case EAGAIN: return -1;

        case EIO:
        /* EIO could be ignored per spec; treated as fatal here */
        default: return errno_report("VIDIOC_DQBUF");
        }
    }

    process_image(cam_buffers[buf.index].start);

    /* hand the buffer back for refilling */
    if (xioctl(cam_fd, VIDIOC_QBUF, &buf) == -1) {
        return errno_report("VIDIOC_QBUF");
    }

    return 0;
}
Example #17
0
// Relative path
// Dispatch a file to the handler that matches its type; unrecognized
// files are rejected with -1.
static int process_file(char *filename, struct s_category *category)
{
	if (is_markdown_file(filename)) {
		return process_mkd(filename, category);
	}

	if (is_image_file(filename)) {
		return process_image(filename);
	}

	// TODO: document and music handlers (process_document,
	// process_music) plus add_header_footer are still unimplemented.

	return -1;
}
Example #18
0
/* Dequeue and process one frame using the configured I/O method (read,
 * mmap, or user pointers).  Returns 1 on success, 0 when no frame is
 * ready (EAGAIN); fatal errors terminate via errno_exit().
 * NOTE(review): the switch mixes IO_METHOD_* and V4L2_MEMORY_* constants
 * as labels on `io` -- presumably their values line up here, but the
 * labels should use one enum consistently; confirm against the
 * definition of `io`. */
static int read_frame(void)
{
	struct v4l2_buffer buf;
	int i;

	switch (io) {
	case IO_METHOD_READ:
		/* plain read() into the first (only) buffer */
		i = read(fd, buffers[0].start, buffers[0].length);
		if (i < 0) {
			switch (errno) {
			case EAGAIN:
				return 0;
			case EIO:
				/* Could ignore EIO, see spec. */
				/* fall through */
			default:
				errno_exit("read");
			}
		}
		process_image(buffers[0].start, i);
		break;

	case V4L2_MEMORY_MMAP:
		CLEAR(buf);
		buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
		buf.memory = V4L2_MEMORY_MMAP;

		if (xioctl(fd, VIDIOC_DQBUF, &buf) < 0) {
			switch (errno) {
			case EAGAIN:
				return 0;

			case EIO:
				/* Could ignore EIO, see spec. */

				/* fall through */

			default:
				errno_exit("VIDIOC_DQBUF");
			}
		}
		assert(buf.index < n_buffers);

		/* one-shot diagnostic: compare wall clock vs frame timestamp */
		if (display_time) {
			char buf_local[32], buf_frame[32];
			time_t ltime;

			time(&ltime);
			strftime(buf_local, sizeof buf_local,
				"%Y %H:%M:%S", localtime(&ltime));
			strftime(buf_frame, sizeof buf_frame,
				"%Y %H:%M:%S", localtime(&buf.timestamp.tv_sec));
			printf("time - cur: %s - frame: %s\n",
				buf_local, buf_frame);
			
			display_time = 0;
		}

		process_image(buffers[buf.index].start, buf.bytesused);

		if (xioctl(fd, VIDIOC_QBUF, &buf) < 0)
			errno_exit("VIDIOC_QBUF");
		/* FPS accounting: print frames/sec once per elapsed second */
		if (info) {
			struct timeval new_time;
			int d1, d2;

			gettimeofday(&new_time, 0);
			d1 = new_time.tv_sec - cur_time.tv_sec;
			if (d1 != 0) {
				d2 = new_time.tv_usec - cur_time.tv_usec;
				while (d2 < 0) {
					d2 += 1000000;
					d1--;
				}
#if 1
				printf("FPS: %5.2f\n",
					(float) info / (d1 + 0.000001 * d2));
#else
				printf("FPS: %5.2f (d:%d.%06d)\n",
					(float) info / (d1 + 0.000001 * d2),
					d1, d2);
#endif
				info = 0;
				cur_time = new_time;
			}
			info++;
		}
		break;
	case V4L2_MEMORY_USERPTR:
		CLEAR(buf);

		buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
		buf.memory = V4L2_MEMORY_USERPTR;

		if (xioctl(fd, VIDIOC_DQBUF, &buf) < 0) {
			switch (errno) {
			case EAGAIN:
				return 0;
			case EIO:
				/* Could ignore EIO, see spec. */
				/* fall through */
			default:
				errno_exit("VIDIOC_DQBUF");
			}
		}

		/* map the returned user pointer back to our buffer index */
		for (i = 0; i < n_buffers; ++i)
			if (buf.m.userptr == (unsigned long) buffers[i].start
			    && buf.length == buffers[i].length)
				break;
		assert(i < n_buffers);

		process_image((unsigned char *) buf.m.userptr,
				buf.bytesused);

		if (xioctl(fd, VIDIOC_QBUF, &buf) < 0)
			errno_exit("VIDIOC_QBUF");
		break;
	}
	return 1;
}
Example #19
0
/* Grab the next frame from the capture device (via the globals `buf`,
 * `buffers`, `w`, `h` filled by get_frame()), run it through
 * process_image(), and return the global destination buffer holding
 * the processed pixels. */
unsigned char *newframe()
{
    get_frame();
    process_image((unsigned char *)buffers[buf.index].start, buf.bytesused, w, h);
    return dst_buf;
}
Example #20
0
// Main function, defines the entry point for the program.
int main( int argc, char** argv )
{
    int scale = 2;
    // Structure for getting video from camera or avi
    CvCapture* capture = 0;
    // Images to capture the frame from video or camera or from file
    IplImage *frame = 0, *frame_copy = 0;
    // Used for calculations
    int optlen = strlen("--cascade=");
    // Input file name for avi or image file.
    const char* input_name;

    // Check for the correct usage of the command line
    if( argc > 1 && strncmp( argv[1], "--cascade=", optlen ) == 0 )
    {
        cascade_name = argv[1] + optlen;
        input_name = argc > 2 ? argv[2] : 0;
    } else if (strncmp(argv[1], "train", 5) == 0) {
	learn_eigenfaces();
	exit(0);
    } else if (strncmp(argv[1], "test", 4) == 0) {
	recognize_eigenfaces();
	exit(0);
    } else {
        fprintf( stderr,
        "Usage: facedetect --cascade=\"<cascade_path>\" [filename|camera_index]\n" );
        return -1;
        /*input_name = argc > 1 ? argv[1] : 0;*/
    }

    // Load the HaarClassifierCascade
    cascade = (CvHaarClassifierCascade*)cvLoad( cascade_name, 0, 0, 0 );
    
    // Check whether the cascade has loaded successfully. Else report and error and quit
    if( !cascade )
    {
        fprintf( stderr, "ERROR: Could not load classifier cascade\n" );
        return -1;
    }
   
    cascade_eyes = (CvHaarClassifierCascade*)cvLoad(cascade_eyes_name, 0, 0, 0 );
    if (!cascade_eyes) {
	fprintf(stderr, "ERROR: failed to load eye classifier cascade\n" );
	return -1;
    }
 
    char *ext = strrchr(input_name, '.');
    // Allocate the memory storage
    storage = cvCreateMemStorage(0);
    // Find whether to detect the object from file or from camera.
    if( !input_name || (isdigit(input_name[0]) && input_name[1] == '\0') ){
        capture = cvCaptureFromCAM( !input_name ? 0 : input_name[0] - '0' );
    } else if (ext && strncmp(ext, ".txt", 4) == 0) {
	capture = NULL;
    } else
        capture = cvCaptureFromAVI( input_name ); 

    // Create a new named window with title: result
    cvNamedWindow( "result", 1 );
    // Find if the capture is loaded successfully or not.

    // If loaded succesfully, then:
    if( capture )
    {
 
        // Capture from the camera.
        for(;;)
        {
            // Capture the frame and load it in IplImage
            if( !cvGrabFrame( capture ))
                break;
            frame = cvRetrieveFrame( capture, 0 );

            // If the frame does not exist, quit the loop
            if( !frame )
                break;

            if (!frame_copy) {
             	   printf("Allocate image\n");
		   frame_copy = cvCreateImage(cvSize(frame->width/2,frame->height/2),
                                   8, 3);
	    }
            cvResize(frame, frame_copy, CV_INTER_LINEAR);
 	    //cvCopy(frame, frame_copy,0);

            // Call the function to detect and draw the face
            //detect_and_draw( frame_copy );
	    process_image(frame_copy);
	    //cvShowImage("result", frame_copy);
            // Wait for a while before proceeding to the next frame
            cvWaitKey(1);
	    //if( cvWaitKey( 10 ) >= 0 )
            //    break;
        }

        // Release the images, and capture memory
        cvReleaseImage( &frame_copy );
	//cvReleaseImage( &frame_resized );
        cvReleaseCapture( &capture );
    }

    // If the capture is not loaded succesfully, then:
    else
    {
	still = 1;
        // Assume the image to be lena.jpg, or the input_name specified
        const char* filename = input_name ? input_name : (char*)"lena.jpg";

	IplImage* image = NULL;
	printf("%s\n", filename);
	if (strncmp(strrchr(filename, '.')+1, "txt", 3) != 0) {
        // Load the image from that filename
            image = cvLoadImage( filename, 1 );

        // If Image is loaded succesfully, then:
        //if( image )
        //{
            // Detect and draw the face
            //detect_and_draw( image );
	    process_image(image);
            // Wait for user input
            cvWaitKey(0);

            // Release the image memory
            cvReleaseImage( &image );
        }
        else
        {
	    printf("Not an image\n");
            /* assume it is a text file containing the
               list of the image filenames to be processed - one per line */
            FILE* f = fopen( filename, "rt" );
            if( f )
            {
                char buf[1000+1];

                // Get the line from the file
                while( fgets( buf, 1000, f ) )
                {

                    // Remove the spaces if any, and clean up the name
                    int len = (int)strlen(buf);
                    while( len > 0 && isspace(buf[len-1]) )
                        len--;
                    buf[len] = '\0';

                    // Load the image from the filename present in the buffer
                    image = cvLoadImage( buf, 1 );

                    // If the image was loaded succesfully, then:
                    if( image )
                    {
                        // Detect and draw the face from the image
                        //detect_and_draw( image );
                        process_image(image);
                        // Wait for the user input, and release the memory
                        cvWaitKey(0);
                        cvReleaseImage( &image );
                    }
                }
                // Close the file
                fclose(f);
            }
        }

    }
    
    // Destroy the window previously created with filename: "result"
    cvDestroyWindow("result");

    // return 0 to indicate successfull execution of the program
    return 0;
}
Example #21
0
/* Load a .ani cel list from stream `fp` -- either a plain text file or
 * a zip-packed one (detected by the 0x04034b50 "PK" header) -- load
 * every image it references, and assemble the cels into a DRAWABLE.
 * `length` bounds how many bytes of `fp` belong to this resource.
 * Returns the DRAWABLE, or NULL on allocation/load failure. */
void *
_GetCelData (uio_Stream *fp, DWORD length)
{
	int cel_total, cel_index, n;
	DWORD opos;
	char CurrentLine[1024], filename[PATH_MAX];
	TFB_Canvas *img;
	AniData *ani;
	DRAWABLE Drawable;
	uio_MountHandle *aniMount = 0;
	uio_DirHandle *aniDir = 0;
	uio_Stream *aniFile = 0;
	
	opos = uio_ftell (fp);

	{
		char *s1, *s2;
		char aniDirName[PATH_MAX];			
		const char *aniFileName;
		uint8 buf[4] = { 0, 0, 0, 0 };
		uint32 header;

		/* n = length of the directory prefix (including the final
		 * separator) of the current resource file's path; handles
		 * both '/' and '\\' separators */
		if (_cur_resfile_name == 0
				|| (((s2 = 0), (s1 = strrchr (_cur_resfile_name, '/')) == 0)
						&& (s2 = strrchr (_cur_resfile_name, '\\')) == 0))
		{
			n = 0;
		}
		else
		{
			if (s2 > s1)
				s1 = s2;
			n = s1 - _cur_resfile_name + 1;
		}

		/* peek at the first 4 bytes to detect a zip (little-endian) */
		uio_fread(buf, 4, 1, fp);
		header = buf[0] | (buf[1] << 8) | (buf[2] << 16) | (buf[3] << 24);
 		if (_cur_resfile_name && header == 0x04034b50)
		{
			// zipped ani file
			if (n)
			{
				strncpy (aniDirName, _cur_resfile_name, n - 1);
				aniDirName[n - 1] = 0;
				aniFileName = _cur_resfile_name + n;
			}
			else
			{
				strcpy(aniDirName, ".");
				aniFileName = _cur_resfile_name;
			}
			/* mount the zip and read the cel list from inside it */
			aniDir = uio_openDir (repository, aniDirName, 0);
			aniMount = uio_mountDir (repository, aniDirName, uio_FSTYPE_ZIP,
							aniDir, aniFileName, "/", autoMount,
							uio_MOUNT_RDONLY | uio_MOUNT_TOP,
							NULL);
			aniFile = uio_fopen (aniDir, aniFileName, "r");
			opos = 0;
			n = 0;
		}
		else
		{
			// unpacked ani file
			/* keep the directory prefix in filename[]; cel names are
			 * sscanf'ed in at filename[n] below */
			strncpy (filename, _cur_resfile_name, n);
			aniFile = fp;
			aniDir = contentDir;
		}
	}

	/* first pass: count lines to size the cel arrays */
	cel_total = 0;
	uio_fseek (aniFile, opos, SEEK_SET);
	while (uio_fgets (CurrentLine, sizeof (CurrentLine), aniFile))
	{
		++cel_total;
	}

	img = HMalloc (sizeof (TFB_Canvas) * cel_total);
	ani = HMalloc (sizeof (AniData) * cel_total);
	if (!img || !ani)
	{
		log_add (log_Warning, "Couldn't allocate space for '%s'", _cur_resfile_name);
		if (aniMount)
		{
			uio_fclose(aniFile);
			uio_closeDir(aniDir);
			uio_unmountDir(aniMount);
		}
		HFree (img);
		HFree (ani);
		return NULL;
	}

	/* second pass: parse each cel line and load its image */
	cel_index = 0;
	uio_fseek (aniFile, opos, SEEK_SET);
	while (uio_fgets (CurrentLine, sizeof (CurrentLine), aniFile) && cel_index < cel_total)
	{
		sscanf (CurrentLine, "%s %d %d %d %d", &filename[n], 
			&ani[cel_index].transparent_color, &ani[cel_index].colormap_index, 
			&ani[cel_index].hotspot_x, &ani[cel_index].hotspot_y);
	
		img[cel_index] = TFB_DrawCanvas_LoadFromFile (aniDir, filename);
		if (img[cel_index] == NULL)
		{
			const char *err;

			err = TFB_DrawCanvas_GetError ();
			log_add (log_Warning, "_GetCelData: Unable to load image!");
			if (err != NULL)
				log_add (log_Warning, "Gfx Driver reports: %s", err);
		}
		else
		{
			++cel_index;
		}

		/* stop once we run past this resource's byte range */
		if ((int)uio_ftell (aniFile) - (int)opos >= (int)length)
			break;
	}

	Drawable = NULL;
	if (cel_index && (Drawable = AllocDrawable (cel_index)))
	{
		/* NOTE(review): this inner !Drawable branch is unreachable --
		 * the enclosing condition already guarantees AllocDrawable()
		 * returned non-NULL; confirm before removing */
		if (!Drawable)
		{
			while (cel_index--)
				TFB_DrawCanvas_Delete (img[cel_index]);

			HFree (Drawable);
			Drawable = NULL;
		}
		else
		{
			FRAME FramePtr;

			Drawable->Flags = WANT_PIXMAP;
			Drawable->MaxIndex = cel_index - 1;

			/* attach cels back-to-front into the frame array */
			FramePtr = &Drawable->Frame[cel_index];
			while (--FramePtr, cel_index--)
				process_image (FramePtr, img, ani, cel_index);
		}
	}

	if (Drawable == NULL)
		log_add (log_Warning, "Couldn't get cel data for '%s'",
				_cur_resfile_name);

	/* unwind the zip mount if we created one */
	if (aniMount)
	{
		uio_fclose(aniFile);
		uio_closeDir(aniDir);
		uio_unmountDir(aniMount);
	}

	HFree (img);
	HFree (ani);
	return Drawable;
}
Example #22
0
int main(int argc, char *argv[])
{
#ifndef EMBEDED_X210  //PC platform
    const SDL_VideoInfo *info;
    char driver[128];
    SDL_Surface *pscreen;
    SDL_Overlay *overlay;
    SDL_Rect drect;
    SDL_Event sdlevent;
    SDL_Thread *mythread;
    SDL_mutex *affmutex;
    Uint32 currtime;
    Uint32 lasttime;
#endif
    int status;

    unsigned char *p = NULL;
    int hwaccel = 0;
    const char *videodevice = NULL;
    const char *mode = NULL;
    int format = V4L2_PIX_FMT_MJPEG;
    int i;
    int grabmethod = 1;
    int width = 320;
    int height = 240;
    int fps = 15;
    unsigned char frmrate = 0;
    char *avifilename = NULL;
    int queryformats = 0;
    int querycontrols = 0;
    int readconfigfile = 0;
    char *separateur;
    char *sizestring = NULL;
    char *fpsstring  = NULL;
    int enableRawStreamCapture = 0;
    int enableRawFrameCapture = 0;
    char * pRGBData=NULL;



    printf("luvcview version %s \n", version);
    for (i = 1; i < argc; i++)
    {
        /* skip bad arguments */
        if (argv[i] == NULL || *argv[i] == 0 || *argv[i] != '-') {
            continue;
        }
        if (strcmp(argv[i], "-d") == 0) {
            if (i + 1 >= argc) {
                printf("No parameter specified with -d, aborting.\n");
                exit(1);
            }
            videodevice = strdup(argv[i + 1]);
        }
        if (strcmp(argv[i], "-g") == 0) {
            /* Ask for read instead default  mmap */
            grabmethod = 0;
        }
        if (strcmp(argv[i], "-w") == 0) {
            /* disable hw acceleration */
            hwaccel = 1;
        }
        if (strcmp(argv[i], "-f") == 0) {
            if (i + 1 >= argc) {
                printf("No parameter specified with -f, aborting.\n");
                exit(1);
            }
            mode = strdup(argv[i + 1]);

            if (strncmp(mode, "yuv", 3) == 0) {
                format = V4L2_PIX_FMT_YUYV;

            } else if (strncmp(mode, "jpg", 3) == 0) {
                format = V4L2_PIX_FMT_MJPEG;

            } else {
                format = V4L2_PIX_FMT_MJPEG;

            }
        }
        if (strcmp(argv[i], "-s") == 0) {
            if (i + 1 >= argc) {
                printf("No parameter specified with -s, aborting.\n");
                exit(1);
            }

            sizestring = strdup(argv[i + 1]);

            width = strtoul(sizestring, &separateur, 10);
            if (*separateur != 'x') {
                printf("Error in size use -s widthxheight \n");
                exit(1);
            } else {
                ++separateur;
                height = strtoul(separateur, &separateur, 10);
                if (*separateur != 0)
                    printf("hmm.. dont like that!! trying this height \n");
                printf(" size width: %d height: %d \n", width, height);
            }
        }
        if (strcmp(argv[i], "-i") == 0){
            if (i + 1 >= argc) {
                printf("No parameter specified with -i, aborting. \n");
                exit(1);
            }
            fpsstring = strdup(argv[i + 1]);
            fps = strtoul(fpsstring, &separateur, 10);
            printf(" interval: %d fps \n", fps);
        }
        if (strcmp(argv[i], "-S") == 0) {
            /* Enable raw stream capture from the start */
            enableRawStreamCapture = 1;
        }
        if (strcmp(argv[i], "-c") == 0) {
            /* Enable raw frame capture for the first frame */
            enableRawFrameCapture = 1;
        }
        if (strcmp(argv[i], "-C") == 0) {
            /* Enable raw frame stream capture from the start*/
            enableRawFrameCapture = 2;
        }
        if (strcmp(argv[i], "-o") == 0) {
            /* set the avi filename */
            if (i + 1 >= argc) {
                printf("No parameter specified with -o, aborting.\n");
                exit(1);
            }
            avifilename = strdup(argv[i + 1]);
        }
        if (strcmp(argv[i], "-L") == 0) {
            /* query list of valid video formats */
            queryformats = 1;
        }
        if (strcmp(argv[i], "-l") == 0) {
            /* query list of valid video formats */
            querycontrols = 1;
        }

        if (strcmp(argv[i], "-r") == 0) {
            /* query list of valid video formats */
            readconfigfile = 1;
        }
        if (strcmp(argv[i], "-h") == 0) {
            printf("usage: uvcview [-h -d -g -f -s -i -c -o -C -S -L -l -r] \n");
            printf("-h	print this message \n");
            printf("-d	/dev/videoX       use videoX device\n");
            printf("-g	use read method for grab instead mmap \n");
            printf("-w	disable SDL hardware accel. \n");
            printf("-f	video format  default jpg  others options are yuv jpg \n");
            printf("-i	fps           use specified frame interval \n");
            printf("-s	widthxheight      use specified input size \n");
            printf("-c	enable raw frame capturing for the first frame\n");
            printf("-C	enable raw frame stream capturing from the start\n");
            printf("-S	enable raw stream capturing from the start\n");
            printf("-o	avifile  create avifile, default video.avi\n");
            printf("-L	query valid video formats\n");
            printf("-l	query valid controls and settings\n");
            printf("-r	read and set control settings from luvcview.cfg\n");
            exit(0);
        }
    }

#ifndef   EMBEDED_X210 //PC platform

    /************* Test SDL capabilities ************/
    if (SDL_Init(SDL_INIT_VIDEO) < 0) {
        fprintf(stderr, "Couldn't initialize SDL: %s\n", SDL_GetError());
        exit(1);
    }
    
    /* For this version, we'll be save and disable hardware acceleration */
    if(hwaccel)
    {
        if ( ! getenv("SDL_VIDEO_YUV_HWACCEL") )
        {
            putenv("SDL_VIDEO_YUV_HWACCEL=0");
        }
    }

    if (SDL_VideoDriverName(driver, sizeof(driver)))
    {
        printf("Video driver: %s\n", driver);
    }
    info = SDL_GetVideoInfo();

    if (info->wm_available) {
        printf("A window manager is available\n");
    }
    if (info->hw_available) {
        printf("Hardware surfaces are available (%dK video memory)\n",
               info->video_mem);
        SDL_VIDEO_Flags |= SDL_HWSURFACE;
    }
    if (info->blit_hw) {
        printf("Copy blits between hardware surfaces are accelerated\n");
        SDL_VIDEO_Flags |= SDL_ASYNCBLIT;
    }
    if (info->blit_hw_CC) {
        printf
                ("Colorkey blits between hardware surfaces are accelerated\n");
    }
    if (info->blit_hw_A) {
        printf("Alpha blits between hardware surfaces are accelerated\n");
    }
    if (info->blit_sw) {
        printf
                ("Copy blits from software surfaces to hardware surfaces are accelerated\n");
    }
    if (info->blit_sw_CC) {
        printf
                ("Colorkey blits from software surfaces to hardware surfaces are accelerated\n");
    }
    if (info->blit_sw_A) {
        printf
                ("Alpha blits from software surfaces to hardware surfaces are accelerated\n");
    }
    if (info->blit_fill) {
        printf("Color fills on hardware surfaces are accelerated\n");
    }



    if (!(SDL_VIDEO_Flags & SDL_HWSURFACE))
        SDL_VIDEO_Flags |= SDL_SWSURFACE;

#endif

    if (videodevice == NULL || *videodevice == 0) {
        videodevice = "/dev/video0";
    }

    if (avifilename == NULL || *avifilename == 0) {
        avifilename = "video.avi";
    }

    videoIn = (struct vdIn *) calloc(1, sizeof(struct vdIn));
    if ( queryformats ) {
        /* if we're supposed to list the video formats, do that now and go out */
        check_videoIn(videoIn,(char *) videodevice);
        free(videoIn);
#ifndef EMBEDED_X210
        SDL_Quit();
#endif
        exit(1);
    }

    if (init_videoIn(videoIn, (char *) videodevice, width, height, fps, format, grabmethod, avifilename) < 0)
        exit(1);
    /* if we're supposed to list the controls, do that now */
    if ( querycontrols )
        enum_controls(videoIn->fd);
    
    /* if we're supposed to read the control settings from a configfile, do that now */
    if ( readconfigfile )
        load_controls(videoIn->fd);


#ifdef EMBEDED_X210
#ifdef SOFT_COLOR_CONVERT
    init_framebuffer();
#else
    x6410_init_Draw(videoIn->width,videoIn->height);
#endif

#else
    pscreen = SDL_SetVideoMode(videoIn->width, videoIn->height+30 , 0,SDL_VIDEO_Flags);
    overlay =SDL_CreateYUVOverlay(videoIn->width, videoIn->height+30 , SDL_YUY2_OVERLAY, pscreen);
    p = (unsigned char *) overlay->pixels[0];

    drect.x = 0;
    drect.y = 0;
    drect.w =pscreen->w;
    drect.h = pscreen->h;

#endif

    if (enableRawStreamCapture)
    {
        videoIn->captureFile = fopen("stream.raw", "wb");
        if(videoIn->captureFile == NULL) {
            perror("Unable to open file for raw stream capturing");
        } else {
            printf("Starting raw stream capturing to stream.raw ...\n");
        }
    }
    if (enableRawFrameCapture)
        videoIn->rawFrameCapture = enableRawFrameCapture;

    initLut();

#ifndef EMBEDED_X210
    SDL_WM_SetCaption(title_act[A_VIDEO].title, NULL);
    lasttime = SDL_GetTicks();
    creatButt(videoIn->width, 32);
    SDL_LockYUVOverlay(overlay);
    memcpy(p + (videoIn->width * (videoIn->height) * 2), YUYVbutt,
           videoIn->width * 64);
    SDL_UnlockYUVOverlay(overlay);

    /* initialize thread data */
    ptdata.ptscreen = &pscreen;
    ptdata.ptvideoIn = videoIn;
    ptdata.ptsdlevent = &sdlevent;
    ptdata.drect = &drect;
    affmutex = SDL_CreateMutex();
    ptdata.affmutex = affmutex;
    mythread = SDL_CreateThread(eventThread, (void *) &ptdata);
#endif



    pRGBData = (unsigned char *)malloc(videoIn->width*videoIn->width*4*sizeof(char));
    if(pRGBData==NULL)
    {
        return ;
	}
    /* main big loop */
    while (videoIn->signalquit)
    {
#ifndef EMBEDED_X210
        currtime = SDL_GetTicks();
        if (currtime - lasttime > 0) {
            frmrate = 1000/(currtime - lasttime);
        }
        lasttime = currtime;
#endif
        if (uvcGrab(videoIn) < 0) {
            printf("Error grabbing \n");
            break;
        }

        /* if we're grabbing video, show the frame rate */
        if (videoIn->toggleAvi)
            printf("\rframe rate: %d     ",frmrate);

#ifndef EMBEDED_X210
        SDL_LockYUVOverlay(overlay);
        memcpy(p, videoIn->framebuffer, videoIn->width * (videoIn->height) * 2);
        SDL_UnlockYUVOverlay(overlay);
        SDL_DisplayYUVOverlay(overlay, &drect);
#endif

#ifdef EMBEDED_X210
#ifdef SOFT_COLOR_CONVERT
        // yuv to rgb565 ,and to frambuffer
        process_image(videoIn->framebuffer,fbp,videoIn->width,videoIn->height,vinfo,finfo);
    
    //    convertYUYVtoRGB565(videoIn->framebuffer,pRGBData,videoIn->width,videoIn->height);

   //   Pyuv422torgb24(videoIn->framebuffer, pRGBData, videoIn->width, videoIn->height);
    //    memcpy(fbp,pRGBData,videoIn->width*videoIn->height*2);
     
     

#else   //X6410 post processor convert yuv to rgb,X210 not suport now.

        /*
        memcpy(pInbuffer, videoIn->framebuffer, videoIn->width * (videoIn->height) * 2);

        ioctl(dev_fb0, GET_FB_INFO, &fb_info);

        pp_param.SrcFrmSt = ioctl(dev_pp, S3C_PP_GET_RESERVED_MEM_ADDR_PHY); //must be  physical adress
        pp_param.DstFrmSt = fb_info.map_dma_f1; //must be physical adress

        ioctl(dev_pp, S3C_PP_SET_PARAMS, &pp_param);
        ioctl(dev_pp, S3C_PP_SET_DST_BUF_ADDR_PHY, &pp_param);
        ioctl(dev_pp, S3C_PP_SET_SRC_BUF_ADDR_PHY, &pp_param);
        ioctl(dev_pp, S3C_PP_START);
        */
#endif
#endif
        if (videoIn->getPict)
        {
            switch(videoIn->formatIn){
            case V4L2_PIX_FMT_MJPEG:
                get_picture(videoIn->tmpbuffer,videoIn->buf.bytesused);
                break;
            case V4L2_PIX_FMT_YUYV:
                get_pictureYV2(videoIn->framebuffer,videoIn->width,videoIn->height);
                break;
            default:
                break;
            }
            videoIn->getPict = 0;
            printf("get picture !\n");
        }

#ifndef EMBEDED_X210
        SDL_LockMutex(affmutex);
        ptdata.frmrate = frmrate;
        SDL_WM_SetCaption(videoIn->status, NULL);
        SDL_UnlockMutex(affmutex);
#endif

#ifdef  EMBEDED_X210
        usleep(10);
#else
        SDL_Delay(10);
#endif


    }
#ifndef EMBEDED_X210
    SDL_WaitThread(mythread, &status);
    SDL_DestroyMutex(affmutex);
#endif
    /* if avifile is defined, we made a video: compute the exact fps and
       set it in the video */
    if (videoIn->avifile != NULL) {
        float fps=(videoIn->framecount/(videoIn->recordtime/1000));
        fprintf(stderr,"setting fps to %f\n",fps);
        AVI_set_video(videoIn->avifile, videoIn->width, videoIn->height,
                      fps, "MJPG");
        AVI_close(videoIn->avifile);
    }

    close_v4l2(videoIn);

#ifdef EMBEDED_X210
#ifdef SOFT_COLOR_CONVERT
    close_frambuffer();
#else
    x6410_DeInit_Draw();
#endif

#endif
    free(pRGBData);
    free(videoIn);
    destroyButt();
    freeLut();
    printf(" Clean Up done Quit \n");
#ifndef EMBEDED_X210
    SDL_Quit();
#endif
}
Example #23
0
int main(int argc, char **argv)
{
	// Batch image editor: applies brightness/gamma adjustment to every .png
	// in ./img that does not already have an edited copy under ./img/edit,
	// optionally using a worker-thread pool.
	DIR *imgdir = opendir("img/");
	DIR *editdir = opendir("img/edit");
	struct dirent *file;
	std::string outfile;
	if (imgdir == NULL)
	{
		std::cerr << "Can't open ./img." << std::endl;
		exit(1);
	}
	if (editdir == NULL)
	{
		std::cerr << "Can't open ./img/edit." << std::endl;
		exit(1);
	}
	closedir(editdir);
	if (argc < 2)
	{
		std::cerr << "Usage: " << argv[0] << " [brightness] <brightness> [gamma] <gamma>" << std::endl;
		exit(1);
	}
	double brightness = 1.0;
	double gamma = 1.0;
	unsigned int threads = 1;
	for (int i = 1; i < argc; i++)
	{
		if (strcmp("brightness", argv[i]) == 0)
		{
			if (argc > i + 1)
			{
				brightness = strtod(argv[i + 1], NULL);
				assert(brightness > 0);
			}
		}
		if (strcmp("gamma", argv[i]) == 0)
		{
			// BUGFIX: guard against a missing value. argv[argc] is NULL, so
			// reading argv[i + 1] unguarded and passing it to strtod() was
			// undefined behavior when "gamma" was the last argument.
			if (argc > i + 1)
			{
				gamma = strtod(argv[i + 1], NULL);
				assert(gamma > 0);
			}
		}
		if (strcmp("threads", argv[i]) == 0)
		{
			// BUGFIX: same missing-value guard as above for "threads".
			if (argc > i + 1)
			{
				threads = strtoul(argv[i + 1], NULL, 0);
				assert(threads > 0);
			}
		}
	}
	FreeImage_Initialise();
	std::string temp;
	if (threads == 1)
	{
		// Single-threaded path: process files one at a time as we scan.
		while ((file = readdir(imgdir)) != NULL)
		{
			if (ispng(file -> d_name))
			{
				// Skip files that already have an edited counterpart.
				gen_edit_filename(file -> d_name, temp);
				if (!ispng(temp))
				{
					std::cout << "Processing " << file -> d_name << std::endl;
					gen_edit_filename(file -> d_name, outfile);
					if (process_image(file -> d_name, outfile.c_str(), brightness, gamma))
					{
						std::cout << "Saved " << outfile << std::endl;
					}
				}
			}
		}
	}
	else
	{
		// Multi-threaded path: collect all .png names into a shared queue,
		// then let process_threaded() fan the work out.
		std::queue<std::string> input;
		std::mutex lock;
		while ((file = readdir(imgdir)) != NULL)
		{
			if (ispng(file -> d_name))
			{
				input.push(file -> d_name);
			}
		}
		process_threaded(&input, &lock, brightness, gamma, threads);
	}
	closedir(imgdir);
	FreeImage_DeInitialise();
	return 0;
}
Example #24
0
/*
 * Grab a single frame from the camera file descriptor `fd` and hand the raw
 * bytes to process_image(start, len, image).
 *
 * Waits up to 5 seconds for `fd` to become readable, then fetches one frame
 * according to the configured I/O method (plain read(), memory-mapped
 * buffers, or user-pointer buffers).  Returns without producing a frame on
 * EINTR or EAGAIN; exits the process on a select timeout or any
 * unrecoverable read/ioctl error.
 *
 * NOTE(review): `fd`, `io`, `buffers` and `n_buffers` are file-scope state
 * initialized elsewhere -- presumably by the capture setup code; verify there.
 */
void usb_cam_camera_grab_mjpeg(std::vector<unsigned char>* image)
{
  fd_set fds;
  struct timeval tv;
  int r;

  FD_ZERO (&fds);
  FD_SET (fd, &fds);

  /* Timeout. */
  tv.tv_sec = 5;
  tv.tv_usec = 0;

  r = select(fd+1, &fds, NULL, NULL, &tv);

  if (-1==r) {
    if (EINTR==errno)
      return;  /* interrupted by a signal: silently drop this frame */

    errno_exit("select");
  }

  if (0==r) {
    ROS_ERROR("select timeout\n");
    exit(EXIT_FAILURE);
  }

  struct v4l2_buffer buf;
  unsigned int i;
  int len;

  switch (io) {
  case IO_METHOD_READ:
    /* Blocking read() directly into the single preallocated buffer. */
    len = read(fd, buffers[0].start, buffers[0].length);
    if (len==-1) {
      switch (errno) {
      case EAGAIN:
        return;

      case EIO:
        /* Could ignore EIO, see spec. */

        /* fall through */

      default:
        errno_exit("read");
      }
    }

    process_image(buffers[0].start, len, image);

    break;

  case IO_METHOD_MMAP:
    /* Dequeue a filled driver buffer, process it, then requeue it. */
    CLEAR (buf);

    buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    buf.memory = V4L2_MEMORY_MMAP;

    if (-1==xioctl(fd, VIDIOC_DQBUF, &buf)) {
      switch (errno) {
      case EAGAIN:
        return;

      case EIO:
        /* Could ignore EIO, see spec. */

        /* fall through */

      default:
        errno_exit("VIDIOC_DQBUF");
      }
    }

    assert (buf.index < n_buffers);
    len = buf.bytesused;
    process_image(buffers[buf.index].start, len, image);

    /* Give the buffer back to the driver so it can be refilled. */
    if (-1==xioctl(fd, VIDIOC_QBUF, &buf))
      errno_exit("VIDIOC_QBUF");

    break;

  case IO_METHOD_USERPTR:
    CLEAR (buf);

    buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    buf.memory = V4L2_MEMORY_USERPTR;

    if (-1==xioctl(fd, VIDIOC_DQBUF, &buf)) {
      switch (errno) {
      case EAGAIN:
        return;

      case EIO:
        /* Could ignore EIO, see spec. */

        /* fall through */

      default:
        errno_exit("VIDIOC_DQBUF");
      }
    }

    /* The driver returns a raw user pointer; map it back to our buffer
       bookkeeping entry to find which one was filled. */
    for(i = 0; i<n_buffers; ++i)
      if (buf.m.userptr==(unsigned long) buffers[i].start&&buf.length==buffers[i].length)
        break;

    assert (i < n_buffers);
    len = buf.bytesused;
    process_image((void *) buf.m.userptr, len, image);

    if (-1==xioctl(fd, VIDIOC_QBUF, &buf))
      errno_exit("VIDIOC_QBUF");

    break;
  }
}
Example #25
0
/*
 * Dequeue and process one frame from `dev`.
 *
 * Return value convention (callers depend on it):
 *   - EAGAIN        : no frame is ready yet (non-blocking retry expected)
 *   - 0             : frame consumed via the read() path
 *   - buffer index  : frame consumed via the mmap path (index is >= 0)
 * Exits the process on any unrecoverable read/ioctl error, or if dev->io is
 * a method this function does not handle.
 */
static int
read_frame(struct device* dev)
{
  struct v4l2_buffer buf;

  switch (dev->io) {
  case IO_METHOD_READ:
    if (-1 == read(dev->fd, dev->buffers[0].start, dev->buffers[0].length)) {
      switch (errno) {
      case EAGAIN:
        return EAGAIN;

      case EIO:
        /* Could ignore EIO, see spec. */

        /* fall through */

      default:
        errno_exit("read");
      }
    }

    //process_image(dev, 0);

    return 0;
    break;

  case IO_METHOD_MMAP:
    CLEAR(buf);

    buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    buf.memory = V4L2_MEMORY_MMAP;

    if (-1 == xioctl(dev->fd, VIDIOC_DQBUF, &buf)) {
      switch (errno) {
      case EAGAIN:
        return EAGAIN;

      case EIO:
        /* Could ignore EIO, see spec. */

        /* fall through */

      default:
        errno_exit("VIDIOC_DQBUF");
      }
    }

    // time of DQ
    //printf("timestamp %s buf: %d %ld.%03ld\n", dev->dev_name, buf.index, buf.timestamp.tv_sec, buf.timestamp.tv_usec/1000);
    /* Remember when the driver captured this buffer. */
    dev->buffers[buf.index].timestamp = buf.timestamp;
    assert(buf.index < dev->n_buffers);

    process_image(dev, buf.index);

    /* NOTE: the buffer is NOT requeued here; presumably the caller requeues
       buf.index after it is done with the data -- verify at call sites. */
    return buf.index;
    break;
  }
  /* Unhandled I/O method (e.g. user-pointer): treated as a fatal
     configuration error. */
  exit(EXIT_FAILURE);
  return -1;
}
Example #26
0
int main(int argc, char **argv)
{
    // Configure left page.
    std::map<int, CvPoint2D32f> left_dst_markers;
    left_dst_markers[0] = cvPoint2D32f(0.00, 0.00);
    left_dst_markers[1] = cvPoint2D32f(6.00, 0.00);
    left_dst_markers[2] = cvPoint2D32f(6.00, 9.50);
    left_dst_markers[3] = cvPoint2D32f(0.00, 9.50);
    LayoutInfo left_layout;
    left_layout.page_left = 0.50;
    left_layout.page_top = 0.25;
    left_layout.page_right = 6.30;
    left_layout.page_bottom = 9.20;
    left_layout.dpi = 600.0;

    // Configure right page.
    std::map<int, CvPoint2D32f> right_dst_markers;
    right_dst_markers[4] = cvPoint2D32f(0.00, 0.00);
    right_dst_markers[5] = cvPoint2D32f(6.00, 0.00);
    right_dst_markers[6] = cvPoint2D32f(6.00, 9.50);
    right_dst_markers[7] = cvPoint2D32f(0.00, 9.50);
    LayoutInfo right_layout;
    right_layout.page_left = -0.30;
    right_layout.page_top = 0.25;
    right_layout.page_right = 5.50;
    right_layout.page_bottom = 9.20;
    right_layout.dpi = 600.0;

    // Process if an input image is supplied; otherwise, open a webcam for
    // debugging.
    if (argc > 3) {
        IplImage *src_img = cvLoadImage(argv[1]);
        if (src_img == NULL) {
            std::cerr << "Failed to load the source image specified.\n";
            return 1;
        }

        BookImage book_img(src_img);

        IplImage *left_img
                = book_img.create_page_image(left_dst_markers, left_layout);
        if (left_img != NULL) {
            cvSaveImage(argv[2], left_img);
            cvReleaseImage(&left_img);
        }

        IplImage *right_img
                = book_img.create_page_image(right_dst_markers, right_layout);
        if (right_img != NULL) {
            cvSaveImage(argv[3], right_img);
            cvReleaseImage(&right_img);
        }

        cvReleaseImage(&src_img);
    } else {
        // Create windows.
        cvNamedWindow("Source", 0);
        cvResizeWindow("Source", 480, 640);

        left_layout.dpi = 100;
        right_layout.dpi = 100;

        // Open webcam.
        CvCapture* capture = cvCreateCameraCapture(0);
        if (!capture) {
            std::cerr << "Failed to load the camera device.\n";
            return 1;
        }
        const double scale = 1.0;
        cvSetCaptureProperty(capture, CV_CAP_PROP_FRAME_WIDTH, 1600 * scale);
        cvSetCaptureProperty(capture, CV_CAP_PROP_FRAME_HEIGHT, 1200 * scale);

        while (cvWaitKey(10) < 0) {
            IplImage *src_img = cvQueryFrame(capture);
            cvShowImage("Source", src_img);
            process_image(src_img,
                    left_dst_markers, left_layout,
                    right_dst_markers, right_layout);
        }
    }

    return 0;
}
Example #27
0
/*
 *  ======== main ========
 */
int main(int argc, char * argv[])
{
    /*
     * gencmbelf: merges two Cortex-M3 input ELF files (-s core0, -a core1)
     * into one combined output ELF (-o), optionally stamping name:addr:size
     * "tag" sections given as extra positional arguments.
     */
    int status = 0;
    struct stat st;
    u32 size = 0;
    int i, j, o;
    char *elf_files[] = {NULL, NULL, NULL};
    int num_files = sizeof(elf_files) / sizeof(elf_files[0]);
    char *tokenstr;

    printf("###############################################################\n");
    printf("                     GENCMBELF : %s    \n", VERSION);
    printf("###############################################################\n");

    /* process arguments */
    while ((o = getopt (argc, argv, ":s:a:o:")) != -1) {
        switch (o) {
            case 's':
                elf_files[0] = optarg;
                break;
            case 'a':
                elf_files[1] = optarg;
                break;
            case 'o':
                elf_files[2] = optarg;
                break;
            case ':':
                status = -1;
                printf("Option -%c requires an operand\n", optopt);
                break;
            case '?':
                status = -1;
                printf("Unrecognized option: -%c\n", optopt);
                break;
        }
    }

    /* Remaining positional args: either .xem3 ELF files filling any slot not
     * already set by -s/-a/-o, or name:addr:size tag descriptors. */
    for (i = 0, j = optind; j < argc; j++) {
        while (i < num_files && elf_files[i]) {
            i++;
        }
        if (strstr(argv[j], ".xem3")) {
            if (i == num_files) {
                print_help_and_exit();
            }
            elf_files[i++] = argv[j];
        }
        else {
            if (num_tags == MAX_TAGS) {
                print_help_and_exit();
            }
            tag_name[num_tags] = strtok(argv[j], ":");
            tokenstr = strtok(NULL, ":");
            if (!tokenstr) {
                print_help_and_exit();
            }
            tag_addr[num_tags] = strtoll(tokenstr, NULL, 16);
            tokenstr = strtok(NULL, ":");
            if (!tokenstr) {
                print_help_and_exit();
            }
            tag_size[num_tags] = strtoll(tokenstr, NULL, 16);

            DEBUG_PRINT("found tag %d: name '%s' addr 0x%x size %d\n", num_tags,
                    tag_name[num_tags], tag_addr[num_tags], tag_size[num_tags]);
            num_tags++;
        }
    }

    /* All three files are required and must be distinct. */
    if (status || !elf_files[0] || !elf_files[1] || !elf_files[2]) {
        print_help_and_exit();
    }

    if ((!strcmp(elf_files[0], elf_files[1])) ||
        (!strcmp(elf_files[0], elf_files[2])) ||
        (!strcmp(elf_files[1], elf_files[2]))) {
        print_help_and_exit();
    }

    DEBUG_PRINT("\nCore0 File: %s, Core1 File: %s, Output File: %s\n",
                                    elf_files[0], elf_files[1], elf_files[2]);
    status = prepare_file(elf_files[0], INPUT_FILE, &core0_info);
    if (status) {
        printf("\nError preparing file: %s\n", elf_files[0]);
        goto finish0;
    }

    status = prepare_file(elf_files[1], INPUT_FILE, &core1_info);
    if (status) {
        printf("\nError preparing file: %s\n", elf_files[1]);
        goto finish1;
    }

    status = prepare_file(elf_files[2], OUTPUT_FILE, &cores_info);
    if (status) {
        printf("\nError preparing file: %s\n", elf_files[2]);
        goto done;
    }

    status = process_image();
    if (status) {
        printf("\nError generating output file: %s\n", elf_files[2]);
        goto done;
    }
    rewind(cores_info.fp);
    fstat(fileno(cores_info.fp), &st);
    size = st.st_size;

done:
    /* BUGFIX: prepare_file() can fail before opening the output stream,
     * leaving cores_info.fp NULL; fclose(NULL) is undefined behavior. */
    if (cores_info.fp) {
        fclose(cores_info.fp);
    }
    if (cores_info.data) {
        free(cores_info.data);
    }
    cores_info.fp = NULL;
    cores_info.data = NULL;
finish1:
    printf("\nFinalizing input ELF file: %s of size: %d\n", elf_files[1],
                core1_info.size);
    finalize_file(&core1_info, status);
    /* NOTE(review): resetting status here makes main() return 0 even when
     * process_image() failed -- confirm whether callers rely on the exit
     * code before changing this. */
    status = 0;
finish0:
    printf("Finalizing input ELF file: %s of size: %d\n", elf_files[0],
                core0_info.size);
    finalize_file(&core0_info, status);

    if (size) {
        /* BUGFIX: size is an unsigned u32; use %u instead of %d. */
        printf("\nProcessed Output ELF file: %s of size: %u\n\n", elf_files[2],
                        size);
    }

    return status;
}
// Interactive manual-control loop for the submarine.
// Reads single-key commands: movement/actuator keys, task launchers
// ('0'-'5', '8', '9', 'm', 'M'), and vision-mode selection ('v' then a
// digit).  Loops until 'q', then surfaces the sub and powers it off.
void ManualOperation::work()
{
#ifndef DEBUG_FRAME_BY_FRAME
  // Turn off display by default
  if (image_input->can_display()) {
    mvWindow::setShowImage(show_raw_images);
  }
#else
  // Show first image
  process_image();
#endif

  display_start_message();

  // Take keyboard commands
  bool loop = true;
  while (loop) {
    char c = CharacterStreamSingleton::get_instance().wait_key(WAIT_KEY_IN_MS);

    // Print yaw and depth unless delayed by another message
    if (c == '\0') {
      if (count < 0) {
        count++;
      } else if (mode != VISION) {
        // Throttle the attitude printout to REFRESH_RATE_IN_HZ.
        static int attitude_counter = 0;
        attitude_counter++;
        if (attitude_counter == 1000 / WAIT_KEY_IN_MS / REFRESH_RATE_IN_HZ) {
          attitude_counter = 0;
          char buf[128];
          sprintf(buf, "Yaw: %+04d degrees, Depth: %+04d cm, Target Yaw: %+04d degrees, Target Depth: %+04d",
            attitude_input->yaw(), attitude_input->depth(), attitude_input->target_yaw(), attitude_input->target_depth());
          message(buf);
        }
      }
    }

    // Dispatch on the pressed key ('\0' means no key this tick).
    switch(c) {
      case 'q':
         loop = false;
         break;
      case 'z':
         dump_images();
         break;
      case 'y':
         if (image_input->can_display()) {
           show_raw_images = !show_raw_images;
           mvWindow::setShowImage(show_raw_images);
         } else {
           message_hold("Image stream should already be displayed");
         }
         break;
      case 'u':
         use_fwd_img = !use_fwd_img;
         break;
      case 'i':
         actuator_output->special_cmd(SIM_MOVE_FWD);
         break;
      case 'k':
         actuator_output->special_cmd(SIM_MOVE_REV);
         break;
      case 'j':
         actuator_output->special_cmd(SIM_MOVE_LEFT);
         break;
      case 'l':
         actuator_output->special_cmd(SIM_MOVE_RIGHT);
         break;
      case 'p':
         actuator_output->special_cmd(SIM_MOVE_RISE);
         break;
      case ';':
         actuator_output->special_cmd(SIM_MOVE_SINK);
         break;
      case 'e':
         actuator_output->stop();
         break;
      case 'w':
         actuator_output->set_attitude_change(FORWARD, SPEED_CHG);
         break;
      case 's':
         actuator_output->set_attitude_change(REVERSE, SPEED_CHG);
         break;
      case 'a':
         actuator_output->set_attitude_change(LEFT, YAW_CHG_IN_DEG);
         break;
      case 'd':
         actuator_output->set_attitude_change(RIGHT, YAW_CHG_IN_DEG);
         break;
      case 'r':
         actuator_output->set_attitude_change(RISE, DEPTH_CHG_IN_CM);
         break;
      case 'f':
         actuator_output->set_attitude_change(SINK, DEPTH_CHG_IN_CM);
         break;
      case ' ':
         actuator_output->special_cmd(SIM_ACCEL_ZERO);
#ifdef DEBUG_FRAME_BY_FRAME
         process_image();
#endif
         break;
      case '^':
         actuator_output->special_cmd(SUB_POWER_ON);
         break;
      case '%':
         actuator_output->stop();
         actuator_output->special_cmd(SUB_STARTUP_SEQUENCE);
         break;
      case '$':
         actuator_output->special_cmd(SUB_POWER_OFF);
         break;
      case '#':
         long_input();
         break;
      case 'm':
         endwin();
         // Scope mission so that it is destructed before display_start_message
         {
           Mission m(attitude_input, image_input, actuator_output);
           m.work_internal(true);
         }
         display_start_message();
         message_hold("Mission complete!");
         break;
      case 'M': // same as mission, but force turn off show_image
         endwin();
         // Scope mission so that it is destructed before display_start_message
         {
           Mission m(attitude_input, image_input, actuator_output);
           m.work();
         }
         display_start_message();
         message_hold("Mission complete!");
         break;

      // Digit keys are dual-purpose: in NORMAL mode they run the matching
      // task to completion; in VISION mode ('v' pressed first) they select
      // which vision module processes incoming frames.
      case '0':
         if (mode != VISION) {
           endwin();

           MDA_TASK_RETURN_CODE ret_code;
           // Scope task so that it is destructed before display_start_message
           {
             MDA_TASK_TEST test_task(attitude_input, image_input, actuator_output);
             ret_code = test_task.run_task();
           }

           display_start_message();

           switch(ret_code) {
             case TASK_DONE:
                message_hold("Test task completed successfully");
                break;
             case TASK_QUIT:
                message_hold("Test task quit by user");
                break;
             default:
                message_hold("Test task errored out");
                break;
           }
           break;
         }
         delete vision_module;
         message_hold("Selected test vision module\n");
         vision_module = new MDA_VISION_MODULE_TEST();
         use_fwd_img = true;
         break;
      case '1':
         if (mode != VISION) {
           endwin();

           MDA_TASK_RETURN_CODE ret_code;
           // Scope task so that it is destructed before display_start_message
           {
             MDA_TASK_GATE gate_task(attitude_input, image_input, actuator_output);
             ret_code = gate_task.run_task();
           }

           display_start_message();

           switch(ret_code) {
             case TASK_DONE:
                message_hold("Gate task completed successfully");
                break;
             case TASK_QUIT:
                message_hold("Gate task quit by user");
                break;
             default:
                message_hold("Gate task errored out");
                break;
           }
           break;
         }
         delete vision_module;
         message_hold("Selected gate vision module\n");
         vision_module = new MDA_VISION_MODULE_GATE();
         use_fwd_img = true;
         break;
      case '2':
         if (mode != VISION) {
           endwin();

           MDA_TASK_RETURN_CODE ret_code;
           // Scope task so that it is destructed before display_start_message
           {
             MDA_TASK_PATH path_task(attitude_input, image_input, actuator_output);
             ret_code = path_task.run_task();
           }

           display_start_message();

           switch(ret_code) {
             case TASK_DONE:
                message_hold("Path task completed successfully");
                break;
             case TASK_QUIT:
                message_hold("Path task quit by user");
                break;
             default:
                message_hold("Path task errored out");
                break;
           }
           break;
         }
         delete vision_module;
         message_hold("Selected path vision module\n");
         vision_module = new MDA_VISION_MODULE_PATH();
         use_fwd_img = false;
         break;
      case '3':
         if (mode != VISION) {
           endwin();

           MDA_TASK_RETURN_CODE ret_code;
           // Scope task so that it is destructed before display_start_message
           {
             MDA_TASK_BUOY buoy_task(attitude_input, image_input, actuator_output);
             ret_code = buoy_task.run_task();
           }

           display_start_message();

           switch(ret_code) {
             case TASK_DONE:
                message_hold("Buoy task completed successfully");
                break;
             case TASK_QUIT:
                message_hold("Buoy task quit by user");
                break;
             default:
                message_hold("Buoy task errored out");
                break;
           }
           break;
         }
         delete vision_module;
         message_hold("Selected buoy vision module\n");
         vision_module = new MDA_VISION_MODULE_BUOY();
         use_fwd_img = true;
         break;
      case '4':
         if (mode != VISION) {
           endwin();

           MDA_TASK_RETURN_CODE ret_code;
           // Scope task so that it is destructed before display_start_message
           {
             MDA_TASK_FRAME frame_task(attitude_input, image_input, actuator_output);
             ret_code = frame_task.run_task();
           }

           display_start_message();

           switch(ret_code) {
             case TASK_DONE:
                message_hold("Frame task completed successfully");
                break;
             case TASK_QUIT:
                message_hold("Frame task quit by user");
                break;
             default:
                message_hold("Frame task errored out");
                break;
           }
           break;
         }
         delete vision_module;
         message_hold("Selected frame vision module\n");
         vision_module = new MDA_VISION_MODULE_FRAME();
         use_fwd_img = true;
         break;
      case '5':
         if (mode != VISION) {
           endwin();

           MDA_TASK_RETURN_CODE ret_code;
           // Scope task so that it is destructed before display_start_message
           {
             MDA_TASK_MARKER marker_task(attitude_input, image_input, actuator_output);
             ret_code = marker_task.run_task();
           }

           display_start_message();

           switch(ret_code) {
             case TASK_DONE:
                message_hold("Marker task completed successfully");
                break;
             case TASK_QUIT:
                message_hold("Marker task quit by user");
                break;
             default:
                message_hold("Marker task errored out");
                break;
           }
           break;
         }
         delete vision_module;
         message_hold("Selected marker dropper vision module\n");
         vision_module = new MDA_VISION_MODULE_MARKER();
         use_fwd_img = false;
         break;
      case '8':
         // Task-only key: no corresponding vision module in VISION mode.
         if (mode != VISION) {
           endwin();

           MDA_TASK_RETURN_CODE ret_code;
           // Scope task so that it is destructed before display_start_message
           {
             MDA_TASK_PATH_SKIP path_skip(attitude_input, image_input, actuator_output);
             ret_code = path_skip.run_task();
           }

           display_start_message();

           switch(ret_code) {
             case TASK_DONE:
                message_hold("Path skip task completed successfully");
                break;
             case TASK_QUIT:
                message_hold("Path skip task quit by user");
                break;
             default:
                message_hold("Path skip task errored out");
                break;
           }
         }
         break;
      case '9':
         // Task-only key: no corresponding vision module in VISION mode.
         if (mode != VISION) {
           endwin();

           MDA_TASK_RETURN_CODE ret_code;
           // Scope task so that it is destructed before display_start_message
           {
             MDA_TASK_SURFACE surface_task(attitude_input, image_input, actuator_output);
             ret_code = surface_task.run_task();
           }

           display_start_message();

           switch(ret_code) {
             case TASK_DONE:
                message_hold("Surface task completed successfully");
                break;
             case TASK_QUIT:
                message_hold("Surface task quit by user");
                break;
             default:
                message_hold("Surface task errored out");
                break;
           }
         }
         break;
      case 'x':
         // Leave VISION mode and drop the current vision module.
         if (mode == NORMAL) {
           break;
         }
         delete vision_module;

         vision_module = NULL;
         mode = NORMAL;
         display_start_message();
         break;
      case 'v':
         if (mode == VISION) {
           delete vision_module;
           vision_module = NULL;
         }
         endwin();
         mode = VISION;
         message(
           "Entering Vision Mode:\n"
           "  0    - test vision\n"
           "  1    - gate vision\n"
           "  2    - path vision\n"
           "  3    - buoy vision\n"
           "  4    - frame vision\n"
           "  5    - marker dropper vision\n"
           "\n"
           "  v    - cancel current vision selection\n"
           "  x    - exit vision mode\n"
           "  q    - exit simulator\n"
         );
         break;
      case '\0': // timeout
#ifndef DEBUG_FRAME_BY_FRAME
        process_image();
#else
        char ch = cvWaitKey(3);
        // NOTE(review): cvWaitKey() returns int; truncating it to char makes
        // the comparison with -1 implementation-defined where char is
        // unsigned -- confirm the intended behavior on target platforms.
        if (ch != -1) {
          CharacterStreamSingleton::get_instance().write_char(ch);
        }
#endif
        break;
    }
  }

  // close ncurses
  endwin();

  // Surface
  MDA_TASK_SURFACE surface(attitude_input, image_input, actuator_output);
  surface.run_task();

  actuator_output->special_cmd(SUB_POWER_OFF);
}
static int read_frame(void)
{
        struct v4l2_buffer buf;
        unsigned int i;

        switch (io) {
        case IO_METHOD_READ:
          printf("IO_METHOD_READ\n");
                if (-1 == read(fd, buffers[0].start, buffers[0].length)) {
                        switch (errno) {
                        case EAGAIN:
                                return 0;

                        case EIO:
                                /* Could ignore EIO, see spec. */

                                /* fall through */

                        default:
                                errno_exit("read");
                        }
                }

                process_image(buffers[0].start, buffers[0].length);
                break;

        case IO_METHOD_MMAP:
          printf("IO_METHOD_MMAP\n");
                CLEAR(buf);

                buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
                buf.memory = V4L2_MEMORY_MMAP;

                if (-1 == xioctl(fd, VIDIOC_DQBUF, &buf)) {
                        switch (errno) {
                        case EAGAIN:
                          printf("EAGAIN\n");
                                return 0;

                        case EIO:
                          printf("EIO\n");
                                /* Could ignore EIO, see spec. */

                                /* fall through */

                        default:
                          printf("default\n");
                                errno_exit("VIDIOC_DQBUF");
                        }
                }

                assert(buf.index < n_buffers);

                process_image(buffers[buf.index].start, buf.bytesused);

                if (-1 == xioctl(fd, VIDIOC_QBUF, &buf))
                        errno_exit("VIDIOC_QBUF");
                break;

        case IO_METHOD_USERPTR:
                CLEAR(buf);

                buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
                buf.memory = V4L2_MEMORY_USERPTR;

                if (-1 == xioctl(fd, VIDIOC_DQBUF, &buf)) {
                        switch (errno) {
                        case EAGAIN:
                                return 0;

                        case EIO:
                                /* Could ignore EIO, see spec. */

                                /* fall through */

                        default:
                                errno_exit("VIDIOC_DQBUF");
                        }
                }

                for (i = 0; i < n_buffers; ++i)
                        if (buf.m.userptr == (unsigned long)buffers[i].start
                            && buf.length == buffers[i].length)
                                break;

                assert(i < n_buffers);

                process_image((void *)buf.m.userptr, buf.bytesused);

                if (-1 == xioctl(fd, VIDIOC_QBUF, &buf))
                        errno_exit("VIDIOC_QBUF");
                break;
        }

        return 1;
}
Example #30
0
/* Print a 3x3 matrix one row per line using the fixed-width format of the
 * original calibration debug dump. */
static void print_mat3(double m[3][3])
{
  int row, col;
  for (row = 0; row < 3; row++) {
    for (col = 0; col < 3; col++) {
      printf("%6.2lf ", m[row][col]);
    }
    printf("\n");
  }
}

int main( int argc, char** argv )
{
  char key;
  // Russ: unsigned char is required for the glasses input to work properly (GDP v0).
  uchar indat;

  /* Glasses handshake (Russ): wait for the start-of-frame symbol, check
   * that the device answers with its camera-count opcode, then require
   * exactly two cameras. */
  readuntilchar(stdin,SYMBOL_SOF);
  indat = (unsigned)readchar(stdin);
  assert(OPCODE_RESP_NUM_CAMS == indat);
  indat = (unsigned)readchar(stdin);
  assert(2 == indat);

  Open_GUI();
  Open_Logfile(argc,argv);
  Start_Timer();

  /* Self-test of affine_matrix_inverse(): fill T row-major with 1..9,
   * zero the first two entries of the bottom row, invert, and dump both
   * matrices to stdout. */
  double T[3][3], T1[3][3];
  int row, col;
  for (row = 0; row < 3; row++)
    for (col = 0; col < 3; col++)
      T[row][col] = row*3 + col + 1;
  T[2][0] = T[2][1] = 0;

  printf("\nT: \n");
  print_mat3(T);
  affine_matrix_inverse(T, T1);
  printf("\nT1: \n");
  print_mat3(T1);

  /* Main UI loop; the 370 ms poll was picked (Russ) to roughly match the
   * v1 glasses frame rate. 'q' exits. */
  while ((key=cvWaitKey(370))!='q') {
    switch (key) {
    case 's':   /* snapshot both cameras to numbered .bmp files */
      sprintf(eye_file, "eye%05d.bmp", image_no);
      sprintf(scene_file, "scene%05d.bmp", image_no);
      image_no++;
      cvSaveImage(eye_file, eye_image);
      cvSaveImage(scene_file, scene_image);
      printf("thres: %d\n", pupil_edge_thres);
      break;
    case 'c':   /* toggle continuous image saving */
      save_image = 1 - save_image;
      printf("save_image = %d\n", save_image);
      break;
    case 'e':   /* toggle ellipse logging; open/close the log to match */
      save_ellipse = 1 - save_ellipse;
      printf("save_ellipse = %d\n", save_ellipse);
      if (save_ellipse == 1)
        Open_Ellipse_Log();
      else
        fclose(ellipse_log);
      break;
    default:
      break;
    }

    /* Until a start point has been selected, only grab frames; after
     * that, run the full per-frame processing pipeline. */
    if (start_point.x == -1 && start_point.y == -1)
      Grab_Camera_Frames();
    else
      process_image();
    /* frame_number%1 is always 0; the modulus is a leftover throttle
     * knob for GUI refreshes. */
    if (frame_number%1==0) Update_Gui_Windows();
  }

  Close_Logfile();
  Close_GUI();

  return 0;
}