Exemple #1
0
/* Game main loop.
 * After initialization finishes, main() jumps here and never returns.
 * Asynchronous interrupts may arrive at any point while the loop runs: the
 * timer interrupt ultimately calls timer_event, the keyboard interrupt
 * ultimately calls keyboard_event. When a handler completes, execution
 * resumes at the interrupted point in this loop.
 *
 * tick - signal maintained by the timer interrupt handler; its value is the
 *        number of timer interrupts that have occurred so far
 * HZ   - timer interrupts generated per second by the timer hardware,
 *        defined in include/device/timer.h
 * now  - number of timer interrupts this loop has already processed, i.e.
 *        the point in physical time the game has caught up to
 * */
void
main_loop(void) {
	int now = 0, target;
	int num_draw = 0;   /* frames actually drawn since the last FPS-stat update */
	bool redraw;

	while (true) {
		wait_intr();
		/* tick is written by the timer interrupt handler, so read it
		 * inside a cli/sti critical section. */
		cli();
		if (now == tick) {
			sti();
			continue;
		}
		assert(now < tick);
		target = tick; /* now always lags tick, so we must "catch up" to it */
		sti();

		redraw = false;
		/* drain all pending keyboard events */
		while (update_keypress())
			;

		/* Replay each missed timer interrupt in order. If one pass of the
		 * main loop takes long, several timer interrupts may arrive in the
		 * meantime, so the loop's notion of time can fall well behind the
		 * real clock. To keep the game running correctly, every missed
		 * frame of game logic must be made up here. */
		while (now < target) { 
			/* spawn a new letter at a fixed interval */
			if (now % (HZ / CHARACTER_PER_SECOND) == 0) {
				create_new_letter();
			} 
			/* advance the positions of the on-screen letters at a fixed interval */
			if (now % (HZ / UPDATE_PER_SECOND) == 0) {
				update_letter_pos();
			}
			/* Screen refresh with a "frame skipping" mechanism. Suppose
			 *   HZ = 1000, FPS = 100, now = 10, target = 1000
			 * i.e. we simulate the events of 990 timer ticks, which include
			 * 9 scheduled screen updates — but the redraw flag is only set
			 * once (so the screen is redrawn once, not 9 times).
			 *
			 * In other words: of the ~99 refreshes that would nominally be
			 * due over those ticks, only a single actual redraw happens at
			 * the bottom of the loop — the rest are skipped. */
			if (now % (HZ / FPS) == 0) {
				redraw = true;
			}
			/* update the FPS statistics twice per second */
			if (now % (HZ / 2) == 0) {
				int now_fps = num_draw * 2 + 1;
				if (now_fps > FPS) now_fps = FPS;
				set_fps(now_fps);
				num_draw = 0;
			}
			now ++;
		}

		if (redraw) { /* redraw only when at least one frame came due */
			num_draw ++;
			redraw_screen();
		}
	}
}
Exemple #2
0
/* Game main loop.
 * After initialization finishes, main() jumps here and never returns.
 * Asynchronous interrupts may arrive at any point while the loop runs: the
 * timer interrupt ultimately calls timer_event, the keyboard interrupt
 * ultimately calls keyboard_event. When a handler completes, execution
 * resumes at the interrupted point in this loop.
 *
 * tick - signal maintained by the timer interrupt handler; its value is the
 *        number of timer interrupts that have occurred so far
 * HZ   - timer interrupts generated per second by the timer hardware,
 *        defined in include/device/timer.h
 * now  - number of timer interrupts this loop has already processed, i.e.
 *        the point in physical time the game has caught up to
 *
 * Under qemu-kvm every access to the memory-mapped IO region traps, so at
 * 30 FPS the video memory causes 30*320*200/4 traps per second, wasting
 * enough time to cause frame skipping (effective FPS < 30). Adding -DSLOW to
 * CFLAGS improves FPS in that case; if FPS is still too low, try -DTOOSLOW,
 * which switches to interlaced screen updates (possibly degrading visual
 * quality). These mechanisms are implemented in device/video.c.
 * */
void
main_loop(void) {
    int now = 0, target;
    int num_draw = 0;   /* frames actually drawn since the last FPS-stat update */
    bool redraw;

    while (TRUE) {
        wait_for_interrupt();
        /* BUGFIX: tick is written asynchronously by the timer interrupt
         * handler, so it must be read inside a critical section. The
         * disable/enable calls below had been commented out, leaving an
         * unsynchronized read of shared state (compare the sibling
         * main_loop implementations, which keep the guard). */
        disable_interrupt();
        if (now == tick) {
            enable_interrupt();
            continue;
        }
        assert(now < tick);
        target = tick; /* now always lags tick, so we must "catch up" to it */
        enable_interrupt();

        redraw = FALSE;
        /* drain all pending keyboard events */
        while (update_keypress())
            ;

        /* Replay each missed timer interrupt in order. If one pass of the
         * main loop takes long, several timer interrupts may arrive in the
         * meantime, so the loop's notion of time can fall well behind the
         * real clock. To keep the game running correctly, every missed
         * frame of game logic must be made up here. */
        while (now < target) {
            /* spawn a new letter at a fixed interval */
            if (now % (HZ / CHARACTER_PER_SECOND) == 0) {
                create_new_letter();
            }
            /* advance the positions of the on-screen letters at a fixed interval */
            if (now % (HZ / UPDATE_PER_SECOND) == 0) {
                update_letter_pos();
            }
            /* Screen refresh with a "frame skipping" mechanism. Suppose
             *   HZ = 1000, FPS = 100, now = 10, target = 1000
             * i.e. we simulate the events of 990 timer ticks, which include
             * 9 scheduled screen updates — but the redraw flag is only set
             * once, so only a single actual redraw happens below. */
            if (now % (HZ / FPS) == 0) {
                redraw = TRUE;
            }
            /* update the FPS statistics twice per second */
            if (now % (HZ / 2) == 0) {
                int now_fps = num_draw * 2 + 1;
                if (now_fps > FPS) now_fps = FPS;
                set_fps(now_fps);
                num_draw = 0;
            }
            now ++;
        }

        if (redraw) { /* redraw only when at least one frame came due */
            num_draw ++;
            redraw_screen();
        }
    }
}
Exemple #3
0
/* Reads the first few bytes of "sock" socket and decides what to do (send webcam stream or list of controls)
 * In the latter case (list of controls), more bytes are parsed to see whether we should also set a new value to one
 * of the controls
 */
int get_action(int sock, struct video_device *d) {
	int c, ctrl_index = 0, value = 0, ret = ACTION_CAPTURE;
	char *buf, *sptr, *fptr;
	struct control_list *l = get_control_list(d);

	XMALLOC(buf, char *, INPUT_BLOCK_SIZE);
	c = read(sock, buf, INPUT_BLOCK_SIZE - 1);
	buf[c] = '\0';

	if(strstr(buf, "webcam") != NULL) {
		ret = ACTION_CAPTURE;
		info(LOG_INFO, "going for capture\n");
	}
	else if (strstr(buf, "list") != NULL){
		ret = ACTION_LIST;
		if((sptr = strstr(buf, "?")) != NULL) {
			fptr = strstr(sptr, " HTTP");
			*fptr = '\0';
			if(sscanf(++sptr, "val=%6d&%3d=update", &value, &ctrl_index) == 2) {
				//catch the jpeg control setting
				if(ctrl_index==-1) {
					info(LOG_INFO, "Setting JPEG quality to %d\n", value);
					if((1 <= value) && (value <= 100)) jpeg_quality=value;
					else info(LOG_ERR, "Invalid jpeg quality value %d\n", value);
				} else if(ctrl_index==-2) {
				//catch the frame ratio control
					info(LOG_INFO, "Setting frame ratio to %d\n", value);
					if((1 <= value) && (value <= 25)) {
						requested_fps = value; set_fps(requested_fps);
					} else info(LOG_ERR, "Invalid frame rate %d\n", value);
				} else {
					assert(ctrl_index < l->count);
					info(LOG_INFO, "Setting %s to %d\n", l->controls[ctrl_index].v4l2_ctrl->name, value);
					set_control_value(d, l->controls[ctrl_index].v4l2_ctrl, &value);
					info(LOG_INFO, "New value: %d\n", value);
				}
			} else
				info(LOG_ERR, "Error parsing URL. Unable to set new value\n");
		}
	}
	XFREE(buf);
	release_control_list(d);
	return ret;
}
Exemple #4
0
/* Initializes timing and GLUT, creates the window, registers the world's
 * display/idle/reshape/keyboard callbacks and enters the GLUT main loop.
 * In classic GLUT, glutMainLoop() never returns, so the trailing return 0
 * is only reached with implementations (e.g. freeglut) that can leave the
 * loop. */
int world_main_loop(int argc, char **argv, world_t *w){
	init_time();
	set_fps(90);
	world_set(w);
	glutInit(&argc,argv);
	glutInitDisplayMode(GLUT_DEPTH|GLUT_DOUBLE|GLUT_RGB);
	glutInitWindowPosition(250,250);
	glutInitWindowSize(600,400);
	glutCreateWindow("BASTOS 85");
	/* BUGFIX: glEnable requires a current GL context, which only exists
	 * once glutCreateWindow has run; the original called it before window
	 * creation, so depth testing was never actually enabled. */
	glEnable(GL_DEPTH_TEST);
	draw_init();
	glutDisplayFunc(do_world);
	glutIdleFunc(do_world);
	glutReshapeFunc(draw_reshape);
	/*glutIgnoreKeyRepeat(1);*/
	glutKeyboardFunc(keyboard_down_func);
	glutKeyboardUpFunc(keyboard_up_func);
	glutMainLoop();
	return 0;
}
Exemple #5
0
/* Demo/scene player entry point (legacy 16-bit-era code: `huge` pointers,
 * commented-out inline asm, port I/O).
 * Loads a scene's materials (<scene>.00M), its objects (<scene>.NNN), and its
 * animation list (<scene>.0AA), then runs the playback loop: each frame it
 * decodes a compact byte-stream of object on/off switches and incremental
 * position/rotation updates, Z-sorts the enabled objects and draws them.
 * With no command-line argument the player runs in demo mode (indemo=1,
 * progress messages suppressed).
 * Returns 0 on normal termination, 3 on a corrupt animation stream
 * (object index out of range), -1 if graphics init fails. */
int main(int argc,char *argv[])
{
	//char huge *sptemp;
	short	huge *ip;
	char	huge *cp;
	int	a,b,c,d,e,f,g;
	#ifdef DEBUG
	//fr=fopen("tmp","wt");
	fr=stdout;
	#endif
	/* scene name from argv[1]; no argument means demo mode */
	if(argc>1)
	{
		strcpy(scene,argv[1]);
		//strupr(scene);
	}
	else indemo=1;
	
	/* Load the material file "<scene>.00M". */
	sprintf(tmpname,"%s.00M",scene);
	if(!indemo) printf("Loading materials %s...\n",tmpname);
	scene0=scenem=readfile(tmpname);

	/* Byte 15 of the material file selects a scene variant ('C'/'R' are
	 * special-cased "CITY" scenes, see the Z-sort overrides below). */
	if(scene0[15]=='C') city=1;
	if(scene0[15]=='R') city=2;
	/* The header stores (at offset 4) the offset of the object table:
	 * a count followed by one object-index per slot. */
	ip=(short *)(scene0+LONGAT(scene0+4));
	conum=d=*ip++;
	/* Load each object file once; when an index repeats (e <= highest index
	 * seen so far, tracked in f), clone the already-loaded object instead of
	 * re-reading it — each clone still gets its own rotation matrices. */
	for(f=-1,c=1;c<d;c++)
	{	
		e=*ip++;
		if(e>f)
		{
			f=e;
			sprintf(tmpname,"%s.%03i",scene,e);
			if(!indemo) printf("Loading %s... ",tmpname);
			co[c].o=vis_loadobject(tmpname);
			memset(co[c].o->r,0,sizeof(rmatrix));
			memset(co[c].o->r0,0,sizeof(rmatrix));
			co[c].index=e;
			co[c].on=0;
			if(!indemo) printf("(co[%i]:%s)\n",c,co[c].o->name);
		}
		else
		{
			if(!indemo) printf("Copying %s.%03i... ",scene,e);
			/* find the slot g that already holds object index e */
			for(g=0;g<c;g++) if(co[g].index==e) break;
			memcpy(co+c,co+g,sizeof(struct s_co));
			co[c].o=getmem(sizeof(object));
			memcpy(co[c].o,co[g].o,sizeof(object));
			/* the clone shares geometry but needs private matrices */
			co[c].o->r=getmem(sizeof(rmatrix));
			co[c].o->r0=getmem(sizeof(rmatrix));
			memset(co[c].o->r,0,sizeof(rmatrix));
			memset(co[c].o->r0,0,sizeof(rmatrix));
			co[c].on=0;
			if(!indemo) printf("(co[%i]:%s)\n",c,co[c].o->name);
		}
	}
	/* slot 0 is the camera pseudo-object; its matrices alias `cam` */
	co[0].o=&camobject;
	camobject.r=&cam;
	camobject.r0=&cam;

	/* Load the animation index "<scene>.0AA": a list of 2-short entries,
	 * terminated by 0 or -1, whose first short selects sub-scene files
	 * "<scene>.0XY" (X,Y base-26-ish letters derived from the number). */
	sprintf(tmpname,"%s.0AA",scene);
	if(!indemo) printf("Loading animations... %s\n",tmpname);
	ip=(short *)readfile(tmpname);
	while(*ip)
	{
		a=*ip;
		if(a==-1) break;
		sprintf(tmpname,"%s.0%c%c",scene,a/10+'A',a%10+'A');
		if(!indemo) printf("Scene: %s ",tmpname);
		scenelist[scl].data=readfile(tmpname);
		printf("(%i:@%p)\n",scl,scenelist[scl].data);
		scl++;
		ip+=2;
	}

	if(!indemo) 
	{
		printf("Press any key to continue...\n");
		//getch();
	}	

	resetscene();

	if (init_graphics("Cplay", argc, argv) < 0) {
		fprintf(stderr, "Can't init graphics\n");
		return -1;
	};

	init_opengl();
	set_fps(36);

	vid_init(1); ////// oversample x 4
	/* palette lives right after the 16-byte material-file header */
	cp=(char *)(scenem+16);
	vid_setpal(cp);
#if 0
	outp(0x3c8,0);
	outp(0x3c9,0);
	outp(0x3c9,0);
	outp(0x3c9,0);
#endif
	
	/* ---- playback loop: one iteration per rendered frame ---- */
	while(1 /*!kbhit()*/)
	{	
		int fov;
		int onum;
		long pflag;
		long dis;
		long l;
		object *o;
		rmatrix *r;
		
		//vid_switch();
		//vid_waitb();
		vid_clear();
		/* Parse the animation byte stream (global cursor `sp`) until the
		 * end-of-frame marker. Stream grammar, as decoded below:
		 *   ff 00..7f            end of frame; second byte*256 is the FOV
		 *   ff ff                restart the scene from the beginning
		 *   (a&0xc0)==0xc0       extended object number (high bits), then
		 *                        the next byte carries flags + low nibble
		 *   a&0xc0: 80 = switch object on, 40 = switch object off
		 *   a&0x30               how many pflag bytes follow (0..3)
		 *   pflag bits           which position/matrix deltas follow */
		
		onum=0;
		for(;;)
		{
			/*
			sptemp=sp;
			_asm
			{
				mov	ax,word ptr sptemp[0]
				cmp	ax,1000h
				jb	l1
				sub	word ptr sptemp[0],1000h
				add	word ptr sptemp[2],100h
			l1:
			}
			sp=sptemp;
			*/
			a=*sp++;
			if(a==0xff)
			{
				a=*sp++;
				if(a<=0x7f) 
				{
					/* end of this frame's commands; set field of view */
					fov=a<<8;
					break;
				}
				else if(a==0xff) 
				{
					/* stream wrap marker: loop the animation */
					resetscene();
					continue;
				}
			}
			if((a&0xc0)==0xc0)
			{
				/* extended object number: high 6 bits here, low nibble in
				 * the following byte */
				onum=((a&0x3f)<<4);
				a=*sp++;
			}
			onum=(onum&0xff0)|(a&0xf);
			b=0;
			switch(a&0xc0)
			{
			case 0x80 : b=1; co[onum].on=1; break;
			case 0x40 : b=1; co[onum].on=0; break;
			}
			
			#ifdef DEBUG
			if(b) fprintf(fr,"[%i (%s) ",onum,co[onum].on?"on":"off");
			else fprintf(fr,"[%i (--) ",onum);
			#endif
			/* corrupt stream: object index out of range */
			if(onum>=conum) return(3);
			
			r=co[onum].o->r0;
			
			/* assemble the 0-3 byte little-endian presence flag */
			pflag=0;
			switch(a&0x30)
			{
			case 0x00 : break;
			case 0x10 : pflag|=*sp++; break;
			case 0x20 : pflag|=sp[0]; 
				    pflag|=(long)sp[1]<<8; 
				    sp+=2; break;
			case 0x30 : pflag|=sp[0]; 
				    pflag|=(long)sp[1]<<8; 
				    pflag|=(long)sp[2]<<16; 
				    sp+=3; break;
			}
			
			#ifdef DEBUG
			fprintf(fr,"pfl:%06lX",pflag);
			#endif
			
			/* position deltas: lsget pulls a variable-width value off the
			 * stream according to the flag bits it is handed */
			l=lsget(pflag);
			r->x+=l;
			l=lsget(pflag>>2);
			r->y+=l;
			l=lsget(pflag>>4);
			r->z+=l;
			
			#ifdef DEBUG
			fprintf(fr," XYZ:(%f,%f,%f)",r->x,r->y,r->z);
			#endif

			/* rotation-matrix deltas: bits 7..15 of pflag select which of
			 * the 9 cells changed; bit 6 selects word vs byte deltas */
			if(pflag&0x40)
			{ // word matrix
				for(b=0;b<9;b++) if(pflag&(0x80<<b))
				{
					r->m[b]+=lsget(2);
				}
			}
			else
			{ // byte matrix
				for(b=0;b<9;b++) if(pflag&(0x80<<b))
				{
					r->m[b]+=lsget(1);
				}
			}

			#ifdef DEBUG
			fprintf(fr,"]\n");
			#endif
		}
		// Field of vision
		vid_cameraangle(fov);
		// Calc matrices and add to order list (only enabled objects)
		ordernum=0;
		/* start at 1 to skip camera */
		for(a=1;a<conum;a++) if(co[a].on)
		{
			order[ordernum++]=a;
			o=co[a].o;
			memcpy(o->r,o->r0,sizeof(rmatrix));
			calc_applyrmatrix(o->r,&cam);
			b=o->pl[0][1]; // center vertex
			/* objects whose name starts "x_" are forced to the far plane
			 * (drawn first in back-to-front order) */
			if(co[a].o->name[1]=='_') co[a].dist=1000000000L;
			else co[a].dist=calc_singlez(b,o->v0,o->r);
		}
		// Zsort
		if(city==1)
		{
			co[2].dist=1000000000L; // for CITY scene, test
			co[7].dist=1000000000L; // for CITY scene, test
			co[13].dist=1000000000L; // for CITY scene, test
		}
		if(city==2)
		{
			co[14].dist=1000000000L; // for CITY scene, test
		}
		/* insertion sort of `order` by descending dist (painter's order) */
		for(a=0;a<ordernum;a++) 
		{
			dis=co[c=order[a]].dist;
			for(b=a-1;b>=0 && dis>co[order[b]].dist;b--)
				order[b+1]=order[b];
			order[b+1]=c;
		}
		// Draw
		for(a=0;a<ordernum;a++)
		{
			//int	x,y;
			o=co[order[a]].o;
			#ifdef DEBUG
			fprintf(fr,"%s (i:%i Z:%li)\n",o->name,order[a],co[order[a]].dist);
			#endif
			vis_drawobject(o);
		}
		#ifdef DEBUG
		fprintf(fr,"\n");
		#endif

		swap_buffers();
	}

	/* NOTE(review): unreachable — the while(1) above never breaks; kept for
	 * symmetry with the commented-out kbhit() exit condition */
	vid_deinit();

	#ifdef DEBUG
	fclose(fr);	
	#endif
	return(0);
}
Exemple #6
0
/* Captures frames from /dev/video0 via V4L2 user-pointer streaming and
 * announces each ready buffer (as a DER-encoded BufferReference) on a UDP
 * socket aimed at CAMERA_ANNOUNCE_GROUP:CAMERA_ANNOUNCE_PORT.
 * Runs until the termination handler clears the global `running`, then stops
 * streaming and closes the device.
 * Throws std::runtime_error if the device cannot be opened or does not
 * support streaming I/O. */
void camera_server() {
	size_t count = 0;

	initialise_termination_handler();

	int deviceDescriptor = v4l2_open("/dev/video0",
			O_RDWR /* required */| O_NONBLOCK, 0);
	if (deviceDescriptor == -1) {
		throw std::runtime_error("Unable to open device");
	}

//	disable_output_processing(deviceDescriptor);

	if (!isStreamingIOSupported(deviceDescriptor)) {
		throw std::runtime_error("Streaming is not supported");
	}


	setCameraOutputFormat(deviceDescriptor, CAMERA_FRAME_WIDTH, CAMERA_FRAME_HEIGHT, V4L2_PIX_FMT_YUYV);


	std::cout << "Absolute focus supported: " << isControlSupported(deviceDescriptor,V4L2_CID_FOCUS_ABSOLUTE) << std::endl;
	std::cout << "Relative focus supported: " << isControlSupported(deviceDescriptor,V4L2_CID_FOCUS_RELATIVE) << std::endl;


	/* Pin exposure, white balance and gain so successive frames are
	 * directly comparable. */
	set_manual_exposure(deviceDescriptor,true);
	printf("Is manual exposure set = %u\n", is_manual_exposure(deviceDescriptor));
	set_absolute_exposure(100,deviceDescriptor);

	set_exposure_auto_priority(deviceDescriptor,false);
	printf("Is exposure auto priority set = %u\n", is_exposure_auto_priority(deviceDescriptor));

	set_auto_white_balance(deviceDescriptor,false);
	printf("Is auto white balance set = %u\n", is_auto_white_balance_set(deviceDescriptor));

	set_gain(deviceDescriptor,1);
	printf("Gain set = %u\n", get_gain(deviceDescriptor));


	printf("Focus value = %u\n", get_focus_variable(deviceDescriptor));

	set_fps(deviceDescriptor,30);


	start_capturing(deviceDescriptor);


	/* BUGFIX: counter was incremented below without ever being
	 * initialized (undefined behavior on first read). */
	unsigned int counter = 0;



	int announce_socket=socket(AF_INET,SOCK_DGRAM,0);
	if (announce_socket < 0) {
	  perror("socket");
	  exit(1);
	}

	sockaddr_in announce_address;
	memset(&announce_address,0,sizeof(announce_address));
	announce_address.sin_family=AF_INET;
	announce_address.sin_addr.s_addr=inet_addr(CAMERA_ANNOUNCE_GROUP);
	announce_address.sin_port=htons(CAMERA_ANNOUNCE_PORT);

	while (running != 0) {
		fd_set fds;
		int r;

		FD_ZERO(&fds);
		FD_SET(deviceDescriptor, &fds);

		/* block until a frame is ready (or a signal interrupts select) */
		r = select(deviceDescriptor + 1, &fds, NULL, NULL, NULL);

		if (r > 0) {
			struct v4l2_buffer buf;

			memset(&buf, 0, sizeof(buf));

			buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
			buf.memory = V4L2_MEMORY_USERPTR;

			if (-1 == xioctl(deviceDescriptor, VIDIOC_DQBUF, &buf)) {
				switch (errno) {
				case EAGAIN:
					continue;

				case EIO:
					/* Could ignore EIO, see spec. */

					/* fall through */

				default:
					perror("VIDIOC_DQBUF");
					exit(1);
				}
			}

			/* BUGFIX: the original tested (buf.flags | V4L2_BUF_FLAG_ERROR)
			 * != 0, which is always true since the flag constant is
			 * non-zero; bitwise AND tests whether the error flag is set. */
			if ((buf.flags & V4L2_BUF_FLAG_ERROR) != 0) {
//TODO Investigate the permanent occurence of the V4L2_BUF_FLAG_ERROR
//				std::cerr << "Frame buffer error" << std::endl;
			}

			printf("Index = %u, seconds = %ld us = %ld\n", buf.index,buf.timestamp.tv_sec,buf.timestamp.tv_usec);
		//	printf("Real time: seconds = %ld, us = %ld\n", tp.tv_sec,tp.tv_nsec/1000);

			int ret;
			assert(ptrToSequenceMap.count(buf.m.userptr) != 0);
			size_t sequence_number = ptrToSequenceMap[buf.m.userptr];
			ptrToSequenceMap.erase(buf.m.userptr);

			queueNextFrameBuffer(deviceDescriptor, buf.index, sequence_number, CAMERA_FRAME_WIDTH*CAMERA_FRAME_HEIGHT*2);

//TODO Investigate why the video streaming fails if the unmap call below is placed before the queueNextFrameBuffer call above.
//Probably this is because in that case the mmap call returns the same virtual address as the munmap call had just used for the deallocation
			ret = munmap(reinterpret_cast<void*>(buf.m.userptr),buf.length);
			if (ret == -1) {
				perror("munmap");
			}

			BufferReference readyBuffer;
			readyBuffer.index = buf.index;
			readyBuffer.offset = 0;
			readyBuffer.size = buf.bytesused;
			readyBuffer.timestamp_seconds = buf.timestamp.tv_sec;
			readyBuffer.timestamp_microseconds = buf.timestamp.tv_usec;

			readyBuffer.width = CAMERA_FRAME_WIDTH;
			readyBuffer.height = CAMERA_FRAME_HEIGHT;
			readyBuffer.sequence = sequence_number;

			std::array<char,1024> ipc_buffer;
			asn_enc_rval_t encode_result = der_encode_to_buffer(&asn_DEF_BufferReference, &readyBuffer,ipc_buffer.data(),ipc_buffer.size());
			/* BUGFIX: der_encode_to_buffer reports failure via
			 * encoded == -1; the original passed that -1 straight to
			 * sendto as the length. Skip the announcement instead. */
			if (encode_result.encoded < 0) {
				fprintf(stderr, "der_encode_to_buffer failed\n");
			} else {
				ret = sendto(announce_socket,ipc_buffer.data(),encode_result.encoded,0,(struct sockaddr *) &announce_address,sizeof(announce_address));
				if (ret < 0) {
				   perror("sendto");
				   exit(1);
				}
			}

			timespec tp;
			clock_gettime(CLOCK_MONOTONIC,&tp);

			std::cout << "Grab frame delay = " << get_milliseconds_delta(buf.timestamp,tp) << " ms" << std::endl;

			count++;

			counter++;


		}
	}

	v4l2_buf_type type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
	if (-1 == xioctl(deviceDescriptor, VIDIOC_STREAMOFF, &type))
		perror("VIDIOC_STREAMOFF");

	if (-1 == close(deviceDescriptor))
		perror("close");

	close(announce_socket);
}
Exemple #7
0
/* Webcam stream thread: v is a ptr to a struct thread_data which packages a
 * socket and a cdev. This function is meant to run as a separate thread and
 * handles a webcam stream connection on "sock" socket: it sends an HTTP
 * header and then continuously captures a frame from cdev, jpeg-encodes it
 * and sends it. The loop is terminated when keep_going is false or when the
 * socket is closed from the other end.
 * Frame pacing works by self-calibration: after the first SHOW_FPS_INTERVAL
 * the achieved FPS is measured, a per-frame nanosleep step is derived from
 * it, and thereafter the sleep is nudged up/down to hold requested_fps.
 * Always returns NULL; frees the thread_data (v) and the jpeg buffer, and
 * stops the capture when the last client disconnects. */
void *send_stream_to(void *v) {
	struct timeval start, now;
	struct timespec sleep_length, sleep_rem;
	struct jpeg j;
	struct thread_data *td = (struct thread_data *)v;
	struct video_device *d = td->vdev;
	struct capture_device *cdev = d->capture;
	int jpeg_len, yuv_len, f_nr = 0, retval = 0, sock = td->sock; //, calibrated = 1;
	void *yuv_data, *jpeg_data;
	float ts, last_fps;

	CLEAR(j);
	/* worst-case JPEG size bound: 3 bytes per pixel */
	XMALLOC(jpeg_data, void *, (cdev->width * cdev->height * 3));
	dprint(LOG_SOURCE_HTTP, LOG_LEVEL_DEBUG2, "New thread starting sock: %d, jpeg_quality: %d\n", sock, jpeg_quality);

	//setup the jpeg encoder based on the image palette
	if(setup_jpeg_encoder(cdev, &j)!=0) {
		info(LOG_ERR, "Error setting the JPEG encoder\n");
		goto end;
	}


	//send mjpeg header
	if (send_mjpeg_header(sock) != 0 )
		goto end;

	gettimeofday(&start, NULL);
	sleep_rem.tv_sec = sleep_rem.tv_nsec = 0;

	/* retval goes negative when send_frame fails (peer closed the socket) */
	while((retval>=0) && keep_going) {

		gettimeofday(&now, NULL);
		/* once per SHOW_FPS_INTERVAL: report FPS and (re)tune the sleep */
		if((now.tv_sec>=start.tv_sec + SHOW_FPS_INTERVAL)) {
			ts = (now.tv_sec - start.tv_sec) + ((float) (now.tv_usec - start.tv_usec)/1000000);
			last_fps = (f_nr / ts);
			//fprintf(stderr, "%d frames in %.3f sec (fps=%.1f, req_fps = %d)\n",f_nr, ts, last_fps, requested_fps);
			info(LOG_INFO, "%d frames in %.3f sec (fps=%.1f)\n",f_nr, ts, last_fps);

			//check whether the calibration was done
			if(fps_nanosleep_step == 0) {
				//no it wasn't: compute fps_nanosleep_step, the time it takes
				//to capture, jpeg-encode and send a single frame
				//NOTE(review): if no frame was sent in the interval,
				//last_fps is 0 and this division misbehaves — assumes at
				//least one frame went out; confirm against send_frame
				fps_nanosleep_step = (1000000000 / last_fps);
				dprint(LOG_SOURCE_HTTP, LOG_LEVEL_DEBUG1,"Calibrating loop: set nanosleep step to %09ld\n",fps_nanosleep_step);
				max_fps = last_fps;
				set_fps(requested_fps);
			} else {
				//calibration already completed
				//is current_fps = req_fps +- 0.5 ?
				if(last_fps<(requested_fps - 0.5 ))
					decr_nanosleep();
				else if (last_fps >(requested_fps + 0.5 ))
				 	incr_nanosleep();
				else
					dprint(LOG_SOURCE_HTTP, LOG_LEVEL_DEBUG1,"Current fps_nanosleep (%ld.%09ld) achieves the desired framerate\n", fps_secsleep, fps_nanosleep);
			}

			/* restart the measurement window */
			f_nr = 0;
			gettimeofday(&start, NULL);
		}

		//sleep to adjust the fps to correct value
		if(((fps_nanosleep != 0) || (fps_secsleep != 0))) {
			sleep_length.tv_nsec = fps_nanosleep;
			sleep_length.tv_sec = fps_secsleep;
			nanosleep(&sleep_length, &sleep_rem);
		}

		//get frame from v4l2
		if((yuv_data = (*cdev->actions->dequeue_buffer)(d, &yuv_len)) != NULL) {

			//encode in JPEG
			jpeg_len = (*j.jpeg_encode)(yuv_data,yuv_len, cdev, &j, jpeg_data);

			//return buffer to v4l2
			(*cdev->actions->enqueue_buffer)(d);

			//send in multipart_jpeg stream
			retval = send_frame(sock, jpeg_data, jpeg_len);

			f_nr++;
		}
	}

	end:

	XFREE(jpeg_data);
	release_jpeg_encoder(cdev, &j);
	//not thread safe!!
	//if multiple clients enabled, needs locking
	//concurrent paths with start_client_thread() could lead to bad things
	//FIXME if MAX_CLIENT > 1
	if(--client_nr==0) {
		dprint(LOG_SOURCE_HTTP, LOG_LEVEL_DEBUG2, "Last client, stopping capture \n");
		//Stop capture
		(*cdev->actions->stop_capture)(d);
	}

	//close socket
	info(LOG_INFO, "closing connection on socket %d\n", sock );
	close(sock);

	XFREE(v);

	return NULL;
}
Exemple #8
0
/* Game main loop.
 * After initialization finishes, main() jumps here and never returns.
 * Asynchronous interrupts may arrive at any point while the loop runs: the
 * timer interrupt ultimately calls timer_event, the keyboard interrupt
 * ultimately calls keyboard_event. When a handler completes, execution
 * resumes at the interrupted point in this loop.
 *
 * tick - signal maintained by the timer interrupt handler; its value is the
 *        number of timer interrupts that have occurred so far
 * HZ   - timer interrupts generated per second by the timer hardware,
 *        defined in include/device/timer.h
 * now  - number of timer interrupts this loop has already processed, i.e.
 *        the point in physical time the game has caught up to
 *
 * Under qemu-kvm every access to the memory-mapped IO region traps, so at
 * 30 FPS the video memory causes 30*320*200/4 traps per second, wasting
 * enough time to cause frame skipping (effective FPS < 30). Adding -DSLOW to
 * CFLAGS improves FPS in that case; if FPS is still too low, try -DTOOSLOW,
 * which switches to interlaced screen updates (possibly degrading visual
 * quality). These mechanisms are implemented in device/video.c.
 * */
void
main_loop(void) {
	int now = 0, target;
	int num_draw = 0;   /* frames actually drawn since the last FPS-stat update */
	bool redraw;

	create_main_character();

	while (TRUE) {
		wait_for_interrupt();
		/* tick is written by the timer interrupt handler, so read it
		 * inside a disable/enable-interrupt critical section. */
		disable_interrupt();
		if (now == tick) {
			enable_interrupt();
			continue;
		}
		assert(now < tick);
		target = tick; /* now always lags tick, so we must "catch up" to it */
		enable_interrupt();

		redraw = FALSE;

		/* drain all pending keyboard events */
		while (update_keypress())
			;

		/* Replay each missed timer interrupt in order. If one pass of the
		 * main loop takes long, several timer interrupts may arrive in the
		 * meantime, so the loop's notion of time can fall well behind the
		 * real clock. To keep the game running correctly, every missed
		 * frame of game logic must be made up here. */
		while (now < target) { 
			/* spawn a new enemy every SECOND_PER_CHARACTER seconds */
			if (now % (HZ * SECOND_PER_CHARACTER) == 0) {
				create_new_enemy();
			}
			/* Move the player's bullets (and enemy bullets) at a fixed
			 * interval. Bullets must move BEFORE the enemies do, otherwise
			 * hits could be missed. */
			if (now % (HZ / UPDATE_PER_SECOND) == 0) {
				update_mcb_pos();
				update_enemyb_pos();
			}
			/* fire an enemy bullet every ENEMY_SECOND_PER_BULLET seconds */
			if(now % (HZ * ENEMY_SECOND_PER_BULLET) == 0){
				create_new_enemyb();
			}	
		        /* enemies move once per second */
			if(now % HZ == 0){
				update_enemy_pos();
			}
			/* Screen refresh with a "frame skipping" mechanism. Suppose
			 *   HZ = 1000, FPS = 100, now = 10, target = 1000
			 * i.e. we simulate the events of 990 timer ticks, which include
			 * 9 scheduled screen updates — but the redraw flag is only set
			 * once, so only a single actual redraw happens below. */
			if (now % (HZ / FPS) == 0) {
				redraw = TRUE;
			}
			/* update the FPS statistics twice per second */
			if (now % (HZ / 2) == 0) {
				int now_fps = num_draw * 2 + 1;
				if (now_fps > FPS) now_fps = FPS;
				set_fps(now_fps);
				num_draw = 0;
			}
			now ++;
		}
		if (redraw) { /* redraw only when at least one frame came due */
			num_draw ++;
			redraw_screen();
		}
	}
}