Example #1
/*---------------------------------------------------------------------------*/
PROCESS_THREAD(tcpip_process, ev, data)
{
  PROCESS_BEGIN();
  
#if UIP_TCP
  {
    static unsigned char i;

    for(i = 0; i < UIP_LISTENPORTS; ++i) {
      s.listenports[i].port = 0;
    }
    s.p = PROCESS_CURRENT();
  }
#endif

  tcpip_event = process_alloc_event();
  etimer_set(&periodic, CLOCK_SECOND/2);

  uip_init();
  
  while(1) {
    PROCESS_YIELD();
    eventhandler(ev, data);
  }
  
  PROCESS_END();
}
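
For orientation, a schematic sketch follows (an assumption, not the actual uIP/Contiki implementation) of what the eventhandler() driven by this loop typically dispatches on: expiry of the periodic etimer and the tcpip_event allocated above.

/* Schematic sketch only -- not the real uIP eventhandler(). */
static void
eventhandler(process_event_t ev, process_data_t data)
{
  if(ev == PROCESS_EVENT_TIMER && data == &periodic) {
    etimer_reset(&periodic);          /* re-arm the periodic timer */
    /* ... periodic TCP/IP housekeeping would run here ... */
  } else if(ev == tcpip_event) {
    /* ... handle an application-level TCP/IP event here ... */
  }
}
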
Example #2
// call eventhandler once every frame
// to check whether Windows is trying to kill you (or handle other events)
int ik_eventhandler()
{
	eventhandler();

	if (must_quit)
		return 1;

	return 0;
}
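
A minimal, hypothetical usage sketch of the wrapper above; main_loop() and render_frame() are illustrative placeholders that do not appear in the original code.

/* Hypothetical frame loop: poll ik_eventhandler() once per frame and stop
 * as soon as it reports that the program must quit. */
void main_loop(void)
{
	while (!ik_eventhandler())
		render_frame();	/* placeholder for the caller's per-frame work */
}
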
Example #3
PROCESS_THREAD(unreach_process, ev, data) {
	PROCESS_BEGIN();
	unreach_event = process_alloc_event();
	while (1) {
		PROCESS_YIELD();
		eventhandler(ev, data);
	}
	PROCESS_END();
}
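
For illustration only: another Contiki process could hand an event to this loop via the standard process_post() call. report_unreachable() is a hypothetical helper, and the NULL payload stands in for whatever eventhandler() expects as data.

/* Hypothetical helper: deliver an unreach_event to unreach_process. */
static void
report_unreachable(void)
{
  process_post(&unreach_process, unreach_event, NULL);
}
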
Example #4
/*---------------------------------------------------------------------------*/
PROCESS_THREAD(tcpip_process, ev, data)
{
  PROCESS_BEGIN();

#if UIP_TCP
  {
    static unsigned char i;

    for(i = 0; i < UIP_LISTENPORTS; ++i) {
      s.listenports[i].port = 0;
    }
    s.p = PROCESS_CURRENT();
  }
#endif

  tcpip_event = process_alloc_event();
#if UIP_CONF_ICMP6
  tcpip_icmp6_event = process_alloc_event();
#endif /* UIP_CONF_ICMP6 */
  etimer_set(&periodic, CLOCK_SECOND);
  ctimer_set(&reset_input, WINDOW_SIZE_DATA_RATE, reset_packet_input, 0);
  uip_init();
#ifdef UIP_FALLBACK_INTERFACE
  UIP_FALLBACK_INTERFACE.init();
#endif
/* initialize RPL if configured for using RPL */
#if UIP_CONF_IPV6 && UIP_CONF_IPV6_RPL
  rpl_init();
#endif /* UIP_CONF_IPV6_RPL */

#if FORWARDER
  define_mobile_nodes();
#endif

  while(1) {
    PROCESS_YIELD();
    eventhandler(ev, data);
  }

  PROCESS_END();
}
Example #5
/*---------------------------------------------------------------------------*/
PROCESS_THREAD(tcpip_process, ev, data)
{
  PROCESS_BEGIN();
  
  tcpip_set_inputfunc(tcpip_inputfunc);

#if UIP_TCP
  {
    static unsigned char i;

    for(i = 0; i < UIP_LISTENPORTS; ++i) {
      s.listenports[i].port = 0;
    }
    s.p = PROCESS_CURRENT();
  }
#endif

  tcpip_event = process_alloc_event();
#if UIP_CONF_ICMP6
  tcpip_icmp6_event = process_alloc_event();
#endif /* UIP_CONF_ICMP6 */
  etimer_set(&periodic, CLOCK_SECOND / 2);

  uip_init();
#ifdef UIP_FALLBACK_INTERFACE
  UIP_FALLBACK_INTERFACE.init();
#endif
/* initialize RPL if configured for using RPL */
#if NETSTACK_CONF_WITH_IPV6 && UIP_CONF_IPV6_RPL
  rpl_init();
#endif /* UIP_CONF_IPV6_RPL */

  while(1) {
    PROCESS_YIELD();
    eventhandler(ev, data);
  }
  
  PROCESS_END();
}
Example #6
/*
 * Helper thread to periodically poll the video sources and enqueue the
 * generated frames directed to the remote party to the channel's queue.
 * Using a separate thread also helps because the encoding can be
 * computationally expensive so we don't want to starve the main thread.
 */
static void *video_thread(void *arg)
{
	struct video_desc *env = arg;
	int count = 0;
	char save_display[128] = "";
	int i; /* integer variable used as iterator */

	/* if sdl_videodriver is set, override the environment. Also,
	 * if it contains 'console' override DISPLAY around the call to SDL_Init
	 * so we use the console as opposed to the x11 version of aalib
	 */
	if (!ast_strlen_zero(env->sdl_videodriver)) { /* override */
		const char *s = getenv("DISPLAY");
		setenv("SDL_VIDEODRIVER", env->sdl_videodriver, 1);
		if (s && !strcasecmp(env->sdl_videodriver, "aalib-console")) {
			ast_copy_string(save_display, s, sizeof(save_display));
			unsetenv("DISPLAY");
		}
	}
	sdl_setup(env);
	if (!ast_strlen_zero(save_display)) {
		setenv("DISPLAY", save_display, 1);
	}

	ast_mutex_init(&env->dec_lock);	/* used to sync decoder and renderer */

	if (grabber_open(&env->out)) {
		ast_log(LOG_WARNING, "cannot open local video source\n");
	}

	if (env->out.device_num) {
		env->out.devices[env->out.device_primary].status_index |= IS_PRIMARY | IS_SECONDARY;
	}

	/* even if no device is connected, we must call video_out_init,
	 * as some of the data structures it initializes are
	 * used in get_video_frames()
	 */
	video_out_init(env);

	/* Write the initial status of the sources. */
	if (env->gui) {
		for (i = 0; i < env->out.device_num; i++) {
			print_message(env->gui->thumb_bd_array[i].board,
				src_msgs[env->out.devices[i].status_index]);
		}
	}

	for (;;) {
		struct timespec t = { 0, 50000000 };	/* XXX 20 times/sec */
		struct ast_frame *p, *f;
		struct ast_channel *chan;
		int fd;
		char *caption = NULL, buf[160];

		/* determine if video format changed */
		if (count++ % 10 == 0) {
			if (env->out.sendvideo && env->out.devices) {
				snprintf(buf, sizeof(buf), "%s %s %dx%d @@ %dfps %dkbps",
				env->out.devices[env->out.device_primary].name, env->codec_name,
				env->enc_in.w, env->enc_in.h,
				env->out.fps, env->out.bitrate / 1000);
			} else {
				sprintf(buf, "hold");
			}
			caption = buf;
		}

		/* manage keypad events */
		/* XXX here we should always check for events,
		* otherwise the drag will not work */ 
		if (env->gui)
			eventhandler(env, caption);

		/* sleep for a while */
		nanosleep(&t, NULL);

	    if (env->in) {
			struct video_dec_desc *v = env->in;

			/*
			 * While there is something to display, call the decoder and free
			 * the buffer, possibly enabling the receiver to store new data.
			 */
			while (v->dec_in_dpy) {
				struct fbuf_t *tmp = v->dec_in_dpy;	/* store current pointer */

				/* decode the frame, but show it only if not frozen */
				if (v->d_callbacks->dec_run(v, tmp) && !env->frame_freeze)
					show_frame(env, WIN_REMOTE);
				tmp->used = 0;	/* mark buffer as free */
				tmp->ebit = 0;
				ast_mutex_lock(&env->dec_lock);
				if (++v->dec_in_dpy == &v->dec_in[N_DEC_IN])	/* advance to next, circular */
					v->dec_in_dpy = &v->dec_in[0];

				if (v->dec_in_cur == NULL)	/* receiver was idle, enable it... */
					v->dec_in_cur = tmp;	/* using the slot just freed */
				else if (v->dec_in_dpy == v->dec_in_cur) /* this was the last slot */
					v->dec_in_dpy = NULL;	/* nothing more to display */
				ast_mutex_unlock(&env->dec_lock);
			}
		}

		if (env->shutdown)
			break;
		f = get_video_frames(env, &p);	/* read and display */
		if (!f)
			continue;
		chan = env->owner;
		if (chan == NULL) {
			/* drop the chain of frames, nobody uses them */
			while (f) {
				struct ast_frame *g = AST_LIST_NEXT(f, frame_list);
				ast_frfree(f);
				f = g;
			}
			continue;
		}
		fd = chan->alertpipe[1];
		ast_channel_lock(chan);

		/* AST_LIST_INSERT_TAIL is only good for one frame, cannot use here */
		if (chan->readq.first == NULL) {
			chan->readq.first = f;
		} else {
			chan->readq.last->frame_list.next = f;
		}
		chan->readq.last = p;
		/*
		 * more or less same as ast_queue_frame, but extra
		 * write on the alertpipe to signal frames.
		 */
		if (fd > -1) {
			int blah = 1, l = sizeof(blah);
			for (p = f; p; p = AST_LIST_NEXT(p, frame_list)) {
				if (write(fd, &blah, l) != l)
					ast_log(LOG_WARNING, "Unable to write to alert pipe on %s, frametype/subclass %d/%d: %s!\n",
						chan->name, f->frametype, f->subclass, strerror(errno));
			}
		}
		ast_channel_unlock(chan);
	}
	/* thread terminating, here could call the uninit */
	/* uninitialize the local and remote video environments */
	env->in = dec_uninit(env->in);
	video_out_uninit(env);

	if (env->gui)
		env->gui = cleanup_sdl(env->gui, env->out.device_num);
	ast_mutex_destroy(&env->dec_lock);
	env->shutdown = 0;
	return NULL;
}
Example #7
void
tcpip_process(int ev, void *data)
{
    eventhandler(ev, data);
}
Example #8
/*
 * Helper thread to periodically poll the video source and enqueue the
 * generated frames to the channel's queue.
 * Using a separate thread also helps because the encoding can be
 * computationally expensive so we don't want to starve the main thread.
 */
static void *video_thread(void *arg)
{
	struct video_desc *env = arg;
	int count = 0;
	char save_display[128] = "";

	/* if sdl_videodriver is set, override the environment. Also,
	 * if it contains 'console' override DISPLAY around the call to SDL_Init
	 * so we use the console as opposed to the x11 version of aalib
	 */
	if (!ast_strlen_zero(env->sdl_videodriver)) { /* override */
		const char *s = getenv("DISPLAY");
		setenv("SDL_VIDEODRIVER", env->sdl_videodriver, 1);
		if (s && !strcasecmp(env->sdl_videodriver, "aalib-console")) {
			ast_copy_string(save_display, s, sizeof(save_display));
			unsetenv("DISPLAY");
		}
	}
	sdl_setup(env);
	if (!ast_strlen_zero(save_display))
		setenv("DISPLAY", save_display, 1);

	/* initialize grab coordinates */
	env->out.loc_src_geometry.x = 0;
	env->out.loc_src_geometry.y = 0;

	ast_mutex_init(&env->dec_lock);	/* used to sync decoder and renderer */

	if (grabber_open(&env->out)) {
		ast_log(LOG_WARNING, "cannot open local video source\n");
	} else {
#if 0
		/* In principle, try to register the fd.
		 * In practice, many webcam drivers do not support select/poll,
		 * so don't bother and instead read periodically from the
		 * video thread.
		 */
		if (env->out.fd >= 0)
			ast_channel_set_fd(env->owner, 1, env->out.fd);
#endif
		video_out_init(env);
	}

	for (;;) {
		struct timeval t = { 0, 50000 };	/* XXX 20 times/sec */
		struct ast_frame *p, *f;
		struct ast_channel *chan;
		int fd;
		char *caption = NULL, buf[160];

		/* determine if video format changed */
		if (count++ % 10 == 0) {
			if (env->out.sendvideo)
			    sprintf(buf, "%s %s %dx%d @@ %dfps %dkbps",
				env->out.videodevice, env->codec_name,
				env->enc_in.w, env->enc_in.h,
				env->out.fps, env->out.bitrate/1000);
			else
			    sprintf(buf, "hold");
			caption = buf;
		}

		/* manage keypad events */
		/* XXX here we should always check for events,
		* otherwise the drag will not work */ 
		if (env->gui)
			eventhandler(env, caption);
 
		/* sleep for a while */
		ast_select(0, NULL, NULL, NULL, &t);

	    if (env->in) {
		struct video_dec_desc *v = env->in;
		
		/*
		 * While there is something to display, call the decoder and free
		 * the buffer, possibly enabling the receiver to store new data.
		 */
		while (v->dec_in_dpy) {
			struct fbuf_t *tmp = v->dec_in_dpy;	/* store current pointer */

			if (v->d_callbacks->dec_run(v, tmp))
				show_frame(env, WIN_REMOTE);
			tmp->used = 0;	/* mark buffer as free */
			tmp->ebit = 0;
			ast_mutex_lock(&env->dec_lock);
			if (++v->dec_in_dpy == &v->dec_in[N_DEC_IN])	/* advance to next, circular */
				v->dec_in_dpy = &v->dec_in[0];

			if (v->dec_in_cur == NULL)	/* receiver was idle, enable it... */
				v->dec_in_cur = tmp;	/* using the slot just freed */
			else if (v->dec_in_dpy == v->dec_in_cur) /* this was the last slot */
				v->dec_in_dpy = NULL;	/* nothing more to display */
			ast_mutex_unlock(&env->dec_lock);
		}
	    }

		if (env->shutdown)
			break;
		f = get_video_frames(env, &p);	/* read and display */
		if (!f)
			continue;
		chan = env->owner;
		if (chan == NULL)
			continue;
		fd = chan->alertpipe[1];
		ast_channel_lock(chan);

		/* AST_LIST_INSERT_TAIL is only good for one frame, cannot use here */
		if (chan->readq.first == NULL) {
			chan->readq.first = f;
		} else {
			chan->readq.last->frame_list.next = f;
		}
		chan->readq.last = p;
		/*
		 * more or less same as ast_queue_frame, but extra
		 * write on the alertpipe to signal frames.
		 */
		if (fd > -1) {
			int blah = 1, l = sizeof(blah);
			for (p = f; p; p = AST_LIST_NEXT(p, frame_list)) {
				if (write(fd, &blah, l) != l)
					ast_log(LOG_WARNING, "Unable to write to alert pipe on %s, frametype/subclass %d/%d: %s!\n",
					    chan->name, f->frametype, f->subclass, strerror(errno));
			}
		}
		ast_channel_unlock(chan);
	}
	/* thread terminating, here could call the uninit */
	/* uninitialize the local and remote video environments */
	env->in = dec_uninit(env->in);
	video_out_uninit(env);

	if (env->gui)
		env->gui = cleanup_sdl(env->gui);
	ast_mutex_destroy(&env->dec_lock);
	env->shutdown = 0;
	return NULL;
}
Example #9
/*
 * Helper thread to periodically poll the video sources and enqueue the
 * generated frames directed to the remote party to the channel's queue.
 * Using a separate thread also helps because the encoding can be
 * computationally expensive so we don't want to starve the main thread.
 */
static void *video_thread(void *arg)
{
	struct video_desc *env = arg;
	int count = 0;
	char save_display[128] = "";
	int i; /* integer variable used as iterator */

	/* if sdl_videodriver is set, override the environment. Also,
	 * if it contains 'console' override DISPLAY around the call to SDL_Init
	 * so we use the console as opposed to the x11 version of aalib
	 */
	if (!ast_strlen_zero(env->sdl_videodriver)) { /* override */
		const char *s = getenv("DISPLAY");
		setenv("SDL_VIDEODRIVER", env->sdl_videodriver, 1);
		if (s && !strcasecmp(env->sdl_videodriver, "aalib-console")) {
			ast_copy_string(save_display, s, sizeof(save_display));
			unsetenv("DISPLAY");
		}
	}
	sdl_setup(env);
	if (!ast_strlen_zero(save_display)) {
		setenv("DISPLAY", save_display, 1);
	}

	ast_mutex_init(&env->dec_lock);	/* used to sync decoder and renderer */

	if (grabber_open(&env->out)) {
		ast_log(LOG_WARNING, "cannot open local video source\n");
	}

	if (env->out.device_num) {
		env->out.devices[env->out.device_primary].status_index |= IS_PRIMARY | IS_SECONDARY;
	}

	/* even if no device is connected, we must call video_out_init,
	 * as some of the data structures it initializes are
	 * used in get_video_frames()
	 */
	video_out_init(env);

	/* Write the initial status of the sources. */
	if (env->gui) {
		for (i = 0; i < env->out.device_num; i++) {
			print_message(env->gui->thumb_bd_array[i].board,
				src_msgs[env->out.devices[i].status_index]);
		}
	}

	for (;;) {
		struct timespec t = { 0, 50000000 };	/* XXX 20 times/sec */
		struct ast_frame *p, *f;
		struct ast_channel *chan;
		int fd;
		char *caption = NULL, buf[160];

		/* determine if video format changed */
		if (count++ % 10 == 0) {
			if (env->out.sendvideo && env->out.devices) {
				snprintf(buf, sizeof(buf), "%s %s %dx%d @@ %dfps %dkbps",
				env->out.devices[env->out.device_primary].name, env->codec_name,
				env->enc_in.w, env->enc_in.h,
				env->out.fps, env->out.bitrate / 1000);
			} else {
				sprintf(buf, "hold");
			}
			caption = buf;
		}

		/* manage keypad events */
		/* XXX here we should always check for events,
		* otherwise the drag will not work */ 
		if (env->gui)
			eventhandler(env, caption);

		/* sleep for a while */
		nanosleep(&t, NULL);

	    if (env->in) {
			struct video_dec_desc *v = env->in;

			/*
			 * While there is something to display, call the decoder and free
			 * the buffer, possibly enabling the receiver to store new data.
			 */
			while (v->dec_in_dpy) {
				struct fbuf_t *tmp = v->dec_in_dpy;	/* store current pointer */

				/* decode the frame, but show it only if not frozen */
				if (v->d_callbacks->dec_run(v, tmp) && !env->frame_freeze)
					show_frame(env, WIN_REMOTE);
				tmp->used = 0;	/* mark buffer as free */
				tmp->ebit = 0;
				ast_mutex_lock(&env->dec_lock);
				if (++v->dec_in_dpy == &v->dec_in[N_DEC_IN])	/* advance to next, circular */
					v->dec_in_dpy = &v->dec_in[0];

				if (v->dec_in_cur == NULL)	/* receiver was idle, enable it... */
					v->dec_in_cur = tmp;	/* using the slot just freed */
				else if (v->dec_in_dpy == v->dec_in_cur) /* this was the last slot */
					v->dec_in_dpy = NULL;	/* nothing more to display */
				ast_mutex_unlock(&env->dec_lock);
			}
		}

		if (env->shutdown)
			break;
		f = get_video_frames(env, &p);	/* read and display */
		if (!f)
			continue;
		chan = env->owner;
		if (chan == NULL) {
			/* drop the chain of frames, nobody uses them */
			while (f) {
				struct ast_frame *g = AST_LIST_NEXT(f, frame_list);
				ast_frfree(f);
				f = g;
			}
			continue;
		}
		ast_channel_lock(chan);

		/* AST_LIST_INSERT_TAIL is only good for one frame, cannot use here */
		if (ast_channel_readq(chan).first == NULL) {
			ast_channel_readq(chan).first = f;
		} else {
			ast_channel_readq(chan).last->frame_list.next = f;
		}
		ast_channel_readq(chan).last = p;
		/*
		 * more or less same as ast_queue_frame, but extra
		 * write on the alertpipe to signal frames.
		 */
		if (ast_channel_alertable(chan)) {
			for (p = f; p; p = AST_LIST_NEXT(p, frame_list)) {
				if (ast_channel_alert(chan)) {
					ast_log(LOG_WARNING, "Unable to write to alert pipe on %s, frametype/subclass %d/%d: %s!\n",
						ast_channel_name(chan), f->frametype, f->subclass, strerror(errno));
				}
			}
		}
		ast_channel_unlock(chan);
	}
	/* thread terminating, here could call the uninit */
	/* uninitialize the local and remote video environments */
	env->in = dec_uninit(env->in);
	video_out_uninit(env);

	if (env->gui)
		env->gui = cleanup_sdl(env->gui, env->out.device_num);
	ast_mutex_destroy(&env->dec_lock);
	env->shutdown = 0;
	return NULL;
}

static void copy_geometry(struct fbuf_t *src, struct fbuf_t *dst)
{
	if (dst->w == 0)
		dst->w = src->w;
	if (dst->h == 0)
		dst->h = src->h;
}

/*! initialize the video environment.
 * Apart from the formats (constant) used by sdl and the codec,
 * we use enc_in as the basic geometry.
 */
static void init_env(struct video_desc *env)
{
	struct fbuf_t *c = &(env->out.loc_src_geometry);		/* local source */
	struct fbuf_t *ei = &(env->enc_in);		/* encoder input */
	struct fbuf_t *ld = &(env->loc_dpy);	/* local display */
	struct fbuf_t *rd = &(env->rem_dpy);		/* remote display */
	int i; /* integer working as iterator */

	c->pix_fmt = PIX_FMT_YUV420P;	/* default - camera format */
	ei->pix_fmt = PIX_FMT_YUV420P;	/* encoder input */
	if (ei->w == 0 || ei->h == 0) {
		ei->w = 352;
		ei->h = 288;
	}
	ld->pix_fmt = rd->pix_fmt = PIX_FMT_YUV420P; /* sdl format */
	/* inherit defaults */
	copy_geometry(ei, c);	/* camera inherits from encoder input */
	copy_geometry(ei, rd);	/* remote display inherits from encoder input */
	copy_geometry(rd, ld);	/* local display inherits from remote display */

	/* fix the size of buffers for small windows */
	for (i = 0; i < env->out.device_num; i++) {
		env->src_dpy[i].pix_fmt = PIX_FMT_YUV420P;
		env->src_dpy[i].w = SRC_WIN_W;
		env->src_dpy[i].h = SRC_WIN_H;
	}
	/* Now set the default coordinates for the picture-in-picture
	 * frame inside the enc_in buffer; these can be changed by dragging
	 * the picture-in-picture with a left click. */
	env->out.pip_x = ei->w - ei->w/3;
	env->out.pip_y = ei->h - ei->h/3;
}
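
As a quick check of these defaults: with the fallback 352x288 encoder geometry set above, integer division gives pip_x = 352 - 352/3 = 235 and pip_y = 288 - 288/3 = 192, i.e. the picture-in-picture window starts in the lower-right third of the encoder input.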

/*!
 * The first call to the video code, called by oss_new() or similar.
 * Here we initialize the various components we use, namely SDL for display,
 * ffmpeg for encoding/decoding, and a local video source.
 * We do our best to progress even if some of the components are not
 * available.
 */
void console_video_start(struct video_desc *env, struct ast_channel *owner)
{
	ast_log(LOG_WARNING, "env %p chan %p\n", env, owner);
	if (env == NULL)	/* video not initialized */
		return;
	env->owner = owner;	/* work even if no owner is specified */
	if (env->vthread)
		return;		/* already initialized, nothing to do */
	init_env(env);
	env->out.enc = map_config_video_format(env->codec_name);

	ast_log(LOG_WARNING, "start video out %s %dx%d\n",
		env->codec_name, env->enc_in.w,  env->enc_in.h);
	/*
	 * Register all codecs supported by the ffmpeg library.
	 * We only need to do this once, but it probably does no harm
	 * to do it multiple times.
	 */
	avcodec_init();
	avcodec_register_all();
	av_log_set_level(AV_LOG_ERROR);	/* only report errors */

	if (env->out.fps == 0) {
		env->out.fps = 15;
		ast_log(LOG_WARNING, "fps unset, forcing to %d\n", env->out.fps);
	}
	if (env->out.bitrate == 0) {
		env->out.bitrate = 65000;
		ast_log(LOG_WARNING, "bitrate unset, forcing to %d\n", env->out.bitrate);
	}
	/* create the thread as detached so memory is freed on termination */
	ast_pthread_create_detached_background(&env->vthread,
		NULL, video_thread, env);
}