void play_back() { int x,y; unsigned int h,w,bw,d; Window root; XEvent ev; int i=0; XGetGeometry(display,draw_win,&root,&x,&y,&w,&h,&bw,&d); if(mov_ind==0)return; if(h<movie[i].h||w<movie[i].w){ too_small(); return; } XCopyArea(display,movie[i].xi,draw_win,gc_graph,0,0,w,h,0,0); XFlush(display); while(1){ XNextEvent(display,&ev); switch(ev.type) { case ButtonPress: i++; if(i>=mov_ind)i=0; if(show_frame(i,h,w))return; break; case KeyPress: switch(get_key_press(&ev)){ case ESC: return; case RIGHT: i++; if(i>=mov_ind)i=0; if(show_frame(i,h,w))return; break; case LEFT: i--; if(i<0)i=mov_ind-1; if(show_frame(i,h,w))return; break; case HOME: i=0; if(show_frame(i,h,w))return; break; case END: i=mov_ind-1; if(show_frame(i,h,w))return; break; } } } }
/* Build an IPv4 transmit frame in 'frame' that replies to the peer
 * recorded in 'sock', copying 'length' payload bytes from 'data'.
 * Echoes the first payload line (up to CR) to stdout for debugging.
 * Returns the total frame length (header + payload). */
int prepare_response(Socket_t *sock, uint8_t *frame, uint8_t *data, int length)
{
    IPV4TX_header_t *txhdr = (IPV4TX_header_t *)frame;
    char *p = (char *)data;
    int remaining = length;

    printf("Response: ");
    while (remaining-- > 0 && *p != '\r')
        putchar(*p++);
    putchar('\n');

    /* reply goes back to the sender: swap source and destination */
    txhdr->apiid = ID_IPV4TX;
    txhdr->frameid = ++txframeid;
    memcpy(&txhdr->dstaddr, &sock->id.srcaddr, 4);
    memcpy(&txhdr->dstport, &sock->id.srcport, 2);
    memcpy(&txhdr->srcport, &sock->id.dstport, 2);
    txhdr->protocol = sock->protocol;
    txhdr->options = 0x01;              /* terminate after send */
    memcpy(txhdr->data, data, length);

    /* total on-the-wire length: payload plus header up to data[] */
    length += field_offset(IPV4TX_header_t, data);
#ifdef FRAME_TX_DEBUG
    printf("[TX %02x]", txframeid);
    show_frame(frame, length);
#endif
    return length;
}
static void send_at_command(int id, char *fmt, ...) { uint8_t frame[32]; ATCommand_t *atcmd = (ATCommand_t *)frame; va_list ap; int len; atcmd->apiid = ID_ATCOMMAND; atcmd->frameid = id; va_start(ap, fmt); len = field_offset(ATCommand_t, command) + vsprintf((char *)atcmd->command, fmt, ap); va_end(ap); #ifdef AT_DEBUG printf("AT frame:"); show_frame(frame, len); printf("AT '%s'", atcmd->command); #endif XbeeFrame_sendframe(mailbox, frame, len); #ifdef AT_DEBUG printf(" -- sent\n"); #endif }
/*! \brief read a frame from webcam or X11 through grabber_read(), * display it, then encode and split it. * Return a list of ast_frame representing the video fragments. * The head pointer is returned by the function, the tail pointer * is returned as an argument. */ static struct ast_frame *get_video_frames(struct video_desc *env, struct ast_frame **tail) { struct video_out_desc *v = &env->out; struct ast_frame *dummy; struct fbuf_t *loc_src = grabber_read(v); if (!loc_src) return NULL; /* can happen, e.g. we are reading too early */ if (tail == NULL) tail = &dummy; *tail = NULL; /* Scale the video for the encoder, then use it for local rendering * so we will see the same as the remote party. */ my_scale(loc_src, NULL, &env->enc_in, NULL); show_frame(env, WIN_LOCAL); if (!v->sendvideo) return NULL; if (v->enc_out.data == NULL) { static volatile int a = 0; if (a++ < 2) ast_log(LOG_WARNING, "fail, no encoder output buffer\n"); return NULL; } v->enc->enc_run(v); return v->enc->enc_encap(&v->enc_out, v->mtu, tail); }
/* main - the main routine */ int main(void) { XbeeFrameInit_t init; /* initialize the sockets */ memset(sockets, 0, sizeof(sockets)); printf("Starting frame driver\n"); if (XbeeFrame_start(&init, mailbox, XBEE_RX, XBEE_TX, XBEE_RTS, XBEE_BAUD) < 0) { printf("failed to start frame driver\n"); return 1; } /* set the port and ask for our IP address */ //send_at_command(0, "C0%x", PORT); // this doesn't seem to work for some reason send_at_command(1, "MY"); printf("Listening for frames\n"); while (1) { uint8_t *frame; int length; if ((frame = XbeeFrame_recvframe(mailbox, &length)) != NULL) { #ifdef FRAME_RX_DEBUG printf("[RX]"); show_frame(frame, length); #endif /* handle the frame */ switch (frame[0]) { case ID_IPV4RX: handle_ipv4_frame((IPV4RX_header_t *)frame, length); break; case ID_ATRESPONSE: handle_atresponse_frame((ATResponse_t *)frame, length); break; case ID_TXSTATUS: handle_txstatus_frame((TXStatus_t *)frame, length); break; default: break; } XbeeFrame_release(mailbox); } } return 0; }
/* Build and transmit an IPv4 reply frame addressed back to the sender
 * of rxhdr, carrying 'length' bytes of payload from 'data'.
 * The payload is clamped to the room available in the local frame
 * buffer so the copy can never overrun it. */
void send_response(XbeeFrame_t *mbox, IPV4RX_header *rxhdr, uint8_t *data, int length)
{
    uint8_t txframe[1024];
    IPV4TX_header *txhdr = (IPV4TX_header *)txframe;
    /* room left after the header; the header size includes a 1-byte
     * data[] placeholder, hence the -1 (same convention as the
     * 'length += sizeof(IPV4TX_header) - 1' below) */
    int max_payload = (int)(sizeof(txframe) - (sizeof(IPV4TX_header) - 1));

    /* FIX: clamp so the memcpy below cannot overflow txframe[];
     * the original copied 'length' bytes unchecked */
    if (length > max_payload)
        length = max_payload;
    if (length < 0)
        length = 0;

    txhdr->apiid = ID_IPV4TX;
    txhdr->frameid = 0x42;
    /* reply goes back to the sender: swap source and destination */
    memcpy(&txhdr->dstaddr, &rxhdr->srcaddr, 4);
    memcpy(&txhdr->dstport, &rxhdr->srcport, 2);
    memcpy(&txhdr->srcport, &rxhdr->dstport, 2);
    txhdr->protocol = rxhdr->protocol;
    txhdr->options = 0x00; // don't terminate after send
    //txhdr->options = 0x01; // terminate after send
    memcpy(txhdr->data, data, length);

    length += sizeof(IPV4TX_header) - 1;
    printf("[TX]");
    show_frame(txframe, length);
    XbeeFrame_sendframe(mbox, txframe, length);
}
/* Cycle through num_buf display buffers for ITERATIONS frames,
 * optionally waiting for vertical sync after each frame.
 * Returns 0 on success or the failing call's error code. */
int movie(int num_buf, int wait_vsync)
{
    int buf_no = 0;
    int k, ret;

    for (k = 0; k < ITERATIONS; k++) {
        ret = show_frame(buf_no);
        if (ret)
            return ret;
        if (wait_vsync) {
            ret = ioctl(fd, OMAPFB_WAITFORVSYNC, 0);
            if (ret) {
                printf("\n ioctl OMAPFB_WAITFORVSYNC failed");
                return ret;
            }
        }
        /* FIX: advance to the next buffer without undefined behavior.
         * The original 'buf_no = (++buf_no) % num_buf' modified buf_no
         * twice with no intervening sequence point, which is UB in C. */
        buf_no = (buf_no + 1) % num_buf;
    }
    return 0;
}
/*
 * Helper thread to periodically poll the video sources and enqueue the
 * generated frames directed to the remote party to the channel's queue.
 * Using a separate thread also helps because the encoding can be
 * computationally expensive so we don't want to starve the main thread.
 */
static void *video_thread(void *arg)
{
	struct video_desc *env = arg;
	int count = 0;			/* pass counter: caption is refreshed every 10 passes */
	char save_display[128] = "";	/* saved DISPLAY value, restored after SDL_Init */
	int i;	/* integer variable used as iterator */

	/* if sdl_videodriver is set, override the environment. Also,
	 * if it contains 'console' override DISPLAY around the call to SDL_Init
	 * so we use the console as opposed to the x11 version of aalib
	 */
	if (!ast_strlen_zero(env->sdl_videodriver)) { /* override */
		const char *s = getenv("DISPLAY");
		setenv("SDL_VIDEODRIVER", env->sdl_videodriver, 1);
		if (s && !strcasecmp(env->sdl_videodriver, "aalib-console")) {
			ast_copy_string(save_display, s, sizeof(save_display));
			unsetenv("DISPLAY");
		}
	}
	sdl_setup(env);
	if (!ast_strlen_zero(save_display)) {
		setenv("DISPLAY", save_display, 1);
	}

	ast_mutex_init(&env->dec_lock);	/* used to sync decoder and renderer */

	if (grabber_open(&env->out)) {
		ast_log(LOG_WARNING, "cannot open local video source\n");
	}

	if (env->out.device_num) {
		/* the primary device initially provides both the primary and
		 * secondary views */
		env->out.devices[env->out.device_primary].status_index |= IS_PRIMARY | IS_SECONDARY;
	}

	/* even if no device is connected, we must call video_out_init,
	 * as some of the data structures it initializes are
	 * used in get_video_frames() */
	video_out_init(env);

	/* Writes initial status of the sources. */
	if (env->gui) {
		for (i = 0; i < env->out.device_num; i++) {
			print_message(env->gui->thumb_bd_array[i].board,
				src_msgs[env->out.devices[i].status_index]);
		}
	}

	for (;;) {
		struct timespec t = { 0, 50000000 };	/* XXX 20 times/sec */
		struct ast_frame *p, *f;
		struct ast_channel *chan;
		int fd;
		char *caption = NULL, buf[160];

		/* determine if video format changed */
		if (count++ % 10 == 0) {
			if (env->out.sendvideo && env->out.devices) {
				snprintf(buf, sizeof(buf), "%s %s %dx%d @@ %dfps %dkbps",
					env->out.devices[env->out.device_primary].name,
					env->codec_name, env->enc_in.w, env->enc_in.h,
					env->out.fps, env->out.bitrate / 1000);
			} else {
				sprintf(buf, "hold");
			}
			caption = buf;
		}

		/* manage keypad events */
		/* XXX here we should always check for events,
		 * otherwise the drag will not work */
		if (env->gui)
			eventhandler(env, caption);

		/* sleep for a while */
		nanosleep(&t, NULL);

		if (env->in) {
			struct video_dec_desc *v = env->in;

			/*
			 * While there is something to display, call the decoder and free
			 * the buffer, possibly enabling the receiver to store new data.
			 */
			while (v->dec_in_dpy) {
				struct fbuf_t *tmp = v->dec_in_dpy;	/* store current pointer */

				/* decode the frame, but show it only if not frozen */
				if (v->d_callbacks->dec_run(v, tmp) && !env->frame_freeze)
					show_frame(env, WIN_REMOTE);
				tmp->used = 0;	/* mark buffer as free */
				tmp->ebit = 0;
				ast_mutex_lock(&env->dec_lock);
				if (++v->dec_in_dpy == &v->dec_in[N_DEC_IN])	/* advance to next, circular */
					v->dec_in_dpy = &v->dec_in[0];
				if (v->dec_in_cur == NULL)	/* receiver was idle, enable it... */
					v->dec_in_cur = tmp;	/* using the slot just freed */
				else if (v->dec_in_dpy == v->dec_in_cur)	/* this was the last slot */
					v->dec_in_dpy = NULL;	/* nothing more to display */
				ast_mutex_unlock(&env->dec_lock);
			}
		}

		if (env->shutdown)
			break;
		f = get_video_frames(env, &p);	/* read and display */
		if (!f)
			continue;
		chan = env->owner;
		if (chan == NULL) {
			/* drop the chain of frames, nobody uses them */
			while (f) {
				struct ast_frame *g = AST_LIST_NEXT(f, frame_list);
				ast_frfree(f);
				f = g;
			}
			continue;
		}
		fd = chan->alertpipe[1];
		ast_channel_lock(chan);

		/* AST_LIST_INSERT_TAIL is only good for one frame, cannot use here */
		if (chan->readq.first == NULL) {
			chan->readq.first = f;
		} else {
			chan->readq.last->frame_list.next = f;
		}
		chan->readq.last = p;
		/*
		 * more or less same as ast_queue_frame, but extra
		 * write on the alertpipe to signal frames.
		 */
		if (fd > -1) {
			int blah = 1, l = sizeof(blah);
			for (p = f; p; p = AST_LIST_NEXT(p, frame_list)) {
				if (write(fd, &blah, l) != l)
					ast_log(LOG_WARNING, "Unable to write to alert pipe on %s, frametype/subclass %d/%d: %s!\n",
						chan->name, f->frametype, f->subclass, strerror(errno));
			}
		}
		ast_channel_unlock(chan);
	}
	/* thread terminating, here could call the uninit */
	/* uninitialize the local and remote video environments */
	env->in = dec_uninit(env->in);
	video_out_uninit(env);

	if (env->gui)
		env->gui = cleanup_sdl(env->gui, env->out.device_num);
	ast_mutex_destroy(&env->dec_lock);
	env->shutdown = 0;
	return NULL;
}
/*! \brief refreshes the buffers of all the devices by calling
 * grabber_read() on each device in the device table.
 * It encodes the primary source buffer; if picture-in-picture mode is
 * enabled it also encodes (into the buffer to split) the secondary
 * source buffer. The encoded buffer is split to build the local and
 * the remote view.
 * Return a list of ast_frame representing the video fragments.
 * The head pointer is returned by the function, the tail pointer
 * is returned as an argument.
 *
 * \param env = video environment descriptor
 * \param tail = tail pointer (practically a return value)
 */
static struct ast_frame *get_video_frames(struct video_desc *env, struct ast_frame **tail)
{
	struct video_out_desc *v = &env->out;
	struct ast_frame *dummy;
	struct fbuf_t *loc_src_primary = NULL, *p_read;
	int i;

	/* if no device was found in the config file */
	if (!env->out.device_num)
		return NULL;

	/* every time this function is called we refresh the buffers of every
	 * device, updating the private device buffer in the device table */
	for (i = 0; i < env->out.device_num; i++) {
		p_read = grabber_read(&env->out.devices[i], env->out.fps);
		/* it is used only if different from NULL, we maintain
		 * the last good buffer otherwise */
		if (p_read)
			env->out.devices[i].dev_buf = p_read;
	}

	/* select the primary device buffer as the one to encode */
	loc_src_primary = env->out.devices[env->out.device_primary].dev_buf;
	/* loc_src_primary can be NULL if the device has been turned off
	 * during execution or it is read too early */
	if (loc_src_primary) {
		/* Scale the video for the encoder, then use it for local
		 * rendering so we will see the same as the remote party */
		my_scale(loc_src_primary, NULL, &env->enc_in, NULL);
	}

	if (env->out.picture_in_picture) { /* the picture in picture mode is enabled */
		struct fbuf_t *loc_src_secondary;
		/* reads from the secondary source */
		loc_src_secondary = env->out.devices[env->out.device_secondary].dev_buf;
		if (loc_src_secondary) {
			/* temporarily set the PiP sub-window inside enc_in */
			env->enc_in.win_x = env->out.pip_x;
			env->enc_in.win_y = env->out.pip_y;
			env->enc_in.win_w = env->enc_in.w/3;
			env->enc_in.win_h = env->enc_in.h/3;
			/* scales to the correct geometry and inserts the
			 * picture-in-picture into the enc_in buffer */
			my_scale(loc_src_secondary, NULL, &env->enc_in, NULL);
			/* returns to normal parameters (not picture in picture) */
			env->enc_in.win_x = 0;
			env->enc_in.win_y = 0;
			env->enc_in.win_w = 0;
			env->enc_in.win_h = 0;
		} else {
			/* loc_src_secondary can be NULL if the device has been
			 * turned off during execution or it is read too early */
			env->out.picture_in_picture = 0; /* disable picture in picture */
		}
	}

	show_frame(env, WIN_LOCAL);	/* local rendering */
	for (i = 0; i < env->out.device_num; i++)
		show_frame(env, i+WIN_SRC1);	/* rendering of every source device in thumbnails */

	if (tail == NULL)
		tail = &dummy;
	*tail = NULL;

	/* if no reason for encoding, do not encode */
	if (!env->owner || !loc_src_primary || !v->sendvideo)
		return NULL;

	if (v->enc_out.data == NULL) {
		static volatile int a = 0;
		if (a++ < 2)
			ast_log(LOG_WARNING, "fail, no encoder output buffer\n");
		return NULL;
	}
	v->enc->enc_run(v);
	return v->enc->enc_encap(&v->enc_out, v->mtu, tail);
}
/* * Helper thread to periodically poll the video source and enqueue the * generated frames to the channel's queue. * Using a separate thread also helps because the encoding can be * computationally expensive so we don't want to starve the main thread. */ static void *video_thread(void *arg) { struct video_desc *env = arg; int count = 0; char save_display[128] = ""; /* if sdl_videodriver is set, override the environment. Also, * if it contains 'console' override DISPLAY around the call to SDL_Init * so we use the console as opposed to the x11 version of aalib */ if (!ast_strlen_zero(env->sdl_videodriver)) { /* override */ const char *s = getenv("DISPLAY"); setenv("SDL_VIDEODRIVER", env->sdl_videodriver, 1); if (s && !strcasecmp(env->sdl_videodriver, "aalib-console")) { ast_copy_string(save_display, s, sizeof(save_display)); unsetenv("DISPLAY"); } } sdl_setup(env); if (!ast_strlen_zero(save_display)) setenv("DISPLAY", save_display, 1); /* initialize grab coordinates */ env->out.loc_src_geometry.x = 0; env->out.loc_src_geometry.y = 0; ast_mutex_init(&env->dec_lock); /* used to sync decoder and renderer */ if (grabber_open(&env->out)) { ast_log(LOG_WARNING, "cannot open local video source\n"); } else { #if 0 /* In principle, try to register the fd. * In practice, many webcam drivers do not support select/poll, * so don't bother and instead read periodically from the * video thread. 
*/ if (env->out.fd >= 0) ast_channel_set_fd(env->owner, 1, env->out.fd); #endif video_out_init(env); } for (;;) { struct timeval t = { 0, 50000 }; /* XXX 20 times/sec */ struct ast_frame *p, *f; struct ast_channel *chan; int fd; char *caption = NULL, buf[160]; /* determine if video format changed */ if (count++ % 10 == 0) { if (env->out.sendvideo) sprintf(buf, "%s %s %dx%d @@ %dfps %dkbps", env->out.videodevice, env->codec_name, env->enc_in.w, env->enc_in.h, env->out.fps, env->out.bitrate/1000); else sprintf(buf, "hold"); caption = buf; } /* manage keypad events */ /* XXX here we should always check for events, * otherwise the drag will not work */ if (env->gui) eventhandler(env, caption); /* sleep for a while */ ast_select(0, NULL, NULL, NULL, &t); if (env->in) { struct video_dec_desc *v = env->in; /* * While there is something to display, call the decoder and free * the buffer, possibly enabling the receiver to store new data. */ while (v->dec_in_dpy) { struct fbuf_t *tmp = v->dec_in_dpy; /* store current pointer */ if (v->d_callbacks->dec_run(v, tmp)) show_frame(env, WIN_REMOTE); tmp->used = 0; /* mark buffer as free */ tmp->ebit = 0; ast_mutex_lock(&env->dec_lock); if (++v->dec_in_dpy == &v->dec_in[N_DEC_IN]) /* advance to next, circular */ v->dec_in_dpy = &v->dec_in[0]; if (v->dec_in_cur == NULL) /* receiver was idle, enable it... 
*/ v->dec_in_cur = tmp; /* using the slot just freed */ else if (v->dec_in_dpy == v->dec_in_cur) /* this was the last slot */ v->dec_in_dpy = NULL; /* nothing more to display */ ast_mutex_unlock(&env->dec_lock); } } if (env->shutdown) break; f = get_video_frames(env, &p); /* read and display */ if (!f) continue; chan = env->owner; if (chan == NULL) continue; fd = chan->alertpipe[1]; ast_channel_lock(chan); /* AST_LIST_INSERT_TAIL is only good for one frame, cannot use here */ if (chan->readq.first == NULL) { chan->readq.first = f; } else { chan->readq.last->frame_list.next = f; } chan->readq.last = p; /* * more or less same as ast_queue_frame, but extra * write on the alertpipe to signal frames. */ if (fd > -1) { int blah = 1, l = sizeof(blah); for (p = f; p; p = AST_LIST_NEXT(p, frame_list)) { if (write(fd, &blah, l) != l) ast_log(LOG_WARNING, "Unable to write to alert pipe on %s, frametype/subclass %d/%d: %s!\n", chan->name, f->frametype, f->subclass, strerror(errno)); } } ast_channel_unlock(chan); } /* thread terminating, here could call the uninit */ /* uninitialize the local and remote video environments */ env->in = dec_uninit(env->in); video_out_uninit(env); if (env->gui) env->gui = cleanup_sdl(env->gui); ast_mutex_destroy(&env->dec_lock); env->shutdown = 0; return NULL; }
/* Render one frame from the given render context and pause briefly.
 * Always returns 0 (success). */
int video_output(render_context_type *rc) {
	show_frame(rc);
	SLEEP(1);  /* throttle the output loop */
	return 0;
}
/* * Helper thread to periodically poll the video sources and enqueue the * generated frames directed to the remote party to the channel's queue. * Using a separate thread also helps because the encoding can be * computationally expensive so we don't want to starve the main thread. */ static void *video_thread(void *arg) { struct video_desc *env = arg; int count = 0; char save_display[128] = ""; int i; /* integer variable used as iterator */ /* if sdl_videodriver is set, override the environment. Also, * if it contains 'console' override DISPLAY around the call to SDL_Init * so we use the console as opposed to the x11 version of aalib */ if (!ast_strlen_zero(env->sdl_videodriver)) { /* override */ const char *s = getenv("DISPLAY"); setenv("SDL_VIDEODRIVER", env->sdl_videodriver, 1); if (s && !strcasecmp(env->sdl_videodriver, "aalib-console")) { ast_copy_string(save_display, s, sizeof(save_display)); unsetenv("DISPLAY"); } } sdl_setup(env); if (!ast_strlen_zero(save_display)) { setenv("DISPLAY", save_display, 1); } ast_mutex_init(&env->dec_lock); /* used to sync decoder and renderer */ if (grabber_open(&env->out)) { ast_log(LOG_WARNING, "cannot open local video source\n"); } if (env->out.device_num) { env->out.devices[env->out.device_primary].status_index |= IS_PRIMARY | IS_SECONDARY; } /* even if no device is connected, we must call video_out_init, * as some of the data structures it initializes are * used in get_video_frames() */ video_out_init(env); /* Writes intial status of the sources. 
*/ if (env->gui) { for (i = 0; i < env->out.device_num; i++) { print_message(env->gui->thumb_bd_array[i].board, src_msgs[env->out.devices[i].status_index]); } } for (;;) { struct timespec t = { 0, 50000000 }; /* XXX 20 times/sec */ struct ast_frame *p, *f; struct ast_channel *chan; int fd; char *caption = NULL, buf[160]; /* determine if video format changed */ if (count++ % 10 == 0) { if (env->out.sendvideo && env->out.devices) { snprintf(buf, sizeof(buf), "%s %s %dx%d @@ %dfps %dkbps", env->out.devices[env->out.device_primary].name, env->codec_name, env->enc_in.w, env->enc_in.h, env->out.fps, env->out.bitrate / 1000); } else { sprintf(buf, "hold"); } caption = buf; } /* manage keypad events */ /* XXX here we should always check for events, * otherwise the drag will not work */ if (env->gui) eventhandler(env, caption); /* sleep for a while */ nanosleep(&t, NULL); if (env->in) { struct video_dec_desc *v = env->in; /* * While there is something to display, call the decoder and free * the buffer, possibly enabling the receiver to store new data. */ while (v->dec_in_dpy) { struct fbuf_t *tmp = v->dec_in_dpy; /* store current pointer */ /* decode the frame, but show it only if not frozen */ if (v->d_callbacks->dec_run(v, tmp) && !env->frame_freeze) show_frame(env, WIN_REMOTE); tmp->used = 0; /* mark buffer as free */ tmp->ebit = 0; ast_mutex_lock(&env->dec_lock); if (++v->dec_in_dpy == &v->dec_in[N_DEC_IN]) /* advance to next, circular */ v->dec_in_dpy = &v->dec_in[0]; if (v->dec_in_cur == NULL) /* receiver was idle, enable it... 
*/ v->dec_in_cur = tmp; /* using the slot just freed */ else if (v->dec_in_dpy == v->dec_in_cur) /* this was the last slot */ v->dec_in_dpy = NULL; /* nothing more to display */ ast_mutex_unlock(&env->dec_lock); } } if (env->shutdown) break; f = get_video_frames(env, &p); /* read and display */ if (!f) continue; chan = env->owner; if (chan == NULL) { /* drop the chain of frames, nobody uses them */ while (f) { struct ast_frame *g = AST_LIST_NEXT(f, frame_list); ast_frfree(f); f = g; } continue; } ast_channel_lock(chan); /* AST_LIST_INSERT_TAIL is only good for one frame, cannot use here */ if (ast_channel_readq(chan).first == NULL) { ast_channel_readq(chan).first = f; } else { ast_channel_readq(chan).last->frame_list.next = f; } ast_channel_readq(chan).last = p; /* * more or less same as ast_queue_frame, but extra * write on the alertpipe to signal frames. */ if (ast_channel_alertable(chan)) { for (p = f; p; p = AST_LIST_NEXT(p, frame_list)) { if (ast_channel_alert(chan)) { ast_log(LOG_WARNING, "Unable to write to alert pipe on %s, frametype/subclass %d/%d: %s!\n", ast_channel_name(chan), f->frametype, f->subclass, strerror(errno)); } } ast_channel_unlock(chan); } /* thread terminating, here could call the uninit */ /* uninitialize the local and remote video environments */ env->in = dec_uninit(env->in); video_out_uninit(env); if (env->gui) env->gui = cleanup_sdl(env->gui, env->out.device_num); ast_mutex_destroy(&env->dec_lock); env->shutdown = 0; return NULL; } static void copy_geometry(struct fbuf_t *src, struct fbuf_t *dst) { if (dst->w == 0) dst->w = src->w; if (dst->h == 0) dst->h = src->h; } /*! initialize the video environment. * Apart from the formats (constant) used by sdl and the codec, * we use enc_in as the basic geometry. 
*/
static void init_env(struct video_desc *env)
{
	struct fbuf_t *cam  = &(env->out.loc_src_geometry);	/* local source */
	struct fbuf_t *enc  = &(env->enc_in);			/* encoder input */
	struct fbuf_t *ldpy = &(env->loc_dpy);			/* local display */
	struct fbuf_t *rdpy = &(env->rem_dpy);			/* remote display */
	int idx;

	/* every buffer uses the YUV420P layout */
	cam->pix_fmt = PIX_FMT_YUV420P;	/* default - camera format */
	enc->pix_fmt = PIX_FMT_YUV420P;	/* encoder input */
	if (enc->w == 0 || enc->h == 0) {
		/* no geometry configured: fall back to CIF */
		enc->w = 352;
		enc->h = 288;
	}
	ldpy->pix_fmt = rdpy->pix_fmt = PIX_FMT_YUV420P;	/* sdl format */

	/* inherit defaults */
	copy_geometry(enc, cam);	/* camera inherits from encoder input */
	copy_geometry(enc, rdpy);	/* remote display inherits from encoder input */
	copy_geometry(rdpy, ldpy);	/* local display inherits from remote display */

	/* fix the size of buffers for small windows */
	for (idx = 0; idx < env->out.device_num; idx++) {
		env->src_dpy[idx].pix_fmt = PIX_FMT_YUV420P;
		env->src_dpy[idx].w = SRC_WIN_W;
		env->src_dpy[idx].h = SRC_WIN_H;
	}

	/* default picture-in-picture position: bottom-right third of the
	 * encoder input frame; it can be moved later by dragging the
	 * picture-in-picture with a left click */
	env->out.pip_x = enc->w - enc->w/3;
	env->out.pip_y = enc->h - enc->h/3;
}

/*!
 * The first call to the video code, called by oss_new() or similar.
 * Here we initialize the various components we use, namely SDL for display,
 * ffmpeg for encoding/decoding, and a local video source.
 * We do our best to progress even if some of the components are not
 * available.
*/
void console_video_start(struct video_desc *env, struct ast_channel *owner)
{
	ast_log(LOG_WARNING, "env %p chan %p\n", env, owner);
	if (env == NULL)	/* video not initialized */
		return;
	env->owner = owner;	/* work even if no owner is specified */
	if (env->vthread)
		return;		/* already initialized, nothing to do */

	init_env(env);
	env->out.enc = map_config_video_format(env->codec_name);

	ast_log(LOG_WARNING, "start video out %s %dx%d\n",
		env->codec_name, env->enc_in.w, env->enc_in.h);

	/*
	 * Register all codecs supported by the ffmpeg library.
	 * We only need to do it once, but probably doesn't
	 * harm to do it multiple times.
	 */
	avcodec_init();
	avcodec_register_all();
	av_log_set_level(AV_LOG_ERROR);	/* only report errors */

	if (env->out.fps == 0) {
		env->out.fps = 15;
		ast_log(LOG_WARNING, "fps unset, forcing to %d\n", env->out.fps);
	}
	if (env->out.bitrate == 0) {
		env->out.bitrate = 65000;
		ast_log(LOG_WARNING, "bitrate unset, forcing to %d\n", env->out.bitrate);
	}
	/* create the thread as detached so memory is freed on termination */
	ast_pthread_create_detached_background(&env->vthread,
		NULL, video_thread, env);
}