/* ast_list_tail: return the last node of a list, or NULL when the list is empty. */
AST *ast_list_tail ( AST *list )
{
	AST *node = list;

	if (!node) {
		return NULL;
	}
	ASSERT(AST_IS_LIST(node));
	/* Walk forward until there is no next node. */
	while (AST_LIST_NEXT(node)) {
		node = AST_LIST_NEXT(node);
	}
	return node;
}
/*
 * Generator callback: pull audio captured by the chanspy audiohook and
 * write it to the spying channel, optionally also dumping the raw data
 * to a file descriptor.
 *
 * Returns 0 on success (or no audio available), -1 when the audiohook
 * has stopped running or writing to the channel fails, which tells the
 * core to deactivate the generator.
 */
static int spy_generate(struct ast_channel *chan, void *data, int len, int samples)
{
	struct chanspy_translation_helper *csth = data;
	struct ast_frame *f, *cur;

	ast_audiohook_lock(&csth->spy_audiohook);
	if (csth->spy_audiohook.status != AST_AUDIOHOOK_STATUS_RUNNING) {
		/* Hook is no longer running; stop generating. */
		ast_audiohook_unlock(&csth->spy_audiohook);
		return -1;
	}

	/* Read a mix of both directions of the spied-on channel. */
	f = ast_audiohook_read_frame(&csth->spy_audiohook, samples, AST_AUDIOHOOK_DIRECTION_BOTH, AST_FORMAT_SLINEAR);

	ast_audiohook_unlock(&csth->spy_audiohook);

	if (!f)
		return 0;

	/* The hook may return a chain of frames; write out each one. */
	for (cur = f; cur; cur = AST_LIST_NEXT(cur, frame_list)) {
		if (ast_write(chan, cur)) {
			ast_frfree(f);
			return -1;
		}

		if (csth->fd) {
			/* NOTE(review): short writes are not retried here. */
			if (write(csth->fd, cur->data, cur->datalen) < 0) {
				ast_log(LOG_WARNING, "write() failed: %s\n", strerror(errno));
			}
		}
	}
	ast_frfree(f);
	return 0;
}
/*! \brief encode and produce a frame */
static struct ast_frame *lintocodec2_frameout(struct ast_trans_pvt *pvt)
{
	struct codec2_translator_pvt *tmp = pvt->pvt;
	struct ast_frame *result = NULL;
	struct ast_frame *last = NULL;
	int samples = 0; /* output samples */

	/* Consume buffered audio one codec2 frame at a time, chaining the
	 * encoded frames into a frame list rooted at 'result'. */
	while (pvt->samples >= CODEC2_SAMPLES) {
		struct ast_frame *current;

		/* Encode a frame of data */
		codec2_encode(tmp->state, pvt->outbuf.uc, tmp->buf + samples);

		samples += CODEC2_SAMPLES;
		pvt->samples -= CODEC2_SAMPLES;

		current = ast_trans_frameout(pvt, CODEC2_FRAME_LEN, CODEC2_SAMPLES);

		if (!current) {
			/* Frame could not be produced; skip linking, keep encoding. */
			continue;
		} else if (last) {
			AST_LIST_NEXT(last, frame_list) = current;
		} else {
			result = current;
		}
		last = current;
	}

	/* Move the data at the end of the buffer to the front
	 * (samples are 16-bit, hence the * 2 byte count). */
	if (samples) {
		memmove(tmp->buf, tmp->buf + samples, pvt->samples * 2);
	}

	return result;
}
/*
 * GROUP() read: copy into buf the group assigned to the channel.  When a
 * category is supplied in 'data', only an assignment in that category
 * matches; otherwise the first assignment for the channel wins.
 * Returns 0 on success, -1 when the channel has no matching assignment.
 */
static int group_function_read(struct ast_channel *chan, const char *cmd, char *data, char *buf, size_t len)
{
	int ret = -1;
	struct ast_group_info *gi;

	ast_app_group_list_rdlock();

	gi = ast_app_group_list_head();
	while (gi) {
		if (gi->chan == chan) {
			if (ast_strlen_zero(data)) {
				/* No category requested: first assignment wins. */
				break;
			}
			if (!ast_strlen_zero(gi->category) && !strcasecmp(gi->category, data)) {
				/* Category matches the requested one. */
				break;
			}
		}
		gi = AST_LIST_NEXT(gi, group_list);
	}

	if (gi) {
		ast_copy_string(buf, gi->group, len);
		ret = 0;
	}

	ast_app_group_list_unlock();

	return ret;
}
/*
 * GROUP_LIST() read: build a space-separated list of all group
 * assignments for the channel ("group" or "group@category" per entry)
 * into buf.  Returns 0 on success, -1 when no channel is given.
 */
static int group_list_function_read(struct ast_channel *chan, const char *cmd, char *data, char *buf, size_t len)
{
	struct ast_group_info *gi = NULL;
	char tmp1[1024] = "";
	char tmp2[1024] = "";

	if (!chan)
		return -1;

	ast_app_group_list_rdlock();

	for (gi = ast_app_group_list_head(); gi; gi = AST_LIST_NEXT(gi, group_list)) {
		if (gi->chan != chan)
			continue;
		if (!ast_strlen_zero(tmp1)) {
			/* Already have entries: append to a copy to avoid
			 * snprintf reading and writing the same buffer. */
			ast_copy_string(tmp2, tmp1, sizeof(tmp2));
			if (!ast_strlen_zero(gi->category))
				snprintf(tmp1, sizeof(tmp1), "%s %s@%s", tmp2, gi->group, gi->category);
			else
				snprintf(tmp1, sizeof(tmp1), "%s %s", tmp2, gi->group);
		} else {
			/* First entry for this channel. */
			if (!ast_strlen_zero(gi->category))
				snprintf(tmp1, sizeof(tmp1), "%s@%s", gi->group, gi->category);
			else
				snprintf(tmp1, sizeof(tmp1), "%s", gi->group);
		}
	}

	ast_app_group_list_unlock();

	ast_copy_string(buf, tmp1, len);

	return 0;
}
/* Helper function to find a specific speech recognition result by number and nbest alternative */
/*
 * result_num is either "N" (result N of nbest alternative 0) or
 * "nbest/N".  NOTE: the '/' in result_num is overwritten with '\0',
 * i.e. the argument string is modified in place.
 * Returns the matching result, or NULL when the list is empty or the
 * requested entry does not exist.
 */
static struct ast_speech_result *find_result(struct ast_speech_result *results, char *result_num)
{
	struct ast_speech_result *result = results;
	char *tmp = NULL;
	int nbest_num = 0, wanted_num = 0, i = 0;

	if (!result) {
		return NULL;
	}

	if ((tmp = strchr(result_num, '/'))) {
		/* Split "nbest/result" at the slash. */
		*tmp++ = '\0';
		nbest_num = atoi(result_num);
		wanted_num = atoi(tmp);
	} else {
		wanted_num = atoi(result_num);
	}

	/* Count only entries belonging to the requested nbest alternative;
	 * stop at the wanted index.  Falls out with result == NULL when the
	 * list is exhausted first. */
	do {
		if (result->nbest_num != nbest_num)
			continue;
		if (i == wanted_num)
			break;
		i++;
	} while ((result = AST_LIST_NEXT(result, list)));

	return result;
}
/*! * \internal * \brief Convert the allocated regions hash table to a list. * * \param list Fill list with the allocated regions. * * \details * Take all allocated regions from the regions[] and put them * into the list. * * \note reglock must be locked before calling. * * \note This function is destructive to the regions[] lists. * * \return Length of list created. */ static size_t mm_atexit_hash_list(struct region_list *list) { struct ast_region *reg; size_t total_length; int idx; total_length = 0; for (idx = 0; idx < ARRAY_LEN(regions); ++idx) { while ((reg = regions[idx])) { regions[idx] = AST_LIST_NEXT(reg, node); AST_LIST_NEXT(reg, node) = NULL; AST_LIST_INSERT_HEAD(list, reg, node); ++total_length; } } return total_length; }
/*!
 * \internal
 * \brief Put the regions list into the allocated regions hash table.
 *
 * \param list List to put into the allocated regions hash table.
 *
 * \note reglock must be locked before calling.
 *
 * \return Nothing
 */
static void mm_atexit_hash_restore(struct region_list *list)
{
	for (;;) {
		struct ast_region *reg = AST_LIST_REMOVE_HEAD(list, node);
		int hash;

		if (!reg) {
			break;
		}
		/* Push the region back onto its hash bucket. */
		hash = HASH(reg->data);
		AST_LIST_NEXT(reg, node) = regions[hash];
		regions[hash] = reg;
	}
}
/* ast_list_find_p: return the first list item for which func() returns
 * true (passing 'data' through), or NULL when nothing matches. */
AST *ast_list_find_p ( AST *list, ASTListFindFunc func, gpointer data )
{
	AST *l;

	for (l = list; l; l = AST_LIST_NEXT(l)) {
		AST *item = AST_LIST_ITEM(l);

		if (func(item, data)) {
			return item;
		}
	}
	return NULL;
}
/*! * \internal * \brief Check the fences of all regions currently allocated. * * \return Nothing */ static void regions_check_all_fences(void) { int idx; struct ast_region *reg; ast_mutex_lock(®lock); for (idx = 0; idx < ARRAY_LEN(regions); ++idx) { for (reg = regions[idx]; reg; reg = AST_LIST_NEXT(reg, node)) { region_check_fences(reg); } } ast_mutex_unlock(®lock); }
/* ast_decl_get_member: look up the member of a declaration whose
 * identifier node is 'ident'; return it, or NULL when not present. */
AST *ast_decl_get_member ( AST *decl, AST *ident )
{
	AST *l;

	ASSERT(AST_IS_DECL(decl));
	ASSERT(AST_IS_IDENT(ident));

	for (l = AST_DECL_MEMBERS(decl); l != NULL; l = AST_LIST_NEXT(l)) {
		AST *member = AST_LIST_ITEM(l);

		/* Identifier nodes are compared by pointer identity. */
		if (AST_DECL_IDENT(member) == ident) {
			return member;
		}
	}
	return NULL;
}
/*!
 * \note reglock must be locked before calling.
 *
 * Look up the allocation region whose payload pointer is \a ptr in the
 * hash table; returns NULL when not found.
 */
static struct ast_region *region_find(void *ptr)
{
	struct ast_region *reg = regions[HASH(ptr)];

	while (reg && reg->data != ptr) {
		reg = AST_LIST_NEXT(reg, node);
	}
	return reg;
}
/*! * \internal * \brief Remove a region from the active regions. * * \param ptr Region payload data pointer. * * \retval region on success. * \retval NULL if not found. */ static struct ast_region *region_remove(void *ptr) { int hash; struct ast_region *reg; struct ast_region *prev = NULL; hash = HASH(ptr); ast_mutex_lock(®lock); for (reg = regions[hash]; reg; reg = AST_LIST_NEXT(reg, node)) { if (reg->data == ptr) { if (prev) { AST_LIST_NEXT(prev, node) = AST_LIST_NEXT(reg, node); } else { regions[hash] = AST_LIST_NEXT(reg, node); } break; } prev = reg; } ast_mutex_unlock(®lock); return reg; }
/*
 * Generator callback (format-object API variant): pull audio captured by
 * the chanspy audiohook and write it to the spying channel, optionally
 * also dumping the raw data to a file descriptor.
 *
 * Returns 0 on success (or no audio available), -1 when the audiohook
 * has stopped or writing to the channel fails, which deactivates the
 * generator.
 */
static int spy_generate(struct ast_channel *chan, void *data, int len, int samples)
{
	struct chanspy_translation_helper *csth = data;
	struct ast_frame *f, *cur;
	struct ast_format format_slin;

	ast_format_set(&format_slin, AST_FORMAT_SLINEAR, 0);

	ast_audiohook_lock(&csth->spy_audiohook);
	if (csth->spy_audiohook.status != AST_AUDIOHOOK_STATUS_RUNNING) {
		/* Channel is already gone more than likely */
		ast_audiohook_unlock(&csth->spy_audiohook);
		return -1;
	}

	if (ast_test_flag(&csth->flags, OPTION_READONLY)) {
		/* Option 'o' was set, so don't mix channel audio */
		f = ast_audiohook_read_frame(&csth->spy_audiohook, samples, AST_AUDIOHOOK_DIRECTION_READ, &format_slin);
	} else {
		f = ast_audiohook_read_frame(&csth->spy_audiohook, samples, AST_AUDIOHOOK_DIRECTION_BOTH, &format_slin);
	}

	ast_audiohook_unlock(&csth->spy_audiohook);

	if (!f)
		return 0;

	/* The hook may return a chain of frames; write out each one. */
	for (cur = f; cur; cur = AST_LIST_NEXT(cur, frame_list)) {
		if (ast_write(chan, cur)) {
			ast_frfree(f);
			return -1;
		}

		if (csth->fd) {
			/* NOTE(review): short writes are not retried here. */
			if (write(csth->fd, cur->data.ptr, cur->datalen) < 0) {
				ast_log(LOG_WARNING, "write() failed: %s\n", strerror(errno));
			}
		}
	}
	ast_frfree(f);
	return 0;
}
/*
 * GROUP_COUNT() read: write into buf the number of channels in the
 * requested group[@category].  When no group is named, use the
 * channel's own first matching group assignment.
 * Returns 0 on success, -1 on error.
 */
static int group_count_function_read(struct ast_channel *chan, const char *cmd, char *data, char *buf, size_t len)
{
	int ret = -1;
	int count = -1;
	char group[80] = "", category[80] = "";

	if (!chan) {
		ast_log(LOG_WARNING, "No channel was provided to %s function.\n", cmd);
		return -1;
	}

	ast_app_group_split_group(data, group, sizeof(group), category, sizeof(category));

	/* If no group has been provided let's find one */
	if (ast_strlen_zero(group)) {
		struct ast_group_info *gi = NULL;

		ast_app_group_list_rdlock();
		for (gi = ast_app_group_list_head(); gi; gi = AST_LIST_NEXT(gi, group_list)) {
			if (gi->chan != chan)
				continue;
			/* Accept any assignment when no category was asked for,
			 * otherwise require a category match. */
			if (ast_strlen_zero(category) || (!ast_strlen_zero(gi->category) && !strcasecmp(gi->category, category)))
				break;
		}
		if (gi) {
			ast_copy_string(group, gi->group, sizeof(group));
			if (!ast_strlen_zero(gi->category))
				ast_copy_string(category, gi->category, sizeof(category));
		}
		ast_app_group_list_unlock();
	}

	if ((count = ast_app_group_get_count(group, category)) == -1) {
		ast_log(LOG_NOTICE, "No group could be found for channel '%s'\n", ast_channel_name(chan));
	} else {
		snprintf(buf, len, "%d", count);
		ret = 0;
	}

	return ret;
}
static void *__ast_alloc_region(size_t size, const enum func_type which, const char *file, int lineno, const char *func, unsigned int cache) { struct ast_region *reg; unsigned int *fence; int hash; if (!(reg = malloc(size + sizeof(*reg) + sizeof(*fence)))) { astmm_log("Memory Allocation Failure - '%d' bytes at %s %s() line %d\n", (int) size, file, func, lineno); return NULL; } reg->len = size; reg->cache = cache; reg->lineno = lineno; reg->which = which; reg->bt = backtrace_enabled ? ast_bt_create() : NULL; ast_copy_string(reg->file, file, sizeof(reg->file)); ast_copy_string(reg->func, func, sizeof(reg->func)); /* * Init lower fence. * * We use the bytes just preceeding reg->data and not reg->fence * because there is likely to be padding between reg->fence and * reg->data for reg->data alignment. */ fence = (unsigned int *) (reg->data - sizeof(*fence)); *fence = FENCE_MAGIC; /* Init higher fence. */ fence = (unsigned int *) (reg->data + reg->len); put_unaligned_uint32(fence, FENCE_MAGIC); hash = HASH(reg->data); ast_mutex_lock(®lock); AST_LIST_NEXT(reg, node) = regions[hash]; regions[hash] = reg; ast_mutex_unlock(®lock); return reg->data; }
/*! \brief SPEECH() Dialplan Function */
/*
 * Supported arguments in 'data':
 *   "status"  - "1"/"0" whether a speech structure exists on the channel.
 *   "spoke"   - "1"/"0" whether the engine flagged that the caller spoke.
 *   "results" - number of recognition results available.
 * Anything else yields an empty string.  Returns -1 only when a
 * non-"status" option is requested and no speech structure exists.
 */
static int speech_read(struct ast_channel *chan, const char *cmd, char *data, char *buf, size_t len)
{
	int results = 0;
	struct ast_speech_result *result = NULL;
	struct ast_speech *speech = find_speech(chan);
	char tmp[128] = "";

	/* Now go for the various options */
	if (!strcasecmp(data, "status")) {
		if (speech != NULL)
			ast_copy_string(buf, "1", len);
		else
			ast_copy_string(buf, "0", len);
		return 0;
	}

	/* Make sure we have a speech structure for everything else */
	if (speech == NULL) {
		return -1;
	}

	/* Check to see if they are checking for silence */
	if (!strcasecmp(data, "spoke")) {
		if (ast_test_flag(speech, AST_SPEECH_SPOKE))
			ast_copy_string(buf, "1", len);
		else
			ast_copy_string(buf, "0", len);
	} else if (!strcasecmp(data, "results")) {
		/* Count number of results */
		for (result = speech->results; result; result = AST_LIST_NEXT(result, list))
			results++;
		snprintf(tmp, sizeof(tmp), "%d", results);
		ast_copy_string(buf, tmp, len);
	} else {
		/* Unknown option: return the empty string. */
		buf[0] = '\0';
	}

	return 0;
}
/* ast_class_decl_new: create a class declaration node in 'context' named
 * by 'ident', attaching a base class from the 'bases' list when given.
 * Only a single class-decl base is supported; other base kinds are
 * reported via CL_ERROR. */
AST *ast_class_decl_new ( AST *context, AST *ident, AST *bases )
{
	AST *node, *l;

	node = ast_decl_new(AST_TYPE_CLASS_DECL, context, C_IDENT_TYPE_CML, ident);

	/* bases */
	ASSERT((!bases) || AST_IS_LIST(bases));
	for (l = bases; l; l = AST_LIST_NEXT(l)) {
		AST *b = AST_LIST_ITEM(l);
		if (AST_IS_CLASS_DECL(b)) {
			/* Only one base class may be set. */
			ASSERT(!AST_CLASS_DECL_BASE_CLASS(node));
			AST_CLASS_DECL(node)->base_class = b;
		} else {
			CL_ERROR("[TODO] %s", ast_type_name(b->type));
		}
	}

	return node;
}
/* * Helper thread to periodically poll the video sources and enqueue the * generated frames directed to the remote party to the channel's queue. * Using a separate thread also helps because the encoding can be * computationally expensive so we don't want to starve the main thread. */ static void *video_thread(void *arg) { struct video_desc *env = arg; int count = 0; char save_display[128] = ""; int i; /* integer variable used as iterator */ /* if sdl_videodriver is set, override the environment. Also, * if it contains 'console' override DISPLAY around the call to SDL_Init * so we use the console as opposed to the x11 version of aalib */ if (!ast_strlen_zero(env->sdl_videodriver)) { /* override */ const char *s = getenv("DISPLAY"); setenv("SDL_VIDEODRIVER", env->sdl_videodriver, 1); if (s && !strcasecmp(env->sdl_videodriver, "aalib-console")) { ast_copy_string(save_display, s, sizeof(save_display)); unsetenv("DISPLAY"); } } sdl_setup(env); if (!ast_strlen_zero(save_display)) { setenv("DISPLAY", save_display, 1); } ast_mutex_init(&env->dec_lock); /* used to sync decoder and renderer */ if (grabber_open(&env->out)) { ast_log(LOG_WARNING, "cannot open local video source\n"); } if (env->out.device_num) { env->out.devices[env->out.device_primary].status_index |= IS_PRIMARY | IS_SECONDARY; } /* even if no device is connected, we must call video_out_init, * as some of the data structures it initializes are * used in get_video_frames() */ video_out_init(env); /* Writes intial status of the sources. 
*/ if (env->gui) { for (i = 0; i < env->out.device_num; i++) { print_message(env->gui->thumb_bd_array[i].board, src_msgs[env->out.devices[i].status_index]); } } for (;;) { struct timespec t = { 0, 50000000 }; /* XXX 20 times/sec */ struct ast_frame *p, *f; struct ast_channel *chan; int fd; char *caption = NULL, buf[160]; /* determine if video format changed */ if (count++ % 10 == 0) { if (env->out.sendvideo && env->out.devices) { snprintf(buf, sizeof(buf), "%s %s %dx%d @@ %dfps %dkbps", env->out.devices[env->out.device_primary].name, env->codec_name, env->enc_in.w, env->enc_in.h, env->out.fps, env->out.bitrate / 1000); } else { sprintf(buf, "hold"); } caption = buf; } /* manage keypad events */ /* XXX here we should always check for events, * otherwise the drag will not work */ if (env->gui) eventhandler(env, caption); /* sleep for a while */ nanosleep(&t, NULL); if (env->in) { struct video_dec_desc *v = env->in; /* * While there is something to display, call the decoder and free * the buffer, possibly enabling the receiver to store new data. */ while (v->dec_in_dpy) { struct fbuf_t *tmp = v->dec_in_dpy; /* store current pointer */ /* decode the frame, but show it only if not frozen */ if (v->d_callbacks->dec_run(v, tmp) && !env->frame_freeze) show_frame(env, WIN_REMOTE); tmp->used = 0; /* mark buffer as free */ tmp->ebit = 0; ast_mutex_lock(&env->dec_lock); if (++v->dec_in_dpy == &v->dec_in[N_DEC_IN]) /* advance to next, circular */ v->dec_in_dpy = &v->dec_in[0]; if (v->dec_in_cur == NULL) /* receiver was idle, enable it... 
*/ v->dec_in_cur = tmp; /* using the slot just freed */ else if (v->dec_in_dpy == v->dec_in_cur) /* this was the last slot */ v->dec_in_dpy = NULL; /* nothing more to display */ ast_mutex_unlock(&env->dec_lock); } } if (env->shutdown) break; f = get_video_frames(env, &p); /* read and display */ if (!f) continue; chan = env->owner; if (chan == NULL) { /* drop the chain of frames, nobody uses them */ while (f) { struct ast_frame *g = AST_LIST_NEXT(f, frame_list); ast_frfree(f); f = g; } continue; } ast_channel_lock(chan); /* AST_LIST_INSERT_TAIL is only good for one frame, cannot use here */ if (ast_channel_readq(chan).first == NULL) { ast_channel_readq(chan).first = f; } else { ast_channel_readq(chan).last->frame_list.next = f; } ast_channel_readq(chan).last = p; /* * more or less same as ast_queue_frame, but extra * write on the alertpipe to signal frames. */ if (ast_channel_alertable(chan)) { for (p = f; p; p = AST_LIST_NEXT(p, frame_list)) { if (ast_channel_alert(chan)) { ast_log(LOG_WARNING, "Unable to write to alert pipe on %s, frametype/subclass %d/%d: %s!\n", ast_channel_name(chan), f->frametype, f->subclass, strerror(errno)); } } ast_channel_unlock(chan); } /* thread terminating, here could call the uninit */ /* uninitialize the local and remote video environments */ env->in = dec_uninit(env->in); video_out_uninit(env); if (env->gui) env->gui = cleanup_sdl(env->gui, env->out.device_num); ast_mutex_destroy(&env->dec_lock); env->shutdown = 0; return NULL; } static void copy_geometry(struct fbuf_t *src, struct fbuf_t *dst) { if (dst->w == 0) dst->w = src->w; if (dst->h == 0) dst->h = src->h; } /*! initialize the video environment. * Apart from the formats (constant) used by sdl and the codec, * we use enc_in as the basic geometry. 
*/ static void init_env(struct video_desc *env) { struct fbuf_t *c = &(env->out.loc_src_geometry); /* local source */ struct fbuf_t *ei = &(env->enc_in); /* encoder input */ struct fbuf_t *ld = &(env->loc_dpy); /* local display */ struct fbuf_t *rd = &(env->rem_dpy); /* remote display */ int i; /* integer working as iterator */ c->pix_fmt = PIX_FMT_YUV420P; /* default - camera format */ ei->pix_fmt = PIX_FMT_YUV420P; /* encoder input */ if (ei->w == 0 || ei->h == 0) { ei->w = 352; ei->h = 288; } ld->pix_fmt = rd->pix_fmt = PIX_FMT_YUV420P; /* sdl format */ /* inherit defaults */ copy_geometry(ei, c); /* camera inherits from encoder input */ copy_geometry(ei, rd); /* remote display inherits from encoder input */ copy_geometry(rd, ld); /* local display inherits from remote display */ /* fix the size of buffers for small windows */ for (i = 0; i < env->out.device_num; i++) { env->src_dpy[i].pix_fmt = PIX_FMT_YUV420P; env->src_dpy[i].w = SRC_WIN_W; env->src_dpy[i].h = SRC_WIN_H; } /* now we set the default coordinates for the picture in picture frames inside the env_in buffers, those can be changed by dragging the picture in picture with left click */ env->out.pip_x = ei->w - ei->w/3; env->out.pip_y = ei->h - ei->h/3; } /*! * The first call to the video code, called by oss_new() or similar. * Here we initialize the various components we use, namely SDL for display, * ffmpeg for encoding/decoding, and a local video source. * We do our best to progress even if some of the components are not * available. 
*/ void console_video_start(struct video_desc *env, struct ast_channel *owner) { ast_log(LOG_WARNING, "env %p chan %p\n", env, owner); if (env == NULL) /* video not initialized */ return; env->owner = owner; /* work even if no owner is specified */ if (env->vthread) return; /* already initialized, nothing to do */ init_env(env); env->out.enc = map_config_video_format(env->codec_name); ast_log(LOG_WARNING, "start video out %s %dx%d\n", env->codec_name, env->enc_in.w, env->enc_in.h); /* * Register all codecs supported by the ffmpeg library. * We only need to do it once, but probably doesn't * harm to do it multiple times. */ avcodec_init(); avcodec_register_all(); av_log_set_level(AV_LOG_ERROR); /* only report errors */ if (env->out.fps == 0) { env->out.fps = 15; ast_log(LOG_WARNING, "fps unset, forcing to %d\n", env->out.fps); } if (env->out.bitrate == 0) { env->out.bitrate = 65000; ast_log(LOG_WARNING, "bitrate unset, forcing to %d\n", env->out.bitrate); } /* create the thread as detached so memory is freed on termination */ ast_pthread_create_detached_background(&env->vthread, NULL, video_thread, env); }
/*! \brief convert work buffer and produce output frame */
static struct ast_frame *lintospeex_frameout(struct ast_trans_pvt *pvt)
{
	struct speex_coder_pvt *tmp = pvt->pvt;
	struct ast_frame *result = NULL;
	struct ast_frame *last = NULL;
	int samples = 0; /* output samples */

	/* Encode buffered audio one speex frame at a time, chaining the
	 * produced frames into a frame list rooted at 'result'. */
	while (pvt->samples >= tmp->framesize) {
		struct ast_frame *current;
		int is_speech = 1;

		speex_bits_reset(&tmp->bits);

#ifdef _SPEEX_TYPES_H
		/* Preprocess audio */
		if (preproc)
			is_speech = speex_preprocess(tmp->pp, tmp->buf + samples, NULL);
		/* Encode a frame of data */
		if (is_speech) {
			/* If DTX enabled speex_encode returns 0 during silence */
			is_speech = speex_encode_int(tmp->speex, tmp->buf + samples, &tmp->bits) || !dtx;
		} else {
			/* 5 zeros interpreted by Speex as silence (submode 0) */
			speex_bits_pack(&tmp->bits, 0, 5);
		}
#else
		{
			float fbuf[1024];
			int x;
			/* Convert to floating point */
			for (x = 0; x < tmp->framesize; x++)
				fbuf[x] = tmp->buf[samples + x];
			/* Encode a frame of data */
			is_speech = speex_encode(tmp->speex, fbuf, &tmp->bits) || !dtx;
		}
#endif
		samples += tmp->framesize;
		pvt->samples -= tmp->framesize;

		/* Use AST_FRAME_CNG to signify the start of any silence period */
		if (is_speech) {
			int datalen = 0; /* output bytes */

			tmp->silent_state = 0;
			/* Terminate bit stream */
			speex_bits_pack(&tmp->bits, 15, 5);
			datalen = speex_bits_write(&tmp->bits, pvt->outbuf.c, pvt->t->buf_size);
			current = ast_trans_frameout(pvt, datalen, tmp->framesize);
		} else if (tmp->silent_state) {
			/* Still inside a silence period: emit nothing. */
			current = NULL;
		} else {
			struct ast_frame frm = {
				.frametype = AST_FRAME_CNG,
				.src = pvt->t->name,
			};

			/*
			 * XXX I don't think the AST_FRAME_CNG code has ever
			 * really worked for speex. There doesn't seem to be
			 * any consumers of the frame type. Everyone that
			 * references the type seems to pass the frame on.
			 */
			tmp->silent_state = 1;

			/* XXX what now ? format etc... */
			current = ast_frisolate(&frm);
		}

		if (!current) {
			continue;
		} else if (last) {
			AST_LIST_NEXT(last, frame_list) = current;
		} else {
			result = current;
		}
		last = current;
	}

	/* Move the data at the end of the buffer to the front
	 * (samples are 16-bit, hence the * 2 byte count). */
	if (samples) {
		memmove(tmp->buf, tmp->buf + samples, pvt->samples * 2);
	}

	return result;
}

/* Free the decoder state and bit buffer of a speex->slin translator. */
static void speextolin_destroy(struct ast_trans_pvt *arg)
{
	struct speex_coder_pvt *pvt = arg->pvt;

	speex_decoder_destroy(pvt->speex);
	speex_bits_destroy(&pvt->bits);
}

/* Free the encoder state (and preprocessor, when built) of a slin->speex translator. */
static void lintospeex_destroy(struct ast_trans_pvt *arg)
{
	struct speex_coder_pvt *pvt = arg->pvt;
#ifdef _SPEEX_TYPES_H
	if (preproc)
		speex_preprocess_state_destroy(pvt->pp);
#endif
	speex_encoder_destroy(pvt->speex);
	speex_bits_destroy(&pvt->bits);
}

/* NOTE(review): this initializer is truncated at the end of this chunk;
 * the remaining designators live outside the visible source. */
static struct ast_translator speextolin = {
	.name = "speextolin",
	.src_codec = {
		.name = "speex",
		.type = AST_MEDIA_TYPE_AUDIO,
		.sample_rate = 8000,
	},
	.dst_codec = {
		.name = "slin",
		.type = AST_MEDIA_TYPE_AUDIO,
		.sample_rate = 8000,
	},
	.format = "slin",
/* * Helper thread to periodically poll the video source and enqueue the * generated frames to the channel's queue. * Using a separate thread also helps because the encoding can be * computationally expensive so we don't want to starve the main thread. */ static void *video_thread(void *arg) { struct video_desc *env = arg; int count = 0; char save_display[128] = ""; /* if sdl_videodriver is set, override the environment. Also, * if it contains 'console' override DISPLAY around the call to SDL_Init * so we use the console as opposed to the x11 version of aalib */ if (!ast_strlen_zero(env->sdl_videodriver)) { /* override */ const char *s = getenv("DISPLAY"); setenv("SDL_VIDEODRIVER", env->sdl_videodriver, 1); if (s && !strcasecmp(env->sdl_videodriver, "aalib-console")) { ast_copy_string(save_display, s, sizeof(save_display)); unsetenv("DISPLAY"); } } sdl_setup(env); if (!ast_strlen_zero(save_display)) setenv("DISPLAY", save_display, 1); /* initialize grab coordinates */ env->out.loc_src_geometry.x = 0; env->out.loc_src_geometry.y = 0; ast_mutex_init(&env->dec_lock); /* used to sync decoder and renderer */ if (grabber_open(&env->out)) { ast_log(LOG_WARNING, "cannot open local video source\n"); } else { #if 0 /* In principle, try to register the fd. * In practice, many webcam drivers do not support select/poll, * so don't bother and instead read periodically from the * video thread. 
*/ if (env->out.fd >= 0) ast_channel_set_fd(env->owner, 1, env->out.fd); #endif video_out_init(env); } for (;;) { struct timeval t = { 0, 50000 }; /* XXX 20 times/sec */ struct ast_frame *p, *f; struct ast_channel *chan; int fd; char *caption = NULL, buf[160]; /* determine if video format changed */ if (count++ % 10 == 0) { if (env->out.sendvideo) sprintf(buf, "%s %s %dx%d @@ %dfps %dkbps", env->out.videodevice, env->codec_name, env->enc_in.w, env->enc_in.h, env->out.fps, env->out.bitrate/1000); else sprintf(buf, "hold"); caption = buf; } /* manage keypad events */ /* XXX here we should always check for events, * otherwise the drag will not work */ if (env->gui) eventhandler(env, caption); /* sleep for a while */ ast_select(0, NULL, NULL, NULL, &t); if (env->in) { struct video_dec_desc *v = env->in; /* * While there is something to display, call the decoder and free * the buffer, possibly enabling the receiver to store new data. */ while (v->dec_in_dpy) { struct fbuf_t *tmp = v->dec_in_dpy; /* store current pointer */ if (v->d_callbacks->dec_run(v, tmp)) show_frame(env, WIN_REMOTE); tmp->used = 0; /* mark buffer as free */ tmp->ebit = 0; ast_mutex_lock(&env->dec_lock); if (++v->dec_in_dpy == &v->dec_in[N_DEC_IN]) /* advance to next, circular */ v->dec_in_dpy = &v->dec_in[0]; if (v->dec_in_cur == NULL) /* receiver was idle, enable it... 
*/ v->dec_in_cur = tmp; /* using the slot just freed */ else if (v->dec_in_dpy == v->dec_in_cur) /* this was the last slot */ v->dec_in_dpy = NULL; /* nothing more to display */ ast_mutex_unlock(&env->dec_lock); } } if (env->shutdown) break; f = get_video_frames(env, &p); /* read and display */ if (!f) continue; chan = env->owner; if (chan == NULL) continue; fd = chan->alertpipe[1]; ast_channel_lock(chan); /* AST_LIST_INSERT_TAIL is only good for one frame, cannot use here */ if (chan->readq.first == NULL) { chan->readq.first = f; } else { chan->readq.last->frame_list.next = f; } chan->readq.last = p; /* * more or less same as ast_queue_frame, but extra * write on the alertpipe to signal frames. */ if (fd > -1) { int blah = 1, l = sizeof(blah); for (p = f; p; p = AST_LIST_NEXT(p, frame_list)) { if (write(fd, &blah, l) != l) ast_log(LOG_WARNING, "Unable to write to alert pipe on %s, frametype/subclass %d/%d: %s!\n", chan->name, f->frametype, f->subclass, strerror(errno)); } } ast_channel_unlock(chan); } /* thread terminating, here could call the uninit */ /* uninitialize the local and remote video environments */ env->in = dec_uninit(env->in); video_out_uninit(env); if (env->gui) env->gui = cleanup_sdl(env->gui); ast_mutex_destroy(&env->dec_lock); env->shutdown = 0; return NULL; }
/*
 * MixMonitor recording thread: read mixed audio from the channel's
 * audiohook and stream it to the recording file until the hook stops,
 * then close the file, wait for the datastore to be released, run the
 * optional post-process command, and free the mixmonitor state.
 */
static void *mixmonitor_thread(void *obj)
{
	struct mixmonitor *mixmonitor = obj;
	struct ast_filestream **fs = NULL;
	unsigned int oflags;
	char *ext;
	int errflag = 0;

	ast_verb(2, "Begin MixMonitor Recording %s\n", mixmonitor->name);

	fs = &mixmonitor->mixmonitor_ds->fs;

	/* The audiohook must enter and exit the loop locked */
	ast_audiohook_lock(&mixmonitor->audiohook);
	while (mixmonitor->audiohook.status == AST_AUDIOHOOK_STATUS_RUNNING && !mixmonitor->mixmonitor_ds->fs_quit) {
		struct ast_frame *fr = NULL;

		/* Block until the hook has audio (or its status changes). */
		ast_audiohook_trigger_wait(&mixmonitor->audiohook);

		if (mixmonitor->audiohook.status != AST_AUDIOHOOK_STATUS_RUNNING)
			break;

		if (!(fr = ast_audiohook_read_frame(&mixmonitor->audiohook, SAMPLES_PER_FRAME, AST_AUDIOHOOK_DIRECTION_BOTH, AST_FORMAT_SLINEAR)))
			continue;

		/* audiohook lock is not required for the next block.
		 * Unlock it, but remember to lock it before looping or exiting */
		ast_audiohook_unlock(&mixmonitor->audiohook);

		ast_mutex_lock(&mixmonitor->mixmonitor_ds->lock);
		/* Record unless the 'b' (bridged-only) flag demands an active bridge. */
		if (!ast_test_flag(mixmonitor, MUXFLAG_BRIDGED) || (mixmonitor->mixmonitor_ds->chan && ast_bridged_channel(mixmonitor->mixmonitor_ds->chan))) {
			/* Initialize the file if not already done so */
			if (!*fs && !errflag && !mixmonitor->mixmonitor_ds->fs_quit) {
				oflags = O_CREAT | O_WRONLY;
				oflags |= ast_test_flag(mixmonitor, MUXFLAG_APPEND) ? O_APPEND : O_TRUNC;

				/* Split "name.ext"; the extension selects the file format. */
				if ((ext = strrchr(mixmonitor->filename, '.')))
					*(ext++) = '\0';
				else
					ext = "raw";

				if (!(*fs = ast_writefile(mixmonitor->filename, ext, NULL, oflags, 0, 0666))) {
					ast_log(LOG_ERROR, "Cannot open %s.%s\n", mixmonitor->filename, ext);
					errflag = 1;
				}
			}

			/* Write out the frame(s) */
			if (*fs) {
				struct ast_frame *cur;

				for (cur = fr; cur; cur = AST_LIST_NEXT(cur, frame_list)) {
					ast_writestream(*fs, cur);
				}
			}
		}
		ast_mutex_unlock(&mixmonitor->mixmonitor_ds->lock);

		/* All done! free it. */
		ast_frame_free(fr, 0);

		ast_audiohook_lock(&mixmonitor->audiohook);
	}
	ast_audiohook_unlock(&mixmonitor->audiohook);

	/* Datastore cleanup. close the filestream and wait for ds destruction */
	ast_mutex_lock(&mixmonitor->mixmonitor_ds->lock);
	mixmonitor_ds_close_fs(mixmonitor->mixmonitor_ds);
	if (!mixmonitor->mixmonitor_ds->destruction_ok) {
		ast_cond_wait(&mixmonitor->mixmonitor_ds->destruction_condition, &mixmonitor->mixmonitor_ds->lock);
	}
	ast_mutex_unlock(&mixmonitor->mixmonitor_ds->lock);

	/* kill the audiohook */
	destroy_monitor_audiohook(mixmonitor);

	if (mixmonitor->post_process) {
		ast_verb(2, "Executing [%s]\n", mixmonitor->post_process);
		ast_safe_system(mixmonitor->post_process);
	}

	ast_verb(2, "End MixMonitor Recording %s\n", mixmonitor->name);

	mixmonitor_free(mixmonitor);
	return NULL;
}
/*!
 * \brief Worker thread that drains a MixMonitor audiohook and writes up to
 * three recordings: the mixed stream (fs), the read-direction stream
 * (fs_read), and the write-direction stream (fs_write).
 *
 * \param obj a struct mixmonitor * (ownership transfers to this thread; it is
 *            released via mixmonitor_free() before return).
 * \return NULL always.
 *
 * Locking protocol: the audiohook lock is held on loop entry and re-acquired
 * before each iteration/exit; it is dropped while frames are written.  The
 * datastore lock guards the filestreams and the destruction handshake.
 */
static void *mixmonitor_thread(void *obj)
{
	struct mixmonitor *mixmonitor = obj;
	struct ast_filestream **fs = NULL;
	struct ast_filestream **fs_read = NULL;
	struct ast_filestream **fs_write = NULL;
	unsigned int oflags;
	int errflag = 0;
	struct ast_format format_slin;

	ast_verb(2, "Begin MixMonitor Recording %s\n", mixmonitor->name);

	/* Filestreams live in the datastore so the channel side can close them. */
	fs = &mixmonitor->mixmonitor_ds->fs;
	fs_read = &mixmonitor->mixmonitor_ds->fs_read;
	fs_write = &mixmonitor->mixmonitor_ds->fs_write;

	/* Open whichever of the three target files were requested. */
	ast_mutex_lock(&mixmonitor->mixmonitor_ds->lock);
	mixmonitor_save_prep(mixmonitor, mixmonitor->filename, fs, &oflags, &errflag);
	mixmonitor_save_prep(mixmonitor, mixmonitor->filename_read, fs_read, &oflags, &errflag);
	mixmonitor_save_prep(mixmonitor, mixmonitor->filename_write, fs_write, &oflags, &errflag);

	/* Signed-linear format matching the datastore's sample rate. */
	ast_format_set(&format_slin, ast_format_slin_by_rate(mixmonitor->mixmonitor_ds->samp_rate), 0);

	ast_mutex_unlock(&mixmonitor->mixmonitor_ds->lock);

	/* The audiohook must enter and exit the loop locked */
	ast_audiohook_lock(&mixmonitor->audiohook);
	while (mixmonitor->audiohook.status == AST_AUDIOHOOK_STATUS_RUNNING && !mixmonitor->mixmonitor_ds->fs_quit) {
		struct ast_frame *fr = NULL;
		struct ast_frame *fr_read = NULL;
		struct ast_frame *fr_write = NULL;

		/* No audio ready: wait for a trigger, re-check status, retry. */
		if (!(fr = ast_audiohook_read_frame_all(&mixmonitor->audiohook, SAMPLES_PER_FRAME, &format_slin, &fr_read, &fr_write))) {
			ast_audiohook_trigger_wait(&mixmonitor->audiohook);

			if (mixmonitor->audiohook.status != AST_AUDIOHOOK_STATUS_RUNNING) {
				break;
			}
			continue;
		}

		/* audiohook lock is not required for the next block.
		 * Unlock it, but remember to lock it before looping or exiting */
		ast_audiohook_unlock(&mixmonitor->audiohook);

		/* Skip writing while unbridged unless MUXFLAG_BRIDGED is off. */
		if (!ast_test_flag(mixmonitor, MUXFLAG_BRIDGED) || (mixmonitor->autochan->chan && ast_bridged_channel(mixmonitor->autochan->chan))) {
			ast_mutex_lock(&mixmonitor->mixmonitor_ds->lock);

			/* Write out the frame(s); each fr* may be a frame_list chain. */
			if ((*fs_read) && (fr_read)) {
				struct ast_frame *cur;

				for (cur = fr_read; cur; cur = AST_LIST_NEXT(cur, frame_list)) {
					ast_writestream(*fs_read, cur);
				}
			}

			if ((*fs_write) && (fr_write)) {
				struct ast_frame *cur;

				for (cur = fr_write; cur; cur = AST_LIST_NEXT(cur, frame_list)) {
					ast_writestream(*fs_write, cur);
				}
			}

			if ((*fs) && (fr)) {
				struct ast_frame *cur;

				for (cur = fr; cur; cur = AST_LIST_NEXT(cur, frame_list)) {
					ast_writestream(*fs, cur);
				}
			}
			ast_mutex_unlock(&mixmonitor->mixmonitor_ds->lock);
		}

		/* All done! free it. */
		if (fr) {
			ast_frame_free(fr, 0);
		}
		if (fr_read) {
			ast_frame_free(fr_read, 0);
		}
		if (fr_write) {
			ast_frame_free(fr_write, 0);
		}

		fr = NULL;
		fr_write = NULL;
		fr_read = NULL;

		ast_audiohook_lock(&mixmonitor->audiohook);
	}
	ast_audiohook_unlock(&mixmonitor->audiohook);

	/* Drop our hold on the monitored channel. */
	ast_autochan_destroy(mixmonitor->autochan);

	/* Datastore cleanup.  Close the filestreams and wait for the datastore's
	 * destruction callback to signal before releasing our reference. */
	ast_mutex_lock(&mixmonitor->mixmonitor_ds->lock);
	mixmonitor_ds_close_fs(mixmonitor->mixmonitor_ds);
	if (!mixmonitor->mixmonitor_ds->destruction_ok) {
		ast_cond_wait(&mixmonitor->mixmonitor_ds->destruction_condition, &mixmonitor->mixmonitor_ds->lock);
	}
	ast_mutex_unlock(&mixmonitor->mixmonitor_ds->lock);

	/* kill the audiohook */
	destroy_monitor_audiohook(mixmonitor);

	/* Optional shell command to run once recording is finished. */
	if (mixmonitor->post_process) {
		ast_verb(2, "Executing [%s]\n", mixmonitor->post_process);
		ast_safe_system(mixmonitor->post_process);
	}

	ast_verb(2, "End MixMonitor Recording %s\n", mixmonitor->name);

	mixmonitor_free(mixmonitor);
	return NULL;
}
/*
 * Helper thread to periodically poll the video sources and enqueue the
 * generated frames directed to the remote party to the channel's queue.
 * Using a separate thread also helps because the encoding can be
 * computationally expensive so we don't want to starve the main thread.
 *
 * Lifecycle: initializes SDL/grabber/encoder, loops until env->shutdown is
 * set, then tears everything down and clears the shutdown flag.
 * Returns NULL always (pthread entry point signature).
 */
static void *video_thread(void *arg)
{
	struct video_desc *env = arg;
	int count = 0;
	char save_display[128] = "";
	int i; /* integer variable used as iterator */

	/* if sdl_videodriver is set, override the environment. Also,
	 * if it contains 'console' override DISPLAY around the call to SDL_Init
	 * so we use the console as opposed to the x11 version of aalib
	 */
	if (!ast_strlen_zero(env->sdl_videodriver)) { /* override */
		const char *s = getenv("DISPLAY");
		setenv("SDL_VIDEODRIVER", env->sdl_videodriver, 1);
		if (s && !strcasecmp(env->sdl_videodriver, "aalib-console")) {
			/* Remember DISPLAY so it can be restored after sdl_setup(). */
			ast_copy_string(save_display, s, sizeof(save_display));
			unsetenv("DISPLAY");
		}
	}
	sdl_setup(env);
	if (!ast_strlen_zero(save_display)) {
		setenv("DISPLAY", save_display, 1);
	}

	ast_mutex_init(&env->dec_lock); /* used to sync decoder and renderer */

	if (grabber_open(&env->out)) {
		ast_log(LOG_WARNING, "cannot open local video source\n");
	}

	if (env->out.device_num) {
		env->out.devices[env->out.device_primary].status_index |= IS_PRIMARY | IS_SECONDARY;
	}

	/* even if no device is connected, we must call video_out_init,
	 * as some of the data structures it initializes are
	 * used in get_video_frames() */
	video_out_init(env);

	/* Writes initial status of the sources to the GUI thumbnails. */
	if (env->gui) {
		for (i = 0; i < env->out.device_num; i++) {
			print_message(env->gui->thumb_bd_array[i].board, src_msgs[env->out.devices[i].status_index]);
		}
	}

	for (;;) {
		struct timespec t = { 0, 50000000 }; /* XXX 20 times/sec */
		struct ast_frame *p, *f;
		struct ast_channel *chan;
		int fd;
		char *caption = NULL, buf[160];

		/* determine if video format changed; refresh the caption every
		 * 10th iteration only */
		if (count++ % 10 == 0) {
			if (env->out.sendvideo && env->out.devices) {
				snprintf(buf, sizeof(buf), "%s %s %dx%d @@ %dfps %dkbps",
					env->out.devices[env->out.device_primary].name,
					env->codec_name, env->enc_in.w, env->enc_in.h,
					env->out.fps, env->out.bitrate / 1000);
			} else {
				/* NOTE(review): unbounded sprintf; safe here ("hold" < 160
				 * bytes) but snprintf would match the branch above. */
				sprintf(buf, "hold");
			}
			caption = buf;
		}

		/* manage keypad events */
		/* XXX here we should always check for events,
		 * otherwise the drag will not work */
		if (env->gui)
			eventhandler(env, caption);

		/* sleep for a while */
		nanosleep(&t, NULL);

		if (env->in) {
			struct video_dec_desc *v = env->in;

			/*
			 * While there is something to display, call the decoder and free
			 * the buffer, possibly enabling the receiver to store new data.
			 */
			while (v->dec_in_dpy) {
				struct fbuf_t *tmp = v->dec_in_dpy; /* store current pointer */

				/* decode the frame, but show it only if not frozen */
				if (v->d_callbacks->dec_run(v, tmp) && !env->frame_freeze)
					show_frame(env, WIN_REMOTE);
				tmp->used = 0; /* mark buffer as free */
				tmp->ebit = 0;
				/* dec_lock protects the dec_in ring pointers shared with
				 * the receiver */
				ast_mutex_lock(&env->dec_lock);
				if (++v->dec_in_dpy == &v->dec_in[N_DEC_IN]) /* advance to next, circular */
					v->dec_in_dpy = &v->dec_in[0];

				if (v->dec_in_cur == NULL) /* receiver was idle, enable it... */
					v->dec_in_cur = tmp; /* using the slot just freed */
				else if (v->dec_in_dpy == v->dec_in_cur) /* this was the last slot */
					v->dec_in_dpy = NULL; /* nothing more to display */
				ast_mutex_unlock(&env->dec_lock);
			}
		}

		if (env->shutdown)
			break;
		f = get_video_frames(env, &p); /* read and display; p gets the tail */
		if (!f)
			continue;
		chan = env->owner;
		if (chan == NULL) {
			/* drop the chain of frames, nobody uses them */
			while (f) {
				struct ast_frame *g = AST_LIST_NEXT(f, frame_list);
				ast_frfree(f);
				f = g;
			}
			continue;
		}
		fd = chan->alertpipe[1];
		ast_channel_lock(chan);

		/* AST_LIST_INSERT_TAIL is only good for one frame, cannot use here;
		 * splice the whole chain f..p onto the channel's read queue. */
		if (chan->readq.first == NULL) {
			chan->readq.first = f;
		} else {
			chan->readq.last->frame_list.next = f;
		}
		chan->readq.last = p;

		/*
		 * more or less same as ast_queue_frame, but extra
		 * write on the alertpipe to signal frames.
		 */
		if (fd > -1) {
			int blah = 1, l = sizeof(blah);
			/* one alertpipe byte per queued frame */
			for (p = f; p; p = AST_LIST_NEXT(p, frame_list)) {
				if (write(fd, &blah, l) != l)
					ast_log(LOG_WARNING, "Unable to write to alert pipe on %s, frametype/subclass %d/%d: %s!\n", chan->name, f->frametype, f->subclass, strerror(errno));
			}
		}
		ast_channel_unlock(chan);
	}
	/* thread terminating, here could call the uninit */
	/* uninitialize the local and remote video environments */
	env->in = dec_uninit(env->in);
	video_out_uninit(env);

	if (env->gui)
		env->gui = cleanup_sdl(env->gui, env->out.device_num);
	ast_mutex_destroy(&env->dec_lock);
	env->shutdown = 0;
	return NULL;
}
static char *handle_memory_show_allocations(struct ast_cli_entry *e, int cmd, struct ast_cli_args *a) { const char *fn = NULL; struct ast_region *reg; unsigned int idx; unsigned int whales_len; unsigned int minnows_len; unsigned int total_len = 0; unsigned int selected_len = 0; unsigned int cache_len = 0; unsigned int count = 0; switch (cmd) { case CLI_INIT: e->command = "memory show allocations"; e->usage = "Usage: memory show allocations [<file>|anomalies]\n" " Dumps a list of segments of allocated memory.\n" " Defaults to listing all memory allocations.\n" " <file> - Restricts output to memory allocated by the file.\n" " anomalies - Only check for fence violations.\n"; return NULL; case CLI_GENERATE: return NULL; } if (a->argc == 4) { fn = a->argv[3]; } else if (a->argc != 3) { return CLI_SHOWUSAGE; } /* Look for historical misspelled option as well. */ if (fn && (!strcasecmp(fn, "anomalies") || !strcasecmp(fn, "anomolies"))) { regions_check_all_fences(); ast_cli(a->fd, "Anomaly check complete.\n"); return CLI_SUCCESS; } ast_mutex_lock(®lock); for (idx = 0; idx < ARRAY_LEN(regions); ++idx) { for (reg = regions[idx]; reg; reg = AST_LIST_NEXT(reg, node)) { total_len += reg->len; if (fn && strcasecmp(fn, reg->file)) { continue; } region_check_fences(reg); ast_cli(a->fd, "%10u bytes allocated%s by %20s() line %5u of %s\n", (unsigned int) reg->len, reg->cache ? " (cache)" : "", reg->func, reg->lineno, reg->file); selected_len += reg->len; if (reg->cache) { cache_len += reg->len; } ++count; } } whales_len = freed_regions_size(&whales); minnows_len = freed_regions_size(&minnows); ast_mutex_unlock(®lock); print_memory_show_common_stats(a->fd, whales_len, minnows_len, total_len, selected_len, cache_len, count); return CLI_SUCCESS; }
static char *handle_memory_show_summary(struct ast_cli_entry *e, int cmd, struct ast_cli_args *a) { #define my_max(a, b) ((a) >= (b) ? (a) : (b)) const char *fn = NULL; int idx; int cmp; struct ast_region *reg; unsigned int whales_len; unsigned int minnows_len; unsigned int total_len = 0; unsigned int selected_len = 0; unsigned int cache_len = 0; unsigned int count = 0; struct file_summary { struct file_summary *next; unsigned int len; unsigned int cache_len; unsigned int count; unsigned int lineno; char name[my_max(sizeof(reg->file), sizeof(reg->func))]; } *list = NULL, *cur, **prev; switch (cmd) { case CLI_INIT: e->command = "memory show summary"; e->usage = "Usage: memory show summary [<file>]\n" " Summarizes heap memory allocations by file, or optionally\n" " by line if a file is specified.\n"; return NULL; case CLI_GENERATE: return NULL; } if (a->argc == 4) { fn = a->argv[3]; } else if (a->argc != 3) { return CLI_SHOWUSAGE; } ast_mutex_lock(®lock); for (idx = 0; idx < ARRAY_LEN(regions); ++idx) { for (reg = regions[idx]; reg; reg = AST_LIST_NEXT(reg, node)) { total_len += reg->len; if (fn) { if (strcasecmp(fn, reg->file)) { continue; } /* Sort list by func/lineno. Find existing or place to insert. */ for (prev = &list; (cur = *prev); prev = &cur->next) { cmp = strcmp(cur->name, reg->func); if (cmp < 0) { continue; } if (cmp > 0) { /* Insert before current */ cur = NULL; break; } cmp = cur->lineno - reg->lineno; if (cmp < 0) { continue; } if (cmp > 0) { /* Insert before current */ cur = NULL; } break; } } else { /* Sort list by filename. Find existing or place to insert. */ for (prev = &list; (cur = *prev); prev = &cur->next) { cmp = strcmp(cur->name, reg->file); if (cmp < 0) { continue; } if (cmp > 0) { /* Insert before current */ cur = NULL; } break; } } if (!cur) { cur = ast_alloca(sizeof(*cur)); memset(cur, 0, sizeof(*cur)); cur->lineno = reg->lineno; ast_copy_string(cur->name, fn ? 
reg->func : reg->file, sizeof(cur->name)); cur->next = *prev; *prev = cur; } cur->len += reg->len; if (reg->cache) { cur->cache_len += reg->len; } ++cur->count; } } whales_len = freed_regions_size(&whales); minnows_len = freed_regions_size(&minnows); ast_mutex_unlock(®lock); /* Dump the whole list */ for (cur = list; cur; cur = cur->next) { selected_len += cur->len; cache_len += cur->cache_len; count += cur->count; if (cur->cache_len) { if (fn) { ast_cli(a->fd, "%10u bytes (%10u cache) in %10u allocations by %20s() line %5u of %s\n", cur->len, cur->cache_len, cur->count, cur->name, cur->lineno, fn); } else { ast_cli(a->fd, "%10u bytes (%10u cache) in %10u allocations in file %s\n", cur->len, cur->cache_len, cur->count, cur->name); } } else { if (fn) { ast_cli(a->fd, "%10u bytes in %10u allocations by %20s() line %5u of %s\n", cur->len, cur->count, cur->name, cur->lineno, fn); } else { ast_cli(a->fd, "%10u bytes in %10u allocations in file %s\n", cur->len, cur->count, cur->name); } } } print_memory_show_common_stats(a->fd, whales_len, minnows_len, total_len, selected_len, cache_len, count); return CLI_SUCCESS; }