/* OpenGL Android display filter tick: hand the most recent YUV frame of
 * input 0 to the ogl_display helper and ask the Java video window to
 * schedule a render pass. All input queues are flushed each tick so only
 * the latest frame is ever shown. */
static void android_display_process(MSFilter *f){
	AndroidDisplay *ad=(AndroidDisplay*)f->data;
	MSPicture pic;
	mblk_t *m;
	ms_filter_lock(f);
	if (ad->android_video_window){
		if ((m=ms_queue_peek_last(f->inputs[0]))!=NULL){
			if (ms_yuv_buf_init_from_mblk (&pic,m)==0){
				/* schedule display of frame */
				/* NOTE(review): this branch displays when ad->ogl is NULL and warns
				 * when ad->ogl exists and ogl_free_ready is set — the condition may
				 * be inverted; confirm the semantics of ogl_free_ready before
				 * changing it. */
				if (!ad->ogl || !ad->ogl_free_ready) {
					/* m is dupb'ed inside ogl_display */
					ogl_display_set_yuv_to_display(ad->ogl, m);
				} else {
					ms_warning("%s: opengldisplay not ready (%p)", __FUNCTION__, ad->ogl);
				}
				/* notify the Java surface that a new frame is pending */
				JNIEnv *jenv=ms_get_jni_env();
				(*jenv)->CallVoidMethod(jenv,ad->android_video_window,ad->request_render_id);
			}
		}
	}
	ms_filter_unlock(f);
	ms_queue_flush(f->inputs[0]);
	if (f->inputs[1] != NULL) ms_queue_flush(f->inputs[1]);
}
/* Windows WaveOut playback tick: feed input audio into the rotating pool of
 * WINSND_OUT_NBUFS wave headers. On the first tick a silence buffer of
 * WINSND_OUT_DELAY seconds is injected to prime the device; when no free
 * header remains the filter declares an overrun and resets the device. */
static void winsnd_write_process(MSFilter *f){
	WinSnd *d=(WinSnd*)f->data;
	mblk_t *m;
	MMRESULT mr;
	mblk_t *old;
	/* no opened device: just discard the input */
	if (d->outdev==NULL) {
		ms_queue_flush(f->inputs[0]);
		return;
	}
	if (d->overrun){
		/* after an overrun, wait until all queued buffers finished playing */
		ms_warning("nbufs_playing=%i",d->nbufs_playing);
		if (d->nbufs_playing>0){
			ms_queue_flush(f->inputs[0]);
			return;
		}
		else d->overrun=FALSE;
	}
	while(1){
		int outcurbuf=d->outcurbuf % WINSND_OUT_NBUFS;
		WAVEHDR *hdr=&d->hdrs_write[outcurbuf];
		/* dwUser stores the mblk_t still owned by this header, if any */
		old=(mblk_t*)hdr->dwUser;
		if (d->nsamples==0){
			/* prime the device with WINSND_OUT_DELAY seconds of silence */
			int tmpsize=(int)(WINSND_OUT_DELAY*d->wfx.nAvgBytesPerSec);
			mblk_t *tmp=allocb(tmpsize,0);
			memset(tmp->b_wptr,0,tmpsize);
			tmp->b_wptr+=tmpsize;
			playout_buf(d,hdr,tmp);
			d->outcurbuf++;
			d->nsamples+=(int)(WINSND_OUT_DELAY*d->wfx.nSamplesPerSec);
			continue;
		}
		m=ms_queue_get(f->inputs[0]);
		if (!m) break;
		d->nsamples+=msgdsize(m)/d->wfx.nBlockAlign;
		/*if the output buffer has finished to play, unprepare it*/
		if (hdr->dwFlags & WHDR_DONE){
			mr=waveOutUnprepareHeader(d->outdev,hdr,sizeof(*hdr));
			if (mr != MMSYSERR_NOERROR){
				ms_error("waveOutUnprepareHeader error");
			}
			/* release the message that was played from this header */
			freemsg(old);
			old=NULL;
			hdr->dwFlags=0;
			hdr->dwUser=0;
		}
		if (old==NULL){
			/* a free wavheader */
			playout_buf(d,hdr,m);
		}else{
			/* no more free wavheader, overrun !*/
			ms_warning("WINSND overrun, restarting");
			d->overrun=TRUE;
			d->nsamples=0;
			waveOutReset(d->outdev);
			break;
		}
		d->outcurbuf++;
	}
}
/* Android bitmap display filter tick: scale the most recent YUV frame of
 * input 0 into the Java-side RGB565 bitmap (via AndroidBitmap lock/unlock)
 * and ask the Java window to redraw. Queues are flushed each tick.
 * Fix vs original: input pins are checked for NULL before peeking/flushing,
 * consistently with the other display filters in this file (a disconnected
 * input 1 would otherwise crash ms_queue_flush). */
static void android_display_process(MSFilter *f){
	AndroidDisplay *ad=(AndroidDisplay*)f->data;
	MSPicture pic;
	mblk_t *m;
	ms_filter_lock(f);
	if (ad->jbitmap!=0 && !ad->orientation_change_pending){
		if (f->inputs[0]!=NULL && (m=ms_queue_peek_last(f->inputs[0]))!=NULL){
			if (ms_yuv_buf_init_from_mblk (&pic,m)==0){
				MSVideoSize wsize={ad->bmpinfo.width,ad->bmpinfo.height};
				MSVideoSize vsize={pic.w, pic.h};
				MSRect vrect;
				MSPicture dest={0};
				void *pixels=NULL;
				JNIEnv *jenv=ms_get_jni_env();

				/* drop the scaler context when the incoming video size changes */
				if (!ms_video_size_equal(vsize,ad->vsize)){
					ms_message("Video to display has size %ix%i",vsize.width,vsize.height);
					ad->vsize=vsize;
					if (ad->sws){
						ms_scaler_context_free(ad->sws);
						ad->sws=NULL;
					}
					/*select_orientation(ad,wsize,vsize);*/
				}

				ms_layout_compute(wsize,vsize,vsize,-1,0,&vrect, NULL);

				if (ad->sws==NULL){
					ad->sws=ms_scaler_create_context (vsize.width,vsize.height,MS_YUV420P,
						vrect.w,vrect.h,MS_RGB565,MS_SCALER_METHOD_BILINEAR);
					if (ad->sws==NULL){
						ms_fatal("Could not obtain sws context !");
					}
				}

				if (sym_AndroidBitmap_lockPixels(jenv,ad->jbitmap,&pixels)==0){
					if (pixels!=NULL){
						/* RGB565: 2 bytes per pixel, hence the x offset times 2 */
						dest.planes[0]=(uint8_t*)pixels+(vrect.y*ad->bmpinfo.stride)+(vrect.x*2);
						dest.strides[0]=ad->bmpinfo.stride;
						ms_scaler_process(ad->sws,pic.planes,pic.strides,dest.planes,dest.strides);
					}else ms_warning("Pixels==NULL in android bitmap !");
					sym_AndroidBitmap_unlockPixels(jenv,ad->jbitmap);
				}else{
					ms_error("AndroidBitmap_lockPixels() failed !");
				}
				(*jenv)->CallVoidMethod(jenv,ad->android_video_window,ad->update_id);
			}
		}
	}
	ms_filter_unlock(f);
	if (f->inputs[0]!=NULL) ms_queue_flush(f->inputs[0]);
	if (f->inputs[1]!=NULL) ms_queue_flush(f->inputs[1]);
}
/* Destroy a conference video slot: release its filters, empty both packet
 * queues, destroy their locks, then free the slot.
 * Fix vs original: each queue is flushed BEFORE its mutex is destroyed so
 * the queue is never touched after its lock is gone. */
void video_slot_destory(ConfVideoPort *slot){
	if (slot->video_itc_source!=NULL) ms_filter_destroy(slot->video_itc_source);
	if (slot->video_itc_sink!=NULL) ms_filter_destroy(slot->video_itc_sink);
	if (slot->video_input_join!=NULL) ms_filter_destroy(slot->video_input_join);
	ms_queue_flush (&slot->slotq_in.q);
	ms_mutex_destroy(&slot->slotq_in.lock);
	ms_queue_flush (&slot->slotq_out.q);
	ms_mutex_destroy(&slot->slotq_out.lock);
	ms_free(slot);
}
/* Destroy a conference audio slot: release its filters, empty both packet
 * queues, destroy their locks, then free the slot.
 * Fix vs original: each queue is flushed BEFORE its mutex is destroyed so
 * the queue is never touched after its lock is gone. */
void audio_slot_destory(ConfAudioPort *slot){
	if (slot->audio_itc_source!=NULL) ms_filter_destroy(slot->audio_itc_source);
	if (slot->audio_itc_sink!=NULL) ms_filter_destroy(slot->audio_itc_sink);
	ms_queue_flush (&slot->slotq_in.q);
	ms_mutex_destroy(&slot->slotq_in.lock);
	ms_queue_flush (&slot->slotq_out.q);
	ms_mutex_destroy(&slot->slotq_out.lock);
	ms_free(slot);
}
/* BlackBerry 10 display filter tick: render the most recent frame of
 * input 0 into a screen pixmap/window, recreating the pixmap when the
 * video size changes and the window when the screen size changes (the
 * window recreation is debounced by 500ms). Inputs are flushed each tick. */
static void bb10display_process(MSFilter *f) {
	BB10Display *d = (BB10Display*) f->data;
	mblk_t *inm = NULL;
	MSPicture src = {0};

	ms_filter_lock(f);
	if (f->inputs[0] != NULL && (inm = ms_queue_peek_last(f->inputs[0])) != 0) {
		if (ms_yuv_buf_init_from_mblk(&src, inm) == 0) {
			MSVideoSize newsize;
			newsize.width = src.w;
			newsize.height = src.h;
			/* video size changed: rebuild the pixmap at the new size */
			if (!ms_video_size_equal(newsize, d->vsize)) {
				ms_debug("[bb10_display] video size changed from %i,%i to %i,%i", newsize.width, newsize.height, d->vsize.width, d->vsize.height);
				d->vsize = newsize;
				if (d->pixmap_created) {
					bb10display_destroyPixmap(d);
				}
				bb10display_createPixmap(d);
			}

			if (d->window_created) {
				int wdims[2] = { 0, 0 };
				screen_get_window_property_iv(d->window, SCREEN_PROPERTY_SIZE, wdims);
				/* screen size changed: schedule a (debounced) window recreation */
				if (d->wsize.width != wdims[0] || d->wsize.height != wdims[1]) {
					ms_debug("[bb10_display] screen size changed from %i,%i to %i,%i", d->wsize.width, d->wsize.height, wdims[0], wdims[1]);
					d->wsize.width = wdims[0];
					d->wsize.height = wdims[1];
					d->destroy_and_recreate_window = TRUE;
					d->last_time_wsize_changed = f->ticker->time;
				}
			}

			/* recreate the window only once the size has been stable for 500ms */
			if (d->destroy_and_recreate_window && f->ticker->time - d->last_time_wsize_changed >= 500) {
				if (d->window_created) {
					bb10display_destroyWindow(d);
				}
				bb10display_createWindow(d);
				d->destroy_and_recreate_window = FALSE;
			}

			/* do not draw while a window recreation is pending */
			if (d->window_created && !d->destroy_and_recreate_window) {
				bb10display_fillWindowBuffer(d, &src);
			}
		}
	}
	ms_filter_unlock(f);

	if (f->inputs[0] != NULL) ms_queue_flush(f->inputs[0]);
	if (f->inputs[1] != NULL) ms_queue_flush(f->inputs[1]);
}
/* Playback tick: lazily open the ALSA device, then drain input 0 into it.
 * Audio is discarded when no device is available; each message is written
 * in as many chunks as alsa_write accepts. */
void alsa_write_process(MSFilter *obj){
	AlsaWriteData *ad = (AlsaWriteData*) obj->data;
	mblk_t *msg;

	/* Open the playback handle on first use, if a device is configured. */
	if (ad->handle == NULL && ad->pcmdev != NULL){
		ad->handle = alsa_open_w(ad->pcmdev, 16, ad->nchannels == 2, ad->rate);
#ifdef EPIPE_BUGFIX
		alsa_fill_w (ad->pcmdev);
#endif
	}
	if (ad->handle == NULL) {
		/* No usable device: drop the pending audio. */
		ms_queue_flush(obj->inputs[0]);
		return;
	}
	while ((msg = ms_queue_get(obj->inputs[0])) != NULL){
		int remaining;
		const int frame_bytes = 2 * ad->nchannels; /* 16-bit samples */
		while ((remaining = msg->b_wptr - msg->b_rptr) > 0){
			int written = alsa_write(ad->handle, msg->b_rptr, remaining / frame_bytes);
			if (written <= 0) break;
			msg->b_rptr += written * frame_bytes;
		}
		freemsg(msg);
	}
}
/* RTP sender tick: timestamp and send each input buffer on the RTP session,
 * periodically re-advertise the relay session id over RTCP APP, and replace
 * outgoing audio with RFC2833 telephone-event packets while a DTMF digit is
 * active (d->skip state). */
static void sender_process(MSFilter * f) {
	SenderData *d = (SenderData *) f->data;
	RtpSession *s = d->session;
	mblk_t *im;
	uint32_t timestamp;

	if (s == NULL){
		/* no RTP session configured: discard everything */
		ms_queue_flush(f->inputs[0]);
		return;
	}
	/* re-send the relay session id in an RTCP APP packet every 5 seconds */
	if (d->relay_session_id_size>0 && ( (f->ticker->time-d->last_rsi_time)>5000 || d->last_rsi_time==0) ) {
		ms_message("relay session id sent in RTCP APP");
		rtp_session_send_rtcp_APP(s,0,"RSID",(const uint8_t *)d->relay_session_id,d->relay_session_id_size);
		d->last_rsi_time=f->ticker->time;
	}

	while ((im = ms_queue_get(f->inputs[0])) != NULL) {
		mblk_t *header;

		timestamp = get_cur_timestamp(f, mblk_get_timestamp_info(im));
		ms_filter_lock(f);
		if (d->skip) {
			/* a DTMF is in progress: send the RFC2833 packet instead of audio */
			ms_debug("skipping..");
			send_dtmf(f, d->skip_until-d->dtmf_duration, timestamp);
			d->dtmf_start = FALSE;
			if (!RTP_TIMESTAMP_IS_NEWER_THAN(timestamp, d->skip_until)) {
				/* still within the digit duration: drop the audio frame */
				freemsg(im);
				ms_filter_unlock(f);
				continue;
			}
			/* digit duration elapsed: resume normal sending */
			d->skip = FALSE;
			d->dtmf = 0;
		}
		if (d->skip == FALSE && d->mute_mic==FALSE){
			int pt = mblk_get_payload_type(im);
			header = rtp_session_create_packet(s, 12, NULL, 0);
			if (pt>0) rtp_set_payload_type(header, pt);
			rtp_set_markbit(header, mblk_get_marker_info(im));
			header->b_cont = im;
			rtp_session_sendm_with_ts(s, header, timestamp);
		}
		else{
			freemsg(im);
		}
		if (d->dtmf != 0) {
			/* a new digit was requested: start skipping audio from this timestamp */
			ms_debug("prepare to send RFC2833 dtmf.");
			d->skip_until = timestamp + d->dtmf_duration;
			d->skip = TRUE;
			d->dtmf_start = TRUE;
		}
		ms_filter_unlock(f);
	}
}
/* Tear down a configured VideoToolbox H264 encoder: invalidate and release
 * the compression session, drop pending encoded frames and the packer
 * context, and mark the context unconfigured.
 * Fix vs original: ctx->session is reset to NULL after CFRelease so that
 * code testing ctx->session does not see a dangling pointer. */
static void h264_enc_unconfigure(VTH264EncCtx *ctx) {
	VTCompressionSessionInvalidate(ctx->session);
	CFRelease(ctx->session);
	ctx->session = NULL;
	ms_queue_flush(&ctx->queue);
	rfc3984_uninit(&ctx->packer_ctx);
	ctx->is_configured = FALSE;
}
/* Filter teardown: release the RTP unpacker, the VP8 codec context, any
 * cached output picture and queued data, then free the state itself. */
static void dec_uninit(MSFilter *f) {
	DecState *state = (DecState *)f->data;

	vp8rtpfmt_unpacker_uninit(&state->unpacker);
	vpx_codec_destroy(&state->codec);
	if (state->yuv_msg != NULL) {
		freemsg(state->yuv_msg);
	}
	ms_queue_flush(&state->q);
	ms_free(state);
}
/* GLX display tick: track window resizes, upload the latest remote
 * (input 0) and local preview (input 1) frames to the OpenGL helper,
 * render and swap buffers. Inputs are flushed every tick. */
static void glxvideo_process(MSFilter *f){
	GLXVideo *obj=(GLXVideo*)f->data;
	mblk_t *inm;
	MSPicture src={0};
	XWindowAttributes wa;

	/* follow external window resizes */
	XGetWindowAttributes(obj->display,obj->window_id,&wa);
	if (wa.width!=obj->wsize.width || wa.height!=obj->wsize.height){
		ms_warning("Resized to %ix%i", wa.width,wa.height);
		obj->wsize.width=wa.width;
		obj->wsize.height=wa.height;
		ogl_display_init(obj->glhelper, wa.width, wa.height);
	}

	ms_filter_lock(f);
	if (!obj->show) {
		goto end;
	}
	if (!obj->ready) glxvideo_prepare(f);
	if (!obj->ready){
		goto end;
	}
	/* bind the GL context to this window before uploading textures */
	glXMakeCurrent( obj->display, obj->window_id, obj->glContext );
	if (f->inputs[0]!=NULL && (inm=ms_queue_peek_last(f->inputs[0]))!=0) {
		if (ms_yuv_buf_init_from_mblk(&src,inm)==0){
			ogl_display_set_yuv_to_display(obj->glhelper, inm);
		}
	}
	if (f->inputs[1]!=NULL && (inm=ms_queue_peek_last(f->inputs[1]))!=0) {
		if (ms_yuv_buf_init_from_mblk(&src,inm)==0){
			ogl_display_set_preview_yuv_to_display(obj->glhelper, inm);
		}
	}
	ogl_display_render(obj->glhelper, 0);
	glXSwapBuffers ( obj->display, obj->window_id );

end:
	ms_filter_unlock(f);
	if (f->inputs[0]!=NULL) ms_queue_flush(f->inputs[0]);
	if (f->inputs[1]!=NULL) ms_queue_flush(f->inputs[1]);
}
static void MSDisplayProcess (MSFilter *f) { MSPicture pic; mblk_t *inp; // Handle remote image ... if (f->inputs[0]) { if ( (inp = ms_queue_peek_last (f->inputs[0])) ) { if (ms_yuv_buf_init_from_mblk (&pic, inp) == 0) { SDL_LockMutex (picMutex); MSPictureCopyFrom (&picRemote, &pic); picRemoteUpdated = TRUE; SDL_UnlockMutex (picMutex); } } ms_queue_flush (f->inputs[0]); } else { SDL_LockMutex (picMutex); MSPictureCopyFrom (&picRemote, NULL); picRemoteUpdated = TRUE; SDL_UnlockMutex (picMutex); } // Handle local image ... if (f->inputs[1]) { if ( (inp = ms_queue_peek_last (f->inputs[1])) ) { if (ms_yuv_buf_init_from_mblk (&pic, inp) == 0) { SDL_LockMutex (picMutex); MSPictureCopyFrom (&picLocal, &pic); picLocalUpdated = TRUE; SDL_UnlockMutex (picMutex); } } ms_queue_flush (f->inputs[1]); } else { SDL_LockMutex (picMutex); MSPictureCopyFrom (&picLocal, NULL); picLocalUpdated = TRUE; SDL_UnlockMutex (picMutex); } }
/* Sender entry point: drop everything when no RTP session is configured,
 * otherwise run the real work either as a postponed ticker task or
 * synchronously, depending on the use_task setting. */
static void sender_process(MSFilter * f){
	SenderData *d = (SenderData *) f->data;
	RtpSession *session = d->session;

	if (session == NULL){
		/* nothing can be sent without a session */
		ms_queue_flush(f->inputs[0]);
		return;
	}
	if (d->use_task){
		ms_filter_postpone_task(f,_sender_process);
	}else{
		_sender_process(f);
	}
}
/* Tear down the VideoToolbox H264 decoder filter: unpacker, decoder
 * session (if any), pending frames, mutex and buffer allocator, then the
 * context itself. */
static void h264_dec_uninit(MSFilter *f) {
	VTH264DecCtx *ctx = (VTH264DecCtx *)f->data;

	rfc3984_uninit(&ctx->unpacker);
	if (ctx->session != NULL) {
		h264_dec_uninit_decoder(ctx);
	}
	ms_queue_flush(&ctx->queue);
	ms_mutex_destroy(&ctx->mutex);
	ms_yuv_buf_allocator_free(ctx->pixbuf_allocator);
	ms_free(ctx);
}
/* Called before the ticker starts: make sure the camera is open, reset the
 * fps average, drop any stale queued frames and start capturing. */
static void bb10capture_preprocess(MSFilter *f) {
	BB10Capture *cap = (BB10Capture*) f->data;

	if (!cap->camera_openned) {
		bb10capture_open_camera(cap);
	}
	ms_average_fps_init(&cap->avgfps, "[bb10_capture] fps=%f");
	ms_queue_flush(&cap->rq);
	bb10capture_start_capture(cap);
}
/* Release the VP8 decoder state: codec context, partially reassembled
 * frame, cached output picture and queued packets, then the state itself. */
static void dec_uninit(MSFilter *f) {
	DecState *state = (DecState*)f->data;

	vpx_codec_destroy(&state->codec);
	if (state->curframe != NULL) {
		freemsg(state->curframe);
	}
	if (state->yuv_msg != NULL) {
		freemsg(state->yuv_msg);
	}
	ms_queue_flush(&state->q);
	ms_free(state);
}
/* Inter-ticker sink tick: forward every incoming packet to the connected
 * source filter, or drop them all when nothing is connected. */
static void itc_sink_process(MSFilter *f){
	MSFilter *peer=(MSFilter *)f->data;
	mblk_t *packet;

	if (peer==NULL) {
		ms_queue_flush (f->inputs[0]);
		return;
	}
	for (packet=ms_queue_get(f->inputs[0]); packet!=NULL; packet=ms_queue_get(f->inputs[0])){
		itc_source_queue_packet(peer,packet);
	}
}
/* Encoder tick: encode every pending YUV frame of input 0 with OpenH264 and
 * pack the resulting NAL units as RTP (RFC3984) on output 0. Keyframes are
 * forced via the video starter shortly after the stream begins. */
void MSOpenH264Encoder::feed() {
	if (!isInitialized()){
		/* encoder not ready: drop the input */
		ms_queue_flush(mFilter->inputs[0]);
		return;
	}

	mblk_t *im;
	MSQueue nalus;
	ms_queue_init(&nalus);
	long long int ts = mFilter->ticker->time * 90LL; // 90kHz RTP clock

	// Send I frame 2 seconds and 4 seconds after the beginning
	if (mVideoStarter.needIFrame(mFilter->ticker->time)) {
		generateKeyframe();
	}

	while ((im = ms_queue_get(mFilter->inputs[0])) != NULL) {
		MSPicture pic;
		if (ms_yuv_buf_init_from_mblk(&pic, im) == 0) {
			SFrameBSInfo sFbi = { 0 };
			SSourcePicture srcPic = { 0 };
			srcPic.iColorFormat = videoFormatI420;
			srcPic.iPicWidth = pic.w;
			srcPic.iPicHeight = pic.h;
			for (int i = 0; i < 3; i++) {
				srcPic.iStride[i] = pic.strides[i];
				srcPic.pData[i] = pic.planes[i];
			}
			srcPic.uiTimeStamp = ts;
			int ret = mEncoder->EncodeFrame(&srcPic, &sFbi);
			if (ret == cmResultSuccess) {
				/* skipped/invalid frames produce no RTP output */
				if ((sFbi.eOutputFrameType != videoFrameTypeSkip) && (sFbi.eOutputFrameType != videoFrameTypeInvalid)) {
					if (mFrameCount == 0) {
						mVideoStarter.firstFrame(mFilter->ticker->time);
					}
					mFrameCount++;
					fillNalusQueue(sFbi, &nalus);
					rfc3984_pack(mPacker, &nalus, mFilter->outputs[0], sFbi.uiTimeStamp);
				}
			} else {
				ms_error("OpenH264 encoder: Frame encoding failed: %d", ret);
			}
		}
		freemsg(im);
	}
}
/* Snapshot filter tick: when a picture has been requested (take_pic), JPEG-
 * encode the most recent frame of input 0 to s->filename; the input queue is
 * always flushed afterwards.
 * Fix vs original: ms_queue_peek_last() may return NULL on an empty queue;
 * the result is now checked before use (the request stays armed and is
 * retried on a later tick). The redundant goto was removed. */
static void jpg_process(MSFilter *f){
	JpegWriter *s=(JpegWriter*)f->data;
	if (s->take_pic){
		MSPicture yuvbuf;
		mblk_t *m=ms_queue_peek_last(f->inputs[0]);
		if (m!=NULL && ms_yuv_buf_init_from_mblk(&yuvbuf,m)==0){
			MSVideoSize dstsize;
			dstsize.height = yuvbuf.h;
			dstsize.width = yuvbuf.w;
			if (jpeg_enc_yv12(m->b_rptr,dstsize.width,dstsize.height,90,s->filename))
				ms_message("Snapshot Ok");
			else
				ms_message("Snapshot failed!");
			s->take_pic = FALSE;
		}
	}
	ms_queue_flush(f->inputs[0]);
}
/* VP8 encoder tick: encode the most recent frame of input 0, maintain the
 * golden/altref reference-frame state used by AVPF repair, split the encoded
 * data into RTP payload-descriptor packets and hand them to the packer.
 * The input queue is flushed each tick (only the latest frame is encoded). */
static void enc_process(MSFilter *f) {
	mblk_t *im;
	uint64_t timems = f->ticker->time;
	uint32_t timestamp = (uint32_t)(timems*90); /* 90kHz RTP clock */
	EncState *s = (EncState *)f->data;
	unsigned int flags = 0;
	vpx_codec_err_t err;
	MSPicture yuv;
	bool_t is_ref_frame=FALSE;

	ms_filter_lock(f);

#ifdef AVPF_DEBUG
	ms_message("VP8 enc_process:");
#endif

	if (!s->ready) {
		/* encoder not configured yet: drop input */
		ms_queue_flush(f->inputs[0]);
		ms_filter_unlock(f);
		return;
	}

	if ((im = ms_queue_peek_last(f->inputs[0])) != NULL) {
		vpx_image_t img;

		flags = 0;
		ms_yuv_buf_init_from_mblk(&yuv, im);
		/* wrap the YUV buffer without copying it */
		vpx_img_wrap(&img, VPX_IMG_FMT_I420, s->vconf.vsize.width, s->vconf.vsize.height, 1, yuv.planes[0]);

		/* without AVPF, keyframes are driven by the video starter */
		if ((s->avpf_enabled != TRUE) && ms_video_starter_need_i_frame(&s->starter, f->ticker->time)) {
			s->force_keyframe = TRUE;
		}
		if (s->force_keyframe == TRUE) {
			ms_message("Forcing vp8 key frame for filter [%p]", f);
			flags = VPX_EFLAG_FORCE_KF;
		} else if (s->avpf_enabled == TRUE) {
			if (s->frame_count == 0) s->force_keyframe = TRUE;
			enc_fill_encoder_flags(s, &flags);
		}

#ifdef AVPF_DEBUG
		ms_message("VP8 encoder frames state:");
		ms_message("\tgolden: count=%" PRIi64 ", picture_id=0x%04x, ack=%s",
			s->frames_state.golden.count, s->frames_state.golden.picture_id,
			(s->frames_state.golden.acknowledged == TRUE) ? "Y" : "N");
		ms_message("\taltref: count=%" PRIi64 ", picture_id=0x%04x, ack=%s",
			s->frames_state.altref.count, s->frames_state.altref.picture_id,
			(s->frames_state.altref.acknowledged == TRUE) ? "Y" : "N");
#endif
		err = vpx_codec_encode(&s->codec, &img, s->frame_count, 1, flags, 1000000LL/(2*(int)s->vconf.fps)); /*encoder has half a framerate interval to encode*/
		if (err) {
			ms_error("vpx_codec_encode failed : %d %s (%s)\n", err, vpx_codec_err_to_string(err), vpx_codec_error_detail(&s->codec));
		} else {
			vpx_codec_iter_t iter = NULL;
			const vpx_codec_cx_pkt_t *pkt;
			MSList *list = NULL;

			/* Update the frames state. */
			is_ref_frame=FALSE;
			if (flags & VPX_EFLAG_FORCE_KF) {
				/* a keyframe refreshes every reference frame */
				enc_mark_reference_frame_as_sent(s, VP8_GOLD_FRAME);
				enc_mark_reference_frame_as_sent(s, VP8_ALTR_FRAME);
				s->frames_state.golden.is_independant=TRUE;
				s->frames_state.altref.is_independant=TRUE;
				s->frames_state.last_independent_frame=s->frame_count;
				s->force_keyframe = FALSE;
				is_ref_frame=TRUE;
			}else if (flags & VP8_EFLAG_FORCE_GF) {
				enc_mark_reference_frame_as_sent(s, VP8_GOLD_FRAME);
				is_ref_frame=TRUE;
			}else if (flags & VP8_EFLAG_FORCE_ARF) {
				enc_mark_reference_frame_as_sent(s, VP8_ALTR_FRAME);
				is_ref_frame=TRUE;
			}else if (flags & VP8_EFLAG_NO_REF_LAST) {
				enc_mark_reference_frame_as_sent(s, VP8_LAST_FRAME);
				is_ref_frame=is_reconstruction_frame_sane(s,flags);
			}
			if (is_frame_independent(flags)){
				s->frames_state.last_independent_frame=s->frame_count;
			}

			/* Pack the encoded frame. */
			while( (pkt = vpx_codec_get_cx_data(&s->codec, &iter)) ) {
				if ((pkt->kind == VPX_CODEC_CX_FRAME_PKT) && (pkt->data.frame.sz > 0)) {
					Vp8RtpFmtPacket *packet = ms_new0(Vp8RtpFmtPacket, 1);
					packet->m = allocb(pkt->data.frame.sz, 0);
					memcpy(packet->m->b_wptr, pkt->data.frame.buf, pkt->data.frame.sz);
					packet->m->b_wptr += pkt->data.frame.sz;
					mblk_set_timestamp_info(packet->m, timestamp);
					packet->pd = ms_new0(Vp8RtpFmtPayloadDescriptor, 1);
					packet->pd->start_of_partition = TRUE;
					packet->pd->non_reference_frame = s->avpf_enabled && !is_ref_frame;
					if (s->avpf_enabled == TRUE) {
						/* AVPF carries the picture id so the peer can NACK/RPSI */
						packet->pd->extended_control_bits_present = TRUE;
						packet->pd->pictureid_present = TRUE;
						packet->pd->pictureid = s->picture_id;
					} else {
						packet->pd->extended_control_bits_present = FALSE;
						packet->pd->pictureid_present = FALSE;
					}
					if (s->flags & VPX_CODEC_USE_OUTPUT_PARTITION) {
						packet->pd->pid = (uint8_t)pkt->data.frame.partition_id;
						if (!(pkt->data.frame.flags & VPX_FRAME_IS_FRAGMENT)) {
							mblk_set_marker_info(packet->m, TRUE);
						}
					} else {
						packet->pd->pid = 0;
						mblk_set_marker_info(packet->m, TRUE);
					}
					list = ms_list_append(list, packet);
				}
			}

#ifdef AVPF_DEBUG
			ms_message("VP8 encoder picture_id=%i ***| %s | %s | %s | %s", (int)s->picture_id,
				(flags & VPX_EFLAG_FORCE_KF) ? "KF " : (flags & VP8_EFLAG_FORCE_GF) ? "GF " : (flags & VP8_EFLAG_FORCE_ARF) ? "ARF" : " ",
				(flags & VP8_EFLAG_NO_REF_GF) ? "NOREFGF" : " ",
				(flags & VP8_EFLAG_NO_REF_ARF) ? "NOREFARF" : " ",
				(flags & VP8_EFLAG_NO_REF_LAST) ? "NOREFLAST" : " ");
#endif
			vp8rtpfmt_packer_process(&s->packer, list, f->outputs[0], f->factory);

			/* Handle video starter if AVPF is not enabled. */
			s->frame_count++;
			if ((s->avpf_enabled != TRUE) && (s->frame_count == 1)) {
				ms_video_starter_first_frame(&s->starter, f->ticker->time);
			}

			/* Increment the pictureID. */
			s->picture_id++;
#ifdef PICTURE_ID_ON_16_BITS
			if (s->picture_id == 0) s->picture_id = 0x8000;
#else
			if (s->picture_id == 0x0080) s->picture_id = 0;
#endif
		}
	}
	ms_filter_unlock(f);
	ms_queue_flush(f->inputs[0]);
}
/* X11/Xv display tick: track window resizes, compose the remote frame
 * (input 0) with a scaled local preview (input 1) in a corner of the frame
 * buffer, and blit the result with XvShmPutImage. Frames are dropped when
 * the ticker is running late. Inputs are flushed each tick. */
static void x11video_process(MSFilter *f){
	X11Video *obj=(X11Video*)f->data;
	mblk_t *inm;
	int update=0;
	MSPicture lsrc={0};
	MSPicture src={0};
	MSRect mainrect,localrect;
	bool_t precious=FALSE;
	bool_t local_precious=FALSE;
	XWindowAttributes wa;
	MSTickerLateEvent late_info;

	ms_filter_lock(f);
	if ((obj->window_id == 0) || (x11_error == TRUE)) goto end;

	XGetWindowAttributes(obj->display,obj->window_id,&wa);
	if (x11_error == TRUE) {
		ms_error("Could not get window attributes for window %lu", obj->window_id);
		goto end;
	}
	/* follow external window resizes */
	if (wa.width!=obj->wsize.width || wa.height!=obj->wsize.height){
		ms_warning("Resized to %ix%i", wa.width,wa.height);
		obj->wsize.width=wa.width;
		obj->wsize.height=wa.height;
		XClearWindow(obj->display,obj->window_id);
	}

	/* skip rendering when the ticker is running more than 100ms late */
	ms_ticker_get_last_late_tick(f->ticker, &late_info);
	if(late_info.current_late_ms > 100) {
		ms_warning("Dropping frames because we're late");
		goto end;
	}

	if (!obj->show) {
		goto end;
	}
	if (!obj->ready){
		goto end;
	}

	if (f->inputs[0]!=NULL && (inm=ms_queue_peek_last(f->inputs[0]))!=0) {
		if (ms_yuv_buf_init_from_mblk(&src,inm)==0){
			MSVideoSize newsize;
			newsize.width=src.w;
			newsize.height=src.h;
			precious=mblk_get_precious_flag(inm);
			/* video size changed: optionally resize the window and re-prepare */
			if (!ms_video_size_equal(newsize,obj->vsize) ) {
				ms_message("received size is %ix%i",newsize.width,newsize.height);
				obj->vsize=newsize;
				if (obj->autofit){
					MSVideoSize new_window_size;
					static const MSVideoSize min_size=MS_VIDEO_SIZE_QVGA;
					/*don't resize less than QVGA, it is too small*/
					if (min_size.width*min_size.height>newsize.width*newsize.height){
						new_window_size.width=newsize.width*2;
						new_window_size.height=newsize.height*2;
					}else new_window_size=newsize;
					obj->wsize=new_window_size;
					ms_message("autofit: new window size should be %ix%i",new_window_size.width,new_window_size.height);
					XResizeWindow(obj->display,obj->window_id,new_window_size.width,new_window_size.height);
					XSync(obj->display,FALSE);
				}
				x11video_unprepare(f);
				x11video_prepare(f);
				if (!obj->ready) goto end;
			}
		}
		update=1;
	}
	/*process last video message for local preview*/
	if (obj->corner!=-1 && f->inputs[1]!=NULL && (inm=ms_queue_peek_last(f->inputs[1]))!=0) {
		if (ms_yuv_buf_init_from_mblk(&lsrc,inm)==0){
			obj->lsize.width=lsrc.w;
			obj->lsize.height=lsrc.h;
			local_precious=mblk_get_precious_flag(inm);
			update=1;
		}
	}

	ms_layout_compute(obj->vsize, obj->vsize,obj->lsize,obj->corner,obj->scale_factor,&mainrect,&localrect);

	if (lsrc.w!=0 && obj->corner!=-1){
		/* first reduce the local preview image into a temporary image*/
		if (obj->local_msg==NULL){
			obj->local_msg=ms_yuv_buf_alloc(&obj->local_pic,localrect.w,localrect.h);
		}
		if (obj->sws2==NULL){
			obj->sws2=ms_scaler_create_context(lsrc.w,lsrc.h,MS_YUV420P,localrect.w,localrect.h,MS_YUV420P, MS_SCALER_METHOD_BILINEAR);
		}
		ms_scaler_process(obj->sws2,lsrc.planes,lsrc.strides,obj->local_pic.planes,obj->local_pic.strides);
		/* preview is mirrored unless the frame is marked precious */
		if (!local_precious) ms_yuv_buf_mirror(&obj->local_pic);
	}

	if (update && src.w!=0){
		ms_yuv_buf_copy(src.planes,src.strides,obj->fbuf.planes,obj->fbuf.strides,obj->vsize);
		if (obj->mirror && !precious) ms_yuv_buf_mirror(&obj->fbuf);
	}

	/*copy resized local view into a corner:*/
	if (update && obj->local_msg!=NULL && obj->corner!=-1){
		MSPicture corner=obj->fbuf;
		MSVideoSize roi;
		roi.width=obj->local_pic.w;
		roi.height=obj->local_pic.h;
		corner.w=obj->local_pic.w;
		corner.h=obj->local_pic.h;
		/* offset each plane to the corner position (chroma at half resolution) */
		corner.planes[0]+=localrect.x+(localrect.y*corner.strides[0]);
		corner.planes[1]+=(localrect.x/2)+((localrect.y/2)*corner.strides[1]);
		corner.planes[2]+=(localrect.x/2)+((localrect.y/2)*corner.strides[2]);
		corner.planes[3]=0;
		ms_yuv_buf_copy(obj->local_pic.planes,obj->local_pic.strides, corner.planes,corner.strides,roi);
	}
	if (update){
		MSRect rect;
		ms_layout_center_rectangle(obj->wsize,obj->vsize,&rect);
		//ms_message("XvShmPutImage() %ix%i --> %ix%i",obj->fbuf.w,obj->fbuf.h,obj->wsize.width,obj->wsize.height);
		XvShmPutImage(obj->display,obj->port,obj->window_id,obj->gc, obj->xv_image, 0,0,obj->fbuf.w,obj->fbuf.h, rect.x,rect.y,rect.w,rect.h,TRUE);
		XSync(obj->display,FALSE);
	}

end:
	ms_filter_unlock(f);
	if (f->inputs[0]!=NULL) ms_queue_flush(f->inputs[0]);
	if (f->inputs[1]!=NULL) ms_queue_flush(f->inputs[1]);
}
/* Release everything owned by an RFC3984 context: the queued NAL units and
 * the frame currently under reassembly. The context itself is not freed. */
void rfc3984_uninit(Rfc3984Context *ctx){
	ms_queue_flush(&ctx->q);
	if (ctx->m != NULL) {
		freemsg(ctx->m);
	}
	ctx->m = NULL;
}
/* Opus decoder tick: decode every pending packet of input 0 to 16-bit PCM on
 * output 0, then, if the concealer says samples are missing, synthesize them
 * using in-band FEC from the n+2 packet when available, or PLC otherwise.
 * Fix vs original: when no decoder state exists the function now returns
 * after flushing the input — previously execution fell through to the
 * concealment path, which could call opus_decode() with a NULL state. */
static void ms_opus_dec_process(MSFilter *f) {
	OpusDecData *d = (OpusDecData *)f->data;
	mblk_t *im;
	mblk_t *om;
	int frames;

	if (!d->state) {
		ms_queue_flush(f->inputs[0]);
		return;
	}

	/* decode available packets */
	while ((im = ms_queue_get(f->inputs[0])) != NULL) {
		om = allocb(5760 * d->channels * SIGNAL_SAMPLE_SIZE, 0); /* 5760 is the maximum number of sample in a packet (120ms at 48KHz) */

		frames = opus_decode(d->state, (const unsigned char *)im->b_rptr, im->b_wptr - im->b_rptr, (opus_int16 *)om->b_wptr, 5760, 0);
		if (frames < 0) {
			ms_warning("Opus decoder error: %s", opus_strerror(frames));
			freemsg(om);
		} else {
			d->lastPacketLength = frames; // store the packet length for eventual PLC if next two packets are missing
			om->b_wptr += frames * d->channels * SIGNAL_SAMPLE_SIZE;
			mblk_meta_copy(im,om);
			ms_queue_put(f->outputs[0], om);
			/*ms_message("Opus: outputing a normal frame of %i bytes (%i samples,%i ms)",(int)(om->b_wptr-om->b_rptr),frames,frames*1000/d->samplerate);*/
			d->sequence_number = mblk_get_cseq(im); // used to get eventual FEC information if next packet is missing
			ms_concealer_inc_sample_time(d->concealer,f->ticker->time, frames*1000/d->samplerate, 1);
		}
		freemsg(im);
	}

	/* Concealment if needed */
	if (ms_concealer_context_is_concealement_required(d->concealer, f->ticker->time)) {
		int imLength = 0;
		uint8_t *payload = NULL;
		im = NULL;

		// try fec : info are stored in the next packet, do we have it?
		if (d->rtp_picker_context.picker) {
			/* FEC information is in the next packet, last valid packet was d->sequence_number, the missing one shall then be d->sequence_number+1, so check jitter buffer for d->sequence_number+2 */
			/* but we may have the n+1 packet in the buffer and adaptative jitter control keeping it for later, in that case, just go for PLC */
			if (d->rtp_picker_context.picker(&d->rtp_picker_context,d->sequence_number+1) == NULL) { /* missing packet is really missing */
				im = d->rtp_picker_context.picker(&d->rtp_picker_context,d->sequence_number+2); /* try to get n+2 */
				if (im) {
					imLength=rtp_get_payload(im,&payload);
				}
			}
		}
		om = allocb(5760 * d->channels * SIGNAL_SAMPLE_SIZE, 0); /* 5760 is the maximum number of sample in a packet (120ms at 48KHz) */
		/* call to the decoder, we'll have either FEC or PLC, do it on the same length that last received packet */
		if (payload) { // found frame to try FEC
			d->statsfec++;
			frames = opus_decode(d->state, payload, imLength, (opus_int16 *)om->b_wptr, d->lastPacketLength, 1);
		} else { // do PLC: PLC doesn't seem to be able to generate more than 960 samples (20 ms at 48000 Hz), get PLC until we have the correct number of sample
			//frames = opus_decode(d->state, NULL, 0, (opus_int16 *)om->b_wptr, d->lastPacketLength, 0); // this should have work if opus_decode returns the requested number of samples
			d->statsplc++;
			frames = 0;
			while (frames < d->lastPacketLength) {
				frames += opus_decode(d->state, NULL, 0, (opus_int16 *)(om->b_wptr + (frames*d->channels*SIGNAL_SAMPLE_SIZE)), d->lastPacketLength-frames, 0);
			}
		}
		if (frames < 0) {
			ms_warning("Opus decoder error in concealment: %s", opus_strerror(frames));
			freemsg(om);
		} else {
			om->b_wptr += frames * d->channels * SIGNAL_SAMPLE_SIZE;
			/*ms_message("Opus: outputing a PLC frame of %i bytes (%i samples,%i ms)",(int)(om->b_wptr-om->b_rptr),frames,frames*1000/d->samplerate);*/
			mblk_set_plc_flag(om,TRUE);
			ms_queue_put(f->outputs[0], om);
			d->sequence_number++;
			ms_concealer_inc_sample_time(d->concealer,f->ticker->time, frames*1000/d->samplerate, 0);
		}
	}
}
/* Inter-ticker source teardown: empty the pending packet queue, destroy the
 * mutex that protects it, then free the state.
 * Fix vs original: the queue is flushed BEFORE its mutex is destroyed so the
 * queue is never touched after its lock is gone. */
static void itc_source_uninit(MSFilter *f){
	SourceState *s=(SourceState *)f->data;
	ms_queue_flush (&s->q);
	ms_mutex_destroy(&s->mutex);
	ms_free(s);
}
static void enc_process(MSFilter *f){ EncData *d=(EncData*)f->data; MSPicture pic = {0}; MSQueue nalus; mblk_t *im; long long int ts = f->ticker->time * 90LL; if (d->codec==NULL){ ms_queue_flush(f->inputs[0]); return; } ms_queue_init(&nalus); while((im=ms_queue_get(f->inputs[0]))!=NULL){ if (ms_yuv_buf_init_from_mblk(&pic,im)==0){ uint8_t *buf=NULL; size_t bufsize; ssize_t ibufidx, obufidx; ibufidx = AMediaCodec_dequeueInputBuffer(d->codec, TIMEOUT_US); if (ibufidx >= 0) { buf = AMediaCodec_getInputBuffer(d->codec, ibufidx, &bufsize); if(buf != NULL){ if(d->isYUV){ int ysize = pic.w * pic.h; int usize = ysize / 4; memcpy(buf, pic.planes[0], ysize); memcpy(buf + ysize, pic.planes[1], usize); memcpy(buf + ysize+usize, pic.planes[2], usize); } else { size_t size=(size_t) pic.w * pic.h; uint8_t *dst = pic.planes[0]; memcpy(buf,dst,size); int i; for (i = 0; i < pic.w/2*pic.h/2; i++){ buf[size+2*i]=pic.planes[1][i]; buf[size+2*i+1]=pic.planes[2][i]; } } AMediaCodec_queueInputBuffer(d->codec, ibufidx, 0, (size_t)(pic.w * pic.h)*3/2, f->ticker->time*1000,0); } } AMediaCodecBufferInfo info; obufidx = AMediaCodec_dequeueOutputBuffer(d->codec, &info, TIMEOUT_US); while(obufidx >= 0) { buf = AMediaCodec_getOutputBuffer(d->codec, obufidx, &bufsize); extractNalus(buf,info.size,ts,&nalus); AMediaCodec_releaseOutputBuffer(d->codec, obufidx, FALSE); obufidx = AMediaCodec_dequeueOutputBuffer(d->codec, &info, TIMEOUT_US); rfc3984_pack(d->packer,&nalus,f->outputs[0],ts); } if (d->framenum==0) ms_video_starter_first_frame(&d->starter, f->ticker->time); d->framenum++; } freemsg(im); } if (d->force_keyframe == TRUE) { AMediaCodec_setParams(d->codec,""); d->force_keyframe = FALSE; } }
/* inputs[0]= reference signal from far end (sent to soundcard)
 * inputs[1]= near speech & echo signal (read from soundcard)
 * outputs[0]= is a copy of inputs[0] to be sent to soundcard
 * outputs[1]= near end speech, echo removed - towards far end
 *
 * The reference signal is buffered twice: `ref` is forwarded to the
 * soundcard with no delay, while `delayed_ref` is consumed in sync with the
 * echo signal and fed to the speex echo canceller. A flow controller purges
 * excess reference samples when the two streams drift apart. */
static void speex_ec_process(MSFilter *f){
	SpeexECState *s=(SpeexECState*)f->data;
	int nbytes=s->framesize*2; /* frame size in bytes, 16-bit samples */
	mblk_t *refm;
	uint8_t *ref,*echo;

	if (s->bypass_mode) {
		/* pass both streams through untouched */
		while((refm=ms_queue_get(f->inputs[0]))!=NULL){
			ms_queue_put(f->outputs[0],refm);
		}
		while((refm=ms_queue_get(f->inputs[1]))!=NULL){
			ms_queue_put(f->outputs[1],refm);
		}
		return;
	}

	if (f->inputs[0]!=NULL){
		if (s->echostarted){
			while((refm=ms_queue_get(f->inputs[0]))!=NULL){
				/* the flow controller may drop samples to resynchronize */
				refm=audio_flow_controller_process(&s->afc,refm);
				if (refm){
					mblk_t *cp=dupmsg(refm);
					ms_bufferizer_put(&s->delayed_ref,cp);
					ms_bufferizer_put(&s->ref,refm);
				}
			}
		}else{
			ms_warning("Getting reference signal but no echo to synchronize on.");
			ms_queue_flush(f->inputs[0]);
		}
	}

	ms_bufferizer_put_from_queue(&s->echo,f->inputs[1]);

	ref=(uint8_t*)alloca(nbytes);
	echo=(uint8_t*)alloca(nbytes);
	while (ms_bufferizer_read(&s->echo,echo,nbytes)==nbytes){
		mblk_t *oecho=allocb(nbytes,0);
		int avail;
		int avail_samples;

		if (!s->echostarted) s->echostarted=TRUE;
		if ((avail=ms_bufferizer_get_avail(&s->delayed_ref))<((s->nominal_ref_samples*2)+nbytes)){
			/*we don't have enough to read in a reference signal buffer, inject silence instead*/
			avail=nbytes;
			refm=allocb(nbytes,0);
			memset(refm->b_wptr,0,nbytes);
			refm->b_wptr+=nbytes;
			ms_bufferizer_put(&s->delayed_ref,refm);
			ms_queue_put(f->outputs[0],dupmsg(refm));
			if (!s->using_zeroes){
				ms_warning("Not enough ref samples, using zeroes");
				s->using_zeroes=TRUE;
			}
		}else{
			if (s->using_zeroes){
				ms_message("Samples are back.");
				s->using_zeroes=FALSE;
			}
			/* read from our no-delay buffer and output */
			refm=allocb(nbytes,0);
			if (ms_bufferizer_read(&s->ref,refm->b_wptr,nbytes)==0){
				ms_fatal("Should never happen");
			}
			refm->b_wptr+=nbytes;
			ms_queue_put(f->outputs[0],refm);
		}

		/*now read a valid buffer of delayed ref samples*/
		if (ms_bufferizer_read(&s->delayed_ref,ref,nbytes)==0){
			ms_fatal("Should never happen");
		}
		avail-=nbytes;
		avail_samples=avail/2;
		/*ms_message("avail=%i",avail_samples);*/
		/* track the minimum backlog over the flow-control interval */
		if (avail_samples<s->min_ref_samples || s->min_ref_samples==-1){
			s->min_ref_samples=avail_samples;
		}

#ifdef EC_DUMP
		if (s->reffile) fwrite(ref,nbytes,1,s->reffile);
		if (s->echofile) fwrite(echo,nbytes,1,s->echofile);
#endif
		speex_echo_cancellation(s->ecstate,(short*)echo,(short*)ref,(short*)oecho->b_wptr);
		speex_preprocess_run(s->den, (short*)oecho->b_wptr);
#ifdef EC_DUMP
		if (s->cleanfile) fwrite(oecho->b_wptr,nbytes,1,s->cleanfile);
#endif
		oecho->b_wptr+=nbytes;
		ms_queue_put(f->outputs[1],oecho);
	}

	/*verify our ref buffer does not become too big, meaning that we are receiving more samples than we are sending*/
	if ((((uint32_t)(f->ticker->time - s->flow_control_time)) >= flow_control_interval_ms) && (s->min_ref_samples != -1)) {
		int diff=s->min_ref_samples-s->nominal_ref_samples;
		if (diff>(nbytes/2)){
			int purge=diff-(nbytes/2);
			ms_warning("echo canceller: we are accumulating too much reference signal, need to throw out %i samples",purge);
			audio_flow_controller_set_target(&s->afc,purge,(flow_control_interval_ms*s->samplerate)/1000);
		}
		s->min_ref_samples=-1;
		s->flow_control_time = f->ticker->time;
	}
}
/* Process callback of the MediaCodec H264 decoder filter: unpack RTP (RFC3984)
 * payloads into NAL units, feed complete access units to the Android codec,
 * then drain any decoded pictures to outputs[0]. */
static void dec_process(MSFilter *f){
	DecData *d=(DecData*)f->data;
	MSPicture pic = {0};
	mblk_t *im,*om = NULL;
	ssize_t oBufidx = -1;
	size_t bufsize;
	bool_t need_reinit=FALSE;
	bool_t request_pli=FALSE;
	MSQueue nalus;
	AMediaCodecBufferInfo info;

	ms_queue_init(&nalus);
	while((im=ms_queue_get(f->inputs[0]))!=NULL){
		/* On the very first packet, inject the out-of-band SPS/PPS (stamped
		 * with the incoming packet's timestamp) ahead of the stream. */
		if (d->packet_num==0 && d->sps && d->pps){
			mblk_set_timestamp_info(d->sps,mblk_get_timestamp_info(im));
			mblk_set_timestamp_info(d->pps,mblk_get_timestamp_info(im));
			rfc3984_unpack(&d->unpacker, d->sps, &nalus);
			rfc3984_unpack(&d->unpacker, d->pps, &nalus);
			/* Ownership of sps/pps is handed to the unpacker; forget them. */
			d->sps=NULL;
			d->pps=NULL;
		}
		/* NOTE(review): rfc3984_unpack appears to consume 'im' (it is never
		 * freed here) — confirm against the unpacker's contract. */
		if(rfc3984_unpack(&d->unpacker,im,&nalus) <0){
			/* Unpacking error: ask the sender for a keyframe (PLI). */
			request_pli=TRUE;
		}
		if (!ms_queue_empty(&nalus)){
			int size;
			uint8_t *buf=NULL;
			ssize_t iBufidx;

			size=nalusToFrame(d,&nalus,&need_reinit);

			if (need_reinit) {
				//In case of rotation, the decoder needs to flushed in order to restart with the new video size
				AMediaCodec_flush(d->codec);
				d->first_buffer_queued = FALSE;
			}

			/*First put our H264 bitstream into the decoder*/
			iBufidx = AMediaCodec_dequeueInputBuffer(d->codec, TIMEOUT_US);
			if (iBufidx >= 0) {
				buf = AMediaCodec_getInputBuffer(d->codec, iBufidx, &bufsize);
				if(buf == NULL) {
					ms_error("MSMediaCodecH264Dec: AMediaCodec_getInputBuffer() returned NULL");
					break;
				}
				if((size_t)size > bufsize) {
					/* Frame does not fit in the codec's input buffer: drop it. */
					ms_error("Cannot copy the bitstream into the input buffer size : %i and bufsize %i",size,(int) bufsize);
					break;
				} else {
					struct timespec ts;
					clock_gettime(CLOCK_MONOTONIC, &ts);
					memcpy(buf,d->bitstream,(size_t)size);
					/* Presentation timestamp is derived from the monotonic
					 * clock (microseconds) plus a fixed offset — presumably
					 * only monotonicity matters to the codec; confirm. */
					AMediaCodec_queueInputBuffer(d->codec, iBufidx, 0, (size_t)size, (ts.tv_nsec/1000) + 10000LL, 0);
					d->first_buffer_queued = TRUE;
				}
			}else if (iBufidx == AMEDIA_ERROR_UNKNOWN){
				ms_error("MSMediaCodecH264Dec: AMediaCodec_dequeueInputBuffer() had an exception");
			}
		}
		d->packet_num++;
		/* Until SPS/PPS are known the decoder cannot produce valid output,
		 * so keep requesting a keyframe. */
		if (d->sps && d->pps) request_pli = FALSE;
		else request_pli = TRUE;
	}

	/*secondly try to get decoded frames from the decoder, this is performed every tick*/
	while (d->first_buffer_queued && (oBufidx = AMediaCodec_dequeueOutputBuffer(d->codec, &info, TIMEOUT_US)) >= 0){
		AMediaFormat *format;
		int width = 0, height = 0, color = 0;
		uint8_t *buf = AMediaCodec_getOutputBuffer(d->codec, oBufidx, &bufsize);

		if(buf == NULL){
			ms_filter_notify_no_arg(f,MS_VIDEO_DECODER_DECODING_ERRORS);
			ms_error("MSMediaCodecH264Dec: AMediaCodec_getOutputBuffer() returned NULL");
		}
		/* Query the codec's current output format for dimensions and pixel layout. */
		format = AMediaCodec_getOutputFormat(d->codec);
		if(format != NULL){
			AMediaFormat_getInt32(format, "width", &width);
			AMediaFormat_getInt32(format, "height", &height);
			AMediaFormat_getInt32(format, "color-format", &color);

			d->vsize.width=width;
			d->vsize.height=height;
			AMediaFormat_delete(format);
		}

		if(buf != NULL && d->sps && d->pps){ /*some decoders output garbage while no sps or pps have been received yet !*/
			if(width != 0 && height != 0 ){
				if(color == 19) {
					/* Color format 19 is planar YUV420: copy the three planes. */
					//YUV
					int ysize = width*height;
					int usize = ysize/4;
					om = ms_yuv_buf_allocator_get(d->buf_allocator,&pic,width,height);
					memcpy(pic.planes[0],buf,ysize);
					memcpy(pic.planes[1],buf+ysize,usize);
					memcpy(pic.planes[2],buf+ysize+usize,usize);
				} else {
					/* Otherwise assume semi-planar (NV12-like) output and
					 * convert to planar YUV. */
					uint8_t* cbcr_src = (uint8_t*) (buf + width * height);
					om = copy_ycbcrbiplanar_to_true_yuv_with_rotation_and_down_scale_by_2(d->buf_allocator, buf, cbcr_src, 0, width, height, width, width, TRUE, FALSE);
				}

				if (!d->first_image_decoded) {
					ms_message("First frame decoded %ix%i",width,height);
					d->first_image_decoded = true;
					ms_filter_notify_no_arg(f, MS_VIDEO_DECODER_FIRST_IMAGE_DECODED);
				}
				ms_queue_put(f->outputs[0], om);
			}else{
				ms_error("MSMediaCodecH264Dec: width and height are not known !");
			}
		}
		AMediaCodec_releaseOutputBuffer(d->codec, oBufidx, FALSE);
	}
	if (oBufidx == AMEDIA_ERROR_UNKNOWN){
		ms_error("MSMediaCodecH264Dec: AMediaCodec_dequeueOutputBuffer() had an exception");
	}

	if (d->avpf_enabled && request_pli) {
		ms_filter_notify_no_arg(f, MS_VIDEO_DECODER_SEND_PLI);
	}
	ms_queue_flush(f->inputs[0]);
}
static void enc_process(MSFilter *f){ EncData *d=(EncData*)f->data; uint32_t ts=f->ticker->time*90LL; mblk_t *im; MSPicture pic; MSQueue nalus; if (d->enc==NULL){ ms_queue_flush(f->inputs[0]); return; } ms_queue_init(&nalus); while((im=ms_queue_get(f->inputs[0]))!=NULL){ if (ms_yuv_buf_init_from_mblk(&pic,im)==0){ x264_picture_t xpic; x264_picture_t oxpic; x264_nal_t *xnals=NULL; int num_nals=0; memset(&xpic, 0, sizeof(xpic)); memset(&oxpic, 0, sizeof(oxpic)); /*send I frame 2 seconds and 4 seconds after the beginning */ if (video_starter_need_i_frame(&d->starter,f->ticker->time)) d->generate_keyframe=TRUE; if (d->generate_keyframe){ xpic.i_type=X264_TYPE_IDR; d->generate_keyframe=FALSE; }else xpic.i_type=X264_TYPE_AUTO; xpic.i_qpplus1=0; xpic.i_pts=d->framenum; xpic.param=NULL; xpic.img.i_csp=X264_CSP_I420; xpic.img.i_plane=3; xpic.img.i_stride[0]=pic.strides[0]; xpic.img.i_stride[1]=pic.strides[1]; xpic.img.i_stride[2]=pic.strides[2]; xpic.img.i_stride[3]=0; xpic.img.plane[0]=pic.planes[0]; xpic.img.plane[1]=pic.planes[1]; xpic.img.plane[2]=pic.planes[2]; xpic.img.plane[3]=0; if (x264_encoder_encode(d->enc,&xnals,&num_nals,&xpic,&oxpic)>=0){ x264_nals_to_msgb(xnals,num_nals,&nalus); /*if (num_nals == 0) ms_message("Delayed frames info: current=%d max=%d\n", x264_encoder_delayed_frames(d->enc), x264_encoder_maximum_delayed_frames(d->enc)); */ rfc3984_pack(d->packer,&nalus,f->outputs[0],ts); if (d->framenum==0) video_starter_first_frame(&d->starter,f->ticker->time); d->framenum++; }else{ ms_error("x264_encoder_encode() error."); } } freemsg(im); } }
static void jpg_process(MSFilter *f){ JpegWriter *s=(JpegWriter*)f->data; ms_filter_lock(f); if (s->file!=NULL && s->codec!=NULL){ MSPicture yuvbuf, yuvjpeg; mblk_t *m=ms_queue_peek_last(f->inputs[0]); if (ms_yuv_buf_init_from_mblk(&yuvbuf,m)==0){ int error,got_pict; int comp_buf_sz=msgdsize(m); uint8_t *comp_buf=(uint8_t*)ms_malloc0(comp_buf_sz); mblk_t *jpegm; struct SwsContext *sws_ctx; struct AVPacket packet; AVCodecContext *avctx=avcodec_alloc_context3(s->codec); memset(&packet, 0, sizeof(packet)); avctx->width=yuvbuf.w; avctx->height=yuvbuf.h; avctx->time_base.num = 1; avctx->time_base.den =1; avctx->pix_fmt=AV_PIX_FMT_YUVJ420P; error=avcodec_open2(avctx,s->codec,NULL); if (error!=0) { ms_error("avcodec_open() failed: %i",error); cleanup(s,NULL, FALSE); av_free(avctx); goto end; } sws_ctx=sws_getContext(avctx->width,avctx->height,AV_PIX_FMT_YUV420P, avctx->width,avctx->height,avctx->pix_fmt,SWS_FAST_BILINEAR,NULL, NULL, NULL); if (sws_ctx==NULL) { ms_error(" sws_getContext() failed."); cleanup(s,avctx, FALSE); goto end; } jpegm=ms_yuv_buf_alloc (&yuvjpeg,avctx->width, avctx->height); #if LIBSWSCALE_VERSION_INT >= AV_VERSION_INT(0,9,0) if (sws_scale(sws_ctx,(const uint8_t *const*)yuvbuf.planes,yuvbuf.strides,0,avctx->height,yuvjpeg.planes,yuvjpeg.strides)<0){ #else if (sws_scale(sws_ctx,(uint8_t **)yuvbuf.planes,yuvbuf.strides,0,avctx->height,yuvjpeg.planes,yuvjpeg.strides)<0){ #endif ms_error("sws_scale() failed."); sws_freeContext(sws_ctx); cleanup(s,avctx, FALSE); freemsg(jpegm); goto end; } sws_freeContext(sws_ctx); av_frame_unref(s->pict); avpicture_fill((AVPicture*)s->pict,(uint8_t*)jpegm->b_rptr,avctx->pix_fmt,avctx->width,avctx->height); packet.data=comp_buf; packet.size=comp_buf_sz; error=avcodec_encode_video2(avctx, &packet, s->pict, &got_pict); if (error<0){ ms_error("Could not encode jpeg picture."); }else{ if (fwrite(comp_buf,packet.size,1,s->file)>0){ ms_message("Snapshot done"); }else{ ms_error("Error writing snapshot."); } } ms_free(comp_buf); 
cleanup(s,avctx, TRUE); freemsg(jpegm); } goto end; } end: ms_filter_unlock(f); ms_queue_flush(f->inputs[0]); } static MSFilterMethod jpg_methods[]={ { MS_JPEG_WRITER_TAKE_SNAPSHOT, take_snapshot }, { 0,NULL} }; #ifndef _MSC_VER MSFilterDesc ms_jpeg_writer_desc={ .id=MS_JPEG_WRITER_ID, .name="MSJpegWriter", .text="Take a video snapshot as jpg file", .category=MS_FILTER_OTHER, .ninputs=1, .noutputs=0, .init=jpg_init, .process=jpg_process, .uninit=jpg_uninit, .methods=jpg_methods }; #else MSFilterDesc ms_jpeg_writer_desc={ MS_JPEG_WRITER_ID, "MSJpegWriter", "Take a video snapshot as jpg file", MS_FILTER_OTHER, NULL, 1, 0, jpg_init, NULL, jpg_process, NULL, jpg_uninit, jpg_methods }; #endif MS_FILTER_DESC_EXPORT(ms_jpeg_writer_desc)
static void dec_process(MSFilter *f) { DecState *s = (DecState *)f->data; mblk_t *im; vpx_codec_err_t err; vpx_image_t *img; vpx_codec_iter_t iter = NULL; MSQueue frame; MSQueue mtofree_queue; Vp8RtpFmtFrameInfo frame_info; if (!s->ready){ ms_queue_flush(f->inputs[0]); return; } ms_filter_lock(f); ms_queue_init(&frame); ms_queue_init(&mtofree_queue); /* Unpack RTP payload format for VP8. */ vp8rtpfmt_unpacker_feed(&s->unpacker, f->inputs[0]); /* Decode unpacked VP8 frames. */ while (vp8rtpfmt_unpacker_get_frame(&s->unpacker, &frame, &frame_info) == 0) { while ((im = ms_queue_get(&frame)) != NULL) { err = vpx_codec_decode(&s->codec, im->b_rptr, (unsigned int)(im->b_wptr - im->b_rptr), NULL, 0); if ((s->flags & VPX_CODEC_USE_INPUT_FRAGMENTS) && mblk_get_marker_info(im)) { err = vpx_codec_decode(&s->codec, NULL, 0, NULL, 0); } if (err) { ms_warning("vp8 decode failed : %d %s (%s)\n", err, vpx_codec_err_to_string(err), vpx_codec_error_detail(&s->codec)?vpx_codec_error_detail(&s->codec):"no details"); } ms_queue_put(&mtofree_queue, im); } /* Get decoded frame */ if ((img = vpx_codec_get_frame(&s->codec, &iter))) { int i, j; int reference_updates = 0; if (vpx_codec_control(&s->codec, VP8D_GET_LAST_REF_UPDATES, &reference_updates) == 0) { if (frame_info.pictureid_present && ((reference_updates & VP8_GOLD_FRAME) || (reference_updates & VP8_ALTR_FRAME))) { vp8rtpfmt_send_rpsi(&s->unpacker, frame_info.pictureid); } } if (s->yuv_width != img->d_w || s->yuv_height != img->d_h) { if (s->yuv_msg) freemsg(s->yuv_msg); s->yuv_msg = ms_yuv_buf_alloc(&s->outbuf, img->d_w, img->d_h); ms_message("MSVp8Dec: video is %ix%i", img->d_w, img->d_h); s->yuv_width = img->d_w; s->yuv_height = img->d_h; ms_filter_notify_no_arg(f, MS_FILTER_OUTPUT_FMT_CHANGED); } /* scale/copy frame to destination mblk_t */ for (i = 0; i < 3; i++) { uint8_t *dest = s->outbuf.planes[i]; uint8_t *src = img->planes[i]; int h = img->d_h >> ((i > 0) ? 
1 : 0); for (j = 0; j < h; j++) { memcpy(dest, src, s->outbuf.strides[i]); dest += s->outbuf.strides[i]; src += img->stride[i]; } } ms_queue_put(f->outputs[0], dupmsg(s->yuv_msg)); ms_average_fps_update(&s->fps, (uint32_t)f->ticker->time); if (!s->first_image_decoded) { s->first_image_decoded = TRUE; ms_filter_notify_no_arg(f, MS_VIDEO_DECODER_FIRST_IMAGE_DECODED); } } while ((im = ms_queue_get(&mtofree_queue)) != NULL) { freemsg(im); } }