/* Pop and dispatch at most one pending event from the MSEventQueue ring buffer.
 * Returns TRUE if an event was consumed, FALSE if the queue was empty.
 * The read pointer (rptr) is owned by this single reader; only the fields
 * shared with write_event() (endptr, freeroom) are touched under the mutex,
 * and the mutex is deliberately NOT held while invoking filter callbacks. */
static bool_t read_event(MSEventQueue *q){
	int available=q->size-q->freeroom; /* bytes of queued event data */
	if (available>0){
		MSFilter *f;
		unsigned int id;
		void *data;
		int argsize;
		int evsize;
		ms_mutex_lock(&q->mutex);/*q->endptr can be changed by write_event() so mutex is needed*/
		if (q->rptr>=q->endptr){
			/* reached the end of the written area: wrap to the start of the ring */
			q->rptr=q->buffer;
		}
		ms_mutex_unlock(&q->mutex);
		evsize=parse_event(q->rptr,&f,&id,&data,&argsize);
		if (f) {
			/* Track which filter is currently notifying; presumably this lets
			 * other code detect/clear the notifier if the filter goes away
			 * during the callback — TODO confirm against the rest of the file. */
			q->current_notifier=f;
			ms_filter_invoke_callbacks(&q->current_notifier,id,argsize>0 ? data : NULL, OnlyAsynchronous);
			q->current_notifier=NULL;
		}
		q->rptr+=evsize;
		ms_mutex_lock(&q->mutex);
		q->freeroom+=evsize; /* hand the consumed bytes back to the writer side */
		ms_mutex_unlock(&q->mutex);
		return TRUE;
	}
	return FALSE;
}
/*the ticker thread function that executes the filters */
/* Main loop of a ticker thread: runs the filter graphs once per tick, then
 * sleeps until the next scheduled tick.  s->lock is held while the graphs run
 * and released while sleeping; the lock/unlock placement in the inner loop is
 * deliberate and order-sensitive. */
void * ms_ticker_run(void *arg)
{
	uint64_t realtime;
	int64_t diff;
	MSTicker *s=(MSTicker*)arg;
	int lastlate=0;
	int precision=2;
	int late;
	precision = set_high_prio(s);
	s->ticks=1;
	ms_mutex_lock(&s->lock);
	s->orig=s->get_cur_time_ptr(s->get_cur_time_data); /* reference time of tick 0 */
	while(s->run){
		s->ticks++;
		{
#if TICKER_MEASUREMENTS
			MSTimeSpec begin,end;/*used to measure time spent in processing one tick*/
			double iload;
			ms_get_cur_time(&begin);
#endif
			run_graphs(s,s->execution_list,FALSE);
#if TICKER_MEASUREMENTS
			ms_get_cur_time(&end);
			/* instantaneous load = time spent in this tick as a percentage of the tick interval */
			iload=100*((end.tv_sec-begin.tv_sec)*1000.0 + (end.tv_nsec-begin.tv_nsec)/1000000.0)/(double)s->interval;
			/* exponential moving average of the load */
			s->av_load=(smooth_coef*s->av_load)+((1.0-smooth_coef)*iload);
#endif
		}
		s->time+=s->interval; /* logical time of the next tick, in ms */
		while(1){
			/* entered with s->lock held (from the outer loop or the lock at the bottom) */
			realtime=s->get_cur_time_ptr(s->get_cur_time_data)-s->orig;
			ms_mutex_unlock(&s->lock);
			diff=s->time-realtime;
			if (diff>0){
				/* sleep until next tick */
				sleepMs((int)diff);
			}else{
				late=(int)-diff;
				if (late>s->interval*5 && late>lastlate){
					ms_warning("%s: We are late of %d miliseconds.",s->name,late);
				}
				lastlate=late;
				break; /*exit the while loop */
			}
			ms_mutex_lock(&s->lock);
		}
		/* the break above leaves the lock released; re-take it for the next tick */
		ms_mutex_lock(&s->lock);
	}
	ms_mutex_unlock(&s->lock);
	unset_high_prio(precision);
	ms_message("%s thread exiting",s->name);
	ms_thread_exit(NULL);
	return NULL;
}
/* Process callback of the Video For Windows capture filter.
 * Paces output to s->fps using the ticker time; when a new frame is due,
 * drains the capture queue keeping only the most recent frame, timestamps
 * it with the RTP 90 kHz video clock and pushes it downstream. */
static void vfw_process(MSFilter * obj){
	VfwState *s=(VfwState*)obj->data;
	mblk_t *m;
	uint32_t timestamp;
	int cur_frame;
	if (s->frame_count==-1){
		/* first tick: latch the start time and reset the frame counter */
		s->start_time=(float)obj->ticker->time;
		s->frame_count=0;
	}
	/* number of frames that should have been emitted since start_time */
	cur_frame=(int)((obj->ticker->time-s->start_time)*s->fps/1000.0);
	if (cur_frame>s->frame_count){
		mblk_t *om=NULL;
		/*keep the most recent frame if several frames have been captured */
		if (s->eng!=NULL){
			/* the mutex is dropped while freeing so the capture thread is not
			 * blocked for long; the exact lock/unlock order matters here */
			ms_mutex_lock(&s->mutex);
			while((m=getq(&s->rq))!=NULL){
				ms_mutex_unlock(&s->mutex);
				if (om!=NULL) freemsg(om); /* discard the older frame */
				om=m;
				ms_mutex_lock(&s->mutex);
			}
			ms_mutex_unlock(&s->mutex);
		}
		if (om!=NULL){
			timestamp=(uint32_t)(obj->ticker->time*90);/* rtp uses a 90000 Hz clockrate for video*/
			mblk_set_timestamp_info(om,timestamp);
			ms_queue_put(obj->outputs[0],om);
		}
		s->frame_count++;
	}
}
/*the ticker thread function that executes the filters */ void * ms_ticker_run(void *arg) { MSTicker *s=(MSTicker*)arg; int lastlate=0; int precision=2; int late; precision = set_high_prio(s); s->thread_id = ms_thread_self(); s->ticks=1; s->orig=s->get_cur_time_ptr(s->get_cur_time_data); ms_mutex_lock(&s->lock); while(s->run){ uint64_t late_tick_time=0; s->ticks++; /*Step 1: run the graphs*/ { #if TICKER_MEASUREMENTS MSTimeSpec begin,end;/*used to measure time spent in processing one tick*/ double iload; ms_get_cur_time(&begin); #endif run_tasks(s); run_graphs(s,s->execution_list,FALSE); #if TICKER_MEASUREMENTS ms_get_cur_time(&end); iload=100*((end.tv_sec-begin.tv_sec)*1000.0 + (end.tv_nsec-begin.tv_nsec)/1000000.0)/(double)s->interval; s->av_load=(smooth_coef*s->av_load)+((1.0-smooth_coef)*iload); #endif } ms_mutex_unlock(&s->lock); /*Step 2: wait for next tick*/ s->time+=s->interval; late=s->wait_next_tick(s->wait_next_tick_data,s->time); if (late>s->interval*5 && late>lastlate){ ms_warning("%s: We are late of %d miliseconds.",s->name,late); late_tick_time=ms_get_cur_time_ms(); } lastlate=late; ms_mutex_lock(&s->lock); if (late_tick_time){ s->late_event.lateMs=late; s->late_event.time=late_tick_time; } s->late_event.current_late_ms = late; } ms_mutex_unlock(&s->lock); unset_high_prio(precision); ms_message("%s thread exiting",s->name); ms_thread_exit(NULL); s->thread_id = 0; return NULL; }
/* Poll until obj->eof becomes true or timeout_ms of waiting has accumulated,
 * sleeping refresh_time_ms between checks.  obj->eof is expected to be set by
 * another thread under obj->mutex.
 *
 * Fix: the original incremented obj->time_ms while the mutex was NOT held
 * (between unlock and re-lock), a data race with any other thread touching
 * the Eof object.  The increment is now performed after re-acquiring the
 * mutex; the timing behavior is otherwise unchanged. */
static void wait_for_eof(Eof *obj, int refresh_time_ms, int timeout_ms) {
	ms_mutex_lock(&obj->mutex);
	while(obj->time_ms < timeout_ms && !obj->eof) {
		ms_mutex_unlock(&obj->mutex);
		ms_usleep((useconds_t)(refresh_time_ms) * 1000U);
		ms_mutex_lock(&obj->mutex);
		obj->time_ms += refresh_time_ms; /* updated under the mutex */
	}
	ms_mutex_unlock(&obj->mutex);
}
/* Inter-ticker-communication source: drain every packet queued by the sink
 * side into this filter's output.  The mutex protects s->q, which is fed from
 * another ticker thread; it is released while posting to the output queue. */
static void itc_source_process(MSFilter *f){
	SourceState *s=(SourceState *)f->data;
	mblk_t *pkt;
	ms_mutex_lock(&s->mutex);
	for (pkt=ms_queue_get(&s->q); pkt!=NULL; pkt=ms_queue_get(&s->q)){
		ms_mutex_unlock(&s->mutex);
		ms_queue_put(f->outputs[0],pkt);
		ms_mutex_lock(&s->mutex);
	}
	ms_mutex_unlock(&s->mutex);
}
/* Connect (or replace) the source filter attached to this ITC sink.
 * Both the sink's ticker lock and, when replacing an existing source, the
 * previous source's ticker lock are held while swapping f->data, so neither
 * ticker can be running its graph during the switch.  Always returns 0. */
static int itc_sink_connect(MSFilter *f, void *data){
	MSFilter *prev;
	ms_mutex_lock(&f->ticker->lock);
	prev=(MSFilter *)f->data;
	if (prev==NULL){
		f->data=data;
	}else{
		/* a source is already connected: also freeze its ticker while swapping */
		ms_mutex_lock(&prev->ticker->lock);
		f->data=data;
		ms_mutex_unlock(&prev->ticker->lock);
	}
	ms_mutex_unlock(&f->ticker->lock);
	return 0;
}
/* AudioUnit input render callback: pulls captured audio from the audio unit
 * into a freshly allocated mblk_t and queues it for the reader filter.
 * Returns the AudioUnitRender status.
 *
 * Fix: in the original, when read was started and AudioUnitRender() failed,
 * the allocated mblk_t `rm` was never freed — a leak on every failed render.
 * It is now released on the error path. */
static OSStatus au_read_cb (
	void *inRefCon,
	AudioUnitRenderActionFlags *ioActionFlags,
	const AudioTimeStamp *inTimeStamp,
	UInt32 inBusNumber,
	UInt32 inNumberFrames,
	AudioBufferList *ioData
)
{
	AUData *d=(AUData*)inRefCon;
	if (d->readTimeStamp.mSampleTime <0) {
		/* first callback: latch the host-provided timestamp */
		d->readTimeStamp=*inTimeStamp;
	}
	OSStatus err=0;
	mblk_t * rm=NULL;
	if (d->read_started) {
		/* render straight into the message body to avoid a copy */
		rm=allocb(ioData->mBuffers[0].mDataByteSize,0);
		ioData->mBuffers[0].mData=rm->b_wptr;
	}
	err = AudioUnitRender(d->io_unit, ioActionFlags, &d->readTimeStamp, inBusNumber,inNumberFrames, ioData);
	if (d->read_started){
		if (err == 0) {
			rm->b_wptr += ioData->mBuffers[0].mDataByteSize;
			ms_mutex_lock(&d->mutex);
			putq(&d->rq,rm);
			ms_mutex_unlock(&d->mutex);
			/* advance the sample-time by the number of frames rendered;
			 * bits/2 here presumably accounts for bytes-per-frame — TODO confirm */
			d->readTimeStamp.mSampleTime+=ioData->mBuffers[0].mDataByteSize/(d->bits/2);
		}else{
			freemsg(rm); /* don't leak the buffer on render failure */
			ms_warning("AudioUnitRender() failed: %i",(int)err);
		}
	}
	return err;
}
/* Attach one or more filter graphs to a ticker in a single operation.
 * Takes a NULL-terminated varargs list of filters; for each one that is not
 * yet scheduled, discovers its connected graph, preprocesses every filter in
 * it, and collects the graph's source filters.  All collected sources are
 * appended to the ticker's execution list at once, under the ticker lock.
 * Always returns 0. */
int ms_ticker_attach_multiple(MSTicker *ticker,MSFilter *f,...)
{
	MSList *sources=NULL;
	MSList *filters=NULL;
	MSList *it;
	MSList *total_sources=NULL;
	va_list l;
	va_start(l,f);
	do{
		if (f->ticker==NULL) {
			filters=ms_filter_find_neighbours(f);
			sources=get_sources(filters);
			if (sources==NULL){
				/* ms_fatal presumably aborts; the cleanup below is defensive */
				ms_fatal("No sources found around filter %s",f->desc->name);
				ms_list_free(filters);
				break;
			}
			/*run preprocess on each filter: */
			for(it=filters;it!=NULL;it=it->next)
				ms_filter_preprocess((MSFilter*)it->data,ticker);
			ms_list_free(filters);
			/* ownership of the sources list is transferred into total_sources */
			total_sources=ms_list_concat(total_sources,sources);
		}else ms_message("Filter %s is already being scheduled; nothing to do.",f->desc->name);
	}while ((f=va_arg(l,MSFilter*))!=NULL);
	va_end(l);
	if (total_sources){
		ms_mutex_lock(&ticker->lock);
		ticker->execution_list=ms_list_concat(ticker->execution_list,total_sources);
		ms_mutex_unlock(&ticker->lock);
	}
	return 0;
}
/* AudioUnit output render callback: fills the output buffer from the shared
 * bufferizer.  If the bufferizer has accumulated more than ~10 frames of
 * backlog it is flushed to bound latency.  Always returns 0 (noErr).
 *
 * Fix: the original only zeroed the output buffer when NOTHING could be read
 * (read==0); a partial read left the tail of the buffer with stale garbage
 * that would be played out.  Any unfilled remainder is now silenced.
 * Also fixed typos in the backlog log message ("sise", "framezize"). */
static OSStatus writeRenderProc(void *inRefCon,
	AudioUnitRenderActionFlags *inActionFlags,
	const AudioTimeStamp *inTimeStamp,
	UInt32 inBusNumber,
	UInt32 inNumFrames,
	AudioBufferList *ioData)
{
	AUWrite *d=(AUWrite*)inRefCon;
	int read;
	if (ioData->mNumberBuffers!=1)
		ms_warning("writeRenderProc: %"UINT32_PRINTF" buffers",ioData->mNumberBuffers);
	ms_mutex_lock(&d->common.mutex);
	read=ms_bufferizer_read(d->buffer,ioData->mBuffers[0].mData,ioData->mBuffers[0].mDataByteSize);
	if (ms_bufferizer_get_avail(d->buffer) >10*inNumFrames*2) {
		/* too much backlog: drop it to keep playout latency bounded */
		ms_message("we are late, bufferizer size is [%i] bytes, frame size is [%"UINT32_PRINTF"] bytes"
			,(int)ms_bufferizer_get_avail(d->buffer)
			,inNumFrames*2);
		ms_bufferizer_flush(d->buffer);
	}
	ms_mutex_unlock(&d->common.mutex);
	if (read < (int)ioData->mBuffers[0].mDataByteSize){
		/* silence whatever part of the buffer we could not fill (covers both
		 * the empty case and a partial read) */
		if (read==0)
			ms_debug("Silence inserted in audio output unit (%"UINT32_PRINTF" bytes)",ioData->mBuffers[0].mDataByteSize);
		memset((char*)ioData->mBuffers[0].mData + read, 0, ioData->mBuffers[0].mDataByteSize - read);
	}
	return 0;
}
/* Process callback of the V4L capture filter.  Paces output to s->fps using
 * the ticker clock.  When a frame is due, takes it from the capture queue if
 * the device is open, otherwise synthesizes a test pattern ("mire") or a
 * static no-webcam picture; the frame is RTP-timestamped (90 kHz clock),
 * marked, and pushed downstream.  When no frame is due, the capture queue is
 * flushed so it cannot grow unboundedly. */
static void v4l_process(MSFilter * obj){
	V4lState *s=(V4lState*)obj->data;
	uint32_t timestamp;
	int cur_frame;
	if (s->frame_count==-1){
		/* first tick: latch start time and reset counter */
		s->start_time=obj->ticker->time;
		s->frame_count=0;
	}
	/* number of frames that should have been emitted so far (implicit
	 * double->int truncation) */
	cur_frame=((obj->ticker->time-s->start_time)*s->fps/1000.0);
	if (cur_frame>=s->frame_count){
		mblk_t *om=NULL;
		ms_mutex_lock(&s->mutex);
		/*keep the most recent frame if several frames have been captured */
		if (s->fd!=-1){
			om=getq(&s->rq);
		}else{
			/* device closed: produce a synthetic frame instead */
			if (s->usemire){
				om=dupmsg(v4l_make_mire(s));
			}else {
				mblk_t *tmpm=v4l_make_nowebcam(s);
				if (tmpm) om=dupmsg(tmpm);
			}
		}
		ms_mutex_unlock(&s->mutex);
		if (om!=NULL){
			timestamp=obj->ticker->time*90;/* rtp uses a 90000 Hz clockrate for video*/
			mblk_set_timestamp_info(om,timestamp);
			mblk_set_marker_info(om,TRUE);
			ms_queue_put(obj->outputs[0],om);
			/*ms_message("picture sent");*/
			s->frame_count++;
		}
	}else flushq(&s->rq,0);
}
static void *v4l_thread(void *ptr){ V4lState *s=(V4lState*)ptr; int err=-1; ms_message("v4l_thread starting"); if (s->v4lv2){ #ifdef HAVE_LINUX_VIDEODEV2_H err=v4lv2_do_mmap(s); #endif }else{ err=v4l_do_mmap(s); } if (err<0){ ms_thread_exit(NULL); } while(s->run){ mblk_t *m; #ifdef HAVE_LINUX_VIDEODEV2_H if (s->v4lv2) m=v4lv2_grab_image(s); else #endif m=v4l_grab_image_mmap(s); if (m) { ms_mutex_lock(&s->mutex); putq(&s->rq,dupmsg(m)); ms_mutex_unlock(&s->mutex); } } v4l_do_munmap(s); ms_message("v4l_thread exited."); ms_thread_exit(NULL); }
/* Dequeue one captured audio message from the shared read queue.
 * Returns NULL when the queue is empty.  The queue is fed by the CoreAudio
 * callback thread, hence the mutex. */
static mblk_t *ca_get(CAData *d){
	mblk_t *msg;
	ms_mutex_lock(&d->mutex);
	msg=getq(&d->rq);
	ms_mutex_unlock(&d->mutex);
	return msg;
}
/* AudioConverter input procedure: supplies source PCM data pulled from the
 * shared bufferizer.  A scratch buffer (d->caSourceBuffer) is (re)allocated
 * per call and handed to the converter via ioData.  If the bufferizer cannot
 * supply the full request, the whole buffer is reported as silence (zeroed),
 * matching the original behavior.
 *
 * Fixes: the calloc() result is now checked (the original dereferenced it
 * unconditionally), and the signed/unsigned comparison between the read size
 * and the requested size is made explicit. */
OSStatus writeACInputProc (
	AudioConverterRef inAudioConverter,
	UInt32 *ioNumberDataPackets,
	AudioBufferList *ioData,
	AudioStreamPacketDescription **outDataPacketDescription,
	void* inUserData)
{
	OSStatus err = noErr;
	CAData *d=(CAData*)inUserData;
	UInt32 packetSize = (d->bits / 8) * (d->stereo ? 2 : 1);
	if(*ioNumberDataPackets) {
		UInt32 wanted = *ioNumberDataPackets * packetSize;
		int readsize;
		if(d->caSourceBuffer != NULL) {
			free(d->caSourceBuffer);
			d->caSourceBuffer = NULL;
		}
		d->caSourceBuffer = calloc(1, wanted);
		if(d->caSourceBuffer == NULL) {
			/* allocation failed: report no data available */
			*ioNumberDataPackets = 0;
			ioData->mBuffers[0].mData = NULL;
			ioData->mBuffers[0].mDataByteSize = 0;
			return -1;
		}
		ioData->mBuffers[0].mData = d->caSourceBuffer;	// tell the Audio Converter where it's source data is
		ms_mutex_lock(&d->mutex);
		readsize = ms_bufferizer_read(d->bufferizer,d->caSourceBuffer,wanted);
		ms_mutex_unlock(&d->mutex);
		if(readsize < 0 || (UInt32)readsize != wanted) {
			/* underrun: present a full buffer of silence */
			memset(d->caSourceBuffer, 0, wanted);
			ioData->mBuffers[0].mDataByteSize = wanted;	// tell the Audio Converter how much source data there is
		} else {
			ioData->mBuffers[0].mDataByteSize = (UInt32)readsize;	// tell the Audio Converter how much source data there is
		}
	}
	return err;
}
/* Process callback of the H.264 (x264) encoder filter.  For each input YUV
 * frame: encodes it into NAL units, optionally records them to an FLV file
 * when recording is started, then RFC3984-packetizes the NALs to the output
 * with an RTP timestamp derived from the ticker (90 kHz clock).
 * NOTE(review): f->lock is held for the whole encode loop, which can be a
 * long critical section — presumably required to guard d->flv/d->packer
 * against concurrent method calls; confirm before changing. */
static void enc_process(MSFilter *f){
	EncData *d=(EncData*)f->data;
	uint32_t ts=f->ticker->time*90LL; /* RTP video clock: 90000 Hz */
	int sizeAll=0,key=0;
	SPSInfo sps={0};
	PPSInfo pps={0};
	mblk_t *im;
	MSPicture pic;
	MSQueue nalus;
	ms_queue_init(&nalus);
	ms_mutex_lock(&f->lock);
	while((im=ms_queue_get(f->inputs[0]))!=NULL){
		if (ms_yuv_buf_init_from_mblk(&pic,im)==0){
			if (x264_encoder_start(d,&pic,&nalus,&sizeAll,&sps,&pps,&key)==0){
				if (d->flv->state==Started){
					/* recording: write NALs to FLV, then packetize the
					 * rewritten queue */
					MSQueue nalus_t;
					ms_queue_init(&nalus_t);
					x264_flv_to_file(f,&nalus,&nalus_t,sizeAll,&sps,&pps,key);
					//x264_nals_to_file(d->flv,&nalus,&nalus_t);
					rfc3984_pack(d->packer,&nalus_t,f->outputs[0],ts);
				}
				else{
					rfc3984_pack(d->packer,&nalus,f->outputs[0],ts);
				}
				d->framenum++;
			}else{
				ms_error("MSH264Enc: x264_encoder_start error");
			}
		}
		freemsg(im);
	}
	ms_mutex_unlock(&f->lock);
}
/* Configure the AudioUnit for playback: mark writing as started, run the
 * common configuration, and discard any audio that accumulated in the
 * bufferizer meanwhile (under the mutex shared with the render callback). */
static void au_configure_write(AUData *d) {
	d->write_started=TRUE;
	au_configure(d);
	ms_mutex_lock(&d->mutex);
	ms_bufferizer_flush(d->bufferizer);
	ms_mutex_unlock(&d->mutex);
}
/* DirectShow sample-grabber callback: copies each captured video sample into
 * an mblk_t and queues it on the global capture state (s_callback) for the
 * filter's process function.  Always returns S_OK so the graph keeps running,
 * even on errors. */
HRESULT ( Callback)(IMediaSample* pSample, REFERENCE_TIME* sTime, REFERENCE_TIME* eTime, BOOL changed)
{
	BYTE *byte_buf=NULL;
	mblk_t *buf;
	V4wState *s = s_callback;
	if (s==NULL)
		return S_OK;
	HRESULT hr = pSample->GetPointer(&byte_buf);
	if (FAILED(hr)) {
		return S_OK;
	}
	int size = pSample->GetActualDataLength();
	/* NOTE(review): "size>+1000" parses as "size > 1000" (unary plus), so
	 * samples of 1000 bytes or fewer are silently dropped.  The '+' looks
	 * like a typo — "size > 0" or "size >= 1000" may have been intended;
	 * confirm against revision history before changing. */
	if (size>+1000) {
		buf=allocb(size,0);
		memcpy(buf->b_wptr, byte_buf, size);
		buf->b_wptr+=size;
		ms_mutex_lock(&s->mutex);
		putq(&s->rq, buf);
		ms_mutex_unlock(&s->mutex);
	}
	return S_OK;
}
/* MS_FILE_REC start method: switch the recorder state to Started so that
 * rec_process begins writing incoming data.  Always returns 0. */
static int rec_start(MSFilter *f, void *arg){
	RecState *rec=(RecState*)f->data;
	ms_mutex_lock(&f->lock);
	rec->state=Started;
	ms_mutex_unlock(&f->lock);
	return 0;
}
/* Dequeue one captured audio message from the AudioUnit read queue (fed by
 * the render callback thread).  Returns NULL if nothing is pending. */
static mblk_t *au_read_get(AURead *d) {
	mblk_t *msg;
	ms_mutex_lock(&d->common.mutex);
	msg=getq(&d->rq);
	ms_mutex_unlock(&d->common.mutex);
	return msg;
}
static void rec_process(MSFilter *f){ RecState *s=(RecState*)f->data; mblk_t *m; int err; while((m=ms_queue_get(f->inputs[0]))!=NULL){ mblk_t *it=m; ms_mutex_lock(&f->lock); if (s->state==Started){ while(it!=NULL){ int len=it->b_wptr-it->b_rptr; DWORD byte_written=0; if ((err=WriteFile(s->fd,it->b_rptr,len, &byte_written, NULL))!=len){ if (err<0) { #if !defined(_WIN32_WCE) ms_warning("MSFileRec: fail to write %i bytes: %s",len,strerror(errno)); #else ms_warning("MSFileRec: fail to write %i bytes: %i",len,WSAGetLastError()); #endif } } it=it->b_cont; s->size+=len; } } ms_mutex_unlock(&f->lock); freemsg(m); } }
/* Detach the graph containing filter f from the ticker: removes the graph's
 * source filters from the ticker's execution list (under the ticker lock),
 * then postprocesses every filter of the graph.  Returns 0 on success (or if
 * f was not scheduled), -1 if no sources were found (ms_fatal presumably
 * aborts before that, making the -1 path defensive). */
int ms_ticker_detach(MSTicker *ticker,MSFilter *f){
	MSList *sources=NULL;
	MSList *filters=NULL;
	MSList *it;
	if (f->ticker==NULL) {
		ms_message("Filter %s is not scheduled; nothing to do.",f->desc->name);
		return 0;
	}
	ms_mutex_lock(&ticker->lock);
	filters=ms_filter_find_neighbours(f);
	sources=get_sources(filters);
	if (sources==NULL){
		ms_fatal("No sources found around filter %s",f->desc->name);
		ms_list_free(filters);
		ms_mutex_unlock(&ticker->lock);
		return -1;
	}
	/* remove each source from the execution list while the ticker cannot run */
	for(it=sources;it!=NULL;it=ms_list_next(it)){
		ticker->execution_list=ms_list_remove(ticker->execution_list,it->data);
	}
	ms_mutex_unlock(&ticker->lock);
	/* postprocess outside the lock: filters are no longer scheduled */
	ms_list_for_each(filters,(void (*)(void*))ms_filter_postprocess);
	ms_list_free(filters);
	ms_list_free(sources);
	return 0;
}
/* Attach the graph containing filter f to the ticker: discovers the connected
 * filters, preprocesses each of them for this ticker, then appends the
 * graph's source filters to the ticker's execution list under the ticker
 * lock.  Returns 0 on success (or if already scheduled), -1 if no sources
 * were found (ms_fatal presumably aborts first; the -1 path is defensive). */
int ms_ticker_attach(MSTicker *ticker,MSFilter *f)
{
	MSList *sources=NULL;
	MSList *filters=NULL;
	MSList *it;
	if (f->ticker!=NULL) {
		ms_message("Filter %s is already being scheduled; nothing to do.",f->desc->name);
		return 0;
	}
	find_filters(&filters,f);
	sources=get_sources(filters);
	if (sources==NULL){
		ms_fatal("No sources found around filter %s",f->desc->name);
		ms_list_free(filters);
		return -1;
	}
	/*run preprocess on each filter: */
	for(it=filters;it!=NULL;it=it->next)
		ms_filter_preprocess((MSFilter*)it->data,ticker);
	/* ownership of `sources` is transferred to the execution list */
	ms_mutex_lock(&ticker->lock);
	ticker->execution_list=ms_list_concat(ticker->execution_list,sources);
	ms_mutex_unlock(&ticker->lock);
	ms_list_free(filters);
	return 0;
}
/* Request the ticker thread to stop (run flag cleared under the lock so the
 * thread observes it at a tick boundary), then join it if it was started. */
static void ms_ticker_stop(MSTicker *s){
	ms_mutex_lock(&s->lock);
	s->run=FALSE;
	ms_mutex_unlock(&s->lock);
	if (s->thread){
		ms_thread_join(s->thread,NULL);
	}
}
/* Preprocess of the Android video capture filter: initializes the framerate
 * controller and average-fps tracker, then starts the Java-side camera via
 * JNI (AndroidVideoApi helper class) and keeps a global reference to the
 * returned camera object so it survives across JNI calls.  If a preview
 * surface was set beforehand, it is attached to the freshly started camera.
 * The whole sequence runs under d->mutex to serialize with the frame
 * callback (putImage). */
void video_capture_preprocess(MSFilter *f){
	ms_message("Preprocessing of Android VIDEO capture filter");
	AndroidReaderContext *d = getContext(f);
	ms_mutex_lock(&d->mutex);
	snprintf(d->fps_context, sizeof(d->fps_context), "Captured mean fps=%%f, expected=%f", d->fps);
	ms_video_init_framerate_controller(&d->fpsControl, d->fps);
	ms_video_init_average_fps(&d->averageFps, d->fps_context);
	JNIEnv *env = ms_get_jni_env();
	jmethodID method = env->GetStaticMethodID(d->helperClass,"startRecording", "(IIIIIJ)Ljava/lang/Object;");
	ms_message("Starting Android camera '%d' (rotation:%d)", ((AndroidWebcamConfig*)d->webcam->data)->id, d->rotation);
	/* the native context pointer is passed as a jlong so Java can hand it
	 * back to the putImage JNI callback */
	jobject cam = env->CallStaticObjectMethod(d->helperClass, method, ((AndroidWebcamConfig*)d->webcam->data)->id, d->hwCapableSize.width, d->hwCapableSize.height, (jint)d->fps, d->rotationSavedDuringVSize, (jlong)d);
	/* global ref: a local ref would become invalid after this JNI frame */
	d->androidCamera = env->NewGlobalRef(cam);
	if (d->previewWindow) {
		method = env->GetStaticMethodID(d->helperClass,"setPreviewDisplaySurface", "(Ljava/lang/Object;Ljava/lang/Object;)V");
		env->CallStaticVoidMethod(d->helperClass, method, d->androidCamera, d->previewWindow);
	}
	ms_message("Preprocessing of Android VIDEO capture filter done");
	ms_mutex_unlock(&d->mutex);
}
/* Process callback of the video file player filter: paces frame emission to
 * d->fps against the ticker clock.  When the next frame is due, pops one
 * frame from the conference output queue (under its own lock) and pushes it
 * downstream; d->index counts frames scheduled so far. */
static void video_player_process(MSFilter *f){
	StreamData *d=(StreamData*)f->data;
	ConfSlotQueue *confq=video_player_get_video_outq(d->is);
	float elapsed;
	ms_filter_lock(f);
	elapsed=(float)(f->ticker->time-d->starttime);
	/* has the index-th frame's presentation time been reached? */
	if ((elapsed*d->fps/1000.0)>d->index){
		mblk_t *frame;
		ms_mutex_lock(&confq->lock);
		frame=ms_queue_get(&confq->q);
		ms_mutex_unlock(&confq->lock);
		if (frame!=NULL){
			ms_queue_put(f->outputs[0],frame);
		}
		d->index++;
	}
	ms_filter_unlock(f);
}
/* MS_FILE_REC sample-rate setter: stores the rate (int pointed to by arg)
 * under the filter lock.  Always returns 0. */
static int rec_set_sr(MSFilter *f, void *arg){
	RecState *rec=(RecState*)f->data;
	ms_mutex_lock(&f->lock);
	rec->rate=*((int*)arg);
	ms_mutex_unlock(&f->lock);
	return 0;
}
/* MS_FILE_REC stop method: switch the recorder state to MSRecorderPaused so
 * rec_process stops writing (data is still consumed).  Always returns 0. */
static int rec_stop(MSFilter *f, void *arg){
	RecState *rec=(RecState*)f->data;
	ms_mutex_lock(&f->lock);
	rec->state=MSRecorderPaused;
	ms_mutex_unlock(&f->lock);
	return 0;
}
/* JNI entry point called by the Java camera callback with each raw preview
 * frame (NV21/YCbCr biplanar buffer).  Under d->mutex: applies framerate
 * limiting, computes the rotation correction, converts/rotates (and
 * optionally downscales) the frame into a YUV mblk_t, and stores it as
 * d->frame for the filter's process function (replacing any previous frame).
 * nativePtr is the AndroidReaderContext pointer handed to Java at start. */
JNIEXPORT void JNICALL Java_org_linphone_mediastream_video_capture_AndroidVideoApi5JniWrapper_putImage(JNIEnv* env,
		jclass thiz,jlong nativePtr,jbyteArray frame) {
	AndroidReaderContext* d = (AndroidReaderContext*) nativePtr;
	if (!d->androidCamera)
		return; /* camera already stopped: drop the frame */
	ms_mutex_lock(&d->mutex);
	if (!ms_video_capture_new_frame(&d->fpsControl,d->filter->ticker->time)) {
		/* framerate controller says this frame should be skipped */
		ms_mutex_unlock(&d->mutex);
		return;
	}
	if (d->rotation != UNDEFINED_ROTATION && d->rotationSavedDuringVSize != d->rotation) {
		ms_warning("Rotation has changed (new value: %d) since vsize was run (old value: %d)."
				"Will produce inverted images. Use set_device_orientation() then update call.\n",
			d->rotation, d->rotationSavedDuringVSize);
	}
	int image_rotation_correction = compute_image_rotation_correction(d, d->rotationSavedDuringVSize);
	jboolean isCopied;
	jbyte* jinternal_buff = env->GetByteArrayElements(frame, &isCopied);
	if (isCopied) {
		ms_warning("The video frame received from Java has been copied");
	}
	int y_cropping_offset=0, cbcr_cropping_offset=0;
	//compute_cropping_offsets(d->hwCapableSize, d->requestedSize, &y_cropping_offset, &cbcr_cropping_offset);
	int width = d->hwCapableSize.width;
	int height = d->hwCapableSize.height;
	/* biplanar layout: Y plane first, then interleaved CbCr plane */
	uint8_t* y_src = (uint8_t*)(jinternal_buff + y_cropping_offset);
	uint8_t* cbcr_src = (uint8_t*) (jinternal_buff + width * height + cbcr_cropping_offset);
	/* Warning note: image_rotation_correction == 90 does not imply portrait mode !
	   (incorrect function naming). It only implies one thing: image needs to be
	   rotated by that amount to be correctly displayed. */
	mblk_t* yuv_block = copy_ycbcrbiplanar_to_true_yuv_with_rotation_and_down_scale_by_2(y_src
		, cbcr_src
		, image_rotation_correction
		, d->requestedSize.width
		, d->requestedSize.height
		, d->hwCapableSize.width
		, d->hwCapableSize.width, false, d->useDownscaling);
	if (yuv_block) {
		/* keep only the newest converted frame */
		if (d->frame)
			freemsg(d->frame);
		d->frame = yuv_block;
	}
	ms_mutex_unlock(&d->mutex);
	// JNI_ABORT free the buffer without copying back the possible changes
	env->ReleaseByteArrayElements(frame, jinternal_buff, JNI_ABORT);
}
/* Main loop of the ticker thread (legacy variant): runs the filter graphs
 * once per tick under s->lock, then sleeps until the next scheduled tick,
 * warning when the tick is significantly late.  The lock/unlock placement in
 * the inner wait loop is deliberate and order-sensitive. */
void * ms_ticker_run(void *arg)
{
	uint64_t realtime;
	int64_t diff;
	MSTicker *s=(MSTicker*)arg;
	int lastlate=0;
	int precision=2;
	int late;
	precision = set_high_prio();
	s->ticks=1;
	ms_mutex_lock(&s->lock);
	s->orig=s->get_cur_time_ptr(s->get_cur_time_data); /* reference time of tick 0 */
	while(s->run){
		s->ticks++;
		run_graphs(s,s->execution_list,FALSE);
		s->time+=s->interval; /* logical time of the next tick, in ms */
		while(1){
			/* entered with s->lock held */
			realtime=s->get_cur_time_ptr(s->get_cur_time_data)-s->orig;
			ms_mutex_unlock(&s->lock);
			diff=s->time-realtime;
			if (diff>0){
				/* sleep until next tick (implicit int64->int narrowing) */
				sleepMs(diff);
			}else{
				late=-diff;
				if (late>s->interval*5 && late>lastlate){
					ms_warning("We are late of %d miliseconds.",late);
				}
				lastlate=late;
				break; /*exit the while loop */
			}
			ms_mutex_lock(&s->lock);
		}
		/* the break above leaves the lock released; re-take it for next tick */
		ms_mutex_lock(&s->lock);
	}
	ms_mutex_unlock(&s->lock);
	unset_high_prio(precision);
	ms_message("MSTicker thread exiting");
	ms_thread_exit(NULL);
	return NULL;
}
/* Filter-notification callback of the media player: when the player filter
 * reports end-of-file, forwards the event to the user-registered eof
 * callback.  cb_access serializes this with callback (de)registration. */
static void _eof_filter_notify_cb(void *userdata, struct _MSFilter *f, unsigned int id, void *arg) {
	MSMediaPlayer *player = (MSMediaPlayer *)userdata;
	ms_mutex_lock(&player->cb_access);
	if (id == MS_PLAYER_EOF && f == player->player && player->eof_cb != NULL) {
		player->eof_cb(player->user_data_cb);
	}
	ms_mutex_unlock(&player->cb_access);
}