Code Example #1
static int bb10capture_set_vsize(MSFilter *f, void *arg) {
    BB10Capture *d = (BB10Capture*) f->data;
    MSVideoSize newSize = *(MSVideoSize*)arg;

    ms_filter_lock(f);

    if (!d->camera_openned) {
        bb10capture_open_camera(d);
    }

    if (d->camera_openned) {
        camera_set_vf_property(d->cam_handle, CAMERA_IMGPROP_WIDTH, newSize.width, CAMERA_IMGPROP_HEIGHT, newSize.height);
        camera_get_vf_property(d->cam_handle, CAMERA_IMGPROP_WIDTH, &(d->vsize.width), CAMERA_IMGPROP_HEIGHT, &(d->vsize.height));
        if (ms_video_size_equal(d->vsize, newSize)) {
            ms_filter_unlock(f);
            return 0;
        }

        ms_warning("[bb10_capture] vsize %i,%i couldn't be set, instead try using: %i,%i", newSize.width, newSize.height, newSize.height, newSize.width);
        int tmp = newSize.width;
        newSize.width = newSize.height;
        newSize.height = tmp;
        camera_set_vf_property(d->cam_handle, CAMERA_IMGPROP_WIDTH, newSize.width, CAMERA_IMGPROP_HEIGHT, newSize.height);
        camera_get_vf_property(d->cam_handle, CAMERA_IMGPROP_WIDTH, &(d->vsize.width), CAMERA_IMGPROP_HEIGHT, &(d->vsize.height));
        if (ms_video_size_equal(d->vsize, newSize)) {
            ms_filter_unlock(f);
            return 0;
        }

        ms_warning("[bb10_capture] vsize %i,%i couldn't be set either, instead using: %i,%i", newSize.width, newSize.height, d->vsize.width, d->vsize.height);
    }

    ms_filter_unlock(f);
    return -1;
}
Code Example #2
static int x11video_set_corner(MSFilter *f,void *arg){
	X11Video *s=(X11Video*)f->data;
	ms_filter_lock(f);
	s->corner= *(int*)arg;
	ms_filter_unlock(f);
	return 0;
}
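Every example in this listing follows the same concurrency pattern: a filter method takes the per-filter lock with ms_filter_lock(), reads or updates the private state reachable through f->data, and releases it with ms_filter_unlock() so the change cannot race the ticker thread that runs the filter's process callback. The sketch below shows that minimal setter shape; the MyFilterData struct and its field are hypothetical, only the ms_filter_lock()/ms_filter_unlock() calls and the f->data convention come from the examples themselves.

#include <mediastreamer2/msfilter.h>

typedef struct _MyFilterData {
	int some_setting;   /* hypothetical private state hanging off f->data */
} MyFilterData;

static int my_filter_set_setting(MSFilter *f, void *arg) {
	MyFilterData *d = (MyFilterData *)f->data;
	ms_filter_lock(f);              /* serialize against the filter's process() callback */
	d->some_setting = *(int *)arg;  /* copy the caller's value while the lock is held */
	ms_filter_unlock(f);
	return 0;
}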
Code Example #3
static void player_process(MSFilter *f){
	PlayerData *d=(PlayerData*)f->data;
	int bytes=2*(f->ticker->interval*d->rate*d->nchannels)/1000;
	ms_filter_lock(f);
	if (d->state==STARTED){
		int err;
		mblk_t *om=allocb(bytes,0);
		if (d->pause_time>0){
			err=bytes;
			memset(om->b_wptr,0,bytes);
			d->pause_time-=f->ticker->interval;
		}else{
			err=read(d->fd,om->b_wptr,bytes);
			if (d->swap) swap_bytes(om->b_wptr,bytes);
		}
		if (err>=0){
			if (err!=0){
				if (err<bytes)
					memset(om->b_wptr+err,0,bytes-err); /* zero-fill the tail on a short read */
				om->b_wptr+=bytes;
				ms_queue_put(f->outputs[0],om);
			}else freemsg(om);
			if (err<bytes){
				ms_filter_notify_no_arg(f,MS_FILE_PLAYER_EOF);
				if (d->loop_after>=0){
					lseek(d->fd,d->hsize,SEEK_SET);
					d->pause_time=d->loop_after;
				}else d->state=STOPPED;
			}
		}else{
			ms_warning("Fail to read %i bytes: %s",bytes,strerror(errno));
		}
	}
	ms_filter_unlock(f);
}
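A note on the read size in player_process() above: bytes = 2*(interval*rate*nchannels)/1000 assumes 16-bit samples (2 bytes each) and a ticker interval expressed in milliseconds. As an illustration (the figures are not taken from the example), a 10 ms tick at 8000 Hz mono gives 2*(10*8000*1)/1000 = 160 bytes, i.e. 80 samples per tick.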
Code Example #4
File: msrtp.c, Project: biddyweb/mediastream-plus
static void sender_process(MSFilter * f)
{
	SenderData *d = (SenderData *) f->data;
	RtpSession *s = d->session;

	mblk_t *im;
	uint32_t timestamp;

	if (s == NULL){
		ms_queue_flush(f->inputs[0]);
		return;
	}

	if (d->relay_session_id_size>0 && 
		( (f->ticker->time-d->last_rsi_time)>5000 || d->last_rsi_time==0) ) {
		ms_message("relay session id sent in RTCP APP");
		rtp_session_send_rtcp_APP(s,0,"RSID",(const uint8_t *)d->relay_session_id,d->relay_session_id_size);
		d->last_rsi_time=f->ticker->time;
	}

	while ((im = ms_queue_get(f->inputs[0])) != NULL) {
		mblk_t *header;

		timestamp = get_cur_timestamp(f, mblk_get_timestamp_info(im));
		ms_filter_lock(f);
		if (d->skip) {
			ms_debug("skipping..");
			send_dtmf(f, d->skip_until-d->dtmf_duration, timestamp);
			d->dtmf_start = FALSE;
			if (!RTP_TIMESTAMP_IS_NEWER_THAN(timestamp, d->skip_until)) {
				freemsg(im);
				ms_filter_unlock(f);
				continue;
			}
			d->skip = FALSE;
			d->dtmf = 0;
		}

		if (d->skip == FALSE && d->mute_mic==FALSE){
			int pt = mblk_get_payload_type(im);
			header = rtp_session_create_packet(s, 12, NULL, 0);
			if (pt>0)
				rtp_set_payload_type(header, pt);
			rtp_set_markbit(header, mblk_get_marker_info(im));
			header->b_cont = im;
			rtp_session_sendm_with_ts(s, header, timestamp);
		}
		else{
			freemsg(im);
		}

		if (d->dtmf != 0) {
			ms_debug("prepare to send RFC2833 dtmf.");
			d->skip_until = timestamp + d->dtmf_duration;
			d->skip = TRUE;
			d->dtmf_start = TRUE;
		}
		ms_filter_unlock(f);
	}
}
Code Example #5
static int h264_dec_get_fps(MSFilter *f, float *fps) {
	VTH264DecCtx *ctx = (VTH264DecCtx *)f->data;
	ms_filter_lock(f);
	*fps = ms_average_fps_get(&ctx->fps);
	ms_filter_unlock(f);
	return 0;
}
Code Example #6
File: nowebcam.c, Project: zengtaoxian/linphone
static int static_image_set_image(MSFilter *f, void *arg){
	SIData *d=(SIData*)f->data;
	const char *image = (const char *)arg;
	ms_filter_lock(f);
	
	if (d->nowebcamimage) {
		ms_free(d->nowebcamimage);
		d->nowebcamimage=NULL;
	}
	
	if (image!=NULL && image[0]!='\0')
		d->nowebcamimage=ms_strdup(image);

	if (d->pic!=NULL){
		/* Get rid of the old image and force a new preprocess so that the
			 new image is properly read. */
		freemsg(d->pic);
		d->pic=NULL;
	}
	d->lasttime=0;
	static_image_preprocess(f);

	ms_filter_unlock(f);
	return 0;
}
Code Example #7
static void enc_process(MSFilter *f) {
	mblk_t *im,*om;
	uint64_t timems=f->ticker->time;
	uint32_t timestamp=timems*90;
	EncState *s=(EncState*)f->data;
	unsigned int flags = 0;
	vpx_codec_err_t err;
	YuvBuf yuv;

	ms_filter_lock(f);
	while((im=ms_queue_get(f->inputs[0]))!=NULL){
		vpx_image_t img;

		om = NULL;
		flags = 0;

		ms_yuv_buf_init_from_mblk(&yuv, im);
		vpx_img_wrap(&img, VPX_IMG_FMT_I420, s->vconf.vsize.width, s->vconf.vsize.height, 1, yuv.planes[0]);
		
		if (video_starter_need_i_frame (&s->starter,f->ticker->time)){
			/*sends an I frame at 2 seconds and 4 seconds after the beginning of the call*/
			s->req_vfu=TRUE;
		}
		if (s->req_vfu){
			flags = VPX_EFLAG_FORCE_KF;
			s->req_vfu=FALSE;
		}

		err = vpx_codec_encode(&s->codec, &img, s->frame_count, 1, flags, VPX_DL_REALTIME);

		if (err) {
			ms_error("vpx_codec_encode failed : %d %s (%s)\n", err, vpx_codec_err_to_string(err), vpx_codec_error_detail(&s->codec));
		} else {
			vpx_codec_iter_t iter = NULL;
			const vpx_codec_cx_pkt_t *pkt;

			s->frame_count++;
			if (s->frame_count==1){
				video_starter_first_frame (&s->starter,f->ticker->time);
			}

			while( (pkt = vpx_codec_get_cx_data(&s->codec, &iter)) ) {
				if (pkt->kind == VPX_CODEC_CX_FRAME_PKT) {
					if (pkt->data.frame.sz > 0) {
						om = allocb(pkt->data.frame.sz,0);
						memcpy(om->b_wptr, pkt->data.frame.buf, pkt->data.frame.sz);
						om->b_wptr += pkt->data.frame.sz;
						#ifdef FRAGMENT_ON_PARTITIONS
						vp8_fragment_and_send(f, s, om, timestamp, pkt, (pkt->data.frame.partition_id == s->token_partition_count));
						#else
						vp8_fragment_and_send(f, s, om, timestamp, pkt, 1);
						#endif
					}
				}
			}
		}
		freemsg(im);
	}
	ms_filter_unlock(f);
}
Code Example #8
File: msvideoplayer.c, Project: github188/Sip-MCU
static void video_player_process(MSFilter *f){
	StreamData *d=(StreamData*)f->data;
	VideoState *is = d->is;
	mblk_t *om=NULL;
	ConfSlotQueue *confq= video_player_get_video_outq(d->is);

	ms_filter_lock(f);
	float elapsed=(float)(f->ticker->time-d->starttime);

	
	if ((elapsed*d->fps/1000.0)>d->index){

		ms_mutex_lock(&confq->lock);
		om = ms_queue_get(&confq->q);
		ms_mutex_unlock(&confq->lock);

		if(om!=NULL) ms_queue_put(f->outputs[0],om);

		d->index++;
	}
	ms_filter_unlock(f);
}
Code Example #9
File: msrtp.c, Project: smking1122/heqinphone
static int sender_send_generic_cn(MSFilter *f, void *data){
	SenderData *d = (SenderData *) f->data;
	ms_filter_lock(f);
	memcpy(&d->cng_data, data, sizeof(MSCngData));
	ms_filter_unlock(f);
	return 0;
}
Code Example #10
static int android_display_set_window(MSFilter *f, void *arg){
	AndroidDisplay *ad=(AndroidDisplay*)f->data;
	unsigned long id=*(unsigned long*)arg;
	int err;
	JNIEnv *jenv=ms_get_jni_env();
	jobject window=(jobject)id;

	ms_filter_lock(f);
	if (window!=NULL)
		ad->jbitmap=(*jenv)->CallObjectMethod(jenv,window,ad->get_bitmap_id);
	else
		ad->jbitmap=NULL;
	ad->android_video_window=window;
	if (ad->jbitmap!=NULL){
		err=sym_AndroidBitmap_getInfo(jenv,ad->jbitmap,&ad->bmpinfo);
		if (err!=0){
			ms_error("AndroidBitmap_getInfo() failed.");
			ad->jbitmap=0;
			ms_filter_unlock(f);
			return -1;
		}
	}
	if (ad->sws){
		ms_scaler_context_free(ad->sws);
		ad->sws=NULL;
	}
	ad->orientation_change_pending=FALSE;
	ms_filter_unlock(f);
	if (ad->jbitmap!=NULL) ms_message("New java bitmap given with w=%i,h=%i,stride=%i,format=%i",
	           ad->bmpinfo.width,ad->bmpinfo.height,ad->bmpinfo.stride,ad->bmpinfo.format);
	return 0;
}
Code Example #11
static int android_display_set_window(MSFilter *f, void *arg){
	AndroidDisplay *ad=(AndroidDisplay*)f->data;
	unsigned long id=*(unsigned long*)arg;
	JNIEnv *jenv=ms_get_jni_env();
	jobject window=(jobject)id;

	ms_filter_lock(f);
	
	if (window) {
		unsigned int ptr = (unsigned int)ad->ogl;
		ms_message("Sending opengles_display pointer as long: %p -> %u", ad->ogl, ptr);
		(*jenv)->CallVoidMethod(jenv,window,ad->set_opengles_display_id, ptr);
		ad->ogl_free_ready = FALSE;
	} else {
		if (window != ad->android_video_window) {
			ms_message("Clearing opengles_display (%p : %d)", ad->ogl, ad->ogl_free_ready);
			/* when context is lost GL resources are freed by Android */
			ogl_display_uninit(ad->ogl, FALSE);
			if (ad->ogl_free_ready) {
				ms_free(ad->ogl);
				ad->ogl = 0;
			} else {
				ad->ogl_free_ready = TRUE;
			}
			/* clear native ptr, to prevent rendering to occur now that ptr is invalid */
			(*jenv)->CallVoidMethod(jenv,ad->android_video_window,ad->set_opengles_display_id, 0);
		}
	}

	ad->android_video_window=window;

	ms_filter_unlock(f);

	return 0;
}
Code Example #12
static void android_display_process(MSFilter *f){
	AndroidDisplay *ad=(AndroidDisplay*)f->data;
	MSPicture pic;
	mblk_t *m;

	ms_filter_lock(f);
	if (ad->android_video_window){
		if ((m=ms_queue_peek_last(f->inputs[0]))!=NULL){
			if (ms_yuv_buf_init_from_mblk (&pic,m)==0){
				/* schedule display of frame */
				if (!ad->ogl || !ad->ogl_free_ready) {
                    /* m is dupb'ed inside ogl_display */
					ogl_display_set_yuv_to_display(ad->ogl, m);
				} else {
					ms_warning("%s: opengldisplay not ready (%p)", __FUNCTION__, ad->ogl);
				}
				
				JNIEnv *jenv=ms_get_jni_env();
				(*jenv)->CallVoidMethod(jenv,ad->android_video_window,ad->request_render_id);
			}
		}
	}
	ms_filter_unlock(f);

	ms_queue_flush(f->inputs[0]);
	if (f->inputs[1] != NULL)
		ms_queue_flush(f->inputs[1]);
}
Code Example #13
static int glxvideo_set_vsize(MSFilter *f,void *arg){
	GLXVideo *s=(GLXVideo*)f->data;
	ms_filter_lock(f);
	s->wsize=*(MSVideoSize*)arg;
	ms_filter_unlock(f);
	return 0;
}
Code Example #14
File: msopus.c, Project: xiaolds/VideoCallVoIP
/* the ptime is managed by the set bitrate function, which increases the ptime only at low bitrates */
static int ms_opus_enc_set_ptime(MSFilter *f, void *arg) {
	OpusEncData *d = (OpusEncData *)f->data;
	int retval = -1;
	int ptime = *(int*)arg;

	ms_filter_lock(f);

	/* ptime value can be 20, 40, 60, 80, 100 or 120 */
	if ((ptime%20 != 0) || (ptime>d->maxptime) || (ptime<20)) {
		d->ptime = ptime-ptime%20;
		if (d->ptime<20) {
			d->ptime=20;
		}
		if (d->ptime>d->maxptime) {
			d->ptime=d->maxptime;
		}
		ms_warning("Opus encoder doesn't support ptime [%i]( 20 multiple in range [20,%i] only) set to %d", ptime, d->maxptime, d->ptime);
	} else {
		d->ptime=ptime;
		ms_message ( "Opus enc: got ptime=%i",d->ptime );
	}
	if (d->bitrate!=-1) {
		d->max_network_bitrate = ((d->bitrate*d->ptime/8000) + 12 + 8 + 20) *8000/d->ptime;
	}
	d->ptime_set=TRUE;
	retval=0;
	ms_filter_unlock(f);
	return retval;
}
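The max_network_bitrate update near the end of ms_opus_enc_set_ptime() turns the codec bitrate into an on-the-wire estimate by adding a fixed per-packet overhead of 12 + 8 + 20 bytes, matching the RTP, UDP and IPv4 header sizes. Below is a small, self-contained recomputation of that arithmetic; the 32 kbit/s bitrate and 20 ms ptime are illustrative values, not taken from the example.

#include <stdio.h>

/* wire_bitrate = (payload_bytes + 12 (RTP) + 8 (UDP) + 20 (IPv4)) * 8000 / ptime */
int main(void) {
	int bitrate = 32000;                          /* hypothetical codec bitrate, bit/s */
	int ptime = 20;                               /* packet duration, ms */
	int payload_bytes = bitrate * ptime / 8000;   /* 32000*20/8000 = 80 bytes per packet */
	int wire_bitrate = (payload_bytes + 12 + 8 + 20) * 8000 / ptime;  /* (80+40)*400 = 48000 bit/s */
	printf("payload=%d bytes per packet, wire bitrate=%d bit/s\n", payload_bytes, wire_bitrate);
	return 0;
}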
Code Example #15
static void android_display_process(MSFilter *f){
	AndroidDisplay *ad=(AndroidDisplay*)f->data;
	MSPicture pic;
	mblk_t *m;

	ms_filter_lock(f);
	if (ad->jbitmap!=0 && !ad->orientation_change_pending){
		if ((m=ms_queue_peek_last(f->inputs[0]))!=NULL){
			if (ms_yuv_buf_init_from_mblk (&pic,m)==0){
				MSVideoSize wsize={ad->bmpinfo.width,ad->bmpinfo.height};
				MSVideoSize vsize={pic.w, pic.h};
				MSRect vrect;
				MSPicture dest={0};
				void *pixels=NULL;
				JNIEnv *jenv=ms_get_jni_env();

				if (!ms_video_size_equal(vsize,ad->vsize)){
					ms_message("Video to display has size %ix%i",vsize.width,vsize.height);
					ad->vsize=vsize;
					if (ad->sws){
						ms_scaler_context_free(ad->sws);
						ad->sws=NULL;
					}
					/*select_orientation(ad,wsize,vsize);*/
				}

				ms_layout_compute(wsize,vsize,vsize,-1,0,&vrect, NULL);

				if (ad->sws==NULL){
					ad->sws=ms_scaler_create_context (vsize.width,vsize.height,MS_YUV420P,
					                           vrect.w,vrect.h,MS_RGB565,MS_SCALER_METHOD_BILINEAR);
					if (ad->sws==NULL){
						ms_fatal("Could not obtain sws context !");
					}
				}

				if (sym_AndroidBitmap_lockPixels(jenv,ad->jbitmap,&pixels)==0){

					if (pixels!=NULL){
						dest.planes[0]=(uint8_t*)pixels+(vrect.y*ad->bmpinfo.stride)+(vrect.x*2);
						dest.strides[0]=ad->bmpinfo.stride;
						ms_scaler_process(ad->sws,pic.planes,pic.strides,dest.planes,dest.strides);
					}else ms_warning("Pixels==NULL in android bitmap !");

					sym_AndroidBitmap_unlockPixels(jenv,ad->jbitmap);
				}else{
					ms_error("AndroidBitmap_lockPixels() failed !");
				}

				(*jenv)->CallVoidMethod(jenv,ad->android_video_window,ad->update_id);

			}
		}
	}
	ms_filter_unlock(f);

	ms_queue_flush(f->inputs[0]);
	ms_queue_flush(f->inputs[1]);
}
Code Example #16
File: msrtp.c, Project: blueskycoco/hp
static void _sender_process(MSFilter * f)
{
	SenderData *d = (SenderData *) f->data;
	RtpSession *s = d->session;

	mblk_t *im;
	uint32_t timestamp;

	
	if (d->relay_session_id_size>0 && 
		( (f->ticker->time-d->last_rsi_time)>5000 || d->last_rsi_time==0) ) {
		ms_message("relay session id sent in RTCP APP");
		rtp_session_send_rtcp_APP(s,0,"RSID",(const uint8_t *)d->relay_session_id,d->relay_session_id_size);
		d->last_rsi_time=f->ticker->time;
	}

	ms_filter_lock(f);
	im = ms_queue_get(f->inputs[0]);
	do {
		mblk_t *header;

		timestamp = get_cur_timestamp(f, im);
		
		if (d->dtmf != 0 && !d->skip) {
			ms_debug("prepare to send RFC2833 dtmf.");
			d->skip_until = timestamp + d->dtmf_duration;
			d->dtmf_ts_cur=timestamp;
			d->skip = TRUE;
		}
		if (d->skip) {
			uint32_t origin_ts=d->skip_until-d->dtmf_duration;
			if (RTP_TIMESTAMP_IS_NEWER_THAN(timestamp,d->dtmf_ts_cur)){
				ms_debug("Sending RFC2833 packet, start_timestamp=%u, dtmf_ts_cur=%u",origin_ts,d->dtmf_ts_cur);
				send_dtmf(f, origin_ts);
			}
		}
		if (im){
			if (d->skip == FALSE && d->mute_mic==FALSE){
				header = rtp_session_create_packet(s, 12, NULL, 0);
				rtp_set_markbit(header, mblk_get_marker_info(im));
				header->b_cont = im;
				rtp_session_sendm_with_ts(s, header, timestamp);
			}else{
				freemsg(im);
			}
		}
	}while ((im = ms_queue_get(f->inputs[0])) != NULL);

	if (d->last_sent_time == -1) {
		if ((d->last_stun_sent_time == -1) || ((f->ticker->time - d->last_stun_sent_time) >= 500)) {
			d->last_stun_sent_time = f->ticker->time;
		}
		if (d->last_stun_sent_time == f->ticker->time) {
			send_stun_packet(s);
		}
	}

	ms_filter_unlock(f);
}
Code Example #17
static int ms_rtt_4103_sink_set_red_payload(MSFilter *f, void *red) {
	RealTimeTextSinkData *s = (RealTimeTextSinkData *)f->data;
	ms_filter_lock(f);
	s->pt_red = *(int *)red;
	ms_debug("RED payload number is %i", s->pt_red);
	ms_filter_unlock(f);
	return 0;
}
Code Example #18
static int ms_rtt_4103_sink_set_t140_payload(MSFilter *f, void *t140) {
	RealTimeTextSinkData *s = (RealTimeTextSinkData *)f->data;
	ms_filter_lock(f);
	s->pt_t140 = *(int *)t140;
	ms_debug("T140 payload number is %i", s->pt_t140);
	ms_filter_unlock(f);
	return 0;
}
Code Example #19
static int h264_enc_set_fps(MSFilter *f, const float *fps) {
	VTH264EncCtx *ctx = (VTH264EncCtx *)f->data;
	ms_filter_lock(f);
	ctx->conf.fps = *fps;
	if(ctx->is_configured) ctx->fps_changed = TRUE;
	ms_filter_unlock(f);
	return 0;
}
Code Example #20
File: msrtp.c, Project: blueskycoco/hp
static int sender_unmute_mic(MSFilter * f, void *arg)
{
	SenderData *d = (SenderData *) f->data;
	ms_filter_lock(f);
	d->mute_mic=FALSE;
	ms_filter_unlock(f);
	return 0;
}
Code Example #21
static int enc_set_br(MSFilter *f, void *arg){
	SpeexEncState *s=(SpeexEncState*)f->data;
	ms_filter_lock(f);
	s->maxbitrate=((int*)arg)[0];
	if (s->state) apply_max_bitrate(s);
	ms_filter_unlock(f);
	return 0;
}
Code Example #22
File: msrtp.c, Project: smking1122/heqinphone
static int sender_mute(MSFilter * f, void *arg)
{
	SenderData *d = (SenderData *) f->data;
	ms_filter_lock(f);
	d->mute=TRUE;
	ms_filter_unlock(f);
	return 0;
}
Code Example #23
File: sizeconv.c, Project: biddyweb/mediastream-plus
static void size_conv_process(MSFilter *f){
	SizeConvState *s=(SizeConvState*)f->data;
	YuvBuf inbuf;
	mblk_t *im;
	int cur_frame;

	ms_filter_lock(f);

	if (s->frame_count==-1){
		s->start_time=(float)f->ticker->time;
		s->frame_count=0;
	}
	while((im=ms_queue_get(f->inputs[0]))!=NULL ){
		putq(&s->rq, im);
	}

	cur_frame=(int)((f->ticker->time-s->start_time)*s->fps/1000.0);
	if (cur_frame<=s->frame_count && s->fps>=0) {
		/* too many frames queued */
		while(s->rq.q_mcount>1){
			ms_message("MSSizeConv: extra frame removed.");
			im=getq(&s->rq);
			freemsg(im);
		}
		ms_filter_unlock(f);
		return;
	}

	if (cur_frame>s->frame_count && s->fps>=0) {
		/*keep the most recent frame if several frames have been captured */
		while(s->rq.q_mcount>1){
			ms_message("MSSizeConv: extra frame removed.");
			im=getq(&s->rq);
			freemsg(im);
		}
	}
	while((im=getq(&s->rq))!=NULL ){
		if (ms_yuv_buf_init_from_mblk(&inbuf,im)==0){
			if (inbuf.w==s->target_vsize.width &&
				inbuf.h==s->target_vsize.height){
				ms_queue_put(f->outputs[0],im);
			}else{
				struct ms_SwsContext *sws_ctx=get_resampler(s,inbuf.w,inbuf.h);
				mblk_t *om=size_conv_alloc_mblk(s);
				if (ms_sws_scale(sws_ctx,inbuf.planes,inbuf.strides, 0,
					inbuf.h, s->outbuf.planes, s->outbuf.strides)<0){
					ms_error("MSSizeConv: error in ms_sws_scale().");
				}
				ms_queue_put(f->outputs[0],om);
				freemsg(im);
			}
			s->frame_count++;
		}else freemsg(im);
	}

	ms_filter_unlock(f);
}
Code Example #24
static int glxvideo_zoom(MSFilter *f, void *arg){
	GLXVideo *s=(GLXVideo*)f->data;

	ms_filter_lock(f);
	ogl_display_zoom(s->glhelper, arg);
	
	ms_filter_unlock(f);
	return 0;
}
Code Example #25
File: msfileplayer.c, Project: hunghtbk/LinphoneV1
static void player_process(MSFilter *f){
	PlayerData *d=(PlayerData*)f->data;
	int nsamples=(f->ticker->interval*d->rate*d->nchannels)/1000;
	int bytes;
	/*Send an even number of samples each tick. At 22050 Hz the number of samples per 10 ms chunk is odd.
	Odd-sized sample buffers cause trouble for ALSA. Fixing this in ALSA is difficult, so we work around it here.
	*/
	if (nsamples & 0x1 ) { //odd number of samples
		if (d->count & 0x1 )
			nsamples++;
		else
			nsamples--;
	}
	bytes=2*nsamples;
	d->count++;
	ms_filter_lock(f);
	if (d->state==STARTED){
		int err;
		mblk_t *om=allocb(bytes,0);
		if (d->pause_time>0){
			err=bytes;
			memset(om->b_wptr,0,bytes);
			d->pause_time-=f->ticker->interval;
		}else{
			err=read(d->fd,om->b_wptr,bytes);
			if (d->swap) swap_bytes(om->b_wptr,bytes);
		}
		if (err>=0){
			if (err!=0){
				if (err<bytes)
 					memset(om->b_wptr+err,0,bytes-err);
				om->b_wptr+=bytes;
				ms_queue_put(f->outputs[0],om);
			}else freemsg(om);
			if (err<bytes){
				ms_filter_notify_no_arg(f,MS_FILE_PLAYER_EOF);
				lseek(d->fd,d->hsize,SEEK_SET);

				/* special value for playing file only once */
				if (d->loop_after==-2)
				{
					d->state=STOPPED;
					ms_filter_unlock(f);
					return;
				}

				if (d->loop_after>=0){
					d->pause_time=d->loop_after;
				}
			}
		}else{
			ms_warning("Fail to read %i bytes: %s",bytes,strerror(errno));
		}
	}
	ms_filter_unlock(f);
}
Code Example #26
static void resample_process_ms2(MSFilter *obj) {
    ResampleData *dt=(ResampleData*)obj->data;
    mblk_t *m;

    if (dt->output_rate==dt->input_rate) {
        while((m=ms_queue_get(obj->inputs[0]))!=NULL) {
            ms_queue_put(obj->outputs[0],m);
        }
        return;
    }
    ms_filter_lock(obj);
    if (dt->handle!=NULL) {
        unsigned int inrate=0, outrate=0;
        speex_resampler_get_rate(dt->handle,&inrate,&outrate);
        if (inrate!=dt->input_rate || outrate!=dt->output_rate) {
            speex_resampler_destroy(dt->handle);
            dt->handle=0;
        }
    }
    if (dt->handle==NULL) {
        int err=0;
        dt->handle=speex_resampler_init(dt->nchannels, dt->input_rate, dt->output_rate, SPEEX_RESAMPLER_QUALITY_VOIP, &err);
    }


    while((m=ms_queue_get(obj->inputs[0]))!=NULL) {
        unsigned int inlen=(m->b_wptr-m->b_rptr)/(2*dt->nchannels);
        unsigned int outlen=((inlen*dt->output_rate)/dt->input_rate)+1;
        unsigned int inlen_orig=inlen;
        mblk_t *om=allocb(outlen*2*dt->nchannels,0);
        if (dt->nchannels==1) {
            speex_resampler_process_int(dt->handle,
                                        0,
                                        (int16_t*)m->b_rptr,
                                        &inlen,
                                        (int16_t*)om->b_wptr,
                                        &outlen);
        } else {
            speex_resampler_process_interleaved_int(dt->handle,
                                                    (int16_t*)m->b_rptr,
                                                    &inlen,
                                                    (int16_t*)om->b_wptr,
                                                    &outlen);
        }
        if (inlen_orig!=inlen) {
            ms_error("Bug in resampler ! only %u samples consumed instead of %u, out=%u",
                     inlen,inlen_orig,outlen);
        }
        om->b_wptr+=outlen*2*dt->nchannels;
        mblk_set_timestamp_info(om,dt->ts);
        dt->ts+=outlen;
        ms_queue_put(obj->outputs[0],om);
        freemsg(m);
    }
    ms_filter_unlock(obj);
}
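In resample_process_ms2() above, inlen counts sample frames, not bytes: (b_wptr - b_rptr)/(2*nchannels) assumes 16-bit interleaved samples. The output allocation uses (inlen*output_rate)/input_rate + 1 frames, the extra frame simply absorbing integer-division rounding; for example (illustrative numbers), 160 input frames resampled from 8000 Hz to 16000 Hz need exactly 320 output frames, so allocating 321 leaves one frame of slack.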
Code Example #27
File: msfileplayer.c, Project: hunghtbk/LinphoneV1
static int player_stop(MSFilter *f, void *arg){
	PlayerData *d=(PlayerData*)f->data;
	ms_filter_lock(f);
	if (d->state==STARTED){
		d->state=STOPPED;
		lseek(d->fd,d->hsize,SEEK_SET);
	}
	ms_filter_unlock(f);
	return 0;
}
Code Example #28
File: dtmfgen.c, Project: biddyweb/mediastream-plus
static int dtmfgen_stop(MSFilter *f, void *arg){
	DtmfGenState *s=(DtmfGenState*)f->data;
	int min_duration=(100*s->rate)/1000; /*wait at least 100 ms*/
	ms_filter_lock(f);
	if (s->pos<min_duration)
		s->dur=min_duration;
	else s->dur=0;
	ms_filter_unlock(f);
	return 0;
}
Code Example #29
File: msfilerec.c, Project: smking1122/heqinphone
static int rec_set_fmtp(MSFilter *f, void *arg){
	RecState *d=(RecState*)f->data;
	MSPinFormat *pinfmt = (MSPinFormat*)arg;
	ms_filter_lock(f);
	d->rate = pinfmt->fmt->rate;
	d->nchannels = pinfmt->fmt->nchannels;
	d->mime = pinfmt->fmt->encoding;
	ms_filter_unlock(f);
	return 0;
}
Code Example #30
File: vp8.c, Project: dormclub/tjphone
static int enc_set_br(MSFilter *f, void*data){
	int br=*(int*)data;
	EncState *s=(EncState*)f->data;
	s->bitrate=br;
	s->cfg.rc_target_bitrate = ((float)s->bitrate)*0.92/1024.0; //0.92 = account for IP/UDP/RTP overhead on average; /1024 converts to the kbit/s expected by libvpx.
	if (s->ready){
		ms_filter_lock(f);
		enc_postprocess(f);
		enc_preprocess(f);
		ms_filter_unlock(f);
		return 0;
	}
	if (br>=1024000){
		s->width = MS_VIDEO_SIZE_VGA_W;
		s->height = MS_VIDEO_SIZE_VGA_H;
		s->fps=25;
	}else if (br>=350000){
		s->width = MS_VIDEO_SIZE_VGA_W;
		s->height = MS_VIDEO_SIZE_VGA_H;
		s->fps=15;
	} else if (br>=200000){
		s->width = MS_VIDEO_SIZE_CIF_W;
		s->height = MS_VIDEO_SIZE_CIF_H;
		s->fps=15;
	}else if (br>=150000){
		s->width=MS_VIDEO_SIZE_QVGA_W;
		s->height=MS_VIDEO_SIZE_QVGA_H;
		s->fps=15;
	}else if (br>=100000){
		s->width=MS_VIDEO_SIZE_QVGA_W;
		s->height=MS_VIDEO_SIZE_QVGA_H;
		s->fps=10;
	}else if (br>=64000){
		s->width=MS_VIDEO_SIZE_QCIF_W;
		s->height=MS_VIDEO_SIZE_QCIF_H;
		s->fps=12;
	}else{
		s->width=MS_VIDEO_SIZE_QCIF_W;
		s->height=MS_VIDEO_SIZE_QCIF_H;
		s->fps=5;
	}
#ifdef __arm__
	if (br >= 300000) {
		s->width = MS_VIDEO_SIZE_VGA_W;
		s->height = MS_VIDEO_SIZE_VGA_H;
	} else {
		s->width=MS_VIDEO_SIZE_QVGA_W;
		s->height=MS_VIDEO_SIZE_QVGA_H;
	}
	s->fps=12;
#endif

	ms_message("bitrate requested...: %d (%d x %d)\n", br, s->width, s->height);
	return 0;
}