Example #1
static void dec_process(MSFilter *f){
	DecState *s=(DecState*)f->data;
	mblk_t *inputMessage, *outputMessage;
	struct	BV16_Bit_Stream bs;

	while((inputMessage=ms_queue_get(f->inputs[0]))!=NULL){

		while(inputMessage->b_rptr<inputMessage->b_wptr) {
			outputMessage = allocb(SIGNAL_FRAME_SIZE,0);
			mblk_meta_copy(inputMessage, outputMessage);
			BV16_BitUnPack((UWord8*)inputMessage->b_rptr, &bs);
			BV16_Decode(&bs, &s->state, (short*)(outputMessage->b_wptr));
			outputMessage->b_wptr+=SIGNAL_FRAME_SIZE;
			inputMessage->b_rptr+=BITSTREAM_FRAME_SIZE;
			ms_queue_put(f->outputs[0],outputMessage);
			if (s->plc) ms_concealer_inc_sample_time(s->concealer,f->ticker->time, 5, 1);

		}
		freemsg(inputMessage);

	}
	// called every 10 ms
	if (s->plc && ms_concealer_context_is_concealement_required(s->concealer, f->ticker->time)) {
		int ms_concealed;
		// need to conceal 10 ms
		for (ms_concealed=0; ms_concealed<s->packet_ms_size; ms_concealed+=5){
			outputMessage = allocb(SIGNAL_FRAME_SIZE,0);
			BV16_PLC(&s->state,(short*)outputMessage->b_wptr);
			outputMessage->b_wptr+=SIGNAL_FRAME_SIZE;
			mblk_set_plc_flag(outputMessage, 1);
			ms_queue_put(f->outputs[0],outputMessage);
		}
		ms_concealer_inc_sample_time(s->concealer,f->ticker->time,10, 0);
	}
}
Example #2
static void adapter_process(MSFilter *f){
	AdapterState *s=(AdapterState*)f->data;
	mblk_t *im,*om;
	size_t msgsize;
	
	while((im=ms_queue_get(f->inputs[0]))!=NULL){
		if (s->inputchans==s->outputchans){
			ms_queue_put(f->outputs[0],im);
		}else if (s->inputchans==2){
			msgsize=msgdsize(im)/2;
			om=allocb(msgsize,0);
			for (;im->b_rptr<im->b_wptr;im->b_rptr+=4,om->b_wptr+=2){
				*(int16_t*)om->b_wptr=*(int16_t*)im->b_rptr;
			}
			ms_queue_put(f->outputs[0],om);
			freemsg(im);
		}else if (s->outputchans==2){
			msgsize=msgdsize(im)*2;
			om=allocb(msgsize,0);
			for (;im->b_rptr<im->b_wptr;im->b_rptr+=2,om->b_wptr+=4){
				((int16_t*)om->b_wptr)[0]=*(int16_t*)im->b_rptr;
				((int16_t*)om->b_wptr)[1]=*(int16_t*)im->b_rptr;
			}
			ms_queue_put(f->outputs[0],om);
			freemsg(im);
		}
	}
}
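
The two conversion branches above work directly on the mblk_t read/write pointers, stepping 4 bytes per stereo frame and 2 bytes per mono sample. A minimal standalone sketch of the same per-sample conversions over plain buffers (names and signatures are illustrative, not part of the filter API):

#include <stdint.h>
#include <stddef.h>

/* Stereo to mono: keep the left sample of each frame, as the branch above does. */
void stereo_to_mono(const int16_t *in, size_t nframes, int16_t *out){
	size_t i;
	for (i = 0; i < nframes; ++i)
		out[i] = in[2 * i];            /* left channel only */
}

/* Mono to stereo: duplicate each sample into both channels. */
void mono_to_stereo(const int16_t *in, size_t nframes, int16_t *out){
	size_t i;
	for (i = 0; i < nframes; ++i){
		out[2 * i]     = in[i];
		out[2 * i + 1] = in[i];
	}
}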
Example #3
static void size_conv_process(MSFilter *f){
	SizeConvState *s=(SizeConvState*)f->data;
	YuvBuf inbuf;
	mblk_t *im;
	int cur_frame;

	ms_filter_lock(f);

	if (s->frame_count==-1){
		s->start_time=(float)f->ticker->time;
		s->frame_count=0;
	}
	while((im=ms_queue_get(f->inputs[0]))!=NULL ){
		putq(&s->rq, im);
	}

	cur_frame=(int)((f->ticker->time-s->start_time)*s->fps/1000.0);
	if (cur_frame<=s->frame_count && s->fps>=0) {
		/* too many frames */
		while(s->rq.q_mcount>1){
			ms_message("MSSizeConv: extra frame removed.");
			im=getq(&s->rq);
			freemsg(im);
		}
		ms_filter_unlock(f);
		return;
	}

	if (cur_frame>s->frame_count && s->fps>=0) {
		/*keep the most recent frame if several frames have been captured */
		while(s->rq.q_mcount>1){
			ms_message("MSSizeConv: extra frame removed.");
			im=getq(&s->rq);
			freemsg(im);
		}
	}
	while((im=getq(&s->rq))!=NULL ){
		if (ms_yuv_buf_init_from_mblk(&inbuf,im)==0){
			if (inbuf.w==s->target_vsize.width &&
				inbuf.h==s->target_vsize.height){
				ms_queue_put(f->outputs[0],im);
			}else{
				struct ms_SwsContext *sws_ctx=get_resampler(s,inbuf.w,inbuf.h);
				mblk_t *om=size_conv_alloc_mblk(s);
				if (ms_sws_scale(sws_ctx,inbuf.planes,inbuf.strides, 0,
					inbuf.h, s->outbuf.planes, s->outbuf.strides)<0){
					ms_error("MSSizeConv: error in ms_sws_scale().");
				}
				ms_queue_put(f->outputs[0],om);
				freemsg(im);
			}
			s->frame_count++;
		}else freemsg(im);
	}

	ms_filter_unlock(f);
}
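
The pacing above compares a target frame index, derived from elapsed ticker time and the configured fps, against the number of frames already sent, dropping surplus queued frames. A small sketch of that decision, with hypothetical names:

#include <stdint.h>

/* Returns 1 if a new frame should be emitted at 'now_ms', using the pacing
 * rule above: target index = elapsed_ms * fps / 1000. */
int should_emit_frame(uint64_t now_ms, uint64_t start_ms, float fps, int frames_sent){
	int target = (int)((now_ms - start_ms) * fps / 1000.0f);
	return target > frames_sent;
}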
Example #4
static void resample_process_ms2(MSFilter *obj) {
    ResampleData *dt=(ResampleData*)obj->data;
    mblk_t *m;

    if (dt->output_rate==dt->input_rate) {
        while((m=ms_queue_get(obj->inputs[0]))!=NULL) {
            ms_queue_put(obj->outputs[0],m);
        }
        return;
    }
    ms_filter_lock(obj);
    if (dt->handle!=NULL) {
        unsigned int inrate=0, outrate=0;
        speex_resampler_get_rate(dt->handle,&inrate,&outrate);
        if (inrate!=dt->input_rate || outrate!=dt->output_rate) {
            speex_resampler_destroy(dt->handle);
            dt->handle=0;
        }
    }
    if (dt->handle==NULL) {
        int err=0;
        dt->handle=speex_resampler_init(dt->nchannels, dt->input_rate, dt->output_rate, SPEEX_RESAMPLER_QUALITY_VOIP, &err);
    }


    while((m=ms_queue_get(obj->inputs[0]))!=NULL) {
        unsigned int inlen=(m->b_wptr-m->b_rptr)/(2*dt->nchannels);
        unsigned int outlen=((inlen*dt->output_rate)/dt->input_rate)+1;
        unsigned int inlen_orig=inlen;
        mblk_t *om=allocb(outlen*2*dt->nchannels,0);
        if (dt->nchannels==1) {
            speex_resampler_process_int(dt->handle,
                                        0,
                                        (int16_t*)m->b_rptr,
                                        &inlen,
                                        (int16_t*)om->b_wptr,
                                        &outlen);
        } else {
            speex_resampler_process_interleaved_int(dt->handle,
                                                    (int16_t*)m->b_rptr,
                                                    &inlen,
                                                    (int16_t*)om->b_wptr,
                                                    &outlen);
        }
        if (inlen_orig!=inlen) {
            ms_error("Bug in resampler ! only %u samples consumed instead of %u, out=%u",
                     inlen,inlen_orig,outlen);
        }
        om->b_wptr+=outlen*2*dt->nchannels;
        mblk_set_timestamp_info(om,dt->ts);
        dt->ts+=outlen;
        ms_queue_put(obj->outputs[0],om);
        freemsg(m);
    }
    ms_filter_unlock(obj);
}
Example #5
/* removes the payload header and aggregates fragmented packets */
static void dec_unpacketize(MSFilter *f, DecState *s, mblk_t *im, MSQueue *out){
	int xbit = (im->b_rptr[0] & 0x80) >> 7;
	im->b_rptr++;
	if (xbit) {
		/* Ignore extensions if some are present */
		int ibit = (im->b_rptr[0] & 0x80) >> 7;
		int lbit = (im->b_rptr[0] & 0x40) >> 6;
		int tbit = (im->b_rptr[0] & 0x20) >> 5;
		int kbit = (im->b_rptr[0] & 0x10) >> 4;
		int mbit = 0;
		if (ibit) {
			mbit = (im->b_rptr[1] & 0x80) >> 7;
		}
		im->b_rptr += (ibit + lbit + (tbit | kbit) + mbit + 1); /* +1 skips the extension field byte itself */
	}

	/* end of frame bit ? */
	if (mblk_get_marker_info(im)) {
		/* should be aggregated with previous packet ? */
		if (s->curframe!=NULL){
			/* same timestamp ? */
			if (mblk_get_timestamp_info(im) == mblk_get_timestamp_info(s->curframe)) {
				concatb(s->curframe,im);
				msgpullup(s->curframe,-1);
				/* transmit complete frame */
				ms_queue_put(out, s->curframe);
				s->curframe=NULL;
			} else {
				/* transmit partial frame */
				ms_queue_put(out, s->curframe);
				s->curframe = NULL;
				/* transmit new one (be it complete or not) */
				ms_queue_put(out, im);
			}
		} else {
			/* transmit new one (be it complete or not) */
			ms_queue_put(out, im);
		}
	} else {
		if (s->curframe!=NULL) {
			/* append if same timestamp */
			if (mblk_get_timestamp_info(im) == mblk_get_timestamp_info(s->curframe)) {
				concatb(s->curframe,im);
			} else {
				/* transmit partial frame */
				ms_queue_put(out, s->curframe);
				s->curframe = im;
			}
		}
		else {
			s->curframe = im;
		}
	}
}
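
The bit tests above walk the VP8 RTP payload descriptor (X, then the I/L/T/K flags and the M bit of the PictureID) to find where the VP8 payload starts. A standalone sketch of the same descriptor-length computation over a plain byte buffer, assuming the RFC 7741 layout:

#include <stdint.h>
#include <stddef.h>

/* Length in bytes of a VP8 RTP payload descriptor: one mandatory byte, plus an
 * extension byte when X is set, plus PictureID (1 or 2 bytes), TL0PICIDX and
 * TID/Y/KEYIDX fields when the corresponding flags are set. */
size_t vp8_payload_descriptor_len(const uint8_t *buf){
	size_t len = 1;
	if (buf[0] & 0x80) {                         /* X: extension byte present */
		uint8_t ext = buf[1];
		len++;
		if (ext & 0x80)                          /* I: PictureID present */
			len += (buf[2] & 0x80) ? 2 : 1;      /* M: 15-bit PictureID uses 2 bytes */
		if (ext & 0x40) len++;                   /* L: TL0PICIDX */
		if (ext & 0x30) len++;                   /* T or K: TID/Y/KEYIDX byte */
	}
	return len;
}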
Example #6
static void dtmfgen_process(MSFilter *f){
	mblk_t *m;
	DtmfGenState *s=(DtmfGenState*)f->data;
	int nsamples;

	ms_filter_lock(f);
	if (ms_queue_empty(f->inputs[0])){
		s->nosamples_time+=f->ticker->interval;
		if ((s->playing || s->silence!=0) && s->nosamples_time>NO_SAMPLES_THRESHOLD){
			/*after 100 ms without stream we decide to generate our own sample
			 instead of writing into incoming stream samples*/
			nsamples=(f->ticker->interval*s->rate)/1000;
			m=allocb(nsamples*s->nchannels*2,0);
			if (s->silence==0){
				if (s->pos==0){
					MSDtmfGenEvent ev;
					ev.tone_start_time=f->ticker->time;
					strncpy(ev.tone_name,s->current_tone.tone_name,sizeof(ev.tone_name));
					ms_filter_notify(f,MS_DTMF_GEN_EVENT,&ev);
				}
				write_dtmf(s,(int16_t*)m->b_wptr,nsamples);
			}else{
				memset(m->b_wptr,0,nsamples*s->nchannels*2);
				s->silence-=f->ticker->interval;
				if (s->silence<0) s->silence=0;
			}
			m->b_wptr+=nsamples*s->nchannels*2;
			ms_queue_put(f->outputs[0],m);
		}
	}else{
		s->nosamples_time=0;
		if (s->current_tone.interval > 0) {
			s->silence-=f->ticker->interval;
			if (s->silence<0) s->silence=0;
		} else s->silence=0;
		while((m=ms_queue_get(f->inputs[0]))!=NULL){
			if (s->playing && s->silence==0){
				if (s->pos==0){
					MSDtmfGenEvent ev;
					ev.tone_start_time=f->ticker->time;
					strncpy(ev.tone_name,s->current_tone.tone_name,sizeof(ev.tone_name));
					ms_filter_notify(f,MS_DTMF_GEN_EVENT,&ev);
				}
				nsamples=(int)(m->b_wptr-m->b_rptr)/(2*s->nchannels);
				write_dtmf(s, (int16_t*)m->b_rptr,nsamples);
			}
			ms_queue_put(f->outputs[0],m);
		}
	}
	ms_filter_unlock(f);
}
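
write_dtmf() is referenced but not shown; presumably it sums a low-group and a high-group sine into the 16-bit buffer. A minimal standalone sketch of such a generator, offered as an assumption rather than the filter's actual implementation:

#include <stdint.h>
#include <math.h>

/* Append nsamples of a two-frequency (DTMF-style) tone into buf, keeping the
 * phases across calls. Hypothetical helper, not the library's write_dtmf(). */
void synth_dual_tone(int16_t *buf, int nsamples, int rate,
                     float f_low, float f_high,
                     float *phase_low, float *phase_high){
	const float two_pi = 6.28318530718f;
	int i;
	for (i = 0; i < nsamples; ++i){
		float v = 0.4f * sinf(*phase_low) + 0.4f * sinf(*phase_high);
		buf[i] = (int16_t)(v * 32767.0f);
		*phase_low  += two_pi * f_low  / (float)rate;
		*phase_high += two_pi * f_high / (float)rate;
		if (*phase_low  >= two_pi) *phase_low  -= two_pi;
		if (*phase_high >= two_pi) *phase_high -= two_pi;
	}
}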
Example #7
static void msv4l2_process(MSFilter *f){
	V4l2State *s=(V4l2State*)f->data;
#ifdef V4L2_THREADED
	uint32_t timestamp;
	int cur_frame;
	if (s->frame_count==-1){
		s->start_time=f->ticker->time;
		s->frame_count=0;
	}
	cur_frame=((f->ticker->time-s->start_time)*s->fps/1000.0);
	
	if (cur_frame>=s->frame_count){
		mblk_t *om=NULL;
		ms_mutex_lock(&s->mutex);
		/*keep the most recent frame if several frames have been captured */
		if (s->fd!=-1){
			om=getq(&s->rq);
		}
		ms_mutex_unlock(&s->mutex);
		if (om!=NULL){
			timestamp=f->ticker->time*90;/* rtp uses a 90000 Hz clockrate for video*/
			mblk_set_timestamp_info(om,timestamp);
			mblk_set_marker_info(om,TRUE);
			ms_queue_put(f->outputs[0],om);
			/*ms_message("picture sent");*/
			s->frame_count++;
		}
	}else{
			flushq(&s->rq,0);
	}
#else
	uint32_t elapsed;
	
	if (s->fd!=-1){
		/*see if it is necessary to output a frame:*/
		elapsed=f->ticker->time-s->start_time;
		if (((float)elapsed*s->fps/1000.0)>s->frame_count){
			mblk_t *m;
			m=v4lv2_grab_image(s);
			if (m){
				mblk_t *om=dupb(m);
				mblk_set_marker_info(om,(s->pix_fmt==MS_MJPEG));
				ms_queue_put(f->outputs[0],om);
				s->frame_count++;
			}
		}
	}
#endif
}
Example #8
static void volume_process(MSFilter *f){
	mblk_t *m;
	Volume *v=(Volume*)f->data;
	float target_gain;

	/* Important notice: any of the processing steps called herein can modify
	 * v->target_gain; apply_gain() is called at the end of this function, so
	 * later calls override earlier ones and the order must be well thought out.
	 */
	if (v->agc_enabled || v->peer!=NULL){
		mblk_t *om;
		int nbytes=v->nsamples*2;
		ms_bufferizer_put_from_queue(v->buffer,f->inputs[0]);
		while(ms_bufferizer_get_avail(v->buffer)>=nbytes){
			om=allocb(nbytes,0);
			ms_bufferizer_read(v->buffer,om->b_wptr,nbytes);
			om->b_wptr+=nbytes;
			update_energy((int16_t*)om->b_rptr, v->nsamples, v);
			target_gain = v->static_gain;

			if (v->peer)  /* this ptr set = echo limiter enable flag */
				target_gain = volume_echo_avoider_process(v, om);

			/* Multiply with gain from echo limiter, not "choose smallest". Why?
			 * Remote talks, local echo suppress via mic path, but still audible in
			 * remote speaker. AGC operates fully, too (local speaker close to local mic!);
			 * having agc gain reduction also contribute to total reduction makes sense.
			 */
			if (v->agc_enabled) target_gain/= volume_agc_process(v, om);

			if (v->noise_gate_enabled)
				volume_noise_gate_process(v, v->level_pk, om);
			apply_gain(v, om, target_gain);
			ms_queue_put(f->outputs[0],om);
		}
	}else{
		/*light processing: no agc. Work in place in the input buffer*/
		while((m=ms_queue_get(f->inputs[0]))!=NULL){
			update_energy((int16_t*)m->b_rptr, (m->b_wptr - m->b_rptr) / 2, v);
			target_gain = v->static_gain;

			if (v->noise_gate_enabled)
				volume_noise_gate_process(v, v->level_pk, m);
			apply_gain(v, m, target_gain);
			ms_queue_put(f->outputs[0],m);
		}
	}
}
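
apply_gain() is likewise outside this excerpt; a common shape for it, sketched here as an assumption, is a per-sample multiply with saturation to the int16_t range:

#include <stdint.h>

/* Multiply 16-bit samples in place by a linear gain, clamping to int16_t.
 * Illustrative stand-in for the apply_gain() referenced above. */
void apply_linear_gain(int16_t *samples, int nsamples, float gain){
	int i;
	for (i = 0; i < nsamples; ++i){
		float v = samples[i] * gain;
		if (v > 32767.0f) v = 32767.0f;
		else if (v < -32768.0f) v = -32768.0f;
		samples[i] = (int16_t)v;
	}
}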
Example #9
static void dec_process(MSFilter *f){
	DecState *s=(DecState*)f->data;
	int nbytes = 0;
	mblk_t *im;
	mblk_t *om;
	const int frsz= BV16_FRAME_SIZE * 2 * 4;
	int i,frames;

	while((im=ms_queue_get(f->inputs[0]))!=NULL){
		om=allocb(frsz,0);

		nbytes=msgdsize(im);
		frames = nbytes/(BV16_CODE_SIZE * 2);
		
		//ms_error("read bv16 data, size %d, frame %d",msgdsize(im),frames);

		for(i=0;i<frames;i++){

			if (mblk_get_precious_flag(im)) { /* packet loss concealment */
				bv16_fillin(s->dec, (int16_t *) om->b_wptr, BV16_CODE_SIZE);
			} else {
				bv16_decode(s->dec, (int16_t *) om->b_wptr, (uint8_t *) im->b_rptr, BV16_CODE_SIZE);
			}
			im->b_rptr += 10;  /* 10 bytes of bitstream per 5 ms BV16 frame */
			om->b_wptr += 80;  /* 40 decoded 16-bit samples (5 ms at 8 kHz) per frame */
		}
	
		ms_queue_put(f->outputs[0],om);
		freemsg(im);
	}
		
}
Example #10
static void receiver_process(MSFilter * f)
{
	ReceiverData *d = (ReceiverData *) f->data;
	mblk_t *m;
	uint32_t timestamp;
	if (d->session == NULL)
		return;
	
	if (d->reset_jb){
		ms_message("Reseting jitter buffer");
		rtp_session_resync(d->session);
		d->reset_jb=FALSE;
	}

	if (d->starting){
		PayloadType *pt=rtp_profile_get_payload(
			rtp_session_get_profile(d->session),
			rtp_session_get_recv_payload_type(d->session));
		if (pt && pt->type!=PAYLOAD_VIDEO)
			rtp_session_flush_sockets(d->session);
		d->starting=FALSE;
	}

	timestamp = (uint32_t) (f->ticker->time * (d->rate/1000));
	while ((m = rtp_session_recvm_with_ts(d->session, timestamp)) != NULL) {
		mblk_set_timestamp_info(m, rtp_get_timestamp(m));
		mblk_set_marker_info(m, rtp_get_markbit(m));
		mblk_set_cseq(m, rtp_get_seqnumber(m));
		rtp_get_payload(m,&m->b_rptr);		
		ms_queue_put(f->outputs[0], m);
	}
}
Example #11
static void pixconv_process(MSFilter *f){
	mblk_t *im,*om=NULL;
	PixConvState *s=(PixConvState*)f->data;

	while((im=ms_queue_get(f->inputs[0]))!=NULL){
		if (s->in_fmt==s->out_fmt){
			om=im;
		}else{
			MSPicture inbuf;
			if (ms_picture_init_from_mblk_with_size(&inbuf,im,s->in_fmt,s->size.width,s->size.height)==0){
				om=pixconv_alloc_mblk(s);
				if (s->scaler==NULL){
					s->scaler=ms_scaler_create_context(inbuf.w, inbuf.h,
						s->in_fmt,inbuf.w,inbuf.h,
						s->out_fmt,MS_SCALER_METHOD_BILINEAR);
				}
				if (s->in_fmt==MS_RGB24_REV){
					inbuf.planes[0]+=inbuf.strides[0]*(inbuf.h-1);
					inbuf.strides[0]=-inbuf.strides[0];
				}
				if (ms_scaler_process (s->scaler,inbuf.planes,inbuf.strides, s->outbuf.planes, s->outbuf.strides)<0){
					ms_error("MSPixConv: Error in ms_sws_scale().");
				}
			}
			freemsg(im);
		}
		if (om!=NULL) ms_queue_put(f->outputs[0],om);
	}
}
Example #12
void alsa_read_process(MSFilter *obj){
	AlsaReadData *ad=(AlsaReadData*)obj->data;
	int samples=(128*ad->rate)/8000;
	int err;
	mblk_t *om=NULL;
	if (ad->handle==NULL && ad->pcmdev!=NULL && !ad->read_started){
		ad->read_started=TRUE;
		ad->handle=alsa_open_r(ad->pcmdev,16,ad->nchannels==2,ad->rate);
		if (ad->handle){
			ad->read_samples=0;
			ms_ticker_set_time_func(obj->ticker,(uint64_t (*)(void*))ms_ticker_synchronizer_get_corrected_time, ad->ticker_synchronizer);
		}
	}
	if (ad->handle==NULL) return;
	while (alsa_can_read(ad->handle)>=samples){

		int size=samples*2*ad->nchannels;
		om=allocb(size,0);
		if ((err=alsa_read(ad->handle,om->b_wptr,samples))<=0) {
			ms_warning("Fail to read samples");
			freemsg(om);
			return;
		}
		ad->read_samples+=err;
		size=err*2*ad->nchannels;
		om->b_wptr+=size;
		compute_timespec(ad);

		/*ms_message("alsa_read_process: Outputing %i bytes",size);*/
		ms_queue_put(obj->outputs[0],om);
	}
}
Example #13
static void enc_process(MSFilter *f){
	EncState *s=(EncState*)f->data;
	mblk_t *im;
	unsigned int unitary_buff_size = sizeof(int16_t)*160;
	unsigned int buff_size = unitary_buff_size*s->ptime/20;
	int16_t* buff;
	int offset;

	while((im=ms_queue_get(f->inputs[0]))!=NULL){
		ms_bufferizer_put(s->bufferizer,im);
	}
	while(ms_bufferizer_get_avail(s->bufferizer) >= buff_size) {
		mblk_t *om=allocb(33*s->ptime/20,0);
		buff = (int16_t *)alloca(buff_size);
		ms_bufferizer_read(s->bufferizer,(uint8_t*)buff,buff_size);

		for (offset=0;offset<buff_size;offset+=unitary_buff_size) {
			gsm_encode(s->state,(gsm_signal*)&buff[offset/sizeof(int16_t)],(gsm_byte*)om->b_wptr);
			om->b_wptr+=33;
		}
		mblk_set_timestamp_info(om,s->ts);
		ms_queue_put(f->outputs[0],om);
		s->ts+=buff_size/sizeof(int16_t)/*sizeof(buf)/2*/;
	}
}
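
The buffer sizes above follow from GSM full-rate framing: each 20 ms frame at 8 kHz mono is 160 16-bit samples (320 bytes) in and one 33-byte encoded frame out, so a packet of ptime ms carries ptime/20 frames. A small sketch of that arithmetic (the helper name is illustrative):

#include <stddef.h>
#include <stdint.h>

/* Per-packet buffer sizes for GSM full-rate at 8 kHz, matching the filter above. */
void gsm_packet_sizes(int ptime_ms, size_t *pcm_bytes, size_t *gsm_bytes){
	int frames = ptime_ms / 20;                           /* one GSM frame per 20 ms */
	*pcm_bytes = (size_t)frames * 160 * sizeof(int16_t);  /* 160 samples per frame */
	*gsm_bytes = (size_t)frames * 33;                     /* 33-byte encoded frame */
}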
Example #14
static void vfw_process(MSFilter * obj){
	VfwState *s=(VfwState*)obj->data;
	mblk_t *m;
	uint32_t timestamp;
	int cur_frame;

	if (s->frame_count==-1){
		s->start_time=(float)obj->ticker->time;
		s->frame_count=0;
	}

	cur_frame=(int)((obj->ticker->time-s->start_time)*s->fps/1000.0);
	if (cur_frame>s->frame_count){
		mblk_t *om=NULL;
		/*keep the most recent frame if several frames have been captured */
		if (s->eng!=NULL){
			ms_mutex_lock(&s->mutex);
			while((m=getq(&s->rq))!=NULL){
				ms_mutex_unlock(&s->mutex);
				if (om!=NULL) freemsg(om);
				om=m;
				ms_mutex_lock(&s->mutex);
			}
			ms_mutex_unlock(&s->mutex);
		}
		if (om!=NULL){
			timestamp=(uint32_t)(obj->ticker->time*90);/* rtp uses a 90000 Hz clockrate for video*/
			mblk_set_timestamp_info(om,timestamp);
			ms_queue_put(obj->outputs[0],om);
		}
		s->frame_count++;
	}
}
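
The factor 90 above comes from the fixed 90000 Hz RTP clock for video: the ticker time is in milliseconds, so multiplying by 90 yields RTP timestamp units. A one-line sketch of that conversion:

#include <stdint.h>

/* Convert a millisecond ticker time to RTP video timestamp units
 * (90000 Hz clock => 90 ticks per millisecond), as done above. */
uint32_t ms_to_rtp_video_ts(uint64_t ticker_time_ms){
	return (uint32_t)(ticker_time_ms * 90);
}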
Example #15
static void  pushNalu(uint8_t *begin, uint8_t *end, uint32_t ts, bool marker, MSQueue *nalus){
	uint8_t *src=begin;
	size_t nalu_len = (end-begin);
	uint8_t nalu_byte  = *src++;

	mblk_t *m=allocb(nalu_len,0);

	// Removal of the 3 in a 003x sequence
	// This emulation prevention byte is normally part of a NAL unit.
	/* The H.264 standard says in §7.4.1 (page 58):
		emulation_prevention_three_byte is a byte equal to 0x03.
		When an emulation_prevention_three_byte is present in a NAL unit, it shall be discarded by the decoding process.
		Within the NAL unit, the following three-byte sequences shall not occur at any byte-aligned position: 0x000000, 0x000001, 0x000002.
	*/
	*m->b_wptr++=nalu_byte;
	unsigned ecount = 0;
	while (src<end-3) {
		if (src[0]==0 && src[1]==0 && src[2]==3){
			*m->b_wptr++=0;
			*m->b_wptr++=0;
			// drop the emulation_prevention_three_byte
			src+=3;
			++ecount;
			continue;
		}
		*m->b_wptr++=*src++;
	}
	*m->b_wptr++=*src++;
	*m->b_wptr++=*src++;
	*m->b_wptr++=*src++;

	ms_queue_put(nalus, m);
}
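
The copy loop above strips H.264 emulation-prevention bytes (the 0x03 in each 0x00 0x00 0x03 sequence) while building the NAL unit. A standalone version over plain byte buffers with a tiny usage example; it is a simplified illustration, not the exact loop above:

#include <stdint.h>
#include <stddef.h>
#include <stdio.h>

/* Copy src..src+len into dst, dropping the 0x03 of every 0x00 0x00 0x03 sequence.
 * Returns the number of bytes written; dst must be at least len bytes. */
size_t nal_unescape(const uint8_t *src, size_t len, uint8_t *dst){
	size_t i = 0, o = 0;
	while (i < len){
		if (i + 2 < len && src[i] == 0 && src[i+1] == 0 && src[i+2] == 3){
			dst[o++] = 0;
			dst[o++] = 0;
			i += 3;                /* skip the emulation prevention byte */
		} else {
			dst[o++] = src[i++];
		}
	}
	return o;
}

int main(void){
	const uint8_t in[] = {0x65, 0x00, 0x00, 0x03, 0x01, 0x7f};
	uint8_t out[sizeof(in)];
	size_t n = nal_unescape(in, sizeof(in), out), i;
	for (i = 0; i < n; ++i) printf("%02x ", out[i]);   /* prints: 65 00 00 01 7f */
	printf("\n");
	return 0;
}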
Example #16
static void detector_process(MSFilter *f) {
    DetectorState *s=(DetectorState *)f->data;
    mblk_t *m;

    while ((m=ms_queue_get(f->inputs[0]))!=NULL) {
        ms_queue_put(f->outputs[0],m);
        if (s->tone_def->frequency!=0) {
            ms_bufferizer_put(s->buf,dupmsg(m));
        }
    }
    if (s->tone_def->frequency!=0) {
        uint8_t *buf=_alloca(s->framesize);

        while(ms_bufferizer_read(s->buf,buf,s->framesize)!=0) {
            float en=compute_energy((int16_t*)buf,s->framesize/2);
            if (en>energy_min) {
                float freq_en=goertzel_state_run(&s->tone_gs,(int16_t*)buf,s->framesize/2,en);
                if (freq_en>=s->tone_def->min_amplitude) {
                    if (s->dur==0) s->starttime=f->ticker->time;
                    s->dur+=s->frame_ms;
                    if (s->dur>s->tone_def->min_duration && !s->event_sent) {
                        MSToneDetectorEvent event;

                        strncpy(event.tone_name,s->tone_def->tone_name,sizeof(event.tone_name));
                        event.tone_start_time=s->starttime;
                        ms_filter_notify(f,MS_TONE_DETECTOR_EVENT,&event);
                        s->event_sent=TRUE;
                    }
                } else end_tone(s);
            } else end_tone(s);
        }
    }
}
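
goertzel_state_run() is not shown in this excerpt; the usual technique behind this kind of single-frequency detector is the Goertzel algorithm. A standalone sketch, offered as an illustration of the technique rather than the library's implementation:

#include <stdint.h>
#include <math.h>

/* Goertzel single-bin detector: power of 'freq' in the frame, normalized by
 * the frame's total energy so a pure tone at that frequency gives a value
 * near 1. Illustrative only; not the library's goertzel_state_run(). */
float goertzel_relative_energy(const int16_t *samples, int nsamples, int rate, float freq){
	const float pi = 3.14159265359f;
	float coef = 2.0f * cosf(2.0f * pi * freq / (float)rate);
	float s_prev = 0.0f, s_prev2 = 0.0f, total = 0.0f;
	float power;
	int i;
	for (i = 0; i < nsamples; ++i){
		float x = (float)samples[i];
		float s = x + coef * s_prev - s_prev2;
		s_prev2 = s_prev;
		s_prev = s;
		total += x * x;
	}
	if (total == 0.0f) return 0.0f;
	power = s_prev * s_prev + s_prev2 * s_prev2 - coef * s_prev * s_prev2;
	return power / (total * (float)nsamples / 2.0f);
}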
Example #17
static void write264FlvTag(FLVStream *flv, FLVTag *tag, MSQueue *naluq, MSQueue *naluq_t)
{
	mblk_t *tm;
	int len;
	if (flvFull(flv, tag->dataSize + 15)) {
		// struct timeval tv1,tv2;
		// gettimeofday(&tv2,NULL);
		flvFlush(flv);
		// gettimeofday(&tv1,NULL);
		// ms_message("[INFO] TIME=%d ", tv1.tv_sec*1000 + tv1.tv_usec/1000 - tv2.tv_sec*1000 - tv2.tv_usec/1000);
	}

	FLVVideoTag *dataTag=&(tag->tagData);

	putChar(flv, tag->tagType);
	putUI24(flv, tag->dataSize);
	putUI24(flv, tag->timeStamp);
	putChar(flv, tag->timestampExtended);
	putUI24(flv, tag->streamID);

	putChar(flv, (dataTag->frameType<<4)|dataTag->codecId);
	putChar(flv, dataTag->pktType);
	putUI24(flv, dataTag->compositionTime);

	while((tm=ms_queue_get(naluq))!=NULL){
		len=tm->b_wptr-tm->b_rptr;
		putUI32(flv, len);
		writeData(flv, tm->b_rptr, len);

		ms_queue_put(naluq_t, dupmsg(tm));
		freemsg(tm);
	}
	putUI32(flv, tag->dataSize + 11);
}
Example #18
void alsa_read_process(MSFilter *obj){
	AlsaReadData *ad=(AlsaReadData*)obj->data;
#ifdef AMD_HACK
	int samples=(160*ad->rate)/8000;
#else
	int samples=(128*ad->rate)/8000;
#endif
	int err;
	mblk_t *om=NULL;
	if (ad->handle==NULL && ad->pcmdev!=NULL){
		ad->handle=alsa_open_r(ad->pcmdev,16,ad->nchannels==2,ad->rate);
	}
	if (ad->handle==NULL) return;
	while (alsa_can_read(ad->handle,samples)){
		int size=samples*2;
		om=allocb(size,0);
		if ((err=alsa_read(ad->handle,om->b_wptr,samples))<=0) {
			ms_warning("Fail to read samples");
			freemsg(om);
			return;
		}
		size=err*2;
		om->b_wptr+=size;
		/*ms_message("alsa_read_process: Outputing %i bytes",size);*/
		ms_queue_put(obj->outputs[0],om);
#ifdef AMD_HACK
		break;
#endif
	}
}
Example #19
static void player_process(MSFilter *f){
	PlayerData *d=(PlayerData*)f->data;
	int bytes=2*(f->ticker->interval*d->rate*d->nchannels)/1000;
	ms_filter_lock(f);
	if (d->state==STARTED){
		int err;
		mblk_t *om=allocb(bytes,0);
		if (d->pause_time>0){
			err=bytes;
			memset(om->b_wptr,0,bytes);
			d->pause_time-=f->ticker->interval;
		}else{
			err=read(d->fd,om->b_wptr,bytes);
			if (d->swap) swap_bytes(om->b_wptr,bytes);
		}
		if (err>=0){
			if (err!=0){
				if (err<bytes) memset(om->b_wptr+err,0,bytes-err); /* zero-pad a short final read */
				om->b_wptr+=bytes;
				ms_queue_put(f->outputs[0],om);
			}else freemsg(om);
			if (err<bytes){
				ms_filter_notify_no_arg(f,MS_FILE_PLAYER_EOF);
				if (d->loop_after>=0){
					lseek(d->fd,d->hsize,SEEK_SET);
					d->pause_time=d->loop_after;
				}else d->state=STOPPED;
			}
		}else{
			ms_warning("Fail to read %i bytes: %s",bytes,strerror(errno));
		}
	}
	ms_filter_unlock(f);
}
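
swap_bytes() is referenced but not shown; for 16-bit PCM read from a file of the opposite endianness it is presumably a per-sample byte swap. A sketch of such a helper, labeled as an assumption:

#include <stdint.h>
#include <stddef.h>

/* Swap the two bytes of each 16-bit sample in place (endianness conversion).
 * Illustrative stand-in for the swap_bytes() referenced above. */
void swap_bytes16(uint8_t *buf, size_t nbytes){
	size_t i;
	for (i = 0; i + 1 < nbytes; i += 2){
		uint8_t tmp = buf[i];
		buf[i] = buf[i + 1];
		buf[i + 1] = tmp;
	}
}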
Example #20
static void aq_read_process(MSFilter * f)
{
	mblk_t *m;
	while ((m = aq_get(f)) != NULL) {
		ms_queue_put(f->outputs[0], m);
	}
}
Example #21
static void v4l_process(MSFilter * obj){
	V4lState *s=(V4lState*)obj->data;
	uint32_t timestamp;
	int cur_frame;
	if (s->frame_count==-1){
		s->start_time=obj->ticker->time;
		s->frame_count=0;
	}
	cur_frame=((obj->ticker->time-s->start_time)*s->fps/1000.0);
	if (cur_frame>=s->frame_count){
		mblk_t *om=NULL;
		ms_mutex_lock(&s->mutex);
		/*keep the most recent frame if several frames have been captured */
		if (s->fd!=-1){
			om=getq(&s->rq);
		}else{
			if (s->usemire){
				om=dupmsg(v4l_make_mire(s));
			}else {
				mblk_t *tmpm=v4l_make_nowebcam(s);
				if (tmpm) om=dupmsg(tmpm);
			}
		}
		ms_mutex_unlock(&s->mutex);
		if (om!=NULL){
			timestamp=obj->ticker->time*90;/* rtp uses a 90000 Hz clockrate for video*/
			mblk_set_timestamp_info(om,timestamp);
			mblk_set_marker_info(om,TRUE);
			ms_queue_put(obj->outputs[0],om);
			/*ms_message("picture sent");*/
			s->frame_count++;
		}
	}else flushq(&s->rq,0);
}
Example #22
static void ca_read_process(MSFilter *f){
	CAData *d = (CAData *) f->data;
	mblk_t *m;
	while((m=ca_get(d))!=NULL){
		ms_queue_put(f->outputs[0],m);
	}
}
Example #23
static void pixconv_process(MSFilter *f){
	mblk_t *im,*om;
	PixConvState *s=(PixConvState*)f->data;

	while((im=ms_queue_get(f->inputs[0]))!=NULL){
		if (s->in_fmt==s->out_fmt){
			om=im;
		}else{
			AVPicture inbuf;
			avpicture_fill(&inbuf,im->b_rptr,s->in_fmt,s->size.width,s->size.height);
			om=pixconv_alloc_mblk(s);
			if (s->sws_ctx==NULL){
				s->sws_ctx=sws_getContext(s->size.width,s->size.height,
				s->in_fmt,s->size.width,s->size.height,
				s->out_fmt,SWS_FAST_BILINEAR,
                		NULL, NULL, NULL);
			}
			if (sws_scale(s->sws_ctx,inbuf.data,inbuf.linesize, 0,
				s->size.height, s->outbuf.planes, s->outbuf.strides)<0){
				ms_error("MSPixConv: Error in sws_scale().");
			}
			freemsg(im);
		}
		if (om!=NULL) ms_queue_put(f->outputs[0],om);
	}
}
Example #24
static void enc_process(MSFilter *f){
	static const int nsamples=160;
	EncState *s=(EncState*)f->data;
	mblk_t *im,*om;
	int16_t samples[nsamples];
	
	while((im=ms_queue_get(f->inputs[0]))!=NULL){
		ms_bufferizer_put (s->mb,im);
	}
	while((ms_bufferizer_read(s->mb,(uint8_t*)samples,nsamples*2))>=nsamples*2){
		int ret;
		om=allocb(33,0);
		*om->b_wptr=0xf0;
		om->b_wptr++;
		ret=Encoder_Interface_Encode(s->enc,MR122,samples,om->b_wptr,0);
		if (ret<=0){
			ms_warning("Encoder returned %i",ret);
			freemsg(om);
			continue;
		}
		om->b_wptr+=ret;
		mblk_set_timestamp_info(om,s->ts);
		s->ts+=nsamples;
		ms_queue_put(f->outputs[0],om);
	}
}
Example #25
static void au_read_process(MSFilter *f) {
    AURead *d = (AURead *) f->data;
    mblk_t *m;
    while((m=au_read_get(d))!=NULL) {
        ms_queue_put(f->outputs[0],m);
    }
}
Example #26
static void vad_dtx_process(MSFilter *f){
	VadDtxContext *ctx=(VadDtxContext*)f->data;
	mblk_t *m;

	while((m=ms_queue_get(f->inputs[0]))!=NULL){
		update_energy(ctx,(int16_t*)m->b_rptr, (m->b_wptr - m->b_rptr) / 2, f->ticker->time);

		if (ortp_extremum_get_current(&ctx->max)<silence_threshold){
			if (!ctx->silence_mode){
				MSCngData cngdata={0};
				cngdata.datasize=1; /*only noise level*/
				cngdata.data[0]=0; /*noise level set to zero for the moment*/
				ms_message("vad_dtx_process(): silence period detected.");
				ctx->silence_mode=1;
				ms_filter_notify(f, MS_VAD_DTX_NO_VOICE, &cngdata);
			}
		}else{
			if (ctx->silence_mode){
				ms_message("vad_dtx_process(): silence period finished.");
				ctx->silence_mode=0;
				ms_filter_notify(f, MS_VAD_DTX_VOICE, NULL);
			}
		}
		ms_queue_put(f->outputs[0],m);
	}

}
Example #27
static void dec_process_frame(MSFilter *f, DecState *s, ogg_packet *op){
	yuv_buffer yuv;
	if (theora_decode_packetin(&s->tstate,op)==0){
		if (theora_decode_YUVout(&s->tstate,&yuv)==0){
			mblk_t *om;
			int i;
			int ylen=yuv.y_width*yuv.y_height;
			int uvlen=yuv.uv_width*yuv.uv_height;
			ms_debug("Got yuv buffer from theora decoder");
			if (s->yuv==NULL){
				int len=(ylen)+(2*uvlen);
				s->yuv=allocb(len,0);
			}
			om=dupb(s->yuv);
			for(i=0;i<yuv.y_height;++i){
				memcpy(om->b_wptr,yuv.y+yuv.y_stride*i,yuv.y_width);
				om->b_wptr+=yuv.y_width;
			}
			for(i=0;i<yuv.uv_height;++i){
				memcpy(om->b_wptr,yuv.u+yuv.uv_stride*i,yuv.uv_width);
				om->b_wptr+=yuv.uv_width;
			}
			for(i=0;i<yuv.uv_height;++i){
				memcpy(om->b_wptr,yuv.v+yuv.uv_stride*i,yuv.uv_width);
				om->b_wptr+=yuv.uv_width;
			}
			ms_queue_put(f->outputs[0],om);
		}
	}else{
		ms_warning("theora decoding error");
	}
}
Example #28
static void winsnd_read_process(MSFilter *f){
	MSSndCard *card=(MSSndCard*)f->data;
	mblk_t *m;
	while((m=winsnd_get(card))!=NULL){
		ms_queue_put(f->outputs[0],m);
	}
}
Example #29
static void video_player_process(MSFilter *f){
	StreamData *d=(StreamData*)f->data;
	VideoState *is = d->is;
	mblk_t *om=NULL;
	ConfSlotQueue *confq= video_player_get_video_outq(d->is);

	ms_filter_lock(f);
	float elapsed=(float)(f->ticker->time-d->starttime);

	if ((elapsed*d->fps/1000.0)>d->index){

		ms_mutex_lock(&confq->lock);
		om = ms_queue_get(&confq->q);
		ms_mutex_unlock(&confq->lock);

		if(om!=NULL) ms_queue_put(f->outputs[0],om);

		d->index++;
	}
	ms_filter_unlock(f);
}
Example #30
	/***
	Encodes 8 kHz-sampled narrowband speech at a bit rate of 16 kbit/s,
	uses 5 ms frames.
	The encoder receives 10 ms speech => 160 bytes.
	***/
static void enc_process (MSFilter *f){
	EncState *s=(EncState*)f->data;
	struct	BV16_Bit_Stream bs;
	short *buf= NULL;
	mblk_t *inputMessage = NULL, *outputMessage = NULL;
	int frame_per_packet=s->ptime/5;
	int in_rcvd_bytes = 0;

	in_rcvd_bytes = SIGNAL_FRAME_SIZE * frame_per_packet;
	buf=(short*)alloca(in_rcvd_bytes);
	memset((void*)buf,0, in_rcvd_bytes );

	while((inputMessage=ms_queue_get(f->inputs[0]))!=NULL){
		ms_bufferizer_put(s->bufferizer,inputMessage);

	}

	/* process ptime ms of data: (ptime in ms / 1000) seconds * 8000 (sample rate) * 2 (bytes per sample) */
	while(ms_bufferizer_get_avail(s->bufferizer)>= in_rcvd_bytes){
		int bufferIndex;
		outputMessage = allocb(BITSTREAM_FRAME_SIZE*frame_per_packet,0); /* output bitstream is 80 bits (10 bytes) per 5 ms frame * number of frames */
		/* process buffer in 5 ms frames but read everything first*/
		ms_bufferizer_read(s->bufferizer,(uint8_t*)buf,in_rcvd_bytes);
		for (bufferIndex=0; bufferIndex<frame_per_packet; bufferIndex++) {
			BV16_Encode(&bs, &s->state, (short*)&buf[bufferIndex*FRSZ]);
			BV16_BitPack( (UWord8*)outputMessage->b_wptr, &bs );
			outputMessage->b_wptr+=BITSTREAM_FRAME_SIZE;
		}
		mblk_set_timestamp_info(outputMessage,s->ts);
		ms_bufferizer_fill_current_metas(s->bufferizer, outputMessage);
		ms_queue_put(f->outputs[0],outputMessage);
		s->ts +=  FRSZ * frame_per_packet;
	}

}
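
From the header comment (8 kHz input, 5 ms frames, 16 kbit/s output), each BV16 frame is 40 samples = 80 PCM bytes in and 80 bits = 10 bytes of bitstream out, which is where SIGNAL_FRAME_SIZE, BITSTREAM_FRAME_SIZE and the ptime/5 frame count come from. A small sketch of that arithmetic, restating those constants as assumptions consistent with the comment:

#include <stddef.h>

/* Per-packet sizes for BV16 (8 kHz, 5 ms frames, 16 kbit/s), as used above. */
void bv16_packet_sizes(int ptime_ms, size_t *pcm_bytes, size_t *bitstream_bytes){
	int frames = ptime_ms / 5;              /* one BV16 frame per 5 ms */
	*pcm_bytes = (size_t)frames * 40 * 2;   /* 40 samples of 16-bit PCM per frame */
	*bitstream_bytes = (size_t)frames * 10; /* 80 bits of bitstream per frame */
}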