Code example #1
File: h264dec.c Project: avis/mediastreamer2
static void dec_process(MSFilter *f){
	DecData *d=(DecData*)f->data;
	mblk_t *im;
	MSQueue nalus;
	AVFrame orig;
	ms_queue_init(&nalus);
	while((im=ms_queue_get(f->inputs[0]))!=NULL){
		/*push the sps/pps given in sprop-parameter-sets if any*/
		if (d->packet_num==0 && d->sps && d->pps){
			mblk_set_timestamp_info(d->sps,mblk_get_timestamp_info(im));
			mblk_set_timestamp_info(d->pps,mblk_get_timestamp_info(im));
			rfc3984_unpack(&d->unpacker,d->sps,&nalus);
			rfc3984_unpack(&d->unpacker,d->pps,&nalus);
			d->sps=NULL;
			d->pps=NULL;
		}
		rfc3984_unpack(&d->unpacker,im,&nalus);
		if (!ms_queue_empty(&nalus)){
			int size;
			uint8_t *p,*end;
			bool_t need_reinit=FALSE;

			size=nalusToFrame(d,&nalus,&need_reinit);
			if (need_reinit)
				dec_reinit(d);
			p=d->bitstream;
			end=d->bitstream+size;
			while (end-p>0) {
				int len;
				int got_picture=0;
				AVPacket pkt;
				avcodec_get_frame_defaults(&orig);
				av_init_packet(&pkt);
				pkt.data = p;
				pkt.size = end-p;
				len=avcodec_decode_video2(&d->av_context,&orig,&got_picture,&pkt);
				if (len<=0) {
					ms_warning("ms_AVdecoder_process: error %i.",len);
					if ((f->ticker->time - d->last_error_reported_time)>5000 || d->last_error_reported_time==0) {
						d->last_error_reported_time=f->ticker->time;
						ms_filter_notify_no_arg(f,MS_VIDEO_DECODER_DECODING_ERRORS);
					}
					break;
				}
				if (got_picture) {
					ms_queue_put(f->outputs[0],get_as_yuvmsg(f,d,&orig));
					if (!d->first_image_decoded) {
						ms_filter_notify_no_arg(f,MS_VIDEO_DECODER_FIRST_IMAGE_DECODED);
						d->first_image_decoded = TRUE;
					}
				}
				p+=len;
			}
		}
		d->packet_num++;
	}
}
Code example #2
static void vp8_fragment_and_send(MSFilter *f,EncState *s,mblk_t *frame, uint32_t timestamp, const vpx_codec_cx_pkt_t *pkt, bool_t lastPartition){
	uint8_t *rptr;
	mblk_t *packet=NULL;
	mblk_t* vp8_payload_desc = NULL;
	int len;

#if 0
	if ((pkt->data.frame.flags & VPX_FRAME_IS_KEY) == 0) {
		ms_debug("P-FRAME: %u\n", pkt->data.frame.sz);
	} else {
		ms_debug("I-FRAME: %u\n", pkt->data.frame.sz);
	}
#endif

	for (rptr=frame->b_rptr;rptr<frame->b_wptr;){
		vp8_payload_desc = allocb(1, 0);
		vp8_payload_desc->b_wptr=vp8_payload_desc->b_rptr+1;

		len=MIN(s->mtu,(frame->b_wptr-rptr));
		packet=dupb(frame);
		packet->b_rptr=rptr;
		packet->b_wptr=rptr+len;
		mblk_set_timestamp_info(packet,timestamp);
		mblk_set_timestamp_info(vp8_payload_desc,timestamp);

		/* insert 1 byte vp8 payload descriptor */
		(*vp8_payload_desc->b_rptr) = 0;
		/* X (extended) field, 0 */
		(*vp8_payload_desc->b_rptr) &= ~VP8_PAYLOAD_DESC_X_MASK;
		/* RSV field, always 0 */
		(*vp8_payload_desc->b_rptr) &= ~VP8_PAYLOAD_DESC_RSV_MASK;
		/* N : set to 1 if non reference frame */
		if ((pkt->data.frame.flags & VPX_FRAME_IS_KEY) == 0)
			(*vp8_payload_desc->b_rptr) |= VP8_PAYLOAD_DESC_N_MASK;
		/* S : partition start */
		if (rptr == frame->b_rptr) {
			(*vp8_payload_desc->b_rptr) |= VP8_PAYLOAD_DESC_S_MASK;
		}
		/* PartID : partition id */
		#ifdef FRAGMENT_ON_PARTITIONS
		(*vp8_payload_desc->b_rptr) |= (pkt->data.frame.partition_id & VP8_PAYLOAD_DESC_PARTID_MASK);
		#endif

		vp8_payload_desc->b_cont = packet;

		ms_queue_put(f->outputs[0], vp8_payload_desc);
		rptr+=len;
	}

	freeb(frame);

	/*set marker bit on last packet*/
	if (lastPartition) {
		mblk_set_marker_info(packet,TRUE);
		mblk_set_marker_info(vp8_payload_desc,TRUE);
	}
}
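Note on the example above: the one-byte payload descriptor it builds matches the VP8 RTP payload format, where the first octet carries the X, R, N, S and PartID fields. The masks are defined elsewhere in the project; a sketch of the bit layout they plausibly correspond to (values assumed from the VP8 payload draft, not copied from this file):

/* Assumed VP8 payload descriptor bit layout: |X|R|N|S|PartID(4)| (sketch) */
#define VP8_PAYLOAD_DESC_X_MASK      0x80 /* extended control bits present */
#define VP8_PAYLOAD_DESC_RSV_MASK    0x40 /* reserved, must be zero */
#define VP8_PAYLOAD_DESC_N_MASK      0x20 /* non-reference frame: may be dropped */
#define VP8_PAYLOAD_DESC_S_MASK      0x10 /* start of VP8 partition */
#define VP8_PAYLOAD_DESC_PARTID_MASK 0x0F /* partition id */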
Code example #3
File: bv16.c Project: Accontech/mediastreamer2
	/***
	Encodes 8 kHz-sampled narrowband speech at a bit rate of 16 kbit/s,
	uses 5 ms frames.
	The encoder receives 10 ms of speech => 160 bytes.
	***/
static void enc_process (MSFilter *f){
	EncState *s=(EncState*)f->data;
	struct	BV16_Bit_Stream bs;
	short *buf= NULL;
	mblk_t *inputMessage = NULL, *outputMessage = NULL;
	int frame_per_packet=s->ptime/5;
	int in_rcvd_bytes = 0;

	in_rcvd_bytes = SIGNAL_FRAME_SIZE * frame_per_packet;
	buf=(short*)alloca(in_rcvd_bytes);
	memset((void*)buf,0, in_rcvd_bytes );

	while((inputMessage=ms_queue_get(f->inputs[0]))!=NULL){
		ms_bufferizer_put(s->bufferizer,inputMessage);

	}

	/* process ptime ms of data: (ptime/1000) s * 8000 (sample rate) * 2 (bytes per sample) */
	while(ms_bufferizer_get_avail(s->bufferizer)>= in_rcvd_bytes){
		int bufferIndex;
		outputMessage = allocb(BITSTREAM_FRAME_SIZE*frame_per_packet,0); /* output bitstream is 80 bits (10 bytes) per 5 ms frame * number of frames */
		/* process buffer in 5 ms frames but read everything first*/
		ms_bufferizer_read(s->bufferizer,(uint8_t*)buf,in_rcvd_bytes);
		for (bufferIndex=0; bufferIndex<frame_per_packet; bufferIndex++) {
			BV16_Encode(&bs, &s->state, (short*)&buf[bufferIndex*FRSZ]);
			BV16_BitPack( (UWord8*)outputMessage->b_wptr, &bs );
			outputMessage->b_wptr+=BITSTREAM_FRAME_SIZE;
		}
		mblk_set_timestamp_info(outputMessage,s->ts);
		ms_bufferizer_fill_current_metas(s->bufferizer, outputMessage);
		ms_queue_put(f->outputs[0],outputMessage);
		s->ts +=  FRSZ * frame_per_packet;
	}

}
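For reference, the sizes this encoder juggles follow from the arithmetic in its header comment: 5 ms at 8 kHz is 40 samples, i.e. 80 bytes of 16-bit PCM in, and 16 kbit/s * 5 ms is 80 bits, i.e. 10 bytes of bitstream out. A minimal sketch, assuming the constants carry these conventional BV16 values (they are defined elsewhere in the project):

/* Assumed BV16 framing constants (sketch, not copied from the project) */
#define FRSZ                 40           /* samples per 5 ms frame at 8 kHz */
#define SIGNAL_FRAME_SIZE    (FRSZ * 2)   /* 80 bytes of 16-bit PCM per frame */
#define BITSTREAM_FRAME_SIZE 10           /* 16 kbit/s * 5 ms = 80 bits = 10 bytes */
/* e.g. ptime = 20 ms => 4 frames per packet: 320 PCM bytes in, 40 coded bytes out */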
Code example #4
File: msrtp.c Project: blueskycoco/hp
static void receiver_process(MSFilter * f)
{
	ReceiverData *d = (ReceiverData *) f->data;
	mblk_t *m;
	uint32_t timestamp;
	if (d->session == NULL)
		return;
	
	if (d->reset_jb){
		ms_message("Reseting jitter buffer");
		rtp_session_resync(d->session);
		d->reset_jb=FALSE;
	}

	if (d->starting){
		PayloadType *pt=rtp_profile_get_payload(
			rtp_session_get_profile(d->session),
			rtp_session_get_recv_payload_type(d->session));
		if (pt && pt->type!=PAYLOAD_VIDEO)
			rtp_session_flush_sockets(d->session);
		d->starting=FALSE;
	}

	timestamp = (uint32_t) (f->ticker->time * (d->rate/1000));
	while ((m = rtp_session_recvm_with_ts(d->session, timestamp)) != NULL) {
		mblk_set_timestamp_info(m, rtp_get_timestamp(m));
		mblk_set_marker_info(m, rtp_get_markbit(m));
		mblk_set_cseq(m, rtp_get_seqnumber(m));
		rtp_get_payload(m,&m->b_rptr);		
		ms_queue_put(f->outputs[0], m);
	}
}
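The timestamp passed to rtp_session_recvm_with_ts() converts the ticker's millisecond clock into units of the session's RTP clock rate. A minimal sketch of that conversion (ticker_ms and rate stand in for f->ticker->time and d->rate; note the integer division, which assumes rate is a multiple of 1000):

#include <stdint.h>

/* Convert a millisecond ticker value into RTP timestamp units (sketch). */
static uint32_t ms_to_rtp_ts(uint64_t ticker_ms, int rate) {
	/* e.g. 8000 Hz audio: 20 ms -> 160 units; 90000 Hz video: 20 ms -> 1800 units */
	return (uint32_t)(ticker_ms * (uint64_t)(rate / 1000));
}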
Code example #5
static void v4l_process(MSFilter * obj){
	V4lState *s=(V4lState*)obj->data;
	uint32_t timestamp;
	int cur_frame;
	if (s->frame_count==-1){
		s->start_time=obj->ticker->time;
		s->frame_count=0;
	}
	cur_frame=((obj->ticker->time-s->start_time)*s->fps/1000.0);
	if (cur_frame>=s->frame_count){
		mblk_t *om=NULL;
		ms_mutex_lock(&s->mutex);
		/*keep the most recent frame if several frames have been captured */
		if (s->fd!=-1){
			om=getq(&s->rq);
		}else{
			if (s->usemire){
				om=dupmsg(v4l_make_mire(s));
			}else {
				mblk_t *tmpm=v4l_make_nowebcam(s);
				if (tmpm) om=dupmsg(tmpm);
			}
		}
		ms_mutex_unlock(&s->mutex);
		if (om!=NULL){
			timestamp=obj->ticker->time*90;/* rtp uses a 90000 Hz clockrate for video*/
			mblk_set_timestamp_info(om,timestamp);
			mblk_set_marker_info(om,TRUE);
			ms_queue_put(obj->outputs[0],om);
			/*ms_message("picture sent");*/
			s->frame_count++;
		}
	}else flushq(&s->rq,0);
}
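The test at the top of the function is the ticker-driven pacing scheme shared by the capture filters in this listing: a frame is emitted only once enough wall-clock time has elapsed for the configured fps. A condensed sketch of that logic (an illustration with made-up names, not library API):

#include <stdint.h>

/* Return nonzero when a new frame is due, given milliseconds elapsed since
 * the first tick and the number of frames already sent (sketch). */
static int frame_is_due(uint64_t elapsed_ms, float fps, int frames_sent) {
	int cur_frame = (int)(elapsed_ms * fps / 1000.0);
	return cur_frame >= frames_sent; /* e.g. 15 fps -> one frame every ~66 ms */
}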
Code example #6
static void enc_process(MSFilter *f){
	static const int nsamples=160;
	EncState *s=(EncState*)f->data;
	mblk_t *im,*om;
	int16_t samples[nsamples];
	
	while((im=ms_queue_get(f->inputs[0]))!=NULL){
		ms_bufferizer_put (s->mb,im);
	}
	while((ms_bufferizer_read(s->mb,(uint8_t*)samples,nsamples*2))>=nsamples*2){
		int ret;
		om=allocb(33,0);
		*om->b_wptr=0xf0;
		om->b_wptr++;
		ret=Encoder_Interface_Encode(s->enc,MR122,samples,om->b_wptr,0);
		if (ret<=0){
			ms_warning("Encoder returned %i",ret);
			freemsg(om);
			continue;
		}
		om->b_wptr+=ret;
		mblk_set_timestamp_info(om,s->ts);
		s->ts+=nsamples;
		ms_queue_put(f->outputs[0],om);
	}
}
Code example #7
File: winvideo2.c Project: github188/Sip-MCU
static void vfw_process(MSFilter * obj){
	VfwState *s=(VfwState*)obj->data;
	mblk_t *m;
	uint32_t timestamp;
	int cur_frame;

	if (s->frame_count==-1){
		s->start_time=(float)obj->ticker->time;
		s->frame_count=0;
	}

	cur_frame=(int)((obj->ticker->time-s->start_time)*s->fps/1000.0);
	if (cur_frame>s->frame_count){
		mblk_t *om=NULL;
		/*keep the most recent frame if several frames have been captured */
		if (s->eng!=NULL){
			ms_mutex_lock(&s->mutex);
			while((m=getq(&s->rq))!=NULL){
				ms_mutex_unlock(&s->mutex);
				if (om!=NULL) freemsg(om);
				om=m;
				ms_mutex_lock(&s->mutex);
			}
			ms_mutex_unlock(&s->mutex);
		}
		if (om!=NULL){
			timestamp=(uint32_t)(obj->ticker->time*90);/* rtp uses a 90000 Hz clockrate for video*/
			mblk_set_timestamp_info(om,timestamp);
			ms_queue_put(obj->outputs[0],om);
		}
		s->frame_count++;
	}
}
Code example #8
File: gsm.c Project: biddyweb/mediastream-plus
static void enc_process(MSFilter *f){
	EncState *s=(EncState*)f->data;
	mblk_t *im;
	unsigned int unitary_buff_size = sizeof(int16_t)*160;
	unsigned int buff_size = unitary_buff_size*s->ptime/20;
	int16_t* buff;
	int offset;

	while((im=ms_queue_get(f->inputs[0]))!=NULL){
		ms_bufferizer_put(s->bufferizer,im);
	}
	while(ms_bufferizer_get_avail(s->bufferizer) >= buff_size) {
		mblk_t *om=allocb(33*s->ptime/20,0);
		buff = (int16_t *)alloca(buff_size);
		ms_bufferizer_read(s->bufferizer,(uint8_t*)buff,buff_size);

		for (offset=0;offset<buff_size;offset+=unitary_buff_size) {
			gsm_encode(s->state,(gsm_signal*)&buff[offset/sizeof(int16_t)],(gsm_byte*)om->b_wptr);
			om->b_wptr+=33;
		}
		mblk_set_timestamp_info(om,s->ts);
		ms_queue_put(f->outputs[0],om);
		s->ts+=buff_size/sizeof(int16_t)/*sizeof(buf)/2*/;
	}
}
Code example #9
File: isac_enc.c Project: zengtaoxian/linphone
static void filter_process ( MSFilter *f ) {
    isac_encoder_struct_t* obj = (isac_encoder_struct_t*)f->data;

    mblk_t *im;
    mblk_t *om=NULL;
    u_int8_t* input_buf = NULL;
    WebRtc_Word16 ret;
    static int out_count = 0;

    // get the input data and put it into our buffered input
    while( (im = ms_queue_get( f->inputs[0] ) ) != NULL ) {
        ms_bufferizer_put( obj->bufferizer, im );
    }

    // feed the encoder with 160 16bit samples, until it has reached enough data
    // to produce a packet
    while( ms_bufferizer_get_avail(obj->bufferizer) > ISAC_SAMPLES_PER_ENCODE*2 ){

        om = allocb( WebRtcIsacfix_GetNewFrameLen(obj->isac), 0 );
        if(!input_buf) input_buf = ms_malloc( ISAC_SAMPLES_PER_ENCODE*2 );
        ms_bufferizer_read(obj->bufferizer, input_buf, ISAC_SAMPLES_PER_ENCODE*2);

        ret = WebRtcIsacfix_Encode(obj->isac,
                                   (const WebRtc_Word16*)input_buf,
                                   (WebRtc_Word16*)om->b_wptr);

        if( ret < 0) {

            ms_error( "WebRtcIsacfix_Encode error: %d", WebRtcIsacfix_GetErrorCode(obj->isac) );
            freeb(om);

        } else if( ret == 0 ) {
            // Encode() buffered the input, not yet able to produce a packet, continue feeding it
            // 160 samples per-call
            obj->ts += ISAC_SAMPLES_PER_ENCODE;
            freeb(om);

        } else {

            // a new packet has been encoded, send it
            obj->ts += ISAC_SAMPLES_PER_ENCODE;
            om->b_wptr += ret;
            out_count++;
//            ms_message("packet %d out, samples %d", out_count, obj->ts);

            mblk_set_timestamp_info( om, obj->ts );
            ms_queue_put(f->outputs[0], om);

            om = NULL;
        }

    }

    if( input_buf ){
        ms_free(input_buf);
    }
}
Code example #10
static void resample_process_ms2(MSFilter *obj) {
    ResampleData *dt=(ResampleData*)obj->data;
    mblk_t *m;

    if (dt->output_rate==dt->input_rate) {
        while((m=ms_queue_get(obj->inputs[0]))!=NULL) {
            ms_queue_put(obj->outputs[0],m);
        }
        return;
    }
    ms_filter_lock(obj);
    if (dt->handle!=NULL) {
        unsigned int inrate=0, outrate=0;
        speex_resampler_get_rate(dt->handle,&inrate,&outrate);
        if (inrate!=dt->input_rate || outrate!=dt->output_rate) {
            speex_resampler_destroy(dt->handle);
            dt->handle=0;
        }
    }
    if (dt->handle==NULL) {
        int err=0;
        dt->handle=speex_resampler_init(dt->nchannels, dt->input_rate, dt->output_rate, SPEEX_RESAMPLER_QUALITY_VOIP, &err);
    }


    while((m=ms_queue_get(obj->inputs[0]))!=NULL) {
        unsigned int inlen=(m->b_wptr-m->b_rptr)/(2*dt->nchannels);
        unsigned int outlen=((inlen*dt->output_rate)/dt->input_rate)+1;
        unsigned int inlen_orig=inlen;
        mblk_t *om=allocb(outlen*2*dt->nchannels,0);
        if (dt->nchannels==1) {
            speex_resampler_process_int(dt->handle,
                                        0,
                                        (int16_t*)m->b_rptr,
                                        &inlen,
                                        (int16_t*)om->b_wptr,
                                        &outlen);
        } else {
            speex_resampler_process_interleaved_int(dt->handle,
                                                    (int16_t*)m->b_rptr,
                                                    &inlen,
                                                    (int16_t*)om->b_wptr,
                                                    &outlen);
        }
        if (inlen_orig!=inlen) {
            ms_error("Bug in resampler ! only %u samples consumed instead of %u, out=%u",
                     inlen,inlen_orig,outlen);
        }
        om->b_wptr+=outlen*2*dt->nchannels;
        mblk_set_timestamp_info(om,dt->ts);
        dt->ts+=outlen;
        ms_queue_put(obj->outputs[0],om);
        freemsg(m);
    }
    ms_filter_unlock(obj);
}
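The output allocation above relies on the standard resampling size bound: inlen input samples become at most inlen * output_rate / input_rate samples, so the integer division plus one slot is always enough. A worked instance of that bound (a standalone sketch, not project code):

#include <stdio.h>

/* Upper bound on output samples for a sample-rate conversion (sketch). */
static unsigned int resample_out_bound(unsigned int inlen,
                                       unsigned int in_rate,
                                       unsigned int out_rate) {
	return (inlen * out_rate) / in_rate + 1;
}

int main(void) {
	/* 160 samples (20 ms at 8 kHz) -> at most 321 slots when going to 16 kHz */
	printf("%u\n", resample_out_bound(160, 8000, 16000));
	return 0;
}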
Code example #11
File: msv4l2.c Project: fremaks/linphone_sdk
static void msv4l2_process(MSFilter *f){
	V4l2State *s=(V4l2State*)f->data;
#ifdef V4L2_THREADED
	uint32_t timestamp;
	int cur_frame;
	if (s->frame_count==-1){
		s->start_time=f->ticker->time;
		s->frame_count=0;
	}
	cur_frame=((f->ticker->time-s->start_time)*s->fps/1000.0);
	
	if (cur_frame>=s->frame_count){
		mblk_t *om=NULL;
		ms_mutex_lock(&s->mutex);
		/*keep the most recent frame if several frames have been captured */
		if (s->fd!=-1){
			om=getq(&s->rq);
		}
		ms_mutex_unlock(&s->mutex);
		if (om!=NULL){
			timestamp=f->ticker->time*90;/* rtp uses a 90000 Hz clockrate for video*/
			mblk_set_timestamp_info(om,timestamp);
			mblk_set_marker_info(om,TRUE);
			ms_queue_put(f->outputs[0],om);
			/*ms_message("picture sent");*/
			s->frame_count++;
		}
	}else{
			flushq(&s->rq,0);
	}
#else
	uint32_t elapsed;
	
	if (s->fd!=-1){
		/*see if it is necessary to output a frame:*/
		elapsed=f->ticker->time-s->start_time;
		if (((float)elapsed*s->fps/1000.0)>s->frame_count){
			mblk_t *m;
			m=v4lv2_grab_image(s);
			if (m){
				mblk_t *om=dupb(m);
				mblk_set_marker_info(om,(s->pix_fmt==MS_MJPEG));
				ms_queue_put(f->outputs[0],om);
				s->frame_count++;
			}
		}
	}
#endif
}
Code example #12
static void enc_process(MSFilter *f){
	SpeexEncState *s=(SpeexEncState*)f->data;
	mblk_t *im;
	int nbytes;
	uint8_t *buf;
	int frame_per_packet=1;

	if (s->frame_size<=0)
		return;

	ms_filter_lock(f);

	if (s->ptime>=20)
	{
		frame_per_packet = s->ptime/20;
	}

	if (frame_per_packet<=0)
		frame_per_packet=1;
	if (frame_per_packet>7) /* 7*20 == 140 ms max */
		frame_per_packet=7;

	nbytes=s->frame_size*2;
	buf=(uint8_t*)alloca(nbytes*frame_per_packet);

	while((im=ms_queue_get(f->inputs[0]))!=NULL){
		ms_bufferizer_put(s->bufferizer,im);
	}
	while(ms_bufferizer_read(s->bufferizer,buf,nbytes*frame_per_packet)==nbytes*frame_per_packet){
		mblk_t *om=allocb(nbytes*frame_per_packet,0);//too large...
		int k;
		SpeexBits bits;
		speex_bits_init(&bits);
		for (k=0;k<frame_per_packet;k++)
		{
			speex_encode_int(s->state,(int16_t*)(buf + (k*s->frame_size*2)),&bits);
			s->ts+=s->frame_size;
		}
		speex_bits_insert_terminator(&bits);
		k=speex_bits_write(&bits, (char*)om->b_wptr, nbytes*frame_per_packet);
		om->b_wptr+=k;

		mblk_set_timestamp_info(om,s->ts-s->frame_size);
		ms_bufferizer_fill_current_metas(s->bufferizer, om);
		ms_queue_put(f->outputs[0],om);
		speex_bits_destroy(&bits);
	}
	ms_filter_unlock(f);
}
Code example #13
File: msv4l2.c Project: dmonakhov/mediastreamer2
static void msv4l2_process(MSFilter *f){
	V4l2State *s=(V4l2State*)f->data;
	uint32_t timestamp;
	int cur_frame;
	uint32_t curtime=f->ticker->time;
	float elapsed;
	
	if (s->th_frame_count==-1){
		s->start_time=curtime;
		s->th_frame_count=0;
	}
	elapsed=((float)(curtime-s->start_time))/1000.0;
	cur_frame=elapsed*s->fps;
	
	if (cur_frame>=s->th_frame_count){
		mblk_t *om=NULL;
		ms_mutex_lock(&s->mutex);
		/*keep the most recent frame if several frames have been captured */
		if (s->fd!=-1){
			mblk_t *tmp=NULL;
			while((tmp=getq(&s->rq))!=NULL){
				if (om!=NULL) freemsg(om);
				om=tmp;
			}
		}
		ms_mutex_unlock(&s->mutex);
		if (om!=NULL){
			timestamp=f->ticker->time*90;/* rtp uses a 90000 Hz clockrate for video*/
			mblk_set_timestamp_info(om,timestamp);
			mblk_set_marker_info(om,TRUE);
			ms_queue_put(f->outputs[0],om);
			if (s->last_frame_time!=-1){
				float frame_interval=(float)(curtime-s->last_frame_time)/1000.0;
				if (s->mean_inter_frame==0){
					s->mean_inter_frame=frame_interval;
				}else{
					s->mean_inter_frame=(0.8*s->mean_inter_frame)+(0.2*frame_interval);
				}
			}
			s->last_frame_time=curtime;
		}
		s->th_frame_count++;
		if (s->th_frame_count%50==0 && s->mean_inter_frame!=0){
			ms_message("Captured mean fps=%f, expected=%f",1/s->mean_inter_frame, s->fps);
		}
	}
}
Code example #14
static void enc_process(MSFilter *f){
	EncState *s=(EncState*)f->data;
	mblk_t *im;
	int16_t buf[160];
	
	while((im=ms_queue_get(f->inputs[0]))!=NULL){
		ms_bufferizer_put(s->bufferizer,im);
	}
	while(ms_bufferizer_read(s->bufferizer,(uint8_t*)buf,sizeof(buf))==sizeof(buf)) {
		mblk_t *om=allocb(33,0);
		gsm_encode(s->state,(gsm_signal*)buf,(gsm_byte*)om->b_wptr);
		om->b_wptr+=33;
		mblk_set_timestamp_info(om,s->ts);
		ms_queue_put(f->outputs[0],om);
		s->ts+=sizeof(buf)/2;
	}
}
Code example #15
File: msrtp.c Project: biddyweb/mediastream-plus
static void receiver_process(MSFilter * f)
{
	ReceiverData *d = (ReceiverData *) f->data;
	mblk_t *m;
	uint32_t timestamp;

	if (d->session == NULL)
		return;

	timestamp = (uint32_t) (f->ticker->time * (d->rate/1000));
	while ((m = rtp_session_recvm_with_ts(d->session, timestamp)) != NULL) {
		mblk_set_timestamp_info(m, rtp_get_timestamp(m));
		mblk_set_marker_info(m, rtp_get_markbit(m));
		mblk_set_payload_type(m, rtp_get_payload_type(m));
		rtp_get_payload(m,&m->b_rptr);
		ms_queue_put(f->outputs[0], m);
	}
}
Code example #16
File: bv16.c Project: LaughingAngus/linphone-vs2008
static void enc_process(MSFilter *f){
	EncState *s=(EncState*)f->data;
	mblk_t *im;
	int size = BV16_CODE_SIZE *2;
	int i,frames;
	uint8_t buf[BV16_FRAME_SIZE * 2 * 4];
	
	while((im=ms_queue_get(f->inputs[0]))!=NULL){
		ms_bufferizer_put(s->bufferizer,im);
	}

	while(ms_bufferizer_read(s->bufferizer,(uint8_t*)buf,sizeof(buf))==sizeof(buf)) {
		mblk_t *om=allocb(BV16_CODE_SIZE*8,0);
		uint8_t *in_buf = (uint8_t *)buf;

		frames = sizeof(buf)/(BV16_FRAME_SIZE  * 2);
		
		
		for(i=0;i<frames;i++)
		{
			//BV16_Encode(&s->ebs, &s->cs,(short *)in_buf);
			//BV16_BitPack((unsigned short *)om->b_wptr, &s->ebs);

			bv16_encode(s->enc,
				(uint8_t *)om->b_wptr,
				(int16_t *) in_buf,
				BV16_FRAME_SIZE);

			//ms_error("BV16_Encode frames %d",i);
			in_buf += 80;
			om->b_wptr += 10;
		}

		mblk_set_timestamp_info(om,s->ts);
		ms_queue_put(f->outputs[0],om);
		s->ts+=sizeof(buf)/2;
	}

}
Code example #17
File: amrnb.c Project: AirDev/linphone-android
static void enc_process(MSFilter *f) {
    EncState *s = (EncState*) f->data;
    unsigned int unitary_buff_size = sizeof (int16_t)*160;
    unsigned int buff_size = unitary_buff_size * s->ptime / 20;
    mblk_t *im;
    uint8_t tmp[OUT_MAX_SIZE];
    int16_t samples[buff_size];

    while ((im = ms_queue_get(f->inputs[0])) != NULL) {
        ms_bufferizer_put(s->mb, im);
    }
    while (ms_bufferizer_get_avail(s->mb) >= buff_size) {
        mblk_t *om = allocb(OUT_MAX_SIZE * buff_size / unitary_buff_size + 1, 0);
        ms_bufferizer_read(s->mb, (uint8_t*) samples, buff_size);

        *om->b_wptr = 0xf0;
        uint8_t *tocs = om->b_wptr++;

        om->b_wptr += buff_size / unitary_buff_size;
        int offset;
        int encode_failed = 0;
        for (offset = 0; offset < buff_size; offset += unitary_buff_size) {
            int ret = Encoder_Interface_Encode(s->enc, s->mode, &samples[offset / sizeof (int16_t)], tmp, s->dtx);
            if (ret <= 0 || ret > 32) {
                ms_warning("Encoder returned %i", ret);
                encode_failed = 1;
                break;
            }
            memcpy(om->b_wptr, &tmp[1], ret - 1);
            om->b_wptr += ret - 1;
            *(++tocs) = tmp[0] | 0x80; // Not last payload
        }
        if (encode_failed) {
            freemsg(om); // drop the partially built packet; om is invalid after freemsg()
            continue;
        }
        *tocs &= 0x7F; // last payload

        mblk_set_timestamp_info(om, s->ts);
        ms_queue_put(f->outputs[0], om);

        s->ts += buff_size / sizeof (int16_t)/*sizeof(buf)/2*/;
    }
}
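The 0xf0 byte and the TOC bookkeeping above implement the octet-aligned AMR payload format of RFC 4867: a CMR byte (0xF0 = no mode request) followed by one table-of-contents entry per frame, where the F bit of each entry says whether another frame follows. A sketch of a single TOC entry (the helper name is ours, for illustration; bit positions per RFC 4867):

#include <stdint.h>

/* Build one octet-aligned AMR TOC entry: |F|FT(4)|Q|padding(2)| (sketch). */
static uint8_t amr_toc_entry(int more_follows, int frame_type, int quality_ok) {
	uint8_t toc = 0;
	if (more_follows) toc |= 0x80;              /* F: another frame follows */
	toc |= (uint8_t)((frame_type & 0x0F) << 3); /* FT: AMR mode, e.g. 7 = 12.2 kbit/s */
	if (quality_ok) toc |= 0x04;                /* Q: frame is not damaged */
	return toc;
}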
Code example #18
File: ilbc.c Project: Distrotech/msilbc
static void enc_process(MSFilter *f){
	EncState *s=(EncState*)f->data;
	mblk_t *im,*om;
	int size=s->nsamples*2;
	int16_t samples[1610]; /* BLOCKL_MAX * 7 is the largest size for ptime == 140 */
	float samples2[BLOCKL_MAX];
	int i;
	int frame_per_packet=1;

	if (s->ptime>=20 && s->ms_per_frame>0 && s->ptime%s->ms_per_frame==0)
	{
		frame_per_packet = s->ptime/s->ms_per_frame;
	}

	if (frame_per_packet<=0)
		frame_per_packet=1;
	if (frame_per_packet>7) /* 7*20 == 140 ms max */
		frame_per_packet=7;

	while((im=ms_queue_get(f->inputs[0]))!=NULL){
		ms_bufferizer_put(s->bufferizer,im);
	}
	while(ms_bufferizer_read(s->bufferizer,(uint8_t*)samples,size*frame_per_packet)==(size*frame_per_packet)){
		int k;
		om=allocb(s->nbytes*frame_per_packet,0);
		for (k=0;k<frame_per_packet;k++)
		{
			for (i=0;i<s->nsamples;i++){
				samples2[i]=samples[i+(s->nsamples*k)];
			}
			iLBC_encode((uint8_t*)om->b_wptr,samples2,&s->ilbc_enc);
			om->b_wptr+=s->nbytes;			
		}
		s->ts+=s->nsamples*frame_per_packet;
		mblk_set_timestamp_info(om,s->ts);
		ms_bufferizer_fill_current_metas(s->bufferizer,om);
		ms_queue_put(f->outputs[0],om);
	}
}
Code example #19
static void v4w_process(MSFilter * obj){
	V4wState *s=(V4wState*)obj->data;
	mblk_t *m;
	uint32_t timestamp;
	int cur_frame;

	if (s->frame_count==-1){
		s->start_time=obj->ticker->time;
		s->frame_count=0;
	}

	cur_frame=(int)((obj->ticker->time-s->start_time)*s->fps/1000.0);
	if (cur_frame>s->frame_count){
		mblk_t *om=NULL;
		ms_mutex_lock(&s->mutex);
		/*keep the most recent frame if several frames have been captured */
		if (s->rotregvalue!=0){
			while((m=getq(&s->rq))!=NULL){
				if (om!=NULL) freemsg(om);
				om=m;
			}
		}else {
			mblk_t *nowebcam = v4w_make_nowebcam(s);
			if (nowebcam!=NULL){
				om=dupmsg(nowebcam);
				mblk_set_precious_flag(om,1);
			}
		}
		ms_mutex_unlock(&s->mutex);
		if (om!=NULL){
			timestamp=(uint32_t)obj->ticker->time*90;/* rtp uses a 90000 Hz clockrate for video*/
			mblk_set_timestamp_info(om,timestamp);
			ms_queue_put(obj->outputs[0],om);
			/*ms_message("picture sent");*/
		}
		s->frame_count++;
	}
}
Code example #20
File: alaw.c Project: Accontech/mediastreamer2
static void alaw_enc_process(MSFilter *obj){
	AlawEncData *dt=(AlawEncData*)obj->data;
	MSBufferizer *bz=dt->bz;
	uint8_t buffer[2240];
	int frame_per_packet=2;
	int size_of_pcm=320;

	mblk_t *m;
	
	if (dt->ptime>=10)
	{
		frame_per_packet = dt->ptime/10;
	}

	if (frame_per_packet<=0)
		frame_per_packet=1;
	if (frame_per_packet>14) /* 14*10 == 140 ms max */
		frame_per_packet=14;

	size_of_pcm = 160*frame_per_packet; /* ex: for 20ms -> 160*2==320 */

	while((m=ms_queue_get(obj->inputs[0]))!=NULL){
		ms_bufferizer_put(bz,m);
	}
	while (ms_bufferizer_read(bz,buffer,size_of_pcm)==size_of_pcm){
		mblk_t *o=allocb(size_of_pcm/2,0);
		int i;
		for (i=0;i<size_of_pcm/2;i++){
			*o->b_wptr=Snack_Lin2Alaw(((int16_t*)buffer)[i]);
			o->b_wptr++;
		}
		ms_bufferizer_fill_current_metas(bz, o);
		mblk_set_timestamp_info(o,dt->ts);
		dt->ts+=size_of_pcm/2;
		ms_queue_put(obj->outputs[0],o);
	}
}
Code example #21
File: theora.c Project: Amini-Philips/mediastreamer2
static void packetize_and_send(MSFilter *f, EncState *s, mblk_t *om, uint32_t timestamp, uint8_t tdt){
	mblk_t *packet;
	mblk_t *h;
	int npackets=0;
	static const int ident=0xdede;
	while(om!=NULL){
		if (om->b_wptr-om->b_rptr>=s->mtu){
			packet=dupb(om);
			packet->b_wptr=packet->b_rptr+s->mtu;
			om->b_rptr=packet->b_wptr;
		}else {
			packet=om;
			om=NULL;
		}
		++npackets;
		h=allocb(6,0);
		if (npackets==1){
			if (om==NULL)
				payload_header_set(h->b_wptr,ident,NOT_FRAGMENTED,tdt,1);
			else
				payload_header_set(h->b_wptr,ident,START_FRAGMENT,tdt,1);
		}else{
			if (om==NULL)
				payload_header_set(h->b_wptr,ident,END_FRAGMENT,tdt,1);
			else
				payload_header_set(h->b_wptr,ident,CONT_FRAGMENT,tdt,1);
		}
		h->b_wptr+=4;
		*((uint16_t*)h->b_wptr)=htons(msgdsize(packet));
		h->b_wptr+=2;
		h->b_cont=packet;
		mblk_set_timestamp_info(h,timestamp);
		ms_debug("sending theora frame of size %i",msgdsize(h));
		ms_queue_put(f->outputs[0],h);
	}
}
Code example #22
static void bb10capture_process(MSFilter *f) {
    BB10Capture *d = (BB10Capture*) f->data;
    mblk_t *om = NULL;
    mblk_t *tmp = NULL;
    uint32_t timestamp = 0;

    ms_filter_lock(f);

    ms_mutex_lock(&d->mutex);
    while ((tmp = ms_queue_get(&d->rq)) != NULL) {
        if (om != NULL) freemsg(om);
        om = tmp;
    }
    ms_mutex_unlock(&d->mutex);

    if (om != NULL) {
        timestamp = f->ticker->time * 90;/* rtp uses a 90000 Hz clockrate for video*/
        mblk_set_timestamp_info(om, timestamp);
        ms_queue_put(f->outputs[0], om);
        ms_average_fps_update(&d->avgfps, f->ticker->time);
    }

    ms_filter_unlock(f);
}
Code example #23
static void dec_process(MSFilter *f){
	DecData *d=(DecData*)f->data;
	MSPicture pic = {0};
	mblk_t *im,*om = NULL;
	bool_t need_reinit=FALSE;
	bool_t request_pli=FALSE;
	MSQueue nalus;
	ms_queue_init(&nalus);

	while((im=ms_queue_get(f->inputs[0]))!=NULL){
		if (d->packet_num==0 && d->sps && d->pps){
			mblk_set_timestamp_info(d->sps,mblk_get_timestamp_info(im));
			mblk_set_timestamp_info(d->pps,mblk_get_timestamp_info(im));
			rfc3984_unpack(&d->unpacker, d->sps, &nalus);
			rfc3984_unpack(&d->unpacker, d->pps, &nalus);
			d->sps=NULL;
			d->pps=NULL;
		}

		if(rfc3984_unpack(&d->unpacker,im,&nalus) <0){
			request_pli=TRUE;
		}
		if (!ms_queue_empty(&nalus)){
			AMediaCodecBufferInfo info;
			int size;
			int width = 0, height = 0, color = 0;
			uint8_t *buf=NULL;
			size_t bufsize;
			ssize_t iBufidx, oBufidx;

			size=nalusToFrame(d,&nalus,&need_reinit);

			if (need_reinit) {
				//In case of rotation, the decoder needs to be flushed in order to restart with the new video size
				AMediaCodec_flush(d->codec);
			}

			iBufidx = AMediaCodec_dequeueInputBuffer(d->codec, TIMEOUT_US);
			if (iBufidx >= 0) {
				buf = AMediaCodec_getInputBuffer(d->codec, iBufidx, &bufsize);
				if(buf == NULL) {
					break;
				}
				if((size_t)size > bufsize) {
					ms_error("Cannot copy the bitstream into the input buffer size : %i and bufsize %i",size,(int) bufsize);
				} else {
					memcpy(buf,d->bitstream,(size_t)size);
					AMediaCodec_queueInputBuffer(d->codec, iBufidx, 0, (size_t)size, TIMEOUT_US, 0);
				}
			}

			oBufidx = AMediaCodec_dequeueOutputBuffer(d->codec, &info, TIMEOUT_US);
			if(oBufidx >= 0){
				AMediaFormat *format;
				buf = AMediaCodec_getOutputBuffer(d->codec, oBufidx, &bufsize);
				if(buf == NULL){
					ms_filter_notify_no_arg(f,MS_VIDEO_DECODER_DECODING_ERRORS);
					break;
				}

				format = AMediaCodec_getOutputFormat(d->codec);
				if(format != NULL){
					AMediaFormat_getInt32(format, "width", &width);
					AMediaFormat_getInt32(format, "height", &height);
					AMediaFormat_getInt32(format, "color-format", &color);

					d->vsize.width=width;
					d->vsize.height=height;
					AMediaFormat_delete(format);
				}
			}

			if(buf != NULL){
				//YUV
				if(width != 0 && height != 0 ){
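					/* color-format 19 is OMX_COLOR_FormatYUV420Planar (I420):
					 * a full Y plane followed by quarter-size U and V planes */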
					if(color == 19) {
						int ysize = width*height;
						int usize = ysize/4;
						om=ms_yuv_buf_alloc(&pic,width,height);
						memcpy(pic.planes[0],buf,ysize);
						memcpy(pic.planes[1],buf+ysize,usize);
						memcpy(pic.planes[2],buf+ysize+usize,usize);
					} else {
						uint8_t* cbcr_src = (uint8_t*) (buf + width * height);
						om = copy_ycbcrbiplanar_to_true_yuv_with_rotation_and_down_scale_by_2(d->buf_allocator, buf, cbcr_src, 0, width, height, width, width, TRUE, FALSE);
					}

					if (!d->first_image_decoded) {
						ms_message("First frame decoded %ix%i",width,height);
						d->first_image_decoded = true;
						ms_filter_notify_no_arg(f, MS_VIDEO_DECODER_FIRST_IMAGE_DECODED);
					}

					ms_queue_put(f->outputs[0], om);
				}

				if(oBufidx >= 0) { /* buffer index 0 is valid and must be released too */
					AMediaCodec_releaseOutputBuffer(d->codec, oBufidx, FALSE);
				}
			}
		}
		d->packet_num++;
	}

	if (d->avpf_enabled && request_pli) {
		ms_filter_notify_no_arg(f, MS_VIDEO_DECODER_SEND_PLI);
	}
}
Code example #24
void MSOpenH264Decoder::feed()
{
    if (!isInitialized()) {
        ms_error("MSOpenH264Decoder::feed(): not initialized");
        ms_queue_flush(mFilter->inputs[0]);
        return;
    }

    MSQueue nalus;
    ms_queue_init(&nalus);

    mblk_t *im;
    while ((im = ms_queue_get(mFilter->inputs[0])) != NULL) {
        if ((getIDRPicId() == 0) && (mSPS != 0) && (mPPS != 0)) {
            // Push the sps/pps given in sprop-parameter-sets if any
            mblk_set_timestamp_info(mSPS, mblk_get_timestamp_info(im));
            mblk_set_timestamp_info(mPPS, mblk_get_timestamp_info(im));
            rfc3984_unpack(mUnpacker, mSPS, &nalus);
            rfc3984_unpack(mUnpacker, mPPS, &nalus);
            mSPS = 0;
            mPPS = 0;
        }
        rfc3984_unpack(mUnpacker, im, &nalus);
        if (!ms_queue_empty(&nalus)) {
            void * pData[3] = { 0 };
            SBufferInfo sDstBufInfo = { 0 };
            int len = nalusToFrame(&nalus);

            DECODING_STATE state = mDecoder->DecodeFrame2(mBitstream, len, (uint8_t**)pData, &sDstBufInfo);
            if (state != dsErrorFree) {
                ms_error("OpenH264 decoder: DecodeFrame2 failed: 0x%x", state);
                if (((mFilter->ticker->time - mLastErrorReportTime) > 5000) || (mLastErrorReportTime == 0)) {
                    mLastErrorReportTime = mFilter->ticker->time;
                    ms_filter_notify_no_arg(mFilter, MS_VIDEO_DECODER_DECODING_ERRORS);
                }
            }
            if (sDstBufInfo.iBufferStatus == 1) {
                uint8_t * pDst[3] = { 0 };
                pDst[0] = (uint8_t *)pData[0];
                pDst[1] = (uint8_t *)pData[1];
                pDst[2] = (uint8_t *)pData[2];

                // Update video size and (re)allocate YUV buffer if needed
                if ((mWidth != sDstBufInfo.UsrData.sSystemBuffer.iWidth)
                        || (mHeight != sDstBufInfo.UsrData.sSystemBuffer.iHeight)) {
                    if (mYUVMsg) {
                        freemsg(mYUVMsg);
                    }
                    mWidth = sDstBufInfo.UsrData.sSystemBuffer.iWidth;
                    mHeight = sDstBufInfo.UsrData.sSystemBuffer.iHeight;
                    mYUVMsg = ms_yuv_buf_alloc(&mOutbuf, mWidth, mHeight);
                    ms_filter_notify_no_arg(mFilter,MS_FILTER_OUTPUT_FMT_CHANGED);
                }

                // Scale/copy frame to destination mblk_t
                for (int i = 0; i < 3; i++) {
                    uint8_t *dst = mOutbuf.planes[i];
                    uint8_t *src = pDst[i];
                    int h = mHeight >> (( i > 0) ? 1 : 0);

                    for(int j = 0; j < h; j++) {
                        memcpy(dst, src, mOutbuf.strides[i]);
                        dst += mOutbuf.strides[i];
                        src += sDstBufInfo.UsrData.sSystemBuffer.iStride[(i == 0) ? 0 : 1];
                    }
                }
                ms_queue_put(mFilter->outputs[0], dupmsg(mYUVMsg));

                // Update average FPS
                if (ms_average_fps_update(&mFPS, mFilter->ticker->time)) {
                    ms_message("OpenH264 decoder: Frame size: %dx%d", mWidth, mHeight);
                }

                // Notify first decoded image
                if (!mFirstImageDecoded) {
                    mFirstImageDecoded = true;
                    ms_filter_notify_no_arg(mFilter, MS_VIDEO_DECODER_FIRST_IMAGE_DECODED);
                }

#if MSOPENH264_DEBUG
                ms_message("OpenH264 decoder: IDR pic id: %d, Frame num: %d, Temporal id: %d, VCL NAL: %d", getIDRPicId(), getFrameNum(), getTemporalId(), getVCLNal());
#endif
            }
        }
    }
}
Code example #25
File: msopus.c Project: xiaolds/VideoCallVoIP
static void ms_opus_enc_process(MSFilter *f) {
	OpusEncData *d = (OpusEncData *)f->data;
	mblk_t *im;
	mblk_t *om = NULL;
	int i;
	int frameNumber, packet_size;
	uint8_t *signalFrameBuffer = NULL;
	uint8_t *codedFrameBuffer[MAX_INPUT_FRAMES];
	OpusRepacketizer *rp = opus_repacketizer_create();
	opus_int32 ret = 0;
	opus_int32 totalLength = 0;
	int frame_size = d->samplerate * FRAME_LENGTH / 1000; /* in samples */

	// lock the access while getting ptime
	ms_filter_lock(f);
	frameNumber = d->ptime/FRAME_LENGTH; /* encode 20ms frames, ptime is a multiple of 20ms */
	packet_size = d->samplerate * d->ptime / 1000; /* in samples */
	ms_filter_unlock(f);


	while ((im = ms_queue_get(f->inputs[0])) != NULL) {
		ms_bufferizer_put(d->bufferizer, im);
	}

	for (i=0; i<MAX_INPUT_FRAMES; i++) {
		codedFrameBuffer[i]=NULL;
	}
	while (ms_bufferizer_get_avail(d->bufferizer) >= (d->channels * packet_size * SIGNAL_SAMPLE_SIZE)) {
		totalLength = 0;
		opus_repacketizer_init(rp);
		for (i=0; i<frameNumber; i++) { /* encode 20ms by 20ms and repacketize all of them together */
			if (!codedFrameBuffer[i]) codedFrameBuffer[i] = ms_malloc(MAX_BYTES_PER_FRAME); /* the repacketizer needs the packet pointers to remain valid, so we keep a buffer for each coded frame */
			if (!signalFrameBuffer) signalFrameBuffer = ms_malloc(frame_size * SIGNAL_SAMPLE_SIZE * d->channels);

			ms_bufferizer_read(d->bufferizer, signalFrameBuffer, frame_size * SIGNAL_SAMPLE_SIZE * d->channels);
			ret = opus_encode(d->state, (opus_int16 *)signalFrameBuffer, frame_size, codedFrameBuffer[i], MAX_BYTES_PER_FRAME);
			if (ret < 0) {
				ms_error("Opus encoder error: %s", opus_strerror(ret));
				break;
			}
			if (ret > 0) {
				int err = opus_repacketizer_cat(rp, codedFrameBuffer[i], ret); /* add the encoded frame into the current packet */
				if (err != OPUS_OK) {
					ms_error("Opus repacketizer error: %s", opus_strerror(err));
					break;
				}
				totalLength += ret;
			}
		}

		if (ret > 0) {
			om = allocb(totalLength+frameNumber + 1, 0); /* opus repacketizer API: allocate at least number of frames + size of all data added before */
			ret = opus_repacketizer_out(rp, om->b_wptr, totalLength+frameNumber);

			om->b_wptr += ret;
			mblk_set_timestamp_info(om, d->ts);
			ms_queue_put(f->outputs[0], om);
			d->ts += packet_size*48000/d->samplerate; /* RFC payload RTP opus 03 - section 4: RTP timestamp multiplier : WARNING works only with sr at 48000 */
			ret = 0;
		}
	}

	opus_repacketizer_destroy(rp);

	if (signalFrameBuffer != NULL) {
		ms_free(signalFrameBuffer);
	}
	for (i=0; i<frameNumber; i++) {
		if (codedFrameBuffer[i] != NULL) {
			ms_free(codedFrameBuffer[i]);
		}
	}
}
Code example #26
static void dec_process(MSFilter *f) {
	DecData *d = (DecData*)f->data;
	mblk_t *im = NULL;
	MSQueue nalus;
	mblk_t *decodeM = NULL;	
	int in_size = 0;
	jint out_size = 0;
	mblk_t *oneNalu = NULL;
	JNIEnv *jni_env = NULL;
	JavaVM *jvm = ms_get_jvm();
	static char start_code[4] = {0, 0, 0, 1};
	
	(*jvm)->AttachCurrentThread(jvm, &jni_env, NULL);	
	
	ms_queue_init(&nalus);	
	while((im = ms_queue_get(f->inputs[0])) != NULL) {
		/*push the sps/pps given in sprop-parameter-sets if any*/
		if (d->packet_num == 0 && d->sps && d->pps) {
			mblk_set_timestamp_info(d->sps,mblk_get_timestamp_info(im));
			mblk_set_timestamp_info(d->pps,mblk_get_timestamp_info(im));
			rfc3984_unpack(&d->unpacker,d->sps,&nalus);
			rfc3984_unpack(&d->unpacker,d->pps,&nalus);
			d->sps = NULL;
			d->pps = NULL;
		}
		rfc3984_unpack(&d->unpacker,im,&nalus);
		
		while((oneNalu = ms_queue_get(&nalus)) != NULL) {
			in_size = oneNalu->b_wptr - oneNalu->b_rptr;
			
			if (GET_TYPE(oneNalu->b_rptr[0]) == SPS_TYPE) {
				if (d->sps == NULL) {
					FMS_WARN("dec_process get sps\n");
					d->sps = allocb(in_size + START_CODE_LEN, 0);
					if (d->sps) {
						memcpy(d->sps->b_rptr, start_code, START_CODE_LEN);
						memcpy(d->sps->b_rptr + START_CODE_LEN, oneNalu->b_rptr, in_size);
						d->sps->b_wptr += in_size + START_CODE_LEN;
					}
				} else {
					freemsg(oneNalu);
					continue;						
				}
			} else if (GET_TYPE(oneNalu->b_rptr[0]) == PPS_TYPE) {
				if (d->pps == NULL && d->sps != NULL) {
					FMS_WARN("dec_process get pps\n");
					d->pps = allocb(in_size + START_CODE_LEN, 0);
					if (d->pps) {
						memcpy(d->pps->b_rptr, start_code, START_CODE_LEN);
						memcpy(d->pps->b_rptr + START_CODE_LEN, oneNalu->b_rptr, in_size);
						d->pps->b_wptr += in_size + START_CODE_LEN;
					}
				} else {
					freemsg(oneNalu);
					continue;	
				}
			}
			
			if (d->sps == NULL || (GET_TYPE(oneNalu->b_rptr[0]) != SPS_TYPE && d->pps == NULL)) {
				FMS_WARN("skip frame without no sps and pps\n");
				freemsg(oneNalu);
				continue;			
			}
			if (!d->IsRecivedFirstIframe && GET_TYPE(oneNalu->b_rptr[0]) != SPS_TYPE 
				&& GET_TYPE(oneNalu->b_rptr[0]) != PPS_TYPE) {
				if (GET_TYPE(oneNalu->b_rptr[0]) == IDR_TYPE) {
					d->IsRecivedFirstIframe = TRUE;
				} else {
					FMS_WARN("skip frame without the first IDR\n");
					freemsg(oneNalu);
					continue;
				}
			}

			(*jni_env)->SetByteArrayRegion(jni_env, d->input_data, 0, START_CODE_LEN, (jbyte*)start_code);
			(*jni_env)->SetByteArrayRegion(jni_env, d->input_data, START_CODE_LEN, in_size, (jbyte*)oneNalu->b_rptr);
	 
			out_size = (*jni_env)->CallIntMethod(jni_env, d->h264_decode_obj, d->h264_decode_id,
				                                     d->input_data, in_size + START_CODE_LEN, d->output_data);
			if (out_size <= 0) {
				freemsg(oneNalu);
				continue;
			}
				
			(*jni_env)->GetByteArrayRegion(jni_env, d->output_data, 0, out_size, 
				                              (jbyte*)d->bitstream);
		
			if (FALSE == d->IsFirstImageDec) {
				d->IsFirstImageDec = TRUE;
				//ms_filter_notify_no_arg(f, MS_VIDEO_DECODER_GET_FIRST_VIDEO_FRAME);
			}
			
			decodeM = get_as_yuvmsg(f, d, d->bitstream, out_size);
			if (decodeM) {
				ms_queue_put(f->outputs[0], decodeM);
			}
	
			freemsg(oneNalu);
		}
		d->packet_num++;
	}
	(*jvm)->DetachCurrentThread(jvm);
	
}
Code example #27
File: msopus.c Project: Distrotech/mediastreamer2
static void ms_opus_enc_process(MSFilter *f) {
	OpusEncData *d = (OpusEncData *)f->data;
	OpusRepacketizer *repacketizer = NULL;
	mblk_t *om = NULL;
	int packet_size, pcm_buffer_size;
	int max_frame_byte_size, ptime = 20;
	int frame_count = 0, frame_size = 0;
	opus_int32 total_length = 0;
	uint8_t *repacketizer_frame_buffer[MAX_INPUT_FRAMES];
	int i;
	ms_filter_lock(f);
	ptime = d->ptime;
	packet_size = d->samplerate * ptime / 1000; /* in samples */
	ms_filter_unlock(f);
	
	switch (ptime) {
		case 10:
			frame_size = d->samplerate * 10 / 1000;
			frame_count = 1;
			break;
		case 20:
			frame_size = d->samplerate * 20 / 1000;
			frame_count = 1;
			break;
		case 40:
			frame_size = d->samplerate * 40 / 1000;
			frame_count = 1;
			break;
		case 60:
			frame_size = d->samplerate * 60 / 1000;
			frame_count = 1;
			break;
		case 80:
			frame_size = d->samplerate * 40 / 1000;
			frame_count = 2;
			break;
		case 100:
			frame_size = d->samplerate * 20 / 1000;
			frame_count = 5;
			break;
		case 120:
			frame_size = d->samplerate * 60 / 1000;
			frame_count = 2;
			break;
		default:
			frame_size = d->samplerate * 20 / 1000;
			frame_count = 1;
	}

	max_frame_byte_size = MAX_BYTES_PER_MS * ptime/frame_count;

	pcm_buffer_size = d->channels * frame_size * SIGNAL_SAMPLE_SIZE;
	if (pcm_buffer_size > d->pcmbufsize){
		if (d->pcmbuffer) ms_free(d->pcmbuffer);
		d->pcmbuffer = ms_malloc(pcm_buffer_size);
		d->pcmbufsize = pcm_buffer_size;
	}
	for (i=0; i<MAX_INPUT_FRAMES; i++) {
		repacketizer_frame_buffer[i]=NULL;
	}

	ms_bufferizer_put_from_queue(d->bufferizer, f->inputs[0]);
	while (ms_bufferizer_get_avail(d->bufferizer) >= (d->channels * packet_size * SIGNAL_SAMPLE_SIZE)) {
		opus_int32 ret = 0;

		if (frame_count == 1) { /* One Opus frame, not using the repacketizer */
			om = allocb(max_frame_byte_size, 0);
			ms_bufferizer_read(d->bufferizer, d->pcmbuffer, frame_size * SIGNAL_SAMPLE_SIZE * d->channels);
			ret = opus_encode(d->state, (opus_int16 *)d->pcmbuffer, frame_size, om->b_wptr, max_frame_byte_size);
			if (ret < 0) {
				freemsg(om);
				om=NULL;
				ms_error("Opus encoder error: %s", opus_strerror(ret));
				break;
			} else {
				total_length = ret;
				om->b_wptr += total_length;
			}
		} else if(frame_count > 1) { /* multiple Opus frames: use the Opus repacketizer */

			repacketizer = opus_repacketizer_create();
			opus_repacketizer_init(repacketizer);

			/* Do not include FEC/LBRR in any frame after the first one since it will be sent with the previous one */
			ret = opus_encoder_ctl(d->state, OPUS_SET_INBAND_FEC(0));
			if (ret != OPUS_OK) {
				ms_error("could not set inband FEC to opus encoder: %s", opus_strerror(ret));
			}
			for (i=0; i<frame_count; i++) {
				if(frame_count == i+1){ /* if configured, reactivate FEC on the last frame to tell the encoder it should restart saving LBRR frames */
					ret = opus_encoder_ctl(d->state, OPUS_SET_INBAND_FEC(d->useinbandfec));
					if (ret != OPUS_OK) {
						ms_error("could not set inband FEC to opus encoder: %s", opus_strerror(ret));
					}
				}
				if (!repacketizer_frame_buffer[i]) repacketizer_frame_buffer[i] = ms_malloc(max_frame_byte_size); /* the repacketizer needs the packet pointers to remain valid, so we keep a buffer for each coded frame */
				ms_bufferizer_read(d->bufferizer, d->pcmbuffer, frame_size * SIGNAL_SAMPLE_SIZE * d->channels);
				ret = opus_encode(d->state, (opus_int16 *)d->pcmbuffer, frame_size, repacketizer_frame_buffer[i], max_frame_byte_size);
				if (ret < 0) {
					ms_error("Opus encoder error: %s", opus_strerror(ret));
					break;
				} else if (ret > 0) {
					int err = opus_repacketizer_cat(repacketizer, repacketizer_frame_buffer[i], ret); /* add the encoded frame into the current packet */
					if (err != OPUS_OK) {
						ms_error("Opus repacketizer error: %s", opus_strerror(err));
						break;
					}
					total_length += ret;
				}
			}

			om = allocb(total_length + frame_count + 1, 0); /* opus repacketizer API: allocate at least number of frames + size of all data added before */
			ret = opus_repacketizer_out(repacketizer, om->b_wptr, total_length+frame_count);
			if(ret < 0){
				freemsg(om);
				om=NULL;
				ms_error("Opus repacketizer out error: %s", opus_strerror(ret));
			} else {
				om->b_wptr += ret;
			}
			opus_repacketizer_destroy(repacketizer);
			for (i=0; i<frame_count; i++) {
				if (repacketizer_frame_buffer[i] != NULL) {
					ms_free(repacketizer_frame_buffer[i]);
				}
			}
		}

		if(om) { /* we have an encoded output message */
			mblk_set_timestamp_info(om, d->ts);
			ms_bufferizer_fill_current_metas(d->bufferizer, om);
			ms_queue_put(f->outputs[0], om);
			d->ts += packet_size*48000/d->samplerate; /* RFC payload RTP opus 03 - section 4: RTP timestamp multiplier : WARNING works only with sr at 48000 */
			total_length = 0;
		}
	}

}
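Both Opus examples drive the same repacketizer call sequence: create/init, cat each encoded frame (the frame buffers must stay valid until the final out call), then out into a buffer of at least total_length + frame_count + 1 bytes. A condensed sketch of that sequence for two frames (error handling trimmed; the helper name is ours):

#include <opus/opus.h>

/* Merge two already-encoded Opus frames into one packet (sketch). */
static opus_int32 merge_two_frames(const unsigned char *f1, opus_int32 l1,
                                   const unsigned char *f2, opus_int32 l2,
                                   unsigned char *out, opus_int32 outmax) {
	OpusRepacketizer *rp = opus_repacketizer_create();
	opus_int32 ret;
	/* the frame buffers must remain valid until opus_repacketizer_out() */
	opus_repacketizer_cat(rp, f1, l1);
	opus_repacketizer_cat(rp, f2, l2);
	ret = opus_repacketizer_out(rp, out, outmax); /* needs outmax >= l1+l2+2+1 */
	opus_repacketizer_destroy(rp);
	return ret;
}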
Code example #28
static void send_packet(MSQueue *rtpq, uint32_t ts, mblk_t *m, bool_t marker){
	mblk_set_timestamp_info(m,ts);
	mblk_set_marker_info(m,marker);
	ms_queue_put(rtpq,m);
}
Code example #29
static void dec_process(MSFilter *f){
	DecData *d=(DecData*)f->data;
	MSPicture pic = {0};
	mblk_t *im,*om = NULL;
	ssize_t oBufidx = -1;
	size_t bufsize;
	bool_t need_reinit=FALSE;
	bool_t request_pli=FALSE;
	MSQueue nalus;
	AMediaCodecBufferInfo info;
	
	ms_queue_init(&nalus);

	while((im=ms_queue_get(f->inputs[0]))!=NULL){
		if (d->packet_num==0 && d->sps && d->pps){
			mblk_set_timestamp_info(d->sps,mblk_get_timestamp_info(im));
			mblk_set_timestamp_info(d->pps,mblk_get_timestamp_info(im));
			rfc3984_unpack(&d->unpacker, d->sps, &nalus);
			rfc3984_unpack(&d->unpacker, d->pps, &nalus);
			d->sps=NULL;
			d->pps=NULL;
		}

		if(rfc3984_unpack(&d->unpacker,im,&nalus) <0){
			request_pli=TRUE;
		}
		if (!ms_queue_empty(&nalus)){
			int size;
			uint8_t *buf=NULL;
			ssize_t iBufidx;

			size=nalusToFrame(d,&nalus,&need_reinit);

			if (need_reinit) {
				//In case of rotation, the decoder needs to be flushed in order to restart with the new video size
				AMediaCodec_flush(d->codec);
				d->first_buffer_queued = FALSE;
			}

			/*First put our H264 bitstream into the decoder*/
			iBufidx = AMediaCodec_dequeueInputBuffer(d->codec, TIMEOUT_US);
			if (iBufidx >= 0) {
				buf = AMediaCodec_getInputBuffer(d->codec, iBufidx, &bufsize);
				if(buf == NULL) {
					ms_error("MSMediaCodecH264Dec: AMediaCodec_getInputBuffer() returned NULL");
					break;
				}
				if((size_t)size > bufsize) {
					ms_error("Cannot copy the bitstream into the input buffer size : %i and bufsize %i",size,(int) bufsize);
					break;
				} else {
					struct timespec ts;
					clock_gettime(CLOCK_MONOTONIC, &ts);
					memcpy(buf,d->bitstream,(size_t)size);
					AMediaCodec_queueInputBuffer(d->codec, iBufidx, 0, (size_t)size, (ts.tv_nsec/1000) + 10000LL, 0);
					d->first_buffer_queued = TRUE;
				}
			}else if (iBufidx == AMEDIA_ERROR_UNKNOWN){
				ms_error("MSMediaCodecH264Dec: AMediaCodec_dequeueInputBuffer() had an exception");
			}
		}
		d->packet_num++;
		if (d->sps && d->pps) request_pli = FALSE;
		else request_pli = TRUE;
	}
	
	/*secondly try to get decoded frames from the decoder, this is performed every tick*/
	while (d->first_buffer_queued && (oBufidx = AMediaCodec_dequeueOutputBuffer(d->codec, &info, TIMEOUT_US)) >= 0){
		AMediaFormat *format;
		int width = 0, height = 0, color = 0;
		uint8_t *buf = AMediaCodec_getOutputBuffer(d->codec, oBufidx, &bufsize);
		
		if(buf == NULL){
			ms_filter_notify_no_arg(f,MS_VIDEO_DECODER_DECODING_ERRORS);
			ms_error("MSMediaCodecH264Dec: AMediaCodec_getOutputBuffer() returned NULL");
		}

		format = AMediaCodec_getOutputFormat(d->codec);
		if(format != NULL){
			AMediaFormat_getInt32(format, "width", &width);
			AMediaFormat_getInt32(format, "height", &height);
			AMediaFormat_getInt32(format, "color-format", &color);

			d->vsize.width=width;
			d->vsize.height=height;
			AMediaFormat_delete(format);
		}

		if(buf != NULL && d->sps && d->pps){ /*some decoders output garbage while no sps or pps have been received yet !*/
			if(width != 0 && height != 0 ){
				if(color == 19) {
					//YUV: color-format 19 is OMX_COLOR_FormatYUV420Planar (I420)
					int ysize = width*height;
					int usize = ysize/4;
					om = ms_yuv_buf_allocator_get(d->buf_allocator,&pic,width,height);
					memcpy(pic.planes[0],buf,ysize);
					memcpy(pic.planes[1],buf+ysize,usize);
					memcpy(pic.planes[2],buf+ysize+usize,usize);
				} else {
					uint8_t* cbcr_src = (uint8_t*) (buf + width * height);
					om = copy_ycbcrbiplanar_to_true_yuv_with_rotation_and_down_scale_by_2(d->buf_allocator, buf, cbcr_src, 0, width, height, width, width, TRUE, FALSE);
				}

				if (!d->first_image_decoded) {
					ms_message("First frame decoded %ix%i",width,height);
					d->first_image_decoded = true;
					ms_filter_notify_no_arg(f, MS_VIDEO_DECODER_FIRST_IMAGE_DECODED);
				}
				ms_queue_put(f->outputs[0], om);
			}else{
				ms_error("MSMediaCodecH264Dec: width and height are not known !");
			}
		}
		AMediaCodec_releaseOutputBuffer(d->codec, oBufidx, FALSE);
	}
	if (oBufidx == AMEDIA_ERROR_UNKNOWN){
		ms_error("MSMediaCodecH264Dec: AMediaCodec_dequeueOutputBuffer() had an exception");
	}

	if (d->avpf_enabled && request_pli) {
		ms_filter_notify_no_arg(f, MS_VIDEO_DECODER_SEND_PLI);
	}
	ms_queue_flush(f->inputs[0]);
}
Code example #30
File: vp8.c Project: Accontech/mediastreamer2
static void enc_process(MSFilter *f) {
	mblk_t *im;
	uint64_t timems = f->ticker->time;
	uint32_t timestamp = (uint32_t)(timems*90);
	EncState *s = (EncState *)f->data;
	unsigned int flags = 0;
	vpx_codec_err_t err;
	MSPicture yuv;
	bool_t is_ref_frame=FALSE;

	ms_filter_lock(f);

#ifdef AVPF_DEBUG
	ms_message("VP8 enc_process:");
#endif

	if (!s->ready) {
		ms_queue_flush(f->inputs[0]);
		ms_filter_unlock(f);
		return;
	}

	if ((im = ms_queue_peek_last(f->inputs[0])) != NULL) {
		vpx_image_t img;

		flags = 0;
		ms_yuv_buf_init_from_mblk(&yuv, im);
		vpx_img_wrap(&img, VPX_IMG_FMT_I420, s->vconf.vsize.width, s->vconf.vsize.height, 1, yuv.planes[0]);

		if ((s->avpf_enabled != TRUE) && ms_video_starter_need_i_frame(&s->starter, f->ticker->time)) {
			s->force_keyframe = TRUE;
		}
		if (s->force_keyframe == TRUE) {
			ms_message("Forcing vp8 key frame for filter [%p]", f);
			flags = VPX_EFLAG_FORCE_KF;
		} else if (s->avpf_enabled == TRUE) {
			if (s->frame_count == 0) s->force_keyframe = TRUE;
			enc_fill_encoder_flags(s, &flags);
		}

#ifdef AVPF_DEBUG
		ms_message("VP8 encoder frames state:");
		ms_message("\tgolden: count=%" PRIi64 ", picture_id=0x%04x, ack=%s",
			s->frames_state.golden.count, s->frames_state.golden.picture_id, (s->frames_state.golden.acknowledged == TRUE) ? "Y" : "N");
		ms_message("\taltref: count=%" PRIi64 ", picture_id=0x%04x, ack=%s",
			s->frames_state.altref.count, s->frames_state.altref.picture_id, (s->frames_state.altref.acknowledged == TRUE) ? "Y" : "N");
#endif
		err = vpx_codec_encode(&s->codec, &img, s->frame_count, 1, flags, 1000000LL/(2*(int)s->vconf.fps)); /*encoder has half a framerate interval to encode*/
		if (err) {
			ms_error("vpx_codec_encode failed : %d %s (%s)\n", err, vpx_codec_err_to_string(err), vpx_codec_error_detail(&s->codec));
		} else {
			vpx_codec_iter_t iter = NULL;
			const vpx_codec_cx_pkt_t *pkt;
			MSList *list = NULL;

			/* Update the frames state. */
			is_ref_frame=FALSE;
			if (flags & VPX_EFLAG_FORCE_KF) {
				enc_mark_reference_frame_as_sent(s, VP8_GOLD_FRAME);
				enc_mark_reference_frame_as_sent(s, VP8_ALTR_FRAME);
				s->frames_state.golden.is_independant=TRUE;
				s->frames_state.altref.is_independant=TRUE;
				s->frames_state.last_independent_frame=s->frame_count;
				s->force_keyframe = FALSE;
				is_ref_frame=TRUE;
			}else if (flags & VP8_EFLAG_FORCE_GF) {
				enc_mark_reference_frame_as_sent(s, VP8_GOLD_FRAME);
				is_ref_frame=TRUE;
			}else if (flags & VP8_EFLAG_FORCE_ARF) {
				enc_mark_reference_frame_as_sent(s, VP8_ALTR_FRAME);
				is_ref_frame=TRUE;
			}else if (flags & VP8_EFLAG_NO_REF_LAST) {
				enc_mark_reference_frame_as_sent(s, VP8_LAST_FRAME);
				is_ref_frame=is_reconstruction_frame_sane(s,flags);
			}
			if (is_frame_independent(flags)){
				s->frames_state.last_independent_frame=s->frame_count;
			}

			/* Pack the encoded frame. */
			while( (pkt = vpx_codec_get_cx_data(&s->codec, &iter)) ) {
				if ((pkt->kind == VPX_CODEC_CX_FRAME_PKT) && (pkt->data.frame.sz > 0)) {
					Vp8RtpFmtPacket *packet = ms_new0(Vp8RtpFmtPacket, 1);

					packet->m = allocb(pkt->data.frame.sz, 0);
					memcpy(packet->m->b_wptr, pkt->data.frame.buf, pkt->data.frame.sz);
					packet->m->b_wptr += pkt->data.frame.sz;
					mblk_set_timestamp_info(packet->m, timestamp);
					packet->pd = ms_new0(Vp8RtpFmtPayloadDescriptor, 1);
					packet->pd->start_of_partition = TRUE;
					packet->pd->non_reference_frame = s->avpf_enabled && !is_ref_frame;
					if (s->avpf_enabled == TRUE) {
						packet->pd->extended_control_bits_present = TRUE;
						packet->pd->pictureid_present = TRUE;
						packet->pd->pictureid = s->picture_id;
					} else {
						packet->pd->extended_control_bits_present = FALSE;
						packet->pd->pictureid_present = FALSE;
					}
					if (s->flags & VPX_CODEC_USE_OUTPUT_PARTITION) {
						packet->pd->pid = (uint8_t)pkt->data.frame.partition_id;
						if (!(pkt->data.frame.flags & VPX_FRAME_IS_FRAGMENT)) {
							mblk_set_marker_info(packet->m, TRUE);
						}
					} else {
						packet->pd->pid = 0;
						mblk_set_marker_info(packet->m, TRUE);
					}
					list = ms_list_append(list, packet);
				}
			}

#ifdef AVPF_DEBUG
			ms_message("VP8 encoder picture_id=%i ***| %s | %s | %s | %s", (int)s->picture_id,
				(flags & VPX_EFLAG_FORCE_KF) ? "KF " : (flags & VP8_EFLAG_FORCE_GF) ? "GF " :  (flags & VP8_EFLAG_FORCE_ARF) ? "ARF" : "   ",
				(flags & VP8_EFLAG_NO_REF_GF) ? "NOREFGF" : "       ",
				(flags & VP8_EFLAG_NO_REF_ARF) ? "NOREFARF" : "        ",
				(flags & VP8_EFLAG_NO_REF_LAST) ? "NOREFLAST" : "         ");
#endif

			vp8rtpfmt_packer_process(&s->packer, list, f->outputs[0], f->factory);

			/* Handle video starter if AVPF is not enabled. */
			s->frame_count++;
			if ((s->avpf_enabled != TRUE) && (s->frame_count == 1)) {
				ms_video_starter_first_frame(&s->starter, f->ticker->time);
			}

			/* Increment the pictureID. */
			s->picture_id++;
#ifdef PICTURE_ID_ON_16_BITS
			if (s->picture_id == 0)
				s->picture_id = 0x8000;
#else
			if (s->picture_id == 0x0080)
				s->picture_id = 0;
#endif
		}
	}
	ms_filter_unlock(f);
	ms_queue_flush(f->inputs[0]);
}