コード例 #1
0
/* Decoder thread: pulls encoded MPEG-4 frames from the exchange, feeds
 * them through XviD, and delivers the decoded UYVY frames.
 * Runs forever; `d` is the owning struct mpeg4_decoder. */
static void *mpeg4_loop( void *d )
{
	struct mpeg4_decoder *en = (struct mpeg4_decoder *)d;
	xvid_dec_frame_t xvid_dec_frame;
	xvid_dec_stats_t xvid_dec_stats;
	struct frame *out, *input;
	int used, pos;

	for(;;)
	{
		input = get_next_frame( en->ex, 1 );

		/* Restart the codec on request, and lazily initialize it
		 * from the first frame we see. */
		if( en->reset_pending && en->xvid_handle ) mpeg4_stop( en );
		if( ! en->xvid_handle ) mpeg4_start( en, input );

		out = new_frame();
		out->width = en->width;
		out->height = en->height;

		pos = 0;

		/* One input frame may contain several chunks (e.g. a VOL
		 * header followed by a VOP); decode until it is consumed. */
		while( input->length - pos > 0 )
		{
			memset( &xvid_dec_frame, 0, sizeof( xvid_dec_frame ) );
			xvid_dec_frame.version = XVID_VERSION;
			xvid_dec_frame.general = 0;
			xvid_dec_frame.bitstream = input->d + pos;
			xvid_dec_frame.length = input->length - pos;
			xvid_dec_frame.output.plane[0] = out->d;
			xvid_dec_frame.output.stride[0] = 2 * out->width;
			xvid_dec_frame.output.csp = XVID_CSP_UYVY;
			xvid_dec_stats.version = XVID_VERSION;

			/* Returns the number of bytes consumed, or < 0 on
			 * error. */
			used = xvid_decore( en->xvid_handle, XVID_DEC_DECODE,
					&xvid_dec_frame, &xvid_dec_stats );
			if( used < 0 )
			{
				out->length = 0;
				spook_log( SL_WARN, "mpeg4: XviD decoding failed!" );
				/* BUG FIX: the original fell through to
				 * "pos += used", which with used < 0 rewound
				 * pos and looped forever (or indexed before
				 * input->d).  Abandon this input frame. */
				break;
			}
			if( used == 0 )
				break;	/* no progress; avoid spinning */
			/* A VOL header carries the stream dimensions; pick
			 * them up so subsequent frames are sized correctly. */
			if( xvid_dec_stats.type == XVID_TYPE_VOL )
			{
				out->width = en->width = xvid_dec_stats.data.vol.width;
				out->height = en->height = xvid_dec_stats.data.vol.height;
			}
			pos += used;
		}

		out->format = FORMAT_RAW_UYVY;
		out->length = 2 * out->width * out->height;
		out->key = 1;	/* every raw frame is independently usable */

		deliver_frame( en->ex, out );

		unref_frame( input );
	}

	return NULL;
}
コード例 #2
0
ファイル: sapi.c プロジェクト: daar/libvidcap
/** \note stride-ignorant sapis should pass a stride of zero */
int
sapi_src_capture_notify(struct sapi_src_context * src_ctx,
		char * video_data, int video_data_size,
		int stride,
		int error_status)
{
	/** \note We may be called here by the capture thread while the
	 *        main thread is clearing capture_data and capture_callback
	 *        from within vidcap_src_capture_stop().
	 */

	struct frame_info * const info = &src_ctx->callback_frame;
	const int have_data = (video_data_size >= 1);

	/* Nothing captured and nothing failed: ignore this callback */
	if ( !have_data && !error_status )
	{
		log_info("callback with no data?\n");
		return 0;
	}

	/* Fill in the frame descriptor used for delivery */
	info->video_data = video_data;
	info->video_data_size = video_data_size;
	info->stride = stride;
	info->error_status = error_status;
	info->capture_time = vc_now();

	if ( !src_ctx->use_timer_thread )
	{
		/* no timer thread: deliver synchronously from this thread */
		deliver_frame(src_ctx);
	}
	else
	{
		/* hand the frame to the timer thread via the double buffer */
		double_buffer_write(src_ctx->double_buff, info);

		/* On error, block until the timer thread has acknowledged
		 * it (which it does after delivering it to the app). */
		if ( error_status )
			wait_for_error_ack(src_ctx);
	}

	/* An error terminates capturing: fall back to the bound state
	 * and detach the application callback. */
	if ( error_status )
	{
		src_ctx->src_state = src_bound;
		src_ctx->capture_callback = 0;
		src_ctx->capture_data = VIDCAP_INVALID_USER_DATA;
	}

	return 0;
}
コード例 #3
0
ファイル: input-rtp.c プロジェクト: lubing521/Zheyang
/* RTP capture thread: repeatedly drains all packets available at the
 * current RTP timestamp and forwards each payload as a spook frame.
 * Runs forever; `d` is the owning struct rtp_spook_input. */
static void *capture_loop( void *d )
{
	struct rtp_spook_input *conf = (struct rtp_spook_input *)d;
	struct frame *f;
	unsigned char pkt[38400];
	uint32_t ts = 0;	/* RTP timestamp we are polling for */
	int more, received;

	for(;;)
	{
		/* have_more is set by oRTP when further packets are queued
		 * for this timestamp; keep reading until it clears. */
		more = 1;
		while( more )
		{
			received = rtp_session_recv_with_ts( conf->session,
					pkt, sizeof( pkt ), ts, &more );
			if( received > 0 )
			{
				f = get_next_frame( conf->ex, 0 );
				if( f )
				{
					f->length = received;
					f->format = conf->format;
					f->width = conf->width;
					f->height = conf->height;
					f->key = 1;

					memcpy( f->d, pkt, received );

					deliver_frame( conf->ex, f );
					printf("\r\nDeliver packet with length %d", received);
				}
				else
				{
					/* frame pool exhausted */
					spook_log( SL_WARN, "video: dropping frame" );
				}
			}
		}
		/* advance one 20 ms audio period (8 kHz clock) */
		ts += 160;
	}

	return NULL;
}
コード例 #4
0
ファイル: sapi.c プロジェクト: daar/libvidcap
/* Timer thread: paces frame delivery at the source's nominal frame rate.
 * Sleeps while the source is idle or until the next frame is due, then
 * calls deliver_frame() and schedules the next delivery time.
 * Returns 0 when kill_timer_thread is set. */
unsigned int
STDCALL sapi_src_timer_thread_func(void *args)
{
	struct sapi_src_context * src_ctx = args;
	struct timeval tv_now;
	const long idle_state_sleep_period_ms = 100;
	long sleep_ms = idle_state_sleep_period_ms;
	int first_time = 1;	/* no frame delivered yet in this capture run */
	int got_frame = 0;
	int ret;
	int capture_error = 0;    /** \bug perhaps should exit on error */

	src_ctx->timer_thread_idle = 1;

	tv_now = vc_now();

	/* first frame is due immediately */
	src_ctx->frame_time_next.tv_sec = tv_now.tv_sec;
	src_ctx->frame_time_next.tv_usec = tv_now.tv_usec;

	src_ctx->capture_timer_thread_started = 1;

	while ( !src_ctx->kill_timer_thread )
	{
		tv_now = vc_now();

		/* sleep or read? */
		if ( capture_error || src_ctx->src_state != src_capturing ||
				!tv_greater_or_equal(&tv_now, &src_ctx->frame_time_next) )
		{
			if ( src_ctx->src_state != src_capturing )
			{
				/* not capturing: poll slowly and reset pacing */
				sleep_ms = idle_state_sleep_period_ms;
				first_time = 1;
			}
			else if ( !capture_error )
			{
				/* sleep just enough */
				/* (difference computed in microseconds, then
				 * converted to milliseconds) */
				sleep_ms = ((src_ctx->frame_time_next.tv_sec - tv_now.tv_sec) *
						1000000L + src_ctx->frame_time_next.tv_usec -
						tv_now.tv_usec) / 1000L;
			}
			/* NOTE(review): when capture_error is set, sleep_ms keeps
			 * whatever value it last had — presumably acceptable since
			 * the \bug comment above suggests exiting instead; confirm. */

			if ( sleep_ms < 0 )
				sleep_ms = 0;

			vc_millisleep(sleep_ms);
		}
		else
		{
			src_ctx->timer_thread_idle = 0;
			/** \bug memory barrier needed? */

			/* attempt to read and deliver a frame */
			/* deliver_frame(): 0 = delivered, >0 = capture error
			 * (inferred from the two flags set below — confirm) */
			ret = deliver_frame(src_ctx);

			got_frame = !ret;
			capture_error = ret > 0;

			/* Is this the first frame? */
			if ( got_frame && first_time )
			{
				first_time = 0;

				/* re-initialize when next to check for a frame */
				src_ctx->frame_time_next.tv_sec = tv_now.tv_sec;
				src_ctx->frame_time_next.tv_usec = tv_now.tv_usec;
			}

			if ( !first_time )
			{
				/* update when next to check for a frame */
				/* advance from the previous deadline (not from now)
				 * so long-term rate matches fps_num/fps_den exactly */
				tv_add_usecs(&src_ctx->frame_time_next, &src_ctx->frame_time_next,
						1000000 *
						src_ctx->fmt_nominal.fps_denominator /
						src_ctx->fmt_nominal.fps_numerator);
			}
			else
			{
				/* still no first frame */
				/* update when next to check for a frame */
				/* anchor on the current time until a frame arrives */
				tv_add_usecs(&src_ctx->frame_time_next, &tv_now,
						1000000 *
						src_ctx->fmt_nominal.fps_denominator /
						src_ctx->fmt_nominal.fps_numerator);
			}
		}

		/** \bug memory barrier needed? */
		src_ctx->timer_thread_idle = 1;
	}

	return 0;
}
コード例 #5
0
ファイル: encoder-mpeg4.c プロジェクト: ChenXuJasper/spook
static void *mpeg4_loop( void *d )
{
	struct mpeg4_encoder *en = (struct mpeg4_encoder *)d;
	xvid_enc_frame_t xvid_enc_frame;
	struct frame *mpeg, *input;

	for(;;)
	{
		input = get_next_frame( en->ex, 1 );

		if( en->reset_pending && en->xvid_handle ) mpeg4_stop( en );
		if( ! en->xvid_handle ) mpeg4_start( en, input );

		if( input->width != en->width || input->height != en->height )
		{
			spook_log( SL_WARN,
				"mpeg4: image size changed midstream!" );
			unref_frame( input );
			continue;
		}

		mpeg = new_frame();

		memset( &xvid_enc_frame, 0, sizeof( xvid_enc_frame ) );
		xvid_enc_frame.version = XVID_VERSION;
		xvid_enc_frame.bitstream = mpeg->d;
		xvid_enc_frame.length = -1;
		xvid_enc_frame.input.plane[0] = input->d;
		switch( input->format )
		{
		case FORMAT_RAW_BGR24:
			xvid_enc_frame.input.csp = XVID_CSP_BGR;
			xvid_enc_frame.input.stride[0] = en->width * 3;
			break;
		case FORMAT_RAW_UYVY:
			xvid_enc_frame.input.csp = XVID_CSP_UYVY;
			xvid_enc_frame.input.stride[0] = en->width * 2;
			break;
		}
		xvid_enc_frame.vol_flags = 0;
		xvid_enc_frame.vop_flags = 0;
		xvid_enc_frame.type = XVID_TYPE_AUTO;
		xvid_enc_frame.quant = 0;
		xvid_enc_frame.motion = XVID_ME_ADVANCEDDIAMOND16;
		xvid_enc_frame.quant_intra_matrix = NULL;
		xvid_enc_frame.quant_inter_matrix = NULL;

		mpeg->length = xvid_encore( en->xvid_handle, XVID_ENC_ENCODE,
					&xvid_enc_frame, NULL );
		if( mpeg->length < 0 )
		{
			mpeg->length = 0;
			spook_log( SL_WARN, "mpeg4: XviD encoding failed!" );
		}

		mpeg->format = FORMAT_MPEG4;
		mpeg->width = en->width;
		mpeg->height = en->height;
		mpeg->key = xvid_enc_frame.out_flags & XVID_KEYFRAME;

		deliver_frame( en->ex, mpeg );

		unref_frame( input );
	}

	return NULL;
}
コード例 #6
0
/* Split an H.264 I-frame bitstream into its SPS, PPS and slice NALs
 * (start codes stripped) and deliver each as a separate spook frame.
 * The bitstream is first copied into a lazily-grown staging buffer
 * owned by the encoder (en->pu8IFrameBuf). */
void DeliverIFrame(
	struct h264_encoder		*en,
	uint8_t 			*pu8BitStreamBuf,
	uint32_t 			u32BitStreamLen,
	S_UTIL_H264_FRAME_INFO 	*psFrameInfo
)
{
	struct frame *psH264Frame = NULL;
	int32_t i32TryCnt;
	E_UTIL_H264_NALTYPE_MASK eFrameMask;
	uint32_t u32FrameOffset;
	uint32_t u32FrameLen;

	/* Grow the staging buffer if this bitstream won't fit. */
	if (u32BitStreamLen > en->u32IFrameBufSize) {
		if (en->pu8IFrameBuf) {
			free(en->pu8IFrameBuf);
			en->u32IFrameBufSize = 0;
		}

		/* NOTE(review): 100 bytes of slack are allocated but
		 * u32IFrameBufSize records only u32BitStreamLen below —
		 * presumably intentional headroom; confirm. */
		en->pu8IFrameBuf = malloc(u32BitStreamLen + 100);

		if (en->pu8IFrameBuf) {
			en->u32IFrameBufSize = u32BitStreamLen;
		}
		else {
			en->u32IFrameBufSize = 0;
		}
	}

	if (en->pu8IFrameBuf == NULL) {
		printf("Spook DeliverIFrame: s_pu8IFrameBuf is null\n");
		return;
	}

	memcpy(en->pu8IFrameBuf, pu8BitStreamBuf, u32BitStreamLen);

	eFrameMask = psFrameInfo->eNALType;

	/* Emit NALs in SPS -> PPS -> slice order, clearing each bit as
	 * it is handled; the slice branch ends the loop. */
	while (eFrameMask) {
		if (eFrameMask & eUTIL_H264_NAL_SPS) {
			u32FrameOffset = psFrameInfo->u32SPSOffset + psFrameInfo->u32SPSStartCodeLen;
			u32FrameLen = psFrameInfo->u32SPSLen - psFrameInfo->u32SPSStartCodeLen;
			eFrameMask &= (~eUTIL_H264_NAL_SPS);
		}
		else if (eFrameMask & eUTIL_H264_NAL_PPS) {
			u32FrameOffset = psFrameInfo->u32PPSOffset + psFrameInfo->u32PPSStartCodeLen;
			u32FrameLen = psFrameInfo->u32PPSLen - psFrameInfo->u32PPSStartCodeLen;
			eFrameMask &= (~eUTIL_H264_NAL_PPS);
		}
		else {
			u32FrameOffset = psFrameInfo->u32IPOffset + psFrameInfo->u32IPStartCodeLen;
			u32FrameLen = psFrameInfo->u32IPLen - psFrameInfo->u32IPStartCodeLen;
			eFrameMask = 0;
		}

		/* Frame pool may be transiently empty; retry for up to
		 * ~1 second (100 x 10 ms). */
		i32TryCnt = 100;

		while (i32TryCnt) {
			psH264Frame = new_frame();

			if (psH264Frame)
				break;

			i32TryCnt --;
			usleep(10000);
		}

		if (psH264Frame == NULL) {
			printf("Spook DeliverIFrame: psH264Frame is null\n");
			return;
		}

		if (psH264Frame->size < u32FrameLen) {
			// enlarge frame size
			struct frame *new_frame;
			new_frame = enlarge_frame_buffer(psH264Frame, u32FrameLen + 40);

			if (new_frame) {
				psH264Frame = new_frame;
			}
			else {
				printf("Spook DeliverIFrame: unable enlarge frame buffer size\n");
				unref_frame(psH264Frame);
				/* eFrameMask was already advanced above, so this
				 * drops the current NAL and moves to the next one. */
				continue;
			}
		}

		memcpy(psH264Frame->d, en->pu8IFrameBuf + u32FrameOffset, u32FrameLen);

		psH264Frame->format = FORMAT_H264;
		psH264Frame->width = en->sH264EncConfig.m_asEncPipeInfo[en->eH264EncRes].m_uiWidth;
		psH264Frame->height = en->sH264EncConfig.m_asEncPipeInfo[en->eH264EncRes].m_uiHeight;
		psH264Frame->length = u32FrameLen;

		/* Mark every NAL of an I-frame as a key unit. */
		psH264Frame->key = 1;

		if (deliver_frame(en->ex, psH264Frame) != 0)
			printf("Spook DeliverIFrame: Deliver frame failed\n");
	}
}
コード例 #7
0
/* Deliver a single H.264 P-frame slice (start code stripped) as a
 * spook frame.  The bitstream is copied straight from the caller's
 * buffer; no staging buffer is needed for P-frames. */
void DeliverPFrame(
	struct h264_encoder		*en,
	uint8_t 			*pu8BitStreamBuf,
	uint32_t 			u32BitStreamLen,
	S_UTIL_H264_FRAME_INFO 	*psFrameInfo
)
{
	struct frame *frame = NULL;
	int32_t retry;

	/* Frame pool may be transiently empty; retry for up to
	 * ~1 second (100 x 10 ms). */
	for (retry = 0; retry < 100; retry++) {
		frame = new_frame();

		if (frame != NULL)
			break;

		usleep(10000);
	}

	if (frame == NULL) {
		printf("Spook DeliverPFrame: psH264Frame is null\n");
		return;
	}

	/* Grow the frame buffer if the whole bitstream would not fit. */
	if (frame->size < u32BitStreamLen) {
		struct frame *grown = enlarge_frame_buffer(frame, u32BitStreamLen + 40);

		if (grown == NULL) {
			printf("Spook DeliverPFrame: unable enlarge frame buffer size\n");
			unref_frame(frame);
			return;
		}

		frame = grown;
	}

	/* Copy only the slice payload, skipping its start code. */
	uint32_t u32Offset = psFrameInfo->u32IPOffset + psFrameInfo->u32IPStartCodeLen;
	uint32_t u32Len = psFrameInfo->u32IPLen - psFrameInfo->u32IPStartCodeLen;

	memcpy(frame->d, pu8BitStreamBuf + u32Offset, u32Len);

	frame->format = FORMAT_H264;
	frame->width = en->sH264EncConfig.m_asEncPipeInfo[en->eH264EncRes].m_uiWidth;
	frame->height = en->sH264EncConfig.m_asEncPipeInfo[en->eH264EncRes].m_uiHeight;
	frame->length = u32Len;

	/* P-frames depend on prior frames: not a key unit. */
	frame->key = 0;

	if (deliver_frame(en->ex, frame) != 0)
		printf("Spook DeliverPFrame: Deliver frame failed\n");
}
コード例 #8
0
/* ADPCM delivery thread: waits for a dirty audio resource buffer, walks
 * the chain of S_AUDIO_DATA_HDR chunks inside it, and delivers each
 * chunk as a spook frame with an RTP timestamp derived from the
 * capture timestamp.  Runs forever; `d` is the owning encoder. */
static void *adpcm_loop(void *d)
{
	struct adpcm_encoder *en = (struct adpcm_encoder *)d;
	struct frame *psAdpcmFrame = NULL;
	S_MSF_RESOURCE_DATA *psAudioSrcAdpcm;
	uint64_t u64AudioUpdateTime = 0;
	uint8_t *pu8AudioSrcBuf = NULL;
	uint32_t u32AudioDataSize = 0;
	int i32MaxFrameSize = get_max_frame_size();
	S_AUDIO_DATA_HDR *psAudioDataHdr;
	S_AUDIO_DATA_HDR *psNextHdr;
	uint64_t u64RTPTimestamp;

	for (;;) {
		if (!en->running) {
			usleep(10000);
			continue;
		}

		/* Block until the source buffer is updated past our last
		 * seen timestamp. */
		psAudioSrcAdpcm = g_sPluginIf.m_pResIf->m_pfnImportWaitDirty(eMSF_PLUGIN_ID_ADPCM_ENCODER, eADPCM_ENCODER_RES_FRAME,
						  NULL, u64AudioUpdateTime);

		if (psAudioSrcAdpcm == NULL) {
			usleep(10000);
			continue;
		}

		psAudioDataHdr = (S_AUDIO_DATA_HDR *)psAudioSrcAdpcm->m_pBuf;
		u64AudioUpdateTime = psAudioSrcAdpcm->m_uiBufUpdateTime;


		/* Walk the chunk chain until the end flag or a bad header. */
		while (1) {
			if (psAudioDataHdr->u32Delimiter != AUDIO_DATA_DELIMITER)
				break;

			u32AudioDataSize = psAudioDataHdr->u32DataLen;
			pu8AudioSrcBuf = (uint8_t *)psAudioDataHdr + sizeof(S_AUDIO_DATA_HDR);

			/* Precompute the next header so every skip/deliver path
			 * advances consistently (NULL == last chunk). */
			psNextHdr = psAudioDataHdr->u32Flag_End ? NULL :
				(S_AUDIO_DATA_HDR *)(pu8AudioSrcBuf + u32AudioDataSize + psAudioDataHdr->u32PadBytes);

			/* Cast avoids a signed/unsigned comparison surprise. */
			if (u32AudioDataSize > (uint32_t)i32MaxFrameSize) {
				/* BUG FIX: the original "continue"d here without
				 * advancing psAudioDataHdr, spinning forever on an
				 * oversized chunk; it also logged "alaw:" (copy-paste
				 * from another encoder).  Skip the chunk instead. */
				spook_log(SL_WARN, "adpcm: encode size large than frame size \n");
				if (psNextHdr == NULL)
					break;
				psAudioDataHdr = psNextHdr;
				usleep(10000);
				continue;
			}

			psAdpcmFrame = new_frame();

			if (psAdpcmFrame == NULL) {
				/* Frame pool empty: wait and retry this chunk. */
				usleep(10000);
				continue;
			}

			memcpy(psAdpcmFrame->d, pu8AudioSrcBuf, u32AudioDataSize);

			psAdpcmFrame->format = FORMAT_ADPCM;
			psAdpcmFrame->width = 0;
			psAdpcmFrame->height = 0;
			psAdpcmFrame->key = 1;
			psAdpcmFrame->length = u32AudioDataSize;

			/* Convert the ms capture timestamp to sample-rate ticks. */
			u64RTPTimestamp = (uint64_t)en->sAdpcmEncConfig.m_uiSampleRate * psAudioDataHdr->u64Timestamp / 1000;
			psAdpcmFrame->timestamp = (uint32_t)u64RTPTimestamp;

			deliver_frame(en->ex, psAdpcmFrame);

			if (psNextHdr == NULL)
				break;
			psAudioDataHdr = psNextHdr;

			usleep(10000);
		}
	}

	return NULL;
}
コード例 #9
0
/* AAC delivery thread: waits for a dirty audio resource buffer, walks
 * the chain of S_AUDIO_DATA_HDR chunks inside it, strips any ADTS
 * header, and delivers each raw AAC frame with an RTP timestamp
 * derived from the capture timestamp.  Runs forever; `d` is the
 * owning encoder. */
static void *aac_loop(void *d)
{
	struct aac_encoder *en = (struct aac_encoder *)d;
	struct frame *psAACFrame = NULL;
	S_MSF_RESOURCE_DATA *psAudioSrcAAC;
	uint64_t u64AudioUpdateTime = 0;
	uint8_t *pu8AudioSrcBuf = NULL;
	uint32_t u32AudioDataSize = 0;
	int i32MaxFrameSize = get_max_frame_size();
	S_AUDIO_DATA_HDR *psAudioDataHdr;
	S_AUDIO_DATA_HDR *psNextHdr;
	uint64_t u64RTPTimestamp;

	for (;;) {
		if (!en->running) {
			usleep(10000);
			continue;
		}

		/* Block until the source buffer is updated past our last
		 * seen timestamp. */
		psAudioSrcAAC = g_sPluginIf.m_pResIf->m_pfnImportWaitDirty(eMSF_PLUGIN_ID_AAC_ENCODER, eAAC_ENCODER_RES_FRAME, NULL,
						u64AudioUpdateTime);

		if (psAudioSrcAAC == NULL) {
			usleep(10000);
			continue;
		}

		psAudioDataHdr = (S_AUDIO_DATA_HDR *)psAudioSrcAAC->m_pBuf;
		u64AudioUpdateTime = psAudioSrcAAC->m_uiBufUpdateTime;

		/* Walk the chunk chain until the end flag or a bad header. */
		while (1) {
			if (psAudioDataHdr->u32Delimiter != AUDIO_DATA_DELIMITER) {
				break;
			}

			u32AudioDataSize = psAudioDataHdr->u32DataLen;
			pu8AudioSrcBuf = (uint8_t *)psAudioDataHdr + sizeof(S_AUDIO_DATA_HDR);

			/* Precompute the next header so every skip/deliver path
			 * advances consistently (NULL == last chunk).  Must be
			 * computed before ADTS stripping alters size/pointer. */
			psNextHdr = psAudioDataHdr->u32Flag_End ? NULL :
				(S_AUDIO_DATA_HDR *)(pu8AudioSrcBuf + u32AudioDataSize + psAudioDataHdr->u32PadBytes);

			/* Cast avoids a signed/unsigned comparison surprise. */
			if (u32AudioDataSize > (uint32_t)i32MaxFrameSize) {
				/* BUG FIX: the original "continue"d here without
				 * advancing psAudioDataHdr, spinning forever on an
				 * oversized chunk.  Skip the chunk instead. */
				spook_log(SL_WARN, "aac: encode size large than frame size \n");
				if (psNextHdr == NULL)
					break;
				psAudioDataHdr = psNextHdr;
				usleep(10000);
				continue;
			}

			//There is ADTS header in bitstream, remove it!
			/* 0xFFF syncword; bit 0 of byte 1 set means "no CRC"
			 * (7-byte header), clear means CRC present (9 bytes).
			 * Size guard added so stripping cannot underflow. */
			if ((u32AudioDataSize > 9) &&
			    (pu8AudioSrcBuf[0] == 0xFF) && ((pu8AudioSrcBuf[1] & 0xF0) == 0xF0)) {
				if (pu8AudioSrcBuf[1] & 0x01) {
					//without CRC
					pu8AudioSrcBuf = pu8AudioSrcBuf + 7;
					u32AudioDataSize = u32AudioDataSize - 7;
				}
				else {
					//with CRC
					pu8AudioSrcBuf = pu8AudioSrcBuf + 9;
					u32AudioDataSize = u32AudioDataSize - 9;
				}
			}

			psAACFrame = new_frame();

			if (psAACFrame == NULL) {
				/* Frame pool empty: wait and retry this chunk. */
				usleep(10000);
				continue;
			}

			memcpy(psAACFrame->d, pu8AudioSrcBuf, u32AudioDataSize);

			psAACFrame->format = FORMAT_AAC;
			psAACFrame->width = 0;
			psAACFrame->height = 0;
			psAACFrame->key = 1;
			psAACFrame->length = u32AudioDataSize;

			/* Convert the ms capture timestamp to sample-rate ticks. */
			u64RTPTimestamp = (uint64_t)en->sAACEncConfig.m_uiSampleRate * psAudioDataHdr->u64Timestamp / 1000;

			psAACFrame->timestamp = (uint32_t)u64RTPTimestamp;

			deliver_frame(en->ex, psAACFrame);

			if (psNextHdr == NULL)
				break;
			psAudioDataHdr = psNextHdr;

			usleep(10000);
		}
	}

	return NULL;
}