/*
 * MSFilter process callback for the ffmpeg-based H264 decoder.
 * Drains RTP packets from input 0, reassembles NAL units (RFC3984),
 * feeds complete frames to avcodec and pushes decoded YUV pictures
 * to output 0. Also emits DECODING_ERRORS / FIRST_IMAGE_DECODED
 * filter notifications.
 */
static void dec_process(MSFilter *f){
	DecData *d=(DecData*)f->data;
	mblk_t *im;
	MSQueue nalus;
	ms_queue_init(&nalus);
	while((im=ms_queue_get(f->inputs[0]))!=NULL){
		/*push the sps/pps given in sprop-parameter-sets if any*/
		if (d->packet_num==0 && d->sps && d->pps){
			/* Stamp the out-of-band SPS/PPS with the first packet's timestamp
			 * so the unpacker groups them with the current access unit. */
			mblk_set_timestamp_info(d->sps,mblk_get_timestamp_info(im));
			mblk_set_timestamp_info(d->pps,mblk_get_timestamp_info(im));
			rfc3984_unpack(&d->unpacker,d->sps,&nalus);
			rfc3984_unpack(&d->unpacker,d->pps,&nalus);
			/* Ownership of sps/pps was transferred to the unpacker; forget them. */
			d->sps=NULL;
			d->pps=NULL;
		}
		rfc3984_unpack(&d->unpacker,im,&nalus);
		if (!ms_queue_empty(&nalus)){
			int size;
			uint8_t *p,*end;
			bool_t need_reinit=FALSE;
			/* Serialize the NALUs into d->bitstream (Annex-B); need_reinit is
			 * set when a new SPS requires recreating the avcodec context. */
			size=nalusToFrame(d,&nalus,&need_reinit);
			if (need_reinit)
				dec_reinit(d);
			p=d->bitstream;
			end=d->bitstream+size;
			/* Decode the buffer; avcodec may consume it in several calls. */
			while (end-p>0) {
				int len;
				int got_picture=0;
				AVPacket pkt;
				av_frame_unref(d->orig);
				av_init_packet(&pkt);
				pkt.data = p;
				pkt.size = end-p;
				len=avcodec_decode_video2(&d->av_context,d->orig,&got_picture,&pkt);
				if (len<=0) {
					/* Decode error: notify and drop the rest of this frame. */
					ms_warning("ms_AVdecoder_process: error %i.",len);
					ms_filter_notify_no_arg(f,MS_VIDEO_DECODER_DECODING_ERRORS);
					break;
				}
				if (got_picture) {
					ms_queue_put(f->outputs[0],get_as_yuvmsg(f,d,d->orig));
					if (!d->first_image_decoded) {
						d->first_image_decoded = TRUE;
						ms_filter_notify_no_arg(f,MS_VIDEO_DECODER_FIRST_IMAGE_DECODED);
					}
					/* ms_average_fps_update() returns TRUE when it logs a new
					 * fps measurement; piggy-back the frame size message. */
					if (ms_average_fps_update(&d->fps, f->ticker->time)) {
						ms_message("ffmpeg H264 decoder: Frame size: %dx%d", d->outbuf.w, d->outbuf.h);
					}
				}
				p+=len; /* advance past the bytes avcodec consumed */
			}
		}
		d->packet_num++;
	}
}
/*
 * MSFilter process callback for the BB10 camera capture filter.
 * Drops all but the most recent frame queued by the capture callback,
 * stamps it with a 90 kHz RTP timestamp and pushes it on output 0.
 */
static void bb10capture_process(MSFilter *f) {
	BB10Capture *d = (BB10Capture *)f->data;
	mblk_t *latest = NULL;
	mblk_t *m;

	ms_filter_lock(f);

	/* Drain the capture queue under the mutex, keeping only the newest frame. */
	ms_mutex_lock(&d->mutex);
	for (m = ms_queue_get(&d->rq); m != NULL; m = ms_queue_get(&d->rq)) {
		if (latest != NULL) freemsg(latest);
		latest = m;
	}
	ms_mutex_unlock(&d->mutex);

	if (latest != NULL) {
		/* rtp uses a 90000 Hz clockrate for video */
		uint32_t ts = (uint32_t)(f->ticker->time * 90);
		mblk_set_timestamp_info(latest, ts);
		ms_queue_put(f->outputs[0], latest);
		ms_average_fps_update(&d->avgfps, f->ticker->time);
	}

	ms_filter_unlock(f);
}
/*
 * Deprecated compatibility shim: forwards to ms_average_fps_update().
 * Kept so existing callers keep linking; new code should call
 * ms_average_fps_update() directly.
 */
bool_t ms_video_update_average_fps(MSAverageFPS* afps, uint32_t current_time){
	return ms_average_fps_update(afps, current_time);
}
/*
 * MSFilter process callback for the libvpx VP8 decoder.
 * Unpacks the VP8 RTP payload format from input 0, decodes complete
 * frames, copies the decoded image into a reusable YUV buffer and
 * pushes it on output 0. Emits OUTPUT_FMT_CHANGED on size change and
 * FIRST_IMAGE_DECODED on the first decoded picture.
 *
 * BUG FIX: the original body acquired the filter lock with
 * ms_filter_lock(f) but ended without ever calling ms_filter_unlock(f)
 * (the function was also left unterminated) — every call would leave
 * the filter permanently locked. The unlock and the function
 * terminator are restored below.
 */
static void dec_process(MSFilter *f) {
	DecState *s = (DecState *)f->data;
	mblk_t *im;
	vpx_codec_err_t err;
	vpx_image_t *img;
	vpx_codec_iter_t iter = NULL;
	MSQueue frame;
	MSQueue mtofree_queue;
	Vp8RtpFmtFrameInfo frame_info;

	if (!s->ready) {
		/* Decoder not configured yet: discard the input. */
		ms_queue_flush(f->inputs[0]);
		return;
	}
	ms_filter_lock(f);
	ms_queue_init(&frame);
	ms_queue_init(&mtofree_queue);

	/* Unpack RTP payload format for VP8. */
	vp8rtpfmt_unpacker_feed(&s->unpacker, f->inputs[0]);

	/* Decode unpacked VP8 frames. */
	while (vp8rtpfmt_unpacker_get_frame(&s->unpacker, &frame, &frame_info) == 0) {
		while ((im = ms_queue_get(&frame)) != NULL) {
			err = vpx_codec_decode(&s->codec, im->b_rptr, (unsigned int)(im->b_wptr - im->b_rptr), NULL, 0);
			if ((s->flags & VPX_CODEC_USE_INPUT_FRAGMENTS) && mblk_get_marker_info(im)) {
				/* Marker bit: signal end-of-frame to the fragment-mode decoder. */
				err = vpx_codec_decode(&s->codec, NULL, 0, NULL, 0);
			}
			if (err) {
				ms_warning("vp8 decode failed : %d %s (%s)\n", err, vpx_codec_err_to_string(err),
					vpx_codec_error_detail(&s->codec) ? vpx_codec_error_detail(&s->codec) : "no details");
			}
			/* Input blocks are freed only after the frame is fetched below. */
			ms_queue_put(&mtofree_queue, im);
		}

		/* Get decoded frame */
		if ((img = vpx_codec_get_frame(&s->codec, &iter))) {
			int i, j;
			int reference_updates = 0;

			/* Acknowledge golden/altref updates via RPSI when picture ids are present. */
			if (vpx_codec_control(&s->codec, VP8D_GET_LAST_REF_UPDATES, &reference_updates) == 0) {
				if (frame_info.pictureid_present && ((reference_updates & VP8_GOLD_FRAME) || (reference_updates & VP8_ALTR_FRAME))) {
					vp8rtpfmt_send_rpsi(&s->unpacker, frame_info.pictureid);
				}
			}

			/* (Re)allocate the output YUV buffer when the decoded size changes. */
			if (s->yuv_width != img->d_w || s->yuv_height != img->d_h) {
				if (s->yuv_msg) freemsg(s->yuv_msg);
				s->yuv_msg = ms_yuv_buf_alloc(&s->outbuf, img->d_w, img->d_h);
				ms_message("MSVp8Dec: video is %ix%i", img->d_w, img->d_h);
				s->yuv_width = img->d_w;
				s->yuv_height = img->d_h;
				ms_filter_notify_no_arg(f, MS_FILTER_OUTPUT_FMT_CHANGED);
			}

			/* scale/copy frame to destination mblk_t */
			for (i = 0; i < 3; i++) {
				uint8_t *dest = s->outbuf.planes[i];
				uint8_t *src = img->planes[i];
				int h = img->d_h >> ((i > 0) ? 1 : 0); /* chroma planes are half height */
				for (j = 0; j < h; j++) {
					memcpy(dest, src, s->outbuf.strides[i]);
					dest += s->outbuf.strides[i];
					src += img->stride[i];
				}
			}
			ms_queue_put(f->outputs[0], dupmsg(s->yuv_msg));
			ms_average_fps_update(&s->fps, (uint32_t)f->ticker->time);
			if (!s->first_image_decoded) {
				s->first_image_decoded = TRUE;
				ms_filter_notify_no_arg(f, MS_VIDEO_DECODER_FIRST_IMAGE_DECODED);
			}
		}

		while ((im = ms_queue_get(&mtofree_queue)) != NULL) {
			freemsg(im);
		}
	}
	ms_filter_unlock(f); /* BUG FIX: restore the missing unlock */
}
void MSOpenH264Decoder::feed() { if (!isInitialized()) { ms_error("MSOpenH264Decoder::feed(): not initialized"); ms_queue_flush(mFilter->inputs[0]); return; } MSQueue nalus; ms_queue_init(&nalus); mblk_t *im; while ((im = ms_queue_get(mFilter->inputs[0])) != NULL) { if ((getIDRPicId() == 0) && (mSPS != 0) && (mPPS != 0)) { // Push the sps/pps given in sprop-parameter-sets if any mblk_set_timestamp_info(mSPS, mblk_get_timestamp_info(im)); mblk_set_timestamp_info(mPPS, mblk_get_timestamp_info(im)); rfc3984_unpack(mUnpacker, mSPS, &nalus); rfc3984_unpack(mUnpacker, mPPS, &nalus); mSPS = 0; mPPS = 0; } rfc3984_unpack(mUnpacker, im, &nalus); if (!ms_queue_empty(&nalus)) { void * pData[3] = { 0 }; SBufferInfo sDstBufInfo = { 0 }; int len = nalusToFrame(&nalus); DECODING_STATE state = mDecoder->DecodeFrame2(mBitstream, len, (uint8_t**)pData, &sDstBufInfo); if (state != dsErrorFree) { ms_error("OpenH264 decoder: DecodeFrame2 failed: 0x%x", state); if (((mFilter->ticker->time - mLastErrorReportTime) > 5000) || (mLastErrorReportTime == 0)) { mLastErrorReportTime = mFilter->ticker->time; ms_filter_notify_no_arg(mFilter, MS_VIDEO_DECODER_DECODING_ERRORS); } } if (sDstBufInfo.iBufferStatus == 1) { uint8_t * pDst[3] = { 0 }; pDst[0] = (uint8_t *)pData[0]; pDst[1] = (uint8_t *)pData[1]; pDst[2] = (uint8_t *)pData[2]; // Update video size and (re)allocate YUV buffer if needed if ((mWidth != sDstBufInfo.UsrData.sSystemBuffer.iWidth) || (mHeight != sDstBufInfo.UsrData.sSystemBuffer.iHeight)) { if (mYUVMsg) { freemsg(mYUVMsg); } mWidth = sDstBufInfo.UsrData.sSystemBuffer.iWidth; mHeight = sDstBufInfo.UsrData.sSystemBuffer.iHeight; mYUVMsg = ms_yuv_buf_alloc(&mOutbuf, mWidth, mHeight); ms_filter_notify_no_arg(mFilter,MS_FILTER_OUTPUT_FMT_CHANGED); } // Scale/copy frame to destination mblk_t for (int i = 0; i < 3; i++) { uint8_t *dst = mOutbuf.planes[i]; uint8_t *src = pDst[i]; int h = mHeight >> (( i > 0) ? 
1 : 0); for(int j = 0; j < h; j++) { memcpy(dst, src, mOutbuf.strides[i]); dst += mOutbuf.strides[i]; src += sDstBufInfo.UsrData.sSystemBuffer.iStride[(i == 0) ? 0 : 1]; } } ms_queue_put(mFilter->outputs[0], dupmsg(mYUVMsg)); // Update average FPS if (ms_average_fps_update(&mFPS, mFilter->ticker->time)) { ms_message("OpenH264 decoder: Frame size: %dx%d", mWidth, mHeight); } // Notify first decoded image if (!mFirstImageDecoded) { mFirstImageDecoded = true; ms_filter_notify_no_arg(mFilter, MS_VIDEO_DECODER_FIRST_IMAGE_DECODED); } #if MSOPENH264_DEBUG ms_message("OpenH264 decoder: IDR pic id: %d, Frame num: %d, Temporal id: %d, VCL NAL: %d", getIDRPicId(), getFrameNum(), getTemporalId(), getVCLNal()); #endif } }
/*
 * MSFilter process callback for the VideoToolbox H264 decoder.
 * Unpacks RTP packets into NAL units, captures SPS/PPS to (re)build the
 * CMVideoFormatDescription, packs the remaining NALUs into a length-
 * prefixed CMBlockBuffer and submits it to the decompression session.
 * Decoded frames arrive asynchronously in ctx->queue and are flushed to
 * output 0 at the end of every call. On any failure the function emits
 * DECODING_ERRORS (and SEND_PLI when AVPF is enabled).
 *
 * BUG FIX: NAL units dropped before any parameter set is known are
 * mblk_t message blocks coming from the RFC3984 unpacker; they must be
 * released with freemsg(), not ms_free() (wrong deallocator: ms_free
 * frees ms_malloc'd memory and would leak the message's data block).
 */
static void h264_dec_process(MSFilter *f) {
	VTH264DecCtx *ctx = (VTH264DecCtx *)f->data;
	mblk_t *pkt;
	mblk_t *nalu;
	mblk_t *pixbuf;
	MSQueue q_nalus;
	MSQueue q_nalus2;
	CMBlockBufferRef stream = NULL;
	CMSampleBufferRef sample = NULL;
	CMSampleTimingInfo timing_info;
	MSPicture pixbuf_desc;
	OSStatus status;
	MSList *parameter_sets = NULL;
	bool_t unpacking_failed;

	ms_queue_init(&q_nalus);
	ms_queue_init(&q_nalus2);

	// unpack RTP packet
	unpacking_failed = FALSE;
	while((pkt = ms_queue_get(f->inputs[0]))) {
		unpacking_failed |= (rfc3984_unpack(&ctx->unpacker, pkt, &q_nalus) != 0);
	}
	if(unpacking_failed) {
		ms_error("VideoToolboxDecoder: error while unpacking RTP packets");
		goto fail;
	}

	// Pull out SPSs and PPSs and put them into the filter context if necessary
	while((nalu = ms_queue_get(&q_nalus))) {
		MSH264NaluType nalu_type = ms_h264_nalu_get_type(nalu);
		if(nalu_type == MSH264NaluTypeSPS || nalu_type == MSH264NaluTypePPS) {
			parameter_sets = ms_list_append(parameter_sets, nalu);
		} else if(ctx->format_desc || parameter_sets) {
			ms_queue_put(&q_nalus2, nalu);
		} else {
			// No format description yet: drop the NALU. BUG FIX: was ms_free(),
			// but mblk_t message blocks must be released with freemsg().
			freemsg(nalu);
		}
	}
	if(parameter_sets) {
		// Rebuild the format description; destroy the session if the video size changed.
		CMFormatDescriptionRef last_format = ctx->format_desc ? CFRetain(ctx->format_desc) : NULL;
		h264_dec_update_format_description(ctx, parameter_sets);
		parameter_sets = ms_list_free_with_data(parameter_sets, (void (*)(void *))freemsg);
		if(ctx->format_desc == NULL) goto fail;
		if(last_format) {
			CMVideoDimensions last_vsize = CMVideoFormatDescriptionGetDimensions(last_format);
			CMVideoDimensions vsize = CMVideoFormatDescriptionGetDimensions(ctx->format_desc);
			if(last_vsize.width != vsize.width || last_vsize.height != vsize.height) {
				ms_message("VideoToolboxDecoder: new encoded video size %dx%d -> %dx%d",
					(int)last_vsize.width, (int)last_vsize.height, (int)vsize.width, (int)vsize.height);
				ms_message("VideoToolboxDecoder: destroying decoding session");
				VTDecompressionSessionInvalidate(ctx->session);
				CFRelease(ctx->session);
				ctx->session = NULL;
			}
			CFRelease(last_format);
		}
	}

	/* Stops proccessing if no IDR has been received yet */
	if(ctx->format_desc == NULL) {
		ms_warning("VideoToolboxDecoder: no IDR packet has been received yet");
		goto fail;
	}

	/* Initializes the decoder if it has not be done yet or reconfigure it when the size of the encoded video change */
	if(ctx->session == NULL) {
		if(!h264_dec_init_decoder(ctx)) {
			ms_error("VideoToolboxDecoder: failed to initialized decoder");
			goto fail;
		}
	}

	// Pack all nalus in a VTBlockBuffer
	CMBlockBufferCreateEmpty(NULL, 0, kCMBlockBufferAssureMemoryNowFlag, &stream);
	while((nalu = ms_queue_get(&q_nalus2))) {
		CMBlockBufferRef nalu_block;
		size_t nalu_block_size = msgdsize(nalu) + H264_NALU_HEAD_SIZE;
		// AVCC framing: each NALU is prefixed with its size in network byte order.
		uint32_t nalu_size = htonl(msgdsize(nalu));
		CMBlockBufferCreateWithMemoryBlock(NULL, NULL, nalu_block_size, NULL, NULL, 0,
			nalu_block_size, kCMBlockBufferAssureMemoryNowFlag, &nalu_block);
		CMBlockBufferReplaceDataBytes(&nalu_size, nalu_block, 0, H264_NALU_HEAD_SIZE);
		CMBlockBufferReplaceDataBytes(nalu->b_rptr, nalu_block, H264_NALU_HEAD_SIZE, msgdsize(nalu));
		CMBlockBufferAppendBufferReference(stream, nalu_block, 0, nalu_block_size, 0);
		CFRelease(nalu_block);
		freemsg(nalu);
	}
	if(!CMBlockBufferIsEmpty(stream)) {
		timing_info.duration = kCMTimeInvalid;
		timing_info.presentationTimeStamp = CMTimeMake(f->ticker->time, 1000);
		timing_info.decodeTimeStamp = CMTimeMake(f->ticker->time, 1000);
		CMSampleBufferCreate(NULL, stream, TRUE, NULL, NULL, ctx->format_desc,
			1, 1, &timing_info, 0, NULL, &sample);
		status = VTDecompressionSessionDecodeFrame(ctx->session, sample, 0, NULL, NULL);
		CFRelease(sample);
		if(status != noErr) {
			CFRelease(stream);
			ms_error("VideoToolboxDecoder: error while passing encoded frames to the decoder: %d", status);
			if(status == kVTInvalidSessionErr) {
				// The session died (e.g. app backgrounded); recreate it on next call.
				h264_dec_uninit_decoder(ctx);
			}
			goto fail;
		}
	}
	CFRelease(stream);
	goto put_frames_out;

fail:
	ms_filter_notify_no_arg(f, MS_VIDEO_DECODER_DECODING_ERRORS);
	ms_filter_lock(f);
	if(ctx->enable_avpf) {
		ms_message("VideoToolboxDecoder: sending PLI");
		ms_filter_notify_no_arg(f, MS_VIDEO_DECODER_SEND_PLI);
	}
	ms_filter_unlock(f);

put_frames_out:
	// Transfer decoded frames in the output queue
	ms_mutex_lock(&ctx->mutex);
	while((pixbuf = ms_queue_get(&ctx->queue))) {
		// Release the mutex while handling the frame: the decode callback
		// may be pushing new frames concurrently.
		ms_mutex_unlock(&ctx->mutex);
		ms_yuv_buf_init_from_mblk(&pixbuf_desc, pixbuf);
		ms_filter_lock(f);
		if(pixbuf_desc.w != ctx->vsize.width || pixbuf_desc.h != ctx->vsize.height) {
			ctx->vsize = (MSVideoSize){ pixbuf_desc.w , pixbuf_desc.h };
		}
		ms_average_fps_update(&ctx->fps, (uint32_t)f->ticker->time);
		if(ctx->first_image) {
			ms_filter_notify_no_arg(f, MS_VIDEO_DECODER_FIRST_IMAGE_DECODED);
			ctx->first_image = FALSE;
		}
		ms_filter_unlock(f);
		ms_queue_put(f->outputs[0], pixbuf);
		ms_mutex_lock(&ctx->mutex);
	}
	ms_mutex_unlock(&ctx->mutex);

	// Cleaning
	ms_queue_flush(&q_nalus);
	ms_queue_flush(&q_nalus2);
	ms_queue_flush(f->inputs[0]);
	return;
}