static rpi_pixmap_decoder_t * pixmap_decoder_create(int cfmt) { rpi_pixmap_decoder_t *rpd = calloc(1, sizeof(rpi_pixmap_decoder_t)); hts_mutex_init(&rpd->rpd_mtx); hts_cond_init(&rpd->rpd_cond, &rpd->rpd_mtx); rpd->rpd_decoder = omx_component_create("OMX.broadcom.image_decode", &rpd->rpd_mtx, &rpd->rpd_cond); rpd->rpd_decoder->oc_port_settings_changed_cb = decoder_port_settings_changed; rpd->rpd_decoder->oc_opaque = rpd; rpd->rpd_resizer = omx_component_create("OMX.broadcom.resize", &rpd->rpd_mtx, &rpd->rpd_cond); omx_set_state(rpd->rpd_decoder, OMX_StateIdle); OMX_IMAGE_PARAM_PORTFORMATTYPE fmt; OMX_INIT_STRUCTURE(fmt); fmt.nPortIndex = rpd->rpd_decoder->oc_inport; fmt.eCompressionFormat = cfmt; omxchk(OMX_SetParameter(rpd->rpd_decoder->oc_handle, OMX_IndexParamImagePortFormat, &fmt)); #ifndef NOCOPY omx_alloc_buffers(rpd->rpd_decoder, rpd->rpd_decoder->oc_inport); omx_set_state(rpd->rpd_decoder, OMX_StateExecuting); #endif return rpd; }
/**
 * Create a hardware video decoder for the given media codec.
 *
 * Supports H.264 always, and MPEG-2 when the firmware licence enables it.
 *
 * @param mc   codec instance; on success its opaque/close/decode/flush
 *             members are filled in.
 * @param mcp  optional codec parameters (extradata); may be NULL.
 * @param mp   media pipe providing the mutex used by the decoder condvar.
 * @return 0 on success, 1 if the codec is unsupported or setup fails.
 */
static int
rpi_codec_create(media_codec_t *mc, const media_codec_params_t *mcp,
                 media_pipe_t *mp)
{
  int fmt;

  switch(mc->codec_id) {
  case CODEC_ID_H264:
    fmt = OMX_VIDEO_CodingAVC;
    break;
  case CODEC_ID_MPEG2VIDEO:
    if(!omx_enable_mpg2)
      return 1;  /* MPEG-2 requires a firmware licence key */
    fmt = OMX_VIDEO_CodingMPEG2;
    break;
#if 0
  case CODEC_ID_VC1:
  case CODEC_ID_WMV3:
    if(mcp->extradata_size == 0)
      return 1;
    mc->decode = vc1_pt_decode;
    return 0;
#endif
  default:
    return 1;
  }

  rpi_video_codec_t *rvc = calloc(1, sizeof(rpi_video_codec_t));
  if(rvc == NULL)
    return 1;  /* out of memory */

  hts_cond_init(&rvc->rvc_avail_cond, &mp->mp_mutex);

  omx_component_t *d = omx_component_create("OMX.broadcom.video_decode",
                                            &mp->mp_mutex,
                                            &rvc->rvc_avail_cond);
  if(d == NULL) {
    hts_cond_destroy(&rvc->rvc_avail_cond);
    free(rvc);
    return 1;
  }

  rvc->rvc_decoder = d;
  omx_set_state(d, OMX_StateIdle);

  /* Select the compressed format on the decoder input port (130). */
  OMX_VIDEO_PARAM_PORTFORMATTYPE format;
  OMX_INIT_STRUCTURE(format);
  format.nPortIndex = 130;
  format.eCompressionFormat = fmt;
  omxchk(OMX_SetParameter(d->oc_handle, OMX_IndexParamVideoPortFormat,
                          &format));

  /* Allow decode to start on a damaged frame instead of waiting for a
     clean one. */
  OMX_PARAM_BRCMVIDEODECODEERRORCONCEALMENTTYPE ec;
  OMX_INIT_STRUCTURE(ec);
  ec.bStartWithValidFrame = OMX_FALSE;
  omxchk(OMX_SetParameter(d->oc_handle,
                          OMX_IndexParamBrcmVideoDecodeErrorConcealment, &ec));

  /* Let the firmware synthesize timestamps for frames that lack them. */
  OMX_CONFIG_BOOLEANTYPE bt;
  OMX_INIT_STRUCTURE(bt);
  bt.bEnabled = OMX_TRUE;
  omxchk(OMX_SetConfig(d->oc_handle,
                       OMX_IndexParamBrcmInterpolateMissingTimestamps, &bt));

  omx_alloc_buffers(d, 130);
  omx_set_state(d, OMX_StateExecuting);

  /* Feed codec extradata (SPS/PPS etc.) as the first buffer, if present.
     Guard against a NULL mcp -- the parameters are optional. */
  if(mcp != NULL && mcp->extradata_size) {
    hts_mutex_lock(&mp->mp_mutex);
    OMX_BUFFERHEADERTYPE *buf = omx_get_buffer_locked(rvc->rvc_decoder);
    hts_mutex_unlock(&mp->mp_mutex);
    buf->nOffset = 0;
    buf->nFilledLen = mcp->extradata_size;
    memcpy(buf->pBuffer, mcp->extradata, buf->nFilledLen);
    buf->nFlags = OMX_BUFFERFLAG_CODECCONFIG;
    omxchk(OMX_EmptyThisBuffer(rvc->rvc_decoder->oc_handle, buf));
  }

  mc->opaque = rvc;
  mc->close  = rpi_codec_close;
  mc->decode = rpi_codec_decode;
  mc->flush  = rpi_codec_flush;
  return 0;
}
/**
 * Build and tunnel the OMX playback pipeline:
 *
 *   video_decode -> [image_fx deinterlacer] -> video_scheduler -> video_render
 *   audio_render (48000 Hz / stereo / 16-bit, fed directly)
 *   clock tunnelled to both audio_render (port 101) and video_scheduler (12)
 *
 * Fixes: the two OMX_Get/SetParameter calls on the audio render port
 * previously read "¶m" (mojibake for "&param") and could not compile.
 *
 * @param pipe        pipeline component state to populate
 * @param video_codec OMX compression format of the incoming video stream
 * @param audio_dest  audio output device name passed to omx_config_pcm()
 * @param is_hd       non-zero for HD streams; selects which deinterlace
 *                    setting (sd/hd) applies
 * @return OMX_ErrorNone (OMX errors are handled inside the OERR() macro).
 */
OMX_ERRORTYPE omx_setup_pipeline(struct omx_pipeline_t* pipe,
                                 OMX_VIDEO_CODINGTYPE video_codec,
                                 char* audio_dest, int is_hd)
{
  OMX_VIDEO_PARAM_PORTFORMATTYPE format;
  OMX_TIME_CONFIG_CLOCKSTATETYPE cstate;
  OMX_CONFIG_BOOLEANTYPE configBoolTrue;

  OMX_INIT_STRUCTURE(configBoolTrue);
  configBoolTrue.bEnabled = OMX_TRUE;

  pipe->do_deinterlace = 0;
  if (((is_hd == 0) && (global_settings.deinterlace_sd)) ||
      ((is_hd == 1) && (global_settings.deinterlace_hd))) {
    DEBUGF("Enabling de-interlace\n");
    pipe->do_deinterlace = 1;
  }

  omx_init_component(pipe, &pipe->video_decode, "OMX.broadcom.video_decode");
  omx_init_component(pipe, &pipe->video_render, "OMX.broadcom.video_render");

  if (pipe->do_deinterlace) {
    DEBUGF("Enabling de-interlacer\n");
    /* De-interlacer. Input port 190, Output port 191.
       Insert between decoder and scheduler */
    omx_init_component(pipe, &pipe->image_fx, "OMX.broadcom.image_fx");

    /* Configure image_fx */
    omx_send_command_and_wait(&pipe->image_fx, OMX_CommandStateSet,
                              OMX_StateIdle, NULL);

    OMX_CONFIG_IMAGEFILTERPARAMSTYPE imagefilter;
    OMX_INIT_STRUCTURE(imagefilter);
    imagefilter.nPortIndex = 191;
    imagefilter.nNumParams = 1;
    imagefilter.nParams[0] = 3;  /* meaning undocumented -- TODO confirm */
    imagefilter.eImageFilter = OMX_ImageFilterDeInterlaceAdvanced;
    OERR(OMX_SetConfig(pipe->image_fx.h,
                       OMX_IndexConfigCommonImageFilterParameters,
                       &imagefilter));
  } else {
    memset(&pipe->image_fx, 0, sizeof(struct omx_component_t));
  }

  omx_init_component(pipe, &pipe->clock, "OMX.broadcom.clock");

  /* Clock starts when both clock ports see a start time. */
  OMX_INIT_STRUCTURE(cstate);
  cstate.eState = OMX_TIME_ClockStateWaitingForStartTime;
  cstate.nWaitMask = OMX_CLOCKPORT0 | OMX_CLOCKPORT1;
  OERR(OMX_SetParameter(pipe->clock.h, OMX_IndexConfigTimeClockState, &cstate));

  /* Audio is the master reference clock. */
  OMX_TIME_CONFIG_ACTIVEREFCLOCKTYPE refClock;
  OMX_INIT_STRUCTURE(refClock);
  refClock.eClock = OMX_TIME_RefClockAudio;
  OERR(OMX_SetConfig(pipe->clock.h, OMX_IndexConfigTimeActiveRefClock,
                     &refClock));

  omx_init_component(pipe, &pipe->video_scheduler,
                     "OMX.broadcom.video_scheduler");

  /* Initialise audio output - hardcoded to 48000/Stereo/16-bit */
  omx_init_component(pipe, &pipe->audio_render, "OMX.broadcom.audio_render");

  OMX_PARAM_PORTDEFINITIONTYPE param;
  OMX_INIT_STRUCTURE(param);
  param.nPortIndex = 100;
  OERR(OMX_GetParameter(pipe->audio_render.h, OMX_IndexParamPortDefinition,
                        &param));
  param.nBufferSize = 8192;       /* Needs to be big enough for one frame of data */
  param.nBufferCountActual = 32;  /* Arbitrary */
  OERR(OMX_SetParameter(pipe->audio_render.h, OMX_IndexParamPortDefinition,
                        &param));

  omx_config_pcm(&pipe->audio_render, 48000, 2, 16, audio_dest);
  OERR(OMX_SetConfig(pipe->audio_render.h,
                     OMX_IndexConfigBrcmClockReferenceSource,
                     &configBoolTrue));

  omx_send_command_and_wait(&pipe->audio_render, OMX_CommandStateSet,
                            OMX_StateIdle, NULL);
  /* Enable port 100, allocate buffers, then wait for the enable to finish
     (the enable only completes once buffers are supplied). */
  omx_send_command_and_wait0(&pipe->audio_render, OMX_CommandPortEnable,
                             100, NULL);
  omx_alloc_buffers(&pipe->audio_render, 100);
  omx_send_command_and_wait1(&pipe->audio_render, OMX_CommandPortEnable,
                             100, NULL);

  /* Setup clock tunnels first */
  omx_send_command_and_wait(&pipe->clock, OMX_CommandStateSet,
                            OMX_StateIdle, NULL);
  OERR(OMX_SetupTunnel(pipe->clock.h, 80, pipe->audio_render.h, 101));
  OERR(OMX_SetupTunnel(pipe->clock.h, 81, pipe->video_scheduler.h, 12));
  OERR(OMX_SendCommand(pipe->clock.h, OMX_CommandPortEnable, 80, NULL));
  OERR(OMX_SendCommand(pipe->video_scheduler.h, OMX_CommandPortEnable, 12, NULL));
  OERR(OMX_SendCommand(pipe->clock.h, OMX_CommandPortEnable, 81, NULL));
  OERR(OMX_SendCommand(pipe->audio_render.h, OMX_CommandPortEnable, 101, NULL));

  omx_send_command_and_wait(&pipe->video_scheduler, OMX_CommandStateSet,
                            OMX_StateIdle, NULL);
  omx_send_command_and_wait(&pipe->clock, OMX_CommandStateSet,
                            OMX_StateExecuting, NULL);

  /* Configure video_decoder */
  omx_send_command_and_wait(&pipe->video_decode, OMX_CommandStateSet,
                            OMX_StateIdle, NULL);

  /* Enable lazy image pool destroying */
  OERR(OMX_SetConfig(pipe->video_decode.h,
                     OMX_IndexParamBrcmLazyImagePoolDestroy,
                     &configBoolTrue));

  OMX_INIT_STRUCTURE(format);
  format.nPortIndex = 130;
  format.eCompressionFormat = video_codec;
  OERR(OMX_SetParameter(pipe->video_decode.h, OMX_IndexParamVideoPortFormat,
                        &format));

  /* Enable error concealment for H264 only - without this, HD channels
     don't work reliably */
  if (video_codec == OMX_VIDEO_CodingAVC) {
    OMX_PARAM_BRCMVIDEODECODEERRORCONCEALMENTTYPE ec;
    OMX_INIT_STRUCTURE(ec);
    ec.bStartWithValidFrame = OMX_FALSE;
    OERR(OMX_SetParameter(pipe->video_decode.h,
                          OMX_IndexParamBrcmVideoDecodeErrorConcealment, &ec));
  }

  /* Enable video decoder input port */
  omx_send_command_and_wait0(&pipe->video_decode, OMX_CommandPortEnable,
                             130, NULL);
  /* Allocate input buffers */
  omx_alloc_buffers(&pipe->video_decode, 130);
  /* Wait for input port to be enabled */
  omx_send_command_and_wait1(&pipe->video_decode, OMX_CommandPortEnable,
                             130, NULL);

  /* Change video_decode to OMX_StateExecuting */
  omx_send_command_and_wait(&pipe->video_decode, OMX_CommandStateSet,
                            OMX_StateExecuting, NULL);
  /* Change audio_render to OMX_StateExecuting */
  omx_send_command_and_wait(&pipe->audio_render, OMX_CommandStateSet,
                            OMX_StateExecuting, NULL);

  /* Enable passing of buffer marks */
  OERR(OMX_SetParameter(pipe->video_decode.h, OMX_IndexParamPassBufferMarks,
                        &configBoolTrue));
  OERR(OMX_SetParameter(pipe->video_render.h, OMX_IndexParamPassBufferMarks,
                        &configBoolTrue));

  return OMX_ErrorNone;
}
/* will be feeding stuff into the encoding pipeline */ void * decode_thread(void *context) { OMX_BUFFERHEADERTYPE *input_buffer; // buffer taken from the OMX decoder OMX_PARAM_PORTDEFINITIONTYPE encoder_config; OMX_PARAM_PORTDEFINITIONTYPE decoder_config; OMX_PARAM_PORTDEFINITIONTYPE deinterlacer_config; OMX_VIDEO_PARAM_PORTFORMATTYPE encoder_format_config; //used for the output of the encoder OMX_VIDEO_PARAM_BITRATETYPE encoder_bitrate_config; //used for the output of the encoder struct transcoder_ctx_t *ctx = (struct transcoder_ctx_t *) context; struct packet_t *current_packet; int bytes_left; uint8_t *p; // points to currently copied buffer // omx_setup_encoding_pipeline(&decoder_ctx->pipeline, OMX_VIDEO_CodingAVC); omx_setup_encoding_pipeline(&ctx->pipeline, OMX_VIDEO_CodingMPEG2); // main loop that will poll packets and render while (ctx->input_video_queue->queue_count != 0 || ctx->input_video_queue->queue_finished != 1) { //TODO a memory barrier is going to be needed so that we don't race current_packet = packet_queue_get_next_item(ctx->input_video_queue); p = current_packet->data; bytes_left = current_packet->data_length; while (bytes_left > 0) { fprintf(stderr, "OMX buffers: v: %02d/20, vcodec queue: %4d\r", omx_get_free_buffer_count(&ctx->pipeline.video_decode), ctx->input_video_queue->queue_count); input_buffer = omx_get_next_input_buffer(&ctx->pipeline.video_decode); // This will block if there are no empty buffers // copy at most the length of the OMX buf int copy_length = OMX_MIN(bytes_left, input_buffer->nAllocLen); memcpy(input_buffer->pBuffer, p, copy_length); p += copy_length; bytes_left -= copy_length; input_buffer->nFilledLen = copy_length; input_buffer->nTimeStamp = pts_to_omx(current_packet->PTS); if (ctx->first_packet) { input_buffer->nFlags = OMX_BUFFERFLAG_STARTTIME; ctx->first_packet = 0; } else { //taken from ilclient input_buffer->nFlags = OMX_BUFFERFLAG_TIME_UNKNOWN; } if (current_packet->flags & AV_PKT_FLAG_KEY) { input_buffer->nFlags 
|= OMX_BUFFERFLAG_SYNCFRAME; } if(bytes_left == 0) { input_buffer->nFlags |= OMX_BUFFERFLAG_ENDOFFRAME; } //configure the resizer after the decoder has got output if (ctx->pipeline.video_decode.port_settings_changed == 1) { ctx->pipeline.video_decode.port_settings_changed = 0; #if 0 int x = 640; int y = 480; // rounding the dimensions up to the next multiple of 16. x += 0x0f; x &= ~0x0f; y += 0x0f; y &= ~0x0f; //the above code is used if resizer is used //get information about the decoded video OMX_INIT_STRUCTURE(decoder_config); decoder_config.nPortIndex = 131; OERR(OMX_GetParameter(ctx->pipeline.video_decode.h, OMX_IndexParamPortDefinition, &decoder_config)); decoder_config.nPortIndex = 190; OERR(OMX_GetParameter(ctx->pipeline.image_fx.h, OMX_IndexParamPortDefinition, &decoder_config)); decoder_config.nPortIndex = 191; OERR(OMX_GetParameter(ctx->pipeline.image_fx.h, OMX_IndexParamPortDefinition, &decoder_config)); #endif OERR(OMX_SetupTunnel(ctx->pipeline.video_decode.h, 131, ctx->pipeline.image_fx.h, 190)); omx_send_command_and_wait(&ctx->pipeline.video_decode, OMX_CommandPortEnable, 131, NULL); omx_send_command_and_wait(&ctx->pipeline.image_fx, OMX_CommandPortEnable, 190, NULL); omx_send_command_and_wait(&ctx->pipeline.image_fx, OMX_CommandStateSet, OMX_StateExecuting, NULL); fprintf(stderr, "configuring deinterlacer done\n"); } if(ctx->pipeline.image_fx.port_settings_changed == 1) { OMX_ERRORTYPE error; ctx->pipeline.image_fx.port_settings_changed = 0; #if 0 //get info from deinterlacer output OMX_INIT_STRUCTURE(deinterlacer_config); deinterlacer_config.nPortIndex = 191; OERR(OMX_GetParameter(ctx->pipeline.image_fx.h, OMX_IndexParamPortDefinition, &deinterlacer_config)); //get default from encoder input OMX_INIT_STRUCTURE(encoder_config); encoder_config.nPortIndex = 200; OERR(OMX_GetParameter(ctx->pipeline.video_encode.h, OMX_IndexParamPortDefinition, &encoder_config)); //modify it with deinterlacer encoder_config.format.video.nFrameHeight = 
deinterlacer_config.format.image.nFrameHeight; encoder_config.format.video.nFrameWidth = deinterlacer_config.format.image.nFrameWidth; encoder_config.format.video.eCompressionFormat = deinterlacer_config.format.image.eCompressionFormat; encoder_config.format.video.eColorFormat = deinterlacer_config.format.image.eColorFormat; encoder_config.format.video.nSliceHeight = deinterlacer_config.format.image.nSliceHeight; encoder_config.format.video.nStride = deinterlacer_config.format.image.nStride; //and feed it OERR(OMX_SetParameter(ctx->pipeline.video_encode.h, OMX_IndexParamPortDefinition, &encoder_config)); #endif //configure encoder output format OMX_INIT_STRUCTURE(encoder_format_config); encoder_format_config.nPortIndex = 201; //encoder output port encoder_format_config.eCompressionFormat = OMX_VIDEO_CodingAVC; OERR(OMX_SetParameter(ctx->pipeline.video_encode.h, OMX_IndexParamVideoPortFormat, &encoder_format_config)); //configure encoder output bitrate OMX_INIT_STRUCTURE(encoder_bitrate_config); encoder_bitrate_config.nPortIndex = 201; encoder_bitrate_config.eControlRate = OMX_Video_ControlRateVariable; //var bitrate encoder_bitrate_config.nTargetBitrate = ENCODED_BITRATE; //1 mbit OERR(OMX_SetParameter(ctx->pipeline.video_encode.h, OMX_IndexParamVideoBitrate, &encoder_bitrate_config)); //setup tunnel from decoder to encoder OERR(OMX_SetupTunnel(ctx->pipeline.image_fx.h, 191, ctx->pipeline.video_encode.h, 200)); //set encoder to idle after we have finished configuring it omx_send_command_and_wait0(&ctx->pipeline.video_encode, OMX_CommandStateSet, OMX_StateIdle, NULL); //allocate buffers for output of the encoder //send the enable command, which won't complete until the bufs are alloc'ed omx_send_command_and_wait0(&ctx->pipeline.video_encode, OMX_CommandPortEnable, 201, NULL); omx_alloc_buffers(&ctx->pipeline.video_encode, 201); //allocate output buffers //block until the port is fully enabled omx_send_command_and_wait1(&ctx->pipeline.video_encode, 
OMX_CommandPortEnable, 201, NULL); omx_send_command_and_wait1(&ctx->pipeline.video_encode, OMX_CommandStateSet, OMX_StateIdle, NULL); //enable the two ports omx_send_command_and_wait0(&ctx->pipeline.image_fx, OMX_CommandPortEnable, 191, NULL); omx_send_command_and_wait0(&ctx->pipeline.video_encode, OMX_CommandPortEnable, 200, NULL); omx_send_command_and_wait1(&ctx->pipeline.video_encode, OMX_CommandPortEnable, 200, NULL); omx_send_command_and_wait1(&ctx->pipeline.image_fx, OMX_CommandPortEnable, 191, NULL); omx_send_command_and_wait(&ctx->pipeline.video_encode, OMX_CommandStateSet, OMX_StateExecuting, NULL); fprintf(stderr, "finished configuring encoder\n"); } if(ctx->pipeline.video_encode.port_settings_changed == 1) { fprintf(stderr, "encoder enabled\n"); ctx->pipeline.video_encode.port_settings_changed = 0; //signal the consumer thread it can start polling for data ctx->pipeline.video_encode.is_running = 1; pthread_cond_signal(&ctx->pipeline.video_encode.is_running_cv); } OERR(OMX_EmptyThisBuffer(ctx->pipeline.video_decode.h, input_buffer)); } packet_queue_free_packet(current_packet, 1); current_packet = NULL; } printf("Finishing stream \n"); /* Indicate end of video stream */ input_buffer = omx_get_next_input_buffer(&ctx->pipeline.video_decode); input_buffer->nFilledLen = 0; input_buffer->nFlags = OMX_BUFFERFLAG_TIME_UNKNOWN | OMX_BUFFERFLAG_EOS; OERR(OMX_EmptyThisBuffer(ctx->pipeline.video_decode.h, input_buffer)); // omx_teardown_pipeline(&decoder_ctx->pipeline); }