Example No. 1
/* Based on allocbufs from omxtx.
   Buffers are connected as a one-way linked list using pAppPrivate as the pointer to the next element */
void omx_alloc_buffers(struct omx_component_t *component, int port)
{
  int i;
  OMX_BUFFERHEADERTYPE *list = NULL, **end = &list;
  OMX_PARAM_PORTDEFINITIONTYPE portdef;

  OMX_INIT_STRUCTURE(portdef);
  portdef.nPortIndex = port;

  OERR(OMX_GetParameter(component->h, OMX_IndexParamPortDefinition, &portdef));

  if (component == &component->pipe->audio_render) {
    DEBUGF("Allocating %d buffers of %d bytes\n",(int)portdef.nBufferCountActual,(int)portdef.nBufferSize);
    DEBUGF("portdef.bEnabled=%d\n",portdef.bEnabled);
  }

  for (i = 0; i < portdef.nBufferCountActual; i++) {
    OMX_U8 *buf;

    buf = vcos_malloc_aligned(portdef.nBufferSize, portdef.nBufferAlignment, "buffer");

    //    printf("Allocated a buffer of %u bytes\n",(unsigned int)portdef.nBufferSize);

    OERR(OMX_UseBuffer(component->h, end, port, NULL, portdef.nBufferSize, buf));

    end = (OMX_BUFFERHEADERTYPE **) &((*end)->pAppPrivate);
  }

  component->buffers = list;
}
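
Every example below leans on helper macros from the surrounding project (omxtx / pidvbip-style code) that are not shown here. As an assumption, a minimal sketch of what they typically look like follows: OMX_INIT_STRUCTURE zeroes an OMX struct and fills in its nSize/nVersion header, and OERR aborts if an OMX call returns anything other than OMX_ErrorNone. The later omxtx-derived examples pass a verbosity flag as an extra OERR argument, and the getopt-style options() example uses an unrelated three-argument OERR of its own; this sketch covers only the single-argument form used here.

#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <IL/OMX_Core.h>

/* Hypothetical sketch -- the real project headers may differ. */
#define OMX_INIT_STRUCTURE(a) \
  do { \
    memset(&(a), 0, sizeof(a)); \
    (a).nSize = sizeof(a); \
    (a).nVersion.s.nVersionMajor = OMX_VERSION_MAJOR; \
    (a).nVersion.s.nVersionMinor = OMX_VERSION_MINOR; \
    (a).nVersion.s.nRevision = OMX_VERSION_REVISION; \
    (a).nVersion.s.nStep = OMX_VERSION_STEP; \
  } while (0)

#define OERR(cmd) \
  do { \
    OMX_ERRORTYPE oerr = (cmd); \
    if (oerr != OMX_ErrorNone) { \
      fprintf(stderr, "%s:%d: %s failed: 0x%08x\n", \
              __FILE__, __LINE__, #cmd, (unsigned int)oerr); \
      exit(1); \
    } \
  } while (0)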
Example No. 2
static void omx_config_pcm(struct omx_component_t* audio_render, int samplerate, int channels, int bitdepth, char* dest)
{
  OMX_AUDIO_PARAM_PCMMODETYPE pcm;
  int32_t s;

  OMX_INIT_STRUCTURE(pcm);
  pcm.nPortIndex = 100;
  pcm.nChannels = channels;
  pcm.eNumData = OMX_NumericalDataSigned;
  pcm.eEndian = OMX_EndianLittle;
  pcm.nSamplingRate = samplerate;
  pcm.bInterleaved = OMX_TRUE;
  pcm.nBitPerSample = bitdepth;
  pcm.ePCMMode = OMX_AUDIO_PCMModeLinear;

  switch(channels) {
    case 1:
      pcm.eChannelMapping[0] = OMX_AUDIO_ChannelCF;
      break;
    case 8:
      pcm.eChannelMapping[0] = OMX_AUDIO_ChannelLF;
      pcm.eChannelMapping[1] = OMX_AUDIO_ChannelRF;
      pcm.eChannelMapping[2] = OMX_AUDIO_ChannelCF;
      pcm.eChannelMapping[3] = OMX_AUDIO_ChannelLFE;
      pcm.eChannelMapping[4] = OMX_AUDIO_ChannelLR;
      pcm.eChannelMapping[5] = OMX_AUDIO_ChannelRR;
      pcm.eChannelMapping[6] = OMX_AUDIO_ChannelLS;
      pcm.eChannelMapping[7] = OMX_AUDIO_ChannelRS;
     break;
    case 4:
      pcm.eChannelMapping[0] = OMX_AUDIO_ChannelLF;
      pcm.eChannelMapping[1] = OMX_AUDIO_ChannelRF;
      pcm.eChannelMapping[2] = OMX_AUDIO_ChannelLR;
      pcm.eChannelMapping[3] = OMX_AUDIO_ChannelRR;
     break;
    case 2:
      pcm.eChannelMapping[0] = OMX_AUDIO_ChannelLF;
      pcm.eChannelMapping[1] = OMX_AUDIO_ChannelRF;
     break;
  }

  OERR(OMX_SetParameter(audio_render->h, OMX_IndexParamAudioPcm, &pcm));

  OMX_CONFIG_BRCMAUDIODESTINATIONTYPE ar_dest;
  OMX_INIT_STRUCTURE(ar_dest);
  strcpy((char *)ar_dest.sName, dest);
  OERR(OMX_SetConfig(audio_render->h, OMX_IndexConfigBrcmAudioDestination, &ar_dest));
}
Example No. 3
OMX_ERRORTYPE omx_init_component(struct omx_pipeline_t* pipe, struct omx_component_t* component, char* compname)
{
  memset(component,0,sizeof(struct omx_component_t));

  pthread_mutex_init(&component->cmd_queue_mutex, NULL);
  pthread_cond_init(&component->cmd_queue_count_cv,NULL);
  component->buf_notempty = 1;
  pthread_cond_init(&component->buf_notempty_cv,NULL);
  pthread_cond_init(&component->eos_cv,NULL);
  pthread_mutex_init(&component->eos_mutex,NULL);

  component->callbacks.EventHandler = omx_event_handler;
  component->callbacks.EmptyBufferDone = omx_empty_buffer_done;
  component->callbacks.FillBufferDone = omx_fill_buffer_done;

  component->pipe = pipe;

  component->name = compname;

  /* Create OMX component */
  OERR(OMX_GetHandle(&component->h, compname, component, &component->callbacks));

  /* Disable all ports */
  omx_disable_all_ports(component);

  return OMX_ErrorNone;
}
Example No. 4
void omx_clock_set_speed(struct omx_component_t *clock, int v)
{
  OMX_TIME_CONFIG_SCALETYPE scale;
  OMX_INIT_STRUCTURE(scale);

  scale.xScale = v;
  OERR(OMX_SetConfig(clock->h, OMX_IndexConfigTimeScale, &scale));
}
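
For reference, xScale is a Q16.16 fixed-point factor in the OMX IL specification, so the call above is typically driven with values like these (pipe here is assumed to be an initialised omx_pipeline_t from the other examples):

omx_clock_set_speed(&pipe->clock, 0);        /* 0.0 - pause playback */
omx_clock_set_speed(&pipe->clock, 1 << 16);  /* 1.0 - normal speed   */
omx_clock_set_speed(&pipe->clock, 2 << 16);  /* 2.0 - double speed   */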
Example No. 5
int	options(int argc, char *const argv[], const char *opts)
{
	static int sp = 1;
	register int c;
	register char *cp;

	if(sp == 1) {
		if(optind >= argc ||
		   (argv[optind][0] != '-' && argv[optind][0] != '/') ||
                   argv[optind][1] == '\0')
			return(EOF);
		else if(strcmp(argv[optind], "--") == 0) {
			optind++;
			return(EOF);
		}
	}
	optopt = c = argv[optind][sp];
	if(c == ':' || (cp=strchr(opts, c)) == 0) {
		OERR(argv[0],"illegal option", c);
		if(argv[optind][++sp] == '\0') {
			optind++;
			sp = 1;
		}
		return('?');
	}
	if(*++cp == ':') {
		if(argv[optind][sp+1] != '\0')
			optarg = &argv[optind++][sp+1];
		else if(++optind >= argc) {
			OERR(argv[0],"option requires an argument", c);
			sp = 1;
			return('?');
		} else
			optarg = argv[optind++];
		sp = 1;
	} else {
		if(argv[optind][++sp] == '\0') {
			sp = 1;
			optind++;
		}
		optarg = 0;
	}
	return(c);
}
Example No. 6
static int is_port_enabled(OMX_HANDLETYPE handle, int port)
{
  OMX_PARAM_PORTDEFINITIONTYPE  portdef;

  OMX_INIT_STRUCTURE(portdef);
  portdef.nPortIndex = port;
  OERR(OMX_GetParameter(handle, OMX_IndexParamPortDefinition, &portdef));

  return (portdef.bEnabled == 0 ? 0 : 1);
}
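
A minimal sketch of how this helper could back a simple polling wait for a port-disable to complete, assuming usleep from <unistd.h> is available. The other examples wait on OMX event callbacks instead (omx_send_command_and_wait0/1), so this is only an illustration:

#include <unistd.h>

static void wait_until_port_disabled(OMX_HANDLETYPE handle, int port)
{
  /* Poll the port definition until the component reports bEnabled == OMX_FALSE */
  while (is_port_enabled(handle, port))
    usleep(10 * 1000);  /* 10 ms between polls */
}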
Example No. 7
static void omx_show_state(struct omx_component_t* component, int port1, int port2, int port3)
{
  OMX_STATETYPE state;
  OMX_PARAM_PORTDEFINITIONTYPE  portdef;

  OERR(OMX_GetState(component->h, &state));

  DEBUGF("%s is in state ",component->name);

  switch (state) {
    case OMX_StateInvalid: DEBUGF("OMX_StateInvalid\n"); break;
    case OMX_StateLoaded: DEBUGF("OMX_StateLoaded\n"); break;
    case OMX_StateIdle: DEBUGF("OMX_StateIdle\n"); break;
    case OMX_StateExecuting: DEBUGF("OMX_StateExecuting\n"); break;
    case OMX_StatePause: DEBUGF("OMX_StatePause\n"); break;
    case OMX_StateWaitForResources: DEBUGF("OMX_StateWaitForResources\n"); break;

    default:
      DEBUGF("0x%08x\n",(unsigned int)state);
  }

  OMX_INIT_STRUCTURE(portdef);

  if (port1) {
    portdef.nPortIndex = port1;
    OERR(OMX_GetParameter(component->h, OMX_IndexParamPortDefinition, &portdef));
    DEBUGF("Port %d is %s\n",port1, (portdef.bEnabled == 0 ? "disabled" : "enabled"));
  }

  if (port2) {
    portdef.nPortIndex = port2;
    OERR(OMX_GetParameter(component->h, OMX_IndexParamPortDefinition, &portdef));
    DEBUGF("Port %d is %s\n",port2, (portdef.bEnabled == 0 ? "disabled" : "enabled"));
  }

  if (port3) {
    portdef.nPortIndex = port3;
    OERR(OMX_GetParameter(component->h, OMX_IndexParamPortDefinition, &portdef));
    DEBUGF("Port %d is %s\n",port3, (portdef.bEnabled == 0 ? "disabled" : "enabled"));
  }
}
Example No. 8
void omx_free_buffers(struct omx_component_t *component, int port)
{
  OMX_BUFFERHEADERTYPE *buf, *prev;
  int i=0;

  buf = component->buffers;
  while (buf) {
    prev = buf->pAppPrivate;
    OERR(OMX_FreeBuffer(component->h, port, buf)); /* This also calls free() */
    buf = prev;
  }
}
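
Since omx_alloc_buffers chains the buffer headers through pAppPrivate, the same walk can be used for bookkeeping. A hedged sketch follows; it is not the project's actual omx_get_free_buffer_count helper, and treating nFilledLen == 0 as "free" is an assumption made only for illustration:

static int count_free_buffers(struct omx_component_t *component)
{
  int n = 0;
  OMX_BUFFERHEADERTYPE *buf = component->buffers;

  /* Walk the pAppPrivate-linked list built by omx_alloc_buffers */
  while (buf) {
    if (buf->nFilledLen == 0)
      n++;
    buf = buf->pAppPrivate;
  }
  return n;
}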
Example No. 9
void omx_set_display_region(struct omx_pipeline_t* pipe, int x, int y, int width, int height)
{
  OMX_CONFIG_DISPLAYREGIONTYPE region;

  OMX_INIT_STRUCTURE(region);
  region.nPortIndex = 90; /* Video render input port */

  region.set = OMX_DISPLAY_SET_DEST_RECT | OMX_DISPLAY_SET_FULLSCREEN | OMX_DISPLAY_SET_NOASPECT;

  region.fullscreen = OMX_FALSE;
  region.noaspect = OMX_TRUE;

  region.dest_rect.x_offset = x;
  region.dest_rect.y_offset = y;
  region.dest_rect.width = width;
  region.dest_rect.height = height;

  DEBUGF("Setting display region\n");
  OERR(OMX_SetParameter(pipe->video_render.h, OMX_IndexConfigDisplayRegion, &region));
}
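
A usage sketch, assuming pipe points at an initialised pipeline with video_render on port 90 as above; offsets and sizes are display pixels:

/* Put the video in a 640x360 window at the top-left corner of the screen */
omx_set_display_region(pipe, 0, 0, 640, 360);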
Example No. 10
static void configure(struct context *ctx)
{
	pthread_t	fpst;
	pthread_attr_t	fpsa;
	OMX_CONFIG_FRAMERATETYPE	*framerate;
	OMX_VIDEO_PARAM_PROFILELEVELTYPE *level;
	OMX_VIDEO_PARAM_BITRATETYPE	*bitrate;
	OMX_BUFFERHEADERTYPE		*encbufs;
	OMX_PARAM_PORTDEFINITIONTYPE	*portdef, *portimgdef;
	OMX_VIDEO_PORTDEFINITIONTYPE	*viddef;
	OMX_VIDEO_PARAM_PORTFORMATTYPE	*pfmt;
	OMX_CONFIG_POINTTYPE		*pixaspect;
	int encportidx, decportidx, resizeportidx;
	OMX_HANDLETYPE	m2, m4, resize;

	encportidx = ctx->encportidx;
	decportidx = ctx->decportidx;
	resizeportidx = ctx->resizeportidx;
	m2 = ctx->m2;
	m4 = ctx->m4;
	resize = ctx->resize;

	MAKEME(portdef, OMX_PARAM_PORTDEFINITIONTYPE);
	MAKEME(portimgdef, OMX_PARAM_PORTDEFINITIONTYPE);
	viddef = &portdef->format.video;
	MAKEME(pixaspect, OMX_CONFIG_POINTTYPE);

	printf("Decoder has changed settings.  Setting up resizer.\n");

/*	We need some parameters from the decoder output to put in the resizer:
	eColorFormat (= YUV420PackedPlanar)
	Width of the frame
	Height of the frame
*/
	portdef->nPortIndex = decportidx+1;
	OERR(OMX_GetParameter(m2, OMX_IndexParamPortDefinition, portdef), ctx->verbose);
	portimgdef->nPortIndex = resizeportidx;

	OERR(OMX_GetParameter(resize, OMX_IndexParamPortDefinition, portimgdef), ctx->verbose);

	portimgdef->format.image.eColorFormat = portdef->format.video.eColorFormat;
	portimgdef->format.image.nFrameWidth = portdef->format.video.nFrameWidth;
	portimgdef->format.image.nFrameHeight = portdef->format.video.nFrameHeight;
	portimgdef->format.image.nStride = 0;
	portimgdef->format.image.nSliceHeight = 0;
	OERR(OMX_SetParameter(resize, OMX_IndexParamPortDefinition, portimgdef), ctx->verbose);

//	The actual output size, if one was requested on the command line
	if(ctx->width) {
	 	portimgdef->format.image.nFrameWidth = ctx->width;
		portimgdef->format.image.nFrameHeight = ctx->height;
	}
	portimgdef->format.image.nStride = 0;
	portimgdef->format.image.nSliceHeight = 0;
	portimgdef->nPortIndex = resizeportidx+1;
	OERR(OMX_SetParameter(resize, OMX_IndexParamPortDefinition, portimgdef), ctx->verbose);
/*	Now set the input parameters for the encoder to the scaled height/width
	(portimgdef is still read here, so only free it afterwards) */
	portdef->format.video.nFrameWidth = portimgdef->format.image.nFrameWidth;
	portdef->format.video.nFrameHeight = portimgdef->format.image.nFrameHeight;
	free(portimgdef);
	portdef->format.video.nStride = 0;
	portdef->format.video.nSliceHeight = 0;
	portdef->nPortIndex = encportidx;
	OERR(OMX_SetParameter(m4, OMX_IndexParamPortDefinition, portdef), ctx->verbose);

/*	setup tunnels */
	OERR(OMX_SetupTunnel(m2, decportidx+1, resize, resizeportidx), ctx->verbose);
	OERR(OMX_SetupTunnel(resize, resizeportidx+1, m4, encportidx), ctx->verbose);

//	OERR(OMX_SendCommand(m2, OMX_CommandStateSet, OMX_StateIdle, NULL), ctx->verbose);
	OERR(OMX_SendCommand(resize, OMX_CommandStateSet, OMX_StateIdle, NULL), ctx->verbose);
	OERR(OMX_SendCommand(m4, OMX_CommandStateSet, OMX_StateIdle, NULL), ctx->verbose);

	viddef = &portdef->format.video;
	if (viddef->nBitrate != 0) {
		viddef->nBitrate *= 3;
		viddef->nBitrate /= 4;
	} else {
		viddef->nBitrate = (1*1024*1024/2);
	}
//		viddef->nBitrate = (2*1024*1024);
//	viddef->nFrameWidth  /= 2;
//	viddef->nFrameHeight /= 2;

	viddef->eCompressionFormat = OMX_VIDEO_CodingAVC;
	viddef->nStride = viddef->nSliceHeight = viddef->eColorFormat = 0;
	portdef->nPortIndex = encportidx+1;
	OERR(OMX_SetParameter(m4, OMX_IndexParamPortDefinition, portdef), ctx->verbose);
	/* portdef is freed later: viddef points into it and is still read below */

	MAKEME(bitrate, OMX_VIDEO_PARAM_BITRATETYPE);
	bitrate->nPortIndex = encportidx+1;
	bitrate->eControlRate = OMX_Video_ControlRateVariable;
	bitrate->nTargetBitrate = viddef->nBitrate;
	OERR(OMX_SetParameter(m4, OMX_IndexParamVideoBitrate, bitrate), ctx->verbose);
	free(bitrate);

	MAKEME(pfmt, OMX_VIDEO_PARAM_PORTFORMATTYPE);
	pfmt->nPortIndex = encportidx+1;
	pfmt->nIndex = 0;
	pfmt->eCompressionFormat = OMX_VIDEO_CodingAVC;
	pfmt->eColorFormat = OMX_COLOR_FormatYUV420PackedPlanar;
	pfmt->xFramerate = viddef->xFramerate;

	pixaspect->nPortIndex = encportidx+1;
	pixaspect->nX = 118;
	pixaspect->nY = 81;
	OERR(OMX_SetConfig(m4, OMX_IndexParamBrcmPixelAspectRatio, pixaspect), ctx->verbose);
	free(pixaspect);

//		DUMPPORT(m4, encportidx+1); exit(0);
	pfmt->nPortIndex = encportidx+1;
	pfmt->nIndex = 1;
	pfmt->eCompressionFormat = OMX_VIDEO_CodingAVC;
	pfmt->eColorFormat = 0;
	pfmt->xFramerate = 0; //viddef->xFramerate;
	OERR(OMX_SetParameter(m4, OMX_IndexParamVideoPortFormat, pfmt), ctx->verbose);
	free(pfmt);
	
	MAKEME(framerate, OMX_CONFIG_FRAMERATETYPE);
	framerate->nPortIndex = encportidx+1;
	framerate->xEncodeFramerate = viddef->xFramerate;
	OERR(OMX_SetParameter(m4, OMX_IndexConfigVideoFramerate, framerate), ctx->verbose);
	free(framerate);
	free(portdef);	/* last use of viddef was above, so it is safe to free now */

#if 0 /* Doesn't seem to apply to video? */
printf("Interlacing: %d\n", ic->streams[vidindex]->codec->field_order);
	if (0 || ic->streams[vidindex]->codec->field_order == AV_FIELD_TT) {
		interlace->nPortIndex = encportidx+1;
		interlace->eMode = OMX_InterlaceFieldsInterleavedUpperFirst;
		interlace->bRepeatFirstField = 0;
		OERR(OMX_SetParameter(m4, OMX_IndexConfigCommonInterlace,
			interlace), ctx->verbose);
	}
#endif

	MAKEME(level, OMX_VIDEO_PARAM_PROFILELEVELTYPE);
	level->nPortIndex = encportidx+1;
	OERR(OMX_GetParameter(m4, OMX_IndexParamVideoProfileLevelCurrent, level), ctx->verbose);
	if (ctx->verbose) printf("Current level:\t\t%d\nCurrent profile:\t%d\n",
		level->eLevel, level->eProfile);
	OERR(OMX_SetParameter(m4, OMX_IndexParamVideoProfileLevelCurrent, level), ctx->verbose);
	free(level);
	ctx->encbufs = encbufs = allocbufs(m4, encportidx+1, 1);
	OERR(OMX_SendCommand(m2, OMX_CommandPortEnable, decportidx+1, NULL), ctx->verbose);
	OERR(OMX_SendCommand(resize, OMX_CommandPortEnable, resizeportidx, NULL), ctx->verbose);
	OERR(OMX_SendCommand(resize, OMX_CommandPortEnable, resizeportidx+1, NULL), ctx->verbose);
	OERR(OMX_SendCommand(m4, OMX_CommandPortEnable, encportidx, NULL), ctx->verbose);
	OERR(OMX_SendCommand(m4, OMX_CommandStateSet, OMX_StateExecuting, NULL), ctx->verbose);
	OERR(OMX_SendCommand(resize, OMX_CommandStateSet, OMX_StateExecuting, NULL), ctx->verbose);
	sleep(1);
	OERR(OMX_FillThisBuffer(m4, encbufs), ctx->verbose);

/* Dump current port states: */
	if (ctx->verbose) {
		dumpport(m2, decportidx);
		dumpport(m2, decportidx+1);
		dumpport(resize, resizeportidx);
		dumpport(resize, resizeportidx+1);
		dumpport(m4, encportidx);
		dumpport(m4, encportidx+1);
	}

	if (ctx->verbose) atexit(dumpportstate);
	pthread_attr_init(&fpsa);
	pthread_attr_setdetachstate(&fpsa, PTHREAD_CREATE_DETACHED);
	pthread_create(&fpst, &fpsa, fps, NULL);
}
Example No. 11
/* Print some useful information about the state of the port: */
static void dumpport(OMX_HANDLETYPE handle, int port)
{
  OMX_VIDEO_PORTDEFINITIONTYPE  *viddef;
  OMX_PARAM_PORTDEFINITIONTYPE  portdef;

  OMX_INIT_STRUCTURE(portdef);
  portdef.nPortIndex = port;
  OERR(OMX_GetParameter(handle, OMX_IndexParamPortDefinition, &portdef));

  printf("Port %d is %s, %s\n", portdef.nPortIndex,
    (portdef.eDir == 0 ? "input" : "output"),
    (portdef.bEnabled == 0 ? "disabled" : "enabled"));
  printf("Wants %d bufs, needs %d, size %d, enabled: %d, pop: %d, "
    "aligned %d\n", portdef.nBufferCountActual,
    portdef.nBufferCountMin, portdef.nBufferSize,
    portdef.bEnabled, portdef.bPopulated,
    portdef.nBufferAlignment);
  viddef = &portdef.format.video;

  switch (portdef.eDomain) {
  case OMX_PortDomainVideo:
    printf("Video type is currently:\n"
      "\tMIME:\t\t%s\n"
      "\tNative:\t\t%p\n"
      "\tWidth:\t\t%d\n"
      "\tHeight:\t\t%d\n"
      "\tStride:\t\t%d\n"
      "\tSliceHeight:\t%d\n"
      "\tBitrate:\t%d\n"
      "\tFramerate:\t%d (%x); (%f)\n"
      "\tError hiding:\t%d\n"
      "\tCodec:\t\t%d\n"
      "\tColour:\t\t%d\n",
      viddef->cMIMEType, viddef->pNativeRender,
      viddef->nFrameWidth, viddef->nFrameHeight,
      viddef->nStride, viddef->nSliceHeight,
      viddef->nBitrate,
      viddef->xFramerate, viddef->xFramerate,
      ((float)viddef->xFramerate/(float)65536),
      viddef->bFlagErrorConcealment,
      viddef->eCompressionFormat, viddef->eColorFormat);
    break;
  case OMX_PortDomainImage:
    printf("Image type is currently:\n"
      "\tMIME:\t\t%s\n"
      "\tNative:\t\t%p\n"
      "\tWidth:\t\t%d\n"
      "\tHeight:\t\t%d\n"
      "\tStride:\t\t%d\n"
      "\tSliceHeight:\t%d\n"
      "\tError hiding:\t%d\n"
      "\tCodec:\t\t%d\n"
      "\tColour:\t\t%d\n",
      portdef.format.image.cMIMEType,
      portdef.format.image.pNativeRender,
      portdef.format.image.nFrameWidth,
      portdef.format.image.nFrameHeight,
      portdef.format.image.nStride,
      portdef.format.image.nSliceHeight,
      portdef.format.image.bFlagErrorConcealment,
      portdef.format.image.eCompressionFormat,
      portdef.format.image.eColorFormat);     
    break;
/* Feel free to add others. */
  default:
    break;
  }
}
Example No. 12
void omx_teardown_pipeline(struct omx_pipeline_t* pipe)
{
   OMX_BUFFERHEADERTYPE *buf;
   int i=1;

   DEBUGF("[vcodec] omx_teardown pipeline:\n");
   DEBUGF("pipe->video_decode.port_settings_changed = %d\n",pipe->video_decode.port_settings_changed);
   DEBUGF("pipe->image_fx.port_settings_changed = %d\n",pipe->image_fx.port_settings_changed);
   DEBUGF("pipe->video_scheduler.port_settings_changed = %d\n",pipe->video_scheduler.port_settings_changed);
   //dumpport(pipe->video_decode.h,130);

#if 0
   /* Indicate end of video stream */
   buf = get_next_buffer(&pipe->video_decode);

   buf->nFilledLen = 0;
   buf->nFlags = OMX_BUFFERFLAG_TIME_UNKNOWN | OMX_BUFFERFLAG_EOS;
   
   OERR(OMX_EmptyThisBuffer(pipe->video_decode.h, buf));

   /* NOTE: Three events are sent after the previous command:

      [EVENT] Got an event of type 4 on video_decode 0x426a10 (d1: 83, d2 1)
      [EVENT] Got an event of type 4 on video_scheduler 0x430d10 (d1: b, d2 1)
      [EVENT] Got an event of type 4 on video_render 0x430b30 (d1: 5a, d2 1)  5a = port (90) 1 = OMX_BUFFERFLAG_EOS
   */

#endif

#if 0
   DEBUGF("[vcodec] omx_teardown pipeline 2\n");
   /* Wait for video_decode to shutdown */
   pthread_mutex_lock(&pipe->video_decode.eos_mutex);
   while (!pipe->video_decode.eos)
     pthread_cond_wait(&pipe->video_decode.eos_cv,&pipe->video_decode.eos_mutex);
   pthread_mutex_unlock(&pipe->video_decode.eos_mutex);
#endif
         
   DEBUGF("[vcodec] omx_teardown pipeline 1\n");

   /* Transition all components to Idle, if they have been initialised */
   omx_send_command_and_wait(&pipe->video_decode, OMX_CommandStateSet, OMX_StateIdle, NULL); 
   omx_send_command_and_wait(&pipe->clock, OMX_CommandStateSet, OMX_StateIdle, NULL);
   DEBUGF("pipe->do_deinterlace=%d, pipe->image_fx=%d\n",pipe->do_deinterlace,(int)pipe->image_fx.h);
   if (pipe->video_decode.port_settings_changed == 2) {
      if (pipe->do_deinterlace) { 
        omx_send_command_and_wait(&pipe->image_fx, OMX_CommandStateSet, OMX_StateIdle, NULL); 
      } else {
        omx_send_command_and_wait(&pipe->video_scheduler, OMX_CommandStateSet, OMX_StateIdle, NULL); 
      }
   }
   if (pipe->image_fx.port_settings_changed == 2) {
     omx_send_command_and_wait(&pipe->video_scheduler, OMX_CommandStateSet, OMX_StateIdle, NULL);
   }
   if (pipe->video_scheduler.port_settings_changed == 2) {
     omx_send_command_and_wait(&pipe->video_render, OMX_CommandStateSet, OMX_StateIdle, NULL);
   }
   omx_send_command_and_wait(&pipe->audio_render, OMX_CommandStateSet, OMX_StateIdle, NULL);

#if 0
   DEBUGF("[vcodec] omx_teardown pipeline 2\n");
   /* Wait for video_render to shutdown */
   pthread_mutex_lock(&pipe->video_render.eos_mutex);
   while (!pipe->video_render.eos)
     pthread_cond_wait(&pipe->video_render.eos_cv,&pipe->video_render.eos_mutex);
   pthread_mutex_unlock(&pipe->video_render.eos_mutex);
#endif

/* 
  Pipeline is as follows:

[video data] -> 130 video_decode 131 -> 190 image_fx 191 -> 10 video_scheduler 11 -> 90 video_render
                                                clock 81 -> 12 video_scheduler
                                                clock 80 -> 101 audio_render
                                            [audio data] -> 100 audio_render
*/

   /* Flush entrances to pipeline */
   omx_send_command_and_wait(&pipe->video_decode,OMX_CommandFlush,130,NULL);
   omx_send_command_and_wait(&pipe->audio_render,OMX_CommandFlush,100,NULL);

   /* Flush all tunnels */
   DEBUGF("[vcodec] omx_teardown pipeline 3\n");
   if (pipe->do_deinterlace) {
     omx_flush_tunnel(&pipe->video_decode, 131, &pipe->image_fx, 190);
     omx_flush_tunnel(&pipe->image_fx, 191, &pipe->video_scheduler, 10);
   } else {
     omx_flush_tunnel(&pipe->video_decode, 131, &pipe->video_scheduler, 10);
   }
   DEBUGF("[vcodec] omx_teardown pipeline 4\n");
   omx_flush_tunnel(&pipe->video_scheduler, 11, &pipe->video_render, 90);
   omx_flush_tunnel(&pipe->clock, 81, &pipe->video_scheduler, 12);

   DEBUGF("[vcodec] omx_teardown pipeline 2b\n");

   omx_send_command_and_wait(&pipe->video_scheduler,OMX_CommandFlush,10,NULL);
   DEBUGF("[vcodec] omx_teardown pipeline 5\n");

   omx_flush_tunnel(&pipe->clock, 80, &pipe->audio_render, 101);

   /* Disable audio_render input port and buffers */
   omx_send_command_and_wait0(&pipe->audio_render, OMX_CommandPortDisable, 100, NULL);
   omx_free_buffers(&pipe->audio_render, 100);
   omx_send_command_and_wait1(&pipe->audio_render, OMX_CommandPortDisable, 100, NULL);
   DEBUGF("[vcodec] omx_teardown pipeline 9\n");

   /* Scheduler -> render tunnel */
   if (pipe->video_scheduler.port_settings_changed == 2) {
     omx_send_command_and_wait(&pipe->video_scheduler, OMX_CommandPortDisable, 11, NULL);
     omx_send_command_and_wait(&pipe->video_render, OMX_CommandPortDisable, 90, NULL);

     omx_send_command_and_wait(&pipe->video_scheduler, OMX_CommandPortDisable, 10, NULL);
   }

   if ((pipe->image_fx.port_settings_changed == 2) && (pipe->do_deinterlace)) {
     omx_send_command_and_wait(&pipe->image_fx, OMX_CommandPortDisable, 190, NULL);
     omx_send_command_and_wait(&pipe->image_fx, OMX_CommandPortDisable, 191, NULL);
   }

   DEBUGF("[vcodec] omx_teardown pipeline 8a\n");

   //dumpport(pipe->video_scheduler.h,10);

   /* Teardown tunnels */
/* 
  Pipeline is as follows:

[video data] -> 130 video_decode 131 -> 190 image_fx 191 -> 10 video_scheduler 11 -> 90 video_render
                                                clock 81 -> 12 video_scheduler
                                                clock 80 -> 101 audio_render
                                            [audio data] -> 100 audio_render
*/
   //dumpport(pipe->video_decode.h,131);
   OERR(OMX_SetupTunnel(pipe->video_scheduler.h, 10, NULL, 0));

   DEBUGF("[vcodec] omx_teardown pipeline 10\n");

   /* NOTE: The clock disable doesn't complete until after the video scheduler port is 
      disabled (but it completes before the video scheduler port disabling completes). */
   OERR(OMX_SendCommand(pipe->clock.h, OMX_CommandPortDisable, 80, NULL));
   omx_send_command_and_wait(&pipe->audio_render, OMX_CommandPortDisable, 101, NULL);
   OERR(OMX_SendCommand(pipe->clock.h, OMX_CommandPortDisable, 81, NULL));
   omx_send_command_and_wait(&pipe->video_scheduler, OMX_CommandPortDisable, 12, NULL);

   DEBUGF("[vcodec] omx_teardown pipeline 12b\n");

   if (pipe->do_deinterlace) {
     OERR(OMX_SetupTunnel(pipe->image_fx.h, 190, NULL, 0));
     OERR(OMX_SetupTunnel(pipe->image_fx.h, 191, NULL, 0));
   }

   DEBUGF("[vcodec] omx_teardown pipeline 13\n");

   OERR(OMX_SetupTunnel(pipe->video_scheduler.h, 11, NULL, 0));
   OERR(OMX_SetupTunnel(pipe->video_render.h, 90, NULL, 0));

   OERR(OMX_SetupTunnel(pipe->clock.h, 81, NULL, 0));
   OERR(OMX_SetupTunnel(pipe->video_scheduler.h, 12, NULL, 0));

   DEBUGF("[vcodec] omx_teardown pipeline 13b\n");

   OERR(OMX_SetupTunnel(pipe->clock.h, 80, NULL, 0));
   OERR(OMX_SetupTunnel(pipe->audio_render.h, 101, NULL, 0));

   DEBUGF("[vcodec] omx_teardown pipeline 8b\n");


/* 
  Pipeline is as follows:

[video data] -> 130 video_decode 131 -> 190 image_fx 191 -> 10 video_scheduler 11 -> 90 video_render
                                                clock 81 -> 12 video_scheduler
                                                clock 80 -> 101 audio_render
                                            [audio data] -> 100 audio_render
*/

   omx_show_state(&pipe->video_decode,130,131,0);
   dumpport(pipe->video_decode.h,131);
   omx_show_state(&pipe->video_scheduler,10,11,12);
   if (pipe->do_deinterlace) { omx_show_state(&pipe->image_fx,190,191,0); }
   omx_show_state(&pipe->video_render,90,0,0);
   omx_show_state(&pipe->audio_render,100,101,0);
   omx_show_state(&pipe->clock,80,81,0);

   if (pipe->video_decode.port_settings_changed == 2) {
     //dumpport(pipe->video_decode.h,131);
     omx_send_command_and_wait(&pipe->video_decode, OMX_CommandPortDisable, 131, NULL);
   }

   DEBUGF("[vcodec] omx_teardown pipeline 11\n");

   /* Disable video_decode input port and buffers */
   //dumpport(pipe->video_decode.h,130);
   omx_send_command_and_wait0(&pipe->video_decode, OMX_CommandPortDisable, 130, NULL);
   DEBUGF("[vcodec] omx_teardown pipeline 6\n");
   omx_free_buffers(&pipe->video_decode, 130);
   DEBUGF("[vcodec] omx_teardown pipeline 7\n");
   //omx_send_command_and_wait1(&pipe->video_decode, OMX_CommandPortDisable, 130, NULL);

   //dumpport(pipe->video_decode.h,130);
   if (is_port_enabled(pipe->video_decode.h, 130)) {
     fprintf(stderr,"Unexpected error video_decode port 130 is not disabled\n");
     exit(1);
   }

   DEBUGF("[vcodec] omx_teardown pipeline 12\n");

   OERR(OMX_SetupTunnel(pipe->video_decode.h, 131, NULL, 0));

   DEBUGF("[vcodec] omx_teardown pipeline 15\n");

   omx_show_state(&pipe->video_decode,130,131,0);

   /* Transition all components to Loaded */
   DEBUGF("[vcodec] omx_teardown pipeline 15a\n");
   omx_send_command_and_wait(&pipe->video_decode, OMX_CommandStateSet, OMX_StateLoaded, NULL);
   DEBUGF("[vcodec] omx_teardown pipeline 15b\n");
   omx_send_command_and_wait(&pipe->video_scheduler, OMX_CommandStateSet, OMX_StateLoaded, NULL);
   DEBUGF("[vcodec] omx_teardown pipeline 15c\n");
   if (((pipe->video_decode.port_settings_changed == 2) && (pipe->do_deinterlace)) || (pipe->image_fx.port_settings_changed == 2)) {
     omx_send_command_and_wait(&pipe->video_render, OMX_CommandStateSet, OMX_StateLoaded, NULL);
   }
   DEBUGF("[vcodec] omx_teardown pipeline 15d\n");
   omx_send_command_and_wait(&pipe->audio_render, OMX_CommandStateSet, OMX_StateLoaded, NULL);
   DEBUGF("[vcodec] omx_teardown pipeline 15e\n");
   omx_send_command_and_wait(&pipe->clock, OMX_CommandStateSet, OMX_StateLoaded, NULL);
   DEBUGF("[vcodec] omx_teardown pipeline 15f\n");
   if (pipe->do_deinterlace) { omx_send_command_and_wait(&pipe->image_fx, OMX_CommandStateSet, OMX_StateLoaded, NULL); }

   DEBUGF("[vcodec] omx_teardown pipeline 16\n");
   /* Finally free the component handles */
   OERR(OMX_FreeHandle(pipe->video_decode.h));
   OERR(OMX_FreeHandle(pipe->video_scheduler.h));
   OERR(OMX_FreeHandle(pipe->video_render.h));
   OERR(OMX_FreeHandle(pipe->audio_render.h));
   OERR(OMX_FreeHandle(pipe->clock.h));
   if (pipe->do_deinterlace) { OERR(OMX_FreeHandle(pipe->image_fx.h)); }
   DEBUGF("[vcodec] omx_teardown pipeline 17\n");
}
Example No. 13
OMX_ERRORTYPE omx_setup_pipeline(struct omx_pipeline_t* pipe, OMX_VIDEO_CODINGTYPE video_codec, char* audio_dest, int is_hd)
{
  OMX_VIDEO_PARAM_PORTFORMATTYPE format;
  OMX_TIME_CONFIG_CLOCKSTATETYPE cstate;

  OMX_CONFIG_BOOLEANTYPE configBoolTrue;
  OMX_INIT_STRUCTURE(configBoolTrue);
  configBoolTrue.bEnabled = OMX_TRUE;

  pipe->do_deinterlace = 0;

  if (((is_hd == 0) && (global_settings.deinterlace_sd)) || ((is_hd == 1) && (global_settings.deinterlace_hd))) {
    DEBUGF("Enabling de-interlace\n");
    pipe->do_deinterlace = 1;
  }

  omx_init_component(pipe, &pipe->video_decode, "OMX.broadcom.video_decode");
  omx_init_component(pipe, &pipe->video_render, "OMX.broadcom.video_render");

  if (pipe->do_deinterlace) {
    DEBUGF("Enabling de-interlacer\n");
    /* De-interlacer.  Input port 190, Output port 191.  Insert between decoder and scheduler */
    omx_init_component(pipe, &pipe->image_fx, "OMX.broadcom.image_fx");

    /* Configure image_fx */
    omx_send_command_and_wait(&pipe->image_fx, OMX_CommandStateSet, OMX_StateIdle, NULL);

    OMX_CONFIG_IMAGEFILTERPARAMSTYPE imagefilter;
    OMX_INIT_STRUCTURE(imagefilter);
    imagefilter.nPortIndex=191;
    imagefilter.nNumParams=1;
    imagefilter.nParams[0]=3; //???
    imagefilter.eImageFilter=OMX_ImageFilterDeInterlaceAdvanced;

    OERR(OMX_SetConfig(pipe->image_fx.h, OMX_IndexConfigCommonImageFilterParameters, &imagefilter));
  } else {
    memset(&pipe->image_fx,0,sizeof(struct omx_component_t));
  }


  omx_init_component(pipe, &pipe->clock, "OMX.broadcom.clock");

  OMX_INIT_STRUCTURE(cstate);
  cstate.eState = OMX_TIME_ClockStateWaitingForStartTime;
  cstate.nWaitMask = OMX_CLOCKPORT0|OMX_CLOCKPORT1;
  OERR(OMX_SetParameter(pipe->clock.h, OMX_IndexConfigTimeClockState, &cstate));

  OMX_TIME_CONFIG_ACTIVEREFCLOCKTYPE refClock;
  OMX_INIT_STRUCTURE(refClock);
  refClock.eClock = OMX_TIME_RefClockAudio;
  OERR(OMX_SetConfig(pipe->clock.h, OMX_IndexConfigTimeActiveRefClock, &refClock));

  omx_init_component(pipe, &pipe->video_scheduler, "OMX.broadcom.video_scheduler");

  /* Initialise audio output - hardcoded to 48000/Stereo/16-bit */
  omx_init_component(pipe, &pipe->audio_render, "OMX.broadcom.audio_render");

  OMX_PARAM_PORTDEFINITIONTYPE param;
  OMX_INIT_STRUCTURE(param);
  param.nPortIndex = 100;

  OERR(OMX_GetParameter(pipe->audio_render.h, OMX_IndexParamPortDefinition, &param));
  param.nBufferSize = 8192;  /* Needs to be big enough for one frame of data */
  param.nBufferCountActual = 32; /* Arbitrary */
  OERR(OMX_SetParameter(pipe->audio_render.h, OMX_IndexParamPortDefinition, &param));

  omx_config_pcm(&pipe->audio_render, 48000, 2, 16, audio_dest);

  OERR(OMX_SetConfig(pipe->audio_render.h, OMX_IndexConfigBrcmClockReferenceSource, &configBoolTrue));

  omx_send_command_and_wait(&pipe->audio_render, OMX_CommandStateSet, OMX_StateIdle, NULL);

  omx_send_command_and_wait0(&pipe->audio_render, OMX_CommandPortEnable, 100, NULL);
  omx_alloc_buffers(&pipe->audio_render, 100);
  omx_send_command_and_wait1(&pipe->audio_render, OMX_CommandPortEnable, 100, NULL);


  /* Setup clock tunnels first */
  omx_send_command_and_wait(&pipe->clock, OMX_CommandStateSet, OMX_StateIdle, NULL);

  OERR(OMX_SetupTunnel(pipe->clock.h, 80, pipe->audio_render.h, 101));
  OERR(OMX_SetupTunnel(pipe->clock.h, 81, pipe->video_scheduler.h, 12));

  OERR(OMX_SendCommand(pipe->clock.h, OMX_CommandPortEnable, 80, NULL));
  OERR(OMX_SendCommand(pipe->video_scheduler.h, OMX_CommandPortEnable, 12, NULL));

  OERR(OMX_SendCommand(pipe->clock.h, OMX_CommandPortEnable, 81, NULL));
  OERR(OMX_SendCommand(pipe->audio_render.h, OMX_CommandPortEnable, 101, NULL));

  omx_send_command_and_wait(&pipe->video_scheduler, OMX_CommandStateSet, OMX_StateIdle, NULL);

  omx_send_command_and_wait(&pipe->clock, OMX_CommandStateSet, OMX_StateExecuting, NULL);

  /* Configure video_decoder */
  omx_send_command_and_wait(&pipe->video_decode, OMX_CommandStateSet, OMX_StateIdle, NULL);

  /* Enable lazy image pool destroying */
  OERR(OMX_SetConfig(pipe->video_decode.h, OMX_IndexParamBrcmLazyImagePoolDestroy, &configBoolTrue));

  OMX_INIT_STRUCTURE(format);
  format.nPortIndex = 130;
  format.eCompressionFormat = video_codec;

  OERR(OMX_SetParameter(pipe->video_decode.h, OMX_IndexParamVideoPortFormat, &format));

   /* Enable error concealment for H264 only - without this, HD channels don't work reliably */
  if (video_codec == OMX_VIDEO_CodingAVC) {
     OMX_PARAM_BRCMVIDEODECODEERRORCONCEALMENTTYPE ec;
     OMX_INIT_STRUCTURE(ec);
     ec.bStartWithValidFrame = OMX_FALSE;
     OERR(OMX_SetParameter(pipe->video_decode.h, OMX_IndexParamBrcmVideoDecodeErrorConcealment, &ec));
  }

  /* Enable video decoder input port */
  omx_send_command_and_wait0(&pipe->video_decode, OMX_CommandPortEnable, 130, NULL);

  /* Allocate input buffers */
  omx_alloc_buffers(&pipe->video_decode, 130);

  /* Wait for input port to be enabled */
  omx_send_command_and_wait1(&pipe->video_decode, OMX_CommandPortEnable, 130, NULL);

  /* Change video_decode to OMX_StateExecuting */
  omx_send_command_and_wait(&pipe->video_decode, OMX_CommandStateSet, OMX_StateExecuting, NULL);

  /* Change audio_render to OMX_StateExecuting */
  omx_send_command_and_wait(&pipe->audio_render, OMX_CommandStateSet, OMX_StateExecuting, NULL);

  /* Enable passing of buffer marks */
  OERR(OMX_SetParameter(pipe->video_decode.h, OMX_IndexParamPassBufferMarks, &configBoolTrue));
  OERR(OMX_SetParameter(pipe->video_render.h, OMX_IndexParamPassBufferMarks, &configBoolTrue));

  return OMX_ErrorNone;
}
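
Tying the examples above together, the intended lifecycle appears to be: set up the pipeline, feed demuxed data into video_decode port 130 and audio_render port 100, then tear everything down. A hedged sketch only; the audio destination strings "hdmi" and "local" are assumptions about what the Broadcom audio_render accepts:

struct omx_pipeline_t pipe;

omx_setup_pipeline(&pipe, OMX_VIDEO_CodingAVC, "hdmi", 1 /* is_hd */);

/* ... loop: copy demuxed video into buffers obtained from video_decode port 130
   and PCM audio into buffers from audio_render port 100, submitting each with
   OMX_EmptyThisBuffer, until end of stream ... */

omx_teardown_pipeline(&pipe);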
Example No. 14
/* will be feeding stuff into the encoding pipeline */
void *
decode_thread(void *context) {

    OMX_BUFFERHEADERTYPE *input_buffer; // buffer taken from the OMX decoder
    OMX_PARAM_PORTDEFINITIONTYPE encoder_config;
    OMX_PARAM_PORTDEFINITIONTYPE decoder_config;
    OMX_PARAM_PORTDEFINITIONTYPE deinterlacer_config;
    OMX_VIDEO_PARAM_PORTFORMATTYPE encoder_format_config; //used for the output of the encoder
    OMX_VIDEO_PARAM_BITRATETYPE encoder_bitrate_config; //used for the output of the encoder
    struct transcoder_ctx_t *ctx = (struct transcoder_ctx_t *) context;
    struct packet_t *current_packet;
    int bytes_left;
    uint8_t *p; // points to currently copied buffer

    //    omx_setup_encoding_pipeline(&decoder_ctx->pipeline, OMX_VIDEO_CodingAVC);
    omx_setup_encoding_pipeline(&ctx->pipeline, OMX_VIDEO_CodingMPEG2);

    // main loop that will poll packets and render
    while (ctx->input_video_queue->queue_count != 0 || ctx->input_video_queue->queue_finished != 1) {
        //TODO a memory barrier is going to be needed so that we don't race
        current_packet = packet_queue_get_next_item(ctx->input_video_queue);
        p = current_packet->data;
        bytes_left = current_packet->data_length;

        while (bytes_left > 0) {

            fprintf(stderr, "OMX buffers: v: %02d/20, vcodec queue: %4d\r", omx_get_free_buffer_count(&ctx->pipeline.video_decode), ctx->input_video_queue->queue_count);
            input_buffer = omx_get_next_input_buffer(&ctx->pipeline.video_decode); // This will block if there are no empty buffers

            // copy at most the length of the OMX buf
            int copy_length = OMX_MIN(bytes_left, input_buffer->nAllocLen);

            memcpy(input_buffer->pBuffer, p, copy_length);
            p += copy_length;
            bytes_left -= copy_length;

            input_buffer->nFilledLen = copy_length;
            input_buffer->nTimeStamp = pts_to_omx(current_packet->PTS);

            if (ctx->first_packet) {
                input_buffer->nFlags = OMX_BUFFERFLAG_STARTTIME;
                ctx->first_packet = 0;
            } else {
                //taken from ilclient
                input_buffer->nFlags = OMX_BUFFERFLAG_TIME_UNKNOWN;
            }

            if (current_packet->flags & AV_PKT_FLAG_KEY) {
                input_buffer->nFlags |= OMX_BUFFERFLAG_SYNCFRAME;
            }

            if(bytes_left == 0) {
                input_buffer->nFlags |= OMX_BUFFERFLAG_ENDOFFRAME;
            }

            //configure the resizer after the decoder has got output
            if (ctx->pipeline.video_decode.port_settings_changed == 1) {
                ctx->pipeline.video_decode.port_settings_changed = 0;
#if 0
                int x = 640;
                int y = 480;

                // rounding the dimensions up to the next multiple of 16.
                x += 0x0f;
                x &= ~0x0f;
                y += 0x0f;
                y &= ~0x0f;
                //the above code is used if resizer is used

                //get information about the decoded video
                OMX_INIT_STRUCTURE(decoder_config);
                decoder_config.nPortIndex = 131;
                OERR(OMX_GetParameter(ctx->pipeline.video_decode.h, OMX_IndexParamPortDefinition, &decoder_config));

                decoder_config.nPortIndex = 190;
                OERR(OMX_GetParameter(ctx->pipeline.image_fx.h, OMX_IndexParamPortDefinition, &decoder_config));
                decoder_config.nPortIndex = 191;
                OERR(OMX_GetParameter(ctx->pipeline.image_fx.h, OMX_IndexParamPortDefinition, &decoder_config));
#endif
                OERR(OMX_SetupTunnel(ctx->pipeline.video_decode.h, 131, ctx->pipeline.image_fx.h, 190));
                omx_send_command_and_wait(&ctx->pipeline.video_decode, OMX_CommandPortEnable, 131, NULL);
                omx_send_command_and_wait(&ctx->pipeline.image_fx, OMX_CommandPortEnable, 190, NULL);
                omx_send_command_and_wait(&ctx->pipeline.image_fx, OMX_CommandStateSet, OMX_StateExecuting, NULL);

                fprintf(stderr, "configuring deinterlacer done\n");
            }

            if(ctx->pipeline.image_fx.port_settings_changed == 1) {
                OMX_ERRORTYPE error;
                ctx->pipeline.image_fx.port_settings_changed = 0;
#if 0
                //get info from deinterlacer output
                OMX_INIT_STRUCTURE(deinterlacer_config);
                deinterlacer_config.nPortIndex = 191;
                OERR(OMX_GetParameter(ctx->pipeline.image_fx.h, OMX_IndexParamPortDefinition, &deinterlacer_config));

                //get default from encoder input
                OMX_INIT_STRUCTURE(encoder_config);
                encoder_config.nPortIndex = 200;
                OERR(OMX_GetParameter(ctx->pipeline.video_encode.h, OMX_IndexParamPortDefinition, &encoder_config));
                //modify it with deinterlacer
                encoder_config.format.video.nFrameHeight = deinterlacer_config.format.image.nFrameHeight;
                encoder_config.format.video.nFrameWidth = deinterlacer_config.format.image.nFrameWidth;
                encoder_config.format.video.eCompressionFormat = deinterlacer_config.format.image.eCompressionFormat;
                encoder_config.format.video.eColorFormat = deinterlacer_config.format.image.eColorFormat;
                encoder_config.format.video.nSliceHeight = deinterlacer_config.format.image.nSliceHeight;
                encoder_config.format.video.nStride = deinterlacer_config.format.image.nStride;
                //and feed it
                OERR(OMX_SetParameter(ctx->pipeline.video_encode.h, OMX_IndexParamPortDefinition, &encoder_config));
#endif
                //configure encoder output format
                OMX_INIT_STRUCTURE(encoder_format_config);
                encoder_format_config.nPortIndex = 201; //encoder output port
                encoder_format_config.eCompressionFormat = OMX_VIDEO_CodingAVC;
                OERR(OMX_SetParameter(ctx->pipeline.video_encode.h, OMX_IndexParamVideoPortFormat, &encoder_format_config));

                //configure encoder output bitrate
                OMX_INIT_STRUCTURE(encoder_bitrate_config);
                encoder_bitrate_config.nPortIndex = 201;
                encoder_bitrate_config.eControlRate = OMX_Video_ControlRateVariable; //var bitrate
                encoder_bitrate_config.nTargetBitrate = ENCODED_BITRATE; //1 mbit
                OERR(OMX_SetParameter(ctx->pipeline.video_encode.h, OMX_IndexParamVideoBitrate, &encoder_bitrate_config));

                //setup tunnel from decoder to encoder
                OERR(OMX_SetupTunnel(ctx->pipeline.image_fx.h, 191, ctx->pipeline.video_encode.h, 200));

                //set encoder to idle after we have finished configuring it
                omx_send_command_and_wait0(&ctx->pipeline.video_encode, OMX_CommandStateSet, OMX_StateIdle, NULL);

                //allocate buffers for output of the encoder
                //send the enable command, which won't complete until the bufs are alloc'ed
                omx_send_command_and_wait0(&ctx->pipeline.video_encode, OMX_CommandPortEnable, 201, NULL);
                omx_alloc_buffers(&ctx->pipeline.video_encode, 201); //allocate output buffers
                //block until the port is fully enabled
                omx_send_command_and_wait1(&ctx->pipeline.video_encode, OMX_CommandPortEnable, 201, NULL);

                omx_send_command_and_wait1(&ctx->pipeline.video_encode, OMX_CommandStateSet, OMX_StateIdle, NULL);

                //enable the two ports
                omx_send_command_and_wait0(&ctx->pipeline.image_fx, OMX_CommandPortEnable, 191, NULL);
                omx_send_command_and_wait0(&ctx->pipeline.video_encode, OMX_CommandPortEnable, 200, NULL);
                omx_send_command_and_wait1(&ctx->pipeline.video_encode, OMX_CommandPortEnable, 200, NULL);
                omx_send_command_and_wait1(&ctx->pipeline.image_fx, OMX_CommandPortEnable, 191, NULL);

                omx_send_command_and_wait(&ctx->pipeline.video_encode, OMX_CommandStateSet, OMX_StateExecuting, NULL);
                fprintf(stderr, "finished configuring encoder\n");
            }

            if(ctx->pipeline.video_encode.port_settings_changed == 1) {
                fprintf(stderr, "encoder enabled\n");
                ctx->pipeline.video_encode.port_settings_changed = 0;
                //signal the consumer thread it can start polling for data
                ctx->pipeline.video_encode.is_running = 1;
                pthread_cond_signal(&ctx->pipeline.video_encode.is_running_cv);
            }

            OERR(OMX_EmptyThisBuffer(ctx->pipeline.video_decode.h, input_buffer));
        }

        packet_queue_free_packet(current_packet, 1);
        current_packet = NULL;
    }

    printf("Finishing stream \n");
    /* Indicate end of video stream */
    input_buffer = omx_get_next_input_buffer(&ctx->pipeline.video_decode);

    input_buffer->nFilledLen = 0;
    input_buffer->nFlags = OMX_BUFFERFLAG_TIME_UNKNOWN | OMX_BUFFERFLAG_EOS;

    OERR(OMX_EmptyThisBuffer(ctx->pipeline.video_decode.h, input_buffer));

    // omx_teardown_pipeline(&decoder_ctx->pipeline);
    return NULL;
}
Example No. 15
int main(int argc, char *argv[])
{
	AVFormatContext	*ic;
	char		*iname;
	char		*oname;
	char		*size;
	int		err;
	int		vidindex;
	int		i, j;
	OMX_ERRORTYPE	oerr;
	OMX_HANDLETYPE	m2 = NULL, m4 = NULL, resize = NULL;
	OMX_VIDEO_PARAM_PORTFORMATTYPE	*pfmt;
	OMX_PORT_PARAM_TYPE		*porttype;
	OMX_PARAM_PORTDEFINITIONTYPE	*portdef;
	OMX_BUFFERHEADERTYPE		*decbufs;
	OMX_VIDEO_PORTDEFINITIONTYPE	*viddef;
	OMX_VIDEO_PARAM_PROFILELEVELTYPE *level;
	int		decportidx = 200;
	int		resizeportidx = 60;
	int		encportidx = 130;
	int		fd;
	time_t		start, end;
	int		offset;
	AVPacket	*p, *rp;
	int		ish264;
	int		filtertest;
	int		opt;
	ILCLIENT_T	*client;

	if (argc < 3)
		usage(argv[0]);

	ctx.bitrate = 2*1024*1024;
	ctx.verbose = 0;
	ctx.width = 0;
	ctx.height = 0;

	while ((opt = getopt(argc, argv, ":b:vs:")) != -1) {
		switch (opt) {
		case 'b':	//bitrate
			ctx.bitrate = atoi(optarg);
			printf("bitrate = %d\n", ctx.bitrate);
			break;
		case 'v':	//verbose
			ctx.verbose = 1;
			break;
		case 's':	//WxH
			ctx.width = atoi(optarg);
			if ((atoi(optarg) % 16) != 0) {
				printf("W = %d is not a multiple of 16\n", ctx.width);
				usage(argv[0]);
			}
			if (ctx.width <16 || ctx.width > 1080) {
				printf("W = %d should be between 16 and 1080\n", ctx.width);
				usage(argv[0]);
			}	
			printf("W = %d\n", ctx.width);
			if ((size = strchr(optarg, 'x')) == NULL) {
				printf("wrong size parameter (no 'x') exiting\n");
				usage(argv[0]);
			}
			ctx.height = atoi(size+1);
			if ((atoi(size+1) % 16) != 0) {
				printf("H = %d is not a multiple of 16\n", ctx.height);
				usage(argv[0]);
			}
			if (ctx.height <16 || ctx.height > 1080) {
				printf("H = %d should be between 16 and 1080\n", ctx.height);
				usage(argv[0]);
			}	
			printf("H = %d\n", ctx.height);
			break;
		case '?':
			usage(argv[0]);
		}
	}
	if ((client = ilclient_init()) == NULL)
		return -2;
	iname = argv[optind++];
	oname = argv[optind++];

	MAKEME(porttype, OMX_PORT_PARAM_TYPE);
	MAKEME(portdef, OMX_PARAM_PORTDEFINITIONTYPE);
	MAKEME(pfmt, OMX_VIDEO_PARAM_PORTFORMATTYPE);

	av_register_all();

	ic = NULL;
	ish264 = 0;
	pthread_mutex_init(&ctx.lock, NULL);

#if 0
	fmt = av_oformat_next(fmt);
	while (fmt) {
		printf("Found '%s'\t\t'%s'\n", fmt->name, fmt->long_name);
		fmt = av_oformat_next(fmt);
	}
#endif

	/* Input init: */

	if ((err = avformat_open_input(&ic, iname, NULL, NULL)) != 0) {
		fprintf(stderr, "Failed to open '%s': %s\n", iname,
			strerror(err));
		exit(1);
	}
	ctx.ic = ic;

	if (avformat_find_stream_info(ic, NULL) < 0) {
		fprintf(stderr, "Failed to find streams in '%s'\n", iname);
		exit(1);
	}

	av_dump_format(ic, 0, iname, 0);

	vidindex = av_find_best_stream(ic, AVMEDIA_TYPE_VIDEO, -1, -1,
		NULL, 0);
	if (vidindex < 0) {
		fprintf(stderr, "Failed to find a video stream in '%s'\n",
			iname);
		exit(1);
	}
	printf("Found a video at index %d\n", vidindex);

	printf("Frame size: %dx%d\n", ic->streams[vidindex]->codec->width, 
		ic->streams[vidindex]->codec->height);
	ish264 = (ic->streams[vidindex]->codec->codec_id == CODEC_ID_H264);

	/* Output init: */
	ctx.fd = fd = open(oname, O_CREAT | O_LARGEFILE | O_WRONLY | O_TRUNC,
			0666);
	printf("File descriptor %d\n", fd);


#if 0
	avformat_alloc_output_context(&oc, NULL, /*NULL,*/ oname);
	if (!oc) {
		printf("Couldn't determine output from '%s'; using MPEG.\n",
			oname);
		avformat_alloc_output_context(&oc, NULL, /*"matroska",*/ oname);
	}
#endif
//	if (!oc)
//		exit(1);
	
//	fmt = oc->oformat;
	
	for (i = 0; i < ic->nb_streams; i++) {
		printf("Found stream %d, context %p\n",
			ic->streams[i]->index, ic->streams[i]->codec);
	}

	bcm_host_init();
	OERR(OMX_Init(), ctx.verbose);
	OERR(OMX_GetHandle(&m2, DECNAME, &ctx, &decevents), ctx.verbose);
	OERR(OMX_GetHandle(&m4, ENCNAME, &ctx, &encevents), ctx.verbose);
	OERR(OMX_GetHandle(&resize, RESIZENAME, &ctx, &resizeevents), ctx.verbose);
	ctx.m2 = m2;
	ctx.m4 = m4;
	ctx.resize = resize;

	if (ctx.verbose) printf("Obtained handles.  %p decode, %p resize, %p encode\n",
		m2, resize, m4);

	OERR(OMX_GetParameter(m2, OMX_IndexParamVideoInit, porttype), ctx.verbose);
	if (ctx.verbose) printf("Found %d ports, starting at %d (%x) on decoder\n",
		porttype->nPorts, porttype->nStartPortNumber,
		porttype->nStartPortNumber);
	ctx.decportidx = decportidx = porttype->nStartPortNumber;

	OERR(OMX_GetParameter(resize, OMX_IndexParamImageInit, porttype), ctx.verbose);
	if (ctx.verbose) printf("Found %d ports, starting at %d (%x) on resizer\n",
		porttype->nPorts, porttype->nStartPortNumber,
		porttype->nStartPortNumber);
	ctx.resizeportidx = resizeportidx = porttype->nStartPortNumber;

	OERR(OMX_GetParameter(m4, OMX_IndexParamVideoInit, porttype), ctx.verbose);
	if (ctx.verbose) printf("Found %d ports, starting at %d (%x) on encoder\n",
		porttype->nPorts, porttype->nStartPortNumber,
		porttype->nStartPortNumber);
	ctx.encportidx = encportidx = porttype->nStartPortNumber;
	free(porttype);

	OERR(OMX_SendCommand(m2, OMX_CommandPortDisable, decportidx, NULL), ctx.verbose);
	OERR(OMX_SendCommand(m2, OMX_CommandPortDisable, decportidx+1, NULL), ctx.verbose);
	OERR(OMX_SendCommand(resize, OMX_CommandPortDisable, resizeportidx, NULL), ctx.verbose);
	OERR(OMX_SendCommand(resize, OMX_CommandPortDisable, resizeportidx+1, NULL), ctx.verbose);
	OERR(OMX_SendCommand(m4, OMX_CommandPortDisable, encportidx, NULL), ctx.verbose);
	OERR(OMX_SendCommand(m4, OMX_CommandPortDisable, encportidx+1, NULL), ctx.verbose);

	portdef->nPortIndex = decportidx;
	OERR(OMX_GetParameter(m2, OMX_IndexParamPortDefinition, portdef), ctx.verbose);
	viddef = &portdef->format.video;
	viddef->nFrameWidth = ic->streams[vidindex]->codec->width;
	viddef->nFrameHeight = ic->streams[vidindex]->codec->height;
	printf("Mapping codec %d to %d\n",
		ic->streams[vidindex]->codec->codec_id,
		mapcodec(ic->streams[vidindex]->codec->codec_id));
	viddef->eCompressionFormat = 
		mapcodec(ic->streams[vidindex]->codec->codec_id);
	viddef->bFlagErrorConcealment = 0;
//	viddef->xFramerate = 25<<16;
	OERR(OMX_SetParameter(m2, OMX_IndexParamPortDefinition, portdef), ctx.verbose);
	free(portdef);

#if 0
/* It appears these have limited effect: */
	dataunit->nPortIndex = decportidx;
	dataunit->eUnitType = OMX_DataUnitCodedPicture;
	dataunit->eEncapsulationType = OMX_DataEncapsulationGenericPayload;
	OERR(OMX_SetParameter(m2, OMX_IndexParamBrcmDataUnit, dataunit), ctx.verbose);

	if (ish264) {
		naltype->nPortIndex = decportidx;
		naltype->eNaluFormat = OMX_NaluFormatStartCodes;
		OERR(OMX_SetParameter(m2, OMX_IndexParamNalStreamFormatSelect, naltype), ctx.verbose);
	}
#endif

	MAKEME(level, OMX_VIDEO_PARAM_PROFILELEVELTYPE);
	level->nPortIndex = encportidx+1;
/* Dump what the encoder is capable of: */
	if (ctx.verbose) print_codecs();
	if (ctx.verbose) {
		for (oerr = OMX_ErrorNone, i = 0; oerr == OMX_ErrorNone; i++) {
			pfmt->nIndex = i;
			oerr = OMX_GetParameter(m4, OMX_IndexParamVideoPortFormat, pfmt);
			if (oerr == OMX_ErrorNoMore)
				break;
			printf("Codecs supported:\n"
				"\tIndex:\t\t%d\n"
				"\tCodec:\t\t%d (%x)\n"
				"\tColour:\t\t%d\n"
				"\tFramerate:\t%x (%f)\n",
				pfmt->nIndex,
				pfmt->eCompressionFormat, pfmt->eCompressionFormat,
				pfmt->eColorFormat,
				pfmt->xFramerate,
				((float)pfmt->xFramerate/(float)65536));
		}

		for (oerr = OMX_ErrorNone, i = 0; oerr == OMX_ErrorNone; i++) {
			level->nProfileIndex = i;
			oerr = OMX_GetParameter(m4,
				OMX_IndexParamVideoProfileLevelQuerySupported,
				level);
			if (oerr == OMX_ErrorNoMore)
				break;
			printf("Levels supported:\n"
				"\tIndex:\t\t%d\n"
				"\tProfile:\t%d\n"
				"\tLevel:\t\t%d\n",
				level->nProfileIndex,
				level->eProfile,
				level->eLevel);
		}
	}
	free(pfmt);
	free(level);

/* Dump current port states: */
	if (ctx.verbose) {
		dumpport(m2, decportidx);
		dumpport(m2, decportidx+1);
		dumpport(resize, resizeportidx);
		dumpport(resize, resizeportidx+1);
		dumpport(m4, encportidx);
		dumpport(m4, encportidx+1);
	}

	OERR(OMX_SendCommand(m2, OMX_CommandStateSet, OMX_StateIdle, NULL), ctx.verbose);

	decbufs = allocbufs(m2, decportidx, 1);

/* Start the initial loop.  Process until we have a state change on port 131 */
	ctx.decstate = DECINIT;
	ctx.encstate = ENCPREINIT;
	OERR(OMX_SendCommand(m2, OMX_CommandStateSet, OMX_StateExecuting, NULL), ctx.verbose);

	rp = calloc(sizeof(AVPacket), 1);
	filtertest = ish264;

	for (offset = i = j = 0; ctx.decstate != DECFAILED; i++, j++) {
		int rc;
		int k;
		int size, nsize;
		OMX_BUFFERHEADERTYPE *spare;

		if (offset == 0 && ctx.decstate != DECFLUSH) {
			rc = av_read_frame(ic, rp);
			if (rc != 0) {
				if (ic->pb->eof_reached)
					ctx.decstate = DECFLUSH;
				break;
			}
			if (rp->stream_index != vidindex) {
				i--;
				av_free_packet(rp);
				continue;
			}
			size = rp->size;
			ctx.fps++;
			ctx.framecount++;

			if (ish264 && filtertest) {
				filtertest = 0;
				ctx.bsfc = dofiltertest(rp);
			}
			if (ctx.bsfc) {
				p = filter(&ctx, rp);
			} else {
				p = rp;
			}
		}

		switch (ctx.decstate) {
		case DECTUNNELSETUP:
			start = time(NULL);
			//printf("NOW to CONFIGURE !!!!!!!!!!!!!!\n\n");
			configure(&ctx);
			ctx.decstate = DECRUNNING;
			break;
		case DECFLUSH:
			size = 0;
			/* Add the flush code here */
			printf("IN FLUSH NOW\n\n");
			break;
		case DECINIT:
			if (i < 120) /* Bail; decoder doesn't like it */
				break;
			ctx.decstate = DECFAILED;
			/* Drop through */
		case DECFAILED:
			fprintf(stderr, "Failed to set the parameters after "
					"%d video frames.  Giving up.\n", i);
			dumpport(m2, decportidx);
			dumpport(m2, decportidx+1);
			dumpport(resize, resizeportidx);
			dumpport(resize, resizeportidx+1);
			dumpport(m4, encportidx);
			dumpport(m4, encportidx+1);
			exit(1);
			break;
		default:
			break;	/* Shuts the compiler up */
		}

		for (spare = NULL; !spare; usleep(10)) {
			pthread_mutex_lock(&ctx.lock);
			spare = ctx.bufhead;
			ctx.bufhead = NULL;
			ctx.flags &= ~FLAGS_DECEMPTIEDBUF;
			pthread_mutex_unlock(&ctx.lock);
			while (spare) {
				write(fd, &spare->pBuffer[spare->nOffset],
					spare->nFilledLen);
				spare->nFilledLen = 0;
				spare->nOffset = 0;
				OERRq(OMX_FillThisBuffer(m4, spare));
				spare = spare->pAppPrivate;
			}

			spare = decbufs;
			for (k = 0; spare && spare->nFilledLen != 0; k++)
				spare = spare->pAppPrivate;
		}

		if (size > spare->nAllocLen) {
			nsize = spare->nAllocLen;
		} else {
			nsize = size;
		}

		if (ctx.decstate != DECFLUSH) {
			memcpy(spare->pBuffer, &(p->data[offset]), nsize);
			spare->nFlags = i == 0 ? OMX_BUFFERFLAG_STARTTIME : 0;
			spare->nFlags |= size == nsize ?
				OMX_BUFFERFLAG_ENDOFFRAME : 0;
		} else {
			spare->nFlags = OMX_BUFFERFLAG_STARTTIME |
					OMX_BUFFERFLAG_EOS;
		}
		spare->nFilledLen = nsize;
		spare->nOffset = 0;
		OERRq(OMX_EmptyThisBuffer(m2, spare));
		size -= nsize;
		if (size) {
			offset += nsize;
		} else {
			offset = 0;
			av_free_packet(p);
		}
	}

	close(fd);

	end = time(NULL);

	printf("Processed %d frames in %d seconds; %df/s\n",
		ctx.framecount, end-start, (ctx.framecount/(end-start)));

	// flush the encoder
//	OERR(OMX_SendCommand(m4, OMX_CommandFlush, encportidx, NULL), ctx.verbose);
//	OERR(OMX_SendCommand(m4, OMX_CommandFlush, encportidx+ctx.verbose, NULL), ctx.verbose);



	// tear down the tunnels
	OERR(OMX_SendCommand(m2, OMX_CommandStateSet, OMX_StateIdle, NULL), ctx.verbose);
	OERR(OMX_SendCommand(resize, OMX_CommandStateSet, OMX_StateIdle, NULL), ctx.verbose);
	OERR(OMX_SendCommand(m4, OMX_CommandStateSet, OMX_StateIdle, NULL), ctx.verbose);
	OERR(OMX_SendCommand(m2, OMX_CommandStateSet, OMX_StateLoaded, NULL), ctx.verbose);
	OERR(OMX_SendCommand(resize, OMX_CommandStateSet, OMX_StateLoaded, NULL), ctx.verbose);
	OERR(OMX_SendCommand(m4, OMX_CommandStateSet, OMX_StateLoaded, NULL), ctx.verbose);
	// free buffers
	vcos_free(decbufs);
	vcos_free(ctx.encbufs);
	// Apparently the teardown function is not implemented; use the setup function instead
	//OERR(OMX_TeardownTunnel(m2, decportidx+1, resize, resizeportidx), ctx.verbose);
	//OERR(OMX_TeardownTunnel(resize, resizeportidx+1, m4, encportidx), ctx.verbose);
	OERR(OMX_SendCommand(m2, OMX_CommandPortDisable, decportidx, NULL), ctx.verbose);
	OERR(OMX_SendCommand(m2, OMX_CommandPortDisable, decportidx+1, NULL), ctx.verbose);
	OERR(OMX_SendCommand(resize, OMX_CommandPortDisable, resizeportidx, NULL), ctx.verbose);
	OERR(OMX_SendCommand(resize, OMX_CommandPortDisable, resizeportidx+1, NULL), ctx.verbose);
	OERR(OMX_SendCommand(m4, OMX_CommandPortDisable, encportidx, NULL), ctx.verbose);
	OERR(OMX_SendCommand(m4, OMX_CommandPortDisable, encportidx+1, NULL), ctx.verbose);
//	ilclient_disable_port_buffers(m2, decportidx, NULL, NULL, NULL);
//	ilclient_disable_port_buffers(m4, encportidx, NULL, NULL, NULL);
	OERR(OMX_SetupTunnel(m2, decportidx+1, NULL, 0), ctx.verbose);
	OERR(OMX_SetupTunnel(resize, resizeportidx, NULL, 0), ctx.verbose);
	OERR(OMX_SetupTunnel(resize, resizeportidx+1, NULL, 0), ctx.verbose);
	OERR(OMX_SetupTunnel(m4, encportidx, NULL, 0), ctx.verbose);
	OERR(OMX_FreeHandle(m2), ctx.verbose);
	OERR(OMX_FreeHandle(resize), ctx.verbose);
	OERR(OMX_FreeHandle(m4), ctx.verbose);

//	free(porttype);
//	free(portdef);
//	free(pfmt);
//	free(level);
	return 0;
}
Example No. 16
void
*writer_thread(void *thread_ctx) {

    struct transcoder_ctx_t *ctx = (struct transcoder_ctx_t *) thread_ctx;
    AVStream *video_stream = NULL, *audio_stream = NULL;
    AVFormatContext *output_context = init_output_context(ctx, &video_stream, &audio_stream);
    struct mux_state_t mux_state = {0};

    //from omxtx
    mux_state.pts_offset = av_rescale_q(ctx->input_context->start_time, AV_TIME_BASE_Q, output_context->streams[ctx->video_stream_index]->time_base);

#if 0
    FILE *out_file;

    out_file = fopen(ctx->output_filename, "wb");
    if (out_file == NULL) {
        printf("error creating output file. DYING \n");
        exit(1);
    }
#endif

    //write stream header if any
    avformat_write_header(output_context, NULL);

    //do not start doing anything until we get an encoded packet
    pthread_mutex_lock(&ctx->pipeline.video_encode.is_running_mutex);
    while (!ctx->pipeline.video_encode.is_running) {
        pthread_cond_wait(&ctx->pipeline.video_encode.is_running_cv, &ctx->pipeline.video_encode.is_running_mutex);
    }

    while (!ctx->pipeline.video_encode.eos || !ctx->processed_audio_queue->queue_finished) {
        //FIXME a memory barrier is required here so that we don't race 
        //on above variables 

        //fill a buffer with video data 
        OERR(OMX_FillThisBuffer(ctx->pipeline.video_encode.h, omx_get_next_output_buffer(&ctx->pipeline.video_encode)));

        write_audio_frame(output_context, audio_stream, ctx); //write full audio frame 
        //FIXME no guarantee that we have a full frame per packet?
        write_video_frame(output_context, video_stream, ctx, &mux_state); //write full video frame
        //encoded_video_queue is being filled by the previous command

#if 0
        struct packet_t *encoded_packet = packet_queue_get_next_item(&ctx->pipeline.encoded_video_queue);
        fwrite(encoded_packet->data, 1, encoded_packet->data_length, out_file);
        packet_queue_free_packet(encoded_packet, 1);
#endif

    }

    av_write_trailer(output_context);

    //free all the resources
    avcodec_close(video_stream->codec);
    avcodec_close(audio_stream->codec);
    /* Free the streams. */
    for (int i = 0; i < output_context->nb_streams; i++) {
        av_freep(&output_context->streams[i]->codec);
        av_freep(&output_context->streams[i]);
    }

    if (!(output_context->oformat->flags & AVFMT_NOFILE)) {
        /* Close the output file. */
        avio_close(output_context->pb);
    }

    /* free the stream */
    av_free(output_context);
    free(mux_state.pps);
    free(mux_state.sps);
#if 0
    fclose(out_file);
#endif
    return NULL;
}
Exemplo n.º 17
0
OMX_ERRORTYPE omx_setup_camera_pipeline(struct omx_pipeline_t* pipe)
{

  // Create component.
  omx_init_component(pipe, &pipe->camera, "OMX.broadcom.camera");

  // Use OMX_IndexConfigRequestCallback to request callbacks on OMX_IndexParamCameraDeviceNumber.
  OMX_CONFIG_REQUESTCALLBACKTYPE cbtype;
  OMX_INIT_STRUCTURE(cbtype);
  cbtype.nPortIndex=OMX_ALL;
  cbtype.nIndex=OMX_IndexParamCameraDeviceNumber;
  cbtype.bEnable = OMX_TRUE;
  OERR(OMX_SetConfig(pipe->camera.h, OMX_IndexConfigRequestCallback, &cbtype));
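  // With this in place the component raises an OMX_EventParamOrConfigChanged event once
  // the camera device number setting has taken effect, i.e. the camera drivers are ready.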

  // Set OMX_IndexParamISPTunerName.

  // Set OMX_IndexParamCameraFlashType.

  // Set OMX_IndexParamCameraDeviceNumber.
  OMX_PARAM_U32TYPE device;
  OMX_INIT_STRUCTURE(device);
  device.nPortIndex = OMX_ALL;
  device.nU32 = 0;
  OERR(OMX_SetParameter(pipe->camera.h, OMX_IndexParamCameraDeviceNumber, &device));

  dumpport(pipe->camera.h, 71);
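  /* On the Broadcom camera component, port 70 is the preview output, 71 the video
     capture output and 72 the still image capture output. */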

  /* Set the resolution */
  OMX_PARAM_PORTDEFINITIONTYPE portdef;
  OMX_INIT_STRUCTURE(portdef);
  portdef.nPortIndex = 71;
  OERR(OMX_GetParameter(pipe->camera.h, OMX_IndexParamPortDefinition, &portdef));
  portdef.format.image.nFrameWidth = 640;
  portdef.format.image.nFrameHeight = 360;
  portdef.format.image.nStride = 640;
  OERR(OMX_SetParameter(pipe->camera.h, OMX_IndexParamPortDefinition, &portdef));

  /* Set the framerate */
  OMX_CONFIG_FRAMERATETYPE framerate;
  OMX_INIT_STRUCTURE(framerate);
  framerate.nPortIndex = 71;
  framerate.xEncodeFramerate = 25 << 16; // Q16 format - 25fps
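  /* Q16 means the value is scaled by 65536, so 25 << 16 == 1638400; 12.5 fps would be 819200. */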
  OERR(OMX_SetConfig(pipe->camera.h, OMX_IndexConfigVideoFramerate, &framerate));

  /* Set the sharpness */
  OMX_CONFIG_SHARPNESSTYPE sharpness;
  OMX_INIT_STRUCTURE(sharpness);
  sharpness.nPortIndex = OMX_ALL;
  sharpness.nSharpness = -50; /* -100 to 100 */
  OERR(OMX_SetConfig(pipe->camera.h, OMX_IndexConfigCommonSharpness, &sharpness));

  /* Set the contrast */
  OMX_CONFIG_CONTRASTTYPE contrast;
  OMX_INIT_STRUCTURE(contrast);
  contrast.nPortIndex = OMX_ALL;
  contrast.nContrast = -10; /* -100 to 100 */
  OERR(OMX_SetConfig(pipe->camera.h, OMX_IndexConfigCommonContrast, &contrast));

  /* Set the brightness */
  OMX_CONFIG_BRIGHTNESSTYPE brightness;
  OMX_INIT_STRUCTURE(brightness);
  brightness.nPortIndex = OMX_ALL;
  brightness.nBrightness = 50; /* 0 to 100 */
  OERR(OMX_SetConfig(pipe->camera.h, OMX_IndexConfigCommonBrightness, &brightness));

  /* Set the saturation */
  OMX_CONFIG_SATURATIONTYPE saturation;
  OMX_INIT_STRUCTURE(saturation);
  saturation.nPortIndex = OMX_ALL;
  saturation.nSaturation = 0; /* -100 to 100 */
  OERR(OMX_SetConfig(pipe->camera.h, OMX_IndexConfigCommonSaturation, &saturation));

  /* Video stabilisation */
  OMX_CONFIG_FRAMESTABTYPE framestab;
  OMX_INIT_STRUCTURE(framestab);
  framestab.nPortIndex = OMX_ALL;
  framestab.bStab = OMX_FALSE;
  OERR(OMX_SetConfig(pipe->camera.h, OMX_IndexConfigCommonFrameStabilisation, &framestab));

  /* Set EV compensation, ISO and metering mode */
  OMX_CONFIG_EXPOSUREVALUETYPE exposurevalue;
  OMX_INIT_STRUCTURE(exposurevalue);
  exposurevalue.nPortIndex = OMX_ALL;
  OERR(OMX_GetConfig(pipe->camera.h, OMX_IndexConfigCommonExposureValue, &exposurevalue));
  fprintf(stderr,"nSensitivity=%d\n",exposurevalue.nSensitivity);
  exposurevalue.xEVCompensation = 0;  /* Fixed point value stored as Q16 */
  exposurevalue.nSensitivity = 100;         /**< e.g. nSensitivity = 100 implies "ISO 100" */
  exposurevalue.bAutoSensitivity = OMX_FALSE;
  exposurevalue.eMetering = OMX_MeteringModeAverage; 
  OERR(OMX_SetConfig(pipe->camera.h, OMX_IndexConfigCommonExposureValue, &exposurevalue));

  /* Set exposure mode */
  OMX_CONFIG_EXPOSURECONTROLTYPE exposure;
  OMX_INIT_STRUCTURE(exposure);
  exposure.nPortIndex = OMX_ALL;
  exposure.eExposureControl = OMX_ExposureControlAuto;
  OERR(OMX_SetConfig(pipe->camera.h, OMX_IndexConfigCommonExposure, &exposure));

  /* Set AWB mode */
  OMX_CONFIG_WHITEBALCONTROLTYPE awb;
  OMX_INIT_STRUCTURE(awb);
  awb.nPortIndex = OMX_ALL;
  awb.eWhiteBalControl = OMX_WhiteBalControlAuto;
  OERR(OMX_SetConfig(pipe->camera.h, OMX_IndexConfigCommonWhiteBalance, &awb));
  
  /* Set image effect */
  OMX_CONFIG_IMAGEFILTERTYPE imagefilter;
  OMX_INIT_STRUCTURE(imagefilter);
  imagefilter.nPortIndex = OMX_ALL;
  imagefilter.eImageFilter = OMX_ImageFilterNone;
  OERR(OMX_SetConfig(pipe->camera.h, OMX_IndexConfigCommonImageFilter, &imagefilter));

  /* Set colour effect */
  OMX_CONFIG_COLORENHANCEMENTTYPE colour;
  OMX_INIT_STRUCTURE(colour);
  colour.nPortIndex = OMX_ALL;
  colour.bColorEnhancement = OMX_FALSE;
  colour.nCustomizedU = 128;
  colour.nCustomizedV = 128;
  OERR(OMX_SetConfig(pipe->camera.h, OMX_IndexConfigCommonColorEnhancement, &colour));

  /* Turn off the LED - doesn't work! */
  OMX_CONFIG_PRIVACYINDICATORTYPE privacy;
  OMX_INIT_STRUCTURE(privacy);
  privacy.ePrivacyIndicatorMode = OMX_PrivacyIndicatorOff;
  OERR(OMX_SetConfig(pipe->camera.h, OMX_IndexConfigPrivacyIndicator, &privacy));

  // Wait for the callback that OMX_IndexParamCameraDeviceNumber has
  // changed. At this point, all the drivers have been loaded. Other
  // settings can be applied whilst waiting for this event.
  fprintf(stderr,"Waiting for camera config to change\n");
  while (!pipe->camera.config_changed);  /* TODO: Use a condition variable */
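  /* A condition-variable wait would avoid spinning here. A minimal sketch, assuming
     hypothetical config_changed_mutex / config_changed_cv fields on the component
     (not present in this struct) that the event handler locks and signals:

       pthread_mutex_lock(&pipe->camera.config_changed_mutex);
       while (!pipe->camera.config_changed)
         pthread_cond_wait(&pipe->camera.config_changed_cv, &pipe->camera.config_changed_mutex);
       pthread_mutex_unlock(&pipe->camera.config_changed_mutex);
  */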
  fprintf(stderr,"Config changed\n");

  // Query for OMX_IndexConfigCameraSensorModes as required.

  // Change state to IDLE, and proceed as required. 
  omx_send_command_and_wait(&pipe->camera, OMX_CommandStateSet, OMX_StateIdle, NULL);
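  // An OMX component moves Loaded -> Idle -> Executing; enabled ports must have their
  // buffers (or tunnels) in place before the transition to Idle can complete.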

  OMX_CONFIG_PORTBOOLEANTYPE cameraport;
  OMX_INIT_STRUCTURE(cameraport);
  cameraport.nPortIndex = 71;
  cameraport.bEnabled = OMX_TRUE;
  OERR(OMX_SetParameter(pipe->camera.h, OMX_IndexConfigPortCapturing, &cameraport));

  omx_init_component(pipe, &pipe->video_render, "OMX.broadcom.video_render");
  omx_send_command_and_wait(&pipe->video_render, OMX_CommandStateSet, OMX_StateIdle, NULL);

  OERR(OMX_SetupTunnel(pipe->camera.h, 71, pipe->video_render.h, 90));  /* Camera capture port to video render */

  omx_send_command_and_wait(&pipe->camera, OMX_CommandPortEnable, 71, NULL);
  omx_send_command_and_wait(&pipe->video_render, OMX_CommandPortEnable, 90, NULL);

  omx_send_command_and_wait(&pipe->video_render, OMX_CommandStateSet, OMX_StateExecuting, NULL);
  omx_send_command_and_wait(&pipe->camera, OMX_CommandStateSet, OMX_StateExecuting, NULL);

  omx_set_display_region(pipe, 1200, 180, 640, 360);

  OMX_CONFIG_DISPLAYREGIONTYPE region;
  OMX_INIT_STRUCTURE(region);
  region.nPortIndex = 90; /* Video render input port */
  region.set = OMX_DISPLAY_SET_LAYER;
  region.layer = 10;
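  /* With dispmanx, higher layer numbers are composited on top, so layer 10 places the
     render window above lower-numbered layers such as the default layer 0. */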
  OERR(OMX_SetParameter(pipe->video_render.h, OMX_IndexConfigDisplayRegion, &region));

  fprintf(stderr,"Camera pipeline configured\n");

  dumpport(pipe->camera.h, 71);
  return OMX_ErrorNone;
}