Example #1
OMX_ERRORTYPE enceventhandler(OMX_HANDLETYPE component,
				struct context *ctx,
				OMX_EVENTTYPE event, 
				OMX_U32 data1,
				OMX_U32 data2,
				OMX_PTR eventdata)
{
	switch (event) {
	case OMX_EventError:
		if (ctx->flags & FLAGS_VERBOSE)
			printf("Encoder %p has errored: %x\n", component, data1);
		return data1;
	case OMX_EventCmdComplete:
		if (ctx->flags & FLAGS_VERBOSE)
			printf("Encoder %p has completed the last command.\n",
				component);
		break;
	case OMX_EventPortSettingsChanged:
//		if (ctx->flags & FLAGS_VERBOSE)
		printf("Encoder %p port %d settings changed.\n", component,
			data1);
		if (ctx->verbose)
			dumpport(component, data1);
		break;
	default:
		if (ctx->flags & FLAGS_VERBOSE)
			printf("Got an event of type %x on encoder %p "
				"(d1: %x, d2 %x)\n",
				event, component, data1, data2);
		break;
	}
	return OMX_ErrorNone;
}
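This handler only matters once it is registered with a component. Example #4 below passes &decevents, &encevents and &resizeevents to OMX_GetHandle(); those OMX_CALLBACKTYPE tables are not reproduced in these listings, so the sketch below shows the likely shape of one of them. The emptied() and filled() callbacks are assumed names standing in for the program's real OMX_EmptyBufferDone/OMX_FillBufferDone handlers, not code from the original source.

/* Sketch only: wiring the event handler above into an OMX_CALLBACKTYPE. */
static OMX_ERRORTYPE emptied(OMX_HANDLETYPE component, OMX_PTR appdata,
				OMX_BUFFERHEADERTYPE *buf)
{
	/* Stub: the real program would recycle the decoder input buffer here */
	return OMX_ErrorNone;
}

static OMX_ERRORTYPE filled(OMX_HANDLETYPE component, OMX_PTR appdata,
				OMX_BUFFERHEADERTYPE *buf)
{
	/* Stub: the real program queues the filled encoder buffer (cf. the
	 * ctx.bufhead list drained in the main loop of Example #4) */
	return OMX_ErrorNone;
}

static OMX_CALLBACKTYPE encevents = {
	.EventHandler		= (OMX_ERRORTYPE (*)(OMX_HANDLETYPE, OMX_PTR,
					OMX_EVENTTYPE, OMX_U32, OMX_U32,
					OMX_PTR)) enceventhandler,
	.EmptyBufferDone	= emptied,
	.FillBufferDone		= filled,
};

With a table like this, OMX_GetHandle(&m4, ENCNAME, &ctx, &encevents) in Example #4 delivers every encoder event to enceventhandler() with &ctx as its second argument, which is why the handler can treat that argument as a struct context *.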
Example #2
static void *fps(void *p)
{
	enum OMX_STATETYPE		state;
	int				lastframe;

	while (1) {
		lastframe = ctx.framecount;
		sleep(1);
		printf("Frame %6d (%5ds).  Frames last second: %d     \r",
			ctx.framecount, ctx.framecount/25,
				ctx.framecount-lastframe);
		fflush(stdout);
		if (0 && ctx.fps == 0) {
			printf("In fps thread, after %d frames:\n",
				ctx.framecount);
			dumpport(ctx.m2, ctx.decportidx);
			dumpport(ctx.m2, ctx.decportidx+1);
			dumpport(ctx.resize, ctx.resizeportidx);
			dumpport(ctx.resize, ctx.resizeportidx+1);
			dumpport(ctx.m4, ctx.encportidx);
			dumpport(ctx.m4, ctx.encportidx+1);

			OMX_GetState(ctx.m2, &state);
			printf("Decoder state: %d\n", state);
			OMX_GetState(ctx.m4, &state);
			printf("Encoder state: %d\n", state);
		}
	}
	return NULL;
}
Example #3
static void dumpportstate(void)
{
	enum OMX_STATETYPE		state;

	printf("\n\nIn exit handler, after %d frames:\n", ctx.framecount);
	dumpport(ctx.m2, ctx.decportidx);
	dumpport(ctx.m2, ctx.decportidx+1);
	dumpport(ctx.resize, ctx.resizeportidx);
	dumpport(ctx.resize, ctx.resizeportidx+1);
	dumpport(ctx.m4, ctx.encportidx);
	dumpport(ctx.m4, ctx.encportidx+1);

	OMX_GetState(ctx.m2, &state);
	printf("Decoder state: %d\n", state);
	OMX_GetState(ctx.m4, &state);
	printf("Encoder state: %d\n", state);
}
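dumpport() is called throughout these listings but never defined in them. From its call sites it only needs a component handle and a port index; a minimal stand-in that prints the port definition could look like the following sketch (the exact fields printed and the output format are assumptions, with the usual IL headers plus stdio.h/string.h taken as included):

static void dumpport(OMX_HANDLETYPE handle, int port)
{
	OMX_PARAM_PORTDEFINITIONTYPE portdef;

	memset(&portdef, 0, sizeof(portdef));
	portdef.nSize = sizeof(portdef);
	portdef.nVersion.nVersion = OMX_VERSION;
	portdef.nPortIndex = port;
	if (OMX_GetParameter(handle, OMX_IndexParamPortDefinition, &portdef)
			!= OMX_ErrorNone) {
		printf("Port %d: failed to read definition\n", port);
		return;
	}
	printf("Port %d: %s, %s, %d buffers of %d bytes\n", port,
		portdef.eDir == OMX_DirInput ? "input" : "output",
		portdef.bEnabled ? "enabled" : "disabled",
		(int) portdef.nBufferCountActual,
		(int) portdef.nBufferSize);
	if (portdef.eDomain == OMX_PortDomainVideo)
		printf("\tVideo: %dx%d, compression %d, colour %d\n",
			(int) portdef.format.video.nFrameWidth,
			(int) portdef.format.video.nFrameHeight,
			portdef.format.video.eCompressionFormat,
			portdef.format.video.eColorFormat);
}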
Example #4
int main(int argc, char *argv[])
{
	AVFormatContext	*ic;
	char		*iname;
	char		*oname;
	char		*size;
	int		err;
	int		vidindex;
	int		i, j;
	OMX_ERRORTYPE	oerr;
	OMX_HANDLETYPE	m2 = NULL, m4 = NULL, resize = NULL;
	OMX_VIDEO_PARAM_PORTFORMATTYPE	*pfmt;
	OMX_PORT_PARAM_TYPE		*porttype;
	OMX_PARAM_PORTDEFINITIONTYPE	*portdef;
	OMX_BUFFERHEADERTYPE		*decbufs;
	OMX_VIDEO_PORTDEFINITIONTYPE	*viddef;
	OMX_VIDEO_PARAM_PROFILELEVELTYPE *level;
	int		decportidx = 200;
	int		resizeportidx = 60;
	int		encportidx = 130;
	int		fd;
	time_t		start, end;
	int		offset;
	AVPacket	*p, *rp;
	int		ish264;
	int		filtertest;
	int		opt;
	ILCLIENT_T	*client;

	if (argc < 3)
		usage(argv[0]);

	ctx.bitrate = 2*1024*1024;
	ctx.verbose = 0;
	ctx.width = 0;
	ctx.height = 0;

	while ((opt = getopt(argc, argv, ":b:vs:")) != -1) {
		switch (opt) {
		case 'b':	//bitrate
			ctx.bitrate = atoi(optarg);
			printf("bitrate = %d\n", ctx.bitrate);
			break;
		case 'v':	//verbose
			ctx.verbose = 1;
			break;
		case 's':	//WxH
			ctx.width = atoi(optarg);
			if ((atoi(optarg) % 16) != 0) {
				printf("W = %d is not a multiple of 16\n", ctx.width);
				usage(argv[0]);
			}
			if (ctx.width < 16 || ctx.width > 1080) {
				printf("W = %d should be between 16 and 1080\n", ctx.width);
				usage(argv[0]);
			}	
			printf("W = %d\n", ctx.width);
			if ((size = strchr(optarg, 'x')) == NULL) {
				printf("wrong size parameter (no 'x') exiting\n");
				usage(argv[0]);
			}
			ctx.height = atoi(size+1);
			if ((atoi(size+1) % 16) != 0) {
				printf("H = %d is not a multiple of 16\n", ctx.height);
				usage(argv[0]);
			}
			if (ctx.height < 16 || ctx.height > 1080) {
				printf("H = %d should be between 16 and 1080\n", ctx.height);
				usage(argv[0]);
			}	
			printf("H = %d\n", ctx.height);
			break;
		case '?':
			usage(argv[0]);
		}
	}
	if ((client = ilclient_init()) == NULL)
		return -2;
	iname = argv[optind++];
	oname = argv[optind++];

	MAKEME(porttype, OMX_PORT_PARAM_TYPE);
	MAKEME(portdef, OMX_PARAM_PORTDEFINITIONTYPE);
	MAKEME(pfmt, OMX_VIDEO_PARAM_PORTFORMATTYPE);

	av_register_all();

	ic = NULL;
	ish264 = 0;
	pthread_mutex_init(&ctx.lock, NULL);

#if 0
	fmt = av_oformat_next(fmt);
	while (fmt) {
		printf("Found '%s'\t\t'%s'\n", fmt->name, fmt->long_name);
		fmt = av_oformat_next(fmt);
	}
#endif

	/* Input init: */

	if ((err = avformat_open_input(&ic, iname, NULL, NULL)) != 0) {
		fprintf(stderr, "Failed to open '%s': %s\n", iname,
			strerror(err));
		exit(1);
	}
	ctx.ic = ic;

	if (avformat_find_stream_info(ic, NULL) < 0) {
		fprintf(stderr, "Failed to find streams in '%s'\n", iname);
		exit(1);
	}

	av_dump_format(ic, 0, iname, 0);

	vidindex = av_find_best_stream(ic, AVMEDIA_TYPE_VIDEO, -1, -1,
		NULL, 0);
	if (vidindex < 0) {
		fprintf(stderr, "Failed to find a video stream in '%s'\n",
			iname);
		exit(1);
	}
	printf("Found a video at index %d\n", vidindex);

	printf("Frame size: %dx%d\n", ic->streams[vidindex]->codec->width, 
		ic->streams[vidindex]->codec->height);
	ish264 = (ic->streams[vidindex]->codec->codec_id == CODEC_ID_H264);

	/* Output init: */
	ctx.fd = fd = open(oname, O_CREAT | O_LARGEFILE | O_WRONLY | O_TRUNC,
			0666);
	printf("File descriptor %d\n", fd);


#if 0
	avformat_alloc_output_context(&oc, NULL, /*NULL,*/ oname);
	if (!oc) {
		printf("Couldn't determine output from '%s'; using MPEG.\n",
			oname);
		avformat_alloc_output_context(&oc, NULL, /*"matroska",*/ oname);
	}
#endif
//	if (!oc)
//		exit(1);
	
//	fmt = oc->oformat;
	
	for (i = 0; i < ic->nb_streams; i++) {
		printf("Found stream %d, context %p\n",
			ic->streams[i]->index, ic->streams[i]->codec);
	}

	bcm_host_init();
	OERR(OMX_Init(), ctx.verbose);
	OERR(OMX_GetHandle(&m2, DECNAME, &ctx, &decevents), ctx.verbose);
	OERR(OMX_GetHandle(&m4, ENCNAME, &ctx, &encevents), ctx.verbose);
	OERR(OMX_GetHandle(&resize, RESIZENAME, &ctx, &resizeevents), ctx.verbose);
	ctx.m2 = m2;
	ctx.m4 = m4;
	ctx.resize = resize;

	if (ctx.verbose) printf("Obtained handles.  %p decode, %p resize, %p encode\n",
		m2, resize, m4);

	OERR(OMX_GetParameter(m2, OMX_IndexParamVideoInit, porttype), ctx.verbose);
	if (ctx.verbose) printf("Found %d ports, starting at %d (%x) on decoder\n",
		porttype->nPorts, porttype->nStartPortNumber,
		porttype->nStartPortNumber);
	ctx.decportidx = decportidx = porttype->nStartPortNumber;

	OERR(OMX_GetParameter(resize, OMX_IndexParamImageInit, porttype), ctx.verbose);
	if (ctx.verbose) printf("Found %d ports, starting at %d (%x) on resizer\n",
		porttype->nPorts, porttype->nStartPortNumber,
		porttype->nStartPortNumber);
	ctx.resizeportidx = resizeportidx = porttype->nStartPortNumber;

	OERR(OMX_GetParameter(m4, OMX_IndexParamVideoInit, porttype), ctx.verbose);
	if (ctx.verbose) printf("Found %d ports, starting at %d (%x) on encoder\n",
		porttype->nPorts, porttype->nStartPortNumber,
		porttype->nStartPortNumber);
	ctx.encportidx = encportidx = porttype->nStartPortNumber;
	free(porttype);

	OERR(OMX_SendCommand(m2, OMX_CommandPortDisable, decportidx, NULL), ctx.verbose);
	OERR(OMX_SendCommand(m2, OMX_CommandPortDisable, decportidx+1, NULL), ctx.verbose);
	OERR(OMX_SendCommand(resize, OMX_CommandPortDisable, resizeportidx, NULL), ctx.verbose);
	OERR(OMX_SendCommand(resize, OMX_CommandPortDisable, resizeportidx+1, NULL), ctx.verbose);
	OERR(OMX_SendCommand(m4, OMX_CommandPortDisable, encportidx, NULL), ctx.verbose);
	OERR(OMX_SendCommand(m4, OMX_CommandPortDisable, encportidx+1, NULL), ctx.verbose);

	portdef->nPortIndex = decportidx;
	OERR(OMX_GetParameter(m2, OMX_IndexParamPortDefinition, portdef), ctx.verbose);
	viddef = &portdef->format.video;
	viddef->nFrameWidth = ic->streams[vidindex]->codec->width;
	viddef->nFrameHeight = ic->streams[vidindex]->codec->height;
	printf("Mapping codec %d to %d\n",
		ic->streams[vidindex]->codec->codec_id,
		mapcodec(ic->streams[vidindex]->codec->codec_id));
	viddef->eCompressionFormat = 
		mapcodec(ic->streams[vidindex]->codec->codec_id);
	viddef->bFlagErrorConcealment = 0;
//	viddef->xFramerate = 25<<16;
	OERR(OMX_SetParameter(m2, OMX_IndexParamPortDefinition, portdef), ctx.verbose);
	free(portdef);

#if 0
/* It appears these have limited effect: */
	dataunit->nPortIndex = decportidx;
	dataunit->eUnitType = OMX_DataUnitCodedPicture;
	dataunit->eEncapsulationType = OMX_DataEncapsulationGenericPayload;
	OERR(OMX_SetParameter(m2, OMX_IndexParamBrcmDataUnit, dataunit), ctx.verbose);

	if (ish264) {
		naltype->nPortIndex = decportidx;
		naltype->eNaluFormat = OMX_NaluFormatStartCodes;
		OERR(OMX_SetParameter(m2, OMX_IndexParamNalStreamFormatSelect, naltype), ctx.verbose);
	}
#endif

	MAKEME(level, OMX_VIDEO_PARAM_PROFILELEVELTYPE);
	level->nPortIndex = encportidx+1;
/* Dump what the encoder is capable of: */
	if (ctx.verbose) print_codecs();
	if (ctx.verbose) {
		pfmt->nPortIndex = encportidx+1; /* query the encoder's output port */
		for (oerr = OMX_ErrorNone, i = 0; oerr == OMX_ErrorNone; i++) {
			pfmt->nIndex = i;
			oerr = OMX_GetParameter(m4, OMX_IndexParamVideoPortFormat, pfmt);
			if (oerr == OMX_ErrorNoMore)
				break;
			printf("Codecs supported:\n"
				"\tIndex:\t\t%d\n"
				"\tCodec:\t\t%d (%x)\n"
				"\tColour:\t\t%d\n"
				"\tFramerate:\t%x (%f)\n",
				pfmt->nIndex,
				pfmt->eCompressionFormat, pfmt->eCompressionFormat,
				pfmt->eColorFormat,
				pfmt->xFramerate,
				((float)pfmt->xFramerate/(float)65536));
		}

		for (oerr = OMX_ErrorNone, i = 0; oerr == OMX_ErrorNone; i++) {
			level->nProfileIndex = i;
			oerr = OMX_GetParameter(m4,
				OMX_IndexParamVideoProfileLevelQuerySupported,
				level);
			if (oerr == OMX_ErrorNoMore)
				break;
			printf("Levels supported:\n"
				"\tIndex:\t\t%d\n"
				"\tProfile:\t%d\n"
				"\tLevel:\t\t%d\n",
				level->nProfileIndex,
				level->eProfile,
				level->eLevel);
		}
	}
	free(pfmt);
	free(level);

/* Dump current port states: */
	if (ctx.verbose) {
		dumpport(m2, decportidx);
		dumpport(m2, decportidx+1);
		dumpport(resize, resizeportidx);
		dumpport(resize, resizeportidx+1);
		dumpport(m4, encportidx);
		dumpport(m4, encportidx+1);
	}

	OERR(OMX_SendCommand(m2, OMX_CommandStateSet, OMX_StateIdle, NULL), ctx.verbose);

	decbufs = allocbufs(m2, decportidx, 1);

/* Start the initial loop.  Process until we have a state change on port 131 */
	ctx.decstate = DECINIT;
	ctx.encstate = ENCPREINIT;
	OERR(OMX_SendCommand(m2, OMX_CommandStateSet, OMX_StateExecuting, NULL), ctx.verbose);

	rp = calloc(sizeof(AVPacket), 1);
	filtertest = ish264;

	for (offset = i = j = 0; ctx.decstate != DECFAILED; i++, j++) {
		int rc;
		int k;
		int size, nsize;
		OMX_BUFFERHEADERTYPE *spare;

		if (offset == 0 && ctx.decstate != DECFLUSH) {
			rc = av_read_frame(ic, rp);
			if (rc != 0) {
				if (ic->pb->eof_reached)
					ctx.decstate = DECFLUSH;
				break;
			}
			if (rp->stream_index != vidindex) {
				i--;
				av_free_packet(rp);
				continue;
			}
			size = rp->size;
			ctx.fps++;
			ctx.framecount++;

			if (ish264 && filtertest) {
				filtertest = 0;
				ctx.bsfc = dofiltertest(rp);
			}
			if (ctx.bsfc) {
				p = filter(&ctx, rp);
			} else {
				p = rp;
			}
		}

		switch (ctx.decstate) {
		case DECTUNNELSETUP:
			start = time(NULL);
			//printf("NOW to CONFIGURE !!!!!!!!!!!!!!\n\n");
			configure(&ctx);
			ctx.decstate = DECRUNNING;
			break;
		case DECFLUSH:
			size = 0;
			/* Add the flush code here */
			printf("IN FLUSH NOW\n\n");
			break;
		case DECINIT:
			if (i < 120) /* Bail; decoder doesn't like it */
				break;
			ctx.decstate = DECFAILED;
			/* Drop through */
		case DECFAILED:
			fprintf(stderr, "Failed to set the parameters after "
					"%d video frames.  Giving up.\n", i);
			dumpport(m2, decportidx);
			dumpport(m2, decportidx+1);
			dumpport(resize, resizeportidx);
			dumpport(resize, resizeportidx+1);
			dumpport(m4, encportidx);
			dumpport(m4, encportidx+1);
			exit(1);
			break;
		default:
			break;	/* Shuts the compiler up */
		}

		for (spare = NULL; !spare; usleep(10)) {
			pthread_mutex_lock(&ctx.lock);
			spare = ctx.bufhead;
			ctx.bufhead = NULL;
			ctx.flags &= ~FLAGS_DECEMPTIEDBUF;
			pthread_mutex_unlock(&ctx.lock);
			while (spare) {
				write(fd, &spare->pBuffer[spare->nOffset],
					spare->nFilledLen);
				spare->nFilledLen = 0;
				spare->nOffset = 0;
				OERRq(OMX_FillThisBuffer(m4, spare));
				spare = spare->pAppPrivate;
			}

			spare = decbufs;
			for (k = 0; spare && spare->nFilledLen != 0; k++)
				spare = spare->pAppPrivate;
		}

		if (size > spare->nAllocLen) {
			nsize = spare->nAllocLen;
		} else {
			nsize = size;
		}

		if (ctx.decstate != DECFLUSH) {
			memcpy(spare->pBuffer, &(p->data[offset]), nsize);
			spare->nFlags = i == 0 ? OMX_BUFFERFLAG_STARTTIME : 0;
			spare->nFlags |= size == nsize ?
				OMX_BUFFERFLAG_ENDOFFRAME : 0;
		} else {
			spare->nFlags = OMX_BUFFERFLAG_STARTTIME |
					OMX_BUFFERFLAG_EOS;
		}
		spare->nFilledLen = nsize;
		spare->nOffset = 0;
		OERRq(OMX_EmptyThisBuffer(m2, spare));
		size -= nsize;
		if (size) {
			offset += nsize;
		} else {
			offset = 0;
			av_free_packet(p);
		}
	}

	close(fd);

	end = time(NULL);

	printf("Processed %d frames in %d seconds; %df/s\n",
		ctx.framecount, end-start, (ctx.framecount/(end-start)));

	// flush the encoder
//	OERR(OMX_SendCommand(m4, OMX_CommandFlush, encportidx, NULL), ctx.verbose);
//	OERR(OMX_SendCommand(m4, OMX_CommandFlush, encportidx+1, NULL), ctx.verbose);



	// tear down the tunnels
	OERR(OMX_SendCommand(m2, OMX_CommandStateSet, OMX_StateIdle, NULL), ctx.verbose);
	OERR(OMX_SendCommand(resize, OMX_CommandStateSet, OMX_StateIdle, NULL), ctx.verbose);
	OERR(OMX_SendCommand(m4, OMX_CommandStateSet, OMX_StateIdle, NULL), ctx.verbose);
	OERR(OMX_SendCommand(m2, OMX_CommandStateSet, OMX_StateLoaded, NULL), ctx.verbose);
	OERR(OMX_SendCommand(resize, OMX_CommandStateSet, OMX_StateLoaded, NULL), ctx.verbose);
	OERR(OMX_SendCommand(m4, OMX_CommandStateSet, OMX_StateLoaded, NULL), ctx.verbose);
	// free buffers
	vcos_free(decbufs);
	vcos_free(ctx.encbufs);
	// Apparently the teardown function is not implemented; use the setup
	// function with a NULL sink to break the tunnels instead.
	//OERR(OMX_TeardownTunnel(m2, decportidx+1, resize, resizeportidx), ctx.verbose);
	//OERR(OMX_TeardownTunnel(resize, resizeportidx+1, m4, encportidx), ctx.verbose);
	OERR(OMX_SendCommand(m2, OMX_CommandPortDisable, decportidx, NULL), ctx.verbose);
	OERR(OMX_SendCommand(m2, OMX_CommandPortDisable, decportidx+1, NULL), ctx.verbose);
	OERR(OMX_SendCommand(resize, OMX_CommandPortDisable, resizeportidx, NULL), ctx.verbose);
	OERR(OMX_SendCommand(resize, OMX_CommandPortDisable, resizeportidx+1, NULL), ctx.verbose);
	OERR(OMX_SendCommand(m4, OMX_CommandPortDisable, encportidx, NULL), ctx.verbose);
	OERR(OMX_SendCommand(m4, OMX_CommandPortDisable, encportidx+1, NULL), ctx.verbose);
//	ilclient_disable_port_buffers(m2, decportidx, NULL, NULL, NULL);
//	ilclient_disable_port_buffers(m4, encportidx, NULL, NULL, NULL);
	OERR(OMX_SetupTunnel(m2, decportidx+1, NULL, 0), ctx.verbose);
	OERR(OMX_SetupTunnel(resize, resizeportidx, NULL, 0), ctx.verbose);
	OERR(OMX_SetupTunnel(resize, resizeportidx+1, NULL, 0), ctx.verbose);
	OERR(OMX_SetupTunnel(m4, encportidx, NULL, 0), ctx.verbose);
	OERR(OMX_FreeHandle(m2), ctx.verbose);
	OERR(OMX_FreeHandle(resize), ctx.verbose);
	OERR(OMX_FreeHandle(m4), ctx.verbose);

//	free(porttype);
//	free(portdef);
//	free(pfmt);
//	free(level);
	return 0;
}
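main() and configure() lean on a few helper macros that the listings do not include: MAKEME() allocates an OMX parameter structure and stamps its mandatory nSize/nVersion header, OERR() aborts on any OMX error (optionally logging successes when verbose), and OERRq() in the buffer loop is presumably the same check without the logging. The definitions below are plausible reconstructions written for illustration, not copies of the original macros.

/* Assumed helpers, not the original definitions. */
#define MAKEME(y, x)						\
	do {							\
		(y) = calloc(1, sizeof(x));			\
		if (!(y)) {					\
			perror("calloc");			\
			exit(1);				\
		}						\
		(y)->nSize = sizeof(x);				\
		(y)->nVersion.nVersion = OMX_VERSION;		\
	} while (0)

#define OERR(cmd, verbose)					\
	do {							\
		OMX_ERRORTYPE res_ = (cmd);			\
		if (res_ != OMX_ErrorNone) {			\
			fprintf(stderr, "%s:%d: " #cmd		\
				" failed: %x\n",		\
				__FILE__, __LINE__, res_);	\
			exit(1);				\
		} else if (verbose) {				\
			printf("%s:%d: " #cmd " OK\n",		\
				__FILE__, __LINE__);		\
		}						\
	} while (0)

/* Quiet variant used inside the buffer loop. */
#define OERRq(cmd)						\
	do {							\
		OMX_ERRORTYPE res_ = (cmd);			\
		if (res_ != OMX_ErrorNone) {			\
			fprintf(stderr, #cmd " failed: %x\n",	\
				res_);				\
			exit(1);				\
		}						\
	} while (0)

OMX_VERSION is the version constant from the IL headers; the Broadcom IL implementation expects nSize and nVersion to be filled in on every structure it is handed.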
Example #5
static void configure(struct context *ctx)
{
	pthread_t	fpst;
	pthread_attr_t	fpsa;
	OMX_CONFIG_FRAMERATETYPE	*framerate;
	OMX_VIDEO_PARAM_PROFILELEVELTYPE *level;
	OMX_VIDEO_PARAM_BITRATETYPE	*bitrate;
	OMX_BUFFERHEADERTYPE		*encbufs;
	OMX_PARAM_PORTDEFINITIONTYPE	*portdef, *portimgdef;
	OMX_VIDEO_PORTDEFINITIONTYPE	*viddef;
	OMX_VIDEO_PARAM_PORTFORMATTYPE	*pfmt;
	OMX_CONFIG_POINTTYPE		*pixaspect;
	int encportidx, decportidx, resizeportidx;
	OMX_HANDLETYPE	m2, m4, resize;

	encportidx = ctx->encportidx;
	decportidx = ctx->decportidx;
	resizeportidx = ctx->resizeportidx;
	m2 = ctx->m2;
	m4 = ctx->m4;
	resize = ctx->resize;

	MAKEME(portdef, OMX_PARAM_PORTDEFINITIONTYPE);
	MAKEME(portimgdef, OMX_PARAM_PORTDEFINITIONTYPE);
	viddef = &portdef->format.video;
	MAKEME(pixaspect, OMX_CONFIG_POINTTYPE);

	printf("Decoder has changed settings.  Setting up resizer.\n");

/*	We need some parameters from the decoder's output port to set up the
	resizer:
	- eColorFormat (OMX_COLOR_FormatYUV420PackedPlanar)
	- width of the frame
	- height of the frame
*/
	portdef->nPortIndex = decportidx+1;
	OERR(OMX_GetParameter(m2, OMX_IndexParamPortDefinition, portdef), ctx->verbose);
	portimgdef->nPortIndex = resizeportidx;

	OERR(OMX_GetParameter(resize, OMX_IndexParamPortDefinition, portimgdef), ctx->verbose);

	portimgdef->format.image.eColorFormat = portdef->format.video.eColorFormat;
	portimgdef->format.image.nFrameWidth = portdef->format.video.nFrameWidth;
	portimgdef->format.image.nFrameHeight = portdef->format.video.nFrameHeight;
	portimgdef->format.image.nStride = 0;
	portimgdef->format.image.nSliceHeight = 0;
	OERR(OMX_SetParameter(resize, OMX_IndexParamPortDefinition, portimgdef), ctx->verbose);

//	The actual resize target, if a size was given on the command line:
	if(ctx->width) {
	 	portimgdef->format.image.nFrameWidth = ctx->width;
		portimgdef->format.image.nFrameHeight = ctx->height;
	}
	portimgdef->format.image.nStride = 0;
	portimgdef->format.image.nSliceHeight = 0;
	portimgdef->nPortIndex = resizeportidx+1;
	OERR(OMX_SetParameter(resize, OMX_IndexParamPortDefinition, portimgdef), ctx->verbose);

/*	Now set the input parameters for the encoder to the scaled width/height */
	portdef->format.video.nFrameWidth = portimgdef->format.image.nFrameWidth;
	portdef->format.video.nFrameHeight = portimgdef->format.image.nFrameHeight;
	portdef->format.video.nStride = 0;
	portdef->format.video.nSliceHeight = 0;
	free(portimgdef);	/* free only after copying the scaled dimensions */
	portdef->nPortIndex = encportidx;
	OERR(OMX_SetParameter(m4, OMX_IndexParamPortDefinition, portdef), ctx->verbose);

/*	setup tunnels */
	OERR(OMX_SetupTunnel(m2, decportidx+1, resize, resizeportidx), ctx->verbose);
	OERR(OMX_SetupTunnel(resize, resizeportidx+1, m4, encportidx), ctx->verbose);

//	OERR(OMX_SendCommand(m2, OMX_CommandStateSet, OMX_StateIdle, NULL), ctx->verbose);
	OERR(OMX_SendCommand(resize, OMX_CommandStateSet, OMX_StateIdle, NULL), ctx->verbose);
	OERR(OMX_SendCommand(m4, OMX_CommandStateSet, OMX_StateIdle, NULL), ctx->verbose);

	viddef = &portdef->format.video;
	if (viddef->nBitrate != 0) {
		viddef->nBitrate *= 3;
		viddef->nBitrate /= 4;
	} else {
		viddef->nBitrate = (1*1024*1024/2);
	}
//		viddef->nBitrate = (2*1024*1024);
//	viddef->nFrameWidth  /= 2;
//	viddef->nFrameHeight /= 2;

	viddef->eCompressionFormat = OMX_VIDEO_CodingAVC;
	viddef->nStride = viddef->nSliceHeight = viddef->eColorFormat = 0;
	portdef->nPortIndex = encportidx+1;
	OERR(OMX_SetParameter(m4, OMX_IndexParamPortDefinition, portdef), ctx->verbose);
	/* viddef points into portdef, which is still read below, so portdef is
	   freed after its last use (see the framerate setup). */

	MAKEME(bitrate, OMX_VIDEO_PARAM_BITRATETYPE);
	bitrate->nPortIndex = encportidx+1;
	bitrate->eControlRate = OMX_Video_ControlRateVariable;
	bitrate->nTargetBitrate = viddef->nBitrate;
	OERR(OMX_SetParameter(m4, OMX_IndexParamVideoBitrate, bitrate), ctx->verbose);
	free(bitrate);

	MAKEME(pfmt, OMX_VIDEO_PARAM_PORTFORMATTYPE);
	pfmt->nPortIndex = encportidx+1;
	pfmt->nIndex = 0;
	pfmt->eCompressionFormat = OMX_VIDEO_CodingAVC;
	pfmt->eColorFormat = OMX_COLOR_FormatYUV420PackedPlanar;
	pfmt->xFramerate = viddef->xFramerate;

	pixaspect->nPortIndex = encportidx+1;
	pixaspect->nX = 118;
	pixaspect->nY = 81;
	OERR(OMX_SetConfig(m4, OMX_IndexParamBrcmPixelAspectRatio, pixaspect), ctx->verbose);
	free(pixaspect);

//		DUMPPORT(m4, encportidx+1); exit(0);
	pfmt->nPortIndex = encportidx+1;
	pfmt->nIndex = 1;
	pfmt->eCompressionFormat = OMX_VIDEO_CodingAVC;
	pfmt->eColorFormat = 0;
	pfmt->xFramerate = 0; //viddef->xFramerate;
	OERR(OMX_SetParameter(m4, OMX_IndexParamVideoPortFormat, pfmt), ctx->verbose);
	free(pfmt);
	
	MAKEME(framerate, OMX_CONFIG_FRAMERATETYPE);
	framerate->nPortIndex = encportidx+1;
	framerate->xEncodeFramerate = viddef->xFramerate;
	OERR(OMX_SetParameter(m4, OMX_IndexConfigVideoFramerate, framerate), ctx->verbose);
	free(framerate);
	free(portdef);	/* last use of viddef (which points into portdef) was above */

#if 0 /* Doesn't seem to apply to video? */
printf("Interlacing: %d\n", ic->streams[vidindex]->codec->field_order);
	if (0 || ic->streams[vidindex]->codec->field_order == AV_FIELD_TT) {
		interlace->nPortIndex = encportidx+1;
		interlace->eMode = OMX_InterlaceFieldsInterleavedUpperFirst;
		interlace->bRepeatFirstField = 0;
		OERR(OMX_SetParameter(m4, OMX_IndexConfigCommonInterlace,
			interlace), ctx->verbose);
	}
#endif

	MAKEME(level, OMX_VIDEO_PARAM_PROFILELEVELTYPE);
	level->nPortIndex = encportidx+1;
	OERR(OMX_GetParameter(m4, OMX_IndexParamVideoProfileLevelCurrent, level), ctx->verbose);
	if (ctx->verbose) printf("Current level:\t\t%d\nCurrent profile:\t%d\n",
		level->eLevel, level->eProfile);
	OERR(OMX_SetParameter(m4, OMX_IndexParamVideoProfileLevelCurrent, level), ctx->verbose);
	free(level);
	ctx->encbufs = encbufs = allocbufs(m4, encportidx+1, 1);
	OERR(OMX_SendCommand(m2, OMX_CommandPortEnable, decportidx+1, NULL), ctx->verbose);
	OERR(OMX_SendCommand(resize, OMX_CommandPortEnable, resizeportidx, NULL), ctx->verbose);
	OERR(OMX_SendCommand(resize, OMX_CommandPortEnable, resizeportidx+1, NULL), ctx->verbose);
	OERR(OMX_SendCommand(m4, OMX_CommandPortEnable, encportidx, NULL), ctx->verbose);
	OERR(OMX_SendCommand(m4, OMX_CommandStateSet, OMX_StateExecuting, NULL), ctx->verbose);
	OERR(OMX_SendCommand(resize, OMX_CommandStateSet, OMX_StateExecuting, NULL), ctx->verbose);
	sleep(1);
	OERR(OMX_FillThisBuffer(m4, encbufs), ctx->verbose);

/* Dump current port states: */
	if (ctx->verbose) {
		dumpport(m2, decportidx);
		dumpport(m2, decportidx+1);
		dumpport(resize, resizeportidx);
		dumpport(resize, resizeportidx+1);
		dumpport(m4, encportidx);
		dumpport(m4, encportidx+1);
	}

	if (ctx->verbose) atexit(dumpportstate);
	pthread_attr_init(&fpsa);
	pthread_attr_setdetachstate(&fpsa, PTHREAD_CREATE_DETACHED);
	pthread_create(&fpst, &fpsa, fps, NULL);
}
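allocbufs() is another helper that is not shown. Its behaviour can be inferred from the call sites: it returns a chain of OMX_BUFFERHEADERTYPEs linked through pAppPrivate (the main loop in Example #4 walks spare->pAppPrivate), and the third argument apparently decides whether the port is enabled first. A sketch under those assumptions, reusing the MAKEME()/OERR() reconstructions given after Example #4:

static OMX_BUFFERHEADERTYPE *allocbufs(OMX_HANDLETYPE h, int port, int enable)
{
	OMX_PARAM_PORTDEFINITIONTYPE	*portdef;
	OMX_BUFFERHEADERTYPE		*list = NULL;
	OMX_BUFFERHEADERTYPE		**end = &list;
	int				i;

	MAKEME(portdef, OMX_PARAM_PORTDEFINITIONTYPE);
	portdef->nPortIndex = port;
	OERR(OMX_GetParameter(h, OMX_IndexParamPortDefinition, portdef), 0);

	/* The port-enable command only completes once buffers are supplied */
	if (enable)
		OERR(OMX_SendCommand(h, OMX_CommandPortEnable, port, NULL), 0);

	for (i = 0; i < portdef->nBufferCountActual; i++) {
		OERR(OMX_AllocateBuffer(h, end, port, NULL,
			portdef->nBufferSize), 0);
		(*end)->pAppPrivate = NULL;	/* chain through pAppPrivate */
		end = (OMX_BUFFERHEADERTYPE **) &(*end)->pAppPrivate;
	}

	free(portdef);
	return list;
}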
Example #6
void omx_teardown_pipeline(struct omx_pipeline_t* pipe)
{
   OMX_BUFFERHEADERTYPE *buf;
   int i=1;

   DEBUGF("[vcodec] omx_teardown pipeline:\n");
   DEBUGF("pipe->video_decode.port_settings_changed = %d\n",pipe->video_decode.port_settings_changed);
   DEBUGF("pipe->image_fx.port_settings_changed = %d\n",pipe->image_fx.port_settings_changed);
   DEBUGF("pipe->video_scheduler.port_settings_changed = %d\n",pipe->video_scheduler.port_settings_changed);
   //dumpport(pipe->video_decode.h,130);

#if 0
   /* Indicate end of video stream */
   buf = get_next_buffer(&pipe->video_decode);

   buf->nFilledLen = 0;
   buf->nFlags = OMX_BUFFERFLAG_TIME_UNKNOWN | OMX_BUFFERFLAG_EOS;
   
   OERR(OMX_EmptyThisBuffer(pipe->video_decode.h, buf));

   /* NOTE: Three events are sent after the previous command:

      [EVENT] Got an event of type 4 on video_decode 0x426a10 (d1: 83, d2 1)
      [EVENT] Got an event of type 4 on video_scheduler 0x430d10 (d1: b, d2 1)
      [EVENT] Got an event of type 4 on video_render 0x430b30 (d1: 5a, d2 1)  5a = port (90) 1 = OMX_BUFFERFLAG_EOS
   */

#endif

#if 0
   DEBUGF("[vcodec] omx_teardown pipeline 2\n");
   /* Wait for video_decode to shutdown */
   pthread_mutex_lock(&pipe->video_decode.eos_mutex);
   while (!pipe->video_decode.eos)
     pthread_cond_wait(&pipe->video_decode.eos_cv,&pipe->video_decode.eos_mutex);
   pthread_mutex_unlock(&pipe->video_decode.eos_mutex);
#endif
         
   DEBUGF("[vcodec] omx_teardown pipeline 1\n");

   /* Transition all components to Idle, if they have been initialised */
   omx_send_command_and_wait(&pipe->video_decode, OMX_CommandStateSet, OMX_StateIdle, NULL); 
   omx_send_command_and_wait(&pipe->clock, OMX_CommandStateSet, OMX_StateIdle, NULL);
   DEBUGF("pipe->do_deinterlace=%d, pipe->image_fx=%d\n",pipe->do_deinterlace,(int)pipe->image_fx.h);
   if (pipe->video_decode.port_settings_changed == 2) {
      if (pipe->do_deinterlace) { 
        omx_send_command_and_wait(&pipe->image_fx, OMX_CommandStateSet, OMX_StateIdle, NULL); 
      } else {
        omx_send_command_and_wait(&pipe->video_scheduler, OMX_CommandStateSet, OMX_StateIdle, NULL); 
      }
   }
   if (pipe->image_fx.port_settings_changed == 2) {
     omx_send_command_and_wait(&pipe->video_scheduler, OMX_CommandStateSet, OMX_StateIdle, NULL);
   }
   if (pipe->video_scheduler.port_settings_changed == 2) {
     omx_send_command_and_wait(&pipe->video_render, OMX_CommandStateSet, OMX_StateIdle, NULL);
   }
   omx_send_command_and_wait(&pipe->audio_render, OMX_CommandStateSet, OMX_StateIdle, NULL);

#if 0
   DEBUGF("[vcodec] omx_teardown pipeline 2\n");
   /* Wait for video_render to shutdown */
   pthread_mutex_lock(&pipe->video_render.eos_mutex);
   while (!pipe->video_render.eos)
     pthread_cond_wait(&pipe->video_render.eos_cv,&pipe->video_render.eos_mutex);
   pthread_mutex_unlock(&pipe->video_render.eos_mutex);
#endif

/* 
  Pipeline is as follows:

[video data] -> 130 video_decode 131 -> 190 image_fx 191 -> 10 video_scheduler 11 -> 90 video_render
                                                clock 81 -> 12 video_scheduler
                                                clock 80 -> 101 audio_render
                                            [audio data] -> 100 audio_render
*/

   /* Flush entrances to pipeline */
   omx_send_command_and_wait(&pipe->video_decode,OMX_CommandFlush,130,NULL);
   omx_send_command_and_wait(&pipe->audio_render,OMX_CommandFlush,100,NULL);

   /* Flush all tunnels */
   DEBUGF("[vcodec] omx_teardown pipeline 3\n");
   if (pipe->do_deinterlace) {
     omx_flush_tunnel(&pipe->video_decode, 131, &pipe->image_fx, 190);
     omx_flush_tunnel(&pipe->image_fx, 191, &pipe->video_scheduler, 10);
   } else {
     omx_flush_tunnel(&pipe->video_decode, 131, &pipe->video_scheduler, 10);
   }
   DEBUGF("[vcodec] omx_teardown pipeline 4\n");
   omx_flush_tunnel(&pipe->video_scheduler, 11, &pipe->video_render, 90);
   omx_flush_tunnel(&pipe->clock, 81, &pipe->video_scheduler, 12);

   DEBUGF("[vcodec] omx_teardown pipeline 2b\n");

   omx_send_command_and_wait(&pipe->video_scheduler,OMX_CommandFlush,10,NULL);
   DEBUGF("[vcodec] omx_teardown pipeline 5\n");

   omx_flush_tunnel(&pipe->clock, 80, &pipe->audio_render, 101);

   /* Disable audio_render input port and buffers */
   omx_send_command_and_wait0(&pipe->audio_render, OMX_CommandPortDisable, 100, NULL);
   omx_free_buffers(&pipe->audio_render, 100);
   omx_send_command_and_wait1(&pipe->audio_render, OMX_CommandPortDisable, 100, NULL);
   DEBUGF("[vcodec] omx_teardown pipeline 9\n");

   /* Scheduler -> render tunnel */
   if (pipe->video_scheduler.port_settings_changed == 2) {
     omx_send_command_and_wait(&pipe->video_scheduler, OMX_CommandPortDisable, 11, NULL);
     omx_send_command_and_wait(&pipe->video_render, OMX_CommandPortDisable, 90, NULL);

     omx_send_command_and_wait(&pipe->video_scheduler, OMX_CommandPortDisable, 10, NULL);
   }

   if ((pipe->image_fx.port_settings_changed == 2) && (pipe->do_deinterlace)) {
     omx_send_command_and_wait(&pipe->image_fx, OMX_CommandPortDisable, 190, NULL);
     omx_send_command_and_wait(&pipe->image_fx, OMX_CommandPortDisable, 191, NULL);
   }

   DEBUGF("[vcodec] omx_teardown pipeline 8a\n");

   //dumpport(pipe->video_scheduler.h,10);

   /* Teardown tunnels */
/* 
  Pipeline is as follows:

[video data] -> 130 video_decode 131 -> 190 image_fx 191 -> 10 video_scheduler 11 -> 90 video_render
                                                clock 81 -> 12 video_scheduler
                                                clock 80 -> 101 audio_render
                                            [audio data] -> 100 audio_render
*/
   //dumpport(pipe->video_decode.h,131);
   OERR(OMX_SetupTunnel(pipe->video_scheduler.h, 10, NULL, 0));

   DEBUGF("[vcodec] omx_teardown pipeline 10\n");

   /* NOTE: The clock disable doesn't complete until after the video scheduler port is 
      disabled (but it completes before the video scheduler port disabling completes). */
   OERR(OMX_SendCommand(pipe->clock.h, OMX_CommandPortDisable, 80, NULL));
   omx_send_command_and_wait(&pipe->audio_render, OMX_CommandPortDisable, 101, NULL);
   OERR(OMX_SendCommand(pipe->clock.h, OMX_CommandPortDisable, 81, NULL));
   omx_send_command_and_wait(&pipe->video_scheduler, OMX_CommandPortDisable, 12, NULL);

   DEBUGF("[vcodec] omx_teardown pipeline 12b\n");

   if (pipe->do_deinterlace) {
     OERR(OMX_SetupTunnel(pipe->image_fx.h, 190, NULL, 0));
     OERR(OMX_SetupTunnel(pipe->image_fx.h, 191, NULL, 0));
   }

   DEBUGF("[vcodec] omx_teardown pipeline 13\n");

   OERR(OMX_SetupTunnel(pipe->video_scheduler.h, 11, NULL, 0));
   OERR(OMX_SetupTunnel(pipe->video_render.h, 90, NULL, 0));

   OERR(OMX_SetupTunnel(pipe->clock.h, 81, NULL, 0));
   OERR(OMX_SetupTunnel(pipe->video_scheduler.h, 12, NULL, 0));

   DEBUGF("[vcodec] omx_teardown pipeline 13b\n");

   OERR(OMX_SetupTunnel(pipe->clock.h, 80, NULL, 0));
   OERR(OMX_SetupTunnel(pipe->audio_render.h, 101, NULL, 0));

   DEBUGF("[vcodec] omx_teardown pipeline 8b\n");


/* 
  Pipeline is as follows:

[video data] -> 130 video_decode 131 -> 190 image_fx 191 -> 10 video_scheduler 11 -> 90 video_render
                                                clock 81 -> 12 video_scheduler
                                                clock 80 -> 101 audio_render
                                            [audio data] -> 100 audio_render
*/

   omx_show_state(&pipe->video_decode,130,131,0);
   dumpport(pipe->video_decode.h,131);
   omx_show_state(&pipe->video_scheduler,10,11,12);
   if (pipe->do_deinterlace) { omx_show_state(&pipe->image_fx,190,191,0); }
   omx_show_state(&pipe->video_render,90,0,0);
   omx_show_state(&pipe->audio_render,100,101,0);
   omx_show_state(&pipe->clock,80,81,0);

   if (pipe->video_decode.port_settings_changed == 2) {
     //dumpport(pipe->video_decode.h,131);
     omx_send_command_and_wait(&pipe->video_decode, OMX_CommandPortDisable, 131, NULL);
   }

   DEBUGF("[vcodec] omx_teardown pipeline 11\n");

   /* Disable video_decode input port and buffers */
   //dumpport(pipe->video_decode.h,130);
   omx_send_command_and_wait0(&pipe->video_decode, OMX_CommandPortDisable, 130, NULL);
   DEBUGF("[vcodec] omx_teardown pipeline 6\n");
   omx_free_buffers(&pipe->video_decode, 130);
   DEBUGF("[vcodec] omx_teardown pipeline 7\n");
   //omx_send_command_and_wait1(&pipe->video_decode, OMX_CommandPortDisable, 130, NULL);

   //dumpport(pipe->video_decode.h,130);
   if (is_port_enabled(pipe->video_decode.h, 130)) {
     fprintf(stderr,"Unexpected error video_decode port 130 is not disabled\n");
     exit(1);
   }

   DEBUGF("[vcodec] omx_teardown pipeline 12\n");

   OERR(OMX_SetupTunnel(pipe->video_decode.h, 131, NULL, 0));

   DEBUGF("[vcodec] omx_teardown pipeline 15\n");

   omx_show_state(&pipe->video_decode,130,131,0);

   /* Transition all components to Loaded */
   DEBUGF("[vcodec] omx_teardown pipeline 15a\n");
   omx_send_command_and_wait(&pipe->video_decode, OMX_CommandStateSet, OMX_StateLoaded, NULL);
   DEBUGF("[vcodec] omx_teardown pipeline 15b\n");
   omx_send_command_and_wait(&pipe->video_scheduler, OMX_CommandStateSet, OMX_StateLoaded, NULL);
   DEBUGF("[vcodec] omx_teardown pipeline 15c\n");
   if (((pipe->video_decode.port_settings_changed == 2) && (pipe->do_deinterlace)) || (pipe->image_fx.port_settings_changed == 2)) {
     omx_send_command_and_wait(&pipe->video_render, OMX_CommandStateSet, OMX_StateLoaded, NULL);
   }
   DEBUGF("[vcodec] omx_teardown pipeline 15d\n");
   omx_send_command_and_wait(&pipe->audio_render, OMX_CommandStateSet, OMX_StateLoaded, NULL);
   DEBUGF("[vcodec] omx_teardown pipeline 15e\n");
   omx_send_command_and_wait(&pipe->clock, OMX_CommandStateSet, OMX_StateLoaded, NULL);
   DEBUGF("[vcodec] omx_teardown pipeline 15f\n");
   if (pipe->do_deinterlace) { omx_send_command_and_wait(&pipe->image_fx, OMX_CommandStateSet, OMX_StateLoaded, NULL); }

   DEBUGF("[vcodec] omx_teardown pipeline 16\n");
   /* Finally free the component handles */
   OERR(OMX_FreeHandle(pipe->video_decode.h));
   OERR(OMX_FreeHandle(pipe->video_scheduler.h));
   OERR(OMX_FreeHandle(pipe->video_render.h));
   OERR(OMX_FreeHandle(pipe->audio_render.h));
   OERR(OMX_FreeHandle(pipe->clock.h));
   if (pipe->do_deinterlace) { OERR(OMX_FreeHandle(pipe->image_fx.h)); }
   DEBUGF("[vcodec] omx_teardown pipeline 17\n");
}
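Examples #6 and #7 come from a different program (a media-player style pipeline) with its own helpers: a single-argument OERR(), omx_init_component(), omx_flush_tunnel(), omx_free_buffers() and omx_send_command_and_wait(), none of which are shown here. The core pattern behind omx_send_command_and_wait(), and its split variants (..._wait0 issues the command, ..._wait1 waits for completion), is to send an OMX command and then block until the component's event handler reports the matching OMX_EventCmdComplete. The sketch below illustrates that pattern; the struct omx_component_t fields are invented for the illustration and will not match the real code exactly.

/* Assumed component wrapper; real field names may differ. */
struct omx_component_t {
	OMX_HANDLETYPE	h;
	pthread_mutex_t	cmd_mutex;
	pthread_cond_t	cmd_cv;
	int		cmd_complete;
	/* ... plus the eos/port_settings_changed fields used in Example #6 */
};

static void omx_send_command_and_wait(struct omx_component_t *c,
	OMX_COMMANDTYPE cmd, OMX_U32 param, OMX_PTR cmddata)
{
	pthread_mutex_lock(&c->cmd_mutex);
	c->cmd_complete = 0;
	pthread_mutex_unlock(&c->cmd_mutex);

	if (OMX_SendCommand(c->h, cmd, param, cmddata) != OMX_ErrorNone) {
		fprintf(stderr, "OMX_SendCommand(%d) failed\n", (int) cmd);
		exit(1);
	}

	/* The component's EventHandler is expected to set cmd_complete and
	 * signal cmd_cv when it sees OMX_EventCmdComplete for this command. */
	pthread_mutex_lock(&c->cmd_mutex);
	while (!c->cmd_complete)
		pthread_cond_wait(&c->cmd_cv, &c->cmd_mutex);
	pthread_mutex_unlock(&c->cmd_mutex);
}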
Example #7
OMX_ERRORTYPE omx_setup_camera_pipeline(struct omx_pipeline_t* pipe)
{

  // Create component.
  omx_init_component(pipe, &pipe->camera, "OMX.broadcom.camera");

  // Use OMX_IndexConfigRequestCallback to request callbacks on OMX_IndexParamCameraDeviceNumber.
  OMX_CONFIG_REQUESTCALLBACKTYPE cbtype;
  OMX_INIT_STRUCTURE(cbtype);
  cbtype.nPortIndex=OMX_ALL;
  cbtype.nIndex=OMX_IndexParamCameraDeviceNumber;
  cbtype.bEnable = OMX_TRUE;
  OERR(OMX_SetConfig(pipe->camera.h, OMX_IndexConfigRequestCallback, &cbtype));

  // Set OMX_IndexParamISPTunerName.

  // Set OMX_IndexParamCameraFlashType.

  // Set OMX_IndexParamCameraDeviceNumber.
  OMX_PARAM_U32TYPE device;
  OMX_INIT_STRUCTURE(device);
  device.nPortIndex = OMX_ALL;
  device.nU32 = 0;
  OERR(OMX_SetParameter(pipe->camera.h, OMX_IndexParamCameraDeviceNumber, &device));

  dumpport(pipe->camera.h, 71);

  /* Set the resolution */
  OMX_PARAM_PORTDEFINITIONTYPE portdef;
  OMX_INIT_STRUCTURE(portdef);
  portdef.nPortIndex = 71;
  OERR(OMX_GetParameter(pipe->camera.h, OMX_IndexParamPortDefinition, &portdef));
  portdef.format.image.nFrameWidth = 640;
  portdef.format.image.nFrameHeight = 360;
  portdef.format.image.nStride = 640;
  OERR(OMX_SetParameter(pipe->camera.h, OMX_IndexParamPortDefinition, &portdef));

  /* Set the framerate */
  OMX_CONFIG_FRAMERATETYPE framerate;
  OMX_INIT_STRUCTURE(framerate);
  framerate.nPortIndex = 71;
  framerate.xEncodeFramerate = 25 << 16; // Q16 format - 25fps
  OERR(OMX_SetConfig(pipe->camera.h, OMX_IndexConfigVideoFramerate, &framerate));

  /* Set the sharpness */
  OMX_CONFIG_SHARPNESSTYPE sharpness;
  OMX_INIT_STRUCTURE(sharpness);
  sharpness.nPortIndex = OMX_ALL;
  sharpness.nSharpness = -50; /* -100 to 100 */
  OERR(OMX_SetConfig(pipe->camera.h, OMX_IndexConfigCommonSharpness, &sharpness));

  /* Set the contrast */
  OMX_CONFIG_CONTRASTTYPE contrast;
  OMX_INIT_STRUCTURE(contrast);
  contrast.nPortIndex = OMX_ALL;
  contrast.nContrast = -10; /* -100 to 100 */
  OERR(OMX_SetConfig(pipe->camera.h, OMX_IndexConfigCommonContrast, &contrast));

  /* Set the brightness */
  OMX_CONFIG_BRIGHTNESSTYPE brightness;
  OMX_INIT_STRUCTURE(brightness);
  brightness.nPortIndex = OMX_ALL;
  brightness.nBrightness = 50; /* 0 to 100 */
  OERR(OMX_SetConfig(pipe->camera.h, OMX_IndexConfigCommonBrightness, &brightness));

  /* Set the saturation */
  OMX_CONFIG_SATURATIONTYPE saturation;
  OMX_INIT_STRUCTURE(saturation);
  saturation.nPortIndex = OMX_ALL;
  saturation.nSaturation = 0; /* -100 to 100 */
  OERR(OMX_SetConfig(pipe->camera.h, OMX_IndexConfigCommonSaturation, &saturation));

  /* Video stabilisation */
  OMX_CONFIG_FRAMESTABTYPE framestab;
  OMX_INIT_STRUCTURE(framestab);
  framestab.nPortIndex = OMX_ALL;
  framestab.bStab = OMX_FALSE;
  OERR(OMX_SetConfig(pipe->camera.h, OMX_IndexConfigCommonFrameStabilisation, &framestab));

  /* Set EV compensation, ISO and metering mode */
  OMX_CONFIG_EXPOSUREVALUETYPE exposurevalue;
  OMX_INIT_STRUCTURE(exposurevalue);
  exposurevalue.nPortIndex = OMX_ALL;
  OERR(OMX_GetConfig(pipe->camera.h, OMX_IndexConfigCommonExposureValue, &exposurevalue));
  fprintf(stderr,"nSensitivity=%d\n",exposurevalue.nSensitivity);
  exposurevalue.xEVCompensation = 0;  /* Fixed point value stored as Q16 */
  exposurevalue.nSensitivity = 100;         /**< e.g. nSensitivity = 100 implies "ISO 100" */
  exposurevalue.bAutoSensitivity = OMX_FALSE;
  exposurevalue.eMetering = OMX_MeteringModeAverage; 
  OERR(OMX_SetConfig(pipe->camera.h, OMX_IndexConfigCommonExposureValue, &exposurevalue));

  /* Set exposure mode */
  OMX_CONFIG_EXPOSURECONTROLTYPE exposure;
  OMX_INIT_STRUCTURE(exposure);
  exposure.nPortIndex = OMX_ALL;
  exposure.eExposureControl = OMX_ExposureControlAuto;
  OERR(OMX_SetConfig(pipe->camera.h, OMX_IndexConfigCommonExposure, &exposure));

  /* Set AWB mode */
  OMX_CONFIG_WHITEBALCONTROLTYPE awb;
  OMX_INIT_STRUCTURE(awb);
  awb.nPortIndex = OMX_ALL;
  awb.eWhiteBalControl = OMX_WhiteBalControlAuto;
  OERR(OMX_SetConfig(pipe->camera.h, OMX_IndexConfigCommonWhiteBalance, &awb));
  
  /* Set image effect */
  OMX_CONFIG_IMAGEFILTERTYPE imagefilter;
  OMX_INIT_STRUCTURE(imagefilter);
  imagefilter.nPortIndex = OMX_ALL;
  imagefilter.eImageFilter = OMX_ImageFilterNone;
  OERR(OMX_SetConfig(pipe->camera.h, OMX_IndexConfigCommonImageFilter, &imagefilter));

  /* Set colour effect */
  OMX_CONFIG_COLORENHANCEMENTTYPE colour;
  OMX_INIT_STRUCTURE(colour);
  colour.nPortIndex = OMX_ALL;
  colour.bColorEnhancement = OMX_FALSE;
  colour.nCustomizedU = 128;
  colour.nCustomizedV = 128;
  OERR(OMX_SetConfig(pipe->camera.h, OMX_IndexConfigCommonColorEnhancement, &colour));

  /* Turn off the LED - doesn't work! */
  OMX_CONFIG_PRIVACYINDICATORTYPE privacy;
  OMX_INIT_STRUCTURE(privacy);
  privacy.ePrivacyIndicatorMode = OMX_PrivacyIndicatorOff;
  OERR(OMX_SetConfig(pipe->camera.h, OMX_IndexConfigPrivacyIndicator, &privacy));

  // Wait for the callback that OMX_IndexParamCameraDeviceNumber has
  // changed. At this point, all the drivers have been loaded. Other
  // settings can be applied whilst waiting for this event.
  fprintf(stderr,"Waiting for camera config to change\n");
  while (!pipe->camera.config_changed);  /* TODO: Use a condition variable */
  fprintf(stderr,"Config changed\n");

  // Query for OMX_IndexConfigCameraSensorModes as required.

  // Change state to IDLE, and proceed as required. 
  omx_send_command_and_wait(&pipe->camera, OMX_CommandStateSet, OMX_StateIdle, NULL);

  OMX_CONFIG_PORTBOOLEANTYPE cameraport;
  OMX_INIT_STRUCTURE(cameraport);
  cameraport.nPortIndex = 71;
  cameraport.bEnabled = OMX_TRUE;
  OERR(OMX_SetParameter(pipe->camera.h, OMX_IndexConfigPortCapturing, &cameraport));

  omx_init_component(pipe, &pipe->video_render, "OMX.broadcom.video_render");
  omx_send_command_and_wait(&pipe->video_render, OMX_CommandStateSet, OMX_StateIdle, NULL);

  OERR(OMX_SetupTunnel(pipe->camera.h, 71, pipe->video_render.h, 90));  /* Camera capture port to video render */

  omx_send_command_and_wait(&pipe->camera, OMX_CommandPortEnable, 71, NULL);
  omx_send_command_and_wait(&pipe->video_render, OMX_CommandPortEnable, 90, NULL);

  omx_send_command_and_wait(&pipe->video_render, OMX_CommandStateSet, OMX_StateExecuting, NULL);
  omx_send_command_and_wait(&pipe->camera, OMX_CommandStateSet, OMX_StateExecuting, NULL);

  omx_set_display_region(pipe, 1200, 180, 640, 360);

  OMX_CONFIG_DISPLAYREGIONTYPE region;
  OMX_INIT_STRUCTURE(region);
  region.nPortIndex = 90; /* Video render input port */
  region.set = OMX_DISPLAY_SET_LAYER;
  region.layer = 10;
  OERR(OMX_SetParameter(pipe->video_render.h, OMX_IndexConfigDisplayRegion, &region));

  fprintf(stderr,"Camera pipeline configured\n");

  dumpport(pipe->camera.h, 71);

  return OMX_ErrorNone;
}
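OMX_INIT_STRUCTURE() used throughout this example is the usual Broadcom convenience macro: it zeroes a stack-allocated OMX structure and stamps the nSize/nVersion header that every IL call requires. If it is not already available from the surrounding code, a typical definition (relying on the OMX_VERSION_* constants from the IL headers) looks like this:

#define OMX_INIT_STRUCTURE(a)					\
	memset(&(a), 0, sizeof(a));				\
	(a).nSize = sizeof(a);					\
	(a).nVersion.nVersion = OMX_VERSION;			\
	(a).nVersion.s.nVersionMajor = OMX_VERSION_MAJOR;	\
	(a).nVersion.s.nVersionMinor = OMX_VERSION_MINOR;	\
	(a).nVersion.s.nRevision = OMX_VERSION_REVISION;	\
	(a).nVersion.s.nStep = OMX_VERSION_STEP

It is commonly written without a do/while wrapper; that is safe here because every use in the example above is a standalone statement.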