Example #1
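Opens a TIGR window, creates a WidgetZero main window backed by a custom TigrRenderer, adds a centered button, and forwards mouse movement to the UI each frame. WINDOW_WIDTH, WINDOW_HEIGHT and the TigrRenderer class are defined earlier in the example source.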
int main(int, char **)
{
	// Create the tigr window.
	Tigr *screen = tigrWindow(WINDOW_WIDTH, WINDOW_HEIGHT, "WidgetZero Example - Custom Renderer", 0);

	// Create the custom renderer.
	TigrRenderer *renderer = new TigrRenderer(screen);

	// Create the main window.
	wz::MainWindow *mainWindow = new wz::MainWindow(renderer);
	mainWindow->setSize(WINDOW_WIDTH, WINDOW_HEIGHT);

	// Create a button.
	wz::Button *button = new wz::Button("Click me!");
	button->setAlign(wz::Align::Center | wz::Align::Middle);
	mainWindow->add(button);

	// For tracking input state changes.
	int lastMouseX = 0, lastMouseY = 0, lastMouseButtons = 0;

	while (!tigrClosed(screen))
	{
		// Handle mouse movement.
		int mouseX, mouseY, mouseButtons;
		tigrMouse(screen, &mouseX, &mouseY, &mouseButtons);

		if (mouseX != lastMouseX || mouseY != lastMouseY)
		{
			mainWindow->mouseMove(mouseX, mouseY, mouseX - lastMouseX, mouseY - lastMouseY);
		}

		lastMouseX = mouseX;
		lastMouseY = mouseY;
		lastMouseButtons = mouseButtons;
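		// Note: mouse button state is tracked here but not forwarded to the
		// main window in this example; only mouse movement is handled.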

		// Draw.
		tigrClear(screen, tigrRGB(192, 192, 192));
		mainWindow->draw();
		tigrUpdate(screen);
	}

	tigrFree(screen);
	return 0;
}
Example #2
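Plays a video in a TIGR window using the libaveasy helper library: each iteration the current decoded frame is converted to a Tigr bitmap and blitted into the window. The large commented-out block near the end is an earlier, raw FFmpeg/libswscale version of the same playback loop, kept for reference.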
int main(int argc, char* argv[])
{
	Tigr * win = tigrWindow(700, 700, "libaveasy", TIGR_AUTO);

	// NOTE: "url" is not defined in this snippet; a placeholder video path
	// (or URL) is assumed here, taken from argv[1] when provided.
	const char * url = (argc > 1) ? argv[1] : "video.mp4";

	aveasy_t * film = aveasy_open(url);

	// Disabled audio experiment: synthesize one second of a 441 Hz sine wave
	// and feed it to the ms_* sound API.
	//if(!ms_init(44100, 44100, 1))
	//	printf("ms nope\n");

	//ms_pcm_t buf[44100];

	//for(uint32_t i = 0; i < 44100; ++i)
	//{
	//	float freq = 441.0f;   // in Hz
	//	float rate = 44100.0f; // in Hz

	//	float t = freq * ((float)i) / rate;
	//	t = t - ceilf(t); // wrap t into the 0..1 range

	//	float v = sinf(t * 2.0f * 3.14f);

	//	buf[i] = v * 20000;
	//}

	//ms_pcm_t* woah[1] = {buf};

	//printf("well hi there\n");
	while(!tigrClosed(win) && !tigrKeyDown(win, TK_ESCAPE))
	{
		tigrClear(win, tigrRGB(0x64, 0x64, 0x64));

		aveasy_lock(film);
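		// Convert the current libaveasy frame to a Tigr bitmap and blit it into the window.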
		Tigr wut = aveasy_to_tigr(film);
		tigrBlit(win, &wut, 10, 10, 0, 0, wut.w, wut.h);
		aveasy_unlock(film);

		tigrPrint(win, tfont, 10, 15, tigrRGB(0xff, 0xff, 0xff), "testing stuff");
		tigrUpdate(win);

		//if(ms_ready())
		//	ms_fill(woah);
	}

	//ms_deinit();

	aveasy_close(film);


	tigrFree(win);

	/*
	av_register_all();

	AVFormatContext *pFormatCtx = NULL;
	int i, videoStream;
	AVCodecContext *pCodecCtx;
	AVCodec *pCodec;
	AVFrame *pFrame;
	AVFrame *pFrameRGB;
	struct SwsContext * pSwsCtx;
	AVPacket packet;
	int frameFinished;
	int numBytes;
	uint8_t *buffer;

	// Register all formats and codecs
	av_register_all();

	// Open video file
	if (avformat_open_input(&pFormatCtx, url, NULL, NULL) != 0)
			return -1; // Couldn't open file

	// Retrieve stream information
	if (avformat_find_stream_info(pFormatCtx, NULL) < 0)
			return -1; // Couldn't find stream information

	// Dump information about file onto standard error
	av_dump_format(pFormatCtx, 0, url, 0);

	// Find the first video stream
	videoStream = -1;
	for (i = 0; i < pFormatCtx->nb_streams; i++)
			if (pFormatCtx->streams[i]->codec->codec_type == AVMEDIA_TYPE_VIDEO) {
					videoStream = i;
					break;
			}
	if (videoStream == -1)
			return -1; // Didn't find a video stream

	// Get a pointer to the codec context for the video stream
	pCodecCtx = pFormatCtx->streams[videoStream]->codec;

	// Find the decoder for the video stream
	pCodec = avcodec_find_decoder(pCodecCtx->codec_id);
	if (pCodec == NULL) {
			fprintf(stderr, "Unsupported codec!\n");
			return -1; // Codec not found
	}
	// Open codec
	if (avcodec_open2(pCodecCtx, pCodec, NULL) < 0)
			return -1; // Could not open codec

	// Allocate video frame
	pFrame = av_frame_alloc();

	// Allocate an AVFrame structure
	pFrameRGB = av_frame_alloc();
	if (pFrameRGB == NULL)
			return -1;

	// Determine required buffer size and allocate buffer
	numBytes = avpicture_get_size(AV_PIX_FMT_BGRA, pCodecCtx->width,
					pCodecCtx->height);
	buffer = (uint8_t *) av_malloc(numBytes * sizeof(uint8_t));

	printf("%i %i\n", pCodecCtx->width, pCodecCtx->height);


	// Assign appropriate parts of buffer to image planes in pFrameRGB
	// Note that pFrameRGB is an AVFrame, but AVFrame is a superset
	// of AVPicture
	avpicture_fill((AVPicture *) pFrameRGB, buffer, AV_PIX_FMT_BGRA,
					pCodecCtx->width, pCodecCtx->height);

	pSwsCtx = sws_getContext(pCodecCtx->width,
					pCodecCtx->height, pCodecCtx->pix_fmt,
					pCodecCtx->width, pCodecCtx->height,
					AV_PIX_FMT_BGRA, SWS_FAST_BILINEAR, NULL, NULL, NULL);

	if (pSwsCtx == NULL) {
			fprintf(stderr, "Cannot initialize the sws context\n");
			return -1;
	}

//	Soloud *soloud = Soloud_create();
//	Speech *speech = Speech_create();

//	Speech_setText(speech, "1 2 3       A B C        Doooooo    Reeeeee    Miiiiii    Faaaaaa    Soooooo    Laaaaaa    Tiiiiii    Doooooo!");

//	Soloud_initEx(soloud, SOLOUD_CLIP_ROUNDOFF | SOLOUD_ENABLE_VISUALIZATION, SOLOUD_AUTO, SOLOUD_AUTO, SOLOUD_AUTO, SOLOUD_AUTO);

//	Soloud_setGlobalVolume(soloud, 4);
//	Soloud_play(soloud, speech);


	Tigr * win = tigrWindow(700, 700, "wut wut ffmpeg", TIGR_AUTO);

	while(!tigrClosed(win) && !tigrKeyDown(win, TK_ESCAPE))
	{


		if(av_read_frame(pFormatCtx, &packet) >= 0)
		{
			// Is this a packet from the video stream?
			if (packet.stream_index == videoStream) {
					// Decode video frame
					avcodec_decode_video2(pCodecCtx, pFrame, &frameFinished, &packet);

					// Did we get a video frame?
					if (frameFinished) {

							// Convert the image from its native format to RGB
							sws_scale(pSwsCtx,
													(const uint8_t * const *) pFrame->data,
													pFrame->linesize, 0, pCodecCtx->height,
													pFrameRGB->data,
													pFrameRGB->linesize);

							// Save the frame to disk
							//SaveFrame(pFrameRGB, pCodecCtx->width, pCodecCtx->height, i);

							tigrClear(win, tigrRGB(0x64, 0x64, 0x64));


							Tigr wut;
							wut.handle = NULL;
							wut.pix = buffer;//pFrameRGB->data;
							wut.w = pCodecCtx->width;
							wut.h = pCodecCtx->height;
							tigrBlit(win, &wut, 10, 10, 0, 0, wut.w, wut.h);

							tigrPrint(win, tfont, 10, 15, tigrRGB(0xff, 0xff, 0xff), "WUT WUT TEST");

//							if(Soloud_getVoiceCount(soloud) > 0)
//							{
//								int p;
//								float * v = Soloud_calcFFT(soloud);
//								p = (int)(v[10] * 30);
//								if (p > 59) p = 59;

//								char temp[64] = {0};
//								memset(temp, '=', p);
//								tigrPrint(win, tfont, 10, 30, tigrRGB(0xff, 0xff, 0xff), temp);
//							}
					}
			}

			// Free the packet that was allocated by av_read_frame
			av_free_packet(&packet);
		}

		tigrUpdate(win);
	}

	tigrFree(win);

	// Free the RGB image
	av_free(buffer);
	av_free(pFrameRGB);

	// Free the YUV frame
	av_free(pFrame);

	// Close the codec
	avcodec_close(pCodecCtx);

	// Close the video file
	avformat_close_input(&pFormatCtx);

	*/
	return 0;
}