Example #1
/* Initialize the PVR chip to ready status, enabling the specified lists
   and using the specified parameters; note that bins and vertex buffers
   come from the texture memory pool! Expects that a 2D mode was 
   initialized already using the vid_* API. */
int pvr_init(pvr_init_params_t *params) {
	/* If we're already initialized, fail */
	if (pvr_state.valid == 1) {
		dbglog(DBG_WARNING, "pvr: pvr_init called twice!\n");
		return -1;
	}

	/* Make sure we got valid parameters */
	assert(params != NULL);

	/* Make sure that a video mode has been initialized */
	assert(vid_mode != NULL);
	assert(vid_mode->width != 0 && vid_mode->height != 0);

	/* Check for compatibility with 3D stuff */
	if ((vid_mode->width % 32) != 0) {
		dbglog(DBG_WARNING, "pvr: mode %dx%d isn't usable for 3D (width not multiples of 32)\n",
			vid_mode->width, vid_mode->height);
		return -1;
	}

	/* Clear out video memory */
	vid_empty();

	/* Reset all PVR systems (in case it's still doing something) */
	PVR_SET(PVR_RESET, PVR_RESET_ALL);
	PVR_SET(PVR_RESET, PVR_RESET_NONE);

	/* Start off with a nice empty structure */
	memset((void *)&pvr_state, 0, sizeof(pvr_state));

	// Enable DMA if the user wants that.
	pvr_state.dma_mode = params->dma_enabled;

	/* Everything's clear, do the initial buffer pointer setup */
	pvr_allocate_buffers(params);

	// Initialize tile matrices
	pvr_init_tile_matrices();

	// Setup all pipeline targets. Yes, this is redundant. :) I just
	// like to have it explicit.
	pvr_state.ram_target = 0;
	pvr_state.ta_target = 0;
	pvr_state.view_target = 0;
	
	pvr_state.list_reg_open = -1;

	// Sync all the hardware registers with our pipeline state.
	pvr_sync_view();
	pvr_sync_reg_buffer();

	// Clear out our stats
	pvr_state.vbl_count = 0;
	pvr_state.frame_last_time = 0;
	pvr_state.buf_start_time = 0;
	pvr_state.reg_start_time = 0;
	pvr_state.rnd_start_time = 0;
	pvr_state.frame_last_len = -1;
	pvr_state.buf_last_len = -1;
	pvr_state.reg_last_len = -1;
	pvr_state.rnd_last_len = -1;
	pvr_state.vtx_buf_used = 0;
	pvr_state.vtx_buf_used_max = 0;

	/* If we're on a VGA box, disable vertical smoothing */
	if (vid_mode->cable_type == CT_VGA) {
		dbglog(DBG_KDEBUG, "pvr: disabling vertical scaling for VGA\n");
		PVR_SET(PVR_SCALER_CFG, 0x400);
	} else {
		dbglog(DBG_KDEBUG, "pvr: enabling vertical scaling for non-VGA\n");
		PVR_SET(PVR_SCALER_CFG, 0x401);
	}

	/* Hook the PVR interrupt events on G2 */
	pvr_state.vbl_handle = vblank_handler_add(pvr_int_handler);
	asic_evt_set_handler(ASIC_EVT_PVR_OPAQUEDONE, pvr_int_handler);
	asic_evt_enable(ASIC_EVT_PVR_OPAQUEDONE, ASIC_IRQ_DEFAULT);
	asic_evt_set_handler(ASIC_EVT_PVR_OPAQUEMODDONE, pvr_int_handler);
	asic_evt_enable(ASIC_EVT_PVR_OPAQUEMODDONE, ASIC_IRQ_DEFAULT);
	asic_evt_set_handler(ASIC_EVT_PVR_TRANSDONE, pvr_int_handler);
	asic_evt_enable(ASIC_EVT_PVR_TRANSDONE, ASIC_IRQ_DEFAULT);
	asic_evt_set_handler(ASIC_EVT_PVR_TRANSMODDONE, pvr_int_handler);
	asic_evt_enable(ASIC_EVT_PVR_TRANSMODDONE, ASIC_IRQ_DEFAULT);
	asic_evt_set_handler(ASIC_EVT_PVR_PTDONE, pvr_int_handler);
	asic_evt_enable(ASIC_EVT_PVR_PTDONE, ASIC_IRQ_DEFAULT);
	asic_evt_set_handler(ASIC_EVT_PVR_RENDERDONE, pvr_int_handler);
	asic_evt_enable(ASIC_EVT_PVR_RENDERDONE, ASIC_IRQ_DEFAULT);

	/* 3d-specific parameters; these are all about rendering and
	   nothing to do with setting up the video; some stuff in here
	   is still unknown. */
	PVR_SET(PVR_UNK_00A8, 0x15d1c951);		/* M (Unknown magic value) */
	PVR_SET(PVR_UNK_00A0, 0x00000020);		/* M */
	PVR_SET(PVR_FB_CFG_2, 0x00000009);		/* alpha config */
	PVR_SET(PVR_UNK_0110, 0x00093f39);		/* M */
	PVR_SET(PVR_UNK_0098, 0x00800408);		/* M */
	PVR_SET(PVR_TEXTURE_CLIP, 0x00000000);		/* texture clip distance */
	PVR_SET(PVR_SPANSORT_CFG, 0x00000101);		/* M */
	PVR_SET(PVR_FOG_TABLE_COLOR, 0x007f7f7f);	/* Fog table color */
	PVR_SET(PVR_FOG_VERTEX_COLOR, 0x007f7f7f);	/* Fog vertex color */
	PVR_SET(PVR_COLOR_CLAMP_MIN, 0x00000000);	/* color clamp min */
	PVR_SET(PVR_COLOR_CLAMP_MAX, 0xffffffff);	/* color clamp max */
	PVR_SET(PVR_UNK_0080, 0x00000007);		/* M */
	PVR_SET(PVR_CHEAP_SHADOW, 0x00000001);		/* cheap shadow */
	PVR_SET(PVR_UNK_007C, 0x0027df77);		/* M */
	PVR_SET(PVR_TEXTURE_MODULO, 0x00000000);	/* stride width */
	PVR_SET(PVR_FOG_DENSITY, 0x0000ff07);		/* fog density */
	PVR_SET(PVR_UNK_00C8, PVR_GET(0x00d4) << 16);	/* M */
	PVR_SET(PVR_UNK_0118, 0x00008040);		/* M */

	/* Initialize PVR DMA */
	pvr_state.dma_lock = mutex_create();
	pvr_dma_init();

	/* Setup our wait-ready semaphore */
	pvr_state.ready_sem = sem_create(0);

	/* Set us as valid and return success */
	pvr_state.valid = 1;

	/* Validate our memory pool */
	pvr_mem_reset();
/* This doesn't work right now... */
/*#ifndef NDEBUG
	dbglog(DBG_KDEBUG, "pvr: free memory is %08lx bytes\n",
		pvr_mem_available());
#endif*//* !NDEBUG */

	return 0;
}
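For context, application code typically initializes the PVR along these lines. This is a minimal sketch, assuming the standard KallistiOS pvr_init_params_t layout (five bin sizes, vertex buffer size, DMA flag); the mode and sizes below are illustrative, not taken from the source above.

#include <dc/video.h>
#include <dc/pvr.h>

/* Usage sketch (assumes the standard KOS pvr_init_params_t layout):
   enable the opaque and translucent polygon bins, reserve a 512 KB
   vertex buffer, and leave DMA off. */
static pvr_init_params_t init_params = {
	/* Bin sizes: opaque, opaque modifiers, translucent,
	   translucent modifiers, punch-thru */
	{ PVR_BINSIZE_16, PVR_BINSIZE_0, PVR_BINSIZE_16,
	  PVR_BINSIZE_0, PVR_BINSIZE_0 },
	512 * 1024,	/* Vertex buffer size in bytes */
	0		/* DMA disabled */
};

void graphics_startup(void) {
	/* A 2D video mode must be set up first via the vid_* API */
	vid_set_mode(DM_640x480, PM_RGB565);

	if(pvr_init(&init_params) < 0) {
		/* Already initialized, or the mode isn't 3D-capable */
		return;
	}
}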
Example #2
int ffplay(const char *filename, const char *force_format) {

	char errbuf[256];
	int r = 0;
	
	int frameFinished;
	AVPacket packet;
	int audio_buf_size = AVCODEC_MAX_AUDIO_FRAME_SIZE;
	int16_t *audio_buf = (int16_t *) malloc((AVCODEC_MAX_AUDIO_FRAME_SIZE * 3) / 2);
	
	if(!audio_buf) {
		ds_printf("DS_ERROR: No free memory\n");
		return -1;
	}
	
	memset(audio_buf, 0, (AVCODEC_MAX_AUDIO_FRAME_SIZE * 3) / 2);
	
	AVFormatContext *pFormatCtx = NULL;
	AVFrame *pFrame = NULL;
	AVCodecContext *pVideoCodecCtx = NULL, *pAudioCodecCtx = NULL;
	AVInputFormat *file_iformat = NULL;
	
	video_txr_t movie_txr;
	int videoStream = -1, audioStream = -1;
	
	maple_device_t *cont = NULL;
	cont_state_t *state = NULL;
	int pause = 0, done = 0;
	
	char fn[MAX_FN_LEN];
	sprintf(fn, "ds:%s", filename);

	memset(&movie_txr, 0, sizeof(movie_txr));
	
	if(!codecs_inited) {
		avcodec_register_all();
		avcodec_register(&mp1_decoder);
		avcodec_register(&mp2_decoder);
		avcodec_register(&mp3_decoder);
		avcodec_register(&vorbis_decoder);
		//avcodec_register(&mpeg4_decoder);
		codecs_inited = 1;
	}
	
	if(force_format)
		file_iformat = av_find_input_format(force_format);
	else
		file_iformat = NULL;

	// Open video file
	ds_printf("DS_PROCESS_FFMPEG: Opening file: %s\n", filename);
	if((r = av_open_input_file((AVFormatContext**)(&pFormatCtx), fn, file_iformat, /*FFM_PACKET_SIZE*/0, NULL)) != 0) {
		av_strerror(r, errbuf, 256);
		ds_printf("DS_ERROR_FFMPEG: %s\n", errbuf);
		free(audio_buf);
		return -1; // Couldn't open file
	}
	
	// Retrieve stream information
	ds_printf("DS_PROCESS_FFMPEG: Retrieve stream information...\n");
	if((r = av_find_stream_info(pFormatCtx)) < 0) {
		av_strerror(r, errbuf, 256);
		ds_printf("DS_ERROR_FFMPEG: %s\n", errbuf);
		av_close_input_file(pFormatCtx);
		free(audio_buf);
		return -1; // Couldn't find stream information
	}

	// Dump information about file onto standard error
	dump_format(pFormatCtx, 0, filename, 0);
	//thd_sleep(5000);
	
	pVideoCodecCtx = findDecoder(pFormatCtx, AVMEDIA_TYPE_VIDEO, &videoStream);
	pAudioCodecCtx = findDecoder(pFormatCtx, AVMEDIA_TYPE_AUDIO, &audioStream);
	
	//LockInput();
	
	if(pVideoCodecCtx) {
		
		//LockVideo();
		ShutdownVideoThread();
		SDL_DS_FreeScreenTexture(0);
		int format = 0;
		
		switch(pVideoCodecCtx->pix_fmt) {
			case PIX_FMT_YUV420P:
			case PIX_FMT_YUVJ420P:
			
				format = PVR_TXRFMT_YUV422;
#ifdef USE_HW_YUV				
				yuv_conv_init();
#endif
				break;
				
			case PIX_FMT_UYVY422:
			case PIX_FMT_YUVJ422P:
			
				format = PVR_TXRFMT_YUV422;
				break;
				
			default:
				format = PVR_TXRFMT_RGB565;
				break;
		}
		
		MakeVideoTexture(&movie_txr, pVideoCodecCtx->width, pVideoCodecCtx->height, format | PVR_TXRFMT_NONTWIDDLED, PVR_FILTER_BILINEAR);
		
#ifdef USE_HW_YUV				
		yuv_conv_setup(movie_txr.addr, PVR_YUV_MODE_MULTI, PVR_YUV_FORMAT_YUV420, movie_txr.width, movie_txr.height);
		pvr_dma_init();
#endif

	} else {
		ds_printf("DS_ERROR: Didn't find a video stream.\n");
	}
	
	
	if(pAudioCodecCtx) {
		
#ifdef USE_DIRECT_AUDIO
		audioinit(pAudioCodecCtx);
#else

		sprintf(fn, "%s/firmware/aica/ds_stream.drv", getenv("PATH"));
		
		if(snd_init_fw(fn) < 0) {
			goto exit_free;
		}
	
		if(aica_audio_open(pAudioCodecCtx->sample_rate, pAudioCodecCtx->channels, 8192) < 0) {
			goto exit_free;
		}
		//snd_cpu_clock(0x19);
		//snd_init_decoder(8192);
#endif
		
	} else {
		ds_printf("DS_ERROR: Didn't find a audio stream.\n");
	}
	
	//ds_printf("FORMAT: %d\n", pVideoCodecCtx->pix_fmt);

	// Allocate video frame
	pFrame = avcodec_alloc_frame();

	if(pFrame == NULL) {
		ds_printf("DS_ERROR: Can't alloc memory\n");
		goto exit_free;
	}
	
	int pressed = 0, framecnt = 0;
	uint32 fa = 0;
	
	/* Look up optional handlers exported by other modules (format, frame
	   and render callbacks below); each call is wrapped in an exception
	   guard so a bad export address can't bring down playback. */
	fa = GET_EXPORT_ADDR("ffplay_format_handler");
	
	if(fa > 0 && fa != 0xffffffff) {
		EXPT_GUARD_BEGIN;
			void (*ff_format_func)(AVFormatContext *, AVCodecContext *, AVCodecContext *) = 
				(void (*)(AVFormatContext *, AVCodecContext *, AVCodecContext *))fa;
			ff_format_func(pFormatCtx, pVideoCodecCtx, pAudioCodecCtx);
		EXPT_GUARD_CATCH;
		EXPT_GUARD_END;
	}
	
	fa = GET_EXPORT_ADDR("ffplay_frame_handler");
	void (*ff_frame_func)(AVFrame *) = NULL;
	
	if(fa > 0 && fa != 0xffffffff) {
		EXPT_GUARD_BEGIN;
			ff_frame_func = (void (*)(AVFrame *))fa;
			// Test call
			ff_frame_func(NULL);
		EXPT_GUARD_CATCH;
			ff_frame_func = NULL;
		EXPT_GUARD_END;
	}
	
	fa = GET_EXPORT_ADDR("ffplay_render_handler");
	
	if(fa > 0 && fa != 0xffffffff) {
		EXPT_GUARD_BEGIN;
			movie_txr.render_cb = (void (*)(void *))fa;
			// Test call
			movie_txr.render_cb(NULL);
		EXPT_GUARD_CATCH;
			movie_txr.render_cb = NULL;
		EXPT_GUARD_END;
	}
	
	while(av_read_frame(pFormatCtx, &packet) >= 0 && !done) {
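		/* Controller handling: START or B ends playback, A toggles pause
		   (10-frame debounce); the inner loop spins while paused. */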
		
		do {
			if(ff_frame_func) 
				ff_frame_func(pFrame);
					
			cont = maple_enum_type(0, MAPLE_FUNC_CONTROLLER);
			framecnt++;

			if(cont) {
				state = (cont_state_t *)maple_dev_status(cont);
				
				if (!state) {
					break;
				}
				if (state->buttons & CONT_START || state->buttons & CONT_B) {
					av_free_packet(&packet);
					done = 1;
				}
				if (state->buttons & CONT_A) {
					if((framecnt - pressed) > 10) {
						pause = pause ? 0 : 1;
						if(pause) {
#ifdef USE_DIRECT_AUDIO
							audio_end();
#else
							stop_audio();
#endif
						} else {
#ifndef USE_DIRECT_AUDIO
							start_audio();
#endif
						}
					}
					pressed = framecnt;
				}
				
				if(state->buttons & CONT_DPAD_LEFT) {
					//av_seek_frame(pFormatCtx, -1, timestamp * ( AV_TIME_BASE / 1000 ), AVSEEK_FLAG_BACKWARD);
				}
				
				if(state->buttons & CONT_DPAD_RIGHT) {
					//av_seek_frame(pFormatCtx, -1, timestamp * ( AV_TIME_BASE / 1000 ), AVSEEK_FLAG_BACKWARD);
				}
			}
			
			if(pause) thd_sleep(100);
			
		} while(pause);
		
		//printf("Packet: size: %d data: %02x%02x%02x pst: %d\n", packet.size, packet.data[0], packet.data[1], packet.data[2], pFrame->pts);
		
		// Is this a packet from the video stream?
		if(packet.stream_index == videoStream) {
			//printf("video\n");
			// Decode video frame
			if((r = avcodec_decode_video2(pVideoCodecCtx, pFrame, &frameFinished, &packet)) < 0) {
				//av_strerror(r, errbuf, 256);
				//printf("DS_ERROR_FFMPEG: %s\n", errbuf);
			} else {
				
				// Did we get a video frame?
				if(frameFinished && !pVideoCodecCtx->hurry_up) {
					RenderVideo(&movie_txr, pFrame, pVideoCodecCtx);
				}
			}

		} else if(packet.stream_index == audioStream) {
			//printf("audio\n");
			//snd_decode((uint8*)audio_buf, audio_buf_size, AICA_CODEC_MP3);
			
			audio_buf_size = AVCODEC_MAX_AUDIO_FRAME_SIZE;
			if((r = avcodec_decode_audio3(pAudioCodecCtx, audio_buf, &audio_buf_size, &packet)) < 0) {
				//av_strerror(r, errbuf, 256);
				//printf("DS_ERROR_FFMPEG: %s\n", errbuf);
				//continue;
			} else {
				
				if(audio_buf_size > 0 && !pAudioCodecCtx->hurry_up) {

#ifdef USE_DIRECT_AUDIO
					audio_write(pAudioCodecCtx, audio_buf, audio_buf_size);
#else
					aica_audio_write((char*)audio_buf, audio_buf_size);
#endif
				}
			}
		}

		// Free the packet that was allocated by av_read_frame
		av_free_packet(&packet);
	}
	
	goto exit_free;
	
exit_free:

	if(pFrame)
		av_free(pFrame);
	
	if(pFormatCtx)
		av_close_input_file(pFormatCtx);
	
	if(audioStream > -1) {
		if(pAudioCodecCtx)
			avcodec_close(pAudioCodecCtx);
#ifdef USE_DIRECT_AUDIO
		audio_end();
#else
		aica_audio_close();
		sprintf(fn, "%s/firmware/aica/kos_stream.drv", getenv("PATH"));
		snd_init_fw(fn);
#endif
	}
	
	if(audio_buf) {
		free(audio_buf);
	}
	
	if(videoStream > -1) {
		if(pVideoCodecCtx)
			avcodec_close(pVideoCodecCtx);
		FreeVideoTexture(&movie_txr);
		SDL_DS_AllocScreenTexture(GetScreen());
		InitVideoThread();
		//UnlockVideo();
	}
	
	//UnlockInput();
	ProcessVideoEventsUpdate(NULL);
	return 0;
}
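A hypothetical caller might invoke it as follows; the file paths and the forced demuxer name are placeholders for illustration, not values from the source above.

/* Hypothetical usage: the second argument forces a specific libavformat
   demuxer by name; pass NULL to auto-detect the container. */
if(ffplay("/cd/video/intro.mpg", NULL) < 0) {
	ds_printf("DS_ERROR: Playback failed\n");
}

/* Force the "mpeg" demuxer when the file extension is misleading */
ffplay("/cd/video/stream.dat", "mpeg");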