Example #1
/* -------------------------------------
 * gap_sdl_memory_buffer
 * -------------------------------------
 */
int
gap_sdl_memory_buffer(char *buffer, long buffer_len, GapSdlErrFunc erf)
{
  int rc = 1;
  int ii = 0;
  AudioPlaybackThreadUserData  *usrPtr;

  usrPtr = getUsrPtr();
  if (usrPtr != NULL)
  {
    stop_audio();
    usrPtr->sounds[ii].data = buffer;   /* point to memory provided by the caller */
    usrPtr->sounds[ii].dpos = 0;
    usrPtr->sounds[ii].dlen = buffer_len;

    if (usrPtr->sounds[ii].fpWav != NULL)
    {
      fclose(usrPtr->sounds[ii].fpWav);
      usrPtr->sounds[ii].fpWav = NULL;
    }
    rc = 0;  /* OK */
  }

  if ( (rc != 0) && (erf != NULL) )
  {
    call_errfunc(erf, "memory_buffer: %ld len: %ld not accepted", (long)buffer, buffer_len);  /* report the buffer address, not the address of the pointer parameter */
  }

  return (rc);

}  /* end gap_sdl_memory_buffer */
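For context, a minimal caller sketch for gap_sdl_memory_buffer, assuming a WAV payload already loaded into memory and using gap_sdl_cmd from Example #10 further below; wavData, wavLen and play_from_memory are illustrative names, and the error callback is left NULL so no GapSdlErrFunc signature has to be assumed.

/* hypothetical caller sketch, not part of the original source */
static int
play_from_memory(char *wavData, long wavLen)
{
  if (gap_sdl_memory_buffer(wavData, wavLen, NULL) != 0)
  {
    return -1;   /* buffer was not accepted */
  }
  /* the player keeps pointing at wavData, so it must stay valid during playback */
  return gap_sdl_cmd(GAP_SDL_CMD_Play, 0, NULL);
}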
Example #2
  int read(unsigned char* data, int num_samples)
  {
    MutexHolder mh(m_mutex);

    //    m_logger(2, "Read request...");

    if (m_outputProcState == kRunning &&
	m_buffer.size() < num_samples * m_input_format.mBytesPerFrame)
      {
	stop_audio();
	return 0;
      }

    const UInt32 inputDataSizeBytes = my_min(num_samples * m_input_format.mBytesPerFrame,
					     m_buffer.size());

    std::vector<unsigned char> input_data(m_buffer.begin(),
					  m_buffer.begin() + inputDataSizeBytes);

    m_buffer.erase(m_buffer.begin(),
		   m_buffer.begin() + inputDataSizeBytes);

    int outputDataSizeBytes = num_samples*m_format.mBytesPerFrame;

    OSStatus err;

    err = m_converter->convert(&input_data[0],
			       input_data.size(),
			       data,
			       outputDataSizeBytes);


    if (err != kAudioHardwareNoError)
      {
	char buf[1024] = {0};
	snprintf(buf, sizeof(buf),
		"Conversion failed, num_samples=%i, inputDataSize = %u\n"
		"buffer_size = %u, outputDataSize = %i (err=0x%lX (= %ld))",
		num_samples,
		(unsigned)inputDataSizeBytes,
		(unsigned)m_buffer.size(),
		outputDataSizeBytes,
		(unsigned long)err, (long)err);

	m_logger(0, buf);
	return 0;
      }
    /*
    char buf[512] = {0};
    sprintf(buf, "Converted %i bytes to %i bytes (num_samples=%i)",
	    inputDataSizeBytes, outputDataSizeBytes, num_samples);
	    m_logger(2, buf);*/

    return outputDataSizeBytes / m_format.mBytesPerFrame;

  }
Example #3
File: main.c Project: dsqmoore/dsra
void start_audio(const struct params * prm)
{
	gettimeofday(&last_data_packet_time,NULL);
	if (audio_started && audio_params && prm && (memcmp(audio_params,prm,sizeof(struct params))==0))
		return;
	if (audio_started)
		stop_audio();
	audio_started = 1;
	
	free(audio_params);                /* release any previous parameter copy (free(NULL) is a no-op) */
	audio_params = malloc(sizeof(struct params));
	memcpy(audio_params,prm,sizeof(struct params));
	
	keep_going = 1;
	pthread_create(&audio,NULL,(void * (*)(void*))audio_thread,(void*)audio_params);
}
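stop_audio() itself is not shown in this example; a hedged sketch of what a counterpart to the start_audio() above could look like, using only the variables visible in the snippet (an assumption, not the dsra source).

/* hypothetical counterpart sketch, not taken from the dsra sources */
void stop_audio(void)
{
	if (!audio_started)
		return;
	keep_going = 0;            /* ask audio_thread to leave its loop */
	pthread_join(audio, NULL); /* wait for the playback thread to finish */
	free(audio_params);        /* release the copy made in start_audio() */
	audio_params = NULL;
	audio_started = 0;
}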
Example #4
JNIEXPORT void JNICALL Java_cn_edu_hust_buildingtalkback_jni_NativeInterface_stopAudio(
		JNIEnv *env, jclass clazz)
{
	LOG_FUNC();
	stop_audio();
}
Example #5
static status_t
cd_ioctl(void* cookie, uint32 op, void* buffer, size_t length)
{
	cd_handle* handle = (cd_handle*)cookie;
	cd_driver_info *info = handle->info;

	TRACE("ioctl(op = %lu)\n", op);

	switch (op) {
		case B_GET_DEVICE_SIZE:
		{
			status_t status = update_capacity(info);
			if (status != B_OK)
				return status;

			size_t size = info->capacity * info->block_size;
			return user_memcpy(buffer, &size, sizeof(size_t));
		}

		case B_GET_GEOMETRY:
		{
			if (buffer == NULL /*|| length != sizeof(device_geometry)*/)
				return B_BAD_VALUE;

		 	device_geometry geometry;
			status_t status = get_geometry(handle, &geometry);
			if (status != B_OK)
				return status;

			return user_memcpy(buffer, &geometry, sizeof(device_geometry));
		}

		case B_GET_ICON_NAME:
			return user_strlcpy((char*)buffer, "devices/drive-optical",
				B_FILE_NAME_LENGTH);

		case B_GET_VECTOR_ICON:
		{
			device_icon iconData;
			if (length != sizeof(device_icon))
				return B_BAD_VALUE;
			if (user_memcpy(&iconData, buffer, sizeof(device_icon)) != B_OK)
				return B_BAD_ADDRESS;

			if (iconData.icon_size >= (int32)sizeof(kCDIcon)) {
				if (user_memcpy(iconData.icon_data, kCDIcon,
						sizeof(kCDIcon)) != B_OK)
					return B_BAD_ADDRESS;
			}

			iconData.icon_size = sizeof(kCDIcon);
			return user_memcpy(buffer, &iconData, sizeof(device_icon));
		}

		case B_SCSI_GET_TOC:
			// TODO: we pass a user buffer here!
			return get_toc(info, (scsi_toc *)buffer);

		case B_EJECT_DEVICE:
		case B_SCSI_EJECT:
			return load_eject(info, false);

		case B_LOAD_MEDIA:
			return load_eject(info, true);

		case B_SCSI_GET_POSITION:
		{
			if (buffer == NULL)
				return B_BAD_VALUE;

			scsi_position position;
			status_t status = get_position(info, &position);
			if (status != B_OK)
				return status;

			return user_memcpy(buffer, &position, sizeof(scsi_position));
		}

		case B_SCSI_GET_VOLUME:
			// TODO: we pass a user buffer here!
			return get_set_volume(info, (scsi_volume *)buffer, false);
		case B_SCSI_SET_VOLUME:
			// TODO: we pass a user buffer here!
			return get_set_volume(info, (scsi_volume *)buffer, true);

		case B_SCSI_PLAY_TRACK:
		{
			scsi_play_track track;
			if (user_memcpy(&track, buffer, sizeof(scsi_play_track)) != B_OK)
				return B_BAD_ADDRESS;

			return play_track_index(info, &track);
		}
		case B_SCSI_PLAY_POSITION:
		{
			scsi_play_position position;
			if (user_memcpy(&position, buffer, sizeof(scsi_play_position))
					!= B_OK)
				return B_BAD_ADDRESS;

			return play_msf(info, &position);
		}

		case B_SCSI_STOP_AUDIO:
			return stop_audio(info);
		case B_SCSI_PAUSE_AUDIO:
			return pause_resume(info, false);
		case B_SCSI_RESUME_AUDIO:
			return pause_resume(info, true);

		case B_SCSI_SCAN:
		{
			scsi_scan scanBuffer;
			if (user_memcpy(&scanBuffer, buffer, sizeof(scsi_scan)) != B_OK)
				return B_BAD_ADDRESS;

			return scan(info, &scanBuffer);
		}
		case B_SCSI_READ_CD:
			// TODO: we pass a user buffer here!
			return read_cd(info, (scsi_read_cd *)buffer);

		default:
			return sSCSIPeripheral->ioctl(handle->scsi_periph_handle, op,
				buffer, length);
	}
}
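For context, a hedged user-space sketch of how the B_SCSI_STOP_AUDIO branch above is typically reached on Haiku; the helper name and the device path are illustrative and not taken from the example.

/* hypothetical user-space sketch, not part of the driver source */
static status_t stop_cd_audio(const char* devicePath)
{
	int fd = open(devicePath, O_RDONLY);   /* e.g. a raw CD device published by this driver */
	if (fd < 0)
		return B_ERROR;
	int result = ioctl(fd, B_SCSI_STOP_AUDIO, NULL, 0);   /* handled by stop_audio(info) above */
	close(fd);
	return result < 0 ? B_ERROR : B_OK;
}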
Example #6
File: main.c Project: dsqmoore/dsra
int start_listening(const char * port)
{
    struct addrinfo hints, *res,*p;
    int sockfd;
	int socks[100];
	int nsocks = 0;
	int i;
	
    // first, load up address structs with getaddrinfo():
	
    memset(&hints, 0, sizeof hints);
    hints.ai_family = AF_UNSPEC;  // use IPv4 or IPv6, whichever
    hints.ai_socktype = SOCK_DGRAM;
    hints.ai_flags = AI_PASSIVE;     // fill in my IP for me
	
	int r;
    if ((r = getaddrinfo(NULL, port, &hints, &res))!=0)
	{
		fprintf(stderr,"getaddrinfo: %s",gai_strerror(r));
		return -1;
	}
	
	for (p=res; p; p=p->ai_next)
	{
	
		// make a socket, bind it, and listen on it:
	
		if ((sockfd = socket(p->ai_family, p->ai_socktype, p->ai_protocol))<0)	/* use the current list entry, not the head */
			continue;
		
		if (fcntl(sockfd, F_SETFL, O_NONBLOCK)<0)
		{
			close(sockfd);
			continue;
		}
		
		if (bind(sockfd, p->ai_addr, p->ai_addrlen)<0)
		{
			close(sockfd);
			continue;
		}
		
		socks[nsocks++]=sockfd;
		if (nsocks==100)
			break;
	}
	freeaddrinfo(res);	/* the address list is no longer needed once the sockets are bound */
	
	if (nsocks==0)
	{
		fprintf(stderr,"no valid interfaces to listen on found\n");
		return -1;
	}
	
	pthread_mutex_init(&queue_mutex,NULL);

	int asocks = nsocks;
	while (asocks)
	{
		fd_set fds;
		int max=0;
		FD_ZERO(&fds);
		for (i=0; i<nsocks; i++)
		{
			if (socks[i]<0) continue;
			FD_SET(socks[i],&fds);
			if (socks[i]+1>max)
				max=socks[i]+1;
		}
		struct timeval timeout;
		timeout.tv_sec = 1;
		timeout.tv_usec = 0;
		if ((select(max,&fds,NULL,NULL,&timeout)<0)&&(errno!=EINTR))
		{
			fprintf(stderr,"select(): %s\n",strerror(errno));
			for (i=0; i<nsocks; i++)
				close(socks[i]);
			return -1;
		}
		for (i=0; i<nsocks; i++)
		{
			if (socks[i]<0) continue;
			if (FD_ISSET(socks[i],&fds))
			{
				struct sockaddr_storage their_addr;
				socklen_t addr_size = sizeof their_addr;
				
				uint8_t * buffer = memory[memory_pointer];
				ssize_t ret = recvfrom(socks[i],buffer,MAX_BUFFER_SIZE,0,(struct sockaddr *)&their_addr,&addr_size);
				if (ret == -1)
				{
					fprintf(stderr,"recvfrom(): %s\n",strerror(errno));
					close(socks[i]);
					socks[i] = -1;
					asocks--;
				} else {
					if (!parse_data(buffer,(size_t)ret))
						memory_pointer++;
				}
			}
		}
		if (timedout(&last_data_packet_time,1000000))
			stop_audio();
	}
	for (i=0; i<nsocks; i++)
		if (socks[i]>=0)
			close(socks[i]);
	stop_audio();
    return 0;
}
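The helper timedout() is used above but not shown; a hedged sketch of what it presumably does, with the signature inferred from the call site (an assumption, not the dsra source).

/* hypothetical helper sketch, signature inferred from the call above */
static int timedout(const struct timeval *last, long usec)
{
	struct timeval now;
	long elapsed;
	gettimeofday(&now, NULL);
	elapsed = (now.tv_sec - last->tv_sec) * 1000000L
	        + (now.tv_usec - last->tv_usec);
	return elapsed > usec;   /* nonzero once more than usec microseconds have passed */
}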
Example #7
int main(void) {
	FRESULT res;
	WORD br;

	sei(); // Globally enable interrupts

	hibernate_init();
	DAC_init();
	keys_init();

	if (pf_mount(&fs)) { sound_osch(); }
	else { sound_gut(); }

	while(1) {
		cli(); // disable interrupts to avoid race condition with sleep function
		if (FLAG_CHECK(NEW_SOUND)) {
			hibernate_timer_stop();
			FLAG_CLEAR(NEW_SOUND);
			sei();
			switch (special_mode) {
				case 1:
				if (new_sound_id == old_sound_id - 1) {
					special_mode = 2;
					goto sound_ende;
				} else if (new_sound_id == old_sound_id + 1) {
					special_mode = 4;
					goto sound_ende;
				} else if (new_sound_id == old_sound_id) {
					credits_counter = CREDITS_COUNTER_MAX - 5;
					special_mode = 0;
					goto sound_ende;
				} else special_mode = 0;
				break;
				case 2:
				special_mode = 3;
				break;
				case 4:
				special_mode = 5;
				break;
				default:
				special_mode = 0;
			}
			if (new_sound_id == 36) {
				special_mode = 1;
				goto sound_ende;
			}
			old_sound_id = new_sound_id;
			char* filename;
			if (++credits_counter > CREDITS_COUNTER_MAX) {
				credits_counter = 0;
				filename = "image.hex";
			} else {
				if (new_sound_id == 255) goto sound_ende;
				filename = filenames(bank, new_sound_id);
			}
			uint8_t tries = 3;
			while (pf_open(filename) && pf_open("error1.wav")) {
				if ((tries--) == 0) goto sound_ende;
				_delay_ms(10);
				pf_mount(&fs);
			}
			if (parse_wav_header()) {
				if (pf_open("error2.wav") || parse_wav_header()) goto sound_ende;
			}
			do {
				#define read_length 16384
				if (wavinfo.data_length > read_length) {
					res = pf_read(0, read_length, &br);
					wavinfo.data_length -= read_length;
				} else {
					res = pf_read(0, wavinfo.data_length, &br);
					break;
				}
			} while (res==0 && br==read_length && wavinfo.data_length>0 && !FLAG_CHECK(NEW_SOUND));
			stop_audio();
			sound_ende:
			hibernate_timer_init();
		} else {
			sleep_enable();
			sei();
			sleep_cpu();
			sleep_disable();
		}
		hibernate_check();
	}
}
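FLAG_CHECK and FLAG_CLEAR are used above but not defined in the snippet; a hedged sketch of the usual AVR flag-macro pattern behind them (names, bit positions, and storage are assumptions, not this project's code).

/* hypothetical sketch of the flag macros, typical AVR ISR/main-loop pattern */
#include <stdint.h>
#include <avr/io.h>
volatile uint8_t event_flags;                /* shared between ISRs and the main loop */
#define NEW_SOUND        0                   /* bit index, value assumed */
#define FLAG_CHECK(f)    (event_flags & _BV(f))
#define FLAG_SET(f)      (event_flags |= _BV(f))
#define FLAG_CLEAR(f)    (event_flags &= (uint8_t)~_BV(f))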
Example #8
bool obs_transition_audio_render(obs_source_t *transition,
                                 uint64_t *ts_out, struct obs_source_audio_mix *audio,
                                 uint32_t mixers, size_t channels, size_t sample_rate,
                                 obs_transition_audio_mix_callback_t mix_a,
                                 obs_transition_audio_mix_callback_t mix_b)
{
    obs_source_t *sources[2];
    struct transition_state state = {0};
    bool stopped = false;
    uint64_t min_ts;
    float t;

    if (!transition_valid(transition, "obs_transition_audio_render"))
        return false;

    lock_transition(transition);

    sources[0] = transition->transition_sources[0];
    sources[1] = transition->transition_sources[1];

    min_ts = calc_min_ts(sources);

    if (min_ts) {
        t = calc_time(transition, min_ts);

        if (t >= 1.0f && transition->transitioning_audio)
            stopped = stop_audio(transition);

        sources[0] = transition->transition_sources[0];
        sources[1] = transition->transition_sources[1];
        min_ts = calc_min_ts(sources);
        if (min_ts)
            copy_transition_state(transition, &state);

    } else if (transition->transitioning_audio) {
        stopped = stop_audio(transition);
    }

    unlock_transition(transition);

    if (min_ts) {
        if (state.transitioning_audio) {
            if (state.s[0])
                process_audio(transition, state.s[0], audio,
                              min_ts, mixers, channels,
                              sample_rate, mix_a);
            if (state.s[1])
                process_audio(transition, state.s[1], audio,
                              min_ts, mixers, channels,
                              sample_rate, mix_b);
        } else if (state.s[0]) {
            memcpy(audio->output[0].data[0],
                   state.s[0]->audio_output_buf[0][0],
                   TOTAL_AUDIO_SIZE);
        }

        obs_source_release(state.s[0]);
        obs_source_release(state.s[1]);
    }

    if (stopped)
        obs_source_dosignal(transition, "source_transition_stop",
                            "transition_stop");

    *ts_out = min_ts;
    return !!min_ts;
}
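Here stop_audio() is an internal libobs helper; a hedged guess at its shape, inferred only from the call sites above (it appears to return whether audio was actually transitioning), not taken from the libobs sources.

/* hypothetical sketch, inferred from the call sites above */
static bool stop_audio(obs_source_t *transition)
{
    bool was_transitioning = transition->transitioning_audio;
    transition->transitioning_audio = false;
    return was_transitioning;
}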
Example #9
int ffplay(const char *filename, const char *force_format) {

	char errbuf[256];
	int r = 0;
	
	int frameFinished;
	AVPacket packet;
	int audio_buf_size = AVCODEC_MAX_AUDIO_FRAME_SIZE;
	int16_t *audio_buf = (int16_t *) malloc((AVCODEC_MAX_AUDIO_FRAME_SIZE * 3) / 2);
	
	if(!audio_buf) {
		ds_printf("DS_ERROR: No free memory\n");
		return -1;
	}
	
	memset(audio_buf, 0, (AVCODEC_MAX_AUDIO_FRAME_SIZE * 3) / 2);
	
	AVFormatContext *pFormatCtx = NULL;
	AVFrame *pFrame = NULL;
	AVCodecContext *pVideoCodecCtx = NULL, *pAudioCodecCtx = NULL;
	AVInputFormat *file_iformat = NULL;
	
	video_txr_t movie_txr;
	int videoStream = -1, audioStream = -1;
	
	maple_device_t *cont = NULL;
	cont_state_t *state = NULL;
	int pause = 0, done = 0;
	
	char fn[MAX_FN_LEN];
	sprintf(fn, "ds:%s", filename);

	memset(&movie_txr, 0, sizeof(movie_txr));
	
	if(!codecs_inited) {
		avcodec_register_all();
		avcodec_register(&mp1_decoder);
		avcodec_register(&mp2_decoder);
		avcodec_register(&mp3_decoder);
		avcodec_register(&vorbis_decoder);
		//avcodec_register(&mpeg4_decoder);
		codecs_inited = 1;
	}
	
	if(force_format)
		file_iformat = av_find_input_format(force_format);
	else
		file_iformat = NULL;


	// Open video file
	ds_printf("DS_PROCESS_FFMPEG: Opening file: %s\n", filename);
	if((r = av_open_input_file((AVFormatContext**)(&pFormatCtx), fn, file_iformat, /*FFM_PACKET_SIZE*/0, NULL)) != 0) {
		av_strerror(r, errbuf, 256);
		ds_printf("DS_ERROR_FFMPEG: %s\n", errbuf);
		free(audio_buf);
		return -1; // Couldn't open file
	}
	
	// Retrieve stream information
	ds_printf("DS_PROCESS_FFMPEG: Retrieve stream information...\n");
	if((r = av_find_stream_info(pFormatCtx)) < 0) {
		av_strerror(r, errbuf, 256);
		ds_printf("DS_ERROR_FFMPEG: %s\n", errbuf);
		av_close_input_file(pFormatCtx);
		free(audio_buf);
		return -1; // Couldn't find stream information
	}

	// Dump information about file onto standard error
	dump_format(pFormatCtx, 0, filename, 0);
	//thd_sleep(5000);
	
	pVideoCodecCtx = findDecoder(pFormatCtx, AVMEDIA_TYPE_VIDEO, &videoStream);
	pAudioCodecCtx = findDecoder(pFormatCtx, AVMEDIA_TYPE_AUDIO, &audioStream);
	
	//LockInput();
	
	if(pVideoCodecCtx) {
		
		//LockVideo();
		ShutdownVideoThread();
		SDL_DS_FreeScreenTexture(0);
		int format = 0;
		
		switch(pVideoCodecCtx->pix_fmt) {
			case PIX_FMT_YUV420P:
			case PIX_FMT_YUVJ420P:
			
				format = PVR_TXRFMT_YUV422;
#ifdef USE_HW_YUV				
				yuv_conv_init();
#endif
				break;
				
			case PIX_FMT_UYVY422:
			case PIX_FMT_YUVJ422P:
			
				format = PVR_TXRFMT_YUV422;
				break;
				
			default:
				format = PVR_TXRFMT_RGB565;
				break;
		}
		
		MakeVideoTexture(&movie_txr, pVideoCodecCtx->width, pVideoCodecCtx->height, format | PVR_TXRFMT_NONTWIDDLED, PVR_FILTER_BILINEAR);
		
#ifdef USE_HW_YUV				
		yuv_conv_setup(movie_txr.addr, PVR_YUV_MODE_MULTI, PVR_YUV_FORMAT_YUV420, movie_txr.width, movie_txr.height);
		pvr_dma_init();
#endif

	} else {
		ds_printf("DS_ERROR: Didn't find a video stream.\n");
	}
	
	
	if(pAudioCodecCtx) {
		
#ifdef USE_DIRECT_AUDIO
		audioinit(pAudioCodecCtx);
#else

		sprintf(fn, "%s/firmware/aica/ds_stream.drv", getenv("PATH"));
		
		if(snd_init_fw(fn) < 0) {
			goto exit_free;
		}
	
		if(aica_audio_open(pAudioCodecCtx->sample_rate, pAudioCodecCtx->channels, 8192) < 0) {
			goto exit_free;
		}
		//snd_cpu_clock(0x19);
		//snd_init_decoder(8192);
#endif
		
	} else {
		ds_printf("DS_ERROR: Didn't find a audio stream.\n");
	}
	
	//ds_printf("FORMAT: %d\n", pVideoCodecCtx->pix_fmt);

	// Allocate video frame
	pFrame = avcodec_alloc_frame();

	if(pFrame == NULL) {
		ds_printf("DS_ERROR: Can't alloc memory\n");
		goto exit_free;
	}
	
	int pressed = 0, framecnt = 0;
	uint32 fa = 0;
	
	fa = GET_EXPORT_ADDR("ffplay_format_handler");
	
	if(fa > 0 && fa != 0xffffffff) {
		EXPT_GUARD_BEGIN;
			void (*ff_format_func)(AVFormatContext *, AVCodecContext *, AVCodecContext *) = 
				(void (*)(AVFormatContext *, AVCodecContext *, AVCodecContext *))fa;
			ff_format_func(pFormatCtx, pVideoCodecCtx, pAudioCodecCtx);
		EXPT_GUARD_CATCH;
		EXPT_GUARD_END;
	}
	
	fa = GET_EXPORT_ADDR("ffplay_frame_handler");
	void (*ff_frame_func)(AVFrame *) = NULL;
	
	if(fa > 0 && fa != 0xffffffff) {
		EXPT_GUARD_BEGIN;
			ff_frame_func = (void (*)(AVFrame *))fa;
			// Test call
			ff_frame_func(NULL);
		EXPT_GUARD_CATCH;
			ff_frame_func = NULL;
		EXPT_GUARD_END;
	}
	
	fa = GET_EXPORT_ADDR("ffplay_render_handler");
	
	if(fa > 0 && fa != 0xffffffff) {
		EXPT_GUARD_BEGIN;
			movie_txr.render_cb = (void (*)(void *))fa;
			// Test call
			movie_txr.render_cb(NULL);
		EXPT_GUARD_CATCH;
			movie_txr.render_cb = NULL;
		EXPT_GUARD_END;
	}
	
	while(av_read_frame(pFormatCtx, &packet) >= 0 && !done) {
		
		do {
			if(ff_frame_func) 
				ff_frame_func(pFrame);
					
			cont = maple_enum_type(0, MAPLE_FUNC_CONTROLLER);
			framecnt++;

			if(cont) {
				state = (cont_state_t *)maple_dev_status(cont);
				
				if (!state) {
					break;
				}
				if (state->buttons & CONT_START || state->buttons & CONT_B) {
					av_free_packet(&packet);
					done = 1;
				}
				if (state->buttons & CONT_A) {
					if((framecnt - pressed) > 10) {
						pause = pause ? 0 : 1;
						if(pause) {
#ifdef USE_DIRECT_AUDIO
							audio_end();
#else
							stop_audio();
#endif
						} else {
#ifndef USE_DIRECT_AUDIO
							start_audio();
#endif
						}
					}
					pressed = framecnt;
				}
				
				if(state->buttons & CONT_DPAD_LEFT) {
					//av_seek_frame(pFormatCtx, -1, timestamp * ( AV_TIME_BASE / 1000 ), AVSEEK_FLAG_BACKWARD);
				}
				
				if(state->buttons & CONT_DPAD_RIGHT) {
					//av_seek_frame(pFormatCtx, -1, timestamp * ( AV_TIME_BASE / 1000 ), AVSEEK_FLAG_BACKWARD);
				}
			}
			
			if(pause) thd_sleep(100);
			
		} while(pause);
		
		//printf("Packet: size: %d data: %02x%02x%02x pst: %d\n", packet.size, packet.data[0], packet.data[1], packet.data[2], pFrame->pts);
		
		// Is this a packet from the video stream?
		if(packet.stream_index == videoStream) {
			//printf("video\n");
			// Decode video frame
			if((r = avcodec_decode_video2(pVideoCodecCtx, pFrame, &frameFinished, &packet)) < 0) {
				//av_strerror(r, errbuf, 256);
				//printf("DS_ERROR_FFMPEG: %s\n", errbuf);
			} else {
				
				// Did we get a video frame?
				if(frameFinished && !pVideoCodecCtx->hurry_up) {
					RenderVideo(&movie_txr, pFrame, pVideoCodecCtx);
				}
			}

		} else if(packet.stream_index == audioStream) {
			//printf("audio\n");
			//snd_decode((uint8*)audio_buf, audio_buf_size, AICA_CODEC_MP3);
			
			audio_buf_size = AVCODEC_MAX_AUDIO_FRAME_SIZE;
			if((r = avcodec_decode_audio3(pAudioCodecCtx, audio_buf, &audio_buf_size, &packet)) < 0) {
				//av_strerror(r, errbuf, 256);
				//printf("DS_ERROR_FFMPEG: %s\n", errbuf);
				//continue;
			} else {
				
				if(audio_buf_size > 0 && !pAudioCodecCtx->hurry_up) {

#ifdef USE_DIRECT_AUDIO
					audio_write(pAudioCodecCtx, audio_buf, audio_buf_size);
#else
					aica_audio_write((char*)audio_buf, audio_buf_size);
#endif
				}
			}
		}

		// Free the packet that was allocated by av_read_frame
		av_free_packet(&packet);
	}
	
	goto exit_free;
	
exit_free:

	if(pFrame)
		av_free(pFrame);
	
	if(pFormatCtx)
		av_close_input_file(pFormatCtx);
	
	if(audioStream > -1) {
		if(pAudioCodecCtx)
			avcodec_close(pAudioCodecCtx);
#ifdef USE_DIRECT_AUDIO
		audio_end();
#else
		aica_audio_close();
		sprintf(fn, "%s/firmware/aica/kos_stream.drv", getenv("PATH"));
		snd_init_fw(fn);
#endif
	}
	
	if(audio_buf) {
		free(audio_buf);
	}
	
	if(videoStream > -1) {
		if(pVideoCodecCtx)
			avcodec_close(pVideoCodecCtx);
		FreeVideoTexture(&movie_txr);
		SDL_DS_AllocScreenTexture(GetScreen());
		InitVideoThread();
		//UnlockVideo();
	}
	
	//UnlockInput();
	ProcessVideoEventsUpdate(NULL);
	return 0;
}
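A hedged call sketch for the player above; the paths and the forced format are illustrative only.

/* hypothetical call sketch, not part of the original source */
static void example_playback(void)
{
	ffplay("/cd/movies/intro.m1v", NULL);     /* container/codec autodetected */
	ffplay("/cd/audio/track01.mp3", "mp3");   /* force an input format explicitly */
}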
Example #10
/* -------------------------------------
 * gap_sdl_cmd
 * -------------------------------------
 * Perform a simple audio_player command.
 * Note: some of the commands do not make sense
 *       in this SDL based implementation
 *       and are just dummies for compatibility with the
 *       API (which was designed based on the older wavplay client functions).
 */
int
gap_sdl_cmd(int cmd,int flags,GapSdlErrFunc erf) {
  static char *sdl_no_error_available = "";
  char *sdl_error;
  int rc = 1;
  AudioPlaybackThreadUserData  *usrPtr;


  sdl_error = sdl_no_error_available;
  usrPtr = getUsrPtr();
  if (usrPtr != NULL)
  {
    if(gap_debug)
    {
      printf("gap_sdl_cmd cmd:%d (%s) flags:%d SdlAudioStatus:%d\n"
            , cmd
           , msg_name(cmd)
           , flags
           , (int)SDL_GetAudioStatus()
           );
    }
    switch(cmd)
    {
      case GAP_SDL_CMD_Bye:
        stop_audio();
        close_files();
        rc = 0;  /* OK */
        break;
      case GAP_SDL_CMD_Play:
        rc = start_audio();
        sdl_error = SDL_GetError();  /* SDL_GetError returns a statically allocated message that must NOT be freed */
        break;
      case GAP_SDL_CMD_Pause:
        stop_audio();
        rc = 0;  /* OK */
        break;
      case GAP_SDL_CMD_Stop:
        stop_audio();
        close_files();
        rc = 0;  /* OK */
        break;
      case GAP_SDL_CMD_Restore:
        stop_audio();
        rc = 0;  /* OK */
        break;
      case GAP_SDL_CMD_SemReset:
        rc = 0;  /* OK */
        break;
    }

  }


  if ((rc != 0) && (erf != NULL ))
  {
    call_errfunc(erf, "%s: Sending cmd:%d to audio_player failed (rc:%d) err:%s",
                        msg_name(cmd),
                        cmd,
                        rc,
                        sdl_error);
  }
  if(gap_debug)
  {
    printf("gap_sdl_cmd cmd:%d (%s) flags:%d retcode:%d\n", cmd, msg_name(cmd), flags, rc);
  }
  return rc;  /* Zero indicates success */

}  /* end gap_sdl_cmd */
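A hedged usage sketch for the command interface above; example_playback_control is an illustrative name, and the error callback is left NULL so its signature does not have to be assumed.

/* hypothetical caller sketch, not part of the original source */
static void example_playback_control(void)
{
  gap_sdl_cmd(GAP_SDL_CMD_Play, 0, NULL);   /* start playback */
  gap_sdl_cmd(GAP_SDL_CMD_Pause, 0, NULL);  /* pause (maps to stop_audio) */
  gap_sdl_cmd(GAP_SDL_CMD_Bye, 0, NULL);    /* stop playback and close files */
}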
Example #11
/* -------------------------------------
 * gap_sdl_path
 * -------------------------------------
 *
 * Send a pathname to the audio_player.
 * (use flags = 1 to keep the current playback parameters)
 * A typical call with flags == 0 initializes the audio playback parameters
 * from the header information of the specified RIFF WAVE file (referred to by pathname).
 */
int
gap_sdl_path(const char *pathname,int flags,GapSdlErrFunc erf)
{
  int rc = 1;
  AudioPlaybackThreadUserData  *usrPtr;

  usrPtr = getUsrPtr();
  if (usrPtr != NULL)
  {
    int rcCheck;
    long sample_rate;
    long channels;
    long bytes_per_sample;
    long bits;
    long samples;

    rcCheck = gap_audio_wav_file_check(pathname, &sample_rate, &channels
                           , &bytes_per_sample, &bits, &samples);
    if (rcCheck == 0)
    {
      int ii;

      rc = 0;  /* OK */
      ii = 0;
      stop_audio();
      usrPtr->sounds[ii].data = NULL;   /* no preloaded memory available */
      usrPtr->sounds[ii].dpos = 0;
      usrPtr->sounds[ii].dlen = 0;


      if (flags != 1)
      {
        /* set audio params from wavefile header */
        usrPtr->format = AUDIO_S16;

        usrPtr->samplerate = sample_rate;
        usrPtr->channels = channels;
        usrPtr->bits_per_sample = bits;
        usrPtr->frame_size = (usrPtr->bits_per_sample / 8) * usrPtr->channels;
        switch (bits)
        {
          case 8:
            usrPtr->format = AUDIO_U8;
            break;
          case 16:
            usrPtr->format = AUDIO_S16;  /* same as AUDIO_S16LSB */
            break;
          default:
            rc = 2;  /* unsupported bits_per_sample value */
            break;
        }
      }
      if (rc == 0)
      {
        if (usrPtr->sounds[ii].fpWav != NULL)
        {
          if (strcmp(usrPtr->sounds[ii].wav_filename, pathname) != 0)
          {
            /* file differs from currently opened wavfile, force close/reopen */
            fclose(usrPtr->sounds[ii].fpWav);
            usrPtr->sounds[ii].fpWav = NULL;
          }
          else
          {
            long seekPosition;

            /* current wavfile was specified again, just rewind existing filehandle */
            seekPosition = usrPtr->sounds[ii].offset_to_first_sample;
            fseek(usrPtr->sounds[ii].fpWav, seekPosition, SEEK_SET);
          }
        }

        if (usrPtr->sounds[ii].fpWav == NULL)
        {
          g_snprintf(usrPtr->sounds[ii].wav_filename, MAX_WAV_FILENAME_LENGTH -1, "%s", pathname);  /* treat pathname as data, not as a format string */
          usrPtr->sounds[ii].wav_filename[MAX_WAV_FILENAME_LENGTH -1] = '\0';
          usrPtr->sounds[ii].offset_to_first_sample = 0;
          usrPtr->sounds[ii].fpWav = gap_audio_wav_open_seek_data(usrPtr->sounds[ii].wav_filename);
        }

        if(usrPtr->sounds[ii].fpWav == NULL)
        {
          rc = 3;  /* failed to open audio data file */
        }
        else
        {
          /* store current file offset (position of the 1st audio sample data byte) */
          usrPtr->sounds[ii].offset_to_first_sample = ftell(usrPtr->sounds[ii].fpWav);
        }
      }

    }
  }

  if ( (rc != 0) && (erf != NULL) )
  {
     if (pathname != NULL)
     {
       // TODO provide to_string for errorcodes rc
       call_errfunc(erf, "path: Not a valid RIFF WAVE file %s rc:%d (%s)", pathname, rc, " ");
     }
     else
     {
       call_errfunc(erf, "path: is NULL (name of RIFF WAVE file was expected).");
     }
  }

  return (rc);

}  /* end gap_sdl_path */
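A hedged usage sketch combining gap_sdl_path with the command interface from Example #10; play_wav_file is an illustrative name and the error callback is left NULL so no GapSdlErrFunc signature has to be assumed.

/* hypothetical caller sketch, not part of the original source */
static int
play_wav_file(const char *wavPath)
{
  if (gap_sdl_path(wavPath, 0, NULL) != 0)   /* flags == 0: take playback params from the WAV header */
  {
    return -1;                               /* not a usable RIFF WAVE file */
  }
  return gap_sdl_cmd(GAP_SDL_CMD_Play, 0, NULL);
}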