static int aout_thread_n(JNIEnv *env, SDL_Aout *aout)
{
    SDL_Aout_Opaque *opaque = aout->opaque;
    SDL_Android_AudioTrack *atrack = opaque->atrack;
    SDL_AudioCallback audio_cblk = opaque->spec.callback;
    void *userdata = opaque->spec.userdata;
    uint8_t *buffer = opaque->buffer;
    int copy_size = 256;

    assert(atrack);
    assert(buffer);

    SDL_SetThreadPriority(SDL_THREAD_PRIORITY_HIGH);

    if (!opaque->abort_request && !opaque->pause_on)
        SDL_Android_AudioTrack_play(env, atrack);

    while (!opaque->abort_request) {
        SDL_LockMutex(opaque->wakeup_mutex);
        if (!opaque->abort_request && opaque->pause_on) {
            SDL_Android_AudioTrack_pause(env, atrack);
            while (!opaque->abort_request && opaque->pause_on) {
                SDL_CondWaitTimeout(opaque->wakeup_cond, opaque->wakeup_mutex, 1000);
            }
            if (!opaque->abort_request && !opaque->pause_on)
                SDL_Android_AudioTrack_play(env, atrack);
        }
        if (opaque->need_flush) {
            opaque->need_flush = 0;
            SDL_Android_AudioTrack_flush(env, atrack);
        }
        if (opaque->need_set_volume) {
            opaque->need_set_volume = 0;
            SDL_Android_AudioTrack_set_volume(env, atrack, opaque->left_volume, opaque->right_volume);
        }
        SDL_UnlockMutex(opaque->wakeup_mutex);

        audio_cblk(userdata, buffer, copy_size);
        if (opaque->need_flush) {
            SDL_Android_AudioTrack_flush(env, atrack);
            opaque->need_flush = false;
        }

        if (opaque->need_flush) {
            opaque->need_flush = 0;
            SDL_Android_AudioTrack_flush(env, atrack);
        } else {
            int written = SDL_Android_AudioTrack_write(env, atrack, buffer, copy_size);
            if (written != copy_size) {
                ALOGW("AudioTrack: not all data copied %d/%d", (int)written, (int)copy_size);
            }
        }

        // TODO: 1 if callback return -1 or 0
    }

    SDL_Android_AudioTrack_free(env, atrack);
    return 0;
}
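
All of the examples on this page reduce to the same core pattern: lock a mutex, loop on a shared predicate, and call SDL_CondWaitTimeout() so the waiter wakes up periodically even if a signal gets lost (as in the pause loop above). The following is a minimal, self-contained sketch of that pattern; worker_ctx, stop_requested and the 100 ms interval are illustrative, not taken from any project shown here.

#include <SDL.h>   /* SDL_mutex, SDL_cond, SDL_CondWaitTimeout, SDL_MUTEX_TIMEDOUT */

typedef struct worker_ctx {
    SDL_mutex *mutex;
    SDL_cond  *cond;
    int        stop_requested;   /* predicate, guarded by mutex */
} worker_ctx;

static int worker_thread(void *arg)
{
    worker_ctx *ctx = (worker_ctx *)arg;

    SDL_LockMutex(ctx->mutex);
    while (!ctx->stop_requested) {
        /* Returns 0 when signalled, SDL_MUTEX_TIMEDOUT after 100 ms,
         * or a negative error code; the mutex is re-locked in every case. */
        int ret = SDL_CondWaitTimeout(ctx->cond, ctx->mutex, 100);
        if (ret < 0)
            break;               /* bail out on error */
        /* re-check the predicate on both signal and timeout */
    }
    SDL_UnlockMutex(ctx->mutex);
    return 0;
}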
Example #2
def_dll int sdl_condition::wait_timeout(sdl_mutex* p,Uint32 t)
{
	if(p)
	{
		return SDL_CondWaitTimeout(_condition,p->_mutex,t);
	}
	return -1;
}
Example #3
/*
* Sys_CondVar_Wait
*/
bool Sys_CondVar_Wait( qcondvar_t *cond, qmutex_t *mutex, unsigned int timeout_msec )
{
	if( !cond || !mutex ) {
		return false;
	}

	return SDL_CondWaitTimeout( cond->c, mutex->m, timeout_msec ) == 0;
}
Example #4
condition::WAIT_TIMEOUT_RESULT condition::wait_timeout(const mutex& m, unsigned int timeout)
{
	const int res = SDL_CondWaitTimeout(cond_,m.m_,timeout);
	switch(res) {
		case 0: return THREAD_WAIT_OK;
		case SDL_MUTEX_TIMEDOUT: return THREAD_WAIT_TIMEOUT;
		default:
			 std::cerr << "SDL_CondWaitTimeout: " << SDL_GetError() << "\n";
			return THREAD_WAIT_ERROR;
	}
}
Example #5
bool Conditional::wait(thread::Mutex *_mutex, int timeout)
{
	// Yes, I realise this can be dangerous,
	// however, you're asking for it if you're
	// mixing thread implementations.
	Mutex *mutex = (Mutex *) _mutex;
	if (timeout < 0)
		return !SDL_CondWait(cond, mutex->mutex);
	else
		return (SDL_CondWaitTimeout(cond, mutex->mutex, timeout) == 0);
}
ssize_t SDL_AMediaCodec_FakeFifo_dequeueOutputBuffer(SDL_AMediaCodec_FakeFifo *fifo, SDL_AMediaCodecBufferInfo *info, int64_t timeoutUs)
{
    if (fifo->should_abort)
        return -1;

    int64_t  timeoutMs  = (timeoutUs + 999) / 1000;    
    ssize_t  dequeue_ret = -1;
    uint64_t wait_start = SDL_GetTickHR();
    int64_t  to_wait    = timeoutMs;

    SDL_LockMutex(fifo->mutex);
    while (!fifo->should_abort) {
        if (fifo->size > 0) {
            SDL_AMediaCodec_FakeFrame *fake = &fifo->fakes[fifo->begin];
            *info        = fake->info;
            info->flags |= AMEDIACODEC__BUFFER_FLAG_FAKE_FRAME;
            dequeue_ret  = fake->index;

            FAK_TRACE("%s, [%d]%lld", __func__, fifo->begin, info->presentationTimeUs);

            fifo->begin = (fifo->begin + 1) % FAKE_BUFFER_QUEUE_SIZE;
            fifo->size--;
            SDL_CondSignal(fifo->wakeup_enqueue_cond);
            break;
        }

        SDL_CondWaitTimeout(fifo->wakeup_dequeue_cond, fifo->mutex, to_wait);
        if (to_wait >= 0) {
            uint64_t now = SDL_GetTickHR();
            if (now < wait_start) {
                // tick overflow
                dequeue_ret = -1;
                break;
            } else {
                uint64_t elapsed = now - wait_start;
                if (elapsed >= timeoutMs) {
                    // timeout
                    dequeue_ret = -1;
                    break;
                } else {
                    to_wait = timeoutMs - elapsed;
                }
            }
        }
    }
    SDL_UnlockMutex(fifo->mutex);

    if (fifo->should_abort)
        return -1;

    return dequeue_ret;
}
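
The dequeue loop above re-computes the remaining wait time after every wakeup, so spurious or early wakeups never stretch the total timeout (SDL_GetTickHR is an ijkplayer helper; stock SDL provides SDL_GetTicks). A generic sketch of the same idea follows; wait_flag_with_deadline and the flag parameter are illustrative names, not part of any API on this page.

/* Wait until *flag becomes non-zero or timeout_ms elapses in total,
 * re-arming the wait with the remaining time after each wakeup.
 * The mutex must already be locked by the caller.
 * Returns 1 if the flag was set, 0 on timeout. */
static int wait_flag_with_deadline(SDL_cond *cond, SDL_mutex *mutex,
                                   const int *flag, Uint32 timeout_ms)
{
    Uint32 start = SDL_GetTicks();

    while (!*flag) {
        Uint32 elapsed = SDL_GetTicks() - start;  /* unsigned math tolerates tick wrap */
        if (elapsed >= timeout_ms)
            return 0;                             /* deadline reached */
        SDL_CondWaitTimeout(cond, mutex, timeout_ms - elapsed);
    }
    return 1;
}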
int aout_thread_n(JNIEnv *env, SDL_Aout *aout)
{
    SDL_Aout_Opaque *opaque = aout->opaque;
    SDL_AndroidAudioTrack *atrack = opaque->atrack;
    SDL_AudioCallback audio_cblk = opaque->spec.callback;
    void *userdata = opaque->spec.userdata;
    uint8_t *buffer = opaque->buffer;
    int copy_size = 256;

    assert(atrack);
    assert(buffer);

    SDL_SetThreadPriority(SDL_THREAD_PRIORITY_HIGH);

    if (!opaque->abort_request && !opaque->pause_on)
        sdl_audiotrack_play(env, atrack);

    while (!opaque->abort_request) {
        SDL_LockMutex(opaque->wakeup_mutex);
        if (!opaque->abort_request && opaque->pause_on) {
            sdl_audiotrack_pause(env, atrack);
            while (!opaque->abort_request && opaque->pause_on) {
                SDL_CondWaitTimeout(opaque->wakeup_cond, opaque->wakeup_mutex, 1000);
            }
            if (!opaque->abort_request && !opaque->pause_on)
                sdl_audiotrack_play(env, atrack);
        }
        if (opaque->need_flush) {
            opaque->need_flush = 0;
            sdl_audiotrack_flush(env, atrack);
        }
        SDL_UnlockMutex(opaque->wakeup_mutex);

        audio_cblk(userdata, buffer, copy_size);
        if (opaque->need_flush) {
            sdl_audiotrack_flush(env, atrack);
            opaque->need_flush = false;
        }

        if (opaque->need_flush) {
            opaque->need_flush = 0;
            sdl_audiotrack_flush(env, atrack);
        } else {
            sdl_audiotrack_write_byte(env, atrack, buffer, copy_size);
        }

        // TODO: 1 if callback return -1 or 0
    }

    sdl_audiotrack_free(env, atrack);
    return 0;
}
SDL_Surface *ANDROID_SetVideoModeMT(_THIS, SDL_Surface *current, int width, int height, int bpp, Uint32 flags)
{
	if( flags & SDL_OPENGL || flags & SDL_HWSURFACE )
	{
		return NULL;
	}
	SDL_mutexP(videoThread.mutex);
	while( ! videoThread.threadReady )
		SDL_CondWaitTimeout(videoThread.cond2, videoThread.mutex, 1000);
	videoThread.cmd = CMD_SETVIDEOMODE;
	videoThread._this = this;
	videoThread.current = current;
	videoThread.width = width;
	videoThread.height = height;
	videoThread.bpp = bpp;
	videoThread.flags = flags;
	videoThread.execute = 1;
	SDL_CondSignal(videoThread.cond);
	while( videoThread.execute )
		SDL_CondWaitTimeout(videoThread.cond2, videoThread.mutex, 1000);
	SDL_Surface * ret = videoThread.retcode2;
	SDL_mutexV(videoThread.mutex);
	return ret;
}
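
ANDROID_SetVideoModeMT hands a command to the video thread serviced by SDL_ANDROID_MultiThreadedVideoLoop (shown further down): one condition variable wakes the worker, a second reports completion, and the 1000 ms timeouts keep either side from hanging forever if a wakeup is lost. Below is a stripped-down sketch of that handoff; request_t, submit_request and service_loop are made-up names, not part of that SDL port.

typedef struct request_t {
    SDL_mutex *mutex;
    SDL_cond  *work_ready;   /* requester -> worker */
    SDL_cond  *work_done;    /* worker -> requester */
    int        pending;      /* 1 while a request is outstanding */
    int        result;
} request_t;

/* Called by the requesting thread; blocks until the worker is done. */
static int submit_request(request_t *r)
{
    SDL_LockMutex(r->mutex);
    r->pending = 1;
    SDL_CondSignal(r->work_ready);
    while (r->pending)                          /* wait for the worker to finish */
        SDL_CondWaitTimeout(r->work_done, r->mutex, 1000);
    int result = r->result;
    SDL_UnlockMutex(r->mutex);
    return result;
}

/* Body of the worker thread. */
static void service_loop(request_t *r)
{
    for (;;) {
        SDL_LockMutex(r->mutex);
        if (!r->pending)
            SDL_CondWaitTimeout(r->work_ready, r->mutex, 1000);
        if (r->pending) {
            r->result  = 0;                     /* ...do the actual work here... */
            r->pending = 0;
            SDL_CondSignal(r->work_done);
        }
        SDL_UnlockMutex(r->mutex);
    }
}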
static int drain_output_buffer(JNIEnv *env, IJKFF_Pipenode *node, int64_t timeUs, int *dequeue_count)
{
    IJKFF_Pipenode_Opaque *opaque = node->opaque;
    SDL_LockMutex(opaque->acodec_mutex);

    if (opaque->acodec_flush_request || opaque->acodec_reconfigure_request) {
        // TODO: invalid picture here?
        // let feed_input_buffer() get mutex
        SDL_CondWaitTimeout(opaque->acodec_cond, opaque->acodec_mutex, 100);
    }

    int ret = drain_output_buffer_l(env, node, timeUs, dequeue_count);
    SDL_UnlockMutex(opaque->acodec_mutex);
    return ret;
}
Example #10
bool Condition::wait(uint32 timeout) {
	int gotSignal;

	if (_ownMutex)
		_mutex->lock();

	if (timeout == 0)
		gotSignal = SDL_CondWait(_condition, _mutex->_mutex);
	else
		gotSignal = SDL_CondWaitTimeout(_condition, _mutex->_mutex, timeout);

	if (_ownMutex)
		_mutex->unlock();

	return gotSignal == 0;
}
Example #11
int osd_event_wait(osd_event *event, osd_ticks_t timeout)
{
	LOG(("osd_event_wait"));
	if (timeout == OSD_EVENT_WAIT_INFINITE)
		timeout = osd_ticks_per_second() * (osd_ticks_t)10000;
	SDL_mutexP(event->mutex);
	if (!timeout)
	{
		if (!event->signalled)
		{
			SDL_mutexV(event->mutex);
			return FALSE;
		}
	}
	else
	{
		if (!event->signalled)
		{
			UINT64 msec = (timeout * 1000) / osd_ticks_per_second();

			do {
				int ret = SDL_CondWaitTimeout(event->cond, event->mutex, msec);
				if ( ret == SDL_MUTEX_TIMEDOUT )
				{
					if (!event->signalled)
					{
						SDL_mutexV(event->mutex);
						return FALSE;
					}
					else
						break;
				}
				if (ret == 0)
					break;
				printf("Error %d while waiting for pthread_cond_timedwait:  %s\n", ret, strerror(ret));
			} while (TRUE);
		}
	}

	if (event->autoreset)
		event->signalled = 0;

	SDL_mutexV(event->mutex);

	return TRUE;
}
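
osd_event_wait above consumes a signalled flag with optional auto-reset semantics, but the signalling side is not shown on this page. Assuming osd_event really holds only the mutex, cond, signalled and autoreset fields used above, the setter would plausibly look like the sketch below (a guess at the shape of the code, not the actual MAME source).

static void osd_event_set_sketch(osd_event *event)
{
	SDL_mutexP(event->mutex);               /* SDL 1.2 alias for SDL_LockMutex */
	if (!event->signalled)
	{
		event->signalled = TRUE;
		if (event->autoreset)
			SDL_CondSignal(event->cond);    /* wake exactly one waiter */
		else
			SDL_CondBroadcast(event->cond); /* wake them all */
	}
	SDL_mutexV(event->mutex);
}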
Example #12
void SDL_ANDROID_MultiThreadedVideoLoop()
{
	while(1)
	{
		int signalNeeded = 0;
		int swapBuffersNeeded = 0;
		SDL_mutexP(videoThread.mutex);
		videoThread.threadReady = 1;
		SDL_CondSignal(videoThread.cond2);
		SDL_CondWaitTimeout(videoThread.cond, videoThread.mutex, 1000);
		if( videoThread.execute )
		{
			videoThread.threadReady = 0;
			switch( videoThread.cmd )
			{
				case CMD_INIT:
					videoThread.retcode = ANDROID_VideoInit(videoThread._this, videoThread.vformat);
					break;
				case CMD_SETVIDEOMODE:
					videoThread.retcode2 = ANDROID_SetVideoMode(videoThread._this, videoThread.current,
							videoThread.width, videoThread.height, videoThread.bpp, videoThread.flags);
					break;
				case CMD_QUIT:
					ANDROID_VideoQuit(videoThread._this);
					break;
				case CMD_UPDATERECTS:
					ANDROID_FlipHWSurfaceInternal();
					swapBuffersNeeded = 1;
					break;
				case CMD_FLIP:
					ANDROID_FlipHWSurfaceInternal();
					swapBuffersNeeded = 1;
					break;
			}
			videoThread.execute = 0;
			signalNeeded = 1;
		}
		SDL_mutexV(videoThread.mutex);
		if( signalNeeded )
			SDL_CondSignal(videoThread.cond2);
		if( swapBuffersNeeded )
		{
			SDL_ANDROID_CallJavaSwapBuffers();
		}
	}
}
Example #13
static mrb_value
mrb_sdl2_cond_wait_timeout(mrb_state *mrb, mrb_value self)
{
  mrb_value mutex;
  mrb_int ms;
  mrb_sdl2_mutex_data_t *mutex_data;
  mrb_sdl2_cond_data_t *data =
    (mrb_sdl2_cond_data_t*)mrb_data_get_ptr(mrb, self, &mrb_sdl2_cond_data_type);
  if (NULL == data->cond) {
    return mrb_nil_value();
  }
  mrb_get_args(mrb, "oi", &mutex, &ms);
  mutex_data =
    (mrb_sdl2_mutex_data_t*)mrb_data_get_ptr(mrb, mutex, &mrb_sdl2_mutex_data_type);
  if (0 > SDL_CondWaitTimeout(data->cond, mutex_data->mutex, (Uint32)ms)) {
    mruby_sdl2_raise_error(mrb);
  }
  return self;
}
ssize_t SDL_AMediaCodec_FakeFifo_dequeueInputBuffer(SDL_AMediaCodec_FakeFifo* fifo, int64_t timeoutUs)
{
    int ret_index = -1;
    if (fifo->should_abort)
        return SDL_AMEDIA_ERROR_UNKNOWN;

    SDL_LockMutex(fifo->mutex);
    if (!fifo->should_abort) {
        if (fifo->size >= FAKE_BUFFER_QUEUE_SIZE) {
            SDL_CondWaitTimeout(fifo->wakeup_enqueue_cond, fifo->mutex, timeoutUs / 1000);
        }

        if (fifo->size < FAKE_BUFFER_QUEUE_SIZE) {
            ret_index = fifo->end;
        }
    }
    SDL_UnlockMutex(fifo->mutex);

    if (fifo->should_abort)
        return -1;

    return ret_index;
}
Example #15
File: redraw.c Project: ebfe/vix
int
simtk_redraw_thread_SDL (void *arg)
{
  while (!quit_flag)
  {
    SDL_mutexP (root_redraw_mutex);

    while (!root_redraw_requested && !quit_flag)
      SDL_CondWaitTimeout (root_redraw_condition, root_redraw_mutex, 10000);
    
    SDL_mutexV (root_redraw_mutex);

    if (quit_flag)
      break;
    
    simtk_container_lock (root);
    
    simtk_container_clear_all (root);

    simtk_redraw_container (root);

    simtk_container_unlock (root);

    SDL_mutexP (root_redraw_mutex);
    
    root_redraw_requested = 0;

    should_refresh = 1;

    SDL_mutexV (root_redraw_mutex);
  }

  printf ("Redraw thread: quit\n");

  return 0;
}
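
The redraw thread above sleeps on root_redraw_condition until root_redraw_requested is set. Based only on the globals visible in that loop, the requesting side would look roughly like this hypothetical helper (simtk_request_redraw_sketch is not an actual function of that project).

static void
simtk_request_redraw_sketch (void)
{
  SDL_mutexP (root_redraw_mutex);

  root_redraw_requested = 1;
  SDL_CondSignal (root_redraw_condition);   /* wake the redraw thread early */

  SDL_mutexV (root_redraw_mutex);
}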
Example #16
/**
 * @brief Tells the music thread to die.
 */
static void music_kill (void)
{
   int ret;
   musicLock();

   music_command = MUSIC_CMD_KILL;
   music_forced  = 1;
   while (1) {
      ret = SDL_CondWaitTimeout( music_state_cond, music_state_lock, 3000 );

      /* Timed out, just slaughter the thread. */
      if (ret == SDL_MUTEX_TIMEDOUT) {
         WARN("Music thread did not exit when asked, slaughtering...");
         SDL_KillThread( music_player );
         break;
      }

      /* Ended properly, breaking. */
      if (music_state == MUSIC_STATE_DEAD)
         break;
   }

   musicUnlock();
}
Example #17
/**
**  Fill audio thread.
*/
static int FillThread(void *)
{
	while (Audio.Running == true) {
		int status = SDL_LockMutex(Audio.Lock);
#ifdef USE_WIN32
		if (SDL_CondWaitTimeout(Audio.Cond, Audio.Lock, 1000) == 0) {
#else
		if (SDL_CondWaitTimeout(Audio.Cond, Audio.Lock, 100) == 0) {
#endif
			MixIntoBuffer(Audio.Buffer, Audio.Format.samples * Audio.Format.channels);
		}
		SDL_UnlockMutex(Audio.Lock);

#ifdef USE_OAML
		if (enableOAML && oaml)
			oaml->Update();
#endif
	}

	return 0;
}

/*----------------------------------------------------------------------------
--  Effects
----------------------------------------------------------------------------*/

/**
**  Check if this sound is already playing
*/
bool SampleIsPlaying(CSample *sample)
{
	for (int i = 0; i < MaxChannels; ++i) {
		if (Channels[i].Sample == sample && Channels[i].Playing) {
			return true;
		}
	}
	return false;
}

bool UnitSoundIsPlaying(Origin *origin)
{
	for (int i = 0; i < MaxChannels; ++i) {
		//Wyrmgus start
//		if (origin && Channels[i].Unit && origin->Id && Channels[i].Unit->Id
//			&& origin->Id == Channels[i].Unit->Id && Channels[i].Playing) {
		if (
			origin && Channels[i].Playing
			&& Channels[i].Voice != -1
			&& Channels[i].Voice != VoiceHit && Channels[i].Voice != VoiceMiss && Channels[i].Voice != VoiceStep
			&& Channels[i].Unit && origin->Id && Channels[i].Unit->Id
			&& origin->Id == Channels[i].Unit->Id
		) {
		//Wyrmgus end
			return true;
		}
	}
	return false;
}

/**
**  A channel is finished playing
*/
static void ChannelFinished(int channel)
{
	if (Channels[channel].FinishedCallback) {
		Channels[channel].FinishedCallback(channel);
	}

	//Wyrmgus start
//	delete Channels[channel].Unit;
	if (Channels[channel].Unit) {
		delete Channels[channel].Unit;
	}
	//Wyrmgus end
	Channels[channel].Unit = NULL;
	
	//Wyrmgus start
	Channels[channel].Voice = -1;
	//Wyrmgus end
	Channels[channel].Playing = false;
	Channels[channel].Point = NextFreeChannel;
	NextFreeChannel = channel;
}

/**
**  Put a sound request in the next free channel.
*/
static int FillChannel(CSample *sample, unsigned char volume, char stereo, Origin *origin)
{
	Assert(NextFreeChannel < MaxChannels);

	int old_free = NextFreeChannel;
	int next_free = Channels[NextFreeChannel].Point;

	Channels[NextFreeChannel].Volume = volume;
	Channels[NextFreeChannel].Point = 0;
	//Wyrmgus start
	Channels[NextFreeChannel].Voice = -1;
	//Wyrmgus end
	Channels[NextFreeChannel].Playing = true;
	Channels[NextFreeChannel].Sample = sample;
	Channels[NextFreeChannel].Stereo = stereo;
	Channels[NextFreeChannel].FinishedCallback = NULL;
	//Wyrmgus start
	Channels[NextFreeChannel].Unit = NULL;
	//Wyrmgus end
	if (origin && origin->Base) {
		Origin *source = new Origin;
		source->Base = origin->Base;
		source->Id = origin->Id;
		Channels[NextFreeChannel].Unit = source;
	}
	NextFreeChannel = next_free;

	return old_free;
}
static int feed_input_buffer(JNIEnv *env, IJKFF_Pipenode *node, int64_t timeUs, int *enqueue_count)
{
    IJKFF_Pipenode_Opaque *opaque   = node->opaque;
    FFPlayer              *ffp      = opaque->ffp;
    IJKFF_Pipeline        *pipeline = opaque->pipeline;
    VideoState            *is       = ffp->is;
    Decoder               *d        = &is->viddec;
    PacketQueue           *q        = d->queue;
    sdl_amedia_status_t    amc_ret  = 0;
    int                    ret      = 0;
    ssize_t  input_buffer_index = 0;
    ssize_t  copy_size          = 0;
    int64_t  time_stamp         = 0;
    uint32_t queue_flags        = 0;

    if (enqueue_count)
        *enqueue_count = 0;

    if (d->queue->abort_request) {
        ret = 0;
        goto fail;
    }

    opaque->avctx = opaque->decoder->avctx;

    if (!d->packet_pending || d->queue->serial != d->pkt_serial) {
#if AMC_USE_AVBITSTREAM_FILTER
#else
        H264ConvertState convert_state = {0, 0};
#endif
        AVPacket pkt;
        do {
            if (d->queue->nb_packets == 0)
                SDL_CondSignal(d->empty_queue_cond);
            if (ffp_packet_queue_get_or_buffering(ffp, d->queue, &pkt, &d->pkt_serial, &d->finished) < 0) {
                ret = -1;
                goto fail;
            }
            if (ffp_is_flush_packet(&pkt) || opaque->acodec_flush_request) {
                // request flush before lock, or never get mutex
                opaque->acodec_flush_request = true;
                SDL_LockMutex(opaque->acodec_mutex);
                if (SDL_AMediaCodec_isStarted(opaque->acodec)) {
                    if (opaque->input_packet_count > 0) {
                        // flush empty queue cause error on OMX.SEC.AVC.Decoder (Nexus S)
                        SDL_VoutAndroid_invalidateAllBuffers(opaque->weak_vout);
                        SDL_AMediaCodec_flush(opaque->acodec);
                        opaque->input_packet_count = 0;
                    }
                    // If codec is configured in synchronous mode, codec will resume automatically
                    // SDL_AMediaCodec_start(opaque->acodec);
                }
                opaque->acodec_flush_request = false;
                SDL_CondSignal(opaque->acodec_cond);
                SDL_UnlockMutex(opaque->acodec_mutex);
                d->finished = 0;
                d->next_pts = d->start_pts;
                d->next_pts_tb = d->start_pts_tb;
            }
        } while (ffp_is_flush_packet(&pkt) || d->queue->serial != d->pkt_serial);
        av_free_packet(&d->pkt);
        d->pkt_temp = d->pkt = pkt;
        d->packet_pending = 1;
#if AMC_USE_AVBITSTREAM_FILTER
        // d->pkt_temp->data could be allocated by av_bitstream_filter_filter
        if (d->bfsc_ret > 0) {
            if (d->bfsc_data)
                av_freep(&d->bfsc_data);
            d->bfsc_ret = 0;
        }
        d->bfsc_ret =
            av_bitstream_filter_filter(opaque->bsfc, opaque->avctx, NULL, &d->pkt_temp.data, &d->pkt_temp.size,
                                       d->pkt.data, d->pkt.size, d->pkt.flags & AV_PKT_FLAG_KEY);
        if (d->bfsc_ret > 0) {
            d->bfsc_data = d->pkt_temp.data;
        } else if (d->bfsc_ret < 0) {
            ALOGE("%s: av_bitstream_filter_filter failed\n", __func__);
            ret = -1;
            goto fail;
        }

        if (d->pkt_temp.size == d->pkt.size + opaque->avctx->extradata_size) {
            d->pkt_temp.data += opaque->avctx->extradata_size;
            d->pkt_temp.size  = d->pkt.size;
        }

        AMCTRACE("bsfc->filter(%d): %p[%d] -> %p[%d]", d->bfsc_ret, d->pkt.data, (int)d->pkt.size, d->pkt_temp.data, (int)d->pkt_temp.size);
#else
#if 0
        AMCTRACE("raw [%d][%d] %02x%02x%02x%02x%02x%02x%02x%02x", (int)d->pkt_temp.size,
            (int)opaque->nal_size,
            d->pkt_temp.data[0],
            d->pkt_temp.data[1],
            d->pkt_temp.data[2],
            d->pkt_temp.data[3],
            d->pkt_temp.data[4],
            d->pkt_temp.data[5],
            d->pkt_temp.data[6],
            d->pkt_temp.data[7]);
#endif
        if (opaque->avctx->codec_id == AV_CODEC_ID_H264 || opaque->avctx->codec_id == AV_CODEC_ID_HEVC) {
            convert_h264_to_annexb(d->pkt_temp.data, d->pkt_temp.size, opaque->nal_size, &convert_state);
            int64_t time_stamp = d->pkt_temp.pts;
            if (!time_stamp && d->pkt_temp.dts)
                time_stamp = d->pkt_temp.dts;
            if (time_stamp > 0) {
                time_stamp = av_rescale_q(time_stamp, is->video_st->time_base, AV_TIME_BASE_Q);
            } else {
                time_stamp = 0;
            }
        }
#if 0
        AMCTRACE("input[%d][%d][%lld,%lld (%d, %d) -> %lld] %02x%02x%02x%02x%02x%02x%02x%02x", (int)d->pkt_temp.size,
            (int)opaque->nal_size,
            (int64_t)d->pkt_temp.pts,
            (int64_t)d->pkt_temp.dts,
            (int)is->video_st->time_base.num,
            (int)is->video_st->time_base.den,
            (int64_t)time_stamp,
            d->pkt_temp.data[0],
            d->pkt_temp.data[1],
            d->pkt_temp.data[2],
            d->pkt_temp.data[3],
            d->pkt_temp.data[4],
            d->pkt_temp.data[5],
            d->pkt_temp.data[6],
            d->pkt_temp.data[7]);
#endif
#endif
    }

    if (d->pkt_temp.data) {
        // reconfigure surface if surface changed
        // NULL surface cause no display
        if (ffpipeline_is_surface_need_reconfigure_l(pipeline)) {
            jobject new_surface = NULL;

            // request reconfigure before lock, or never get mutex
            ffpipeline_lock_surface(pipeline);
            ffpipeline_set_surface_need_reconfigure_l(pipeline, false);
            new_surface = ffpipeline_get_surface_as_global_ref_l(env, pipeline);
            ffpipeline_unlock_surface(pipeline);

            if (opaque->jsurface == new_surface ||
                (opaque->jsurface && new_surface && (*env)->IsSameObject(env, new_surface, opaque->jsurface))) {
                ALOGI("%s: same surface, reuse previous surface\n", __func__);
                J4A_DeleteGlobalRef__p(env, &new_surface);
            } else {
                opaque->acodec_reconfigure_request = true;
                SDL_LockMutex(opaque->acodec_mutex);
                ret = reconfigure_codec_l(env, node, new_surface);
                opaque->acodec_reconfigure_request = false;
                SDL_CondSignal(opaque->acodec_cond);
                SDL_UnlockMutex(opaque->acodec_mutex);

                J4A_DeleteGlobalRef__p(env, &new_surface);

                if (ret != 0) {
                    ALOGE("%s: reconfigure_codec failed\n", __func__);
                    ret = 0;
                    goto fail;
                }

                SDL_LockMutex(opaque->acodec_first_dequeue_output_mutex);
                while (!q->abort_request &&
                    !opaque->acodec_reconfigure_request &&
                    !opaque->acodec_flush_request &&
                    opaque->acodec_first_dequeue_output_request) {
                    SDL_CondWaitTimeout(opaque->acodec_first_dequeue_output_cond, opaque->acodec_first_dequeue_output_mutex, 1000);
                }
                SDL_UnlockMutex(opaque->acodec_first_dequeue_output_mutex);

                if (q->abort_request || opaque->acodec_reconfigure_request || opaque->acodec_flush_request) {
                    ret = 0;
                    goto fail;
                }
            }
        }

#if 0
        // no need to decode without surface
        if (!opaque->jsurface) {
            ret = amc_decode_picture_fake(node, 1000);
            goto fail;
        }
#endif

        queue_flags = 0;
        input_buffer_index = SDL_AMediaCodec_dequeueInputBuffer(opaque->acodec, timeUs);
        if (input_buffer_index < 0) {
            if (SDL_AMediaCodec_isInputBuffersValid(opaque->acodec)) {
                // timeout
                ret = 0;
                goto fail;
            } else {
                // enqueue fake frame
                queue_flags |= AMEDIACODEC__BUFFER_FLAG_FAKE_FRAME;
                copy_size    = d->pkt_temp.size;
            }
        } else {
            SDL_AMediaCodecFake_flushFakeFrames(opaque->acodec);

            copy_size = SDL_AMediaCodec_writeInputData(opaque->acodec, input_buffer_index, d->pkt_temp.data, d->pkt_temp.size);
            if (!copy_size) {
                ALOGE("%s: SDL_AMediaCodec_getInputBuffer failed\n", __func__);
                ret = -1;
                goto fail;
            }
        }

        time_stamp = d->pkt_temp.pts;
        if (!time_stamp && d->pkt_temp.dts)
            time_stamp = d->pkt_temp.dts;
        if (time_stamp > 0) {
            time_stamp = av_rescale_q(time_stamp, is->video_st->time_base, AV_TIME_BASE_Q);
        } else {
            time_stamp = 0;
        }
        // ALOGE("queueInputBuffer, %lld\n", time_stamp);
        amc_ret = SDL_AMediaCodec_queueInputBuffer(opaque->acodec, input_buffer_index, 0, copy_size, time_stamp, queue_flags);
        if (amc_ret != SDL_AMEDIA_OK) {
            ALOGE("%s: SDL_AMediaCodec_getInputBuffer failed\n", __func__);
            ret = -1;
            goto fail;
        }
        // ALOGE("%s: queue %d/%d", __func__, (int)copy_size, (int)input_buffer_size);
        opaque->input_packet_count++;
        if (enqueue_count)
            ++*enqueue_count;
    }

    if (copy_size < 0) {
        d->packet_pending = 0;
    } else {
        d->pkt_temp.dts =
        d->pkt_temp.pts = AV_NOPTS_VALUE;
        if (d->pkt_temp.data) {
            d->pkt_temp.data += copy_size;
            d->pkt_temp.size -= copy_size;
            if (d->pkt_temp.size <= 0)
                d->packet_pending = 0;
        } else {
            // FIXME: detect if decode finished
            // if (!got_frame) {
                d->packet_pending = 0;
                d->finished = d->pkt_serial;
            // }
        }
    }

fail:
    return ret;
}
static int drain_output_buffer_l(JNIEnv *env, IJKFF_Pipenode *node, int64_t timeUs, int *dequeue_count)
{
    IJKFF_Pipenode_Opaque *opaque   = node->opaque;
    int                    ret      = 0;
    SDL_AMediaCodecBufferInfo bufferInfo;
    ssize_t                   output_buffer_index = 0;

    if (dequeue_count)
        *dequeue_count = 0;

    if (JNI_OK != SDL_JNI_SetupThreadEnv(&env)) {
        ALOGE("%s:create: SetupThreadEnv failed\n", __func__);
        return -1;
    }

    output_buffer_index = SDL_AMediaCodec_dequeueOutputBuffer(opaque->acodec, &bufferInfo, timeUs);
    if (output_buffer_index == AMEDIACODEC__INFO_OUTPUT_BUFFERS_CHANGED) {
        ALOGI("AMEDIACODEC__INFO_OUTPUT_BUFFERS_CHANGED\n");
        // continue;
    } else if (output_buffer_index == AMEDIACODEC__INFO_OUTPUT_FORMAT_CHANGED) {
        ALOGI("AMEDIACODEC__INFO_OUTPUT_FORMAT_CHANGED\n");
        SDL_AMediaFormat_deleteP(&opaque->output_aformat);
        opaque->output_aformat = SDL_AMediaCodec_getOutputFormat(opaque->acodec);
        if (opaque->output_aformat) {
            int width        = 0;
            int height       = 0;
            int color_format = 0;
            int stride       = 0;
            int slice_height = 0;
            int crop_left    = 0;
            int crop_top     = 0;
            int crop_right   = 0;
            int crop_bottom  = 0;

            SDL_AMediaFormat_getInt32(opaque->output_aformat, "width",          &width);
            SDL_AMediaFormat_getInt32(opaque->output_aformat, "height",         &height);
            SDL_AMediaFormat_getInt32(opaque->output_aformat, "color-format",   &color_format);

            SDL_AMediaFormat_getInt32(opaque->output_aformat, "stride",         &stride);
            SDL_AMediaFormat_getInt32(opaque->output_aformat, "slice-height",   &slice_height);
            SDL_AMediaFormat_getInt32(opaque->output_aformat, "crop-left",      &crop_left);
            SDL_AMediaFormat_getInt32(opaque->output_aformat, "crop-top",       &crop_top);
            SDL_AMediaFormat_getInt32(opaque->output_aformat, "crop-right",     &crop_right);
            SDL_AMediaFormat_getInt32(opaque->output_aformat, "crop-bottom",    &crop_bottom);

            // TI decoder could crash after reconfigure
            // ffp_notify_msg3(ffp, FFP_MSG_VIDEO_SIZE_CHANGED, width, height);
            // opaque->frame_width  = width;
            // opaque->frame_height = height;
            ALOGI(
                "AMEDIACODEC__INFO_OUTPUT_FORMAT_CHANGED\n"
                "    width-height: (%d x %d)\n"
                "    color-format: (%s: 0x%x)\n"
                "    stride:       (%d)\n"
                "    slice-height: (%d)\n"
                "    crop:         (%d, %d, %d, %d)\n"
                ,
                width, height,
                SDL_AMediaCodec_getColorFormatName(color_format), color_format,
                stride,
                slice_height,
                crop_left, crop_top, crop_right, crop_bottom);
        }
        // continue;
    } else if (output_buffer_index == AMEDIACODEC__INFO_TRY_AGAIN_LATER) {
        AMCTRACE("AMEDIACODEC__INFO_TRY_AGAIN_LATER\n");
        // continue;
    } else if (output_buffer_index < 0) {
        // enqueue packet as a fake picture
        PacketQueue *fake_q = &opaque->fake_pictq;
        SDL_LockMutex(fake_q->mutex);
        if (!fake_q->abort_request && fake_q->nb_packets <= 0) {
            SDL_CondWaitTimeout(fake_q->cond, fake_q->mutex, 1000);
        }
        SDL_UnlockMutex(fake_q->mutex);

        if (fake_q->abort_request) {
            ret = -1;
            goto fail;
        } else {
            AVPacket pkt;
            if (ffp_packet_queue_get(&opaque->fake_pictq, &pkt, 1, &opaque->fake_pictq_serial) < 0) {
                ret = -1;
                goto fail;
            } else {
                if (!ffp_is_flush_packet(&pkt)) {
                    if (dequeue_count)
                        ++*dequeue_count;

                    ret = amc_queue_picture_fake(node, &pkt);
                    av_free_packet(&pkt);
                }
                ret = 0;
                goto fail;
            }
        }
    } else if (output_buffer_index >= 0) {
        if (dequeue_count)
            ++*dequeue_count;

        if (opaque->n_buf_out) {
            AMC_Buf_Out *buf_out;

            if (opaque->off_buf_out < opaque->n_buf_out) {
                // ALOGD("filling buffer... %d", opaque->off_buf_out);
                buf_out = &opaque->amc_buf_out[opaque->off_buf_out++];
                buf_out->port = output_buffer_index;
                buf_out->info = bufferInfo;
                buf_out->pts = pts_from_buffer_info(node, &bufferInfo);
                sort_amc_buf_out(opaque->amc_buf_out, opaque->off_buf_out);
            } else {
                double pts;

                pts = pts_from_buffer_info(node, &bufferInfo);
                if (opaque->last_queued_pts != AV_NOPTS_VALUE &&
                    pts < opaque->last_queued_pts) {
                    // ALOGE("early picture, drop!");
                    SDL_AMediaCodec_releaseOutputBuffer(opaque->acodec, output_buffer_index, false);
                    goto done;
                }
                /* already sorted */
                buf_out = &opaque->amc_buf_out[opaque->off_buf_out - 1];
                /* new picture is the most aged, send now */
                if (pts < buf_out->pts) {
                    ret = amc_queue_picture_buffer(node, output_buffer_index, &bufferInfo);
                    opaque->last_queued_pts = pts;
                    // ALOGD("pts = %f", pts);
                } else {
                    int i;

                    /* find one to send */
                    for (i = opaque->off_buf_out - 1; i >= 0; i--) {
                        buf_out = &opaque->amc_buf_out[i];
                        if (pts > buf_out->pts) {
                            ret = amc_queue_picture_buffer(node, buf_out->port, &buf_out->info);
                            opaque->last_queued_pts = buf_out->pts;
                            // ALOGD("pts = %f", buf_out->pts);
                            /* replace for sort later */
                            buf_out->port = output_buffer_index;
                            buf_out->info = bufferInfo;
                            buf_out->pts = pts_from_buffer_info(node, &bufferInfo);
                            sort_amc_buf_out(opaque->amc_buf_out, opaque->n_buf_out);
                            break;
                        }
                    }
                    /* need to discard current buffer */
                    if (i < 0) {
                        // ALOGE("buffer too small, drop picture!");
                        SDL_AMediaCodec_releaseOutputBuffer(opaque->acodec, output_buffer_index, false);
                        goto done;
                    }
                }
            }
        } else {
            ret = amc_queue_picture_buffer(node, output_buffer_index, &bufferInfo);
        }
    }

done:
    ret = 0;
fail:
    return ret;
}
static int feed_input_buffer(JNIEnv *env, IJKFF_Pipenode *node, int64_t timeUs, int *enqueue_count)
{
    IJKFF_Pipenode_Opaque *opaque   = node->opaque;
    FFPlayer              *ffp      = opaque->ffp;
    IJKFF_Pipeline        *pipeline = opaque->pipeline;
    VideoState            *is       = ffp->is;
    Decoder               *d        = &is->viddec;
    PacketQueue           *q        = d->queue;
    sdl_amedia_status_t    amc_ret  = 0;
    int                    ret      = 0;
    ssize_t  input_buffer_index = 0;
    uint8_t* input_buffer_ptr   = NULL;
    size_t   input_buffer_size  = 0;
    size_t   copy_size          = 0;
    int64_t  time_stamp         = 0;

    if (enqueue_count)
        *enqueue_count = 0;

    if (d->queue->abort_request) {
        ret = 0;
        goto fail;
    }

    if (!d->packet_pending || d->queue->serial != d->pkt_serial) {
#if AMC_USE_AVBITSTREAM_FILTER
#else
        H264ConvertState convert_state = {0, 0};
#endif
        AVPacket pkt;
        do {
            if (d->queue->nb_packets == 0)
                SDL_CondSignal(d->empty_queue_cond);
            if (ffp_packet_queue_get_or_buffering(ffp, d->queue, &pkt, &d->pkt_serial, &d->finished) < 0) {
                ret = -1;
                goto fail;
            }
            if (ffp_is_flush_packet(&pkt) || opaque->acodec_flush_request) {
                // request flush before lock, or never get mutex
                opaque->acodec_flush_request = true;
                SDL_LockMutex(opaque->acodec_mutex);
                if (SDL_AMediaCodec_isStarted(opaque->acodec)) {
                    if (opaque->input_packet_count > 0) {
                        // flush empty queue cause error on OMX.SEC.AVC.Decoder (Nexus S)
                        SDL_AMediaCodec_flush(opaque->acodec);
                        opaque->input_packet_count = 0;
                    }
                    // If codec is configured in synchronous mode, codec will resume automatically
                    // SDL_AMediaCodec_start(opaque->acodec);
                }
                opaque->acodec_flush_request = false;
                SDL_CondSignal(opaque->acodec_cond);
                SDL_UnlockMutex(opaque->acodec_mutex);
                d->finished = 0;
                d->next_pts = d->start_pts;
                d->next_pts_tb = d->start_pts_tb;
            }
        } while (ffp_is_flush_packet(&pkt) || d->queue->serial != d->pkt_serial);
        av_free_packet(&d->pkt);
        d->pkt_temp = d->pkt = pkt;
        d->packet_pending = 1;
#if AMC_USE_AVBITSTREAM_FILTER
        // d->pkt_temp->data could be allocated by av_bitstream_filter_filter
        if (d->bfsc_ret > 0) {
            if (d->bfsc_data)
                av_freep(&d->bfsc_data);
            d->bfsc_ret = 0;
        }
        d->bfsc_ret =
            av_bitstream_filter_filter(opaque->bsfc, opaque->avctx, NULL, &d->pkt_temp.data, &d->pkt_temp.size,
                                       d->pkt.data, d->pkt.size, d->pkt.flags & AV_PKT_FLAG_KEY);
        if (d->bfsc_ret > 0) {
            d->bfsc_data = d->pkt_temp.data;
        } else if (d->bfsc_ret < 0) {
            ALOGE("%s: av_bitstream_filter_filter failed\n", __func__);
            ret = -1;
            goto fail;
        }

        if (d->pkt_temp.size == d->pkt.size + opaque->avctx->extradata_size) {
            d->pkt_temp.data += opaque->avctx->extradata_size;
            d->pkt_temp.size  = d->pkt.size;
        }

        AMCTRACE("bsfc->filter(%d): %p[%d] -> %p[%d]", d->bfsc_ret, d->pkt.data, (int)d->pkt.size, d->pkt_temp.data, (int)d->pkt_temp.size);
#else
#if 0
        AMCTRACE("raw [%d][%d] %02x%02x%02x%02x%02x%02x%02x%02x", (int)d->pkt_temp.size,
            (int)opaque->nal_size,
            d->pkt_temp.data[0],
            d->pkt_temp.data[1],
            d->pkt_temp.data[2],
            d->pkt_temp.data[3],
            d->pkt_temp.data[4],
            d->pkt_temp.data[5],
            d->pkt_temp.data[6],
            d->pkt_temp.data[7]);
#endif
        convert_h264_to_annexb(d->pkt_temp.data, d->pkt_temp.size, opaque->nal_size, &convert_state);
        int64_t time_stamp = d->pkt_temp.pts;
        if (!time_stamp && d->pkt_temp.dts)
            time_stamp = d->pkt_temp.dts;
        if (time_stamp > 0) {
            time_stamp = av_rescale_q(time_stamp, is->video_st->time_base, AV_TIME_BASE_Q);
        } else {
            time_stamp = 0;
        }
#if 0
        AMCTRACE("input[%d][%d][%lld,%lld (%d, %d) -> %lld] %02x%02x%02x%02x%02x%02x%02x%02x", (int)d->pkt_temp.size,
            (int)opaque->nal_size,
            (int64_t)d->pkt_temp.pts,
            (int64_t)d->pkt_temp.dts,
            (int)is->video_st->time_base.num,
            (int)is->video_st->time_base.den,
            (int64_t)time_stamp,
            d->pkt_temp.data[0],
            d->pkt_temp.data[1],
            d->pkt_temp.data[2],
            d->pkt_temp.data[3],
            d->pkt_temp.data[4],
            d->pkt_temp.data[5],
            d->pkt_temp.data[6],
            d->pkt_temp.data[7]);
#endif
#endif
    }

    if (d->pkt_temp.data) {
        // reconfigure surface if surface changed
        // NULL surface cause no display
        if (ffpipeline_is_surface_need_reconfigure(pipeline)) {
            // request reconfigure before lock, or never get mutex
            opaque->acodec_reconfigure_request = true;
            SDL_LockMutex(opaque->acodec_mutex);
            ret = reconfigure_codec_l(env, node);
            opaque->acodec_reconfigure_request = false;
            SDL_CondSignal(opaque->acodec_cond);
            SDL_UnlockMutex(opaque->acodec_mutex);
            if (ret != 0) {
                ALOGE("%s: reconfigure_codec failed\n", __func__);
                ret = 0;
                goto fail;
            }

            SDL_LockMutex(opaque->acodec_first_dequeue_output_mutex);
            while (!q->abort_request &&
                !opaque->acodec_reconfigure_request &&
                !opaque->acodec_flush_request &&
                opaque->acodec_first_dequeue_output_request) {
                SDL_CondWaitTimeout(opaque->acodec_first_dequeue_output_cond, opaque->acodec_first_dequeue_output_mutex, 1000);
            }
            SDL_UnlockMutex(opaque->acodec_first_dequeue_output_mutex);

            if (q->abort_request || opaque->acodec_reconfigure_request || opaque->acodec_flush_request) {
                ret = 0;
                goto fail;
            }
        }

        // no need to decode without surface
        if (!opaque->jsurface) {
            ret = amc_decode_picture_fake(node, 1000);
            goto fail;
        }

        input_buffer_index = SDL_AMediaCodec_dequeueInputBuffer(opaque->acodec, timeUs);
        if (input_buffer_index < 0) {
            if (SDL_AMediaCodec_isInputBuffersValid(opaque->acodec)) {
                // timeout
                ret = 0;
                goto fail;
            } else {
                // exception
                ret = amc_decode_picture_fake(node, 1000);
                goto fail;
            }
        } else {
            // remove all fake pictures
            if (opaque->fake_pictq.nb_packets > 0)
                ffp_packet_queue_flush(&opaque->fake_pictq);
        }

        input_buffer_ptr = SDL_AMediaCodec_getInputBuffer(opaque->acodec, input_buffer_index, &input_buffer_size);
        if (!input_buffer_ptr) {
            ALOGE("%s: SDL_AMediaCodec_getInputBuffer failed\n", __func__);
            ret = -1;
            goto fail;
        }

        copy_size = FFMIN(input_buffer_size, d->pkt_temp.size);
        memcpy(input_buffer_ptr, d->pkt_temp.data, copy_size);

        time_stamp = d->pkt_temp.pts;
        if (!time_stamp && d->pkt_temp.dts)
            time_stamp = d->pkt_temp.dts;
        if (time_stamp > 0) {
            time_stamp = av_rescale_q(time_stamp, is->video_st->time_base, AV_TIME_BASE_Q);
        } else {
            time_stamp = 0;
        }
        // ALOGE("queueInputBuffer, %lld\n", time_stamp);
        amc_ret = SDL_AMediaCodec_queueInputBuffer(opaque->acodec, input_buffer_index, 0, copy_size, time_stamp, 0);
        if (amc_ret != SDL_AMEDIA_OK) {
            ALOGE("%s: SDL_AMediaCodec_getInputBuffer failed\n", __func__);
            ret = -1;
            goto fail;
        }
        // ALOGE("%s: queue %d/%d", __func__, (int)copy_size, (int)input_buffer_size);
        opaque->input_packet_count++;
        if (enqueue_count)
            ++*enqueue_count;
    }

    if (input_buffer_size < 0) {
        d->packet_pending = 0;
    } else {
        d->pkt_temp.dts =
        d->pkt_temp.pts = AV_NOPTS_VALUE;
        if (d->pkt_temp.data) {
            d->pkt_temp.data += copy_size;
            d->pkt_temp.size -= copy_size;
            if (d->pkt_temp.size <= 0)
                d->packet_pending = 0;
        } else {
            // FIXME: detect if decode finished
            // if (!got_frame) {
                d->packet_pending = 0;
                d->finished = d->pkt_serial;
            // }
        }
    }
    // add by WilliamShi
    ffp->ab_tm = d->pkt_temp.ab_timestamp;
fail:
    return ret;
}
Example #21
int TH_MutexCondWaitTimeout (threads_mutex_t *mutex, SDL_cond *condition, int timeout)
{
	if (mutex == NULL)
		return -1;
	return SDL_CondWaitTimeout(condition, mutex->mutex, timeout);
}
Example #22
File: mt.c Project: yoanlcq/FATE
void fe_mt_cond_wait     (fe_mt_cond *cond, 
                          fe_mt_mutex *mutex, 
                          uint32_t timeout_milliseconds)
{
    SDL_CondWaitTimeout(*cond, *mutex, timeout_milliseconds);
}
Example #23
		int CondWaitTimeout(Cond *cond, Mutex *mutex, Uint32 ms)
		{
			return SDL_CondWaitTimeout(cond, mutex, ms);
		}
static int drain_output_buffer_l(JNIEnv *env, IJKFF_Pipenode *node, int64_t timeUs, int *dequeue_count)
{
    IJKFF_Pipenode_Opaque *opaque   = node->opaque;
    int                    ret      = 0;
    SDL_AMediaCodecBufferInfo bufferInfo;
    ssize_t                   output_buffer_index = 0;

    if (dequeue_count)
        *dequeue_count = 0;

    if (JNI_OK != SDL_JNI_SetupThreadEnv(&env)) {
        ALOGE("%s:create: SetupThreadEnv failed\n", __func__);
        return -1;
    }

    output_buffer_index = SDL_AMediaCodec_dequeueOutputBuffer(opaque->acodec, &bufferInfo, timeUs);
    if (output_buffer_index == AMEDIACODEC__INFO_OUTPUT_BUFFERS_CHANGED) {
        ALOGI("AMEDIACODEC__INFO_OUTPUT_BUFFERS_CHANGED\n");
        // continue;
    } else if (output_buffer_index == AMEDIACODEC__INFO_OUTPUT_FORMAT_CHANGED) {
        ALOGI("AMEDIACODEC__INFO_OUTPUT_FORMAT_CHANGED\n");
        SDL_AMediaFormat_deleteP(&opaque->output_aformat);
        opaque->output_aformat = SDL_AMediaCodec_getOutputFormat(opaque->acodec);
        if (opaque->output_aformat) {
            int width        = 0;
            int height       = 0;
            int color_format = 0;
            int stride       = 0;
            int slice_height = 0;
            int crop_left    = 0;
            int crop_top     = 0;
            int crop_right   = 0;
            int crop_bottom  = 0;

            SDL_AMediaFormat_getInt32(opaque->output_aformat, "width",          &width);
            SDL_AMediaFormat_getInt32(opaque->output_aformat, "height",         &height);
            SDL_AMediaFormat_getInt32(opaque->output_aformat, "color-format",   &color_format);

            SDL_AMediaFormat_getInt32(opaque->output_aformat, "stride",         &stride);
            SDL_AMediaFormat_getInt32(opaque->output_aformat, "slice-height",   &slice_height);
            SDL_AMediaFormat_getInt32(opaque->output_aformat, "crop-left",      &crop_left);
            SDL_AMediaFormat_getInt32(opaque->output_aformat, "crop-top",       &crop_top);
            SDL_AMediaFormat_getInt32(opaque->output_aformat, "crop-right",     &crop_right);
            SDL_AMediaFormat_getInt32(opaque->output_aformat, "crop-bottom",    &crop_bottom);

            // TI decoder could crash after reconfigure
            // ffp_notify_msg3(ffp, FFP_MSG_VIDEO_SIZE_CHANGED, width, height);
            // opaque->frame_width  = width;
            // opaque->frame_height = height;
            ALOGI(
                "AMEDIACODEC__INFO_OUTPUT_FORMAT_CHANGED\n"
                "    width-height: (%d x %d)\n"
                "    color-format: (%s: 0x%x)\n"
                "    stride:       (%d)"
                "    slice-height: (%d)"
                "    crop:         (%d, %d, %d, %d)"
                ,
                width, height,
                SDL_AMediaCodec_getColorFormatName(color_format), color_format,
                stride,
                slice_height,
                crop_left, crop_top, crop_right, crop_bottom);
        }
        // continue;
    } else if (output_buffer_index == AMEDIACODEC__INFO_TRY_AGAIN_LATER) {
        AMCTRACE("AMEDIACODEC__INFO_TRY_AGAIN_LATER\n");
        // continue;
    } else if (output_buffer_index < 0) {
        // enqueue packet as a fake picture
        PacketQueue *q = &opaque->fake_pictq;
        SDL_LockMutex(q->mutex);
        if (!q->abort_request && q->nb_packets <= 0) {
            SDL_CondWaitTimeout(q->cond, q->mutex, 1000);
        }
        SDL_UnlockMutex(q->mutex);

        if (q->abort_request) {
            ret = -1;
            goto fail;
        } else {
            AVPacket pkt;
            if (ffp_packet_queue_get(&opaque->fake_pictq, &pkt, 1, &opaque->fake_pictq_serial) < 0) {
                ret = -1;
                goto fail;
            } else {
                if (!ffp_is_flush_packet(&pkt)) {
                    if (dequeue_count)
                        ++*dequeue_count;

                    ret = amc_queue_picture_fake(node, &pkt);
                    av_free_packet(&pkt);
                }
                ret = 0;
                goto fail;
            }
        }
    } else if (output_buffer_index >= 0) {
        if (dequeue_count)
            ++*dequeue_count;

        ret = amc_queue_picture_buffer(node, output_buffer_index, &bufferInfo);
    }

    ret = 0;
fail:
    return ret;
}
static int drain_output_buffer_l(JNIEnv *env, IJKFF_Pipenode *node, int64_t timeUs, int *dequeue_count, AVFrame *frame, int *got_frame)
{
    IJKFF_Pipenode_Opaque *opaque   = node->opaque;
    FFPlayer              *ffp      = opaque->ffp;
    int                    ret      = 0;
    SDL_AMediaCodecBufferInfo bufferInfo;
    ssize_t                   output_buffer_index = 0;

    if (dequeue_count)
        *dequeue_count = 0;

    if (JNI_OK != SDL_JNI_SetupThreadEnv(&env)) {
        ALOGE("%s:create: SetupThreadEnv failed\n", __func__);
        goto fail;
    }

    output_buffer_index = SDL_AMediaCodecFake_dequeueOutputBuffer(opaque->acodec, &bufferInfo, timeUs);
    if (output_buffer_index == AMEDIACODEC__INFO_OUTPUT_BUFFERS_CHANGED) {
        ALOGI("AMEDIACODEC__INFO_OUTPUT_BUFFERS_CHANGED\n");
        // continue;
    } else if (output_buffer_index == AMEDIACODEC__INFO_OUTPUT_FORMAT_CHANGED) {
        ALOGI("AMEDIACODEC__INFO_OUTPUT_FORMAT_CHANGED\n");
        SDL_AMediaFormat_deleteP(&opaque->output_aformat);
        opaque->output_aformat = SDL_AMediaCodec_getOutputFormat(opaque->acodec);
        if (opaque->output_aformat) {
            int width        = 0;
            int height       = 0;
            int color_format = 0;
            int stride       = 0;
            int slice_height = 0;
            int crop_left    = 0;
            int crop_top     = 0;
            int crop_right   = 0;
            int crop_bottom  = 0;

            SDL_AMediaFormat_getInt32(opaque->output_aformat, "width",          &width);
            SDL_AMediaFormat_getInt32(opaque->output_aformat, "height",         &height);
            SDL_AMediaFormat_getInt32(opaque->output_aformat, "color-format",   &color_format);

            SDL_AMediaFormat_getInt32(opaque->output_aformat, "stride",         &stride);
            SDL_AMediaFormat_getInt32(opaque->output_aformat, "slice-height",   &slice_height);
            SDL_AMediaFormat_getInt32(opaque->output_aformat, "crop-left",      &crop_left);
            SDL_AMediaFormat_getInt32(opaque->output_aformat, "crop-top",       &crop_top);
            SDL_AMediaFormat_getInt32(opaque->output_aformat, "crop-right",     &crop_right);
            SDL_AMediaFormat_getInt32(opaque->output_aformat, "crop-bottom",    &crop_bottom);

            // TI decoder could crash after reconfigure
            // ffp_notify_msg3(ffp, FFP_MSG_VIDEO_SIZE_CHANGED, width, height);
            // opaque->frame_width  = width;
            // opaque->frame_height = height;
            ALOGI(
                "AMEDIACODEC__INFO_OUTPUT_FORMAT_CHANGED\n"
                "    width-height: (%d x %d)\n"
                "    color-format: (%s: 0x%x)\n"
                "    stride:       (%d)\n"
                "    slice-height: (%d)\n"
                "    crop:         (%d, %d, %d, %d)\n"
                ,
                width, height,
                SDL_AMediaCodec_getColorFormatName(color_format), color_format,
                stride,
                slice_height,
                crop_left, crop_top, crop_right, crop_bottom);
        }
        // continue;
    } else if (output_buffer_index == AMEDIACODEC__INFO_TRY_AGAIN_LATER) {
        AMCTRACE("AMEDIACODEC__INFO_TRY_AGAIN_LATER\n");
        // continue;
    } else if (output_buffer_index < 0) {
        SDL_LockMutex(opaque->any_input_mutex);
        SDL_CondWaitTimeout(opaque->any_input_cond, opaque->any_input_mutex, 1000);
        SDL_UnlockMutex(opaque->any_input_mutex);

        goto done;
    } else if (output_buffer_index >= 0) {
        ffp->stat.vdps = SDL_SpeedSamplerAdd(&opaque->sampler, FFP_SHOW_VDPS_MEDIACODEC, "vdps[MediaCodec]");

        if (dequeue_count)
            ++*dequeue_count;

#ifdef FFP_SHOW_AMC_VDPS
        {
            if (opaque->benchmark_start_time == 0) {
                opaque->benchmark_start_time   = SDL_GetTickHR();
            }
            opaque->benchmark_frame_count += 1;
            if (0 == (opaque->benchmark_frame_count % 240)) {
                Uint64 diff = SDL_GetTickHR() - opaque->benchmark_start_time;
                double per_frame_ms = ((double) diff) / opaque->benchmark_frame_count;
                double fps          = ((double) opaque->benchmark_frame_count) * 1000 / diff;
                ALOGE("%lf fps, %lf ms/frame, %"PRIu64" frames\n",
                      fps, per_frame_ms, opaque->benchmark_frame_count);
            }
        }
#endif
#ifdef FFP_AMC_DISABLE_OUTPUT
        if (!(bufferInfo.flags & AMEDIACODEC__BUFFER_FLAG_FAKE_FRAME)) {
            SDL_AMediaCodec_releaseOutputBuffer(opaque->acodec, output_buffer_index, false);   
        }
        goto done;
#endif

        if (opaque->n_buf_out) {
            AMC_Buf_Out *buf_out;

            if (opaque->off_buf_out < opaque->n_buf_out) {
                // ALOGD("filling buffer... %d", opaque->off_buf_out);
                buf_out = &opaque->amc_buf_out[opaque->off_buf_out++];
                buf_out->acodec_serial = SDL_AMediaCodec_getSerial(opaque->acodec);
                buf_out->port = output_buffer_index;
                buf_out->info = bufferInfo;
                buf_out->pts = pts_from_buffer_info(node, &bufferInfo);
                sort_amc_buf_out(opaque->amc_buf_out, opaque->off_buf_out);
            } else {
                double pts;

                pts = pts_from_buffer_info(node, &bufferInfo);
                if (opaque->last_queued_pts != AV_NOPTS_VALUE &&
                    pts < opaque->last_queued_pts) {
                    // FIXME: drop unordered picture to avoid dither
                    // ALOGE("early picture, drop!");
                    // SDL_AMediaCodec_releaseOutputBuffer(opaque->acodec, output_buffer_index, false);
                    // goto done;
                }
                /* already sorted */
                buf_out = &opaque->amc_buf_out[opaque->off_buf_out - 1];
                /* new picture is the most aged, send now */
                if (pts < buf_out->pts) {
                    ret = amc_fill_frame(node, frame, got_frame, output_buffer_index, SDL_AMediaCodec_getSerial(opaque->acodec), &bufferInfo);
                    opaque->last_queued_pts = pts;
                    // ALOGD("pts = %f", pts);
                } else {
                    int i;

                    /* find one to send */
                    for (i = opaque->off_buf_out - 1; i >= 0; i--) {
                        buf_out = &opaque->amc_buf_out[i];
                        if (pts > buf_out->pts) {
                            ret = amc_fill_frame(node, frame, got_frame, buf_out->port, buf_out->acodec_serial, &buf_out->info);
                            opaque->last_queued_pts = buf_out->pts;
                            // ALOGD("pts = %f", buf_out->pts);
                            /* replace for sort later */
                            buf_out->acodec_serial = SDL_AMediaCodec_getSerial(opaque->acodec);
                            buf_out->port = output_buffer_index;
                            buf_out->info = bufferInfo;
                            buf_out->pts = pts_from_buffer_info(node, &bufferInfo);
                            sort_amc_buf_out(opaque->amc_buf_out, opaque->n_buf_out);
                            break;
                        }
                    }
                    /* need to discard current buffer */
                    if (i < 0) {
                        // ALOGE("buffer too small, drop picture!");
                        if (!(bufferInfo.flags & AMEDIACODEC__BUFFER_FLAG_FAKE_FRAME)) {
                            SDL_AMediaCodec_releaseOutputBuffer(opaque->acodec, output_buffer_index, false);
                            goto done;
                        }
                    }
                }
            }
        } else {
            ret = amc_fill_frame(node, frame, got_frame, output_buffer_index, SDL_AMediaCodec_getSerial(opaque->acodec), &bufferInfo);
        }
    }

done:
    if (opaque->decoder->queue->abort_request)
        ret = -1;
    else
        ret = 0;
fail:
    return ret;
}
Example #26
int main(int argc, char **argv)
{
    init_session_config();
    init_session_callbacks();
    init_playlist_callbacks();
    init_playlist_container_callbacks();

    sp_session *sp;
    sp_error err;
    int next_timeout = 0;
    const char *username = NULL;
    const char *password = NULL;
    int opt;

    while ((opt = getopt(argc, argv, "u:p:l:d")) != EOF) {
        switch (opt) {
        case 'u':
            username = optarg;
            break;

        case 'p':
            password = optarg;
            break;

        case 'l':
            g_listname = optarg;
            break;

        case 'd':
            g_remove_tracks = 1;
            break;

        default:
            exit(1);
        }
    }

    if (!username || !password || !g_listname) {
        usage(argv[0]);
        exit(1);
    }

    audio_open();

    err = sp_session_create(&spconfig, &sp);

    if (SP_ERROR_OK != err) {
        fprintf(stderr, "Unable to create session: %s\n",
            sp_error_message(err));
        exit(1);
    }

    g_sess = sp;

    g_notify_mutex = SDL_CreateMutex();
    g_notify_cond = SDL_CreateCond();


    sp_playlistcontainer_add_callbacks(
        sp_session_playlistcontainer(g_sess),
        &pc_callbacks,
        NULL);

    sp_session_login(sp, username, password);
    SDL_mutexP(g_notify_mutex);

    for (;;) {
        if (next_timeout == 0) {
            while(!g_notify_do && !g_playback_done)
                SDL_CondWait(g_notify_cond, g_notify_mutex);
        } else {
            SDL_CondWaitTimeout(g_notify_cond, g_notify_mutex, next_timeout);
        }

        g_notify_do = 0;
        SDL_mutexV(g_notify_mutex);

        if (g_playback_done) {
            track_ended();
            g_playback_done = 0;
        }

        do {
            sp_session_process_events(sp, &next_timeout);
        } while (next_timeout == 0);

        SDL_mutexP(g_notify_mutex);
    }

    return 0;
}
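
The main loop above is the usual libspotify "notify" pattern: the library callbacks only set a flag and signal a condition variable, and the main thread sleeps on SDL_CondWaitTimeout() so it can react both to those signals and to the timeout requested by sp_session_process_events(). Below is a stripped-down sketch of that pattern with a worker thread standing in for the library callbacks; it uses the plain SDL2 API, and names such as `worker` and `pending` are made up for the illustration.

#include <SDL2/SDL.h>
#include <stdio.h>

static SDL_mutex *notify_mutex;
static SDL_cond  *notify_cond;
static int        pending;      /* work items requested by the worker */

/* stand-in for a library callback that asks the main thread to do work */
static int worker(void *arg)
{
    (void)arg;
    for (int i = 0; i < 3; i++) {
        SDL_Delay(300);
        SDL_LockMutex(notify_mutex);
        pending++;
        SDL_CondSignal(notify_cond);
        SDL_UnlockMutex(notify_mutex);
    }
    return 0;
}

int main(int argc, char *argv[])
{
    (void)argc; (void)argv;
    SDL_Init(0);
    notify_mutex = SDL_CreateMutex();
    notify_cond  = SDL_CreateCond();
    SDL_Thread *t = SDL_CreateThread(worker, "worker", NULL);

    SDL_LockMutex(notify_mutex);
    for (int handled = 0; handled < 3; ) {
        if (pending == 0) {
            /* sleep until signalled, but never longer than 1000 ms */
            SDL_CondWaitTimeout(notify_cond, notify_mutex, 1000);
        }
        while (pending > 0) {
            pending--;
            SDL_UnlockMutex(notify_mutex);      /* do the work unlocked */
            printf("processing event %d\n", ++handled);
            SDL_LockMutex(notify_mutex);
        }
    }
    SDL_UnlockMutex(notify_mutex);

    SDL_WaitThread(t, NULL);
    SDL_DestroyCond(notify_cond);
    SDL_DestroyMutex(notify_mutex);
    SDL_Quit();
    return 0;
}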
Example No. 27
/**
**  Fill audio thread.
*/
static int FillThread(void *)
{
	while (Audio.Running == true) {
		SDL_LockMutex(Audio.Lock);
#ifdef USE_WIN32
		// This is kind of a hackfix, without this on windows audio can get sluggish
		if (SDL_CondWaitTimeout(Audio.Cond, Audio.Lock, 1000) == 0) {
#else
		if (SDL_CondWaitTimeout(Audio.Cond, Audio.Lock, 100) == 0) {
#endif
			MixIntoBuffer(Audio.Buffer, Audio.Format.samples * Audio.Format.channels);
		}
		SDL_UnlockMutex(Audio.Lock);
	}

	SDL_LockMutex(Audio.Lock);
	// Mustn't call SDL_CloseAudio here, it'll be called again from SDL_Quit
	SDL_DestroyCond(Audio.Cond);
	SDL_DestroyMutex(Audio.Lock);
	return 0;
}

/*----------------------------------------------------------------------------
--  Effects
----------------------------------------------------------------------------*/

/**
**  Check if this sound is already playing
*/
bool SampleIsPlaying(CSample *sample)
{
	for (int i = 0; i < MaxChannels; ++i) {
		if (Channels[i].Sample == sample && Channels[i].Playing) {
			return true;
		}
	}
	return false;
}

bool UnitSoundIsPlaying(Origin *origin)
{
	for (int i = 0; i < MaxChannels; ++i) {
		if (origin && Channels[i].Unit && origin->Id && Channels[i].Unit->Id
			&& origin->Id == Channels[i].Unit->Id && Channels[i].Playing) {
			return true;
		}
	}
	return false;
}

/**
**  A channel is finished playing
*/
static void ChannelFinished(int channel)
{
	if (Channels[channel].FinishedCallback) {
		Channels[channel].FinishedCallback(channel);
	}

	delete Channels[channel].Unit;
	Channels[channel].Unit = NULL;

	Channels[channel].Playing = false;
	Channels[channel].Point = NextFreeChannel;
	NextFreeChannel = channel;
}

/**
**  Put a sound request in the next free channel.
*/
static int FillChannel(CSample *sample, unsigned char volume, char stereo, Origin *origin)
{
	Assert(NextFreeChannel < MaxChannels);

	int old_free = NextFreeChannel;
	int next_free = Channels[NextFreeChannel].Point;

	Channels[NextFreeChannel].Volume = volume;
	Channels[NextFreeChannel].Point = 0;
	Channels[NextFreeChannel].Playing = true;
	Channels[NextFreeChannel].Sample = sample;
	Channels[NextFreeChannel].Stereo = stereo;
	Channels[NextFreeChannel].FinishedCallback = NULL;
	if (origin && origin->Base) {
		Origin *source = new Origin;
		source->Base = origin->Base;
		source->Id = origin->Id;
		Channels[NextFreeChannel].Unit = source;
	}
	NextFreeChannel = next_free;

	return old_free;
}
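
FillChannel() and ChannelFinished() above manage the channel table as an intrusive free list: while a channel is idle its Point field is reused as the index of the next free channel, so allocating and releasing a channel are both O(1). A small self-contained sketch of that trick follows; the field names mirror the code above, but the sketch itself and its helper names are invented for the illustration.

#include <assert.h>
#include <stdio.h>

#define MAX_CHANNELS 8

/* Point doubles as a playback cursor while the slot is busy and as the
 * next-free-channel index while the slot is idle */
typedef struct {
    int      Playing;
    unsigned Point;
} Channel;

static Channel Channels[MAX_CHANNELS];
static int     NextFreeChannel;

static void init_channels(void)
{
    for (int i = 0; i < MAX_CHANNELS; i++) {
        Channels[i].Playing = 0;
        Channels[i].Point = i + 1;          /* link each slot to the next one */
    }
    NextFreeChannel = 0;
}

static int alloc_channel(void)
{
    assert(NextFreeChannel < MAX_CHANNELS);
    int ch = NextFreeChannel;
    NextFreeChannel = Channels[ch].Point;   /* pop the free list */
    Channels[ch].Playing = 1;
    Channels[ch].Point = 0;                 /* now a playback cursor */
    return ch;
}

static void release_channel(int ch)
{
    Channels[ch].Playing = 0;
    Channels[ch].Point = NextFreeChannel;   /* push back onto the free list */
    NextFreeChannel = ch;
}

int main(void)
{
    init_channels();
    int a = alloc_channel();
    int b = alloc_channel();
    release_channel(a);
    printf("allocated %d and %d, next free is %d\n", a, b, NextFreeChannel);
    return 0;
}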
Example No. 28
static int aout_thread_n(SDL_Aout *aout)
{
    SDL_Aout_Opaque               *opaque           = aout->opaque;
    SLPlayItf                      slPlayItf        = opaque->slPlayItf;
    SLAndroidSimpleBufferQueueItf  slBufferQueueItf = opaque->slBufferQueueItf;
    SLVolumeItf                    slVolumeItf      = opaque->slVolumeItf;
    SDL_AudioCallback              audio_cblk       = opaque->spec.callback;
    void                          *userdata         = opaque->spec.userdata;
    uint8_t                       *next_buffer      = NULL;
    int                            next_buffer_index = 0;
    size_t                         bytes_per_buffer = opaque->bytes_per_buffer;

    SDL_SetThreadPriority(SDL_THREAD_PRIORITY_HIGH);

    if (!opaque->abort_request && !opaque->pause_on)
        (*slPlayItf)->SetPlayState(slPlayItf, SL_PLAYSTATE_PLAYING);

    while (!opaque->abort_request) {
        SLAndroidSimpleBufferQueueState slState = {0};

        SLresult slRet = (*slBufferQueueItf)->GetState(slBufferQueueItf, &slState);
        if (slRet != SL_RESULT_SUCCESS) {
            // the queue state is queried before the wakeup mutex is taken, so
            // there is nothing to unlock here; log and carry on with an empty state
            ALOGE("%s: slBufferQueueItf->GetState() failed\n", __func__);
        }

        SDL_LockMutex(opaque->wakeup_mutex);
        if (!opaque->abort_request && (opaque->pause_on || slState.count >= OPENSLES_BUFFERS)) {
            while (!opaque->abort_request && (opaque->pause_on || slState.count >= OPENSLES_BUFFERS)) {
                if (!opaque->pause_on) {
                    (*slPlayItf)->SetPlayState(slPlayItf, SL_PLAYSTATE_PLAYING);
                }
                SDL_CondWaitTimeout(opaque->wakeup_cond, opaque->wakeup_mutex, 1000);
                slRet = (*slBufferQueueItf)->GetState(slBufferQueueItf, &slState);
                if (slRet != SL_RESULT_SUCCESS) {
                    // keep the wakeup mutex held; it is unlocked once, after
                    // the locked section below
                    ALOGE("%s: slBufferQueueItf->GetState() failed\n", __func__);
                }

                if (opaque->pause_on)
                    (*slPlayItf)->SetPlayState(slPlayItf, SL_PLAYSTATE_PAUSED);
            }
            if (!opaque->abort_request && !opaque->pause_on) {
                (*slPlayItf)->SetPlayState(slPlayItf, SL_PLAYSTATE_PLAYING);
            }
        }
        if (opaque->need_flush) {
            opaque->need_flush = 0;
            (*slBufferQueueItf)->Clear(slBufferQueueItf);
        }
#if 0
        if (opaque->need_set_volume) {
            opaque->need_set_volume = 0;
            // FIXME: set volume here
        }
#endif
        if (opaque->need_set_volume) {
            opaque->need_set_volume = 0;
            SLmillibel level = android_amplification_to_sles((opaque->left_volume + opaque->right_volume) / 2);
            ALOGI("slVolumeItf->SetVolumeLevel((%f, %f) -> %d)\n", opaque->left_volume, opaque->right_volume, (int)level);
            slRet = (*slVolumeItf)->SetVolumeLevel(slVolumeItf, level);
            if (slRet != SL_RESULT_SUCCESS) {
                ALOGE("slVolumeItf->SetVolumeLevel failed %d\n", (int)slRet);
                // just ignore error
            }
        }
        SDL_UnlockMutex(opaque->wakeup_mutex);

        next_buffer = opaque->buffer + next_buffer_index * bytes_per_buffer;
        next_buffer_index = (next_buffer_index + 1) % OPENSLES_BUFFERS;
        audio_cblk(userdata, next_buffer, bytes_per_buffer);
        if (opaque->need_flush) {
            // a flush was requested while the callback was rendering:
            // drop whatever is still queued before enqueueing the new buffer
            (*slBufferQueueItf)->Clear(slBufferQueueItf);
            opaque->need_flush = 0;
        }

        if (opaque->need_flush) {
            ALOGE("flush");
            opaque->need_flush = 0;
            (*slBufferQueueItf)->Clear(slBufferQueueItf);
        } else {
            slRet = (*slBufferQueueItf)->Enqueue(slBufferQueueItf, next_buffer, bytes_per_buffer);
            if (slRet == SL_RESULT_SUCCESS) {
                // do nothing
            } else if (slRet == SL_RESULT_BUFFER_INSUFFICIENT) {
                // don't retry, just pass through
                ALOGE("SL_RESULT_BUFFER_INSUFFICIENT\n");
            } else {
                ALOGE("slBufferQueueItf->Enqueue() = %d\n", (int)slRet);
                break;
            }
        }

        // TODO: handle the case where the callback returns -1 or 0
    }

    return 0;
}
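
The volume branch above funnels the player's per-channel gains through a helper (android_amplification_to_sles() in this code base) because OpenSL ES expects attenuation in millibels rather than a linear 0.0–1.0 factor. The sketch below shows a plausible linear-gain-to-millibel mapping; it is an assumption about what such a helper does, not the actual implementation, and the constant mirrors SL_MILLIBEL_MIN from the NDK's OpenSL ES headers.

#include <math.h>
#include <stdint.h>
#include <stdio.h>

/* Hypothetical linear-gain -> millibel mapping: OpenSL ES volumes are
 * attenuations in millibels (1/100 dB), with 0 meaning unity gain and
 * SL_MILLIBEL_MIN (-32768 in the NDK headers) meaning mute. */
static int16_t linear_gain_to_millibel(float gain)
{
    const int16_t MILLIBEL_MIN = -32768;     /* SL_MILLIBEL_MIN */
    if (gain >= 1.0f)
        return 0;                            /* unity gain */
    if (gain <= 0.001f)
        return MILLIBEL_MIN;                 /* effectively muted */
    return (int16_t)(20.0f * log10f(gain) * 100.0f);
}

int main(void)
{
    for (float g = 1.0f; g >= 0.1f; g -= 0.3f)
        printf("gain %.1f -> %d mB\n", g, linear_gain_to_millibel(g));
    return 0;
}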
Example No. 29
/* Wait on the condition variable forever */
DECLSPEC int SDLCALL SDL_CondWait(SDL_cond *cond, SDL_mutex *mutex)
{
	return SDL_CondWaitTimeout(cond, mutex, SDL_MUTEX_MAXWAIT);
}
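
As the one-liner above shows, SDL_CondWait() is simply SDL_CondWaitTimeout() with SDL_MUTEX_MAXWAIT, so the timed variant is the primitive worth understanding: it returns 0 when the condition was signalled, SDL_MUTEX_TIMEDOUT when the wait expired, and a negative error code on failure. Below is a small hedged helper that waits for a flag with an overall deadline, plus a trivial caller; the helper name and layout are illustrative, not part of SDL.

#include <SDL2/SDL.h>
#include <stdio.h>

/* Wait until *flag becomes non-zero or deadline_ms elapses.
 * Returns 1 if the flag was observed, 0 on timeout, -1 on SDL error.
 * The caller must hold `mutex`, just as with SDL_CondWaitTimeout itself. */
static int wait_flag_timeout(SDL_cond *cond, SDL_mutex *mutex,
                             volatile int *flag, Uint32 deadline_ms)
{
    Uint32 start = SDL_GetTicks();
    while (!*flag) {
        Uint32 elapsed = SDL_GetTicks() - start;
        if (elapsed >= deadline_ms)
            return 0;
        int rc = SDL_CondWaitTimeout(cond, mutex, deadline_ms - elapsed);
        if (rc == SDL_MUTEX_TIMEDOUT)
            continue;               /* recheck the flag, then give up above */
        if (rc != 0)
            return -1;              /* SDL error */
    }
    return 1;
}

int main(void)
{
    SDL_mutex *m = SDL_CreateMutex();
    SDL_cond  *c = SDL_CreateCond();
    volatile int flag = 0;

    SDL_LockMutex(m);
    int rc = wait_flag_timeout(c, m, &flag, 250);   /* nobody signals: times out */
    SDL_UnlockMutex(m);
    printf("wait_flag_timeout returned %d\n", rc);  /* expected: 0 */

    SDL_DestroyCond(c);
    SDL_DestroyMutex(m);
    return 0;
}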
Example No. 30
def_dll int sdl_condition::wait_timeout(sdl_mutex& p,Uint32 t)
{
	return SDL_CondWaitTimeout(_condition,p._mutex,t);
}