/*----------------------------------------------------------------------
|   AacDecoderInput_PutPacket
+---------------------------------------------------------------------*/
BLT_METHOD
AacDecoderInput_PutPacket(BLT_PacketConsumer* _self,
                          BLT_MediaPacket*    packet)
{
    AacDecoder* self = ATX_SELF_M(input, AacDecoder, BLT_PacketConsumer);
    ATX_Result  result;

    /* check to see if this is the end of a stream */
    if (BLT_MediaPacket_GetFlags(packet) & 
        BLT_MEDIA_PACKET_FLAG_END_OF_STREAM) {
        self->input.eos = BLT_TRUE;
    }

    /* check to see if we need to create a decoder for this media type */
    if (self->helix_decoder == NULL) {
        AacDecoderConfig             decoder_config;
        AACFrameInfo                 aac_frame_info;
        const BLT_MediaType*         media_type;
        const BLT_Mp4AudioMediaType* mp4_media_type;

        BLT_MediaPacket_GetMediaType(packet, &media_type);
        if (media_type == NULL || media_type->id != self->mp4es_type_id) {
            return BLT_ERROR_INVALID_MEDIA_TYPE;
        }
        mp4_media_type = (const BLT_Mp4AudioMediaType*)media_type;
        if (mp4_media_type->base.stream_type != BLT_MP4_STREAM_TYPE_AUDIO) {
            return BLT_ERROR_INVALID_MEDIA_TYPE;
        }
        if (BLT_FAILED(AacDecoderConfig_Parse(mp4_media_type->decoder_info,
                                              mp4_media_type->decoder_info_length,
                                              &decoder_config))) {
            return BLT_ERROR_INVALID_MEDIA_FORMAT;
        }
        if (decoder_config.object_type != BLT_AAC_OBJECT_TYPE_AAC_LC &&
            decoder_config.object_type != BLT_AAC_OBJECT_TYPE_SBR) {
            return BLT_ERROR_UNSUPPORTED_CODEC;
        }
        
        /* create the decoder */
        self->helix_decoder = AACInitDecoder();
        if (self->helix_decoder == NULL) return BLT_ERROR_OUT_OF_MEMORY;

        /* configure the decoder */
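        /* the packets carry raw AAC access units (no ADTS framing), so the decoder
           is primed with the channel count and sample rate from the decoder config */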
        ATX_SetMemory(&aac_frame_info, 0, sizeof(aac_frame_info));
        aac_frame_info.nChans       = AacDecoderConfig_GetChannelCount(&decoder_config);
        aac_frame_info.sampRateCore = AacDecoderConfig_GetSampleRate(&decoder_config);
        if (decoder_config.object_type == BLT_AAC_OBJECT_TYPE_AAC_LC ||
            decoder_config.object_type == BLT_AAC_OBJECT_TYPE_SBR) {
            /* the SBR object type uses an AAC LC core, so the profile is LC in both cases */
            aac_frame_info.profile = AAC_PROFILE_LC;
        }
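        /* one decoded frame is BLT_AAC_FRAME_SIZE samples per channel, 2 bytes per 16-bit sample */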
        self->sample_buffer_size = BLT_AAC_FRAME_SIZE*2*aac_frame_info.nChans*2; /* the last *2 is for SBR support */
        AACSetRawBlockParams(self->helix_decoder, 0, &aac_frame_info);        
    }

    {
        unsigned char*   in_buffer;
        int              in_size;
        short*           out_buffer;
        BLT_MediaPacket* out_packet;
        AACFrameInfo     aac_frame_info;
        
        /* create a PCM packet for the output */
        result = BLT_Core_CreateMediaPacket(ATX_BASE(self, BLT_BaseMediaNode).core,
                                            self->sample_buffer_size,
                                            (BLT_MediaType*)&self->output.media_type,
                                            &out_packet);
        if (BLT_FAILED(result)) return result;

        /* copy the timestamp */
        BLT_MediaPacket_SetTimeStamp(out_packet, BLT_MediaPacket_GetTimeStamp(packet));

        /* decode the packet as a frame */
        in_buffer  = BLT_MediaPacket_GetPayloadBuffer(packet);
        in_size    = BLT_MediaPacket_GetPayloadSize(packet);
        out_buffer = (short*)BLT_MediaPacket_GetPayloadBuffer(out_packet);
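        /* note: AACDecode updates in_buffer and in_size to reflect the bytes it consumed */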
        result = AACDecode(self->helix_decoder, &in_buffer, &in_size, out_buffer); 
        if (result != 0) {
            BLT_MediaPacket_Release(out_packet);
            return BLT_ERROR_INVALID_MEDIA_FORMAT;
        }

        /* get the decoded frame info and check that it matches our current media type */
        AACGetLastFrameInfo(self->helix_decoder, &aac_frame_info);
        if (self->output.media_type.channel_count == 0) {
            /* first time: set up our media type */
            self->output.media_type.channel_count   = aac_frame_info.nChans;
            self->output.media_type.sample_rate     = aac_frame_info.sampRateOut;
            self->output.media_type.bits_per_sample = 16;
            self->output.media_type.sample_format   = BLT_PCM_SAMPLE_FORMAT_SIGNED_INT_NE;
            self->output.media_type.channel_mask    = 0;

            /* update the stream info */
            if (ATX_BASE(self, BLT_BaseMediaNode).context) {
                BLT_StreamInfo stream_info;
                stream_info.data_type     = "MPEG-4 AAC";
                stream_info.sample_rate   = aac_frame_info.sampRateOut;
                stream_info.channel_count = aac_frame_info.nChans;
                stream_info.mask = BLT_STREAM_INFO_MASK_DATA_TYPE    |
                                   BLT_STREAM_INFO_MASK_SAMPLE_RATE  |
                                   BLT_STREAM_INFO_MASK_CHANNEL_COUNT;
                BLT_Stream_SetInfo(ATX_BASE(self, BLT_BaseMediaNode).context, &stream_info);
            }

            /* update the packet media type */
            BLT_MediaPacket_SetMediaType(out_packet, (BLT_MediaType*)&self->output.media_type);
        } else {
            /* we've already set up a media type; check that this one is the same */
            if (self->output.media_type.sample_rate   != (unsigned int)aac_frame_info.sampRateOut || 
                self->output.media_type.channel_count != aac_frame_info.nChans) {
                BLT_MediaPacket_Release(out_packet);
                return BLT_ERROR_INVALID_MEDIA_FORMAT;
            }
        }

        /* add to the output packet list */
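        /* (outputSamps counts interleaved 16-bit samples, so the payload size in bytes is outputSamps*2) */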
        BLT_MediaPacket_SetPayloadSize(out_packet, aac_frame_info.outputSamps*2);
        ATX_List_AddData(self->output.packets, out_packet);
    }

    return BLT_SUCCESS;
}
/*----------------------------------------------------------------------
|   Mp4ParserOutput_GetPacket
+---------------------------------------------------------------------*/
BLT_METHOD
Mp4ParserOutput_GetPacket(BLT_PacketProducer* _self,
                          BLT_MediaPacket**   packet)
{
    Mp4ParserOutput* self = ATX_SELF(Mp4ParserOutput, BLT_PacketProducer);

    *packet = NULL;
     
    // if we don't have an input yet, we can't produce packets
    //if (self->parser->input.mp4_file == NULL) {
    //    return BLT_ERROR_PORT_HAS_NO_DATA;
    //}
    
    if (self->track == NULL) {
        return BLT_ERROR_EOS;
    } else {
        // check for end-of-stream
        if (self->sample >= self->track->GetSampleCount()) {
            return BLT_ERROR_EOS;
        }

        // read one sample
        AP4_Sample sample;
        AP4_DataBuffer* sample_buffer = self->sample_buffer;
        AP4_Result result;
        if (self->parser->input.reader) {
            // linear reader mode
            result = self->parser->input.reader->ReadNextSample(self->track->GetId(), sample, *sample_buffer);
            if (AP4_SUCCEEDED(result)) self->sample++;
        } else {
            // normal mode
            result = self->track->ReadSample(self->sample++, sample, *sample_buffer);
        }
        if (AP4_FAILED(result)) {
            ATX_LOG_WARNING_1("ReadSample failed (%d)", result);
            if (result == AP4_ERROR_EOS || result == AP4_ERROR_OUT_OF_RANGE) {
                ATX_LOG_WARNING("incomplete media");
                return BLT_ERROR_INCOMPLETE_MEDIA;
            } else {
                return BLT_ERROR_PORT_HAS_NO_DATA;
            }
        }

        // update the sample description if it has changed
        if (sample.GetDescriptionIndex() != self->sample_description_index) {
            result = Mp4ParserOutput_SetSampleDescription(self, sample.GetDescriptionIndex());
            if (BLT_FAILED(result)) return result;
        }
        
        // decrypt the sample if needed
        if (self->sample_decrypter) {
            self->sample_decrypter->DecryptSampleData(*sample_buffer, *self->sample_decrypted_buffer);
            sample_buffer = self->sample_decrypted_buffer;
        }

        AP4_Size packet_size = sample_buffer->GetDataSize();
        result = BLT_Core_CreateMediaPacket(ATX_BASE(self->parser, BLT_BaseMediaNode).core,
                                            packet_size,
                                            (const BLT_MediaType*)self->media_type,
                                            packet);
        if (BLT_FAILED(result)) return result;
        BLT_MediaPacket_SetPayloadSize(*packet, packet_size);
        void* buffer = BLT_MediaPacket_GetPayloadBuffer(*packet);
        ATX_CopyMemory(buffer, sample_buffer->GetData(), packet_size);

        // set the timestamp
        AP4_UI32 media_timescale = self->track->GetMediaTimeScale();
        if (media_timescale) {
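            // convert the CTS from timescale ticks to microseconds, then split into seconds and nanoseconds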
            AP4_UI64 ts = ((AP4_UI64)sample.GetCts())*1000000;
            ts /= media_timescale;
            BLT_TimeStamp bt_ts = {
                (BLT_Int32)(ts / 1000000),
                (BLT_Int32)((ts % 1000000)*1000)
            };
            BLT_MediaPacket_SetTimeStamp(*packet, bt_ts);
        }

        // set packet flags
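        // self->sample has already been incremented, so a value of 1 means this was the track's first sample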
        if (self->sample == 1) {
            BLT_MediaPacket_SetFlags(*packet, BLT_MEDIA_PACKET_FLAG_START_OF_STREAM);
        }

        return BLT_SUCCESS;
    }
}
/*----------------------------------------------------------------------
|   OsxAudioUnitsOutput_RenderCallback
+---------------------------------------------------------------------*/
static OSStatus     
OsxAudioUnitsOutput_RenderCallback(void*                        inRefCon,
                                   AudioUnitRenderActionFlags*  ioActionFlags,
                                   const AudioTimeStamp*        inTimeStamp,
                                   UInt32                       inBusNumber,
                                   UInt32                       inNumberFrames,
                                   AudioBufferList*             ioData)
{
    OsxAudioUnitsOutput* self = (OsxAudioUnitsOutput*)inRefCon;
    ATX_ListItem*        item;
    unsigned int         requested;
    unsigned char*       out;
    ATX_Boolean          timestamp_measured = ATX_FALSE;
    
    BLT_COMPILER_UNUSED(ioActionFlags);
    BLT_COMPILER_UNUSED(inTimeStamp);
    BLT_COMPILER_UNUSED(inBusNumber);
    BLT_COMPILER_UNUSED(inNumberFrames);
                
    /* sanity check on the parameters */
    if (ioData == NULL || ioData->mNumberBuffers == 0) return 0;
    
    /* in case we have a strange request with more than one buffer, just return silence */
    if (ioData->mNumberBuffers != 1) {
        unsigned int i;
        ATX_LOG_FINEST_1("strange request with %d buffers", 
                         (int)ioData->mNumberBuffers);
        for (i=0; i<ioData->mNumberBuffers; i++) {
            ATX_SetMemory(ioData->mBuffers[i].mData, 0, ioData->mBuffers[i].mDataByteSize);
        }
        return 0;
    }
        
    /* init local variables */
    requested = ioData->mBuffers[0].mDataByteSize;
    out = (unsigned char*)ioData->mBuffers[0].mData;
    ATX_LOG_FINEST_2("request for %d bytes, %d frames", (int)requested, (int)inNumberFrames);

    /* lock the packet queue */
    pthread_mutex_lock(&self->lock);
    
    /* return now if we're paused */
    //if (self->paused) goto end;
    
    /* abort early if we have no packets */
    if (ATX_List_GetItemCount(self->packet_queue) == 0) goto end;
        
    /* fill as much as we can */
    while (requested && (item = ATX_List_GetFirstItem(self->packet_queue))) {
        BLT_MediaPacket*        packet = ATX_ListItem_GetData(item);
        const BLT_PcmMediaType* media_type;
        BLT_Size                payload_size;
        BLT_Size                chunk_size;
        BLT_TimeStamp           chunk_duration;
        BLT_TimeStamp           packet_ts;
        unsigned int            bytes_per_frame;
        unsigned int            sample_rate;
        
        /* get the packet info */
        BLT_MediaPacket_GetMediaType(packet, (const BLT_MediaType**)&media_type);
        packet_ts = BLT_MediaPacket_GetTimeStamp(packet);
        bytes_per_frame = media_type->channel_count*media_type->bits_per_sample/8;
        sample_rate = media_type->sample_rate;
        
        /* record the timestamp if we have not already done so */
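        /* (the snapshot pairs the first rendered packet's timestamp with the current
            host time and restarts the rendered-duration accumulator) */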
        if (!timestamp_measured) {
            self->media_time_snapshot.rendered_packet_ts = packet_ts;
            self->media_time_snapshot.rendered_host_time = 
                AudioConvertHostTimeToNanos(AudioGetCurrentHostTime());
            BLT_TimeStamp_Set(self->media_time_snapshot.rendered_duration, 0, 0);
            timestamp_measured = ATX_TRUE;
            
            ATX_LOG_FINEST_2("rendered TS: packet ts=%lld, host ts=%lld",
                             BLT_TimeStamp_ToNanos(packet_ts),
                             self->media_time_snapshot.rendered_host_time);
        }
         
        /* compute how much to copy from this packet */
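        /* GetPayloadBuffer and GetPayloadSize account for the payload offset set below,
           so a partially consumed packet resumes where the previous callback left off */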
        payload_size = BLT_MediaPacket_GetPayloadSize(packet);
        if (payload_size <= requested) {
            /* copy the entire payload and remove the packet from the queue */
            chunk_size = payload_size;
            ATX_CopyMemory(out, BLT_MediaPacket_GetPayloadBuffer(packet), chunk_size);
            ATX_List_RemoveItem(self->packet_queue, item);
            packet = NULL;
            media_type = NULL;
            ATX_LOG_FINER_1("media packet fully consumed, %d left in queue",
                            ATX_List_GetItemCount(self->packet_queue));
        } else {
            /* only copy a portion of the payload */
            chunk_size = requested;
            ATX_CopyMemory(out, BLT_MediaPacket_GetPayloadBuffer(packet), chunk_size);            
        }
        
        /* update the counters */
        requested -= chunk_size;
        out       += chunk_size;
        
        /* update the media time snapshot */
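        /* convert the number of frames just copied into a duration at the stream's sample rate */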
        if (bytes_per_frame) {
            unsigned int frames_in_chunk = chunk_size/bytes_per_frame;
            chunk_duration = BLT_TimeStamp_FromSamples(frames_in_chunk, sample_rate);
        } else {
            BLT_TimeStamp_Set(chunk_duration, 0, 0);
        }
        self->media_time_snapshot.rendered_duration = 
            BLT_TimeStamp_Add(self->media_time_snapshot.rendered_duration, chunk_duration);
        
        /* update the packet unless we're done with it */
        if (packet) {
            /* update the packet offset and timestamp */
            BLT_MediaPacket_SetPayloadOffset(packet, BLT_MediaPacket_GetPayloadOffset(packet)+chunk_size);
            BLT_MediaPacket_SetTimeStamp(packet, BLT_TimeStamp_Add(packet_ts, chunk_duration));
        }
    }
   
end:
    /* fill whatever is left with silence */    
    if (requested) {
        ATX_LOG_FINEST_1("filling with %d bytes of silence", requested);
        ATX_SetMemory(out, 0, requested);
    }
    
    pthread_mutex_unlock(&self->lock);
        
    return 0;
}