Example #1
static av_cold int ra144_decode_init(AVCodecContext * avctx)
{
    RA144Context *ractx = avctx->priv_data;

    ractx->avctx = avctx;
    ff_audiodsp_init(&ractx->adsp);

    /* Point the two working LPC coefficient buffers at the context's tables. */
    ractx->lpc_coef[0] = ractx->lpc_tables[0];
    ractx->lpc_coef[1] = ractx->lpc_tables[1];

    /* RealAudio 1.0 (14.4K) is always mono; samples are decoded as signed 16-bit. */
    avctx->channels       = 1;
    avctx->channel_layout = AV_CH_LAYOUT_MONO;
    avctx->sample_fmt     = AV_SAMPLE_FMT_S16;

    return 0;
}
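
For reference, an init callback like this is wired into FFmpeg through the codec's registration table. The sketch below follows the older AVCodec-based registration style matching this code; the exact struct fields and the ra144_decode_frame callback named here are assumptions about the surrounding file, not part of the snippet above.

/* Sketch: how ra144_decode_init would typically be referenced from the codec
   table. Field names follow the older AVCodec registration API and the
   ra144_decode_frame callback is assumed, not shown above. */
AVCodec ff_ra_144_decoder = {
    .name           = "real_144",
    .long_name      = NULL_IF_CONFIG_SMALL("RealAudio 1.0 (14.4K)"),
    .type           = AVMEDIA_TYPE_AUDIO,
    .id             = AV_CODEC_ID_RA_144,
    .priv_data_size = sizeof(RA144Context),
    .init           = ra144_decode_init,
    .decode         = ra144_decode_frame,
    .capabilities   = AV_CODEC_CAP_DR1,
};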
Example #2
/**
 * Cook initialization
 *
 * @param avctx     pointer to the AVCodecContext
 */
static av_cold int cook_decode_init(AVCodecContext *avctx)
{
    COOKContext *q = avctx->priv_data;
    const uint8_t *edata_ptr = avctx->extradata;
    const uint8_t *edata_ptr_end = edata_ptr + avctx->extradata_size;
    int extradata_size = avctx->extradata_size;
    int s = 0;
    unsigned int channel_mask = 0;
    int samples_per_frame = 0;
    int ret;
    q->avctx = avctx;

    /* Take care of the codec specific extradata. */
    if (extradata_size < 8) {
        av_log(avctx, AV_LOG_ERROR, "Necessary extradata missing!\n");
        return AVERROR_INVALIDDATA;
    }
    av_log(avctx, AV_LOG_DEBUG, "codecdata_length=%d\n", avctx->extradata_size);

    /* Take data from the AVCodecContext (RM container). */
    if (!avctx->channels) {
        av_log(avctx, AV_LOG_ERROR, "Invalid number of channels\n");
        return AVERROR_INVALIDDATA;
    }

    /* Initialize RNG. */
    av_lfg_init(&q->random_state, 0);

    ff_audiodsp_init(&q->adsp);

    while (edata_ptr < edata_ptr_end) {
        /* 8 bytes for mono, 16 for stereo, ? for multichannel.
           Swap to the right endianness so we don't need to care later on. */
        if (extradata_size >= 8) {
            q->subpacket[s].cookversion = bytestream_get_be32(&edata_ptr);
            samples_per_frame           = bytestream_get_be16(&edata_ptr);
            q->subpacket[s].subbands = bytestream_get_be16(&edata_ptr);
            extradata_size -= 8;
        }
        if (extradata_size >= 8) {
            bytestream_get_be32(&edata_ptr);    // Unknown unused
            q->subpacket[s].js_subband_start = bytestream_get_be16(&edata_ptr);
            if (q->subpacket[s].js_subband_start >= 51) {
                av_log(avctx, AV_LOG_ERROR, "js_subband_start %d is too large\n", q->subpacket[s].js_subband_start);
                return AVERROR_INVALIDDATA;
            }

            q->subpacket[s].js_vlc_bits = bytestream_get_be16(&edata_ptr);
            extradata_size -= 8;
        }

        /* Initialize extradata related variables. */
        q->subpacket[s].samples_per_channel = samples_per_frame / avctx->channels;
        q->subpacket[s].bits_per_subpacket = avctx->block_align * 8;

        /* Initialize default data states. */
        q->subpacket[s].log2_numvector_size = 5;
        q->subpacket[s].total_subbands = q->subpacket[s].subbands;
        q->subpacket[s].num_channels = 1;

        /* Initialize version-dependent variables */

        av_log(avctx, AV_LOG_DEBUG, "subpacket[%i].cookversion=%x\n", s,
               q->subpacket[s].cookversion);
        q->subpacket[s].joint_stereo = 0;
        switch (q->subpacket[s].cookversion) {
        case MONO:
            if (avctx->channels != 1) {
                avpriv_request_sample(avctx, "Container channels != 1");
                return AVERROR_PATCHWELCOME;
            }
            av_log(avctx, AV_LOG_DEBUG, "MONO\n");
            break;
        case STEREO:
            if (avctx->channels != 1) {
                q->subpacket[s].bits_per_subpdiv = 1;
                q->subpacket[s].num_channels = 2;
            }
            av_log(avctx, AV_LOG_DEBUG, "STEREO\n");
            break;
        case JOINT_STEREO:
            if (avctx->channels != 2) {
                avpriv_request_sample(avctx, "Container channels != 2");
                return AVERROR_PATCHWELCOME;
            }
            av_log(avctx, AV_LOG_DEBUG, "JOINT_STEREO\n");
            if (avctx->extradata_size >= 16) {
                q->subpacket[s].total_subbands = q->subpacket[s].subbands +
                                                 q->subpacket[s].js_subband_start;
                q->subpacket[s].joint_stereo = 1;
                q->subpacket[s].num_channels = 2;
            }
            if (q->subpacket[s].samples_per_channel > 256) {
                q->subpacket[s].log2_numvector_size = 6;
            }
            if (q->subpacket[s].samples_per_channel > 512) {
                q->subpacket[s].log2_numvector_size = 7;
            }
            break;
        case MC_COOK:
            av_log(avctx, AV_LOG_DEBUG, "MULTI_CHANNEL\n");
            if (extradata_size >= 4)
                channel_mask |= q->subpacket[s].channel_mask = bytestream_get_be32(&edata_ptr);

            if (av_get_channel_layout_nb_channels(q->subpacket[s].channel_mask) > 1) {
                q->subpacket[s].total_subbands = q->subpacket[s].subbands +
                                                 q->subpacket[s].js_subband_start;
                q->subpacket[s].joint_stereo = 1;
                q->subpacket[s].num_channels = 2;
                q->subpacket[s].samples_per_channel = samples_per_frame >> 1;

                if (q->subpacket[s].samples_per_channel > 256) {
                    q->subpacket[s].log2_numvector_size = 6;
                }
                if (q->subpacket[s].samples_per_channel > 512) {
                    q->subpacket[s].log2_numvector_size = 7;
                }
            } else
                q->subpacket[s].samples_per_channel = samples_per_frame;

            break;
        default:
            avpriv_request_sample(avctx, "Cook version %d",
                                  q->subpacket[s].cookversion);
            return AVERROR_PATCHWELCOME;
        }