// Rebuilds an AVCC (AVCDecoderConfigurationRecord)-style header from annex-B
// formatted codec-specific data in csd0, splitting it on 0x00000001 start
// codes and emitting each parameter set as a 16-bit-length-prefixed entry.
// NOTE(review): csd1 is not referenced in the visible portion of this
// function — presumably picture parameter sets are appended further down;
// the definition is truncated in this view (no return/closing brace).
// NOTE(review): profile (0x64) and level (0xd) are hard-coded rather than
// taken from the SPS — confirm against callers.
static size_t reassembleAVCC(const sp<ABuffer> &csd0, const sp<ABuffer> csd1, char *avcc) {

    avcc[0] = 1;        // version
    avcc[1] = 0x64;     // profile
    avcc[2] = 0;        // unused (?)
    avcc[3] = 0xd;      // level
    avcc[4] = 0xff;     // reserved+size

    size_t i = 0;                // read cursor into csd0
    int numparams = 0;           // parameter sets emitted so far
    int lastparamoffset = 0;     // start of the parameter set being scanned
    int avccidx = 6;             // write cursor into avcc (slot 5 filled later)
    do {
        // A start code (or running out of room for one) terminates the
        // current parameter set.
        if (i >= csd0->size() - 4 ||
                memcmp(csd0->data() + i, "\x00\x00\x00\x01", 4) == 0) {
            if (i >= csd0->size() - 4) {
                // there can't be another param here, so use all the rest
                i = csd0->size();
            }
            // NOTE(review): %d with size_t/int mix — should be %zu for i.
            ALOGV("block at %d, last was %d", i, lastparamoffset);
            if (lastparamoffset > 0) {
                // Emit 16-bit big-endian length followed by the raw bytes.
                int size = i - lastparamoffset;
                avcc[avccidx++] = size >> 8;
                avcc[avccidx++] = size & 0xff;
                memcpy(avcc+avccidx, csd0->data() + lastparamoffset, size);
                avccidx += size;
                numparams++;
            }
            i += 4;              // skip over the start code itself
            lastparamoffset = i;
        } else {
            i++;
        }
    } while(i < csd0->size());
// Runs the saturation kernel over one frame: stage the source pixels into
// the input allocation, execute the script, then copy the processed pixels
// into the caller-supplied output buffer. Always returns OK.
status_t SaturationFilter::processBuffers(
        const sp<ABuffer> &srcBuffer, const sp<ABuffer> &outBuffer) {
    const size_t pixelCount = mWidth * mHeight;

    mAllocIn->copy1DRangeFrom(0, pixelCount, srcBuffer->data());
    mScript->forEach_root(mAllocIn, mAllocOut);
    mAllocOut->copy1DRangeTo(0, pixelCount, outBuffer->data());

    return OK;
}
void AH263Assembler::insertPacket(const sp<ABuffer> &buffer){
    size_t skip;
    if ((skip = getOffsetOfHeader(buffer)) == 1){
        ALOGE("Malformed packet in insertPacket");
        return;
    }

    buffer->setRange(buffer->offset() + skip, buffer->size() - skip);

    if (skip == 0) {
        buffer->data()[0] = 0x00;
        buffer->data()[1] = 0x00;
    }
    uint32_t seqNum = (uint32_t)buffer->int32Data();
    List<sp<ABuffer> >::iterator it = mPackets.begin();
    while (it != mPackets.end() && (uint32_t)(*it)->int32Data() < seqNum){
        ++it;
    }

    if (it != mPackets.end() && (uint32_t)(*it)->int32Data() == seqNum) {
        ALOGE("Discarding duplicate buffer in mPackets");
        return;
    }
    ALOGV("insert the buffer into the current packets");
    mPackets.insert(it, buffer);
}
Example #4
0
// Returns true if the first decisive NAL unit of the access unit marks a
// reference frame: type 5 (IDR) is always a reference; for type 1 (non-IDR
// slice) the nal_ref_idc bits decide. Defensive variant: tolerates NULL
// data and zero-sized NAL units by returning false.
// NOTE(review): definition is truncated in this view — the fall-through
// return after the loop is not visible.
bool IsAVCReferenceFrame(const sp<ABuffer> &accessUnit) {
    const uint8_t *data = accessUnit->data();
    size_t size = accessUnit->size();
    if (data == NULL) {
        ALOGE("IsAVCReferenceFrame: called on NULL data (%p, %zu)", accessUnit.get(), size);
        return false;
    }

    const uint8_t *nalStart;
    size_t nalSize;
    while (getNextNALUnit(&data, &size, &nalStart, &nalSize, true) == OK) {
        if (nalSize == 0) {
            ALOGE("IsAVCReferenceFrame: invalid nalSize: 0 (%p, %zu)", accessUnit.get(), size);
            return false;
        }

        // Low 5 bits of the first NAL byte are nal_unit_type.
        unsigned nalType = nalStart[0] & 0x1f;

        if (nalType == 5) {
            return true;
        } else if (nalType == 1) {
            // nal_ref_idc != 0 means this slice is used for reference.
            unsigned nal_ref_idc = (nalStart[0] >> 5) & 3;
            return nal_ref_idc != 0;
        }
    }
// Wraps raw codec-specific data in an ESDS (ES_Descriptor) payload. The
// fixed descriptor framing adds exactly 25 bytes on top of the csd bytes.
static sp<ABuffer> MakeMPEGVideoESDS(const sp<ABuffer> &csd) {
    const size_t csdSize = csd->size();
    sp<ABuffer> esds = new ABuffer(csdSize + 25);

    uint8_t *ptr = esds->data();

    *ptr++ = 0x03;                      // ES_Descriptor tag
    EncodeSize14(&ptr, 22 + csdSize);   // remaining descriptor size

    *ptr++ = 0x00;                      // ES_ID (2 bytes)
    *ptr++ = 0x00;

    *ptr++ = 0x00;                      // streamDependenceFlag, URL_Flag, OCRstreamFlag

    *ptr++ = 0x04;                      // DecoderConfigDescriptor tag
    EncodeSize14(&ptr, 16 + csdSize);

    // objectTypeIndication 0x40 (ISO/IEC 14496-3) — NOTE(review): unusual
    // for a video ESDS; kept byte-identical to the original behavior.
    *ptr++ = 0x40;

    // streamType/bufferSizeDB/maxBitrate/avgBitrate: all zero (12 bytes).
    memset(ptr, 0, 12);
    ptr += 12;

    *ptr++ = 0x05;                      // DecoderSpecificInfo tag
    EncodeSize14(&ptr, csdSize);

    memcpy(ptr, csd->data(), csdSize);

    return esds;
}
// Coalesces small audio access units into a larger aggregate buffer to cut
// per-buffer overhead. Returns a buffer ready to send downstream, or NULL
// while still accumulating. A NULL accessUnit flushes and returns whatever
// has been aggregated so far. When the incoming unit doesn't fit (or a
// timestamped unit arrives after untimestamped data), it is parked in
// mPendingAudioAccessUnit and the current aggregate is returned.
sp<ABuffer> NuPlayer::DecoderPassThrough::aggregateBuffer(
        const sp<ABuffer> &accessUnit) {
    sp<ABuffer> aggregate;

    if (accessUnit == NULL) {
        // accessUnit is saved to mPendingAudioAccessUnit
        // return current mAggregateBuffer
        aggregate = mAggregateBuffer;
        mAggregateBuffer.clear();
        return aggregate;
    }

    size_t smallSize = accessUnit->size();
    if ((mAggregateBuffer == NULL)
            // Don't bother if only room for a few small buffers.
            && (smallSize < (kAggregateBufferSizeBytes / 3))) {
        // Create a larger buffer for combining smaller buffers from the extractor.
        mAggregateBuffer = new ABuffer(kAggregateBufferSizeBytes);
        mAggregateBuffer->setRange(0, 0); // start empty
    }

    if (mAggregateBuffer != NULL) {
        int64_t timeUs;
        int64_t dummy;
        bool smallTimestampValid = accessUnit->meta()->findInt64("timeUs", &timeUs);
        bool bigTimestampValid = mAggregateBuffer->meta()->findInt64("timeUs", &dummy);
        // Will the smaller buffer fit?
        size_t bigSize = mAggregateBuffer->size();
        size_t roomLeft = mAggregateBuffer->capacity() - bigSize;
        // Should we save this small buffer for the next big buffer?
        // If the first small buffer did not have a timestamp then save
        // any buffer that does have a timestamp until the next big buffer.
        if ((smallSize > roomLeft)
            || (!bigTimestampValid && (bigSize > 0) && smallTimestampValid)) {
            mPendingAudioErr = OK;
            mPendingAudioAccessUnit = accessUnit;
            aggregate = mAggregateBuffer;
            mAggregateBuffer.clear();
        } else {
            // Grab time from first small buffer if available.
            if ((bigSize == 0) && smallTimestampValid) {
                mAggregateBuffer->meta()->setInt64("timeUs", timeUs);
            }
            // Append small buffer to the bigger buffer.
            memcpy(mAggregateBuffer->base() + bigSize, accessUnit->data(), smallSize);
            bigSize += smallSize;
            mAggregateBuffer->setRange(0, bigSize);

            ALOGV("feedDecoderInputData() smallSize = %zu, bigSize = %zu, capacity = %zu",
                    smallSize, bigSize, mAggregateBuffer->capacity());
        }
    } else {
        // decided not to aggregate
        aggregate = accessUnit;
    }

    return aggregate;
}
// Copies the next ready sample of the earliest-timestamped selected track
// into |buffer|, adjusting its range to the sample size. For Vorbis tracks
// a trailing int32 page-sample count (-1 when unknown) is appended after
// the payload. Returns ERROR_END_OF_STREAM when no track has data, or
// -ENOMEM when the caller's buffer is too small.
status_t NuMediaExtractor::readSampleData(const sp<ABuffer> &buffer) {
    Mutex::Autolock autoLock(mLock);

    ssize_t minIndex = fetchTrackSamples();

    if (minIndex < 0) {
        return ERROR_END_OF_STREAM;
    }

    TrackInfo *info = &mSelectedTracks.editItemAt(minIndex);

    size_t sampleSize = info->mSample->range_length();

    if (info->mTrackFlags & kIsVorbis) {
        // Each sample's data is suffixed by the number of page samples
        // or -1 if not available.
        sampleSize += sizeof(int32_t);
    }

    // Capacity check happens before any copying so the buffer is untouched
    // on failure.
    if (buffer->capacity() < sampleSize) {
        return -ENOMEM;
    }

    const uint8_t *src =
        (const uint8_t *)info->mSample->data()
            + info->mSample->range_offset();

    memcpy((uint8_t *)buffer->data(), src, info->mSample->range_length());

    if (info->mTrackFlags & kIsVorbis) {
        int32_t numPageSamples;
        if (!info->mSample->meta_data()->findInt32(
                    kKeyValidSamples, &numPageSamples)) {
            numPageSamples = -1;
        }

        // Append the count in native byte order directly after the payload.
        memcpy((uint8_t *)buffer->data() + info->mSample->range_length(),
               &numPageSamples,
               sizeof(numPageSamples));
    }

    buffer->setRange(0, sampleSize);

    return OK;
}
// static
// Returns true iff every byte of the access unit's payload is zero.
bool Converter::IsSilence(const sp<ABuffer> &accessUnit) {
    const uint8_t *bytes = accessUnit->data();
    const size_t length = accessUnit->size();

    for (size_t i = 0; i < length; ++i) {
        if (bytes[i] != 0) {
            return false;
        }
    }

    return true;
}
Example #9
0
// Wraps an existing ABuffer without taking ownership of its bytes
// (mOwnsData = false): mData/mSize alias the ABuffer's storage and mBuffer
// holds a strong reference keeping it alive. The active range initially
// covers the whole buffer.
MediaBuffer::MediaBuffer(const sp<ABuffer> &buffer)
    : mObserver(NULL),
      mRefCount(0),
      mData(buffer->data()),
      mSize(buffer->size()),
      mRangeOffset(0),
      mRangeLength(mSize),
      mBuffer(buffer),
      mOwnsData(false),
      mMetaData(new MetaData),
      mOriginal(NULL) {
}
// Returns a new buffer holding mCSD0 (codec-specific data) immediately
// followed by the access unit's payload, carrying over the mandatory
// "timeUs" metadata entry. Asserts that CSD is present.
sp<ABuffer> Converter::prependCSD(const sp<ABuffer> &accessUnit) const {
    CHECK(mCSD0 != NULL);

    const size_t csdSize = mCSD0->size();
    sp<ABuffer> combined = new ABuffer(accessUnit->size() + csdSize);

    // CSD first, then the original access-unit bytes.
    memcpy(combined->data(), mCSD0->data(), csdSize);
    memcpy(combined->data() + csdSize, accessUnit->data(), accessUnit->size());

    // The timestamp must survive onto the combined buffer.
    int64_t timeUs;
    CHECK(accessUnit->meta()->findInt64("timeUs", &timeUs));
    combined->meta()->setInt64("timeUs", timeUs);

    return combined;
}
// Appends a buffer to the live-stream queue under mLock and wakes any
// readers blocked on mCondition. Buffers arriving after a terminal error
// (mFinalResult != OK) are silently dropped. With SAVE_BACKUP enabled the
// raw bytes are also mirrored to a debug capture file.
void LiveDataSource::queueBuffer(const sp<ABuffer> &buffer) {
    Mutex::Autolock autoLock(mLock);

    if (mFinalResult != OK) {
        return;
    }

#if SAVE_BACKUP
    if (mBackupFile != NULL) {
        // Asserts the full buffer was written to the backup file.
        CHECK_EQ(fwrite(buffer->data(), 1, buffer->size(), mBackupFile),
                 buffer->size());
    }
#endif

    mBufferQueue.push_back(buffer);
    mCondition.broadcast();
}
// Returns true if the first decisive NAL unit marks a reference frame:
// type 5 (IDR) always does; for type 1 (non-IDR slice) nal_ref_idc != 0
// decides. Unlike the defensive variant above, this one CHECK-aborts on a
// zero-sized NAL unit.
// NOTE(review): definition is truncated in this view — the fall-through
// return after the loop is not visible.
bool IsAVCReferenceFrame(const sp<ABuffer> &accessUnit) {
    const uint8_t *data = accessUnit->data();
    size_t size = accessUnit->size();

    const uint8_t *nalStart;
    size_t nalSize;
    while (getNextNALUnit(&data, &size, &nalStart, &nalSize, true) == OK) {
        CHECK_GT(nalSize, 0u);

        // Low 5 bits of the first NAL byte are nal_unit_type.
        unsigned nalType = nalStart[0] & 0x1f;

        if (nalType == 5) {
            return true;
        } else if (nalType == 1) {
            unsigned nal_ref_idc = (nalStart[0] >> 5) & 3;
            return nal_ref_idc != 0;
        }
    }
Example #13
0
// Queues one access unit for consumers, tracking the most recent timestamp
// and the metadata of the latest-enqueued buffer. Vendor (#ifndef
// ANDROID_DEFAULT_CODE) additions: drop everything after EOS, and for AVC
// optionally discard units until the next IDR frame. Damaged units are
// always dropped.
// NOTE(review): mIsEOS, mScanForIDR and mLastQueuedTimeUs are read/written
// before mLock is acquired — confirm the caller guarantees single-threaded
// enqueue, otherwise this looks racy.
void AnotherPacketSource::queueAccessUnit(const sp<ABuffer> &buffer) {
    int32_t damaged;
#ifndef ANDROID_DEFAULT_CODE
    if(mIsEOS)
    {
        return ;
    }
    // mtk80902: porting from APacketSource
    // wait IDR for 264
    if (mIsAVC && mNeedScanForIDR && mScanForIDR) {
        // nal_unit_type 5 == IDR slice; anything else is skipped.
        if ((buffer->data()[0] & 0x1f) != 5) {
            ALOGD("skipping AU while scanning for next IDR frame.");
            return;
        }
        mScanForIDR = false;
    }
#endif
    if (buffer->meta()->findInt32("damaged", &damaged) && damaged) {
        // LOG(VERBOSE) << "discarding damaged AU";
        return;
    }

    // Every queued unit must carry a timestamp.
    int64_t lastQueuedTimeUs;
    CHECK(buffer->meta()->findInt64("timeUs", &lastQueuedTimeUs));
    mLastQueuedTimeUs = lastQueuedTimeUs;
    ALOGV("queueAccessUnit timeUs=%lld us (%.2f secs)", mLastQueuedTimeUs, mLastQueuedTimeUs / 1E6);

    Mutex::Autolock autoLock(mLock);
    mBuffers.push_back(buffer);
    mCondition.signal();

    // Track the meta of the buffer with the greatest timestamp seen so far
    // (buffers may arrive out of presentation order).
    if (!mLatestEnqueuedMeta.get()) {
        mLatestEnqueuedMeta = buffer->meta();
    } else {
        int64_t latestTimeUs = 0;
        CHECK(mLatestEnqueuedMeta->findInt64("timeUs", &latestTimeUs));
        if (lastQueuedTimeUs > latestTimeUs) {
            mLatestEnqueuedMeta = buffer->meta();
        }
    }
}
// Returns true if the access unit contains at least one IDR NAL unit
// (nal_unit_type == 5). CHECK-aborts on a zero-sized NAL unit.
bool IsIDR(const sp<ABuffer> &buffer) {
    const uint8_t *data = buffer->data();
    size_t size = buffer->size();

    const uint8_t *nalStart;
    size_t nalSize;

    // Walk every NAL unit in the access unit, stopping at the first IDR.
    while (getNextNALUnit(&data, &size, &nalStart, &nalSize, true) == OK) {
        CHECK_GT(nalSize, 0u);

        if ((nalStart[0] & 0x1f) == 5) {
            return true;
        }
    }

    return false;
}
Example #15
0
// Converts one buffer of PCM samples between encodings (u8 / i16 / float)
// using the audio_utils memcpy_to_* primitives. The divisor on src->size()
// is the source sample width in bytes, so each call converts the full
// sample count. Returns INVALID_OPERATION for unsupported encoding pairs.
// NOTE(review): assumes tgt was sized for the converted output — TODO
// confirm the caller guarantees sufficient capacity.
status_t AudioConverter::safeConvert(const sp<MediaCodecBuffer> &src, sp<MediaCodecBuffer> &tgt) {
    if (mTo == kAudioEncodingPcm8bit && mFrom == kAudioEncodingPcm16bit) {
        memcpy_to_u8_from_i16((uint8_t*)tgt->base(), (const int16_t*)src->data(), src->size() / 2);
    } else if (mTo == kAudioEncodingPcm8bit && mFrom == kAudioEncodingPcmFloat) {
        memcpy_to_u8_from_float((uint8_t*)tgt->base(), (const float*)src->data(), src->size() / 4);
    } else if (mTo == kAudioEncodingPcm16bit && mFrom == kAudioEncodingPcm8bit) {
        memcpy_to_i16_from_u8((int16_t*)tgt->base(), (const uint8_t*)src->data(), src->size());
    } else if (mTo == kAudioEncodingPcm16bit && mFrom == kAudioEncodingPcmFloat) {
        memcpy_to_i16_from_float((int16_t*)tgt->base(), (const float*)src->data(), src->size() / 4);
    } else if (mTo == kAudioEncodingPcmFloat && mFrom == kAudioEncodingPcm8bit) {
        memcpy_to_float_from_u8((float*)tgt->base(), (const uint8_t*)src->data(), src->size());
    } else if (mTo == kAudioEncodingPcmFloat && mFrom == kAudioEncodingPcm16bit) {
        memcpy_to_float_from_i16((float*)tgt->base(), (const int16_t*)src->data(), src->size() / 2);
    } else {
        return INVALID_OPERATION;
    }
    return OK;
}
// Parses the RFC 4629 H.263 payload header at the start of |buffer| and
// returns the number of bytes to strip before the video payload: 0 when
// the P bit is set, 2 otherwise. A return value of 1 can never occur for a
// valid header and therefore flags a malformed/unsupported packet.
size_t AH263Assembler::getOffsetOfHeader(const sp<ABuffer> buffer) {
    static const size_t kMalformed = 1;

    if (buffer->size() < 2) {
        ALOGW("Packet size is less than 2 bytes");
        return kMalformed;
    }

    const unsigned payloadHeader = U16_AT(buffer->data());
    const unsigned P = (payloadHeader >> 10) & 1;
    const unsigned V = (payloadHeader >> 9) & 1;
    const unsigned PLEN = (payloadHeader >> 3) & 0x3f;
    const unsigned PEBIT = payloadHeader & 7;

    // Only the plain header form is supported: no VRC byte (V == 0) and no
    // extra picture header (PLEN == 0, PEBIT == 0).
    if (V != 0u) {
        ALOGW("Packet discarded due to VRC (V != 0)");
        return kMalformed;
    }
    if (PLEN != 0u) {
        ALOGW("Packet discarded (PLEN != 0)");
        return kMalformed;
    }
    if (PEBIT != 0u) {
        ALOGW("Packet discarded (PEBIT != 0)");
        return kMalformed;
    }

    // V and PLEN are both zero here, so this reduces to P ? 0 : 2.
    return V + PLEN + (P ? 0 : 2);
}
Example #17
0
// Scans the extractor's buffer for MPEG-2 video start codes (0x000001xx).
// On the first sequence header (0xb3, with or without the 0xb5 extension)
// it derives width/height, builds an ESDS and returns GETFORMATDONE; once
// the format is known, the data between two picture starts (0x00, or 0xB7
// sequence_end per ALPS00473447) is emitted as one access unit
// (GETAUDONE). Consumed bytes are shifted out of the buffer via memmove.
// NOTE(review): definition is truncated in this view — the return after
// the scan loop is not visible. lastGOPOff is assigned but never used
// here, and the ALOGV uses %d for a size_t offset.
status_t ESExtractor::Track::dequeueAccessUnitMPEGVideo(sp<ABuffer> &mAccessUnit) {
    const uint8_t *data = mExtractor->mBuffer->data();
    size_t size = mExtractor->mBuffer->size();
    bool sawPictureStart = false;
    // Last three start codes seen, newest last.
    int pprevStartCode = -1;
    int prevStartCode = -1;
    int currentStartCode = -1;

    size_t offset = 0;
    size_t lastGOPOff = -1;

    while (offset + 3 < size) {
        if (U24_AT(data + offset) != 0x000001) {
            ++offset;
            continue;
        }
        pprevStartCode = prevStartCode;
        prevStartCode = currentStartCode;
        currentStartCode = data[offset + 3];
        ALOGV("pprevStartCode:0x%x,prevStartCode:0x%x,currentStartCode:0x%x,offset:%d",pprevStartCode,prevStartCode,currentStartCode,offset);

        // Before the format is known, discard everything preceding the
        // first sequence header so it starts at offset 0.
        if (currentStartCode == 0xb3 && mQueueFormat == NULL) {
            memmove(mExtractor->mBuffer->data(), mExtractor->mBuffer->data() + offset, size - offset);
            size -= offset;
            offset = 0;
            mExtractor->mBuffer->setRange(0, size);
        }

        if ((prevStartCode == 0xb3 && currentStartCode != 0xb5)
                || (pprevStartCode == 0xb3 && prevStartCode == 0xb5)) {
            // seqHeader without/with extension

            if (mQueueFormat == NULL) {
                CHECK_GE(size, 7u);

                // Dimensions packed into bytes 4..6 of the sequence header.
                unsigned width =
                    (data[4] << 4) | data[5] >> 4;

                unsigned height =
                    ((data[5] & 0x0f) << 8) | data[6];

                mQueueFormat = new MetaData;
                mQueueFormat->setCString(kKeyMIMEType, MEDIA_MIMETYPE_VIDEO_MPEG2);
                mQueueFormat->setInt32(kKeyWidth, (int32_t)width);
                mQueueFormat->setInt32(kKeyHeight, (int32_t)height);

                ALOGI("found MPEG2 video codec config (%d x %d)", width, height);

                // The bytes up to here are the codec-specific data.
                sp<ABuffer> csd = new ABuffer(offset);
                memcpy(csd->data(), data, offset);

                memmove(mExtractor->mBuffer->data(),
                        mExtractor->mBuffer->data() + offset,
                        mExtractor->mBuffer->size() - offset);

                mExtractor->mBuffer->setRange(0, mExtractor->mBuffer->size() - offset);
                size -= offset;
                offset = 0;

                sp<ABuffer> esds = MakeMPEGVideoESDS(csd);
                mQueueFormat->setData(kKeyESDS, kTypeESDS, esds->data(), esds->size());
                ALOGV("dequeueAccessUnitMPEGVideo:get mQueueFormat,return GETFORMATDONE");
                return GETFORMATDONE;
            }
        }

        if (mQueueFormat != NULL && (currentStartCode == 0x00 || (sawPictureStart && currentStartCode == 0xB7))) { //ALPS00473447
            // Picture start
            ALOGV("dequeueAccessUnitMPEGVideo:Picture start");
            if (!sawPictureStart) {
                sawPictureStart = true;
            } else {
                // Second picture start: everything before it is one AU.
                mAccessUnit = new ABuffer(offset);
                memcpy(mAccessUnit->data(), data, offset);

                memmove(mExtractor->mBuffer->data(),
                        mExtractor->mBuffer->data() + offset,
                        mExtractor->mBuffer->size() - offset);

                mExtractor->mBuffer->setRange(0, mExtractor->mBuffer->size() - offset);

                offset = 0;
                mAccessUnit->meta()->setInt32("invt", (int32_t)true);
                mAccessUnit->meta()->setInt64("timeUs", 0);
                ALOGV("dequeueAccessUnitMPEGVideo:return OPCONTINUE");
                return GETAUDONE;
            }
        }
        ++offset;
    }
Example #18
0
// Strips LATM framing from an RTP payload, concatenating the raw AAC
// subframe payloads into a new buffer. Only the no-mux-config case is
// implemented. For frameLengthType 0 the length is the sum of 0xff-
// continued length bytes; type 2 is reserved (TRESPASS); otherwise a fixed
// frame length is used. Optional "other data" after each subframe is
// skipped (must be byte-aligned).
sp<ABuffer> AMPEG4AudioAssembler::removeLATMFraming(const sp<ABuffer> &buffer) {
    CHECK(!mMuxConfigPresent);  // XXX to be implemented

    // Output can never exceed the input size, so one allocation suffices.
    sp<ABuffer> out = new ABuffer(buffer->size());
    out->setRange(0, 0);

    size_t offset = 0;
    uint8_t *ptr = buffer->data();

    // mNumSubFrames is a count of *extra* subframes, hence <=.
    for (size_t i = 0; i <= mNumSubFrames; ++i) {
        // parse PayloadLengthInfo

        unsigned payloadLength = 0;

        switch (mFrameLengthType) {
            case 0:
            {
                // Length is encoded as a run of bytes; 0xff means
                // "add 255 and continue".
                unsigned muxSlotLengthBytes = 0;
                unsigned tmp;
                do {
                    if (offset >= buffer->size()) {
                        ALOGW("Malformed buffer received");
                        return out;
                    }
                    tmp = ptr[offset++];
                    muxSlotLengthBytes += tmp;
                } while (tmp == 0xff);

                payloadLength = muxSlotLengthBytes;
                break;
            }

            case 2:
            {
                // reserved

                TRESPASS();
                break;
            }

            default:
            {
                CHECK_GE(mFixedFrameLength, 0);

                payloadLength = mFixedFrameLength;
                break;
            }
        }

        CHECK_LT(offset, buffer->size());
        CHECK_LE(payloadLength, buffer->size() - offset);

        // Append this subframe's payload to the output.
        memcpy(out->data() + out->size(), &ptr[offset], payloadLength);
        out->setRange(0, out->size() + payloadLength);

        offset += payloadLength;

        if (mOtherDataPresent) {
            // We want to stay byte-aligned.

            CHECK((mOtherDataLenBits % 8) == 0);
            CHECK_LE(offset + (mOtherDataLenBits / 8), buffer->size());
            offset += mOtherDataLenBits / 8;
        }
    }

    if (offset < buffer->size()) {
        ALOGI("ignoring %zu bytes of trailing data", buffer->size() - offset);
    }
    CHECK_LE(offset, buffer->size());

    return out;
}
Example #19
0
// Reads one complete annex-B NALU from the global file stream `bits` into
// nalu's buffer (start-code prefix excluded) and returns the number of
// bytes between the two start codes, i.e. the prefix-inclusive NALU
// length. Returns 0 on short read, -1 when no start code is found.
// NOTE(review): uses a fixed 50000-byte static scratch buffer and global
// FILE* `bits` — not reentrant; confirm single-threaded use.
int GetAnnexbNALU (sp<ABuffer> nalu)
{
	int pos = 0;
	int StartCodeFound, rewind;
//	unsigned char *Buf;
	int info2,info3,startcodeprefix_len,len;

	static unsigned char Buf[50000];

	startcodeprefix_len=3;//assume a 3-byte start code prefix initially

	if (3 != fread (Buf, 1, 3, bits))//read 3 bytes from the stream
	{
		//free(Buf);
		return 0;
	}
	info2 = FindStartCode2 (Buf);//is it 0x000001?
	if(info2 != 1) 
	{
		//not a 3-byte start code: read one more byte and retest
		if(1 != fread(Buf+3, 1, 1, bits))//read one byte
		{
			//free(Buf);
			return 0;
		}
		info3 = FindStartCode3 (Buf);//is it 0x00000001?
		if (info3 != 1)//not a start code either: return -1
		{ 
			//free(Buf);
			return -1;
		}
		else 
		{
			//0x00000001: the start code prefix is 4 bytes
			pos = 4;
			startcodeprefix_len = 4;
		}
	}
	else
	{
		//0x000001: the start code prefix is 3 bytes
		startcodeprefix_len = 3;
		pos = 3;
	}
	//flags used while scanning for the next start code
	StartCodeFound = 0;
	info2 = 0;
	info3 = 0;

	while (!StartCodeFound)
	{
		if (feof (bits))//hit end of file?
		{
			//return 0;
			len = (pos-1)- startcodeprefix_len;
			memcpy (nalu->data(), &Buf[startcodeprefix_len], len);     
			//free(Buf);
			printf("lcy 1991 len %d\n",len);
			return pos-1;
		}
		Buf[pos++] = fgetc (bits);//read one byte into Buf
		info3 = FindStartCode3(&Buf[pos-4]);//check for 0x00000001
		if(info3 != 1)
			info2 = FindStartCode2(&Buf[pos-3]);//check for 0x000001
		StartCodeFound = (info2 == 1 || info3 == 1);
	}

	// Here, we have found another start code (and read length of startcode bytes more than we should
	// have.  Hence, go back in the file
	rewind = (info3 == 1)? -4 : -3;

	if (0 != fseek (bits, rewind, SEEK_CUR))//seek back to the end of the current NALU
	{
		//free(Buf);
		printf("GetAnnexbNALU: Cannot fseek in the bit stream file");
	}

	// Here the Start code, the complete NALU, and the next start code is in the Buf.  
	// The size of Buf is pos, pos+rewind are the number of bytes excluding the next
	// start code, and (pos+rewind)-startcodeprefix_len is the size of the NALU excluding the start code

	len = (pos+rewind)-startcodeprefix_len;
	memcpy (nalu->data(), &Buf[startcodeprefix_len], len);//copy one complete NALU, excluding the 0x000001/0x00000001 prefix
	//free(Buf);
	printf("memcpy2\n");

	return (pos+rewind);//bytes between the two start codes, i.e. prefix-inclusive NALU length
}
Example #20
0
// Identity conversion: copies the source payload verbatim into the target
// buffer's base. Always returns OK.
status_t DataConverter::safeConvert(
        const sp<MediaCodecBuffer> &source, sp<MediaCodecBuffer> &target) {
    const size_t numBytes = source->size();
    memcpy(target->base(), source->data(), numBytes);
    return OK;
}
// Dequeues one encoded frame from MediaCodec into aOutputBuf, along with
// its timestamp and flags. Handles buffer-list changes by retrying, and
// treats format-changed / try-again as benign empty returns. For AMR-NB
// and EVRC, which provide no codec-specific data, a hard-coded decoder
// config descriptor is synthesized once and flagged as CODECCONFIG.
nsresult
OMXCodecWrapper::GetNextEncodedFrame(nsTArray<uint8_t>* aOutputBuf,
                                     int64_t* aOutputTimestamp,
                                     int* aOutputFlags, int64_t aTimeOut)
{
  MOZ_ASSERT(mStarted,
             "Configure() should be called before GetNextEncodedFrame().");

  // Dequeue a buffer from output buffers.
  size_t index = 0;
  size_t outOffset = 0;
  size_t outSize = 0;
  int64_t outTimeUs = 0;
  uint32_t outFlags = 0;
  bool retry = false;
  do {
    status_t result = mCodec->dequeueOutputBuffer(&index, &outOffset, &outSize,
                                                  &outTimeUs, &outFlags,
                                                  aTimeOut);
    switch (result) {
      case OK:
        break;
      case INFO_OUTPUT_BUFFERS_CHANGED:
        // Update our references to new buffers.
        result = mCodec->getOutputBuffers(&mOutputBufs);
        // Get output from a new buffer.
        retry = true;
        break;
      case INFO_FORMAT_CHANGED:
        // It's okay: for encoder, MediaCodec reports this only to inform caller
        // that there will be a codec config buffer next.
        return NS_OK;
      case -EAGAIN:
        // Output buffer not available. Caller can try again later.
        return NS_OK;
      default:
        CODEC_ERROR("MediaCodec error:%d", result);
        MOZ_ASSERT(false, "MediaCodec error.");
        return NS_ERROR_FAILURE;
    }
  } while (retry);

  if (aOutputBuf) {
    aOutputBuf->Clear();
    const sp<ABuffer> omxBuf = mOutputBufs.itemAt(index);
    if (outFlags & MediaCodec::BUFFER_FLAG_CODECCONFIG) {
      // Codec specific data.
      if (AppendDecoderConfig(aOutputBuf, omxBuf.get()) != OK) {
        mCodec->releaseOutputBuffer(index);
        return NS_ERROR_FAILURE;
      }
    } else if ((mCodecType == AMR_NB_ENC) && !mAMRCSDProvided){
      // OMX AMR codec won't provide csd data, need to generate a fake one.
      RefPtr<EncodedFrame> audiodata = new EncodedFrame();
      // Decoder config descriptor
      const uint8_t decConfig[] = {
        0x0, 0x0, 0x0, 0x0, // vendor: 4 bytes
        0x0,                // decoder version
        0x83, 0xFF,         // mode set: all enabled
        0x00,               // mode change period
        0x01,               // frames per sample
      };
      aOutputBuf->AppendElements(decConfig, sizeof(decConfig));
      outFlags |= MediaCodec::BUFFER_FLAG_CODECCONFIG;
      mAMRCSDProvided = true;
    } else if ((mCodecType == EVRC_ENC) && !mEVRCCSDProvided){
      // OMX EVRC codec won't provide csd data, need to generate a fake one.
      RefPtr<EncodedFrame> audiodata = new EncodedFrame();
      // Decoder config descriptor
      const uint8_t decConfig[] = {
        0x0, 0x0, 0x0, 0x0, // vendor: 4 bytes
        0x0,                // decoder version
        0x01,               // frames per sample
      };
      aOutputBuf->AppendElements(decConfig, sizeof(decConfig));
      outFlags |= MediaCodec::BUFFER_FLAG_CODECCONFIG;
      mEVRCCSDProvided = true;
    } else {
      AppendFrame(aOutputBuf, omxBuf->data(), omxBuf->size());
    }
  }
  // Release in every non-error path so the codec can reuse the slot.
  mCodec->releaseOutputBuffer(index);

  if (aOutputTimestamp) {
    *aOutputTimestamp = outTimeUs;
  }

  if (aOutputFlags) {
    *aOutputFlags = outFlags;
  }

  return NS_OK;
}
// Builds AVC codec-specific metadata from an access unit containing SPS
// (NAL type 7) and PPS (NAL type 8): extracts dimensions from the SPS,
// assembles an AVCDecoderConfigurationRecord (kKeyAVCC) with one SPS and
// one PPS entry, and returns MetaData with mime/width/height set. Returns
// NULL when no SPS is found; CHECK-aborts when a PPS is missing.
sp<MetaData> MakeAVCCodecSpecificData(const sp<ABuffer> &accessUnit) {
    const uint8_t *data = accessUnit->data();
    size_t size = accessUnit->size();

    sp<ABuffer> seqParamSet = FindNAL(data, size, 7, NULL);
    if (seqParamSet == NULL) {
        return NULL;
    }

    int32_t width, height;
    FindAVCDimensions(seqParamSet, &width, &height);

    size_t stopOffset;
    sp<ABuffer> picParamSet = FindNAL(data, size, 8, &stopOffset);
    CHECK(picParamSet != NULL);

    // version + profile/compat/level + lengthSize + numSPS
    // + (len + SPS bytes) + numPPS + (len + PPS bytes).
    size_t csdSize =
        1 + 3 + 1 + 1
        + 2 * 1 + seqParamSet->size()
        + 1 + 2 * 1 + picParamSet->size();

    sp<ABuffer> csd = new ABuffer(csdSize);
    uint8_t *out = csd->data();

    *out++ = 0x01;  // configurationVersion
    memcpy(out, seqParamSet->data() + 1, 3);  // profile/level...

    uint8_t profile = out[0];
    uint8_t level = out[2];

    out += 3;
    *out++ = (0x3f << 2) | 1;  // lengthSize == 2 bytes
    *out++ = 0xe0 | 1;         // reserved bits + numOfSequenceParameterSets = 1

    // SPS entry: 16-bit big-endian length followed by the raw bytes.
    *out++ = seqParamSet->size() >> 8;
    *out++ = seqParamSet->size() & 0xff;
    memcpy(out, seqParamSet->data(), seqParamSet->size());
    out += seqParamSet->size();

    *out++ = 1;                // numOfPictureParameterSets

    *out++ = picParamSet->size() >> 8;
    *out++ = picParamSet->size() & 0xff;
    memcpy(out, picParamSet->data(), picParamSet->size());

#if 0
    LOGI("AVC seq param set");
    hexdump(seqParamSet->data(), seqParamSet->size());
#endif

    sp<MetaData> meta = new MetaData;
    meta->setCString(kKeyMIMEType, MEDIA_MIMETYPE_VIDEO_AVC);

    meta->setData(kKeyAVCC, kTypeAVCC, csd->data(), csd->size());
    meta->setInt32(kKeyWidth, width);
    meta->setInt32(kKeyHeight, height);

    LOGI("found AVC codec config (%d x %d, %s-profile level %d.%d)",
         width, height, AVCProfileToString(profile), level / 10, level % 10);

    return meta;
}
// Parses an SDP service description line by line, collecting one audio
// and/or one video media description (m=), their fmtp/rtpmap/control
// attributes (a=) and connection address (c=), then schedules track setup
// messages for each recognized stream. Unsupported codecs/protocols reset
// the corresponding media description so later attributes are ignored.
void RtspMediaSource::onDescribeMediaSource(const sp<Buffer>& desc) {
	// TODO: Build a SdpParser class that parses the service desc correctly :)

	mPendingTracks->clear();

	String mediaSourceDesc((char*)desc->data(), desc->size());
	sp< List<String> > lines = mediaSourceDesc.split("\n");
	List<String>::iterator itr = lines->begin();

	String audioMediaDesc;
	String videoMediaDesc;
	String mediaType;
	String profileId;
	String spropParams;
	String codecConfig;
	String transportProtocol;

	while (itr != lines->end()) {
		String line = itr->trim();
		if (line.startsWith("m=")) {
			// New media section: only RTP/AVP audio and RTP/AVP or
			// TCP/RTP/AVP video are accepted.
			if (line.startsWith("m=audio")) {
				sp< List<String> > strings = line.split(" ");
				List<String>::iterator itr = strings->begin();
				String port = *(++itr);
				String protocol = *(++itr);
				mediaType = *(++itr);
				if (protocol.trim() == "RTP/AVP") {
					audioMediaDesc = line;
					transportProtocol = "UDP";
				} else {
					audioMediaDesc = NULL;
				}
				videoMediaDesc = NULL;
			} else if (line.startsWith("m=video")) {
				sp< List<String> > strings = line.split(" ");
				List<String>::iterator itr = strings->begin();
				String port = *(++itr);
				String protocol = *(++itr);
				mediaType = *(++itr);
				if (protocol.trim() == "RTP/AVP") {
					videoMediaDesc = line;
					transportProtocol = "UDP";
				} else if (protocol.trim() == "TCP/RTP/AVP") {
					videoMediaDesc = line;
					transportProtocol = "TCP";
				} else {
					videoMediaDesc = NULL;
				}
				audioMediaDesc = NULL;
			} else {
				audioMediaDesc = NULL;
				videoMediaDesc = NULL;
			}
		} else if (line.startsWith("a=")) {
			if (line.startsWith("a=fmtp:")) {
				// Pull profile-level-id, sprop-parameter-sets and config
				// out of the fmtp attribute's ';'-separated parameters.
				sp< List<String> > strings = line.split(";");
				List<String>::iterator itr = strings->begin();
				while (itr != strings->end()) {
					if (itr->trim().startsWith("profile-level-id=")) {
						ssize_t pos = itr->indexOf("=");
						profileId = itr->substr(pos + 1);
					} else if (itr->trim().startsWith("sprop-parameter-sets=")) {
						ssize_t pos = itr->indexOf("=");
						spropParams = itr->substr(pos + 1);
					} else if (itr->trim().startsWith("config=")) {
						ssize_t pos = itr->indexOf("=");
						codecConfig = itr->substr(pos + 1);
					}
					++itr;
				}
			} else if (line.startsWith("a=rtpmap")) {
				// Codec filter: only specific AAC/L16 audio and
				// H264/90000 video payloads are kept.
				sp< List<String> > strings = line.split(" ");
				List<String>::iterator itr = strings->begin();
				if (strings->size() < 2) {
					audioMediaDesc = NULL;
					videoMediaDesc = NULL;
				}
				if (!audioMediaDesc.isEmpty()) {
					String rtpmapType = *(++itr);
					if (rtpmapType != "mpeg4-generic/44100/2" && rtpmapType != "L16/44100/2") {
						audioMediaDesc = NULL;
						// TODO: add support for other audio streams
					}
				} else if (!videoMediaDesc.isEmpty()) {
					if (*(++itr) != "H264/90000") {
						videoMediaDesc = NULL;
					}
				}
			} else if (line.startsWith("a=control:")) {
				// Control URL closes out the pending media section and
				// queues the track for setup.
				if (!audioMediaDesc.isEmpty()) {
					mAudioMediaSource.url = line.substr(String::size("a=control:")).trim();
					mAudioMediaSource.type = atoi(mediaType.c_str());
					mAudioMediaSource.transportProtocol = transportProtocol;
					mAudioMediaSource.profileId = profileId;
					mAudioMediaSource.spropParams = spropParams;
					mAudioMediaSource.codecConfig = codecConfig;

					mPendingTracks->push_back(obtainMessage(SETUP_AUDIO_TRACK));
				} else if (!videoMediaDesc.isEmpty()) {
					mVideoMediaSource.url = line.substr(String::size("a=control:")).trim();
					mVideoMediaSource.type = atoi(mediaType.c_str());
					mVideoMediaSource.transportProtocol = transportProtocol;
					mVideoMediaSource.profileId = profileId;
					mVideoMediaSource.spropParams = spropParams;
					mVideoMediaSource.codecConfig = codecConfig;

					mPendingTracks->push_back(obtainMessage(SETUP_VIDEO_TRACK));
				}
			}
		} else if (line.startsWith("c=")) {
			// Connection line, e.g. "c=IN IP4 <addr>"; the address applies
			// to both potential media sources.
			sp< List<String> > strings = line.substr(String::size("c=")).trim().split(" ");
			if (strings->size() >= 3) {
				List<String>::iterator itr = strings->begin();
				if (*itr++ == "IN" && *itr++ == "IP4") {
					mAudioMediaSource.serverIpAddress = *itr;
					mVideoMediaSource.serverIpAddress = *itr;
				}
			}
		}
		++itr;
	}

	if (mPendingTracks->size() > 0) {
		startPendingTracks();
	} else {
		printf("The media source does not offer any audio or video streams.\n");
		mNetHandler->obtainMessage(NetHandler::MEDIA_SOURCE_HAS_NO_STREAMS)->sendToTarget();
	}
}
// Queue one still-capture buffer (and its matching postview buffer) to the
// capture/postview V4L2 devices, starting the devices and ISP on first use.
//
// Input combinations:
//  - aBuffer == NULL && request == NULL: inject a "fake" skip-frame buffer
//    from the internal fake-buffer ring (allocated lazily on first use).
//  - aBuffer != NULL && request != NULL: queue the client buffer for a real
//    capture request, tagging it with per-frame-setting flags as needed.
//  - mixed NULL/non-NULL: logged as a warning; index 0 is (re)queued.
//
// Returns OK/NO_ERROR-style status from the queue/start path, or
// UNKNOWN_ERROR when no fake slot is available or queuing fails.
// NOTE(review): threading/ownership of the index rings is assumed to be
// serialized by the caller except for mReceivedBuffers, which is guarded
// by mBufBookKeepingLock below — confirm against the class contract.
status_t CaptureStream::capture(sp<CameraBuffer> aBuffer,
                                   Camera3Request* request)
{
    LOG2("@%s, name:%s", __FUNCTION__, getName());
    status_t status = OK;
    int index = 0;

    PERFORMANCE_TRACES_SHOT2SHOT_TAKE_PICTURE_HANDLE();
    // If the input params are null, means CameraHw requires to inject
    // fake buffer.
    if (aBuffer == NULL && request == NULL) {
        LOG2("inject a fake buffer, index:%d", mFakeBufferIndex);
        // Fake injection is only valid inside the reserved
        // [mFakeBufferIndexStart, mFakeBufferIndexStart + mMaxNumOfSkipFrames)
        // window; index 0 is never a fake slot.
        if (mFakeBufferIndex == 0 ||
            mFakeBufferIndex >= mFakeBufferIndexStart + mMaxNumOfSkipFrames) {
            return UNKNOWN_ERROR;
        }

        // Lazily back the fake slot with a heap buffer the first time it
        // is used (userptr == 0 means "not yet allocated").
        if (mV4l2CaptureBuffers[mFakeBufferIndex].m.userptr == 0) {
            LOG2("fake buffer is NULL, allocate heap buffer for it");
            sp<CameraBuffer> buf =
                MemoryUtils::allocateHeapBuffer(mConfig.width, mConfig.height,
                    widthToStride(GFXFmt2V4l2Fmt(mConfig.format, mCameraId), mConfig.width),
                    GFXFmt2V4l2Fmt(mConfig.format, mCameraId), mCameraId);
            if (buf.get()) {
                // Keep a strong reference so the heap buffer outlives the
                // raw userptr handed to the V4L2 buffer.
                mFakeBuffers.push_front(buf);
                mV4l2CaptureBuffers.editItemAt(mFakeBufferIndex).m.userptr =
                                                (unsigned long int)buf->data();
            } else {
                LOGE("%s: no memory for fake buffer!", __FUNCTION__);
            }
        }
        mV4l2PostviewBuffers.editItemAt(mFakeBufferIndex).m.userptr =
                (unsigned long int)mPostviewBuffers[mFakeBufferIndex]->data();

        // Advance the fake-buffer ring, wrapping back to its start.
        index = mFakeBufferIndex;
        mFakeBufferIndex++;
        if (mFakeBufferIndex == mFakeBufferIndexStart + mMaxNumOfSkipFrames) {
            mFakeBufferIndex = mFakeBufferIndexStart;
        }
        // -1: no real capture exposure id expected for a fake frame.
        mHwSensor->setExpectedCaptureExpId(-1, this);
    } else if (aBuffer != NULL && request != NULL) {
        if (!aBuffer->isLocked())
            aBuffer->lock();

        // Real captures use their own ring of REAL_BUF_NUM slots.
        index = mRealBufferIndex++;
        mRealBufferIndex = (mRealBufferIndex >= REAL_BUF_NUM) ? 0 : mRealBufferIndex;

        mV4l2CaptureBuffers.editItemAt(index).m.userptr =
            (unsigned long int)aBuffer->data();
        mV4l2PostviewBuffers.editItemAt(index).m.userptr =
            (unsigned long int)mPostviewBuffers[index]->data();
        // Tag the buffers with the request id (+1, presumably so 0 means
        // "no per-frame setting" — TODO confirm) when per-frame ISP
        // settings apply.
        const camera3_capture_request *req3 = request->getUserRequest();
        if (req3->settings ||
            mIsp->isIspPerframeSettingsEnabled()) {
            mV4l2CaptureBuffers.editItemAt(index).reserved2 =
                ATOMISP_BUFFER_HAS_PER_FRAME_SETTING | (request->getId()+1);
            mV4l2PostviewBuffers.editItemAt(index).reserved2 =
                ATOMISP_BUFFER_HAS_PER_FRAME_SETTING | (request->getId()+1);
        } else {
            mV4l2CaptureBuffers.editItemAt(index).reserved2 = 0;
            mV4l2PostviewBuffers.editItemAt(index).reserved2 = 0;
        }

        // Record the in-flight buffer under the bookkeeping lock.
        {
        Mutex::Autolock _l(mBufBookKeepingLock);
        aBuffer->setRequestId(request->getId());
        DeviceBuffer tmpBuffer = {aBuffer, index, \
            mCaptureWithoutPreview, (request->getNumberInputBufs()>0)};
        mReceivedBuffers.push_back(tmpBuffer); // Need to push like this to simulate FIFO behavior
        }

        mHwSensor->setExpectedCaptureExpId(request->getId(), this);
        // Capture stream need to:
        // 1. Which SOF can be used for shutter notification
        // 2. Which RAW buffer can be used to trigger capture
        //   (if it is not zsl capture)
        if (mMode == OFFLINE && mCaptureWithoutPreview)
            mHwSensor->findFrameForCapture(request->getId(), mRawLockEnabled);
        LOG2("###==%s: request_id=%d, output_frame = %p", __FUNCTION__,
                                        aBuffer->requestId(), aBuffer->data());
        mLastReqId = request->getId();
    } else {
        LOGW("%s: aBuffer or request is NULL", __FUNCTION__);
    }

    // Queue both the capture and postview buffers; treat any non-zero
    // combined status as failure.
    status = mCaptureDevice->putFrame(&mV4l2CaptureBuffers[index]);
    status |= mPostviewDevice->putFrame(&mV4l2PostviewBuffers[index]);
    if (status != NO_ERROR) {
        LOGE("Failed to queue a picture buffer!");
        return UNKNOWN_ERROR;
    }

    // First frame: bring up the devices and ISP.
    // NOTE(review): the requestContCapture() status is overwritten by
    // mIsp->start() below — confirm that is intentional.
    if (!mCaptureDevice->isStarted()) {
        if (mMode == OFFLINE) {
            status  = mIsp->requestContCapture(1, 1, 0);
        }

        mCaptureDevice->start(0);
        mPostviewDevice->start(0);
        status = mIsp->start();
        PERFORMANCE_TRACES_BREAKDOWN_STEP("startDevice");
    }

    if (mFlashCallback && mFlashCallback->isPreFlashUsed()) {
        mIsp->setFlash(1);
    }

    // Hand the request to the message loop to poll for the resulting frame.
    Message msg;
    msg.id = MESSAGE_ID_POLL;
    msg.request = request;
    mMessageQueue.send(&msg);

    return status;
}
// Strip LATM PayloadLengthInfo framing from an AudioMuxElement and return a
// new buffer containing only the concatenated subframe payload bytes.
// Aborts (CHECK) on malformed input outside the MTK-patched path.
sp<ABuffer> AMPEG4AudioAssembler::removeLATMFraming(const sp<ABuffer> &buffer) {
    CHECK(!mMuxConfigPresent);  // XXX to be implemented

    // Output is at most as large as the input, since framing is removed.
    sp<ABuffer> out = new ABuffer(buffer->size());
    out->setRange(0, 0);

    size_t offset = 0;
    uint8_t *ptr = buffer->data();

    // One iteration per subframe (mNumSubFrames is presumably stored
    // minus-one, hence <= — TODO confirm against the config parser).
    for (size_t i = 0; i <= mNumSubFrames; ++i) {
        // parse PayloadLengthInfo

        unsigned payloadLength = 0;

        switch (mFrameLengthType) {
            case 0:
            {
                // Variable length: sum a run of 0xff bytes plus the final
                // terminator byte.
                unsigned muxSlotLengthBytes = 0;
                unsigned tmp;
                do {
#ifdef MTK_AOSP_ENHANCEMENT
		// mtk80902: ALPS00389414
		    if (offset >= buffer->size()) {
			ALOGI("avoid slotlength check error, offset: %d, buffer size: %d", offset, buffer->size());
			break;
		    }
#else
                    CHECK_LT(offset, buffer->size());
#endif
                    tmp = ptr[offset++];
                    muxSlotLengthBytes += tmp;
                } while (tmp == 0xff);

                payloadLength = muxSlotLengthBytes;
                break;
            }

            case 2:
            {
                // reserved

                TRESPASS();
                break;
            }

            default:
            {
                // Fixed frame length configured out-of-band.
                CHECK_GE(mFixedFrameLength, 0);

                payloadLength = mFixedFrameLength;
                break;
            }
        }
        
        // Bounds-check before copying the payload out.
        CHECK_LT(offset, buffer->size());
        CHECK_LE(payloadLength, buffer->size() - offset);

        memcpy(out->data() + out->size(), &ptr[offset], payloadLength);
        out->setRange(0, out->size() + payloadLength);

        offset += payloadLength;

        if (mOtherDataPresent) {
            // We want to stay byte-aligned.

#ifdef MTK_AOSP_ENHANCEMENT 
            int bits = mOtherDataLenBits % 8;
            CHECK(bits == 0);
#else
            CHECK((mOtherDataLenBits % 8) == 0);
#endif // #ifdef MTK_AOSP_ENHANCEMENT
            CHECK_LE(offset + (mOtherDataLenBits / 8), buffer->size());
            offset += mOtherDataLenBits / 8;
        }
    }

    if (offset < buffer->size()) {
        // NOTE(review): %d with a size_t difference relies on matching
        // integer widths; %zu would be portable. Left untouched here.
        ALOGI("ignoring %d bytes of trailing data", buffer->size() - offset);
    }
    CHECK_LE(offset, buffer->size());

    return out;
}
Example #26
0
// Determine video dimensions from the sequence parameterset.
// Parses the SPS NAL unit in |seqParamSet| (data() points at the NAL header
// byte; bit parsing starts one byte in) and reports:
//   *width / *height       - cropped frame dimensions, clamped to 0 if the
//                            crop arithmetic overflows (hostile bitstream)
//   *sarWidth / *sarHeight - sample aspect ratio from the VUI, 0 when
//                            unspecified; either pointer may be NULL.
void FindAVCDimensions(
        const sp<ABuffer> &seqParamSet,
        int32_t *width, int32_t *height,
        int32_t *sarWidth, int32_t *sarHeight) {
    ABitReader br(seqParamSet->data() + 1, seqParamSet->size() - 1);

    unsigned profile_idc = br.getBits(8);
    br.skipBits(16);  // constraint flags / reserved bits / level_idc
    parseUE(&br);  // seq_parameter_set_id

    unsigned chroma_format_idc = 1;  // 4:2:0 chroma format

    // High-profile SPSes carry extra chroma / bit-depth / scaling-list
    // fields before the frame-size syntax elements.
    if (profile_idc == 100 || profile_idc == 110
            || profile_idc == 122 || profile_idc == 244
            || profile_idc == 44 || profile_idc == 83 || profile_idc == 86) {
        chroma_format_idc = parseUE(&br);
        if (chroma_format_idc == 3) {
            br.skipBits(1);  // residual_colour_transform_flag
        }
        parseUE(&br);  // bit_depth_luma_minus8
        parseUE(&br);  // bit_depth_chroma_minus8
        br.skipBits(1);  // qpprime_y_zero_transform_bypass_flag

        if (br.getBits(1)) {  // seq_scaling_matrix_present_flag
            for (size_t i = 0; i < 8; ++i) {
                if (br.getBits(1)) {  // seq_scaling_list_present_flag[i]

                    // WARNING: the code below has not ever been exercised...
                    // need a real-world example.

                    if (i < 6) {
                        // ScalingList4x4[i],16,...
                        skipScalingList(&br, 16);
                    } else {
                        // ScalingList8x8[i-6],64,...
                        skipScalingList(&br, 64);
                    }
                }
            }
        }
    }

    parseUE(&br);  // log2_max_frame_num_minus4
    unsigned pic_order_cnt_type = parseUE(&br);

    if (pic_order_cnt_type == 0) {
        parseUE(&br);  // log2_max_pic_order_cnt_lsb_minus4
    } else if (pic_order_cnt_type == 1) {
        // offset_for_non_ref_pic, offset_for_top_to_bottom_field and
        // offset_for_ref_frame are technically se(v), but since we are
        // just skipping over them the midpoint does not matter.

        br.getBits(1);  // delta_pic_order_always_zero_flag
        parseUE(&br);  // offset_for_non_ref_pic
        parseUE(&br);  // offset_for_top_to_bottom_field

        unsigned num_ref_frames_in_pic_order_cnt_cycle = parseUE(&br);
        for (unsigned i = 0; i < num_ref_frames_in_pic_order_cnt_cycle; ++i) {
            parseUE(&br);  // offset_for_ref_frame
        }
    }

    parseUE(&br);  // num_ref_frames
    br.getBits(1);  // gaps_in_frame_num_value_allowed_flag

    unsigned pic_width_in_mbs_minus1 = parseUE(&br);
    unsigned pic_height_in_map_units_minus1 = parseUE(&br);
    unsigned frame_mbs_only_flag = br.getBits(1);

    // Uncropped dimensions: map units are 16 pixels, and height doubles
    // for field-coded (non frame-mbs-only) streams.
    *width = pic_width_in_mbs_minus1 * 16 + 16;

    *height = (2 - frame_mbs_only_flag)
        * (pic_height_in_map_units_minus1 * 16 + 16);

    if (!frame_mbs_only_flag) {
        br.getBits(1);  // mb_adaptive_frame_field_flag
    }

    br.getBits(1);  // direct_8x8_inference_flag

    if (br.getBits(1)) {  // frame_cropping_flag
        unsigned frame_crop_left_offset = parseUE(&br);
        unsigned frame_crop_right_offset = parseUE(&br);
        unsigned frame_crop_top_offset = parseUE(&br);
        unsigned frame_crop_bottom_offset = parseUE(&br);

        // Crop units depend on chroma subsampling and frame/field coding.
        unsigned cropUnitX, cropUnitY;
        if (chroma_format_idc == 0  /* monochrome */) {
            cropUnitX = 1;
            cropUnitY = 2 - frame_mbs_only_flag;
        } else {
            unsigned subWidthC = (chroma_format_idc == 3) ? 1 : 2;
            unsigned subHeightC = (chroma_format_idc == 1) ? 2 : 1;

            cropUnitX = subWidthC;
            cropUnitY = subHeightC * (2 - frame_mbs_only_flag);
        }

        ALOGV("frame_crop = (%u, %u, %u, %u), cropUnitX = %u, cropUnitY = %u",
             frame_crop_left_offset, frame_crop_right_offset,
             frame_crop_top_offset, frame_crop_bottom_offset,
             cropUnitX, cropUnitY);


        // Crop offsets come straight from the bitstream; clamp the result
        // to 0 on any add/mul/sub overflow or a negative width.
        // *width -= (frame_crop_left_offset + frame_crop_right_offset) * cropUnitX;
        if(__builtin_add_overflow(frame_crop_left_offset, frame_crop_right_offset, &frame_crop_left_offset) ||
            __builtin_mul_overflow(frame_crop_left_offset, cropUnitX, &frame_crop_left_offset) ||
            __builtin_sub_overflow(*width, frame_crop_left_offset, width) ||
            *width < 0) {
            *width = 0;
        }

        //*height -= (frame_crop_top_offset + frame_crop_bottom_offset) * cropUnitY;
        if(__builtin_add_overflow(frame_crop_top_offset, frame_crop_bottom_offset, &frame_crop_top_offset) ||
            __builtin_mul_overflow(frame_crop_top_offset, cropUnitY, &frame_crop_top_offset) ||
            __builtin_sub_overflow(*height, frame_crop_top_offset, height) ||
            *height < 0) {
            *height = 0;
        }
    }

    // Default SAR to "unspecified" before looking at the VUI.
    if (sarWidth != NULL) {
        *sarWidth = 0;
    }

    if (sarHeight != NULL) {
        *sarHeight = 0;
    }

    if (br.getBits(1)) {  // vui_parameters_present_flag
        unsigned sar_width = 0, sar_height = 0;

        if (br.getBits(1)) {  // aspect_ratio_info_present_flag
            unsigned aspect_ratio_idc = br.getBits(8);

            if (aspect_ratio_idc == 255 /* extendedSAR */) {
                sar_width = br.getBits(16);
                sar_height = br.getBits(16);
            } else {
                // Fixed SARs indexed by aspect_ratio_idc (H.264 Table E-1).
                static const struct { unsigned width, height; } kFixedSARs[] = {
                        {   0,  0 }, // Invalid
                        {   1,  1 },
                        {  12, 11 },
                        {  10, 11 },
                        {  16, 11 },
                        {  40, 33 },
                        {  24, 11 },
                        {  20, 11 },
                        {  32, 11 },
                        {  80, 33 },
                        {  18, 11 },
                        {  15, 11 },
                        {  64, 33 },
                        { 160, 99 },
                        {   4,  3 },
                        {   3,  2 },
                        {   2,  1 },
                };

                if (aspect_ratio_idc > 0 && aspect_ratio_idc < NELEM(kFixedSARs)) {
                    sar_width = kFixedSARs[aspect_ratio_idc].width;
                    sar_height = kFixedSARs[aspect_ratio_idc].height;
                }
            }
        }

        ALOGV("sample aspect ratio = %u : %u", sar_width, sar_height);

        if (sarWidth != NULL) {
            *sarWidth = sar_width;
        }

        if (sarHeight != NULL) {
            *sarHeight = sar_height;
        }
    }
}
// Determine video dimensions from the sequence parameterset.
// Parses the SPS NAL unit in |seqParamSet| (data() points at the NAL header
// byte; bit parsing starts one byte in) and writes the cropped frame
// dimensions to *width / *height.
//
// The frame-cropping arithmetic is overflow-checked: the crop offsets come
// straight from a potentially hostile bitstream, so instead of letting the
// unchecked subtraction wrap into a bogus (negative or huge) dimension,
// any overflow clamps the dimension to 0 — matching the hardened
// five-parameter overload of this function.
void FindAVCDimensions(
    const sp<ABuffer> &seqParamSet, int32_t *width, int32_t *height) {
    ABitReader br(seqParamSet->data() + 1, seqParamSet->size() - 1);

    unsigned profile_idc = br.getBits(8);
    br.skipBits(16);  // constraint flags / reserved bits / level_idc
    parseUE(&br);  // seq_parameter_set_id

    unsigned chroma_format_idc = 1;  // 4:2:0 chroma format

    // High-profile SPSes carry extra chroma / bit-depth fields before the
    // frame-size syntax elements.
    if (profile_idc == 100 || profile_idc == 110
            || profile_idc == 122 || profile_idc == 244
            || profile_idc == 44 || profile_idc == 83 || profile_idc == 86) {
        chroma_format_idc = parseUE(&br);
        if (chroma_format_idc == 3) {
            br.skipBits(1);  // residual_colour_transform_flag
        }
        parseUE(&br);  // bit_depth_luma_minus8
        parseUE(&br);  // bit_depth_chroma_minus8
        br.skipBits(1);  // qpprime_y_zero_transform_bypass_flag
        CHECK_EQ(br.getBits(1), 0u);  // seq_scaling_matrix_present_flag
    }

    parseUE(&br);  // log2_max_frame_num_minus4
    unsigned pic_order_cnt_type = parseUE(&br);

    if (pic_order_cnt_type == 0) {
        parseUE(&br);  // log2_max_pic_order_cnt_lsb_minus4
    } else if (pic_order_cnt_type == 1) {
        // offset_for_non_ref_pic, offset_for_top_to_bottom_field and
        // offset_for_ref_frame are technically se(v), but since we are
        // just skipping over them the midpoint does not matter.

        br.getBits(1);  // delta_pic_order_always_zero_flag
        parseUE(&br);  // offset_for_non_ref_pic
        parseUE(&br);  // offset_for_top_to_bottom_field

        unsigned num_ref_frames_in_pic_order_cnt_cycle = parseUE(&br);
        for (unsigned i = 0; i < num_ref_frames_in_pic_order_cnt_cycle; ++i) {
            parseUE(&br);  // offset_for_ref_frame
        }
    }

    parseUE(&br);  // num_ref_frames
    br.getBits(1);  // gaps_in_frame_num_value_allowed_flag

    unsigned pic_width_in_mbs_minus1 = parseUE(&br);
    unsigned pic_height_in_map_units_minus1 = parseUE(&br);
    unsigned frame_mbs_only_flag = br.getBits(1);

    // Uncropped dimensions: map units are 16 pixels, and height doubles
    // for field-coded (non frame-mbs-only) streams.
    *width = pic_width_in_mbs_minus1 * 16 + 16;

    *height = (2 - frame_mbs_only_flag)
              * (pic_height_in_map_units_minus1 * 16 + 16);

    if (!frame_mbs_only_flag) {
        br.getBits(1);  // mb_adaptive_frame_field_flag
    }

    br.getBits(1);  // direct_8x8_inference_flag

    if (br.getBits(1)) {  // frame_cropping_flag
        unsigned frame_crop_left_offset = parseUE(&br);
        unsigned frame_crop_right_offset = parseUE(&br);
        unsigned frame_crop_top_offset = parseUE(&br);
        unsigned frame_crop_bottom_offset = parseUE(&br);

        // Crop units depend on chroma subsampling and frame/field coding.
        unsigned cropUnitX, cropUnitY;
        if (chroma_format_idc == 0  /* monochrome */) {
            cropUnitX = 1;
            cropUnitY = 2 - frame_mbs_only_flag;
        } else {
            unsigned subWidthC = (chroma_format_idc == 3) ? 1 : 2;
            unsigned subHeightC = (chroma_format_idc == 1) ? 2 : 1;

            cropUnitX = subWidthC;
            cropUnitY = subHeightC * (2 - frame_mbs_only_flag);
        }

        ALOGV("frame_crop = (%u, %u, %u, %u), cropUnitX = %u, cropUnitY = %u",
              frame_crop_left_offset, frame_crop_right_offset,
              frame_crop_top_offset, frame_crop_bottom_offset,
              cropUnitX, cropUnitY);

        // *width -= (frame_crop_left_offset + frame_crop_right_offset) * cropUnitX;
        // Clamp to 0 on any add/mul/sub overflow or a negative result.
        if (__builtin_add_overflow(frame_crop_left_offset, frame_crop_right_offset, &frame_crop_left_offset) ||
                __builtin_mul_overflow(frame_crop_left_offset, cropUnitX, &frame_crop_left_offset) ||
                __builtin_sub_overflow(*width, frame_crop_left_offset, width) ||
                *width < 0) {
            *width = 0;
        }

        // *height -= (frame_crop_top_offset + frame_crop_bottom_offset) * cropUnitY;
        if (__builtin_add_overflow(frame_crop_top_offset, frame_crop_bottom_offset, &frame_crop_top_offset) ||
                __builtin_mul_overflow(frame_crop_top_offset, cropUnitY, &frame_crop_top_offset) ||
                __builtin_sub_overflow(*height, frame_crop_top_offset, height) ||
                *height < 0) {
            *height = 0;
        }
    }
}
Example #28
0
// Build an AVCDecoderConfigurationRecord (avcC) from the SPS and PPS NAL
// units embedded in |accessUnit|, reporting the parsed video dimensions and
// sample aspect ratio through the output pointers along the way.
// Returns NULL when no SPS can be found; aborts (CHECK) if the SPS is
// present but the PPS is missing.
sp<ABuffer> MakeAVCCodecSpecificData(
        const sp<ABuffer> &accessUnit, int32_t *width, int32_t *height,
        int32_t *sarWidth, int32_t *sarHeight) {
    const uint8_t *data = accessUnit->data();
    size_t size = accessUnit->size();

    // Sequence parameter set (NAL type 7) is mandatory for a config record.
    sp<ABuffer> seqParamSet = FindNAL(data, size, 7);
    if (seqParamSet == NULL) {
        return NULL;
    }

    FindAVCDimensions(seqParamSet, width, height, sarWidth, sarHeight);

    // Picture parameter set (NAL type 8) must accompany the SPS.
    sp<ABuffer> picParamSet = FindNAL(data, size, 8);
    CHECK(picParamSet != NULL);

    // avcC layout: 6 header bytes, then one length-prefixed SPS, a PPS
    // count byte, and one length-prefixed PPS.
    size_t csdSize = 1 + 3 + 1 + 1
            + 2 * 1 + seqParamSet->size()
            + 1 + 2 * 1 + picParamSet->size();

    sp<ABuffer> csd = new ABuffer(csdSize);
    uint8_t *dst = csd->data();

    *dst++ = 0x01;  // configurationVersion

    // Profile / compatibility / level bytes are copied verbatim from the
    // SPS (which start right after its NAL header byte).
    memcpy(dst, seqParamSet->data() + 1, 3);
    uint8_t profile = dst[0];
    uint8_t level = dst[2];
    dst += 3;

    *dst++ = (0x3f << 2) | 1;  // lengthSize == 2 bytes
    *dst++ = 0xe0 | 1;         // reserved bits + numOfSequenceParameterSets

    size_t spsSize = seqParamSet->size();
    *dst++ = spsSize >> 8;
    *dst++ = spsSize & 0xff;
    memcpy(dst, seqParamSet->data(), spsSize);
    dst += spsSize;

    *dst++ = 1;  // numOfPictureParameterSets

    size_t ppsSize = picParamSet->size();
    *dst++ = ppsSize >> 8;
    *dst++ = ppsSize & 0xff;
    memcpy(dst, picParamSet->data(), ppsSize);

#if 0
    ALOGI("AVC seq param set");
    hexdump(seqParamSet->data(), seqParamSet->size());
#endif


    if (sarWidth != nullptr && sarHeight != nullptr) {
        const bool hasMeaningfulSar =
                (*sarWidth > 0 && *sarHeight > 0)
                && (*sarWidth != 1 || *sarHeight != 1);
        if (hasMeaningfulSar) {
            ALOGI("found AVC codec config (%d x %d, %s-profile level %d.%d) "
                    "SAR %d : %d",
                    *width,
                    *height,
                    AVCProfileToString(profile),
                    level / 10,
                    level % 10,
                    *sarWidth,
                    *sarHeight);
        } else {
            // We treat *:0 and 0:* (unspecified) as 1:1.
            *sarWidth = 0;
            *sarHeight = 0;
            ALOGI("found AVC codec config (%d x %d, %s-profile level %d.%d)",
                    *width,
                    *height,
                    AVCProfileToString(profile),
                    level / 10,
                    level % 10);
        }
    }

    return csd;
}
Example #29
0
// Determine video dimensions from the sequence parameterset.
// Parses the SPS NAL unit in |seqParamSet| (data() points at the NAL header
// byte; bit parsing starts one byte in) and reports:
//   *width / *height       - cropped frame dimensions, clamped to 0 if the
//                            crop arithmetic overflows (hostile bitstream)
//   *sarWidth / *sarHeight - sample aspect ratio from the VUI, 0 when
//                            unspecified; either pointer may be NULL
//   *isInterlaced          - set from !frame_mbs_only_flag; may be NULL.
//                            When non-NULL and a scaling matrix is present,
//                            the function returns early WITHOUT touching
//                            any output (scaling-list parsing unsupported
//                            on this path).
void FindAVCDimensions(
        const sp<ABuffer> &seqParamSet,
        int32_t *width, int32_t *height,
        int32_t *sarWidth, int32_t *sarHeight, int32_t *isInterlaced) {
    ABitReader br(seqParamSet->data() + 1, seqParamSet->size() - 1);

    unsigned profile_idc = br.getBits(8);
    br.skipBits(16);  // constraint flags / reserved bits / level_idc
    parseUE(&br);  // seq_parameter_set_id

    unsigned chroma_format_idc = 1;  // 4:2:0 chroma format

    // High-profile SPSes carry extra chroma / bit-depth fields before the
    // frame-size syntax elements.
    if (profile_idc == 100 || profile_idc == 110
            || profile_idc == 122 || profile_idc == 244
            || profile_idc == 44 || profile_idc == 83 || profile_idc == 86) {
        chroma_format_idc = parseUE(&br);
        if (chroma_format_idc == 3) {
            br.skipBits(1);  // residual_colour_transform_flag
        }
        parseUE(&br);  // bit_depth_luma_minus8
        parseUE(&br);  // bit_depth_chroma_minus8
        br.skipBits(1);  // qpprime_y_zero_transform_bypass_flag
        bool seq_scaling_matrix_present = (br.getBits(1) != 0u);
        if (isInterlaced != NULL && seq_scaling_matrix_present) {
            // Scaling-list parsing is not implemented here; give up rather
            // than mis-parse the remainder of the SPS.
            return;
        }
        CHECK_EQ(seq_scaling_matrix_present, false);  // seq_scaling_matrix_present_flag
    }

    parseUE(&br);  // log2_max_frame_num_minus4
    unsigned pic_order_cnt_type = parseUE(&br);

    if (pic_order_cnt_type == 0) {
        parseUE(&br);  // log2_max_pic_order_cnt_lsb_minus4
    } else if (pic_order_cnt_type == 1) {
        // offset_for_non_ref_pic, offset_for_top_to_bottom_field and
        // offset_for_ref_frame are technically se(v), but since we are
        // just skipping over them the midpoint does not matter.

        br.getBits(1);  // delta_pic_order_always_zero_flag
        parseUE(&br);  // offset_for_non_ref_pic
        parseUE(&br);  // offset_for_top_to_bottom_field

        unsigned num_ref_frames_in_pic_order_cnt_cycle = parseUE(&br);
        for (unsigned i = 0; i < num_ref_frames_in_pic_order_cnt_cycle; ++i) {
            parseUE(&br);  // offset_for_ref_frame
        }
    }

    parseUE(&br);  // num_ref_frames
    br.getBits(1);  // gaps_in_frame_num_value_allowed_flag

    unsigned pic_width_in_mbs_minus1 = parseUE(&br);
    unsigned pic_height_in_map_units_minus1 = parseUE(&br);
    unsigned frame_mbs_only_flag = br.getBits(1);

    // Uncropped dimensions: map units are 16 pixels, and height doubles
    // for field-coded (non frame-mbs-only) streams.
    *width = pic_width_in_mbs_minus1 * 16 + 16;

    *height = (2 - frame_mbs_only_flag)
        * (pic_height_in_map_units_minus1 * 16 + 16);

    if (!frame_mbs_only_flag) {
        br.getBits(1);  // mb_adaptive_frame_field_flag
    }

    br.getBits(1);  // direct_8x8_inference_flag

    if (br.getBits(1)) {  // frame_cropping_flag
        unsigned frame_crop_left_offset = parseUE(&br);
        unsigned frame_crop_right_offset = parseUE(&br);
        unsigned frame_crop_top_offset = parseUE(&br);
        unsigned frame_crop_bottom_offset = parseUE(&br);

        // Crop units depend on chroma subsampling and frame/field coding.
        unsigned cropUnitX, cropUnitY;
        if (chroma_format_idc == 0  /* monochrome */) {
            cropUnitX = 1;
            cropUnitY = 2 - frame_mbs_only_flag;
        } else {
            unsigned subWidthC = (chroma_format_idc == 3) ? 1 : 2;
            unsigned subHeightC = (chroma_format_idc == 1) ? 2 : 1;

            cropUnitX = subWidthC;
            cropUnitY = subHeightC * (2 - frame_mbs_only_flag);
        }

        ALOGV("frame_crop = (%u, %u, %u, %u), cropUnitX = %u, cropUnitY = %u",
             frame_crop_left_offset, frame_crop_right_offset,
             frame_crop_top_offset, frame_crop_bottom_offset,
             cropUnitX, cropUnitY);

        // *width -= (frame_crop_left_offset + frame_crop_right_offset) * cropUnitX;
        // The offsets come from a potentially hostile bitstream, so clamp
        // to 0 on any add/mul/sub overflow or a negative result instead of
        // letting the unchecked subtraction wrap.
        if (__builtin_add_overflow(frame_crop_left_offset, frame_crop_right_offset, &frame_crop_left_offset) ||
                __builtin_mul_overflow(frame_crop_left_offset, cropUnitX, &frame_crop_left_offset) ||
                __builtin_sub_overflow(*width, frame_crop_left_offset, width) ||
                *width < 0) {
            *width = 0;
        }

        // *height -= (frame_crop_top_offset + frame_crop_bottom_offset) * cropUnitY;
        if (__builtin_add_overflow(frame_crop_top_offset, frame_crop_bottom_offset, &frame_crop_top_offset) ||
                __builtin_mul_overflow(frame_crop_top_offset, cropUnitY, &frame_crop_top_offset) ||
                __builtin_sub_overflow(*height, frame_crop_top_offset, height) ||
                *height < 0) {
            *height = 0;
        }
    }

    if (isInterlaced != NULL) {
        *isInterlaced = !frame_mbs_only_flag;
    }

    // Default SAR to "unspecified" before looking at the VUI.
    if (sarWidth != NULL) {
        *sarWidth = 0;
    }

    if (sarHeight != NULL) {
        *sarHeight = 0;
    }

    if (br.getBits(1)) {  // vui_parameters_present_flag
        unsigned sar_width = 0, sar_height = 0;

        if (br.getBits(1)) {  // aspect_ratio_info_present_flag
            unsigned aspect_ratio_idc = br.getBits(8);

            if (aspect_ratio_idc == 255 /* extendedSAR */) {
                sar_width = br.getBits(16);
                sar_height = br.getBits(16);
            } else if (aspect_ratio_idc > 0 && aspect_ratio_idc < 14) {
                // Fixed SARs from H.264 Table E-1, indexed by
                // aspect_ratio_idc - 1.
                static const int32_t kFixedSARWidth[] = {
                    1, 12, 10, 16, 40, 24, 20, 32, 80, 18, 15, 64, 160
                };

                static const int32_t kFixedSARHeight[] = {
                    1, 11, 11, 11, 33, 11, 11, 11, 33, 11, 11, 33, 99
                };

                sar_width = kFixedSARWidth[aspect_ratio_idc - 1];
                sar_height = kFixedSARHeight[aspect_ratio_idc - 1];
            }
        }

        ALOGV("sample aspect ratio = %u : %u", sar_width, sar_height);

        if (sarWidth != NULL) {
            *sarWidth = sar_width;
        }

        if (sarHeight != NULL) {
            *sarHeight = sar_height;
        }
    }
}