Exemplo n.º 1
0
  // Worker-thread half of the decode pipeline: feed one encoded frame to
  // the OpenH264 decoder, then hand the raw result back to the main thread
  // (Decode_m) for packaging and delivery.
  //
  // NOTE(review): |decoded| and |data| are stack locals whose addresses are
  // handed to Decode_m; this is only safe if TrySyncRunOnMainThread truly
  // runs the task synchronously before returning — confirm that guarantee.
  void Decode_w (GMPVideoEncodedFrame* inputFrame,
                 bool missingFrames,
                 DECODING_STATE& dState,
                 int64_t renderTimeMs = -1) {
    GMPLOG (GL_DEBUG, "Frame decode on worker thread length = "
            << inputFrame->Size());

    SBufferInfo decoded;
    bool valid = false;
    memset (&decoded, 0, sizeof (decoded));
    // Plane pointers filled in by the decoder (Y, U, V).
    unsigned char* data[3] = {nullptr, nullptr, nullptr};

    dState = decoder_->DecodeFrameNoDelay (inputFrame->Buffer(),
                                     inputFrame->Size(),
                                     data,
                                     &decoded);

    // A non-zero DECODING_STATE is an error; we still forward to Decode_m
    // (with valid == false) so the input frame gets destroyed there.
    if (dState) {
      GMPLOG (GL_ERROR, "Decoding error dState=" << dState);
    } else {
      valid = true;
    }

    TrySyncRunOnMainThread (WrapTask (
                                 this,
                                 &OpenH264VideoDecoder::Decode_m,
                                 inputFrame,
                                 &decoded,
                                 data,
                                 renderTimeMs,
                                 valid));
  }
Exemplo n.º 2
0
  // Initialize the OpenH264 encoder with basic (SEncParamBase) parameters:
  // create the worker thread, create the encoder instance, translate the
  // GMP codec settings, and initialize the encoder. On any failure, reports
  // GMPGenericErr and leaves the encoder unusable.
  virtual void InitEncode (const GMPVideoCodec& codecSettings,
                           const uint8_t* aCodecSpecific,
                           uint32_t aCodecSpecificSize,
                           GMPVideoEncoderCallback* callback,
                           int32_t numberOfCores,
                           uint32_t maxPayloadSize) {
    callback_ = callback;

    GMPErr err = g_platform_api->createthread (&worker_thread_);
    if (err != GMPNoErr) {
      GMPLOG (GL_ERROR, "Couldn't create new thread");
      Error (GMPGenericErr);
      return;
    }

    int rv = WelsCreateSVCEncoder (&encoder_);
    if (rv) {
      Error (GMPGenericErr);
      return;
    }

    SEncParamBase param;
    memset (&param, 0, sizeof (param));

    GMPLOG (GL_INFO, "Initializing encoder at "
            << codecSettings.mWidth
            << "x"
            << codecSettings.mHeight
            << "@"
            << static_cast<int> (codecSettings.mMaxFramerate)
            << "max payload size="
            << maxPayloadSize);

    // Translate parameters.
    param.iUsageType = CAMERA_VIDEO_REAL_TIME;
    param.iPicWidth = codecSettings.mWidth;
    param.iPicHeight = codecSettings.mHeight;
    // GMP bitrates are kbps, OpenH264 expects bps (see SetRates' kbps->bps
    // conversion elsewhere in this file).
    param.iTargetBitrate = codecSettings.mStartBitrate * 1000;
    GMPLOG (GL_INFO, "Initializing Bit Rate at: Start: "
            << codecSettings.mStartBitrate
            << "; Min: "
            << codecSettings.mMinBitrate
            << "; Max: "
            << codecSettings.mMaxBitrate);
    param.iRCMode = RC_BITRATE_MODE;

    // TODO([email protected]). Scary conversion from unsigned char to float below.
    param.fMaxFrameRate = static_cast<float> (codecSettings.mMaxFramerate);

    rv = encoder_->Initialize (&param);
    if (rv) {
      GMPLOG (GL_ERROR, "Couldn't initialize encoder");
      Error (GMPGenericErr);
      return;
    }

    // maxPayloadSize is only recorded here; SEncParamBase has no NAL-size
    // field (contrast with the SEncParamExt variant of InitEncode).
    max_payload_size_ = maxPayloadSize;

    GMPLOG (GL_INFO, "Initialized encoder");
  }
Exemplo n.º 3
0
  // Return the decoded data back to the parent.
  //
  // Runs on the main thread. Wraps the decoder's raw output planes in a GMP
  // I420 frame and delivers it via callback_. The input frame is destroyed
  // on every return path by the SelfDestruct guard. renderTimeMs is
  // currently unused.
  void Decode_m (GMPVideoEncodedFrame* inputFrame,
                 SBufferInfo* decoded,
                 unsigned char* data[3],
                 int64_t renderTimeMs,
                 bool valid) {
    // Attach a self-destructor so that this dies on return.
    SelfDestruct<GMPVideoEncodedFrame> ifd (inputFrame);

    // If we don't actually have data, just abort.
    if (!valid) {
      return;
    }

    // iBufferStatus == 1 means a picture is ready; anything else means the
    // decoder is still buffering and there is nothing to deliver yet.
    if (decoded->iBufferStatus != 1) {
      return;
    }

    int width = decoded->UsrData.sSystemBuffer.iWidth;
    int height = decoded->UsrData.sSystemBuffer.iHeight;
    int ystride = decoded->UsrData.sSystemBuffer.iStride[0];
    int uvstride = decoded->UsrData.sSystemBuffer.iStride[1];

    GMPLOG (GL_DEBUG, "Video frame ready for display "
            << width
            << "x"
            << height
            << " timestamp="
            << inputFrame->TimeStamp());

    GMPVideoFrame* ftmp = nullptr;

    // Translate the image.
    GMPErr err = host_->CreateFrame (kGMPI420VideoFrame, &ftmp);
    if (err != GMPNoErr) {
      GMPLOG (GL_ERROR, "Couldn't allocate empty I420 frame");
      return;
    }


    GMPVideoi420Frame* frame = static_cast<GMPVideoi420Frame*> (ftmp);
    err = frame->CreateFrame (
            ystride * height, static_cast<uint8_t*> (data[0]),
            uvstride * height / 2, static_cast<uint8_t*> (data[1]),
            uvstride * height / 2, static_cast<uint8_t*> (data[2]),
            width, height,
            ystride, uvstride, uvstride);
    if (err != GMPNoErr) {
      GMPLOG (GL_ERROR, "Couldn't make decoded frame");
      // Fix: the frame allocated by CreateFrame above must be released on
      // this error path, or it leaks.
      frame->Destroy();
      return;
    }

    GMPLOG (GL_DEBUG, "Allocated size = "
            << frame->AllocatedSize (kGMPYPlane));
    frame->SetTimestamp (inputFrame->TimeStamp());
    frame->SetDuration (inputFrame->Duration());
    // Ownership of |frame| passes to the callback.
    callback_->Decoded (frame);

    stats_.FrameOut();
  }
Exemplo n.º 4
0
  // Minimal decoder initialization: no codec state is required here, so we
  // only record where decoded frames should be delivered.
  virtual void InitDecode (const GMPVideoCodec& codecSettings,
                             const uint8_t* aCodecSpecific,
                             uint32_t aCodecSpecificSize,
                             GMPVideoDecoderCallback* callback,
                             int32_t coreCount) {
    callback_ = callback;
    GMPLOG (GL_INFO, "InitDecode");
  }
Exemplo n.º 5
0
 // Log the incoming frame, then hand it to the main thread where the fake
 // "decode" takes place. The task takes ownership of inputFrame.
 virtual void Decode (GMPVideoEncodedFrame* inputFrame,
                        bool missingFrames,
                        const uint8_t* aCodecSpecificInfo,
                        uint32_t aCodecSpecificInfoLength,
                        int64_t renderTimeMs = -1) {
   GMPLOG (GL_DEBUG, __FUNCTION__
           << "Decoding frame size=" << inputFrame->Size()
           << " timestamp=" << inputFrame->TimeStamp());
   FakeDecoderTask* task = new FakeDecoderTask (this, inputFrame, renderTimeMs);
   g_platform_api->runonmainthread (task);
 }
Exemplo n.º 6
0
  // Fake encoder setup: there is no real codec to configure, so just stash
  // the callback used later to deliver "encoded" frames.
  virtual void InitEncode (const GMPVideoCodec& codecSettings,
                             const uint8_t* aCodecSpecific,
                             uint32_t aCodecSpecificSize,
                             GMPVideoEncoderCallback* callback,
                             int32_t numberOfCores,
                             uint32_t maxPayloadSize) {
    callback_ = callback;
    GMPLOG (GL_INFO, "Initialized encoder");
  }
Exemplo n.º 7
0
  virtual void InitDecode (const GMPVideoCodec& codecSettings,
                           const uint8_t* aCodecSpecific,
                           uint32_t aCodecSpecificSize,
                           GMPVideoDecoderCallback* callback,
                           int32_t coreCount) {
    callback_ = callback;

    GMPLOG (GL_INFO, "InitDecode");

    GMPErr err = g_platform_api->createthread (&worker_thread_);
    if (err != GMPNoErr) {
      GMPLOG (GL_ERROR, "Couldn't create new thread");
      Error (GMPGenericErr);
      return;
    }

    if (WelsCreateDecoder (&decoder_)) {
      GMPLOG (GL_ERROR, "Couldn't create decoder");
      Error (GMPGenericErr);
      return;
    }

    if (!decoder_) {
      GMPLOG (GL_ERROR, "Couldn't create decoder");
      Error (GMPGenericErr);
      return;
    }

    SDecodingParam param;
    memset (&param, 0, sizeof (param));
    param.eOutputColorFormat = videoFormatI420;
    param.uiTargetDqLayer = UCHAR_MAX;  // Default value
    param.eEcActiveIdc = ERROR_CON_SLICE_COPY; // Error concealment on.
    param.sVideoProperty.eVideoBsType = VIDEO_BITSTREAM_DEFAULT;

    if (decoder_->Initialize (&param)) {
      GMPLOG (GL_ERROR, "Couldn't initialize decoder");
      Error (GMPGenericErr);
      return;
    }
  }
Exemplo n.º 8
0
  // Entry point for decoding one encoded frame. Rewrites the buffer from
  // length-prefixed NALs to Annex B start codes in place, then posts the
  // real decode to the worker thread.
  virtual void Decode (GMPVideoEncodedFrame* inputFrame,
                       bool missingFrames,
                       const uint8_t* aCodecSpecificInfo,
                       uint32_t aCodecSpecificInfoLength,
                       int64_t renderTimeMs = -1) {
    GMPLOG (GL_DEBUG, __FUNCTION__
            << "Decoding frame size=" << inputFrame->Size()
            << " timestamp=" << inputFrame->TimeStamp());
    stats_.FrameIn();
    //const GMPCodecSpecificInfo *codecSpecificInfo = (GMPCodecSpecificInfo) aCodecSpecificInfo;

    // Convert to H.264 start codes
    switch (inputFrame->BufferType()) {
    case GMP_BufferSingle:
    case GMP_BufferLength8:
    case GMP_BufferLength16:
    case GMP_BufferLength24:
      // We should look to support these, especially GMP_BufferSingle
      assert (false);
      break;

    case GMP_BufferLength32: {
      // Each NAL is prefixed with a 4-byte length (host byte order,
      // excluding the prefix itself); overwrite each prefix with the
      // 00 00 00 01 Annex B start code, hopping by length+0 each time.
      uint8_t* start_code = inputFrame->Buffer();
      // start code should be at least four bytes from the end or we risk
      // reading/writing outside the buffer.
      while (start_code < inputFrame->Buffer() + inputFrame->Size() - 4) {
        static const uint8_t code[] = { 0x00, 0x00, 0x00, 0x01 };
        uint8_t* lenp = start_code;
        start_code += * (reinterpret_cast<int32_t*> (lenp));
        memcpy (lenp, code, 4);
      }
    }
    break;

    default:
      assert (false);
      break;
    }
    DECODING_STATE dState = dsErrorFree;
    worker_thread_->Post (WrapTaskRefCounted (
                            this, &OpenH264VideoDecoder::Decode_w,
                            inputFrame,
                            missingFrames,
                            dState,
                            renderTimeMs));
    // NOTE(review): Post() queues Decode_w asynchronously, so this check
    // almost certainly reads dState before the worker writes it (always
    // dsErrorFree), and the reference captured by the task points into this
    // stack frame after we return — confirm the task wrapper copies the
    // value rather than binding the reference.
    if (dState) {
      Error (GMPGenericErr);
    }
  }
Exemplo n.º 9
0
  // Push the raw I420 frame to the main thread, where the fake "encode"
  // runs. Only the first requested frame type is honored.
  virtual void Encode (GMPVideoi420Frame* inputImage,
                         const uint8_t* aCodecSpecificInfo,
                         uint32_t aCodecSpecificInfoLength,
                         const GMPVideoFrameType* aFrameTypes,
                         uint32_t aFrameTypesLength) {
    GMPLOG (GL_DEBUG,
            __FUNCTION__
            << " size="
            << inputImage->Width() << "x" << inputImage->Height());

    assert (aFrameTypesLength != 0);

    FakeEncoderTask* task =
        new FakeEncoderTask (this, inputImage, aFrameTypes[0]);
    g_platform_api->runonmainthread (task);
  }
Exemplo n.º 10
0
  // Count one incoming frame and, at most once per wall-clock second, log a
  // running frames-in / frames-out summary.
  void FrameIn() {
    ++frames_in_;
    time_t now = time (0);

    // Rate-limit: skip logging until the second has advanced.
    if (now == last_time_) {
      return;
    }

    // ...and then only on every 10th frame.
    if (! (frames_in_ % 10)) {
      // NOTE(review): assumes now > start_time_ whenever we reach this
      // point (i.e. start_time_ and last_time_ were initialized together
      // at construction); otherwise (now - start_time_) could be zero and
      // the division faults — confirm the member initializers.
      // The "30 / (now - last_time_)" term looks like a rough recent-rate
      // figure; its intended meaning is not evident from this block.
      GMPLOG (GL_INFO, type_ << ": " << now << " Frame count "
              << frames_in_
              << "(" << (frames_in_ / (now - start_time_)) << "/"
              << (30 / (now - last_time_)) << ")"
              << " -- " << frames_out_);
      last_time_ = now;
    }
  }
Exemplo n.º 11
0
  // Queue one raw I420 frame for encoding on the worker thread. Only the
  // first entry of aFrameTypes is used; the worker owns inputImage from
  // the moment the task is posted.
  virtual void Encode (GMPVideoi420Frame* inputImage,
                       const uint8_t* aCodecSpecificInfo,
                       uint32_t aCodecSpecificInfoLength,
                       const GMPVideoFrameType* aFrameTypes,
                       uint32_t aFrameTypesLength) {
    GMPLOG (GL_DEBUG,
            __FUNCTION__
            << " size="
            << inputImage->Width() << "x" << inputImage->Height());

    stats_.FrameIn();

    assert (aFrameTypesLength != 0);

    GMPVideoFrameType requested_type = aFrameTypes[0];
    worker_thread_->Post (WrapTaskRefCounted (
                            this, &OpenH264VideoEncoder::Encode_w,
                            inputImage,
                            requested_type));
  }
Exemplo n.º 12
0
  // Set up the OpenH264 decoder (worker thread, decoder instance, error
  // concealment), then, if AVCC codec-specific data was supplied, convert
  // its SPS/PPS sets to Annex B and prime the decoder with them.
  virtual void InitDecode (const GMPVideoCodec& codecSettings,
                           const uint8_t* aCodecSpecific,
                           uint32_t aCodecSpecificSize,
                           GMPVideoDecoderCallback* callback,
                           int32_t coreCount) {
    callback_ = callback;

    GMPLOG (GL_INFO, "InitDecode");

    GMPErr err = g_platform_api->createthread (&worker_thread_);
    if (err != GMPNoErr) {
      GMPLOG (GL_ERROR, "Couldn't create new thread");
      Error (GMPGenericErr);
      return;
    }

    if (WelsCreateDecoder (&decoder_)) {
      GMPLOG (GL_ERROR, "Couldn't create decoder");
      Error (GMPGenericErr);
      return;
    }

    if (!decoder_) {
      GMPLOG (GL_ERROR, "Couldn't create decoder");
      Error (GMPGenericErr);
      return;
    }

    SDecodingParam param;
    memset (&param, 0, sizeof (param));
    param.uiTargetDqLayer = UCHAR_MAX;  // Default value
    param.eEcActiveIdc = ERROR_CON_SLICE_MV_COPY_CROSS_IDR_FREEZE_RES_CHANGE; // Error concealment on.
    param.sVideoProperty.size = sizeof(param.sVideoProperty);
    param.sVideoProperty.eVideoBsType = VIDEO_BITSTREAM_DEFAULT;

    if (decoder_->Initialize (&param)) {
      GMPLOG (GL_ERROR, "Couldn't initialize decoder");
      Error (GMPGenericErr);
      return;
    }

    if (aCodecSpecific && aCodecSpecificSize >= sizeof(GMPVideoCodecH264)) {
      std::vector<uint8_t> annexb;

      // Convert the AVCC data, starting at the byte containing
      // numOfSequenceParameterSets, to Annex B format.
      // NOTE(review): the SPS/PPS sizes read below are trusted without
      // bounds-checking against aCodecSpecificSize — confirm the caller
      // validates the AVCC blob before it reaches us.
      const uint8_t* avcc = aCodecSpecific + offsetof(GMPVideoCodecH264, mAVCC.mNumSPS);

      static const int kSPSMask = (1 << 5) - 1;
      uint8_t spsCount = *avcc++ & kSPSMask;
      for (int i = 0; i < spsCount; ++i) {
        size_t size = readU16BE(avcc);
        avcc += 2;
        copyWithStartCode(annexb, avcc, size);
        avcc += size;
      }

      uint8_t ppsCount = *avcc++;
      for (int i = 0; i < ppsCount; ++i) {
        size_t size = readU16BE(avcc);
        avcc += 2;
        copyWithStartCode(annexb, avcc, size);
        avcc += size;
      }

      // Fix: dereferencing begin() of an empty vector is undefined
      // behavior, and both the SPS and PPS counts may legitimately be
      // zero. Skip the priming decode in that case and use data().
      if (annexb.empty()) {
        return;
      }

      SBufferInfo decoded;
      memset (&decoded, 0, sizeof (decoded));
      unsigned char* data[3] = {nullptr, nullptr, nullptr};
      DECODING_STATE dState = decoder_->DecodeFrame2 (annexb.data(),
                                                      annexb.size(),
                                                      data,
                                                      &decoded);
      if (dState) {
        GMPLOG (GL_ERROR, "Decoding error dState=" << dState);
      }
      GMPLOG (GL_ERROR, "InitDecode iBufferStatus=" << decoded.iBufferStatus);
    }
  }
Exemplo n.º 13
0
  // Main-thread half of encoding: package the bitstream produced by
  // Encode_w into a GMPVideoEncodedFrame (converting Annex B start codes
  // to GMP_BufferLength32 length prefixes in place) and deliver it.
  // The input i420 frame is destroyed on return via the SelfDestruct guard.
  void Encode_m (GMPVideoi420Frame* frame, SFrameBSInfo* encoded,
                 GMPVideoFrameType frame_type) {
    // Attach a self-destructor so that this dies on return.
    SelfDestruct<GMPVideoi420Frame> ifd (frame);
    
    if (!host_) {
      return;
    }
    
    // Now return the encoded data back to the parent.
    GMPVideoFrame* ftmp;
    GMPErr err = host_->CreateFrame (kGMPEncodedVideoFrame, &ftmp);
    if (err != GMPNoErr) {
      GMPLOG (GL_ERROR, "Error creating encoded frame");
      return;
    }

    GMPVideoEncodedFrame* f = static_cast<GMPVideoEncodedFrame*> (ftmp);
    // Buffer up the data.
    uint32_t length = 0;
    std::vector<uint32_t> lengths;

    // First pass: walk every NAL of every layer, rewriting each 4-byte
    // Annex B start code (00 00 00 01, which reads as 0x01000000 as a
    // uint32 on a little-endian host) into the NAL's length, and totalling
    // per-layer and overall byte counts.
    for (int i = 0; i < encoded->iLayerNum; ++i) {
      lengths.push_back (0);
      uint8_t* tmp = encoded->sLayerInfo[i].pBsBuf;
      for (int j = 0; j < encoded->sLayerInfo[i].iNalCount; ++j) {
        lengths[i] += encoded->sLayerInfo[i].pNalLengthInByte[j];
        // Convert from 4-byte start codes to GMP_BufferLength32 (NAL lengths)
        assert (* (reinterpret_cast<uint32_t*> (tmp)) == 0x01000000);
        // BufferType32 doesn't include the length of the length itself!
        * (reinterpret_cast<uint32_t*> (tmp)) = encoded->sLayerInfo[i].pNalLengthInByte[j] - sizeof (uint32_t);
        length += encoded->sLayerInfo[i].pNalLengthInByte[j];
        tmp += encoded->sLayerInfo[i].pNalLengthInByte[j];
      }
    }

    err = f->CreateEmptyFrame (length);
    if (err != GMPNoErr) {
      GMPLOG (GL_ERROR, "Error allocating frame data");
      f->Destroy();
      return;
    }

    // Copy the data.
    // Here we concatenate into one big buffer
    uint8_t* tmp = f->Buffer();
    for (int i = 0; i < encoded->iLayerNum; ++i) {
      memcpy (tmp, encoded->sLayerInfo[i].pBsBuf, lengths[i]);
      tmp += lengths[i];
    }

    f->SetEncodedWidth (frame->Width());
    f->SetEncodedHeight (frame->Height());
    f->SetTimeStamp (frame->Timestamp());
    f->SetFrameType (frame_type);
    f->SetCompleteFrame (true);
    f->SetBufferType (GMP_BufferLength32);

    GMPLOG (GL_DEBUG, "Encoding complete. type= "
            << f->FrameType()
            << " length="
            << f->Size()
            << " timestamp="
            << f->TimeStamp());

    // Return the encoded frame.
    GMPCodecSpecificInfo info;
    memset (&info, 0, sizeof (info)); // shouldn't be needed, we init everything
    info.mCodecType = kGMPVideoCodecH264;
    info.mBufferType = GMP_BufferLength32;
    info.mCodecSpecific.mH264.mSimulcastIdx = 0;

    // NOTE(review): if callback_ is null, |f| is never consumed or
    // destroyed — confirm whether that path can occur in practice.
    if (callback_) {
      callback_->Encoded (f, reinterpret_cast<uint8_t*> (&info), sizeof (info));
    }

    stats_.FrameOut();
  }
Exemplo n.º 14
0
  // Worker-thread half of encoding: run the OpenH264 encoder on one i420
  // frame, classify the result, and hand deliverable bitstreams back to
  // the main thread (Encode_m). Frames that produce no bitstream are still
  // routed to the main thread so the input image is destroyed there.
  void Encode_w (GMPVideoi420Frame* inputImage,
                 GMPVideoFrameType frame_type) {
    SFrameBSInfo encoded;
    // Fix: zero-initialize the bitstream info. If EncodeFrame fails we
    // still switch on encoded.eFrameType below; without this memset that
    // read is uninitialized garbage. (Matches the memset pattern used for
    // SBufferInfo/SDecodingParam elsewhere in this file.)
    memset (&encoded, 0, sizeof (encoded));

    if (frame_type  == kGMPKeyFrame) {
      encoder_->ForceIntraFrame (true);
      if (!inputImage)
        return;
    }
    if (!inputImage) {
      GMPLOG (GL_ERROR, "no input image");
      return;
    }
    SSourcePicture src;

    // Wire the GMP planes straight into the encoder's source picture; no
    // copy is made, so inputImage must stay alive through EncodeFrame.
    src.iColorFormat = videoFormatI420;
    src.iStride[0] = inputImage->Stride (kGMPYPlane);
    src.pData[0] = reinterpret_cast<unsigned char*> (
                     const_cast<uint8_t*> (inputImage->Buffer (kGMPYPlane)));
    src.iStride[1] = inputImage->Stride (kGMPUPlane);
    src.pData[1] = reinterpret_cast<unsigned char*> (
                     const_cast<uint8_t*> (inputImage->Buffer (kGMPUPlane)));
    src.iStride[2] = inputImage->Stride (kGMPVPlane);
    src.pData[2] = reinterpret_cast<unsigned char*> (
                     const_cast<uint8_t*> (inputImage->Buffer (kGMPVPlane)));
    src.iStride[3] = 0;
    src.pData[3] = nullptr;
    src.iPicWidth = inputImage->Width();
    src.iPicHeight = inputImage->Height();
    src.uiTimeStamp = inputImage->Timestamp() / 1000; //encoder needs millisecond
    const SSourcePicture* pics = &src;

    int result = encoder_->EncodeFrame (pics, &encoded);
    if (result != cmResultSuccess) {
      GMPLOG (GL_ERROR, "Couldn't encode frame. Error = " << result);
    }


    // Translate int to enum
    GMPVideoFrameType encoded_type;
    bool has_frame = false;

    switch (encoded.eFrameType) {
    case videoFrameTypeIDR:
      encoded_type = kGMPKeyFrame;
      has_frame = true;
      break;
    case videoFrameTypeI:
      encoded_type = kGMPKeyFrame;
      has_frame = true;
      break;
    case videoFrameTypeP:
      encoded_type = kGMPDeltaFrame;
      has_frame = true;
      break;
    case videoFrameTypeSkip:
      // Can skip the call back since no actual bitstream will be generated
      break;
    case videoFrameTypeIPMixed://this type is currently not suppported
    case videoFrameTypeInvalid:
      GMPLOG (GL_ERROR, "Couldn't encode frame. Type = "
              << encoded.eFrameType);
      break;
    default:
      // The API is defined as returning a type.
      assert (false);
      break;
    }

    if (!has_frame) {
      // This frame must be destroyed on the main thread.
      TrySyncRunOnMainThread (WrapTask (
                                   this,
                                   &OpenH264VideoEncoder::DestroyInputFrame_m,
                                   inputImage));
      return;
    }

    // Synchronously send this back to the main thread for delivery.
    // (&encoded is a stack address; safe only because this call blocks
    // until Encode_m finishes.)
    TrySyncRunOnMainThread (WrapTask (
                                   this,
                                   &OpenH264VideoEncoder::Encode_m,
                                   inputImage,
                                   &encoded,
                                   encoded_type));
  }
Exemplo n.º 15
0
 // Apply new target bitrate (kbps) and framerate to a live encoder.
 // Either value is only pushed to the encoder when it actually differs
 // from the encoder's current setting; any Get/SetOption failure reports
 // GMPGenericErr and aborts.
 virtual void SetRates (uint32_t aNewBitRate, uint32_t aFrameRate) {
   GMPLOG (GL_INFO, "[SetRates] Begin with: "
           << aNewBitRate << " , " << aFrameRate);
   //update bitrate if needed
   const int32_t newBitRate = aNewBitRate * 1000; //kbps->bps
   SBitrateInfo existEncoderBitRate;
   // Fix: zero-initialize before use — iBitrate is logged on the error
   // path below before GetOption has filled it in.
   memset (&existEncoderBitRate, 0, sizeof (existEncoderBitRate));
   existEncoderBitRate.iLayer = SPATIAL_LAYER_ALL;
   int rv = encoder_->GetOption (ENCODER_OPTION_BITRATE, &existEncoderBitRate);
   if (rv != cmResultSuccess) {
     GMPLOG (GL_ERROR, "[SetRates] Error in Getting Bit Rate at Layer:"
             << rv
             << " ; Layer = "
             << existEncoderBitRate.iLayer
             << " ; BR = "
             << existEncoderBitRate.iBitrate);
     Error (GMPGenericErr);
     return;
   }
   if (rv == cmResultSuccess && existEncoderBitRate.iBitrate != newBitRate) {
     SBitrateInfo newEncoderBitRate;
     // Fix: don't pass a struct with indeterminate padding/fields to the
     // encoder; zero it, then set the two meaningful members.
     memset (&newEncoderBitRate, 0, sizeof (newEncoderBitRate));
     newEncoderBitRate.iLayer = SPATIAL_LAYER_ALL;
     newEncoderBitRate.iBitrate = newBitRate;
     rv = encoder_->SetOption (ENCODER_OPTION_BITRATE, &newEncoderBitRate);
     if (rv == cmResultSuccess) {
       GMPLOG (GL_INFO, "[SetRates] Update Encoder Bandwidth (AllLayers): ReturnValue: "
               << rv
               << "BitRate(kbps): "
               << aNewBitRate);
     } else {
       GMPLOG (GL_ERROR, "[SetRates] Error in Setting Bit Rate at Layer:"
               << rv
               << " ; Layer = "
               << newEncoderBitRate.iLayer
               << " ; BR = "
               << newEncoderBitRate.iBitrate);
       Error (GMPGenericErr);
       return;
     }
   }
   //update framerate if needed
   float existFrameRate = 0;
   rv = encoder_->GetOption (ENCODER_OPTION_FRAME_RATE, &existFrameRate);
   if (rv != cmResultSuccess) {
     GMPLOG (GL_ERROR, "[SetRates] Error in Getting Frame Rate:"
             << rv << " FrameRate: " << existFrameRate);
     Error (GMPGenericErr);
     return;
   }
   // Tolerance comparison: only touch the encoder when the rate moved by
   // more than 0.001 fps in either direction.
   if (rv == cmResultSuccess &&
       (aFrameRate - existFrameRate > 0.001f ||
        existFrameRate - aFrameRate > 0.001f)) {
     float newFrameRate = static_cast<float> (aFrameRate);
     rv = encoder_->SetOption (ENCODER_OPTION_FRAME_RATE, &newFrameRate);
     if (rv == cmResultSuccess) {
       GMPLOG (GL_INFO, "[SetRates] Update Encoder Frame Rate: ReturnValue: "
               << rv << " FrameRate: " << aFrameRate);
     } else {
       GMPLOG (GL_ERROR, "[SetRates] Error in Setting Frame Rate: ReturnValue: "
               << rv << " FrameRate: " << aFrameRate);
       Error (GMPGenericErr);
       return;
     }
   }
 }
Exemplo n.º 16
0
  // Initialize the OpenH264 encoder with extended (SEncParamExt)
  // parameters: worker thread, encoder instance, usage-type/bitrate/
  // framerate translation, a single spatial layer whose resolution is
  // clamped to the encoder's macroblock capacity, and optional
  // size-limited slicing for packetization-mode=0.
  virtual void InitEncode (const GMPVideoCodec& codecSettings,
                           const uint8_t* aCodecSpecific,
                           uint32_t aCodecSpecificSize,
                           GMPVideoEncoderCallback* callback,
                           int32_t numberOfCores,
                           uint32_t maxPayloadSize) {
    callback_ = callback;

    GMPErr err = g_platform_api->createthread (&worker_thread_);
    if (err != GMPNoErr) {
      GMPLOG (GL_ERROR, "Couldn't create new thread");
      Error (GMPGenericErr);
      return;
    }

    int rv = WelsCreateSVCEncoder (&encoder_);
    if (rv) {
      Error (GMPGenericErr);
      return;
    }
    SEncParamExt param;
    memset (&param, 0, sizeof (param));
    // Start from the encoder's own defaults, then override below.
    encoder_->GetDefaultParams (&param);

    GMPLOG (GL_INFO, "Initializing encoder at "
            << codecSettings.mWidth
            << "x"
            << codecSettings.mHeight
            << "@"
            << static_cast<int> (codecSettings.mMaxFramerate));

    // Translate parameters.
    param.iUsageType = CAMERA_VIDEO_REAL_TIME;
    if(codecSettings.mMode == kGMPScreensharing)
      param.iUsageType = SCREEN_CONTENT_REAL_TIME;
    param.iPicWidth = codecSettings.mWidth;
    param.iPicHeight = codecSettings.mHeight;
    param.iRCMode = RC_BITRATE_MODE;
    // GMP bitrates are kbps; OpenH264 expects bps.
    param.iTargetBitrate = codecSettings.mStartBitrate * 1000;
    param.iMaxBitrate = codecSettings.mMaxBitrate * 1000;
    GMPLOG (GL_INFO, "Initializing Bit Rate at: Start: "
            << codecSettings.mStartBitrate
            << "; Min: "
            << codecSettings.mMinBitrate
            << "; Max: "
            << codecSettings.mMaxBitrate
            << "; Max payload size:"
            << maxPayloadSize);

    param.uiMaxNalSize = maxPayloadSize;

    // TODO([email protected]). Scary conversion from unsigned char to float below.
    param.fMaxFrameRate = static_cast<float> (codecSettings.mMaxFramerate);

    // Set up layers. Currently we have one layer.
    SSpatialLayerConfig* layer = &param.sSpatialLayers[0];

    // Make sure the output resolution doesn't exceed the Openh264 capability
    // (OPENH264_MAX_MB is the macroblock budget; a macroblock is 16x16).
    double width_mb = std::ceil(codecSettings.mWidth/16.0);
    double height_mb = std::ceil(codecSettings.mHeight/16.0);
    double input_mb = width_mb * height_mb;
    if (static_cast<uint32_t>(input_mb) > OPENH264_MAX_MB) {
      // Scale both dimensions by sqrt of the overage so the aspect ratio
      // is preserved while the MB count fits the budget.
      double scale = std::sqrt(OPENH264_MAX_MB / input_mb);
      layer->iVideoWidth = static_cast<uint32_t>(width_mb * 16 * scale);
      layer->iVideoHeight = static_cast<uint32_t>(height_mb * 16 * scale);
      GMPLOG (GL_INFO, "InitEncode: the output resolution overflows, w x h = " << codecSettings.mWidth << " x " << codecSettings.mHeight
              << ", turned to be " << layer->iVideoWidth << " x " << layer->iVideoHeight);
    } else {
      layer->iVideoWidth = codecSettings.mWidth;
      layer->iVideoHeight = codecSettings.mHeight;
    }
    // Never go below one macroblock in either dimension.
    if (layer->iVideoWidth < 16) {
      layer->iVideoWidth = 16;
    }
    if (layer->iVideoHeight < 16) {
      layer->iVideoHeight = 16;
    }

    layer->fFrameRate = param.fMaxFrameRate;
    layer->iSpatialBitrate = param.iTargetBitrate;
    layer->iMaxSpatialBitrate = param.iMaxBitrate;

    //for controlling the NAL size (normally for packetization-mode=0)
    if (maxPayloadSize != 0) {
      layer->sSliceArgument.uiSliceMode = SM_SIZELIMITED_SLICE;
      layer->sSliceArgument.uiSliceSizeConstraint = maxPayloadSize;
    }
    rv = encoder_->InitializeExt (&param);
    if (rv) {
      GMPLOG (GL_ERROR, "Couldn't initialize encoder");
      Error (GMPGenericErr);
      return;
    }
    max_payload_size_ = maxPayloadSize;
    GMPLOG (GL_INFO, "Initialized encoder");
  }
Exemplo n.º 17
0
  // Return the decoded data back to the parent.
  //
  // Fake decode: validates the magic-tagged EncodedFrame payload, then
  // reconstructs an I420 frame by filling each plane with the single
  // average value the fake encoder stored. NOTE(review): inputFrame is
  // not destroyed here, unlike the real decoder's Decode_m — confirm the
  // caller owns its destruction.
  void Decode_m (GMPVideoEncodedFrame* inputFrame,
                 int64_t renderTimeMs) {
    EncodedFrame *eframe;
    // The fake payload is a fixed-size struct; anything else is garbage.
    if (inputFrame->Size() != (sizeof(*eframe))) {
      GMPLOG (GL_ERROR, "Couldn't decode frame. Size=" << inputFrame->Size());
      return;
    }
    eframe = reinterpret_cast<EncodedFrame*>(inputFrame->Buffer());

    if (eframe->magic_ != ENCODED_FRAME_MAGIC) {
      GMPLOG (GL_ERROR, "Couldn't decode frame. Magic=" << eframe->magic_);
      return;
    }

    int width = eframe->width_;
    int height = eframe->height_;
    int ystride = eframe->width_;
    int uvstride = eframe->width_/2;

    GMPLOG (GL_DEBUG, "Video frame ready for display "
            << width
            << "x"
            << height
            << " timestamp="
            << inputFrame->TimeStamp());

    GMPVideoFrame* ftmp = NULL;

    // Translate the image.
    GMPErr err = host_->CreateFrame (kGMPI420VideoFrame, &ftmp);
    if (err != GMPNoErr) {
      GMPLOG (GL_ERROR, "Couldn't allocate empty I420 frame");
      return;
    }

    GMPVideoi420Frame* frame = static_cast<GMPVideoi420Frame*> (ftmp);
    err = frame->CreateEmptyFrame (
        width, height,
        ystride, uvstride, uvstride);
    if (err != GMPNoErr) {
      GMPLOG (GL_ERROR, "Couldn't make decoded frame");
      // Fix: release the frame created above on this error path, or it
      // leaks.
      frame->Destroy();
      return;
    }

    // Paint each plane with its stored average value.
    memset(frame->Buffer(kGMPYPlane),
           eframe->y_,
           frame->AllocatedSize(kGMPYPlane));
    memset(frame->Buffer(kGMPUPlane),
           eframe->u_,
           frame->AllocatedSize(kGMPUPlane));
    memset(frame->Buffer(kGMPVPlane),
           eframe->v_,
           frame->AllocatedSize(kGMPVPlane));

    GMPLOG (GL_DEBUG, "Allocated size = "
            << frame->AllocatedSize (kGMPYPlane));
    frame->SetTimestamp (inputFrame->TimeStamp());
    frame->SetDuration (inputFrame->Duration());
    // Ownership of |frame| passes to the callback.
    callback_->Decoded (frame);

  }
Exemplo n.º 18
0
  // Fake "encode": collapse each plane of the input image to its average
  // value and ship those bytes, plus dimensions and a magic tag, as a
  // payload that superficially resembles a single H.264 IDR NAL.
  void Encode_m (GMPVideoi420Frame* inputImage,
                 GMPVideoFrameType frame_type) {
    if (!inputImage) {
      // A missing image is tolerated silently for keyframe requests and
      // logged otherwise; either way there is nothing to encode.
      if (frame_type != kGMPKeyFrame) {
        GMPLOG (GL_ERROR, "no input image");
      }
      return;
    }

    // Now return the encoded data back to the parent.
    GMPVideoFrame* ftmp;
    GMPErr err = host_->CreateFrame (kGMPEncodedVideoFrame, &ftmp);
    if (err != GMPNoErr) {
      GMPLOG (GL_ERROR, "Error creating encoded frame");
      return;
    }

    GMPVideoEncodedFrame* f = static_cast<GMPVideoEncodedFrame*> (ftmp);

    // Build the pretend-H.264 payload (no SPS/PPS). This really should
    // convert the fields to network byte order.
    EncodedFrame payload;
    payload.length_ = sizeof (payload) - sizeof (uint32_t);
    payload.h264_compat_ = 5; // Emulate a H.264 IDR NAL.
    payload.magic_ = ENCODED_FRAME_MAGIC;
    payload.width_ = inputImage->Width();
    payload.height_ = inputImage->Height();
    payload.y_ = AveragePlane (inputImage->Buffer (kGMPYPlane),
                               inputImage->AllocatedSize (kGMPYPlane));
    payload.u_ = AveragePlane (inputImage->Buffer (kGMPUPlane),
                               inputImage->AllocatedSize (kGMPUPlane));
    payload.v_ = AveragePlane (inputImage->Buffer (kGMPVPlane),
                               inputImage->AllocatedSize (kGMPVPlane));
    payload.timestamp_ = inputImage->Timestamp();

    err = f->CreateEmptyFrame (sizeof (payload));
    if (err != GMPNoErr) {
      GMPLOG (GL_ERROR, "Error allocating frame data");
      f->Destroy();
      return;
    }
    memcpy (f->Buffer(), &payload, sizeof (payload));

    f->SetEncodedWidth (inputImage->Width());
    f->SetEncodedHeight (inputImage->Height());
    f->SetTimeStamp (inputImage->Timestamp());
    f->SetFrameType (frame_type);
    f->SetCompleteFrame (true);
    f->SetBufferType (GMP_BufferLength32);

    GMPLOG (GL_DEBUG, "Encoding complete. type= "
            << f->FrameType()
            << " length="
            << f->Size()
            << " timestamp="
            << f->TimeStamp());

    // Return the encoded frame.
    GMPCodecSpecificInfo info;
    memset (&info, 0, sizeof (info));
    info.mCodecType = kGMPVideoCodecH264;
    info.mBufferType = GMP_BufferLength32;
    info.mCodecSpecific.mH264.mSimulcastIdx = 0;
    GMPLOG (GL_DEBUG, "Calling callback");
    callback_->Encoded (f, reinterpret_cast<uint8_t*> (&info), sizeof (info));
    GMPLOG (GL_DEBUG, "Callback called");
  }