Example #1
HRESULT CDecMSDKMVC::DeliverOutput(MVCBuffer * pBaseView, MVCBuffer * pExtraView)
{
  mfxStatus sts = MFX_ERR_NONE;

  ASSERT(pBaseView->surface.Info.FrameId.ViewId == 0 && pExtraView->surface.Info.FrameId.ViewId > 0);
  ASSERT(pBaseView->surface.Data.FrameOrder == pExtraView->surface.Data.FrameOrder);

  // Sync base view
  do {
    sts = MFXVideoCORE_SyncOperation(m_mfxSession, pBaseView->sync, 1000);
  } while (sts == MFX_WRN_IN_EXECUTION);
  pBaseView->sync = nullptr;

  // Sync extra view
  do {
    sts = MFXVideoCORE_SyncOperation(m_mfxSession, pExtraView->sync, 1000);
  } while (sts == MFX_WRN_IN_EXECUTION);
  pExtraView->sync = nullptr;

  LAVFrame *pFrame = nullptr;
  AllocateFrame(&pFrame);

  pFrame->width  = pBaseView->surface.Info.CropW;
  pFrame->height = pBaseView->surface.Info.CropH;

  pFrame->data[0]   = pBaseView->surface.Data.Y;
  pFrame->data[1]   = pBaseView->surface.Data.UV;
  pFrame->stereo[0] = pExtraView->surface.Data.Y;
  pFrame->stereo[1] = pExtraView->surface.Data.UV;
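  // stash the MVCBuffer pointers so the custom destructor (msdk_buffer_destruct) can release them once the frame is done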
  pFrame->data[2]   = (uint8_t *)pBaseView;
  pFrame->data[3]   = (uint8_t *)pExtraView;
  pFrame->stride[0] = pBaseView->surface.Data.PitchLow;
  pFrame->stride[1] = pBaseView->surface.Data.PitchLow;

  pFrame->format = LAVPixFmt_NV12;
  pFrame->bpp    = 8;
  pFrame->flags |= LAV_FRAME_FLAG_MVC;

  if (!(pBaseView->surface.Data.DataFlag & MFX_FRAMEDATA_ORIGINAL_TIMESTAMP))
    pBaseView->surface.Data.TimeStamp = MFX_TIMESTAMP_UNKNOWN;

  if (pBaseView->surface.Data.TimeStamp != MFX_TIMESTAMP_UNKNOWN) {
    pFrame->rtStart = pBaseView->surface.Data.TimeStamp;
    pFrame->rtStart -= TIMESTAMP_OFFSET;
  }
  else {
    pFrame->rtStart = AV_NOPTS_VALUE;
  }

  int64_t num = (int64_t)pBaseView->surface.Info.AspectRatioW * pFrame->width;
  int64_t den = (int64_t)pBaseView->surface.Info.AspectRatioH * pFrame->height;
  av_reduce(&pFrame->aspect_ratio.num, &pFrame->aspect_ratio.den, num, den, INT_MAX);

  pFrame->destruct = msdk_buffer_destruct;
  pFrame->priv_data = this;

  GetOffsetSideData(pFrame, pBaseView->surface.Data.TimeStamp);

  return Deliver(pFrame);
}
Example #2
int ff_qsv_encode(AVCodecContext *avctx, QSVEncContext *q,
                  AVPacket *pkt, const AVFrame *frame, int *got_packet)
{
    mfxBitstream bs = { { { 0 } } };

    mfxFrameSurface1 *surf = NULL;
    mfxSyncPoint sync      = NULL;
    int ret;

    if (frame) {
        ret = submit_frame(q, frame, &surf);
        if (ret < 0) {
            av_log(avctx, AV_LOG_ERROR, "Error submitting the frame for encoding.\n");
            return ret;
        }
    }

    ret = ff_alloc_packet(pkt, q->packet_size);
    if (ret < 0) {
        av_log(avctx, AV_LOG_ERROR, "Error allocating the output packet\n");
        return ret;
    }
    bs.Data      = pkt->data;
    bs.MaxLength = pkt->size;

    do {
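        /* positive mfxStatus codes are warnings: retry (sleeping briefly if the device is busy) until we get success or an error */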
        ret = MFXVideoENCODE_EncodeFrameAsync(q->session, NULL, surf, &bs, &sync);
        if (ret == MFX_WRN_DEVICE_BUSY)
            av_usleep(1);
    } while (ret > 0);

    if (ret < 0)
        return (ret == MFX_ERR_MORE_DATA) ? 0 : ff_qsv_error(ret);

    if (ret == MFX_WRN_INCOMPATIBLE_VIDEO_PARAM && frame && frame->interlaced_frame)
        print_interlace_msg(avctx, q);

    if (sync) {
        MFXVideoCORE_SyncOperation(q->session, sync, 60000);

        if (bs.FrameType & MFX_FRAMETYPE_I || bs.FrameType & MFX_FRAMETYPE_xI)
            avctx->coded_frame->pict_type = AV_PICTURE_TYPE_I;
        else if (bs.FrameType & MFX_FRAMETYPE_P || bs.FrameType & MFX_FRAMETYPE_xP)
            avctx->coded_frame->pict_type = AV_PICTURE_TYPE_P;
        else if (bs.FrameType & MFX_FRAMETYPE_B || bs.FrameType & MFX_FRAMETYPE_xB)
            avctx->coded_frame->pict_type = AV_PICTURE_TYPE_B;

        pkt->dts  = av_rescale_q(bs.DecodeTimeStamp, (AVRational){1, 90000}, avctx->time_base);
        pkt->pts  = av_rescale_q(bs.TimeStamp,       (AVRational){1, 90000}, avctx->time_base);
        pkt->size = bs.DataLength;

        if (bs.FrameType & MFX_FRAMETYPE_IDR ||
            bs.FrameType & MFX_FRAMETYPE_xIDR)
            pkt->flags |= AV_PKT_FLAG_KEY;

        *got_packet = 1;
    }

    return 0;
}
Example #3
static GstFlowReturn
gst_msdkdec_finish_task (GstMsdkDec * thiz, MsdkDecTask * task)
{
  GstVideoDecoder *decoder = GST_VIDEO_DECODER (thiz);
  GstFlowReturn flow;
  GstVideoCodecFrame *frame;
  MsdkSurface *surface;
  mfxStatus status;
  GList *l;

  if (G_LIKELY (task->sync_point)) {
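    /* wait (up to 300 s) for the decode task behind this sync point to finish */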
    status =
        MFXVideoCORE_SyncOperation (gst_msdk_context_get_session
        (thiz->context), task->sync_point, 300000);
    if (status != MFX_ERR_NONE) {
      GST_ERROR_OBJECT (thiz, "failed to do sync operation");
      return GST_FLOW_ERROR;
    }
  }

  if (G_LIKELY (task->sync_point || (task->surface && task->decode_only))) {
    gboolean decode_only = task->decode_only;

    frame = gst_msdkdec_get_oldest_frame (decoder);

    l = g_list_find_custom (thiz->decoded_msdk_surfaces, task->surface,
        _find_msdk_surface);
    if (l) {
      surface = l->data;
    } else {
      GST_ERROR_OBJECT (thiz, "Couldn't find the cached MSDK surface");
      return GST_FLOW_ERROR;
    }

    if (G_LIKELY (frame)) {
      if (G_LIKELY (surface->copy.buffer == NULL)) {
        frame->output_buffer = gst_buffer_ref (surface->buf);
      } else {
        gst_video_frame_copy (&surface->copy, &surface->data);
        frame->output_buffer = gst_buffer_ref (surface->copy.buffer);
      }
    }

    free_surface (thiz, surface);
    task->sync_point = NULL;
    task->surface = NULL;
    task->decode_only = FALSE;

    if (!frame)
      return GST_FLOW_FLUSHING;
    gst_video_codec_frame_unref (frame);

    if (decode_only)
      GST_VIDEO_CODEC_FRAME_SET_DECODE_ONLY (frame);
    flow = gst_video_decoder_finish_frame (decoder, frame);
    return flow;
  }
  return GST_FLOW_OK;
}
Example #4
extern "C" __declspec(dllexport) int encodeBitmap(IntelEncoderHandle *pHandle, void *pBitmap, void **ppBuffer)
{
	mfxStatus sts = MFX_ERR_NONE;
	mfxSyncPoint syncp;
	int nEncSurfIdx = 0;

	// Find free frame surface
	nEncSurfIdx = GetFreeSurfaceIndex(pHandle->ppEncSurfaces,pHandle->nEncSurfNum);

	// TODO: 
	//sts = LoadRawFrame(pEncSurfaces[nEncSurfIdx], fSource, true);

	for (;;) {
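		// keep calling EncodeFrameAsync: a warning without a sync point means retry, a sync point means the frame was accepted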
		sts = MFXVideoENCODE_EncodeFrameAsync(pHandle->session, NULL, pHandle->ppEncSurfaces[nEncSurfIdx], &pHandle->mfxBS, &syncp);

		if (MFX_ERR_NONE < sts && !syncp) {  // Repeat the call if warning and no output
			if (MFX_WRN_DEVICE_BUSY == sts) Sleep(5);
		} else if (MFX_ERR_NONE < sts && syncp) {  // Ignore warnings if output is available
			sts = MFX_ERR_NONE; 
			break;
		} else if (MFX_ERR_NOT_ENOUGH_BUFFER == sts) {
			return 0;
		}
		else break;
	}

	if (MFX_ERR_NONE == sts){
		sts = MFXVideoCORE_SyncOperation(pHandle->session, syncp, 60000); // Synchronize. Wait until encoded frame is ready
		if (MFX_ERR_NONE != sts) return 0;

		*ppBuffer = pHandle->mfxBS.Data + pHandle->mfxBS.DataOffset;

		int iResult = (int) pHandle->mfxBS.DataLength;
		pHandle->mfxBS.DataLength = 0;

		return iResult;
	}
	}

	return 0;
}
Example #5
static int qsv_transfer_data_to(AVHWFramesContext *ctx, AVFrame *dst,
                                const AVFrame *src)
{
    QSVFramesContext   *s = ctx->internal->priv;
    mfxFrameSurface1   in = {{ 0 }};
    mfxFrameSurface1 *out = (mfxFrameSurface1*)dst->data[3];

    mfxSyncPoint sync = NULL;
    mfxStatus err;

    in.Info = out->Info;
    in.Data.PitchLow = src->linesize[0];
    in.Data.Y        = src->data[0];
    in.Data.U        = src->data[1];
    in.Data.V        = src->data[2];
    in.Data.A        = src->data[3];

    do {
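        /* push the system-memory frame through the VPP upload session; retry while the device is busy */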
        err = MFXVideoVPP_RunFrameVPPAsync(s->session_upload, &in, out, NULL, &sync);
        if (err == MFX_WRN_DEVICE_BUSY)
            av_usleep(1);
    } while (err == MFX_WRN_DEVICE_BUSY);

    if (err < 0 || !sync) {
        av_log(ctx, AV_LOG_ERROR, "Error uploading the surface\n");
        return AVERROR_UNKNOWN;
    }

    do {
        err = MFXVideoCORE_SyncOperation(s->session_upload, sync, 1000);
    } while (err == MFX_WRN_IN_EXECUTION);
    if (err < 0) {
        av_log(ctx, AV_LOG_ERROR, "Error synchronizing the operation\n");
        return AVERROR_UNKNOWN;
    }

    return 0;
}
Example #6
static GstFlowReturn
gst_msdkvpp_transform (GstBaseTransform * trans, GstBuffer * inbuf,
    GstBuffer * outbuf)
{
  GstMsdkVPP *thiz = GST_MSDKVPP (trans);
  GstClockTime timestamp;
  GstFlowReturn ret = GST_FLOW_OK;
  mfxSession session;
  mfxSyncPoint sync_point = NULL;
  mfxStatus status;
  MsdkSurface *in_surface = NULL;
  MsdkSurface *out_surface = NULL;

  timestamp = GST_BUFFER_TIMESTAMP (inbuf);

  in_surface = get_msdk_surface_from_input_buffer (thiz, inbuf);
  if (!in_surface)
    return GST_FLOW_ERROR;

  if (gst_msdk_is_msdk_buffer (outbuf)) {
    out_surface = g_slice_new0 (MsdkSurface);
    out_surface->surface = gst_msdk_get_surface_from_buffer (outbuf);
  } else {
    GST_ERROR ("Failed to get msdk outsurface!");
    return GST_FLOW_ERROR;
  }

  session = gst_msdk_context_get_session (thiz->context);

  /* outer loop is for handling FrameRate Control and deinterlace use cases */
  do {
    for (;;) {
      status =
          MFXVideoVPP_RunFrameVPPAsync (session, in_surface->surface,
          out_surface->surface, NULL, &sync_point);
      if (status != MFX_WRN_DEVICE_BUSY)
        break;
      /* If device is busy, wait 1ms and retry, as per MSDK's recommendation */
      g_usleep (1000);
    }

    if (status != MFX_ERR_NONE && status != MFX_ERR_MORE_DATA
        && status != MFX_ERR_MORE_SURFACE)
      goto vpp_error;

    /* No output generated */
    if (status == MFX_ERR_MORE_DATA)
      goto error_more_data;

    if (sync_point)
      MFXVideoCORE_SyncOperation (session, sync_point, 10000);

    /* More than one output buffers are generated */
    if (status == MFX_ERR_MORE_SURFACE) {
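      /* VPP produced an extra output frame (frame-rate conversion / deinterlacing): push it now and continue with a fresh buffer */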
      GST_BUFFER_TIMESTAMP (outbuf) = timestamp;
      GST_BUFFER_DURATION (outbuf) = thiz->buffer_duration;
      timestamp += thiz->buffer_duration;
      ret = gst_pad_push (GST_BASE_TRANSFORM_SRC_PAD (trans), outbuf);
      if (ret != GST_FLOW_OK)
        goto error_push_buffer;
      outbuf = create_output_buffer (thiz);
    } else {
      GST_BUFFER_TIMESTAMP (outbuf) = timestamp;
      GST_BUFFER_DURATION (outbuf) = thiz->buffer_duration;
    }
  } while (status == MFX_ERR_MORE_SURFACE);

  free_msdk_surface (in_surface);
  return ret;

vpp_error:
  GST_ERROR_OBJECT (thiz, "MSDK Failed to do VPP");
  free_msdk_surface (in_surface);
  free_msdk_surface (out_surface);
  return GST_FLOW_ERROR;

error_more_data:
  GST_WARNING_OBJECT (thiz,
      "MSDK Requries additional input for processing, "
      "Retruning FLOW_DROPPED since no output buffer was generated");
  free_msdk_surface (in_surface);
  return GST_BASE_TRANSFORM_FLOW_DROPPED;

error_push_buffer:
  {
    free_msdk_surface (in_surface);
    free_msdk_surface (out_surface);
    GST_DEBUG_OBJECT (thiz, "failed to push output buffer: %s",
        gst_flow_get_name (ret));
    return ret;
  }
}
Example #7
static int qsv_decode(AVCodecContext *avctx, QSVContext *q,
                      AVFrame *frame, int *got_frame,
                      AVPacket *avpkt)
{
    QSVFrame *out_frame;
    mfxFrameSurface1 *insurf;
    mfxFrameSurface1 *outsurf;
    mfxSyncPoint *sync;
    mfxBitstream bs = { { { 0 } } };
    int ret;

    if (avpkt->size) {
        bs.Data       = avpkt->data;
        bs.DataLength = avpkt->size;
        bs.MaxLength  = bs.DataLength;
        bs.TimeStamp  = avpkt->pts;
    }

    sync = av_mallocz(sizeof(*sync));
    if (!sync) {
        av_freep(&sync);
        return AVERROR(ENOMEM);
    }

    do {
        ret = get_surface(avctx, q, &insurf);
        if (ret < 0) {
            av_freep(&sync);
            return ret;
        }

        ret = MFXVideoDECODE_DecodeFrameAsync(q->session, avpkt->size ? &bs : NULL,
                                              insurf, &outsurf, sync);
        if (ret == MFX_WRN_DEVICE_BUSY)
            av_usleep(1);

    } while (ret == MFX_WRN_DEVICE_BUSY || ret == MFX_ERR_MORE_SURFACE);

    if (ret != MFX_ERR_NONE &&
        ret != MFX_ERR_MORE_DATA &&
        ret != MFX_WRN_VIDEO_PARAM_CHANGED &&
        ret != MFX_ERR_MORE_SURFACE) {
        av_freep(&sync);
        return ff_qsv_print_error(avctx, ret,
                                  "Error during QSV decoding.");
    }

    /* make sure we do not enter an infinite loop if the SDK
     * did not consume any data and did not return anything */
    if (!*sync && !bs.DataOffset) {
        ff_qsv_print_warning(avctx, ret, "A decode call did not consume any data");
        bs.DataOffset = avpkt->size;
    }

    if (*sync) {
        QSVFrame *out_frame = find_frame(q, outsurf);

        if (!out_frame) {
            av_log(avctx, AV_LOG_ERROR,
                   "The returned surface does not correspond to any frame\n");
            av_freep(&sync);
            return AVERROR_BUG;
        }

        out_frame->queued = 1;
        av_fifo_generic_write(q->async_fifo, &out_frame, sizeof(out_frame), NULL);
        av_fifo_generic_write(q->async_fifo, &sync,      sizeof(sync),      NULL);
    } else {
        av_freep(&sync);
    }

    if (!av_fifo_space(q->async_fifo) ||
        (!avpkt->size && av_fifo_size(q->async_fifo))) {
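        /* the fifo is full, or we are flushing (empty packet) with frames still queued: sync the oldest frame and return it */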
        AVFrame *src_frame;

        av_fifo_generic_read(q->async_fifo, &out_frame, sizeof(out_frame), NULL);
        av_fifo_generic_read(q->async_fifo, &sync,      sizeof(sync),      NULL);
        out_frame->queued = 0;

        do {
            ret = MFXVideoCORE_SyncOperation(q->session, *sync, 1000);
        } while (ret == MFX_WRN_IN_EXECUTION);

        av_freep(&sync);

        src_frame = out_frame->frame;

        ret = av_frame_ref(frame, src_frame);
        if (ret < 0)
            return ret;

        outsurf = &out_frame->surface;

#if FF_API_PKT_PTS
FF_DISABLE_DEPRECATION_WARNINGS
        frame->pkt_pts = outsurf->Data.TimeStamp;
FF_ENABLE_DEPRECATION_WARNINGS
#endif
        frame->pts = outsurf->Data.TimeStamp;

        frame->repeat_pict =
            outsurf->Info.PicStruct & MFX_PICSTRUCT_FRAME_TRIPLING ? 4 :
            outsurf->Info.PicStruct & MFX_PICSTRUCT_FRAME_DOUBLING ? 2 :
            outsurf->Info.PicStruct & MFX_PICSTRUCT_FIELD_REPEATED ? 1 : 0;
        frame->top_field_first =
            outsurf->Info.PicStruct & MFX_PICSTRUCT_FIELD_TFF;
        frame->interlaced_frame =
            !(outsurf->Info.PicStruct & MFX_PICSTRUCT_PROGRESSIVE);

        /* update the surface properties */
        if (avctx->pix_fmt == AV_PIX_FMT_QSV)
            ((mfxFrameSurface1*)frame->data[3])->Info = outsurf->Info;

        *got_frame = 1;
    }

    return bs.DataOffset;
}
Example #8
    void ProcessEncodedFrame(List<DataPacket> &packets, List<PacketType> &packetTypes, DWORD outputTimestamp, int &ctsOffset, mfxU32 wait=0)
    {
        if(!encoded_tasks.Num())
            return;

        encode_task& task = encode_tasks[encoded_tasks[0]];
        auto& sp = task.sp;
        if(MFXVideoCORE_SyncOperation(session, sp, wait) != MFX_ERR_NONE)
            return;

        mfxBitstream& bs = task.bs;

        List<x264_nal_t> nalOut;
        mfxU8 *start = bs.Data + bs.DataOffset,
              *end = bs.Data + bs.DataOffset + bs.DataLength;
        static mfxU8 start_seq[] = {0, 0, 1};
        start = std::search(start, end, start_seq, start_seq+3);
        while(start != end)
        {
            decltype(start) next = std::search(start+1, end, start_seq, start_seq+3);
            x264_nal_t nal;
            nal.i_ref_idc = start[3]>>5;
            nal.i_type = start[3]&0x1f;
            if(nal.i_type == NAL_SLICE_IDR)
                nal.i_ref_idc = NAL_PRIORITY_HIGHEST;
            nal.p_payload = start;
            nal.i_payload = int(next-start);
            nalOut << nal;
            start = next;
        }
        size_t nalNum = nalOut.Num();

        packets.Clear();
        ClearPackets();

        INT64 dts;

        if(bUsingDecodeTimestamp && bs.DecodeTimeStamp != MFX_TIMESTAMP_UNKNOWN)
        {
            dts = msFromTimestamp(bs.DecodeTimeStamp);
        }
        else
            dts = outputTimestamp;

        INT64 in_pts = msFromTimestamp(task.surf.Data.TimeStamp),
              out_pts = msFromTimestamp(bs.TimeStamp);

        if(!bFirstFrameProcessed && nalNum)
        {
            delayOffset = -dts;
            bFirstFrameProcessed = true;
        }

        INT64 ts = INT64(outputTimestamp);
        int timeOffset;

        if(bDupeFrames)
        {
            //if frame duplication is being used, the shift will be insignificant, so just don't bother adjusting audio
            timeOffset = int(out_pts-dts);
            timeOffset += frameShift;

            if(nalNum && timeOffset < 0)
            {
                frameShift -= timeOffset;
                timeOffset = 0;
            }
        }
        else
        {
            timeOffset = int(out_pts+delayOffset-ts);
            timeOffset += ctsOffset;

            //dynamically adjust the CTS for the stream if it gets lower than the current value
            //(thanks to cyrus for suggesting to do this instead of a single shift)
            if(nalNum && timeOffset < 0)
            {
                ctsOffset -= timeOffset;
                timeOffset = 0;
            }
        }
        //Log(TEXT("inpts: %005d, dts: %005d, pts: %005d, timestamp: %005d, offset: %005d, newoffset: %005d"), task.surf.Data.TimeStamp/90, dts, bs.TimeStamp/90, outputTimestamp, timeOffset, bs.TimeStamp/90-dts);

        timeOffset = htonl(timeOffset);

        BYTE *timeOffsetAddr = ((BYTE*)&timeOffset)+1;

        VideoPacket *newPacket = NULL;

        PacketType bestType = PacketType_VideoDisposable;
        bool bFoundFrame = false;

        for(int i=0; i<nalNum; i++)
        {
            x264_nal_t &nal = nalOut[i];

            if(nal.i_type == NAL_SEI)
            {
                BYTE *skip = nal.p_payload;
                while(*(skip++) != 0x1);
                int skipBytes = (int)(skip-nal.p_payload);

                int newPayloadSize = (nal.i_payload-skipBytes);

                if (nal.p_payload[skipBytes+1] == 0x5) {
                    SEIData.Clear();
                    BufferOutputSerializer packetOut(SEIData);

                    packetOut.OutputDword(htonl(newPayloadSize));
                    packetOut.Serialize(nal.p_payload+skipBytes, newPayloadSize);
                } else {
                    if (!newPacket)
                        newPacket = CurrentPackets.CreateNew();

                    BufferOutputSerializer packetOut(newPacket->Packet);

                    packetOut.OutputDword(htonl(newPayloadSize));
                    packetOut.Serialize(nal.p_payload+skipBytes, newPayloadSize);
                }
            }
            /*else if(nal.i_type == NAL_FILLER) //QSV does not produce NAL_FILLER
            {
            BYTE *skip = nal.p_payload;
            while(*(skip++) != 0x1);
            int skipBytes = (int)(skip-nal.p_payload);

            int newPayloadSize = (nal.i_payload-skipBytes);

            if (!newPacket)
            newPacket = CurrentPackets.CreateNew();

            BufferOutputSerializer packetOut(newPacket->Packet);

            packetOut.OutputDword(htonl(newPayloadSize));
            packetOut.Serialize(nal.p_payload+skipBytes, newPayloadSize);
            }*/
            else if(nal.i_type == NAL_SLICE_IDR || nal.i_type == NAL_SLICE)
            {
                BYTE *skip = nal.p_payload;
                while(*(skip++) != 0x1);
                int skipBytes = (int)(skip-nal.p_payload);

                if (!newPacket)
                    newPacket = CurrentPackets.CreateNew();

                if (!bFoundFrame)
                {
                    newPacket->Packet.Insert(0, (nal.i_type == NAL_SLICE_IDR) ? 0x17 : 0x27);
                    newPacket->Packet.Insert(1, 1);
                    newPacket->Packet.InsertArray(2, timeOffsetAddr, 3);

                    bFoundFrame = true;
                }

                int newPayloadSize = (nal.i_payload-skipBytes);
                BufferOutputSerializer packetOut(newPacket->Packet);

                packetOut.OutputDword(htonl(newPayloadSize));
                packetOut.Serialize(nal.p_payload+skipBytes, newPayloadSize);

                switch(nal.i_ref_idc)
                {
                case NAL_PRIORITY_DISPOSABLE:   bestType = MAX(bestType, PacketType_VideoDisposable);  break;
                case NAL_PRIORITY_LOW:          bestType = MAX(bestType, PacketType_VideoLow);         break;
                case NAL_PRIORITY_HIGH:         bestType = MAX(bestType, PacketType_VideoHigh);        break;
                case NAL_PRIORITY_HIGHEST:      bestType = MAX(bestType, PacketType_VideoHighest);     break;
                }
            }
            /*else if(nal.i_type == NAL_SPS)
            {
            VideoPacket *newPacket = CurrentPackets.CreateNew();
            BufferOutputSerializer headerOut(newPacket->Packet);

            headerOut.OutputByte(0x17);
            headerOut.OutputByte(0);
            headerOut.Serialize(timeOffsetAddr, 3);
            headerOut.OutputByte(1);
            headerOut.Serialize(nal.p_payload+5, 3);
            headerOut.OutputByte(0xff);
            headerOut.OutputByte(0xe1);
            headerOut.OutputWord(htons(nal.i_payload-4));
            headerOut.Serialize(nal.p_payload+4, nal.i_payload-4);

            x264_nal_t &pps = nalOut[i+1]; //the PPS always comes after the SPS

            headerOut.OutputByte(1);
            headerOut.OutputWord(htons(pps.i_payload-4));
            headerOut.Serialize(pps.p_payload+4, pps.i_payload-4);
            }*/
            else
                continue;
        }

        packetTypes << bestType;

        packets.SetSize(CurrentPackets.Num());
        for(UINT i=0; i<packets.Num(); i++)
        {
            packets[i].lpPacket = CurrentPackets[i].Packet.Array();
            packets[i].size     = CurrentPackets[i].Packet.Num();
        }

        msdk_locked_tasks << encoded_tasks[0];
        encoded_tasks.Remove(0);
    }
Example #9
int ff_qsv_encode(AVCodecContext *avctx, QSVEncContext *q,
                  AVPacket *pkt, const AVFrame *frame, int *got_packet)
{
    AVPacket new_pkt = { 0 };
    mfxBitstream *bs;

    mfxFrameSurface1 *surf = NULL;
    mfxSyncPoint sync      = NULL;
    int ret;

    if (frame) {
        ret = submit_frame(q, frame, &surf);
        if (ret < 0) {
            av_log(avctx, AV_LOG_ERROR, "Error submitting the frame for encoding.\n");
            return ret;
        }
    }

    ret = av_new_packet(&new_pkt, q->packet_size);
    if (ret < 0) {
        av_log(avctx, AV_LOG_ERROR, "Error allocating the output packet\n");
        return ret;
    }

    bs = av_mallocz(sizeof(*bs));
    if (!bs) {
        av_packet_unref(&new_pkt);
        return AVERROR(ENOMEM);
    }
    bs->Data      = new_pkt.data;
    bs->MaxLength = new_pkt.size;

    do {
        ret = MFXVideoENCODE_EncodeFrameAsync(q->session, NULL, surf, bs, &sync);
        if (ret == MFX_WRN_DEVICE_BUSY) {
            av_usleep(500);
            continue;
        }
        break;
    } while ( 1 );

    if (ret < 0) {
        av_packet_unref(&new_pkt);
        av_freep(&bs);
        if (ret == MFX_ERR_MORE_DATA)
            return 0;
        av_log(avctx, AV_LOG_ERROR, "EncodeFrameAsync returned %d\n", ret);
        return ff_qsv_error(ret);
    }

    if (ret == MFX_WRN_INCOMPATIBLE_VIDEO_PARAM) {
        if (frame && frame->interlaced_frame)
            print_interlace_msg(avctx, q);
        else
            av_log(avctx, AV_LOG_WARNING,
                   "EncodeFrameAsync returned 'incompatible param' code\n");
    }
    if (sync) {
        av_fifo_generic_write(q->async_fifo, &new_pkt, sizeof(new_pkt), NULL);
        av_fifo_generic_write(q->async_fifo, &sync,    sizeof(sync),    NULL);
        av_fifo_generic_write(q->async_fifo, &bs,      sizeof(bs),    NULL);
    } else {
        av_packet_unref(&new_pkt);
        av_freep(&bs);
    }

    if (!av_fifo_space(q->async_fifo) ||
        (!frame && av_fifo_size(q->async_fifo))) {
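        /* the fifo is full, or we are draining (frame == NULL) with tasks still pending: sync the oldest task and emit its packet */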
        av_fifo_generic_read(q->async_fifo, &new_pkt, sizeof(new_pkt), NULL);
        av_fifo_generic_read(q->async_fifo, &sync,    sizeof(sync),    NULL);
        av_fifo_generic_read(q->async_fifo, &bs,      sizeof(bs),      NULL);

        MFXVideoCORE_SyncOperation(q->session, sync, 60000);

        new_pkt.dts  = av_rescale_q(bs->DecodeTimeStamp, (AVRational){1, 90000}, avctx->time_base);
        new_pkt.pts  = av_rescale_q(bs->TimeStamp,       (AVRational){1, 90000}, avctx->time_base);
        new_pkt.size = bs->DataLength;

        if (bs->FrameType & MFX_FRAMETYPE_IDR ||
            bs->FrameType & MFX_FRAMETYPE_xIDR)
            new_pkt.flags |= AV_PKT_FLAG_KEY;

#if FF_API_CODED_FRAME
FF_DISABLE_DEPRECATION_WARNINGS
        if (bs->FrameType & MFX_FRAMETYPE_I || bs->FrameType & MFX_FRAMETYPE_xI)
            avctx->coded_frame->pict_type = AV_PICTURE_TYPE_I;
        else if (bs->FrameType & MFX_FRAMETYPE_P || bs->FrameType & MFX_FRAMETYPE_xP)
            avctx->coded_frame->pict_type = AV_PICTURE_TYPE_P;
        else if (bs->FrameType & MFX_FRAMETYPE_B || bs->FrameType & MFX_FRAMETYPE_xB)
            avctx->coded_frame->pict_type = AV_PICTURE_TYPE_B;
FF_ENABLE_DEPRECATION_WARNINGS
#endif

        av_freep(&bs);

        if (pkt->data) {
            if (pkt->size < new_pkt.size) {
                av_log(avctx, AV_LOG_ERROR, "Submitted buffer not large enough: %d < %d\n",
                       pkt->size, new_pkt.size);
                av_packet_unref(&new_pkt);
                return AVERROR(EINVAL);
            }

            memcpy(pkt->data, new_pkt.data, new_pkt.size);
            pkt->size = new_pkt.size;

            ret = av_packet_copy_props(pkt, &new_pkt);
            av_packet_unref(&new_pkt);
            if (ret < 0)
                return ret;
        } else
            *pkt = new_pkt;

        *got_packet = 1;
    }

    return 0;
}
Example #10
int
mfx_encode(int cid) {
	mfxStatus sts = MFX_ERR_NONE;
	mfxSyncPoint vppsync = NULL;
	mfxSyncPoint encsync = NULL;
	mfxFrameInfo vppdefinfo;
	unsigned long long id = 0;
	unsigned long long fpsunit = 90000 / fps;
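	// mfx timestamps run on a 90 kHz clock, so one frame spans 90000/fps ticks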
	memset(&vppdefinfo, 0, sizeof(vppdefinfo));
	vppdefinfo.PicStruct		= MFX_PICSTRUCT_PROGRESSIVE;
	vppdefinfo.FourCC		= MFX_FOURCC_YV12;	// the only difference
	vppdefinfo.ChromaFormat		= MFX_CHROMAFORMAT_YUV420;
	vppdefinfo.Width		= MFX_ALIGN16(w);
	vppdefinfo.Height		= MFX_ALIGN16(h);
	vppdefinfo.CropW		= w;
	vppdefinfo.CropH		= h;
	vppdefinfo.FrameRateExtN	= fps;
	vppdefinfo.FrameRateExtD	= 1;
	//
	while(sts >= MFX_ERR_NONE || sts == MFX_ERR_MORE_DATA) {
		mfxFrameSurface1 *svppin, *svppout;
		int framesize;
		unsigned char *frame = load_frame(&framesize);
		//
		if(frame == NULL) {
			ga_error("END-OF-FRAME\n");
			break;
		}
		//
		svppin  = frame_pool_get(_vpppool[cid][0], &_vppresponse[cid][0]);
		svppout = frame_pool_get(_vpppool[cid][1], &_vppresponse[cid][1]);
		if(svppin == NULL || svppout == NULL) {
			ga_error("No surface available (%p, %p)\n", svppin, svppout);
			break;
		}
		if(fa_lock(NULL, svppin->Data.MemId, &svppin->Data) != MFX_ERR_NONE) {
			ga_error("Unable to lock VPP frame\n");
			break;
		}
		// fill frame info
		memcpy(&svppin->Info,  &vppdefinfo, sizeof(mfxFrameInfo));
		memcpy(&svppout->Info, &vppdefinfo, sizeof(mfxFrameInfo));
		svppin->Info.FourCC  = MFX_FOURCC_YV12;
		svppout->Info.FourCC = MFX_FOURCC_NV12;
		svppin->Data.TimeStamp = id * fpsunit;
		id++;
		//ga_error("In timestamp = %llu\n", svppin->Data.TimeStamp);
		// copy the frame
		do {
			mfxU8 *dst;
			int i, w2 = w/2, h2 = h/2, p2 = svppin->Data.Pitch/2;
			unsigned char *src = frame;
			// Copy Y
			for(dst = svppin->Data.Y, i = 0; i < h; i++) {
				memcpy(dst, src, w);
				dst += svppin->Data.Pitch;
				src += w;
			}
			// Copy U
			for(dst = svppin->Data.U, i = 0; i < h2; i++) {
				memcpy(dst, src, w2);
				dst += p2;
				src += w2;
			}
			// Copy V
			for(dst = svppin->Data.V, i = 0; i < h2; i++) {
				memcpy(dst, src, w2);
				dst += p2;
				src += w2;
			}
		} while(0);
		//
		if(fa_unlock(NULL, svppin->Data.MemId, &svppin->Data) != MFX_ERR_NONE) {
			ga_error("Unable to unlock VPP frame\n");
			break;
		}
		// do VPP
		sts = mfx_encode_vpp(_session[cid], svppin, svppout, &vppsync);
		// VPP errors?
		if(sts == MFX_ERR_MORE_DATA)	continue;
		if(sts == MFX_ERR_MORE_SURFACE)	continue;
		if(sts != MFX_ERR_NONE) {
			mfx_invalid_status(sts);
			ga_error("video encoder: VPP failed.\n");
			break;
		}
		//
		//MFXVideoCORE_SyncOperation(_session[cid], vppsync, MFX_INFINITE);
		// do ENCODE
		sts = mfx_encode_encode(_session[cid], svppout, &_mfxbs[cid], &encsync);
		//
		if(sts == MFX_ERR_MORE_DATA)	continue;
		if(sts != MFX_ERR_NONE) {
			mfx_invalid_status(sts);
			ga_error("video encoder: encode failed.\n");
			break;
		}
		//
		//ga_error("Out timestamp = d:%lld t:%llu\n", _mfxbs[cid].DecodeTimeStamp, _mfxbs[cid].TimeStamp);
		// save frame
		MFXVideoCORE_SyncOperation(_session[cid], encsync, MFX_INFINITE);
#if 1
		unsigned char *ptr = _mfxbs[cid].Data, *nextptr;
		int naltype, nalsize, nali = 0;
		int off, nextoff;
		// each frame can have only 1 nal ...
		if(ptr != NULL)
			ptr += _mfxbs[cid].DataOffset;
		off = nextoff = 4;
		while(ptr != NULL) {
			// search for startcode 00 00 01 or 00 00 00 01
			for(nextptr = ptr+3;
					nextptr < _mfxbs[cid].Data+_mfxbs[cid].DataOffset+_mfxbs[cid].DataLength-4;
					nextptr++) {
				if(*nextptr == 0 && *(nextptr+1) == 0) {
					if(*(nextptr+2) == 1) {
						// 00 00 01
						nextoff = 3;
						break;
					} else if(*(nextptr+2) == 0 && *(nextptr+3) == 1) {
						// 00 00 00 01
						nextoff = 4;
						break;
					}
				}
			}
			//
			if(nextptr < _mfxbs[cid].Data+_mfxbs[cid].DataOffset+_mfxbs[cid].DataLength-4) {
				nalsize = nextptr - ptr;
				ga_error("XXX: nal[%d] type=%d size=%d\n", nali, *(ptr+off) & 0x1f, nalsize);
				ptr = nextptr;
			} else {
				nalsize = _mfxbs[cid].Data+_mfxbs[cid].DataOffset+_mfxbs[cid].DataLength-ptr;
				ga_error("XXX: nal[%d] type=%d size=%d\n", nali, *(ptr+off) & 0x1f, nalsize);
				ptr = NULL;
			}
			nali++;
			off = nextoff;
		}
#endif
		write_frame(_mfxbs[cid].Data, _mfxbs[cid].DataLength);
		_mfxbs[cid].DataLength = _mfxbs[cid].DataOffset = 0;
	}
	//
	while(sts >= MFX_ERR_NONE) {
		sts = mfx_encode_encode(_session[cid], NULL, &_mfxbs[cid], &encsync);
	}
	//
	return 0;
}
Example #11
int ff_qsv_decode(AVCodecContext *avctx, QSVContext *q,
                  AVFrame *frame, int *got_frame,
                  AVPacket *avpkt)
{
    mfxFrameSurface1 *insurf;
    mfxFrameSurface1 *outsurf;
    mfxSyncPoint sync;
    mfxBitstream bs = { { { 0 } } };
    int ret;

    if (avpkt->size) {
        bs.Data       = avpkt->data;
        bs.DataLength = avpkt->size;
        bs.MaxLength  = bs.DataLength;
        bs.TimeStamp  = avpkt->pts;
    }

    do {
        ret = get_surface(avctx, q, &insurf);
        if (ret < 0)
            return ret;

        ret = MFXVideoDECODE_DecodeFrameAsync(q->session, avpkt->size ? &bs : NULL,
                                              insurf, &outsurf, &sync);
        if (ret == MFX_WRN_DEVICE_BUSY)
            av_usleep(1);

    } while (ret == MFX_WRN_DEVICE_BUSY || ret == MFX_ERR_MORE_SURFACE);

    if (ret != MFX_ERR_NONE &&
        ret != MFX_ERR_MORE_DATA &&
        ret != MFX_WRN_VIDEO_PARAM_CHANGED &&
        ret != MFX_ERR_MORE_SURFACE) {
        av_log(avctx, AV_LOG_ERROR, "Error during QSV decoding.\n");
        return ff_qsv_error(ret);
    }

    if (sync) {
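        /* block (up to 60 s) until the decode task behind this sync point finishes before reading the surface */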
        AVFrame *src_frame;

        MFXVideoCORE_SyncOperation(q->session, sync, 60000);

        src_frame = find_frame(q, outsurf);
        if (!src_frame) {
            av_log(avctx, AV_LOG_ERROR,
                   "The returned surface does not correspond to any frame\n");
            return AVERROR_BUG;
        }

        ret = av_frame_ref(frame, src_frame);
        if (ret < 0)
            return ret;

        frame->pkt_pts = frame->pts = outsurf->Data.TimeStamp;

        frame->repeat_pict =
            outsurf->Info.PicStruct & MFX_PICSTRUCT_FRAME_TRIPLING ? 4 :
            outsurf->Info.PicStruct & MFX_PICSTRUCT_FRAME_DOUBLING ? 2 :
            outsurf->Info.PicStruct & MFX_PICSTRUCT_FIELD_REPEATED ? 1 : 0;
        frame->top_field_first =
            outsurf->Info.PicStruct & MFX_PICSTRUCT_FIELD_TFF;
        frame->interlaced_frame =
            !(outsurf->Info.PicStruct & MFX_PICSTRUCT_PROGRESSIVE);

        *got_frame = 1;
    }

    return bs.DataOffset;
}
Example #12
int ff_qsv_decode(AVCodecContext *avctx, QSVContext *q,
                  AVFrame *frame, int *got_frame,
                  AVPacket *avpkt)
{
    QSVFrame *out_frame;
    mfxFrameSurface1 *insurf;
    mfxFrameSurface1 *outsurf;
    mfxSyncPoint sync;
    mfxBitstream bs = { { { 0 } } };
    int ret;
    int n_out_frames;
    int buffered = 0;

    if (!q->engine_ready) {
        ret = ff_qsv_decode_init(avctx, q, avpkt);
        if (ret)
            return ret;
    }

    if (avpkt->size) {
        if (av_fifo_size(q->input_fifo)) {
            /* we have got rest of previous packet into buffer */
            if (av_fifo_space(q->input_fifo) < avpkt->size) {
                ret = av_fifo_grow(q->input_fifo, avpkt->size);
                if (ret < 0)
                    return ret;
            }
            av_fifo_generic_write(q->input_fifo, avpkt->data, avpkt->size, NULL);
            bs.Data       = q->input_fifo->rptr;
            bs.DataLength = av_fifo_size(q->input_fifo);
            buffered = 1;
        } else {
            bs.Data       = avpkt->data;
            bs.DataLength = avpkt->size;
        }
        bs.MaxLength  = bs.DataLength;
        bs.TimeStamp  = avpkt->pts;
    }

    while (1) {
        ret = get_surface(avctx, q, &insurf);
        if (ret < 0)
            return ret;
        do {
            ret = MFXVideoDECODE_DecodeFrameAsync(q->session, avpkt->size ? &bs : NULL,
                                                  insurf, &outsurf, &sync);
            if (ret != MFX_WRN_DEVICE_BUSY)
                break;
            av_usleep(500);
        } while (1);

        if (MFX_WRN_VIDEO_PARAM_CHANGED==ret) {
            /* TODO: handle here sequence header changing */
        }

        if (sync) {
            QSVFrame *out_frame = find_frame(q, outsurf);

            if (!out_frame) {
                av_log(avctx, AV_LOG_ERROR,
                       "The returned surface does not correspond to any frame\n");
                return AVERROR_BUG;
            }

            out_frame->queued = 1;
            av_fifo_generic_write(q->async_fifo, &out_frame, sizeof(out_frame), NULL);
            av_fifo_generic_write(q->async_fifo, &sync,      sizeof(sync),      NULL);

            continue;
        }
        if (MFX_ERR_MORE_SURFACE != ret && ret < 0)
            break;
    }

    /* make sure we do not enter an infinite loop if the SDK
     * did not consume any data and did not return anything */
    if (!sync && !bs.DataOffset) {
        av_log(avctx, AV_LOG_WARNING, "A decode call did not consume any data\n");
        bs.DataOffset = avpkt->size;
    }

    if (buffered) {
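        /* the bitstream came from the internal buffer: drop the bytes the SDK consumed (bs.DataOffset) */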
        qsv_fifo_relocate(q->input_fifo, bs.DataOffset);
    } else if (bs.DataOffset!=avpkt->size) {
        /* some data of packet was not consumed. store it to local buffer */
        av_fifo_generic_write(q->input_fifo, avpkt->data+bs.DataOffset,
                              avpkt->size - bs.DataOffset, NULL);
    }

    if (MFX_ERR_MORE_DATA!=ret && ret < 0) {
        av_log(avctx, AV_LOG_ERROR, "Error %d during QSV decoding.\n", ret);
        return ff_qsv_error(ret);
    }
    n_out_frames = av_fifo_size(q->async_fifo) / (sizeof(out_frame)+sizeof(sync));

    if (n_out_frames > q->async_depth || (!avpkt->size && n_out_frames) ) {
        AVFrame *src_frame;

        av_fifo_generic_read(q->async_fifo, &out_frame, sizeof(out_frame), NULL);
        av_fifo_generic_read(q->async_fifo, &sync,      sizeof(sync),      NULL);
        out_frame->queued = 0;

        MFXVideoCORE_SyncOperation(q->session, sync, 60000);

        src_frame = out_frame->frame;

        ret = av_frame_ref(frame, src_frame);
        if (ret < 0)
            return ret;

        outsurf = out_frame->surface;

        frame->pkt_pts = frame->pts = outsurf->Data.TimeStamp;

        frame->repeat_pict =
            outsurf->Info.PicStruct & MFX_PICSTRUCT_FRAME_TRIPLING ? 4 :
            outsurf->Info.PicStruct & MFX_PICSTRUCT_FRAME_DOUBLING ? 2 :
            outsurf->Info.PicStruct & MFX_PICSTRUCT_FIELD_REPEATED ? 1 : 0;
        frame->top_field_first =
            outsurf->Info.PicStruct & MFX_PICSTRUCT_FIELD_TFF;
        frame->interlaced_frame =
            !(outsurf->Info.PicStruct & MFX_PICSTRUCT_PROGRESSIVE);

        *got_frame = 1;
    }

    return avpkt->size;
}
Example #13
static int qsvscale_filter_frame(AVFilterLink *link, AVFrame *in)
{
    AVFilterContext             *ctx = link->dst;
    QSVScaleContext               *s = ctx->priv;
    AVFilterLink            *outlink = ctx->outputs[0];

    mfxSyncPoint sync = NULL;
    mfxStatus err;

    AVFrame *out = NULL;
    int ret = 0;

    out = ff_get_video_buffer(outlink, outlink->w, outlink->h);
    if (!out) {
        ret = AVERROR(ENOMEM);
        goto fail;
    }

    do {
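        /* run the scaling VPP operation on the hardware surfaces; retry while the device is busy */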
        err = MFXVideoVPP_RunFrameVPPAsync(s->session,
                                           (mfxFrameSurface1*)in->data[3],
                                           (mfxFrameSurface1*)out->data[3],
                                           NULL, &sync);
        if (err == MFX_WRN_DEVICE_BUSY)
            av_usleep(1);
    } while (err == MFX_WRN_DEVICE_BUSY);

    if (err < 0 || !sync) {
        av_log(ctx, AV_LOG_ERROR, "Error during scaling\n");
        ret = AVERROR_UNKNOWN;
        goto fail;
    }

    do {
        err = MFXVideoCORE_SyncOperation(s->session, sync, 1000);
    } while (err == MFX_WRN_IN_EXECUTION);
    if (err < 0) {
        av_log(ctx, AV_LOG_ERROR, "Error synchronizing the operation: %d\n", err);
        ret = AVERROR_UNKNOWN;
        goto fail;
    }

    ret = av_frame_copy_props(out, in);
    if (ret < 0)
        goto fail;

    out->width  = outlink->w;
    out->height = outlink->h;

    av_reduce(&out->sample_aspect_ratio.num, &out->sample_aspect_ratio.den,
              (int64_t)in->sample_aspect_ratio.num * outlink->h * link->w,
              (int64_t)in->sample_aspect_ratio.den * outlink->w * link->h,
              INT_MAX);

    av_frame_free(&in);
    return ff_filter_frame(outlink, out);
fail:
    av_frame_free(&in);
    av_frame_free(&out);
    return ret;
}
Example #14
static int qsv_transfer_data_to(AVHWFramesContext *ctx, AVFrame *dst,
                                const AVFrame *src)
{
    QSVFramesContext   *s = ctx->internal->priv;
    mfxFrameSurface1   in = {{ 0 }};
    mfxFrameSurface1 *out = (mfxFrameSurface1*)dst->data[3];

    mfxSyncPoint sync = NULL;
    mfxStatus err;
    int ret = 0;

    while (!s->session_upload_init && !s->session_upload && !ret) {
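        /* lazily create the upload session: one thread performs the init, the others wait on the condition variable */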
#if HAVE_PTHREADS
        if (pthread_mutex_trylock(&s->session_lock) == 0) {
#endif
            if (!s->session_upload_init) {
                ret = qsv_init_internal_session(ctx, &s->session_upload, 1);
                if (s->session_upload)
                    s->session_upload_init = 1;
            }
#if HAVE_PTHREADS
            pthread_mutex_unlock(&s->session_lock);
            pthread_cond_signal(&s->session_cond);
        } else {
            pthread_mutex_lock(&s->session_lock);
            while (!s->session_upload_init && !s->session_upload) {
                pthread_cond_wait(&s->session_cond, &s->session_lock);
            }
            pthread_mutex_unlock(&s->session_lock);
        }
#endif
    }
    if (ret < 0)
        return ret;

    if (!s->session_upload) {
        if (s->child_frames_ref)
            return qsv_transfer_data_child(ctx, dst, src);

        av_log(ctx, AV_LOG_ERROR, "Surface upload not possible\n");
        return AVERROR(ENOSYS);
    }

    in.Info = out->Info;
    in.Data.PitchLow = src->linesize[0];
    in.Data.Y        = src->data[0];
    in.Data.U        = src->data[1];
    in.Data.V        = src->data[2];
    in.Data.A        = src->data[3];

    do {
        err = MFXVideoVPP_RunFrameVPPAsync(s->session_upload, &in, out, NULL, &sync);
        if (err == MFX_WRN_DEVICE_BUSY)
            av_usleep(1);
    } while (err == MFX_WRN_DEVICE_BUSY);

    if (err < 0 || !sync) {
        av_log(ctx, AV_LOG_ERROR, "Error uploading the surface\n");
        return AVERROR_UNKNOWN;
    }

    do {
        err = MFXVideoCORE_SyncOperation(s->session_upload, sync, 1000);
    } while (err == MFX_WRN_IN_EXECUTION);
    if (err < 0) {
        av_log(ctx, AV_LOG_ERROR, "Error synchronizing the operation\n");
        return AVERROR_UNKNOWN;
    }

    return 0;
}
Example #15
    bool Encode(LPVOID picInPtr, List<DataPacket> &packets, List<PacketType> &packetTypes, DWORD outputTimestamp, int &ctsOffset)
    {
        bs.DataLength = 0;
        bs.DataOffset = 0;
        mfxFrameSurface1& pic = *(mfxFrameSurface1*)picInPtr;
        enc_surf.Data.Y = pic.Data.Y;
        enc_surf.Data.UV = pic.Data.UV;
        enc_surf.Data.Pitch = pic.Data.Pitch;
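        // the incoming timestamp is in milliseconds; the SDK clock is 90 kHz, hence the *90 below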
        enc_surf.Data.TimeStamp = pic.Data.TimeStamp*90;
        mfxSyncPoint sp = nullptr;
        auto sts = enc->EncodeFrameAsync(bRequestKeyframe ? &ctrl : nullptr, &enc_surf, &bs, &sp);

        sts = MFXVideoCORE_SyncOperation(session, sp, INFINITE);

        List<x264_nal_t> nalOut;
        mfxU8 *start = bs.Data + bs.DataOffset,
              *end = bs.Data + bs.DataOffset + bs.DataLength;
        static mfxU8 start_seq[] = {0, 0, 1};
        start = std::search(start, end, start_seq, start_seq+3);
        while(start != end)
        {
            decltype(start) next = std::search(start+1, end, start_seq, start_seq+3);
            x264_nal_t nal;
            nal.i_ref_idc = start[3]>>5;
            nal.i_type = start[3]&0x1f;
            if(nal.i_type == NAL_SLICE_IDR)
                nal.i_ref_idc = NAL_PRIORITY_HIGHEST;
            nal.p_payload = start;
            nal.i_payload = int(next-start);
            nalOut << nal;
            start = next;
        }
        size_t nalNum = nalOut.Num();

        packets.Clear();
        ClearPackets();

        if(bRequestKeyframe)
            bRequestKeyframe = false;

        if(!bFirstFrameProcessed && nalNum)
        {
            //delayOffset = -picOut.i_dts;
            bFirstFrameProcessed = true;
        }

        INT64 ts = INT64(outputTimestamp);
        int timeOffset = 0;//int((picOut.i_pts+delayOffset)-ts);

        if(bDupeFrames)
        {
            //if frame duplication is being used, the shift will be insignificant, so just don't bother adjusting audio
            timeOffset += frameShift;

            if(nalNum && timeOffset < 0)
            {
                frameShift -= timeOffset;
                timeOffset = 0;
            }
        }
        else
        {
            timeOffset += ctsOffset;

            //dynamically adjust the CTS for the stream if it gets lower than the current value
            //(thanks to cyrus for suggesting to do this instead of a single shift)
            if(nalNum && timeOffset < 0)
            {
                ctsOffset -= timeOffset;
                timeOffset = 0;
            }
        }

        timeOffset = htonl(timeOffset);

        BYTE *timeOffsetAddr = ((BYTE*)&timeOffset)+1;

        VideoPacket *newPacket = NULL;

        PacketType bestType = PacketType_VideoDisposable;
        bool bFoundFrame = false;

        for(int i=0; i<nalNum; i++)
        {
            x264_nal_t &nal = nalOut[i];

            if(nal.i_type == NAL_SEI)
            {
                BYTE *skip = nal.p_payload;
                while(*(skip++) != 0x1);
                int skipBytes = (int)(skip-nal.p_payload);

                int newPayloadSize = (nal.i_payload-skipBytes);

                if (nal.p_payload[skipBytes+1] == 0x5) {
                    SEIData.Clear();
                    BufferOutputSerializer packetOut(SEIData);

                    packetOut.OutputDword(htonl(newPayloadSize));
                    packetOut.Serialize(nal.p_payload+skipBytes, newPayloadSize);
                } else {
                    if (!newPacket)
                        newPacket = CurrentPackets.CreateNew();

                    BufferOutputSerializer packetOut(newPacket->Packet);

                    packetOut.OutputDword(htonl(newPayloadSize));
                    packetOut.Serialize(nal.p_payload+skipBytes, newPayloadSize);
                }
            }
            /*else if(nal.i_type == NAL_FILLER) //QSV does not produce NAL_FILLER
            {
            BYTE *skip = nal.p_payload;
            while(*(skip++) != 0x1);
            int skipBytes = (int)(skip-nal.p_payload);

            int newPayloadSize = (nal.i_payload-skipBytes);

            if (!newPacket)
            newPacket = CurrentPackets.CreateNew();

            BufferOutputSerializer packetOut(newPacket->Packet);

            packetOut.OutputDword(htonl(newPayloadSize));
            packetOut.Serialize(nal.p_payload+skipBytes, newPayloadSize);
            }*/
            else if(nal.i_type == NAL_SLICE_IDR || nal.i_type == NAL_SLICE)
            {
                BYTE *skip = nal.p_payload;
                while(*(skip++) != 0x1);
                int skipBytes = (int)(skip-nal.p_payload);

                if (!newPacket)
                    newPacket = CurrentPackets.CreateNew();

                if (!bFoundFrame)
                {
                    newPacket->Packet.Insert(0, (nal.i_type == NAL_SLICE_IDR) ? 0x17 : 0x27);
                    newPacket->Packet.Insert(1, 1);
                    newPacket->Packet.InsertArray(2, timeOffsetAddr, 3);

                    bFoundFrame = true;
                }

                int newPayloadSize = (nal.i_payload-skipBytes);
                BufferOutputSerializer packetOut(newPacket->Packet);

                packetOut.OutputDword(htonl(newPayloadSize));
                packetOut.Serialize(nal.p_payload+skipBytes, newPayloadSize);

                switch(nal.i_ref_idc)
                {
                case NAL_PRIORITY_DISPOSABLE:   bestType = MAX(bestType, PacketType_VideoDisposable);  break;
                case NAL_PRIORITY_LOW:          bestType = MAX(bestType, PacketType_VideoLow);         break;
                case NAL_PRIORITY_HIGH:         bestType = MAX(bestType, PacketType_VideoHigh);        break;
                case NAL_PRIORITY_HIGHEST:      bestType = MAX(bestType, PacketType_VideoHighest);     break;
                }
            }
            /*else if(nal.i_type == NAL_SPS)
            {
            VideoPacket *newPacket = CurrentPackets.CreateNew();
            BufferOutputSerializer headerOut(newPacket->Packet);

            headerOut.OutputByte(0x17);
            headerOut.OutputByte(0);
            headerOut.Serialize(timeOffsetAddr, 3);
            headerOut.OutputByte(1);
            headerOut.Serialize(nal.p_payload+5, 3);
            headerOut.OutputByte(0xff);
            headerOut.OutputByte(0xe1);
            headerOut.OutputWord(htons(nal.i_payload-4));
            headerOut.Serialize(nal.p_payload+4, nal.i_payload-4);

            x264_nal_t &pps = nalOut[i+1]; //the PPS always comes after the SPS

            headerOut.OutputByte(1);
            headerOut.OutputWord(htons(pps.i_payload-4));
            headerOut.Serialize(pps.p_payload+4, pps.i_payload-4);
            }*/
            else
                continue;
        }

        packetTypes << bestType;

        packets.SetSize(CurrentPackets.Num());
        for(UINT i=0; i<packets.Num(); i++)
        {
            packets[i].lpPacket = CurrentPackets[i].Packet.Array();
            packets[i].size     = CurrentPackets[i].Packet.Num();
        }

        return true;
    }
Example #16
GstMfxFilterStatus
gst_mfx_filter_process (GstMfxFilter * filter, GstMfxSurface * surface,
    GstMfxSurface ** out_surface)
{
  mfxFrameSurface1 *insurf, *outsurf = NULL;
  mfxSyncPoint syncp;
  mfxStatus sts = MFX_ERR_NONE;
  GstMfxFilterStatus ret = GST_MFX_FILTER_STATUS_SUCCESS;
  gboolean more_surface = FALSE;

  /* Delayed VPP initialization to enable surface pool sharing with
   * encoder plugin */
  if (G_UNLIKELY (!filter->inited)) {
    ret = gst_mfx_filter_start (filter);
    if (ret != GST_MFX_FILTER_STATUS_SUCCESS)
      return ret;
    filter->inited = TRUE;
  }

  insurf = gst_mfx_surface_get_frame_surface (surface);

  do {
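    /* take a fresh output surface from the pool on every attempt and run VPP; retry while the device is busy */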
    *out_surface = gst_mfx_surface_new_from_pool (filter->vpp_pool[1]);
    if (!*out_surface)
      return GST_MFX_FILTER_STATUS_ERROR_ALLOCATION_FAILED;

    outsurf = gst_mfx_surface_get_frame_surface (*out_surface);
    sts =
        MFXVideoVPP_RunFrameVPPAsync (filter->session, insurf, outsurf, NULL,
        &syncp);

    if (MFX_WRN_INCOMPATIBLE_VIDEO_PARAM == sts)
      sts = MFX_ERR_NONE;

    if (MFX_WRN_DEVICE_BUSY == sts)
      g_usleep (500);
  } while (MFX_WRN_DEVICE_BUSY == sts);

  if (MFX_ERR_MORE_DATA == sts)
    return GST_MFX_FILTER_STATUS_ERROR_MORE_DATA;

  /* The current frame is ready. Hence treat it
   * as MFX_ERR_NONE and request for more surface
   */
  if (MFX_ERR_MORE_SURFACE == sts) {
    sts = MFX_ERR_NONE;
    more_surface = TRUE;
  }

  if (MFX_ERR_NONE != sts) {
    GST_ERROR ("Error during MFX filter process.");
    return GST_MFX_FILTER_STATUS_ERROR_OPERATION_FAILED;
  }

  if (syncp) {
    if (!gst_mfx_task_has_type (filter->vpp[1], GST_MFX_TASK_ENCODER))
      do {
        sts = MFXVideoCORE_SyncOperation (filter->session, syncp, 1000);
      } while (MFX_WRN_IN_EXECUTION == sts);

    *out_surface =
        gst_mfx_surface_pool_find_surface (filter->vpp_pool[1], outsurf);
  }

  if (more_surface)
    return GST_MFX_FILTER_STATUS_ERROR_MORE_SURFACE;

  return GST_MFX_FILTER_STATUS_SUCCESS;
}