Example #1
bool VdaMixer::upload(const VideoFrame &frame, bool /*deint*/) {
	Q_ASSERT(frame.format().imgfmt() == IMGFMT_VDA);
	CGLError error = kCGLNoError;
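	// Attach each plane of the IOSurface-backed frame to its GL texture (zero-copy).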
	for (auto &texture : m_textures) {
		const auto cgl = CGLGetCurrentContext();
		const auto surface = CVPixelBufferGetIOSurface((CVPixelBufferRef)frame.data(3));
		texture.bind();
		const auto w = IOSurfaceGetWidthOfPlane(surface, texture.plane());
		const auto h = IOSurfaceGetHeightOfPlane(surface, texture.plane());
		if (_Change(error, CGLTexImageIOSurface2D(cgl, texture.target(), texture.format(), w, h,
		                                          texture.transfer().format, texture.transfer().type,
		                                          surface, texture.plane()))) {
			_Error("CGLError: %%(0x%%)", CGLErrorString(error), _N(error, 16));
			return false;
		}
	}
	return true;
}
Example #2
	CopyResult copyFrame(const VideoFrame &videoFrame, Field field) override
	{
		Q_UNUSED(field)

		{
			QMutexLocker locker(&m_buffersMutex);
			const int idx = m_buffers.indexOf(videoFrame.surfaceId);
			if (idx < 0)
				return CopyNotReady;
			m_buffers.removeAt(idx);
			while (m_buffers.size() > 5)
				CVPixelBufferRelease((CVPixelBufferRef)m_buffers.takeFirst());
		}

		CVPixelBufferRef pixelBuffer = (CVPixelBufferRef)videoFrame.surfaceId;
		CGLContextObj glCtx = CGLGetCurrentContext();

		IOSurfaceRef surface = CVPixelBufferGetIOSurface(pixelBuffer);

		const OSType pixelFormat = IOSurfaceGetPixelFormat(surface);
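		// Only NV12 is handled: plane 0 is full-resolution Y, plane 1 is interleaved CbCr.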
		if (pixelFormat != kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange)
		{
			CVPixelBufferRelease(pixelBuffer);
			return CopyError;
		}

		glBindTexture(GL_TEXTURE_RECTANGLE_ARB, m_glTextures[0]);
		if (CGLTexImageIOSurface2D(glCtx, GL_TEXTURE_RECTANGLE_ARB, GL_R8,
		                           videoFrame.size.getWidth(0), videoFrame.size.getHeight(0),
		                           GL_RED, GL_UNSIGNED_BYTE, surface, 0) != kCGLNoError)
		{
			CVPixelBufferRelease(pixelBuffer);
			return CopyError;
		}

		glBindTexture(GL_TEXTURE_RECTANGLE_ARB, m_glTextures[1]);
		if (CGLTexImageIOSurface2D(glCtx, GL_TEXTURE_RECTANGLE_ARB, GL_RG8,
		                           videoFrame.size.getWidth(1), videoFrame.size.getHeight(1),
		                           GL_RG, GL_UNSIGNED_BYTE, surface, 1) != kCGLNoError)
		{
			CVPixelBufferRelease(pixelBuffer);
			return CopyError;
		}

		// Release the previously mapped buffer only now, so its IOSurface stays valid while its textures are in use.
		CVPixelBufferRelease(m_pixelBufferToRelease);
		m_pixelBufferToRelease = pixelBuffer;

		return CopyOk;
	}
Example #3
bool CRendererVTB::UploadTexture(int index)
{
  YUVBUFFER &buf = m_buffers[index];
  YUVPLANE (&planes)[YuvImage::MAX_PLANES] = m_buffers[index].fields[0];

  VTB::CVideoBufferVTB *vb = dynamic_cast<VTB::CVideoBufferVTB*>(buf.videoBuffer);
  if (!vb)
  {
    return false;
  }

  CVImageBufferRef cvBufferRef = vb->GetPB();

  glEnable(m_textureTarget);

  // This is the fastest way to render an IOSurface-backed CVPixelBuffer,
  // as there is no CPU -> GPU upload.
  CGLContextObj cgl_ctx  = (CGLContextObj)g_Windowing.GetCGLContextObj();
  IOSurfaceRef surface  = CVPixelBufferGetIOSurface(cvBufferRef);
  OSType format_type = IOSurfaceGetPixelFormat(surface);

  if (format_type != kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange)
  {
    return false;
  }

  GLsizei surfplanes = IOSurfaceGetPlaneCount(surface);

  if (surfplanes != 2)
  {
    return false;
  }

  GLsizei widthY = IOSurfaceGetWidthOfPlane(surface, 0);
  GLsizei widthUV = IOSurfaceGetWidthOfPlane(surface, 1);
  GLsizei heightY = IOSurfaceGetHeightOfPlane(surface, 0);
  GLsizei heightUV = IOSurfaceGetHeightOfPlane(surface, 1);
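
  // Plane 0 is full-resolution Y; plane 1 is half-resolution interleaved CbCr (NV12 layout).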

  glBindTexture(m_textureTarget, planes[0].id);

  CGLTexImageIOSurface2D(cgl_ctx, m_textureTarget, GL_LUMINANCE,
                         widthY, heightY, GL_LUMINANCE, GL_UNSIGNED_BYTE, surface, 0);
  glTexParameteri(m_textureTarget, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
  glTexParameteri(m_textureTarget, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
  glTexParameteri(m_textureTarget, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
  glTexParameteri(m_textureTarget, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);

  glBindTexture(m_textureTarget, planes[1].id);

  CGLTexImageIOSurface2D(cgl_ctx, m_textureTarget, GL_LUMINANCE_ALPHA,
                         widthUV, heightUV, GL_LUMINANCE_ALPHA, GL_UNSIGNED_BYTE, surface, 1);
  glTexParameteri(m_textureTarget, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
  glTexParameteri(m_textureTarget, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
  glTexParameteri(m_textureTarget, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
  glTexParameteri(m_textureTarget, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);

  glBindTexture(m_textureTarget, 0);

  glDisable(m_textureTarget);

  CalculateTextureSourceRects(index, 3);

  return true;
}
Example #4
File: hwdec_osx.c  Project: jmglogow/mpv
static int create(struct gl_hwdec *hw)
{
    if (!check_hwdec(hw))
        return -1;

    struct priv *p = talloc_zero(hw, struct priv);
    hw->priv = p;

    hw->gl->GenTextures(MP_MAX_PLANES, p->gl_planes);

    p->hwctx = (struct mp_hwdec_ctx){
        .type = HWDEC_VIDEOTOOLBOX,
        .download_image = mp_vt_download_image,
        .ctx = &p->hwctx,
    };
    hwdec_devices_add(hw->devs, &p->hwctx);

    return 0;
}

static int reinit(struct gl_hwdec *hw, struct mp_image_params *params)
{
    struct priv *p = hw->priv;

    assert(params->imgfmt == hw->driver->imgfmt);

    if (!params->hw_subfmt) {
        MP_ERR(hw, "Unsupported CVPixelBuffer format.\n");
        return -1;
    }

    if (!gl_get_imgfmt_desc(hw->gl, params->hw_subfmt, &p->desc)) {
        MP_ERR(hw, "Unsupported texture format.\n");
        return -1;
    }

    params->imgfmt = params->hw_subfmt;
    params->hw_subfmt = 0;
    return 0;
}

static int map_frame(struct gl_hwdec *hw, struct mp_image *hw_image,
                     struct gl_hwdec_frame *out_frame)
{
    struct priv *p = hw->priv;
    GL *gl = hw->gl;

    CVPixelBufferRelease(p->pbuf);
    p->pbuf = (CVPixelBufferRef)hw_image->planes[3];
    CVPixelBufferRetain(p->pbuf);
    IOSurfaceRef surface = CVPixelBufferGetIOSurface(p->pbuf);
    if (!surface) {
        MP_ERR(hw, "CVPixelBuffer has no IOSurface\n");
        return -1;
    }

    const bool planar = CVPixelBufferIsPlanar(p->pbuf);
    const int planes  = CVPixelBufferGetPlaneCount(p->pbuf);
    assert((planar && planes == p->desc.num_planes) || p->desc.num_planes == 1);

    GLenum gl_target = GL_TEXTURE_RECTANGLE;
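    // CGLTexImageIOSurface2D() only accepts rectangle texture targets, so texel
    // coordinates in the sampling shader are non-normalized.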

    for (int i = 0; i < p->desc.num_planes; i++) {
        const struct gl_format *fmt = p->desc.planes[i];

        gl->BindTexture(gl_target, p->gl_planes[i]);

        CGLError err = CGLTexImageIOSurface2D(
            CGLGetCurrentContext(), gl_target,
            fmt->internal_format,
            IOSurfaceGetWidthOfPlane(surface, i),
            IOSurfaceGetHeightOfPlane(surface, i),
            fmt->format, fmt->type, surface, i);

        if (err != kCGLNoError)
            MP_ERR(hw, "error creating IOSurface texture for plane %d: %s (%x)\n",
                   i, CGLErrorString(err), gl->GetError());

        gl->BindTexture(gl_target, 0);

        out_frame->planes[i] = (struct gl_hwdec_plane){
            .gl_texture = p->gl_planes[i],
            .gl_target = gl_target,
            .tex_w = IOSurfaceGetWidthOfPlane(surface, i),
            .tex_h = IOSurfaceGetHeightOfPlane(surface, i),
        };
    }

    snprintf(out_frame->swizzle, sizeof(out_frame->swizzle), "%s",
             p->desc.swizzle);

    return 0;
}

static void destroy(struct gl_hwdec *hw)
{
    struct priv *p = hw->priv;
    GL *gl = hw->gl;

    CVPixelBufferRelease(p->pbuf);
    gl->DeleteTextures(MP_MAX_PLANES, p->gl_planes);

    hwdec_devices_remove(hw->devs, &p->hwctx);
}

const struct gl_hwdec_driver gl_hwdec_videotoolbox = {
    .name = "videotoolbox",
    .api = HWDEC_VIDEOTOOLBOX,
    .imgfmt = IMGFMT_VIDEOTOOLBOX,
    .create = create,
    .reinit = reinit,
    .map_frame = map_frame,
    .destroy = destroy,
};
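Common to all four examples: the decoded CVPixelBuffer must be IOSurface-backed, the texture target must be a rectangle texture, and CGLTexImageIOSurface2D() is called once per IOSurface plane with a matching internal format / format / type triple, which attaches the surface memory to the texture without a CPU -> GPU copy. Below is a minimal sketch of that shared pattern, assuming a current CGL context, an NV12 (kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange) buffer, and two pre-generated texture names; the function name bindNV12PixelBuffer is illustrative and not taken from any of the projects above.

#include <OpenGL/OpenGL.h>
#include <OpenGL/CGLIOSurface.h>
#include <OpenGL/gl3.h>
#include <CoreVideo/CoreVideo.h>
#include <IOSurface/IOSurface.h>

// Attach the two planes of an IOSurface-backed NV12 CVPixelBuffer to two
// rectangle textures. No pixel data is copied; the textures alias the surface.
static bool bindNV12PixelBuffer(CVPixelBufferRef pixelBuffer, const GLuint textures[2])
{
    IOSurfaceRef surface = CVPixelBufferGetIOSurface(pixelBuffer);
    if (!surface) // the buffer was not allocated with an IOSurface backing
        return false;
    if (IOSurfaceGetPixelFormat(surface) != kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange)
        return false; // only NV12 is handled in this sketch

    CGLContextObj cgl = CGLGetCurrentContext();

    // Plane 0: full-resolution Y, one 8-bit channel.
    glBindTexture(GL_TEXTURE_RECTANGLE, textures[0]);
    CGLError err = CGLTexImageIOSurface2D(cgl, GL_TEXTURE_RECTANGLE, GL_R8,
                                          (GLsizei)IOSurfaceGetWidthOfPlane(surface, 0),
                                          (GLsizei)IOSurfaceGetHeightOfPlane(surface, 0),
                                          GL_RED, GL_UNSIGNED_BYTE, surface, 0);
    if (err != kCGLNoError)
        return false;

    // Plane 1: half-resolution interleaved CbCr, two 8-bit channels.
    glBindTexture(GL_TEXTURE_RECTANGLE, textures[1]);
    err = CGLTexImageIOSurface2D(cgl, GL_TEXTURE_RECTANGLE, GL_RG8,
                                 (GLsizei)IOSurfaceGetWidthOfPlane(surface, 1),
                                 (GLsizei)IOSurfaceGetHeightOfPlane(surface, 1),
                                 GL_RG, GL_UNSIGNED_BYTE, surface, 1);
    glBindTexture(GL_TEXTURE_RECTANGLE, 0);
    return err == kCGLNoError;
}

Because the target is a rectangle texture, a shader sampling these planes must use non-normalized texel coordinates; this is why all four projects bind rectangle textures rather than GL_TEXTURE_2D.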