static int xv_show_frame( int x, int y, int width, int height ) { area_t video_area = xcommon_get_video_area(); area_t scale_area; scale_area.x = x; scale_area.y = y; scale_area.width = width; scale_area.height = height; xcommon_set_video_scale( scale_area ); xcommon_ping_screensaver(); if( use_shm ) { XvShmPutImage( display, xv_port, output_window, xcommon_get_gc(), image, x, y, width, height, video_area.x, video_area.y, video_area.width, video_area.height, False ); } else { XvPutImage( display, xv_port, output_window, xcommon_get_gc(), image, x, y, width, height, video_area.x, video_area.y, video_area.width, video_area.height ); } xcommon_frame_drawn(); XSync( display, False ); if( xvoutput_error ) return 0; return 1; }
/*
 * Function: XvMCPutSurface
 *
 * Description:
 * Arguments:
 *  display: Connection to X server
 *  surface: Surface to be displayed
 *  draw: X Drawable on which to display the surface
 *  srcx: X coordinate of the top left corner of the region to be
 *          displayed within the surface.
 *  srcy: Y coordinate of the top left corner of the region to be
 *          displayed within the surface.
 *  srcw: Width of the region to be displayed.
 *  srch: Height of the region to be displayed.
 *  destx: X coordinate of the top left corner of the destination region
 *         in the drawable coordinates.
 *  desty: Y coordinate of the top left corner of the destination region
 *         in the drawable coordinates.
 *  destw: Width of the destination region.
 *  desth: Height of the destination region.
 *  flags: One or more of the following.
 *   XVMC_TOP_FIELD - Display only the Top field of the surface.
 *   XVMC_BOTTOM_FIELD - Display only the Bottom Field of the surface.
 *   XVMC_FRAME_PICTURE - Display both fields or frame.
 */
_X_EXPORT Status XvMCPutSurface(Display * display, XvMCSurface * surface,
				Drawable draw,
				short srcx, short srcy,
				unsigned short srcw, unsigned short srch,
				short destx, short desty,
				unsigned short destw, unsigned short desth,
				int flags)
{
	Status ret = Success;
	XvMCContext *context;
	intel_xvmc_surface_ptr intel_surf;

	if (!display || !surface)
		return XvMCBadSurface;

	/* BUGFIX: the original dereferenced surface->privData (and then
	 * intel_surf->context) BEFORE the "if (!context || !intel_surf)"
	 * check, so a surface with NULL private data crashed instead of
	 * returning XvMCBadSurface. Validate each pointer before use. */
	intel_surf = surface->privData;
	if (!intel_surf)
		return XvMCBadSurface;

	context = intel_surf->context;
	if (!context)
		return XvMCBadSurface;

	/* Lazily create a GC for the target drawable and recreate it whenever
	 * the caller switches drawables (a GC is bound to its drawable). */
	if (intel_surf->gc_init == FALSE) {
		intel_surf->gc = XCreateGC(display, draw, 0, NULL);
		intel_surf->gc_init = TRUE;
	} else if (draw != intel_surf->last_draw) {
		XFreeGC(display, intel_surf->gc);
		intel_surf->gc = XCreateGC(display, draw, 0, NULL);
	}
	intel_surf->last_draw = draw;

	/* Export the surface's buffer object so the server side can look it
	 * up by GEM flink name. */
	drm_intel_bo_flink(intel_surf->bo, &intel_surf->gem_handle);

	/* NOTE: flags (XVMC_TOP_FIELD etc.) are currently ignored — the whole
	 * frame is always pushed through XvPutImage. */
	ret = XvPutImage(display, context->port, draw, intel_surf->gc,
			 intel_surf->image, srcx, srcy, srcw, srch,
			 destx, desty, destw, desth);
	return ret;
}
/* Push the handle's Xv image to the drawable, scaling the full source image
 * to w x h. Uses the shared-memory path when the handle was set up for it.
 *
 * NOTE(review): image_data/image_data_size are currently unused — the Xv
 * image appears to be filled in place by the caller (the original memcpy
 * into handle->image->data was already commented out). Parameters are kept
 * for interface compatibility. */
void xv_update_image( xv_handle_t handle, __u8 *image_data, size_t image_data_size, int w, int h )
{
    (void)image_data;       /* unused, see note above */
    (void)image_data_size;  /* unused, see note above */

    if( handle->use_shm ) {
        XvShmPutImage( handle->display,
                       handle->p_adaptor_info[0].base_id,
                       handle->drawable,
                       handle->gc,
                       handle->image,
                       0, 0, handle->image->width, handle->image->height,
                       0, 0, w, h,
                       1 /* send completion event */ );
    } else {
        XvPutImage( handle->display,
                    handle->p_adaptor_info[0].base_id,
                    handle->drawable,
                    handle->gc,
                    handle->image,
                    0, 0, handle->image->width, handle->image->height,
                    0, 0, w, h );
    }
}
void display_frame() { if (!freeze && adaptor >= 0) { // copy into frame buffer for display for (int i = 0; i < numCameras; i++) { if (sync_display && i==0) { rgb2yuy2((unsigned char *) async_display_image->imageData, frame_buffer + (i * frame_length), (device_width * device_height)); } else { rgb2yuy2((unsigned char *) iplImages[i]->imageData, frame_buffer + (i * frame_length), (device_width * device_height)); } } xv_image = XvCreateImage(display, info[adaptor].base_id, format, (char*) frame_buffer, device_width, device_height * numCameras); XvPutImage(display, info[adaptor].base_id, window, gc, xv_image, 0, 0, device_width, device_height * numCameras, 0, 0, width, height); xv_image = NULL; } XFlush(display); }
/* Display a YUV image on the drawable, letterboxing or pillarboxing the
 * destination rectangle so the source aspect ratio (corrected by
 * aspect_ratio) is preserved. Uses XvShmPutImage when shminfo is given. */
void display_yuv_image(Display* display, XvPortID port, Drawable d, GC gc, XvImage* image, XShmSegmentInfo* shminfo, int src_x, int src_y, unsigned int src_w, unsigned int src_h, unsigned int dest_w, unsigned int dest_h, double aspect_ratio)
{
    int dest_x = 0;
    int dest_y = 0;

    /* Keep aspect ratio of src image: shrink whichever destination
     * dimension is too large and center the result. */
    if (dest_w*src_h < src_w*aspect_ratio*dest_h) {
        /* Destination too tall: letterbox (shrink height, center vertically). */
        unsigned int full_h = dest_h;
        dest_h = dest_w*src_h/(src_w*aspect_ratio);
        dest_y = (full_h - dest_h)/2;
    } else {
        /* Destination too wide: pillarbox (shrink width, center horizontally). */
        unsigned int full_w = dest_w;
        dest_w = dest_h*src_w*aspect_ratio/src_h;
        dest_x = (full_w - dest_w)/2;
    }

    if (shminfo != NULL) {
        XvShmPutImage(display, port, d, gc, image,
                      src_x, src_y, src_w, src_h,
                      dest_x, dest_y, dest_w, dest_h, False);
    } else {
        XvPutImage(display, port, d, gc, image,
                   src_x, src_y, src_w, src_h,
                   dest_x, dest_y, dest_w, dest_h);
    }
}
// Render one video frame to the X11 window.
// Uses the XVideo extension when available (the XvImage is pointed straight
// at the frame data); otherwise falls back to software I420->RGB conversion
// followed by a plain XPutImage.
// NOTE(review): assumes frame->Data() holds m_VPar.width x m_VPar.height
// I420 pixels — confirm against the producer.
void X11Renderer::RenderFrameInt(VideoData *frame)
{
    int ret;
    pMutexLock l(X11mutex);   // scoped lock, released on every exit path

    m_TStat.Update();         // update timing stats for the debug trace below
    TIMING_DBG("X11Renderer(%s): %s", m_pTitle, m_TStat.GetStatStr());

    if (m_UsingXv) {
        // Zero-copy: let Xv read the frame buffer directly (1:1 scaling here).
        m_pXvImage->data = (char *)frame->Data();
        ret = XvPutImage(m_pDisplay, m_Port, m_Window, m_Gc, m_pXvImage,
                         0, 0, m_VPar.width, m_VPar.height,
                         0, 0, m_VPar.width, m_VPar.height);
    } else {
        // Software fallback: convert to RGB into our own buffer, then blit.
        ColorConvert::ConvertI420toRGB(m_VPar.width, m_VPar.height,
                                       (uint8_t *)frame->Data(), m_RgbBuf);
        m_pXImage->data = (char *)m_RgbBuf;
        ret = XPutImage(m_pDisplay, m_Window, m_Gc, m_pXImage,
                        0, 0, 0, 0, m_VPar.width, m_VPar.height);
    }
    if (ret != Success) {
        ERROR("X(v)PutImage failed");
    }
    XFlush(m_pDisplay);   // push requests to the server without blocking
}
/* Blit a YV12 video surface through the Xv overlay port.
 * overlay_type == 0 blits into the back-buffer pixmap; non-zero blits
 * directly to the (fullscreen or normal) window. Only GF_PIXEL_YV12 input
 * is supported. */
GF_Err X11_Blit(struct _video_out *vout, GF_VideoSurface *video_src, GF_Window *src, GF_Window *dest, u32 overlay_type)
{
	XvImage *overlay;
	int xvport;
	Drawable dst_dr;
	GF_Err e;
	Window cur_wnd;
	XWindow *xwin = (XWindow *)vout->opaque;

	if (!video_src) {
		/* NOTE(review): placeholder branch — presumably meant to tear down
		 * or hide the overlay when called with no source; currently a no-op. */
		if (overlay_type && xwin->xvport) {
		}
		return GF_OK;
	}
	if (video_src->pixel_format != GF_PIXEL_YV12) return GF_NOT_SUPPORTED;
	cur_wnd = xwin->fullscreen ? xwin->full_wnd : xwin->wnd;
	/*init if needed*/
	if ((xwin->xvport<0) || !xwin->overlay) {
		e = X11_InitOverlay(vout, video_src->width, video_src->height);
		if (e) return e;
	}
	/*different size, recreate an image*/
	if ((xwin->overlay->width != video_src->width) || (xwin->overlay->height != video_src->height)) {
		if (xwin->overlay) XFree(xwin->overlay);
		xwin->overlay = XvCreateImage(xwin->display, xwin->xvport, xwin->xv_pf_format, NULL,
		                              video_src->width, video_src->height);
		if (!xwin->overlay) return GF_IO_ERR;
	}
	GF_LOG(GF_LOG_DEBUG, GF_LOG_MMIO, ("[X11] Blit surface to dest %d x %d - overlay type %s\n", dest->w, dest->h,
	        (overlay_type==0)? "none" : ((overlay_type==1) ? "Top-Level" : "ColorKey") ));

	overlay = xwin->overlay;
	xvport = xwin->xvport;

	/* Point the XvImage at the caller's planar YV12 buffer: full-width Y
	 * plane followed by two half-width chroma planes. */
	overlay->data = video_src->video_buffer;
	overlay->num_planes = 3;
	overlay->pitches[0] = video_src->width;
	overlay->pitches[1] = xwin->overlay->pitches[2] = video_src->width/2;
	overlay->offsets[0] = 0;
	overlay->offsets[1] = video_src->width*video_src->height;
	overlay->offsets[2] = 5*video_src->width*video_src->height/4;

	dst_dr = cur_wnd;
	if (!overlay_type) {
		/* Offscreen blit requires the back-buffer pixmap to exist. */
		if (!xwin->pixmap) {
			GF_LOG(GF_LOG_ERROR, GF_LOG_MMIO, ("[X11] Back buffer not configured for Blt\n"));
			return GF_BAD_PARAM;
		}
		dst_dr = xwin->pixmap;
	}
	XvPutImage(xwin->display, xvport, dst_dr, xwin->the_gc, overlay,
	           src->x, src->y, src->w, src->h,
	           dest->x, dest->y, dest->w, dest->h);
	return GF_OK;
}
// Blit the cached XvImage to the given drawable through the bitmap's Xv
// port, scaling the source_w x source_h region at (source_x, source_y) to
// the dest_w x dest_h region at (dest_x, dest_y). Always returns 0.
int BC_XvImage::write_drawable(Drawable &pixmap, GC &gc,
	int source_x, int source_y, int source_w, int source_h,
	int dest_x, int dest_y, int dest_w, int dest_h)
{
	XvPutImage(top_level->display, bitmap->xv_portid, pixmap, gc, xv_image,
		source_x, source_y, source_w, source_h,
		dest_x, dest_y, dest_w, dest_h);
	return 0;
}
// Paint handler: wrap the pending YUV data in an Xv frame and blit it,
// scaling the full source image to the current widget size. Does nothing
// (beyond GC create/free) when no YUV data is queued.
void Yuv_window::paintEvent(QPaintEvent *)
{
    // Per-paint GC; freed unconditionally at the end.
    GC gc = XCreateGC(QX11Info::display(), winId(), 0, 0);

    mutex.lock();   // yuv_data is shared with the producer thread
    if (0 != yuv_data) {
        // IMPROVED: stack allocation replaces the original new/delete pair —
        // same construction/destruction order, no leak possible if anything
        // between them were to throw.
        Xv_video_frame video_frame(video_width, video_height, format, port,
                                   yuv_data);
        XvPutImage(QX11Info::display(), port, winId(), gc,
                   video_frame.image,
                   0, 0,
                   video_frame.image_width(), video_frame.image_height(),
                   0, 0, width(), height());
    }
    mutex.unlock();

    XFreeGC(QX11Info::display(), gc);
}
void XVRenderer::drawFrame() { DPTR_D(XVRenderer); QRect roi = realROI(); if (!d.use_shm) XvPutImage(d.display, d.xv_port, winId(), d.gc, d.xv_image , roi.x(), roi.y(), roi.width(), roi.height() , d.out_rect.x(), d.out_rect.y(), d.out_rect.width(), d.out_rect.height()); else XvShmPutImage(d.display, d.xv_port, winId(), d.gc, d.xv_image , roi.x(), roi.y(), roi.width(), roi.height() , d.out_rect.x(), d.out_rect.y(), d.out_rect.width(), d.out_rect.height() , false /*true: send event*/); }
void display_frames() { uint32_t i; if(!freeze && adaptor>=0){ for (i = 0; i < numCameras; i++) { if (!frames[i]) continue; switch (res) { case DC1394_VIDEO_MODE_640x480_YUV411: iyu12yuy2( frames[i]->image, (unsigned char *)(frame_buffer + (i * frame_length)), (device_width*device_height) ); break; case DC1394_VIDEO_MODE_320x240_YUV422: case DC1394_VIDEO_MODE_640x480_YUV422: memcpy( frame_buffer + (i * frame_length), frames[i]->image, device_width*device_height*2); break; case DC1394_VIDEO_MODE_640x480_RGB8: rgb2yuy2( frames[i]->image, (unsigned char *) (frame_buffer + (i * frame_length)), (device_width*device_height) ); break; } } xv_image=XvCreateImage(display,info[adaptor].base_id,format,frame_buffer, device_width,device_height* numCameras); //xv_image=XvCreateImage(display,info[adaptor].base_id,format,frame_buffer, // device_width, device_height* numCameras); XvPutImage(display,info[adaptor].base_id,window,gc,xv_image, 0,0,device_width ,device_height * numCameras, 0,0,width,height); /* XvPutImage(display,info[adaptor].base_id,window,gc,xv_image, 0,0,device_width * numCameras, device_height , 0,0,width, height);*/ /*XvPutImage(display,info[adaptor].base_id,window,gc,xv_image, 0,0,device_width , device_height * numCameras, 0,0,width, height);*/ xv_image=NULL; } }
//------------------------------------ uint8_t GUI_XvDisplay(uint8_t * src, uint32_t w, uint32_t h,renderZoom zoom) { uint32_t destW,destH; if (xvimage) { // put image in shared segment // for YV12, 4 bits for Y 4 bits for u, 4 bits for v // total 1.5* XLockDisplay (xv_display); memcpy(xvimage->data, src, (w*h*3)>>1); uint32_t factor=4; switch(zoom) { case ZOOM_1_4: factor=1;break; case ZOOM_1_2: factor=2;break; case ZOOM_1_1: factor=4;break; case ZOOM_2: factor=8;break; case ZOOM_4: factor=16;break; default : ADM_assert(0); } destW=(w*factor)/4; destH=(h*factor)/4; //printf("%u x %u => %u x %u\n",w,h,destW,destH); // And display it ! #if 1 XvShmPutImage(xv_display, xv_port, xv_win, xv_gc, xvimage, 0, 0, w, h, // src 0, 0, destW, destH, // dst False); #else XvPutImage(xv_display, xv_port, xv_win, xv_gc, xvimage, 0, 0, w, h, // src 0, 0, w, h // dst ); #endif //XSetForeground (xv_display, xv_gc, 0); XSync(xv_display, False); XUnlockDisplay (xv_display); //GUI_XvExpose(); } return 1; }
/* Push an XvImage to the window: the configured source rectangle of xvi is
 * scaled into the configured destination rectangle, through shared memory
 * when available and enabled. */
static inline void put_xvimage(struct vo *vo, XvImage *xvi)
{
    struct xvctx *ctx = vo->priv;
    struct vo_x11_state *x11 = vo->x11;
    struct mp_rect *src_rc = &ctx->src_rect;
    struct mp_rect *dst_rc = &ctx->dst_rect;
    int src_w = src_rc->x1 - src_rc->x0;
    int src_h = src_rc->y1 - src_rc->y0;
    int dst_w = dst_rc->x1 - dst_rc->x0;
    int dst_h = dst_rc->y1 - dst_rc->y0;
#ifdef HAVE_SHM
    if (ctx->Shmem_Flag) {
        XvShmPutImage(x11->display, x11->xv_port, x11->window, x11->vo_gc, xvi,
                      src_rc->x0, src_rc->y0, src_w, src_h,
                      dst_rc->x0, dst_rc->y0, dst_w, dst_h,
                      False);
    } else
#endif
    {
        XvPutImage(x11->display, x11->xv_port, x11->window, x11->vo_gc, xvi,
                   src_rc->x0, src_rc->y0, src_w, src_h,
                   dst_rc->x0, dst_rc->y0, dst_w, dst_h);
    }
}
/* Display a YUV image on the drawable, optionally preserving the source
 * aspect ratio (aspect_ratio == 0.0 disables correction). The final scaled
 * geometry is written back into *dest for later consumers. */
void display_yuv_image(Display* display, XvPortID port, Drawable d, GC gc,
                       XvImage* image, XShmSegmentInfo* shminfo,
                       int src_x, int src_y,
                       unsigned int src_w, unsigned int src_h,
                       struct xywh_s *dest, double aspect_ratio)
{
    int dest_x = 0, dest_y = 0;
    int dest_w = dest->w;
    int dest_h = dest->h;

    /* Maybe keep aspect ratio of src image. */
    if (aspect_ratio == 0.0) {
        /*
         * Don't bother correcting any aspect ratio, just scale
         * to size as given.
         */
    } else if (dest_w * src_h < src_w * aspect_ratio * dest_h) {
        /* Destination too tall: letterbox (shrink height, center). */
        dest_y = dest_h;
        dest_h = dest_w * src_h / (src_w * aspect_ratio);
        dest_y = (dest_y - dest_h) / 2;
    } else {
        /* Destination too wide: pillarbox (shrink width, center). */
        dest_x = dest_w;
        dest_w = dest_h * src_w * aspect_ratio / src_h;
        dest_x = (dest_x - dest_w) / 2;
    }

    /* Record (for the lightpen code) where the scaled screen ended up */
    dest->x = dest_x;
    dest->y = dest_y;
    dest->w = dest_w;
    dest->h = dest_h;

    if (shminfo) {
        XvShmPutImage(display, port, d, gc, image,
                      src_x, src_y, src_w, src_h,
                      dest_x, dest_y, dest_w, dest_h, False);
    } else {
        XvPutImage(display, port, d, gc, image,
                   src_x, src_y, src_w, src_h,
                   dest_x, dest_y, dest_w, dest_h);
    }
}
/* Process and display one frame on the Xv port: convert the input pixel
 * format to the 8-bit output format if needed, optionally software-scale,
 * overlay the OSD, sleep for rate control, then blit (shared memory when
 * enabled). Always returns 1; the frame's streams are reset before return. */
static int display_frame(X11XVDisplaySink* sink, X11DisplayFrame* frame, const FrameInfo* frameInfo)
{
    struct timeval timeNow;
    long requiredUsec;
    long durationSlept;
    unsigned int windowWidth;
    unsigned int windowHeight;
    float scaleFactorX;
    float scaleFactorY;
    float scaleFactor;
    YUV_frame inputFrame;
    YUV_frame outputFrame;
    unsigned char* activeBuffer;
    int frameDurationMsec;
    int frameSlippage;

    frameDurationMsec = (int)(1000 * frameInfo->frameRate.den / (double)(frameInfo->frameRate.num));
    frameSlippage = frameDurationMsec * 1000 / 2; /* half a frame */

    if (frame->videoIsPresent)
    {
        /* convert if required; in every branch activeBuffer is the buffer the
         * converted frame lands in: an intermediate buffer when a software
         * scale follows, otherwise directly the Xv image data. */
        if (sink->inputVideoFormat == UYVY_10BIT_FORMAT)
        {
            if (sink->swScale != 1)
            {
                /* scale required afterwards */
                activeBuffer = frame->scaleInputBuffer;
            }
            else
            {
                /* no scale afterwards */
                activeBuffer = (unsigned char*)frame->yuv_image->data;
            }
            /* V210 rows are padded to 128-byte multiples of 48 pixels. */
            ConvertFrameV210to8(activeBuffer, frame->inputBuffer, sink->inputWidth * 2,
                (sink->inputWidth + 47) / 48 * 128, sink->inputWidth, sink->inputHeight);
        }
        else if (sink->inputVideoFormat == YUV444_FORMAT)
        {
            if (sink->swScale != 1)
            {
                /* scale required afterwards */
                activeBuffer = frame->scaleInputBuffer;
            }
            else
            {
                /* no scale afterwards */
                activeBuffer = (unsigned char*)frame->yuv_image->data;
            }
            yuv444_to_uyvy(sink->inputWidth, sink->inputHeight, frame->inputBuffer, activeBuffer);
        }
        else if (sink->inputVideoFormat == YUV422_FORMAT)
        {
            if (sink->swScale != 1)
            {
                /* scale required afterwards */
                activeBuffer = frame->scaleInputBuffer;
            }
            else
            {
                /* no scale afterwards */
                activeBuffer = (unsigned char*)frame->yuv_image->data;
            }
            yuv422_to_uyvy_2(sink->inputWidth, sink->inputHeight, 0, frame->inputBuffer, activeBuffer);
        }
        else if (sink->inputVideoFormat == YUV422_10BIT_FORMAT)
        {
            if (sink->swScale != 1)
            {
                /* scale required afterwards */
                activeBuffer = frame->scaleInputBuffer;
            }
            else
            {
                /* no scale afterwards */
                activeBuffer = (unsigned char*)frame->yuv_image->data;
            }
            /* 10-bit: dither down to 8-bit first, then repack to UYVY. */
            ConvertFrameYUV10to8_2(frame->ditherOutputBuffer, (const uint16_t*)frame->inputBuffer,
                sink->inputWidth, sink->inputHeight, 2, 1);
            yuv422_to_uyvy_2(sink->inputWidth, sink->inputHeight, 0, frame->ditherOutputBuffer, activeBuffer);
        }
        else if (sink->inputVideoFormat == YUV420_10BIT_FORMAT)
        {
            if (sink->swScale != 1)
            {
                /* scale required afterwards */
                activeBuffer = frame->scaleInputBuffer;
            }
            else
            {
                /* no scale afterwards */
                activeBuffer = (unsigned char*)frame->yuv_image->data;
            }
            ConvertFrameYUV10to8_2(activeBuffer, (const uint16_t*)frame->inputBuffer,
                sink->inputWidth, sink->inputHeight, 2, 2);
        }
        else
        {
            /* no conversion - scale input frame->input buffer != frame->yuv_image->data */
            activeBuffer = frame->inputBuffer;
        }

        /* scale image and output to frame->yuv_image */
        if (sink->swScale != 1)
        {
            YUV_frame_from_buffer(&inputFrame, (void*)activeBuffer,
                sink->inputWidth, sink->inputHeight, sink->outputYUVFormat);
            YUV_frame_from_buffer(&outputFrame, (void*)(unsigned char*)frame->yuv_image->data,
                sink->width, sink->height, sink->outputYUVFormat);
            small_pic(&inputFrame, &outputFrame, 0, 0, sink->swScale, sink->swScale, 1,
                sink->applyScaleFilter, sink->applyScaleFilter, frame->scaleWorkspace);
        }

        /* add OSD to frame */
        if (sink->osd != NULL && sink->osdInitialised)
        {
            if (!osd_add_to_image(sink->osd, frameInfo, (unsigned char*)frame->yuv_image->data,
                frame->yuv_image->width, frame->yuv_image->height))
            {
                ml_log_error("Failed to add OSD to frame\n");
                /* continue anyway */
            }
        }

        /* wait until it is time to display this frame */
        gettimeofday(&timeNow, NULL);
        durationSlept = 0;
        if (frameInfo->rateControl)
        {
            durationSlept = sleep_diff(frameDurationMsec * 1000, &timeNow, &sink->lastFrameTime);
        }

        /* adjust the display width/height if the window has been resized;
         * the smaller scale factor wins so the whole image stays visible. */
        windowWidth = sink->x11Common.windowWidth;
        windowHeight = sink->x11Common.windowHeight;
        scaleFactorX = windowWidth / (float)(sink->initialDisplayWidth);
        scaleFactorY = windowHeight / (float)(sink->initialDisplayHeight);
        scaleFactor = (scaleFactorX < scaleFactorY) ? scaleFactorX : scaleFactorY;
        sink->x11Common.displayWidth = sink->initialDisplayWidth * scaleFactor;
        sink->x11Common.displayHeight = sink->initialDisplayHeight * scaleFactor;

        /* Serialize Xlib access across threads for the blit. */
        XLockDisplay(sink->x11Common.windowInfo.display);

        if (sink->useSharedMemory)
        {
            XvShmPutImage(sink->x11Common.windowInfo.display,
                sink->xvport, sink->x11Common.windowInfo.window,
                sink->x11Common.windowInfo.gc, frame->yuv_image,
                0, 0, frame->yuv_image->width, frame->yuv_image->height,
                0, 0, sink->x11Common.displayWidth, sink->x11Common.displayHeight,
                False);
        }
        else
        {
            XvPutImage(sink->x11Common.windowInfo.display,
                sink->xvport, sink->x11Common.windowInfo.window,
                sink->x11Common.windowInfo.gc, frame->yuv_image,
                0, 0, frame->yuv_image->width, frame->yuv_image->height,
                0, 0, sink->x11Common.displayWidth, sink->x11Common.displayHeight);
        }

        XSync(sink->x11Common.windowInfo.display, False);

        XUnlockDisplay(sink->x11Common.windowInfo.display);

        x11c_process_events(&sink->x11Common);

        /* report that a new frame has been displayed */
        msl_frame_displayed(sink->listener, frameInfo);

        /* set the time that this frame was displayed */
        if (frameInfo->rateControl)
        {
            if (durationSlept < - frameSlippage)
            {
                /* reset rate control when slipped by more than frameSlippage */
                sink->lastFrameTime = timeNow;
            }
            else
            {
                /* set what the frame's display time should have been */
                requiredUsec = sink->lastFrameTime.tv_sec * 1000000 + sink->lastFrameTime.tv_usec +
                    frameDurationMsec * 1000;
                sink->lastFrameTime.tv_usec = requiredUsec % 1000000;
                sink->lastFrameTime.tv_sec = requiredUsec / 1000000;
            }
        }
        else
        {
            gettimeofday(&sink->lastFrameTime, NULL);
        }
    }
    else
    {
        gettimeofday(&sink->lastFrameTime, NULL);
    }

    reset_streams(frame);

    return 1;
}
// Paint handler: blit the most recent full-resolution frame. YUVJ420P
// frames go through XVideo (hardware scaled); anything else falls back to
// a QImage copy/scale, optionally with a measurement grid and crosshair
// drawn on top.
void ffmpegWidget::paintEvent(QPaintEvent *) {
    // check we have a full buffer
    if (this->fullbuf == NULL || this->fullbuf->width <= 0 || this->fullbuf->height <= 0) {
        if (this->xv_format >= 0) {
            // xvideo supported
            XClearArea(this->dpy, this->w, 0, 0, this->widgetW, this->widgetH, 0);
        }
        return;
    }
    // If scale factors have changed
    if (this->widgetW != width() || this->widgetH != height()) {
        updateScalefactor();
    }
    // Hold a reference on the buffer for the duration of the paint.
    FFBuffer * cachedFull = this->fullbuf;
    cachedFull->reserve();
    if (cachedFull->pix_fmt == PIX_FMT_YUVJ420P) {
        // xvideo supported
        this->xv_image->data = (char *) cachedFull->pFrame->data[0];
        /* Draw the image */
        XvPutImage(this->dpy, this->xv_port, this->w, this->gc, this->xv_image,
                   _x, _y, _visW, _visH, 0, 0, _scVisW, _scVisH);
    } else {
        // QImage fallback
        QPainter painter(this);
        QImage image(cachedFull->pFrame->data[0], cachedFull->width, cachedFull->height,
                     QImage::Format_RGB888);
        // Crop to the visible region, then scale to the widget.
        painter.drawImage(QPoint(0, 0),
                          image.copy(QRect(_x, _y, _visW, _visH)).scaled(_scVisW, _scVisH));
        /* Draw the grid */
        if (_grid) {
            QPainter painter(this);  // NOTE(review): shadows the outer painter on the same widget
            // note the 0.5 gives us the middle of the pixel
            double scGx = (_gx-_x+0.5)*this->sfx;
            double scGy = (_gy-_y+0.5)*this->sfy;
            double scGsx = _gs*this->sfx;
            double scGsy = _gs*this->sfy;
            // Skip the minor grid entirely if the spacing degenerates.
            if (scGsx > 0.1 && scGsy > 0.1) {
                // Draw minor lines
                QColor gscol = QColor(_gcol);
                gscol.setAlpha(35);
                painter.setPen(gscol);
                // X lines to the left of crosshair
                for (double scx = scGx - scGsx; scx > 0; scx -= scGsx) {
                    painter.drawLine((int)(scx+0.5), 0, (int)(scx+0.5), _scVisH);
                }
                // X lines to the right of crosshair
                for (double scx = scGx + scGsx; scx < _scVisW; scx += scGsx) {
                    painter.drawLine((int)(scx+0.5), 0, (int)(scx+0.5), _scVisH);
                }
                // Y lines above the crosshair
                for (double scy = scGy - scGsy; scy > 0; scy -= scGsy) {
                    painter.drawLine(0, (int)(scy+0.5), _scVisW, (int)(scy+0.5));
                }
                // Y lines below the crosshair
                for (double scy = scGy + scGsy; scy < _scVisH; scy += scGsy) {
                    painter.drawLine(0, (int)(scy+0.5), _scVisW, (int)(scy+0.5));
                }
            }
            // Draw crosshairs
            painter.setPen(_gcol);
            painter.drawLine((int)(scGx+0.5), 0, (int)(scGx+0.5), _scVisH);
            painter.drawLine(0, (int)(scGy+0.5), _scVisW, (int)(scGy+0.5));
        }
    }
    cachedFull->release();
}
// Copy one planar frame into the current Xv buffer and display it.
// NOTE(review): the copy swaps the second and third source planes (offsets[2]
// into the first chroma slot, offsets[1] into the second) — presumably a
// U/V order fixup between the source layout and the Xv format; confirm
// against the producer's plane order.
void XVWindow::PutFrame (uint8_t* frame, uint16_t width, uint16_t height)
{
  if (!_XVImage[_curBuffer])
    return;

  // Buffers are allocated for a fixed size; bail out on any mismatch.
  if (width != _XVImage[_curBuffer]->width || height != _XVImage[_curBuffer]->height) {
    PTRACE (1, "XVideo\tDynamic switching of resolution not supported\n");
    return;
  }

  XLockDisplay (_display);

  // Fast path: image pitches exactly match packed plane widths, so each
  // plane can be copied in one memcpy.
  if (_XVImage[_curBuffer]->pitches [0] ==_XVImage[_curBuffer]->width
      && _XVImage[_curBuffer]->pitches [2] == (int) (_XVImage[_curBuffer]->width / 2)
      && _XVImage[_curBuffer]->pitches [1] == (int) (_XVImage[_curBuffer]->width / 2)) {
    // Luma plane.
    memcpy (_XVImage[_curBuffer]->data, frame,
            (int) (_XVImage[_curBuffer]->width * _XVImage[_curBuffer]->height));
    // First chroma plane (taken from the source's offsets[2] plane).
    memcpy (_XVImage[_curBuffer]->data
              + (int) (_XVImage[_curBuffer]->width * _XVImage[_curBuffer]->height),
            frame + _XVImage[_curBuffer]->offsets [2],
            (int) (_XVImage[_curBuffer]->width * _XVImage[_curBuffer]->height / 4));
    // Second chroma plane (taken from the source's offsets[1] plane).
    memcpy (_XVImage[_curBuffer]->data
              + (int) (_XVImage[_curBuffer]->width * _XVImage[_curBuffer]->height * 5 / 4),
            frame + _XVImage[_curBuffer]->offsets [1],
            (int) (_XVImage[_curBuffer]->width * _XVImage[_curBuffer]->height / 4));
  }
  else {
    // Slow path: padded pitches — copy row by row, two luma rows plus one
    // chroma row of each plane per loop iteration (4:2:0 subsampling).
    unsigned int i = 0;
    int width2 = (int) (_XVImage[_curBuffer]->width / 2);
    uint8_t* dstY = (uint8_t*) _XVImage[_curBuffer]->data;
    uint8_t* dstV = (uint8_t*) _XVImage[_curBuffer]->data
                      + (_XVImage[_curBuffer]->pitches [0] * _XVImage[_curBuffer]->height);
    uint8_t* dstU = (uint8_t*) _XVImage[_curBuffer]->data
                      + (_XVImage[_curBuffer]->pitches [0] * _XVImage[_curBuffer]->height)
                      + (_XVImage[_curBuffer]->pitches [1] * (_XVImage[_curBuffer]->height/2));
    uint8_t* srcY = frame;
    uint8_t* srcV = frame
                      + (int) (_XVImage[_curBuffer]->width * _XVImage[_curBuffer]->height * 5 / 4);
    uint8_t* srcU = frame
                      + (int) (_XVImage[_curBuffer]->width * _XVImage[_curBuffer]->height);

    for (i = 0 ; i < (unsigned int)_XVImage[_curBuffer]->height ; i+=2) {
      memcpy (dstY, srcY, _XVImage[_curBuffer]->width);
      dstY +=_XVImage[_curBuffer]->pitches [0];
      srcY +=_XVImage[_curBuffer]->width;
      memcpy (dstY, srcY, _XVImage[_curBuffer]->width);
      dstY +=_XVImage[_curBuffer]->pitches [0];
      srcY +=_XVImage[_curBuffer]->width;
      memcpy (dstV, srcV, width2);
      dstV +=_XVImage[_curBuffer]->pitches [1];
      srcV += width2;
      memcpy(dstU, srcU, width2);
      dstU+=_XVImage[_curBuffer]->pitches [2];
      srcU += width2;
    }
  }

#ifdef HAVE_SHM
  if (_useShm) {
    XvShmPutImage (_display, _XVPort, _XWindow, _gc, _XVImage[_curBuffer],
                   0, 0, _XVImage[_curBuffer]->width, _XVImage[_curBuffer]->height,
                   _state.curX, _state.curY, _state.curWidth, _state.curHeight,
                   false);
  }
  else
#endif
  {
    XvPutImage (_display, _XVPort, _XWindow, _gc, _XVImage[_curBuffer],
                0, 0, _XVImage[_curBuffer]->width, _XVImage[_curBuffer]->height,
                _state.curX, _state.curY, _state.curWidth, _state.curHeight);
  }

  // Advance to the next buffer in the ring.
  _curBuffer = (_curBuffer + 1) % NUM_BUFFERS;
  XUnlockDisplay (_display);
}
/* Video timer callback: display the previously decoded frame (if ready),
 * decode the next one, then reschedule itself based on the frame's PTS
 * relative to the playback clock. Always returns FALSE — rescheduling is
 * done explicitly via g_timeout_add_full, never by repeating this source. */
static gboolean on_timeout (PlayerAV *self)
{
    /* vt_id < 0 is the stop signal: drop the pending frame and do not rearm. */
    if (self->priv->vt_id < 0) {
        self->priv->frame_ready = FALSE;
        return FALSE;
    }

    gint res;
    int64_t pts;
    int64_t ctime;
    gint sw, sh, sx, sy;
    double ratio = 1.0 * self->priv->vctx->width / self->priv->vctx->height;

    if (self->priv->frame_ready) {
        /* Convert the decoded frame into the Xv image buffer. */
        sws_scale (self->priv->sws_ctx,
            self->priv->vframe->data, self->priv->vframe->linesize, 0,
            self->priv->vctx->height,
            self->priv->vframe_xv->data, self->priv->vframe_xv->linesize);

        /* Fit the video into the window preserving aspect ratio
         * (letterbox or pillarbox, centered). */
        if (self->priv->win_height * ratio > self->priv->win_width) {
            sw = self->priv->win_width;
            sh = self->priv->win_width / ratio;
            sx = 0;
            sy = (self->priv->win_height - sh) / 2;
        } else {
            sw = self->priv->win_height * ratio;
            sh = self->priv->win_height;
            sx = (self->priv->win_width - sw) / 2;
            sy = 0;
        }

        XvPutImage (self->priv->display, self->priv->xv_port_id,
            self->priv->win, self->priv->xv_gc, self->priv->xvimage,
            0, 0, self->priv->vctx->width, self->priv->vctx->height,
            sx, sy, sw, sh);

        self->priv->frame_ready = FALSE;
    }

    if (self->priv->state != PLAYER_STATE_PLAYING) {
        self->priv->frame_ready = FALSE;
        self->priv->vt_id = -1;
        return FALSE;
    }

    /* Decode the next frame ahead of time for the following tick.
     * NOTE(review): res is assigned but never checked. */
    res = player_av_get_video_frame (self, self->priv->vframe, &pts);
    self->priv->frame_ready = TRUE;

//    double delta = av_q2d (self->priv->fctx->streams[self->priv->vstream]->time_base);
    double delta = av_q2d (self->priv->vctx->time_base);
//    if (delta < 0.005) {
//        delta = av_q2d (self->priv->vctx->time_base);
//    }
    if (self->priv->vctx->time_base.num > 1) {
        delta *= 2;
    }

    /* If the PTS step is tiny, treat PTS as frame counts and convert to ms. */
    double mult = 1;
    if (abs (pts - self->priv->vpos) < 5) {
        mult = delta * 1000;
    }

    self->priv->vpos = pts;

    /* Playback clock in milliseconds since playback started. */
    ctime = (av_gettime () - self->priv->start_time) / 1000;
    double delay = pts * mult - ctime;

    /* Rearm: cap very long waits, sleep the computed delay, or catch up
     * as fast as the default priority allows when we are behind. */
    if (delay > 5000 * delta) {
        self->priv->vt_id = g_timeout_add_full (
            G_PRIORITY_HIGH, delta * 5000,
            (GSourceFunc) on_timeout, self, NULL);
    } else if (delay > 0) {
        self->priv->vt_id = g_timeout_add_full (
            G_PRIORITY_HIGH, delay,
            (GSourceFunc) on_timeout, self, NULL);
    } else {
        self->priv->vt_id = g_timeout_add_full (
            G_PRIORITY_DEFAULT, 1,
            (GSourceFunc) on_timeout, self, NULL);
    }

//    g_print ("VP(%d) PTS(%d) DIFF(%f) TIME(%d) TB(%d,%d) M(%f) D(%f) STB(%d,%d)\n", self->priv->vpos, pts, delta, ctime,
//             self->priv->vctx->time_base.num, self->priv->vctx->time_base.den, mult, delay,
//             self->priv->fctx->streams[self->priv->vstream]->time_base.num,
//             self->priv->fctx->streams[self->priv->vstream]->time_base.den);

    return FALSE;
}
/* Convert an RGB framebuffer to I420 and blit it through the Xv port,
 * scaled to the driver's window size. (Re)creates the XvImage and its data
 * buffer whenever the source dimensions change. Returns 1 on success,
 * 0 on allocation failure (with a message in error_text).
 * NOTE(review): params and needs_adjust are unused here. */
static int X11_xv_blit(struct DriverInstance* sh,
                       const uint8_t* fb,
                       int width, int height,
                       struct blit_params* params,
                       int needs_adjust,
                       char* error_text, int text_len)
{
  /* (Re)create the XvImage descriptor when first used or when resized. */
  if (sh->xv_image == 0 || width != sh->image_width || height != sh->image_height)
    {
      if (sh->xv_image)
        XFree(sh->xv_image);

      sh->xv_image = XvCreateImage(sh->display, sh->xv_port,
                                   sh->xv_format_id, 0, width, height);
      sh->image_width = width;
      sh->image_height = height;
    }

  /* Grow the pixel buffer to the size the new image requires. */
  if (sh->data == 0 || sh->data_size < sh->xv_image->data_size)
    {
      if (sh->data)
        free(sh->data);

      sh->data = malloc(sh->xv_image->data_size);
      sh->data_size = sh->xv_image->data_size;

      if (sh->data == 0)
        {
          snprintf(error_text, text_len, "Could not allocate data for XVImage");
          return 0;
        }
    }

  assert(sh->data);

  //TODO: this is a hack for big-endian machines
  // NOTE(review): this converts sh->data BEFORE the rgb->i420 conversion
  // below fills it, and uses sh->width/height rather than the new
  // width/height — looks suspect; confirm intent on a big-endian target.
  if (big_endian())
    convert_endianness(sh->data, sh->width, sh->height);

  sh->xv_image->data = sh->data;

  /* Convert the source framebuffer (depth decides the pixel layout). */
  if (sh->vis.depth == 24)
    cvt_rgb32_to_i420((uint8_t*) sh->data, (uint32_t*) fb, width, height,
                      sh->xv_image->pitches, sh->xv_image->offsets);
  else if (sh->vis.depth == 16)
    cvt_rgb16_to_i420((uint8_t*) sh->data, (uint16_t*) fb, width, height,
                      sh->xv_image->pitches, sh->xv_image->offsets);

  // blit image
  XvPutImage(sh->display, sh->xv_port, sh->win, sh->gc, sh->xv_image,
             0, 0, width, height, 0, 0, sh->width, sh->height);
  XFlush(sh->display);
  //XSync(sh->display, False);

  return 1;
}
// Present one video frame on the Xv port.
// Frames carrying an XvShmImageHandle are rendered directly from their
// shared-memory XvImage; all other frames are mapped read-only and copied
// through the surface's own XvImage. Returns false (with an error set) on
// size/pitch mismatch or when the surface is stopped.
bool QX11VideoSurface::present(const QVideoFrame &frame)
{
    if (!m_image) {
        // No XvImage: the surface has not been started (or was stopped).
        setError(StoppedError);
        return false;
    } else if (m_image->width != frame.width() || m_image->height != frame.height()) {
        setError(IncorrectFormatError);
        return false;
    } else {
        // Shallow copy so we can map without touching the caller's frame.
        QVideoFrame frameCopy(frame);

        if (!frameCopy.map(QAbstractVideoBuffer::ReadOnly)) {
            setError(IncorrectFormatError);
            return false;
        } else {
            bool presented = false;
            // Size/pitch sanity checks only apply to the copy path; a shm
            // handle frame already owns a compatible XvImage.
            if (frame.handleType() != QAbstractVideoBuffer::XvShmImageHandle &&
                m_image->data_size > frame.mappedBytes()) {
                qWarning("Insufficient frame buffer size");
                setError(IncorrectFormatError);
            } else if (frame.handleType() != QAbstractVideoBuffer::XvShmImageHandle &&
                       m_image->num_planes > 0 &&
                       m_image->pitches[0] != frame.bytesPerLine()) {
                qWarning("Incompatible frame pitches");
                setError(IncorrectFormatError);
            } else {
                if (frame.handleType() != QAbstractVideoBuffer::XvShmImageHandle) {
                    // Copy path: point the XvImage at the mapped bits only
                    // for the duration of the blit, then detach again.
                    m_image->data = reinterpret_cast<char *>(frameCopy.bits());

                    //qDebug() << "copy frame";
                    XvPutImage(
                            QX11Info::display(),
                            m_portId,
                            m_winId,
                            m_gc,
                            m_image,
                            m_viewport.x(),
                            m_viewport.y(),
                            m_viewport.width(),
                            m_viewport.height(),
                            m_displayRect.x(),
                            m_displayRect.y(),
                            m_displayRect.width(),
                            m_displayRect.height());

                    m_image->data = 0;
                } else {
                    // Direct path: render the frame's own shared-memory image.
                    XvImage *img = frame.handle().value<XvImage*>();

                    //qDebug() << "render directly";
                    if (img)
                        XvShmPutImage(
                                QX11Info::display(),
                                m_portId,
                                m_winId,
                                m_gc,
                                img,
                                m_viewport.x(),
                                m_viewport.y(),
                                m_viewport.width(),
                                m_viewport.height(),
                                m_displayRect.x(),
                                m_displayRect.y(),
                                m_displayRect.width(),
                                m_displayRect.height(),
                                false);
                }
                presented = true;
            }

            frameCopy.unmap();

            return presented;
        }
    }
}