// Grabs a single frame through the amvideocap kernel capture interface.
//
// Opens the capture device, configures the wanted frame size, reads the raw
// BGR24 snapshot with pread() and converts it into the caller-supplied RGB
// image via the resampler. _lastError rate-limits logging: each distinct
// failure is reported only on its first occurrence (ErrorIf fires while
// _lastError differs from the branch's code).
//
// @param image  destination image, resized to _width x _height
// @return 0 on success, -1 on any failure
int AmlogicGrabber::grabFrame_amvideocap(Image<ColorRgb> & image)
{
	// If the device is not open, attempt to open it
	if (! openDev(_captureDev, CAPTURE_DEVICE))
	{
		ErrorIf( _lastError != 1, _log,"Failed to open the AMLOGIC device (%d - %s):", errno, strerror(errno));
		_lastError = 1;
		return -1;
	}

	long r1 = ioctl(_captureDev, AMVIDEOCAP_IOW_SET_WANTFRAME_WIDTH,  _width);
	long r2 = ioctl(_captureDev, AMVIDEOCAP_IOW_SET_WANTFRAME_HEIGHT, _height);

	if (r1<0 || r2<0 || _height==0 || _width==0)
	{
		ErrorIf(_lastError != 2,_log,"Failed to configure capture size (%d - %s)", errno, strerror(errno));
		closeDev(_captureDev);
		_lastError = 2;
		return -1;
	}

	// Read the snapshot into the memory
	image.resize(_width, _height);
	_image_bgr.resize(_width, _height);
	const ssize_t bytesToRead = _image_bgr.size();
	void * image_ptr = _image_bgr.memptr();
	const ssize_t bytesRead   = pread(_captureDev, image_ptr, bytesToRead, 0);

	if (bytesRead < 0)
	{
		ErrorIf(_lastError != 3, _log,"Read of device failed: %d - %s", errno, strerror(errno));
		closeDev(_captureDev);
		_lastError = 3;
		return -1;
	}
	else if (bytesToRead != bytesRead)
	{
		// Read of snapshot failed.
		// BUGFIX: record error state 4 so the message is not repeated on every
		// frame (all other branches set _lastError; this one forgot to), and
		// cast the ssize_t values to int to match the %d format specifiers.
		ErrorIf(_lastError != 4, _log,"Capture failed to grab entire image [bytesToRead(%d) != bytesRead(%d)]", (int)bytesToRead, (int)bytesRead);
		closeDev(_captureDev);
		_lastError = 4;
		return -1;
	}

	closeDev(_captureDev);

	// Convert the captured BGR24 buffer into the output RGB image.
	_useImageResampler = true;
	_imageResampler.processImage((const uint8_t*)image_ptr, _width, _height, _width*3, PIXELFORMAT_BGR24, image);
	_lastError = 0;
	return 0;
}
//_______________________________________________________________________ int main (int argc, char *argv[]) { DrvMemPtr mem = GetData(); uid_t uid = getuid (); if (argc != 2) usage (argv[0]); CheckMidiShare(); setuid (name_to_uid ("root")); mem->devId = openDev (argv[1]); SetupMidi (argv[1], mem); ThreadCreate (mem); run (mem, argv[1]); closeDriver (mem); setuid (uid); return 0; }
bool AmlogicGrabber::isVideoPlaying() { if(!QFile::exists(VIDEO_DEVICE)) return false; int videoDisabled = 1; if (!openDev(_videoDev, VIDEO_DEVICE)) { Error(_log, "Failed to open video device(%s): %d - %s", VIDEO_DEVICE, errno, strerror(errno)); return false; } else { // Check the video disabled flag if(ioctl(_videoDev, AMSTREAM_IOC_GET_VIDEO_DISABLE, &videoDisabled) < 0) { Error(_log, "Failed to retrieve video state from device: %d - %s", errno, strerror(errno)); closeDev(_videoDev); return false; } } return videoDisabled == 0; }
// Thin adapter: opens the SWPB portal by delegating straight to the generic
// openDev() with the supplied capability set.
static Portal* openSwpb(Portal *p, Caps caps) { return openDev(p, caps); }
// Selects the serial device to operate on and opens it.
//
// Builds the full device path ("/dev/" + strDevName), opens it and, when the
// open succeeded, discards any pending input and output on the line.
//
// @param strDevName  device name without the "/dev/" prefix (e.g. "ttyS0")
// @return the device file descriptor (m_iFD); -1 indicates failure
int SeialDevLin::setDeviceToWork(string strDevName){
	m_strDev = "/dev/" + strDevName;
	// BUGFIX: check openDev()'s result (it returns -1 on failure, see
	// setDevParamsByName) instead of unconditionally flushing a descriptor
	// that may be invalid.
	if (openDev() != -1)
	{
		tcflush(m_iFD, TCIOFLUSH);
	}
	return m_iFD;
}
bool SeialDevLin::setDevParamsByName(string strDevName,int iBaud,int iDataBits,int iStopBits, int iFlow){ setDeviceToWork(strDevName); if(openDev()==-1)return false; return setDevParams(iBaud, iDataBits, iStopBits, iFlow); }
// Thin adapter: opens the UART portal by delegating straight to the generic
// openDev() with the supplied capability set.
static Portal* openUart(Portal* p, Caps caps) { return openDev(p, caps); }
// Thin adapter: opens the timer portal by delegating straight to the generic
// openDev() with the supplied capability set.
static Portal* openTimer(Portal* p, Caps caps) { return openDev(p, caps); }
// Grabs a single frame by asking the kernel for the current video frame and
// blitting it into an ION buffer with the GE2D 2D engine, then converting
// that BGR buffer into the output RGB image.
//
// NOTE(review): unlike grabFrame_amvideocap, the early error returns here do
// not closeDev() the devices they opened — presumably relying on the next
// call's openDev() to reuse/reopen them; verify against openDev's semantics.
//
// @param image  destination image, _width x _height
// @return 0 on success, -1 on any failure
int AmlogicGrabber::grabFrame_ge2d(Image<ColorRgb> & image)
{
	if ( ! openDev(_ge2dDev, GE2D_DEVICE) || ! openDev(_videoDev, VIDEO_DEVICE))
	{
		Error(_log, "cannot open devices");
		return -1;
	}

	// Ion: lazily allocate (and zero) the shared destination buffer once,
	// sized for a BGR (3 bytes/pixel) frame.
	if (_ge2dIonBuffer == nullptr)
	{
		_ge2dIonBuffer = new IonBuffer(_width * _height * 3); // BGR
		_ge2dVideoBufferPtr = _ge2dIonBuffer->Map();
		memset(_ge2dVideoBufferPtr, 0, _ge2dIonBuffer->BufferSize());
	}

	// Acquire the current video frame; it must be returned with
	// AMVIDEO_EXT_PUT_CURRENT_VIDEOFRAME after the blit below.
	int canvas_index;
	if (ioctl(_videoDev, AMVIDEO_EXT_GET_CURRENT_VIDEOFRAME, &canvas_index) < 0)
	{
		Error(_log, "AMSTREAM_EXT_GET_CURRENT_VIDEOFRAME failed.");
		return -1;
	}

	// Query the frame's canvas address, pixel format and packed dimensions.
	uint32_t canvas0addr;
	if (ioctl(_videoDev, AMVIDEO_EXT_CURRENT_VIDEOFRAME_GET_CANVAS0ADDR, &canvas0addr) < 0)
	{
		Error(_log, "AMSTREAM_EXT_CURRENT_VIDEOFRAME_GET_CANVAS0ADDR failed.");
		return -1;
	}

	uint32_t ge2dformat;
	if (ioctl(_videoDev, AMVIDEO_EXT_CURRENT_VIDEOFRAME_GET_GE2D_FORMAT, &ge2dformat) <0)
	{
		Error(_log, "AMSTREAM_EXT_CURRENT_VIDEOFRAME_GET_GE2D_FORMAT failed.");
		return -1;
	}

	// size packs width in the high 32 bits and height in the low bits.
	uint64_t size;
	if (ioctl(_videoDev, AMVIDEO_EXT_CURRENT_VIDEOFRAME_GET_SIZE, &size) < 0)
	{
		Error(_log, "AMSTREAM_EXT_CURRENT_VIDEOFRAME_GET_SIZE failed.");
		return -1;
	}

	unsigned cropLeft   = _cropLeft;
	unsigned cropRight  = _cropRight;
	unsigned cropTop    = _cropTop;
	unsigned cropBottom = _cropBottom;
	int videoWidth  = (size >> 32) - cropLeft - cropRight;
	int videoHeight = (size & 0xffffff) - cropTop - cropBottom;

	// calculate final image dimensions and adjust top/left cropping in 3D modes
	switch (_videoMode)
	{
	case VIDEO_3DSBS:
		videoWidth /= 2;
		cropLeft /= 2;
		break;
	case VIDEO_3DTAB:
		videoHeight /= 2;
		cropTop /= 2;
		break;
	case VIDEO_2D:
	default:
		break;
	}

	// Configure the GE2D source (the acquired frame canvas, cropped) and
	// destination (the ION buffer as RGB24).
	struct config_para_ex_s configex = { 0 };
	configex.src_para.mem_type     = CANVAS_TYPE_INVALID;
	configex.src_para.canvas_index = canvas0addr;
	configex.src_para.left         = cropLeft;
	configex.src_para.top          = cropTop;
	configex.src_para.width        = videoWidth;
	// NOTE(review): source height is halved here — presumably to sample a
	// single field/plane of the frame; confirm against the GE2D format docs.
	configex.src_para.height       = videoHeight / 2;
	configex.src_para.format       = ge2dformat;
	configex.dst_para.mem_type     = CANVAS_ALLOC;
	configex.dst_para.format       = GE2D_FORMAT_S24_RGB;
	configex.dst_para.left         = 0;
	configex.dst_para.top          = 0;
	configex.dst_para.width        = _width;
	configex.dst_para.height       = _height;
	configex.dst_planes[0].addr    = (long unsigned int)_ge2dIonBuffer->PhysicalAddress();
	configex.dst_planes[0].w       = configex.dst_para.width;
	configex.dst_planes[0].h       = configex.dst_para.height;

	if (ioctl(_ge2dDev, GE2D_CONFIG_EX, &configex) < 0)
	{
		Error(_log, "video GE2D_CONFIG_EX failed.");
		return -1;
	}

	// Full-source to full-destination stretch blit rectangles.
	ge2d_para_s blitRect = { 0 };
	blitRect.src1_rect.x = 0;
	blitRect.src1_rect.y = 0;
	blitRect.src1_rect.w = configex.src_para.width;
	blitRect.src1_rect.h = configex.src_para.height;
	blitRect.dst_rect.x  = 0;
	blitRect.dst_rect.y  = 0;
	blitRect.dst_rect.w  = configex.dst_para.width ;
	blitRect.dst_rect.h  = configex.dst_para.height;

	// Blit to videoBuffer
	if (ioctl(_ge2dDev, GE2D_STRETCHBLIT_NOALPHA, &blitRect) < 0)
	{
		Error(_log,"GE2D_STRETCHBLIT_NOALPHA failed.");
		return -1;
	}

	// Return video frame
	if (ioctl(_videoDev, AMVIDEO_EXT_PUT_CURRENT_VIDEOFRAME) < 0)
	{
		Error(_log, "AMSTREAM_EXT_PUT_CURRENT_VIDEOFRAME failed.");
		return -1;
	}

	// Make the GE2D result visible to the CPU before reading it.
	_ge2dIonBuffer->Sync();

	// Read the snapshot into the memory
	_useImageResampler = false;
	_imageResampler.processImage((const uint8_t*)_ge2dVideoBufferPtr, _width, _height, _width*3, PIXELFORMAT_BGR24, image);

	closeDev(_videoDev);
	closeDev(_ge2dDev);
	return 0;
}