int main(int argc, char** argv)
{
    DecodeStreamInput *input;
    int32_t fd = -1;
    int32_t i = 0;
    int32_t ioctlRet = -1;
    YamiMediaCodec::CalcFps calcFps;

    renderMode = 3; // set default render mode to 3
    yamiTraceInit();
    XInitThreads();

    if (!process_cmdline(argc, argv))
        return -1;

    if (!inputFileName) {
        ERROR("no input media file specified\n");
        return -1;
    }
    INFO("input file: %s, renderMode: %d", inputFileName, renderMode);

    if (!dumpOutputDir)
        dumpOutputDir = strdup("./");

#if !__ENABLE_V4L2_GLX__
    switch (renderMode) {
    case 0:
        memoryType = VIDEO_DATA_MEMORY_TYPE_RAW_COPY;
        break;
    case 3:
        memoryType = VIDEO_DATA_MEMORY_TYPE_DRM_NAME;
        break;
    case 4:
        memoryType = VIDEO_DATA_MEMORY_TYPE_DMA_BUF;
        break;
    default:
        ASSERT(0 && "unsupported render mode, -m [0,3,4] are supported");
        break;
    }
#endif

    input = DecodeStreamInput::create(inputFileName);
    if (input == NULL) {
        ERROR("fail to init input stream\n");
        return -1;
    }

    renderFrameCount = 0;
    calcFps.setAnchor();

    // open device
    fd = YamiV4L2_Open("decoder", 0);
    ASSERT(fd != -1);

    x11Display = XOpenDisplay(NULL);
    ASSERT(x11Display);
#if __ENABLE_V4L2_GLX__
    ioctlRet = YamiV4L2_SetXDisplay(fd, x11Display);
#endif

    // set output frame memory type
    YamiV4L2_FrameMemoryType(fd, memoryType);

    // query hw capability
    struct v4l2_capability caps;
    memset(&caps, 0, sizeof(caps));
    caps.capabilities = V4L2_CAP_VIDEO_CAPTURE_MPLANE | V4L2_CAP_VIDEO_OUTPUT_MPLANE | V4L2_CAP_STREAMING;
    ioctlRet = YamiV4L2_Ioctl(fd, VIDIOC_QUERYCAP, &caps);
    ASSERT(ioctlRet != -1);

    // set input/output data format
    uint32_t codecFormat = 0;
    const char* mimeType = input->getMimeType();
    if (!strcmp(mimeType, "video/h264"))
        codecFormat = V4L2_PIX_FMT_H264;
    else if (!strcmp(mimeType, "video/x-vnd.on2.vp8"))
        codecFormat = V4L2_PIX_FMT_VP8;
    else if (!strcmp(mimeType, "image/jpeg"))
        codecFormat = V4L2_PIX_FMT_MJPEG;
    else {
        ERROR("unsupported mimetype");
        return -1;
    }

    struct v4l2_format format;
    memset(&format, 0, sizeof(format));
    format.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
    format.fmt.pix_mp.pixelformat = codecFormat;
    format.fmt.pix_mp.num_planes = 1;
    format.fmt.pix_mp.plane_fmt[0].sizeimage = k_maxInputBufferSize;
    ioctlRet = YamiV4L2_Ioctl(fd, VIDIOC_S_FMT, &format);
    ASSERT(ioctlRet != -1);

    // set preferred output format
    memset(&format, 0, sizeof(format));
    format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
    format.fmt.pix_mp.pixelformat = V4L2_PIX_FMT_NV12M;
    ioctlRet = YamiV4L2_Ioctl(fd, VIDIOC_S_FMT, &format);
    ASSERT(ioctlRet != -1);

    // start input port
    __u32 type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
    ioctlRet = YamiV4L2_Ioctl(fd, VIDIOC_STREAMON, &type);
    ASSERT(ioctlRet != -1);

    // start output port
    type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
    ioctlRet = YamiV4L2_Ioctl(fd, VIDIOC_STREAMON, &type);
    ASSERT(ioctlRet != -1);

    // setup input buffers
    struct v4l2_requestbuffers reqbufs;
    memset(&reqbufs, 0, sizeof(reqbufs));
    reqbufs.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
    reqbufs.memory = V4L2_MEMORY_MMAP;
    reqbufs.count = 2;
    ioctlRet = YamiV4L2_Ioctl(fd, VIDIOC_REQBUFS, &reqbufs);
    ASSERT(ioctlRet != -1);
    ASSERT(reqbufs.count > 0);
    inputQueueCapacity = reqbufs.count;
    inputFrames.resize(inputQueueCapacity);

    for (i = 0; i < inputQueueCapacity; i++) {
        struct v4l2_plane planes[k_inputPlaneCount];
        struct v4l2_buffer buffer;
        memset(&buffer, 0, sizeof(buffer));
        memset(planes, 0, sizeof(planes));
        buffer.index = i;
        buffer.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
        buffer.memory = V4L2_MEMORY_MMAP;
        buffer.m.planes = planes;
        buffer.length = k_inputPlaneCount;
        ioctlRet = YamiV4L2_Ioctl(fd, VIDIOC_QUERYBUF, &buffer);
        ASSERT(ioctlRet != -1);

        // length and mem_offset should be filled by VIDIOC_QUERYBUF above
        void* address = YamiV4L2_Mmap(NULL, buffer.m.planes[0].length,
            PROT_READ | PROT_WRITE, MAP_SHARED, fd, buffer.m.planes[0].m.mem_offset);
        ASSERT(address);
        inputFrames[i] = static_cast<uint8_t*>(address);
        DEBUG("inputFrames[%d] = %p", i, inputFrames[i]);
    }

    // feed input frames first
    for (i = 0; i < inputQueueCapacity; i++) {
        if (!feedOneInputFrame(input, fd, i)) {
            break;
        }
    }

    // query video resolution
    memset(&format, 0, sizeof(format));
    format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
    while (1) {
        // EINVAL means we haven't seen enough of the stream to determine the format yet
        if (YamiV4L2_Ioctl(fd, VIDIOC_G_FMT, &format) != 0) {
            if (errno != EINVAL) {
                INFO("ioctl() failed: VIDIOC_G_FMT, video resolution is not available yet, waiting");
            }
        } else {
            break;
        }
        usleep(50);
    }
    outputPlaneCount = format.fmt.pix_mp.num_planes;
    ASSERT(outputPlaneCount == 2);
    videoWidth = format.fmt.pix_mp.width;
    videoHeight = format.fmt.pix_mp.height;
    ASSERT(videoWidth && videoHeight);

    // setup output buffers
    memset(&reqbufs, 0, sizeof(reqbufs));
    reqbufs.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
    reqbufs.memory = V4L2_MEMORY_MMAP;
    reqbufs.count = outputPlaneCount;
    ioctlRet = YamiV4L2_Ioctl(fd, VIDIOC_REQBUFS, &reqbufs);
    ASSERT(ioctlRet != -1);
    ASSERT(reqbufs.count > 0);
    outputQueueCapacity = reqbufs.count;

    x11Window = XCreateSimpleWindow(x11Display, DefaultRootWindow(x11Display),
        0, 0, videoWidth, videoHeight, 0, 0, WhitePixel(x11Display, 0));
    XMapWindow(x11Display, x11Window);

#if __ENABLE_V4L2_GLX__
    pixmaps.resize(outputQueueCapacity);
    glxPixmaps.resize(outputQueueCapacity);
    textureIds.resize(outputQueueCapacity);
    if (!glxContext) {
        glxContext = glxInit(x11Display, x11Window);
    }
    ASSERT(glxContext);
    glGenTextures(outputQueueCapacity, &textureIds[0]);
    for (i = 0; i < outputQueueCapacity; i++) {
        int ret = createPixmapForTexture(glxContext, textureIds[i], videoWidth, videoHeight, &pixmaps[i], &glxPixmaps[i]);
        DEBUG("textureIds[%d]: 0x%x, pixmaps[%d]=0x%lx, glxPixmaps[%d]: 0x%lx", i, textureIds[i], i, pixmaps[i], i, glxPixmaps[i]);
        ASSERT(ret == 0);
        ret = YamiV4L2_UsePixmap(fd, i, pixmaps[i]);
        ASSERT(ret == 0);
    }
#else
    if (memoryType == VIDEO_DATA_MEMORY_TYPE_RAW_COPY) {
        rawOutputFrames.resize(outputQueueCapacity);
        for (i = 0; i < outputQueueCapacity; i++) {
            struct v4l2_plane planes[k_maxOutputPlaneCount];
            struct v4l2_buffer buffer;
            memset(&buffer, 0, sizeof(buffer));
            memset(planes, 0, sizeof(planes));
            buffer.index = i;
            buffer.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
            buffer.memory = V4L2_MEMORY_MMAP;
            buffer.m.planes = planes;
            buffer.length = outputPlaneCount;
            ioctlRet = YamiV4L2_Ioctl(fd, VIDIOC_QUERYBUF, &buffer);
            ASSERT(ioctlRet != -1);

            rawOutputFrames[i].width = format.fmt.pix_mp.width;
            rawOutputFrames[i].height = format.fmt.pix_mp.height;
            rawOutputFrames[i].fourcc = format.fmt.pix_mp.pixelformat;
            for (int j = 0; j < outputPlaneCount; j++) {
                // length and mem_offset are filled by VIDIOC_QUERYBUF above
                void* address = YamiV4L2_Mmap(NULL, buffer.m.planes[j].length,
                    PROT_READ | PROT_WRITE, MAP_SHARED, fd, buffer.m.planes[j].m.mem_offset);
                ASSERT(address);
                if (j == 0) {
                    rawOutputFrames[i].data = static_cast<uint8_t*>(address);
                    rawOutputFrames[i].offset[0] = 0;
                } else {
                    rawOutputFrames[i].offset[j] = static_cast<uint8_t*>(address) - rawOutputFrames[i].data;
                }
                rawOutputFrames[i].pitch[j] = format.fmt.pix_mp.plane_fmt[j].bytesperline;
            }
        }
    } else if (memoryType == VIDEO_DATA_MEMORY_TYPE_DMA_BUF
               || memoryType == VIDEO_DATA_MEMORY_TYPE_DRM_NAME) {
        // setup all textures and eglImages
        eglImages.resize(outputQueueCapacity);
        textureIds.resize(outputQueueCapacity);
        if (!eglContext)
            eglContext = eglInit(x11Display, x11Window, 0 /*VA_FOURCC_RGBA*/);
        glGenTextures(outputQueueCapacity, &textureIds[0]);
        for (i = 0; i < outputQueueCapacity; i++) {
            int ret = 0;
            ret = YamiV4L2_UseEglImage(fd, eglContext->eglContext.display, eglContext->eglContext.context, i, &eglImages[i]);
            ASSERT(ret == 0);
            glBindTexture(GL_TEXTURE_2D, textureIds[i]);
            glEGLImageTargetTexture2DOES(GL_TEXTURE_2D, eglImages[i]);
            glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
            glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_NEAREST);
            DEBUG("textureIds[%d]: 0x%x, eglImages[%d]: 0x%x", i, textureIds[i], i, eglImages[i]);
        }
    }
#endif

    // feed output frames first
    for (i = 0; i < outputQueueCapacity; i++) {
        if (!takeOneOutputFrame(fd, i)) {
            ASSERT(0);
        }
    }

    bool event_pending = true; // start as pending so the first iteration picks up the video resolution
    int dqCountAfterEOS = 0;
    do {
        if (event_pending) {
            handleResolutionChange(fd);
        }
        takeOneOutputFrame(fd);
        if (!feedOneInputFrame(input, fd)) {
            if (stagingBufferInDevice == 0)
                break;
            dqCountAfterEOS++;
        }
        if (dqCountAfterEOS == inputQueueCapacity) // input drain
            break;
    } while (YamiV4L2_Poll(fd, true, &event_pending) == 0);

    // drain output buffer
    int retry = 3;
    while (takeOneOutputFrame(fd) || (--retry) > 0) { // output drain
        usleep(10000);
    }

    calcFps.fps(renderFrameCount);
    // YamiV4L2_Munmap(void* addr, size_t length)
    possibleWait(input->getMimeType());

    // release queued input/output buffer
    memset(&reqbufs, 0, sizeof(reqbufs));
    reqbufs.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
    reqbufs.memory = V4L2_MEMORY_MMAP;
    reqbufs.count = 0;
    ioctlRet = YamiV4L2_Ioctl(fd, VIDIOC_REQBUFS, &reqbufs);
    ASSERT(ioctlRet != -1);

    memset(&reqbufs, 0, sizeof(reqbufs));
    reqbufs.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
    reqbufs.memory = V4L2_MEMORY_MMAP;
    reqbufs.count = 0;
    ioctlRet = YamiV4L2_Ioctl(fd, VIDIOC_REQBUFS, &reqbufs);
    ASSERT(ioctlRet != -1);

    // stop input port
    type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
    ioctlRet = YamiV4L2_Ioctl(fd, VIDIOC_STREAMOFF, &type);
    ASSERT(ioctlRet != -1);

    // stop output port
    type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
    ioctlRet = YamiV4L2_Ioctl(fd, VIDIOC_STREAMOFF, &type);
    ASSERT(ioctlRet != -1);

    if (textureIds.size())
        glDeleteTextures(textureIds.size(), &textureIds[0]);
    ASSERT(glGetError() == GL_NO_ERROR);
#if __ENABLE_V4L2_GLX__
    glxRelease(glxContext, &pixmaps[0], &glxPixmaps[0], pixmaps.size());
#else
    for (i = 0; i < eglImages.size(); i++) {
        eglDestroyImageKHR(eglContext->eglContext.display, eglImages[i]);
    }
    /* there is still a random failure inside Mesa on exit; no good fix for it yet, it looks like a Mesa bug:
       #0 0x00007ffff079c343 in _mesa_symbol_table_dtor () from /usr/lib/x86_64-linux-gnu/libdricore9.2.1.so.1
       #1 0x00007ffff073c55d in glsl_symbol_table::~glsl_symbol_table() () from /usr/lib/x86_64-linux-gnu/libdricore9.2.1.so.1
       #2 0x00007ffff072a4d5 in ?? () from /usr/lib/x86_64-linux-gnu/libdricore9.2.1.so.1
       #3 0x00007ffff072a4bd in ?? () from /usr/lib/x86_64-linux-gnu/libdricore9.2.1.so.1
       #4 0x00007ffff064b48f in _mesa_reference_shader () from /usr/lib/x86_64-linux-gnu/libdricore9.2.1.so.1
       #5 0x00007ffff0649397 in ?? () from /usr/lib/x86_64-linux-gnu/libdricore9.2.1.so.1
       #6 0x000000000040624d in releaseShader (program=0x77cd90) at ./egl/gles2_help.c:158
       #7 eglRelease (context=0x615920) at ./egl/gles2_help.c:310
       #8 0x0000000000402ca8 in main (argc=<optimized out>, argv=<optimized out>) at v4l2decode.cpp:531
    */
    if (eglContext)
        eglRelease(eglContext);
#endif

    // close device
    ioctlRet = YamiV4L2_Close(fd);
    ASSERT(ioctlRet != -1);

    if (input)
        delete input;
    if (outfp)
        fclose(outfp);
    if (dumpOutputDir)
        free(dumpOutputDir);
    if (x11Display && x11Window)
        XDestroyWindow(x11Display, x11Window);
    if (x11Display)
        XCloseDisplay(x11Display);

    fprintf(stdout, "decode done\n");
    return 0;
}
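/* The decode loop above leans on feedOneInputFrame() and takeOneOutputFrame(), which
 * are defined elsewhere in this file. As a rough illustration of the input half, here
 * is a minimal sketch of queueing one compressed frame on the decoder's OUTPUT port.
 * It is an assumption for illustration only: the helper name (feedOneInputFrameSketch)
 * is hypothetical, and the DecodeStreamInput::getNextDecodeUnit()/VideoDecodeBuffer
 * interface it pulls from is assumed; the real helper also tracks stagingBufferInDevice
 * and signals EOS with an empty buffer. */
static bool feedOneInputFrameSketch(DecodeStreamInput* input, int32_t fd, int32_t index)
{
    VideoDecodeBuffer unit;
    memset(&unit, 0, sizeof(unit));
    if (!input->getNextDecodeUnit(unit)) // no more compressed data
        return false;

    ASSERT(unit.size <= k_maxInputBufferSize);
    memcpy(inputFrames[index], unit.data, unit.size); // stage into the mmap'ed input buffer

    struct v4l2_plane planes[k_inputPlaneCount];
    struct v4l2_buffer buffer;
    memset(&buffer, 0, sizeof(buffer));
    memset(planes, 0, sizeof(planes));
    buffer.index = index;
    buffer.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE; // the decoder's input port
    buffer.memory = V4L2_MEMORY_MMAP;
    buffer.m.planes = planes;
    buffer.length = k_inputPlaneCount;
    buffer.m.planes[0].bytesused = unit.size;
    return YamiV4L2_Ioctl(fd, VIDIOC_QBUF, &buffer) != -1; // hand the frame to the decoder
}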
int main(int argc, char** argv)
{
    DecodeInput *input;
    int32_t fd = -1;
    int32_t i = 0;
    int32_t ioctlRet = -1;
    YamiMediaCodec::CalcFps calcFps;

    renderMode = 3; // set default render mode to 3
    yamiTraceInit();
#if __ENABLE_V4L2_GLX__
    XInitThreads();
#endif

#if __ENABLE_V4L2_OPS__
    // FIXME: use libv4l2codec_hw.so instead
    if (!loadV4l2CodecDevice("libyami_v4l2.so")) {
        ERROR("fail to init v4l2codec device with __ENABLE_V4L2_OPS__\n");
        return -1;
    }
#endif

    if (!process_cmdline(argc, argv))
        return -1;

    if (!inputFileName) {
        ERROR("no input media file specified\n");
        return -1;
    }
    INFO("input file: %s, renderMode: %d", inputFileName, renderMode);

    if (!dumpOutputName)
        dumpOutputName = strdup("./");

#if !__ENABLE_V4L2_GLX__
    switch (renderMode) {
    case 0:
        memoryType = VIDEO_DATA_MEMORY_TYPE_RAW_COPY;
        memoryTypeStr = typeStrRawData;
        break;
    case 3:
        memoryType = VIDEO_DATA_MEMORY_TYPE_DRM_NAME;
        memoryTypeStr = typeStrDrmName;
        break;
    case 4:
        memoryType = VIDEO_DATA_MEMORY_TYPE_DMA_BUF;
        memoryTypeStr = typeStrDmaBuf;
        break;
    default:
        ASSERT(0 && "unsupported render mode, -m [0,3,4] are supported");
        break;
    }
#endif

    input = DecodeInput::create(inputFileName);
    if (input == NULL) {
        ERROR("fail to init input stream\n");
        return -1;
    }

    renderFrameCount = 0;
    calcFps.setAnchor();

    // open device
    fd = SIMULATE_V4L2_OP(Open)("decoder", 0);
    ASSERT(fd != -1);

#ifdef ANDROID
#elif __ENABLE_V4L2_GLX__
    x11Display = XOpenDisplay(NULL);
    ASSERT(x11Display);
    ioctlRet = SIMULATE_V4L2_OP(SetXDisplay)(fd, x11Display);
#endif

    // set output frame memory type
#if __ENABLE_V4L2_OPS__
    SIMULATE_V4L2_OP(SetParameter)(fd, "frame-memory-type", memoryTypeStr);
#elif !__ENABLE_V4L2_GLX__
    SIMULATE_V4L2_OP(FrameMemoryType)(fd, memoryType);
#endif

    // query hw capability
    struct v4l2_capability caps;
    memset(&caps, 0, sizeof(caps));
    caps.capabilities = V4L2_CAP_VIDEO_CAPTURE_MPLANE | V4L2_CAP_VIDEO_OUTPUT_MPLANE | V4L2_CAP_STREAMING;
    ioctlRet = SIMULATE_V4L2_OP(Ioctl)(fd, VIDIOC_QUERYCAP, &caps);
    ASSERT(ioctlRet != -1);

    // set input/output data format
    uint32_t codecFormat = v4l2PixelFormatFromMime(input->getMimeType());
    if (!codecFormat) {
        ERROR("unsupported mimetype, %s", input->getMimeType());
        return -1;
    }

    struct v4l2_format format;
    memset(&format, 0, sizeof(format));
    format.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
    format.fmt.pix_mp.pixelformat = codecFormat;
    format.fmt.pix_mp.num_planes = 1;
    format.fmt.pix_mp.plane_fmt[0].sizeimage = k_maxInputBufferSize;
    ioctlRet = SIMULATE_V4L2_OP(Ioctl)(fd, VIDIOC_S_FMT, &format);
    ASSERT(ioctlRet != -1);

    // set preferred output format
    memset(&format, 0, sizeof(format));
    format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
    uint8_t* data = (uint8_t*)input->getCodecData().data();
    uint32_t size = input->getCodecData().size();
    // save the codec data as (size + data); format.fmt.raw_data is __u8[200],
    // so we must make sure there is enough space (>= sizeof(uint32_t) + size) to store it
    memcpy(format.fmt.raw_data, &size, sizeof(uint32_t));
    if (sizeof(format.fmt.raw_data) >= size + sizeof(uint32_t)) {
        memcpy(format.fmt.raw_data + sizeof(uint32_t), data, size);
    } else {
        ERROR("Not enough space to store codec data");
        return -1;
    }
    ioctlRet = SIMULATE_V4L2_OP(Ioctl)(fd, VIDIOC_S_FMT, &format);
    ASSERT(ioctlRet != -1);

    // the input port starts as early as possible, so the decoder can determine the output frame format
    __u32 type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
    ioctlRet = SIMULATE_V4L2_OP(Ioctl)(fd, VIDIOC_STREAMON, &type);
    ASSERT(ioctlRet != -1);

    // setup input buffers
    struct v4l2_requestbuffers reqbufs;
    memset(&reqbufs, 0, sizeof(reqbufs));
    reqbufs.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
    reqbufs.memory = V4L2_MEMORY_MMAP;
    reqbufs.count = 2;
    ioctlRet = SIMULATE_V4L2_OP(Ioctl)(fd, VIDIOC_REQBUFS, &reqbufs);
    ASSERT(ioctlRet != -1);
    ASSERT(reqbufs.count > 0);
    inputQueueCapacity = reqbufs.count;
    inputFrames.resize(inputQueueCapacity);

    for (i = 0; i < inputQueueCapacity; i++) {
        struct v4l2_plane planes[k_inputPlaneCount];
        struct v4l2_buffer buffer;
        memset(&buffer, 0, sizeof(buffer));
        memset(planes, 0, sizeof(planes));
        buffer.index = i;
        buffer.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
        buffer.memory = V4L2_MEMORY_MMAP;
        buffer.m.planes = planes;
        buffer.length = k_inputPlaneCount;
        ioctlRet = SIMULATE_V4L2_OP(Ioctl)(fd, VIDIOC_QUERYBUF, &buffer);
        ASSERT(ioctlRet != -1);

        // length and mem_offset should be filled by VIDIOC_QUERYBUF above
        void* address = SIMULATE_V4L2_OP(Mmap)(NULL, buffer.m.planes[0].length,
            PROT_READ | PROT_WRITE, MAP_SHARED, fd, buffer.m.planes[0].m.mem_offset);
        ASSERT(address);
        inputFrames[i] = static_cast<uint8_t*>(address);
        DEBUG("inputFrames[%d] = %p", i, inputFrames[i]);
    }

    // feed input frames first
    for (i = 0; i < inputQueueCapacity; i++) {
        if (!feedOneInputFrame(input, fd, i)) {
            break;
        }
    }

    // query video resolution
    memset(&format, 0, sizeof(format));
    format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
    while (1) {
        // EINVAL means we haven't seen enough of the stream to determine the format yet
        if (SIMULATE_V4L2_OP(Ioctl)(fd, VIDIOC_G_FMT, &format) != 0) {
            if (errno != EINVAL) {
                INFO("ioctl() failed: VIDIOC_G_FMT, video resolution is not available yet, waiting");
            }
        } else {
            break;
        }
        usleep(50);
    }
    outputPlaneCount = format.fmt.pix_mp.num_planes;
    ASSERT(outputPlaneCount == 2);
    videoWidth = format.fmt.pix_mp.width;
    videoHeight = format.fmt.pix_mp.height;
    ASSERT(videoWidth && videoHeight);

#ifdef ANDROID
    __u32 pixelformat = format.fmt.pix_mp.pixelformat;
    if (!createNativeWindow(pixelformat)) {
        fprintf(stderr, "create native window error\n");
        return -1;
    }

    int minUndequeuedBuffs = 0;
    status_t err = mNativeWindow->query(mNativeWindow.get(),
        NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS, &minUndequeuedBuffs);
    if (err != 0) {
        fprintf(stderr, "query native window min undequeued buffers error\n");
        return err;
    }
#endif

    // setup output buffers; ask the decoder how many output buffers we need
    struct v4l2_control ctrl;
    memset(&ctrl, 0, sizeof(ctrl));
    ctrl.id = V4L2_CID_MIN_BUFFERS_FOR_CAPTURE;
    ioctlRet = SIMULATE_V4L2_OP(Ioctl)(fd, VIDIOC_G_CTRL, &ctrl);
#ifndef ANDROID
    uint32_t minOutputFrameCount = ctrl.value + k_extraOutputFrameCount;
#else
    uint32_t minOutputFrameCount = ctrl.value + k_extraOutputFrameCount + minUndequeuedBuffs;
#endif

    memset(&reqbufs, 0, sizeof(reqbufs));
    reqbufs.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
    reqbufs.memory = V4L2_MEMORY_MMAP;
    reqbufs.count = minOutputFrameCount;
    ioctlRet = SIMULATE_V4L2_OP(Ioctl)(fd, VIDIOC_REQBUFS, &reqbufs);
    ASSERT(ioctlRet != -1);
    ASSERT(reqbufs.count > 0);
    outputQueueCapacity = reqbufs.count;

#ifdef ANDROID
#elif __ENABLE_V4L2_GLX__
    x11Window = XCreateSimpleWindow(x11Display, DefaultRootWindow(x11Display),
        0, 0, videoWidth, videoHeight, 0, 0, WhitePixel(x11Display, 0));
    XMapWindow(x11Display, x11Window);
    pixmaps.resize(outputQueueCapacity);
    glxPixmaps.resize(outputQueueCapacity);
    textureIds.resize(outputQueueCapacity);
    if (!glxContext) {
        glxContext = glxInit(x11Display, x11Window);
    }
    ASSERT(glxContext);
    glGenTextures(outputQueueCapacity, &textureIds[0]);
    for (i = 0; i < outputQueueCapacity; i++) {
        int ret = createPixmapForTexture(glxContext, textureIds[i], videoWidth, videoHeight, &pixmaps[i], &glxPixmaps[i]);
        DEBUG("textureIds[%d]: 0x%x, pixmaps[%d]=0x%lx, glxPixmaps[%d]: 0x%lx", i, textureIds[i], i, pixmaps[i], i, glxPixmaps[i]);
        ASSERT(ret == 0);
        ret = SIMULATE_V4L2_OP(UsePixmap)(fd, i, pixmaps[i]);
        ASSERT(ret == 0);
    }
#else
    if (IS_RAW_DATA()) {
        rawOutputFrames.resize(outputQueueCapacity);
        for (i = 0; i < outputQueueCapacity; i++) {
            struct v4l2_plane planes[k_maxOutputPlaneCount];
            struct v4l2_buffer buffer;
            memset(&buffer, 0, sizeof(buffer));
            memset(planes, 0, sizeof(planes));
            buffer.index = i;
            buffer.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
            buffer.memory = V4L2_MEMORY_MMAP;
            buffer.m.planes = planes;
            buffer.length = outputPlaneCount;
            ioctlRet = SIMULATE_V4L2_OP(Ioctl)(fd, VIDIOC_QUERYBUF, &buffer);
            ASSERT(ioctlRet != -1);

            rawOutputFrames[i].width = format.fmt.pix_mp.width;
            rawOutputFrames[i].height = format.fmt.pix_mp.height;
            rawOutputFrames[i].fourcc = format.fmt.pix_mp.pixelformat;
            for (int j = 0; j < outputPlaneCount; j++) {
                // length and mem_offset are filled by VIDIOC_QUERYBUF above
                void* address = SIMULATE_V4L2_OP(Mmap)(NULL, buffer.m.planes[j].length,
                    PROT_READ | PROT_WRITE, MAP_SHARED, fd, buffer.m.planes[j].m.mem_offset);
                ASSERT(address);
                if (j == 0) {
                    rawOutputFrames[i].data = static_cast<uint8_t*>(address);
                    rawOutputFrames[i].offset[0] = 0;
                } else {
                    rawOutputFrames[i].offset[j] = static_cast<uint8_t*>(address) - rawOutputFrames[i].data;
                }
                rawOutputFrames[i].pitch[j] = format.fmt.pix_mp.plane_fmt[j].bytesperline;
            }
        }
    } else if (IS_DMA_BUF() || IS_DRM_NAME()) {
        // setup all textures and eglImages
        eglImages.resize(outputQueueCapacity);
        textureIds.resize(outputQueueCapacity);
        if (!eglContext)
            eglContext = eglInit(x11Display, x11Window, 0 /*VA_FOURCC_RGBA*/, IS_DMA_BUF());
        glGenTextures(outputQueueCapacity, &textureIds[0]);
        for (i = 0; i < outputQueueCapacity; i++) {
            int ret = 0;
            ret = SIMULATE_V4L2_OP(UseEglImage)(fd, eglContext->eglContext.display, eglContext->eglContext.context, i, &eglImages[i]);
            ASSERT(ret == 0);
            GLenum target = GL_TEXTURE_2D;
            if (IS_DMA_BUF())
                target = GL_TEXTURE_EXTERNAL_OES;
            glBindTexture(target, textureIds[i]);
            imageTargetTexture2D(target, eglImages[i]);
            glTexParameteri(target, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
            glTexParameteri(target, GL_TEXTURE_MAG_FILTER, GL_NEAREST);
            DEBUG("textureIds[%d]: 0x%x, eglImages[%d]: 0x%p", i, textureIds[i], i, eglImages[i]);
        }
    }
#endif

#ifndef ANDROID
    // feed output frames first
    for (i = 0; i < outputQueueCapacity; i++) {
        if (!takeOneOutputFrame(fd, i)) {
            ASSERT(0);
        }
    }
#else
    struct v4l2_buffer buffer;
    err = native_window_set_buffer_count(mNativeWindow.get(), outputQueueCapacity);
    if (err != 0) {
        fprintf(stderr, "native_window_set_buffer_count failed: %s (%d)", strerror(-err), -err);
        return -1;
    }

    // queue buffers
    for (uint32_t i = 0; i < outputQueueCapacity; i++) {
        ANativeWindowBuffer* pbuf = NULL;
        memset(&buffer, 0, sizeof(buffer));
        err = native_window_dequeue_buffer_and_wait(mNativeWindow.get(), &pbuf);
        if (err != 0) {
            fprintf(stderr, "dequeueBuffer failed: %s (%d)\n", strerror(-err), -err);
            return -1;
        }
        buffer.m.userptr = (unsigned long)pbuf;
        buffer.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
        buffer.index = i;
        ioctlRet = SIMULATE_V4L2_OP(Ioctl)(fd, VIDIOC_QBUF, &buffer);
        ASSERT(ioctlRet != -1);
        mWindBuff.push_back(pbuf);
    }
    for (uint32_t i = 0; i < minUndequeuedBuffs; i++) {
        memset(&buffer, 0, sizeof(buffer));
        buffer.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
        ioctlRet = SIMULATE_V4L2_OP(Ioctl)(fd, VIDIOC_DQBUF, &buffer);
        ASSERT(ioctlRet != -1);
        err = mNativeWindow->cancelBuffer(mNativeWindow.get(), mWindBuff[buffer.index], -1);
        if (err) {
            fprintf(stderr, "queue empty window buffer error\n");
            return -1;
        }
    }
#endif

    // the output port starts as late as possible so it can adopt user-provided output buffers
    type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
    ioctlRet = SIMULATE_V4L2_OP(Ioctl)(fd, VIDIOC_STREAMON, &type);
    ASSERT(ioctlRet != -1);

    bool event_pending = true; // start as pending so the first iteration picks up the video resolution
    int dqCountAfterEOS = 0;
    do {
        if (event_pending) {
            handleResolutionChange(fd);
        }
        takeOneOutputFrame(fd);
        if (!feedOneInputFrame(input, fd)) {
            if (stagingBufferInDevice == 0)
                break;
            dqCountAfterEOS++;
        }
        if (dqCountAfterEOS == inputQueueCapacity) // input drain
            break;
    } while (SIMULATE_V4L2_OP(Poll)(fd, true, &event_pending) == 0);

    // drain output buffer
    int retry = 3;
    while (takeOneOutputFrame(fd) || (--retry) > 0) { // output drain
        usleep(10000);
    }

    calcFps.fps(renderFrameCount);
    // SIMULATE_V4L2_OP(Munmap)(void* addr, size_t length)
    possibleWait(input->getMimeType());

    // release queued input/output buffer
    memset(&reqbufs, 0, sizeof(reqbufs));
    reqbufs.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
    reqbufs.memory = V4L2_MEMORY_MMAP;
    reqbufs.count = 0;
    ioctlRet = SIMULATE_V4L2_OP(Ioctl)(fd, VIDIOC_REQBUFS, &reqbufs);
    ASSERT(ioctlRet != -1);

    memset(&reqbufs, 0, sizeof(reqbufs));
    reqbufs.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
    reqbufs.memory = V4L2_MEMORY_MMAP;
    reqbufs.count = 0;
    ioctlRet = SIMULATE_V4L2_OP(Ioctl)(fd, VIDIOC_REQBUFS, &reqbufs);
    ASSERT(ioctlRet != -1);

    // stop input port
    type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
    ioctlRet = SIMULATE_V4L2_OP(Ioctl)(fd, VIDIOC_STREAMOFF, &type);
    ASSERT(ioctlRet != -1);

    // stop output port
    type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
    ioctlRet = SIMULATE_V4L2_OP(Ioctl)(fd, VIDIOC_STREAMOFF, &type);
    ASSERT(ioctlRet != -1);

#ifndef ANDROID
    if (textureIds.size())
        glDeleteTextures(textureIds.size(), &textureIds[0]);
    ASSERT(glGetError() == GL_NO_ERROR);
#endif

#ifdef ANDROID
    // TODO: any remaining resources to destroy?
#elif __ENABLE_V4L2_GLX__
    glxRelease(glxContext, &pixmaps[0], &glxPixmaps[0], pixmaps.size());
#else
    for (i = 0; i < eglImages.size(); i++) {
        destroyImage(eglContext->eglContext.display, eglImages[i]);
    }
    /* there is still a random failure inside Mesa on exit; no good fix for it yet, it looks like a Mesa bug:
       #0 0x00007ffff079c343 in _mesa_symbol_table_dtor () from /usr/lib/x86_64-linux-gnu/libdricore9.2.1.so.1
       #1 0x00007ffff073c55d in glsl_symbol_table::~glsl_symbol_table() () from /usr/lib/x86_64-linux-gnu/libdricore9.2.1.so.1
       #2 0x00007ffff072a4d5 in ?? () from /usr/lib/x86_64-linux-gnu/libdricore9.2.1.so.1
       #3 0x00007ffff072a4bd in ?? () from /usr/lib/x86_64-linux-gnu/libdricore9.2.1.so.1
       #4 0x00007ffff064b48f in _mesa_reference_shader () from /usr/lib/x86_64-linux-gnu/libdricore9.2.1.so.1
       #5 0x00007ffff0649397 in ?? () from /usr/lib/x86_64-linux-gnu/libdricore9.2.1.so.1
       #6 0x000000000040624d in releaseShader (program=0x77cd90) at ./egl/gles2_help.c:158
       #7 eglRelease (context=0x615920) at ./egl/gles2_help.c:310
       #8 0x0000000000402ca8 in main (argc=<optimized out>, argv=<optimized out>) at v4l2decode.cpp:531
    */
    if (eglContext)
        eglRelease(eglContext);
#endif

    // close device
    ioctlRet = SIMULATE_V4L2_OP(Close)(fd);
    ASSERT(ioctlRet != -1);

    if (input)
        delete input;
    if (outfp)
        fclose(outfp);
    if (dumpOutputName)
        free(dumpOutputName);
#if __ENABLE_V4L2_GLX__
    if (x11Display && x11Window)
        XDestroyWindow(x11Display, x11Window);
    if (x11Display)
        XCloseDisplay(x11Display);
#endif

    fprintf(stdout, "decode done\n");
    return 0;
}
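/* The second listing replaces the first listing's inline mime-type switch with
 * v4l2PixelFormatFromMime(), which is defined elsewhere in the file. A minimal
 * sketch of such a helper, assuming only the three mappings the inline code in
 * the first listing used (the real helper may cover more codecs); the name
 * v4l2PixelFormatFromMimeSketch is hypothetical: */
static uint32_t v4l2PixelFormatFromMimeSketch(const char* mimeType)
{
    if (!strcmp(mimeType, "video/h264"))
        return V4L2_PIX_FMT_H264;
    if (!strcmp(mimeType, "video/x-vnd.on2.vp8"))
        return V4L2_PIX_FMT_VP8;
    if (!strcmp(mimeType, "image/jpeg"))
        return V4L2_PIX_FMT_MJPEG;
    return 0; // the caller treats 0 as "unsupported mimetype"
}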