/*
 * U-Boot command handler: load a JPEG image (via load_jpeg()) and decode it.
 *
 * @cmdtp: command table entry (used for usage/help text)
 * @flag:  command flag (unused here)
 * @argc:  argument count (argument checking is currently compiled out)
 * @argv:  argument vector (unused while the #if 0 block is disabled)
 *
 * Returns 0 on success, or the non-zero error code from load_jpeg().
 */
int do_jpgd(cmd_tbl_t *cmdtp, int flag, int argc, char *argv[])
{
	int ret;
#if 0
	if (argc < 4) {
		printf("Insufficient parameter!\n");
		printf("Usage:\n%s\n", cmdtp->usage);
		return -1;
	}
#else
#ifdef CONFIG_SYS_LONGHELP
	printf("you should first set:\n%s\n", cmdtp->help);
#endif
#endif
	ret = load_jpeg();
	if (0 != ret) {
		printf("load jpeg err. \n");
		/* Bug fix: propagate the failure instead of returning 0
		 * (success), which hid load errors from the caller. */
		return ret;
	}
	jpeg_decode();
	printf("decode jpeg!\n");
	return 0;
}
//解码文件名为filename的jpeg/jpg文件 u8 jpg_decode(const u8 *filename) { u8 res=0; u16 br; #if JPEG_USE_MALLOC == 1//使用malloc res=jpeg_mallocall(); #endif if(res==0)//内存申请OK { //得到JPEG/JPG图片的开始信息 res=f_open(f_jpeg,(const TCHAR*)filename,FA_READ);//打开文件 if(res==0) { //开始时读入1024个字节到缓存里面.方便后面提取JPEG解码的信息 f_read(f_jpeg,jpg_buffer,1024,(UINT*)&br); jpeg_inittable();//初始化各个数据表 res=jpeg_inittag(); if(res==0) { if((SampRate_Y_H!=0)&&(SampRate_Y_V!=0)) { ai_draw_init(); //初始化picinfo.Div_Fac,启动智能画图 res=jpeg_decode(); //解码JPEG开始 }else res=0XFD; //采样率错误 }else res=0XFE; //初始化表头不成功 f_close(f_jpeg); } } #if JPEG_USE_MALLOC == 1//使用malloc jpeg_freeall(); //释放内存 #endif return res; }
/*
 * Prepend a new node for `filename` to the doubly-linked JPEG list and decode
 * the image into it.
 *
 * @pfbdev:   framebuffer device handed through to jpeg_decode()
 * @head:     current list head (may be NULL for an empty list)
 * @filename: image file to decode
 *
 * Returns the new list head, or NULL on allocation failure (list unchanged).
 */
JPEG_NODE *
jpeg_link_add(PFBDEV pfbdev, JPEG_NODE *head, const char *filename)
{
	JPEG_NODE *tmp;

	tmp = malloc(sizeof(JPEG_NODE));
	if (NULL == tmp) {
		fprintf(stderr, "JPEG_NODE malloc error\n");
		return NULL;
	}

	/* Link the new node in front of the old head (works for NULL too). */
	tmp->next = head;
	tmp->prev = NULL;
	if (NULL != head)
		head->prev = tmp;  /* bug fix: old head must point back at the new node */

	head = tmp;
	jpeg_decode(pfbdev, tmp, filename);
	return head;
}
/* Decode an in-memory JPEG into an 8-bit grayscale destination buffer of
 * w*h pixels.  Returns true on success; on failure *err is set. */
bool _openslide_jpeg_decode_buffer_gray(const void *buf, uint32_t len,
                                        uint8_t *dest,
                                        int32_t w, int32_t h,
                                        GError **err) {
  //g_debug("decode grayscale JPEG buffer: %x %u", buf, len);
  /* NULL file / grayscale=true selects the buffer-based gray path. */
  bool ok = jpeg_decode(NULL, buf, len, dest, true, w, h, err);
  return ok;
}
/* Decode a JPEG held in memory into `dest`.
 * The header is parsed first to size the destination Mat, then the pixel
 * data is decoded directly into dest.data.  Always returns 0. */
int imdecodeJPEG(const unsigned char* buff, int buff_size, Mat& dest)
{
	int width, height, channels;
	getHeader((uchar*)buff, buff_size, width, height, channels);
	dest = Mat::zeros(Size(width, height), CV_MAKE_TYPE(CV_8U, channels));
	jpeg_decode((uchar*)buff, buff_size, dest.data, width, height);
	return 0;
}
/* Decode a JPEG held in a byte vector into `dest`.
 * The header is parsed first to size the destination Mat, then the pixel
 * data is decoded directly into dest.data.
 * Returns 0 on success, -1 if the input vector is empty. */
int imdecodeJPEG(const vector<uchar>& buff, Mat& dest)
{
	// Bug fix: &buff[0] on an empty vector is undefined behavior.
	if (buff.empty())
		return -1;
	uchar* dst = (uchar*)&buff[0];
	int w, h, c;
	getHeader((uchar*)dst, buff.size(), w, h, c);
	dest = Mat::zeros(Size(w, h), CV_MAKE_TYPE(CV_8U, c));
	jpeg_decode(dst, buff.size(), dest.data, dest.cols, dest.rows);
	return 0;
}
void vbe_set_graphics(void) { u8 rval; vbe_info_t info; rval = vbe_info(&info); if (rval != 0) return; DEBUG_PRINTF_VBE("VbeSignature: %s\n", info.signature); DEBUG_PRINTF_VBE("VbeVersion: 0x%04x\n", info.version); DEBUG_PRINTF_VBE("OemString: %s\n", info.oem_string_ptr); DEBUG_PRINTF_VBE("Capabilities:\n"); DEBUG_PRINTF_VBE("\tDAC: %s\n", (info.capabilities & 0x1) == 0 ? "fixed 6bit" : "switchable 6/8bit"); DEBUG_PRINTF_VBE("\tVGA: %s\n", (info.capabilities & 0x2) == 0 ? "compatible" : "not compatible"); DEBUG_PRINTF_VBE("\tRAMDAC: %s\n", (info.capabilities & 0x4) == 0 ? "normal" : "use blank bit in Function 09h"); mode_info.video_mode = (1 << 14) | CONFIG_FRAMEBUFFER_VESA_MODE; vbe_get_mode_info(&mode_info); vbe_set_mode(&mode_info); #if CONFIG_BOOTSPLASH unsigned char *framebuffer = (unsigned char *) le32_to_cpu(mode_info.vesa.phys_base_ptr); DEBUG_PRINTF_VBE("FRAMEBUFFER: 0x%p\n", framebuffer); struct jpeg_decdata *decdata; /* Switching Intel IGD to 1MB video memory will break this. Who * cares. */ // int imagesize = 1024*768*2; unsigned char *jpeg = cbfs_get_file_content(CBFS_DEFAULT_MEDIA, "bootsplash.jpg", CBFS_TYPE_BOOTSPLASH, NULL); if (!jpeg) { DEBUG_PRINTF_VBE("Could not find bootsplash.jpg\n"); return; } DEBUG_PRINTF_VBE("Splash at %p ...\n", jpeg); dump(jpeg, 64); decdata = malloc(sizeof(*decdata)); int ret = 0; DEBUG_PRINTF_VBE("Decompressing boot splash screen...\n"); ret = jpeg_decode(jpeg, framebuffer, 1024, 768, 16, decdata); DEBUG_PRINTF_VBE("returns %x\n", ret); #endif }
// This function should decompress the captured MJPG image to a yuv image. void video_frame_mjpg_to_yuv() { free(jpegStart); pthread_mutex_lock(&jpg_mutex); if(jpeg_decode(&decompressed_frame_camera, buffers[bufferIndex].start, &width, &height) < 0){ printf("jpeg decode failure\n"); exit(1); } pthread_mutex_unlock(&jpg_mutex); }
/*
 * Decode an in-memory JPEG.
 *
 * On success, stores the image dimensions in *puWidth/*puHeight and hands
 * ownership of the decoded pixel buffer to the caller via *ppData (the
 * caller must free it); only the decoder's container struct is released
 * here.  Returns 0 on success, -1 if decoding fails.
 */
int LoadJPEG(void *pRaw, int rawlen, unsigned int *puWidth,
             unsigned int *puHeight, void **ppData)
{
	decjpeg_t *pDecoded = jpeg_decode(pRaw, rawlen);
	if (pDecoded == NULL)
		return -1;

	*puWidth  = pDecoded->width;
	*puHeight = pDecoded->height;
	*ppData   = pDecoded->pData;  /* pixel data ownership passes to caller */
	free(pDecoded);               /* container only; pData stays alive */
	return 0;
}
/* Open `filename`, optionally seek to `offset`, and decode a w*h JPEG into
 * `dest`.  Returns true on success; on failure *err is set.  The file is
 * always closed before returning. */
bool _openslide_jpeg_read(const char *filename, int64_t offset,
                          uint32_t *dest, int32_t w, int32_t h,
                          GError **err) {
  //g_debug("read JPEG: %s %" G_GINT64_FORMAT, filename, offset);
  bool success = false;

  FILE *f = _openslide_fopen(filename, "rb", err);
  if (f == NULL) {
    return false;
  }

  /* offset 0 means "start of file", so no seek is needed in that case. */
  if (offset && fseeko(f, offset, SEEK_SET) == -1) {
    _openslide_io_error(err, "Cannot seek to offset");
  } else {
    success = jpeg_decode(f, NULL, 0, dest, false, w, h, err);
  }

  fclose(f);
  return success;
}
/*
 * Load bootsplash.jpg from the ROM filesystem, decode it, look up a VESA
 * video mode matching the image dimensions, switch into it and copy the
 * decompressed picture to the framebuffer.  All allocations are released
 * through the single `done:` cleanup path.
 */
void enable_bootsplash(void)
{
    if (!CONFIG_BOOTSPLASH)
        return;
    dprintf(3, "Checking for bootsplash\n");
    u32 file = romfile_find("bootsplash.jpg");
    if (!file)
        return;
    int filesize = romfile_size(file);

    u8 *picture = NULL;
    u8 *filedata = malloc_tmphigh(filesize);
    struct vesa_info *vesa_info = malloc_tmplow(sizeof(*vesa_info));
    struct vesa_mode_info *mode_info = malloc_tmplow(sizeof(*mode_info));
    struct jpeg_decdata *jpeg = jpeg_alloc();
    if (!filedata || !jpeg || !vesa_info || !mode_info) {
        warn_noalloc();
        goto done;
    }

    /* Check whether we have a VESA 2.0 compliant BIOS */
    memset(vesa_info, 0, sizeof(struct vesa_info));
    vesa_info->vesa_signature = VBE2_SIGNATURE;
    struct bregs br;
    memset(&br, 0, sizeof(br));
    br.ax = 0x4f00;  /* VBE function 00h: get controller information */
    br.di = FLATPTR_TO_OFFSET(vesa_info);
    br.es = FLATPTR_TO_SEG(vesa_info);
    call16_int10(&br);
    if (vesa_info->vesa_signature != VESA_SIGNATURE) {
        dprintf(1,"No VBE2 found.\n");
        goto done;
    }

    /* Print some debugging information about our card. */
    char *vendor = SEGOFF_TO_FLATPTR(vesa_info->oem_vendor_name_ptr);
    char *product = SEGOFF_TO_FLATPTR(vesa_info->oem_product_name_ptr);
    dprintf(3, "VESA %d.%d\nVENDOR: %s\nPRODUCT: %s\n",
            vesa_info->vesa_version>>8, vesa_info->vesa_version&0xff,
            vendor, product);

    // Parse jpeg and get image size.
    dprintf(5, "Copying bootsplash.jpg\n");
    romfile_copy(file, filedata, filesize);
    dprintf(5, "Decoding bootsplash.jpg\n");
    int ret = jpeg_decode(jpeg, filedata);
    if (ret) {
        dprintf(1, "jpeg_decode failed with return code %d...\n", ret);
        goto done;
    }
    int width, height;
    jpeg_get_size(jpeg, &width, &height);

    // Try to find a graphics mode with the corresponding dimensions.
    int videomode = find_videomode(vesa_info, mode_info, width, height);
    if (videomode < 0)
        goto done;
    void *framebuffer = mode_info->phys_base_ptr;
    int depth = mode_info->bits_per_pixel;
    dprintf(3, "mode: %04x\n", videomode);
    dprintf(3, "framebuffer: %p\n", framebuffer);
    dprintf(3, "bytes per scanline: %d\n", mode_info->bytes_per_scanline);
    dprintf(3, "bits per pixel: %d\n", depth);

    // Allocate space for image and decompress it.
    int imagesize = width * height * (depth / 8);
    picture = malloc_tmphigh(imagesize);
    if (!picture) {
        warn_noalloc();
        goto done;
    }
    dprintf(5, "Decompressing bootsplash.jpg\n");
    ret = jpeg_show(jpeg, picture, width, height, depth);
    if (ret) {
        dprintf(1, "jpeg_show failed with return code %d...\n", ret);
        goto done;
    }

    /* Switch to graphics mode */
    dprintf(5, "Switching to graphics mode\n");
    memset(&br, 0, sizeof(br));
    br.ax = 0x4f02;  /* VBE function 02h: set video mode */
    br.bx = (1 << 14) | videomode;  /* bit 14 selects linear framebuffer */
    call16_int10(&br);
    if (br.ax != 0x4f) {
        dprintf(1, "set_mode failed.\n");
        goto done;
    }

    /* Show the picture */
    dprintf(5, "Showing bootsplash.jpg\n");
    iomemcpy(framebuffer, picture, imagesize);
    dprintf(5, "Bootsplash copy complete\n");
    BootsplashActive = 1;

done:
    free(filedata);
    free(picture);
    free(vesa_info);
    free(mode_info);
    free(jpeg);
    return;
}
/*
 * BMediaNode hook: finalize the connection to `destination`.
 * Validates the handshake, allocates the camera/conversion buffers,
 * measures the per-frame processing latency by decoding one trial frame,
 * creates the buffer group, and finally marks the node connected/enabled.
 * NOTE(review): order matters throughout — buffers must exist before the
 * trial decode, and fConnected is only set after everything succeeds.
 */
void
FinePixProducer::Connect(status_t error, const media_source &source,
		const media_destination &destination, const media_format &format,
		char *io_name)
{
	PRINTF(1, ("Connect() %ldx%ld\n", \
			format.u.raw_video.display.line_width, \
			format.u.raw_video.display.line_count));

	// Guard: a second Connect on an already-connected node is a no-op.
	if (fConnected) {
		PRINTF(0, ("Connect: Already connected\n"));
		return;
	}

	// Reject a mismatched source, an upstream error, or a format that
	// does not match what we advertised.
	if (	(source != fOutput.source) || (error < B_OK) ||
			!const_cast<media_format *>(&format)->Matches(&fOutput.format)) {
		PRINTF(1, ("Connect: Connect error\n"));
		return;
	}

	fOutput.destination = destination;
	strcpy(io_name, fOutput.name);

	// Rebase the performance-time origin so frame numbering stays
	// continuous across (re)connects.
	if (fOutput.format.u.raw_video.field_rate != 0.0f) {
		fPerformanceTimeBase = fPerformanceTimeBase +
				(bigtime_t)
					((fFrame - fFrameBase) *
					(1000000 / fOutput.format.u.raw_video.field_rate));
		fFrameBase = fFrame;
	}

	fConnectedFormat = format.u.raw_video;

	fDeltaBuffer = new uint8[MAX_FRAME_SIZE]; //ø in buffer
	tempInBuffer = new uint8[3 * fConnectedFormat.display.line_width
		* fConnectedFormat.display.line_count]; // for 24 bit color

	fCam->SetupCam(); //øyvind

	/* get the latency */
	bigtime_t latency = 0;
	media_node_id tsID = 0;
	FindLatencyFor(fOutput.destination, &latency, &tsID);
	#define NODE_LATENCY 1000
	SetEventLatency(latency + NODE_LATENCY);

	// Time one full capture + decode + 24->32-bit conversion pass to
	// estimate fProcessingLatency for the frame generator thread.
	uint8 *tmp24 = (uint8*)tempInBuffer;
	uint8 *buffer, *dst;
	dst = buffer = (uint8 *)malloc(4 * fConnectedFormat.display.line_count
		* fConnectedFormat.display.line_width);
	if (!buffer) {
		PRINTF(0, ("Connect: Out of memory\n"));
		return;
	}
	bigtime_t now = system_time();

	// Get a frame from the camera
	fCam->GetPic(fDeltaBuffer, frame_size);

	// Convert from jpeg to bitmap
	if (jpeg_check_size(fDeltaBuffer,
		FPIX_RGB24_WIDTH, FPIX_RGB24_HEIGHT)) {
		int n = jpeg_decode(fDeltaBuffer, tmp24,
			FPIX_RGB24_WIDTH, FPIX_RGB24_HEIGHT, 24, //32 not working
			&decdata);
		if (n) {
			PRINTF(-1, ("ooeps decode jpg result : %d", n));
		}
	} else {
		PRINTF(-1, ("ooeps check_size failed"));
	}

	// Convert from 24 bit to 32 bit
	for (uint y=0; y<fConnectedFormat.display.line_count; y++)
		for (uint x=0; x<fConnectedFormat.display.line_width; x++) {
			*(dst++) = *tmp24; //red
			tmp24++;
			*(dst++) = *tmp24; //green
			tmp24++;
			*(dst++) = *tmp24; //blue
			tmp24++;
			dst++; //last 8 bit empty
		}

	fProcessingLatency = system_time() - now;
	free(buffer);  // trial buffer only needed for the timing run

	/* Create the buffer group */
	fBufferGroup = new BBufferGroup(4 * fConnectedFormat.display.line_width
		* fConnectedFormat.display.line_count, 8);
	if (fBufferGroup->InitCheck() < B_OK) {
		delete fBufferGroup;
		fBufferGroup = NULL;
		return;
	}

	fConnected = true;
	fEnabled = true;

	/* Tell frame generation thread to recalculate delay value */
	release_sem(fFrameSync);
}
/* The following functions form the thread that generates frames. You should
 * replace this with the code that interfaces to your hardware. */

/*
 * Frame-generation thread loop: paced by fFrameSync, it captures a JPEG
 * frame from the camera, decodes it to 24-bit RGB, expands it to 32-bit,
 * and sends the buffer downstream.  Runs until the semaphore is deleted.
 */
int32
FinePixProducer::FrameGenerator()
{
	bigtime_t wait_until = system_time();

	while (1) {
		status_t err = acquire_sem_etc(fFrameSync, 1, B_ABSOLUTE_TIMEOUT,
				wait_until);

		/* The only acceptable responses are B_OK and B_TIMED_OUT. Everything
		 * else means the thread should quit. Deleting the semaphore, as in
		 * FinePixProducer::HandleStop(), will trigger this behavior. */
		if ((err != B_OK) && (err != B_TIMED_OUT))
			break;

		fFrame++;

		/* Recalculate the time until the thread should wake up to begin
		 * processing the next frame. Subtract fProcessingLatency so that
		 * the frame is sent in time. */
		wait_until = TimeSource()->RealTimeFor(fPerformanceTimeBase, 0) +
				(bigtime_t)
						((fFrame - fFrameBase) *
						(1000000 / fConnectedFormat.field_rate)) -
				fProcessingLatency;

		/* Drop frame if it's at least a frame late */
		if (wait_until < system_time())
			continue;

		/* If the semaphore was acquired successfully, it means something
		 * changed the timing information (see FinePixProducer::Connect()) and
		 * so the thread should go back to sleep until the newly-calculated
		 * wait_until time. */
		if (err == B_OK)
			continue;

		/* Send buffers only if the node is running and the output has been
		 * enabled */
		if (!fRunning || !fEnabled)
			continue;

		BAutolock _(fLock);

		// Get the frame from the camera
		fCam->GetPic(fDeltaBuffer, frame_size);

		/* Fetch a buffer from the buffer group */
		BBuffer *buffer = fBufferGroup->RequestBuffer(
						4 * fConnectedFormat.display.line_width *
						fConnectedFormat.display.line_count, 0LL);
		if (!buffer)
			continue;  // no free buffer: skip this frame

		/* Fill out the details about this buffer. */
		media_header *h = buffer->Header();
		h->type = B_MEDIA_RAW_VIDEO;
		h->time_source = TimeSource()->ID();
		h->size_used = 4 * fConnectedFormat.display.line_width *
						fConnectedFormat.display.line_count;
		/* For a buffer originating from a device, you might want to calculate
		 * this based on the PerformanceTimeFor the time your buffer arrived at
		 * the hardware (plus any applicable adjustments).
		h->start_time = fPerformanceTimeBase +
						(bigtime_t)
							((fFrame - fFrameBase) *
							(1000000 / fConnectedFormat.field_rate));*/
		h->start_time = TimeSource()->Now();
		h->file_pos = 0;
		h->orig_size = 0;
		h->data_offset = 0;
		h->u.raw_video.field_gamma = 1.0;
		h->u.raw_video.field_sequence = fFrame;
		h->u.raw_video.field_number = 0;
		h->u.raw_video.pulldown_number = 0;
		h->u.raw_video.first_active_line = 1;
		h->u.raw_video.line_count = fConnectedFormat.display.line_count;

		// Frame data pointers
		uint8 *tmp24 = (uint8*)tempInBuffer;
		uint8 *dst = (uint8*)buffer->Data();

		// Convert from jpeg to bitmap
		if (jpeg_check_size(fDeltaBuffer,
			FPIX_RGB24_WIDTH, FPIX_RGB24_HEIGHT)) {
			int n = jpeg_decode(fDeltaBuffer, tmp24,
				FPIX_RGB24_WIDTH, FPIX_RGB24_HEIGHT, 24, //32 not working
				&decdata);
			if (n) {
				PRINTF(-1, ("ooeps decode jpg result : %d", n));
			}
		} else {
			PRINTF(-1, ("ooeps check_size failed"));
		}

		// Convert from 24 bit to 32 bit
		for (uint y=0; y<fConnectedFormat.display.line_count; y++)
			for (uint x=0; x<fConnectedFormat.display.line_width; x++) {
				*(dst++) = *tmp24; //red
				tmp24++;
				*(dst++) = *tmp24; //green
				tmp24++;
				*(dst++) = *tmp24; //blue
				tmp24++;
				dst++; //last 8 bit empty
			}

		/* Send the buffer on down to the consumer */
		if (SendBuffer(buffer, fOutput.destination) < B_OK) {
			PRINTF(-1, ("FrameGenerator: Error sending buffer\n"));
			/* If there is a problem sending the buffer, return it to its
			 * buffer group. */
			buffer->Recycle();
		}
	}

	return B_OK;
}
/*
 * Dequeue one frame from the V4L2 capture queue, optionally dump it as a
 * raw file / append it to a raw stream capture, convert it into
 * vd->framebuffer (MJPEG is decoded, YUYV is copied), and requeue the
 * kernel buffer.  Returns 0 on success, -1 on error (and clears
 * vd->signalquit to stop the capture loop).
 */
int uvcGrab(struct vdIn *vd)
{
#define HEADERFRAME1 0xaf  /* frames this small carry only a header: skip */
    int ret;

    if (!vd->isstreaming)
	if (video_enable(vd))
	    goto err;
    memset(&vd->buf, 0, sizeof(struct v4l2_buffer));
    vd->buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    vd->buf.memory = V4L2_MEMORY_MMAP;
    ret = ioctl(vd->fd, VIDIOC_DQBUF, &vd->buf);
    if (ret < 0) {
	printf("Unable to dequeue buffer (%d) fd is %d.\n", errno, vd->fd);
	goto err;
    }

    /* Capture a single raw frame */
    if (vd->rawFrameCapture && vd->buf.bytesused > 0) {
	FILE *frame = NULL;
	char filename[13];
	int ret;

	/* Disable frame capturing unless we're in frame stream mode */
	if(vd->rawFrameCapture == 1)
	    vd->rawFrameCapture = 0;

	/* Create a file name and open the file */
	sprintf(filename, "frame%03u.raw", vd->fileCounter++ % 1000);
	frame = fopen(filename, "wb");
	if(frame == NULL) {
	    perror("Unable to open file for raw frame capturing");
	    goto end_capture;
	}

	/* Write the raw data to the file */
	ret = fwrite(vd->mem[vd->buf.index], vd->buf.bytesused, 1, frame);
	if(ret < 1) {
	    perror("Unable to write to file");
	    goto end_capture;
	}
	printf("Saved raw frame to %s (%u bytes)\n", filename,
	       vd->buf.bytesused);
	if(vd->rawFrameCapture == 2) {
	    vd->rfsBytesWritten += vd->buf.bytesused;
	    vd->rfsFramesWritten++;
	}

	/* Clean up */
end_capture:
	if(frame)
	    fclose(frame);
    }

    /* Capture raw stream data */
    if (vd->captureFile && vd->buf.bytesused > 0) {
	int ret;
	ret = fwrite(vd->mem[vd->buf.index], vd->buf.bytesused, 1,
		     vd->captureFile);
	if (ret < 1) {
	    /* Any write failure permanently stops stream capturing. */
	    perror("Unable to write raw stream to file");
	    fprintf(stderr, "Stream capturing terminated.\n");
	    fclose(vd->captureFile);
	    vd->captureFile = NULL;
	    vd->framesWritten = 0;
	    vd->bytesWritten = 0;
	} else {
	    vd->framesWritten++;
	    vd->bytesWritten += vd->buf.bytesused;
	    if (debug)
		printf("Appended raw frame to stream file (%u bytes)\n",
		       vd->buf.bytesused);
	}
    }

    switch (vd->formatIn) {
    case V4L2_PIX_FMT_MJPEG:
	if(vd->buf.bytesused <= HEADERFRAME1) {
	    /* Prevent crash on empty image */
	    /* if(debug)*/
	    printf("Ignoring empty buffer ...\n");
	    return 0;
	}
	/* Copy out of the mmap'ed buffer before decoding so the kernel
	 * buffer can be requeued while decode output is consumed. */
	memcpy(vd->tmpbuffer, vd->mem[vd->buf.index],vd->buf.bytesused);
	if (jpeg_decode(&vd->framebuffer, vd->tmpbuffer, &vd->width,
			&vd->height) < 0) {
	    printf("jpeg decode errors\n");
	    goto err;
	}
	if (debug)
	    printf("bytes in used %d \n", vd->buf.bytesused);
	break;
    case V4L2_PIX_FMT_YUYV:
	/* Clamp the copy to the framebuffer size to avoid overruns. */
	if (vd->buf.bytesused > vd->framesizeIn)
	    memcpy(vd->framebuffer, vd->mem[vd->buf.index],
		   (size_t) vd->framesizeIn);
	else
	    memcpy(vd->framebuffer, vd->mem[vd->buf.index],
		   (size_t) vd->buf.bytesused);
	break;
    default:
	goto err;
	break;
    }

    ret = ioctl(vd->fd, VIDIOC_QBUF, &vd->buf);
    if (ret < 0) {
	printf("Unable to requeue buffer (%d).\n", errno);
	goto err;
    }

    return 0;
err:
    vd->signalquit = 0;
    return -1;
}
/*
 * Decode the loaded JPEG at downscale factor `ds` into the per-scale display
 * slot disp[ds], carving the bitmap planes out of the shared image buffer
 * arena (buf_images / buf_images_size).  Fills `info` with the scaled
 * dimensions and a pointer to the display data.  Returns PLUGIN_OK on
 * success (including a cache hit) or PLUGIN_ERROR on decode failure.
 */
int get_image(struct image_info *info, int ds)
{
    int w, h; /* used to center output */
    int size; /* decompressed image size */
    long time; /* measured ticks */
    int status;
    struct jpeg* p_jpg = &jpg;
    struct t_disp* p_disp = &disp[ds]; /* short cut */

    info->width = p_jpg->x_size / ds;
    info->height = p_jpg->y_size / ds;
    info->data = p_disp;

    if (p_disp->bitmap[0] != NULL)
    {
        /* we still have it */
        return PLUGIN_OK;
    }

    /* assign image buffer */

    /* physical size needed for decoding */
    size = img_mem(ds);
    if (buf_images_size <= size)
    {   /* have to discard the current */
        int i;
        for (i=1; i<=8; i++)
            disp[i].bitmap[0] = NULL; /* invalidate all bitmaps */
        buf_images = buf_root; /* start again from the beginning of the buffer */
        buf_images_size = root_size;
    }

#ifdef HAVE_LCD_COLOR
    if (p_jpg->blocks > 1) /* colour jpeg */
    {
        int i;
        /* Carve out the two chroma planes, sized by the subsampling. */
        for (i = 1; i < 3; i++)
        {
            size = (p_jpg->x_phys / ds / p_jpg->subsample_x[i])
                 * (p_jpg->y_phys / ds / p_jpg->subsample_y[i]);
            p_disp->bitmap[i] = buf_images;
            buf_images += size;
            buf_images_size -= size;
        }
        p_disp->csub_x = p_jpg->subsample_x[1];
        p_disp->csub_y = p_jpg->subsample_y[1];
    }
    else
    {
        /* greyscale: both chroma pointers alias the same (unused) spot */
        p_disp->csub_x = p_disp->csub_y = 0;
        p_disp->bitmap[1] = p_disp->bitmap[2] = buf_images;
    }
#endif
    /* size may be less when decoded (if height is not block aligned) */
    size = (p_jpg->x_phys/ds) * (p_jpg->y_size/ds);
    p_disp->bitmap[0] = buf_images;
    buf_images += size;
    buf_images_size -= size;

    if(!running_slideshow)
    {
        rb->snprintf(print, sizeof(print), "decoding %d*%d",
            info->width, info->height);
        rb->lcd_puts(0, 3, print);
        rb->lcd_update();
    }

    /* update image properties */
    p_disp->stride = p_jpg->x_phys / ds; /* use physical size for stride */

    /* the actual decoding */
    time = *rb->current_tick;
#ifdef HAVE_ADJUSTABLE_CPU_FREQ
    rb->cpu_boost(true);  /* decode at full clock, then drop back */
    status = jpeg_decode(p_jpg, p_disp->bitmap, ds, cb_progress);
    rb->cpu_boost(false);
#else
    status = jpeg_decode(p_jpg, p_disp->bitmap, ds, cb_progress);
#endif
    if (status)
    {
        rb->splashf(HZ, "decode error %d", status);
        return PLUGIN_ERROR;
    }
    time = *rb->current_tick - time;

    if(!running_slideshow)
    {
        rb->snprintf(print, sizeof(print), " %ld.%02ld sec ", time/HZ, time%HZ);
        rb->lcd_getstringsize(print, &w, &h); /* centered in progress bar */
        rb->lcd_putsxy((LCD_WIDTH - w)/2, LCD_HEIGHT - h, print);
        rb->lcd_update();
    }

    return PLUGIN_OK;
}
/*
 * Show a boot splash image: load bootsplash.jpg (preferred) or
 * bootsplash.bmp from the ROM filesystem, decode it, pick a matching VESA
 * video mode, switch into it and copy the decoded picture to the
 * framebuffer.  All allocations are released through the `done:` path.
 */
void enable_bootsplash(void)
{
    if (!CONFIG_BOOTSPLASH)
        return;
    /* splash picture can be bmp or jpeg file */
    dprintf(3, "Checking for bootsplash\n");
    u8 type = 0; /* 0 means jpg, 1 means bmp, default is 0=jpg */
    int filesize;
    u8 *filedata = romfile_loadfile("bootsplash.jpg", &filesize);
    if (!filedata) {
        /* fall back to BMP when no JPEG is present */
        filedata = romfile_loadfile("bootsplash.bmp", &filesize);
        if (!filedata)
            return;
        type = 1;
    }
    dprintf(3, "start showing bootsplash\n");

    u8 *picture = NULL; /* data buff used to be flushed to the video buf */
    struct jpeg_decdata *jpeg = NULL;
    struct bmp_decdata *bmp = NULL;
    struct vbe_info *vesa_info = malloc_tmplow(sizeof(*vesa_info));
    struct vbe_mode_info *mode_info = malloc_tmplow(sizeof(*mode_info));
    if (!vesa_info || !mode_info) {
        warn_noalloc();
        goto done;
    }

    /* Check whether we have a VESA 2.0 compliant BIOS */
    memset(vesa_info, 0, sizeof(struct vbe_info));
    vesa_info->signature = VBE2_SIGNATURE;
    struct bregs br;
    memset(&br, 0, sizeof(br));
    br.ax = 0x4f00;  /* VBE function 00h: get controller information */
    br.di = FLATPTR_TO_OFFSET(vesa_info);
    br.es = FLATPTR_TO_SEG(vesa_info);
    call16_int10(&br);
    if (vesa_info->signature != VESA_SIGNATURE) {
        dprintf(1,"No VBE2 found.\n");
        goto done;
    }

    /* Print some debugging information about our card. */
    char *vendor = SEGOFF_TO_FLATPTR(vesa_info->oem_vendor_string);
    char *product = SEGOFF_TO_FLATPTR(vesa_info->oem_product_string);
    dprintf(3, "VESA %d.%d\nVENDOR: %s\nPRODUCT: %s\n",
            vesa_info->version>>8, vesa_info->version&0xff,
            vendor, product);

    int ret, width, height;
    int bpp_require = 0;
    if (type == 0) {
        jpeg = jpeg_alloc();
        if (!jpeg) {
            warn_noalloc();
            goto done;
        }
        /* Parse jpeg and get image size. */
        dprintf(5, "Decoding bootsplash.jpg\n");
        ret = jpeg_decode(jpeg, filedata);
        if (ret) {
            dprintf(1, "jpeg_decode failed with return code %d...\n", ret);
            goto done;
        }
        jpeg_get_size(jpeg, &width, &height);
    } else {
        bmp = bmp_alloc();
        if (!bmp) {
            warn_noalloc();
            goto done;
        }
        /* Parse bmp and get image size. */
        dprintf(5, "Decoding bootsplash.bmp\n");
        ret = bmp_decode(bmp, filedata, filesize);
        if (ret) {
            dprintf(1, "bmp_decode failed with return code %d...\n", ret);
            goto done;
        }
        bmp_get_size(bmp, &width, &height);
        bpp_require = 24;
    }
    /* jpeg would use 16 or 24 bpp video mode, BMP use 24bpp mode only */

    // Try to find a graphics mode with the corresponding dimensions.
    int videomode = find_videomode(vesa_info, mode_info, width, height,
                                       bpp_require);
    if (videomode < 0) {
        dprintf(1, "failed to find a videomode with %dx%d %dbpp (0=any).\n",
                    width, height, bpp_require);
        goto done;
    }
    void *framebuffer = (void *)mode_info->phys_base;
    int depth = mode_info->bits_per_pixel;
    dprintf(3, "mode: %04x\n", videomode);
    dprintf(3, "framebuffer: %p\n", framebuffer);
    dprintf(3, "bytes per scanline: %d\n", mode_info->bytes_per_scanline);
    dprintf(3, "bits per pixel: %d\n", depth);

    // Allocate space for image and decompress it.
    int imagesize = height * mode_info->bytes_per_scanline;
    picture = malloc_tmphigh(imagesize);
    if (!picture) {
        warn_noalloc();
        goto done;
    }

    if (type == 0) {
        dprintf(5, "Decompressing bootsplash.jpg\n");
        ret = jpeg_show(jpeg, picture, width, height, depth,
                            mode_info->bytes_per_scanline);
        if (ret) {
            dprintf(1, "jpeg_show failed with return code %d...\n", ret);
            goto done;
        }
    } else {
        dprintf(5, "Decompressing bootsplash.bmp\n");
        ret = bmp_show(bmp, picture, width, height, depth,
                           mode_info->bytes_per_scanline);
        if (ret) {
            dprintf(1, "bmp_show failed with return code %d...\n", ret);
            goto done;
        }
    }

    /* Switch to graphics mode */
    dprintf(5, "Switching to graphics mode\n");
    memset(&br, 0, sizeof(br));
    br.ax = 0x4f02;  /* VBE function 02h: set video mode */
    br.bx = videomode | VBE_MODE_LINEAR_FRAME_BUFFER;
    call16_int10(&br);
    if (br.ax != 0x4f) {
        dprintf(1, "set_mode failed.\n");
        goto done;
    }

    /* Show the picture */
    dprintf(5, "Showing bootsplash picture\n");
    iomemcpy(framebuffer, picture, imagesize);
    dprintf(5, "Bootsplash copy complete\n");
    BootsplashActive = 1;

done:
    free(filedata);
    free(picture);
    free(vesa_info);
    free(mode_info);
    free(jpeg);
    free(bmp);
    return;
}
/* Grab frame in YUYV mode */
/*
 * Dequeue one frame from the driver, convert it from the device's pixel
 * format into YUYV in `frameBuffer` (stride = outWidth*2 bytes), and
 * requeue the kernel buffer.  Frames are dropped (with an error log) when
 * `maxSize` is too small or the source format is unknown.
 */
void V4L2Camera::GrabRawFrame (void *frameBuffer, int maxSize)
{
    LOG_FRAME("V4L2Camera::GrabRawFrame: frameBuffer:%p, len:%d",frameBuffer,maxSize);
    int ret;

    /* DQ */
    memset(&videoIn->buf,0,sizeof(videoIn->buf));
    videoIn->buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    videoIn->buf.memory = V4L2_MEMORY_MMAP;
    ret = ioctl(fd, VIDIOC_DQBUF, &videoIn->buf);
    if (ret < 0) {
        LOGE("GrabPreviewFrame: VIDIOC_DQBUF Failed");
        return;
    }
    nDequeued++;

    // Calculate the stride of the output image (YUYV) in bytes
    int strideOut = videoIn->outWidth << 1;

    // And the pointer to the start of the image
    uint8_t* src = (uint8_t*)videoIn->mem[videoIn->buf.index] + videoIn->capCropOffset;

    LOG_FRAME("V4L2Camera::GrabRawFrame - Got Raw frame (%dx%d) (buf:%d@0x%p, len:%d)",videoIn->format.fmt.pix.width,videoIn->format.fmt.pix.height,videoIn->buf.index,src,videoIn->buf.bytesused);

    /* Avoid crashing! - Make sure there is enough room in the output buffer! */
    if (maxSize < videoIn->outFrameSize) {
        LOGE("V4L2Camera::GrabRawFrame: Insufficient space in output buffer: Required: %d, Got %d - DROPPING FRAME",videoIn->outFrameSize,maxSize);
    } else {
        /* Dispatch on the capture pixel format; every branch writes YUYV
         * into frameBuffer with stride strideOut. */
        switch (videoIn->format.fmt.pix.pixelformat)
        {
            case V4L2_PIX_FMT_JPEG:
            case V4L2_PIX_FMT_MJPEG:
                if(videoIn->buf.bytesused <= HEADERFRAME1) {
                    // Prevent crash on empty image
                    LOGE("Ignoring empty buffer ...\n");
                    break;
                }

                if (jpeg_decode((uint8_t*)frameBuffer, strideOut, src, videoIn->outWidth, videoIn->outHeight) < 0) {
                    LOGE("jpeg decode errors\n");
                    break;
                }
                break;

            case V4L2_PIX_FMT_UYVY:
                uyvy_to_yuyv((uint8_t*)frameBuffer, strideOut,
                    src, videoIn->format.fmt.pix.bytesperline,
                    videoIn->outWidth, videoIn->outHeight);
                break;

            case V4L2_PIX_FMT_YVYU:
                yvyu_to_yuyv((uint8_t*)frameBuffer, strideOut,
                    src, videoIn->format.fmt.pix.bytesperline,
                    videoIn->outWidth, videoIn->outHeight);
                break;

            case V4L2_PIX_FMT_YYUV:
                yyuv_to_yuyv((uint8_t*)frameBuffer, strideOut,
                    src, videoIn->format.fmt.pix.bytesperline,
                    videoIn->outWidth, videoIn->outHeight);
                break;

            case V4L2_PIX_FMT_YUV420:
                yuv420_to_yuyv((uint8_t*)frameBuffer, strideOut,
                    src, videoIn->outWidth, videoIn->outHeight);
                break;

            case V4L2_PIX_FMT_YVU420:
                yvu420_to_yuyv((uint8_t*)frameBuffer, strideOut,
                    src, videoIn->outWidth, videoIn->outHeight);
                break;

            case V4L2_PIX_FMT_NV12:
                nv12_to_yuyv((uint8_t*)frameBuffer, strideOut,
                    src, videoIn->outWidth, videoIn->outHeight);
                break;

            case V4L2_PIX_FMT_NV21:
                nv21_to_yuyv((uint8_t*)frameBuffer, strideOut,
                    src, videoIn->outWidth, videoIn->outHeight);
                break;

            case V4L2_PIX_FMT_NV16:
                nv16_to_yuyv((uint8_t*)frameBuffer, strideOut,
                    src, videoIn->outWidth, videoIn->outHeight);
                break;

            case V4L2_PIX_FMT_NV61:
                nv61_to_yuyv((uint8_t*)frameBuffer, strideOut,
                    src, videoIn->outWidth, videoIn->outHeight);
                break;

            case V4L2_PIX_FMT_Y41P:
                y41p_to_yuyv((uint8_t*)frameBuffer, strideOut,
                    src, videoIn->outWidth, videoIn->outHeight);
                break;

            case V4L2_PIX_FMT_GREY:
                grey_to_yuyv((uint8_t*)frameBuffer, strideOut,
                    src, videoIn->format.fmt.pix.bytesperline,
                    videoIn->outWidth, videoIn->outHeight);
                break;

            case V4L2_PIX_FMT_Y16:
                y16_to_yuyv((uint8_t*)frameBuffer, strideOut,
                    src, videoIn->format.fmt.pix.bytesperline,
                    videoIn->outWidth, videoIn->outHeight);
                break;

            case V4L2_PIX_FMT_SPCA501:
                s501_to_yuyv((uint8_t*)frameBuffer, strideOut,
                    src, videoIn->outWidth, videoIn->outHeight);
                break;

            case V4L2_PIX_FMT_SPCA505:
                s505_to_yuyv((uint8_t*)frameBuffer, strideOut,
                    src, videoIn->outWidth, videoIn->outHeight);
                break;

            case V4L2_PIX_FMT_SPCA508:
                s508_to_yuyv((uint8_t*)frameBuffer, strideOut,
                    src, videoIn->outWidth, videoIn->outHeight);
                break;

            case V4L2_PIX_FMT_YUYV:
                {
                    /* already YUYV: row-by-row copy to fix up the stride */
                    int h;
                    uint8_t* pdst = (uint8_t*)frameBuffer;
                    uint8_t* psrc = src;
                    int ss = videoIn->outWidth << 1;
                    for (h = 0; h < videoIn->outHeight; h++) {
                        memcpy(pdst,psrc,ss);
                        pdst += strideOut;
                        psrc += videoIn->format.fmt.pix.bytesperline;
                    }
                }
                break;

            /* Bayer formats: demosaic to RGB24 in tmpBuffer, then to YUYV */
            case V4L2_PIX_FMT_SGBRG8: //0
                bayer_to_rgb24 (src,(uint8_t*) videoIn->tmpBuffer, videoIn->outWidth, videoIn->outHeight, 0);
                rgb_to_yuyv ((uint8_t*) frameBuffer, strideOut,
                    (uint8_t*)videoIn->tmpBuffer, videoIn->outWidth*3,
                    videoIn->outWidth, videoIn->outHeight);
                break;

            case V4L2_PIX_FMT_SGRBG8: //1
                bayer_to_rgb24 (src,(uint8_t*) videoIn->tmpBuffer, videoIn->outWidth, videoIn->outHeight, 1);
                rgb_to_yuyv ((uint8_t*) frameBuffer, strideOut,
                    (uint8_t*)videoIn->tmpBuffer, videoIn->outWidth*3,
                    videoIn->outWidth, videoIn->outHeight);
                break;

            case V4L2_PIX_FMT_SBGGR8: //2
                bayer_to_rgb24 (src,(uint8_t*) videoIn->tmpBuffer, videoIn->outWidth, videoIn->outHeight, 2);
                rgb_to_yuyv ((uint8_t*) frameBuffer, strideOut,
                    (uint8_t*)videoIn->tmpBuffer, videoIn->outWidth*3,
                    videoIn->outWidth, videoIn->outHeight);
                break;

            case V4L2_PIX_FMT_SRGGB8: //3
                bayer_to_rgb24 (src,(uint8_t*) videoIn->tmpBuffer, videoIn->outWidth, videoIn->outHeight, 3);
                rgb_to_yuyv ((uint8_t*) frameBuffer, strideOut,
                    (uint8_t*)videoIn->tmpBuffer, videoIn->outWidth*3,
                    videoIn->outWidth, videoIn->outHeight);
                break;

            case V4L2_PIX_FMT_RGB24:
                rgb_to_yuyv((uint8_t*) frameBuffer, strideOut,
                    src, videoIn->format.fmt.pix.bytesperline,
                    videoIn->outWidth, videoIn->outHeight);
                break;

            case V4L2_PIX_FMT_BGR24:
                bgr_to_yuyv((uint8_t*) frameBuffer, strideOut,
                    src, videoIn->format.fmt.pix.bytesperline,
                    videoIn->outWidth, videoIn->outHeight);
                break;

            default:
                LOGE("error grabbing: unknown format: %i\n", videoIn->format.fmt.pix.pixelformat);
                break;
        }

        LOG_FRAME("V4L2Camera::GrabRawFrame - Copied frame to destination 0x%p",frameBuffer);
    }

    /* And Queue the buffer again */
    ret = ioctl(fd, VIDIOC_QBUF, &videoIn->buf);
    if (ret < 0) {
        LOGE("GrabPreviewFrame: VIDIOC_QBUF Failed");
        return;
    }
    nQueued++;

    LOG_FRAME("V4L2Camera::GrabRawFrame - Queued buffer");
}
/* Decode a JPEG of `jpeg_size` bytes into the shared YUYV scratch buffer,
 * then convert that intermediate to packed RGB in `rgb`. */
void jpeg_to_rgb(unsigned char* rgb, unsigned char* jpeg,
                 const unsigned int jpeg_size)
{
    init_buffers();                           /* make sure scratch buffers exist */
    jpeg_decode(yuyv_buffer, jpeg, jpeg_size); /* JPEG -> YUYV intermediate */
    yuyv_to_rgb(rgb, yuyv_buffer);             /* YUYV -> RGB output */
}
/*
 * Decode a compressed JPEG image into a packed BGR buffer.
 *
 * bgr       - destination buffer (caller-allocated; must be large enough
 *             for the decoded frame — size contract lives with the caller)
 * jpeg      - compressed JPEG byte stream
 * jpeg_size - length of the JPEG stream in bytes
 *
 * Same pipeline as jpeg_to_rgb, but the final conversion swaps the
 * channel order: JPEG -> YUYV -> BGR.
 */
void jpeg_to_bgr(unsigned char* bgr, unsigned char* jpeg, const unsigned int jpeg_size)
{
    /* lazily set up the shared intermediate buffers */
    init_buffers();

    /* stage 1: JPEG -> YUYV into the module scratch buffer */
    jpeg_decode(yuyv_buffer, jpeg, jpeg_size);

    /* stage 2: YUYV -> BGR into the caller's buffer */
    yuyv_to_bgr(bgr, yuyv_buffer);
}
/*
 * decode video stream ( from raw_frame to frame buffer (yuyv format))
 * args:
 *   vd - pointer to device data
 *   frame - pointer to frame buffer
 *
 * asserts:
 *   vd is not null
 *
 * returns: error code ( 0 - E_OK)
 *
 * Fixes over the previous revision:
 *  - the guard around the ARGB555X/XRGB555X case labels tested the
 *    misspelled macro V4L2_PIX_FMT_ARGB4555X, so those labels were
 *    never compiled in; it now tests V4L2_PIX_FMT_ARGB555X.
 *  - removed the unused local framesizeIn and a block of commented-out
 *    dead code in the JPEG branch.
 */
int decode_v4l2_frame(v4l2_dev_t *vd, v4l2_frame_buff_t *frame)
{
	/*asserts*/
	assert(vd != NULL);

	/* refuse to decode an empty/NULL raw frame */
	if(!frame->raw_frame || frame->raw_frame_size == 0)
	{
		fprintf(stderr, "V4L2_CORE: not decoding empty raw frame (frame of size %i at 0x%p)\n",
			(int) frame->raw_frame_size, frame->raw_frame);
		return E_DECODE_ERR;
	}

	if(verbosity > 3)
		printf("V4L2_CORE: decoding raw frame of size %i at 0x%p\n",
			(int) frame->raw_frame_size, frame->raw_frame );

	int ret = E_OK;

	int width = vd->format.fmt.pix.width;
	int height = vd->format.fmt.pix.height;

	frame->isKeyframe = 0; /*reset*/

	/*
	 * use the requested format since it may differ
	 * from format.fmt.pix.pixelformat (muxed H264)
	 */
	int format = vd->requested_fmt;

	switch (format)
	{
		case V4L2_PIX_FMT_H264:
			/*
			 * get the h264 frame in the tmp_buffer
			 */
			frame->h264_frame_size = demux_h264(
				frame->h264_frame,
				frame->raw_frame,
				frame->raw_frame_size,
				frame->h264_frame_max_size);

			/*
			 * store SPS and PPS info (usually the first two NALU)
			 * and check/store the last IDR frame
			 */
			store_extra_data(vd, frame);

			/*
			 * check for keyframe and store it
			 */
			frame->isKeyframe = is_h264_keyframe(vd, frame);

			/* decode only once we have seen an IDR frame */
			if(vd->h264_last_IDR_size > 0)
			{
				/*no need to convert output*/
				h264_decode(frame->yuv_frame, frame->h264_frame, frame->h264_frame_size);
			}
			break;

		case V4L2_PIX_FMT_JPEG:
		case V4L2_PIX_FMT_MJPEG:
			if(frame->raw_frame_size <= HEADERFRAME1)
			{
				/* Prevent crash on empty image */
				fprintf(stderr, "V4L2_CORE: (jpeg decoder) Ignoring empty buffer\n");
				ret = E_DECODE_ERR;
				return (ret);
			}

			/* jpeg_decode returns the decoded frame size, not an error code */
			ret = jpeg_decode(frame->yuv_frame, frame->raw_frame, frame->raw_frame_size);

			if(verbosity > 3)
				fprintf(stderr, "V4L2_CORE: (jpeg decoder) decode frame of size %i\n", ret);
			ret = E_OK;
			break;

		case V4L2_PIX_FMT_UYVY:
			uyvy_to_yu12(frame->yuv_frame, frame->raw_frame, width, height);
			break;

		case V4L2_PIX_FMT_VYUY:
			vyuy_to_yu12(frame->yuv_frame, frame->raw_frame, width, height);
			break;

		case V4L2_PIX_FMT_YVYU:
			yvyu_to_yu12(frame->yuv_frame, frame->raw_frame, width, height);
			break;

		case V4L2_PIX_FMT_YYUV:
			yyuv_to_yu12(frame->yuv_frame, frame->raw_frame, width, height);
			break;

		case V4L2_PIX_FMT_YUV444:
			y444_to_yu12(frame->yuv_frame, frame->raw_frame, width, height);
			break;

		case V4L2_PIX_FMT_YUV555:
			yuvo_to_yu12(frame->yuv_frame, frame->raw_frame, width, height);
			break;

		case V4L2_PIX_FMT_YUV565:
			yuvp_to_yu12(frame->yuv_frame, frame->raw_frame, width, height);
			break;

		case V4L2_PIX_FMT_YUV32:
			yuv4_to_yu12(frame->yuv_frame, frame->raw_frame, width, height);
			break;

		case V4L2_PIX_FMT_YUV420:
			/* already YU12: clamp to the expected planar size and copy */
			if(frame->raw_frame_size > (width * height * 3/2))
				frame->raw_frame_size = width * height * 3/2;
			memcpy(frame->yuv_frame, frame->raw_frame, frame->raw_frame_size);
			break;

		case V4L2_PIX_FMT_YUV422P:
			yuv422p_to_yu12(frame->yuv_frame, frame->raw_frame, width, height);
			break;

		case V4L2_PIX_FMT_YVU420:
			yv12_to_yu12(frame->yuv_frame, frame->raw_frame, width, height);
			break;

		case V4L2_PIX_FMT_NV12:
			nv12_to_yu12(frame->yuv_frame, frame->raw_frame, width, height);
			break;

		case V4L2_PIX_FMT_NV21:
			nv21_to_yu12(frame->yuv_frame, frame->raw_frame, width, height);
			break;

		case V4L2_PIX_FMT_NV16:
			nv16_to_yu12(frame->yuv_frame, frame->raw_frame, width, height);
			break;

		case V4L2_PIX_FMT_NV61:
			nv61_to_yu12(frame->yuv_frame, frame->raw_frame, width, height);
			break;

		case V4L2_PIX_FMT_NV24:
			nv24_to_yu12(frame->yuv_frame, frame->raw_frame, width, height);
			break;

		case V4L2_PIX_FMT_NV42:
			nv42_to_yu12(frame->yuv_frame, frame->raw_frame, width, height);
			break;

		case V4L2_PIX_FMT_Y41P:
			y41p_to_yu12(frame->yuv_frame, frame->raw_frame, width, height);
			break;

		case V4L2_PIX_FMT_GREY:
			grey_to_yu12(frame->yuv_frame, frame->raw_frame, width, height);
			break;

		case V4L2_PIX_FMT_Y10BPACK:
			y10b_to_yu12(frame->yuv_frame, frame->raw_frame, width, height);
			break;

		case V4L2_PIX_FMT_Y16:
			y16_to_yu12(frame->yuv_frame, frame->raw_frame, width, height);
			break;

#ifdef V4L2_PIX_FMT_Y16_BE
		case V4L2_PIX_FMT_Y16_BE:
			y16x_to_yu12(frame->yuv_frame, frame->raw_frame, width, height);
			break;
#endif

		case V4L2_PIX_FMT_SPCA501:
			s501_to_yu12(frame->yuv_frame, frame->raw_frame, width, height);
			break;

		case V4L2_PIX_FMT_SPCA505:
			s505_to_yu12(frame->yuv_frame, frame->raw_frame, width, height);
			break;

		case V4L2_PIX_FMT_SPCA508:
			s508_to_yu12(frame->yuv_frame, frame->raw_frame, width, height);
			break;

		case V4L2_PIX_FMT_YUYV:
			if(vd->isbayer>0)
			{
				if (!(frame->tmp_buffer))
				{
					/* rgb buffer for decoding bayer data*/
					frame->tmp_buffer_max_size = width * height * 3;
					frame->tmp_buffer = calloc(frame->tmp_buffer_max_size, sizeof(uint8_t));
					if(frame->tmp_buffer == NULL)
					{
						fprintf(stderr, "V4L2_CORE: FATAL memory allocation failure (v4l2core_frame_decode): %s\n", strerror(errno));
						exit(-1);
					}
				}
				/*convert raw bayer to iyuv*/
				bayer_to_rgb24 (frame->raw_frame, frame->tmp_buffer, width, height, vd->bayer_pix_order);
				rgb24_to_yu12(frame->yuv_frame, frame->tmp_buffer, width, height);
			}
			else
				yuyv_to_yu12(frame->yuv_frame, frame->raw_frame, width, height);
			break;

		case V4L2_PIX_FMT_SGBRG8: //0
			bayer_to_rgb24 (frame->raw_frame, frame->tmp_buffer, width, height, 0);
			rgb24_to_yu12(frame->yuv_frame, frame->tmp_buffer, width, height);
			break;

		case V4L2_PIX_FMT_SGRBG8: //1
			bayer_to_rgb24 (frame->raw_frame, frame->tmp_buffer, width, height, 1);
			rgb24_to_yu12(frame->yuv_frame, frame->tmp_buffer, width, height);
			break;

		case V4L2_PIX_FMT_SBGGR8: //2
			bayer_to_rgb24 (frame->raw_frame, frame->tmp_buffer, width, height, 2);
			rgb24_to_yu12(frame->yuv_frame, frame->tmp_buffer, width, height);
			break;

		case V4L2_PIX_FMT_SRGGB8: //3
			bayer_to_rgb24 (frame->raw_frame, frame->tmp_buffer, width, height, 3);
			rgb24_to_yu12(frame->yuv_frame, frame->tmp_buffer, width, height);
			break;

		case V4L2_PIX_FMT_RGB24:
			rgb24_to_yu12(frame->yuv_frame, frame->raw_frame, width, height);
			break;

		case V4L2_PIX_FMT_BGR24:
			bgr24_to_yu12(frame->yuv_frame, frame->raw_frame, width, height);
			break;

		case V4L2_PIX_FMT_RGB332:
			rgb1_to_yu12(frame->yuv_frame, frame->raw_frame, width, height);
			break;

		case V4L2_PIX_FMT_RGB565:
			rgbp_to_yu12(frame->yuv_frame, frame->raw_frame, width, height);
			break;

		case V4L2_PIX_FMT_RGB565X:
			rgbr_to_yu12(frame->yuv_frame, frame->raw_frame, width, height);
			break;

		case V4L2_PIX_FMT_RGB444:
#ifdef V4L2_PIX_FMT_ARGB444
		case V4L2_PIX_FMT_ARGB444:
		case V4L2_PIX_FMT_XRGB444: //same as above but without alpha channel
#endif
			ar12_to_yu12(frame->yuv_frame, frame->raw_frame, width, height);
			break;

		case V4L2_PIX_FMT_RGB555:
#ifdef V4L2_PIX_FMT_ARGB555
		case V4L2_PIX_FMT_ARGB555:
		case V4L2_PIX_FMT_XRGB555: //same as above but without alpha channel
#endif
			ar15_to_yu12(frame->yuv_frame, frame->raw_frame, width, height);
			break;

		case V4L2_PIX_FMT_RGB555X:
#ifdef V4L2_PIX_FMT_ARGB555X /* was misspelled V4L2_PIX_FMT_ARGB4555X */
		case V4L2_PIX_FMT_ARGB555X:
		case V4L2_PIX_FMT_XRGB555X: //same as above but without alpha channel
#endif
			ar15x_to_yu12(frame->yuv_frame, frame->raw_frame, width, height);
			break;

		case V4L2_PIX_FMT_BGR666:
			bgrh_to_yu12(frame->yuv_frame, frame->raw_frame, width, height);
			break;

		case V4L2_PIX_FMT_BGR32:
#ifdef V4L2_PIX_FMT_ABGR32
		case V4L2_PIX_FMT_ABGR32:
		case V4L2_PIX_FMT_XBGR32: //same as above but without alpha channel
#endif
			ar24_to_yu12(frame->yuv_frame, frame->raw_frame, width, height);
			break;

		case V4L2_PIX_FMT_RGB32:
#ifdef V4L2_PIX_FMT_ARGB32
		case V4L2_PIX_FMT_ARGB32:
		case V4L2_PIX_FMT_XRGB32: //same as above but without alpha channel
#endif
			ba24_to_yu12(frame->yuv_frame, frame->raw_frame, width, height);
			break;

		default:
			fprintf(stderr, "V4L2_CORE: error decoding frame: unknown format: %i\n", format);
			ret = E_UNKNOWN_ERR;
			break;
	}

	return ret;
}