/*
 * Tear down the video capture pipeline and release both frame contexts.
 * Device shutdown happens in reverse order of initialisation; the two
 * IMAGE_CONTEXTs are owned by the caller and destroyed here.
 */
void video_cleanup(IMAGE_CONTEXT *v1, IMAGE_CONTEXT *v2)
{
    /* Stop streaming, unmap buffers, then close the device node. */
    stop_capturing();
    uninit_device();
    close_device();

    /* Release the caller's two image contexts. */
    image_destroy(v1);
    image_destroy(v2);
}
/*
 * Drop one reference to a PNG texture.
 * Images tracked in res.hash_img are reference counted: the texture is
 * unregistered and destroyed only when usedCount reaches zero.  Untracked
 * images are destroyed immediately.
 */
PUBLIC void res_releasePng(Texture *img)
{
    const size_t keyLen = strlen(img->filePath);

    /* Not in the resource cache: caller holds the only reference. */
    if (TRUE != hash_get(res.hash_img, img->filePath, keyLen, NULL, NULL)) {
        image_destroy(img);
        return;
    }

    /* Cached: drop a reference; free once nobody is using it. */
    img->usedCount--;
    if (img->usedCount <= 0) {
        hash_unset(res.hash_img, img->filePath, keyLen);
        image_destroy(img);
    }
}
/*
 * Release every resource owned by a decoder instance created by
 * m4v_init_decoder(): the macroblock array, the slice map, the two
 * edged reference/current images, and finally the DECODER itself.
 * Timer statistics are flushed afterwards.  Always returns XVID_ERR_OK.
 */
xint m4v_free_decoder(DEC_CTRL * xparam)
{
    DECODER *dec = (DECODER *) xparam->handle;

    xvid_free(dec->mbs);
    xvid_free(dec->slice);
    /* Images were created with the edged (padded) dimensions. */
    image_destroy(&dec->refn, dec->edged_width, dec->edged_height);
    image_destroy(&dec->cur, dec->edged_width, dec->edged_height);
    /* dec must be freed last — the calls above read its fields. */
    xvid_free(dec);

    write_timer();
    cleanup_timer();
    return XVID_ERR_OK;
}
int main(int argc, char **argv) { unsigned short *image565 = screen_init(); struct image *image = image_new(WIDTH, HEIGHT); struct image *font = image_new(BLOCK_X, BLOCK_Y*50); struct Glyph glyph[50]; int i; recognize_init(); event_init(); image_load(font, "data.raw"); memset(glyph, 0, sizeof(glyph)); printf("Press any key to start..."); getc(stdin); printf("Recognizing 1~25 ...\n"); screen_capture(image565); rgb565_to_rgb24(image->buf, image565); threshold(THRESHOLD, image->buf); recognize(image, font, glyph, 0); for (i = 0; i < 25; ++i) { send_touch(glyph[i].x, glyph[i].y); usleep(100); } printf("\n\nPress any key to continue..."); getc(stdin); printf("Recognizing 26~50 ...\n"); screen_capture(image565); rgb565_to_rgb24(image->buf, image565); threshold(THRESHOLD, image->buf); recognize(image, font, glyph, 1); for (i = 24; i < 50; ++i) { send_touch(glyph[i].x, glyph[i].y); usleep(100); } image_destroy(font); event_destroy(); image_destroy(image); screen_destroy(image565); return 0; }
/*
 * Render the global depth_image as ASCII art into `file`.
 * The depth image is downsampled to width x height, then each pixel is
 * mapped to one of four density glyphs ('%' = nearest, ' ' = farthest).
 */
void draw_depth_image(FILE *file, int width, int height)
{
    image *scaled = image_create(width, height);
    image_downsample(depth_image, scaled);

    for (int row = 0; row < height; row++) {
        for (int col = 0; col < width; col++) {
            unsigned char depth = image_get_pixel(scaled, col, row);
            unsigned char glyph;

            /* Quantize the 8-bit depth into four brightness bands. */
            if (depth < 0x40)
                glyph = '%';
            else if (depth < 0x80)
                glyph = '+';
            else if (depth < 0xC0)
                glyph = '.';
            else
                glyph = ' ';

            fputc(glyph, file);
        }
        fputc('\n', file);
    }

    image_destroy(scaled);
}
int recognize_font(struct image *block, struct image *font, int range) { int i, ret = 0; unsigned int weight = ~0U, w; struct image *tmpblk = image_new(BLOCK_X, BLOCK_Y); int from, stop; if (range == 0) { from = 0; stop = 25; } else { from = 24; stop = 50; } for (i = from; i < stop; ++i) { // for (i = 0; i < 50; ++i) { if (font_array[i] == 1) continue; font_getimage(font, tmpblk, i); w = image_weight(tmpblk, block); // printf("[%d] weight: %d\n", i, w); if (w < weight) { weight = w; ret = i; } } // printf("[%d] = %d\n", ret+1, w); image_destroy(tmpblk); font_array[ret] = 1; return ret; }
Image *image_generate(unsigned int width, unsigned int height) { Image *img = image_create(width, height, IMAGE_RGB32); if (!img) return NULL; int ok; const enum GenImageType genimage_type = common_get_context()->genimage_type; switch (genimage_type) { case GENIMAGE_AUTO: case GENIMAGE_FLOWERS: ok = image_generate_flowers(img); if (ok || genimage_type == GENIMAGE_FLOWERS) break; /* fall-through */ case GENIMAGE_RECTS: ok = image_generate_rects(img); break; case GENIMAGE_RGB_RECTS: ok = image_generate_rgb_rects(img); break; default: ok = 0; break; } if (!ok) { image_destroy(img); return NULL; } return img; }
/*
 * Allocate an image_t, bind it to `file_name`/`file_type`, and run the
 * type-specific loader.  Returns NULL on any failure; on loader failure
 * the partially built image is destroyed.
 */
image_t *image_create_from_file(image_choices *choices, const char *file_name, bits file_type)
{
    image_t *i = image_create();
    /* BUG FIX: previously jumped to Failure here, which called
     * image_destroy(NULL) on the just-failed allocation. */
    if (i == NULL)
        return NULL;

    /* NOTE(review): assumes file_name fits in i->file_name — confirm the
     * buffer size against callers. */
    strcpy(i->file_name, file_name);
    i->source.file_type = file_type;

    if (loader_export_methods(choices, i, file_type))
        goto Failure;
    if (i->methods.load(choices, i))
        goto Failure;

    return i;

Failure:
    image_destroy(i);
    return NULL;
}
/*
 * Capture one webcam frame and return it rendered as an ASCII string.
 * Caller owns the returned buffer (allocated by image_print).
 */
char *ascii_read()
{
    /* Grab and decode a fresh JPEG frame from the webcam. */
    FILE *jpeg = webcam_read();
    image_t *original = image_read(jpeg);
    fclose(jpeg);

    /* Scale down to the requested output dimensions. */
    image_t *resized = image_new(opt_width, opt_height);
    image_clear(resized);
    image_resize(original, resized);

    char *ascii = image_print(resized);

    image_destroy(original);
    image_destroy(resized);
    return ascii;
}
/*
 * Entry point: stream Kinect depth frames and draw them as ASCII art to
 * stdout until interrupted (SIGINT/SIGTERM clear `running` via
 * handle_interrupt).  Exits 1 on any libfreenect setup failure.
 */
int main()
{
    freenect_context *f_ctx;
    freenect_device *f_dev;

    if (freenect_init(&f_ctx, NULL) < 0) {
        fprintf(stderr, "freenect_init() failed\n");
        return 1;
    }
    /* We need both the motor (for the LED) and the camera subdevice. */
    freenect_select_subdevices(f_ctx, (freenect_device_flags)(FREENECT_DEVICE_MOTOR | FREENECT_DEVICE_CAMERA));
    if (freenect_num_devices(f_ctx) < 1) {
        fprintf(stderr, "no devices found\n");
        freenect_shutdown(f_ctx);
        return 1;
    }
    if (freenect_open_device(f_ctx, &f_dev, 0) < 0) {
        fprintf(stderr, "can't open device\n");
        freenect_shutdown(f_ctx);
        return 1;
    }

    /* Global buffer that capture_depth_image fills and
     * draw_depth_image reads. */
    depth_image = image_create(640, 480);

    freenect_set_led(f_dev, LED_GREEN);
    freenect_set_depth_callback(f_dev, capture_depth_image);
    freenect_set_depth_mode(f_dev, freenect_find_depth_mode(FREENECT_RESOLUTION_MEDIUM, FREENECT_DEPTH_11BIT));
    freenect_start_depth(f_dev);

    /* Install handlers unless the signals were already ignored
     * (e.g. running under nohup). */
    if (signal(SIGINT, handle_interrupt) == SIG_IGN) {
        signal(SIGINT, SIG_IGN);
    }
    if (signal(SIGTERM, handle_interrupt) == SIG_IGN) {
        signal(SIGTERM, SIG_IGN);
    }

    /* Clear the terminal once, then redraw from the home position
     * each frame, sized to the current terminal window. */
    fprintf(stdout, "\x1B[2J");
    while (running && freenect_process_events(f_ctx) >= 0) {
        struct winsize w;
        ioctl(STDOUT_FILENO, TIOCGWINSZ, &w);
        fprintf(stdout, "\x1B[1;1H");
        draw_depth_image(stdout, w.ws_col, w.ws_row - 1);
    }

    /* Orderly shutdown: stop streaming before closing the device. */
    freenect_stop_depth(f_dev);
    freenect_set_led(f_dev, LED_OFF);
    freenect_close_device(f_dev);
    freenect_shutdown(f_ctx);
    image_destroy(depth_image);
    return 0;
}
/* * main */ int main(int argc, char **argv) { struct prog_opts prog_opts; int i; struct image curr_img; /* configure defaults */ prog_opts.verbose_fl=0; prog_opts.tile_w=DEFAULT_W; prog_opts.tile_h=DEFAULT_H; prog_opts.out_bpp=DEFAULT_BPP; prog_opts.out_filename=DEFAULT_OUTFILE; /* load command-line configuration */ if (!parse_args(&prog_opts, argc, argv)) { return EXIT_FAILURE; } TRACE("opts: %ux%u@%u '%s'\n", prog_opts.tile_w, prog_opts.tile_h, prog_opts.out_bpp, prog_opts.out_filename); if (optind >= argc) { usage(); return EXIT_FAILURE; } if (optind+1 != argc) { fprintf(stderr, "Currently only supports exactly 1 input filename.\n"); usage(); return EXIT_FAILURE; } for (i=optind; i<argc; i++) { if (!load_png(argv[i], &curr_img)) { fprintf(stderr, "Could not load image '%s'\n", argv[i]); return EXIT_FAILURE; } if (!save_chr(prog_opts.out_filename, &curr_img, prog_opts.tile_w, prog_opts.tile_h)) { fprintf(stderr, "Could not save image '%s'\n", prog_opts.out_filename); return EXIT_FAILURE; } image_destroy(&curr_img); } return EXIT_SUCCESS; }
/*
 * Load a raw pixel file and upload it as a 2D texture bound to texID.
 * A scratch Image is used only as a staging buffer and freed on exit.
 */
void texture_load2DRaw(char *filename, int width, int height, int bpp, GLenum format, GLint texID)
{
    Image *scratch = image_create(width, height, bpp);
    image_loadRaw(filename, scratch);
    initTexture2D(texID, width, height, format, scratch->data);
    image_destroy(scratch);
}
/*
 * Skeleton image reader — template for adding a new image format.
 * The implementation is entirely compiled out (#if 0); the function
 * currently always returns NULL.
 */
static struct siv_image_t *skel_read(unsigned char *data, unsigned long size)
{
#if 0
    /* This function interprets the data given in its arguments and
     * builds an image from it.
     * On success it returns a malloc()'d struct siv_image_t *;
     * on failure it returns NULL. */
    struct siv_image_t *img;

    img = malloc(sizeof *img);
    img->has_alpha = 0; // whether the data contains an alpha channel
    img->width = 0;     // image width
    img->height = 0;    // image height
    /* Pixel data.
     * Runs from the top-left of the image, left to right.
     * R, G, B, A are 1 byte each, 0-255.
     * Without alpha: R, G, B, R, G, B, ...
     * With alpha:    R, G, B, A, R, G, B, A, ...
     * A is 0 for transparent, 255 for opaque. */
    img->data = malloc(img->width * img->height * 3);
    /* Not used at the moment, so leave these NULL. */
    img->comment = NULL;
    img->short_info = NULL;
    img->long_info = NULL;

    /* NOTE(review): `failed` is a placeholder flag — a real reader must
     * define it. */
    if (failed) {
        /* On failure, release img with image_destroy().
         * That function also frees
         *   img->data
         *   img->comment
         *   img->short_info
         *   img->long_info
         * when they are non-NULL. */
        image_destroy(img);
        img = NULL;
    }
    return img;
#endif
    return NULL;
}
/*
 * Free a gui object: its optional background image, every child element
 * of its frame (plus the child array itself), and finally the gui struct.
 */
void gui_destroy(gui_p gui)
{
    if (gui->image)
        image_destroy(gui->image);

    if (gui->frame.array_count > 0) {
        for (int idx = 0; idx < gui->frame.array_count; idx++)
            gui_element_destroy(gui->frame.child[idx]);
        free(gui->frame.child);
    }

    free(gui);
}
/*
 * OpenVG entry point: destroy an image handle.
 * Sets VG_BAD_HANDLE_ERROR and returns if the handle is VG_INVALID_HANDLE
 * or does not refer to a live image object.
 */
void vgDestroyImage(VGImage image)
{
    struct vg_context *ctx = vg_current_context();
    struct vg_image *img = (struct vg_image *) image;

    /* Both failure modes report the same error, so fold the checks;
     * short-circuit evaluation preserves the original check order. */
    if (image == VG_INVALID_HANDLE ||
        !vg_object_is_valid((void *) image, VG_OBJECT_IMAGE)) {
        vg_set_error(ctx, VG_BAD_HANDLE_ERROR);
        return;
    }

    image_destroy(img);
}
/* GObject finalize: tear down the delete animation, release every image
 * payload, free the backing array, then chain up to the parent class. */
static void
cb_compose_images_finalize (GObject *o)
{
  CbComposeImages *self = (CbComposeImages *) o;
  guint idx;

  cb_animation_destroy (&self->delete_animation);

  /* Destroy the payload of each slot; the slots themselves are
   * reclaimed by g_array_free below, so nothing is removed here. */
  for (idx = 0; idx < self->images->len; idx++)
    image_destroy (&g_array_index (self->images, Image, idx));

  g_array_free (self->images, TRUE);

  G_OBJECT_CLASS (cb_compose_images_parent_class)->finalize (o);
}
/*
 * spriteinfo_destroy()
 * Destroys a spriteinfo_t object and everything it owns: the source file
 * name, every frame image, every animation, and the object itself.
 */
void spriteinfo_destroy(spriteinfo_t *info)
{
    int i;

    /* free(NULL) is a no-op, so the previous NULL guard was redundant. */
    free(info->source_file);

    if(info->frame_data != NULL) {
        for(i=0; i<info->frame_count; i++)
            image_destroy(info->frame_data[i]);
        free(info->frame_data);
    }

    if(info->animation_data != NULL) {
        for(i=0; i<info->animation_count; i++)
            info->animation_data[i] = animation_delete(info->animation_data[i]);
        free(info->animation_data);
    }

    free(info);
}
/*
 * Scan the captured screen as a 5x5 grid of blocks, recognize the glyph
 * in each block, and record its grid position in glyph[number].
 * range selects which half of the font the recognizer searches.
 * Prints the recognized grid to stdout as it goes.
 */
void recognize(struct image *image, struct image *font, struct Glyph *glyph, int range)
{
    struct image *cell = image_new(BLOCK_X, BLOCK_Y);
    int row, col;

    for (row = 0; row < 5; ++row) {
        for (col = 0; col < 5; ++col) {
            int number;
            struct Glyph *entry;

            image_getblock(image, cell, col, row);
            number = recognize_font(cell, font, range);

            /* Index by glyph number so touches can later be sent
             * in numeric order. */
            entry = &glyph[number];
            entry->number = number + 1;
            entry->x = col;
            entry->y = row;

            printf("%02d ", number + 1);
        }
        printf("\n");
    }

    image_destroy(cell);
}
/*
 * OpenVG entry point: write a block of pixels from client memory to the
 * drawing surface at (dx, dy).
 * Validates format, pointer alignment, and dimensions, then stages the
 * pixels through a temporary vg_image.  Flushes the pipe before
 * returning so rendering is complete.
 */
void vgWritePixels(const void * data, VGint dataStride,
                   VGImageFormat dataFormat,
                   VGint dx, VGint dy,
                   VGint width, VGint height)
{
    struct vg_context *ctx = vg_current_context();
    struct pipe_context *pipe = ctx->pipe;

    if (!supported_image_format(dataFormat)) {
        vg_set_error(ctx, VG_UNSUPPORTED_IMAGE_FORMAT_ERROR);
        return;
    }
    if (!data || !is_aligned(data)) {
        vg_set_error(ctx, VG_ILLEGAL_ARGUMENT_ERROR);
        return;
    }
    if (width <= 0 || height <= 0) {
        vg_set_error(ctx, VG_ILLEGAL_ARGUMENT_ERROR);
        return;
    }

    vg_validate_state(ctx);
    {
        struct vg_image *img = image_create(dataFormat, width, height);
        /* BUG FIX: image_create can fail; the result was previously used
         * without a NULL check. */
        if (img) {
            image_sub_data(img, data, dataStride, dataFormat,
                           0, 0, width, height);
            /* Writes the staged pixels straight to the surface; an
             * alternative matrix-translate + image_draw approach was
             * considered and rejected upstream. */
            image_set_pixels(dx, dy, img, 0, 0, width, height);
            image_destroy(img);
        }
    }
    /* make sure rendering has completed */
    pipe->flush(pipe, PIPE_FLUSH_RENDER_CACHE, NULL);
}
/*
 * libjpeg error_exit override.
 * Formats and prints the library's error message, releases the
 * half-constructed image tracked in the custom error manager (mgr->im),
 * then longjmps back to the setjmp point in the loader instead of
 * letting libjpeg call exit().  Never returns.
 */
static void image_load_jpeg_error_exit(j_common_ptr cinfo)
{
    struct jpeg_decompress_struct* c = (struct jpeg_decompress_struct*) cinfo ;
    /* Our error manager extends jpeg_error_mgr, so c->err doubles as it. */
    myerror_mgr* mgr = (myerror_mgr*) c->err;
    char buffer [JMSG_LENGTH_MAX];
    (*cinfo->err->format_message) (cinfo, buffer); // prints the error message.
    printf ("error message: %s\n", buffer);
    /* Clean up the partially loaded image before unwinding; free the
     * struct itself only when the manager owns it. */
    if(mgr->im) {
        image_destroy((image_t*) mgr->im);
        if(mgr->image_is_owned_p) free((void*) mgr->im);
    }
    ERROR1("jpeg load error.");
    longjmp(mgr->env, 1);
}
/*
 * Per-tick callback of the image-delete animation.
 * Advances the image's delete fraction and queues a resize; when the
 * animation completes (t >= 1.0) it emits IMAGE_REMOVED, locates the
 * image in the backing array, and removes it.
 */
static void
delete_animation_func (CbAnimation *self, double t, gpointer user_data)
{
  CbComposeImages *compose_images = CB_COMPOSE_IMAGES (self->owner);
  Image *image = user_data;

  g_assert (image != NULL);
  g_assert (image->deleted);

  /* fraction of 1.0 is 'fully deleted' */
  image->fraction = t;
  gtk_widget_queue_resize (self->owner);

  if (t >= 1.0)
    {
      const guint n_images = compose_images->images->len;
      guint index = 0;

      g_signal_emit (self->owner, signals[IMAGE_REMOVED], 0, image->path);

      /* Find the image's slot in the array by pointer identity. */
      for (index = 0; index < n_images; index ++)
        {
          const Image *other = &g_array_index (compose_images->images, Image, index);
          if (other == image)
            break;
        }

      /* FIX: dropped the tautological (index >= 0) assertion — index is
       * an unsigned guint, so it was always true.  This assertion still
       * catches the "image not found" case (index == n_images). */
      g_assert (index < n_images);

      /* Now remove the image from our actual list */
      image_destroy (image);
      g_array_remove_index (compose_images->images, index);
    }
}
/*
 * Allocate and initialize a DECODER from the bitstream's video header.
 * Fills param->handle/width/height on success and returns XVID_ERR_OK;
 * returns XVID_ERR_MEMORY on any allocation failure, releasing every
 * resource acquired so far (the original leaked dec and dec->slice on
 * several error paths).
 */
xint m4v_init_decoder(DEC_CTRL * param, uint8 * video_header, xint header_size)
{
    DECODER *dec;

    dec = xvid_malloc(sizeof(DECODER), CACHE_LINE);
    if (dec == NULL) {
        return XVID_ERR_MEMORY;
    }
    param->handle = dec;

    /* decode video header for frame width & height */
    m4v_decode_header(dec, video_header, header_size);
    param->width = dec->width;
    param->height = dec->height;

    /* Derived macroblock geometry; images get an EDGE_SIZE border. */
    dec->mb_width = (dec->width + 15) / 16;
    dec->mb_height = (dec->height + 15) / 16;
    dec->num_mb = dec->mb_height * dec->mb_width;
    dec->nbits_mba = log2bin(dec->num_mb - 1);
    dec->edged_width = 16 * dec->mb_width + 2 * EDGE_SIZE;
    dec->edged_height = 16 * dec->mb_height + 2 * EDGE_SIZE;
    dec->decoder_clock = 0;

    dec->slice = xvid_malloc(sizeof(xint) * (dec->mb_width + 1) * (dec->mb_height + 1), CACHE_LINE);
    if (dec->slice == NULL) {
        /* BUG FIX: dec was leaked here. */
        xvid_free(dec);
        return XVID_ERR_MEMORY;
    }

    if (image_create(&dec->cur, dec->edged_width, dec->edged_height)) {
        /* BUG FIX: dec->slice was leaked on this and the paths below. */
        xvid_free(dec->slice);
        xvid_free(dec);
        return XVID_ERR_MEMORY;
    }

    if (image_create(&dec->refn, dec->edged_width, dec->edged_height)) {
        image_destroy(&dec->cur, dec->edged_width, dec->edged_height);
        xvid_free(dec->slice);
        xvid_free(dec);
        return XVID_ERR_MEMORY;
    }

    dec->mbs = xvid_malloc(sizeof(MACROBLOCK) * dec->mb_width * dec->mb_height, CACHE_LINE);
    if (dec->mbs == NULL) {
        image_destroy(&dec->refn, dec->edged_width, dec->edged_height);
        image_destroy(&dec->cur, dec->edged_width, dec->edged_height);
        xvid_free(dec->slice);
        xvid_free(dec);
        return XVID_ERR_MEMORY;
    }

    init_timer();
    init_vlc_tables();

    return XVID_ERR_OK;
}
/*
 * RT kernel.
 * Ray-tracing benchmark driver: builds a fixed 6-sphere scene, renders
 * it with the configured thread count, and reports timing (and power on
 * Xeon Phi).  Writes out.ppm when verbose.
 */
int main(int argc, char **argv)
{
    /* Number of spheres. */
    #define NR_SPHERES 6

    int i;                           /* Loop index.  */
    image_t img;                     /* Image.       */
    sphere_t spheres[NR_SPHERES];    /* Spheres.     */
    uint64_t end;                    /* End time.    */
    uint64_t start;                  /* Start time.  */
#ifdef _XEON_PHI_
    double power;
#endif

    /* Sets globals such as nthreads/verbose — and presumably the `p`
     * used below for render parameters (defined outside this view;
     * TODO confirm). */
    readargs(argc, argv);

    timer_init();
    omp_set_num_threads(nthreads);

    /* Benchmark initialization. */
    if (verbose)
        printf("initializing...\n");
    start = timer_get();

    /* Ground sphere. */
    spheres[0] = sphere_create(
        VECTOR(0, -10004, -20),      /* Center         */
        10000,                       /* Radius         */
        VECTOR(0.2, 0.2, 0.2),       /* Surface Color  */
        0,                           /* Reflection     */
        0,                           /* Transparency   */
        VECTOR(0, 0, 0));            /* Emission Color */

    /* Red sphere. */
    spheres[1] = sphere_create(
        VECTOR(0, 0, -20),           /* Center         */
        4,                           /* Radius         */
        VECTOR(1.00, 0.32, 0.36),    /* Surface Color  */
        1,                           /* Reflection     */
        0.5,                         /* Transparency   */
        VECTOR(0, 0, 0));            /* Emission Color */

    /* Yellow sphere. */
    spheres[2] = sphere_create(
        VECTOR(5, -1, -15),
        2,
        VECTOR(0.90, 0.76, 0.46),
        1,
        0.0,
        VECTOR(0, 0, 0));

    /* Blue sphere. */
    spheres[3] = sphere_create(
        VECTOR(5, 0, -25),
        3,
        VECTOR(0.65, 0.77, 0.97),
        1,
        0.0,
        VECTOR(0, 0, 0));

    /* Gray sphere. */
    spheres[4] = sphere_create(
        VECTOR(-5.5, 0, -15),
        3,
        VECTOR(0.90, 0.90, 0.90),
        1,
        0.0,
        VECTOR(0, 0, 0));

    /* Light source. */
    spheres[5] = sphere_create(
        VECTOR(0, 30, -30),
        3,
        VECTOR(0, 0, 0),
        0,
        0,
        VECTOR(3, 3, 3));

    end = timer_get();
    if (verbose)
        printf("  time spent: %f\n", timer_diff(start, end)*MICROSEC);

#ifdef _XEON_PHI_
    power_init();
#endif

    /* Ray tracing. */
    if (verbose)
        printf("rendering scene...\n");
    start = timer_get();
    img = render(spheres, NR_SPHERES, p->height, p->width, p->depth);
    end = timer_get();

#ifdef _XEON_PHI_
    power = power_end();
#endif

    if (verbose)
        image_export("out.ppm", img, IMAGE_PPM);

    printf("timing statistics:\n");
    printf("  total time: %f\n", timer_diff(start, end)*MICROSEC);
#ifdef _XEON_PHI_
    printf("  average power: %f\n", power*0.000001);
#endif

    /* Housekeeping. */
    for (i = 0; i < NR_SPHERES; i++)
        sphere_destroy(spheres[i]);
    image_destroy(img);

    return (EXIT_SUCCESS);
}
/*
 * intro_release()
 * Releases the introduction scene: its background image and its input
 * object (both are module-level globals).
 */
void intro_release()
{
    image_destroy(bg);
    input_destroy(in);
}
/* converts giflib format image into enfle format image */
static int gif_convert(Image *p, GIF_info *g_info, GIF_image *image)
{
    GIF_ct *ct;
    int i, if_animated;
    //int transparent_disposal;

#if 0
    if (image->next != NULL) {
        if ((p->next = image_create()) == NULL) {
            image_destroy(p);
            return 0;
        }
        if (!gif_convert(p->next, g_info, image->next)) {
            image_destroy(p);
            return 0;
        }
    } else
        p->next = NULL;
#endif

    //swidth = g_info->sd->width;
    //sheight = g_info->sd->height;
    image_left(p) = image->id->left;
    image_top(p) = image->id->top;
    image_width(p) = image->id->width;
    image_height(p) = image->id->height;

#if 0
    if (image_width(p) > swidth || image_height(p) > sheight) {
        show_message("screen (%dx%d) but image (%dx%d)\n", swidth, sheight, p->width, p->height);
        swidth = image_width(p);
        sheight = image_height(p);
    }
#endif

    /* A local color table overrides the screen's global table depth. */
    p->ncolors = image->id->lct_follows ? 1 << image->id->depth : 1 << g_info->sd->depth;
    p->type = _INDEX;
    //p->delay = image->gc->delay ? image->gc->delay : 1;

    if_animated = g_info->npics > 1 ? 1 : 0;
    debug_message("GIF: %d pics animation %d\n", g_info->npics, if_animated);

#if 0
    if (image->gc->transparent) {
        p->transparent_disposal = if_animated ? _TRANSPARENT : transparent_disposal;
        p->transparent.index = image->gc->transparent_index;
    } else
        p->transparent_disposal = _DONOTHING;
    p->image_disposal = image->gc->disposal;
    p->background.index = g_info->sd->back;
#endif

    /* Prefer the local color table, fall back to the global one. */
    if (image->id->lct_follows)
        ct = image->id->lct;
    else if (g_info->sd->gct_follows)
        ct = g_info->sd->gct;
    else {
        fprintf(stderr, "Null color table..\n");
        ct = NULL;
    }

    /* BUG FIX: ct was dereferenced unconditionally below even after
     * being set to NULL above; only copy the palette when a table
     * actually exists. */
    if (ct != NULL) {
        for (i = 0; i < (int)p->ncolors; i++) {
            p->colormap[i][0] = ct->cell[i]->value[0];
            p->colormap[i][1] = ct->cell[i]->value[1];
            p->colormap[i][2] = ct->cell[i]->value[2];
        }
    }

    image_bpl(p) = image_width(p);
    if (!image_image(p))
        image_image(p) = memory_create();
    if (memory_alloc(image_image(p), image_bpl(p) * image_height(p)) == NULL)
        return 0;
    memcpy(memory_ptr(image_image(p)), image->data, image_bpl(p) * image_height(p));

    return 1;
}
/*
 * Periodic snapshot writer thread.
 * Each cycle: grabs a frame from the filter chain, JPEG-compresses it,
 * writes it to "<path>.tmp" (path is strftime-expanded), optionally runs
 * a user command on the temp file, then atomically renames it into
 * place.  Sleeps fctx->interval seconds between cycles; a non-positive
 * interval exits the process after one delayed warning.
 */
void * thread(void *arg)
{
    struct fw_ctx *fctx;
    int fd;
    char buf[1024];
    struct image curimg;
    struct grab_ctx idx;
    struct jpegbuf jbuf;
    int ret;
    int cpid;
    char tsfnbuf[1024];
    time_t now;
    struct tm tm;

    fctx = ((struct module_ctx *) arg)->custom;
    memset(&idx, 0, sizeof(idx));

    for (;;) {
        /* Expand the configured path with the current local time and
         * derive the temporary file name from it. */
        time(&now);
        localtime_r(&now, &tm);
        strftime(tsfnbuf, sizeof(tsfnbuf) - 1, fctx->path, &tm);
        snprintf(buf, sizeof(buf) - 1, "%s.tmp", tsfnbuf);

        filter_get_image(&curimg, &idx, ((struct module_ctx *) arg)->node, NULL);
        jpeg_compress(&jbuf, &curimg, ((struct module_ctx *) arg)->node);

        fd = open(buf, O_WRONLY | O_CREAT | O_TRUNC, 0666);
        if (fd < 0) {
            log_log(MODNAME, "Open of %s failed: %s\n", buf, strerror(errno));
            goto freesleeploop;
        }
        /* -1 means "no chmod configured". */
        if (fctx->chmod != -1)
            fchmod(fd, fctx->chmod);
        ret = write(fd, jbuf.buf, jbuf.bufsize);
        if (ret != jbuf.bufsize) {
            log_log(MODNAME, "Write to %s failed: %s\n", buf, (ret == -1) ? strerror(errno) : "short write");
            close(fd);
            unlink(buf);
            goto freesleeploop;
        }
        close(fd);

        if (fctx->cmd) {
            cpid = fork();
            if (cpid < 0) {
                log_log(MODNAME, "fork() failed: %s\n", strerror(errno));
                unlink(buf);
                goto freesleeploop;
            }
            if (!cpid) {
                /* child: drop stdin and all fds above stderr before
                 * exec'ing the user command on the temp file. */
                close(STDIN_FILENO);
                for (fd = 3; fd < 1024; fd++)
                    close(fd);
                execlp(fctx->cmd, fctx->cmd, buf, NULL);
                /* notreached unless error */
                log_log(MODNAME, "exec(\"%s\") failed: %s\n", fctx->cmd, strerror(errno));
                _exit(0);
            }
            /* Reap the child, retrying across signal interruptions. */
            do
                ret = waitpid(cpid, NULL, 0);
            while (ret == -1 && errno == EINTR);
            /* The command may consume/delete the temp file; skip the
             * rename if it is gone. */
            ret = access(buf, F_OK);
            if (ret)
                goto freesleeploop;
        }

        /* Atomic publish: rename temp file onto the final name. */
        ret = rename(buf, tsfnbuf);
        if (ret != 0) {
            log_log(MODNAME, "Rename of %s to %s failed: %s\n", buf, tsfnbuf, strerror(errno));
            unlink(buf);
            goto freesleeploop;
        }

    freesleeploop:
        free(jbuf.buf);
        image_destroy(&curimg);
        if (fctx->interval > 0)
            sleep(fctx->interval);
        else {
            sleep(5);
            log_log(MODNAME, "Negative interval specified, exiting now.\n");
            exit(0);
        }
    }
}
/*
 * Release the image backing a font, if one was ever attached.
 * The font value itself is owned by the caller.
 */
void font_destroy(font Font)
{
    if (Font.m_image != nullptr)
        image_destroy(Font.m_image);
}
/* Erlang driver stop callback: destroy the image state bound to this
 * driver instance, then free the driver data block itself. */
static void egd_drv_stop(ErlDrvData handle)
{
    egd_data *data = (egd_data *) handle;

    image_destroy(data);
    driver_free((char *) handle);
}
/*
 * Decode an in-memory JPEG into a newly allocated siv_image_t (RGB,
 * 3 bytes per pixel).  Returns NULL on decode failure; libjpeg errors
 * unwind here via longjmp(jmpbuf) from the error_exit handler, after
 * which the partial image is released with image_destroy().
 *
 * NOTE(review): `rows` is a non-volatile local modified after setjmp()
 * and read after longjmp() — strictly this is unspecified; consider
 * making it volatile.  The 128-byte info buffers are assumed large
 * enough for the formatted strings — confirm.
 */
static struct siv_image_t *jpeg_read(unsigned char *data, unsigned long size)
{
    struct jpeg_decompress_struct cinfo;
    struct jpeg_error_mgr jerr;
    struct data_src_t src;
    struct siv_image_t *img;
    unsigned char **rows = NULL;

    img = malloc(sizeof *img);
    img->has_alpha = 0;
    // fixme: this fixed the segv, but is there something else going on?
    img->width = 0;
    img->height = 0;
    img->data = NULL;
    img->comment = NULL;
    img->short_info = NULL;
    img->long_info = NULL;

    memset(&cinfo, 0, sizeof cinfo);
    memset(&jerr, 0, sizeof jerr);
    cinfo.err = jpeg_std_error(&jerr);
    /* Route fatal libjpeg errors to our longjmp-based handler. */
    jerr.error_exit = error_exit;
    jpeg_create_decompress(&cinfo);

    /* Custom source manager that feeds libjpeg from the memory buffer. */
    memset(&src, 0, sizeof src);
    src.pub.init_source = init_source;
    src.pub.fill_input_buffer = fill_input_buffer;
    src.pub.skip_input_data = skip_input_data;
    src.pub.resync_to_restart = jpeg_resync_to_restart;
    src.pub.term_source = term_source;
    src.data = data;
    src.size = size;
    cinfo.src = (struct jpeg_source_mgr *) &src;

    if (setjmp(jmpbuf) == 0) {
        int y;

        jpeg_read_header(&cinfo, TRUE);
        cinfo.out_color_space = JCS_RGB;   /* always decode to RGB */
        jpeg_start_decompress(&cinfo);
        img->width = cinfo.output_width;
        img->height = cinfo.output_height;
        img->data = malloc(3 * img->width * img->height);
        if (img->data == NULL) {
            fprintf(stderr, "out of memory.\n");
            longjmp(jmpbuf, 1);
        }
        img->short_info = malloc(128);
        img->long_info = malloc(128);
        sprintf(img->short_info, "JPEG %dx%d", img->width, img->height);
        sprintf(img->long_info, "JPEG %dx%d", img->width, img->height);

        /* Build a row-pointer table into the contiguous pixel buffer. */
        rows = malloc(sizeof *rows * cinfo.output_height);
        rows[0] = img->data;
        for (y = 1; y < cinfo.output_height; y++) {
            rows[y] = rows[y - 1] + (cinfo.output_components * cinfo.output_width);
        }

        while (cinfo.output_scanline < cinfo.output_height) {
            jpeg_read_scanlines(&cinfo, &rows[cinfo.output_scanline],
                                cinfo.output_height - cinfo.output_scanline);
        }

        jpeg_finish_decompress(&cinfo);
        jpeg_destroy_decompress(&cinfo);
        free(rows);
        return img;
    } else {
        /* Arrived via longjmp: abort cleanly and release everything. */
        jpeg_abort_decompress(&cinfo);
        jpeg_destroy_decompress(&cinfo);
        image_destroy(img);
        if (rows != NULL)
            free(rows);
        return NULL;
    }
}
/*
 * Decode one representative frame of a video and return it as a
 * MediaScanImage for thumbnail generation, or NULL on failure.
 * Strategy: seek ~10% into the stream, then hunt for a decodable
 * keyframe (up to ~200 skipped packets); if none is found, fall back to
 * the very first frame.  The decoded frame is converted to RGB24 via
 * swscale and repacked into the image's pixel buffer.
 */
MediaScanImage *video_create_image_from_frame(MediaScanVideo *v, MediaScanResult *r)
{
  MediaScanImage *i = image_create();
  AVFormatContext *avf = (AVFormatContext *)r->_avf;
  av_codecs_t *codecs = (av_codecs_t *)v->_codecs;
  AVCodec *codec = (AVCodec *)v->_avc;
  AVFrame *frame = NULL;
  AVPacket packet;
  struct SwsContext *swsc = NULL;
  int got_picture;
  /* Stream duration converted into the video stream's time base. */
  int64_t duration_tb = ((double)avf->duration / AV_TIME_BASE) / av_q2d(codecs->vs->time_base);
  uint8_t *src;
  int x, y;
  int ofs = 0;
  int no_keyframe_found = 0;
  int skipped_frames = 0;

  if ((avcodec_open(codecs->vc, codec)) < 0) {
    LOG_ERROR("Couldn't open video codec %s for thumbnail creation\n", codec->name);
    goto err;
  }

  frame = avcodec_alloc_frame();
  if (!frame) {
    LOG_ERROR("Couldn't allocate a video frame\n");
    goto err;
  }

  av_init_packet(&packet);

  i->path = v->path;
  i->width = v->width;
  i->height = v->height;

  // XXX select best video frame, for example:
  // * Skip frames of all the same color (e.g. blank intro frames
  // * Use edge detection to skip blurry frames
  //   * http://code.google.com/p/fast-edge/
  //   * http://en.wikipedia.org/wiki/Canny_edge_detector
  // * Use a frame some percentage into the video, what percentage?
  // * If really ambitious, use OpenCV for finding a frame with a face?

  // XXX other ways to seek if this fails
  // XXX for now, seek 10% into the video
  av_seek_frame(avf, codecs->vsid, (int)((double)duration_tb * 0.1), 0);

  for (;;) {
    int ret;
    int rgb_bufsize;
    AVFrame *frame_rgb = NULL;
    uint8_t *rgb_buffer = NULL;

    // Give up if we already tried the first frame
    if (no_keyframe_found) {
      LOG_ERROR("Error decoding video frame for thumbnail: %s\n", v->path);
      goto err;
    }

    if ((ret = av_read_frame(avf, &packet)) < 0) {
      if (ret == AVERROR_EOF || skipped_frames > 200) {
        /* Fallback: rewind and take whatever the first frame is. */
        LOG_DEBUG("Couldn't find a keyframe, using first frame\n");
        no_keyframe_found = 1;
        av_seek_frame(avf, codecs->vsid, 0, 0);
        av_read_frame(avf, &packet);
      }
      else {
        LOG_ERROR("Couldn't read video frame (%s): ", v->path);
        print_averror(ret);
        goto err;
      }
    }

    // Skip frame if it's not from the video stream
    if (!no_keyframe_found && packet.stream_index != codecs->vsid) {
      av_free_packet(&packet);
      skipped_frames++;
      continue;
    }

    // Skip non-key-frames
    if (!no_keyframe_found && !(packet.flags & AV_PKT_FLAG_KEY)) {
      av_free_packet(&packet);
      skipped_frames++;
      continue;
    }

    // Skip invalid packets, not sure why this isn't an error from av_read_frame
    if (packet.pos < 0) {
      av_free_packet(&packet);
      skipped_frames++;
      continue;
    }

    LOG_DEBUG("Using video packet: pos %lld size %d, stream_index %d, duration %d\n",
              packet.pos, packet.size, packet.stream_index, packet.duration);

    if ((ret = avcodec_decode_video2(codecs->vc, frame, &got_picture, &packet)) < 0) {
      LOG_ERROR("Error decoding video frame for thumbnail: %s\n", v->path);
      print_averror(ret);
      goto err;
    }

    if (!got_picture) {
      if (skipped_frames > 200) {
        LOG_ERROR("Error decoding video frame for thumbnail: %s\n", v->path);
        goto err;
      }
      if (!no_keyframe_found) {
        // Try next frame
        av_free_packet(&packet);
        skipped_frames++;
        continue;
      }
    }

    // use swscale to convert from source format to RGBA in our buffer with no resizing
    // XXX what scaler is fastest here when not actually resizing?
    swsc = sws_getContext(i->width, i->height, codecs->vc->pix_fmt,
                          i->width, i->height, PIX_FMT_RGB24,
                          SWS_FAST_BILINEAR, NULL, NULL, NULL);
    if (!swsc) {
      LOG_ERROR("Unable to get swscale context\n");
      goto err;
    }

    frame_rgb = avcodec_alloc_frame();
    if (!frame_rgb) {
      LOG_ERROR("Couldn't allocate a video frame\n");
      goto err;
    }

    // XXX There is probably a way to get sws_scale to write directly to i->_pixbuf in our RGBA format
    rgb_bufsize = avpicture_get_size(PIX_FMT_RGB24, i->width, i->height);
    rgb_buffer = av_malloc(rgb_bufsize);
    if (!rgb_buffer) {
      LOG_ERROR("Couldn't allocate an RGB video buffer\n");
      av_free(frame_rgb);
      goto err;
    }
    LOG_MEM("new rgb_buffer of size %d @ %p\n", rgb_bufsize, rgb_buffer);

    avpicture_fill((AVPicture *)frame_rgb, rgb_buffer, PIX_FMT_RGB24, i->width, i->height);

    // Convert image to RGB24
    sws_scale(swsc, frame->data, frame->linesize, 0, i->height,
              frame_rgb->data, frame_rgb->linesize);

    // Allocate space for our version of the image
    image_alloc_pixbuf(i, i->width, i->height);

    /* Repack tightly-strided RGB24 into the image's pixel buffer,
     * one COL()-packed value per pixel. */
    src = frame_rgb->data[0];
    ofs = 0;
    for (y = 0; y < i->height; y++) {
      for (x = 0; x < i->width * 3; x += 3) {
        i->_pixbuf[ofs++] = COL(src[x], src[x + 1], src[x + 2]);
      }
      src += i->width * 3;
    }

    // Free the frame
    LOG_MEM("destroy rgb_buffer @ %p\n", rgb_buffer);
    av_free(rgb_buffer);
    av_free(frame_rgb);

    // Done!
    goto out;
  }

err:
  /* On failure the image is discarded and NULL is returned. */
  image_destroy(i);
  i = NULL;

out:
  sws_freeContext(swsc);
  av_free_packet(&packet);
  if (frame)
    av_free(frame);
  avcodec_close(codecs->vc);

  return i;
}