/* Initialize the schroedinger (Dirac) video decoder for stream s.
 * Decodes the first picture to establish the output format.
 * Returns 1 on success, 0 if the first picture cannot be decoded. */
static int init_schroedinger(bgav_stream_t * s)
  {
  schroedinger_priv_t * p;

  schro_init();

  p = calloc(1, sizeof(*p));
  p->last_pts = GAVL_TIME_UNDEFINED;
  s->decoder_priv = p;

  p->dec = schro_decoder_new();
  p->frame = gavl_video_frame_create(NULL);
  s->vframe = p->frame;

  /* Get format: decode one picture so the format fields are filled in */
  if(decode_picture(s) != GAVL_SOURCE_OK)
    return 0;

  gavl_metadata_set(&s->m, GAVL_META_FORMAT, "Dirac");

  /* With no extradata the sequence header travels in-band,
   * so it counts as already sent */
  if(!s->ext_data)
    p->header_sent = 1;
  return 1;
  }
/* * picture_to_frame reads and decodes * the picture file * * side effects: allocates an AVFrame which * must be freed with av_free_frame */ FFMPEG_tmp * picture_to_frame(char *filepath) { AVFormatContext *fctx = NULL; int stream_no; AVCodecContext *cctx = NULL; AVCodec *c = NULL; AVFrame *frame = NULL; FFMPEG_tmp *tmp = malloc(sizeof(FFMPEG_tmp)); fctx = get_fcontext(filepath); if (fctx == NULL) { fprintf(stderr, "Fatal: could not open %s\n", filepath); avformat_close_input(&fctx); exit(1); } stream_no = get_video_stream(fctx); if (stream_no == -1) { fprintf(stderr, "Fatal: could not find video stream\n"); avformat_close_input(&fctx); exit(1); } cctx = get_ccontext(fctx, stream_no); if (cctx == NULL) { fprintf(stderr, "Fatal: no codec context initialized\n"); avcodec_close(cctx); avformat_close_input(&fctx); exit(1); } c = get_codec(cctx); if (c == NULL) { fprintf(stderr, "Fatal: could not open codec\n"); avcodec_close(cctx); avformat_close_input(&fctx); exit(1); } frame = decode_picture(fctx, stream_no, cctx); if (frame == NULL) { avcodec_close(cctx); avformat_close_input(&fctx); fprintf(stderr, "Fatal: could not decode image\n"); exit(1); } /* clean up successful run */ tmp->frame = frame; tmp->fctx = fctx; tmp->cctx = cctx; tmp->c = c; return tmp; }
/* Parse the VP8 frame header from buf, then hand the whole buffer to
 * the picture decoder.  Header-parsing errors are returned unchanged. */
static GstVaapiDecoderStatus
decode_buffer (GstVaapiDecoderVp8 * decoder, const guchar * buf,
    guint buf_size)
{
  GstVaapiDecoderVp8Private *const priv = &decoder->priv;
  GstVaapiDecoderStatus status =
      parse_frame_header (decoder, buf, buf_size, &priv->frame_hdr);

  if (status == GST_VAAPI_DECODER_STATUS_SUCCESS)
    status = decode_picture (decoder, buf, buf_size);
  return status;
}
/* Replay one recorded image-stack operation: either redraw a picture
 * resource (ADD_PIC) or place a view cel on the picture (ADD_VIEW).
 * Unknown types are ignored. */
void replay_image_stack_call(UINT8 type, SINT16 p1, SINT16 p2, SINT16 p3,
	SINT16 p4, SINT16 p5, SINT16 p6, SINT16 p7)
{
	if (type == ADD_PIC) {
		_D (_D_WARN "--- decoding picture %d ---", p1);
		agi_load_resource (rPICTURE, p1);
		decode_picture (p1, p2);
	} else if (type == ADD_VIEW) {
		agi_load_resource (rVIEW, p1);
		add_to_pic (p1, p2, p3, p4, p5, p6, p7);
	}
}
/* Load picture resource `res`, then decode and compare it against the
 * raw reference `raw` fifty times in a row, stopping at the first
 * mismatch.  Frees the decoded picture data before returning. */
static test_result pic (char *res, char *raw)
{
	test_result outcome = TEST_OK;
	int pass;

	if (load_pic (res) == TEST_FAIL)
		return TEST_FAIL;

	test_report ("drawing and comparing 50x");
	for (pass = 0; pass < 50; pass++) {
		decode_picture (0, 1);
		outcome = compare_pic (raw);
		if (outcome != TEST_OK)
			break;
	}

	free (game.pictures[0].rdata);
	return outcome;
}
/* Dispatch a single parsed JPEG marker segment to the matching
 * decoding routine and return the resulting status. */
static GstVaapiDecoderStatus
decode_segment (GstVaapiDecoderJpeg * decoder, GstJpegSegment * seg)
{
  GstVaapiDecoderJpegPrivate *const priv = &decoder->priv;
  GstVaapiDecoderStatus ret = GST_VAAPI_DECODER_STATUS_SUCCESS;

  switch (seg->marker) {
    case GST_JPEG_MARKER_SOI:
      /* Start of image: reset restart bookkeeping, note SOI was seen */
      priv->mcu_restart = 0;
      priv->decoder_state |= GST_JPEG_VIDEO_STATE_GOT_SOI;
      break;
    case GST_JPEG_MARKER_EOI:
      priv->decoder_state = 0;
      break;
    case GST_JPEG_MARKER_DAC:
      GST_ERROR ("unsupported arithmetic coding mode");
      ret = GST_VAAPI_DECODER_STATUS_ERROR_UNSUPPORTED_PROFILE;
      break;
    case GST_JPEG_MARKER_DHT:
      ret = decode_huffman_table (decoder, seg);
      break;
    case GST_JPEG_MARKER_DQT:
      ret = decode_quant_table (decoder, seg);
      break;
    case GST_JPEG_MARKER_DRI:
      ret = decode_restart_interval (decoder, seg);
      break;
    case GST_JPEG_MARKER_SOS:
      ret = decode_scan (decoder, seg);
      break;
    default:
      /* SOFn frame headers start the picture; other markers ignored */
      if (seg->marker >= GST_JPEG_MARKER_SOF_MIN &&
          seg->marker <= GST_JPEG_MARKER_SOF_MAX)
        ret = decode_picture (decoder, seg);
      break;
  }
  return ret;
}
/* Produce one decoded video frame for stream s.  Ensures a decoded
 * schro picture is available, points the output frame's planes at the
 * decoder's component buffers (no pixel copy), attaches the cached
 * timestamp, then releases the schro frame. */
static gavl_source_status_t decode_schroedinger(bgav_stream_t * s,
                                                gavl_video_frame_t * frame)
  {
  int i;
  gavl_source_status_t st;
  schroedinger_priv_t * p = s->decoder_priv;

  /* Make sure a decoded picture is available */
  if(!p->dec_frame)
    {
    st = decode_picture(s);
    if(st != GAVL_SOURCE_OK)
      return st;
    }

  /* Wire the output frame to the decoder's planes and strides */
  for(i = 0; i < 3; i++)
    {
    p->frame->planes[i]  = p->dec_frame->components[i].data;
    p->frame->strides[i] = p->dec_frame->components[i].stride;
    }

  bgav_pts_cache_get_first(&p->pc, p->frame);

  schro_frame_unref(p->dec_frame);
  p->dec_frame = NULL;
  return GAVL_SOURCE_OK;
  }
int view_pictures () { int ec = err_OK; char x[64]; int i, pic = 0, dir = 1; game.line_min_print = 1; for (i = 0; ec == err_OK; i = 1) { while (game.dir_pic[pic].offset == _EMPTY) { pic += dir; if (pic < 0) pic = MAX_DIRS - 1; if (pic > MAX_DIRS - 1) { pic = 0; if (i == 0) { /* no pics? */ ec = 1; fprintf (stderr, "No pictures found\n"); goto end_view; } } } _D ("picture = %d", pic); if ((ec = agi_load_resource (rPICTURE, pic)) != err_OK) { _D (_D_CRIT "Whoops. bad pic %d", pic); ec = err_OK; pic += dir; goto next_pic; } print_text ("[drawing]", 0, 16, 0, strlen (x) + 1, 0, 15); /* decodes the raw data to useable form */ decode_picture (pic, TRUE); show_pic (); put_screen (); update_statusline: #ifdef USE_HIRES sprintf (x, "Picture:%3i Hi-res: %3s", pic, opt.hires ? " on" : "off"); #else sprintf (x, "Picture:%3i Hi-res: N/A", pic); #endif print_text (x, 0, 0, 0, strlen (x) + 1, 0, 15); sprintf (x, "H:Hi-res P:Vis/Prio +:Next -:Prev"); print_text (x, 0, 0, 23, strlen (x) + 1, 15, 0); sprintf (x, "R:Redraw D:Screen dump Q:Quit"); print_text (x, 0, 0, 24, strlen (x) + 1, 15, 0); while (42) { decode_picture (pic, TRUE); switch (picviewer_get_key()) { case 'q': goto end_view; #ifdef USE_HIRES #ifdef USE_MOUSE case BUTTON_RIGHT: #endif case 'h': opt.hires = !opt.hires; show_pic (); put_screen (); goto update_statusline; #endif case 'p': debug.priority = !debug.priority; show_pic (); put_screen (); break; case 'd': /*opt.showscreendraw = !opt.showscreendraw;*/ screen_dump(); goto update_statusline; case 'r': goto next_pic; #ifdef USE_MOUSE case BUTTON_LEFT: if (mouse.x < GFX_WIDTH / 2) goto previous_pic; #endif /* fall through */ case '+': _D ("next pic"); if (pic < MAX_DIRS - 1) pic++; else pic = 0; dir = 1; goto next_pic; case '-': previous_pic: _D ("previous pic"); if (pic > 0) pic--; else pic = MAX_DIRS - 1; i = 0; dir = -1; goto next_pic; } } next_pic: agi_unload_resource (rPICTURE, pic); } end_view: return ec; }
/* Parse one input buffer into JPEG marker segments and drive decoding
 * of a single picture.
 *
 * The loop walks marker segments with gst_jpeg_parse().  A pending
 * scan (SOS header plus its entropy-coded data) accumulates in
 * `scan_seg` and is decoded either when a later marker terminates it
 * or when EOI arrives.  Returns the status of the last decoding step,
 * or ERROR_NO_DATA for an empty/truncated buffer. */
static GstVaapiDecoderStatus
decode_buffer(GstVaapiDecoderJpeg *decoder, GstBuffer *buffer)
{
    GstVaapiDecoderJpegPrivate * const priv = decoder->priv;
    GstVaapiDecoderStatus status = GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
    GstJpegMarkerSegment seg;
    GstJpegScanSegment scan_seg;
    GstClockTime pts;
    guchar *buf;
    guint buf_size, ofs;
    gboolean append_ecs;

    buf = GST_BUFFER_DATA(buffer);
    buf_size = GST_BUFFER_SIZE(buffer);
    if (!buf && buf_size == 0)
        return GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
    memset(&scan_seg, 0, sizeof(scan_seg));

    pts = GST_BUFFER_TIMESTAMP(buffer);
    ofs = 0;
    while (gst_jpeg_parse(&seg, buf, buf_size, ofs)) {
        /* NOTE(review): seg.size < 0 implies a signed segment size —
         * confirm against the GstJpegMarkerSegment declaration. */
        if (seg.size < 0) {
            GST_DEBUG("buffer to short for parsing");
            return GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
        }
        ofs += seg.size;

        /* Decode scan, if complete */
        if (seg.marker == GST_JPEG_MARKER_EOI && scan_seg.header_size > 0) {
            scan_seg.data_size = seg.offset - scan_seg.data_offset;
            scan_seg.is_valid = TRUE;
        }
        if (scan_seg.is_valid) {
            status = decode_scan(
                decoder,
                buf + scan_seg.header_offset,
                scan_seg.header_size,
                buf + scan_seg.data_offset,
                scan_seg.data_size
            );
            if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
                break;
            memset(&scan_seg, 0, sizeof(scan_seg));
        }

        /* By default the current segment's bytes extend the pending
         * scan's entropy-coded data; markers below clear the flag. */
        append_ecs = TRUE;

        switch (seg.marker) {
        case GST_JPEG_MARKER_SOI:
            /* Start of image: reset per-picture table/restart state */
            priv->has_quant_table = FALSE;
            priv->has_huf_table = FALSE;
            priv->mcu_restart = 0;
            status = GST_VAAPI_DECODER_STATUS_SUCCESS;
            break;
        case GST_JPEG_MARKER_EOI:
            if (decode_current_picture(decoder)) {
                /* Get out of the loop, trailing data is not needed */
                status = GST_VAAPI_DECODER_STATUS_SUCCESS;
                goto end;
            }
            status = GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
            break;
        case GST_JPEG_MARKER_DHT:
            status = decode_huffman_table(decoder, buf + seg.offset, seg.size);
            break;
        case GST_JPEG_MARKER_DQT:
            status = decode_quant_table(decoder, buf + seg.offset, seg.size);
            break;
        case GST_JPEG_MARKER_DRI:
            status = decode_restart_interval(decoder, buf + seg.offset,
                seg.size);
            break;
        case GST_JPEG_MARKER_DAC:
            GST_ERROR("unsupported arithmetic coding mode");
            status = GST_VAAPI_DECODER_STATUS_ERROR_UNSUPPORTED_PROFILE;
            break;
        case GST_JPEG_MARKER_SOS:
            /* Remember the scan header; its data starts right after */
            scan_seg.header_offset = seg.offset;
            scan_seg.header_size = seg.size;
            scan_seg.data_offset = seg.offset + seg.size;
            scan_seg.data_size = 0;
            append_ecs = FALSE;
            break;
        default:
            /* Restart marker */
            if (seg.marker >= GST_JPEG_MARKER_RST_MIN &&
                seg.marker <= GST_JPEG_MARKER_RST_MAX) {
                append_ecs = FALSE;
                break;
            }
            /* Frame header */
            if (seg.marker >= GST_JPEG_MARKER_SOF_MIN &&
                seg.marker <= GST_JPEG_MARKER_SOF_MAX) {
                status = decode_picture(
                    decoder,
                    seg.marker,
                    buf + seg.offset,
                    seg.size,
                    pts
                );
                break;
            }
            /* Application segments */
            if (seg.marker >= GST_JPEG_MARKER_APP_MIN &&
                seg.marker <= GST_JPEG_MARKER_APP_MAX) {
                status = GST_VAAPI_DECODER_STATUS_SUCCESS;
                break;
            }
            GST_WARNING("unsupported marker (0x%02x)", seg.marker);
            status = GST_VAAPI_DECODER_STATUS_ERROR_BITSTREAM_PARSER;
            break;
        }

        /* Append entropy coded segments */
        if (append_ecs)
            scan_seg.data_size = seg.offset - scan_seg.data_offset;

        if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
            break;
    }
end:
    return status;
}