bool gst_vlc_picture_plane_allocator_hold(
    GstVlcPicturePlaneAllocator *p_allocator, GstBuffer *p_buffer )
{
    picture_t* p_pic = NULL;
    decoder_t* p_dec = p_allocator->p_dec;
    GstVlcPicturePlane *p_mem;
    int i_plane;

    if( !decoder_UpdateVideoFormat( p_dec ) )
        p_pic = decoder_NewPicture( p_dec );
    if( !p_pic )
    {
        msg_Err( p_allocator->p_dec, "failed to acquire picture from vout" );
        return false;
    }

    for( i_plane = 0; i_plane < p_pic->i_planes; i_plane++ )
    {
        p_mem = (GstVlcPicturePlane*) gst_buffer_peek_memory ( p_buffer,
                i_plane );
        p_mem->p_pic = p_pic;
        p_mem->p_plane = &p_pic->p[ i_plane ];
    }

    return true;
}
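The pattern above recurs throughout the examples below: decoder_UpdateVideoFormat() returns 0 on success, so a picture is requested from the video output only after the format update succeeded. A minimal sketch of that idiom, with a hypothetical helper name (not part of the VLC API):

static picture_t *AcquirePicture( decoder_t *p_dec )
{
    picture_t *p_pic = NULL;

    /* decoder_UpdateVideoFormat() returns 0 on success; only then is it
     * meaningful to ask the video output for a buffer. */
    if( decoder_UpdateVideoFormat( p_dec ) == 0 )
        p_pic = decoder_NewPicture( p_dec );

    if( p_pic == NULL )
        msg_Err( p_dec, "failed to acquire picture from vout" );

    return p_pic;
}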
Example #2
/*****************************************************************************
 * DecodePacket: decodes an OggSpots packet.
 *****************************************************************************/
static picture_t* DecodePacket(decoder_t* p_dec, block_t* p_block)
{
    decoder_sys_t* p_sys = p_dec->p_sys;
    uint32_t i_img_offset;
    picture_t* p_pic;

    if (p_block->i_buffer < 20) {
        msg_Dbg(p_dec, "Packet too short");
        goto error;
    }

    /* Byte offset */
    i_img_offset = GetDWLE(p_block->p_buffer);
    if (i_img_offset < 20) {
        msg_Dbg(p_dec, "Invalid byte offset");
        goto error;
    }

    /* Image format */
    if ( !memcmp(&p_block->p_buffer[4], "PNG", 3) ) {
        p_dec->fmt_in.video.i_chroma = VLC_CODEC_PNG;
    }
    else if ( !memcmp(&p_block->p_buffer[4], "JPEG", 4) ) {
        p_dec->fmt_in.video.i_chroma = VLC_CODEC_JPEG;
    }
    else {
        char psz_image_type[8+1];
        strncpy(psz_image_type, (char*)&p_block->p_buffer[4], 8);
        psz_image_type[sizeof(psz_image_type)-1] = '\0';

        msg_Dbg(p_dec, "Unsupported image format: %s", psz_image_type);
        goto error;
    }

    /* We currently ignore the rest of the header and let the image format
     * handle the details */

    p_block->i_buffer -= i_img_offset;
    p_block->p_buffer += i_img_offset;

    p_pic = image_Read(p_sys->p_image, p_block,
                       &p_dec->fmt_in,
                       &p_dec->fmt_out.video);
    if (p_pic == NULL) {
        return NULL;
    }

    p_pic->b_force = true;
    p_dec->fmt_out.i_codec = p_dec->fmt_out.video.i_chroma;
    decoder_UpdateVideoFormat(p_dec);

    return p_pic;

error:
    block_Release(p_block);
    return NULL;
}
Example #3
/*****************************************************************************
 * GetNewPicture: Get a new picture from the vout and set the buf struct
 *****************************************************************************/
static picture_t *GetNewPicture( decoder_t *p_dec )
{
    decoder_sys_t *p_sys = p_dec->p_sys;
    picture_t *p_pic;

    p_dec->fmt_out.video.i_width = p_sys->p_info->sequence->width;
    p_dec->fmt_out.video.i_visible_width =
        p_sys->p_info->sequence->picture_width;
    p_dec->fmt_out.video.i_height = p_sys->p_info->sequence->height;
    p_dec->fmt_out.video.i_visible_height =
        p_sys->p_info->sequence->picture_height;
    p_dec->fmt_out.video.i_sar_num = p_sys->i_sar_num;
    p_dec->fmt_out.video.i_sar_den = p_sys->i_sar_den;

    if( p_sys->p_info->sequence->frame_period > 0 )
    {
        p_dec->fmt_out.video.i_frame_rate =
            (uint32_t)( (uint64_t)1001000000 * 27 /
                        p_sys->p_info->sequence->frame_period );
        p_dec->fmt_out.video.i_frame_rate_base = 1001;
    }

    p_dec->fmt_out.i_codec =
        ( p_sys->p_info->sequence->chroma_height <
          p_sys->p_info->sequence->height ) ?
        VLC_CODEC_I420 : VLC_CODEC_I422;

    /* Get a new picture */
    if( decoder_UpdateVideoFormat( p_dec ) )
        return NULL;
    p_pic = decoder_NewPicture( p_dec );

    if( p_pic == NULL )
        return NULL;

    p_pic->b_progressive = p_sys->p_info->current_picture != NULL ?
        p_sys->p_info->current_picture->flags & PIC_FLAG_PROGRESSIVE_FRAME : 1;
    p_pic->b_top_field_first = p_sys->p_info->current_picture != NULL ?
        p_sys->p_info->current_picture->flags & PIC_FLAG_TOP_FIELD_FIRST : 1;
    p_pic->i_nb_fields = p_sys->p_info->current_picture != NULL ?
        p_sys->p_info->current_picture->nb_fields : 2;

    return p_pic;
}
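The frame-rate computation above works in the MPEG-2 27 MHz clock domain: frame_period is the duration of one frame in 27 MHz ticks, and 1001000000 * 27 is exactly 27 MHz * 1001, so i_frame_rate / 1001 reproduces the original rate. A rough check with illustrative numbers (not taken from the code):

/* Illustrative values only: NTSC-rate video at 30000/1001 fps has
 * frame_period = 27000000 / (30000/1001) = 900900 ticks. */
uint64_t frame_period = 900900;
uint32_t i_frame_rate = (uint32_t)( (uint64_t)1001000000 * 27 / frame_period );
/* i_frame_rate == 30000, i_frame_rate_base == 1001  ->  ~29.97 fps */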
Example #4
/*****************************************************************************
 * DecodeFrame: decodes a video frame.
 *****************************************************************************/
static int DecodeFrame( decoder_t *p_dec, block_t *p_block )
{
    if( p_block == NULL ) /* No Drain */
        return VLCDEC_SUCCESS;

    p_block = DecodeBlock( p_dec, p_block );
    if( p_block == NULL )
        return VLCDEC_SUCCESS;

    decoder_sys_t *p_sys = p_dec->p_sys;

    /* Get a new picture */
    picture_t *p_pic = NULL;
    if( !decoder_UpdateVideoFormat( p_dec ) )
        p_pic = decoder_NewPicture( p_dec );
    if( p_pic == NULL )
    {
        block_Release( p_block );
        return VLCDEC_SUCCESS;
    }

    FillPicture( p_dec, p_block, p_pic );

    /* Date management: 1 frame per packet */
    p_pic->date = date_Get( &p_dec->p_sys->pts );
    date_Increment( &p_sys->pts, 1 );

    if( p_block->i_flags & BLOCK_FLAG_INTERLACED_MASK )
    {
        p_pic->b_progressive = false;
        p_pic->i_nb_fields = (p_block->i_flags & BLOCK_FLAG_SINGLE_FIELD) ? 1 : 2;
        if( p_block->i_flags & BLOCK_FLAG_TOP_FIELD_FIRST )
            p_pic->b_top_field_first = true;
        else
            p_pic->b_top_field_first = false;
    }
    else
        p_pic->b_progressive = true;

    block_Release( p_block );
    decoder_QueueVideo( p_dec, p_pic );
    return VLCDEC_SUCCESS;
}
Example #5
/*****************************************************************************
 * DecodePacket: decodes a Theora packet.
 *****************************************************************************/
static picture_t *DecodePacket( decoder_t *p_dec, ogg_packet *p_oggpacket )
{
    decoder_sys_t *p_sys = p_dec->p_sys;
    picture_t *p_pic;
    th_ycbcr_buffer ycbcr;

    /* TODO: Implement _granpos (3rd parameter here) and add the
     * call to TH_DECCTL_SET_GRANDPOS after seek */
    /* TODO: If the return is TH_DUPFRAME, we don't need to display a new
     * frame, but we do need to keep displaying the previous one. */
    if (th_decode_packetin( p_sys->tcx, p_oggpacket, NULL ) < 0)
        return NULL; /* bad packet */

    /* Check for keyframe */
    if( !(p_oggpacket->packet[0] & 0x80) /* data packet */ &&
        !(p_oggpacket->packet[0] & 0x40) /* intra frame */ )
        p_sys->b_decoded_first_keyframe = true;

    /* If we haven't seen a single keyframe yet, don't let Theora decode
     * anything, otherwise we'll get display artifacts.  (This should not
     * happen in the general case, but can happen if e.g. we play a network stream
     * using a timed URL, such that the server doesn't start the video with a
     * keyframe). */
    if( !p_sys->b_decoded_first_keyframe )
        return NULL; /* Wait until we've decoded the first keyframe */

    if( th_decode_ycbcr_out( p_sys->tcx, ycbcr ) ) /* returns 0 on success */
        return NULL;

    /* Get a new picture */
    if( decoder_UpdateVideoFormat( p_dec ) )
        return NULL;
    p_pic = decoder_NewPicture( p_dec );
    if( !p_pic ) return NULL;

    theora_CopyPicture( p_pic, ycbcr );

    p_pic->date = p_sys->i_pts;
    p_pic->b_progressive = true;

    return p_pic;
}
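The keyframe test above relies on the layout of the first byte of a Theora packet: bit 0x80 set marks a header packet, and on data packets bit 0x40 set marks an inter (non-key) frame, so both bits clear identify a data packet that is a keyframe. A small hedged restatement of that check (helper name chosen here for illustration):

/* Illustrative helper: true for a Theora data packet that is a keyframe
 * (0x80 clear: data packet, 0x40 clear: intra frame). */
static bool theora_IsKeyframe( const ogg_packet *p_oggpacket )
{
    if( p_oggpacket->bytes < 1 )
        return false;
    return !(p_oggpacket->packet[0] & 0x80) &&
           !(p_oggpacket->packet[0] & 0x40);
}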
Example #6
/****************************************************************************
 * DecodeBlock: the whole thing
 ****************************************************************************
 * This function must be fed with a complete image.
 ****************************************************************************/
static int DecodeBlock( decoder_t *p_dec, block_t *p_block )
{
    decoder_sys_t *p_sys  = (decoder_sys_t *) p_dec->p_sys;
    picture_t *p_pic = NULL;
    int32_t i_width, i_height;

    RsvgHandle *rsvg = NULL;
    cairo_surface_t *surface = NULL;
    cairo_t *cr = NULL;

    if( p_block == NULL ) /* No Drain */
        return VLCDEC_SUCCESS;

    if( p_block->i_flags & BLOCK_FLAG_CORRUPTED)
    {
        block_Release( p_block );
        return VLCDEC_SUCCESS;
    }

    rsvg = rsvg_handle_new_from_data( p_block->p_buffer, p_block->i_buffer, NULL );
    if( !rsvg )
        goto done;

    RsvgDimensionData dim;
    rsvg_handle_get_dimensions( rsvg, &dim );

    if( p_sys->f_scale > 0.0 )
    {
        i_width  = (int32_t)(p_sys->f_scale * dim.width);
        i_height = (int32_t)(p_sys->f_scale * dim.height);
    }
    else
    {
        /* Keep aspect */
        if( p_sys->i_width < 0 && p_sys->i_height > 0 )
        {
            i_width  = dim.width * p_sys->i_height / dim.height;
            i_height = p_sys->i_height;
        }
        else if( p_sys->i_width > 0 && p_sys->i_height < 0 )
        {
            i_width  = p_sys->i_width;
            i_height = dim.height * p_sys->i_width / dim.width;
        }
        else if( p_sys->i_width > 0 && p_sys->i_height > 0 )
        {
            i_width  = dim.width * p_sys->i_height / dim.height;
            i_height = p_sys->i_height;
        }
        else
        {
            i_width  = dim.width;
            i_height = dim.height;
        }
    }

    p_dec->fmt_out.i_codec =
    p_dec->fmt_out.video.i_chroma = VLC_CODEC_BGRA;
    p_dec->fmt_out.video.i_width  = i_width;
    p_dec->fmt_out.video.i_height = i_height;
    p_dec->fmt_out.video.i_visible_width  = i_width;
    p_dec->fmt_out.video.i_visible_height = i_height;
    p_dec->fmt_out.video.i_sar_num = 1;
    p_dec->fmt_out.video.i_sar_den = 1;
    p_dec->fmt_out.video.i_rmask = 0x00ff0000; /* Since librsvg v1.0 */
    p_dec->fmt_out.video.i_gmask = 0x0000ff00;
    p_dec->fmt_out.video.i_bmask = 0x000000ff;
    video_format_FixRgb(&p_dec->fmt_out.video);

    /* Get a new picture */
    if( decoder_UpdateVideoFormat( p_dec ) )
        goto done;
    p_pic = decoder_NewPicture( p_dec );
    if( !p_pic )
        goto done;

    /* NOTE: Do not use the stride calculation from cairo, because it is wrong:
     * stride = cairo_format_stride_for_width(CAIRO_FORMAT_ARGB32, dim.width);
     * Use the stride from VLC's picture_t::p[0].i_pitch, which is correct.
     */
    memset(p_pic->p[0].p_pixels, 0, p_pic->p[0].i_pitch * p_pic->p[0].i_lines);
    surface = cairo_image_surface_create_for_data( p_pic->p->p_pixels,
                                                   CAIRO_FORMAT_ARGB32,
                                                   i_width, i_height,
                                                   p_pic->p[0].i_pitch );
    if( !surface )
    {
        picture_Release( p_pic );
        p_pic = NULL;
        goto done;
    }

    /* Decode picture */
    cr = cairo_create( surface );
    if( !cr )
    {
        picture_Release( p_pic );
        p_pic = NULL;
        goto done;
    }

    if ( i_width != dim.width || i_height != dim.height )
    {
        double sw, sh;
        if ( p_sys->f_scale > 0.0 && !(p_sys->i_width > 0 || p_sys->i_height > 0) )
            sw = sh = p_sys->f_scale;
        else
        {
            double aspect = (double) (dim.width * p_dec->fmt_out.video.i_sar_num) /
                    (dim.height * p_dec->fmt_out.video.i_sar_den);
            sw = aspect * i_width / dim.width;
            sh = aspect * i_height / dim.height;
        }
        cairo_scale(cr, sw, sh);
    }

    if( !rsvg_handle_render_cairo( rsvg, cr ) )
    {
        picture_Release( p_pic );
        p_pic = NULL;
        goto done;
    }

    p_pic->date = p_block->i_pts != VLC_TICK_INVALID ? p_block->i_pts : p_block->i_dts;

done:
    if( rsvg )
        g_object_unref( G_OBJECT( rsvg ) );
    if( cr )
        cairo_destroy( cr );
    if( surface )
        cairo_surface_destroy( surface );

    block_Release( p_block );
    if( p_pic != NULL )
        decoder_QueueVideo( p_dec, p_pic );
    return VLCDEC_SUCCESS;
}
Example #7
/**
 * Sets the decoder output format.
 */
static int lavc_UpdateVideoFormat( decoder_t *p_dec, AVCodecContext *p_context,
                                   bool hwaccel )
{
    int width = p_context->coded_width;
    int height = p_context->coded_height;

    if( !hwaccel )
    {
        int aligns[AV_NUM_DATA_POINTERS];

        if (GetVlcChroma(&p_dec->fmt_out.video, p_context->pix_fmt))
            return -1;

        avcodec_align_dimensions2(p_context, &width, &height, aligns);
    }
    p_dec->fmt_out.i_codec = p_dec->fmt_out.video.i_chroma;

    if( width == 0 || height == 0 || width > 8192 || height > 8192 )
    {
        msg_Err( p_dec, "Invalid frame size %dx%d.", width, height );
        return -1; /* invalid display size */
    }
    p_dec->fmt_out.video.i_width = width;
    p_dec->fmt_out.video.i_height = height;

    if( width != p_context->width || height != p_context->height )
    {
        p_dec->fmt_out.video.i_visible_width = p_context->width;
        p_dec->fmt_out.video.i_visible_height = p_context->height;
    }
    else
    {
        p_dec->fmt_out.video.i_visible_width = width;
        p_dec->fmt_out.video.i_visible_height = height;
    }

    /* If an aspect-ratio was specified in the input format then force it */
    if( p_dec->fmt_in.video.i_sar_num > 0 && p_dec->fmt_in.video.i_sar_den > 0 )
    {
        p_dec->fmt_out.video.i_sar_num = p_dec->fmt_in.video.i_sar_num;
        p_dec->fmt_out.video.i_sar_den = p_dec->fmt_in.video.i_sar_den;
    }
    else
    {
        p_dec->fmt_out.video.i_sar_num = p_context->sample_aspect_ratio.num;
        p_dec->fmt_out.video.i_sar_den = p_context->sample_aspect_ratio.den;

        if( !p_dec->fmt_out.video.i_sar_num || !p_dec->fmt_out.video.i_sar_den )
        {
            p_dec->fmt_out.video.i_sar_num = 1;
            p_dec->fmt_out.video.i_sar_den = 1;
        }
    }

    if( p_dec->fmt_in.video.i_frame_rate > 0 &&
        p_dec->fmt_in.video.i_frame_rate_base > 0 )
    {
        p_dec->fmt_out.video.i_frame_rate =
            p_dec->fmt_in.video.i_frame_rate;
        p_dec->fmt_out.video.i_frame_rate_base =
            p_dec->fmt_in.video.i_frame_rate_base;
    }
    else if( p_context->time_base.num > 0 && p_context->time_base.den > 0 )
    {
        p_dec->fmt_out.video.i_frame_rate = p_context->time_base.den;
        p_dec->fmt_out.video.i_frame_rate_base = p_context->time_base.num * __MAX( p_context->ticks_per_frame, 1 );
    }
    return decoder_UpdateVideoFormat( p_dec );
}
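The split between coded and visible dimensions above matters for codecs whose coded size is padded to a macroblock multiple. A worked example with illustrative numbers (not taken from the code):

/* Illustrative numbers only: a 1920x1080 H.264 stream is coded in 16x16
 * macroblocks, so the coded height is padded to 1088 while the visible
 * height stays 1080:
 *   p_context->coded_width  = 1920  ->  fmt_out.video.i_width          = 1920
 *   p_context->coded_height = 1088  ->  fmt_out.video.i_height         = 1088
 *   p_context->width        = 1920  ->  fmt_out.video.i_visible_width  = 1920
 *   p_context->height       = 1080  ->  fmt_out.video.i_visible_height = 1080
 * (avcodec_align_dimensions2() may round the non-visible size up further,
 * depending on the codec and pixel format.) */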
Example #8
File: mft.c Project: IAPark/vlc
static int ProcessOutputStream(decoder_t *p_dec, DWORD stream_id)
{
    decoder_sys_t *p_sys = p_dec->p_sys;
    HRESULT hr;
    picture_t *picture = NULL;
    block_t *aout_buffer = NULL;

    DWORD output_status = 0;
    MFT_OUTPUT_DATA_BUFFER output_buffer = { stream_id, p_sys->output_sample, 0, NULL };
    hr = IMFTransform_ProcessOutput(p_sys->mft, 0, 1, &output_buffer, &output_status);
    if (output_buffer.pEvents)
        IMFCollection_Release(output_buffer.pEvents);
    /* Use the returned sample since it can be provided by the MFT. */
    IMFSample *output_sample = output_buffer.pSample;

    if (hr == S_OK)
    {
        if (!output_sample)
            return VLC_SUCCESS;

        LONGLONG sample_time;
        hr = IMFSample_GetSampleTime(output_sample, &sample_time);
        if (FAILED(hr))
            goto error;
        /* Convert from 100-nanosecond units to microseconds. */
        sample_time /= 10;

        DWORD total_length = 0;
        hr = IMFSample_GetTotalLength(output_sample, &total_length);
        if (FAILED(hr))
            goto error;

        if (p_dec->fmt_in.i_cat == VIDEO_ES)
        {
            if (decoder_UpdateVideoFormat(p_dec))
                return VLC_SUCCESS;
            picture = decoder_NewPicture(p_dec);
            if (!picture)
                return VLC_SUCCESS;

            UINT32 interlaced = false;
            hr = IMFSample_GetUINT32(output_sample, &MFSampleExtension_Interlaced, &interlaced);
            picture->b_progressive = !interlaced;

            picture->date = sample_time;
        }
        else
        {
            if (decoder_UpdateAudioFormat(p_dec))
                goto error;
            if (p_dec->fmt_out.audio.i_bitspersample == 0 || p_dec->fmt_out.audio.i_channels == 0)
                goto error;
            int samples = total_length / (p_dec->fmt_out.audio.i_bitspersample * p_dec->fmt_out.audio.i_channels / 8);
            aout_buffer = decoder_NewAudioBuffer(p_dec, samples);
            if (!aout_buffer)
                return VLC_SUCCESS;
            if (aout_buffer->i_buffer < total_length)
                goto error;

            aout_buffer->i_pts = sample_time;
        }

        IMFMediaBuffer *output_media_buffer = NULL;
        hr = IMFSample_GetBufferByIndex(output_sample, 0, &output_media_buffer);

        BYTE *buffer_start;
        hr = IMFMediaBuffer_Lock(output_media_buffer, &buffer_start, NULL, NULL);
        if (FAILED(hr))
            goto error;

        if (p_dec->fmt_in.i_cat == VIDEO_ES)
            CopyPackedBufferToPicture(picture, buffer_start);
        else
            memcpy(aout_buffer->p_buffer, buffer_start, total_length);

        hr = IMFMediaBuffer_Unlock(output_media_buffer);
        IMFSample_Release(output_media_buffer);
        if (FAILED(hr))
            goto error;

        if (p_sys->output_sample)
        {
            /* Sample is not provided by the MFT: clear its content. */
            hr = IMFMediaBuffer_SetCurrentLength(output_media_buffer, 0);
            if (FAILED(hr))
                goto error;
        }
        else
        {
            /* Sample is provided by the MFT: decrease refcount. */
            IMFSample_Release(output_sample);
        }
    }
    else if (hr == MF_E_TRANSFORM_STREAM_CHANGE || hr == MF_E_TRANSFORM_TYPE_NOT_SET)
    {
        if (p_sys->output_type)
            IMFMediaType_Release(p_sys->output_type);
        if (SetOutputType(p_dec, p_sys->output_stream_id, &p_sys->output_type))
            goto error;

        /* Reallocate output sample. */
        if (p_sys->output_sample)
            IMFSample_Release(p_sys->output_sample);
        p_sys->output_sample = NULL;
        if (AllocateOutputSample(p_dec, 0, &p_sys->output_sample))
            goto error;
        return VLC_SUCCESS;
    }
    else if (hr == MF_E_TRANSFORM_NEED_MORE_INPUT)
    {
        return VLC_SUCCESS;
    }
    else /* An error not listed above occurred */
    {
        msg_Err(p_dec, "Unexpected error in IMFTransform::ProcessOutput: %#lx",
                hr);
        goto error;
    }

    if (p_dec->fmt_in.i_cat == VIDEO_ES)
        decoder_QueueVideo(p_dec, picture);
    else
        decoder_QueueAudio(p_dec, aout_buffer);

    return VLC_SUCCESS;

error:
    msg_Err(p_dec, "Error in ProcessOutputStream()");
    if (picture)
        picture_Release(picture);
    if (aout_buffer)
        block_Release(aout_buffer);
    return VLC_EGENERIC;
}
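The audio branch above derives the sample count from the MFT buffer length: bits per sample times channels, divided by 8, gives the bytes per frame. A worked example with illustrative numbers (not taken from the code):

/* Illustrative numbers only: 16-bit stereo PCM at 48 kHz.
 *   bytes per frame = 16 * 2 / 8 = 4
 *   total_length    = 19200 bytes
 *   samples         = 19200 / 4 = 4800  (100 ms of audio) */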
Example #9
File: xwd.c Project: Geal/vlc
static picture_t *Decode (decoder_t *dec, block_t **pp)
{
    picture_t *pic = NULL;

    if (pp == NULL)
        return NULL;

    block_t *block = *pp;
    if (block == NULL)
        return NULL;
    *pp = NULL;

    if (block->i_pts <= VLC_TS_INVALID)
        goto drop; /* undated block, should never happen */
    if (block->i_buffer < sz_XWDheader)
        goto drop;

    /* Skip XWD header */
    const XWDFileHeader *hdr = (const void *)block->p_buffer;
    uint32_t hdrlen = ntohl(hdr->header_size);
    if (hdrlen < sz_XWDheader
     || ntohl(hdr->file_version) < XWD_FILE_VERSION
     || ntohl(hdr->pixmap_format) != 2 /* ZPixmap */)
        goto drop;

    hdrlen += ntohl(hdr->ncolors) * sz_XWDColor;
    if (hdrlen > block->i_buffer)
        goto drop;
    block->p_buffer += hdrlen;
    block->i_buffer -= hdrlen;

    /* Parse XWD header */
    vlc_fourcc_t chroma = 0;
    switch (ntohl(hdr->pixmap_depth))
    {
        case 8:
            if (ntohl(hdr->bits_per_pixel) == 8)
                chroma = VLC_CODEC_RGB8;
            break;
        case 15:
            if (ntohl(hdr->bits_per_pixel) == 16)
                chroma = VLC_CODEC_RGB15;
            break;
        case 16:
            if (ntohl(hdr->bits_per_pixel) == 16)
                chroma = VLC_CODEC_RGB16;
            break;
        case 24:
            switch (ntohl(hdr->bits_per_pixel))
            {
                case 32: chroma = VLC_CODEC_RGB32; break;
                case 24: chroma = VLC_CODEC_RGB24; break;
            }
            break;
        case 32:
            if (ntohl(hdr->bits_per_pixel) == 32)
                chroma = VLC_CODEC_ARGB;
            break;
    }
    /* TODO: check image endianness, set RGB mask */
    if (!chroma)
        goto drop;

    video_format_Setup(&dec->fmt_out.video, chroma,
                       ntohl(hdr->pixmap_width), ntohl(hdr->pixmap_height),
                       ntohl(hdr->pixmap_width), ntohl(hdr->pixmap_height),
                       dec->fmt_in.video.i_sar_num,
                       dec->fmt_in.video.i_sar_den);

    const size_t copy = dec->fmt_out.video.i_width
                        * (dec->fmt_out.video.i_bits_per_pixel / 8);
    const uint32_t pitch = ntohl(hdr->bytes_per_line);

    /* Build picture */
    if (pitch < copy
     || (block->i_buffer / pitch) < dec->fmt_out.video.i_height)
        goto drop;

    if (decoder_UpdateVideoFormat(dec))
        goto drop;
    pic = decoder_NewPicture(dec);
    if (pic == NULL)
        goto drop;

    const uint8_t *in = block->p_buffer;
    uint8_t *out = pic->p->p_pixels;
    for (unsigned i = 0; i < dec->fmt_out.video.i_height; i++)
    {
        memcpy(out, in, copy);
        in += pitch;
        out += pic->p->i_pitch;
    }
    pic->date = block->i_pts;
    pic->b_progressive = true;

drop:
    block_Release(block);
    return pic;
}
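In the XWD decoder above, copy is the number of meaningful pixel bytes per line and pitch is the stride stored in the file header, so the size check guards the per-line memcpy(). A worked example with illustrative numbers (not taken from the code):

/* Illustrative numbers only: a 1024x768 ZPixmap dump, depth 24, 32 bpp:
 *   chroma = VLC_CODEC_RGB32, i_bits_per_pixel = 32
 *   copy   = 1024 * (32 / 8) = 4096 bytes of pixels per line
 *   pitch  = ntohl(hdr->bytes_per_line), must be >= 4096
 *   block->i_buffer must hold at least pitch * 768 bytes after the header */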
Example #10
static int Video_GetOutput(decoder_t *p_dec, picture_t **pp_out_pic,
                           block_t **pp_out_block, bool *p_abort,
                           mtime_t i_timeout)
{
    decoder_sys_t *p_sys = p_dec->p_sys;
    mc_api_out out;
    picture_t *p_pic = NULL;
    int i_ret;

    assert(pp_out_pic && !pp_out_block);

    /* FIXME: A new picture shouldn't be created each time.  If
     * decoder_NewPicture fails because the decoder is flushing/exiting,
     * GetVideoOutput will either fail (or crash, depending on the device), or
     * never return an output buffer. Indeed, if the Decoder is flushing,
     * MediaCodec can be stalled since the input is waiting for the output or
     * vice-versa. Therefore, call decoder_NewPicture before GetVideoOutput as
     * a safeguard. */

    if (p_sys->b_has_format)
    {
        if (p_sys->b_update_format)
        {
            p_sys->b_update_format = false;
            if (decoder_UpdateVideoFormat(p_dec) != 0)
            {
                msg_Err(p_dec, "decoder_UpdateVideoFormat failed");
                return -1;
            }
        }
        p_pic = decoder_NewPicture(p_dec);
        if (!p_pic) {
            msg_Warn(p_dec, "NewPicture failed");
            /* abort current Decode call */
            *p_abort = true;
            return 0;
        }
    }

    i_ret = p_sys->api->get_out(p_sys->api, &out, i_timeout);
    if (i_ret != 1)
        goto end;

    if (out.type == MC_OUT_TYPE_BUF)
    {
        /* If the oldest input block had no PTS, the timestamp of
         * the frame returned by MediaCodec might be wrong so we
         * overwrite it with the corresponding dts. Call FifoGet
         * first in order to avoid a gap if buffers are released
         * due to an invalid format or a preroll */
        int64_t forced_ts = timestamp_FifoGet(p_sys->u.video.timestamp_fifo);

        if (!p_sys->b_has_format) {
            msg_Warn(p_dec, "Buffers returned before output format is set, dropping frame");
            i_ret = p_sys->api->release_out(p_sys->api, out.u.buf.i_index, false);
            goto end;
        }

        if (out.u.buf.i_ts <= p_sys->i_preroll_end)
        {
            i_ret = p_sys->api->release_out(p_sys->api, out.u.buf.i_index, false);
            goto end;
        }

        if (forced_ts == VLC_TS_INVALID)
            p_pic->date = out.u.buf.i_ts;
        else
            p_pic->date = forced_ts;

        if (p_sys->api->b_direct_rendering)
        {
            picture_sys_t *p_picsys = p_pic->p_sys;
            p_picsys->pf_lock_pic = NULL;
            p_picsys->pf_unlock_pic = UnlockPicture;
            p_picsys->priv.hw.p_dec = p_dec;
            p_picsys->priv.hw.i_index = out.u.buf.i_index;
            p_picsys->priv.hw.b_valid = true;

            vlc_mutex_lock(get_android_opaque_mutex());
            InsertInflightPicture(p_dec, p_pic, out.u.buf.i_index);
            vlc_mutex_unlock(get_android_opaque_mutex());
        } else {
            unsigned int chroma_div;
            GetVlcChromaSizes(p_dec->fmt_out.i_codec,
                              p_dec->fmt_out.video.i_width,
                              p_dec->fmt_out.video.i_height,
                              NULL, NULL, &chroma_div);
            CopyOmxPicture(p_sys->u.video.i_pixel_format, p_pic,
                           p_sys->u.video.i_slice_height, p_sys->u.video.i_stride,
                           (uint8_t *)out.u.buf.p_ptr, chroma_div,
                           &p_sys->u.video.ascd);

            if (p_sys->api->release_out(p_sys->api, out.u.buf.i_index, false))
                i_ret = -1;
        }
        i_ret = 1;
    } else {
        assert(out.type == MC_OUT_TYPE_CONF);
        p_sys->u.video.i_pixel_format = out.u.conf.video.pixel_format;
        ArchitectureSpecificCopyHooksDestroy(p_sys->u.video.i_pixel_format,
                                             &p_sys->u.video.ascd);

        const char *name = "unknown";
        if (p_sys->api->b_direct_rendering)
            p_dec->fmt_out.i_codec = VLC_CODEC_ANDROID_OPAQUE;
        else
        {
            if (!GetVlcChromaFormat(p_sys->u.video.i_pixel_format,
                                    &p_dec->fmt_out.i_codec, &name)) {
                msg_Err(p_dec, "color-format not recognized");
                i_ret = -1;
                goto end;
            }
        }

        msg_Err(p_dec, "output: %d %s, %dx%d stride %d %d, crop %d %d %d %d",
                p_sys->u.video.i_pixel_format, name, out.u.conf.video.width, out.u.conf.video.height,
                out.u.conf.video.stride, out.u.conf.video.slice_height,
                out.u.conf.video.crop_left, out.u.conf.video.crop_top,
                out.u.conf.video.crop_right, out.u.conf.video.crop_bottom);

        p_dec->fmt_out.video.i_width = out.u.conf.video.crop_right + 1 - out.u.conf.video.crop_left;
        p_dec->fmt_out.video.i_height = out.u.conf.video.crop_bottom + 1 - out.u.conf.video.crop_top;
        if (p_dec->fmt_out.video.i_width <= 1
            || p_dec->fmt_out.video.i_height <= 1) {
            p_dec->fmt_out.video.i_width = out.u.conf.video.width;
            p_dec->fmt_out.video.i_height = out.u.conf.video.height;
        }
        p_dec->fmt_out.video.i_visible_width = p_dec->fmt_out.video.i_width;
        p_dec->fmt_out.video.i_visible_height = p_dec->fmt_out.video.i_height;

        p_sys->u.video.i_stride = out.u.conf.video.stride;
        p_sys->u.video.i_slice_height = out.u.conf.video.slice_height;
        if (p_sys->u.video.i_stride <= 0)
            p_sys->u.video.i_stride = out.u.conf.video.width;
        if (p_sys->u.video.i_slice_height <= 0)
            p_sys->u.video.i_slice_height = out.u.conf.video.height;

        ArchitectureSpecificCopyHooks(p_dec, out.u.conf.video.pixel_format,
                                      out.u.conf.video.slice_height,
                                      p_sys->u.video.i_stride, &p_sys->u.video.ascd);
        if (p_sys->u.video.i_pixel_format == OMX_TI_COLOR_FormatYUV420PackedSemiPlanar)
            p_sys->u.video.i_slice_height -= out.u.conf.video.crop_top/2;
        if (IgnoreOmxDecoderPadding(p_sys->psz_name)) {
            p_sys->u.video.i_slice_height = 0;
            p_sys->u.video.i_stride = p_dec->fmt_out.video.i_width;
        }
        p_sys->b_update_format = true;
        p_sys->b_has_format = true;
        i_ret = 0;
    }
end:
    if (p_pic)
    {
        if (i_ret == 1)
            *pp_out_pic = p_pic;
        else
            picture_Release(p_pic);
    }
    return i_ret;
}
Example #11
/*****************************************************************************
 * StartMediaCodec: Create the mediacodec instance
 *****************************************************************************/
static int StartMediaCodec(decoder_t *p_dec)
{
    decoder_sys_t *p_sys = p_dec->p_sys;
    int i_ret = 0;
    union mc_api_args args;

    if (p_dec->fmt_in.i_extra && !p_sys->p_csd)
    {
        /* Try first to configure specific Video CSD */
        if (p_dec->fmt_in.i_cat == VIDEO_ES)
            i_ret = ParseVideoExtra(p_dec, p_dec->fmt_in.p_extra,
                                    p_dec->fmt_in.i_extra);

        if (i_ret != VLC_SUCCESS)
            return i_ret;

        /* Set default CSD if ParseVideoExtra failed to configure one */
        if (!p_sys->p_csd)
        {
            struct csd csd;

            csd.p_buf = p_dec->fmt_in.p_extra;
            csd.i_size = p_dec->fmt_in.i_extra;
            CSDDup(p_dec, &csd, 1);
        }

        p_sys->i_csd_send = 0;
    }

    if (p_dec->fmt_in.i_cat == VIDEO_ES)
    {
        if (!p_sys->u.video.i_width || !p_sys->u.video.i_height)
        {
            msg_Err(p_dec, "invalid size, abort MediaCodec");
            return VLC_EGENERIC;
        }
        args.video.i_width = p_sys->u.video.i_width;
        args.video.i_height = p_sys->u.video.i_height;

        switch (p_dec->fmt_in.video.orientation)
        {
            case ORIENT_ROTATED_90:
                args.video.i_angle = 90;
                break;
            case ORIENT_ROTATED_180:
                args.video.i_angle = 180;
                break;
            case ORIENT_ROTATED_270:
                args.video.i_angle = 270;
                break;
            default:
                args.video.i_angle = 0;
        }

        /* Check again the codec name if h264 profile changed */
        if (p_dec->fmt_in.i_codec == VLC_CODEC_H264
         && !p_sys->u.video.i_h264_profile)
        {
            h264_get_profile_level(&p_dec->fmt_in,
                                   &p_sys->u.video.i_h264_profile, NULL, NULL);
            if (p_sys->u.video.i_h264_profile)
            {
                free(p_sys->psz_name);
                p_sys->psz_name = MediaCodec_GetName(VLC_OBJECT(p_dec),
                                                     p_sys->mime,
                                                     p_sys->u.video.i_h264_profile);
                if (!p_sys->psz_name)
                    return VLC_EGENERIC;
            }
        }

        if (!p_sys->u.video.p_awh && var_InheritBool(p_dec, CFG_PREFIX "dr"))
        {
            if ((p_sys->u.video.p_awh = AWindowHandler_new(VLC_OBJECT(p_dec))))
            {
                /* Direct rendering:
                 * The surface must be released by the Vout before calling
                 * start. Request a valid OPAQUE Vout so that any non-OPAQUE
                 * Vout still holding the surface gets released.
                 */
                p_dec->fmt_out.video.i_width = p_sys->u.video.i_width;
                p_dec->fmt_out.video.i_height = p_sys->u.video.i_height;
                p_dec->fmt_out.i_codec = VLC_CODEC_ANDROID_OPAQUE;
                decoder_UpdateVideoFormat(p_dec);
            }
        }
        args.video.p_awh = p_sys->u.video.p_awh;
    }
    else
    {
        date_Set(&p_sys->u.audio.i_end_date, VLC_TS_INVALID);

        args.audio.i_sample_rate    = p_dec->fmt_in.audio.i_rate;
        args.audio.i_channel_count  = p_dec->p_sys->u.audio.i_channels;
    }

    return p_sys->api->start(p_sys->api, p_sys->psz_name, p_sys->mime, &args);
}
Example #12
static bool gst_vlc_video_info_from_vout( GstVideoInfo *p_info,
        GstVideoAlignment *p_align, GstCaps *p_caps, decoder_t *p_dec,
        picture_t *p_pic_info )
{
    const GstVideoFormatInfo *p_vinfo = p_info->finfo;
    picture_t *p_pic = NULL;
    int i;

    /* Ensure the queue is empty */
    gst_vlc_dec_ensure_empty_queue( p_dec );
    gst_video_info_align( p_info, p_align );

    if( !gst_vlc_set_vout_fmt( p_info, p_align, p_caps, p_dec ))
    {
        msg_Err( p_dec, "failed to set output format to vout" );
        return false;
    }

    /* Acquire a picture and release it. This is done to get the picture's
     * stride/offset info for a GStreamer decoder that wants to use the
     * downstream buffer pool directly (zero-copy). */
    if( !decoder_UpdateVideoFormat( p_dec ) )
        p_pic = decoder_NewPicture( p_dec );
    if( !p_pic )
    {
        msg_Err( p_dec, "failed to acquire picture from vout; for pic info" );
        return false;
    }

    /* reject if strides don't match */
    for( i = 0; i < p_pic->i_planes; i++ )
        if( p_info->stride[i] != p_pic->p[i].i_pitch )
            goto strides_mismatch;

    p_info->offset[0] = 0;
    for( i = 1; i < p_pic->i_planes; i++ )
    {
        p_info->offset[i] = p_info->offset[i-1] +
            p_pic->p[i-1].i_pitch * p_pic->p[i-1].i_lines;
    }
    GST_VIDEO_INFO_SIZE( p_info ) = p_info->offset[i-1] +
        p_pic->p[i-1].i_pitch * p_pic->p[i-1].i_lines;

    for( i = 0; i < p_pic->i_planes; i++ )
    {
        int i_v_edge, i_h_edge;

        i_h_edge =
            GST_VIDEO_FORMAT_INFO_SCALE_WIDTH( p_vinfo, i,
                    p_align->padding_left);
        i_v_edge =
            GST_VIDEO_FORMAT_INFO_SCALE_HEIGHT( p_vinfo, i,
                    p_align->padding_top);

        p_info->offset[i] += ( i_v_edge * p_info->stride[i] ) +
            ( i_h_edge * GST_VIDEO_FORMAT_INFO_PSTRIDE( p_vinfo, i ));
    }

    memcpy( p_pic_info, p_pic, sizeof( picture_t ));
    picture_Release( p_pic );

    return true;

strides_mismatch:
    msg_Err( p_dec, "strides mismatch" );
    picture_Release( p_pic );
    return false;
}
Example #13
static int Video_ProcessOutput(decoder_t *p_dec, mc_api_out *p_out,
                               picture_t **pp_out_pic, block_t **pp_out_block)
{
    decoder_sys_t *p_sys = p_dec->p_sys;

    assert(pp_out_pic && !pp_out_block);

    if (p_out->type == MC_OUT_TYPE_BUF)
    {
        picture_t *p_pic = NULL;

        /* Use the aspect ratio provided by the input (ie read from packetizer).
         * Don't check the current value of the aspect ratio in fmt_out, since we
         * want to allow changes in it to propagate. */
        if (p_dec->fmt_in.video.i_sar_num != 0 && p_dec->fmt_in.video.i_sar_den != 0
         && (p_dec->fmt_out.video.i_sar_num != p_dec->fmt_in.video.i_sar_num ||
             p_dec->fmt_out.video.i_sar_den != p_dec->fmt_in.video.i_sar_den))
        {
            p_dec->fmt_out.video.i_sar_num = p_dec->fmt_in.video.i_sar_num;
            p_dec->fmt_out.video.i_sar_den = p_dec->fmt_in.video.i_sar_den;
            p_sys->b_update_format = true;
        }

        if (p_sys->b_update_format)
        {
            p_sys->b_update_format = false;
            if (decoder_UpdateVideoFormat(p_dec) != 0)
            {
                msg_Err(p_dec, "decoder_UpdateVideoFormat failed");
                p_sys->api->release_out(p_sys->api, p_out->u.buf.i_index, false);
                return -1;
            }
        }

        /* If the oldest input block had no PTS, the timestamp of
         * the frame returned by MediaCodec might be wrong so we
         * overwrite it with the corresponding dts. Call FifoGet
         * first in order to avoid a gap if buffers are released
         * due to an invalid format or a preroll */
        int64_t forced_ts = timestamp_FifoGet(p_sys->u.video.timestamp_fifo);

        if (!p_sys->b_has_format) {
            msg_Warn(p_dec, "Buffers returned before output format is set, dropping frame");
            return p_sys->api->release_out(p_sys->api, p_out->u.buf.i_index, false);
        }

        if (p_out->u.buf.i_ts <= p_sys->i_preroll_end)
            return p_sys->api->release_out(p_sys->api, p_out->u.buf.i_index, false);

        p_pic = decoder_NewPicture(p_dec);
        if (!p_pic) {
            msg_Warn(p_dec, "NewPicture failed");
            return p_sys->api->release_out(p_sys->api, p_out->u.buf.i_index, false);
        }

        if (forced_ts == VLC_TS_INVALID)
            p_pic->date = p_out->u.buf.i_ts;
        else
            p_pic->date = forced_ts;

        if (p_sys->api->b_direct_rendering)
        {
            picture_sys_t *p_picsys = p_pic->p_sys;
            p_picsys->pf_lock_pic = NULL;
            p_picsys->pf_unlock_pic = UnlockPicture;
            p_picsys->priv.hw.p_dec = p_dec;
            p_picsys->priv.hw.i_index = p_out->u.buf.i_index;
            p_picsys->priv.hw.b_valid = true;

            vlc_mutex_lock(get_android_opaque_mutex());
            InsertInflightPicture(p_dec, p_pic, p_out->u.buf.i_index);
            vlc_mutex_unlock(get_android_opaque_mutex());
        } else {
            unsigned int chroma_div;
            GetVlcChromaSizes(p_dec->fmt_out.i_codec,
                              p_dec->fmt_out.video.i_width,
                              p_dec->fmt_out.video.i_height,
                              NULL, NULL, &chroma_div);
            CopyOmxPicture(p_sys->u.video.i_pixel_format, p_pic,
                           p_sys->u.video.i_slice_height, p_sys->u.video.i_stride,
                           (uint8_t *)p_out->u.buf.p_ptr, chroma_div,
                           &p_sys->u.video.ascd);

            if (p_sys->api->release_out(p_sys->api, p_out->u.buf.i_index, false))
            {
                picture_Release(p_pic);
                return -1;
            }
        }
        assert(!(*pp_out_pic));
        *pp_out_pic = p_pic;
        return 1;
    } else {
        assert(p_out->type == MC_OUT_TYPE_CONF);
        p_sys->u.video.i_pixel_format = p_out->u.conf.video.pixel_format;
        ArchitectureSpecificCopyHooksDestroy(p_sys->u.video.i_pixel_format,
                                             &p_sys->u.video.ascd);

        const char *name = "unknown";
        if (p_sys->api->b_direct_rendering)
            p_dec->fmt_out.i_codec = VLC_CODEC_ANDROID_OPAQUE;
        else
        {
            if (!GetVlcChromaFormat(p_sys->u.video.i_pixel_format,
                                    &p_dec->fmt_out.i_codec, &name)) {
                msg_Err(p_dec, "color-format not recognized");
                return -1;
            }
        }

        msg_Err(p_dec, "output: %d %s, %dx%d stride %d %d, crop %d %d %d %d",
                p_sys->u.video.i_pixel_format, name, p_out->u.conf.video.width, p_out->u.conf.video.height,
                p_out->u.conf.video.stride, p_out->u.conf.video.slice_height,
                p_out->u.conf.video.crop_left, p_out->u.conf.video.crop_top,
                p_out->u.conf.video.crop_right, p_out->u.conf.video.crop_bottom);

        p_dec->fmt_out.video.i_width = p_out->u.conf.video.crop_right + 1 - p_out->u.conf.video.crop_left;
        p_dec->fmt_out.video.i_height = p_out->u.conf.video.crop_bottom + 1 - p_out->u.conf.video.crop_top;
        if (p_dec->fmt_out.video.i_width <= 1
            || p_dec->fmt_out.video.i_height <= 1) {
            p_dec->fmt_out.video.i_width = p_out->u.conf.video.width;
            p_dec->fmt_out.video.i_height = p_out->u.conf.video.height;
        }
        p_dec->fmt_out.video.i_visible_width = p_dec->fmt_out.video.i_width;
        p_dec->fmt_out.video.i_visible_height = p_dec->fmt_out.video.i_height;

        p_sys->u.video.i_stride = p_out->u.conf.video.stride;
        p_sys->u.video.i_slice_height = p_out->u.conf.video.slice_height;
        if (p_sys->u.video.i_stride <= 0)
            p_sys->u.video.i_stride = p_out->u.conf.video.width;
        if (p_sys->u.video.i_slice_height <= 0)
            p_sys->u.video.i_slice_height = p_out->u.conf.video.height;

        ArchitectureSpecificCopyHooks(p_dec, p_out->u.conf.video.pixel_format,
                                      p_out->u.conf.video.slice_height,
                                      p_sys->u.video.i_stride, &p_sys->u.video.ascd);
        if (p_sys->u.video.i_pixel_format == OMX_TI_COLOR_FormatYUV420PackedSemiPlanar)
            p_sys->u.video.i_slice_height -= p_out->u.conf.video.crop_top/2;
        if ((p_sys->i_quirks & OMXCODEC_VIDEO_QUIRKS_IGNORE_PADDING))
        {
            p_sys->u.video.i_slice_height = 0;
            p_sys->u.video.i_stride = p_dec->fmt_out.video.i_width;
        }
        p_sys->b_update_format = true;
        p_sys->b_has_format = true;
        return 0;
    }
}
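The crop arithmetic above turns MediaCodec's inclusive crop rectangle into a visible width/height, while the stride and slice height describe the full decoded buffer. A worked example with illustrative numbers (not taken from the code):

/* Illustrative numbers only: a 1080p stream decoded into a 1920x1088 buffer
 * with crop_left = 0, crop_right = 1919, crop_top = 0, crop_bottom = 1079:
 *   i_width  = 1919 + 1 - 0 = 1920
 *   i_height = 1079 + 1 - 0 = 1080
 * The buffer's stride (1920) and slice_height (1088) are kept separately in
 * p_sys->u.video.i_stride / i_slice_height for the non-direct-rendering copy. */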
Example #14
static int send_output_buffer(decoder_t *dec)
{
    decoder_sys_t *sys = dec->p_sys;
    MMAL_BUFFER_HEADER_T *buffer;
    picture_sys_t *p_sys;
    picture_t *picture = NULL;
    MMAL_STATUS_T status;
    unsigned buffer_size = 0;
    int ret = 0;

    if (!sys->output->is_enabled)
        return VLC_EGENERIC;

    /* If local output pool is allocated, use it - this is only the case for
     * non-opaque modes */
    if (sys->output_pool) {
        buffer = mmal_queue_get(sys->output_pool->queue);
        if (!buffer) {
            msg_Warn(dec, "Failed to get new buffer");
            return VLC_EGENERIC;
        }
    }

    if (!decoder_UpdateVideoFormat(dec))
        picture = decoder_NewPicture(dec);
    if (!picture) {
        msg_Warn(dec, "Failed to get new picture");
        ret = -1;
        goto err;
    }

    p_sys = picture->p_sys;
    for (int i = 0; i < picture->i_planes; i++)
        buffer_size += picture->p[i].i_lines * picture->p[i].i_pitch;

    if (sys->output_pool) {
        mmal_buffer_header_reset(buffer);
        buffer->alloc_size = sys->output->buffer_size;
        if (buffer_size < sys->output->buffer_size) {
            msg_Err(dec, "Retrieved picture with too small data block (%d < %d)",
                    buffer_size, sys->output->buffer_size);
            ret = VLC_EGENERIC;
            goto err;
        }

        if (!sys->opaque)
            buffer->data = picture->p[0].p_pixels;
    } else {
        buffer = p_sys->buffer;
        if (!buffer) {
            msg_Warn(dec, "Picture has no buffer attached");
            picture_Release(picture);
            return VLC_EGENERIC;
        }
        buffer->data = p_sys->buffer->data;
    }
    buffer->user_data = picture;
    buffer->cmd = 0;

    status = mmal_port_send_buffer(sys->output, buffer);
    if (status != MMAL_SUCCESS) {
        msg_Err(dec, "Failed to send buffer to output port (status=%"PRIx32" %s)",
                status, mmal_status_to_string(status));
        ret = -1;
        goto err;
    }
    atomic_fetch_add(&sys->output_in_transit, 1);

    return ret;

err:
    if (picture)
        picture_Release(picture);
    if (sys->output_pool && buffer) {
        buffer->data = NULL;
        mmal_buffer_header_release(buffer);
    }
    return ret;
}
Example #15
/*
 * This function must be fed with a complete compressed frame.
 */
static int DecodeBlock(decoder_t *p_dec, block_t *p_block)
{
    decoder_sys_t *p_sys = p_dec->p_sys;
    picture_t *p_pic = 0;

    JSAMPARRAY p_row_pointers = NULL;

    if (!p_block) /* No Drain */
        return VLCDEC_SUCCESS;

    if (p_block->i_flags & BLOCK_FLAG_CORRUPTED )
    {
        block_Release(p_block);
        return VLCDEC_SUCCESS;
    }

    /* libjpeg longjmp's there in case of error */
    if (setjmp(p_sys->setjmp_buffer))
    {
        goto error;
    }

    jpeg_create_decompress(&p_sys->p_jpeg);
    jpeg_mem_src(&p_sys->p_jpeg, p_block->p_buffer, p_block->i_buffer);
    jpeg_save_markers( &p_sys->p_jpeg, EXIF_JPEG_MARKER, 0xffff );
    jpeg_read_header(&p_sys->p_jpeg, TRUE);

    p_sys->p_jpeg.out_color_space = JCS_RGB;

    jpeg_start_decompress(&p_sys->p_jpeg);

    /* Set output properties */
    p_dec->fmt_out.i_codec = VLC_CODEC_RGB24;
    p_dec->fmt_out.video.i_visible_width  = p_dec->fmt_out.video.i_width  = p_sys->p_jpeg.output_width;
    p_dec->fmt_out.video.i_visible_height = p_dec->fmt_out.video.i_height = p_sys->p_jpeg.output_height;
    p_dec->fmt_out.video.i_sar_num = 1;
    p_dec->fmt_out.video.i_sar_den = 1;

    int i_otag; /* Orientation tag has valid range of 1-8. 1 is normal orientation, 0 = unspecified = normal */
    i_otag = jpeg_GetOrientation( &p_sys->p_jpeg );
    if ( i_otag > 1 )
    {
        msg_Dbg( p_dec, "Jpeg orientation is %d", i_otag );
        p_dec->fmt_out.video.orientation = ORIENT_FROM_EXIF( i_otag );
    }
    jpeg_GetProjection(&p_sys->p_jpeg, &p_dec->fmt_out.video);

    /* Get a new picture */
    if (decoder_UpdateVideoFormat(p_dec))
    {
        goto error;
    }
    p_pic = decoder_NewPicture(p_dec);
    if (!p_pic)
    {
        goto error;
    }

    /* Decode picture */
    p_row_pointers = malloc(sizeof(JSAMPROW) * p_sys->p_jpeg.output_height);
    if (!p_row_pointers)
    {
        goto error;
    }
    for (unsigned i = 0; i < p_sys->p_jpeg.output_height; i++) {
        p_row_pointers[i] = p_pic->p->p_pixels + p_pic->p->i_pitch * i;
    }

    while (p_sys->p_jpeg.output_scanline < p_sys->p_jpeg.output_height)
    {
        jpeg_read_scanlines(&p_sys->p_jpeg,
                p_row_pointers + p_sys->p_jpeg.output_scanline,
                p_sys->p_jpeg.output_height - p_sys->p_jpeg.output_scanline);
    }

    jpeg_finish_decompress(&p_sys->p_jpeg);
    jpeg_destroy_decompress(&p_sys->p_jpeg);
    free(p_row_pointers);

    p_pic->date = p_block->i_pts > VLC_TS_INVALID ? p_block->i_pts : p_block->i_dts;

    block_Release(p_block);
    decoder_QueueVideo( p_dec, p_pic );
    return VLCDEC_SUCCESS;

error:

    jpeg_destroy_decompress(&p_sys->p_jpeg);
    free(p_row_pointers);

    block_Release(p_block);
    return VLCDEC_SUCCESS;
}
Example #16
/****************************************************************************
 * DecodeBlock: the whole thing
 ****************************************************************************
 * This function must be fed with a complete compressed frame.
 ****************************************************************************/
static int DecodeBlock( decoder_t *p_dec, block_t *p_block )
{
    decoder_sys_t *p_sys = p_dec->p_sys;
    picture_t *p_pic = NULL;
    SDL_Surface *p_surface;
    SDL_RWops *p_rw;

    if( p_block == NULL ) /* No Drain */
        return VLCDEC_SUCCESS;

    if( p_block->i_flags & BLOCK_FLAG_CORRUPTED )
    {
        block_Release( p_block );
        return VLCDEC_SUCCESS;
    }

    p_rw = SDL_RWFromConstMem( p_block->p_buffer, p_block->i_buffer );

    /* Decode picture. */
    p_surface = IMG_LoadTyped_RW( p_rw, 1, (char*)p_sys->psz_sdl_type );
    if ( p_surface == NULL )
    {
        msg_Warn( p_dec, "SDL_image couldn't load the image (%s)",
                  IMG_GetError() );
        goto error;
    }

    switch ( p_surface->format->BitsPerPixel )
    {
    case 16:
        p_dec->fmt_out.i_codec = VLC_CODEC_RGB16;
        break;
    case 8:
    case 24:
        p_dec->fmt_out.i_codec = VLC_CODEC_RGB24;
        break;
    case 32:
        p_dec->fmt_out.i_codec = VLC_CODEC_RGB32;
        break;
    default:
        msg_Warn( p_dec, "unknown bits/pixel format (%d)",
                  p_surface->format->BitsPerPixel );
        goto error;
    }
    p_dec->fmt_out.video.i_width = p_surface->w;
    p_dec->fmt_out.video.i_height = p_surface->h;
    p_dec->fmt_out.video.i_sar_num = 1;
    p_dec->fmt_out.video.i_sar_den = 1;

    /* Get a new picture. */
    if( decoder_UpdateVideoFormat( p_dec ) )
        goto error;
    p_pic = decoder_NewPicture( p_dec );
    if ( p_pic == NULL ) goto error;

    switch ( p_surface->format->BitsPerPixel )
    {
        case 8:
        {
            for ( int i = 0; i < p_surface->h; i++ )
            {
                uint8_t *p_src = (uint8_t*)p_surface->pixels + i * p_surface->pitch;
                uint8_t *p_dst = p_pic->p[0].p_pixels + i * p_pic->p[0].i_pitch;
                for ( int j = 0; j < p_surface->w; j++ )
                {
                    uint8_t r, g, b;
                    SDL_GetRGB( *(p_src++), p_surface->format,
                                &r, &g, &b );
                    *(p_dst++) = r;
                    *(p_dst++) = g;
                    *(p_dst++) = b;
                }
            }
            break;
        }
        case 16:
        {
            uint8_t *p_src = p_surface->pixels;
            uint8_t *p_dst = p_pic->p[0].p_pixels;
            int i_pitch = p_pic->p[0].i_pitch < p_surface->pitch ?
                p_pic->p[0].i_pitch : p_surface->pitch;

            for ( int i = 0; i < p_surface->h; i++ )
            {
                memcpy( p_dst, p_src, i_pitch );
                p_src += p_surface->pitch;
                p_dst += p_pic->p[0].i_pitch;
            }
            break;
        }
        case 24:
        {
            for ( int i = 0; i < p_surface->h; i++ )
            {
                uint8_t *p_src = (uint8_t*)p_surface->pixels + i * p_surface->pitch;
                uint8_t *p_dst = p_pic->p[0].p_pixels + i * p_pic->p[0].i_pitch;
                for ( int j = 0; j < p_surface->w; j++ )
                {
                    uint8_t r, g, b;
                    SDL_GetRGB( *(uint32_t*)p_src, p_surface->format,
                                &r, &g, &b );
                    *(p_dst++) = r;
                    *(p_dst++) = g;
                    *(p_dst++) = b;
                    p_src += 3;
                }
            }
            break;
        }
        case 32:
        {
            for ( int i = 0; i < p_surface->h; i++ )
            {
                uint8_t *p_src = (uint8_t*)p_surface->pixels + i * p_surface->pitch;
                uint8_t *p_dst = p_pic->p[0].p_pixels + i * p_pic->p[0].i_pitch;
                for ( int j = 0; j < p_surface->w; j++ )
                {
                    uint8_t r, g, b, a;
                    SDL_GetRGBA( *(uint32_t*)p_src, p_surface->format,
                                &r, &g, &b, &a );
                    *(p_dst++) = b;
                    *(p_dst++) = g;
                    *(p_dst++) = r;
                    *(p_dst++) = a;
                    p_src += 4;
                }
            }
            break;
        }
    }

    p_pic->date = (p_block->i_pts != VLC_TICK_INVALID) ?
        p_block->i_pts : p_block->i_dts;

    decoder_QueueVideo( p_dec, p_pic );

error:
    if ( p_surface != NULL ) SDL_FreeSurface( p_surface );
    block_Release( p_block );
    return VLCDEC_SUCCESS;
}