Example #1
static block_t *PacketizeParse(void *p_private, bool *pb_ts_used, block_t *p_block)
{
    decoder_t *p_dec = p_private;
    decoder_sys_t *p_sys = p_dec->p_sys;

    block_t * p_nal = NULL;

    while (p_block->i_buffer > 5 && p_block->p_buffer[p_block->i_buffer-1] == 0x00 )
        p_block->i_buffer--;

    bs_t bs;
    bs_init(&bs, p_block->p_buffer+3, p_block->i_buffer-3);

    /* Get NALU type */
    uint32_t forbidden_zero_bit = bs_read1(&bs);

    if (forbidden_zero_bit)
    {
        msg_Err(p_dec,"Forbidden zero bit not null, corrupted NAL");
        p_sys->p_frame = NULL;
        p_sys->b_vcl = false;
        return NULL;
    }
    uint32_t nalu_type = bs_read(&bs,6);
    bs_skip(&bs, 9);

    if (nalu_type < VPS)
    {
        /* NAL is a VCL NAL */
        p_sys->b_vcl = true;

        uint32_t first_slice_in_pic = bs_read1(&bs);

        if (first_slice_in_pic && p_sys->p_frame)
        {
            p_nal = block_ChainGather(p_sys->p_frame);
            p_sys->p_frame = NULL;
        }

        block_ChainAppend(&p_sys->p_frame, p_block);
    }
    else
    {
        if (p_sys->b_vcl)
        {
            p_nal = block_ChainGather(p_sys->p_frame);
            p_nal->p_next = p_block;
            p_sys->p_frame = NULL;
            p_sys->b_vcl = false;
        }
        else
            p_nal = p_block;
    }

    *pb_ts_used = false;
    return p_nal;
}
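The function above shows the idiom that recurs throughout this listing: fragments are queued on a chain with block_ChainAppend() and, once a frame boundary is detected, the pending chain is flattened into a single contiguous block with block_ChainGather(). The sketch below is a minimal, illustrative version of that accumulate-then-gather pattern; it only assumes VLC's public block helpers, and the accumulator type and function names are invented for the example.

#include <vlc_common.h>
#include <vlc_block.h>

/* Illustrative accumulator: queue fragments, then flatten them into one
 * contiguous block when the caller decides the unit is complete. */
typedef struct
{
    block_t *p_chain;   /* head of the pending fragment chain */
} accumulator_t;

static void Accumulate( accumulator_t *p_acc, block_t *p_frag )
{
    /* Walks to the end of the chain and links p_frag (O(n) per append) */
    block_ChainAppend( &p_acc->p_chain, p_frag );
}

static block_t *Finish( accumulator_t *p_acc )
{
    if( p_acc->p_chain == NULL )
        return NULL;
    /* Copies the whole chain into a single block and releases the fragments */
    block_t *p_out = block_ChainGather( p_acc->p_chain );
    p_acc->p_chain = NULL;
    return p_out;
}

Because block_ChainAppend() re-walks the chain on every call, several of the later examples keep an explicit tail pointer and use block_ChainLastAppend() instead (see the sketch after Example #7).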
Example #2
block_t *scan_GetM3U( scan_t *p_scan )
{
    vlc_object_t *p_obj = p_scan->p_obj;
    block_t *p_playlist = NULL;

    if( p_scan->i_service <= 0 )
        return NULL;

    /* */
    qsort( p_scan->pp_service, p_scan->i_service, sizeof(scan_service_t*), ScanServiceCmp );

    /* */
    p_playlist = BlockString( "#EXTM3U\n\n" );/* */

    for( int i = 0; i < p_scan->i_service; i++ )
    {
        scan_service_t *s = p_scan->pp_service[i];

        if( s->type == SERVICE_UNKNOWN )
        {
            /* We should only select services that have been described by the SDT */
            msg_Dbg( p_obj, "scan_GetM3U: ignoring service number %d", s->i_program );
            continue;
        }

        const char *psz_type;
        switch( s->type )
        {
        case SERVICE_DIGITAL_TELEVISION:       psz_type = "Digital television"; break;
        case SERVICE_DIGITAL_TELEVISION_AC_SD: psz_type = "Digital television advanced codec SD"; break;
        case SERVICE_DIGITAL_TELEVISION_AC_HD: psz_type = "Digital television advanced codec HD"; break;
        case SERVICE_DIGITAL_RADIO:            psz_type = "Digital radio"; break;
        default:
            psz_type = "Unknown";
            break;
        }
        msg_Warn( p_obj, "scan_GetM3U: service number %d type '%s' name '%s' channel %d cypted=%d| network_id %d (nit:%d sdt:%d)| f=%d bw=%d snr=%d",
                  s->i_program, psz_type, s->psz_name, s->i_channel, s->b_crypted,
                  s->i_network_id, s->i_nit_version, s->i_sdt_version,
                  s->cfg.i_frequency, s->cfg.i_bandwidth, s->i_snr );

        char *psz;
        if( asprintf( &psz, "#EXTINF:,,%s\n"
                        "#EXTVLCOPT:program=%d\n"
                        "dvb://frequency=%d:bandwidth=%d\n"
                        "\n",
                      s->psz_name && * s->psz_name ? s->psz_name : "Unknown",
                      s->i_program,
                      s->cfg.i_frequency, s->cfg.i_bandwidth ) < 0 )
            psz = NULL;
        if( psz )
        {
            block_t *p_block = BlockString( psz );
            if( p_block )
                block_ChainAppend( &p_playlist, p_block );
        }
    }

    return p_playlist ? block_ChainGather( p_playlist ) : NULL;
}
Example #3
File: hevc.c Project: Akilklk/vlc
/*****************************************************************************
 * ParseNALBlock: parses Annex B type NALs
 * All p_frag blocks are required to start with the 4-byte 0 0 0 1 start code
 *****************************************************************************/
static block_t *ParseNALBlock(decoder_t *p_dec, bool *pb_ts_used, block_t *p_frag)
{
    decoder_sys_t *p_sys = p_dec->p_sys;

    block_t * p_ret = NULL;

    if(unlikely(p_frag->i_buffer < 5))
    {
        msg_Warn(p_dec,"NAL too small");
        block_Release(p_frag);
        return NULL;
    }

    if(p_frag->p_buffer[4] & 0x80)
    {
        msg_Warn(p_dec,"Forbidden zero bit not null, corrupted NAL");
        block_ChainRelease(p_sys->p_frame);
        block_Release(p_frag);
        p_sys->p_frame = NULL;
        p_sys->pp_frame_last = &p_sys->p_frame;
        return NULL;
    }

    /* Get NALU type */
    uint8_t i_nal_type = hevc_getNALType(&p_frag->p_buffer[4]);
    if (i_nal_type < HEVC_NAL_VPS)
    {
        /* NAL is a VCL NAL */
        p_ret = ParseVCL(p_dec, i_nal_type, p_frag);
    }
    else
    {
        p_ret = ParseNonVCL(p_dec, i_nal_type, p_frag);
        if (p_sys->p_frame)
        {
            p_frag = p_ret;
            if( (p_ret = block_ChainGather(p_sys->p_frame)) )
            {
                p_sys->p_frame = NULL;
                p_sys->pp_frame_last = &p_sys->p_frame;
                p_ret->p_next = p_frag;
            }
            else p_ret = p_frag;
        }
    }

    *pb_ts_used = false;
    return p_ret;
}
Example #4
File: hevc.c Project: etix/vlc
static block_t *GatherAndValidateChain(block_t *p_outputchain)
{
    block_t *p_output = NULL;

    if(p_outputchain)
    {
        if(p_outputchain->i_flags & BLOCK_FLAG_CORRUPTED)
            p_output = p_outputchain; /* Avoid useless gather */
        else
            p_output = block_ChainGather(p_outputchain);
    }

    if(p_output && (p_output->i_flags & BLOCK_FLAG_CORRUPTED))
    {
        block_ChainRelease(p_output); /* Chain! see above */
        p_output = NULL;
    }

    return p_output;
}
Example #5
File: sections.c Project: fabn/vlc
void ts_sections_processor_Push( ts_sections_processor_t *p_chain,
                                 uint8_t i_table_id, uint8_t i_stream_type,
                                 demux_t *p_demux, ts_pid_t *p_pid,
                                 block_t *p_blockschain )
{
    while( p_chain )
    {
        if( ( !p_chain->i_stream_type || p_chain->i_stream_type == i_stream_type ) &&
            ( !p_chain->i_table_id || p_chain->i_table_id == i_table_id ) )
        {
            block_t *p_block = block_ChainGather( p_blockschain );
            if( p_block )
            {
                p_block = ts_sections_assembler_Append( &p_chain->assembler, p_block );
                if( p_block )
                    p_chain->pf_callback( p_demux, p_pid, p_block );
            }
            return;
        }
        p_chain = p_chain->p_next;
    }
    block_ChainRelease( p_blockschain );
}
Example #6
File: h264.c Project: r1k/vlc
static block_t *OutputPicture( decoder_t *p_dec )
{
    decoder_sys_t *p_sys = p_dec->p_sys;
    block_t *p_pic;

    if ( !p_sys->b_header && p_sys->i_recovery_frames != -1 )
    {
        if( p_sys->i_recovery_frames == 0 )
        {
            msg_Dbg( p_dec, "Recovery from SEI recovery point complete" );
            p_sys->b_header = true;
        }
        --p_sys->i_recovery_frames;
    }

    if( !p_sys->b_header && p_sys->i_recovery_frames == -1 &&
         p_sys->slice.i_frame_type != BLOCK_FLAG_TYPE_I)
        return NULL;

    const bool b_sps_pps_i = p_sys->slice.i_frame_type == BLOCK_FLAG_TYPE_I &&
                             p_sys->b_sps &&
                             p_sys->b_pps;
    if( b_sps_pps_i || p_sys->b_frame_sps || p_sys->b_frame_pps )
    {
        block_t *p_head = NULL;
        if( p_sys->p_frame->i_flags & BLOCK_FLAG_PRIVATE_AUD )
        {
            p_head = p_sys->p_frame;
            p_sys->p_frame = p_sys->p_frame->p_next;
            if( p_sys->p_frame == NULL )
                p_sys->pp_frame_last = &p_sys->p_frame;
            p_head->p_next = NULL;
        }

        block_t *p_list = NULL;
        block_t **pp_list_tail = &p_list;
        for( int i = 0; i <= H264_SPS_ID_MAX && (b_sps_pps_i || p_sys->b_frame_sps); i++ )
        {
            if( p_sys->pp_sps[i] )
                block_ChainLastAppend( &pp_list_tail, block_Duplicate( p_sys->pp_sps[i] ) );
        }
        for( int i = 0; i < H264_PPS_ID_MAX && (b_sps_pps_i || p_sys->b_frame_pps); i++ )
        {
            if( p_sys->pp_pps[i] )
                block_ChainLastAppend( &pp_list_tail, block_Duplicate( p_sys->pp_pps[i] ) );
        }
        if( b_sps_pps_i && p_list )
            p_sys->b_header = true;

        if( p_list )
            block_ChainAppend( &p_head, p_list );

        if( p_sys->p_frame )
            block_ChainAppend( &p_head, p_sys->p_frame );

        p_pic = block_ChainGather( p_head );
    }
    else
    {
        p_pic = block_ChainGather( p_sys->p_frame );
    }

    unsigned i_num_clock_ts = 2;
    if( p_sys->b_frame_mbs_only == 0 )
    {
        if( p_sys->b_pic_struct_present_flag && p_sys->i_pic_struct < 9 )
        {
            const uint8_t rgi_numclock[9] = { 1, 1, 1, 2, 2, 3, 3, 2, 3 };
            i_num_clock_ts = rgi_numclock[ p_sys->i_pic_struct ];
        }
        else if( p_sys->slice.i_field_pic_flag ) /* See D-1 and E-6 */
        {
            i_num_clock_ts = 1;
        }
    }

    if( p_sys->i_time_scale && p_pic->i_length == 0 )
    {
        p_pic->i_length = CLOCK_FREQ * i_num_clock_ts *
                          p_sys->i_num_units_in_tick / p_sys->i_time_scale;
    }

    mtime_t i_field_pts_diff = -1;
    if( p_sys->b_frame_mbs_only == 0 && p_sys->b_pic_struct_present_flag )
    {
        switch( p_sys->i_pic_struct )
        {
        /* Top and Bottom field slices */
        case 1:
        case 2:
            if( !p_sys->b_even_frame )
            {
                p_pic->i_flags |= (p_sys->i_pic_struct == 1) ? BLOCK_FLAG_TOP_FIELD_FIRST
                                                             : BLOCK_FLAG_BOTTOM_FIELD_FIRST;
            }
            else if( p_pic->i_pts <= VLC_TS_INVALID && p_sys->i_prev_pts > VLC_TS_INVALID && p_pic->i_length )
            {
                /* interpolate from even frame */
                i_field_pts_diff = p_pic->i_length;
            }

            p_sys->b_even_frame = !p_sys->b_even_frame;
            break;
        /* Each of the following slices contains multiple fields */
        case 3:
            p_pic->i_flags |= BLOCK_FLAG_TOP_FIELD_FIRST;
            p_sys->b_even_frame = false;
            break;
        case 4:
            p_pic->i_flags |= BLOCK_FLAG_BOTTOM_FIELD_FIRST;
            p_sys->b_even_frame = false;
            break;
        case 5:
            p_pic->i_flags |= BLOCK_FLAG_TOP_FIELD_FIRST;
            break;
        case 6:
            p_pic->i_flags |= BLOCK_FLAG_BOTTOM_FIELD_FIRST;
            break;
        default:
            p_sys->b_even_frame = false;
            break;
        }
    }

    /* set dts/pts to current block timestamps */
    p_pic->i_dts = p_sys->i_frame_dts;
    p_pic->i_pts = p_sys->i_frame_pts;

    /* Fixup missing timestamps after split (multiple AU/block)*/
    if( p_pic->i_dts <= VLC_TS_INVALID )
        p_pic->i_dts = p_sys->i_prev_dts;

    /* PTS Fixup, interlaced fields (multiple AU/block) */
    if( p_pic->i_pts <= VLC_TS_INVALID && p_sys->i_time_scale )
    {
        mtime_t i_pts_delay = CLOCK_FREQ * p_sys->i_dpb_output_delay *
                              p_sys->i_num_units_in_tick / p_sys->i_time_scale;
        p_pic->i_pts = p_pic->i_dts + i_pts_delay;
        if( i_field_pts_diff >= 0 )
            p_pic->i_pts += i_field_pts_diff;
    }

    /* save for next pic fixups */
    p_sys->i_prev_dts = p_pic->i_dts;
    p_sys->i_prev_pts = p_pic->i_pts;

    p_pic->i_flags |= p_sys->slice.i_frame_type;
    p_pic->i_flags &= ~BLOCK_FLAG_PRIVATE_AUD;
    if( !p_sys->b_header )
        p_pic->i_flags |= BLOCK_FLAG_PREROLL;

    /* reset after output */
    p_sys->i_frame_dts = VLC_TS_INVALID;
    p_sys->i_frame_pts = VLC_TS_INVALID;
    p_sys->i_dpb_output_delay = 0;
    p_sys->slice.i_frame_type = 0;
    p_sys->p_frame = NULL;
    p_sys->pp_frame_last = &p_sys->p_frame;
    p_sys->b_frame_sps = false;
    p_sys->b_frame_pps = false;
    p_sys->b_slice = false;

    /* CC */
    cc_storage_commit( p_sys->p_ccs, p_pic );

    return p_pic;
}
Example #7
File: hevc.c Project: Akilklk/vlc
static block_t *ParseVCL(decoder_t *p_dec, uint8_t i_nal_type, block_t *p_frag)
{
    decoder_sys_t *p_sys = p_dec->p_sys;
    block_t *p_frame = NULL;

    const uint8_t *p_buffer = p_frag->p_buffer;
    size_t i_buffer = p_frag->i_buffer;

    if(unlikely(!hxxx_strip_AnnexB_startcode(&p_buffer, &i_buffer) || i_buffer < 3))
    {
        block_ChainAppend(&p_sys->p_frame, p_frag); /* might corrupt */
        return NULL;
    }

    bool b_first_slice_in_pic = p_buffer[2] & 0x80;
    if (b_first_slice_in_pic)
    {
        if(p_sys->p_frame)
        {
            /* Starting new frame, gather and return previous frame data */
            p_frame = block_ChainGather(p_sys->p_frame);
            p_sys->p_frame = NULL;
            p_sys->pp_frame_last = &p_sys->p_frame;
        }

        switch(i_nal_type)
        {
            case HEVC_NAL_BLA_W_LP:
            case HEVC_NAL_BLA_W_RADL:
            case HEVC_NAL_BLA_N_LP:
            case HEVC_NAL_IDR_W_RADL:
            case HEVC_NAL_IDR_N_LP:
            case HEVC_NAL_CRA:
                p_frag->i_flags |= BLOCK_FLAG_TYPE_I;
                break;

            default:
            {
                hevc_slice_segment_header_t *p_sli = hevc_decode_slice_header( p_buffer, i_buffer, true,
                                                                               p_sys->rgi_p_decsps, p_sys->rgi_p_decpps );
                if( p_sli )
                {
                    enum hevc_slice_type_e type;
                    if( hevc_get_slice_type( p_sli, &type ) )
                    {
                        if( type == HEVC_SLICE_TYPE_P )
                            p_frag->i_flags |= BLOCK_FLAG_TYPE_P;
                        else
                            p_frag->i_flags |= BLOCK_FLAG_TYPE_B;
                    }
                    hevc_rbsp_release_slice_header( p_sli );
                }
                else p_frag->i_flags |= BLOCK_FLAG_TYPE_B;
            }
            break;
        }
    }

    block_ChainLastAppend(&p_sys->pp_frame_last, p_frag);

    return p_frame;
}
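Example #7 (like Examples #3, #6, #8 and #16) avoids re-walking the chain on every fragment by keeping a pp_frame_last tail pointer that block_ChainLastAppend() advances, and it re-anchors that tail to the head pointer every time the chain is gathered or released. Below is a minimal sketch of that tail-pointer idiom, again assuming only the public VLC block helpers; the context type and function names are invented for illustration.

#include <vlc_common.h>
#include <vlc_block.h>

typedef struct
{
    block_t  *p_frame;        /* head of the pending chain */
    block_t **pp_frame_last;  /* tail: where the next fragment gets linked */
} chain_ctx_t;

static void ChainInit( chain_ctx_t *p_ctx )
{
    p_ctx->p_frame = NULL;
    p_ctx->pp_frame_last = &p_ctx->p_frame;
}

static void ChainPush( chain_ctx_t *p_ctx, block_t *p_frag )
{
    /* O(1) append: block_ChainLastAppend() links p_frag and advances the tail */
    block_ChainLastAppend( &p_ctx->pp_frame_last, p_frag );
}

static block_t *ChainTake( chain_ctx_t *p_ctx )
{
    block_t *p_out = p_ctx->p_frame ? block_ChainGather( p_ctx->p_frame ) : NULL;
    /* Re-anchor the tail after handing the chain off, exactly as the examples
     * reset pp_frame_last = &p_sys->p_frame after gathering */
    ChainInit( p_ctx );
    return p_out;
}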
Example #8
/*****************************************************************************
 * ParseMPEGBlock: Re-assemble fragments into a block containing a picture
 *****************************************************************************/
static block_t *ParseMPEGBlock( decoder_t *p_dec, block_t *p_frag )
{
    decoder_sys_t *p_sys = p_dec->p_sys;
    block_t *p_pic = NULL;

    /*
     * Check if previous picture is finished
     */
    if( ( p_sys->b_frame_slice &&
          (p_frag->p_buffer[3] == 0x00 || p_frag->p_buffer[3] > 0xaf) ) &&
          p_sys->p_seq == NULL )
    {
        /* We have a picture but without a sequence header we can't
         * do anything */
        msg_Dbg( p_dec, "waiting for sequence start" );
        if( p_sys->p_frame ) block_ChainRelease( p_sys->p_frame );
        p_sys->p_frame = NULL;
        p_sys->pp_last = &p_sys->p_frame;
        p_sys->b_frame_slice = false;

    }
    else if( p_sys->b_frame_slice &&
             (p_frag->p_buffer[3] == 0x00 || p_frag->p_buffer[3] > 0xaf) )
    {
        const bool b_eos = p_frag->p_buffer[3] == 0xb7;

        mtime_t i_duration;

        if( b_eos )
        {
            block_ChainLastAppend( &p_sys->pp_last, p_frag );
            p_frag = NULL;
        }

        p_pic = block_ChainGather( p_sys->p_frame );

        if( b_eos )
            p_pic->i_flags |= BLOCK_FLAG_END_OF_SEQUENCE;

        i_duration = (mtime_t)( 1000000 * p_sys->i_frame_rate_base /
                                p_sys->i_frame_rate );

        if( !p_sys->b_seq_progressive && p_sys->i_picture_structure != 0x03 )
        {
            i_duration /= 2;
        }

        if( p_sys->b_seq_progressive )
        {
            if( p_sys->i_top_field_first == 0 &&
                p_sys->i_repeat_first_field == 1 )
            {
                i_duration *= 2;
            }
            else if( p_sys->i_top_field_first == 1 &&
                     p_sys->i_repeat_first_field == 1 )
            {
                i_duration *= 3;
            }
        }
        else
        {
            if( p_sys->i_picture_structure == 0x03 )
            {
                if( p_sys->i_progressive_frame && p_sys->i_repeat_first_field )
                {
                    i_duration += i_duration / 2;
                }
            }
        }

        if( p_sys->b_low_delay || p_sys->i_picture_type == 0x03 )
        {
            /* Trivial case (DTS == PTS) */
            /* Correct interpolated dts when we receive a new pts/dts */
            if( p_sys->i_pts > VLC_TS_INVALID )
                p_sys->i_interpolated_dts = p_sys->i_pts;
            if( p_sys->i_dts > VLC_TS_INVALID )
                p_sys->i_interpolated_dts = p_sys->i_dts;
        }
        else
        {
            /* Correct interpolated dts when we receive a new pts/dts */
            if(p_sys->i_last_ref_pts > VLC_TS_INVALID && !p_sys->b_second_field)
                p_sys->i_interpolated_dts = p_sys->i_last_ref_pts;
            if( p_sys->i_dts > VLC_TS_INVALID )
                p_sys->i_interpolated_dts = p_sys->i_dts;

            if( !p_sys->b_second_field )
                p_sys->i_last_ref_pts = p_sys->i_pts;
        }

        p_pic->i_dts = p_sys->i_interpolated_dts;
        p_sys->i_interpolated_dts += i_duration;

        /* Set PTS only if we have a B frame or if it comes from the stream */
        if( p_sys->i_pts > VLC_TS_INVALID )
        {
            p_pic->i_pts = p_sys->i_pts;
        }
        else if( p_sys->i_picture_type == 0x03 )
        {
            p_pic->i_pts = p_pic->i_dts;
        }
        else
        {
            p_pic->i_pts = VLC_TS_INVALID;
        }

        switch ( p_sys->i_picture_type )
        {
        case 0x01:
            p_pic->i_flags |= BLOCK_FLAG_TYPE_I;
            break;
        case 0x02:
            p_pic->i_flags |= BLOCK_FLAG_TYPE_P;
            break;
        case 0x03:
            p_pic->i_flags |= BLOCK_FLAG_TYPE_B;
            break;
        }

        p_pic->i_length = p_sys->i_interpolated_dts - p_pic->i_dts;

#if 0
        msg_Dbg( p_dec, "pic: type=%d dts=%"PRId64" pts-dts=%"PRId64,
        p_sys->i_picture_type, p_pic->i_dts, p_pic->i_pts - p_pic->i_dts);
#endif

        /* Reset context */
        p_sys->p_frame = NULL;
        p_sys->pp_last = &p_sys->p_frame;
        p_sys->b_frame_slice = false;

        if( p_sys->i_picture_structure != 0x03 )
        {
            p_sys->b_second_field = !p_sys->b_second_field;
        }
        else
        {
            p_sys->b_second_field = 0;
        }

        /* CC */
        p_sys->b_cc_reset = true;
        p_sys->i_cc_pts = p_pic->i_pts;
        p_sys->i_cc_dts = p_pic->i_dts;
        p_sys->i_cc_flags = p_pic->i_flags;
    }

    if( !p_pic && p_sys->b_cc_reset )
    {
        p_sys->b_cc_reset = false;
        cc_Flush( &p_sys->cc );
    }

    if( !p_frag )
        return p_pic;
    /*
     * Check info of current fragment
     */
    if( p_frag->p_buffer[3] == 0xb8 )
    {
        /* Group start code */
        if( p_sys->p_seq &&
            p_sys->i_seq_old > p_sys->i_frame_rate/p_sys->i_frame_rate_base )
        {
            /* Useful for mpeg1: repeat sequence header every second */
            block_ChainLastAppend( &p_sys->pp_last, block_Duplicate( p_sys->p_seq ) );
            if( p_sys->p_ext )
            {
                block_ChainLastAppend( &p_sys->pp_last, block_Duplicate( p_sys->p_ext ) );
            }

            p_sys->i_seq_old = 0;
        }
    }
    else if( p_frag->p_buffer[3] == 0xb3 && p_frag->i_buffer >= 8 )
    {
        /* Sequence header code */
        static const int code_to_frame_rate[16][2] =
        {
            { 1, 1 },  /* invalid */
            { 24000, 1001 }, { 24, 1 }, { 25, 1 },       { 30000, 1001 },
            { 30, 1 },       { 50, 1 }, { 60000, 1001 }, { 60, 1 },
            /* Unofficial 15fps from Xing*/
            { 15, 1001 },
            /* Unofficial economy rates from libmpeg3 */
            { 5000, 1001 }, { 1000, 1001 }, { 12000, 1001 }, { 15000, 1001 },
            { 1, 1 },  { 1, 1 }  /* invalid */
        };

        if( p_sys->p_seq ) block_Release( p_sys->p_seq );
        if( p_sys->p_ext ) block_Release( p_sys->p_ext );

        p_sys->p_seq = block_Duplicate( p_frag );
        p_sys->i_seq_old = 0;
        p_sys->p_ext = NULL;

        p_dec->fmt_out.video.i_width =
            ( p_frag->p_buffer[4] << 4)|(p_frag->p_buffer[5] >> 4 );
        p_dec->fmt_out.video.i_height =
            ( (p_frag->p_buffer[5]&0x0f) << 8 )|p_frag->p_buffer[6];
        p_sys->i_aspect_ratio_info = p_frag->p_buffer[7] >> 4;

        /* TODO: MPEG1 aspect ratio */

        p_sys->i_frame_rate = code_to_frame_rate[p_frag->p_buffer[7]&0x0f][0];
        p_sys->i_frame_rate_base =
            code_to_frame_rate[p_frag->p_buffer[7]&0x0f][1];

        p_dec->fmt_out.video.i_frame_rate = p_sys->i_frame_rate;
        p_dec->fmt_out.video.i_frame_rate_base = p_sys->i_frame_rate_base;

        p_sys->b_seq_progressive = true;
        p_sys->b_low_delay = true;

        if ( !p_sys->b_inited )
        {
            msg_Dbg( p_dec, "size %dx%d fps=%.3f",
                 p_dec->fmt_out.video.i_width, p_dec->fmt_out.video.i_height,
                 p_sys->i_frame_rate / (float)p_sys->i_frame_rate_base );
            p_sys->b_inited = 1;
        }
    }
    else if( p_frag->p_buffer[3] == 0xb5 )
Example #9
File: h264.c Project: sdelmas/SDesk
static block_t *ParseNALBlock( decoder_t *p_dec, block_t *p_frag )
{
    decoder_sys_t *p_sys = p_dec->p_sys;
    block_t *p_pic = NULL;

    const int i_nal_ref_idc = (p_frag->p_buffer[3] >> 5)&0x03;
    const int i_nal_type = p_frag->p_buffer[3]&0x1f;

    if( p_sys->b_slice && !p_sys->b_sps )
    {
        block_ChainRelease( p_sys->p_frame );
        msg_Warn( p_dec, "waiting for SPS" );

        /* Reset context */
        p_sys->p_frame = NULL;
        p_sys->b_slice = VLC_FALSE;
    }

    if( !p_sys->b_sps &&
            i_nal_type >= NAL_SLICE && i_nal_type <= NAL_SLICE_IDR )
    {
        p_sys->b_slice = VLC_TRUE;
        /* Fragment will be discarded later on */
    }
    else if( i_nal_type >= NAL_SLICE && i_nal_type <= NAL_SLICE_IDR )
    {
        uint8_t *dec;
        int i_dec, i_first_mb, i_slice_type, i_frame_num, i_pic_flags = 0;
        vlc_bool_t b_pic = VLC_FALSE;
        bs_t s;

        /* do not convert the whole frame */
        nal_get_decoded( &dec, &i_dec, &p_frag->p_buffer[4],
                         __MIN( p_frag->i_buffer - 4, 60 ) );
        bs_init( &s, dec, i_dec );

        /* first_mb_in_slice */
        i_first_mb = bs_read_ue( &s );

        /* slice_type */
        switch( (i_slice_type = bs_read_ue( &s )) )
        {
        case 0:
        case 5:
            i_pic_flags = BLOCK_FLAG_TYPE_P;
            break;
        case 1:
        case 6:
            i_pic_flags = BLOCK_FLAG_TYPE_B;
            break;
        case 2:
        case 7:
            i_pic_flags = BLOCK_FLAG_TYPE_I;
            break;
        case 3:
        case 8: /* SP */
            i_pic_flags = BLOCK_FLAG_TYPE_P;
            break;
        case 4:
        case 9:
            i_pic_flags = BLOCK_FLAG_TYPE_I;
            break;
        }

        /* pic_parameter_set_id */
        bs_read_ue( &s );
        /* frame_num */
        i_frame_num = bs_read( &s, p_sys->i_log2_max_frame_num + 4 );

        /* Detection of the first VCL NAL unit of a primary coded picture
         * (cf. 7.4.1.2.4) */
        if( i_frame_num != p_sys->i_frame_num ||
                ( (i_nal_ref_idc != p_sys->i_nal_ref_idc) &&
                  (!i_nal_ref_idc || !p_sys->i_nal_ref_idc) ) )
        {
            b_pic = VLC_TRUE;
        }
        p_sys->i_frame_num = i_frame_num;
        p_sys->i_nal_ref_idc = i_nal_ref_idc;

        if( !p_sys->b_frame_mbs_only )
        {
            /* field_pic_flag */
            if( bs_read( &s, 1 ) )
            {
                /* bottom_field_flag */
                bs_read( &s, 1 );
            }
        }

        if( i_nal_type == NAL_SLICE_IDR )
        {
            /* idr_pic_id */
            int i_idr_pic_id = bs_read_ue( &s );
            if( p_sys->i_nal_type != i_nal_type ) b_pic = VLC_TRUE;
            if( p_sys->i_idr_pic_id != i_idr_pic_id ) b_pic = VLC_TRUE;
            p_sys->i_idr_pic_id = i_idr_pic_id;
        }
        p_sys->i_nal_type = i_nal_type;

        if( b_pic && p_sys->b_slice )
        {
            p_pic = block_ChainGather( p_sys->p_frame );
            p_pic->i_dts = p_sys->i_dts;
            p_pic->i_pts = p_sys->i_pts;
            p_pic->i_length = 0;    /* FIXME */
            p_pic->i_flags = p_sys->i_flags;

            /* Reset context */
            p_sys->p_frame = NULL;
            p_sys->b_slice = VLC_FALSE;
        }

        p_sys->b_slice = VLC_TRUE;
        p_sys->i_flags = i_pic_flags;
        p_sys->i_dts   = p_frag->i_dts;
        p_sys->i_pts   = p_frag->i_pts;

        free( dec );
    }
    else if( i_nal_type == NAL_SPS )
    {
        uint8_t *dec;
        int     i_dec;
        bs_t s;
        int i_tmp;

        msg_Dbg( p_dec, "found NAL_SPS" );

        p_sys->b_sps = VLC_TRUE;

        nal_get_decoded( &dec, &i_dec, &p_frag->p_buffer[4],
                         p_frag->i_buffer - 4 );

        bs_init( &s, dec, i_dec );
        /* Skip profile(8), constraint_set012, reserver(5), level(8) */
        bs_skip( &s, 8 + 1+1+1 + 5 + 8 );
        /* sps id */
        bs_read_ue( &s );
        /* read i_log2_max_frame_num */
        p_sys->i_log2_max_frame_num = bs_read_ue( &s );
        /* Read poc_type */
        i_tmp = bs_read_ue( &s );
        if( i_tmp == 0 )
        {
            /* skip i_log2_max_poc_lsb */
            bs_read_ue( &s );
        }
        else if( i_tmp == 1 )
        {
            int i_cycle;
            /* skip b_delta_pic_order_always_zero */
            bs_skip( &s, 1 );
            /* skip i_offset_for_non_ref_pic */
            bs_read_se( &s );
            /* skip i_offset_for_top_to_bottom_field */
            bs_read_se( &s );
            /* read i_num_ref_frames_in_poc_cycle */
            i_cycle = bs_read_ue( &s );
            if( i_cycle > 256 ) i_cycle = 256;
            while( i_cycle-- > 0 )
            {
                /* skip i_offset_for_ref_frame */
                bs_read_se( &s );
            }
        }
        /* i_num_ref_frames */
        bs_read_ue( &s );
        /* b_gaps_in_frame_num_value_allowed */
        bs_skip( &s, 1 );

        /* Read size */
        p_dec->fmt_out.video.i_width  = 16 * ( bs_read_ue( &s ) + 1 );
        p_dec->fmt_out.video.i_height = 16 * ( bs_read_ue( &s ) + 1 );

        /* b_frame_mbs_only */
        p_sys->b_frame_mbs_only = bs_read( &s, 1 );
        if( p_sys->b_frame_mbs_only == 0 )
        {
            bs_skip( &s, 1 );
        }
        /* b_direct8x8_inference */
        bs_skip( &s, 1 );

        /* crop */
        i_tmp = bs_read( &s, 1 );
        if( i_tmp )
        {
            /* left */
            bs_read_ue( &s );
            /* right */
            bs_read_ue( &s );
            /* top */
            bs_read_ue( &s );
            /* bottom */
            bs_read_ue( &s );
        }

        /* vui */
        i_tmp = bs_read( &s, 1 );
        if( i_tmp )
        {
            /* read the aspect ratio part if any FIXME check it */
            i_tmp = bs_read( &s, 1 );
            if( i_tmp )
            {
                static const struct {
                    int w, h;
                } sar[14] =
                {
                    { 0,   0 }, { 1,   1 }, { 12, 11 }, { 10, 11 },
                    { 16, 11 }, { 40, 33 }, { 24, 11 }, { 20, 11 },
                    { 32, 11 }, { 80, 33 }, { 18, 11 }, { 15, 11 },
                    { 64, 33 }, { 160,99 },
                };
                int i_sar = bs_read( &s, 8 );
                int w, h;

                if( i_sar < 14 )
                {
                    w = sar[i_sar].w;
                    h = sar[i_sar].h;
                }
                else
                {
                    w = bs_read( &s, 16 );
                    h = bs_read( &s, 16 );
                }
                p_dec->fmt_out.video.i_aspect =
                    VOUT_ASPECT_FACTOR * w / h * p_dec->fmt_out.video.i_width /
                    p_dec->fmt_out.video.i_height;
            }
        }

        free( dec );
    }
    else if( i_nal_type == NAL_PPS )
    {
        bs_t s;
        bs_init( &s, &p_frag->p_buffer[4], p_frag->i_buffer - 4 );

        /* TODO */
        msg_Dbg( p_dec, "found NAL_PPS" );
    }

    /* Append the block */
    block_ChainAppend( &p_sys->p_frame, p_frag );

    return p_pic;
}
Example #10
File: soxr.c Project: mstorsjo/vlc
static block_t *
Resample( filter_t *p_filter, block_t *p_in )
{
    filter_sys_t *p_sys = p_filter->p_sys;
    const vlc_tick_t i_pts = p_in->i_pts;

    if( p_sys->vr_soxr )
    {
        /* "audio resampler" with variable ratio: use the fixed resampler when
         * the ratio is the same as the fixed one, otherwise use the variable
         * resampler. */

        soxr_t soxr;
        block_t *p_flushed_out = NULL, *p_out = NULL;
        const double f_ratio = p_filter->fmt_out.audio.i_rate
                             / (double) p_filter->fmt_in.audio.i_rate;
        const size_t i_olen = SoXR_GetOutLen( p_in->i_nb_samples, f_ratio );

        if( f_ratio != p_sys->f_fixed_ratio )
        {
            /* using variable resampler */
            soxr_set_io_ratio( p_sys->vr_soxr, 1 / f_ratio, i_olen );
            soxr = p_sys->vr_soxr;
        }
        else if( f_ratio == 1.0f )
        {
            /* not using any resampler */
            soxr = NULL;
            p_out = p_in;
        }
        else
        {
            /* using fixed resampler */
            soxr = p_sys->soxr;
        }

        /* If the new soxr is different than the last one, flush it */
        if( p_sys->last_soxr && soxr != p_sys->last_soxr && p_sys->i_last_olen )
        {
            p_flushed_out = SoXR_Resample( p_filter, p_sys->last_soxr,
                                           NULL, p_sys->i_last_olen );
            if( soxr )
                msg_Dbg( p_filter, "Using '%s' engine", soxr_engine( soxr ) );
        }

        if( soxr )
        {
            assert( !p_out );
            p_out = SoXR_Resample( p_filter, soxr, p_in, i_olen );
            if( !p_out )
                goto error;
        }

        if( p_flushed_out )
        {
            /* Prepend the flushed output data to p_out */
            const unsigned i_nb_samples = p_flushed_out->i_nb_samples
                                        + p_out->i_nb_samples;

            block_ChainAppend( &p_flushed_out, p_out );
            p_out = block_ChainGather( p_flushed_out );
            if( !p_out )
                goto error;
            p_out->i_nb_samples = i_nb_samples;
        }
        p_out->i_pts = i_pts;
        return p_out;
    }
    else
    {
        /* "audio converter" with fixed ratio */

        const size_t i_olen = SoXR_GetOutLen( p_in->i_nb_samples,
                                              p_sys->f_fixed_ratio );
        block_t *p_out = SoXR_Resample( p_filter, p_sys->soxr, p_in, i_olen );
        if( p_out )
            p_out->i_pts = i_pts;
        return p_out;
    }
error:
    block_Release( p_in );
    return NULL;
}
Example #11
File: h264.c Project: 5UN5H1N3/vlc
static block_t *OutputPicture( decoder_t *p_dec )
{
    decoder_sys_t *p_sys = p_dec->p_sys;
    block_t *p_pic;

    if ( !p_sys->b_header && p_sys->i_recovery_frames != -1 )
    {
        if( p_sys->i_recovery_frames == 0 )
        {
            msg_Dbg( p_dec, "Recovery from SEI recovery point complete" );
            p_sys->b_header = true;
        }
        --p_sys->i_recovery_frames;
    }

    if( !p_sys->b_header && p_sys->i_recovery_frames == -1 &&
         p_sys->slice.i_frame_type != BLOCK_FLAG_TYPE_I)
        return NULL;

    const bool b_sps_pps_i = p_sys->slice.i_frame_type == BLOCK_FLAG_TYPE_I &&
                             p_sys->b_sps &&
                             p_sys->b_pps;
    if( b_sps_pps_i || p_sys->b_frame_sps || p_sys->b_frame_pps )
    {
        block_t *p_head = NULL;
        if( p_sys->p_frame->i_flags & BLOCK_FLAG_PRIVATE_AUD )
        {
            p_head = p_sys->p_frame;
            p_sys->p_frame = p_sys->p_frame->p_next;
        }

        block_t *p_list = NULL;
        for( int i = 0; i < SPS_MAX && (b_sps_pps_i || p_sys->b_frame_sps); i++ )
        {
            if( p_sys->pp_sps[i] )
                block_ChainAppend( &p_list, block_Duplicate( p_sys->pp_sps[i] ) );
        }
        for( int i = 0; i < PPS_MAX && (b_sps_pps_i || p_sys->b_frame_pps); i++ )
        {
            if( p_sys->pp_pps[i] )
                block_ChainAppend( &p_list, block_Duplicate( p_sys->pp_pps[i] ) );
        }
        if( b_sps_pps_i && p_list )
            p_sys->b_header = true;

        if( p_head )
            p_head->p_next = p_list;
        else
            p_head = p_list;
        block_ChainAppend( &p_head, p_sys->p_frame );

        p_pic = block_ChainGather( p_head );
    }
    else
    {
        p_pic = block_ChainGather( p_sys->p_frame );
    }
    p_pic->i_dts = p_sys->i_frame_dts;
    p_pic->i_pts = p_sys->i_frame_pts;
    p_pic->i_length = 0;    /* FIXME */
    p_pic->i_flags |= p_sys->slice.i_frame_type;
    p_pic->i_flags &= ~BLOCK_FLAG_PRIVATE_AUD;
    if( !p_sys->b_header )
        p_pic->i_flags |= BLOCK_FLAG_PREROLL;

    p_sys->slice.i_frame_type = 0;
    p_sys->p_frame = NULL;
    p_sys->i_frame_dts = VLC_TS_INVALID;
    p_sys->i_frame_pts = VLC_TS_INVALID;
    p_sys->b_frame_sps = false;
    p_sys->b_frame_pps = false;
    p_sys->b_slice = false;

    /* CC */
    p_sys->i_cc_pts = p_pic->i_pts;
    p_sys->i_cc_dts = p_pic->i_dts;
    p_sys->i_cc_flags = p_pic->i_flags;

    p_sys->cc = p_sys->cc_next;
    cc_Flush( &p_sys->cc_next );

    return p_pic;
}
Example #12
File: h264.c Project: mark5242/vlc
static block_t *OutputPicture( decoder_t *p_dec )
{
    decoder_sys_t *p_sys = p_dec->p_sys;
    block_t *p_pic;

    if ( !p_sys->b_header && p_sys->i_recovery_frames != -1 )
    {
        if( p_sys->i_recovery_frames == 0 )
        {
            msg_Dbg( p_dec, "Recovery from SEI recovery point complete" );
            p_sys->b_header = true;
        }
        --p_sys->i_recovery_frames;
    }

    if( !p_sys->b_header && p_sys->i_recovery_frames == -1 &&
         p_sys->slice.i_frame_type != BLOCK_FLAG_TYPE_I)
        return NULL;

    const bool b_sps_pps_i = p_sys->slice.i_frame_type == BLOCK_FLAG_TYPE_I &&
                             p_sys->b_sps &&
                             p_sys->b_pps;
    if( b_sps_pps_i || p_sys->b_frame_sps || p_sys->b_frame_pps )
    {
        block_t *p_head = NULL;
        if( p_sys->p_frame->i_flags & BLOCK_FLAG_PRIVATE_AUD )
        {
            p_head = p_sys->p_frame;
            p_sys->p_frame = p_sys->p_frame->p_next;
        }

        block_t *p_list = NULL;
        for( int i = 0; i < SPS_MAX && (b_sps_pps_i || p_sys->b_frame_sps); i++ )
        {
            if( p_sys->pp_sps[i] )
                block_ChainAppend( &p_list, block_Duplicate( p_sys->pp_sps[i] ) );
        }
        for( int i = 0; i < PPS_MAX && (b_sps_pps_i || p_sys->b_frame_pps); i++ )
        {
            if( p_sys->pp_pps[i] )
                block_ChainAppend( &p_list, block_Duplicate( p_sys->pp_pps[i] ) );
        }
        if( b_sps_pps_i && p_list )
            p_sys->b_header = true;

        if( p_head )
            p_head->p_next = p_list;
        else
            p_head = p_list;
        block_ChainAppend( &p_head, p_sys->p_frame );

        p_pic = block_ChainGather( p_head );
    }
    else
    {
        p_pic = block_ChainGather( p_sys->p_frame );
    }
    p_pic->i_dts = p_sys->i_frame_dts;
    p_pic->i_pts = p_sys->i_frame_pts;
    p_pic->i_length = 0;    /* FIXME */
    p_pic->i_flags |= p_sys->slice.i_frame_type;
    p_pic->i_flags &= ~BLOCK_FLAG_PRIVATE_AUD;
    if( !p_sys->b_header )
        p_pic->i_flags |= BLOCK_FLAG_PREROLL;

    if( p_sys->b_frame_mbs_only == 0 && p_sys->b_pic_struct_present_flag )
    {
        switch( p_sys->i_pic_struct )
        {
        case 1:
            if( p_sys->i_fields_dts == 2 )
                p_pic->i_flags |= BLOCK_FLAG_TOP_FIELD_FIRST;
            else
                p_pic->i_flags |= BLOCK_FLAG_BOTTOM_FIELD_FIRST;
            break;
        case 3:
        case 5:
            p_pic->i_flags |= BLOCK_FLAG_TOP_FIELD_FIRST;
            break;
        case 2:
            if( p_sys->i_fields_dts == 2 )
                p_pic->i_flags |= BLOCK_FLAG_BOTTOM_FIELD_FIRST;
            else
                p_pic->i_flags |= BLOCK_FLAG_TOP_FIELD_FIRST;
            break;
        case 4:
        case 6:
            p_pic->i_flags |= BLOCK_FLAG_BOTTOM_FIELD_FIRST;
            break;
        default:
            break;
        }
    }

    p_sys->i_fields_dts -= (1 + p_sys->b_frame_mbs_only);
    if( p_sys->i_fields_dts <= 0 )
    {
        p_sys->i_frame_dts = VLC_TS_INVALID;
        p_sys->i_frame_pts = VLC_TS_INVALID;
    }
    else if( p_sys->b_timing_info_present_flag && p_sys->i_time_scale )
    {
        p_sys->i_frame_pts += CLOCK_FREQ * p_sys->i_num_units_in_tick / p_sys->i_time_scale;
    }
    else p_sys->i_frame_pts = VLC_TS_INVALID;

    p_sys->slice.i_frame_type = 0;
    p_sys->p_frame = NULL;
    p_sys->b_frame_sps = false;
    p_sys->b_frame_pps = false;
    p_sys->b_slice = false;

    /* CC */
    p_sys->i_cc_pts = p_pic->i_pts;
    p_sys->i_cc_dts = p_pic->i_dts;
    p_sys->i_cc_flags = p_pic->i_flags;

    p_sys->cc = p_sys->cc_next;
    cc_Flush( &p_sys->cc_next );

    return p_pic;
}
Example #13
/*****************************************************************************
 * ParseMPEGBlock: Re-assemble fragments into a block containing a picture
 *****************************************************************************/
static block_t *ParseMPEGBlock( decoder_t *p_dec, block_t *p_frag )
{
    decoder_sys_t *p_sys = p_dec->p_sys;
    block_t *p_pic = NULL;

    if( p_frag->p_buffer[3] == 0xB0 || p_frag->p_buffer[3] == 0xB1 || p_frag->p_buffer[3] == 0xB2 )
    {   /* VOS and USERDATA */
#if 0
        /* Remove VOS start/end code from the original stream */
        block_Release( p_frag );
#else
        /* Append the block for now since ts/ps muxers rely on VOL
         * being present in the stream */
        block_ChainLastAppend( &p_sys->pp_last, p_frag );
#endif
        return NULL;
    }
    if( p_frag->p_buffer[3] >= 0x20 && p_frag->p_buffer[3] <= 0x2f )
    {
        /* Copy the complete VOL */
        if( (size_t)p_dec->fmt_out.i_extra != p_frag->i_buffer )
        {
            p_dec->fmt_out.p_extra =
                xrealloc( p_dec->fmt_out.p_extra, p_frag->i_buffer );
            p_dec->fmt_out.i_extra = p_frag->i_buffer;
        }
        memcpy( p_dec->fmt_out.p_extra, p_frag->p_buffer, p_frag->i_buffer );
        ParseVOL( p_dec, &p_dec->fmt_out,
                  p_dec->fmt_out.p_extra, p_dec->fmt_out.i_extra );

#if 0
        /* Remove from the original stream */
        block_Release( p_frag );
#else
        /* Append the block for now since ts/ps muxers rely on VOL
         * being present in the stream */
        block_ChainLastAppend( &p_sys->pp_last, p_frag );
#endif
        return NULL;
    }
    else
    {
        if( !p_dec->fmt_out.i_extra )
        {
            msg_Warn( p_dec, "waiting for VOL" );
            block_Release( p_frag );
            return NULL;
        }

        /* Append the block */
        block_ChainLastAppend( &p_sys->pp_last, p_frag );
    }

    if( p_frag->p_buffer[3] == 0xb6 &&
        ParseVOP( p_dec, p_frag ) == VLC_SUCCESS )
    {
        /* We are dealing with a VOP */
        p_pic = block_ChainGather( p_sys->p_frame );
        p_pic->i_flags = p_sys->i_flags;
        p_pic->i_pts = p_sys->i_interpolated_pts;
        p_pic->i_dts = p_sys->i_interpolated_dts;
        if ( p_dec->fmt_out.video.i_cpb_buffer && p_dec->fmt_out.i_bitrate )
        {
            mtime_t i_cpb_delay = p_sys->i_vbv_occupancy * INT64_C(1000000)
                                   / p_dec->fmt_out.i_bitrate;
            p_pic->i_delay = i_cpb_delay;
        }
        else
            p_pic->i_delay = DEFAULT_DELAY * 1000;

        /* Reset context */
        p_sys->p_frame = NULL;
        p_sys->pp_last = &p_sys->p_frame;
    }

    return p_pic;
}
Example #14
static block_t *Reassemble( decoder_t *p_dec, block_t *p_block )
{
    decoder_sys_t *p_sys = p_dec->p_sys;
    uint8_t *p_buffer;
    uint16_t i_expected_image;
    uint8_t  i_packet, i_expected_packet;

    if( p_block->i_flags & (BLOCK_FLAG_DISCONTINUITY|BLOCK_FLAG_CORRUPTED) )
    {
        block_Release( p_block );
        return NULL;
    }

    if( p_block->i_buffer < SPU_HEADER_LEN )
    {
        msg_Dbg( p_dec, "invalid packet header (size %zu < %u)" ,
                 p_block->i_buffer, SPU_HEADER_LEN );
        block_Release( p_block );
        return NULL;
    }

    p_buffer = p_block->p_buffer;

    if( p_sys->i_state == SUBTITLE_BLOCK_EMPTY )
    {
        i_expected_image  = p_sys->i_image + 1;
        i_expected_packet = 0;
    }
    else
    {
        i_expected_image  = p_sys->i_image;
        i_expected_packet = p_sys->i_packet + 1;
    }

    /* The dummy ES that the menu selection uses has a 0x70 at
       the head which we need to strip off. */
    p_buffer += 2;

    if( *p_buffer & 0x80 )
    {
        p_sys->i_state = SUBTITLE_BLOCK_COMPLETE;
        i_packet       = *p_buffer++ & 0x7F;
    }
    else
    {
        p_sys->i_state = SUBTITLE_BLOCK_PARTIAL;
        i_packet       = *p_buffer++;
    }

    p_sys->i_image = GETINT16(p_buffer);

    if( p_sys->i_image != i_expected_image )
    {
        msg_Warn( p_dec, "expected subtitle image %u but found %u",
                  i_expected_image, p_sys->i_image );
    }

    if( i_packet != i_expected_packet )
    {
        msg_Warn( p_dec, "expected subtitle image packet %u but found %u",
                  i_expected_packet, i_packet );
    }

    p_block->p_buffer += SPU_HEADER_LEN;
    p_block->i_buffer -= SPU_HEADER_LEN;

    p_sys->i_packet = i_packet;
    /* First packet in the subtitle block */
    if( !p_sys->i_packet ) ParseHeader( p_dec, p_block );

    block_ChainAppend( &p_sys->p_spu, p_block );

    if( p_sys->i_state == SUBTITLE_BLOCK_COMPLETE )
    {
        block_t *p_spu = block_ChainGather( p_sys->p_spu );

        if( p_spu->i_buffer != p_sys->i_spu_size )
        {
            msg_Warn( p_dec, "subtitle packets size=%zu should be %zu",
                      p_spu->i_buffer, p_sys->i_spu_size );
        }

        dbg_print( "subtitle packet complete, size=%zu", p_spu->i_buffer );

        p_sys->i_state = SUBTITLE_BLOCK_EMPTY;
        p_sys->p_spu = 0;
        return p_spu;
    }

    return NULL;
}
Example #15
/* ParseIDU: parse an Independent Decoding Unit */
static block_t *ParseIDU( decoder_t *p_dec, bool *pb_used_ts, block_t *p_frag )
{
    decoder_sys_t *p_sys = p_dec->p_sys;
    block_t *p_pic;
    const idu_type_t idu = (const idu_type_t)(p_frag->p_buffer[3]);			// sunqueen modify

    *pb_used_ts = false;
    if( !p_sys->b_sequence_header && idu != IDU_TYPE_SEQUENCE_HEADER )
    {
        msg_Warn( p_dec, "waiting for sequence header" );
        block_Release( p_frag );
        return NULL;
    }
    if( p_sys->b_sequence_header && !p_sys->b_entry_point && idu != IDU_TYPE_ENTRY_POINT )
    {
        msg_Warn( p_dec, "waiting for entry point" );
        block_Release( p_frag );
        return NULL;
    }
    /* TODO we do not gather ENTRY_POINT and SEQUENCE_DATA user data
     * but it should not be a problem for the decoder */

    /* Have we completed a frame? */
    p_pic = NULL;
    if( p_sys->b_frame &&
        idu != IDU_TYPE_FRAME_USER_DATA &&
        idu != IDU_TYPE_FIELD && idu != IDU_TYPE_FIELD_USER_DATA &&
        idu != IDU_TYPE_SLICE && idu != IDU_TYPE_SLICE_USER_DATA &&
        idu != IDU_TYPE_END_OF_SEQUENCE )
    {
        /* Prepend SH and EP on I */
        if( p_sys->p_frame->i_flags & BLOCK_FLAG_TYPE_I )
        {
            block_t *p_list = block_Duplicate( p_sys->sh.p_sh );
            block_ChainAppend( &p_list, block_Duplicate( p_sys->ep.p_ep ) );
            block_ChainAppend( &p_list, p_sys->p_frame );

            p_list->i_flags = p_sys->p_frame->i_flags;

            p_sys->p_frame = p_list;
        }

        /* */
        p_pic = block_ChainGather( p_sys->p_frame );
        p_pic->i_dts = p_sys->i_frame_dts;
        p_pic->i_pts = p_sys->i_frame_pts;

        /* */
        if( p_pic->i_dts > VLC_TS_INVALID )
            p_sys->i_interpolated_dts = p_pic->i_dts;

        /* We can interpolate dts/pts only if we have a frame rate */
        if( p_dec->fmt_out.video.i_frame_rate != 0 && p_dec->fmt_out.video.i_frame_rate_base != 0 )
        {
            if( p_sys->i_interpolated_dts > VLC_TS_INVALID )
                p_sys->i_interpolated_dts += INT64_C(1000000) *
                                             p_dec->fmt_out.video.i_frame_rate_base /
                                             p_dec->fmt_out.video.i_frame_rate;

            //msg_Dbg( p_dec, "-------------- XXX0 dts=%"PRId64" pts=%"PRId64" interpolated=%"PRId64,
            //         p_pic->i_dts, p_pic->i_pts, p_sys->i_interpolated_dts );
            if( p_pic->i_dts <= VLC_TS_INVALID )
                p_pic->i_dts = p_sys->i_interpolated_dts;

            if( p_pic->i_pts <= VLC_TS_INVALID )
            {
                if( !p_sys->sh.b_has_bframe || (p_pic->i_flags & BLOCK_FLAG_TYPE_B ) )
                    p_pic->i_pts = p_pic->i_dts;
                /* TODO compute pts for other case */
            }
        }

        //msg_Dbg( p_dec, "-------------- dts=%"PRId64" pts=%"PRId64, p_pic->i_dts, p_pic->i_pts );

        /* Reset context */
        p_sys->b_frame = false;
        p_sys->i_frame_dts = VLC_TS_INVALID;
        p_sys->i_frame_pts = VLC_TS_INVALID;
        p_sys->p_frame = NULL;
        p_sys->pp_last = &p_sys->p_frame;
    }

    /*  */
    if( p_sys->i_frame_dts <= VLC_TS_INVALID && p_sys->i_frame_pts <= VLC_TS_INVALID )
    {
        p_sys->i_frame_dts = p_frag->i_dts;
        p_sys->i_frame_pts = p_frag->i_pts;
        *pb_used_ts = true;
    }

    /* We will add back SH and EP on I frames */
    block_t *p_release = NULL;
    if( idu != IDU_TYPE_SEQUENCE_HEADER && idu != IDU_TYPE_ENTRY_POINT )
        block_ChainLastAppend( &p_sys->pp_last, p_frag );
    else
        p_release = p_frag;

    /* Parse IDU */
    if( idu == IDU_TYPE_SEQUENCE_HEADER )
    {
        es_format_t *p_es = &p_dec->fmt_out;
        bs_t s;
        int i_profile;
        uint8_t ridu[32];
        int     i_ridu = sizeof(ridu);

        /* */
        if( p_sys->sh.p_sh )
            block_Release( p_sys->sh.p_sh );
        p_sys->sh.p_sh = block_Duplicate( p_frag );

        /* Extract the raw IDU */
        DecodeRIDU( ridu, &i_ridu, &p_frag->p_buffer[4], p_frag->i_buffer - 4 );

        /* Auto detect VC-1_SPMP_PESpacket_PayloadFormatHeader (SMPTE RP 227) for simple/main profile
         * TODO find a test case and validate it */
        if( i_ridu > 4 && (ridu[0]&0x80) == 0 ) /* for advanced profile, the first bit is 1 */
        {
            video_format_t *p_v = &p_dec->fmt_in.video;
            const size_t i_potential_width  = GetWBE( &ridu[0] );
            const size_t i_potential_height = GetWBE( &ridu[2] );

            if( i_potential_width >= 2  && i_potential_width <= 8192 &&
                i_potential_height >= 2 && i_potential_height <= 8192 )
            {
                if( ( p_v->i_width <= 0 && p_v->i_height <= 0  ) ||
                    ( p_v->i_width  == i_potential_width &&  p_v->i_height == i_potential_height ) )
                {
                    static const uint8_t startcode[4] = { 0x00, 0x00, 0x01, IDU_TYPE_SEQUENCE_HEADER };
                    p_es->video.i_width  = i_potential_width;
                    p_es->video.i_height = i_potential_height;

                    /* Remove it */
                    p_frag->p_buffer += 4;
                    p_frag->i_buffer -= 4;
                    memcpy( p_frag->p_buffer, startcode, sizeof(startcode) );
                }
            }
        }

        /* Parse it */
        bs_init( &s, ridu, i_ridu );
        i_profile = bs_read( &s, 2 );
        if( i_profile == 3 )
        {
            const int i_level = bs_read( &s, 3 );

            /* Advanced profile */
            p_sys->sh.b_advanced_profile = true;
            p_sys->sh.b_range_reduction = false;
            p_sys->sh.b_has_bframe = true;

            bs_skip( &s, 2+3+5+1 ); // chroma format + frame rate Q + bit rate Q + postprocflag

            p_es->video.i_width  = 2*bs_read( &s, 12 )+2;
            p_es->video.i_height = 2*bs_read( &s, 12 )+2;

            if( !p_sys->b_sequence_header )
                msg_Dbg( p_dec, "found sequence header for advanced profile level L%d resolution %dx%d",
                         i_level, p_es->video.i_width, p_es->video.i_height);

            bs_skip( &s, 1 );// pulldown
            p_sys->sh.b_interlaced = bs_read( &s, 1 );
            bs_skip( &s, 1 );// frame counter
            p_sys->sh.b_frame_interpolation = bs_read( &s, 1 );
            bs_skip( &s, 1 );       // Reserved
            bs_skip( &s, 1 );       // Psf

            if( bs_read( &s, 1 ) )  /* Display extension */
            {
                const int i_display_width  = bs_read( &s, 14 )+1;
                const int i_display_height = bs_read( &s, 14 )+1;

                p_es->video.i_sar_num = i_display_width  * p_es->video.i_height;
                p_es->video.i_sar_den = i_display_height * p_es->video.i_width;

                if( !p_sys->b_sequence_header )
                    msg_Dbg( p_dec, "display size %dx%d", i_display_width, i_display_height );

                if( bs_read( &s, 1 ) )  /* Pixel aspect ratio (PAR/SAR) */
                {
                    static const int p_ar[16][2] = {
                        { 0, 0}, { 1, 1}, {12,11}, {10,11}, {16,11}, {40,33},
                        {24,11}, {20,11}, {32,11}, {80,33}, {18,11}, {15,11},
                        {64,33}, {160,99},{ 0, 0}, { 0, 0}
                    };
                    int i_ar = bs_read( &s, 4 );
                    unsigned i_ar_w, i_ar_h;

                    if( i_ar == 15 )
                    {
                        i_ar_w = bs_read( &s, 8 );
                        i_ar_h = bs_read( &s, 8 );
                    }
                    else
                    {
                        i_ar_w = p_ar[i_ar][0];
                        i_ar_h = p_ar[i_ar][1];
                    }
                    vlc_ureduce( &i_ar_w, &i_ar_h, i_ar_w, i_ar_h, 0 );
                    if( !p_sys->b_sequence_header )
                        msg_Dbg( p_dec, "aspect ratio %d:%d", i_ar_w, i_ar_h );
                }
            }
            if( bs_read( &s, 1 ) )  /* Frame rate */
            {
                int i_fps_num = 0;
                int i_fps_den = 0;
                if( bs_read( &s, 1 ) )
                {
                    i_fps_num = bs_read( &s, 16 )+1;
                    i_fps_den = 32;
                }
                else
                {
                    const int i_nr = bs_read( &s, 8 );
                    const int i_dn = bs_read( &s, 4 );

                    switch( i_nr )
                    {
                    case 1: i_fps_num = 24000; break;
                    case 2: i_fps_num = 25000; break;
                    case 3: i_fps_num = 30000; break;
                    case 4: i_fps_num = 50000; break;
                    case 5: i_fps_num = 60000; break;
                    case 6: i_fps_num = 48000; break;
                    case 7: i_fps_num = 72000; break;
                    }
                    switch( i_dn )
                    {
                    case 1: i_fps_den = 1000; break;
                    case 2: i_fps_den = 1001; break;
                    }
                }
                if( i_fps_num != 0 && i_fps_den != 0 )
                    vlc_ureduce( &p_es->video.i_frame_rate, &p_es->video.i_frame_rate_base, i_fps_num, i_fps_den, 0 );

                if( !p_sys->b_sequence_header )
                    msg_Dbg( p_dec, "frame rate %d/%d", p_es->video.i_frame_rate, p_es->video.i_frame_rate_base );
            }
        }
        else
        {
            /* Simple and main profile */
            p_sys->sh.b_advanced_profile = false;
            p_sys->sh.b_interlaced = false;

            if( !p_sys->b_sequence_header )
                msg_Dbg( p_dec, "found sequence header for %s profile", i_profile == 0 ? "simple" : "main" );

            bs_skip( &s, 2+3+5+1+1+     // reserved + frame rate Q + bit rate Q + loop filter + reserved
                         1+1+1+1+2+     // multiresolution + reserved + fast uv mc + extended mv + dquant
                         1+1+1+1 );     // variable size transform + reserved + overlap + sync marker
            p_sys->sh.b_range_reduction = bs_read( &s, 1 );
            if( bs_read( &s, 3 ) > 0 )
                p_sys->sh.b_has_bframe = true;
            else
                p_sys->sh.b_has_bframe = false;
            bs_skip( &s, 2 );           // quantizer

            p_sys->sh.b_frame_interpolation = bs_read( &s, 1 );
        }
        p_sys->b_sequence_header = true;
        BuildExtraData( p_dec );
    }
    else if( idu == IDU_TYPE_ENTRY_POINT )
    {
        if( p_sys->ep.p_ep )
            block_Release( p_sys->ep.p_ep );
        p_sys->ep.p_ep = block_Duplicate( p_frag );

        if( !p_sys->b_entry_point )
            msg_Dbg( p_dec, "found entry point" );

        p_sys->b_entry_point = true;
        BuildExtraData( p_dec );
    }
    else if( idu == IDU_TYPE_FRAME )
    {
        bs_t s;
        uint8_t ridu[8];
        int     i_ridu = sizeof(ridu);

        /* Extract the raw IDU */
        DecodeRIDU( ridu, &i_ridu, &p_frag->p_buffer[4], p_frag->i_buffer - 4 );

        /* Parse it + interpolate pts/dts if possible */
        bs_init( &s, ridu, i_ridu );

        if( p_sys->sh.b_advanced_profile )
        {
            int i_fcm = 0;

            if( p_sys->sh.b_interlaced )
            {
                if( bs_read( &s, 1 ) )
                {
                    if( bs_read( &s, 1 ) )
                        i_fcm = 1;  /* interlaced field */
                    else
                        i_fcm = 2;  /* interlaced frame */
                }
            }

            if( i_fcm == 1 ) /*interlaced field */
            {
                /* XXX for mixed I/P we should check reference usage before marking them I (too much work) */
                switch( bs_read( &s, 3 ) )
                {
                case 0: /* II */
                case 1: /* IP */
                case 2: /* PI */
                    p_sys->p_frame->i_flags |= BLOCK_FLAG_TYPE_I;
                    break;
                case 3: /* PP */
                    p_sys->p_frame->i_flags |= BLOCK_FLAG_TYPE_P;
                    break;
                case 4: /* BB */
                case 5: /* BBi */
                case 6: /* BiB */
                case 7: /* BiBi */
                    p_sys->p_frame->i_flags |= BLOCK_FLAG_TYPE_B;
                    break;
                }
            }
            else
            {
                if( !bs_read( &s, 1 ) )
                    p_sys->p_frame->i_flags |= BLOCK_FLAG_TYPE_P;
                else if( !bs_read( &s, 1 ) )
                    p_sys->p_frame->i_flags |= BLOCK_FLAG_TYPE_B;
                else if( !bs_read( &s, 1 ) )
                    p_sys->p_frame->i_flags |= BLOCK_FLAG_TYPE_I;
                else if( !bs_read( &s, 1 ) )
                    p_sys->p_frame->i_flags |= BLOCK_FLAG_TYPE_B;   /* Bi */
                else
                    p_sys->p_frame->i_flags |= BLOCK_FLAG_TYPE_P;   /* P Skip */
            }
        }
        else
        {
            if( p_sys->sh.b_frame_interpolation )
                bs_skip( &s, 1 );   // interpolate
            bs_skip( &s, 2 );       // frame count
            if( p_sys->sh.b_range_reduction )
                bs_skip( &s, 1 );   // range reduction

            if( bs_read( &s, 1 ) )
                p_sys->p_frame->i_flags |= BLOCK_FLAG_TYPE_P;
            else if( !p_sys->sh.b_has_bframe || bs_read( &s, 1 ) )
                p_sys->p_frame->i_flags |= BLOCK_FLAG_TYPE_I;
            else
                p_sys->p_frame->i_flags |= BLOCK_FLAG_TYPE_B;
        }
        p_sys->b_frame = true;
    }

    if( p_release )
        block_Release( p_release );
    return p_pic;
}
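The advanced-profile branch above reads the picture type as a short prefix code (0 = P, 10 = B, 110 = I, 1110 = BI, 1111 = skipped P). As a compact restatement only, a hypothetical helper with the same mapping, assuming VLC's bs_t bit reader and the BLOCK_FLAG_TYPE_* constants available in the surrounding packetizer, might look like:

static int vc1_adv_ptype_to_flags( bs_t *s )
{
    /* prefix code: each additional '1' bit selects the next picture type */
    if( !bs_read( s, 1 ) )
        return BLOCK_FLAG_TYPE_P;   /* 0    -> P */
    if( !bs_read( s, 1 ) )
        return BLOCK_FLAG_TYPE_B;   /* 10   -> B */
    if( !bs_read( s, 1 ) )
        return BLOCK_FLAG_TYPE_I;   /* 110  -> I */
    if( !bs_read( s, 1 ) )
        return BLOCK_FLAG_TYPE_B;   /* 1110 -> BI, carried as a B block */
    return BLOCK_FLAG_TYPE_P;       /* 1111 -> skipped P */
}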
Example no. 16
File: sections.c Project: fabn/vlc
static block_t * ts_sections_assembler_Append( ts_sections_assembler_t *p_as, block_t *p_content )
{
    const bool b_short = !( p_content->p_buffer[1] & 0x80 );
    const uint16_t i_private_length = ((p_content->p_buffer[1] & 0x0f) << 8) | p_content->p_buffer[2];
    if( b_short )
    {
        /* Short, unsegmented section */
        if(unlikely(( i_private_length > 0xFFD || i_private_length > p_content->i_buffer - 3 )))
        {
            block_Release( p_content );
            return NULL;
        }

        if( !p_as->b_raw )
        {
            p_content->p_buffer += 3;
            p_content->i_buffer = i_private_length;
        }
        return p_content;
    }
    else
    {
        /* Payload can span multiple sections */
        if (unlikely( p_content->i_buffer < (size_t)12 + i_private_length))
        {
            block_Release( p_content );
            return NULL;
        }
        /* TODO: CRC32 */
        const uint8_t i_version = ( p_content->p_buffer[5] & 0x3F ) >> 1;
        const uint8_t i_current = p_content->p_buffer[5] & 0x01;
        const uint8_t i_section = p_content->p_buffer[6];
        const uint8_t i_section_last = p_content->p_buffer[7];

        if( !p_as->b_raw )
        {
            p_content->p_buffer += 3 + 5;
            p_content->i_buffer = i_private_length - 4;
        }

        if( !i_current ||
           ( p_as->i_version != -1 && i_version != p_as->i_version ) || /* Only merge same version */
             p_as->i_prev_version == i_version || /* No duplicates */
             i_section > i_section_last )
        {
            block_Release( p_content );
            return NULL;
        }

        if( i_section != p_as->i_prev_section + 1 ) /* first or unfinished sections gathering */
        {
            ts_sections_assembler_Reset( p_as, false );
            if( i_section > 0 || i_version == p_as->i_prev_version )
            {
                block_Release( p_content );
                return NULL;
            }
        }

        p_as->i_version = i_version;
        p_as->i_prev_section = i_section;

        /* Add one more section */
        block_ChainLastAppend( &p_as->pp_sections_tail, p_content );

        /* We finished gathering our sections */
        if( i_section == i_section_last )
        {
            block_t *p_all_sections = block_ChainGather( p_as->p_sections );
            p_as->p_sections = NULL;
            p_as->pp_sections_tail = &p_as->p_sections;
            p_as->i_prev_version = i_version;
            ts_sections_assembler_Reset( p_as, false );
            return p_all_sections;
        }
    }

    return NULL;
}
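The long-form branch above pulls the standard PSI section header fields straight out of the raw bytes. For orientation, a self-contained sketch of that fixed 8-byte header, using the same masks as the assembler (the struct and field names follow the usual MPEG-TS terminology and are not part of the code above):

#include <stdint.h>
#include <stdbool.h>

/* Illustrative only: decode the fixed header of a long-form MPEG-TS PSI
 * section, mirroring the masks used by ts_sections_assembler_Append. */
struct psi_section_header
{
    uint8_t  table_id;
    bool     syntax;              /* section_syntax_indicator */
    uint16_t section_length;      /* bytes following the first 3 header bytes */
    uint8_t  version;
    bool     current_next;
    uint8_t  section_number;
    uint8_t  last_section_number;
};

static void psi_parse_header( const uint8_t p[8], struct psi_section_header *h )
{
    h->table_id            = p[0];
    h->syntax              = (p[1] & 0x80) != 0;
    h->section_length      = ((p[1] & 0x0f) << 8) | p[2];
    h->version             = (p[5] & 0x3e) >> 1;
    h->current_next        = (p[5] & 0x01) != 0;
    h->section_number      = p[6];
    h->last_section_number = p[7];
}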
Example no. 17
File: h264.c Project: IAPark/vlc
static block_t *OutputPicture( decoder_t *p_dec )
{
    decoder_sys_t *p_sys = p_dec->p_sys;
    block_t *p_pic = NULL;
    block_t **pp_pic_last = &p_pic;

    if( unlikely(!p_sys->p_frame) )
    {
        assert( p_sys->p_frame );
        return NULL;
    }

    /* Bind matched/referred PPS and SPS */
    const h264_picture_parameter_set_t *p_pps = p_sys->p_active_pps;
    const h264_sequence_parameter_set_t *p_sps = p_sys->p_active_sps;
    if( !p_pps || !p_sps )
    {
        DropStoredNAL( p_sys );
        return NULL;
    }

    if( !p_sys->b_recovered && p_sys->i_recoveryfnum == UINT_MAX &&
         p_sys->i_recovery_frame_cnt == UINT_MAX && p_sys->slice.type == H264_SLICE_TYPE_I )
    {
        /* No way to recover using SEI, just sync on I Slice */
        p_sys->b_recovered = true;
    }

    bool b_need_sps_pps = p_sys->slice.type == H264_SLICE_TYPE_I &&
                          p_sys->p_active_pps && p_sys->p_active_sps;

    /* Handle SEI recovery */
    if ( !p_sys->b_recovered && p_sys->i_recovery_frame_cnt != UINT_MAX &&
         p_sys->i_recoveryfnum == UINT_MAX )
    {
        p_sys->i_recoveryfnum = p_sys->slice.i_frame_num + p_sys->i_recovery_frame_cnt;
        b_need_sps_pps = true; /* SPS/PPS must be inserted for SEI recovery */
        msg_Dbg( p_dec, "Recovering using SEI, prerolling %u reference pics", p_sys->i_recovery_frame_cnt );
    }

    if( p_sys->i_recoveryfnum != UINT_MAX )
    {
        assert(p_sys->b_recovered == false);
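        /* i_log2_max_frame_num presumably stores log2_max_frame_num_minus4,
         * hence the +4: MaxFrameNum = 2^(log2_max_frame_num_minus4 + 4) */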
        const unsigned maxFrameNum = 1 << (p_sps->i_log2_max_frame_num + 4);
        if( (p_sys->i_recoveryfnum > maxFrameNum &&
            (unsigned)p_sys->slice.i_frame_num <= maxFrameNum / 2 &&
            (unsigned)p_sys->slice.i_frame_num >= p_sys->i_recoveryfnum % maxFrameNum ) ||
            (unsigned)p_sys->slice.i_frame_num >= p_sys->i_recoveryfnum )
        {
            p_sys->i_recoveryfnum = UINT_MAX;
            p_sys->b_recovered = true;
            msg_Dbg( p_dec, "Recovery from SEI recovery point complete" );
        }
    }

    /* Gather PPS/SPS if required */
    block_t *p_xpsnal = NULL;
    block_t **pp_xpsnal_tail = &p_xpsnal;
    if( b_need_sps_pps || p_sys->b_new_sps || p_sys->b_new_pps )
    {
        for( int i = 0; i <= H264_SPS_ID_MAX && (b_need_sps_pps || p_sys->b_new_sps); i++ )
        {
            if( p_sys->sps[i].p_block )
                block_ChainLastAppend( &pp_xpsnal_tail, block_Duplicate( p_sys->sps[i].p_block ) );
        }
        for( int i = 0; i < H264_PPS_ID_MAX && (b_need_sps_pps || p_sys->b_new_pps); i++ )
        {
            if( p_sys->pps[i].p_block )
                block_ChainLastAppend( &pp_xpsnal_tail, block_Duplicate( p_sys->pps[i].p_block ) );
        }
    }

    /* Now rebuild NAL Sequence, inserting PPS/SPS if any */
    if( p_sys->p_frame->i_flags & BLOCK_FLAG_PRIVATE_AUD )
    {
        block_t *p_au = p_sys->p_frame;
        p_sys->p_frame = p_au->p_next;
        p_au->p_next = NULL;
        p_au->i_flags &= ~BLOCK_FLAG_PRIVATE_AUD;
        block_ChainLastAppend( &pp_pic_last, p_au );
    }

    if( p_xpsnal )
        block_ChainLastAppend( &pp_pic_last, p_xpsnal );

    if( p_sys->p_sei )
        block_ChainLastAppend( &pp_pic_last, p_sys->p_sei );

    assert( p_sys->p_frame );
    if( p_sys->p_frame )
        block_ChainLastAppend( &pp_pic_last, p_sys->p_frame );

    /* Reset chains, now empty */
    p_sys->p_frame = NULL;
    p_sys->pp_frame_last = &p_sys->p_frame;
    p_sys->p_sei = NULL;
    p_sys->pp_sei_last = &p_sys->p_sei;

    p_pic = block_ChainGather( p_pic );

    if( !p_pic )
        return NULL;

    /* for PTS Fixup, interlaced fields (multiple AU/block) */
    int tFOC = 0, bFOC = 0, PictureOrderCount = 0;
    h264_compute_poc( p_sps, &p_sys->slice, &p_sys->pocctx, &PictureOrderCount, &tFOC, &bFOC );

    unsigned i_num_clock_ts = h264_get_num_ts( p_sps, &p_sys->slice, p_sys->i_pic_struct, tFOC, bFOC );

    if( p_sps->frame_mbs_only_flag == 0 && p_sps->vui.b_pic_struct_present_flag )
    {
        switch( p_sys->i_pic_struct )
        {
        /* Top and Bottom field slices */
        case 1:
        case 2:
            p_pic->i_flags |= BLOCK_FLAG_SINGLE_FIELD;
            p_pic->i_flags |= (!p_sys->slice.i_bottom_field_flag) ? BLOCK_FLAG_TOP_FIELD_FIRST
                                                                  : BLOCK_FLAG_BOTTOM_FIELD_FIRST;
            break;
        /* Each of the following slices contains multiple fields */
        case 3:
            p_pic->i_flags |= BLOCK_FLAG_TOP_FIELD_FIRST;
            break;
        case 4:
            p_pic->i_flags |= BLOCK_FLAG_BOTTOM_FIELD_FIRST;
            break;
        case 5:
            p_pic->i_flags |= BLOCK_FLAG_TOP_FIELD_FIRST;
            break;
        case 6:
            p_pic->i_flags |= BLOCK_FLAG_BOTTOM_FIELD_FIRST;
            break;
        default:
            break;
        }
    }

    /* set dts/pts to current block timestamps */
    p_pic->i_dts = p_sys->i_frame_dts;
    p_pic->i_pts = p_sys->i_frame_pts;

    /* Fixup missing timestamps after split (multiple AU/block)*/
    if( p_pic->i_dts <= VLC_TS_INVALID )
        p_pic->i_dts = date_Get( &p_sys->dts );

    if( p_sys->slice.type == H264_SLICE_TYPE_I )
        p_sys->prevdatedpoc.pts = VLC_TS_INVALID;

    if( p_pic->i_pts == VLC_TS_INVALID )
    {
        if( p_sys->prevdatedpoc.pts > VLC_TS_INVALID &&
            date_Get( &p_sys->dts ) != VLC_TS_INVALID )
        {
            date_t pts = p_sys->dts;
            date_Set( &pts, p_sys->prevdatedpoc.pts );

            int diff = tFOC - p_sys->prevdatedpoc.num;
            if( diff > 0 )
                date_Increment( &pts, diff );
            else
                date_Decrement( &pts, -diff );

            p_pic->i_pts = date_Get( &pts );
        }
        /* In case there's no PTS at all */
        else if( p_sys->slice.i_nal_ref_idc == 0 &&
                 p_sys->slice.type == H264_SLICE_TYPE_B )
        {
            p_pic->i_pts = p_pic->i_dts;
        }
        else if( p_sys->slice.type == H264_SLICE_TYPE_I &&
                 date_Get( &p_sys->dts ) != VLC_TS_INVALID )
        {
            /* Hell no PTS on IDR. We're totally blind */
            date_t pts = p_sys->dts;
            date_Increment( &pts, 2 );
            p_pic->i_pts = date_Get( &pts );
        }
    }

    if( p_pic->i_pts > VLC_TS_INVALID )
    {
        p_sys->prevdatedpoc.pts = p_pic->i_pts;
        p_sys->prevdatedpoc.num = PictureOrderCount;
    }

    if( p_pic->i_length == 0 )
    {
        if( p_sps->vui.i_time_scale )
        {
            p_pic->i_length = CLOCK_FREQ * i_num_clock_ts *
                              p_sps->vui.i_num_units_in_tick / p_sps->vui.i_time_scale;
        }
        else
        {
            date_t next = p_sys->dts;
            date_Increment( &next, i_num_clock_ts );
            p_pic->i_length = date_Get( &next ) - date_Get( &p_sys->dts );
        }
    }

#if 0
    msg_Err(p_dec, "F/BOC %d/%d POC %d %d rec %d flags %x ref%d fn %d fp %d %d pts %ld len %ld",
                    tFOC, bFOC, PictureOrderCount,
                    p_sys->slice.type, p_sys->b_recovered, p_pic->i_flags,
                    p_sys->slice.i_nal_ref_idc, p_sys->slice.i_frame_num, p_sys->slice.i_field_pic_flag,
                    p_pic->i_pts - p_pic->i_dts, p_pic->i_pts % (100*CLOCK_FREQ), p_pic->i_length);
#endif

    /* save for next pic fixups */
    if( date_Get( &p_sys->dts ) != VLC_TS_INVALID )
    {
        if( p_sys->i_next_block_flags & BLOCK_FLAG_DISCONTINUITY )
            date_Set( &p_sys->dts, VLC_TS_INVALID );
        else
            date_Increment( &p_sys->dts, i_num_clock_ts );
    }

    if( p_pic )
    {
        p_pic->i_flags |= p_sys->i_next_block_flags;
        p_sys->i_next_block_flags = 0;
    }

    switch( p_sys->slice.type )
    {
        case H264_SLICE_TYPE_P:
            p_pic->i_flags |= BLOCK_FLAG_TYPE_P;
            break;
        case H264_SLICE_TYPE_B:
            p_pic->i_flags |= BLOCK_FLAG_TYPE_B;
            break;
        case H264_SLICE_TYPE_I:
            p_pic->i_flags |= BLOCK_FLAG_TYPE_I;
        default:
            break;
    }

    if( !p_sys->b_recovered )
    {
        if( p_sys->i_recoveryfnum != UINT_MAX ) /* recovering from SEI */
            p_pic->i_flags |= BLOCK_FLAG_PREROLL;
        else
            p_pic->i_flags |= BLOCK_FLAG_DROP;
    }

    p_pic->i_flags &= ~BLOCK_FLAG_PRIVATE_AUD;

    /* reset after output */
    p_sys->i_frame_dts = VLC_TS_INVALID;
    p_sys->i_frame_pts = VLC_TS_INVALID;
    p_sys->i_dpb_output_delay = 0;
    p_sys->i_pic_struct = UINT8_MAX;
    p_sys->i_recovery_frame_cnt = UINT_MAX;
    p_sys->slice.type = H264_SLICE_TYPE_UNKNOWN;
    p_sys->p_sei = NULL;
    p_sys->pp_sei_last = &p_sys->p_sei;
    p_sys->b_new_sps = false;
    p_sys->b_new_pps = false;
    p_sys->b_slice = false;

    /* CC */
    cc_storage_commit( p_sys->p_ccs, p_pic );

    return p_pic;
}
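The duration fallback above boils down to CLOCK_FREQ * i_num_clock_ts * num_units_in_tick / time_scale when the VUI carries timing information. A tiny standalone sketch of that arithmetic, assuming CLOCK_FREQ is 1000000 (microsecond ticks, as in VLC of this vintage):

#include <stdint.h>
#include <stdio.h>

#define CLOCK_FREQ INT64_C(1000000) /* assumption: ticks are microseconds here */

/* Illustrative only: picture duration from H.264 VUI timing info,
 * mirroring the formula used in OutputPicture above. */
static int64_t h264_frame_duration( uint32_t num_units_in_tick,
                                    uint32_t time_scale,
                                    unsigned num_clock_ts )
{
    return CLOCK_FREQ * num_clock_ts * num_units_in_tick / time_scale;
}

int main( void )
{
    /* e.g. 25 fps progressive: time_scale = 50, num_units_in_tick = 1,
     * a frame spans 2 clock ticks -> 40000 us per picture */
    printf( "%lld\n", (long long)h264_frame_duration( 1, 50, 2 ) );
    return 0;
}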
Example no. 18
File: dirac.c Project: CSRedRat/vlc
/****************************************************************************
 * Encode: the whole thing
 ****************************************************************************
 * This function spits out encapsulation units.
 ****************************************************************************/
static block_t *Encode( encoder_t *p_enc, picture_t *p_pic )
{
    encoder_sys_t *p_sys = p_enc->p_sys;
    block_t *p_block, *p_output_chain = NULL;
    int i_plane, i_line, i_width, i_src_stride;
    uint8_t *p_dst;

    if( !p_pic ) return NULL;
    /* we only know if the sequence is interlaced when the first
     * picture arrives, so final setup is done here */
    /* XXX todo, detect change of interlace */
    p_sys->ctx.src_params.topfieldfirst = p_pic->b_top_field_first;
    p_sys->ctx.src_params.source_sampling = !p_pic->b_progressive;

    if( p_sys->b_auto_field_coding )
        p_sys->ctx.enc_params.picture_coding_mode = !p_pic->b_progressive;

    if( !p_sys->p_dirac )
    {
        date_t date;
        /* Initialise the encoder with the encoder context */
        p_sys->p_dirac = dirac_encoder_init( &p_sys->ctx, 0 );
        if( !p_sys->p_dirac )
        {
            msg_Err( p_enc, "Failed to initialize dirac encoder" );
            return NULL;
        }
        date_Init( &date, p_enc->fmt_in.video.i_frame_rate, p_enc->fmt_in.video.i_frame_rate_base );
#if DIRAC_RESEARCH_VERSION_ATLEAST(1,0,2)
        int i_delayinpics = dirac_encoder_pts_offset( p_sys->p_dirac );
        i_delayinpics /= p_sys->ctx.enc_params.picture_coding_mode + 1;
        date_Increment( &date, i_delayinpics );
#else
        date_Increment( &date, 1 );
#endif
        p_sys->i_pts_offset = date_Get( &date );

        /* picture_coding_mode = 1 == FIELD_CODING, two pictures are produced
         * for each frame input. Calculate time between fields for offsetting
         * the second field later. */
        if( 1 == p_sys->ctx.enc_params.picture_coding_mode )
        {
            date_Set( &date, 0 );
            date_Increment( &date, 1 );
            p_sys->i_field_time = date_Get( &date ) / 2;
        }
    }

    /* Copy input picture into encoder input buffer (stride by stride) */
    /* Would be lovely to just pass the picture in, but there is no way for the
     * library to free it */
    p_dst = p_sys->p_buffer_in;
    for( i_plane = 0; i_plane < p_pic->i_planes; i_plane++ )
    {
        uint8_t *p_src = p_pic->p[i_plane].p_pixels;
        i_width = p_pic->p[i_plane].i_visible_pitch;
        i_src_stride = p_pic->p[i_plane].i_pitch;

        for( i_line = 0; i_line < p_pic->p[i_plane].i_visible_lines; i_line++ )
        {
            vlc_memcpy( p_dst, p_src, i_width );
            p_dst += i_width;
            p_src += i_src_stride;
        }
    }

    /* Load one frame of data into encoder */
    if( dirac_encoder_load( p_sys->p_dirac, p_sys->p_buffer_in,
                            p_sys->i_buffer_in ) < 0 )
    {
        msg_Dbg( p_enc, "dirac_encoder_load() error" );
        return NULL;
    }

    /* store pts in a lookaside buffer, so that the same pts may
     * be used for the picture in coded order */
    StorePicturePTS( p_enc, p_sys->i_input_picnum, p_pic->date );
    p_sys->i_input_picnum++;

    /* store dts in a queue, so that they come back out in coded order */
    p_block = block_New( p_enc, 1 );
    if( !p_block )
        return NULL;
    p_block->i_dts = p_pic->date - p_sys->i_pts_offset;
    block_FifoPut( p_sys->p_dts_fifo, p_block );
    p_block = NULL;

    /* for field coding mode, insert an extra value into both the
     * pts lookaside buffer and dts queue, offset to correspond
     * to a one field delay. */
    if( 1 == p_sys->ctx.enc_params.picture_coding_mode )
    {
        StorePicturePTS( p_enc, p_sys->i_input_picnum, p_pic->date + p_sys->i_field_time );
        p_sys->i_input_picnum++;

        p_block = block_New( p_enc, 1 );
        if( !p_block )
            return NULL;
        p_block->i_dts = p_pic->date - p_sys->i_pts_offset + p_sys->i_field_time;
        block_FifoPut( p_sys->p_dts_fifo, p_block );
        p_block = NULL;
    }

    dirac_encoder_state_t state;
    /* Retrieve encoded frames from encoder */
    do
    {
        p_sys->p_dirac->enc_buf.buffer = p_sys->p_buffer_out;
        p_sys->p_dirac->enc_buf.size = p_sys->i_buffer_out;
        state = dirac_encoder_output( p_sys->p_dirac );
        switch( state )
        {
        case ENC_STATE_AVAIL: {
            uint32_t pic_num;

            /* extract data from encoder temporary buffer. */
            p_block = block_New( p_enc, p_sys->p_dirac->enc_buf.size );
            if( !p_block )
                return NULL;
            memcpy( p_block->p_buffer, p_sys->p_dirac->enc_buf.buffer,
                    p_sys->p_dirac->enc_buf.size );

            /* if some flags were set for a previous block, prevent
             * them from getting lost */
            if( p_sys->p_chain )
                p_block->i_flags |= p_sys->p_chain->i_flags;

            /* store all extracted blocks in a chain and gather up when an
             * entire encapsulation unit is available (ends with a picture) */
            block_ChainAppend( &p_sys->p_chain, p_block );

            /* Presence of a Sequence header indicates a seek point */
            if( 0 == p_block->p_buffer[4] )
            {
                p_block->i_flags |= BLOCK_FLAG_TYPE_I;

                if( !p_enc->fmt_out.p_extra ) {
                    const uint8_t eos[] = { 'B','B','C','D',0x10,0,0,0,13,0,0,0,0 };
                    uint32_t len = GetDWBE( p_block->p_buffer + 5 );
                    /* if it hasn't been done so far, stash a copy of the
                     * sequence header for muxers such as ogg */
                    /* The OggDirac spec advises that a Dirac EOS DataUnit
                     * is appended to the sequence header to guard
                     * against poor streaming servers */
                    /* XXX, should this be done using the packetizer? */
                    p_enc->fmt_out.p_extra = malloc( len + sizeof(eos) );
                    if( !p_enc->fmt_out.p_extra )
                        return NULL;
                    memcpy( p_enc->fmt_out.p_extra, p_block->p_buffer, len);
                    memcpy( (uint8_t*)p_enc->fmt_out.p_extra + len, eos, sizeof(eos) );
                    SetDWBE( (uint8_t*)p_enc->fmt_out.p_extra + len + 9, len ); /* prev_parse_offset of the EOS unit */
                    p_enc->fmt_out.i_extra = len + sizeof(eos);
                }
            }

            if( ReadDiracPictureNumber( &pic_num, p_block ) )
            {
                /* Finding a picture terminates an encapsulation unit: gather
                 * all data and output it, using the next dts value queued up
                 * and the matching pts from the lookaside buffer (tlb) */
                p_block = block_FifoGet( p_sys->p_dts_fifo );
                p_sys->p_chain->i_dts = p_block->i_dts;
                p_sys->p_chain->i_pts = GetPicturePTS( p_enc, pic_num );
                block_Release( p_block );
                block_ChainAppend( &p_output_chain, block_ChainGather( p_sys->p_chain ) );
                p_sys->p_chain = NULL;
            } else {
                p_block = NULL;
            }
            break;
            }

        case ENC_STATE_BUFFER:
            break;
        case ENC_STATE_INVALID:
        default:
            break;
        }
    } while( state == ENC_STATE_AVAIL );

    return p_output_chain;
}
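StorePicturePTS and GetPicturePTS are not part of this excerpt; the comments describe them as a pts lookaside buffer keyed by input picture number, so display-order timestamps can be recovered once the encoder emits pictures in coded order. A hypothetical minimal version (names and table size invented for illustration):

#include <stdint.h>

#define PTS_TLB_SIZE 32 /* hypothetical capacity; must exceed the encoder's reordering depth */

typedef struct
{
    uint32_t picnum;
    int64_t  pts;
} pts_tlb_entry_t;

static pts_tlb_entry_t pts_tlb[PTS_TLB_SIZE];

/* remember the pts of input picture 'picnum' (display order) */
static void StorePTS( uint32_t picnum, int64_t pts )
{
    pts_tlb[picnum % PTS_TLB_SIZE].picnum = picnum;
    pts_tlb[picnum % PTS_TLB_SIZE].pts    = pts;
}

/* recover it when the same picture comes back out in coded order */
static int64_t GetPTS( uint32_t picnum )
{
    const pts_tlb_entry_t *e = &pts_tlb[picnum % PTS_TLB_SIZE];
    return ( e->picnum == picnum ) ? e->pts : INT64_C(-1); /* -1: not found */
}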
Example no. 19
File: dirac.c Project: AsamQi/vlc
/***
 * Encapsulation (packetization) suitable for all muxing standards
 * maps [DataUnit] -> EncapsulationUnit
 */
static block_t *dirac_BuildEncapsulationUnit( decoder_t *p_dec, block_t *p_block )
{
    decoder_sys_t *p_sys = p_dec->p_sys;

    assert(p_block->i_buffer >= 13 && 0x42424344 == GetDWBE( p_block->p_buffer ));

    if( p_sys->i_eu_pts <= VLC_TS_INVALID && p_sys->i_eu_dts <= VLC_TS_INVALID )
    {
        /* earliest block with pts/dts gets to set the pts/dts for the dated
         * encapsulation unit as a whole */
         * NB, the 'earliest block' criterion is arbitrary */
        if( p_block->i_pts > VLC_TS_INVALID || p_block->i_dts > VLC_TS_INVALID )
        {
            p_sys->i_eu_pts = p_block->i_pts;
            p_sys->i_eu_dts = p_block->i_dts;
        }
    }

    /* dirac_InspectDataUnit also updates flags for the EU.
     *  - if this is the first block in the EU, then it hasn't been added
     *    to the chain yet (so p_block will become the front of the chain)
     *  - otherwise, use the flags of the chain (first block) */
    block_t *p_eu = p_sys->p_eu ? p_sys->p_eu : p_block;
    int i_block = dirac_InspectDataUnit( p_dec, &p_block, p_eu);

    if( !p_block )
    {
        /* block has been discarded during inspection */
        /* be careful: don't discard anything that is dated,
         * as this needs to go into the timegen loop.  Set
         * the DIRAC_DISCARD block flag, and it'll be dropped
         * at output time */
        return NULL;
    }

    block_ChainLastAppend( &p_sys->pp_eu_last, p_block );

    dirac_block_encap_t *p_dbe = dirac_GetBlockEncap( p_block );
#ifdef SANITIZE_PREV_PARSE_OFFSET
    /* fixup prev_parse_offset to point to the last data unit
     * to arrive */
    if( p_dbe )
    {
        SetDWBE( p_block->p_buffer + 9, p_sys->u_eu_last_npo );
        p_sys->u_eu_last_npo = p_dbe->u_last_next_offset;
    }
#endif

    if( i_block != DIRAC_DU_ENDS_EU )
    {
        /* encapsulation unit not ended */
        return NULL;
    }

    /* gather up encapsulation unit, reassociating the final
     * private state with the gathered block */
    block_t *p_eu_last = (block_t*) p_sys->pp_eu_last - offsetof( block_t, p_next );
    p_dbe = dirac_RemoveBlockEncap( p_eu_last );

    uint8_t u_parse_code = p_block->p_buffer[4];

    /* gather up the encapsulation unit */
    p_block = block_ChainGather( p_sys->p_eu );
    assert( p_block ); /* block_ChainGather doesn't define when it frees chain */

    p_block->i_flags |= DIRAC_NON_DATED;
    if( p_dbe )
    {
        dirac_AddBlockEncap( &p_block, p_dbe );
        if( dirac_isPicture( u_parse_code ) ) p_block->i_flags &= ~DIRAC_NON_DATED;
    }
    p_sys->p_eu = NULL;
    p_sys->pp_eu_last = &p_sys->p_eu;
    return p_block;
}
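Both dirac.c examples revolve around the fixed 13-byte parse-info header: the 'BBCD' prefix asserted above (0x42424344), a parse code byte, then two big-endian offsets, next_parse_offset at byte 5 (read with GetDWBE above) and previous_parse_offset at byte 9 (the field rewritten under SANITIZE_PREV_PARSE_OFFSET). A self-contained sketch of that layout, for orientation only:

#include <stdint.h>
#include <stdbool.h>
#include <string.h>

/* Illustrative only: the fixed 13-byte Dirac parse-info header that the
 * dirac.c examples rely on; offsets match the GetDWBE/SetDWBE calls above. */
typedef struct
{
    uint8_t  parse_code;         /* e.g. 0x00 sequence header, 0x10 end of sequence */
    uint32_t next_parse_offset;  /* bytes forward to the next parse-info header */
    uint32_t prev_parse_offset;  /* bytes back to the previous parse-info header */
} dirac_parse_info_t;

static uint32_t rd32be( const uint8_t *p )
{
    return ((uint32_t)p[0] << 24) | ((uint32_t)p[1] << 16) | ((uint32_t)p[2] << 8) | p[3];
}

/* returns false if the 'BBCD' prefix is missing or the unit is too short */
static bool dirac_parse_info( const uint8_t *p, size_t len, dirac_parse_info_t *out )
{
    if( len < 13 || memcmp( p, "BBCD", 4 ) )
        return false;
    out->parse_code        = p[4];
    out->next_parse_offset = rd32be( p + 5 );
    out->prev_parse_offset = rd32be( p + 9 );
    return true;
}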