/* Put a packet into the reorder buffer; sequence is the packet's sequence number */
void video_rtp_reorder_put_pkt(kal_uint8 *data, kal_uint32 size, kal_uint32 sequence)
{
    kal_uint32 index;
    kal_uint32 match_index;
    kal_uint8 *p_buffer = NULL;

    /* free the buffer memory that was handed out by the previous get, if any */
    if (g_video_rtp_reorder_buff_info.p_free_addr != NULL)
    {
        kal_adm_free(g_video_rtp_reorder_buff_info.adm_id, g_video_rtp_reorder_buff_info.p_free_addr);
        g_video_rtp_reorder_buff_info.p_free_addr = NULL;
    }

    if(g_video_rtp_reorder_buff_info.total_pkt >= VIDEO_RTP_MAX_REORDER_NUM)
    {
#ifdef STREAM_REORDER_BUFFER_DBG_TRACE
        video_dbg_trace(STREAM_REORDER_BUFFER_DROP_SEQ, sequence);
#endif
        /* re-order buffer is full, drop the packet */
        VIDEO_ASSERT(0);
        return;
    }

    p_buffer = kal_adm_internal_alloc(g_video_rtp_reorder_buff_info.adm_id, size, __FILE__, __LINE__);
    if(p_buffer==NULL)
    {
#ifdef STREAM_REORDER_BUFFER_DBG_TRACE
        video_dbg_trace(STREAM_REORDER_BUFFER_DROP_SEQ, sequence);
#endif
        /* memory allocation failed; mark the buffer as full and drop the packet */
        VIDEO_ASSERT(0);
        g_video_rtp_reorder_buff_info.b_buffer_full = KAL_TRUE;
        return;
    }

    /* search for the insertion index so that sequence numbers stay in ascending order */
    for(index=0; index<g_video_rtp_reorder_buff_info.total_pkt; index++)
    {
        if(g_video_rtp_reorder_buff_info.sequence[index] > sequence)
        {
            break;
        }
    }

    /* shift the items that have a larger sequence than the input item */
    match_index = index;
    for(index = 0; index < (g_video_rtp_reorder_buff_info.total_pkt-match_index); index++)
    {
        g_video_rtp_reorder_buff_info.sequence[g_video_rtp_reorder_buff_info.total_pkt - index] = g_video_rtp_reorder_buff_info.sequence[g_video_rtp_reorder_buff_info.total_pkt - index - 1];
        g_video_rtp_reorder_buff_info.start_addr[g_video_rtp_reorder_buff_info.total_pkt - index] = g_video_rtp_reorder_buff_info.start_addr[g_video_rtp_reorder_buff_info.total_pkt - index - 1];
        g_video_rtp_reorder_buff_info.buffer_size[g_video_rtp_reorder_buff_info.total_pkt - index] = g_video_rtp_reorder_buff_info.buffer_size[g_video_rtp_reorder_buff_info.total_pkt - index - 1];
    }

    /* insert the input packet */
    g_video_rtp_reorder_buff_info.sequence[match_index] = sequence;
    g_video_rtp_reorder_buff_info.start_addr[match_index] = (kal_uint32)p_buffer;
    g_video_rtp_reorder_buff_info.buffer_size[match_index] = size;
    kal_mem_cpy(p_buffer, data, size);
    g_video_rtp_reorder_buff_info.total_pkt++;
}
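
/*
 * Illustrative sketch (not part of the original module): the insertion above is a
 * classic sorted insert into parallel arrays. The standalone helper below shows the
 * same idea with hypothetical names (seq_list, count); it assumes the caller has
 * already checked that there is room for one more entry.
 */
static void sorted_insert_example(kal_uint32 *seq_list, kal_uint32 *count, kal_uint32 new_seq)
{
    kal_uint32 pos;
    kal_uint32 i;

    /* find the first entry with a larger sequence number */
    for (pos = 0; pos < *count; pos++)
    {
        if (seq_list[pos] > new_seq)
        {
            break;
        }
    }

    /* shift the larger entries one slot towards the tail, starting from the end */
    for (i = *count; i > pos; i--)
    {
        seq_list[i] = seq_list[i - 1];
    }

    seq_list[pos] = new_seq;
    (*count)++;
}
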
kal_uint32 open_api_h264_get_dpb_frame_number(H264_DPB_entry *p_dpb)
{
    if (p_dpb == 0)
    {
        VIDEO_ASSERT(0);
        return 0;
    }
    return g_open_api_h264_dpb_frame_info.frame_list[p_dpb->index_to_list].frame_no;
}
void open_api_h264_set_dpb_decode_done(void)
{
	if (g_open_api_h264_dpb_frame_info.p_dpb_decode == NULL) // the player was not initialized properly
	{
		VIDEO_ASSERT(0);
	}
	else
	{
		g_open_api_h264_dpb_frame_info.p_dpb_decode->b_dec_done = KAL_TRUE;
	}
}
//MEDIA_STATUS_CODE video_file_get_vos_data(MP4VIDEO_VOS_STRUCT **VOS)
MEDIA_STATUS_CODE video_file_get_vos_data(kal_uint8* p_vos, kal_uint32 *p_vos_size)
{
    kal_uint32 vos_pos;

    /* Get VOS information */
    vos_pos = MP4_Video_GetVOSOffset(g_video_dec_info_ptr->pMp4Parser);
    *p_vos_size = MP4_Video_GetVOSSize(g_video_dec_info_ptr->pMp4Parser);

    if ((*p_vos_size) > 100)
    {
        VIDEO_ASSERT(0);

        g_video_dec_status.VIDEO_STATUS = VIDEO_DEC_FATAL_ERROR;
        return VIDEO_ERROR;
    }

    /* Read VOS from the file system */
    g_video_dec_status.FSAL_STATUS = FSAL_Seek(g_video_dec_info_ptr->pstFSAL, vos_pos);
    if (g_video_dec_status.FSAL_STATUS != FSAL_OK)
    {
        VIDEO_ASSERT(0);

        g_video_dec_status.FSAL_ERROR_FILE = g_video_dec_info_ptr->pstFSAL;
        return FSAL_ERROR;
    }

    g_video_dec_status.FSAL_STATUS = FSAL_Read(g_video_dec_info_ptr->pstFSAL, p_vos, *p_vos_size);
    if (g_video_dec_status.FSAL_STATUS != FSAL_OK)
    {
        VIDEO_ASSERT(0);

        g_video_dec_status.FSAL_ERROR_FILE = g_video_dec_info_ptr->pstFSAL;
        return FSAL_ERROR;
    }

    return MEDIA_STATUS_OK;
}
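
/*
 * Hypothetical caller sketch (names and local buffer are assumptions for
 * illustration): the VOS header is rejected above when it exceeds 100 bytes, so a
 * caller can size its local buffer accordingly and check the returned status.
 */
static MEDIA_STATUS_CODE video_file_get_vos_data_example(void)
{
    kal_uint8 vos_buffer[100];
    kal_uint32 vos_size = 0;
    MEDIA_STATUS_CODE status;

    status = video_file_get_vos_data(vos_buffer, &vos_size);
    if (status != MEDIA_STATUS_OK)
    {
        /* seek/read failed or the VOS was larger than expected */
        return status;
    }

    /* vos_buffer now holds vos_size bytes of VOS data */
    return MEDIA_STATUS_OK;
}
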
/* This function gets frame durations from the media file parser.
 * @param  frame_no  start frame number
 * @param  size      number of frames whose durations are requested
 * @param  result    buffer that receives the resulting frame times
 * @return Media status, based on the MEDIA_STATUS_CODE enumeration.
 */
MEDIA_STATUS_CODE video_file_get_frametime(kal_uint32 frame_no, kal_uint32 size, kal_uint64 *result)
{
    kal_uint32 index;
    kal_uint32 video_duration;
    kal_uint32 min_frame_time;
    MEDIA_STATUS_CODE result_code;

    /* get frame time */
    g_video_dec_status.PARSE_STATUS =
        MP4_GetDecodeTimeDelta_Array(g_video_dec_info_ptr->pMp4Parser, frame_no, result, &size, MP4_TRACK_VIDEO);
    if (g_video_dec_status.PARSE_STATUS != MP4_PARSER_OK)
    {
        VIDEO_ASSERT(0);
        g_video_dec_info_ptr->stop_frame_no = frame_no + size;
    }

    /* get total duration */
    g_video_dec_status.PARSE_STATUS = MP4_GetMediaDuration(g_video_dec_info_ptr->pMp4Parser, &video_duration,
                                                           MP4_TRACK_VIDEO);
    if (g_video_dec_status.PARSE_STATUS != MP4_PARSER_OK)
    {
        VIDEO_ASSERT(0);
        return MP4_PARSER_ERROR;
    }

    min_frame_time = VIDEO_ANYBASE_TO_ANYBASE(VIDEO_FILE_MIN_FRAME_DURATION, 1000, g_video_dec_info_ptr->video_time_scale);

    /* check each frame time and translate to AV ticks */
    for (index = 0; index < size; index++)
    {
        if ( (result[index] > video_duration) || (result[index] < min_frame_time))
        {
            VIDEO_ASSERT(0);
            /* workaround for a corrupt file: adjust the frame interval */
            if (index != 0)
            {
                result[index] = result[index - 1];
            }
            else
            {
                result_code = video_dec_translate_av_time(min_frame_time, &result[index]);
                if (result_code != MEDIA_STATUS_OK)
                {
                    VIDEO_ASSERT(0);
                    return result_code;
                }  
            }    
            /* the entry is already in AV ticks, so skip the translation below */
            continue;
        }

        result_code = video_dec_translate_av_time(result[index], &result[index]);

        if (result_code != MEDIA_STATUS_OK)
        {
            VIDEO_ASSERT(0);
            return result_code;
        }
        if (result[index] == 0)
        {
            /* after translation, the frame time is zero */
            VIDEO_ASSERT(0);
            /* workaround for a corrupt file: adjust the frame interval */
            if (index != 0)
            {
                /* share time with previous one */
                result[index] = result[index - 1] / 2;
                result[index - 1] = result[index - 1] - result[index];
            }
            else
            {
                result[index] = 64;
            }
        }
    }

    return MEDIA_STATUS_OK;
}
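
/*
 * Hypothetical caller sketch (the run length of 8 and the helper name are
 * assumptions): request the durations of a run of frames starting at start_no; on
 * success each entry of frame_times holds one frame's duration in AV ticks.
 */
static MEDIA_STATUS_CODE video_file_get_frametime_example(kal_uint32 start_no)
{
    kal_uint64 frame_times[8];
    MEDIA_STATUS_CODE status;

    status = video_file_get_frametime(start_no, 8, frame_times);
    if (status != MEDIA_STATUS_OK)
    {
        return status;
    }

    /* frame_times[0..7] are now valid */
    return MEDIA_STATUS_OK;
}
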
/* Get sample data from the media file into the video buffer. It tries to fetch as much data as possible;
 * samples that are contiguous in the file are merged into a single read.
 * @param  p_file_info  pointer to a VIDEO_DEC_FILE_READ_STRUCT describing the request
 * @return Media status, based on the MEDIA_STATUS_CODE enumeration.
 */
MEDIA_STATUS_CODE video_file_get_sample_data(VIDEO_DEC_FILE_READ_STRUCT *p_file_info)
{
    kal_uint32 current_pos = 0;
    kal_uint32 current_size = 0;
    kal_uint32 current_buffer_addr = 0;
    kal_uint32 sum = 0;
    kal_uint32 index;
    kal_uint32 result_frame_no;
    kal_uint32 result_frame_no_check;
    kal_uint32 start_read_no;
    MEDIA_STATUS_CODE result;

    p_file_info->result_frame_no = 0;
    p_file_info->result_size = 0;
    result_frame_no = p_file_info->total_frame_no;
    start_read_no = 0;

    /* get sample size */
    for (index = 0; index < result_frame_no; index++)
    {
        g_video_dec_status.PARSE_STATUS = MP4_GetSampleSize(g_video_dec_info_ptr->pMp4Parser,
                                                            (p_file_info->start_frame_no + index),
                                                            &p_file_info->p_frame_length[index],
                                                            MP4_TRACK_VIDEO);
        if (g_video_dec_status.PARSE_STATUS != MP4_PARSER_OK)
        {
#ifdef PGDL_SUPPORT
            if (g_video_dec_status.scenario == VIDEO_SCENARIO_PGDL) {
               kal_prompt_trace(MOD_MED, "MP4_GetSampleSize Error: %d, %d\n", g_video_dec_status.PARSE_STATUS, p_file_info->start_frame_no, index);
            }
#endif        
        
            VIDEO_ASSERT(0);

            result_frame_no = index;
            g_video_dec_info_ptr->stop_frame_no = p_file_info->start_frame_no + result_frame_no;
            break;
        }
    }

    /* get sample offset */
    result_frame_no_check = result_frame_no;
    g_video_dec_status.PARSE_STATUS =
        MP4_GetSampleOffset_Array(g_video_dec_info_ptr->pMp4Parser,
                                  p_file_info->start_frame_no,
                                  p_file_info->temp_file_offset,
                                  &result_frame_no,
                                  MP4_TRACK_VIDEO);

    if (g_video_dec_status.PARSE_STATUS != MP4_PARSER_OK)
    {
        if((result_frame_no_check==result_frame_no) && (result_frame_no_check!=0))
        {
            /* since an error occurred, the returned frame count must differ from the requested one */
            EXT_ASSERT(0, result_frame_no_check, 0, 0);
        }
#ifdef PGDL_SUPPORT
        if (g_video_dec_status.scenario == VIDEO_SCENARIO_PGDL) {
           kal_prompt_trace(MOD_MED, "MP4_GetSampleOffset_Array Error: %d, %d\n", g_video_dec_status.PARSE_STATUS, p_file_info->start_frame_no, p_file_info->temp_file_offset);
        }
#endif

        VIDEO_ASSERT(0);
        g_video_dec_info_ptr->stop_frame_no = p_file_info->start_frame_no + result_frame_no;
    }

    /* set the last one offset as 0 */
    p_file_info->temp_file_offset[result_frame_no] = 0;

    /* calculate sample sizes and read from file to buffer */
    current_pos = p_file_info->temp_file_offset[0];
    current_size = 0;
    current_buffer_addr = p_file_info->p_frame_buffer_addr[0];
    sum = 0;

    for (index = 0; index < result_frame_no; index++)
    {
        /* lay out the next frame's buffer address; frames are packed back to back */
        if ((index + 1) < result_frame_no)
        {
            p_file_info->p_frame_buffer_addr[index + 1] =
                     p_file_info->p_frame_buffer_addr[index]
                    + p_file_info->p_frame_length[index];
        }

        sum += p_file_info->p_frame_length[index];

        if (sum > p_file_info->max_size)
        {
            if(index > 0)
            {
                /* reach frame buffer size limit */
                result = video_file_read_bitstream_data(current_pos, current_size, current_buffer_addr);
           
                if (result != MEDIA_STATUS_OK)
                {
#ifdef PGDL_SUPPORT
                    if (g_video_dec_status.scenario == VIDEO_SCENARIO_PGDL) {
                        kal_prompt_trace(MOD_MED, "video_file_read_bitstream_data Error: %d, %d, %d\n", result, current_pos, current_size, current_buffer_addr);
                    }
#endif                
                    VIDEO_ASSERT(0);
                    return result;
                }
                
                index--;
                p_file_info->result_size += current_size; 
                p_file_info->result_frame_no += (index - start_read_no + 1);
                start_read_no = index + 1;
            }
            break;
        }

        current_size += p_file_info->p_frame_length[index];
        if ((current_pos + current_size) != p_file_info->temp_file_offset[index + 1])
        {
            /* data is not contiguous in the file; flush the accumulated run */
            result = video_file_read_bitstream_data(current_pos, current_size, current_buffer_addr);

            if (result != MEDIA_STATUS_OK)
            {
#ifdef PGDL_SUPPORT
                if (g_video_dec_status.scenario == VIDEO_SCENARIO_PGDL) {
                    kal_prompt_trace(MOD_MED, "video_file_read_bitstream_data Error2: %d, %d, %d\n", result, current_pos, current_size, current_buffer_addr);
                }
#endif
                VIDEO_ASSERT(0);
                return result;
            }

            p_file_info->result_size += current_size;
            p_file_info->result_frame_no += (index - start_read_no + 1);
            start_read_no = index + 1;
            current_buffer_addr = p_file_info->p_frame_buffer_addr[index+1];
            current_pos = p_file_info->temp_file_offset[index + 1];
            current_size = 0;
        }
    }
  
    return MEDIA_STATUS_OK;
}
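
/*
 * Illustrative sketch (standalone, hypothetical names): the loop above merges
 * file-contiguous samples into a single read. Given per-sample offsets and lengths,
 * the simplified helper below only flushes a read when the next sample does not
 * start exactly where the current run ends, or when the last sample is reached.
 */
static void merge_contiguous_reads_example(const kal_uint32 *offsets,
                                           const kal_uint32 *lengths,
                                           kal_uint32 count)
{
    kal_uint32 run_pos;
    kal_uint32 run_size;
    kal_uint32 index;

    if (count == 0)
    {
        return;
    }

    run_pos = offsets[0];
    run_size = 0;

    for (index = 0; index < count; index++)
    {
        run_size += lengths[index];

        /* flush the run at a discontinuity or at the final sample */
        if ((index + 1 == count) || (run_pos + run_size != offsets[index + 1]))
        {
            /* a real caller would read run_size bytes starting at run_pos here */
            run_pos = (index + 1 < count) ? offsets[index + 1] : 0;
            run_size = 0;
        }
    }
}
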
/* Read bitstream data from the file system into a buffer.
 * @param  current_pos          start position in the file
 * @param  current_size         total number of bytes to read
 * @param  current_buffer_addr  buffer start address
 * @return Media status, based on the MEDIA_STATUS_CODE enumeration.
 */
static MEDIA_STATUS_CODE video_file_read_bitstream_data(kal_uint32 current_pos, kal_uint32 current_size,
                                                        kal_uint32 current_buffer_addr)
{
    if (current_size > 512)
    {
        g_video_dec_status.FSAL_STATUS = FSAL_Direct_Seek(g_video_dec_info_ptr->pstFSAL, current_pos);

        if (g_video_dec_status.FSAL_STATUS != FSAL_OK)
        {
            VIDEO_ASSERT(0);
#ifdef PGDL_SUPPORT
            if (g_video_dec_status.scenario == VIDEO_SCENARIO_PGDL) {
                kal_prompt_trace(MOD_MED, "FSAL_Direct_Seek Error: %d, %d, %d\n", g_video_dec_status.FSAL_STATUS, current_pos, current_size, current_buffer_addr);
            }
#endif
            g_video_dec_status.FSAL_ERROR_FILE = g_video_dec_info_ptr->pstFSAL;
            return FSAL_ERROR;
        }

        g_video_dec_status.FSAL_STATUS = FSAL_Direct_Read(g_video_dec_info_ptr->pstFSAL,
                                                          (kal_uint8 *)current_buffer_addr,
                                                          current_size);
        if (g_video_dec_status.FSAL_STATUS != FSAL_OK)
        {
            VIDEO_ASSERT(0);
#ifdef PGDL_SUPPORT
            if (g_video_dec_status.scenario == VIDEO_SCENARIO_PGDL) {
                kal_prompt_trace(MOD_MED, "FSAL_Direct_Read Error: %d, %d, %d\n", g_video_dec_status.FSAL_STATUS, current_pos, current_size, current_buffer_addr);
            }
#endif

            g_video_dec_status.FSAL_ERROR_FILE = g_video_dec_info_ptr->pstFSAL;
            return FSAL_ERROR;
        }
    }
    else
    {
        g_video_dec_status.FSAL_STATUS = FSAL_Seek(g_video_dec_info_ptr->pstFSAL, current_pos);

        if (g_video_dec_status.FSAL_STATUS != FSAL_OK)
        {
            VIDEO_ASSERT(0);
#ifdef PGDL_SUPPORT
            if (g_video_dec_status.scenario == VIDEO_SCENARIO_PGDL) {
                kal_prompt_trace(MOD_MED, "FSAL_Seek Error: %d, %d, %d\n", g_video_dec_status.FSAL_STATUS, current_pos, current_size, current_buffer_addr);
            }
#endif

            g_video_dec_status.FSAL_ERROR_FILE = g_video_dec_info_ptr->pstFSAL;
            return FSAL_ERROR;
        }

        g_video_dec_status.FSAL_STATUS = FSAL_Read(g_video_dec_info_ptr->pstFSAL, (kal_uint8 *)current_buffer_addr,
                                                   current_size);
        if (g_video_dec_status.FSAL_STATUS != FSAL_OK)
        {
            VIDEO_ASSERT(0);
#ifdef PGDL_SUPPORT
            if (g_video_dec_status.scenario == VIDEO_SCENARIO_PGDL) {
                kal_prompt_trace(MOD_MED, "FSAL_Read Error: %d, %d, %d\n", g_video_dec_status.FSAL_STATUS, current_pos, current_size, current_buffer_addr);
            }
#endif

            g_video_dec_status.FSAL_ERROR_FILE = g_video_dec_info_ptr->pstFSAL;
            return FSAL_ERROR;
        }
    }

    return MEDIA_STATUS_OK;
}
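
/*
 * Hypothetical in-file usage sketch: read one chunk of bitstream into a local
 * buffer. The 4 KB buffer and helper name are assumptions; per the check above,
 * reads larger than 512 bytes take the FSAL direct path, smaller ones the normal path.
 */
static MEDIA_STATUS_CODE read_bitstream_chunk_example(kal_uint32 file_pos)
{
    static kal_uint8 chunk_buffer[4096];
    MEDIA_STATUS_CODE status;

    status = video_file_read_bitstream_data(file_pos, sizeof(chunk_buffer),
                                            (kal_uint32)chunk_buffer);
    if (status != MEDIA_STATUS_OK)
    {
        /* FSAL seek or read failed; g_video_dec_status holds the details */
        return status;
    }

    return MEDIA_STATUS_OK;
}
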
kal_bool open_api_h264_add_dpb_info(kal_uint32 frame_addr, kal_uint32 frame_length)
{
    kal_uint32 index;
    kal_bool b_found = KAL_FALSE;
    H264_dpb_frame_entry *p_frame_entry = g_open_api_h264_dpb_frame_info.frame_list;

    dbg_print("[DRV H264]open_api_h264_add_dpb_info(), frame_no:%d\n\r", g_open_api_h264_dec_info_ptr->hdr_add_frames_no);
    video_dbg_trace(H264_DEC_ADD_FRAME, g_open_api_h264_dec_info_ptr->hdr_add_frames_no);

    // pre-check whether this frame_no already exists in the list
    for(index = 0; index < (g_open_api_h264_dpb_frame_info.max_queue_num*2); index++)
    {
        if((p_frame_entry->b_used == KAL_TRUE) && 
           (p_frame_entry->frame_no==g_open_api_h264_dec_info_ptr->hdr_add_frames_no))
        {
            // the entry may already exist if a previous decode failed because the DPB was full
            video_dbg_trace(H264_DEC_ADD_EXIST_FRAME, g_open_api_h264_dec_info_ptr->hdr_add_frames_no);
            dbg_print("[DRV H264]open_api_h264_add_dpb_info(), frame_no:%d\n\r", g_open_api_h264_dec_info_ptr->hdr_add_frames_no);
            b_found = KAL_TRUE;
            if(p_frame_entry->p_dpb != NULL)
            {
                video_dbg_trace(H264_DEC_ASSERT, __LINE__);
                VIDEO_ASSERT(0);
            }
             
            p_frame_entry->addr = frame_addr;
            p_frame_entry->length = frame_length;
            
            if(g_open_api_h264_dpb_frame_info.p_next_parse == NULL)
               g_open_api_h264_dpb_frame_info.p_next_parse = p_frame_entry;
            
            return KAL_TRUE;
        }
        
        p_frame_entry++;      
    }
    
    p_frame_entry = g_open_api_h264_dpb_frame_info.frame_list;
    // find an empty slot in the list
    for(index = 0; index < (g_open_api_h264_dpb_frame_info.max_queue_num*2); index++)
    {
        if(p_frame_entry->b_used == KAL_FALSE)
        {
            b_found = KAL_TRUE;
            break;
        }
        p_frame_entry++;
    }

   
    //if(b_found == KAL_FALSE)
        //EXT_ASSERT(0, g_open_api_h264_dpb_frame_info.max_queue_num, 0, 0);
    
    if(b_found == KAL_FALSE)
    {
        video_dbg_trace(H264_DEC_DPB_FRAME_ENTRY_FULL, video_get_current_time());
        dbg_print("[DRV H264]open_api_h264_add_dpb_info()\n\r");
        return KAL_FALSE;
    }

    // store the information into the found slot
    p_frame_entry->addr = frame_addr;
    p_frame_entry->length = frame_length;
    p_frame_entry->frame_no=g_open_api_h264_dec_info_ptr->hdr_add_frames_no;
    //p_frame_entry->b_dec_done = KAL_FALSE;
    p_frame_entry->b_used = KAL_TRUE;
    p_frame_entry->p_next_dpb = NULL;
    p_frame_entry->p_prev_dpb = NULL;
    p_frame_entry->p_dpb = NULL;


    // link the new entry into the DPB frame list
    if(g_open_api_h264_dpb_frame_info.p_start == NULL)
    {
        g_open_api_h264_dpb_frame_info.p_start = p_frame_entry;	
        g_open_api_h264_dpb_frame_info.p_end = p_frame_entry;
        g_open_api_h264_dpb_frame_info.p_next_parse = p_frame_entry;
    }
    else
    {
        g_open_api_h264_dpb_frame_info.p_end->p_next_dpb = p_frame_entry;
        p_frame_entry->p_prev_dpb = g_open_api_h264_dpb_frame_info.p_end;
        g_open_api_h264_dpb_frame_info.p_end = p_frame_entry;
        if(g_open_api_h264_dpb_frame_info.p_next_parse == NULL)
            g_open_api_h264_dpb_frame_info.p_next_parse = p_frame_entry;
    }

    return KAL_TRUE;
}
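
/*
 * Hypothetical caller sketch (helper name and retry policy are assumptions):
 * register a received frame's bitstream buffer with the DPB bookkeeping. The
 * function returns KAL_FALSE when every frame-entry slot is occupied.
 */
static kal_bool add_dpb_frame_example(kal_uint32 bitstream_addr, kal_uint32 bitstream_length)
{
    if (open_api_h264_add_dpb_info(bitstream_addr, bitstream_length) == KAL_FALSE)
    {
        /* no free slot in the frame list; the caller would retry after a decode completes */
        return KAL_FALSE;
    }

    return KAL_TRUE;
}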