Example #1
static char* PrintCntlInfo(
	MP4FileHandle mp4File,
	MP4TrackId trackId)
{
  const char *media_data_name = MP4GetTrackMediaDataName(mp4File, trackId);
  const char *typeName = "Unknown";

  if (media_data_name == NULL) {
    typeName = "Unknown - no media data name";
  } else if (strcasecmp(media_data_name, "href") == 0) {
    typeName = "ISMA Href";
  } else {
    typeName = media_data_name;
  }

  MP4Duration trackDuration = 
    MP4GetTrackDuration(mp4File, trackId);
 
  double msDuration = 
    UINT64_TO_DOUBLE(MP4ConvertFromTrackDuration(mp4File, trackId, 
						 trackDuration, MP4_MSECS_TIME_SCALE));
  char *sInfo = (char *)MP4Malloc(256);

  snprintf(sInfo, 256,
	   "%u\tcontrol\t%s, %.3f secs\n",
	   trackId, 
	   typeName,
	   msDuration / 1000.0);
  return sInfo;
}
Example #2
/**************************************************************************
 * Quicktime stream base class functions
 **************************************************************************/
CMp4ByteStream::CMp4ByteStream (CMp4File *parent,
				MP4TrackId track,
				const char *type,
				bool has_video)
  : COurInByteStream(type)
{
#ifdef ISMACRYP_DEBUG
  my_enc_file = fopen("encbuffer.raw", "w");
  my_unenc_file = fopen("unencbuffer.raw", "w");
  my_unenc_file2 = fopen("unencbuffer2.raw", "w");
#endif
#ifdef OUTPUT_TO_FILE
  char buffer[80];
  strcpy(buffer, type);
  strcat(buffer, ".raw");
  m_output_file = fopen(buffer, "w");
#endif
  m_track = track;
  m_frame_on = 1;
  m_parent = parent;
  m_eof = false;
  MP4FileHandle fh = parent->get_file();
  m_frames_max = MP4GetTrackNumberOfSamples(fh, m_track);
  mp4f_message(LOG_DEBUG, "%s - %u samples", type, m_frames_max);
  m_max_frame_size = MP4GetTrackMaxSampleSize(fh, m_track) + 4; 
  m_sample_freq = MP4GetTrackTimeScale(fh, m_track);
  m_buffer = (u_int8_t *) malloc(m_max_frame_size * sizeof(u_int8_t));
  m_has_video = has_video;
  m_frame_in_buffer = 0xffffffff;
  MP4Duration trackDuration;
  trackDuration = MP4GetTrackDuration(fh, m_track);
  uint64_t max_ts;
  max_ts = MP4ConvertFromTrackDuration(fh, 
				       m_track, 
				       trackDuration,
				       MP4_MSECS_TIME_SCALE);
  m_max_time = UINT64_TO_DOUBLE(max_ts);
  m_max_time /= 1000.0;
  mp4f_message(LOG_DEBUG, 
	       "MP4 %s max time is "U64" %g", type, max_ts, m_max_time);
  read_frame(1, NULL);
}
Example #3
int AacPcm::getInfos(MediaInfo *infos)
{
	if(!infos)
		return 1;

	if(hDecoder)
	{
		SHOW_INFO();
	    return 0;
	}

	IsAAC=strcmpi(infos->getFilename()+lstrlen(infos->getFilename())-4,".aac")==0;

	if(!IsAAC) // MP4 file ---------------------------------------------------------------------
	{
	MP4Duration			length;
	unsigned __int32	buffer_size;
    mp4AudioSpecificConfig mp4ASC;

		if(!(mp4File=MP4Read(infos->getFilename(), 0)))
			ERROR_getInfos("Error opening file");

		if((track=GetAACTrack(mp4File))<0)
			ERROR_getInfos(0); //"Unable to find correct AAC sound track");

		if(!(hDecoder=faacDecOpen()))
			ERROR_getInfos("Error initializing decoder library");

		MP4GetTrackESConfiguration(mp4File, track, (unsigned __int8 **)&buffer, &buffer_size);
		if(!buffer)
			ERROR_getInfos("MP4GetTrackESConfiguration");
		AudioSpecificConfig(buffer, buffer_size, &mp4ASC);
        Channels=mp4ASC.channelsConfiguration;

		if(faacDecInit2(hDecoder, buffer, buffer_size, &Samplerate, &Channels) < 0)
			ERROR_getInfos("Error initializing decoder library");
		FREE_ARRAY(buffer);

		length=MP4GetTrackDuration(mp4File, track);
		len_ms=(DWORD)MP4ConvertFromTrackDuration(mp4File, track, length, MP4_MSECS_TIME_SCALE);
		file_info.bitrate=MP4GetTrackBitRate(mp4File, track);
		file_info.version=MP4GetTrackAudioType(mp4File, track)==MP4_MPEG4_AUDIO_TYPE ? 4 : 2;
		numSamples=MP4GetTrackNumberOfSamples(mp4File, track);
		sampleId=1;
	}
	else // AAC file ------------------------------------------------------------------------------
	{   
	DWORD			read,
					tmp;
	BYTE			Channels4Raw=0;

		if(!(aacFile=fopen(infos->getFilename(),"rb")))
			ERROR_getInfos("Error opening file"); 

		// use bufferized stream
		setvbuf(aacFile,NULL,_IOFBF,32767);

		// get size of file
		fseek(aacFile, 0, SEEK_END);
		src_size=ftell(aacFile);
		fseek(aacFile, 0, SEEK_SET);

		if(!(buffer=(BYTE *)malloc(FAAD_STREAMSIZE)))
			ERROR_getInfos("Memory allocation error: buffer")

		tmp=src_size<FAAD_STREAMSIZE ? src_size : FAAD_STREAMSIZE;
		read=fread(buffer, 1, tmp, aacFile);
		if(read==tmp)
		{
			bytes_read=read;
			bytes_into_buffer=read;
		}
		else
			ERROR_getInfos("Read failed!")

		if(tagsize=id3v2_tag(buffer))
		{
			if(tagsize>(long)src_size)
				ERROR_getInfos("Corrupt stream!");
			if(tagsize<bytes_into_buffer)
			{
				bytes_into_buffer-=tagsize;
				memcpy(buffer,buffer+tagsize,bytes_into_buffer);
			}
			else
			{
				bytes_read=tagsize;
				bytes_into_buffer=0;
				if(tagsize>bytes_into_buffer)
					fseek(aacFile, tagsize, SEEK_SET);
			}
			if(src_size<bytes_read+FAAD_STREAMSIZE-bytes_into_buffer)
				tmp=src_size-bytes_read;
			else
				tmp=FAAD_STREAMSIZE-bytes_into_buffer;
			read=fread(buffer+bytes_into_buffer, 1, tmp, aacFile);
			if(read==tmp)
			{
				bytes_read+=read;
				bytes_into_buffer+=read;
			}
			else
				ERROR_getInfos("Read failed!");
		}

		if(get_AAC_format((char *)infos->getFilename(), &file_info, &seek_table, &seek_table_length, 0))
			ERROR_getInfos("get_AAC_format");
		IsSeekable=file_info.headertype==ADTS && seek_table && seek_table_length>0;
		BlockSeeking=!IsSeekable;

		if(!(hDecoder=faacDecOpen()))
			ERROR_getInfos("Can't open library");

		if(file_info.headertype==RAW)
		{
		faacDecConfiguration	config;

			config.defSampleRate=atoi(cfg_samplerate);
			switch(cfg_profile[1])
			{
			case 'a':
				config.defObjectType=MAIN;
				break;
			case 'o':
				config.defObjectType=LOW;
				break;
			case 'S':
				config.defObjectType=SSR;
				break;
			case 'T':
				config.defObjectType=LTP;
				break;
			}
			switch(cfg_bps[0])
			{
			case '1':
				config.outputFormat=FAAD_FMT_16BIT;
				break;
			case '2':
				config.outputFormat=FAAD_FMT_24BIT;
				break;
			case '3':
				config.outputFormat=FAAD_FMT_32BIT;
				break;
			case 'F':
				config.outputFormat=FAAD_FMT_24BIT;
				break;
			}
			faacDecSetConfiguration(hDecoder, &config);

			if(!FindBitrate)
			{
			AacPcm *NewInst;
				if(!(NewInst=new AacPcm()))
					ERROR_getInfos("Memory allocation error: NewInst");

				NewInst->FindBitrate=TRUE;
				if(NewInst->getInfos(infos))
					ERROR_getInfos(0);
				Channels4Raw=NewInst->frameInfo.channels;
				file_info.bitrate=NewInst->file_info.bitrate*Channels4Raw;
				delete NewInst;
			}
			else
			{
			DWORD	Samples,
					BytesConsumed;

				if((bytes_consumed=faacDecInit(hDecoder,buffer,bytes_into_buffer,&Samplerate,&Channels))<0)
					ERROR_getInfos("Can't init library");
				bytes_into_buffer-=bytes_consumed;
				if(!processData(infos,0,0))
					ERROR_getInfos(0);
				Samples=frameInfo.samples/sizeof(short);
				BytesConsumed=frameInfo.bytesconsumed;
				processData(infos,0,0);
				if(BytesConsumed<frameInfo.bytesconsumed)
					BytesConsumed=frameInfo.bytesconsumed;
				file_info.bitrate=(BytesConsumed*8*Samplerate)/Samples;
				if(!file_info.bitrate)
					file_info.bitrate=1000; // try to continue decoding
				return 0;
			}
		}

		if((bytes_consumed=faacDecInit(hDecoder, buffer, bytes_into_buffer, &Samplerate, &Channels))<0)
			ERROR_getInfos("faacDecInit failed!")
		bytes_into_buffer-=bytes_consumed;

		if(Channels4Raw)
			Channels=Channels4Raw;

		len_ms=(DWORD)((1000*((float)src_size*8))/file_info.bitrate);
	}

	SHOW_INFO();
    return 0;
}
Example #4
static int
aac_read (DB_fileinfo_t *_info, char *bytes, int size) {
    aac_info_t *info = (aac_info_t *)_info;
    if (info->eof) {
        trace ("aac_read: received call after eof\n");
        return 0;
    }
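    // bytes per interleaved output frame: one sample from each channel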
    int samplesize = _info->fmt.channels * _info->fmt.bps / 8;
    if (!info->file->vfs->is_streaming ()) {
        if (info->currentsample + size / samplesize > info->endsample) {
            size = (info->endsample - info->currentsample + 1) * samplesize;
            if (size <= 0) {
                trace ("aac_read: eof (current=%d, total=%d)\n", info->currentsample, info->endsample);
                return 0;
            }
        }
    }

    int initsize = size;

    while (size > 0) {
        if (info->skipsamples > 0 && info->out_remaining > 0) {
            int skip = min (info->out_remaining, info->skipsamples);
            if (skip < info->out_remaining) {
                memmove (info->out_buffer, info->out_buffer + skip * samplesize, (info->out_remaining - skip) * samplesize);
            }
            info->out_remaining -= skip;
            info->skipsamples -= skip;
        }
        if (info->out_remaining > 0) {
            int n = size / samplesize;
            n = min (info->out_remaining, n);

            char *src = info->out_buffer;
            if (info->noremap) {
                memcpy (bytes, src, n * samplesize);
                bytes += n * samplesize;
                src += n * samplesize;
            }
            else {
                int i, j;
                if (info->remap[0] == -1) {
                    // build remap mtx

                    // FIXME: should build channelmask 1st; then remap based on channelmask
                    for (i = 0; i < _info->fmt.channels; i++) {
                        switch (info->frame_info.channel_position[i]) {
                        case FRONT_CHANNEL_CENTER:
                            trace ("FC->%d\n", i);
                            info->remap[2] = i;
                            break;
                        case FRONT_CHANNEL_LEFT:
                            trace ("FL->%d\n", i);
                            info->remap[0] = i;
                            break;
                        case FRONT_CHANNEL_RIGHT:
                            trace ("FR->%d\n", i);
                            info->remap[1] = i;
                            break;
                        case SIDE_CHANNEL_LEFT:
                            trace ("SL->%d\n", i);
                            info->remap[6] = i;
                            break;
                        case SIDE_CHANNEL_RIGHT:
                            trace ("SR->%d\n", i);
                            info->remap[7] = i;
                            break;
                        case BACK_CHANNEL_LEFT:
                            trace ("RL->%d\n", i);
                            info->remap[4] = i;
                            break;
                        case BACK_CHANNEL_RIGHT:
                            trace ("RR->%d\n", i);
                            info->remap[5] = i;
                            break;
                        case BACK_CHANNEL_CENTER:
                            trace ("BC->%d\n", i);
                            info->remap[8] = i;
                            break;
                        case LFE_CHANNEL:
                            trace ("LFE->%d\n", i);
                            info->remap[3] = i;
                            break;
                        default:
                            trace ("aac: unknown ch(%d)->%d\n", info->frame_info.channel_position[i], i);
                            break;
                        }
                    }
                    for (i = 0; i < _info->fmt.channels; i++) {
                        trace ("%d ", info->remap[i]);
                    }
                    trace ("\n");
                    if (info->remap[0] == -1) {
                        info->remap[0] = 0;
                    }
                    if ((_info->fmt.channels == 1 && info->remap[0] == FRONT_CHANNEL_CENTER)
                        || (_info->fmt.channels == 2 && info->remap[0] == FRONT_CHANNEL_LEFT && info->remap[1] == FRONT_CHANNEL_RIGHT)) {
                        info->noremap = 1;
                    }
                }

                for (i = 0; i < n; i++) {
                    for (j = 0; j < _info->fmt.channels; j++) {
                        ((int16_t *)bytes)[j] = ((int16_t *)src)[info->remap[j]];
                    }
                    src += samplesize;
                    bytes += samplesize;
                }
            }
            size -= n * samplesize;

            if (n == info->out_remaining) {
                info->out_remaining = 0;
            }
            else {
                memmove (info->out_buffer, src, (info->out_remaining - n) * samplesize);
                info->out_remaining -= n;
            }
            continue;
        }

        char *samples = NULL;

        if (info->mp4file) {
            if (info->mp4sample >= info->mp4samples) {
                break;
            }
            
            unsigned char *buffer = NULL;
            int buffer_size = 0;
#ifdef USE_MP4FF
            int rc = mp4ff_read_sample (info->mp4file, info->mp4track, info->mp4sample, &buffer, &buffer_size);
            if (rc == 0) {
                trace ("mp4ff_read_sample failed\n");
                info->eof = 1;
                break;
            }
#else

            buffer = info->samplebuffer;
            buffer_size = info->maxSampleSize;
            MP4Timestamp sampleTime;
            MP4Duration sampleDuration;
            MP4Duration sampleRenderingOffset;
            bool isSyncSample;
            MP4ReadSample (info->mp4file, info->mp4track, info->mp4sample, &buffer, &buffer_size, &sampleTime, &sampleDuration, &sampleRenderingOffset, &isSyncSample);
            // convert timestamp and duration from track time to milliseconds
            u_int64_t myTime = MP4ConvertFromTrackTimestamp (info->mp4file, info->mp4track, 
                    sampleTime, MP4_MSECS_TIME_SCALE);

            u_int64_t myDuration = MP4ConvertFromTrackDuration (info->mp4file, info->mp4track,
                    sampleDuration, MP4_MSECS_TIME_SCALE);
#endif
            info->mp4sample++;
            samples = NeAACDecDecode(info->dec, &info->frame_info, buffer, buffer_size);

            if (buffer) {
                free (buffer);
            }
            if (!samples) {
                break;
            }
        }
        else {
            if (info->remaining < AAC_BUFFER_SIZE) {
                trace ("fread from offs %lld\n", deadbeef->ftell (info->file));
                size_t res = deadbeef->fread (info->buffer + info->remaining, 1, AAC_BUFFER_SIZE-info->remaining, info->file);
                info->remaining += res;
                trace ("remain: %d\n", info->remaining);
                if (!info->remaining) {
                    break;
                }
            }

            trace ("NeAACDecDecode %d bytes\n", info->remaining);
            samples = NeAACDecDecode (info->dec, &info->frame_info, info->buffer, info->remaining);
            trace ("samples =%p\n", samples);
            if (!samples) {
                trace ("NeAACDecDecode failed with error %s (%d), consumed=%d\n", NeAACDecGetErrorMessage(info->frame_info.error), (int)info->frame_info.error, info->frame_info.bytesconsumed);

                if (info->num_errors > 10) {
                    trace ("NeAACDecDecode failed %d times, interrupting\n", info->num_errors);
                    break;
                }
                info->num_errors++;
                info->remaining = 0;
                continue;
            }
            info->num_errors=0;
            int consumed = info->frame_info.bytesconsumed;
            if (consumed > info->remaining) {
                trace ("NeAACDecDecode consumed more than available! wtf?\n");
                break;
            }
            if (consumed == info->remaining) {
                info->remaining = 0;
            }
            else if (consumed > 0) {
                memmove (info->buffer, info->buffer + consumed, info->remaining - consumed);
                info->remaining -= consumed;
            }
        }

        if (info->frame_info.samples > 0) {
            memcpy (info->out_buffer, samples, info->frame_info.samples * 2);
            info->out_remaining = info->frame_info.samples / info->frame_info.channels;
        }
    }

    info->currentsample += (initsize-size) / samplesize;
    return initsize-size;
}
Example #5
static char* PrintAudioInfo(
	MP4FileHandle mp4File,
	MP4TrackId trackId)
{
	static const char* mpeg4AudioNames[] = {
		"MPEG-4 AAC main",
		"MPEG-4 AAC LC",
		"MPEG-4 AAC SSR",
		"MPEG-4 AAC LTP",
		"MPEG-4 AAC HE",
		"MPEG-4 AAC Scalable",
		"MPEG-4 TwinVQ",
		"MPEG-4 CELP",
		"MPEG-4 HVXC",
		NULL, NULL,
		"MPEG-4 TTSI",
		"MPEG-4 Main Synthetic",
		"MPEG-4 Wavetable Syn",
		"MPEG-4 General MIDI",
		"MPEG-4 Algo Syn and Audio FX",
		"MPEG-4 ER AAC LC",
		NULL,
		"MPEG-4 ER AAC LTP",
		"MPEG-4 ER AAC Scalable",
		"MPEG-4 ER TwinVQ",
		"MPEG-4 ER BSAC",
		"MPEG-4 ER AAC LD",
		"MPEG-4 ER CELP",
		"MPEG-4 ER HVXC",
		"MPEG-4 ER HILN",
		"MPEG-4 ER Parametric",
		"MPEG-4 SSC",
		"MPEG-4 PS",
		"MPEG-4 MPEG Surround",
		NULL,
		"MPEG-4 Layer-1",
		"MPEG-4 Layer-2",
		"MPEG-4 Layer-3",
		"MPEG-4 DST",
		"MPEG-4 Audio Lossless",
		"MPEG-4 SLS",
		"MPEG-4 SLS non-core", 
	};

	static const u_int8_t mpegAudioTypes[] = {
		MP4_MPEG2_AAC_MAIN_AUDIO_TYPE,	// 0x66
		MP4_MPEG2_AAC_LC_AUDIO_TYPE,	// 0x67
		MP4_MPEG2_AAC_SSR_AUDIO_TYPE,	// 0x68
		MP4_MPEG2_AUDIO_TYPE,			// 0x69
		MP4_MPEG1_AUDIO_TYPE,			// 0x6B
		// private types
		MP4_PCM16_LITTLE_ENDIAN_AUDIO_TYPE,
		MP4_VORBIS_AUDIO_TYPE,
		MP4_ALAW_AUDIO_TYPE,
		MP4_ULAW_AUDIO_TYPE,
		MP4_G723_AUDIO_TYPE,
		MP4_PCM16_BIG_ENDIAN_AUDIO_TYPE,
	};
	static const char* mpegAudioNames[] = {
		"MPEG-2 AAC Main",
		"MPEG-2 AAC LC",
		"MPEG-2 AAC SSR",
		"MPEG-2 Audio (13818-3)",
		"MPEG-1 Audio (11172-3)",
		// private types
		"PCM16 (little endian)",
		"Vorbis",
		"G.711 aLaw",
		"G.711 uLaw",
		"G.723.1",
		"PCM16 (big endian)",
	};
	u_int8_t numMpegAudioTypes =
		sizeof(mpegAudioTypes) / sizeof(u_int8_t);

	const char* typeName = "Unknown";
	bool foundType = false;
	u_int8_t type = 0;
	const char *media_data_name;

	media_data_name = MP4GetTrackMediaDataName(mp4File, trackId);

	if (media_data_name == NULL) {
	  typeName = "Unknown - no media data name";
	} else if (strcasecmp(media_data_name, "samr") == 0) {
	    typeName = "AMR";
	    foundType = true;
	} else if (strcasecmp(media_data_name, "sawb") == 0) {
	    typeName = "AMR-WB";
	    foundType = true;
	} else if (strcasecmp(media_data_name, "mp4a") == 0) {
	    
	  type = MP4GetTrackEsdsObjectTypeId(mp4File, trackId);
	  switch (type) {
	  case MP4_INVALID_AUDIO_TYPE:
	    typeName = "AAC from .mov";
	    foundType = true;
	    break;
	  case MP4_MPEG4_AUDIO_TYPE:  {
	
	    type = MP4GetTrackAudioMpeg4Type(mp4File, trackId);
	    if (type == MP4_MPEG4_INVALID_AUDIO_TYPE ||
		type > NUM_ELEMENTS_IN_ARRAY(mpeg4AudioNames) || 
		mpeg4AudioNames[type - 1] == NULL) {
	      typeName = "MPEG-4 Unknown Profile";
	    } else {
	      typeName = mpeg4AudioNames[type - 1];
	      foundType = true;
	    }
	    break;
	  }
	    // fall through
	  default:
	    for (u_int8_t i = 0; i < numMpegAudioTypes; i++) {
	      if (type == mpegAudioTypes[i]) {
		typeName = mpegAudioNames[i];
		foundType = true;
		break;
	      }
	    }
	  }
	} else {
	  typeName = media_data_name;
	  foundType = true;
	}

	u_int32_t timeScale =
		MP4GetTrackTimeScale(mp4File, trackId);

	MP4Duration trackDuration =
		MP4GetTrackDuration(mp4File, trackId);

	double msDuration =
		UINT64_TO_DOUBLE(MP4ConvertFromTrackDuration(mp4File, trackId,
			trackDuration, MP4_MSECS_TIME_SCALE));

	u_int32_t avgBitRate =
		MP4GetTrackBitRate(mp4File, trackId);

	char *sInfo = (char*)MP4Malloc(256);

	// type duration avgBitrate samplingFrequency
	if (foundType)
	  snprintf(sInfo, 256, 
		  "%u\taudio\t%s%s, %.3f secs, %u kbps, %u Hz\n",
		  trackId,
		  MP4IsIsmaCrypMediaTrack(mp4File, trackId) ? "enca - " : "",
		  typeName,
		  msDuration / 1000.0,
		  (avgBitRate + 500) / 1000,
		  timeScale);
	else
	  snprintf(sInfo, 256,
		  "%u\taudio\t%s%s(%u), %.3f secs, %u kbps, %u Hz\n",
		  trackId,
		  MP4IsIsmaCrypMediaTrack(mp4File, trackId) ? "enca - " : "",
		  typeName,
		  type,
		  msDuration / 1000.0,
		  (avgBitRate + 500) / 1000,
		  timeScale);

	return sInfo;
}
Example #6
static char* PrintVideoInfo(
	MP4FileHandle mp4File,
	MP4TrackId trackId)
{

	static const u_int8_t mpegVideoTypes[] = {
		MP4_MPEG2_SIMPLE_VIDEO_TYPE,	// 0x60
		MP4_MPEG2_MAIN_VIDEO_TYPE,		// 0x61
		MP4_MPEG2_SNR_VIDEO_TYPE,		// 0x62
		MP4_MPEG2_SPATIAL_VIDEO_TYPE,	// 0x63
		MP4_MPEG2_HIGH_VIDEO_TYPE,		// 0x64
		MP4_MPEG2_442_VIDEO_TYPE,		// 0x65
		MP4_MPEG1_VIDEO_TYPE,			// 0x6A
		MP4_JPEG_VIDEO_TYPE,			// 0x6C
		MP4_YUV12_VIDEO_TYPE,
		MP4_H263_VIDEO_TYPE,
		MP4_H261_VIDEO_TYPE,
	};
	static const char* mpegVideoNames[] = {
		"MPEG-2 Simple",
		"MPEG-2 Main",
		"MPEG-2 SNR",
		"MPEG-2 Spatial",
		"MPEG-2 High",
		"MPEG-2 4:2:2",
		"MPEG-1",
		"JPEG",
		"YUV12",
		"H.263",
		"H.261",
	};
	u_int8_t numMpegVideoTypes =
		sizeof(mpegVideoTypes) / sizeof(u_int8_t);
	bool foundTypeName = false;
	const char* typeName = "Unknown";

	const char *media_data_name;
	char originalFormat[8];
	char  oformatbuffer[32];
	originalFormat[0] = 0;
	*oformatbuffer = 0;
	uint8_t type = 0;
	
	media_data_name = MP4GetTrackMediaDataName(mp4File, trackId);
	// encv 264b
	if (strcasecmp(media_data_name, "encv") == 0) {
	  if (MP4GetTrackMediaDataOriginalFormat(mp4File, 
						 trackId, 
						 originalFormat, 
						 sizeof(originalFormat)) == false)
	      media_data_name = NULL;
	      
	}
  
	char  typebuffer[80];
	if (media_data_name == NULL) {
	  typeName = "Unknown - no media data name";
	  foundTypeName = true;
	} else if ((strcasecmp(media_data_name, "avc1") == 0) ||
	  	(strcasecmp(originalFormat, "264b") == 0)) {
	  // avc
	  uint8_t profile, level;
	  char profileb[20], levelb[20];
	  if (MP4GetTrackH264ProfileLevel(mp4File, trackId, 
					  &profile, &level)) {
	    if (profile == 66) {
	      strcpy(profileb, "Baseline");
	    } else if (profile == 77) {
	      strcpy(profileb, "Main");
	    } else if (profile == 88) {
	      strcpy(profileb, "Extended");
	    } else if (profile == 100) {
	      strcpy(profileb, "High");
	    } else if (profile == 110) {
	      strcpy(profileb, "High 10");
	    } else if (profile == 122) {
	      strcpy(profileb, "High 4:2:2");
	    } else if (profile == 144) {
	      strcpy(profileb, "High 4:4:4");
	    } else {
	      snprintf(profileb, 20, "Unknown Profile %x", profile);
	    } 
	    switch (level) {
	    case 10: case 20: case 30: case 40: case 50:
	      snprintf(levelb, 20, "%u", level / 10);
	      break;
	    case 11: case 12: case 13:
	    case 21: case 22:
	    case 31: case 32:
	    case 41: case 42:
	    case 51:
	      snprintf(levelb, 20, "%u.%u", level / 10, level % 10);
	      break;
	    default:
	      snprintf(levelb, 20, "unknown level %x", level);
	      break;
	    }
	    if (originalFormat != NULL && originalFormat[0] != '\0') 
	      snprintf(oformatbuffer, 32, "(%s) ", originalFormat);
	    snprintf(typebuffer, sizeof(typebuffer), "H264 %s%s@%s", 
		    oformatbuffer, profileb, levelb);
	    typeName = typebuffer;
	  } else {
	    typeName = "H.264 - profile/level error";
	  }
	  foundTypeName = true;
	} else if (strcasecmp(media_data_name, "s263") == 0) {
	  // 3gp h.263
	  typeName = "H.263";
	  foundTypeName = true;
	} else if ((strcasecmp(media_data_name, "mp4v") == 0) ||
		   (strcasecmp(media_data_name, "encv") == 0)) {
	  // note encv might need its own field eventually.
	  type = MP4GetTrackEsdsObjectTypeId(mp4File, trackId);
	  if (type == MP4_MPEG4_VIDEO_TYPE) {
	    type = MP4GetVideoProfileLevel(mp4File, trackId);
	    typeName = Mpeg4VisualProfileName(type);
	    if (typeName == NULL) {
	      typeName = "MPEG-4 Unknown Profile";
	    } else {
	      foundTypeName = true;
	    }
	  } else {
	    for (u_int8_t i = 0; i < numMpegVideoTypes; i++) {
	      if (type == mpegVideoTypes[i]) {
		typeName = mpegVideoNames[i];
		foundTypeName = true;
		break;
	      }
	    }
	  }
	} else {
	  typeName = media_data_name;
	  foundTypeName = true; // we don't have a type value to display
	}

	MP4Duration trackDuration =
		MP4GetTrackDuration(mp4File, trackId);

	double msDuration =
		UINT64_TO_DOUBLE(MP4ConvertFromTrackDuration(mp4File, trackId,
			trackDuration, MP4_MSECS_TIME_SCALE));

	u_int32_t avgBitRate =
		MP4GetTrackBitRate(mp4File, trackId);

	// Note not all mp4 implementations set width and height correctly
	// The real answer can be buried inside the ES configuration info
	u_int16_t width = MP4GetTrackVideoWidth(mp4File, trackId);

	u_int16_t height = MP4GetTrackVideoHeight(mp4File, trackId);

	double fps = MP4GetTrackVideoFrameRate(mp4File, trackId);

	char *sInfo = (char*)MP4Malloc(256);

	// type duration avgBitrate frameSize frameRate
	if (foundTypeName) {
	  snprintf(sInfo, 256,
		  "%u\tvideo\t%s%s, %.3f secs, %u kbps, %ux%u @ %f fps\n",
		  trackId,
		  MP4IsIsmaCrypMediaTrack(mp4File, trackId) ? "encv - " : "",
		  typeName,
		  msDuration / 1000.0,
		  (avgBitRate + 500) / 1000,
		  width,
		  height,
		  fps
		  );
	} else {
	  snprintf(sInfo, 256,
		  "%u\tvideo\t%s(%u), %.3f secs, %u kbps, %ux%u @ %f fps\n",
		  trackId,
		  typeName,
		  type, 
		  msDuration / 1000.0,
		  (avgBitRate + 500) / 1000,
		  width,
		  height,
		  fps
		  );
	}

	return sInfo;
}
Example #7
static void *mp4Decode(void *args)
{
  MP4FileHandle mp4file;

  pthread_mutex_lock(&mutex);
  seekPosition = -1;
  bPlaying = TRUE;
  if(!(mp4file = MP4Read(args, 0))){
    mp4cfg.file_type = FILE_AAC;
    MP4Close(mp4file);
  }else{
    mp4cfg.file_type = FILE_MP4;
  }

  if(mp4cfg.file_type == FILE_MP4){
    // We are reading a MP4 file
    gint		mp4track;

    if((mp4track = getAACTrack(mp4file)) < 0){
      //TODO: check here for others Audio format.....
      g_print("Unsupported Audio track type\n");
      g_free(args);
      MP4Close(mp4file);
      bPlaying = FALSE;
      pthread_mutex_unlock(&mutex);
      pthread_exit(NULL);
    }else{
      faacDecHandle	decoder;
      unsigned char	*buffer	= NULL;
      guint		bufferSize = 0;
      gulong		samplerate;
      guchar		channels;
      guint		avgBitrate;
      MP4Duration	duration;
      gulong		msDuration;
      MP4SampleId	numSamples;
      MP4SampleId	sampleID = 1;

      decoder = faacDecOpen();
      MP4GetTrackESConfiguration(mp4file, mp4track, &buffer, &bufferSize);
      if(!buffer){
	g_free(args);
	faacDecClose(decoder);
	MP4Close(mp4file);
	bPlaying = FALSE;
	pthread_mutex_unlock(&mutex);
	pthread_exit(NULL);
      }
      if(faacDecInit2(decoder, buffer, bufferSize, &samplerate, &channels)<0){
	g_free(args);
	faacDecClose(decoder);
	MP4Close(mp4file);
	bPlaying = FALSE;
	pthread_mutex_unlock(&mutex);
	pthread_exit(NULL);
      }
      g_free(buffer);
      if(channels == 0){
	g_print("Number of Channels not supported\n");
	g_free(args);
	faacDecClose(decoder);
	MP4Close(mp4file);
	bPlaying = FALSE;
	pthread_mutex_unlock(&mutex);
	pthread_exit(NULL);
      }
      duration = MP4GetTrackDuration(mp4file, mp4track);
      msDuration = MP4ConvertFromTrackDuration(mp4file, mp4track, duration,
					       MP4_MSECS_TIME_SCALE);
      numSamples = MP4GetTrackNumberOfSamples(mp4file, mp4track);
      mp4_ip.output->open_audio(FMT_S16_NE, samplerate, channels);
      mp4_ip.output->flush(0);
      mp4_ip.set_info(args, msDuration, -1, samplerate/1000, channels);
      g_print("MP4 - %d channels @ %d Hz\n", channels, samplerate);

      while(bPlaying){
	void*			sampleBuffer;
	faacDecFrameInfo	frameInfo;    
	gint			rc;

	if(seekPosition!=-1){
	  duration = MP4ConvertToTrackDuration(mp4file,
					       mp4track,
					       seekPosition*1000,
					       MP4_MSECS_TIME_SCALE);
	  sampleID = MP4GetSampleIdFromTime(mp4file, mp4track, duration, 0);
	  mp4_ip.output->flush(seekPosition*1000);
	  seekPosition = -1;
	}
	buffer=NULL;
	bufferSize=0;
	if(sampleID > numSamples){
	  mp4_ip.output->close_audio();
	  g_free(args);
	  faacDecClose(decoder);
	  MP4Close(mp4file);
	  bPlaying = FALSE;
	  pthread_mutex_unlock(&mutex);
	  pthread_exit(NULL);
	}
	rc = MP4ReadSample(mp4file, mp4track, sampleID++, &buffer, &bufferSize,
			   NULL, NULL, NULL, NULL);
	//g_print("%d/%d\n", sampleID-1, numSamples);
	if((rc==0) || (buffer== NULL)){
	  g_print("MP4: read error\n");
	  sampleBuffer = NULL;
	  sampleID=0;
	  mp4_ip.output->buffer_free();
	  mp4_ip.output->close_audio();
	  g_free(args);
	  faacDecClose(decoder);
	  MP4Close(mp4file);
	  bPlaying = FALSE;
	  pthread_mutex_unlock(&mutex);
	  pthread_exit(NULL);
	}else{
	  sampleBuffer = faacDecDecode(decoder, &frameInfo, buffer, bufferSize);
	  if(frameInfo.error > 0){
	    g_print("MP4: %s\n",
		    faacDecGetErrorMessage(frameInfo.error));
	    mp4_ip.output->close_audio();
	    g_free(args);
	    faacDecClose(decoder);
	    MP4Close(mp4file);
	    bPlaying = FALSE;
	    pthread_mutex_unlock(&mutex);
	    pthread_exit(NULL);
	  }
	  if(buffer){
	    g_free(buffer); buffer=NULL; bufferSize=0;
	  }
	  while(bPlaying && mp4_ip.output->buffer_free()<frameInfo.samples<<1)
	    xmms_usleep(30000);
	}
	mp4_ip.add_vis_pcm(mp4_ip.output->written_time(),
			   FMT_S16_NE,
			   channels,
			   frameInfo.samples<<1,
			   sampleBuffer);
	mp4_ip.output->write_audio(sampleBuffer, frameInfo.samples<<1);
      }
      while(bPlaying && mp4_ip.output->buffer_free()){
	xmms_usleep(10000);
      }
      mp4_ip.output->close_audio();
      g_free(args);
      faacDecClose(decoder);
      MP4Close(mp4file);
      bPlaying = FALSE;
      pthread_mutex_unlock(&mutex);
      pthread_exit(NULL);
    }
  } else{
    // WE ARE READING AN AAC FILE
    FILE		*file = NULL;
    faacDecHandle	decoder = 0;
    guchar		*buffer = 0;
    gulong		bufferconsumed = 0;
    gulong		samplerate = 0;
    guchar		channels;
    gulong		buffervalid = 0;
    TitleInput*		input;
    gchar		*temp = g_strdup(args);
    gchar		*ext  = strrchr(temp, '.');
    gchar		*xmmstitle = NULL;
    faacDecConfigurationPtr config;

    if((file = fopen(args, "rb")) == 0){
      g_print("AAC: can't find file %s\n", args);
      bPlaying = FALSE;
      pthread_mutex_unlock(&mutex);
      pthread_exit(NULL);
    }
    if((decoder = faacDecOpen()) == NULL){
      g_print("AAC: Open Decoder Error\n");
      fclose(file);
      bPlaying = FALSE;
      pthread_mutex_unlock(&mutex);
      pthread_exit(NULL);
    }
    config = faacDecGetCurrentConfiguration(decoder);
    config->useOldADTSFormat = 0;
    faacDecSetConfiguration(decoder, config);
    if((buffer = g_malloc(BUFFER_SIZE)) == NULL){
      g_print("AAC: error g_malloc\n");
      fclose(file);
      bPlaying = FALSE;
      faacDecClose(decoder);
      pthread_mutex_unlock(&mutex);
      pthread_exit(NULL);
    }
    if((buffervalid = fread(buffer, 1, BUFFER_SIZE, file))==0){
      g_print("AAC: Error reading file\n");
      g_free(buffer);
      fclose(file);
      bPlaying = FALSE;
      faacDecClose(decoder);
      pthread_mutex_unlock(&mutex);
      pthread_exit(NULL);
    }
    XMMS_NEW_TITLEINPUT(input);
    input->file_name = g_basename(temp);
    input->file_ext = ext ? ext+1 : NULL;
    input->file_path = temp;
    if(!strncmp(buffer, "ID3", 3)){
      gint size = 0;

      fseek(file, 0, SEEK_SET);
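      // ID3v2 stores the tag size as a 28-bit synchsafe integer (four bytes
      // of 7 significant bits each); add 10 bytes for the tag header itself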
      size = (buffer[6]<<21) | (buffer[7]<<14) | (buffer[8]<<7) | buffer[9];
      size+=10;
      fread(buffer, 1, size, file);
      buffervalid = fread(buffer, 1, BUFFER_SIZE, file);
    }
    xmmstitle = xmms_get_titlestring(xmms_get_gentitle_format(), input);
    if(xmmstitle == NULL)
      xmmstitle = g_strdup(input->file_name);
    if(temp) g_free(temp);
    if(input->performer) g_free(input->performer);
    if(input->album_name) g_free(input->album_name);
    if(input->track_name) g_free(input->track_name);
    if(input->genre) g_free(input->genre);
    g_free(input);
    bufferconsumed = faacDecInit(decoder,
				 buffer,
				 buffervalid,
				 &samplerate,
				 &channels);
    if(mp4_ip.output->open_audio(FMT_S16_NE,samplerate,channels) == FALSE){
      g_print("AAC: Output Error\n");
      g_free(buffer); buffer=0;
      faacDecClose(decoder);
      fclose(file);
      mp4_ip.output->close_audio();
      /*
      if(positionTable){
	g_free(positionTable); positionTable=0;
      }
      */
      g_free(xmmstitle);
      bPlaying = FALSE;
      pthread_mutex_unlock(&mutex);
      pthread_exit(NULL);
    }
    //if(bSeek){
    //mp4_ip.set_info(xmmstitle, lenght*1000, -1, samplerate, channels);
      //}else{
    mp4_ip.set_info(xmmstitle, -1, -1, samplerate, channels);
      //}
    mp4_ip.output->flush(0);

    while(bPlaying && buffervalid > 0){
      faacDecFrameInfo	finfo;
      unsigned long	samplesdecoded;
      char*		sample_buffer = NULL;
      /*
	if(bSeek && seekPosition!=-1){
	fseek(file, positionTable[seekPosition], SEEK_SET);
	bufferconsumed=0;
	buffervalid = fread(buffer, 1, BUFFER_SIZE, file);
	aac_ip.output->flush(seekPosition*1000);
	seekPosition=-1;
	}
      */
      if(bufferconsumed > 0){
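	// slide the unconsumed bytes to the front of the buffer and refill it from the file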
	memmove(buffer, &buffer[bufferconsumed], buffervalid-bufferconsumed);
	buffervalid -= bufferconsumed;
	buffervalid += fread(&buffer[buffervalid], 1,
			     BUFFER_SIZE-buffervalid, file);
	bufferconsumed = 0;
      }
      sample_buffer = faacDecDecode(decoder, &finfo, buffer, buffervalid);
      if(finfo.error){
	config = faacDecGetCurrentConfiguration(decoder);
	if(config->useOldADTSFormat != 1){
	  faacDecClose(decoder);
	  decoder = faacDecOpen();
	  config = faacDecGetCurrentConfiguration(decoder);
	  config->useOldADTSFormat = 1;
	  faacDecSetConfiguration(decoder, config);
	  finfo.bytesconsumed=0;
	  finfo.samples = 0;
	  faacDecInit(decoder,
		      buffer,
		      buffervalid,
		      &samplerate,
		      &channels);
	}else{
	  g_print("FAAD2 Warning %s\n", faacDecGetErrorMessage(finfo.error));
	  buffervalid = 0;
	}
      }
      bufferconsumed += finfo.bytesconsumed;
      samplesdecoded = finfo.samples;
      if((samplesdecoded<=0) && !sample_buffer){
	g_print("AAC: error sample decoding\n");
	continue;
      }
      while(bPlaying && mp4_ip.output->buffer_free() < (samplesdecoded<<1)){
	xmms_usleep(10000);
      }
      mp4_ip.add_vis_pcm(mp4_ip.output->written_time(),
			 FMT_S16_LE, channels,
			 samplesdecoded<<1, sample_buffer);
      mp4_ip.output->write_audio(sample_buffer, samplesdecoded<<1);
    }
    while(bPlaying && mp4_ip.output->buffer_playing()){
      xmms_usleep(10000);
    }
    mp4_ip.output->buffer_free();
    mp4_ip.output->close_audio();
    bPlaying = FALSE;
    g_free(buffer);
    faacDecClose(decoder);
    g_free(xmmstitle);
    fclose(file);
    seekPosition = -1;
    /*
    if(positionTable){
      g_free(positionTable); positionTable=0;
    }
    */
    bPlaying = FALSE;
    pthread_mutex_unlock(&mutex);
    pthread_exit(NULL);
    
  }
}
Example #8
int main(int argc, char** argv)
{


	if (argc < 2) {
		fprintf(stderr, "Usage: %s <file>\n", argv[0]);
		exit(1);
	}

	//u_int32_t verbosity = MP4_DETAILS_ALL;
	char* fileName = argv[1];

	// open the mp4 file, and read meta-info
	MP4FileHandle mp4File = MP4Read(fileName);

	uint8_t profileLevel = MP4GetVideoProfileLevel(mp4File);

	// get a handle on the first video track
	MP4TrackId trackId = MP4FindTrackId(mp4File, 0, "video");

	// gather the crucial track information 

	uint32_t timeScale = MP4GetTrackTimeScale(mp4File, trackId);

	// note all times and durations 
	// are in units of the track time scale

	MP4Duration trackDuration = MP4GetTrackDuration(mp4File, trackId);

	MP4SampleId numSamples = MP4GetTrackNumberOfSamples(mp4File, trackId);

	uint32_t maxSampleSize = MP4GetTrackMaxSampleSize(mp4File, trackId);

	uint8_t* pConfig;
	uint32_t configSize = 0;

	MP4GetTrackESConfiguration(mp4File, trackId, &pConfig, &configSize);

	// initialize decoder with Elementary Stream (ES) configuration

	// done with our copy of ES configuration
	free(pConfig);


	// now consecutively read and display the track samples

	uint8_t* pSample = (uint8_t*)malloc(maxSampleSize);
	uint32_t sampleSize;

	MP4Timestamp sampleTime;
	MP4Duration sampleDuration;
	MP4Duration sampleRenderingOffset;
	bool isSyncSample;

	for (MP4SampleId sampleId = 1; sampleId <= numSamples; sampleId++) {

		// give ReadSample our own buffer, and let it know how big it is
		sampleSize = maxSampleSize;

		// read next sample from video track
		MP4ReadSample(mp4File, trackId, sampleId, 
			&pSample, &sampleSize,
			&sampleTime, &sampleDuration, &sampleRenderingOffset, 
			&isSyncSample);

		// convert timestamp and duration from track time to milliseconds
		uint64_t myTime = MP4ConvertFromTrackTimestamp(mp4File, trackId, 
			sampleTime, MP4_MSECS_TIME_SCALE);

		uint64_t myDuration = MP4ConvertFromTrackDuration(mp4File, trackId,
			sampleDuration, MP4_MSECS_TIME_SCALE);

		// decode frame and display it
	}

	// close mp4 file
	MP4Close(mp4File);


	// Note to seek to time 'when' in the track
	// use MP4GetSampleIdFromTime(MP4FileHandle hFile, MP4TrackId trackId,
	//		MP4Timestamp when, bool wantSyncSample)
	// 'wantSyncSample' determines if a sync sample is desired or not
	// e.g.
	// MP4Timestamp when = 
	//	MP4ConvertToTrackTimestamp(mp4File, trackId, 30, MP4_SECS_TIME_SCALE);
	// MP4SampleId newSampleId = MP4GetSampleIdFromTime(mp4File, trackId, when, true);
	// MP4ReadSample(mp4File, trackId, newSampleId, ...);
	// 
	// Note that start time for sample may be later than 'when'

	exit(0);
}