Example #1
bool GetValues_mp3infp(FILE_INFO *pFileMP3)
{
	// Use mp3infp.dll to obtain the playing time and other information
	if (!Load_mp3infp()) {
		return false;
	}
	if(lpmp3infp_Load(NULL,(char*)(const char *)GetFullPath(pFileMP3)) != ERROR_SUCCESS)
	{
		//printf("load failed\n");
		return false;
	}
	char* buf = NULL;
	// Playing time
	{
		lpmp3infp_GetValue("TIME",&buf);
		UINT nDummy, nTime = 0;
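		// mp3infp reports TIME as "mm:ss (Nsec)" or "hh:mm:ss (Nsec)"; a single ':' means there is no hour field.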
		if (buf != NULL && buf[0] != '\0') {
			if (strchr(buf, ':') == strrchr(buf, ':')) { /* Hurricane 217 */
				sscanf(buf, "%u:%u (%usec)", &nDummy, &nDummy, &nTime);
			} else {
				sscanf(buf, "%u:%u:%u (%usec)", &nDummy, &nDummy, &nDummy, &nTime);  /* Hurricane 217 */
			}
			SetPlayTime(pFileMP3, nTime);
		}
	}
	// Audio format
	{
		lpmp3infp_GetValue("AFMT",&buf);
		SetAudioFormat(pFileMP3, buf);
	}
	return true;
}
Example #2
bool OmxDecoder::ReadAudio(AudioFrame *aFrame, int64_t aSeekTimeUs)
{
  MOZ_ASSERT(aSeekTimeUs >= -1);

  status_t err;
  if (mAudioMetadataRead && aSeekTimeUs == -1) {
    // Use the data read into the buffer during metadata time
    err = OK;
  }
  else {
    ReleaseAudioBuffer();
    if (aSeekTimeUs != -1) {
      ReadOptions options;
      options.setSeekTo(aSeekTimeUs);
      err = mAudioSource->read(&mAudioBuffer, &options);
    } else {
      err = mAudioSource->read(&mAudioBuffer);
    }
  }
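  // Clear the metadata-read flag and the pending seek: the frame cached during Init()
  // has been consumed, and a retry after INFO_FORMAT_CHANGED must not seek again.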
  mAudioMetadataRead = false;

  aSeekTimeUs = -1;

  if (err == OK && mAudioBuffer->range_length() != 0) {
    int64_t timeUs;
    if (!mAudioBuffer->meta_data()->findInt64(kKeyTime, &timeUs)) {
      LOG("no frame time");
      return false;
    }

    if (timeUs < 0) {
      LOG("frame time %lld must be nonnegative", timeUs);
      return false;
    }

    return ToAudioFrame(aFrame, timeUs,
                        mAudioBuffer->data(),
                        mAudioBuffer->range_offset(),
                        mAudioBuffer->range_length(),
                        mAudioChannels, mAudioSampleRate);
  }
  else if (err == INFO_FORMAT_CHANGED) {
    // If the format changed, update our cached info.
    LOG("mAudioSource INFO_FORMAT_CHANGED");
    if (!SetAudioFormat())
      return false;
    else
      return ReadAudio(aFrame, aSeekTimeUs);
  }
  else if (err == ERROR_END_OF_STREAM) {
    LOG("mAudioSource END_OF_STREAM");
  }
  else if (err != OK) {
    LOG("mAudioSource ERROR %#x", err);
  }

  return err == OK;
}
Example #3
/// Open
BOOL CAudioPlayer::Open(void)
{
	BOOL bResult = FALSE;
	do
	{
		// Create the GraphBuilder
		if(!CreateGraphBuilder())
			break;

		HRESULT hr = NOERROR;
		if(FAILED(m_pGraphBulider->QueryInterface(IID_IBasicAudio, (void **)&m_pBasicAudio)))
			break;

		// Create the AudioCapture filter
		m_pAudioCapture = new CAudioCapture(NULL, &hr);
		if(NULL == m_pAudioCapture)
			break;
		m_pAudioCapture->AddRef();
		if (FAILED(m_pGraphBulider->AddFilter(m_pAudioCapture, L"Audio Capture")))
			break;

		// Set the audio format
		if(!SetAudioFormat(m_enFrequency, m_enChannel, m_enSample))
			break;

		// Create the audio renderer (DirectSound)
		hr = CoCreateInstance(CLSID_DSoundRender, NULL, CLSCTX_INPROC_SERVER, 
			IID_IBaseFilter, (void **)&m_pAudioRender);
		if (FAILED(hr))
			break;
		if (FAILED(m_pGraphBulider->AddFilter(m_pAudioRender, L"Audio Renderer")))
			break;

		// Connect the filters
		IPin* pOutPin = GetOutputPin(m_pAudioCapture, (uint16_t)0);
		IPin* pInPin  = GetInputPin(m_pAudioRender, (uint16_t)0);
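		// IGraphBuilder::Connect performs an intelligent connect and may insert intermediate filters if the pins cannot be connected directly.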

		hr = m_pGraphBulider->Connect(pOutPin, pInPin);
		SAFE_RELEASE(pOutPin);
		SAFE_RELEASE(pInPin);

		if(FAILED(hr))
		{
			WCHAR szError[256] = {0};
			AMGetErrorText(hr, szError, 256);
			break;
		}

		bResult = TRUE;
	}while(FALSE);

	if(!bResult)
		Close();

	return bResult;
}
Example #4
/// Open the audio capture device
BOOL CAudioCapture::Open(ICaptureEvent* pCaptureEvent, 
	const TCHAR* szDeviceName)
{
	ASSERT(pCaptureEvent);

	// Parameter check
	if(NULL == pCaptureEvent)
		return FALSE;

	BOOL bResult = FALSE;
	do
	{
		// Create the GraphBuilder
		if(!CreateGraphBuilder())
			break;

		// Create the CaptureGraphBuilder
		HRESULT hr = CoCreateInstance(CLSID_CaptureGraphBuilder2, NULL, 
			CLSCTX_INPROC_SERVER, IID_ICaptureGraphBuilder2, (void**)&m_pCGBuilder);
		if(FAILED(hr))
		{
			break;
		}

		hr = m_pCGBuilder->SetFiltergraph(m_pGraphBulider);
		if(FAILED(hr))
		{
			break;
		}

		// Create the audio capture device filter
		m_pCaptureFilter = CreateCaptureFiler(szDeviceName);
		if(NULL == m_pCaptureFilter)
			break;

		if (FAILED(m_pGraphBulider->AddFilter(m_pCaptureFilter, L"Audio Capture")))
			break;

		// Set the audio format
		if(!SetAudioFormat(m_enFrequency, m_enChannel, m_enSample))
			break;

		// Create the renderer filter
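		// CAudioRenderer appears to be a custom renderer that delivers captured samples to the ICaptureEvent sink set below.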
		m_pAudioReander = new CAudioRenderer(NULL, &hr);
		if(NULL == m_pAudioReander)
			break;
		m_pAudioReander->AddRef();
		m_pAudioReander->SetCaptureEvent(pCaptureEvent);

		if (FAILED(m_pGraphBulider->AddFilter(m_pAudioReander, L"Audio Render")))
			break;

		// Connect the filters
		IPin* pOutPin = GetOutputPin(m_pCaptureFilter, (uint16_t)0);
		IPin* pInPin  = GetInputPin(m_pAudioReander, (uint16_t)0);

		hr = m_pGraphBulider->Connect(pOutPin, pInPin);

		//AM_MEDIA_TYPE mt;
		//pOutPin->ConnectionMediaType(&mt);
		//WAVEFORMATEX* pWF = (WAVEFORMATEX *) mt.pbFormat;

		SAFE_RELEASE(pOutPin);
		SAFE_RELEASE(pInPin);

		if(FAILED(hr))
			break;

		bResult = TRUE;
	}while(FALSE);

	if(!bResult)
		Close();

	return bResult;
}
Example #5
/*
Function: BulidCaptureGraph()
Purpose:  Build the audio capture graph
*/
int CCaptureAudio::BulidCaptureGraph()
{
	HRESULT hr=NOERROR;
	if(m_iDeviceId <0)
	{
		return -1;
	}
	// Guard against a crash in the caller when no sound card is present [10/23/2014-16:28:00 Dingshuai]
	if(m_pBaseFilter==NULL)
	{
		return -1;
	}

	GUID pCategorySuc = PIN_CATEGORY_PREVIEW;
	GUID pCategoryFail = PIN_CATEGORY_CAPTURE;
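	// m_nPinType == 0 renders from the preview pin category, otherwise from the capture pin category.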
	if(m_nPinType==0)
	{
		pCategorySuc = PIN_CATEGORY_PREVIEW;
		pCategoryFail = PIN_CATEGORY_CAPTURE;

	}
	else
	{
		pCategorySuc = PIN_CATEGORY_CAPTURE;
		pCategoryFail = PIN_CATEGORY_PREVIEW;	
	}
	hr = SetAudioFormat(m_nChannels,m_nBytesPerSample,m_nSampleRate,m_nAudioBufferType,m_nPinType);
	
	if(FAILED(hr))
	{
		//ERR_DEBUG("SetAudioFormat Failed");
		return -1;
		
	}
	hr = CreateCaptureSampleGrabber();
	if(FAILED(hr))
	{
		SAFE_RELEASE(m_pSampleGrabberFilter);
		SAFE_RELEASE(m_pSampleGrabber);
		//ERR_DEBUG("CreateCaptureSampleGrabber Failed");
		return -1;
	}
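	// RenderStream wires the selected pin category of the capture filter through to the sample grabber filter.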

	hr = m_pCaptureGraphBulid->RenderStream(&pCategorySuc,&MEDIATYPE_Audio,m_pBaseFilter,NULL,m_pSampleGrabberFilter);
	if(FAILED(hr))
	{
		//ERR_DEBUG("PrivewVideoDev RenderStream Failed ");
		return -1;
	}
	if(m_bThread) // indicates whether a worker thread is created
	{
		m_pSampleGrabber->SetBufferSamples(TRUE);
		m_pSampleGrabber->SetOneShot(FALSE);
	}
	else
	{
		m_pSampleGrabber->SetBufferSamples(TRUE);
		m_pSampleGrabber->SetOneShot(FALSE);
		m_cSampleGrabberCB.SetDataInfo(-1, m_nDataType);
		int nMode=1;//0--SampleCB,1--BufferCB
		m_pSampleGrabber->SetCallback(&m_cSampleGrabberCB, nMode);
	}

	hr = StartPreview();
	if(FAILED(hr))
	{
		return -1;
	}
	return 1;
}
Example #6
bool OmxDecoder::Init()
{
#if defined(MOZ_WIDGET_ANDROID)
  // OMXClient::connect() always returns OK and aborts fatally if
  // it can't connect. We may need to implement the connect functionality
  // ourselves if this proves to be an issue.
  if (!sClientInstance.IsValid()) {
    LOG("OMXClient failed to connect");
    return false;
  }
#endif

  // Register sniffers, if they are not already registered in this process.
  DataSource::RegisterDefaultSniffers();

  sp<DataSource> dataSource =
    DataSource::CreateFromURI(static_cast<char*>(mDecoder->mResource));
  if (!dataSource.get() || dataSource->initCheck()) {
    return false;
  }

  sp<MediaExtractor> extractor = MediaExtractor::Create(dataSource);
  if (extractor == nullptr) {
    return false;
  }

  ssize_t audioTrackIndex = -1;
  ssize_t videoTrackIndex = -1;
  const char *audioMime = nullptr;
  const char *videoMime = nullptr;
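  // Pick the first video track and the first audio track, identified by MIME type prefix.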

  for (size_t i = 0; i < extractor->countTracks(); ++i) {
    sp<MetaData> meta = extractor->getTrackMetaData(i);

    const char *mime;
    if (!meta->findCString(kKeyMIMEType, &mime)) {
      continue;
    }

    if (videoTrackIndex == -1 && !strncasecmp(mime, "video/", 6)) {
      videoTrackIndex = i;
      videoMime = mime;
    } else if (audioTrackIndex == -1 && !strncasecmp(mime, "audio/", 6)) {
      audioTrackIndex = i;
      audioMime = mime;
    }
  }

  if (videoTrackIndex == -1 && audioTrackIndex == -1) {
    return false;
  }

  int64_t totalDurationUs = 0;

#ifdef MOZ_WIDGET_GONK
  sp<IOMX> omx = GetOMX();
#else
  sp<IOMX> omx = sClientInstance.get()->interface();
#endif

  sp<MediaSource> videoTrack;
  sp<MediaSource> videoSource;
  if (videoTrackIndex != -1 && (videoTrack = extractor->getTrack(videoTrackIndex)) != nullptr) {
    videoSource = CreateVideoSource(mPluginHost, omx, videoTrack);
    if (videoSource == nullptr) {
      LOG("OMXCodec failed to initialize video decoder for \"%s\"", videoMime);
      return false;
    }
    status_t status = videoSource->start();
    if (status != OK) {
      LOG("videoSource->start() failed with status %#x", status);
      return false;
    }
    int64_t durationUs;
    if (videoTrack->getFormat()->findInt64(kKeyDuration, &durationUs)) {
      if (durationUs < 0)
        LOG("video duration %lld should be nonnegative", durationUs);
      if (durationUs > totalDurationUs)
        totalDurationUs = durationUs;
    }
  }

  sp<MediaSource> audioTrack;
  sp<MediaSource> audioSource;
  if (audioTrackIndex != -1 && (audioTrack = extractor->getTrack(audioTrackIndex)) != nullptr)
  {
    if (!strcasecmp(audioMime, "audio/raw")) {
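      // Raw PCM does not need an OMX decoder; use the extractor track directly.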
      audioSource = audioTrack;
    } else {
      audioSource = OMXCodec::Create(omx,
                                     audioTrack->getFormat(),
                                     false, // createEncoder is false: we want a decoder
                                     audioTrack);
    }

    if (audioSource == nullptr) {
      LOG("OMXCodec failed to initialize audio decoder for \"%s\"", audioMime);
      return false;
    }

    status_t status = audioSource->start();
    if (status != OK) {
      LOG("audioSource->start() failed with status %#x", status);
      return false;
    }

    int64_t durationUs;
    if (audioTrack->getFormat()->findInt64(kKeyDuration, &durationUs)) {
      if (durationUs < 0)
        LOG("audio duration %lld should be nonnegative", durationUs);
      if (durationUs > totalDurationUs)
        totalDurationUs = durationUs;
    }
  }

  // set decoder state
  mVideoTrack = videoTrack;
  mVideoSource = videoSource;
  mAudioTrack = audioTrack;
  mAudioSource = audioSource;
  mDurationUs = totalDurationUs;

  if (mVideoSource.get() && !SetVideoFormat())
    return false;

  // To reliably get the channel and sample rate data we need to read from the
  // audio source until we get an INFO_FORMAT_CHANGED status
  if (mAudioSource.get()) {
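    // If the first read does not report a format change, take the channel count and sample
    // rate from the source's format metadata and remember that a decoded frame is already buffered.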
    if (mAudioSource->read(&mAudioBuffer) != INFO_FORMAT_CHANGED) {
      sp<MetaData> meta = mAudioSource->getFormat();
      if (!meta->findInt32(kKeyChannelCount, &mAudioChannels) ||
          !meta->findInt32(kKeySampleRate, &mAudioSampleRate)) {
        return false;
      }
      mAudioMetadataRead = true;

      if (mAudioChannels < 0) {
        LOG("audio channel count %d must be nonnegative", mAudioChannels);
        return false;
      }

      if (mAudioSampleRate < 0) {
        LOG("audio sample rate %d must be nonnegative", mAudioSampleRate);
        return false;
      }
    }
    else if (!SetAudioFormat()) {
        return false;
    }
  }
  return true;
}