// Initialize the stream: record the channel/rate configuration, build the
// cubeb stream parameters and hand off to OpenCubeb().
// Returns NS_ERROR_INVALID_ARG on Android if the audio channel maps to no
// valid cubeb stream type; otherwise whatever OpenCubeb() returns.
nsresult
AudioStream::Init(uint32_t aNumChannels, uint32_t aRate,
                  const dom::AudioChannel aAudioChannel)
{
  // Capture timing/ordering info up front; both are forwarded to OpenCubeb()
  // (presumably for init-latency telemetry — confirm in OpenCubeb()).
  const TimeStamp openStart = TimeStamp::Now();
  const bool firstStream = CubebUtils::GetFirstStream();

  LOG("%s channels: %d, rate: %d", __FUNCTION__, aNumChannels, aRate);

  mChannels = aNumChannels;
  mOutChannels = aNumChannels;
  mDumpFile = OpenDumpFile(aNumChannels, aRate);

  cubeb_stream_params streamParams;
  streamParams.rate = aRate;
  streamParams.channels = mOutChannels;
#if defined(__ANDROID__)
#if defined(MOZ_B2G)
  streamParams.stream_type = CubebUtils::ConvertChannelToCubebType(aAudioChannel);
#else
  streamParams.stream_type = CUBEB_STREAM_TYPE_MUSIC;
#endif
  // CUBEB_STREAM_TYPE_MAX marks an unmappable audio channel.
  if (streamParams.stream_type == CUBEB_STREAM_TYPE_MAX) {
    return NS_ERROR_INVALID_ARG;
  }
#endif
  streamParams.format = ToCubebFormat<AUDIO_OUTPUT_FORMAT>::value;

  mAudioClock.Init(aRate);
  return OpenCubeb(streamParams, openStart, firstStream);
}
// Initialize the stream with an explicit channel map: set up member state,
// translate the map into a cubeb layout, acquire the cubeb context and open
// the stream.
// Returns NS_ERROR_DOM_MEDIA_CUBEB_INITIALIZATION_ERR when no cubeb context
// is available; otherwise whatever OpenCubeb() returns.
nsresult
AudioStream::Init(uint32_t aNumChannels, uint32_t aChannelMap, uint32_t aRate,
                  const dom::AudioChannel aAudioChannel)
{
  const TimeStamp openStart = TimeStamp::Now();

  LOG("%s channels: %d, rate: %d", __FUNCTION__, aNumChannels, aRate);

  mChannels = aNumChannels;
  mOutChannels = aNumChannels;
  mDumpFile = OpenDumpFile(aNumChannels, aRate);

  cubeb_stream_params streamParams;
  streamParams.rate = aRate;
  streamParams.channels = mOutChannels;
  streamParams.layout = CubebUtils::ConvertChannelMapToCubebLayout(aChannelMap);
#if defined(__ANDROID__)
  streamParams.stream_type = CUBEB_STREAM_TYPE_MUSIC;
#endif
  streamParams.format = ToCubebFormat<AUDIO_OUTPUT_FORMAT>::value;

  mAudioClock.Init(aRate);

  cubeb* context = CubebUtils::GetCubebContext();
  if (!context) {
    // Report the failure for telemetry before bailing out.
    NS_WARNING("Can't get cubeb context!");
    CubebUtils::ReportCubebStreamInitFailure(true);
    return NS_ERROR_DOM_MEDIA_CUBEB_INITIALIZATION_ERR;
  }
  return OpenCubeb(context, streamParams, openStart,
                   CubebUtils::GetFirstStream());
}
nsresult BufferedAudioStream::Init(int32_t aNumChannels, int32_t aRate, const dom::AudioChannelType aAudioChannelType) { cubeb* cubebContext = GetCubebContext(); if (!cubebContext || aNumChannels < 0 || aRate < 0) { return NS_ERROR_FAILURE; } mInRate = mOutRate = aRate; mChannels = aNumChannels; mDumpFile = OpenDumpFile(this); cubeb_stream_params params; params.rate = aRate; params.channels = aNumChannels; #if defined(__ANDROID__) #if defined(MOZ_B2G) params.stream_type = ConvertChannelToCubebType(aAudioChannelType); #else params.stream_type = CUBEB_STREAM_TYPE_MUSIC; #endif if (params.stream_type == CUBEB_STREAM_TYPE_MAX) { return NS_ERROR_INVALID_ARG; } #endif if (AUDIO_OUTPUT_FORMAT == AUDIO_FORMAT_S16) { params.format = CUBEB_SAMPLE_S16NE; } else { params.format = CUBEB_SAMPLE_FLOAT32NE; } mBytesPerFrame = sizeof(AudioDataValue) * aNumChannels; mAudioClock.Init(); { cubeb_stream* stream; if (cubeb_stream_init(cubebContext, &stream, "BufferedAudioStream", params, GetCubebLatency(), DataCallback_S, StateCallback_S, this) == CUBEB_OK) { mCubebStream.own(stream); } } if (!mCubebStream) { return NS_ERROR_FAILURE; } // Size mBuffer for one second of audio. This value is arbitrary, and was // selected based on the observed behaviour of the existing AudioStream // implementations. uint32_t bufferLimit = FramesToBytes(aRate); NS_ABORT_IF_FALSE(bufferLimit % mBytesPerFrame == 0, "Must buffer complete frames"); mBuffer.SetCapacity(bufferLimit); return NS_OK; }
nsresult AudioStream::Init(int32_t aNumChannels, int32_t aRate, const dom::AudioChannel aAudioChannel) { mStartTime = TimeStamp::Now(); mIsFirst = CubebUtils::GetFirstStream(); if (!CubebUtils::GetCubebContext() || aNumChannels < 0 || aRate < 0) { return NS_ERROR_FAILURE; } MOZ_LOG(gAudioStreamLog, LogLevel::Debug, ("%s channels: %d, rate: %d for %p", __FUNCTION__, aNumChannels, aRate, this)); mInRate = mOutRate = aRate; mChannels = aNumChannels; mOutChannels = (aNumChannels > 2) ? 2 : aNumChannels; mDumpFile = OpenDumpFile(this); cubeb_stream_params params; params.rate = aRate; params.channels = mOutChannels; #if defined(__ANDROID__) #if defined(MOZ_B2G) mAudioChannel = aAudioChannel; params.stream_type = CubebUtils::ConvertChannelToCubebType(aAudioChannel); #else mAudioChannel = dom::AudioChannel::Content; params.stream_type = CUBEB_STREAM_TYPE_MUSIC; #endif if (params.stream_type == CUBEB_STREAM_TYPE_MAX) { return NS_ERROR_INVALID_ARG; } #endif if (AUDIO_OUTPUT_FORMAT == AUDIO_FORMAT_S16) { params.format = CUBEB_SAMPLE_S16NE; } else { params.format = CUBEB_SAMPLE_FLOAT32NE; } mBytesPerFrame = sizeof(AudioDataValue) * mOutChannels; mAudioClock.Init(); // Size mBuffer for one second of audio. This value is arbitrary, and was // selected based on the observed behaviour of the existing AudioStream // implementations. uint32_t bufferLimit = FramesToBytes(aRate); MOZ_ASSERT(bufferLimit % mBytesPerFrame == 0, "Must buffer complete frames"); mBuffer.SetCapacity(bufferLimit); return OpenCubeb(params); }
nsresult AudioStream::Init(uint32_t aNumChannels, uint32_t aRate, const dom::AudioChannel aAudioChannel) { mStartTime = TimeStamp::Now(); mIsFirst = CubebUtils::GetFirstStream(); if (!CubebUtils::GetCubebContext()) { return NS_ERROR_FAILURE; } MOZ_LOG(gAudioStreamLog, LogLevel::Debug, ("%s channels: %d, rate: %d for %p", __FUNCTION__, aNumChannels, aRate, this)); mInRate = mOutRate = aRate; mChannels = aNumChannels; mOutChannels = (aNumChannels > 2) ? 2 : aNumChannels; mDumpFile = OpenDumpFile(this); cubeb_stream_params params; params.rate = aRate; params.channels = mOutChannels; #if defined(__ANDROID__) #if defined(MOZ_B2G) mAudioChannel = aAudioChannel; params.stream_type = CubebUtils::ConvertChannelToCubebType(aAudioChannel); #else mAudioChannel = dom::AudioChannel::Content; params.stream_type = CUBEB_STREAM_TYPE_MUSIC; #endif if (params.stream_type == CUBEB_STREAM_TYPE_MAX) { return NS_ERROR_INVALID_ARG; } #endif if (AUDIO_OUTPUT_FORMAT == AUDIO_FORMAT_S16) { params.format = CUBEB_SAMPLE_S16NE; } else { params.format = CUBEB_SAMPLE_FLOAT32NE; } mAudioClock.Init(); return OpenCubeb(params); }
// Initialize the stream for a specific output device: record channel/rate
// state and the sink, build cubeb stream parameters (including layout and
// prefs), detect the winmm prefill quirk, then open the stream.
// Returns NS_ERROR_DOM_MEDIA_CUBEB_INITIALIZATION_ERR when no cubeb context
// is available; otherwise whatever OpenCubeb() returns.
nsresult
AudioStream::Init(uint32_t aNumChannels,
                  AudioConfig::ChannelLayout::ChannelMap aChannelMap,
                  uint32_t aRate, AudioDeviceInfo* aSinkInfo)
{
  const TimeStamp openStart = TimeStamp::Now();

  LOG("%s channels: %d, rate: %d", __FUNCTION__, aNumChannels, aRate);

  mChannels = aNumChannels;
  mOutChannels = aNumChannels;
  mDumpFile = OpenDumpFile(aNumChannels, aRate);
  mSinkInfo = aSinkInfo;

  cubeb_stream_params streamParams;
  streamParams.rate = aRate;
  streamParams.channels = mOutChannels;
  streamParams.layout = static_cast<uint32_t>(aChannelMap);
  streamParams.format = ToCubebFormat<AUDIO_OUTPUT_FORMAT>::value;
  streamParams.prefs = CubebUtils::GetDefaultStreamPrefs();

  mAudioClock.Init(aRate);

  cubeb* context = CubebUtils::GetCubebContext();
  if (!context) {
    // Report the failure for telemetry before bailing out.
    LOGE("Can't get cubeb context!");
    CubebUtils::ReportCubebStreamInitFailure(true);
    return NS_ERROR_DOM_MEDIA_CUBEB_INITIALIZATION_ERR;
  }

  // cubeb's winmm backend prefills buffers on init rather than stream start.
  // See https://github.com/kinetiknz/cubeb/issues/150
  mPrefillQuirk = !strcmp(cubeb_get_backend_id(context), "winmm");

  return OpenCubeb(context, streamParams, openStart,
                   CubebUtils::GetFirstStream());
}
// NOTE: this must not block a LowLatency stream for any significant amount // of time, or it will block the entirety of MSG nsresult AudioStream::Init(int32_t aNumChannels, int32_t aRate, const dom::AudioChannel aAudioChannel, LatencyRequest aLatencyRequest) { mStartTime = TimeStamp::Now(); mIsFirst = GetFirstStream(); if (!GetCubebContext() || aNumChannels < 0 || aRate < 0) { return NS_ERROR_FAILURE; } PR_LOG(gAudioStreamLog, PR_LOG_DEBUG, ("%s channels: %d, rate: %d for %p", __FUNCTION__, aNumChannels, aRate, this)); mInRate = mOutRate = aRate; mChannels = aNumChannels; mOutChannels = (aNumChannels > 2) ? 2 : aNumChannels; mLatencyRequest = aLatencyRequest; mDumpFile = OpenDumpFile(this); cubeb_stream_params params; params.rate = aRate; params.channels = mOutChannels; #if defined(__ANDROID__) #if defined(MOZ_B2G) params.stream_type = ConvertChannelToCubebType(aAudioChannel); #else params.stream_type = CUBEB_STREAM_TYPE_MUSIC; #endif if (params.stream_type == CUBEB_STREAM_TYPE_MAX) { return NS_ERROR_INVALID_ARG; } #endif if (AUDIO_OUTPUT_FORMAT == AUDIO_FORMAT_S16) { params.format = CUBEB_SAMPLE_S16NE; } else { params.format = CUBEB_SAMPLE_FLOAT32NE; } mBytesPerFrame = sizeof(AudioDataValue) * mOutChannels; mAudioClock.Init(); // Size mBuffer for one second of audio. This value is arbitrary, and was // selected based on the observed behaviour of the existing AudioStream // implementations. uint32_t bufferLimit = FramesToBytes(aRate); NS_ABORT_IF_FALSE(bufferLimit % mBytesPerFrame == 0, "Must buffer complete frames"); mBuffer.SetCapacity(bufferLimit); if (aLatencyRequest == LowLatency) { // Don't block this thread to initialize a cubeb stream. // When this is done, it will start callbacks from Cubeb. Those will // cause us to move from INITIALIZED to RUNNING. Until then, we // can't access any cubeb functions. 
// Use a RefPtr to avoid leaks if Dispatch fails RefPtr<AudioInitTask> init = new AudioInitTask(this, aLatencyRequest, params); init->Dispatch(); return NS_OK; } // High latency - open synchronously nsresult rv = OpenCubeb(params, aLatencyRequest); // See if we need to start() the stream, since we must do that from this // thread for now (cubeb API issue) { MonitorAutoLock mon(mMonitor); CheckForStart(); } return rv; }
int Process(char *pszBioSeqFile, char *pszDumpFile, etFMode FMode) { CBioSeqFile *pBioSeqFile = NULL; CMAlignFile *pAlignFile = NULL; char *pszSpeciesName; UINT8 *pSeq; int Len; char *pszBuff; int Rslt; char szSource[cBSFSourceSize]; char szDescription[cBSFDescriptionSize]; int SeqLen; tBSFEntryID CurEntryID; pszBuff = NULL; m_pSeq = NULL; m_AllocSeqLen = 0; pBioSeqFile = new CBioSeqFile; // try to open as a bioseq file if((Rslt=pBioSeqFile->Open(pszBioSeqFile,cBSFTypeAny,false))!=eBSFSuccess) { while(pBioSeqFile->NumErrMsgs()) gDiagnostics.DiagOut(eDLFatal,gszProcName,pBioSeqFile->GetErrMsg()); gDiagnostics.DiagOut(eDLFatal,gszProcName,"Unable to open %s",pszBioSeqFile); delete pBioSeqFile; return(Rslt); } m_FMode = FMode; if(!OpenDumpFile(pszDumpFile)) return(eBSFerrOpnFile); pszBuff = new char [cMaxReadLen+ 1]; if(m_FMode == eFMbed) { Len = sprintf(pszBuff,"track type=bed name=\"Contigs\" description=\"Contigs dump\"\n"); CUtility::SafeWrite(hDumpFile,pszBuff,Len); } CurEntryID = 0; pSeq = NULL; if(pBioSeqFile->GetType() == cBSFTypeMultiAlign) { pBioSeqFile->Close(); delete pBioSeqFile; pBioSeqFile = NULL; pAlignFile = new CMAlignFile; if((Rslt = pAlignFile->Open(pszBioSeqFile))!=eBSFSuccess) { while(pAlignFile->NumErrMsgs()) gDiagnostics.DiagOut(eDLFatal,gszProcName,pAlignFile->GetErrMsg()); gDiagnostics.DiagOut(eDLFatal,gszProcName,"Unable to open %s",pszBioSeqFile); delete pAlignFile; delete pszBuff; return(Rslt); } int NumSpecies = pAlignFile->GetNumSpecies(); for(CurEntryID = 1; CurEntryID <= NumSpecies; CurEntryID++) { pszSpeciesName = pAlignFile->GetSpeciesName(CurEntryID); WriteDumpFile(CurEntryID,pszSpeciesName,NULL,0,NULL); } Rslt = CurEntryID == NumSpecies ? 
eBSFSuccess : eBSFerrInternal; } else { while((CurEntryID = pBioSeqFile->Next(CurEntryID)) > 0 ) { pBioSeqFile->GetNameDescription(CurEntryID,cBSFSourceSize-1,(char *)&szSource, cBSFDescriptionSize-1,(char *)&szDescription); SeqLen = pBioSeqFile->GetDataLen(CurEntryID); if(m_FMode == eFMdefault) { if(m_pSeq == NULL || m_AllocSeqLen == 0 || SeqLen >= (int)m_AllocSeqLen) { if(m_pSeq != NULL) { delete m_pSeq; m_pSeq = NULL; m_AllocSeqLen = 0; } m_AllocSeqLen = SeqLen + 100000; if((m_pSeq = new UINT8 [m_AllocSeqLen])==NULL) { gDiagnostics.DiagOut(eDLFatal,gszProcName,"Unable to allocate %d memory",m_AllocSeqLen); delete pszBuff; return(eBSFerrMem); } } pBioSeqFile->GetData(CurEntryID,eAsciiType,0,m_pSeq,SeqLen); } WriteDumpFile(CurEntryID,szSource,szDescription,SeqLen,m_pSeq); } Rslt = CurEntryID == eBSFerrEntry ? eBSFSuccess : (int)CurEntryID; } CloseDumpFile(); if(pBioSeqFile != NULL) delete pBioSeqFile; if(pAlignFile != NULL) delete pAlignFile; if(m_pSeq != NULL) delete m_pSeq; if(pszBuff != NULL) delete pszBuff; return(Rslt); }
nsresult AudioStream::Init(int32_t aNumChannels, int32_t aRate, const dom::AudioChannelType aAudioChannelType, LatencyRequest aLatencyRequest) { cubeb* cubebContext = GetCubebContext(); if (!cubebContext || aNumChannels < 0 || aRate < 0) { return NS_ERROR_FAILURE; } PR_LOG(gAudioStreamLog, PR_LOG_DEBUG, ("%s channels: %d, rate: %d", __FUNCTION__, aNumChannels, aRate)); mInRate = mOutRate = aRate; mChannels = aNumChannels; mOutChannels = (aNumChannels > 2) ? 2 : aNumChannels; mLatencyRequest = aLatencyRequest; mDumpFile = OpenDumpFile(this); cubeb_stream_params params; params.rate = aRate; params.channels = mOutChannels; #if defined(__ANDROID__) #if defined(MOZ_B2G) params.stream_type = ConvertChannelToCubebType(aAudioChannelType); #else params.stream_type = CUBEB_STREAM_TYPE_MUSIC; #endif if (params.stream_type == CUBEB_STREAM_TYPE_MAX) { return NS_ERROR_INVALID_ARG; } #endif if (AUDIO_OUTPUT_FORMAT == AUDIO_FORMAT_S16) { params.format = CUBEB_SAMPLE_S16NE; } else { params.format = CUBEB_SAMPLE_FLOAT32NE; } mBytesPerFrame = sizeof(AudioDataValue) * mOutChannels; mAudioClock.Init(); // If the latency pref is set, use it. Otherwise, if this stream is intended // for low latency playback, try to get the lowest latency possible. // Otherwise, for normal streams, use 100ms. uint32_t latency; if (aLatencyRequest == LowLatency && !CubebLatencyPrefSet()) { if (cubeb_get_min_latency(cubebContext, params, &latency) != CUBEB_OK) { latency = GetCubebLatency(); } } else { latency = GetCubebLatency(); } { cubeb_stream* stream; if (cubeb_stream_init(cubebContext, &stream, "AudioStream", params, latency, DataCallback_S, StateCallback_S, this) == CUBEB_OK) { mCubebStream.own(stream); } } if (!mCubebStream) { return NS_ERROR_FAILURE; } // Size mBuffer for one second of audio. This value is arbitrary, and was // selected based on the observed behaviour of the existing AudioStream // implementations. 
uint32_t bufferLimit = FramesToBytes(aRate); NS_ABORT_IF_FALSE(bufferLimit % mBytesPerFrame == 0, "Must buffer complete frames"); mBuffer.SetCapacity(bufferLimit); // Start the stream right away when low latency has been requested. This means // that the DataCallback will feed silence to cubeb, until the first frames // are writtent to this AudioStream. if (mLatencyRequest == LowLatency) { Start(); } return NS_OK; }