Example #1
0
// Tear down the PulseAudio sink: drain pending audio, stop the threaded
// mainloop, then release the stream, context and mainloop in that order.
void CAESinkPULSE::Deinitialize()
{
  CSingleLock lock(m_sec);
  m_IsAllocated = false;
  m_passthrough = false;
  m_periodSize = 0;

  // Flush any queued audio before disconnecting the stream.
  if (m_Stream)
    Drain();

  // Stop the mainloop thread first so none of the pa_* teardown calls
  // below can race against callbacks still running on that thread.
  if (m_MainLoop)
    pa_threaded_mainloop_stop(m_MainLoop);

  if (m_Stream)
  {
    pa_stream_disconnect(m_Stream);
    pa_stream_unref(m_Stream);
    m_Stream = NULL;
    m_IsStreamPaused = false;
  }

  if (m_Context)
  {
    pa_context_disconnect(m_Context);
    pa_context_unref(m_Context);
    m_Context = NULL;
  }

  // Free the mainloop last, after the stream and context that were
  // created on it have been released.
  if (m_MainLoop)
  {
    pa_threaded_mainloop_free(m_MainLoop);
    m_MainLoop = NULL;
  }
}
Example #2
0
// Fetch the next decoded picture from the FFmpeg decoder.
// Returns VC_BUFFER when more input is needed, VC_EOF at end of stream,
// VC_PICTURE with pVideoPicture filled on success, VC_ERROR otherwise.
CDVDVideoCodec::VCReturn CDVDVideoCodecDRMPRIME::GetPicture(VideoPicture* pVideoPicture)
{
  if (m_codecControlFlags & DVD_CODEC_CTRL_DRAIN)
    Drain();

  int ret = avcodec_receive_frame(m_pCodecContext, m_pFrame);
  if (ret == AVERROR(EAGAIN))
    return VC_BUFFER;
  else if (ret == AVERROR_EOF)
    return VC_EOF;
  else if (ret)
  {
    CLog::Log(LOGERROR, "CDVDVideoCodecDRMPRIME::%s - receive frame failed, ret:%d", __FUNCTION__, ret);
    return VC_ERROR;
  }

  // Release any buffer still attached to the caller's picture.
  if (pVideoPicture->videoBuffer)
    pVideoPicture->videoBuffer->Release();
  pVideoPicture->videoBuffer = nullptr;

  SetPictureParams(pVideoPicture);

  // Fix: the dynamic_cast result was dereferenced unchecked; a pool that
  // hands back a different buffer type (or null) crashed here.
  CVideoBufferDRMPRIME* buffer = dynamic_cast<CVideoBufferDRMPRIME*>(m_videoBufferPool->Get());
  if (!buffer)
  {
    CLog::Log(LOGERROR, "CDVDVideoCodecDRMPRIME::%s - failed to get a DRMPRIME video buffer", __FUNCTION__);
    return VC_ERROR;
  }
  buffer->SetRef(m_pFrame);
  pVideoPicture->videoBuffer = buffer;

  return VC_PICTURE;
}
Example #3
0
// Audio thread main loop: pulls decoded audio from AudioQueue() and pushes
// it (or gap-filling silence) to the audio hardware until playback stops or
// the queue reaches end of stream, then drains and cleans up.
void
AudioSink::AudioLoop()
{
  AssertOnAudioThread();
  SINK_LOG("AudioLoop started");

  if (NS_FAILED(InitializeAudioStream())) {
    NS_WARNING("Initializing AudioStream failed.");
    mStateMachine->DispatchOnAudioSinkError();
    return;
  }

  while (1) {
    {
      // Hold the monitor only while waiting for audio / checking whether
      // playback should continue; playing happens outside the lock.
      ReentrantMonitorAutoEnter mon(GetReentrantMonitor());
      WaitForAudioToPlay();
      if (!IsPlaybackContinuing()) {
        break;
      }
    }
    // See if there's a gap in the audio. If there is, push silence into the
    // audio hardware, so we can play across the gap.
    // Calculate the timestamp of the next chunk of audio in numbers of
    // samples.
    NS_ASSERTION(AudioQueue().GetSize() > 0, "Should have data to play");
    CheckedInt64 sampleTime = UsecsToFrames(AudioQueue().PeekFront()->mTime, mInfo.mRate);

    // Calculate the number of frames that have been pushed onto the audio hardware.
    CheckedInt64 playedFrames = UsecsToFrames(mStartTime, mInfo.mRate) + mWritten;

    // CheckedInt64 tracks overflow; bail out rather than play garbage.
    CheckedInt64 missingFrames = sampleTime - playedFrames;
    if (!missingFrames.isValid() || !sampleTime.isValid()) {
      NS_WARNING("Int overflow adding in AudioLoop");
      break;
    }

    if (missingFrames.value() > AUDIO_FUZZ_FRAMES) {
      // The next audio chunk begins some time after the end of the last chunk
      // we pushed to the audio hardware. We must push silence into the audio
      // hardware so that the next audio chunk begins playback at the correct
      // time.
      // Clamp to UINT32_MAX so the cast below cannot truncate.
      missingFrames = std::min<int64_t>(UINT32_MAX, missingFrames.value());
      mWritten += PlaySilence(static_cast<uint32_t>(missingFrames.value()));
    } else {
      mWritten += PlayFromAudioQueue();
    }
    // Report the updated playback end time to listeners, when known.
    int64_t endTime = GetEndTime();
    if (endTime != -1) {
      mOnAudioEndTimeUpdateTask->Dispatch(endTime);
    }
  }
  ReentrantMonitorAutoEnter mon(GetReentrantMonitor());
  MOZ_ASSERT(mStopAudioThread || AudioQueue().AtEndOfStream());
  // Drain only when playback ran to completion; a forced stop must not
  // block waiting for the hardware to finish.
  if (!mStopAudioThread && mPlaying) {
    Drain();
  }
  SINK_LOG("AudioLoop complete");
  Cleanup();
  SINK_LOG("AudioLoop exit");
}
// Flush the encoder and release every MSDK resource owned by this
// instance: surfaces, task pool, output bitstream, encoder and session.
mfxStatus QSV_Encoder_Internal::ClearData()
{
	mfxStatus sts = MFX_ERR_NONE;
	sts = Drain();

	// NOTE(review): the status from Drain() is overwritten here, so a
	// drain failure is silently dropped -- confirm this is intended.
	sts = m_pmfxENC->Close();

	// Hardware (D3D) surfaces are owned by the allocator; free them there.
	if (m_bUseD3D11 || m_bD3D9HACK)
		m_mfxAllocator.Free(m_mfxAllocator.pthis, &m_mfxResponse);

	for (int i = 0; i < m_nSurfNum; i++) {
		// System-memory surfaces own their pixel buffer directly.
		if (!m_bUseD3D11 && !m_bD3D9HACK)
			delete m_pmfxSurfaces[i]->Data.Y;

		delete m_pmfxSurfaces[i];
	}
	MSDK_SAFE_DELETE_ARRAY(m_pmfxSurfaces);

	// Each task owns its bitstream data buffer.
	for (int i = 0; i < m_nTaskPool; i++)
		delete m_pTaskPool[i].mfxBS.Data;
	MSDK_SAFE_DELETE_ARRAY(m_pTaskPool);

	delete m_outBitstream.Data;

	delete m_pmfxENC;
	m_pmfxENC = NULL;

	// Release D3D device/allocator resources if hardware memory was used.
	if (m_bUseD3D11 || m_bD3D9HACK)
		Release();

	m_session.Close();

	return sts;
}
Example #5
0
// Write a single byte to the serial descriptor; when bDrain is set, wait
// until the output queue has been transmitted.
// NOTE: write errors are deliberately ignored (best effort) -- this
// function is void, matching the other comm_put* helpers.
void Comm::comm_putc(unsigned char c, bool bDrain )
{
    ssize_t rv = write( m_fd, &c, 1 );
    (void)rv;  // fix: rv was stored but never used, tripping unused warnings

    if ( bDrain ) {
        Drain();
    }
}
Example #6
0
// Write `len` bytes from Buffer to the serial descriptor; when bDrain is
// set, wait until the output has been transmitted.
// Returns the result of write() (bytes written, or -1 on error).
int Comm::comm_puts( char *Buffer, int len, bool bDrain )
{
  const int nWritten = write( m_fd, Buffer, len );

  if ( bDrain )
    Drain();

  return nWritten;
}
Example #7
0
// Write the NUL-terminated string in Buffer to the serial descriptor;
// when bDrain is set, wait until the output has been transmitted.
// Returns the result of write() (bytes written, or -1 on error).
int Comm::comm_puts(char *Buffer, bool bDrain )
{
  const size_t nBytes = strlen( Buffer );
  const int nWritten = write( m_fd, Buffer, nBytes );

  if ( bDrain )
    Drain();

  return nWritten;
}
// Final step of the audio loop: drain remaining queued audio (only when
// playback ran to completion) and release audio-thread resources.
void
DecodedAudioDataSink::FinishAudioLoop()
{
  AssertOnAudioThread();
  MOZ_ASSERT(mStopAudioThread || AudioQueue().AtEndOfStream());
  // Drain only when we reached end of stream while still playing; a
  // forced stop must not block waiting for the hardware to finish.
  if (!mStopAudioThread && mPlaying) {
    Drain();
  }
  SINK_LOG("AudioLoop complete");
  Cleanup();
  SINK_LOG("AudioLoop exit");
}
Example #9
0
// Advance the tank state by dt seconds: apply the external fuel flow and
// update the fuel temperature toward the total air temperature TAT_C.
// Returns the updated temperature, or 0.0 when temperature modeling is
// disabled (sentinel -9999.0).
double FGTank::Calculate(double dt, double TAT_C)
{
  // Negative external flow drains the tank; positive flow fills it.
  if (ExternalFlow < 0.)
    Drain( -ExternalFlow * dt );
  else
    Fill( ExternalFlow * dt );

  if (Temperature == -9999.0) return 0.0;

  const double HeatCapacity   = 900.0;   // Joules/lbm/C
  const double TempFlowFactor = 1.115;   // Watts/sqft/C
  const double Tdiff = TAT_C - Temperature;

  double dTemp = 0.0;                    // Temp change due to one surface
  if (fabs(Tdiff) > 0.1 && Contents > 0.01) {
    dTemp = (TempFlowFactor * Area * Tdiff * dt) / (Contents * HeatCapacity);
  }

  // For now, assume the upper and lower surfaces contribute equally.
  Temperature += dTemp + dTemp;
  return Temperature;
}
Example #10
0
//Purpose:         Open connection to Khepera on port passed
//                 as a parameter e.g., "ttya".
//Preconditions:   Port name is defined
//Postconditions:  Connection open to given port. Returns true if successful,
//                 false otherwise.
//Purpose:         Open connection to Khepera on port passed
//                 as a parameter e.g., "ttya".
//Preconditions:   Port name is defined
//Postconditions:  Connection open to given port. Returns true if successful,
//                 false otherwise.
bool Serial::Open(apstring portname)
{
  if (VERBOSE)
    cout << "Serial::Entering Serial Open" << endl;

  int i;
  char buffer[64];
  // Fix: the original copy loop wrote past the end of the fixed 64-byte
  // buffer for long port names; reject them up front instead.
  if (portname.length() >= (int)sizeof(buffer))
    {
      if (VERBOSE)
	cout << "Serial::Port name too long" << endl;
      return false;
    }
  // convert apstring to a c string
  for (i=0; i<portname.length(); i++){
    buffer[i] = portname[i];
  }
  buffer[i] = '\0';

  SERIAL_ID = open(buffer,O_RDWR|O_EXCL);
  if (SERIAL_ID == -1)
    {
      if (VERBOSE)
	cout << "Serial::Serial Port Failed to Open --> Serial ID" << endl;
      return false;
    }
  // Fix: on any failure past this point the descriptor used to leak;
  // close it before reporting failure.
  if (!Configure())
    {
      close(SERIAL_ID);
      SERIAL_ID = -1;
      return false;
    }

  if (!Drain())
    {
      close(SERIAL_ID);
      SERIAL_ID = -1;
      return false;
    }
  if (Talk("G,0,0\n") == "")
    {
      if (VERBOSE)
	cout << "Serial::Serial Port Failed to Open --> unable to talk" << endl;
      close(SERIAL_ID);
      SERIAL_ID = -1;
      return false;
    }

  if (VERBOSE)
    cout << "Serial::Serial Port Opened" << endl;

  return true;
}//end Open(apstring)
// Flush the encoder and release the MSDK resources owned by this
// instance: allocator response, surfaces, task pool and output bitstream.
mfxStatus QSV_Encoder_Internal::ClearData()
{
	mfxStatus sts = MFX_ERR_NONE;
	sts = Drain();
	
	// NOTE(review): the status from Drain() is overwritten here, so a
	// drain failure is silently dropped -- confirm this is intended.
	sts = m_pmfxENC->Close();
	
	m_mfxAllocator.Free(m_mfxAllocator.pthis, &m_mfxResponse);

	for (int i = 0; i < m_nSurfNum; i++)
		delete m_pmfxSurfaces[i];
	MSDK_SAFE_DELETE_ARRAY(m_pmfxSurfaces);

	// Each task owns its bitstream data buffer.
	for (int i = 0; i < m_nTaskPool; i++)
		delete m_pTaskPool[i].mfxBS.Data; 
	MSDK_SAFE_DELETE_ARRAY(m_pTaskPool);

	delete m_outBitstream.Data;

	return sts;
}
Example #12
0
// Shut the stream down: wait for any attached process, close the file
// descriptors, free the buffer and reset to the detached state.
void bbcp_Stream::Close() 
{
    // Fix: removed unused local `retc` (declared but never read).

    // Wait for any associated process on this stream
    //
    Drain();

    // Close the associated file descriptor if it was open
    // (avoid double-closing when FE == FD)
    //
    if (FD >= 0)             close(FD);
    if (FE >= 0 && FE != FD) close(FE);

    // Release the buffer if it was allocated.
    //
    if (buff) free(buff);

    // Clear all data values by attaching a dummy FD
    //
    FD = FE = -1;
    buff = 0;
}
Example #13
0
// Fork a child that runs the command in parm[] with its standard streams
// attached to this stream. inrd: redirect the child's stdin through a
// pipe; inerr: capture the child's stderr through a pipe whose read end
// is returned. Returns >= 0 on success in the parent, a negative error
// code on failure; the child never returns from this call.
int bbcp_Stream::Exec(char **parm, int inrd, int inerr)
{
    // Fix: fildes_In was uninitialized; when FD was already attached (the
    // `if (FD < 0)` block skipped), the close() calls below read and
    // closed indeterminate descriptors. Start every pair as "not created".
    int fildes_In[2] = {-1,-1}, fildes_Out[2] = {-1,-1}, fildes_Err[2] = {0,0};
    int retc, Child_Out = FD, Child_In = FE, Child_Err = 0;

    // Wait for any previous command to finish on this stream
    //
    Drain();

    // Create a pipe if we have no attached FD. Recall that FD is the
    // descriptor we read from and FE is the one we write to. This is
    // true for sockets. For pipes, the relationship is reversed in
    // the child. Plus, we need to get two pipes if the child's STDIN
    // is to be redirected. This is because pipes suffer from backflow.
    //
    if (FD < 0)
       {if (pipe(fildes_Out) || (inrd  && pipe(fildes_In))
                            || (inerr && pipe(fildes_Err)))
           return bbcp_Emsg("Exec",errno,"creating a pipe for",parm[0]);
        Child_In=fildes_In[0]; Child_Out=fildes_Out[1]; Child_Err=fildes_Err[1];
        fildes_Out[1] = (inrd ? fildes_In[1] : -1);
        if ((retc = Attach(fildes_Out))) return retc;  // assignment intended
       }

    // Fork a process first so we can pick up the next request.
    //
    if ((child = bbcp_OS.Fork()) != 0)
       {close(Child_Out); retc = -errno;
        if (inrd)  close(Child_In);
        if (inerr) close(Child_Err);
        if (child > 0) return fildes_Err[0];
        // Fork failed: report and close the parent-side pipe ends.
        retc=bbcp_Emsg("Exec", retc,"forking request process for",parm[0]);
                   close(fildes_In[1]);
        if (inrd)  close(fildes_Out[0]);
        if (inerr) close(fildes_Err[0]);
        return retc;
       }

    /*****************************************************************/
    /*                  C h i l d   P r o c e s s                    */
    /*****************************************************************/

    // Close the parent end of the pipe
    //
    if (fildes_In[1]  >= 0) close(fildes_In[1]);
    if (fildes_Out[0] >= 0) close(fildes_Out[0]);
    if (fildes_Err[0] >  0) close(fildes_Err[0]);


    // Redirect standard in if so requested
    //
    if (inrd)
       {if (dup2(Child_In, STDIN_FILENO) < 0)
           {bbcp_Emsg("Exec", errno, "setting up standard in for", parm[0]);
            exit(255);
           } else close(Child_In);
       }

    // Reassign the stream to be standard out to capture all of the output.
    //
    if (dup2(Child_Out, STDOUT_FILENO) < 0)
       {bbcp_Emsg("Exec", errno, "setting up standard out for", parm[0]);
        exit(255);
       } else close(Child_Out);

    // Reassign the stream to be standard err to capture all of the output.
    //
    if (inerr && Child_Err)
       {if (dup2(Child_Err, STDERR_FILENO) < 0)
           {bbcp_Emsg("Exec", errno, "setting up standard err for", parm[0]);
            exit(255);
           } else close(Child_Err);
       }

    // Invoke the command never to return
    //
    DEBUG("PATH=" <<getenv("PATH"));
    execvp(parm[0], parm);
    bbcp_Emsg("Exec", errno, "executing", parm[0], parm[1]);
    exit(255);
    return(255);  // some compilers demand a return in int functions
}
// Feed one encrypted/encoded frame into the Widevine CDM decoder and, on
// success, return any produced output frame to Gecko via ReturnOutput().
// Finishes a pending drain once no ReturnOutput calls remain on the stack.
void
WidevineVideoDecoder::Decode(GMPVideoEncodedFrame* aInputFrame,
                             bool aMissingFrames,
                             const uint8_t* aCodecSpecificInfo,
                             uint32_t aCodecSpecificInfoLength,
                             int64_t aRenderTimeMs)
{
  // We should not be given new input if a drain has been initiated
  MOZ_ASSERT(!mDrainPending);
  // We may not get the same out of the CDM decoder as we put in, and there
  // may be some latency, i.e. we may need to input (say) 30 frames before
  // we receive output. So we need to store the durations of the frames input,
  // and retrieve them on output.
  mFrameDurations[aInputFrame->TimeStamp()] = aInputFrame->Duration();

  mSentInput = true;
  InputBuffer sample;

  // Copy the input payload into a MediaRawData we own.
  RefPtr<MediaRawData> raw(new MediaRawData(aInputFrame->Buffer(), aInputFrame->Size()));
  raw->mExtraData = mExtraData;
  raw->mKeyframe = (aInputFrame->FrameType() == kGMPKeyFrame);
  // Convert input from AVCC, which GMPAPI passes in, to AnnexB, which
  // Chromium uses internally.
  mp4_demuxer::AnnexB::ConvertSampleToAnnexB(raw);

  const GMPEncryptedBufferMetadata* crypto = aInputFrame->GetDecryptionData();
  nsTArray<SubsampleEntry> subsamples;
  InitInputBuffer(crypto, aInputFrame->TimeStamp(), raw->Data(), raw->Size(), sample, subsamples);

  // For keyframes, ConvertSampleToAnnexB will stick the AnnexB extra data
  // at the start of the input. So we need to account for that as clear data
  // in the subsamples.
  if (raw->mKeyframe && !subsamples.IsEmpty()) {
    subsamples[0].clear_bytes += mAnnexB->Length();
  }

  WidevineVideoFrame frame;
  Status rv = CDM()->DecryptAndDecodeFrame(sample, &frame);
  Log("WidevineVideoDecoder::Decode(timestamp=%lld) rv=%d", sample.timestamp, rv);

  // Destroy frame, so that the shmem is now free to be used to return
  // output to the Gecko process.
  aInputFrame->Destroy();
  aInputFrame = nullptr;

  if (rv == kSuccess) {
    if (!ReturnOutput(frame)) {
      Log("WidevineVideoDecoder::Decode() Failed in ReturnOutput()");
      mCallback->Error(GMPDecodeErr);
      return;
    }
    // A reset should only be started at most at level mReturnOutputCallDepth 1,
    // and if it's started it should be finished by that call by the time
    // the it returns, so it should always be false by this point.
    MOZ_ASSERT(!mResetInProgress);
    // Only request more data if we don't have pending samples.
    if (mFrameAllocationQueue.empty()) {
      MOZ_ASSERT(mCDMWrapper);
      mCallback->InputDataExhausted();
    }
  } else if (rv == kNeedMoreData) {
    // The CDM buffered the input without producing output yet.
    MOZ_ASSERT(mCDMWrapper);
    mCallback->InputDataExhausted();
  } else {
    mCallback->Error(ToGMPErr(rv));
  }
  // Finish a drain if pending and we have no pending ReturnOutput calls on the stack.
  if (mDrainPending && mReturnOutputCallDepth == 0) {
    Drain();
  }
}