Example #1
char *gethost_name(void) {
  char      hostname[MAXHOSTNAMELEN];
  struct addrinfo *hres=NULL, *hres_list;
  int rc,count;

  rc = gethostname(hostname,MAXHOSTNAMELEN);
  if (rc)
    {
      fprintf(stderr, "%s| %s: error while resolving hostname '%s'\n", LogTime(), PROGRAM, hostname);
      return NULL;
    }
  rc = getaddrinfo(hostname,NULL,NULL,&hres);
  if (rc != 0) {
    fprintf(stderr, "%s| %s: error while resolving hostname with getaddrinfo: %s\n", LogTime(), PROGRAM, gai_strerror(rc));
    return NULL;
  }
  hres_list=hres;
  count=0;
  while (hres_list) {
    count++;
    hres_list=hres_list->ai_next;
  }
  rc = getnameinfo (hres->ai_addr, hres->ai_addrlen,hostname, sizeof (hostname), NULL, 0, 0);
  if (rc != 0) {
    fprintf(stderr, "%s| %s: error while resolving ip address with getnameinfo: %s\n", LogTime(), PROGRAM, gai_strerror(rc));
    freeaddrinfo(hres);
    return NULL ;
  }

  freeaddrinfo(hres);
  hostname[MAXHOSTNAMELEN-1]='\0';
  return(strdup(hostname));
}
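
gethost_name() hands back memory allocated by strdup(), so the caller owns the returned string. A hypothetical call site (the INFO log line and the free() are illustrative, not taken from the original helper):

  char *host = gethost_name();
  if (host == NULL)
    return 1;                /* errors were already reported by gethost_name() */
  fprintf(stderr, "%s| %s: INFO: canonical host name is %s\n", LogTime(), PROGRAM, host);
  free(host);                /* allocated with strdup() inside gethost_name() */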
Example #2
status_t DenybbleizeData(const String & nybbleizedText, ByteBuffer & retBuf)
{
   const uint32 numBytes = nybbleizedText.Length();
   if ((numBytes%2)!=0)
   {
      LogTime(MUSCLE_LOG_ERROR, "DenybblizeData:  Nybblized text [%s] has an odd length; that shouldn't ever happen!\n", nybbleizedText());
      return B_ERROR;
   }

   if (retBuf.SetNumBytes(numBytes/2, false) != B_NO_ERROR) return B_ERROR;

   uint8 * b = retBuf.GetBuffer();
   for (uint32 i=0; i<numBytes; i+=2)
   {
      const char c1 = nybbleizedText[i+0];
      const char c2 = nybbleizedText[i+1];
      if ((muscleInRange(c1, 'A', 'P') == false)||(muscleInRange(c2, 'A', 'P') == false))
      {
         LogTime(MUSCLE_LOG_ERROR, "DenybblizeData:  Nybblized text [%s] contains characters other than A through P!\n", nybbleizedText());
         return B_ERROR;
      }
      *b++ = (uint8) (((c1-'A')<<0)|((c2-'A')<<4));
   }
   return B_NO_ERROR;
}
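
The decoder above expects each byte to be spelled as two characters in 'A'..'P', low nibble first (that is what the <<0 and <<4 shifts imply). A rough standalone sketch of the matching forward transform, written against plain buffers rather than the MUSCLE String/ByteBuffer classes, might look like this:

   // Sketch only: emits 2*numBytes characters in 'A'..'P', low nibble first,
   // so that the DenybbleizeData() loop above can reverse it.
   void NybbleizeBytesSketch(const uint8 * in, uint32 numBytes, char * out)
   {
      for (uint32 i=0; i<numBytes; i++)
      {
         out[(i*2)+0] = (char) ('A' + ((in[i] >> 0) & 0x0F));  // low nibble
         out[(i*2)+1] = (char) ('A' + ((in[i] >> 4) & 0x0F));  // high nibble
      }
   }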
Example #3
int check_gss_err(OM_uint32 major_status, OM_uint32 minor_status, const char* function, int debug, int log)
{
    if (GSS_ERROR(major_status)) {
        OM_uint32 maj_stat,min_stat;
        OM_uint32 msg_ctx = 0;
        gss_buffer_desc status_string;
        char buf[1024];
        size_t len;

        len = 0;
        msg_ctx = 0;
        while (!msg_ctx) {
            /* convert major status code (GSS-API error) to text */
            maj_stat = gss_display_status(&min_stat, major_status,
                                          GSS_C_GSS_CODE,
                                          GSS_C_NULL_OID,
                                          &msg_ctx, &status_string);
            if (maj_stat == GSS_S_COMPLETE) {
                if (sizeof(buf) > len + status_string.length + 1) {
                    sprintf(buf+len, "%s", (char*) status_string.value);
                    len += status_string.length;
                }
                gss_release_buffer(&min_stat, &status_string);
                break;
            }
            gss_release_buffer(&min_stat, &status_string);
        }
        if (sizeof(buf) > len + 2) {
            sprintf(buf+len, "%s", ". ");
            len += 2;
        }
        msg_ctx = 0;
        while (!msg_ctx) {
            /* convert minor status code (underlying routine error) to text */
            maj_stat = gss_display_status(&min_stat, minor_status,
                                          GSS_C_MECH_CODE,
                                          GSS_C_NULL_OID,
                                          &msg_ctx, &status_string);
            if (maj_stat == GSS_S_COMPLETE) {
                if (sizeof(buf) > len + status_string.length ) {
                    sprintf(buf+len, "%s", (char*) status_string.value);
                    len += status_string.length;
                }
                gss_release_buffer(&min_stat, &status_string);
                break;
            }
            gss_release_buffer(&min_stat, &status_string);
        }
        if (debug)
            fprintf(stderr, "%s| %s: ERROR: %s failed: %s\n", LogTime(), PROGRAM, function, buf);
        fprintf(stdout, "BH %s failed: %s\n",function, buf);
        if (log)
            fprintf(stderr, "%s| %s: INFO: User not authenticated\n", LogTime(), PROGRAM);
        return(1);
    }
    return(0);
}
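
A hypothetical call site, showing how this helper is meant to wrap a single GSS-API call; the name_buf contents and the debug/log flags are placeholders, and only check_gss_err() itself comes from the example:

    gss_buffer_desc name_buf = GSS_C_EMPTY_BUFFER;   /* caller fills value/length, e.g. "HTTP@host" */
    gss_name_t server_name = GSS_C_NO_NAME;
    OM_uint32 major_status, minor_status;

    major_status = gss_import_name(&minor_status, &name_buf,
                                   GSS_C_NT_HOSTBASED_SERVICE, &server_name);
    if (check_gss_err(major_status, minor_status, "gss_import_name()", debug, log))
        return 1;   /* the GSS error text has already been printed by check_gss_err() */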
Example #4
char *gethost_name(void)
{
    char      hostname[sysconf(_SC_HOST_NAME_MAX)];
    struct addrinfo *hres=NULL, *hres_list;
    int rc,count;

    rc = gethostname(hostname,sysconf(_SC_HOST_NAME_MAX));
    if (rc) {
        fprintf(stderr, "%s| %s: ERROR: resolving hostname '%s' failed\n", LogTime(), PROGRAM, hostname);
        return NULL;
    }
    rc = xgetaddrinfo(hostname,NULL,NULL,&hres);
    if (rc != 0) {
        fprintf(stderr, "%s| %s: ERROR: resolving hostname with getaddrinfo: %s failed\n", LogTime(), PROGRAM, xgai_strerror(rc));
        return NULL;
    }
    hres_list=hres;
    count=0;
    while (hres_list) {
        count++;
        hres_list=hres_list->ai_next;
    }
    rc = xgetnameinfo (hres->ai_addr, hres->ai_addrlen,hostname, sizeof (hostname), NULL, 0, 0);
    if (rc != 0) {
        fprintf(stderr, "%s| %s: ERROR: resolving ip address with getnameinfo: %s failed\n", LogTime(), PROGRAM, xgai_strerror(rc));
        xfreeaddrinfo(hres);
        return NULL ;
    }

    xfreeaddrinfo(hres);
    hostname[sysconf(_SC_HOST_NAME_MAX)-1]='\0';
    return(xstrdup(hostname));
}
Example #5
TInt CRecordBySize::PostProcess(CStifSectionParser* /*aParser*/)
{
	if (!stopByMMF)
	{
		logger->Log(_L("Recording wasn't stopped for reaching file size"));
		return KErrRecBySize;
	}

	TTimeIntervalMicroSeconds duration = Duration();
	//logger->Log(_L("Recorded clip duration: %d"),duration);
	LogTime(_L("Recorded clip duration:"),duration);

	TInt realSize = 0, err;
	if ((err=GetFileSize(recordingFileName, realSize)) != KErrNone)
		return err;

	logger->Log(_L("Recorded size: %d bytes"),realSize);
	logger->Log(_L("Expected size: %d bytes"),maxFileSize);
	TInt dif = Abs(maxFileSize - realSize);
	if (dif > errorRange)
	{
		logger->Log(_L("Recorded file size differs from expected size by: %d bytes"),dif);
		return KErrOutOfRange;
	}
	return KErrNone;
}
Example #6
int32 PacketizedProxyDataIO :: Write(const void * buffer, uint32 size)
{
   if (size > _maxTransferUnit)
   {
      LogTime(MUSCLE_LOG_ERROR, "PacketizedProxyDataIO:  Error, tried to send packet with size " UINT32_FORMAT_SPEC ", max transfer unit is set to " UINT32_FORMAT_SPEC "\n", size, _maxTransferUnit);
      return -1;
   }

   // Only accept more data if we are done sending the data we already have buffered up
   bool tryAgainAfter = false;
   int32 ret = 0;
   if (HasBufferedOutput()) tryAgainAfter = true;
   else
   {
      // No data buffered?
      _outputBufferBytesSent = 0;

      if (_outputBuffer.SetNumBytes(sizeof(uint32)+size, false) != B_NO_ERROR) return 0;
      muscleCopyOut(_outputBuffer.GetBuffer(), B_HOST_TO_LENDIAN_INT32(size));
      memcpy(_outputBuffer.GetBuffer()+sizeof(uint32), buffer, size);
      ret = size;
   }

   if (WriteBufferedOutputAux() != B_NO_ERROR) return -1;

   return ((tryAgainAfter)&&(HasBufferedOutput() == false)) ? Write(buffer, size) : ret;
}
Example #7
TInt CRecordTimeAvailable::PostProcess(CStifSectionParser* /*aParser*/)
{
	TTimeIntervalMicroSeconds newTimeAvailable = recorder->RecordTimeAvailable();
	LogTime(_L("Clip duration:"),fileDuration);
	LogTime(_L("New record time available:"),newTimeAvailable);

	TInt64 val = newTimeAvailable.Int64();

	if ((val = Abs(timeAvailable.Int64() - newTimeAvailable.Int64() - fileDuration.Int64())) > errorRange.Int64())
	{
		LogTime(_L("Time available difference is out of specified range:"),val);
		return KErrOutOfRange;
	}

	return KErrNone;
}
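
In other words the test passes when |timeAvailable - newTimeAvailable - fileDuration| <= errorRange: the time reported as still available after recording should have shrunk by roughly the duration of the recorded clip. With illustrative numbers, if 60 s were available before recording, a 10 s clip was recorded, and 50.2 s are reported afterwards, the difference is |60 - 50.2 - 10| = 0.2 s, which passes whenever errorRange is at least 200 ms (the code itself works in microseconds via TTimeIntervalMicroSeconds).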
Example #8
// This method is here to 'wrap' the internal thread's virtual method call with some standard setup/tear-down code of our own
void Thread::InternalThreadEntryAux()
{
   const uint32 threadStackBase = 0;  // only here so we can get its address below
   _threadStackBase = &threadStackBase;  // remember this stack location so GetCurrentStackUsage() can reference it later on

   muscle_thread_key curThreadKey = GetCurrentThreadKey();
   if (_curThreadsMutex.Lock() == B_NO_ERROR)
   {
      (void) _curThreads.Put(curThreadKey, this);
      _curThreadsMutex.Unlock();
   }

   if ((_threadPriority != PRIORITY_UNSPECIFIED)&&(SetThreadPriorityAux(_threadPriority) != B_NO_ERROR))
   {
      LogTime(MUSCLE_LOG_ERROR, "Thread %p:  Unable to set thread priority to %i\n", this, _threadPriority);
   }

   if (_threadData[MESSAGE_THREAD_OWNER]._messages.HasItems()) SignalOwner();
   InternalThreadEntry();
   _threadData[MESSAGE_THREAD_INTERNAL]._messageSocket.Reset();  // this will wake up the owner thread with EOF on socket

   if (_curThreadsMutex.Lock() == B_NO_ERROR)
   {
      (void) _curThreads.Remove(curThreadKey);
      _curThreadsMutex.Unlock();
   }

   _threadStackBase = NULL;
}
Example #9
//WebRTC::RTP Callback Implementation
// Called on AudioGUM or MSG thread
int WebrtcAudioConduit::SendPacket(int channel, const void* data, int len)
{
  CSFLogDebug(logTag,  "%s : channel %d", __FUNCTION__, channel);

#if !defined(MOZILLA_EXTERNAL_LINKAGE)
  if (PR_LOG_TEST(GetLatencyLog(), PR_LOG_DEBUG)) {
    if (mProcessing.Length() > 0) {
      TimeStamp started = mProcessing[0].mTimeStamp;
      mProcessing.RemoveElementAt(0);
      mProcessing.RemoveElementAt(0); // 20ms packetization!  Could automate this by watching sizes
      TimeDuration t = TimeStamp::Now() - started;
      int64_t delta = t.ToMilliseconds();
      LogTime(AsyncLatencyLogger::AudioSendRTP, ((uint64_t) this), delta);
    }
  }
#endif
  ReentrantMonitorAutoEnter enter(mTransportMonitor);
  if(mTransmitterTransport &&
     (mTransmitterTransport->SendRtpPacket(data, len) == NS_OK))
  {
    CSFLogDebug(logTag, "%s Sent RTP Packet ", __FUNCTION__);
    return len;
  } else {
    CSFLogError(logTag, "%s RTP Packet Send Failed ", __FUNCTION__);
    return -1;
  }
}
Example #10
char *get_netbios_name(struct main_args *margs,char *netbios) {
  struct ndstruct *nd;

  nd = margs->ndoms;
  while(nd && netbios) {
    if (margs->debug)
      fprintf(stderr,"%s| %s: Netbios domain loop: netbios@domain %s@%s\n",LogTime(), PROGRAM,nd->netbios,nd->domain);
    if (nd->netbios && !strcasecmp(nd->netbios,netbios)) {
      if (margs->debug)
        fprintf(stderr,"%s| %s: Found netbios@domain %s@%s\n",LogTime(), PROGRAM,nd->netbios,nd->domain);
      return(nd->domain);
    }
    nd = nd->next;
  }

  return NULL;
}
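
The ndstruct list this walks is not shown in the example; judging only from the fields it dereferences, the node type presumably looks roughly like this (a sketch, not the helper's actual declaration):

  struct ndstruct {
    char *netbios;            /* NetBIOS domain name to match against */
    char *domain;             /* domain name returned on a match */
    struct ndstruct *next;    /* next mapping in the singly linked list */
  };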
Example #11
int32 PacketizedProxyDataIO :: Read(void * buffer, uint32 size)
{
   int32 ret = 0;

   if (_inputBufferSizeBytesRead < sizeof(uint32))
   {
      uint8 * ip = (uint8 *) &_inputBufferSize;
      const int32 numSizeBytesRead = ProxyDataIO::Read(&ip[_inputBufferSizeBytesRead], sizeof(uint32)-_inputBufferSizeBytesRead);
      if (numSizeBytesRead < 0) return -1;
      _inputBufferSizeBytesRead += numSizeBytesRead;
      if (_inputBufferSizeBytesRead == sizeof(uint32))
      {
         _inputBufferSize = B_LENDIAN_TO_HOST_INT32(_inputBufferSize);
         if (_inputBufferSize > _maxTransferUnit)
         {
            LogTime(MUSCLE_LOG_ERROR, "PacketizedProxyDataIO:  Error, incoming packet with size " UINT32_FORMAT_SPEC ", max transfer unit is set to " UINT32_FORMAT_SPEC "\n", _inputBufferSize, _maxTransferUnit);
            return -1;
         }
         if (_inputBuffer.SetNumBytes(_inputBufferSize, false) != B_NO_ERROR) return -1;
         _inputBufferBytesRead = 0;

         // Special case for empty packets
         if (_inputBufferSize == 0) _inputBufferSizeBytesRead = 0;
      }
   }

   const uint32 inBufSize = _inputBuffer.GetNumBytes();
   if ((_inputBufferSizeBytesRead == sizeof(uint32))&&(_inputBufferBytesRead < inBufSize))
   {
      const int32 numBytesRead = ProxyDataIO::Read(_inputBuffer.GetBuffer()+_inputBufferBytesRead, inBufSize-_inputBufferBytesRead);
      if (numBytesRead < 0) return -1;

      _inputBufferBytesRead += numBytesRead;
      if (_inputBufferBytesRead == inBufSize)
      {
         const uint32 copyBytes = muscleMin(size, inBufSize);
         if (size < inBufSize) LogTime(MUSCLE_LOG_WARNING, "PacketizedProxyDataIO:  Truncating incoming packet (" UINT32_FORMAT_SPEC " bytes available, only " UINT32_FORMAT_SPEC " bytes in user buffer)\n", inBufSize, size);
         memcpy(buffer, _inputBuffer.GetBuffer(), copyBytes);
         ret = copyBytes;

         _inputBufferSizeBytesRead = _inputBufferBytesRead = 0;
         _inputBuffer.Clear(inBufSize>(64*1024));  // free up memory after a large packet recv
      }
   }
   return ret;
}
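
Together with Write() in Example #6, this implies a very simple framing: every packet travels as a 4-byte little-endian payload length followed by the payload itself. A sketch of a helper that builds such a frame into a caller-supplied buffer, inferred from the code above rather than taken from a MUSCLE header:

   // Sketch only: writes the little-endian length prefix, then the payload bytes.
   // The caller must supply at least sizeof(uint32)+payloadSize bytes in outFrame.
   uint32 FramePacketSketch(const void * payload, uint32 payloadSize, uint8 * outFrame)
   {
      const uint32 leSize = B_HOST_TO_LENDIAN_INT32(payloadSize);
      memcpy(outFrame, &leSize, sizeof(uint32));               // length prefix
      memcpy(outFrame+sizeof(uint32), payload, payloadSize);   // payload bytes
      return sizeof(uint32)+payloadSize;                       // total frame size
   }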
Example #12
static status_t SetRealTimePriority(const char * priStr, bool useFifo)
{
   struct sched_param schedparam; memset(&schedparam, 0, sizeof(schedparam));
   const int pri = (strlen(priStr) > 0) ? atoi(priStr) : 11;
   schedparam.sched_priority = pri;

   const char * desc = useFifo ? "SCHED_FIFO" : "SCHED_RR";
   if (sched_setscheduler(0, useFifo?SCHED_FIFO:SCHED_RR, &schedparam) == 0)
   {
      LogTime(MUSCLE_LOG_INFO, "Set process to real-time (%s) priority %i\n", desc, pri);
      return B_NO_ERROR;
   }
   else
   {
      LogTime(MUSCLE_LOG_ERROR, "Could not invoke real time (%s) scheduling priority %i (access denied?)\n", desc, pri);
      return B_ERROR;
   }
}
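
A hypothetical call from a daemon's startup path: ask for round-robin real-time scheduling at the default priority (an empty string falls back to 11 per the atoi() logic above) and simply carry on at normal priority if that fails:

   if (SetRealTimePriority("", false) != B_NO_ERROR)
      LogTime(MUSCLE_LOG_WARNING, "Continuing at normal scheduling priority\n");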
Example #13
status_t FilterSessionFactory :: RemoveRequirePattern(const String & requirePattern)
{
   if (_requires.ContainsKey(requirePattern))  // don't Remove() here, since then our argument might be a dangling reference during the LogTime() call
   {
      if (_tempLogFor) LogTime(MUSCLE_LOG_DEBUG, "Session [%s/%s] is removing requirement [%s] on port %u\n", _tempLogFor->GetHostName()(), _tempLogFor->GetSessionIDString()(), requirePattern(), _tempLogFor->GetPort());
      (void) _requires.Remove(requirePattern);
      return B_NO_ERROR;
   }
   return B_ERROR;
}
Example #14
bool
AbstractReflectSession ::
ClientConnectionClosed()
{
   if (_autoReconnectDelay == MUSCLE_TIME_NEVER) return true;  // true == okay to remove this session
   else
   {
      if (_wasConnected) LogTime(MUSCLE_LOG_DEBUG, "%s:  Connection severed, will auto-reconnect in " UINT64_FORMAT_SPEC "mS\n", GetSessionDescriptionString()(), MicrosToMillis(_autoReconnectDelay));
      PlanForReconnect();
      return false;
   }
}
Example #15
StdinDataIO :: StdinDataIO(bool blocking, bool writeToStdout)
   : _stdinBlocking(blocking)
   , _writeToStdout(writeToStdout)
#ifdef USE_WIN32_STDINDATAIO_IMPLEMENTATION
   , _slaveSocketTag(0)
#else
   , _fdIO(ConstSocketRef(&_stdinSocket, false), true)
#endif
{
#ifdef USE_WIN32_STDINDATAIO_IMPLEMENTATION
   if (_stdinBlocking == false)
   {
      // For non-blocking I/O, we need to handle stdin in a separate thread. 
      // note that I freopen stdin to "nul" so that other code (read: Python)
      // won't try to muck about with stdin and interfere with StdinDataIO's
      // operation.  I don't know of any good way to restore it again after,
      // though... so a side effect of StdinDataIO under Windows is that
      // stdin gets redirected to nul... once you've created one non-blocking
      // StdinDataIO, you'll need to continue accessing stdin only via
      // non-blocking StdinDataIOs.
      bool okay = false;
      ConstSocketRef slaveSocket;
      if ((CreateConnectedSocketPair(_masterSocket, slaveSocket, false) == B_NO_ERROR)&&(SetSocketBlockingEnabled(slaveSocket, true) == B_NO_ERROR)&&(_slaveSocketsMutex.Lock() == B_NO_ERROR))
      {
         bool threadCreated = false;
         if (_stdinThreadStatus == STDIN_THREAD_STATUS_UNINITIALIZED)
         {
            DWORD junkThreadID;
#if __STDC_WANT_SECURE_LIB__
            FILE * junkFD;
#endif
            _stdinThreadStatus = ((DuplicateHandle(GetCurrentProcess(), GetStdHandle(STD_INPUT_HANDLE), GetCurrentProcess(), &_stdinHandle, 0, false, DUPLICATE_SAME_ACCESS))&&
#if __STDC_WANT_SECURE_LIB__
               (freopen_s(&junkFD, "nul", "r", stdin) == 0)
#else
               (freopen("nul", "r", stdin) != NULL)
#endif
               &&((_slaveThread = (::HANDLE) _beginthreadex(NULL, 0, StdinThreadEntryFunc, NULL, CREATE_SUSPENDED, (unsigned *) &junkThreadID)) != 0)) ? STDIN_THREAD_STATUS_RUNNING : STDIN_THREAD_STATUS_EXITED;
            threadCreated = (_stdinThreadStatus == STDIN_THREAD_STATUS_RUNNING);
         }
         if ((_stdinThreadStatus == STDIN_THREAD_STATUS_RUNNING)&&(_slaveSockets.Put(_slaveSocketTag = (++_slaveSocketTagCounter), slaveSocket) == B_NO_ERROR)) okay = true;
                                                                                                                                                        else LogTime(MUSCLE_LOG_ERROR, "StdinDataIO:  Could not start stdin thread!\n");
         _slaveSocketsMutex.Unlock();

         // We don't start the thread running until here, that way there's no chance of race conditions if the thread exits immediately
         if (threadCreated) ResumeThread(_slaveThread);
      }
      else LogTime(MUSCLE_LOG_ERROR, "StdinDataIO:  Error setting up I/O sockets!\n");

      if (okay == false) Close();
   }
#endif
}
Example #16
void
AbstractReflectSession :: 
Pulse(const PulseArgs & args)
{
   PulseNode::Pulse(args);
   if ((args.GetCallbackTime() >= _reconnectTime)&&(IsThisSessionScheduledForPostSleepReconnect() == false))
   {
      if (_autoReconnectDelay == MUSCLE_TIME_NEVER) _reconnectTime = MUSCLE_TIME_NEVER;
      else
      {
         // FogBugz #3810
         if (_wasConnected) LogTime(MUSCLE_LOG_DEBUG, "%s is attempting to auto-reconnect...\n", GetSessionDescriptionString()());
         _reconnectTime = MUSCLE_TIME_NEVER;
         if (Reconnect() != B_NO_ERROR)
         {
            LogTime(MUSCLE_LOG_DEBUG, "%s: Could not auto-reconnect, will try again later...\n", GetSessionDescriptionString()());
            PlanForReconnect();  // okay, we'll try again later!
         }
      }
   }
   else if ((IsConnectingAsync())&&(args.GetCallbackTime() >= _asyncConnectTimeoutTime)) (void) DisconnectSession();  // force us to terminate our async-connect now 
}
Example #17
bool ChatLog::AddMessage( const wxString& text )
{
    if ( !LogEnabled() || ! m_active ) {
        return true;
    }
    else if ( !m_logfile.IsOpened() ) {
        m_active = OpenLogFile();
    }
    if ( m_active )
    {
        return WriteLine( LogTime() + _T( " " ) + text + wxTextBuffer::GetEOL() );
    }
    else return false;
}
Example #18
void InitTPM2(void)
{
  struct sockaddr_in RecAddr;
  int val ;
  
  val = (0==0) ;

  if (TPMVerbose==1) {
    fprintf (logfd,"%s: ",LogTime()) ;
    fprintf (logfd,"Init TPM2\n") ;
  } ;

  // Reserve Receive-Socket
  if ((TPM2FD = socket(AF_INET, SOCK_DGRAM,0)) == -1) {
    perror("CANGateway: TPM2 socket");
    exit(0);
  }
  
  memset(&RecAddr, 0, sizeof(struct sockaddr_in));
  RecAddr.sin_family      = AF_INET;
  RecAddr.sin_addr.s_addr = INADDR_ANY;   // zero means "accept data from any IP address"
  RecAddr.sin_port        = htons(TPM2_NET_PORT);

  // Bind the socket to the Address
  if (bind(TPM2FD, (struct sockaddr *) &RecAddr, sizeof(RecAddr)) == -1) {
    // Address already in use (CANControl might be running) - report it and give up
    close (TPM2FD) ;
    perror("CANGateway: TPM2FD socket in use");
    exit(0);
  }
    
  setsockopt(TPM2FD, SOL_SOCKET, SO_BROADCAST, (char *) &val, sizeof(val)) ;  
  if (TPMVerbose==1) {
    fprintf (logfd,"%s: ",LogTime()) ;
    fprintf (logfd,"TPM2 initialized\n") ;
  } ;
}
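
The example only sets the socket up; a receive path for TPM2FD might look like the sketch below (the ReceiveTPM2 name and the 1500-byte datagram buffer are assumptions, not part of the original gateway code):

  void ReceiveTPM2(void)
  {
    unsigned char Buf[1500];
    struct sockaddr_in From;
    socklen_t FromLen = sizeof(From);
    ssize_t Len;

    Len = recvfrom(TPM2FD, Buf, sizeof(Buf), 0, (struct sockaddr *) &From, &FromLen);
    if (Len < 0) {
      perror("CANGateway: TPM2 recvfrom");
      return;
    }
    if (TPMVerbose==1) {
      fprintf (logfd,"%s: ",LogTime()) ;
      fprintf (logfd,"TPM2 received %d bytes\n",(int) Len) ;
    }
    /* ... hand the datagram to the TPM2 frame handling ... */
  }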
Example #19
void
MediaEngineWebRTCMicrophoneSource::InsertInGraph(const T* aBuffer,
                                                 size_t aFrames,
                                                 uint32_t aChannels)
{
  if (mState != kStarted) {
    return;
  }

  if (MOZ_LOG_TEST(AudioLogModule(), LogLevel::Debug)) {
    mTotalFrames += aFrames;
    if (mTotalFrames > mLastLogFrames + mSampleFrequency) { // ~ 1 second
      MOZ_LOG(AudioLogModule(), LogLevel::Debug,
              ("%p: Inserting %" PRIuSIZE " samples into graph, total frames = %" PRIu64,
               (void*)this, aFrames, mTotalFrames));
      mLastLogFrames = mTotalFrames;
    }
  }

  size_t len = mSources.Length();
  for (size_t i = 0; i < len; i++) {
    if (!mSources[i]) {
      continue;
    }
    RefPtr<SharedBuffer> buffer =
      SharedBuffer::Create(aFrames * aChannels * sizeof(T));
    PodCopy(static_cast<T*>(buffer->Data()),
            aBuffer, aFrames * aChannels);

    TimeStamp insertTime;
    // Make sure we include the stream and the track.
    // The 0:1 is a flag to note when we've done the final insert for a given input block.
    LogTime(AsyncLatencyLogger::AudioTrackInsertion,
            LATENCY_STREAM_ID(mSources[i].get(), mTrackID),
            (i+1 < len) ? 0 : 1, insertTime);

    nsAutoPtr<AudioSegment> segment(new AudioSegment());
    AutoTArray<const T*, 1> channels;
    // XXX Bug 971528 - Support stereo capture in gUM
    MOZ_ASSERT(aChannels == 1,
        "GraphDriver only supports mono audio for now");
    channels.AppendElement(static_cast<T*>(buffer->Data()));
    segment->AppendFrames(buffer.forget(), channels, aFrames,
                         mPrincipalHandles[i]);
    segment->GetStartTime(insertTime);

    mSources[i]->AppendToTrack(mTrackID, segment);
  }
}
Example #20
status_t FilterSessionFactory :: PutRequirePattern(const String & requirePattern)
{
   TCHECKPOINT;

   if (_requires.ContainsKey(requirePattern)) return B_NO_ERROR;
   StringMatcherRef newMatcherRef(newnothrow StringMatcher(requirePattern));
   if (newMatcherRef())
   {
      if (_requires.Put(requirePattern, newMatcherRef) == B_NO_ERROR)
      {
         if (_tempLogFor) LogTime(MUSCLE_LOG_DEBUG, "Session [%s/%s] is requiring [%s] on port %u\n", _tempLogFor->GetHostName()(), _tempLogFor->GetSessionIDString()(), requirePattern(), _tempLogFor->GetPort());
         return B_NO_ERROR;
      }
   }
   else WARN_OUT_OF_MEMORY;

   return B_ERROR;
}
Example #21
static uint32 GetNextGlobalID(uint32 & counter)
{
   uint32 ret;

   Mutex * ml = GetGlobalMuscleLock();
   MASSERT(ml, "Please instantiate a CompleteSetupSystem object on the stack before creating any session or session-factory objects (at beginning of main() is preferred)\n");

   if (ml->Lock() == B_NO_ERROR) 
   {
      ret = counter++;
      ml->Unlock();
   }
   else
   {
      LogTime(MUSCLE_LOG_CRITICALERROR, "Could not lock global muscle lock while assigning new ID!!?!\n");
      ret = counter++;  // do it anyway, I guess
   }
   return ret;
}
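
A hypothetical wrapper showing the intended usage pattern: each ID family keeps its own file-local counter and funnels it through GetNextGlobalID() so that allocation is serialized by the global lock (the session-ID naming here is illustrative):

   static uint32 _sessionIDCounter = 0;
   static uint32 GetNextSessionIDSketch() {return GetNextGlobalID(_sessionIDCounter);}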
Example #22
void
MediaEngineWebRTCMicrophoneSource::InsertInGraph(const T* aBuffer,
                                                 size_t aFrames,
                                                 uint32_t aChannels)
{
  if (mState != kStarted) {
    return;
  }

  size_t len = mSources.Length();
  for (size_t i = 0; i < len; i++) {
    if (!mSources[i]) {
      continue;
    }
    RefPtr<SharedBuffer> buffer =
      SharedBuffer::Create(aFrames * aChannels * sizeof(T));
    PodCopy(static_cast<T*>(buffer->Data()),
            aBuffer, aFrames * aChannels);

    TimeStamp insertTime;
    // Make sure we include the stream and the track.
    // The 0:1 is a flag to note when we've done the final insert for a given input block.
    LogTime(AsyncLatencyLogger::AudioTrackInsertion,
            LATENCY_STREAM_ID(mSources[i].get(), mTrackID),
            (i+1 < len) ? 0 : 1, insertTime);

    nsAutoPtr<AudioSegment> segment(new AudioSegment());
    AutoTArray<const T*, 1> channels;
    // XXX Bug 971528 - Support stereo capture in gUM
    MOZ_ASSERT(aChannels == 1,
        "GraphDriver only supports mono audio for now");
    channels.AppendElement(static_cast<T*>(buffer->Data()));
    segment->AppendFrames(buffer.forget(), channels, aFrames,
                         mPrincipalHandles[i]);
    segment->GetStartTime(insertTime);

    RUN_ON_THREAD(mThread,
                  WrapRunnable(mSources[i], &SourceMediaStream::AppendToTrack,
                               mTrackID, segment,
                               static_cast<AudioSegment*>(nullptr)),
                  NS_DISPATCH_NORMAL);
  }
}
Example #23
void CConnClient::ShutDown(const char* szMsg)
{
	if(m_bShutting)
		return;
	m_bShutting = true;
	CCoreSceneMgrClient::Inst()->DestroyMainSceneIfNotBeingUsedByLogic();
	CTimeSystemClient::Inst()->UnRegister(this);
	if (IntShutDown() && szMsg)
	{
		ostringstream strm;
		CAddress addr;
		const char* szUserName = GetValue("UserName");
		szUserName = szUserName?szUserName:"";
		GetLocalAddress(addr);
		LogTime(strm);
		strm << " account:" << szUserName << " reason:" << szMsg << " localip:" << addr.GetAddress() << " remoteip: " << endl;
		LogOnOffLineMsg(strm.str().c_str());
	}
	Inst()=NULL;
}
Example #24
void *
cv1d_a_real (int  border_effect,
	     void *res_data,
	     int  *first_exact_ptr,
	     int  *last_exact_ptr)
{
  int method;
  real * ret_value = 0;
  void * (*the_cv1d_fct_ptr)();

  LogMessage("a r ");
  if (cv1d_method == CV1D_UNDEFINED) {
    method =
      cv1d_convolution_method_ (sig_n, flt.d_n, lim_array[ANALYTICAL][border_effect]);
  } else {
    method = cv1d_method;
  }
  if ((method == DIRECT_CONVOLUTION) && (!flt.d_real_ptr && !flt.d_imag_ptr)) {
    method = MULTI_PART_CONVOLUTION;
  }

  if ((method == FOURIER_TRANSFORM_CONVOLUTION)
      && !cv1d_is_good_fft_size (sig_n)) {
    method = MULTI_PART_CONVOLUTION;
  }

#ifdef LOG_MESSAGES
  LogMessage2("%s ", method_str[method]);
  LogMessage2("%d ", sig_n);
  LogMessage2("%d ", flt.d_n);
#endif
  the_cv1d_fct_ptr = cv1d_a_fct_ptr_array[REAL][method];
  SetLogTimeBegin();
  ret_value = 
    the_cv1d_fct_ptr (border_effect, res_data, first_exact_ptr, last_exact_ptr);
  LogTime();

  return ret_value;
}
Example #25
void
MediaEngineWebRTCAudioSource::Process(int channel,
  webrtc::ProcessingTypes type, sample* audio10ms,
  int length, int samplingFreq, bool isStereo)
{
  MonitorAutoLock lock(mMonitor);
  if (mState != kStarted)
    return;

  uint32_t len = mSources.Length();
  for (uint32_t i = 0; i < len; i++) {
    nsRefPtr<SharedBuffer> buffer = SharedBuffer::Create(length * sizeof(sample));

    sample* dest = static_cast<sample*>(buffer->Data());
    memcpy(dest, audio10ms, length * sizeof(sample));

    AudioSegment segment;
    nsAutoTArray<const sample*,1> channels;
    channels.AppendElement(dest);
    segment.AppendFrames(buffer.forget(), channels, length);
    TimeStamp insertTime;
    segment.GetStartTime(insertTime);

    SourceMediaStream *source = mSources[i];
    if (source) {
      // This is safe from any thread, and is safe if the track is Finished
      // or Destroyed.
      // Make sure we include the stream and the track.
      // The 0:1 is a flag to note when we've done the final insert for a given input block.
      LogTime(AsyncLatencyLogger::AudioTrackInsertion, LATENCY_STREAM_ID(source, mTrackID),
              (i+1 < len) ? 0 : 1, insertTime);

      source->AppendToTrack(mTrackID, &segment);
    }
  }

  return;
}
Example #26
void CheckThreadStackUsage(const char * fileName, uint32 line)
{
   Thread * curThread = Thread::GetCurrentThread();
   if (curThread)
   {
      const uint32 maxUsage = curThread->GetSuggestedStackSize();
      if (maxUsage != 0)  // if the Thread doesn't have a suggested stack size, then we don't know what the limit is
      {
         const uint32 curUsage = curThread->GetCurrentStackUsage();
         if (curUsage > maxUsage)
         {
            char buf[20];
            LogTime(MUSCLE_LOG_CRITICALERROR, "Thread %s exceeded its suggested stack usage (" UINT32_FORMAT_SPEC " > " UINT32_FORMAT_SPEC ") at (%s:" UINT32_FORMAT_SPEC "), aborting program!\n", muscle_thread_id::GetCurrentThreadID().ToString(buf), curUsage, maxUsage, fileName, line);
            MCRASH("MUSCLE Thread exceeded its suggested stack allowance");
         }
      }
   }
   else
   {
      char buf[20];
      printf("Warning, CheckThreadStackUsage() called from non-MUSCLE thread %s at (%s:" UINT32_FORMAT_SPEC ")\n", muscle_thread_id::GetCurrentThreadID().ToString(buf), fileName, line);
   }
}
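
Because the function takes the caller's file name and line number, the natural way to invoke it is through a small macro that captures the call site automatically; a sketch (the macro name is illustrative, not necessarily what MUSCLE itself defines):

   // Sketch: expands to a stack-usage check tagged with the caller's location.
   #define CHECK_THREAD_STACK_USAGE_SKETCH() CheckThreadStackUsage(__FILE__, __LINE__)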
Example #27
//WebRTC::RTP Callback Implementation
int WebrtcAudioConduit::SendPacket(int channel, const void* data, int len)
{
  CSFLogDebug(logTag,  "%s : channel %d %s", __FUNCTION__, channel,
              (mEngineReceiving && mOtherDirection) ? "(using mOtherDirection)" : "");

  if (mEngineReceiving)
  {
    if (mOtherDirection)
    {
      return mOtherDirection->SendPacket(channel, data, len);
    }
    CSFLogDebug(logTag,  "%s : Asked to send RTP without an RTP sender on channel %d",
                __FUNCTION__, channel);
    return -1;
  } else {
#ifdef MOZILLA_INTERNAL_API
    if (PR_LOG_TEST(GetLatencyLog(), PR_LOG_DEBUG)) {
      if (mProcessing.Length() > 0) {
        TimeStamp started = mProcessing[0].mTimeStamp;
        mProcessing.RemoveElementAt(0);
        mProcessing.RemoveElementAt(0); // 20ms packetization!  Could automate this by watching sizes
        TimeDuration t = TimeStamp::Now() - started;
        int64_t delta = t.ToMilliseconds();
        LogTime(AsyncLatencyLogger::AudioSendRTP, ((uint64_t) this), delta);
      }
    }
#endif
    if(mTransport && (mTransport->SendRtpPacket(data, len) == NS_OK))
    {
      CSFLogDebug(logTag, "%s Sent RTP Packet ", __FUNCTION__);
      return len;
    } else {
      CSFLogError(logTag, "%s RTP Packet Send Failed ", __FUNCTION__);
      return -1;
    }
  }
}
Example #28
TInt CRecordTimeAvailable::PreProcess(CStifSectionParser *aParser)
{
	if (recorder)
	{
//		recorder->SetAudioDeviceMode(CMdaAudioRecorderUtility::ELocal);
//		recorder->SetGain(recorder->MaxGain());
//		recorder->SetPosition(TTimeIntervalMicroSeconds(TInt64(0)));
		// Crop deletes the whole file, so the new content would not be appended at the end;
		// it would overwrite the previous file instead.
//		TRAPD(err,recorder->CropL());
//		if (err != KErrNone) logger->Log(_L("Error cropping file: %d"),err);

		//Get error range
		CStifItemParser* item = aParser->GetItemLineL(KErrorRange, ENoTag);
		CleanupStack::PushL(item);
		errorRange = CRecordingBase::ParseTime(item);
		CleanupStack::PopAndDestroy();
		LogTime(_L("Error range:"),errorRange);

//		timeAvailable = recorder->RecordTimeAvailable();
//		LogTime(_L("Record time available:"),timeAvailable);
	}
	return KErrNone;
}
Example #29
void InitializeDLL()
{
	if (!gInitialized)
	{
		gInitialized = true;

		wchar_t dir[MAX_PATH];
		GetModuleFileName(0, dir, MAX_PATH);
		wcsrchr(dir, L'\\')[1] = 0;
		wcscat(dir, L"d3dx.ini");

		// If specified in Logging section, wait for Attach to Debugger.
		bool waitfordebugger = false;
		int debugger = GetPrivateProfileInt(L"Logging", L"waitfordebugger", 0, dir);
		if (debugger > 0)
		{
			waitfordebugger = true;
			do
			{
				Sleep(250);
			} while (!IsDebuggerPresent());
			if (debugger > 1)
				__debugbreak();
		}

		// Switch to unbuffered logging to remove need for fflush calls, and r/w access to make it easy
		// to open active files.
		if (GetPrivateProfileInt(L"Logging", L"calls", 1, dir))
		{
			LogFile = _fsopen("dxgi_log.txt", "w", _SH_DENYNO);
			LogInfo("\n *** DXGI DLL starting init  -  %s\n\n", LogTime().c_str());
		}

		gLogDebug = GetPrivateProfileInt(L"Logging", L"debug", 0, dir) == 1;

		// Unbuffered logging to remove need for fflush calls, and r/w access to make it easy
		// to open active files.
		int unbuffered = -1;
		if (GetPrivateProfileInt(L"Logging", L"unbuffered", 0, dir))
		{
			unbuffered = setvbuf(LogFile, NULL, _IONBF, 0);
			LogInfo("  unbuffered=1  return: %d\n", unbuffered);
		}

		// Set the CPU affinity based upon d3dx.ini setting.  Useful for debugging and shader hunting in AC3.
		if (GetPrivateProfileInt(L"Logging", L"force_cpu_affinity", 0, dir))
		{
			DWORD one = 0x01;
			BOOL result = SetProcessAffinityMask(GetCurrentProcess(), one);
			LogInfo("  CPU Affinity forced to 1- no multithreading: %s\n", result ? "true" : "false");
		}

		if (LogFile)
		{
			LogInfo("[Logging]\n");
			LogInfo("  calls=1\n");
			LogDebug("  debug=1\n");
			if (waitfordebugger) LogInfo("  waitfordebugger=1\n");
		}

		wchar_t val[MAX_PATH];
		int read = GetPrivateProfileString(L"Device", L"width", 0, val, MAX_PATH, dir);
		if (read) swscanf_s(val, L"%d", &SCREEN_WIDTH);
		read = GetPrivateProfileString(L"Device", L"height", 0, val, MAX_PATH, dir);
		if (read) swscanf_s(val, L"%d", &SCREEN_HEIGHT);
		read = GetPrivateProfileString(L"Device", L"refresh_rate", 0, val, MAX_PATH, dir);
		if (read) swscanf_s(val, L"%d", &SCREEN_REFRESH);
		SCREEN_FULLSCREEN = GetPrivateProfileInt(L"Device", L"full_screen", 0, dir) == 1;
		SCREEN_ALLOW_COMMANDS = GetPrivateProfileInt(L"Device", L"allow_windowcommands", 0, dir) == 1;
		read = GetPrivateProfileString(L"Device", L"filter_refresh_rate", 0, val, MAX_PATH, dir);
		if (read) swscanf_s(val, L"%d,%d,%d,%d,%d,%d,%d,%d,%d,%d",
			FILTER_REFRESH+0, FILTER_REFRESH+1, FILTER_REFRESH+2, FILTER_REFRESH+3, FILTER_REFRESH+4,
			FILTER_REFRESH+5, FILTER_REFRESH+6, FILTER_REFRESH+7, FILTER_REFRESH+8, FILTER_REFRESH+9);

		if (LogFile)
		{
			LogInfo("[Device]\n");
			if (SCREEN_WIDTH != -1) LogInfo("  width=%d\n", SCREEN_WIDTH);
			if (SCREEN_HEIGHT != -1) LogInfo("  height=%d\n", SCREEN_HEIGHT);
			if (SCREEN_REFRESH != -1) LogInfo("  refresh_rate=%d\n", SCREEN_REFRESH);
			if (FILTER_REFRESH[0]) LogInfoW(L"  filter_refresh_rate=%d\n", FILTER_REFRESH[0]);
			if (SCREEN_FULLSCREEN) LogInfo("  full_screen=1\n");

			if (SCREEN_ALLOW_COMMANDS) LogInfo("  allow_windowcommands=1\n");
		}
	}

	LogInfo(" *** DXGI DLL successfully initialized. *** \n\n");
}
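
A hypothetical d3dx.ini fragment exercising the settings parsed above; the section and key names come from the code, the values are placeholders:

[Logging]
calls=1
debug=0
unbuffered=1
waitfordebugger=0
force_cpu_affinity=0

[Device]
width=1920
height=1080
refresh_rate=60
full_screen=1
allow_windowcommands=0
filter_refresh_rate=60,59,0,0,0,0,0,0,0,0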
Example #30
MediaConduitErrorCode
WebrtcAudioConduit::GetAudioFrame(int16_t speechData[],
                                   int32_t samplingFreqHz,
                                   int32_t capture_delay,
                                   int& lengthSamples)
{

  CSFLogDebug(logTag,  "%s ", __FUNCTION__);
  unsigned int numSamples = 0;

  //validate params
  if(!speechData )
  {
    CSFLogError(logTag,"%s Null Audio Buffer Pointer", __FUNCTION__);
    MOZ_ASSERT(PR_FALSE);
    return kMediaConduitMalformedArgument;
  }

  // Validate sample length
  if((numSamples = GetNum10msSamplesForFrequency(samplingFreqHz)) == 0  )
  {
    CSFLogError(logTag,"%s Invalid Sampling Frequency ", __FUNCTION__);
    MOZ_ASSERT(PR_FALSE);
    return kMediaConduitMalformedArgument;
  }

  //validate capture time
  if(capture_delay < 0 )
  {
    CSFLogError(logTag,"%s Invalid Capture Delay ", __FUNCTION__);
    MOZ_ASSERT(PR_FALSE);
    return kMediaConduitMalformedArgument;
  }

  //Conduit should have reception enabled before we ask for decoded
  // samples
  if(!mEngineReceiving)
  {
    CSFLogError(logTag, "%s Engine not Receiving ", __FUNCTION__);
    return kMediaConduitSessionNotInited;
  }


  lengthSamples = 0;  // output parameter

  if(mPtrVoEXmedia->ExternalPlayoutGetData( speechData,
                                            samplingFreqHz,
                                            capture_delay,
                                            lengthSamples) == -1)
  {
    int error = mPtrVoEBase->LastError();
    CSFLogError(logTag,  "%s Getting audio data Failed %d", __FUNCTION__, error);
    if(error == VE_RUNTIME_PLAY_ERROR)
    {
      return kMediaConduitPlayoutError;
    }
    return kMediaConduitUnknownError;
  }

  // Not #ifdef DEBUG or on a log module so we can use it for about:webrtc/etc
  mSamples += lengthSamples;
  if (mSamples >= mLastSyncLog + samplingFreqHz) {
    int jitter_buffer_delay_ms;
    int playout_buffer_delay_ms;
    int avsync_offset_ms;
    if (GetAVStats(&jitter_buffer_delay_ms,
                   &playout_buffer_delay_ms,
                   &avsync_offset_ms)) {
#ifdef MOZILLA_INTERNAL_API
      if (avsync_offset_ms < 0) {
        Telemetry::Accumulate(Telemetry::WEBRTC_AVSYNC_WHEN_VIDEO_LAGS_AUDIO_MS,
                              -avsync_offset_ms);
      } else {
        Telemetry::Accumulate(Telemetry::WEBRTC_AVSYNC_WHEN_AUDIO_LAGS_VIDEO_MS,
                              avsync_offset_ms);
      }
#endif
      CSFLogError(logTag,
                  "A/V sync: sync delta: %dms, audio jitter delay %dms, playout delay %dms",
                  avsync_offset_ms, jitter_buffer_delay_ms, playout_buffer_delay_ms);
    } else {
      CSFLogError(logTag, "A/V sync: GetAVStats failed");
    }
    mLastSyncLog = mSamples;
  }

#ifdef MOZILLA_INTERNAL_API
  if (PR_LOG_TEST(GetLatencyLog(), PR_LOG_DEBUG)) {
    if (mProcessing.Length() > 0) {
      unsigned int now;
      mPtrVoEVideoSync->GetPlayoutTimestamp(mChannel, now);
      if (static_cast<uint32_t>(now) != mLastTimestamp) {
        mLastTimestamp = static_cast<uint32_t>(now);
        // Find the block that includes this timestamp in the network input
        while (mProcessing.Length() > 0) {
          // FIX! assumes 20ms @ 48000Hz
          // FIX handle wrap-around
          if (mProcessing[0].mRTPTimeStamp + 20*(48000/1000) >= now) {
            TimeDuration t = TimeStamp::Now() - mProcessing[0].mTimeStamp;
            // Wrap-around?
            int64_t delta = t.ToMilliseconds() + (now - mProcessing[0].mRTPTimeStamp)/(48000/1000);
            LogTime(AsyncLatencyLogger::AudioRecvRTP, ((uint64_t) this), delta);
            break;
          }
          mProcessing.RemoveElementAt(0);
        }
      }
    }
  }
#endif
  CSFLogDebug(logTag,"%s GetAudioFrame:Got samples: length %d ",__FUNCTION__,
                                                               lengthSamples);
  return kMediaConduitNoError;
}
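
The latency bookkeeping near the end assumes 20 ms packets at 48 kHz: each packet advances the RTP timestamp by 20 * (48000/1000) = 960 units, and dividing a timestamp difference by 48000/1000 = 48 converts it back into milliseconds before it is added to the wall-clock delta passed to LogTime(); the FIX comments in the loop acknowledge that both the packet size and the missing wrap-around handling are hard-coded simplifications.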