Example #1
0
uint32_t CoreAudioEncoder::encodeChunk(UInt32 npackets)
{
    prepareOutputBuffer(npackets);
    AudioBufferList *abl = m_output_abl.get();
    AudioStreamPacketDescription *aspd = &m_packet_desc[0];

    CHECKCA(AudioConverterFillComplexBuffer(m_converter, staticInputDataProc,
                                            this, &npackets, abl, aspd));

    if (samplesRead() == 0)
        return 0;

    if (npackets == 0 && abl->mBuffers[0].mDataByteSize == 0)
        return 0;

    if (!m_requires_packet_desc) {
        writeSamples(abl->mBuffers[0].mData,
                     abl->mBuffers[0].mDataByteSize, npackets);
    } else {
        for (uint32_t i = 0; i < npackets; ++i) {
            if (aspd[i].mVariableFramesInPacket) m_variable_packet_len = true;
            uint32_t nsamples =
                m_variable_packet_len ? aspd[i].mVariableFramesInPacket
                                      : m_output_desc.mFramesPerPacket;
            if (nsamples) {
                uint8_t *p = static_cast<uint8_t*>(abl->mBuffers[0].mData);
                writeSamples(p + aspd[i].mStartOffset,
                             aspd[i].mDataByteSize, nsamples);
            }
        }
    }
    return npackets;
}
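For context, a hedged sketch of how a chunk encoder like the one above might be driven from a top-level loop; encodeChunk() is taken from the example, while the driver function, its name, and the 4096-packet batch size are assumptions made only for illustration.

// Illustrative driver only: assumes the CoreAudioEncoder::encodeChunk() shown
// above; encodeAll() and the batch size are hypothetical.
#include <cstdint>

void encodeAll(CoreAudioEncoder &encoder)
{
    const uint32_t kPacketsPerChunk = 4096;   // arbitrary request size per call
    // encodeChunk() returns the number of packets actually produced;
    // 0 means the converter has drained all input, so the loop stops.
    while (encoder.encodeChunk(kPacketsPerChunk) > 0)
        ;
}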
Example #2
0
/*
 Description: write IQ samples to a given WARP node from a given array

 Arguments:
	samples (double complex*) 		- pointer to sample array
	start_sample (int)				- offset to the first sample to write
	num_samples (int)				- number of samples to write (between 1 and 2^15)
	node_sock (int)					- identifier of the node socket
	node_id (int)					- identifier of the node
	buffer_id (int)					- identifier of the buffer
	host_id (int)					- identifier of the host
*/
void writeIQ(double complex* samples, int start_sample, int num_samples, int node_sock, int node_id, int buffer_id, int host_id){

	// assert(initialized==1);

	int node_port = 9000 + node_id; // source port at host for node
	int max_length = 8928;              // number of bytes available for IQ samples in one packet, after all headers
	int num_pkts = (int)(num_samples*4/max_length) + 1; // 4 bytes per sample (16-bit I + 16-bit Q)
	int max_samples = 2232;             // maximum number of IQ samples per packet (max_length / 4)

	char writeIQ_buffer[22] =  {0, 0, 0, node_id, 0, host_id, 0, 1, 0, 8, 0, 9, 0, 0, 48, 0, 0, 7, 0, 0, 0, 0};

	uint16* sample_I_buffer = (uint16* ) malloc(num_samples*sizeof(uint16));
	uint16* sample_Q_buffer = (uint16* ) malloc(num_samples*sizeof(uint16));

	bzero(sample_I_buffer, num_samples*sizeof(uint16));
	bzero(sample_Q_buffer, num_samples*sizeof(uint16));

	int index;

	for (index = 0; index < num_samples; index++){
	  // scale each component into UFix_16_15 fixed-point format (multiply by 2^15 = 32768)
	  sample_I_buffer[index] = (uint16)(pow(2,15)*creal(samples[index]));
	  sample_Q_buffer[index] = (uint16)(pow(2,15)*cimag(samples[index]));
	}

	char base_ip_addr[20];
	char str[15];
	sprintf(str, "%d", node_id+1);

	strcpy(base_ip_addr, "10.0.0.");
	strcat(base_ip_addr, str);

	writeSamples(node_sock, writeIQ_buffer, 8962, (char*) base_ip_addr, node_port, num_samples, sample_I_buffer, sample_Q_buffer, (uint32) buffer_id, start_sample, num_pkts, max_samples, TRANSPORT_WARP_HW_v3);

	free(sample_I_buffer);
	free(sample_Q_buffer);
}
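A minimal calling sketch for writeIQ(), assuming C99 complex support; the socket, node, buffer and host identifiers are placeholders, and the test tone is invented purely for illustration. Both sample components are kept in [0, 1) so the UFix_16_15 scaling inside writeIQ() stays in range.

/* Illustrative caller only: writeIQ() is the function above, everything else
   here (identifiers, tone, buffer size) is a placeholder. */
#include <complex.h>
#include <math.h>

#define NUM_TEST_SAMPLES 1024

void sendTestTone(int node_sock, int node_id, int buffer_id, int host_id)
{
	double complex samples[NUM_TEST_SAMPLES];
	int n;

	/* complex test tone with both components offset into [0.1, 0.9],
	   matching the unsigned UFix_16_15 conversion in writeIQ() */
	for (n = 0; n < NUM_TEST_SAMPLES; n++)
		samples[n] = (0.5 + 0.4*cos(2.0*M_PI*n/32.0))
		           + I*(0.5 + 0.4*sin(2.0*M_PI*n/32.0));

	/* write all samples starting at offset 0 of the given buffer */
	writeIQ(samples, 0, NUM_TEST_SAMPLES, node_sock, node_id, buffer_id, host_id);
}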
Example #3
0
bool
DumpGui::run()
{
    if ( _fileOutputFPS ) {
        _fileOutputAdvance = static_cast<int>(1000/_fileOutputFPS);
    } else {
        _fileOutputAdvance = _interval;
        _fileOutputFPS = static_cast<int>(1000/_fileOutputAdvance);
    }

    log_debug("DumpGui entering main loop with interval of %d ms", _interval);

    // heart-beat interval, in milliseconds
    // TODO: extract this value from the swf's FPS
    //       by default and allow overriding it
    //
    unsigned int clockAdvance = _interval;

    const bool doDisplay = _fileStream.is_open();

    terminate_request = false;

    _startTime = _clock.elapsed();

    while (!terminate_request) {

        _clock.advance(clockAdvance); 

        // advance movie now
        advanceMovie(doDisplay);

        if (_started) {

            writeSamples();

            // Dump a video frame if it's time for it or no frame
            // was dumped yet
            size_t elapsed = _clock.elapsed();
            if (!_framecount || 
                    (elapsed - _lastVideoFrameDump) >= _fileOutputAdvance) {
                writeFrame();
            }

            // check if we've reached a timeout
            if (_timeout && _clock.elapsed() >= _timeout) {
                break;
            }
        }

        if (_sleepUS) gnashSleep(_sleepUS);

        if (!_started && !_startTrigger.empty()) {

            // Check whether to start
            std::string path;
            std::string var;
            if (parsePath(_startTrigger, path, var)) {
                movie_root& mr = *getStage();
                const as_environment& env = mr.getRootMovie().get_environment();
                as_object* o = findObject(env, path);
                if (o) {
                    as_value val;
                    o->get_member(getURI(mr.getVM(), "_ready"), &val);
                    if (val.equals(true, 8)) {
                        log_debug("Starting dump");
                        _started = true;
                        _startTime = _clock.elapsed();
                        _lastVideoFrameDump = _startTime;
                    }
                }
            }
        }
    }

    const boost::uint32_t total_time = _clock.elapsed() - _startTime;

    std::cout << "TIME=" << total_time << std::endl;
    std::cout << "FPS_ACTUAL=" << _fileOutputFPS << std::endl;
    
    // In this Gui, quit() does not exit, but it is necessary to catch the
    // last frame for screenshots.
    quit();
    return true;
}
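The frame-dump rule in the loop above (write a frame when none has been written yet, or when at least 1000/FPS milliseconds have elapsed since the last one) can be read in isolation; the following is a small sketch of that rule with invented names, not part of Gnash's API.

// Standalone sketch of the dump-timing rule used in DumpGui::run() above.
// DumpTimer and its members are illustrative names, not Gnash types.
#include <cstddef>

struct DumpTimer {
    explicit DumpTimer(unsigned int fps)
        : advanceMs(fps ? 1000 / fps : 0), lastDumpMs(0), frameCount(0) {}

    // Returns true when a frame should be written at time nowMs (milliseconds).
    bool shouldDump(std::size_t nowMs) {
        if (frameCount == 0 || nowMs - lastDumpMs >= advanceMs) {
            lastDumpMs = nowMs;
            ++frameCount;
            return true;
        }
        return false;
    }

    unsigned int advanceMs;   // equivalent of _fileOutputAdvance (1000 / FPS)
    std::size_t  lastDumpMs;  // equivalent of _lastVideoFrameDump
    std::size_t  frameCount;  // equivalent of _framecount
};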
Example #4
0
bool
DumpGui::run()
{
    if ( _fileOutputFPS ) {
        _fileOutputAdvance = static_cast<int>(1000/_fileOutputFPS);
    } else {
        _fileOutputAdvance = _interval;
        _fileOutputFPS = static_cast<int>(1000/_fileOutputAdvance);
    }
    

    log_debug("DumpGui entering main loop with interval of %d ms", _interval);

    // heart-beat interval, in milliseconds
    // TODO: extract this value from the swf's FPS
    //       by default and allow overriding it
    //
    unsigned int clockAdvance = _interval;

    VirtualClock& timer = getClock();

    const bool doDisplay = _fileStream.is_open();

    _terminate_request = false;
    while (!_terminate_request) {

        _manualClock.advance(clockAdvance); 

        // advance movie now
        advanceMovie(doDisplay);

        writeSamples();

        // Dump a video frame if it's time for it or no frame
        // was dumped yet
        size_t elapsed = timer.elapsed();
        if ( ! _framecount ||
                elapsed - _lastVideoFrameDump >= _fileOutputAdvance )
        {
            writeFrame();
        }

        // check if we've reached a timeout
        if (_timeout && timer.elapsed() >= _timeout ) {
            break;
        }

        if ( _sleepUS ) gnashSleep(_sleepUS);

    }

    boost::uint32_t total_time = timer.elapsed();

    std::cout << "TIME=" << total_time << std::endl;
    std::cout << "FPS_ACTUAL=" << _fileOutputFPS << std::endl;
    
    // In this Gui, quit() does not exit, but it is necessary to catch the
    // last frame for screenshots.
    quit();
    return true;
}
Example #5
0
bool CDMRDMORX::processSample(q15_t sample, uint16_t rssi)
{
  m_buffer[m_dataPtr] = sample;
  m_rssi[m_dataPtr] = rssi;

  m_bitBuffer[m_bitPtr] <<= 1;
  if (sample < 0)
    m_bitBuffer[m_bitPtr] |= 0x01U;

  if (m_state == DMORXS_NONE) {
    correlateSync(true);
  } else {

    uint16_t min  = m_syncPtr + DMO_BUFFER_LENGTH_SAMPLES - 1U;
    uint16_t max  = m_syncPtr + 1U;

    if (min >= DMO_BUFFER_LENGTH_SAMPLES)
      min -= DMO_BUFFER_LENGTH_SAMPLES;
    if (max >= DMO_BUFFER_LENGTH_SAMPLES)
      max -= DMO_BUFFER_LENGTH_SAMPLES;

    if (min < max) {
      if (m_dataPtr >= min && m_dataPtr <= max)
        correlateSync(false);
    } else {
      if (m_dataPtr >= min || m_dataPtr <= max)
        correlateSync(false);
    }
  }

  if (m_dataPtr == m_endPtr) {
    // Find the average centre and threshold values
    q15_t centre    = (m_centre[0U]    + m_centre[1U]    + m_centre[2U]    + m_centre[3U])    >> 2;
    q15_t threshold = (m_threshold[0U] + m_threshold[1U] + m_threshold[2U] + m_threshold[3U]) >> 2;

    uint8_t frame[DMR_FRAME_LENGTH_BYTES + 3U];
    frame[0U] = m_control;

    uint16_t ptr = m_endPtr + DMO_BUFFER_LENGTH_SAMPLES - DMR_FRAME_LENGTH_SAMPLES + DMR_RADIO_SYMBOL_LENGTH + 1U;
    if (ptr >= DMO_BUFFER_LENGTH_SAMPLES)
      ptr -= DMO_BUFFER_LENGTH_SAMPLES;

    samplesToBits(ptr, DMR_FRAME_LENGTH_SYMBOLS, frame, 8U, centre, threshold);

    if (m_control == CONTROL_DATA) {
      // Data sync
      uint8_t colorCode;
      uint8_t dataType;
      CDMRSlotType slotType;
      slotType.decode(frame + 1U, colorCode, dataType);

      if (colorCode == m_colorCode) {
        m_syncCount = 0U;
        m_n         = 0U;

        frame[0U] |= dataType;

        switch (dataType) {
          case DT_DATA_HEADER:
            DEBUG4("DMRDMORX: data header found pos/centre/threshold", m_syncPtr, centre, threshold);
            writeRSSIData(frame);
#if defined(DUMP_SAMPLES)
            writeSamples(ptr, frame[0U]);
#endif
            m_state = DMORXS_DATA;
            m_type  = 0x00U;
            break;
          case DT_RATE_12_DATA:
          case DT_RATE_34_DATA:
          case DT_RATE_1_DATA:
            if (m_state == DMORXS_DATA) {
              DEBUG4("DMRDMORX: data payload found pos/centre/threshold", m_syncPtr, centre, threshold);
              writeRSSIData(frame);
#if defined(DUMP_SAMPLES)
              writeSamples(ptr, frame[0U]);
#endif
              m_type = dataType;
            }
            break;
          case DT_VOICE_LC_HEADER:
            DEBUG4("DMRDMORX: voice header found pos/centre/threshold", m_syncPtr, centre, threshold);
            writeRSSIData(frame);
#if defined(DUMP_SAMPLES)
            writeSamples(ptr, frame[0U]);
#endif
            m_state = DMORXS_VOICE;
            break;
          case DT_VOICE_PI_HEADER:
            if (m_state == DMORXS_VOICE) {
              DEBUG4("DMRDMORX: voice pi header found pos/centre/threshold", m_syncPtr, centre, threshold);
              writeRSSIData(frame);
#if defined(DUMP_SAMPLES)
              writeSamples(ptr, frame[0U]);
#endif
            }
            m_state = DMORXS_VOICE;
            break;
          case DT_TERMINATOR_WITH_LC:
            if (m_state == DMORXS_VOICE) {
              DEBUG4("DMRDMORX: voice terminator found pos/centre/threshold", m_syncPtr, centre, threshold);
              writeRSSIData(frame);
#if defined(DUMP_SAMPLES)
              writeSamples(ptr, frame[0U]);
#endif
              reset();
            }
            break;
          default:    // DT_CSBK
            DEBUG4("DMRDMORX: csbk found pos/centre/threshold", m_syncPtr, centre, threshold);
            writeRSSIData(frame);
#if defined(DUMP_SAMPLES)
            writeSamples(ptr, frame[0U]);
#endif
            reset();
            break;
        }
      }
    } else if (m_control == CONTROL_VOICE) {
      // Voice sync
      DEBUG4("DMRDMORX: voice sync found pos/centre/threshold", m_syncPtr, centre, threshold);
      writeRSSIData(frame);
#if defined(DUMP_SAMPLES)
      writeSamples(ptr, frame[0U]);
#endif
      m_state     = DMORXS_VOICE;
      m_syncCount = 0U;
      m_n         = 0U;
    } else {
      if (m_state != DMORXS_NONE) {
        m_syncCount++;
        if (m_syncCount >= MAX_SYNC_LOST_FRAMES) {
          serial.writeDMRLost(true);
          reset();
        }
      }

      if (m_state == DMORXS_VOICE) {
        if (m_n >= 5U) {
          frame[0U] = CONTROL_VOICE;
          m_n = 0U;
        } else {
          frame[0U] = ++m_n;
        }

        serial.writeDMRData(true, frame, DMR_FRAME_LENGTH_BYTES + 1U);
#if defined(DUMP_SAMPLES)
        writeSamples(ptr, frame[0U]);
#endif
      } else if (m_state == DMORXS_DATA) {
        if (m_type != 0x00U) {
          frame[0U] = CONTROL_DATA | m_type;
          writeRSSIData(frame);
#if defined(DUMP_SAMPLES)
          writeSamples(ptr, frame[0U]);
#endif
        }
      }
    }

    // End of this slot, reset some items for the next slot.
    m_maxCorr = 0;
    m_control = CONTROL_NONE;
  }