// Awesomium load-completion callback: renders the freshly loaded page into
// a temporary byte buffer, writes a TGA snapshot to disk, then converts the
// pixels into the float buffer m_buffer for later use.
// NOTE(review): m_buffer is allocated here without freeing a previous
// allocation — if this callback can fire more than once, the earlier
// float buffer leaks; confirm who owns/releases m_buffer.
void onFinishLoading(Awesomium::WebView* caller) {
	std::cout << "Finished loading the page!" << std::endl;
	myfile << "ONFinished\n ";
	// 4 bytes per pixel — presumably BGRA as produced by WebView::render;
	// confirm against the Awesomium surface configuration.
	const size_t bufferSize = (size_t)texWidth * texHeight * 4;
	unsigned char* buffer = new unsigned char[bufferSize];
	m_buffer = new float[bufferSize];
	webView->render(buffer, texWidth * 4, 4);
	saveImageTGA(".\\output\\result.tga", buffer, texWidth, texHeight);
	convertBuffer(buffer, m_buffer);
	// BUG FIX: memory obtained with new[] must be released with delete[]
	// (plain `delete` on an array allocation is undefined behavior).
	// Also log the real allocation size: the previous `sizeof(buffer)`
	// only reported the size of the pointer itself.
	delete[] buffer;
	myfile << "Size of buffer after delete: " << bufferSize;
	std::cout << "Saved a render of the page to 'result.tga'." << std::endl;
	//system("pause");
}
// Resamples sourceBuffer into destBuffer, one r8brain resampler per channel.
// The source samples are first staged as doubles in mBufferd (r8brain works
// in double precision), then each channel is processed and converted back
// into the destination buffer's sample format.
// Returns {frames consumed, frames produced}, where the produced count is
// the one reported for the last channel processed.
pair<size_t, size_t> ConverterImplR8brain::convertImpl( const Buffer *sourceBuffer, Buffer *destBuffer, int readCount )
{
	// Stage the input in the double-precision scratch buffer.
	convertBuffer( sourceBuffer, &mBufferd );

	int framesOut = 0;
	for( size_t channel = 0; channel < mBufferd.getNumChannels(); channel++ ) {
		double *resampled = nullptr;
		// process() hands back a pointer to its internal output block.
		framesOut = mResamplers[channel]->process( mBufferd.getChannel( channel ), readCount, resampled );
		dsp::convert( resampled, destBuffer->getChannel( channel ), (size_t)framesOut );
	}

	return make_pair( readCount, (size_t)framesOut );
}
// CoreAudio IOProc event handler shared by a stream's input and output
// devices. Index [0] of the paired member arrays refers to the output side,
// index [1] to the input side (see id[0]/outputDevice and id[1]/inputDevice
// below). Depending on m_mode and which device fired (_deviceId), it:
//   1. bails out early when the stream is stopped/stopping/closed;
//   2. finishes an in-progress drain once drainCounter passes 3;
//   3. invokes the user callback to produce/consume one buffer period;
//   4. zero-fills, copies, converts and/or de-interleaves the user output
//      buffer into the CoreAudio output AudioBufferList;
//   5. copies, converts and/or interleaves the CoreAudio input
//      AudioBufferList into the user input buffer.
// Returns false only for the "stream closed" error case; true otherwise.
bool audio::orchestra::api::Core::callbackEvent(AudioDeviceID _deviceId,
                                                const AudioBufferList *_inBufferList,
                                                const audio::Time& _inTime,
                                                const AudioBufferList *_outBufferList,
                                                const audio::Time& _outTime) {
	if (    m_state == audio::orchestra::state::stopped
	     || m_state == audio::orchestra::state::stopping) {
		return true;
	}
	if (m_state == audio::orchestra::state::closed) {
		ATA_ERROR("the stream is closed ... this shouldn't happen!");
		return false;
	}
	// Check if we were draining the stream and signal is finished.
	if (m_private->drainCounter > 3) {
		m_state = audio::orchestra::state::stopping;
		ATA_VERBOSE("Set state as stopping");
		if (m_private->internalDrain == true) {
			// NOTE(review): this std::thread object is heap-allocated and never
			// joined or deleted — the thread runs, but the object leaks. Confirm
			// whether coreStopStream is expected to be fire-and-forget.
			new std::thread(&audio::orchestra::api::Core::coreStopStream, this);
		} else {
			// external call to stopStream()
			m_private->condition.notify_one();
		}
		return true;
	}
	AudioDeviceID outputDevice = m_private->id[0];
	// Invoke user callback to get fresh output data UNLESS we are
	// draining stream or duplex mode AND the input/output devices are
	// different AND this function is called for the input device.
	if (m_private->drainCounter == 0 && (m_mode != audio::orchestra::mode_duplex || _deviceId == outputDevice)) {
		// Collect any under/overflow flags raised since the last callback so
		// they can be reported to the user callback, then clear them.
		std::vector<enum audio::orchestra::status> status;
		if (    m_mode != audio::orchestra::mode_input
		     && m_private->xrun[0] == true) {
			status.push_back(audio::orchestra::status::underflow);
			m_private->xrun[0] = false;
		}
		if (    m_mode != audio::orchestra::mode_output
		     && m_private->xrun[1] == true) {
			status.push_back(audio::orchestra::status::overflow);
			m_private->xrun[1] = false;
		}
		// User callback: input buffer is m_userBuffer[1], output is
		// m_userBuffer[0]. Return value 2 = abort now, 1 = drain then stop.
		int32_t cbReturnValue = m_callback(&m_userBuffer[1][0],
		                                   _inTime,
		                                   &m_userBuffer[0][0],
		                                   _outTime,
		                                   m_bufferSize,
		                                   status);
		if (cbReturnValue == 2) {
			m_state = audio::orchestra::state::stopping;
			ATA_VERBOSE("Set state as stopping");
			m_private->drainCounter = 2;
			abortStream();
			return true;
		} else if (cbReturnValue == 1) {
			m_private->drainCounter = 1;
			m_private->internalDrain = true;
		}
	}
	// ----- Output side: move user data into the CoreAudio output buffers.
	if (    m_mode == audio::orchestra::mode_output
	     || (    m_mode == audio::orchestra::mode_duplex
	          && _deviceId == outputDevice)) {
		if (m_private->drainCounter > 1) {
			// write zeros to the output stream
			if (m_private->nStreams[0] == 1) {
				memset(_outBufferList->mBuffers[m_private->iStream[0]].mData,
				       0,
				       _outBufferList->mBuffers[m_private->iStream[0]].mDataByteSize);
			} else {
				// fill multiple streams with zeros
				for (uint32_t i=0; i<m_private->nStreams[0]; i++) {
					memset(_outBufferList->mBuffers[m_private->iStream[0]+i].mData,
					       0,
					       _outBufferList->mBuffers[m_private->iStream[0]+i].mDataByteSize);
				}
			}
		} else if (m_private->nStreams[0] == 1) {
			if (m_doConvertBuffer[0]) {
				// convert directly to CoreAudio stream buffer
				convertBuffer((char*)_outBufferList->mBuffers[m_private->iStream[0]].mData,
				              &m_userBuffer[0][0],
				              m_convertInfo[0]);
			} else {
				// copy from user buffer
				memcpy(_outBufferList->mBuffers[m_private->iStream[0]].mData,
				       &m_userBuffer[0][0],
				       _outBufferList->mBuffers[m_private->iStream[0]].mDataByteSize);
			}
		} else {
			// fill multiple streams
			float *inBuffer = (float *) &m_userBuffer[0][0];
			if (m_doConvertBuffer[0]) {
				convertBuffer(m_deviceBuffer, &m_userBuffer[0][0], m_convertInfo[0]);
				inBuffer = (float *) m_deviceBuffer;
			}
			if (m_deviceInterleaved[0] == false) {
				// mono mode
				uint32_t bufferBytes = _outBufferList->mBuffers[m_private->iStream[0]].mDataByteSize;
				for (uint32_t i=0; i<m_nUserChannels[0]; i++) {
					memcpy(_outBufferList->mBuffers[m_private->iStream[0]+i].mData,
					       (void *)&inBuffer[i*m_bufferSize],
					       bufferBytes);
				}
			} else {
				// fill multiple multi-channel streams with interleaved data
				uint32_t streamChannels, channelsLeft, inJump, outJump, inOffset;
				float *out, *in;
				bool inInterleaved = true;
				uint32_t inChannels = m_nUserChannels[0];
				if (m_doConvertBuffer[0]) {
					inInterleaved = true; // device buffer will always be interleaved for nStreams > 1 and not mono mode
					inChannels = m_nDeviceChannels[0];
				}
				// inOffset: stride between consecutive frames of one channel
				// in the source (1 when interleaved, bufferSize when planar).
				if (inInterleaved) {
					inOffset = 1;
				} else {
					inOffset = m_bufferSize;
				}
				channelsLeft = inChannels;
				for (uint32_t i=0; i<m_private->nStreams[0]; i++) {
					in = inBuffer;
					out = (float *) _outBufferList->mBuffers[m_private->iStream[0]+i].mData;
					streamChannels = _outBufferList->mBuffers[m_private->iStream[0]+i].mNumberChannels;
					outJump = 0;
					// Account for possible channel offset in first stream
					if (i == 0 && m_channelOffset[0] > 0) {
						streamChannels -= m_channelOffset[0];
						outJump = m_channelOffset[0];
						out += outJump;
					}
					// Account for possible unfilled channels at end of the last stream
					if (streamChannels > channelsLeft) {
						outJump = streamChannels - channelsLeft;
						streamChannels = channelsLeft;
					}
					// Determine input buffer offsets and skips
					if (inInterleaved) {
						inJump = inChannels;
						in += inChannels - channelsLeft;
					} else {
						inJump = 1;
						in += (inChannels - channelsLeft) * inOffset;
					}
					// Frame copy loop. NOTE(review): this inner `i` shadows the
					// stream-index `i` of the enclosing loop — legal, but worth
					// renaming if this code is ever touched.
					for (uint32_t i=0; i<m_bufferSize; i++) {
						for (uint32_t j=0; j<streamChannels; j++) {
							*out++ = in[j*inOffset];
						}
						out += outJump;
						in += inJump;
					}
					channelsLeft -= streamChannels;
				}
			}
		}
		// While draining, count callbacks so the drain threshold above
		// (drainCounter > 3) is eventually reached; skip the input side.
		if (m_private->drainCounter) {
			m_private->drainCounter++;
			goto unlock;
		}
	}
	AudioDeviceID inputDevice;
	inputDevice = m_private->id[1];
	// ----- Input side: move CoreAudio input buffers into the user buffer.
	if (    m_mode == audio::orchestra::mode_input
	     || (    m_mode == audio::orchestra::mode_duplex
	          && _deviceId == inputDevice)) {
		if (m_private->nStreams[1] == 1) {
			if (m_doConvertBuffer[1]) {
				// convert directly from CoreAudio stream buffer
				convertBuffer(&m_userBuffer[1][0],
				              (char *) _inBufferList->mBuffers[m_private->iStream[1]].mData,
				              m_convertInfo[1]);
			} else {
				// copy to user buffer
				memcpy(&m_userBuffer[1][0],
				       _inBufferList->mBuffers[m_private->iStream[1]].mData,
				       _inBufferList->mBuffers[m_private->iStream[1]].mDataByteSize);
			}
		} else {
			// read from multiple streams
			float *outBuffer = (float *) &m_userBuffer[1][0];
			if (m_doConvertBuffer[1]) {
				outBuffer = (float *) m_deviceBuffer;
			}
			if (m_deviceInterleaved[1] == false) {
				// mono mode
				uint32_t bufferBytes = _inBufferList->mBuffers[m_private->iStream[1]].mDataByteSize;
				for (uint32_t i=0; i<m_nUserChannels[1]; i++) {
					memcpy((void *)&outBuffer[i*m_bufferSize],
					       _inBufferList->mBuffers[m_private->iStream[1]+i].mData,
					       bufferBytes);
				}
			} else {
				// read from multiple multi-channel streams
				uint32_t streamChannels, channelsLeft, inJump, outJump, outOffset;
				float *out, *in;
				bool outInterleaved = true;
				uint32_t outChannels = m_nUserChannels[1];
				if (m_doConvertBuffer[1]) {
					outInterleaved = true; // device buffer will always be interleaved for nStreams > 1 and not mono mode
					outChannels = m_nDeviceChannels[1];
				}
				// outOffset mirrors inOffset above, for the destination side.
				if (outInterleaved) {
					outOffset = 1;
				} else {
					outOffset = m_bufferSize;
				}
				channelsLeft = outChannels;
				for (uint32_t i=0; i<m_private->nStreams[1]; i++) {
					out = outBuffer;
					in = (float *) _inBufferList->mBuffers[m_private->iStream[1]+i].mData;
					streamChannels = _inBufferList->mBuffers[m_private->iStream[1]+i].mNumberChannels;
					inJump = 0;
					// Account for possible channel offset in first stream
					if (i == 0 && m_channelOffset[1] > 0) {
						streamChannels -= m_channelOffset[1];
						inJump = m_channelOffset[1];
						in += inJump;
					}
					// Account for possible unread channels at end of the last stream
					if (streamChannels > channelsLeft) {
						inJump = streamChannels - channelsLeft;
						streamChannels = channelsLeft;
					}
					// Determine output buffer offsets and skips
					if (outInterleaved) {
						outJump = outChannels;
						out += outChannels - channelsLeft;
					} else {
						outJump = 1;
						out += (outChannels - channelsLeft) * outOffset;
					}
					// Frame copy loop (inner `i` shadows the stream index, as on
					// the output side).
					for (uint32_t i=0; i<m_bufferSize; i++) {
						for (uint32_t j=0; j<streamChannels; j++) {
							out[j*outOffset] = *in++;
						}
						out += outJump;
						in += inJump;
					}
					channelsLeft -= streamChannels;
				}
			}
			if (m_doConvertBuffer[1]) {
				// convert from our internal "device" buffer
				convertBuffer(&m_userBuffer[1][0], m_deviceBuffer, m_convertInfo[1]);
			}
		}
	}
unlock:
	//m_mutex.unlock();
	audio::orchestra::Api::tickStreamTime();
	return true;
}
/*
 * Reads 14 bytes of accelerometer/gyro/temperature data plus 6 bytes of
 * magnetometer data over I2C once per second and renders them as a small
 * table on an ANSI/VT100-style serial terminal.
 */

/* Emit the UTF-8 degree sign (0xC2 0xB0) on the serial line. */
static void printDegreeSymbol(void) {
	transmitByte(0xc2);
	transmitByte(0xb0); // degree symbol
}

int main(void) {
	uint8_t rawBytes[14 + 6]; /* 14 accel/gyro/temp bytes + 6 mag bytes */
	int samples[7 + 3];       /* 7 decoded words + 3 magnetometer words */

	initUSART();
	i2c_init(); // init I2C interface

	// clear screen
	transmitByte(0x1b);
	printString("[2J");
	//search_i2c();

	/* configure NineAxis registers - note the bitwise '&': BOTH init calls
	   always run (no short-circuit), so the magnetometer gets configured
	   even if the first init reports failure. */
	if (init_nineAxis() & init_nineAxisMag()) {
		/* Echo serial input back to the terminal until an 'x' arrives. */
		char echoed;
		while ((echoed = receiveByte()) != 'x') {
			transmitByte(echoed);
		}

		for (;;) {
			read_nineAxis(59, rawBytes, 14);
			read_NineAxisMag(0x03, rawBytes + 14, 6);
			//swapBuffer(byteBuffer, 14+6);
			convertBuffer(rawBytes, samples, 7 + 3);

			// clear screen
			transmitByte(0x1b);
			printString("[2J");

			positionCursor(1, 8);
			printString("9-Axis readings");

			/* Column headers: X/Y/Z at row 3, columns 14/24/34. */
			{
				static const char *axisNames[3] = { "X", "Y", "Z" };
				int col;
				for (col = 0; col < 3; col++) {
					positionCursor(3, 14 + col * 10);
					printString(axisNames[col]);
				}
			}

			/* Row labels. */
			positionCursor(5, 4);
			printString("accel:");
			positionCursor(7, 5);
			printString("gyro:");
			positionCursor(9, 6);
			printString("mag:");
			positionCursor(11, 5);
			printString("temp:");

			/* Readings: accel -> samples[0..2], gyro -> samples[4..6],
			   mag -> samples[7..9]; rows 5/7/9, columns 10/20/30. */
			{
				static const int firstIndex[3] = { 0, 4, 7 };
				int row, axis;
				for (row = 0; row < 3; row++) {
					for (axis = 0; axis < 3; axis++) {
						positionCursor(5 + row * 2, 10 + axis * 10);
						printInt(samples[firstIndex[row] + axis]);
					}
				}
			}

			/* Temperature line: samples[3] holds the raw temperature word. */
			positionCursor(11, 10);
			float degC = samples[3] / 340.0 + 35;
			float degF = degC * 1.8 + 32;
			printInt((int) degC);
			printDegreeSymbol();
			printString("C (");
			printInt((int) degF);
			printDegreeSymbol();
			printString("F)");

			_delay_ms(1000);
		}
	}

	return 0;
}
//
// Take possession of a message buffer supplied by the caller
// ("borrowing"): if our own buffer holds no partial message, alias the
// caller's memory directly and record owner_ = 0 so we know the memory
// is not ours to delete; otherwise append the new bytes to the tail of
// our own buffer. Any bytes still pending in the transport are drained
// into our buffer first so byte ordering is preserved.
//
void ReadBuffer::readMessage(const unsigned char *message, unsigned int length)
{
  //
  // To be here we must be the real owner
  // of the buffer and there must not be
  // pending bytes in the transport.
  //

  #ifdef TEST
  if (owner_ == 0)
  {
    *logofs << "ReadBuffer: PANIC! Class for FD#"
            << transport_ -> fd() << " doesn't "
            << "appear to be the owner of the buffer "
            << "while borrowing from the caller.\n"
            << logofs_flush;

    HandleCleanup();
  }
  #endif

  //
  // Be sure that any outstanding data from
  // the transport is appended to our own
  // buffer.
  //

  if (transport_ -> pending() != 0)
  {
    #ifdef WARNING
    *logofs << "ReadBuffer: WARNING! Class for FD#"
            << transport_ -> fd() << " has pending "
            << "data in the transport while "
            << "borrowing from the caller.\n"
            << logofs_flush;
    #endif

    // Drain the transport into our buffer; if that left us borrowing
    // (owner_ == 0), consolidate back into our own storage.
    readMessage();

    if (owner_ == 0)
    {
      convertBuffer();
    }
  }

  //
  // Can't borrow the buffer if there is data
  // from a partial message. In this case add
  // the new data to the end of our buffer.
  //

  if (length_ == 0)
  {
    #ifdef TEST
    *logofs << "ReadBuffer: Borrowing " << length
            << " bytes from the caller for FD#"
            << transport_ -> fd() << " with "
            << length_ << " bytes in the buffer.\n"
            << logofs_flush;
    #endif

    delete [] buffer_;

    // NOTE(review): const is cast away here. owner_ = 0 below marks the
    // memory as not ours to free, but confirm no code path writes through
    // buffer_ while it aliases the caller's const data.
    buffer_ = (unsigned char *) message;

    size_   = length;
    length_ = length;

    owner_ = 0;
    start_ = 0;
  }
  else
  {
    #ifdef TEST
    *logofs << "ReadBuffer: Appending " << length
            << " bytes from the caller for FD#"
            << transport_ -> fd() << " with "
            << length_ << " bytes in the buffer.\n"
            << logofs_flush;
    #endif

    appendBuffer(message, length);
  }
}
//
// Locate the next complete message in the buffered data starting at
// start_. On success, returns a pointer into buffer_ (to the payload for
// data messages, to the control header otherwise) and advances start_ /
// shrinks length_ past the consumed message; controlLength and dataLength
// are filled in by locateMessage(). Returns NULL when no complete message
// is available; in that case a borrowed buffer (owner_ == 0) is either
// released (if empty) or consolidated into our own storage.
//
const unsigned char *ReadBuffer::getMessage(unsigned int &controlLength,
                                                unsigned int &dataLength)
{
  #ifdef TEST
  if (transport_ -> pending() > 0)
  {
    *logofs << "ReadBuffer: PANIC! The transport "
            << "appears to have data pending.\n"
            << logofs_flush;

    HandleCleanup();
  }
  #endif

  if (length_ == 0)
  {
    #ifdef DEBUG
    *logofs << "ReadBuffer: No message can be located "
            << "for FD#" << transport_ -> fd() << ".\n"
            << logofs_flush;
    #endif

    // Buffer is empty: if we were borrowing the caller's memory, just
    // drop the alias (never delete it) and become owner of "nothing".
    if (owner_ == 0)
    {
      buffer_ = NULL;
      size_   = 0;

      transport_ -> pendingReset();

      owner_ = 1;
      start_ = 0;
    }

    return NULL;
  }

  unsigned int trailerLength;

  #ifdef DEBUG
  *logofs << "ReadBuffer: Going to locate message with "
          << "start at " << start_ << " and length "
          << length_ << " for FD#" << transport_ -> fd()
          << ".\n" << logofs_flush;
  #endif

  int located = locateMessage(buffer_ + start_, buffer_ + start_ + length_,
                                  controlLength, dataLength, trailerLength);

  if (located == 0)
  {
    //
    // No more complete messages are in
    // the buffer.
    //

    #ifdef DEBUG
    *logofs << "ReadBuffer: No message was located "
            << "for FD#" << transport_ -> fd() << ".\n"
            << logofs_flush;
    #endif

    if (owner_ == 0)
    {
      //
      // Must move the remaining bytes in
      // our own buffer.
      //

      convertBuffer();
    }

    return NULL;
  }
  else
  {
    const unsigned char *result = buffer_ + start_;

    if (dataLength > 0)
    {
      //
      // Message contains data, so go to the
      // first byte of payload.
      //

      result += trailerLength;

      start_  += (dataLength + trailerLength);
      length_ -= (dataLength + trailerLength);
    }
    else
    {
      //
      // It is a control message.
      //

      start_  += (controlLength + trailerLength);
      length_ -= (controlLength + trailerLength);
    }

    #ifdef DEBUG
    *logofs << "ReadBuffer: Located message for FD#"
            << transport_ -> fd() << " with control length "
            << controlLength << " and data length "
            << dataLength << ".\n" << logofs_flush;
    #endif

    // A complete message was handed out, so nothing is "remaining".
    remaining_ = 0;

    return result;
  }
}