Example #1
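// Logging thread loop: bail out if the database schema is invalid, otherwise flush buffered log entries once a second until the thread is asked to exit.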
void EdoGaduLog::run()
{
	if (!checkSchema())
	{
		Logger::writeToLog (T("EdoGaduLog::run schema is invalid, can't log"));
		return;
	}
	else
	{
		Logger::writeToLog (T("EdoGaduLog::run schema created or is valid"));
	}

	while (1)
	{
		wait (1000);

		if (threadShouldExit())
		{
			flushBuffers();
			return;
		}

		flushBuffers();
	}
}
Example #2
void Node::run() {
	//Wait for the network connection to be set up; this delay may be reduced
	waitUntil(NOW()+5000*MILLISECONDS);
	//Detects the nodes ID
	nodeID = getNodeNumber();

	PRINTF("This nodes ID: %i\n", nodeID);

	//Used to count the times a worker could not be detected
	short failcounter = 0;

	//Used for reading from the workerAlive topic
	long workerTopicID;

	//true when this node is the worker, false when it is a monitor
	//Each node starts as a monitor.
	bool worker = false;

	//Endless loop. Components should never stop working
	while(true) {
		PRINTF("Being monitor now!\n");

		//Monitoring loop
		while(!worker) {
			//If there was no topic from the worker within the specified time...
			if(workerAlive.waitAndGet(workerTopicID,workerTimeOut) == -1) {
				//... then increase failcounter
				++failcounter;
				//If we could not detect a worker several times in a row, we become the worker ourselves
				if(failcounter > 2) worker = true;
			}else {
				//Worker topic found, resetting the counter
				failcounter = 0;
			}
		}

		//During role changes we flush the buffers
		flushBuffers();

		PRINTF("Being the worker now!\n");
		//Working loop
		while(worker)
		{
			//Publish worker topic //"I'm alive"
			workerAlive.publish(nodeID);
			//Work
			work();

			//Handle all messages
			worker = wHandleMessages();
		}

		//During role changes we flush the buffers
		flushBuffers();
	}
}
Example #3
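// Append a quad as two triangles (v1-v2-v3 and v2-v3-v4, six vertices) to CanvasVertexBuffer, flushing first if fewer than six slots remain.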
void EJCanvasContext::pushQuad(EJVector2 v1, EJVector2 v2, EJVector2 v3, EJVector2 v4, EJVector2 t1, EJVector2 t2, EJVector2 t3, EJVector2 t4, EJColorRGBA color, CGAffineTransform transform)
{
	if( vertexBufferIndex >= EJ_CANVAS_VERTEX_BUFFER_SIZE - 6 ) {
		flushBuffers();
	}
	
	if( !CGAffineTransformIsIdentity(transform) ) {
		v1 = EJVector2ApplyTransform( v1, transform );
		v2 = EJVector2ApplyTransform( v2, transform );
		v3 = EJVector2ApplyTransform( v3, transform );
		v4 = EJVector2ApplyTransform( v4, transform );
	}
	
	EJVertex * vb = &CanvasVertexBuffer[vertexBufferIndex];

	EJVertex vb_0 = { v1, t1, color };
	EJVertex vb_1 = { v2, t2, color };
	EJVertex vb_2 = { v3, t3, color };
	EJVertex vb_3 = { v2, t2, color };
	EJVertex vb_4 = { v3, t3, color };
	EJVertex vb_5 = { v4, t4, color };

	vb[0] = vb_0;
	vb[1] = vb_1;
	vb[2] = vb_2;
	vb[3] = vb_3;
	vb[4] = vb_4;
	vb[5] = vb_5;
	
	vertexBufferIndex += 6;
}
Example #4
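// Append a single triangle (three vertices) to CanvasVertexBuffer, flushing first if fewer than three slots remain.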
void EJCanvasContext::pushTri(float x1, float y1, float x2, float y2, float x3, float y3, EJColorRGBA color, CGAffineTransform transform)
{
	if( vertexBufferIndex >= EJ_CANVAS_VERTEX_BUFFER_SIZE - 3 ) {
		flushBuffers();
	}
	
	EJVector2 d1 = { x1, y1 };
	EJVector2 d2 = { x2, y2 };
	EJVector2 d3 = { x3, y3 };
	
	if( !CGAffineTransformIsIdentity(transform) ) {
		d1 = EJVector2ApplyTransform( d1, transform );
		d2 = EJVector2ApplyTransform( d2, transform );
		d3 = EJVector2ApplyTransform( d3, transform );
	}
	
	EJVertex * vb = &CanvasVertexBuffer[vertexBufferIndex];

	EJVertex vb_0 = { d1, {0.5, 1}, color };
	EJVertex vb_1 = { d2, {0.5, 0.5}, color };
	EJVertex vb_2 = { d3, {0.5, 1}, color };

	vb[0] = vb_0;
	vb[1] = vb_1;
	vb[2] = vb_2;
	
	vertexBufferIndex += 3;
}
Example #5
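// Replace the current clip path with a copy of the active path and rasterize it into the depth buffer.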
void EJCanvasContext::clip()
{
	flushBuffers();
	
	// Release the previous clip path, if any, before replacing it
	// (resetClip() guards this pointer the same way, so it can be NULL here)
	if( state->clipPath ) {
		state->clipPath->release();
		state->clipPath = NULL;
	}
	
	state->clipPath = (EJPath*)(path->copy());
	state->clipPath->drawPolygonsToContext(this, kEJPathPolygonTargetDepth);
}
Example #6
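// Read back an sw x sh block of RGBA pixels from the framebuffer into a new, autoreleased EJImageData.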
EJImageData* EJCanvasContext::getImageData(float sx, float sy, float sw, float sh)
{
	flushBuffers();
	GLubyte * pixels = (GLubyte*)malloc( (size_t)sw * (size_t)sh * 4 * sizeof(GLubyte));
	glReadPixels((GLint)sx, (GLint)sy, (GLsizei)sw, (GLsizei)sh, GL_RGBA, GL_UNSIGNED_BYTE, pixels);
	EJImageData* imageData = new EJImageData((int)sw, (int)sh, pixels);
	imageData->autorelease();
	return imageData;
}
Example #7
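 // Disconnect and flush pending data before teardown; close the codec only while the format context and stream are still valid.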
 Stream::~Stream()
 {
     disconnect();
     flushBuffers();
     
     if (m_formatCtx && m_stream && m_stream->codec)
     {
         avcodec_close(m_stream->codec);
     }
 }
Example #8
//==============================================================================
// BufferedOutputStream::close
//
//==============================================================================
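// Flush and free the buffers, then close and release the wrapped output stream.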
void BufferedOutputStream::close()
{
	if(m_rpOutputStream)
	{
		flushBuffers();
		freeBuffers();
		m_rpOutputStream->close();
		m_rpOutputStream.release();
	}
}
Example #9
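	// Idempotent close: mark the distributor closed, flush outgoing buffers, and wait for the other participants to finish.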
	void GraphDistributor::close()
	{
		if (!isOpen) {
			log << "\nClosing not openned distributor!\n";
			return;
		}

		isOpen = false;
		flushBuffers();
		waitForOthersToEnd();
	}
Example #10
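// Drop the clip path and reset the depth buffer that implements clipping, so subsequent draws are unclipped.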
void EJCanvasContext::resetClip()
{
	if( state->clipPath ) {
		flushBuffers();
		state->clipPath->release();
		state->clipPath = NULL;
		
		glDepthMask(GL_TRUE);
		glClear(GL_DEPTH_BUFFER_BIT);
		glDepthMask(GL_FALSE);
		glDepthFunc(GL_ALWAYS);
	}
}
Example #11
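// Draw the image data's backing texture untinted at (dx, dy) and flush immediately so the pixels reach the framebuffer.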
void EJCanvasContext::putImageData(EJImageData* imageData, float dx, float dy)
{
	EJTexture * texture = imageData->m_texture;
	setTexture(texture);
	
	short tw = texture->realWidth;
	short th = texture->realHeight;
	
	static EJColorRGBA white = {0xffffffff};
	
	pushRect(dx, dy, tw, th, 0, 0, 1, 1, white, CGAffineTransformIdentity);
	flushBuffers();

}
Example #12
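 // Stop and detach from the timer, then close the demuxing context (which also frees its codec contexts and streams) and flush what remains.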
 Demuxer::~Demuxer()
 {
     if (m_timer->getStatus() != Stopped)
         m_timer->stop();
     
     m_timer->removeObserver(*this);
     
     if (m_formatCtx)
     {
         // Be very careful with this call: it'll also destroy its codec contexts and streams
         avformat_close_input(&m_formatCtx);
     }
     
     flushBuffers();
 }
Example #13
// Prepare the upstream source at the ratio-scaled block size and rate, allocate the work buffers, rebuild the low-pass filter, and clear any previous filter state.
void ResamplingAudioSource::prepareToPlay (int samplesPerBlockExpected, double sampleRate)
{
    const SpinLock::ScopedLockType sl (ratioLock);

    const int scaledBlockSize = roundToInt (samplesPerBlockExpected * ratio);
    input->prepareToPlay (scaledBlockSize, sampleRate * ratio);

    buffer.setSize (numChannels, scaledBlockSize + 32);

    filterStates.calloc ((size_t) numChannels);
    srcBuffers.calloc ((size_t) numChannels);
    destBuffers.calloc ((size_t) numChannels);
    createLowPass (ratio);

    flushBuffers();
}
Example #14
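// Switch the active texture, flushing queued vertices first since they were built for the old texture; toggles GL_TEXTURE_2D as needed.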
void EJCanvasContext::setTexture(EJTexture * newTexture)
{
	if( currentTexture == newTexture ) { return; }
	
	flushBuffers();
		
	if( !newTexture && currentTexture ) {
		// Was enabled; should be disabled
		glDisable(GL_TEXTURE_2D);
	}
	else if( newTexture && !currentTexture ) {
		// Was disabled; should be enabled
		glEnable(GL_TEXTURE_2D);
	}
	
	currentTexture = newTexture;
	if( currentTexture ) currentTexture->bind();
}
Example #15
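	// One depth-building step: broadcast the depths of freshly filled local vertices, then agree via a logical-OR Allreduce whether another step is needed.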
	void DepthBuilderBuffered::buildNextStep()
	{
		prepareBuffers();
		isNextStepRequired = false;

		//main action
		for (Vertex localVertex = 0; localVertex < graph->numLocalVertex; ++localVertex) {
			if (vertexState[localVertex] == stateJustFilled) {
				//printf("%d: dist vertex, depth [%ld]\n", rank, depth[localVertex]);
				distributeVertexDepth(localVertex);
				vertexState[localVertex] = stateSent;
			}

			probeSynchData();
		}

		flushBuffers();
		waitForOthersToEnd();
		comm->Allreduce(IN_PLACE, &isNextStepRequired, 1, SHORT, LOR);
	}
Example #16
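// Flush pending capture data, stop and destroy the OpenSL ES recorder, and reset the internal buffers (and the push-mode IO device, if any).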
void QOpenSLESAudioInput::stopRecording()
{
    flushBuffers();

    (*m_recorder)->SetRecordState(m_recorder, SL_RECORDSTATE_STOPPED);
    (*m_bufferQueue)->Clear(m_bufferQueue);

    (*m_recorderObject)->Destroy(m_recorderObject);
    m_recorderObject = 0;

    for (int i = 0; i < NUM_BUFFERS; ++i)
        m_buffers[i].clear();
    m_currentBuffer = 0;

    if (!m_pullMode && m_bufferIODevice) {
        m_bufferIODevice->close();
        delete m_bufferIODevice;
        m_bufferIODevice = 0;
        m_pushBuffer.clear();
    }
}
Example #17
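// Append an axis-aligned rectangle as two triangles (six vertices) with the given texture coordinates, flushing first if fewer than six slots remain.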
void EJCanvasContext::pushRect(float x, float y, float w, float h, float tx, float ty, float tw, float th, EJColorRGBA color, CGAffineTransform transform)
{
	if( vertexBufferIndex >= EJ_CANVAS_VERTEX_BUFFER_SIZE - 6 ) {
		flushBuffers();
	}
	
	EJVector2 d11 = { x, y };
	EJVector2 d21 = { x+w, y };
	EJVector2 d12 = { x, y+h };
	EJVector2 d22 = { x+w, y+h };
	
	if( !CGAffineTransformIsIdentity(transform) ) {
		d11 = EJVector2ApplyTransform( d11, transform );
		d21 = EJVector2ApplyTransform( d21, transform );
		d12 = EJVector2ApplyTransform( d12, transform );
		d22 = EJVector2ApplyTransform( d22, transform );
	}
	
	EJVertex * vb = &CanvasVertexBuffer[vertexBufferIndex];

	EJVertex vb_0 = { d11, {tx, ty}, color };	// top left
	EJVertex vb_1 = { d21, {tx+tw, ty}, color };	// top right
	EJVertex vb_2 = { d12, {tx, ty+th}, color };	// bottom left

	EJVertex vb_3 = { d21, {tx+tw, ty}, color };	// top right
	EJVertex vb_4 = { d12, {tx, ty+th}, color };	// bottom left
	EJVertex vb_5 = { d22, {tx+tw, ty+th}, color };	// bottom right

	vb[0] = vb_0;	// top left
	vb[1] = vb_1;	// top right
	vb[2] = vb_2;	// bottom left
		
	vb[3] = vb_3;	// top right
	vb[4] = vb_4;	// bottom left
	vb[5] = vb_5;	// bottom right
	
	vertexBufferIndex += 6;
}
Example #18
EJImageData* EJCanvasContextScreen::getImageData(float sx, float sy, float sw, float sh)
{
	if(backingStoreRatio != 1 && EJTexture::smoothScaling()) {
		NSLOG(
			"Warning: The screen canvas has been scaled; getImageData() may not work as expected. \n%s",
			"Set imageSmoothingEnabled=false or use an off-screen Canvas for more accurate results."
		);
	}
	
	flushBuffers();
	
	// Read pixels; take care of the upside down screen layout and the backingStoreRatio
	int internalWidth = (int)(sw * backingStoreRatio);
	int internalHeight = (int)(sh * backingStoreRatio);
	int internalX = (int)(sx * backingStoreRatio);
	int internalY = (int)((height-sy-sh) * backingStoreRatio);
	
	EJColorRGBA * internalPixels = (EJColorRGBA*)malloc( internalWidth * internalHeight * sizeof(EJColorRGBA));
	glReadPixels( internalX, internalY, internalWidth, internalHeight, GL_RGBA, GL_UNSIGNED_BYTE, internalPixels );

	GLubyte * pixels = (GLubyte*)malloc( (size_t)sw * (size_t)sh * sizeof(GLubyte) * 4);
	int index = 0;
	for( int y = 0; y < sh; y++ ) {
		for( int x = 0; x < sw; x++ ) {
			int internalIndex = (int)((sh-y-1) * backingStoreRatio) * internalWidth + (int)(x * backingStoreRatio);
			pixels[ index *4 + 0 ] = (GLubyte)internalPixels[ internalIndex ].rgba.r;
			pixels[ index *4 + 1 ] = (GLubyte)internalPixels[ internalIndex ].rgba.g;
			pixels[ index *4 + 2 ] = (GLubyte)internalPixels[ internalIndex ].rgba.b;
			pixels[ index *4 + 3 ] = (GLubyte)internalPixels[ internalIndex ].rgba.a;
			index++;
		}
	}
	free(internalPixels);
	
	EJImageData* imageData = new EJImageData((int)sw, (int)sh, pixels);
	imageData->autorelease();
	return imageData;
}
Example #19
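 // Seek handler: flush demuxer and stream buffers, seek near the target, then iteratively nudge the timestamp until no connected stream decodes too late (and none is unreasonably early).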
 bool Demuxer::didSeek(const Timer &timer, sf::Time oldPosition)
 {
     resetEndOfFileStatus();
     sf::Time newPosition = timer.getOffset();
     std::set< std::shared_ptr<Stream> > connectedStreams;
     
     if (m_connectedVideoStream)
         connectedStreams.insert(m_connectedVideoStream);
     if (m_connectedAudioStream)
         connectedStreams.insert(m_connectedAudioStream);
     if (m_connectedSubtitleStream)
         connectedStreams.insert(m_connectedSubtitleStream);
     
     CHECK(!connectedStreams.empty(), "Inconsistency error: seeking with no active stream");
     
     // Trivial seeking to beginning
     if (newPosition == sf::Time::Zero)
     {
         int64_t timestamp = 0;
         
         if (m_formatCtx->iformat->flags & AVFMT_SEEK_TO_PTS && m_formatCtx->start_time != AV_NOPTS_VALUE)
             timestamp += m_formatCtx->start_time;
         
         
         // Flush all streams
         for (std::shared_ptr<Stream> stream : connectedStreams)
             stream->flushBuffers();
         flushBuffers();
         
         // Seek to beginning
         int err = avformat_seek_file(m_formatCtx, -1, INT64_MIN, timestamp, INT64_MAX, AVSEEK_FLAG_BACKWARD);
         if (err < 0)
         {
             sfeLogError("Error while seeking at time " + s(newPosition.asMilliseconds()) + "ms");
             return false;
         }
     }
     else // Seeking to some other position
     {
         // Initial target seek point
         int64_t timestamp = newPosition.asSeconds() * AV_TIME_BASE;
         
         // < 0 = before seek point
         // > 0 = after seek point
         std::map< std::shared_ptr<Stream>, sf::Time> seekingGaps;
         
         static const float brokenSeekingThreshold = 60.f; // seconds
         bool didReseekBackward = false;
         bool didReseekForward = false;
         int tooEarlyCount = 0;
         int tooLateCount = 0;
         int brokenSeekingCount = 0;
         int ffmpegSeekFlags = AVSEEK_FLAG_BACKWARD;
         
         do
         {
             // Flush all streams
             for (std::shared_ptr<Stream> stream : connectedStreams)
                 stream->flushBuffers();
             flushBuffers();
             
             // Seek to new estimated target
             if (m_formatCtx->iformat->flags & AVFMT_SEEK_TO_PTS && m_formatCtx->start_time != AV_NOPTS_VALUE)
                 timestamp += m_formatCtx->start_time;
             
             int err = avformat_seek_file(m_formatCtx, -1, timestamp - 10 * AV_TIME_BASE,
                                          timestamp, timestamp, ffmpegSeekFlags);
             CHECK0(err, "avformat_seek_file failure");
             
             // Compute the new gap
             for (std::shared_ptr<Stream> stream : connectedStreams)
             {
                 sf::Time gap = stream->computeEncodedPosition() - newPosition;
                 seekingGaps[stream] = gap;
             }
             
             tooEarlyCount = 0;
             tooLateCount = 0;
             brokenSeekingCount = 0;
             
             // Check the current situation
             for (std::pair< std::shared_ptr<Stream>, sf::Time>&& gapByStream : seekingGaps)
             {
                 // < 0 = before seek point
                 // > 0 = after seek point
                 const sf::Time& gap = gapByStream.second;
                 float absoluteDiff = fabs(gap.asSeconds());
                 
                 // Before seek point
                 if (gap < sf::Time::Zero)
                 {
                     if (absoluteDiff > brokenSeekingThreshold)
                     {
                         brokenSeekingCount++;
                         tooEarlyCount++;
                     }
                 
                     // else: a bit early but not too much, this is the final situation we want
                 }
                 // After seek point
                 else if (gap > sf::Time::Zero)
                 {
                     tooLateCount++;
                 
                     if (absoluteDiff > brokenSeekingThreshold)
                         brokenSeekingCount++; // TODO: unhandled for now => should seek to non-key frame
                 }
                 
                 if (brokenSeekingCount > 0)
                     sfeLogWarning("Seeking on " + gapByStream.first->description() + " is broken! Gap: "
                                   + s(gap.asSeconds()) + "s");
             }
             
             CHECK(false == (tooEarlyCount && tooLateCount),
                   "Both too late and too early for different streams, unhandled situation!");
             
             // Define what to do next
             if (tooEarlyCount)
             {
                 // Go forward by 1 sec
                 timestamp += AV_TIME_BASE;
                 didReseekForward = true;
             }
             else if (tooLateCount)
             {
                 // Go backward by 1 sec
                 timestamp -= AV_TIME_BASE;
                 didReseekBackward = true;
             }
             
             if (brokenSeekingCount)
             {
                 if (ffmpegSeekFlags & AVSEEK_FLAG_ANY)
                 {
                     sfeLogError("Seeking is really broken in the media, giving up");
                     return false;
                 }
                 else
                 {
                     // Try to seek to non-key frame before giving up
                     // Image may be wrong but it's better than nothing :)
                     ffmpegSeekFlags |= AVSEEK_FLAG_ANY;
                     sfeLogError("Media has broken seeking index, trying to seek to non-key frame");
                 }
             }
             
             CHECK(!(didReseekBackward && didReseekForward), "infinitely seeking backward and forward");
         }
         while (tooEarlyCount != 0 || tooLateCount != 0);
     }
     
     return true;
 }
Example #20
//==============================================================================
// BufferedOutputStream::flush
//
//==============================================================================
void BufferedOutputStream::flush()
{
	flushBuffers();
	m_rpOutputStream->flush();
}
Example #21
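// Stop the OpenAL source and flush queued buffers; the 'drain' flag is accepted but not honored here.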
void OpenALRenderer::stop(bool drain)
{
    alSourceStop(m_AudioSource);
    flushBuffers();
}