Example #1
void ThreadPool::job_worker(ThreadPool* th, uint32_t index)
{
	setTLSSys(th->m_sys);

	ThreadProfile* profile=th->m_sys->allocateProfiler(RGB(200,200,0));
	char buf[16];
	snprintf(buf,16,"Thread %u",index);
	profile->setTag(buf);

	Chronometer chronometer;
	while(1)
	{
		th->num_jobs.wait();
		if(th->stopFlag)
			return;
		Locker l(th->mutex);
		IThreadJob* myJob=th->jobs.front();
		th->jobs.pop_front();
		th->curJobs[index]=myJob;
		l.release();

		chronometer.checkpoint();
		try
		{
			// a job may have been queued before forcestop() was called; do not execute it if we are stopping
			if(th->stopFlag)
				return;
			myJob->execute();
		}
		catch(JobTerminationException& ex)
		{
			LOG(LOG_NOT_IMPLEMENTED,"Job terminated");
		}
		catch(LightsparkException& e)
		{
			LOG(LOG_ERROR,_("Exception in ThreadPool ") << e.what());
			th->m_sys->setError(e.cause);
		}
		catch(std::exception& e)
		{
			LOG(LOG_ERROR,"std Exception in ThreadPool:"<<myJob<<" "<<e.what());
			th->m_sys->setError(e.what());
		}
		
		profile->accountTime(chronometer.checkpoint());

		l.acquire();
		th->curJobs[index]=NULL;
		l.release();

		//jobFencing is allowed to happen outside the mutex
		myJob->jobFence();
	}
}
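
The worker above is only the consumer half of the pool. Below is a minimal standalone sketch of the producer half it pairs with; standard C++20 primitives stand in for the project's own Semaphore/Mutex/Locker types, and the member and method names are illustrative assumptions, not the real declarations.

#include <atomic>
#include <cstddef>
#include <deque>
#include <mutex>
#include <semaphore>

struct IThreadJob
{
	virtual void execute() = 0;
	virtual void jobFence() = 0;
	virtual ~IThreadJob() = default;
};

class ThreadPoolSketch
{
public:
	// Queue a job and wake exactly one worker blocked on the job-count semaphore.
	void addJob(IThreadJob* job)
	{
		{
			std::lock_guard<std::mutex> l(mutex);
			jobs.push_back(job);
		}
		num_jobs.release();
	}
	// Raise stopFlag and wake every worker so the early "if(stopFlag) return" paths run.
	void forcestop(std::size_t numWorkers)
	{
		stopFlag = true;
		num_jobs.release(static_cast<std::ptrdiff_t>(numWorkers));
	}
private:
	std::mutex mutex;
	std::deque<IThreadJob*> jobs;
	std::counting_semaphore<> num_jobs{0};
	std::atomic<bool> stopFlag{false};
};

The worker's num_jobs.wait() corresponds to acquire() here; one release() per queued job keeps the semaphore count equal to the queue length, and forcestop() releases once per worker so none stay blocked.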
Example #2
void NetStream::execute()
{
	if(downloader->hasFailed())
	{
		sys->currentVm->addEvent(this,Class<Event>::getInstanceS("ioError"));
		sys->downloadManager->destroy(downloader);
		return;
	}

	//The downloader hasn't failed yet at this point

	//mutex access to downloader
	istream s(downloader);
	s.exceptions ( istream::eofbit | istream::failbit | istream::badbit );

	ThreadProfile* profile=sys->allocateProfiler(RGB(0,0,200));
	profile->setTag("NetStream");
	//We need to catch possible EOF and other error conditions in the non-reliable stream
	uint32_t decodedAudioBytes=0;
	uint32_t decodedVideoFrames=0;
	//The decoded time is computed from the decodedAudioBytes to avoid drifts
	uint32_t decodedTime=0;
	bool waitForFlush=true;
	try
	{
		ScriptDataTag tag;
		Chronometer chronometer;
		STREAM_TYPE t=classifyStream(s);
		if(t==FLV_STREAM)
		{
			FLV_HEADER h(s);
			if(!h.isValid())
				throw ParseException("FLV is not valid");

			unsigned int prevSize=0;
			bool done=false;
			do
			{
				//Check if threadAbort has been called; if so, stop this loop
				if(closed)
					done = true;
				UI32 PreviousTagSize;
				s >> PreviousTagSize;
				PreviousTagSize.bswap();
				assert_and_throw(PreviousTagSize==prevSize);

				//Check tag type and read it
				UI8 TagType;
				s >> TagType;
				switch(TagType)
				{
					case 8:
					{
						AudioDataTag tag(s);
						prevSize=tag.getTotalLen();

						if(audioDecoder==NULL)
						{
							audioCodec=tag.SoundFormat;
							switch(tag.SoundFormat)
							{
								case AAC:
									assert_and_throw(tag.isHeader());
#ifdef ENABLE_LIBAVCODEC
									audioDecoder=new FFMpegAudioDecoder(tag.SoundFormat,
											tag.packetData, tag.packetLen);
#else
									audioDecoder=new NullAudioDecoder();
#endif
									tag.releaseBuffer();
									break;
								case MP3:
#ifdef ENABLE_LIBAVCODEC
									audioDecoder=new FFMpegAudioDecoder(tag.SoundFormat,NULL,0);
#else
									audioDecoder=new NullAudioDecoder();
#endif
									decodedAudioBytes+=
										audioDecoder->decodeData(tag.packetData,tag.packetLen,decodedTime);
									//Adjust timing
									decodedTime=decodedAudioBytes/audioDecoder->getBytesPerMSec();
									break;
								default:
									throw RunTimeException("Unsupported SoundFormat");
							}
							if(audioDecoder->isValid() && sys->audioManager->pluginLoaded())
								audioStream=sys->audioManager->createStreamPlugin(audioDecoder);
						}
						else
						{
							assert_and_throw(audioCodec==tag.SoundFormat);
							decodedAudioBytes+=
								audioDecoder->decodeData(tag.packetData,tag.packetLen,decodedTime);
							if(audioStream==0 && audioDecoder->isValid() && sys->audioManager->pluginLoaded())
								audioStream=sys->audioManager->createStreamPlugin(audioDecoder);
							//Adjust timing
							decodedTime=decodedAudioBytes/audioDecoder->getBytesPerMSec();
						}
						break;
					}
					case 9:
					{
						VideoDataTag tag(s);
						prevSize=tag.getTotalLen();
						//If the framerate is known give the right timing, otherwise use decodedTime from audio
						uint32_t frameTime=(frameRate!=0.0)?(decodedVideoFrames*1000/frameRate):decodedTime;

						if(videoDecoder==NULL)
						{
							//If the isHeader flag is on then the decoder becomes the owner of the data
							if(tag.isHeader())
							{
								//The tag is the header, initialize decoding
#ifdef ENABLE_LIBAVCODEC
								videoDecoder=
									new FFMpegVideoDecoder(tag.codec,tag.packetData,tag.packetLen, frameRate);
#else
								videoDecoder=new NullVideoDecoder();
#endif
								tag.releaseBuffer();
							}
							else
							{
								//First packet but no special handling
#ifdef ENABLE_LIBAVCODEC
								videoDecoder=new FFMpegVideoDecoder(tag.codec,NULL,0,frameRate);
#else
								videoDecoder=new NullVideoDecoder();
#endif
								videoDecoder->decodeData(tag.packetData,tag.packetLen, frameTime);
								decodedVideoFrames++;
							}
							Event* status=Class<NetStatusEvent>::getInstanceS("status", "NetStream.Play.Start");
							getVm()->addEvent(this, status);
							status->decRef();
							status=Class<NetStatusEvent>::getInstanceS("status", "NetStream.Buffer.Full");
							getVm()->addEvent(this, status);
							status->decRef();
						}
						else
						{
							videoDecoder->decodeData(tag.packetData,tag.packetLen, frameTime);
							decodedVideoFrames++;
						}
						break;
					}
					case 18:
					{
						tag = ScriptDataTag(s);
						prevSize=tag.getTotalLen();

						//The frameRate of the container overrides the stream
						
						if(tag.metadataDouble.find("framerate") != tag.metadataDouble.end())
							frameRate=tag.metadataDouble["framerate"];
						break;
					}
					default:
						LOG(LOG_ERROR,_("Unexpected tag type ") << (int)TagType << _(" in FLV"));
						threadAbort();
				}
				if(!tickStarted && isReady())
				{
					{
						multiname onMetaDataName;
						onMetaDataName.name_type=multiname::NAME_STRING;
						onMetaDataName.name_s="onMetaData";
						onMetaDataName.ns.push_back(nsNameAndKind("",NAMESPACE));
						ASObject* callback = client->getVariableByMultiname(onMetaDataName);
						if(callback && callback->getObjectType() == T_FUNCTION)
						{
							ASObject* callbackArgs[1];
							ASObject* metadata = Class<ASObject>::getInstanceS();
							if(tag.metadataDouble.find("width") != tag.metadataDouble.end())
								metadata->setVariableByQName("width", "", 
										abstract_d(tag.metadataDouble["width"]));
							else
								metadata->setVariableByQName("width", "", abstract_d(getVideoWidth()));
							if(tag.metadataDouble.find("height") != tag.metadataDouble.end())
								metadata->setVariableByQName("height", "", 
										abstract_d(tag.metadataDouble["height"]));
							else
								metadata->setVariableByQName("height", "", abstract_d(getVideoHeight()));

							if(tag.metadataDouble.find("framerate") != tag.metadataDouble.end())
								metadata->setVariableByQName("framerate", "", 
										abstract_d(tag.metadataDouble["framerate"]));
							if(tag.metadataDouble.find("duration") != tag.metadataDouble.end())
								metadata->setVariableByQName("duration", "", 
										abstract_d(tag.metadataDouble["duration"]));
							if(tag.metadataInteger.find("canseekontime") != tag.metadataInteger.end())
								metadata->setVariableByQName("canSeekToEnd", "", 
										abstract_b(tag.metadataInteger["canseekontime"] == 1));

							if(tag.metadataDouble.find("audiodatarate") != tag.metadataDouble.end())
								metadata->setVariableByQName("audiodatarate", "", 
										abstract_d(tag.metadataDouble["audiodatarate"]));
							if(tag.metadataDouble.find("videodatarate") != tag.metadataDouble.end())
								metadata->setVariableByQName("videodatarate", "", 
										abstract_d(tag.metadataDouble["videodatarate"]));

							//TODO: missing: audiocodecid (Number), cuePoints (Object[]), 
							//videocodecid (Number), custommetadata's
							callbackArgs[0] = metadata;
							client->incRef();
							metadata->incRef();
							FunctionEvent* event = 
								new FunctionEvent(static_cast<IFunction*>(callback), client, callbackArgs, 1);
							getVm()->addEvent(NULL,event);
							event->decRef();
						}
					}

					tickStarted=true;
					if(frameRate==0)
					{
						assert(videoDecoder->frameRate);
						frameRate=videoDecoder->frameRate;
					}
					sys->addTick(1000/frameRate,this);
					//Also ask for a render rate equal to the video one (capped at 24)
					float localRenderRate=dmin(frameRate,24);
					sys->setRenderRate(localRenderRate);
				}
				profile->accountTime(chronometer.checkpoint());
				if(aborting)
				{
					throw JobTerminationException();
				}
			}
			while(!done);
		}
		else
			threadAbort();

	}
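
For reference, this is the container framing that the loop above walks: each iteration checks the 32-bit big-endian size of the previous tag, reads one type byte (8 = audio, 9 = video, 18 = script data), and the tag's total length (11-byte header plus payload) becomes the next expected size. A self-contained sketch using plain standard C++ in place of the project's UI32/UI8 stream types; the helper names are illustrative.

#include <cstddef>
#include <cstdint>
#include <istream>
#include <stdexcept>

// Read n big-endian bytes into out; false on a clean EOF between tags.
static bool readBE(std::istream& s, unsigned char* buf, std::size_t n, uint32_t& out)
{
	if(!s.read(reinterpret_cast<char*>(buf), static_cast<std::streamsize>(n)))
		return false;
	out = 0;
	for(std::size_t i = 0; i < n; ++i)
		out = (out << 8) | buf[i];
	return true;
}

void walkFlvBody(std::istream& s)	// s positioned just after the 9-byte FLV header
{
	uint32_t prevSize = 0;
	unsigned char buf[4];
	uint32_t previousTagSize;
	while(readBE(s, buf, 4, previousTagSize))	// big-endian, like UI32::bswap() above
	{
		if(previousTagSize != prevSize)
			throw std::runtime_error("FLV framing error");
		const int tagType = s.get();		// 8 = audio, 9 = video, 18 = script data
		uint32_t dataSize;
		if(tagType < 0 || !readBE(s, buf, 3, dataSize))
			throw std::runtime_error("truncated FLV tag");
		s.ignore(3 + 1 + 3);			// timestamp, timestamp extension, stream id
		s.ignore(dataSize);			// payload: what AudioDataTag/VideoDataTag parse
		prevSize = 11 + dataSize;		// 11-byte tag header + payload
		(void)tagType;				// a real parser dispatches on it, as above
	}
}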
Example #3
void* RenderThread::sdl_worker(RenderThread* th)
{
	sys=th->m_sys;
	rt=th;
	SemaphoreLighter lighter(th->initialized);
	RECT size=sys->getFrameSize();
	int initialWidth=size.Xmax/20;
	int initialHeight=size.Ymax/20;
	th->windowWidth=initialWidth;
	th->windowHeight=initialHeight;
	SDL_GL_SetAttribute( SDL_GL_RED_SIZE, 8 );
	SDL_GL_SetAttribute( SDL_GL_GREEN_SIZE, 8 );
	SDL_GL_SetAttribute( SDL_GL_BLUE_SIZE, 8 );
	SDL_GL_SetAttribute( SDL_GL_SWAP_CONTROL, 0 );
	SDL_GL_SetAttribute( SDL_GL_ACCELERATED_VISUAL, 1); 
	SDL_GL_SetAttribute( SDL_GL_DOUBLEBUFFER, 1 );

	SDL_SetVideoMode(th->windowWidth, th->windowHeight, 24, SDL_OPENGL|SDL_RESIZABLE);
	th->commonGLInit(th->windowWidth, th->windowHeight);
	th->commonGLResize(th->windowWidth, th->windowHeight);
	lighter.light();

	ThreadProfile* profile=sys->allocateProfiler(RGB(200,0,0));
	profile->setTag("Render");
	FTTextureFont font(th->fontPath.c_str());
	if(font.Error())
		throw RunTimeException("Unable to load font");
	
	font.FaceSize(12);
	try
	{
		Chronometer chronometer;
		while(1)
		{
			sem_wait(&th->event);
			if(th->m_sys->isShuttingDown())
				break;
			chronometer.checkpoint();

			if(th->resizeNeeded)
			{
				if(th->windowWidth!=th->newWidth ||
					th->windowHeight!=th->newHeight)
				{
					th->windowWidth=th->newWidth;
					th->windowHeight=th->newHeight;
					SDL_SetVideoMode(th->windowWidth, th->windowHeight, 24, SDL_OPENGL|SDL_RESIZABLE);
				}
				th->newWidth=0;
				th->newHeight=0;
				th->resizeNeeded=false;
				th->commonGLResize(th->windowWidth, th->windowHeight);
				profile->accountTime(chronometer.checkpoint());
				continue;
			}

			if(th->newTextureNeeded)
				th->handleNewTexture();

			if(th->prevUploadJob)
				th->finalizeUpload();

			if(th->uploadNeeded)
			{
				th->handleUpload();
				profile->accountTime(chronometer.checkpoint());
				continue;
			}

			assert(th->renderNeeded);

			SDL_PumpEvents();
			if(th->m_sys->isOnError())
			{
				glLoadIdentity();
				glScalef(1.0f/th->scaleX,-1.0f/th->scaleY,1);
				glTranslatef(-th->offsetX,(th->offsetY+th->windowHeight)*(-1.0f),0);
				glUseProgram(0);
				glActiveTexture(GL_TEXTURE1);
				glDisable(GL_TEXTURE_2D);
				glActiveTexture(GL_TEXTURE0);

				glBindFramebuffer(GL_FRAMEBUFFER, 0);
				glDrawBuffer(GL_BACK);

				glClearColor(0,0,0,1);
				glClear(GL_COLOR_BUFFER_BIT);
				glColor3f(0.8,0.8,0.8);
					    
				font.Render("We're sorry, Lightspark encountered a yet unsupported Flash file",
						-1,FTPoint(0,th->windowHeight/2));

				stringstream errorMsg;
				errorMsg << "SWF file: " << th->m_sys->getOrigin().getParsedURL();
				font.Render(errorMsg.str().c_str(),
						-1,FTPoint(0,th->windowHeight/2-20));
					    
				errorMsg.str("");
				errorMsg << "Cause: " << th->m_sys->errorCause;
				font.Render(errorMsg.str().c_str(),
						-1,FTPoint(0,th->windowHeight/2-40));

				font.Render("Press 'Q' to exit",-1,FTPoint(0,th->windowHeight/2-60));
				
				glFlush();
				SDL_GL_SwapBuffers( );
			}
			else
			{
				SDL_GL_SwapBuffers( );
				th->coreRendering(font, true);
				//Call glFlush to offload work onto the GPU
				glFlush();
			}
			profile->accountTime(chronometer.checkpoint());
			th->renderNeeded=false;
		}
	}
	catch(LightsparkException& e)
	{
		LOG(LOG_ERROR,_("Exception in RenderThread ") << e.cause);
		sys->setError(e.cause);
	}
	th->commonGLDeinit();
	return NULL;
}
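
Every loop in these examples brackets its work between chronometer.checkpoint() calls and feeds the result to ThreadProfile::accountTime(). A minimal sketch of the contract that usage implies (an assumption; the real Chronometer is not shown here): checkpoint() returns the milliseconds elapsed since the previous checkpoint and restarts the measurement, so even the `continue` paths charge their slice to the profile before skipping the rest of the iteration.

#include <chrono>
#include <cstdint>

class ChronometerSketch
{
public:
	ChronometerSketch() : last(std::chrono::steady_clock::now()) {}
	// Returns milliseconds elapsed since construction or the previous checkpoint.
	uint32_t checkpoint()
	{
		const auto now = std::chrono::steady_clock::now();
		const auto ms = std::chrono::duration_cast<std::chrono::milliseconds>(now - last).count();
		last = now;
		return static_cast<uint32_t>(ms);
	}
private:
	std::chrono::steady_clock::time_point last;
};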
Example #4
void* RenderThread::gtkplug_worker(RenderThread* th)
{
	sys=th->m_sys;
	rt=th;
	NPAPI_params* p=th->npapi_params;
	SemaphoreLighter lighter(th->initialized);

	th->windowWidth=p->width;
	th->windowHeight=p->height;
	
	Display* d=XOpenDisplay(NULL);

	int a,b;
	Bool glx_present=glXQueryVersion(d,&a,&b);
	if(!glx_present)
	{
		LOG(LOG_ERROR,_("glX not present"));
		return NULL;
	}
	int attrib[10]={GLX_BUFFER_SIZE,24,GLX_DOUBLEBUFFER,True,None};
	GLXFBConfig* fb=glXChooseFBConfig(d, 0, attrib, &a);
	if(!fb)
	{
		attrib[2]=None;
		fb=glXChooseFBConfig(d, 0, attrib, &a);
		LOG(LOG_ERROR,_("Falling back to no double buffering"));
	}
	if(!fb)
	{
		LOG(LOG_ERROR,_("Could not find any GLX configuration"));
		::abort();
	}
	int i;
	for(i=0;i<a;i++)
	{
		int id;
		glXGetFBConfigAttrib(d,fb[i],GLX_VISUAL_ID,&id);
		if(id==(int)p->visual)
			break;
	}
	if(i==a)
	{
		//No suitable id found
		LOG(LOG_ERROR,_("No suitable graphics configuration available"));
		return NULL;
	}
	th->mFBConfig=fb[i];
	cout << "Chosen config " << hex << fb[i] << dec << endl;
	XFree(fb);

	th->mContext = glXCreateNewContext(d,th->mFBConfig,GLX_RGBA_TYPE ,NULL,1);
	GLXWindow glxWin=p->window;
	glXMakeCurrent(d, glxWin,th->mContext);
	if(!glXIsDirect(d,th->mContext))
		cout << "Indirect!!" << endl;

	th->commonGLInit(th->windowWidth, th->windowHeight);
	th->commonGLResize(th->windowWidth, th->windowHeight);
	lighter.light();
	
	ThreadProfile* profile=sys->allocateProfiler(RGB(200,0,0));
	profile->setTag("Render");
	FTTextureFont font(th->fontPath.c_str());
	if(font.Error())
	{
		LOG(LOG_ERROR,_("Unable to load serif font"));
		throw RunTimeException("Unable to load font");
	}
	
	font.FaceSize(12);

	glEnable(GL_TEXTURE_2D);
	try
	{
		Chronometer chronometer;
		while(1)
		{
			sem_wait(&th->event);
			if(th->m_sys->isShuttingDown())
				break;
			chronometer.checkpoint();
			
			if(th->resizeNeeded)
			{
				if(th->windowWidth!=th->newWidth ||
					th->windowHeight!=th->newHeight)
				{
					th->windowWidth=th->newWidth;
					th->windowHeight=th->newHeight;
					LOG(LOG_ERROR,_("Window resize not supported in plugin"));
				}
				th->newWidth=0;
				th->newHeight=0;
				th->resizeNeeded=false;
				th->commonGLResize(th->windowWidth, th->windowHeight);
				profile->accountTime(chronometer.checkpoint());
				continue;
			}

			if(th->newTextureNeeded)
				th->handleNewTexture();

			if(th->prevUploadJob)
				th->finalizeUpload();

			if(th->uploadNeeded)
			{
				th->handleUpload();
				profile->accountTime(chronometer.checkpoint());
				continue;
			}

			assert(th->renderNeeded);
			if(th->m_sys->isOnError())
			{
				glLoadIdentity();
				glScalef(1.0f/th->scaleX,-1.0f/th->scaleY,1);
				glTranslatef(-th->offsetX,(th->offsetY+th->windowHeight)*(-1.0f),0);
				glUseProgram(0);
				glActiveTexture(GL_TEXTURE1);
				glDisable(GL_TEXTURE_2D);
				glActiveTexture(GL_TEXTURE0);

				glBindFramebuffer(GL_FRAMEBUFFER, 0);
				glDrawBuffer(GL_BACK);

				glClearColor(0,0,0,1);
				glClear(GL_COLOR_BUFFER_BIT);
				glColor3f(0.8,0.8,0.8);
					    
				font.Render("We're sorry, Lightspark encountered a yet unsupported Flash file",
						-1,FTPoint(0,th->windowHeight/2));

				stringstream errorMsg;
				errorMsg << "SWF file: " << th->m_sys->getOrigin().getParsedURL();
				font.Render(errorMsg.str().c_str(),
						-1,FTPoint(0,th->windowHeight/2-20));
					    
				errorMsg.str("");
				errorMsg << "Cause: " << th->m_sys->errorCause;
				font.Render(errorMsg.str().c_str(),
						-1,FTPoint(0,th->windowHeight/2-40));
				
				glFlush();
				glXSwapBuffers(d,glxWin);
			}
			else
			{
				glXSwapBuffers(d,glxWin);
				th->coreRendering(font, false);
				//Call glFlush to offload work onto the GPU
				glFlush();
			}
			profile->accountTime(chronometer.checkpoint());
			th->renderNeeded=false;
		}
	}
	catch(LightsparkException& e)
	{
		LOG(LOG_ERROR,_("Exception in RenderThread ") << e.what());
		sys->setError(e.cause);
	}
	glDisable(GL_TEXTURE_2D);
	th->commonGLDeinit();
	glXMakeCurrent(d,None,NULL);
	glXDestroyContext(d,th->mContext);
	XCloseDisplay(d);
	return NULL;
}
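
Both render workers create a SemaphoreLighter on th->initialized and call light() only after GL setup succeeds, yet they also return early on several error paths. A sketch of the RAII behaviour that pattern suggests (an assumption inferred from those early returns, with std::binary_semaphore standing in for the project's semaphore type):

#include <semaphore>

class SemaphoreLighterSketch
{
public:
	explicit SemaphoreLighterSketch(std::binary_semaphore& s) : sem(s) {}
	void light()
	{
		if(!lighted)
		{
			sem.release();
			lighted = true;
		}
	}
	~SemaphoreLighterSketch() { light(); }	// early exits still unblock the waiting thread
private:
	std::binary_semaphore& sem;
	bool lighted = false;
};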
Example #5
void NetStream::execute()
{
	//mutex access to downloader
	istream s(downloader);
	s.exceptions ( istream::eofbit | istream::failbit | istream::badbit );

	ThreadProfile* profile=sys->allocateProfiler(RGB(0,0,200));
	profile->setTag("NetStream");
	//We need to catch possible EOF and other error conditions in the non-reliable stream
	try
	{
		Chronometer chronometer;
		STREAM_TYPE t=classifyStream(s);
		if(t==FLV_STREAM)
		{
			FLV_HEADER h(s);
			if(!h.isValid())
				threadAbort();

			unsigned int prevSize=0;
			bool done=false;
			do
			{
				UI32 PreviousTagSize;
				s >> PreviousTagSize;
				PreviousTagSize.bswap();
				assert(PreviousTagSize==prevSize);

				//Check tag type and read it
				UI8 TagType;
				s >> TagType;
				switch(TagType)
				{
					case 8:
					{
						AudioDataTag tag(s);
						prevSize=tag.getTotalLen();
						break;
					}
					case 9:
					{
						VideoDataTag tag(s);
						prevSize=tag.getTotalLen();
						assert(tag.codecId==7);

						if(tag.isHeader())
						{
							//The tag is the header, initialize decoding
							assert(decoder==NULL); //The decoder can be set only once
							//NOTE: there is no need to mutex the decoder, as an async transition from NULL to
							//valid is not critical
							decoder=new FFMpegDecoder(tag.packetData,tag.packetLen);
							assert(decoder);
							assert(frameRate!=0);
							//Now that the decoder is valid, let's start the ticking
							sys->addTick(1000/frameRate,this);
							//sys->setRenderRate(frameRate);

							tag.releaseBuffer();
						}
						else
							decoder->decodeData(tag.packetData,tag.packetLen);
						break;
					}
					case 18:
					{
						ScriptDataTag tag(s);
						prevSize=tag.getTotalLen();

						//HACK: initialize frameRate from the container
						frameRate=tag.frameRate;
						break;
					}
					default:
						cout << (int)TagType << endl;
						threadAbort();
				}
				profile->accountTime(chronometer.checkpoint());
				if(aborting)
					throw JobTerminationException();
			}
			while(!done);
		}
		else
			threadAbort();

	}
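
The `if(aborting) throw JobTerminationException();` check at the end of each iteration is the cooperative-cancellation handshake that the ThreadPool worker in Example #1 catches. A standalone sketch of that pattern, with illustrative names rather than the project's exact declarations:

#include <atomic>

struct JobTerminationExceptionSketch {};

class AbortableJobSketch
{
public:
	void threadAbort() { aborting = true; }		// called from another thread
protected:
	void checkAbort() const
	{
		if(aborting)
			throw JobTerminationExceptionSketch();	// unwinds back to the pool's catch block
	}
private:
	std::atomic<bool> aborting{false};
};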
Example #6
void NetStream::execute()
{
	//checkPolicyFile only applies to per-pixel access; loading and playing are always allowed.
	//So there is no need to disallow playing if policy files disallow it.
	//We do need to check if per-pixel access is allowed.
	SecurityManager::EVALUATIONRESULT evaluationResult = sys->securityManager->evaluatePoliciesURL(url, true);
	if(evaluationResult == SecurityManager::NA_CROSSDOMAIN_POLICY)
		rawAccessAllowed = true;

	if(downloader->hasFailed())
	{
		this->incRef();
		sys->currentVm->addEvent(_MR(this),_MR(Class<Event>::getInstanceS("ioError")));
		sys->downloadManager->destroy(downloader);
		return;
	}

	//The downloader hasn't failed yet at this point

	istream s(downloader);
	s.exceptions ( istream::eofbit | istream::failbit | istream::badbit );

	ThreadProfile* profile=sys->allocateProfiler(RGB(0,0,200));
	profile->setTag("NetStream");
	bool waitForFlush=true;
	StreamDecoder* streamDecoder=NULL;
	//We need to catch possible EOF and other error conditions in the non-reliable stream
	try
	{
		Chronometer chronometer;
		streamDecoder=new FFMpegStreamDecoder(s);
		if(!streamDecoder->isValid())
			threadAbort();

		bool done=false;
		while(!done)
		{
			//Check if threadAbort has been called; if so, stop this loop
			if(closed)
				done = true;
			bool decodingSuccess=streamDecoder->decodeNextFrame();
			if(decodingSuccess==false)
				done = true;

			if(videoDecoder==NULL && streamDecoder->videoDecoder)
			{
				videoDecoder=streamDecoder->videoDecoder;
				this->incRef();
				getVm()->addEvent(_MR(this),
						_MR(Class<NetStatusEvent>::getInstanceS("status", "NetStream.Play.Start")));
				this->incRef();
				getVm()->addEvent(_MR(this),
						_MR(Class<NetStatusEvent>::getInstanceS("status", "NetStream.Buffer.Full")));
			}

			if(audioDecoder==NULL && streamDecoder->audioDecoder)
				audioDecoder=streamDecoder->audioDecoder;

			if(audioStream==NULL && audioDecoder && audioDecoder->isValid() && sys->audioManager->pluginLoaded())
				audioStream=sys->audioManager->createStreamPlugin(audioDecoder);

			if(audioStream && audioStream->paused() && !paused)
			{
				//The audio stream is paused but should not be!
				//As we have new data, fill the stream
				audioStream->fill();
			}

			if(!tickStarted && isReady())
			{
				multiname onMetaDataName;
				onMetaDataName.name_type=multiname::NAME_STRING;
				onMetaDataName.name_s="onMetaData";
				onMetaDataName.ns.push_back(nsNameAndKind("",NAMESPACE));
				ASObject* callback = client->getVariableByMultiname(onMetaDataName);
				if(callback && callback->getObjectType() == T_FUNCTION)
				{
					ASObject* callbackArgs[1];
					ASObject* metadata = Class<ASObject>::getInstanceS();
					double d;
					uint32_t i;
					if(streamDecoder->getMetadataDouble("width",d))
						metadata->setVariableByQName("width", "",abstract_d(d),DYNAMIC_TRAIT);
					else
						metadata->setVariableByQName("width", "", abstract_d(getVideoWidth()),DYNAMIC_TRAIT);
					if(streamDecoder->getMetadataDouble("height",d))
						metadata->setVariableByQName("height", "",abstract_d(d),DYNAMIC_TRAIT);
					else
						metadata->setVariableByQName("height", "", abstract_d(getVideoHeight()),DYNAMIC_TRAIT);
					if(streamDecoder->getMetadataDouble("framerate",d))
						metadata->setVariableByQName("framerate", "",abstract_d(d),DYNAMIC_TRAIT);
					if(streamDecoder->getMetadataDouble("duration",d))
						metadata->setVariableByQName("duration", "",abstract_d(d),DYNAMIC_TRAIT);
					if(streamDecoder->getMetadataInteger("canseekontime",i))
						metadata->setVariableByQName("canSeekToEnd", "",abstract_b(i == 1),DYNAMIC_TRAIT);
					if(streamDecoder->getMetadataDouble("audiodatarate",d))
						metadata->setVariableByQName("audiodatarate", "",abstract_d(d),DYNAMIC_TRAIT);
					if(streamDecoder->getMetadataDouble("videodatarate",d))
						metadata->setVariableByQName("videodatarate", "",abstract_d(d),DYNAMIC_TRAIT);

					//TODO: missing: audiocodecid (Number), cuePoints (Object[]),
					//videocodecid (Number), custommetadata's
					client->incRef();
					metadata->incRef();
					callbackArgs[0] = metadata;
					callback->incRef();
					_R<FunctionEvent> event(new FunctionEvent(_MR(static_cast<IFunction*>(callback)),
							_MR(client), callbackArgs, 1));
					getVm()->addEvent(NullRef,event);
				}

				tickStarted=true;
				if(frameRate==0)
				{
					assert(videoDecoder->frameRate);
					frameRate=videoDecoder->frameRate;
				}
				sys->addTick(1000/frameRate,this);
				//Also ask for a render rate equal to the video one (capped at 24)
				float localRenderRate=dmin(frameRate,24);
				sys->setRenderRate(localRenderRate);
			}
			profile->accountTime(chronometer.checkpoint());
			if(aborting)
				throw JobTerminationException();
		}

	}
	catch(LightsparkException& e)
	{
		LOG(LOG_ERROR, "Exception in NetStream " << e.cause);
		threadAbort();
		waitForFlush=false;
	}
	catch(JobTerminationException& e)
	{
		waitForFlush=false;
	}
	catch(exception& e)
	{
		LOG(LOG_ERROR, _("Exception in reading: ")<<e.what());
	}
	if(waitForFlush)
	{
		//Put the decoders in the flushing state and wait for the complete consumption of contents
		if(audioDecoder)
			audioDecoder->setFlushing();
		if(videoDecoder)
			videoDecoder->setFlushing();
		
		if(audioDecoder)
			audioDecoder->waitFlushed();
		if(videoDecoder)
			videoDecoder->waitFlushed();

		this->incRef();
		getVm()->addEvent(_MR(this), _MR(Class<NetStatusEvent>::getInstanceS("status", "NetStream.Play.Stop")));
		this->incRef();
		getVm()->addEvent(_MR(this), _MR(Class<NetStatusEvent>::getInstanceS("status", "NetStream.Buffer.Flush")));
	}
	//Stop ticking before cleaning up; removeJob also spin-waits for termination
	sys->removeJob(this);
	tickStarted=false;
	sem_wait(&mutex);
	//Change the state to invalid to avoid locking
	videoDecoder=NULL;
	audioDecoder=NULL;
	//Clean up everything for a possible re-run
	sys->downloadManager->destroy(downloader);
	//This transition is critical, so the mutex is needed
	downloader=NULL;
	if(audioStream)
		sys->audioManager->freeStreamPlugin(audioStream);
	audioStream=NULL;
	sem_post(&mutex);
	delete streamDecoder;
}
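
The waitForFlush branch relies on a setFlushing()/waitFlushed() pair so that NetStream.Play.Stop is only dispatched after the decoders have drained. A minimal sketch of a queue implementing that contract (an assumption about the decoders' internals, using standard primitives; the real classes are not shown):

#include <condition_variable>
#include <deque>
#include <mutex>

class FlushableQueueSketch
{
public:
	void push(int frame)				// producer side (the parsing thread)
	{
		std::lock_guard<std::mutex> l(m);
		q.push_back(frame);
	}
	bool pop(int& frame)				// consumer side (audio/video output)
	{
		std::lock_guard<std::mutex> l(m);
		if(q.empty())
			return false;
		frame = q.front();
		q.pop_front();
		if(flushing && q.empty())
			cv.notify_all();		// tell waitFlushed() we are drained
		return true;
	}
	void setFlushing()
	{
		std::lock_guard<std::mutex> l(m);
		flushing = true;
	}
	void waitFlushed()
	{
		std::unique_lock<std::mutex> l(m);
		cv.wait(l, [this]{ return flushing && q.empty(); });
	}
private:
	std::mutex m;
	std::condition_variable cv;
	std::deque<int> q;
	bool flushing = false;
};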
Example #7
void RenderThread::worker()
{
	setTLSSys(m_sys);
	/* set TLS variable for getRenderThread() */
	g_static_private_set(&renderThread, this, NULL);

	ThreadProfile* profile=m_sys->allocateProfiler(RGB(200,0,0));
	profile->setTag("Render");
	try
	{
		init();

		glEnable(GL_TEXTURE_2D);

		Chronometer chronometer;
		while(1)
		{
			event.wait();
			if(m_sys->isShuttingDown())
				break;
			chronometer.checkpoint();

			if(resizeNeeded)
			{
				windowWidth=newWidth;
				windowHeight=newHeight;
				newWidth=0;
				newHeight=0;
				resizeNeeded=false;
				LOG(LOG_INFO,_("Window resized to ") << windowWidth << 'x' << windowHeight);
				commonGLResize();
				m_sys->resizeCompleted();
				profile->accountTime(chronometer.checkpoint());
				continue;
			}

			if(newTextureNeeded)
				handleNewTexture();

			if(prevUploadJob)
				finalizeUpload();

			if(uploadNeeded)
			{
				handleUpload();
				profile->accountTime(chronometer.checkpoint());
				continue;
			}

			if(m_sys->isOnError())
			{
				renderErrorPage(this, m_sys->standalone);
			}

#if defined(_WIN32)
			SwapBuffers(mDC);
#elif !defined(ENABLE_GLES2)
			glXSwapBuffers(mDisplay, engineData->window);
#else
			eglSwapBuffers(mEGLDisplay, mEGLSurface);
#endif
			if(!m_sys->isOnError())
			{
				coreRendering();
				//Call glFlush to offload work onto the GPU
				glFlush();
			}
			profile->accountTime(chronometer.checkpoint());
			renderNeeded=false;
		}

		deinit();
	}
	catch(LightsparkException& e)
	{
		LOG(LOG_ERROR,_("Exception in RenderThread ") << e.what());
		m_sys->setError(e.cause);
	}

	/* cleanup */
	//Keep addUploadJob from enqueueing
	status=TERMINATED;
	//Fence existing jobs
	Locker l(mutexUploadJobs);
	if(prevUploadJob)
		prevUploadJob->uploadFence();
	for(auto i=uploadJobs.begin(); i != uploadJobs.end(); ++i)
		(*i)->uploadFence();
}
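
The shutdown path sets status to TERMINATED before fencing the remaining upload jobs under mutexUploadJobs. A sketch of the enqueue side this implies (an assumption; the real addUploadJob is not shown): the same mutex guards the status check, and a refused job is fenced immediately so its producer is never left waiting on an upload that will no longer happen.

#include <deque>
#include <mutex>

struct ITextureUploadableSketch
{
	virtual void uploadFence() = 0;
	virtual ~ITextureUploadableSketch() = default;
};

enum StatusSketch { CREATED, STARTED, TERMINATED };

class RenderThreadSketch
{
public:
	void addUploadJob(ITextureUploadableSketch* job)
	{
		std::lock_guard<std::mutex> l(mutexUploadJobs);
		if(status == TERMINATED)
		{
			job->uploadFence();	// refuse and release the producer right away
			return;
		}
		uploadJobs.push_back(job);
		// ...signal the render loop that an upload is pending...
	}
private:
	std::mutex mutexUploadJobs;
	std::deque<ITextureUploadableSketch*> uploadJobs;
	StatusSketch status = STARTED;
};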
Example #8
void NetStream::execute()
{
	//mutex access to downloader
	istream s(downloader);
	s.exceptions ( istream::eofbit | istream::failbit | istream::badbit );

	ThreadProfile* profile=sys->allocateProfiler(RGB(0,0,200));
	profile->setTag("NetStream");
	//We need to catch possible EOF and other error conditions in the non-reliable stream
	uint32_t decodedTime=0;
	uint32_t videoFrameCount=0;
	try
	{
		Chronometer chronometer;
		STREAM_TYPE t=classifyStream(s);
		if(t==FLV_STREAM)
		{
			FLV_HEADER h(s);
			if(!h.isValid())
				threadAbort();

			unsigned int prevSize=0;
			bool done=false;
			do
			{
				UI32 PreviousTagSize;
				s >> PreviousTagSize;
				PreviousTagSize.bswap();
				assert_and_throw(PreviousTagSize==prevSize);

				//Check tag type and read it
				UI8 TagType;
				s >> TagType;
				switch(TagType)
				{
					case 8:
					{
						AudioDataTag tag(s);
						prevSize=tag.getTotalLen();
#ifdef ENABLE_SOUND
						if(audioDecoder)
						{
							assert_and_throw(audioCodec==tag.SoundFormat);
							uint32_t decodedBytes=audioDecoder->decodeData(tag.packetData,tag.packetLen,decodedTime);
							if(soundStreamId==0 && audioDecoder->isValid())
								soundStreamId=sys->soundManager->createStream(audioDecoder);
							//Adjust timing
							decodedTime+=decodedBytes/audioDecoder->getBytesPerMSec();
						}
						else
						{
							audioCodec=tag.SoundFormat;
							switch(tag.SoundFormat)
							{
								case AAC:
									assert_and_throw(tag.isHeader());
#ifdef ENABLE_LIBAVCODEC
									audioDecoder=new FFMpegAudioDecoder(tag.SoundFormat,
											tag.packetData, tag.packetLen);
#else
									audioDecoder=new NullAudioDecoder();
#endif
									break;
								default:
									throw RunTimeException("Unsupported SoundFormat");
							}
							if(audioDecoder->isValid())
								soundStreamId=sys->soundManager->createStream(audioDecoder);
						}
#endif
						break;
					}
					case 9:
					{
						VideoDataTag tag(s);
						prevSize=tag.getTotalLen();
						//Reset the current time; the video flow drives the stream
						decodedTime=videoFrameCount*1000/frameRate;
						videoFrameCount++;
						assert_and_throw(tag.codecId==7);

						if(tag.isHeader())
						{
							//The tag is the header, initialize decoding
							assert_and_throw(videoDecoder==NULL); //The decoder can be set only once
#ifdef ENABLE_LIBAVCODEC
							videoDecoder=new FFMpegVideoDecoder(tag.packetData,tag.packetLen);
#else
							videoDecoder=new NullVideoDecoder();
#endif
							tag.releaseBuffer();
							Event* status=Class<NetStatusEvent>::getInstanceS("status", "NetStream.Play.Start");
							getVm()->addEvent(this, status);
							status->decRef();
							status=Class<NetStatusEvent>::getInstanceS("status", "NetStream.Buffer.Full");
							getVm()->addEvent(this, status);
							status->decRef();
						}
						else
							videoDecoder->decodeData(tag.packetData,tag.packetLen, decodedTime);
						break;
					}
					case 18:
					{
						ScriptDataTag tag(s);
						prevSize=tag.getTotalLen();

						//The frameRate of the container overrides the stream
						if(tag.frameRate)
							frameRate=tag.frameRate;
						break;
					}
					default:
						LOG(LOG_ERROR,"Unexpected tag type " << (int)TagType << " in FLV");
						threadAbort();
				}
				if(!tickStarted && isReady())
				{
					tickStarted=true;
					if(frameRate==0)
					{
						assert(videoDecoder->frameRate);
						frameRate=videoDecoder->frameRate;
					}
					sys->addTick(1000/frameRate,this);
				}
				profile->accountTime(chronometer.checkpoint());
				if(aborting)
					throw JobTerminationException();
			}
			while(!done);
		}
		else
			threadAbort();

	}
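
This older variant accumulates decodedTime one packet at a time, while Example #2 recomputes it from the running byte total and notes that this "avoids drifts". The difference in rounding behaviour, reduced to two small functions (illustrative names, assuming bytesPerMSec > 0 since the code only queries it on a valid decoder):

#include <cstdint>

// One truncation over the whole stream: the error never exceeds one millisecond.
uint32_t timeFromTotalBytes(uint64_t totalDecodedBytes, uint32_t bytesPerMSec)
{
	return static_cast<uint32_t>(totalDecodedBytes / bytesPerMSec);
}

// One truncation per packet: the error grows with every tag, so the clock drifts.
uint32_t timeFromIncrements(uint32_t previousTime, uint32_t packetBytes, uint32_t bytesPerMSec)
{
	return previousTime + packetBytes / bytesPerMSec;
}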
Example #9
void* RenderThread::worker(RenderThread* th)
{
	sys=th->m_sys;
	rt=th;
	const EngineData* e=th->engineData;
	SemaphoreLighter lighter(th->initialized);

	//Get information about changes in the available space
	g_signal_connect(e->container,"size-allocate",G_CALLBACK(SizeAllocateCallback),th);

	th->windowWidth=e->width;
	th->windowHeight=e->height;
	
	Display* d=XOpenDisplay(NULL);

#ifndef ENABLE_GLES2
	int a,b;
	Bool glx_present=glXQueryVersion(d,&a,&b);
	if(!glx_present)
	{
		LOG(LOG_ERROR,_("glX not present"));
		return NULL;
	}
	int attrib[10]={GLX_RED_SIZE, 8, GLX_GREEN_SIZE, 8, GLX_BLUE_SIZE, 8, GLX_DOUBLEBUFFER, True, None};
	GLXFBConfig* fb=glXChooseFBConfig(d, 0, attrib, &a);
	if(!fb)
	{
		attrib[6]=None;
		LOG(LOG_ERROR,_("Falling back to no double buffering"));
		fb=glXChooseFBConfig(d, 0, attrib, &a);
	}
	if(!fb)
	{
		LOG(LOG_ERROR,_("Could not find any GLX configuration"));
		::abort();
	}
	int i;
	for(i=0;i<a;i++)
	{
		int id;
		glXGetFBConfigAttrib(d,fb[i],GLX_VISUAL_ID,&id);
		if(id==(int)e->visual)
			break;
	}
	if(i==a)
	{
		//No suitable id found
		LOG(LOG_ERROR,_("No suitable graphics configuration available"));
		return NULL;
	}
	th->mFBConfig=fb[i];
	cout << "Chosen config " << hex << fb[i] << dec << endl;
	XFree(fb);

	th->mContext = glXCreateNewContext(d,th->mFBConfig,GLX_RGBA_TYPE ,NULL,1);
	GLXWindow glxWin=e->window;
	glXMakeCurrent(d, glxWin,th->mContext);
	if(!glXIsDirect(d,th->mContext))
		cout << "Indirect!!" << endl;
#else
	int a;
	eglBindAPI(EGL_OPENGL_ES_API);
	EGLDisplay ed = EGL_NO_DISPLAY;
	ed = eglGetDisplay(d);

	if (ed == EGL_NO_DISPLAY) {
		LOG(LOG_ERROR, _("EGL not present"));
		return NULL;
	}

	EGLint major, minor;
	if (eglInitialize(ed, &major, &minor) == EGL_FALSE) {
		LOG(LOG_ERROR, _("EGL initialization failed"));
		return NULL;
	}

	LOG(LOG_NO_INFO, _("EGL version: ") << eglQueryString(ed, EGL_VERSION));

	EGLint config_attribs[] = {
				EGL_SURFACE_TYPE, EGL_WINDOW_BIT,
				EGL_RED_SIZE, 8,
				EGL_GREEN_SIZE, 8,
				EGL_BLUE_SIZE, 8,
				EGL_RENDERABLE_TYPE, EGL_OPENGL_ES2_BIT,
				EGL_NONE
	};

	EGLint context_attribs[] = {
		EGL_CONTEXT_CLIENT_VERSION, 2,
		EGL_NONE
	};

	if (!eglChooseConfig(ed, config_attribs, 0, 0, &a)) {
		LOG(LOG_ERROR,_("Could not get number of EGL configurations"));
	} else {
		LOG(LOG_NO_INFO, "Number of EGL configurations: " << a);
	}
	EGLConfig *conf = new EGLConfig[a];
	if (!eglChooseConfig(ed, config_attribs, conf, a, &a))
	{
		LOG(LOG_ERROR,_("Could not find any EGL configuration"));
		::abort();
	}

	int i;
	for(i=0;i<a;i++)
	{
		EGLint id;
		eglGetConfigAttrib(ed, conf[i], EGL_NATIVE_VISUAL_ID, &id);
		LOG(LOG_ERROR, id <<" -> "<<e->visual);
		if(id==(int)e->visual)
			break;
	}
	if(i==a)
	{
		//No suitable id found
		LOG(LOG_ERROR,_("No suitable graphics configuration available"));
		return NULL;
	}
	th->mEGLConfig=conf[i];
	cout << "Chosen config " << hex << conf[i] << dec << endl;

	th->mEGLContext = eglCreateContext(ed, th->mEGLConfig, EGL_NO_CONTEXT, context_attribs);
	if (th->mEGLContext == EGL_NO_CONTEXT) {
		LOG(LOG_ERROR,_("Could not create EGL context"));
		return NULL;
	}
	EGLSurface win = eglCreateWindowSurface(ed, th->mEGLConfig, e->window, NULL);
	if (win == EGL_NO_SURFACE) {
		LOG(LOG_ERROR,_("Could not create EGL surface"));
		return NULL;
	}
	eglMakeCurrent(ed, win, win, th->mEGLContext);
#endif

	th->commonGLInit(th->windowWidth, th->windowHeight);
	th->commonGLResize();
	lighter.light();
	
	ThreadProfile* profile=sys->allocateProfiler(RGB(200,0,0));
	profile->setTag("Render");

	glEnable(GL_TEXTURE_2D);
	try
	{
		Chronometer chronometer;
		while(1)
		{
			sem_wait(&th->event);
			if(th->m_sys->isShuttingDown())
				break;
			chronometer.checkpoint();
			
			if(th->resizeNeeded)
			{
				th->windowWidth=th->newWidth;
				th->windowHeight=th->newHeight;
				th->newWidth=0;
				th->newHeight=0;
				th->resizeNeeded=false;
				LOG(LOG_NO_INFO,_("Window resized to ") << th->windowWidth << 'x' << th->windowHeight);
				th->commonGLResize();
				th->m_sys->resizeCompleted();
				profile->accountTime(chronometer.checkpoint());
				continue;
			}

			if(th->newTextureNeeded)
				th->handleNewTexture();

			if(th->prevUploadJob)
				th->finalizeUpload();

			if(th->uploadNeeded)
			{
				th->handleUpload();
				profile->accountTime(chronometer.checkpoint());
				continue;
			}

			if(th->m_sys->isOnError())
			{
				th->renderErrorPage(th, th->m_sys->standalone);
#ifndef ENABLE_GLES2
				glXSwapBuffers(d,glxWin);
#else
				eglSwapBuffers(ed, win);
#endif
			}
			else
			{
#ifndef ENABLE_GLES2
				glXSwapBuffers(d,glxWin);
#else
				eglSwapBuffers(ed, win);
#endif
				th->coreRendering();
				//Call glFlush to offload work onto the GPU
				glFlush();
			}
			profile->accountTime(chronometer.checkpoint());
			th->renderNeeded=false;
		}
	}
	catch(LightsparkException& e)
	{
		LOG(LOG_ERROR,_("Exception in RenderThread ") << e.what());
		sys->setError(e.cause);
	}
	glDisable(GL_TEXTURE_2D);
	th->commonGLDeinit();
#ifndef ENABLE_GLES2
	glXMakeCurrent(d,None,NULL);
	glXDestroyContext(d,th->mContext);
#else
	eglMakeCurrent(ed, EGL_NO_SURFACE, EGL_NO_SURFACE, EGL_NO_CONTEXT);
	eglDestroyContext(ed, th->mEGLContext);
#endif
	XCloseDisplay(d);
	return NULL;
}
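
Examples #7 and #9 repeat the same backend-specific buffer swap inline under the same preprocessor conditions. A sketch of how that could be factored into one helper (the parameter choice is an assumption; only the library calls themselves are taken from the code above):

#if defined(_WIN32)
#include <windows.h>
void swapBuffersSketch(HDC dc)
{
	SwapBuffers(dc);			// WGL double-buffer swap
}
#elif !defined(ENABLE_GLES2)
#include <GL/glx.h>
void swapBuffersSketch(Display* d, GLXDrawable w)
{
	glXSwapBuffers(d, w);			// GLX path used by the desktop builds
}
#else
#include <EGL/egl.h>
void swapBuffersSketch(EGLDisplay d, EGLSurface s)
{
	eglSwapBuffers(d, s);			// EGL path used by the GLES2 builds
}
#endif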