Example #1
ASFUNCTIONBODY(Vector,_map)
{
	Vector* th=static_cast<Vector*>(obj);
	_NR<IFunction> func;
	_NR<ASObject> thisObject;
	
	if (argslen >= 1 && !args[0]->is<IFunction>())
		throwError<TypeError>(kCheckTypeFailedError, args[0]->getClassName(), "Function");

	ARG_UNPACK(func)(thisObject,NullRef);
	
	//Create the result Vector of the same templated type
	Vector* ret = static_cast<Vector*>(obj->getClass()->getInstance(true,NULL,0));

	ASObject* thisObj;
	for(uint32_t i=0;i<th->size();i++)
	{
		ASObject* funcArgs[3];
		//First callback argument: the element itself, or null for an empty slot
		if (!th->vec[i])
			funcArgs[0]=obj->getSystemState()->getNullRef();
		else
		{
			funcArgs[0]=th->vec[i];
			funcArgs[0]->incRef();
		}
		//Second and third callback arguments: the current index and the Vector itself
		funcArgs[1]=abstract_i(obj->getSystemState(),i);
		funcArgs[2]=th;
		funcArgs[2]->incRef();
		//Use the supplied thisObject for the call, or null if none was given
		if (thisObject.isNull())
			thisObj = obj->getSystemState()->getNullRef();
		else
		{
			thisObj = thisObject.getPtr();
			thisObj->incRef();
		}
		ASObject* funcRet=func->call(thisObj, funcArgs, 3);
		assert_and_throw(funcRet);
		ret->vec.push_back(funcRet);
	}

	return ret;
}
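For orientation, here is a minimal sketch of how a native method such as _map is typically wired up to its class in this codebase. The sinit function and the setDeclaredMethodByQName call are assumptions based on Lightspark's usual registration pattern; they are not part of the excerpt above.

//Hypothetical registration sketch (assumed API; not taken from the excerpt above)
void Vector::sinit(Class_base* c)
{
	//Expose map() in the AS3 namespace so scripts can call it on Vector.<T> instances
	c->setDeclaredMethodByQName("map",AS3,Class<IFunction>::getFunction(_map),NORMAL_METHOD,true);
}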
Example #2
ASObject* Vector::generator(TemplatedClass<Vector>* o_class, ASObject* const* args, const unsigned int argslen)
{
	assert_and_throw(argslen == 1);
	assert_and_throw(args[0]->getClass());
	assert_and_throw(o_class->getTypes().size() == 1);

	Type* type = o_class->getTypes()[0];

	if(args[0]->getClass() == Class<Array>::getClass())
	{
		//create object without calling _constructor
		Vector* ret = o_class->getInstance(false,NULL,0);

		Array* a = static_cast<Array*>(args[0]);
		for(unsigned int i=0;i<a->size();++i)
		{
			ASObject* obj = a->at(i).getPtr();
			obj->incRef();
			//Convert the elements of the array to the type of this vector
			ret->vec.push_back( type->coerce(obj) );
		}
		return ret;
	}
	else if(args[0]->getClass()->getTemplate() == Template<Vector>::getTemplate())
	{
		Vector* arg = static_cast<Vector*>(args[0]);

		//create object without calling _constructor
		Vector* ret = o_class->getInstance(false,NULL,0);
		for(auto i = arg->vec.begin(); i != arg->vec.end(); ++i)
		{
			(*i)->incRef();
			ret->vec.push_back( type->coerce(*i) );
		}
		return ret;
	}
	else
	{
		throw Class<ArgumentError>::getInstanceS("global Vector() function takes Array or Vector");
	}
}
Example #3
/*
 * Creates (if necessary) and sets the property 'prototype' to o.
 * 'prototype' is usually a DYNAMIC_TRAIT, but on Class_base
 * it is a DECLARED_TRAIT and is only gettable.
 */
void ASObject::setprop_prototype(_NR<ASObject>& o)
{
	ASObject* obj = o.getPtr();
	obj->incRef();

	multiname prototypeName;
	prototypeName.name_type=multiname::NAME_STRING;
	prototypeName.name_s="prototype";
	prototypeName.ns.push_back(nsNameAndKind("",NAMESPACE));
	bool has_getter = false;
	variable* ret=findSettable(prototypeName,false, &has_getter);
	if(!ret && has_getter)
		throw Class<ReferenceError>::getInstanceS("Error #1074: Illegal write to read-only property prototype");
	if(!ret)
		ret = Variables.findObjVar(prototypeName,DYNAMIC_TRAIT,DECLARED_TRAIT|DYNAMIC_TRAIT);
	if(ret->setter)
	{
		this->incRef();
		_MR( ret->setter->call(this,&obj,1) );
	}
	else
		ret->setVar(obj);
}
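To illustrate the "only gettable" note in the comment above, the following is a small read-side sketch built only from calls that appear elsewhere in these excerpts (multiname construction and getVariableByMultiname). The helper name getprop_prototype_sketch is hypothetical and the snippet is illustrative only, not taken from the original source.

//Read-side sketch: build the same multiname and look 'prototype' up through the normal path
//(hypothetical helper; assembled from calls shown in these excerpts)
ASObject* getprop_prototype_sketch(ASObject* obj)
{
	multiname prototypeName;
	prototypeName.name_type=multiname::NAME_STRING;
	prototypeName.name_s="prototype";
	prototypeName.ns.push_back(nsNameAndKind("",NAMESPACE));
	//On Class_base this resolves the declared, get-only trait; on other objects the dynamic one
	return obj->getVariableByMultiname(prototypeName);
}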
Example #4
void NetStream::execute()
{
	if(downloader->hasFailed())
	{
		sys->currentVm->addEvent(this,Class<Event>::getInstanceS("ioError"));
		sys->downloadManager->destroy(downloader);
		return;
	}

	//The downloader hasn't failed yet at this point

	//mutex access to downloader
	istream s(downloader);
	s.exceptions ( istream::eofbit | istream::failbit | istream::badbit );

	ThreadProfile* profile=sys->allocateProfiler(RGB(0,0,200));
	profile->setTag("NetStream");
	//We need to catch possible EOF and other error conditions in the unreliable stream
	uint32_t decodedAudioBytes=0;
	uint32_t decodedVideoFrames=0;
	//The decoded time is computed from decodedAudioBytes to avoid drift
	uint32_t decodedTime=0;
	bool waitForFlush=true;
	try
	{
		ScriptDataTag tag;
		Chronometer chronometer;
		STREAM_TYPE t=classifyStream(s);
		if(t==FLV_STREAM)
		{
			FLV_HEADER h(s);
			if(!h.isValid())
				throw ParseException("FLV is not valid");

			unsigned int prevSize=0;
			bool done=false;
			do
			{
				//Check if threadAbort has been called, if so, stop this loop
				if(closed)
					done = true;
				UI32 PreviousTagSize;
				s >> PreviousTagSize;
				PreviousTagSize.bswap();
				assert_and_throw(PreviousTagSize==prevSize);

				//Check tag type and read it
				UI8 TagType;
				s >> TagType;
				switch(TagType)
				{
					case 8:
					{
						AudioDataTag tag(s);
						prevSize=tag.getTotalLen();

						if(audioDecoder==NULL)
						{
							audioCodec=tag.SoundFormat;
							switch(tag.SoundFormat)
							{
								case AAC:
									assert_and_throw(tag.isHeader());
#ifdef ENABLE_LIBAVCODEC
									audioDecoder=new FFMpegAudioDecoder(tag.SoundFormat,
											tag.packetData, tag.packetLen);
#else
									audioDecoder=new NullAudioDecoder();
#endif
									tag.releaseBuffer();
									break;
								case MP3:
#ifdef ENABLE_LIBAVCODEC
									audioDecoder=new FFMpegAudioDecoder(tag.SoundFormat,NULL,0);
#else
									audioDecoder=new NullAudioDecoder();
#endif
									decodedAudioBytes+=
										audioDecoder->decodeData(tag.packetData,tag.packetLen,decodedTime);
									//Adjust timing
									decodedTime=decodedAudioBytes/audioDecoder->getBytesPerMSec();
									break;
								default:
									throw RunTimeException("Unsupported SoundFormat");
							}
							if(audioDecoder->isValid() && sys->audioManager->pluginLoaded())
								audioStream=sys->audioManager->createStreamPlugin(audioDecoder);
						}
						else
						{
							assert_and_throw(audioCodec==tag.SoundFormat);
							decodedAudioBytes+=
								audioDecoder->decodeData(tag.packetData,tag.packetLen,decodedTime);
							if(audioStream==0 && audioDecoder->isValid() && sys->audioManager->pluginLoaded())
								audioStream=sys->audioManager->createStreamPlugin(audioDecoder);
							//Adjust timing
							decodedTime=decodedAudioBytes/audioDecoder->getBytesPerMSec();
						}
						break;
					}
					case 9:
					{
						VideoDataTag tag(s);
						prevSize=tag.getTotalLen();
						//If the framerate is known give the right timing, otherwise use decodedTime from audio
						uint32_t frameTime=(frameRate!=0.0)?(decodedVideoFrames*1000/frameRate):decodedTime;

						if(videoDecoder==NULL)
						{
							//If the isHeader flag is on then the decoder becomes the owner of the data
							if(tag.isHeader())
							{
								//The tag is the header, initialize decoding
#ifdef ENABLE_LIBAVCODEC
								videoDecoder=
									new FFMpegVideoDecoder(tag.codec,tag.packetData,tag.packetLen, frameRate);
#else
								videoDecoder=new NullVideoDecoder();
#endif
								tag.releaseBuffer();
							}
							else
							{
								//First packet but no special handling
#ifdef ENABLE_LIBAVCODEC
								videoDecoder=new FFMpegVideoDecoder(tag.codec,NULL,0,frameRate);
#else
								videoDecoder=new NullVideoDecoder();
#endif
								videoDecoder->decodeData(tag.packetData,tag.packetLen, frameTime);
								decodedVideoFrames++;
							}
							Event* status=Class<NetStatusEvent>::getInstanceS("status", "NetStream.Play.Start");
							getVm()->addEvent(this, status);
							status->decRef();
							status=Class<NetStatusEvent>::getInstanceS("status", "NetStream.Buffer.Full");
							getVm()->addEvent(this, status);
							status->decRef();
						}
						else
						{
							videoDecoder->decodeData(tag.packetData,tag.packetLen, frameTime);
							decodedVideoFrames++;
						}
						break;
					}
					case 18:
					{
						tag = ScriptDataTag(s);
						prevSize=tag.getTotalLen();

						//The frameRate of the container overrides the stream
						
						if(tag.metadataDouble.find("framerate") != tag.metadataDouble.end())
							frameRate=tag.metadataDouble["framerate"];
						break;
					}
					default:
						LOG(LOG_ERROR,_("Unexpected tag type ") << (int)TagType << _(" in FLV"));
						threadAbort();
				}
				if(!tickStarted && isReady())
				{
					{
						multiname onMetaDataName;
						onMetaDataName.name_type=multiname::NAME_STRING;
						onMetaDataName.name_s="onMetaData";
						onMetaDataName.ns.push_back(nsNameAndKind("",NAMESPACE));
						ASObject* callback = client->getVariableByMultiname(onMetaDataName);
						if(callback && callback->getObjectType() == T_FUNCTION)
						{
							ASObject* callbackArgs[1];
							ASObject* metadata = Class<ASObject>::getInstanceS();
							if(tag.metadataDouble.find("width") != tag.metadataDouble.end())
								metadata->setVariableByQName("width", "", 
										abstract_d(tag.metadataDouble["width"]));
							else
								metadata->setVariableByQName("width", "", abstract_d(getVideoWidth()));
							if(tag.metadataDouble.find("height") != tag.metadataDouble.end())
								metadata->setVariableByQName("height", "", 
										abstract_d(tag.metadataDouble["height"]));
							else
								metadata->setVariableByQName("height", "", abstract_d(getVideoHeight()));

							if(tag.metadataDouble.find("framerate") != tag.metadataDouble.end())
								metadata->setVariableByQName("framerate", "", 
										abstract_d(tag.metadataDouble["framerate"]));
							if(tag.metadataDouble.find("duration") != tag.metadataDouble.end())
								metadata->setVariableByQName("duration", "", 
										abstract_d(tag.metadataDouble["duration"]));
							if(tag.metadataInteger.find("canseekontime") != tag.metadataInteger.end())
								metadata->setVariableByQName("canSeekToEnd", "", 
										abstract_b(tag.metadataInteger["canseekontime"] == 1));

							if(tag.metadataDouble.find("audiodatarate") != tag.metadataDouble.end())
								metadata->setVariableByQName("audiodatarate", "", 
										abstract_d(tag.metadataDouble["audiodatarate"]));
							if(tag.metadataDouble.find("videodatarate") != tag.metadataDouble.end())
								metadata->setVariableByQName("videodatarate", "", 
										abstract_d(tag.metadataDouble["videodatarate"]));

							//TODO: missing: audiocodecid (Number), cuePoints (Object[]), 
							//videocodecid (Number), custommetadata's
							callbackArgs[0] = metadata;
							client->incRef();
							metadata->incRef();
							FunctionEvent* event = 
								new FunctionEvent(static_cast<IFunction*>(callback), client, callbackArgs, 1);
							getVm()->addEvent(NULL,event);
							event->decRef();
						}
					}

					tickStarted=true;
					if(frameRate==0)
					{
						assert(videoDecoder->frameRate);
						frameRate=videoDecoder->frameRate;
					}
					sys->addTick(1000/frameRate,this);
					//Also ask for a render rate equal to the video one (capped at 24)
					float localRenderRate=dmin(frameRate,24);
					sys->setRenderRate(localRenderRate);
				}
				profile->accountTime(chronometer.checkpoint());
				if(aborting)
				{
					throw JobTerminationException();
				}
			}
			while(!done);
		}
		else
			threadAbort();

	}
Example #5
void NetStream::execute()
{
	//checkPolicyFile only applies to per-pixel access; loading and playing are always allowed.
	//So there is no need to disallow playing if policy files disallow it.
	//We do need to check whether per-pixel access is allowed.
	SecurityManager::EVALUATIONRESULT evaluationResult = sys->securityManager->evaluatePoliciesURL(url, true);
	if(evaluationResult == SecurityManager::NA_CROSSDOMAIN_POLICY)
		rawAccessAllowed = true;

	if(downloader->hasFailed())
	{
		this->incRef();
		sys->currentVm->addEvent(_MR(this),_MR(Class<Event>::getInstanceS("ioError")));
		sys->downloadManager->destroy(downloader);
		return;
	}

	//The downloader hasn't failed yet at this point

	istream s(downloader);
	s.exceptions ( istream::eofbit | istream::failbit | istream::badbit );

	ThreadProfile* profile=sys->allocateProfiler(RGB(0,0,200));
	profile->setTag("NetStream");
	bool waitForFlush=true;
	StreamDecoder* streamDecoder=NULL;
	//We need to catch possible EOF and other error conditions in the unreliable stream
	try
	{
		Chronometer chronometer;
		streamDecoder=new FFMpegStreamDecoder(s);
		if(!streamDecoder->isValid())
			threadAbort();

		bool done=false;
		while(!done)
		{
			//Check if threadAbort has been called, if so, stop this loop
			if(closed)
				done = true;
			bool decodingSuccess=streamDecoder->decodeNextFrame();
			if(decodingSuccess==false)
				done = true;

			if(videoDecoder==NULL && streamDecoder->videoDecoder)
			{
				videoDecoder=streamDecoder->videoDecoder;
				this->incRef();
				getVm()->addEvent(_MR(this),
						_MR(Class<NetStatusEvent>::getInstanceS("status", "NetStream.Play.Start")));
				this->incRef();
				getVm()->addEvent(_MR(this),
						_MR(Class<NetStatusEvent>::getInstanceS("status", "NetStream.Buffer.Full")));
			}

			if(audioDecoder==NULL && streamDecoder->audioDecoder)
				audioDecoder=streamDecoder->audioDecoder;

			if(audioStream==NULL && audioDecoder && audioDecoder->isValid() && sys->audioManager->pluginLoaded())
				audioStream=sys->audioManager->createStreamPlugin(audioDecoder);

			if(audioStream && audioStream->paused() && !paused)
			{
				//The audio stream is paused but should not be!
				//As we have new data, fill the stream
				audioStream->fill();
			}

			if(!tickStarted && isReady())
			{
				multiname onMetaDataName;
				onMetaDataName.name_type=multiname::NAME_STRING;
				onMetaDataName.name_s="onMetaData";
				onMetaDataName.ns.push_back(nsNameAndKind("",NAMESPACE));
				ASObject* callback = client->getVariableByMultiname(onMetaDataName);
				if(callback && callback->getObjectType() == T_FUNCTION)
				{
					ASObject* callbackArgs[1];
					ASObject* metadata = Class<ASObject>::getInstanceS();
					double d;
					uint32_t i;
					if(streamDecoder->getMetadataDouble("width",d))
						metadata->setVariableByQName("width", "",abstract_d(d),DYNAMIC_TRAIT);
					else
						metadata->setVariableByQName("width", "", abstract_d(getVideoWidth()),DYNAMIC_TRAIT);
					if(streamDecoder->getMetadataDouble("height",d))
						metadata->setVariableByQName("height", "",abstract_d(d),DYNAMIC_TRAIT);
					else
						metadata->setVariableByQName("height", "", abstract_d(getVideoHeight()),DYNAMIC_TRAIT);
					if(streamDecoder->getMetadataDouble("framerate",d))
						metadata->setVariableByQName("framerate", "",abstract_d(d),DYNAMIC_TRAIT);
					if(streamDecoder->getMetadataDouble("duration",d))
						metadata->setVariableByQName("duration", "",abstract_d(d),DYNAMIC_TRAIT);
					if(streamDecoder->getMetadataInteger("canseekontime",i))
						metadata->setVariableByQName("canSeekToEnd", "",abstract_b(i == 1),DYNAMIC_TRAIT);
					if(streamDecoder->getMetadataDouble("audiodatarate",d))
						metadata->setVariableByQName("audiodatarate", "",abstract_d(d),DYNAMIC_TRAIT);
					if(streamDecoder->getMetadataDouble("videodatarate",d))
						metadata->setVariableByQName("videodatarate", "",abstract_d(d),DYNAMIC_TRAIT);

					//TODO: missing: audiocodecid (Number), cuePoints (Object[]),
					//videocodecid (Number), custommetadata's
					client->incRef();
					metadata->incRef();
					callbackArgs[0] = metadata;
					callback->incRef();
					_R<FunctionEvent> event(new FunctionEvent(_MR(static_cast<IFunction*>(callback)),
							_MR(client), callbackArgs, 1));
					getVm()->addEvent(NullRef,event);
				}

				tickStarted=true;
				if(frameRate==0)
				{
					assert(videoDecoder->frameRate);
					frameRate=videoDecoder->frameRate;
				}
				sys->addTick(1000/frameRate,this);
				//Also ask for a render rate equal to the video one (capped at 24)
				float localRenderRate=dmin(frameRate,24);
				sys->setRenderRate(localRenderRate);
			}
			profile->accountTime(chronometer.checkpoint());
			if(aborting)
				throw JobTerminationException();
		}

	}
	catch(LightsparkException& e)
	{
		LOG(LOG_ERROR, "Exception in NetStream " << e.cause);
		threadAbort();
		waitForFlush=false;
	}
	catch(JobTerminationException& e)
	{
		waitForFlush=false;
	}
	catch(exception& e)
	{
		LOG(LOG_ERROR, _("Exception in reading: ")<<e.what());
	}
	if(waitForFlush)
	{
		//Put the decoders in the flushing state and wait for the complete consumption of contents
		if(audioDecoder)
			audioDecoder->setFlushing();
		if(videoDecoder)
			videoDecoder->setFlushing();
		
		if(audioDecoder)
			audioDecoder->waitFlushed();
		if(videoDecoder)
			videoDecoder->waitFlushed();

		this->incRef();
		getVm()->addEvent(_MR(this), _MR(Class<NetStatusEvent>::getInstanceS("status", "NetStream.Play.Stop")));
		this->incRef();
		getVm()->addEvent(_MR(this), _MR(Class<NetStatusEvent>::getInstanceS("status", "NetStream.Buffer.Flush")));
	}
	//Before deleting, stop ticking; removeJob also spin-waits for the job to terminate
	sys->removeJob(this);
	tickStarted=false;
	sem_wait(&mutex);
	//Change the state to invalid to avoid locking
	videoDecoder=NULL;
	audioDecoder=NULL;
	//Clean up everything for a possible re-run
	sys->downloadManager->destroy(downloader);
	//This transition is critical, so the mutex is needed
	downloader=NULL;
	if(audioStream)
		sys->audioManager->freeStreamPlugin(audioStream);
	audioStream=NULL;
	sem_post(&mutex);
	delete streamDecoder;
}