Example #1
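// Creates the on-screen editing elements: an NV12 sprite canvas sized to the running
// scene that displays the video frame (imageWidth x imageHeight from the media format),
// plus an effects layer stacked above it. Both are centered on screen and added to the
// running scene at z-orders 1 and 2.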
void EFEditorWorker::create_edit_element()
{
	CCSize screenSize = NXCCDirector::getSharedDirector().getRunningScene()->getContentSize() ;

	float imageWidth = _mediaFormat.video_format.width ;
	float imageHeight = _mediaFormat.video_format.height ;

	// NV12 canvas that renders the video frame, scaled to the screen and centered
	_effectCanvas = new NXCCSpriteNV12();
	_effectCanvas->init(false, ccp(screenSize.width, screenSize.height), ccsz(imageWidth, imageHeight), CCRectMake(0, 0, 1, 1)) ;
	_effectCanvas->setPosition(ccp(screenSize.width/2, screenSize.height/2)) ;
	NXCCDirector::getSharedDirector().getRunningScene()->addChild(_effectCanvas, 1) ;

	// effects layer stacked above the canvas, covering the whole screen
	_effectLayers = new EFEffectsLayer(_rootsPath.c_str(), this);
	_effectLayers->init() ;
	_effectLayers->setPosition(screenSize.width/2, screenSize.height/2) ;
	_effectLayers->setContentSize(screenSize) ;
	NXCCDirector::getSharedDirector().getRunningScene()->addChild(_effectLayers, 2) ;
}
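
// Illustrative sketch only, not part of the original source: a hypothetical
// destroy_edit_element() that undoes create_edit_element() above. It assumes the NXCC
// wrappers expose the usual cocos2d-x node API (removeChild with cleanup) and follow the
// usual ownership rules, so the explicit 'new' above still needs a matching release().
void EFEditorWorker::destroy_edit_element()
{
	if(_effectLayers) {
		NXCCDirector::getSharedDirector().getRunningScene()->removeChild(_effectLayers, true);
		_effectLayers->release();	// balances the explicit 'new' in create_edit_element()
		_effectLayers = NULL;
	}

	if(_effectCanvas) {
		NXCCDirector::getSharedDirector().getRunningScene()->removeChild(_effectCanvas, true);
		_effectCanvas->release();	// balances the explicit 'new' in create_edit_element()
		_effectCanvas = NULL;
	}
}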
bool CCMWManager::parseClip(CCMWFileData* animationFileData, int clipAdditionalDataIndex, CCMWClipType type, float clipPosX, float clipPosY, CCAFCClip* afcClip) {
	CCAFCClipData& afcClipData = afcClip->getData();

	// process based on clip type: even type values are image clips (the image index is
	// packed in the high bits, the flip variant in the low three bits); the remaining
	// named values are collision rects and drawing primitives
	if(type % 2 == 0) {
		afcClip->setType(AFC_CLIP_IMAGE);

		// image index
		afcClipData.i.imageIndex = type / 8;

		// flip flag
		switch(type & 0x7) {
			case ClipType_ImageFlipX:
				afcClipData.i.flipX = true;
				break;
			case ClipType_ImageFlipY:
				// a vertical flip equals a horizontal flip plus a 180 degree rotation
				afcClipData.i.flipX = true;
				afcClipData.i.rotation = 180;
				break;
			case ClipType_ImageFlipXY:
				// flipping both axes equals a plain 180 degree rotation
				afcClipData.i.rotation = 180;
				break;
		}

		// image rect in atlas
		afcClipData.i.rect = ccr(resolve(animationFileData->m_imageClipPool[clipAdditionalDataIndex * 4]),
				resolve(animationFileData->m_imageClipPool[clipAdditionalDataIndex * 4 + 1]),
				resolve(animationFileData->m_imageClipPool[clipAdditionalDataIndex * 4 + 2]),
				resolve(animationFileData->m_imageClipPool[clipAdditionalDataIndex * 4 + 3]));

		// the Motion Welder y axis points down, so reverse y here; clipPos and the image
		// rect were already resolved above, so don't resolve y again
		afcClipData.clipPos = ccpt(clipPosX + afcClipData.i.rect.width / 2,
				-clipPosY - afcClipData.i.rect.height / 2);
	} else if(type == ClipType_CollisionRect) {
		afcClip->setType(AFC_CLIP_COLLISION_RECT);
		afcClipData.cr.size = ccsz(resolve(animationFileData->m_positionerRectangleClipPool[clipAdditionalDataIndex * 2]),
				resolve(animationFileData->m_positionerRectangleClipPool[clipAdditionalDataIndex * 2 + 1]));

		// the Motion Welder y axis points down, so reverse y here; clipPos and the size
		// were already resolved above, so don't resolve y again
		afcClipData.clipPos = ccpt(clipPosX + afcClipData.cr.size.width / 2,
				-clipPosY - afcClipData.cr.size.height / 2);
	} else if(type == ClipType_Line) {
		afcClip->setType(AFC_CLIP_LINE);
		// each line clip entry holds the end point x, y and the color
		afcClipData.l.endPoint = ccpt(resolve(animationFileData->m_lineClipPool[clipAdditionalDataIndex * 3]),
				resolve(animationFileData->m_lineClipPool[clipAdditionalDataIndex * 3 + 1]));
		afcClipData.l.color = animationFileData->m_lineClipPool[clipAdditionalDataIndex * 3 + 2];
		afcClipData.clipPos = ccpt(clipPosX, -clipPosY);
	} else if(type == ClipType_Rect || type == ClipType_Rect_Filled) {
		afcClip->setType(AFC_CLIP_RECT);
		// each rectangle clip entry holds width, height and color
		afcClipData.r.size = ccsz(resolve(animationFileData->m_rectangleClipPool[clipAdditionalDataIndex * 3]),
				resolve(animationFileData->m_rectangleClipPool[clipAdditionalDataIndex * 3 + 1]));
		afcClipData.r.color = animationFileData->m_rectangleClipPool[clipAdditionalDataIndex * 3 + 2];

		// the Motion Welder y axis points down, so reverse y here; clipPos and the size
		// were already resolved above, so don't resolve y again
		afcClipData.clipPos = ccpt(clipPosX + afcClipData.r.size.width / 2,
				-clipPosY - afcClipData.r.size.height / 2);
	} else if(type == ClipType_RoundRect || type == ClipType_RoundRect_Filled) {
		afcClip->setType(AFC_CLIP_ROUNDRECT);
		// each rounded rectangle clip entry holds width, height, arc width, arc height and color
		afcClipData.rr.size = ccsz(resolve(animationFileData->m_roundedRectangleClipPool[clipAdditionalDataIndex * 5]),
				resolve(animationFileData->m_roundedRectangleClipPool[clipAdditionalDataIndex * 5 + 1]));
		afcClipData.rr.arcWidth = resolve(animationFileData->m_roundedRectangleClipPool[clipAdditionalDataIndex * 5 + 2]);
		afcClipData.rr.arcHeight = resolve(animationFileData->m_roundedRectangleClipPool[clipAdditionalDataIndex * 5 + 3]);
		afcClipData.rr.color = animationFileData->m_roundedRectangleClipPool[clipAdditionalDataIndex * 5 + 4];

		// the Motion Welder y axis points down, so reverse y here; clipPos and the size
		// were already resolved above, so don't resolve y again
		afcClipData.clipPos = ccpt(clipPosX + afcClipData.rr.size.width / 2,
				-clipPosY - afcClipData.rr.size.height / 2);
	} else if(type == ClipType_Ecllipse || type == ClipType_Ecllipse_Filled) {
		afcClip->setType(AFC_CLIP_ELLIPSE);
		// each ellipse clip entry holds width, height, start angle, end angle and color
		afcClipData.e.size = ccsz(resolve(animationFileData->m_ellipseClipPool[clipAdditionalDataIndex * 5]),
				resolve(animationFileData->m_ellipseClipPool[clipAdditionalDataIndex * 5 + 1]));
		afcClipData.e.startAngle = animationFileData->m_ellipseClipPool[clipAdditionalDataIndex * 5 + 2];
		afcClipData.e.endAngle = animationFileData->m_ellipseClipPool[clipAdditionalDataIndex * 5 + 3];
		afcClipData.e.color = animationFileData->m_ellipseClipPool[clipAdditionalDataIndex * 5 + 4];

		// the Motion Welder y axis points down, so reverse y here; clipPos and the size
		// were already resolved above, so don't resolve y again
		afcClipData.clipPos = ccpt(clipPosX + afcClipData.e.size.width / 2,
				-clipPosY - afcClipData.e.size.height / 2);
	} else {
		return false;
	}

	return true;
}
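
// Builds AFC clips for every module of an arctic (ASprite) frame: hyper-frame modules
// recurse using the module's x/y as the new offset, plain modules are parsed directly,
// and modules covered by a clip mapping rule are redirected (to another clip index in
// the same file, to a clip in an external arctic file, or straight to an external atlas
// region). Collision rectangles attached to the frame are appended as collision-rect
// clips at the end.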
void CCArcticManager::parseFrameModules(CCArcticFileData* afd, CCArcticFrame* arcticFrame, CCAFCFrame* afcFrame, int offsetX, int offsetY, CCAFCClipMapping* mapping) {
	for(int j = 0; j < arcticFrame->moduleCount; j++) {
		CCAFCClip* afcClip = CCAFCClip::create();
		
		// arctic frame module
		CCArcticFrameModule* arcticFrameModule = afd->m_frameModules + j + arcticFrame->firstModuleIndex;
		
		// combined index; depending on the flags it refers to either a module or a hyper frame
		int index = ((arcticFrameModule->flags & AS_INDEX_EX_MASK) << AS_INDEX_EX_SHIFT) | arcticFrameModule->index;
		
		// is hyper frame?
		if((arcticFrameModule->flags & AS_HYPER_FM) != 0) {
			CCArcticFrame* hyperFrame = afd->m_frames + index;
			parseFrameModules(afd, hyperFrame, afcFrame, arcticFrameModule->x, arcticFrameModule->y, mapping);
		} else {
			// first look for a clip mapping rule covering this index
			CCAFCClipMappingRule* rule = NULL;
			if(mapping)
				rule = mapping->findRule(index);
			
			// how the clip is built depends on the rule type: none, internal redirect,
			// external clip, or external atlas
			if(!rule) {
				// arctic module
				CCArcticModule* arcticModule = afd->m_modules + index;
				parseModule(arcticModule, arcticFrameModule, afcClip, index, offsetX, offsetY);

				// set index
				afcClip->setIndex(index);
			} else {
				switch(rule->type) {
					case AFC_CMR_INTERNAL_CLIP:
					{
						// internal mapping: redirect to the destination clip index
						index = rule->icr.destClipIndex;

						// arctic module
						CCArcticModule* arcticModule = afd->m_modules + index;
						parseModule(arcticModule, arcticFrameModule, afcClip, index, offsetX, offsetY);

						break;
					}
					case AFC_CMR_EXTERNAL_CLIP:
					{
						// get external file data
						CCArcticFileData* externalAFD = getArcticFileData(rule->ecr.path);
						if(!externalAFD)
							continue; // skip this module; continue applies to the enclosing for loop, not the switch

						// parse arctic module
						CCArcticModule* arcticModule = externalAFD->m_modules + rule->ecr.destClipIndex;
						parseModule(arcticModule, arcticFrameModule, afcClip, rule->ecr.destClipIndex, offsetX, offsetY);

						// redirect the sheet to the external file's sheet
						afcClip->getData().i.sheet = rule->ecr.sheet;

						break;
					}
					case AFC_CMR_EXTERNAL_ATLAS:
					{
						CCAFCClipData& clipData = afcClip->getData();

						// sheet
						clipData.i.sheet = rule->ear.sheet;

						// clip pos
						clipData.clipPos = ccpt(resolve(arcticFrameModule->x) + rule->ear.pos.x,
								resolve(-arcticFrameModule->y) + rule->ear.pos.y);

						// clip image rect
						clipData.i.rect = rule->ear.texRect;

						// flip flag
						clipData.i.flipX = rule->ear.flipX;
						clipData.i.rotation = rule->ear.rotation;

						break;
					}
				}
			}
			
			// add clip
			afcFrame->addClip(afcClip);
		}
	}
	
	// collision rects
	if(arcticFrame->collisionRectCount > 0) {
		ccRect* cr = arcticFrame->collisionRects;
		for(int i = 0; i < arcticFrame->collisionRectCount; i++, cr++) {
			CCAFCClip* afcClip = CCAFCClip::create();
			CCAFCClipData& afcClipData = afcClip->getData();
			
			// set type
			afcClip->setType(AFC_CLIP_COLLISION_RECT);
			
			// clip pos
			// the ASprite y axis is the reverse of the OpenGL y axis, and its origin is the top-left corner
			afcClipData.clipPos = ccpt(resolve(cr->x + cr->width / 2 + offsetX),
					resolve(-cr->y - cr->height / 2 - offsetY));
			
			// set rect
			afcClipData.cr.size = ccsz(resolve(cr->width), resolve(cr->height));
			
			// add clip
			afcFrame->addClip(afcClip);
		}
	}
}