Example #1
void Creature::loadModel() {
	if (_model)
		return;

	if (_appearanceID == Aurora::kFieldIDInvalid) {
		warning("Creature \"%s\" has no appearance", _tag.c_str());
		return;
	}

	const Aurora::TwoDARow &appearance = TwoDAReg.get2DA("appearance").getRow(_appearanceID);

	if (_portrait.empty())
		_portrait = appearance.getString("PORTRAIT");

	_environmentMap = appearance.getString("ENVMAP");

	if (appearance.getString("MODELTYPE") == "P") {
		getArmorModels();
		getPartModels();
		_model = loadModelObject(_partsSuperModelName);

		for (size_t i = 0; i < kBodyPartMAX; i++) {
			if (_bodyParts[i].modelName.empty())
				continue;

			TextureMan.startRecordNewTextures();

			// Try to load in the corresponding part model
			Graphics::Aurora::Model *partModel = loadModelObject(_bodyParts[i].modelName, _bodyParts[i].textureName);
			if (!partModel)
				continue;

			// Add the loaded model to the appropriate part node
			Graphics::Aurora::ModelNode *partNode = _model->getNode(kBodyPartNodes[i]);
			if (partNode)
				partNode->addChild(partModel);

			std::list<Common::UString> newTextures;
			TextureMan.stopRecordNewTextures(newTextures);

			for (std::list<Common::UString>::const_iterator t = newTextures.begin(); t != newTextures.end(); ++t) {
				Graphics::Aurora::TextureHandle texture = TextureMan.getIfExist(*t);
				if (texture.empty())
					continue;

				_bodyParts[i].textures.push_back(texture);
			}

			finishPLTs(_bodyParts[i].textures);
		}

	} else
		_model = loadModelObject(appearance.getString("RACE"));

	// Positioning

	float x, y, z, angle;

	getPosition(x, y, z);
	setPosition(x, y, z);

	getOrientation(x, y, z, angle);
	setOrientation(x, y, z, angle);

	// Clickable

	if (_model) {
		_model->setTag(_tag);
		_model->setClickable(isClickable());

		_ids.push_back(_model->getID());

		if (!_environmentMap.empty()) {
			Common::UString environmentMap = _environmentMap;
			if (environmentMap.equalsIgnoreCase("default"))
				environmentMap = _area ? _area->getEnvironmentMap() : "";

			_model->setEnvironmentMap(environmentMap);
		}
	}
}
Example #2
	const Capsule<float> CollisionCapsuleObject::retrieveCapsule() const
	{
		return Capsule<float>(getRadius(), getCylinderHeight(), getCapsuleOrientation(), getCenterPosition(), getOrientation());
	}
Example #3
bool Transform::preserveRects() const
{
    return !(getOrientation() & ROT_INVALID);
}
Example #4
/**
 * @brief Parses a particular entry in the EXIF directory.
 *          This is an internal function and is not exposed to the client.
 *
 *      Entries are divided into 12-byte blocks.
 *      Each block corresponds to the following structure:
 *
 *      +------+-------------+-------------------+------------------------+
 *      | Type | Data format | Num of components | Data or offset to data |
 *      +======+=============+===================+========================+
 *      | TTTT | ffff        | NNNNNNNN          | DDDDDDDD               |
 *      +------+-------------+-------------------+------------------------+
 *
 *      Details can be found here: http://www.media.mit.edu/pia/Research/deepview/exif.html
 *
 * @param [in] offset Offset to the entry, in bytes, inside the raw EXIF data
 * @return ExifEntry_t structure which corresponds to the particular entry
 *
 */
ExifEntry_t ExifReader::parseExifEntry(const size_t offset)
{
    ExifEntry_t entry;
    uint16_t tagNum = getExifTag( offset );
    entry.tag = tagNum;

    switch( tagNum )
    {
        case IMAGE_DESCRIPTION:
            entry.field_str = getString( offset );
            break;
        case MAKE:
            entry.field_str = getString( offset );
            break;
        case MODEL:
            entry.field_str = getString( offset );
            break;
        case ORIENTATION:
            entry.field_u16 = getOrientation( offset );
            break;
        case XRESOLUTION:
            entry.field_u_rational = getResolution( offset );
            break;
        case YRESOLUTION:
            entry.field_u_rational = getResolution( offset );
            break;
        case RESOLUTION_UNIT:
            entry.field_u16 = getResolutionUnit( offset );
            break;
        case SOFTWARE:
            entry.field_str = getString( offset );
            break;
        case DATE_TIME:
            entry.field_str = getString( offset );
            break;
        case WHITE_POINT:
            entry.field_u_rational = getWhitePoint( offset );
            break;
        case PRIMARY_CHROMATICIES:
            entry.field_u_rational = getPrimaryChromaticies( offset );
            break;
        case Y_CB_CR_COEFFICIENTS:
            entry.field_u_rational = getYCbCrCoeffs( offset );
            break;
        case Y_CB_CR_POSITIONING:
            entry.field_u16 = getYCbCrPos( offset );
            break;
        case REFERENCE_BLACK_WHITE:
            entry.field_u_rational = getRefBW( offset );
            break;
        case COPYRIGHT:
            entry.field_str = getString( offset );
            break;
        case EXIF_OFFSET:
            break;
        default:
            entry.tag = INVALID_TAG;
            break;
    }
    return entry;
}
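
The doc comment above describes the 12-byte layout of an IFD entry (tag, data format, component count, data/offset). As a standalone illustration, here is a minimal sketch of splitting such a block into its four fields. It is not the OpenCV reader itself; it assumes little-endian (Intel) byte order, and IfdEntry, read16 and read32 are hypothetical helpers introduced only for this example.

#include <cstddef>
#include <cstdint>
#include <vector>

struct IfdEntry {
    uint16_t tag;           // TTTT: tag number (e.g. 0x0112 = ORIENTATION)
    uint16_t format;        // ffff: data format (e.g. 3 = unsigned short)
    uint32_t count;         // NNNNNNNN: number of components
    uint32_t valueOrOffset; // DDDDDDDD: inline data or offset to the data
};

static uint16_t read16(const std::vector<uint8_t> &buf, size_t pos) {
    return static_cast<uint16_t>(buf[pos] | (buf[pos + 1] << 8));
}

static uint32_t read32(const std::vector<uint8_t> &buf, size_t pos) {
    return read16(buf, pos) | (static_cast<uint32_t>(read16(buf, pos + 2)) << 16);
}

// Decode the 12-byte entry starting at `offset`: the four fields occupy
// bytes [0,2), [2,4), [4,8) and [8,12) of the block.
static IfdEntry decodeIfdEntry(const std::vector<uint8_t> &buf, size_t offset) {
    IfdEntry e;
    e.tag           = read16(buf, offset);
    e.format        = read16(buf, offset + 2);
    e.count         = read32(buf, offset + 4);
    e.valueOrOffset = read32(buf, offset + 8);
    return e;
}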
Example #5
	//[20091123 exif Ratnesh
	void CameraHal::CreateExif(unsigned char* pInThumbnailData, int Inthumbsize,
		unsigned char* pOutExifBuf, int& OutExifSize, int flag)
	{
		//                                0   90  180 270 360
		const int MAIN_ORIENTATION[]  = { 1,  6,  3,  8,  1};
		const int FRONT_ORIENTATION[] = { 3,  6,  1,  8,  3};
			
		ExifCreator* mExifCreator = new ExifCreator();
		unsigned int ExifSize = 0;
		ExifInfoStructure ExifInfo;
		char ver_date[5] = {0,};
		unsigned short tempISO = 0;
		struct v4l2_exif exifobj;
		
		int orientationValue = getOrientation();
		LOGV("CreateExif orientationValue = %d \n", orientationValue);	

		memset(&ExifInfo, 0, sizeof(ExifInfoStructure));

		strcpy( (char *)&ExifInfo.maker, "SAMSUNG");
		
		mParameters.getPictureSize((int*)&ExifInfo.imageWidth , (int*)&ExifInfo.imageHeight);
		mParameters.getPictureSize((int*)&ExifInfo.pixelXDimension, (int*)&ExifInfo.pixelYDimension);

		struct tm *t = NULL;
		time_t nTime;
		time(&nTime);
		t = localtime(&nTime);

		if(t != NULL)
		{
			sprintf((char *)&ExifInfo.dateTimeOriginal, "%4d:%02d:%02d %02d:%02d:%02d", t->tm_year + 1900, t->tm_mon + 1, t->tm_mday, t->tm_hour, t->tm_min, t->tm_sec);
			sprintf((char *)&ExifInfo.dateTimeDigitized, "%4d:%02d:%02d %02d:%02d:%02d", t->tm_year + 1900, t->tm_mon + 1, t->tm_mday, t->tm_hour, t->tm_min, t->tm_sec);						
			sprintf((char *)&ExifInfo.dateTime, "%4d:%02d:%02d %02d:%02d:%02d", t->tm_year + 1900, t->tm_mon + 1, t->tm_mday, t->tm_hour, t->tm_min, t->tm_sec); 					
		}
				
		if(mCameraIndex==MAIN_CAMERA)
		{
			if(orientationValue<=360)
				ExifInfo.orientation = MAIN_ORIENTATION[orientationValue/90];
			else
				ExifInfo.orientation = 0;
			
			getExifInfoFromDriver(&exifobj);
			strcpy( (char *)&ExifInfo.model, "GT-I8320 M4MO");
			int cam_ver = GetCamera_version();
			ExifInfo.Camversion[0] = (cam_ver & 0xFF);
			ExifInfo.Camversion[1] = ((cam_ver >> 8) & 0xFF);
			//HAL_PRINT("CreateExif GetCamera_version =[%x][%x][%x][%x]\n", ExifInfo.Camversion[2],ExifInfo.Camversion[3],ExifInfo.Camversion[0],ExifInfo.Camversion[1]);	
			sprintf((char *)&ExifInfo.software, "%02X%02X", ExifInfo.Camversion[1], ExifInfo.Camversion[0]); 	
// TODO: get thumbnail offset of m4mo jpeg data
			// if(mThumbnailWidth > 0 && mThumbnailHeight > 0)
			// {
				// ExifInfo.hasThumbnail = true;
				// ExifInfo.thumbStream			= pInThumbnailData;
				// ExifInfo.thumbSize				= Inthumbsize;
				// ExifInfo.thumbImageWidth		= mThumbnailWidth;
				// ExifInfo.thumbImageHeight		= mThumbnailHeight;
			// }
			// else
			{
				ExifInfo.hasThumbnail = false;
			}

			ExifInfo.exposureProgram            = 3;
			ExifInfo.exposureMode               = 0;
			ExifInfo.contrast                   = convertToExifLMH(getContrast(), 2);
			ExifInfo.fNumber.numerator          = 26;
			ExifInfo.fNumber.denominator        = 10;
			ExifInfo.aperture.numerator         = 26;
			ExifInfo.aperture.denominator       = 10;
			ExifInfo.maxAperture.numerator      = 26;
			ExifInfo.maxAperture.denominator    = 10;
			ExifInfo.focalLength.numerator      = 4610;
			ExifInfo.focalLength.denominator    = 1000;
			//[ 2010 05 01 exif
			ExifInfo.shutterSpeed.numerator 	= exifobj.shutter_speed_numerator;
			ExifInfo.shutterSpeed.denominator   = exifobj.shutter_speed_denominator;
			ExifInfo.exposureTime.numerator     = exifobj.exposure_time_numerator;
			ExifInfo.exposureTime.denominator   = exifobj.exposure_time_denominator;
			//]
			ExifInfo.brightness.numerator       = exifobj.brigtness_numerator;
			ExifInfo.brightness.denominator     = exifobj.brightness_denominator;
			ExifInfo.iso                        = 1;
			ExifInfo.isoSpeedRating             = roundIso(exifobj.iso);
			// Flash
			// bit 0    -whether the flash fired
			// bit 1,2 -status of returned light
			// bit 3,4 - indicating the camera's flash mode
			// bit 5    -presence of a flash function
			// bit 6    - red-eye mode

			// refer to flash_mode[] at CameraHal.cpp
			// off = 1
			// on = 2
			// auto = 3
			ExifInfo.flash                      = exifobj.flash
			                                    | ((mPreviousFlashMode == 3) ? (3 << 4) : 0);	// default value
			
			ExifInfo.whiteBalance               = (mPreviousWB <= 1)?0:1;
			ExifInfo.meteringMode               = mPreviousMetering;
			ExifInfo.saturation                 = convertToExifLMH(getSaturation(), 2);
			ExifInfo.sharpness                  = convertToExifLMH(getSharpness(), 2);  
			ExifInfo.exposureBias.numerator 	= (getBrightness()-4)*5;
			ExifInfo.exposureBias.denominator   = 10;
			ExifInfo.sceneCaptureType           = mPreviousSceneMode;
		
			// ExifInfo.meteringMode               = 2;
			// ExifInfo.whiteBalance               = 1;
			// ExifInfo.saturation                 = 0;
			// ExifInfo.sharpness                  = 0;
			// ExifInfo.isoSpeedRating             = tempISO;
			// ExifInfo.exposureBias.numerator     = 0;
			// ExifInfo.exposureBias.denominator   = 10;
			// ExifInfo.sceneCaptureType           = 4;
		}
		
		else // VGA Camera
		{	
			if(orientationValue<=360)
Example #6
Common::UString GFFStruct::getString(const Common::UString &field,
                                        const Common::UString &def) const {
	load();

	const Field *f = getField(field);
	if (!f)
		return def;

	if (f->type == kFieldTypeExoString) {
		Common::SeekableReadStream &data = getData(*f);

		uint32 length = data.readUint32LE();

		Common::UString str;
		str.readFixedASCII(data, length);
		return str;
	}

	if (f->type == kFieldTypeResRef) {
		Common::SeekableReadStream &data = getData(*f);

		uint32 length = data.readByte();

		Common::UString str;
		str.readFixedASCII(data, length);
		return str;
	}

	if ((f->type == kFieldTypeByte  ) ||
	    (f->type == kFieldTypeUint16) ||
	    (f->type == kFieldTypeUint32) ||
	    (f->type == kFieldTypeUint64)) {

		return Common::UString::sprintf("%lu", getUint(field));
	}

	if ((f->type == kFieldTypeChar  ) ||
	    (f->type == kFieldTypeSint16) ||
	    (f->type == kFieldTypeSint32) ||
	    (f->type == kFieldTypeSint64)) {

		return Common::UString::sprintf("%ld", getSint(field));
	}

	if ((f->type == kFieldTypeFloat) ||
	    (f->type == kFieldTypeDouble)) {

		return Common::UString::sprintf("%lf", getDouble(field));
	}

	if (f->type == kFieldTypeVector) {
		float x, y, z;

		getVector(field, x, y, z);
		return Common::UString::sprintf("%f/%f/%f", x, y, z);
	}

	if (f->type == kFieldTypeOrientation) {
		float a, b, c, d;

		getOrientation(field, a, b, c, d);
		return Common::UString::sprintf("%f/%f/%f/%f", a, b, c, d);
	}

	throw Common::Exception("Field is not a string(able) type");
}
Example #7
void EarClippingTriangulation::clipEars()
{
	std::list<std::size_t>::iterator it, prev, next;
	// *** clip an ear
	while (_vertex_list.size() > 3) {
		// pop ear from list
		std::size_t ear = _ear_list.front();
		_ear_list.pop_front();
		// remove ear tip from _convex_vertex_list
		_convex_vertex_list.remove(ear);

		// remove ear from vertex_list, apply changes to _ear_list, _convex_vertex_list
		bool nfound(true);
		it = _vertex_list.begin();
		prev = _vertex_list.end();
		--prev;
		while (nfound && it != _vertex_list.end()) {
			if (*it == ear) {
				nfound = false;
				it = _vertex_list.erase(it); // remove ear tip
				next = it;
				if (next == _vertex_list.end()) {
					next = _vertex_list.begin();
					prev = _vertex_list.end();
					--prev;
				}
				// add triangle
				_triangles.push_back(GeoLib::Triangle(_pnts, *prev, *next, ear));

				// check the orientation of prevprev, prev, next
				std::list<std::size_t>::iterator prevprev;
				if (prev == _vertex_list.begin()) {
					prevprev = _vertex_list.end();
				} else {
					prevprev = prev;
				}
				--prevprev;

				// apply changes to _convex_vertex_list and _ear_list looking "backward"
				GeoLib::Orientation orientation = GeoLib::getOrientation(_pnts[*prevprev], _pnts[*prev],
						_pnts[*next]);
				if (orientation == GeoLib::CW) {
					BaseLib::uniquePushBack(_convex_vertex_list, *prev);
					// prev is convex
					if (isEar(*prevprev, *prev, *next)) {
						// prev is an ear tip
						BaseLib::uniquePushBack(_ear_list, *prev);
					} else {
						// if necessary remove prev
						_ear_list.remove(*prev);
					}
				} else {
					// prev is not convex => reflex or collinear
					_convex_vertex_list.remove(*prev);
					_ear_list.remove(*prev);
					if (orientation == GeoLib::COLLINEAR) {
						prev = _vertex_list.erase(prev);
						if (prev == _vertex_list.begin()) {
							prev = _vertex_list.end();
							--prev;
						} else {
							--prev;
						}
					}
				}

				// check the orientation of prev, next, nextnext
				std::list<std::size_t>::iterator nextnext,
						help_it(_vertex_list.end());
				--help_it;
				if (next == help_it) {
					nextnext = _vertex_list.begin();
				} else {
					nextnext = next;
					++nextnext;
				}

				// apply changes to _convex_vertex_list and _ear_list looking "forward"
				orientation = getOrientation(_pnts[*prev], _pnts[*next],
						_pnts[*nextnext]);
				if (orientation == GeoLib::CW) {
					BaseLib::uniquePushBack(_convex_vertex_list, *next);
					// next is convex
					if (isEar(*prev, *next, *nextnext)) {
						// next is an ear tip
						BaseLib::uniquePushBack(_ear_list, *next);
					} else {
						// if necessary remove *next
						_ear_list.remove(*next);
					}
				} else {
					// next is not convex => reflex or collinear
					_convex_vertex_list.remove(*next);
					_ear_list.remove(*next);
					if (orientation == GeoLib::COLLINEAR) {
						next = _vertex_list.erase(next);
						if (next == _vertex_list.end())
							next = _vertex_list.begin();
					}
				}
			} else {
				prev = it;
				++it;
			}
		}

	}

	// add last triangle
	next = _vertex_list.begin();
	prev = next;
	++next;
	if (next == _vertex_list.end())
		return;
	it = next;
	++next;
	if (next == _vertex_list.end())
		return;

	if (getOrientation(_pnts[*prev], _pnts[*it], _pnts[*next]) == GeoLib::CCW)
		_triangles.push_back(GeoLib::Triangle(_pnts, *prev, *it, *next));
	else
		_triangles.push_back(GeoLib::Triangle(_pnts, *prev, *next, *it));
}
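
The ear-clipping loop above drives all list updates off the CW/CCW/COLLINEAR result of GeoLib::getOrientation. Below is a standalone sketch of such a 2D orientation test; it does not reproduce GeoLib's point types or its exact sign convention, and the epsilon is an illustrative choice.

#include <array>

enum class Orientation2D { CW, CCW, COLLINEAR };

// Orientation of the ordered triple (a, b, c), taken from the sign of the z
// component of the cross product (b - a) x (c - a).
Orientation2D getOrientation2D(const std::array<double, 2> &a,
                               const std::array<double, 2> &b,
                               const std::array<double, 2> &c) {
    const double cross = (b[0] - a[0]) * (c[1] - a[1]) -
                         (b[1] - a[1]) * (c[0] - a[0]);
    const double eps = 1e-12; // tolerance below which the points count as collinear
    if (cross >  eps) return Orientation2D::CCW;
    if (cross < -eps) return Orientation2D::CW;
    return Orientation2D::COLLINEAR;
}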
Example #8
	int CameraHal::CapturePicture()
	{
		int image_width, image_height, preview_width, preview_height;
        int capture_len;
		unsigned long base, offset;
      
#ifdef R3D4_CONVERT     
        CColorConvert* pConvert;    //class for image processing
#endif		
		struct v4l2_buffer buffer; // for VIDIOC_QUERYBUF and VIDIOC_QBUF
		struct v4l2_format format;
		//struct v4l2_buffer cfilledbuffer; // for VIDIOC_DQBUF
		struct v4l2_requestbuffers creqbuf; // for VIDIOC_REQBUFS and VIDIOC_STREAMON and VIDIOC_STREAMOFF

		sp<MemoryBase> 		mPictureBuffer;
		sp<MemoryBase> 		mFinalPictureBuffer;
		sp<MemoryHeapBase>  mJPEGPictureHeap;
		sp<MemoryBase>		mJPEGPictureMemBase;


		ssize_t newoffset;
		size_t newsize;

		mCaptureFlag = true;
		int jpegSize;
		void* outBuffer;
		int err, i;
		int err_cnt = 0;


		int exifDataSize = 0;
		int thumbnaiDataSize = 0;
		unsigned char* pExifBuf = new unsigned char[64*1024];

		int twoSecondReviewMode = getTwoSecondReviewMode();
		int orientation = getOrientation();

		LOG_FUNCTION_NAME
		
		                           
		if (CameraSetFrameRate())
		{
			LOGE("Error in setting Camera frame rate\n");
			return -1;
		}
        
		LOGD("\n\n\n PICTURE NUMBER =%d\n\n\n",++pictureNumber);
       
        mParameters.getPictureSize(&image_width, &image_height);
		mParameters.getPreviewSize(&preview_width, &preview_height);	
		LOGV("mCameraIndex = %d\n", mCameraIndex);
		LOGD("Picture Size: Width = %d \t Height = %d\n", image_width, image_height);

        /* set size & format of the video image */
		format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
		format.fmt.pix.width = image_width;
		format.fmt.pix.height = image_height;
        
		if(mCamera_Mode == CAMERA_MODE_JPEG)
		{
            format.fmt.pix.pixelformat = PIXEL_FORMAT_JPEG;
			capture_len =  GetJPEG_Capture_Width() * GetJPEG_Capture_Height() * JPG_BYTES_PER_PIXEL;
		}
		else
		{
            format.fmt.pix.pixelformat = PIXEL_FORMAT;
			capture_len = image_width * image_height * UYV_BYTES_PER_PIXEL;   
		}

         // round up to 4096 bytes
		if (capture_len & 0xfff)   
			capture_len = (capture_len & 0xfffff000) + 0x1000;

		LOGV("capture: %s mode, pictureFrameSize = 0x%x = %d\n", 
            (mCamera_Mode == CAMERA_MODE_JPEG)?"jpeg":"yuv", capture_len, capture_len);

            
		mPictureHeap = new MemoryHeapBase(capture_len);
		base = (unsigned long)mPictureHeap->getBase();
		base = (base + 0xfff) & 0xfffff000;
		offset = base - (unsigned long)mPictureHeap->getBase();


        // set capture format
		if (ioctl(camera_device, VIDIOC_S_FMT, &format) < 0)
		{
			LOGE ("Failed to set VIDIOC_S_FMT.\n");
			return -1;
		}
#if OMAP_SCALE       
        if(mCameraIndex == VGA_CAMERA && mCamMode != VT_MODE)
            if(orientation == 0 || orientation == 180)
                setFlip(CAMERA_FLIP_MIRROR);
#endif
		/* Shutter CallBack */
		if(mMsgEnabled & CAMERA_MSG_SHUTTER)
		{
			mNotifyCb(CAMERA_MSG_SHUTTER, 0, 0, mCallbackCookie);
		} 
		
		/* Check if the camera driver can accept 1 buffer */
		creqbuf.type   = V4L2_BUF_TYPE_VIDEO_CAPTURE;
		creqbuf.memory = V4L2_MEMORY_USERPTR;
		creqbuf.count  = 1;
		if (ioctl(camera_device, VIDIOC_REQBUFS, &creqbuf) < 0)
		{
			LOGE ("VIDIOC_REQBUFS Failed. errno = %d\n", errno);
			return -1;
		}

		buffer.type = creqbuf.type;
		buffer.memory = creqbuf.memory;
		buffer.index = 0;
		if (ioctl(camera_device, VIDIOC_QUERYBUF, &buffer) < 0) {
			LOGE("VIDIOC_QUERYBUF Failed");
			return -1;
		}

		buffer.m.userptr = base;
		mPictureBuffer = new MemoryBase(mPictureHeap, offset, buffer.length);
		LOGD("Picture Buffer: Base = %p Offset = 0x%x\n", (void *)base, (unsigned int)offset);

		if (ioctl(camera_device, VIDIOC_QBUF, &buffer) < 0) {
			LOGE("CAMERA VIDIOC_QBUF Failed");
			return -1;
		}

		/* turn on streaming */
		if (ioctl(camera_device, VIDIOC_STREAMON, &creqbuf.type) < 0)
		{
			LOGE("VIDIOC_STREAMON Failed\n");
			return -1;
		}

		LOGD("De-queue the next available buffer\n");

		/* De-queue the next available buffer */
		// try to get a buffer from the camera, up to 10 times
		while (ioctl(camera_device, VIDIOC_DQBUF, &buffer) < 0) 
		{
			LOGE("VIDIOC_DQBUF Failed cnt = %d\n", err_cnt);
			if(err_cnt++ > 10)
			{
				mNotifyCb(CAMERA_MSG_ERROR, CAMERA_DEVICE_ERROR_FOR_RESTART, 0, mCallbackCookie);

				mPictureBuffer.clear();
				mPictureHeap.clear();

				return NO_ERROR;           
			}
		}
		PPM("AFTER CAPTURE YUV IMAGE\n");
		/* turn off streaming */
        
		if (ioctl(camera_device, VIDIOC_STREAMOFF, &creqbuf.type) < 0) 
		{
			LOGE("VIDIOC_STREAMOFF Failed\n");
			return -1;
		}
#if OMAP_SCALE          
        if(mCameraIndex == VGA_CAMERA && mCamMode != VT_MODE)
            if(orientation == 0 || orientation == 180)
                setFlip(CAMERA_FLIP_NONE);
#endif                
        // camera returns processed jpeg image
		if(mCamera_Mode == CAMERA_MODE_JPEG)
		{
			int JPEG_Image_Size = GetJpegImageSize();
			int thumbNailOffset = 0;	// m4mo doesn't store the thumbnail offset?
			int yuvOffset = 0;			// m4mo doesn't store a yuv image?
			// int thumbNailOffset = GetThumbNailOffset();
			// int yuvOffset = GetYUVOffset();
			thumbnaiDataSize = GetThumbNailDataSize();
			sp<IMemoryHeap> heap = mPictureBuffer->getMemory(&newoffset, &newsize);
			uint8_t* pInJPEGDataBUuf = (uint8_t *)heap->base() + newoffset ;			//ptr to jpeg data
			uint8_t* pInThumbNailDataBuf = (uint8_t *)heap->base() + thumbNailOffset;	//ptr to thmubnail
			uint8_t* pYUVDataBuf = (uint8_t *)heap->base() + yuvOffset;

			// FILE* fOut = NULL;
			// fOut = fopen("/dump/dump.jpg", "w");
			// fwrite(pInJPEGDataBUuf, 1, JPEG_Image_Size, fOut);
			// fclose(fOut);
			
			CreateExif(pInThumbNailDataBuf, thumbnaiDataSize, pExifBuf, exifDataSize, EXIF_SET_JPEG_LENGTH);

			//create a new binder object 
			mFinalPictureHeap = new MemoryHeapBase(exifDataSize+JPEG_Image_Size);
			mFinalPictureBuffer = new MemoryBase(mFinalPictureHeap,0,exifDataSize+JPEG_Image_Size);
			heap = mFinalPictureBuffer->getMemory(&newoffset, &newsize);
			uint8_t* pOutFinalJpegDataBuf = (uint8_t *)heap->base();

			
			//create a new binder obj to send yuv data
			if(yuvOffset)
			{
				int mFrameSizeConvert = (preview_width*preview_height*3/2) ;

				mYUVPictureHeap = new MemoryHeapBase(mFrameSizeConvert);
				mYUVPictureBuffer = new MemoryBase(mYUVPictureHeap,0,mFrameSizeConvert);
				mYUVNewheap = mYUVPictureBuffer->getMemory(&newoffset, &newsize);

				PPM("YUV COLOR CONVERSION STARTED\n");
#ifdef NEON

				Neon_Convert_yuv422_to_NV21((uint8_t *)pYUVDataBuf, 
                    (uint8_t *)mYUVNewheap->base(), mPreviewWidth, mPreviewHeight);

				PPM("YUV COLOR CONVERSION ENDED\n");

				if(mMsgEnabled & CAMERA_MSG_RAW_IMAGE)
				{
					mDataCb(CAMERA_MSG_RAW_IMAGE, mYUVPictureBuffer, mCallbackCookie);
				}	
#else
                if(mMsgEnabled & CAMERA_MSG_RAW_IMAGE)
                    mDataCb(CAMERA_MSG_RAW_IMAGE, pYUVDataBuf, mCallbackCookie);

#endif
			}
			//create final JPEG with EXIF into that
			int OutJpegSize = 0;
			if(!CreateJpegWithExif( pInJPEGDataBUuf, JPEG_Image_Size, pExifBuf, exifDataSize, pOutFinalJpegDataBuf, OutJpegSize))
            {
                LOGE("createJpegWithExif fail!!\n");
                return -1;
            }

            if (mMsgEnabled & CAMERA_MSG_COMPRESSED_IMAGE)
            {
                mDataCb(CAMERA_MSG_COMPRESSED_IMAGE, mFinalPictureBuffer, mCallbackCookie);
             }

		}   //CAMERA_MODE_JPEG
        
        // camera returns 16 bit uyv image
        // -> needs to process (rotate/flip) 
        // -> and compess to jpeg (with dsp)
		if(mCamera_Mode == CAMERA_MODE_YUV)
		{
#ifdef HARDWARE_OMX
            // create new buffer for image processing
			int mFrameSizeConvert = (image_width*image_height*2) ;
			mYUVPictureHeap = new MemoryHeapBase(mFrameSizeConvert);
			mYUVPictureBuffer = new MemoryBase(mYUVPictureHeap,0,mFrameSizeConvert);
			mYUVNewheap = mYUVPictureBuffer->getMemory(&newoffset, &newsize);
            
            // buffer from v4l holding the actual image
            uint8_t *pYuvBuffer = (uint8_t*)buffer.m.userptr;    
            
			LOGD("PictureThread: generated a picture, pYuvBuffer=%p yuv_len=%d\n", 
                pYuvBuffer, capture_len);
                     
              
			PPM("YUV COLOR ROTATION STARTED\n");
    
#ifdef R3D4_CONVERT     
            if(mCameraIndex == VGA_CAMERA)
            {
				LOGV("use rotation");
                 // color converter and image processing (flip/rotate);
                 // the NEON lib doesn't seem to work (the JPEG came out corrupted?),
                 // so use our own implementation
                pConvert = new CColorConvert(pYuvBuffer, image_width, image_height, UYV2);
                
                //pConvert->writeFile(DUMP_PATH "before_rotate.uyv", SOURCE);  
                //pConvert->writeFile(DUMP_PATH "before_rotate.bmp", BMP);      
               
                if(mCameraIndex == VGA_CAMERA )
                    pConvert->rotateImage(ROTATE_270);
                // else
                   // pConvert->flipImage(FLIP_VERTICAL);
                
                // write rotated image back to input buffer
                //pConvert->writeFile(DUMP_PATH "after_rotate.bmp", BMP);   
                pConvert->makeUYV2(NULL, INPLACE);  //INPLACE: no new buffer, write to input buffer   
                image_width = pConvert->getWidth();
                image_height = pConvert->geHeight();
            }
#else

#endif            
			PPM("YUV COLOR ROTATION Done\n");           
         
             //pYuvBuffer: [Reused]Output buffer with YUV 420P 270 degree rotated.             
			if(mMsgEnabled & CAMERA_MSG_RAW_IMAGE)
			{   
                // convert pYuvBuffer(YUV422I) to mYUVPictureBuffer(YUV420P)
				Neon_Convert_yuv422_to_YUV420P(pYuvBuffer, (uint8_t *)mYUVNewheap->base(), image_width, image_height);         	
				mDataCb(CAMERA_MSG_RAW_IMAGE, mYUVPictureBuffer, mCallbackCookie);
			}

#endif //HARDWARE_OMX

			if (mMsgEnabled & CAMERA_MSG_COMPRESSED_IMAGE)
			{
#ifdef HARDWARE_OMX  
                // int inputFormat = PIX_YUV420P;
                // int imputSize = image_width * image_height * PIX_YUV420P_BYTES_PER_PIXEL; 

                int inputFormat = PIX_YUV422I;
                int inputSize = image_width * image_height * PIX_YUV422I_BYTES_PER_PIXEL;
				int jpegSize = image_width * image_height * JPG_BYTES_PER_PIXEL;
                
				CreateExif(NULL, 0, pExifBuf, exifDataSize, EXIF_NOTSET_JPEG_LENGTH);
				HAL_PRINT("VGA EXIF size : %d\n", exifDataSize);
                
				mJPEGPictureHeap = new MemoryHeapBase(jpegSize + 256);
				outBuffer = (void *)((unsigned long)(mJPEGPictureHeap->getBase()) + 128);


      
				HAL_PRINT("YUV capture : outbuffer = 0x%x, jpegSize = %d, pYuvBuffer = 0x%x, yuv_len = %d, image_width = %d, image_height = %d, quality = %d, mippMode =%d\n", 
							outBuffer, jpegSize, pYuvBuffer, capture_len, image_width, image_height, mYcbcrQuality, mippMode); 

				if(jpegEncoder)
				{
                	PPM("BEFORE JPEG Encode Image\n");
					err = jpegEncoder->encodeImage(
                            outBuffer,                          // void* outputBuffer, 
                            jpegSize,                           // int outBuffSize, 
                            pYuvBuffer,                         // void *inputBuffer, 
                            inputSize,                          // int inBuffSize, 
                            pExifBuf,                           // unsigned char* pExifBuf,
                            exifDataSize,                       // int ExifSize,
                            image_width,	                    // int width, 
                            image_height,	                    // int height, 
                            mThumbnailWidth,                    // int ThumbWidth, 
                            mThumbnailHeight,                   // int ThumbHeight, 
                            mYcbcrQuality,                      // int quality,
                            inputFormat);                       // int isPixelFmt420p)
                    PPM("AFTER JPEG Encode Image\n");
					LOGD("JPEG ENCODE END\n");

					if(err != true) 
                    {
						LOGE("Jpeg encode failed!!\n");
						return -1;
					} 
                    else 
						LOGD("Jpeg encode success!!\n");
				}

				mJPEGPictureMemBase = new MemoryBase(mJPEGPictureHeap, 128, jpegEncoder->jpegSize);

				if (mMsgEnabled & CAMERA_MSG_COMPRESSED_IMAGE)
				{
					mDataCb(CAMERA_MSG_COMPRESSED_IMAGE, mJPEGPictureMemBase, mCallbackCookie);
				}

				mJPEGPictureMemBase.clear();
				mJPEGPictureHeap.clear();
#endif //HARDWARE_OMX
			}//END of CAMERA_MSG_COMPRESSED_IMAGE
       

 
#ifdef R3D4_CONVERT 
            delete pConvert;  
#endif            

            
		}//END of CAMERA_MODE_YUV
        
		mPictureBuffer.clear();
		mPictureHeap.clear();
        
		if(mCamera_Mode == CAMERA_MODE_JPEG)
		{
			mFinalPictureBuffer.clear();
			mFinalPictureHeap.clear();

		}
         
        mYUVPictureBuffer.clear();
        mYUVPictureHeap.clear();
        
		delete []pExifBuf;
		mCaptureFlag = false;
                
		LOG_FUNCTION_NAME_EXIT

		return NO_ERROR;

	}
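
CapturePicture above follows the usual single-shot V4L2 ioctl sequence: S_FMT, REQBUFS, QUERYBUF, QBUF, STREAMON, DQBUF, STREAMOFF. Here is a condensed sketch of that sequence with the HAL specifics stripped out; it uses MMAP buffers instead of USERPTR for brevity, and the device path and UYVY pixel format are placeholder assumptions, not the HAL's actual configuration.

#include <fcntl.h>
#include <sys/ioctl.h>
#include <sys/mman.h>
#include <unistd.h>
#include <cstdio>
#include <linux/videodev2.h>

int captureOneFrame(const char *devPath, int width, int height) {
    int fd = open(devPath, O_RDWR);
    if (fd < 0) { perror("open"); return -1; }

    // 1. Set the capture format (pixel format here is an assumption).
    v4l2_format fmt = {};
    fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    fmt.fmt.pix.width = width;
    fmt.fmt.pix.height = height;
    fmt.fmt.pix.pixelformat = V4L2_PIX_FMT_UYVY;
    if (ioctl(fd, VIDIOC_S_FMT, &fmt) < 0) { perror("VIDIOC_S_FMT"); close(fd); return -1; }

    // 2. Ask the driver for one memory-mapped buffer.
    v4l2_requestbuffers req = {};
    req.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    req.memory = V4L2_MEMORY_MMAP;
    req.count = 1;
    if (ioctl(fd, VIDIOC_REQBUFS, &req) < 0) { perror("VIDIOC_REQBUFS"); close(fd); return -1; }

    // 3. Query the buffer and map it into our address space.
    v4l2_buffer buf = {};
    buf.type = req.type;
    buf.memory = req.memory;
    buf.index = 0;
    if (ioctl(fd, VIDIOC_QUERYBUF, &buf) < 0) { perror("VIDIOC_QUERYBUF"); close(fd); return -1; }

    void *mem = mmap(NULL, buf.length, PROT_READ | PROT_WRITE, MAP_SHARED, fd, buf.m.offset);
    if (mem == MAP_FAILED) { perror("mmap"); close(fd); return -1; }

    // 4. Queue the buffer and start streaming.
    int type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    if (ioctl(fd, VIDIOC_QBUF, &buf) < 0 || ioctl(fd, VIDIOC_STREAMON, &type) < 0) {
        perror("start streaming"); munmap(mem, buf.length); close(fd); return -1;
    }

    // 5. Block until the driver hands back the filled buffer, then stop.
    if (ioctl(fd, VIDIOC_DQBUF, &buf) < 0)
        perror("VIDIOC_DQBUF");
    else
        printf("captured %u bytes\n", buf.bytesused);

    ioctl(fd, VIDIOC_STREAMOFF, &type);
    munmap(mem, buf.length);
    close(fd);
    return 0;
}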
Example #9
void CObject::rotate(const Quaternion& q)
{
	setOrientation(getOrientation() * q);
};
Example #10
void CObject::pitch(const Radian& angle)
{
	Vector3 xAxis = getOrientation() * Vector3::UNIT_X;
	rotate(xAxis, angle);
};
Example #11
void CObject::roll(const Radian& angle)
{
	Vector3 zAxis = getOrientation() * Vector3::UNIT_Z;
	rotate(zAxis, angle);
};
Example #12
void CObject::yaw(const Radian& angle)
{
	Vector3 yAxis = getOrientation() * Vector3::UNIT_Y;
	rotate(yAxis, angle);
};
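
The pitch/roll/yaw helpers above each derive a local axis from the current orientation and pass it to an axis-angle rotate() overload, which presumably builds a quaternion from that axis and angle and composes it with the orientation, as rotate(const Quaternion&) does. A minimal standalone sketch of that construction and composition; Vec3 and Quat here are stand-ins, not the engine's Vector3/Quaternion types.

#include <cmath>

struct Vec3 { double x, y, z; };

struct Quat {
    double w, x, y, z;

    // Hamilton product, composing rotations as in setOrientation(getOrientation() * q).
    Quat operator*(const Quat &q) const {
        return { w*q.w - x*q.x - y*q.y - z*q.z,
                 w*q.x + x*q.w + y*q.z - z*q.y,
                 w*q.y - x*q.z + y*q.w + z*q.x,
                 w*q.z + x*q.y - y*q.x + z*q.w };
    }
};

// Build a rotation of `angle` radians about a unit-length axis:
// q = (cos(angle/2), axis * sin(angle/2)).
Quat fromAxisAngle(const Vec3 &axis, double angle) {
    const double h = 0.5 * angle;
    const double s = std::sin(h);
    return { std::cos(h), axis.x * s, axis.y * s, axis.z * s };
}

// e.g. pitch: orientation = orientation * fromAxisAngle(localXAxis, angle)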
Example #13
int main(int argc, char **argv)
{
	//Register signal and signal handler
	signal(SIGINT, signal_callback_handler);
	
	//Init UDP with callbacks and pointer to run status
	initUDP( &UDP_Command_Handler, &UDP_Control_Handler, &Running );
	
	print("Eddie starting...\r\n");

	initIdentity();
	
	double EncoderPos[2] = {0};
	
	initEncoders( 183, 46, 45, 44 );
	print("Encoders activated.\r\n");

	imuinit();
	print("IMU Started.\r\n");

	float kalmanAngle;
	InitKalman();
	
#ifndef DISABLE_MOTORS
	print( "Starting motor driver (and resetting wireless) please be patient..\r\n" );
	if ( motor_driver_enable() < 1 )
		{
			print("Startup Failed; Error starting motor driver.\r\n");
			motor_driver_disable();
			return -1;
		}
	print("Motor Driver Started.\r\n");
#endif
	
	print("Eddie is starting the UDP network thread..\r\n");
	pthread_create( &udplistenerThread, NULL, &udplistener_Thread, NULL );
	
	print( "Eddie is Starting PID controllers\r\n" );
	/*Set default PID values and init pitchPID controllers*/
	pidP_P_GAIN = PIDP_P_GAIN;	pidP_I_GAIN = PIDP_I_GAIN;	pidP_D_GAIN = PIDP_D_GAIN;	pidP_I_LIMIT = PIDP_I_LIMIT; pidP_EMA_SAMPLES = PIDP_EMA_SAMPLES;
	PIDinit( &pitchPID[0], &pidP_P_GAIN, &pidP_I_GAIN, &pidP_D_GAIN, &pidP_I_LIMIT, &pidP_EMA_SAMPLES );
	PIDinit( &pitchPID[1], &pidP_P_GAIN, &pidP_I_GAIN, &pidP_D_GAIN, &pidP_I_LIMIT, &pidP_EMA_SAMPLES );
	
	/*Set default values and init speedPID controllers*/
	pidS_P_GAIN = PIDS_P_GAIN;	pidS_I_GAIN = PIDS_I_GAIN;	pidS_D_GAIN = PIDS_D_GAIN;	pidS_I_LIMIT = PIDS_I_LIMIT; pidS_EMA_SAMPLES = PIDS_EMA_SAMPLES;
	PIDinit( &speedPID[0], &pidS_P_GAIN, &pidS_I_GAIN, &pidS_D_GAIN, &pidS_I_LIMIT, &pidS_EMA_SAMPLES );
	PIDinit( &speedPID[1], &pidS_P_GAIN, &pidS_I_GAIN, &pidS_D_GAIN, &pidS_I_LIMIT, &pidS_EMA_SAMPLES );
	
	//Get estimate of starting angle and specify complementary filter and kalman filter start angles
	getOrientation();
	kalmanAngle = filteredPitch = i2cPitch;
	setkalmanangle( filteredPitch );
	filteredRoll = i2cRoll;
	
	print( "Eddie startup complete. Hold me upright to begin\r\n" );
	
	double gy_scale = 0.01;
	last_PID_ms = last_gy_ms = current_milliseconds();
	
	while(Running)
	{
		GetEncoders( EncoderPos );
		
		if( fabs(GetEncoder()) > 2000 && !inRunAwayState )
		{
			print( "Help! I'm running and not moving.\r\n");
			ResetEncoders();
			inRunAwayState=1;
		}
		
		/*Read IMU and calculate rough angle estimates*/
		getOrientation();
		
		/*Calculate time since last IMU reading and determine gyro scale (dt)*/
		gy_scale = ( current_milliseconds() - last_gy_ms ) / 1000.0f;
	
		last_gy_ms = current_milliseconds();
		
		/*Complementary filters to smooth rough pitch and roll estimates*/
		filteredPitch = 0.995 * ( filteredPitch + ( gy * gy_scale ) ) + ( 0.005 * i2cPitch );
		filteredRoll = 0.98 * ( filteredRoll + ( gx * gy_scale ) ) + ( 0.02 * i2cRoll );

		/*Kalman filter for most accurate pitch estimates*/	
		kalmanAngle = -getkalmanangle(filteredPitch, gy, gy_scale /*dt*/);

		/* Monitor angles to determine if Eddie has fallen too far... or if Eddie has been returned upright*/
		if ( ( inRunAwayState || ( fabs( kalmanAngle ) > 50 || fabs( filteredRoll ) > 45 ) ) && !inFalloverState ) 
		{
#ifndef DISABLE_MOTORS
			motor_driver_standby(1);
#endif
			inFalloverState = 1;
			print( "Help! I've fallen over and I can't get up =)\r\n");
		} 
		else if ( fabs( kalmanAngle ) < 10 && inFalloverState && fabs( filteredRoll ) < 20 )
		{
			if ( ++inSteadyState == 100 )
			{
				inRunAwayState = 0;
				inSteadyState = 0;
#ifndef DISABLE_MOTORS
				motor_driver_standby(0);
#endif
				inFalloverState = 0;
				print( "Thank you!\r\n" );
			}
		}
		else
		{
			inSteadyState = 0;
		}

		if ( !inFalloverState )
		{
			/* Drive operations */
			smoothedDriveTrim = ( 0.99 * smoothedDriveTrim ) + ( 0.01 * driveTrim );
			if( smoothedDriveTrim != 0 ) 
			{
				EncoderAddPos(smoothedDriveTrim); //Alter encoder position to generate movement
			}
			
			/* Turn operations */
			if( turnTrim != 0  )
			{
				EncoderAddPos2( turnTrim, -turnTrim ); //Alter encoder positions to turn
			}
						
			double timenow = current_milliseconds();

			speedPIDoutput[0] = PIDUpdate( 0, EncoderPos[0], timenow - last_PID_ms, &speedPID[0] );//Wheel Speed PIDs
			speedPIDoutput[1] = PIDUpdate( 0, EncoderPos[1], timenow - last_PID_ms, &speedPID[1] );//Wheel Speed PIDs
			pitchPIDoutput[0] = PIDUpdate( speedPIDoutput[0], kalmanAngle, timenow - last_PID_ms, &pitchPID[0] );//Pitch Angle PIDs		
			pitchPIDoutput[1] = PIDUpdate( speedPIDoutput[1], kalmanAngle, timenow - last_PID_ms, &pitchPID[1] );//Pitch Angle PIDs
			
			last_PID_ms = timenow;
			
			//Limit PID output to +/-100 to match 100% motor throttle
			if ( pitchPIDoutput[0] > 100.0 )  pitchPIDoutput[0] = 100.0;
			if ( pitchPIDoutput[1] > 100.0 )  pitchPIDoutput[1] = 100.0;
			if ( pitchPIDoutput[0] < -100.0 ) pitchPIDoutput[0] = -100.0;
			if ( pitchPIDoutput[1] < -100.0 ) pitchPIDoutput[1] = -100.0;

		}
		else //We are inFalloverState
		{
			ResetEncoders();
			pitchPID[0].accumulatedError = 0;
			pitchPID[1].accumulatedError = 0;
			speedPID[0].accumulatedError = 0;
			speedPID[1].accumulatedError = 0;
			driveTrim = 0;
			turnTrim = 0;
		}
	
#ifndef DISABLE_MOTORS
		set_motor_speed_right( pitchPIDoutput[0] );
		set_motor_speed_left( pitchPIDoutput[1] );
#endif

		if ( (!inFalloverState || outputto == UDP) && StreamData )
		{			
			print( "PIDout: %0.2f,%0.2f\tcompPitch: %6.2f kalPitch: %6.2f\tPe: %0.3f\tIe: %0.3f\tDe: %0.3f\tPe: %0.3f\tIe: %0.3f\tDe: %0.3f\r\n",
				speedPIDoutput[0], 
				pitchPIDoutput[0], 
				filteredPitch, 
				kalmanAngle,
				pitchPID[0].error, 
				pitchPID[0].accumulatedError, 
				pitchPID[0].differentialError, 
				speedPID[0].error, 
				speedPID[0].accumulatedError, 
				speedPID[0].differentialError 
				);
		}

	} //--while(Running)
	
	print( "Eddie is cleaning up...\r\n" );
	
	CloseEncoder();
	
	pthread_join(udplistenerThread, NULL);
	print( "UDP Thread Joined..\r\n" );

#ifndef DISABLE_MOTORS
	motor_driver_disable();
	print( "Motor Driver Disabled..\r\n" );
#endif
	
	print( "Eddie cleanup complete. Good Bye!\r\n" );
	return 0;
}
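
The main loop above blends the integrated gyro rate with the accelerometer-derived angle using a complementary filter before the Kalman filter and PIDs run. A one-function sketch of that update follows; the 0.995/0.005 split is taken from the pitch filter in the loop, and the names are illustrative.

// Integrate the gyro rate over dt, then pull the estimate slowly toward the
// accelerometer-derived angle so gyro drift is cancelled over time.
static double complementaryFilter(double previousAngle, double gyroRate,
                                  double dt, double accelAngle) {
    return 0.995 * (previousAngle + gyroRate * dt) + 0.005 * accelAngle;
}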
Example #14
    void Stack::specialTransformAndSize()
    {
        // Super call
        Container::specialTransformAndSize();

        // Calculate separator sizes and adjust inner sizes
        int separatorCount = (int)mChildren.size() - 1;
        int separatorSize = 0;
        std::vector<int> separatorPositions;

        // Only use separators if requested and reasonable
        if (mSeparator > 0 && separatorCount >= 1)
        {
            // Check whether there are even enough pixels to render the separators
            if (getOrientation() == Element::Orientation::HORIZONTAL
                && separatorCount <= mInnerWidth)
            {
                separatorSize = (int)(mInnerWidth * mSeparator);
                separatorSize = separatorSize < 1 ? 1 : separatorSize;
                mInnerWidth -= separatorSize * separatorCount;
            }
            else if (separatorCount <= mInnerHeight)
            {
                separatorSize = (int)(mInnerHeight * mSeparator);
                separatorSize = separatorSize < 1 ? 1 : separatorSize;
                mInnerHeight -= separatorSize * separatorCount;
            }
        }

        // Get all dynamic scales together if necessary
        float completeScale = 0;
        float maxDynamicScale = 0;
        for (const std::unique_ptr<Element>& element : mChildren)
        {
            float dynamicScale = element->getDynamicScale();
            completeScale += dynamicScale;
            maxDynamicScale = std::max(dynamicScale, maxDynamicScale);
        }

        // Determine direction of stacking
        if (getOrientation() == Element::Orientation::HORIZONTAL)
        {
            // Horizontal
            int usedElemX = 0;
            int sumElemWidth = 0;
            int sumUsedWidth = 0;
            int sumUsedHeight = 0;
            std::vector<int> elemWidths;

            // Collect used size
            std::vector<int> usedWidths, usedHeights;
            int elementNumber = 1;
            for (const std::unique_ptr<Element>& element : mChildren)
            {
                int usedWidth, usedHeight;
                int localElemWidth;
                int localElemHeight;

                // Element width
                if (elementNumber == mChildren.size())
                {
                    // Fill stack with last element
                    localElemWidth = mInnerWidth - sumElemWidth;
                }
                else
                {
                    // Use dynamic scale
                    localElemWidth = (int)((float)mInnerWidth
                        * (element->getDynamicScale() / completeScale));
                    sumElemWidth += localElemWidth;
                }

                // Element height
                if (mRelativeScaling == RelativeScaling::BOTH_AXES)
                {
                    localElemHeight = (int)((float)mInnerHeight
                        * (element->getDynamicScale() / maxDynamicScale));
                }
                else
                {
                    localElemHeight = mInnerHeight;
                }
                elemWidths.push_back(localElemWidth);
                element->evaluateSize(localElemWidth, localElemHeight, usedWidth, usedHeight);
                usedWidths.push_back(usedWidth);
                usedHeights.push_back(usedHeight);
                sumUsedWidth += usedWidth;
                sumUsedHeight += usedHeight;

                // Next looping
                elementNumber++;
            }

            // Alignment
            int i = 0;
            int usedPadding = (int)((float)(mInnerWidth - sumUsedWidth) * mPadding);
            int usedElemPadding = usedPadding / (int)mChildren.size();

            // No padding when alignment is filled
            if (mAlignment == Alignment::FILL)
            {
                usedPadding = 0;
                usedElemPadding = 0;
            }

            // Determine final values and assign them
            for (const std::unique_ptr<Element>& element : mChildren)
            {
                int deltaX;
                int deltaY = mInnerHeight - usedHeights[i];
                int offsetX;

                int finalX, finalY, finalWidth, finalHeight;

                // Do alignment specific calculations
                switch (mAlignment)
                {
                case Alignment::FILL:
                    deltaX = elemWidths[i] - usedWidths[i];
                    offsetX = mInnerX;
                    break;
                case Alignment::TAIL:
                    deltaX = usedElemPadding;
                    offsetX = mInnerX;
                    break;
                case Alignment::HEAD:
                    deltaX = usedElemPadding;
                    offsetX = mInnerX + (mInnerWidth - (sumUsedWidth + usedPadding));
                    break;
                default: // Alignment::CENTER
                    deltaX = usedElemPadding;
                    offsetX = mInnerX + (mInnerWidth - (sumUsedWidth + usedPadding)) / 2;
                    break;
                }

                // These values are the same for all alignments
                finalX = usedElemX;
                finalY = mInnerY + (deltaY / 2);
                finalWidth = usedWidths[i];
                finalHeight = usedHeights[i];

                // Advance the used x position for the next element
                usedElemX = finalX + finalWidth + deltaX;

                // Separators (only add new ones if not last element)
                if (separatorSize > 0 && separatorPositions.size() < separatorCount)
                {
                    separatorPositions.push_back(usedElemX + offsetX);
                    usedElemX += separatorSize;
                }

                // Finalize x coordinate for current element
                finalX += (deltaX / 2 + offsetX);

                // Tell element about it
                element->transformAndSize(finalX, finalY, finalWidth, finalHeight);

                // Next looping
                i++;
            }
        }
        else
        {
            // Vertical
            int usedElemY = 0;
            int sumElemHeight = 0;
            int sumUsedWidth = 0;
            int sumUsedHeight = 0;
            std::vector<int> elemHeights;

            // Collect used size
            std::vector<int> usedWidths, usedHeights;
            int elementNumber = 1;
            for (const std::unique_ptr<Element>& element : mChildren)
            {
                int usedWidth, usedHeight;
                int localElemWidth;
                int localElemHeight;

                // Element width
                if (mRelativeScaling == RelativeScaling::BOTH_AXES)
                {
                    localElemWidth = (int)((float)mInnerWidth
                        * (element->getDynamicScale() / maxDynamicScale));
                }
                else
                {
                    localElemWidth = mInnerWidth;
                }

                // Element height
                if (elementNumber == mChildren.size())
                {
                    // Fill stack with last element
                    localElemHeight = mInnerHeight - sumElemHeight;
                }
                else
                {
                    // Use dynamic scale
                    localElemHeight = (int)((float)mInnerHeight
                        * (element->getDynamicScale() / completeScale));
                    sumElemHeight += localElemHeight;
                }

                elemHeights.push_back(localElemHeight);
                element->evaluateSize(localElemWidth, localElemHeight, usedWidth, usedHeight);
                usedWidths.push_back(usedWidth);
                usedHeights.push_back(usedHeight);
                sumUsedWidth += usedWidth;
                sumUsedHeight += usedHeight;

                // Next looping
                elementNumber++;
            }

            // Alignment
            int i = 0;
            int usedPadding = (int)((float)(mInnerHeight - sumUsedHeight) * mPadding);
            int usedElemPadding = usedPadding / (int)mChildren.size();

            // No padding when alignment is filled
            if (mAlignment == Alignment::FILL)
            {
                usedPadding = 0;
                usedElemPadding = 0;
            }

            // Determine final values and assign them
            for (const std::unique_ptr<Element>& element : mChildren)
            {
                int deltaX = mInnerWidth - usedWidths[i];
                int deltaY;
                int offsetY;

                int finalX, finalY, finalWidth, finalHeight;

                // Do alignment specific calculations
                switch (mAlignment)
                {
                case Alignment::FILL:
                    deltaY = elemHeights[i] - usedHeights[i];
                    offsetY = mInnerY;
                    break;
                case Alignment::TAIL:
                    deltaY = usedElemPadding;
                    offsetY = mInnerY;
                    break;
                case Alignment::HEAD:
                    deltaY = usedElemPadding;
                    offsetY = mInnerY + (mInnerHeight - (sumUsedHeight + usedPadding));
                    break;
                default: // Alignment::CENTER
                    deltaY = usedElemPadding;
                    offsetY = mInnerY + (mInnerHeight - (sumUsedHeight + usedPadding)) / 2;
                    break;
                }

                // These values are the same for all alignments
                finalX = mInnerX + (deltaX / 2);
                finalY = usedElemY;
                finalWidth = usedWidths[i];
                finalHeight = usedHeights[i];

                // Advance the used y position for the next element
                usedElemY = finalY + finalHeight + deltaY;

                // Separators (only add new ones if not last element)
                if (separatorSize > 0 && separatorPositions.size() < separatorCount)
                {
                    separatorPositions.push_back(usedElemY + offsetY);
                    usedElemY += separatorSize;
                }

                // Finalize y coordinate for current element
                finalY += (deltaY / 2 + offsetY);

                // Tell element about it
                element->transformAndSize(finalX, finalY, finalWidth, finalHeight);

                // Next looping
                i++;
            }
        }

        // Calculate draw matrices of separators using new data
        mSeparatorDrawMatrices.clear();

        // Only think about separators if necessary
        if (mSeparator > 0 && separatorCount >= 1)
        {
            // Calculate correct transformation
            int separatorWidth, separatorHeight;

            // Scale depending on orientation
            if (getOrientation() == Element::Orientation::HORIZONTAL)
            {
                separatorWidth = separatorSize;
                separatorHeight = mHeight;
            }
            else
            {
                separatorWidth = mWidth;
                separatorHeight = separatorSize;
            }

            for (int i = 0; i < separatorPositions.size(); i++)
            {
                // Translation depending on orientation
                if (getOrientation() == Element::Orientation::HORIZONTAL)
                {
                    mSeparatorDrawMatrices.push_back(
                        calculateDrawMatrix(
                            separatorPositions[i],
                            mY,
                            separatorWidth,
                            separatorHeight));
                }
                else
                {
                    mSeparatorDrawMatrices.push_back(
                        calculateDrawMatrix(
                            mX,
                            separatorPositions[i],
                            separatorWidth,
                            separatorHeight));
                }
            }
        }
    }
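
The stack above gives each child a share of the inner extent proportional to its dynamic scale and lets the last child absorb the integer rounding remainder ("fill stack with last element"). A small sketch of that partition scheme; the function name and signature are illustrative, not the widget library's API.

#include <vector>

// Split `total` pixels among the given weights; the last slot takes whatever
// integer remainder is left, mirroring the "fill stack with last element" case.
std::vector<int> partitionByWeight(int total, const std::vector<float> &weights) {
    std::vector<int> sizes;
    if (weights.empty())
        return sizes;

    float weightSum = 0.0f;
    for (float w : weights)
        weightSum += w;

    int used = 0;
    for (size_t i = 0; i + 1 < weights.size(); ++i) {
        int share = static_cast<int>(total * (weights[i] / weightSum));
        sizes.push_back(share);
        used += share;
    }
    sizes.push_back(total - used); // last element fills the rest
    return sizes;
}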
Example #15
Common::UString GFF3Struct::getString(const Common::UString &field,
                                      const Common::UString &def) const {

	const Field *f = getField(field);
	if (!f)
		return def;

	if (f->type == kFieldTypeExoString) {
		Common::SeekableReadStream &data = getData(*f);

		const uint32 length = data.readUint32LE();
		return Common::readStringFixed(data, Common::kEncodingASCII, length);
	}

	if (f->type == kFieldTypeResRef) {
		Common::SeekableReadStream &data = getData(*f);

		const uint32 length = data.readByte();
		return Common::readStringFixed(data, Common::kEncodingASCII, length);
	}

	if (f->type == kFieldTypeLocString) {
		LocString locString;
		getLocString(field, locString);

		return locString.getString();
	}

	if ((f->type == kFieldTypeByte  ) ||
	    (f->type == kFieldTypeUint16) ||
	    (f->type == kFieldTypeUint32) ||
	    (f->type == kFieldTypeUint64) ||
	    (f->type == kFieldTypeStrRef)) {

		return Common::composeString(getUint(field));
	}

	if ((f->type == kFieldTypeChar  ) ||
	    (f->type == kFieldTypeSint16) ||
	    (f->type == kFieldTypeSint32) ||
	    (f->type == kFieldTypeSint64)) {

		return Common::composeString(getSint(field));
	}

	if ((f->type == kFieldTypeFloat) ||
	    (f->type == kFieldTypeDouble)) {

		return Common::composeString(getDouble(field));
	}

	if (f->type == kFieldTypeVector) {
		float x = 0.0, y = 0.0, z = 0.0;

		getVector(field, x, y, z);
		return Common::composeString(x) + "/" +
		       Common::composeString(y) + "/" +
		       Common::composeString(z);
	}

	if (f->type == kFieldTypeOrientation) {
		float a = 0.0, b = 0.0, c = 0.0, d = 0.0;

		getOrientation(field, a, b, c, d);
		return Common::composeString(a) + "/" +
		       Common::composeString(b) + "/" +
		       Common::composeString(c) + "/" +
		       Common::composeString(d);
	}

	throw Common::Exception("GFF3: Field is not a string(able) type");
}