Example #1
static int amagent_init(apr_pool_t *pconf, apr_pool_t *plog, apr_pool_t *ptemp,
        server_rec *s) {
    /* main process init */
    int status;
    apr_status_t rv = APR_SUCCESS;
    void *data;

#define AMAGENT_INIT_ONCE "AMAGENT_INIT_ONCE"
    apr_pool_userdata_get(&data, AMAGENT_INIT_ONCE, s->process->pool);
    if (!data) {
        /* first post_config pass: just set the marker and return; the real
         * init runs on the second pass (httpd invokes post_config twice) */
        apr_pool_userdata_set((const void *) 1, AMAGENT_INIT_ONCE,
                apr_pool_cleanup_null, s->process->pool);
        return rv;
    }
    apr_pool_cleanup_register(pconf, s, amagent_cleanup, apr_pool_cleanup_null);
    ap_add_version_component(pconf, MODINFO);
    LOG_S(APLOG_DEBUG, s, "amagent_init() %d", getpid());

#ifndef _WIN32
    status = am_init();
    if (status != AM_SUCCESS) {
        rv = APR_EINIT;
        LOG_S(APLOG_ERR, s, "amagent_init() status: %s", am_strerror(status));
    }
#endif
    return rv;
}
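Apache httpd invokes the post_config hook twice (once while checking the configuration and once for the real start-up), which is why amagent_init() keys off a marker stored in the process pool and only performs am_init() on the later pass. The sketch below isolates that guard from the agent-specific parts; it is a generic APR/httpd pattern written for illustration only, and the key string "MY_MODULE_INIT" and the function name my_post_config are made up, not OpenAM code.

/* Minimal sketch of the "run post_config work only once" guard used above. */
#include <httpd.h>
#include <http_config.h>
#include <apr_pools.h>

static int my_post_config(apr_pool_t *pconf, apr_pool_t *plog,
        apr_pool_t *ptemp, server_rec *s) {
    void *flag = NULL;
    apr_pool_userdata_get(&flag, "MY_MODULE_INIT", s->process->pool);
    if (flag == NULL) {
        /* first pass: remember that we have been called and do nothing */
        apr_pool_userdata_set((const void *) 1, "MY_MODULE_INIT",
                apr_pool_cleanup_null, s->process->pool);
        return OK;
    }
    /* second pass: perform the real one-time initialization here */
    return OK;
}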
Example #2
void test_stream()
{
	LOG_SCOPE_FUNCTION(INFO);
	LOG_S(INFO) << "Testing stream-logging.";
	LOG_S(1) << "Stream-logging with verbosity 1";
	LOG_S(2) << "Stream-logging with verbosity 2";
	LOG_S(3) << "Stream-logging with verbosity 3";
	LOG_IF_S(INFO, true) << "Should be visible";
	LOG_IF_S(INFO, false) << "SHOULD NOT BE VISIBLE";
	LOG_IF_S(1, true) << "Should be visible if verbosity is at least 1";
	LOG_IF_S(1, false) << "SHOULD NOT BE VISIBLE";
	CHECK_LT_S(1, 2);
	CHECK_GT_S(3, 2) << "Weird";
}
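The numeric LOG_S(1), LOG_S(2) and LOG_S(3) lines above only appear when the logger's verbosity is at least that number. Assuming the macros come from the loguru single-header library (LOG_S, LOG_IF_S, LOG_SCOPE_FUNCTION and the CHECK_*_S macros all match its API), a minimal driver for this test could look like the sketch below; loguru::init() and loguru::g_stderr_verbosity are taken from loguru's documented interface, so double-check them against the version actually vendored.

// Hypothetical driver, assuming loguru is what provides LOG_S here.
#include <loguru.hpp>

void test_stream(); // defined above

int main(int argc, char* argv[])
{
	loguru::init(argc, argv);        // also honours a -v <level> command-line flag
	loguru::g_stderr_verbosity = 1;  // INFO and verbosity-1 lines show, 2 and 3 stay hidden
	test_stream();
	return 0;
}

Running with -v 3 (or setting g_stderr_verbosity to 3) would make all three numeric lines visible.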
Example #3
 void changeState(State newState)
 {
     if (newState == state)
         return;
     LOG_S(changeState_, "Changing state from " << state << " to " << newState);
     state = newState;
 }
Example #4
/**
 * Selects the page
 */
void CTabControl::selectPage(int page)
{
	LOG_S("Selected the page:", pages[page]->getText());
	TabCtrl_SetCurSel(getHandle(), page);	
	currentPage = pages[page];
	hideAllPages();
	currentPage->show();
}
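Examples #4, #8, #16, #20 and #25 use a project-local two-argument LOG_S(label, value) helper rather than the printf-style or stream-style variants seen in the other examples. Its real definition is not shown on this page; a hypothetical sketch that is merely consistent with how it is called might be:

// Hypothetical sketch only; the GUI project's real logging header is not included in these examples.
#include <iostream>

// Streams a fixed label followed by any streamable value (e.g. the caption returned by getText()).
#define LOG_S(label, value) \
	do { std::clog << "[gui] " << (label) << ' ' << (value) << std::endl; } while (0)

With a definition along these lines, LOG_S("Selected the page:", pages[page]->getText()) simply writes the label followed by the page caption to the log stream.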
Example #5
void CAssParser::LoadPieceTransformations(SAssPiece* piece, const LuaTable& pieceMetaTable)
{
	//! Process transforms
	float3 rotate, scale, offset;
	aiVector3D _scale, _offset;
	aiQuaternion _rotate;
	piece->node->mTransformation.Decompose(_scale,_rotate,_offset);

	LOG_S(LOG_SECTION_PIECE,
			"(%d:%s) Assimp offset (%f,%f,%f), rotate (%f,%f,%f), scale (%f,%f,%f)",
			piece->model->numPieces, piece->name.c_str(),
			_offset.x, _offset.y, _offset.z,
			_rotate.x, _rotate.y, _rotate.z,
			_scale.x, _scale.y, _scale.z
			);

	offset   = pieceMetaTable.GetFloat3("offset", float3(_offset.x, _offset.y, _offset.z));
	offset.x = pieceMetaTable.GetFloat("offsetx", offset.x);
	offset.y = pieceMetaTable.GetFloat("offsety", offset.y);
	offset.z = pieceMetaTable.GetFloat("offsetz", offset.z);

	rotate   = QuaternionToRadianAngles(_rotate);
	rotate   = float3(rotate.z, rotate.x, rotate.y); //! swizzle
	rotate   = pieceMetaTable.GetFloat3("rotate", rotate * RADTODEG);
	rotate.x = pieceMetaTable.GetFloat("rotatex", rotate.x);
	rotate.y = pieceMetaTable.GetFloat("rotatey", rotate.y);
	rotate.z = pieceMetaTable.GetFloat("rotatez", rotate.z);
	rotate  *= DEGTORAD;

	scale   = pieceMetaTable.GetFloat3("scale", float3(_scale.x, _scale.z, _scale.y));
	scale.x = pieceMetaTable.GetFloat("scalex", scale.x);
	scale.y = pieceMetaTable.GetFloat("scaley", scale.y);
	scale.z = pieceMetaTable.GetFloat("scalez", scale.z);

	LOG_S(LOG_SECTION_PIECE,
			"(%d:%s) Relative offset (%f,%f,%f), rotate (%f,%f,%f), scale (%f,%f,%f)",
			piece->model->numPieces, piece->name.c_str(),
			offset.x, offset.y, offset.z,
			rotate.x, rotate.y, rotate.z,
			scale.x, scale.y, scale.z
			);
	piece->offset = offset;
	piece->rot = rotate;
	piece->scale = scale;
}
Example #6
static apr_status_t amagent_cleanup(void *arg) {
    /* main process cleanup */
    server_rec *s = (server_rec *) arg;
    LOG_S(APLOG_DEBUG, s, "amagent_cleanup() %d", getpid());
#ifndef _WIN32
    am_shutdown();
#endif
    return APR_SUCCESS;
}
Example #7
unsigned int CArchiveScanner::GetArchiveCompleteChecksum(const std::string& name) const
{
	const std::vector<std::string>& ars = GetAllArchivesUsedBy(name);
	unsigned int checksum = 0;

	for (unsigned int a = 0; a < ars.size(); a++) {
		checksum ^= GetSingleArchiveChecksum(ars[a]);
	}
	LOG_S(LOG_SECTION_ARCHIVESCANNER, "archive checksum %s: %d/%u", name.c_str(), checksum, checksum);
	return checksum;
}
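The complete checksum is simply the XOR of the per-archive checksums of everything the archive pulls in. The standalone snippet below (illustration only, not Spring code) shows the two properties that matter: XOR-folding is order-independent, and folding the same archive in twice cancels it out, which is exactly what the duplicate check in Example #9 guards against.

// Illustration only: why XOR is a convenient way to combine per-archive checksums.
#include <cassert>

int main()
{
	const unsigned a = 0x1234u, b = 0xBEEFu, c = 0x0FF0u;
	assert((a ^ b ^ c) == (c ^ a ^ b)); // dependency order does not matter
	assert((a ^ b ^ b) == a);           // a duplicated dependency would cancel itself out
	return 0;
}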
Example #8
/**
 * Hides all the pages of the control
 */
void CTabControl::hideAllPages()
{
	LOG("Hiding all the pages");
	size_t size = pages.size();
	for(size_t i=0; i<size; i++)
	{
		LOG_S("All.Hiding page:", pages[i]->getText());
		pages[i]->hide();
	}
	LOG("Leaving");
}
Example #9
std::vector<std::string> CArchiveScanner::GetArchives(const std::string& root, int depth) const
{
	LOG_S(LOG_SECTION_ARCHIVESCANNER, "GetArchives: %s (depth %u)",
			root.c_str(), depth);
	// Protect against circular dependencies
	// (worst case depth is if all archives form one huge dependency chain)
	if ((unsigned)depth > archiveInfos.size()) {
		throw content_error("Circular dependency");
	}

	std::vector<std::string> ret;
	std::string lcname = StringToLower(ArchiveFromName(root));
	std::map<std::string, ArchiveInfo>::const_iterator aii = archiveInfos.find(lcname);
	if (aii == archiveInfos.end()) {
#ifdef UNITSYNC
		// unresolved dep, add it, so unitsync still shows this file
		ret.push_back(lcname);
		return ret;
#else
		throw content_error("Archive \"" + lcname + "\" not found");
#endif
	}

	// Check if this archive has been replaced
	while (aii->second.replaced.length() > 0) {
		// FIXME instead of this, call this function recursively, to get the proper error handling
		aii = archiveInfos.find(aii->second.replaced);
		if (aii == archiveInfos.end()) {
			throw content_error("Unknown error parsing archive replacements");
		}
	}

	ret.push_back(aii->second.path + aii->second.origName);

	// add depth-first
	for (std::vector<std::string>::const_iterator i = aii->second.archiveData.GetDependencies().begin(); i != aii->second.archiveData.GetDependencies().end(); ++i) {
		const std::vector<std::string>& deps = GetArchives(*i, depth + 1);

		for (std::vector<std::string>::const_iterator j = deps.begin(); j != deps.end(); ++j) {
			if (std::find(ret.begin(), ret.end(), *j) == ret.end()) {
				// add only if this dependency is not already somewhere
				// in the chain (which can happen if ArchiveCache.lua has
				// not been written yet) so its checksum is not XOR'ed
				// with the running one multiple times (Get*Checksum())
				ret.push_back(*j);
			}
		}
	}

	return ret;
}
Example #10
unsigned int CArchiveScanner::GetArchiveCompleteChecksum(const std::string& name)
{
	const std::vector<std::string> ars = GetAllArchivesUsedBy(name);

	unsigned int checksum = 0;

	for (const std::string& depName: ars) {
		const std::string& archive = ArchiveFromName(depName);
		checksum ^= GetSingleArchiveChecksum(GetArchivePath(archive) + archive);
	}
	LOG_S(LOG_SECTION_ARCHIVESCANNER, "archive checksum %s: %d/%u", name.c_str(), checksum, checksum);
	return checksum;
}
Example #11
unsigned int CArchiveScanner::GetSingleArchiveChecksum(const std::string& name) const
{
	std::string lcname = FileSystem::GetFilename(name);
	StringToLowerInPlace(lcname);

	std::map<std::string, ArchiveInfo>::const_iterator aii = archiveInfos.find(lcname);
	if (aii == archiveInfos.end()) {
		LOG_SL(LOG_SECTION_ARCHIVESCANNER, L_WARNING, "%s checksum: not found (0)", name.c_str());
		return 0;
	}

	LOG_S(LOG_SECTION_ARCHIVESCANNER,"%s checksum: %d/%u", name.c_str(), aii->second.checksum, aii->second.checksum);
	return aii->second.checksum;
}
Example #12
unsigned int CArchiveScanner::GetSingleArchiveChecksum(const std::string& filePath)
{
	ComputeChecksumForArchive(filePath);
	const std::string lcname = std::move(StringToLower(FileSystem::GetFilename(filePath)));

	const auto aii = archiveInfos.find(lcname);
	if (aii == archiveInfos.end()) {
		LOG_SL(LOG_SECTION_ARCHIVESCANNER, L_WARNING, "%s checksum: not found (0)", filePath.c_str());
		return 0;
	}

	LOG_S(LOG_SECTION_ARCHIVESCANNER,"%s checksum: %d/%u", filePath.c_str(), aii->second.checksum, aii->second.checksum);
	return aii->second.checksum;
}
Example #13
std::vector<std::string> CArchiveScanner::GetAllArchivesUsedBy(const std::string& root, int depth) const
{
	LOG_S(LOG_SECTION_ARCHIVESCANNER, "GetArchives: %s (depth %u)", root.c_str(), depth);
	// Protect against circular dependencies
	// (worst case depth is if all archives form one huge dependency chain)
	if ((unsigned)depth > archiveInfos.size()) {
		throw content_error("Circular dependency");
	}

	std::vector<std::string> ret;
	std::string resolvedName = ArchiveNameResolver::GetGame(root);
	std::string lcname = StringToLower(ArchiveFromName(resolvedName));
	std::map<std::string, ArchiveInfo>::const_iterator aii = archiveInfos.find(lcname);
	if (aii == archiveInfos.end()) {
#ifdef UNITSYNC
		// unresolved dep, add it, so unitsync still shows this file
		ret.push_back(lcname);
		return ret;
#else
		throw content_error("Archive \"" + lcname + "\" not found");
#endif
	}

	// Check if this archive has been replaced
	while (aii->second.replaced.length() > 0) {
		// FIXME instead of this, call this function recursively, to get the proper error handling
		aii = archiveInfos.find(aii->second.replaced);
		if (aii == archiveInfos.end()) {
#ifdef UNITSYNC
			// unresolved dep, add it, so unitsync still shows this file
			ret.push_back(lcname);
			return ret;
#else
			throw content_error("Unknown error parsing archive replacements");
#endif
		}
	}

	// add depth-first
	ret.push_back(aii->second.path + aii->second.origName);
	for (const std::string& dep: aii->second.archiveData.GetDependencies()) {
		const std::vector<std::string>& deps = GetAllArchivesUsedBy(dep, depth + 1);
		for (const std::string& depSub: deps) {
			AddDependency(ret, depSub);
		}
	}

	return ret;
}
Example #14
/*! Setup the global ci_cfg_opts struct.
 */
int ci_cfg_query(void)
{
  /* Install "modified" netif opts default values, the rest
   * (citp_opts, user_opts) don't need any modification of their "original"
   * default values.
   */
  ci_netif_config_opts_defaults(&ci_cfg_opts.netif_opts);

  /* adjust the netif options again... */
  ci_netif_config_opts_getenv(&ci_cfg_opts.netif_opts);
  ci_netif_config_opts_rangecheck(&ci_cfg_opts.netif_opts);
  LOG_S(ci_netif_config_opts_dump(&ci_cfg_opts.netif_opts));

  return 0;
}
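In the Onload/ci examples (#14, #17 and #26), LOG_S does not format a message itself: it receives a complete statement, such as ci_netif_config_opts_dump(...) or a ci_log() call, and executes it only when the corresponding log category is enabled. The real macro lives in the ci headers and is not reproduced on this page; a hypothetical sketch of the general pattern, using made-up names (log_mask, LOG_SETUP_BIT, dump_config), is:

/* Hypothetical sketch of conditional-statement logging, not the real ci headers. */
#include <cstdio>

static unsigned log_mask = 0x1;  /* assumed global mask of enabled log categories */
#define LOG_SETUP_BIT 0x1
/* variadic so that statements containing commas pass through unchanged */
#define LOG_S(...) do { if (log_mask & LOG_SETUP_BIT) { __VA_ARGS__; } } while (0)

static void dump_config(void) { std::printf("config dump...\n"); }

int main()
{
  LOG_S(dump_config());                           /* evaluated only if the bit is set */
  LOG_S(std::printf("TP_LOG = %x\n", log_mask));
  return 0;
}

Example #17's sscanf of the TP_LOG environment variable is then simply one way such a mask would be populated at start-up.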
Example #15
 double density(Domain value) const
 {
     LOG_S(density);
     if (data_.empty())
         return 0.0;
     double sum = 0.0;
     typename Weights::const_iterator weight = weights_.begin();
     for (auto data: data_)
     {
         LOG_M(STREAM(data, *weight));
         sum += *(weight++)*scaledKernel_(value-data);
     }
     LOG_M(sum);
     return sum/weightDistribution_.sum();
 }
Example #16
/**
 * Deletes the given item. Also removes the item from the items vector...
 */
void CTreeControl::deleteItem(CTreeItem* _item)
{
	LOG_S("Deleting item: ", _item->getText());
	TreeView_DeleteItem(hwnd, _item->handle);
	size_t i = 0;
	CTreeItem* actItem = _item;
	while(i<items.size())
	{
		if(items[i] == _item)
		{
			items.erase(items.begin() + i);
			return; 
		}
		i++;
	}
}
Example #17
int ci_tp_init(citp_init_thread_callback cb)
{
  const char* s;

#ifndef NDEBUG
  static int done = 0;
  ci_assert(!done);
  done = 1;
#endif

  /*! ?? \TODO setup config options etc. */
  if( (s = getenv("TP_LOG")) )  sscanf(s, "%x", &ci_tp_log);
  LOG_S(log("TP_LOG = %x", ci_tp_log));

  init_thread_callback = cb;
  oo_per_thread_init();

  return 0;
}
Example #18
int file_count(const char * directory)
{
	LOG("Entering counting files for %s\n", directory);
	// Count files in a given directory
	int ret = 0;
	DIR *dir = opendir(directory);
	struct dirent * dp = NULL;
	if(dir == NULL)
	{
		LOG_S("[***]ERROR opening directory!\n");
		return 0;
	}
	while( (dp = readdir(dir)) != NULL){
		if (dp->d_type == DT_REG)
		{
			++ret;
		}
	}
	closedir(dir);
	LOG( "Counted %d files for directory %s\n", ret, directory );
	return ret;
}
Example #19
int dir_count(const char * directory)
{
	LOG("Entering counting sub-dirs for %s\n", directory);
	// Count sub-directories in a given directory
	int ret = 0;
	DIR *dir = opendir(directory);
	struct dirent * dp = NULL;
	if(dir == NULL)
	{
		LOG_S("[***]ERROR opening directory!\n");
		return 0;
	}
	while( (dp = readdir(dir)) != NULL){
		// Exclude the entries '.' and '..'
		if (dp->d_type == DT_DIR && strcmp(dp->d_name, ".") && strcmp(dp->d_name, ".."))
		{
			++ret;
		}
	}
	closedir(dir);
	LOG( "Counted %d dirs for directory %s\n", ret, directory );
	return ret;
}
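file_count() and dir_count() classify entries via dirent's d_type field. On some filesystems readdir() reports DT_UNKNOWN instead of DT_REG or DT_DIR, so a more defensive version falls back to stat(); the helper below is a hypothetical addition for illustration, not part of the original code.

/* Hypothetical fallback: classify an entry with stat() when d_type is DT_UNKNOWN. */
#include <dirent.h>
#include <stdio.h>
#include <sys/stat.h>

static int is_regular_file(const char * directory, const struct dirent * dp)
{
	if (dp->d_type == DT_REG)
		return 1;
	if (dp->d_type != DT_UNKNOWN)
		return 0;
	char path[4096];
	struct stat st;
	snprintf(path, sizeof(path), "%s/%s", directory, dp->d_name);
	return stat(path, &st) == 0 && S_ISREG(st.st_mode);
}

file_count()'s loop would then call is_regular_file(directory, dp) instead of comparing d_type directly, and dir_count() could use an analogous S_ISDIR() check.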
Example #20
/**
 * Handles the owner draw items of the tab control
 */
bool CTabControl::handleOwnerDraws(LPDRAWITEMSTRUCT lpdis)
{
	LOG("Entered");
	if(!currentPage)
	{
		return false;
	}
	size_t size = currentPage->controls.size();

	for(size_t i=0; i<size; i++)
	{
		LOG_S("Trying:", string(currentPage->controls[i]->szClassName));
		LOG_X("Handle:", currentPage->controls[i]->getHandle());
		if(currentPage->controls[i]->getHandle() == lpdis->hwndItem && currentPage->controls[i]->isVisible())
		{
			LOG("Drawing it");
			if(!dynamic_cast<COwnerDrawControl*>(currentPage->controls[i])->draw(lpdis))
			{
				return false;
			}
		}
	}
	return true;
}
Example #21
S3DModel* CAssParser::Load(const std::string& modelFilePath)
{
	LOG_S(LOG_SECTION_MODEL, "Loading model: %s", modelFilePath.c_str() );
	const std::string modelPath  = FileSystem::GetDirectory(modelFilePath);
	const std::string modelName  = FileSystem::GetBasename(modelFilePath);

	// LOAD METADATA
	// Load the lua metafile. This contains properties unique to Spring models and must return a table
	std::string metaFileName = modelFilePath + ".lua";
	if (!CFileHandler::FileExists(metaFileName, SPRING_VFS_ZIP)) {
		// Try again without the model file extension
		metaFileName = modelPath + '/' + modelName + ".lua";
	}
	LuaParser metaFileParser(metaFileName, SPRING_VFS_MOD_BASE, SPRING_VFS_ZIP);
	if (!CFileHandler::FileExists(metaFileName, SPRING_VFS_ZIP)) {
		LOG_S(LOG_SECTION_MODEL, "No meta-file '%s'. Using defaults.", metaFileName.c_str());
	} else if (!metaFileParser.Execute()) {
		LOG_SL(LOG_SECTION_MODEL, L_ERROR, "'%s': %s. Using defaults.", metaFileName.c_str(), metaFileParser.GetErrorLog().c_str());
	}

	// Get the (root-level) model table
	const LuaTable& metaTable = metaFileParser.GetRoot();
	if (metaTable.IsValid()) {
		LOG_S(LOG_SECTION_MODEL, "Found valid model metadata in '%s'", metaFileName.c_str());
	}


	// LOAD MODEL DATA
	// Create a model importer instance
	Assimp::Importer importer;

	// Create a logger for debugging model loading issues
	Assimp::DefaultLogger::create("",Assimp::Logger::VERBOSE);
	const unsigned int severity = Assimp::Logger::Debugging|Assimp::Logger::Info|Assimp::Logger::Err|Assimp::Logger::Warn;
	Assimp::DefaultLogger::get()->attachStream( new AssLogStream(), severity );

	// Give the importer an IO class that handles Spring's VFS
	importer.SetIOHandler( new AssVFSSystem() );

	// Speed-up processing by skipping things we don't need
	importer.SetPropertyInteger(AI_CONFIG_PP_RVC_FLAGS, aiComponent_CAMERAS|aiComponent_LIGHTS|aiComponent_TEXTURES|aiComponent_ANIMATIONS);

#ifndef BITMAP_NO_OPENGL
	// Optimize VBO-Mesh sizes/ranges
	GLint maxIndices  = 1024;
	GLint maxVertices = 1024;
	glGetIntegerv(GL_MAX_ELEMENTS_INDICES,  &maxIndices);
	glGetIntegerv(GL_MAX_ELEMENTS_VERTICES, &maxVertices); //FIXME returns non-optimal data; ideally compute it ourselves (pre-TL cache size!)
	importer.SetPropertyInteger(AI_CONFIG_PP_SLM_VERTEX_LIMIT,   maxVertices);
	importer.SetPropertyInteger(AI_CONFIG_PP_SLM_TRIANGLE_LIMIT, maxIndices/3);
#endif

	// Read the model file to build a scene object
	LOG_S(LOG_SECTION_MODEL, "Importing model file: %s", modelFilePath.c_str() );
	const aiScene* scene = importer.ReadFile( modelFilePath, ASS_POSTPROCESS_OPTIONS );
	if (scene != NULL) {
		LOG_S(LOG_SECTION_MODEL,
				"Processing scene for model: %s (%d meshes / %d materials / %d textures)",
				modelFilePath.c_str(), scene->mNumMeshes, scene->mNumMaterials,
				scene->mNumTextures );
	} else {
		throw content_error("[AssimpParser] Model Import: " + std::string(importer.GetErrorString()));
	}

	SAssModel* model = new SAssModel;
	model->name = modelFilePath;
	model->type = MODELTYPE_ASS;
	model->scene = scene;

	// Gather per mesh info
	CalculatePerMeshMinMax(model);

	// Load textures
	FindTextures(model, scene, metaTable, modelName);
	LOG_S(LOG_SECTION_MODEL, "Loading textures. Tex1: '%s' Tex2: '%s'",
			model->tex1.c_str(), model->tex2.c_str());
	texturehandlerS3O->LoadS3OTexture(model);

	// Load all pieces in the model
	LOG_S(LOG_SECTION_MODEL, "Loading pieces from root node '%s'",
			scene->mRootNode->mName.data);
	LoadPiece(model, scene->mRootNode, metaTable);

	// Update piece hierarchy based on metadata
	BuildPieceHierarchy(model);
	CalculateModelProperties(model, metaTable);

	// Verbose logging of model properties
	LOG_SL(LOG_SECTION_MODEL, L_DEBUG, "model->name: %s", model->name.c_str());
	LOG_SL(LOG_SECTION_MODEL, L_DEBUG, "model->numobjects: %d", model->numPieces);
	LOG_SL(LOG_SECTION_MODEL, L_DEBUG, "model->radius: %f", model->radius);
	LOG_SL(LOG_SECTION_MODEL, L_DEBUG, "model->height: %f", model->height);
	LOG_SL(LOG_SECTION_MODEL, L_DEBUG, "model->drawRadius: %f", model->drawRadius);
	LOG_SL(LOG_SECTION_MODEL, L_DEBUG, "model->mins: (%f,%f,%f)", model->mins[0], model->mins[1], model->mins[2]);
	LOG_SL(LOG_SECTION_MODEL, L_DEBUG, "model->maxs: (%f,%f,%f)", model->maxs[0], model->maxs[1], model->maxs[2]);

	LOG_S(LOG_SECTION_MODEL, "Model %s Imported.", model->name.c_str());
	return model;
}
Example #22
SAssPiece* CAssParser::LoadPiece(SAssModel* model, aiNode* node, const LuaTable& metaTable)
{
	// Create new piece
	++model->numPieces;
	SAssPiece* piece = new SAssPiece;
	piece->type = MODELTYPE_OTHER;
	piece->node = node;
	piece->isEmpty = (node->mNumMeshes == 0);

	if (node->mParent) {
		piece->name = std::string(node->mName.data);
	} else {
		//FIXME is this really smart?
		piece->name = "root"; //! The real model root
	}

	// find a new name if none given or if a piece with the same name already exists
	if (piece->name.empty()) {
		piece->name = "piece";
	}

	ModelPieceMap::const_iterator it = model->pieces.find(piece->name);
	if (it != model->pieces.end()) {
		char buf[64];
		int i = 0;
		while (it != model->pieces.end()) {
			SNPRINTF(buf, 64, "%s%02i", piece->name.c_str(), i++);
			it = model->pieces.find(buf);
		}
		piece->name = buf;
	}
	LOG_S(LOG_SECTION_PIECE, "Converting node '%s' to piece '%s' (%d meshes).",
			node->mName.data, piece->name.c_str(), node->mNumMeshes);

	// Load additional piece properties from metadata
	const LuaTable& pieceTable = metaTable.SubTable("pieces").SubTable(piece->name);
	if (pieceTable.IsValid()) {
		LOG_S(LOG_SECTION_PIECE, "Found metadata for piece '%s'",
				piece->name.c_str());
	}

	// Load transforms
	LoadPieceTransformations(model, piece, pieceTable);

	// Update piece min/max extents
	for (unsigned meshListIndex = 0; meshListIndex < node->mNumMeshes; meshListIndex++) {
		const unsigned int meshIndex = node->mMeshes[meshListIndex];
		const SAssModel::MinMax& minmax = model->mesh_minmax[meshIndex];

		piece->mins = std::min(piece->mins, minmax.mins);
		piece->maxs = std::max(piece->maxs, minmax.maxs);
	}


	// Check if piece is special (ie, used to set Spring model properties)
	if (strcmp(node->mName.data, "SpringHeight") == 0) {
		// Set the model height to this node's Z value
		if (!metaTable.KeyExists("height")) {
			model->height = piece->offset.z;
			LOG_S(LOG_SECTION_MODEL,
					"Model height of %f set by special node 'SpringHeight'",
					model->height);
		}
		--model->numPieces;
		delete piece;
		return NULL;
	}
	if (strcmp(node->mName.data, "SpringRadius") == 0) {
		if (!metaTable.KeyExists("midpos")) {
			model->relMidPos = piece->scaleRotMatrix.Mul(piece->offset);
			LOG_S(LOG_SECTION_MODEL,
					"Model midpos of (%f,%f,%f) set by special node 'SpringRadius'",
					model->relMidPos.x, model->relMidPos.y, model->relMidPos.z);
		}
		if (!metaTable.KeyExists("radius")) {
			if (piece->maxs.x <= 0.00001f) {
				aiVector3D _scale, _offset;
				aiQuaternion _rotate;
				piece->node->mTransformation.Decompose(_scale,_rotate,_offset);
				model->radius = aiVectorToFloat3(_scale).x; // the blender import script only sets the scale property
			} else {
				model->radius = piece->maxs.x; // use the transformed mesh extents
			}
			LOG_S(LOG_SECTION_MODEL,
					"Model radius of %f set by special node 'SpringRadius'",
					model->radius);
		}
		--model->numPieces;
		delete piece;
		return NULL;
	}



	//! Get vertex data from node meshes
	for (unsigned meshListIndex = 0; meshListIndex < node->mNumMeshes; ++meshListIndex) {
		unsigned int meshIndex = node->mMeshes[meshListIndex];
		LOG_SL(LOG_SECTION_PIECE, L_DEBUG, "Fetching mesh %d from scene",
				meshIndex);
		const aiMesh* mesh = model->scene->mMeshes[meshIndex];
		std::vector<unsigned> mesh_vertex_mapping;
		// extract vertex data
		LOG_SL(LOG_SECTION_PIECE, L_DEBUG,
				"Processing vertices for mesh %d (%d vertices)",
				meshIndex, mesh->mNumVertices);
		LOG_SL(LOG_SECTION_PIECE, L_DEBUG,
				"Normals: %s Tangents/Bitangents: %s TexCoords: %s",
				(mesh->HasNormals() ? "Y" : "N"),
				(mesh->HasTangentsAndBitangents() ? "Y" : "N"),
				(mesh->HasTextureCoords(0) ? "Y" : "N"));

		piece->vertices.reserve(piece->vertices.size() + mesh->mNumVertices);
		for (unsigned vertexIndex = 0; vertexIndex < mesh->mNumVertices; ++vertexIndex) {
			SAssVertex vertex;

			// vertex coordinates
			const aiVector3D& aiVertex = mesh->mVertices[vertexIndex];
			vertex.pos = aiVectorToFloat3(aiVertex);

			// vertex normal
			LOG_SL(LOG_SECTION_PIECE, L_DEBUG, "Fetching normal for vertex %d",
					vertexIndex);
			const aiVector3D& aiNormal = mesh->mNormals[vertexIndex];
			if (!IS_QNAN(aiNormal)) {
				vertex.normal = aiVectorToFloat3(aiNormal);
			}

			// vertex tangent, x is positive in texture axis
			if (mesh->HasTangentsAndBitangents()) {
				LOG_SL(LOG_SECTION_PIECE, L_DEBUG,
						"Fetching tangent for vertex %d", vertexIndex );
				const aiVector3D& aiTangent = mesh->mTangents[vertexIndex];
				const aiVector3D& aiBitangent = mesh->mBitangents[vertexIndex];

				vertex.sTangent = aiVectorToFloat3(aiTangent);
				vertex.tTangent = aiVectorToFloat3(aiBitangent);
			}

			// vertex texcoords
			if (mesh->HasTextureCoords(0)) {
				vertex.texCoord.x = mesh->mTextureCoords[0][vertexIndex].x;
				vertex.texCoord.y = mesh->mTextureCoords[0][vertexIndex].y;
			}

			if (mesh->HasTextureCoords(1)) {
				piece->hasTexCoord2 = true;
				vertex.texCoord2.x = mesh->mTextureCoords[1][vertexIndex].x;
				vertex.texCoord2.y = mesh->mTextureCoords[1][vertexIndex].y;
			}

			mesh_vertex_mapping.push_back(piece->vertices.size());
			piece->vertices.push_back(vertex);
		}

		// extract face data
		LOG_SL(LOG_SECTION_PIECE, L_DEBUG,
				"Processing faces for mesh %d (%d faces)",
				meshIndex, mesh->mNumFaces);
		/*
		 * since aiProcess_SortByPType is being used,
		 * we're sure we'll get only 1 type here,
		 * so combination check isn't needed, also
		 * anything more complex than triangles is
		 * being split thanks to aiProcess_Triangulate
		 */
		piece->vertexDrawIndices.reserve(piece->vertexDrawIndices.size() + mesh->mNumFaces * 3);
		for (unsigned faceIndex = 0; faceIndex < mesh->mNumFaces; ++faceIndex) {
			const aiFace& face = mesh->mFaces[faceIndex];

			// some models contain lines (mNumIndices == 2)
			// we cannot render those (esp. they would need to be called in a 2nd drawcall)
			if (face.mNumIndices != 3)
				continue;

			for (unsigned vertexListID = 0; vertexListID < face.mNumIndices; ++vertexListID) {
				const unsigned int vertexFaceIdx = face.mIndices[vertexListID];
				const unsigned int vertexDrawIdx = mesh_vertex_mapping[vertexFaceIdx];
				piece->vertexDrawIndices.push_back(vertexDrawIdx);
			}
		}
	}

	piece->isEmpty = piece->vertices.empty();

	//! Get parent name from metadata or model
	if (pieceTable.KeyExists("parent")) {
		piece->parentName = pieceTable.GetString("parent", "");
	} else if (node->mParent) {
		if (node->mParent->mParent) {
			piece->parentName = std::string(node->mParent->mName.data);
		} else { // my parent is the root, which gets renamed
			piece->parentName = "root";
		}
	} else {
		piece->parentName = "";
	}

	LOG_S(LOG_SECTION_PIECE, "Loaded model piece: %s with %d meshes",
			piece->name.c_str(), node->mNumMeshes);

	// Verbose logging of piece properties
	LOG_S(LOG_SECTION_PIECE, "piece->name: %s", piece->name.c_str());
	LOG_S(LOG_SECTION_PIECE, "piece->parent: %s", piece->parentName.c_str());

	// Recursively process all child pieces
	for (unsigned int i = 0; i < node->mNumChildren; ++i) {
		LoadPiece(model, node->mChildren[i], metaTable);
	}

	model->pieces[piece->name] = piece;
	return piece;
}
Example #23
void CAssParser::LoadPieceTransformations(const S3DModel* model, SAssPiece* piece, const LuaTable& pieceMetaTable)
{
	// Process transforms
	float3 rotate, offset;
	float3 scale(1.0,1.0,1.0);
	aiVector3D _scale, _offset;
	aiQuaternion _rotate;
	piece->node->mTransformation.Decompose(_scale,_rotate,_offset);
	const aiMatrix4x4t<float> aiRotMatrix = aiMatrix4x4t<float>(_rotate.GetMatrix());

	LOG_S(LOG_SECTION_PIECE,
		"(%d:%s) Assimp offset (%f,%f,%f), rotate (%f,%f,%f,%f), scale (%f,%f,%f)",
		model->numPieces, piece->name.c_str(),
		_offset.x, _offset.y, _offset.z,
		_rotate.w, _rotate.x, _rotate.y, _rotate.z,
		_scale.x, _scale.y, _scale.z
	);

	// Scale
	scale   = pieceMetaTable.GetFloat3("scale", float3(_scale.x, _scale.z, _scale.y));
	scale.x = pieceMetaTable.GetFloat("scalex", scale.x);
	scale.y = pieceMetaTable.GetFloat("scaley", scale.y);
	scale.z = pieceMetaTable.GetFloat("scalez", scale.z);

	if (scale.x != scale.y || scale.y != scale.z) {
		//LOG_SL(LOG_SECTION_MODEL, L_WARNING, "Spring doesn't support non-uniform scaling");
		scale.y = scale.x;
		scale.z = scale.x;
	}

	// Rotate
	// Note these rotations are put into the `Spring rotations` and are not baked into the Assimp matrix!
	rotate   = pieceMetaTable.GetFloat3("rotate", float3(0,0,0));
	rotate.x = pieceMetaTable.GetFloat("rotatex", rotate.x);
	rotate.y = pieceMetaTable.GetFloat("rotatey", rotate.y);
	rotate.z = pieceMetaTable.GetFloat("rotatez", rotate.z);
	rotate  *= DEGTORAD;

	// Translate
	offset   = pieceMetaTable.GetFloat3("offset", float3(_offset.x, _offset.y, _offset.z));
	offset.x = pieceMetaTable.GetFloat("offsetx", offset.x);
	offset.y = pieceMetaTable.GetFloat("offsety", offset.y);
	offset.z = pieceMetaTable.GetFloat("offsetz", offset.z);

	LOG_S(LOG_SECTION_PIECE,
		"(%d:%s) Relative offset (%f,%f,%f), rotate (%f,%f,%f), scale (%f,%f,%f)",
		model->numPieces, piece->name.c_str(),
		offset.x, offset.y, offset.z,
		rotate.x, rotate.y, rotate.z,
		scale.x, scale.y, scale.z
	);

	// construct 'baked' part of the modelpiece matrix
	// Assimp order is: translate * rotate * scale * v
	piece->scaleRotMatrix.LoadIdentity();
	piece->scaleRotMatrix.Scale(scale);
	piece->scaleRotMatrix *= aiMatrixToMatrix(aiRotMatrix);
	// piece->scaleRotMatrix.Translate(offset);

	piece->offset = offset;
	piece->rot = rotate;
	piece->mIsIdentity = (scale.x == 1.0f) && aiRotMatrix.IsIdentity();
}
Example #24
S3DModel* CAssParser::Load(const std::string& modelFilePath)
{
	LOG_S(LOG_SECTION_MODEL, "Loading model: %s", modelFilePath.c_str() );
	const std::string modelPath  = FileSystem::GetDirectory(modelFilePath);
	const std::string modelName  = FileSystem::GetBasename(modelFilePath);

	//! LOAD METADATA
	//! Load the lua metafile. This contains properties unique to Spring models and must return a table
	std::string metaFileName = modelFilePath + ".lua";
	if (!CFileHandler::FileExists(metaFileName, SPRING_VFS_ZIP)) {
		//! Try again without the model file extension
		metaFileName = modelPath + '/' + modelName + ".lua";
	}
	LuaParser metaFileParser(metaFileName, SPRING_VFS_MOD_BASE, SPRING_VFS_ZIP);
	if (!CFileHandler::FileExists(metaFileName, SPRING_VFS_ZIP)) {
		LOG_S(LOG_SECTION_MODEL, "No meta-file '%s'. Using defaults.", metaFileName.c_str());
	} else if (!metaFileParser.Execute()) {
		LOG_SL(LOG_SECTION_MODEL, L_ERROR, "'%s': %s. Using defaults.", metaFileName.c_str(), metaFileParser.GetErrorLog().c_str());
	}

	//! Get the (root-level) model table
	const LuaTable& metaTable = metaFileParser.GetRoot();
	if (metaTable.IsValid()) {
		LOG_S(LOG_SECTION_MODEL, "Found valid model metadata in '%s'", metaFileName.c_str());
	}


	//! LOAD MODEL DATA
	//! Create a model importer instance
	Assimp::Importer importer;

	//! Create a logger for debugging model loading issues
	Assimp::DefaultLogger::create("",Assimp::Logger::VERBOSE);
	const unsigned int severity = Assimp::Logger::Debugging|Assimp::Logger::Info|Assimp::Logger::Err|Assimp::Logger::Warn;
	Assimp::DefaultLogger::get()->attachStream( new AssLogStream(), severity );

	//! Give the importer an IO class that handles Spring's VFS
	importer.SetIOHandler( new AssVFSSystem() );

	//! Speed-up processing by skipping things we don't need
	importer.SetPropertyInteger(AI_CONFIG_PP_RVC_FLAGS, aiComponent_CAMERAS|aiComponent_LIGHTS|aiComponent_TEXTURES|aiComponent_ANIMATIONS);

#ifndef BITMAP_NO_OPENGL
	//! Optimize VBO-Mesh sizes/ranges
	GLint maxIndices  = 1024;
	GLint maxVertices = 1024;
	glGetIntegerv(GL_MAX_ELEMENTS_INDICES,  &maxIndices);
	glGetIntegerv(GL_MAX_ELEMENTS_VERTICES, &maxVertices); //FIXME returns non-optimal data; ideally compute it ourselves (pre-TL cache size!)
	importer.SetPropertyInteger(AI_CONFIG_PP_SLM_VERTEX_LIMIT,   maxVertices);
	importer.SetPropertyInteger(AI_CONFIG_PP_SLM_TRIANGLE_LIMIT, maxIndices/3);
#endif

	//! Read the model file to build a scene object
	LOG_S(LOG_SECTION_MODEL, "Importing model file: %s", modelFilePath.c_str() );
	const aiScene* scene = importer.ReadFile( modelFilePath, ASS_POSTPROCESS_OPTIONS );
	if (scene != NULL) {
		LOG_S(LOG_SECTION_MODEL,
				"Processing scene for model: %s (%d meshes / %d materials / %d textures)",
				modelFilePath.c_str(), scene->mNumMeshes, scene->mNumMaterials,
				scene->mNumTextures );
	} else {
		LOG_SL(LOG_SECTION_MODEL, L_ERROR, "Model Import: %s",
				importer.GetErrorString());
	}

	SAssModel* model = new SAssModel;
	model->name = modelFilePath;
	model->type = MODELTYPE_ASS;
	model->scene = scene;
	//model->meta = &metaTable;

	//! Gather per mesh info
	CalculatePerMeshMinMax(model);

	//! Assign textures
	//! The S3O texture handler uses two textures.
	//! The first contains diffuse color (RGB) and teamcolor (A)
	//! The second contains glow (R), reflectivity (G) and 1-bit Alpha (A).
	if (metaTable.KeyExists("tex1")) {
		model->tex1 = metaTable.GetString("tex1", "default.png");
	} else {
		//! Search for a texture
		std::vector<std::string> files = CFileHandler::FindFiles("unittextures/", modelName + ".*");
		for(std::vector<std::string>::iterator fi = files.begin(); fi != files.end(); ++fi) {
			model->tex1 = FileSystem::GetFilename(*fi);
			break; //! there can be only one!
		}
	}
	if (metaTable.KeyExists("tex2")) {
		model->tex2 = metaTable.GetString("tex2", "");
	} else {
		//! Search for a texture
		std::vector<std::string> files = CFileHandler::FindFiles("unittextures/", modelName + "2.*");
		for(std::vector<std::string>::iterator fi = files.begin(); fi != files.end(); ++fi) {
			model->tex2 = FileSystem::GetFilename(*fi);
			break; //! there can be only one!
		}
	}
	model->flipTexY = metaTable.GetBool("fliptextures", true); //! Flip texture upside down
	model->invertTexAlpha = metaTable.GetBool("invertteamcolor", true); //! Reverse teamcolor levels

	//! Load textures
	LOG_S(LOG_SECTION_MODEL, "Loading textures. Tex1: '%s' Tex2: '%s'",
			model->tex1.c_str(), model->tex2.c_str());
	texturehandlerS3O->LoadS3OTexture(model);

	//! Load all pieces in the model
	LOG_S(LOG_SECTION_MODEL, "Loading pieces from root node '%s'",
			scene->mRootNode->mName.data);
	LoadPiece(model, scene->mRootNode, metaTable);

	//! Update piece hierarchy based on metadata
	BuildPieceHierarchy( model );

	//! Simplified dimensions used for rough calculations
	model->radius = metaTable.GetFloat("radius", model->radius);
	model->height = metaTable.GetFloat("height", model->height);
	model->relMidPos = metaTable.GetFloat3("midpos", model->relMidPos);
	model->mins = metaTable.GetFloat3("mins", model->mins);
	model->maxs = metaTable.GetFloat3("maxs", model->maxs);

	//! Calculate model dimensions if not set
	if (!metaTable.KeyExists("mins") || !metaTable.KeyExists("maxs")) CalculateMinMax( model->rootPiece );
	if (model->radius < 0.0001f) CalculateRadius( model );
	if (model->height < 0.0001f) CalculateHeight( model );

	//! Verbose logging of model properties
	LOG_SL(LOG_SECTION_MODEL, L_DEBUG, "model->name: %s", model->name.c_str());
	LOG_SL(LOG_SECTION_MODEL, L_DEBUG, "model->numobjects: %d", model->numPieces);
	LOG_SL(LOG_SECTION_MODEL, L_DEBUG, "model->radius: %f", model->radius);
	LOG_SL(LOG_SECTION_MODEL, L_DEBUG, "model->height: %f", model->height);
	LOG_SL(LOG_SECTION_MODEL, L_DEBUG, "model->mins: (%f,%f,%f)", model->mins[0], model->mins[1], model->mins[2]);
	LOG_SL(LOG_SECTION_MODEL, L_DEBUG, "model->maxs: (%f,%f,%f)", model->maxs[0], model->maxs[1], model->maxs[2]);

	LOG_S(LOG_SECTION_MODEL, "Model %s Imported.", model->name.c_str());
	return model;
}
Example #25
/**
 * Hides the given page. Used when changing
 */
void CTabControl::hidePage(int page)
{
	LOG_S("Hiding page:", pages[page]->getText());
	pages[page]->hide();
}
Example #26
int efab_file_move_to_alien_stack_rsop(ci_private_t *stack_priv, void *arg)
{
  ci_fixed_descriptor_t sock_fd = *(ci_fixed_descriptor_t *)arg;
  struct file *sock_file = fget(sock_fd);
  ci_private_t *sock_priv;
  tcp_helper_resource_t *old_thr;
  tcp_helper_resource_t *new_thr;
  citp_waitable *w;
  int rc;

  if( sock_file == NULL )
    return -EINVAL;
  if( !FILE_IS_ENDPOINT_SOCK(sock_file) ||
      stack_priv->fd_type != CI_PRIV_TYPE_NETIF ) {
    fput(sock_file);
    return -EINVAL;
  }
  sock_priv = sock_file->private_data;
  ci_assert(sock_priv->fd_type == CI_PRIV_TYPE_TCP_EP ||
            sock_priv->fd_type == CI_PRIV_TYPE_UDP_EP);

  old_thr = sock_priv->thr;
  new_thr = stack_priv->thr;
  ci_assert(old_thr);
  ci_assert(new_thr);

  if( old_thr == new_thr ) {
    fput(sock_file);
    return 0;
  }

  if( tcp_helper_cluster_from_cluster(old_thr) != 0 ) {
    LOG_S(ci_log("%s: move_fd() not permitted on clustered stacks", __func__));
    fput(sock_file);
    return -EINVAL;
  }

  w = SP_TO_WAITABLE(&old_thr->netif, sock_priv->sock_id);
  rc = ci_sock_lock(&old_thr->netif, w);
  if( rc != 0 ) {
    fput(sock_file);
    return rc;
  }

  rc = ci_netif_lock(&old_thr->netif);
  if( rc != 0 ) {
    ci_sock_unlock(&old_thr->netif, w);
    fput(sock_file);
    return rc;
  }

  efab_thr_ref(new_thr);
  rc = efab_file_move_to_alien_stack(sock_priv, &stack_priv->thr->netif);
  fput(sock_file);

  if( rc != 0 )
    efab_thr_release(new_thr);
  else
    ci_netif_unlock(&new_thr->netif);

  return rc;
}
Example #27
SAssPiece* CAssParser::LoadPiece(SAssModel* model, aiNode* node, const LuaTable& metaTable)
{
	//! Create new piece
	++model->numPieces;
	SAssPiece* piece = new SAssPiece;
	piece->type = MODELTYPE_OTHER;
	piece->node = node;
	piece->model = model;
	piece->isEmpty = (node->mNumMeshes == 0);

	if (node->mParent) {
		piece->name = std::string(node->mName.data);
	} else {
		//FIXME is this really smart?
		piece->name = "root"; //! The real model root
	}

	//! find a new name if none given or if a piece with the same name already exists
	if (piece->name.empty()) {
		piece->name = "piece";
	}
	ModelPieceMap::const_iterator it = model->pieces.find(piece->name);
	if (it != model->pieces.end()) {
		char buf[64];
		int i = 0;
		while (it != model->pieces.end()) {
			SNPRINTF(buf, 64, "%s%02i", piece->name.c_str(), i++);
			it = model->pieces.find(buf);
		}
		piece->name = buf;
	}
	LOG_S(LOG_SECTION_PIECE, "Converting node '%s' to piece '%s' (%d meshes).",
			node->mName.data, piece->name.c_str(), node->mNumMeshes);

	//! Load additional piece properties from metadata
	const LuaTable& pieceTable = metaTable.SubTable("pieces").SubTable(piece->name);
	if (pieceTable.IsValid()) {
		LOG_S(LOG_SECTION_PIECE, "Found metadata for piece '%s'",
				piece->name.c_str());
	}

	//! Load transforms
	LoadPieceTransformations(piece, pieceTable);

	//! Update piece min/max extents
	for (unsigned meshListIndex = 0; meshListIndex < node->mNumMeshes; meshListIndex++) {
		unsigned int meshIndex = node->mMeshes[meshListIndex];
		SAssModel::MinMax& minmax = model->mesh_minmax[meshIndex];
		piece->mins.x = std::min(piece->mins.x, minmax.mins.x);
		piece->mins.y = std::min(piece->mins.y, minmax.mins.y);
		piece->mins.z = std::min(piece->mins.z, minmax.mins.z);
		piece->maxs.x = std::max(piece->maxs.x, minmax.maxs.x);
		piece->maxs.y = std::max(piece->maxs.y, minmax.maxs.y);
		piece->maxs.z = std::max(piece->maxs.z, minmax.maxs.z);
	}


	//! Check if piece is special (ie, used to set Spring model properties)
	if (strcmp(node->mName.data, "SpringHeight") == 0) {
		//! Set the model height to this node's Z value
		if (!metaTable.KeyExists("height")) {
			model->height = piece->offset.z;
			LOG_S(LOG_SECTION_MODEL,
					"Model height of %f set by special node 'SpringHeight'",
					model->height);
		}
		--model->numPieces;
		delete piece;
		return NULL;
	}
	if (strcmp(node->mName.data, "SpringRadius") == 0) {
		if (!metaTable.KeyExists("midpos")) {
			model->relMidPos = float3(piece->offset.x, piece->offset.z, piece->offset.y); //! Y and Z are swapped because this piece isn't rotated
			LOG_S(LOG_SECTION_MODEL,
					"Model midpos of (%f,%f,%f) set by special node 'SpringRadius'",
					model->relMidPos.x, model->relMidPos.y, model->relMidPos.z);
		}
		if (!metaTable.KeyExists("radius")) {
			if (piece->maxs.x <= 0.00001f) {
				model->radius = piece->scale.x; //! the blender import script only sets the scale property
			} else {
				model->radius = piece->maxs.x; //! use the transformed mesh extents
			}
			LOG_S(LOG_SECTION_MODEL,
					"Model radius of %f set by special node 'SpringRadius'",
					model->radius);
		}
		--model->numPieces;
		delete piece;
		return NULL;
	}


	//! Get vertex data from node meshes
	for (unsigned meshListIndex = 0; meshListIndex < node->mNumMeshes; ++meshListIndex) {
		unsigned int meshIndex = node->mMeshes[meshListIndex];
		LOG_SL(LOG_SECTION_PIECE, L_DEBUG, "Fetching mesh %d from scene",
				meshIndex);
		aiMesh* mesh = model->scene->mMeshes[meshIndex];
		std::vector<unsigned> mesh_vertex_mapping;
		//! extract vertex data
		LOG_SL(LOG_SECTION_PIECE, L_DEBUG,
				"Processing vertices for mesh %d (%d vertices)",
				meshIndex, mesh->mNumVertices);
		LOG_SL(LOG_SECTION_PIECE, L_DEBUG,
				"Normals: %s Tangents/Bitangents: %s TexCoords: %s",
				(mesh->HasNormals() ? "Y" : "N"),
				(mesh->HasTangentsAndBitangents() ? "Y" : "N"),
				(mesh->HasTextureCoords(0) ? "Y" : "N"));

		// FIXME add piece->vertices.reserve()
		for (unsigned vertexIndex= 0; vertexIndex < mesh->mNumVertices; ++vertexIndex) {
			SAssVertex vertex;

			//! vertex coordinates
			//LOG_SL(LOG_SECTION_PIECE, L_DEBUG, "Fetching vertex %d from mesh", vertexIndex);
			const aiVector3D& aiVertex = mesh->mVertices[vertexIndex];
			vertex.pos.x = aiVertex.x;
			vertex.pos.y = aiVertex.y;
			vertex.pos.z = aiVertex.z;

			//LOG_SL(LOG_SECTION_PIECE, L_DEBUG, "vertex position %d: %f %f %f", vertexIndex, vertex.pos.x, vertex.pos.y, vertex.pos.z);

			//! vertex normal
			LOG_SL(LOG_SECTION_PIECE, L_DEBUG, "Fetching normal for vertex %d",
					vertexIndex);
			const aiVector3D& aiNormal = mesh->mNormals[vertexIndex];
			vertex.hasNormal = !IS_QNAN(aiNormal);
			if (vertex.hasNormal) {
				vertex.normal.x = aiNormal.x;
				vertex.normal.y = aiNormal.y;
				vertex.normal.z = aiNormal.z;
				//LOG_SL(LOG_SECTION_PIECE, L_DEBUG, "vertex normal %d: %f %f %f",vertexIndex, vertex.normal.x, vertex.normal.y,vertex.normal.z);
			}

			//! vertex tangent, x is positive in texture axis
			if (mesh->HasTangentsAndBitangents()) {
				LOG_SL(LOG_SECTION_PIECE, L_DEBUG,
						"Fetching tangent for vertex %d", vertexIndex );
				const aiVector3D& aiTangent = mesh->mTangents[vertexIndex];
				const aiVector3D& aiBitangent = mesh->mBitangents[vertexIndex];
				vertex.hasTangent = !IS_QNAN(aiBitangent) && !IS_QNAN(aiTangent);

				const float3 tangent(aiTangent.x, aiTangent.y, aiTangent.z);
				//LOG_SL(LOG_SECTION_PIECE, L_DEBUG, "vertex tangent %d: %f %f %f",vertexIndex, tangent.x, tangent.y,tangent.z);
				piece->sTangents.push_back(tangent);

				const float3 bitangent(aiBitangent.x, aiBitangent.y, aiBitangent.z);
				//LOG_SL(LOG_SECTION_PIECE, L_DEBUG, "vertex bitangent %d: %f %f %f",vertexIndex, bitangent.x, bitangent.y,bitangent.z);
				piece->tTangents.push_back(bitangent);
			}

			//! vertex texcoords
			if (mesh->HasTextureCoords(0)) {
				vertex.textureX = mesh->mTextureCoords[0][vertexIndex].x;
				vertex.textureY = mesh->mTextureCoords[0][vertexIndex].y;
				//LOG_SL(LOG_SECTION_PIECE, L_DEBUG, "vertex texcoords %d: %f %f", vertexIndex, vertex.textureX, vertex.textureY);
			}

			mesh_vertex_mapping.push_back(piece->vertices.size());
			piece->vertices.push_back(vertex);
		}

		//! extract face data
		// FIXME add piece->vertexDrawOrder.reserve()
		LOG_SL(LOG_SECTION_PIECE, L_DEBUG,
				"Processing faces for mesh %d (%d faces)",
				meshIndex, mesh->mNumFaces);
		for (unsigned faceIndex = 0; faceIndex < mesh->mNumFaces; ++faceIndex) {
			const aiFace& face = mesh->mFaces[faceIndex];
			//! get the vertex belonging to the mesh
			for (unsigned vertexListID = 0; vertexListID < face.mNumIndices; ++vertexListID) {
				unsigned int vertexID = mesh_vertex_mapping[face.mIndices[vertexListID]];
				//LOG_SL(LOG_SECTION_PIECE, L_DEBUG, "face %d vertex %d", faceIndex, vertexID);
				piece->vertexDrawOrder.push_back(vertexID);
			}
		}
	}

	//! collision volume for piece (not sure about these coords)
	// FIXME add metatable tags for this!!!!
	const float3 cvScales = piece->maxs - piece->mins;
	const float3 cvOffset = (piece->maxs - piece->offset) + (piece->mins - piece->offset);
	//const float3 cvOffset(piece->offset.x, piece->offset.y, piece->offset.z);
	piece->colvol = new CollisionVolume("box", cvScales, cvOffset, CollisionVolume::COLVOL_HITTEST_CONT);

	//! Get parent name from metadata or model
	if (pieceTable.KeyExists("parent")) {
		piece->parentName = pieceTable.GetString("parent", "");
	} else if (node->mParent) {
		if (node->mParent->mParent) {
			piece->parentName = std::string(node->mParent->mName.data);
		} else { //! my parent is the root, which gets renamed
			piece->parentName = "root";
		}
	} else {
		piece->parentName = "";
	}

	LOG_S(LOG_SECTION_PIECE, "Loaded model piece: %s with %d meshes",
			piece->name.c_str(), node->mNumMeshes);

	//! Verbose logging of piece properties
	LOG_S(LOG_SECTION_PIECE, "piece->name: %s", piece->name.c_str());
	LOG_S(LOG_SECTION_PIECE, "piece->parent: %s", piece->parentName.c_str());

	//! Recursively process all child pieces
	for (unsigned int i = 0; i < node->mNumChildren; ++i) {
		LoadPiece(model, node->mChildren[i], metaTable);
	}

	model->pieces[piece->name] = piece;
	return piece;
}
Example #28
int work_directory(char * dir_name, dummy_dir * dir_st, char * root_name)
{
	struct dirent *dp = NULL;

	LOG("[enter]Ptr is %x\n", (unsigned int) dir_st);
	LOG("[enter]DirName is %s\n", dir_name);
	
	// List subdirs and files	
	// Get count; allocate accordingly
	int dir_c = dir_count( dir_name );

	dir_st->subdirs_count = dir_c;

	LOG_S("Parsing directories\n");
	if( dir_c == 0 )
	{
		dir_st->subdirs = NULL;
	}
	else
	{
		dir_st->subdirs = malloc(sizeof(dummy_dir) * dir_c);

		DIR *dir = opendir(dir_name);
		int i = 0;
		while( (dp = readdir(dir)) != NULL )
		{
			if(dp->d_type != DT_DIR || !strcmp(dp->d_name, ".") || !strcmp(dp->d_name, ".."))
				continue;
			
			// Allocate and call recursively (for each dir)
			dummy_dir * new_child = dir_st->subdirs + i++;

			LOG("Ptr is %x\n" , (unsigned int)new_child);
			LOG("Working directory %s\n", dp->d_name);
			new_child->parent = dir_st;

			// Init name here, it's easier than decomposing the full path in the recursive call.
			init( &new_child->name );

			LOG("Before adding dir %s\n", dp->d_name);
			write( &new_child->name, dp->d_name, strlen(dp->d_name) + 1 );
	
			// Prepare for recursive call
			// sizeof(*full_path): allocate the buffer struct itself, not just a pointer's worth of bytes
			buffer * full_path = malloc(sizeof(*full_path));
			init( full_path );
			write( full_path, dir_name, strlen(dir_name) + 1);

			cat_paths( full_path, dp->d_name );

			LOG( "Cat'ed dir is = %s\n", full_path->buffer );
	
			// Call recursively
			work_directory( full_path->buffer, new_child, root_name );

			destroy( full_path );
			free( full_path );
		}
		closedir(dir);
	}	

	LOG_S("Parsing files\n");

	// Get count; allocate accordingly
	int file_c = file_count( dir_name );

	dir_st->files_count = file_c;

	if( file_c == 0 )
	{
		dir_st->files = NULL;
	}
	else
	{
		dir_st->files = malloc(sizeof(dummy_file) * file_c);

		DIR *dir = opendir(dir_name);
		int i = 0;
		while( (dp = readdir(dir)) != NULL )
		{
			if(dp->d_type != DT_REG)
				continue;
			
			// Allocate an entry for each file
			dummy_file * new_child = dir_st->files + i++;

			LOG("Ptr is %x\n" , (unsigned int)new_child);
			LOG("Working file %s\n", dp->d_name);

			init( &new_child->name );

			LOG("Before adding file %s\n", dp->d_name);
			write( &new_child->name, dp->d_name, strlen(dp->d_name) + 1 );
		}
		closedir(dir);
	}

	return WRKDIR_SUCCESS;
}
Example #29
static void amagent_worker_init(apr_pool_t *p, server_rec *s) {
    /* worker process init */
    LOG_S(APLOG_DEBUG, s, "amagent_worker_init() %d", getpid());
    am_init_worker();
    apr_pool_cleanup_register(p, s, amagent_worker_cleanup, apr_pool_cleanup_null);
}
Example #30
void CArchiveScanner::ScanArchive(const std::string& fullName, bool doChecksum)
{
	const std::string fn    = FileSystem::GetFilename(fullName);
	const std::string fpath = FileSystem::GetDirectory(fullName);
	const std::string lcfn  = StringToLower(fn);

	// Stat file
	struct stat info = {0};
	int statfailed = stat(fullName.c_str(), &info);

	// If stat fails, treat the archive as neither broken nor cached
	if (!statfailed) {
		// Determine whether this archive has previously been found to be broken
		std::map<std::string, BrokenArchive>::iterator bai = brokenArchives.find(lcfn);
		if (bai != brokenArchives.end()) {
			if ((unsigned)info.st_mtime == bai->second.modified && fpath == bai->second.path) {
				bai->second.updated = true;
				return;
			}
		}

		// Determine whether to rely on the cached info or not
		std::map<std::string, ArchiveInfo>::iterator aii = archiveInfos.find(lcfn);
		if (aii != archiveInfos.end()) {
			// This archive may have been obsoleted, do not process it if so
			if (!aii->second.replaced.empty()) {
				return;
			}

			if ((unsigned)info.st_mtime == aii->second.modified && fpath == aii->second.path) {
				// cache found, update checksum if wanted
				aii->second.updated = true;
				if (doChecksum && (aii->second.checksum == 0)) {
					aii->second.checksum = GetCRC(fullName);
				}
				return;
			} else {
				if (aii->second.updated) {
					const std::string filename = aii->first;
					LOG_L(L_ERROR, "Found a \"%s\" already in \"%s\", ignoring.", fullName.c_str(), (aii->second.path + aii->second.origName).c_str());
					if (IsBaseContent(filename)) {
						throw user_error(std::string("duplicate base content detected:\n\t") + aii->second.path + std::string("\n\t") + fpath
							+ std::string("\nPlease fix your configuration/installation as this can cause desyncs!"));
					}
					return;
				}

				// If we are here, we could have invalid info in the cache
				// Force a reread if it is a directory archive (.sdd), as
				// st_mtime only reflects changes to the directory itself,
				// not the contents.
				archiveInfos.erase(aii);
			}
		}
	}

	boost::scoped_ptr<IArchive> ar(archiveLoader.OpenArchive(fullName));
	if (!ar || !ar->IsOpen()) {
		LOG_L(L_WARNING, "Unable to open archive: %s", fullName.c_str());

		// record it as broken, so we don't need to look inside every time
		BrokenArchive& ba = brokenArchives[lcfn];
		ba.path = fpath;
		ba.modified = info.st_mtime;
		ba.updated = true;
		ba.problem = "Unable to open archive";
		return;
	}

	std::string error;
	std::string mapfile;

	const bool hasModinfo = ar->FileExists("modinfo.lua");
	const bool hasMapinfo = ar->FileExists("mapinfo.lua");


	ArchiveInfo ai;
	auto& ad = ai.archiveData;
	if (hasMapinfo) {
		ScanArchiveLua(ar.get(), "mapinfo.lua", ai, error);
		if (ad.GetMapFile().empty()) {
			LOG_L(L_WARNING, "%s: mapfile isn't set in mapinfo.lua, please set it for faster loading!", fullName.c_str());
			mapfile = SearchMapFile(ar.get(), error);
		}
	} else if (hasModinfo) {
		ScanArchiveLua(ar.get(), "modinfo.lua", ai, error);
	} else {
		mapfile = SearchMapFile(ar.get(), error);
	}
	CheckCompression(ar.get(), fullName, error);

	if (!error.empty()) {
		// for some reason, the archive is marked as broken
		LOG_L(L_WARNING, "Failed to scan %s (%s)", fullName.c_str(), error.c_str());

		// record it as broken, so we don't need to look inside every time
		BrokenArchive& ba = brokenArchives[lcfn];
		ba.path = fpath;
		ba.modified = info.st_mtime;
		ba.updated = true;
		ba.problem = error;
		return;
	}

	if (hasMapinfo || !mapfile.empty()) {
		// it is a map
		if (ad.GetName().empty()) {
			// FIXME The name will never be empty, if version is set (see HACK in ArchiveData)
			ad.SetInfoItemValueString("name_pure", FileSystem::GetBasename(mapfile));
			ad.SetInfoItemValueString("name", FileSystem::GetBasename(mapfile));
		}
		if (ad.GetMapFile().empty()) {
			ad.SetInfoItemValueString("mapfile", mapfile);
		}

		AddDependency(ad.GetDependencies(), "Map Helper v1");
		ad.SetInfoItemValueInteger("modType", modtype::map);

		LOG_S(LOG_SECTION_ARCHIVESCANNER, "Found new map: %s", ad.GetNameVersioned().c_str());
	} else if (hasModinfo) {
		// it is a game
		if (ad.GetModType() == modtype::primary) {
			AddDependency(ad.GetDependencies(), "Spring content v1");
		}

		LOG_S(LOG_SECTION_ARCHIVESCANNER, "Found new game: %s", ad.GetNameVersioned().c_str());
	} else {
		// neither a map nor a mod: error
		error = "missing modinfo.lua/mapinfo.lua";
	}

	ai.path = fpath;
	ai.modified = info.st_mtime;
	ai.origName = fn;
	ai.updated = true;
	ai.checksum = (doChecksum) ? GetCRC(fullName) : 0;
	archiveInfos[lcfn] = ai;
}