void testJustify()
{
  AString str;

  str.assign("2f8");
  str.justifyRight(4, '0');
  std::cout << str << std::endl;
  str.justifyRight(8, '0');
  std::cout << str << std::endl;
  str.justifyRight(3, '0');
  std::cout << str << std::endl;

  str.assign("2f8");
  str.justifyCenter(8, '0');
  std::cout << str << std::endl;
  str.justifyCenter(11, '0');
  std::cout << str << std::endl;

  str.assign("2f8");
  str.justifyCenter(11, '0');
  std::cout << str << std::endl;
}
void testJustify(int& iRet)
{
  AString str;

  str.assign("2f8");
  str.justifyRight(4, '0');
  ASSERT_UNIT_TEST(str.equals("02f8"), "AString::justifyRight", "0", iRet);
  str.justifyRight(8, '0');
  ASSERT_UNIT_TEST(str.equals("000002f8"), "AString::justifyRight", "1", iRet);
  str.justifyRight(3, '0');
  ASSERT_UNIT_TEST(str.equals("000002f8"), "AString::justifyRight", "2", iRet);

  str.assign("2f8");
  str.justifyCenter(8, '0');
  ASSERT_UNIT_TEST(str.equals("0002f800"), "AString::justifyCenter", "0", iRet);
  str.justifyCenter(11, '0');
  ASSERT_UNIT_TEST(str.equals("000002f8000"), "AString::justifyCenter", "1", iRet);

  str.assign("2f8");
  str.justifyCenter(11, '0');
  ASSERT_UNIT_TEST(str.equals("00002f80000"), "AString::justifyCenter", "2", iRet);
  str.justifyCenter(5, '0');
  ASSERT_UNIT_TEST(str.equals("00002f80000"), "AString::justifyCenter", "3", iRet);

  str.assign("2f8");
  str.justifyLeft(7, '0');
  ASSERT_UNIT_TEST(str.equals("2f80000"), "AString::justifyLeft", "0", iRet);
  str.justifyLeft(5, '0');
  ASSERT_UNIT_TEST(str.equals("2f80000"), "AString::justifyLeft", "1", iRet);
}
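The two tests above rely on an ASSERT_UNIT_TEST macro that reports a failed check and bumps the caller's error counter; its real definition is not part of these snippets. A minimal sketch of what such a macro could look like, given here only as an assumption for illustration:

#include <iostream>

// Assumed behaviour only: print a notice identifying the failed test and case,
// and increment the caller's error counter so the driver can return non-zero.
#define ASSERT_UNIT_TEST(cond, who, what, iRet) \
  do \
  { \
    if (!(cond)) \
    { \
      std::cerr << "FAILED: " << (who) << " case " << (what) << std::endl; \
      ++(iRet); \
    } \
  } while (0)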
int cFile::ReadRestOfFile(AString & a_Contents)
{
  ASSERT(IsOpen());

  if (!IsOpen())
  {
    return -1;
  }

  long TotalSize = GetSize();
  if (TotalSize < 0)
  {
    return -1;
  }

  long Position = Tell();
  if (Position < 0)
  {
    return -1;
  }

  auto DataSize = static_cast<size_t>(TotalSize - Position);

  // HACK: This depends on the internal knowledge that AString's data() function returns the internal buffer directly
  a_Contents.assign(DataSize, '\0');
  return Read(reinterpret_cast<void *>(const_cast<char *>(a_Contents.data())), DataSize);
}
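The HACK above writes through a const_cast of the pointer returned by data(). If AString is std::string-compatible (in some of the codebases quoted here it is a plain std::string typedef), the cast can be avoided by resizing first and writing through operator[], which C++11 guarantees to address contiguous storage. A sketch only; ReadRestOfFileSafe is a hypothetical free function, not part of cFile:

// Sketch under the assumption that AString behaves like std::string and that
// cFile exposes GetSize(), Tell() and Read(void *, size_t) as used above.
int ReadRestOfFileSafe(cFile & a_File, AString & a_Contents)
{
  long TotalSize = a_File.GetSize();
  long Position = a_File.Tell();
  if ((TotalSize < 0) || (Position < 0))
  {
    return -1;
  }
  auto DataSize = static_cast<size_t>(TotalSize - Position);
  a_Contents.resize(DataSize);
  if (DataSize == 0)
  {
    return 0;
  }
  // &a_Contents[0] is writable and contiguous, so no const_cast is needed:
  return a_File.Read(&a_Contents[0], DataSize);
}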
void cTCPLinkImpl::UpdateAddress(const sockaddr * a_Address, socklen_t a_AddrLen, AString & a_IP, UInt16 & a_Port)
{
  // Based on the family specified in the address, use the correct data structure to convert to IP string:
  char IP[128];
  switch (a_Address->sa_family)
  {
    case AF_INET:  // IPv4:
    {
      const sockaddr_in * sin = reinterpret_cast<const sockaddr_in *>(a_Address);
      evutil_inet_ntop(AF_INET, &(sin->sin_addr), IP, sizeof(IP));
      a_Port = ntohs(sin->sin_port);
      break;
    }
    case AF_INET6:  // IPv6:
    {
      const sockaddr_in6 * sin = reinterpret_cast<const sockaddr_in6 *>(a_Address);
      evutil_inet_ntop(AF_INET6, &(sin->sin6_addr), IP, sizeof(IP));
      a_Port = ntohs(sin->sin6_port);
      break;
    }
    default:
    {
      LOGWARNING("%s: Unknown socket address family: %d", __FUNCTION__, a_Address->sa_family);
      ASSERT(!"Unknown socket address family");
      break;
    }
  }
  a_IP.assign(IP);
}
bool cByteBuffer::ReadString(AString & a_String, int a_Count)
{
  CHECK_THREAD;
  CheckValid();
  ASSERT(a_Count >= 0);
  NEEDBYTES(a_Count);
  a_String.clear();
  a_String.reserve(a_Count);
  int BytesToEndOfBuffer = m_BufferSize - m_ReadPos;
  ASSERT(BytesToEndOfBuffer >= 0);  // Sanity check
  if (BytesToEndOfBuffer <= a_Count)
  {
    // Reading across the ringbuffer end, read the first part and adjust parameters:
    if (BytesToEndOfBuffer > 0)
    {
      a_String.assign(m_Buffer + m_ReadPos, BytesToEndOfBuffer);
      a_Count -= BytesToEndOfBuffer;
    }
    m_ReadPos = 0;
  }

  // Read the rest of the bytes in a single read (guaranteed to fit):
  if (a_Count > 0)
  {
    a_String.append(m_Buffer + m_ReadPos, a_Count);
    m_ReadPos += a_Count;
  }
  return true;
}
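ReadString splits one logical read into at most two copies when it crosses the physical end of the ring buffer. A standalone sketch of the same wrap-around idea on a plain buffer; RingRead and its parameters are illustrative only, not part of cByteBuffer:

#include <cstddef>
#include <string>

// Illustrative only: copy a_Count bytes from a ring buffer of a_BufSize bytes,
// starting at a_ReadPos, splitting the copy where the buffer wraps around.
inline void RingRead(const char * a_Buf, size_t a_BufSize, size_t & a_ReadPos, size_t a_Count, std::string & a_Out)
{
  a_Out.clear();
  size_t ToEnd = a_BufSize - a_ReadPos;
  if (a_Count >= ToEnd)
  {
    // First part: up to the physical end of the buffer
    a_Out.assign(a_Buf + a_ReadPos, ToEnd);
    a_Count -= ToEnd;
    a_ReadPos = 0;
  }
  if (a_Count > 0)
  {
    // Second part (or the whole read, if no wrap happened)
    a_Out.append(a_Buf + a_ReadPos, a_Count);
    a_ReadPos += a_Count;
  }
}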
bool cWSSCompact::cPAKFile::GetChunkData(const cChunkCoords & a_Chunk, int & a_UncompressedSize, AString & a_Data)
{
  int ChunkX = a_Chunk.m_ChunkX;
  int ChunkZ = a_Chunk.m_ChunkZ;
  sChunkHeader * Header = NULL;
  int Offset = 0;
  for (sChunkHeaders::iterator itr = m_ChunkHeaders.begin(); itr != m_ChunkHeaders.end(); ++itr)
  {
    if (((*itr)->m_ChunkX == ChunkX) && ((*itr)->m_ChunkZ == ChunkZ))
    {
      Header = *itr;
      break;
    }
    Offset += (*itr)->m_CompressedSize;
  }
  if ((Header == NULL) || (Offset + Header->m_CompressedSize > (int)m_DataContents.size()))
  {
    // Chunk not found / data invalid
    return false;
  }

  a_UncompressedSize = Header->m_UncompressedSize;
  a_Data.assign(m_DataContents, Offset, Header->m_CompressedSize);
  return true;
}
int ut_ATextConverter_General()
{
  std::cerr << "ut_ATextConverter_General" << std::endl;
  int iRet = 0x0;

  AString result;
  AString str("!<HTML>&...");
  ATextConverter::makeHtmlSafe(str, result);
  ASSERT_UNIT_TEST(result.equals("!&lt;HTML&gt;&amp;..."), "ATextConverter::makeHtmlSafe", "0", iRet);

  result.clear();
  str.assign("<><HTML>&...&");
  ATextConverter::makeHtmlSafe(str, result);
  ASSERT_UNIT_TEST(result.equals("&lt;&gt;&lt;HTML&gt;&amp;...&amp;"), "ATextConverter::makeHtmlSafe", "1", iRet);

  //a_Round trips
  AString strR;
  str = "±(S3t¯th3~co^tro|$~f0r~the~he@rt~of~the¯sµ^!)²÷exit`";

  result.clear();
  ATextConverter::encodeHEX(str, strR);
  ATextConverter::decodeHEX(strR, result);
  ASSERT_UNIT_TEST(result.equals(str), "HEX roundtrip", "", iRet);

  strR.clear();
  result.clear();
  ATextConverter::encodeURL(str, strR);
  ATextConverter::decodeURL(strR, result);
  ASSERT_UNIT_TEST(result.equals(str), "URL roundtrip", "", iRet);

  strR.clear();
  result.clear();
  ATextConverter::encodeBase64(str, strR);
  ATextConverter::decodeBase64(strR, result);
  ASSERT_UNIT_TEST(result.equals(str), "Base64 roundtrip", "", iRet);

  strR.clear();
  result.clear();
  ATextConverter::encode64(str, strR);
  ATextConverter::decode64(strR, result);
  ASSERT_UNIT_TEST(result.equals(str), "encode64 roundtrip", "", iRet);

  //a_Base64
  result.clear();
  ATextConverter::encodeBase64("foo:bar", result);
  ASSERT_UNIT_TEST(result.equals("Zm9vOmJhcg=="), "Base64 encode", "", iRet);

  result.clear();
  ATextConverter::decodeBase64("Zm9vOmJhcg==", result);
  ASSERT_UNIT_TEST(result.equals("foo:bar"), "Base64 decode", "", iRet);

  //a_CData safe
  str.assign("!INVALID]]> CData string! ]]> !");
  strR.assign("!INVALID%5d%5d%3e CData string! %5d%5d%3e !");
  result.clear();
  ATextConverter::makeCDataSafe(str, result);
  ASSERT_UNIT_TEST(result.equals(strR), "makeCDataSafe", "", iRet);

  return iRet;
}
void cLuaState::ToString(int a_StackPos, AString & a_String)
{
  size_t len;
  const char * s = lua_tolstring(m_LuaState, a_StackPos, &len);
  if (s != nullptr)
  {
    a_String.assign(s, len);
  }
}
void cLuaState::GetStackValue(int a_StackPos, AString & a_Value)
{
  size_t len = 0;
  const char * data = lua_tolstring(m_LuaState, a_StackPos, &len);
  if (data != nullptr)
  {
    a_Value.assign(data, len);
  }
}
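Both helpers read the string with lua_tolstring and an explicit length, so values containing embedded NUL bytes are copied in full. A minimal standalone illustration against the plain Lua C API, independent of cLuaState:

#include <string>
extern "C"
{
  #include "lua.h"
  #include "lauxlib.h"
}

// Round-trip a string with an embedded NUL through the Lua stack;
// the length-aware push/read keeps every byte intact.
inline std::string RoundTripThroughLua(const std::string & a_In)
{
  lua_State * L = luaL_newstate();
  lua_pushlstring(L, a_In.data(), a_In.size());
  size_t len = 0;
  const char * s = lua_tolstring(L, -1, &len);
  std::string out;
  if (s != nullptr)
  {
    out.assign(s, len);  // copies all len bytes, including any '\0'
  }
  lua_close(L);
  return out;
}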
bool cWSSCompact::cPAKFile::SaveChunkToData(const cChunkCoords & a_Chunk, cWorld * a_World)
{
  // Serialize the chunk:
  cJsonChunkSerializer Serializer;
  if (!a_World->GetChunkData(a_Chunk.m_ChunkX, a_Chunk.m_ChunkZ, Serializer))
  {
    // Chunk not valid
    LOG("cWSSCompact: Trying to save chunk [%d, %d, %d] that has no data, ignoring request.", a_Chunk.m_ChunkX, a_Chunk.m_ChunkY, a_Chunk.m_ChunkZ);
    return false;
  }

  AString Data;
  Data.assign((const char *)Serializer.GetBlockData(), cChunkDef::BlockDataSize);
  if (Serializer.HasJsonData())
  {
    AString JsonData;
    Json::StyledWriter writer;
    JsonData = writer.write(Serializer.GetRoot());
    Data.append(JsonData);
  }

  // Compress the data:
  AString CompressedData;
  int errorcode = CompressString(Data.data(), Data.size(), CompressedData, m_CompressionFactor);
  if (errorcode != Z_OK)
  {
    LOGERROR("Error %i compressing data for chunk [%d, %d, %d]", errorcode, a_Chunk.m_ChunkX, a_Chunk.m_ChunkY, a_Chunk.m_ChunkZ);
    return false;
  }

  // Erase any existing data for the chunk:
  EraseChunkData(a_Chunk);

  // Save the header:
  sChunkHeader * Header = new sChunkHeader;
  if (Header == NULL)
  {
    LOGWARNING("Cannot create a new chunk header to save chunk [%d, %d, %d]", a_Chunk.m_ChunkX, a_Chunk.m_ChunkY, a_Chunk.m_ChunkZ);
    return false;
  }
  Header->m_CompressedSize = (int)CompressedData.size();
  Header->m_ChunkX = a_Chunk.m_ChunkX;
  Header->m_ChunkZ = a_Chunk.m_ChunkZ;
  Header->m_UncompressedSize = (int)Data.size();
  m_ChunkHeaders.push_back(Header);

  m_DataContents.append(CompressedData.data(), CompressedData.size());

  m_NumDirty++;
  return true;
}
bool cWSSAnvil::cMCAFile::GetChunkData(const cChunkCoords & a_Chunk, AString & a_Data)
{
  if (!OpenFile(true))
  {
    return false;
  }

  int LocalX = a_Chunk.m_ChunkX % 32;
  if (LocalX < 0)
  {
    LocalX = 32 + LocalX;
  }
  int LocalZ = a_Chunk.m_ChunkZ % 32;
  if (LocalZ < 0)
  {
    LocalZ = 32 + LocalZ;
  }
  unsigned ChunkLocation = ntohl(m_Header[LocalX + 32 * LocalZ]);
  unsigned ChunkOffset = ChunkLocation >> 8;

  m_File.Seek(ChunkOffset * 4096);

  int ChunkSize = 0;
  if (m_File.Read(&ChunkSize, 4) != 4)
  {
    return false;
  }
  ChunkSize = ntohl(ChunkSize);
  char CompressionType = 0;
  if (m_File.Read(&CompressionType, 1) != 1)
  {
    return false;
  }
  if (CompressionType != 2)
  {
    // Chunk is in an unknown compression
    return false;
  }
  ChunkSize--;

  // HACK: This depends on the internal knowledge that AString's data() function returns the internal buffer directly
  a_Data.assign(ChunkSize, '\0');
  return (m_File.Read((void *)a_Data.data(), ChunkSize) == ChunkSize);
}
void cLuaState::LogStack(lua_State * a_LuaState, const char * a_Header)
{
  // Format string consisting only of %s is used to appease the compiler
  LOG("%s", (a_Header != nullptr) ? a_Header : "Lua C API Stack contents:");
  for (int i = lua_gettop(a_LuaState); i > 0; i--)
  {
    AString Value;
    int Type = lua_type(a_LuaState, i);
    switch (Type)
    {
      case LUA_TBOOLEAN:       Value.assign((lua_toboolean(a_LuaState, i) != 0) ? "true" : "false"); break;
      case LUA_TLIGHTUSERDATA: Printf(Value, "%p", lua_touserdata(a_LuaState, i)); break;
      case LUA_TNUMBER:        Printf(Value, "%f", static_cast<double>(lua_tonumber(a_LuaState, i))); break;
      case LUA_TSTRING:        Printf(Value, "%s", lua_tostring(a_LuaState, i)); break;
      case LUA_TTABLE:         Printf(Value, "%p", lua_topointer(a_LuaState, i)); break;
      default:                 break;
    }
    LOGD("  Idx %d: type %d (%s) %s", i, Type, lua_typename(a_LuaState, Type), Value.c_str());
  }  // for i - stack idx
}
bool AMySQLServer::getFields(const AString& table, VECTOR_AString& sv, AString& error)
{
  if (!isInitialized())
  {
    error.assign("Database has not been initialized;");
    return false;
  }
  if (table.isEmpty())
  {
    error = "Please use a namespace;";
    return false;
  }

  sv.clear();
  AString query("SHOW COLUMNS FROM `");
  query += table;
  query += "`";

  MYSQL_RES *pmyresult = executeSQL(query, error);
  if (pmyresult)
  {
    MYSQL_ROW myrow;
    int iSize = (int)mysql_num_rows(pmyresult);
    for (int i = 0; i < iSize; ++i)
    {
      myrow = mysql_fetch_row(pmyresult);
      if (myrow)
      {
        sv.push_back(myrow[0]);
      }
    }
    mysql_free_result(pmyresult);
  }
  else
  {
    return false;
  }

  return true;
}
bool AMySQLServer::getTables(VECTOR_AString& sv, AString& error)
{
  if (!isInitialized())
  {
    error.assign("Database has not been initialized;");
    return false;
  }

  sv.clear();
  MYSQL_RES *pmyresult = mysql_list_tables(mp_mydata, NULL);
  if (pmyresult)
  {
    MYSQL_ROW myrow;
    int iSize = (int)mysql_num_rows(pmyresult);
    for (int i = 0; i < iSize; ++i)
    {
      myrow = mysql_fetch_row(pmyresult);
      if (myrow)
      {
        sv.push_back(myrow[0]);
      }
    }
    mysql_free_result(pmyresult);
  }
  else
  {
    error = "Error(";
    error += mysql_error(mp_mydata);
    error += ") looking for tables;";
    return false;
  }

  return true;
}
void cWSSCompact::cPAKFile::UpdateChunk2To3()
{
  int Offset = 0;
  AString NewDataContents;
  int ChunksConverted = 0;
  for (sChunkHeaders::iterator itr = m_ChunkHeaders.begin(); itr != m_ChunkHeaders.end(); ++itr)
  {
    sChunkHeader * Header = *itr;

    if (ChunksConverted % 32 == 0)
    {
      LOGINFO("Updating \"%s\" version 2 to version 3: " SIZE_T_FMT " %%", m_FileName.c_str(), (ChunksConverted * 100) / m_ChunkHeaders.size());
    }
    ChunksConverted++;

    AString Data;
    int UncompressedSize = Header->m_UncompressedSize;
    Data.assign(m_DataContents, Offset, Header->m_CompressedSize);
    Offset += Header->m_CompressedSize;

    // Crude data integrity check:
    const int ExpectedSize = (16 * 256 * 16) * 2 + (16 * 256 * 16) / 2;  // For version 2
    if (UncompressedSize < ExpectedSize)
    {
      LOGWARNING("Chunk [%d, %d] has too short decompressed data (%d bytes out of %d needed), erasing",
        Header->m_ChunkX, Header->m_ChunkZ,
        UncompressedSize, ExpectedSize
      );
      Offset += Header->m_CompressedSize;
      continue;
    }

    // Decompress the data:
    AString UncompressedData;
    {
      int errorcode = UncompressString(Data.data(), Data.size(), UncompressedData, UncompressedSize);
      if (errorcode != Z_OK)
      {
        LOGERROR("Error %d decompressing data for chunk [%d, %d]",
          errorcode,
          Header->m_ChunkX, Header->m_ChunkZ
        );
        Offset += Header->m_CompressedSize;
        continue;
      }
    }

    if (UncompressedSize != (int)UncompressedData.size())
    {
      LOGWARNING("Uncompressed data size differs (exp %d bytes, got " SIZE_T_FMT ") for chunk [%d, %d]",
        UncompressedSize, UncompressedData.size(),
        Header->m_ChunkX, Header->m_ChunkZ
      );
      Offset += Header->m_CompressedSize;
      continue;
    }

    char ConvertedData[ExpectedSize];
    memset(ConvertedData, 0, ExpectedSize);

    // Cannot use cChunk::MakeIndex because it might change again?????????
    // For compatibility, use what we know is current
    #define MAKE_3_INDEX( x, y, z ) ( x + (z * 16) + (y * 16 * 16) )

    unsigned int InChunkOffset = 0;
    for (int x = 0; x < 16; ++x) for (int z = 0; z < 16; ++z) for (int y = 0; y < 256; ++y)  // YZX Loop order is important, in 1.1 Y was first then Z then X
    {
      ConvertedData[MAKE_3_INDEX(x, y, z)] = UncompressedData[InChunkOffset];
      ++InChunkOffset;
    }  // for y, z, x

    unsigned int index2 = 0;
    for (int x = 0; x < 16; ++x) for (int z = 0; z < 16; ++z) for (int y = 0; y < 256; ++y)
    {
      ConvertedData[InChunkOffset + MAKE_3_INDEX(x, y, z) / 2] |= ((UncompressedData[InChunkOffset + index2 / 2] >> ((index2 & 1) * 4)) & 0x0f) << ((x & 1) * 4);
      ++index2;
    }
    InChunkOffset += index2 / 2;

    index2 = 0;
    for (int x = 0; x < 16; ++x) for (int z = 0; z < 16; ++z) for (int y = 0; y < 256; ++y)
    {
      ConvertedData[InChunkOffset + MAKE_3_INDEX(x, y, z) / 2] |= ((UncompressedData[InChunkOffset + index2 / 2] >> ((index2 & 1) * 4)) & 0x0f) << ((x & 1) * 4);
      ++index2;
    }
    InChunkOffset += index2 / 2;

    index2 = 0;
    for (int x = 0; x < 16; ++x) for (int z = 0; z < 16; ++z) for (int y = 0; y < 256; ++y)
    {
      ConvertedData[InChunkOffset + MAKE_3_INDEX(x, y, z) / 2] |= ((UncompressedData[InChunkOffset + index2 / 2] >> ((index2 & 1) * 4)) & 0x0f) << ((x & 1) * 4);
      ++index2;
    }
    InChunkOffset += index2 / 2;

    AString Converted(ConvertedData, ExpectedSize);

    // Add JSON data afterwards
    if (UncompressedData.size() > InChunkOffset)
    {
      Converted.append(UncompressedData.begin() + InChunkOffset, UncompressedData.end());
    }

    // Re-compress data
    AString CompressedData;
    {
      int errorcode = CompressString(Converted.data(), Converted.size(), CompressedData, m_CompressionFactor);
      if (errorcode != Z_OK)
      {
        LOGERROR("Error %d compressing data for chunk [%d, %d]",
          errorcode,
          Header->m_ChunkX, Header->m_ChunkZ
        );
        continue;
      }
    }

    // Save into file's cache
    Header->m_UncompressedSize = Converted.size();
    Header->m_CompressedSize = CompressedData.size();
    NewDataContents.append(CompressedData);
  }

  // Done converting
  m_DataContents = NewDataContents;
  m_ChunkVersion = 3;
  SynchronizeFile();
  LOGINFO("Updated \"%s\" version 2 to version 3", m_FileName.c_str());
}
void cWSSCompact::cPAKFile::UpdateChunk1To2()
{
  int Offset = 0;
  AString NewDataContents;
  int ChunksConverted = 0;
  for (sChunkHeaders::iterator itr = m_ChunkHeaders.begin(); itr != m_ChunkHeaders.end(); ++itr)
  {
    sChunkHeader * Header = *itr;

    if (ChunksConverted % 32 == 0)
    {
      LOGINFO("Updating \"%s\" version 1 to version 2: " SIZE_T_FMT " %%", m_FileName.c_str(), (ChunksConverted * 100) / m_ChunkHeaders.size());
    }
    ChunksConverted++;

    AString Data;
    int UncompressedSize = Header->m_UncompressedSize;
    Data.assign(m_DataContents, Offset, Header->m_CompressedSize);
    Offset += Header->m_CompressedSize;

    // Crude data integrity check:
    int ExpectedSize = (16 * 128 * 16) * 2 + (16 * 128 * 16) / 2;  // For version 1
    if (UncompressedSize < ExpectedSize)
    {
      LOGWARNING("Chunk [%d, %d] has too short decompressed data (%d bytes out of %d needed), erasing",
        Header->m_ChunkX, Header->m_ChunkZ,
        UncompressedSize, ExpectedSize
      );
      Offset += Header->m_CompressedSize;
      continue;
    }

    // Decompress the data:
    AString UncompressedData;
    {
      int errorcode = UncompressString(Data.data(), Data.size(), UncompressedData, UncompressedSize);
      if (errorcode != Z_OK)
      {
        LOGERROR("Error %d decompressing data for chunk [%d, %d]",
          errorcode,
          Header->m_ChunkX, Header->m_ChunkZ
        );
        Offset += Header->m_CompressedSize;
        continue;
      }
    }

    if (UncompressedSize != (int)UncompressedData.size())
    {
      LOGWARNING("Uncompressed data size differs (exp %d bytes, got " SIZE_T_FMT ") for chunk [%d, %d]",
        UncompressedSize, UncompressedData.size(),
        Header->m_ChunkX, Header->m_ChunkZ
      );
      Offset += Header->m_CompressedSize;
      continue;
    }

    // Old version is 128 blocks high with YZX axis order
    char ConvertedData[cChunkDef::BlockDataSize];
    int Index = 0;
    unsigned int InChunkOffset = 0;
    for (int x = 0; x < 16; ++x) for (int z = 0; z < 16; ++z)
    {
      for (int y = 0; y < 128; ++y)
      {
        ConvertedData[Index++] = UncompressedData[y + z * 128 + x * 128 * 16 + InChunkOffset];
      }
      // Add 128 empty blocks after an old y column
      memset(ConvertedData + Index, E_BLOCK_AIR, 128);
      Index += 128;
    }
    InChunkOffset += (16 * 128 * 16);

    for (int x = 0; x < 16; ++x) for (int z = 0; z < 16; ++z)  // Metadata
    {
      for (int y = 0; y < 64; ++y)
      {
        ConvertedData[Index++] = UncompressedData[y + z * 64 + x * 64 * 16 + InChunkOffset];
      }
      memset(ConvertedData + Index, 0, 64);
      Index += 64;
    }
    InChunkOffset += (16 * 128 * 16) / 2;

    for (int x = 0; x < 16; ++x) for (int z = 0; z < 16; ++z)  // Block light
    {
      for (int y = 0; y < 64; ++y)
      {
        ConvertedData[Index++] = UncompressedData[y + z * 64 + x * 64 * 16 + InChunkOffset];
      }
      memset(ConvertedData + Index, 0, 64);
      Index += 64;
    }
    InChunkOffset += (16 * 128 * 16) / 2;

    for (int x = 0; x < 16; ++x) for (int z = 0; z < 16; ++z)  // Sky light
    {
      for (int y = 0; y < 64; ++y)
      {
        ConvertedData[Index++] = UncompressedData[y + z * 64 + x * 64 * 16 + InChunkOffset];
      }
      memset(ConvertedData + Index, 0, 64);
      Index += 64;
    }
    InChunkOffset += (16 * 128 * 16) / 2;

    AString Converted(ConvertedData, ARRAYCOUNT(ConvertedData));

    // Add JSON data afterwards
    if (UncompressedData.size() > InChunkOffset)
    {
      Converted.append(UncompressedData.begin() + InChunkOffset, UncompressedData.end());
    }

    // Re-compress data
    AString CompressedData;
    {
      int errorcode = CompressString(Converted.data(), Converted.size(), CompressedData, m_CompressionFactor);
      if (errorcode != Z_OK)
      {
        LOGERROR("Error %d compressing data for chunk [%d, %d]",
          errorcode,
          Header->m_ChunkX, Header->m_ChunkZ
        );
        continue;
      }
    }

    // Save into file's cache
    Header->m_UncompressedSize = Converted.size();
    Header->m_CompressedSize = CompressedData.size();
    NewDataContents.append(CompressedData);
  }

  // Done converting
  m_DataContents = NewDataContents;
  m_ChunkVersion = 2;
  SynchronizeFile();
  LOGINFO("Updated \"%s\" version 1 to version 2", m_FileName.c_str());
}
void AOSContextManager::adminEmitXml(AXmlElement& eBase, const AHTTPRequestHeader& request)
{
  AOSAdminInterface::adminEmitXml(eBase, request);

  //a_Check if specific context is required
  AString contextId;
  if (request.getUrl().getParameterPairs().get(ASW("contextId",9), contextId))
  {
    //a_Find the specific context
    {
      ALock lock(m_InUseSync);
      for (CONTEXT_INUSE::const_iterator cit = m_InUse.begin(); cit != m_InUse.end(); ++cit)
      {
        if (contextId == AString::fromPointer(*cit))
        {
          adminAddProperty(eBase, ASW("contextDetail",13), *(*cit), AXmlElement::ENC_CDATADIRECT);
          return;
        }
      }
    }

    {
      ALock lock(m_ErrorHistory.useSync());
      for (ABase *p = m_ErrorHistory.useTail(); p; p = p->usePrev())
      {
        AOSContext *pContext = dynamic_cast<AOSContext *>(p);
        if (contextId == AString::fromPointer(pContext))
        {
          adminAddProperty(eBase, ASW("contextDetail",13), *pContext, AXmlElement::ENC_CDATADIRECT);
          return;
        }
      }
    }

    {
      ALock lock(m_History.useSync());
      for (ABase *p = m_History.useTail(); p; p = p->usePrev())
      {
        AOSContext *pContext = dynamic_cast<AOSContext *>(p);
        if (contextId == AString::fromPointer(pContext))
        {
          adminAddProperty(eBase, ASW("contextDetail",13), *pContext, AXmlElement::ENC_CDATADIRECT);
          return;
        }
      }
    }

    //a_Context no longer available
    adminAddError(eBase, ASWNL("AOSContext no longer available"));
  }
  else
  {
    //a_Display context summary
    for (size_t i = 0; i < m_Queues.size(); ++i)
    {
      AOSContextQueueInterface *pQueue = m_Queues.at(i);
      if (pQueue)
        adminAddProperty(eBase, AString::fromSize_t(i), pQueue->getClass());
      else
        adminAddProperty(eBase, AString::fromSize_t(i), AConstant::ASTRING_NULL);
    }

    adminAddPropertyWithAction(
      eBase,
      ASW("log_level",9),
      AString::fromInt(m_DefaultEventLogLevel),
      ASW("Update",6),
      ASWNL("Maximum event to log 0:Disabled 1:Error, 2:Event, 3:Warning, 4:Info, 5:Debug"),
      ASW("Set",3)
    );

    {
      AString str;
      str.assign('[');
      str.append(AString::fromSize_t(m_History.size()));
      str.append('/');
      str.append(AString::fromSize_t(m_HistoryMaxSize));
      str.append(']');
      AXmlElement& eHistory = eBase.addElement("object").addAttribute("name", "history");
      adminAddPropertyWithAction(eHistory, ASWNL("max_size"), str, ASW("Set",3), ASWNL("Set maximum AOSContext history size (objects not immediately deleted)"), ASW("Set",3));
    }

    adminAddPropertyWithAction(
      eBase,
      ASW("clear_history",13),
      AConstant::ASTRING_EMPTY,
      ASW("Clear",5),
      ASWNL("Clear context history: 0:all 1:history 2:error history"),
      ASW("Clear",5)
    );

    {
      AString str;
      str.assign('[');
      str.append(AString::fromSize_t(m_ErrorHistory.size()));
      str.append('/');
      str.append(AString::fromSize_t(m_ErrorHistoryMaxSize));
      str.append(']');
      AXmlElement& eErrorHistory = eBase.addElement("object").addAttribute("name", "error_history");
      adminAddPropertyWithAction(eErrorHistory, ASWNL("max_size"), str, ASW("Set",3), ASWNL("Set maximum AOSContext error history size (objects not immediately deleted)"), ASW("Set",3));
    }

    {
      AString str;
      str.assign('[');
      str.append(AString::fromSize_t(m_FreeStore.size()));
      str.append('/');
      str.append(AString::fromSize_t(m_FreeStoreMaxSize));
      str.append(']');
      AXmlElement& eFreestore = eBase.addElement("object").addAttribute("name", "freestore");
      adminAddPropertyWithAction(eFreestore, ASWNL("max_size"), str, ASW("Set",3), ASWNL("Set maximum AOSContext freestore size (objects not immediately deleted)"), ASW("Set",3));
    }

    AXmlElement& eInUse = eBase.addElement("object");
    eInUse.addAttribute("name", "InUse");
    eInUse.addAttribute("size", AString::fromSize_t(m_InUse.size()));
    CONTEXT_INUSE::const_iterator citU = m_InUse.begin();
    if (citU != m_InUse.end())
    {
      ALock lock(m_InUseSync);
      while (citU != m_InUse.end())
      {
        AXmlElement& eProp = adminAddProperty(eInUse, ASW("context",7), (*citU)->useEventVisitor(), AXmlElement::ENC_CDATADIRECT);
        eProp.addAttribute(ASW("errors",6), AString::fromSize_t((*citU)->useEventVisitor().getErrorCount()));
        eProp.addElement(ASW("url",3)).addData((*citU)->useEventVisitor().useName(), AXmlElement::ENC_CDATADIRECT);
        eProp.addElement(ASW("contextId",9)).addData(AString::fromPointer(*citU), AXmlElement::ENC_CDATADIRECT);
        ++citU;
      }
    }

    {
      AXmlElement& eHistory = eBase.addElement("object").addAttribute("name", "history");
      ALock lock(m_History.useSync());
      for (ABase *p = m_History.useTail(); p; p = p->usePrev())
      {
        AOSContext *pContext = dynamic_cast<AOSContext *>(p);
        if (pContext)
        {
          AXmlElement& eProp = adminAddProperty(eHistory, ASW("context",7), pContext->useEventVisitor(), AXmlElement::ENC_CDATADIRECT);
          eProp.addAttribute(ASW("errors",6), AString::fromSize_t(pContext->useEventVisitor().getErrorCount()));
          eProp.addElement(ASW("url",3), pContext->useEventVisitor().useName(), AXmlElement::ENC_CDATADIRECT);
          eProp.addElement(ASW("contextId",9)).addData(AString::fromPointer(pContext), AXmlElement::ENC_CDATADIRECT);
        }
        else
          ATHROW(this, AException::InvalidObject);
      }
    }

    {
      AXmlElement& eErrorHistory = eBase.addElement("object").addAttribute("name", "error_history");
      ALock lock(m_ErrorHistory.useSync());
      for (ABase *p = m_ErrorHistory.useTail(); p; p = p->usePrev())
      {
        AOSContext *pContext = dynamic_cast<AOSContext *>(p);
        if (pContext)
        {
          AXmlElement& eProp = adminAddProperty(eErrorHistory, ASW("context",7), pContext->useEventVisitor(), AXmlElement::ENC_CDATADIRECT);
          eProp.addAttribute(ASW("errors",6), AString::fromSize_t(pContext->useEventVisitor().getErrorCount()));
          eProp.addElement(ASW("url",3), pContext->useEventVisitor().useName(), AXmlElement::ENC_CDATADIRECT);
          eProp.addElement(ASW("contextId",9)).addData(AString::fromPointer(pContext), AXmlElement::ENC_CDATADIRECT);
        }
        else
          ATHROW(this, AException::InvalidObject);
      }
    }
  }
}
void AWordUtility::getPlural(const AString& one, AString& many)
{
  many.clear();

  //a_Words of size 1 or 2, just append s and return
  if (one.getSize() < 3)
  {
    many.assign(one);
    many.append('s');
    return;
  }

  switch (one.last())
  {
    case 's':
    {
      char c = one.at(one.getSize()-2);
      if ('i' == c)
      {
        //a_"is" -> "es"
        many.assign(one);
        many.set('e', many.getSize()-2);
      }
      else if ('u' == c)
      {
        //a_"us" -> "ii"
        one.peek(many, 0, one.getSize()-2);
        many.append("ii", 2);
      }
      else
      {
        many.assign(one);
        many.append("es", 2);
      }
    }
    break;

    case 'z':
    case 'x':
      many.assign(one);
      many.append("es", 2);
    break;

    case 'h':
    {
      char c = one.at(one.getSize()-2);
      if ('s' == c || 'c' == c)
      {
        many.assign(one);
        many.append("es", 2);
      }
      else
      {
        many.assign(one);
        many.append('s');
      }
    }
    break;

    case 'y':
    {
      char c = one.at(one.getSize()-2);
      if (AConstant::npos != sstr_Vowels.find(c))
      {
        //a_vowel+'y', add 's'
        many.assign(one);
        many.append('s');
      }
      else
      {
        //a_consonant+'y', convert 'y' to 'ies'
        one.peek(many, 0, one.getSize()-1);
        many.append("ies", 3);
      }
    }
    break;

    default:
      many.assign(one);
      many.append('s');
    break;
  }
}
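A short usage sketch with results read off the branches above (assuming getPlural is callable as a static member and AString constructs from a C string, as elsewhere in these snippets):

// Expected values follow directly from the switch above.
void demoGetPlural()
{
  AString many;
  AWordUtility::getPlural(AString("box"), many);     // "boxes"    ('x' -> append "es")
  AWordUtility::getPlural(AString("church"), many);  // "churches" ('h' preceded by 'c')
  AWordUtility::getPlural(AString("city"), many);    // "cities"   (consonant + 'y' -> "ies")
  AWordUtility::getPlural(AString("day"), many);     // "days"     (vowel + 'y' -> 's')
  AWordUtility::getPlural(AString("cat"), many);     // "cats"     (default -> 's')
}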
void AOSOutputExecutor::execute(AOSContext& context)
{
  AString command;
  if (
    context.useRequestParameterPairs().exists(OVERRIDE_OUTPUT)
    && m_Services.useConfiguration().isOutputOverrideAllowed()
  )
  {
    //a_Override requested and allowed
    context.useRequestParameterPairs().get(OVERRIDE_OUTPUT, command);
  }
  else
  {
    command = context.getOutputCommand();
    if (context.useEventVisitor().isLogging(AEventVisitor::EL_DEBUG))
    {
      ARope rope("Default output generator overridden to: ",40);
      rope.append(command);
      context.useEventVisitor().startEvent(rope, AEventVisitor::EL_DEBUG);
    }
  }

  if (command.equals("NOP"))
  {
    //a_If NOP was used force XML
    m_Services.useConfiguration().setMimeTypeFromExt(ASW("xml",3), context);
    if (context.useEventVisitor().isLogging(AEventVisitor::EL_DEBUG))
      context.useEventVisitor().startEvent(ASWNL("NOP detected, defaulting to XML output"), AEventVisitor::EL_DEBUG);
  }

  if (command.isEmpty())
  {
    if (!m_Services.useConfiguration().getAosDefaultOutputGenerator().isEmpty())
    {
      command.assign(m_Services.useConfiguration().getAosDefaultOutputGenerator());
      if (context.useEventVisitor().isLogging(AEventVisitor::EL_DEBUG))
      {
        ARope rope("No output generator specified, defaulting to: ",46);
        rope.append(command);
        context.useEventVisitor().startEvent(rope, AEventVisitor::EL_DEBUG);
      }
    }
    else
    {
      if (context.useEventVisitor().isLogging(AEventVisitor::EL_DEBUG))
        context.useEventVisitor().startEvent(ASW("No output generator, defaulting to XML",38), AEventVisitor::EL_DEBUG);
      return;
    }
  }

  try
  {
    //a_Find input command, if not found execute the default
    OutputGeneratorContainer::iterator it = m_OutputGenerators.find(command);
    if (it == m_OutputGenerators.end())
    {
      if (context.useEventVisitor().isLogging(AEventVisitor::EL_WARN))
      {
        ARope rope("Skipping unknown output generator: ",35);
        rope.append(command);
        context.useEventVisitor().startEvent(rope, AEventVisitor::EL_WARN);
      }
    }
    else
    {
      ATimer timer(true);

      //a_Generate output
      if (context.useEventVisitor().isLogging(AEventVisitor::EL_INFO))
      {
        ARope rope("Generating output: ",19);
        rope.append((*it).first);
        context.useEventVisitor().startEvent(rope, AEventVisitor::EL_INFO);
      }

      if (context.useContextFlags().isClear(AOSContext::CTXFLAG_IS_AJAX))
      {
        context.useModel().overwriteElement(ASW("execute/output", 14)).addData(command);

        //a_Publish timers
        context.getRequestTimer().emitXml(context.useModel().overwriteElement(ASW("request_time",12)));
        context.getContextTimer().emitXml(context.useModel().overwriteElement(ASW("context_time",12)));
      }

      //a_Generate output
      AOSContext::ReturnCode ret = (*it).second->execute(context);
      switch (ret)
      {
        case AOSContext::RETURN_OK:
        break;

        case AOSContext::RETURN_REDIRECT:
          if (context.useEventVisitor().isLogging(AEventVisitor::EL_INFO))
          {
            context.useEventVisitor().startEvent(ASWNL("Output generator has done a redirect"), AEventVisitor::EL_DEBUG);
          }
        break;

        default:
          context.addError((*it).second->getClass(), ASWNL("Output generator returned neither OK nor REDIRECT"));
          return;
      }

      //a_Event over
      context.useEventVisitor().endEvent();

      //a_Add execution time
      (*it).second->addExecutionTimeSample(timer.getInterval());
    }
  }
  catch(AException& ex)
  {
    AString strWhere("AOSOutputExecutor::execute(", 27);
    strWhere.append(command);
    strWhere.append(')');
    context.addError(strWhere, ex.what());

    AXmlElement& element = context.useModel().addElement("output_error");
    element.addElement("where", strWhere);
    element.addElement("exception", ex);
  }
  catch(...)
  {
    AString strWhere("AOSOutputExecutor::execute(", 27);
    strWhere.append(command);
    strWhere.append(')');
    context.addError(strWhere, ASWNL("Unknown Exception"));

    context.useModel().addElement("output_error").addData("Unknown Exception");
  }
}