//@njz //GAConjProblemForORGroupSolver::GAConjProblemForORGroupSolver( const OneRelatorGroup& group , const Word& W1 , const Word& W2 , bool createFile = true , bool cp = true ) : GAConjProblemForORGroupSolver::GAConjProblemForORGroupSolver( const OneRelatorGroup& group , const Word& W1 , const Word& W2 , bool createFile , bool cp ) : // theGroup( group ), conjProblem( cp ), fitnessRate( 0 ) { if( createFile ) { file = new File; deleteFile = true; } else { file = 0; deleteFile = false; } numGenes = 50; genes = new GACPforORGSolverGene*[numGenes]; for( int i=0 ; i<numGenes ; ++i ) genes[i] = 0; //create two genes newGene[0] = new GACPforORGSolverGene( theGroup , Word() , Word() ); newGene[1] = new GACPforORGSolverGene( theGroup , Word() , Word() ); if( file ) if( conjProblem ) *file << "This genetic algorithm tries to determine whether given words are conjugate." << endl << endl; else *file << "This genetic algorithm tries to determine whether the given word is trivial." << endl << endl; toStart( W1 , W2 ); }
// Greedily packs tWords into lines no wider than slWidth pixels and returns
// the lines; each returned Word is the concatenation of the words placed on
// that line. fScale scales the font metrics used for the kern/space
// adjustments. Space-only words that do not fit are silently dropped.
std::vector<Word> CText::ParseWordsIntoLines(std::vector<Word> tWords, slong slWidth, float fScale)
{
	std::vector<Word> tLines;
	int pixWidth = 0;   // pixels consumed on the line currently being filled
	int wordCount = 0;  // NOTE(review): never written or read — dead local
	int lineCount = 0;  // index of the line currently being filled
	tLines.push_back(Word());
	for (unsigned int i = 0; i < tWords.size(); i++)
	{
		//If the word fits on the current line, then add it to the line
		if (pixWidth + tWords[i].pixWidth <= slWidth)
		{
			tLines[lineCount] += tWords[i];
			pixWidth += tWords[i].pixWidth;
		}
		else if (tWords[i].text[0] == ' ') //If the word is actually space(s) then skip them
		{
			continue;
		}
		//Try it without the Kern
		else if (pixWidth + tWords[i].pixWidth - (slong)(fScale * m_vRunes[DEFAULT_FONT].pix.x * KERN) <= slWidth)
		{
			tLines[lineCount] += tWords[i];
			pixWidth += tWords[i].pixWidth - (slong)(fScale * m_vRunes[DEFAULT_FONT].pix.x * SPACE) - (slong)(fScale * m_vRunes[DEFAULT_FONT].pix.x * KERN);
		}
		else if (tWords[i].pixWidth > slWidth)//If the word is too big for any line, then put it somewhere
		{
			if (pixWidth == 0) //Put it on the current line
			{
				tLines[lineCount] += tWords[i];
				pixWidth += tWords[i].pixWidth;
			}
			else //Put it on the next line
			{
				lineCount++;
				pixWidth = 0;
				tLines.push_back(Word());
				tLines[lineCount] += tWords[i];
				pixWidth += tWords[i].pixWidth;
			}
		}
		else//If the word is too big for just that line, then put it on the next line
		{
			i--; // decrement so the loop retries this same word on the new line
			lineCount++;
			pixWidth = 0;
			tLines.push_back(Word());
		}
	}

	//Remove final kern on each line
	ulong size = tLines.size();
	for (ulong i = 0; i < size; i++)
		tLines[i].pixWidth -= (slong)(fScale * m_vRunes[DEFAULT_FONT].pix.x * KERN);

	return tLines;
}
// Builds a random Whitehead automorphism on nOfGens generators.
// One randomly chosen generator is mapped to the multiplier a = x^{+-1};
// every other generator x is mapped to one of the five Whitehead options
// x, x^-1, x*a, a^-1*x or a^-1*x*a, chosen uniformly.
Map RMap::getRandomWhiteheadAuto( int nOfGens )
{
  vector<Word> image(nOfGens);

  // Pick the special generator and a random sign for the multiplier.
  const int specialIdx = RandLib::ur.irand(0, nOfGens-1);
  auto mult = specialIdx + 1;
  if (RandLib::ur.rand() < 0.5)
    mult = -mult;
  image[specialIdx] = Word(mult);

  for (int gen = 0; gen < nOfGens; gen++) {
    if (gen == specialIdx)
      continue;
    switch (RandLib::ur.irand(0, 4)) {
      case 0:  image[gen] = Word(gen+1);                            break; // x
      case 1:  image[gen] = Word(-(gen+1));                         break; // x^-1
      case 2:  image[gen] = Word(gen+1) * Word(mult);               break; // x a
      case 3:  image[gen] = Word(-mult) * Word(gen+1);              break; // a^-1 x
      default: image[gen] = Word(-mult) * Word(gen+1) * Word(mult); break; // a^-1 x a
    }
  }

  return Map(nOfGens, nOfGens, image);
}
// Returns a word for this left normal form in which a negative omega power is
// partially cancelled against the positive permutation braids of the
// decomposition. When the power is non-negative (or there is nothing to cancel
// against) the plain getWord() result is returned.
Word ThLeftNormalForm::getReducedWord() const {
  if (theOmegaPower >= 0 || theDecomposition.size() == 0)
    return getWord();

  const auto p = -theOmegaPower;              // number of omega^-1 factors
  const auto d = theDecomposition.size();
  const auto a = p < d ? p : d;               // how many factors can be cancelled

  Word result;

  // 1. Process omega: emit the part that cannot be cancelled, omega^-(p-a)
  const Permutation omega = Permutation::getHalfTwistPermutation(theRank);
  Word omegaWord = Word(omega.geodesicWord());
  omegaWord = -omegaWord;
  result = omegaWord.power(p - a);

  // 2. Cancel omega^-1 with positive permutations; the flip on alternating
  //    factors accounts for conjugation by omega while commuting it through.
  auto it = theDecomposition.begin();
  for (int i = 0; i < a; ++i, ++it) {
    auto perm = (-(*it)) * omega;
    if ((a - i - 1) % 2 != 0)
      perm = perm.flip();
    result *= -Word(perm.geodesicWord());
  }

  // 3. process the rest of positive permutations verbatim
  for (; it != theDecomposition.end(); ++it) {
    result *= Word((*it).geodesicWord());
  }
  return result;
}
// Records value-profile data for a memcmp/strcmp-style comparison so the
// fuzzer can detect byte-level progress: hashes both operands, finds the
// length of the common prefix (stopping at NUL when StopAtZero, i.e. for
// str* comparisons), and feeds (caller PC low bits | prefix length) into the
// value profile map plus both operands into the recent-compares table TORCW.
ATTRIBUTE_NO_SANITIZE_ALL
void TracePC::AddValueForMemcmp(void *caller_pc, const void *s1, const void *s2,
                                size_t n, bool StopAtZero) {
  if (!n) return;
  size_t Len = std::min(n, Word::GetMaxSize());
  const uint8_t *A1 = reinterpret_cast<const uint8_t *>(s1);
  const uint8_t *A2 = reinterpret_cast<const uint8_t *>(s2);
  uint8_t B1[Word::kMaxSize];
  uint8_t B2[Word::kMaxSize];
  // Copy the data into locals in this non-msan-instrumented function
  // to avoid msan complaining further.
  size_t Hash = 0;  // Compute some simple hash of both strings.
  for (size_t i = 0; i < Len; i++) {
    B1[i] = A1[i];
    B2[i] = A2[i];
    size_t T = B1[i];
    Hash ^= (T << 8) | B2[i];
  }
  // I = length of the common prefix (bounded by a zero byte if StopAtZero).
  size_t I = 0;
  for (; I < Len; I++)
    if (B1[I] != B2[I] || (StopAtZero && B1[I] == 0))
      break;
  size_t PC = reinterpret_cast<size_t>(caller_pc);
  // Pack the low 12 bits of the PC with the prefix length into one index.
  size_t Idx = (PC & 4095) | (I << 12);
  ValueProfileMap.AddValue(Idx);
  TORCW.Insert(Idx ^ Hash, Word(B1, Len), Word(B2, Len));
}
void TestHash() { Thing key = Word(7); Thing item = Word(11); Thing X, Y; CommentLine("Testing Hashtable"); Hash H = NewHash(NULL); HashIns(H, key, item); //BAD CODE: if (IntWord(HashGet(H, key)) != 11) //{ printf("no match!\n"); } //HashGet can return NULL! X = HashGet(H, key); assert(X); assert(SameThing(X, item)); if (ThingCmp(X, item) != EQ) printf("error in retr hash"); Y = HashRm(H, key); assert(SameThing(Y, item)); assert(SameThing(X, Y)); DelThing(X); X = HashRm(H, key); assert(X == NULL); DelHash(H); //asm("int3"); DelThing(Y); // this calls the dtor on item DelThing(key); printf("Basic insert/retrieve in hash table done.\n"); }
/* Config-file section callback for a "dest group".
 * Invoked once with CONF_BEGIN_CHAR to allocate parser state, once per
 * attribute line within the section, and once with CONF_END_CHAR to commit
 * (or discard) the finished group. Returns the state pointer the config
 * machinery passes back on the next call. */
void * parse_dest_group(char *buff, void *state)
{
    dest_group_t *group;
    dest_t *dest;

    if (*buff == CONF_BEGIN_CHAR) {
        /* Section start: allocate a zeroed group.
         * NOTE(review): malloc result is not checked before MEMSET — confirm
         * the project's allocator conventions. */
        group = (dest_group_t *) malloc(sizeof(dest_group_t));
        MEMSET(group, 0, sizeof(dest_group_t));
        return ((void *) group);
    } else if (*buff == CONF_END_CHAR) {
        /* Section end: register the group; ownership passes to dest_group_add
         * on success. */
        ASSERT(state != NULL);
        group = (dest_group_t *) state;
        if (group->name == NULL || !dest_group_add(group)) {
            free(group); /* Duplicate/invalid group, so delete the structure */
        }
        return (NULL);
    } else {
        /* Attribute line inside the section. */
        ASSERT(state != NULL);
        group = (dest_group_t *) state;
        if (!BEG_STRCASECMP(buff, "name")) {
            group->name = Word(2, buff);
        } else if (!BEG_STRCASECMP(buff, "dest")) {
            /* NOTE(review): unlike group, dest is not zeroed after malloc —
             * confirm dest_t has no fields dest_group_add_dest expects clear. */
            dest = (dest_t *) malloc(sizeof(dest_t));
            dest->name = Word(2, buff);
            if (dest->name == NULL || !dest_group_add_dest(group, dest)) {
                free(dest);
            }
        } else {
            print_error("Parse error in file %s, line %lu: Attribute \"%s\" is not valid in the current context.", file_peek_path(), file_peek_line(), buff);
        }
        return (state);
    }
}
// Parses a run of "key=value" pairs starting at inOutPos and returns them as
// a map (later duplicates overwrite earlier ones). Parsing stops at the first
// token that does not form a complete pair, and inOutPos is rewound to the
// beginning of that incomplete pair so the caller can resume from there.
std::map<std::string, std::string> WhoParser::KeyValues(const char * inStr, int & inOutPos)
{
	int pos;
	std::map<std::string, std::string> keyValueMap;
	while( true )
	{
		pos = inOutPos;  // remember where this candidate pair started
		std::string key = Word(inStr, inOutPos, '=');
		if( key == "" )
			break;
		EatWhitespace(inStr, inOutPos);
		char eq = inStr[inOutPos++];
		if( eq != '=' )
			break;  // key without '=' — not a pair
		std::string value = Word(inStr, inOutPos);
		if( value == "" )
			break;  // '=' without a value — incomplete pair
		keyValueMap[key] = value;
	}
	inOutPos = pos;  // rewind to the start of the failed/incomplete pair
	return keyValueMap;
}
/* ** Relation() reads a relation. The defining rule is: ** ** relation: word | word '=' word | word '=:' word | word ':=' word ** ** A relation starts either with 'generator', with '(' or with '['. */ static node *Relation(void) { node *n, *o; if (Token != GEN && Token != LPAREN && Token != LBRACK) SyntaxError("relation expected"); o = Word(); if (Token == EQUAL) { NextToken(); n = o; o = GetNode(TREL); o->cont.op.l = n; o->cont.op.r = Word(); } else if (Token == DEQUALL) { NextToken(); n = o; o = GetNode(TDRELL); o->cont.op.l = n; o->cont.op.r = Word(); } else if (Token == DEQUALR) { NextToken(); n = o; o = GetNode(TDRELR); o->cont.op.l = n; o->cont.op.r = Word(); } return o; }
// Parses one relator: either a single word, or "u = v" which is turned into
// the relator u*v^-1. The result is cyclically reduced. On any parse failure
// errMesg is set and the empty word is returned.
Word PresentationParser::parseRelator( const VectorOf<Chars>& names, Chars& errMesg )
{
  genNames = names;
  if ( curToken == INIT ) getToken();

  if ( !atStartOfWord() ) {
    parseError("Expected a word here");
    errMesg = parseErrorMessage;
    return Word();
  }

  Word u = parseWord( names, errMesg );
  if ( errMesg.length() > 0 )
    return Word();

  // No '=' sign: the word itself is the relator.
  if ( curToken != EQUALS )
    return u.cyclicallyReduce();

  // "u = v" form: relator is u * v^-1.
  getToken();
  Word v = parseWord( names, errMesg );
  if ( errMesg.length() > 0 )
    return Word();
  return ((Word)(u * v.inverse())).cyclicallyReduce();
}
// Returns the neighbours of eq in the transformation graph: for every
// non-generator letter of eq, and for each of its immediate left/right
// neighbours g (when distinct from the letter itself), an adjoint
// transformation is applied with the candidate replacement word
// g^-1 * x (left) or x * g^-1 (right).
set< Word > QuadEquationTranformationGraph::getNeighbours( const Word& eq )
{
  set< Word > result;

  // construct all of its descendants
  Word::const_iterator eq_it = eq.begin( );
  for( ; eq_it!=eq.end( ) ; ++eq_it ) {

    // skip generators — only variable letters are transformed
    if( theEquation.isGenerator( *eq_it ) )
      continue;

    // look to the left
    Word::const_iterator eq_it2 = eq_it;
    if( eq_it2!=eq.begin( ) ) {
      int lg = *(--eq_it2);
      // cout << *eq_it << " , " << lg << endl;
      // skip when the neighbour equals this letter (transformation is trivial)
      if( *eq_it-lg!=0 )
        result.insert( applyAdjointTransformation( eq , *eq_it , Word( -lg ) * Word( *eq_it ) ) );
    }

    // look to the right
    eq_it2 = eq_it;
    if( ++eq_it2!=eq.end( ) ) {
      int lg = *eq_it2;
      // cout << *eq_it << " , " << lg << endl;
      if( *eq_it-lg!=0 )
        result.insert( applyAdjointTransformation( eq , *eq_it , Word( *eq_it ) * Word( -lg ) ) );
    }
  }

  return result;
}
// ContainsBytes must not report a match for any of these candidates: a longer
// superstring, entirely different bytes, and near-misses on the multi-byte
// UTF-8 content (the comparison is byte-wise, not codepoint-wise).
TEST( WordTest, DoesntMatchBytes ) {
  Word word( "Fo𐍈βAr" );

  EXPECT_FALSE( word.ContainsBytes( Word( "Fo𐍈βArε" ) ) );
  EXPECT_FALSE( word.ContainsBytes( Word( "gggg" ) ) );
  EXPECT_FALSE( word.ContainsBytes( Word( "χ" ) ) );
  EXPECT_FALSE( word.ContainsBytes( Word( "nfooΒar" ) ) );
  EXPECT_FALSE( word.ContainsBytes( Word( "Fβrmmm" ) ) );
}
int main(){ Word w1 = Word(yeti,4711); assert(w1 == (yeti * Word::lowestLangbit | 4711)); Word w2 = Word(french,815); assert(w2 == (french * Word::lowestLangbit | 815)); Word w3 = Word(english,123); assert(w3 == (english * Word::lowestLangbit | 123)); return 0; }
// Builds a group word from the edge map EM. Each key of EM is a point of Z^N
// extended by one extra coordinate holding the edge direction; the mapped int
// is the (signed) edge arity. Negatively-weighted edges are first re-oriented
// to start at their other endpoint, then each connected component is
// exhausted into a loop R and conjugated into place by the tail word of its
// base point.
Word FreeMetabelianGroupAlgorithms::getWordFromEdgeMap( int N , const map< vector< int > , int >& EM )
{
  Word result;

  // adjacencyList = point in Z^n -> (edge direction, arity)
  map< vector< int > , set< pair< int , int > > > adjacencyList;
  for( map< vector< int > , int >::const_iterator E_it=EM.begin( ) ; E_it!=EM.end( ) ; ++E_it ) {
    pair< vector< int > , int > C = *E_it;
    int direction = C.first[N];  // the extra (last) coordinate is the direction
    C.first.pop_back( );
    if( C.second<0 ) {
      // negative arity: flip the edge so the arity becomes positive
      ++C.first[direction-1];
      adjacencyList[C.first].insert( pair< int , int >( -direction , -C.second ) );
    } else {
      adjacencyList[C.first].insert( pair< int , int >( direction , C.second ) );
    }
  }

  // Cover the components
  while( !adjacencyList.empty( ) ) {
    pair< vector< int > , set< pair< int , int > > > C = *adjacencyList.begin( );
    list< int > R;
    exhaust( N , adjacencyList , C.first , C.first , R , R.begin( ) );
    Word TW = getTailWord( N , C.first );
    result *= TW*Word( R )*-TW;  // conjugate the component's loop to the base point
  }

  return result;
}
// Appends one Word per whitespace-separated token of "words.txt" to dict.
// If the file cannot be opened, dict is left unchanged (the loop never runs).
void loadWords(std::vector<Word> &dict)
{
    std::ifstream inf("words.txt");
    std::string str;
    while (inf >> str) {
        // Construct the Word in place instead of emplace_back(Word(str)),
        // which built a temporary and then moved/copied it.
        dict.emplace_back(str);
    }
}
// Returns a comparatively short word representing this right normal form.
// For a non-negative power this is just getWord(); for a negative power the
// trailing omega factors are partially cancelled against the permutation
// braids of the decomposition.
Word ThRightNormalForm::getShortWord( ) const
{
  Word result;
  Permutation omega = Permutation::getHalfTwistPermutation( getRank( ) );
  int power = getPower( );

  if( power<0 ) {

    const list< Permutation >& decomp = getDecomposition( );
    // Fix: the original computed "j - decomp.size() - power" in unsigned
    // arithmetic (size_t), relying on wrap-around and an implementation-
    // defined narrowing back to int. Do the subtraction in signed ints.
    const int dSize = (int)decomp.size( );
    list<Permutation>::const_iterator it = decomp.begin( );
    for( int j=0 ; it!=decomp.end( ) ; ++it, ++j ) {

      int n = j - dSize - power;
      if( n<0 ) {
        // Factor not cancelled: emit its geodesic as positive letters.
        vector< int > gd = (*it).geodesic();
        for( size_t t=0 ; t<gd.size() ; ++t )
          result.push_back( gd[t]+1 );
      } else {
        // Factor cancelled against omega: emit the conjugated inverse,
        // reversed, as negative letters (side of omega alternates with n).
        Permutation p = ( n%2 == 1 ? (*it).inverse() * omega : omega * (*it).inverse() );
        vector<int> gd = p.geodesic();
        for( int t=(int)gd.size( )-1 ; t>=0 ; --t )
          result.push_back( -gd[t]-1 );
      }
    }

    // Prepend the omega^-1 factors that had nothing to cancel against.
    Word omega_w = Word(omega.geodesicWord( ));
    omega_w = -omega_w;
    for( int j=dSize ; j<-power ; ++j )
      result = omega_w*result;

  } else {
    result = getWord( );
  }

  return result;
}
bool WhoParser::ShowDrawer(const char * inStr, int & inOutPos, std::ostream & inErrorStream) { int pos = inOutPos; if( Word(inStr, inOutPos) == "showDrawer" ) { std::map<std::string, std::string> keyValues = KeyValues(inStr, inOutPos); auto drawerIt = keyValues.find("drawer"); auto locationIt = keyValues.find("location"); if( drawerIt != keyValues.end() && locationIt != keyValues.end() ) { if( locationIt->second == "bottom" ) gGame.bottomDrawer = drawerIt->second; else if( locationIt->second == "top" ) gGame.topDrawer = drawerIt->second; if( gGame.drawerDropAnim < 1.0 ) AnimationSystem::CreateFloatAnimation(1e-7f, 1.0f, 2, InterpolationTypeSmooth, &gGame.drawerDropAnim); return true; } inErrorStream << "error with: " << inStr << "\n"; if( drawerIt == keyValues.end() ) inErrorStream << "\trequired drawer=<string>\n"; if( locationIt == keyValues.end() ) inErrorStream << "\trequired location=(top|bottom)\n"; } inOutPos = pos; return false; }
bool WhoParser::AddPhotoToDrawer(const char * inStr, int & inOutPos, std::ostream & inErrorStream) { int pos = inOutPos; if( Word(inStr, inOutPos) == "addPhotoToDrawer" ) { std::map<std::string, std::string> keyValues = KeyValues(inStr, inOutPos); auto drawerIt = keyValues.find("drawer"); auto photoIt = keyValues.find("photo"); if( drawerIt != keyValues.end() && photoIt != keyValues.end() ) { who::Drawer & drawer = gGame.drawers[drawerIt->second]; drawer.photos.push_back(photoIt->second); return true; } inErrorStream << "error with: " << inStr << "\n"; if( drawerIt == keyValues.end() ) inErrorStream << "\trequired drawer=<string>\n"; if( photoIt == keyValues.end() ) inErrorStream << "\trequired photo=<string>\n"; } inOutPos = pos; return false; }
// Handles "addMaskToPhoto image=<name> photo=<name>": appends a mask image
// (with default weight 1) to the named photo. Reports missing keys, and —
// fixed here — an unknown photo name: GetPhoto's result was previously
// dereferenced without a null check.
bool WhoParser::AddMaskToPhoto(const char * inStr, int & inOutPos, std::ostream & inErrorStream)
{
	int pos = inOutPos;
	if( Word(inStr, inOutPos) == "addMaskToPhoto" )
	{
		std::map<std::string, std::string> keyValues = KeyValues(inStr, inOutPos);
		auto imageIt = keyValues.find("image");
		auto photoIt = keyValues.find("photo");
		if( imageIt!=keyValues.end() && photoIt!=keyValues.end() )
		{
			who::Photo * photo = gGame.GetPhoto(photoIt->second);
			if( photo )
			{
				photo->_maskImages.push_back(imageIt->second);
				photo->_maskWeights.push_back(1);  // default mask weight
				return true;
			}
			// Photo lookup failed — report and rewind instead of crashing.
			inErrorStream << "error with: " << inStr << "\n";
			inErrorStream << "\tno such photo: " << photoIt->second << "\n";
			inOutPos = pos;
			return false;
		}
		inErrorStream << "error with: " << inStr << "\n";
		if( imageIt == keyValues.end() )
			inErrorStream << "\trequired image=<string>\n";
		if( photoIt == keyValues.end() )
			inErrorStream << "\trequired photo=<string>\n";
	}
	inOutPos = pos;
	return false;
}
bool WhoParser::AddImageFromFile(const char * inStr, int & inOutPos, std::ostream & inErrorStream) { int pos = inOutPos; if( Word(inStr, inOutPos) == "addImageFromFile" ) { std::map<std::string, std::string> keyValues = KeyValues(inStr, inOutPos); auto fileIt = keyValues.find("file"); if( fileIt!=keyValues.end() ) { ImageInfo image; GL_LoadTextureFromFile(fileIt->second.c_str(), image);//.back()); gGame.images[fileIt->second] = image; return true; } inErrorStream << "error with: " << inStr << "\n"; if( fileIt == keyValues.end() ) inErrorStream << "\trequired file=<string>\n"; } inOutPos = pos; return false; }
// Handles "addImageFromTextAndImage name=<name> text=<string> imageFile=<path>":
// renders the text over the image file and stores the result in gGame.images
// under the given name. On failure, reports which keys are missing and rewinds
// inOutPos so another handler can try the input.
bool WhoParser::AddImageFromTextAndImage(const char * inStr, int & inOutPos, std::ostream & inErrorStream)
{
	int pos = inOutPos;
	if( Word(inStr, inOutPos) == "addImageFromTextAndImage" )
	{
		std::map<std::string, std::string> keyValues = KeyValues(inStr, inOutPos);
		auto nameIt = keyValues.find("name");
		auto textIt = keyValues.find("text");
		auto imageFileIt = keyValues.find("imageFile");
		if( nameIt!=keyValues.end() && textIt!=keyValues.end() && imageFileIt!=keyValues.end() )
		{
			GL_LoadTextureFromTextAndImage(textIt->second, imageFileIt->second, gGame.images[nameIt->second]);
			return true;
		}
		inErrorStream << "error with: " << inStr << "\n";
		if( nameIt == keyValues.end() )
			inErrorStream << "\trequired name=<string>\n";
		if( textIt == keyValues.end() )
			inErrorStream << "\trequired text=<string>\n";  // fixed: message said "test" but the key is "text"
		if( imageFileIt == keyValues.end() )
			inErrorStream << "\trequired imageFile=<string>\n";
	}
	inOutPos = pos;
	return false;
}
// Verifies that GetWordCount tracks AddWord, AddWords, RemoveWord and Clear.
// NOTE(review): assumes the m_DefaultWords fixture does not already contain
// "a" (otherwise the +1 expectations would be wrong) — confirm in setUp.
void CategoryTest::TestWordCount()
{
	CPPUNIT_ASSERT(m_DefaultCategory.GetWordCount() == 0);

	m_DefaultCategory.AddWord(Word("a"));
	CPPUNIT_ASSERT(m_DefaultCategory.GetWordCount() == 1);

	m_DefaultCategory.AddWords(m_DefaultWords);
	CPPUNIT_ASSERT(m_DefaultCategory.GetWordCount() == m_DefaultWords.size() + 1);

	m_DefaultCategory.RemoveWord(Word("a"));
	CPPUNIT_ASSERT(m_DefaultCategory.GetWordCount() == m_DefaultWords.size());

	m_DefaultCategory.Clear();
	CPPUNIT_ASSERT(m_DefaultCategory.GetWordCount() == 0);
}
// Matches a verb: a dictionary word, optionally followed by a conjugation
// suffix ("s" or "ed") which is consumed but does not affect the result.
int Verb (char *pattern, char **p)
{
    const int matched = Word(pattern, p);
    if (matched) {
        // Swallow a trailing "ed" only when no "s" suffix was consumed.
        if (!Match("s", p))
            Match("ed", p);
    }
    return matched;
}
// Generates a random identity (product of conjugated relators) of at least
// the given length. conj_param controls the geometric distribution of the
// conjugator lengths. Returns the empty word when the group has no relators.
// Cleanup: the original wrapped the loop in an outer while with the identical
// condition (a no-op) and declared unused locals ngens/nrels — both removed.
Word FPGroup::randomIdentity_Classic( int length , float conj_param ) const
{
  if( theRelators.size( )==0 )
    return Word();

  Word result;
  while( result.length()<length ) {
    // generate conjugator with geometrically distributed length
    int conj_length = 0;
    for( ; RandLib::ur.rand()<conj_param ; ++conj_length );
    Word conjugator = Word::randomWord( numOfGenerators , conj_length );

    // pick a random relator, randomly invert and cyclically permute it
    Word rel( theRelators[RandLib::ur.irand( 0 , theRelators.size( )-1 )] );
    rel = ( RandLib::ur.irand( 0 , 1 )==0 ? rel : rel.inverse( ) );
    rel.cyclicallyPermute( RandLib::ur.irand( 0 , rel.length( )-1 ) );

    // append the conjugated relator: c^-1 r c
    result *= -conjugator * rel * conjugator;
  }
  return result;
}
//! Get a word given by a vector Word getTailWord( int N , const vector< int >& T ) { Word C; for( int i=0 ; i<N ; ++i ) C *= Word(i+1).power( T[i] ); return C; }
/* Looks up x (wrapped as a Word key) in the hash table H.
 * Returns the stored integer answer, or 0 when the key is absent. */
static uint LookupAnswer(Hash const H, const uint x)
{
  Thing key = Word(x);
  Thing found = HashGet(H, key);
  if (!found)
    return 0;
  return (uint)IntWord(found);
}
// Resets the model to its initial state: exactly one empty Word entry.
// Wrapped in begin/endResetModel so any attached views refresh correctly.
void Words::initialize()
{
	beginResetModel();
	words_.clear();
	words_.append(Word());
	endResetModel();
}
// Main scanner loop: walks the character stream of codeFile and pushes tokens
// onto tokensFlow. Digits, '\'' and '#' start a constant; letters, '_' and
// '<' start a word (identifier/keyword); whitespace is skipped; '>', '=' and
// '/' are consumed directly.
void cScanner::startScanning(){
	cCode code(codeFile);
	cConstRecognizer Const(&code);
	cWordRecognizer Word(&code);  // NOTE: local named Word shadows any global Word
	std::string Token;
	while(!code.IsEnd()){
		char ch = code.ShowCh();  // peek at the next character without consuming it
		if(((int)ch >= DIGIT_ASCII_LOWER_LIMIT && (int)ch <= DIGIT_ASCII_UPPER_LIMIT) || (ch == '\'') || (ch == '#')){
			// constant token (numeric / quoted / '#'-prefixed)
			int Class = Const.getClass();
			Token = Const.getToken();
			tokensFlow->addToken(cToken(code.getStrNum(), Class, Token));
		}
		else if(((int)ch >= LETTER_LOWERCASE_ASCII_LOWER_LIMIT && (int)ch <= LETTER_LOWERCASE_ASCII_UPPER_LIMIT) ||
				((int)ch >= LETTER_UPPERCASE_ASCII_LOWER_LIMIT && (int)ch <= LETTER_UPPERCASE_ASCII_UPPER_LIMIT) ||
				(ch == '_') || (ch == '<')){
			// identifier or keyword
			tokensFlow->addToken(Word.getToken());
		}
		else if(ch == ' ' || ch == '\n' || ch == '\t'){
			code.GiveCh();  // skip whitespace
		}
		else if(ch == '>' || ch == '=' || ch == '/'){
			code.GiveCh();  // consume the character
			switch (ch){
				case '=': {
					Token = "=";
					tokensFlow->addToken(cToken(code.getStrNum(), CLASS_OPERATION_SIGN, Token));
					break;
				}
				case '>': {
					Token = ">";
					tokensFlow->addToken(cToken(code.getStrNum(), CLASS_BRACKET, Token));
					break;
				}
				// NOTE(review): '/' is consumed here but produces no token (no
				// case for it) — confirm whether comment handling was intended.
			}
		}
	}
}
// Supervisor that tries to decide whether the given word is central in its
// parent group. The word is central iff it commutes with every generator, so
// the checker and the various sub-strategies (normal closure, abelian
// invariants, KB/automatic-group supervisors, nilpotent quotients, genetic
// search) are all fed the set of commutators [w, g_i] to prove trivial.
IsEltCentral::IsEltCentral(const SMWord& word)
  : Supervisor( ! word.getParent().gic.haveFastWordProblem() ),
    theWord( word ),
    theChecker( word.getParent() ),
    normalClosure( *this, word.getParent().gcm().normalClosure ),
    abelianInvariants( *this, word.getParent().gcm().abelianInvariants ),
    kbSupervisor( *this, word.getParent().gcm().kbSupervisor ),
    agSupervisor( *this, word.getParent().gcm().agSupervisor ),
    computeBasis( *this, word.getParent().gcm().computeBasis ),
    nilpotentQuotients( *this, word.getParent().gcm().nilpotentQuotients ),
    nilpotentWPInQuotients(*this),
    nilpotentWP( *this ),
    genetic( *this )
{
  // Build the commutators w^-1 g^-1 w g for every generator g.
  SetOf<Word> comms;
  int numOfGens = word.getParent().getFPGroup().numberOfGenerators();
  Word w = word.getWord();

  for( int i = 0; i < numOfGens; ++i ) {
    Word gen = Word(Generator(i+1));
    comms |= (w.inverse() * gen.inverse() * w * gen).freelyReduce();
  }

  // Hand the commutator set to every strategy that needs it.
  theChecker.replaceTheSet(comms);
  nilpotentWP->initialize(comms,&theWord.getParent());
  nilpotentWPInQuotients->initialize(comms,&theWord.getParent());
  genetic->init(theWord.getParent().getFPGroup(), comms, GeneticWPCM::COMMUTATORS);

  // Start immediately unless the front end displays/drives this supervisor.
  if ( !displayInFE() ) adminStart();
}
// Builds a "hard" product of generators of the requested length for the AAG
// protocol instance. The word is grown around a commutator-style core
// a b a^-1 b^-1 by repeatedly inserting a random letter near the end and
// appending its inverse, then padded at the front with random letters.
// Very short requests fall back to a plain random word.
Word AAGProtocolInstance::generateHardProductOfGenerators( int num_gens , int product_length )
{
  // Too short to build the structured core — return a plain random word.
  if( product_length<4 )
    return Word::randomWord( num_gens , product_length );

  // Core: from a random 2-letter word [a, b], form a b a^-1 b^-1.
  auto result = Word::randomWord( num_gens , 2 ).toList( );
  result.push_back( -(* result.begin( ) ) );
  result.push_back( -(*++result.begin( ) ) );

  // Grow two letters at a time: insert a random (signed) generator g just
  // before the last or second-to-last letter, and append g^-1 at the end.
  while( product_length - result.size( ) > 1 ) {
    int g = RandLib::ur.irand( 1 , num_gens );
    g = RandLib::ur.irand( 0 , 1 )==0 ? -g : g;
    if( RandLib::ur.irand( 0 , 1 )==0 ) {
      result.insert( --result.end( ) , g );
      result.push_back( -g );
    } else {
      result.insert( --(--result.end( ) ) , g );
      result.push_back( -g );
    }
  }

  // Pad the front with random letters until the requested length is reached.
  Word product = Word(std::move(result));
  while( product.length( )<product_length ) {
    int g = RandLib::ur.irand( 1 , num_gens );
    g = RandLib::ur.irand( 0 , 1 )==0 ? -g : g;
    product.push_front( g );
  }

  return product;
}