//------------------------------gen_dfa_state_body------------------------------
// Emit the body of one Op_XXX sub-DFA into 'fp'.
//
// fp                    - output stream the generated C++ code is written to
// minimize              - hash dict used by prune_matchlist() to deduplicate
//                         entries by their inputs, keeping the cheaper one
// status                - per-state production bookkeeping; reset on entry
// operands_chained_from - scratch dict of chain-rule origins; cleared per match
// i                     - index into _mlistab / NodeClassNames selecting the
//                         operation class this sub-DFA handles
//
// NOTE(review): the do/while loops dereference _mlistab[i] before testing it,
// so this assumes the match list for class 'i' is non-NULL — confirm callers
// only invoke this for populated entries.
void ArchDesc::gen_dfa_state_body(FILE* fp, Dict &minimize, ProductionState &status, Dict &operands_chained_from, int i) {
  // Start the body of each Op_XXX sub-dfa with a clean state.
  status.initialize();

  // Walk the list, compacting it
  MatchList* mList = _mlistab[i];
  do {
    // Hash each entry using inputs as key and pointer as data.
    // If there is already an entry, keep the one with lower cost, and
    // remove the other one from the list.
    prune_matchlist(minimize, *mList);
    // Iterate
    mList = mList->get_next();
  } while(mList != NULL);

  // Hoist previously specified common sub-expressions out of predicates
  dfa_shared_preds::reset_found();
  dfa_shared_preds::cse_matchlist(_mlistab[i]);
  dfa_shared_preds::generate_cse(fp);

  mList = _mlistab[i];

  // Walk the list again, generating code
  do {
    // Each match can generate its own chains
    operands_chained_from.Clear();
    gen_match(fp, *mList, status, operands_chained_from);
    mList = mList->get_next();
  } while(mList != NULL);

  // Fill in any chain rules which add instructions
  // These can generate their own chains as well.
  operands_chained_from.Clear();
  // Disabled debug trace left in place for reference:
  // if( debug_output1 ) { fprintf(fp, "// top level chain rules for: %s \n", (char *)NodeClassNames[i]); // %%%%% Explanation }
  // Chain rules at the top level carry zero additional cost.
  const Expr *zeroCost = new Expr("0");
  chain_rule(fp, " ", (char *)NodeClassNames[i], zeroCost, "Invalid", operands_chained_from, status);
}
/*
================
DeclParser::Parse

Reads "<declType> <name> { key value ... }" groups from the lexer and stores
each group's key/value pairs in the dictionary for that named decl. Unknown
decl types produce a warning and their block contents are skipped. Throws
LexerError(END_OF_FILE) if the input ends inside an open block.
================
*/
void DeclParser::Parse( Lexer &lexer ) {
	Dict *resultDict = OG_NULL;   // dict of the decl currently being filled; OG_NULL while skipping
	bool getKeyValue = false;     // true while inside a "{ ... }" block
	int index;
	const Token *token;
	String key, value;
	const char *p;
	while ( (token = lexer.ReadToken()) != OG_NULL ) {
		p = token->GetString();
		if ( p ) {
			if ( *p == '\0' )
				lexer.Error("Unexpected Empty Token");
			if ( !getKeyValue ) {
				index = declTypes.Find(p);
				if ( index != -1 ) {
					// The token following the type keyword is the decl's name.
					value = lexer.ReadString();
					if ( value.IsEmpty() )
						lexer.Error("Empty name!");
					DictEx<Dict> &result = declTypes[index]->declList;
					// FIX: key the decl list by the decl *name* (value), not by the
					// type keyword (p). Using p collapsed every decl of a given type
					// into one shared entry and left 'value' unused; MakeBinary()
					// confirms the name is the per-decl key.
					index = result.Find( value.c_str() );
					if ( index == -1 )
						resultDict = &result[ value.c_str() ];
					else {
						// Re-declaration: reuse the slot but drop stale key/values.
						resultDict = &result[index];
						resultDict->Clear();
					}
				} else {
					resultDict = OG_NULL;
					lexer.Warning( Format("Unknown decl Type '$*'") << p );
				}
				lexer.ExpectToken("{");
				getKeyValue = true;
			} else {
				if ( *p == '}' )
					getKeyValue = false;
				else if ( resultDict ) {
					key = p;
					(*resultDict).Set( key.c_str(), lexer.ReadString() );
				}
			}
		}
	}
	// EOF inside an unterminated block is a hard parse failure.
	if ( getKeyValue )
		throw LexerError( LexerError::END_OF_FILE );
}
/* ================ DeclParser::MakeBinary ================ */ bool DeclParser::MakeBinary( const char *filename ) { if ( commonFS == OG_NULL ) return false; Lexer lexer; if ( !lexer.LoadFile( filename ) ) return false; File *f = commonFS->OpenWrite( Format( "$*.bin" ) << filename ); if ( !f ) return false; try { f->Write( DECL_DESCRIPTOR_STR, DECL_DESCRIPTOR_LENGTH ); Dict dict; bool getKeyValue = false; const Token *token; String key, value; String str; const char *p; while ( (token = lexer.ReadToken()) != OG_NULL ) { //! @todo maybe token should be stored as String, so we don't have to recalc len/bytelen p = token->GetString(); if ( p ) { if ( *p == '\0' ) { lexer.Error("Unexpected Empty Token"); return false; } if ( !getKeyValue ) { str = p; str.WriteToFile( f ); value = lexer.ReadString(); if ( value.IsEmpty() ) { lexer.Error("Empty name!"); return false; } value.WriteToFile( f ); lexer.ExpectToken("{"); getKeyValue = true; } else { if ( *p == '}' ) { getKeyValue = false; dict.WriteToFile( f ); dict.Clear(); } else { key = p; dict.Set( key.c_str(), lexer.ReadString() ); } } } } if ( getKeyValue ) { lexer.Error("Unexpected End Of File"); return false; } return true; } catch( FileReadWriteError &err ) { f->Close(); User::Error( ERR_FILE_WRITEFAIL, Format("Binary Decl: $*.bin" ) << err.ToString(), filename ); return false; } catch( LexerError &err ) { f->Close(); String errStr; err.ToString( errStr ); User::Error( ERR_LEXER_FAILURE, errStr.c_str(), filename ); return false; } }