// static void PositionCalculationCollection::typeFromEnum(const char *post, e_poscalc_t *type, int *flags) { if (post[0] == 'a') { *type = POS_ATOM; *flags &= ~(POS_MASS | POS_COMPLMAX | POS_COMPLWHOLE); return; } /* Process the prefix */ const char *ptr = post; if (post[0] == 'w') { *flags &= ~POS_COMPLMAX; *flags |= POS_COMPLWHOLE; ptr = post + 6; } else if (post[0] == 'p') { *flags &= ~POS_COMPLWHOLE; *flags |= POS_COMPLMAX; ptr = post + 5; } else if (post[0] == 'd') { *flags &= ~(POS_COMPLMAX | POS_COMPLWHOLE); ptr = post + 4; } if (ptr[0] == 'r') { *type = POS_RES; } else if (ptr[0] == 'm') { *type = POS_MOL; } else { GMX_THROW(InternalError("Unknown position calculation type")); } if (strlen(ptr) < 7) { GMX_THROW(InternalError("Unknown position calculation type")); } if (ptr[6] == 'm') { *flags |= POS_MASS; } else if (ptr[6] == 'g') { *flags &= ~POS_MASS; } else { GMX_THROW(InternalError("Unknown position calculation type")); } }
/*
 * LoadProg - start the program to be debugged.
 *
 * Two launch paths, selected by the global NewSession flag:
 *   - NewSession: start the debuggee in its own OS/2 session via
 *     DosStartSession, with tracing enabled (TraceOpt = 1); the session id
 *     goes to SID and the process id to the global Pid.
 *   - otherwise: spawn it as a traced child via DosExecPgm(EXEC_TRACE) and
 *     take the child's pid from the RESULTCODES structure.
 *
 * cmd      - program name (also used as session/program title)
 * cmd_tail - command-line arguments passed to the program
 *
 * On failure of either API, reports via InternalError() with the
 * MSG_SAMPLE_3 message text.
 */
static void LoadProg( char *cmd, char *cmd_tail )
{
    RESULTCODES res;
    NEWSTARTDATA start;
    USHORT SID;

    if( NewSession ) {
        /* NOTE(review): 50 is presumably the size of the STARTDATA prefix
           actually filled in below -- confirm against the OS/2 headers. */
        start.Length = 50;
        start.Related = 1;          /* child session related to ours */
        start.FgBg = 0;
        start.TraceOpt = 1;         /* start the session with tracing enabled */
        start.PgmTitle = cmd;
        start.PgmName = cmd;
        start.PgmInputs = (PBYTE)cmd_tail;
        start.TermQ = 0;
        start.Environment = NULL;   /* inherit our environment */
        start.InheritOpt = 1;
        start.SessionType = 0;
        start.IconFile = NULL;
        start.PgmHandle = 0;
        start.PgmControl = 0;
        if( DosStartSession( (void __far *)&start, &SID, &Pid ) != 0 ) {
            InternalError( MsgArray[MSG_SAMPLE_3 - ERR_FIRST_MESSAGE] );
        }
    } else {
        if( DosExecPgm( NULL, 0, EXEC_TRACE, cmd, NULL, &res, cmd ) != 0 ) {
            InternalError( MsgArray[MSG_SAMPLE_3 - ERR_FIRST_MESSAGE] );
        }
        /* NOTE(review): for an async/traced exec the codeTerminate field is
           documented to carry the child's process id -- confirm. */
        Pid = res.codeTerminate;
    }
}
/*
 * EmitVar
 *
 * Emit the textual (assembler) representation of a variable reference.
 *
 * var    - variable to emit; NULL is silently ignored
 * format - for INSTR_TEXT variables, 1 requests an escaped string constant,
 *          anything else emits the raw text
 *
 * Fix: removed the local 'non_keyword', which was initialized to true,
 * re-assigned true in one branch, and never read anywhere.
 */
void EmitVar(Var * var, UInt8 format)
{
	if (var != NULL) {
		if (var->mode == INSTR_SRC_FILE) {
			EmitStr(var->name);
		} else if (var->mode == INSTR_ELEMENT) {
			if (VarIsStructElement(var)) {
				// Structure element: base address + field offset.
				EmitVar(var->adr, format);
				EmitStr("+");
				EmitVar(var->var, format);
			} else {
				InternalError("don't know how to emit array element");
			}
		} else if (var->mode == INSTR_DEREF) {
			EmitVar(var->var, format);
		} else if (var->mode == INSTR_BYTE) {
			InternalError("don't know how to emit byte array element");
		} else if (var->name != NULL) {
			// *** Module parameters (4)
			// When a parameter name is emitted, it is prefixed with the PARAM_ prefix
			if (VarIsParam(var)) {
				EmitStr("PARAM_");
			} else if (var->mode == INSTR_INT && var->type != NULL && var->type->variant == TYPE_INT && var->type->owner != NULL) {
				// Named integer constant: qualify with the owning type's name.
				EmitVarName(var->type->owner);
				EmitStr("__");
			} else if (var->scope != NULL && var->scope != &ROOT_PROC && var->scope != CPU->SCOPE && var->scope->name != NULL && !VarIsLabel(var)) {
				// Scoped variable: qualify with the enclosing scope's name.
				EmitVarName(var->scope);
				EmitStr("__");
			} else {
				// For variables (excluding registers), emit an extra underscore at the
				// beginning to prevent name clash with assembler built-in keywords
				// and register names.
				if (!VarIsReg(var)) {
					EmitStr("_");
				}
			}
			EmitVarName(var);
		} else if (var->mode == INSTR_TEXT) {
			if (format == 1) {
				EmitStrConst(var->str);
			} else {
				EmitStr(var->str);
			}
		} else {
			ASSERT(var->mode == INSTR_INT);
			EmitBigInt(&var->n);
		}
	}
}
// Renders the wrapped Z3 model as a string via the Z3 C API.
// Throws InternalError if the context or model handle is null.
string Z3Model::ToString() const
{
    const bool uninitialized = (Ctx == nullptr) || (Model == nullptr);
    if (uninitialized) {
        throw InternalError((string)"ToString() called on a null Z3Model object");
    }
    return string(Z3_model_to_string(Ctx, Model));
}
// Wraps an existing libxml2 document in a Document node.
// The xmlDocPtr is handed to the Node base class (an xmlDoc begins with the
// common xmlNode layout, hence the reinterpret_cast); throws InternalError
// if no underlying node resulted.
Document::Document(xmlDocPtr doc) : Node(reinterpret_cast<xmlNodePtr>(doc))
{
    if ( _xml == nullptr )
        throw InternalError("Failed to create new document");
    // ensure the right polymorphic type ptr is installed
    _xml->_private = this;
}
// Attach 'child' directly after this node in the sibling chain.
// Throws InternalError (with the last libxml2 error) on failure.
void Node::InsertAfter(Node *child)
{
    xmlNodePtr attached = xmlAddNextSibling(xml(), child->xml());
    if (attached == nullptr) {
        throw InternalError("Unable to add child node", xmlGetLastError());
    }
    // libxml2 may merge or replace the inserted node; re-point the wrapper
    // at whatever node was actually linked in.
    child->rebind(attached);
}
// // newTypeIndex defaulted to 0 to indicate that a new type is to be enter. // void TypeIndexMap::Insert( const type_index oldTypeIndex, const type_index newTypeIndex ) /********************************************************/ { try { if ( newTypeIndex != 0 ) { _mappingTable[oldTypeIndex].globalIndex = newTypeIndex; _mappingTable[oldTypeIndex].isNewType = FALSE; } else { // check if running out of types. if ( _currentGlobalIndex >= 0xffff ) { throw MiscError("fatal : running out of type indices."); } _mappingTable[oldTypeIndex].globalIndex = _currentGlobalIndex++; _mappingTable[oldTypeIndex].isNewType = TRUE; } // testing code. _mappingTable[oldTypeIndex].isDone = TRUE; } catch (...) { cerr << "index : " << oldTypeIndex << endl; throw InternalError("packtype.cpp : TypeIndexMap::Lookup() index range failed."); } }
//------------------------------------------------------------------------------------------------- LuaWorker::LuaWorker(IConsole *pConsole) :QObject(NULL) ,IScriptEngine() ,ISyncContext() ,m_pConsole(pConsole) ,m_mtxTasks() ,m_luaState(NULL) ,m_pSysVar(NULL) ,m_vLuaTables() { ILuaTable::setSyncContext(this); // Hier keine dynamische allokation, da diese im Hauptthread geschehen würde! if (m_pConsole==NULL) throw InternalError(tr("Can't create Lua worker with null console pointer")); m_vLuaTables.push_back(new LuaTabWindow()); m_vLuaTables.push_back(new LuaTabSys()); m_vLuaTables.push_back(new LuaTabMessageBox()); m_vLuaTables.push_back(new LuaTabCanvas()); init(); initTables(); splashScreen(); }
/*! \brief
 * Return an EnergyFrame built from the most recently probed frame data.
 *
 * If the caller has not yet probed for the next frame, probes now via
 * readNextFrame(). Throws APIError when no next frame exists, and
 * InternalError when a requested energy-field index is out of range for
 * the frame. Resets the probe state before returning, so each returned
 * frame is consumed exactly once.
 */
EnergyFrame EnergyFrameReader::frame()
{
    EnergyFrame energyFrame;

    if (!haveProbedForNextFrame_)
    {
        readNextFrame();
    }
    if (!nextFrameExists_)
    {
        GMX_THROW(APIError("There is no next frame, so there should have been no attempt to use the data, e.g. by reacting to a call to readNextFrame()."));
    }

    // The probe filled enxframe_ with new data, so now we use that data to fill energyFrame
    t_enxframe *enxframe = enxframeGuard_.get();
    energyFrame.time_ = enxframe->t;
    energyFrame.step_ = enxframe->step;
    // Copy only the energy fields the caller asked for (by name -> index map).
    for (auto &index : indicesOfEnergyFields_)
    {
        if (index.second >= enxframe->nre)
        {
            GMX_THROW(InternalError(formatString("Index %d for energy %s not present in energy frame with %d energies",
                                                 index.second, index.first.c_str(), enxframe->nre)));
        }
        energyFrame.values_[index.first] = enxframe->ener[index.second].e;
    }
    // Prepare for reading future frames
    haveProbedForNextFrame_ = false;
    nextFrameExists_ = false;
    return energyFrame;
}
// // GetLevelName // const char *GetLevelName(void) { if (!wad.current_level) InternalError("GetLevelName: no current level"); return wad.current_level->name; }
// // SeparateSegs // void SeparateSegs(superblock_t *seg_list, seg_t *part, superblock_t *lefts, superblock_t *rights, intersection_t ** cut_list) { int num; while (seg_list->segs) { seg_t *cur = seg_list->segs; seg_list->segs = cur->next; cur->block = NULL; DivideOneSeg(cur, part, lefts, rights, cut_list); } // recursively handle sub-blocks for (num=0; num < 2; num++) { superblock_t *A = seg_list->subs[num]; if (A) { SeparateSegs(A, part, lefts, rights, cut_list); if (A->real_num + A->mini_num > 0) InternalError("SeparateSegs: child %d not empty !", num); FreeSuper(A); seg_list->subs[num] = NULL; } } seg_list->real_num = seg_list->mini_num = 0; }
/* * Provides a factory for the standard logical time types HLAfloat64Time * and HLAinteger64Time. The RTI reference time library's LogicalTimeFactoryFactory * should just forward requests to here. */ std::auto_ptr<LogicalTimeFactory> HLAlogicalTimeFactoryFactory::makeLogicalTimeFactory( const std::wstring& implementationName ) { if( implementationName.compare(L"HLAfloat64TimeFactory") == 0 ) { return auto_ptr<LogicalTimeFactory>( new HLAfloat64TimeFactory() ); } else if( implementationName.compare(L"HLAinteger64TimeFactory") == 0 ) { return auto_ptr<LogicalTimeFactory>( new HLAinteger64TimeFactory() ); } else if( implementationName.compare(L"HLAfloat64Time") == 0 ) { return auto_ptr<LogicalTimeFactory>( new HLAfloat64TimeFactory() ); } else if( implementationName.compare(L"HLAinteger64Time") == 0 ) { return auto_ptr<LogicalTimeFactory>( new HLAinteger64TimeFactory() ); } else { wstringstream wss; wss << "Unknown time implementation type [" << implementationName << "]: Must be HLAfloat64TimeFactory or HLAinteger64TimeFactory"; throw InternalError( wss.str() ); } }
/*! \brief
 * Initializes the selection collection implementation.
 *
 * Zero-initializes the C-level selection collection state (_sc), attaches
 * the given position calculation collection (creating and owning one when
 * \p pcc is NULL), then creates the symbol table and registers the default
 * selection methods in it.
 */
SelectionCollection::Impl::Impl(gmx_ana_poscalc_coll_t *pcc)
    : _options("selection", "Common selection control"),
      _debugLevel(0), _grps(NULL)
{
    _sc.root      = NULL;
    _sc.nvars     = 0;
    _sc.varstrs   = NULL;
    _sc.top       = NULL;
    gmx_ana_index_clear(&_sc.gall);
    _sc.pcc       = pcc;
    _sc.mempool   = NULL;
    _sc.symtab    = NULL;

    // TODO: This is not exception-safe if any called function throws.
    if (_sc.pcc == NULL)
    {
        int rc = gmx_ana_poscalc_coll_create(&_sc.pcc);
        if (rc != 0)
        {
            // TODO: A more reasonable error
            GMX_THROW(InternalError("Failed to create position calculation collection"));
        }
        // Remember that we created the collection, so the destructor frees it.
        _flags.set(Impl::efOwnPositionCollection);
    }
    _gmx_sel_symtab_create(&_sc.symtab);
    gmx_ana_selmethod_register_defaults(_sc.symtab);
}
/*! \brief
 * Applies markup replacements and link substitution to help text for the
 * active output format, then hands the result to \p wrapper.
 *
 * Caller-registered search/replace pairs are applied first, in registration
 * order; format-specific replacements and link rewriting follow, in an order
 * that depends on the format. Throws InternalError for an unknown format.
 */
void HelpWriterContext::Impl::processMarkup(const std::string &text,
                                            WrapperInterface *wrapper) const
{
    std::string result(text);
    for (ReplaceList::const_iterator i = replacements_.begin();
         i != replacements_.end(); ++i)
    {
        result = replaceAll(result, i->search, i->replace);
    }
    switch (state_->format_)
    {
        case eHelpOutputFormat_Console:
        {
            result = repall(result, sandrTty);
            result = replaceLinks(result);
            return wrapper->wrap(result);
        }
        case eHelpOutputFormat_Man:
        {
            // Needs to be done first to avoid '-' -> '\-' messing up the links.
            result = replaceLinks(result);
            result = repall(result, sandrMan);
            return wrapper->wrap(result);
        }
        case eHelpOutputFormat_Html:
        {
            result = repall(result, sandrHtml);
            result = replaceLinks(result);
            return wrapper->wrap(result);
        }
        default:
            GMX_THROW(InternalError("Invalid help output format"));
    }
}
// Constructs a function-application grammar node.
// Validation order: (1) every child belongs to the same grammar as this node
// (InternalError otherwise), (2) the number of children matches the operator's
// arity, (3) each child's type matches the operator's corresponding domain
// type (TypeException on either mismatch).
GrammarFunc::GrammarFunc(const Grammar* TheGrammar,
                         const FuncOperatorBase* FuncOp,
                         const vector<GrammarNode*>& Children)
    : GrammarNode(TheGrammar, FuncOp->GetEvalType()), Op(FuncOp), Children(Children)
{
    // 1. No mixing of nodes across grammars.
    for (auto const& CurChild : Children) {
        if (CurChild->GetGrammar() == TheGrammar) {
            continue;
        }
        throw InternalError((string)"Error: Attempted to combine nodes from different grammars.\n" +
                            "At " + __FILE__ + ":" + to_string(__LINE__));
    }

    auto FuncType = FuncOp->GetFuncType();
    const uint32 Arity = FuncOp->GetArity();

    // 2. Operand count must match the operator's arity.
    if (Children.size() != Arity) {
        throw TypeException((string)"Operator \"" + FuncOp->GetName() + "\" expects " +
                            to_string(Arity) + " operands, but received " +
                            to_string(Children.size()) + " operands.");
    }

    // 3. Each operand's type must match the declared domain type.
    for (uint32 ArgIdx = 0; ArgIdx < Arity; ++ArgIdx) {
        if (Children[ArgIdx]->GetType() != FuncType->GetDomainTypes()[ArgIdx]) {
            throw TypeException((string)"Type mismatch in arguments to operator \"" +
                                FuncOp->GetName() + "\"");
        }
    }
}
/**
 * Parses a variable declaration at the current token position.
 *
 * Handles three forms:
 *  - empty declaration (bare ';'): throws InternalError when \p throwIfEmpty,
 *    otherwise returns nullptr;
 *  - 'define <ident> ...': dynamically-typed declaration (empty type list);
 *  - '<type>[, <type>...] <ident> ...': single- or multi-type declaration.
 * Anything else raises a SyntaxError at the current token's trace.
 */
Node<DeclarationNode>::Link DeclarationParser::declaration(bool throwIfEmpty) {
  if (accept(TT::SEMI)) {
    if (throwIfEmpty) throw InternalError("Empty declaration", {METADATA_PAIRS});
    else return nullptr;
  }
  if (accept(TT::DEFINE)) {
    skip();
    // Dynamic variable declaration
    expect(TT::IDENTIFIER, "Unexpected token after define keyword");
    return declarationFromTypes({});
  } else if (accept(TT::IDENTIFIER)) {
    // First identifier is a candidate type name; remember it before advancing.
    auto ident = current().data;
    skip();
    // Single-type declaration
    if (accept(TT::IDENTIFIER)) {
      return declarationFromTypes({ident});
    }
    // Multi-type declaration: comma-separated list of type names.
    if (accept(",")) {
      TypeList types = {ident};
      do {
        skip();
        expect(TT::IDENTIFIER, "Expected identifier in type list");
        types.insert(current().data);
        skip();
      } while (accept(","));
      // The variable name must follow the type list.
      expect(TT::IDENTIFIER);
      return declarationFromTypes(types);
    }
  }
  throw Error("SyntaxError", "Invalid declaration", current().trace);
}
void RuleSetAddRule(RuleSet * ruleset, Rule * rule)
/*
Purpose:
	Add the rule to the rule set.
	The rules in the ruleset are sorted by specificity (the most specific
	rule is at the start of the list).  The rules are hashed by operation.
*/
{
	Rule ** link;

	// A rule that produces nothing is malformed.
	if (!rule->to->first) {
		InternalError("Empty rule");
		return;
	}

	// Walk the bucket for this operation until we find the first rule that
	// this one is more specific than, tracking the link to patch.
	link = &ruleset->rules[rule->op];
	while (*link != NULL && !RuleIsMoreSpecific(rule, *link)) {
		link = &(*link)->next;
	}

	// Splice the new rule in at that position.
	rule->next = *link;
	*link = rule;
}
// Builds the kernel item set of a new LR(1) state reached from state
// 'camefrom' on symbol 'symb': every closure item of 'camefrom' whose head
// symbol matches contributes an LR(1) kernel item (dot advanced by one,
// same look-ahead).  Also accumulates 'hash_no' for later state lookup.
// Returns the item number of the first matching closure item, or -1 if
// none matched (in which case InternalError(255) fires, since an empty
// kernel should be impossible here).
int PGBuildLR1::MAKE_KERNEL (int camefrom, int symb)
{
      int i, la, c, first_item;
//    prt_log ("\nFROM STATE %d MAKING STATE %d\n\n", camefrom, n_states);
      hash_no = 0;
      first_item = -1;
      f_lrkernel[n_states] = n_lrkernels;          // Kernel range start for the new state.
      for (c = 0; c < N_clo[camefrom]; c++)        // For all closure items.
      {
         i = Closure[camefrom][c].item;            // Get item #.
         if (item[i].symb == symb)                 // If head symbol match?
         {
            if (first_item == -1) first_item = i;
            la = Closure[camefrom][c].LA;          // Get old LA.
            hash_no += N_terms*(i+1) + la;         // Add both to hash number.
            lrkernel [n_lrkernels].item = i+1;     // Make new LR1 item (dot moved right).
            lrkernel [n_lrkernels].LA = la;        // Keep same look ahead.
//          prt_item ("Making LR1 kernel ", i+1, la);
            if (++n_lrkernels >= max_lrkernels) MemCrash ("Number of LR(1) kernels", max_lrkernels);
         }
      }
      l_lrkernel[n_states] = n_lrkernels;          // Kernel range end for the new state.
      if (n_lrkernels == f_lrkernel[n_states]) InternalError (255);  // Empty kernel: should not happen.
//    PRT_LRSTA (n_states);
      return first_item;
}
// Returns the Operator this token denotes; only valid for operator tokens.
// Throws InternalError (with the token's string form attached) otherwise.
Operator Token::op() const {
  if (type == TT::OPERATOR) {
    return Operator::list[idx];
  }
  throw InternalError("Only available on operators", {
    METADATA_PAIRS,
    {"token", this->toString()}
  });
}
void PruneSectors(void) { int i; int new_num; DisplayTicker(); // scan all sectors for (i=0, new_num=0; i < num_sectors; i++) { sector_t *S = lev_sectors[i]; if (S->ref_count < 0) InternalError("Sector %d ref_count is %d", i, S->ref_count); if (S->ref_count == 0) { UtilFree(S); continue; } S->index = new_num; lev_sectors[new_num++] = S; } if (new_num < num_sectors) { PrintVerbose("Pruned %d unused sectors\n", num_sectors - new_num); num_sectors = new_num; } if (new_num == 0) FatalError("Couldn't find any Sectors"); }
void Node::AddChild(std::shared_ptr<Node> child) { xmlNodePtr newNode = xmlAddChild(_xml, child->xml()); if ( newNode == nullptr ) throw InternalError("Unable to add child node"); child->rebind(newNode); }
void Node::InsertBefore(std::shared_ptr<Node> child) { xmlNodePtr newNode = xmlAddPrevSibling(xml(), child->xml()); if ( newNode == nullptr ) throw InternalError("Unable to add child node", xmlGetLastError()); child->rebind(newNode); }
// NOTE(review): this override unconditionally throws, so let-expressions are
// presumably eliminated (substituted away) before evaluation ever runs --
// confirm against the expression normalization pass.  Reaching this method
// therefore indicates a logic error in the caller.
void UserLetExpression::Evaluate(ExpSubstMap SubstExps, VariableMap VarMap, ConcreteValueBase* Result) const
{
    throw InternalError((string)"Internal Error: UserLetExpression::Evaluate() must never have been called.\n" +
                        "At: " + __FILE__ + ":" + to_string(__LINE__));
}
//----------------------------------------------------------------------- int XmlConvert(InStm *pin, const strvector &css, const strvector &fonts, const strvector &mfonts, XlitConv *xlitConv, OutPackStm *pout) { // perform pass 1 to determine fb2 document structure and to collect all cross-references inside the fb2 file UnitArray units; // The input file name is pin->UIFileName(); XMLDocument doc; doc.LoadFile(pin->UIFileName().c_str()); XMLHandle hDoc(&doc); XMLHandle fb = hDoc.FirstChildElement("FictionBook"); XMLHandle desc = fb.FirstChildElement("description"); XMLHandle titleInfo = desc.FirstChildElement("title-info"); XMLHandle genre = titleInfo.FirstChildElement("genre"); XMLHandle genreInfo = genre.FirstChild(); const char* txt = genreInfo.ToNode()->Value(); // "Ciencia-Ficción" // Now build from the above the damn epub! // Go directly to DoConvertionPass2 and substitute XML calls to make epub. // CONVERTION PASS 1 (DETERMINE DOCUMENT STRUCTURE AND COLLECT ALL CROSS-REFERENCES INSIDE THE FB2 FILE) Ptr<ConverterPass1> conv = new ConverterPass1(&units); conv->XmlScan(hDoc); //DoConvertionPass1(CreateScanner(pin), &units); //pin->Rewind(); // sanity check if (units.size() == 0) InternalError(__FILE__, __LINE__, "I don't know why but it happened that there is no content in input file!"); // perform pass 2 to create epub document //XmlConversionPass2(hDoc, css, fonts, mfonts, xlitConv, &units, pout); //DoConvertionPass2(CreateScanner(pin), css, fonts, mfonts, xlitConv, &units, pout); return 0; }
// Inserts 'fSymb' into the binary search tree rooted at '*fSymbols',
// ordered by symbol-name atom.  A duplicate name is reported via
// InternalError and the symbol is not inserted.  ('fType' is unused.)
static void lAddToTree( CgContext *cg, Symbol **fSymbols, Symbol *fSymb, Type *fType)
{
    Symbol *node = *fSymbols;
    Atom key;

    // Empty tree: the new symbol becomes the root.
    if (node == NULL) {
        *fSymbols = fSymb;
        return;
    }

    key = fSymb->name;
    for (;;) {
        if (node->name == key) {
            InternalError( cg, cg->tokenLoc, 9999, "symbol \"%s\" already in table",
                           cg->GetString(fSymb->name));
            return;
        }
        if (key < node->name) {
            if (node->left == NULL) {
                node->left = fSymb;
                return;
            }
            node = node->left;
        } else {
            if (node->right == NULL) {
                node->right = fSymb;
                return;
            }
            node = node->right;
        }
    }
} // lAddToTree
// Creates an RDF model backed by the given storage, keeping a shared
// reference to the storage so it outlives the model.  Throws InternalError
// if librdf fails to create the model.
ModelPrivate::ModelPrivate(std::shared_ptr<Storage> storage)
    : _storage(storage)
{
    /* Default storage type, which is memory */
    _model = librdf_new_model(World().get(), storage->get(), NULL);
    if (!_model) {
        throw InternalError("Failed to create RDF model");
    }
}
// Exports 'set' to 'filename' using the file format at 'format_index' in
// the global format registry.  Throws InternalError if the chosen format
// cannot export this set's game; 'is_copy' is forwarded to the format.
void export_set(Set& set, const String& filename, size_t format_index, bool is_copy) {
	FileFormatP fmt = file_formats.at(format_index);
	if (!fmt->canExport(*set.game)) {
		throw InternalError(_("File format doesn't apply to set"));
	}
	fmt->exportSet(set, filename, is_copy);
}
/*! \brief
 * Validates the -g1/-g2 option combination and sets the number of positions
 * needed per angle for each group.
 *
 * When -g1 is 'angle' or 'dihedral', no second group may be given; when it
 * is not, a second group is required.  Throws InconsistentInputError for
 * invalid combinations and InternalError for unrecognized option values.
 */
void Angle::optionsFinished(Options *options, TrajectoryAnalysisSettings *settings)
{
    // Options are matched on their first character only; 'a'ngle/'d'ihedral
    // are complete in themselves (no second group).
    bool bSingle = (g1type_[0] == 'a' || g1type_[0] == 'd');

    if (bSingle && g2type_[0] != 'n')
    {
        GMX_THROW(InconsistentInputError("Cannot use a second group (-g2) with "
                                         "-g1 angle or dihedral"));
    }
    if (bSingle && options->isSet("group2"))
    {
        GMX_THROW(InconsistentInputError("Cannot provide a second selection "
                                         "(-group2) with -g1 angle or dihedral"));
    }
    if (!bSingle && g2type_[0] == 'n')
    {
        GMX_THROW(InconsistentInputError("Should specify a second group (-g2) "
                                         "if the first group is not an angle or a dihedral"));
    }

    // Set up the number of positions per angle.
    switch (g1type_[0])
    {
        case 'a': natoms1_ = 3; break;
        case 'd': natoms1_ = 4; break;
        case 'v': natoms1_ = 2; break;
        case 'p': natoms1_ = 3; break;
        default:
            GMX_THROW(InternalError("invalid -g1 value"));
    }
    // 't' and 'z' reference fixed directions, so they need no positions
    // from the second selection ('t' presumably stands for the "t0" option,
    // per the message below).
    switch (g2type_[0])
    {
        case 'n': natoms2_ = 0; break;
        case 'v': natoms2_ = 2; break;
        case 'p': natoms2_ = 3; break;
        case 't': natoms2_ = 0; break;
        case 'z': natoms2_ = 0; break;
        case 's': natoms2_ = 1; break;
        default:
            GMX_THROW(InternalError("invalid -g2 value"));
    }
    if (natoms2_ == 0 && options->isSet("group2"))
    {
        GMX_THROW(InconsistentInputError("Cannot provide a second selection (-group2) with -g2 t0 or z"));
    }
}
// // ReadWadFile // glbsp_ret_e ReadWadFile(const char *filename) { int check; char *read_msg; // open input wad file & read header in_file = fopen(filename, "rb"); if (! in_file) { if (errno == ENOENT) SetErrorMsg("Cannot open WAD file: %s", filename); else SetErrorMsg("Cannot open WAD file: %s [%s]", filename, strerror(errno)); return GLBSP_E_ReadError; } if (! ReadHeader(filename)) { fclose(in_file); return GLBSP_E_ReadError; } PrintMsg("Opened %cWAD file : %s\n", (wad.kind == IWAD) ? 'I' : 'P', filename); PrintVerbose("Reading %d dir entries at 0x%X\n", wad.num_entries, wad.dir_start); // read directory ReadDirectory(); DisplayOpen(DIS_FILEPROGRESS); DisplaySetTitle("glBSP Reading Wad"); read_msg = UtilFormat("Reading: %s", filename); DisplaySetBarText(1, read_msg); DisplaySetBarLimit(1, CountLumpTypes(LUMP_READ_ME, LUMP_READ_ME)); DisplaySetBar(1, 0); UtilFree(read_msg); cur_comms->file_pos = 0; // now read lumps check = ReadAllLumps(); if (check != wad.num_entries) InternalError("Read directory count consistency failure (%d,%d)", check, wad.num_entries); wad.current_level = NULL; DisplayClose(); return GLBSP_E_OK; }
// Returns the link() of the token 'index' positions away from this one.
// Throws InternalError when the index falls outside the token list.
const Token *Token::linkAt(int index) const
{
    const Token *target = tokAt(index);
    if (target == nullptr) {
        throw InternalError(this, "Internal error. Token::linkAt called with index outside the tokens range.");
    }
    return target->link();
}