bool SxeEdit::operator<(const SxeEdit &other) const {
    // Can't compare edits to different records
    if(rid() != other.rid()) {
        qDebug(QString("Comparing SxeEdits to %1 and %2.").arg(rid()).arg(other.rid()).toAscii());
        return false;
    }

    if(type() == other.type()) {
        // Only Record edits can be unequal with other edits of the same type
        if(type() == SxeEdit::Record) {
            const SxeRecordEdit* thisp = dynamic_cast<const SxeRecordEdit*>(this);
            const SxeRecordEdit* otherp = dynamic_cast<const SxeRecordEdit*>(&other);
            return (thisp->version() < otherp->version());
        }
        return false;
    } else {
        // New < Record, Record < Remove
        if(type() == SxeEdit::New)
            return true;
        if(other.type() == SxeEdit::Remove)
            return true;
        return false;
    }
}
std::wstring Hyperlink::GetPathFromId(OOX::IFileContainer* pRels, const std::wstring & rId) const
{
    if (rId.empty()) return L"";

    OOX::RId rid(rId);
    std::wstring sLink = L"";

    if (pRels != NULL)
    {
        smart_ptr<OOX::HyperLink> p = pRels->Get<OOX::HyperLink>(rid);
        if (p.is_init())
            sLink = p->Uri().m_strFilename;
    }
    if (sLink.empty())
    {
        if (parentFileIs<FileContainer>())
            sLink = parentFileAs<FileContainer>().GetLinkFromRId(rid);
    }

    // Normalize slashes, then restore the protocol separators collapsed by the "//" -> "/" pass
    XmlUtils::replace_all(sLink, L"\\",      L"/");
    XmlUtils::replace_all(sLink, L"//",      L"/");
    XmlUtils::replace_all(sLink, L"http:/",  L"http://");
    XmlUtils::replace_all(sLink, L"https:/", L"https://");
    XmlUtils::replace_all(sLink, L"ftp:/",   L"ftp://");
    XmlUtils::replace_all(sLink, L"file:/",  L"file://");

    return sLink;
}
bool ridc( FILE *fp, uchar correctID )
{
    uchar id = rid(fp);
    if(id != correctID)
        error("Expected id %i, read %i\n", correctID, id);
    return id == correctID;
}
StringData ObjectWrapper::Key::toStringData(JSContext* cx, JSStringWrapper* jsstr) {
    if (_type == Type::Field) {
        return _field;
    }

    if (_type == Type::Index) {
        *jsstr = JSStringWrapper(_idx);
        return jsstr->toStringData();
    }

    JS::RootedId rid(cx);

    if (_type == Type::Id) {
        rid.set(_id);
    } else {
        InternedStringId id(cx, _internedString);
        rid.set(id);
    }

    if (JSID_IS_INT(rid)) {
        *jsstr = JSStringWrapper(JSID_TO_INT(rid));
        return jsstr->toStringData();
    }

    if (JSID_IS_STRING(rid)) {
        *jsstr = JSStringWrapper(cx, JSID_TO_STRING(rid));
        return jsstr->toStringData();
    }

    uasserted(ErrorCodes::BadValue, "Couldn't convert key to String");
}
CString Hyperlink::GetFullHyperlinkName() const
{
    if(id.IsInit() && *id != _T(""))
    {
        OOX::RId rid(*id);
        CString sLink = _T("");

        if(parentFileIs<Slide>())
            sLink = parentFileAs<Slide>().GetFullHyperlinkNameFromRId(rid);
        else if(parentFileIs<SlideLayout>())
            sLink = parentFileAs<SlideLayout>().GetFullHyperlinkNameFromRId(rid);
        else if(parentFileIs<SlideMaster>())
            sLink = parentFileAs<SlideMaster>().GetFullHyperlinkNameFromRId(rid);
        else if(parentFileIs<Theme>())
            sLink = parentFileAs<Theme>().GetFullHyperlinkNameFromRId(rid);

        sLink.Replace(TCHAR('\\'), TCHAR('/'));
        sLink.Replace(_T("//"),      _T("/"));
        sLink.Replace(_T("http:/"),  _T("http://"));
        sLink.Replace(_T("https:/"), _T("https://"));
        sLink.Replace(_T("ftp:/"),   _T("ftp://"));
        sLink.Replace(_T("file:/"),  _T("file://"));

        return sLink;
    }
    return _T("");
}
void MeterSML::_parse(sml_list *entry, Reading *rd) {
	//int unit = (entry->unit) ? *entry->unit : 0;
	int scaler = (entry->scaler) ? *entry->scaler : 0; /* no scaler given: factor 10^0 = 1 */
	rd->value(sml_value_to_double(entry->value) * pow(10, scaler));

	Obis obis((unsigned char)entry->obj_name->str[0], (unsigned char)entry->obj_name->str[1],
			(unsigned char)entry->obj_name->str[2], (unsigned char)entry->obj_name->str[3],
			(unsigned char)entry->obj_name->str[4], (unsigned char)entry->obj_name->str[5]);

	ReadingIdentifier *rid(new ObisIdentifier(obis));
	rd->identifier(rid);

	// TODO handle SML_TIME_SEC_INDEX or time by SML File/Message
	struct timeval tv;
	if (entry->val_time) { /* use time from meter */
		tv.tv_sec = *entry->val_time->data.timestamp;
		tv.tv_usec = 0;
	} else {
		gettimeofday(&tv, NULL); /* use local time */
	}
	rd->time(tv);
}
void RepeatingBulkRound::ProcessDataBase(const Id &from, const QByteArray &data)
{
    QByteArray payload;
    if(!Verify(from, data, payload)) {
        throw QRunTimeError("Invalid signature or data");
    }

    if(_state == Offline) {
        throw QRunTimeError("Should never receive a message in the bulk"
            " round while offline.");
    }

    QDataStream stream(payload);

    int mtype;
    QByteArray round_id;
    uint phase;
    stream >> mtype >> round_id >> phase;

    MessageType msg_type = (MessageType) mtype;

    Id rid(round_id);
    if(rid != GetRoundId()) {
        throw QRunTimeError("Not this round: " + rid.ToString() + " " +
            GetRoundId().ToString());
    }

    if(_state == Shuffling) {
        _log.Pop();
        _offline_log.Append(data, from);
        return;
    }

    if(_phase != phase) {
        if(_phase == phase - 1 && _state == DataSharing) {
            _log.Pop();
            _offline_log.Append(data, from);
            return;
        } else {
            throw QRunTimeError("Received a message for phase: " +
                QString::number(phase) + ", while in phase: " +
                QString::number(_phase));
        }
    } else if(_state == PhasePreparation) {
        _log.Pop();
        _offline_log.Append(data, from);
        return;
    }

    switch(msg_type) {
        case BulkData:
            HandleBulkData(stream, from);
            break;
        default:
            throw QRunTimeError("Unknown message type");
    }
}
void SxeRecord::apply(QDomDocument &doc, SxeEdit* edit, bool importing) {
    if(edit->rid() == rid()) {
        if((edit->type() == SxeEdit::New
                && applySxeNewEdit(doc, dynamic_cast<const SxeNewEdit*>(edit)))
            || (edit->type() == SxeEdit::Remove
                && applySxeRemoveEdit(dynamic_cast<const SxeRemoveEdit*>(edit)))
            || (edit->type() == SxeEdit::Record
                && applySxeRecordEdit(dynamic_cast<const SxeRecordEdit*>(edit), importing))) {
            edits_ += edit;
        }
    } else {
        qDebug(QString("Tried to apply an edit meant for %1 to %2.").arg(edit->rid()).arg(rid()).toAscii());
    }
}
void SxeRecord::apply(QDomDocument &doc, SxeEdit* edit) {
    if(edit->rid() == rid()) {
        if(edit->type() == SxeEdit::New)
            applySxeNewEdit(doc, dynamic_cast<SxeNewEdit*>(edit));
        else if(edit->type() == SxeEdit::Remove)
            applySxeRemoveEdit(dynamic_cast<SxeRemoveEdit*>(edit));
        else if(edit->type() == SxeEdit::Record)
            applySxeRecordEdit(dynamic_cast<SxeRecordEdit*>(edit));
    } else {
        qDebug("Tried to apply an edit meant for %s to %s.",
               qPrintable(edit->rid()), qPrintable(rid()));
    }
}
bool SxeEdit::overridenBy(const SxeEdit &e) const {
    if(e.rid() == rid()) {
        if(e.type() == SxeEdit::Remove)
            return true;
        else if(type() == SxeEdit::Record && e.type() == SxeEdit::Record) {
            const SxeRecordEdit* ep = dynamic_cast<const SxeRecordEdit*>(&e);
            const SxeRecordEdit* tp = dynamic_cast<const SxeRecordEdit*>(this);
            return (ep->version() <= tp->version());
        }
    }
    return false;
}
void ribo_profile::sailfish_parser(const transcript_info& tinfo, const char* sf_fname, double abundance_cutoff)
{
    rid_t pid(0);
    ifstream ifile(sf_fname);
    // skip comment lines at the top of the file
    while (ifile.peek() == '#') {
        string line;
        getline(ifile, line);
    }
    string word, tid;
    int i(0);
    double tpm(0), total_abundance(0);
    while (ifile >> word) {
        ++i;
        // 1st column: transcript ID
        if (i == 1) {
            tid = word;
            // // parse Gencode transcript ID to get the actual ID
            // size_t id(word.find('|'));
            // if (id != word.npos)
            //     tid = word.substr(0, id);
            // else
            //     tid = word;
        }
        // 3rd column: transcript abundance (tpm: transcripts per million)
        else if (i == 3) {
            try { tpm = std::stod(word); }
            catch (const std::out_of_range& oor) { tpm = 0; }
            ifile.ignore(numeric_limits<streamsize>::max(), '\n');
            i = 0;
            if (tpm <= abundance_cutoff) continue;
            rid_t rid(tinfo.get_refID(tid));
            if (rid == tinfo.total_count()) continue;
            int tlen(tinfo.tlen(rid));
            // denominator for computing relative transcript abundance per nucleotide
            total_abundance += tpm * tlen;
            // initialize profile list
            vector<double> count(tlen, 0);
            profile.emplace_back(tprofile{0, count, tpm});
            refID2pID[rid] = pid++;
            include_abundant_transcript(rid);
        }
    }
    ifile.close();
    // normalize abundance
    for (size_t t = 0; t != profile.size(); ++t)
        profile[t].tot_abundance /= total_abundance;
}
RC QL_ProjNode::GetNext(RM_Record &rec)
{
    RM_Record record;
    if (prevNode.GetNext(record) == QL_EOF) {
        return QL_EOF;
    }

    int i;
    int base = 0;
    char *prevRecord = record.GetContent();
    // projection: copy only the selected attributes from the previous record
    for (i = 0; i < nAttrInfos; i++) {
        memcpy(buffer + base, prevRecord + offsetInPrev[i], attrInfos[i].attrLength);
        base += attrInfos[i].attrLength;
    }

    RID rid(-1, -1);
    rec = RM_Record(buffer, rid, tupleLength);
    return 0;
}
void BulkRound::ProcessDataBase(const Id &from, const QByteArray &data)
{
    QByteArray payload;
    if(!Verify(from, data, payload)) {
        throw QRunTimeError("Invalid signature or data");
    }

    if(_state == Offline) {
        throw QRunTimeError("Should never receive a message in the bulk"
            " round while offline.");
    }

    QDataStream stream(payload);

    int mtype;
    QByteArray round_id;
    stream >> mtype >> round_id;

    MessageType msg_type = (MessageType) mtype;

    Id rid(round_id);
    if(rid != GetRoundId()) {
        throw QRunTimeError("Not this round: " + rid.ToString() + " " +
            GetRoundId().ToString());
    }

    if(_state == Shuffling) {
        _log.Pop();
        _offline_log.Append(data, from);
        return;
    }

    switch(msg_type) {
        case BulkData:
            HandleBulkData(stream, from);
            break;
        case LoggedBulkData:
            HandleLoggedBulkData(stream, from);
            break;
        case AggregatedBulkData:
            HandleAggregatedBulkData(stream, from);
            break;
        default:
            throw QRunTimeError("Unknown message type");
    }
}
void ribo_profile::express_parser(const transcript_info& tinfo, const char* ep_fname, double abundance_cutoff)
{
    rid_t pid(0);
    ifstream ifile(ep_fname);
    // first line is the header description, discarded
    ifile.ignore(numeric_limits<streamsize>::max(), '\n');
    string word, tid;
    int i(0);
    double fpkm(0), total_abundance(0);
    while (ifile >> word) {
        ++i;
        // 2nd column: transcript ID
        if (i == 2) {
            tid = word;
            // // parse Gencode transcript ID to get the actual ID
            // size_t id(word.find('|'));
            // if (id != word.npos)
            //     tid = word.substr(0, id);
            // else
            //     tid = word;
        }
        // 11th column: transcript abundance
        else if (i == 11) {
            try { fpkm = std::stod(word); }
            catch (const std::out_of_range& oor) { fpkm = 0; }
            ifile.ignore(numeric_limits<streamsize>::max(), '\n');
            i = 0;
            if (fpkm <= abundance_cutoff) continue;
            rid_t rid(tinfo.get_refID(tid));
            if (rid == tinfo.total_count()) continue;
            int tlen(tinfo.tlen(rid));
            total_abundance += fpkm;
            // initialize profile list
            vector<double> count(tlen, 0);
            profile.emplace_back(tprofile{0, count, fpkm});
            refID2pID[rid] = pid++;
            include_abundant_transcript(rid);
        }
    }
    ifile.close();
    // normalize abundance
    for (size_t t = 0; t != profile.size(); ++t)
        profile[t].tot_abundance /= total_abundance;
}
ObjectWrapper::WriteFieldRecursionFrame::WriteFieldRecursionFrame(JSContext* cx,
                                                                  JSObject* obj,
                                                                  BSONObjBuilder* parent,
                                                                  StringData sd)
    : thisv(cx, obj), ids(cx, JS::IdVector(cx)) {
    bool isArray = false;
    if (parent) {
        if (!JS_IsArrayObject(cx, thisv, &isArray)) {
            throwCurrentJSException(
                cx, ErrorCodes::JSInterpreterFailure, "Failure to check object is an array");
        }

        subbob.emplace(isArray ? parent->subarrayStart(sd) : parent->subobjStart(sd));
    }

    if (isArray) {
        uint32_t length;
        if (!JS_GetArrayLength(cx, thisv, &length)) {
            throwCurrentJSException(
                cx, ErrorCodes::JSInterpreterFailure, "Failure to get array length");
        }

        if (!ids.reserve(length)) {
            throwCurrentJSException(
                cx, ErrorCodes::JSInterpreterFailure, "Failure to reserve array");
        }

        JS::RootedId rid(cx);
        for (uint32_t i = 0; i < length; i++) {
            rid.set(INT_TO_JSID(i));
            ids.infallibleAppend(rid);
        }
    } else {
        if (!JS_Enumerate(cx, thisv, &ids)) {
            throwCurrentJSException(
                cx, ErrorCodes::JSInterpreterFailure, "Failure to enumerate object");
        }
    }

    if (getScope(cx)->getProto<BSONInfo>().instanceOf(thisv)) {
        std::tie(originalBSON, altered) = BSONInfo::originalBSON(cx, thisv);
    }
}
void RestraintCache::load_cache(const kernel::ParticlesTemp &particle_ordering,
                                RMF::HDF5::ConstGroup group) {
  ParticleIndex particle_index = get_particle_index(particle_ordering);
  base::map<RestraintID, kernel::Restraint *> index;
  for (KnownRestraints::const_iterator it = known_restraints_.begin();
       it != known_restraints_.end(); ++it) {
    index[get_restraint_id(particle_index, it->second,
                           restraint_index_.find(it->first)->second)] =
        it->first;
  }
  kernel::RestraintsTemp restraints;
  for (unsigned int i = 0; i < group.get_number_of_children(); ++i) {
    RMF::HDF5::ConstGroup ch = group.get_child_group(i);
    int restraint_index =
        ch.get_attribute<RMF::HDF5::IndexTraits>("restraint")[0];
    RMF::HDF5::Indexes particle_indexes =
        ch.get_attribute<RMF::HDF5::IndexTraits>("particles");
    RestraintID rid(restraint_index,
                    base::ConstVector<unsigned int>(Ints(
                        particle_indexes.begin(), particle_indexes.end())));
    kernel::Restraint *r = index.find(rid)->second;
    restraints.push_back(r);
    IMP_LOG_TERSE("Matching " << Showable(r) << " with " << ch.get_name()
                              << std::endl);
  }
  Orders orders = get_orders(known_restraints_, restraints, particle_ordering);
  for (unsigned int i = 0; i < group.get_number_of_children(); ++i) {
    RMF::HDF5::ConstGroup ch = group.get_child_group(i);
    RMF::HDF5::FloatConstDataSet1D scores =
        ch.get_child_float_data_set_1d("scores");
    RMF::HDF5::IntConstDataSet2D assignments =
        ch.get_child_int_data_set_2d("assignments");
    for (unsigned int j = 0; j < scores.get_size()[0]; ++j) {
      double s = scores.get_value(RMF::HDF5::DataSetIndex1D(j));
      RMF::HDF5::Ints rw = assignments.get_row(RMF::HDF5::DataSetIndex1D(j));
      Ints psit(rw.begin(), rw.end());
      Assignment ass = orders[i].get_subset_ordered(psit);
      cache_.insert(Key(restraints[i], ass), s);
    }
  }
  validate();
}
bool RegisterRawInput(HWND hwnd, bool enable) {
  nsTArray<RAWINPUTDEVICE> rid(ArrayLength(kUsagePages));
  rid.SetLength(ArrayLength(kUsagePages));

  for (unsigned i = 0; i < rid.Length(); i++) {
    rid[i].usUsagePage = kUsagePages[i].usagePage;
    rid[i].usUsage = kUsagePages[i].usage;
    rid[i].dwFlags =
        enable ? RIDEV_EXINPUTSINK | RIDEV_DEVNOTIFY : RIDEV_REMOVE;
    rid[i].hwndTarget = hwnd;
  }

  if (!RegisterRawInputDevices(rid.Elements(), rid.Length(),
                               sizeof(RAWINPUTDEVICE))) {
    return false;
  }

  return true;
}
rule_matcher::rule_matcher(vm::program *prog)
{
#ifdef USE_RULE_COUNTING
    predicate_count.resize(prog->num_predicates());
    rules.resize(prog->num_rules());

    fill(predicate_count.begin(), predicate_count.end(), 0);

    rule_id rid(0);

    for(rule_vector::iterator it(rules.begin()), end(rules.end());
        it != end;
        it++, rid++)
    {
        rule_matcher_obj& obj(*it);

        obj.ignore = prog->get_rule(rid)->as_persistent();
        obj.total_have = 0;
        obj.total_needed = prog->get_rule(rid)->num_predicates();
    }
#endif
}
void EnterJs() {
    HandleScope store;
    int rt = -1;
    if(_env != 0)
        return;
    _env = new JsEnv;
    _threadFlag = 1;
    *_env->cont = Context::New(NULL, ObjectTemplate::New());
    (*_env->cont)->Enter();
    Handle<Object> glb = GetGlobal();
    LoadBase();
    LoadConsole(glb);
    LoadAPICall(glb);
    // Add a read-only "Stack" variable to the global object.
    glb->Set(String::New("Stack"), Object::New(), ReadOnly);
    // Load the basic JS library file.
    cs::ResID rid(IDR_JS_BASE);
    LoadJsRes(rid, L"IDR_JS_BASE");
    // Load the pure JS classes and functions from the JS library, for use by C++.
    LoadLibStruct(glb);
}
RC QL_JoinNode::GetNext(RM_Record &rec)
{
    RM_Record leftRec;
    RM_Record rightRec;

    // nested-loop join: for each left tuple, scan the right sub-node
    while (true) {
        if (bRightNodeEOF) {
            if (lSubNode.GetNext(leftRec) == QL_EOF)
                return QL_EOF;
            memcpy(buffer, leftRec.GetContent(), leftRec.GetRecordSize());
            bRightNodeEOF = false;
        }
        if (rSubNode.GetNext(rightRec) == QL_EOF) {
            bRightNodeEOF = true;
            rSubNode.Reset();
            continue;
        }
        memcpy(buffer + lSubNode.GetTupleLength(), rightRec.GetContent(), rSubNode.GetTupleLength());

        RID rid(-1, -1);
        rec = RM_Record(buffer, rid, tupleLength);
        return 0;
    }
}
CItemTypeDef * CWorld::GetTerrainItemTypeDef( DWORD dwTerrainIndex )
{
    ADDTOCALLSTACK("CWorld::GetTerrainItemTypeDef");
    CResourceDef * pRes = NULL;

    if ( g_World.m_TileTypes.IsValidIndex( dwTerrainIndex ) )
    {
        pRes = g_World.m_TileTypes[dwTerrainIndex];
    }

    if ( !pRes )
    {
        // fall back to the default terrain type definition
        RESOURCE_ID rid( RES_TYPEDEF, 0 );
        pRes = g_Cfg.ResourceGetDef( rid );
    }

    ASSERT( pRes );
    CItemTypeDef * pItemTypeDef = dynamic_cast <CItemTypeDef*> (pRes);
    ASSERT( pItemTypeDef );

    return( pItemTypeDef );
}
bool GTO2Slater::put(xmlNodePtr cur)
{
    cur = cur->children;
    while(cur != NULL)
    {
        string cname((const char*)(cur->name));
        if(cname == "grid")
            gridPtr = cur;
        else if(cname == "basisGroup")
        {
            string rid("invalid");
            string rtype("Gaussian");
            string norm("no");
            int l = 0;
            OhmmsAttributeSet inAttrib;
            inAttrib.add(rid, "rid");
            inAttrib.add(l, "l");
            inAttrib.add(rtype, "type");
            inAttrib.add(norm, "normalized");
            inAttrib.put(cur);
            if(rtype == "Gaussian" && l == 0)
            {
                // pick only S
                // if Ngto==1, don't do it
                if(norm == "yes")
                    Normalized = true;
                else
                    Normalized = false;
                map<string, xmlNodePtr>::iterator it(sPtr.find(rid));
                if(it == sPtr.end())
                {
                    sPtr[rid] = cur;
                }
            }
        }
        cur = cur->next;
    }
    if(sPtr.empty())
        return false;
    return true;
}
rule_matcher::rule_matcher(void)
{
    predicate_count =
        mem::allocator<pred_count>().allocate(theProgram->num_predicates());
    memset(predicate_count, 0, theProgram->num_predicates() * sizeof(pred_count));

    rules = mem::allocator<utils::byte>().allocate(theProgram->num_rules());
    memset(rules, 0, sizeof(utils::byte) * theProgram->num_rules());

    bitmap::create(active_bitmap, theProgram->num_rules_next_uint());
    bitmap::create(dropped_bitmap, theProgram->num_rules_next_uint());
    active_bitmap.clear(theProgram->num_rules_next_uint());
    dropped_bitmap.clear(theProgram->num_rules_next_uint());

#ifndef NDEBUG
    for(rule_id rid(0); rid < theProgram->num_rules(); ++rid) {
        rule *rl(theProgram->get_rule(rid));
        assert(rl->num_predicates() <= 255);
    }
#endif

    bitmap::create(predicates, theProgram->num_predicates_next_uint());
    clear_predicates();
}
// MUST BE CUSTOMIZED FOR EACH RuleSet
void construct_ruleset_adm(const simpleString & s, Source & source)
{
    asStringGB orderName;
    list<Polynomial> L;
    Source so(source.inputNamedFunction("List"));
    so >> orderName;
    GBInputSpecial(L, so);
    AdmissibleOrder * p;
    bool valid = StorageGet(order_storage, orderName.value(), p);
    if(valid) {
        RuleSet * result = new AdmRuleSet(*p);
        typedef list<Polynomial>::const_iterator LI;
        LI w = L.begin(), e = L.end();
        int i = 1;
        while(w != e) {
            RuleID rid(*w, i);
            result->insert(rid);
            ++w;
            ++i;
        }
        GBStream << "Constructing the ruleset with name " << s << '\n';
        result->print(GBStream);
        GBStream << "end\n";
        ruleset_storage.insert(make_pair(s, result));
    } else {
        DBG();
    }
}