// Wake this task from the condition it is blocked on.
// Serializes on state_lock, then delegates to the _locked variant.
void
rust_task::wakeup(rust_cond *from) {
    scoped_lock guard(state_lock);
    wakeup_locked(from);
}
// Look up a scheduler by id. Takes sched_lock for the duration of the
// lookup and forwards to the _nolock variant.
rust_scheduler *
rust_kernel::get_scheduler_by_id(rust_sched_id id) {
    scoped_lock guard(sched_lock);
    return get_scheduler_by_id_nolock(id);
}
void IndexWriter::_IndexWriter(const bool create) {
//Func - Initialises the instances
//Pre  - create indicates if the indexWriter must create a new index
//       located at path or just open it

    similarity = CL_NS(search)::Similarity::getDefault();

    // Compound files only pay off for on-disk directories; a RAMDirectory
    // gains nothing from them, so switch the default off in that case.
    useCompoundFile = true;
    if ( directory->getDirectoryType() == RAMDirectory::DirectoryType() )
        useCompoundFile = false;

    //Create a ramDirectory
    ramDirectory = _CLNEW TransactionalRAMDirectory;
    CND_CONDITION(ramDirectory != NULL, "ramDirectory is NULL");

    //Initialize the writeLock to NULL; it is only assigned below once the
    //lock has actually been obtained.
    writeLock = NULL;

    //initialise the settings to their library defaults...
    maxFieldLength = DEFAULT_MAX_FIELD_LENGTH;
    mergeFactor = DEFAULT_MERGE_FACTOR;
    maxMergeDocs = DEFAULT_MAX_MERGE_DOCS;
    writeLockTimeout = WRITE_LOCK_TIMEOUT;
    commitLockTimeout = COMMIT_LOCK_TIMEOUT;
    minMergeDocs = DEFAULT_MAX_BUFFERED_DOCS;
    termIndexInterval = DEFAULT_TERM_INDEX_INTERVAL;

    //Create a new lock using the name "write.lock"
    LuceneLock* newLock = directory->makeLock(IndexWriter::WRITE_LOCK_NAME);
    //Condition check to see if newLock has been allocated properly
    CND_CONDITION(newLock != NULL, "No memory could be allocated for LuceneLock newLock");
    //Try to obtain a write lock
    if (!newLock->obtain(writeLockTimeout)){
        //Write lock could not be obtained so delete it
        _CLDELETE(newLock);
        //Reset the instance so no partially-initialised writer is left behind
        _finalize();
        //throw an exception because no writelock could be created or obtained
        _CLTHROWA(CL_ERR_IO, "Index locked for write or no write access." );
    }
    //The Write Lock has been obtained so save it for later use
    this->writeLock = newLock;

    //Create a new lock using the name "commit.lock"
    LuceneLock* lock = directory->makeLock(IndexWriter::COMMIT_LOCK_NAME);
    //Condition check to see if lock has been allocated properly
    CND_CONDITION(lock != NULL, "No memory could be allocated for LuceneLock lock");
    //LockWith2::run() performs the create-or-open of the segments file
    //while holding the commit lock.
    LockWith2 with(lock, commitLockTimeout, this, NULL, create);
    {
        SCOPED_LOCK_MUTEX(directory->THIS_LOCK) // in- & inter-process sync
        with.run();
    }
    //Release the commit lock
    _CLDELETE(lock);

    isOpen = true;
}
TEST_F(StaticJsonBuffer_ParseArray_Tests, BufferOfTheRightSizeForEmptyArray) {
  // JSON_ARRAY_SIZE(0) is exactly enough for "[]": parsing must succeed.
  StaticJsonBuffer<JSON_ARRAY_SIZE(0)> exactFitBuffer;
  with(exactFitBuffer);
  whenInputIs("[]");
  parseMustSucceed();
}
// Prevent the scheduler from exiting; may_exit is flipped under lock.
void
rust_scheduler::disallow_exit() {
    scoped_lock guard(lock);
    may_exit = false;
}
// Report whether the task is currently in the running state.
// Reads `state` under state_lock for a consistent snapshot.
bool
rust_task::running() {
    scoped_lock guard(state_lock);
    return task_state_running == state;
}
// True iff this task is blocked on exactly the given condition.
bool
rust_task::blocked_on(rust_cond *on) {
    scoped_lock guard(state_lock);
    return on == cond;
}
// Re-enable killing of this task (clears the disallow flag under
// kill_lock).
// NOTE(review): a second definition of rust_task::allow_kill appears
// later in this file with counter semantics — these look like two
// different runtime versions pasted together; confirm which one is live.
void
rust_task::allow_kill() {
    scoped_lock guard(kill_lock);
    disallow_kill = false;
}
// Number of messages currently queued in this port's buffer,
// sampled under the port lock.
size_t
rust_port::size() {
    scoped_lock guard(lock);
    size_t n = buffer.size();
    return n;
}
TEST_F(StaticJsonBuffer_ParseObject_Tests, BufferOfTheRightSizeForEmptyObject) {
  // JSON_OBJECT_SIZE(0) is exactly enough for "{}": parsing must succeed.
  StaticJsonBuffer<JSON_OBJECT_SIZE(0)> exactFitBuffer;
  with(exactFitBuffer);
  whenInputIs("{}");
  parseMustSucceed();
}
// Temporarily forbid killing of this task (sets the disallow flag
// under kill_lock). Paired with allow_kill().
void
rust_task::inhibit_kill() {
    scoped_lock guard(kill_lock);
    disallow_kill = true;
}
TEST_F(StaticJsonBuffer_ParseObject_Tests, TooSmallBufferForEmptyObject) {
  // One byte short of JSON_OBJECT_SIZE(0): parsing "{}" must fail.
  StaticJsonBuffer<JSON_OBJECT_SIZE(0) - 1> undersizedBuffer;
  with(undersizedBuffer);
  whenInputIs("{}");
  parseMustFail();
}
// Record a pending signal and wake the driver. The `signalled` flag is
// set before lock.signal() so a waiter re-checking under the lock
// cannot miss it.
void
rust_sched_driver::signal() {
    scoped_lock guard(lock);
    signalled = true;
    lock.signal();
}
// Pop one level of kill inhibition; the task becomes killable again
// once the counter reaches zero. Asserts against unbalanced calls.
void
rust_task::allow_kill() {
    scoped_lock guard(kill_lock);
    assert(disallow_kill > 0 && "Illegal allow_kill(): already killable!");
    --disallow_kill;
}
// Request a stop: raises stop_flag under the lock so readers see a
// consistent value.
void stop() {
    scoped_lock guard(lock);
    stop_flag = true;
}
// Take one reference on the port; the count is mutated under ref_lock.
void
rust_port::ref() {
    scoped_lock guard(ref_lock);
    ++ref_count;
}
// Check (under kill_lock) whether a pending kill obliges this task to
// fail; delegates to the _unlocked variant.
bool
rust_task::must_fail_from_being_killed() {
    scoped_lock guard(kill_lock);
    return must_fail_from_being_killed_unlocked();
}
void rust_port::end_detach() { // Just take the lock to make sure that the thread that signaled // the detach_cond isn't still holding it scoped_lock with(ref_lock); assert(ref_count == 0); }
// Report whether the task is currently in the blocked state,
// sampled under state_lock.
bool
rust_task::blocked() {
    scoped_lock guard(state_lock);
    return task_state_blocked == state;
}
// Move this task from the scheduler's running list to its dead list
// and poke the scheduler so it can notice the transition.
void
rust_task::die() {
    scoped_lock guard(lock);
    transition(&sched->running_tasks, &sched->dead_tasks);
    sched->lock.signal();
}
// Report whether the task has reached the dead state,
// sampled under state_lock.
bool
rust_task::dead() {
    scoped_lock guard(state_lock);
    return task_state_dead == state;
}
// Serialize this SELECT query back to SQL text, clause by clause, in
// canonical order: WITH, SELECT, FROM, PREWHERE, WHERE, GROUP BY
// (+ ROLLUP/CUBE/TOTALS), HAVING, ORDER BY, LIMIT BY, LIMIT/OFFSET,
// SETTINGS. Keyword hiliting and one-line vs. multi-line layout are
// driven by FormatSettings.
void ASTSelectQuery::formatImpl(const FormatSettings & s, FormatState & state, FormatStateStacked frame) const
{
    frame.current_select = this;
    frame.need_parens = false;
    // 4 spaces per nesting level; no indent at all in one-line mode.
    std::string indent_str = s.one_line ? "" : std::string(4 * frame.indent, ' ');

    if (with())
    {
        s.ostr << (s.hilite ? hilite_keyword : "") << indent_str << "WITH " << (s.hilite ? hilite_none : "");
        // Expression lists get one-item-per-line formatting unless one_line is set.
        s.one_line
            ? with()->formatImpl(s, state, frame)
            : with()->as<ASTExpressionList &>().formatImplMultiline(s, state, frame);
        s.ostr << s.nl_or_ws;
    }

    s.ostr << (s.hilite ? hilite_keyword : "") << indent_str << "SELECT " << (distinct ? "DISTINCT " : "") << (s.hilite ? hilite_none : "");
    s.one_line
        ? select()->formatImpl(s, state, frame)
        : select()->as<ASTExpressionList &>().formatImplMultiline(s, state, frame);

    if (tables())
    {
        s.ostr << (s.hilite ? hilite_keyword : "") << s.nl_or_ws << indent_str << "FROM " << (s.hilite ? hilite_none : "");
        tables()->formatImpl(s, state, frame);
    }

    if (prewhere())
    {
        s.ostr << (s.hilite ? hilite_keyword : "") << s.nl_or_ws << indent_str << "PREWHERE " << (s.hilite ? hilite_none : "");
        prewhere()->formatImpl(s, state, frame);
    }

    if (where())
    {
        s.ostr << (s.hilite ? hilite_keyword : "") << s.nl_or_ws << indent_str << "WHERE " << (s.hilite ? hilite_none : "");
        where()->formatImpl(s, state, frame);
    }

    if (groupBy())
    {
        s.ostr << (s.hilite ? hilite_keyword : "") << s.nl_or_ws << indent_str << "GROUP BY " << (s.hilite ? hilite_none : "");
        s.one_line
            ? groupBy()->formatImpl(s, state, frame)
            : groupBy()->as<ASTExpressionList &>().formatImplMultiline(s, state, frame);
    }

    // GROUP BY modifiers are independent flags, not child ASTs.
    if (group_by_with_rollup)
        s.ostr << (s.hilite ? hilite_keyword : "") << s.nl_or_ws << indent_str << (s.one_line ? "" : " ") << "WITH ROLLUP" << (s.hilite ? hilite_none : "");

    if (group_by_with_cube)
        s.ostr << (s.hilite ? hilite_keyword : "") << s.nl_or_ws << indent_str << (s.one_line ? "" : " ") << "WITH CUBE" << (s.hilite ? hilite_none : "");

    if (group_by_with_totals)
        s.ostr << (s.hilite ? hilite_keyword : "") << s.nl_or_ws << indent_str << (s.one_line ? "" : " ") << "WITH TOTALS" << (s.hilite ? hilite_none : "");

    if (having())
    {
        s.ostr << (s.hilite ? hilite_keyword : "") << s.nl_or_ws << indent_str << "HAVING " << (s.hilite ? hilite_none : "");
        having()->formatImpl(s, state, frame);
    }

    if (orderBy())
    {
        s.ostr << (s.hilite ? hilite_keyword : "") << s.nl_or_ws << indent_str << "ORDER BY " << (s.hilite ? hilite_none : "");
        s.one_line
            ? orderBy()->formatImpl(s, state, frame)
            : orderBy()->as<ASTExpressionList &>().formatImplMultiline(s, state, frame);
    }

    if (limitByValue())
    {
        s.ostr << (s.hilite ? hilite_keyword : "") << s.nl_or_ws << indent_str << "LIMIT " << (s.hilite ? hilite_none : "");
        limitByValue()->formatImpl(s, state, frame);
        s.ostr << (s.hilite ? hilite_keyword : "") << " BY " << (s.hilite ? hilite_none : "");
        // NOTE(review): limitBy() is dereferenced without its own null check —
        // presumably the parser guarantees it whenever limitByValue() is set; confirm.
        s.one_line
            ? limitBy()->formatImpl(s, state, frame)
            : limitBy()->as<ASTExpressionList &>().formatImplMultiline(s, state, frame);
    }

    if (limitLength())
    {
        s.ostr << (s.hilite ? hilite_keyword : "") << s.nl_or_ws << indent_str << "LIMIT " << (s.hilite ? hilite_none : "");
        // Offset, when present, is emitted in the "LIMIT offset, length" form.
        if (limitOffset())
        {
            limitOffset()->formatImpl(s, state, frame);
            s.ostr << ", ";
        }
        limitLength()->formatImpl(s, state, frame);
    }

    if (settings())
    {
        s.ostr << (s.hilite ? hilite_keyword : "") << s.nl_or_ws << indent_str << "SETTINGS " << (s.hilite ? hilite_none : "");
        settings()->formatImpl(s, state, frame);
    }
}
TEST_F(StaticJsonBuffer_ParseArray_Tests, TooSmallBufferForArrayWithOneValue) {
  // One byte short of JSON_ARRAY_SIZE(1): parsing "[1]" must fail.
  StaticJsonBuffer<JSON_ARRAY_SIZE(1) - 1> undersizedBuffer;
  with(undersizedBuffer);
  whenInputIs("[1]");
  parseMustFail();
}
// Record the process exit status for the new runtime. Guarded by
// exit_status_lock so the matching getter sees a consistent value.
extern "C" CDECL void
rust_set_exit_status_newrt(uintptr_t code) {
    scoped_lock guard(exit_status_lock);
    exit_status = code;
}
// You can toy with different inputs here // You can also use auto& input = std::cin; at home std::istringstream input { "4\n5\n;" }; /* bind-chain demo */ auto parse_int = [&input]() -> boost::optional<int> { int i; if(input >> i) { return i; } else { return {}; } }; auto result = with(parse_int() , lift([](int i) { return 2 * i; }) , [&parse_int](int i) { return with(parse_int() , lift([&i](int j) { return i + j; } )); } ); if(result) { std::cout << "Result:\t" << *result << '\n'; } else { std::cout << "No result\n"; } }
// Read back the process exit status recorded for the new runtime,
// under the same lock the setter uses.
extern "C" CDECL uintptr_t
rust_get_exit_status_newrt() {
    scoped_lock guard(exit_status_lock);
    return exit_status;
}
// Hand a scheduler id back to the kernel: queue it on join_list under
// sched_lock and signal any thread waiting to join schedulers.
void
rust_kernel::release_scheduler_id(rust_sched_id id) {
    scoped_lock guard(sched_lock);
    join_list.push_back(id);
    sched_lock.signal();
}
/** * Run the global analysis. Put all data and covariates in a logistic regression model. * * Compute likelihood ratio statistic. * This uses a likelihood statistic where n-1 haplotypes are tested. * * @return pvalue. */ ZaykinGlobalStatsResults Zaykin::runGlobal(){ ZaykinGlobalStatsResults stats; vector<vector<double> > haps; vector<double> ones; prepHaplotypes(ones, haps); #if DEBUG_ZAY_PROGRESS cout << "Zaykin start LR portion" << endl; #endif LogisticRegression with(params->getRegressionConditionNumberThreshold()), without(params->getRegressionConditionNumberThreshold()); /* * Run without the haplotypes: */ vector<vector<double> > inv_infmatrixWithOut; vector<double> betasWithOut; vector<vector<double> > inWithout; inWithout = cov; inWithout.push_back(ones); inv_infmatrixWithOut = vecops::getDblVec(inWithout.size() , inWithout.size()); int retry = 0; double startVal = 0; // value to start betas with. while(retry < 3){ try{ betasWithOut = without.newtonRaphson(inWithout, phenotype, inv_infmatrixWithOut, startVal); break; }catch(NewtonRaphsonFailureEx){ handleException(stats, startVal, retry, "Unable to compute reduced model: Newton-Raphson setup failure."); }catch(NewtonRaphsonIterationEx){ handleException(stats, startVal, retry, "Unable to compute reduced model: max iterations hit."); }catch(SingularMatrixEx){ handleException(stats, startVal, retry, "Unable to compute reduced model: information matrix was singular."); }catch(ConditionNumberEx err){ LogisticRegression lr; int separableVariable = lr.dataIsSeparable(inWithout, phenotype); string message; if (separableVariable < 0){ // Error: poor conditioning. stringstream ss; ss << "Unable to compute reduced model: Poor conditioning in information matrix. 
"; ss << "Condition number (1-norm) is " << err.conditionNumber; message = ss.str(); }else{ stringstream ss; ss << "Unable to compute reduced model: Separable data matrix."; ss << "Condition number (1-norm) is " << err.conditionNumber; message = ss.str(); } handleException(stats, startVal, retry, message); if (retry >= 3) return stats; }catch(ADTException e){ // This one is generic. string message = "Unable to compute reduced model: Newton-Raphson error."; handleException(stats, startVal, retry, message); if (retry >= 3) return stats; }catch(alglib::ap_error err){ stringstream ss; ss << "Unable to compute reduced model due to linalg exception: " << err.msg; handleException(stats, startVal, retry, ss.str()); if (retry >= 3) return stats; } } /* * Run with the haplotypes: */ vector<vector<double> > inv_infmatrixWith, inWith; vector<double> betasWith; inWith = inWithout; for (unsigned int i=0; i < haps.size()-1; i++){ // NOTE: Don't push the very last haplotype. inWith.push_back(haps.at(i)); } inv_infmatrixWith = vecops::getDblVec(inWith.size() , inWith.size()); retry = 0; startVal = 0; // value to start betas with. while(retry < 3){ try{ betasWith = with.newtonRaphson(inWith, phenotype, inv_infmatrixWith, startVal); break; }catch(NewtonRaphsonFailureEx){ handleException(stats, startVal, retry, "Unable to compute full model: Newton-Raphson setup failure."); }catch(NewtonRaphsonIterationEx){ handleException(stats, startVal, retry, "Unable to compute full model: max iterations hit."); }catch(SingularMatrixEx){ handleException(stats, startVal, retry, "Unable to compute full model: information matrix was singular."); }catch(ConditionNumberEx err){ LogisticRegression lr; int separableVariable = lr.dataIsSeparable(inWith, phenotype); stringstream ss; if (separableVariable < 0){ // Error: poor conditioning. ss << "Unable to compute reduced model: Poor conditioning in information matrix. 
"; ss << "Condition number (1-norm) is " << err.conditionNumber; }else{ ss << "Unable to compute reduced model: Separable data matrix."; ss << "Condition number (1-norm) is " << err.conditionNumber; } string message = ss.str(); handleException(stats, startVal, retry, message); if (retry >= 3) return stats; }catch(ADTException e){ string message = "Unable to compute full model: Newton-Raphson error."; handleException(stats, startVal, retry, message); if (retry >= 3) return stats; }catch(alglib::ap_error err){ stringstream ss; ss << "Unable to compute full model due to linalg exception: " << err.msg; handleException(stats, startVal, retry,ss.str()); if (retry >= 3) return stats; } } double likeRatio = with.likelihoodRatio(betasWithOut, inWithout, betasWith, inWith, phenotype); try{ stats.pvalue = Statistics::chi2prob(likeRatio, betasWith.size() - betasWithOut.size()); stats.testStat = likeRatio; stats.degFreedom = betasWith.size() - betasWithOut.size(); }catch(...){ stringstream ss; ss << "Zaykin's method: unable to compute chi square: " << likeRatio << " " << betasWith.size() - betasWithOut.size() << endl; Logger::Instance()->writeLine(ss.str()); stats.fillDefault(); return stats; } return stats; }
TEST_F(StaticJsonBuffer_ParseArray_Tests, TooSmallBufferForEmptyArray) {
  // One byte short of JSON_ARRAY_SIZE(0): parsing "[]" must fail.
  StaticJsonBuffer<JSON_ARRAY_SIZE(0) - 1> undersizedBuffer;
  with(undersizedBuffer);
  whenInputIs("[]");
  parseMustFail();
}
// Block this task on the given condition. Takes kill_lock (so a
// concurrent kill is serialized against the state change) and
// delegates to the _locked variant.
bool
rust_task::block(rust_cond *on, const char* name) {
    scoped_lock guard(kill_lock);
    return block_locked(on, name);
}