// Builds the merge topology for the job's slave nodes. Starting from all
// rootNodes slaves, each pass partitions the current level's nodes into
// groups (group count r = ceil(rootNodes/MERGE_GRANULARITY)): the first node
// of each group is promoted to the next level, and every other node in the
// group is recorded in sD[<promoted node>] as a merge source. Each promoted
// node's source list in sD is 0-terminated. Passes repeat until one node
// remains. The two UnsignedArrays are ping-ponged as current/next level.
void init()
{
    // prepare topology.
    unsigned rootNodes = container.queryJob().querySlaves();
    unsigned res = MERGE_GRANULARITY;
    // one MemoryBuffer per slave; sD[n] collects the node ids that node n merges from
    sD = new MemoryBuffer[rootNodes];
    UnsignedArray nodes1, nodes2;
    UnsignedArray *currentLevel = &nodes1, *nextLevel = &nodes2;
    unsigned n = 0;
    // level 0 = every slave node, in order
    while (n<rootNodes)
        currentLevel->append(n++);
    while (rootNodes > 1)
    {
        assertex(rootNodes);
        unsigned r = (rootNodes+(res-1))/res; // groups
        unsigned t = 0;     // accumulator that decides where group boundaries fall
        n = 0;
        bool first = true;  // true => current item starts a new group and is promoted
        loop
        {
            if (first)
            {
                // group leader: promoted to the next level of the topology
                first = false;
                nextLevel->append(currentLevel->item(n));
            }
            else
            {
#ifdef _DEBUG
                unsigned node = nextLevel->tos();
                unsigned item = currentLevel->item(n);
                ActPrintLog("Adding to node=%d, item=%d", node, item);
#endif
                // group member: becomes a merge source for the most recently
                // promoted node (top of nextLevel)
                sD[nextLevel->tos()].append(currentLevel->item(n));
            }
            n++;
            if (n>=rootNodes)
                break;
            // stepping t by r and wrapping at rootNodes spreads the group
            // boundaries evenly across the level
            t += r;
            if (t>=rootNodes)
            {
                t -= rootNodes;
                first = true;
            }
        }
        // NOTE(review): this only checks the *last* promoted node's buffer;
        // presumably the grouping above guarantees every group has >= 2
        // members so each leader received at least one source — confirm.
        assertex(sD[nextLevel->tos()].length()); // something must have been added
        n = 0;
        while (n<nextLevel->ordinality())
            sD[nextLevel->item(n++)].append(0); // terminator
#ifdef _DEBUG
        ActPrintLog("EOL");
#endif
        // next level becomes current; recycle the old current-level array
        rootNodes = nextLevel->ordinality();
        UnsignedArray *tmp = currentLevel;
        currentLevel = nextLevel;
        nextLevel = tmp;
        nextLevel->kill();
    }
}
// Constructs a match path from two parallel arrays: element idx of _ids pairs
// with element idx of _indices. Both are copied into the members ids/indices.
// The arrays must be the same length (asserted).
NlpMatchPath::NlpMatchPath(const UnsignedArray & _ids, const UnsignedArray & _indices)
{
    assert(_ids.ordinality() == _indices.ordinality());
    ForEachItemIn(idx, _ids)
    {
        ids.append(_ids.item(idx));
        indices.append(_indices.item(idx));
    }
    // NOTE(review): the constructor's closing brace is not in this chunk —
    // the definition continues/closes outside the visible span.
// Constructs the lexer: keeps references to the token and skip DFAs, records
// the end-of-file token id, and builds the isEndToken[] lookup flagging each
// character code (< 256) listed in _endTokenChars as a token terminator.
MultiLexer::MultiLexer(const AsciiDfa & _tokens, const AsciiDfa & _skip, const UnsignedArray & _endTokenChars, unsigned _eofId)
  : tokens(_tokens), skip(_skip)
{
    eofId = _eofId;
    _clear(isEndToken);  // start with no characters marked as end-of-token
    ForEachItemIn(idx, _endTokenChars)
    {
        unsigned next = _endTokenChars.item(idx);
        if (next < 256)              // values outside the table are silently ignored
            isEndToken[next] = true;
    }
    // NOTE(review): the constructor's closing brace is not in this chunk —
    // the definition continues/closes outside the visible span.
// Finds the first entry in 'a' that is >= v and returns it via 'next'.
// An empty array acts as a wildcard: next = v and the match succeeds.
// Returns true when a match was found.
// NOTE(review): the scan takes the first item in array order that is >= v,
// so it assumes 'a' is sorted ascending — confirm against the callers.
bool CCronAtSchedule::match(UnsignedArray &a,unsigned v,unsigned &next)
{
    if (a.ordinality()==0)
    {
        // empty schedule field matches any value unchanged
        next = v;
        return true;
    }
    ForEachItemIn(i,a)
    {
        unsigned n = a.item(i);
        if (n>=v)
        {
            next = n;
            return true;
        }
    }
    // NOTE(review): the fall-through path (all entries < v) and the function's
    // closing brace lie outside this chunk — presumably `return false;` there.