/**
 * Take `num_tokens` tokens out of the token bucket, sleeping until enough
 * tokens have accumulated when the bucket is currently short.
 *
 * @param num_tokens  number of tokens the caller wants to consume
 * @return num_tokens when the request fits in the bucket; if the request
 *         exceeds bucket_size, returns whatever was available instead
 *         (possibly 0) and drains the bucket.
 */
int RateShaper::RetrieveTokens(unsigned int num_tokens) { /* update the token number first */ double timespan = getElapsedTime(last_check_time); last_check_time = HRC::now(); addTokens(timespan); /** * If the number of tokens requested exceeds the bucket size, * there is no way to give back sufficient tokens. Thus, just * return all the available tokens. */ if (num_tokens > bucket_size) { unsigned int tmp = avail_tokens; avail_tokens = 0; return tmp; } while (num_tokens > avail_tokens) { /* compute how much time it needs to generate enough tokens */ /* NOTE(review): this assumes token_gentime is a generation RATE
   (tokens per second), so deficit / rate = seconds to wait — confirm
   against addTokens()'s use of the elapsed time. */ double required_time = (num_tokens - avail_tokens) / token_gentime; double sec_part = floor(required_time); /* split the wait into whole seconds + nanoseconds for nanosleep */ tim.tv_sec = static_cast<long int>(sec_part); tim.tv_nsec = static_cast<long int>((required_time - sec_part) * 1e9); nanosleep(&tim , NULL); /* re-sample the clock and refill; loop re-checks in case the sleep
   was interrupted (EINTR) or fewer tokens than expected accrued */ double timespan = getElapsedTime(last_check_time); last_check_time = HRC::now(); addTokens(timespan); } avail_tokens -= num_tokens; return num_tokens; }
// static void OriginTrialContext::addTokensFromHeader(ExecutionContext* host, const String& headerValue) { if (headerValue.isEmpty()) return; std::unique_ptr<Vector<String>> tokens(parseHeaderValue(headerValue)); if (!tokens) return; addTokens(host, tokens.get()); }
// Open the lexer input and scan forward to the PROGRAM keyword, then seed
// the syntax tree root with that token's name and type.
void startSyntaxTree() {
    openLexFile();
    getNextToken();
    // Keep consuming tokens until the PROGRAM keyword is matched.
    while (!matchToken(PROGRAM))
        getNextToken();
    const int programIndex = lookupNumber(PROGRAM);
    addTokens(hand.root, getTokenName(programIndex), getTokenType(programIndex));
}
void DOMTokenList::add(const Vector<String>& tokens, ExceptionState& es) { Vector<String> filteredTokens; for (size_t i = 0; i < tokens.size(); ++i) { if (!validateToken(tokens[i], es)) return; if (!containsInternal(tokens[i])) filteredTokens.append(tokens[i]); } if (filteredTokens.isEmpty()) return; setValue(addTokens(value(), filteredTokens)); }
// Optimally, this should take a Vector<AtomicString> const ref in argument but // the bindings generator does not handle that. void DOMTokenList::add(const Vector<String>& tokens, ExceptionState& exceptionState) { Vector<String> filteredTokens; filteredTokens.reserveCapacity(tokens.size()); for (const auto& token : tokens) { if (!validateToken(token, exceptionState)) return; if (containsInternal(AtomicString(token))) continue; if (filteredTokens.contains(token)) continue; filteredTokens.append(token); } if (!filteredTokens.isEmpty()) setValue(addTokens(value(), filteredTokens)); }
// Tokenises on whitespace (space, newline, carriage-return, tab), optionally
// treating double-quoted runs as single tokens. Returns the number of tokens
// added, as reported by the general-purpose overload.
int StringArray::addTokens (const String& text, const bool preserveQuotedStrings)
{
    const char* const quoteChars = preserveQuotedStrings ? "\"" : "";
    return addTokens (text, " \n\r\t", quoteChars);
}
// Single-token convenience wrapper: delegates to the multi-token
// implementation with a one-element vector.
AtomicString DOMTokenList::addToken(const AtomicString& input, const AtomicString& token) {
  Vector<String> singleToken;
  singleToken.append(token.string());
  return addTokens(input, singleToken);
}
/**
 * Construct a word-level n-gram extractor.
 *
 * All parameters are forwarded verbatim to the Ngrams base constructor;
 * afterwards addTokens() is invoked to populate the model from the input
 * file immediately on construction.
 *
 * @param newNgramN      n-gram order (n)
 * @param newInFileName  path of the input text file
 * @param newOutFileName path of the output file
 * @param newDelimiters  characters that separate words
 * @param newStopChars   characters treated as stop characters
 */
WordNgrams::WordNgrams( int newNgramN, const char * newInFileName, const char * newOutFileName, const char * newDelimiters, const char * newStopChars ) : Ngrams( newNgramN, newInFileName, newOutFileName, newDelimiters, newStopChars ) { addTokens(); }