/**
 * Applies this instantiation to @p id: every scope level of the identifier gets
 * the template parameters of the matching instantiation level substituted in.
 *
 * The last identifier component receives this instantiation's parameters; the
 * leading components are handled by recursing into previousInstantiationInformation.
 * Template identifiers already present on the last component are kept wherever
 * this instantiation has no concrete type for that position.
 */
QualifiedIdentifier InstantiationInformation::applyToIdentifier(const QualifiedIdentifier& id) const {
  QualifiedIdentifier result;

  if(id.count() > 1) {
    // Strip the last component and let the previous instantiation level
    // rewrite the remaining prefix.
    result = id;
    result.pop();
    if(previousInstantiationInformation.index())
      result = previousInstantiationInformation.information().applyToIdentifier(result);
  }

  Identifier last(id.last());

  // Remember the template identifiers the component already carried, then wipe them.
  KDevVarLengthArray<IndexedTypeIdentifier> previousTemplateIds;
  for(uint pos = 0; pos < last.templateIdentifiersCount(); ++pos)
    previousTemplateIds.append(last.templateIdentifier(pos));
  last.clearTemplateIdentifiers();

  // Re-append one template identifier per parameter of this instantiation:
  // a concrete type wins, otherwise fall back to the old identifier (if any).
  const uint paramCount = templateParametersSize();
  for(uint pos = 0; pos < paramCount; ++pos) {
    const AbstractType::Ptr type = templateParameters()[pos].abstractType();
    if(type)
      last.appendTemplateIdentifier(IndexedTypeIdentifier(type->toString(), true));
    else if((uint)previousTemplateIds.size() > pos)
      last.appendTemplateIdentifier(previousTemplateIds[pos]);
    else
      last.appendTemplateIdentifier(IndexedTypeIdentifier());
  }

  // Keep any surplus old template identifiers beyond this instantiation's parameters.
  for(int pos = paramCount; pos < previousTemplateIds.size(); ++pos)
    last.appendTemplateIdentifier(previousTemplateIds[pos]);

  result.push(last);
  return result;
}
/**
 * Returns all top-contexts recorded in the uses-repository as containing
 * uses of the declaration identified by @p id. Returns an empty array when
 * the declaration has no entry in the repository.
 */
KDevVarLengthArray<IndexedTopDUContext> Uses::uses(const DeclarationId& id) const {
  KDevVarLengthArray<IndexedTopDUContext> ret;

  // Build the lookup key for the repository.
  UsesItem item;
  item.declaration = id;

  // NOTE(review): the original also constructed an unused UsesRequestItem here;
  // findIndex() is given the item directly, so the dead local was removed.
  uint index = d->m_uses.findIndex(item);

  if(index) {
    // Entry found: copy out every top-context it lists.
    const UsesItem* repositoryItem = d->m_uses.itemFromIndex(index);
    FOREACH_FUNCTION(const IndexedTopDUContext& decl, repositoryItem->uses)
      ret.append(decl);
  }

  return ret;
}
/**
 * Returns all contexts recorded in the importers-repository as importing
 * the declaration identified by @p id. Returns an empty array when the
 * declaration has no entry in the repository.
 */
KDevVarLengthArray<IndexedDUContext> Importers::importers(const DeclarationId& id) const {
  KDevVarLengthArray<IndexedDUContext> ret;

  // Build the lookup key for the repository.
  ImportersItem item;
  item.declaration = id;

  // NOTE(review): the original also constructed an unused ImportersRequestItem
  // here; findIndex() is given the item directly, so the dead local was removed.
  uint index = d->m_importers.findIndex(item);

  if(index) {
    // Entry found: copy out every importing context it lists.
    const ImportersItem* repositoryItem = d->m_importers.itemFromIndex(index);
    FOREACH_FUNCTION(const IndexedDUContext& decl, repositoryItem->importers)
      ret.append(decl);
  }

  return ret;
}
/**
 * Resolves this DeclarationId to the matching Declaration objects.
 *
 * Indirect ids are looked up in the PersistentSymbolTable by qualified
 * identifier and disambiguated via additionalIdentity; direct ids resolve
 * straight through the stored declaration reference. When a specialization
 * index is set, each found declaration is replaced by its specialization.
 *
 * @param top When non-null, restrict the symbol-table search to contexts
 *            recursively imported by @p top; when null, accept any match
 *            whose top-context is already loaded in memory.
 *
 * NOTE(review): the original text of this function was truncated — the
 * specialization block and the function were never closed and it had no final
 * return. The tail has been completed to return the specialized declarations
 * when a specialization is set, and the plain results otherwise. The original
 * also re-checked `if(!top)` inside the branch only reachable when top is
 * null (always true) and shadowed `decl`; both cleaned up, behavior unchanged.
 */
KDevVarLengthArray<Declaration*> DeclarationId::getDeclarations(const TopDUContext* top) const {
  KDevVarLengthArray<Declaration*> ret;

  if(m_isDirect == false) {
    // Find the declaration by its qualified identifier and additionalIdentity
    QualifiedIdentifier id(m_indirectData.identifier);

    if(top) {
      // Do filtering against top's recursive imports
      PersistentSymbolTable::FilteredDeclarationIterator filter =
          PersistentSymbolTable::self().getFilteredDeclarations(id, top->recursiveImportIndices());
      for(; filter; ++filter) {
        Declaration* decl = filter->data();
        if(decl && m_indirectData.additionalIdentity == decl->additionalIdentity()) {
          // Hit
          ret.append(decl);
        }
      }
    } else {
      // Just accept anything
      PersistentSymbolTable::Declarations decls = PersistentSymbolTable::self().getDeclarations(id);
      PersistentSymbolTable::Declarations::Iterator it = decls.iterator();
      for(; it; ++it) {
        const IndexedDeclaration& iDecl(*it);

        ///@todo think this over once we don't pull in all imported top-context any more
        // Don't trigger loading of top-contexts from here, it will create a lot of problems
        if((!DUChain::self()->isInMemory(iDecl.topContextIndex())))
          continue;

        Declaration* decl = iDecl.data();
        if(decl && m_indirectData.additionalIdentity == decl->additionalIdentity()) {
          // Hit
          ret.append(decl);
        }
      }
    }
  } else {
    // Direct id: resolve the stored declaration reference.
    Declaration* decl = m_directData.declaration();
    if(decl)
      ret.append(decl);
  }

  if(!ret.isEmpty() && m_specialization.index()) {
    // Map every found declaration onto its specialization; declarations that
    // cannot be specialized are dropped.
    KDevVarLengthArray<Declaration*> newRet;
    foreach (Declaration* decl, ret) {
      Declaration* specialized = decl->specialize(m_specialization, top ? top : decl->topContext());
      if(specialized)
        newRet.append(specialized);
    }
    return newRet;
  }

  return ret;
}
/**
 * Tokenizes raw bytes into PreprocessedContents: maximal identifier runs
 * (letters, digits, '_', starting with a letter or '_') become one
 * IndexedString index each; every other byte is appended individually via
 * indexFromCharacter(). A trailing identifier at end-of-input is flushed too.
 */
PreprocessedContents tokenizeFromByteArray(const QByteArray& array) {
  PreprocessedContents result;

  KDevVarLengthArray<char, 100> word;               // bytes of the identifier being built
  KDevelop::IndexedString::RunningHash wordHash;    // incremental hash over the same bytes
  bool inWord = false;

  const char* it = array.constData();
  const char* const end = it + array.size();

  for(; it < end; ++it) {
    const char c = *it;

    // An identifier starts at a letter or underscore.
    if(!inWord && (isLetter(c) || c == '_'))
      inWord = true;

    if(inWord) {
      if(isLetterOrNumber(c) || c == '_') {
        // Still inside the identifier: accumulate byte and hash.
        wordHash.append(c);
        word.append(c);
      } else {
        // Identifier ended: emit it as a single indexed token and reset.
        result.append( KDevelop::IndexedString(word.constData(), word.size(), wordHash.hash).index() );
        wordHash.clear();
        word.clear();
        inWord = false;
      }
    }

    // Non-identifier bytes (including the one that just ended a word)
    // are appended as single-character indices.
    if(!inWord)
      result.append( indexFromCharacter(c) );
  }

  // Flush an identifier that runs to the end of the input.
  if(inWord)
    result.append( KDevelop::IndexedString(word.constData(), word.size(), wordHash.hash).index() );

  return result;
}
/**
 * Reads one identifier from @p input and returns it as an IndexedString index.
 *
 * Two paths exist: the common one collects plain characters into a buffer and
 * indexes them in one shot at the end; the complex one (taken when a
 * non-character, i.e. already-tokenized, element is encountered) merges plain
 * characters and tokenized pieces into a single IndexedString step by step.
 */
uint pp_skip_identifier::operator()(Stream& input) {
  KDevVarLengthArray<char, 100> identifier;      // plain characters collected so far
  KDevelop::IndexedString::RunningHash hash;     // incremental hash over the same characters

  while (!input.atEnd()) {
    if(!isCharacter(input.current())) {
      //Do a more complex merge, where also tokenized identifiers can be merged
      KDevelop::IndexedString ret;
      // Seed the result with whatever plain characters were collected before
      // the first tokenized element.
      if(!identifier.isEmpty())
        ret = KDevelop::IndexedString(identifier.constData(), identifier.size(), hash.hash);
      while (!input.atEnd()) {
        uint current = input.current();
        // Stop at a plain character that cannot continue an identifier.
        if (!isLetterOrNumber(current) && input != '_' && isCharacter(current))
          break;
        if(ret.isEmpty())
          ret = KDevelop::IndexedString::fromIndex(current); //The most common fast path
        else
          ///@todo Be better to build up a complete buffer and then append it all, so we don't get he intermediate strings into the repository
          ret = KDevelop::IndexedString(ret.byteArray() + KDevelop::IndexedString::fromIndex(input.current()).byteArray());
        ++input;
      }
      // Early exit: the merged identifier is complete.
      return ret.index();
    }
    //Collect characters and connect them to an IndexedString
    // Stop at the first character that cannot continue an identifier.
    if (!isLetterOrNumber(input.current()) && input != '_')
      break;
    char c = characterFromIndex(input);
    hash.append(c);
    identifier.append(c);
    ++input;
  }

  // Fast path: the identifier consisted only of plain characters.
  return KDevelop::IndexedString(identifier.constData(), identifier.size(), hash.hash).index();
}