/// Emit (or return the cached) private global constant holding the
/// compile-time pattern for \p pattern, from which the runtime can
/// instantiate a key path object.
///
/// NOTE(review): a second definition of this same member appears later in
/// this file — two out-of-line definitions of one member would not compile
/// in a single TU; this looks like two concatenated revisions. Confirm
/// which one is live before editing.
///
/// The global's layout is ABI: a zeroed size_t word for lazy one-time
/// initialization, root/value metadata-accessor thunks, an optional ObjC KVC
/// string pointer, a 32-bit buffer header (filled in via placeholder once the
/// total size is known), then one encoded record per component. The emission
/// order of the `fields.add*` calls below therefore must not be changed.
///
/// \param pattern  the SIL-level key path pattern to lower.
/// \param diagLoc  location used for "not implemented" diagnostics.
/// \returns the pattern global, or an Int8PtrTy undef after diagnosing an
///          unsupported configuration (32-bit targets, generic computed
///          properties in this revision).
llvm::Constant *
IRGenModule::getAddrOfKeyPathPattern(KeyPathPattern *pattern,
                                     SILLocation diagLoc) {
  // TODO: Landing 32-bit key paths requires some runtime changes to get the
  // 8-byte object header.
  if (getPointerSize() != Size(8)) {
    Context.Diags.diagnose(diagLoc.getSourceLoc(), diag::not_implemented,
                           "32-bit key paths");
    return llvm::UndefValue::get(Int8PtrTy);
  }

  // See if we already emitted this.
  auto found = KeyPathPatterns.find(pattern);
  if (found != KeyPathPatterns.end())
    return found->second;

  // Gather type arguments from the root and leaf types of the key path.
  auto rootTy = pattern->getRootType();
  auto valueTy = pattern->getValueType();

  // Check for parameterization, whether by subscript indexes or by the generic
  // environment. If there isn't any, we can instantiate the pattern in-place.
  bool isInstantiableInPlace = pattern->getNumOperands() == 0
    && !pattern->getGenericSignature();

  // Collect the required parameters for the keypath's generic environment.
  SmallVector<GenericRequirement, 4> requirements;
  GenericEnvironment *genericEnv = nullptr;
  if (auto sig = pattern->getGenericSignature()) {
    genericEnv = sig->createGenericEnvironment(*getSwiftModule());
    enumerateGenericSignatureRequirements(pattern->getGenericSignature(),
      [&](GenericRequirement reqt) { requirements.push_back(reqt); });
  }

  /// Generate a metadata accessor that produces metadata for the given type
  /// using arguments from the generic context of the key path.
  auto emitMetadataGenerator = [&](CanType type) -> llvm::Function * {
    // TODO: Use the standard metadata accessor when there are no arguments
    // and the metadata accessor is defined.

    // Build a stub that loads the necessary bindings from the key path's
    // argument buffer then fetches the metadata.
    // Signature: TypeMetadata* (i8 *bindingsBuffer).
    auto fnTy = llvm::FunctionType::get(TypeMetadataPtrTy,
                                        {Int8PtrTy}, /*vararg*/ false);
    auto accessorThunk = llvm::Function::Create(fnTy,
                                              llvm::GlobalValue::PrivateLinkage,
                                              "keypath_get_type", getModule());
    accessorThunk->setAttributes(constructInitialAttributes());
    {
      IRGenFunction IGF(*this, accessorThunk);
      if (DebugInfo)
        DebugInfo->emitArtificialFunction(IGF, accessorThunk);

      if (type->hasTypeParameter()) {
        // Only dependent types need the bindings buffer; rebind the
        // requirements and map the interface type into the environment.
        // (If the type has parameters, genericEnv is expected non-null here.)
        auto bindingsBufPtr = IGF.collectParameters().claimNext();

        bindFromGenericRequirementsBuffer(IGF, requirements,
          Address(bindingsBufPtr, getPointerAlignment()),
          [&](CanType t) {
            if (!genericEnv)
              return t;
            return genericEnv->mapTypeIntoContext(t)->getCanonicalType();
          });

        type = genericEnv->mapTypeIntoContext(type)->getCanonicalType();
      }
      auto ret = IGF.emitTypeMetadataRef(type);
      IGF.Builder.CreateRet(ret);
    }
    return accessorThunk;
  };

  // Start building the key path pattern.
  ConstantInitBuilder builder(*this);
  ConstantStructBuilder fields = builder.beginStruct();
  fields.setPacked(true);
  // Add a zero-initialized header we can use for lazy initialization.
  fields.add(llvm::ConstantInt::get(SizeTy, 0));

  // Store references to metadata generator functions to generate the metadata
  // for the root and leaf. These sit in the "isa" and object header parts of
  // the final object.
  fields.add(emitMetadataGenerator(rootTy));
  fields.add(emitMetadataGenerator(valueTy));

  // Add a pointer to the ObjC KVC compatibility string, if there is one, or
  // null otherwise.
  llvm::Constant *objcString;
  if (!pattern->getObjCString().empty()) {
    objcString = getAddrOfGlobalString(pattern->getObjCString());
  } else {
    objcString = llvm::ConstantPointerNull::get(Int8PtrTy);
  }
  fields.add(objcString);

  // Leave a placeholder for the buffer header, since we need to know the full
  // buffer size to fill it in.
  auto headerPlaceholder = fields.addPlaceholderWithSize(Int32Ty);
  auto startOfKeyPathBuffer = fields.getNextOffsetFromGlobal();

  // Build out the components.
  auto baseTy = rootTy;

  // Helper shared by stored-property components and property-keyed computed
  // components: returns the field offset constant for `property` in
  // `loweredBaseTy`, plus whether it is fully resolved at compile time.
  // When unresolved, the returned constant is instead the location (struct:
  // offset of the field-offset slot; class: field offset vector entry) used
  // to resolve it at instantiation time.
  auto getPropertyOffsetOrIndirectOffset
    = [&](SILType loweredBaseTy, VarDecl *property)
        -> std::pair<llvm::Constant*, bool> {
    llvm::Constant *offset;
    bool isResolved;
    bool isStruct;
    if (auto structTy = loweredBaseTy.getStructOrBoundGenericStruct()) {
      offset = emitPhysicalStructMemberFixedOffset(*this,
                                                   loweredBaseTy,
                                                   property);
      isStruct = true;
    } else if (auto classTy = loweredBaseTy.getClassOrBoundGenericClass()) {
      offset = tryEmitConstantClassFragilePhysicalMemberOffset(*this,
                                                               loweredBaseTy,
                                                               property);
      isStruct = false;
    } else {
      llvm_unreachable("property of non-struct, non-class?!");
    }

    // If the offset isn't fixed, try instead to get the field offset vector
    // offset for the field to look it up dynamically.
    isResolved = offset != nullptr;
    if (!isResolved) {
      if (isStruct) {
        offset = emitPhysicalStructMemberOffsetOfFieldOffset(
            *this, loweredBaseTy, property);
        assert(offset && "field is neither fixed-offset nor in offset vector");
      } else {
        auto offsetValue = getClassFieldOffset(*this,
                                 loweredBaseTy.getClassOrBoundGenericClass(),
                                 property);
        offset = llvm::ConstantInt::get(Int32Ty, offsetValue.getValue());
      }
    }
    return {offset, isResolved};
  };

  for (unsigned i : indices(pattern->getComponents())) {
    SILType loweredBaseTy;
    // Lower the component's base type inside the pattern's generic context.
    Lowering::GenericContextScope scope(getSILTypes(),
                                        pattern->getGenericSignature());
    loweredBaseTy = getLoweredType(AbstractionPattern::getOpaque(),
                                   baseTy->getLValueOrInOutObjectType());

    auto &component = pattern->getComponents()[i];
    switch (auto kind = component.getKind()) {
    case KeyPathPatternComponent::Kind::StoredProperty: {
      // Try to get a constant offset if we can.
      auto property = cast<VarDecl>(component.getStoredPropertyDecl());
      llvm::Constant *offset;
      bool isResolved;
      std::tie(offset, isResolved)
        = getPropertyOffsetOrIndirectOffset(loweredBaseTy, property);
      offset = llvm::ConstantExpr::getTruncOrBitCast(offset, Int32Ty);
      bool isStruct = (bool)loweredBaseTy.getStructOrBoundGenericStruct();

      // If the projection is a statically known integer, try to pack it into
      // the key path payload.
      if (isResolved) {
        if (auto offsetInt = dyn_cast_or_null<llvm::ConstantInt>(offset)) {
          auto offsetValue = offsetInt->getValue().getZExtValue();
          if (KeyPathComponentHeader::offsetCanBeInline(offsetValue)) {
            // Small constant offset: encode it directly in the header word.
            auto header = isStruct
              ? KeyPathComponentHeader
                  ::forStructComponentWithInlineOffset(offsetValue)
              : KeyPathComponentHeader
                  ::forClassComponentWithInlineOffset(offsetValue);
            fields.addInt32(header.getData());
            break;
          }
        }
        // Constant but too large to inline: header word + 32-bit offset word.
        auto header = isStruct
          ? KeyPathComponentHeader::forStructComponentWithOutOfLineOffset()
          : KeyPathComponentHeader::forClassComponentWithOutOfLineOffset();
        fields.addInt32(header.getData());
        fields.add(offset);
      } else {
        // Otherwise, stash the offset of the field offset within the metadata
        // object, so we can pull it out at instantiation time.
        // TODO: We'll also need a way to handle resilient field offsets, once
        // field offset vectors no longer cover all fields in the type.
        KeyPathComponentHeader header = isStruct
          ? KeyPathComponentHeader::forStructComponentWithUnresolvedOffset()
          : KeyPathComponentHeader::forClassComponentWithUnresolvedOffset();
        fields.addInt32(header.getData());
        fields.add(offset);
      }
      break;
    }
    case KeyPathPatternComponent::Kind::GettableProperty:
    case KeyPathPatternComponent::Kind::SettableProperty: {
      // Encode the settability.
      bool settable = kind == KeyPathPatternComponent::Kind::SettableProperty;
      KeyPathComponentHeader::ComputedPropertyKind componentKind;
      if (settable) {
        componentKind = component.isComputedSettablePropertyMutating()
          ? KeyPathComponentHeader::SettableMutating
          : KeyPathComponentHeader::SettableNonmutating;
      } else {
        componentKind = KeyPathComponentHeader::GetOnly;
      }

      // Lower the id reference. The id is what the runtime uses to decide
      // whether two computed components are equivalent.
      auto id = component.getComputedPropertyId();
      KeyPathComponentHeader::ComputedPropertyIDKind idKind;
      llvm::Constant *idValue;
      bool idResolved;
      switch (id.getKind()) {
      case KeyPathPatternComponent::ComputedPropertyId::Function:
        // Identified by the getter function pointer itself.
        idKind = KeyPathComponentHeader::Getter;
        idValue = getAddrOfSILFunction(id.getFunction(), NotForDefinition);
        idResolved = true;
        break;
      case KeyPathPatternComponent::ComputedPropertyId::DeclRef: {
        // Identified by a dynamic-dispatch slot: positive vtable index for
        // class methods, negative witness-table index for protocol methods.
        idKind = KeyPathComponentHeader::VTableOffset;
        auto declRef = id.getDeclRef();
        auto dc = declRef.getDecl()->getDeclContext();
        if (auto methodClass = dyn_cast<ClassDecl>(dc)) {
          auto index = getVirtualMethodIndex(*this, declRef);
          idValue = llvm::ConstantInt::get(SizeTy, index);
          idResolved = true;
        } else if (auto methodProto = dyn_cast<ProtocolDecl>(dc)) {
          auto &protoInfo = getProtocolInfo(methodProto);
          auto index = protoInfo.getFunctionIndex(
                                 cast<AbstractFunctionDecl>(declRef.getDecl()));
          idValue = llvm::ConstantInt::get(SizeTy, -index.getValue());
          idResolved = true;
        } else {
          llvm_unreachable("neither a class nor protocol dynamic method?");
        }
        break;
      }
      case KeyPathPatternComponent::ComputedPropertyId::Property:
        // Identified by the stored property the accessors wrap.
        idKind = KeyPathComponentHeader::StoredPropertyOffset;
        std::tie(idValue, idResolved)
          = getPropertyOffsetOrIndirectOffset(loweredBaseTy, id.getProperty());
        idValue = llvm::ConstantExpr::getZExtOrBitCast(idValue, SizeTy);
        break;
      }

      auto header = KeyPathComponentHeader::forComputedProperty(componentKind,
                                   idKind, !isInstantiableInPlace, idResolved);
      fields.addInt32(header.getData());
      fields.add(idValue);

      if (isInstantiableInPlace) {
        // No generic arguments, so we can invoke the getter/setter as is.
        fields.add(getAddrOfSILFunction(component.getComputedPropertyGetter(),
                                        NotForDefinition));
        if (settable)
          fields.add(getAddrOfSILFunction(component.getComputedPropertySetter(),
                                          NotForDefinition));
      } else {
        // If there's generic context (TODO: or subscript indexes), embed as
        // arguments in the component. Thunk the SIL-level accessors to give the
        // runtime implementation a polymorphically-callable interface.
        // Not implemented in this revision: diagnose and bail with an undef.
        Context.Diags.diagnose(diagLoc.getSourceLoc(), diag::not_implemented,
                               "generic computed key paths");
        return llvm::UndefValue::get(Int8PtrTy);
      }
    }
    }

    // For all but the last component, we pack in the type of the component.
    if (i + 1 != pattern->getComponents().size()) {
      fields.add(emitMetadataGenerator(component.getComponentType()));
    }
    baseTy = component.getComponentType();
  }

  // Save the total size of the buffer.
  Size componentSize = fields.getNextOffsetFromGlobal()
    - startOfKeyPathBuffer;

  // We now have enough info to build the header.
  KeyPathBufferHeader header(componentSize.getValue(), isInstantiableInPlace,
                             /*reference prefix*/ false);
  // Add the header, followed by the components.
  fields.fillPlaceholder(headerPlaceholder,
                         llvm::ConstantInt::get(Int32Ty, header.getData()));

  // Create the global variable.
  // TODO: The pattern could be immutable if
  // it isn't instantiable in place, and if we made the type metadata accessor
  // references private, it could go in true-const memory.
  auto patternVar = fields.finishAndCreateGlobal("keypath",
                                          getPointerAlignment(),
                                          /*constant*/ false,
                                          llvm::GlobalVariable::PrivateLinkage);
  // Cache so repeated requests for the same pattern share one global.
  KeyPathPatterns.insert({pattern, patternVar});
  return patternVar;
}
/// Emit (or return the cached) private global constant holding the
/// compile-time pattern for \p pattern, from which the runtime instantiates
/// a key path object.
///
/// NOTE(review): another definition of this same member appears earlier in
/// this file — two out-of-line definitions of one member would not compile
/// in a single TU; this looks like two concatenated revisions, with this one
/// the later (it supports external components, subscript indexes, optional
/// chain/force/wrap, and foreign selector-based ids).
///
/// The global's layout is ABI: a zeroed size_t word for lazy one-time
/// initialization, root/value metadata-accessor thunks sized to match the
/// heap object header (asserted below), an optional ObjC KVC string pointer,
/// a 32-bit buffer header (filled via placeholder once the total size is
/// known), then one encoded record per component, pointer-aligned. The order
/// of the `fields.add*` calls below therefore must not be changed.
///
/// \param pattern  the SIL-level key path pattern to lower.
/// \param diagLoc  diagnostic location (unused in this revision).
llvm::Constant *
IRGenModule::getAddrOfKeyPathPattern(KeyPathPattern *pattern,
                                     SILLocation diagLoc) {
  // See if we already emitted this.
  auto found = KeyPathPatterns.find(pattern);
  if (found != KeyPathPatterns.end())
    return found->second;

  // Gather type arguments from the root and leaf types of the key path.
  auto rootTy = pattern->getRootType();
  auto valueTy = pattern->getValueType();

  // Check for parameterization, whether by subscript indexes or by the generic
  // environment. If there isn't any, we can instantiate the pattern in-place.
  bool isInstantiableInPlace = pattern->getNumOperands() == 0
    && !pattern->getGenericSignature();

  // Collect the required parameters for the keypath's generic environment.
  SmallVector<GenericRequirement, 4> requirements;
  GenericEnvironment *genericEnv = nullptr;
  if (auto sig = pattern->getGenericSignature()) {
    genericEnv = sig->createGenericEnvironment();
    enumerateGenericSignatureRequirements(pattern->getGenericSignature(),
      [&](GenericRequirement reqt) { requirements.push_back(reqt); });
  }

  // Shared worker for the metadata/witness-table accessor thunks below:
  // builds a private function `returnType name(i8 *bindingsBuffer)` whose
  // body is produced by `emit`, after rebinding the pattern's generic
  // requirements from the buffer and mapping `type` into context when it
  // has type parameters.
  auto emitGenerator = [&](StringRef name, CanType type,
                           llvm::Type *returnType,
                           llvm::function_ref<void (IRGenFunction&, CanType)>
                             emit)
      -> llvm::Function * {
    // TODO: Use the standard metadata accessor when there are no arguments
    // and the metadata accessor is defined.

    // Build a stub that loads the necessary bindings from the key path's
    // argument buffer then fetches the metadata.
    auto fnTy = llvm::FunctionType::get(returnType,
                                        {Int8PtrTy}, /*vararg*/ false);
    auto accessorThunk = llvm::Function::Create(fnTy,
                                              llvm::GlobalValue::PrivateLinkage,
                                              name, getModule());
    accessorThunk->setAttributes(constructInitialAttributes());
    {
      IRGenFunction IGF(*this, accessorThunk);
      if (DebugInfo)
        DebugInfo->emitArtificialFunction(IGF, accessorThunk);

      if (type->hasTypeParameter()) {
        // Dependent type: pull the generic arguments out of the buffer.
        // (genericEnv is expected non-null whenever type parameters occur.)
        auto bindingsBufPtr = IGF.collectParameters().claimNext();

        bindFromGenericRequirementsBuffer(IGF, requirements,
          Address(bindingsBufPtr, getPointerAlignment()),
          [&](CanType t) {
            return genericEnv->mapTypeIntoContext(t)->getCanonicalType();
          });

        type = genericEnv->mapTypeIntoContext(type)->getCanonicalType();
      }
      emit(IGF, type);
    }
    return accessorThunk;
  };

  /// Generate a metadata accessor that produces metadata for the given type
  /// using arguments from the generic context of the key path.
  auto emitMetadataGenerator = [&](CanType type) -> llvm::Function * {
    // TODO: Use the standard metadata accessor when there are no arguments
    // and the metadata accessor is defined.
    return emitGenerator("keypath_get_type", type, TypeMetadataPtrTy,
      [&](IRGenFunction &IGF, CanType substType) {
        auto ret = IGF.emitTypeMetadataRef(substType);
        IGF.Builder.CreateRet(ret);
      });
  };

  /// Generate an accessor that produces the witness table for `type`'s
  /// `conformance`, substituted into the key path's generic context.
  auto emitWitnessTableGenerator =
    [&](CanType type,
        ProtocolConformanceRef conformance) -> llvm::Function * {
    // TODO: Use the standard conformance accessor when there are no arguments
    // and the conformance accessor is defined.
    return emitGenerator("keypath_get_witness_table", type, WitnessTablePtrTy,
      [&](IRGenFunction &IGF, CanType substType) {
        // `type` is the original (possibly dependent) type; substitute the
        // conformance only when it actually has parameters.
        if (type->hasTypeParameter())
          conformance = conformance.subst(type,
            QueryInterfaceTypeSubstitutions(genericEnv),
            LookUpConformanceInSignature(*genericEnv->getGenericSignature()));
        auto ret = emitWitnessTableRef(IGF, substType, conformance);
        IGF.Builder.CreateRet(ret);
      });
  };

  // Start building the key path pattern.
  ConstantInitBuilder builder(*this);
  ConstantStructBuilder fields = builder.beginStruct();
  fields.setPacked(true);
  // Add a zero-initialized header we can use for lazy initialization.
  fields.add(llvm::ConstantInt::get(SizeTy, 0));

#ifndef NDEBUG
  auto startOfObject = fields.getNextOffsetFromGlobal();
#endif

  // Store references to metadata generator functions to generate the metadata
  // for the root and leaf. These sit in the "isa" and object header parts of
  // the final object.
  fields.add(emitMetadataGenerator(rootTy));
  fields.add(emitMetadataGenerator(valueTy));

#ifndef NDEBUG
  // Sanity-check that the two accessor slots exactly overlay the heap
  // object header the runtime will install on instantiation.
  auto endOfObjectHeader = fields.getNextOffsetFromGlobal();
  unsigned expectedObjectHeaderSize;
  if (SizeTy == Int64Ty)
    expectedObjectHeaderSize = SWIFT_ABI_HEAP_OBJECT_HEADER_SIZE_64;
  else if (SizeTy == Int32Ty)
    expectedObjectHeaderSize = SWIFT_ABI_HEAP_OBJECT_HEADER_SIZE_32;
  else
    llvm_unreachable("unexpected pointer size");
  assert((endOfObjectHeader - startOfObject).getValue()
           == expectedObjectHeaderSize
         && "key path pattern header size doesn't match heap object header size");
#endif

  // Add a pointer to the ObjC KVC compatibility string, if there is one, or
  // null otherwise.
  llvm::Constant *objcString;
  if (!pattern->getObjCString().empty()) {
    objcString = getAddrOfGlobalString(pattern->getObjCString());
  } else {
    objcString = llvm::ConstantPointerNull::get(Int8PtrTy);
  }
  fields.add(objcString);

  // Leave a placeholder for the buffer header, since we need to know the full
  // buffer size to fill it in.
  auto headerPlaceholder = fields.addPlaceholderWithSize(Int32Ty);
  fields.addAlignmentPadding(getPointerAlignment());
  auto startOfKeyPathBuffer = fields.getNextOffsetFromGlobal();

  // Build out the components.
  auto baseTy = rootTy;

  // Each component record must begin pointer-aligned; checked (debug-only)
  // at the top of every loop iteration.
  auto assertPointerAlignment = [&]{
    assert(fields.getNextOffsetFromGlobal() % getPointerAlignment() == Size(0)
           && "must be pointer-aligned here");
  };

  // Collect the order and types of any captured index operands, which will
  // determine the layout of the buffer that gets passed to the initializer
  // for each component.
  SmallVector<KeyPathIndexOperand, 4> operands;
  operands.resize(pattern->getNumOperands());
  for (auto &component : pattern->getComponents()) {
    switch (component.getKind()) {
    case KeyPathPatternComponent::Kind::GettableProperty:
    case KeyPathPatternComponent::Kind::SettableProperty:
    case KeyPathPatternComponent::Kind::External:
      for (auto &index : component.getSubscriptIndices()) {
        operands[index.Operand].LoweredType = index.LoweredType;
        // Later components overwrite LastUser, so it ends up pointing at
        // the last component that uses each operand.
        operands[index.Operand].LastUser = &component;
      }
      break;
    case KeyPathPatternComponent::Kind::StoredProperty:
    case KeyPathPatternComponent::Kind::OptionalChain:
    case KeyPathPatternComponent::Kind::OptionalForce:
    case KeyPathPatternComponent::Kind::OptionalWrap:
      break;
    }
  }

  for (unsigned i : indices(pattern->getComponents())) {
    assertPointerAlignment();
    SILType loweredBaseTy;
    // Lower the component's base type inside the pattern's generic context.
    Lowering::GenericContextScope scope(getSILTypes(),
                                        pattern->getGenericSignature());
    loweredBaseTy = getLoweredType(AbstractionPattern::getOpaque(),
                                   baseTy->getWithoutSpecifierType());

    auto &component = pattern->getComponents()[i];
    switch (auto kind = component.getKind()) {
    case KeyPathPatternComponent::Kind::External: {
      // Reference to a property declared in another module: record its
      // descriptor plus accessors for all generic bindings it needs.
      fields.addInt32(KeyPathComponentHeader::forExternalComponent().getData());
      // Emit accessors for all of the external declaration's necessary
      // bindings.
      SmallVector<llvm::Constant*, 4> descriptorArgs;
      auto componentSig = component.getExternalDecl()->getInnermostDeclContext()
        ->getGenericSignatureOfContext();
      auto subs = componentSig->getSubstitutionMap(
        component.getExternalSubstitutions());
      enumerateGenericSignatureRequirements(
        componentSig->getCanonicalSignature(),
        [&](GenericRequirement reqt) {
          auto substType = reqt.TypeParameter.subst(subs)
            ->getCanonicalType();
          if (!reqt.Protocol) {
            // Type requirement.
            descriptorArgs.push_back(emitMetadataGenerator(substType));
          } else {
            // Protocol requirement.
            auto conformance = subs.lookupConformance(
              reqt.TypeParameter->getCanonicalType(), reqt.Protocol);
            descriptorArgs.push_back(emitWitnessTableGenerator(substType,
                                                               *conformance));
          }
        });
      // If instantiable in-place, pad out the argument count here to ensure
      // there's room enough to instantiate a settable computed property
      // with two captured words in-place. The runtime instantiation of the
      // external component will ignore the padding, and this will make in-place
      // instantiation more likely to avoid needing an allocation.
      unsigned argSize = descriptorArgs.size();
      if (isInstantiableInPlace) {
        argSize = std::max(argSize, 5u);
      }
      fields.addInt32(argSize);
      fields.add(getAddrOfPropertyDescriptor(component.getExternalDecl()));
      // Add an initializer function that copies generic arguments out of the
      // pattern argument buffer into the instantiated object, or null if there
      // are no arguments.
      if (component.getSubscriptIndices().empty())
        fields.addInt(IntPtrTy, 0);
      else
        fields.add(getInitializerForComputedComponent(*this, component,
                                                      operands, genericEnv,
                                                      requirements));
      // Add the generic arguments for the external context.
      for (auto arg : descriptorArgs)
        fields.add(arg);
      // Add padding.
      for (unsigned i = descriptorArgs.size(); i < argSize; ++i)
        fields.addInt(IntPtrTy, 0);
      break;
    }
    case KeyPathPatternComponent::Kind::StoredProperty: {
      auto property = cast<VarDecl>(component.getStoredPropertyDecl());

      // Encode a compile-time-known offset: inline in the header word when
      // small enough, otherwise out-of-line as a trailing i32.
      auto addFixedOffset = [&](bool isStruct, llvm::Constant *offset) {
        if (auto offsetInt = dyn_cast_or_null<llvm::ConstantInt>(offset)) {
          auto offsetValue = offsetInt->getValue().getZExtValue();
          if (KeyPathComponentHeader::offsetCanBeInline(offsetValue)) {
            auto header = isStruct
              ? KeyPathComponentHeader
                  ::forStructComponentWithInlineOffset(offsetValue)
              : KeyPathComponentHeader
                  ::forClassComponentWithInlineOffset(offsetValue);
            fields.addInt32(header.getData());
            return;
          }
        }
        auto header = isStruct
          ? KeyPathComponentHeader::forStructComponentWithOutOfLineOffset()
          : KeyPathComponentHeader::forClassComponentWithOutOfLineOffset();
        fields.addInt32(header.getData());
        fields.add(llvm::ConstantExpr::getTruncOrBitCast(offset, Int32Ty));
      };

      // For a struct stored property, we may know the fixed offset of the
      // field, or we may need to fetch it out of the type's metadata at
      // instantiation time.
      if (auto theStruct = loweredBaseTy.getStructOrBoundGenericStruct()) {
        if (auto offset = emitPhysicalStructMemberFixedOffset(*this,
                                                              loweredBaseTy,
                                                              property)) {
          // We have a known constant fixed offset.
          addFixedOffset(/*struct*/ true, offset);
          break;
        }

        // If the offset isn't fixed, try instead to get the field offset out
        // of the type metadata at instantiation time.
        auto &metadataLayout = getMetadataLayout(theStruct);
        auto fieldOffset = metadataLayout.getStaticFieldOffset(property);

        auto header = KeyPathComponentHeader
          ::forStructComponentWithUnresolvedFieldOffset();
        fields.addInt32(header.getData());
        fields.addInt32(fieldOffset.getValue());
        break;
      }

      // For a class, we may know the fixed offset of a field at compile time,
      // or we may need to fetch it at instantiation time. Depending on the
      // ObjC-ness and resilience of the class hierarchy, there might be a few
      // different ways we need to go about this.
      if (loweredBaseTy.getClassOrBoundGenericClass()) {
        switch (getClassFieldAccess(*this, loweredBaseTy, property)) {
        case FieldAccess::ConstantDirect: {
          // Known constant fixed offset.
          auto offset =
            tryEmitConstantClassFragilePhysicalMemberOffset(*this,
                                                            loweredBaseTy,
                                                            property);
          assert(offset && "no constant offset for ConstantDirect field?!");
          addFixedOffset(/*struct*/ false, offset);
          break;
        }
        case FieldAccess::NonConstantDirect: {
          // A constant offset that's determined at class realization time.
          // We have to load the offset from a global ivar.
          auto header = KeyPathComponentHeader
            ::forClassComponentWithUnresolvedIndirectOffset();
          fields.addInt32(header.getData());
          fields.addAlignmentPadding(getPointerAlignment());
          auto offsetVar = getAddrOfFieldOffset(property, NotForDefinition);
          fields.add(cast<llvm::Constant>(offsetVar.getAddress()));
          break;
        }
        case FieldAccess::ConstantIndirect: {
          // An offset that depends on the instance's generic parameterization,
          // but whose field offset is at a known vtable offset.
          auto header =
            KeyPathComponentHeader
              ::forClassComponentWithUnresolvedFieldOffset();
          fields.addInt32(header.getData());
          auto fieldOffset =
            getClassFieldOffsetOffset(*this,
                                  loweredBaseTy.getClassOrBoundGenericClass(),
                                  property);
          fields.addInt32(fieldOffset.getValue());
          break;
        }
        }
        break;
      }
      llvm_unreachable("not struct or class");
    }
    case KeyPathPatternComponent::Kind::GettableProperty:
    case KeyPathPatternComponent::Kind::SettableProperty: {
      // Encode the settability.
      bool settable = kind == KeyPathPatternComponent::Kind::SettableProperty;
      KeyPathComponentHeader::ComputedPropertyKind componentKind;
      if (settable) {
        componentKind = component.isComputedSettablePropertyMutating()
          ? KeyPathComponentHeader::SettableMutating
          : KeyPathComponentHeader::SettableNonmutating;
      } else {
        componentKind = KeyPathComponentHeader::GetOnly;
      }

      // Lower the id reference. The id is what the runtime uses to decide
      // whether two computed components are equivalent.
      auto id = component.getComputedPropertyId();
      KeyPathComponentHeader::ComputedPropertyIDKind idKind;
      llvm::Constant *idValue;
      bool idResolved;
      switch (id.getKind()) {
      case KeyPathPatternComponent::ComputedPropertyId::Function:
        // Identified by the getter function pointer itself.
        idKind = KeyPathComponentHeader::Pointer;
        idValue = getAddrOfSILFunction(id.getFunction(), NotForDefinition);
        idResolved = true;
        break;
      case KeyPathPatternComponent::ComputedPropertyId::DeclRef: {
        auto declRef = id.getDeclRef();

        // Foreign method refs identify using a selector
        // reference, which is doubly-indirected and filled in with a unique
        // pointer by dyld.
        if (declRef.isForeign) {
          assert(ObjCInterop && "foreign keypath component w/o objc interop?!");
          idKind = KeyPathComponentHeader::Pointer;
          idValue = getAddrOfObjCSelectorRef(declRef);
          // Unresolved: dyld fills the selector slot in at load time.
          idResolved = false;
        } else {
          // Swift dynamic dispatch: positive class-vtable offset or negative
          // protocol-witness-table index.
          idKind = KeyPathComponentHeader::VTableOffset;
          auto dc = declRef.getDecl()->getDeclContext();

          if (isa<ClassDecl>(dc) && !cast<ClassDecl>(dc)->isForeign()) {
            // Identify the method by the vtable slot of its least-derived
            // overridden entry, so overrides share an identity.
            auto overridden = declRef.getOverriddenVTableEntry();
            auto declaringClass =
              cast<ClassDecl>(overridden.getDecl()->getDeclContext());
            auto &metadataLayout = getClassMetadataLayout(declaringClass);
            // FIXME: Resilience. We don't want vtable layout to be ABI, so
            // this should be encoded as a reference to the method dispatch
            // thunk instead.
            auto offset = metadataLayout.getStaticMethodOffset(overridden);
            idValue = llvm::ConstantInt::get(SizeTy, offset.getValue());
            idResolved = true;
          } else if (auto methodProto = dyn_cast<ProtocolDecl>(dc)) {
            // FIXME: Resilience. We don't want witness table layout to be
            // ABI, so this should be encoded as a reference to the method
            // dispatch thunk instead.
            auto &protoInfo = getProtocolInfo(methodProto);
            auto index = protoInfo.getFunctionIndex(
                                 cast<AbstractFunctionDecl>(declRef.getDecl()));
            idValue = llvm::ConstantInt::get(SizeTy, -index.getValue());
            idResolved = true;
          } else {
            llvm_unreachable("neither a class nor protocol dynamic method?");
          }
        }
        break;
      }
      case KeyPathPatternComponent::ComputedPropertyId::Property:
        // Use the index of the stored property within the aggregate to key
        // the property.
        auto property = id.getProperty();
        idKind = KeyPathComponentHeader::StoredPropertyIndex;
        if (baseTy->getStructOrBoundGenericStruct()) {
          idResolved = true;
          Optional<unsigned> structIdx = getPhysicalStructFieldIndex(*this,
                           SILType::getPrimitiveAddressType(baseTy), property);
          assert(structIdx.hasValue() && "empty property");
          idValue = llvm::ConstantInt::get(SizeTy, structIdx.getValue());
        } else if (baseTy->getClassOrBoundGenericClass()) {
          // TODO: This field index would require runtime resolution with Swift
          // native class resilience. We never directly access ObjC-imported
          // ivars so we can disregard ObjC ivar resilience for this computation
          // and start counting at the Swift native root.
          switch (getClassFieldAccess(*this, loweredBaseTy, property)) {
          case FieldAccess::ConstantDirect:
          case FieldAccess::ConstantIndirect:
          case FieldAccess::NonConstantDirect:
            idResolved = true;
            idValue = llvm::ConstantInt::get(SizeTy,
              getClassFieldIndex(*this,
                           SILType::getPrimitiveAddressType(baseTy), property));
            break;
          }
        } else {
          llvm_unreachable("neither struct nor class");
        }
        break;
      }

      auto header = KeyPathComponentHeader::forComputedProperty(componentKind,
                                   idKind, !isInstantiableInPlace, idResolved);

      fields.addInt32(header.getData());
      fields.addAlignmentPadding(getPointerAlignment());
      fields.add(idValue);

      if (isInstantiableInPlace) {
        // No generic arguments or indexes, so we can invoke the
        // getter/setter as is.
        fields.add(getAddrOfSILFunction(component.getComputedPropertyGetter(),
                                        NotForDefinition));
        if (settable)
          fields.add(getAddrOfSILFunction(component.getComputedPropertySetter(),
                                          NotForDefinition));
      } else {
        // If there's generic context or subscript indexes, embed as
        // arguments in the component. Thunk the SIL-level accessors to give the
        // runtime implementation a polymorphically-callable interface.

        // Push the accessors, possibly thunked to marshal generic environment.
        fields.add(getAccessorForComputedComponent(*this, component, Getter,
                                                    genericEnv, requirements));
        if (settable)
          fields.add(getAccessorForComputedComponent(*this, component, Setter,
                                                    genericEnv, requirements));

        fields.add(getLayoutFunctionForComputedComponent(*this, component,
                                                    genericEnv, requirements));

        // Set up a "witness table" for the component that handles copying,
        // destroying, equating, and hashing the captured contents of the
        // component.
        // If there are only generic parameters, we can use a prefab witness
        // table from the runtime.
        // For subscripts we generate functions that dispatch out to
        // the copy/destroy/equals/hash functionality of the subscript indexes.
        fields.add(getWitnessTableForComputedComponent(*this, component,
                                                    genericEnv, requirements));

        // Add an initializer function that copies generic arguments out of the
        // pattern argument buffer into the instantiated object.
        fields.add(getInitializerForComputedComponent(*this, component,
                                                      operands,
                                                      genericEnv,
                                                      requirements));
      }
      break;
    }
    case KeyPathPatternComponent::Kind::OptionalChain:
      fields.addInt32(KeyPathComponentHeader::forOptionalChain().getData());
      break;
    case KeyPathPatternComponent::Kind::OptionalForce:
      fields.addInt32(KeyPathComponentHeader::forOptionalForce().getData());
      break;
    case KeyPathPatternComponent::Kind::OptionalWrap:
      fields.addInt32(KeyPathComponentHeader::forOptionalWrap().getData());
      break;
    }

    // For all but the last component, we pack in the type of the component.
    if (i + 1 != pattern->getComponents().size()) {
      fields.addAlignmentPadding(getPointerAlignment());
      fields.add(emitMetadataGenerator(component.getComponentType()));
    }
    baseTy = component.getComponentType();
  }

  // Save the total size of the buffer.
  Size componentSize = fields.getNextOffsetFromGlobal()
    - startOfKeyPathBuffer;

  // We now have enough info to build the header.
  KeyPathBufferHeader header(componentSize.getValue(), isInstantiableInPlace,
                             /*reference prefix*/ false);
  // Add the header, followed by the components.
  fields.fillPlaceholder(headerPlaceholder,
                         llvm::ConstantInt::get(Int32Ty, header.getData()));

  // Create the global variable.
  // TODO: The pattern could be immutable if
  // it isn't instantiable in place, and if we made the type metadata accessor
  // references private, it could go in true-const memory.
  auto patternVar = fields.finishAndCreateGlobal("keypath",
                                          getPointerAlignment(),
                                          /*constant*/ false,
                                          llvm::GlobalVariable::PrivateLinkage);
  // Cache so repeated requests for the same pattern share one global.
  KeyPathPatterns.insert({pattern, patternVar});
  return patternVar;
}