diff --git a/lib/SILOptimizer/SILCombiner/SILCombiner.h b/lib/SILOptimizer/SILCombiner/SILCombiner.h index cfd8207d089fe..6630010e5b7ae 100644 --- a/lib/SILOptimizer/SILCombiner/SILCombiner.h +++ b/lib/SILOptimizer/SILCombiner/SILCombiner.h @@ -246,8 +246,11 @@ class SILCombiner : bool tryOptimizeKeypath(ApplyInst *AI); bool tryOptimizeInoutKeypath(BeginApplyInst *AI); bool tryOptimizeKeypathApplication(ApplyInst *AI, SILFunction *callee); - bool tryOptimizeKeypathKVCString(ApplyInst *AI, SILDeclRef callee); - + bool tryOptimizeKeypathOffsetOf(ApplyInst *AI, FuncDecl *calleeFn, + KeyPathInst *kp); + bool tryOptimizeKeypathKVCString(ApplyInst *AI, FuncDecl *calleeFn, + KeyPathInst *kp); + // Optimize concatenation of string literals. // Constant-fold concatenation of string literals known at compile-time. SILInstruction *optimizeConcatenationOfStringLiterals(ApplyInst *AI); diff --git a/lib/SILOptimizer/SILCombiner/SILCombinerApplyVisitors.cpp b/lib/SILOptimizer/SILCombiner/SILCombinerApplyVisitors.cpp index 67090d1886a97..e71bba15b5f6c 100644 --- a/lib/SILOptimizer/SILCombiner/SILCombinerApplyVisitors.cpp +++ b/lib/SILOptimizer/SILCombiner/SILCombinerApplyVisitors.cpp @@ -271,6 +271,130 @@ bool SILCombiner::tryOptimizeKeypathApplication(ApplyInst *AI, return true; } +/// Replaces a call of the getter of AnyKeyPath._storedInlineOffset with a +/// "constant" offset, in case of a keypath literal. +/// +/// "Constant" offset means a series of struct_element_addr and +/// tuple_element_addr instructions with a 0-pointer as base address. +/// These instructions can then be lowered to "real" constants in IRGen for +/// concrete types, or to metatype offset lookups for generic or resilient types. +/// +/// Replaces: +/// %kp = keypath ... 
+/// %offset = apply %_storedInlineOffset_method(%kp)
+/// with:
+/// %zero = integer_literal $Builtin.Word, 0
+/// %null_ptr = unchecked_trivial_bit_cast %zero to $Builtin.RawPointer
+/// %null_addr = pointer_to_address %null_ptr
+/// %projected_addr = struct_element_addr %null_addr
+/// ... // other address projections
+/// %offset_ptr = address_to_pointer %projected_addr
+/// %offset_builtin_int = unchecked_trivial_bit_cast %offset_ptr
+/// %offset_int = struct $Int (%offset_builtin_int)
+/// %offset = enum $Optional<Int>, #Optional.some!enumelt, %offset_int
+bool SILCombiner::tryOptimizeKeypathOffsetOf(ApplyInst *AI,
+                                             FuncDecl *calleeFn,
+                                             KeyPathInst *kp) {
+  auto *accessor = dyn_cast<AccessorDecl>(calleeFn);
+  if (!accessor || !accessor->isGetter())
+    return false;
+
+  AbstractStorageDecl *storage = accessor->getStorage();
+  DeclName name = storage->getName();
+  if (!name.isSimpleName() ||
+      (name.getBaseIdentifier().str() != "_storedInlineOffset"))
+    return false;
+
+  KeyPathPattern *pattern = kp->getPattern();
+  SubstitutionMap patternSubs = kp->getSubstitutions();
+  CanType rootTy = pattern->getRootType().subst(patternSubs)->getCanonicalType();
+  CanType parentTy = rootTy;
+
+  // First check if _storedInlineOffset would return an offset or nil. Basically
+  // only stored struct and tuple elements produce an offset. Everything else
+  // (e.g. computed properties, class properties) result in nil.
+  bool hasOffset = true;
+  for (const KeyPathPatternComponent &component : pattern->getComponents()) {
+    switch (component.getKind()) {
+    case KeyPathPatternComponent::Kind::StoredProperty: {
+
+      // Handle the special case of C tail-allocated arrays. IRGen would
+      // generate an undef offset for struct_element_addr of C tail-allocated
+      // arrays.
+      VarDecl *propDecl = component.getStoredPropertyDecl();
+      if (propDecl->hasClangNode() && propDecl->getType()->isVoid())
+        return false;
+
+      if (!parentTy.getStructOrBoundGenericStruct())
+        hasOffset = false;
+      break;
+    }
+    case KeyPathPatternComponent::Kind::TupleElement:
+      break;
+    case KeyPathPatternComponent::Kind::GettableProperty:
+    case KeyPathPatternComponent::Kind::SettableProperty:
+      // We cannot predict the offset of fields in resilient types, because it's
+      // unknown if a resilient field is a computed or stored property.
+      if (component.getExternalDecl())
+        return false;
+      hasOffset = false;
+      break;
+    case KeyPathPatternComponent::Kind::OptionalChain:
+    case KeyPathPatternComponent::Kind::OptionalForce:
+    case KeyPathPatternComponent::Kind::OptionalWrap:
+      hasOffset = false;
+      break;
+    }
+    parentTy = component.getComponentType();
+  }
+
+  SILLocation loc = AI->getLoc();
+  SILValue result;
+
+  if (hasOffset) {
+    SILType rootAddrTy = SILType::getPrimitiveAddressType(rootTy);
+    SILValue rootAddr = Builder.createBaseAddrForOffset(loc, rootAddrTy);
+
+    auto projector = KeyPathProjector::create(kp, rootAddr, loc, Builder);
+    if (!projector)
+      return false;
+
+    // Create the address projections of the keypath.
+    SILType ptrType = SILType::getRawPointerType(Builder.getASTContext());
+    SILValue offsetPtr;
+    projector->project(KeyPathProjector::AccessType::Get, [&](SILValue addr) {
+      offsetPtr = Builder.createAddressToPointer(loc, addr, ptrType);
+    });
+
+    // The result of the _storedInlineOffset call should be Optional<Int>. If
+    // not, something is wrong with the stdlib. Anyway, if it's not like we
+    // expect, bail.
+    SILType intType = AI->getType().getOptionalObjectType();
+    if (!intType)
+      return false;
+    StructDecl *intDecl = intType.getStructOrBoundGenericStruct();
+    if (!intDecl || intDecl->getStoredProperties().size() != 1)
+      return false;
+    VarDecl *member = intDecl->getStoredProperties()[0];
+    CanType builtinIntTy = member->getType()->getCanonicalType();
+    if (!isa<BuiltinIntegerType>(builtinIntTy))
+      return false;
+
+    // Convert the projected address back to an optional integer.
+    SILValue offset = Builder.createUncheckedBitCast(loc, offsetPtr,
+                                SILType::getPrimitiveObjectType(builtinIntTy));
+    SILValue offsetInt = Builder.createStruct(loc, intType, { offset });
+    result = Builder.createOptionalSome(loc, offsetInt, AI->getType());
+  } else {
+    // The keypath has no offset.
+    result = Builder.createOptionalNone(loc, AI->getType());
+  }
+  AI->replaceAllUsesWith(result);
+  eraseInstFromFunction(*AI);
+  ++NumOptimizedKeypaths;
+  return true;
+}
+
 /// Try to optimize a keypath KVC string access on a literal key path.
 ///
 /// Replace:
@@ -279,17 +403,8 @@ bool SILCombiner::tryOptimizeKeypathApplication(ApplyInst *AI,
 /// With:
 ///   %string = string_literal "blah"
 bool SILCombiner::tryOptimizeKeypathKVCString(ApplyInst *AI,
-                                              SILDeclRef callee) {
-  if (AI->getNumArguments() != 1) {
-    return false;
-  }
-  if (!callee.hasDecl()) {
-    return false;
-  }
-  auto calleeFn = dyn_cast<FuncDecl>(callee.getDecl());
-  if (!calleeFn)
-    return false;
-
+                                              FuncDecl *calleeFn,
+                                              KeyPathInst *kp) {
   if (!calleeFn->getAttrs()
         .hasSemanticsAttr(semantics::KEYPATH_KVC_KEY_PATH_STRING))
     return false;
@@ -300,11 +415,6 @@ bool SILCombiner::tryOptimizeKeypathKVCString(ApplyInst *AI,
   if (!objTy || objTy.getStructOrBoundGenericStruct() != C.getStringDecl())
     return false;
 
-  KeyPathInst *kp
-    = KeyPathProjector::getLiteralKeyPath(AI->getArgument(0));
-  if (!kp || !kp->hasPattern())
-    return false;
-
   auto objcString = kp->getPattern()->getObjCString();
 
   SILValue literalValue;
@@ -357,10 +467,33 @@ bool SILCombiner::tryOptimizeKeypath(ApplyInst *AI) {
     return tryOptimizeKeypathApplication(AI, callee);
   }
-  if (auto method = dyn_cast<ClassMethodInst>(AI->getCallee())) {
-    return tryOptimizeKeypathKVCString(AI, method->getMember());
-  }
-
+  // Try to optimize keypath method calls.
+  auto *methodInst = dyn_cast<ClassMethodInst>(AI->getCallee());
+  if (!methodInst)
+    return false;
+
+  if (AI->getNumArguments() != 1) {
+    return false;
+  }
+
+  SILDeclRef callee = methodInst->getMember();
+  if (!callee.hasDecl()) {
+    return false;
+  }
+  auto *calleeFn = dyn_cast<FuncDecl>(callee.getDecl());
+  if (!calleeFn)
+    return false;
+
+  KeyPathInst *kp = KeyPathProjector::getLiteralKeyPath(AI->getArgument(0));
+  if (!kp || !kp->hasPattern())
+    return false;
+
+  if (tryOptimizeKeypathOffsetOf(AI, calleeFn, kp))
+    return true;
+
+  if (tryOptimizeKeypathKVCString(AI, calleeFn, kp))
+    return true;
+
   return false;
 }
diff --git a/test/SILOptimizer/Inputs/struct_with_fields.swift b/test/SILOptimizer/Inputs/struct_with_fields.swift
new file mode 100644
index 0000000000000..7652d39100139
--- /dev/null
+++ b/test/SILOptimizer/Inputs/struct_with_fields.swift
@@ -0,0 +1,5 @@
+
+public struct TestStruct {
+  public var x: Int
+  public var y: Int
+}
diff --git a/test/SILOptimizer/keypath_offset.swift b/test/SILOptimizer/keypath_offset.swift
new file mode 100644
index 0000000000000..bb0ffca7249f1
--- /dev/null
+++ b/test/SILOptimizer/keypath_offset.swift
@@ -0,0 +1,197 @@
+// RUN: %empty-directory(%t)
+// RUN: %target-build-swift %S/Inputs/struct_with_fields.swift -parse-as-library -wmo -enable-library-evolution -module-name=Test -emit-module -emit-module-path=%t/Test.swiftmodule -c -o %t/test.o
+
+// RUN: %target-build-swift -O %s -module-name=test -Xfrontend -sil-verify-all -I%t -emit-sil | %FileCheck %s
+
+// RUN: %target-build-swift -Onone %s -I%t %t/test.o -o %t/Onone.out
+// RUN: %target-build-swift -O %s -I%t %t/test.o -o %t/O.out
+// RUN: %target-run %t/Onone.out > %t/Onone.txt
+// RUN: %target-run %t/O.out > %t/O.txt
+// RUN: diff %t/Onone.txt %t/O.txt
+
+// REQUIRES: executable_test,swift_stdlib_no_asserts,optimized_stdlib
+
+import Test
+
+final class C<T> {
+  var x: Int
+  var z: T
+  let immutable: String
+  private(set) var secretlyMutable: String
+
+  init(x: Int, z: T) {
+    self.x = x
+    self.z = z
+    self.immutable = "somestring"
+    self.secretlyMutable = immutable
+  }
+}
+
+struct Point {
+  var x: Double
+  var y: Double
+}
+
+struct S<T> {
+  var x: Int
+  var y: Int?
+  var z: T
+  var p: Point
+  var op: Point?
+  var c: C<T>
+}
+
+struct NonOffsetableProperties {
+  // observers
+  var x: Int { didSet {} }
+  // reabstracted
+  var y: () -> ()
+  // computed
+  var z: Int { return 0 }
+}
+
+struct TupleProperties {
+  // unlabeled
+  var a: (Int, String)
+  // labeled
+  let b: (x: String, y: Int)
+  // reference writable
+  let c: (m: C<Int>, n: C<Int>)
+}
+
+typealias Tuple<T, U> = (S<T>, C<U>)
+
+func getIdentityKeyPathOfType<T>(_: T.Type) -> KeyPath<T, T> {
+  return \.self
+}
+
+
+@inline(never)
+func printOffset(_ o: Int?) {
+  print(o as Any)
+}
+
+// CHECK-LABEL: sil {{.*}} @$s4test0A13StructOffsetsyyF
+// CHECK-NOT: _storedInlineOffset
+// CHECK-NOT: class_method
+// CHECK: } // end sil function '$s4test0A13StructOffsetsyyF'
+@inline(never)
+func testStructOffsets() {
+  let SLayout = MemoryLayout<S<Int>>.self
+  printOffset(SLayout.offset(of: \S<Int>.x))
+  printOffset(SLayout.offset(of: \S<Int>.y))
+  printOffset(SLayout.offset(of: \S<Int>.z))
+  printOffset(SLayout.offset(of: \S<Int>.p))
+  printOffset(SLayout.offset(of: \S<Int>.p.x))
+  printOffset(SLayout.offset(of: \S<Int>.p.y))
+  printOffset(SLayout.offset(of: \S<Int>.c))
+}
+
+// CHECK-LABEL: sil {{.*}} @$s4test0A20GenericStructOffsetsyyxmlF
+// CHECK-NOT: _storedInlineOffset
+// CHECK-NOT: class_method
+// CHECK: } // end sil function '$s4test0A20GenericStructOffsetsyyxmlF'
+@inline(never)
+@_semantics("optimize.sil.specialize.generic.never")
+func testGenericStructOffsets<T>(_ t: T.Type) {
+  let SLayout = MemoryLayout<S<T>>.self
+  printOffset(SLayout.offset(of: \S<T>.x))
+  printOffset(SLayout.offset(of: \S<T>.y))
+  printOffset(SLayout.offset(of: \S<T>.z))
+  printOffset(SLayout.offset(of: \S<T>.p))
+  printOffset(SLayout.offset(of: \S<T>.p.x))
+  printOffset(SLayout.offset(of: \S<T>.p.y))
+  printOffset(SLayout.offset(of: \S<T>.c))
+}
+
+// CHECK-LABEL: sil {{.*}} @$s4test0A10NonOffsetsyyF
+// CHECK-NOT: _storedInlineOffset
+// CHECK-NOT: class_method
+// CHECK: } // end sil function '$s4test0A10NonOffsetsyyF'
+@inline(never)
+func testNonOffsets() {
+  let NOPLayout = MemoryLayout<NonOffsetableProperties>.self
+  printOffset(NOPLayout.offset(of: \NonOffsetableProperties.x))
+  printOffset(NOPLayout.offset(of: \NonOffsetableProperties.y))
+  printOffset(NOPLayout.offset(of: \NonOffsetableProperties.z))
+  printOffset(MemoryLayout<C<Int>>.offset(of: \C<Int>.x))
+  let SLayout = MemoryLayout<S<Int>>.self
+  printOffset(SLayout.offset(of: \S<Int>.c.x))
+  printOffset(SLayout.offset(of: \S<Int>.op!.x))
+  printOffset(SLayout.offset(of: \S<Int>.op?.x))
+}
+
+// CHECK-LABEL: sil {{.*}} @$s4test0A11SelfOffsetsyyF
+// CHECK-NOT: _storedInlineOffset
+// CHECK-NOT: class_method
+// CHECK: } // end sil function '$s4test0A11SelfOffsetsyyF'
+@inline(never)
+func testSelfOffsets() {
+  let SLayout = MemoryLayout<S<Int>>.self
+  printOffset(SLayout.offset(of: \.self))
+  printOffset(SLayout.offset(of: getIdentityKeyPathOfType(S<Int>.self)))
+}
+
+// CHECK-LABEL: sil {{.*}} @$s4test0A12TupleOffsetsyyF
+// CHECK-NOT: _storedInlineOffset
+// CHECK-NOT: class_method
+// CHECK: } // end sil function '$s4test0A12TupleOffsetsyyF'
+@inline(never)
+func testTupleOffsets() {
+  let TPLayout = MemoryLayout<TupleProperties>.self
+  printOffset(TPLayout.offset(of: \TupleProperties.self))
+  printOffset(TPLayout.offset(of: \TupleProperties.a))
+  printOffset(TPLayout.offset(of: \TupleProperties.a.0))
+  printOffset(TPLayout.offset(of: \TupleProperties.a.1))
+  printOffset(TPLayout.offset(of: \TupleProperties.b))
+  printOffset(TPLayout.offset(of: \TupleProperties.b.x))
+  printOffset(TPLayout.offset(of: \TupleProperties.b.y))
+  printOffset(TPLayout.offset(of: \TupleProperties.c))
+  printOffset(TPLayout.offset(of: \TupleProperties.c.m))
+  printOffset(TPLayout.offset(of: \TupleProperties.c.n))
+
+  let TLayout = MemoryLayout<Tuple<Int, Int>>.self
+  printOffset(TLayout.offset(of: \Tuple<Int, Int>.self))
+  printOffset(TLayout.offset(of: \Tuple<Int, Int>.0))
+  printOffset(TLayout.offset(of: \Tuple<Int, Int>.0.x))
+  printOffset(TLayout.offset(of: \Tuple<Int, Int>.1))
+}
+
+// CHECK-LABEL: sil {{.*}} @$s4test0A19GenericTupleOffsetsyyxmlF
+// CHECK-NOT: _storedInlineOffset
+// CHECK-NOT: class_method
+// CHECK: } // end sil function '$s4test0A19GenericTupleOffsetsyyxmlF'
+@inline(never)
+@_semantics("optimize.sil.specialize.generic.never")
+func testGenericTupleOffsets<T>(_ t: T.Type) {
+  let TLayout = MemoryLayout<Tuple<T, T>>.self
+  printOffset(TLayout.offset(of: \Tuple<T, T>.self))
+  printOffset(TLayout.offset(of: \Tuple<T, T>.0))
+  printOffset(TLayout.offset(of: \Tuple<T, T>.0.x))
+  printOffset(TLayout.offset(of: \Tuple<T, T>.1))
+}
+
+// CHECK-LABEL: sil {{.*}} @$s4test0A16ResilientOffsetsyyF
+// CHECK: class_method {{.*}}_storedInlineOffset
+// CHECK: } // end sil function '$s4test0A16ResilientOffsetsyyF'
+@inline(never)
+func testResilientOffsets() {
+  let TLayout = MemoryLayout<TestStruct>.self
+  printOffset(TLayout.offset(of: \TestStruct.x))
+}
+
+print("### testStructOffsets")
+testStructOffsets()
+print("### testGenericStructOffsets")
+testGenericStructOffsets(Int.self)
+print("### testNonOffsets")
+testNonOffsets()
+print("### testSelfOffsets")
+testSelfOffsets()
+print("### testTupleOffsets")
+testTupleOffsets()
+print("### testGenericTupleOffsets")
+testGenericTupleOffsets(Int.self)
+print("### testResilientOffsets")
+testResilientOffsets()