diff --git a/SwiftCompilerSources/Sources/Optimizer/ModulePasses/StackProtection.swift b/SwiftCompilerSources/Sources/Optimizer/ModulePasses/StackProtection.swift index 18f1e165b56af..ad7389a8ed892 100644 --- a/SwiftCompilerSources/Sources/Optimizer/ModulePasses/StackProtection.swift +++ b/SwiftCompilerSources/Sources/Optimizer/ModulePasses/StackProtection.swift @@ -28,16 +28,16 @@ private func log(_ message: @autoclosure () -> String) { /// Within safe swift code there shouldn't be any buffer overflows. But if the address /// of a stack variable is converted to an unsafe pointer, it's not in the control of /// the compiler anymore. -/// This means, if there is any `address_to_pointer` instruction for an `alloc_stack`, -/// such a function is marked for stack protection. -/// Another case is `index_addr` for non-tail allocated memory. This pattern appears if -/// pointer arithmetic is done with unsafe pointers in swift code. +/// This means, if an `alloc_stack` ends up at an `address_to_pointer [stack_protection]`, +/// the `alloc_stack`'s function is marked for stack protection. +/// Another case is `index_addr [stack_protection]` for non-tail allocated memory. This +/// pattern appears if pointer arithmetic is done with unsafe pointers in swift code. /// /// If the origin of an unsafe pointer can only be tracked to a function argument, the /// pass tries to find the root stack allocation for such an argument by doing an -/// inter-procedural analysis. If this is not possible, the fallback is to move the -/// argument into a temporary `alloc_stack` and do the unsafe pointer operations on -/// the temporary. +/// inter-procedural analysis. If this is not possible and the `enableMoveInoutStackProtection` +/// option is set, the fallback is to move the argument into a temporary `alloc_stack` +/// and do the unsafe pointer operations on the temporary. 
let stackProtection = ModulePass(name: "stack-protection", { (context: ModulePassContext) in @@ -45,7 +45,7 @@ let stackProtection = ModulePass(name: "stack-protection", { return } - var optimization = StackProtectionOptimization() + var optimization = StackProtectionOptimization(enableMoveInout: context.options.enableMoveInoutStackProtection) optimization.processModule(context) }) @@ -60,13 +60,15 @@ let functionStackProtection = FunctionPass(name: "function-stack-protection", { return } - var optimization = StackProtectionOptimization() + var optimization = StackProtectionOptimization(enableMoveInout: context.options.enableMoveInoutStackProtection) optimization.process(function: function, context) }) /// The optimization algorithm. private struct StackProtectionOptimization { + private let enableMoveInout: Bool + // The following members are nil/not used if this utility is used on function-level. private var moduleContext: ModulePassContext? @@ -76,7 +78,11 @@ private struct StackProtectionOptimization { // Functions (other than the currently processed one) which need stack protection, // are added to this array in `findOriginsInCallers`. private var needStackProtection: [Function] = [] - + + init(enableMoveInout: Bool) { + self.enableMoveInout = enableMoveInout + } + /// The main entry point if running on module-level. mutating func processModule(_ moduleContext: ModulePassContext) { self.moduleContext = moduleContext @@ -147,6 +153,8 @@ private struct StackProtectionOptimization { case .yes: // For example: // %baseAddr = alloc_stack $T + log("local: \(function.name) -- \(instruction)") + function.setNeedsStackProtection(context) case .decidedInCaller(let arg): @@ -157,7 +165,7 @@ private struct StackProtectionOptimization { defer { worklist.deinitialize() } worklist.push(arg) - if !findOriginsInCallers(&worklist) { + if findOriginsInCallers(&worklist) == NeedInsertMoves.yes { // We don't know the origin of the function argument. 
Therefore we need to do the // conservative default which is to move the value to a temporary stack location. if let beginAccess = scope { @@ -179,22 +187,19 @@ private struct StackProtectionOptimization { // If the object is passed as an argument to its function, add those arguments // to the worklist. - switch worklist.push(rootsOf: obj) { - case .failed: - // If we cannot find the roots, the object is most likely not stack allocated. - return - case .succeeded(let foundStackAlloc): - if foundStackAlloc { - // The object is created by an `alloc_ref [stack]`. - function.setNeedsStackProtection(context) - } + let (_, foundStackAlloc) = worklist.push(rootsOf: obj) + if foundStackAlloc { + // The object is created by an `alloc_ref [stack]`. + log("objectIfStackPromoted: \(function.name) -- \(instruction)") + + function.setNeedsStackProtection(context) } // In case the (potentially) stack allocated object is passed via an argument, // process the worklist as we do for indirect arguments (see above). // For example: // bb0(%0: $Class): // %baseAddr = ref_element_addr %0 : $Class, #Class.field - if !findOriginsInCallers(&worklist), + if findOriginsInCallers(&worklist) == NeedInsertMoves.yes, let beginAccess = scope { // We don't know the origin of the object. Therefore we need to do the // conservative default which is to move the value to a temporary stack location. @@ -206,15 +211,26 @@ private struct StackProtectionOptimization { break } } - + + /// Return value of `findOriginsInCallers()`. + enum NeedInsertMoves { + // Not all call sites could be identified, and if moves are enabled (`enableMoveInout`) + // the original argument should be moved to a temporary. + case yes + + // Either all call sites could be identified, which means that stack protection is done + // in the callers, or moves are not enabled (`enableMoveInout` is false). + case no + } + /// Find all origins of function arguments in `worklist`. 
/// All functions, which allocate such an origin are added to `self.needStackProtection`. /// Returns true if all origins could be found and false, if there are unknown origins. - private mutating func findOriginsInCallers(_ worklist: inout ArgumentWorklist) -> Bool { + private mutating func findOriginsInCallers(_ worklist: inout ArgumentWorklist) -> NeedInsertMoves { guard let moduleContext = moduleContext else { // Don't do any inter-procedural analysis when used on function-level. - return false + return enableMoveInout ? .yes : .no } // Put the resulting functions into a temporary array, because we only add them to @@ -230,21 +246,25 @@ private struct StackProtectionOptimization { while let arg = worklist.pop() { let f = arg.function let uses = functionUses.getUses(of: f) - if uses.hasUnknownUses { - return false + if uses.hasUnknownUses && enableMoveInout { + return NeedInsertMoves.yes } for useInst in uses { guard let fri = useInst as? FunctionRefInst else { - return false + if enableMoveInout { + return NeedInsertMoves.yes + } + continue } for functionRefUse in fri.uses { - guard let apply = functionRefUse.instruction as? ApplySite else { - return false - } - guard let callerArgIdx = apply.callerArgIndex(calleeArgIndex: arg.index) else { - return false + guard let apply = functionRefUse.instruction as? 
ApplySite, + let callerArgIdx = apply.callerArgIndex(calleeArgIndex: arg.index) else { + if enableMoveInout { + return NeedInsertMoves.yes + } + continue } let callerArg = apply.arguments[callerArgIdx] if callerArg.type.isAddress { @@ -252,6 +272,7 @@ private struct StackProtectionOptimization { switch callerArg.accessBase.isStackAllocated { case .yes: if !callerArg.function.needsStackProtection { + log("alloc_stack in caller: \(callerArg.function.name) -- \(callerArg)") newFunctions.push(callerArg.function) } case .no: @@ -266,36 +287,38 @@ private struct StackProtectionOptimization { case .objectIfStackPromoted(let obj): // If the object is passed as an argument to its function, // add those arguments to the worklist. - switch worklist.push(rootsOf: obj) { - case .failed: - return false - case .succeeded(let foundStackAlloc): - if foundStackAlloc && !obj.function.needsStackProtection { - // The object is created by an `alloc_ref [stack]`. - newFunctions.push(obj.function) - } + let (foundUnknownRoots, foundStackAlloc) = worklist.push(rootsOf: obj) + if foundUnknownRoots && enableMoveInout { + return NeedInsertMoves.yes + } + if foundStackAlloc && !obj.function.needsStackProtection { + // The object is created by an `alloc_ref [stack]`. + log("object in caller: \(obj.function.name) -- \(obj)") + newFunctions.push(obj.function) } case .unknown: - return false + if enableMoveInout { + return NeedInsertMoves.yes + } } } else { // The argument is an object. If the object is itself passed as an argument // to its function, add those arguments to the worklist. - switch worklist.push(rootsOf: callerArg) { - case .failed: - return false - case .succeeded(let foundStackAlloc): - if foundStackAlloc && !callerArg.function.needsStackProtection { - // The object is created by an `alloc_ref [stack]`. 
-          newFunctions.push(callerArg.function)
-        }
+        let (foundUnknownRoots, foundStackAlloc) = worklist.push(rootsOf: callerArg)
+        if foundUnknownRoots && enableMoveInout {
+          return NeedInsertMoves.yes
+        }
+        if foundStackAlloc && !callerArg.function.needsStackProtection {
+          // The object is created by an `alloc_ref [stack]`.
+          log("object arg in caller: \(callerArg.function.name) -- \(callerArg)")
+          newFunctions.push(callerArg.function)
+        }
        }
      }
    }
    needStackProtection.append(contentsOf: newFunctions)
-    return true
+    return NeedInsertMoves.no
  }

  /// Moves the value of an indirect argument to a temporary stack location, if possible.
@@ -366,9 +389,16 @@ private struct StackProtectionOptimization {
 
 /// Worklist for inter-procedural analysis of function arguments.
 private struct ArgumentWorklist : ValueUseDefWalker {
   var walkUpCache = WalkerCache<SmallProjectionPath>()
+
+  // Used in `push(rootsOf:)`
   private var foundStackAlloc = false
+  private var foundUnknownRoots = false
 
+  // Contains arguments which are already handled and don't need to be put into the worklist again.
+  // Note that this cannot be a `ValueSet`, because arguments can be from different functions.
   private var handled = Set<FunctionArgument>()
+
+  // The actual worklist.
   private var list: Stack<FunctionArgument>
 
   init(_ context: PassContext) {
@@ -385,21 +415,15 @@ private struct ArgumentWorklist : ValueUseDefWalker {
     }
   }
 
-  enum PushResult {
-    case failed
-    case succeeded(foundStackAlloc: Bool)
-  }
-
   /// Pushes all roots of `object`, which are function arguments, to the worklist.
-  /// Returns `.succeeded(true)` if some of the roots are `alloc_ref [stack]` instructions.
+  /// If the returned `foundUnknownRoots` is true, it means that not all roots of `object` could
+  /// be tracked to a function argument.
+  /// If the returned `foundStackAlloc` is true, then at least one found root is an `alloc_ref [stack]`.
+ mutating func push(rootsOf object: Value) -> (foundUnknownRoots: Bool, foundStackAlloc: Bool) { foundStackAlloc = false - switch walkUp(value: object, path: SmallProjectionPath(.anything)) { - case .continueWalk: - return .succeeded(foundStackAlloc: foundStackAlloc) - case .abortWalk: - return .failed - } + foundUnknownRoots = false + _ = walkUp(value: object, path: SmallProjectionPath(.anything)) + return (foundUnknownRoots, foundStackAlloc) } mutating func pop() -> FunctionArgument? { @@ -413,15 +437,12 @@ private struct ArgumentWorklist : ValueUseDefWalker { if ar.canAllocOnStack { foundStackAlloc = true } - return .continueWalk case let arg as FunctionArgument: - if handled.insert(arg).0 { - list.push(arg) - } - return .continueWalk - default: - return .abortWalk + push(arg) + default: + foundUnknownRoots = true } + return .continueWalk } } @@ -491,7 +512,6 @@ private extension Instruction { private extension Function { func setNeedsStackProtection(_ context: PassContext) { if !needsStackProtection { - log("needs protection: \(name)") set(needStackProtection: true, context) } } diff --git a/SwiftCompilerSources/Sources/Optimizer/PassManager/Options.swift b/SwiftCompilerSources/Sources/Optimizer/PassManager/Options.swift index c616ec9e0a8fa..af9b0f74b91e5 100644 --- a/SwiftCompilerSources/Sources/Optimizer/PassManager/Options.swift +++ b/SwiftCompilerSources/Sources/Optimizer/PassManager/Options.swift @@ -18,4 +18,8 @@ struct Options { var enableStackProtection: Bool { SILOptions_enableStackProtection(_bridged) != 0 } + + var enableMoveInoutStackProtection: Bool { + SILOptions_enableMoveInoutStackProtection(_bridged) != 0 + } } diff --git a/include/swift/AST/SILOptions.h b/include/swift/AST/SILOptions.h index 291037babd89f..c28b0e1a07592 100644 --- a/include/swift/AST/SILOptions.h +++ b/include/swift/AST/SILOptions.h @@ -129,7 +129,11 @@ class SILOptions { bool EnablePerformanceAnnotations = false; /// Enables the emission of stack protectors in functions. 
- bool EnableStackProtection = false; + bool EnableStackProtection = true; + + /// Like `EnableStackProtection` and also enables moving of values to + /// temporaries for stack protection. + bool EnableMoveInoutStackProtection = false; /// Controls whether or not paranoid verification checks are run. bool VerifyAll = false; diff --git a/include/swift/Option/FrontendOptions.td b/include/swift/Option/FrontendOptions.td index 1213c70db75be..f5f7d657aa571 100644 --- a/include/swift/Option/FrontendOptions.td +++ b/include/swift/Option/FrontendOptions.td @@ -1112,6 +1112,9 @@ def enable_stack_protector : def disable_stack_protector : Flag<["-"], "disable-stack-protector">, HelpText<"Disable the stack-protector">; +def enable_move_inout_stack_protector : + Flag<["-"], "enable-move-inout-stack-protector">, + HelpText<"Enable the stack protector by moving values to temporaries">; def enable_new_llvm_pass_manager : Flag<["-"], "enable-new-llvm-pass-manager">, diff --git a/include/swift/SILOptimizer/OptimizerBridging.h b/include/swift/SILOptimizer/OptimizerBridging.h index b4680cf713092..b0747d868ff96 100644 --- a/include/swift/SILOptimizer/OptimizerBridging.h +++ b/include/swift/SILOptimizer/OptimizerBridging.h @@ -184,6 +184,7 @@ OptionalBridgedFunction PassContext_loadFunction(BridgedPassContext context, llvm::StringRef name); SwiftInt SILOptions_enableStackProtection(BridgedPassContext context); +SwiftInt SILOptions_enableMoveInoutStackProtection(BridgedPassContext context); SWIFT_END_NULLABILITY_ANNOTATIONS diff --git a/lib/Frontend/CompilerInvocation.cpp b/lib/Frontend/CompilerInvocation.cpp index c7e6a28cefa1f..3bc7af3497a4f 100644 --- a/lib/Frontend/CompilerInvocation.cpp +++ b/lib/Frontend/CompilerInvocation.cpp @@ -1831,6 +1831,9 @@ static bool ParseSILArgs(SILOptions &Opts, ArgList &Args, Opts.EnableStackProtection = Args.hasFlag(OPT_enable_stack_protector, OPT_disable_stack_protector, Opts.EnableStackProtection); + Opts.EnableMoveInoutStackProtection = + 
Args.hasFlag(OPT_enable_move_inout_stack_protector, OPT_disable_stack_protector, + Opts.EnableMoveInoutStackProtection); Opts.VerifyAll |= Args.hasArg(OPT_sil_verify_all); Opts.VerifyNone |= Args.hasArg(OPT_sil_verify_none); Opts.DebugSerialization |= Args.hasArg(OPT_sil_debug_serialization); diff --git a/lib/SILOptimizer/PassManager/PassManager.cpp b/lib/SILOptimizer/PassManager/PassManager.cpp index 43308c2d6e6c5..93b267b249126 100644 --- a/lib/SILOptimizer/PassManager/PassManager.cpp +++ b/lib/SILOptimizer/PassManager/PassManager.cpp @@ -1638,3 +1638,8 @@ SwiftInt SILOptions_enableStackProtection(BridgedPassContext context) { SILModule *mod = castToPassInvocation(context)->getPassManager()->getModule(); return mod->getOptions().EnableStackProtection; } + +SwiftInt SILOptions_enableMoveInoutStackProtection(BridgedPassContext context) { + SILModule *mod = castToPassInvocation(context)->getPassManager()->getModule(); + return mod->getOptions().EnableMoveInoutStackProtection; +} diff --git a/test/SILOptimizer/stack_protection.sil b/test/SILOptimizer/stack_protection.sil index 924e352939735..26309dde4275e 100644 --- a/test/SILOptimizer/stack_protection.sil +++ b/test/SILOptimizer/stack_protection.sil @@ -1,5 +1,6 @@ // RUN: %target-sil-opt -wmo -stack-protection -enable-sil-verify-all %s | %FileCheck %s --check-prefix=CHECK --check-prefix=MODULE // RUN: %target-sil-opt -function-stack-protection -enable-sil-verify-all %s | %FileCheck %s --check-prefix=CHECK --check-prefix=FUNCTION +// RUN: %target-sil-opt -wmo -stack-protection -enable-move-inout-stack-protector -enable-sil-verify-all %s | %FileCheck %s --check-prefix=CHECK --check-prefix=MOVE // REQUIRES: swift_in_compiler @@ -57,8 +58,7 @@ bb0: return %r : $() } // CHECK-LABEL: sil [stack_protection] @function_local_stack_obj -// MODULE-NOT: copy_addr -// FUNCTION: copy_addr +// CHECK-NOT: copy_addr // CHECK: } // end sil function 'function_local_stack_obj' sil @function_local_stack_obj : $@convention(thin) () -> 
() {
bb0:
@@ -72,11 +72,9 @@ bb0:
   return %r : $()
 }
 
-// MODULE-LABEL: sil @function_local_obj
-// FUNCTION-LABEL: sil [stack_protection] @function_local_obj
-// MODULE-NOT: copy_addr
-// FUNCTION: copy_addr
-// CHECK: } // end sil function 'function_local_obj'
+// CHECK-LABEL: sil @function_local_obj
+// CHECK-NOT: copy_addr
+// CHECK: } // end sil function 'function_local_obj'
 sil @function_local_obj : $@convention(thin) () -> () {
 bb0:
   %0 = alloc_ref $C
@@ -89,13 +87,17 @@ bb0:
   return %r : $()
 }
 
-// CHECK-LABEL: sil [stack_protection] @inout_with_unknown_callers1
-// CHECK: [[T:%[0-9]+]] = alloc_stack $Int64
-// CHECK: copy_addr [take] %0 to [init] [[T]] : $*Int64
-// CHECK: address_to_pointer [stack_protection] [[T]]
-// CHECK: copy_addr [take] [[T]] to [init] %0 : $*Int64
-// CHECK: dealloc_stack [[T]] : $*Int64
-// CHECK: } // end sil function 'inout_with_unknown_callers1'
+// MOVE-LABEL: sil [stack_protection] @inout_with_unknown_callers1
+// MODULE-LABEL: sil @inout_with_unknown_callers1
+// MOVE: [[T:%[0-9]+]] = alloc_stack $Int64
+// MOVE: copy_addr [take] %0 to [init] [[T]] : $*Int64
+// MOVE: address_to_pointer [stack_protection] [[T]]
+// MOVE: copy_addr [take] [[T]] to [init] %0 : $*Int64
+// MOVE: dealloc_stack [[T]] : $*Int64
+// MODULE-NOT: alloc_stack
+// MODULE-NOT: copy_addr
+// MODULE: address_to_pointer [stack_protection] %0
+// CHECK: } // end sil function 'inout_with_unknown_callers1'
 sil @inout_with_unknown_callers1 : $@convention(thin) (@inout Int64) -> () {
 bb0(%0 : $*Int64):
   %1 = address_to_pointer [stack_protection] %0 : $*Int64 to $Builtin.RawPointer
@@ -114,14 +116,18 @@ bb0(%0 : $*Int64):
   return %r : $()
 }
 
-// CHECK-LABEL: sil [stack_protection] @inout_with_modify_access
-// CHECK: [[A:%[0-9]+]] = begin_access [modify] [dynamic] %0 : $*Int64
-// CHECK: [[T:%[0-9]+]] = alloc_stack $Int64
-// CHECK: copy_addr [take] [[A]] to [init] [[T]] : $*Int64
-// CHECK: address_to_pointer [stack_protection] [[T]]
-// CHECK: copy_addr [take] 
[[T]] to [init] [[A]] : $*Int64
-// CHECK: dealloc_stack [[T]] : $*Int64
-// CHECK: } // end sil function 'inout_with_modify_access'
+// MOVE-LABEL: sil [stack_protection] @inout_with_modify_access
+// MODULE-LABEL: sil @inout_with_modify_access
+// MOVE: [[A:%[0-9]+]] = begin_access [modify] [dynamic] %0 : $*Int64
+// MOVE: [[T:%[0-9]+]] = alloc_stack $Int64
+// MOVE: copy_addr [take] [[A]] to [init] [[T]] : $*Int64
+// MOVE: address_to_pointer [stack_protection] [[T]]
+// MOVE: copy_addr [take] [[T]] to [init] [[A]] : $*Int64
+// MOVE: dealloc_stack [[T]] : $*Int64
+// MODULE-NOT: alloc_stack
+// MODULE-NOT: copy_addr
+// MODULE: address_to_pointer [stack_protection] %0
+// CHECK: } // end sil function 'inout_with_modify_access'
 sil @inout_with_modify_access : $@convention(thin) (@inout Int64) -> () {
 bb0(%0 : $*Int64):
   %1 = begin_access [modify] [dynamic] %0 : $*Int64
@@ -145,16 +151,19 @@ bb0(%0 : $*Int64):
   return %r : $()
 }
 
-// CHECK-LABEL: sil [stack_protection] @inout_with_unknown_callers2
-// CHECK: [[T:%[0-9]+]] = alloc_stack $S
-// CHECK: copy_addr [take] %0 to [init] [[T]] : $*S
-// CHECK: [[A:%[0-9]+]] = struct_element_addr [[T]] : $*S, #S.a
-// CHECK: address_to_pointer [stack_protection] [[A]]
-// CHECK: [[B:%[0-9]+]] = struct_element_addr [[T]] : $*S, #S.b
-// CHECK: address_to_pointer [stack_protection] [[B]]
-// CHECK: copy_addr [take] [[T]] to [init] %0 : $*S
-// CHECK: dealloc_stack [[T]] : $*S
-// CHECK: } // end sil function 'inout_with_unknown_callers2'
+// MOVE-LABEL: sil [stack_protection] @inout_with_unknown_callers2
+// MODULE-LABEL: sil @inout_with_unknown_callers2
+// MOVE: [[T:%[0-9]+]] = alloc_stack $S
+// MOVE: copy_addr [take] %0 to [init] [[T]] : $*S
+// MOVE: [[A:%[0-9]+]] = struct_element_addr [[T]] : $*S, #S.a
+// MOVE: address_to_pointer [stack_protection] [[A]]
+// MOVE: [[B:%[0-9]+]] = struct_element_addr [[T]] : $*S, #S.b
+// MOVE: address_to_pointer [stack_protection] [[B]]
+// MOVE: copy_addr [take] [[T]] to [init] %0 
: $*S +// MOVE: dealloc_stack [[T]] : $*S +// MODULE-NOT: alloc_stack +// MODULE-NOT: copy_addr +// CHECK: } // end sil function 'inout_with_unknown_callers2' sil @inout_with_unknown_callers2 : $@convention(thin) (@inout S) -> () { bb0(%0 : $*S): %1 = struct_element_addr %0 : $*S, #S.a @@ -165,16 +174,19 @@ bb0(%0 : $*S): return %r : $() } -// CHECK-LABEL: sil [stack_protection] @object_with_unknown_callers1 -// CHECK: [[I:%[0-9]+]] = ref_element_addr %0 : $C, #C.i -// CHECK: [[A:%[0-9]+]] = begin_access [modify] [static] [[I]] : $*Int64 -// CHECK: [[T:%[0-9]+]] = alloc_stack $Int64 -// CHECK: copy_addr [take] [[A]] to [init] [[T]] : $*Int64 -// CHECK: address_to_pointer [stack_protection] [[T]] -// CHECK: copy_addr [take] [[T]] to [init] [[A]] : $*Int64 -// CHECK: dealloc_stack [[T]] : $*Int64 -// CHECK: end_access [[A]] : $*Int64 -// CHECK: } // end sil function 'object_with_unknown_callers1' +// MOVE-LABEL: sil [stack_protection] @object_with_unknown_callers1 +// MODULE-LABEL: sil @object_with_unknown_callers1 +// MOVE: [[I:%[0-9]+]] = ref_element_addr %0 : $C, #C.i +// MOVE: [[A:%[0-9]+]] = begin_access [modify] [static] [[I]] : $*Int64 +// MOVE: [[T:%[0-9]+]] = alloc_stack $Int64 +// MOVE: copy_addr [take] [[A]] to [init] [[T]] : $*Int64 +// MOVE: address_to_pointer [stack_protection] [[T]] +// MOVE: copy_addr [take] [[T]] to [init] [[A]] : $*Int64 +// MOVE: dealloc_stack [[T]] : $*Int64 +// MOVE: end_access [[A]] : $*Int64 +// MODULE-NOT: alloc_stack +// MODULE-NOT: copy_addr +// CHECK: } // end sil function 'object_with_unknown_callers1' sil @object_with_unknown_callers1 : $@convention(thin) (@guaranteed C) -> () { bb0(%0 : $C): %1 = ref_element_addr %0 : $C, #C.i @@ -185,17 +197,20 @@ bb0(%0 : $C): return %r : $() } -// CHECK-LABEL: sil [stack_protection] @object_with_unknown_callers2 -// CHECK: [[I:%[0-9]+]] = ref_element_addr %0 : $C, #C.i -// CHECK: [[A:%[0-9]+]] = begin_access [modify] [static] [[I]] : $*Int64 -// CHECK: [[T:%[0-9]+]] = alloc_stack 
$Int64 -// CHECK: copy_addr [take] [[A]] to [init] [[T]] : $*Int64 -// CHECK: address_to_pointer [stack_protection] [[T]] -// CHECK: address_to_pointer [stack_protection] [[T]] -// CHECK: copy_addr [take] [[T]] to [init] [[A]] : $*Int64 -// CHECK: dealloc_stack [[T]] : $*Int64 -// CHECK: end_access [[A]] : $*Int64 -// CHECK: } // end sil function 'object_with_unknown_callers2' +// MOVE-LABEL: sil [stack_protection] @object_with_unknown_callers2 +// MODULE-LABEL: sil @object_with_unknown_callers2 +// MOVE: [[I:%[0-9]+]] = ref_element_addr %0 : $C, #C.i +// MOVE: [[A:%[0-9]+]] = begin_access [modify] [static] [[I]] : $*Int64 +// MOVE: [[T:%[0-9]+]] = alloc_stack $Int64 +// MOVE: copy_addr [take] [[A]] to [init] [[T]] : $*Int64 +// MOVE: address_to_pointer [stack_protection] [[T]] +// MOVE: address_to_pointer [stack_protection] [[T]] +// MOVE: copy_addr [take] [[T]] to [init] [[A]] : $*Int64 +// MOVE: dealloc_stack [[T]] : $*Int64 +// MOVE: end_access [[A]] : $*Int64 +// MODULE-NOT: alloc_stack +// MODULE-NOT: copy_addr +// CHECK: } // end sil function 'object_with_unknown_callers2' sil @object_with_unknown_callers2 : $@convention(thin) (@guaranteed C) -> () { bb0(%0 : $C): %1 = ref_element_addr %0 : $C, #C.i @@ -207,24 +222,27 @@ bb0(%0 : $C): return %r : $() } -// CHECK-LABEL: sil [stack_protection] @object_with_unknown_callers3 -// CHECK: [[I:%[0-9]+]] = ref_element_addr %0 : $C, #C.i -// CHECK: [[AI:%[0-9]+]] = begin_access [modify] [static] [[I]] : $*Int64 -// CHECK: [[TI:%[0-9]+]] = alloc_stack $Int64 -// CHECK: copy_addr [take] [[AI]] to [init] [[TI]] : $*Int64 -// CHECK: address_to_pointer [stack_protection] [[TI]] -// CHECK: [[J:%[0-9]+]] = ref_element_addr %0 : $C, #C.j -// CHECK: [[AJ:%[0-9]+]] = begin_access [modify] [static] [[J]] : $*Int64 -// CHECK: [[TJ:%[0-9]+]] = alloc_stack $Int64 -// CHECK: copy_addr [take] [[AJ]] to [init] [[TJ]] : $*Int64 -// CHECK: copy_addr [take] [[TI]] to [init] [[AI]] : $*Int64 -// CHECK: end_access [[AI]] : $*Int64 -// 
CHECK: address_to_pointer [stack_protection] [[TJ]] -// CHECK: copy_addr [take] [[TJ]] to [init] [[AJ]] : $*Int64 -// CHECK: dealloc_stack [[TJ]] : $*Int64 -// CHECK: dealloc_stack [[TI]] : $*Int64 -// CHECK: end_access [[AJ]] : $*Int64 -// CHECK: } // end sil function 'object_with_unknown_callers3' +// MOVE-LABEL: sil [stack_protection] @object_with_unknown_callers3 +// MODULE-LABEL: sil @object_with_unknown_callers3 +// MOVE: [[I:%[0-9]+]] = ref_element_addr %0 : $C, #C.i +// MOVE: [[AI:%[0-9]+]] = begin_access [modify] [static] [[I]] : $*Int64 +// MOVE: [[TI:%[0-9]+]] = alloc_stack $Int64 +// MOVE: copy_addr [take] [[AI]] to [init] [[TI]] : $*Int64 +// MOVE: address_to_pointer [stack_protection] [[TI]] +// MOVE: [[J:%[0-9]+]] = ref_element_addr %0 : $C, #C.j +// MOVE: [[AJ:%[0-9]+]] = begin_access [modify] [static] [[J]] : $*Int64 +// MOVE: [[TJ:%[0-9]+]] = alloc_stack $Int64 +// MOVE: copy_addr [take] [[AJ]] to [init] [[TJ]] : $*Int64 +// MOVE: copy_addr [take] [[TI]] to [init] [[AI]] : $*Int64 +// MOVE: end_access [[AI]] : $*Int64 +// MOVE: address_to_pointer [stack_protection] [[TJ]] +// MOVE: copy_addr [take] [[TJ]] to [init] [[AJ]] : $*Int64 +// MOVE: dealloc_stack [[TJ]] : $*Int64 +// MOVE: dealloc_stack [[TI]] : $*Int64 +// MOVE: end_access [[AJ]] : $*Int64 +// MODULE-NOT: alloc_stack +// MODULE-NOT: copy_addr +// CHECK: } // end sil function 'object_with_unknown_callers3' sil @object_with_unknown_callers3 : $@convention(thin) (@guaranteed C) -> () { bb0(%0 : $C): %1 = ref_element_addr %0 : $C, #C.i @@ -257,11 +275,9 @@ bb0(%0 : $C): return %r : $() } -// MODULE-LABEL: sil hidden @known_callers_inout -// FUNCTION-LABEL: sil hidden [stack_protection] @known_callers_inout -// MODULE-NOT: copy_addr -// FUNCTION: copy_addr -// CHECK: } // end sil function 'known_callers_inout' +// CHECK-LABEL: sil hidden @known_callers_inout +// CHECK-NOT: copy_addr +// CHECK: } // end sil function 'known_callers_inout' sil hidden @known_callers_inout : $@convention(thin) 
(@inout Int64) -> () { bb0(%0 : $*Int64): %2 = integer_literal $Builtin.Int64, 2 @@ -315,10 +331,12 @@ bb3: return %4 : $() } -// CHECK-LABEL: sil hidden [stack_protection] @unknown_callers -// CHECK: copy_addr -// CHECK: copy_addr -// CHECK: } // end sil function 'unknown_callers' +// MOVE-LABEL: sil hidden [stack_protection] @unknown_callers +// MODULE-LABEL: sil hidden @unknown_callers +// MOVE: copy_addr +// MOVE: copy_addr +// MODULE-NOT: copy_addr +// CHECK: } // end sil function 'unknown_callers' sil hidden @unknown_callers : $@convention(thin) (@inout Int64) -> () { bb0(%0 : $*Int64): %2 = integer_literal $Builtin.Int64, 2 @@ -341,11 +359,36 @@ bb0(%0 : $*Int64): return %4 : $() } -// MODULE-LABEL: sil hidden @known_callers_obj1 -// FUNCTION-LABEL: sil hidden [stack_protection] @known_callers_obj1 -// MODULE-NOT: copy_addr -// FUNCTION: copy_addr -// CHECK: } // end sil function 'known_callers_obj1' +// MOVE-LABEL: sil [stack_protection] @partially_known_callers +// MODULE-LABEL: sil @partially_known_callers +// MOVE: copy_addr +// MOVE: copy_addr +// MODULE-NOT: copy_addr +// CHECK: } // end sil function 'partially_known_callers' +sil @partially_known_callers : $@convention(thin) (@inout Int64) -> () { +bb0(%0 : $*Int64): + %1 = address_to_pointer [stack_protection] %0 : $*Int64 to $Builtin.RawPointer + %2 = tuple () + return %2 : $() +} + +// MOVE-LABEL: sil @call_partially_known_callers +// MODULE-LABEL: sil [stack_protection] @call_partially_known_callers +// CHECK-NOT: copy_addr +// CHECK: } // end sil function 'call_partially_known_callers' +sil @call_partially_known_callers : $@convention(thin) () -> () { +bb0: + %0 = alloc_stack $Int64 + %1 = function_ref @partially_known_callers : $@convention(thin) (@inout Int64) -> () + %2 = apply %1(%0) : $@convention(thin) (@inout Int64) -> () + dealloc_stack %0 : $*Int64 + %4 = tuple () + return %4 : $() +} + +// CHECK-LABEL: sil hidden @known_callers_obj1 +// CHECK-NOT: copy_addr +// CHECK: } // end sil 
function 'known_callers_obj1' sil hidden @known_callers_obj1 : $@convention(thin) (@guaranteed C) -> () { bb0(%0 : $C): %1 = ref_element_addr %0 : $C, #C.i @@ -390,11 +433,13 @@ bb3(%5 : $C): return %r : $() } -// CHECK-LABEL: sil hidden [stack_protection] @known_callers_obj2 -// CHECK: alloc_stack -// CHECK: copy_addr -// CHECK: copy_addr -// CHECK: } // end sil function 'known_callers_obj2' +// MOVE-LABEL: sil hidden [stack_protection] @known_callers_obj2 +// MODULE-LABEL: sil hidden @known_callers_obj2 +// MOVE: alloc_stack +// MOVE: copy_addr +// MOVE: copy_addr +// MODULE-NOT: copy_addr +// CHECK: } // end sil function 'known_callers_obj2' sil hidden @known_callers_obj2 : $@convention(thin) (@guaranteed C) -> () { bb0(%0 : $C): %1 = ref_element_addr %0 : $C, #C.i @@ -428,11 +473,9 @@ bb3(%5 : $C): return %r : $() } -// MODULE-LABEL: sil private @closure_with_inout_capture -// FUNCTION-LABEL: sil private [stack_protection] @closure_with_inout_capture -// MODULE-NOT: copy_addr -// FUNCTION: copy_addr -// CHECK: } // end sil function 'closure_with_inout_capture' +// CHECK-LABEL: sil private @closure_with_inout_capture +// CHECK-NOT: copy_addr +// CHECK: } // end sil function 'closure_with_inout_capture' sil private @closure_with_inout_capture : $@convention(thin) (@inout_aliasable Int64) -> () { bb0(%0 : $*Int64): %1 = address_to_pointer [stack_protection] %0 : $*Int64 to $Builtin.RawPointer @@ -464,11 +507,13 @@ bb0: return %12 : $() } -// CHECK-LABEL: sil private [stack_protection] @closure_with_inout_arg -// CHECK: alloc_stack -// CHECK: copy_addr -// CHECK: copy_addr -// CHECK: } // end sil function 'closure_with_inout_arg' +// MOVE-LABEL: sil private [stack_protection] @closure_with_inout_arg +// MODULE-LABEL: sil private @closure_with_inout_arg +// MOVE: alloc_stack +// MOVE: copy_addr +// MOVE: copy_addr +// MODULE-NOT: copy_addr +// CHECK: } // end sil function 'closure_with_inout_arg' sil private @closure_with_inout_arg : $@convention(thin) (@inout 
Int64) -> () { bb0(%0 : $*Int64): %1 = address_to_pointer [stack_protection] %0 : $*Int64 to $Builtin.RawPointer diff --git a/test/SILOptimizer/stack_protection.swift b/test/SILOptimizer/stack_protection.swift index a164694958d43..0192f6c528ee8 100644 --- a/test/SILOptimizer/stack_protection.swift +++ b/test/SILOptimizer/stack_protection.swift @@ -1,4 +1,5 @@ -// RUN: %target-swift-frontend -module-name=test -emit-sil %s -O -enable-stack-protector | %FileCheck %s +// RUN: %target-swift-frontend -module-name=test -emit-sil %s -O | %FileCheck %s --check-prefix=CHECK --check-prefix=DEFAULT +// RUN: %target-swift-frontend -module-name=test -enable-move-inout-stack-protector -emit-sil %s -O -enable-stack-protector | %FileCheck %s --check-prefix=CHECK --check-prefix=MOVE // REQUIRES: swift_in_compiler @@ -71,10 +72,12 @@ public func callOverflowInoutPointer() { inoutWithKnownCaller(&x) } -// CHECK-LABEL: sil [stack_protection] @$s4test22inoutWithUnknownCalleryySizF -// CHECK: copy_addr [take] {{.*}} to [init] -// CHECK: copy_addr [take] {{.*}} to [init] -// CHECK: } // end sil function '$s4test22inoutWithUnknownCalleryySizF' +// DEFAULT-LABEL: sil @$s4test22inoutWithUnknownCalleryySizF +// MOVE-LABEL: sil [stack_protection] @$s4test22inoutWithUnknownCalleryySizF +// MOVE: copy_addr [take] {{.*}} to [init] +// MOVE: copy_addr [take] {{.*}} to [init] +// DEFAULT-NOT: copy_addr +// CHECK: } // end sil function '$s4test22inoutWithUnknownCalleryySizF' public func inoutWithUnknownCaller(_ x: inout Int) { withUnsafeMutablePointer(to: &x) { $0[1] = 0 diff --git a/tools/sil-opt/SILOpt.cpp b/tools/sil-opt/SILOpt.cpp index 477a0bb26f399..1e22fdd66f088 100644 --- a/tools/sil-opt/SILOpt.cpp +++ b/tools/sil-opt/SILOpt.cpp @@ -148,6 +148,10 @@ static llvm::cl::opt EnableSpeculativeDevirtualization("enable-spec-devirt", llvm::cl::desc("Enable Speculative Devirtualization pass.")); +static llvm::cl::opt +EnableMoveInoutStackProtection("enable-move-inout-stack-protector", + 
llvm::cl::desc("Enable the stack protector by moving values to temporaries.")); + static llvm::cl::opt EnableOSSAModules( "enable-ossa-modules", llvm::cl::desc("Do we always serialize SIL in OSSA form? If " @@ -607,6 +611,7 @@ int main(int argc, char **argv) { SILOpts.checkSILModuleLeaks = true; SILOpts.EnablePerformanceAnnotations = true; SILOpts.EnableStackProtection = true; + SILOpts.EnableMoveInoutStackProtection = EnableMoveInoutStackProtection; SILOpts.VerifyExclusivity = VerifyExclusivity; if (EnforceExclusivity.getNumOccurrences() != 0) {