diff --git a/SwiftCompilerSources/Sources/Optimizer/CMakeLists.txt b/SwiftCompilerSources/Sources/Optimizer/CMakeLists.txt index 7bb7e9703c5cc..b31b6afac5d77 100644 --- a/SwiftCompilerSources/Sources/Optimizer/CMakeLists.txt +++ b/SwiftCompilerSources/Sources/Optimizer/CMakeLists.txt @@ -15,6 +15,7 @@ add_subdirectory(Analysis) add_subdirectory(DataStructures) add_subdirectory(InstructionPasses) add_subdirectory(PassManager) +add_subdirectory(ModulePasses) add_subdirectory(FunctionPasses) add_subdirectory(TestPasses) add_subdirectory(Utilities) diff --git a/SwiftCompilerSources/Sources/Optimizer/ModulePasses/CMakeLists.txt b/SwiftCompilerSources/Sources/Optimizer/ModulePasses/CMakeLists.txt new file mode 100644 index 0000000000000..f96b1c4fc4236 --- /dev/null +++ b/SwiftCompilerSources/Sources/Optimizer/ModulePasses/CMakeLists.txt @@ -0,0 +1,11 @@ +# This source file is part of the Swift.org open source project +# +# Copyright (c) 2014 - 2022 Apple Inc. and the Swift project authors +# Licensed under Apache License v2.0 with Runtime Library Exception +# +# See http://swift.org/LICENSE.txt for license information +# See http://swift.org/CONTRIBUTORS.txt for Swift project authors + +swift_compiler_sources(Optimizer + StackProtection.swift +) diff --git a/SwiftCompilerSources/Sources/Optimizer/ModulePasses/StackProtection.swift b/SwiftCompilerSources/Sources/Optimizer/ModulePasses/StackProtection.swift new file mode 100644 index 0000000000000..9bfd3a76e8d8f --- /dev/null +++ b/SwiftCompilerSources/Sources/Optimizer/ModulePasses/StackProtection.swift @@ -0,0 +1,498 @@ +//===--- StackProtection.swift --------------------------------------------===// +// +// This source file is part of the Swift.org open source project +// +// Copyright (c) 2014 - 2022 Apple Inc. and the Swift project authors +// Licensed under Apache License v2.0 with Runtime Library Exception +// +// See https://swift.org/LICENSE.txt for license information +// See https://swift.org/CONTRIBUTORS.txt for the list of Swift project authors +// +//===----------------------------------------------------------------------===// + +import SIL + +private let verbose = false + +private func log(_ message: @autoclosure () -> String) { + if verbose { + print("### \(message())") + } +} + +/// Decides which functions need stack protection. +/// +/// Sets the `needStackProtection` flags on all functions which contain stack-allocated +/// values for which a buffer overflow could occur. +/// +/// Within safe Swift code there shouldn't be any buffer overflows. But if the address +/// of a stack variable is converted to an unsafe pointer, it's no longer under the +/// control of the compiler. +/// This means that if there is any `address_to_pointer` instruction for an `alloc_stack`, +/// such a function is marked for stack protection. +/// Another case is `index_addr` for non-tail-allocated memory. This pattern appears if +/// pointer arithmetic is done with unsafe pointers in Swift code. +/// +/// If the origin of an unsafe pointer can only be tracked to a function argument, the +/// pass tries to find the root stack allocation for such an argument by doing an +/// inter-procedural analysis. If this is not possible, the fallback is to move the +/// argument into a temporary `alloc_stack` and do the unsafe pointer operations on +/// the temporary.
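Before the pass implementation begins, here is a hedged Swift-level sketch of the pattern the doc comment above describes (illustrative only; the function name is hypothetical and not part of this patch):

    func escapeStackAddress() -> Int {
      var local = 0  // lowered to an alloc_stack in SIL
      withUnsafeMutablePointer(to: &local) { ptr in
        // The inout-to-pointer conversion lowers to
        // `address_to_pointer [stack_protection]`; writes through `ptr` are
        // invisible to the compiler, so the enclosing function gets its
        // needStackProtection flag set.
        ptr.pointee = 42
      }
      return local
    }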
+let stackProtection = ModulePass(name: "stack-protection", { + (context: ModulePassContext) in + + if !context.options.enableStackProtection { + return + } + + var optimization = StackProtectionOptimization() + optimization.processModule(context) +}) + +/// The stack-protection optimization at function level. +/// +/// In contrast to the `stack-protection` pass, this pass doesn't do any inter-procedural +/// analysis. It runs at Onone. +let functionStackProtection = FunctionPass(name: "function-stack-protection", { + (function: Function, context: PassContext) in + + if !context.options.enableStackProtection { + return + } + + var optimization = StackProtectionOptimization() + optimization.process(function: function, context) +}) + +/// The optimization algorithm. +private struct StackProtectionOptimization { + + // The following members are nil/not used if this utility is used at function level. + + private var moduleContext: ModulePassContext? + private var functionUses = FunctionUses() + private var functionUsesComputed = false + + // Functions (other than the currently processed one) which need stack protection + // are added to this array in `findOriginsInCallers`. + private var needStackProtection: [Function] = [] + + /// The main entry point if running at module level. + mutating func processModule(_ moduleContext: ModulePassContext) { + self.moduleContext = moduleContext + + // Collect all functions which need stack protection and insert required moves. + for function in moduleContext.functions { + + moduleContext.transform(function: function) { context in + process(function: function, context) + } + + // We cannot modify functions other than the currently processed one in `process(function:)`. + // Therefore, if `findOriginsInCallers` finds any callers which need stack protection, + // set their `needStackProtection` flags here. + for function in needStackProtection { + moduleContext.transform(function: function) { context in + function.setNeedsStackProtection(context) + } + } + needStackProtection.removeAll(keepingCapacity: true) + } + } + + /// The main entry point if running at function level. + mutating func process(function: Function, _ context: PassContext) { + var mustFixStackNesting = false + for inst in function.instructions { + process(instruction: inst, in: function, mustFixStackNesting: &mustFixStackNesting, context) + } + if mustFixStackNesting { + context.fixStackNesting(function: function) + } + } + + /// Checks whether `instruction` is an unsafe instruction which may cause a buffer overflow. + /// + /// If this is the case, either + /// - set the function's `needStackProtection` flag if the relevant allocation is in the + /// same function, or + /// - if the address is passed as an argument: try to find the origin in its callers and + /// add the relevant callers to `self.needStackProtection`, or + /// - if the origin is unknown, move the value into a temporary and set the function's + /// `needStackProtection` flag. + private mutating func process(instruction: Instruction, in function: Function, + mustFixStackNesting: inout Bool, _ context: PassContext) { + + // `withUnsafeTemporaryAllocation(of:capacity:_:)` is compiled to a `builtin "stackAlloc"`. + if let bi = instruction as? 
BuiltinInst, bi.id == .stackAlloc { + function.setNeedsStackProtection(context) + return + } + + // For example: + // %accessBase = alloc_stack $S + // %scope = begin_access [modify] %accessBase + // %instruction = address_to_pointer [stack_protection] %scope + // + guard let (accessBase, scope) = instruction.accessBaseToProtect else { + return + } + + switch accessBase.isStackAllocated { + case .no: + // For example: + // %baseAddr = global_addr @global + break + + case .yes: + // For example: + // %baseAddr = alloc_stack $T + function.setNeedsStackProtection(context) + + case .decidedInCaller(let arg): + // For example: + // bb0(%baseAddr: $*T): + + var worklist = ArgumentWorklist(context) + defer { worklist.deinitialize() } + worklist.push(arg) + + if !findOriginsInCallers(&worklist) { + // We don't know the origin of the function argument. Therefore we need to do the + // conservative default, which is to move the value to a temporary stack location. + if let beginAccess = scope { + // If there is an access, we need to move the destination of the `begin_access`. + // We should never change the source address of a `begin_access` to a temporary. + moveToTemporary(scope: beginAccess, mustFixStackNesting: &mustFixStackNesting, context) + } else { + moveToTemporary(argument: arg, context) + } + } + + case .objectIfStackPromoted(let obj): + // For example: + // %0 = alloc_ref [stack] $Class + // %baseAddr = ref_element_addr %0 : $Class, #Class.field + + var worklist = ArgumentWorklist(context) + defer { worklist.deinitialize() } + + // If the object is passed as an argument to its function, add those arguments + // to the worklist. + switch worklist.push(rootsOf: obj) { + case .failed: + // If we cannot find the roots, the object is most likely not stack allocated. + return + case .succeeded(let foundStackAlloc): + if foundStackAlloc { + // The object is created by an `alloc_ref [stack]`. + function.setNeedsStackProtection(context) + } + } + // In case the (potentially) stack-allocated object is passed via an argument, + // process the worklist as we do for indirect arguments (see above). + // For example: + // bb0(%0: $Class): + // %baseAddr = ref_element_addr %0 : $Class, #Class.field + if !findOriginsInCallers(&worklist), + let beginAccess = scope { + // We don't know the origin of the object. Therefore we need to do the + // conservative default, which is to move the value to a temporary stack location. + moveToTemporary(scope: beginAccess, mustFixStackNesting: &mustFixStackNesting, context) + } + + case .unknown: + // TODO: better handling of unknown access bases + break + } + } + + /// Find all origins of function arguments in `worklist`. + /// All functions which allocate such an origin are added to `self.needStackProtection`. + /// Returns true if all origins could be found, and false if there are unknown origins. + private mutating func findOriginsInCallers(_ worklist: inout ArgumentWorklist) -> Bool { + + guard let moduleContext = moduleContext else { + // Don't do any inter-procedural analysis when used at function level. + return false + } + + // Put the resulting functions into a temporary array, because we only add them to + // `self.needStackProtection` if we don't return false.
+ var newFunctions = Stack<Function>(moduleContext) + defer { newFunctions.deinitialize() } + + if !functionUsesComputed { + functionUses.collect(context: moduleContext) + functionUsesComputed = true + } + + while let arg = worklist.pop() { + let f = arg.function + let uses = functionUses.getUses(of: f) + if uses.hasUnknownUses { + return false + } + + for useInst in uses { + guard let fri = useInst as? FunctionRefInst else { + return false + } + + for functionRefUse in fri.uses { + guard let apply = functionRefUse.instruction as? ApplySite else { + return false + } + guard let callerArgIdx = apply.callerArgIndex(calleeArgIndex: arg.index) else { + return false + } + let callerArg = apply.arguments[callerArgIdx] + if callerArg.type.isAddress { + // It's an indirect argument. + switch callerArg.accessBase.isStackAllocated { + case .yes: + if !callerArg.function.needsStackProtection { + newFunctions.push(callerArg.function) + } + case .no: + break + case .decidedInCaller(let callerFuncArg): + if !callerFuncArg.convention.isInout { + break + } + // The argument is itself passed as an argument to its function. + // Continue by looking into the callers. + worklist.push(callerFuncArg) + case .objectIfStackPromoted(let obj): + // If the object is passed as an argument to its function, + // add those arguments to the worklist. + switch worklist.push(rootsOf: obj) { + case .failed: + return false + case .succeeded(let foundStackAlloc): + if foundStackAlloc && !obj.function.needsStackProtection { + // The object is created by an `alloc_ref [stack]`. + newFunctions.push(obj.function) + } + } + case .unknown: + return false + } + } else { + // The argument is an object. If the object is itself passed as an argument + // to its function, add those arguments to the worklist. + switch worklist.push(rootsOf: callerArg) { + case .failed: + return false + case .succeeded(let foundStackAlloc): + if foundStackAlloc && !callerArg.function.needsStackProtection { + // The object is created by an `alloc_ref [stack]`. + newFunctions.push(callerArg.function) + } + } + } + } + } + } + needStackProtection.append(contentsOf: newFunctions) + return true + } + + /// Moves the value of an indirect argument to a temporary stack location, if possible. + private func moveToTemporary(argument: FunctionArgument, _ context: PassContext) { + if !argument.convention.isInout { + // We cannot move from a read-only argument. + // Also, read-only arguments shouldn't be subject to buffer overflows (because + // no one should ever write to such an argument). + return + } + + let function = argument.function + let entryBlock = function.entryBlock + let loc = entryBlock.instructions.first!.location.autoGenerated + let builder = Builder(atBeginOf: entryBlock, location: loc, context) + let temporary = builder.createAllocStack(argument.type) + argument.uses.replaceAll(with: temporary, context) + + builder.createCopyAddr(from: argument, to: temporary, takeSource: true, initializeDest: true) + + for block in function.blocks { + let terminator = block.terminator + if terminator.isFunctionExiting { + let exitBuilder = Builder(at: terminator, location: terminator.location.autoGenerated, context) + exitBuilder.createCopyAddr(from: temporary, to: argument, takeSource: true, initializeDest: true) + exitBuilder.createDeallocStack(temporary) + } + } + log("move addr protection in \(function.name): \(argument)") + + function.setNeedsStackProtection(context) + } + + /// Moves the value of a `beginAccess` to a temporary stack location, if possible.
+ private func moveToTemporary(scope beginAccess: BeginAccessInst, mustFixStackNesting: inout Bool, + _ context: PassContext) { + if beginAccess.accessKind != .modify { + // We can only move from a `modify` access. + // Also, read-only accesses shouldn't be subject to buffer overflows (because + // no one should ever write to such storage). + return + } + + let builder = Builder(after: beginAccess, location: beginAccess.location.autoGenerated, context) + let temporary = builder.createAllocStack(beginAccess.type) + + for use in beginAccess.uses where !(use.instruction is EndAccessInst) { + use.instruction.setOperand(at: use.index, to: temporary, context) + } + + for endAccess in beginAccess.endInstructions { + let endBuilder = Builder(at: endAccess, location: endAccess.location.autoGenerated, context) + endBuilder.createCopyAddr(from: temporary, to: beginAccess, takeSource: true, initializeDest: true) + endBuilder.createDeallocStack(temporary) + } + + builder.createCopyAddr(from: beginAccess, to: temporary, takeSource: true, initializeDest: true) + log("move object protection in \(beginAccess.function.name): \(beginAccess)") + + beginAccess.function.setNeedsStackProtection(context) + + // Access scopes are not necessarily properly nested, which can result in + // not properly nested stack allocations. + mustFixStackNesting = true + } +} + +/// Worklist for inter-procedural analysis of function arguments. +private struct ArgumentWorklist : ValueUseDefWalker { + var walkUpCache = WalkerCache<SmallProjectionPath>() + private var foundStackAlloc = false + + private var handled = Set<FunctionArgument>() + private var list: Stack<FunctionArgument> + + init(_ context: PassContext) { + self.list = Stack(context) + } + + mutating func deinitialize() { + list.deinitialize() + } + + mutating func push(_ arg: FunctionArgument) { + if handled.insert(arg).0 { + list.push(arg) + } + } + + enum PushResult { + case failed + case succeeded(foundStackAlloc: Bool) + } + + /// Pushes all roots of `object` which are function arguments onto the worklist. + /// Returns `.succeeded(true)` if some of the roots are `alloc_ref [stack]` instructions. + mutating func push(rootsOf object: Value) -> PushResult { + foundStackAlloc = false + switch walkUp(value: object, path: SmallProjectionPath(.anything)) { + case .continueWalk: + return .succeeded(foundStackAlloc: foundStackAlloc) + case .abortWalk: + return .failed + } + } + + mutating func pop() -> FunctionArgument? { + return list.pop() + } + + // Internal walker function. + mutating func rootDef(value: Value, path: Path) -> WalkResult { + switch value { + case let ar as AllocRefInstBase: + if ar.canAllocOnStack { + foundStackAlloc = true + } + return .continueWalk + case let arg as FunctionArgument: + if handled.insert(arg).0 { + list.push(arg) + } + return .continueWalk + default: + return .abortWalk + } + } +} + +private extension AccessBase { + enum IsStackAllocatedResult { + case yes + case no + case decidedInCaller(FunctionArgument) + case objectIfStackPromoted(Value) + case unknown + } + + var isStackAllocated: IsStackAllocatedResult { + switch self { + case .stack: + return .yes + case .box, .global: + return .no + case .class(let rea): + return .objectIfStackPromoted(rea.operand) + case .tail(let rta): + return .objectIfStackPromoted(rta.operand) + case .argument(let arg): + return .decidedInCaller(arg) + case .yield, .pointer: + return .unknown + case .unidentified: + // In the rare case of an unidentified access, just ignore it. + // This should not happen in regular SIL, anyway.
+ return .no + } + } +} + +private extension Instruction { + /// If the instruction needs stack protection, return the relevant access base and scope. + var accessBaseToProtect: (AccessBase, scope: BeginAccessInst?)? { + let baseAddr: Value + switch self { + case let atp as AddressToPointerInst: + if !atp.needsStackProtection { + return nil + } + // The result of an `address_to_pointer` may be used in any unsafe way, e.g. + // passed to a C function. + baseAddr = atp.operand + case let ia as IndexAddrInst: + if !ia.needsStackProtection { + return nil + } + // `index_addr` is unsafe if not used for tail-allocated elements (e.g. in Array). + baseAddr = ia.base + default: + return nil + } + let (accessPath, scope) = baseAddr.accessPathWithScope + + if case .tail = accessPath.base, self is IndexAddrInst { + // `index_addr` for tail-allocated elements is the usual case (most likely coming from + // Array code). + return nil + } + return (accessPath.base, scope) + } +} + +private extension Function { + func setNeedsStackProtection(_ context: PassContext) { + if !needsStackProtection { + log("needs protection: \(name)") + set(needStackProtection: true, context) + } + } +} diff --git a/SwiftCompilerSources/Sources/Optimizer/PassManager/Options.swift b/SwiftCompilerSources/Sources/Optimizer/PassManager/Options.swift index 85ab4657edc51..c616ec9e0a8fa 100644 --- a/SwiftCompilerSources/Sources/Optimizer/PassManager/Options.swift +++ b/SwiftCompilerSources/Sources/Optimizer/PassManager/Options.swift @@ -14,4 +14,8 @@ import OptimizerBridging struct Options { let _bridged: BridgedPassContext + + var enableStackProtection: Bool { + SILOptions_enableStackProtection(_bridged) != 0 + } } diff --git a/SwiftCompilerSources/Sources/Optimizer/PassManager/PassContext.swift b/SwiftCompilerSources/Sources/Optimizer/PassManager/PassContext.swift index f0d27c868c029..35998e56b4e5c 100644 --- a/SwiftCompilerSources/Sources/Optimizer/PassManager/PassContext.swift +++ b/SwiftCompilerSources/Sources/Optimizer/PassManager/PassContext.swift @@ -240,3 +240,10 @@ extension RefCountingInst { RefCountingInst_setIsAtomic(bridged, isAtomic) } } + +extension Function { + func set(needStackProtection: Bool, _ context: PassContext) { + context.notifyFunctionDataChanged() + SILFunction_setNeedStackProtection(bridged, needStackProtection ? 
1 : 0) + } +} diff --git a/SwiftCompilerSources/Sources/Optimizer/PassManager/PassRegistration.swift b/SwiftCompilerSources/Sources/Optimizer/PassManager/PassRegistration.swift index 7c74a2e077517..8cb40c34b91e9 100644 --- a/SwiftCompilerSources/Sources/Optimizer/PassManager/PassRegistration.swift +++ b/SwiftCompilerSources/Sources/Optimizer/PassManager/PassRegistration.swift @@ -46,11 +46,15 @@ private func registerPass( } private func registerSwiftPasses() { + // Module passes + registerPass(stackProtection, { stackProtection.run($0) }) + // Function passes registerPass(mergeCondFailsPass, { mergeCondFailsPass.run($0) }) registerPass(computeEffects, { computeEffects.run($0) }) registerPass(objCBridgingOptimization, { objCBridgingOptimization.run($0) }) registerPass(stackPromotion, { stackPromotion.run($0) }) + registerPass(functionStackProtection, { functionStackProtection.run($0) }) registerPass(assumeSingleThreadedPass, { assumeSingleThreadedPass.run($0) }) registerPass(releaseDevirtualizerPass, { releaseDevirtualizerPass.run($0) }) diff --git a/SwiftCompilerSources/Sources/Optimizer/TestPasses/AccessDumper.swift b/SwiftCompilerSources/Sources/Optimizer/TestPasses/AccessDumper.swift index 35d01d1292d1d..5ed8d663a2503 100644 --- a/SwiftCompilerSources/Sources/Optimizer/TestPasses/AccessDumper.swift +++ b/SwiftCompilerSources/Sources/Optimizer/TestPasses/AccessDumper.swift @@ -61,10 +61,9 @@ private func printAccessInfo(address: Value) { var apw = AccessPathWalker() let (ap, scope) = apw.getAccessPathWithScope(of: address) - switch scope { - case let .scope(ba): - print(" Scope: \(ba)") - case .base(_): + if let beginAccess = scope { + print(" Scope: \(beginAccess)") + } else { print(" Scope: base") } diff --git a/SwiftCompilerSources/Sources/Optimizer/Utilities/AccessUtils.swift b/SwiftCompilerSources/Sources/Optimizer/Utilities/AccessUtils.swift index b621e12b6f20f..703ea0c4c8893 100644 --- a/SwiftCompilerSources/Sources/Optimizer/Utilities/AccessUtils.swift +++ b/SwiftCompilerSources/Sources/Optimizer/Utilities/AccessUtils.swift @@ -381,34 +381,31 @@ struct AccessPathWalker { return walker.result } - mutating func getAccessPathWithScope(of address: Value) -> (AccessPath, EnclosingScope) { + mutating func getAccessPathWithScope(of address: Value) -> (AccessPath, scope: BeginAccessInst?) { let ap = getAccessPath(of: address) - return (ap, walker.scope) + return (ap, walker.foundBeginAccess) } mutating func getAccessBase(of address: Value) -> AccessBase { getAccessPath(of: address).base } - mutating func getAccessScope(of address: Value) -> EnclosingScope { - getAccessPathWithScope(of: address).1 + mutating func getEnclosingScope(of address: Value) -> EnclosingScope { + let accessPath = getAccessPath(of: address) + + if let ba = walker.foundBeginAccess { + return .scope(ba) + } + return .base(accessPath.base) } private var walker = Walker() private struct Walker : AddressUseDefWalker { private(set) var result = AccessPath.unidentified() - private var foundBeginAccess: BeginAccessInst? = nil + private(set) var foundBeginAccess: BeginAccessInst? private var pointerId = PointerIdentification() - var scope: EnclosingScope { - if let ba = foundBeginAccess { - return .scope(ba) - } else { - return .base(result.base) - } - } - mutating func start() { result = .unidentified() foundBeginAccess = nil @@ -489,11 +486,23 @@ extension Value { var apWalker = AccessPathWalker() return apWalker.getAccessBase(of: self) } - + + /// Computes the access path of this address value. 
+ var accessPath: AccessPath { + var apWalker = AccessPathWalker() + return apWalker.getAccessPath(of: self) + } + + /// Computes the access path of this address value and also returns the scope. + var accessPathWithScope: (AccessPath, scope: BeginAccessInst?) { + var apWalker = AccessPathWalker() + return apWalker.getAccessPathWithScope(of: self) + } + /// Computes the enclosing access scope of this address value. - var accessScope: EnclosingScope { + var enclosingAccessScope: EnclosingScope { var apWalker = AccessPathWalker() - return apWalker.getAccessScope(of: self) + return apWalker.getEnclosingScope(of: self) } } diff --git a/SwiftCompilerSources/Sources/Optimizer/Utilities/EscapeInfo.swift b/SwiftCompilerSources/Sources/Optimizer/Utilities/EscapeInfo.swift index 4bc7e2494b0e1..adc9376355786 100644 --- a/SwiftCompilerSources/Sources/Optimizer/Utilities/EscapeInfo.swift +++ b/SwiftCompilerSources/Sources/Optimizer/Utilities/EscapeInfo.swift @@ -396,7 +396,7 @@ fileprivate struct EscapeInfoWalker : ValueDefUseWalker, return walkDownUses(ofAddress: pta, path: path.with(knownType: nil)) case let bi as BuiltinInst: switch bi.id { - case .DestroyArray: + case .destroyArray: // If it's not the array base pointer operand -> bail. Though, that shouldn't happen // because the other operands (metatype, count) shouldn't be visited anyway. if operand.index != 1 { return isEscaping } diff --git a/SwiftCompilerSources/Sources/SIL/Function.swift b/SwiftCompilerSources/Sources/SIL/Function.swift index ef4c0f8e36517..8036e61f587fc 100644 --- a/SwiftCompilerSources/Sources/SIL/Function.swift +++ b/SwiftCompilerSources/Sources/SIL/Function.swift @@ -103,6 +103,10 @@ final public class Function : CustomStringConvertible, HasShortDescription, Hash SILFunction_isSwift51RuntimeAvailable(bridged) != 0 } + public var needsStackProtection: Bool { + SILFunction_needsStackProtection(bridged) != 0 + } + // Only to be called by PassContext public func _modifyEffects(_ body: (inout FunctionEffects) -> ()) { body(&effects) diff --git a/SwiftCompilerSources/Sources/SIL/Instruction.swift b/SwiftCompilerSources/Sources/SIL/Instruction.swift index 9dca3c8afd9c7..6b7fc7d2af027 100644 --- a/SwiftCompilerSources/Sources/SIL/Instruction.swift +++ b/SwiftCompilerSources/Sources/SIL/Instruction.swift @@ -321,13 +321,16 @@ final public class LoadBorrowInst : SingleValueInstruction, UnaryInstruction {} final public class BuiltinInst : SingleValueInstruction { // TODO: find a way to directly reuse the BuiltinValueKind enum public enum ID { - case None - case DestroyArray + case none + case destroyArray + case stackAlloc } - public var id: ID? 
{ + + public var id: ID { switch BuiltinInst_getID(bridged) { - case DestroyArrayBuiltin: return .DestroyArray - default: return .None + case DestroyArrayBuiltin: return .destroyArray + case StackAllocBuiltin: return .stackAlloc + default: return .none } } } @@ -341,7 +344,11 @@ final public class RawPointerToRefInst : SingleValueInstruction, UnaryInstruction {} final public -class AddressToPointerInst : SingleValueInstruction, UnaryInstruction {} +class AddressToPointerInst : SingleValueInstruction, UnaryInstruction { + public var needsStackProtection: Bool { + AddressToPointerInst_needsStackProtection(bridged) != 0 + } +} final public class PointerToAddressInst : SingleValueInstruction, UnaryInstruction {} @@ -350,6 +357,10 @@ final public class IndexAddrInst : SingleValueInstruction { public var base: Value { operands[0].value } public var index: Value { operands[1].value } + + public var needsStackProtection: Bool { + IndexAddrInst_needsStackProtection(bridged) != 0 + } } final public diff --git a/docs/SIL.rst b/docs/SIL.rst index ba03af786930f..36325edb97176 100644 --- a/docs/SIL.rst +++ b/docs/SIL.rst @@ -1053,6 +1053,12 @@ the top-level `switch_enum`_. sil-function-attribute ::= '[weak_imported]' Cross-module references to this function should always use weak linking. +:: + + sil-function-attribute ::= '[stack_protection]' + +Stack protectors are inserted into this function to detect stack-related +buffer overflows. :: sil-function-attribute ::= '[available' sil-version-tuple ']' @@ -4308,7 +4314,7 @@ index_addr `````````` :: - sil-instruction ::= 'index_addr' sil-operand ',' sil-operand + sil-instruction ::= 'index_addr' ('[' 'stack_protection' ']')? sil-operand ',' sil-operand %2 = index_addr %0 : $*T, %1 : $Builtin.Int // %0 must be of an address type $*T @@ -4324,6 +4330,9 @@ special behavior in this regard, unlike ``char*`` or ``void*`` in C.) It is also undefined behavior to index out of bounds of an array, except to index the "past-the-end" address of the array. +The ``stack_protection`` flag indicates that stack protection is done for +the pointer origin. + tail_addr ````````` :: @@ -6558,7 +6567,7 @@ address_to_pointer `````````````````` :: - sil-instruction ::= 'address_to_pointer' sil-operand 'to' sil-type + sil-instruction ::= 'address_to_pointer' ('[' 'stack_protection' ']')? sil-operand 'to' sil-type %1 = address_to_pointer %0 : $*T to $Builtin.RawPointer // %0 must be of an address type $*T @@ -6570,6 +6579,9 @@ an address equivalent to ``%0``. It is undefined behavior to cast the ``RawPointer`` to any address type other than its original address type or any `layout compatible types`_. +The ``stack_protection`` flag indicates that stack protection is done for +the pointer origin. + pointer_to_address `````````````````` :: diff --git a/include/swift/AST/Builtins.def b/include/swift/AST/Builtins.def index 0f859b8116438..e53a05d1e5f3f 100644 --- a/include/swift/AST/Builtins.def +++ b/include/swift/AST/Builtins.def @@ -339,11 +339,25 @@ BUILTIN_SIL_OPERATION(ReinterpretCast, "reinterpretCast", Special) /// only valid for the duration of the original binding. BUILTIN_SIL_OPERATION(AddressOf, "addressof", Special) +/// unprotectedAddressOf (inout T) -> Builtin.RawPointer +/// Returns a RawPointer pointing to a physical lvalue. The returned pointer is +/// only valid for the duration of the original binding. +/// In contrast to `addressof`, this builtin doesn't trigger an insertion of +/// stack protectors. 
+BUILTIN_SIL_OPERATION(UnprotectedAddressOf, "unprotectedAddressOf", Special) + /// addressOfBorrow (__shared T) -> Builtin.RawPointer /// Returns a RawPointer pointing to a borrowed rvalue. The returned pointer is only /// valid within the scope of the borrow. BUILTIN_SIL_OPERATION(AddressOfBorrow, "addressOfBorrow", Special) +/// unprotectedAddressOfBorrow (__shared T) -> Builtin.RawPointer +/// Returns a RawPointer pointing to a borrowed rvalue. The returned pointer is only +/// valid within the scope of the borrow. +/// In contrast to `addressOfBorrow`, this builtin doesn't trigger an insertion of +/// stack protectors. +BUILTIN_SIL_OPERATION(UnprotectedAddressOfBorrow, "unprotectedAddressOfBorrow", Special) + /// GepRaw(Builtin.RawPointer, Builtin.Word) -> Builtin.RawPointer /// /// Adds index bytes to a base pointer. diff --git a/include/swift/AST/IRGenOptions.h b/include/swift/AST/IRGenOptions.h index 154e6be2ed076..e608a8925aa86 100644 --- a/include/swift/AST/IRGenOptions.h +++ b/include/swift/AST/IRGenOptions.h @@ -94,8 +94,6 @@ enum class ReflectionMetadataMode : unsigned { Runtime, ///< Make reflection metadata fully available. }; -enum class StackProtectorMode : bool { NoStackProtector, StackProtector }; - using clang::PointerAuthSchema; struct PointerAuthOptions : clang::PointerAuthOptions { @@ -285,8 +283,6 @@ class IRGenOptions { /// Whether we should run swift specific LLVM optimizations after IRGen. unsigned DisableSwiftSpecificLLVMOptzns : 1; - unsigned EnableStackProtector : 1; - /// Special codegen for playgrounds. unsigned Playground : 1; @@ -464,7 +460,7 @@ class IRGenOptions { DebugInfoFormat(IRGenDebugInfoFormat::None), DisableClangModuleSkeletonCUs(false), UseJIT(false), DisableLLVMOptzns(false), DisableSwiftSpecificLLVMOptzns(false), - EnableStackProtector(false), Playground(false), + Playground(false), EmitStackPromotionChecks(false), UseSingleModuleLLVMEmission(false), FunctionSections(false), PrintInlineTree(false), EmbedMode(IRGenEmbedMode::None), LLVMLTOKind(IRGenLLVMLTOKind::None), @@ -550,9 +546,6 @@ class IRGenOptions { bool hasMultipleIRGenThreads() const { return !UseSingleModuleLLVMEmission && NumThreads > 1; } bool shouldPerformIRGenerationInParallel() const { return !UseSingleModuleLLVMEmission && NumThreads != 0; } bool hasMultipleIGMs() const { return hasMultipleIRGenThreads(); } - StackProtectorMode getStackProtectorMode() const { - return StackProtectorMode(EnableStackProtector); - } }; } // end namespace swift diff --git a/include/swift/AST/SILOptions.h b/include/swift/AST/SILOptions.h index cddd2b2d1a5ea..291037babd89f 100644 --- a/include/swift/AST/SILOptions.h +++ b/include/swift/AST/SILOptions.h @@ -128,6 +128,9 @@ class SILOptions { /// Enables experimental performance annotations. bool EnablePerformanceAnnotations = false; + /// Enables the emission of stack protectors in functions. + bool EnableStackProtection = false; + /// Controls whether or not paranoid verification checks are run. 
bool VerifyAll = false; diff --git a/include/swift/Basic/Features.def b/include/swift/Basic/Features.def index 2eeab5bf551fc..e77724077646e 100644 --- a/include/swift/Basic/Features.def +++ b/include/swift/Basic/Features.def @@ -82,6 +82,7 @@ LANGUAGE_FEATURE(BuiltinCreateAsyncTaskInGroup, 0, "MainActor executor building LANGUAGE_FEATURE(BuiltinCopy, 0, "Builtin.copy()", true) LANGUAGE_FEATURE(BuiltinStackAlloc, 0, "Builtin.stackAlloc", true) LANGUAGE_FEATURE(BuiltinTaskRunInline, 0, "Builtin.taskRunInline", true) +LANGUAGE_FEATURE(BuiltinUnprotectedAddressOf, 0, "Builtin.unprotectedAddressOf", true) SUPPRESSIBLE_LANGUAGE_FEATURE(SpecializeAttributeWithAvailability, 0, "@_specialize attribute with availability", true) LANGUAGE_FEATURE(BuiltinAssumeAlignment, 0, "Builtin.assumeAlignment", true) SUPPRESSIBLE_LANGUAGE_FEATURE(UnsafeInheritExecutor, 0, "@_unsafeInheritExecutor", true) diff --git a/include/swift/SIL/SILBridging.h b/include/swift/SIL/SILBridging.h index c1bb7caa4b184..d1a9fe91f25d2 100644 --- a/include/swift/SIL/SILBridging.h +++ b/include/swift/SIL/SILBridging.h @@ -266,6 +266,9 @@ SwiftInt SILFunction_isPossiblyUsedExternally(BridgedFunction function); SwiftInt SILFunction_isAvailableExternally(BridgedFunction function); SwiftInt SILFunction_hasSemanticsAttr(BridgedFunction function, llvm::StringRef attrName); +SwiftInt SILFunction_needsStackProtection(BridgedFunction function); +void SILFunction_setNeedStackProtection(BridgedFunction function, + SwiftInt needSP); llvm::StringRef SILGlobalVariable_getName(BridgedGlobalVar global); std::string SILGlobalVariable_debugDescription(BridgedGlobalVar global); @@ -359,6 +362,8 @@ BridgedArrayRef TermInst_getSuccessors(BridgedInstruction term); llvm::StringRef CondFailInst_getMessage(BridgedInstruction cfi); BridgedBuiltinID BuiltinInst_getID(BridgedInstruction bi); +SwiftInt AddressToPointerInst_needsStackProtection(BridgedInstruction atp); +SwiftInt IndexAddrInst_needsStackProtection(BridgedInstruction ia); BridgedGlobalVar GlobalAccessInst_getGlobal(BridgedInstruction globalInst); BridgedFunction FunctionRefBaseInst_getReferencedFunction(BridgedInstruction fri); llvm::StringRef StringLiteralInst_getValue(BridgedInstruction sli); diff --git a/include/swift/SIL/SILBuilder.h b/include/swift/SIL/SILBuilder.h index 7d32b41ab8476..f52d031f6a300 100644 --- a/include/swift/SIL/SILBuilder.h +++ b/include/swift/SIL/SILBuilder.h @@ -1094,9 +1094,9 @@ class SILBuilder { } AddressToPointerInst *createAddressToPointer(SILLocation Loc, SILValue Op, - SILType Ty) { + SILType Ty, bool needsStackProtection) { return insert(new (getModule()) AddressToPointerInst( - getSILDebugLocation(Loc), Op, Ty)); + getSILDebugLocation(Loc), Op, Ty, needsStackProtection)); } PointerToAddressInst * @@ -2149,9 +2149,9 @@ class SILBuilder { //===--------------------------------------------------------------------===// IndexAddrInst *createIndexAddr(SILLocation Loc, SILValue Operand, - SILValue Index) { + SILValue Index, bool needsStackProtection) { return insert(new (getModule()) IndexAddrInst(getSILDebugLocation(Loc), - Operand, Index)); + Operand, Index, needsStackProtection)); } TailAddrInst *createTailAddr(SILLocation Loc, SILValue Operand, diff --git a/include/swift/SIL/SILCloner.h b/include/swift/SIL/SILCloner.h index 26336b265d983..c7f8154beac24 100644 --- a/include/swift/SIL/SILCloner.h +++ b/include/swift/SIL/SILCloner.h @@ -1461,7 +1461,8 @@ SILCloner<ImplClass>::visitAddressToPointerInst(AddressToPointerInst *Inst) { recordClonedInstruction( Inst, 
getBuilder().createAddressToPointer(getOpLocation(Inst->getLoc()), getOpValue(Inst->getOperand()), - getOpType(Inst->getType()))); + getOpType(Inst->getType()), + Inst->needsStackProtection())); } template <typename ImplClass> @@ -2638,7 +2639,8 @@ SILCloner<ImplClass>::visitIndexAddrInst(IndexAddrInst *Inst) { recordClonedInstruction( Inst, getBuilder().createIndexAddr(getOpLocation(Inst->getLoc()), getOpValue(Inst->getBase()), - getOpValue(Inst->getIndex()))); + getOpValue(Inst->getIndex()), + Inst->needsStackProtection())); } template <typename ImplClass> diff --git a/include/swift/SIL/SILFunction.h b/include/swift/SIL/SILFunction.h index 75bafcdff811b..13d409418a028 100644 --- a/include/swift/SIL/SILFunction.h +++ b/include/swift/SIL/SILFunction.h @@ -336,6 +336,8 @@ class SILFunction /// Check whether this is a distributed method. unsigned IsDistributed : 1; + unsigned stackProtection : 1; + /// True if this function is inlined at least once. This means that the /// debug info keeps a pointer to this function. unsigned Inlined : 1; @@ -832,6 +834,9 @@ class SILFunction IsDistributed = value; } + bool needsStackProtection() const { return stackProtection; } + void setNeedStackProtection(bool needSP) { stackProtection = needSP; } + /// Get the DeclContext of this function. DeclContext *getDeclContext() const { return DeclCtxt; } diff --git a/include/swift/SIL/SILInstruction.h b/include/swift/SIL/SILInstruction.h index 1cc28baa96c6e..ff19382b5d2e8 100644 --- a/include/swift/SIL/SILInstruction.h +++ b/include/swift/SIL/SILInstruction.h @@ -5350,9 +5350,18 @@ class AddressToPointerInst ConversionInst> { friend SILBuilder; + USE_SHARED_UINT8; - AddressToPointerInst(SILDebugLocation DebugLoc, SILValue Operand, SILType Ty) - : UnaryInstructionBase(DebugLoc, Operand, Ty) {} + AddressToPointerInst(SILDebugLocation DebugLoc, SILValue Operand, SILType Ty, + bool needsStackProtection) + : UnaryInstructionBase(DebugLoc, Operand, Ty) { + sharedUInt8().AddressToPointerInst.needsStackProtection = needsStackProtection; + } + +public: + bool needsStackProtection() const { + return sharedUInt8().AddressToPointerInst.needsStackProtection; + } }; /// PointerToAddressInst - Convert a Builtin.RawPointer value to a SIL address. 
@@ -8132,11 +8141,20 @@ class IndexAddrInst : public InstructionBase<SILInstructionKind::IndexAddrInst, IndexingInst> { friend SILBuilder; + USE_SHARED_UINT8; enum { Base, Index }; - IndexAddrInst(SILDebugLocation DebugLoc, SILValue Operand, SILValue Index) - : InstructionBase(DebugLoc, Operand->getType(), Operand, Index) {} + IndexAddrInst(SILDebugLocation DebugLoc, SILValue Operand, SILValue Index, + bool needsStackProtection) + : InstructionBase(DebugLoc, Operand->getType(), Operand, Index) { + sharedUInt8().IndexAddrInst.needsStackProtection = needsStackProtection; + } + +public: + bool needsStackProtection() const { + return sharedUInt8().IndexAddrInst.needsStackProtection; + } }; /// TailAddrInst - like IndexingInst, but aligns-up the resulting address to a diff --git a/include/swift/SIL/SILNode.h b/include/swift/SIL/SILNode.h index 51ebe8da4eb57..e20cda57ffeee 100644 --- a/include/swift/SIL/SILNode.h +++ b/include/swift/SIL/SILNode.h @@ -194,6 +194,8 @@ class alignas(8) SILNode : SHARED_FIELD(EndAccessInst, bool aborting); SHARED_FIELD(RefElementAddrInst, bool immutable); SHARED_FIELD(RefTailAddrInst, bool immutable); + SHARED_FIELD(AddressToPointerInst, bool needsStackProtection); + SHARED_FIELD(IndexAddrInst, bool needsStackProtection); SHARED_FIELD(HopToExecutorInst, bool mandatory); SHARED_FIELD(DestroyValueInst, bool poisonRefs); SHARED_FIELD(EndCOWMutationInst, bool keepUnique); diff --git a/include/swift/SILOptimizer/OptimizerBridging.h b/include/swift/SILOptimizer/OptimizerBridging.h index 53405267adc4b..23c1c3e32986f 100644 --- a/include/swift/SILOptimizer/OptimizerBridging.h +++ b/include/swift/SILOptimizer/OptimizerBridging.h @@ -184,6 +184,8 @@ PassContext_nextDefaultWitnessTableInModule(BridgedDefaultWitnessTable table); OptionalBridgedFunction PassContext_loadFunction(BridgedPassContext context, llvm::StringRef name); +SwiftInt SILOptions_enableStackProtection(BridgedPassContext context); + SWIFT_END_NULLABILITY_ANNOTATIONS #endif diff --git a/include/swift/SILOptimizer/PassManager/Passes.def b/include/swift/SILOptimizer/PassManager/Passes.def index 2d2940abf3b9a..bd135256f74cb 100644 --- a/include/swift/SILOptimizer/PassManager/Passes.def +++ b/include/swift/SILOptimizer/PassManager/Passes.def @@ -401,6 +401,10 @@ SWIFT_FUNCTION_PASS(SILPrinter, "sil-printer", "Test pass which prints the SIL of a function") SWIFT_MODULE_PASS(FunctionUsesDumper, "dump-function-uses", "Dump the results of FunctionUses") +SWIFT_MODULE_PASS(StackProtection, "stack-protection", + "Decides which functions need stack protectors") +SWIFT_FUNCTION_PASS(FunctionStackProtection, "function-stack-protection", + "Decides which functions need stack protectors") PASS(SROA, "sroa", "Scalar Replacement of Aggregate Stack Objects") PASS(SROABBArgs, "sroa-bb-args", diff --git a/lib/AST/ASTPrinter.cpp b/lib/AST/ASTPrinter.cpp index afa0e200f2fbd..02c10064490e5 100644 --- a/lib/AST/ASTPrinter.cpp +++ b/lib/AST/ASTPrinter.cpp @@ -2921,6 +2921,8 @@ static bool usesFeatureBuiltinCopy(Decl *decl) { return false; } static bool usesFeatureBuiltinTaskRunInline(Decl *) { return false; } +static bool usesFeatureBuiltinUnprotectedAddressOf(Decl *) { return false; } + static bool usesFeatureSpecializeAttributeWithAvailability(Decl *decl) { if (auto func = dyn_cast<AbstractFunctionDecl>(decl)) { for (auto specialize : func->getAttrs().getAttributes<SpecializeAttr>()) { diff --git a/lib/AST/Builtins.cpp b/lib/AST/Builtins.cpp index 65cbc4ea8914c..c4c7da93bd22d 100644 --- a/lib/AST/Builtins.cpp +++ b/lib/AST/Builtins.cpp @@ -2691,6 +2691,7 @@ ValueDecl *swift::getBuiltinValueDecl(ASTContext &Context, 
Identifier Id) { return getReinterpretCastOperation(Context, Id); case BuiltinValueKind::AddressOf: + case BuiltinValueKind::UnprotectedAddressOf: if (!Types.empty()) return nullptr; return getAddressOfOperation(Context, Id); @@ -2698,6 +2699,7 @@ ValueDecl *swift::getBuiltinValueDecl(ASTContext &Context, Identifier Id) { return getLegacyCondFailOperation(Context, Id); case BuiltinValueKind::AddressOfBorrow: + case BuiltinValueKind::UnprotectedAddressOfBorrow: if (!Types.empty()) return nullptr; return getAddressOfBorrowOperation(Context, Id); diff --git a/lib/Frontend/CompilerInvocation.cpp b/lib/Frontend/CompilerInvocation.cpp index 2c3d12320e7f5..1ca0cdfa0bd34 100644 --- a/lib/Frontend/CompilerInvocation.cpp +++ b/lib/Frontend/CompilerInvocation.cpp @@ -1764,6 +1764,9 @@ static bool ParseSILArgs(SILOptions &Opts, ArgList &Args, } Opts.EnablePerformanceAnnotations |= Args.hasArg(OPT_ExperimentalPerformanceAnnotations); + Opts.EnableStackProtection = + Args.hasFlag(OPT_enable_stack_protector, OPT_disable_stack_protector, + Opts.EnableStackProtection); Opts.VerifyAll |= Args.hasArg(OPT_sil_verify_all); Opts.VerifyNone |= Args.hasArg(OPT_sil_verify_none); Opts.DebugSerialization |= Args.hasArg(OPT_sil_debug_serialization); @@ -2414,10 +2417,6 @@ static bool ParseIRGenArgs(IRGenOptions &Opts, ArgList &Args, OPT_enable_new_llvm_pass_manager, Opts.LegacyPassManager); - Opts.EnableStackProtector = - Args.hasFlag(OPT_enable_stack_protector, OPT_disable_stack_protector, - Opts.EnableStackProtector); - return false; } diff --git a/lib/IRGen/GenDecl.cpp b/lib/IRGen/GenDecl.cpp index c974807734971..2b01ed4617e2c 100644 --- a/lib/IRGen/GenDecl.cpp +++ b/lib/IRGen/GenDecl.cpp @@ -3201,7 +3201,9 @@ llvm::Constant *swift::irgen::emitCXXConstructorThunkIfNeeded( } StackProtectorMode IRGenModule::shouldEmitStackProtector(SILFunction *f) { - return IRGen.Opts.getStackProtectorMode(); + const SILOptions &opts = IRGen.SIL.getOptions(); + return (opts.EnableStackProtection && f->needsStackProtection()) ? + StackProtectorMode::StackProtector : StackProtectorMode::NoStackProtector; } /// Find the entry point for a SIL function. diff --git a/lib/IRGen/IRGen.h b/lib/IRGen/IRGen.h index ba4041ca18330..68d41c2045bcc 100644 --- a/lib/IRGen/IRGen.h +++ b/lib/IRGen/IRGen.h @@ -44,6 +44,8 @@ namespace irgen { /// store vectors of spare bits. 
using SpareBitVector = ClusteredBitVector; +enum class StackProtectorMode : bool { NoStackProtector, StackProtector }; + class Size; enum IsPOD_t : bool { IsNotPOD, IsPOD }; diff --git a/lib/IRGen/IRGenModule.cpp b/lib/IRGen/IRGenModule.cpp index 66e133cf4039d..45f49612a09a2 100644 --- a/lib/IRGen/IRGenModule.cpp +++ b/lib/IRGen/IRGenModule.cpp @@ -1265,7 +1265,7 @@ void IRGenModule::constructInitialFnAttributes( Attrs.removeAttribute(llvm::Attribute::OptimizeForSize); } if (stackProtector == StackProtectorMode::StackProtector) { - Attrs.addAttribute(llvm::Attribute::StackProtectStrong); + Attrs.addAttribute(llvm::Attribute::StackProtectReq); Attrs.addAttribute("stack-protector-buffer-size", llvm::utostr(8)); } } diff --git a/lib/SIL/IR/SILFunction.cpp b/lib/SIL/IR/SILFunction.cpp index e4db0daad650b..05a460be38d1c 100644 --- a/lib/SIL/IR/SILFunction.cpp +++ b/lib/SIL/IR/SILFunction.cpp @@ -186,6 +186,7 @@ void SILFunction::init( this->IsDynamicReplaceable = isDynamic; this->ExactSelfClass = isExactSelfClass; this->IsDistributed = isDistributed; + this->stackProtection = false; this->Inlined = false; this->Zombie = false; this->HasOwnership = true, diff --git a/lib/SIL/IR/SILInstruction.cpp b/lib/SIL/IR/SILInstruction.cpp index f0d045d33baa2..f6eb5347b0bfc 100644 --- a/lib/SIL/IR/SILInstruction.cpp +++ b/lib/SIL/IR/SILInstruction.cpp @@ -663,9 +663,8 @@ namespace { } bool visitIndexAddrInst(IndexAddrInst *RHS) { - // We have already compared the operands/types, so we should have equality - // at this point. - return true; + auto *lhs = cast<IndexAddrInst>(LHS); + return lhs->needsStackProtection() == RHS->needsStackProtection(); } bool visitTailAddrInst(TailAddrInst *RHS) { @@ -772,7 +771,8 @@ namespace { } bool visitAddressToPointerInst(AddressToPointerInst *RHS) { - return true; + auto *lhs = cast<AddressToPointerInst>(LHS); + return lhs->needsStackProtection() == RHS->needsStackProtection(); } bool visitPointerToAddressInst(PointerToAddressInst *RHS) { diff --git a/lib/SIL/IR/SILPrinter.cpp b/lib/SIL/IR/SILPrinter.cpp index 8e67b84e28aa1..8b3d4a2402cf5 100644 --- a/lib/SIL/IR/SILPrinter.cpp +++ b/lib/SIL/IR/SILPrinter.cpp @@ -1855,6 +1855,7 @@ class SILPrinter : public SILInstructionVisitor<SILPrinter> { printUncheckedConversionInst(CI, CI->getOperand()); } void visitAddressToPointerInst(AddressToPointerInst *CI) { + *this << (CI->needsStackProtection() ? "[stack_protection] " : ""); printUncheckedConversionInst(CI, CI->getOperand()); } void visitPointerToAddressInst(PointerToAddressInst *CI) { @@ -2368,7 +2369,8 @@ class SILPrinter : public SILInstructionVisitor<SILPrinter> { } void visitIndexAddrInst(IndexAddrInst *IAI) { - *this << getIDAndType(IAI->getBase()) << ", " + *this << (IAI->needsStackProtection() ? 
"[stack_protection] " : "") + << getIDAndType(IAI->getBase()) << ", " << getIDAndType(IAI->getIndex()); } @@ -3124,6 +3126,9 @@ void SILFunction::print(SILPrintContext &PrintCtx) const { if (!isExternalDeclaration() && hasOwnership()) OS << "[ossa] "; + if (needsStackProtection()) + OS << "[stack_protection] "; + llvm::DenseMap sugaredTypeNames; printSILFunctionNameAndType(OS, this, sugaredTypeNames, &PrintCtx); diff --git a/lib/SIL/Parser/ParseSIL.cpp b/lib/SIL/Parser/ParseSIL.cpp index 792d1b83a82f7..7e0e82862ad17 100644 --- a/lib/SIL/Parser/ParseSIL.cpp +++ b/lib/SIL/Parser/ParseSIL.cpp @@ -983,6 +983,7 @@ static bool parseDeclSILOptional(bool *isTransparent, PerformanceConstraints *perfConstraints, bool *isLet, bool *isWeakImported, + bool *needStackProtection, AvailabilityContext *availability, bool *isWithoutActuallyEscapingThunk, SmallVectorImpl *Semantics, @@ -1015,6 +1016,8 @@ static bool parseDeclSILOptional(bool *isTransparent, *isCanonical = true; else if (hasOwnershipSSA && SP.P.Tok.getText() == "ossa") *hasOwnershipSSA = true; + else if (needStackProtection && SP.P.Tok.getText() == "stack_protection") + *needStackProtection = true; else if (isThunk && SP.P.Tok.getText() == "thunk") *isThunk = IsThunk; else if (isThunk && SP.P.Tok.getText() == "signature_optimized_thunk") @@ -3766,6 +3769,7 @@ bool SILParser::parseSpecificSILInstruction(SILBuilder &B, SourceLoc ToLoc; bool not_guaranteed = false; bool without_actually_escaping = false; + bool needsStackProtection = false; if (Opcode == SILInstructionKind::ConvertEscapeToNoEscapeInst) { StringRef attrName; if (parseSILOptional(attrName, *this)) { @@ -3774,7 +3778,11 @@ bool SILParser::parseSpecificSILInstruction(SILBuilder &B, else return true; } + } if (Opcode == SILInstructionKind::AddressToPointerInst) { + if (parseSILOptional(needsStackProtection, *this, "stack_protection")) + return true; } + if (parseTypedValueRef(Val, B) || parseSILIdentifier(ToToken, ToLoc, diag::expected_tok_in_sil_instr, "to")) @@ -3838,7 +3846,7 @@ bool SILParser::parseSpecificSILInstruction(SILBuilder &B, B.createConvertEscapeToNoEscape(InstLoc, Val, Ty, !not_guaranteed); break; case SILInstructionKind::AddressToPointerInst: - ResultVal = B.createAddressToPointer(InstLoc, Val, Ty); + ResultVal = B.createAddressToPointer(InstLoc, Val, Ty, needsStackProtection); break; case SILInstructionKind::BridgeObjectToRefInst: ResultVal = @@ -5164,11 +5172,13 @@ bool SILParser::parseSpecificSILInstruction(SILBuilder &B, } case SILInstructionKind::IndexAddrInst: { SILValue IndexVal; - if (parseTypedValueRef(Val, B) || + bool needsStackProtection = false; + if (parseSILOptional(needsStackProtection, *this, "stack_protection") || + parseTypedValueRef(Val, B) || P.parseToken(tok::comma, diag::expected_tok_in_sil_instr, ",") || parseTypedValueRef(IndexVal, B) || parseSILDebugLocation(InstLoc, B)) return true; - ResultVal = B.createIndexAddr(InstLoc, Val, IndexVal); + ResultVal = B.createIndexAddr(InstLoc, Val, IndexVal, needsStackProtection); break; } case SILInstructionKind::TailAddrInst: { @@ -6527,6 +6537,7 @@ bool SILParserState::parseDeclSIL(Parser &P) { IsThunk_t isThunk = IsNotThunk; SILFunction::Purpose specialPurpose = SILFunction::Purpose::None; bool isWeakImported = false; + bool needStackProtection = false; AvailabilityContext availability = AvailabilityContext::alwaysAvailable(); bool isWithoutActuallyEscapingThunk = false; Inline_t inlineStrategy = InlineDefault; @@ -6546,7 +6557,7 @@ bool SILParserState::parseDeclSIL(Parser &P) { &isThunk, &isDynamic, 
&isDistributed, &isExactSelfClass, &DynamicallyReplacedFunction, &AdHocWitnessFunction, &objCReplacementFor, &specialPurpose, &inlineStrategy, &optimizationMode, &perfConstr, nullptr, - &isWeakImported, &availability, + &isWeakImported, &needStackProtection, &availability, &isWithoutActuallyEscapingThunk, &Semantics, &SpecAttrs, &ClangDecl, &MRK, &argEffectLocs, FunctionState, M) || P.parseToken(tok::at_sign, diag::expected_sil_function_name) || @@ -6779,7 +6790,7 @@ bool SILParserState::parseSILGlobal(Parser &P) { nullptr, nullptr, nullptr, nullptr, nullptr, nullptr, nullptr, nullptr, nullptr, nullptr, &isLet, nullptr, nullptr, nullptr, nullptr, nullptr, - nullptr, nullptr, nullptr, State, M) || + nullptr, nullptr, nullptr, nullptr, State, M) || P.parseToken(tok::at_sign, diag::expected_sil_value_name) || P.parseIdentifier(GlobalName, NameLoc, /*diagnoseDollarPrefix=*/false, diag::expected_sil_value_name) || @@ -6830,7 +6841,7 @@ bool SILParserState::parseSILProperty(Parser &P) { nullptr, nullptr, nullptr, nullptr, nullptr, nullptr, nullptr, nullptr, nullptr, nullptr, nullptr, nullptr, nullptr, nullptr, nullptr, nullptr, nullptr, nullptr, - nullptr, SP, M)) + nullptr, nullptr, SP, M)) return true; ValueDecl *VD; @@ -6899,7 +6910,7 @@ bool SILParserState::parseSILVTable(Parser &P) { nullptr, nullptr, nullptr, nullptr, nullptr, nullptr, nullptr, nullptr, nullptr, nullptr, nullptr, nullptr, nullptr, nullptr, nullptr, nullptr, nullptr, - nullptr, nullptr, VTableState, M)) + nullptr, nullptr, nullptr, VTableState, M)) return true; // Parse the class name. @@ -7419,7 +7430,7 @@ bool SILParserState::parseSILWitnessTable(Parser &P) { nullptr, nullptr, nullptr, nullptr, nullptr, nullptr, nullptr, nullptr, nullptr, nullptr, nullptr, nullptr, nullptr, nullptr, nullptr, nullptr, nullptr, - nullptr, nullptr, WitnessState, M)) + nullptr, nullptr, nullptr, WitnessState, M)) return true; // Parse the protocol conformance. diff --git a/lib/SIL/Utils/Projection.cpp b/lib/SIL/Utils/Projection.cpp index 580da8a5be9cf..2daadb11edc66 100644 --- a/lib/SIL/Utils/Projection.cpp +++ b/lib/SIL/Utils/Projection.cpp @@ -272,7 +272,9 @@ Projection::createAddressProjection(SILBuilder &B, SILLocation Loc, SILType::getBuiltinIntegerType(64, B.getModule().getASTContext()); auto IntLiteralIndex = B.createIntegerLiteral(Loc, IntLiteralTy, getIndex()); - return B.createIndexAddr(Loc, Base, IntLiteralIndex); + return B.createIndexAddr(Loc, Base, IntLiteralIndex, + // TODO: do we need to be conservative here? + /*needsStackProtection=*/ true); } case ProjectionKind::Enum: return B.createUncheckedTakeEnumDataAddr(Loc, Base, diff --git a/lib/SIL/Utils/SILBridging.cpp b/lib/SIL/Utils/SILBridging.cpp index 75ff330a49eb2..61f74a5128d1f 100644 --- a/lib/SIL/Utils/SILBridging.cpp +++ b/lib/SIL/Utils/SILBridging.cpp @@ -231,6 +231,15 @@ SwiftInt SILFunction_hasSemanticsAttr(BridgedFunction function, return f->hasSemanticsAttr(attrName) ? 1 : 0; } +SwiftInt SILFunction_needsStackProtection(BridgedFunction function) { + return castToFunction(function)->needsStackProtection() ? 
1 : 0; +} + +void SILFunction_setNeedStackProtection(BridgedFunction function, + SwiftInt needSP) { + castToFunction(function)->setNeedStackProtection(needSP != 0); +} + //===----------------------------------------------------------------------===// // SILBasicBlock //===----------------------------------------------------------------------===// @@ -759,6 +768,14 @@ BridgedBuiltinID BuiltinInst_getID(BridgedInstruction bi) { return (BridgedBuiltinID)castToInst<BuiltinInst>(bi)->getBuiltinInfo().ID; } +SwiftInt AddressToPointerInst_needsStackProtection(BridgedInstruction atp) { + return castToInst<AddressToPointerInst>(atp)->needsStackProtection() ? 1 : 0; +} + +SwiftInt IndexAddrInst_needsStackProtection(BridgedInstruction ia) { + return castToInst<IndexAddrInst>(ia)->needsStackProtection() ? 1 : 0; +} + BridgedGlobalVar GlobalAccessInst_getGlobal(BridgedInstruction globalInst) { return {castToInst<GlobalAccessInst>(globalInst)->getReferencedGlobal()}; } diff --git a/lib/SILGen/SILGenBuiltin.cpp b/lib/SILGen/SILGenBuiltin.cpp index 7ab9bde1ffbb9..f98e972066a13 100644 --- a/lib/SILGen/SILGenBuiltin.cpp +++ b/lib/SILGen/SILGenBuiltin.cpp @@ -409,12 +409,11 @@ static ManagedValue emitBuiltinBridgeFromRawPointer(SILGenFunction &SGF, return SGF.emitManagedRetain(loc, result, destLowering); } -/// Specialized emitter for Builtin.addressof. -static ManagedValue emitBuiltinAddressOf(SILGenFunction &SGF, +static ManagedValue emitBuiltinAddressOfBuiltins(SILGenFunction &SGF, SILLocation loc, SubstitutionMap substitutions, PreparedArguments &&preparedArgs, - SGFContext C) { + SGFContext C, bool stackProtected) { SILType rawPointerType = SILType::getRawPointerType(SGF.getASTContext()); auto argsOrError = decomposeArguments(SGF, loc, std::move(preparedArgs), 1); @@ -436,17 +435,36 @@ static ManagedValue emitBuiltinAddressOf(SILGenFunction &SGF, .getLValueAddress(); // Take the address argument and cast it to RawPointer. - SILValue result = SGF.B.createAddressToPointer(loc, addr, - rawPointerType); + SILValue result = SGF.B.createAddressToPointer(loc, addr, rawPointerType, + stackProtected); return ManagedValue::forUnmanaged(result); } +/// Specialized emitter for Builtin.addressof. +static ManagedValue emitBuiltinAddressOf(SILGenFunction &SGF, + SILLocation loc, + SubstitutionMap substitutions, + PreparedArguments &&preparedArgs, + SGFContext C) { + return emitBuiltinAddressOfBuiltins(SGF, loc, substitutions, std::move(preparedArgs), C, + /*stackProtected=*/ true); +} + +static ManagedValue emitBuiltinUnprotectedAddressOf(SILGenFunction &SGF, + SILLocation loc, + SubstitutionMap substitutions, + PreparedArguments &&preparedArgs, + SGFContext C) { + return emitBuiltinAddressOfBuiltins(SGF, loc, substitutions, std::move(preparedArgs), C, + /*stackProtected=*/ false); +} + /// Specialized emitter for Builtin.addressOfBorrow. -static ManagedValue emitBuiltinAddressOfBorrow(SILGenFunction &SGF, +static ManagedValue emitBuiltinAddressOfBorrowBuiltins(SILGenFunction &SGF, SILLocation loc, SubstitutionMap substitutions, PreparedArguments &&preparedArgs, - SGFContext C) { + SGFContext C, bool stackProtected) { SILType rawPointerType = SILType::getRawPointerType(SGF.getASTContext()); auto argsOrError = decomposeArguments(SGF, loc, std::move(preparedArgs), 1); @@ -468,11 +486,31 @@ static ManagedValue emitBuiltinAddressOfBorrow(SILGenFunction &SGF, addr = borrow.getValue(); // Take the address argument and cast it to RawPointer. 
- SILValue result = SGF.B.createAddressToPointer(loc, addr, - rawPointerType); + SILValue result = SGF.B.createAddressToPointer(loc, addr, rawPointerType, + stackProtected); return ManagedValue::forUnmanaged(result); } +/// Specialized emitter for Builtin.addressOfBorrow. +static ManagedValue emitBuiltinAddressOfBorrow(SILGenFunction &SGF, + SILLocation loc, + SubstitutionMap substitutions, + PreparedArguments &&preparedArgs, + SGFContext C) { + return emitBuiltinAddressOfBorrowBuiltins(SGF, loc, substitutions, + std::move(preparedArgs), C, /*stackProtected=*/ true); +} + +/// Specialized emitter for Builtin.unprotectedAddressOfBorrow. +static ManagedValue emitBuiltinUnprotectedAddressOfBorrow(SILGenFunction &SGF, + SILLocation loc, + SubstitutionMap substitutions, + PreparedArguments &&preparedArgs, + SGFContext C) { + return emitBuiltinAddressOfBorrowBuiltins(SGF, loc, substitutions, + std::move(preparedArgs), C, /*stackProtected=*/ false); +} + /// Specialized emitter for Builtin.gepRaw. static ManagedValue emitBuiltinGepRaw(SILGenFunction &SGF, SILLocation loc, @@ -504,8 +542,10 @@ static ManagedValue emitBuiltinGep(SILGenFunction &SGF, ElemTy.getAddressType(), /*strict*/ true, /*invariant*/ false); - addr = SGF.B.createIndexAddr(loc, addr, args[1].getUnmanagedValue()); - addr = SGF.B.createAddressToPointer(loc, addr, RawPtrType); + addr = SGF.B.createIndexAddr(loc, addr, args[1].getUnmanagedValue(), + /*needsStackProtection=*/ true); + addr = SGF.B.createAddressToPointer(loc, addr, RawPtrType, + /*needsStackProtection=*/ true); return ManagedValue::forUnmanaged(addr); } @@ -530,7 +570,8 @@ static ManagedValue emitBuiltinGetTailAddr(SILGenFunction &SGF, /*invariant*/ false); addr = SGF.B.createTailAddr(loc, addr, args[1].getUnmanagedValue(), TailTy.getAddressType()); - addr = SGF.B.createAddressToPointer(loc, addr, RawPtrType); + addr = SGF.B.createAddressToPointer(loc, addr, RawPtrType, + /*needsStackProtection=*/ false); return ManagedValue::forUnmanaged(addr); } @@ -1055,7 +1096,8 @@ static ManagedValue emitBuiltinProjectTailElems(SILGenFunction &SGF, SILValue result = SGF.B.createRefTailAddr( loc, args[0].borrow(SGF, loc).getValue(), ElemType.getAddressType()); SILType rawPointerType = SILType::getRawPointerType(SGF.F.getASTContext()); - result = SGF.B.createAddressToPointer(loc, result, rawPointerType); + result = SGF.B.createAddressToPointer(loc, result, rawPointerType, + /*needsStackProtection=*/ false); return ManagedValue::forUnmanaged(result); } diff --git a/lib/SILGen/SILGenDecl.cpp b/lib/SILGen/SILGenDecl.cpp index 3464385e0df1a..c420cf049af11 100644 --- a/lib/SILGen/SILGenDecl.cpp +++ b/lib/SILGen/SILGenDecl.cpp @@ -1295,7 +1295,8 @@ void SILGenFunction::emitPatternBinding(PatternBindingDecl *PBD, SILLocation loc(PBD); SILValue resultBuf = emitTemporaryAllocation(loc, initLoweredTy); SILValue resultBufPtr = B.createAddressToPointer(loc, resultBuf, - SILType::getPrimitiveObjectType(C.TheRawPointerType)); + SILType::getPrimitiveObjectType(C.TheRawPointerType), + /*needsStackProtection=*/ false); // Emit the closure for the child task. // Prepare the opaque `AsyncLet` representation. 
diff --git a/lib/SILGen/SILGenExpr.cpp b/lib/SILGen/SILGenExpr.cpp
index 5ed7c623de6ea..db4977520095b 100644
--- a/lib/SILGen/SILGenExpr.cpp
+++ b/lib/SILGen/SILGenExpr.cpp
@@ -4006,7 +4006,8 @@ visitMagicIdentifierLiteralExpr(MagicIdentifierLiteralExpr *E, SGFContext C) {
       auto ImageBaseAddr = B.createGlobalAddr(SILLoc, ImageBase);
       auto ImageBasePointer =
-          B.createAddressToPointer(SILLoc, ImageBaseAddr, BuiltinRawPtrTy);
+          B.createAddressToPointer(SILLoc, ImageBaseAddr, BuiltinRawPtrTy,
+                                   /*needsStackProtection=*/ false);
       S = B.createStruct(SILLoc, UnsafeRawPtrTy, { ImageBasePointer });
     } else {
       auto DSOGlobal = M.lookUpGlobalVariable("__dso_handle");
@@ -4018,7 +4019,8 @@ visitMagicIdentifierLiteralExpr(MagicIdentifierLiteralExpr *E, SGFContext C) {
       auto DSOAddr = B.createGlobalAddr(SILLoc, DSOGlobal);
       auto DSOPointer =
-          B.createAddressToPointer(SILLoc, DSOAddr, BuiltinRawPtrTy);
+          B.createAddressToPointer(SILLoc, DSOAddr, BuiltinRawPtrTy,
+                                   /*needsStackProtection=*/ false);
       S = B.createStruct(SILLoc, UnsafeRawPtrTy, { DSOPointer });
     }
 
@@ -4063,7 +4065,8 @@ RValue RValueEmitter::visitCollectionExpr(CollectionExpr *E, SGFContext C) {
     if (index != 0) {
       SILValue indexValue = SGF.B.createIntegerLiteral(
           loc, SILType::getBuiltinWordType(SGF.getASTContext()), index);
-      destAddr = SGF.B.createIndexAddr(loc, destAddr, indexValue);
+      destAddr = SGF.B.createIndexAddr(loc, destAddr, indexValue,
+                                       /*needsStackProtection=*/ false);
     }
     auto &destTL = varargsInfo.getBaseTypeLowering();
     // Create a dormant cleanup for the value in case we exit before the
@@ -5622,7 +5625,8 @@ ManagedValue SILGenFunction::emitLValueToPointer(SILLocation loc, LValue &&lv,
   SILValue address = emitAddressOfLValue(loc, std::move(lv)).getUnmanagedValue();
   address = B.createAddressToPointer(loc, address,
-                                     SILType::getRawPointerType(getASTContext()));
+                                     SILType::getRawPointerType(getASTContext()),
+                                     /*needsStackProtection=*/ true);
 
   // Disable nested writeback scopes for any calls evaluated during the
   // conversion intrinsic.
diff --git a/lib/SILGen/SILGenGlobalVariable.cpp b/lib/SILGen/SILGenGlobalVariable.cpp
index 23b0aa05de4ee..79624367e6faf 100644
--- a/lib/SILGen/SILGenGlobalVariable.cpp
+++ b/lib/SILGen/SILGenGlobalVariable.cpp
@@ -254,7 +254,8 @@ static void emitOnceCall(SILGenFunction &SGF, VarDecl *global,
   // Emit a reference to the global token.
   SILValue onceTokenAddr = SGF.B.createGlobalAddr(global, onceToken);
   onceTokenAddr = SGF.B.createAddressToPointer(global, onceTokenAddr,
-                                               rawPointerSILTy);
+                                               rawPointerSILTy,
+                                               /*needsStackProtection=*/ false);
 
   // Emit a reference to the function to execute.
   SILValue onceFuncRef = SGF.B.createFunctionRefFor(global, onceFunc);
@@ -277,7 +278,8 @@ void SILGenFunction::emitGlobalAccessor(VarDecl *global,
   SILType rawPointerSILTy =
       getLoweredLoadableType(getASTContext().TheRawPointerType);
-  addr = B.createAddressToPointer(global, addr, rawPointerSILTy);
+  addr = B.createAddressToPointer(global, addr, rawPointerSILTy,
+                                  /*needsStackProtection=*/ false);
   auto *ret = B.createReturn(global, addr);
   (void)ret;
   assert(ret->getDebugScope() && "instruction without scope");
diff --git a/lib/SILOptimizer/Mandatory/OSLogOptimization.cpp b/lib/SILOptimizer/Mandatory/OSLogOptimization.cpp
index 658313381e699..40c3e931bc940 100644
--- a/lib/SILOptimizer/Mandatory/OSLogOptimization.cpp
+++ b/lib/SILOptimizer/Mandatory/OSLogOptimization.cpp
@@ -558,7 +558,8 @@ static SILValue emitCodeForConstantArray(ArrayRef<SILValue> elements,
     if (elementIndex != 0) {
       SILValue indexSIL = builder.createIntegerLiteral(
           loc, SILType::getBuiltinWordType(astContext), elementIndex);
-      currentStorageAddr = builder.createIndexAddr(loc, storageAddr, indexSIL);
+      currentStorageAddr = builder.createIndexAddr(loc, storageAddr, indexSIL,
+                                                   /*needsStackProtection=*/ false);
     } else {
       currentStorageAddr = storageAddr;
     }
diff --git a/lib/SILOptimizer/PassManager/PassManager.cpp b/lib/SILOptimizer/PassManager/PassManager.cpp
index df5e1a5356171..c03a41d17334d 100644
--- a/lib/SILOptimizer/PassManager/PassManager.cpp
+++ b/lib/SILOptimizer/PassManager/PassManager.cpp
@@ -1633,3 +1633,8 @@ PassContext_loadFunction(BridgedPassContext context, StringRef name) {
   SILFunction *f = mod->loadFunction(name, SILModule::LinkingMode::LinkNormal);
   return {f};
 }
+
+SwiftInt SILOptions_enableStackProtection(BridgedPassContext context) {
+  SILModule *mod = castToPassInvocation(context)->getPassManager()->getModule();
+  return mod->getOptions().EnableStackProtection;
+}
diff --git a/lib/SILOptimizer/PassManager/PassPipeline.cpp b/lib/SILOptimizer/PassManager/PassPipeline.cpp
index 91d5fdeeaf072..c4e42552c5a5c 100644
--- a/lib/SILOptimizer/PassManager/PassPipeline.cpp
+++ b/lib/SILOptimizer/PassManager/PassPipeline.cpp
@@ -811,6 +811,9 @@ static void addLastChanceOptPassPipeline(SILPassPipelinePlan &P) {
   // Emits remarks on all functions with @_assemblyVision attribute.
   P.addAssemblyVisionRemarkGenerator();
 
+  // In optimized builds, do the inter-procedural analysis in a module pass.
+  P.addStackProtection();
+
   // FIXME: rdar://72935649 (Miscompile on combining PruneVTables with WMO)
   // P.addPruneVTables();
 }
@@ -983,6 +986,9 @@ SILPassPipelinePlan::getOnonePassPipeline(const SILOptions &Options) {
     P.addAssumeSingleThreaded();
   }
 
+  // In Onone builds, do a function-local analysis in a function pass.
+  P.addFunctionStackProtection();
+
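The two pipeline entries above can also be driven independently through sil-opt, which the new test/SILOptimizer/stack_protection.sil test (further down in this patch) does: `-wmo -stack-protection` runs the inter-procedural module pass used at -O, and `-function-stack-protection` runs the function-local variant used at -Onone:

  sil-opt -wmo -stack-protection -enable-sil-verify-all stack_protection.sil
  sil-opt -function-stack-protection -enable-sil-verify-all stack_protection.sil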
   // Has only an effect if the -sil-based-debuginfo option is specified.
   P.addSILDebugInfoGenerator();
diff --git a/lib/SILOptimizer/SILCombiner/SILCombinerApplyVisitors.cpp b/lib/SILOptimizer/SILCombiner/SILCombinerApplyVisitors.cpp
index 4c43e618c2721..e9a02ae2094e0 100644
--- a/lib/SILOptimizer/SILCombiner/SILCombinerApplyVisitors.cpp
+++ b/lib/SILOptimizer/SILCombiner/SILCombinerApplyVisitors.cpp
@@ -398,7 +398,8 @@ bool SILCombiner::tryOptimizeKeypathOffsetOf(ApplyInst *AI,
   SILType ptrType = SILType::getRawPointerType(Builder.getASTContext());
   SILValue offsetPtr;
   projector->project(KeyPathProjector::AccessType::Get, [&](SILValue addr) {
-    offsetPtr = Builder.createAddressToPointer(loc, addr, ptrType);
+    offsetPtr = Builder.createAddressToPointer(loc, addr, ptrType,
+                                               /*needsStackProtection=*/ false);
   });
 
   // The result of the _storedInlineOffset call should be Optional<Int>. If
diff --git a/lib/SILOptimizer/SILCombiner/SILCombinerBuiltinVisitors.cpp b/lib/SILOptimizer/SILCombiner/SILCombinerBuiltinVisitors.cpp
index 173ba1bf1953c..fd42430b24f61 100644
--- a/lib/SILOptimizer/SILCombiner/SILCombinerBuiltinVisitors.cpp
+++ b/lib/SILOptimizer/SILCombiner/SILCombinerBuiltinVisitors.cpp
@@ -451,9 +451,11 @@ static SILValue createIndexAddrFrom(IndexRawPointerInst *I,
       Builder.createBuiltin(I->getLoc(), TruncOrBitCast->getName(),
                             TruncOrBitCast->getType(), {}, Distance);
 
-  auto *NewIAI = Builder.createIndexAddr(I->getLoc(), NewPTAI, DistanceAsWord);
+  auto *NewIAI = Builder.createIndexAddr(I->getLoc(), NewPTAI, DistanceAsWord,
+                                         /*needsStackProtection=*/ false);
   auto *NewATPI =
-      Builder.createAddressToPointer(I->getLoc(), NewIAI, RawPointerTy);
+      Builder.createAddressToPointer(I->getLoc(), NewIAI, RawPointerTy,
+                                     /*needsStackProtection=*/ false);
   return NewATPI;
 }
diff --git a/lib/SILOptimizer/SILCombiner/SILCombinerCastVisitors.cpp b/lib/SILOptimizer/SILCombiner/SILCombinerCastVisitors.cpp
index 53760d54c8280..43e36c3c37879 100644
--- a/lib/SILOptimizer/SILCombiner/SILCombinerCastVisitors.cpp
+++ b/lib/SILOptimizer/SILCombiner/SILCombinerCastVisitors.cpp
@@ -416,7 +416,8 @@ visitPointerToAddressInst(PointerToAddressInst *PTAI) {
       auto DistanceAsWord = Builder.createBuiltin(
           PTAI->getLoc(), Trunc->getName(), Trunc->getType(), {}, Distance);
 
-      return Builder.createIndexAddr(PTAI->getLoc(), NewPTAI, DistanceAsWord);
+      return Builder.createIndexAddr(PTAI->getLoc(), NewPTAI, DistanceAsWord,
+                                     /*needsStackProtection=*/ false);
     }
   }
 }
@@ -462,7 +463,8 @@ visitPointerToAddressInst(PointerToAddressInst *PTAI) {
       auto *NewPTAI =
           Builder.createPointerToAddress(PTAI->getLoc(), Ptr, PTAI->getType(),
                                          PTAI->isStrict(), PTAI->isInvariant());
-      return Builder.createIndexAddr(PTAI->getLoc(), NewPTAI, Distance);
+      return Builder.createIndexAddr(PTAI->getLoc(), NewPTAI, Distance,
+                                     /*needsStackProtection=*/ false);
     }
   }
diff --git a/lib/SILOptimizer/SILCombiner/SILCombinerMiscVisitors.cpp b/lib/SILOptimizer/SILCombiner/SILCombinerMiscVisitors.cpp
index 7f09f5c70d6b3..5f543c2f2aa4c 100644
--- a/lib/SILOptimizer/SILCombiner/SILCombinerMiscVisitors.cpp
+++ b/lib/SILOptimizer/SILCombiner/SILCombinerMiscVisitors.cpp
@@ -1033,7 +1033,8 @@ SILInstruction *SILCombiner::visitIndexAddrInst(IndexAddrInst *IA) {
   auto *newIndex = Builder.createIntegerLiteral(IA->getLoc(),
                                                 IA->getIndex()->getType(),
                                                 index + index2);
-  return Builder.createIndexAddr(IA->getLoc(), base2, newIndex);
+  return Builder.createIndexAddr(IA->getLoc(), base2, newIndex,
+      IA->needsStackProtection() || cast<IndexAddrInst>(base)->needsStackProtection());
 }
 
 /// Walks over all fields of an aggregate and checks if a reference count
diff --git a/lib/SILOptimizer/Transforms/PartialApplySimplification.cpp b/lib/SILOptimizer/Transforms/PartialApplySimplification.cpp
index fe01db3e6e51a..2739421a693a1 100644
--- a/lib/SILOptimizer/Transforms/PartialApplySimplification.cpp
+++ b/lib/SILOptimizer/Transforms/PartialApplySimplification.cpp
@@ -787,7 +787,8 @@ rewriteKnownCalleeWithExplicitContext(SILFunction *callee,
     case ParameterConvention::Indirect_Inout: {
       // Pass a pointer to the argument into the box.
       auto p = B.createAddressToPointer(loc, arg,
-                                        SILType::getRawPointerType(C));
+                                        SILType::getRawPointerType(C),
+                                        /*needsStackProtection=*/ false);
       if (caller->hasOwnership()) {
         B.createStore(loc, p, proj, StoreOwnershipQualifier::Trivial);
       } else {
diff --git a/lib/Serialization/DeserializeSIL.cpp b/lib/Serialization/DeserializeSIL.cpp
index a22bada828a6d..5dafd8df5708f 100644
--- a/lib/Serialization/DeserializeSIL.cpp
+++ b/lib/Serialization/DeserializeSIL.cpp
@@ -1365,7 +1365,6 @@ bool SILDeserializer::readSILInstruction(SILFunction *Fn,
   ONEOPERAND_ONETYPE_INST(BridgeObjectToRef)
   ONEOPERAND_ONETYPE_INST(BridgeObjectToWord)
   ONEOPERAND_ONETYPE_INST(Upcast)
-  ONEOPERAND_ONETYPE_INST(AddressToPointer)
   ONEOPERAND_ONETYPE_INST(RefToRawPointer)
   ONEOPERAND_ONETYPE_INST(RawPointerToRef)
   ONEOPERAND_ONETYPE_INST(ThinToThickFunction)
@@ -1376,6 +1375,17 @@ bool SILDeserializer::readSILInstruction(SILFunction *Fn,
   ONEOPERAND_ONETYPE_INST(ProjectBlockStorage)
 #undef ONEOPERAND_ONETYPE_INST
 
+  case SILInstructionKind::AddressToPointerInst: {
+    assert(RecordKind == SIL_ONE_TYPE_ONE_OPERAND &&
+           "Layout should be OneTypeOneOperand.");
+    ResultInst = Builder.createAddressToPointer(
+        Loc,
+        getLocalValue(ValID, getSILType(MF->getType(TyID2),
+                                        (SILValueCategory)TyCategory2, Fn)),
+        getSILType(MF->getType(TyID), (SILValueCategory)TyCategory, Fn),
+        /*needsStackProtection=*/ Attr != 0);
+    break;
+  }
   case SILInstructionKind::ProjectBoxInst: {
     assert(RecordKind == SIL_ONE_TYPE_ONE_OPERAND &&
            "Layout should be OneTypeOneOperand.");
@@ -1781,7 +1791,8 @@ bool SILDeserializer::readSILInstruction(SILFunction *Fn,
         Loc,
         getLocalValue(ValID, getSILType(Ty, (SILValueCategory)TyCategory, Fn)),
         getLocalValue(ValID2,
-                      getSILType(Ty2, (SILValueCategory)TyCategory2, Fn)));
+                      getSILType(Ty2, (SILValueCategory)TyCategory2, Fn)),
+        /*needsStackProtection=*/ Attr != 0);
     break;
   }
   case SILInstructionKind::TailAddrInst: {
diff --git a/lib/Serialization/ModuleFormat.h b/lib/Serialization/ModuleFormat.h
index 099b62ac2e5df..e92ddc0fe5ed8 100644
--- a/lib/Serialization/ModuleFormat.h
+++ b/lib/Serialization/ModuleFormat.h
@@ -58,7 +58,7 @@ const uint16_t SWIFTMODULE_VERSION_MAJOR = 0;
 /// describe what change you made. The content of this comment isn't important;
 /// it just ensures a conflict if two people change the module format.
 /// Don't worry about adhering to the 80-column limit for this line.
-const uint16_t SWIFTMODULE_VERSION_MINOR = 708; // increment_profiler_counter
+const uint16_t SWIFTMODULE_VERSION_MINOR = 709; // needsStackProtection instruction flags
 
 /// A standard hash seed used for all string hashes in a serialized module.
 ///
diff --git a/lib/Serialization/SerializeSIL.cpp b/lib/Serialization/SerializeSIL.cpp
index 0f8b1260d80b8..db987882079fc 100644
--- a/lib/Serialization/SerializeSIL.cpp
+++ b/lib/Serialization/SerializeSIL.cpp
@@ -1611,6 +1611,7 @@ void SILSerializer::writeSILInstruction(const SILInstruction &SI) {
     const IndexAddrInst *IAI = cast<IndexAddrInst>(&SI);
     operand = IAI->getBase();
     operand2 = IAI->getIndex();
+    Attr = (IAI->needsStackProtection() ? 1 : 0);
   }
   SILTwoOperandsLayout::emitRecord(Out, ScratchRecord,
                                    SILAbbrCodes[SILTwoOperandsLayout::Code],
@@ -1787,6 +1788,8 @@ void SILSerializer::writeSILInstruction(const SILInstruction &SI) {
       attrs |= 0x01;
     } else if (auto *refCast = dyn_cast<UncheckedRefCastInst>(&SI)) {
       attrs = encodeValueOwnership(refCast->getOwnershipKind());
+    } else if (auto *atp = dyn_cast<AddressToPointerInst>(&SI)) {
+      attrs = atp->needsStackProtection() ? 1 : 0;
     }
     writeConversionLikeInstruction(cast<SingleValueInstruction>(&SI), attrs);
     break;
diff --git a/stdlib/public/core/AnyHashable.swift b/stdlib/public/core/AnyHashable.swift
index e2377ab0d02bc..f8ef73b979a3d 100644
--- a/stdlib/public/core/AnyHashable.swift
+++ b/stdlib/public/core/AnyHashable.swift
@@ -159,9 +159,11 @@ public struct AnyHashable {
     }
 
     self.init(_box: _ConcreteHashableBox(false)) // Dummy value
-    _makeAnyHashableUpcastingToHashableBaseType(
-      base,
-      storingResultInto: &self)
+    _withUnprotectedUnsafeMutablePointer(to: &self) {
+      _makeAnyHashableUpcastingToHashableBaseType(
+        base,
+        storingResultInto: $0)
+    }
   }
 
   internal init<H: Hashable>(_usingDefaultRepresentationOf base: H) {
diff --git a/stdlib/public/core/FloatingPointParsing.swift.gyb b/stdlib/public/core/FloatingPointParsing.swift.gyb
index f9343a57c173b..687ee8c50bf3f 100644
--- a/stdlib/public/core/FloatingPointParsing.swift.gyb
+++ b/stdlib/public/core/FloatingPointParsing.swift.gyb
@@ -169,7 +169,7 @@ extension ${Self}: LosslessStringConvertible {
       self.init(Substring(text))
     } else {
       self = 0.0
-      let success = withUnsafeMutablePointer(to: &self) { p -> Bool in
+      let success = _withUnprotectedUnsafeMutablePointer(to: &self) { p -> Bool in
         text.withCString { chars -> Bool in
           switch chars[0] {
           case 9, 10, 11, 12, 13, 32:
@@ -198,7 +198,7 @@ extension ${Self}: LosslessStringConvertible {
   @available(SwiftStdlib 5.3, *)
   public init?(_ text: Substring) {
     self = 0.0
-    let success = withUnsafeMutablePointer(to: &self) { p -> Bool in
+    let success = _withUnprotectedUnsafeMutablePointer(to: &self) { p -> Bool in
       text.withCString { chars -> Bool in
         switch chars[0] {
         case 9, 10, 11, 12, 13, 32:
diff --git a/stdlib/public/core/KeyPath.swift b/stdlib/public/core/KeyPath.swift
index 796380b8b7d1a..fea53b2c6eb91 100644
--- a/stdlib/public/core/KeyPath.swift
+++ b/stdlib/public/core/KeyPath.swift
@@ -1951,7 +1951,9 @@ func _modifyAtWritableKeyPath_impl<Root, Value>(
       keyPath: _unsafeUncheckedDowncast(keyPath,
        to: ReferenceWritableKeyPath<Root, Value>.self))
   }
-  return keyPath._projectMutableAddress(from: &root)
+  return _withUnprotectedUnsafePointer(to: &root) {
+    keyPath._projectMutableAddress(from: $0)
+  }
 }
 
 // The release that ends the access scope is guaranteed to happen
@@ -1980,7 +1982,9 @@ func _setAtWritableKeyPath<Root, Value>(
       value: value)
   }
   // TODO: we should be able to do this more efficiently than projecting.
-  let (addr, owner) = keyPath._projectMutableAddress(from: &root)
+  let (addr, owner) = _withUnprotectedUnsafePointer(to: &root) {
+    keyPath._projectMutableAddress(from: $0)
+  }
   addr.pointee = value
   _fixLifetime(owner)
   // FIXME: this needs a deallocation barrier to ensure that the
@@ -3277,7 +3281,7 @@ internal struct InstantiateKeyPathBuffer: KeyPathPatternVisitor {
     _internalInvariant(_isPOD(T.self))
     let size = MemoryLayout<T>.size
     let (baseAddress, misalign) = adjustDestForAlignment(of: T.self)
-    withUnsafeBytes(of: value) {
+    _withUnprotectedUnsafeBytes(of: value) {
      _memcpy(dest: baseAddress, src: $0.baseAddress.unsafelyUnwrapped,
              size: UInt(size))
     }
diff --git a/stdlib/public/core/LifetimeManager.swift b/stdlib/public/core/LifetimeManager.swift
index a537c5a230953..881dc14054f36 100644
--- a/stdlib/public/core/LifetimeManager.swift
+++ b/stdlib/public/core/LifetimeManager.swift
@@ -82,6 +82,23 @@ public func withUnsafeMutablePointer<T, Result>(
   return try body(UnsafeMutablePointer<T>(Builtin.addressof(&value)))
 }
 
+/// Calls the given closure with a mutable pointer to the given argument.
+///
+/// This function is similar to `withUnsafeMutablePointer`, except that it
+/// doesn't trigger stack protection for the pointer.
+@_alwaysEmitIntoClient
+public func _withUnprotectedUnsafeMutablePointer<T, Result>(
+  to value: inout T,
+  _ body: (UnsafeMutablePointer<T>) throws -> Result
+) rethrows -> Result
+{
+#if $BuiltinUnprotectedAddressOf
+  return try body(UnsafeMutablePointer<T>(Builtin.unprotectedAddressOf(&value)))
+#else
+  return try body(UnsafeMutablePointer<T>(Builtin.addressof(&value)))
+#endif
+}
+
 /// Invokes the given closure with a pointer to the given argument.
 ///
 /// The `withUnsafePointer(to:_:)` function is useful for calling Objective-C
@@ -144,6 +161,23 @@ public func withUnsafePointer<T, Result>(
   return try body(UnsafePointer<T>(Builtin.addressof(&value)))
 }
 
+/// Invokes the given closure with a pointer to the given argument.
+///
+/// This function is similar to `withUnsafePointer`, except that it
+/// doesn't trigger stack protection for the pointer.
+@_alwaysEmitIntoClient
+public func _withUnprotectedUnsafePointer<T, Result>(
+  to value: inout T,
+  _ body: (UnsafePointer<T>) throws -> Result
+) rethrows -> Result
+{
+#if $BuiltinUnprotectedAddressOf
+  return try body(UnsafePointer<T>(Builtin.unprotectedAddressOf(&value)))
+#else
+  return try body(UnsafePointer<T>(Builtin.addressof(&value)))
+#endif
+}
+
 extension String {
   /// Calls the given closure with a pointer to the contents of the string,
   /// represented as a null-terminated sequence of UTF-8 code units.
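A usage sketch for the entry point just added (the `readValue` C function is hypothetical; the closure shape is identical to `withUnsafeMutablePointer(to:_:)`, only the stack-protection marking differs):

@_silgen_name("readValue")
func readValue(_ p: UnsafeMutablePointer<UInt64>)  // assumed external C function

func fill(_ x: inout UInt64) {
  // The callee is trusted to stay within the pointed-to value, so the caller
  // doesn't need stack protection; SILGen emits address_to_pointer without
  // the [stack_protection] flag here.
  _withUnprotectedUnsafeMutablePointer(to: &x) { ptr in
    readValue(ptr)
  }
}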
diff --git a/stdlib/public/core/MutableCollection.swift b/stdlib/public/core/MutableCollection.swift
index 5b0657799a109..08a4e0c3ad6e5 100644
--- a/stdlib/public/core/MutableCollection.swift
+++ b/stdlib/public/core/MutableCollection.swift
@@ -488,8 +488,13 @@ extension MutableCollection {
 public func swap<T>(_ a: inout T, _ b: inout T) {
   // Semantically equivalent to (a, b) = (b, a).
   // Microoptimized to avoid retain/release traffic.
+#if $BuiltinUnprotectedAddressOf
+  let p1 = Builtin.unprotectedAddressOf(&a)
+  let p2 = Builtin.unprotectedAddressOf(&b)
+#else
   let p1 = Builtin.addressof(&a)
   let p2 = Builtin.addressof(&b)
+#endif
   _debugPrecondition(
     p1 != p2,
     "swapping a location with itself is not supported")
diff --git a/stdlib/public/core/Random.swift b/stdlib/public/core/Random.swift
index e293344a28789..d0895664ffdb0 100644
--- a/stdlib/public/core/Random.swift
+++ b/stdlib/public/core/Random.swift
@@ -158,7 +158,9 @@ public struct SystemRandomNumberGenerator: RandomNumberGenerator, Sendable {
   @inlinable
   public mutating func next() -> UInt64 {
     var random: UInt64 = 0
-    swift_stdlib_random(&random, MemoryLayout<UInt64>.size)
+    _withUnprotectedUnsafeMutablePointer(to: &random) {
+      swift_stdlib_random($0, MemoryLayout<UInt64>.size)
+    }
     return random
   }
 }
diff --git a/stdlib/public/core/SmallString.swift b/stdlib/public/core/SmallString.swift
index 49680b5163aa6..de7b8d1fa1da6 100644
--- a/stdlib/public/core/SmallString.swift
+++ b/stdlib/public/core/SmallString.swift
@@ -221,7 +221,7 @@ extension _SmallString {
   ) rethrows -> Result {
     let count = self.count
     var raw = self.zeroTerminatedRawCodeUnits
-    return try Swift.withUnsafeBytes(of: &raw) {
+    return try Swift._withUnprotectedUnsafeBytes(of: &raw) {
       let rawPtr = $0.baseAddress._unsafelyUnwrappedUnchecked
       // Rebind the underlying (UInt64, UInt64) tuple to UInt8 for the
       // duration of the closure. Accessing self after this rebind is undefined.
diff --git a/stdlib/public/core/StringSwitch.swift b/stdlib/public/core/StringSwitch.swift
index b2874ac8a346a..4eb0a175afc46 100644
--- a/stdlib/public/core/StringSwitch.swift
+++ b/stdlib/public/core/StringSwitch.swift
@@ -68,23 +68,20 @@ func _findStringSwitchCaseWithCache(
   string: String,
   cache: inout _OpaqueStringSwitchCache) -> Int {
 
-  return withUnsafeMutableBytes(of: &cache) {
-    (bufPtr: UnsafeMutableRawBufferPointer) -> Int in
-
-    let oncePtr = bufPtr.baseAddress!
-    let cacheRawPtr = oncePtr + MemoryLayout<Builtin.Word>.stride
-    let cachePtr = cacheRawPtr.bindMemory(to: _StringSwitchCache.self, capacity: 1)
-    var context = _StringSwitchContext(cases: cases, cachePtr: cachePtr)
-    withUnsafeMutablePointer(to: &context) { (context) -> () in
-      Builtin.onceWithContext(oncePtr._rawValue, _createStringTableCache,
-                              context._rawValue)
-    }
-    let cache = cachePtr.pointee;
-    if let idx = cache[string] {
-      return idx
-    }
-    return -1
+  let ptr = UnsafeMutableRawPointer(Builtin.unprotectedAddressOf(&cache))
+  let oncePtr = ptr
+  let cacheRawPtr = oncePtr + MemoryLayout<Builtin.Word>.stride
+  let cachePtr = cacheRawPtr.bindMemory(to: _StringSwitchCache.self, capacity: 1)
+  var context = _StringSwitchContext(cases: cases, cachePtr: cachePtr)
+  withUnsafeMutablePointer(to: &context) { (context) -> () in
+    Builtin.onceWithContext(oncePtr._rawValue, _createStringTableCache,
+                            context._rawValue)
+  }
+  let cache = cachePtr.pointee;
+  if let idx = cache[string] {
+    return idx
   }
+  return -1
 }
 
 /// Builds the string switch case.
diff --git a/stdlib/public/core/UnicodeScalar.swift b/stdlib/public/core/UnicodeScalar.swift
index dac7225c37ede..c2e25ee33c7b5 100644
--- a/stdlib/public/core/UnicodeScalar.swift
+++ b/stdlib/public/core/UnicodeScalar.swift
@@ -524,7 +524,7 @@ extension Unicode.Scalar {
     // The first code unit is in the least significant byte of codeUnits.
     codeUnits = codeUnits.littleEndian
-    return try Swift.withUnsafePointer(to: &codeUnits) {
+    return try Swift._withUnprotectedUnsafePointer(to: &codeUnits) {
       return try $0.withMemoryRebound(to: UInt8.self, capacity: 4) {
         return try body(UnsafeBufferPointer(start: $0, count: utf8Count))
       }
diff --git a/stdlib/public/core/UnsafeRawBufferPointer.swift.gyb b/stdlib/public/core/UnsafeRawBufferPointer.swift.gyb
index 1df4cff6dba8a..44a68d03220c7 100644
--- a/stdlib/public/core/UnsafeRawBufferPointer.swift.gyb
+++ b/stdlib/public/core/UnsafeRawBufferPointer.swift.gyb
@@ -1037,6 +1037,22 @@ public func withUnsafeBytes<T, Result>(
   }
 }
 
+/// Invokes the given closure with a buffer pointer covering the raw bytes of
+/// the given argument.
+///
+/// This function is similar to `withUnsafeBytes`, except that it
+/// doesn't trigger stack protection for the pointer.
+@_alwaysEmitIntoClient
+public func _withUnprotectedUnsafeBytes<T, Result>(
+  of value: inout T,
+  _ body: (UnsafeRawBufferPointer) throws -> Result
+) rethrows -> Result
+{
+  return try _withUnprotectedUnsafePointer(to: &value) {
+    try body(UnsafeRawBufferPointer(start: $0, count: MemoryLayout<T>.size))
+  }
+}
+
 /// Invokes the given closure with a buffer pointer covering the raw bytes of
 /// the given argument.
 ///
@@ -1066,6 +1082,25 @@ public func withUnsafeBytes<T, Result>(
   return try body(buffer)
 }
 
+/// Invokes the given closure with a buffer pointer covering the raw bytes of
+/// the given argument.
+///
+/// This function is similar to `withUnsafeBytes`, except that it
+/// doesn't trigger stack protection for the pointer.
+@_alwaysEmitIntoClient
+public func _withUnprotectedUnsafeBytes<T, Result>(
+  of value: T,
+  _ body: (UnsafeRawBufferPointer) throws -> Result
+) rethrows -> Result {
+#if $BuiltinUnprotectedAddressOf
+  let addr = UnsafeRawPointer(Builtin.unprotectedAddressOfBorrow(value))
+#else
+  let addr = UnsafeRawPointer(Builtin.addressOfBorrow(value))
+#endif
+  let buffer = UnsafeRawBufferPointer(start: addr, count: MemoryLayout<T>.size)
+  return try body(buffer)
+}
+
 // ${'Local Variables'}:
 // eval: (read-only-mode 1)
 // End:
diff --git a/stdlib/public/core/VarArgs.swift b/stdlib/public/core/VarArgs.swift
index 91429ac4dc8f2..a5c1e51d674db 100644
--- a/stdlib/public/core/VarArgs.swift
+++ b/stdlib/public/core/VarArgs.swift
@@ -215,9 +215,11 @@ public func _encodeBitsAsWords<T>(_ x: T) -> [Int] {
   _internalInvariant(!result.isEmpty)
   var tmp = x
   // FIXME: use UnsafeMutablePointer.assign(from:) instead of memcpy.
-  _memcpy(dest: UnsafeMutablePointer(result._baseAddressIfContiguous!),
-          src: UnsafeMutablePointer(Builtin.addressof(&tmp)),
-          size: UInt(MemoryLayout<T>.size))
+  _withUnprotectedUnsafeMutablePointer(to: &tmp) {
+    _memcpy(dest: UnsafeMutablePointer(result._baseAddressIfContiguous!),
+            src: $0,
+            size: UInt(MemoryLayout<T>.size))
+  }
   return result
 }
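Similarly, a usage sketch for `_withUnprotectedUnsafeBytes` (the `hashBytes` C function is hypothetical; the shape mirrors the call sites in the SILOptimizer test further down):

@_silgen_name("hashBytes")
func hashBytes(_ p: UnsafeRawPointer, _ count: Int) -> UInt64  // assumed external C function

func checksum(_ value: inout UInt64) -> UInt64 {
  // Reads stay within the value's MemoryLayout<UInt64>.size bytes, so
  // skipping stack protection here is sound.
  return _withUnprotectedUnsafeBytes(of: &value) { buffer in
    hashBytes(buffer.baseAddress!, buffer.count)
  }
}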
diff --git a/test/ClangImporter/throwing-mismarked-nonnullable-error.swift b/test/ClangImporter/throwing-mismarked-nonnullable-error.swift
index 1e82d1fbba545..4f6604340850a 100644
--- a/test/ClangImporter/throwing-mismarked-nonnullable-error.swift
+++ b/test/ClangImporter/throwing-mismarked-nonnullable-error.swift
@@ -6,7 +6,7 @@
 // throwing. We really just shouldn't expect this at all. I filed: rdar://94656178
 // to track that work.
 
-// CHECK-LABEL: sil hidden @$s4main1fyyF : $@convention(thin) () -> () {
+// CHECK-LABEL: sil hidden {{.*}}@$s4main1fyyF : $@convention(thin) () -> () {
 // CHECK: [[STACK:%.*]] = alloc_stack [dynamic_lifetime] $Optional<NSError>
 // CHECK: inject_enum_addr [[STACK]] : $*Optional<NSError>, #Optional.none!enumelt
 // CHECK: [[FN:%.*]] = objc_method {{%[0-9]+}} : $MyClass, #MyClass.submit!foreign : (MyClass) -> () throws -> (), $@convention(objc_method) (Optional<AutoreleasingUnsafeMutablePointer<Optional<NSError>>>, MyClass) -> ObjCBool
@@ -14,7 +14,7 @@
 // CHECK: [[VAL:%.*]] = load [[STACK]] : $*Optional<NSError>
 // CHECK: [[UNMANAGED:%.*]] = ref_to_unmanaged [[VAL]]
 // CHECK: store [[UNMANAGED]] to [[NEXT_STACK]]
-// CHECK: [[PTR:%.*]] = address_to_pointer [[NEXT_STACK]]
+// CHECK: [[PTR:%.*]] = address_to_pointer [stack_protection] [[NEXT_STACK]]
 // CHECK: [[AUMP:%.*]] = struct $AutoreleasingUnsafeMutablePointer<Optional<NSError>> ([[PTR]] :
 // CHECK: [[OPT_AUMP:%.*]] = enum $Optional<AutoreleasingUnsafeMutablePointer<Optional<NSError>>>, #Optional.some!enumelt, [[AUMP]] : $AutoreleasingUnsafeMutablePointer<Optional<NSError>>
 // CHECK: apply {{%.*}}([[OPT_AUMP]],
diff --git a/test/IRGen/stack_protector.swift b/test/IRGen/stack_protector.swift
index 5358159978a9b..5187d2bd90546 100644
--- a/test/IRGen/stack_protector.swift
+++ b/test/IRGen/stack_protector.swift
@@ -1,18 +1,25 @@
-// RUN: %target-swift-frontend -enable-stack-protector -emit-ir %s -o - | %FileCheck %s
+// RUN: %target-swift-frontend -enable-stack-protector -Onone -emit-ir %s -o - | %FileCheck %s
+// RUN: %target-swift-frontend -enable-stack-protector -O -emit-ir %s -o - | %FileCheck %s
+
+// REQUIRES: swift_in_compiler
 
 @_silgen_name("escape")
-func f<T>(_ arg: UnsafePointer<T>)
+func f(_ arg: UnsafePointer<Int>)
 
-public func escapeGenericValue<T>(_ t: T) {
-  withUnsafePointer(to: t) { ptr in
-    f(ptr)
-  }
-}
+@_silgen_name("noescape")
+func g(_ arg: Int)
 
 // CHECK: define {{.*}}swiftcc void @"$s15stack_protector21requestStackProtectoryyF"() [[SSPATTRS:#[0-9]+]] {
 public func requestStackProtector() {
   var x: Int = 0
-  escapeGenericValue(x)
+  f(&x)
+}
+
+// CHECK-NOT: define {{.*}}swiftcc void @"$s15stack_protector16noStackProtectoryyF"() [[SSPATTRS]] {
+public func noStackProtector() {
+  var x: Int = 27
+  g(x)
+  g(x) // avoid function merging by calling `g` two times
 }
+// CHECK: [[SSPATTRS]] = { sspreq {{.*}}"stack-protector-buffer-size"="8"
 
-// CHECK: [[SSPATTRS]] = { sspstrong {{.*}}"stack-protector-buffer-size"="8"
diff --git a/test/SIL/Parser/basic.sil b/test/SIL/Parser/basic.sil
index 9203151bfa7db..c3f5173102a9a 100644
--- a/test/SIL/Parser/basic.sil
+++ b/test/SIL/Parser/basic.sil
@@ -752,6 +752,23 @@ bb0:
   return %2 : $()
 }
 
+// CHECK-LABEL: sil @test_stack_protection_flags
+sil @test_stack_protection_flags : $@convention(thin) (@inout Builtin.Word) -> () {
+bb0(%0 : $*Builtin.Word):
+  // CHECK: address_to_pointer [stack_protection] %0
+  %1 = address_to_pointer [stack_protection] %0 : $*Builtin.Word to $Builtin.RawPointer
+  // CHECK: address_to_pointer %0
+  %2 = address_to_pointer %0 : $*Builtin.Word to $Builtin.RawPointer
+  %3 = integer_literal $Builtin.Word, 0
+  // CHECK: index_addr [stack_protection] %0
+  %4 = index_addr [stack_protection] %0 : $*Builtin.Word, %3 : $Builtin.Word
+  // CHECK: index_addr %0
+  %5 = index_addr %0 : $*Builtin.Word, %3 : $Builtin.Word
+
+  %6 = tuple ()
+  return %6 : $()
+}
+
 // CHECK-LABEL: sil @test_tail_elems
 sil @test_tail_elems : $@convention(thin) (Builtin.Word, Builtin.Word) -> () {
 bb0(%0 : $Builtin.Word, %1 : $Builtin.Word):
diff --git a/test/SILGen/builtins.swift b/test/SILGen/builtins.swift
index 78480b91d63bc..cec69cc04b858 100644
--- a/test/SILGen/builtins.swift
+++ b/test/SILGen/builtins.swift
@@ -326,8 +326,8 @@ func gep_raw32(_ p: Builtin.RawPointer, i: Builtin.Int32) -> Builtin.RawPointer
 // CHECK-LABEL: sil hidden [ossa] @$s8builtins3gep{{[_0-9a-zA-Z]*}}F
 func gep<Elem>(_ p: Builtin.RawPointer, i: Builtin.Word, e: Elem.Type) -> Builtin.RawPointer {
   // CHECK: [[P2A:%.*]] = pointer_to_address %0
-  // CHECK: [[GEP:%.*]] = index_addr [[P2A]] : $*Elem, %1 : $Builtin.Word
-  // CHECK: [[A2P:%.*]] = address_to_pointer [[GEP]]
+  // CHECK: [[GEP:%.*]] = index_addr [stack_protection] [[P2A]] : $*Elem, %1 : $Builtin.Word
+  // CHECK: [[A2P:%.*]] = address_to_pointer [stack_protection] [[GEP]]
   // CHECK: return [[A2P]]
   return Builtin.gep_Word(p, i, e)
 }
@@ -386,6 +386,40 @@ func getTailAddr<T1, T2>(start: Builtin.RawPointer, i: Builtin.Word, ty1: T1.Typ
   return Builtin.getTailAddr_Word(start, i, ty1, ty2)
 }
 
+// CHECK-LABEL: sil hidden [ossa] @$s8builtins18protectedAddressOfyBpxzlF
+func protectedAddressOf<T>(_ x: inout T) -> Builtin.RawPointer {
+  // CHECK: [[A:%.*]] = begin_access [modify] [unknown] %0
+  // CHECK: [[P:%.*]] = address_to_pointer [stack_protection] [[A]]
+  // CHECK: return [[P]]
+  return Builtin.addressof(&x)
+}
+// CHECK: } // end sil function '$s8builtins18protectedAddressOfyBpxzlF'
+
+// CHECK-LABEL: sil hidden [ossa] @$s8builtins20unprotectedAddressOfyBpxzlF
+func unprotectedAddressOf<T>(_ x: inout T) -> Builtin.RawPointer {
+  // CHECK: [[A:%.*]] = begin_access [modify] [unknown] %0
+  // CHECK: [[P:%.*]] = address_to_pointer [[A]]
+  // CHECK: return [[P]]
+  return Builtin.unprotectedAddressOf(&x)
+}
+// CHECK: } // end sil function '$s8builtins20unprotectedAddressOfyBpxzlF'
+
+// CHECK-LABEL: sil hidden [ossa] @$s8builtins24protectedAddressOfBorrowyBpxlF
+func protectedAddressOfBorrow<T>(_ x: T) -> Builtin.RawPointer {
+  // CHECK: [[P:%.*]] = address_to_pointer [stack_protection] %0
+  // CHECK: return [[P]]
+  return Builtin.addressOfBorrow(x)
+}
+// CHECK: } // end sil function '$s8builtins24protectedAddressOfBorrowyBpxlF'
+
+// CHECK-LABEL: sil hidden [ossa] @$s8builtins26unprotectedAddressOfBorrowyBpxlF
+func unprotectedAddressOfBorrow<T>(_ x: T) -> Builtin.RawPointer {
+  // CHECK: [[P:%.*]] = address_to_pointer %0
+  // CHECK: return [[P]]
+  return Builtin.unprotectedAddressOfBorrow(x)
+}
+// CHECK: } // end sil function '$s8builtins26unprotectedAddressOfBorrowyBpxlF'
+
 // CHECK-LABEL: sil hidden [ossa] @$s8builtins25beginUnpairedModifyAccess{{[_0-9a-zA-Z]*}}F
 func beginUnpairedModifyAccess<T1>(address: Builtin.RawPointer, scratch: Builtin.RawPointer, ty1: T1.Type) {
   // CHECK: [[P2A_ADDR:%.*]] = pointer_to_address %0
diff --git a/test/SILGen/foreign_errors.swift b/test/SILGen/foreign_errors.swift
index 70b867680a1df..d7d758bf83bf1 100644
--- a/test/SILGen/foreign_errors.swift
+++ b/test/SILGen/foreign_errors.swift
@@ -22,7 +22,7 @@ func test0() throws {
   // CHECK: [[T0:%.*]] = load_borrow [[ERR_TEMP0]]
   // CHECK: [[T1:%.*]] = ref_to_unmanaged [[T0]]
   // CHECK: store [[T1]] to [trivial] [[ERR_TEMP1]]
-  // CHECK: address_to_pointer [[ERR_TEMP1]]
+  // CHECK: address_to_pointer [stack_protection] [[ERR_TEMP1]]
 
   // Call the method.
   // CHECK: [[RESULT:%.*]] = apply [[METHOD]]({{.*}}, [[SELF]])
diff --git a/test/SILGen/pointer_conversion.swift b/test/SILGen/pointer_conversion.swift
index 1302caa286ea2..e25bede2a686b 100644
--- a/test/SILGen/pointer_conversion.swift
+++ b/test/SILGen/pointer_conversion.swift
@@ -177,7 +177,7 @@ func inoutToPointer() {
   // CHECK: [[PB:%.*]] = project_box [[INT]]
   takesMutablePointer(&int)
   // CHECK: [[WRITE:%.*]] = begin_access [modify] [unknown] [[PB]]
-  // CHECK: [[POINTER:%.*]] = address_to_pointer [[WRITE]]
+  // CHECK: [[POINTER:%.*]] = address_to_pointer [stack_protection] [[WRITE]]
   // CHECK: [[CONVERT:%.*]] = function_ref @$ss30_convertInOutToPointerArgument{{[_0-9a-zA-Z]*}}F
   // CHECK: apply [[CONVERT]]<UnsafeMutablePointer<Int>>({{%.*}}, [[POINTER]])
   // CHECK: [[TAKES_MUTABLE:%.*]] = function_ref @$s18pointer_conversion19takesMutablePointer{{[_0-9a-zA-Z]*}}F
@@ -199,7 +199,7 @@ func inoutToPointer() {
   takesMutableRawPointer(&int)
   // CHECK: [[WRITE:%.*]] = begin_access [modify] [unknown] [[PB]]
-  // CHECK: [[POINTER:%.*]] = address_to_pointer [[WRITE]]
+  // CHECK: [[POINTER:%.*]] = address_to_pointer [stack_protection] [[WRITE]]
   // CHECK: [[CONVERT:%.*]] = function_ref @$ss30_convertInOutToPointerArgument{{[_0-9a-zA-Z]*}}F
   // CHECK: apply [[CONVERT]]({{%.*}}, [[POINTER]])
   // CHECK: [[TAKES_MUTABLE:%.*]] = function_ref @$s18pointer_conversion22takesMutableRawPointer{{[_0-9a-zA-Z]*}}F
@@ -230,7 +230,7 @@ func classInoutToPointer() {
   // CHECK: [[PB:%.*]] = project_box [[LIFETIME]]
   takesPlusOnePointer(&c)
   // CHECK: [[WRITE:%.*]] = begin_access [modify] [unknown] [[PB]]
-  // CHECK: [[POINTER:%.*]] = address_to_pointer [[WRITE]]
+  // CHECK: [[POINTER:%.*]] = address_to_pointer [stack_protection] [[WRITE]]
   // CHECK: [[CONVERT:%.*]] = function_ref @$ss30_convertInOutToPointerArgument{{[_0-9a-zA-Z]*}}F
   // CHECK: apply [[CONVERT]]<UnsafeMutablePointer<C>>({{%.*}}, [[POINTER]])
   // CHECK: [[TAKES_PLUS_ONE:%.*]] = function_ref @$s18pointer_conversion19takesPlusOnePointer{{[_0-9a-zA-Z]*}}F
@@ -242,7 +242,7 @@ func classInoutToPointer() {
   // CHECK: [[OWNED:%.*]] = load_borrow [[WRITE2]]
   // CHECK: [[UNOWNED:%.*]] = ref_to_unmanaged [[OWNED]]
   // CHECK: store [[UNOWNED]] to [trivial] [[WRITEBACK]]
-  // CHECK: [[POINTER:%.*]] = address_to_pointer [[WRITEBACK]]
+  // CHECK: [[POINTER:%.*]] = address_to_pointer [stack_protection] [[WRITEBACK]]
   // CHECK: [[CONVERT:%.*]] = function_ref @$ss30_convertInOutToPointerArgument{{[_0-9a-zA-Z]*}}F
   // CHECK: apply [[CONVERT]]<AutoreleasingUnsafeMutablePointer<C>>({{%.*}}, [[POINTER]])
   // CHECK: [[TAKES_PLUS_ZERO:%.*]] = function_ref @$s18pointer_conversion20takesPlusZeroPointeryySAyAA1CCGF
@@ -271,7 +271,7 @@ func functionInoutToPointer() {
   var f: () -> () = {}
   // CHECK: [[REABSTRACT_BUF:%.*]] = alloc_stack $@callee_guaranteed @substituted <τ_0_0> () -> @out τ_0_0 for <()>
-  // CHECK: address_to_pointer [[REABSTRACT_BUF]]
+  // CHECK: address_to_pointer [stack_protection] [[REABSTRACT_BUF]]
   takesMutableVoidPointer(&f)
 }
diff --git a/test/SILGen/pointer_conversion_nonaccessing.swift b/test/SILGen/pointer_conversion_nonaccessing.swift
index 044dea119a68b..f96b472b253a9 100644
--- a/test/SILGen/pointer_conversion_nonaccessing.swift
+++ b/test/SILGen/pointer_conversion_nonaccessing.swift
@@ -10,55 +10,55 @@ var global = 0
 // CHECK-LABEL: sil hidden [ossa] @$s31pointer_conversion_nonaccessing6testEq3ptrSbSV_tF
 func testEq(ptr: UnsafeRawPointer) -> Bool {
   // CHECK: [[T0:%.*]] = global_addr @$s31pointer_conversion_nonaccessing6globalSiv
-  // CHECK: address_to_pointer [[T0]]
+  // CHECK: address_to_pointer [stack_protection] [[T0]]
   return &global == ptr
 }
 
 // CHECK-LABEL: sil hidden [ossa] @$s31pointer_conversion_nonaccessing7testNeq3ptrSbSV_tF
 func testNeq(ptr: UnsafeRawPointer) -> Bool {
   // CHECK: [[T0:%.*]] = global_addr @$s31pointer_conversion_nonaccessing6globalSiv
-  // CHECK: address_to_pointer [[T0]]
+  // CHECK: address_to_pointer [stack_protection] [[T0]]
   return &global != ptr
 }
 
 // CHECK-LABEL: sil hidden [ossa] @$s31pointer_conversion_nonaccessing6testEq3ptrSbSv_tF
 func testEq(ptr: UnsafeMutableRawPointer) -> Bool {
   // CHECK: [[T0:%.*]] = global_addr @$s31pointer_conversion_nonaccessing6globalSiv
-  // CHECK: address_to_pointer [[T0]]
+  // CHECK: address_to_pointer [stack_protection] [[T0]]
   return &global == ptr
 }
 
 // CHECK-LABEL: sil hidden [ossa] @$s31pointer_conversion_nonaccessing7testNeq3ptrSbSv_tF
 func testNeq(ptr: UnsafeMutableRawPointer) -> Bool {
   // CHECK: [[T0:%.*]] = global_addr @$s31pointer_conversion_nonaccessing6globalSiv
-  // CHECK: address_to_pointer [[T0]]
+  // CHECK: address_to_pointer [stack_protection] [[T0]]
   return &global != ptr
 }
 
 // CHECK-LABEL: sil hidden [ossa] @$s31pointer_conversion_nonaccessing6testEq3ptrSbSPySiG_tF
 func testEq(ptr: UnsafePointer<Int>) -> Bool {
   // CHECK: [[T0:%.*]] = global_addr @$s31pointer_conversion_nonaccessing6globalSiv
-  // CHECK: address_to_pointer [[T0]]
+  // CHECK: address_to_pointer [stack_protection] [[T0]]
   return &global == ptr
 }
 
 // CHECK-LABEL: sil hidden [ossa] @$s31pointer_conversion_nonaccessing7testNeq3ptrSbSPySiG_tF
 func testNeq(ptr: UnsafePointer<Int>) -> Bool {
   // CHECK: [[T0:%.*]] = global_addr @$s31pointer_conversion_nonaccessing6globalSiv
-  // CHECK: address_to_pointer [[T0]]
+  // CHECK: address_to_pointer [stack_protection] [[T0]]
   return &global != ptr
 }
 
 // CHECK-LABEL: sil hidden [ossa] @$s31pointer_conversion_nonaccessing6testEq3ptrSbSpySiG_tF
 func testEq(ptr: UnsafeMutablePointer<Int>) -> Bool {
   // CHECK: [[T0:%.*]] = global_addr @$s31pointer_conversion_nonaccessing6globalSiv
-  // CHECK: address_to_pointer [[T0]]
+  // CHECK: address_to_pointer [stack_protection] [[T0]]
   return &global == ptr
 }
 
 // CHECK-LABEL: sil hidden [ossa] @$s31pointer_conversion_nonaccessing7testNeq3ptrSbSpySiG_tF
 func testNeq(ptr: UnsafeMutablePointer<Int>) -> Bool {
   // CHECK: [[T0:%.*]] = global_addr @$s31pointer_conversion_nonaccessing6globalSiv
-  // CHECK: address_to_pointer [[T0]]
+  // CHECK: address_to_pointer [stack_protection] [[T0]]
   return &global != ptr
 }
diff --git a/test/SILGen/pointer_conversion_nonaccessing_objc.swift b/test/SILGen/pointer_conversion_nonaccessing_objc.swift
index 48e909993fb3c..e25cfb2d2bf0e 100644
--- a/test/SILGen/pointer_conversion_nonaccessing_objc.swift
+++ b/test/SILGen/pointer_conversion_nonaccessing_objc.swift
@@ -14,14 +14,14 @@ var global = 0
 // CHECK-LABEL: sil hidden [ossa] @$s36pointer_conversion_nonaccessing_objc15testAddObserver6object8observerySo8NSObjectC_AFtF
 func testAddObserver(object: NSObject, observer: NSObject) {
   // CHECK: [[T0:%.*]] = global_addr @$s36pointer_conversion_nonaccessing_objc6globalSiv
-  // CHECK: address_to_pointer [[T0]] :
+  // CHECK: address_to_pointer [stack_protection] [[T0]] :
   object.addObserver(observer, forKeyPath: "", options: 0, context: &global)
 }
 
 // CHECK-LABEL: sil hidden [ossa] @$s36pointer_conversion_nonaccessing_objc18testRemoveObserver6object8observerySo8NSObjectC_AFtF
 func testRemoveObserver(object: NSObject, observer: NSObject) {
   // CHECK: [[T0:%.*]] = global_addr @$s36pointer_conversion_nonaccessing_objc6globalSiv
-  // CHECK: address_to_pointer [[T0]] :
+  // CHECK: address_to_pointer [stack_protection] [[T0]] :
   object.removeObserver(observer, forKeyPath: "", context: &global)
 }
 
@@ -31,27 +31,27 @@ func testRemoveObserver(object: NSObject, observer: NSObject) {
 // CHECK-LABEL: sil hidden [ossa] @$s36pointer_conversion_nonaccessing_objc28testDynamicForcedAddObserver6object8observeryyXl_So8NSObjectCtF
 func testDynamicForcedAddObserver(object: AnyObject, observer: NSObject) {
   // CHECK: [[T0:%.*]] = global_addr @$s36pointer_conversion_nonaccessing_objc6globalSiv
-  // CHECK: address_to_pointer [[T0]] :
+  // CHECK: address_to_pointer [stack_protection] [[T0]] :
   object.addObserver!(observer, forKeyPath: "", options: 0, context: &global)
 }
 
 // CHECK-LABEL: sil hidden [ossa] @$s36pointer_conversion_nonaccessing_objc31testDynamicForcedRemoveObserver6object8observeryyXl_So8NSObjectCtF
 func testDynamicForcedRemoveObserver(object: AnyObject, observer: NSObject) {
   // CHECK: [[T0:%.*]] = global_addr @$s36pointer_conversion_nonaccessing_objc6globalSiv
-  // CHECK: address_to_pointer [[T0]] :
+  // CHECK: address_to_pointer [stack_protection] [[T0]] :
   object.removeObserver!(observer, forKeyPath: "", context: &global)
 }
 
 // CHECK-LABEL: sil hidden [ossa] @$s36pointer_conversion_nonaccessing_objc30testDynamicOptionalAddObserver6object8observeryyXl_So8NSObjectCtF
 func testDynamicOptionalAddObserver(object: AnyObject, observer: NSObject) {
   // CHECK: [[T0:%.*]] = global_addr @$s36pointer_conversion_nonaccessing_objc6globalSiv
-  // CHECK: address_to_pointer [[T0]] :
+  // CHECK: address_to_pointer [stack_protection] [[T0]] :
   object.addObserver?(observer, forKeyPath: "", options: 0, context: &global)
 }
 
 // CHECK-LABEL: sil hidden [ossa] @$s36pointer_conversion_nonaccessing_objc33testDynamicOptionalRemoveObserver6object8observeryyXl_So8NSObjectCtF
 func testDynamicOptionalRemoveObserver(object: AnyObject, observer: NSObject) {
   // CHECK: [[T0:%.*]] = global_addr @$s36pointer_conversion_nonaccessing_objc6globalSiv
-  // CHECK: address_to_pointer [[T0]] :
+  // CHECK: address_to_pointer [stack_protection] [[T0]] :
   object.removeObserver?(observer, forKeyPath: "", context: &global)
 }
diff --git a/test/SILOptimizer/stack_protection.sil b/test/SILOptimizer/stack_protection.sil
new file mode 100644
index 0000000000000..1e35e41f26fbb
--- /dev/null
+++ b/test/SILOptimizer/stack_protection.sil
@@ -0,0 +1,495 @@
+// RUN: %target-sil-opt -wmo -stack-protection -enable-sil-verify-all %s | %FileCheck %s --check-prefix=CHECK --check-prefix=MODULE
+// RUN: %target-sil-opt -function-stack-protection -enable-sil-verify-all %s | %FileCheck %s --check-prefix=CHECK --check-prefix=FUNCTION
+
+// REQUIRES: swift_in_compiler
+
+sil_stage canonical
+
+import Builtin
+import Swift
+
+class C {
+  @_hasStorage var i: Int
+  @_hasStorage var j: Int
+  @_hasStorage var s: S
+}
+
+struct S {
+  @_hasStorage var a: Int
+  @_hasStorage var b: Int
+}
+
+// CHECK-LABEL: sil [stack_protection] @function_local_stack
+// CHECK-NOT: copy_addr
+// CHECK: } // end sil function 'function_local_stack'
+sil @function_local_stack : $@convention(thin) () -> () {
+bb0:
+  %0 = alloc_stack $Int
+  %1 = address_to_pointer [stack_protection] %0 : $*Int to $Builtin.RawPointer
+  dealloc_stack %0 : $*Int
+  %r = tuple ()
+  return %r : $()
+}
+
+// CHECK-LABEL: sil @no_stack_protection
+// CHECK-NOT: copy_addr
+// CHECK: } // end sil function 'no_stack_protection'
+sil @no_stack_protection : $@convention(thin) () -> () {
+bb0:
+  %0 = alloc_stack $Int
+  %1 = address_to_pointer %0 : $*Int to $Builtin.RawPointer
+  %2 = integer_literal $Builtin.Word, 1
+  %3 = index_addr %0 : $*Int, %2 : $Builtin.Word
+  dealloc_stack %0 : $*Int
+  %r = tuple ()
+  return %r : $()
+}
+
+// CHECK-LABEL: sil [stack_protection] @stack_alloc_builtin
+// CHECK-NOT: copy_addr
+// CHECK: } // end sil function 'stack_alloc_builtin'
+sil @stack_alloc_builtin : $@convention(thin) () -> () {
+bb0:
+  %0 = integer_literal $Builtin.Word, 8
+  %1 = builtin "stackAlloc"(%0 : $Builtin.Word, %0 : $Builtin.Word, %0 : $Builtin.Word) : $Builtin.RawPointer
+  %2 = builtin "stackDealloc"(%1 : $Builtin.RawPointer) : $()
+  %r = tuple ()
+  return %r : $()
+}
+
+// CHECK-LABEL: sil [stack_protection] @function_local_stack_obj
+// MODULE-NOT: copy_addr
+// FUNCTION: copy_addr
+// CHECK: } // end sil function 'function_local_stack_obj'
+sil @function_local_stack_obj : $@convention(thin) () -> () {
+bb0:
+  %0 = alloc_ref [stack] $C
+  %1 = ref_element_addr %0 : $C, #C.i
+  %2 = begin_access [modify] [static] %1 : $*Int
+  %3 = address_to_pointer [stack_protection] %2 : $*Int to $Builtin.RawPointer
+  end_access %2 : $*Int
+  dealloc_stack_ref %0 : $C
+  %r = tuple ()
+  return %r : $()
+}
+
+// MODULE-LABEL: sil @function_local_obj
+// FUNCTION-LABEL: sil [stack_protection] @function_local_obj
+// MODULE-NOT: copy_addr
+// FUNCTION: copy_addr
+// CHECK: } // end sil function 'function_local_obj'
+sil @function_local_obj : $@convention(thin) () -> () {
+bb0:
+  %0 = alloc_ref $C
+  %1 = ref_element_addr %0 : $C, #C.i
+  %2 = begin_access [modify] [static] %1 : $*Int
+  %3 = address_to_pointer [stack_protection] %2 : $*Int to $Builtin.RawPointer
+  end_access %2 : $*Int
+  strong_release %0 : $C
+  %r = tuple ()
+  return %r : $()
+}
+
+// CHECK-LABEL: sil [stack_protection] @inout_with_unknown_callers1
+// CHECK: [[T:%[0-9]+]] = alloc_stack $Int
+// CHECK: copy_addr [take] %0 to [initialization] [[T]] : $*Int
+// CHECK: address_to_pointer [stack_protection] [[T]]
+// CHECK: copy_addr [take] [[T]] to [initialization] %0 : $*Int
+// CHECK: dealloc_stack [[T]] : $*Int
+// CHECK: } // end sil function 'inout_with_unknown_callers1'
+sil @inout_with_unknown_callers1 : $@convention(thin) (@inout Int) -> () {
+bb0(%0 : $*Int):
+  %1 = address_to_pointer [stack_protection] %0 : $*Int to $Builtin.RawPointer
+  %r = tuple ()
+  return %r : $()
+}
+
+// CHECK-LABEL: sil @in_with_unknown_callers
+// CHECK: address_to_pointer [stack_protection] %0
+// CHECK-NOT: copy_addr
+// CHECK: } // end sil function 'in_with_unknown_callers'
+sil @in_with_unknown_callers : $@convention(thin) (@in_guaranteed Int) -> () {
+bb0(%0 : $*Int):
+  %1 = address_to_pointer [stack_protection] %0 : $*Int to $Builtin.RawPointer
+  %r = tuple ()
+  return %r : $()
+}
+
+// CHECK-LABEL: sil [stack_protection] @inout_with_modify_access
+// CHECK: [[A:%[0-9]+]] = begin_access [modify] [dynamic] %0 : $*Int
+// CHECK: [[T:%[0-9]+]] = alloc_stack $Int
+// CHECK: copy_addr [take] [[A]] to [initialization] [[T]] : $*Int
+// CHECK: address_to_pointer [stack_protection] [[T]]
+// CHECK: copy_addr [take] [[T]] to [initialization] [[A]] : $*Int
+// CHECK: dealloc_stack [[T]] : $*Int
+// CHECK: } // end sil function 'inout_with_modify_access'
+sil @inout_with_modify_access : $@convention(thin) (@inout Int) -> () {
+bb0(%0 : $*Int):
+  %1 = begin_access [modify] [dynamic] %0 : $*Int
+  %2 = address_to_pointer [stack_protection] %1 : $*Int to $Builtin.RawPointer
+  end_access %1 : $*Int
+  %r = tuple ()
+  return %r : $()
+}
+
+// CHECK-LABEL: sil @inout_with_read_access
+// CHECK: [[A:%[0-9]+]] = begin_access [read] [dynamic] %0 : $*Int
+// CHECK: address_to_pointer [stack_protection] [[A]]
+// CHECK-NOT: copy_addr
+// CHECK: } // end sil function 'inout_with_read_access'
+sil @inout_with_read_access : $@convention(thin) (@inout Int) -> () {
+bb0(%0 : $*Int):
+  %1 = begin_access [read] [dynamic] %0 : $*Int
+  %2 = address_to_pointer [stack_protection] %1 : $*Int to $Builtin.RawPointer
+  end_access %1 : $*Int
+  %r = tuple ()
+  return %r : $()
+}
+
+// CHECK-LABEL: sil [stack_protection] @inout_with_unknown_callers2
+// CHECK: [[T:%[0-9]+]] = alloc_stack $S
+// CHECK: copy_addr [take] %0 to [initialization] [[T]] : $*S
+// CHECK: [[A:%[0-9]+]] = struct_element_addr [[T]] : $*S, #S.a
+// CHECK: address_to_pointer [stack_protection] [[A]]
+// CHECK: [[B:%[0-9]+]] = struct_element_addr [[T]] : $*S, #S.b
+// CHECK: address_to_pointer [stack_protection] [[B]]
+// CHECK: copy_addr [take] [[T]] to [initialization] %0 : $*S
+// CHECK: dealloc_stack [[T]] : $*S
+// CHECK: } // end sil function 'inout_with_unknown_callers2'
+sil @inout_with_unknown_callers2 : $@convention(thin) (@inout S) -> () {
+bb0(%0 : $*S):
+  %1 = struct_element_addr %0 : $*S, #S.a
+  %2 = address_to_pointer [stack_protection] %1 : $*Int to $Builtin.RawPointer
+  %3 = struct_element_addr %0 : $*S, #S.b
+  %4 = address_to_pointer [stack_protection] %3 : $*Int to $Builtin.RawPointer
+  %r = tuple ()
+  return %r : $()
+}
+
+// CHECK-LABEL: sil [stack_protection] @object_with_unknown_callers1
+// CHECK: [[I:%[0-9]+]] = ref_element_addr %0 : $C, #C.i
+// CHECK: [[A:%[0-9]+]] = begin_access [modify] [static] [[I]] : $*Int
+// CHECK: [[T:%[0-9]+]] = alloc_stack $Int
+// CHECK: copy_addr [take] [[A]] to [initialization] [[T]] : $*Int
+// CHECK: address_to_pointer [stack_protection] [[T]]
+// CHECK: copy_addr [take] [[T]] to [initialization] [[A]] : $*Int
+// CHECK: dealloc_stack [[T]] : $*Int
+// CHECK: end_access [[A]] : $*Int
+// CHECK: } // end sil function 'object_with_unknown_callers1'
+sil @object_with_unknown_callers1 : $@convention(thin) (@guaranteed C) -> () {
+bb0(%0 : $C):
+  %1 = ref_element_addr %0 : $C, #C.i
+  %2 = begin_access [modify] [static] %1 : $*Int
+  %3 = address_to_pointer [stack_protection] %2 : $*Int to $Builtin.RawPointer
+  end_access %2 : $*Int
+  %r = tuple ()
+  return %r : $()
+}
+
+// CHECK-LABEL: sil [stack_protection] @object_with_unknown_callers2
+// CHECK: [[I:%[0-9]+]] = ref_element_addr %0 : $C, #C.i
+// CHECK: [[A:%[0-9]+]] = begin_access [modify] [static] [[I]] : $*Int
+// CHECK: [[T:%[0-9]+]] = alloc_stack $Int
+// CHECK: copy_addr [take] [[A]] to [initialization] [[T]] : $*Int
+// CHECK: address_to_pointer [stack_protection] [[T]]
+// CHECK: address_to_pointer [stack_protection] [[T]]
+// CHECK: copy_addr [take] [[T]] to [initialization] [[A]] : $*Int
+// CHECK: dealloc_stack [[T]] : $*Int
+// CHECK: end_access [[A]] : $*Int
+// CHECK: } // end sil function 'object_with_unknown_callers2'
+sil @object_with_unknown_callers2 : $@convention(thin) (@guaranteed C) -> () {
+bb0(%0 : $C):
+  %1 = ref_element_addr %0 : $C, #C.i
+  %2 = begin_access [modify] [static] %1 : $*Int
+  %3 = address_to_pointer [stack_protection] %2 : $*Int to $Builtin.RawPointer
+  %4 = address_to_pointer [stack_protection] %2 : $*Int to $Builtin.RawPointer
+  end_access %2 : $*Int
+  %r = tuple ()
+  return %r : $()
+}
+
+// CHECK-LABEL: sil [stack_protection] @object_with_unknown_callers3
+// CHECK: [[I:%[0-9]+]] = ref_element_addr %0 : $C, #C.i
+// CHECK: [[AI:%[0-9]+]] = begin_access [modify] [static] [[I]] : $*Int
+// CHECK: [[TI:%[0-9]+]] = alloc_stack $Int
+// CHECK: copy_addr [take] [[AI]] to [initialization] [[TI]] : $*Int
+// CHECK: address_to_pointer [stack_protection] [[TI]]
+// CHECK: [[J:%[0-9]+]] = ref_element_addr %0 : $C, #C.j
+// CHECK: [[AJ:%[0-9]+]] = begin_access [modify] [static] [[J]] : $*Int
+// CHECK: [[TJ:%[0-9]+]] = alloc_stack $Int
+// CHECK: copy_addr [take] [[AJ]] to [initialization] [[TJ]] : $*Int
+// CHECK: copy_addr [take] [[TI]] to [initialization] [[AI]] : $*Int
+// CHECK: end_access [[AI]] : $*Int
+// CHECK: address_to_pointer [stack_protection] [[TJ]]
+// CHECK: copy_addr [take] [[TJ]] to [initialization] [[AJ]] : $*Int
+// CHECK: dealloc_stack [[TJ]] : $*Int
+// CHECK: dealloc_stack [[TI]] : $*Int
+// CHECK: end_access [[AJ]] : $*Int
+// CHECK: } // end sil function 'object_with_unknown_callers3'
+sil @object_with_unknown_callers3 : $@convention(thin) (@guaranteed C) -> () {
+bb0(%0 : $C):
+  %1 = ref_element_addr %0 : $C, #C.i
+  // Accesses don't need to be properly nested.
+  %2 = begin_access [modify] [static] %1 : $*Int
+  %3 = address_to_pointer [stack_protection] %2 : $*Int to $Builtin.RawPointer
+  %4 = ref_element_addr %0 : $C, #C.j
+  %5 = begin_access [modify] [static] %4 : $*Int
+  end_access %2 : $*Int
+  %7 = address_to_pointer [stack_protection] %5 : $*Int to $Builtin.RawPointer
+  end_access %5 : $*Int
+  %r = tuple ()
+  return %r : $()
+}
+
+// CHECK-LABEL: sil @object_with_read_access
+// CHECK: [[I:%[0-9]+]] = ref_element_addr %0 : $C, #C.i
+// CHECK: [[A:%[0-9]+]] = begin_access [read] [static] [[I]] : $*Int
+// CHECK: address_to_pointer [stack_protection] [[A]]
+// CHECK-NOT: copy_addr
+// CHECK: end_access [[A]] : $*Int
+// CHECK: } // end sil function 'object_with_read_access'
+sil @object_with_read_access : $@convention(thin) (@guaranteed C) -> () {
+bb0(%0 : $C):
+  %1 = ref_element_addr %0 : $C, #C.i
+  %2 = begin_access [read] [static] %1 : $*Int
+  %3 = address_to_pointer [stack_protection] %2 : $*Int to $Builtin.RawPointer
+  end_access %2 : $*Int
+  %r = tuple ()
+  return %r : $()
+}
+
+// MODULE-LABEL: sil hidden @known_callers_inout
+// FUNCTION-LABEL: sil hidden [stack_protection] @known_callers_inout
+// MODULE-NOT: copy_addr
+// FUNCTION: copy_addr
+// CHECK: } // end sil function 'known_callers_inout'
+sil hidden @known_callers_inout : $@convention(thin) (@inout Int) -> () {
+bb0(%0 : $*Int):
+  %2 = integer_literal $Builtin.Int64, 2
+  %3 = struct $Int (%2 : $Builtin.Int64)
+  %4 = integer_literal $Builtin.Word, 1
+  %5 = index_addr [stack_protection] %0 : $*Int, %4 : $Builtin.Word
+  store %3 to %5 : $*Int
+  %7 = tuple ()
+  return %7 : $()
+}
+
+// MODULE-LABEL: sil [stack_protection] @call_internal_with_inout1
+// FUNCTION-LABEL: sil @call_internal_with_inout1
+// CHECK-NOT: copy_addr
+// CHECK: } // end sil function 'call_internal_with_inout1'
+sil @call_internal_with_inout1 : $@convention(thin) () -> () {
+bb0:
+  %0 = alloc_stack [lexical] $Int
+  %1 = function_ref @known_callers_inout : $@convention(thin) (@inout Int) -> ()
+  %2 = apply %1(%0) : $@convention(thin) (@inout Int) -> ()
+  dealloc_stack %0 : $*Int
+
+  // Introduce a cycle in the call graph.
+ %f = function_ref @call_internal_with_inout2 : $@convention(thin) () -> () + %a = apply %f() : $@convention(thin) () -> () + %4 = tuple () + return %4 : $() +} + + +// MODULE-LABEL: sil [stack_protection] @call_internal_with_inout2 +// FUNCTION-LABEL: sil @call_internal_with_inout2 +// CHECK-NOT: copy_addr +// CHECK: } // end sil function 'call_internal_with_inout2' +sil @call_internal_with_inout2 : $@convention(thin) () -> () { +bb0: + %0 = alloc_stack [lexical] $Int + %1 = function_ref @known_callers_inout : $@convention(thin) (@inout Int) -> () + %2 = apply %1(%0) : $@convention(thin) (@inout Int) -> () + dealloc_stack %0 : $*Int + cond_br undef, bb1, bb2 +bb1: + // Introduce a cycle in the call graph. + %f = function_ref @call_internal_with_inout1 : $@convention(thin) () -> () + %a = apply %f() : $@convention(thin) () -> () + br bb3 +bb2: + br bb3 +bb3: + %4 = tuple () + return %4 : $() +} + +// CHECK-LABEL: sil hidden [stack_protection] @unknown_callers +// CHECK: copy_addr +// CHECK: copy_addr +// CHECK: } // end sil function 'unknown_callers' +sil hidden @unknown_callers : $@convention(thin) (@inout Int) -> () { +bb0(%0 : $*Int): + %2 = integer_literal $Builtin.Int64, 2 + %3 = struct $Int (%2 : $Builtin.Int64) + %4 = integer_literal $Builtin.Word, 1 + %5 = index_addr [stack_protection] %0 : $*Int, %4 : $Builtin.Word + store %3 to %5 : $*Int + %7 = tuple () + return %7 : $() +} + +// CHECK-LABEL: sil @call_unknown_callers +// CHECK-NOT: copy_addr +// CHECK: } // end sil function 'call_unknown_callers' +sil @call_unknown_callers : $@convention(thin) (@inout Int) -> () { +bb0(%0 : $*Int): + %1 = function_ref @unknown_callers : $@convention(thin) (@inout Int) -> () + %2 = apply %1(%0) : $@convention(thin) (@inout Int) -> () + %4 = tuple () + return %4 : $() +} + +// MODULE-LABEL: sil hidden @known_callers_obj1 +// FUNCTION-LABEL: sil hidden [stack_protection] @known_callers_obj1 +// MODULE-NOT: copy_addr +// FUNCTION: copy_addr +// CHECK: } // end sil function 'known_callers_obj1' +sil hidden @known_callers_obj1 : $@convention(thin) (@guaranteed C) -> () { +bb0(%0 : $C): + %1 = ref_element_addr %0 : $C, #C.i + %2 = begin_access [modify] [static] %1 : $*Int + %3 = address_to_pointer [stack_protection] %2 : $*Int to $Builtin.RawPointer + end_access %2 : $*Int + %7 = tuple () + return %7 : $() +} + +// MODULE-LABEL: sil [stack_protection] @call_internal_with_stack_obj +// FUNCTION-LABEL: sil @call_internal_with_stack_obj +// CHECK-NOT: copy_addr +// CHECK: } // end sil function 'call_internal_with_stack_obj' +sil @call_internal_with_stack_obj : $@convention(thin) () -> () { +bb0: + %0 = alloc_ref [stack] $C + %1 = function_ref @known_callers_obj1 : $@convention(thin) (@guaranteed C) -> () + %2 = apply %1(%0) : $@convention(thin) (@guaranteed C) -> () + dealloc_stack_ref %0 : $C + %4 = tuple () + return %4 : $() +} + +// CHECK-LABEL: sil @call_internal_with_heap_obj +// CHECK-NOT: copy_addr +// CHECK: } // end sil function 'call_internal_with_heap_obj' +sil @call_internal_with_heap_obj : $@convention(thin) () -> () { +bb0: + cond_br undef, bb1, bb2 +bb1: + %0 = alloc_ref $C + br bb3(%0 : $C) +bb2: + %2 = alloc_ref $C + br bb3(%2 : $C) +bb3(%5 : $C): + %6 = function_ref @known_callers_obj1 : $@convention(thin) (@guaranteed C) -> () + %7 = apply %6(%5) : $@convention(thin) (@guaranteed C) -> () + strong_release %5 : $C + %r = tuple () + return %r : $() +} + +// CHECK-LABEL: sil hidden [stack_protection] @known_callers_obj2 +// CHECK: alloc_stack +// CHECK: copy_addr +// CHECK: copy_addr +// 
CHECK: } // end sil function 'known_callers_obj2' +sil hidden @known_callers_obj2 : $@convention(thin) (@guaranteed C) -> () { +bb0(%0 : $C): + %1 = ref_element_addr %0 : $C, #C.i + %2 = begin_access [modify] [static] %1 : $*Int + %3 = address_to_pointer [stack_protection] %2 : $*Int to $Builtin.RawPointer + end_access %2 : $*Int + %7 = tuple () + return %7 : $() +} + +sil @create_obj : $@convention(thin) () -> @owned C + +// CHECK-LABEL: sil @call_known_callers_obj2 +// CHECK-NOT: copy_addr +// CHECK: } // end sil function 'call_known_callers_obj2' +sil @call_known_callers_obj2 : $@convention(thin) () -> () { +bb0: + cond_br undef, bb1, bb2 +bb1: + %0 = alloc_ref $C + br bb3(%0 : $C) +bb2: + %2 = function_ref @create_obj : $@convention(thin) () -> @owned C + %3 = apply %2() : $@convention(thin) () -> @owned C + br bb3(%3 : $C) +bb3(%5 : $C): + %6 = function_ref @known_callers_obj2 : $@convention(thin) (@guaranteed C) -> () + %7 = apply %6(%5) : $@convention(thin) (@guaranteed C) -> () + strong_release %5 : $C + %r = tuple () + return %r : $() +} + +// MODULE-LABEL: sil private @closure_with_inout_capture +// FUNCTION-LABEL: sil private [stack_protection] @closure_with_inout_capture +// MODULE-NOT: copy_addr +// FUNCTION: copy_addr +// CHECK: } // end sil function 'closure_with_inout_capture' +sil private @closure_with_inout_capture : $@convention(thin) (@inout_aliasable Int) -> () { +bb0(%0 : $*Int): + %1 = address_to_pointer [stack_protection] %0 : $*Int to $Builtin.RawPointer + %r = tuple () + return %r : $() +} + +sil @call_closure : $@convention(thin) (@noescape @callee_guaranteed () -> ()) -> () + +// MODULE-LABEL: sil [stack_protection] @test_closure_with_inout_capture +// FUNCTION-LABEL: sil @test_closure_with_inout_capture +// CHECK-NOT: copy_addr +// CHECK: } // end sil function 'test_closure_with_inout_capture' +sil @test_closure_with_inout_capture : $@convention(thin) () -> () { +bb0: + %2 = alloc_stack [lexical] $Int + %3 = integer_literal $Builtin.Int64, 2 + %4 = struct $Int (%3 : $Builtin.Int64) + store %4 to %2 : $*Int + + %6 = function_ref @closure_with_inout_capture : $@convention(thin) (@inout_aliasable Int) -> () + %7 = partial_apply [callee_guaranteed] [on_stack] %6(%2) : $@convention(thin) (@inout_aliasable Int) -> () + + %8 = function_ref @call_closure : $@convention(thin) (@noescape @callee_guaranteed () -> ()) -> () + %9 = apply %8(%7) : $@convention(thin) (@noescape @callee_guaranteed () -> ()) -> () + dealloc_stack %7 : $@noescape @callee_guaranteed () -> () + dealloc_stack %2 : $*Int + %12 = tuple () + return %12 : $() +} + +// CHECK-LABEL: sil private [stack_protection] @closure_with_inout_arg +// CHECK: alloc_stack +// CHECK: copy_addr +// CHECK: copy_addr +// CHECK: } // end sil function 'closure_with_inout_arg' +sil private @closure_with_inout_arg : $@convention(thin) (@inout Int) -> () { +bb0(%0 : $*Int): + %1 = address_to_pointer [stack_protection] %0 : $*Int to $Builtin.RawPointer + %r = tuple () + return %r : $() +} + +sil @call_closure_with_arg : $@convention(thin) (@noescape @callee_guaranteed (@inout Int) -> ()) -> () + +// CHECK-LABEL: sil @test_closure_with_inout_arg +// CHECK-NOT: copy_addr +// CHECK: } // end sil function 'test_closure_with_inout_arg' +sil @test_closure_with_inout_arg : $@convention(thin) () -> () { +bb0: + %6 = function_ref @closure_with_inout_arg : $@convention(thin) (@inout Int) -> () + %7 = partial_apply [callee_guaranteed] [on_stack] %6() : $@convention(thin) (@inout Int) -> () + %8 = function_ref @call_closure_with_arg : 
$@convention(thin) (@noescape @callee_guaranteed (@inout Int) -> ()) -> ()
+  %9 = apply %8(%7) : $@convention(thin) (@noescape @callee_guaranteed (@inout Int) -> ()) -> ()
+  dealloc_stack %7 : $@noescape @callee_guaranteed (@inout Int) -> ()
+  %12 = tuple ()
+  return %12 : $()
+}
+
diff --git a/test/SILOptimizer/stack_protection.swift b/test/SILOptimizer/stack_protection.swift
new file mode 100644
index 0000000000000..6a85abd2f42d1
--- /dev/null
+++ b/test/SILOptimizer/stack_protection.swift
@@ -0,0 +1,92 @@
+// RUN: %target-swift-frontend -module-name=test -emit-sil %s -O -enable-stack-protector | %FileCheck %s
+
+// REQUIRES: swift_in_compiler
+
+@_silgen_name("potentiallyBadCFunction")
+func potentiallyBadCFunction(_ arg: UnsafePointer<Int>)
+
+// CHECK-LABEL: sil [stack_protection] @$s4test20overflowInCFunction1yyF
+// CHECK-NOT: copy_addr
+// CHECK: } // end sil function '$s4test20overflowInCFunction1yyF'
+public func overflowInCFunction1() {
+  var x = 0
+  withUnsafeMutablePointer(to: &x) {
+    potentiallyBadCFunction($0)
+  }
+}
+
+// CHECK-LABEL: sil @$s4test19unprotectedOverflowyyF
+// CHECK-NOT: copy_addr
+// CHECK: } // end sil function '$s4test19unprotectedOverflowyyF'
+public func unprotectedOverflow() {
+  var x = 0
+  _withUnprotectedUnsafeMutablePointer(to: &x) {
+    potentiallyBadCFunction($0)
+  }
+}
+
+// CHECK-LABEL: sil [stack_protection] @$s4test23overflowWithUnsafeBytesyyF
+// CHECK-NOT: copy_addr
+// CHECK: } // end sil function '$s4test23overflowWithUnsafeBytesyyF'
+public func overflowWithUnsafeBytes() {
+  var x = 0
+  withUnsafeBytes(of: &x) {
+    potentiallyBadCFunction($0.bindMemory(to: Int.self).baseAddress!)
+  }
+}
+
+// CHECK-LABEL: sil @$s4test22unprotectedUnsafeBytesyyF
+// CHECK-NOT: copy_addr
+// CHECK: } // end sil function '$s4test22unprotectedUnsafeBytesyyF'
+public func unprotectedUnsafeBytes() {
+  var x = 0
+  _withUnprotectedUnsafeBytes(of: &x) {
+    potentiallyBadCFunction($0.bindMemory(to: Int.self).baseAddress!)
+  }
+}
+
+// CHECK-LABEL: sil [stack_protection] @$s4test20overflowInCFunction2yyF
+// CHECK-NOT: copy_addr
+// CHECK: } // end sil function '$s4test20overflowInCFunction2yyF'
+public func overflowInCFunction2() {
+  var x = 0
+  potentiallyBadCFunction(&x)
+}
+
+// CHECK-LABEL: sil hidden [noinline] @$s4test20inoutWithKnownCalleryySizF
+// CHECK-NOT: copy_addr
+// CHECK: } // end sil function '$s4test20inoutWithKnownCalleryySizF'
+@inline(never)
+func inoutWithKnownCaller(_ x: inout Int) {
+  withUnsafeMutablePointer(to: &x) {
+    $0[1] = 0
+  }
+}
+
+// CHECK-LABEL: sil [stack_protection] @$s4test24callOverflowInoutPointeryyF
+// CHECK-NOT: copy_addr
+// CHECK: } // end sil function '$s4test24callOverflowInoutPointeryyF'
+public func callOverflowInoutPointer() {
+  var x = 27
+  inoutWithKnownCaller(&x)
+}
+
+// CHECK-LABEL: sil [stack_protection] @$s4test22inoutWithUnknownCalleryySizF
+// CHECK: copy_addr [take] {{.*}} to [initialization]
+// CHECK: copy_addr [take] {{.*}} to [initialization]
+// CHECK: } // end sil function '$s4test22inoutWithUnknownCalleryySizF'
+public func inoutWithUnknownCaller(_ x: inout Int) {
+  withUnsafeMutablePointer(to: &x) {
+    $0[1] = 0
+  }
+}
+
+// CHECK-LABEL: sil [stack_protection] @$s4test0A29WithUnsafeTemporaryAllocationyyF
+// CHECK-NOT: copy_addr
+// CHECK: } // end sil function '$s4test0A29WithUnsafeTemporaryAllocationyyF'
+public func testWithUnsafeTemporaryAllocation() {
+  withUnsafeTemporaryAllocation(of: Int.self, capacity: 10) {
+    potentiallyBadCFunction($0.baseAddress!)
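+    // The unsafe base address of the stack-allocated temporary buffer
+    // escapes to an external C function above; that escape is why the
+    // enclosing function must carry the [stack_protection] flag.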
+ } +} + diff --git a/test/Serialization/Inputs/def_basic.sil b/test/Serialization/Inputs/def_basic.sil index c3049498542a0..1b014da833d64 100644 --- a/test/Serialization/Inputs/def_basic.sil +++ b/test/Serialization/Inputs/def_basic.sil @@ -684,6 +684,24 @@ bb0: return %2 : $() } +// CHECK-LABEL: sil public_external [transparent] @test_stack_protection_flags +sil [transparent] [serialized] @test_stack_protection_flags : $@convention(thin) (@inout Builtin.Word) -> () { +bb0(%0 : $*Builtin.Word): + // CHECK: address_to_pointer [stack_protection] %0 + %1 = address_to_pointer [stack_protection] %0 : $*Builtin.Word to $Builtin.RawPointer + // CHECK: address_to_pointer %0 + %2 = address_to_pointer %0 : $*Builtin.Word to $Builtin.RawPointer + %3 = integer_literal $Builtin.Word, 0 + // CHECK: index_addr [stack_protection] %0 + %4 = index_addr [stack_protection] %0 : $*Builtin.Word, %3 : $Builtin.Word + // CHECK: index_addr %0 + %5 = index_addr %0 : $*Builtin.Word, %3 : $Builtin.Word + + %6 = tuple () + return %6 : $() +} + + // CHECK-LABEL: sil public_external [transparent] @test_tail_elems sil [transparent] [serialized] @test_tail_elems : $@convention(thin) (Builtin.Word, Builtin.Word) -> () { bb0(%0 : $Builtin.Word, %1 : $Builtin.Word): @@ -1519,6 +1537,7 @@ bb0: %84 = function_ref @test_dealloc_partial_ref : $@convention(thin) () -> () %85 = function_ref @test_dealloc_box : $@convention(thin) () -> () %86 = function_ref @test_stack_flag : $@convention(thin) () -> () + %86b = function_ref @test_stack_protection_flags : $@convention(thin) (@inout Builtin.Word) -> () %87 = function_ref @test_tail_elems : $@convention(thin) (Builtin.Word, Builtin.Word) -> () %88 = function_ref @test_tail_elems_dynamic : $@convention(thin) (Builtin.Word, Builtin.Word, @thick Class1.Type) -> () %89 = function_ref @test_tail_addr : $@convention(thin) (@owned Class1, Builtin.Word) -> @owned Class1 diff --git a/test/sil-passpipeline-dump/basic.test-sh b/test/sil-passpipeline-dump/basic.test-sh index d86d9d2b57a81..9a9380715641b 100644 --- a/test/sil-passpipeline-dump/basic.test-sh +++ b/test/sil-passpipeline-dump/basic.test-sh @@ -10,5 +10,6 @@ // CHECK-NEXT: "ownership-model-eliminator" ] // CHECK: --- // CHECK: name: Rest of Onone -// CHECK: passes: [ "use-prespecialized", "target-constant-folding", "sil-debuginfo-gen" ] +// CHECK: passes: [ "use-prespecialized", "target-constant-folding", "function-stack-protection", +// CHECK-NEXT: "sil-debuginfo-gen" ] // CHECK: ... diff --git a/tools/sil-opt/SILOpt.cpp b/tools/sil-opt/SILOpt.cpp index a7a8fa2960717..497285812b212 100644 --- a/tools/sil-opt/SILOpt.cpp +++ b/tools/sil-opt/SILOpt.cpp @@ -598,6 +598,7 @@ int main(int argc, char **argv) { SILOpts.OptRecordPasses = RemarksPasses; SILOpts.checkSILModuleLeaks = true; SILOpts.EnablePerformanceAnnotations = true; + SILOpts.EnableStackProtection = true; SILOpts.VerifyExclusivity = VerifyExclusivity; if (EnforceExclusivity.getNumOccurrences() != 0) {
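
A note on driving the SIL tests above: the MODULE and FUNCTION check prefixes
imply one sil-opt run per pass variant. A minimal sketch of such RUN lines
follows; the exact invocations are an assumption and are not taken from this
diff:

  // RUN: %target-sil-opt -stack-protection %s | %FileCheck %s --check-prefixes=CHECK,MODULE
  // RUN: %target-sil-opt -function-stack-protection %s | %FileCheck %s --check-prefixes=CHECK,FUNCTION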