diff --git a/include/swift/Basic/BlotSetVector.h b/include/swift/Basic/BlotSetVector.h index cc538ab95b8f6..3de1c44a03021 100644 --- a/include/swift/Basic/BlotSetVector.h +++ b/include/swift/Basic/BlotSetVector.h @@ -113,7 +113,7 @@ class BlotSetVector { return {index, true}; } - bool count(const ValueT &value) { return map.count(value); } + bool count(const ValueT &value) const { return map.count(value); } /// Replace \p value1 with \p value2 placing \p value2 into the position in /// the array where value1 used to be. If \p value2 is already in the set, diff --git a/include/swift/Demangling/Demangle.h b/include/swift/Demangling/Demangle.h index 1f2baacf791f3..3af786764e2d6 100644 --- a/include/swift/Demangling/Demangle.h +++ b/include/swift/Demangling/Demangle.h @@ -108,6 +108,7 @@ enum class FunctionSigSpecializationParamKind : unsigned { ClosureProp = 5, BoxToValue = 6, BoxToStack = 7, + InOutToOut = 8, // Option Set Flags use bits 6-31. This gives us 26 bits to use for option // flags. @@ -144,6 +145,7 @@ enum class SpecializationPass : uint8_t { CapturePropagation, FunctionSignatureOpts, GenericSpecializer, + MoveDiagnosticInOutToOut, }; static inline char encodeSpecializationPass(SpecializationPass Pass) { diff --git a/include/swift/SIL/SILCloner.h b/include/swift/SIL/SILCloner.h index 6d9415ef9d687..933828e49472a 100644 --- a/include/swift/SIL/SILCloner.h +++ b/include/swift/SIL/SILCloner.h @@ -128,6 +128,14 @@ class SILCloner : protected SILInstructionVisitor { ArrayRef entryArgs, bool replaceOriginalFunctionInPlace = false); + /// The same as clone function body, except the caller can provide a callback + /// that allows for an entry arg to be assigned to a custom old argument. This + /// is useful if one re-arranges parameters when converting from inout to out. 
+ void + cloneFunctionBody(SILFunction *F, SILBasicBlock *clonedEntryBB, + ArrayRef entryArgs, + llvm::function_ref entryArgToOldArgMap); + /// MARK: Callback utilities used from CRTP extensions during cloning. /// These should only be called from within an instruction cloning visitor. @@ -613,6 +621,29 @@ void SILCloner::cloneFunctionBody(SILFunction *F, commonFixUp(F); } +template +void SILCloner::cloneFunctionBody( + SILFunction *F, SILBasicBlock *clonedEntryBB, ArrayRef entryArgs, + llvm::function_ref entryArgIndexToOldArgIndex) { + assert(F != clonedEntryBB->getParent() && "Must clone into a new function."); + assert(BBMap.empty() && "This API does not allow clients to map blocks."); + assert(ValueMap.empty() && "Stale ValueMap."); + + assert(entryArgs.size() == F->getArguments().size()); + for (unsigned i = 0, e = entryArgs.size(); i != e; ++i) { + ValueMap[entryArgIndexToOldArgIndex(entryArgs[i])] = entryArgs[i]; + } + + BBMap.insert(std::make_pair(&*F->begin(), clonedEntryBB)); + + Builder.setInsertionPoint(clonedEntryBB); + + // This will layout all newly cloned blocks immediate after clonedEntryBB. + visitBlocksDepthFirst(&*F->begin()); + + commonFixUp(F); +} + template void SILCloner::clonePhiArgs(SILBasicBlock *oldBB) { auto *mappedBB = BBMap[oldBB]; diff --git a/include/swift/SIL/SILFunction.h b/include/swift/SIL/SILFunction.h index c2888227c87d4..9e3641177ac32 100644 --- a/include/swift/SIL/SILFunction.h +++ b/include/swift/SIL/SILFunction.h @@ -1029,6 +1029,19 @@ class SILFunction /// generic. SubstitutionMap getForwardingSubstitutionMap(); + /// Returns true if this SILFunction must be a defer statement. + /// + /// NOTE: This may return false for defer statements that have been + /// deserialized without a DeclContext. This means that this is guaranteed to + /// be correct for SILFunctions in Raw SIL that were not deserialized as + /// canonical. Thus one can use it for diagnostics. 
+ bool isDefer() const { + if (auto *dc = getDeclContext()) + if (auto *decl = dyn_cast_or_null(dc->getAsDecl())) + return decl->isDeferBody(); + return false; + } + //===--------------------------------------------------------------------===// // Block List Access //===--------------------------------------------------------------------===// diff --git a/include/swift/SILOptimizer/Utils/SpecializationMangler.h b/include/swift/SILOptimizer/Utils/SpecializationMangler.h index 24db9306381aa..47aabf40ac3eb 100644 --- a/include/swift/SILOptimizer/Utils/SpecializationMangler.h +++ b/include/swift/SILOptimizer/Utils/SpecializationMangler.h @@ -62,20 +62,24 @@ class FunctionSignatureSpecializationMangler : public SpecializationMangler { using ArgumentModifierIntBase = uint16_t; enum class ArgumentModifier : ArgumentModifierIntBase { // Option Space 4 bits (i.e. 16 options). - Unmodified=0, - ConstantProp=1, - ClosureProp=2, - BoxToValue=3, - BoxToStack=4, - First_Option=0, Last_Option=31, + Unmodified = 0, + ConstantProp = 1, + ClosureProp = 2, + BoxToValue = 3, + BoxToStack = 4, + InOutToOut = 5, + + First_Option = 0, + Last_Option = 31, // Option Set Space. 12 bits (i.e. 12 option). 
- Dead=32, - OwnedToGuaranteed=64, - SROA=128, - GuaranteedToOwned=256, - ExistentialToGeneric=512, - First_OptionSetEntry=32, LastOptionSetEntry=32768, + Dead = 32, + OwnedToGuaranteed = 64, + SROA = 128, + GuaranteedToOwned = 256, + ExistentialToGeneric = 512, + First_OptionSetEntry = 32, + LastOptionSetEntry = 32768, }; using ArgInfo = std::pairgetChild(Idx++), depth + 1); break; case FunctionSigSpecializationParamKind::ConstantPropFunction: @@ -1582,6 +1583,9 @@ NodePointer NodePrinter::print(NodePointer Node, unsigned depth, case FunctionSigSpecializationParamKind::BoxToStack: Printer << "Stack Promoted from Box"; return nullptr; + case FunctionSigSpecializationParamKind::InOutToOut: + Printer << "InOut Converted to Out"; + return nullptr; case FunctionSigSpecializationParamKind::ConstantPropFunction: Printer << "Constant Propagated Function"; return nullptr; diff --git a/lib/Demangling/OldDemangler.cpp b/lib/Demangling/OldDemangler.cpp index b9acc42f64b7c..253d863a04466 100644 --- a/lib/Demangling/OldDemangler.cpp +++ b/lib/Demangling/OldDemangler.cpp @@ -645,6 +645,11 @@ class OldDemangler { if (!result) return nullptr; param->addChild(result, Factory); + } else if (Mangled.nextIf("r_")) { + auto result = FUNCSIGSPEC_CREATE_PARAM_KIND(InOutToOut); + if (!result) + return nullptr; + param->addChild(result, Factory); } else { // Otherwise handle option sets. 
unsigned Value = 0; diff --git a/lib/Demangling/Remangler.cpp b/lib/Demangling/Remangler.cpp index 1dc2ee9a6186d..29c71e96f87a8 100644 --- a/lib/Demangling/Remangler.cpp +++ b/lib/Demangling/Remangler.cpp @@ -1407,6 +1407,9 @@ Remangler::mangleFunctionSignatureSpecializationParam(Node *node, case FunctionSigSpecializationParamKind::BoxToStack: Buffer << 's'; break; + case FunctionSigSpecializationParamKind::InOutToOut: + Buffer << 'r'; + break; case FunctionSigSpecializationParamKind::SROA: Buffer << 'x'; break; diff --git a/lib/SILOptimizer/Mandatory/MoveKillsCopyableAddressesChecker.cpp b/lib/SILOptimizer/Mandatory/MoveKillsCopyableAddressesChecker.cpp index 1deabc81828f5..efb0b1ef2e06b 100644 --- a/lib/SILOptimizer/Mandatory/MoveKillsCopyableAddressesChecker.cpp +++ b/lib/SILOptimizer/Mandatory/MoveKillsCopyableAddressesChecker.cpp @@ -136,8 +136,11 @@ #define DEBUG_TYPE "sil-move-kills-copyable-addresses-checker" #include "swift/AST/DiagnosticsSIL.h" +#include "swift/AST/Types.h" +#include "swift/Basic/BlotSetVector.h" #include "swift/Basic/Defer.h" #include "swift/Basic/FrozenMultiMap.h" +#include "swift/Basic/GraphNodeWorklist.h" #include "swift/SIL/BasicBlockBits.h" #include "swift/SIL/BasicBlockDatastructures.h" #include "swift/SIL/Consumption.h" @@ -147,12 +150,19 @@ #include "swift/SIL/OwnershipUtils.h" #include "swift/SIL/SILArgument.h" #include "swift/SIL/SILBuilder.h" +#include "swift/SIL/SILCloner.h" #include "swift/SIL/SILFunction.h" #include "swift/SIL/SILInstruction.h" +#include "swift/SIL/SILLinkage.h" #include "swift/SIL/SILUndef.h" +#include "swift/SIL/SILVisitor.h" +#include "swift/SILOptimizer/Analysis/BasicCalleeAnalysis.h" #include "swift/SILOptimizer/Analysis/ClosureScope.h" #include "swift/SILOptimizer/PassManager/Transforms.h" #include "swift/SILOptimizer/Utils/CanonicalOSSALifetime.h" +#include "swift/SILOptimizer/Utils/InstOptUtils.h" +#include "swift/SILOptimizer/Utils/SILOptFunctionBuilder.h" +#include 
"swift/SILOptimizer/Utils/SpecializationMangler.h" #include "llvm/ADT/PointerEmbeddedInt.h" #include "llvm/ADT/PointerSumType.h" @@ -180,67 +190,77 @@ llvm::raw_ostream &operator<<(llvm::raw_ostream &os, const SmallBitVector &bv) { } } // namespace llvm +static SourceLoc getSourceLocFromValue(SILValue value) { + if (auto *defInst = value->getDefiningInstruction()) + return defInst->getLoc().getSourceLoc(); + if (auto *arg = dyn_cast(value)) + return arg->getDecl()->getLoc(); + llvm_unreachable("Do not know how to get source loc for value?!"); +} + +#ifndef NDEBUG +static void dumpBitVector(llvm::raw_ostream &os, const SmallBitVector &bv) { + for (unsigned i = 0; i < bv.size(); ++i) { + os << (bv[i] ? '1' : '0'); + } +} +#endif + +/// Returns true if a value has one or zero debug uses. +static bool hasMoreThanOneDebugUse(SILValue v) { + auto Range = getDebugUses(v); + auto i = Range.begin(), e = Range.end(); + if (i == e) + return false; + ++i; + return i != e; +} + //===----------------------------------------------------------------------===// -// Use Gathering +// Forward Declarations //===----------------------------------------------------------------------===// namespace { -struct UseState { - SILValue address; - SmallVector markMoves; - SmallPtrSet seenMarkMoves; - SmallSetVector inits; - SmallSetVector livenessUses; - SmallBlotSetVector destroys; - llvm::SmallDenseMap destroyToIndexMap; - SmallBlotSetVector reinits; - llvm::SmallDenseMap reinitToIndexMap; - - void insertMarkUnresolvedMoveAddr(MarkUnresolvedMoveAddrInst *inst) { - if (!seenMarkMoves.insert(inst).second) - return; - markMoves.emplace_back(inst); - } +enum class DownwardScanResult { + Invalid, + Destroy, + Reinit, + // NOTE: We use UseForDiagnostic both for defer uses and normal uses. 
+ UseForDiagnostic, + MoveOut, + ClosureConsume, + ClosureUse, +}; - void insertDestroy(DestroyAddrInst *dai) { - destroyToIndexMap[dai] = destroys.size(); - destroys.insert(dai); - } +struct ClosureOperandState { + /// This is the downward scan result that visiting a full applysite of this + /// closure will effect on the address being analyzed. + DownwardScanResult result = DownwardScanResult::Invalid; - void insertReinit(SILInstruction *inst) { - reinitToIndexMap[inst] = reinits.size(); - reinits.insert(inst); - } + /// Instructions that act as consumes in the closure callee. This is the set + /// of earliest post dominating consumes that should be eliminated in the + /// cloned callee. Only set if state is upwards consume. + TinyPtrVector pairedConsumingInsts; - void clear() { - address = SILValue(); - markMoves.clear(); - seenMarkMoves.clear(); - inits.clear(); - livenessUses.clear(); - destroys.clear(); - destroyToIndexMap.clear(); - reinits.clear(); - reinitToIndexMap.clear(); - } -}; + /// The set of instructions in the callee that are uses that require the move + /// to be alive. Only set if state is upwards use. + TinyPtrVector pairedUseInsts; -/// Visit all of the uses of a lexical lifetime, initializing useState as we go. -struct GatherLexicalLifetimeUseVisitor : public AccessUseVisitor { - UseState &useState; + /// The single debug value in the closure callee that we sink to the reinit + /// points. 
+ DebugValueInst *singleDebugValue = nullptr; - GatherLexicalLifetimeUseVisitor(UseState &useState) - : AccessUseVisitor(AccessUseType::Overlapping, - NestedAccessType::IgnoreAccessBegin), - useState(useState) {} + bool isUpwardsUse() const { + return result == DownwardScanResult::ClosureUse; + } - bool visitUse(Operand *op, AccessUseType useTy) override; - void reset(SILValue address) { useState.address = address; } - void clear() { useState.clear(); } + bool isUpwardsConsume() const { + return result == DownwardScanResult::ClosureConsume; + } }; -} // end anonymous namespace +} // namespace static void convertMemoryReinitToInitForm(SILInstruction *memInst) { switch (memInst->getKind()) { @@ -279,87 +299,75 @@ static bool memInstMustReinitialize(Operand *memOper) { } } -// Filter out recognized uses that do not write to memory. -// -// TODO: Ensure that all of the conditional-write logic below is encapsulated in -// mayWriteToMemory and just call that instead. Possibly add additional -// verification that visitAccessPathUses recognizes all instructions that may -// propagate pointers (even though they don't write). -bool GatherLexicalLifetimeUseVisitor::visitUse(Operand *op, - AccessUseType useTy) { - // If this operand is for a dependent type, then it does not actually access - // the operand's address value. It only uses the metatype defined by the - // operation (e.g. open_existential). - if (op->isTypeDependent()) { - return true; - } +//===----------------------------------------------------------------------===// +// Use State +//===----------------------------------------------------------------------===// - // If we have a move from src, this is a mark_move we want to visit. 
- if (auto *move = dyn_cast(op->getUser())) { - if (move->getSrc() == op->get()) { - LLVM_DEBUG(llvm::dbgs() << "Found move: " << *move); - useState.insertMarkUnresolvedMoveAddr(move); - return true; - } +namespace { + +struct UseState { + SILValue address; + SmallVector markMoves; + SmallPtrSet seenMarkMoves; + SmallSetVector inits; + SmallSetVector livenessUses; + SmallBlotSetVector destroys; + llvm::SmallDenseMap destroyToIndexMap; + SmallBlotSetVector reinits; + llvm::SmallDenseMap reinitToIndexMap; + llvm::SmallMapVector closureUses; + llvm::SmallDenseMap closureOperandToIndexMap; + + void insertMarkUnresolvedMoveAddr(MarkUnresolvedMoveAddrInst *inst) { + if (!seenMarkMoves.insert(inst).second) + return; + markMoves.emplace_back(inst); } - if (memInstMustInitialize(op)) { - LLVM_DEBUG(llvm::dbgs() << "Found init: " << *op->getUser()); - useState.inits.insert(op->getUser()); - return true; + void insertDestroy(DestroyAddrInst *dai) { + destroyToIndexMap[dai] = destroys.size(); + destroys.insert(dai); } - if (memInstMustReinitialize(op)) { - LLVM_DEBUG(llvm::dbgs() << "Found reinit: " << *op->getUser()); - useState.insertReinit(op->getUser()); - return true; + void insertReinit(SILInstruction *inst) { + reinitToIndexMap[inst] = reinits.size(); + reinits.insert(inst); } - if (auto *dvi = dyn_cast(op->getUser())) { - // If we see a destroy_addr not on our base address, bail! Just error and - // say that we do not understand the code. - if (dvi->getOperand() != useState.address) { - LLVM_DEBUG(llvm::dbgs() - << "!!! Error! Found destroy_addr no on base address: " - << *dvi); - return false; - } - LLVM_DEBUG(llvm::dbgs() << "Found destroy_addr: " << *dvi); - useState.insertDestroy(dvi); - return true; + void insertClosureOperand(Operand *op) { + closureOperandToIndexMap[op] = closureUses.size(); + closureUses[op] = {}; } - // Ignore dealloc_stack. 
- if (isa(op->getUser())) - return true; + void clear() { + address = SILValue(); + markMoves.clear(); + seenMarkMoves.clear(); + inits.clear(); + livenessUses.clear(); + destroys.clear(); + destroyToIndexMap.clear(); + reinits.clear(); + reinitToIndexMap.clear(); + closureUses.clear(); + closureOperandToIndexMap.clear(); + } - LLVM_DEBUG(llvm::dbgs() << "Found liveness use: " << *op->getUser()); - useState.livenessUses.insert(op->getUser()); + SILFunction *getFunction() const { return address->getFunction(); } +}; - return true; -} +} // namespace //===----------------------------------------------------------------------===// // Dataflow //===----------------------------------------------------------------------===// -namespace { - -enum class DownwardScanResult { - Invalid, - Destroy, - Reinit, - UseForDiagnostic, - MoveOut -}; - -} - /// Returns true if we are move out, false otherwise. If we find an interesting /// inst, we return it in foundInst. If no inst is returned, one must continue. static DownwardScanResult downwardScanForMoveOut(MarkUnresolvedMoveAddrInst *mvi, UseState &useState, - SILInstruction **foundInst) { + SILInstruction **foundInst, Operand **foundOperand, + TinyPtrVector &foundClosureInsts) { // Forward scan looking for uses or reinits. for (auto &next : llvm::make_range(std::next(mvi->getIterator()), mvi->getParent()->end())) { @@ -395,6 +403,42 @@ downwardScanForMoveOut(MarkUnresolvedMoveAddrInst *mvi, UseState &useState, return DownwardScanResult::Destroy; } } + + // Finally check if we have a closure user that we were able to handle. 
+ if (auto fas = FullApplySite::isa(&next)) { + LLVM_DEBUG(llvm::dbgs() << "DownwardScan: ClosureCheck: " << **fas); + for (auto &op : fas.getArgumentOperands()) { + auto iter = useState.closureUses.find(&op); + if (iter == useState.closureUses.end()) { + continue; + } + + LLVM_DEBUG(llvm::dbgs() + << "DownwardScan: ClosureCheck: Matching Operand: " + << fas.getAppliedArgIndex(op)); + *foundInst = &next; + *foundOperand = &op; + switch (iter->second.result) { + case DownwardScanResult::Invalid: + case DownwardScanResult::Destroy: + case DownwardScanResult::Reinit: + case DownwardScanResult::UseForDiagnostic: + case DownwardScanResult::MoveOut: + llvm_unreachable("unhandled"); + case DownwardScanResult::ClosureConsume: + LLVM_DEBUG(llvm::dbgs() << ". ClosureConsume.\n"); + llvm::copy(iter->second.pairedConsumingInsts, + std::back_inserter(foundClosureInsts)); + break; + case DownwardScanResult::ClosureUse: + LLVM_DEBUG(llvm::dbgs() << ". ClosureUse.\n"); + llvm::copy(iter->second.pairedUseInsts, + std::back_inserter(foundClosureInsts)); + break; + } + return iter->second.result; + } + } } // We are move out! @@ -431,6 +475,12 @@ static bool upwardScanForUseOut(SILInstruction *inst, UseState &useState) { return false; if (useState.reinitToIndexMap.count(&iter)) return false; + if (auto fas = FullApplySite::isa(&iter)) { + for (auto &op : fas.getArgumentOperands()) { + if (useState.closureUses.find(&op) != useState.closureUses.end()) + return false; + } + } } return true; } @@ -454,171 +504,929 @@ static bool upwardScanForDestroys(SILInstruction *inst, UseState &useState) { // destroy_addr up since it is balanced by the init. if (useState.inits.contains(&iter)) return false; + if (auto fas = FullApplySite::isa(&iter)) { + for (auto &op : fas.getArgumentOperands()) { + if (useState.closureUses.find(&op) != useState.closureUses.end()) + return false; + } + } + + // Otherwise, we have a normal use, just ignore it. 
+ } + + // Ok, this instruction is the first use in the block of our value. So return + // true so we track it as such. + return true; +} + +/// Search for the first init in the block. +static bool upwardScanForInit(SILInstruction *inst, UseState &useState) { + // We scan backwards from the instruction before \p inst to the beginning of + // the block. + for (auto &iter : llvm::make_range(std::next(inst->getReverseIterator()), + inst->getParent()->rend())) { + if (useState.inits.contains(&iter)) + return false; + if (auto fas = FullApplySite::isa(&iter)) { + for (auto &op : fas.getArgumentOperands()) { + if (useState.closureUses.find(&op) != useState.closureUses.end()) + return false; + } + } + } + return true; +} + +//===----------------------------------------------------------------------===// +// Closure Argument Global Dataflow +//===----------------------------------------------------------------------===// + +namespace { + +/// A utility class that analyzes a closure that captures a moved value. It is +/// used to perform move checking within the closure as well as to determine a +/// set of reinit/destroys that we will need to convert to init and or eliminate +/// while cloning the closure. +/// +/// NOTE: We do not need to consider if the closure reinitializes the memory +/// since there must be some sort of use for the closure to even reference it +/// and the compiler emits assigns when it reinitializes vars this early in the +/// pipeline. +struct ClosureArgDataflowState { + SmallVector livenessWorklist; + SmallVector consumingWorklist; + PrunedLiveness livenessForConsumes; + UseState &useState; + +public: + ClosureArgDataflowState(UseState &useState) : useState(useState) {} + + bool process( + SILArgument *arg, ClosureOperandState &state, + SmallBlotSetVector &postDominatingConsumingUsers); + + void clear() { + livenessForConsumes.clear(); + } + +private: + /// Perform our liveness dataflow. Returns true if we found any liveness uses + /// at all. 
These we will need to error upon. + bool performLivenessDataflow(const BasicBlockSet &initBlocks, + const BasicBlockSet &livenessBlocks, + const BasicBlockSet &consumingBlocks); + + /// Perform our consuming dataflow. Returns true if we found an earliest set + /// of consuming uses that we can handle that post-dominate the argument. + /// Returns false otherwise. + bool performConsumingDataflow(const BasicBlockSet &initBlocks, + const BasicBlockSet &consumingBlocks); + + void classifyUses(BasicBlockSet &initBlocks, BasicBlockSet &livenessBlocks, + BasicBlockSet &consumingBlocks); + + bool handleSingleBlockCase(SILArgument *address, ClosureOperandState &state); +}; + +} // namespace + +bool ClosureArgDataflowState::handleSingleBlockCase( + SILArgument *address, ClosureOperandState &state) { + // Walk the instructions from the beginning of the block to the end. + for (auto &inst : *address->getParent()) { + assert(!useState.inits.count(&inst) && + "Shouldn't see an init before a destroy or reinit"); + + // If we see a destroy, then we know we are upwards consume... stash it so + // that we can destroy it + if (auto *dvi = dyn_cast(&inst)) { + if (useState.destroyToIndexMap.count(dvi)) { + LLVM_DEBUG(llvm::dbgs() + << "ClosureArgDataflow: Found Consume: " << *dvi); + + if (hasMoreThanOneDebugUse(address)) + return false; + + state.pairedConsumingInsts.push_back(dvi); + state.result = DownwardScanResult::ClosureConsume; + return true; + } + } + + // Same for reinits. + if (useState.reinits.count(&inst)) { + LLVM_DEBUG(llvm::dbgs() << "ClosureArgDataflow: Found Reinit: " << inst); + + if (hasMoreThanOneDebugUse(address)) + return false; + + state.pairedConsumingInsts.push_back(&inst); + state.result = DownwardScanResult::ClosureConsume; + return true; + } + + // Finally, if we have a liveness use, report it for a diagnostic. 
+ if (useState.livenessUses.count(&inst)) { + LLVM_DEBUG(llvm::dbgs() + << "ClosureArgDataflow: Found liveness use: " << inst); + state.pairedUseInsts.push_back(&inst); + state.result = DownwardScanResult::ClosureUse; + return true; + } + } + + LLVM_DEBUG( + llvm::dbgs() << "ClosureArgDataflow: Did not find interesting uses.\n"); + return false; +} + +bool ClosureArgDataflowState::performLivenessDataflow( + const BasicBlockSet &initBlocks, const BasicBlockSet &livenessBlocks, + const BasicBlockSet &consumingBlocks) { + LLVM_DEBUG(llvm::dbgs() << "ClosureArgLivenessDataflow. Start!\n"); + bool foundSingleLivenessUse = false; + auto *fn = useState.getFunction(); + auto *frontBlock = &*fn->begin(); + BasicBlockWorklist worklist(fn); + + for (unsigned i : indices(livenessWorklist)) { + auto *&user = livenessWorklist[i]; + + // If our use is in the first block, then we are done with this user. Set + // the found single liveness use flag and continue! + if (frontBlock == user->getParent()) { + foundSingleLivenessUse = true; + continue; + } + + bool success = false; + for (auto *predBlock : user->getParent()->getPredecessorBlocks()) { + worklist.pushIfNotVisited(predBlock); + } + while (auto *next = worklist.pop()) { + if (livenessBlocks.contains(next) || initBlocks.contains(next) || + consumingBlocks.contains(next)) { + continue; + } + + if (frontBlock == next) { + success = true; + foundSingleLivenessUse = true; + break; + } + + for (auto *predBlock : next->getPredecessorBlocks()) { + worklist.pushIfNotVisited(predBlock); + } + } + if (!success) { + user = nullptr; + } + } + return foundSingleLivenessUse; +} + +bool ClosureArgDataflowState::performConsumingDataflow( + const BasicBlockSet &initBlocks, const BasicBlockSet &consumingBlocks) { + auto *fn = useState.getFunction(); + auto *frontBlock = &*fn->begin(); + + bool foundSingleConsumingUse = false; + BasicBlockWorklist worklist(fn); + for (unsigned i : indices(consumingWorklist)) { + auto *&user = 
consumingWorklist[i]; + + if (frontBlock == user->getParent()) + continue; + + bool success = false; + for (auto *predBlock : user->getParent()->getPredecessorBlocks()) { + worklist.pushIfNotVisited(predBlock); + } + while (auto *next = worklist.pop()) { + if (initBlocks.contains(next) || consumingBlocks.contains(next)) { + continue; + } + + if (frontBlock == next) { + success = true; + foundSingleConsumingUse = true; + break; + } + + for (auto *predBlock : next->getPredecessorBlocks()) { + worklist.pushIfNotVisited(predBlock); + } + } + if (!success) { + user = nullptr; + } + } + return foundSingleConsumingUse; +} + +void ClosureArgDataflowState::classifyUses(BasicBlockSet &initBlocks, + BasicBlockSet &livenessBlocks, + BasicBlockSet &consumingBlocks) { + + for (auto *user : useState.inits) { + if (upwardScanForInit(user, useState)) { + LLVM_DEBUG(llvm::dbgs() << " Found init block at: " << *user); + initBlocks.insert(user->getParent()); + } + } + + for (auto *user : useState.livenessUses) { + if (upwardScanForUseOut(user, useState)) { + LLVM_DEBUG(llvm::dbgs() << " Found use block at: " << *user); + livenessBlocks.insert(user->getParent()); + livenessWorklist.push_back(user); + } + } + + for (auto destroyOpt : useState.destroys) { + assert(destroyOpt); + + auto *destroy = *destroyOpt; + + auto iter = useState.destroyToIndexMap.find(destroy); + assert(iter != useState.destroyToIndexMap.end()); + + if (upwardScanForDestroys(destroy, useState)) { + LLVM_DEBUG(llvm::dbgs() << " Found destroy block at: " << *destroy); + consumingBlocks.insert(destroy->getParent()); + consumingWorklist.push_back(destroy); + } + } + + for (auto reinitOpt : useState.reinits) { + assert(reinitOpt); + + auto *reinit = *reinitOpt; + auto iter = useState.reinitToIndexMap.find(reinit); + assert(iter != useState.reinitToIndexMap.end()); + + if (upwardScanForDestroys(reinit, useState)) { + LLVM_DEBUG(llvm::dbgs() << " Found reinit block at: " << *reinit); + 
consumingBlocks.insert(reinit->getParent()); + consumingWorklist.push_back(reinit); + } + } +} + +bool ClosureArgDataflowState::process( + SILArgument *address, ClosureOperandState &state, + SmallBlotSetVector &postDominatingConsumingUsers) { + clear(); + + SILFunction *fn = address->getFunction(); + assert(fn); + + // First see if our function only has a single block. In such a case, + // summarize using the single processing routine. + if (address->getParent()->getTerminator()->isFunctionExiting()) { + LLVM_DEBUG(llvm::dbgs() << "ClosureArgDataflow: Single Block Case.\n"); + return handleSingleBlockCase(address, state); + } + + LLVM_DEBUG(llvm::dbgs() << "ClosureArgDataflow: Multiple Block Case.\n"); + + // At this point, we begin by classifying the uses of our address into init + // blocks, liveness blocks, consuming blocks. We also seed the worklist for + // our two dataflows. + SWIFT_DEFER { + livenessWorklist.clear(); + consumingWorklist.clear(); + }; + BasicBlockSet initBlocks(fn); + BasicBlockSet livenessBlocks(fn); + BasicBlockSet consumingBlocks(fn); + classifyUses(initBlocks, livenessBlocks, consumingBlocks); + + // Liveness Dataflow: + // + // The way that we do this is that for each such instruction: + // + // 1. If the instruction is in the entrance block, then it is our only answer. + // + // 2. If the user is not in the entrance block, visit recursively its + // predecessor blocks until one either hits the entrance block (in which + // case this is the result) /or/ one hits a block in one of our basic block + // sets which means there is an earlier use. Consuming blocks only stop for + // consuming blocks and init blocks. Liveness blocks stop for all other + // blocks. + // + // The result is what remains in our set. Thus we start by processing + // liveness. 
+ if (performLivenessDataflow(initBlocks, livenessBlocks, consumingBlocks)) { + for (unsigned i : indices(livenessWorklist)) { + if (auto *ptr = livenessWorklist[i]) { + LLVM_DEBUG(llvm::dbgs() + << "ClosureArgLivenessDataflow. Liveness User: " << *ptr); + state.pairedUseInsts.push_back(ptr); + } + } + state.result = DownwardScanResult::ClosureUse; + return true; + } + + // Then perform the consuming use dataflow. In this case, we think we may have + // found a set of post-dominating consuming uses for our inout_aliasable + // parameter. We are going to change it to be an out parameter and eliminate + // these when we clone the closure. + if (performConsumingDataflow(initBlocks, consumingBlocks)) { + // Before we do anything, make sure our argument has at least one single + // debug_value user. If we have many we can't handle it since something in + // SILGen is emitting weird code. Our tests will ensure that SILGen does not + // diverge by mistake. So we are really just being careful. + if (hasMoreThanOneDebugUse(address)) { + // Failing b/c more than one debug use! + return false; + } + + SWIFT_DEFER { livenessForConsumes.clear(); }; + auto *frontBlock = &*fn->begin(); + livenessForConsumes.initializeDefBlock(frontBlock); + + for (unsigned i : indices(livenessWorklist)) { + if (auto *ptr = livenessWorklist[i]) { + state.pairedConsumingInsts.push_back(ptr); + livenessForConsumes.updateForUse(ptr, true /*is lifetime ending*/); + } + } + + // If our consumes do not have a linear lifetime, bail. We will error on the + // move being unknown. 
+ for (auto *ptr : state.pairedConsumingInsts) { + if (livenessForConsumes.isWithinBoundary(ptr)) + return false; + postDominatingConsumingUsers.insert(ptr); + } + state.result = DownwardScanResult::ClosureConsume; + return true; + } + + return true; +} + +//===----------------------------------------------------------------------===// +// Closure Use Gatherer +//===----------------------------------------------------------------------===// + +namespace { + +/// Visit all of the uses of a closure argument, initializing useState as we go. +struct GatherClosureUseVisitor : public AccessUseVisitor { + UseState &useState; + + GatherClosureUseVisitor(UseState &useState) + : AccessUseVisitor(AccessUseType::Overlapping, + NestedAccessType::IgnoreAccessBegin), + useState(useState) {} + + bool visitUse(Operand *op, AccessUseType useTy) override; + void reset(SILValue address) { useState.address = address; } + void clear() { useState.clear(); } +}; + +} // end anonymous namespace + +// Filter out recognized uses that do not write to memory. +// +// TODO: Ensure that all of the conditional-write logic below is encapsulated in +// mayWriteToMemory and just call that instead. Possibly add additional +// verification that visitAccessPathUses recognizes all instructions that may +// propagate pointers (even though they don't write). +bool GatherClosureUseVisitor::visitUse(Operand *op, AccessUseType useTy) { + // If this operand is for a dependent type, then it does not actually access + // the operand's address value. It only uses the metatype defined by the + // operation (e.g. open_existential). + if (op->isTypeDependent()) { + return true; + } + + // Ignore debug_values. We should leave them on the argument so that later in + // the function the user can still access the out parameter once it is + // updated. + if (isa(op->getUser())) + return true; + + // Ignore end_access. For our purposes, they are irrelevent and we do not want + // to treat them like liveness uses. 
+ if (isa(op->getUser())) + return true; + + if (memInstMustInitialize(op)) { + if (stripAccessMarkers(op->get()) != useState.address) { + LLVM_DEBUG(llvm::dbgs() + << "!!! Error! Found init use not on base address: " + << *op->getUser()); + return false; + } + + LLVM_DEBUG(llvm::dbgs() << "ClosureUse: Found init: " << *op->getUser()); + useState.inits.insert(op->getUser()); + return true; + } + + if (memInstMustReinitialize(op)) { + if (stripAccessMarkers(op->get()) != useState.address) { + LLVM_DEBUG(llvm::dbgs() + << "!!! Error! Found reinit use not on base address: " + << *op->getUser()); + return false; + } + + LLVM_DEBUG(llvm::dbgs() << "ClosureUse: Found reinit: " << *op->getUser()); + useState.insertReinit(op->getUser()); + return true; + } + + if (auto *dvi = dyn_cast(op->getUser())) { + // If we see a destroy_addr not on our base address, bail! Just error and + // say that we do not understand the code. + if (dvi->getOperand() != useState.address) { + LLVM_DEBUG(llvm::dbgs() + << "!!! Error! Found destroy_addr no on base address: " + << *dvi); + return false; + } + LLVM_DEBUG(llvm::dbgs() << "ClosureUse: Found destroy_addr: " << *dvi); + useState.insertDestroy(dvi); + return true; + } + + LLVM_DEBUG(llvm::dbgs() << "ClosureUse: Found liveness use: " + << *op->getUser()); + useState.livenessUses.insert(op->getUser()); + + return true; +} + +//===----------------------------------------------------------------------===// +// Closure Argument Cloner +//===----------------------------------------------------------------------===// + +namespace { + +struct ClosureArgumentInOutToOutCloner + : SILClonerWithScopes { + friend class SILInstructionVisitor; + friend class SILCloner; + + SmallBlotSetVector &postDominatingConsumingUsers; + SILFunction *orig; + const SmallBitVector &argsToConvertIndices; + SmallPtrSet oldArgSet; + + // Map from clonedArg -> oldArg. 
+ llvm::SmallMapVector clonedArgToOldArgMap; + +public: + ClosureArgumentInOutToOutCloner( + SILOptFunctionBuilder &funcBuilder, SILFunction *orig, + IsSerialized_t isSerialized, + SmallBlotSetVector &postDominatingConsumingUsers, + const SmallBitVector &argsToConvertIndices, StringRef name); + + void populateCloned(); + + SILFunction *getCloned() { return &getBuilder().getFunction(); } + + void visitDebugValueInst(DebugValueInst *inst) { + // Do not clone if our inst argument is one of our cloned arguments. In such + // a case, we are going to handle the debug_value when we visit a post + // dominating consuming reinit. + if (oldArgSet.count(inst->getOperand())) { + LLVM_DEBUG(llvm::dbgs() + << " Visiting debug value that is in the old arg set!\n"); + return; + } + LLVM_DEBUG(llvm::dbgs() + << " Visiting debug value that we will clone!\n"); + SILCloner::visitDebugValueInst(inst); + } + + void visitDestroyValueInst(DestroyValueInst *inst) { + if (!postDominatingConsumingUsers.count(inst)) { + SILCloner::visitDestroyValueInst(inst); + } + + // Don't do anything if we have a destroy. + } + + void visitCopyAddrInst(CopyAddrInst *inst) { + if (!postDominatingConsumingUsers.count(inst)) { + return SILCloner::visitCopyAddrInst( + inst); + } + + // If this copy_addr is one of the copies that we need to fixup, convert it + // to an init from a reinit. We also insert a debug_value + assert(!inst->isInitializationOfDest() && "Should be a reinit"); + getBuilder().setCurrentDebugScope(getOpScope(inst->getDebugScope())); + recordClonedInstruction( + inst, getBuilder().createCopyAddr( + getOpLocation(inst->getLoc()), getOpValue(inst->getSrc()), + getOpValue(inst->getDest()), inst->isTakeOfSrc(), + IsInitialization_t::IsInitialization)); + + // Then if in our caller we had a debug_value on our dest, add it here. 
+ auto base = AccessPathWithBase::compute(inst->getDest()).base; + if (oldArgSet.count(base)) { + if (auto *op = getSingleDebugUse(base)) { + if (auto *dvi = dyn_cast(op->getUser())) { + SILCloner::visitDebugValueInst(dvi); + } + } + } + } + + void visitStoreInst(StoreInst *inst) { + if (!postDominatingConsumingUsers.count(inst)) { + return SILCloner::visitStoreInst(inst); + } + + // If this store is one of the copies that we need to fixup, convert it + // to an init from being an assign. + assert(inst->getOwnershipQualifier() == StoreOwnershipQualifier::Assign); + getBuilder().setCurrentDebugScope(getOpScope(inst->getDebugScope())); + recordClonedInstruction( + inst, getBuilder().createStore( + getOpLocation(inst->getLoc()), getOpValue(inst->getSrc()), + getOpValue(inst->getDest()), StoreOwnershipQualifier::Init)); + + auto base = AccessPathWithBase::compute(inst->getDest()).base; + if (oldArgSet.count(base)) { + if (auto *op = getSingleDebugUse(base)) { + if (auto *dvi = dyn_cast(op->getUser())) { + SILCloner::visitDebugValueInst(dvi); + } + } + } + } + +private: + static SILFunction *initCloned( + SILOptFunctionBuilder &funcBuilder, SILFunction *orig, + IsSerialized_t isSerialized, + SmallBlotSetVector &postDominatingConsumingUsers, + const SmallBitVector &argsToConvertIndices, StringRef cloneName); +}; + +} // namespace + +static std::string getClonedName(SILFunction *func, IsSerialized_t serialized, + const SmallBitVector &argsToConvertIndices) { + auto kind = Demangle::SpecializationPass::MoveDiagnosticInOutToOut; + Mangle::FunctionSignatureSpecializationMangler Mangler(kind, serialized, + func); + for (int i = argsToConvertIndices.find_first(); i != -1; + i = argsToConvertIndices.find_next(i)) { + Mangler.setArgumentInOutToOut(i); + } + return Mangler.mangle(); +} + +ClosureArgumentInOutToOutCloner::ClosureArgumentInOutToOutCloner( + SILOptFunctionBuilder &funcBuilder, SILFunction *orig, + IsSerialized_t isSerialized, + SmallBlotSetVector 
&postDominatingConsumingUsers, + const SmallBitVector &argsToConvertIndices, StringRef name) + : SILClonerWithScopes(*initCloned( + funcBuilder, orig, isSerialized, postDominatingConsumingUsers, + argsToConvertIndices, name)), + postDominatingConsumingUsers(postDominatingConsumingUsers), orig(orig), + argsToConvertIndices(argsToConvertIndices) { + assert(orig->getDebugScope()->getParentFunction() != + getCloned()->getDebugScope()->getParentFunction()); +} + +/// Create the function corresponding to the clone of the +/// original closure with the signature modified to reflect promoted +/// parameters (which are specified by PromotedArgIndices). +SILFunction *ClosureArgumentInOutToOutCloner::initCloned( + SILOptFunctionBuilder &funcBuilder, SILFunction *orig, + IsSerialized_t serialized, + SmallBlotSetVector &postDominatingConsumingUsers, + const SmallBitVector &argsToConvertIndices, StringRef clonedName) { + SILModule &mod = orig->getModule(); + SmallVector clonedInterfaceArgTys; + SmallVector clonedResultInfos; + SILFunctionType *origFTI = orig->getLoweredFunctionType(); + + // First initialized cloned result infos with the old results. + for (auto result : origFTI->getResults()) + clonedResultInfos.push_back(result); + + // Generate a new parameter list with deleted parameters removed... + unsigned initArgIndex = orig->getConventions().getSILArgIndexOfFirstParam(); + LLVM_DEBUG(llvm::dbgs() << "CLONER: initArgIndex: " << initArgIndex << '\n'); + for (auto state : + llvm::enumerate(origFTI->getParameters().drop_front(initArgIndex))) { + unsigned index = state.index(); + auto paramInfo = state.value(); + + // If we are supposed to convert this, add the parameter to the result list. 
+ if (argsToConvertIndices.test(index)) { + LLVM_DEBUG(llvm::dbgs() << "CLONER: Converting: " << index << "\n"); + clonedResultInfos.emplace_back(paramInfo.getInterfaceType(), + ResultConvention::Indirect); + continue; + } + + LLVM_DEBUG(llvm::dbgs() << "CLONER: Letting through: " << index << "\n"); + // Otherwise, just let it through. + clonedInterfaceArgTys.push_back(paramInfo); + ++index; + } + + // Create the new function type for the cloned function with some of + // the parameters moved to be results. + auto clonedTy = SILFunctionType::get( + origFTI->getInvocationGenericSignature(), origFTI->getExtInfo(), + origFTI->getCoroutineKind(), origFTI->getCalleeConvention(), + clonedInterfaceArgTys, origFTI->getYields(), clonedResultInfos, + origFTI->getOptionalErrorResult(), origFTI->getPatternSubstitutions(), + origFTI->getInvocationSubstitutions(), mod.getASTContext(), + origFTI->getWitnessMethodConformanceOrInvalid()); + LLVM_DEBUG(llvm::dbgs() << "CLONER: clonedTy: " << clonedTy << "\n"); + assert((orig->isTransparent() || orig->isBare() || orig->getLocation()) && + "SILFunction missing location"); + assert((orig->isTransparent() || orig->isBare() || orig->getDebugScope()) && + "SILFunction missing DebugScope"); + assert(!orig->isGlobalInit() && "Global initializer cannot be cloned"); + auto *Fn = funcBuilder.createFunction( + swift::getSpecializedLinkage(orig, orig->getLinkage()), clonedName, + clonedTy, orig->getGenericEnvironment(), orig->getLocation(), + orig->isBare(), orig->isTransparent(), serialized, IsNotDynamic, + orig->getEntryCount(), orig->isThunk(), orig->getClassSubclassScope(), + orig->getInlineStrategy(), orig->getEffectsKind(), orig, + orig->getDebugScope()); + for (auto &Attr : orig->getSemanticsAttrs()) { + Fn->addSemanticsAttr(Attr); + } + + return Fn; +} + +/// Populate the body of the cloned closure, modifying instructions as +/// necessary to take into consideration the removed parameters. 
+void ClosureArgumentInOutToOutCloner::populateCloned() { + SILFunction *cloned = getCloned(); + + // Create arguments for the entry block + SILBasicBlock *origEntryBlock = &*orig->begin(); + SILBasicBlock *clonedEntryBlock = cloned->createBasicBlock(); + + SmallVector entryArgs; + entryArgs.reserve(origEntryBlock->getArguments().size()); + + // First process all of the indirect results and add our new results after + // them. + auto oldArgs = origEntryBlock->getArguments(); + auto origConventions = orig->getConventions(); + for (unsigned i : range(origConventions.getSILArgIndexOfFirstIndirectResult(), + origConventions.getSILArgIndexOfFirstParam())) { + LLVM_DEBUG(llvm::dbgs() << "Have indirect result\n"); + auto *arg = oldArgs[i]; + // Create a new argument which copies the original argument. + auto *newArg = clonedEntryBlock->createFunctionArgument(arg->getType(), + arg->getDecl()); + clonedArgToOldArgMap[newArg] = arg; + entryArgs.push_back(newArg); + } + + // To avoid needing to mess with types, just go through our original arguments + // in the entry block to get the right types. + for (auto state : llvm::enumerate(origEntryBlock->getArguments())) { + unsigned argNo = state.index(); + LLVM_DEBUG(llvm::dbgs() << "Testing Old Arg Number: " << argNo << "\n"); + if (!argsToConvertIndices.test(argNo)) + continue; + + auto *arg = state.value(); + auto *newArg = clonedEntryBlock->createFunctionArgument(arg->getType(), + arg->getDecl()); + clonedArgToOldArgMap[newArg] = arg; + oldArgSet.insert(arg); + entryArgs.push_back(newArg); + LLVM_DEBUG(llvm::dbgs() << "Mapping From: " << *arg); + LLVM_DEBUG(llvm::dbgs() + << " of function: " << arg->getFunction()->getName() << '\n'); + LLVM_DEBUG(llvm::dbgs() << "Mapping To: " << *newArg); + LLVM_DEBUG(llvm::dbgs() << " of function: " + << newArg->getFunction()->getName() << '\n'); + } + + // Finally, recreate the rest of the arguments which we did not specialize. 
+ for (auto state : llvm::enumerate(origEntryBlock->getArguments())) { + unsigned argNo = state.index(); + if (argsToConvertIndices.test(argNo)) + continue; + + auto *arg = state.value(); + auto *newArg = clonedEntryBlock->createFunctionArgument(arg->getType(), + arg->getDecl()); + + clonedArgToOldArgMap[newArg] = arg; + entryArgs.push_back(newArg); + } + + // Visit original BBs in depth-first preorder, starting with the + // entry block, cloning all instructions and terminators. + cloneFunctionBody( + orig, clonedEntryBlock, entryArgs, [&](SILValue clonedArg) -> SILValue { + LLVM_DEBUG(llvm::dbgs() << "Searching for: " << *clonedArg); + auto iter = clonedArgToOldArgMap.find(clonedArg); + assert(iter != clonedArgToOldArgMap.end() && + "Should map all cloned args to an old arg"); + LLVM_DEBUG(llvm::dbgs() << "Found it! Mapping to : " << *iter->second); + return iter->second; + }); +} + +///////////////////////////////////// +// Caller Lexical Lifetime Visitor // +///////////////////////////////////// + +namespace { + +/// Visit all of the uses of a lexical lifetime, initializing useState as we go. +struct GatherLexicalLifetimeUseVisitor : public AccessUseVisitor { + UseState &useState; + + GatherLexicalLifetimeUseVisitor(UseState &useState) + : AccessUseVisitor(AccessUseType::Overlapping, + NestedAccessType::IgnoreAccessBegin), + useState(useState) {} + + bool visitUse(Operand *op, AccessUseType useTy) override; + void reset(SILValue address) { useState.address = address; } + void clear() { useState.clear(); } +}; + +} // end anonymous namespace + +// Filter out recognized uses that do not write to memory. +// +// TODO: Ensure that all of the conditional-write logic below is encapsulated in +// mayWriteToMemory and just call that instead. Possibly add additional +// verification that visitAccessPathUses recognizes all instructions that may +// propagate pointers (even though they don't write). 
+bool GatherLexicalLifetimeUseVisitor::visitUse(Operand *op,
+                                               AccessUseType useTy) {
+  // If this operand is for a dependent type, then it does not actually access
+  // the operand's address value. It only uses the metatype defined by the
+  // operation (e.g. open_existential).
+  if (op->isTypeDependent()) {
+    return true;
+  }
+
+  // If we have a move from src, this is a mark_move we want to visit.
+  if (auto *move = dyn_cast<MarkUnresolvedMoveAddrInst>(op->getUser())) {
+    if (move->getSrc() == op->get()) {
+      LLVM_DEBUG(llvm::dbgs() << "Found move: " << *move);
+      useState.insertMarkUnresolvedMoveAddr(move);
+      return true;
+    }
+  }
+
+  if (memInstMustInitialize(op)) {
+    if (stripAccessMarkers(op->get()) != useState.address) {
+      LLVM_DEBUG(llvm::dbgs()
+                 << "!!! Error! Found init use not on base address: "
+                 << *op->getUser());
+      return false;
+    }
+
+    LLVM_DEBUG(llvm::dbgs() << "Found init: " << *op->getUser());
+    useState.inits.insert(op->getUser());
+    return true;
+  }
+
+  if (memInstMustReinitialize(op)) {
+    if (stripAccessMarkers(op->get()) != useState.address) {
+      LLVM_DEBUG(llvm::dbgs()
+                 << "!!! Error! Found reinit use not on base address: "
+                 << *op->getUser());
+      return false;
+    }
+
+    LLVM_DEBUG(llvm::dbgs() << "Found reinit: " << *op->getUser());
+    useState.insertReinit(op->getUser());
+    return true;
+  }
+
+  if (auto *dvi = dyn_cast<DestroyAddrInst>(op->getUser())) {
+    // If we see a destroy_addr not on our base address, bail! Just error and
+    // say that we do not understand the code.
+    if (dvi->getOperand() != useState.address) {
+      LLVM_DEBUG(llvm::dbgs()
+                 << "!!! Error! Found destroy_addr not on base address: "
+                 << *dvi);
+      return false;
+    }
+    LLVM_DEBUG(llvm::dbgs() << "Found destroy_addr: " << *dvi);
+    useState.insertDestroy(dvi);
+    return true;
+  }
+
+  // Then see if we have an inout_aliasable full apply site use. In that case,
+  // we are going to try and extend move checking into the partial apply using
+  // cloning to eliminate destroys or reinits.
+ if (auto fas = FullApplySite::isa(op->getUser())) { + if (stripAccessMarkers(op->get()) != useState.address) { + LLVM_DEBUG( + llvm::dbgs() + << "!!! Error! Found consuming closure use not on base address: " + << *op->getUser()); + return false; + } - // Otherwise, we have a normal use, just ignore it. + if (fas.getArgumentOperandConvention(*op) == + SILArgumentConvention::Indirect_InoutAliasable) { + // If we don't find the function, we can't handle this, so bail. + auto *func = fas.getCalleeFunction(); + if (!func || !func->isDefer()) + return false; + useState.insertClosureOperand(op); + return true; + } } - // Ok, this instruction is the first use in the block of our value. So return - // true so we track it as such. - return true; -} + // Ignore dealloc_stack. + if (isa(op->getUser())) + return true; + + LLVM_DEBUG(llvm::dbgs() << "Found liveness use: " << *op->getUser()); + useState.livenessUses.insert(op->getUser()); -/// Search for the first init in the block. -static bool upwardScanForInit(SILInstruction *inst, UseState &useState) { - // We scan backwards from the instruction before \p inst to the beginning of - // the block. 
- for (auto &iter : llvm::make_range(std::next(inst->getReverseIterator()), - inst->getParent()->rend())) { - if (useState.inits.contains(&iter)) - return false; - } return true; } //===----------------------------------------------------------------------===// -// Address Checker +// Global Dataflow //===----------------------------------------------------------------------===// namespace { -struct MoveKillsCopyableAddressesObjectChecker { - SmallSetVector addressesToCheck; - SILFunction *fn; - UseState useState; - GatherLexicalLifetimeUseVisitor visitor; +struct DataflowState { llvm::DenseMap useBlocks; llvm::DenseSet initBlocks; llvm::DenseMap destroyBlocks; llvm::DenseMap reinitBlocks; - SmallVector markMovesToDataflow; - - MoveKillsCopyableAddressesObjectChecker(SILFunction *fn) - : fn(fn), useState(), visitor(useState) {} - bool performSingleBasicBlockAnalysisForAllMarkMoves(SILValue address); - bool performGlobalDataflow(SILValue address); - - bool check(); + llvm::DenseMap closureConsumeBlocks; + llvm::DenseMap closureUseBlocks; + SmallVector markMovesThatPropagateDownwards; - void emitDiagnosticForMove(SILValue borrowedValue, - StringRef borrowedValueName, MoveValueInst *mvi); - - ASTContext &getASTContext() const { return fn->getASTContext(); } + SILOptFunctionBuilder &funcBuilder; + UseState &useState; + llvm::SmallMapVector + &applySiteToPromotedArgIndices; + SmallBlotSetVector &closureConsumes; + + DataflowState(SILOptFunctionBuilder &funcBuilder, UseState &useState, + llvm::SmallMapVector + &applySiteToPromotedArgIndices, + SmallBlotSetVector &closureConsumes) + : funcBuilder(funcBuilder), useState(useState), + applySiteToPromotedArgIndices(applySiteToPromotedArgIndices), + closureConsumes(closureConsumes) {} + void init(); + bool process( + SILValue address, + SmallBlotSetVector &postDominatingConsumingUsers); + bool handleSingleBlockClosure(SILArgument *address, + ClosureOperandState &state); + bool cleanupAllDestroyAddr( + SILValue address, 
SILFunction *fn, SmallBitVector &destroyIndices, + SmallBitVector &reinitIndices, SmallBitVector &consumingClosureIndices, + BasicBlockSet &blocksVisitedWhenProcessingNewTakes, + BasicBlockSet &blocksWithMovesThatAreNowTakes, + SmallBlotSetVector &postDominatingConsumingUsers); + void clear() { + useBlocks.clear(); + initBlocks.clear(); + destroyBlocks.clear(); + reinitBlocks.clear(); + markMovesThatPropagateDownwards.clear(); + closureConsumeBlocks.clear(); + closureUseBlocks.clear(); + } }; } // namespace -static SourceLoc getSourceLocFromValue(SILValue value) { - if (auto *defInst = value->getDefiningInstruction()) - return defInst->getLoc().getSourceLoc(); - if (auto *arg = dyn_cast(value)) - return arg->getDecl()->getLoc(); - llvm_unreachable("Do not know how to get source loc for value?!"); -} - -// Returns true if we emitted a diagnostic and handled the single block -// case. Returns false if we visited all of the uses and seeded the UseState -// struct with the information needed to perform our interprocedural dataflow. -bool MoveKillsCopyableAddressesObjectChecker:: - performSingleBasicBlockAnalysisForAllMarkMoves(SILValue address) { - bool didEmitSingleBlockDiagnostic = false; - for (auto *mvi : useState.markMoves) { - // First scan downwards to make sure we are move out of this block. - - SILInstruction *interestingUser = nullptr; - switch (downwardScanForMoveOut(mvi, useState, &interestingUser)) { - case DownwardScanResult::Invalid: - llvm_unreachable("invalid"); - case DownwardScanResult::Destroy: { - // If we found a destroy, then we found a single block case that we can - // handle. Remove the destroy and convert the mark_unresolved_move_addr - // into a true move. 
- auto *dvi = cast(interestingUser); - SILBuilderWithScope builder(mvi); - builder.createCopyAddr(mvi->getLoc(), mvi->getSrc(), mvi->getDest(), - IsTake, IsInitialization); - useState.destroys.erase(dvi); - mvi->eraseFromParent(); - dvi->eraseFromParent(); - continue; - } - case DownwardScanResult::UseForDiagnostic: { - // Then check if we found a user that violated our dataflow rules. In such - // a case, emit an error, cleanup our mark_unresolved_move_addr, and - // finally continue. - didEmitSingleBlockDiagnostic = true; - - { - auto diag = - diag::sil_movekillscopyablevalue_value_consumed_more_than_once; - StringRef name = getDebugVarName(address); - diagnose(getASTContext(), getSourceLocFromValue(address), diag, name); - } - - { - auto diag = diag::sil_movekillscopyablevalue_move_here; - diagnose(getASTContext(), mvi->getLoc().getSourceLoc(), diag); - } - - { - auto diag = diag::sil_movekillscopyablevalue_use_here; - diagnose(getASTContext(), interestingUser->getLoc().getSourceLoc(), - diag); - } - - // We purposely continue to see if at least in simple cases, we can flag - // mistakes from other moves. Since we are setting emittedDiagnostic to - // true, we will not perform the actual dataflow due to a check after - // the loop. - // - // We also clean up mvi by converting it to a copy_addr init so we do not - // emit fail errors later. - // - // TODO: Can we handle multiple errors in the same block for a single - // move? 
- SILBuilderWithScope builder(mvi); - builder.createCopyAddr(mvi->getLoc(), mvi->getSrc(), mvi->getDest(), - IsNotTake, IsInitialization); - mvi->eraseFromParent(); - continue; - } - case DownwardScanResult::Reinit: { - convertMemoryReinitToInitForm(interestingUser); - useState.reinits.erase(interestingUser); - SILBuilderWithScope builder(mvi); - builder.createCopyAddr(mvi->getLoc(), mvi->getSrc(), mvi->getDest(), - IsTake, IsInitialization); - mvi->eraseFromParent(); - continue; - } - case DownwardScanResult::MoveOut: - break; - } - - // If we did not found any uses later in the block that was an interesting - // use, we need to perform dataflow. - LLVM_DEBUG(llvm::dbgs() << "Our move is live out, so we need to process " - "it with the dataflow.\n"); - markMovesToDataflow.emplace_back(mvi); - - // Now scan up to see if mvi is also a use to seed the dataflow. This could - // happen if we have an earlier move. - if (upwardScanForUseOut(mvi, useState)) { - LLVM_DEBUG(llvm::dbgs() << "MVI projects a use up"); - useBlocks[mvi->getParent()] = mvi; - } - } - - return didEmitSingleBlockDiagnostic; -} - -static bool -cleanupAllDestroyAddr(SILFunction *fn, SmallBitVector &destroyIndices, - SmallBitVector &reinitIndices, UseState &useState, - BasicBlockSet &blocksVisitedWhenProcessingNewTakes, - BasicBlockSet &blocksWithMovesThatAreNowTakes) { +bool DataflowState::cleanupAllDestroyAddr( + SILValue address, SILFunction *fn, SmallBitVector &destroyIndices, + SmallBitVector &reinitIndices, SmallBitVector &consumingClosureIndices, + BasicBlockSet &blocksVisitedWhenProcessingNewTakes, + BasicBlockSet &blocksWithMovesThatAreNowTakes, + SmallBlotSetVector &postDominatingConsumingUsers) { bool madeChange = false; BasicBlockWorklist worklist(fn); - SILValue daiOperand; + LLVM_DEBUG(llvm::dbgs() << "Cleanup up destroy addr!\n"); LLVM_DEBUG(llvm::dbgs() << " Visiting destroys!\n"); LLVM_DEBUG(llvm::dbgs() << " Destroy Indices: " << destroyIndices << "\n"); @@ -629,39 +1437,65 @@ 
cleanupAllDestroyAddr(SILFunction *fn, SmallBitVector &destroyIndices, if (!dai) continue; LLVM_DEBUG(llvm::dbgs() << " Destroy: " << *dai); - SILValue op = (*dai)->getOperand(); - assert(daiOperand == SILValue() || op == daiOperand); - daiOperand = op; for (auto *predBlock : (*dai)->getParent()->getPredecessorBlocks()) { worklist.pushIfNotVisited(predBlock); } } + LLVM_DEBUG(llvm::dbgs() << " Visiting reinit!\n"); for (int index = reinitIndices.find_first(); index != -1; index = reinitIndices.find_next(index)) { auto reinit = useState.reinits[index]; if (!reinit) continue; - LLVM_DEBUG(llvm::dbgs() << " Reinit: " << *reinit); + LLVM_DEBUG(llvm::dbgs() << " Reinit: " << **reinit); for (auto *predBlock : (*reinit)->getParent()->getPredecessorBlocks()) { worklist.pushIfNotVisited(predBlock); } } + LLVM_DEBUG(llvm::dbgs() << " Visiting consuming closures!\n"); + for (int index = consumingClosureIndices.find_first(); index != -1; + index = consumingClosureIndices.find_next(index)) { + auto &pair = *std::next(useState.closureUses.begin(), index); + auto *op = pair.first; + LLVM_DEBUG(llvm::dbgs() << " Consuming closure: " << *op->getUser()); + for (auto *predBlock : op->getUser()->getParent()->getPredecessorBlocks()) { + worklist.pushIfNotVisited(predBlock); + } + } + LLVM_DEBUG(llvm::dbgs() << "Processing worklist!\n"); while (auto *next = worklist.pop()) { LLVM_DEBUG(llvm::dbgs() << "Looking at block: bb" << next->getDebugID() << "\n"); + // Any blocks that contained processed moves are stop points. - if (blocksWithMovesThatAreNowTakes.contains(next)) + if (blocksWithMovesThatAreNowTakes.contains(next)) { + LLVM_DEBUG(llvm::dbgs() + << " Block contained a move that is now a true take.\n"); continue; + } + // Then if we find that we have a block that was never visited when we + // walked along successor edges from the move, then we know that we need to + // insert a destroy_addr. 
+ // + // This is safe to do since this block lives along the dominance frontier + // and we do not allow for critical edges, so as we walk along predecessors, + // given that any such block must also have a successor that was reachable + // from our move, we know that this unprocessed block must only have one + // successor, a block reachable from our move and thus must not have any + // unhandled uses. if (!blocksVisitedWhenProcessingNewTakes.contains(next)) { + LLVM_DEBUG(llvm::dbgs() << " Found a block that was not visited when " + "we processed takes of the given move.\n"); // Insert a destroy_addr here since the block isn't reachable from any of // our moves. - SILBuilderWithScope builder(next->getTerminator()); + SILBuilderWithScope builder( + std::prev(next->getTerminator()->getIterator())); auto *dvi = builder.createDestroyAddr( - RegularLocation::getAutoGeneratedLocation(), daiOperand); + RegularLocation::getAutoGeneratedLocation(), address); useState.destroys.insert(dvi); continue; } @@ -675,10 +1509,11 @@ cleanupAllDestroyAddr(SILFunction *fn, SmallBitVector &destroyIndices, for (int index = destroyIndices.find_first(); index != -1; index = destroyIndices.find_next(index)) { - auto dvi = useState.destroys[index]; - if (!dvi) + auto destroy = useState.destroys[index]; + if (!destroy) continue; - (*dvi)->eraseFromParent(); + LLVM_DEBUG(llvm::dbgs() << "Erasing destroy_addr: " << *destroy); + (*destroy)->eraseFromParent(); madeChange = true; } @@ -687,17 +1522,52 @@ cleanupAllDestroyAddr(SILFunction *fn, SmallBitVector &destroyIndices, auto reinit = useState.reinits[index]; if (!reinit) continue; + LLVM_DEBUG(llvm::dbgs() << "Converting reinit to init: " << *reinit); convertMemoryReinitToInitForm(*reinit); madeChange = true; } + // Check for consuming closures. If we find such a consuming closure, track + // that this full apply site needs to have some parameters converted when we + // are done processing. 
+ // + // NOTE: We do this late to ensure that we only clone a defer exactly once + // rather than multiple times for multiple vars. + for (int index = consumingClosureIndices.find_first(); index != -1; + index = consumingClosureIndices.find_next(index)) { + auto &pair = *std::next(useState.closureUses.begin(), index); + auto *closureUse = pair.first; + if (!closureUse) + continue; + + // This is correct today due to us only supporting defer. When we handle + // partial apply, we will need to do more work ehre. + FullApplySite fas(closureUse->getUser()); + assert(fas); + unsigned appliedArgIndex = fas.getAppliedArgIndex(*closureUse); + LLVM_DEBUG(llvm::dbgs() << "Processing closure use: " << **fas); + LLVM_DEBUG(llvm::dbgs() << "AppliedArgIndex: " << appliedArgIndex << '\n'); + auto &bitVector = applySiteToPromotedArgIndices[fas]; + auto conventions = fas.getSubstCalleeConv(); + unsigned numNonResultArgs = conventions.getNumSILArguments(); + if (bitVector.size() < numNonResultArgs) + bitVector.resize(numNonResultArgs); + bitVector.set(appliedArgIndex); + for (auto *user : pair.second.pairedConsumingInsts) { + closureConsumes.insert(user); + } + } + return madeChange; } -bool MoveKillsCopyableAddressesObjectChecker::performGlobalDataflow( - SILValue address) { - bool madeChange = false; +bool DataflowState::process( + SILValue address, + SmallBlotSetVector &postDominatingConsumingUsers) { + SILFunction *fn = address->getFunction(); + assert(fn); + bool madeChange = false; SmallBitVector indicesOfPairedDestroys; auto getIndicesOfPairedDestroys = [&]() -> SmallBitVector & { if (indicesOfPairedDestroys.size() != useState.destroys.size()) @@ -710,22 +1580,29 @@ bool MoveKillsCopyableAddressesObjectChecker::performGlobalDataflow( indicesOfPairedReinits.resize(useState.reinits.size()); return indicesOfPairedReinits; }; + SmallBitVector indicesOfPairedConsumingClosureUses; + auto getIndicesOfPairedConsumingClosureUses = [&]() -> SmallBitVector & { + if 
(indicesOfPairedConsumingClosureUses.size() != + useState.closureUses.size()) + indicesOfPairedConsumingClosureUses.resize(useState.closureUses.size()); + return indicesOfPairedConsumingClosureUses; + }; - BasicBlockSet visitedByNewMove(fn); + BasicBlockSet blocksVisitedWhenProcessingNewTakes(fn); BasicBlockSet blocksWithMovesThatAreNowTakes(fn); bool convertedMarkMoveToTake = false; - for (auto *mvi : markMovesToDataflow) { + for (auto *mvi : markMovesThatPropagateDownwards) { bool emittedSingleDiagnostic = false; LLVM_DEBUG(llvm::dbgs() << "Checking Multi Block Dataflow for: " << *mvi); BasicBlockWorklist worklist(fn); - BasicBlockSetVector setVector(fn); + BasicBlockSetVector visitedBlocks(fn); for (auto *succBlock : mvi->getParent()->getSuccessorBlocks()) { LLVM_DEBUG(llvm::dbgs() << " SuccBlocks: " << succBlock->getDebugID() << "\n"); worklist.pushIfNotVisited(succBlock); - setVector.insert(succBlock); + visitedBlocks.insert(succBlock); } while (auto *next = worklist.pop()) { @@ -741,7 +1618,7 @@ bool MoveKillsCopyableAddressesObjectChecker::performGlobalDataflow( LLVM_DEBUG(llvm::dbgs() << " Is Use Block! Emitting Error!\n"); // We found one! Emit the diagnostic and continue and see if we can get // more diagnostics. - auto &astContext = getASTContext(); + auto &astContext = fn->getASTContext(); { auto diag = diag::sil_movekillscopyablevalue_value_consumed_more_than_once; @@ -763,6 +1640,38 @@ bool MoveKillsCopyableAddressesObjectChecker::performGlobalDataflow( break; } + // Now see if we have a closure use. + { + auto iter = closureUseBlocks.find(next); + if (iter != closureUseBlocks.end()) { + LLVM_DEBUG(llvm::dbgs() << " Is Use Block! Emitting Error!\n"); + // We found one! Emit the diagnostic and continue and see if we can + // get more diagnostics. 
+ auto &astContext = fn->getASTContext(); + { + auto diag = + diag::sil_movekillscopyablevalue_value_consumed_more_than_once; + StringRef name = getDebugVarName(address); + diagnose(astContext, getSourceLocFromValue(address), diag, name); + } + + { + auto diag = diag::sil_movekillscopyablevalue_move_here; + diagnose(astContext, mvi->getLoc().getSourceLoc(), diag); + } + + { + auto diag = diag::sil_movekillscopyablevalue_use_here; + for (auto *user : iter->second->pairedUseInsts) { + diagnose(astContext, user->getLoc().getSourceLoc(), diag); + } + } + + emittedSingleDiagnostic = true; + break; + } + } + // Then see if this is a destroy block. If so, do not add successors and // continue. This is because we stop processing at destroy_addr. This // destroy_addr is paired with the mark_unresolved_move_addr. @@ -790,6 +1699,18 @@ bool MoveKillsCopyableAddressesObjectChecker::performGlobalDataflow( } } + { + auto iter = closureConsumeBlocks.find(next); + if (iter != closureConsumeBlocks.end()) { + LLVM_DEBUG(llvm::dbgs() << " Is reinit Block! Setting up for " + "later deletion if possible!\n"); + auto indexIter = useState.closureOperandToIndexMap.find(iter->second); + assert(indexIter != useState.closureOperandToIndexMap.end()); + getIndicesOfPairedConsumingClosureUses().set(indexIter->second); + continue; + } + } + // Then see if this is an init block. If so, do not add successors and // continue. We already checked that we are not destroy up in this block // by the check a few lines up. So we know that we are in one of the @@ -800,7 +1721,7 @@ bool MoveKillsCopyableAddressesObjectChecker::performGlobalDataflow( // such a case, we need to just stop processing since we are re-initing // memory for a var. // - // 2. There is a consuming use that is treated as a liveness use before + // 2. There is a consuming use that is treated as a consuming use before // us. In that case, we will have already errored upon it. 
if (initBlocks.count(next)) { LLVM_DEBUG(llvm::dbgs() << " Is Init Block!\n"); @@ -813,7 +1734,7 @@ bool MoveKillsCopyableAddressesObjectChecker::performGlobalDataflow( // Otherwise, add successors if we haven't visited them to the worklist. for (auto *succBlock : next->getSuccessorBlocks()) { worklist.pushIfNotVisited(succBlock); - setVector.insert(succBlock); + visitedBlocks.insert(succBlock); } } @@ -830,8 +1751,8 @@ bool MoveKillsCopyableAddressesObjectChecker::performGlobalDataflow( builder.createCopyAddr(mvi->getLoc(), mvi->getSrc(), mvi->getDest(), IsTake, IsInitialization); // Flush our SetVector into the visitedByNewMove. - for (auto *block : setVector) { - visitedByNewMove.insert(block); + for (auto *block : visitedBlocks) { + blocksVisitedWhenProcessingNewTakes.insert(block); } convertedMarkMoveToTake = true; } @@ -845,135 +1766,509 @@ bool MoveKillsCopyableAddressesObjectChecker::performGlobalDataflow( // Now that we have processed all of our mark_moves, eliminate all of the // destroy_addr. madeChange |= cleanupAllDestroyAddr( - fn, getIndicesOfPairedDestroys(), getIndicesOfPairedReinits(), useState, - visitedByNewMove, blocksWithMovesThatAreNowTakes); + address, fn, getIndicesOfPairedDestroys(), getIndicesOfPairedReinits(), + getIndicesOfPairedConsumingClosureUses(), + blocksVisitedWhenProcessingNewTakes, blocksWithMovesThatAreNowTakes, + postDominatingConsumingUsers); return madeChange; } -bool MoveKillsCopyableAddressesObjectChecker::check() { - if (addressesToCheck.empty()) - return false; +void DataflowState::init() { + // Go through all init uses and if we don't see any other of our uses, then + // mark this as an "init block". 
+ for (auto *init : useState.inits) { + if (upwardScanForInit(init, useState)) { + LLVM_DEBUG(llvm::dbgs() << " Found use block at: " << *init); + initBlocks.insert(init->getParent()); + } + } - LLVM_DEBUG(llvm::dbgs() << "Visiting Function: " << fn->getName() << "\n"); - auto addressToProcess = - llvm::makeArrayRef(addressesToCheck.begin(), addressesToCheck.end()); + // Then go through all normal uses and do upwardScanForUseOut. + for (auto *user : useState.livenessUses) { + if (upwardScanForUseOut(user, useState)) { + LLVM_DEBUG(llvm::dbgs() << " Found liveness block at: " << *user); + useBlocks[user->getParent()] = user; + } + } - bool madeChange = false; + for (auto destroyOpt : useState.destroys) { + // Any destroys we eliminated when processing single basic blocks will be + // nullptr. Skip them! + if (!destroyOpt) + continue; - while (!addressToProcess.empty()) { - auto address = addressToProcess.front(); - addressToProcess = addressToProcess.drop_front(1); - LLVM_DEBUG(llvm::dbgs() << "Visiting: " << *address); + auto *destroy = *destroyOpt; - auto accessPathWithBase = AccessPathWithBase::compute(address); - auto accessPath = accessPathWithBase.accessPath; + auto iter = useState.destroyToIndexMap.find(destroy); + assert(iter != useState.destroyToIndexMap.end()); - // Bail on an invalid AccessPath. - // - // AccessPath completeness is verified independently--it may be invalid in - // extraordinary situations. When AccessPath is valid, we know all its uses - // are recognizable. - // - // NOTE: If due to an invalid access path we fail here, we will just error - // on the _move since the _move would not have been handled. 
- if (!accessPath.isValid()) - continue; + if (upwardScanForDestroys(destroy, useState)) { + LLVM_DEBUG(llvm::dbgs() << " Found destroy block at: " << *destroy); + destroyBlocks[destroy->getParent()] = destroy; + } + } - SWIFT_DEFER { visitor.clear(); }; - visitor.reset(address); - if (!visitAccessPathUses(visitor, accessPath, fn)) + for (auto reinitOpt : useState.reinits) { + // Any destroys we eliminated when processing single basic blocks will be + // nullptr. Skip them! + if (!reinitOpt) continue; - // See if our base address is an inout. If we found any moves, add as a - // liveness use all function terminators. - if (auto *fArg = dyn_cast(address)) { - if (fArg->hasConvention(SILArgumentConvention::Indirect_Inout)) { - if (visitor.useState.markMoves.size()) { - SmallVector exitingBlocks; - fn->findExitingBlocks(exitingBlocks); - for (auto *block : exitingBlocks) { - visitor.useState.livenessUses.insert(block->getTerminator()); - } - } + auto *reinit = *reinitOpt; + auto iter = useState.reinitToIndexMap.find(reinit); + assert(iter != useState.reinitToIndexMap.end()); + + if (upwardScanForDestroys(reinit, useState)) { + LLVM_DEBUG(llvm::dbgs() << " Found reinit block at: " << *reinit); + reinitBlocks[reinit->getParent()] = reinit; + } + } + + for (auto closureUse : useState.closureUses) { + auto *use = closureUse.first; + auto &state = closureUse.second; + auto *user = use->getUser(); + + switch (state.result) { + case DownwardScanResult::Invalid: + case DownwardScanResult::Destroy: + case DownwardScanResult::Reinit: + case DownwardScanResult::UseForDiagnostic: + case DownwardScanResult::MoveOut: + llvm_unreachable("unhandled"); + case DownwardScanResult::ClosureUse: + if (upwardScanForUseOut(user, useState)) { + LLVM_DEBUG(llvm::dbgs() + << " Found closure liveness block at: " << *user); + closureUseBlocks[user->getParent()] = &state; } + break; + case DownwardScanResult::ClosureConsume: + if (upwardScanForDestroys(user, useState)) { + LLVM_DEBUG(llvm::dbgs() 
+ << " Found closure consuming block at: " << *user); + closureConsumeBlocks[user->getParent()] = use; + } + break; } + } +} - // Now initialize our data structures. - SWIFT_DEFER { - useBlocks.clear(); - initBlocks.clear(); - destroyBlocks.clear(); - reinitBlocks.clear(); - markMovesToDataflow.clear(); - }; - - // Perform the single basic block analysis emitting a diagnostic/pairing - // mark_unresolved_move_addr and destroys if needed. If we discover a - // mark_move that propagates its state out of the current block, this - // routine also prepares the pass for running the multi-basic block - // diagnostic. - if (performSingleBasicBlockAnalysisForAllMarkMoves(address)) { - LLVM_DEBUG(llvm::dbgs() << "Performed single block analysis!\n"); - madeChange = true; - continue; +// Returns true if we emitted a diagnostic and handled the single block +// case. Returns false if we visited all of the uses and seeded the UseState +// struct with the information needed to perform our interprocedural dataflow. +static bool performSingleBasicBlockAnalysis(DataflowState &dataflowState, + SILValue address, + MarkUnresolvedMoveAddrInst *mvi) { + // First scan downwards to make sure we are move out of this block. + auto &useState = dataflowState.useState; + auto &applySiteToPromotedArgIndices = + dataflowState.applySiteToPromotedArgIndices; + auto &closureConsumes = dataflowState.closureConsumes; + + SILInstruction *interestingUser = nullptr; + Operand *interestingUse = nullptr; + TinyPtrVector interestingClosureUsers; + switch (downwardScanForMoveOut(mvi, useState, &interestingUser, + &interestingUse, interestingClosureUsers)) { + case DownwardScanResult::Invalid: + llvm_unreachable("invalid"); + case DownwardScanResult::Destroy: { + assert(!interestingUse); + assert(interestingUser); + + // If we found a destroy, then we found a single block case that we can + // handle. Remove the destroy and convert the mark_unresolved_move_addr + // into a true move. 
+ auto *dvi = cast(interestingUser); + SILBuilderWithScope builder(mvi); + builder.createCopyAddr(mvi->getLoc(), mvi->getSrc(), mvi->getDest(), IsTake, + IsInitialization); + useState.destroys.erase(dvi); + mvi->eraseFromParent(); + dvi->eraseFromParent(); + return false; + } + case DownwardScanResult::ClosureUse: { + assert(interestingUse); + assert(interestingUser); + + // Then check if we found a user that violated our dataflow rules. In such + // a case, emit an error, cleanup our mark_unresolved_move_addr, and + // finally continue. + auto &astCtx = mvi->getFunction()->getASTContext(); + { + auto diag = + diag::sil_movekillscopyablevalue_value_consumed_more_than_once; + StringRef name = getDebugVarName(address); + diagnose(astCtx, getSourceLocFromValue(address), diag, name); } - // Go through all init uses and if we don't see any other of our uses, then - // mark this as an "init block". - for (auto *init : useState.inits) { - if (upwardScanForInit(init, useState)) { - LLVM_DEBUG(llvm::dbgs() << " Found use block at: " << *init); - initBlocks.insert(init->getParent()); + auto diag = diag::sil_movekillscopyablevalue_move_here; + diagnose(astCtx, mvi->getLoc().getSourceLoc(), diag); + + { + auto diag = diag::sil_movekillscopyablevalue_use_here; + for (auto *user : interestingClosureUsers) { + diagnose(astCtx, user->getLoc().getSourceLoc(), diag); } } - // Then go through all normal uses and do upwardScanForUseOut. - for (auto *user : useState.livenessUses) { - if (upwardScanForUseOut(user, useState)) { - LLVM_DEBUG(llvm::dbgs() << " Found liveness block at: " << *user); - useBlocks[user->getParent()] = user; - } + // We purposely continue to see if at least in simple cases, we can flag + // mistakes from other moves. Since we are setting emittedDiagnostic to + // true, we will not perform the actual dataflow due to a check after + // the loop. + // + // We also clean up mvi by converting it to a copy_addr init so we do not + // emit fail errors later. 
+ // + // TODO: Can we handle multiple errors in the same block for a single + // move? + SILBuilderWithScope builder(mvi); + builder.createCopyAddr(mvi->getLoc(), mvi->getSrc(), mvi->getDest(), + IsNotTake, IsInitialization); + mvi->eraseFromParent(); + return true; + } + case DownwardScanResult::UseForDiagnostic: { + assert(!interestingUse); + assert(interestingUser); + + // Then check if we found a user that violated our dataflow rules. In such + // a case, emit an error, cleanup our mark_unresolved_move_addr, and + // finally continue. + auto &astCtx = mvi->getFunction()->getASTContext(); + { + auto diag = + diag::sil_movekillscopyablevalue_value_consumed_more_than_once; + StringRef name = getDebugVarName(address); + diagnose(astCtx, getSourceLocFromValue(address), diag, name); } - for (auto destroyOpt : useState.destroys) { - // Any destroys we eliminated when processing single basic blocks will be - // nullptr. Skip them! - if (!destroyOpt) - continue; + { + auto diag = diag::sil_movekillscopyablevalue_move_here; + diagnose(astCtx, mvi->getLoc().getSourceLoc(), diag); + } - auto *destroy = *destroyOpt; + { + auto diag = diag::sil_movekillscopyablevalue_use_here; + diagnose(astCtx, interestingUser->getLoc().getSourceLoc(), diag); + } + + // We purposely continue to see if at least in simple cases, we can flag + // mistakes from other moves. Since we are setting emittedDiagnostic to + // true, we will not perform the actual dataflow due to a check after + // the loop. + // + // We also clean up mvi by converting it to a copy_addr init so we do not + // emit fail errors later. + // + // TODO: Can we handle multiple errors in the same block for a single + // move? 
+ SILBuilderWithScope builder(mvi); + builder.createCopyAddr(mvi->getLoc(), mvi->getSrc(), mvi->getDest(), + IsNotTake, IsInitialization); + mvi->eraseFromParent(); + return true; + } + case DownwardScanResult::Reinit: { + assert(!interestingUse); + assert(interestingUser); - auto iter = useState.destroyToIndexMap.find(destroy); - assert(iter != useState.destroyToIndexMap.end()); + convertMemoryReinitToInitForm(interestingUser); + useState.reinits.erase(interestingUser); + SILBuilderWithScope builder(mvi); + builder.createCopyAddr(mvi->getLoc(), mvi->getSrc(), mvi->getDest(), IsTake, + IsInitialization); + mvi->eraseFromParent(); + return false; + } + case DownwardScanResult::ClosureConsume: { + assert(interestingUse); + assert(interestingUser); - if (upwardScanForDestroys(destroy, useState)) { - LLVM_DEBUG(llvm::dbgs() << " Found destroy block at: " << *destroy); - destroyBlocks[destroy->getParent()] = destroy; - } + // If we found a closure consume, then we found a single block case that we + // can handle. Remove the destroys/reinit, register the specific. + SILBuilderWithScope builder(mvi); + builder.createCopyAddr(mvi->getLoc(), mvi->getSrc(), mvi->getDest(), IsTake, + IsInitialization); + + // This is correct today due to us only supporting defer. When we handle + // partial apply, we will need to do more work ehre. 
+ FullApplySite fas(interestingUser); + assert(fas); + auto &bitVector = applySiteToPromotedArgIndices[fas]; + auto conventions = fas.getSubstCalleeConv(); + unsigned numNonResultArgs = conventions.getNumSILArguments(); + if (bitVector.size() < numNonResultArgs) + bitVector.resize(numNonResultArgs); + bitVector.set(fas.getAppliedArgIndex(*interestingUse)); + for (auto *user : interestingClosureUsers) { + closureConsumes.insert(user); } + LLVM_DEBUG(llvm::dbgs() << "Found apply site to clone: " << **fas); + LLVM_DEBUG(llvm::dbgs() << "BitVector: "; + dumpBitVector(llvm::dbgs(), bitVector); llvm::dbgs() << '\n'); + mvi->eraseFromParent(); + return false; + } + case DownwardScanResult::MoveOut: + assert(!interestingUse); + assert(!interestingUser); + break; + } - for (auto reinitOpt : useState.reinits) { - // Any destroys we eliminated when processing single basic blocks will be - // nullptr. Skip them! - if (!reinitOpt) - continue; + // If we did not found any uses later in the block that was an interesting + // use, we need to perform dataflow. + LLVM_DEBUG(llvm::dbgs() << "Our move is live out, so we need to process " + "it with the dataflow.\n"); + dataflowState.markMovesThatPropagateDownwards.emplace_back(mvi); + + // Now scan up to see if mvi is also a use to seed the dataflow. This could + // happen if we have an earlier move. 
+ if (upwardScanForUseOut(mvi, dataflowState.useState)) { + LLVM_DEBUG(llvm::dbgs() << "MVI projects a use up"); + dataflowState.useBlocks[mvi->getParent()] = mvi; + } + return false; +} + +//===----------------------------------------------------------------------===// +// Address Checker +//===----------------------------------------------------------------------===// + +namespace { + +struct MoveKillsCopyableAddressesChecker { + SILFunction *fn; + UseState useState; + DataflowState dataflowState; + UseState closureUseState; + ClosureArgDataflowState closureUseDataflowState; + SILOptFunctionBuilder &funcBuilder; + llvm::SmallMapVector + applySiteToPromotedArgIndices; + SmallBlotSetVector closureConsumes; + + MoveKillsCopyableAddressesChecker(SILFunction *fn, + SILOptFunctionBuilder &funcBuilder) + : fn(fn), useState(), + dataflowState(funcBuilder, useState, applySiteToPromotedArgIndices, + closureConsumes), + closureUseState(), closureUseDataflowState(closureUseState), + funcBuilder(funcBuilder) {} + + void cloneDeferCalleeAndRewriteUses( + SmallVectorImpl &temporaryStorage, + const SmallBitVector &bitVector, FullApplySite oldApplySite, + SmallBlotSetVector &postDominatingConsumingUsers); + + bool check(SILValue address); + bool performClosureDataflow(Operand *callerOperand, + ClosureOperandState &calleeOperandState); + + void emitDiagnosticForMove(SILValue borrowedValue, + StringRef borrowedValueName, MoveValueInst *mvi); + + ASTContext &getASTContext() const { return fn->getASTContext(); } +}; + +} // namespace + +void MoveKillsCopyableAddressesChecker::cloneDeferCalleeAndRewriteUses( + SmallVectorImpl &newArgs, const SmallBitVector &bitVector, + FullApplySite oldApplySite, + SmallBlotSetVector &postDominatingConsumingUsers) { + auto *origCallee = oldApplySite.getReferencedFunctionOrNull(); + assert(origCallee); + + auto name = getClonedName(origCallee, origCallee->isSerialized(), bitVector); + + SILFunction *newCallee = nullptr; + if (auto *fn = 
origCallee->getModule().lookUpFunction(name)) { + newCallee = fn; + } else { + ClosureArgumentInOutToOutCloner cloner( + funcBuilder, origCallee, origCallee->isSerialized(), + postDominatingConsumingUsers, bitVector, name); + cloner.populateCloned(); + newCallee = cloner.getCloned(); + } + assert(newCallee); + + // Ok, we now have populated our new callee. We need to create a new full + // apply site that calls the new function appropriately. + SWIFT_DEFER { newArgs.clear(); }; + + // First add all of our old results to newArgs. + auto oldConv = oldApplySite.getSubstCalleeConv(); + for (unsigned i : range(oldConv.getSILArgIndexOfFirstIndirectResult(), + oldConv.getSILArgIndexOfFirstParam())) { + newArgs.push_back(oldApplySite->getOperand(i)); + } + + // Now add all of our new out params. + for (int i = bitVector.find_first(); i != -1; i = bitVector.find_next(i)) { + unsigned appliedArgIndex = + oldApplySite.getOperandIndexOfFirstArgument() + i; + newArgs.push_back(oldApplySite->getOperand(appliedArgIndex)); + } + + // Finally, add all of the rest of our arguments, skipping our new out + // parameters. + for (unsigned i : range(oldConv.getSILArgIndexOfFirstParam(), + oldConv.getNumSILArguments())) { + if (bitVector.test(i)) + continue; + unsigned appliedArgIndex = + oldApplySite.getOperandIndexOfFirstArgument() + i; + newArgs.push_back(oldApplySite->getOperand(appliedArgIndex)); + } + + // Then create our new apply. 
+ SILBuilderWithScope builder(*oldApplySite); + auto *newCalleeRef = + builder.createFunctionRef(oldApplySite->getLoc(), newCallee); + auto *newApply = + builder.createApply(oldApplySite->getLoc(), newCalleeRef, + oldApplySite.getSubstitutionMap(), newArgs); + oldApplySite->replaceAllUsesPairwiseWith(newApply); + oldApplySite->eraseFromParent(); +} + +bool MoveKillsCopyableAddressesChecker::performClosureDataflow( + Operand *callerOperand, ClosureOperandState &calleeOperandState) { + auto fas = FullApplySite::isa(callerOperand->getUser()); + auto *func = fas.getCalleeFunction(); + auto *address = + func->begin()->getArgument(fas.getCalleeArgIndex(*callerOperand)); + + LLVM_DEBUG(llvm::dbgs() << "Performing closure dataflow on caller use: " + << *callerOperand->getUser()); + LLVM_DEBUG(llvm::dbgs() << " Callee: " << func->getName() << '\n'); + LLVM_DEBUG(llvm::dbgs() << " Callee Argument: " << *address); + // We emit an end closure dataflow to make it easier when reading debug output + // to make it easy to see when we have returned to analyzing the caller. + SWIFT_DEFER { + LLVM_DEBUG(llvm::dbgs() + << "Finished performing closure dataflow on Callee: " + << func->getName() << '\n';); + }; + auto accessPathWithBase = AccessPathWithBase::compute(address); + auto accessPath = accessPathWithBase.accessPath; + + // Bail on an invalid AccessPath. + // + // AccessPath completeness is verified independently--it may be invalid in + // extraordinary situations. When AccessPath is valid, we know all its uses + // are recognizable. + // + // NOTE: If due to an invalid access path we fail here, we will just error + // on the _move since the _move would not have been handled. + if (!accessPath.isValid()) + return false; + + // TODO: Hoist this useState into an ivar that we can reuse in between closure + // operands? 
+ GatherClosureUseVisitor visitor(closureUseState); + SWIFT_DEFER { visitor.clear(); }; + visitor.reset(address); + if (!visitAccessPathUses(visitor, accessPath, fn)) + return false; + + SWIFT_DEFER { closureUseDataflowState.clear(); }; + return closureUseDataflowState.process(address, calleeOperandState, + closureConsumes); +} + +bool MoveKillsCopyableAddressesChecker::check(SILValue address) { + auto accessPathWithBase = AccessPathWithBase::compute(address); + auto accessPath = accessPathWithBase.accessPath; + + // Bail on an invalid AccessPath. + // + // AccessPath completeness is verified independently--it may be invalid in + // extraordinary situations. When AccessPath is valid, we know all its uses + // are recognizable. + // + // NOTE: If due to an invalid access path we fail here, we will just error + // on the _move since the _move would not have been handled. + if (!accessPath.isValid()) + return false; - auto *reinit = *reinitOpt; - auto iter = useState.reinitToIndexMap.find(reinit); - assert(iter != useState.reinitToIndexMap.end()); + GatherLexicalLifetimeUseVisitor visitor(useState); + SWIFT_DEFER { visitor.clear(); }; + visitor.reset(address); + if (!visitAccessPathUses(visitor, accessPath, fn)) + return false; - if (upwardScanForDestroys(reinit, useState)) { - LLVM_DEBUG(llvm::dbgs() << " Found reinit block at: " << *reinit); - reinitBlocks[reinit->getParent()] = reinit; + // See if our base address is an inout. If we found any moves, add as a + // liveness use all function terminators. + if (auto *fArg = dyn_cast(address)) { + if (fArg->hasConvention(SILArgumentConvention::Indirect_Inout)) { + if (visitor.useState.markMoves.size()) { + SmallVector exitingBlocks; + fn->findExitingBlocks(exitingBlocks); + for (auto *block : exitingBlocks) { + visitor.useState.livenessUses.insert(block->getTerminator()); + } } } + } + + // Now initialize our data structures. 
+ SWIFT_DEFER { dataflowState.clear(); }; + + // First go through and perform dataflow on each of the closures our address + // depends on. We do not have to worry about other unrelated addresses from + // being passed to the defer in our argument slot since address phis are + // banned in canonical SIL. + // + // This summary will let us treat the whole closure's effect on the closure + // operand as if it was a single instruction. + for (auto &pair : useState.closureUses) { + auto *operand = pair.first; + auto &closureState = pair.second; + + if (!performClosureDataflow(operand, closureState)) { + LLVM_DEBUG(llvm::dbgs() + << "!! Early exit due to failing to analyze closure operand: " + << *operand->getUser()); + return false; + } + } - // Ok, we are setup. Perform the global dataflow! - madeChange |= performGlobalDataflow(address); + // Perform the single basic block analysis emitting a diagnostic/pairing + // mark_unresolved_move_addr and destroys if needed. If we discover a + // mark_move that propagates its state out of the current block, this + // routine also prepares the pass for running the multi-basic block + // diagnostic. + bool emittedSingleBBDiagnostic = false; + for (auto *mvi : useState.markMoves) { + emittedSingleBBDiagnostic |= + performSingleBasicBlockAnalysis(dataflowState, address, mvi); } - return madeChange; + if (emittedSingleBBDiagnostic) { + LLVM_DEBUG(llvm::dbgs() + << "Performed single block analysis and found error!\n"); + return true; + } + + // Then check if we do not need to propagate down any mark moves. In that + // case, since we did not emit an error but we did not have any + if (dataflowState.markMovesThatPropagateDownwards.empty()) { + LLVM_DEBUG(llvm::dbgs() << "Single block analysis handled all cases " + "without finding an error!\n"); + return true; + } + + // Ok, we need to perform global dataflow for one of our moves. Initialize our + // dataflow state engine and then run the dataflow itself. 
+ dataflowState.init(); + bool result = dataflowState.process(address, closureConsumes); + return result; } //===----------------------------------------------------------------------===// @@ -991,18 +2286,16 @@ class MoveKillsCopyableAddressesCheckerPass : public SILFunctionTransform { if (getFunction()->wasDeserializedCanonical()) return; - bool madeChange = false; - assert(fn->getModule().getStage() == SILStage::Raw && "Should only run on Raw SIL"); - MoveKillsCopyableAddressesObjectChecker checker(getFunction()); + SmallSetVector addressesToCheck; for (auto *arg : fn->front().getSILFunctionArguments()) { if (arg->getType().isAddress() && (arg->hasConvention(SILArgumentConvention::Indirect_In) || arg->hasConvention(SILArgumentConvention::Indirect_Inout))) - checker.addressesToCheck.insert(arg); + addressesToCheck.insert(arg); } for (auto &block : *fn) { @@ -1014,19 +2307,49 @@ class MoveKillsCopyableAddressesCheckerPass : public SILFunctionTransform { // Only check lexical alloc_stack that were not emitted as vars. if (asi->isLexical()) { LLVM_DEBUG(llvm::dbgs() << "Found lexical alloc_stack: " << *asi); - checker.addressesToCheck.insert(asi); + addressesToCheck.insert(asi); continue; } } } } - madeChange |= checker.check(); + LLVM_DEBUG(llvm::dbgs() << "Visiting Function: " << fn->getName() << "\n"); + auto addressToProcess = + llvm::makeArrayRef(addressesToCheck.begin(), addressesToCheck.end()); + + SILOptFunctionBuilder funcBuilder(*this); + MoveKillsCopyableAddressesChecker checker(getFunction(), funcBuilder); + bool madeChange = false; + + while (!addressToProcess.empty()) { + auto address = addressToProcess.front(); + addressToProcess = addressToProcess.drop_front(1); + LLVM_DEBUG(llvm::dbgs() << "Visiting: " << *address); + madeChange |= checker.check(address); + } if (madeChange) { invalidateAnalysis(SILAnalysis::InvalidationKind::Instructions); } + // Now go through and clone any apply sites that we need to clone. 
+ SmallVector newArgs; + bool rewroteCallee = false; + for (auto &pair : checker.applySiteToPromotedArgIndices) { + SWIFT_DEFER { newArgs.clear(); }; + auto fas = pair.first; + auto &bitVector = pair.second; + LLVM_DEBUG(llvm::dbgs() << "CLONING APPLYSITE: " << **fas); + LLVM_DEBUG(llvm::dbgs() << "BitVector: "; + dumpBitVector(llvm::dbgs(), bitVector); llvm::dbgs() << '\n'); + checker.cloneDeferCalleeAndRewriteUses(newArgs, bitVector, fas, + checker.closureConsumes); + rewroteCallee = true; + } + if (rewroteCallee) + invalidateAnalysis(SILAnalysis::InvalidationKind::CallsAndInstructions); + // Now search through our function one last time and any move_value // [allows_diagnostics] that remain are ones that we did not know how to // check so emit a diagnostic so the user doesn't assume that they have @@ -1037,6 +2360,8 @@ class MoveKillsCopyableAddressesCheckerPass : public SILFunctionTransform { // TODO: Emit specific diagnostics here (e.x.: _move of global). if (DisableUnhandledMoveDiagnostic) return; + + bool lateMadeChange = false; for (auto &block : *fn) { for (auto ii = block.begin(), ie = block.end(); ii != ie;) { auto *inst = &*ii; @@ -1054,9 +2379,12 @@ class MoveKillsCopyableAddressesCheckerPass : public SILFunctionTransform { builder.createCopyAddr(mai->getLoc(), mai->getSrc(), mai->getDest(), IsNotTake, IsInitialization); mai->eraseFromParent(); + lateMadeChange = true; } } } + if (lateMadeChange) + invalidateAnalysis(SILAnalysis::InvalidationKind::Instructions); } }; diff --git a/lib/SILOptimizer/Mandatory/MoveKillsCopyableValuesChecker.cpp b/lib/SILOptimizer/Mandatory/MoveKillsCopyableValuesChecker.cpp index 757076a2e2476..833784782d0e3 100644 --- a/lib/SILOptimizer/Mandatory/MoveKillsCopyableValuesChecker.cpp +++ b/lib/SILOptimizer/Mandatory/MoveKillsCopyableValuesChecker.cpp @@ -363,17 +363,7 @@ bool MoveKillsCopyableValuesChecker::check() { // Before we do anything, see if we can find a name for our value. 
We do // this early since we need this for all of our diagnostics below. - StringRef varName = "unknown"; - if (auto *use = getSingleDebugUse(lexicalValue)) { - DebugVarCarryingInst debugVar(use->getUser()); - if (auto varInfo = debugVar.getVarInfo()) { - varName = varInfo->Name; - } else { - if (auto *decl = debugVar.getDecl()) { - varName = decl->getBaseName().userFacingName(); - } - } - } + StringRef varName = getDebugVarName(lexicalValue); // Then compute liveness. SWIFT_DEFER { livenessInfo.clear(); }; @@ -419,6 +409,18 @@ bool MoveKillsCopyableValuesChecker::check() { return false; } +//===----------------------------------------------------------------------===// +// Unsupported Use Case Errors +//===----------------------------------------------------------------------===// + +static void emitUnsupportedUseCaseError(MoveValueInst *mvi) { + auto &astContext = mvi->getModule().getASTContext(); + auto diag = diag:: + sil_movekillscopyablevalue_move_applied_to_unsupported_move; + diagnose(astContext, mvi->getLoc().getSourceLoc(), diag); + mvi->setAllowsDiagnostics(false); +} + //===----------------------------------------------------------------------===// // Top Level Entrypoint //===----------------------------------------------------------------------===// @@ -455,10 +457,7 @@ class MoveKillsCopyableValuesCheckerPass : public SILFunctionTransform { for (auto &inst : block) { if (auto *mvi = dyn_cast(&inst)) { if (mvi->getAllowDiagnostics()) { - auto diag = diag:: - sil_movekillscopyablevalue_move_applied_to_unsupported_move; - diagnose(astContext, mvi->getLoc().getSourceLoc(), diag); - mvi->setAllowsDiagnostics(false); + emitUnsupportedUseCaseError(mvi); } } } diff --git a/lib/SILOptimizer/Utils/SpecializationMangler.cpp b/lib/SILOptimizer/Utils/SpecializationMangler.cpp index 0517e2299107f..dcbe58cd97504 100644 --- a/lib/SILOptimizer/Utils/SpecializationMangler.cpp +++ b/lib/SILOptimizer/Utils/SpecializationMangler.cpp @@ -109,6 +109,12 @@ void 
FunctionSignatureSpecializationMangler::setArgumentBoxToStack( ArgumentModifierIntBase(ArgumentModifier::BoxToStack); } +void FunctionSignatureSpecializationMangler::setArgumentInOutToOut( + unsigned OrigArgIdx) { + OrigArgs[OrigArgIdx].first = + ArgumentModifierIntBase(ArgumentModifier::InOutToOut); +} + void FunctionSignatureSpecializationMangler:: setReturnValueOwnedToUnowned() { @@ -224,6 +230,11 @@ void FunctionSignatureSpecializationMangler::mangleArgument( return; } + if (ArgMod == ArgumentModifierIntBase(ArgumentModifier::InOutToOut)) { + ArgOpBuffer << 'r'; + return; + } + bool hasSomeMod = false; if (ArgMod & ArgumentModifierIntBase(ArgumentModifier::ExistentialToGeneric)) { ArgOpBuffer << 'e'; diff --git a/test/SILOptimizer/move_function_kills_copyable_addressonly_lets.swift b/test/SILOptimizer/move_function_kills_copyable_addressonly_lets.swift index 355d44e8a0b25..9e689cf6ff57d 100644 --- a/test/SILOptimizer/move_function_kills_copyable_addressonly_lets.swift +++ b/test/SILOptimizer/move_function_kills_copyable_addressonly_lets.swift @@ -4,12 +4,16 @@ import Swift -public class Klass {} - ////////////////// // Declarations // ////////////////// +public class Klass {} + +public protocol P {} +public protocol SubP1 : P {} +public protocol SubP2 : P {} + func consumingUse(_ k: __owned T) {} var booleanValue: Bool { false } func nonConsumingUse(_ k: T) {} @@ -284,3 +288,117 @@ public func multipleVarsWithSubsequentBorrows(_ p: T) -> Bool { let k3 = _move(k) return k2 == k3 } + +//////////////// +// Cast Tests // +//////////////// + +public func castTest0(_ x: __owned T) -> P { // expected-error {{'x' used after being moved}} + let _ = _move(x) // expected-note {{move here}} + return x as P // expected-note {{use here}} +} + +public func castTest1(_ x: __owned T) -> SubP2 { // expected-error {{'x' used after being moved}} + let _ = _move(x) // expected-note {{move here}} + return x as! 
SubP2 // expected-note {{use here}} +} + +public func castTest2(_ x: __owned T) -> SubP1? { // expected-error {{'x' used after being moved}} + let _ = _move(x) // expected-note {{move here}} + return x as? SubP1 // expected-note {{use here}} +} + +public func castTestSwitch1(_ x: __owned T) { // expected-error {{'x' used after being moved}} + let _ = _move(x) // expected-note {{move here}} + switch x { // expected-note {{use here}} + case let k as SubP1: + print(k) + default: + print("Nope") + } +} + +public func castTestSwitch2(_ x: __owned T) { // expected-error {{'x' used after being moved}} + let _ = _move(x) // expected-note {{move here}} + switch x { // expected-note {{use here}} + case let k as SubP1: + print(k) + case let k as SubP2: + print(k) + default: + print("Nope") + } +} + +public func castTestSwitchInLoop(_ x: __owned T) { // expected-error {{'x' used after being moved}} + let _ = _move(x) // expected-note {{move here}} + + for _ in 0..<1024 { + switch x { // expected-note {{use here}} + case let k as SubP1: + print(k) + default: + print("Nope") + } + } +} + +public func castTestIfLet(_ x: __owned T) { // expected-error {{'x' used after being moved}} + let _ = _move(x) // expected-note {{move here}} + if case let k as SubP1 = x { // expected-note {{use here}} + print(k) + } else { + print("no") + } +} + +public func castTestIfLetInLoop(_ x: __owned T) { // expected-error {{'x' used after being moved}} + let _ = _move(x) // expected-note {{move here}} + for _ in 0..<1024 { + if case let k as SubP1 = x { // expected-note {{use here}} + print(k) + } else { + print("no") + } + } +} + +public enum EnumWithKlass { + case none + case klass(P) +} + +public func castTestIfLet2(_ x : __owned EnumWithKlass) { // expected-error {{'x' used after being moved}} + let _ = _move(x) // expected-note {{move here}} + if case let .klass(k as SubP1) = x { // expected-note {{use here}} + print(k) + } else { + print("no") + } +} + +///////////////////////// +// Partial 
Apply Tests // +///////////////////////// + +// Emit a better error here. At least we properly error. +public func partialApplyTest(_ x: __owned T) { // expected-error {{'x' used after being moved}} + let _ = _move(x) // expected-note {{move here}} + let f = { // expected-note {{use here}} + nonConsumingUse(x) + } + f() +} + +///////////////// +// Defer Tests // +///////////////// + +// TODO: Emit an error in the defer. +public func deferTest(_ x: __owned T) { // expected-error {{'x' used after being moved}} + let _ = _move(x) // expected-note {{move here}} + defer { // expected-note {{use here}} + nonConsumingUse(x) + } + print("do Something") +} diff --git a/test/SILOptimizer/move_function_kills_copyable_addressonly_vars.swift b/test/SILOptimizer/move_function_kills_copyable_addressonly_vars.swift index 8a138c7f21942..4b311ec14ed51 100644 --- a/test/SILOptimizer/move_function_kills_copyable_addressonly_vars.swift +++ b/test/SILOptimizer/move_function_kills_copyable_addressonly_vars.swift @@ -4,17 +4,34 @@ import Swift -public class Klass {} - ////////////////// // Declarations // ////////////////// +public class Klass { + public func getOtherKlass() -> Klass? 
{ return nil } +} + +struct KlassWrapper { + var k: Klass +} + func consumingUse(_ k: __owned T) {} var booleanValue: Bool { false } func nonConsumingUse(_ k: T) {} func exchangeUse(_ k: __owned T) -> T { k } +public protocol P { + var k: Klass { get } + + static func getP() -> Self + + func doSomething() +} + +public protocol SubP1 : P {} +public protocol SubP2 : P {} + /////////// // Tests // /////////// @@ -197,3 +214,479 @@ struct S { } } // expected-note {{use here}} } + +///////////////// +// Defer Tests // +///////////////// + +protocol DeferTestProtocol : P { +} + +extension DeferTestProtocol { + mutating func deferTestSuccess1() { + let selfType = type(of: self) + let _ = _move(self) + defer { + self = selfType.getP() + } + print("123") + } + + // Make sure we can init/reinit self multiple times without error. + mutating func deferTestSuccess2() { + let selfType = type(of: self) + let _ = _move(self) + self = selfType.getP() + let _ = _move(self) + defer { + self = selfType.getP() + } + print("123") + } + + mutating func deferTestSuccess3() { + let selfType = type(of: self) + let _ = _move(self) + defer { + self = selfType.getP() + } + defer { + self = selfType.getP() + } + print("123") + } + + // We do not support moving within a defer right now. + mutating func deferTestFail1() { + let selfType = type(of: self) + let _ = _move(self) + defer { + self = selfType.getP() + let _ = _move(self) // expected-error {{_move applied to value that the compiler does not support checking}} + } + print("123") + } + + // We do not support moving within a defer right now. 
+ mutating func deferTestFail2() { // expected-error {{'self' used after being moved}} + let selfType = type(of: self) + let _ = _move(self) // expected-note {{move here}} + defer { + nonConsumingUse(k) // expected-note {{use here}} + self = selfType.getP() + } + print("123") + } + + + mutating func deferTestFail3() { // expected-error {{'self' used after being moved}} + let selfType = type(of: self) + let _ = _move(self) // expected-note {{move here}} + nonConsumingUse(k) // expected-note {{use here}} + defer { + nonConsumingUse(k) + self = selfType.getP() + } + print("123") + } + + mutating func deferTestFail4() { // expected-error {{'self' used after being moved}} + let selfType = type(of: self) + let _ = _move(self) // expected-note {{move here}} + defer { + consumingUse(k) // expected-note {{use here}} + self = selfType.getP() + } + print("123") + } + + // TODO: We should definitely be erroring on consuming use I think. + mutating func deferTestFail5() { // expected-error {{'self' used after being moved}} + let selfType = type(of: self) + let _ = _move(self) // expected-note {{move here}} + for _ in 0..<1024 { + defer { + consumingUse(k) + self = selfType.getP() + } + print("foo bar") + } + print("123") + } // expected-note {{use here}} + + // TODO: We should be erroring on nonConsumingUse rather than the end of + // scope use. 
+ // + mutating func deferTestFail6() { // expected-error {{'self' used after being moved}} + let selfType = type(of: self) + let _ = _move(self) // expected-note {{move here}} + for _ in 0..<1024 { + defer { + nonConsumingUse(k) + self = selfType.getP() + } + print("foo bar") + } + print("123") + } // expected-note {{use here}} + + mutating func deferTestFail7() { // expected-error {{'self' used after being moved}} + let selfType = type(of: self) + for _ in 0..<1024 { + let _ = _move(self) // expected-note {{move here}} + defer { + nonConsumingUse(k) // expected-note {{use here}} + self = selfType.getP() + } + print("foo bar") + } + print("123") + } + + mutating func deferTestFail8() { // expected-error {{'self' used after being moved}} + let selfType = type(of: self) + let _ = _move(self) // expected-note {{move here}} + defer { + if booleanValue { + nonConsumingUse(k) // expected-note {{use here}} + } + self = selfType.getP() + } + print("foo bar") + } + + mutating func deferTestFail9() { // expected-error {{'self' used after being moved}} + let selfType = type(of: self) + let _ = _move(self) // expected-note {{move here}} + defer { + if booleanValue { + nonConsumingUse(k) // expected-note {{use here}} + } else { + nonConsumingUse(k) + } + self = selfType.getP() + } + print("foo bar") + } + + mutating func deferTestFail10() { // expected-error {{'self' used after being moved}} + let selfType = type(of: self) + let _ = _move(self) // expected-note {{move here}} + defer { + for _ in 0..<1024 { + nonConsumingUse(k) // expected-note {{use here}} + } + self = selfType.getP() + } + print("foo bar") + } + + mutating func deferTestFail11() { // expected-error {{'self' used after being moved}} + let selfType = type(of: self) + let _ = _move(self) // expected-note {{move here}} + if booleanValue { + print("creating blocks") + } else { + print("creating blocks2") + } + defer { + for _ in 0..<1024 { + nonConsumingUse(k) // expected-note {{use here}} + } + self = 
selfType.getP() + } + print("foo bar") + } + + mutating func deferTestFail12() { // expected-error {{'self' used after being moved}} + let selfType = type(of: self) + if booleanValue { + print("creating blocks") + } else { + let _ = _move(self) // expected-note {{move here}} + print("creating blocks2") + } + + defer { + for _ in 0..<1024 { + nonConsumingUse(k) // expected-note {{use here}} + } + self = selfType.getP() + } + print("foo bar") + } + + mutating func deferTestSuccess13() { + let selfType = type(of: self) + if booleanValue { + print("creating blocks") + } else { + let _ = _move(self) + print("creating blocks2") + } + + defer { + self = selfType.getP() + } + print("foo bar") + } + + mutating func deferTestSuccess14() { + let selfType = type(of: self) + if booleanValue { + print("creating blocks") + self.doSomething() + } else { + let _ = _move(self) + print("creating blocks2") + } + + defer { + self = selfType.getP() + } + print("foo bar") + } +} + +//////////////// +// Cast Tests // +//////////////// + +public func castTest0(_ x: __owned T) -> P { + var x2 = x // expected-error {{'x2' used after being moved}} + x2 = x + let _ = _move(x2) // expected-note {{move here}} + return x2 as P // expected-note {{use here}} +} + +public func castTest1(_ x: __owned T) -> SubP1 { + var x2 = x // expected-error {{'x2' used after being moved}} + x2 = x + let _ = _move(x2) // expected-note {{move here}} + return x2 as! SubP1 // expected-note {{use here}} +} + +public func castTest2(_ x: __owned T) -> SubP1? { + var x2 = x // expected-error {{'x2' used after being moved}} + x2 = x + let _ = _move(x2) // expected-note {{move here}} + return x2 as? 
SubP1 // expected-note {{use here}} +} + +public func castTestSwitch1(_ x : __owned T) { + var x2 = x // expected-error {{'x2' used after being moved}} + x2 = x + let _ = _move(x2) // expected-note {{move here}} + switch x2 { // expected-note {{use here}} + case let k as SubP1: + print(k) + default: + print("Nope") + } +} + +public func castTestSwitch2(_ x : __owned T) { + var x2 = x // expected-error {{'x2' used after being moved}} + x2 = x + let _ = _move(x2) // expected-note {{move here}} + switch x2 { // expected-note {{use here}} + case let k as SubP1: + print(k) + case let k as SubP2: + print(k) + default: + print("Nope") + } +} + +public func castTestSwitchInLoop(_ x : __owned T) { + var x2 = x // expected-error {{'x2' used after being moved}} + x2 = x + let _ = _move(x2) // expected-note {{move here}} + + for _ in 0..<1024 { + switch x2 { // expected-note {{use here}} + case let k as SubP1: + print(k) + default: + print("Nope") + } + } +} + +public func castTestIfLet(_ x : __owned T) { + var x2 = x // expected-error {{'x2' used after being moved}} + x2 = x + let _ = _move(x2) // expected-note {{move here}} + if case let k as SubP1 = x2 { // expected-note {{use here}} + print(k) + } else { + print("no") + } +} + +public func castTestIfLetInLoop(_ x : __owned T) { + var x2 = x // expected-error {{'x2' used after being moved}} + x2 = x + let _ = _move(x2) // expected-note {{move here}} + for _ in 0..<1024 { + if case let k as SubP1 = x2 { // expected-note {{use here}} + print(k) + } else { + print("no") + } + } +} + +public enum EnumWithP { + case none + case klass(T) +} + +public func castTestIfLet2(_ x : __owned EnumWithP) { + var x2 = x // expected-error {{'x2' used after being moved}} + x2 = x + let _ = _move(x2) // expected-note {{move here}} + if case let .klass(k as SubP1) = x2 { // expected-note {{use here}} + print(k) + } else { + print("no") + } +} + +/////////////// +// GEP Tests // +/////////////// + +public func castAccess(_ x : __owned T) { + var 
x2 = x // expected-error {{'x2' used after being moved}} + x2 = x + let _ = _move(x2) // expected-note {{move here}} + let _ = x2.k // expected-note {{use here}} +} + +public func castAccess2(_ x : __owned T) { + var x2 = x // expected-error {{'x2' used after being moved}} + x2 = x + let _ = _move(x2) // expected-note {{move here}} + let _ = x2.k.getOtherKlass() // expected-note {{use here}} +} + +///////////////////////// +// Partial Apply Tests // +///////////////////////// + +// This makes sure we always fail if we are asked to check in a partial apply. +public func partialApplyTest(_ x: __owned T) { + var x2 = x + x2 = x + let _ = _move(x2) // expected-error {{_move applied to value that the compiler does not support checking}} + let f = { + print(x2) + } + f() +} + +//////////////////////// +// Misc Tests on Self // +//////////////////////// + +protocol MiscTests : P {} + +extension MiscTests { + + // This test makes sure that we are able to properly put in the destroy_addr + // in the "creating blocks" branch. There used to be a bug where the impl + // would need at least one destroy_addr to properly infer the value to put + // into blocks not reachable from the _move but that are on the dominance + // frontier from the _move. This was unnecessary and the test makes sure we + // do not fail on this again. + mutating func noDestroyAddrBeforeOptInsertAfter() { + let selfType = type(of: self) + if booleanValue { + print("creating blocks") + } else { + let _ = _move(self) + print("creating blocks2") + } + + self = selfType.getP() + print("foo bar") + } + + // A derived version of noDestroyAddrBeforeOptInsertAfter that makes sure + // when we insert the destroy_addr, we destroy self at the end of the block. 
+ mutating func noDestroyAddrBeforeOptInsertAfter2() { + let selfType = type(of: self) + if booleanValue { + print("creating blocks") + self.doSomething() + } else { + let _ = _move(self) + print("creating blocks2") + } + + self = selfType.getP() + print("foo bar") + } +} + +////////////////////////////////// +// Multiple Captures from Defer // +////////////////////////////////// + +func multipleCapture1(_ k: T) -> () { + let kType = type(of: k) + var k2 = k + var k3 = k + let _ = _move(k2) + let _ = _move(k3) + var k4 = k + k4 = k + defer { + k2 = kType.getP() + print(k4) + k3 = kType.getP() + } + print("foo bar") +} + +func multipleCapture2(_ k: T) -> () { + let kType = type(of: k) + var k2 = k // expected-error {{'k2' used after being moved}} + k2 = k + var k3 = k + let _ = _move(k2) // expected-note {{move here}} + let _ = _move(k3) + var k4 = k + k4 = k + defer { + print(k2) // expected-note {{use here}} + print(k4) + k3 = kType.getP() + } + print("foo bar") +} + +////////////////////// +// Reinit in pieces // +////////////////////// + +// These tests exercise the diagnostic to see how we error if we re-initialize a +// var in pieces. Eventually we should teach either this diagnostic pass how to +// handle this or teach DI how to combine the initializations into one large +// reinit. 
+struct ProtPair { + var lhs: T + var rhs: T +} + +func reinitInPieces1(_ k: ProtPair) { + let selfType = type(of: k.lhs) + var k2 = k + k2 = k + + let _ = _move(k2) // expected-error {{_move applied to value that the compiler does not support checking}} + k2.lhs = selfType.getP() + k2.rhs = selfType.getP() +} diff --git a/test/SILOptimizer/move_function_kills_copyable_loadable_vars.swift b/test/SILOptimizer/move_function_kills_copyable_loadable_vars.swift index fa23804951462..942037b8f66ba 100644 --- a/test/SILOptimizer/move_function_kills_copyable_loadable_vars.swift +++ b/test/SILOptimizer/move_function_kills_copyable_loadable_vars.swift @@ -4,12 +4,22 @@ import Swift -public class Klass {} - ////////////////// // Declarations // ////////////////// +public class Klass { + var k: Klass? = nil + + func getOtherKlass() -> Klass? { nil } +} +public class SubKlass1 : Klass {} +public class SubKlass2 : Klass {} + +struct KlassWrapper { + var k: Klass +} + func consumingUse(_ k: __owned Klass) {} var booleanValue: Bool { false } var booleanValue2: Bool { false } @@ -17,7 +27,7 @@ func nonConsumingUse(_ k: Klass) {} func exchangeUse(_ k: Klass) -> Klass { k } /////////// -// Klassests // +// Tests // /////////// public func performMoveOnVarSingleBlock(_ p: Klass) { @@ -270,3 +280,454 @@ struct S { } } // expected-note {{use here}} } + +///////////////// +// Defer Tests // +///////////////// + +extension KlassWrapper { + mutating func deferTestSuccess1() { + let _ = _move(self) + defer { + self = KlassWrapper(k: Klass()) + } + print("123") + } + + // Make sure we can init/reinit self multiple times without error. 
+ mutating func deferTestSuccess2() { + let _ = _move(self) + self = KlassWrapper(k: Klass()) + let _ = _move(self) + defer { + self = KlassWrapper(k: Klass()) + } + print("123") + } + + mutating func deferTestSuccess3() { + let _ = _move(self) + defer { + self = KlassWrapper(k: Klass()) + } + defer { + self = KlassWrapper(k: Klass()) + } + print("123") + } + + // We do not support moving within a defer right now. + mutating func deferTestFail1() { + let _ = _move(self) + defer { + self = KlassWrapper(k: Klass()) + let _ = _move(self) // expected-error {{_move applied to value that the compiler does not support checking}} + } + print("123") + } + + // We do not support moving within a defer right now. + mutating func deferTestFail2() { // expected-error {{'self' used after being moved}} + let _ = _move(self) // expected-note {{move here}} + defer { + nonConsumingUse(k) // expected-note {{use here}} + self = KlassWrapper(k: Klass()) + } + print("123") + } + + + mutating func deferTestFail3() { // expected-error {{'self' used after being moved}} + let _ = _move(self) // expected-note {{move here}} + nonConsumingUse(k) // expected-note {{use here}} + defer { + nonConsumingUse(k) + self = KlassWrapper(k: Klass()) + } + print("123") + } + + mutating func deferTestFail4() { // expected-error {{'self' used after being moved}} + let _ = _move(self) // expected-note {{move here}} + defer { + consumingUse(k) // expected-note {{use here}} + self = KlassWrapper(k: Klass()) + } + print("123") + } + + // TODO: We should definitely be erroring on consuming use I think. + mutating func deferTestFail5() { // expected-error {{'self' used after being moved}} + let _ = _move(self) // expected-note {{move here}} + for _ in 0..<1024 { + defer { + consumingUse(k) + self = KlassWrapper(k: Klass()) + } + print("foo bar") + } + print("123") + } // expected-note {{use here}} + + // TODO: We should be erroring on nonConsumingUse rather than the end of + // scope use. 
+ // + mutating func deferTestFail6() { // expected-error {{'self' used after being moved}} + let _ = _move(self) // expected-note {{move here}} + for _ in 0..<1024 { + defer { + nonConsumingUse(k) + self = KlassWrapper(k: Klass()) + } + print("foo bar") + } + print("123") + } // expected-note {{use here}} + + mutating func deferTestFail7() { // expected-error {{'self' used after being moved}} + for _ in 0..<1024 { + let _ = _move(self) // expected-note {{move here}} + defer { + nonConsumingUse(k) // expected-note {{use here}} + self = KlassWrapper(k: Klass()) + } + print("foo bar") + } + print("123") + } + + mutating func deferTestFail8() { // expected-error {{'self' used after being moved}} + let _ = _move(self) // expected-note {{move here}} + defer { + if booleanValue { + nonConsumingUse(k) // expected-note {{use here}} + } + self = KlassWrapper(k: Klass()) + } + print("foo bar") + } + + mutating func deferTestFail9() { // expected-error {{'self' used after being moved}} + let _ = _move(self) // expected-note {{move here}} + defer { + if booleanValue { + nonConsumingUse(k) // expected-note {{use here}} + } else { + nonConsumingUse(k) + } + self = KlassWrapper(k: Klass()) + } + print("foo bar") + } + + mutating func deferTestFail10() { // expected-error {{'self' used after being moved}} + let _ = _move(self) // expected-note {{move here}} + defer { + for _ in 0..<1024 { + nonConsumingUse(k) // expected-note {{use here}} + } + self = KlassWrapper(k: Klass()) + } + print("foo bar") + } + + mutating func deferTestFail11() { // expected-error {{'self' used after being moved}} + let _ = _move(self) // expected-note {{move here}} + if booleanValue { + print("creating blocks") + } else { + print("creating blocks2") + } + defer { + for _ in 0..<1024 { + nonConsumingUse(k) // expected-note {{use here}} + } + self = KlassWrapper(k: Klass()) + } + print("foo bar") + } + + mutating func deferTestFail12() { // expected-error {{'self' used after being moved}} + if 
booleanValue { + print("creating blocks") + } else { + let _ = _move(self) // expected-note {{move here}} + print("creating blocks2") + } + + defer { + for _ in 0..<1024 { + nonConsumingUse(k) // expected-note {{use here}} + } + self = KlassWrapper(k: Klass()) + } + print("foo bar") + } + + mutating func deferTestSuccess13() { + if booleanValue { + print("creating blocks") + } else { + let _ = _move(self) + print("creating blocks2") + } + + defer { + self = KlassWrapper(k: Klass()) + } + print("foo bar") + } + + mutating func deferTestSuccess14() { + if booleanValue { + print("creating blocks") + self.doSomething() + } else { + let _ = _move(self) + print("creating blocks2") + } + + defer { + self = KlassWrapper(k: Klass()) + } + print("foo bar") + } +} + +//////////////// +// Cast Tests // +//////////////// + +public func castTest0(_ x: __owned SubKlass1) -> Klass { + var x2 = x // expected-error {{'x2' used after being moved}} + x2 = x + let _ = _move(x2) // expected-note {{move here}} + return x2 as Klass // expected-note {{use here}} +} + +public func castTest1(_ x: __owned Klass) -> SubKlass1 { + var x2 = x // expected-error {{'x2' used after being moved}} + x2 = x + let _ = _move(x2) // expected-note {{move here}} + return x2 as! SubKlass1 // expected-note {{use here}} +} + +public func castTest2(_ x: __owned Klass) -> SubKlass1? { + var x2 = x // expected-error {{'x2' used after being moved}} + x2 = x + let _ = _move(x2) // expected-note {{move here}} + return x2 as? 
SubKlass1 // expected-note {{use here}} +} + +public func castTestSwitch1(_ x : __owned Klass) { + var x2 = x // expected-error {{'x2' used after being moved}} + x2 = x + let _ = _move(x2) // expected-note {{move here}} + switch x2 { // expected-note {{use here}} + case let k as SubKlass1: + print(k) + default: + print("Nope") + } +} + +public func castTestSwitch2(_ x : __owned Klass) { + var x2 = x // expected-error {{'x2' used after being moved}} + x2 = x + let _ = _move(x2) // expected-note {{move here}} + switch x2 { // expected-note {{use here}} + case let k as SubKlass1: + print(k) + case let k as SubKlass2: + print(k) + default: + print("Nope") + } +} + +public func castTestSwitchInLoop(_ x : __owned Klass) { + var x2 = x // expected-error {{'x2' used after being moved}} + x2 = x + let _ = _move(x2) // expected-note {{move here}} + + for _ in 0..<1024 { + switch x2 { // expected-note {{use here}} + case let k as SubKlass1: + print(k) + default: + print("Nope") + } + } +} + +public func castTestIfLet(_ x : __owned Klass) { + var x2 = x // expected-error {{'x2' used after being moved}} + x2 = x + let _ = _move(x2) // expected-note {{move here}} + if case let k as SubKlass1 = x2 { // expected-note {{use here}} + print(k) + } else { + print("no") + } +} + +public func castTestIfLetInLoop(_ x : __owned Klass) { + var x2 = x // expected-error {{'x2' used after being moved}} + x2 = x + let _ = _move(x2) // expected-note {{move here}} + for _ in 0..<1024 { + if case let k as SubKlass1 = x2 { // expected-note {{use here}} + print(k) + } else { + print("no") + } + } +} + +public enum EnumWithKlass { + case none + case klass(Klass) +} + +public func castTestIfLet2(_ x : __owned EnumWithKlass) { + var x2 = x // expected-error {{'x2' used after being moved}} + x2 = x + let _ = _move(x2) // expected-note {{move here}} + if case let .klass(k as SubKlass1) = x2 { // expected-note {{use here}} + print(k) + } else { + print("no") + } +} + +/////////////// +// GEP Tests // 
+/////////////// + +public func castAccess(_ x : __owned Klass) { + var x2 = x // expected-error {{'x2' used after being moved}} + x2 = x + let _ = _move(x2) // expected-note {{move here}} + let _ = x2.k // expected-note {{use here}} +} + +public func castAccess2(_ x : __owned Klass) { + var x2 = x // expected-error {{'x2' used after being moved}} + x2 = x + let _ = _move(x2) // expected-note {{move here}} + let _ = x2.k!.getOtherKlass() // expected-note {{use here}} +} + +///////////////////////// +// Partial Apply Tests // +///////////////////////// + +// Emit a better error here. At least we properly error. +public func partialApplyTest(_ x: __owned Klass) { + var x2 = x // expected-error {{'x2' used after being moved}} + x2 = x + let _ = _move(x2) // expected-note {{move here}} + let f = { // expected-note {{use here}} + print(x2) + } + f() +} + +//////////////////////// +// Misc Tests on Self // +//////////////////////// + +extension KlassWrapper { + + func doSomething() { print("foo") } + + // This test makes sure that we are able to properly put in the destroy_addr + // in the "creating blocks" branch. There used to be a bug where the impl + // would need at least one destroy_addr to properly infer the value to put + // into blocks not reachable from the _move but that are on the dominance + // frontier from the _move. This was unnecessary and the test makes sure we + // do not fail on this again. + mutating func noDestroyAddrBeforeOptInsertAfter() { + if booleanValue { + print("creating blocks") + } else { + let _ = _move(self) + print("creating blocks2") + } + + self = .init(k: Klass()) + print("foo bar") + } + + // A derived version of noDestroyAddrBeforeOptInsertAfter that makes sure + // when we insert the destroy_addr, we destroy self at the end of the block. 
+ mutating func noDestroyAddrBeforeOptInsertAfter2() { + if booleanValue { + print("creating blocks") + self.doSomething() + } else { + let _ = _move(self) + print("creating blocks2") + } + + self = .init(k: Klass()) + print("foo bar") + } +} + +////////////////////////////////// +// Multiple Captures from Defer // +////////////////////////////////// + +func multipleCapture1(_ k: Klass) -> () { + var k2 = k + var k3 = k + let _ = _move(k2) + let _ = _move(k3) + var k4 = k + k4 = k + defer { + k2 = Klass() + print(k4) + k3 = Klass() + } + print("foo bar") +} + +func multipleCapture2(_ k: Klass) -> () { + var k2 = k // expected-error {{'k2' used after being moved}} + k2 = k + var k3 = k + let _ = _move(k2) // expected-note {{move here}} + let _ = _move(k3) + var k4 = k + k4 = k + defer { + print(k2) // expected-note {{use here}} + print(k4) + k3 = Klass() + } + print("foo bar") +} + +////////////////////// +// Reinit in pieces // +////////////////////// + +// These tests exercise the diagnostic to see how we error if we re-initialize a +// var in pieces. Eventually we should teach either this diagnostic pass how to +// handle this or teach DI how to combine the initializations into one large +// reinit. 
+struct KlassPair { + var lhs: Klass + var rhs: Klass +} + +func reinitInPieces1(_ k: KlassPair) { + var k2 = k + k2 = k + + let _ = _move(k2) // expected-error {{_move applied to value that the compiler does not support checking}} + k2.lhs = Klass() + k2.rhs = Klass() +} diff --git a/test/SILOptimizer/move_function_kills_copyable_values.swift b/test/SILOptimizer/move_function_kills_copyable_values.swift index 770973fc4b240..0a99d9dd063ec 100644 --- a/test/SILOptimizer/move_function_kills_copyable_values.swift +++ b/test/SILOptimizer/move_function_kills_copyable_values.swift @@ -5,6 +5,8 @@ import Swift public class Klass {} +public class SubKlass1 : Klass {} +public class SubKlass2 : Klass {} ////////////////// // Declarations // @@ -136,6 +138,13 @@ public func errorLoopMultipleMove(_ x: __owned Klass) -> () { // expected-error } } +public func errorLoopMultipleMove1(_ x: __owned Klass) -> () { // expected-error {{'x' used after being moved}} + for _ in 0..<1024 { + let _ = _move(x) // expected-note {{move here}} + // expected-note @-1 {{cyclic move here. move will occur multiple times in the loop}} + } +} + public func errorLoopMoveOfParameter(_ x: __owned Klass) -> () { // expected-error {{'x' used after being moved}} let _ = _move(x) // expected-note {{move here}} for _ in 0..<1024 { @@ -253,3 +262,120 @@ public func multipleVarsWithSubsequentBorrows() -> Bool { let k3 = _move(k) return k2 === k3 } + +//////////////// +// Cast Tests // +//////////////// + +public func castTest0(_ x: __owned SubKlass1) -> Klass { // expected-error {{'x' used after being moved}} + let _ = _move(x) // expected-note {{move here}} + return x as Klass // expected-note {{use here}} +} + +public func castTest1(_ x: __owned Klass) -> SubKlass1 { // expected-error {{'x' used after being moved}} + let _ = _move(x) // expected-note {{move here}} + return x as! SubKlass1 // expected-note {{use here}} +} + +public func castTest2(_ x: __owned Klass) -> SubKlass1? 
{ // expected-error {{'x' used after being moved}} + let _ = _move(x) // expected-note {{move here}} + return x as? SubKlass1 // expected-note {{use here}} +} + +public func castTestSwitch1(_ x : __owned Klass) { // expected-error {{'x' used after being moved}} + let _ = _move(x) // expected-note {{move here}} + switch x { + case let k as SubKlass1: // expected-note {{use here}} + print(k) + default: + print("Nope") + } +} + +public func castTestSwitch2(_ x : __owned Klass) { // expected-error {{'x' used after being moved}} + let _ = _move(x) // expected-note {{move here}} + switch x { + case let k as SubKlass1: + print(k) + case let k as SubKlass2: // expected-note {{use here}} + print(k) + default: + print("Nope") + } +} + +public func castTestSwitchInLoop(_ x : __owned Klass) { // expected-error {{'x' used after being moved}} + let _ = _move(x) // expected-note {{move here}} + + for _ in 0..<1024 { + switch x { + case let k as SubKlass1: // expected-note {{use here}} + print(k) + default: + print("Nope") + } + } +} + +public func castTestIfLet(_ x : __owned Klass) { // expected-error {{'x' used after being moved}} + let _ = _move(x) // expected-note {{move here}} + if case let k as SubKlass1 = x { // expected-note {{use here}} + print(k) + } else { + print("no") + } +} + +public func castTestIfLetInLoop(_ x : __owned Klass) { // expected-error {{'x' used after being moved}} + let _ = _move(x) // expected-note {{move here}} + for _ in 0..<1024 { + if case let k as SubKlass1 = x { // expected-note {{use here}} + print(k) + } else { + print("no") + } + } +} + +public enum EnumWithKlass { + case none + case klass(Klass) +} + +public func castTestIfLet2(_ x : __owned EnumWithKlass) { // expected-error {{'x' used after being moved}} + let _ = _move(x) // expected-note {{move here}} + if case let .klass(k as SubKlass1) = x { // expected-note {{use here}} + print(k) + } else { + print("no") + } +} + +///////////////////////// +// Partial Apply Tests // 
+///////////////////////// + +// Emit a better error here. At least we properly error. +public func partialApplyTest(_ x: __owned Klass) { // expected-error {{'x' used after being moved}} + let _ = _move(x) // expected-note {{move here}} + let f = { // expected-note {{use here}} + print(x) + } + f() +} + +///////////////// +// Defer Tests // +///////////////// + +// TODO: Improve this error msg. +// +// NOTE: This will require adding knowledge about captured defer arguments for +// values. This at least prevents the error from happening. +public func deferTest(_ x: __owned Klass) { // expected-error {{'x' used after being moved}} + let _ = _move(x) // expected-note {{move here}} + defer { // expected-note {{use here}} + nonConsumingUse(x) + } + print("do Something") +}