diff --git a/include/swift/SIL/ApplySite.h b/include/swift/SIL/ApplySite.h
index 6f8b911f46369..de97fef3e9bf0 100644
--- a/include/swift/SIL/ApplySite.h
+++ b/include/swift/SIL/ApplySite.h
@@ -21,9 +21,11 @@
#ifndef SWIFT_SIL_APPLYSITE_H
#define SWIFT_SIL_APPLYSITE_H

+#include "swift/SIL/SILArgument.h"
#include "swift/SIL/SILBasicBlock.h"
-#include "swift/SIL/SILInstruction.h"
#include "swift/SIL/SILFunction.h"
+#include "swift/SIL/SILInstruction.h"
+#include "llvm/ADT/ArrayRef.h"

namespace swift {

@@ -502,6 +504,34 @@ class FullApplySite : public ApplySite {
    return getSubstCalleeConv().hasIndirectSILResults();
  }

+  /// If our apply site has a single direct result SILValue, return that
+  /// SILValue. Return SILValue() otherwise.
+  ///
+  /// This means that:
+  ///
+  /// 1. If we have an ApplyInst, we just return the apply itself.
+  /// 2. If we have a TryApplyInst, we return the first argument of the normal
+  ///    block.
+  /// 3. If we have a BeginApplyInst, we return SILValue() since a begin_apply
+  ///    yields values instead of returning them. A returned value should only
+  ///    be valid after a full apply site has completely finished executing.
+  SILValue getSingleDirectResult() const {
+    switch (getKind()) {
+    case FullApplySiteKind::ApplyInst:
+      return SILValue(cast<ApplyInst>(getInstruction()));
+    case FullApplySiteKind::BeginApplyInst: {
+      return SILValue();
+    }
+    case FullApplySiteKind::TryApplyInst: {
+      auto *normalBlock = cast<TryApplyInst>(getInstruction())->getNormalBB();
+      assert(normalBlock->getNumArguments() == 1 &&
+             "Expected try apply to have a single result");
+      return normalBlock->getArgument(0);
+    }
+    }
+    llvm_unreachable("Covered switch isn't covered?!");
+  }
+
  unsigned getNumIndirectSILResults() const {
    return getSubstCalleeConv().getNumIndirectSILResults();
  }
diff --git a/include/swift/SIL/SILBuilder.h b/include/swift/SIL/SILBuilder.h
index a2b220d06b8b5..10422ec523278 100644
--- a/include/swift/SIL/SILBuilder.h
+++ b/include/swift/SIL/SILBuilder.h
@@ -726,7 +726,8 @@ class SILBuilder {
  }

  LoadBorrowInst *createLoadBorrow(SILLocation Loc, SILValue LV) {
-    assert(isLoadableOrOpaque(LV->getType()));
+    assert(isLoadableOrOpaque(LV->getType()) &&
+           !LV->getType().isTrivial(getFunction()));
    return insert(new (getModule())
                      LoadBorrowInst(getSILDebugLocation(Loc), LV));
  }
@@ -737,11 +738,19 @@ class SILBuilder {
                      BeginBorrowInst(getSILDebugLocation(Loc), LV));
  }

+  /// Convenience function for creating a load_borrow on non-trivial values
+  /// and a load [trivial] on trivial values. Becomes a plain, unqualified
+  /// load in non-ossa functions.
  SILValue emitLoadBorrowOperation(SILLocation loc, SILValue v) {
    if (!hasOwnership()) {
      return emitLoadValueOperation(loc, v,
                                    LoadOwnershipQualifier::Unqualified);
    }
+
+    if (v->getType().isTrivial(getFunction())) {
+      return emitLoadValueOperation(loc, v, LoadOwnershipQualifier::Trivial);
+    }
+
    return createLoadBorrow(loc, v);
  }

@@ -877,6 +886,33 @@ class SILBuilder {
                      StoreBorrowInst(getSILDebugLocation(Loc), Src, DestAddr));
  }

+  /// A helper function for emitting store_borrow in operations where one must
+  /// handle both ossa and non-ossa code.
+  ///
+  /// In words:
+  ///
+  /// * If the function does not have ownership, this just emits an
+  ///   unqualified store.
+  ///
+  /// * If the function has ownership, but the type is trivial, use store
+  ///   [trivial].
+  ///
+  /// * Otherwise, emit an actual store_borrow.
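+  ///
+  /// As an illustrative sketch (names hypothetical), in an [ossa] function
+  /// this emits:
+  ///
+  ///   store_borrow %src to %destAddr : $*Klass  // non-trivial type
+  ///   store %src to [trivial] %destAddr : $*Int // trivial type
+  ///
+  /// while in a non-ossa function it emits a plain, unqualified store.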
+  void emitStoreBorrowOperation(SILLocation loc, SILValue src,
+                                SILValue destAddr) {
+    if (!hasOwnership()) {
+      return emitStoreValueOperation(loc, src, destAddr,
+                                     StoreOwnershipQualifier::Unqualified);
+    }
+
+    if (src->getType().isTrivial(getFunction())) {
+      return emitStoreValueOperation(loc, src, destAddr,
+                                     StoreOwnershipQualifier::Trivial);
+    }
+
+    createStoreBorrow(loc, src, destAddr);
+  }
+
  MarkUninitializedInst *
  createMarkUninitialized(SILLocation Loc, SILValue src,
                          MarkUninitializedInst::Kind k) {
diff --git a/lib/SILOptimizer/Transforms/GenericSpecializer.cpp b/lib/SILOptimizer/Transforms/GenericSpecializer.cpp
index 60bb6377fe69c..cd54f6a7e5efb 100644
--- a/lib/SILOptimizer/Transforms/GenericSpecializer.cpp
+++ b/lib/SILOptimizer/Transforms/GenericSpecializer.cpp
@@ -28,6 +28,10 @@

using namespace swift;

+// For testing during bring-up.
+static llvm::cl::opt<bool> EnableGenericSpecializerWithOwnership(
+    "sil-generic-specializer-enable-ownership", llvm::cl::init(false));
+
namespace {

class GenericSpecializer : public SILFunctionTransform {
@@ -39,7 +43,7 @@ class GenericSpecializer : public SILFunctionTransform {
    SILFunction &F = *getFunction();

    // TODO: We should be able to handle ownership.
-    if (F.hasOwnership())
+    if (F.hasOwnership() && !EnableGenericSpecializerWithOwnership)
      return;

    LLVM_DEBUG(llvm::dbgs() << "***** GenericSpecializer on function:"
diff --git a/lib/SILOptimizer/Utils/GenericCloner.cpp b/lib/SILOptimizer/Utils/GenericCloner.cpp
index 438a47ca78e81..e027cdfb3927f 100644
--- a/lib/SILOptimizer/Utils/GenericCloner.cpp
+++ b/lib/SILOptimizer/Utils/GenericCloner.cpp
@@ -79,8 +79,9 @@ void GenericCloner::populateCloned() {

  auto createAllocStack = [&]() {
    // We need an alloc_stack as a replacement for the indirect parameter.
-    assert(mappedType.isAddress());
-    mappedType = mappedType.getObjectType();
+    if (mappedType.isAddress()) {
+      mappedType = mappedType.getObjectType();
+    }
    auto AllocStackLoc = RegularLocation::getAutoGeneratedLocation();
    ASI = getBuilder().createAllocStack(AllocStackLoc, mappedType);
    AllocStacks.push_back(ASI);
@@ -106,24 +107,36 @@ void GenericCloner::populateCloned() {
        // Handle arguments for formal parameters.
        unsigned paramIdx = ArgIdx - origConv.getSILArgIndexOfFirstParam();
        if (ReInfo.isParamConverted(paramIdx)) {
-          // Store the new direct parameter to the alloc_stack.
-          createAllocStack();
+          assert(mappedType.isAddress());
+          mappedType = mappedType.getObjectType();
          auto *NewArg = ClonedEntryBB->createFunctionArgument(
              mappedType, OrigArg->getDecl());
-          getBuilder().createStore(Loc, NewArg, ASI,
-                                   StoreOwnershipQualifier::Unqualified);

-          // Try to create a new debug_value from an existing debug_value_addr.
+          // Try to create a new debug_value from an existing debug_value_addr
+          // for the argument. We do this before storing to ensure that when we
+          // are cloning code in ossa the argument has not been consumed by the
+          // store below.
          for (Operand *ArgUse : OrigArg->getUses()) {
            if (auto *DVAI = dyn_cast<DebugValueAddrInst>(ArgUse->getUser())) {
+              auto *oldScope = getBuilder().getCurrentDebugScope();
              getBuilder().setCurrentDebugScope(
                  remapScope(DVAI->getDebugScope()));
              getBuilder().createDebugValue(DVAI->getLoc(), NewArg,
                                            *DVAI->getVarInfo());
-              getBuilder().setCurrentDebugScope(nullptr);
+              getBuilder().setCurrentDebugScope(oldScope);
              break;
            }
          }
+
+          // Store the new direct parameter to an alloc_stack.
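+          //
+          // For example (illustrative SIL): an @owned parameter is stored via
+          //   store %newArg to [init] %stack
+          // while a @guaranteed parameter is stored via
+          //   store_borrow %newArg to %stack
+          // so the cloned body never consumes a value it does not own.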
+          createAllocStack();
+          if (!NewArg->getArgumentConvention().isGuaranteedConvention()) {
+            getBuilder().emitStoreValueOperation(Loc, NewArg, ASI,
+                                                 StoreOwnershipQualifier::Init);
+          } else {
+            getBuilder().emitStoreBorrowOperation(Loc, NewArg, ASI);
+          }
+
          entryArgs.push_back(ASI);
          return true;
        }
@@ -150,9 +163,9 @@ void GenericCloner::visitTerminator(SILBasicBlock *BB) {
      if (ReturnValueAddr) {
        // The result is converted from indirect to direct. We have to load the
        // returned value from the alloc_stack.
-        ReturnValue =
-            getBuilder().createLoad(ReturnValueAddr->getLoc(), ReturnValueAddr,
-                                    LoadOwnershipQualifier::Unqualified);
+        ReturnValue = getBuilder().emitLoadValueOperation(
+            ReturnValueAddr->getLoc(), ReturnValueAddr,
+            LoadOwnershipQualifier::Take);
      }
      for (AllocStackInst *ASI : reverse(AllocStacks)) {
        getBuilder().createDeallocStack(ASI->getLoc(), ASI);
diff --git a/lib/SILOptimizer/Utils/Generics.cpp b/lib/SILOptimizer/Utils/Generics.cpp
index fab8829dc56c0..1cfe889d04301 100644
--- a/lib/SILOptimizer/Utils/Generics.cpp
+++ b/lib/SILOptimizer/Utils/Generics.cpp
@@ -1870,7 +1870,6 @@ SILFunction *GenericFuncSpecializer::tryCreateSpecialization() {
          SpecializedF->getGenericEnvironment()) ||
         (!SpecializedF->getLoweredFunctionType()->isPolymorphic() &&
          !SpecializedF->getGenericEnvironment()));
-  assert(!SpecializedF->hasOwnership());
  // Store the meta-information about how this specialization was created.
  auto *Caller = ReInfo.getApply() ? ReInfo.getApply().getFunction() : nullptr;
  SubstitutionMap Subs = Caller ? ReInfo.getApply().getSubstitutionMap()
@@ -1907,10 +1906,18 @@ static void fixUsedVoidType(SILValue VoidVal, SILLocation Loc,
}

/// Prepare call arguments. Perform re-abstraction if required.
-static void prepareCallArguments(ApplySite AI, SILBuilder &Builder,
-                                 const ReabstractionInfo &ReInfo,
-                                 SmallVectorImpl<SILValue> &Arguments,
-                                 SILValue &StoreResultTo) {
+///
+/// \p ArgAtIndexNeedsEndBorrow after return contains the indices of arguments
+/// that need an end_borrow. We track the indices in a separate array because
+/// Arguments must eventually be handed to SILBuilder as an
+/// ArrayRef<SILValue>, and a composite element type would force a conversion
+/// at that point.
+static void
+prepareCallArguments(ApplySite AI, SILBuilder &Builder,
+                     const ReabstractionInfo &ReInfo,
+                     SmallVectorImpl<SILValue> &Arguments,
+                     SmallVectorImpl<unsigned> &ArgAtIndexNeedsEndBorrow,
+                     SILValue &StoreResultTo) {
  /// SIL function conventions for the original apply site with substitutions.
  SILLocation Loc = AI.getLoc();
  auto substConv = AI.getSubstCalleeConv();
@@ -1938,8 +1945,16 @@ static void prepareCallArguments(ApplySite AI, SILBuilder &Builder,
      if (ReInfo.isParamConverted(paramIdx)) {
        // An argument is converted from indirect to direct. Instead of the
        // address we pass the loaded value.
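+        //
+        // In ossa this means (illustrative SIL): consumed parameters are
+        // taken,
+        //   %val = load [take] %addr
+        // while guaranteed parameters are borrowed,
+        //   %val = load_borrow %addr ... end_borrow %val
+        // with the end_borrow emitted after the call is fully evaluated.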
-        SILValue Val = Builder.createLoad(
-            Loc, Op.get(), LoadOwnershipQualifier::Unqualified);
+        auto argConv = substConv.getSILArgumentConvention(ArgIdx);
+        SILValue Val;
+        if (!argConv.isGuaranteedConvention() || isa<PartialApplyInst>(AI)) {
+          Val = Builder.emitLoadValueOperation(Loc, Op.get(),
+                                               LoadOwnershipQualifier::Take);
+        } else {
+          Val = Builder.emitLoadBorrowOperation(Loc, Op.get());
+          if (Val.getOwnershipKind() == ValueOwnershipKind::Guaranteed)
+            ArgAtIndexNeedsEndBorrow.push_back(Arguments.size());
+        }
+
        Arguments.push_back(Val);
        return true;
      }
@@ -1953,6 +1968,16 @@ static void prepareCallArguments(ApplySite AI, SILBuilder &Builder,
  }
}

+static void
+cleanupCallArguments(SILBuilder &builder, SILLocation loc,
+                     ArrayRef<SILValue> values,
+                     ArrayRef<unsigned> valueIndicesThatNeedEndBorrow) {
+  for (int index : valueIndicesThatNeedEndBorrow) {
+    auto *lbi = cast<LoadBorrowInst>(values[index]);
+    builder.createEndBorrow(loc, lbi);
+  }
+}
+
/// Create a new apply based on an old one, but with a different
/// function being applied.
static ApplySite replaceWithSpecializedCallee(ApplySite applySite,
                                              SILValue callee,
                                              const ReabstractionInfo &reInfo) {
  SILBuilderWithScope builder(applySite.getInstruction());
  SILLocation loc = applySite.getLoc();
  SmallVector<SILValue, 4> arguments;
+  SmallVector<unsigned, 4> argsNeedingEndBorrow;
  SILValue resultOut;

-  prepareCallArguments(applySite, builder, reInfo, arguments, resultOut);
+  prepareCallArguments(applySite, builder, reInfo, arguments,
+                       argsNeedingEndBorrow, resultOut);

  // Create a substituted callee type.
+  //
+  // NOTE: We do not perform this substitution if we are promoting a full apply
+  // site callee of a partial apply.
  auto canFnTy = callee->getType().castTo<SILFunctionType>();
  SubstitutionMap subs;
-  if (reInfo.getSpecializedType()->isPolymorphic()) {
+  if (reInfo.getSpecializedType()->isPolymorphic() &&
+      canFnTy->isPolymorphic()) {
    subs = reInfo.getCallerParamSubstitutionMap();
    subs = SubstitutionMap::get(canFnTy->getSubstGenericSignature(), subs);
  }
@@ -1983,6 +2014,13 @@ static ApplySite replaceWithSpecializedCallee(ApplySite applySite,
    auto *tai = cast<TryApplyInst>(applySite);
    SILBasicBlock *resultBlock = tai->getNormalBB();
    assert(resultBlock->getSinglePredecessorBlock() == tai->getParent());
+    // First insert the cleanups for our arguments in the appropriate spot.
+    FullApplySite(tai).insertAfterFullEvaluation(
+        [&](SILBasicBlock::iterator insertPt) {
+          SILBuilderWithScope argBuilder(insertPt);
+          cleanupCallArguments(argBuilder, loc, arguments,
+                               argsNeedingEndBorrow);
+        });
    auto *newTAI = builder.createTryApply(loc, callee, subs, arguments,
                                          resultBlock, tai->getErrorBB());
    if (resultOut) {
@@ -1995,23 +2033,28 @@ static ApplySite replaceWithSpecializedCallee(ApplySite applySite,
      SILArgument *arg = resultBlock->replacePhiArgument(
          0, resultOut->getType().getObjectType(), ValueOwnershipKind::Owned);
      // Store the direct result to the original result address.
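+      //
+      // The replacement phi argument is owned, so in ossa this becomes a
+      // store [init] into the original indirect result address
+      // (illustrative): store %arg to [init] %resultOut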
-      builder.createStore(loc, arg, resultOut,
-                          StoreOwnershipQualifier::Unqualified);
+      builder.emitStoreValueOperation(loc, arg, resultOut,
+                                      StoreOwnershipQualifier::Init);
    }
    return newTAI;
  }
  case ApplySiteKind::ApplyInst: {
    auto *ai = cast<ApplyInst>(applySite);
+    FullApplySite(ai).insertAfterFullEvaluation(
+        [&](SILBasicBlock::iterator insertPt) {
+          SILBuilderWithScope argBuilder(insertPt);
+          cleanupCallArguments(argBuilder, loc, arguments,
+                               argsNeedingEndBorrow);
+        });
    auto *newAI = builder.createApply(loc, callee, subs, arguments,
                                      ai->isNonThrowing());
    if (resultOut) {
-      assert(substConv.useLoweredAddresses());
      if (!calleeSILSubstFnTy.isNoReturnFunction(
              builder.getModule(), builder.getTypeExpansionContext())) {
        // Store the direct result to the original result address.
        fixUsedVoidType(ai, loc, builder);
-        builder.createStore(loc, newAI, resultOut,
-                            StoreOwnershipQualifier::Unqualified);
+        builder.emitStoreValueOperation(loc, newAI, resultOut,
+                                        StoreOwnershipQualifier::Init);
      } else {
        builder.createUnreachable(loc);
        // unreachable should be the terminator instruction.
@@ -2027,6 +2070,12 @@ static ApplySite replaceWithSpecializedCallee(ApplySite applySite,
  case ApplySiteKind::BeginApplyInst: {
    auto *bai = cast<BeginApplyInst>(applySite);
    assert(!resultOut);
+    FullApplySite(bai).insertAfterFullEvaluation(
+        [&](SILBasicBlock::iterator insertPt) {
+          SILBuilderWithScope argBuilder(insertPt);
+          cleanupCallArguments(argBuilder, loc, arguments,
+                               argsNeedingEndBorrow);
+        });
    auto *newBAI = builder.createBeginApply(loc, callee, subs, arguments,
                                            bai->isNonThrowing());
    bai->replaceAllUsesPairwiseWith(newBAI);
@@ -2038,7 +2087,11 @@ static ApplySite replaceWithSpecializedCallee(ApplySite applySite,
        loc, callee, subs, arguments,
        pai->getType().getAs<SILFunctionType>()->getCalleeConvention(),
        pai->isOnStack());
+    // When we have a partial apply, we should always perform a load [take].
    pai->replaceAllUsesWith(newPAI);
+    assert(llvm::none_of(arguments,
+                         [](SILValue v) { return isa<LoadBorrowInst>(v); }) &&
+           "Partial apply consumes all of its parameters?!");
    return newPAI;
  }
  }
@@ -2096,8 +2149,9 @@ class ReabstractionThunkGenerator {
  SILFunction *createThunk();

protected:
-  SILValue createReabstractionThunkApply(SILBuilder &Builder);
-  SILArgument *convertReabstractionThunkArguments(SILBuilder &Builder);
+  FullApplySite createReabstractionThunkApply(SILBuilder &Builder);
+  SILArgument *convertReabstractionThunkArguments(
+      SILBuilder &Builder, SmallVectorImpl<unsigned> &ArgsNeedingEndBorrows);
};
} // anonymous namespace

@@ -2132,30 +2186,46 @@ SILFunction *ReabstractionThunkGenerator::createThunk() {
                                                      SpecArg->getDecl());
    Arguments.push_back(NewArg);
  }
-    SILValue ReturnValue = createReabstractionThunkApply(Builder);
+    FullApplySite ApplySite = createReabstractionThunkApply(Builder);
+    SILValue ReturnValue = ApplySite.getSingleDirectResult();
+    assert(ReturnValue && "getSingleDirectResult out of sync with ApplySite?!");
    Builder.createReturn(Loc, ReturnValue);
+
    return Thunk;
  }

  // Handle lowered addresses.
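+  //
+  // In ossa, guaranteed indirect parameters are loaded below with load_borrow
+  // instead of an unqualified load, so the thunk must emit a matching
+  // end_borrow once the call has finished (illustrative SIL):
+  //   %v = load_borrow %arg
+  //   %r = apply %specialized(%v)
+  //   end_borrow %v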
-  SILArgument *ReturnValueAddr = convertReabstractionThunkArguments(Builder);
+  SmallVector<unsigned, 8> ArgsThatNeedEndBorrow;
+  SILArgument *ReturnValueAddr =
+      convertReabstractionThunkArguments(Builder, ArgsThatNeedEndBorrow);
+
+  FullApplySite ApplySite = createReabstractionThunkApply(Builder);

-  SILValue ReturnValue = createReabstractionThunkApply(Builder);
+  SILValue ReturnValue = ApplySite.getSingleDirectResult();
+  assert(ReturnValue && "getSingleDirectResult out of sync with ApplySite?!");

  if (ReturnValueAddr) {
    // Need to store the direct results to the original indirect address.
-    Builder.createStore(Loc, ReturnValue, ReturnValueAddr,
-                        StoreOwnershipQualifier::Unqualified);
+    Builder.emitStoreValueOperation(Loc, ReturnValue, ReturnValueAddr,
+                                    StoreOwnershipQualifier::Init);
    SILType VoidTy = OrigPAI->getSubstCalleeType()->getDirectFormalResultsType(
        M, Builder.getTypeExpansionContext());
    assert(VoidTy.isVoid());
    ReturnValue = Builder.createTuple(Loc, VoidTy, {});
  }
  Builder.createReturn(Loc, ReturnValue);
+
+  // Now that we have finished constructing our CFG (note the return above),
+  // insert any compensating end borrows that we need.
+  ApplySite.insertAfterFullEvaluation([&](SILBasicBlock::iterator insertPt) {
+    SILBuilderWithScope argBuilder(insertPt);
+    cleanupCallArguments(argBuilder, Loc, Arguments, ArgsThatNeedEndBorrow);
+  });
+
  return Thunk;
}

-/// Create a call to a reabstraction thunk. Return the call's direct result.
-SILValue ReabstractionThunkGenerator::createReabstractionThunkApply(
+/// Create a call to a reabstraction thunk. Return the full apply site; its
+/// single direct result is the thunk's return value.
+FullApplySite ReabstractionThunkGenerator::createReabstractionThunkApply(
    SILBuilder &Builder) {
  SILFunction *Thunk = &Builder.getFunction();
  auto *FRI = Builder.createFunctionRef(Loc, SpecializedFunc);
@@ -2167,19 +2237,20 @@ SILValue ReabstractionThunkGenerator::createReabstractionThunkApply(
  // Create the logic for calling a throwing function.
  SILBasicBlock *NormalBB = Thunk->createBasicBlock();
  SILBasicBlock *ErrorBB = Thunk->createBasicBlock();
-  Builder.createTryApply(Loc, FRI, Subs, Arguments, NormalBB, ErrorBB);
+  auto *TAI =
+      Builder.createTryApply(Loc, FRI, Subs, Arguments, NormalBB, ErrorBB);
  auto *ErrorVal = ErrorBB->createPhiArgument(
      SpecializedFunc->mapTypeIntoContext(
          specConv.getSILErrorType(Builder.getTypeExpansionContext())),
      ValueOwnershipKind::Owned);
  Builder.setInsertionPoint(ErrorBB);
  Builder.createThrow(Loc, ErrorVal);
-  SILValue ReturnValue = NormalBB->createPhiArgument(
+  NormalBB->createPhiArgument(
      SpecializedFunc->mapTypeIntoContext(
          specConv.getSILResultType(Builder.getTypeExpansionContext())),
      ValueOwnershipKind::Owned);
  Builder.setInsertionPoint(NormalBB);
-  return ReturnValue;
+  return FullApplySite(TAI);
}

/// Create SIL arguments for a reabstraction thunk with lowered addresses. This
@@ -2189,7 +2260,7 @@ SILValue ReabstractionThunkGenerator::createReabstractionThunkApply(
/// FIXME: Remove this if we don't need to create reabstraction thunks after
/// address lowering.
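+///
+/// In ossa, converted indirect parameters with a guaranteed convention are
+/// loaded with load_borrow; their indices are reported through
+/// \p ArgsThatNeedEndBorrow so that createThunk() can emit the matching
+/// end_borrows after the apply.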
SILArgument *ReabstractionThunkGenerator::convertReabstractionThunkArguments(
-    SILBuilder &Builder) {
+    SILBuilder &Builder, SmallVectorImpl<unsigned> &ArgsThatNeedEndBorrow) {
  SILFunction *Thunk = &Builder.getFunction();
  CanSILFunctionType SpecType = SpecializedFunc->getLoweredFunctionType();
  CanSILFunctionType SubstType = ReInfo.getSubstitutedType();
@@ -2251,11 +2322,18 @@ SILArgument *ReabstractionThunkGenerator::convertReabstractionThunkArguments(
          Builder.getTypeExpansionContext()));
      assert(ParamTy.isAddress());
      SILArgument *SpecArg = *SpecArgIter++;
-      SILArgument *NewArg =
+      SILFunctionArgument *NewArg =
          EntryBB->createFunctionArgument(ParamTy, SpecArg->getDecl());
-      auto *ArgVal =
-          Builder.createLoad(Loc, NewArg, LoadOwnershipQualifier::Unqualified);
-      Arguments.push_back(ArgVal);
+      if (!NewArg->getArgumentConvention().isGuaranteedConvention()) {
+        SILValue argVal = Builder.emitLoadValueOperation(
+            Loc, NewArg, LoadOwnershipQualifier::Take);
+        Arguments.push_back(argVal);
+      } else {
+        SILValue argVal = Builder.emitLoadBorrowOperation(Loc, NewArg);
+        if (argVal.getOwnershipKind() == ValueOwnershipKind::Guaranteed)
+          ArgsThatNeedEndBorrow.push_back(Arguments.size());
+        Arguments.push_back(argVal);
+      }
      continue;
    }
    // Simply clone unconverted direct or indirect parameters.
@@ -2358,15 +2436,6 @@ void swift::trySpecializeApplyOfGeneric(
  if (shouldNotSpecialize(RefF, F))
    return;

-  // If our callee has ownership, do not specialize for now. This should only
-  // occur with transparent referenced functions.
-  //
-  // FIXME: Support this.
-  if (RefF->hasOwnership()) {
-    assert(RefF->isTransparent());
-    return;
-  }
-
  // If the caller and callee are both fragile, preserve the fragility when
  // cloning the callee. Otherwise, strip it off so that we can optimize
  // the body more.
@@ -2408,11 +2477,24 @@ void swift::trySpecializeApplyOfGeneric(
  //    this case we can just skip the existing re-abstraction.
  // 3) For all other cases we need to create a new re-abstraction thunk.
  needAdaptUsers = true;
-  for (Operand *Use : PAI->getUses()) {
+  SmallVector<Operand *, 4> worklist(PAI->getUses());
+  while (!worklist.empty()) {
+    auto *Use = worklist.pop_back_val();
+
    SILInstruction *User = Use->getUser();
+
+    // Look through copy_value.
+    if (auto *cvi = dyn_cast<CopyValueInst>(User)) {
+      llvm::copy(cvi->getUses(), std::back_inserter(worklist));
+      continue;
+    }
+    // Ignore destroy_value.
+    if (isa<DestroyValueInst>(User))
+      continue;
+    // Ignore older ref count instructions.
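+    // (e.g. strong_retain/strong_release left over from non-ossa callers.)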
    if (isa<RefCountingInst>(User))
      continue;
-    if (User->isDebugInstruction())
+    if (isIncidentalUse(User))
      continue;

    auto FAS = FullApplySite::isa(User);
@@ -2443,7 +2525,6 @@ void swift::trySpecializeApplyOfGeneric(
                          << SpecializedF->getName() << "\n"
                          << "Specialized function type: "
                          << SpecializedF->getLoweredFunctionType() << "\n");
-  assert(!SpecializedF->hasOwnership());
  NewFunctions.push_back(SpecializedF);
}
diff --git a/test/SILOptimizer/specialize_anyobject.swift b/test/SILOptimizer/specialize_anyobject.swift
index 31426157e5608..b5c0654720980 100644
--- a/test/SILOptimizer/specialize_anyobject.swift
+++ b/test/SILOptimizer/specialize_anyobject.swift
@@ -1,4 +1,3 @@
-
// RUN: %target-swift-frontend -module-name specialize_anyobject -O -sil-inline-threshold 0 -emit-sil -primary-file %s | %FileCheck %s

// rdar://problem/20338028
diff --git a/test/SILOptimizer/specialize_default_witness_ossa.sil b/test/SILOptimizer/specialize_default_witness_ossa.sil
new file mode 100644
index 0000000000000..beacfa832a0d8
--- /dev/null
+++ b/test/SILOptimizer/specialize_default_witness_ossa.sil
@@ -0,0 +1,53 @@
+// RUN: %target-sil-opt -enable-sil-verify-all -sil-generic-specializer-enable-ownership -generic-specializer %s | %FileCheck %s
+
+sil_stage canonical
+
+import Builtin
+import Swift
+
+public protocol ResilientProtocol {
+  func defaultA()
+  func defaultB()
+}
+
+struct ConformingStruct : ResilientProtocol {
+  func defaultA()
+  func defaultB()
+}
+
+// CHECK-LABEL: sil shared [ossa] @$s8defaultA4main16ConformingStructV_Tg5
+// CHECK: bb0(%0 : $ConformingStruct):
+// CHECK: [[FN:%.*]] = function_ref @$s8defaultB4main16ConformingStructV_Tg5
+// CHECK: [[RESULT:%.*]] = apply [[FN]]
+// CHECK: return [[RESULT]]
+
+sil [ossa] @defaultA : $@convention(witness_method: ResilientProtocol) <Self where Self : ResilientProtocol> (@in_guaranteed Self) -> () {
+bb0(%0 : $*Self):
+  %fn = function_ref @defaultB : $@convention(witness_method: ResilientProtocol) <T where T : ResilientProtocol> (@in_guaranteed T) -> ()
+  %result = apply %fn<Self>(%0) : $@convention(witness_method: ResilientProtocol) <T where T : ResilientProtocol> (@in_guaranteed T) -> ()
+  return %result : $()
+}
+
+// CHECK-LABEL: sil shared [ossa] @$s8defaultB4main16ConformingStructV_Tg5
+// CHECK: bb0(%0 : $ConformingStruct):
+// CHECK: [[RESULT:%.*]] = tuple ()
+// CHECK: return [[RESULT]]
+
+sil [ossa] @defaultB : $@convention(witness_method: ResilientProtocol) <Self where Self : ResilientProtocol> (@in_guaranteed Self) -> () {
+bb0(%0 : $*Self):
+  %result = tuple ()
+  return %result : $()
+}
+
+// CHECK-LABEL: sil hidden [ossa] @test_specialize_default_witness_method
+// CHECK: bb0(%0 : $*ConformingStruct):
+// CHECK: [[FN:%.*]] = function_ref @$s8defaultA4main16ConformingStructV_Tg5
+// CHECK: [[RESULT:%.*]] = apply [[FN]]
+// CHECK: return [[RESULT]]
+
+sil hidden [ossa] @test_specialize_default_witness_method : $@convention(thin) (@in_guaranteed ConformingStruct) -> () {
+bb0(%0 : $*ConformingStruct):
+  %fn = function_ref @defaultA : $@convention(witness_method: ResilientProtocol) <T where T : ResilientProtocol> (@in_guaranteed T) -> ()
+  %result = apply %fn<ConformingStruct>(%0) : $@convention(witness_method: ResilientProtocol) <T where T : ResilientProtocol> (@in_guaranteed T) -> ()
+  return %result : $()
+}
diff --git a/test/SILOptimizer/specialize_default_witness_resilience_ossa.sil b/test/SILOptimizer/specialize_default_witness_resilience_ossa.sil
new file mode 100644
index 0000000000000..2de2e23b74d55
--- /dev/null
+++ b/test/SILOptimizer/specialize_default_witness_resilience_ossa.sil
@@ -0,0 +1,101 @@
+// RUN: %target-sil-opt -enable-library-evolution -enable-sil-verify-all -generic-specializer -sil-generic-specializer-enable-ownership %s | %FileCheck %s
+
+sil_stage canonical
+
+import Builtin
+import Swift
+
+public protocol ResilientProtocol {
+  func defaultA()
+  func defaultB()
+}
+
+public struct ConformingStruct : ResilientProtocol {
+  public func defaultA()
+  public func defaultB()
+}
+
+class Klass {}
+
+// Used to make sure we also handle non-trivial structs correctly.
+public struct ConformingNonTrivialStruct : ResilientProtocol {
+  var k: Klass
+
+  public func defaultA()
+  public func defaultB()
+}
+
+// CHECK-LABEL: sil shared [ossa] @$s8defaultA4main16ConformingStructV_Tg5
+// CHECK: bb0(%0 : $ConformingStruct):
+// CHECK-NEXT: [[TMP:%.*]] = alloc_stack $ConformingStruct
+// CHECK-NEXT: store %0 to [trivial] [[TMP]] : $*ConformingStruct
+// CHECK: [[FN:%.*]] = function_ref @$s8defaultB4main16ConformingStructV_Tg5
+// CHECK-NEXT: [[LOAD:%.*]] = load [trivial] [[TMP]] : $*ConformingStruct
+// CHECK-NEXT: [[RESULT:%.*]] = apply [[FN]]([[LOAD]])
+// CHECK-NEXT: dealloc_stack [[TMP]] : $*ConformingStruct
+// CHECK: } // end sil function '$s8defaultA4main16ConformingStructV_Tg5'
+
+// CHECK-LABEL: sil shared [ossa] @$s8defaultA4main26ConformingNonTrivialStructV_Tg5
+// CHECK: bb0(%0 : @guaranteed $ConformingNonTrivialStruct):
+// CHECK-NEXT: [[TMP:%.*]] = alloc_stack $ConformingNonTrivialStruct
+// CHECK-NEXT: store_borrow %0 to [[TMP]] : $*ConformingNonTrivialStruct
+// CHECK: [[FN:%.*]] = function_ref @$s8defaultB4main26ConformingNonTrivialStructV_Tg5
+// CHECK-NEXT: [[LOAD:%.*]] = load_borrow [[TMP]] : $*ConformingNonTrivialStruct
+// CHECK-NEXT: [[RESULT:%.*]] = apply [[FN]]([[LOAD]])
+// CHECK: dealloc_stack [[TMP]] : $*ConformingNonTrivialStruct
+// CHECK: } // end sil function '$s8defaultA4main26ConformingNonTrivialStructV_Tg5'
+
+sil [ossa] @defaultA : $@convention(witness_method: ResilientProtocol) <Self where Self : ResilientProtocol> (@in_guaranteed Self) -> () {
+bb0(%0 : $*Self):
+  %fn = function_ref @defaultB : $@convention(witness_method: ResilientProtocol) <T where T : ResilientProtocol> (@in_guaranteed T) -> ()
+  %result = apply %fn<Self>(%0) : $@convention(witness_method: ResilientProtocol) <T where T : ResilientProtocol> (@in_guaranteed T) -> ()
+  return %result : $()
+}
+
+// CHECK-LABEL: sil shared [ossa] @$s8defaultB4main16ConformingStructV_Tg5 :
+// CHECK: bb0(%0 : $ConformingStruct):
+// CHECK-NEXT: [[TMP:%.*]] = alloc_stack $ConformingStruct
+// CHECK-NEXT: store %0 to [trivial] [[TMP]] : $*ConformingStruct
+// CHECK: dealloc_stack [[TMP]] : $*ConformingStruct
+// CHECK: } // end sil function '$s8defaultB4main16ConformingStructV_Tg5'
+
+// CHECK-LABEL: sil shared [ossa] @$s8defaultB4main26ConformingNonTrivialStructV_Tg5 :
+// CHECK: bb0(%0 : @guaranteed $ConformingNonTrivialStruct):
+// CHECK-NEXT: [[TMP:%.*]] = alloc_stack $ConformingNonTrivialStruct
+// CHECK-NEXT: store_borrow %0 to [[TMP]] : $*ConformingNonTrivialStruct
+// CHECK: dealloc_stack [[TMP]] : $*ConformingNonTrivialStruct
+// CHECK: } // end sil function '$s8defaultB4main26ConformingNonTrivialStructV_Tg5'
+
+sil [ossa] @defaultB : $@convention(witness_method: ResilientProtocol) <Self where Self : ResilientProtocol> (@in_guaranteed Self) -> () {
+bb0(%0 : $*Self):
+  %result = tuple ()
+  return %result : $()
+}
+
+// CHECK-LABEL: sil hidden [ossa] @test_specialize_default_witness_method
+// CHECK: bb0(%0 : $*ConformingStruct):
+// CHECK: [[FN:%.*]] = function_ref @$s8defaultA4main16ConformingStructV_Tg5
+// CHECK-NEXT: [[VALUE:%.*]] = load [trivial] %0 : $*ConformingStruct
+// CHECK-NEXT: [[RESULT:%.*]] = apply [[FN]]([[VALUE]])
+// CHECK-NEXT: return [[RESULT]]
+
+sil hidden [ossa] @test_specialize_default_witness_method : $@convention(thin) (@in_guaranteed ConformingStruct) -> () {
+bb0(%0 : $*ConformingStruct):
+  %fn = function_ref @defaultA : $@convention(witness_method: ResilientProtocol) <T where T : ResilientProtocol> (@in_guaranteed T) -> ()
+  %result = apply %fn<ConformingStruct>(%0) : $@convention(witness_method: ResilientProtocol) <T where T : ResilientProtocol> (@in_guaranteed T) -> ()
+  return %result : $()
+}
+
+// CHECK-LABEL: sil hidden [ossa] @test_specialize_default_witness_method_nontrivial
+// CHECK: bb0(%0 : $*ConformingNonTrivialStruct):
+// CHECK: [[FN:%.*]] = function_ref @$s8defaultA4main26ConformingNonTrivialStructV_Tg5
+// CHECK-NEXT: [[VALUE:%.*]] = load_borrow %0 : $*ConformingNonTrivialStruct
+// CHECK-NEXT: [[RESULT:%.*]] = apply [[FN]]([[VALUE]])
+// CHECK: } // end sil function 'test_specialize_default_witness_method_nontrivial'
+
+sil hidden [ossa] @test_specialize_default_witness_method_nontrivial : $@convention(thin) (@in_guaranteed ConformingNonTrivialStruct) -> () {
+bb0(%0 : $*ConformingNonTrivialStruct):
+  %fn = function_ref @defaultA : $@convention(witness_method: ResilientProtocol) <T where T : ResilientProtocol> (@in_guaranteed T) -> ()
+  %result = apply %fn<ConformingNonTrivialStruct>(%0) : $@convention(witness_method: ResilientProtocol) <T where T : ResilientProtocol> (@in_guaranteed T) -> ()
+  return %result : $()
+}
diff --git a/test/SILOptimizer/specialize_inherited_ossa.sil b/test/SILOptimizer/specialize_inherited_ossa.sil
new file mode 100644
index 0000000000000..feafbd1446590
--- /dev/null
+++ b/test/SILOptimizer/specialize_inherited_ossa.sil
@@ -0,0 +1,71 @@
+// RUN: %target-sil-opt -enable-sil-verify-all -generic-specializer -module-name inherit -sil-generic-specializer-enable-ownership %s | %FileCheck %s
+
+import Builtin
+import Swift
+
+class MMFont {
+}
+
+class Klass {}
+
+struct MMStorage<Key : Hashable, Value> {
+  var k: Klass = Klass()
+}
+
+func ==(lhs: MMObject, rhs: MMObject) -> Bool
+
+class MMObject : Hashable {
+  func hash(into hasher: inout Hasher)
+}
+
+class MMString : MMObject {
+}
+
+// CHECK-LABEL: @caller : $@convention(thin) (Int, Int, @owned MMString) -> @owned MMStorage<MMString, MMFont?> {
+sil [ossa] @caller : $@convention(thin) (Int, Int, @owned MMString) -> @owned MMStorage<MMString, MMFont?> {
+bb0(%0 : $Int, %1 : $Int, %2 : @owned $MMString):
+  %3 = metatype $@thin MMStorage<MMString, MMFont?>.Type
+  %13 = function_ref @ext_fn1 : $@convention(thin) (Int, @thin MMStorage<MMString, MMFont?>.Type) -> @owned MMStorage<MMString, MMFont?>
+  %14 = apply %13(%0, %3) : $@convention(thin) (Int, @thin MMStorage<MMString, MMFont?>.Type) -> @owned MMStorage<MMString, MMFont?>
+  %15 = copy_value %14 : $MMStorage<MMString, MMFont?>
+
+  // CHECK: [[STACK:%[0-9]+]] = alloc_stack $MMString
+  %37 = alloc_stack $MMString
+  store %2 to [init] %37 : $*MMString
+  // CHECK: [[ID:%[0-9]+]] = function_ref @$s6callee7inherit8MMStringC_AB6MMFontCSgTg5 : $@convention(method) (@owned MMString, Int, @owned MMStorage<MMString, MMFont?>) -> Bool
+  %34 = function_ref @callee : $@convention(method) <τ_0_0, τ_0_1 where τ_0_0 : Hashable> (@in τ_0_0, Int, @owned MMStorage<τ_0_0, τ_0_1>) -> Bool
+  // CHECK: [[LOAD:%[0-9]+]] = load [take] [[STACK]]
+  // CHECK: apply [[ID]]([[LOAD]], %1, %{{[0-9]+}}) : $@convention(method) (@owned MMString, Int, @owned MMStorage<MMString, MMFont?>) -> Bool
+  %45 = apply %34<MMString, MMFont?>(%37, %1, %14) : $@convention(method) <τ_0_0, τ_0_1 where τ_0_0 : Hashable> (@in τ_0_0, Int, @owned MMStorage<τ_0_0, τ_0_1>) -> Bool
+  dealloc_stack %37 : $*MMString
+
+  return %15 : $MMStorage<MMString, MMFont?>
+}
+
+// CHECK-LABEL: @$s6callee7inherit8MMStringC_AB6MMFontCSgTg5 : $@convention(method) (@owned MMString, Int, @owned MMStorage<MMString, MMFont?>) -> Bool {
+// CHECK: [[META:%[0-9]+]] = metatype $@thick MMString.Type
+// CHECK: [[ID3:%[0-9]+]] = witness_method $MMString, #Equatable."==" :
+// CHECK: [[STACK2:%[0-9]+]] = alloc_stack $MMString
+// CHECK: [[STACK3:%[0-9]+]] = alloc_stack $MMString
+// CHECK: apply [[ID3]]([[STACK2]], [[STACK3]], [[META]]) : $@convention(witness_method: Equatable) <τ_0_0 where τ_0_0 : Equatable> (@in τ_0_0, @in τ_0_0, @thick τ_0_0.Type) -> Bool
+
+// CHECK-LABEL: @callee : $@convention(method) <Key, Value where Key : Hashable> (@in Key, Int, @owned MMStorage<Key, Value>) -> Bool {
+sil [noinline] [ossa] @callee : $@convention(method) <Key, Value where Key : Hashable> (@in Key, Int, @owned MMStorage<Key, Value>) -> Bool {
+bb0(%0 : $*Key, %1 : $Int, %2 : @owned $MMStorage<Key, Value>):
+  %25 = metatype $@thick Key.Type
+  // CHECK: [[ID2:%[0-9]+]] = witness_method $Key, #Equatable."==" :
+  %26 = witness_method $Key, #Equatable."==" : $@convention(witness_method: Equatable) <τ_0_0 where τ_0_0 : Equatable> (@in τ_0_0, @in τ_0_0, @thick τ_0_0.Type) -> Bool
+  %27 = alloc_stack $Key
+  %33 = alloc_stack $Key
+  copy_addr %0 to [initialization] %27 : $*Key
+  copy_addr %0 to [initialization] %33 : $*Key
+  // CHECK: apply [[ID2]]
+  %35 = apply %26<Key>(%27, %33, %25) : $@convention(witness_method: Equatable) <τ_0_0 where τ_0_0 : Equatable> (@in τ_0_0, @in τ_0_0, @thick τ_0_0.Type) -> Bool
+  dealloc_stack %33 : $*Key
+  dealloc_stack %27 : $*Key
+  destroy_value %2 : $MMStorage<Key, Value>
+  destroy_addr %0 : $*Key
+  return %35 : $Bool
+}
+
+sil @ext_fn1 : $@convention(thin) (Int, @thin MMStorage<MMString, MMFont?>.Type) -> @owned MMStorage<MMString, MMFont?>
diff --git a/test/SILOptimizer/specialize_opaque_ossa.sil b/test/SILOptimizer/specialize_opaque_ossa.sil
new file mode 100644
index 0000000000000..b7eab611c04cd
--- /dev/null
+++ b/test/SILOptimizer/specialize_opaque_ossa.sil
@@ -0,0 +1,45 @@
+// RUN: %target-sil-opt -enable-sil-opaque-values -enable-sil-verify-all -generic-specializer -sil-generic-specializer-enable-ownership %s | %FileCheck %s
+
+sil_stage canonical
+
+import Builtin
+
+// Test that foo is specialized on Builtin.Int64 and the copy_values and destroy_values are dropped.
+//
+// CHECK-LABEL: sil shared [ossa] @$s3fooBi64__Tg5 : $@convention(thin) (Builtin.Int64, Builtin.Int64) -> () {
+// CHECK: bb0(%0 : $Builtin.Int64, %1 : $Builtin.Int64):
+// CHECK: [[F:%.*]] = function_ref @$s3fooBi64__Tg5 : $@convention(thin) (Builtin.Int64, Builtin.Int64) -> ()
+// CHECK: %{{.*}} = apply [[F]](%0, %1) : $@convention(thin) (Builtin.Int64, Builtin.Int64) -> ()
+// CHECK-NOT: copy_value
+// CHECK-NOT: destroy_value
+// CHECK: return %{{.*}} : $()
+// CHECK: } // end sil function '$s3fooBi64__Tg5'

+// Test that when foo is specialized on Builtin.NativeObject, its copy_value
+// and destroy_value instructions are kept.
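+//
+// (In the trivial Builtin.Int64 case above the copies vanish because trivial
+// values carry no ownership, so copy_value/destroy_value are not emitted.)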
+// CHECK-LABEL: sil shared [ossa] @$s3fooBo_Tg5 : $@convention(thin) (@owned Builtin.NativeObject, @owned Builtin.NativeObject) -> () {
+// CHECK: copy_value
+// CHECK: apply
+// CHECK: destroy_value
+// CHECK: } // end sil function '$s3fooBo_Tg5'
+
+sil hidden [ossa] @foo : $@convention(thin) <T> (@in T, @in T) -> () {
+bb0(%0 : @owned $T, %1 : @owned $T):
+  %f = function_ref @foo : $@convention(thin) <τ_0_0> (@in τ_0_0, @in τ_0_0) -> ()
+  %cp0 = copy_value %0 : $T
+  %cp1 = copy_value %1 : $T
+  %call = apply %f<T>(%cp0, %cp1) : $@convention(thin) <τ_0_0> (@in τ_0_0, @in τ_0_0) -> ()
+  destroy_value %1 : $T
+  destroy_value %0 : $T
+  %10 = tuple ()
+  return %10 : $()
+}
+
+sil [ossa] @testSpecialize : $@convention(thin) (Builtin.Int64, @owned Builtin.NativeObject) -> () {
+bb0(%0 : $Builtin.Int64, %1 : @owned $Builtin.NativeObject):
+  %f = function_ref @foo : $@convention(thin) <τ_0_0> (@in τ_0_0, @in τ_0_0) -> ()
+  %call = apply %f<Builtin.Int64>(%0, %0) : $@convention(thin) <τ_0_0> (@in τ_0_0, @in τ_0_0) -> ()
+  %1a = copy_value %1 : $Builtin.NativeObject
+  %call2 = apply %f<Builtin.NativeObject>(%1, %1a) : $@convention(thin) <τ_0_0> (@in τ_0_0, @in τ_0_0) -> ()
+  %999 = tuple ()
+  return %999 : $()
+}
diff --git a/test/SILOptimizer/specialize_opaque_result_types_ossa.sil b/test/SILOptimizer/specialize_opaque_result_types_ossa.sil
new file mode 100644
index 0000000000000..1ead1ab5274b6
--- /dev/null
+++ b/test/SILOptimizer/specialize_opaque_result_types_ossa.sil
@@ -0,0 +1,74 @@
+// RUN: %empty-directory(%t)
+// RUN: %target-swift-frontend -disable-availability-checking %S/Inputs/opaque_result_types.swift -module-name External -emit-module -emit-module-path %t/External.swiftmodule
+// RUN: %target-sil-opt -I %t -enable-sil-verify-all %s -generic-specializer -sil-generic-specializer-enable-ownership | %FileCheck %s
+
+// REQUIRES: CPU=x86_64
+
+import Builtin
+import Swift
+import SwiftShims
+import External
+
+sil_stage canonical
+
+sil @project : $@convention(thin) (@in Test) -> @out IndexingIterator<Test>
+
+sil shared [ossa] @test : $@convention(thin) (@owned Test) -> () {
+bb0(%0 : @owned $Test):
+  %3 = alloc_stack $Test
+  store %0 to [init] %3 : $*Test
+  %5 = alloc_stack $IndexingIterator<Test>
+  %6 = function_ref @project : $@convention(thin) (@in Test) -> @out IndexingIterator<Test>
+  %7 = apply %6(%5, %3) : $@convention(thin) (@in Test) -> @out IndexingIterator<Test>
+  %44 = alloc_stack $Optional<Int64>
+  // function_ref protocol witness for IteratorProtocol.next() in conformance IndexingIterator<A>
+  %45 = function_ref @next : $@convention(witness_method: IteratorProtocol) <τ_0_0 where τ_0_0 : Collection> (@inout IndexingIterator<τ_0_0>) -> @out Optional<τ_0_0.Element>
+  %46 = apply %45<Test>(%44, %5) : $@convention(witness_method: IteratorProtocol) <τ_0_0 where τ_0_0 : Collection> (@inout IndexingIterator<τ_0_0>) -> @out Optional<τ_0_0.Element>
+  destroy_addr %44 : $*Optional<Int64>
+  dealloc_stack %44 : $*Optional<Int64>
+  destroy_addr %5 : $*IndexingIterator<Test>
+  dealloc_stack %5 : $*IndexingIterator<Test>
+  dealloc_stack %3 : $*Test
+  %41 = tuple ()
+  return %41 : $()
+}
+// CHECK-LABEL: sil shared [ossa] @$s4next8External4TestV_Tg5 : $@convention(witness_method: IteratorProtocol) (@inout IndexingIterator<Test>) -> Optional<Int64>
+// CHECK: bb0(%0 : $*IndexingIterator<Test>):
+// CHECK: alloc_stack $Optional<Int64>
+// CHECK: ([[RES:%.*]], %{{.*}}) = begin_apply {{.*}}({{.*}}) : $@yield_once @convention(witness_method: Collection) <τ_0_0 where τ_0_0 : Collection> (@in_guaranteed τ_0_0.Index, @in_guaranteed τ_0_0) -> @yields @in_guaranteed τ_0_0.Element
+// CHECK: [[DEST:%.*]] = init_enum_data_addr %1 : $*Optional<Int64>, #Optional.some!enumelt
+// CHECK: copy_addr [[RES]] to [initialization] {{.*}} : $*Int64
+// CHECK: } // end sil function '$s4next8External4TestV_Tg5'
+sil [ossa] @next : $@convention(witness_method: IteratorProtocol) <τ_0_0 where τ_0_0 : Collection> (@inout IndexingIterator<τ_0_0>) -> @out Optional<τ_0_0.Element> {
+bb0(%0 : $*Optional<τ_0_0.Element>, %1 : $*IndexingIterator<τ_0_0>):
+  %2 = metatype $@thick τ_0_0.Index.Type
+  %3 = struct_element_addr %1 : $*IndexingIterator<τ_0_0>, #IndexingIterator._position
+  %4 = alloc_stack $τ_0_0.Index
+  copy_addr %3 to [initialization] %4 : $*τ_0_0.Index
+  %6 = struct_element_addr %1 : $*IndexingIterator<τ_0_0>, #IndexingIterator._elements
+  %20 = alloc_stack $τ_0_0
+  copy_addr %6 to [initialization] %20 : $*τ_0_0
+  %22 = alloc_stack $τ_0_0.Index
+  copy_addr %3 to [initialization] %22 : $*τ_0_0.Index
+  %24 = alloc_stack $τ_0_0
+  copy_addr [take] %20 to [initialization] %24 : $*τ_0_0
+  %26 = witness_method $τ_0_0, #Collection.subscript!read : <Self where Self : Collection> (Self) -> (Self.Index) -> () : $@yield_once @convention(witness_method: Collection) <τ_0_0 where τ_0_0 : Collection> (@in_guaranteed τ_0_0.Index, @in_guaranteed τ_0_0) -> @yields @in_guaranteed τ_0_0.Element
+
+  // The specialized begin_apply of %26 has a result type of τ_0_0.Element,
+  // which works out to be an opaque result type whose underlying type is
+  // Int64. Make sure that the specialized code handles this correctly.
+  (%27, %28) = begin_apply %26<τ_0_0>(%22, %24) : $@yield_once @convention(witness_method: Collection) <τ_0_0 where τ_0_0 : Collection> (@in_guaranteed τ_0_0.Index, @in_guaranteed τ_0_0) -> @yields @in_guaranteed τ_0_0.Element
+
+  %29 = init_enum_data_addr %0 : $*Optional<τ_0_0.Element>, #Optional.some!enumelt
+  copy_addr %27 to [initialization] %29 : $*τ_0_0.Element
+  end_apply %28
+  destroy_addr %22 : $*τ_0_0.Index
+  destroy_addr %24 : $*τ_0_0
+  dealloc_stack %24 : $*τ_0_0
+  dealloc_stack %22 : $*τ_0_0.Index
+  dealloc_stack %20 : $*τ_0_0
+  destroy_addr %4 : $*τ_0_0.Index
+  dealloc_stack %4 : $*τ_0_0.Index
+  %41 = tuple ()
+  return %41 : $()
+}
diff --git a/test/SILOptimizer/specialize_ossa.sil b/test/SILOptimizer/specialize_ossa.sil
index b76d6621600fa..45714dcc4f9f5 100644
--- a/test/SILOptimizer/specialize_ossa.sil
+++ b/test/SILOptimizer/specialize_ossa.sil
@@ -1,10 +1,12 @@
-// RUN: %target-sil-opt -enable-sil-verify-all -generic-specializer %/s | %FileCheck %s
+// RUN: %target-sil-opt -enable-sil-verify-all -sil-partial-specialization -generic-specializer -sil-generic-specializer-enable-ownership %s | %FileCheck %s

sil_stage canonical

import Builtin
import Swift

+class Klass {}
+
sil [ossa] [transparent] @ossaTransparentCallee : $@convention(thin) <T> (@in T) -> () {
bb0(%0 : $*T):
  destroy_addr %0 : $*T
@@ -12,9 +14,12 @@ bb0(%0 : $*T):
  return %9999 : $()
}

+// We specialize this case today and simply respect the ownership already set
+// on each function; this works because ossa functions can be specialized
+// directly.
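+//
+// For example, the apply in the caller below ends up calling the [ossa]
+// specialization directly (illustrative):
+//   %f = function_ref @$s21ossaTransparentCalleeBo_Tg5
+//   apply %f(%0)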
+//
// CHECK-LABEL: sil @caller : $@convention(thin) (@owned Builtin.NativeObject) -> () {
-// CHECK: [[FUNC:%.*]] = function_ref @ossaTransparentCallee : $@convention(thin) <τ_0_0> (@in τ_0_0) -> ()
-// CHECK: apply [[FUNC]](
+// CHECK: [[FUNC:%.*]] = function_ref @$s21ossaTransparentCalleeBo_Tg5 : $@convention(thin) (@owned Builtin.NativeObject) -> ()
+// CHECK: apply [[FUNC]](
// CHECK: } // end sil function 'caller'
sil @caller : $@convention(thin) (@owned Builtin.NativeObject) -> () {
bb0(%0 : $Builtin.NativeObject):
@@ -26,3 +31,1222 @@ bb0(%0 : $Builtin.NativeObject):
  %9999 = tuple()
  return %9999 : $()
}
+
+// CHECK-LABEL: sil [ossa] @exp1 : $@convention(thin) () -> () {
+// CHECK-NOT: apply
+// Call of specialized initializer:
+// CHECK: [[CTOR:%[0-9]+]] = function_ref @$s8XXX_inits5Int32V_Tg5
+// CHECK: apply [[CTOR]]
+// CHECK: [[ACCEPTS_INT:%[0-9]+]] = function_ref @acceptsInt
+// Call of specialized XXX_foo:
+// CHECK: [[FOO:%[0-9]+]] = function_ref @$s7XXX_foos5Int32V_Tg5
+// CHECK: apply [[FOO]]
+// CHECK: apply [[ACCEPTS_INT]]
+// CHECK: return
+// CHECK: } // end sil function 'exp1'
+
+// CHECK: sil [ossa] @exp2 : $@convention(thin) () -> () {
+// CHECK: } // end sil function 'exp2'
+
+struct XXX<T> {
+  init(t: T)
+  mutating func foo(t: T) -> Int32
+  var m_t: T
+}
+
+// specialize.XXX.init (specialize.XXX<A>.Type)(t : A) -> specialize.XXX<A>
+sil [ossa] [noinline] @XXX_init : $@convention(thin) <T> (@in T, @thin XXX<T>.Type) -> @out XXX<T> {
+bb0(%0 : $*XXX<T>, %1 : $*T, %2 : $@thin XXX<T>.Type):
+  %3 = alloc_stack $XXX<T>, var, name "sf"  // users: %7, %11, %13
+  debug_value_addr %1 : $*T, let, name "t"  // id: %4
+  %5 = alloc_stack $T  // users: %6, %8, %9
+  copy_addr %1 to [initialization] %5 : $*T  // id: %6
+  %7 = struct_element_addr %3 : $*XXX<T>, #XXX.m_t  // user: %8
+  copy_addr [take] %5 to [initialization] %7 : $*T  // id: %8
+  dealloc_stack %5 : $*T  // id: %9
+  destroy_addr %1 : $*T  // id: %10
+  copy_addr [take] %3 to [initialization] %0 : $*XXX<T>  // id: %11
+  %12 = tuple ()  // user: %14
+  dealloc_stack %3 : $*XXX<T>  // id: %13
+  return %12 : $()  // id: %14
+}
+
+// specialize.XXX.foo (@inout specialize.XXX<A>)(t : A) -> Swift.Int32
+sil [ossa] [noinline] @XXX_foo : $@convention(method) <T> (@in T, @inout XXX<T>) -> Int32 {
+bb0(%0 : $*T, %1 : $*XXX<T>):
+  debug_value_addr %0 : $*T, let, name "t"  // id: %2
+  %3 = alloc_stack $T  // users: %4, %6, %7
+  copy_addr %0 to [initialization] %3 : $*T  // id: %4
+  %5 = struct_element_addr %1 : $*XXX<T>, #XXX.m_t  // user: %6
+  copy_addr [take] %3 to %5 : $*T  // id: %6
+  dealloc_stack %3 : $*T  // id: %7
+  %8 = integer_literal $Builtin.Int32, 4  // user: %9
+  %9 = struct $Int32 (%8 : $Builtin.Int32)  // user: %11
+  destroy_addr %0 : $*T  // id: %10
+  return %9 : $Int32  // id: %11
+}
+
+sil [ossa] [noinline] @XXX_init_guaranteed : $@convention(thin) <T> (@in_guaranteed T, @thin XXX<T>.Type) -> @out XXX<T> {
+bb0(%0 : $*XXX<T>, %1 : $*T, %2 : $@thin XXX<T>.Type):
+  %3 = alloc_stack $XXX<T>, var, name "sf"  // users: %7, %11, %13
+  debug_value_addr %1 : $*T, let, name "t"  // id: %4
+  %5 = alloc_stack $T  // users: %6, %8, %9
+  copy_addr %1 to [initialization] %5 : $*T  // id: %6
+  %7 = struct_element_addr %3 : $*XXX<T>, #XXX.m_t  // user: %8
+  copy_addr [take] %5 to [initialization] %7 : $*T  // id: %8
+  dealloc_stack %5 : $*T  // id: %9
+  copy_addr [take] %3 to [initialization] %0 : $*XXX<T>  // id: %11
+  %12 = tuple ()  // user: %14
+  dealloc_stack %3 : $*XXX<T>  // id: %13
+  return %12 : $()  // id: %14
+}
+
+// specialize.XXX.foo (@inout specialize.XXX<A>)(t : A) -> Swift.Int32
+sil [ossa] [noinline] @XXX_foo_guaranteed : $@convention(method) <T> (@in_guaranteed T, @inout XXX<T>) -> Int32 {
+bb0(%0 : $*T, %1 : $*XXX<T>):
+  debug_value_addr %0 : $*T, let, name "t"  // id: %2
+  %3 = alloc_stack $T  // users: %4, %6, %7
+  copy_addr %0 to [initialization] %3 : $*T  // id: %4
+  %5 = struct_element_addr %1 : $*XXX<T>, #XXX.m_t  // user: %6
+  copy_addr [take] %3 to %5 : $*T  // id: %6
+  dealloc_stack %3 : $*T  // id: %7
+  %8 = integer_literal $Builtin.Int32, 4  // user: %9
+  %9 = struct $Int32 (%8 : $Builtin.Int32)  // user: %11
+  return %9 : $Int32  // id: %11
+}
+
+// Swift.Int32._convertFromBuiltinIntegerLiteral (Swift.Int32.Type)(Builtin.IntLiteral) -> Swift.Int32
+sil public_external [ossa] [transparent] @$sSi33_convertFromBuiltinIntegerLiteralySiBI_cSimF : $@convention(thin) (Builtin.IntLiteral, @thin Int32.Type) -> Int32 {
+bb0(%0 : $Builtin.IntLiteral, %1 : $@thin Int32.Type):
+  %3 = builtin "s_to_s_checked_trunc_IntLiteral_Int32"(%0 : $Builtin.IntLiteral) : $(Builtin.Int32, Builtin.Int1)
+  %4 = tuple_extract %3 : $(Builtin.Int32, Builtin.Int1), 0  // user: %5
+  %5 = struct $Int32 (%4 : $Builtin.Int32)  // user: %6
+  return %5 : $Int32  // id: %6
+}
+
+// specialize.acceptsInt (Swift.Int32) -> ()
+sil [noinline] [ossa] @acceptsInt : $@convention(thin) (Int32) -> () {
+bb0(%0 : $Int32):
+  debug_value %0 : $Int32, let, name "x"  // id: %1
+  %2 = tuple ()  // user: %3
+  return %2 : $()  // id: %3
+}
+
+// specialize.exp1 () -> ()
+sil [ossa] @exp1 : $@convention(thin) () -> () {
+bb0:
+  %0 = alloc_stack $XXX<Int32>, var, name "II"  // users: %7, %15, %19
+  // function_ref specialize.XXX.init (specialize.XXX<A>.Type)(t : A) -> specialize.XXX<A>
+  %1 = function_ref @XXX_init : $@convention(thin) <τ_0_0> (@in τ_0_0, @thin XXX<τ_0_0>.Type) -> @out XXX<τ_0_0>  // user: %7
+  %2 = metatype $@thin XXX<Int32>.Type  // user: %7
+  %3 = alloc_stack $Int32  // users: %6, %7, %8
+  %4 = integer_literal $Builtin.Int32, 5  // user: %5
+  %5 = struct $Int32 (%4 : $Builtin.Int32)  // user: %6
+  store %5 to [trivial] %3 : $*Int32  // id: %6
+  %7 = apply %1<Int32>(%0, %3, %2) : $@convention(thin) <τ_0_0> (@in τ_0_0, @thin XXX<τ_0_0>.Type) -> @out XXX<τ_0_0>
+  dealloc_stack %3 : $*Int32  // id: %8
+  // function_ref specialize.acceptsInt (Swift.Int32) -> ()
+  %9 = function_ref @acceptsInt : $@convention(thin) (Int32) -> ()  // user: %16
+  // function_ref specialize.XXX.foo (@inout specialize.XXX<A>)(t : A) -> Swift.Int32
+  %10 = function_ref @XXX_foo : $@convention(method) <τ_0_0> (@in τ_0_0, @inout XXX<τ_0_0>) -> Int32  // user: %15
+  %11 = alloc_stack $Int32  // users: %14, %15, %17
+  %12 = integer_literal $Builtin.Int32, 4  // user: %13
+  %13 = struct $Int32 (%12 : $Builtin.Int32)  // user: %14
+  store %13 to [trivial] %11 : $*Int32  // id: %14
+  %15 = apply %10<Int32>(%11, %0) : $@convention(method) <τ_0_0> (@in τ_0_0, @inout XXX<τ_0_0>) -> Int32  // user: %16
+  %16 = apply %9(%15) : $@convention(thin) (Int32) -> ()
+  dealloc_stack %11 : $*Int32  // id: %17
+  %18 = tuple ()  // user: %20
+  dealloc_stack %0 : $*XXX<Int32>  // id: %19
+  return %18 : $()  // id: %20
+}
+
+// specialize.exp2 () -> ()
+sil [ossa] @exp2 : $@convention(thin) () -> () {
+bb0:
+  %0 = alloc_stack $XXX<UInt8>, var, name "I8"  // users: %7, %15, %19
+  // function_ref specialize.XXX.init (specialize.XXX<A>.Type)(t : A) -> specialize.XXX<A>
+  %1 = function_ref @XXX_init : $@convention(thin) <τ_0_0> (@in τ_0_0, @thin XXX<τ_0_0>.Type) -> @out XXX<τ_0_0>  // user: %7
+  %2 = metatype $@thin XXX<UInt8>.Type  // user: %7
+  %3 = alloc_stack $UInt8  // users: %6, %7, %8
+  %4 = integer_literal $Builtin.Int8, 5  // user: %5
+  %5 = struct $UInt8 (%4 : $Builtin.Int8)  // user: %6
+  store %5 to [trivial] %3 : $*UInt8  // id: %6
+  %7 = apply %1<UInt8>(%0, %3, %2) : $@convention(thin) <τ_0_0> (@in τ_0_0, @thin XXX<τ_0_0>.Type) -> @out XXX<τ_0_0>
+  dealloc_stack %3 : $*UInt8  // id: %8
+  // function_ref specialize.acceptsInt (Swift.Int32) -> ()
+  %9 = function_ref @acceptsInt : $@convention(thin) (Int32) -> ()  // user: %16
+  // function_ref specialize.XXX.foo (@inout specialize.XXX<A>)(t : A) -> Swift.Int32
+  %10 = function_ref @XXX_foo : $@convention(method) <τ_0_0> (@in τ_0_0, @inout XXX<τ_0_0>) -> Int32  // user: %15
+  %11 = alloc_stack $UInt8  // users: %14, %15, %17
+  %12 = integer_literal $Builtin.Int8, 4  // user: %13
+  %13 = struct $UInt8 (%12 : $Builtin.Int8)  // user: %14
+  store %13 to [trivial] %11 : $*UInt8  // id: %14
+  %15 = apply %10<UInt8>(%11, %0) : $@convention(method) <τ_0_0> (@in τ_0_0, @inout XXX<τ_0_0>) -> Int32  // user: %16
+  %16 = apply %9(%15) : $@convention(thin) (Int32) -> ()
+  dealloc_stack %11 : $*UInt8  // id: %17
+  %18 = tuple ()  // user: %20
+  dealloc_stack %0 : $*XXX<UInt8>  // id: %19
+  return %18 : $()  // id: %20
+}
+
+sil [ossa] @exp2_nativeObject : $@convention(thin) (@guaranteed Builtin.NativeObject) -> () {
+bb0(%arg : @guaranteed $Builtin.NativeObject):
+  %0 = alloc_stack $XXX<Builtin.NativeObject>
+  // function_ref specialize.XXX.init (specialize.XXX<A>.Type)(t : A) -> specialize.XXX<A>
+  %1 = function_ref @XXX_init : $@convention(thin) <τ_0_0> (@in τ_0_0, @thin XXX<τ_0_0>.Type) -> @out XXX<τ_0_0>  // user: %7
+  %2 = metatype $@thin XXX<Builtin.NativeObject>.Type  // user: %7
+  %3 = alloc_stack $Builtin.NativeObject  // users: %6, %7, %8
+  %arg1 = copy_value %arg : $Builtin.NativeObject
+  store %arg1 to [init] %3 : $*Builtin.NativeObject  // id: %6
+  %7 = apply %1<Builtin.NativeObject>(%0, %3, %2) : $@convention(thin) <τ_0_0> (@in τ_0_0, @thin XXX<τ_0_0>.Type) -> @out XXX<τ_0_0>
+  dealloc_stack %3 : $*Builtin.NativeObject  // id: %8
+
+  %1g = function_ref @XXX_init_guaranteed : $@convention(thin) <τ_0_0> (@in_guaranteed τ_0_0, @thin XXX<τ_0_0>.Type) -> @out XXX<τ_0_0>
+  %3g = alloc_stack $Builtin.NativeObject  // users: %6, %7, %8
+  %3g_out = alloc_stack $XXX<Builtin.NativeObject>  // users: %6, %7, %8
+  %arg1g = copy_value %arg : $Builtin.NativeObject
+  store %arg1g to [init] %3g : $*Builtin.NativeObject  // id: %6
+  %7g = apply %1<Builtin.NativeObject>(%3g_out, %3g, %2) : $@convention(thin) <τ_0_0> (@in τ_0_0, @thin XXX<τ_0_0>.Type) -> @out XXX<τ_0_0>
+  destroy_addr %3g_out : $*XXX<Builtin.NativeObject>
+  dealloc_stack %3g_out : $*XXX<Builtin.NativeObject>
+  dealloc_stack %3g : $*Builtin.NativeObject  // id: %8
+
+  // function_ref specialize.acceptsInt (Swift.Int32) -> ()
+  %9 = function_ref @acceptsInt : $@convention(thin) (Int32) -> ()  // user: %16
+  // function_ref specialize.XXX.foo (@inout specialize.XXX<A>)(t : A) -> Swift.Int32
+  %10 = function_ref @XXX_foo : $@convention(method) <τ_0_0> (@in τ_0_0, @inout XXX<τ_0_0>) -> Int32  // user: %15
+  %11 = alloc_stack $Builtin.NativeObject  // users: %14, %15, %17
+  %arg2 = copy_value %arg : $Builtin.NativeObject
+  store %arg2 to [init] %11 : $*Builtin.NativeObject
+  %15 = apply %10<Builtin.NativeObject>(%11, %0) : $@convention(method) <τ_0_0> (@in τ_0_0, @inout XXX<τ_0_0>) -> Int32  // user: %16
+  %16 = apply %9(%15) : $@convention(thin) (Int32) -> ()
+  dealloc_stack %11 : $*Builtin.NativeObject  // id: %17
+
+  %10g = function_ref @XXX_foo_guaranteed : $@convention(method) <τ_0_0> (@in_guaranteed τ_0_0, @inout XXX<τ_0_0>) -> Int32  // user: %15
+  %11g = alloc_stack $Builtin.NativeObject  // users: %14, %15, %17
+  %arg2g = copy_value %arg : $Builtin.NativeObject
+  store %arg2g to [init] %11g : $*Builtin.NativeObject
+  %15g = apply %10g<Builtin.NativeObject>(%11g, %0) : $@convention(method) <τ_0_0> (@in_guaranteed τ_0_0, @inout XXX<τ_0_0>) -> Int32  // user: %16
+  apply %9(%15g) : $@convention(thin) (Int32) -> ()
+  destroy_addr %11g : $*Builtin.NativeObject
+  dealloc_stack %11g : $*Builtin.NativeObject  // id: %17
+  destroy_addr %0 : $*XXX<Builtin.NativeObject>
+  dealloc_stack %0 : $*XXX<Builtin.NativeObject>  // id: %19
+  %18 = tuple ()  // user: %20
+  return %18 : $()  // id: %20
+}
+
+// specialize.useClosure (fun : () -> A) -> A
+sil [ossa] @useClosure : $@convention(thin) <T> (@owned @callee_owned () -> @out T) -> @out T {
+bb0(%0 : $*T, %1 : @owned $@callee_owned () -> @out T):
+  debug_value %1 : $@callee_owned () -> @out T, let, name "fun"  // id: %2
+  %2 = copy_value %1 : $@callee_owned () -> @out T  // id: %3
+  %4 = apply %2(%0) : $@callee_owned () -> @out T
+  destroy_value %1 : $@callee_owned () -> @out T  // id: %5
+  %6 = tuple ()  // user: %7
+  return %6 : $()  // id: %7
+}
+
+// specialize.getGenericClosure (t : A) -> () -> A
+sil [ossa] @getGenericClosure : $@convention(thin) <T> (@in T) -> @owned @callee_owned () -> @out T {
+bb0(%0 : $*T):
+  debug_value_addr %0 : $*T, let, name "t"  // id: %1
+  // function_ref specialize.(getGenericClosure (t : A) -> () -> A).(tmp #1) (())A
+  %2 = function_ref @getGenericClosure_closure : $@convention(thin) <τ_0_0> (@owned <τ_0_0> { var τ_0_0 } <τ_0_0>) -> @out τ_0_0  // user: %5
+  %3 = alloc_box $<τ_0_0> { var τ_0_0 } <T>
+  %3b = begin_borrow %3 : $<τ_0_0> { var τ_0_0 } <T>
+  %3a = project_box %3b : $<τ_0_0> { var τ_0_0 } <T>, 0
+  copy_addr %0 to [initialization] %3a : $*T  // id: %4
+  end_borrow %3b : $<τ_0_0> { var τ_0_0 } <T>
+  %5 = partial_apply %2<T>(%3) : $@convention(thin) <τ_0_0> (@owned <τ_0_0> { var τ_0_0 } <τ_0_0>) -> @out τ_0_0  // user: %7
+  destroy_addr %0 : $*T  // id: %6
+  return %5 : $@callee_owned () -> @out T  // id: %7
+}
+
+// specialize.(getGenericClosure (t : A) -> () -> A).(tmp #1) (())
+sil shared [ossa] @getGenericClosure_closure : $@convention(thin) <T> (@owned <τ_0_0> { var τ_0_0 } <T>) -> @out T {
+bb0(%0 : $*T, %1 : @owned $<τ_0_0> { var τ_0_0 } <T>):
+  %1a = begin_borrow %1 : $<τ_0_0> { var τ_0_0 } <T>
+  %2 = project_box %1a : $<τ_0_0> { var τ_0_0 } <T>, 0
+  copy_addr %2 to [initialization] %0 : $*T  // id: %3
+  end_borrow %1a : $<τ_0_0> { var τ_0_0 } <T>
+  destroy_value %1 : $<τ_0_0> { var τ_0_0 } <T>  // id: %4
+  %5 = tuple ()  // user: %6
+  return %5 : $()  // id: %6
+}
+
+// specialize.specializePartialApplies () -> Swift.UInt8
+sil [ossa] @specializePartialApplies : $@convention(thin) () -> UInt8 {
+bb0:
+  %0 = alloc_stack $UInt8, var, name "i"  // users: %3, %18
+  %1 = integer_literal $Builtin.Int8, 5  // user: %2
+  %2 = struct $UInt8 (%1 : $Builtin.Int8)  // users: %3, %7
+  store %2 to [trivial] %0 : $*UInt8  // id: %3
+  // function_ref specialize.useClosure (fun : () -> A) -> A
+  %4 = function_ref @useClosure : $@convention(thin) <τ_0_0> (@owned @callee_owned () -> @out τ_0_0) -> @out τ_0_0  // user: %14
+  // function_ref specialize.getGenericClosure (t : A) -> () -> A
+  %5 = function_ref @getGenericClosure : $@convention(thin) <τ_0_0> (@in τ_0_0) -> @owned @callee_owned () -> @out τ_0_0  // user: %8
+  %6 = alloc_stack $UInt8  // users: %7, %8, %17
+  store %2 to [trivial] %6 : $*UInt8  // id: %7
+  %8 = apply %5<UInt8>(%6) : $@convention(thin) <τ_0_0> (@in τ_0_0) -> @owned @callee_owned () -> @out τ_0_0  // user: %10
+  // function_ref reabstraction thunk helper from @callee_owned () -> (@out Swift.UInt8) to @callee_owned () -> (@unowned Swift.UInt8)
+  %9 = function_ref @$ss5UInt8VIxr_ABIxd_TR : $@convention(thin) (@owned @callee_owned () -> @out UInt8) -> UInt8  // user: %10
+  %10 = partial_apply %9(%8) : $@convention(thin) (@owned @callee_owned () -> @out UInt8) -> UInt8  // user: %12
+  // function_ref reabstraction thunk helper from @callee_owned () -> (@unowned Swift.UInt8) to @callee_owned () -> (@out Swift.UInt8)
+  %11 = function_ref @$ss5UInt8VIxd_ABIxr_TR : $@convention(thin) (@owned @callee_owned () -> UInt8) -> @out UInt8  // user: %12
+  %12 = partial_apply %11(%10) : $@convention(thin) (@owned @callee_owned () -> UInt8) -> @out UInt8  // user: %14
+  %13 = alloc_stack $UInt8  // users: %14, %15, %16
+  %14 = apply %4<UInt8>(%13, %12) : $@convention(thin) <τ_0_0> (@owned @callee_owned () -> @out τ_0_0) -> @out τ_0_0
+  %15 = load [trivial] %13 : $*UInt8  // user: %19
+  dealloc_stack %13 : $*UInt8  // id: %16
+  dealloc_stack %6 : $*UInt8  // id: %17
+  dealloc_stack %0 : $*UInt8  // id: %18
+  return %15 : $UInt8  // id: %19
+}
+
+// reabstraction thunk helper from @callee_owned () -> (@out Swift.UInt8) to @callee_owned () -> (@unowned Swift.UInt8)
+sil shared [ossa] [transparent] @$ss5UInt8VIxr_ABIxd_TR : $@convention(thin) (@owned @callee_owned () -> @out UInt8) -> UInt8 {
+bb0(%0 : @owned $@callee_owned () -> @out UInt8):
+  %1 = alloc_stack $UInt8  // users: %2, %3, %4
+  %2 = apply %0(%1) : $@callee_owned () -> @out UInt8
+  %3 = load [trivial] %1 : $*UInt8  // user: %5
+  dealloc_stack %1 : $*UInt8  // id: %4
+  return %3 : $UInt8  // id: %5
+}
+
+// reabstraction thunk helper from @callee_owned () -> (@unowned Swift.UInt8) to @callee_owned () -> (@out Swift.UInt8)
+sil shared [ossa] [transparent] @$ss5UInt8VIxd_ABIxr_TR : $@convention(thin) (@owned @callee_owned () -> UInt8) -> @out UInt8 {
+bb0(%0 : $*UInt8, %1 : @owned $@callee_owned () -> UInt8):
+  %2 = apply %1() : $@callee_owned () -> UInt8  // user: %3
+  store %2 to [trivial] %0 : $*UInt8  // id: %3
+  %4 = tuple ()  // user: %5
+  return %4 : $()  // id: %5
+}
+
+
+class Base {
+}
+sil_vtable Base {
+}
+
+sil [ossa] @generic_upcast : $@convention(thin) <T where T : Base> (@owned T) -> @owned Base {
+bb0(%0 : @owned $T):
+  %2 = upcast %0 : $T to $Base
+  return %2 : $Base
+}
+
+sil [ossa] @specialize_generic_upcast : $@convention(thin) (@owned Base) -> @owned Base {
+bb0(%0 : @owned $Base):
+  %1 = function_ref @generic_upcast : $@convention(thin) <T where T : Base> (@owned T) -> @owned Base
+  %2 = apply %1<Base>(%0) : $@convention(thin) <T where T : Base> (@owned T) -> @owned Base
+  return %2 : $Base
+}
+
+// CHECK-LABEL: sil shared [ossa] @{{.*}}generic_upcast{{.*}}Tg5 : $@convention(thin) (@owned Base) -> @owned Base {
+// CHECK: bb0(%0 : @owned $Base):
+// CHECK: return %0 : $Base
+
+// Check generic specialization of partial_apply
+
+protocol P { func get() -> Int32 }
+
+struct C : P { func get() -> Int32 }
+
+// test4.C.get (test4.C)() -> Swift.Int32
+sil hidden [ossa] @C_get : $@convention(method) (C) -> Int32 {
+bb0(%0 : $C):
+  debug_value %0 : $C, let, name "self"  // id: %1
+  %2 = integer_literal $Builtin.Int32, 1  // user: %3
+  %3 = struct $Int32 (%2 : $Builtin.Int32)  // user: %4
+  return %3 : $Int32  // id: %4
+}
+
+// test4.C.init (test4.C.Type)() -> test4.C
+sil hidden [ossa] [noinline] @C_init : $@convention(thin) (@thin C.Type) -> C {
+bb0(%0 : $@thin C.Type):
+  %1 = alloc_stack $C, var, name "sf"  // user: %3
+  %2 = struct $C ()  // user: %4
+  dealloc_stack %1 : $*C  // id: %3
+  return %2 : $C  // id: %4
+}
+
+// protocol witness for test4.P.get (test4.P.Self)() -> Swift.Int32 in conformance test4.C : test4.P in test4
+sil hidden [ossa] [transparent] [thunk] @test4_P_get_witness_C : $@convention(witness_method: P) (@in_guaranteed C) -> Int32 {
+bb0(%0 : $*C):
+  %1 = load [trivial] %0 : $*C  // user: %3
+  // function_ref test4.C.get (test4.C)() -> Swift.Int32
+  %2 = function_ref @C_get : $@convention(method) (C) -> Int32 // user: %3
+  %3 = apply %2(%1) : $@convention(method) (C) -> Int32 // user: %4
+  return %3 : $Int32 // id: %4
+}
+
+// test4.boo (A) -> (Swift.Int32, B) -> Swift.Int32
+sil hidden [ossa] [noinline] @boo : $@convention(thin) <U, T where U : P> (@in U) -> @owned @callee_owned (Int32, @in T) -> Int32 {
+bb0(%0 : $*U):
+  debug_value_addr %0 : $*U, let, name "y" // id: %1
+  // function_ref test4.(boo (A) -> (Swift.Int32, B) -> Swift.Int32).(closure #1)
+  %2 = function_ref @boo_closure : $@convention(thin) <τ_0_0, τ_0_1 where τ_0_0 : P> (Int32, @in τ_0_1, @owned <τ_0_0> { var τ_0_0 } <τ_0_0>) -> Int32 // user: %5
+  %3 = alloc_box $<τ_0_0> { var τ_0_0 } <U> // users: %4, %5, %5
+  %3a = project_box %3 : $<τ_0_0> { var τ_0_0 } <U>, 0
+  copy_addr %0 to [initialization] %3a : $*U // id: %4
+  %5 = partial_apply %2<U, T>(%3) : $@convention(thin) <τ_0_0, τ_0_1 where τ_0_0 : P> (Int32, @in τ_0_1, @owned <τ_0_0> { var τ_0_0 } <τ_0_0>) -> Int32 // user: %7
+  destroy_addr %0 : $*U // id: %6
+  return %5 : $@callee_owned (Int32, @in T) -> Int32 // id: %7
+}
+
+// test4.(boo (A) -> (Swift.Int32, B) -> Swift.Int32).(closure #1)
+sil shared [ossa] [noinline] @boo_closure : $@convention(thin) <U, T where U : P> (Int32, @in T, @owned <τ_0_0> { var τ_0_0 } <U>) -> Int32 {
+bb0(%0 : $Int32, %1 : $*T, %2 : @owned $<τ_0_0> { var τ_0_0 } <U>):
+  %3 = project_box %2 : $<τ_0_0> { var τ_0_0 } <U>, 0
+  debug_value %0 : $Int32, let, name "x" // id: %4
+  debug_value_addr %1 : $*T, let, name "z" // id: %5
+  %6 = witness_method $U, #P.get : $@convention(witness_method: P) <τ_0_0 where τ_0_0 : P> (@in_guaranteed τ_0_0) -> Int32 // user: %7
+  %7 = apply %6<U>(%3) : $@convention(witness_method: P) <τ_0_0 where τ_0_0 : P> (@in_guaranteed τ_0_0) -> Int32 // user: %8
+  %8 = struct_extract %7 : $Int32, #Int32._value // user: %11
+  %9 = struct_extract %0 : $Int32, #Int32._value // user: %11
+  %10 = integer_literal $Builtin.Int1, -1 // user: %11
+  %11 = builtin "sadd_with_overflow_Int32"(%8 : $Builtin.Int32, %9 : $Builtin.Int32, %10 : $Builtin.Int1) : $(Builtin.Int32, Builtin.Int1) // users: %12, %13
+  %12 = tuple_extract %11 : $(Builtin.Int32, Builtin.Int1), 0 // user: %15
+  %13 = tuple_extract %11 : $(Builtin.Int32, Builtin.Int1), 1 // user: %14
+  cond_fail %13 : $Builtin.Int1 // id: %14
+  %15 = struct $Int32 (%12 : $Builtin.Int32) // user: %18
+  destroy_value %2 : $<τ_0_0> { var τ_0_0 } <U> // id: %16
+  destroy_addr %1 : $*T // id: %17
+  return %15 : $Int32 // id: %18
+}
+
+// static Swift.+ infix (Swift.Int32, Swift.Int32) -> Swift.Int32
+sil public_external [ossa] [transparent] [serialized] @$ss1poiys5Int32VAC_ACtFZ : $@convention(thin) (Int32, Int32) -> Int32 {
+bb0(%0 : $Int32, %1 : $Int32):
+  %2 = struct_extract %0 : $Int32, #Int32._value // user: %5
+  %3 = struct_extract %1 : $Int32, #Int32._value // user: %5
+  %4 = integer_literal $Builtin.Int1, -1 // user: %5
+  %5 = builtin "sadd_with_overflow_Int32"(%2 : $Builtin.Int32, %3 : $Builtin.Int32, %4 : $Builtin.Int1) : $(Builtin.Int32, Builtin.Int1) // users: %6, %7
+  %6 = tuple_extract %5 : $(Builtin.Int32, Builtin.Int1), 0 // user: %9
+  %7 = tuple_extract %5 : $(Builtin.Int32, Builtin.Int1), 1 // user: %8
+  cond_fail %7 : $Builtin.Int1 // id: %8
+  %9 = struct $Int32 (%6 : $Builtin.Int32) // user: %10
+  return %9 : $Int32 // id: %10
+}
+
+// test4.foo (A, B) -> (Swift.Int32, Swift.Float) -> Swift.Int32
+sil hidden [ossa] [noinline] @foo : $@convention(thin) <T, U where T : P, U : P> (@in T, @in U) -> @owned @callee_owned (Int32, Float) -> Int32 {
+bb0(%0 : $*T, %1 : $*U):
+  debug_value_addr %0 : $*T, let, name "x" // id: %2
name "x" // id: %2 + debug_value_addr %1 : $*U, let, name "y" // id: %3 + // function_ref test4.boo (A) -> (Swift.Int32, B) -> Swift.Int32 + %4 = function_ref @boo : $@convention(thin) <τ_0_0, τ_0_1 where τ_0_0 : P> (@in τ_0_0) -> @owned @callee_owned (Int32, @in τ_0_1) -> Int32 // user: %7 + %5 = alloc_stack $U // users: %6, %7, %10 + copy_addr %1 to [initialization] %5 : $*U // id: %6 + %7 = apply %4(%5) : $@convention(thin) <τ_0_0, τ_0_1 where τ_0_0 : P> (@in τ_0_0) -> @owned @callee_owned (Int32, @in τ_0_1) -> Int32 // user: %9 + // function_ref reabstraction thunk helper from @callee_owned (@unowned Swift.Int32, @in Swift.Float) -> (@unowned Swift.Int32) to @callee_owned (@unowned Swift.Int32, @unowned Swift.Float) -> (@unowned Swift.Int32) + %8 = function_ref @_TTRG1_RPq_P5test41P_Pq0_PS0___XFo_dVs5Int32iSf_dS1__XFo_dS1_dSf_dS1__ : $@convention(thin) <τ_0_0, τ_0_1 where τ_0_0 : P, τ_0_1 : P> (Int32, Float, @owned @callee_owned (Int32, @in Float) -> Int32) -> Int32 // user: %9 + %9 = partial_apply %8(%7) : $@convention(thin) <τ_0_0, τ_0_1 where τ_0_0 : P, τ_0_1 : P> (Int32, Float, @owned @callee_owned (Int32, @in Float) -> Int32) -> Int32 // user: %13 + dealloc_stack %5 : $*U // id: %10 + destroy_addr %1 : $*U // id: %11 + destroy_addr %0 : $*T // id: %12 + return %9 : $@callee_owned (Int32, Float) -> Int32 // id: %13 +} + +// reabstraction thunk helper from @callee_owned (@unowned Swift.Int32, @in Swift.Float) -> (@unowned Swift.Int32) to @callee_owned (@unowned Swift.Int32, @unowned Swift.Float) -> (@unowned Swift.Int32) +sil shared [ossa] [transparent] [thunk] @_TTRG1_RPq_P5test41P_Pq0_PS0___XFo_dVs5Int32iSf_dS1__XFo_dS1_dSf_dS1__ : $@convention(thin) (Int32, Float, @owned @callee_owned (Int32, @in Float) -> Int32) -> Int32 { +bb0(%0 : $Int32, %1 : $Float, %2 : @owned $@callee_owned (Int32, @in Float) -> Int32): + %3 = alloc_stack $Float // users: %4, %5, %6 + store %1 to [trivial] %3 : $*Float // id: %4 + %5 = apply %2(%0, %3) : $@callee_owned (Int32, @in Float) -> Int32 // user: %7 + dealloc_stack %3 : $*Float // id: %6 + return %5 : $Int32 // id: %7 +} + +// test4.gen1 (A) -> (Swift.Int32) -> Swift.Int32 +sil hidden [ossa] [noinline] @gen1 : $@convention(thin) (@in T) -> @owned @callee_owned (Int32) -> Int32 { +bb0(%0 : $*T): + debug_value_addr %0 : $*T, let, name "x" // id: %1 + // function_ref test4.(gen1 (A) -> (Swift.Int32) -> Swift.Int32).(closure #1) + %2 = function_ref @gen1_closure : $@convention(thin) <τ_0_0 where τ_0_0 : P> (Int32, @owned <τ_0_0> { var τ_0_0 } <τ_0_0>) -> Int32 // user: %5 + %3 = alloc_box $<τ_0_0> { var τ_0_0 } // users: %4, %5, %5 + %3a = project_box %3 : $<τ_0_0> { var τ_0_0 } , 0 + copy_addr %0 to [initialization] %3a : $*T // id: %4 + %5 = partial_apply %2(%3) : $@convention(thin) <τ_0_0 where τ_0_0 : P> (Int32, @owned <τ_0_0> { var τ_0_0 } <τ_0_0>) -> Int32 // user: %7 + destroy_addr %0 : $*T // id: %6 + return %5 : $@callee_owned (Int32) -> Int32 // id: %7 +} + +// test4.(gen1 (A) -> (Swift.Int32) -> Swift.Int32).(closure #1) +sil shared [ossa] [noinline] @gen1_closure : $@convention(thin) (Int32, @owned <τ_0_0> { var τ_0_0 } ) -> Int32 { +bb0(%0 : $Int32, %1 : @owned $<τ_0_0> { var τ_0_0 } ): + %2 = project_box %1 : $<τ_0_0> { var τ_0_0 } , 0 + debug_value %0 : $Int32 , let, name "$0" // id: %3 + %4 = witness_method $T, #P.get : $@convention(witness_method: P) <τ_0_0 where τ_0_0 : P> (@in_guaranteed τ_0_0) -> Int32 // user: %5 + %5 = apply %4(%2) : $@convention(witness_method: P) <τ_0_0 where τ_0_0 : P> (@in_guaranteed τ_0_0) -> Int32 // user: 
+  %6 = struct_extract %5 : $Int32, #Int32._value // user: %9
+  %7 = struct_extract %0 : $Int32, #Int32._value // user: %9
+  %8 = integer_literal $Builtin.Int1, -1 // user: %9
+  %9 = builtin "sadd_with_overflow_Int32"(%6 : $Builtin.Int32, %7 : $Builtin.Int32, %8 : $Builtin.Int1) : $(Builtin.Int32, Builtin.Int1) // users: %10, %11
+  %10 = tuple_extract %9 : $(Builtin.Int32, Builtin.Int1), 0 // user: %13
+  %11 = tuple_extract %9 : $(Builtin.Int32, Builtin.Int1), 1 // user: %12
+  cond_fail %11 : $Builtin.Int1 // id: %12
+  %13 = struct $Int32 (%10 : $Builtin.Int32) // user: %15
+  destroy_value %1 : $<τ_0_0> { var τ_0_0 } <T> // id: %14
+  return %13 : $Int32 // id: %15
+}
+
+// check that there is a generic specialization of boo
+// CHECK-LABEL: sil shared [noinline] [ossa] @$s3booxs5Int32VSfACIexyid_4main1PRzSfRs_r0_lIetio_Tp5AD1CV_Tg5 : $@convention(thin) (C) -> @owned @callee_owned (Int32, @in Float) -> Int32
+// CHECK: [[CLOSURE_SPECIALIZATION:%[0-9]+]] = function_ref @$s11boo_closures5Int32VSfxz_x_lXXAC4main1PRzSfRs_r0_lIetyyxd_TP5AD1CV_TG5
+// CHECK: partial_apply [[CLOSURE_SPECIALIZATION:%[0-9]+]]
+// CHECK: return
+
+// Check that there is a generic specialization of a closure from boo
+// CHECK-LABEL: sil shared [noinline] [ossa] @$s11boo_closures5Int32VSfxz_x_lXXAC4main1PRzSfRs_r0_lIetyyxd_Tp5AD1CV_Tg5
+// CHECK: return
+
+// Check that there is a generic specialization of foo
+// CHECK-LABEL: sil shared [noinline] [ossa] @$s3foo4main1CV_ADTg5 : $@convention(thin) (C, C) -> @owned @callee_owned (Int32, Float) -> Int32
+// CHECK: function_ref @$s3booxs5Int32VSfACIexyid_4main1PRzSfRs_r0_lIetio_Tp5AD1CV_Tg5
+// check that it invokes a generic specialization of the reabstraction thunk helper, which invokes a specialization of boo
+// CHECK: [[THUNK_SPECIALIZATION:%[0-9]+]] = function_ref @$s053_TTRG1_RPq_P5test41P_Pq0_PS0___XFo_dVs5Int32iSf_dS1__f2_dj2_di2_dJ2__4main1CV_ADTg5
+// CHECK-NOT: apply
+// CHECK: partial_apply [[THUNK_SPECIALIZATION]]
+// CHECK-NOT: apply
+// CHECK: return
+
+
+// Check that there is a generic specialization of gen1
+// CHECK-LABEL: sil shared [noinline] [ossa] @$s4gen14main1CV_Tg5 : $@convention(thin) (C) -> @owned @callee_owned (Int32) -> Int32
+// check that it invokes a generic specialization of the closure by means of partial_apply
+// CHECK: [[CLOSURE_SPECIALIZATION:%[0-9]+]] = function_ref @$s12gen1_closure4main1CV_Tg5
+// CHECK-NOT: apply
+// CHECK: partial_apply [[CLOSURE_SPECIALIZATION]]
+// CHECK-NOT: apply
+// CHECK: return
+
+// Check that there is a generic specialization of a closure from gen1
+// CHECK-LABEL: sil shared [noinline] [ossa] @$s12gen1_closure4main1CV_Tg5 : $@convention(thin) (Int32, @owned <τ_0_0> { var τ_0_0 } <C>) -> Int32
+// CHECK: return
+
+
+
+// test4.bar () -> Swift.Int32
+// CHECK-LABEL: sil hidden [ossa] @bar
+// check that it does not invoke the unspecialized generic foo
+// CHECK-NOT: function_ref @foo
+// check that it invokes a generic specialization of foo
+// CHECK: function_ref @$s3foo4main1CV_ADTg5
+sil hidden [ossa] @bar : $@convention(thin) () -> Int32 {
+bb0:
+  %0 = alloc_stack $@callee_owned (Int32, Float) -> Int32, var, name "f" // users: %11, %22
+  // function_ref test4.C.init (test4.C.Type)() -> test4.C
+  %1 = function_ref @C_init : $@convention(thin) (@thin C.Type) -> C // user: %3
+  %2 = metatype $@thin C.Type // user: %3
+  %3 = apply %1(%2) : $@convention(thin) (@thin C.Type) -> C // users: %4, %7, %9
+  debug_value %3 : $C, let, name "c" // id: %4
+  // function_ref test4.foo (A, B) -> (Swift.Int32, Swift.Float) -> Swift.Int32
+  %5 = function_ref @foo : $@convention(thin) <τ_0_0, τ_0_1 where τ_0_0 : P, τ_0_1 : P> (@in τ_0_0, @in τ_0_1) -> @owned @callee_owned (Int32, Float) -> Int32 // user: %10
+  %6 = alloc_stack $C // users: %7, %10, %13
+  store %3 to [trivial] %6 : $*C // id: %7
+  %8 = alloc_stack $C // users: %9, %10, %12
+  store %3 to [trivial] %8 : $*C // id: %9
+  %10 = apply %5<C, C>(%6, %8) : $@convention(thin) <τ_0_0, τ_0_1 where τ_0_0 : P, τ_0_1 : P> (@in τ_0_0, @in τ_0_1) -> @owned @callee_owned (Int32, Float) -> Int32 // users: %11, %14, %20, %21
+  store %10 to [init] %0 : $*@callee_owned (Int32, Float) -> Int32 // id: %11
+  dealloc_stack %8 : $*C // id: %12
+  dealloc_stack %6 : $*C // id: %13
+  %11 = load [take] %0 : $*@callee_owned (Int32, Float) -> Int32 // id: %14
+  %15 = integer_literal $Builtin.Int32, 3 // user: %16
+  %16 = struct $Int32 (%15 : $Builtin.Int32) // user: %20
+  %17 = float_literal $Builtin.FPIEEE80, 0x4000C8F5C28F5C28F5C3 // 3.1400000000000000001 // user: %18
+  %18 = builtin "fptrunc_FPIEEE80_FPIEEE32"(%17 : $Builtin.FPIEEE80) : $Builtin.FPIEEE32 // user: %19
+  %19 = struct $Float (%18 : $Builtin.FPIEEE32) // user: %20
+  %20 = apply %11(%16, %19) : $@callee_owned (Int32, Float) -> Int32 // user: %23
+  dealloc_stack %0 : $*@callee_owned (Int32, Float) -> Int32 // id: %22
+  return %20 : $Int32 // id: %23
+}
+
+// test4.testBar () -> Swift.Int32
+sil [ossa] @testBar : $@convention(thin) () -> Int32 {
+bb0:
+  // function_ref test4.bar () -> Swift.Int32
+  %0 = function_ref @bar : $@convention(thin) () -> Int32 // user: %1
+  %1 = apply %0() : $@convention(thin) () -> Int32 // user: %2
+  return %1 : $Int32 // id: %2
+}
+
+// CHECK-LABEL: sil [ossa] @testGen1
+// Call of C_init
+// CHECK: function_ref @C_init
+// CHECK: apply
+// Reference to the generic specialization of gen1
+// CHECK-NOT: function_ref @gen1
+// CHECK: function_ref @$s4gen14main1CV_Tg5 : $@convention(thin) (C) -> @owned @callee_owned (Int32) -> Int32
+sil [ossa] @testGen1 : $@convention(thin) () -> Int32 {
+bb0:
+  %0 = alloc_stack $@callee_owned (Int32) -> Int32, var, name "f" // users: %9, %16
+  // function_ref test4.C.init (test4.C.Type)() -> test4.C
+  %1 = function_ref @C_init : $@convention(thin) (@thin C.Type) -> C // user: %3
+  %2 = metatype $@thin C.Type // user: %3
+  %3 = apply %1(%2) : $@convention(thin) (@thin C.Type) -> C // users: %4, %7
+  debug_value %3 : $C, let, name "c" // id: %4
+  // function_ref test4.gen1 (A) -> (Swift.Int32) -> Swift.Int32
+  %5 = function_ref @gen1 : $@convention(thin) <τ_0_0 where τ_0_0 : P> (@in τ_0_0) -> @owned @callee_owned (Int32) -> Int32 // user: %8
+  %6 = alloc_stack $C // users: %7, %8, %10
+  store %3 to [trivial] %6 : $*C // id: %7
+  %8 = apply %5<C>(%6) : $@convention(thin) <τ_0_0 where τ_0_0 : P> (@in τ_0_0) -> @owned @callee_owned (Int32) -> Int32 // users: %9, %11, %14, %15
+  store %8 to [init] %0 : $*@callee_owned (Int32) -> Int32 // id: %9
+  dealloc_stack %6 : $*C // id: %10
+  %8a = load [take] %0 : $*@callee_owned (Int32) -> Int32 // id: %11
+  %12 = integer_literal $Builtin.Int32, 3 // user: %13
+  %13 = struct $Int32 (%12 : $Builtin.Int32) // user: %14
+  %14 = apply %8a(%13) : $@callee_owned (Int32) -> Int32 // user: %17
+  dealloc_stack %0 : $*@callee_owned (Int32) -> Int32 // id: %16
+  return %14 : $Int32 // id: %17
+}
+
+// test_bind (Builtin.RawPointer, A.Type) -> ()
+// Check that this is specialized as T=Int.
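+//
+// For reference, bind_memory is roughly what a Swift-level
+// UnsafeMutableRawPointer.bindMemory(to:capacity:) call lowers to. A
+// hypothetical Swift shape of test_bind (illustrative only, not part of
+// the original test):
+//
+//   func test_bind<T>(_ p: UnsafeMutableRawPointer, _ type: T.Type) {
+//     _ = p.bindMemory(to: type, capacity: 1)
+//   }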
+// CHECK-LABEL: sil shared [ossa] @$s9test_bindSi_Tg5 : $@convention(thin) (Builtin.RawPointer, @thick Int.Type) -> ()
+// CHECK: bind_memory %0 : $Builtin.RawPointer, {{%.*}} : $Builtin.Word to $*Int
+// CHECK: return
+sil hidden [ossa] @test_bind : $@convention(thin) <T> (Builtin.RawPointer, @thick T.Type) -> () {
+bb0(%0 : $Builtin.RawPointer, %1 : $@thick T.Type):
+  %4 = integer_literal $Builtin.Word, 1
+  %5 = metatype $@thick T.Type
+  bind_memory %0 : $Builtin.RawPointer, %4 : $Builtin.Word to $*T
+  %7 = tuple ()
+  %8 = tuple ()
+  return %8 : $()
+}
+
+// Invoke test_bind with T=Int.
+sil [ossa] @call_bind : $@convention(thin) (Builtin.RawPointer) -> () {
+bb0(%0 : $Builtin.RawPointer):
+  // function_ref test_bind (Builtin.RawPointer, A.Type) -> ()
+  %2 = function_ref @test_bind : $@convention(thin) <τ_0_0> (Builtin.RawPointer, @thick τ_0_0.Type) -> ()
+  %3 = metatype $@thick Int.Type
+  %4 = apply %2<Int>(%0, %3) : $@convention(thin) <τ_0_0> (Builtin.RawPointer, @thick τ_0_0.Type) -> ()
+  %5 = tuple ()
+  return %5 : $()
+}
+
+// invokeGenericClosure(todo:)
+sil [ossa] [noinline] @invokeGenericClosure : $@convention(thin) <R> (@owned @callee_owned () -> (@out R, @error Error)) -> (@out R, @error Error) {
+bb0(%0 : $*R, %1 : @owned $@callee_owned () -> (@out R, @error Error)):
+  debug_value %1 : $@callee_owned () -> (@out R, @error Error), let, name "todo", argno 1 // id: %2
+  debug_value undef : $Error, var, name "$error", argno 2 // id: %3
+  %1a = copy_value %1 : $@callee_owned () -> (@out R, @error Error) // id: %4
+  try_apply %1a(%0) : $@callee_owned () -> (@out R, @error Error), normal bb1, error bb2 // id: %5
+
+bb1(%6 : $()): // Preds: bb0
+  destroy_value %1 : $@callee_owned () -> (@out R, @error Error) // id: %7
+  %8 = tuple () // user: %9
+  return %8 : $() // id: %9
+
+// %10 // user: %12
+bb2(%10 : @owned $Error): // Preds: bb0
+  destroy_value %1 : $@callee_owned () -> (@out R, @error Error) // id: %11
+  throw %10 : $Error // id: %12
+} // end sil function 'invokeGenericClosure'
+
+sil public_external @error : $@convention(thin) () -> Never
+
+// action()
+sil @action : $@convention(thin) () -> Never
+
+// thunk for @callee_owned () -> (@unowned Never, @error @owned Error)
+sil @action_thunk : $@convention(thin) (@owned @callee_owned () -> (Never, @error Error)) -> (@out Never, @error Error)
+
+// Check that in a case where a generic specialization is a no-return function,
+// the return value is not stored after the call and an unreachable instruction
+// is inserted as a terminator of a basic block.
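+//
+// A rough Swift-level shape of the scenario below (hypothetical names,
+// for reference only):
+//
+//   func invokeGenericClosure<R>(todo: () throws -> R) rethrows -> R {
+//     return try todo()
+//   }
+//
+// Specializing at R == Never means the apply can never return normally,
+// so the caller's block must be terminated with unreachable.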
+//
+// CHECK-LABEL: sil [ossa] @testGenericClosureSpecialization
+// Call of the generic specialization of invokeGenericClosure
+// CHECK: function_ref @$s20invokeGenericClosures5NeverO_Tg5 : $@convention(thin) (@owned @callee_owned () -> (@out Never, @error Error)) -> (Never, @error Error)
+// CHECK: apply [nothrow]
+// CHECK: unreachable
+// CHECK: end sil function 'testGenericClosureSpecialization'
+sil [ossa] @testGenericClosureSpecialization : $@convention(thin) () -> @error Error {
+bb0:
+  // function_ref invokeGenericClosure(todo:)
+  %1 = function_ref @invokeGenericClosure : $@convention(thin) <τ_0_0> (@owned @callee_owned () -> (@out τ_0_0, @error Error)) -> (@out τ_0_0, @error Error)
+  %2 = alloc_stack $Never
+  // function_ref action()
+  %3 = function_ref @action : $@convention(thin) () -> Never
+  %4 = thin_to_thick_function %3 : $@convention(thin) () -> Never to $@callee_owned () -> Never
+  %5 = convert_function %4 : $@callee_owned () -> Never to $@callee_owned () -> (Never, @error Error)
+  // function_ref thunk for @callee_owned () -> (@unowned Never, @error @owned Error)
+  %6 = function_ref @action_thunk : $@convention(thin) (@owned @callee_owned () -> (Never, @error Error)) -> (@out Never, @error Error)
+  %7 = partial_apply %6(%5) : $@convention(thin) (@owned @callee_owned () -> (Never, @error Error)) -> (@out Never, @error Error)
+  %8 = apply [nothrow] %1<Never>(%2, %7) : $@convention(thin) <τ_0_0> (@owned @callee_owned () -> (@out τ_0_0, @error Error)) -> (@out τ_0_0, @error Error)
+  unreachable
+} // end sil function 'testGenericClosureSpecialization'
+
+// Test a specialization of a self-recursive generic closure.
+
+// CHECK-LABEL: sil shared [ossa] @$s27selfReferringGenericClosurexBi64_Bi64_Bi64_Rs_r0_lIetnyy_Tp5 : $@convention(thin) <τ_0_0, τ_0_1 where τ_0_1 == Builtin.Int64> (@in_guaranteed τ_0_0, Builtin.Int64, Builtin.Int64) -> () {
+// CHECK: [[SPECIALIZED_FN:%[0-9]+]] = function_ref @$s27selfReferringGenericClosurexBi64_Bi64_Bi64_Rs_r0_lIetnyy_Tp5
+// CHECK: partial_apply [[SPECIALIZED_FN]]{{.*}}({{.*}}) : $@convention(thin) <τ_0_0, τ_0_1 where τ_0_1 == Builtin.Int64> (@in_guaranteed τ_0_0, Builtin.Int64, Builtin.Int64) -> ()
+
+// CHECK-LABEL: sil [ossa] @selfReferringGenericClosure : $@convention(thin) <R, S> (@in_guaranteed R, @in_guaranteed S, Builtin.Int64) -> ()
+// Refer to the specialized version of the function
+// CHECK: [[SPECIALIZED_FN:%[0-9]+]] = function_ref @$s27selfReferringGenericClosurexBi64_Bi64_Bi64_Rs_r0_lIetnyy_Tp5
+// CHECK: partial_apply [[SPECIALIZED_FN]]({{.*}}) : $@convention(thin) <τ_0_0, τ_0_1 where τ_0_1 == Builtin.Int64> (@in_guaranteed τ_0_0, Builtin.Int64, Builtin.Int64) -> ()
+sil [ossa] @selfReferringGenericClosure : $@convention(thin) <R, S> (@in_guaranteed R, @in_guaranteed S, Builtin.Int64) -> () {
+bb0(%0 : $*R, %1 : $*S, %2 : $Builtin.Int64):
+  %4 = integer_literal $Builtin.Int64, 100
+  %5 = builtin "cmp_eq_Int64"(%2 : $Builtin.Int64, %4 : $Builtin.Int64) : $Builtin.Int1
+  cond_br %5, bb2, bb1
+
+bb1:
+  %val_storage = alloc_stack $Builtin.Int64
+  %val = integer_literal $Builtin.Int64, 4
+  store %val to [trivial] %val_storage : $*Builtin.Int64
+  %fn = function_ref @selfReferringGenericClosure : $@convention(thin) <U, V> (@in_guaranteed U, @in_guaranteed V, Builtin.Int64) -> ()
+  %7 = partial_apply %fn<R, Builtin.Int64>(%0, %val_storage, %4) : $@convention(thin) <U, V> (@in_guaranteed U, @in_guaranteed V, Builtin.Int64) -> ()
+  dealloc_stack %val_storage : $*Builtin.Int64
+  destroy_value %7 : $@callee_owned () -> ()
+  br bb3
+
+bb2:
+  br bb3
+
+bb3:
+  %8 = tuple ()
+  return %8 : $()
+}
+
+sil [ossa] @selfReferringGenericClosure_nontrivial_guaranteed : $@convention(thin) <R, S> (@in_guaranteed R, @in_guaranteed S, @guaranteed Builtin.NativeObject) -> () {
+bb0(%0 : $*R, %1 : $*S, %2 : @guaranteed $Builtin.NativeObject):
+  cond_br undef, bb2, bb1
+
+bb1:
+  %val_storage = alloc_stack $Builtin.NativeObject
+  %val = copy_value %2 : $Builtin.NativeObject
+  %val2 = copy_value %2 : $Builtin.NativeObject
+  store %val to [init] %val_storage : $*Builtin.NativeObject
+  %fn = function_ref @selfReferringGenericClosure_nontrivial_guaranteed : $@convention(thin) <U, V> (@in_guaranteed U, @in_guaranteed V, @guaranteed Builtin.NativeObject) -> ()
+  %7 = partial_apply %fn<R, Builtin.NativeObject>(%0, %val_storage, %val2) : $@convention(thin) <U, V> (@in_guaranteed U, @in_guaranteed V, @guaranteed Builtin.NativeObject) -> ()
+  dealloc_stack %val_storage : $*Builtin.NativeObject
+  destroy_value %7 : $@callee_owned () -> ()
+  br bb3
+
+bb2:
+  br bb3
+
+bb3:
+  %8 = tuple ()
+  return %8 : $()
+}
+
+sil [ossa] @selfReferringGenericClosure_nontrivial_guaranteed_applied : $@convention(thin) <R, S> (@in_guaranteed R, @in_guaranteed S, @guaranteed Builtin.NativeObject) -> () {
+bb0(%0 : $*R, %1 : $*S, %2 : @guaranteed $Builtin.NativeObject):
+  cond_br undef, bb2, bb1
+
+bb1:
+  %val_storage = alloc_stack $Builtin.NativeObject
+  %val = copy_value %2 : $Builtin.NativeObject
+  %val2 = copy_value %2 : $Builtin.NativeObject
+  store %val to [init] %val_storage : $*Builtin.NativeObject
+  %fn = function_ref @selfReferringGenericClosure_nontrivial_guaranteed_applied : $@convention(thin) <U, V> (@in_guaranteed U, @in_guaranteed V, @guaranteed Builtin.NativeObject) -> ()
+  %7 = partial_apply %fn<R, Builtin.NativeObject>(%0, %val_storage, %val2) : $@convention(thin) <U, V> (@in_guaranteed U, @in_guaranteed V, @guaranteed Builtin.NativeObject) -> ()
+  apply %7() : $@callee_owned () -> ()
+  dealloc_stack %val_storage : $*Builtin.NativeObject
+  br bb3
+
+bb2:
+  br bb3
+
+bb3:
+  %8 = tuple ()
+  return %8 : $()
+}
+
+//----
+
+// CHECK-LABEL: sil [ossa] @selfReferringGenericClosure_nontrivial_owned : $@convention(thin) <R, S> (@in R, @in S, @owned Builtin.NativeObject) -> () {
+// CHECK: [[FUNC:%.*]] = function_ref @$s44selfReferringGenericClosure_nontrivial_ownedxBoBoBoRs_r0_lIetixx_Tp5 : $@convention(thin) <τ_0_0, τ_0_1 where τ_0_1 == Builtin.NativeObject> (@in τ_0_0, @owned Builtin.NativeObject, @owned Builtin.NativeObject) -> ()
+// CHECK: [[PAI:%.*]] = partial_apply [[FUNC]]<{{.*}}>({{.*}})
+// CHECK: destroy_value [[PAI]]
+// CHECK: } // end sil function 'selfReferringGenericClosure_nontrivial_owned'
+sil [ossa] @selfReferringGenericClosure_nontrivial_owned : $@convention(thin) <R, S> (@in R, @in S, @owned Builtin.NativeObject) -> () {
+bb0(%0 : $*R, %1 : $*S, %2 : @owned $Builtin.NativeObject):
+  cond_br undef, bb2, bb1
+
+bb1:
+  %val_storage = alloc_stack $Builtin.NativeObject
+  %val = copy_value %2 : $Builtin.NativeObject
+  store %val to [init] %val_storage : $*Builtin.NativeObject
+  %fn = function_ref @selfReferringGenericClosure_nontrivial_owned : $@convention(thin) <U, V> (@in U, @in V, @owned Builtin.NativeObject) -> ()
+  %7 = partial_apply %fn<R, Builtin.NativeObject>(%0, %val_storage, %2) : $@convention(thin) <U, V> (@in U, @in V, @owned Builtin.NativeObject) -> ()
+  dealloc_stack %val_storage : $*Builtin.NativeObject
+  destroy_value %7 : $@callee_owned () -> ()
+  br bb3
+
+bb2:
+  destroy_value %2 : $Builtin.NativeObject
+  destroy_addr %0 : $*R
+  br bb3
+
+bb3:
+  destroy_addr %1 : $*S
+  %8 = tuple ()
+  return %8 : $()
+}
+
+sil [ossa] @selfReferringGenericClosure_nontrivial_owned_applied : $@convention(thin) <R, S> (@in R, @in S, @owned Builtin.NativeObject) -> () {
+bb0(%0 : $*R, %1 : $*S, %2 : @owned $Builtin.NativeObject):
+  cond_br undef, bb2, bb1
+
+bb1:
+  %val_storage = alloc_stack $Builtin.NativeObject
+  %val = copy_value %2 : $Builtin.NativeObject
+  store %val to [init] %val_storage : $*Builtin.NativeObject
+  %fn = function_ref @selfReferringGenericClosure_nontrivial_owned_applied : $@convention(thin) <U, V> (@in U, @in V, @owned Builtin.NativeObject) -> ()
+  %7 = partial_apply %fn<R, Builtin.NativeObject>(%0, %val_storage, %2) : $@convention(thin) <U, V> (@in U, @in V, @owned Builtin.NativeObject) -> ()
+  apply %7() : $@callee_owned () -> ()
+  dealloc_stack %val_storage : $*Builtin.NativeObject
+  br bb3
+
+bb2:
+  destroy_value %2 : $Builtin.NativeObject
+  destroy_addr %0 : $*R
+  br bb3
+
+bb3:
+  destroy_addr %1 : $*S
+  %8 = tuple ()
+  return %8 : $()
+}
+
+struct YYY<T> {
+}
+
+enum MyOptional<T> {
+  case none
+  case some(T)
+}
+
+// Check that a specialization of a self-recursive function is produced
+// and it is not crashing the compiler.
+// CHECK-LABEL: sil shared [ossa] @$s25testSelfRecursiveFunction4main10MyOptionalOyAB3YYYVyypGG_Tg5 : $@convention(thin) (MyOptional<YYY<Any>>) -> ()
+sil [ossa] @testSelfRecursiveFunction : $@convention(thin) <T> (@in T) -> () {
+bb0(%0 : $*T):
+  %2 = function_ref @testSelfRecursiveFunction : $@convention(thin) <τ_0_0> (@in τ_0_0) -> ()
+  %3 = alloc_stack $MyOptional<YYY<Any>>
+  inject_enum_addr %3 : $*MyOptional<YYY<Any>>, #MyOptional.none!enumelt
+  %5 = tuple ()
+  %6 = load [trivial] %3 : $*MyOptional<YYY<Any>>
+  %7 = alloc_stack $MyOptional<YYY<Any>>
+  store %6 to [trivial] %7 : $*MyOptional<YYY<Any>>
+  %9 = apply %2<MyOptional<YYY<Any>>>(%7) : $@convention(thin) <τ_0_0> (@in τ_0_0) -> ()
+  dealloc_stack %7 : $*MyOptional<YYY<Any>>
+  dealloc_stack %3 : $*MyOptional<YYY<Any>>
+  destroy_addr %0 : $*T
+  %13 = tuple ()
+  return %13 : $()
+} // end sil function 'testSelfRecursiveFunction'
+
+sil [ossa] @id : $@convention(thin) <T> (@in T) -> @out T {
+bb0(%0 : $*T, %1 : $*T):
+  copy_addr [take] %1 to [initialization] %0 : $*T
+  %t = tuple ()
+  return %t : $()
+}
+
+// This should not assert.
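+//
+// The Swift-level shape is roughly the identity function specialized at an
+// uninhabited type (illustrative sketch only):
+//
+//   func id<T>(_ t: T) -> T { return t }
+//   func caller() { _ = id(fatalError()) } // T == Never; never returns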
+// CHECK-LABEL: sil shared [ossa] @$s26specialize_no_return_applys5NeverO_Tg5
+// CHECK: apply
+// CHECK-NEXT: unreachable
+
+sil [ossa] @specialize_no_return_apply : $@convention(thin) <T> (@thick T.Type) -> () {
+bb0(%0 : $@thick T.Type):
+  %in = alloc_stack $T
+  copy_addr [take] undef to [initialization] %in : $*T
+  %out = alloc_stack $T
+  %f = function_ref @id : $@convention(thin) <T> (@in T) -> @out T
+  %r = apply %f<T>(%out, %in) : $@convention(thin) <T> (@in T) -> @out T
+  destroy_addr %out : $*T
+  dealloc_stack %out : $*T
+  dealloc_stack %in : $*T
+  %t = tuple ()
+  return %t : $()
+}
+
+sil [ossa] @test_specialize_noreturn_apply : $@convention(thin) () -> () {
+bb0:
+  %f = function_ref @specialize_no_return_apply : $@convention(thin) <T> (@thick T.Type) -> ()
+  %m = metatype $@thick Never.Type
+  %r = apply %f<Never>(%m) : $@convention(thin) <T> (@thick T.Type) -> ()
+  %t = tuple ()
+  return %t : $()
+}
+
+////////////////////
+// TryApply Tests //
+////////////////////
+
+sil @getError : $@convention(thin) () -> @owned Error
+
+sil [ossa] @generic_try_apply_callee2 : $@convention(thin) <T> (@in_guaranteed T) -> @error Error {
+bb0(%0 : $*T):
+  cond_br undef, bb1, bb2
+
+bb1:
+  %f = function_ref @getError : $@convention(thin) () -> @owned Error
+  %e = apply %f() : $@convention(thin) () -> @owned Error
+  throw %e : $Error
+
+bb2:
+  %9999 = tuple()
+  return %9999 : $()
+}
+
+sil [ossa] @generic_try_apply_callee : $@convention(thin) <T> (@in_guaranteed T) -> @error Error {
+bb0(%0 : $*T):
+  %f = function_ref @generic_try_apply_callee2 : $@convention(thin) <T> (@in_guaranteed T) -> @error Error
+  try_apply %f<T>(%0) : $@convention(thin) <T> (@in_guaranteed T) -> @error Error, normal bb1, error bb2
+
+bb1(%result : $()):
+  %9999 = tuple()
+  return %9999 : $()
+
+bb2(%e : @owned $Error):
+  throw %e : $Error
+}
+
+sil [ossa] @generic_try_apply_callee2_out_param : $@convention(thin) <T> (@in_guaranteed T) -> (@out T, @error Error) {
+bb0(%0 : $*T, %1 : $*T):
+  cond_br undef, bb1, bb2
+
+bb1:
+  %f = function_ref @getError : $@convention(thin) () -> @owned Error
+  %e = apply %f() : $@convention(thin) () -> @owned Error
+  throw %e : $Error
+
+bb2:
+  copy_addr %1 to [initialization] %0 : $*T
+  %9999 = tuple()
+  return %9999 : $()
+}
+
+
+sil [ossa] @generic_try_apply_callee_out_param : $@convention(thin) <T> (@in_guaranteed T) -> (@out T, @error Error) {
+bb0(%0 : $*T, %1 : $*T):
+  %f = function_ref @generic_try_apply_callee2_out_param : $@convention(thin) <T> (@in_guaranteed T) -> (@out T, @error Error)
+  try_apply %f<T>(%0, %1) : $@convention(thin) <T> (@in_guaranteed T) -> (@out T, @error Error), normal bb1, error bb2
+
+bb1(%result : $()):
+  %9999 = tuple()
+  return %9999 : $()
+
+bb2(%e : @owned $Error):
+  throw %e : $Error
+}
+
+// Just make sure we pass the verifiers.
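+//
+// The callers below substitute both a trivial type (Builtin.Int32) and a
+// non-trivial one (Builtin.NativeObject); in OSSA the borrowed argument is
+// materialized with store_borrow rather than a plain store. A hypothetical
+// Swift-level shape (names are illustrative):
+//
+//   func testTryApply(_ x: Int32, _ o: AnyObject) {
+//     try? callee(x)
+//     try? callee(o)
+//   }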
+//
+// CHECK-LABEL: sil [ossa] @test_try_apply : $@convention(thin) (Builtin.Int32, @guaranteed Builtin.NativeObject) -> () {
+// CHECK: try_apply {{%.*}}({{%.*}}) : $@convention(thin) (Builtin.Int32) -> @error Error, normal bb1, error bb2
+// CHECK: try_apply {{%.*}}({{%.*}}) : $@convention(thin) (@guaranteed Builtin.NativeObject) -> @error Error, normal bb4, error bb5
+// CHECK: } // end sil function 'test_try_apply'
+sil [ossa] @test_try_apply : $@convention(thin) (Builtin.Int32, @guaranteed Builtin.NativeObject) -> () {
+bb0(%0 : $Builtin.Int32, %1 : @guaranteed $Builtin.NativeObject):
+  %f = function_ref @generic_try_apply_callee : $@convention(thin) <T> (@in_guaranteed T) -> @error Error
+
+  %0a = alloc_stack $Builtin.Int32
+  store %0 to [trivial] %0a : $*Builtin.Int32
+  try_apply %f<Builtin.Int32>(%0a) : $@convention(thin) <T> (@in_guaranteed T) -> @error Error, normal bb1, error bb2
+
+bb1(%result : $()):
+  br bb3
+
+bb2(%e : @owned $Error):
+  destroy_value %e : $Error
+  br bb3
+
+bb3:
+  dealloc_stack %0a : $*Builtin.Int32
+
+  %0b = alloc_stack $Builtin.NativeObject
+  store_borrow %1 to %0b : $*Builtin.NativeObject
+  try_apply %f<Builtin.NativeObject>(%0b) : $@convention(thin) <T> (@in_guaranteed T) -> @error Error, normal bb4, error bb5
+
+bb4(%result2 : $()):
+  br bb6
+
+bb5(%e2 : @owned $Error):
+  destroy_value %e2 : $Error
+  br bb6
+
+bb6:
+  dealloc_stack %0b : $*Builtin.NativeObject
+
+  %0c = alloc_stack $Builtin.NativeObject
+  store_borrow %1 to %0c : $*Builtin.NativeObject
+  %outParam = alloc_stack $Builtin.NativeObject
+  %f2 = function_ref @generic_try_apply_callee_out_param : $@convention(thin) <T> (@in_guaranteed T) -> (@out T, @error Error)
+  try_apply %f2<Builtin.NativeObject>(%outParam, %0c) : $@convention(thin) <T> (@in_guaranteed T) -> (@out T, @error Error), normal bb7, error bb8
+
+bb7(%result3 : $()):
+  destroy_addr %outParam : $*Builtin.NativeObject
+  br bb9
+
+bb8(%error4 : @owned $Error):
+  destroy_value %error4 : $Error
+  br bb9
+
+bb9:
+  dealloc_stack %outParam : $*Builtin.NativeObject
+  dealloc_stack %0c : $*Builtin.NativeObject
+  %9999 = tuple()
+  return %9999 : $()
+}
+
+// Test cases where we throw instead of catch.
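+// That is, roughly (hypothetical Swift shape):
+//
+//   func testThrow(_ x: Int32, _ o: AnyObject) throws {
+//     try callee(x)
+//     try callee(o)
+//   }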
+sil [ossa] @test_try_apply_throw_error : $@convention(thin) (Builtin.Int32, @guaranteed Builtin.NativeObject) -> @error Error {
+bb0(%0 : $Builtin.Int32, %1 : @guaranteed $Builtin.NativeObject):
+  %f = function_ref @generic_try_apply_callee : $@convention(thin) <T> (@in_guaranteed T) -> @error Error
+
+  %0a = alloc_stack $Builtin.Int32
+  store %0 to [trivial] %0a : $*Builtin.Int32
+  try_apply %f<Builtin.Int32>(%0a) : $@convention(thin) <T> (@in_guaranteed T) -> @error Error, normal bb1, error bb2
+
+bb1(%result : $()):
+  br bb3
+
+bb2(%e : @owned $Error):
+  dealloc_stack %0a : $*Builtin.Int32
+  br bbError(%e : $Error)
+
+bb3:
+  dealloc_stack %0a : $*Builtin.Int32
+  %0b = alloc_stack $Builtin.NativeObject
+  store_borrow %1 to %0b : $*Builtin.NativeObject
+  try_apply %f<Builtin.NativeObject>(%0b) : $@convention(thin) <T> (@in_guaranteed T) -> @error Error, normal bb4, error bb5
+
+bb4(%result2 : $()):
+  br bb6
+
+bb5(%e2 : @owned $Error):
+  dealloc_stack %0b : $*Builtin.NativeObject
+  br bbError(%e2 : $Error)
+
+bbError(%eOut : @owned $Error):
+  throw %eOut : $Error
+
+bb6:
+  dealloc_stack %0b : $*Builtin.NativeObject
+  %9999 = tuple()
+  return %9999 : $()
+}
+
+sil [ossa] @generic_try_apply_callee_loadable_2 : $@convention(thin) <T where T : Klass> (@inout T, @guaranteed T) -> @error Error {
+bb0(%0 : $*T, %1 : @guaranteed $T):
+  cond_br undef, bb1, bb2
+
+bb1:
+  %f = function_ref @getError : $@convention(thin) () -> @owned Error
+  %e = apply %f() : $@convention(thin) () -> @owned Error
+  throw %e : $Error
+
+bb2:
+  %9999 = tuple()
+  return %9999 : $()
+}
+
+sil [ossa] @generic_try_apply_callee_loadable_1 : $@convention(thin) <T where T : Klass> (@inout T, @in_guaranteed T) -> @error Error {
+bb0(%0 : $*T, %1 : $*T):
+  %f = function_ref @generic_try_apply_callee_loadable_2 : $@convention(thin) <T where T : Klass> (@inout T, @guaranteed T) -> @error Error
+  %1b = load_borrow %1 : $*T
+  try_apply %f<T>(%0, %1b) : $@convention(thin) <T where T : Klass> (@inout T, @guaranteed T) -> @error Error, normal bb1, error bb2
+
+bb1(%result : $()):
+  end_borrow %1b : $T
+  %9999 = tuple()
+  return %9999 : $()
+
+bb2(%e : @owned $Error):
+  end_borrow %1b : $T
+  throw %e : $Error
+}
+
+sil [ossa] @test_try_apply_loadable : $@convention(thin) (@inout Klass, @guaranteed Klass) -> () {
+bb0(%0 : $*Klass, %1 : @guaranteed $Klass):
+  %f = function_ref @generic_try_apply_callee_loadable_1 : $@convention(thin) <T where T : Klass> (@inout T, @in_guaranteed T) -> @error Error
+  %1b = alloc_stack $Klass
+  store_borrow %1 to %1b : $*Klass
+  try_apply %f<Klass>(%0, %1b) : $@convention(thin) <T where T : Klass> (@inout T, @in_guaranteed T) -> @error Error, normal bb4, error bb5
+
+bb4(%result2 : $()):
+  br bb6
+
+bb5(%e2 : @owned $Error):
+  destroy_value %e2 : $Error
+  br bb6
+
+bb6:
+  dealloc_stack %1b : $*Klass
+  %9999 = tuple()
+  return %9999 : $()
+}
+
+sil [ossa] @loadable_partial_apply_user : $@convention(thin) (@guaranteed @callee_guaranteed (@inout Klass) -> @error Error) -> () {
+bb0(%0 : @guaranteed $@callee_guaranteed (@inout Klass) -> @error Error):
+  %9999 = tuple()
+  return %9999 : $()
+}
+
+sil [ossa] @test_try_apply_loadable_partial_apply : $@convention(thin) (@inout Klass, @guaranteed Klass) -> () {
+bb0(%0 : $*Klass, %1 : @guaranteed $Klass):
+  %f = function_ref @generic_try_apply_callee_loadable_1 : $@convention(thin) <T where T : Klass> (@inout T, @in_guaranteed T) -> @error Error
+  %1b = alloc_stack $Klass
+  %1a = copy_value %1 : $Klass
+  store %1a to [init] %1b : $*Klass
+  %f2 = partial_apply [callee_guaranteed] %f<Klass>(%1b) : $@convention(thin) <T where T : Klass> (@inout T, @in_guaranteed T) -> @error Error
+  %f3 = function_ref @loadable_partial_apply_user : $@convention(thin) (@guaranteed @callee_guaranteed (@inout Klass) -> @error Error) -> ()
+  apply %f3(%f2) : $@convention(thin) (@guaranteed @callee_guaranteed (@inout Klass) -> @error Error) -> ()
+  destroy_value %f2 : $@callee_guaranteed (@inout Klass) -> @error Error
+  dealloc_stack %1b : $*Klass
+  %9999 = tuple()
+  return %9999 : $()
+}
+
+//////////////////////
+// BeginApply Tests //
+//////////////////////
+
+sil [ossa] @generic_begin_apply_callee_inguaranteed : $@yield_once @convention(thin) <T> (@in_guaranteed T) -> @yields @in_guaranteed T {
+bb0(%0 : $*T):
+  %1 = alloc_stack $T
+  copy_addr %0 to [initialization] %1 : $*T
+  yield %1 : $*T, resume bb1, unwind bb2
+
+bb1:
+  destroy_addr %1 : $*T
+  dealloc_stack %1 : $*T
+  %9999 = tuple()
+  return %9999 : $()
+
+bb2:
+  destroy_addr %1 : $*T
+  dealloc_stack %1 : $*T
+  unwind
+}
+
+sil [ossa] @generic_begin_apply_callee_in : $@yield_once @convention(thin) <T> (@in_guaranteed T) -> @yields @in T {
+bb0(%0 : $*T):
+  %1 = alloc_stack $T
+  copy_addr %0 to [initialization] %1 : $*T
+  yield %1 : $*T, resume bb1, unwind bb2
+
+bb1:
+  dealloc_stack %1 : $*T
+  %9999 = tuple()
+  return %9999 : $()
+
+bb2:
+  dealloc_stack %1 : $*T
+  unwind
+}
+
+sil [ossa] @generic_begin_apply_callee_inout : $@yield_once @convention(thin) <T> (@in_guaranteed T) -> @yields @inout T {
+bb0(%0 : $*T):
+  %1 = alloc_stack $T
+  copy_addr %0 to [initialization] %1 : $*T
+  yield %1 : $*T, resume bb1, unwind bb2
+
+bb1:
+  destroy_addr %1 : $*T
+  dealloc_stack %1 : $*T
+  %9999 = tuple()
+  return %9999 : $()
+
+bb2:
+  destroy_addr %1 : $*T
+  dealloc_stack %1 : $*T
+  unwind
+}
+
+// Just make sure we pass the verifiers.
+//
+// CHECK-LABEL: sil [ossa] @test_begin_apply_inguaranteed : $@convention(thin) (Builtin.Int32, @guaranteed Builtin.NativeObject) -> () {
+// CHECK: begin_apply {{%.*}}({{%.*}}) : $@yield_once @convention(thin) (Builtin.Int32) -> @yields @in_guaranteed Builtin.Int32
+// CHECK: begin_apply {{%.*}}({{%.*}}) : $@yield_once @convention(thin) (@guaranteed Builtin.NativeObject) -> @yields @in_guaranteed Builtin.NativeObject
+// CHECK: } // end sil function 'test_begin_apply_inguaranteed'
+sil [ossa] @test_begin_apply_inguaranteed : $@convention(thin) (Builtin.Int32, @guaranteed Builtin.NativeObject) -> () {
+bb0(%0 : $Builtin.Int32, %1 : @guaranteed $Builtin.NativeObject):
+  %f = function_ref @generic_begin_apply_callee_inguaranteed : $@yield_once @convention(thin) <T> (@in_guaranteed T) -> @yields @in_guaranteed T
+
+  %0a = alloc_stack $Builtin.Int32
+  store %0 to [trivial] %0a : $*Builtin.Int32
+  (%0r, %0token) = begin_apply %f<Builtin.Int32>(%0a) : $@yield_once @convention(thin) <T> (@in_guaranteed T) -> @yields @in_guaranteed T
+  end_apply %0token
+  dealloc_stack %0a : $*Builtin.Int32
+
+  %1b = alloc_stack $Builtin.NativeObject
+  %1c = copy_value %1 : $Builtin.NativeObject
+  store %1c to [init] %1b : $*Builtin.NativeObject
+  (%1result, %1token) = begin_apply %f<Builtin.NativeObject>(%1b) : $@yield_once @convention(thin) <T> (@in_guaranteed T) -> @yields @in_guaranteed T
+
+  end_apply %1token
+  destroy_addr %1b : $*Builtin.NativeObject
+  dealloc_stack %1b : $*Builtin.NativeObject
+
+  %9999 = tuple()
+  return %9999 : $()
+}
+
+// CHECK-LABEL: sil [ossa] @test_begin_apply_in : $@convention(thin) (Builtin.Int32, @guaranteed Builtin.NativeObject) -> () {
+// CHECK: begin_apply {{%.*}}({{%.*}}) : $@yield_once @convention(thin) (Builtin.Int32) -> @yields @in Builtin.Int32
+// CHECK: begin_apply {{%.*}}({{%.*}}) : $@yield_once @convention(thin) (@guaranteed Builtin.NativeObject) -> @yields @in Builtin.NativeObject
+// CHECK: } // end sil function 'test_begin_apply_in'
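+//
+// begin_apply/end_apply drive a yield_once coroutine, the SIL model behind
+// read/modify accessors; e.g. (illustrative Swift shape only):
+//
+//   var value: Bool {
+//     _read { yield storage }
+//   }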
+sil [ossa] @test_begin_apply_in : $@convention(thin) (Builtin.Int32, @guaranteed Builtin.NativeObject) -> () {
+bb0(%0 : $Builtin.Int32, %1 : @guaranteed $Builtin.NativeObject):
+  %f = function_ref @generic_begin_apply_callee_in : $@yield_once @convention(thin) <T> (@in_guaranteed T) -> @yields @in T
+
+  %0a = alloc_stack $Builtin.Int32
+  store %0 to [trivial] %0a : $*Builtin.Int32
+  (%0r, %0token) = begin_apply %f<Builtin.Int32>(%0a) : $@yield_once @convention(thin) <T> (@in_guaranteed T) -> @yields @in T
+  end_apply %0token
+  dealloc_stack %0a : $*Builtin.Int32
+
+  %1b = alloc_stack $Builtin.NativeObject
+  %1c = copy_value %1 : $Builtin.NativeObject
+  store %1c to [init] %1b : $*Builtin.NativeObject
+  (%1result, %1token) = begin_apply %f<Builtin.NativeObject>(%1b) : $@yield_once @convention(thin) <T> (@in_guaranteed T) -> @yields @in T
+
+  end_apply %1token
+  destroy_addr %1b : $*Builtin.NativeObject
+  dealloc_stack %1b : $*Builtin.NativeObject
+
+  %9999 = tuple()
+  return %9999 : $()
+}
+
+// CHECK-LABEL: sil [ossa] @test_begin_apply_inout : $@convention(thin) (Builtin.Int32, @guaranteed Builtin.NativeObject) -> () {
+// CHECK: begin_apply {{%.*}}({{%.*}}) : $@yield_once @convention(thin) (Builtin.Int32) -> @yields @inout Builtin.Int32
+// CHECK: begin_apply {{%.*}}({{%.*}}) : $@yield_once @convention(thin) (@guaranteed Builtin.NativeObject) -> @yields @inout Builtin.NativeObject
+// CHECK: } // end sil function 'test_begin_apply_inout'
+sil [ossa] @test_begin_apply_inout : $@convention(thin) (Builtin.Int32, @guaranteed Builtin.NativeObject) -> () {
+bb0(%0 : $Builtin.Int32, %1 : @guaranteed $Builtin.NativeObject):
+  %f = function_ref @generic_begin_apply_callee_inout : $@yield_once @convention(thin) <T> (@in_guaranteed T) -> @yields @inout T
+
+  %0a = alloc_stack $Builtin.Int32
+  store %0 to [trivial] %0a : $*Builtin.Int32
+  (%0r, %0token) = begin_apply %f<Builtin.Int32>(%0a) : $@yield_once @convention(thin) <T> (@in_guaranteed T) -> @yields @inout T
+  end_apply %0token
+  dealloc_stack %0a : $*Builtin.Int32
+
+  %1b = alloc_stack $Builtin.NativeObject
+  %1c = copy_value %1 : $Builtin.NativeObject
+  store %1c to [init] %1b : $*Builtin.NativeObject
+  (%1result, %1token) = begin_apply %f<Builtin.NativeObject>(%1b) : $@yield_once @convention(thin) <T> (@in_guaranteed T) -> @yields @inout T
+
+  end_apply %1token
+  destroy_addr %1b : $*Builtin.NativeObject
+  dealloc_stack %1b : $*Builtin.NativeObject
+
+  %9999 = tuple()
+  return %9999 : $()
+}
diff --git a/test/SILOptimizer/specialize_reabstraction_ossa.sil b/test/SILOptimizer/specialize_reabstraction_ossa.sil
new file mode 100644
index 0000000000000..916bc1797e5e0
--- /dev/null
+++ b/test/SILOptimizer/specialize_reabstraction_ossa.sil
@@ -0,0 +1,132 @@
+// RUN: %target-sil-opt -enable-sil-verify-all -generic-specializer -sil-generic-specializer-enable-ownership %s | %FileCheck %s
+
+sil_stage canonical
+
+import Builtin
+import Swift
+
+public protocol RefProto {
+  associatedtype T
+  var me: Ref<Self.T> { get }
+}
+
+public final class Ref<T> : RefProto {
+  public final var me: Ref<T> { get }
+  deinit
+  init()
+}
+
+extension RefProto {
+  public func merge<U>(other: Ref<U>) -> Ref<(Self.T, U)>
+}
+
+public protocol ValProto {
+  associatedtype T
+  var me: Val<Self.T> { get }
+}
+
+extension ValProto {
+  public func merge<U>(other: Val<U>) -> Val<(Self.T, U)>
+}
+
+public struct Val<T> : ValProto {
+  public var me: Val<T> { get }
+  init()
+}
+
+sil @coerce : $@convention(thin) <T, U, V> (@owned @callee_owned (@owned Ref<T>) -> @owned @callee_owned (@owned Ref<U>) -> @owned Ref<V>) -> @owned @callee_owned (Val<U>) -> Val<V>
+
+sil [ossa] @merge : $@convention(method) <Self where Self : RefProto><U> (@owned Ref<U>, @in_guaranteed Self) -> @owned Ref<(Self.T, U)> {
+bb0(%0 : @owned $Ref<U>, %1 : $*Self):
+  %2 = alloc_ref $Ref<(Self.T, U)>
+  destroy_value %0 : $Ref<U>
+  return %2 : $Ref<(Self.T, U)>
+}
+
+sil [ossa] @merge_curried : $@convention(thin) <Self where Self : RefProto><U> (@in Self) -> @owned @callee_owned (@owned Ref<U>) -> @owned Ref<(Self.T, U)> {
+bb0(%0 : $*Self):
+  %1 = function_ref @merge : $@convention(method) <τ_0_0 where τ_0_0 : RefProto><τ_1_0> (@owned Ref<τ_1_0>, @in_guaranteed τ_0_0) -> @owned Ref<(τ_0_0.T, τ_1_0)>
+  %2 = partial_apply %1<Self, U>(%0) : $@convention(method) <τ_0_0 where τ_0_0 : RefProto><τ_1_0> (@owned Ref<τ_1_0>, @in_guaranteed τ_0_0) -> @owned Ref<(τ_0_0.T, τ_1_0)>
+  return %2 : $@callee_owned (@owned Ref<U>) -> @owned Ref<(Self.T, U)>
+}
+
+sil [ossa] [reabstraction_thunk] @reabstract : $@convention(thin) <Self where Self : ValProto><U> (@owned Ref<Self.T>, @owned @callee_owned (@in Ref<Self.T>) -> @owned @callee_owned (@owned Ref<U>) -> @owned Ref<(Self.T, U)>) -> @owned @callee_owned (@owned Ref<U>) -> @owned Ref<(Self.T, U)> {
+bb0(%0 : @owned $Ref<Self.T>, %1 : @owned $@callee_owned (@in Ref<Self.T>) -> @owned @callee_owned (@owned Ref<U>) -> @owned Ref<(Self.T, U)>):
+  %2 = alloc_stack $Ref<Self.T>
+  store %0 to [init] %2 : $*Ref<Self.T>
+  %4 = apply %1(%2) : $@callee_owned (@in Ref<Self.T>) -> @owned @callee_owned (@owned Ref<U>) -> @owned Ref<(Self.T, U)>
+  dealloc_stack %2 : $*Ref<Self.T>
+  return %4 : $@callee_owned (@owned Ref<U>) -> @owned Ref<(Self.T, U)>
+}
+
+// CHECK-LABEL: sil [ossa] @test
+sil [ossa] @test : $@convention(thin) (Val<Bool>, Val<Int>) -> Val<(Bool, Int)> {
+// CHECK: bb0
+bb0(%0 : $Val<Bool>, %1 : $Val<Int>):
+  // CHECK: [[COERCE:%.*]] = function_ref @coerce
+  %2 = function_ref @coerce : $@convention(thin) <τ_0_0, τ_0_1, τ_0_2> (@owned @callee_owned (@owned Ref<τ_0_0>) -> @owned @callee_owned (@owned Ref<τ_0_1>) -> @owned Ref<τ_0_2>) -> @owned @callee_owned (Val<τ_0_1>) -> Val<τ_0_2>
+  // CHECK: [[MERGE:%.*]] = function_ref @$s13merge_curried4main3RefCySbG_SiTg5
+  %3 = function_ref @merge_curried : $@convention(thin) <τ_0_0 where τ_0_0 : RefProto><τ_1_0> (@in τ_0_0) -> @owned @callee_owned (@owned Ref<τ_1_0>) -> @owned Ref<(τ_0_0.T, τ_1_0)>
+  // CHECK: [[PARTIAL:%.*]] = partial_apply [[MERGE]]()
+  %4 = partial_apply %3<Ref<Bool>, Int>() : $@convention(thin) <τ_0_0 where τ_0_0 : RefProto><τ_1_0> (@in τ_0_0) -> @owned @callee_owned (@owned Ref<τ_1_0>) -> @owned Ref<(τ_0_0.T, τ_1_0)>
+  // CHECK-NOT: function_ref @reabstract
+  %5 = function_ref @reabstract : $@convention(thin) <τ_0_0 where τ_0_0 : ValProto><τ_1_0> (@owned Ref<τ_0_0.T>, @owned @callee_owned (@in Ref<τ_0_0.T>) -> @owned @callee_owned (@owned Ref<τ_1_0>) -> @owned Ref<(τ_0_0.T, τ_1_0)>) -> @owned @callee_owned (@owned Ref<τ_1_0>) -> @owned Ref<(τ_0_0.T, τ_1_0)>
+  // CHECK-NOT: partial_apply
+  %6 = partial_apply %5<Val<Bool>, Int>(%4) : $@convention(thin) <τ_0_0 where τ_0_0 : ValProto><τ_1_0> (@owned Ref<τ_0_0.T>, @owned @callee_owned (@in Ref<τ_0_0.T>) -> @owned @callee_owned (@owned Ref<τ_1_0>) -> @owned Ref<(τ_0_0.T, τ_1_0)>) -> @owned @callee_owned (@owned Ref<τ_1_0>) -> @owned Ref<(τ_0_0.T, τ_1_0)>
+  // CHECK: apply [[COERCE]]([[PARTIAL]])
+  %7 = apply %2<Bool, Int, (Bool, Int)>(%6) : $@convention(thin) <τ_0_0, τ_0_1, τ_0_2> (@owned @callee_owned (@owned Ref<τ_0_0>) -> @owned @callee_owned (@owned Ref<τ_0_1>) -> @owned Ref<τ_0_2>) -> @owned @callee_owned (Val<τ_0_1>) -> Val<τ_0_2>
+  %8 = apply %7(%1) : $@callee_owned (Val<Int>) -> Val<(Bool, Int)>
+  return %8 : $Val<(Bool, Int)>
+}
+
+// CHECK-LABEL: sil shared [ossa] @$s9coroutineSb_Tg5 : $@yield_once @convention(thin) (Bool) -> @yields @inout Bool {
+// CHECK: bb0(%0 : $Bool):
+// CHECK-NEXT: [[TEMP:%.*]] = alloc_stack $Bool
+// CHECK-NEXT: store %0 to [trivial] [[TEMP]] : $*Bool
+// CHECK-NEXT: yield [[TEMP]] : $*Bool, resume bb1, unwind bb2
+// CHECK: bb1:
+// CHECK-NEXT: destroy_addr [[TEMP]] : $*Bool
+// CHECK-NEXT: [[RV:%.*]] = tuple ()
+// CHECK-NEXT: dealloc_stack [[TEMP]] : $*Bool
+// CHECK-NEXT: return [[RV]] : $()
+// CHECK: bb2:
+// CHECK-NEXT: destroy_addr [[TEMP]] : $*Bool
+// CHECK-NEXT: dealloc_stack [[TEMP]] : $*Bool
+// CHECK-NEXT: unwind
+// CHECK-NEXT: }
+sil [ossa] @coroutine : $@yield_once @convention(thin) <T> (@in T) -> @yields @inout T {
+bb0(%0 : $*T):
+  yield %0 : $*T, resume bb1, unwind bb2
+bb1:
+  destroy_addr %0 : $*T
+  %rv = tuple ()
+  return %rv : $()
+bb2:
+  destroy_addr %0 : $*T
+  unwind
+}
+
+// CHECK-LABEL: @test_coroutine : $@convention(thin) (Bool) -> () {
+// CHECK: bb0(%0 : $Bool):
+// CHECK-NEXT: [[TEMP:%.*]] = alloc_stack $Bool
+// CHECK-NEXT: store %0 to [trivial] [[TEMP]] : $*Bool
+// CHECK-NEXT: // function_ref
+// CHECK-NEXT: [[CORO:%.*]] = function_ref @$s9coroutineSb_Tg5 : $@yield_once @convention(thin) (Bool) -> @yields @inout Bool
+// CHECK-NEXT: [[LOAD:%.*]] = load [trivial] [[TEMP]] : $*Bool
+// CHECK-NEXT: ([[ADDR:%.*]], [[TOKEN:%.*]]) = begin_apply [[CORO]]([[LOAD]])
+// CHECK-NEXT: end_apply [[TOKEN]]
+// CHECK-NEXT: dealloc_stack [[TEMP]] : $*Bool
+// CHECK-NEXT: [[RV:%.*]] = tuple ()
+// CHECK-NEXT: return [[RV]] : $()
+// CHECK-NEXT: }
+sil [ossa] @test_coroutine : $@convention(thin) (Bool) -> () {
+bb0(%0 : $Bool):
+  %coro = function_ref @coroutine : $@yield_once @convention(thin) <T> (@in T) -> @yields @inout T
+  %temp = alloc_stack $Bool
+  store %0 to [trivial] %temp : $*Bool
+  (%addr, %token) = begin_apply %coro<Bool>(%temp) : $@yield_once @convention(thin) <T> (@in T) -> @yields @inout T
+  end_apply %token
+  dealloc_stack %temp : $*Bool
+  %rv = tuple ()
+  return %rv : $()
+}
diff --git a/test/SILOptimizer/specialize_recursive_generics_ossa.sil b/test/SILOptimizer/specialize_recursive_generics_ossa.sil
new file mode 100644
index 0000000000000..5bb75added371
--- /dev/null
+++ b/test/SILOptimizer/specialize_recursive_generics_ossa.sil
@@ -0,0 +1,150 @@
+// RUN: %target-sil-opt -enable-sil-verify-all %s -generic-specializer -cse -sil-generic-specializer-enable-ownership | %FileCheck %s
+
+// Check that SIL cloner can correctly handle specialization of recursive
+// functions with generic arguments.
+
+sil_stage canonical
+
+import Builtin
+import Swift
+
+// Check that this recursive function is specialized only for Int32.
+// CHECK-LABEL: sil shared [noinline] [ossa] @$s62_TF29specialize_recursive_generics18recursive_genericsU__FQ_T_s5Int32V_Tg5
+// CHECK: function_ref @$s62_TF29specialize_recursive_generics18recursive_genericsU__FQ_T_s5Int32V_Tg5
+// CHECK: return
+sil [noinline] [ossa] @_TF29specialize_recursive_generics18recursive_genericsU__FQ_T_ : $@convention(thin) <T> (@in T) -> () {
+bb0(%0 : $*T):
+  debug_value_addr %0 : $*T, let, name "t" // id: %1
+  // function_ref specialize_recursive_generics.recursive_generics (A) -> ()
+  %2 = function_ref @_TF29specialize_recursive_generics18recursive_genericsU__FQ_T_ : $@convention(thin) <τ_0_0> (@in τ_0_0) -> () // user: %5
+  %3 = alloc_stack $T // users: %4, %5, %6
+  copy_addr %0 to [initialization] %3 : $*T // id: %4
+  %5 = apply %2<T>(%3) : $@convention(thin) <τ_0_0> (@in τ_0_0) -> ()
+  dealloc_stack %3 : $*T // id: %6
+  destroy_addr %0 : $*T // id: %7
+  %8 = tuple () // user: %9
+  return %8 : $() // id: %9
+}
+
+// Check that this recursive function is specialized twice: for (Int32, Double) and for (Double, Int32).
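+//
+// The function below swaps its two generic arguments on the self-call, so
+// the (Double, Int32) and (Int32, Double) specializations reference each
+// other. Roughly (hypothetical Swift shape):
+//
+//   func recurse<T, U>(_ t: T, _ u: U) { recurse(u, t) }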
+
+// CHECK-LABEL: sil shared [noinline] [ossa] @$s97_TF29specialize_recursive_generics47recursive_generics_with_different_substitutionsU___FTQ_Q0__T_Sd_s5Int32VTg5
+// CHECK: function_ref @$s97_TF29specialize_recursive_generics47recursive_generics_with_different_substitutionsU___FTQ_Q0__T_s5Int32V_SdTg5
+// CHECK: return
+
+// CHECK-LABEL: sil shared [noinline] [ossa] @$s97_TF29specialize_recursive_generics47recursive_generics_with_different_substitutionsU___FTQ_Q0__T_s5Int32V_SdTg5
+// CHECK: function_ref @$s97_TF29specialize_recursive_generics47recursive_generics_with_different_substitutionsU___FTQ_Q0__T_Sd_s5Int32VTg5
+// CHECK: return
+
+
+sil [noinline] [ossa] @_TF29specialize_recursive_generics47recursive_generics_with_different_substitutionsU___FTQ_Q0__T_ : $@convention(thin) <T, U> (@in T, @in U) -> () {
+bb0(%0 : $*T, %1 : $*U):
+  debug_value_addr %0 : $*T, let, name "t" // id: %2
+  debug_value_addr %1 : $*U, let, name "u" // id: %3
+  // function_ref specialize_recursive_generics.recursive_generics_with_different_substitutions (A, B) -> ()
+  %4 = function_ref @_TF29specialize_recursive_generics47recursive_generics_with_different_substitutionsU___FTQ_Q0__T_ : $@convention(thin) <τ_0_0, τ_0_1> (@in τ_0_0, @in τ_0_1) -> () // user: %9
+  %5 = alloc_stack $U // users: %6, %9, %11
+  copy_addr %1 to [initialization] %5 : $*U // id: %6
+  %7 = alloc_stack $T // users: %8, %9, %10
+  copy_addr %0 to [initialization] %7 : $*T // id: %8
+  %9 = apply %4<U, T>(%5, %7) : $@convention(thin) <τ_0_0, τ_0_1> (@in τ_0_0, @in τ_0_1) -> ()
+  dealloc_stack %7 : $*T // id: %10
+  dealloc_stack %5 : $*U // id: %11
+  destroy_addr %1 : $*U // id: %12
+  destroy_addr %0 : $*T // id: %13
+  %14 = tuple () // user: %15
+  return %14 : $() // id: %15
+}
+
+sil [ossa] @$s29specialize_recursive_generics05test_b1_C0yyF : $@convention(thin) () -> () {
+bb0:
+  // function_ref specialize_recursive_generics.recursive_generics (A) -> ()
+  %0 = function_ref @_TF29specialize_recursive_generics18recursive_genericsU__FQ_T_ : $@convention(thin) <τ_0_0> (@in τ_0_0) -> () // user: %5
+  %1 = integer_literal $Builtin.Int32, 3 // user: %2
+  %2 = struct $Int32 (%1 : $Builtin.Int32) // user: %4
+  %3 = alloc_stack $Int32 // users: %4, %5, %6
+  store %2 to [trivial] %3 : $*Int32 // id: %4
+  %5 = apply %0<Int32>(%3) : $@convention(thin) <τ_0_0> (@in τ_0_0) -> ()
+  dealloc_stack %3 : $*Int32 // id: %6
+  %7 = tuple () // user: %8
+  return %7 : $() // id: %8
+}
+
+sil [ossa] @$s29specialize_recursive_generics05test_b1_C29_with_different_substitutionsyyF : $@convention(thin) () -> () {
+bb0:
+  // function_ref specialize_recursive_generics.recursive_generics_with_different_substitutions (A, B) -> ()
+  %0 = function_ref @_TF29specialize_recursive_generics47recursive_generics_with_different_substitutionsU___FTQ_Q0__T_ : $@convention(thin) <τ_0_0, τ_0_1> (@in τ_0_0, @in τ_0_1) -> () // user: %10
+  %1 = float_literal $Builtin.FPIEEE80, 0x3FFF999999999999999A // 1.20000000000000000004 // user: %2
+  %2 = builtin "fptrunc_FPIEEE80_FPIEEE64"(%1 : $Builtin.FPIEEE80) : $Builtin.FPIEEE64 // user: %3
+  %3 = struct $Double (%2 : $Builtin.FPIEEE64) // user: %5
+  %4 = alloc_stack $Double // users: %5, %10, %12
+  store %3 to [trivial] %4 : $*Double // id: %5
+  %6 = integer_literal $Builtin.Int32, 1 // user: %7
+  %7 = struct $Int32 (%6 : $Builtin.Int32) // user: %9
+  %8 = alloc_stack $Int32 // users: %9, %10, %11
+  store %7 to [trivial] %8 : $*Int32 // id: %9
+  %10 = apply %0<Double, Int32>(%4, %8) : $@convention(thin) <τ_0_0, τ_0_1> (@in τ_0_0, @in τ_0_1) -> ()
+  dealloc_stack %8 : $*Int32 // id: %11
+  dealloc_stack %4 : $*Double // id: %12
+  %13 = tuple () // user: %14
+  return %13 : $() // id: %14
+}
+
+
+public class C : P {}
+
+public protocol P {}
+
+sil hidden [ossa] [noinline] @helper : $@convention(thin) <T> (@in T, @in P) -> @owned Optional<C> {
+bb0(%0 : $*T, %1 : $*P):
+  %4 = alloc_stack $P
+  copy_addr %1 to [initialization] %4 : $*P
+  %6 = alloc_stack $C
+  checked_cast_addr_br take_always P in %4 : $*P to C in %6 : $*C, bb1, bb2
+bb1:
+  %8 = load [take] %6 : $*C
+  %9 = enum $Optional<C>, #Optional.some!enumelt, %8 : $C
+  dealloc_stack %6 : $*C
+  br bb3(%9 : $Optional<C>)
+
+bb2:
+  %12 = enum $Optional<C>, #Optional.none!enumelt
+  dealloc_stack %6 : $*C
+  br bb3(%12 : $Optional<C>)
+
+bb3(%15 : @owned $Optional<C>):
+  dealloc_stack %4 : $*P
+  destroy_addr %1 : $*P
+  destroy_addr %0 : $*T
+  return %15 : $Optional<C>
+}
+
+// CHECK-LABEL: sil shared [ossa] @$s6lookup4main1CC_Tg5
+sil [ossa] @lookup : $@convention(method) <Self where Self : P> (@owned C, @in_guaranteed Self) -> @owned Optional<C> {
+bb0(%0 : @owned $C, %1 : $*Self):
+  // CHECK: [[HELPER:%.*]] = function_ref @$s6helpers5Int32V_Tg5
+  %4 = function_ref @helper : $@convention(thin) <τ_0_0> (@in τ_0_0, @in P) -> @owned Optional<C>
+  %5 = integer_literal $Builtin.Int32, 1
+  %6 = struct $Int32 (%5 : $Builtin.Int32)
+  %7 = alloc_stack $Int32
+  store %6 to [trivial] %7 : $*Int32
+  %9 = alloc_stack $P
+  %10 = init_existential_addr %9 : $*P, $Self
+  copy_addr %1 to [initialization] %10 : $*Self
+  // CHECK: apply [[HELPER]]
+  // CHECK-NOT: apply [[HELPER]]
+  %12 = apply %4<Int32>(%7, %9) : $@convention(thin) <τ_0_0> (@in τ_0_0, @in P) -> @owned Optional<C>
+  destroy_value %12 : $Optional<C>
+  dealloc_stack %9 : $*P
+  dealloc_stack %7 : $*Int32
+  // CHECK: [[LOOKUP:%.*]] = function_ref @$s6lookup4main1CC_Tg5
+  %16 = function_ref @lookup : $@convention(method) <τ_0_0 where τ_0_0 : P> (@owned C, @in_guaranteed τ_0_0) -> @owned Optional<C>
+  %17 = alloc_stack $C
+  %0c = copy_value %0 : $C
+  store %0c to [init] %17 : $*C
+  // CHECK: apply [[LOOKUP]]
+  %20 = apply %16<C>(%0, %17) : $@convention(method) <τ_0_0 where τ_0_0 : P> (@owned C, @in_guaranteed τ_0_0) -> @owned Optional<C>
+  destroy_addr %17 : $*C
+  dealloc_stack %17 : $*C
+  return %20 : $Optional<C>
+}