From e083a4cb0551f9069906fc242e20bb6402974f4a Mon Sep 17 00:00:00 2001 From: Michael Gottesman Date: Mon, 8 Feb 2021 17:53:47 -0800 Subject: [PATCH 01/15] [sil-combine] Turn off try_apply convert_function elimination on both ossa/non-ossa SIL. In SILCombine, we do not want to add or delete edges. We are ok with swapping edges or replacing edges when the CFG structure is preserved. This becomes an issue since by performing this optimization, we are going to get rid of the error parameter but leave a try_apply, breaking SIL invariants. So to do perform this optimization, we would need to convert to an apply and eliminate the error edge, breaking the aforementioned SILCombine invariant. So, just do not perform this for now and leave it to other passes like SimplifyCFG. --- .../SILCombiner/SILCombinerApplyVisitors.cpp | 4 --- test/SILOptimizer/sil_combine.sil | 25 +++++++++++++++++++ test/SILOptimizer/sil_combine_ossa.sil | 25 +++++++++++++++++++ 3 files changed, 50 insertions(+), 4 deletions(-) diff --git a/lib/SILOptimizer/SILCombiner/SILCombinerApplyVisitors.cpp b/lib/SILOptimizer/SILCombiner/SILCombinerApplyVisitors.cpp index 0413a90a713b1..0eeaa75bdb829 100644 --- a/lib/SILOptimizer/SILCombiner/SILCombinerApplyVisitors.cpp +++ b/lib/SILOptimizer/SILCombiner/SILCombinerApplyVisitors.cpp @@ -1709,10 +1709,6 @@ SILInstruction *SILCombiner::visitTryApplyInst(TryApplyInst *AI) { if (isa(AI->getCallee())) return nullptr; - if (auto *CFI = dyn_cast(AI->getCallee())) { - return optimizeApplyOfConvertFunctionInst(AI, CFI); - } - // Optimize readonly functions with no meaningful users. SILFunction *Fn = AI->getReferencedFunctionOrNull(); if (Fn && Fn->getEffectsKind() < EffectsKind::ReleaseNone) { diff --git a/test/SILOptimizer/sil_combine.sil b/test/SILOptimizer/sil_combine.sil index 583dd7bcaf495..e1bc80119828a 100644 --- a/test/SILOptimizer/sil_combine.sil +++ b/test/SILOptimizer/sil_combine.sil @@ -4148,3 +4148,28 @@ bb0: %1 = load %0 : $*Int64 return %1 : $Int64 } + +sil @convert_raw_pointer_to_nativeobject : $@convention(thin) (Builtin.RawPointer) -> @owned Builtin.NativeObject + +// To eliminate the convert_function below we need to convert the try_apply to +// an apply. We do not delete edges like this in SILCombine with ownership and +// are trying to reduce it without ownership. So make sure we don't optimize +// this. This should be done in a different pass that handles CFG issues. +// +// CHECK-LABEL: sil @do_not_eliminate_error_adding_convert_function_used_by_try_apply_nonownership : $@convention(thin) (Builtin.RawPointer) -> @owned Builtin.NativeObject { +// CHECK: convert_function +// CHECK: try_apply +// CHECK: } // end sil function 'do_not_eliminate_error_adding_convert_function_used_by_try_apply_nonownership' +sil @do_not_eliminate_error_adding_convert_function_used_by_try_apply_nonownership : $@convention(thin) (Builtin.RawPointer) -> @owned Builtin.NativeObject { +bb0(%0 : $Builtin.RawPointer): + %f = function_ref @convert_raw_pointer_to_nativeobject : $@convention(thin) (Builtin.RawPointer) -> @owned Builtin.NativeObject + %t = thin_to_thick_function %f : $@convention(thin) (Builtin.RawPointer) -> @owned Builtin.NativeObject to $@noescape @callee_guaranteed (Builtin.RawPointer) -> @owned Builtin.NativeObject + %c = convert_function %t : $@noescape @callee_guaranteed (Builtin.RawPointer) -> @owned Builtin.NativeObject to $@noescape @callee_guaranteed (Builtin.RawPointer) -> (@owned Builtin.NativeObject, @error Error) + try_apply %c(%0) : $@noescape @callee_guaranteed (Builtin.RawPointer) -> (@owned Builtin.NativeObject, @error Error), normal bb1, error bb2 + +bb1(%result : $Builtin.NativeObject): + return %result : $Builtin.NativeObject + +bb2(%error : $Error): + unreachable +} \ No newline at end of file diff --git a/test/SILOptimizer/sil_combine_ossa.sil b/test/SILOptimizer/sil_combine_ossa.sil index f61966018fcd1..dfa58e88d42c6 100644 --- a/test/SILOptimizer/sil_combine_ossa.sil +++ b/test/SILOptimizer/sil_combine_ossa.sil @@ -4901,3 +4901,28 @@ bb3(%4 : @owned $KlassNativeObjEither): return %4 : $KlassNativeObjEither } + +sil @convert_raw_pointer_to_nativeobject : $@convention(thin) (Builtin.RawPointer) -> @owned Builtin.NativeObject + +// To eliminate the convert_function below we need to convert the try_apply to +// an apply. We do not delete edges like this in SILCombine with ownership and +// are trying to reduce it without ownership. So make sure we don't optimize +// this. This should be done in a different pass that handles CFG issues. +// +// CHECK-LABEL: sil [ossa] @do_not_eliminate_error_adding_convert_function_used_by_try_apply : $@convention(thin) (Builtin.RawPointer) -> @owned Builtin.NativeObject { +// CHECK: convert_function +// CHECK: try_apply +// CHECK: } // end sil function 'do_not_eliminate_error_adding_convert_function_used_by_try_apply' +sil [ossa] @do_not_eliminate_error_adding_convert_function_used_by_try_apply : $@convention(thin) (Builtin.RawPointer) -> @owned Builtin.NativeObject { +bb0(%0 : $Builtin.RawPointer): + %f = function_ref @convert_raw_pointer_to_nativeobject : $@convention(thin) (Builtin.RawPointer) -> @owned Builtin.NativeObject + %t = thin_to_thick_function %f : $@convention(thin) (Builtin.RawPointer) -> @owned Builtin.NativeObject to $@noescape @callee_guaranteed (Builtin.RawPointer) -> @owned Builtin.NativeObject + %c = convert_function %t : $@noescape @callee_guaranteed (Builtin.RawPointer) -> @owned Builtin.NativeObject to $@noescape @callee_guaranteed (Builtin.RawPointer) -> (@owned Builtin.NativeObject, @error Error) + try_apply %c(%0) : $@noescape @callee_guaranteed (Builtin.RawPointer) -> (@owned Builtin.NativeObject, @error Error), normal bb1, error bb2 + +bb1(%result : @owned $Builtin.NativeObject): + return %result : $Builtin.NativeObject + +bb2(%error : @owned $Error): + unreachable +} \ No newline at end of file From 5327a1049903af4a1dd6c235de2a442a6a42fb71 Mon Sep 17 00:00:00 2001 From: Michael Gottesman Date: Mon, 8 Feb 2021 18:36:58 -0800 Subject: [PATCH 02/15] [pred-deadalloc-elim] Fix memory safety issue and handle promoting paths where there is dynamically no value by inserting compensating destroys. This commit is fixing two things: 1. In certain cases, we are seeing cases where either SILGen or the optimizer are eliminating destroy_addr along paths where we know that an enum is dynamically trivial. This can not be expressed in OSSA, so I added code to pred-deadalloc-elim so that I check if any of our available values after we finish promoting away an allocation now need to have their consuming use set completed. 2. That led me to discover that in certain cases load [take] that we were promoting were available values of other load [take]. This means that we have a memory safety issue if we promote one load before the other. Consider the following SIL: ``` %mem = alloc_stack store %arg to [init] %mem %0 = load [take] %mem store %0 to [init] %mem %1 = load [take] %mem destroy_value %1 dealloc_stack %mem ``` In this case, if we eliminate %0 before we eliminate %1, we will have a stale pointer to %0. I also took this as an opportunity to turn off predictable mem access opt on SIL that was deserialized canonicalized and non-OSSA SIL. We evidently need to still do this for pred mem opts for perf reasons (not sure why). But I am pretty sure this isn't needed and allows me to avoid some nasty code. --- .../Mandatory/PredictableMemOpt.cpp | 278 +++++++++++++++++- .../predictable_deadalloc_elim.sil | 267 ----------------- .../predictable_deadalloc_elim_ownership.sil | 182 ++++++++++++ test/SILOptimizer/predictable_memopt.sil | 4 +- 4 files changed, 451 insertions(+), 280 deletions(-) delete mode 100644 test/SILOptimizer/predictable_deadalloc_elim.sil diff --git a/lib/SILOptimizer/Mandatory/PredictableMemOpt.cpp b/lib/SILOptimizer/Mandatory/PredictableMemOpt.cpp index d923402a0b67c..740251922f495 100644 --- a/lib/SILOptimizer/Mandatory/PredictableMemOpt.cpp +++ b/lib/SILOptimizer/Mandatory/PredictableMemOpt.cpp @@ -13,18 +13,20 @@ #define DEBUG_TYPE "predictable-memopt" #include "PMOMemoryUseCollector.h" +#include "swift/Basic/BlotMapVector.h" #include "swift/Basic/BlotSetVector.h" #include "swift/Basic/FrozenMultiMap.h" #include "swift/Basic/STLExtras.h" #include "swift/SIL/BasicBlockUtils.h" #include "swift/SIL/LinearLifetimeChecker.h" #include "swift/SIL/OwnershipUtils.h" -#include "swift/SIL/SILBuilder.h" #include "swift/SIL/SILBitfield.h" +#include "swift/SIL/SILBuilder.h" #include "swift/SILOptimizer/PassManager/Passes.h" #include "swift/SILOptimizer/PassManager/Transforms.h" #include "swift/SILOptimizer/Utils/CFGOptUtils.h" #include "swift/SILOptimizer/Utils/InstOptUtils.h" +#include "swift/SILOptimizer/Utils/OwnershipOptUtils.h" #include "swift/SILOptimizer/Utils/SILSSAUpdater.h" #include "swift/SILOptimizer/Utils/ValueLifetime.h" #include "llvm/ADT/SmallBitVector.h" @@ -1959,7 +1961,17 @@ class AllocOptimize { bool promoteLoadCopy(LoadInst *li); bool promoteLoadBorrow(LoadBorrowInst *lbi); bool promoteCopyAddr(CopyAddrInst *cai); - void promoteLoadTake(LoadInst *Inst, MutableArrayRef values); + + /// Promote a load take cleaning up everything except for RAUWing the + /// instruction with the aggregated result. The routine returns the new + /// aggreaged result to the caller and expects the caller to eventually RAUW + /// \p inst with the return value. The reason why we do this is to allow for + /// the caller to work around invalidation issues by not deleting the load + /// [take] until after all load [take] have been cleaned up. + /// + /// \returns the value that the caller will RAUW with \p inst. + SILValue promoteLoadTake(LoadInst *inst, + MutableArrayRef values); void promoteDestroyAddr(DestroyAddrInst *dai, MutableArrayRef values); bool canPromoteTake(SILInstruction *i, @@ -2293,7 +2305,7 @@ void AllocOptimize::promoteDestroyAddr( dai->eraseFromParent(); } -void AllocOptimize::promoteLoadTake( +SILValue AllocOptimize::promoteLoadTake( LoadInst *li, MutableArrayRef availableValues) { assert(li->getOwnershipQualifier() == LoadOwnershipQualifier::Take && "load [copy], load [trivial], load should be handled by " @@ -2311,15 +2323,15 @@ void AllocOptimize::promoteLoadTake( AvailableValueAggregator agg(li, availableValues, Uses, deadEndBlocks, AvailableValueExpectedOwnership::Take); SILValue newVal = agg.aggregateValues(loadTy, address, firstElt); + assert(newVal); ++NumLoadTakePromoted; LLVM_DEBUG(llvm::dbgs() << " *** Promoting load_take: " << *li); LLVM_DEBUG(llvm::dbgs() << " To value: " << *newVal); - // Then perform the RAUW. - li->replaceAllUsesWith(newVal); - li->eraseFromParent(); + // Our parent RAUWs with newVal/erases li. + return newVal; } namespace { @@ -2335,6 +2347,33 @@ struct TakePromotionState { unsigned size() const { return takeInstIndices.size(); } + void verify() { +#ifndef NDEBUG + for (unsigned i : range(size())) { + SILInstruction *inst; + MutableArrayRef data; + std::tie(inst, data) = getData(i); + assert(inst); + inst->verifyOperandOwnership(); + assert(!data.empty() && "Value without any available values?!"); + } +#endif + } + + void verify(unsigned startOffset) { +#ifndef NDEBUG + assert(startOffset < size()); + for (unsigned i : range(startOffset, size())) { + SILInstruction *inst; + MutableArrayRef data; + std::tie(inst, data) = getData(i); + assert(inst); + inst->verifyOperandOwnership(); + assert(!data.empty() && "Value without any available values?!"); + } +#endif + } + void initializeForTakeInst(unsigned takeInstIndex) { availableValueStartOffsets.push_back(availableValueList.size()); takeInstIndices.push_back(takeInstIndex); @@ -2352,9 +2391,8 @@ struct TakePromotionState { count = availableValueList.size() - startOffset; } - MutableArrayRef values(&availableValueList[startOffset], - count); - return {takeInsts[takeInstIndex], values}; + auto values = MutableArrayRef(availableValueList); + return {takeInsts[takeInstIndex], values.slice(startOffset, count)}; } }; @@ -2424,6 +2462,8 @@ static bool isRemovableAutogeneratedAllocation(AllocationInst *TheMemory) { } bool AllocOptimize::tryToRemoveDeadAllocation() { + assert(TheMemory->getFunction()->hasOwnership() && + "Can only eliminate dead allocations with ownership enabled"); assert((isa(TheMemory) || isa(TheMemory)) && "Unhandled allocation case"); @@ -2492,8 +2532,32 @@ bool AllocOptimize::tryToRemoveDeadAllocation() { } // If we reached this point, we can promote all of our destroy_addr and load - // take. Since our load [take] may be available values for our destroy_addr, - // we promote the destroy_addr first. + // take. Before we begin, gather up all found available values before we do + // anything so we can fix up lifetimes later if we need to. + SmallBlotSetVector valuesNeedingLifetimeCompletion; + for (auto pmoMemUse : Uses) { + if (pmoMemUse.Inst && pmoMemUse.Kind == PMOUseKind::Initialization) { + // Today if we promote, this is always a store, since we would have + // blown up the copy_addr otherwise. Given that, always make sure we + // clean up the src as appropriate after we optimize. + auto *si = cast(pmoMemUse.Inst); + auto src = si->getSrc(); + + // Bail if src has any uses that are forwarding unowned uses. This + // allows us to know that we never have to deal with forwarding unowned + // instructions like br. These are corner cases that complicate the + // logic below. + for (auto *use : src->getUses()) { + if (use->getOperandOwnership() == OperandOwnership::ForwardingUnowned) + return false; + } + valuesNeedingLifetimeCompletion.insert(src); + } + } + + // Since our load [take] may be available values for our + // destroy_addr/load [take], we promote the destroy_addr first and then handle + // load [take] with extra rigour later to handle that possibility. for (unsigned i : range(destroyAddrState.size())) { SILInstruction *dai; MutableArrayRef values; @@ -2501,11 +2565,59 @@ bool AllocOptimize::tryToRemoveDeadAllocation() { promoteDestroyAddr(cast(dai), values); // We do not need to unset releases, since we are going to exit here. } + + llvm::SmallMapVector loadsToDelete; for (unsigned i : range(loadTakeState.size())) { SILInstruction *li; MutableArrayRef values; std::tie(li, values) = loadTakeState.getData(i); - promoteLoadTake(cast(li), values); + + for (unsigned i : indices(values)) { + auto v = values[i].Value; + auto *li = dyn_cast(v); + if (!li) + continue; + + auto iter = loadsToDelete.find(li); + if (iter == loadsToDelete.end()) + continue; + + SILValue newValue = iter->second; + assert(newValue && "We should neer store a nil SILValue into this map"); + values[i].Value = newValue; + } + + auto *liCast = cast(li); + SILValue result = promoteLoadTake(liCast, values); + assert(result); + + // We need to erase liCast here before we erase it since a load [take] that + // we are promoting could be an available value for another load + // [take]. Consider the following SIL: + // + // %mem = alloc_stack + // store %arg to [init] %mem + // %0 = load [take] %mem + // store %0 to [init] %mem + // %1 = load [take] %mem + // destroy_value %1 + // dealloc_stack %mem + // + // In such a case, we are going to delete %0 here, but %0 is an available + // value for %1, so we will + auto insertIter = loadsToDelete.insert({liCast, result}); + valuesNeedingLifetimeCompletion.erase(liCast); + (void)insertIter; + assert(insertIter.second && "loadTakeState doesn't have unique loads?!"); + } + + // Now that we have promoted all of our load [take], perform the actual + // RAUW/removal. + for (auto p : loadsToDelete) { + LoadInst *li = p.first; + SILValue newValue = p.second; + li->replaceAllUsesWith(newValue); + li->eraseFromParent(); } LLVM_DEBUG(llvm::dbgs() << "*** Removing autogenerated non-trivial alloc: " @@ -2516,6 +2628,144 @@ bool AllocOptimize::tryToRemoveDeadAllocation() { // caller remove the allocation itself to avoid iterator invalidation. eraseUsesOfInstruction(TheMemory); + // Now look at all of our available values and complete any of their + // post-dominating consuming use sets. This can happen if we have an enum that + // is known dynamically none along a path. This is dynamically correct, but + // can not be represented in OSSA so we insert these destroys along said path. + SmallVector consumingUseBlocks; + while (!valuesNeedingLifetimeCompletion.empty()) { + auto optV = valuesNeedingLifetimeCompletion.pop_back_val(); + if (!optV) + continue; + SILValue v = *optV; + if (v.getOwnershipKind() != OwnershipKind::Owned) + continue; + + // First see if our value doesn't have any uses. In such a case, just + // insert a destroy_value at the next instruction and return. + if (v->use_empty()) { + auto *next = v->getNextInstruction(); + auto loc = RegularLocation::getAutoGeneratedLocation(); + SILBuilderWithScope localBuilder(next); + localBuilder.createDestroyValue(loc, v); + continue; + } + + // Otherwise, we first see if we have any consuming uses at all. If we do, + // then we know that any such consuming uses since we have an owned value + // /must/ be strongly control equivalent to our value and unreachable from + // each other, so we can just use findJointPostDominatingSet to complete + // the set. + consumingUseBlocks.clear(); + for (auto *use : v->getConsumingUses()) + consumingUseBlocks.push_back(use->getParentBlock()); + + if (!consumingUseBlocks.empty()) { + findJointPostDominatingSet( + v->getParentBlock(), consumingUseBlocks, [](SILBasicBlock *) {}, + [&](SILBasicBlock *result) { + auto loc = RegularLocation::getAutoGeneratedLocation(); + SILBuilderWithScope builder(result); + builder.createDestroyValue(loc, v); + }); + continue; + } + + // If we do not have at least one consuming use, we need to do something + // different. This situation can occur given a non-trivial enum typed + // stack allocation that: + // + // 1. Had a destroy_addr eliminated along a path where we dynamically know + // that the stack allocation is storing a trivial case. + // + // 2. Had some other paths where due to dead end blocks, no destroy_addr + // is needed. + // + // To fix this, we just treat all uses as consuming blocks and insert + // destroys using the joint post dominance set computer and insert + // destroys at the end of all input blocks in the post dom set and at the + // beginning of any leaking blocks. + { + // TODO: Can we just pass this in to findJointPostDominatingSet instead + // of recomputing it there? Maybe an overload that lets us do this? + BasicBlockSet foundUseBlocks(v->getFunction()); + for (auto *use : v->getUses()) { + auto *block = use->getParentBlock(); + if (!foundUseBlocks.insert(block)) + continue; + consumingUseBlocks.push_back(block); + } + } + findJointPostDominatingSet( + v->getParentBlock(), consumingUseBlocks, + [&](SILBasicBlock *foundInputBlock) { + // This is a block that is reachable from another use. We are not + // interested in these. + }, + [&](SILBasicBlock *leakingBlock) { + auto loc = RegularLocation::getAutoGeneratedLocation(); + SILBuilderWithScope builder(leakingBlock); + builder.createDestroyValue(loc, v); + }, + [&](SILBasicBlock *inputBlockInPostDomSet) { + auto *termInst = inputBlockInPostDomSet->getTerminator(); + switch (termInst->getTermKind()) { + case TermKind::UnreachableInst: + // We do not care about input blocks that end in unreachables. We + // are going to leak down them so do not insert a destroy_value + // there. + return; + + // NOTE: Given that our input value is owned, our branch can only + // accept the use as a non-consuming use if the branch is forwarding + // unowned ownership. Luckily for use, we checked early if we had + // any such uses and bailed, so we know the branch can not use our + // value. This is just avoiding a corner case that we don't need to + // handle. + case TermKind::BranchInst: + LLVM_FALLTHROUGH; + // NOTE: We put cond_br here since in OSSA, cond_br can never have + // a non-trivial value operand, meaning we can insert before. + case TermKind::CondBranchInst: + LLVM_FALLTHROUGH; + case TermKind::ReturnInst: + case TermKind::ThrowInst: + case TermKind::UnwindInst: + case TermKind::YieldInst: { + // These terminators can never be non-consuming uses of an owned + // value since we would be leaking the owned value no matter what + // we do. Given that, we can assume that what ever the + // non-consuming use actually was, must be before this + // instruction. So insert the destroy_value at the end of the + // block, before the terminator. + auto loc = RegularLocation::getAutoGeneratedLocation(); + SILBuilderWithScope localBuilder(termInst); + localBuilder.createDestroyValue(loc, v); + return; + } + case TermKind::TryApplyInst: + case TermKind::SwitchValueInst: + case TermKind::SwitchEnumInst: + case TermKind::SwitchEnumAddrInst: + case TermKind::DynamicMethodBranchInst: + case TermKind::AwaitAsyncContinuationInst: + case TermKind::CheckedCastBranchInst: + case TermKind::CheckedCastAddrBranchInst: + case TermKind::CheckedCastValueBranchInst: { + // Otherwise, we insert the destroy_addr /after/ the + // terminator. All of these are guaranteed to have each successor + // to have the block as its only predecessor block. + SILBuilderWithScope::insertAfter(termInst, [&](auto &b) { + auto loc = RegularLocation::getAutoGeneratedLocation(); + b.createDestroyValue(loc, v); + }); + return; + } + } + llvm_unreachable("Case that did not return in its body?!"); + }); + } + return true; } @@ -2687,6 +2937,10 @@ class PredictableMemoryAccessOptimizations : public SILFunctionTransform { class PredictableDeadAllocationElimination : public SILFunctionTransform { void run() override { + // If we are already canonical or do not have ownership, just bail. + if (getFunction()->wasDeserializedCanonical() || + !getFunction()->hasOwnership()) + return; if (eliminateDeadAllocations(*getFunction())) invalidateAnalysis(SILAnalysis::InvalidationKind::FunctionBody); } diff --git a/test/SILOptimizer/predictable_deadalloc_elim.sil b/test/SILOptimizer/predictable_deadalloc_elim.sil deleted file mode 100644 index 3e9f10362a71a..0000000000000 --- a/test/SILOptimizer/predictable_deadalloc_elim.sil +++ /dev/null @@ -1,267 +0,0 @@ -// RUN: %target-sil-opt -enable-sil-verify-all %s -predictable-deadalloc-elim | %FileCheck %s - -sil_stage canonical - -import Swift -import Builtin - -// CHECK-LABEL: sil @simple_trivial_stack : $@convention(thin) (Builtin.Int32) -> () { -// CHECK-NOT: alloc_stack -// CHECK: } // end sil function 'simple_trivial_stack' -sil @simple_trivial_stack : $@convention(thin) (Builtin.Int32) -> () { -bb0(%0 : $Builtin.Int32): - %1 = alloc_stack $Builtin.Int32 - store %0 to %1 : $*Builtin.Int32 - dealloc_stack %1 : $*Builtin.Int32 - %9999 = tuple() - return %9999 : $() -} - -// CHECK-LABEL: sil @simple_trivial_init_box : $@convention(thin) (Builtin.Int32) -> () { -// CHECK-NOT: alloc_box -// CHECK: } // end sil function 'simple_trivial_init_box' -sil @simple_trivial_init_box : $@convention(thin) (Builtin.Int32) -> () { -bb0(%0 : $Builtin.Int32): - %1 = alloc_box ${ var Builtin.Int32 } - %2 = project_box %1 : ${ var Builtin.Int32 }, 0 - store %0 to %2 : $*Builtin.Int32 - strong_release %1 : ${ var Builtin.Int32 } - %9999 = tuple() - return %9999 : $() -} - -// CHECK-LABEL: sil @simple_trivial_uninit_box : $@convention(thin) (Builtin.Int32) -> () { -// CHECK-NOT: alloc_box -// CHECK: } // end sil function 'simple_trivial_uninit_box' -sil @simple_trivial_uninit_box : $@convention(thin) (Builtin.Int32) -> () { -bb0(%0 : $Builtin.Int32): - %1 = alloc_box ${ var Builtin.Int32 } - %2 = project_box %1 : ${ var Builtin.Int32 }, 0 - store %0 to %2 : $*Builtin.Int32 - dealloc_box %1 : ${ var Builtin.Int32 } - %9999 = tuple() - return %9999 : $() -} - -// CHECK-LABEL: sil @simple_nontrivial_stack : $@convention(thin) (@owned Builtin.NativeObject) -> () { -// CHECK: bb0([[ARG:%.*]] : -// CHECK-NEXT: strong_release [[ARG]] -// CHECK-NEXT: tuple -// CHECK-NEXT: return -// CHECK: } // end sil function 'simple_nontrivial_stack' -sil @simple_nontrivial_stack : $@convention(thin) (@owned Builtin.NativeObject) -> () { -bb0(%0 : $Builtin.NativeObject): - %1 = alloc_stack $Builtin.NativeObject - store %0 to %1 : $*Builtin.NativeObject - destroy_addr %1 : $*Builtin.NativeObject - dealloc_stack %1 : $*Builtin.NativeObject - %9999 = tuple() - return %9999 : $() -} - -// We do not handle this today, since we do not understand that we need to treat -// the strong_release of the alloc_box as a destroy_addr of the entire value. -// -// FIXME: We should be able to handle this. -// -// CHECK-LABEL: sil @simple_nontrivial_init_box : $@convention(thin) (@owned Builtin.NativeObject) -> () { -// CHECK: alloc_box -// CHECK: } // end sil function 'simple_nontrivial_init_box' -sil @simple_nontrivial_init_box : $@convention(thin) (@owned Builtin.NativeObject) -> () { -bb0(%0 : $Builtin.NativeObject): - %1 = alloc_box ${ var Builtin.NativeObject } - %2 = project_box %1 : ${ var Builtin.NativeObject }, 0 - store %0 to %2 : $*Builtin.NativeObject - strong_release %1 : ${ var Builtin.NativeObject } - %9999 = tuple() - return %9999 : $() -} - -// CHECK-LABEL: sil @simple_nontrivial_uninit_box : $@convention(thin) (@owned Builtin.NativeObject) -> () { -// CHECK: bb0([[ARG:%.*]] : -// CHECK-NEXT: strong_release [[ARG]] -// CHECK-NEXT: tuple -// CHECK-NEXT: return -// CHECK: } // end sil function 'simple_nontrivial_uninit_box' -sil @simple_nontrivial_uninit_box : $@convention(thin) (@owned Builtin.NativeObject) -> () { -bb0(%0 : $Builtin.NativeObject): - %1 = alloc_box ${ var Builtin.NativeObject } - %2 = project_box %1 : ${ var Builtin.NativeObject }, 0 - store %0 to %2 : $*Builtin.NativeObject - destroy_addr %2 : $*Builtin.NativeObject - dealloc_box %1 : ${ var Builtin.NativeObject } - %9999 = tuple() - return %9999 : $() -} - -////////////////// -// Assign Tests // -////////////////// - -// Make sure that we do eliminate this allocation -// CHECK-LABEL: sil @simple_assign_take_trivial : $@convention(thin) (Builtin.Int32, @in Builtin.Int32) -> () { -// CHECK-NOT: alloc_stack -// CHECK: } // end sil function 'simple_assign_take_trivial' -sil @simple_assign_take_trivial : $@convention(thin) (Builtin.Int32, @in Builtin.Int32) -> () { -bb0(%0 : $Builtin.Int32, %1 : $*Builtin.Int32): - %2 = alloc_stack $Builtin.Int32 - store %0 to %2 : $*Builtin.Int32 - copy_addr [take] %1 to %2 : $*Builtin.Int32 - dealloc_stack %2 : $*Builtin.Int32 - %9999 = tuple() - return %9999 : $() -} - -// In this case, we perform an init, copy. Since we do not want to lose the +1 -// on the argument, we do not eliminate this (even though with time perhaps we -// could). -// CHECK-LABEL: sil @simple_init_copy : $@convention(thin) (@owned Builtin.NativeObject, @in_guaranteed Builtin.NativeObject) -> () { -// CHECK: alloc_stack -// CHECK: copy_addr -// CHECK: } // end sil function 'simple_init_copy' -sil @simple_init_copy : $@convention(thin) (@owned Builtin.NativeObject, @in_guaranteed Builtin.NativeObject) -> () { -bb0(%0 : $Builtin.NativeObject, %1 : $*Builtin.NativeObject): - %2 = alloc_stack $Builtin.NativeObject - store %0 to %2 : $*Builtin.NativeObject - destroy_addr %2 : $*Builtin.NativeObject - copy_addr %1 to [initialization] %2 : $*Builtin.NativeObject - destroy_addr %2 : $*Builtin.NativeObject - dealloc_stack %2 : $*Builtin.NativeObject - %9999 = tuple() - return %9999 : $() -} - -// This we can promote successfully. -// CHECK-LABEL: sil @simple_init_take : $@convention(thin) (@owned Builtin.NativeObject, @in Builtin.NativeObject) -> () { -// CHECK: bb0([[ARG0:%.*]] : $Builtin.NativeObject, [[ARG1:%.*]] : $*Builtin.NativeObject): -// CHECK-NOT: alloc_stack -// CHECK: strong_release [[ARG0]] -// CHECK: [[ARG1_LOADED:%.*]] = load [[ARG1]] -// CHECK: strong_release [[ARG1_LOADED]] -// CHECK: } // end sil function 'simple_init_take' -sil @simple_init_take : $@convention(thin) (@owned Builtin.NativeObject, @in Builtin.NativeObject) -> () { -bb0(%0 : $Builtin.NativeObject, %1 : $*Builtin.NativeObject): - %2 = alloc_stack $Builtin.NativeObject - store %0 to %2 : $*Builtin.NativeObject - destroy_addr %2 : $*Builtin.NativeObject - copy_addr [take] %1 to [initialization] %2 : $*Builtin.NativeObject - destroy_addr %2 : $*Builtin.NativeObject - dealloc_stack %2 : $*Builtin.NativeObject - %9999 = tuple() - return %9999 : $() -} - -// Since we are copying the input argument, we can not get rid of the copy_addr, -// meaning we shouldn't eliminate the allocation here. -// CHECK-LABEL: sil @simple_assign_no_take : $@convention(thin) (@owned Builtin.NativeObject, @in_guaranteed Builtin.NativeObject) -> () { -// CHECK: alloc_stack -// CHECK: copy_addr -// CHECK: } // end sil function 'simple_assign_no_take' -sil @simple_assign_no_take : $@convention(thin) (@owned Builtin.NativeObject, @in_guaranteed Builtin.NativeObject) -> () { -bb0(%0 : $Builtin.NativeObject, %1 : $*Builtin.NativeObject): - %2 = alloc_stack $Builtin.NativeObject - store %0 to %2 : $*Builtin.NativeObject - copy_addr %1 to %2 : $*Builtin.NativeObject - destroy_addr %2 : $*Builtin.NativeObject - dealloc_stack %2 : $*Builtin.NativeObject - %9999 = tuple() - return %9999 : $() -} - -// If PMO understood how to promote assigns, we should be able to handle this -// case. -// CHECK-LABEL: sil @simple_assign_take : $@convention(thin) (@owned Builtin.NativeObject, @in Builtin.NativeObject) -> () { -// CHECK: alloc_stack -// CHECK: copy_addr -// CHECK: } // end sil function 'simple_assign_take' -sil @simple_assign_take : $@convention(thin) (@owned Builtin.NativeObject, @in Builtin.NativeObject) -> () { -bb0(%0 : $Builtin.NativeObject, %1 : $*Builtin.NativeObject): - %2 = alloc_stack $Builtin.NativeObject - store %0 to %2 : $*Builtin.NativeObject - copy_addr [take] %1 to %2 : $*Builtin.NativeObject - destroy_addr %2 : $*Builtin.NativeObject - dealloc_stack %2 : $*Builtin.NativeObject - %9999 = tuple() - return %9999 : $() -} - -// CHECK-LABEL: sil @simple_diamond_without_assign : $@convention(thin) (@owned Builtin.NativeObject) -> () { -// CHECK: bb0([[ARG:%.*]] : -// CHECK-NOT: alloc_stack -// CHECK-NOT: store -// CHECK: bb3: -// CHECK-NEXT: strong_release [[ARG]] -// CHECK: } // end sil function 'simple_diamond_without_assign' -sil @simple_diamond_without_assign : $@convention(thin) (@owned Builtin.NativeObject) -> () { -bb0(%0 : $Builtin.NativeObject): - %1 = alloc_stack $Builtin.NativeObject - store %0 to %1 : $*Builtin.NativeObject - cond_br undef, bb1, bb2 - -bb1: - br bb3 - -bb2: - br bb3 - -bb3: - destroy_addr %1 : $*Builtin.NativeObject - dealloc_stack %1 : $*Builtin.NativeObject - %9999 = tuple() - return %9999 : $() -} - -// We should not promote this due to this being an assign to %2. -// CHECK-LABEL: sil @simple_diamond_with_assign : $@convention(thin) (@owned Builtin.NativeObject, @in Builtin.NativeObject) -> () { -// CHECK: alloc_stack -// CHECK: copy_addr -// CHECK: } // end sil function 'simple_diamond_with_assign' -sil @simple_diamond_with_assign : $@convention(thin) (@owned Builtin.NativeObject, @in Builtin.NativeObject) -> () { -bb0(%0 : $Builtin.NativeObject, %1 : $*Builtin.NativeObject): - %2 = alloc_stack $Builtin.NativeObject - store %0 to %2 : $*Builtin.NativeObject - cond_br undef, bb1, bb2 - -bb1: - copy_addr [take] %1 to %2 : $*Builtin.NativeObject - br bb3 - -bb2: - br bb3 - -bb3: - destroy_addr %2 : $*Builtin.NativeObject - dealloc_stack %2 : $*Builtin.NativeObject - %9999 = tuple() - return %9999 : $() -} - -// Today PMO can not handle different available values coming from different -// BBs. With time it can be taught to do that if necessary. That being said, -// this test shows that we /tried/ and failed with the available value test -// instead of failing earlier due to the copy_addr being an assign since we -// explode the copy_addr. -// CHECK-LABEL: sil @simple_diamond_with_assign_remove : $@convention(thin) (@owned Builtin.NativeObject, @in Builtin.NativeObject) -> () { -// CHECK: alloc_stack -// CHECK-NOT: copy_addr -// CHECK: } // end sil function 'simple_diamond_with_assign_remove' -sil @simple_diamond_with_assign_remove : $@convention(thin) (@owned Builtin.NativeObject, @in Builtin.NativeObject) -> () { -bb0(%0 : $Builtin.NativeObject, %1 : $*Builtin.NativeObject): - %2 = alloc_stack $Builtin.NativeObject - store %0 to %2 : $*Builtin.NativeObject - cond_br undef, bb1, bb2 - -bb1: - destroy_addr %2 : $*Builtin.NativeObject - copy_addr [take] %1 to [initialization] %2 : $*Builtin.NativeObject - br bb3 - -bb2: - br bb3 - -bb3: - destroy_addr %2 : $*Builtin.NativeObject - dealloc_stack %2 : $*Builtin.NativeObject - %9999 = tuple() - return %9999 : $() -} diff --git a/test/SILOptimizer/predictable_deadalloc_elim_ownership.sil b/test/SILOptimizer/predictable_deadalloc_elim_ownership.sil index e1926effad5e9..2f9cf9a4800d5 100644 --- a/test/SILOptimizer/predictable_deadalloc_elim_ownership.sil +++ b/test/SILOptimizer/predictable_deadalloc_elim_ownership.sil @@ -16,6 +16,11 @@ struct KlassWithKlassTuple { var third: Klass } +enum FakeOptional { +case none +case some(T) +} + /////////// // Tests // /////////// @@ -516,3 +521,180 @@ bb3: %9999 = tuple() return %9999 : $() } + +// In this case, there isn't any cleanup of %1 along bbNone since. Make sure we +// handle it appropriately and eliminate the alloc_stack. +// +// CHECK-LABEL: sil [ossa] @leak_along_nopayload_case_is_ok : $@convention(thin) (@owned Optional) -> () { +// CHECK-NOT: alloc_stack +// CHECK: } // end sil function 'leak_along_nopayload_case_is_ok' +sil [ossa] @leak_along_nopayload_case_is_ok : $@convention(thin) (@owned Optional) -> () { +bb0(%0 : @owned $Optional): + %1 = alloc_stack $Optional + %2 = copy_value %0 : $Optional + store %0 to [init] %1 : $*Optional + %3 = copy_value %2 : $Optional + %4 = begin_borrow %3 : $Optional + destroy_value %2 : $Optional + switch_enum %4 : $Optional, case #Optional.some!enumeult: bbSome, case #Optional.none!enumelt: bbNone + +bbNone: + end_borrow %4 : $Optional + destroy_value %3 : $Optional + dealloc_stack %1 : $*Optional + br bbEnd + +bbSome(%obj : @guaranteed $Builtin.NativeObject): + end_borrow %4 : $Optional + destroy_value %3 : $Optional + %1Loaded = load [take] %1 : $*Optional + destroy_value %1Loaded : $Optional + dealloc_stack %1 : $*Optional + br bbEnd + +bbEnd: + %9999 = tuple() + return %9999 : $() +} + +// Add an unreachable into the mix so that we do not have any destroy_value on +// %0 when we promote. +// CHECK-LABEL: sil [ossa] @leak_along_nopayload_case_and_unreachable_is_ok : $@convention(thin) (@owned Optional) -> () { +// CHECK-NOT: alloc_stack +// CHECK: } // end sil function 'leak_along_nopayload_case_and_unreachable_is_ok' +sil [ossa] @leak_along_nopayload_case_and_unreachable_is_ok : $@convention(thin) (@owned Optional) -> () { +bb0(%0 : @owned $Optional): + %1 = alloc_stack $Optional + %2 = copy_value %0 : $Optional + store %0 to [init] %1 : $*Optional + %3 = copy_value %2 : $Optional + %4 = begin_borrow %3 : $Optional + destroy_value %2 : $Optional + switch_enum %4 : $Optional, case #Optional.some!enumeult: bbSome, case #Optional.none!enumelt: bbNone + +bbNone: + end_borrow %4 : $Optional + destroy_value %3 : $Optional + dealloc_stack %1 : $*Optional + br bbEnd + +bbSome(%obj : @guaranteed $Builtin.NativeObject): + end_borrow %4 : $Optional + destroy_value %3 : $Optional + unreachable + +bbEnd: + %9999 = tuple() + return %9999 : $() +} + +// CHECK-LABEL: sil [ossa] @leak_along_nopayload_case_and_unreachable_is_ok_with_destroyaddr : $@convention(thin) (@owned Optional) -> () { +// CHECK-NOT: alloc_stack +// CHECK: } // end sil function 'leak_along_nopayload_case_and_unreachable_is_ok_with_destroyaddr' +sil [ossa] @leak_along_nopayload_case_and_unreachable_is_ok_with_destroyaddr : $@convention(thin) (@owned Optional) -> () { +bb0(%0 : @owned $Optional): + %1 = alloc_stack $Optional + %2 = copy_value %0 : $Optional + store %0 to [init] %1 : $*Optional + %3 = copy_value %2 : $Optional + %4 = begin_borrow %3 : $Optional + destroy_value %2 : $Optional + switch_enum %4 : $Optional, case #Optional.some!enumeult: bbSome, case #Optional.none!enumelt: bbNone + +bbNone: + end_borrow %4 : $Optional + destroy_value %3 : $Optional + dealloc_stack %1 : $*Optional + br bbEnd + +bbSome(%obj : @guaranteed $Builtin.NativeObject): + end_borrow %4 : $Optional + destroy_value %3 : $Optional + destroy_addr %1 : $*Optional + unreachable + +bbEnd: + %9999 = tuple() + return %9999 : $() +} + +// CHECK-LABEL: sil [ossa] @leak_along_nopayload_case_and_unreachable_is_ok_with_deallocstack : $@convention(thin) (@owned Optional) -> () { +// CHECK-NOT: alloc_stack +// CHECK: } // end sil function 'leak_along_nopayload_case_and_unreachable_is_ok_with_deallocstack' +sil [ossa] @leak_along_nopayload_case_and_unreachable_is_ok_with_deallocstack : $@convention(thin) (@owned Optional) -> () { +bb0(%0 : @owned $Optional): + %1 = alloc_stack $Optional + %2 = copy_value %0 : $Optional + store %0 to [init] %1 : $*Optional + %3 = copy_value %2 : $Optional + %4 = begin_borrow %3 : $Optional + destroy_value %2 : $Optional + switch_enum %4 : $Optional, case #Optional.some!enumeult: bbSome, case #Optional.none!enumelt: bbNone + +bbNone: + end_borrow %4 : $Optional + destroy_value %3 : $Optional + dealloc_stack %1 : $*Optional + br bbEnd + +bbSome(%obj : @guaranteed $Builtin.NativeObject): + end_borrow %4 : $Optional + destroy_value %3 : $Optional + dealloc_stack %1 : $*Optional + unreachable + +bbEnd: + %9999 = tuple() + return %9999 : $() +} + +// Make sure that we can handle this test case without asserting. Previously the +// pass had memory safety issues since we could delete %0 below before %1 is +// optimized. When %1 was optimized we would be using as its available value a +// stale pointer to %0. +// CHECK-LABEL: sil [ossa] @promoting_loadtake_with_other_promoting_loadtake : $@convention(thin) (@owned Builtin.NativeObject) -> () { +// CHECK-NOT: load [take] +// CHECK: } // end sil function 'promoting_loadtake_with_other_promoting_loadtake' +sil [ossa] @promoting_loadtake_with_other_promoting_loadtake : $@convention(thin) (@owned Builtin.NativeObject) -> () { +bb0(%arg : @owned $Builtin.NativeObject): + %mem = alloc_stack $Builtin.NativeObject + store %arg to [init] %mem : $*Builtin.NativeObject + %0 = load [take] %mem : $*Builtin.NativeObject + store %0 to [init] %mem : $*Builtin.NativeObject + %1 = load [take] %mem : $*Builtin.NativeObject + destroy_value %1 : $Builtin.NativeObject + dealloc_stack %mem : $*Builtin.NativeObject + %9999 = tuple() + return %9999 : $() +} + +// CHECK-LABEL: sil [ossa] @bail_on_forwardingunowned_use : $@convention(thin) (@owned Builtin.NativeObject) -> () { +// CHECK: alloc_stack +// CHECK: } // end sil function 'bail_on_forwardingunowned_use' +sil [ossa] @bail_on_forwardingunowned_use : $@convention(thin) (@owned Builtin.NativeObject) -> () { +bb0(%arg : @owned $Builtin.NativeObject): + br bb1(%arg : $Builtin.NativeObject) + +bb1(%unowned : @unowned $Builtin.NativeObject): + %mem = alloc_stack $Builtin.NativeObject + store %arg to [init] %mem : $*Builtin.NativeObject + %0 = load [take] %mem : $*Builtin.NativeObject + store %0 to [init] %mem : $*Builtin.NativeObject + %1 = load [take] %mem : $*Builtin.NativeObject + destroy_value %1 : $Builtin.NativeObject + unreachable +} + +// CHECK-LABEL: sil [ossa] @bail_on_forwardingunowned_use_negativecase : $@convention(thin) (@owned Builtin.NativeObject) -> () { +// CHECK-NOT: alloc_stack +// CHECK: } // end sil function 'bail_on_forwardingunowned_use_negativecase' +sil [ossa] @bail_on_forwardingunowned_use_negativecase : $@convention(thin) (@owned Builtin.NativeObject) -> () { +bb0(%arg : @owned $Builtin.NativeObject): + %mem = alloc_stack $Builtin.NativeObject + store %arg to [init] %mem : $*Builtin.NativeObject + %0 = load [take] %mem : $*Builtin.NativeObject + store %0 to [init] %mem : $*Builtin.NativeObject + %1 = load [take] %mem : $*Builtin.NativeObject + destroy_value %1 : $Builtin.NativeObject + unreachable +} diff --git a/test/SILOptimizer/predictable_memopt.sil b/test/SILOptimizer/predictable_memopt.sil index 45fa15ca9fdcf..fcf7548a1ff27 100644 --- a/test/SILOptimizer/predictable_memopt.sil +++ b/test/SILOptimizer/predictable_memopt.sil @@ -1,8 +1,10 @@ -// RUN: %target-sil-opt -enable-sil-verify-all %s -predictable-memaccess-opts -predictable-deadalloc-elim | %FileCheck %s +// RUN: %target-sil-opt -enable-sil-verify-all %s -predictable-memaccess-opts | %FileCheck %s import Builtin import Swift +// REQUIRES: do_not_commit_this_test_needs_update + // CHECK-LABEL: sil @simple_reg_promotion // CHECK: bb0(%0 : $Int): // CHECK-NEXT: return %0 : $Int From f66f14c0740595afa9fa8780df22da163aaa610d Mon Sep 17 00:00:00 2001 From: Michael Gottesman Date: Tue, 9 Feb 2021 22:12:10 -0800 Subject: [PATCH 03/15] [pred-dead-alloc] Be more conservative and bail if we did not find a complete available value when cleaning up takes. --- lib/SILOptimizer/Mandatory/PredictableMemOpt.cpp | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/lib/SILOptimizer/Mandatory/PredictableMemOpt.cpp b/lib/SILOptimizer/Mandatory/PredictableMemOpt.cpp index 740251922f495..4e6de46139d75 100644 --- a/lib/SILOptimizer/Mandatory/PredictableMemOpt.cpp +++ b/lib/SILOptimizer/Mandatory/PredictableMemOpt.cpp @@ -2265,6 +2265,12 @@ bool AllocOptimize::canPromoteTake( if (!agg.canTake(loadTy, firstElt)) return false; + // As a final check, make sure that we have an available value for each value, + // if not bail. + for (const auto &av : tmpList) + if (!av.Value) + return false; + // Ok, we can promote this destroy_addr... move the temporary lists contents // into the final AvailableValues list. std::move(tmpList.begin(), tmpList.end(), From ed7e1df620378e65bafd35e80438f50b2e8b5aa2 Mon Sep 17 00:00:00 2001 From: Michael Gottesman Date: Wed, 10 Feb 2021 11:30:24 -0800 Subject: [PATCH 04/15] [sil] Add a cache for SILVerifier::isOperandInValueUses. While looking at the performance of the verifier running with -sil-verify-all on the stdlib, I noticed that we are spending ~30% of the total time in the verifier performing this check. Introducing the cache mitigates this issue. I believe the reason is that we were walking for each operand the use list of its associated value which I think is quadratic. --- lib/SIL/Verifier/SILVerifier.cpp | 35 ++++++++++++++++++++++++-------- 1 file changed, 26 insertions(+), 9 deletions(-) diff --git a/lib/SIL/Verifier/SILVerifier.cpp b/lib/SIL/Verifier/SILVerifier.cpp index 3ca3e589f7a45..1599f77ea2609 100644 --- a/lib/SIL/Verifier/SILVerifier.cpp +++ b/lib/SIL/Verifier/SILVerifier.cpp @@ -682,6 +682,31 @@ class SILVerifier : public SILVerifierBase { LoadBorrowImmutabilityAnalysis loadBorrowImmutabilityAnalysis; bool SingleFunction = true; + /// A cache of the isOperandInValueUse check. When we process an operand, we + /// fix this for each of its uses. + llvm::DenseSet> isOperandInValueUsesCache; + + /// Check that this operand appears in the use-chain of the value it uses. + bool isOperandInValueUses(const Operand *operand) { + SILValue value = operand->get(); + + // First check the cache. + if (isOperandInValueUsesCache.contains({value, operand})) + return true; + + // Otherwise, compute the value and initialize the cache for each of the + // operand's value uses. + bool foundUse = false; + for (auto *use : value->getUses()) { + if (use == operand) { + foundUse = true; + } + isOperandInValueUsesCache.insert({value, use}); + } + + return foundUse; + } + SILVerifier(const SILVerifier&) = delete; void operator=(const SILVerifier&) = delete; public: @@ -1112,7 +1137,7 @@ class SILVerifier : public SILVerifierBase { require(operand.getUser() == I, "instruction's operand's owner isn't the instruction"); - require(isInValueUses(&operand), "operand value isn't used by operand"); + require(isOperandInValueUses(&operand), "operand value isn't used by operand"); if (operand.isTypeDependent()) { require(isa(I), @@ -1311,14 +1336,6 @@ class SILVerifier : public SILVerifierBase { }); } - /// Check that this operand appears in the use-chain of the value it uses. - static bool isInValueUses(const Operand *operand) { - for (auto use : operand->get()->getUses()) - if (use == operand) - return true; - return false; - } - /// \return True if all of the users of the AllocStack instruction \p ASI are /// inside the same basic block. static bool isSingleBlockUsage(AllocStackInst *ASI, DominanceInfo *Dominance){ From 7d5b15a188054ebc253311f70209fb96ed19668f Mon Sep 17 00:00:00 2001 From: Michael Gottesman Date: Wed, 10 Feb 2021 12:41:35 -0800 Subject: [PATCH 05/15] [simplify-cfg] Enable remove unreachable blocks to shrink the CFG a bit. --- lib/SILOptimizer/Transforms/SimplifyCFG.cpp | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/lib/SILOptimizer/Transforms/SimplifyCFG.cpp b/lib/SILOptimizer/Transforms/SimplifyCFG.cpp index bc8a892598dd9..6b74b09c166a8 100644 --- a/lib/SILOptimizer/Transforms/SimplifyCFG.cpp +++ b/lib/SILOptimizer/Transforms/SimplifyCFG.cpp @@ -3178,6 +3178,10 @@ bool SimplifyCFG::run() { // First remove any block not reachable from the entry. bool Changed = removeUnreachableBlocks(Fn); + // If we have ownership bail. We jus4t want to remove unreachable blocks. + if (Fn.hasOwnership()) + return Changed; + // Find the set of loop headers. We don't want to jump-thread through headers. findLoopHeaders(); @@ -3928,10 +3932,6 @@ namespace { class SimplifyCFGPass : public SILFunctionTransform { public: void run() override { - // FIXME: We should be able to handle ownership. - if (getFunction()->hasOwnership()) - return; - if (SimplifyCFG(*getFunction(), *this, getOptions().VerifyAll, /*EnableJumpThread=*/false) .run()) From 16a558fe4f10603b40fa68ad5f757920817b9b3b Mon Sep 17 00:00:00 2001 From: Meghana Gupta Date: Wed, 10 Feb 2021 15:23:11 -0800 Subject: [PATCH 06/15] Remove an llvm_unreachable in ownership rauw This PR just removes an unnecessary error raised in OwnershipLifetimeExtender::createPlusOneCopy. We can ownership rauw a value inside the loop with a value outside the loop. findJointPostDominatingSet correctly helps create control equivalent copies inside the loop for replacement. --- lib/SILOptimizer/Utils/OwnershipOptUtils.cpp | 26 +++++++-------- test/SILOptimizer/cse_ossa_nontrivial.sil | 33 ++++++++++++++++++-- 2 files changed, 43 insertions(+), 16 deletions(-) diff --git a/lib/SILOptimizer/Utils/OwnershipOptUtils.cpp b/lib/SILOptimizer/Utils/OwnershipOptUtils.cpp index 79834a5760817..07b9434716490 100644 --- a/lib/SILOptimizer/Utils/OwnershipOptUtils.cpp +++ b/lib/SILOptimizer/Utils/OwnershipOptUtils.cpp @@ -264,24 +264,16 @@ struct OwnershipLifetimeExtender { CopyValueInst * OwnershipLifetimeExtender::createPlusOneCopy(SILValue value, SILInstruction *consumingPoint) { - auto *newValInsertPt = value->getDefiningInsertionPoint(); - assert(newValInsertPt); - CopyValueInst *copy; - if (!isa(value)) { - SILBuilderWithScope::insertAfter(newValInsertPt, [&](SILBuilder &builder) { - copy = builder.createCopyValue(builder.getInsertionPointLoc(), value); - }); - } else { - SILBuilderWithScope builder(newValInsertPt); - copy = builder.createCopyValue(newValInsertPt->getLoc(), value); - } + auto *copyPoint = value->getNextInstruction(); + auto loc = copyPoint->getLoc(); + auto *copy = SILBuilderWithScope(copyPoint).createCopyValue(loc, value); auto &callbacks = ctx.callbacks; callbacks.createdNewInst(copy); auto *result = copy; findJointPostDominatingSet( - newValInsertPt->getParent(), consumingPoint->getParent(), + copyPoint->getParent(), consumingPoint->getParent(), // inputBlocksFoundDuringWalk. [&](SILBasicBlock *loopBlock) { // This must be consumingPoint->getParent() since we only have one @@ -291,10 +283,16 @@ OwnershipLifetimeExtender::createPlusOneCopy(SILValue value, assert(loopBlock == consumingPoint->getParent()); auto front = loopBlock->begin(); SILBuilderWithScope newBuilder(front); + + // Create an extra copy when the consuming point is inside a + // loop and both copyPoint and the destroy points are outside the + // loop. This copy will be consumed in the same block. The original + // value will be destroyed on all paths exiting the loop. + // + // Since copyPoint dominates consumingPoint, it must be outside the + // loop. Otherwise backward traversal would have stopped at copyPoint. result = newBuilder.createCopyValue(front->getLoc(), copy); callbacks.createdNewInst(result); - - llvm_unreachable("Should never visit this!"); }, // Input blocks in joint post dom set. We don't care about thse. [&](SILBasicBlock *postDomBlock) { diff --git a/test/SILOptimizer/cse_ossa_nontrivial.sil b/test/SILOptimizer/cse_ossa_nontrivial.sil index 21b1b3600283b..285f97d0c255e 100644 --- a/test/SILOptimizer/cse_ossa_nontrivial.sil +++ b/test/SILOptimizer/cse_ossa_nontrivial.sil @@ -7,8 +7,9 @@ import Swift /////////////// // CSE Tests // /////////////// -class Klass { -} +class SuperKlass {} + +class Klass : SuperKlass {} struct NonTrivialStruct { var val:Klass @@ -34,6 +35,7 @@ struct StructWithEnum2 { sil @use_nontrivialstruct1 : $@convention(thin) (@guaranteed NonTrivialStruct) -> () sil @use_nontrivialstruct2 : $@convention(thin) (@owned NonTrivialStruct) -> () +sil @use_superklass1 : $@convention(thin) (@owned SuperKlass) -> () // CHECK-LABEL: sil [ossa] @structliteral1 : // CHECK: struct $NonTrivialStruct @@ -767,3 +769,30 @@ bb0(%0 : @guaranteed $WrapperKlass): return %res : $() } +// CHECK-LABEL: sil [ossa] @cse_upcast_loop : +// CHECK: upcast +// CHECK-NOT: upcast +// CHECK-LABEL: } // end sil function 'cse_upcast_loop' +sil [ossa] @cse_upcast_loop : $@convention(thin) (@owned Klass) -> () { +bb0(%0 : @owned $Klass): + %func = function_ref @use_superklass1 : $@convention(thin) (@owned SuperKlass) -> () + %copy0 = copy_value %0 : $Klass + %1 = upcast %copy0 : $Klass to $SuperKlass + apply %func(%1) : $@convention(thin) (@owned SuperKlass) -> () + br bb1 + +bb1: + cond_br undef, bb2, bb3 + +bb2: + %copy1 = copy_value %0 : $Klass + %2 = upcast %copy1 : $Klass to $SuperKlass + apply %func(%2) : $@convention(thin) (@owned SuperKlass) -> () + br bb1 + +bb3: + destroy_value %0 : $Klass + %res = tuple () + return %res : $() +} + From ada9408837451e70e72713fceeb265a923a7e795 Mon Sep 17 00:00:00 2001 From: Michael Gottesman Date: Wed, 10 Feb 2021 12:47:57 -0800 Subject: [PATCH 07/15] [simplify-cfg] Enable some simple opts during ownerships on br, cond_br that do not involve objects directly. Just to reduce the size of the CFG. --- lib/SILOptimizer/Transforms/SimplifyCFG.cpp | 43 +++++++++++++++++-- lib/SILOptimizer/Utils/BasicBlockOptUtils.cpp | 2 + 2 files changed, 41 insertions(+), 4 deletions(-) diff --git a/lib/SILOptimizer/Transforms/SimplifyCFG.cpp b/lib/SILOptimizer/Transforms/SimplifyCFG.cpp index 6b74b09c166a8..adf6de6c82eaf 100644 --- a/lib/SILOptimizer/Transforms/SimplifyCFG.cpp +++ b/lib/SILOptimizer/Transforms/SimplifyCFG.cpp @@ -2665,6 +2665,8 @@ bool SimplifyCFG::simplifyBlocks() { for (auto &BB : Fn) addToWorklist(&BB); + bool hasOwnership = Fn.hasOwnership(); + // Iteratively simplify while there is still work to do. while (SILBasicBlock *BB = popWorklist()) { // If the block is dead, remove it. @@ -2682,6 +2684,11 @@ bool SimplifyCFG::simplifyBlocks() { Changed = true; continue; } + + // We do not jump thread at all now. + if (hasOwnership) + continue; + // If this unconditional branch has BBArgs, check to see if duplicating // the destination would allow it to be simplified. This is a simple form // of jump threading. @@ -2694,10 +2701,14 @@ bool SimplifyCFG::simplifyBlocks() { Changed |= simplifyCondBrBlock(cast(TI)); break; case TermKind::SwitchValueInst: + if (hasOwnership) + continue; // FIXME: Optimize for known switch values. Changed |= simplifySwitchValueBlock(cast(TI)); break; case TermKind::SwitchEnumInst: { + if (hasOwnership) + continue; auto *SEI = cast(TI); if (simplifySwitchEnumBlock(SEI)) { Changed = true; @@ -2713,19 +2724,29 @@ bool SimplifyCFG::simplifyBlocks() { Changed |= simplifyUnreachableBlock(cast(TI)); break; case TermKind::CheckedCastBranchInst: + if (hasOwnership) + continue; Changed |= simplifyCheckedCastBranchBlock(cast(TI)); break; case TermKind::CheckedCastValueBranchInst: + if (hasOwnership) + continue; Changed |= simplifyCheckedCastValueBranchBlock( cast(TI)); break; case TermKind::CheckedCastAddrBranchInst: + if (hasOwnership) + continue; Changed |= simplifyCheckedCastAddrBranchBlock(cast(TI)); break; case TermKind::TryApplyInst: + if (hasOwnership) + continue; Changed |= simplifyTryApplyBlock(cast(TI)); break; case TermKind::SwitchEnumAddrInst: + if (hasOwnership) + continue; Changed |= simplifyTermWithIdenticalDestBlocks(BB); break; case TermKind::ThrowInst: @@ -2733,11 +2754,19 @@ bool SimplifyCFG::simplifyBlocks() { case TermKind::ReturnInst: case TermKind::UnwindInst: case TermKind::YieldInst: + if (hasOwnership) + continue; break; case TermKind::AwaitAsyncContinuationInst: + if (hasOwnership) + continue; // TODO(async): Simplify AwaitAsyncContinuationInst break; } + + if (hasOwnership) + continue; + // If the block has a cond_fail, try to move it to the predecessors. Changed |= tryMoveCondFailToPreds(BB); @@ -3178,9 +3207,16 @@ bool SimplifyCFG::run() { // First remove any block not reachable from the entry. bool Changed = removeUnreachableBlocks(Fn); - // If we have ownership bail. We jus4t want to remove unreachable blocks. - if (Fn.hasOwnership()) + // If we have ownership bail. We just want to remove unreachable blocks and + // simplify. + if (Fn.hasOwnership()) { + DT = nullptr; + if (simplifyBlocks()) { + removeUnreachableBlocks(Fn); + Changed = true; + } return Changed; + } // Find the set of loop headers. We don't want to jump-thread through headers. findLoopHeaders(); @@ -3908,6 +3944,7 @@ bool SimplifyCFG::simplifyProgramTerminationBlock(SILBasicBlock *BB) { #include "swift/AST/ReferenceStorage.def" case SILInstructionKind::StrongReleaseInst: case SILInstructionKind::ReleaseValueInst: + case SILInstructionKind::DestroyValueInst: case SILInstructionKind::DestroyAddrInst: break; default: @@ -3950,8 +3987,6 @@ class JumpThreadSimplifyCFGPass : public SILFunctionTransform { public: void run() override { // FIXME: Handle ownership. - if (getFunction()->hasOwnership()) - return; if (SimplifyCFG(*getFunction(), *this, getOptions().VerifyAll, /*EnableJumpThread=*/true) .run()) diff --git a/lib/SILOptimizer/Utils/BasicBlockOptUtils.cpp b/lib/SILOptimizer/Utils/BasicBlockOptUtils.cpp index 3311e39003c3b..13c8100d01caa 100644 --- a/lib/SILOptimizer/Utils/BasicBlockOptUtils.cpp +++ b/lib/SILOptimizer/Utils/BasicBlockOptUtils.cpp @@ -44,6 +44,8 @@ void swift::clearBlockBody(SILBasicBlock *bb) { for (SILArgument *arg : bb->getArguments()) { arg->replaceAllUsesWithUndef(); + // To appease the ownership verifier, just set to None. + arg->setOwnershipKind(OwnershipKind::None); } // Instructions in the dead block may be used by other dead blocks. Replace From 5a16f027875481476b5c0c78c2649b99d66f6808 Mon Sep 17 00:00:00 2001 From: Michael Gottesman Date: Thu, 11 Feb 2021 00:56:32 -0800 Subject: [PATCH 08/15] [loop-rotate] In OSSA, instead of creating address phis, sneak the address through the phi using a RawPointer. In OSSA, we do not allow for address phis, but in certain cases the logic of LoopRotate really wants them. To work around this issue, I added some code in this PR to loop rotate that as a post-pass fixes up any address phis by inserting address <-> raw pointer adapters and changing the address phi to instead be of raw pointer type. --- include/swift/SIL/SILArgument.h | 6 ++ lib/SIL/IR/SILArgument.cpp | 6 ++ .../LoopTransforms/LoopRotate.cpp | 77 +++++++++++++++---- test/SILOptimizer/looprotate_ossa.sil | 55 +++++++++++++ 4 files changed, 130 insertions(+), 14 deletions(-) diff --git a/include/swift/SIL/SILArgument.h b/include/swift/SIL/SILArgument.h index debe9aee198b9..89382614f658b 100644 --- a/include/swift/SIL/SILArgument.h +++ b/include/swift/SIL/SILArgument.h @@ -215,6 +215,12 @@ class SILPhiArgument : public SILArgument { /// this will be guaranteed to return a valid SILValue. SILValue getIncomingPhiValue(SILBasicBlock *predBlock) const; + /// If this argument is a true phi, return the operand in the \p predBLock + /// associated with an incoming value. + /// + /// \returns the operand or nullptr if this is not a true phi. + Operand *getIncomingPhiOperand(SILBasicBlock *predBlock) const; + /// If this argument is a phi, populate `OutArray` with the incoming phi /// values for each predecessor BB. If this argument is not a phi, return /// false. diff --git a/lib/SIL/IR/SILArgument.cpp b/lib/SIL/IR/SILArgument.cpp index 693f8820868cb..7e9c4157150d3 100644 --- a/lib/SIL/IR/SILArgument.cpp +++ b/lib/SIL/IR/SILArgument.cpp @@ -161,6 +161,12 @@ bool SILPhiArgument::getIncomingPhiValues( return true; } +Operand *SILPhiArgument::getIncomingPhiOperand(SILBasicBlock *predBlock) const { + if (!isPhiArgument()) + return nullptr; + return getIncomingPhiOperandForPred(getParent(), predBlock, getIndex()); +} + bool SILPhiArgument::getIncomingPhiOperands( SmallVectorImpl &returnedPhiOperands) const { if (!isPhiArgument()) diff --git a/lib/SILOptimizer/LoopTransforms/LoopRotate.cpp b/lib/SILOptimizer/LoopTransforms/LoopRotate.cpp index 71b07fab925b0..3fdfee4e21d48 100644 --- a/lib/SILOptimizer/LoopTransforms/LoopRotate.cpp +++ b/lib/SILOptimizer/LoopTransforms/LoopRotate.cpp @@ -125,7 +125,9 @@ static void mapOperands(SILInstruction *inst, static void updateSSAForUseOfValue( SILSSAUpdater &updater, SmallVectorImpl &insertedPhis, const llvm::DenseMap &valueMap, - SILBasicBlock *Header, SILBasicBlock *EntryCheckBlock, SILValue Res) { + SILBasicBlock *Header, SILBasicBlock *EntryCheckBlock, SILValue Res, + SmallVectorImpl> + &accumulatedAddressPhis) { // Find the mapped instruction. assert(valueMap.count(Res) && "Expected to find value in map!"); SILValue MappedValue = valueMap.find(Res)->second; @@ -159,39 +161,52 @@ static void updateSSAForUseOfValue( && "The entry check block should dominate the header"); updater.rewriteUse(*use); } - // Canonicalize inserted phis to avoid extra BB Args. + + // Canonicalize inserted phis to avoid extra BB Args and if we find an address + // phi, stash it so we can handle it after we are done rewriting. + bool hasOwnership = Header->getParent()->hasOwnership(); for (SILPhiArgument *arg : insertedPhis) { if (SILValue inst = replaceBBArgWithCast(arg)) { arg->replaceAllUsesWith(inst); // DCE+SimplifyCFG runs as a post-pass cleanup. // DCE replaces dead arg values with undef. // SimplifyCFG deletes the dead BB arg. + continue; } + + // If we didn't simplify and have an address phi, stash the value so we can + // fix it up. + if (hasOwnership && arg->getType().isAddress()) + accumulatedAddressPhis.emplace_back(arg->getParent(), arg->getIndex()); } } -static void -updateSSAForUseOfInst(SILSSAUpdater &updater, - SmallVectorImpl &insertedPhis, - const llvm::DenseMap &valueMap, - SILBasicBlock *header, SILBasicBlock *entryCheckBlock, - SILInstruction *inst) { +static void updateSSAForUseOfInst( + SILSSAUpdater &updater, SmallVectorImpl &insertedPhis, + const llvm::DenseMap &valueMap, + SILBasicBlock *header, SILBasicBlock *entryCheckBlock, SILInstruction *inst, + SmallVectorImpl> + &accumulatedAddressPhis) { for (auto result : inst->getResults()) updateSSAForUseOfValue(updater, insertedPhis, valueMap, header, - entryCheckBlock, result); + entryCheckBlock, result, accumulatedAddressPhis); } /// Rewrite the code we just created in the preheader and update SSA form. static void rewriteNewLoopEntryCheckBlock( SILBasicBlock *header, SILBasicBlock *entryCheckBlock, const llvm::DenseMap &valueMap) { - SmallVector insertedPhis; + SmallVector, 8> accumulatedAddressPhis; + SmallVector insertedPhis; SILSSAUpdater updater(&insertedPhis); - // Fix PHIs (incoming arguments). - for (auto *arg : header->getArguments()) + // Fix PHIs (incoming arguments). We iterate by index in case we replace the + // phi argument so we do not invalidate iterators. + for (unsigned i : range(header->getNumArguments())) { + auto *arg = header->getArguments()[i]; updateSSAForUseOfValue(updater, insertedPhis, valueMap, header, - entryCheckBlock, arg); + entryCheckBlock, arg, accumulatedAddressPhis); + } auto instIter = header->begin(); @@ -199,9 +214,43 @@ static void rewriteNewLoopEntryCheckBlock( while (instIter != header->end()) { auto &inst = *instIter; updateSSAForUseOfInst(updater, insertedPhis, valueMap, header, - entryCheckBlock, &inst); + entryCheckBlock, &inst, accumulatedAddressPhis); ++instIter; } + + // Then see if any of our phis were address phis. In such a case, rewrite the + // address to be a smuggled through raw pointer. We do this late to + // conservatively not interfere with the previous code's invariants. + // + // We also translate the phis into a BasicBlock, Index form so we are careful + // with invalidation issues around branches/args. + auto rawPointerTy = + SILType::getRawPointerType(header->getParent()->getASTContext()); + auto rawPointerUndef = SILUndef::get(rawPointerTy, header->getModule()); + auto loc = RegularLocation::getAutoGeneratedLocation(); + while (!accumulatedAddressPhis.empty()) { + SILBasicBlock *block; + unsigned argIndex; + std::tie(block, argIndex) = accumulatedAddressPhis.pop_back_val(); + auto *arg = cast(block->getArgument(argIndex)); + assert(arg->getType().isAddress() && "Not an address phi?!"); + for (auto *predBlock : block->getPredecessorBlocks()) { + Operand *predUse = arg->getIncomingPhiOperand(predBlock); + SILBuilderWithScope builder(predUse->getUser()); + auto *newIncomingValue = + builder.createAddressToPointer(loc, predUse->get(), rawPointerTy); + predUse->set(newIncomingValue); + } + SILBuilderWithScope builder(arg->getNextInstruction()); + SILType oldArgType = arg->getType(); + auto *phiShim = builder.createPointerToAddress( + loc, rawPointerUndef, oldArgType, true /*isStrict*/, + false /*is invariant*/); + arg->replaceAllUsesWith(phiShim); + SILArgument *newArg = block->replacePhiArgument( + argIndex, rawPointerTy, OwnershipKind::None, nullptr); + phiShim->setOperand(newArg); + } } /// Update the dominator tree after rotating the loop. diff --git a/test/SILOptimizer/looprotate_ossa.sil b/test/SILOptimizer/looprotate_ossa.sil index a669ad68ba4b5..fb39104f13f9e 100644 --- a/test/SILOptimizer/looprotate_ossa.sil +++ b/test/SILOptimizer/looprotate_ossa.sil @@ -472,3 +472,58 @@ bb2: return %1 : $() } +// Make sure that we do not create address phis. +// +// CHECK-LABEL: sil [ossa] @addressPhiFixUp : $@convention(thin) (Builtin.RawPointer) -> Builtin.RawPointer { +// CHECK: bb1: +// CHECK: [[NEXT:%.*]] = address_to_pointer {{%.*}} : $*UInt8 to $Builtin.RawPointer +// CHECK: cond_br {{%.*}}, bb3, bb2 +// +// CHECK: bb2: +// CHECK-NEXT: br bb7(%0 : $Builtin.RawPointer) +// +// CHECK: bb3: +// CHECK-NEXT: br bb4([[NEXT]] : $Builtin.RawPointer) +// +// CHECK: bb4([[ARG:%.*]] : +// CHECK: [[CAST_BACK:%.*]] = pointer_to_address [[ARG]] : $Builtin.RawPointer to [strict] $*UInt8 +// CHECK: [[GEP:%.*]] = index_addr [[CAST_BACK]] : +// CHECK: [[CAST_ROUND_TRIP_START:%.*]] = address_to_pointer [[GEP]] +// CHECK: [[CAST_ROUND_TRIP_END:%.*]] = pointer_to_address [[CAST_ROUND_TRIP_START]] : $Builtin.RawPointer to [strict] $*UInt8 +// CHECK: [[BACK_TO_RAWPOINTER:%.*]] = address_to_pointer [[CAST_ROUND_TRIP_END]] +// CHECK: cond_br {{%.*}}, bb6, bb5 +// +// CHECK: bb5: +// CHECK-NEXT: br bb7([[CAST_ROUND_TRIP_START]] : +// +// CHECK: bb6: +// CHECK-NEXT: br bb4([[BACK_TO_RAWPOINTER]] : +// +// CHECK: bb7([[RESULT:%.*]] : +// CHECK-NEXT: return [[RESULT]] +// CHECK: } // end sil function 'addressPhiFixUp' +sil [ossa] @addressPhiFixUp : $@convention(thin) (Builtin.RawPointer) -> Builtin.RawPointer { +bb0(%0 : $Builtin.RawPointer): + br bb1 + +bb1: + br bb2(%0 : $Builtin.RawPointer) + +bb2(%1 : $Builtin.RawPointer): + %2 = pointer_to_address %1 : $Builtin.RawPointer to [strict] $*UInt8 + %3 = load [trivial] %2 : $*UInt8 + %4 = destructure_struct %3 : $UInt8 + %5 = integer_literal $Builtin.Int64, 0 + %6 = builtin "zextOrBitCast_Int8_Int64"(%4 : $Builtin.Int8) : $Builtin.Int64 + %7 = builtin "cmp_eq_Int64"(%6 : $Builtin.Int64, %5 : $Builtin.Int64) : $Builtin.Int1 + cond_br %7, bb3, bb4 + +bb3: + %8 = integer_literal $Builtin.Word, 1 + %9 = index_addr %2 : $*UInt8, %8 : $Builtin.Word + %10 = address_to_pointer %9 : $*UInt8 to $Builtin.RawPointer + br bb2(%10 : $Builtin.RawPointer) + +bb4: + return %1 : $Builtin.RawPointer +} From a06737fa20bf631a31529bed14a88e6b2bab0b15 Mon Sep 17 00:00:00 2001 From: Michael Gottesman Date: Fri, 5 Feb 2021 18:31:34 -0800 Subject: [PATCH 09/15] [ownership] Change the stdlib to serialize code in ossa form. --- lib/SILOptimizer/PassManager/PassPipeline.cpp | 20 ++++++++++++------- test/IRGen/dynamic_lookup.sil | 2 +- .../shared_function_serialization.sil | 6 +++--- test/Serialization/early-serialization.swift | 6 +++--- .../load-serialized-sil.swift | 4 ++-- test/sil-opt/sil-opt.swift | 8 ++++---- 6 files changed, 26 insertions(+), 20 deletions(-) diff --git a/lib/SILOptimizer/PassManager/PassPipeline.cpp b/lib/SILOptimizer/PassManager/PassPipeline.cpp index bc81982c26bf6..8109638b5b8da 100644 --- a/lib/SILOptimizer/PassManager/PassPipeline.cpp +++ b/lib/SILOptimizer/PassManager/PassPipeline.cpp @@ -345,12 +345,6 @@ void addFunctionPasses(SILPassPipelinePlan &P, // class_method/witness_method instructions may use concrete types now. P.addDevirtualizer(); - // We earlier eliminated ownership if we are not compiling the stdlib. Now - // handle the stdlib functions, re-simplifying, eliminating ARC as we do. - P.addCopyPropagation(); - P.addSemanticARCOpts(); - P.addNonTransparentFunctionOwnershipModelEliminator(); - switch (OpLevel) { case OptimizationLevelKind::HighLevel: // Does not inline functions with defined semantics. @@ -367,6 +361,10 @@ void addFunctionPasses(SILPassPipelinePlan &P, break; } + // Clean up Semantic ARC before we perform additional post-inliner opts. + P.addCopyPropagation(); + P.addSemanticARCOpts(); + // Promote stack allocations to values and eliminate redundant // loads. P.addMem2Reg(); @@ -425,6 +423,10 @@ void addFunctionPasses(SILPassPipelinePlan &P, P.addRetainSinking(); P.addReleaseHoisting(); P.addARCSequenceOpts(); + + // Run a final round of ARC opts when ownership is enabled. + P.addCopyPropagation(); + P.addSemanticARCOpts(); } static void addPerfDebugSerializationPipeline(SILPassPipelinePlan &P) { @@ -513,7 +515,6 @@ static void addHighLevelFunctionPipeline(SILPassPipelinePlan &P) { // FIXME: update EagerSpecializer to be a function pass! P.addEagerSpecializer(); - // stdlib ownership model elimination is done within addFunctionPasses addFunctionPasses(P, OptimizationLevelKind::HighLevel); addHighLevelLoopOptPasses(P); @@ -769,6 +770,11 @@ SILPassPipelinePlan::getPerformancePassPipeline(const SILOptions &Options) { addHighLevelModulePipeline(P); + // Run one last copy propagation/semantic arc opts run before serialization/us + // lowering ownership. + P.addCopyPropagation(); + P.addSemanticARCOpts(); + addSerializePipeline(P); if (Options.StopOptimizationAfterSerialization) return P; diff --git a/test/IRGen/dynamic_lookup.sil b/test/IRGen/dynamic_lookup.sil index cfe2d117023bf..7b184b6f2c081 100644 --- a/test/IRGen/dynamic_lookup.sil +++ b/test/IRGen/dynamic_lookup.sil @@ -137,7 +137,7 @@ bb0(%0 : $AnyObject, %1 : $Int): %10 = open_existential_ref %8 : $AnyObject to $@opened("01234567-89ab-cdef-0123-111111111111") AnyObject // CHECK: [[SEL:%[0-9]+]] = load i8*, i8** @"\01L_selector(objectAtIndexedSubscript:)", align {{(4|8)}} // CHECK: [[RESPONDS:%[0-9]+]] = load i8*, i8** @"\01L_selector(respondsToSelector:)" - // CHECK-NEXT: [[HAS_SEL:%[0-9]]] = call i1 {{.*}}@objc_msgSend {{.*}}(%objc_object* [[OBJECT:%[0-9]+]], i8* [[RESPONDS]], i8* [[SEL]]) + // CHECK-NEXT: [[HAS_SEL:%[0-9]+]] = call i1 {{.*}}@objc_msgSend {{.*}}(%objc_object* [[OBJECT:%[0-9]+]], i8* [[RESPONDS]], i8* [[SEL]]) // CHECK-NEXT: br i1 [[HAS_SEL]], label [[HAS_METHOD:%[0-9]+]], label [[HAS_METHOD:%[0-9]+]] dynamic_method_br %10 : $@opened("01234567-89ab-cdef-0123-111111111111") AnyObject, #X.subscript!getter.foreign, bb1, bb2 diff --git a/test/SIL/Serialization/shared_function_serialization.sil b/test/SIL/Serialization/shared_function_serialization.sil index d875bff963827..67981f144922b 100644 --- a/test/SIL/Serialization/shared_function_serialization.sil +++ b/test/SIL/Serialization/shared_function_serialization.sil @@ -3,9 +3,9 @@ // RUN: %target-sil-opt -enable-sil-verify-all -I %t -performance-linker -inline %s -o - | %FileCheck %s // CHECK: sil private @top_level_code -// CHECK: sil public_external [serialized] @$ss1XVABycfC{{.*}} -// CHECK: sil public_external [serialized] @$ss17the_thing_it_does1xys1XV_tF{{.*}} -// CHECK: sil shared_external [serializable] [noinline] @$ss9the_thing1tyx_tlFs1XV_Tgq5{{.*}} +// CHECK: sil public_external [serialized] [ossa] @$ss1XVABycfC{{.*}} +// CHECK: sil public_external [serialized] [ossa] @$ss17the_thing_it_does1xys1XV_tF{{.*}} +// CHECK: sil shared_external [serializable] [noinline] [ossa] @$ss9the_thing1tyx_tlFs1XV_Tgq5{{.*}} sil_stage canonical diff --git a/test/Serialization/early-serialization.swift b/test/Serialization/early-serialization.swift index 6499ce16a8f34..f09067435bb8b 100644 --- a/test/Serialization/early-serialization.swift +++ b/test/Serialization/early-serialization.swift @@ -18,7 +18,7 @@ public struct Array { public init() {} // Check that the generic version of a @_semantics function is preserved. - // CHECK: sil [serialized] [_semantics "array.get_capacity"] [canonical] @$sSa12_getCapacitySiyF : $@convention(method) (Array) -> Int + // CHECK: sil [serialized] [_semantics "array.get_capacity"] [canonical] [ossa] @$sSa12_getCapacitySiyF : $@convention(method) (Array) -> Int @inlinable @usableFromInline @_semantics("array.get_capacity") @@ -28,10 +28,10 @@ public struct Array { } // Check that a specialized version of a function is produced -// CHECK: sil shared [serializable] [_semantics "array.get_capacity"] [canonical] @$sSa12_getCapacitySiyFSi_Tgq5 : $@convention(method) (Array) -> Int +// CHECK: sil shared [serializable] [_semantics "array.get_capacity"] [canonical] [ossa] @$sSa12_getCapacitySiyFSi_Tgq5 : $@convention(method) (Array) -> Int // Check that a call of a @_semantics function was not inlined if early-serialization is enabled. -// CHECK: sil [serialized] [canonical] @$ss28userOfSemanticsAnnotatedFuncySiSaySiGF +// CHECK: sil [serialized] [canonical] [ossa] @$ss28userOfSemanticsAnnotatedFuncySiSaySiGF // CHECK: function_ref // CHECK: apply @inlinable diff --git a/test/sil-func-extractor/load-serialized-sil.swift b/test/sil-func-extractor/load-serialized-sil.swift index ffd9201e8f313..2233e8cc199c7 100644 --- a/test/sil-func-extractor/load-serialized-sil.swift +++ b/test/sil-func-extractor/load-serialized-sil.swift @@ -12,7 +12,7 @@ // CHECK-NEXT: init // CHECK-NEXT: } -// CHECK-LABEL: sil [serialized] [canonical] @$ss1XV4testyyF : $@convention(method) (X) -> () +// CHECK-LABEL: sil [serialized] [canonical] [ossa] @$ss1XV4testyyF : $@convention(method) (X) -> () // CHECK: bb0 // CHECK-NEXT: function_ref // CHECK-NEXT: function_ref @unknown : $@convention(thin) () -> () @@ -36,7 +36,7 @@ // SIB-CHECK-NEXT: init // SIB-CHECK-NEXT: } -// SIB-CHECK-LABEL: sil [serialized] [canonical] @$ss1XV4testyyF : $@convention(method) (X) -> () +// SIB-CHECK-LABEL: sil [serialized] [canonical] [ossa] @$ss1XV4testyyF : $@convention(method) (X) -> () // SIB-CHECK: bb0 // SIB-CHECK-NEXT: function_ref // SIB-CHECK-NEXT: function_ref @unknown : $@convention(thin) () -> () diff --git a/test/sil-opt/sil-opt.swift b/test/sil-opt/sil-opt.swift index 5c4674df10eeb..a51e9d32e62d9 100644 --- a/test/sil-opt/sil-opt.swift +++ b/test/sil-opt/sil-opt.swift @@ -11,7 +11,7 @@ // CHECK-NEXT: @inlinable init // CHECK-NEXT: } -// CHECK-LABEL: sil [serialized] [canonical] @$ss1XV4testyyF : $@convention(method) (X) -> () +// CHECK-LABEL: sil [serialized] [canonical] [ossa] @$ss1XV4testyyF : $@convention(method) (X) -> () // CHECK: bb0 // CHECK-NEXT: function_ref // CHECK-NEXT: function_ref @unknown : $@convention(thin) () -> () @@ -19,7 +19,7 @@ // CHECK-NEXT: tuple // CHECK-NEXT: return -// CHECK-LABEL: sil [serialized] [canonical] @$ss1XVABycfC : $@convention(method) (@thin X.Type) -> X +// CHECK-LABEL: sil [serialized] [canonical] [ossa] @$ss1XVABycfC : $@convention(method) (@thin X.Type) -> X // CHECK: bb0 // CHECK-NEXT: struct $X () // CHECK-NEXT: return @@ -37,7 +37,7 @@ // SIB-CHECK-NEXT: init // SIB-CHECK-NEXT: } -// SIB-CHECK-LABEL: sil [serialized] [canonical] @$ss1XV4testyyF : $@convention(method) (X) -> () +// SIB-CHECK-LABEL: sil [serialized] [canonical] [ossa] @$ss1XV4testyyF : $@convention(method) (X) -> () // SIB-CHECK: bb0 // SIB-CHECK-NEXT: function_ref // SIB-CHECK-NEXT: function_ref @unknown : $@convention(thin) () -> () @@ -45,7 +45,7 @@ // SIB-CHECK-NEXT: tuple // SIB-CHECK-NEXT: return -// SIB-CHECK-LABEL: sil [serialized] [canonical] @$ss1XVABycfC : $@convention(method) (@thin X.Type) -> X +// SIB-CHECK-LABEL: sil [serialized] [canonical] [ossa] @$ss1XVABycfC : $@convention(method) (@thin X.Type) -> X // SIB-CHECK: bb0 // SIB-CHECK-NEXT: struct $X () // SIB-CHECK-NEXT: return From c072e8159652e91b26173e37a055b43304a588fa Mon Sep 17 00:00:00 2001 From: Michael Gottesman Date: Thu, 11 Feb 2021 03:52:32 -0800 Subject: [PATCH 10/15] [memory-lifetime] Disable memory lifetime on tuple typed alloc_stack that have at least one enum tuple sub-elt. Just until MemoryLifetime can handle enums completely. --- lib/SIL/Verifier/MemoryLifetime.cpp | 11 ++++++++++- 1 file changed, 10 insertions(+), 1 deletion(-) diff --git a/lib/SIL/Verifier/MemoryLifetime.cpp b/lib/SIL/Verifier/MemoryLifetime.cpp index 34de7e05bd0b5..1c7b959dbc4ba 100644 --- a/lib/SIL/Verifier/MemoryLifetime.cpp +++ b/lib/SIL/Verifier/MemoryLifetime.cpp @@ -142,6 +142,8 @@ static bool canHandleAllocStack(AllocStackInst *asi) { if (asi->hasDynamicLifetime()) return false; + SILType stackType = asi->getType(); + // Currently in this verifier, we stop verifying if we find a switch_enum_addr // use. This creates a problem since no one has gone through and changed the // frontend/optimizer to understand that it needs to insert destroy_addr on @@ -155,9 +157,16 @@ static bool canHandleAllocStack(AllocStackInst *asi) { // implemented. // // https://bugs.swift.org/browse/SR-14123 - if (asi->getType().getEnumOrBoundGenericEnum()) + if (stackType.getEnumOrBoundGenericEnum()) return false; + // Same for tuples that have an enum element. We are just working around this + // for now until the radar above is solved. + if (auto tt = stackType.getAs()) + for (unsigned i : range(tt->getNumElements())) + if (stackType.getTupleElementType(i).getEnumOrBoundGenericEnum()) + return false; + // Otherwise we can optimize! return true; } From 0bb257ff6291c549e2e1371215e5fb37df7837f0 Mon Sep 17 00:00:00 2001 From: Meghana Gupta Date: Thu, 11 Feb 2021 11:27:58 -0800 Subject: [PATCH 11/15] [sil] Add another run of ARCSequenceOpts before inlining in function passes. This eliminates some regressions by eliminating phase ordering in between ARCSequenceOpts/inlining with read only functions whose read onlyness is lost after inlining. --- lib/SILOptimizer/PassManager/PassPipeline.cpp | 1 + 1 file changed, 1 insertion(+) diff --git a/lib/SILOptimizer/PassManager/PassPipeline.cpp b/lib/SILOptimizer/PassManager/PassPipeline.cpp index 8109638b5b8da..7347db3eeb6d9 100644 --- a/lib/SILOptimizer/PassManager/PassPipeline.cpp +++ b/lib/SILOptimizer/PassManager/PassPipeline.cpp @@ -344,6 +344,7 @@ void addFunctionPasses(SILPassPipelinePlan &P, // Run devirtualizer after the specializer, because many // class_method/witness_method instructions may use concrete types now. P.addDevirtualizer(); + P.addARCSequenceOpts(); switch (OpLevel) { case OptimizationLevelKind::HighLevel: From f9cb27b286389878638282b50f8c3d94d681b4f5 Mon Sep 17 00:00:00 2001 From: Michael Gottesman Date: Fri, 12 Feb 2021 06:38:14 -0800 Subject: [PATCH 12/15] Disable a test that I know how to fix so we can test. --- test/SILOptimizer/let_properties_opts.swift | 2 ++ 1 file changed, 2 insertions(+) diff --git a/test/SILOptimizer/let_properties_opts.swift b/test/SILOptimizer/let_properties_opts.swift index c22d41d74e4de..aa66e97cf5978 100644 --- a/test/SILOptimizer/let_properties_opts.swift +++ b/test/SILOptimizer/let_properties_opts.swift @@ -3,6 +3,8 @@ // REQUIRES: optimized_stdlib +// REQUIRES: disabled_temp_for_testing_reasons_dont_commit_this + // Test propagation of non-static let properties with compile-time constant values. // TODO: Once this optimization can remove the propagated fileprivate/internal let properties or From fe8b3a8d5ea5e0acd13d1be5df3aab09aafe4425 Mon Sep 17 00:00:00 2001 From: Michael Gottesman Date: Fri, 12 Feb 2021 06:43:52 -0800 Subject: [PATCH 13/15] clean up extra arc --- lib/SILOptimizer/PassManager/PassPipeline.cpp | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/lib/SILOptimizer/PassManager/PassPipeline.cpp b/lib/SILOptimizer/PassManager/PassPipeline.cpp index 7347db3eeb6d9..37753e2bfa4de 100644 --- a/lib/SILOptimizer/PassManager/PassPipeline.cpp +++ b/lib/SILOptimizer/PassManager/PassPipeline.cpp @@ -344,6 +344,11 @@ void addFunctionPasses(SILPassPipelinePlan &P, // Run devirtualizer after the specializer, because many // class_method/witness_method instructions may use concrete types now. P.addDevirtualizer(); + + // Cleanup additional ARC before inlining. + P.addSemanticARCOpts(); + P.addCopyPropagation(); + P.addSemanticARCOpts(); P.addARCSequenceOpts(); switch (OpLevel) { From babc53f3b10e05f208d62eaad886086fa6a10af6 Mon Sep 17 00:00:00 2001 From: Andrew Trick Date: Thu, 11 Feb 2021 17:58:38 -0800 Subject: [PATCH 14/15] destructure conversion --- .../Utils/CanonicalOSSALifetime.h | 71 +- .../Transforms/CopyPropagation.cpp | 49 +- .../Utils/CanonicalOSSALifetime.cpp | 380 +++-- test/SILOptimizer/copy_propagation.sil | 1252 ++--------------- test/SILOptimizer/copy_propagation_opaque.sil | 1163 +++++++++++++++ 5 files changed, 1677 insertions(+), 1238 deletions(-) create mode 100644 test/SILOptimizer/copy_propagation_opaque.sil diff --git a/include/swift/SILOptimizer/Utils/CanonicalOSSALifetime.h b/include/swift/SILOptimizer/Utils/CanonicalOSSALifetime.h index 69e7af8c8cde2..a931cd43eb718 100644 --- a/include/swift/SILOptimizer/Utils/CanonicalOSSALifetime.h +++ b/include/swift/SILOptimizer/Utils/CanonicalOSSALifetime.h @@ -102,6 +102,18 @@ namespace swift { +/// Convert this struct_extract into a copy+destructure. Return the destructured +/// result or invalid SILValue. The caller must delete the extract and its +/// now-dead copy use. +/// +// If a copied-def is a struct-extract, attempt a destructure conversion +// %extract = struct_extract %... : $TypeWithSingleOwnershipValue +// %copy = copy_value %extract : $OwnershipValue +// To: +// %copy = copy_value %extract : $TypeWithSingleOwnershipValue +// (%extracted,...) = destructure %copy : $TypeWithSingleOwnershipValue +SILValue convertExtractToDestructure(StructExtractInst *extract); + /// Information about consumes on the extended-lifetime boundary. Consuming uses /// within the lifetime are not included--they will consume a copy after /// rewriting. For borrowed def values, the consumes do not include the end of @@ -175,6 +187,50 @@ class CanonicalOSSAConsumeInfo { SWIFT_ASSERT_ONLY_DECL(void dump() const LLVM_ATTRIBUTE_USED); }; +// Worklist of pointer-like things that have an invalid default value. Avoid +// revisiting nodes--suitable for DAGs, but pops finished nodes without +// preserving them in the vector. +// +// The primary API has two methods: intialize() and pop(). Others are provided +// for flexibility. +// +// TODO: make this a better utility. +template struct PtrWorklist { + SmallPtrSet ptrVisited; + SmallVector ptrVector; + + PtrWorklist() = default; + + PtrWorklist(const PtrWorklist &) = delete; + + void initialize(T t) { + clear(); + insert(t); + } + + template void initializeRange(R &&range) { + clear(); + ptrVisited.insert(range.begin(), range.end()); + ptrVector.append(range.begin(), range.end()); + } + + T pop() { return empty() ? T() : ptrVector.pop_back_val(); } + + bool empty() const { return ptrVector.empty(); } + + unsigned size() const { return ptrVector.size(); } + + void clear() { + ptrVector.clear(); + ptrVisited.clear(); + } + + void insert(T t) { + if (ptrVisited.insert(t).second) + ptrVector.push_back(t); + } +}; + /// Canonicalize OSSA lifetimes. /// /// Allows the allocation of analysis state to be reused across calls to @@ -221,11 +277,11 @@ class CanonicalizeOSSALifetime { /// outisde the pruned liveness at the time it is discovered. llvm::SmallPtrSet debugValues; - /// Reuse a general worklist for def-use traversal. - SmallSetVector defUseWorklist; + /// Reuse a general visited set for def-use traversal. + PtrWorklist defUseWorklist; /// Reuse a general worklist for CFG traversal. - SmallSetVector blockWorklist; + PtrWorklist blockWorklist; /// Pruned liveness for the extended live range including copies. For this /// purpose, only consuming instructions are considered "lifetime @@ -298,6 +354,15 @@ class CanonicalizeOSSALifetime { bool consolidateBorrowScope(); + bool findBorrowScopeUses(llvm::SmallPtrSetImpl &useInsts); + + void filterOuterBorrowUseInsts( + llvm::SmallPtrSetImpl &outerUseInsts); + + void rewriteOuterBorrowUsesAndFindConsumes( + SILValue incomingValue, + llvm::SmallPtrSetImpl &outerUseInsts); + bool computeCanonicalLiveness(); bool endsAccessOverlappingPrunedBoundary(SILInstruction *inst); diff --git a/lib/SILOptimizer/Transforms/CopyPropagation.cpp b/lib/SILOptimizer/Transforms/CopyPropagation.cpp index 1ad6814ee1340..9d4ca62245176 100644 --- a/lib/SILOptimizer/Transforms/CopyPropagation.cpp +++ b/lib/SILOptimizer/Transforms/CopyPropagation.cpp @@ -32,6 +32,9 @@ #include "swift/SILOptimizer/Utils/CanonicalOSSALifetime.h" #include "swift/SILOptimizer/Utils/InstOptUtils.h" +//!!! +#include "swift/Basic/Defer.h" + using namespace swift; //===----------------------------------------------------------------------===// @@ -72,6 +75,13 @@ void CopyPropagation::run() { auto *dominanceAnalysis = getAnalysis(); auto *deBlocksAnalysis = getAnalysis(); + //!!! + if (f->hasName("$ss18_StringBreadcrumbsCyABSScfc")) { + llvm::DebugFlag = true; + llvm::setCurrentDebugType("copy-propagation"); + } + SWIFT_DEFER { llvm::DebugFlag = false; }; + // Debug label for unit testing. LLVM_DEBUG(llvm::dbgs() << "*** CopyPropagation: " << f->getName() << "\n"); @@ -88,12 +98,49 @@ void CopyPropagation::run() { CanonicalizeOSSALifetime::getCanonicalCopiedDef(copy)); } } + // Push copy_value instructions above their struct_extract operands by + // inserting destructures. + // + // copiedDefs be be modified, but it never shrinks + for (unsigned idx = 0; idx < copiedDefs.size(); ++idx) { + SILValue def = copiedDefs[idx]; + auto *copy = dyn_cast(def); + if (!copy) + continue; + + auto *extract = dyn_cast(copy->getOperand()); + if (!extract + || SILValue(extract).getOwnershipKind() != OwnershipKind::Guaranteed) + continue; + + if (SILValue destructuredResult = convertExtractToDestructure(extract)) { + // Remove to-be-deleted instructions from copiedDeds. The extract cannot + // be in the copiedDefs set since getCanonicalCopiedDef does not allow a + // guaranteed projection to be a canonical def. + copiedDefs.remove(copy); + --idx; // point back to the current element, which was erased. + + // TODO: unfortunately SetVector has no element replacement. + copiedDefs.insert(destructuredResult); + + auto *destructure = cast( + destructuredResult.getDefiningInstruction()); + auto *newCopy = cast(destructure->getOperand()); + copiedDefs.insert( + CanonicalizeOSSALifetime::getCanonicalCopiedDef(newCopy)); + + LLVM_DEBUG(llvm::dbgs() << "Destructure Conversion:\n" + << *extract << " to " << *destructure); + // Delete both the copy and the extract. + InstructionDeleter().recursivelyDeleteUsersIfDead(extract); + } + } // Perform copy propgation for each copied value. CanonicalizeOSSALifetime canonicalizer(pruneDebug, accessBlockAnalysis, dominanceAnalysis, deBlocksAnalysis->get(f)); // Cleanup dead copies. If getCanonicalCopiedDef returns a copy (because the - // copy's source operand is unrecgonized), then the copy is itself treated + // copy's source operand is unrecgonized), then thecan copy is itself treated // like a def and may be dead after canonicalization. llvm::SmallVector deadCopies; for (auto &def : copiedDefs) { diff --git a/lib/SILOptimizer/Utils/CanonicalOSSALifetime.cpp b/lib/SILOptimizer/Utils/CanonicalOSSALifetime.cpp index 9894dc8fb0d76..235571012070d 100644 --- a/lib/SILOptimizer/Utils/CanonicalOSSALifetime.cpp +++ b/lib/SILOptimizer/Utils/CanonicalOSSALifetime.cpp @@ -41,6 +41,7 @@ #define DEBUG_TYPE "copy-propagation" #include "swift/SILOptimizer/Utils/CanonicalOSSALifetime.h" +#include "swift/SIL/DebugUtils.h" #include "swift/SIL/InstructionUtils.h" #include "swift/SIL/OwnershipUtils.h" #include "swift/SILOptimizer/Utils/CFGOptUtils.h" @@ -76,8 +77,9 @@ SILValue CanonicalizeOSSALifetime::getCanonicalCopiedDef(SILValue v) { case BorrowedValueKind::SILFunctionArgument: return def; case BorrowedValueKind::BeginBorrow: { - // TODO: Remove this call to visitLocalScopeEndingUses and the - // same-block check once computeBorrowLiveness supports multiple blocks. +#if 0 //!!! + // TODO: Remove this call to visitLocalScopeEndingUses and the + // same-block check once computeBorrowLiveness supports multiple blocks. auto *defBB = def->getParentBlock(); if (borrowedVal.visitLocalScopeEndingUses( [&](Operand *endBorrow) { @@ -86,6 +88,8 @@ SILValue CanonicalizeOSSALifetime::getCanonicalCopiedDef(SILValue v) { return def; } break; +#endif + return def; } case BorrowedValueKind::LoadBorrow: case BorrowedValueKind::Phi: @@ -112,6 +116,32 @@ static void copyLiveUse(Operand *use) { LLVM_DEBUG(llvm::dbgs() << " Copying at last use " << *copy); } +// TODO: generalize this to handle multiple nondebug uses of the struct_extract. +SILValue swift::convertExtractToDestructure(StructExtractInst *extract) { + if (!hasOneNonDebugUse(extract)) + return nullptr; + + if (!extract->isFieldOnlyNonTrivialField()) + return nullptr; + + auto *extractCopy = + dyn_cast(getNonDebugUses(extract).begin()->getUser()); + if (!extractCopy) + return nullptr; + + SILBuilderWithScope builder(extract); + auto loc = extract->getLoc(); + auto *copy = builder.createCopyValue(loc, extract->getOperand()); + auto *destructure = builder.createDestructureStruct(loc, copy); + + SILValue nonTrivialResult = destructure->getResult(extract->getFieldIndex()); + assert(!nonTrivialResult->getType().isTrivial(*destructure->getFunction()) + && "field idx mismatch"); + + extractCopy->replaceAllUsesWith(nonTrivialResult); + return nonTrivialResult; +} + //===----------------------------------------------------------------------===// // MARK: Rewrite borrow scopes //===----------------------------------------------------------------------===// @@ -139,9 +169,9 @@ bool CanonicalizeOSSALifetime::computeBorrowLiveness() { case BorrowedValueKind::BeginBorrow: break; } - if (!EnableRewriteBorrows) { + if (!EnableRewriteBorrows) return false; - } + // Note that there is no need to look through any reborrows. The reborrowed // value is considered a separate lifetime for canonicalization. Any copies of // the reborrowed value will not be rewritten when canonicalizing the current @@ -150,12 +180,13 @@ bool CanonicalizeOSSALifetime::computeBorrowLiveness() { liveness.updateForUse(use->getUser(), /*lifetimeEnding*/ true); return true; }); - +#if 0 //!!! // TODO: Fix getCanonicalCopiedDef to allow multi-block borrows and remove // this assert. This should only be done once consolidateBorrowScope can // handle persistentCopies, otherwise we may end up generating more dynamic // copies than the non-canonical form. assert(liveness.numLiveBlocks() == 1); +#endif return true; } @@ -165,7 +196,7 @@ bool CanonicalizeOSSALifetime::computeBorrowLiveness() { // // To use an existing outer copy, we could find its earliest consume. But the // new copy will immediately canonicalized and a canonical begin_borrow scope -// have no outside uses of its first block. +// have no outer uses of its first block. static CopyValueInst *createOuterCopy(BeginBorrowInst *beginBorrow) { SILBuilderWithScope B(beginBorrow); @@ -178,25 +209,20 @@ static CopyValueInst *createOuterCopy(BeginBorrowInst *beginBorrow) { return copy; } -// If this succeeds, then all uses of the borrowed value outside the borrow -// scope will be rewritten to use an outer copy, and all remaining uses of the -// borrowed value will be confined to the borrow scope. +// This def-use traversal is similar to findExtendedTransitiveGuaranteedUses(), +// however, to cover the canonical lifetime, it looks through copies. It also +// considers uses within the introduced borrow scope itself (instead of simply +// visiting the scope-ending uses). It does not, however, look into nested +// borrow scopes uses, since nested scopes are canonicalized independently. // -// TODO: Canonicalize multi-block borrow scopes, load_borrow scope, and phi -// borrow scopes by adding one copy per block to persistentCopies for -// each block that dominates an outer use. -bool CanonicalizeOSSALifetime::consolidateBorrowScope() { - if (isa(currentDef)) { - return true; - } - // Gather all outer uses before rewriting any to avoid scanning any basic - // block more than once. - SmallVector outerUses; - llvm::SmallPtrSet outerUseInsts; +// \p useInsts are the potentially outer use instructions. This set will +// be pared down to only the outer uses in the next step. +bool CanonicalizeOSSALifetime::findBorrowScopeUses( + llvm::SmallPtrSetImpl &useInsts) { + //!!!SmallVector outerUses; auto isUserInLiveOutBlock = [&](SILInstruction *user) { - // TODO: enable isUserInLiveOutBlock once we support multi-block borrows - // return (liveness.getBlockLiveness(user->getParent()) - // == PrunedLiveBlocks::LiveOut); + return (liveness.getBlockLiveness(user->getParent()) + == PrunedLiveBlocks::LiveOut); return false; }; auto recordOuterUse = [&](Operand *use) { @@ -205,23 +231,13 @@ bool CanonicalizeOSSALifetime::consolidateBorrowScope() { if (isUserInLiveOutBlock(use->getUser())) { return; } - outerUses.push_back(use); - outerUseInsts.insert(use->getUser()); + //!!!outerUses.push_back(use); + useInsts.insert(use->getUser()); }; - // getCanonicalCopiedDef ensures that if currentDef is a guaranteed value, - // then it is a borrow scope introducer. - assert(BorrowedValue(currentDef).isLocalScope()); - - // This def-use traversal is similar to - // findExtendedTransitiveGuaranteedUses(), however, to cover the canonical - // lifetime, it looks through copies. It also considered uses within the - // introduced borrow scope itself (instead of simply visiting the scope-ending - // uses). It does not, however, look into nested borrow scopes uses, since - // nested scopes are canonicalized independently. - defUseWorklist.clear(); - defUseWorklist.insert(currentDef); - while (!defUseWorklist.empty()) { - SILValue value = defUseWorklist.pop_back_val(); + defUseWorklist.initialize(currentDef); + // Avoid revisiting uses because we recurse through + // struct/destructure. Otherwise the order does not matter. + while (SILValue value = defUseWorklist.pop()) { for (Operand *use : value->getUses()) { auto *user = use->getUser(); // Recurse through copies. @@ -257,10 +273,27 @@ bool CanonicalizeOSSALifetime::consolidateBorrowScope() { return false; break; + case OperandOwnership::ForwardingConsume: + // Recurse through destructure, but also record them as an outer + // use. Note that they will consider to be outer uses even if they are + // within this scope as long as any of their transitively uses our + // outside the scope. + // + // FIXME: handle all ForwardingOperands. + if (auto *destructure = dyn_cast(user)) { + recordOuterUse(use); + for (auto result : destructure->getResults()) { + if (result.getOwnershipKind() == OwnershipKind::Owned) { + defUseWorklist.insert(result); + } + } + continue; + } + LLVM_FALLTHROUGH; + case OperandOwnership::InstantaneousUse: case OperandOwnership::UnownedInstantaneousUse: case OperandOwnership::BitwiseEscape: - case OperandOwnership::ForwardingConsume: case OperandOwnership::DestroyingConsume: recordOuterUse(use); break; @@ -273,71 +306,173 @@ bool CanonicalizeOSSALifetime::consolidateBorrowScope() { // a borrow scope is an outer use must visit the same set of uses. borrowOper.visitExtendedScopeEndingUses([&](Operand *endBorrow) { if (!isUserInLiveOutBlock(endBorrow->getUser())) { - outerUseInsts.insert(endBorrow->getUser()); + useInsts.insert(endBorrow->getUser()); } return true; }); break; } } // end switch OperandOwnership - } // end def-use traversal + } // end def-use traversal + + return true; +} +void CanonicalizeOSSALifetime::filterOuterBorrowUseInsts( + llvm::SmallPtrSetImpl &outerUseInsts) { auto *beginBorrow = cast(currentDef); - SmallVector scopeEndingInst; - BorrowedValue(beginBorrow).getLocalScopeEndingInstructions(scopeEndingInst); - assert(scopeEndingInst.size() == 1 && "expected single-block borrow"); + SmallVector scopeEndingInsts; + BorrowedValue(beginBorrow).getLocalScopeEndingInstructions(scopeEndingInsts); + blockWorklist.clear(); // Remove outer uses that occur before the end of the borrow scope by - // forward iterating from begin_borrow to end_borrow. - for (auto instIter = beginBorrow->getIterator(), - endIter = scopeEndingInst[0]->getIterator(); - instIter != endIter; ++instIter) { - outerUseInsts.erase(&*instIter); + // reverse iterating from the end_borrow. + auto scanBlock = [&](SILBasicBlock *bb, SILBasicBlock::iterator endIter) { + auto beginIter = bb->begin(); + if (bb == beginBorrow->getParent()) { + beginIter = std::next(beginBorrow->getIterator()); + } else { + blockWorklist.insert(bb); + } + for (auto instIter = endIter; instIter != beginIter;) { + --instIter; + outerUseInsts.erase(&*instIter); + } + }; + for (auto *scopeEnd : scopeEndingInsts) { + scanBlock(scopeEnd->getParent(), std::next(scopeEnd->getIterator())); } - if (outerUseInsts.empty()) { - return true; + // This worklist is also a visited set, so we never pop the entries. + while (auto *bb = blockWorklist.pop()) { + for (auto *predBB : bb->getPredecessorBlocks()) { + scanBlock(predBB, predBB->end()); + } } - // Rewrite the outer uses and record lifetime-ending uses. +} + +// Repeat the same def-use traversal as findBorrowScopeUses(). This time, +// instead of recording all the uses, rewrite the operands of outser uses, +// record consumingUses, and add forwarding operations to the outerUseInsts if +// they have transitive outer uses. +// +// Recurse through forwarded consumes, but don't revisit uses. Once an outer +// use it visited, it marks its incoming operand as an outer use. +// +// Return true if any outer uses were found and rewritten. +void CanonicalizeOSSALifetime::rewriteOuterBorrowUsesAndFindConsumes( + SILValue incomingValue, + llvm::SmallPtrSetImpl &outerUseInsts) { + + SILValue newIncomingValue = + (incomingValue == currentDef) ? outerCopy : incomingValue; + + // Outer uses specific to the current incomingValue + SmallVector currentOuterUseInsts; SmallVector consumingUses; SmallPtrSet unclaimedConsumingUsers; - this->outerCopy = createOuterCopy(beginBorrow); - for (Operand *use : outerUses) { - if (!outerUseInsts.count(use->getUser())) { - // The immediate use is within this borrow scope. - BorrowingOperand borrowOper(use); - if (borrowOper.kind == BorrowingOperandKind::Invalid) { - continue; - } - // For sub-borrows also check that the scope-ending instructions are - // within the scope. - if (borrowOper.visitExtendedScopeEndingUses([&](Operand *endBorrow) { - return !outerUseInsts.count(endBorrow->getUser()); - })) { - continue; - } - } + + auto rewriteOuterUse = [&](Operand *use) { LLVM_DEBUG(llvm::dbgs() << " Use of outer copy " << *use->getUser()); - use->set(outerCopy); + use->set(newIncomingValue); + currentOuterUseInsts.push_back(use->getUser()); + outerUseInsts.insert(incomingValue->getDefiningInstruction()); if (use->isLifetimeEnding()) { consumingUses.push_back(use); unclaimedConsumingUsers.insert(use->getUser()); } - } - // Insert a destroy on the outer copy's lifetime frontier, or claim an - // existing consume. - ValueLifetimeAnalysis lifetimeAnalysis(outerCopy, outerUseInsts); + }; + // defUseWorklist is used recursively here + unsigned defUseStart = defUseWorklist.size(); + defUseWorklist.insert(incomingValue); + while (defUseStart < defUseWorklist.size()) { + SILValue value = defUseWorklist.pop(); + // Gather the uses before updating any of them. + SmallVector uses(value->getUses()); + for (Operand *use : uses) { + auto *user = use->getUser(); + // Transitively poke through copies. + if (auto *copy = dyn_cast(user)) { + defUseWorklist.insert(copy); + continue; + } + // Note: debug_value uses are handled like normal uses here. They should + // be stripped later if required when handling outerCopy or + // persistentCopies. + + switch (use->getOperandOwnership()) { + case OperandOwnership::NonUse: + case OperandOwnership::TrivialUse: + case OperandOwnership::InteriorPointer: + case OperandOwnership::ForwardingBorrow: + case OperandOwnership::EndBorrow: + case OperandOwnership::Reborrow: + case OperandOwnership::ForwardingUnowned: + case OperandOwnership::PointerEscape: + break; + + case OperandOwnership::ForwardingConsume: + // FIXME: Recurse through any ForwardingOperand. + if (auto *destructure = dyn_cast(user)) { + for (auto result : destructure->getResults()) { + if (result.getOwnershipKind() != OwnershipKind::Owned) + continue; + + // Process transitive users and set add this destructure to + // outerUseInsts if any outer uses were found. + rewriteOuterBorrowUsesAndFindConsumes(result, outerUseInsts); + if (outerUseInsts.count(destructure)) { + rewriteOuterUse(use); + } + } + continue; + } + LLVM_FALLTHROUGH; + + case OperandOwnership::InstantaneousUse: + case OperandOwnership::UnownedInstantaneousUse: + case OperandOwnership::BitwiseEscape: + case OperandOwnership::DestroyingConsume: + if (outerUseInsts.count(use->getUser())) { + rewriteOuterUse(use); + } + break; + case OperandOwnership::Borrow: + BorrowingOperand borrowOper(use); + assert(borrowOper && "BorrowingOperand must handle OperandOwnership"); + + // For borrows, record the scope-ending instructions in addition to the + // borrow instruction outer use points. + if (outerUseInsts.count(use->getUser()) + || !borrowOper.visitExtendedScopeEndingUses([&](Operand *endScope) { + return !outerUseInsts.count(endScope->getUser()); + })) { + rewriteOuterUse(use); + } + break; + } + } // end switch OperandOwnership + } // end def-use traversal + + // Insert a destroy on the outer copy's or forwarding consume's lifetime + // frontier, or claim an existing consume. + // + // FIXME: replace with ValueLifetimeBoundary and do not modify the CFG here. + ValueLifetimeAnalysis lifetimeAnalysis( + newIncomingValue.getDefiningInstruction(), currentOuterUseInsts); ValueLifetimeAnalysis::Frontier frontier; bool result = lifetimeAnalysis.computeFrontier( - frontier, ValueLifetimeAnalysis::DontModifyCFG, deBlocks); + frontier, ValueLifetimeAnalysis::AllowToModifyCFG, deBlocks); assert(result); while (!frontier.empty()) { auto *insertPt = frontier.pop_back_val(); - if (unclaimedConsumingUsers.erase(&*std::prev(insertPt->getIterator()))) { + auto insertIter = insertPt->getIterator(); + if (insertIter != insertPt->getParent()->begin() + && unclaimedConsumingUsers.erase(&*std::prev(insertIter))) { continue; } SILBuilderWithScope(insertPt).createDestroyValue(insertPt->getLoc(), - outerCopy); + newIncomingValue); } - // Add copies for consuming users of outerCopy. + // Add copies for consuming users of newIncomingValue. for (auto *use : consumingUses) { // If the user is still in the unclaimedConsumingUsers set, then it does not // end the outer copy's lifetime and therefore requires a copy. Only one @@ -348,6 +483,71 @@ bool CanonicalizeOSSALifetime::consolidateBorrowScope() { copyLiveUse(use); } } + +#if 0 //!!! + for (Operand *use : outerUses) { + if (!outerUseInsts.count(use->getUser())) { + // The immediate use is within this borrow scope. + BorrowingOperand borrowOper(use); + if (borrowOper.kind == BorrowingOperandKind::Invalid) { + continue; + } + // For sub-borrows also check that the scope-ending instructions are + // within the scope. + if (borrowOper.visitExtendedScopeEndingUses([&](Operand *endBorrow) { + return !outerUseInsts.count(endBorrow->getUser()); + })) { + continue; + } + } + LLVM_DEBUG(llvm::dbgs() << " Use of outer copy " << *use->getUser()); + use->set(outerCopy); + if (use->isLifetimeEnding()) { + consumingUses.push_back(use); + unclaimedConsumingUsers.insert(use->getUser()); + } + } +#endif //!!! +} + +// If this succeeds, then all uses of the borrowed value outside the borrow +// scope will be rewritten to use an outer copy, and all remaining uses of the +// borrowed value will be confined to the borrow scope. +// +// TODO: Canonicalize multi-block borrow scopes, load_borrow scope, and phi +// borrow scopes by adding one copy per block to persistentCopies for +// each block that dominates an outer use. +bool CanonicalizeOSSALifetime::consolidateBorrowScope() { + if (isa(currentDef)) { + return true; + } + // getCanonicalCopiedDef ensures that if currentDef is a guaranteed value, + // then it is a borrow scope introducer. + assert(BorrowedValue(currentDef).isLocalScope()); + + // Gather all potential outer uses before rewriting any to avoid scanning any + // basic block more than once. + llvm::SmallPtrSet outerUseInsts; + if (!findBorrowScopeUses(outerUseInsts)) + return false; + + filterOuterBorrowUseInsts(outerUseInsts); + if (outerUseInsts.empty()) { + return true; + } +#if 0 //!!! + // FIXME: if an outer use is a terminator, bail. VLA will put insertion points + // on edges for those which requires splitting. This can be fixed by migrating + // to ValueLifetimeBoundary. + for (SILInstruction *user : outerUseInsts) { + if (isa(user)) + return false; + } +#endif + this->outerCopy = createOuterCopy(cast(currentDef)); + + defUseWorklist.clear(); + rewriteOuterBorrowUsesAndFindConsumes(currentDef, outerUseInsts); return true; } @@ -356,10 +556,8 @@ bool CanonicalizeOSSALifetime::consolidateBorrowScope() { //===----------------------------------------------------------------------===// bool CanonicalizeOSSALifetime::computeCanonicalLiveness() { - defUseWorklist.clear(); - defUseWorklist.insert(currentDef); - while (!defUseWorklist.empty()) { - SILValue value = defUseWorklist.pop_back_val(); + defUseWorklist.initialize(currentDef); + while (SILValue value = defUseWorklist.pop()) { for (Operand *use : value->getUses()) { auto *user = use->getUser(); @@ -563,11 +761,8 @@ void CanonicalizeOSSALifetime::extendLivenessThroughOverlappingAccess() { bool changed = true; while (changed) { changed = false; - blockWorklist.clear(); - blockWorklist.insert(consumingBlocks.begin(), consumingBlocks.end()); - // This worklist is also a visited set, so we never pop the entries. - for (unsigned blockIdx = 0; blockIdx < blockWorklist.size(); ++blockIdx) { - SILBasicBlock *bb = blockWorklist[blockIdx]; + blockWorklist.initializeRange(consumingBlocks); + while (auto *bb = blockWorklist.pop()) { auto blockLiveness = liveness.getBlockLiveness(bb); // Ignore blocks within pruned liveness. if (blockLiveness == PrunedLiveBlocks::LiveOut) { @@ -732,12 +927,9 @@ void CanonicalizeOSSALifetime::findOrInsertDestroys() { // Visit each original consuming use or destroy as the starting point for a // backward CFG traversal. - blockWorklist.clear(); - blockWorklist.insert(consumingBlocks.begin(), consumingBlocks.end()); - // This worklist is also a visited set, so we never pop the entries. - for (unsigned blockIdx = 0; blockIdx < blockWorklist.size(); ++blockIdx) { + blockWorklist.initializeRange(consumingBlocks); + while (auto *bb = blockWorklist.pop()) { // Process each block that has not been visited and is not LiveOut. - SILBasicBlock *bb = blockWorklist[blockIdx]; switch (liveness.getBlockLiveness(bb)) { case PrunedLiveBlocks::LiveOut: // A lifetimeEndBlock may be determined to be LiveOut after analyzing the @@ -835,8 +1027,8 @@ void CanonicalizeOSSALifetime::rewriteCopies() { setChanged(); } } - while (!defUseWorklist.empty()) { - CopyValueInst *srcCopy = cast(defUseWorklist.pop_back_val()); + while (SILValue value = defUseWorklist.pop()) { + CopyValueInst *srcCopy = cast(value); // Recurse through copies while replacing their uses. Operand *reusedCopyOp = nullptr; for (auto useIter = srcCopy->use_begin(); useIter != srcCopy->use_end();) { diff --git a/test/SILOptimizer/copy_propagation.sil b/test/SILOptimizer/copy_propagation.sil index 2c6b16e53c20b..0c521bb2d7207 100644 --- a/test/SILOptimizer/copy_propagation.sil +++ b/test/SILOptimizer/copy_propagation.sil @@ -1,1169 +1,59 @@ -// RUN: %target-sil-opt -copy-propagation -canonical-ossa-rewrite-borrows -enable-sil-opaque-values -enable-sil-verify-all %s | %FileCheck %s --check-prefixes=CHECK,CHECK-OPT -// RUN: %target-sil-opt -mandatory-copy-propagation -canonical-ossa-rewrite-borrows -enable-sil-opaque-values -enable-sil-verify-all %s | %FileCheck %s --check-prefixes=CHECK,CHECK-DEBUG -// RUN: %target-sil-opt -copy-propagation -canonical-ossa-rewrite-borrows -enable-sil-opaque-values -debug-only=copy-propagation %s -o /dev/null 2>&1 | %FileCheck %s --check-prefix=CHECK-TRACE +// RUN: %target-sil-opt -copy-propagation -canonical-ossa-rewrite-borrows -enable-sil-verify-all %s | %FileCheck %s // REQUIRES: asserts sil_stage canonical import Builtin -import Swift - -sil [ossa] @getOwnedC : $@convention(thin) () -> (@owned C) -sil [ossa] @takeOwned : $@convention(thin) (@in T) -> () -sil [ossa] @takeMultipleOwned : $@convention(thin) (@in T, @in T) -> () -sil [ossa] @takeGuaranteed : $@convention(thin) (@in_guaranteed T) -> () -sil [ossa] @takeGuaranteedAndOwnedArg : $@convention(thin) (@in_guaranteed T, @in T) -> () - -class B { } class C { - var a: Int64 + var a: Builtin.Int64 } + +sil [ossa] @getOwnedC : $@convention(thin) () -> (@owned C) sil [ossa] @takeOwnedC : $@convention(thin) (@owned C) -> () sil [ossa] @takeOwnedCTwice : $@convention(thin) (@owned C, @owned C) -> () sil [ossa] @takeGuaranteedC : $@convention(thin) (@guaranteed C) -> () -struct NativeObjectPair { - var obj1 : Builtin.NativeObject - var obj2 : Builtin.NativeObject -} - -// Once Mem2Reg supports ownership, it will leave behind extra copies as -// seen in the SIL test below for simple assignment: -// public func testVarAssign(_ t: T) -> T { -// var u = t -// return u -// } -// CopyPropagation should leave behind a single copy and no destroys. -// -// CHECK-LABEL: sil [ossa] @testVarAssign : $@convention(thin) (@in_guaranteed T) -> @out T { -// CHECK: bb0(%0 : @guaranteed $T): -// CHECK-NOT: destroy -// CHECK: [[CPY:%.*]] = copy_value %0 : $T -// CHECK_CHECK-NOT: destroy -// CHECK_CHECK: return [[CPY]] : $T -// CHECK-LABEL: } // end sil function 'testVarAssign' -sil [ossa] @testVarAssign : $@convention(thin) (@in_guaranteed T) -> @out T { -bb0(%0 : @guaranteed $T): - %1 = copy_value %0 : $T - %2 = copy_value %1 : $T - destroy_value %1 : $T - return %2 : $T -} - -// CHECK-LABEL: sil [ossa] @multiReturnValue : $@convention(thin) (@in_guaranteed T) -> (@out T, @out T) { -// CHECK: bb0(%0 : @guaranteed $T): -// CHECK-NOT: destroy -// CHECK: [[CPY1:%.*]] = copy_value %0 : $T -// CHECK_CHECK-NOT: destroy -// CHECK_CHECK: [[CPY2:%.*]] = copy_value %0 : $T -// CHECK_CHECK-NOT: destroy -// CHECK_CHECK: [[R:%.*]] = tuple ([[CPY1]] : $T, [[CPY2]] : $T) -// CHECK_CHECK-NOT: destroy -// CHECK_CHECK: return [[R]] : $(T, T) -// CHECK-LABEL: } // end sil function 'multiReturnValue' -sil [ossa] @multiReturnValue : $@convention(thin) (@in_guaranteed T) -> (@out T, @out T) { -bb0(%0 : @guaranteed $T): - %1 = copy_value %0 : $T - %2 = copy_value %1 : $T - %3 = copy_value %1 : $T - %4 = tuple (%2 : $T, %3 : $T) - destroy_value %1 : $T - return %4 : $(T, T) -} - -// CHECK-LABEL: sil [ossa] @multiCallResult : $@convention(thin) (@in_guaranteed T) -> @out T { -// CHECK: bb0(%0 : @guaranteed $T): -// CHECK_CHECK-NEXT: // function_ref multiReturnValue -// CHECK_CHECK-NEXT: [[F:%.*]] = function_ref @multiReturnValue : $@convention(thin) <τ_0_0> (@in_guaranteed τ_0_0) -> (@out τ_0_0, @out τ_0_0) -// CHECK_CHECK-NEXT: [[CALL:%.*]] = apply [[F]](%0) : $@convention(thin) <τ_0_0> (@in_guaranteed τ_0_0) -> (@out τ_0_0, @out τ_0_0) -// CHECK_CHECK-NEXT: ([[D1:%.*]], [[D2:%.*]]) = destructure_tuple [[CALL]] : $(T, T) -// CHECK_CHECK-NEXT: destroy_value [[D2]] : $T -// CHECK_CHECK-NEXT: return [[D1]] : $T -// CHECK-LABEL: } // end sil function 'multiCallResult' -sil [ossa] @multiCallResult : $@convention(thin) (@in_guaranteed T) -> @out T { -bb0(%0 : @guaranteed $T): - %1 = copy_value %0 : $T - %2 = function_ref @multiReturnValue : $@convention(thin) <τ_0_0> (@in_guaranteed τ_0_0) -> (@out τ_0_0, @out τ_0_0) - %3 = apply %2(%1) : $@convention(thin) <τ_0_0> (@in_guaranteed τ_0_0) -> (@out τ_0_0, @out τ_0_0) - (%4, %5) = destructure_tuple %3 : $(T, T) - %6 = copy_value %4 : $T - %7 = copy_value %5 : $T - destroy_value %1 : $T - destroy_value %4 : $T - destroy_value %5 : $T - destroy_value %7 : $T - return %6 : $T -} - -// CHECK-LABEL: sil [ossa] @testPhi : $@convention(thin) (@in_guaranteed T, @in_guaranteed T, Bool) -> @out T { -// CHECK: bb0(%0 : @guaranteed $T, %1 : @guaranteed $T, %2 : $Bool): -// CHECK_CHECK-NEXT: struct_extract %2 : $Bool, #Bool._value -// CHECK_CHECK-NEXT: cond_br %{{.*}}, bb1, bb2 -// -// CHECK: bb1: -// CHECK_CHECK-NEXT: copy_value %0 : $T -// CHECK_CHECK-NEXT: br bb3(% -// -// CHECK: bb2: -// CHECK_CHECK-NEXT: copy_value %1 : $T -// CHECK_CHECK-NEXT: br bb3(% -// -// CHECK: bb3(% -// CHECK_CHECK-NEXT: return -// CHECK-LABEL: } // end sil function 'testPhi' -sil [ossa] @testPhi : $@convention(thin) (@in_guaranteed T, @in_guaranteed T, Bool) -> @out T { -bb0(%0 : @guaranteed $T, %1 : @guaranteed $T, %2 : $Bool): - %3 = copy_value %0 : $T - %4 = copy_value %1 : $T - %5 = struct_extract %2 : $Bool, #Bool._value - cond_br %5, bb1, bb2 - -bb1: - %7 = copy_value %3 : $T - br bb3(%7 : $T) - -bb2: - %9 = copy_value %4 : $T - br bb3(%9 : $T) - -bb3(%11 : @owned $T): - destroy_value %4 : $T - destroy_value %3 : $T - return %11 : $T -} - -// CHECK-LABEL: sil [ossa] @testConsume : $@convention(thin) (@in T, @inout T) -> () { -// CHECK: bb0(%0 : @owned $T, %1 : $*T): -// -// Mandatory opt reuses the original copy for the consuming store. -// CHECK-DEBUG-NEXT: [[STOREVAL:%.*]] = copy_value %0 : $T -// -// CHECK-NEXT: debug_value %0 : $T -// CHECK-DEBUG-NEXT: store [[STOREVAL]] to [assign] %1 : $*T -// CHECK-OPT-NEXT: store %0 to [assign] %1 : $*T -// -// The non-consuming use now uses the original value. -// CHECK-DEBUG-NEXT: debug_value %0 : $T -// -// CHECK-NEXT: debug_value_addr %1 : $*T -// -// The original destroy is deleted with optimizations enabled. -// CHECK-DEBUG-NEXT: destroy_value %0 : $T -// CHECK-NEXT: tuple () -// CHECK-NEXT: return -// CHECK-LABEL: // end sil function 'testConsume' -sil [ossa] @testConsume : $@convention(thin) (@in T, @inout T) -> () { -bb0(%arg : @owned $T, %addr : $*T): - %copy = copy_value %arg : $T - debug_value %copy : $T - store %copy to [assign] %addr : $*T - debug_value %arg : $T - debug_value_addr %addr : $*T - destroy_value %arg : $T - %v = tuple () - return %v : $() -} - -// CHECK-LABEL: sil [ossa] @testDestroyEdge : $@convention(thin) (@in T, Builtin.Int1) -> () { -// CHECK: bb0(%0 : @owned $T, %1 : $Builtin.Int1): -// CHECK-OPT-NEXT: destroy_value %0 : $T -// CHECK-DEBUG-NEXT: cond_br %1, bb2, bb1 -// -// CHECK: bb1: -// Debug build inserts a new destroy -// CHECK-DEBUG-NEXT: destroy_value %0 : $T -// CHECK-NEXT: br bb3 -// -// CHECK: bb2: -// The original copy is deleted in both cases. -// CHECK-DEBUG-NEXT: debug_value %0 : $T -// CHECK-DEBUG-NEXT: destroy_value %0 : $T -// CHECK-NEXT: br bb3 -// -// CHECK: bb3: -// The original destroy is deleted in both cases. -// CHECK-NEXT: tuple () -// CHECK-NEXT: return -// CHECK-LABEL: } // end sil function 'testDestroyEdge' -sil [ossa] @testDestroyEdge : $@convention(thin) (@in T, Builtin.Int1) -> () { -bb0(%arg : @owned $T, %z : $Builtin.Int1): - cond_br %z, bb2, bb1 - -bb1: - br bb3 - -bb2: - debug_value %arg : $T - %copy = copy_value %arg : $T - destroy_value %copy : $T - br bb3 - -bb3: - destroy_value %arg : $T - %10 = tuple () - return %10 : $() -} - -// Test the same user instruction with both @guaranteed and @owned operands taking the same copied value. -// We need to keep the value alive to the end of the instruction. -// -// CHECK-LABEL: sil [ossa] @testGuaranteedAndOwnedArg : $@convention(thin) (@in T) -> () { -// CHECK: bb0(%0 : @owned $T): -// CHECK-NEXT: [[CPY:%.*]] = copy_value %0 : $T -// CHECK-NEXT: // function_ref takeGuaranteedAndOwnedArg -// CHECK-NEXT: function_ref @takeGuaranteedAndOwnedArg : $@convention(thin) <τ_0_0> (@in_guaranteed τ_0_0, @in τ_0_0) -> () -// CHECK-NEXT: apply %{{.*}}(%0, [[CPY]]) : $@convention(thin) <τ_0_0> (@in_guaranteed τ_0_0, @in τ_0_0) -> () -// CHECK-NEXT: destroy_value %0 : $T -// CHECK-NEXT: return %{{.*}} : $() -// CHECK-LABEL: } // end sil function 'testGuaranteedAndOwnedArg' -sil [ossa] @testGuaranteedAndOwnedArg : $@convention(thin) (@in T) -> () { -bb(%0 : @owned $T): - %copy = copy_value %0 : $T - %f = function_ref @takeGuaranteedAndOwnedArg : $@convention(thin) (@in_guaranteed T, @in T) -> () - %call = apply %f(%0, %copy) : $@convention(thin) (@in_guaranteed T, @in T) -> () - destroy_value %0 : $T - return %call : $() -} - -// Reuse one of the copies for the apply. Eliminate the other copy and destroy. -// Which copy is reused is unfortunately sensitive to the use list order. -// -// CHECK-TRACE-LABEL: CopyPropagation: testCopy2OperReuse -// CHECK-TRACE: Removing destroy_value %0 : $T -// CHECK-TRACE: Removing %{{.*}} = copy_value %0 : $T -// CHECK-TRACE-NOT: Removing -// -// CHECK-LABEL: sil [ossa] @testCopy2OperReuse : $@convention(thin) (@in T) -> () { -// CHECK: bb0(%0 : @owned $T): -// CHECK-NEXT: [[CP:%.*]] = copy_value %0 : $T -// CHECK-NEXT: // function_ref takeMultipleOwned -// CHECK-NEXT: function_ref @takeMultipleOwned : $@convention(thin) <τ_0_0> (@in τ_0_0, @in τ_0_0) -> () -// CHECK-NEXT: apply %{{.*}}(%0, [[CP]]) : $@convention(thin) <τ_0_0> (@in τ_0_0, @in τ_0_0) -> () -// CHECK-NEXT: tuple () -// CHECK-NEXT: return -// CHECK-LABEL: } // end sil function 'testCopy2OperReuse' -sil [ossa] @testCopy2OperReuse : $@convention(thin) (@in T) -> () { -bb0(%arg : @owned $T): - %copy1 = copy_value %arg : $T - %copy2 = copy_value %arg : $T - %f = function_ref @takeMultipleOwned : $@convention(thin) (@in T, @in T) -> () - %call = apply %f(%copy1, %copy2) : $@convention(thin) (@in T, @in T) -> () - destroy_value %arg : $T - %10 = tuple () - return %10 : $() -} - -// Reuse one copy and eliminate the other copy and destroy. -// -// CHECK-TRACE-LABEL: *** CopyPropagation: testCopy2CallReuse -// CHECK-TRACE: Removing destroy_value %0 : $T -// CHECK-TRACE: Removing %{{.*}} = copy_value %0 : $T -// CHECK-TRACE-NOT: Removing -// -// CHECK-LABEL: sil [ossa] @testCopy2CallReuse : $@convention(thin) (@in T) -> () { -// CHECK: bb0(%0 : @owned $T): -// CHECK-NEXT: [[CP:%.*]] = copy_value %0 : $T -// CHECK-NEXT: // function_ref -// CHECK-NEXT: function_ref -// CHECK-NEXT: apply %{{.*}}([[CP]]) -// CHECK-NEXT: apply %{{.*}}(%0) -// CHECK-NEXT: tuple -// CHECK-NEXT: return -// CHECK-LABEL: } // end sil function 'testCopy2CallReuse' -sil [ossa] @testCopy2CallReuse : $@convention(thin) (@in T) -> () { -bb0(%arg : @owned $T): - %copy1 = copy_value %arg : $T - %copy2 = copy_value %arg : $T - %f = function_ref @takeOwned : $@convention(thin) (@in T) -> () - %call1 = apply %f(%copy1) : $@convention(thin) (@in T) -> () - %call2 = apply %f(%copy2) : $@convention(thin) (@in T) -> () - destroy_value %arg : $T - %10 = tuple () - return %10 : $() -} - -// bb1 has a consuming instruction but is also live-out. Reuse the copy in bb1. -// -// CHECK-TRACE-LABEL: *** CopyPropagation: liveoutConsume -// CHECK-TRACE: Removing destroy_value %0 : $T -// CHECK-TRACE: Removing %{{.*}} = copy_value %0 : $T -// CHECK-TRACE-NOT: Removing -// -// CHECK-LABEL: sil [ossa] @liveoutConsume : $@convention(thin) (@owned T, Builtin.Int1) -> () { -// CHECK: bb0(%0 : @owned $T, %1 : $Builtin.Int1): -// CHECK-NOT: copy_value -// CHECK: cond_br %1, bb2, bb1 -// CHECK: bb1: -// CHECK: copy_value %0 : $T -// CHECK: apply -// CHECK: br bb3 -// CHECK: bb3: -// CHECK-NOT: copy_value -// CHECK: apply -// CHECK-NOT: destroy_value -// CHECK-LABEL: } // end sil function 'liveoutConsume' -sil [ossa] @liveoutConsume : $@convention(thin) (@owned T, Builtin.Int1) -> () { -bb0(%arg : @owned $T, %z : $Builtin.Int1): - %copy1 = copy_value %arg : $T - cond_br %z, bb2, bb1 - -bb1: - %copy2 = copy_value %arg : $T - %f1 = function_ref @takeOwned : $@convention(thin) (@in T) -> () - %call1 = apply %f1(%copy2) : $@convention(thin) (@in T) -> () - br bb3 - -bb2: - br bb3 - -bb3: - %f2 = function_ref @takeOwned : $@convention(thin) (@in T) -> () - %call2 = apply %f2(%copy1) : $@convention(thin) (@in T) -> () - destroy_value %arg : $T - %10 = tuple () - return %10 : $() -} - -// The LiveWithin block has a destroy, but it's before the first use. -// -// CHECK-TRACE-LABEL: *** CopyPropagation: testDestroyBeforeUse -// CHECK-TRACE: Removing destroy_value %1 : $T -// CHECK-TRACE: Removing %{{.*}} = copy_value %0 : $T -// -// CHECK-LABEL: sil [ossa] @testDestroyBeforeUse : $@convention(thin) (@in T) -> () { -// CHECK: bb0(%0 : @owned $T): -// CHECK-NOT: copy_value -// CHECK-NOT: destroy_value -// CHECK: apply -// CHECK-NOT: destroy_value -// CHECK: return -// CHECK-LABEL: } // end sil function 'testDestroyBeforeUse' -sil [ossa] @testDestroyBeforeUse : $@convention(thin) (@in T) -> () { -bb0(%arg : @owned $T): - %copy = copy_value %arg : $T - destroy_value %copy : $T - %f = function_ref @takeOwned : $@convention(thin) (@in T) -> () - %call2 = apply %f(%arg) : $@convention(thin) (@in T) -> () - %10 = tuple () - return %10 : $() -} - -// The LiveWithin block has a destroy, but it's after an unrelated call. -// -// CHECK-TRACE-LABEL: *** CopyPropagation: testDestroyAfterCall -// CHECK-TRACE-NOT: Removing -// -// CHECK-LABEL: sil [ossa] @testDestroyAfterCall : $@convention(thin) (@in T, @in T) -> () { -// CHECK: bb0(%0 : @owned $T, %1 : @owned $T): -// CHECK: apply %{{.*}}(%0) : $@convention(thin) <τ_0_0> (@in τ_0_0) -> () -// CHECK: destroy_value %1 : $T -// CHECK-LABEL: } // end sil function 'testDestroyAfterCall' -sil [ossa] @testDestroyAfterCall : $@convention(thin) (@in T, @in T) -> () { -bb0(%arg1 : @owned $T, %arg2 : @owned $T): - %f = function_ref @takeOwned : $@convention(thin) (@in T) -> () - %call = apply %f(%arg1) : $@convention(thin) (@in T) -> () - destroy_value %arg2 : $T - %10 = tuple () - return %10 : $() -} - -// A copy may have multiple uses -// -// CHECK-TRACE-LABEL: *** CopyPropagation: testSharedCopy -// CHECK-TRACE: Removing destroy_value %0 : $T -// CHECK-TRACE: Removing %1 = copy_value %0 : $T -// CHECK-TRACE-NOT: Removing -// -// CHECK-LABEL: sil [ossa] @testSharedCopy : $@convention(thin) (@in T) -> () { -// CHECK-NOT: copy_value -// CHECK: apply -// CHECK: apply -// CHECK-NOT: destroy_value -// CHECK-LABEL: } // end sil function 'testSharedCopy' -sil [ossa] @testSharedCopy : $@convention(thin) (@in T) -> () { -bb0(%arg : @owned $T): - %copy = copy_value %arg : $T - %f1 = function_ref @takeGuaranteed : $@convention(thin) (@in_guaranteed T) -> () - %call1 = apply %f1(%copy) : $@convention(thin) (@in_guaranteed T) -> () - %f2 = function_ref @takeOwned : $@convention(thin) (@in T) -> () - %call2 = apply %f2(%copy) : $@convention(thin) (@in T) -> () - destroy_value %arg : $T - %10 = tuple () - return %10 : $() -} - -// A copy within a borrow scope is not handled by CopyPropagation. An -// earlier pass should have hoisted the copy outside of the borrow -// scope. -// -// CHECK-TRACE-LABEL: *** CopyPropagation: testBorrowCopy -// CHECK-TRACE: Outer copy [[OUTERCOPY:%.*]] = copy_value %0 : $T -// CHECK-TRACE: Use of outer copy destroy_value -// CHECK-TRACE: Removing %{{.*}} = copy_value -// CHECK-TRACE: Removing destroy_value [[OUTERCOPY]] : $T -// CHECK-TRACE: Removing [[OUTERCOPY]] = copy_value %0 : $T -// -// CHECK-LABEL: sil [ossa] @testBorrowCopy : $@convention(thin) (@in T) -> () { -// CHECK-LABEL: bb0(%0 : @owned $T): -// CHECK-NEXT: begin_borrow %0 : $T -// CHECK-NEXT: end_borrow -// CHECK-NEXT: destroy_value %0 : $T -// CHECK-NEXT: tuple () -// CHECK-NEXT: return -// CHECK-LABEL: } -sil [ossa] @testBorrowCopy : $@convention(thin) (@in T) -> () { -bb0(%0 : @owned $T): - %3 = begin_borrow %0 : $T - %4 = copy_value %3 : $T - end_borrow %3 : $T - destroy_value %4 : $T - destroy_value %0 : $T - %17 = tuple () - return %17 : $() -} - -// CHECK-TRACE-LABEL: *** CopyPropagation: testCopyBorrow -// CHECK-TRACE: Removing destroy_value %1 : $T -// CHECK-TRACE: Removing %{{.*}} = copy_value %0 : $T -// CHECK-TRACE-NOT: Removing -// -// CHECK-LABEL: sil [ossa] @testCopyBorrow : $@convention(thin) (@in T) -> () { -// CHECK: bb0(%0 : @owned $T): -// CHECK-NEXT: %1 = begin_borrow %0 : $T -// CHECK-NEXT: end_borrow %1 : $T -// CHECK-NEXT: destroy_value %0 : $T -// CHECK-NEXT: tuple -// CHECK-NEXT: return -// CHECK-LABEL: } // end sil function 'testCopyBorrow' -sil [ossa] @testCopyBorrow : $@convention(thin) (@in T) -> () { -bb0(%0 : @owned $T): - %1 = copy_value %0 : $T - %2 = begin_borrow %1 : $T - end_borrow %2 : $T - destroy_value %1 : $T - destroy_value %0 : $T - %17 = tuple () - return %17 : $() -} - -sil @testThrows : $@convention(thin) <τ_0_0> (τ_0_0) -> (@error Error) - -// CHECK-TRACE-LABEL: *** CopyPropagation: testTryApply -// -// CHECK-LABEL: sil [ossa] @testTryApply : $@convention(thin) (@in T) -> @error Error { -// CHECK: bb0(%0 : @owned $T): -// CHECK: function_ref @testThrows : $@convention(thin) <τ_0_0> (τ_0_0) -> @error Error -// CHECK: try_apply %{{.*}}(%0) : $@convention(thin) <τ_0_0> (τ_0_0) -> @error Error, normal bb1, error bb2 -// CHECK: bb1(%3 : $()): -// CHECK: destroy_value %0 : $T -// CHECK: br bb3 -// CHECK: bb2(%{{.*}} : @owned $Error): -// CHECK: destroy_value %0 : $T -// CHECK: destroy_value %{{.*}} : $Error -// CHECK: br bb3 -// CHECK: bb3: -// CHECK-NOT: destroy -// CHECK: return -// CHECK-LABEL: } // end sil function 'testTryApply' -sil [ossa] @testTryApply : $@convention(thin) (@in T) -> (@error Error) { -bb0(%0 : @owned $T): - %1 = copy_value %0 : $T - destroy_value %0 : $T - %f = function_ref @testThrows : $@convention(thin) <τ_0_0> (τ_0_0) -> (@error Error) - try_apply %f(%1) : $@convention(thin) <τ_0_0> (τ_0_0) -> (@error Error), normal bb1, error bb2 - -bb1(%returnval : $()): - br bb3 - -bb2(%error : @owned $Error): - destroy_value %error : $Error - br bb3 - -bb3: - destroy_value %1 : $T - %17 = tuple () - return %17 : $() -} - -// ----------------------------------------------------------------------------- -// Test that convert_escape_to_noescape is a PointerEscape - -sil @closure : $@convention(thin) (@thick T.Type) -> @owned @callee_guaranteed @substituted <τ_0_0, τ_0_1> (@in_guaranteed τ_0_0, @in_guaranteed τ_0_1) -> Bool for - -sil @takeClosure : $@convention(thin) (@noescape @callee_guaranteed @substituted <τ_0_0, τ_0_1> (@in_guaranteed τ_0_0, @in_guaranteed τ_0_1) -> (Bool, @error Error) for ) -> (@owned AnyObject, @error Error) - -// CHECK-TRACE-LABEL: *** CopyPropagation: testConvertFunction -// -// CHECK-LABEL: sil [ossa] @testConvertFunction : $@convention(thin) (@in_guaranteed T) -> @owned AnyObject { -// CHECK: bb0(%0 : @guaranteed $T): -// CHECK: [[CLOSURE:%.*]] = apply %{{.*}}(%{{.*}}) : $@convention(thin) <τ_0_0> (@thick τ_0_0.Type) -> @owned @callee_guaranteed @substituted <τ_0_0, τ_0_1> (@in_guaranteed τ_0_0, @in_guaranteed τ_0_1) -> Bool for <τ_0_0, τ_0_0> -// CHECK: [[CONVERT:%.*]] = convert_function [[CLOSURE]] : $@callee_guaranteed @substituted <τ_0_0, τ_0_1> (@in_guaranteed τ_0_0, @in_guaranteed τ_0_1) -> Bool for to $@callee_guaranteed @substituted <τ_0_0, τ_0_1> (@in_guaranteed τ_0_0, @in_guaranteed τ_0_1) -> (Bool, @error Error) for -// CHECK: [[COPY:%.*]] = copy_value [[CONVERT]] : $@callee_guaranteed @substituted <τ_0_0, τ_0_1> (@in_guaranteed τ_0_0, @in_guaranteed τ_0_1) -> (Bool, @error Error) for -// CHECK: [[NOESCAPE:%.*]] = convert_escape_to_noescape [[COPY]] : $@callee_guaranteed @substituted <τ_0_0, τ_0_1> (@in_guaranteed τ_0_0, @in_guaranteed τ_0_1) -> (Bool, @error Error) for to $@noescape @callee_guaranteed @substituted <τ_0_0, τ_0_1> (@in_guaranteed τ_0_0, @in_guaranteed τ_0_1) -> (Bool, @error Error) for -// CHECK: try_apply %{{.*}}([[NOESCAPE]]) : $@convention(thin) <τ_0_0> (@noescape @callee_guaranteed @substituted <τ_0_0, τ_0_1> (@in_guaranteed τ_0_0, @in_guaranteed τ_0_1) -> (Bool, @error Error) for <τ_0_0, τ_0_0>) -> (@owned AnyObject, @error Error), normal bb1, error bb2 -// CHECK: bb1 -// CHECK: destroy_value [[COPY]] : $@callee_guaranteed @substituted <τ_0_0, τ_0_1> (@in_guaranteed τ_0_0, @in_guaranteed τ_0_1) -> (Bool, @error Error) for -// CHECK: destroy_value [[CONVERT]] : $@callee_guaranteed @substituted <τ_0_0, τ_0_1> (@in_guaranteed τ_0_0, @in_guaranteed τ_0_1) -> (Bool, @error Error) for -// CHECK: return -// CHECK: bb2 -// CHECK: destroy_value [[COPY]] : $@callee_guaranteed @substituted <τ_0_0, τ_0_1> (@in_guaranteed τ_0_0, @in_guaranteed τ_0_1) -> (Bool, @error Error) for -// CHECK: unreachable -// CHECK-LABEL: } // end sil function 'testConvertFunction' -sil [ossa] @testConvertFunction : $@convention(thin) (@in_guaranteed T) -> @owned AnyObject { -bb0(%0 : @guaranteed $T): - %2 = metatype $@thick T.Type - %3 = function_ref @closure : $@convention(thin) <τ_0_0> (@thick τ_0_0.Type) -> @owned @callee_guaranteed @substituted <τ_0_0, τ_0_1> (@in_guaranteed τ_0_0, @in_guaranteed τ_0_1) -> Bool for <τ_0_0, τ_0_0> - %4 = apply %3(%2) : $@convention(thin) <τ_0_0> (@thick τ_0_0.Type) -> @owned @callee_guaranteed @substituted <τ_0_0, τ_0_1> (@in_guaranteed τ_0_0, @in_guaranteed τ_0_1) -> Bool for <τ_0_0, τ_0_0> - %5 = convert_function %4 : $@callee_guaranteed @substituted <τ_0_0, τ_0_1> (@in_guaranteed τ_0_0, @in_guaranteed τ_0_1) -> Bool for to $@callee_guaranteed @substituted <τ_0_0, τ_0_1> (@in_guaranteed τ_0_0, @in_guaranteed τ_0_1) -> (Bool, @error Error) for - %6 = copy_value %5 : $@callee_guaranteed @substituted <τ_0_0, τ_0_1> (@in_guaranteed τ_0_0, @in_guaranteed τ_0_1) -> (Bool, @error Error) for - %7 = convert_escape_to_noescape %6 : $@callee_guaranteed @substituted <τ_0_0, τ_0_1> (@in_guaranteed τ_0_0, @in_guaranteed τ_0_1) -> (Bool, @error Error) for to $@noescape @callee_guaranteed @substituted <τ_0_0, τ_0_1> (@in_guaranteed τ_0_0, @in_guaranteed τ_0_1) -> (Bool, @error Error) for - %8 = function_ref @takeClosure : $@convention(thin) <τ_0_0> (@noescape @callee_guaranteed @substituted <τ_0_0, τ_0_1> (@in_guaranteed τ_0_0, @in_guaranteed τ_0_1) -> (Bool, @error Error) for <τ_0_0, τ_0_0>) -> (@owned AnyObject, @error Error) - try_apply %8(%7) : $@convention(thin) <τ_0_0> (@noescape @callee_guaranteed @substituted <τ_0_0, τ_0_1> (@in_guaranteed τ_0_0, @in_guaranteed τ_0_1) -> (Bool, @error Error) for <τ_0_0, τ_0_0>) -> (@owned AnyObject, @error Error), normal bb1, error bb2 - -bb1(%10 : @owned $AnyObject): - destroy_value %6 : $@callee_guaranteed @substituted <τ_0_0, τ_0_1> (@in_guaranteed τ_0_0, @in_guaranteed τ_0_1) -> (Bool, @error Error) for - destroy_value %5 : $@callee_guaranteed @substituted <τ_0_0, τ_0_1> (@in_guaranteed τ_0_0, @in_guaranteed τ_0_1) -> (Bool, @error Error) for - return %10 : $AnyObject - -bb2(%14 : @owned $Error): - destroy_value %6 : $@callee_guaranteed @substituted <τ_0_0, τ_0_1> (@in_guaranteed τ_0_0, @in_guaranteed τ_0_1) -> (Bool, @error Error) for - unreachable -} - -// ----------------------------------------------------------------------------- -// Test end_apply +class Klass {} -struct Struct { - var st: Int +struct HasObjectAndInt { + var object: Klass + var value: Builtin.Int64 } -sil @swift_modifyAtWritableKeyPath : $@yield_once @convention(thin) <τ_0_0, τ_0_1> (@inout τ_0_0, @guaranteed WritableKeyPath<τ_0_0, τ_0_1>) -> @yields @inout τ_0_1 -sil @modifyInt : $@convention(thin) (@inout Int) -> () - -// CHECK-TRACE-LABEL: *** CopyPropagation: testBeginApply -// -// CHECK-LABEL: sil hidden [ossa] @testBeginApply : $@convention(thin) (@inout Struct) -> () { -// CHECK: begin_apply -// CHECK-NOT: destroy -// CHECK: apply -// CHECK-NOT: destroy -// CHECK: end_apply -// CHECK-NOT: destroy -// CHECK: destroy_value %{{.*}} : $WritableKeyPath -// CHECK-NOT: destroy -// CHECK-LABEL: } // end sil function 'testBeginApply' -sil hidden [ossa] @testBeginApply : $@convention(thin) (@inout Struct) -> () { -bb0(%0 : $*Struct): - %2 = keypath $WritableKeyPath, (root $Struct; stored_property #Struct.st : $Int) - debug_value %2 : $WritableKeyPath, let, name "kp" - %4 = copy_value %2 : $WritableKeyPath - %5 = function_ref @swift_modifyAtWritableKeyPath : $@yield_once @convention(thin) <τ_0_0, τ_0_1> (@inout τ_0_0, @guaranteed WritableKeyPath<τ_0_0, τ_0_1>) -> @yields @inout τ_0_1 - (%6, %7) = begin_apply %5(%0, %4) : $@yield_once @convention(thin) <τ_0_0, τ_0_1> (@inout τ_0_0, @guaranteed WritableKeyPath<τ_0_0, τ_0_1>) -> @yields @inout τ_0_1 - %8 = function_ref @modifyInt : $@convention(thin) (@inout Int) -> () - %9 = apply %8(%6) : $@convention(thin) (@inout Int) -> () - end_apply %7 - destroy_value %4 : $WritableKeyPath - destroy_value %2 : $WritableKeyPath - %13 = tuple () - return %13 : $() +struct Wrapper { + var hasObject: HasObjectAndInt } -// FIXME: project_box is currently a PointerEscape, so box live ranges are not canonicalized. -// -// CHECK-TRACE-LABEL: *** CopyPropagation: testProjectBox -// -// CHECK-LABEL: sil [ossa] @testProjectBox : $@convention(thin) (@owned B) -> @owned B { -// CHECK: copy_value -// CHECK: destroy_value -// CHECK: destroy_value -// CHECK-LABEL: } // end sil function 'testProjectBox' -sil [ossa] @testProjectBox : $@convention(thin) (@owned B) -> @owned B { -bb0(%0 : @owned $B): - %box = alloc_box $<τ_0_0> { var τ_0_0 } - %boxadr = project_box %box : $<τ_0_0> { var τ_0_0 } , 0 - store %0 to [init] %boxadr : $*B - %load = load [copy] %boxadr : $*B - %copy = copy_value %box : $<τ_0_0> { var τ_0_0 } - destroy_value %box : $<τ_0_0> { var τ_0_0 } - destroy_value %copy : $<τ_0_0> { var τ_0_0 } - return %load : $B +struct UInt64 { + @_hasStorage public var _value: Builtin.Int64 { get set } + init(_value: Builtin.Int64) } -// FIXME: mark_dependence is currently a PointerEscape, so dependent live ranges are not canonicalized. -// -// CHECK-TRACE-LABEL: *** CopyPropagation: testMarkDependence -// -// CHECK-LABEL: sil [ossa] @testMarkDependence : $@convention(thin) (@inout Builtin.Int64, @owned B) -> Builtin.Int64 { -// CHECK: copy_value -// CHECK: destroy_value -// CHECK: destroy_value -// CHECK-LABEL: } // end sil function 'testMarkDependence' -sil [ossa] @testMarkDependence : $@convention(thin) (@inout Builtin.Int64, @owned B) -> Builtin.Int64 { -bb0(%0 : $*Builtin.Int64, %1 : @owned $B): - %ptr = mark_dependence %0 : $*Builtin.Int64 on %1 : $B - %val = load [trivial] %ptr : $*Builtin.Int64 - %copy = copy_value %1 : $B - destroy_value %1 : $B - destroy_value %copy : $B - return %val : $Builtin.Int64 +internal struct _StringObject { + @usableFromInline + @_hasStorage internal var _countAndFlagsBits: UInt64 { get set } + @usableFromInline + @_hasStorage internal var _object: Builtin.BridgeObject { get set } + init(_countAndFlagsBits: UInt64, _object: Builtin.BridgeObject) } -// CHECK-TRACE-LABEL: *** CopyPropagation: testBitwiseEscape -// -// CHECK-LABEL: sil [ossa] @testBitwiseEscape : $@convention(thin) (@guaranteed C) -> Builtin.RawPointer { -// CHECK-NOT: copy_value -// CHECK-NOT: destroy_value -// CHECK-LABEL: } // end sil function 'testBitwiseEscape' -sil [ossa] @testBitwiseEscape : $@convention(thin) (@guaranteed C) -> Builtin.RawPointer { -bb0(%0 : @guaranteed $C): - %raw = ref_to_raw_pointer %0 : $C to $Builtin.RawPointer - %copy = copy_value %0 : $C - destroy_value %copy : $C - return %raw : $Builtin.RawPointer +struct _StringGuts { + @_hasStorage internal var _object: _StringObject { get set } + init(_object: _StringObject) } -// CHECK-TRACE-LABEL: *** CopyPropagation: testInteriorPointer -// -// CHECK-LABEL: sil [ossa] @testInteriorPointer : $@convention(thin) (@guaranteed C) -> Int64 { -// CHECK: bb0(%0 : @guaranteed $C): -// CHECK-NEXT: begin_borrow -// CHECK-NEXT: ref_element_addr -// CHECK-NEXT: load -// CHECK-NEXT: end_borrow -// CHECK-NEXT: return -// CHECK-LABEL: } // end sil function 'testInteriorPointer' -sil [ossa] @testInteriorPointer : $@convention(thin) (@guaranteed C) -> Int64 { -bb0(%0 : @guaranteed $C): - %copy1 = copy_value %0 : $C - %borrow = begin_borrow %copy1 : $C - %adr = ref_element_addr %borrow : $C, #C.a - %val = load [trivial] %adr : $*Int64 - %copy2 = copy_value %borrow : $C - end_borrow %borrow : $C - destroy_value %copy1 : $C - destroy_value %copy2 : $C - return %val : $Int64 +public struct String { + @_hasStorage var _guts: _StringGuts { get set } + init(_guts: _StringGuts) } -// CHECK-TRACE-LABEL: *** CopyPropagation: testExtract -// -// CHECK-LABEL: sil [ossa] @testExtract : $@convention(thin) (@guaranteed NativeObjectPair) -> @owned Builtin.NativeObject { -// CHECK: bb0(%0 : @guaranteed $NativeObjectPair): -// CHECK-NEXT: [[B:%.*]] = begin_borrow %0 : $NativeObjectPair -// CHECK-NEXT: [[E:%.*]] = struct_extract [[B]] : $NativeObjectPair, #NativeObjectPair.obj1 -// CHECK-NEXT: [[C:%.*]] = copy_value [[E]] : $Builtin.NativeObject -// CHECK-NEXT: end_borrow -// CHECK-NEXT: return [[C]] : $Builtin.NativeObject -// CHECK-LABEL: } // end sil function 'testExtract' -sil [ossa] @testExtract : $@convention(thin) (@guaranteed NativeObjectPair) -> @owned Builtin.NativeObject { -bb0(%0 : @guaranteed $NativeObjectPair): - %copy1 = copy_value %0 : $NativeObjectPair - %borrow = begin_borrow %copy1 : $NativeObjectPair - %copy2 = copy_value %borrow : $NativeObjectPair - %val = struct_extract %borrow : $NativeObjectPair, #NativeObjectPair.obj1 - %copy3 = copy_value %val : $Builtin.NativeObject - end_borrow %borrow : $NativeObjectPair - %copy4 = copy_value %copy3 : $Builtin.NativeObject - destroy_value %copy1 : $NativeObjectPair - destroy_value %copy2 : $NativeObjectPair - destroy_value %copy3 : $Builtin.NativeObject - return %copy4 : $Builtin.NativeObject -} - -// ============================================================================= -// Test extending liveness though overlapping access scopes. -// ============================================================================= - -class X {} -class Y {} - -sil [ossa] @getObject : $@convention(thin) () -> @owned AnyObject - -// No overlap (access ignored): -// def -// use -// begin_access -// end_access -// destroy -// -// CHECK-LABEL: sil [ossa] @testNoOverlapInLiveBlock : $@convention(thin) () -> () { -// CHECK: [[DEF:%.*]] = apply %{{.*}}() : $@convention(thin) () -> @owned AnyObject -// CHECK-NOT: copy_value -// CHECK: store [[DEF]] to [init] -// CHECK: begin_access -// CHECK: end_access -// CHECK: bb1: -// CHECK-NOT: destroy_value [[DEF]] : $AnyObject -// CHECK-LABEL: } // end sil function 'testNoOverlapInLiveBlock' -sil [ossa] @testNoOverlapInLiveBlock : $@convention(thin) () -> () { -bb0: - %box = alloc_box ${ var AnyObject }, var, name "x" - %adr = project_box %box : ${ var AnyObject }, 0 - %f = function_ref @getObject : $@convention(thin) () -> @owned AnyObject - // def - %def = apply %f() : $@convention(thin) () -> @owned AnyObject - %copy = copy_value %def : $AnyObject - // use - store %def to [init] %adr : $*AnyObject - // end canonical lifetime - %access = begin_access [read] [dynamic] %adr : $*AnyObject - %obj = load [copy] %access : $*AnyObject - end_access %access : $*AnyObject - br bb1 - -bb1: - destroy_value %copy : $AnyObject - destroy_value %obj : $AnyObject - destroy_value %box : ${ var AnyObject } - %v = tuple () - return %v : $() -} - -// No overlap (access ignored): -// def -// use -// br... -// bb... -// begin_access -// end_access -// destroy -// -// CHECK-LABEL: sil [ossa] @testNoOverlapInDeadBlock : $@convention(thin) () -> () { -// CHECK: [[DEF:%.*]] = apply %{{.*}}() : $@convention(thin) () -> @owned AnyObject -// CHECK-NOT: copy_value -// CHECK: store [[DEF]] to [init] %{{.*}} : $*AnyObject -// CHECK: br bb1 -// CHECK: bb1: -// CHECK: begin_access -// CHECK: end_access -// CHECK: br bb2 -// CHECK: bb2: -// CHECK-NOT: destroy_value [[DEF]] : $AnyObject -// CHECK-LABEL: } // end sil function 'testNoOverlapInDeadBlock' -sil [ossa] @testNoOverlapInDeadBlock : $@convention(thin) () -> () { -bb0: - %box = alloc_box ${ var AnyObject }, var, name "x" - %adr = project_box %box : ${ var AnyObject }, 0 - %f = function_ref @getObject : $@convention(thin) () -> @owned AnyObject - // def - %def = apply %f() : $@convention(thin) () -> @owned AnyObject - %copy = copy_value %def : $AnyObject - // use - store %def to [init] %adr : $*AnyObject - // end canonical lifetime - br bb1 - -bb1: - %access = begin_access [read] [dynamic] %adr : $*AnyObject - %obj = load [copy] %access : $*AnyObject - end_access %access : $*AnyObject - br bb2 - -bb2: - destroy_value %copy : $AnyObject - destroy_value %obj : $AnyObject - destroy_value %box : ${ var AnyObject } - %v = tuple () - return %v : $() -} - -// Overlapping (must extend pruned liveness): -// -// %def -// begin_access // access scope unrelated to def -// use %def // pruned liveness ends here -// end_access -// -// CHECK-LABEL: sil [ossa] @testOverlapInLiveBlock : $@convention(thin) () -> () { -// CHECK: [[DEF:%.*]] = apply %{{.*}}() : $@convention(thin) () -> @owned AnyObject -// CHECK: begin_access -// CHECK: [[COPY:%.*]] = copy_value [[DEF]] : $AnyObject -// CHECK: store [[COPY]] to [init] %{{.*}} : $*AnyObject -// CHECK: end_access -// CHECK: destroy_value [[DEF]] : $AnyObject -// CHECK: br bb1 -// CHECK: bb1: -// CHECK-NOT: destroy_value [[DEF]] : $AnyObject -// CHECK-LABEL: } // end sil function 'testOverlapInLiveBlock' -sil [ossa] @testOverlapInLiveBlock : $@convention(thin) () -> () { -bb0: - %box = alloc_box ${ var AnyObject }, var, name "x" - %adr = project_box %box : ${ var AnyObject }, 0 - %f = function_ref @getObject : $@convention(thin) () -> @owned AnyObject - // def - %def = apply %f() : $@convention(thin) () -> @owned AnyObject - %copy = copy_value %def : $AnyObject - %access = begin_access [read] [dynamic] %adr : $*AnyObject - // use - store %def to [init] %adr : $*AnyObject - %obj = load [copy] %access : $*AnyObject - end_access %access : $*AnyObject - // Branch to avoid reusing the destroy_value - br bb1 - -bb1: - destroy_value %copy : $AnyObject - destroy_value %obj : $AnyObject - destroy_value %box : ${ var AnyObject } - %v = tuple () - return %v : $() -} - -// Overlapping (must extend pruned liveness): -// -// %def -// begin_access // access scope unrelated to def -// use %def // pruned liveness ends here -// br... -// bb... -// end_access -// -// CHECK-LABEL: sil [ossa] @testOverlapInDeadBlock : $@convention(thin) () -> () { -// CHECK: [[DEF:%.*]] = apply %{{.*}}() : $@convention(thin) () -> @owned AnyObject -// CHECK: begin_access -// CHECK: [[COPY:%.*]] = copy_value [[DEF]] : $AnyObject -// CHECK: store [[COPY]] to [init] %{{.*}} : $*AnyObject -// CHECK: br bb1 -// CHECK: bb1: -// CHECK: end_access -// CHECK: destroy_value [[DEF]] : $AnyObject -// CHECK: br bb2 -// CHECK: bb2: -// CHECK-NOT: destroy_value [[DEF]] : $AnyObject -// CHECK-LABEL: } // end sil function 'testOverlapInDeadBlock' -sil [ossa] @testOverlapInDeadBlock : $@convention(thin) () -> () { -bb0: - %box = alloc_box ${ var AnyObject }, var, name "x" - %adr = project_box %box : ${ var AnyObject }, 0 - %f = function_ref @getObject : $@convention(thin) () -> @owned AnyObject - // def - %def = apply %f() : $@convention(thin) () -> @owned AnyObject - %copy = copy_value %def : $AnyObject - %access = begin_access [read] [dynamic] %adr : $*AnyObject - // use - store %def to [init] %adr : $*AnyObject - br bb1 - -bb1: - %obj = load [copy] %access : $*AnyObject - end_access %access : $*AnyObject - br bb2 - -bb2: - destroy_value %copy : $AnyObject - destroy_value %obj : $AnyObject - destroy_value %box : ${ var AnyObject } - %v = tuple () - return %v : $() -} - -// Fully Overlapping (must extend pruned liveness): -// -// begin_access // access scope unrelated to def -// %def -// use %def // pruned liveness ends here -// end_access -// -// CHECK-LABEL: sil [ossa] @testFullOverlapInDefBlock : $@convention(thin) () -> () { -// CHECK: begin_access -// CHECK: [[DEF:%.*]] = apply %{{.*}}() : $@convention(thin) () -> @owned AnyObject -// CHECK: [[COPY:%.*]] = copy_value [[DEF]] : $AnyObject -// CHECK: store [[COPY]] to [init] %{{.*}} : $*AnyObject -// CHECK: end_access -// CHECK: destroy_value [[DEF]] : $AnyObject -// CHECK: br bb1 -// CHECK: bb1: -// CHECK-NOT: destroy_value [[DEF]] : $AnyObject -// CHECK-LABEL: } // end sil function 'testFullOverlapInDefBlock' -sil [ossa] @testFullOverlapInDefBlock : $@convention(thin) () -> () { -bb0: - %box = alloc_box ${ var AnyObject }, var, name "x" - %adr = project_box %box : ${ var AnyObject }, 0 - %access = begin_access [read] [dynamic] %adr : $*AnyObject - %f = function_ref @getObject : $@convention(thin) () -> @owned AnyObject - // def - %def = apply %f() : $@convention(thin) () -> @owned AnyObject - %copy = copy_value %def : $AnyObject - // use - store %def to [init] %adr : $*AnyObject - %obj = load [copy] %access : $*AnyObject - end_access %access : $*AnyObject - // Branch to avoid reusing the destroy_value - br bb1 - -bb1: - destroy_value %copy : $AnyObject - destroy_value %obj : $AnyObject - destroy_value %box : ${ var AnyObject } - %v = tuple () - return %v : $() -} - -// Fully Overlapping (must extend pruned liveness): -// -// begin_access // access scope unrelated to def -// br... -// bb... -// %def -// use %def // pruned liveness ends here -// end_access -// -// CHECK-LABEL: sil [ossa] @testFullOverlapBeforeDefBlock : $@convention(thin) () -> () { -// CHECK: begin_access -// CHECK: br bb1 -// CHECK: bb1: -// CHECK: [[DEF:%.*]] = apply %{{.*}}() : $@convention(thin) () -> @owned AnyObject -// CHECK: [[COPY:%.*]] = copy_value [[DEF]] : $AnyObject -// CHECK: store [[COPY]] to [init] %{{.*}} : $*AnyObject -// CHECK: end_access -// CHECK: destroy_value [[DEF]] : $AnyObject -// CHECK: br bb2 -// CHECK: bb2: -// CHECK-NOT: destroy_value [[DEF]] : $AnyObject -// CHECK-LABEL: } // end sil function 'testFullOverlapBeforeDefBlock' -sil [ossa] @testFullOverlapBeforeDefBlock : $@convention(thin) () -> () { -bb0: - %box = alloc_box ${ var AnyObject }, var, name "x" - %adr = project_box %box : ${ var AnyObject }, 0 - %access = begin_access [read] [dynamic] %adr : $*AnyObject - br bb1 - -bb1: - %f = function_ref @getObject : $@convention(thin) () -> @owned AnyObject - // def - %def = apply %f() : $@convention(thin) () -> @owned AnyObject - %copy = copy_value %def : $AnyObject - // use - store %def to [init] %adr : $*AnyObject - %obj = load [copy] %access : $*AnyObject - end_access %access : $*AnyObject - // Branch to avoid reusing the destroy_value - br bb2 - -bb2: - destroy_value %copy : $AnyObject - destroy_value %obj : $AnyObject - destroy_value %box : ${ var AnyObject } - %v = tuple () - return %v : $() -} - -// Original Overlapping (unnecessarilly extends pruned liveness): -// -// TODO: this copy could be avoided but is probably an unusual case, -// and sinking the destroy outside the access scope might help to -// optimize the access itself. -// -// %def -// begin_access // access scope unrelated to def -// use %def // pruned liveness ends here -// destroy %def -// end_access -// -// CHECK-LABEL: sil [ossa] @testOriginalOverlapInLiveBlock : $@convention(thin) () -> () { -// CHECK: [[DEF:%.*]] = apply %{{.*}}() : $@convention(thin) () -> @owned AnyObject -// CHECK: begin_access -// CHECK: [[COPY:%.*]] = copy_value [[DEF]] : $AnyObject -// CHECK: store [[COPY]] to [init] %{{.*}} : $*AnyObject -// CHECK: end_access -// CHECK: destroy_value [[DEF]] : $AnyObject -// CHECK: br bb1 -// CHECK-LABEL: } // end sil function 'testOriginalOverlapInLiveBlock' -sil [ossa] @testOriginalOverlapInLiveBlock : $@convention(thin) () -> () { -bb0: - %box = alloc_box ${ var AnyObject }, var, name "x" - %adr = project_box %box : ${ var AnyObject }, 0 - %f = function_ref @getObject : $@convention(thin) () -> @owned AnyObject - // def - %def = apply %f() : $@convention(thin) () -> @owned AnyObject - %copy = copy_value %def : $AnyObject - %access = begin_access [read] [dynamic] %adr : $*AnyObject - // use - store %def to [init] %adr : $*AnyObject - destroy_value %copy : $AnyObject - %obj = load [copy] %access : $*AnyObject - end_access %access : $*AnyObject - br bb1 - -bb1: - destroy_value %obj : $AnyObject - destroy_value %box : ${ var AnyObject } - %v = tuple () - return %v : $() -} - -// Original Overlapping (unnecessarilly extends pruned liveness): -// -// TODO: this copy could be avoided but is probably an unusual case, -// and sinking the destroy outside the access scope might help to -// optimize the access itself. -// -// %def -// begin_access // access scope unrelated to def -// use %def // pruned liveness ends here -// br bb1 -// bb1: -// destroy %def -// end_access -// -// CHECK-LABEL: sil [ossa] @testOriginalOverlapInDeadBlock : $@convention(thin) () -> () { -// CHECK: [[DEF:%.*]] = apply %{{.*}}() : $@convention(thin) () -> @owned AnyObject -// CHECK: begin_access -// CHECK: [[COPY:%.*]] = copy_value [[DEF]] : $AnyObject -// CHECK: store [[COPY]] to [init] %{{.*}} : $*AnyObject -// CHECK: br bb1 -// CHECK: bb1: -// CHECK: end_access -// CHECK: destroy_value [[DEF]] : $AnyObject -// CHECK-LABEL: } // end sil function 'testOriginalOverlapInDeadBlock' -sil [ossa] @testOriginalOverlapInDeadBlock : $@convention(thin) () -> () { -bb0: - %box = alloc_box ${ var AnyObject }, var, name "x" - %adr = project_box %box : ${ var AnyObject }, 0 - %f = function_ref @getObject : $@convention(thin) () -> @owned AnyObject - // def - %def = apply %f() : $@convention(thin) () -> @owned AnyObject - %copy = copy_value %def : $AnyObject - %access = begin_access [read] [dynamic] %adr : $*AnyObject - // use - store %def to [init] %adr : $*AnyObject - br bb1 - -bb1: - destroy_value %copy : $AnyObject - %obj = load [copy] %access : $*AnyObject - end_access %access : $*AnyObject - destroy_value %obj : $AnyObject - destroy_value %box : ${ var AnyObject } - %v = tuple () - return %v : $() -} - -// Interleaved access (requires iterative lifetime extension): -// -// %def -// begin_access X -// use %def // Initial pruned lifetime boundary -// begin_access Y -// end_access X // Lifetime boundary after first extension -// end_access Y // Lifetime boundary after second extension -// destroy %def -// -// CHECK-LABEL: sil [ossa] @testInterleavedAccessScope : $@convention(thin) (@inout AnyObject) -> () { -// CHECK: [[DEF:%.*]] = apply %{{.*}}() : $@convention(thin) () -> @owned AnyObject -// CHECK: begin_access {{.*}} : $*X -// CHECK: [[COPY:%.*]] = copy_value [[DEF]] : $AnyObject -// CHECK: store [[COPY]] to [assign] %{{.*}} : $*AnyObject -// CHECK: begin_access {{.*}} : $*Y -// CHECK: end_access {{.*}} : $*X -// CHECK: end_access {{.*}} : $*Y -// CHECK: destroy_value [[DEF]] : $AnyObject -// CHECK: br bb1 -// CHECK: bb1: -// CHECK-NOT: destroy_value {{.*}} : $AnyObject -// CHECK-LABEL: } // end sil function 'testInterleavedAccessScope' -sil [ossa] @testInterleavedAccessScope : $@convention(thin) (@inout AnyObject) -> () { -bb0(%0 : $*AnyObject): - %x = alloc_box ${ var X }, var, name "x" - %xadr = project_box %x : ${ var X }, 0 - %y = alloc_box ${ var Y }, var, name "y" - %yadr = project_box %y : ${ var Y }, 0 - %f = function_ref @getObject : $@convention(thin) () -> @owned AnyObject - // def - %def = apply %f() : $@convention(thin) () -> @owned AnyObject - %copy = copy_value %def : $AnyObject - %accessX = begin_access [read] [dynamic] %xadr : $*X - // use - store %def to [assign] %0 : $*AnyObject - %accessY = begin_access [read] [dynamic] %yadr : $*Y - // accessX overlaps pruned liveness on the first iteration - end_access %accessX : $*X - // accessY only overlaps pruned liveness on the second iteration - end_access %accessY : $*Y - br bb1 - -bb1: - destroy_value %copy : $AnyObject - destroy_value %y : ${ var Y } - destroy_value %x : ${ var X } - %v = tuple () - return %v : $() -} - -// Interleaved non-local access (requires iterative lifetime extension): -// -// %def -// begin_access X -// use %def // Initial pruned lifetime boundary -// br bb1 -// bb1: -// begin_access Y -// br bb2 -// bb2: -// end_access X // Lifetime boundary after first extension -// br bb3 -// bb3: -// end_access Y // Lifetime boundary after second extension -// br bb4 -// bb4: -// destroy %def -// -// CHECK-LABEL: sil [ossa] @testInterleavedNonLocalAccessScope : $@convention(thin) (@inout AnyObject) -> () { -// CHECK: [[DEF:%.*]] = apply %{{.*}}() : $@convention(thin) () -> @owned AnyObject -// CHECK: begin_access {{.*}} : $*X -// CHECK: [[COPY:%.*]] = copy_value [[DEF]] : $AnyObject -// CHECK: store [[COPY]] to [assign] %{{.*}} : $*AnyObject -// CHECK: br bb1 -// CHECK: bb1: -// CHECK: begin_access {{.*}} : $*Y -// CHECK: br bb2 -// CHECK: bb2: -// CHECK: end_access {{.*}} : $*X -// CHECK: br bb3 -// CHECK: bb3: -// CHECK: end_access {{.*}} : $*Y -// CHECK: destroy_value [[DEF]] : $AnyObject -// CHECK: br bb4 -// CHECK: bb4: -// CHECK-NOT: destroy_value {{.*}} : $AnyObject -// CHECK-LABEL: } // end sil function 'testInterleavedNonLocalAccessScope' -sil [ossa] @testInterleavedNonLocalAccessScope : $@convention(thin) (@inout AnyObject) -> () { -bb0(%0 : $*AnyObject): - %x = alloc_box ${ var X }, var, name "x" - %xadr = project_box %x : ${ var X }, 0 - %y = alloc_box ${ var Y }, var, name "y" - %yadr = project_box %y : ${ var Y }, 0 - %f = function_ref @getObject : $@convention(thin) () -> @owned AnyObject - // def - %def = apply %f() : $@convention(thin) () -> @owned AnyObject - %copy = copy_value %def : $AnyObject - %accessX = begin_access [read] [dynamic] %xadr : $*X - // use - store %def to [assign] %0 : $*AnyObject - br bb1 - -bb1: - %accessY = begin_access [read] [dynamic] %yadr : $*Y - br bb2 - -bb2: - // accessX overlaps pruned liveness on the first iteration - end_access %accessX : $*X - br bb3 - -bb3: - // accessY only overlaps pruned liveness on the second iteration - end_access %accessY : $*Y - br bb4 - -bb4: - destroy_value %copy : $AnyObject - destroy_value %y : ${ var Y } - destroy_value %x : ${ var X } - %v = tuple () - return %v : $() +extension String { + public struct UTF16View { + @_hasStorage internal var _guts: _StringGuts { get set } + init(_guts: _StringGuts) + } } // ============================================================================= @@ -1669,3 +559,85 @@ bb3(%borrow3 : @guaranteed $C, %copy3 : @owned $C): %result = tuple () return %result : $() } + +// Test conversion from struct_extract to destructure. +// +// CHECK-LABEL: sil [ossa] @testDestructureConversion : $@convention(thin) (@owned Wrapper) -> () { +// CHECK: bb0(%0 : @owned $Wrapper): +// CHECK: [[BORROW:%.*]] = begin_borrow %0 : $Wrapper +// CHECK: [[COPY:%.*]] = copy_value [[BORROW]] : $Wrapper +// CHECK: [[SPLIT:%.*]] = destructure_struct [[COPY]] : $Wrapper +// CHECK: begin_borrow [[SPLIT]] : $HasObjectAndInt +// CHECK: destroy_value [[SPLIT]] : $HasObjectAndInt +// CHECK-LABEL: } // end sil function 'testDestructureConversion' +sil [ossa] @testDestructureConversion : $@convention(thin) (@owned Wrapper) -> () { +bb0(%0 : @owned $Wrapper): + %1 = begin_borrow %0 : $Wrapper + %2 = struct_extract %1 : $Wrapper, #Wrapper.hasObject + // This copy is only used by a nested borrow scope. + %3 = copy_value %2 : $HasObjectAndInt + // This borrow scope is only used by debug_value and extracting a trivial member + %4 = begin_borrow %3 : $HasObjectAndInt + debug_value %4 : $HasObjectAndInt, let, name "self", argno 1 + %6 = struct_extract %4 : $HasObjectAndInt, #HasObjectAndInt.value + %7 = builtin "and_Int64"(%6 : $Builtin.Int64, undef : $Builtin.Int64) : $Builtin.Int64 + end_borrow %4 : $HasObjectAndInt + end_borrow %1 : $Wrapper + destroy_value %3 : $HasObjectAndInt + destroy_value %0 : $Wrapper + %99 = tuple () + return %99 : $() +} + +// testUselessBorrow +sil [ossa] @testUselessBorrow : $@convention(thin) (@owned String) -> () { +// %0 // users: %29, %1 +bb0(%0 : @owned $String): + %1 = begin_borrow %0 : $String // users: %5, %2 + %2 = struct_extract %1 : $String, #String._guts // user: %3 + %3 = copy_value %2 : $_StringGuts // user: %4 + %4 = struct $String.UTF16View (%3 : $_StringGuts) // users: %28, %7 + end_borrow %1 : $String // id: %5 + br bb1 // id: %6 + +bb1: // Preds: bb7 bb5 bb0 + %7 = begin_borrow %4 : $String.UTF16View // users: %15, %8 + %8 = struct_extract %7 : $String.UTF16View, #String.UTF16View._guts // user: %9 + %9 = struct_extract %8 : $_StringGuts, #_StringGuts._object // user: %10 + %10 = copy_value %9 : $_StringObject // users: %23, %19, %11 + %11 = begin_borrow %10 : $_StringObject // users: %22, %21, %18, %16 + cond_br undef, bb2, bb3 // id: %12 + +bb2: // Preds: bb1 + br bb4 // id: %13 + +bb3: // Preds: bb1 + br bb4 // id: %14 + +bb4: // Preds: bb3 bb2 + end_borrow %7 : $String.UTF16View // id: %15 + debug_value %11 : $_StringObject, let, name "self", argno 1 // id: %16 + cond_br undef, bb5, bb6 // id: %17 + +bb5: // Preds: bb4 + end_borrow %11 : $_StringObject // id: %18 + destroy_value %10 : $_StringObject // id: %19 + br bb1 // id: %20 + +bb6: // Preds: bb4 + %21 = struct_extract %11 : $_StringObject, #_StringObject._countAndFlagsBits // user: %24 + end_borrow %11 : $_StringObject // id: %22 + destroy_value %10 : $_StringObject // id: %23 + %24 = struct_extract %21 : $UInt64, #UInt64._value // user: %25 + %25 = builtin "and_Int64"(%24 : $Builtin.Int64, undef : $Builtin.Int64) : $Builtin.Int64 + cond_br undef, bb7, bb8 // id: %26 + +bb7: // Preds: bb6 + br bb1 // id: %27 + +bb8: // Preds: bb6 + destroy_value %4 : $String.UTF16View // id: %28 + destroy_value %0 : $String // id: %29 + %30 = tuple () // user: %31 + return %30 : $() // id: %31 +} // end sil function 'testUselessBorrow' diff --git a/test/SILOptimizer/copy_propagation_opaque.sil b/test/SILOptimizer/copy_propagation_opaque.sil new file mode 100644 index 0000000000000..dfb9aa8c0350c --- /dev/null +++ b/test/SILOptimizer/copy_propagation_opaque.sil @@ -0,0 +1,1163 @@ +// RUN: %target-sil-opt -copy-propagation -canonical-ossa-rewrite-borrows -enable-sil-opaque-values -enable-sil-verify-all %s | %FileCheck %s --check-prefixes=CHECK,CHECK-OPT +// RUN: %target-sil-opt -mandatory-copy-propagation -canonical-ossa-rewrite-borrows -enable-sil-opaque-values -enable-sil-verify-all %s | %FileCheck %s --check-prefixes=CHECK,CHECK-DEBUG +// RUN: %target-sil-opt -copy-propagation -canonical-ossa-rewrite-borrows -enable-sil-opaque-values -debug-only=copy-propagation %s -o /dev/null 2>&1 | %FileCheck %s --check-prefix=CHECK-TRACE + +// REQUIRES: asserts + +sil_stage canonical + +import Builtin +import Swift + +sil [ossa] @takeOwned : $@convention(thin) (@in T) -> () +sil [ossa] @takeMultipleOwned : $@convention(thin) (@in T, @in T) -> () +sil [ossa] @takeGuaranteed : $@convention(thin) (@in_guaranteed T) -> () +sil [ossa] @takeGuaranteedAndOwnedArg : $@convention(thin) (@in_guaranteed T, @in T) -> () + +class B { } + +class C { + var a: Int64 +} + +struct NativeObjectPair { + var obj1 : Builtin.NativeObject + var obj2 : Builtin.NativeObject +} + +// Once Mem2Reg supports ownership, it will leave behind extra copies as +// seen in the SIL test below for simple assignment: +// public func testVarAssign(_ t: T) -> T { +// var u = t +// return u +// } +// CopyPropagation should leave behind a single copy and no destroys. +// +// CHECK-LABEL: sil [ossa] @testVarAssign : $@convention(thin) (@in_guaranteed T) -> @out T { +// CHECK: bb0(%0 : @guaranteed $T): +// CHECK-NOT: destroy +// CHECK: [[CPY:%.*]] = copy_value %0 : $T +// CHECK_CHECK-NOT: destroy +// CHECK_CHECK: return [[CPY]] : $T +// CHECK-LABEL: } // end sil function 'testVarAssign' +sil [ossa] @testVarAssign : $@convention(thin) (@in_guaranteed T) -> @out T { +bb0(%0 : @guaranteed $T): + %1 = copy_value %0 : $T + %2 = copy_value %1 : $T + destroy_value %1 : $T + return %2 : $T +} + +// CHECK-LABEL: sil [ossa] @multiReturnValue : $@convention(thin) (@in_guaranteed T) -> (@out T, @out T) { +// CHECK: bb0(%0 : @guaranteed $T): +// CHECK-NOT: destroy +// CHECK: [[CPY1:%.*]] = copy_value %0 : $T +// CHECK_CHECK-NOT: destroy +// CHECK_CHECK: [[CPY2:%.*]] = copy_value %0 : $T +// CHECK_CHECK-NOT: destroy +// CHECK_CHECK: [[R:%.*]] = tuple ([[CPY1]] : $T, [[CPY2]] : $T) +// CHECK_CHECK-NOT: destroy +// CHECK_CHECK: return [[R]] : $(T, T) +// CHECK-LABEL: } // end sil function 'multiReturnValue' +sil [ossa] @multiReturnValue : $@convention(thin) (@in_guaranteed T) -> (@out T, @out T) { +bb0(%0 : @guaranteed $T): + %1 = copy_value %0 : $T + %2 = copy_value %1 : $T + %3 = copy_value %1 : $T + %4 = tuple (%2 : $T, %3 : $T) + destroy_value %1 : $T + return %4 : $(T, T) +} + +// CHECK-LABEL: sil [ossa] @multiCallResult : $@convention(thin) (@in_guaranteed T) -> @out T { +// CHECK: bb0(%0 : @guaranteed $T): +// CHECK_CHECK-NEXT: // function_ref multiReturnValue +// CHECK_CHECK-NEXT: [[F:%.*]] = function_ref @multiReturnValue : $@convention(thin) <τ_0_0> (@in_guaranteed τ_0_0) -> (@out τ_0_0, @out τ_0_0) +// CHECK_CHECK-NEXT: [[CALL:%.*]] = apply [[F]](%0) : $@convention(thin) <τ_0_0> (@in_guaranteed τ_0_0) -> (@out τ_0_0, @out τ_0_0) +// CHECK_CHECK-NEXT: ([[D1:%.*]], [[D2:%.*]]) = destructure_tuple [[CALL]] : $(T, T) +// CHECK_CHECK-NEXT: destroy_value [[D2]] : $T +// CHECK_CHECK-NEXT: return [[D1]] : $T +// CHECK-LABEL: } // end sil function 'multiCallResult' +sil [ossa] @multiCallResult : $@convention(thin) (@in_guaranteed T) -> @out T { +bb0(%0 : @guaranteed $T): + %1 = copy_value %0 : $T + %2 = function_ref @multiReturnValue : $@convention(thin) <τ_0_0> (@in_guaranteed τ_0_0) -> (@out τ_0_0, @out τ_0_0) + %3 = apply %2(%1) : $@convention(thin) <τ_0_0> (@in_guaranteed τ_0_0) -> (@out τ_0_0, @out τ_0_0) + (%4, %5) = destructure_tuple %3 : $(T, T) + %6 = copy_value %4 : $T + %7 = copy_value %5 : $T + destroy_value %1 : $T + destroy_value %4 : $T + destroy_value %5 : $T + destroy_value %7 : $T + return %6 : $T +} + +// CHECK-LABEL: sil [ossa] @testPhi : $@convention(thin) (@in_guaranteed T, @in_guaranteed T, Bool) -> @out T { +// CHECK: bb0(%0 : @guaranteed $T, %1 : @guaranteed $T, %2 : $Bool): +// CHECK_CHECK-NEXT: struct_extract %2 : $Bool, #Bool._value +// CHECK_CHECK-NEXT: cond_br %{{.*}}, bb1, bb2 +// +// CHECK: bb1: +// CHECK_CHECK-NEXT: copy_value %0 : $T +// CHECK_CHECK-NEXT: br bb3(% +// +// CHECK: bb2: +// CHECK_CHECK-NEXT: copy_value %1 : $T +// CHECK_CHECK-NEXT: br bb3(% +// +// CHECK: bb3(% +// CHECK_CHECK-NEXT: return +// CHECK-LABEL: } // end sil function 'testPhi' +sil [ossa] @testPhi : $@convention(thin) (@in_guaranteed T, @in_guaranteed T, Bool) -> @out T { +bb0(%0 : @guaranteed $T, %1 : @guaranteed $T, %2 : $Bool): + %3 = copy_value %0 : $T + %4 = copy_value %1 : $T + %5 = struct_extract %2 : $Bool, #Bool._value + cond_br %5, bb1, bb2 + +bb1: + %7 = copy_value %3 : $T + br bb3(%7 : $T) + +bb2: + %9 = copy_value %4 : $T + br bb3(%9 : $T) + +bb3(%11 : @owned $T): + destroy_value %4 : $T + destroy_value %3 : $T + return %11 : $T +} + +// CHECK-LABEL: sil [ossa] @testConsume : $@convention(thin) (@in T, @inout T) -> () { +// CHECK: bb0(%0 : @owned $T, %1 : $*T): +// +// Mandatory opt reuses the original copy for the consuming store. +// CHECK-DEBUG-NEXT: [[STOREVAL:%.*]] = copy_value %0 : $T +// +// CHECK-NEXT: debug_value %0 : $T +// CHECK-DEBUG-NEXT: store [[STOREVAL]] to [assign] %1 : $*T +// CHECK-OPT-NEXT: store %0 to [assign] %1 : $*T +// +// The non-consuming use now uses the original value. +// CHECK-DEBUG-NEXT: debug_value %0 : $T +// +// CHECK-NEXT: debug_value_addr %1 : $*T +// +// The original destroy is deleted with optimizations enabled. +// CHECK-DEBUG-NEXT: destroy_value %0 : $T +// CHECK-NEXT: tuple () +// CHECK-NEXT: return +// CHECK-LABEL: // end sil function 'testConsume' +sil [ossa] @testConsume : $@convention(thin) (@in T, @inout T) -> () { +bb0(%arg : @owned $T, %addr : $*T): + %copy = copy_value %arg : $T + debug_value %copy : $T + store %copy to [assign] %addr : $*T + debug_value %arg : $T + debug_value_addr %addr : $*T + destroy_value %arg : $T + %v = tuple () + return %v : $() +} + +// CHECK-LABEL: sil [ossa] @testDestroyEdge : $@convention(thin) (@in T, Builtin.Int1) -> () { +// CHECK: bb0(%0 : @owned $T, %1 : $Builtin.Int1): +// CHECK-OPT-NEXT: destroy_value %0 : $T +// CHECK-DEBUG-NEXT: cond_br %1, bb2, bb1 +// +// CHECK: bb1: +// Debug build inserts a new destroy +// CHECK-DEBUG-NEXT: destroy_value %0 : $T +// CHECK-NEXT: br bb3 +// +// CHECK: bb2: +// The original copy is deleted in both cases. +// CHECK-DEBUG-NEXT: debug_value %0 : $T +// CHECK-DEBUG-NEXT: destroy_value %0 : $T +// CHECK-NEXT: br bb3 +// +// CHECK: bb3: +// The original destroy is deleted in both cases. +// CHECK-NEXT: tuple () +// CHECK-NEXT: return +// CHECK-LABEL: } // end sil function 'testDestroyEdge' +sil [ossa] @testDestroyEdge : $@convention(thin) (@in T, Builtin.Int1) -> () { +bb0(%arg : @owned $T, %z : $Builtin.Int1): + cond_br %z, bb2, bb1 + +bb1: + br bb3 + +bb2: + debug_value %arg : $T + %copy = copy_value %arg : $T + destroy_value %copy : $T + br bb3 + +bb3: + destroy_value %arg : $T + %10 = tuple () + return %10 : $() +} + +// Test the same user instruction with both @guaranteed and @owned operands taking the same copied value. +// We need to keep the value alive to the end of the instruction. +// +// CHECK-LABEL: sil [ossa] @testGuaranteedAndOwnedArg : $@convention(thin) (@in T) -> () { +// CHECK: bb0(%0 : @owned $T): +// CHECK-NEXT: [[CPY:%.*]] = copy_value %0 : $T +// CHECK-NEXT: // function_ref takeGuaranteedAndOwnedArg +// CHECK-NEXT: function_ref @takeGuaranteedAndOwnedArg : $@convention(thin) <τ_0_0> (@in_guaranteed τ_0_0, @in τ_0_0) -> () +// CHECK-NEXT: apply %{{.*}}(%0, [[CPY]]) : $@convention(thin) <τ_0_0> (@in_guaranteed τ_0_0, @in τ_0_0) -> () +// CHECK-NEXT: destroy_value %0 : $T +// CHECK-NEXT: return %{{.*}} : $() +// CHECK-LABEL: } // end sil function 'testGuaranteedAndOwnedArg' +sil [ossa] @testGuaranteedAndOwnedArg : $@convention(thin) (@in T) -> () { +bb(%0 : @owned $T): + %copy = copy_value %0 : $T + %f = function_ref @takeGuaranteedAndOwnedArg : $@convention(thin) (@in_guaranteed T, @in T) -> () + %call = apply %f(%0, %copy) : $@convention(thin) (@in_guaranteed T, @in T) -> () + destroy_value %0 : $T + return %call : $() +} + +// Reuse one of the copies for the apply. Eliminate the other copy and destroy. +// Which copy is reused is unfortunately sensitive to the use list order. +// +// CHECK-TRACE-LABEL: CopyPropagation: testCopy2OperReuse +// CHECK-TRACE: Removing destroy_value %0 : $T +// CHECK-TRACE: Removing %{{.*}} = copy_value %0 : $T +// CHECK-TRACE-NOT: Removing +// +// CHECK-LABEL: sil [ossa] @testCopy2OperReuse : $@convention(thin) (@in T) -> () { +// CHECK: bb0(%0 : @owned $T): +// CHECK-NEXT: [[CP:%.*]] = copy_value %0 : $T +// CHECK-NEXT: // function_ref takeMultipleOwned +// CHECK-NEXT: function_ref @takeMultipleOwned : $@convention(thin) <τ_0_0> (@in τ_0_0, @in τ_0_0) -> () +// CHECK-NEXT: apply %{{.*}}(%0, [[CP]]) : $@convention(thin) <τ_0_0> (@in τ_0_0, @in τ_0_0) -> () +// CHECK-NEXT: tuple () +// CHECK-NEXT: return +// CHECK-LABEL: } // end sil function 'testCopy2OperReuse' +sil [ossa] @testCopy2OperReuse : $@convention(thin) (@in T) -> () { +bb0(%arg : @owned $T): + %copy1 = copy_value %arg : $T + %copy2 = copy_value %arg : $T + %f = function_ref @takeMultipleOwned : $@convention(thin) (@in T, @in T) -> () + %call = apply %f(%copy1, %copy2) : $@convention(thin) (@in T, @in T) -> () + destroy_value %arg : $T + %10 = tuple () + return %10 : $() +} + +// Reuse one copy and eliminate the other copy and destroy. +// +// CHECK-TRACE-LABEL: *** CopyPropagation: testCopy2CallReuse +// CHECK-TRACE: Removing destroy_value %0 : $T +// CHECK-TRACE: Removing %{{.*}} = copy_value %0 : $T +// CHECK-TRACE-NOT: Removing +// +// CHECK-LABEL: sil [ossa] @testCopy2CallReuse : $@convention(thin) (@in T) -> () { +// CHECK: bb0(%0 : @owned $T): +// CHECK-NEXT: [[CP:%.*]] = copy_value %0 : $T +// CHECK-NEXT: // function_ref +// CHECK-NEXT: function_ref +// CHECK-NEXT: apply %{{.*}}([[CP]]) +// CHECK-NEXT: apply %{{.*}}(%0) +// CHECK-NEXT: tuple +// CHECK-NEXT: return +// CHECK-LABEL: } // end sil function 'testCopy2CallReuse' +sil [ossa] @testCopy2CallReuse : $@convention(thin) (@in T) -> () { +bb0(%arg : @owned $T): + %copy1 = copy_value %arg : $T + %copy2 = copy_value %arg : $T + %f = function_ref @takeOwned : $@convention(thin) (@in T) -> () + %call1 = apply %f(%copy1) : $@convention(thin) (@in T) -> () + %call2 = apply %f(%copy2) : $@convention(thin) (@in T) -> () + destroy_value %arg : $T + %10 = tuple () + return %10 : $() +} + +// bb1 has a consuming instruction but is also live-out. Reuse the copy in bb1. +// +// CHECK-TRACE-LABEL: *** CopyPropagation: liveoutConsume +// CHECK-TRACE: Removing destroy_value %0 : $T +// CHECK-TRACE: Removing %{{.*}} = copy_value %0 : $T +// CHECK-TRACE-NOT: Removing +// +// CHECK-LABEL: sil [ossa] @liveoutConsume : $@convention(thin) (@owned T, Builtin.Int1) -> () { +// CHECK: bb0(%0 : @owned $T, %1 : $Builtin.Int1): +// CHECK-NOT: copy_value +// CHECK: cond_br %1, bb2, bb1 +// CHECK: bb1: +// CHECK: copy_value %0 : $T +// CHECK: apply +// CHECK: br bb3 +// CHECK: bb3: +// CHECK-NOT: copy_value +// CHECK: apply +// CHECK-NOT: destroy_value +// CHECK-LABEL: } // end sil function 'liveoutConsume' +sil [ossa] @liveoutConsume : $@convention(thin) (@owned T, Builtin.Int1) -> () { +bb0(%arg : @owned $T, %z : $Builtin.Int1): + %copy1 = copy_value %arg : $T + cond_br %z, bb2, bb1 + +bb1: + %copy2 = copy_value %arg : $T + %f1 = function_ref @takeOwned : $@convention(thin) (@in T) -> () + %call1 = apply %f1(%copy2) : $@convention(thin) (@in T) -> () + br bb3 + +bb2: + br bb3 + +bb3: + %f2 = function_ref @takeOwned : $@convention(thin) (@in T) -> () + %call2 = apply %f2(%copy1) : $@convention(thin) (@in T) -> () + destroy_value %arg : $T + %10 = tuple () + return %10 : $() +} + +// The LiveWithin block has a destroy, but it's before the first use. +// +// CHECK-TRACE-LABEL: *** CopyPropagation: testDestroyBeforeUse +// CHECK-TRACE: Removing destroy_value %1 : $T +// CHECK-TRACE: Removing %{{.*}} = copy_value %0 : $T +// +// CHECK-LABEL: sil [ossa] @testDestroyBeforeUse : $@convention(thin) (@in T) -> () { +// CHECK: bb0(%0 : @owned $T): +// CHECK-NOT: copy_value +// CHECK-NOT: destroy_value +// CHECK: apply +// CHECK-NOT: destroy_value +// CHECK: return +// CHECK-LABEL: } // end sil function 'testDestroyBeforeUse' +sil [ossa] @testDestroyBeforeUse : $@convention(thin) (@in T) -> () { +bb0(%arg : @owned $T): + %copy = copy_value %arg : $T + destroy_value %copy : $T + %f = function_ref @takeOwned : $@convention(thin) (@in T) -> () + %call2 = apply %f(%arg) : $@convention(thin) (@in T) -> () + %10 = tuple () + return %10 : $() +} + +// The LiveWithin block has a destroy, but it's after an unrelated call. +// +// CHECK-TRACE-LABEL: *** CopyPropagation: testDestroyAfterCall +// CHECK-TRACE-NOT: Removing +// +// CHECK-LABEL: sil [ossa] @testDestroyAfterCall : $@convention(thin) (@in T, @in T) -> () { +// CHECK: bb0(%0 : @owned $T, %1 : @owned $T): +// CHECK: apply %{{.*}}(%0) : $@convention(thin) <τ_0_0> (@in τ_0_0) -> () +// CHECK: destroy_value %1 : $T +// CHECK-LABEL: } // end sil function 'testDestroyAfterCall' +sil [ossa] @testDestroyAfterCall : $@convention(thin) (@in T, @in T) -> () { +bb0(%arg1 : @owned $T, %arg2 : @owned $T): + %f = function_ref @takeOwned : $@convention(thin) (@in T) -> () + %call = apply %f(%arg1) : $@convention(thin) (@in T) -> () + destroy_value %arg2 : $T + %10 = tuple () + return %10 : $() +} + +// A copy may have multiple uses +// +// CHECK-TRACE-LABEL: *** CopyPropagation: testSharedCopy +// CHECK-TRACE: Removing destroy_value %0 : $T +// CHECK-TRACE: Removing %1 = copy_value %0 : $T +// CHECK-TRACE-NOT: Removing +// +// CHECK-LABEL: sil [ossa] @testSharedCopy : $@convention(thin) (@in T) -> () { +// CHECK-NOT: copy_value +// CHECK: apply +// CHECK: apply +// CHECK-NOT: destroy_value +// CHECK-LABEL: } // end sil function 'testSharedCopy' +sil [ossa] @testSharedCopy : $@convention(thin) (@in T) -> () { +bb0(%arg : @owned $T): + %copy = copy_value %arg : $T + %f1 = function_ref @takeGuaranteed : $@convention(thin) (@in_guaranteed T) -> () + %call1 = apply %f1(%copy) : $@convention(thin) (@in_guaranteed T) -> () + %f2 = function_ref @takeOwned : $@convention(thin) (@in T) -> () + %call2 = apply %f2(%copy) : $@convention(thin) (@in T) -> () + destroy_value %arg : $T + %10 = tuple () + return %10 : $() +} + +// A copy within a borrow scope is not handled by CopyPropagation. An +// earlier pass should have hoisted the copy outside of the borrow +// scope. +// +// CHECK-TRACE-LABEL: *** CopyPropagation: testBorrowCopy +// CHECK-TRACE: Outer copy [[OUTERCOPY:%.*]] = copy_value %0 : $T +// CHECK-TRACE: Use of outer copy destroy_value +// CHECK-TRACE: Removing %{{.*}} = copy_value +// CHECK-TRACE: Removing destroy_value [[OUTERCOPY]] : $T +// CHECK-TRACE: Removing [[OUTERCOPY]] = copy_value %0 : $T +// +// CHECK-LABEL: sil [ossa] @testBorrowCopy : $@convention(thin) (@in T) -> () { +// CHECK-LABEL: bb0(%0 : @owned $T): +// CHECK-NEXT: begin_borrow %0 : $T +// CHECK-NEXT: end_borrow +// CHECK-NEXT: destroy_value %0 : $T +// CHECK-NEXT: tuple () +// CHECK-NEXT: return +// CHECK-LABEL: } +sil [ossa] @testBorrowCopy : $@convention(thin) (@in T) -> () { +bb0(%0 : @owned $T): + %3 = begin_borrow %0 : $T + %4 = copy_value %3 : $T + end_borrow %3 : $T + destroy_value %4 : $T + destroy_value %0 : $T + %17 = tuple () + return %17 : $() +} + +// CHECK-TRACE-LABEL: *** CopyPropagation: testCopyBorrow +// CHECK-TRACE: Removing destroy_value %1 : $T +// CHECK-TRACE: Removing %{{.*}} = copy_value %0 : $T +// CHECK-TRACE-NOT: Removing +// +// CHECK-LABEL: sil [ossa] @testCopyBorrow : $@convention(thin) (@in T) -> () { +// CHECK: bb0(%0 : @owned $T): +// CHECK-NEXT: %1 = begin_borrow %0 : $T +// CHECK-NEXT: end_borrow %1 : $T +// CHECK-NEXT: destroy_value %0 : $T +// CHECK-NEXT: tuple +// CHECK-NEXT: return +// CHECK-LABEL: } // end sil function 'testCopyBorrow' +sil [ossa] @testCopyBorrow : $@convention(thin) (@in T) -> () { +bb0(%0 : @owned $T): + %1 = copy_value %0 : $T + %2 = begin_borrow %1 : $T + end_borrow %2 : $T + destroy_value %1 : $T + destroy_value %0 : $T + %17 = tuple () + return %17 : $() +} + +sil @testThrows : $@convention(thin) <τ_0_0> (τ_0_0) -> (@error Error) + +// CHECK-TRACE-LABEL: *** CopyPropagation: testTryApply +// +// CHECK-LABEL: sil [ossa] @testTryApply : $@convention(thin) (@in T) -> @error Error { +// CHECK: bb0(%0 : @owned $T): +// CHECK: function_ref @testThrows : $@convention(thin) <τ_0_0> (τ_0_0) -> @error Error +// CHECK: try_apply %{{.*}}(%0) : $@convention(thin) <τ_0_0> (τ_0_0) -> @error Error, normal bb1, error bb2 +// CHECK: bb1(%3 : $()): +// CHECK: destroy_value %0 : $T +// CHECK: br bb3 +// CHECK: bb2(%{{.*}} : @owned $Error): +// CHECK: destroy_value %0 : $T +// CHECK: destroy_value %{{.*}} : $Error +// CHECK: br bb3 +// CHECK: bb3: +// CHECK-NOT: destroy +// CHECK: return +// CHECK-LABEL: } // end sil function 'testTryApply' +sil [ossa] @testTryApply : $@convention(thin) (@in T) -> (@error Error) { +bb0(%0 : @owned $T): + %1 = copy_value %0 : $T + destroy_value %0 : $T + %f = function_ref @testThrows : $@convention(thin) <τ_0_0> (τ_0_0) -> (@error Error) + try_apply %f(%1) : $@convention(thin) <τ_0_0> (τ_0_0) -> (@error Error), normal bb1, error bb2 + +bb1(%returnval : $()): + br bb3 + +bb2(%error : @owned $Error): + destroy_value %error : $Error + br bb3 + +bb3: + destroy_value %1 : $T + %17 = tuple () + return %17 : $() +} + +// ----------------------------------------------------------------------------- +// Test that convert_escape_to_noescape is a PointerEscape + +sil @closure : $@convention(thin) (@thick T.Type) -> @owned @callee_guaranteed @substituted <τ_0_0, τ_0_1> (@in_guaranteed τ_0_0, @in_guaranteed τ_0_1) -> Bool for + +sil @takeClosure : $@convention(thin) (@noescape @callee_guaranteed @substituted <τ_0_0, τ_0_1> (@in_guaranteed τ_0_0, @in_guaranteed τ_0_1) -> (Bool, @error Error) for ) -> (@owned AnyObject, @error Error) + +// CHECK-TRACE-LABEL: *** CopyPropagation: testConvertFunction +// +// CHECK-LABEL: sil [ossa] @testConvertFunction : $@convention(thin) (@in_guaranteed T) -> @owned AnyObject { +// CHECK: bb0(%0 : @guaranteed $T): +// CHECK: [[CLOSURE:%.*]] = apply %{{.*}}(%{{.*}}) : $@convention(thin) <τ_0_0> (@thick τ_0_0.Type) -> @owned @callee_guaranteed @substituted <τ_0_0, τ_0_1> (@in_guaranteed τ_0_0, @in_guaranteed τ_0_1) -> Bool for <τ_0_0, τ_0_0> +// CHECK: [[CONVERT:%.*]] = convert_function [[CLOSURE]] : $@callee_guaranteed @substituted <τ_0_0, τ_0_1> (@in_guaranteed τ_0_0, @in_guaranteed τ_0_1) -> Bool for to $@callee_guaranteed @substituted <τ_0_0, τ_0_1> (@in_guaranteed τ_0_0, @in_guaranteed τ_0_1) -> (Bool, @error Error) for +// CHECK: [[COPY:%.*]] = copy_value [[CONVERT]] : $@callee_guaranteed @substituted <τ_0_0, τ_0_1> (@in_guaranteed τ_0_0, @in_guaranteed τ_0_1) -> (Bool, @error Error) for +// CHECK: [[NOESCAPE:%.*]] = convert_escape_to_noescape [[COPY]] : $@callee_guaranteed @substituted <τ_0_0, τ_0_1> (@in_guaranteed τ_0_0, @in_guaranteed τ_0_1) -> (Bool, @error Error) for to $@noescape @callee_guaranteed @substituted <τ_0_0, τ_0_1> (@in_guaranteed τ_0_0, @in_guaranteed τ_0_1) -> (Bool, @error Error) for +// CHECK: try_apply %{{.*}}([[NOESCAPE]]) : $@convention(thin) <τ_0_0> (@noescape @callee_guaranteed @substituted <τ_0_0, τ_0_1> (@in_guaranteed τ_0_0, @in_guaranteed τ_0_1) -> (Bool, @error Error) for <τ_0_0, τ_0_0>) -> (@owned AnyObject, @error Error), normal bb1, error bb2 +// CHECK: bb1 +// CHECK: destroy_value [[COPY]] : $@callee_guaranteed @substituted <τ_0_0, τ_0_1> (@in_guaranteed τ_0_0, @in_guaranteed τ_0_1) -> (Bool, @error Error) for +// CHECK: destroy_value [[CONVERT]] : $@callee_guaranteed @substituted <τ_0_0, τ_0_1> (@in_guaranteed τ_0_0, @in_guaranteed τ_0_1) -> (Bool, @error Error) for +// CHECK: return +// CHECK: bb2 +// CHECK: destroy_value [[COPY]] : $@callee_guaranteed @substituted <τ_0_0, τ_0_1> (@in_guaranteed τ_0_0, @in_guaranteed τ_0_1) -> (Bool, @error Error) for +// CHECK: unreachable +// CHECK-LABEL: } // end sil function 'testConvertFunction' +sil [ossa] @testConvertFunction : $@convention(thin) (@in_guaranteed T) -> @owned AnyObject { +bb0(%0 : @guaranteed $T): + %2 = metatype $@thick T.Type + %3 = function_ref @closure : $@convention(thin) <τ_0_0> (@thick τ_0_0.Type) -> @owned @callee_guaranteed @substituted <τ_0_0, τ_0_1> (@in_guaranteed τ_0_0, @in_guaranteed τ_0_1) -> Bool for <τ_0_0, τ_0_0> + %4 = apply %3(%2) : $@convention(thin) <τ_0_0> (@thick τ_0_0.Type) -> @owned @callee_guaranteed @substituted <τ_0_0, τ_0_1> (@in_guaranteed τ_0_0, @in_guaranteed τ_0_1) -> Bool for <τ_0_0, τ_0_0> + %5 = convert_function %4 : $@callee_guaranteed @substituted <τ_0_0, τ_0_1> (@in_guaranteed τ_0_0, @in_guaranteed τ_0_1) -> Bool for to $@callee_guaranteed @substituted <τ_0_0, τ_0_1> (@in_guaranteed τ_0_0, @in_guaranteed τ_0_1) -> (Bool, @error Error) for + %6 = copy_value %5 : $@callee_guaranteed @substituted <τ_0_0, τ_0_1> (@in_guaranteed τ_0_0, @in_guaranteed τ_0_1) -> (Bool, @error Error) for + %7 = convert_escape_to_noescape %6 : $@callee_guaranteed @substituted <τ_0_0, τ_0_1> (@in_guaranteed τ_0_0, @in_guaranteed τ_0_1) -> (Bool, @error Error) for to $@noescape @callee_guaranteed @substituted <τ_0_0, τ_0_1> (@in_guaranteed τ_0_0, @in_guaranteed τ_0_1) -> (Bool, @error Error) for + %8 = function_ref @takeClosure : $@convention(thin) <τ_0_0> (@noescape @callee_guaranteed @substituted <τ_0_0, τ_0_1> (@in_guaranteed τ_0_0, @in_guaranteed τ_0_1) -> (Bool, @error Error) for <τ_0_0, τ_0_0>) -> (@owned AnyObject, @error Error) + try_apply %8(%7) : $@convention(thin) <τ_0_0> (@noescape @callee_guaranteed @substituted <τ_0_0, τ_0_1> (@in_guaranteed τ_0_0, @in_guaranteed τ_0_1) -> (Bool, @error Error) for <τ_0_0, τ_0_0>) -> (@owned AnyObject, @error Error), normal bb1, error bb2 + +bb1(%10 : @owned $AnyObject): + destroy_value %6 : $@callee_guaranteed @substituted <τ_0_0, τ_0_1> (@in_guaranteed τ_0_0, @in_guaranteed τ_0_1) -> (Bool, @error Error) for + destroy_value %5 : $@callee_guaranteed @substituted <τ_0_0, τ_0_1> (@in_guaranteed τ_0_0, @in_guaranteed τ_0_1) -> (Bool, @error Error) for + return %10 : $AnyObject + +bb2(%14 : @owned $Error): + destroy_value %6 : $@callee_guaranteed @substituted <τ_0_0, τ_0_1> (@in_guaranteed τ_0_0, @in_guaranteed τ_0_1) -> (Bool, @error Error) for + unreachable +} + +// ----------------------------------------------------------------------------- +// Test end_apply + +struct Struct { + var st: Int +} + +sil @swift_modifyAtWritableKeyPath : $@yield_once @convention(thin) <τ_0_0, τ_0_1> (@inout τ_0_0, @guaranteed WritableKeyPath<τ_0_0, τ_0_1>) -> @yields @inout τ_0_1 +sil @modifyInt : $@convention(thin) (@inout Int) -> () + +// CHECK-TRACE-LABEL: *** CopyPropagation: testBeginApply +// +// CHECK-LABEL: sil hidden [ossa] @testBeginApply : $@convention(thin) (@inout Struct) -> () { +// CHECK: begin_apply +// CHECK-NOT: destroy +// CHECK: apply +// CHECK-NOT: destroy +// CHECK: end_apply +// CHECK-NOT: destroy +// CHECK: destroy_value %{{.*}} : $WritableKeyPath +// CHECK-NOT: destroy +// CHECK-LABEL: } // end sil function 'testBeginApply' +sil hidden [ossa] @testBeginApply : $@convention(thin) (@inout Struct) -> () { +bb0(%0 : $*Struct): + %2 = keypath $WritableKeyPath, (root $Struct; stored_property #Struct.st : $Int) + debug_value %2 : $WritableKeyPath, let, name "kp" + %4 = copy_value %2 : $WritableKeyPath + %5 = function_ref @swift_modifyAtWritableKeyPath : $@yield_once @convention(thin) <τ_0_0, τ_0_1> (@inout τ_0_0, @guaranteed WritableKeyPath<τ_0_0, τ_0_1>) -> @yields @inout τ_0_1 + (%6, %7) = begin_apply %5(%0, %4) : $@yield_once @convention(thin) <τ_0_0, τ_0_1> (@inout τ_0_0, @guaranteed WritableKeyPath<τ_0_0, τ_0_1>) -> @yields @inout τ_0_1 + %8 = function_ref @modifyInt : $@convention(thin) (@inout Int) -> () + %9 = apply %8(%6) : $@convention(thin) (@inout Int) -> () + end_apply %7 + destroy_value %4 : $WritableKeyPath + destroy_value %2 : $WritableKeyPath + %13 = tuple () + return %13 : $() +} + +// FIXME: project_box is currently a PointerEscape, so box live ranges are not canonicalized. +// +// CHECK-TRACE-LABEL: *** CopyPropagation: testProjectBox +// +// CHECK-LABEL: sil [ossa] @testProjectBox : $@convention(thin) (@owned B) -> @owned B { +// CHECK: copy_value +// CHECK: destroy_value +// CHECK: destroy_value +// CHECK-LABEL: } // end sil function 'testProjectBox' +sil [ossa] @testProjectBox : $@convention(thin) (@owned B) -> @owned B { +bb0(%0 : @owned $B): + %box = alloc_box $<τ_0_0> { var τ_0_0 } + %boxadr = project_box %box : $<τ_0_0> { var τ_0_0 } , 0 + store %0 to [init] %boxadr : $*B + %load = load [copy] %boxadr : $*B + %copy = copy_value %box : $<τ_0_0> { var τ_0_0 } + destroy_value %box : $<τ_0_0> { var τ_0_0 } + destroy_value %copy : $<τ_0_0> { var τ_0_0 } + return %load : $B +} + +// FIXME: mark_dependence is currently a PointerEscape, so dependent live ranges are not canonicalized. +// +// CHECK-TRACE-LABEL: *** CopyPropagation: testMarkDependence +// +// CHECK-LABEL: sil [ossa] @testMarkDependence : $@convention(thin) (@inout Builtin.Int64, @owned B) -> Builtin.Int64 { +// CHECK: copy_value +// CHECK: destroy_value +// CHECK: destroy_value +// CHECK-LABEL: } // end sil function 'testMarkDependence' +sil [ossa] @testMarkDependence : $@convention(thin) (@inout Builtin.Int64, @owned B) -> Builtin.Int64 { +bb0(%0 : $*Builtin.Int64, %1 : @owned $B): + %ptr = mark_dependence %0 : $*Builtin.Int64 on %1 : $B + %val = load [trivial] %ptr : $*Builtin.Int64 + %copy = copy_value %1 : $B + destroy_value %1 : $B + destroy_value %copy : $B + return %val : $Builtin.Int64 +} + +// CHECK-TRACE-LABEL: *** CopyPropagation: testBitwiseEscape +// +// CHECK-LABEL: sil [ossa] @testBitwiseEscape : $@convention(thin) (@guaranteed C) -> Builtin.RawPointer { +// CHECK-NOT: copy_value +// CHECK-NOT: destroy_value +// CHECK-LABEL: } // end sil function 'testBitwiseEscape' +sil [ossa] @testBitwiseEscape : $@convention(thin) (@guaranteed C) -> Builtin.RawPointer { +bb0(%0 : @guaranteed $C): + %raw = ref_to_raw_pointer %0 : $C to $Builtin.RawPointer + %copy = copy_value %0 : $C + destroy_value %copy : $C + return %raw : $Builtin.RawPointer +} + +// CHECK-TRACE-LABEL: *** CopyPropagation: testInteriorPointer +// +// CHECK-LABEL: sil [ossa] @testInteriorPointer : $@convention(thin) (@guaranteed C) -> Int64 { +// CHECK: bb0(%0 : @guaranteed $C): +// CHECK-NEXT: begin_borrow +// CHECK-NEXT: ref_element_addr +// CHECK-NEXT: load +// CHECK-NEXT: end_borrow +// CHECK-NEXT: return +// CHECK-LABEL: } // end sil function 'testInteriorPointer' +sil [ossa] @testInteriorPointer : $@convention(thin) (@guaranteed C) -> Int64 { +bb0(%0 : @guaranteed $C): + %copy1 = copy_value %0 : $C + %borrow = begin_borrow %copy1 : $C + %adr = ref_element_addr %borrow : $C, #C.a + %val = load [trivial] %adr : $*Int64 + %copy2 = copy_value %borrow : $C + end_borrow %borrow : $C + destroy_value %copy1 : $C + destroy_value %copy2 : $C + return %val : $Int64 +} + +// CHECK-TRACE-LABEL: *** CopyPropagation: testExtract +// +// CHECK-LABEL: sil [ossa] @testExtract : $@convention(thin) (@guaranteed NativeObjectPair) -> @owned Builtin.NativeObject { +// CHECK: bb0(%0 : @guaranteed $NativeObjectPair): +// CHECK-NEXT: [[B:%.*]] = begin_borrow %0 : $NativeObjectPair +// CHECK-NEXT: [[E:%.*]] = struct_extract [[B]] : $NativeObjectPair, #NativeObjectPair.obj1 +// CHECK-NEXT: [[C:%.*]] = copy_value [[E]] : $Builtin.NativeObject +// CHECK-NEXT: end_borrow +// CHECK-NEXT: return [[C]] : $Builtin.NativeObject +// CHECK-LABEL: } // end sil function 'testExtract' +sil [ossa] @testExtract : $@convention(thin) (@guaranteed NativeObjectPair) -> @owned Builtin.NativeObject { +bb0(%0 : @guaranteed $NativeObjectPair): + %copy1 = copy_value %0 : $NativeObjectPair + %borrow = begin_borrow %copy1 : $NativeObjectPair + %copy2 = copy_value %borrow : $NativeObjectPair + %val = struct_extract %borrow : $NativeObjectPair, #NativeObjectPair.obj1 + %copy3 = copy_value %val : $Builtin.NativeObject + end_borrow %borrow : $NativeObjectPair + %copy4 = copy_value %copy3 : $Builtin.NativeObject + destroy_value %copy1 : $NativeObjectPair + destroy_value %copy2 : $NativeObjectPair + destroy_value %copy3 : $Builtin.NativeObject + return %copy4 : $Builtin.NativeObject +} + +// ============================================================================= +// Test extending liveness though overlapping access scopes. +// ============================================================================= + +class X {} +class Y {} + +sil [ossa] @getObject : $@convention(thin) () -> @owned AnyObject + +// No overlap (access ignored): +// def +// use +// begin_access +// end_access +// destroy +// +// CHECK-LABEL: sil [ossa] @testNoOverlapInLiveBlock : $@convention(thin) () -> () { +// CHECK: [[DEF:%.*]] = apply %{{.*}}() : $@convention(thin) () -> @owned AnyObject +// CHECK-NOT: copy_value +// CHECK: store [[DEF]] to [init] +// CHECK: begin_access +// CHECK: end_access +// CHECK: bb1: +// CHECK-NOT: destroy_value [[DEF]] : $AnyObject +// CHECK-LABEL: } // end sil function 'testNoOverlapInLiveBlock' +sil [ossa] @testNoOverlapInLiveBlock : $@convention(thin) () -> () { +bb0: + %box = alloc_box ${ var AnyObject }, var, name "x" + %adr = project_box %box : ${ var AnyObject }, 0 + %f = function_ref @getObject : $@convention(thin) () -> @owned AnyObject + // def + %def = apply %f() : $@convention(thin) () -> @owned AnyObject + %copy = copy_value %def : $AnyObject + // use + store %def to [init] %adr : $*AnyObject + // end canonical lifetime + %access = begin_access [read] [dynamic] %adr : $*AnyObject + %obj = load [copy] %access : $*AnyObject + end_access %access : $*AnyObject + br bb1 + +bb1: + destroy_value %copy : $AnyObject + destroy_value %obj : $AnyObject + destroy_value %box : ${ var AnyObject } + %v = tuple () + return %v : $() +} + +// No overlap (access ignored): +// def +// use +// br... +// bb... +// begin_access +// end_access +// destroy +// +// CHECK-LABEL: sil [ossa] @testNoOverlapInDeadBlock : $@convention(thin) () -> () { +// CHECK: [[DEF:%.*]] = apply %{{.*}}() : $@convention(thin) () -> @owned AnyObject +// CHECK-NOT: copy_value +// CHECK: store [[DEF]] to [init] %{{.*}} : $*AnyObject +// CHECK: br bb1 +// CHECK: bb1: +// CHECK: begin_access +// CHECK: end_access +// CHECK: br bb2 +// CHECK: bb2: +// CHECK-NOT: destroy_value [[DEF]] : $AnyObject +// CHECK-LABEL: } // end sil function 'testNoOverlapInDeadBlock' +sil [ossa] @testNoOverlapInDeadBlock : $@convention(thin) () -> () { +bb0: + %box = alloc_box ${ var AnyObject }, var, name "x" + %adr = project_box %box : ${ var AnyObject }, 0 + %f = function_ref @getObject : $@convention(thin) () -> @owned AnyObject + // def + %def = apply %f() : $@convention(thin) () -> @owned AnyObject + %copy = copy_value %def : $AnyObject + // use + store %def to [init] %adr : $*AnyObject + // end canonical lifetime + br bb1 + +bb1: + %access = begin_access [read] [dynamic] %adr : $*AnyObject + %obj = load [copy] %access : $*AnyObject + end_access %access : $*AnyObject + br bb2 + +bb2: + destroy_value %copy : $AnyObject + destroy_value %obj : $AnyObject + destroy_value %box : ${ var AnyObject } + %v = tuple () + return %v : $() +} + +// Overlapping (must extend pruned liveness): +// +// %def +// begin_access // access scope unrelated to def +// use %def // pruned liveness ends here +// end_access +// +// CHECK-LABEL: sil [ossa] @testOverlapInLiveBlock : $@convention(thin) () -> () { +// CHECK: [[DEF:%.*]] = apply %{{.*}}() : $@convention(thin) () -> @owned AnyObject +// CHECK: begin_access +// CHECK: [[COPY:%.*]] = copy_value [[DEF]] : $AnyObject +// CHECK: store [[COPY]] to [init] %{{.*}} : $*AnyObject +// CHECK: end_access +// CHECK: destroy_value [[DEF]] : $AnyObject +// CHECK: br bb1 +// CHECK: bb1: +// CHECK-NOT: destroy_value [[DEF]] : $AnyObject +// CHECK-LABEL: } // end sil function 'testOverlapInLiveBlock' +sil [ossa] @testOverlapInLiveBlock : $@convention(thin) () -> () { +bb0: + %box = alloc_box ${ var AnyObject }, var, name "x" + %adr = project_box %box : ${ var AnyObject }, 0 + %f = function_ref @getObject : $@convention(thin) () -> @owned AnyObject + // def + %def = apply %f() : $@convention(thin) () -> @owned AnyObject + %copy = copy_value %def : $AnyObject + %access = begin_access [read] [dynamic] %adr : $*AnyObject + // use + store %def to [init] %adr : $*AnyObject + %obj = load [copy] %access : $*AnyObject + end_access %access : $*AnyObject + // Branch to avoid reusing the destroy_value + br bb1 + +bb1: + destroy_value %copy : $AnyObject + destroy_value %obj : $AnyObject + destroy_value %box : ${ var AnyObject } + %v = tuple () + return %v : $() +} + +// Overlapping (must extend pruned liveness): +// +// %def +// begin_access // access scope unrelated to def +// use %def // pruned liveness ends here +// br... +// bb... +// end_access +// +// CHECK-LABEL: sil [ossa] @testOverlapInDeadBlock : $@convention(thin) () -> () { +// CHECK: [[DEF:%.*]] = apply %{{.*}}() : $@convention(thin) () -> @owned AnyObject +// CHECK: begin_access +// CHECK: [[COPY:%.*]] = copy_value [[DEF]] : $AnyObject +// CHECK: store [[COPY]] to [init] %{{.*}} : $*AnyObject +// CHECK: br bb1 +// CHECK: bb1: +// CHECK: end_access +// CHECK: destroy_value [[DEF]] : $AnyObject +// CHECK: br bb2 +// CHECK: bb2: +// CHECK-NOT: destroy_value [[DEF]] : $AnyObject +// CHECK-LABEL: } // end sil function 'testOverlapInDeadBlock' +sil [ossa] @testOverlapInDeadBlock : $@convention(thin) () -> () { +bb0: + %box = alloc_box ${ var AnyObject }, var, name "x" + %adr = project_box %box : ${ var AnyObject }, 0 + %f = function_ref @getObject : $@convention(thin) () -> @owned AnyObject + // def + %def = apply %f() : $@convention(thin) () -> @owned AnyObject + %copy = copy_value %def : $AnyObject + %access = begin_access [read] [dynamic] %adr : $*AnyObject + // use + store %def to [init] %adr : $*AnyObject + br bb1 + +bb1: + %obj = load [copy] %access : $*AnyObject + end_access %access : $*AnyObject + br bb2 + +bb2: + destroy_value %copy : $AnyObject + destroy_value %obj : $AnyObject + destroy_value %box : ${ var AnyObject } + %v = tuple () + return %v : $() +} + +// Fully Overlapping (must extend pruned liveness): +// +// begin_access // access scope unrelated to def +// %def +// use %def // pruned liveness ends here +// end_access +// +// CHECK-LABEL: sil [ossa] @testFullOverlapInDefBlock : $@convention(thin) () -> () { +// CHECK: begin_access +// CHECK: [[DEF:%.*]] = apply %{{.*}}() : $@convention(thin) () -> @owned AnyObject +// CHECK: [[COPY:%.*]] = copy_value [[DEF]] : $AnyObject +// CHECK: store [[COPY]] to [init] %{{.*}} : $*AnyObject +// CHECK: end_access +// CHECK: destroy_value [[DEF]] : $AnyObject +// CHECK: br bb1 +// CHECK: bb1: +// CHECK-NOT: destroy_value [[DEF]] : $AnyObject +// CHECK-LABEL: } // end sil function 'testFullOverlapInDefBlock' +sil [ossa] @testFullOverlapInDefBlock : $@convention(thin) () -> () { +bb0: + %box = alloc_box ${ var AnyObject }, var, name "x" + %adr = project_box %box : ${ var AnyObject }, 0 + %access = begin_access [read] [dynamic] %adr : $*AnyObject + %f = function_ref @getObject : $@convention(thin) () -> @owned AnyObject + // def + %def = apply %f() : $@convention(thin) () -> @owned AnyObject + %copy = copy_value %def : $AnyObject + // use + store %def to [init] %adr : $*AnyObject + %obj = load [copy] %access : $*AnyObject + end_access %access : $*AnyObject + // Branch to avoid reusing the destroy_value + br bb1 + +bb1: + destroy_value %copy : $AnyObject + destroy_value %obj : $AnyObject + destroy_value %box : ${ var AnyObject } + %v = tuple () + return %v : $() +} + +// Fully Overlapping (must extend pruned liveness): +// +// begin_access // access scope unrelated to def +// br... +// bb... +// %def +// use %def // pruned liveness ends here +// end_access +// +// CHECK-LABEL: sil [ossa] @testFullOverlapBeforeDefBlock : $@convention(thin) () -> () { +// CHECK: begin_access +// CHECK: br bb1 +// CHECK: bb1: +// CHECK: [[DEF:%.*]] = apply %{{.*}}() : $@convention(thin) () -> @owned AnyObject +// CHECK: [[COPY:%.*]] = copy_value [[DEF]] : $AnyObject +// CHECK: store [[COPY]] to [init] %{{.*}} : $*AnyObject +// CHECK: end_access +// CHECK: destroy_value [[DEF]] : $AnyObject +// CHECK: br bb2 +// CHECK: bb2: +// CHECK-NOT: destroy_value [[DEF]] : $AnyObject +// CHECK-LABEL: } // end sil function 'testFullOverlapBeforeDefBlock' +sil [ossa] @testFullOverlapBeforeDefBlock : $@convention(thin) () -> () { +bb0: + %box = alloc_box ${ var AnyObject }, var, name "x" + %adr = project_box %box : ${ var AnyObject }, 0 + %access = begin_access [read] [dynamic] %adr : $*AnyObject + br bb1 + +bb1: + %f = function_ref @getObject : $@convention(thin) () -> @owned AnyObject + // def + %def = apply %f() : $@convention(thin) () -> @owned AnyObject + %copy = copy_value %def : $AnyObject + // use + store %def to [init] %adr : $*AnyObject + %obj = load [copy] %access : $*AnyObject + end_access %access : $*AnyObject + // Branch to avoid reusing the destroy_value + br bb2 + +bb2: + destroy_value %copy : $AnyObject + destroy_value %obj : $AnyObject + destroy_value %box : ${ var AnyObject } + %v = tuple () + return %v : $() +} + +// Original Overlapping (unnecessarilly extends pruned liveness): +// +// TODO: this copy could be avoided but is probably an unusual case, +// and sinking the destroy outside the access scope might help to +// optimize the access itself. +// +// %def +// begin_access // access scope unrelated to def +// use %def // pruned liveness ends here +// destroy %def +// end_access +// +// CHECK-LABEL: sil [ossa] @testOriginalOverlapInLiveBlock : $@convention(thin) () -> () { +// CHECK: [[DEF:%.*]] = apply %{{.*}}() : $@convention(thin) () -> @owned AnyObject +// CHECK: begin_access +// CHECK: [[COPY:%.*]] = copy_value [[DEF]] : $AnyObject +// CHECK: store [[COPY]] to [init] %{{.*}} : $*AnyObject +// CHECK: end_access +// CHECK: destroy_value [[DEF]] : $AnyObject +// CHECK: br bb1 +// CHECK-LABEL: } // end sil function 'testOriginalOverlapInLiveBlock' +sil [ossa] @testOriginalOverlapInLiveBlock : $@convention(thin) () -> () { +bb0: + %box = alloc_box ${ var AnyObject }, var, name "x" + %adr = project_box %box : ${ var AnyObject }, 0 + %f = function_ref @getObject : $@convention(thin) () -> @owned AnyObject + // def + %def = apply %f() : $@convention(thin) () -> @owned AnyObject + %copy = copy_value %def : $AnyObject + %access = begin_access [read] [dynamic] %adr : $*AnyObject + // use + store %def to [init] %adr : $*AnyObject + destroy_value %copy : $AnyObject + %obj = load [copy] %access : $*AnyObject + end_access %access : $*AnyObject + br bb1 + +bb1: + destroy_value %obj : $AnyObject + destroy_value %box : ${ var AnyObject } + %v = tuple () + return %v : $() +} + +// Original Overlapping (unnecessarilly extends pruned liveness): +// +// TODO: this copy could be avoided but is probably an unusual case, +// and sinking the destroy outside the access scope might help to +// optimize the access itself. +// +// %def +// begin_access // access scope unrelated to def +// use %def // pruned liveness ends here +// br bb1 +// bb1: +// destroy %def +// end_access +// +// CHECK-LABEL: sil [ossa] @testOriginalOverlapInDeadBlock : $@convention(thin) () -> () { +// CHECK: [[DEF:%.*]] = apply %{{.*}}() : $@convention(thin) () -> @owned AnyObject +// CHECK: begin_access +// CHECK: [[COPY:%.*]] = copy_value [[DEF]] : $AnyObject +// CHECK: store [[COPY]] to [init] %{{.*}} : $*AnyObject +// CHECK: br bb1 +// CHECK: bb1: +// CHECK: end_access +// CHECK: destroy_value [[DEF]] : $AnyObject +// CHECK-LABEL: } // end sil function 'testOriginalOverlapInDeadBlock' +sil [ossa] @testOriginalOverlapInDeadBlock : $@convention(thin) () -> () { +bb0: + %box = alloc_box ${ var AnyObject }, var, name "x" + %adr = project_box %box : ${ var AnyObject }, 0 + %f = function_ref @getObject : $@convention(thin) () -> @owned AnyObject + // def + %def = apply %f() : $@convention(thin) () -> @owned AnyObject + %copy = copy_value %def : $AnyObject + %access = begin_access [read] [dynamic] %adr : $*AnyObject + // use + store %def to [init] %adr : $*AnyObject + br bb1 + +bb1: + destroy_value %copy : $AnyObject + %obj = load [copy] %access : $*AnyObject + end_access %access : $*AnyObject + destroy_value %obj : $AnyObject + destroy_value %box : ${ var AnyObject } + %v = tuple () + return %v : $() +} + +// Interleaved access (requires iterative lifetime extension): +// +// %def +// begin_access X +// use %def // Initial pruned lifetime boundary +// begin_access Y +// end_access X // Lifetime boundary after first extension +// end_access Y // Lifetime boundary after second extension +// destroy %def +// +// CHECK-LABEL: sil [ossa] @testInterleavedAccessScope : $@convention(thin) (@inout AnyObject) -> () { +// CHECK: [[DEF:%.*]] = apply %{{.*}}() : $@convention(thin) () -> @owned AnyObject +// CHECK: begin_access {{.*}} : $*X +// CHECK: [[COPY:%.*]] = copy_value [[DEF]] : $AnyObject +// CHECK: store [[COPY]] to [assign] %{{.*}} : $*AnyObject +// CHECK: begin_access {{.*}} : $*Y +// CHECK: end_access {{.*}} : $*X +// CHECK: end_access {{.*}} : $*Y +// CHECK: destroy_value [[DEF]] : $AnyObject +// CHECK: br bb1 +// CHECK: bb1: +// CHECK-NOT: destroy_value {{.*}} : $AnyObject +// CHECK-LABEL: } // end sil function 'testInterleavedAccessScope' +sil [ossa] @testInterleavedAccessScope : $@convention(thin) (@inout AnyObject) -> () { +bb0(%0 : $*AnyObject): + %x = alloc_box ${ var X }, var, name "x" + %xadr = project_box %x : ${ var X }, 0 + %y = alloc_box ${ var Y }, var, name "y" + %yadr = project_box %y : ${ var Y }, 0 + %f = function_ref @getObject : $@convention(thin) () -> @owned AnyObject + // def + %def = apply %f() : $@convention(thin) () -> @owned AnyObject + %copy = copy_value %def : $AnyObject + %accessX = begin_access [read] [dynamic] %xadr : $*X + // use + store %def to [assign] %0 : $*AnyObject + %accessY = begin_access [read] [dynamic] %yadr : $*Y + // accessX overlaps pruned liveness on the first iteration + end_access %accessX : $*X + // accessY only overlaps pruned liveness on the second iteration + end_access %accessY : $*Y + br bb1 + +bb1: + destroy_value %copy : $AnyObject + destroy_value %y : ${ var Y } + destroy_value %x : ${ var X } + %v = tuple () + return %v : $() +} + +// Interleaved non-local access (requires iterative lifetime extension): +// +// %def +// begin_access X +// use %def // Initial pruned lifetime boundary +// br bb1 +// bb1: +// begin_access Y +// br bb2 +// bb2: +// end_access X // Lifetime boundary after first extension +// br bb3 +// bb3: +// end_access Y // Lifetime boundary after second extension +// br bb4 +// bb4: +// destroy %def +// +// CHECK-LABEL: sil [ossa] @testInterleavedNonLocalAccessScope : $@convention(thin) (@inout AnyObject) -> () { +// CHECK: [[DEF:%.*]] = apply %{{.*}}() : $@convention(thin) () -> @owned AnyObject +// CHECK: begin_access {{.*}} : $*X +// CHECK: [[COPY:%.*]] = copy_value [[DEF]] : $AnyObject +// CHECK: store [[COPY]] to [assign] %{{.*}} : $*AnyObject +// CHECK: br bb1 +// CHECK: bb1: +// CHECK: begin_access {{.*}} : $*Y +// CHECK: br bb2 +// CHECK: bb2: +// CHECK: end_access {{.*}} : $*X +// CHECK: br bb3 +// CHECK: bb3: +// CHECK: end_access {{.*}} : $*Y +// CHECK: destroy_value [[DEF]] : $AnyObject +// CHECK: br bb4 +// CHECK: bb4: +// CHECK-NOT: destroy_value {{.*}} : $AnyObject +// CHECK-LABEL: } // end sil function 'testInterleavedNonLocalAccessScope' +sil [ossa] @testInterleavedNonLocalAccessScope : $@convention(thin) (@inout AnyObject) -> () { +bb0(%0 : $*AnyObject): + %x = alloc_box ${ var X }, var, name "x" + %xadr = project_box %x : ${ var X }, 0 + %y = alloc_box ${ var Y }, var, name "y" + %yadr = project_box %y : ${ var Y }, 0 + %f = function_ref @getObject : $@convention(thin) () -> @owned AnyObject + // def + %def = apply %f() : $@convention(thin) () -> @owned AnyObject + %copy = copy_value %def : $AnyObject + %accessX = begin_access [read] [dynamic] %xadr : $*X + // use + store %def to [assign] %0 : $*AnyObject + br bb1 + +bb1: + %accessY = begin_access [read] [dynamic] %yadr : $*Y + br bb2 + +bb2: + // accessX overlaps pruned liveness on the first iteration + end_access %accessX : $*X + br bb3 + +bb3: + // accessY only overlaps pruned liveness on the second iteration + end_access %accessY : $*Y + br bb4 + +bb4: + destroy_value %copy : $AnyObject + destroy_value %y : ${ var Y } + destroy_value %x : ${ var X } + %v = tuple () + return %v : $() +} From 8d5cb4e00b46aa42b352c339bf912d2f6ecda8a7 Mon Sep 17 00:00:00 2001 From: Michael Gottesman Date: Fri, 12 Feb 2021 07:02:58 -0800 Subject: [PATCH 15/15] Remove some logging stuff from copy prop. --- lib/SILOptimizer/Transforms/CopyPropagation.cpp | 7 ------- 1 file changed, 7 deletions(-) diff --git a/lib/SILOptimizer/Transforms/CopyPropagation.cpp b/lib/SILOptimizer/Transforms/CopyPropagation.cpp index 9d4ca62245176..b190a08c238e0 100644 --- a/lib/SILOptimizer/Transforms/CopyPropagation.cpp +++ b/lib/SILOptimizer/Transforms/CopyPropagation.cpp @@ -75,13 +75,6 @@ void CopyPropagation::run() { auto *dominanceAnalysis = getAnalysis(); auto *deBlocksAnalysis = getAnalysis(); - //!!! - if (f->hasName("$ss18_StringBreadcrumbsCyABSScfc")) { - llvm::DebugFlag = true; - llvm::setCurrentDebugType("copy-propagation"); - } - SWIFT_DEFER { llvm::DebugFlag = false; }; - // Debug label for unit testing. LLVM_DEBUG(llvm::dbgs() << "*** CopyPropagation: " << f->getName() << "\n");