diff --git a/lib/SILOptimizer/Mandatory/CMakeLists.txt b/lib/SILOptimizer/Mandatory/CMakeLists.txt index 53f8c0af9ff24..8249f146a2258 100644 --- a/lib/SILOptimizer/Mandatory/CMakeLists.txt +++ b/lib/SILOptimizer/Mandatory/CMakeLists.txt @@ -12,7 +12,6 @@ target_sources(swiftSILOptimizer PRIVATE DiagnoseStaticExclusivity.cpp DiagnoseUnreachable.cpp Differentiation.cpp - GuaranteedARCOpts.cpp IRGenPrepare.cpp MandatoryInlining.cpp PredictableMemOpt.cpp diff --git a/lib/SILOptimizer/Mandatory/GuaranteedARCOpts.cpp b/lib/SILOptimizer/Mandatory/GuaranteedARCOpts.cpp deleted file mode 100644 index 6013dc4801556..0000000000000 --- a/lib/SILOptimizer/Mandatory/GuaranteedARCOpts.cpp +++ /dev/null @@ -1,260 +0,0 @@ -//===--- GuaranteedARCOpts.cpp --------------------------------------------===// -// -// This source file is part of the Swift.org open source project -// -// Copyright (c) 2014 - 2017 Apple Inc. and the Swift project authors -// Licensed under Apache License v2.0 with Runtime Library Exception -// -// See https://swift.org/LICENSE.txt for license information -// See https://swift.org/CONTRIBUTORS.txt for the list of Swift project authors -// -//===----------------------------------------------------------------------===// - -#define DEBUG_TYPE "sil-guaranteed-arc-opts" -#include "swift/SILOptimizer/PassManager/Passes.h" -#include "swift/SILOptimizer/PassManager/Transforms.h" -#include "swift/SIL/SILVisitor.h" -#include "llvm/ADT/Statistic.h" - -using namespace swift; - -STATISTIC(NumInstsEliminated, "Number of instructions eliminated"); - -namespace { - -struct GuaranteedARCOptsVisitor - : SILInstructionVisitor { - bool visitSILInstruction(SILInstruction *I) { return false; } - bool visitDestroyAddrInst(DestroyAddrInst *DAI); - bool visitStrongReleaseInst(StrongReleaseInst *SRI); - bool visitDestroyValueInst(DestroyValueInst *DVI); - bool visitReleaseValueInst(ReleaseValueInst *RVI); -}; - -} // end anonymous namespace - -static SILBasicBlock::reverse_iterator -getPrevReverseIterator(SILInstruction *I) { - return std::next(I->getIterator().getReverse()); -} - -bool GuaranteedARCOptsVisitor::visitDestroyAddrInst(DestroyAddrInst *DAI) { - SILValue Operand = DAI->getOperand(); - - for (auto II = getPrevReverseIterator(DAI), IE = DAI->getParent()->rend(); - II != IE;) { - auto *Inst = &*II; - ++II; - - if (auto *CA = dyn_cast(Inst)) { - if (CA->getSrc() == Operand && !CA->isTakeOfSrc()) { - CA->setIsTakeOfSrc(IsTake); - DAI->eraseFromParent(); - NumInstsEliminated += 2; - return true; - } - } - - // destroy_addrs commonly exist in a block of dealloc_stack's, which don't - // affect take-ability. - if (isa(Inst)) - continue; - - // This code doesn't try to prove tricky validity constraints about whether - // it is safe to push the destroy_addr past interesting instructions. - if (Inst->mayHaveSideEffects()) - break; - } - - // If we didn't find a copy_addr to fold this into, emit the destroy_addr. - return false; -} - -static bool couldReduceStrongRefcount(SILInstruction *Inst) { - // Simple memory accesses cannot reduce refcounts. - switch (Inst->getKind()) { -#define UNCHECKED_REF_STORAGE(Name, ...) \ - case SILInstructionKind::Name##RetainValueInst: \ - case SILInstructionKind::StrongCopy##Name##ValueInst: -#define NEVER_LOADABLE_CHECKED_REF_STORAGE(Name, ...) \ - case SILInstructionKind::Store##Name##Inst: -#define ALWAYS_LOADABLE_CHECKED_REF_STORAGE(Name, ...) 
\ - case SILInstructionKind::Name##RetainInst: \ - case SILInstructionKind::StrongRetain##Name##Inst: \ - case SILInstructionKind::StrongCopy##Name##ValueInst: -#define SOMETIMES_LOADABLE_CHECKED_REF_STORAGE(Name, ...) \ - NEVER_LOADABLE_CHECKED_REF_STORAGE(Name, "...") \ - ALWAYS_LOADABLE_CHECKED_REF_STORAGE(Name, "...") -#include "swift/AST/ReferenceStorage.def" - case SILInstructionKind::LoadInst: - case SILInstructionKind::StoreInst: - case SILInstructionKind::RetainValueInst: - case SILInstructionKind::StrongRetainInst: - case SILInstructionKind::AllocStackInst: - case SILInstructionKind::DeallocStackInst: - case SILInstructionKind::BeginAccessInst: - case SILInstructionKind::EndAccessInst: - case SILInstructionKind::BeginUnpairedAccessInst: - case SILInstructionKind::EndUnpairedAccessInst: - return false; - default: - break; - } - - // Assign and copyaddr of trivial types cannot drop refcounts, and 'inits' - // never can either. Nontrivial ones can though, because the overwritten - // value drops a retain. We would have to do more alias analysis to be able - // to safely ignore one of those. - if (auto *AI = dyn_cast(Inst)) { - SILType StoredType = AI->getOperand(0)->getType(); - if (StoredType.isTrivial(*Inst->getFunction()) || - StoredType.is()) - return false; - } - - if (auto *CAI = dyn_cast(Inst)) { - // Initializations can only increase refcounts. - if (CAI->isInitializationOfDest()) - return false; - - SILType StoredType = CAI->getOperand(0)->getType().getObjectType(); - if (StoredType.isTrivial(*Inst->getFunction()) || - StoredType.is()) - return false; - } - - // This code doesn't try to prove tricky validity constraints about whether - // it is safe to push the release past interesting instructions. - return Inst->mayHaveSideEffects(); -} - -bool GuaranteedARCOptsVisitor::visitStrongReleaseInst(StrongReleaseInst *SRI) { - SILValue Operand = SRI->getOperand(); - // Release on a functionref is a noop. - if (isa(Operand) || isa(Operand) || - isa(Operand)) { - SRI->eraseFromParent(); - ++NumInstsEliminated; - return true; - } - - // Check to see if the instruction immediately before the insertion point is a - // strong_retain of the specified operand. If so, we can zap the pair. - for (auto II = getPrevReverseIterator(SRI), IE = SRI->getParent()->rend(); - II != IE;) { - auto *Inst = &*II; - ++II; - - if (isa(Inst) || isa(Inst)) { - if (Inst->getOperand(0) == Operand) { - Inst->eraseFromParent(); - SRI->eraseFromParent(); - NumInstsEliminated += 2; - return true; - } - // Skip past unrelated retains. - continue; - } - - // Scan past simple instructions that cannot reduce strong refcounts. - if (couldReduceStrongRefcount(Inst)) - break; - } - - // If we didn't find a retain to fold this into, return false. - return false; -} - -bool GuaranteedARCOptsVisitor::visitDestroyValueInst(DestroyValueInst *DVI) { - SILValue Operand = DVI->getOperand(); - for (auto II = getPrevReverseIterator(DVI), IE = DVI->getParent()->rend(); - II != IE;) { - auto *Inst = &*II; - ++II; - - if (auto *CVI = dyn_cast(Inst)) { - if (SILValue(CVI) == Operand || CVI->getOperand() == Operand) { - CVI->replaceAllUsesWith(CVI->getOperand()); - CVI->eraseFromParent(); - DVI->eraseFromParent(); - NumInstsEliminated += 2; - return true; - } - // Skip past unrelated retains. - continue; - } - - // Scan past simple instructions that cannot reduce refcounts. 
- if (couldReduceStrongRefcount(Inst)) - break; - } - - return false; -} - -bool GuaranteedARCOptsVisitor::visitReleaseValueInst(ReleaseValueInst *RVI) { - SILValue Operand = RVI->getOperand(); - - for (auto II = getPrevReverseIterator(RVI), IE = RVI->getParent()->rend(); - II != IE;) { - auto *Inst = &*II; - ++II; - - if (isa(Inst) || isa(Inst)) { - if (Inst->getOperand(0) == Operand) { - Inst->eraseFromParent(); - RVI->eraseFromParent(); - NumInstsEliminated += 2; - return true; - } - // Skip past unrelated retains. - continue; - } - - // Scan past simple instructions that cannot reduce refcounts. - if (couldReduceStrongRefcount(Inst)) - break; - } - - // If we didn't find a retain to fold this into, emit the release. - return false; -} - -//===----------------------------------------------------------------------===// -// Top Level Entrypoint -//===----------------------------------------------------------------------===// - -namespace { - -// Even though this is a mandatory pass, it is rerun after deserialization in -// case DiagnosticConstantPropagation exposed anything new in this assert -// configuration. -struct GuaranteedARCOpts : SILFunctionTransform { - void run() override { - // Skip ownership SIL. We are going to have a run of semantic arc opts here. - if (getFunction()->hasOwnership()) - return; - - GuaranteedARCOptsVisitor Visitor; - - bool MadeChange = false; - SILFunction *F = getFunction(); - for (auto &BB : *F) { - for (auto II = BB.begin(), IE = BB.end(); II != IE;) { - SILInstruction *I = &*II; - ++II; - MadeChange |= Visitor.visit(I); - } - } - - if (MadeChange) { - invalidateAnalysis(SILAnalysis::InvalidationKind::Instructions); - } - } -}; - -} // end anonymous namespace - -SILTransform *swift::createGuaranteedARCOpts() { - return new GuaranteedARCOpts(); -} diff --git a/lib/SILOptimizer/PassManager/PassPipeline.cpp b/lib/SILOptimizer/PassManager/PassPipeline.cpp index d6bb8fba1e34b..8a5aa59e4d963 100644 --- a/lib/SILOptimizer/PassManager/PassPipeline.cpp +++ b/lib/SILOptimizer/PassManager/PassPipeline.cpp @@ -78,8 +78,15 @@ static void addDefiniteInitialization(SILPassPipelinePlan &P) { P.addRawSILInstLowering(); } -static void addMandatoryOptPipeline(SILPassPipelinePlan &P) { - P.startPipeline("Guaranteed Passes"); +// This pipeline defines a set of mandatory diagnostic passes and a set of +// supporting optimization passes that enable those diagnostics. These are run +// before any performance optimizations and in contrast to those optimizations +// _IS_ run when SourceKit emits diagnostics. +// +// Any passes not needed for diagnostic emission that need to run at -Onone +// should be in the -Onone pass pipeline and the prepare optimizations pipeline. +static void addMandatoryDiagnosticOptPipeline(SILPassPipelinePlan &P) { + P.startPipeline("Mandatory Diagnostic Passes + Enabling Optimization Passes"); P.addSILGenCleanup(); P.addDiagnoseInvalidEscapingCaptures(); P.addDiagnoseStaticExclusivity(); @@ -142,7 +149,6 @@ static void addMandatoryOptPipeline(SILPassPipelinePlan &P) { // dead allocations. P.addPredictableDeadAllocationElimination(); - P.addGuaranteedARCOpts(); P.addDiagnoseUnreachable(); P.addDiagnoseInfiniteRecursion(); P.addYieldOnceCheck(); @@ -169,7 +175,7 @@ SILPassPipelinePlan::getDiagnosticPassPipeline(const SILOptions &Options) { } // Otherwise run the rest of diagnostics. 
-  addMandatoryOptPipeline(P);
+  addMandatoryDiagnosticOptPipeline(P);
 
   if (SILViewGuaranteedCFG) {
     addCFGPrinterPipeline(P, "SIL View Guaranteed CFG");
@@ -738,9 +744,14 @@ SILPassPipelinePlan
 SILPassPipelinePlan::getOnonePassPipeline(const SILOptions &Options) {
   SILPassPipelinePlan P(Options);
 
-  P.startPipeline("Mandatory Combines");
+  // These are optimizations that we do not need to enable diagnostics (or
+  // depend on other passes needed for diagnostics). Thus we can run them later
+  // and avoid having SourceKit run these passes when just emitting diagnostics
+  // in the editor.
+  P.startPipeline("non-Diagnostic Enabling Mandatory Optimizations");
   P.addForEachLoopUnroll();
   P.addMandatoryCombine();
+  P.addGuaranteedARCOpts();
 
   // First serialize the SIL if we are asked to.
   P.startPipeline("Serialization");
diff --git a/lib/SILOptimizer/Transforms/SemanticARCOpts.cpp b/lib/SILOptimizer/Transforms/SemanticARCOpts.cpp
index f1fb26fe516a7..f9c713fbfd34e 100644
--- a/lib/SILOptimizer/Transforms/SemanticARCOpts.cpp
+++ b/lib/SILOptimizer/Transforms/SemanticARCOpts.cpp
@@ -872,11 +872,20 @@ struct SemanticARCOptVisitor
   /// consumed operand.
   FrozenMultiMap joinedOwnedIntroducerToConsumedOperands;
 
+  /// If set to true, then we should only run cheap optimizations that do not
+  /// build up data structures or analyze code in depth.
+  ///
+  /// As an example, we do not do load [copy] optimizations here since they
+  /// generally involve more complex analysis, but simple peepholes of
+  /// copy_values we /do/ allow.
+  bool onlyGuaranteedOpts;
+
   using FrozenMultiMapRange =
       decltype(joinedOwnedIntroducerToConsumedOperands)::PairToSecondEltRange;
 
-  explicit SemanticARCOptVisitor(SILFunction &F)
-      : F(F), addressToExhaustiveWriteListCache(constructCacheValue) {}
+  explicit SemanticARCOptVisitor(SILFunction &F, bool onlyGuaranteedOpts)
+      : F(F), addressToExhaustiveWriteListCache(constructCacheValue),
+        onlyGuaranteedOpts(onlyGuaranteedOpts) {}
 
   DeadEndBlocks &getDeadEndBlocks() {
     if (!TheDeadEndBlocks)
@@ -1432,6 +1441,10 @@ bool SemanticARCOptVisitor::visitBeginBorrowInst(BeginBorrowInst *bbi) {
 //
 // TODO: This needs a better name.
 bool SemanticARCOptVisitor::performGuaranteedCopyValueOptimization(CopyValueInst *cvi) {
+  // For now, do not run this optimization. This is just to be careful.
+  if (onlyGuaranteedOpts)
+    return false;
+
   SmallVector borrowScopeIntroducers;
 
   // Find all borrow introducers for our copy operand. If we are unable to find
@@ -1609,6 +1622,8 @@ bool SemanticARCOptVisitor::performGuaranteedCopyValueOptimization(CopyValueInst
 /// If cvi only has destroy value users, then cvi is a dead live range. Lets
 /// eliminate all such dead live ranges.
 bool SemanticARCOptVisitor::eliminateDeadLiveRangeCopyValue(CopyValueInst *cvi) {
+  // This is a cheap optimization generally.
+
   // See if we are lucky and have a simple case.
   if (auto *op = cvi->getSingleUse()) {
     if (auto *dvi = dyn_cast<DestroyValueInst>(op->getUser())) {
@@ -1807,6 +1822,10 @@ bool SemanticARCOptVisitor::tryJoiningCopyValueLiveRangeWithOperand(
   }
 
   // Otherwise, we couldn't handle this case, so return false.
+  //
+  // NOTE: We would generally do a more complex analysis here to handle the more
+  // general case. That would most likely /not/ be a guaranteed optimization
+  // until we investigate/measure.
   return false;
 }
 
@@ -2114,6 +2133,11 @@ bool SemanticARCOptVisitor::isWrittenTo(LoadInst *load,
 // Convert a load [copy] from unique storage [read] that has all uses that can
 // accept a guaranteed parameter to a load_borrow.
 bool SemanticARCOptVisitor::visitLoadInst(LoadInst *li) {
+  // This optimization can use more complex analysis. We should do some
+  // experiments before enabling this by default as a guaranteed optimization.
+  if (onlyGuaranteedOpts)
+    return false;
+
   if (li->getOwnershipQualifier() != LoadOwnershipQualifier::Copy)
     return false;
 
@@ -2156,6 +2180,11 @@ namespace {
 // case DiagnosticConstantPropagation exposed anything new in this assert
 // configuration.
 struct SemanticARCOpts : SILFunctionTransform {
+  bool guaranteedOptsOnly;
+
+  SemanticARCOpts(bool guaranteedOptsOnly)
+      : guaranteedOptsOnly(guaranteedOptsOnly) {}
+
   void run() override {
     SILFunction &f = *getFunction();
 
@@ -2168,7 +2197,7 @@ struct SemanticARCOpts : SILFunctionTransform {
            "Can not perform semantic arc optimization unless ownership "
            "verification is enabled");
 
-    SemanticARCOptVisitor visitor(f);
+    SemanticARCOptVisitor visitor(f, guaranteedOptsOnly);
 
     // Add all the results of all instructions that we want to visit to the
     // worklist.
@@ -2194,4 +2223,10 @@ struct SemanticARCOpts : SILFunctionTransform {
 
 } // end anonymous namespace
 
-SILTransform *swift::createSemanticARCOpts() { return new SemanticARCOpts(); }
+SILTransform *swift::createSemanticARCOpts() {
+  return new SemanticARCOpts(false /*guaranteed*/);
+}
+
+SILTransform *swift::createGuaranteedARCOpts() {
+  return new SemanticARCOpts(true /*guaranteed*/);
+}
diff --git a/test/ClangImporter/serialization-sil.swift b/test/ClangImporter/serialization-sil.swift
index dcef22c31aebe..544e1823ab7b2 100644
--- a/test/ClangImporter/serialization-sil.swift
+++ b/test/ClangImporter/serialization-sil.swift
@@ -33,8 +33,7 @@ public func testPartialApply(_ obj: Test) {
    // CHECK: [[PROP1_OBJ_COPY:%.*]] = copy_value [[PROP1_OBJ]]
    // CHECK: [[PROP1_PARTIAL:%.+]] = partial_apply [callee_guaranteed] [[PROP1_METHOD]]([[PROP1_OBJ_COPY]]) : $@convention(objc_method) (@opened([[PROP1_EXISTENTIAL]]) Test) -> @autoreleased AnyObject
    // CHECK: [[PROP1_PARTIAL_COPY:%.*]] = copy_value [[PROP1_PARTIAL]]
-    // CHECK: [[PROP1_PARTIAL_COPY_BORROW:%.*]] = begin_borrow [[PROP1_PARTIAL_COPY]]
-    // CHECK: = apply [[PROP1_PARTIAL_COPY_BORROW]]() : $@callee_guaranteed () -> @owned AnyObject
+    // CHECK: = apply [[PROP1_PARTIAL_COPY]]() : $@callee_guaranteed () -> @owned AnyObject
     _ = prop1
   }
   if let prop2 = obj.innerPointerProp {
@@ -43,8 +42,7 @@ public func testPartialApply(_ obj: Test) {
    // CHECK: [[PROP2_TRUE]]([[PROP2_METHOD:%.+]] : $@convention(objc_method) (@opened([[PROP2_EXISTENTIAL]]) Test) -> @unowned_inner_pointer UnsafeMutableRawPointer):
    // CHECK: [[PROP2_OBJ_COPY:%.*]] = copy_value [[PROP2_OBJ]]
    // CHECK: [[PROP2_PARTIAL:%.+]] = partial_apply [callee_guaranteed] [[PROP2_METHOD]]([[PROP2_OBJ_COPY]]) : $@convention(objc_method) (@opened([[PROP2_EXISTENTIAL]]) Test) -> @unowned_inner_pointer UnsafeMutableRawPointer
-    // CHECK: [[PROP2_PARTIAL_BORROW:%.*]] = begin_borrow [[PROP2_PARTIAL]]
-    // CHECK: = apply [[PROP2_PARTIAL_BORROW]]() : $@callee_guaranteed () -> UnsafeMutableRawPointer
+    // CHECK: = apply [[PROP2_PARTIAL]]() : $@callee_guaranteed () -> UnsafeMutableRawPointer
     _ = prop2
   }
 } // CHECK: // end sil function '$s4Test16testPartialApplyyySoAA_pF'
diff --git a/test/Constraints/keypath_dynamic_member_lookup.swift b/test/Constraints/keypath_dynamic_member_lookup.swift
index 8fb128488caf6..6cb171bf31395 100644
--- a/test/Constraints/keypath_dynamic_member_lookup.swift
+++ b/test/Constraints/keypath_dynamic_member_lookup.swift
@@ -46,9 +46,9 @@ var lens = Lens(Rectangle(topLeft: topLeft,
_ = lens.topLeft.x // CHECK: function_ref @$s29keypath_dynamic_member_lookup4LensV0B6MemberACyqd__Gs15WritableKeyPathCyxqd__G_tcluig -// CHECK-NEXT: apply %69({{.*}}) +// CHECK-NEXT: apply %68({{.*}}) // CHECK: function_ref @$s29keypath_dynamic_member_lookup4LensV0B6MemberACyqd__Gs15WritableKeyPathCyxqd__G_tcluig -// CHECK-NEXT: apply %76({{.*}}) +// CHECK-NEXT: apply %75({{.*}}) _ = lens.topLeft.y lens.topLeft = Lens(Point(x: 1, y: 2)) // Ok diff --git a/test/IRGen/access_markers.sil b/test/IRGen/access_markers.sil index c7d99af93e243..b7ae337aae1a5 100644 --- a/test/IRGen/access_markers.sil +++ b/test/IRGen/access_markers.sil @@ -17,9 +17,12 @@ class A { sil_vtable A {} -// CHECK-LABEL: define {{.*}}void @testPaired( -sil @testPaired : $(@guaranteed A) -> () { -bb0(%0 : $A): +sil @int_user : $@convention(thin) (Int64, Int64) -> () + +// CHECK-LABEL: define {{.*}} @testPaired( +sil [ossa] @testPaired : $(@guaranteed A) -> () { +bb0(%0 : @guaranteed $A): + %func = function_ref @int_user : $@convention(thin) (Int64, Int64) -> () // CHECK: [[SCRATCH1:%.*]] = alloca [[BUFFER:.* x i8.]], align // CHECK: [[SCRATCH2:%.*]] = alloca [[BUFFER]], align @@ -34,7 +37,7 @@ bb0(%0 : $A): // CHECK-NEXT: getelementptr inbounds %Ts5Int64V, %Ts5Int64V* [[PROPERTY]], i32 0, i32 0 // CHECK-NEXT: load i64, i64* - %4 = load %3 : $*Int64 + %4 = load [trivial] %3 : $*Int64 // CHECK-NEXT: call void @swift_endAccess([[BUFFER]]* [[SCRATCH1]]) // CHECK-NEXT: [[T0:%.*]] = bitcast [[BUFFER]]* [[SCRATCH1]] to i8* @@ -49,15 +52,17 @@ bb0(%0 : $A): // CHECK-NEXT: getelementptr inbounds %Ts5Int64V, %Ts5Int64V* [[PROPERTY]], i32 0, i32 0 // CHECK-NEXT: load i64, i64* - %7 = load %6 : $*Int64 + %7 = load [trivial] %6 : $*Int64 // CHECK-NEXT: call void @swift_endAccess([[BUFFER]]* [[SCRATCH2]]) // CHECK-NEXT: [[T0:%.*]] = bitcast [[BUFFER]]* [[SCRATCH2]] to i8* // CHECK-NEXT: call void @llvm.lifetime.end.p0i8(i64 {{.*}}, i8* [[T0]]) end_access %6 : $*Int64 - %20 = tuple () - return %20 : $() + apply %func(%4, %7) : $@convention(thin) (Int64, Int64) -> () + + %9999 = tuple() + return %9999 : $() } // CHECK-LABEL: define {{.*}}void @testUnpaired( diff --git a/test/IRGen/global_resilience.sil b/test/IRGen/global_resilience.sil index 34b9c40b80782..d3622367a23c6 100644 --- a/test/IRGen/global_resilience.sil +++ b/test/IRGen/global_resilience.sil @@ -55,8 +55,8 @@ sil_global hidden @fixedGlobal : $LargeResilientStruct // CHECK: @otherGlobal = {{(dllexport )?}}{{(protected )?}}global [[BUFFER]] zeroinitializer sil_global [let] @otherGlobal : $Size -// CHECK-LABEL: define{{( dllexport)?}}{{( protected)?}} swiftcc void @testSmallGlobal() -sil @testSmallGlobal : $@convention(thin) () -> () { +// CHECK-LABEL: define{{( dllexport)?}}{{( protected)?}} swiftcc i32 @testSmallGlobal() +sil @testSmallGlobal : $@convention(thin) () -> Int32 { bb0: // This is just a no-op alloc_global @smallGlobal @@ -67,10 +67,8 @@ bb0: %x_addr = struct_element_addr %addr : $*SmallResilientStruct, #SmallResilientStruct.x %x = load %x_addr : $*Int32 - %tuple = tuple () - - // CHECK: ret void - return %tuple : $() + // CHECK: ret i32 [[VALUE]] + return %x : $Int32 } // CHECK-LABEL: define{{( dllexport)?}}{{( protected)?}} swiftcc void @testLargeGlobal() @@ -89,21 +87,19 @@ bb0: return %tuple : $() } -// CHECK-LABEL: define{{( dllexport)?}}{{( protected)?}} swiftcc void @testFixedGlobal() -sil @testFixedGlobal : $@convention(thin) () -> () { +// CHECK-LABEL: define{{( dllexport)?}}{{( protected)?}} swiftcc i64 @testFixedGlobal() +sil @testFixedGlobal : 
$@convention(thin) () -> Int64 { bb0: alloc_global @fixedGlobal %addr = global_addr @fixedGlobal : $*LargeResilientStruct - // CHECK: load i64, i64* getelementptr inbounds (%T17global_resilience20LargeResilientStructV, %T17global_resilience20LargeResilientStructV* @fixedGlobal, i32 0, i32 1, i32 0) + // CHECK: [[VALUE:%.*]] = load i64, i64* getelementptr inbounds (%T17global_resilience20LargeResilientStructV, %T17global_resilience20LargeResilientStructV* @fixedGlobal, i32 0, i32 1, i32 0) %x_addr = struct_element_addr %addr : $*LargeResilientStruct, #LargeResilientStruct.x %x = load %x_addr : $*Int64 - %tuple = tuple () - - // CHECK: ret void - return %tuple : $() + // CHECK: ret i64 [[VALUE]] + return %x : $Int64 } // CHECK-LABEL: define {{.*}} @testOtherGlobal diff --git a/test/IRGen/struct_resilience.swift b/test/IRGen/struct_resilience.swift index c47b446e131f0..85013fb1fd517 100644 --- a/test/IRGen/struct_resilience.swift +++ b/test/IRGen/struct_resilience.swift @@ -2,7 +2,7 @@ // RUN: %empty-directory(%t) // RUN: %target-swift-frontend -emit-module -enable-library-evolution -emit-module-path=%t/resilient_struct.swiftmodule -module-name=resilient_struct %S/../Inputs/resilient_struct.swift // RUN: %target-swift-frontend -emit-module -enable-library-evolution -emit-module-path=%t/resilient_enum.swiftmodule -module-name=resilient_enum -I %t %S/../Inputs/resilient_enum.swift -// RUN: %target-swift-frontend -module-name struct_resilience -I %t -emit-ir -enable-library-evolution %s | %FileCheck %s +// RUN: %target-swift-frontend -module-name struct_resilience -Xllvm -sil-disable-pass=GuaranteedARCOpts -I %t -emit-ir -enable-library-evolution %s | %FileCheck %s // RUN: %target-swift-frontend -module-name struct_resilience -I %t -emit-ir -enable-library-evolution -O %s import resilient_struct diff --git a/test/IRGen/unknown_object.sil b/test/IRGen/unknown_object.sil index e5e86bb1b9d7a..af6767a3088fb 100644 --- a/test/IRGen/unknown_object.sil +++ b/test/IRGen/unknown_object.sil @@ -4,6 +4,8 @@ sil_stage canonical import Builtin +sil @anyobject_user : $@convention(thin) (@guaranteed Builtin.AnyObject) -> () + // CHECK-LABEL: @retain_release_unknown_object sil [ossa] @retain_release_unknown_object : $@convention(thin) (@guaranteed Builtin.AnyObject) -> () { entry(%x : @guaranteed $Builtin.AnyObject): @@ -13,6 +15,8 @@ entry(%x : @guaranteed $Builtin.AnyObject): br bb1 bb1: + %func = function_ref @anyobject_user : $@convention(thin) (@guaranteed Builtin.AnyObject) -> () + apply %func(%y) : $@convention(thin) (@guaranteed Builtin.AnyObject) -> () // CHECK-native: swift_release // CHECK-objc: swift_unknownObjectRelease destroy_value %y : $Builtin.AnyObject diff --git a/test/PrintAsObjC/extensions.swift b/test/PrintAsObjC/extensions.swift index 17c164389f81c..dbc4b4bcddc1a 100644 --- a/test/PrintAsObjC/extensions.swift +++ b/test/PrintAsObjC/extensions.swift @@ -1,8 +1,8 @@ // Please keep this file in alphabetical order! 
// RUN: %empty-directory(%t) -// RUN: %target-swift-frontend(mock-sdk: %clang-importer-sdk) -disable-sil-ownership-verifier -emit-module -o %t %s -disable-objc-attr-requires-foundation-module -// RUN: %target-swift-frontend(mock-sdk: %clang-importer-sdk) -disable-sil-ownership-verifier -parse-as-library %t/extensions.swiftmodule -typecheck -emit-objc-header-path %t/extensions.h -import-objc-header %S/../Inputs/empty.h -disable-objc-attr-requires-foundation-module +// RUN: %target-swift-frontend(mock-sdk: %clang-importer-sdk) -disable-sil-ownership-verifier -emit-module -o %t %s -disable-objc-attr-requires-foundation-module -Xllvm -sil-disable-pass=GuaranteedARCOpts +// RUN: %target-swift-frontend(mock-sdk: %clang-importer-sdk) -disable-sil-ownership-verifier -parse-as-library %t/extensions.swiftmodule -typecheck -emit-objc-header-path %t/extensions.h -import-objc-header %S/../Inputs/empty.h -disable-objc-attr-requires-foundation-module -Xllvm -sil-disable-pass=GuaranteedARCOpts // RUN: %FileCheck %s < %t/extensions.h // RUN: %FileCheck --check-prefix=NEGATIVE %s < %t/extensions.h // RUN: %check-in-clang %t/extensions.h diff --git a/test/SILGen/reabstract.swift b/test/SILGen/reabstract.swift index 2ea92c02697c4..d96db14d1c5ca 100644 --- a/test/SILGen/reabstract.swift +++ b/test/SILGen/reabstract.swift @@ -34,13 +34,11 @@ func test0() { // MANDATORY: reabstract.liftOptional // MANDATORY-NEXT: [[T1:%.*]] = function_ref @$s10reabstract12liftOptional{{[_0-9a-zA-Z]*}}F // MANDATORY-NEXT: [[T2:%.*]] = thin_to_thick_function [[T1]] -// MANDATORY-NEXT: strong_retain [[T2]] // MANDATORY-NEXT: [[CVT:%.*]] = convert_escape_to_noescape [[T2]] // MANDATORY-NEXT: //{{.*}}reabstraction thunk // MANDATORY-NEXT: [[T3:%.*]] = function_ref [[THUNK:@.*]] : // MANDATORY-NEXT: [[T4:%.*]] = partial_apply [callee_guaranteed] [on_stack] [[T3]]([[CVT]]) // MANDATORY-NEXT: [[T5:%.*]] = convert_function [[T4]] -// MANDATORY-NEXT: strong_release [[T2]] // MANDATORY-NEXT: // function_ref // MANDATORY-NEXT: [[T0:%.*]] = function_ref @$s10reabstract6takeFn{{[_0-9a-zA-Z]*}}F // MANDATORY-NEXT: apply [[T0]]([[T5]]) diff --git a/test/SILOptimizer/closure_lifetime_fixup_objc.swift b/test/SILOptimizer/closure_lifetime_fixup_objc.swift index 757d4b401136a..9167fa17f6b3c 100644 --- a/test/SILOptimizer/closure_lifetime_fixup_objc.swift +++ b/test/SILOptimizer/closure_lifetime_fixup_objc.swift @@ -14,9 +14,6 @@ public protocol DangerousEscaper { // CHECK: bb0([[ARG:%.*]] : $@callee_guaranteed () -> (), [[SELF:%.*]] : $DangerousEscaper): // CHECK: [[OE:%.*]] = open_existential_ref [[SELF]] -// Copy (1). -// CHECK: strong_retain [[ARG]] : $@callee_guaranteed () -> () - // Extend the lifetime to the end of this function (2). // CHECK: strong_retain [[ARG]] : $@callee_guaranteed () -> () @@ -42,8 +39,6 @@ public protocol DangerousEscaper { // CHECK: destroy_addr [[CLOSURE_ADDR]] : $*@callee_guaranteed () -> () // CHECK: dealloc_stack [[BLOCK_STORAGE]] : $*@block_storage @callee_guaranteed () -> () -// Release of closure copy (1). 
-// CHECK: strong_release %0 : $@callee_guaranteed () -> () // CHECK: [[METH:%.*]] = objc_method [[OE]] : $@opened("{{.*}}") DangerousEscaper, #DangerousEscaper.malicious!foreign : (Self) -> (() -> ()) -> (), $@convention(objc_method) <τ_0_0 where τ_0_0 : DangerousEscaper> (@convention(block) @noescape () -> (), τ_0_0) -> () // CHECK: apply [[METH]]<@opened("{{.*}}") DangerousEscaper>([[BLOCK_COPY]], [[OE]]) : $@convention(objc_method) <τ_0_0 where τ_0_0 : DangerousEscaper> (@convention(block) @noescape () -> (), τ_0_0) -> () diff --git a/test/SILOptimizer/guaranteed_arc_opts_qualified.sil b/test/SILOptimizer/guaranteed_arc_opts_qualified.sil deleted file mode 100644 index f328cb8bb26ff..0000000000000 --- a/test/SILOptimizer/guaranteed_arc_opts_qualified.sil +++ /dev/null @@ -1,184 +0,0 @@ -// RUN: %target-sil-opt -guaranteed-arc-opts %s | %FileCheck %s - -sil_stage raw - -import Builtin - -sil @kraken : $@convention(thin) () -> () - -// CHECK-LABEL: sil @retainvalue_test1 : $@convention(thin) (Builtin.NativeObject, Builtin.NativeObject) -> () { -// CHECK: bb0([[ARG1:%.*]] : $Builtin.NativeObject, [[ARG2:%.*]] : $Builtin.NativeObject): -// CHECK-NOT: retain_value [[ARG1]] -// CHECK: retain_value [[ARG2]] -// CHECK-NOT: release_value [[ARG1]] -sil @retainvalue_test1 : $@convention(thin) (Builtin.NativeObject, Builtin.NativeObject) -> () { -bb0(%0 : $Builtin.NativeObject, %1 : $Builtin.NativeObject): - retain_value %0 : $Builtin.NativeObject - retain_value %1 : $Builtin.NativeObject - release_value %0 : $Builtin.NativeObject - %9999 = tuple() - return %9999 : $() -} - -// CHECK-LABEL: sil @retainvalue_test2 : $@convention(thin) (Builtin.NativeObject, @in Builtin.Int32) -> () { -// CHECK-NOT: retain_value -// CHECK-NOT: release_value -sil @retainvalue_test2 : $@convention(thin) (Builtin.NativeObject, @in Builtin.Int32) -> () { -bb0(%0 : $Builtin.NativeObject, %1 : $*Builtin.Int32): - retain_value %0 : $Builtin.NativeObject - %2 = integer_literal $Builtin.Int32, 0 - store %2 to %1 : $*Builtin.Int32 - release_value %0 : $Builtin.NativeObject - %9999 = tuple() - return %9999 : $() -} - -// CHECK-LABEL: sil @retainvalue_test3 : $@convention(thin) (Builtin.NativeObject) -> () { -// CHECK: retain_value -// CHECK: release_value -sil @retainvalue_test3 : $@convention(thin) (Builtin.NativeObject) -> () { -bb0(%0 : $Builtin.NativeObject): - retain_value %0 : $Builtin.NativeObject - %1 = function_ref @kraken : $@convention(thin) () -> () - apply %1() : $@convention(thin) () -> () - release_value %0 : $Builtin.NativeObject - %9999 = tuple() - return %9999 : $() -} - -// CHECK-LABEL: sil @retainvalue_test4 : $@convention(thin) (Builtin.NativeObject) -> () { -// CHECK: release_value -sil @retainvalue_test4 : $@convention(thin) (Builtin.NativeObject) -> () { -bb0(%0 : $Builtin.NativeObject): - release_value %0 : $Builtin.NativeObject - %9999 = tuple() - return %9999 : $() -} - -// CHECK-LABEL: sil @strongretain_test1 : $@convention(thin) (Builtin.NativeObject, Builtin.NativeObject) -> () { -// CHECK: bb0([[ARG1:%.*]] : $Builtin.NativeObject, [[ARG2:%.*]] : $Builtin.NativeObject): -// CHECK-NOT: strong_retain [[ARG1]] -// CHECK: strong_retain [[ARG2]] -// CHECK-NOT: strong_release [[ARG1]] -sil @strongretain_test1 : $@convention(thin) (Builtin.NativeObject, Builtin.NativeObject) -> () { -bb0(%0 : $Builtin.NativeObject, %1 : $Builtin.NativeObject): - strong_retain %0 : $Builtin.NativeObject - strong_retain %1 : $Builtin.NativeObject - strong_release %0 : $Builtin.NativeObject - %9999 = tuple() - return %9999 
: $() -} - -// CHECK-LABEL: sil @strongretain_test2 : $@convention(thin) (Builtin.NativeObject, @in Builtin.Int32) -> () { -// CHECK-NOT: strong_retain -// CHECK-NOT: strong_release -sil @strongretain_test2 : $@convention(thin) (Builtin.NativeObject, @in Builtin.Int32) -> () { -bb0(%0 : $Builtin.NativeObject, %1 : $*Builtin.Int32): - strong_retain %0 : $Builtin.NativeObject - %2 = integer_literal $Builtin.Int32, 0 - store %2 to %1 : $*Builtin.Int32 - strong_release %0 : $Builtin.NativeObject - %9999 = tuple() - return %9999 : $() -} - -// CHECK-LABEL: sil @strongretain_test3 : $@convention(thin) (Builtin.NativeObject) -> () { -// CHECK: strong_retain -// CHECK: strong_release -sil @strongretain_test3 : $@convention(thin) (Builtin.NativeObject) -> () { -bb0(%0 : $Builtin.NativeObject): - strong_retain %0 : $Builtin.NativeObject - %1 = function_ref @kraken : $@convention(thin) () -> () - apply %1() : $@convention(thin) () -> () - strong_release %0 : $Builtin.NativeObject - %9999 = tuple() - return %9999 : $() -} - -// CHECK-LABEL: sil @strongretain_test4 : $@convention(thin) (Builtin.NativeObject) -> () { -// CHECK: strong_release -sil @strongretain_test4 : $@convention(thin) (Builtin.NativeObject) -> () { -bb0(%0 : $Builtin.NativeObject): - strong_release %0 : $Builtin.NativeObject - %9999 = tuple() - return %9999 : $() -} - -// CHECK-LABEL: sil @copyaddr_test1 : $@convention(thin) (@in Builtin.NativeObject) -> @out Builtin.NativeObject { -// CHECK: bb0([[ARG1:%.*]] : $*Builtin.NativeObject, [[ARG2:%.*]] : $*Builtin.NativeObject): -// CHECK: copy_addr [take] {{%.*}} to {{%.*}} -// CHECK-NOT: destroy_addr [[ARG1]] -sil @copyaddr_test1 : $@convention(thin) (@in Builtin.NativeObject) -> @out Builtin.NativeObject { -bb0(%0 : $*Builtin.NativeObject, %1 : $*Builtin.NativeObject): - copy_addr %1 to %0 : $*Builtin.NativeObject - destroy_addr %1 : $*Builtin.NativeObject - %9999 = tuple() - return %9999 : $() -} - -// CHECK-LABEL: sil @copyaddr_test2 : $@convention(thin) (@in Builtin.NativeObject) -> @out Builtin.NativeObject { -// CHECK: copy_addr -// CHECK: destroy_addr -sil @copyaddr_test2 : $@convention(thin) (@in Builtin.NativeObject) -> @out Builtin.NativeObject { -bb0(%0 : $*Builtin.NativeObject, %1 : $*Builtin.NativeObject): - copy_addr %1 to %0 : $*Builtin.NativeObject - %2 = function_ref @kraken : $@convention(thin) () -> () - apply %2() : $@convention(thin) () -> () - destroy_addr %1 : $*Builtin.NativeObject - %9999 = tuple() - return %9999 : $() -} - -// CHECK-LABEL: sil @copyaddr_test3 : $@convention(thin) (@in Builtin.NativeObject) -> () { -// CHECK: destroy_addr -sil @copyaddr_test3 : $@convention(thin) (@in Builtin.NativeObject) -> () { -bb0(%0 : $*Builtin.NativeObject): - destroy_addr %0 : $*Builtin.NativeObject - %9999 = tuple() - return %9999 : $() -} - -class C { - var val: Builtin.Int32 - init(i: Builtin.Int32) - deinit -} - -// CHECK-LABEL: sil @access_test : $@convention(thin) (@owned C, Builtin.Int32) -> () -// CHECK-NOT: strong_retain -// CHECK-NOT: strong_release -sil @access_test : $@convention(thin) (@owned C, Builtin.Int32) -> () { -bb0(%0 : $C, %1 : $Builtin.Int32): - strong_retain %0 : $C - %valadr = ref_element_addr %0 : $C, #C.val - %access = begin_access [modify] [dynamic] %valadr : $*Builtin.Int32 - store %1 to %access : $*Builtin.Int32 - end_access %access : $*Builtin.Int32 - strong_release %0 : $C - %9999 = tuple() - return %9999 : $() -} - -// CHECK-LABEL: sil @mixed_test_1 : $@convention(thin) (Builtin.NativeObject, Builtin.NativeObject) -> () { -// CHECK-NOT: 
retain -// CHECK-NOT: release -// CHECK: } // end sil function 'mixed_test_1' -sil @mixed_test_1 : $@convention(thin) (Builtin.NativeObject, Builtin.NativeObject) -> () { -bb0(%0 : $Builtin.NativeObject, %1 : $Builtin.NativeObject): - strong_retain %0 : $Builtin.NativeObject - release_value %0 : $Builtin.NativeObject - %9999 = tuple() - return %9999 : $() -} - -// CHECK-LABEL: sil @mixed_test_2 : $@convention(thin) (Builtin.NativeObject, Builtin.NativeObject) -> () { -// CHECK-NOT: retain -// CHECK-NOT: release -// CHECK: } // end sil function 'mixed_test_2' -sil @mixed_test_2 : $@convention(thin) (Builtin.NativeObject, Builtin.NativeObject) -> () { -bb0(%0 : $Builtin.NativeObject, %1 : $Builtin.NativeObject): - retain_value %0 : $Builtin.NativeObject - strong_release %0 : $Builtin.NativeObject - %9999 = tuple() - return %9999 : $() -} diff --git a/test/SILOptimizer/opaque_values_mandatory.sil b/test/SILOptimizer/opaque_values_mandatory.sil index 2d85de76d09df..7d87029f154e2 100644 --- a/test/SILOptimizer/opaque_values_mandatory.sil +++ b/test/SILOptimizer/opaque_values_mandatory.sil @@ -1,4 +1,6 @@ -// RUN: %target-sil-opt -diagnostics -enable-sil-opaque-values -emit-sorted-sil %s | %FileCheck %s +// RUN: %target-sil-opt -diagnostics -enable-sil-opaque-values %s | \ +// RUN: %target-sil-opt -Onone-performance -enable-sil-opaque-values -emit-sorted-sil | \ +// RUN: %FileCheck %s import Builtin @@ -6,8 +8,6 @@ sil_stage raw typealias Int = Builtin.Int64 -// Test trivial, guaranteed copyforwarding. -// ---- // CHECK-LABEL: sil hidden @f010_diagnose_identity : $@convention(thin) (@in T) -> @out T { // CHECK: bb0(%0 : $T): // CHECK: return %0 : $T @@ -65,8 +65,10 @@ bb0(%0 : $Int): // CHECK: bb0(%0 : $T): // CHECK: %1 = copy_value %0 : $T // CHECK: %2 = copy_value %0 : $T -// CHECK: %3 = tuple (%1 : $T, %2 : $T, %0 : $T) -// CHECK: return %3 : $(T, T, T) +// CHECK: %3 = copy_value %0 : $T +// CHECK: destroy_value %0 +// CHECK: %5 = tuple (%1 : $T, %2 : $T, %3 : $T) +// CHECK: return %5 : $(T, T, T) // CHECK-LABEL: } // end sil function 'f040_multiResult' sil hidden [noinline] @f040_multiResult : $@convention(thin) (@in T) -> (@out T, @out T, @out T) { bb0(%0 : $T): diff --git a/test/SILOptimizer/pass_printer.swift b/test/SILOptimizer/pass_printer.swift index fdb0b39774cd2..b525c2c498d05 100644 --- a/test/SILOptimizer/pass_printer.swift +++ b/test/SILOptimizer/pass_printer.swift @@ -2,7 +2,7 @@ // RUN: %target-swift-frontend -Xllvm -sil-print-after=definite-init -emit-sil %s -o /dev/null 2>&1 | %FileCheck --check-prefix=AFTER %s // RUN: %target-swift-frontend -Xllvm -sil-disable-pass=definite-init -Xllvm -sil-print-pass-name -emit-sil %s -o /dev/null 2>&1 | %FileCheck --check-prefix=DISABLE %s -// BEFORE: SIL function before #{{[0-9]+}}, stage Guaranteed Passes, pass {{[0-9]+}}: DefiniteInitialization (definite-init) -// AFTER: SIL function after #{{[0-9]+}}, stage Guaranteed Passes, pass {{[0-9]+}}: DefiniteInitialization (definite-init) -// DISABLE: (Disabled) #{{[0-9]+}}, stage Guaranteed Passes, pass {{[0-9]+}}: DefiniteInitialization (definite-init) +// BEFORE: SIL function before #{{[0-9]+}}, stage Mandatory Diagnostic Passes + Enabling Optimization Passes, pass {{[0-9]+}}: DefiniteInitialization (definite-init) +// AFTER: SIL function after #{{[0-9]+}}, stage Mandatory Diagnostic Passes + Enabling Optimization Passes, pass {{[0-9]+}}: DefiniteInitialization (definite-init) +// DISABLE: (Disabled) #{{[0-9]+}}, stage Mandatory Diagnostic Passes + Enabling Optimization Passes, pass 
{{[0-9]+}}: DefiniteInitialization (definite-init)
 func foo() {}
diff --git a/test/sil-passpipeline-dump/basic.test-sh b/test/sil-passpipeline-dump/basic.test-sh
index e435b2a63194d..2458889e1a491 100644
--- a/test/sil-passpipeline-dump/basic.test-sh
+++ b/test/sil-passpipeline-dump/basic.test-sh
@@ -1,8 +1,8 @@
 // RUN: %sil-passpipeline-dumper -Onone | %FileCheck %s
 
 // CHECK: ---
-// CHECK: name: Mandatory Combines
-// CHECK: passes: [ "for-each-loop-unroll", "mandatory-combine" ]
+// CHECK: name: non-Diagnostic Enabling Mandatory Optimizations
+// CHECK: passes: [ "for-each-loop-unroll", "mandatory-combine", "guaranteed-arc-opts" ]
 // CHECK: ---
 // CHECK: name: Serialization
 // CHECK: passes: [ "serialize-sil", "ownership-model-eliminator" ]
diff --git a/tools/sil-opt/SILOpt.cpp b/tools/sil-opt/SILOpt.cpp
index edee557560ee7..cdbba938fe322 100644
--- a/tools/sil-opt/SILOpt.cpp
+++ b/tools/sil-opt/SILOpt.cpp
@@ -48,7 +48,13 @@ namespace cl = llvm::cl;
 
 namespace {
 
-enum class OptGroup { Unknown, Diagnostics, Performance, Lowering };
+enum class OptGroup {
+  Unknown,
+  Diagnostics,
+  OnonePerformance,
+  Performance,
+  Lowering
+};
 
 } // end anonymous namespace
 
@@ -147,6 +153,8 @@ static llvm::cl::opt<OptGroup> OptimizationGroup(
         clEnumValN(OptGroup::Diagnostics, "diagnostics",
                    "Run diagnostic passes"),
         clEnumValN(OptGroup::Performance, "O", "Run performance passes"),
+        clEnumValN(OptGroup::OnonePerformance, "Onone-performance",
+                   "Run Onone perf passes"),
         clEnumValN(OptGroup::Lowering, "lowering", "Run lowering passes")),
     llvm::cl::init(OptGroup::Unknown));
 
@@ -489,18 +497,27 @@ int main(int argc, char **argv) {
     SILMod->installSILRemarkStreamer();
   }
 
-  if (OptimizationGroup == OptGroup::Diagnostics) {
+  switch (OptimizationGroup) {
+  case OptGroup::Diagnostics:
     runSILDiagnosticPasses(*SILMod.get());
-  } else if (OptimizationGroup == OptGroup::Performance) {
+    break;
+  case OptGroup::Performance:
     runSILOptimizationPasses(*SILMod.get());
-  } else if (OptimizationGroup == OptGroup::Lowering) {
+    break;
+  case OptGroup::Lowering:
     runSILLoweringPasses(*SILMod.get());
-  } else {
+    break;
+  case OptGroup::OnonePerformance:
+    runSILPassesForOnone(*SILMod.get());
+    break;
+  case OptGroup::Unknown: {
    auto T = irgen::createIRGenModule(
        SILMod.get(), Invocation.getOutputFilenameForAtMostOnePrimary(),
        Invocation.getMainInputFilenameForDebugInfoForAtMostOnePrimary(), "");
    runCommandLineSelectedPasses(SILMod.get(), T.second);
    irgen::deleteIRGenModule(T);
+    break;
+  }
   }
 
   if (EmitSIB) {
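
A brief usage sketch (not part of the patch itself) of how the reorganized pipelines are exercised after this change; it uses only flags that already appear in the changes above, with %s standing in for an input file as in the lit tests:

// Diagnostic passes only; guaranteed-arc-opts no longer runs here:
//   %target-sil-opt -diagnostics %s
// Non-diagnostic mandatory optimizations, which now include guaranteed-arc-opts:
//   %target-sil-opt -Onone-performance %s
// Opting an individual compile out of the pass, as the updated IRGen tests do:
//   %target-swift-frontend -Xllvm -sil-disable-pass=GuaranteedARCOpts -emit-ir %s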