diff --git a/include/swift/Runtime/HeapObject.h b/include/swift/Runtime/HeapObject.h index e688f0576e6fa..d5f06e24384fb 100644 --- a/include/swift/Runtime/HeapObject.h +++ b/include/swift/Runtime/HeapObject.h @@ -1086,4 +1086,53 @@ swift_getTypeName(const Metadata *type, bool qualified); } // end namespace swift +#if SWIFT_OBJC_INTEROP +/// Standard ObjC lifecycle methods for Swift objects +#define STANDARD_OBJC_METHOD_IMPLS_FOR_SWIFT_OBJECTS \ +- (id)retain { \ + auto SELF = reinterpret_cast(self); \ + swift_retain(SELF); \ + return self; \ +} \ +- (void)release { \ + auto SELF = reinterpret_cast(self); \ + swift_release(SELF); \ +} \ +- (id)autorelease { \ + return _objc_rootAutorelease(self); \ +} \ +- (NSUInteger)retainCount { \ + return swift::swift_retainCount(reinterpret_cast(self)); \ +} \ +- (BOOL)_isDeallocating { \ + return swift_isDeallocating(reinterpret_cast(self)); \ +} \ +- (BOOL)_tryRetain { \ + return swift_tryRetain(reinterpret_cast(self)) != nullptr; \ +} \ +- (BOOL)allowsWeakReference { \ + return !swift_isDeallocating(reinterpret_cast(self)); \ +} \ +- (BOOL)retainWeakReference { \ + return swift_tryRetain(reinterpret_cast(self)) != nullptr; \ +} \ +- (void)_setWeaklyReferenced { \ + auto heapObj = reinterpret_cast(self); \ + heapObj->refCounts.setPureSwiftDeallocation(false); \ +} \ +- (bool)_setAssociatedObject:(id)obj \ + forKey:(const void *)key \ + associationPolicy:(objc_AssociationPolicy)policy { \ + auto heapObj = reinterpret_cast(self); \ + heapObj->refCounts.setPureSwiftDeallocation(false); \ + /* false to let libobjc know it still needs to associate the object */ \ + return false; \ +} \ +- (void)dealloc { \ + swift_rootObjCDealloc(reinterpret_cast(self)); \ +} + +#endif // SWIFT_OBJC_INTEROP + + #endif // SWIFT_RUNTIME_ALLOC_H diff --git a/stdlib/public/SwiftShims/RefCount.h b/stdlib/public/SwiftShims/RefCount.h index 6e0886a432659..1de032c56f49b 100644 --- a/stdlib/public/SwiftShims/RefCount.h +++ 
b/stdlib/public/SwiftShims/RefCount.h @@ -238,14 +238,29 @@ struct RefCountBitOffsets; // 32-bit out of line template <> struct RefCountBitOffsets<8> { - static const size_t IsImmortalShift = 0; - static const size_t IsImmortalBitCount = 1; - static const uint64_t IsImmortalMask = maskForField(IsImmortal); - - static const size_t UnownedRefCountShift = shiftAfterField(IsImmortal); + /* + The bottom 32 bits (on 64 bit architectures, fewer on 32 bit) of the refcount + field are effectively a union of two different configurations: + + ---Normal case--- + Bit 0: Does this object need to call out to the ObjC runtime for deallocation + Bits 1-31: Unowned refcount + + ---Immortal case--- + All bits set, the object does not deallocate or have a refcount + */ + static const size_t PureSwiftDeallocShift = 0; + static const size_t PureSwiftDeallocBitCount = 1; + static const uint64_t PureSwiftDeallocMask = maskForField(PureSwiftDealloc); + + static const size_t UnownedRefCountShift = shiftAfterField(PureSwiftDealloc); static const size_t UnownedRefCountBitCount = 31; static const uint64_t UnownedRefCountMask = maskForField(UnownedRefCount); + static const size_t IsImmortalShift = 0; // overlaps PureSwiftDealloc and UnownedRefCount + static const size_t IsImmortalBitCount = 32; + static const uint64_t IsImmortalMask = maskForField(IsImmortal); + static const size_t IsDeinitingShift = shiftAfterField(UnownedRefCount); static const size_t IsDeinitingBitCount = 1; static const uint64_t IsDeinitingMask = maskForField(IsDeiniting); @@ -271,14 +286,18 @@ struct RefCountBitOffsets<8> { // 32-bit inline template <> struct RefCountBitOffsets<4> { - static const size_t IsImmortalShift = 0; - static const size_t IsImmortalBitCount = 1; - static const uint64_t IsImmortalMask = maskForField(IsImmortal); + static const size_t PureSwiftDeallocShift = 0; + static const size_t PureSwiftDeallocBitCount = 1; + static const uint32_t PureSwiftDeallocMask = maskForField(PureSwiftDealloc); - static 
const size_t UnownedRefCountShift = shiftAfterField(IsImmortal); + static const size_t UnownedRefCountShift = shiftAfterField(PureSwiftDealloc); static const size_t UnownedRefCountBitCount = 7; static const uint32_t UnownedRefCountMask = maskForField(UnownedRefCount); + static const size_t IsImmortalShift = 0; // overlaps PureSwiftDealloc and UnownedRefCount + static const size_t IsImmortalBitCount = 8; + static const uint32_t IsImmortalMask = maskForField(IsImmortal); + static const size_t IsDeinitingShift = shiftAfterField(UnownedRefCount); static const size_t IsDeinitingBitCount = 1; static const uint32_t IsDeinitingMask = maskForField(IsDeiniting); @@ -369,16 +388,39 @@ class RefCountBitsT { enum Immortal_t { Immortal }; LLVM_ATTRIBUTE_ALWAYS_INLINE - bool isImmortal() const { - return bool(getField(IsImmortal)); + bool isImmortal(bool checkSlowRCBit) const { + if (checkSlowRCBit) { + return (getField(IsImmortal) == Offsets::IsImmortalMask) && + bool(getField(UseSlowRC)); + } else { + return (getField(IsImmortal) == Offsets::IsImmortalMask); + } + } + + LLVM_ATTRIBUTE_ALWAYS_INLINE + bool isOverflowingUnownedRefCount(uint32_t oldValue, uint32_t inc) const { + auto newValue = getUnownedRefCount(); + return newValue != oldValue + inc || + newValue == Offsets::UnownedRefCountMask; } LLVM_ATTRIBUTE_ALWAYS_INLINE void setIsImmortal(bool value) { - setField(IsImmortal, value); + assert(value); + setField(IsImmortal, Offsets::IsImmortalMask); setField(UseSlowRC, value); } + LLVM_ATTRIBUTE_ALWAYS_INLINE + bool pureSwiftDeallocation() const { + return bool(getField(PureSwiftDealloc)) && !bool(getField(UseSlowRC)); + } + + LLVM_ATTRIBUTE_ALWAYS_INLINE + void setPureSwiftDeallocation(bool value) { + setField(PureSwiftDealloc, value); + } + LLVM_ATTRIBUTE_ALWAYS_INLINE RefCountBitsT() = default; @@ -386,16 +428,16 @@ class RefCountBitsT { constexpr RefCountBitsT(uint32_t strongExtraCount, uint32_t unownedCount) : bits((BitsType(strongExtraCount) << 
Offsets::StrongExtraRefCountShift) | + (BitsType(1) << Offsets::PureSwiftDeallocShift) | (BitsType(unownedCount) << Offsets::UnownedRefCountShift)) { } LLVM_ATTRIBUTE_ALWAYS_INLINE constexpr RefCountBitsT(Immortal_t immortal) - : bits((BitsType(2) << Offsets::StrongExtraRefCountShift) | - (BitsType(2) << Offsets::UnownedRefCountShift) | - (BitsType(1) << Offsets::IsImmortalShift) | - (BitsType(1) << Offsets::UseSlowRCShift)) + : bits((BitsType(2) << Offsets::StrongExtraRefCountShift) | + (BitsType(Offsets::IsImmortalMask)) | + (BitsType(1) << Offsets::UseSlowRCShift)) { } LLVM_ATTRIBUTE_ALWAYS_INLINE @@ -433,7 +475,7 @@ class RefCountBitsT { LLVM_ATTRIBUTE_ALWAYS_INLINE bool hasSideTable() const { - bool hasSide = getUseSlowRC() && !isImmortal(); + bool hasSide = getUseSlowRC() && !isImmortal(false); // Side table refcount must not point to another side table. assert((refcountIsInline || !hasSide) && @@ -523,7 +565,7 @@ class RefCountBitsT { LLVM_NODISCARD LLVM_ATTRIBUTE_ALWAYS_INLINE bool decrementStrongExtraRefCount(uint32_t dec) { #ifndef NDEBUG - if (!hasSideTable() && !isImmortal()) { + if (!hasSideTable() && !isImmortal(false)) { // Can't check these assertions with side table present. 
if (getIsDeiniting()) @@ -558,7 +600,7 @@ class RefCountBitsT { static_assert(Offsets::UnownedRefCountBitCount + Offsets::IsDeinitingBitCount + Offsets::StrongExtraRefCountBitCount + - Offsets::IsImmortalBitCount + + Offsets::PureSwiftDeallocBitCount + Offsets::UseSlowRCBitCount == sizeof(bits)*8, "inspect isUniquelyReferenced after adding fields"); @@ -715,7 +757,7 @@ class RefCounts { void setIsImmortal(bool immortal) { auto oldbits = refCounts.load(SWIFT_MEMORY_ORDER_CONSUME); - if (oldbits.isImmortal()) { + if (oldbits.isImmortal(true)) { return; } RefCountBits newbits; @@ -725,7 +767,28 @@ class RefCounts { } while (!refCounts.compare_exchange_weak(oldbits, newbits, std::memory_order_relaxed)); } - + + void setPureSwiftDeallocation(bool nonobjc) { + auto oldbits = refCounts.load(SWIFT_MEMORY_ORDER_CONSUME); + //Immortal and no objc complications share a bit, so don't let setting + //the complications one clear the immortal one + if (oldbits.isImmortal(true) || oldbits.pureSwiftDeallocation() == nonobjc){ + assert(!oldbits.hasSideTable()); + return; + } + RefCountBits newbits; + do { + newbits = oldbits; + newbits.setPureSwiftDeallocation(nonobjc); + } while (!refCounts.compare_exchange_weak(oldbits, newbits, + std::memory_order_relaxed)); + } + + bool getPureSwiftDeallocation() { + auto bits = refCounts.load(SWIFT_MEMORY_ORDER_CONSUME); + return bits.pureSwiftDeallocation(); + } + // Initialize from another refcount bits. // Only inline -> out-of-line is allowed (used for new side table entries). 
void init(InlineRefCountBits newBits) { @@ -740,7 +803,7 @@ class RefCounts { newbits = oldbits; bool fast = newbits.incrementStrongExtraRefCount(inc); if (SWIFT_UNLIKELY(!fast)) { - if (oldbits.isImmortal()) + if (oldbits.isImmortal(false)) return; return incrementSlow(oldbits, inc); } @@ -753,7 +816,7 @@ class RefCounts { auto newbits = oldbits; bool fast = newbits.incrementStrongExtraRefCount(inc); if (SWIFT_UNLIKELY(!fast)) { - if (oldbits.isImmortal()) + if (oldbits.isImmortal(false)) return; return incrementNonAtomicSlow(oldbits, inc); } @@ -771,7 +834,7 @@ class RefCounts { newbits = oldbits; bool fast = newbits.incrementStrongExtraRefCount(1); if (SWIFT_UNLIKELY(!fast)) { - if (oldbits.isImmortal()) + if (oldbits.isImmortal(false)) return true; return tryIncrementSlow(oldbits); } @@ -788,7 +851,7 @@ class RefCounts { auto newbits = oldbits; bool fast = newbits.incrementStrongExtraRefCount(1); if (SWIFT_UNLIKELY(!fast)) { - if (oldbits.isImmortal()) + if (oldbits.isImmortal(false)) return true; return tryIncrementNonAtomicSlow(oldbits); } @@ -824,7 +887,7 @@ class RefCounts { // Precondition: the reference count must be 1 void decrementFromOneNonAtomic() { auto bits = refCounts.load(SWIFT_MEMORY_ORDER_CONSUME); - if (bits.isImmortal()) { + if (bits.isImmortal(true)) { return; } if (bits.hasSideTable()) @@ -922,7 +985,7 @@ class RefCounts { // Decrement completed normally. New refcount is not zero. deinitNow = false; } - else if (oldbits.isImmortal()) { + else if (oldbits.isImmortal(false)) { return false; } else if (oldbits.hasSideTable()) { // Decrement failed because we're on some other slow path. @@ -961,7 +1024,7 @@ class RefCounts { // Decrement completed normally. New refcount is not zero. 
deinitNow = false; } - else if (oldbits.isImmortal()) { + else if (oldbits.isImmortal(false)) { return false; } else if (oldbits.hasSideTable()) { @@ -1001,7 +1064,7 @@ class RefCounts { bool fast = newbits.decrementStrongExtraRefCount(dec); if (SWIFT_UNLIKELY(!fast)) { - if (oldbits.isImmortal()) { + if (oldbits.isImmortal(false)) { return false; } // Slow paths include side table; deinit; underflow @@ -1025,7 +1088,7 @@ class RefCounts { // Increment the unowned reference count. void incrementUnowned(uint32_t inc) { auto oldbits = refCounts.load(SWIFT_MEMORY_ORDER_CONSUME); - if (oldbits.isImmortal()) + if (oldbits.isImmortal(true)) return; RefCountBits newbits; do { @@ -1037,7 +1100,7 @@ class RefCounts { uint32_t oldValue = newbits.incrementUnownedRefCount(inc); // Check overflow and use the side table on overflow. - if (newbits.getUnownedRefCount() != oldValue + inc) + if (newbits.isOverflowingUnownedRefCount(oldValue, inc)) return incrementUnownedSlow(inc); } while (!refCounts.compare_exchange_weak(oldbits, newbits, @@ -1046,7 +1109,7 @@ class RefCounts { void incrementUnownedNonAtomic(uint32_t inc) { auto oldbits = refCounts.load(SWIFT_MEMORY_ORDER_CONSUME); - if (oldbits.isImmortal()) + if (oldbits.isImmortal(true)) return; if (oldbits.hasSideTable()) return oldbits.getSideTable()->incrementUnownedNonAtomic(inc); @@ -1056,7 +1119,7 @@ class RefCounts { uint32_t oldValue = newbits.incrementUnownedRefCount(inc); // Check overflow and use the side table on overflow. - if (newbits.getUnownedRefCount() != oldValue + inc) + if (newbits.isOverflowingUnownedRefCount(oldValue, inc)) return incrementUnownedSlow(inc); refCounts.store(newbits, std::memory_order_relaxed); @@ -1066,7 +1129,7 @@ class RefCounts { // Return true if the caller should free the object. 
bool decrementUnownedShouldFree(uint32_t dec) { auto oldbits = refCounts.load(SWIFT_MEMORY_ORDER_CONSUME); - if (oldbits.isImmortal()) + if (oldbits.isImmortal(true)) return false; RefCountBits newbits; @@ -1094,7 +1157,7 @@ class RefCounts { bool decrementUnownedShouldFreeNonAtomic(uint32_t dec) { auto oldbits = refCounts.load(SWIFT_MEMORY_ORDER_CONSUME); - if (oldbits.isImmortal()) + if (oldbits.isImmortal(true)) return false; if (oldbits.hasSideTable()) return oldbits.getSideTable()->decrementUnownedShouldFreeNonAtomic(dec); @@ -1383,7 +1446,7 @@ inline bool RefCounts::doDecrementNonAtomic(uint32_t dec) { auto newbits = oldbits; bool fast = newbits.decrementStrongExtraRefCount(dec); if (!fast) { - if (oldbits.isImmortal()) { + if (oldbits.isImmortal(false)) { return false; } return doDecrementNonAtomicSlow(oldbits, dec); diff --git a/stdlib/public/runtime/HeapObject.cpp b/stdlib/public/runtime/HeapObject.cpp index 712c0afca7fc7..aa306722f3209 100644 --- a/stdlib/public/runtime/HeapObject.cpp +++ b/stdlib/public/runtime/HeapObject.cpp @@ -42,6 +42,8 @@ # include # include # include "swift/Runtime/ObjCBridge.h" +# include "swift/Runtime/Once.h" +# include #endif #include "Leaks.h" @@ -84,7 +86,6 @@ static inline bool isValidPointerForNativeRetain(const void *p) { return _ ## name ## _ args; \ } while(0) - static HeapObject *_swift_allocObject_(HeapMetadata const *metadata, size_t requiredSize, size_t requiredAlignmentMask) { @@ -609,13 +610,25 @@ void swift::swift_rootObjCDealloc(HeapObject *self) { } #endif +#if SWIFT_OBJC_INTEROP +static bool _check_fast_dealloc() { + //This will always be in libobjc, so RTLD_DEFAULT won't have to do an + //expensive search in practice + return dlsym(RTLD_DEFAULT, "_objc_has_weak_formation_callout") != nullptr; +} +#endif + void swift::swift_deallocClassInstance(HeapObject *object, size_t allocatedSize, size_t allocatedAlignMask) { + #if SWIFT_OBJC_INTEROP // We need to let the ObjC runtime clean up any associated objects or 
weak // references associated with this object. - objc_destructInstance((id)object); + const bool fastDeallocSupported = SWIFT_LAZY_CONSTANT(_check_fast_dealloc()); + if (!fastDeallocSupported || !object->refCounts.getPureSwiftDeallocation()) { + objc_destructInstance((id)object); + } #endif swift_deallocObject(object, allocatedSize, allocatedAlignMask); } diff --git a/stdlib/public/runtime/RefCount.cpp b/stdlib/public/runtime/RefCount.cpp index 4b3ca72a4e2fe..4dc7394f92540 100644 --- a/stdlib/public/runtime/RefCount.cpp +++ b/stdlib/public/runtime/RefCount.cpp @@ -17,7 +17,7 @@ namespace swift { template void RefCounts::incrementSlow(RefCountBits oldbits, uint32_t n) { - if (oldbits.isImmortal()) { + if (oldbits.isImmortal(false)) { return; } else if (oldbits.hasSideTable()) { @@ -36,7 +36,7 @@ template void RefCounts::incrementSlow(SideTableRefCountB template void RefCounts::incrementNonAtomicSlow(RefCountBits oldbits, uint32_t n) { - if (oldbits.isImmortal()) { + if (oldbits.isImmortal(false)) { return; } else if (oldbits.hasSideTable()) { @@ -52,7 +52,7 @@ template void RefCounts::incrementNonAtomicSlow(SideTable template bool RefCounts::tryIncrementSlow(RefCountBits oldbits) { - if (oldbits.isImmortal()) { + if (oldbits.isImmortal(false)) { return true; } else if (oldbits.hasSideTable()) @@ -65,7 +65,7 @@ template bool RefCounts::tryIncrementSlow(SideTableRefCou template bool RefCounts::tryIncrementNonAtomicSlow(RefCountBits oldbits) { - if (oldbits.isImmortal()) { + if (oldbits.isImmortal(false)) { return true; } else if (oldbits.hasSideTable()) diff --git a/stdlib/public/runtime/SwiftObject.mm b/stdlib/public/runtime/SwiftObject.mm index 6a2393e033e5e..3fe43c5e75598 100644 --- a/stdlib/public/runtime/SwiftObject.mm +++ b/stdlib/public/runtime/SwiftObject.mm @@ -264,33 +264,7 @@ - (void)doesNotRecognizeSelector: (SEL) sel { class_getName(cls), sel_getName(sel)); } -- (id)retain { - auto SELF = reinterpret_cast(self); - swift_retain(SELF); - return self; -} -- 
(void)release { - auto SELF = reinterpret_cast(self); - swift_release(SELF); -} -- (id)autorelease { - return _objc_rootAutorelease(self); -} -- (NSUInteger)retainCount { - return swift::swift_retainCount(reinterpret_cast(self)); -} -- (BOOL)_isDeallocating { - return swift_isDeallocating(reinterpret_cast(self)); -} -- (BOOL)_tryRetain { - return swift_tryRetain(reinterpret_cast(self)) != nullptr; -} -- (BOOL)allowsWeakReference { - return !swift_isDeallocating(reinterpret_cast(self)); -} -- (BOOL)retainWeakReference { - return swift_tryRetain(reinterpret_cast(self)) != nullptr; -} +STANDARD_OBJC_METHOD_IMPLS_FOR_SWIFT_OBJECTS // Retaining the class object itself is a no-op. + (id)retain { @@ -318,10 +292,6 @@ + (BOOL)retainWeakReference { return YES; } -- (void)dealloc { - swift_rootObjCDealloc(reinterpret_cast(self)); -} - - (BOOL)isKindOfClass:(Class)someClass { for (auto cls = _swift_getClassOfAllocated(self); cls != nullptr; cls = cls->Superclass) diff --git a/stdlib/public/stubs/SwiftNativeNSXXXBase.mm.gyb b/stdlib/public/stubs/SwiftNativeNSXXXBase.mm.gyb index 9016bb1c97d2b..89e389f8ba8bc 100644 --- a/stdlib/public/stubs/SwiftNativeNSXXXBase.mm.gyb +++ b/stdlib/public/stubs/SwiftNativeNSXXXBase.mm.gyb @@ -65,36 +65,6 @@ SWIFT_RUNTIME_STDLIB_API - (id)initWithCoder: (NSCoder *)coder { return [super init]; } -- (id)retain { - auto SELF = reinterpret_cast(self); - swift_retain(SELF); - return self; -} -- (oneway void)release { - auto SELF = reinterpret_cast(self); - swift_release(SELF); -} -- (id)autorelease { - return _objc_rootAutorelease(self); -} -- (NSUInteger)retainCount { - auto SELF = reinterpret_cast(self); - return swift_retainCount(SELF); -} - -- (BOOL)_tryRetain { - auto SELF = reinterpret_cast(self); - return (bool)swift_tryRetain(SELF); -} -- (BOOL)_isDeallocating { - return swift_isDeallocating(reinterpret_cast(self)); -} -- (BOOL)allowsWeakReference { - return !swift_isDeallocating(reinterpret_cast(self)); -} -- (BOOL)retainWeakReference { - 
return swift_tryRetain(reinterpret_cast(self)) != nullptr; -} + (BOOL)automaticallyNotifiesObserversForKey:(NSString *)key { return NO; @@ -102,9 +72,9 @@ SWIFT_RUNTIME_STDLIB_API #pragma clang diagnostic push #pragma clang diagnostic ignored "-Wobjc-missing-super-calls" -- (void)dealloc { - swift_rootObjCDealloc(reinterpret_cast(self)); -} + +STANDARD_OBJC_METHOD_IMPLS_FOR_SWIFT_OBJECTS + #pragma clang diagnostic pop @end diff --git a/test/Interpreter/SDK/Inputs/ObjCWeak/ObjCWeak.m b/test/Interpreter/SDK/Inputs/ObjCWeak/ObjCWeak.m index adcae83569c05..54294ad0a4281 100644 --- a/test/Interpreter/SDK/Inputs/ObjCWeak/ObjCWeak.m +++ b/test/Interpreter/SDK/Inputs/ObjCWeak/ObjCWeak.m @@ -1,16 +1,21 @@ #include "ObjCWeak.h" +#include + +extern id _Nullable +objc_initWeak(id _Nullable * _Nonnull location, id _Nullable val); void tryWeakReferencing(id (^makeThing)(void)) { id thingy; @autoreleasepool { - thingy = makeThing(); + thingy = [makeThing() retain]; } - - __weak id weakThingy = thingy; - + + id weakThingy = nil; + objc_initWeak(&weakThingy, thingy); + @autoreleasepool { fputs("before giving up strong reference:\n", stderr); - id x = weakThingy; + id x = objc_loadWeak(&weakThingy); if (x) { fputs([[x description] UTF8String], stderr); fputs("\n", stderr); @@ -18,12 +23,35 @@ void tryWeakReferencing(id (^makeThing)(void)) { fputs("Gone\n", stderr); } } - + + [thingy release]; thingy = nil; + for (int i = 0; i < 100; i++) { + @autoreleasepool { + id tmp = makeThing(); + id weakTmp = nil; + objc_initWeak(&weakTmp, tmp); + objc_loadWeak(&weakTmp); + objc_storeWeak(&weakTmp, nil); + } + } + @autoreleasepool { fputs("after giving up strong reference:\n", stderr); - id x = weakThingy; + id x = objc_loadWeak(&weakThingy); + if (x) { + fputs([[x description] UTF8String], stderr); + fputs("\n", stderr); + } else { + fputs("Gone\n", stderr); + } + } + objc_storeWeak(&weakThingy, nil); + + @autoreleasepool { + fputs("after giving up weak reference:\n", stderr); + id x = 
objc_loadWeak(&weakThingy); if (x) { fputs([[x description] UTF8String], stderr); fputs("\n", stderr); diff --git a/test/Interpreter/SDK/weak_objc_interop.swift b/test/Interpreter/SDK/weak_objc_interop.swift index cc85d7ac15f9d..cc567473f2339 100644 --- a/test/Interpreter/SDK/weak_objc_interop.swift +++ b/test/Interpreter/SDK/weak_objc_interop.swift @@ -1,7 +1,7 @@ // RUN: %empty-directory(%t) // // RUN: cp %s %t/main.swift -// RUN: %target-clang -fobjc-arc %S/Inputs/ObjCWeak/ObjCWeak.m -c -o %t/ObjCWeak.o +// RUN: %target-clang -fno-objc-arc %S/Inputs/ObjCWeak/ObjCWeak.m -c -o %t/ObjCWeak.o // RUN: %target-build-swift %t/main.swift -I %S/Inputs/ObjCWeak/ -Xlinker %t/ObjCWeak.o -o %t/weak_objc_interop -Xfrontend -disable-access-control // RUN: %target-codesign %t/weak_objc_interop // RUN: %target-run %t/weak_objc_interop 2>&1 | %FileCheck %s diff --git a/unittests/runtime/LongTests/LongRefcounting.cpp b/unittests/runtime/LongTests/LongRefcounting.cpp index 3b83f10c77e24..d3cd449e91bff 100644 --- a/unittests/runtime/LongTests/LongRefcounting.cpp +++ b/unittests/runtime/LongTests/LongRefcounting.cpp @@ -239,8 +239,8 @@ static void unownedReleaseALot(TestObject *object, uint64_t count) { } } -// Maximum legal unowned retain count. 31 bits with no implicit +1. -const uint64_t maxURC = (1ULL << (32 - 1)) - 1; +// Maximum legal unowned retain count. 31 bits minus one with no implicit +1. +const uint64_t maxURC = (1ULL << (32 - 1)) - 2; TEST(LongRefcountingTest, unowned_retain_max) { // Don't generate millions of failures if something goes wrong. @@ -282,7 +282,7 @@ TEST(LongRefcountingTest, unowned_retain_overflow_DeathTest) { auto object = allocTestObject(&deinited, 1); // URC is 1. Retain to maxURC, then retain again and verify overflow error. 
- unownedRetainALot(object, maxURC - 1); + unownedRetainALot(object, maxURC); EXPECT_EQ(0u, deinited); EXPECT_ALLOCATED(object); ASSERT_DEATH(swift_unownedRetain(object), @@ -329,7 +329,7 @@ TEST(LongRefcountingTest, nonatomic_unowned_retain_overflow_DeathTest) { auto object = allocTestObject(&deinited, 1); // URC is 1. Retain to maxURC, then retain again and verify overflow error. - unownedRetainALot(object, maxURC - 1); + unownedRetainALot(object, maxURC); EXPECT_EQ(0u, deinited); EXPECT_ALLOCATED(object); ASSERT_DEATH(swift_nonatomic_unownedRetain(object),