From 4733da2744e4a92827c5ca8e9ec60abe7bb571a1 Mon Sep 17 00:00:00 2001 From: Alejandro Alonso Date: Wed, 13 Dec 2023 14:10:07 -0800 Subject: [PATCH 01/10] Performance improvements for reading keypaths --- stdlib/public/core/KeyPath.swift | 492 +++++++++++++++++++++---------- 1 file changed, 330 insertions(+), 162 deletions(-) diff --git a/stdlib/public/core/KeyPath.swift b/stdlib/public/core/KeyPath.swift index f1ffe8bd28d2d..64beee58db22d 100644 --- a/stdlib/public/core/KeyPath.swift +++ b/stdlib/public/core/KeyPath.swift @@ -68,52 +68,51 @@ public class AnyKeyPath: _AppendKeyPath { TODO: Find a better method of refactoring this variable if possible. */ - func assignOffsetToStorage(offset: Int) { + final func assignOffsetToStorage(offset: Int) { let maximumOffsetOn32BitArchitecture = 4094 guard offset >= 0 else { return } - // TODO: This just gets the architecture size (32 or 64 bits). - // Is there a more efficient way? Something in Builtin maybe? - let architectureSize = MemoryLayout.size - if architectureSize == 8 { - _kvcKeyPathStringPtr = UnsafePointer(bitPattern: -offset - 1) - } - else { - if offset <= maximumOffsetOn32BitArchitecture { - _kvcKeyPathStringPtr = UnsafePointer(bitPattern: (offset + 1)) - } - else { - _kvcKeyPathStringPtr = nil - } + +#if _pointerBitWidth(_64) + _kvcKeyPathStringPtr = UnsafePointer(bitPattern: -offset - 1) +#elseif _pointerBitWidth(_32) + if offset <= maximumOffsetOn32BitArchitecture { + _kvcKeyPathStringPtr = UnsafePointer(bitPattern: (offset + 1)) + } else { + _kvcKeyPathStringPtr = nil } +#else +#error("Unsupported platform") +#endif } - func getOffsetFromStorage() -> Int? { + final func getOffsetFromStorage() -> Int? { let maximumOffsetOn32BitArchitecture = 4094 guard _kvcKeyPathStringPtr != nil else { return nil } - let architectureSize = MemoryLayout.size - if architectureSize == 8 { - let offset = -Int(bitPattern: _kvcKeyPathStringPtr) - 1 - guard offset >= 0 else { - // This happens to be an actual _kvcKeyPathStringPtr, not an offset, if we get here. - return nil - } - return offset - } - else { - let offset = Int(bitPattern: _kvcKeyPathStringPtr) - 1 - // Pointers above 0x7fffffff will come in as negative numbers which are - // less than maximumOffsetOn32BitArchitecture, be sure to reject them. - if (offset >= 0 && offset <= maximumOffsetOn32BitArchitecture) { - return offset - } +#if _pointerBitWidth(_64) + let offset = (0 &- Int(bitPattern: _kvcKeyPathStringPtr)) &- 1 + guard _fastPath(offset >= 0) else { + // This happens to be an actual _kvcKeyPathStringPtr, not an offset, if + // we get here. return nil } + return offset +#elseif _pointerBitWidth(_32) + let offset = Int(bitPattern: _kvcKeyPathStringPtr) &- 1 + // Pointers above 0x7fffffff will come in as negative numbers which are + // less than maximumOffsetOn32BitArchitecture, be sure to reject them. + if offset >= 0, offset <= maximumOffsetOn32BitArchitecture { + return offset + } + return nil +#else +#error("Unsupported platform") +#endif } // SPI for the Foundation overlay to allow interop with KVC keypath-based @@ -330,63 +329,106 @@ public class KeyPath: PartialKeyPath { @usableFromInline @_unavailableInEmbedded internal final func _projectReadOnly(from root: Root) -> Value { - + let (rootType, valueType) = Self._rootAndValueType + // One performance improvement is to skip right to Value // if this keypath traverses through structs only. 
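Illustrative aside (hypothetical helpers, not part of the patch): the assignOffsetToStorage/getOffsetFromStorage methods above smuggle a stored-property offset into the KVC string pointer's bit pattern, using negative bit patterns on 64-bit and offset + 1 (up to 4094) on 32-bit. A minimal standalone sketch of the same encoding, using a runtime pointer-width check instead of the build configuration:

func encodeOffset(_ offset: Int) -> UnsafePointer<CChar>? {
  guard offset >= 0 else { return nil }
  if MemoryLayout<UnsafeRawPointer>.size == 8 {
    // 64-bit: a negative bit pattern can never be a real pointer, so
    // -offset - 1 stays distinguishable from a genuine KVC string pointer.
    return UnsafePointer(bitPattern: -offset - 1)
  } else {
    // 32-bit: only small offsets (<= 4094) are encoded; larger ones fall
    // back to "no KVC string".
    return offset <= 4094 ? UnsafePointer(bitPattern: offset + 1) : nil
  }
}

func decodeOffset(_ ptr: UnsafePointer<CChar>?) -> Int? {
  guard let ptr = ptr else { return nil }
  if MemoryLayout<UnsafeRawPointer>.size == 8 {
    let offset = -Int(bitPattern: ptr) - 1
    return offset >= 0 ? offset : nil  // negative: it was a real string pointer
  } else {
    let offset = Int(bitPattern: ptr) - 1
    return (offset >= 0 && offset <= 4094) ? offset : nil
  }
}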
if let offset = getOffsetFromStorage() { - return withUnsafeBytes(of: root) { - let pointer = $0.baseAddress.unsafelyUnwrapped.advanced(by: offset) + return _withUnprotectedUnsafeBytes(of: root) { + let pointer = $0.baseAddress._unsafelyUnwrappedUnchecked + offset return pointer.assumingMemoryBound(to: Value.self).pointee } } - - // TODO: For perf, we could use a local growable buffer instead of Any - var curBase: Any = root + + var currentType = rootType + return withBuffer { var buffer = $0 - if buffer.data.isEmpty { - return unsafeBitCast(root, to: Value.self) + + if _slowPath(buffer.data.isEmpty) { + return Builtin.reinterpretCast(root) } - while true { - let (rawComponent, optNextType) = buffer.next() - let valueType = optNextType ?? Value.self - let isLast = optNextType == nil - - func project(_ base: CurValue) -> Value? { - func project2(_: NewValue.Type) -> Value? { - switch rawComponent._projectReadOnly(base, - to: NewValue.self, endingWith: Value.self) { - case .continue(let newBase): + + let bufferPtr = buffer.data.baseAddress._unsafelyUnwrappedUnchecked + let endOfBuffer = bufferPtr + buffer.data.count + let maxSize = Int(truncatingIfNeeded: + endOfBuffer.load(as: UInt32.self) + ) + let roundedMaxSize = 1 &<< (Int.bitWidth &- maxSize.leadingZeroBitCount) + + // 16 is the max alignment allowed on practically every platform we deploy + // to. + return _withUnprotectedUnsafeTemporaryAllocation( + byteCount: roundedMaxSize, + alignment: 16 + ) { + let currentValueBuffer = $0 + + currentValueBuffer.withMemoryRebound(to: Root.self) { + $0.initializeElement(at: 0, to: root) + } + + while true { + let (rawComponent, optNextType) = buffer.next() + let newType = optNextType ?? valueType + let isLast = optNextType == nil + + func projectCurrent(_: Current.Type) -> Value { + func projectNew(_: New) -> Value { + var isBreak = false + + let newBase = currentValueBuffer.withMemoryRebound( + to: Current.self + ) { + return rawComponent._projectReadOnly( + $0[0], + to: New.self, + endingWith: Value.self, + &isBreak + ) + } + + // If we've broken from the projection, it means we found nil + // while optional chaining. + guard _fastPath(!isBreak) else { + var value: Value = Builtin.zeroInitializer() + + // Optional.none has a tag of 1 + let tag: UInt32 = 1 + Builtin.injectEnumTag(&value, tag._value) + + return value + } + if isLast { - _internalInvariant(NewValue.self == Value.self, - "key path does not terminate in correct type") - return unsafeBitCast(newBase, to: Value.self) - } else { - curBase = newBase - return nil + _internalInvariant( + New.self == Value.self, + "key path does not terminate in correct type" + ) + + return Builtin.reinterpretCast(newBase) as Value } - case .break(let result): - return result - } - } - return _openExistential(valueType, do: project2) - } + currentValueBuffer.withMemoryRebound(to: Current.self) { + $0.deinitializeElement(at: 0) + } + + currentValueBuffer.withMemoryRebound(to: New.self) { + $0.initializeElement(at: 0, to: newBase) + } - let result = _openExistential(curBase, do: project) + currentType = newType + return Builtin.reinterpretCast(newBase) as Value + } - if let result = result { - return result - } + return _openExistential(newType, do: projectNew(_:)) + } + + let result = _openExistential(currentType, do: projectCurrent(_:)) - // Note: This should never be taken. The only time this will occur is if - // the API keypath is referencing has a nullability violation. 
- // In certain cases, `Value` has the same layout as `Value?` and the - // "nullptr" representation of `Value` is represented as `nil` for - // `Value?`. If we're returning a `Value`, but manage to get its `nil` - // representation, then the above check will fail. - if _slowPath(isLast) { - _preconditionFailure("Could not resolve KeyPath") + if isLast { + return result + } } } } @@ -486,23 +528,78 @@ public class ReferenceWritableKeyPath< var keepAlive: AnyObject? let address: UnsafeMutablePointer = withBuffer { var buffer = $0 + // Project out the reference prefix. - var base: Any = origBase - while buffer.hasReferencePrefix { - let (rawComponent, optNextType) = buffer.next() - _internalInvariant(optNextType != nil, + + let bufferPtr = buffer.data.baseAddress._unsafelyUnwrappedUnchecked + let endOfBuffer = bufferPtr + buffer.data.count + let maxSize = Int(truncatingIfNeeded: + endOfBuffer.load(as: UInt32.self) + ) + let roundedMaxSize = 1 &<< (Int.bitWidth &- maxSize.leadingZeroBitCount) + + // 16 is the max alignment allowed on practically every platform we deploy + // to. + let base: Any = _withUnprotectedUnsafeTemporaryAllocation( + byteCount: roundedMaxSize, + alignment: 16 + ) { + var currentType: Any.Type = Root.self + let currentValueBuffer = $0 + + currentValueBuffer.withMemoryRebound(to: Root.self) { + $0.initializeElement(at: 0, to: origBase) + } + + while buffer.hasReferencePrefix { + let (rawComponent, optNextType) = buffer.next() + _internalInvariant(optNextType != nil, "reference prefix should not go to end of buffer") - let nextType = optNextType.unsafelyUnwrapped - - func project(_: NewValue.Type) -> Any { - func project2(_ base: CurValue) -> Any { - return rawComponent._projectReadOnly( - base, to: NewValue.self, endingWith: Value.self) - .assumingContinue + let nextType = optNextType._unsafelyUnwrappedUnchecked + + func projectNew(_: New.Type) { + func projectCurrent(_: Current.Type) { + var isBreak = false + + let newBase = currentValueBuffer.withMemoryRebound( + to: Current.self + ) { + return rawComponent._projectReadOnly( + $0[0], + to: New.self, + endingWith: Value.self, + &isBreak + ) + } + + guard _fastPath(!isBreak) else { + _internalInvariantFailure("should not have stopped key path projection") + } + + currentValueBuffer.withMemoryRebound(to: Current.self) { + $0.deinitializeElement(at: 0) + } + + currentValueBuffer.withMemoryRebound(to: New.self) { + $0.initializeElement(at: 0, to: newBase) + } + + currentType = nextType + } + + _openExistential(currentType, do: projectCurrent(_:)) + } + + _openExistential(nextType, do: projectNew(_:)) + } + + func projectCurrent(_: Current.Type) -> Any { + return currentValueBuffer.withMemoryRebound(to: Current.self) { + $0[0] } - return _openExistential(base, do: project2) } - base = _openExistential(nextType, do: project) + + return _openExistential(currentType, do: projectCurrent(_:)) } // Start formal access to the mutable value, based on the final base @@ -535,7 +632,7 @@ public class ReferenceWritableKeyPath< return UnsafeMutablePointer(mutating: typedPointer) } } - return _openExistential(base, do: formalMutation) + return _openExistential(base, do: formalMutation(_:)) } return (address, keepAlive) @@ -1044,10 +1141,10 @@ internal struct RawKeyPathComponent { internal var discriminator: UInt32 { get { - return (_value & Header.discriminatorMask) >> Header.discriminatorShift + return (_value & Header.discriminatorMask) &>> Header.discriminatorShift } set { - let shifted = newValue << Header.discriminatorShift + let 
shifted = newValue &<< Header.discriminatorShift _internalInvariant(shifted & Header.discriminatorMask == shifted, "discriminator doesn't fit") _value = _value & ~Header.discriminatorMask | shifted @@ -1276,7 +1373,7 @@ internal struct RawKeyPathComponent { // The component header is 4 bytes, but may be followed by an aligned // pointer field for some kinds of component, forcing padding. internal static var pointerAlignmentSkew: Int { - return MemoryLayout.size - MemoryLayout.size + return MemoryLayout.size &- MemoryLayout.size } internal var isTrivialPropertyDescriptor: Bool { @@ -1313,20 +1410,20 @@ internal struct RawKeyPathComponent { // The body holds a pointer to the external property descriptor, // and some number of substitution arguments, the count of which is // in the payload. - return 4 * (1 + Int(payload)) + return 4 &* (1 &+ Int(payload)) case .computed: // The body holds at minimum the id and getter. var size = 8 // If settable, it also holds the setter. if isComputedSettable { - size += 4 + size &+= 4 } // If there are arguments, there's also a layout function, // witness table, and initializer function. // Property descriptors never carry argument information, though. if !forPropertyDescriptor && hasComputedArguments { - size += 12 + size &+= 12 } return size @@ -1422,19 +1519,19 @@ internal struct RawKeyPathComponent { return 0 case .computed: // align to pointer, minimum two pointers for id and get - var total = Header.pointerAlignmentSkew + ptrSize * 2 + var total = Header.pointerAlignmentSkew &+ ptrSize &* 2 // additional word for a setter if header.isComputedSettable { - total += ptrSize + total &+= ptrSize } // include the argument size if header.hasComputedArguments { // two words for argument header: size, witnesses - total += ptrSize * 2 + total &+= ptrSize &* 2 // size of argument area - total += _computedArgumentSize + total &+= _computedArgumentSize if header.isComputedInstantiatedFromExternalWithArguments { - total += Header.externalWithArgumentsExtraSize + total &+= Header.externalWithArgumentsExtraSize } } return total @@ -1450,9 +1547,9 @@ internal struct RawKeyPathComponent { // Offset overflowed into body _internalInvariant(body.count >= MemoryLayout.size, "component not big enough") - return Int(body.load(as: UInt32.self)) + return Int(truncatingIfNeeded: body.load(as: UInt32.self)) } - return Int(header.storedOffsetPayload) + return Int(truncatingIfNeeded: header.storedOffsetPayload) } internal var _computedIDValue: Int { @@ -1477,16 +1574,16 @@ internal struct RawKeyPathComponent { return ComputedAccessorsPtr( header: header, - value: body.baseAddress.unsafelyUnwrapped + + value: body.baseAddress._unsafelyUnwrappedUnchecked + Header.pointerAlignmentSkew + MemoryLayout.size) } internal var _computedArgumentHeaderPointer: UnsafeRawPointer { _internalInvariant(header.hasComputedArguments, "no arguments") - return body.baseAddress.unsafelyUnwrapped + return body.baseAddress._unsafelyUnwrappedUnchecked + Header.pointerAlignmentSkew - + MemoryLayout.size * + + MemoryLayout.size &* (header.isComputedSettable ? 
3 : 2) } @@ -1501,7 +1598,7 @@ internal struct RawKeyPathComponent { } internal var _computedArguments: UnsafeRawPointer { - var base = _computedArgumentHeaderPointer + MemoryLayout.size * 2 + var base = _computedArgumentHeaderPointer + MemoryLayout.size &* 2 // If the component was instantiated from an external property descriptor // with its own arguments, we include some additional capture info to // be able to map to the original argument context by adjusting the size @@ -1517,7 +1614,7 @@ internal struct RawKeyPathComponent { internal var _computedArgumentWitnessSizeAdjustment: Int { if header.isComputedInstantiatedFromExternalWithArguments { return _computedArguments.load( - fromByteOffset: -Header.externalWithArgumentsExtraSize, + fromByteOffset: 0 &- Header.externalWithArgumentsExtraSize, as: Int.self) } return 0 @@ -1586,7 +1683,7 @@ internal struct RawKeyPathComponent { if header.hasComputedArguments, let destructor = _computedArgumentWitnesses.destroy { destructor(_computedMutableArguments, - _computedArgumentSize - _computedArgumentWitnessSizeAdjustment) + _computedArgumentSize &- _computedArgumentWitnessSizeAdjustment) } case .external: _internalInvariantFailure("should have been instantiated away") @@ -1705,22 +1802,25 @@ internal struct RawKeyPathComponent { internal func _projectReadOnly( _ base: CurValue, to: NewValue.Type, - endingWith: LeafValue.Type - ) -> ProjectionResult { + endingWith: LeafValue.Type, + _ isBreak: inout Bool + ) -> NewValue { switch value { case .struct(let offset): - var base2 = base - return .continue(withUnsafeBytes(of: &base2) { - let p = $0.baseAddress.unsafelyUnwrapped.advanced(by: offset) + let newValue = _withUnprotectedUnsafeBytes(of: base) { + let p = $0.baseAddress._unsafelyUnwrappedUnchecked + offset + // The contents of the struct should be well-typed, so we can assume // typed memory here. return p.assumingMemoryBound(to: NewValue.self).pointee - }) + } + + return newValue case .class(let offset): _internalInvariant(CurValue.self is AnyObject.Type, "base is not a class") - let baseObj = unsafeBitCast(base, to: AnyObject.self) + let baseObj: AnyObject = Builtin.reinterpretCast(base) let basePtr = UnsafeRawPointer(Builtin.bridgeToRawPointer(baseObj)) defer { _fixLifetime(baseObj) } @@ -1731,40 +1831,65 @@ internal struct RawKeyPathComponent { // 'modify' access. Builtin.performInstantaneousReadAccess(offsetAddress._rawValue, NewValue.self) - return .continue(offsetAddress - .assumingMemoryBound(to: NewValue.self) - .pointee) + return offsetAddress.assumingMemoryBound(to: NewValue.self).pointee case .get(id: _, accessors: let accessors, argument: let argument), .mutatingGetSet(id: _, accessors: let accessors, argument: let argument), .nonmutatingGetSet(id: _, accessors: let accessors, argument: let argument): - return .continue(accessors.getter()(base, - argument?.data.baseAddress ?? accessors._value, - argument?.data.count ?? 0)) + let getter: ComputedAccessorsPtr.Getter = accessors.getter() + + let newValue = getter( + base, + argument?.data.baseAddress ?? accessors._value, + argument?.data.count ?? 0 + ) + + return newValue case .optionalChain: _internalInvariant(CurValue.self == Optional.self, "should be unwrapping optional value") _internalInvariant(_isOptional(LeafValue.self), "leaf result should be optional") - if let baseValue = unsafeBitCast(base, to: Optional.self) { - return .continue(baseValue) - } else { - // TODO: A more efficient way of getting the `none` representation - // of a dynamically-optional type... 
- return .break((Optional<()>.none as Any) as! LeafValue) + + // Optional's tags are some = 0, none = 1 + let tag = UInt32(Builtin.getEnumTag(base)) + + if _fastPath(tag == 0) { + // Optional "shares" a layout with its Wrapped type meaning we can + // reinterpret the base address as an address to its Wrapped value. + return Builtin.reinterpretCast(base) } + // We found nil. + isBreak = true + + // Return some zeroed out value for NewValue if we break. The caller will + // handle returning nil. We do this to prevent allocating metadata in this + // function because returning something like 'NewValue?' would need to + // allocate the optional metadata for 'NewValue'. + return Builtin.zeroInitializer() + case .optionalForce: _internalInvariant(CurValue.self == Optional.self, "should be unwrapping optional value") - return .continue(unsafeBitCast(base, to: Optional.self)!) + + // Optional's tags are some = 0, none = 1 + let tag = UInt32(Builtin.getEnumTag(base)) + + if _fastPath(tag == 0) { + // Optional "shares" a layout with its Wrapped type meaning we can + // reinterpret the base address as an address to its Wrapped value. + return Builtin.reinterpretCast(base) + } + + _internalInvariantFailure("unwrapped nil optional") case .optionalWrap: _internalInvariant(NewValue.self == Optional.self, "should be wrapping optional value") - return .continue( - unsafeBitCast(base as Optional, to: NewValue.self)) + + return Builtin.reinterpretCast(base) } } @@ -1858,7 +1983,7 @@ internal struct RawKeyPathComponent { internal func _pop(from: inout UnsafeRawBufferPointer, as type: T.Type) -> T { let buffer = _pop(from: &from, as: type, count: 1) - return buffer.baseAddress.unsafelyUnwrapped.pointee + return buffer.baseAddress._unsafelyUnwrappedUnchecked.pointee } internal func _pop(from: inout UnsafeRawBufferPointer, as: T.Type, @@ -1866,11 +1991,11 @@ internal func _pop(from: inout UnsafeRawBufferPointer, from = MemoryLayout._roundingUpBaseToAlignment(from) let byteCount = MemoryLayout.stride * count let result = UnsafeBufferPointer( - start: from.baseAddress.unsafelyUnwrapped.assumingMemoryBound(to: T.self), + start: from.baseAddress._unsafelyUnwrappedUnchecked.assumingMemoryBound(to: T.self), count: count) from = UnsafeRawBufferPointer( - start: from.baseAddress.unsafelyUnwrapped + byteCount, + start: from.baseAddress._unsafelyUnwrappedUnchecked + byteCount, count: from.count - byteCount) return result } @@ -1909,7 +2034,7 @@ internal struct KeyPathBuffer { } internal mutating func pushRaw(size: Int, alignment: Int) -> UnsafeMutableRawBufferPointer { - var baseAddress = buffer.baseAddress.unsafelyUnwrapped + var baseAddress = buffer.baseAddress._unsafelyUnwrappedUnchecked var misalign = Int(bitPattern: baseAddress) & (alignment - 1) if misalign != 0 { misalign = alignment - misalign @@ -2506,7 +2631,7 @@ internal func _appendingKeyPaths< // Remember where the tail-allocated KVC string buffer begins. 
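Illustrative aside (hypothetical code, not from the patch): the _pop(from:as:) helpers above implement a simple cursor over the raw key path buffer, reading one aligned value and rebasing the buffer past it. A condensed sketch of that pattern; the real helper also rounds the cursor up to the value's alignment first:

func pop<T>(_ cursor: inout UnsafeRawBufferPointer, as _: T.Type) -> T {
  // Assumes the cursor is already aligned for T and holds at least one T.
  let value = cursor.baseAddress!.assumingMemoryBound(to: T.self).pointee
  cursor = UnsafeRawBufferPointer(rebasing: cursor[MemoryLayout<T>.stride...])
  return value
}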
if appendedKVCLength > 0 { - kvcStringBuffer = destBuffer.baseAddress.unsafelyUnwrapped + kvcStringBuffer = destBuffer.baseAddress._unsafelyUnwrappedUnchecked .advanced(by: resultSize) destBuffer = .init(start: destBuffer.baseAddress, @@ -2577,8 +2702,8 @@ internal func _appendingKeyPaths< if root.getOffsetFromStorage() == nil, leaf.getOffsetFromStorage() == nil { if let kvcStringBuffer = kvcStringBuffer { - let rootPtr = root._kvcKeyPathStringPtr.unsafelyUnwrapped - let leafPtr = leaf._kvcKeyPathStringPtr.unsafelyUnwrapped + let rootPtr = root._kvcKeyPathStringPtr._unsafelyUnwrappedUnchecked + let leafPtr = leaf._kvcKeyPathStringPtr._unsafelyUnwrappedUnchecked _memcpy( dest: kvcStringBuffer, src: rootPtr, @@ -2684,19 +2809,22 @@ public func _swift_getKeyPath(pattern: UnsafeMutableRawPointer, // Instantiate a new key path object modeled on the pattern. // Do a pass to determine the class of the key path we'll be instantiating // and how much space we'll need for it. - let (keyPathClass, rootType, size, _) + let (keyPathClass, rootType, size, sizeWithMaxSize, _) = _getKeyPathClassAndInstanceSizeFromPattern(patternPtr, arguments) var pureStructOffset: UInt32? = nil // Allocate the instance. - let instance = keyPathClass._create(capacityInBytes: size) { instanceData in + let instance = keyPathClass._create( + capacityInBytes: sizeWithMaxSize + ) { instanceData in // Instantiate the pattern into the instance. pureStructOffset = _instantiateKeyPathBuffer( patternPtr, instanceData, rootType, - arguments + arguments, + size ) } @@ -2914,7 +3042,7 @@ internal func _resolveRelativeAddress(_ base: UnsafeRawPointer, _ offset: Int32) -> UnsafeRawPointer { // Sign-extend the offset to pointer width and add with wrap on overflow. return UnsafeRawPointer(bitPattern: Int(bitPattern: base) &+ Int(offset)) - .unsafelyUnwrapped + ._unsafelyUnwrappedUnchecked } internal func _resolveRelativeIndirectableAddress(_ base: UnsafeRawPointer, _ offset: Int32) @@ -2930,7 +3058,7 @@ internal func _resolveRelativeIndirectableAddress(_ base: UnsafeRawPointer, internal func _resolveCompactFunctionPointer(_ base: UnsafeRawPointer, _ offset: Int32) -> UnsafeRawPointer { #if SWIFT_COMPACT_ABSOLUTE_FUNCTION_POINTER - return UnsafeRawPointer(bitPattern: Int(offset)).unsafelyUnwrapped + return UnsafeRawPointer(bitPattern: Int(offset))._unsafelyUnwrappedUnchecked #else return _resolveRelativeAddress(base, offset) #endif @@ -2977,7 +3105,7 @@ internal func _walkKeyPathPattern( offset = .unresolvedFieldOffset(_pop(from: &componentBuffer, as: UInt32.self)) case RawKeyPathComponent.Header.unresolvedIndirectOffsetPayload: - let base = componentBuffer.baseAddress.unsafelyUnwrapped + let base = componentBuffer.baseAddress._unsafelyUnwrappedUnchecked let relativeOffset = _pop(from: &componentBuffer, as: Int32.self) let ptr = _resolveRelativeIndirectableAddress(base, relativeOffset) @@ -2999,14 +3127,14 @@ internal func _walkKeyPathPattern( idValue: Int32, getter: UnsafeRawPointer, setter: UnsafeRawPointer?) { - let idValueBase = componentBuffer.baseAddress.unsafelyUnwrapped + let idValueBase = componentBuffer.baseAddress._unsafelyUnwrappedUnchecked let idValue = _pop(from: &componentBuffer, as: Int32.self) - let getterBase = componentBuffer.baseAddress.unsafelyUnwrapped + let getterBase = componentBuffer.baseAddress._unsafelyUnwrappedUnchecked let getterRef = _pop(from: &componentBuffer, as: Int32.self) let getter = _resolveCompactFunctionPointer(getterBase, getterRef) let setter: UnsafeRawPointer? 
if header.isComputedSettable { - let setterBase = componentBuffer.baseAddress.unsafelyUnwrapped + let setterBase = componentBuffer.baseAddress._unsafelyUnwrappedUnchecked let setterRef = _pop(from: &componentBuffer, as: Int32.self) setter = _resolveCompactFunctionPointer(setterBase, setterRef) } else { @@ -3020,7 +3148,7 @@ internal func _walkKeyPathPattern( componentBuffer: inout UnsafeRawBufferPointer) -> KeyPathPatternComputedArguments? { if header.hasComputedArguments { - let getLayoutBase = componentBuffer.baseAddress.unsafelyUnwrapped + let getLayoutBase = componentBuffer.baseAddress._unsafelyUnwrappedUnchecked let getLayoutRef = _pop(from: &componentBuffer, as: Int32.self) let getLayoutRaw = _resolveCompactFunctionPointer(getLayoutBase, getLayoutRef) let getLayoutSigned = _PtrAuth.sign(pointer: getLayoutRaw, @@ -3029,7 +3157,7 @@ internal func _walkKeyPathPattern( let getLayout = unsafeBitCast(getLayoutSigned, to: KeyPathComputedArgumentLayoutFn.self) - let witnessesBase = componentBuffer.baseAddress.unsafelyUnwrapped + let witnessesBase = componentBuffer.baseAddress._unsafelyUnwrappedUnchecked let witnessesRef = _pop(from: &componentBuffer, as: Int32.self) let witnesses: UnsafeRawPointer if witnessesRef == 0 { @@ -3038,7 +3166,7 @@ internal func _walkKeyPathPattern( witnesses = _resolveRelativeAddress(witnessesBase, witnessesRef) } - let initializerBase = componentBuffer.baseAddress.unsafelyUnwrapped + let initializerBase = componentBuffer.baseAddress._unsafelyUnwrappedUnchecked let initializerRef = _pop(from: &componentBuffer, as: Int32.self) let initializerRaw = _resolveCompactFunctionPointer(initializerBase, initializerRef) @@ -3111,7 +3239,7 @@ internal func _walkKeyPathPattern( // Look at the external property descriptor to see if we should take it // over the component given in the pattern. let genericParamCount = Int(header.payload) - let descriptorBase = buffer.baseAddress.unsafelyUnwrapped + let descriptorBase = buffer.baseAddress._unsafelyUnwrappedUnchecked let descriptorOffset = _pop(from: &buffer, as: Int32.self) let descriptor = @@ -3221,7 +3349,7 @@ internal func _walkKeyPathPattern( // Otherwise, pop the intermediate component type accessor and // go around again. - let componentTypeBase = buffer.baseAddress.unsafelyUnwrapped + let componentTypeBase = buffer.baseAddress._unsafelyUnwrappedUnchecked let componentTypeOffset = _pop(from: &buffer, as: Int32.self) let componentTypeRef = _resolveRelativeAddress(componentTypeBase, componentTypeOffset) @@ -3237,7 +3365,10 @@ internal func _walkKeyPathPattern( @_unavailableInEmbedded internal struct GetKeyPathClassAndInstanceSizeFromPattern : KeyPathPatternVisitor { - var size: Int = MemoryLayout.size // start with one word for the header + // start with one word for the header + var size: Int = MemoryLayout.size + var sizeWithMaxSize: Int = 0 + var capability: KeyPathKind = .value var didChain: Bool = false var root: Any.Type! 
@@ -3405,6 +3536,9 @@ internal struct GetKeyPathClassAndInstanceSizeFromPattern } mutating func finish() { + sizeWithMaxSize = size + roundUpToPointerAlignment() + sizeWithMaxSize &+= MemoryLayout.size } } @@ -3416,6 +3550,7 @@ internal func _getKeyPathClassAndInstanceSizeFromPattern( keyPathClass: AnyKeyPath.Type, rootType: Any.Type, size: Int, + sizeWithMaxSize: Int, alignmentMask: Int ) { var walker = GetKeyPathClassAndInstanceSizeFromPattern(patternArgs: arguments) @@ -3445,10 +3580,15 @@ internal func _getKeyPathClassAndInstanceSizeFromPattern( return (keyPathClass: classTy, rootType: walker.root!, size: walker.size, + sizeWithMaxSize: walker.sizeWithMaxSize, // FIXME: Handle overalignment alignmentMask: MemoryLayout._alignmentMask) } +internal func _getTypeSize(_: Type.Type) -> Int { + MemoryLayout.size +} + @_unavailableInEmbedded internal struct InstantiateKeyPathBuffer: KeyPathPatternVisitor { var destData: UnsafeMutableRawBufferPointer @@ -3457,6 +3597,7 @@ internal struct InstantiateKeyPathBuffer: KeyPathPatternVisitor { var base: Any.Type var structOffset: UInt32 = 0 var isPureStruct: [Bool] = [] + var maxSize: Int = 0 init(destData: UnsafeMutableRawBufferPointer, patternArgs: UnsafeRawPointer?, @@ -3464,6 +3605,17 @@ internal struct InstantiateKeyPathBuffer: KeyPathPatternVisitor { self.destData = destData self.patternArgs = patternArgs self.base = root + + // FIXME: This will not work on arm64e. + let metadataPtr = unsafeBitCast(root, to: UnsafeRawPointer.self) + let vwtPtr = metadataPtr.load( + fromByteOffset: 0 &- MemoryLayout.size, + as: UnsafeRawPointer.self + ) + self.maxSize = vwtPtr.load(fromByteOffset: 0x40, as: Int.self) + + // FIXME: The following doesn't work as it crashes the compiler in IRGen. + //self.maxSize = _openExistential(root, do: _getTypeSize(_:)) } // Track the triviality of the resulting object data. @@ -3478,7 +3630,7 @@ internal struct InstantiateKeyPathBuffer: KeyPathPatternVisitor { misalign: Int ) { let alignment = MemoryLayout.alignment - var baseAddress = destData.baseAddress.unsafelyUnwrapped + var baseAddress = destData.baseAddress._unsafelyUnwrappedUnchecked var misalign = Int(bitPattern: baseAddress) & (alignment - 1) if misalign != 0 { misalign = alignment - misalign @@ -3490,7 +3642,7 @@ internal struct InstantiateKeyPathBuffer: KeyPathPatternVisitor { let size = MemoryLayout.size let (baseAddress, misalign) = adjustDestForAlignment(of: T.self) _withUnprotectedUnsafeBytes(of: value) { - _memcpy(dest: baseAddress, src: $0.baseAddress.unsafelyUnwrapped, + _memcpy(dest: baseAddress, src: $0.baseAddress._unsafelyUnwrappedUnchecked, size: UInt(size)) } destData = UnsafeMutableRawBufferPointer( @@ -3513,7 +3665,7 @@ internal struct InstantiateKeyPathBuffer: KeyPathPatternVisitor { mutating func updatePreviousComponentAddr() -> UnsafeMutableRawPointer? 
{ let oldValue = previousComponentAddr - previousComponentAddr = destData.baseAddress.unsafelyUnwrapped + previousComponentAddr = destData.baseAddress._unsafelyUnwrappedUnchecked return oldValue } @@ -3709,10 +3861,10 @@ internal struct InstantiateKeyPathBuffer: KeyPathPatternVisitor { _internalInvariant(Int(bitPattern: destData.baseAddress) & alignmentMask == 0, "argument destination not aligned") arguments.initializer(patternArgs, - destData.baseAddress.unsafelyUnwrapped) + destData.baseAddress._unsafelyUnwrappedUnchecked) destData = UnsafeMutableRawBufferPointer( - start: destData.baseAddress.unsafelyUnwrapped + baseSize, + start: destData.baseAddress._unsafelyUnwrappedUnchecked + baseSize, count: destData.count - baseSize) } @@ -3729,7 +3881,7 @@ internal struct InstantiateKeyPathBuffer: KeyPathPatternVisitor { // Write the descriptor's generic arguments, which should all be relative // references to metadata accessor functions. for i in externalArgs.indices { - let base = externalArgs.baseAddress.unsafelyUnwrapped + i + let base = externalArgs.baseAddress._unsafelyUnwrappedUnchecked + i let offset = base.pointee let metadataRef = _resolveRelativeAddress(UnsafeRawPointer(base), offset) let result = _resolveKeyPathGenericArgReference( @@ -3768,9 +3920,24 @@ internal struct InstantiateKeyPathBuffer: KeyPathPatternVisitor { arguments: patternArgs) pushDest(metadata) base = metadata + + // FIXME: This will not work on arm64e. + let metadataPtr = unsafeBitCast(metadata, to: UnsafeRawPointer.self) + let vwtPtr = metadataPtr.load( + fromByteOffset: 0 &- MemoryLayout.size, + as: UnsafeRawPointer.self + ) + let size = vwtPtr.load(fromByteOffset: 0x40, as: Int.self) + //let size = _openExistential(metadata, do: _getTypeSize(_:)) + + maxSize = Swift.max(maxSize, size) } mutating func finish() { + // Finally, push our max size at the end of the buffer (and round up if + // necessary). + pushDest(maxSize) + // Should have filled the entire buffer by the time we reach the end of the // pattern. _internalInvariant(destData.isEmpty, @@ -3793,7 +3960,7 @@ internal struct ValidatingInstantiateKeyPathBuffer: KeyPathPatternVisitor { instantiateVisitor: InstantiateKeyPathBuffer) { self.sizeVisitor = sizeVisitor self.instantiateVisitor = instantiateVisitor - origDest = self.instantiateVisitor.destData.baseAddress.unsafelyUnwrapped + origDest = self.instantiateVisitor.destData.baseAddress._unsafelyUnwrappedUnchecked } mutating func visitHeader(genericEnvironment: UnsafeRawPointer?, @@ -3886,7 +4053,7 @@ internal struct ValidatingInstantiateKeyPathBuffer: KeyPathPatternVisitor { } func checkSizeConsistency() { - let nextDest = instantiateVisitor.destData.baseAddress.unsafelyUnwrapped + let nextDest = instantiateVisitor.destData.baseAddress._unsafelyUnwrappedUnchecked let curSize = nextDest - origDest + MemoryLayout.size _internalInvariant(curSize == sizeVisitor.size, @@ -3900,12 +4067,13 @@ internal func _instantiateKeyPathBuffer( _ pattern: UnsafeRawPointer, _ origDestData: UnsafeMutableRawBufferPointer, _ rootType: Any.Type, - _ arguments: UnsafeRawPointer + _ arguments: UnsafeRawPointer, + _ sizeBeforeMaxSize: Int ) -> UInt32? 
{ - let destHeaderPtr = origDestData.baseAddress.unsafelyUnwrapped + let destHeaderPtr = origDestData.baseAddress._unsafelyUnwrappedUnchecked var destData = UnsafeMutableRawBufferPointer( start: destHeaderPtr.advanced(by: MemoryLayout.size), - count: origDestData.count - MemoryLayout.size) + count: origDestData.count &- MemoryLayout.size) #if INTERNAL_CHECKS_ENABLED // If checks are enabled, use a validating walker that ensures that the @@ -3939,7 +4107,7 @@ internal func _instantiateKeyPathBuffer( // Write out the header. let destHeader = KeyPathBuffer.Header( - size: origDestData.count - MemoryLayout.size, + size: sizeBeforeMaxSize &- MemoryLayout.size, trivial: isTrivial, hasReferencePrefix: endOfReferencePrefixComponent != nil) From 00ace6695fcc9207b21986ee4dd4922e44c639fe Mon Sep 17 00:00:00 2001 From: Alejandro Alonso Date: Thu, 14 Dec 2023 09:13:03 -0800 Subject: [PATCH 02/10] Round up sizeWithMaxSize --- stdlib/public/core/KeyPath.swift | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/stdlib/public/core/KeyPath.swift b/stdlib/public/core/KeyPath.swift index 64beee58db22d..d47ac9d2b991f 100644 --- a/stdlib/public/core/KeyPath.swift +++ b/stdlib/public/core/KeyPath.swift @@ -3537,7 +3537,7 @@ internal struct GetKeyPathClassAndInstanceSizeFromPattern mutating func finish() { sizeWithMaxSize = size - roundUpToPointerAlignment() + sizeWithMaxSize = MemoryLayout._roundingUpToAlignment(sizeWithMaxSize) sizeWithMaxSize &+= MemoryLayout.size } } From 6fdb684fd2ed5c61c0f2db763cbb2479c1fe8e42 Mon Sep 17 00:00:00 2001 From: Alejandro Alonso Date: Fri, 15 Dec 2023 16:30:02 -0800 Subject: [PATCH 03/10] Optimization for single component and fix tests --- .../public/SwiftShims/swift/shims/KeyPath.h | 4 +- stdlib/public/core/KeyPath.swift | 187 ++++++++++++------ stdlib/public/core/ReflectionMirror.swift | 3 +- 3 files changed, 133 insertions(+), 61 deletions(-) diff --git a/stdlib/public/SwiftShims/swift/shims/KeyPath.h b/stdlib/public/SwiftShims/swift/shims/KeyPath.h index 17e9adbc165b0..e33772188fcc7 100644 --- a/stdlib/public/SwiftShims/swift/shims/KeyPath.h +++ b/stdlib/public/SwiftShims/swift/shims/KeyPath.h @@ -32,8 +32,10 @@ static const __swift_uint32_t _SwiftKeyPathBufferHeader_TrivialFlag = 0x80000000U; static const __swift_uint32_t _SwiftKeyPathBufferHeader_HasReferencePrefixFlag = 0x40000000U; +static const __swift_uint32_t _SwiftKeyPathBufferHeader_IsSingleComponentFlag + = 0x20000000U; static const __swift_uint32_t _SwiftKeyPathBufferHeader_ReservedMask - = 0x3F000000U; + = 0x1F000000U; // Bitfields for a key path component header. 
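Illustrative aside on the new header flag above: the key path buffer header is a single UInt32 whose low bits hold the component-data size and whose top bits hold the flags, now including the single-component bit. A hypothetical standalone sketch of that packing, mirroring the Swift-side Header type later in this patch; the size mask value here is an assumption, only the three flag constants appear in the patch:

struct KeyPathBufferHeaderBits {
  static let trivialFlag: UInt32            = 0x8000_0000
  static let hasReferencePrefixFlag: UInt32 = 0x4000_0000
  static let isSingleComponentFlag: UInt32  = 0x2000_0000
  static let sizeMask: UInt32               = 0x00FF_FFFF  // assumed value

  var bits: UInt32

  init(size: Int, trivial: Bool, hasReferencePrefix: Bool, isSingleComponent: Bool) {
    precondition(UInt32(size) & ~Self.sizeMask == 0, "key path too big")
    bits = UInt32(size)
      | (trivial ? Self.trivialFlag : 0)
      | (hasReferencePrefix ? Self.hasReferencePrefixFlag : 0)
      | (isSingleComponent ? Self.isSingleComponentFlag : 0)
  }

  var size: Int { Int(bits & Self.sizeMask) }
  var isSingleComponent: Bool { bits & Self.isSingleComponentFlag != 0 }
}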
diff --git a/stdlib/public/core/KeyPath.swift b/stdlib/public/core/KeyPath.swift index d47ac9d2b991f..1ac09e11dbbc1 100644 --- a/stdlib/public/core/KeyPath.swift +++ b/stdlib/public/core/KeyPath.swift @@ -349,11 +349,23 @@ public class KeyPath: PartialKeyPath { return Builtin.reinterpretCast(root) } + if _fastPath(buffer.isSingleComponent) { + var isBreak = false + let (rawComponent, _) = buffer.next() + + return rawComponent._projectReadOnly( + root, + to: Value.self, + endingWith: Value.self, + &isBreak + ) + } + let bufferPtr = buffer.data.baseAddress._unsafelyUnwrappedUnchecked - let endOfBuffer = bufferPtr + buffer.data.count - let maxSize = Int(truncatingIfNeeded: - endOfBuffer.load(as: UInt32.self) + let endOfBuffer = MemoryLayout._roundingUpToAlignment( + bufferPtr + buffer.data.count ) + let maxSize = endOfBuffer.load(as: Int.self) let roundedMaxSize = 1 &<< (Int.bitWidth &- maxSize.leadingZeroBitCount) // 16 is the max alignment allowed on practically every platform we deploy @@ -372,16 +384,15 @@ public class KeyPath: PartialKeyPath { let (rawComponent, optNextType) = buffer.next() let newType = optNextType ?? valueType let isLast = optNextType == nil + var isBreak = false - func projectCurrent(_: Current.Type) -> Value { - func projectNew(_: New) -> Value { - var isBreak = false - + func projectCurrent(_: Current.Type) { + func projectNew(_: New.Type) { let newBase = currentValueBuffer.withMemoryRebound( to: Current.self ) { return rawComponent._projectReadOnly( - $0[0], + $0.moveElement(from: 0), to: New.self, endingWith: Value.self, &isBreak @@ -397,20 +408,11 @@ public class KeyPath: PartialKeyPath { let tag: UInt32 = 1 Builtin.injectEnumTag(&value, tag._value) - return value - } - - if isLast { - _internalInvariant( - New.self == Value.self, - "key path does not terminate in correct type" - ) - - return Builtin.reinterpretCast(newBase) as Value - } + currentValueBuffer.withMemoryRebound(to: Value.self) { + $0.initializeElement(at: 0, to: value) + } - currentValueBuffer.withMemoryRebound(to: Current.self) { - $0.deinitializeElement(at: 0) + return } currentValueBuffer.withMemoryRebound(to: New.self) { @@ -418,16 +420,24 @@ public class KeyPath: PartialKeyPath { } currentType = newType - return Builtin.reinterpretCast(newBase) as Value + + if isLast { + _internalInvariant( + New.self == Value.self, + "key path does not terminate in correct type" + ) + } } - return _openExistential(newType, do: projectNew(_:)) + _openExistential(newType, do: projectNew(_:)) } - let result = _openExistential(currentType, do: projectCurrent(_:)) + _openExistential(currentType, do: projectCurrent(_:)) - if isLast { - return result + if isLast || isBreak { + return currentValueBuffer.withMemoryRebound(to: Value.self) { + $0.moveElement(from: 0) + } } } } @@ -532,10 +542,10 @@ public class ReferenceWritableKeyPath< // Project out the reference prefix. 
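Illustrative aside (hypothetical names, not stdlib API): the rewritten projection above keeps the intermediate value in a caller-owned scratch buffer that is rebound to each component's type in turn, instead of boxing it in Any. A sketch of one step of that pattern, built from the same public buffer APIs the patch relies on (withMemoryRebound, moveElement, initializeElement); returning false models the optional-chaining "found nil" case that the patch signals through isBreak:

func projectionStep<Current, New>(
  in scratch: UnsafeMutableRawBufferPointer,
  from _: Current.Type,
  to _: New.Type,
  applying body: (Current) -> New?
) -> Bool {
  // scratch must be large enough and suitably aligned for both Current and
  // New, and must currently hold an initialized Current at index 0.
  let current = scratch.withMemoryRebound(to: Current.self) {
    $0.moveElement(from: 0)
  }
  guard let next = body(current) else { return false }  // stop: nil was found
  scratch.withMemoryRebound(to: New.self) {
    $0.initializeElement(at: 0, to: next)
  }
  return true
}

The callers above size that scratch from the key path's recorded maxSize, rounded up, via the temporary-allocation helpers.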
let bufferPtr = buffer.data.baseAddress._unsafelyUnwrappedUnchecked - let endOfBuffer = bufferPtr + buffer.data.count - let maxSize = Int(truncatingIfNeeded: - endOfBuffer.load(as: UInt32.self) + let endOfBuffer = MemoryLayout._roundingUpToAlignment( + bufferPtr + buffer.data.count ) + let maxSize = endOfBuffer.load(as: Int.self) let roundedMaxSize = 1 &<< (Int.bitWidth &- maxSize.leadingZeroBitCount) // 16 is the max alignment allowed on practically every platform we deploy @@ -565,7 +575,7 @@ public class ReferenceWritableKeyPath< to: Current.self ) { return rawComponent._projectReadOnly( - $0[0], + $0.moveElement(from: 0), to: New.self, endingWith: Value.self, &isBreak @@ -573,11 +583,7 @@ public class ReferenceWritableKeyPath< } guard _fastPath(!isBreak) else { - _internalInvariantFailure("should not have stopped key path projection") - } - - currentValueBuffer.withMemoryRebound(to: Current.self) { - $0.deinitializeElement(at: 0) + _preconditionFailure("should not have stopped key path projection") } currentValueBuffer.withMemoryRebound(to: New.self) { @@ -595,7 +601,7 @@ public class ReferenceWritableKeyPath< func projectCurrent(_: Current.Type) -> Any { return currentValueBuffer.withMemoryRebound(to: Current.self) { - $0[0] + $0.moveElement(from: 0) } } @@ -1883,13 +1889,18 @@ internal struct RawKeyPathComponent { return Builtin.reinterpretCast(base) } - _internalInvariantFailure("unwrapped nil optional") + _preconditionFailure("unwrapped nil optional") case .optionalWrap: _internalInvariant(NewValue.self == Optional.self, "should be wrapping optional value") - return Builtin.reinterpretCast(base) + var new: NewValue = Builtin.reinterpretCast(base) + + let tag: UInt32 = 0 + Builtin.injectEnumTag(&new, tag._value) + + return new } } @@ -2005,6 +2016,7 @@ internal struct KeyPathBuffer { internal var data: UnsafeRawBufferPointer internal var trivial: Bool internal var hasReferencePrefix: Bool + internal var isSingleComponent: Bool internal init(base: UnsafeRawPointer) { let header = base.load(as: Header.self) @@ -2013,14 +2025,17 @@ internal struct KeyPathBuffer { count: header.size) trivial = header.trivial hasReferencePrefix = header.hasReferencePrefix + isSingleComponent = header.isSingleComponent } internal init(partialData: UnsafeRawBufferPointer, trivial: Bool = false, - hasReferencePrefix: Bool = false) { + hasReferencePrefix: Bool = false, + isSingleComponent: Bool = false) { self.data = partialData self.trivial = trivial self.hasReferencePrefix = hasReferencePrefix + self.isSingleComponent = isSingleComponent } internal var mutableData: UnsafeMutableRawBufferPointer { @@ -2064,11 +2079,17 @@ internal struct KeyPathBuffer { internal struct Header { internal var _value: UInt32 - internal init(size: Int, trivial: Bool, hasReferencePrefix: Bool) { + internal init( + size: Int, + trivial: Bool, + hasReferencePrefix: Bool, + isSingleComponent: Bool + ) { _internalInvariant(size <= Int(Header.sizeMask), "key path too big") _value = UInt32(size) | (trivial ? Header.trivialFlag : 0) | (hasReferencePrefix ? Header.hasReferencePrefixFlag : 0) + | (isSingleComponent ? 
Header.isSingleComponentFlag : 0) } internal static var sizeMask: UInt32 { @@ -2083,6 +2104,9 @@ internal struct KeyPathBuffer { internal static var hasReferencePrefixFlag: UInt32 { return _SwiftKeyPathBufferHeader_HasReferencePrefixFlag } + internal static var isSingleComponentFlag: UInt32 { + return _SwiftKeyPathBufferHeader_IsSingleComponentFlag + } internal var size: Int { return Int(_value & Header.sizeMask) } internal var trivial: Bool { return _value & Header.trivialFlag != 0 } @@ -2098,6 +2122,19 @@ internal struct KeyPathBuffer { } } } + internal var isSingleComponent: Bool { + get { + return _value & Header.isSingleComponentFlag != 0 + } + + set { + if newValue { + _value |= Header.isSingleComponentFlag + } else { + _value &= ~Header.isSingleComponentFlag + } + } + } // In a key path pattern, the "trivial" flag is used to indicate // "instantiable in-line" @@ -2583,7 +2620,7 @@ internal func _appendingKeyPaths< leaf: KeyPath ) -> Result { let resultTy = type(of: root).appendedType(with: type(of: leaf)) - var returnValue: AnyKeyPath = root.withBuffer { + var returnValue: AnyKeyPath = root.withBuffer { var rootBuffer = $0 return leaf.withBuffer { var leafBuffer = $0 @@ -2618,9 +2655,13 @@ internal func _appendingKeyPaths< // header, plus space for the middle type. // Align up the root so that we can put the component type after it. let rootSize = MemoryLayout._roundingUpToAlignment(rootBuffer.data.count) - let resultSize = rootSize + leafBuffer.data.count + var resultSize = rootSize + leafBuffer.data.count + 2 * MemoryLayout.size - // Tail-allocate space for the KVC string. + let componentSize = resultSize + // The first tail allocated member is the maxSize of the keypath. + resultSize = MemoryLayout._roundingUpToAlignment(resultSize) + resultSize += MemoryLayout.size + // Immediately following is the tail-allocated space for the KVC string. let totalResultSize = MemoryLayout ._roundingUpToAlignment(resultSize + appendedKVCLength) @@ -2643,16 +2684,23 @@ internal func _appendingKeyPaths< // Save space for the header. let leafIsReferenceWritable = type(of: leaf).kind == .reference destBuilder.pushHeader(KeyPathBuffer.Header( - size: resultSize - MemoryLayout.size, + size: componentSize - MemoryLayout.size, trivial: rootBuffer.trivial && leafBuffer.trivial, hasReferencePrefix: rootBuffer.hasReferencePrefix - || leafIsReferenceWritable + || leafIsReferenceWritable, + isSingleComponent: rootBuffer.isSingleComponent != + leafBuffer.isSingleComponent )) let leafHasReferencePrefix = leafBuffer.hasReferencePrefix - + + let rootBufferPtr = rootBuffer.data.baseAddress._unsafelyUnwrappedUnchecked + let rootEndOfBuffer = MemoryLayout._roundingUpToAlignment( + rootBufferPtr + rootBuffer.data.count + ) + let rootMaxSize = rootEndOfBuffer.load(as: Int.self) + // Clone the root components into the buffer. - while true { let (component, type) = rootBuffer.next() let isLast = type == nil @@ -2678,7 +2726,13 @@ internal func _appendingKeyPaths< break } } - + + let leafBufferPtr = leafBuffer.data.baseAddress._unsafelyUnwrappedUnchecked + let leafEndOfBuffer = MemoryLayout._roundingUpToAlignment( + leafBufferPtr + leafBuffer.data.count + ) + let leafMaxSize = leafEndOfBuffer.load(as: Int.self) + // Clone the leaf components into the buffer. while true { let (component, type) = leafBuffer.next() @@ -2693,7 +2747,10 @@ internal func _appendingKeyPaths< break } } - + + // Append our max size at the end of the buffer before the kvc string. 
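Illustrative aside (helper name assumed): the appended buffer laid out above holds the header word, the root components padded so the middle type can follow, the leaf components, padding to Int alignment, the trailing Int maxSize, and finally the optional KVC string. A sketch of the resulting capacity computation, mirroring the code above:

func appendedBufferCapacity(rootBytes: Int, leafBytes: Int, kvcLength: Int) -> Int {
  func roundUp(_ n: Int, to alignment: Int) -> Int {
    (n + alignment - 1) & ~(alignment - 1)
  }
  // Both key paths' components plus two pointer-sized words for the header
  // and the middle type, with the root padded so the middle type is aligned.
  let rootSize = roundUp(rootBytes, to: MemoryLayout<Int>.alignment)
  var size = rootSize + leafBytes + 2 * MemoryLayout<Int>.size
  // Tail-allocate the Int maxSize after the components...
  size = roundUp(size, to: MemoryLayout<Int>.alignment)
  size += MemoryLayout<Int>.size
  // ...then the KVC string, rounded up for 4-byte access.
  return roundUp(size + kvcLength, to: MemoryLayout<Int32>.alignment)
}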
+ destBuilder.push(Swift.max(rootMaxSize, leafMaxSize)) + _internalInvariant(destBuilder.buffer.isEmpty, "did not fill entire result buffer") } @@ -2792,7 +2849,9 @@ public func _swift_getKeyPath(pattern: UnsafeMutableRawPointer, // This is a non-atomic load because the instantiated pointer will be // written with a release barrier, and loads of the instantiated key path // ought to carry a dependency through this loaded pointer. - let existingInstance = theOncePtr.load(as: UnsafeRawPointer?.self) + let existingInstance = UnsafeRawPointer( + bitPattern: UInt(Builtin.atomicload_acquire_Word(theOncePtr._rawValue)) + ) if let existingInstance = existingInstance { // Return the instantiated object at +1. @@ -2813,7 +2872,7 @@ public func _swift_getKeyPath(pattern: UnsafeMutableRawPointer, = _getKeyPathClassAndInstanceSizeFromPattern(patternPtr, arguments) var pureStructOffset: UInt32? = nil - + // Allocate the instance. let instance = keyPathClass._create( capacityInBytes: sizeWithMaxSize @@ -2850,7 +2909,7 @@ public func _swift_getKeyPath(pattern: UnsafeMutableRawPointer, let instancePtr = Unmanaged.passRetained(instance) while true { - let (oldValue, won) = Builtin.cmpxchg_seqcst_seqcst_Word( + let (oldValue, won) = Builtin.cmpxchg_release_monotonic_Word( oncePtr._rawValue, 0._builtinWordValue, UInt(bitPattern: instancePtr.toOpaque())._builtinWordValue) @@ -4056,7 +4115,7 @@ internal struct ValidatingInstantiateKeyPathBuffer: KeyPathPatternVisitor { let nextDest = instantiateVisitor.destData.baseAddress._unsafelyUnwrappedUnchecked let curSize = nextDest - origDest + MemoryLayout.size - _internalInvariant(curSize == sizeVisitor.size, + _internalInvariant(curSize == sizeVisitor.sizeWithMaxSize, "size and instantiation visitors out of sync") } } @@ -4109,7 +4168,9 @@ internal func _instantiateKeyPathBuffer( let destHeader = KeyPathBuffer.Header( size: sizeBeforeMaxSize &- MemoryLayout.size, trivial: isTrivial, - hasReferencePrefix: endOfReferencePrefixComponent != nil) + hasReferencePrefix: endOfReferencePrefixComponent != nil, + isSingleComponent: walker.isPureStruct.count == 1 + ) destHeaderPtr.storeBytes(of: destHeader, as: KeyPathBuffer.Header.self) @@ -4129,8 +4190,9 @@ internal func _instantiateKeyPathBuffer( } if isPureStruct { - offset = walker.structOffset + offset = walker.structOffset } + return offset } @@ -4160,7 +4222,8 @@ public func _createOffsetBasedKeyPath( let header = KeyPathBuffer.Header( size: kpBufferSize - MemoryLayout.size, trivial: true, - hasReferencePrefix: false + hasReferencePrefix: false, + isSingleComponent: true ) builder.pushHeader(header) @@ -4192,8 +4255,13 @@ public func _rerootKeyPath( _ existingKp: AnyKeyPath, to newRoot: NewRoot.Type ) -> PartialKeyPath { - let (isTrivial, hasReferencePrefix, componentSize) = existingKp.withBuffer { - ($0.trivial, $0.hasReferencePrefix, $0.data.count) + let ( + isTrivial, + hasReferencePrefix, + isSingleComponent, + componentSize + ) = existingKp.withBuffer { + ($0.trivial, $0.hasReferencePrefix, $0.isSingleComponent, $0.data.count) } let existingKpTy = type(of: existingKp) @@ -4222,7 +4290,8 @@ public func _rerootKeyPath( let header = KeyPathBuffer.Header( size: componentSize, trivial: isTrivial, - hasReferencePrefix: hasReferencePrefix + hasReferencePrefix: hasReferencePrefix, + isSingleComponent: isSingleComponent ) builder.pushHeader(header) diff --git a/stdlib/public/core/ReflectionMirror.swift b/stdlib/public/core/ReflectionMirror.swift index aa702fd012947..3ea429fe20d59 100644 --- 
a/stdlib/public/core/ReflectionMirror.swift +++ b/stdlib/public/core/ReflectionMirror.swift @@ -362,7 +362,8 @@ public func _forEachFieldWithKeyPath( destBuilder.pushHeader(KeyPathBuffer.Header( size: resultSize - MemoryLayout.size, trivial: true, - hasReferencePrefix: false + hasReferencePrefix: false, + isSingleComponent: true )) let component = RawKeyPathComponent( header: RawKeyPathComponent.Header(stored: .struct, From 25a4bbe5493a67454b91d99db88b85b18657d650 Mon Sep 17 00:00:00 2001 From: Alejandro Alonso Date: Sat, 16 Dec 2023 13:10:53 -0800 Subject: [PATCH 04/10] Check maxSize on finish --- stdlib/public/core/KeyPath.swift | 12 +++++++++--- 1 file changed, 9 insertions(+), 3 deletions(-) diff --git a/stdlib/public/core/KeyPath.swift b/stdlib/public/core/KeyPath.swift index 1ac09e11dbbc1..8acfdf03fa3d6 100644 --- a/stdlib/public/core/KeyPath.swift +++ b/stdlib/public/core/KeyPath.swift @@ -4108,14 +4108,20 @@ internal struct ValidatingInstantiateKeyPathBuffer: KeyPathPatternVisitor { sizeVisitor.finish() instantiateVisitor.finish() isPureStruct.append(contentsOf: instantiateVisitor.isPureStruct) - checkSizeConsistency() + checkSizeConsistency(checkMaxSize: true) } - func checkSizeConsistency() { + func checkSizeConsistency(checkMaxSize: Bool = false) { let nextDest = instantiateVisitor.destData.baseAddress._unsafelyUnwrappedUnchecked let curSize = nextDest - origDest + MemoryLayout.size - _internalInvariant(curSize == sizeVisitor.sizeWithMaxSize, + let sizeVisitorSize = if checkMaxSize { + sizeVisitor.sizeWithMaxSize + } else { + sizeVisitor.size + } + + _internalInvariant(curSize == sizeVisitorSize, "size and instantiation visitors out of sync") } } From 581376e86e6b6b33fea26368098e67a4551d231c Mon Sep 17 00:00:00 2001 From: Alejandro Alonso Date: Tue, 19 Dec 2023 15:21:34 -0500 Subject: [PATCH 05/10] Move variable into closure --- stdlib/public/core/KeyPath.swift | 4 ++-- stdlib/public/core/TemporaryAllocation.swift | 9 +++++---- .../core/UnsafeRawBufferPointer.swift.gyb | 20 ++++++++++++++++++- 3 files changed, 26 insertions(+), 7 deletions(-) diff --git a/stdlib/public/core/KeyPath.swift b/stdlib/public/core/KeyPath.swift index 8acfdf03fa3d6..de1e0ab9de5b3 100644 --- a/stdlib/public/core/KeyPath.swift +++ b/stdlib/public/core/KeyPath.swift @@ -340,8 +340,6 @@ public class KeyPath: PartialKeyPath { } } - var currentType = rootType - return withBuffer { var buffer = $0 @@ -380,6 +378,8 @@ public class KeyPath: PartialKeyPath { $0.initializeElement(at: 0, to: root) } + var currentType = rootType + while true { let (rawComponent, optNextType) = buffer.next() let newType = optNextType ?? valueType diff --git a/stdlib/public/core/TemporaryAllocation.swift b/stdlib/public/core/TemporaryAllocation.swift index 0f7b6f2539c14..acb60219ff765 100644 --- a/stdlib/public/core/TemporaryAllocation.swift +++ b/stdlib/public/core/TemporaryAllocation.swift @@ -84,7 +84,7 @@ internal func _isStackAllocationSafe(byteCount: Int, alignment: Int) -> Bool { // without worrying about running out of space, and the compiler would emit // such allocations on the stack anyway when they represent structures or // stack-promoted objects. - if byteCount <= 1024 { + if _fastPath(byteCount <= 1024) { return true } @@ -178,7 +178,8 @@ internal func _withUnprotectedUnsafeTemporaryAllocation< _ body: (Builtin.RawPointer) throws -> R ) rethrows -> R { // How many bytes do we need to allocate? 
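Illustrative aside: the _withUnprotectedUnsafeTemporaryAllocation entry points above treat small byte counts (up to 1024, now wrapped in _fastPath) as always safe to put on the stack and otherwise may fall back to a heap allocation. The documented public counterpart behaves the same way from the caller's point of view; a hypothetical usage sketch with an assumed size:

let scratchBytes = 256  // e.g. a key path's rounded-up maxSize (assumed value)
let bytesTouched = withUnsafeTemporaryAllocation(
  byteCount: scratchBytes,
  alignment: 16
) { buffer -> Int in
  // Use `buffer` as scratch space for intermediate values here.
  return buffer.count
}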
- let byteCount = _byteCountForTemporaryAllocation(of: type, capacity: capacity) + //let byteCount = _byteCountForTemporaryAllocation(of: type, capacity: capacity) + let byteCount = MemoryLayout.stride &* capacity guard _isStackAllocationSafe(byteCount: byteCount, alignment: alignment) else { return try _fallBackToHeapAllocation(byteCount: byteCount, alignment: alignment, body) @@ -294,7 +295,7 @@ public func _withUnprotectedUnsafeTemporaryAllocation( alignment: alignment ) { pointer in let buffer = UnsafeMutableRawBufferPointer( - start: .init(pointer), + _uncheckedStart: .init(pointer), count: byteCount ) return try body(buffer) @@ -374,7 +375,7 @@ public func _withUnprotectedUnsafeTemporaryAllocation< ) { pointer in Builtin.bindMemory(pointer, capacity._builtinWordValue, type) let buffer = UnsafeMutableBufferPointer( - start: .init(pointer), + _uncheckedStart: .init(pointer), count: capacity ) return try body(buffer) diff --git a/stdlib/public/core/UnsafeRawBufferPointer.swift.gyb b/stdlib/public/core/UnsafeRawBufferPointer.swift.gyb index f46ad1ac24d46..101c6729c8830 100644 --- a/stdlib/public/core/UnsafeRawBufferPointer.swift.gyb +++ b/stdlib/public/core/UnsafeRawBufferPointer.swift.gyb @@ -116,6 +116,21 @@ public struct Unsafe${Mutable}RawBufferPointer { _debugPrecondition(count >= 0, "${Self} with negative count") _debugPrecondition(count == 0 || start != nil, "${Self} has a nil start and nonzero count") + + self.init(_uncheckedStart: start, count: count) + } + + @_alwaysEmitIntoClient + internal init( + _uncheckedStart start: Unsafe${Mutable}RawPointer?, + count: Int + ) { + _internalInvariant(count >= 0, "${Self} with negative count") + _internalInvariant( + count == 0 || start != nil, + "${Self} has a nil start and nonzero count" + ) + _position = start _end = start.map { $0 + _assumeNonNegative(count) } } @@ -1382,7 +1397,10 @@ public func _withUnprotectedUnsafeBytes< #else let addr = UnsafeRawPointer(Builtin.addressOfBorrow(value)) #endif - let buffer = UnsafeRawBufferPointer(start: addr, count: MemoryLayout.size) + let buffer = UnsafeRawBufferPointer( + _uncheckedStart: addr, + count: MemoryLayout.size + ) return try body(buffer) } From 02401d4bb04beb2e2ab71ac1e5a97b6bc3628f12 Mon Sep 17 00:00:00 2001 From: Alejandro Alonso Date: Tue, 20 Feb 2024 11:01:00 -0800 Subject: [PATCH 06/10] Remove some workarounds --- stdlib/public/core/KeyPath.swift | 20 ++----------------- stdlib/public/core/TemporaryAllocation.swift | 7 +++---- .../core/UnsafeRawBufferPointer.swift.gyb | 19 +----------------- 3 files changed, 6 insertions(+), 40 deletions(-) diff --git a/stdlib/public/core/KeyPath.swift b/stdlib/public/core/KeyPath.swift index de1e0ab9de5b3..3df6e9ad49bf2 100644 --- a/stdlib/public/core/KeyPath.swift +++ b/stdlib/public/core/KeyPath.swift @@ -3665,16 +3665,7 @@ internal struct InstantiateKeyPathBuffer: KeyPathPatternVisitor { self.patternArgs = patternArgs self.base = root - // FIXME: This will not work on arm64e. - let metadataPtr = unsafeBitCast(root, to: UnsafeRawPointer.self) - let vwtPtr = metadataPtr.load( - fromByteOffset: 0 &- MemoryLayout.size, - as: UnsafeRawPointer.self - ) - self.maxSize = vwtPtr.load(fromByteOffset: 0x40, as: Int.self) - - // FIXME: The following doesn't work as it crashes the compiler in IRGen. - //self.maxSize = _openExistential(root, do: _getTypeSize(_:)) + self.maxSize = _openExistential(root, do: _getTypeSize(_:)) } // Track the triviality of the resulting object data. 
@@ -3980,14 +3971,7 @@ internal struct InstantiateKeyPathBuffer: KeyPathPatternVisitor { pushDest(metadata) base = metadata - // FIXME: This will not work on arm64e. - let metadataPtr = unsafeBitCast(metadata, to: UnsafeRawPointer.self) - let vwtPtr = metadataPtr.load( - fromByteOffset: 0 &- MemoryLayout.size, - as: UnsafeRawPointer.self - ) - let size = vwtPtr.load(fromByteOffset: 0x40, as: Int.self) - //let size = _openExistential(metadata, do: _getTypeSize(_:)) + let size = _openExistential(metadata, do: _getTypeSize(_:)) maxSize = Swift.max(maxSize, size) } diff --git a/stdlib/public/core/TemporaryAllocation.swift b/stdlib/public/core/TemporaryAllocation.swift index acb60219ff765..7ebbcf97b61b2 100644 --- a/stdlib/public/core/TemporaryAllocation.swift +++ b/stdlib/public/core/TemporaryAllocation.swift @@ -178,8 +178,7 @@ internal func _withUnprotectedUnsafeTemporaryAllocation< _ body: (Builtin.RawPointer) throws -> R ) rethrows -> R { // How many bytes do we need to allocate? - //let byteCount = _byteCountForTemporaryAllocation(of: type, capacity: capacity) - let byteCount = MemoryLayout.stride &* capacity + let byteCount = _byteCountForTemporaryAllocation(of: type, capacity: capacity) guard _isStackAllocationSafe(byteCount: byteCount, alignment: alignment) else { return try _fallBackToHeapAllocation(byteCount: byteCount, alignment: alignment, body) @@ -295,7 +294,7 @@ public func _withUnprotectedUnsafeTemporaryAllocation( alignment: alignment ) { pointer in let buffer = UnsafeMutableRawBufferPointer( - _uncheckedStart: .init(pointer), + start: .init(pointer), count: byteCount ) return try body(buffer) @@ -375,7 +374,7 @@ public func _withUnprotectedUnsafeTemporaryAllocation< ) { pointer in Builtin.bindMemory(pointer, capacity._builtinWordValue, type) let buffer = UnsafeMutableBufferPointer( - _uncheckedStart: .init(pointer), + start: .init(pointer), count: capacity ) return try body(buffer) diff --git a/stdlib/public/core/UnsafeRawBufferPointer.swift.gyb b/stdlib/public/core/UnsafeRawBufferPointer.swift.gyb index 101c6729c8830..af288923dd101 100644 --- a/stdlib/public/core/UnsafeRawBufferPointer.swift.gyb +++ b/stdlib/public/core/UnsafeRawBufferPointer.swift.gyb @@ -117,20 +117,6 @@ public struct Unsafe${Mutable}RawBufferPointer { _debugPrecondition(count == 0 || start != nil, "${Self} has a nil start and nonzero count") - self.init(_uncheckedStart: start, count: count) - } - - @_alwaysEmitIntoClient - internal init( - _uncheckedStart start: Unsafe${Mutable}RawPointer?, - count: Int - ) { - _internalInvariant(count >= 0, "${Self} with negative count") - _internalInvariant( - count == 0 || start != nil, - "${Self} has a nil start and nonzero count" - ) - _position = start _end = start.map { $0 + _assumeNonNegative(count) } } @@ -1397,10 +1383,7 @@ public func _withUnprotectedUnsafeBytes< #else let addr = UnsafeRawPointer(Builtin.addressOfBorrow(value)) #endif - let buffer = UnsafeRawBufferPointer( - _uncheckedStart: addr, - count: MemoryLayout.size - ) + let buffer = UnsafeRawBufferPointer(start: addr, count: MemoryLayout.size) return try body(buffer) } From 59c53b72f0d89b9fb0834a32d2e2540390101780 Mon Sep 17 00:00:00 2001 From: Alejandro Alonso Date: Thu, 31 Oct 2024 10:30:05 -0700 Subject: [PATCH 07/10] Don't do the kvc stored offset optimization on 16 bit platforms --- stdlib/public/core/KeyPath.swift | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/stdlib/public/core/KeyPath.swift b/stdlib/public/core/KeyPath.swift index 3df6e9ad49bf2..ca311254e14bf 100644 --- 
From 59c53b72f0d89b9fb0834a32d2e2540390101780 Mon Sep 17 00:00:00 2001
From: Alejandro Alonso
Date: Thu, 31 Oct 2024 10:30:05 -0700
Subject: [PATCH 07/10] Don't do the kvc stored offset optimization on 16 bit
 platforms

---
 stdlib/public/core/KeyPath.swift | 5 +++--
 1 file changed, 3 insertions(+), 2 deletions(-)

diff --git a/stdlib/public/core/KeyPath.swift b/stdlib/public/core/KeyPath.swift
index 3df6e9ad49bf2..ca311254e14bf 100644
--- a/stdlib/public/core/KeyPath.swift
+++ b/stdlib/public/core/KeyPath.swift
@@ -84,7 +84,7 @@ public class AnyKeyPath: _AppendKeyPath {
       _kvcKeyPathStringPtr = nil
     }
 #else
-#error("Unsupported platform")
+  // Don't assign anything.
 #endif
   }
 
@@ -111,7 +111,8 @@ public class AnyKeyPath: _AppendKeyPath {
     }
     return nil
 #else
-#error("Unsupported platform")
+    // Otherwise, we assigned nothing so return nothing.
+    return nil
 #endif
   }
 
From edee13a612f825eb316b749ac7282253b6e9c5eb Mon Sep 17 00:00:00 2001
From: Alejandro Alonso
Date: Mon, 4 Nov 2024 17:10:18 -0800
Subject: [PATCH 08/10] Take the leaf type into account for max size

---
 stdlib/public/core/KeyPath.swift | 10 ++++++++++
 1 file changed, 10 insertions(+)

diff --git a/stdlib/public/core/KeyPath.swift b/stdlib/public/core/KeyPath.swift
index ca311254e14bf..be47c09c393ec 100644
--- a/stdlib/public/core/KeyPath.swift
+++ b/stdlib/public/core/KeyPath.swift
@@ -3725,6 +3725,16 @@ internal struct InstantiateKeyPathBuffer: KeyPathPatternVisitor {
     leafMetadataRef: MetadataReference,
     kvcCompatibilityString: UnsafeRawPointer?) {
     self.genericEnvironment = genericEnvironment
+
+    let leaf = _resolveKeyPathMetadataReference(
+      leafMetadataRef,
+      genericEnvironment: genericEnvironment,
+      arguments: patternArgs
+    )
+
+    let size = _openExistential(leaf, do: _getTypeSize(_:))
+
+    maxSize = Swift.max(maxSize, size)
   }
 
   mutating func visitStoredComponent(kind: KeyPathStructOrClass,
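
Note (illustration only, not part of the patch series): together with the size hunk at the top of this excerpt, PATCH 08/10 folds every type a projection can pass through -- now including the leaf type -- into one running maximum, so a later read can serve all intermediate values from a single stack allocation. A rough model of that bookkeeping follows; MaxSizeTracker, recordIntermediateType, and scratchByteCount are made-up names.

// Sketch of the running-maximum bookkeeping the instantiation visitor keeps.
struct MaxSizeTracker {
  private(set) var maxSize = 0

  // Called for each intermediate (and now leaf) type a projection can produce.
  mutating func recordIntermediateType<T>(_: T.Type) {
    maxSize = Swift.max(maxSize, MemoryLayout<T>.size)
  }

  // Rounded up the same way the read path does:
  // 1 &<< (Int.bitWidth &- maxSize.leadingZeroBitCount)
  var scratchByteCount: Int {
    guard maxSize > 0 else { return 0 }
    return 1 << (Int.bitWidth - maxSize.leadingZeroBitCount)
  }
}

var tracker = MaxSizeTracker()
tracker.recordIntermediateType(Int8.self)   // 1 byte
tracker.recordIntermediateType(String.self) // 16 bytes
print(tracker.maxSize, tracker.scratchByteCount) // 16 32
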
From 90bd2a008e52cb77b58554d36b5a137da04c173b Mon Sep 17 00:00:00 2001
From: Alejandro Alonso
Date: Tue, 4 Feb 2025 16:21:09 -0800
Subject: [PATCH 09/10] Add maxSize helper to buffer and fix reroot

---
 stdlib/public/core/KeyPath.swift | 50 ++++++++++++++++++--------------
 1 file changed, 28 insertions(+), 22 deletions(-)

diff --git a/stdlib/public/core/KeyPath.swift b/stdlib/public/core/KeyPath.swift
index be47c09c393ec..a4ee6ce61a331 100644
--- a/stdlib/public/core/KeyPath.swift
+++ b/stdlib/public/core/KeyPath.swift
@@ -360,11 +356,7 @@ public class KeyPath<Root, Value>: PartialKeyPath<Root> {
       )
     }
 
-    let bufferPtr = buffer.data.baseAddress._unsafelyUnwrappedUnchecked
-    let endOfBuffer = MemoryLayout<Int>._roundingUpToAlignment(
-      bufferPtr + buffer.data.count
-    )
-    let maxSize = endOfBuffer.load(as: Int.self)
+    let maxSize = buffer.maxSize
     let roundedMaxSize = 1 &<< (Int.bitWidth &- maxSize.leadingZeroBitCount)
 
     // 16 is the max alignment allowed on practically every platform we deploy
@@ -542,11 +538,7 @@ public class ReferenceWritableKeyPath<
 
     // Project out the reference prefix.
-    let bufferPtr = buffer.data.baseAddress._unsafelyUnwrappedUnchecked
-    let endOfBuffer = MemoryLayout<Int>._roundingUpToAlignment(
-      bufferPtr + buffer.data.count
-    )
-    let maxSize = endOfBuffer.load(as: Int.self)
+    let maxSize = buffer.maxSize
     let roundedMaxSize = 1 &<< (Int.bitWidth &- maxSize.leadingZeroBitCount)
 
     // 16 is the max alignment allowed on practically every platform we deploy
@@ -2043,6 +2035,15 @@ internal struct KeyPathBuffer {
     return UnsafeMutableRawBufferPointer(mutating: data)
   }
 
+  internal var maxSize: Int {
+    let bufferPtr = data.baseAddress._unsafelyUnwrappedUnchecked
+    let endOfBuffer = MemoryLayout<Int>._roundingUpToAlignment(
+      bufferPtr + data.count
+    )
+
+    return endOfBuffer.load(as: Int.self)
+  }
+
   internal struct Builder {
     internal var buffer: UnsafeMutableRawBufferPointer
     internal init(_ buffer: UnsafeMutableRawBufferPointer) {
@@ -2695,11 +2696,7 @@ internal func _appendingKeyPaths<
 
   let leafHasReferencePrefix = leafBuffer.hasReferencePrefix
 
-  let rootBufferPtr = rootBuffer.data.baseAddress._unsafelyUnwrappedUnchecked
-  let rootEndOfBuffer = MemoryLayout<Int>._roundingUpToAlignment(
-    rootBufferPtr + rootBuffer.data.count
-  )
-  let rootMaxSize = rootEndOfBuffer.load(as: Int.self)
+  let rootMaxSize = rootBuffer.maxSize
 
   // Clone the root components into the buffer.
   while true {
@@ -2728,11 +2725,7 @@ internal func _appendingKeyPaths<
     }
   }
 
-  let leafBufferPtr = leafBuffer.data.baseAddress._unsafelyUnwrappedUnchecked
-  let leafEndOfBuffer = MemoryLayout<Int>._roundingUpToAlignment(
-    leafBufferPtr + leafBuffer.data.count
-  )
-  let leafMaxSize = leafEndOfBuffer.load(as: Int.self)
+  let leafMaxSize = leafBuffer.maxSize
 
   // Clone the leaf components into the buffer.
   while true {
@@ -4283,9 +4276,18 @@ public func _rerootKeyPath<NewRoot>(
 
   let newKpTy = _openExistential(existingKpTy.rootType, do: openedRoot(_:))
 
+  // Buffer header + padding (if needed)
+  var capacity = MemoryLayout<Int>.size
+
+  // Size of components
+  capacity += componentSize
+
+  // Max size at the end of the buffer
+  capacity = MemoryLayout<Int>._roundingUpToAlignment(capacity)
+  capacity += MemoryLayout<Int>.size
+
   return newKpTy._create(
-    // This is the buffer header + padding (if needed) + size of components
-    capacityInBytes: MemoryLayout<Int>.size + componentSize
+    capacityInBytes: capacity
   ) {
     var builder = KeyPathBuffer.Builder($0)
     let header = KeyPathBuffer.Header(
@@ -4312,6 +4314,10 @@ public func _rerootKeyPath<NewRoot>(
         break
       }
     }
+
+    // Append the max size at the end of the existing keypath's buffer to the
+    // end of the new keypath's buffer.
+    builder.push(existingBuffer.maxSize)
   }
 } as! PartialKeyPath<NewRoot>
 }
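
Note (illustration only, not part of the patch series): with the trailing max-size word now part of every key path buffer, _rerootKeyPath has to reserve room for it explicitly, as the hunks above do. A standalone version of that capacity arithmetic follows; roundUpToIntAlignment and rerootedCapacity are made-up names standing in for MemoryLayout<Int>._roundingUpToAlignment(_:) and the inline computation in the patch.

// Sketch of the capacity computation for a rerooted key path buffer.
func roundUpToIntAlignment(_ offset: Int) -> Int {
  let mask = MemoryLayout<Int>.alignment - 1
  return (offset + mask) & ~mask
}

func rerootedCapacity(componentSize: Int) -> Int {
  var capacity = MemoryLayout<Int>.size      // buffer header (+ padding)
  capacity += componentSize                  // cloned component data
  capacity = roundUpToIntAlignment(capacity) // align the trailer
  capacity += MemoryLayout<Int>.size         // trailing max-size word
  return capacity
}

// One 4-byte stored-offset component on a 64-bit platform:
// 8 + 4 rounded up to 16, plus 8 for the trailer = 24 bytes.
print(rerootedCapacity(componentSize: 4)) // 24
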
From f38e84cfe1879de4c3bd56b943f18ff102d93d97 Mon Sep 17 00:00:00 2001
From: Alejandro Alonso
Date: Wed, 5 Feb 2025 14:33:30 -0800
Subject: [PATCH 10/10] Pass buffer to _projectReadOnly and fix appends

---
 stdlib/public/core/KeyPath.swift | 146 +++++++++++++++----------------
 1 file changed, 70 insertions(+), 76 deletions(-)

diff --git a/stdlib/public/core/KeyPath.swift b/stdlib/public/core/KeyPath.swift
index a4ee6ce61a331..0531dd325de78 100644
--- a/stdlib/public/core/KeyPath.swift
+++ b/stdlib/public/core/KeyPath.swift
@@ -352,12 +352,15 @@ public class KeyPath<Root, Value>: PartialKeyPath<Root> {
       var isBreak = false
       let (rawComponent, _) = buffer.next()
 
-      return rawComponent._projectReadOnly(
-        root,
-        to: Value.self,
-        endingWith: Value.self,
-        &isBreak
-      )
+      return Builtin.emplace {
+        rawComponent._projectReadOnly(
+          root,
+          to: Value.self,
+          endingWith: Value.self,
+          &isBreak,
+          pointer: UnsafeMutablePointer($0)
+        )
+      }
     }
 
     let maxSize = buffer.maxSize
@@ -385,37 +388,28 @@ public class KeyPath<Root, Value>: PartialKeyPath<Root> {
 
         func projectCurrent<Current>(_: Current.Type) {
           func projectNew<New>(_: New.Type) {
-            let newBase = currentValueBuffer.withMemoryRebound(
+            let base = currentValueBuffer.withMemoryRebound(
               to: Current.self
             ) {
-              return rawComponent._projectReadOnly(
-                $0.moveElement(from: 0),
+              $0.moveElement(from: 0)
+            }
+
+            currentValueBuffer.withMemoryRebound(to: New.self) {
+              rawComponent._projectReadOnly(
+                base,
                 to: New.self,
                 endingWith: Value.self,
-                &isBreak
+                &isBreak,
+                pointer: $0.baseAddress._unsafelyUnwrappedUnchecked
              )
            }
 
             // If we've broken from the projection, it means we found nil
             // while optional chaining.
             guard _fastPath(!isBreak) else {
-              var value: Value = Builtin.zeroInitializer()
-
-              // Optional.none has a tag of 1
-              let tag: UInt32 = 1
-              Builtin.injectEnumTag(&value, tag._value)
-
-              currentValueBuffer.withMemoryRebound(to: Value.self) {
-                $0.initializeElement(at: 0, to: value)
-              }
-
               return
             }
 
-            currentValueBuffer.withMemoryRebound(to: New.self) {
-              $0.initializeElement(at: 0, to: newBase)
-            }
-
             currentType = newType
 
             if isLast {
@@ -564,14 +558,19 @@ public class ReferenceWritableKeyPath<
       func projectCurrent<Current>(_: Current.Type) {
         var isBreak = false
 
-        let newBase = currentValueBuffer.withMemoryRebound(
+        let base = currentValueBuffer.withMemoryRebound(
           to: Current.self
         ) {
-          return rawComponent._projectReadOnly(
-            $0.moveElement(from: 0),
+          $0.moveElement(from: 0)
+        }
+
+        currentValueBuffer.withMemoryRebound(to: New.self) {
+          rawComponent._projectReadOnly(
+            base,
             to: New.self,
             endingWith: Value.self,
-            &isBreak
+            &isBreak,
+            pointer: $0.baseAddress._unsafelyUnwrappedUnchecked
           )
         }
 
@@ -579,10 +578,6 @@ public class ReferenceWritableKeyPath<
           _preconditionFailure("should not have stopped key path projection")
         }
 
-        currentValueBuffer.withMemoryRebound(to: New.self) {
-          $0.initializeElement(at: 0, to: newBase)
-        }
-
         currentType = nextType
       }
 
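
Note (illustration only, not part of the patch series): the hunks above switch the projection callers from consuming a returned value to handing _projectReadOnly a pointer to uninitialized memory, so the result is initialized in place rather than returned and copied; the RawKeyPathComponent hunks that follow add the matching pointer: parameter. A minimal model of that out-pointer shape, using withUnsafeTemporaryAllocation in place of the stdlib-internal Builtin.emplace; projectSquare is a made-up stand-in.

// Sketch: write the result through an out-pointer instead of returning it.
func projectSquare(_ base: Int, pointer: UnsafeMutablePointer<Int>) {
  pointer.initialize(to: base * base)
}

let value = withUnsafeTemporaryAllocation(of: Int.self, capacity: 1) { buffer -> Int in
  projectSquare(7, pointer: buffer.baseAddress!)
  defer { buffer.deinitialize() }
  return buffer[0]
}
print(value) // 49
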
@@ -1781,41 +1776,23 @@ internal struct RawKeyPathComponent {
       count: buffer.count - componentSize)
   }
 
-  internal enum ProjectionResult<NewValue, LeafValue> {
-    /// Continue projecting the key path with the given new value.
-    case `continue`(NewValue)
-    /// Stop projecting the key path and use the given value as the final
-    /// result of the projection.
-    case `break`(LeafValue)
-
-    internal var assumingContinue: NewValue {
-      switch self {
-      case .continue(let x):
-        return x
-      case .break:
-        _internalInvariantFailure("should not have stopped key path projection")
-      }
-    }
-  }
-
   internal func _projectReadOnly<CurValue, NewValue, LeafValue>(
     _ base: CurValue,
     to: NewValue.Type,
     endingWith: LeafValue.Type,
-    _ isBreak: inout Bool
-  ) -> NewValue {
+    _ isBreak: inout Bool,
+    pointer: UnsafeMutablePointer<NewValue>
+  ) {
     switch value {
     case .struct(let offset):
-      let newValue = _withUnprotectedUnsafeBytes(of: base) {
+      _withUnprotectedUnsafeBytes(of: base) {
         let p = $0.baseAddress._unsafelyUnwrappedUnchecked + offset
 
         // The contents of the struct should be well-typed, so we can assume
         // typed memory here.
-        return p.assumingMemoryBound(to: NewValue.self).pointee
+        pointer.initialize(to: p.assumingMemoryBound(to: NewValue.self).pointee)
       }
 
-      return newValue
-
     case .class(let offset):
       _internalInvariant(CurValue.self is AnyObject.Type,
                          "base is not a class")
@@ -1830,21 +1807,24 @@ internal struct RawKeyPathComponent {
       // 'modify' access.
       Builtin.performInstantaneousReadAccess(offsetAddress._rawValue,
                                              NewValue.self)
-      return offsetAddress.assumingMemoryBound(to: NewValue.self).pointee
+
+      pointer.initialize(
+        to: offsetAddress.assumingMemoryBound(to: NewValue.self).pointee
+      )
 
     case .get(id: _, accessors: let accessors, argument: let argument),
          .mutatingGetSet(id: _, accessors: let accessors, argument: let argument),
          .nonmutatingGetSet(id: _, accessors: let accessors, argument: let argument):
       let getter: ComputedAccessorsPtr.Getter<CurValue, NewValue> = accessors.getter()
-      let newValue = getter(
-        base,
-        argument?.data.baseAddress ?? accessors._value,
-        argument?.data.count ?? 0
+      pointer.initialize(
+        to: getter(
+          base,
+          argument?.data.baseAddress ?? accessors._value,
+          argument?.data.count ?? 0
+        )
       )
 
-      return newValue
-
     case .optionalChain:
       _internalInvariant(CurValue.self == Optional<NewValue>.self,
                          "should be unwrapping optional value")
@@ -1857,17 +1837,21 @@ internal struct RawKeyPathComponent {
       if _fastPath(tag == 0) {
         // Optional "shares" a layout with its Wrapped type meaning we can
        // reinterpret the base address as an address to its Wrapped value.
-        return Builtin.reinterpretCast(base)
+        pointer.initialize(to: Builtin.reinterpretCast(base))
+        return
      }
 
       // We found nil.
       isBreak = true
 
-      // Return some zeroed out value for NewValue if we break. The caller will
-      // handle returning nil. We do this to prevent allocating metadata in this
-      // function because returning something like 'NewValue?' would need to
-      // allocate the optional metadata for 'NewValue'.
-      return Builtin.zeroInitializer()
+      // Initialize the leaf optional value by simply injecting the tag (which
+      // we've found to be 1) directly.
+      pointer.withMemoryRebound(to: LeafValue.self, capacity: 1) {
+        Builtin.injectEnumTag(
+          &$0.pointee,
+          tag._value
+        )
+      }
 
     case .optionalForce:
       _internalInvariant(CurValue.self == Optional<NewValue>.self,
@@ -1879,7 +1863,8 @@ internal struct RawKeyPathComponent {
       if _fastPath(tag == 0) {
         // Optional "shares" a layout with its Wrapped type meaning we can
         // reinterpret the base address as an address to its Wrapped value.
-        return Builtin.reinterpretCast(base)
+        pointer.initialize(to: Builtin.reinterpretCast(base))
+        return
       }
 
       _preconditionFailure("unwrapped nil optional")
@@ -1893,7 +1878,7 @@ internal struct RawKeyPathComponent {
       let tag: UInt32 = 0
       Builtin.injectEnumTag(&new, tag._value)
 
-      return new
+      pointer.initialize(to: new)
     }
   }
 
@@ -2657,12 +2642,19 @@ internal func _appendingKeyPaths<
   // header, plus space for the middle type.
   // Align up the root so that we can put the component type after it.
   let rootSize = MemoryLayout<Int>._roundingUpToAlignment(rootBuffer.data.count)
-  var resultSize = rootSize + leafBuffer.data.count
-    + 2 * MemoryLayout<Int>.size
+  var resultSize = rootSize +      // Root component size
+    leafBuffer.data.count +        // Leaf component size
+    MemoryLayout<Int>.size         // Middle type
+
+  // Size of just our components is equal to root + leaf + middle
   let componentSize = resultSize
-  // The first tail allocated member is the maxSize of the keypath.
+
+  resultSize += MemoryLayout<Int>.size // Header size (padding if needed)
+
+  // The first member after the components is the maxSize of the keypath.
   resultSize = MemoryLayout<Int>._roundingUpToAlignment(resultSize)
   resultSize += MemoryLayout<Int>.size
+
   // Immediately following is the tail-allocated space for the KVC string.
   let totalResultSize = MemoryLayout<Int32>
     ._roundingUpToAlignment(resultSize + appendedKVCLength)
@@ -2686,12 +2678,14 @@ internal func _appendingKeyPaths<
       // Save space for the header.
       let leafIsReferenceWritable = type(of: leaf).kind == .reference
       destBuilder.pushHeader(KeyPathBuffer.Header(
-        size: componentSize - MemoryLayout<Int>.size,
+        size: componentSize,
         trivial: rootBuffer.trivial && leafBuffer.trivial,
         hasReferencePrefix: rootBuffer.hasReferencePrefix ||
           leafIsReferenceWritable,
-        isSingleComponent: rootBuffer.isSingleComponent !=
-          leafBuffer.isSingleComponent
+
+        // We've already checked if either is an identity, so both have at
+        // least 1 component.
+        isSingleComponent: false
       ))
 
       let leafHasReferencePrefix = leafBuffer.hasReferencePrefix
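
Note (illustration only, not part of the patch series): these patches change how key-path reads are executed, not what they produce. Ordinary reads and appends like the ones below are what the stored-offset fast path, the single stack allocation, and the appended-buffer size accounting are serving.

struct Point { var x: Double; var y: Double }
struct Rect { var origin: Point; var size: Point }

let direct = \Rect.origin.x
let appended = (\Rect.origin).appending(path: \Point.y)

let r = Rect(origin: Point(x: 1, y: 2), size: Point(x: 3, y: 4))
print(r[keyPath: direct])   // 1.0
print(r[keyPath: appended]) // 2.0
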