termux-packages/packages/swift/swift-stdlib-move-tag.patch

diff --git a/swift/stdlib/public/SwiftShims/HeapObject.h b/swift/stdlib/public/SwiftShims/HeapObject.h
index 5e165fd3d4..978f1b2293 100644
--- a/swift/stdlib/public/SwiftShims/HeapObject.h
+++ b/swift/stdlib/public/SwiftShims/HeapObject.h
@@ -161,8 +161,13 @@ static_assert(alignof(HeapObject) == alignof(void*),
(__swift_uintptr_t) SWIFT_ABI_ARM64_OBJC_RESERVED_BITS_MASK
#define _swift_abi_ObjCReservedLowBits \
(unsigned) SWIFT_ABI_ARM64_OBJC_NUM_RESERVED_LOW_BITS
+#if defined(__ANDROID__)
+#define _swift_BridgeObject_TaggedPointerBits \
+ (__swift_uintptr_t) SWIFT_ABI_DEFAULT_BRIDGEOBJECT_TAG_64 >> 8
+#else
#define _swift_BridgeObject_TaggedPointerBits \
(__swift_uintptr_t) SWIFT_ABI_DEFAULT_BRIDGEOBJECT_TAG_64
+#endif
#elif defined(__powerpc64__)
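The hunk above shifts the BridgeObject tagged-pointer mask down by one byte so it stays clear of the byte Android reserves for pointer tagging. A minimal sketch of that arithmetic, assuming a hypothetical stand-in value for SWIFT_ABI_DEFAULT_BRIDGEOBJECT_TAG_64 (the real constant is defined elsewhere in the Swift tree):

```swift
// Illustrative only: a tag occupying the top byte, shifted right by 8,
// lands in the second-highest byte and no longer overlaps the OS tag byte.
let defaultTag: UInt64 = 0x8000_0000_0000_0000   // hypothetical stand-in value
let androidTag = defaultTag >> 8                 // 0x0080_0000_0000_0000
assert(androidTag & 0xFF00_0000_0000_0000 == 0)  // clear of the reserved top byte
```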
diff --git a/swift/stdlib/public/core/Builtin.swift b/swift/stdlib/public/core/Builtin.swift
index 8a7bb33243..65be72766e 100644
--- a/swift/stdlib/public/core/Builtin.swift
+++ b/swift/stdlib/public/core/Builtin.swift
@@ -397,7 +397,13 @@ internal var _objectPointerIsObjCBit: UInt {
#else
@inlinable
internal var _objectPointerIsObjCBit: UInt {
- @inline(__always) get { return 0x4000_0000_0000_0000 }
+ @inline(__always) get {
+#if os(Android) && arch(arm64)
+ return 0x0040_0000_0000_0000
+#else
+ return 0x4000_0000_0000_0000
+#endif
+ }
}
#endif
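To see why the bit has to move, consider a made-up tagged pointer value (nothing below comes from the patch): once the top byte of a native object pointer can be nonzero, a flag kept at b62 may read as set for a plain Swift object, while the relocated bit at b54 is untouched.

```swift
// Hypothetical tagged address (64-bit UInt) whose tag byte, 0x4A, happens to set b62.
let raw: UInt = 0x4A00_0000_7F00_1230
assert(raw & 0x4000_0000_0000_0000 != 0)  // old position: spurious "is ObjC" hit
assert(raw & 0x0040_0000_0000_0000 == 0)  // new position: unaffected by the tag byte
```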
diff --git a/swift/stdlib/public/core/KeyPath.swift b/swift/stdlib/public/core/KeyPath.swift
index 2d8f039742..3501f31c83 100644
--- a/swift/stdlib/public/core/KeyPath.swift
+++ b/swift/stdlib/public/core/KeyPath.swift
@@ -1764,7 +1764,12 @@ internal struct KeyPathBuffer {
internal mutating func pushRaw(size: Int, alignment: Int)
-> UnsafeMutableRawBufferPointer {
var baseAddress = buffer.baseAddress.unsafelyUnwrapped
+#if os(Android) && arch(arm64)
+ // Android AArch64 may tag the top byte, so mask it off before checking alignment.
+ var misalign = (0x00FF_FFFF_FFFF_FFFF & Int(bitPattern: baseAddress)) % alignment
+#else
var misalign = Int(bitPattern: baseAddress) % alignment
+#endif
if misalign != 0 {
misalign = alignment - misalign
baseAddress = baseAddress.advanced(by: misalign)
@@ -3242,7 +3247,12 @@ internal struct InstantiateKeyPathBuffer: KeyPathPatternVisitor {
) {
let alignment = MemoryLayout<T>.alignment
var baseAddress = destData.baseAddress.unsafelyUnwrapped
+#if os(Android) && arch(arm64)
+ // Android AArch64 may tag the top byte, so mask it off before checking alignment.
+ var misalign = (0x00FF_FFFF_FFFF_FFFF & Int(bitPattern: baseAddress)) % alignment
+#else
var misalign = Int(bitPattern: baseAddress) % alignment
+#endif
if misalign != 0 {
misalign = alignment - misalign
baseAddress = baseAddress.advanced(by: misalign)
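A short sketch of why the extra mask matters, using a made-up tagged address on a 64-bit build (these values are assumptions, not code from the patch): once the tag byte has its high bit set, Int(bitPattern:) goes negative and Swift's % returns a negative remainder, so the unmasked computation pads by the wrong amount.

```swift
let alignment = 8
let tagged = Int(bitPattern: 0xB400_0000_7F43_2105 as UInt)  // hypothetical tagged address
let unmasked = tagged % alignment                            // -3: the sign leaks into the remainder
let masked = (0x00FF_FFFF_FFFF_FFFF & tagged) % alignment    //  5: tag byte cleared first
assert(alignment - masked == 3)     // correct padding to the next 8-byte boundary
assert(alignment - unmasked == 11)  // the bogus padding the mask avoids
```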
diff --git a/swift/stdlib/public/core/SmallString.swift b/swift/stdlib/public/core/SmallString.swift
index df46b4e8bf..ac0969b570 100644
--- a/swift/stdlib/public/core/SmallString.swift
+++ b/swift/stdlib/public/core/SmallString.swift
@@ -23,6 +23,9 @@
// ↑ ↑
// first (leftmost) code unit discriminator (incl. count)
//
+// On Android AArch64, there is one less byte available because the discriminator
+// is stored in the penultimate code unit instead, to match where it's stored
+// for large strings.
@frozen @usableFromInline
internal struct _SmallString {
@usableFromInline
@@ -78,6 +81,8 @@ extension _SmallString {
internal static var capacity: Int {
#if arch(i386) || arch(arm) || arch(arm64_32) || arch(wasm32)
return 10
+#elseif os(Android) && arch(arm64)
+ return 14
#else
return 15
#endif
@@ -111,7 +116,11 @@ extension _SmallString {
// usage: it always clears the discriminator and count (in case it's full)
@inlinable @inline(__always)
internal var zeroTerminatedRawCodeUnits: RawBitPattern {
+#if os(Android) && arch(arm64)
+ let smallStringCodeUnitMask = ~UInt64(0xFFFF).bigEndian // zero last two bytes
+#else
let smallStringCodeUnitMask = ~UInt64(0xFF).bigEndian // zero last byte
+#endif
return (self._storage.0, self._storage.1 & smallStringCodeUnitMask)
}
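To make the widened mask concrete, here is what the two expressions evaluate to on a little-endian arm64 host (a quick consistency sketch, not part of the patch):

```swift
let oneByteMask = ~UInt64(0xFF).bigEndian    // 0x00FF_FFFF_FFFF_FFFF: last byte cleared
let twoByteMask = ~UInt64(0xFFFF).bigEndian  // 0x0000_FFFF_FFFF_FFFF: last two bytes cleared
assert(twoByteMask == oneByteMask & ~(UInt64(0xFF) << 48))   // the extra cleared byte is b48..b55
```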
diff --git a/swift/stdlib/public/core/StringObject.swift b/swift/stdlib/public/core/StringObject.swift
index b087e87f51..93f243ddd9 100644
--- a/swift/stdlib/public/core/StringObject.swift
+++ b/swift/stdlib/public/core/StringObject.swift
@@ -56,6 +56,11 @@
can compile to a fused check-and-branch, even if that burns part of the
encoding space.
+ On Android AArch64, we cannot use the top byte for large strings because the
+ OS has reserved it for memory tagging since Android 11, so we shift the
+ discriminator down to the second byte instead. This burns one more byte on
+ small strings.
+
On 32-bit platforms, we store an explicit discriminator (as a UInt8) with the
same encoding as above, placed in the high bits. E.g. `b62` above is in
`_discriminator`'s `b6`.
@@ -111,8 +116,13 @@ internal struct _StringObject {
@inlinable @inline(__always)
init(count: Int, variant: Variant, discriminator: UInt64, flags: UInt16) {
+#if os(Android) && arch(arm64)
+ _internalInvariant(discriminator & 0x00FF_0000_0000_0000 == discriminator,
+ "only the second byte can carry the discriminator and small count on Android AArch64")
+#else
_internalInvariant(discriminator & 0xFF00_0000_0000_0000 == discriminator,
"only the top byte can carry the discriminator and small count")
+#endif
self._count = count
self._variant = variant
@@ -349,7 +359,13 @@ extension _StringObject.Nibbles {
extension _StringObject.Nibbles {
// Mask for address bits, i.e. non-discriminator and non-extra high bits
@inlinable @inline(__always)
- static internal var largeAddressMask: UInt64 { return 0x0FFF_FFFF_FFFF_FFFF }
+ static internal var largeAddressMask: UInt64 {
+#if os(Android) && arch(arm64)
+ return 0xFF0F_FFFF_FFFF_FFFF
+#else
+ return 0x0FFF_FFFF_FFFF_FFFF
+#endif
+ }
// Mask for address bits, i.e. non-discriminator and non-extra high bits
@inlinable @inline(__always)
@@ -360,20 +376,32 @@ extension _StringObject.Nibbles {
// Discriminator for small strings
@inlinable @inline(__always)
internal static func small(isASCII: Bool) -> UInt64 {
+#if os(Android) && arch(arm64)
+ return isASCII ? 0x00E0_0000_0000_0000 : 0x00A0_0000_0000_0000
+#else
return isASCII ? 0xE000_0000_0000_0000 : 0xA000_0000_0000_0000
+#endif
}
// Discriminator for small strings
@inlinable @inline(__always)
internal static func small(withCount count: Int, isASCII: Bool) -> UInt64 {
_internalInvariant(count <= _SmallString.capacity)
+#if os(Android) && arch(arm64)
+ return small(isASCII: isASCII) | UInt64(truncatingIfNeeded: count) &<< 48
+#else
return small(isASCII: isASCII) | UInt64(truncatingIfNeeded: count) &<< 56
+#endif
}
// Discriminator for large, immortal, swift-native strings
@inlinable @inline(__always)
internal static func largeImmortal() -> UInt64 {
+#if os(Android) && arch(arm64)
+ return 0x0080_0000_0000_0000
+#else
return 0x8000_0000_0000_0000
+#endif
}
// Discriminator for large, mortal (i.e. managed), swift-native strings
@@ -381,7 +409,11 @@ extension _StringObject.Nibbles {
internal static func largeMortal() -> UInt64 { return 0x0000_0000_0000_0000 }
internal static func largeCocoa(providesFastUTF8: Bool) -> UInt64 {
+#if os(Android) && arch(arm64)
+ return providesFastUTF8 ? 0x0040_0000_0000_0000 : 0x0050_0000_0000_0000
+#else
return providesFastUTF8 ? 0x4000_0000_0000_0000 : 0x5000_0000_0000_0000
+#endif
}
}
@@ -397,7 +429,11 @@ extension _StringObject {
@inlinable @inline(__always)
internal var isImmortal: Bool {
+#if os(Android) && arch(arm64)
+ return (discriminatedObjectRawBits & 0x0080_0000_0000_0000) != 0
+#else
return (discriminatedObjectRawBits & 0x8000_0000_0000_0000) != 0
+#endif
}
@inlinable @inline(__always)
@@ -405,7 +441,11 @@ extension _StringObject {
@inlinable @inline(__always)
internal var isSmall: Bool {
+#if os(Android) && arch(arm64)
+ return (discriminatedObjectRawBits & 0x0020_0000_0000_0000) != 0
+#else
return (discriminatedObjectRawBits & 0x2000_0000_0000_0000) != 0
+#endif
}
@inlinable @inline(__always)
@@ -419,7 +459,11 @@ extension _StringObject {
// - Non-Cocoa shared strings
@inlinable @inline(__always)
internal var providesFastUTF8: Bool {
+#if os(Android) && arch(arm64)
+ return (discriminatedObjectRawBits & 0x0010_0000_0000_0000) == 0
+#else
return (discriminatedObjectRawBits & 0x1000_0000_0000_0000) == 0
+#endif
}
@inlinable @inline(__always)
@@ -429,16 +473,26 @@ extension _StringObject {
// conforms to `_AbstractStringStorage`
@inline(__always)
internal var hasStorage: Bool {
+#if os(Android) && arch(arm64)
+ return (discriminatedObjectRawBits & 0x00F0_0000_0000_0000) == 0
+#else
return (discriminatedObjectRawBits & 0xF000_0000_0000_0000) == 0
+#endif
}
// Whether we are a mortal, native (tail-allocated) string
@inline(__always)
internal var hasNativeStorage: Bool {
+#if os(Android) && arch(arm64)
+ // Android uses the same logic explained below for the other platforms,
+ // except that isSmall sits at b53, so shift it up to b61 before proceeding.
+ let bits = ~(discriminatedObjectRawBits << 8) & self._countAndFlagsBits
+#else
// b61 on the object means isSmall, and on countAndFlags means
// isNativelyStored. We just need to check that b61 is 0 on the object and 1
// on countAndFlags.
let bits = ~discriminatedObjectRawBits & self._countAndFlagsBits
+#endif
let result = bits & 0x2000_0000_0000_0000 != 0
_internalInvariant(!result || hasStorage, "native storage needs storage")
return result
@@ -466,7 +520,11 @@ extension _StringObject {
@inline(__always)
internal var largeIsCocoa: Bool {
_internalInvariant(isLarge)
+#if os(Android) && arch(arm64)
+ return (discriminatedObjectRawBits & 0x0040_0000_0000_0000) != 0
+#else
return (discriminatedObjectRawBits & 0x4000_0000_0000_0000) != 0
+#endif
}
// Whether this string is in one of our fastest representations:
@@ -535,7 +593,11 @@ extension _StringObject {
@inlinable
internal static func getSmallCount(fromRaw x: UInt64) -> Int {
+#if os(Android) && arch(arm64)
+ return Int(truncatingIfNeeded: (x & 0x000F_0000_0000_0000) &>> 48)
+#else
return Int(truncatingIfNeeded: (x & 0x0F00_0000_0000_0000) &>> 56)
+#endif
}
@inlinable @inline(__always)
@@ -546,7 +608,11 @@ extension _StringObject {
@inlinable
internal static func getSmallIsASCII(fromRaw x: UInt64) -> Bool {
+#if os(Android) && arch(arm64)
+ return x & 0x0040_0000_0000_0000 != 0
+#else
return x & 0x4000_0000_0000_0000 != 0
+#endif
}
@inlinable @inline(__always)
internal var smallIsASCII: Bool {
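Every relocated discriminator constant above is the stock value shifted down by exactly one byte, and the small-string count nibble moves with it (b56..b59 down to b48..b51). A quick consistency sketch, not part of the patch:

```swift
let relocated: [(UInt64, UInt64)] = [
  (0xE000_0000_0000_0000, 0x00E0_0000_0000_0000),  // small, ASCII
  (0xA000_0000_0000_0000, 0x00A0_0000_0000_0000),  // small, non-ASCII
  (0x8000_0000_0000_0000, 0x0080_0000_0000_0000),  // large, immortal
  (0x4000_0000_0000_0000, 0x0040_0000_0000_0000),  // large, Cocoa, fast UTF-8
  (0x5000_0000_0000_0000, 0x0050_0000_0000_0000),  // large, Cocoa, opaque
]
for (stock, android) in relocated {
  assert(stock >> 8 == android)
}
assert((0x0F00_0000_0000_0000 as UInt64) >> 8 == 0x000F_0000_0000_0000)  // small-count mask
```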
diff --git a/swift/stdlib/public/runtime/HeapObject.cpp b/swift/stdlib/public/runtime/HeapObject.cpp
index 0a27620622..69b4350f7d 100644
--- a/swift/stdlib/public/runtime/HeapObject.cpp
+++ b/swift/stdlib/public/runtime/HeapObject.cpp
@@ -66,6 +66,10 @@ static inline bool isValidPointerForNativeRetain(const void *p) {
// arm64_32 is special since it has 32-bit pointers but __arm64__ is true.
// Catch it early since __POINTER_WIDTH__ is generally non-portable.
return p != nullptr;
+#elif defined(__ANDROID__) && defined(__aarch64__)
+ // Check the top of the second byte instead, because Android AArch64 has
+ // reserved the top byte for its own pointer tagging since Android 11.
+ return (intptr_t)p << 8 > 0;
#elif defined(__x86_64__) || defined(__arm64__) || defined(__aarch64__) || defined(_M_ARM64) || defined(__s390x__) || (defined(__powerpc64__) && defined(__LITTLE_ENDIAN__))
// On these platforms, except s390x, the upper half of address space is reserved for the
// kernel, so we can assume that pointer values in this range are invalid.
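The context comment above describes the other 64-bit platforms rejecting pointers in the kernel half of the address space; the Android branch applies the same idea after shifting the tag byte out. The same arithmetic, sketched in Swift with made-up pointer values (assumptions, not runtime code):

```swift
let tagged = Int(bitPattern: 0xB400_0000_7F43_2100 as UInt)  // hypothetical tagged heap address
let plain  = Int(bitPattern: 0x0000_0000_7F43_2100 as UInt)  // same address without a tag
assert(tagged < 0)          // a plain sign check would reject the tagged pointer
assert((tagged << 8) > 0)   // dropping the tag byte accepts it again
assert((plain << 8) > 0)    // untagged user-space pointers still pass
assert((0 << 8) <= 0)       // and null still fails
```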