termux-packages/packages/swift/swift-stdlib-move-tag.patch

From 7b2256f97b59192046eab312838f467def880605
Date: Sun, 9 Jan 2022 21:45:13 +0530
Subject: [PATCH] [android] Move the string and other tags in pointers to the
second byte because Android enabled memory tagging

Starting with Android 11, AArch64 Android places a tag in the top byte of
pointers to heap allocations. This tagging has been slowly rolling out to more
devices and collides with Swift's own tags. Moving Swift's tags to the second
byte works around the problem.
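
As a rough illustration (a sketch only, not code from this patch), the change
amounts to shifting Swift's tag constants down by one byte so that Android's
allocation tag in the top byte no longer overlaps them; the value below mirrors
the small-ASCII-string discriminator used later in StringObject.swift:

// Illustrative Swift sketch; run as a standalone script on a 64-bit host.
let topByteDiscriminator: UInt64 = 0xE000_0000_0000_0000  // small ASCII string, non-Android layout
let androidDiscriminator = topByteDiscriminator &>> 8     // second byte on Android AArch64
// The now-untouched top byte is left free for Android's heap-pointer tag.
assert(androidDiscriminator == 0x00E0_0000_0000_0000)
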
---
 stdlib/public/SwiftShims/HeapObject.h | 10 +++++
 stdlib/public/core/KeyPath.swift      | 10 +++++
 stdlib/public/core/SmallString.swift  |  9 ++++
 stdlib/public/core/StringObject.swift | 64 ++++++++++++++++++++++++++-
 stdlib/public/runtime/HeapObject.cpp  |  5 +++
diff --git a/swift/stdlib/public/SwiftShims/HeapObject.h b/swift/stdlib/public/SwiftShims/HeapObject.h
index 3933b0b8d40e2..f45e8774951b3 100644
--- a/swift/stdlib/public/SwiftShims/HeapObject.h
+++ b/swift/stdlib/public/SwiftShims/HeapObject.h
@@ -157,12 +157,22 @@ static_assert(alignof(HeapObject) == alignof(void*),
#endif
#define _swift_abi_SwiftSpareBitsMask \
(__swift_uintptr_t) SWIFT_ABI_ARM64_SWIFT_SPARE_BITS_MASK
+#if defined(__ANDROID__)
+#define _swift_abi_ObjCReservedBitsMask \
+ (__swift_uintptr_t) SWIFT_ABI_ANDROID_ARM64_OBJC_RESERVED_BITS_MASK
+#else
#define _swift_abi_ObjCReservedBitsMask \
(__swift_uintptr_t) SWIFT_ABI_ARM64_OBJC_RESERVED_BITS_MASK
+#endif
#define _swift_abi_ObjCReservedLowBits \
(unsigned) SWIFT_ABI_ARM64_OBJC_NUM_RESERVED_LOW_BITS
+#if defined(__ANDROID__)
+#define _swift_BridgeObject_TaggedPointerBits \
+ (__swift_uintptr_t) SWIFT_ABI_DEFAULT_BRIDGEOBJECT_TAG_64 >> 8
+#else
#define _swift_BridgeObject_TaggedPointerBits \
(__swift_uintptr_t) SWIFT_ABI_DEFAULT_BRIDGEOBJECT_TAG_64
+#endif
#elif defined(__powerpc64__)
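
For HeapObject.h, the Android-only macros reuse the existing constants shifted
down one byte. A small sketch, assuming SWIFT_ABI_DEFAULT_BRIDGEOBJECT_TAG_64
is the top bit as on other 64-bit targets (an assumption, not stated in this
patch):

// Sketch: the BridgeObject tagged-pointer bit moves from bit 63 to bit 55 on Android AArch64.
let defaultBridgeObjectTag: UInt64 = 0x8000_0000_0000_0000  // assumed SWIFT_ABI_DEFAULT_BRIDGEOBJECT_TAG_64
let androidBridgeObjectTag = defaultBridgeObjectTag >> 8    // matches the >> 8 in the macro above
assert(androidBridgeObjectTag == 0x0080_0000_0000_0000)
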
diff --git a/swift/stdlib/public/core/KeyPath.swift b/swift/stdlib/public/core/KeyPath.swift
index 2d8f03974253b..3501f31c83a9e 100644
--- a/swift/stdlib/public/core/KeyPath.swift
+++ b/swift/stdlib/public/core/KeyPath.swift
@@ -1764,7 +1764,7 @@ internal struct KeyPathBuffer {
internal mutating func pushRaw(size: Int, alignment: Int)
-> UnsafeMutableRawBufferPointer {
var baseAddress = buffer.baseAddress.unsafelyUnwrapped
- var misalign = Int(bitPattern: baseAddress) % alignment
+ var misalign = Int(bitPattern: baseAddress) & (alignment - 1)
if misalign != 0 {
misalign = alignment - misalign
baseAddress = baseAddress.advanced(by: misalign)
@@ -3242,7 +3242,7 @@ internal struct InstantiateKeyPathBuffer: KeyPathPatternVisitor {
) {
let alignment = MemoryLayout<T>.alignment
var baseAddress = destData.baseAddress.unsafelyUnwrapped
- var misalign = Int(bitPattern: baseAddress) % alignment
+ var misalign = Int(bitPattern: baseAddress) & (alignment - 1)
if misalign != 0 {
misalign = alignment - misalign
baseAddress = baseAddress.advanced(by: misalign)
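
A likely reason these two KeyPath.swift hunks are included: once the top byte
of a pointer can carry a tag, Int(bitPattern:) may be negative, and Swift's %
then returns a non-positive remainder, breaking the old misalignment
computation. Masking with alignment - 1 is correct for any power-of-two
alignment regardless of sign. A minimal sketch of the difference (hypothetical
pointer value, 64-bit host):

// Sketch: misalignment of a tag-carrying pointer value.
let taggedPointerBits = Int(bitPattern: UInt(0xB400_007F_1234_5604))  // top byte set -> negative Int
let alignment = 8
let viaModulo = taggedPointerBits % alignment        // -4: would then compute bogus padding
let viaMask   = taggedPointerBits & (alignment - 1)  //  4: the actual misalignment
print(viaModulo, viaMask)
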
diff --git a/swift/stdlib/public/core/SmallString.swift b/swift/stdlib/public/core/SmallString.swift
index df46b4e8bf449..ac0969b57041e 100644
--- a/swift/stdlib/public/core/SmallString.swift
+++ b/swift/stdlib/public/core/SmallString.swift
@@ -23,6 +23,9 @@
//    ↑                            ↑
//    first (leftmost) code unit   discriminator (incl. count)
//
+// On Android AArch64, there is one less byte available because the discriminator
+// is stored in the penultimate code unit instead, to match where it's stored
+// for large strings.
@frozen @usableFromInline
internal struct _SmallString {
@usableFromInline
@@ -78,6 +81,8 @@ extension _SmallString {
internal static var capacity: Int {
#if arch(i386) || arch(arm) || arch(arm64_32) || arch(wasm32)
return 10
+#elseif os(Android) && arch(arm64)
+ return 14
#else
return 15
#endif
@@ -111,7 +116,11 @@ extension _SmallString {
// usage: it always clears the discriminator and count (in case it's full)
@inlinable @inline(__always)
internal var zeroTerminatedRawCodeUnits: RawBitPattern {
+#if os(Android) && arch(arm64)
+ let smallStringCodeUnitMask = ~UInt64(0xFFFF).bigEndian // zero last two bytes
+#else
let smallStringCodeUnitMask = ~UInt64(0xFF).bigEndian // zero last byte
+#endif
return (self._storage.0, self._storage.1 & smallStringCodeUnitMask)
}
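
The SmallString.swift masks above can be checked in isolation; on a
little-endian host (all supported Android and Apple targets), .bigEndian moves
the literal's low bytes to the top of the integer, i.e. to the end of the
in-memory code units:

// Sketch of the two zeroTerminatedRawCodeUnits masks.
let defaultMask = ~UInt64(0xFF).bigEndian    // clears the final byte (discriminator + count)
let androidMask = ~UInt64(0xFFFF).bigEndian  // clears the final two bytes, hence capacity 14 instead of 15
assert(defaultMask == 0x00FF_FFFF_FFFF_FFFF)
assert(androidMask == 0x0000_FFFF_FFFF_FFFF)
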
diff --git a/swift/stdlib/public/core/StringObject.swift b/swift/stdlib/public/core/StringObject.swift
index b087e87f51eb7..88ff7fbf08980 100644
--- a/swift/stdlib/public/core/StringObject.swift
+++ b/swift/stdlib/public/core/StringObject.swift
@@ -56,6 +56,11 @@
can compile to a fused check-and-branch, even if that burns part of the
encoding space.
+ On Android AArch64, we cannot use the top byte for large strings because the
+ OS has reserved it for memory tagging since Android 11, so the discriminator
+ is shifted to the second byte instead. This burns one more byte on small
+ strings.
+
On 32-bit platforms, we store an explicit discriminator (as a UInt8) with the
same encoding as above, placed in the high bits. E.g. `b62` above is in
`_discriminator`'s `b6`.
@@ -111,8 +116,13 @@ internal struct _StringObject {
@inlinable @inline(__always)
init(count: Int, variant: Variant, discriminator: UInt64, flags: UInt16) {
+#if os(Android) && arch(arm64)
+ _internalInvariant(discriminator & 0x00FF_0000_0000_0000 == discriminator,
+ "only the second byte can carry the discriminator and small count on Android AArch64")
+#else
_internalInvariant(discriminator & 0xFF00_0000_0000_0000 == discriminator,
"only the top byte can carry the discriminator and small count")
+#endif
self._count = count
self._variant = variant
@@ -349,7 +359,13 @@ extension _StringObject.Nibbles {
extension _StringObject.Nibbles {
// Mask for address bits, i.e. non-discriminator and non-extra high bits
@inlinable @inline(__always)
- static internal var largeAddressMask: UInt64 { return 0x0FFF_FFFF_FFFF_FFFF }
+ static internal var largeAddressMask: UInt64 {
+#if os(Android) && arch(arm64)
+ return 0xFF0F_FFFF_FFFF_FFFF
+#else
+ return 0x0FFF_FFFF_FFFF_FFFF
+#endif
+ }
// Mask for address bits, i.e. non-discriminator and non-extra high bits
@inlinable @inline(__always)
@@ -360,20 +376,32 @@ extension _StringObject.Nibbles {
// Discriminator for small strings
@inlinable @inline(__always)
internal static func small(isASCII: Bool) -> UInt64 {
+#if os(Android) && arch(arm64)
+ return isASCII ? 0x00E0_0000_0000_0000 : 0x00A0_0000_0000_0000
+#else
return isASCII ? 0xE000_0000_0000_0000 : 0xA000_0000_0000_0000
+#endif
}
// Discriminator for small strings
@inlinable @inline(__always)
internal static func small(withCount count: Int, isASCII: Bool) -> UInt64 {
_internalInvariant(count <= _SmallString.capacity)
+#if os(Android) && arch(arm64)
+ return small(isASCII: isASCII) | UInt64(truncatingIfNeeded: count) &<< 48
+#else
return small(isASCII: isASCII) | UInt64(truncatingIfNeeded: count) &<< 56
+#endif
}
// Discriminator for large, immortal, swift-native strings
@inlinable @inline(__always)
internal static func largeImmortal() -> UInt64 {
+#if os(Android) && arch(arm64)
+ return 0x0080_0000_0000_0000
+#else
return 0x8000_0000_0000_0000
+#endif
}
// Discriminator for large, mortal (i.e. managed), swift-native strings
@@ -397,7 +425,11 @@ extension _StringObject {
@inlinable @inline(__always)
internal var isImmortal: Bool {
+#if os(Android) && arch(arm64)
+ return (discriminatedObjectRawBits & 0x0080_0000_0000_0000) != 0
+#else
return (discriminatedObjectRawBits & 0x8000_0000_0000_0000) != 0
+#endif
}
@inlinable @inline(__always)
@@ -405,7 +437,11 @@ extension _StringObject {
@inlinable @inline(__always)
internal var isSmall: Bool {
+#if os(Android) && arch(arm64)
+ return (discriminatedObjectRawBits & 0x0020_0000_0000_0000) != 0
+#else
return (discriminatedObjectRawBits & 0x2000_0000_0000_0000) != 0
+#endif
}
@inlinable @inline(__always)
@@ -419,7 +455,11 @@ extension _StringObject {
// - Non-Cocoa shared strings
@inlinable @inline(__always)
internal var providesFastUTF8: Bool {
+#if os(Android) && arch(arm64)
+ return (discriminatedObjectRawBits & 0x0010_0000_0000_0000) == 0
+#else
return (discriminatedObjectRawBits & 0x1000_0000_0000_0000) == 0
+#endif
}
@inlinable @inline(__always)
@@ -429,16 +469,26 @@ extension _StringObject {
// conforms to `_AbstractStringStorage`
@inline(__always)
internal var hasStorage: Bool {
+#if os(Android) && arch(arm64)
+ return (discriminatedObjectRawBits & 0x00F0_0000_0000_0000) == 0
+#else
return (discriminatedObjectRawBits & 0xF000_0000_0000_0000) == 0
+#endif
}
// Whether we are a mortal, native (tail-allocated) string
@inline(__always)
internal var hasNativeStorage: Bool {
+#if os(Android) && arch(arm64)
+ // Android uses the same logic as explained below for other platforms,
+ // except isSmall is at b53, so shift it up to b61 before proceeding.
+ let bits = ~(discriminatedObjectRawBits << 8) & self._countAndFlagsBits
+#else
// b61 on the object means isSmall, and on countAndFlags means
// isNativelyStored. We just need to check that b61 is 0 on the object and 1
// on countAndFlags.
let bits = ~discriminatedObjectRawBits & self._countAndFlagsBits
+#endif
let result = bits & 0x2000_0000_0000_0000 != 0
_internalInvariant(!result || hasStorage, "native storage needs storage")
return result
@@ -466,7 +516,11 @@ extension _StringObject {
@inline(__always)
internal var largeIsCocoa: Bool {
_internalInvariant(isLarge)
+#if os(Android) && arch(arm64)
+ return (discriminatedObjectRawBits & 0x0040_0000_0000_0000) != 0
+#else
return (discriminatedObjectRawBits & 0x4000_0000_0000_0000) != 0
+#endif
}
// Whether this string is in one of our fastest representations:
@@ -535,7 +589,11 @@ extension _StringObject {
@inlinable
internal static func getSmallCount(fromRaw x: UInt64) -> Int {
+#if os(Android) && arch(arm64)
+ return Int(truncatingIfNeeded: (x & 0x000F_0000_0000_0000) &>> 48)
+#else
return Int(truncatingIfNeeded: (x & 0x0F00_0000_0000_0000) &>> 56)
+#endif
}
@inlinable @inline(__always)
@@ -546,7 +604,11 @@ extension _StringObject {
@inlinable
internal static func getSmallIsASCII(fromRaw x: UInt64) -> Bool {
+#if os(Android) && arch(arm64)
+ return x & 0x0040_0000_0000_0000 != 0
+#else
return x & 0x4000_0000_0000_0000 != 0
+#endif
}
@inlinable @inline(__always)
internal var smallIsASCII: Bool {
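
Taken together, every Android AArch64 constant introduced in StringObject.swift
above is just the regular 64-bit mask shifted down by one byte, leaving the top
byte untouched; a quick self-contained check (illustrative only):

// Sketch: regular discriminator masks vs. their Android AArch64 counterparts.
let regularMasks: [UInt64] = [
  0x8000_0000_0000_0000,  // isImmortal
  0x2000_0000_0000_0000,  // isSmall
  0x1000_0000_0000_0000,  // providesFastUTF8 (checked for == 0)
  0xF000_0000_0000_0000,  // hasStorage (checked for == 0)
  0x4000_0000_0000_0000,  // largeIsCocoa / smallIsASCII
]
let androidMasks = regularMasks.map { $0 >> 8 }
assert(androidMasks == [0x0080_0000_0000_0000, 0x0020_0000_0000_0000,
                        0x0010_0000_0000_0000, 0x00F0_0000_0000_0000,
                        0x0040_0000_0000_0000])
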
diff --git a/swift/stdlib/public/runtime/HeapObject.cpp b/swift/stdlib/public/runtime/HeapObject.cpp
index 7f6f6ddbdaee1..d1ab476740acd 100644
--- a/swift/stdlib/public/runtime/HeapObject.cpp
+++ b/swift/stdlib/public/runtime/HeapObject.cpp
@@ -66,6 +66,10 @@ static inline bool isValidPointerForNativeRetain(const void *p) {
// arm64_32 is special since it has 32-bit pointers but __arm64__ is true.
// Catch it early since __POINTER_WIDTH__ is generally non-portable.
return p != nullptr;
+#elif defined(__ANDROID__) && defined(__aarch64__)
+ // Check the top bit of the second byte instead, because Android AArch64 has
+ // reserved the top byte for its own pointer tagging since Android 11.
+ return (intptr_t)((uintptr_t)p << 8) > 0;
#elif defined(__x86_64__) || defined(__arm64__) || defined(__aarch64__) || defined(_M_ARM64) || defined(__s390x__) || (defined(__powerpc64__) && defined(__LITTLE_ENDIAN__))
// On these platforms, except s390x, the upper half of address space is reserved for the
// kernel, so we can assume that pointer values in this range are invalid.
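
Finally, the HeapObject.cpp change keeps the fast native-retain check working
when the top byte carries a tag: shifting the pointer left by eight bits
discards the tag, and the result is positive only when the pointer is non-null
and the remaining address bits do not look like a kernel-half address. A
transliteration into Swift for illustration only (the runtime stays in C++; the
function name and sample values are hypothetical):

// Sketch of isValidPointerForNativeRetain on Android AArch64.
func looksLikeNativePointer(_ raw: UInt64) -> Bool {
  // Drop the tag byte, then require the shifted value to be strictly positive
  // when reinterpreted as signed (this also rejects null, since 0 << 8 == 0).
  return Int64(bitPattern: raw << 8) > 0
}
assert(looksLikeNativePointer(0xB400_007F_1234_5678))    // tagged user-space heap pointer
assert(!looksLikeNativePointer(0))                       // null
assert(!looksLikeNativePointer(0xB4FF_FF7F_1234_5678))   // upper-half (kernel-style) address
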