/// Specifies the superclass of an instance.
struct objc_super {
/// Specifies an instance of a class.
__unsafe_unretained _Nonnull id receiver;
/// Specifies the particular superclass of the instance to message.
#if !defined(__cplusplus) && !__OBJC2__
/* For compatibility with old objc-runtime.h header */
__unsafe_unretained _Nonnull Class class;
#else
__unsafe_unretained _Nonnull Class super_class;
#endif
/* super_class is the first class to search */
};
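// For context, objc_super is what the compiler materializes for a
// [super ...] send before calling objc_msgSendSuper. A minimal C sketch
// of that lowering follows; callSuperDescription is a hypothetical
// helper. Note that real compiler output uses the superclass of the
// statically known defining class, so the object_getClass() form below
// is only correct when the method lives in the receiver's exact class.
#include <objc/message.h>
#include <objc/runtime.h>

static id callSuperDescription(id self) {
    struct objc_super sup = {
        .receiver = self,
        .super_class = class_getSuperclass(object_getClass(self)),
    };
    // objc_msgSendSuper must be called through a properly typed
    // function pointer.
    id (*send)(struct objc_super *, SEL) =
        (id (*)(struct objc_super *, SEL))objc_msgSendSuper;
    return send(&sup, sel_registerName("description"));
}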
/********************************************************************
* GetClassFromIsa_p16 src, needs_auth, auth_address
* src is a raw isa field. Sets p16 to the corresponding class pointer.
* The raw isa might be an indexed isa to be decoded, or a
* packed isa that needs to be masked.
*
* On exit:
* src is unchanged
* p16 is a class pointer
* x10 is clobbered
********************************************************************/
.macro GetClassFromIsa_p16 src, needs_auth, auth_address /* note: auth_address is not required if !needs_auth */
#if SUPPORT_INDEXED_ISA
...
#elif __LP64__
.if \needs_auth == 0 // _cache_getImp takes an authed class already
mov p16, \src
.else // needs_auth == 1
// 64-bit packed isa
ExtractISA p16, \src, \auth_address
.endif
#else
// 32-bit raw isa
mov p16, \src
#endif
.endmacro
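// A hedged C model of what the packed-isa case reduces to when pointer
// authentication is disabled: ExtractISA is then a single AND against
// ISA_MASK. The mask value below is the arm64 device layout from objc4;
// treat both the constant and the helper name as illustrative.
#include <stdint.h>

#define ISA_MASK 0x0000000ffffffff8ULL   // arm64 packed-isa class bits

static inline uintptr_t class_from_packed_isa(uintptr_t raw_isa) {
    return raw_isa & ISA_MASK;           // p16 = isa & ISA_MASK
}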
/********************************************************************
*
* CacheLookup NORMAL|GETIMP|LOOKUP <function> MissLabelDynamic MissLabelConstant
*
* MissLabelConstant is only used for the GETIMP variant.
*
* Locate the implementation for a selector in a class method cache.
*
* When this is used in a function that doesn't hold the runtime lock,
* this represents the critical section that may access dead memory.
* If the kernel causes one of these functions to go down the recovery
* path, we pretend the lookup failed by jumping to the cache-miss
* branch (MissLabelDynamic).
*
* Takes:
* x1 = selector
* x16 = class to be searched
*
* Kills:
* x9,x10,x11,x12,x13,x15,x1
*
* Untouched:
* x14
*
* On exit: (found) calls or returns IMP
* with x16 = class, x17 = IMP
* In LOOKUP mode, the two low bits are set to 0x3
* if we hit a constant cache (used in objc_trace)
* (not found) jumps to LCacheMiss
* with x15 = class
* For constant caches in LOOKUP mode, the low bit
* of x16 is set to 0x1 to indicate we had to fall back.
* In addition, when LCacheMiss is __objc_msgSend_uncached or
* __objc_msgLookup_uncached, 0x2 will be set in x16
* to remember we took the slowpath.
* So the two low bits of x16 on exit mean:
* 0: dynamic hit
* 1: fallback to the parent class, when there is a preoptimized cache
* 2: slowpath
* 3: preoptimized cache hit
*
********************************************************************/
.macro CacheLookup Mode, Function, MissLabelDynamic, MissLabelConstant
//
// Restart protocol:
//
// As soon as we're past the LLookupStart\Function label we may have
// loaded an invalid cache pointer or mask.
//
// When task_restartable_ranges_synchronize() is called,
// (or when a signal hits us) before we're past LLookupEnd\Function,
// then our PC will be reset to LLookupRecover\Function which forcefully
// jumps to the cache-miss codepath, which has the following
// requirements:
//
// GETIMP:
// The cache-miss is just returning NULL (setting x0 to 0)
//
// NORMAL and LOOKUP:
// - x0 contains the receiver
// - x1 contains the selector
// - x16 contains the isa
// - other registers are set as per calling conventions
//
// move x16 (the class) into x15
mov x15, x16 // stash the original isa
LLookupStart\Function:
...
.endmacro
// CacheHit: x17 = cached IMP, x10 = address of buckets, x1 = SEL, x16 = isa
.macro CacheHit
.if $0 == NORMAL
// call the cached IMP
TailCallCachedImp x17, x10, x1, x16 // authenticate and call imp
.elseif $0 == GETIMP
mov p0, p17
cbz p0, 9f // don't ptrauth a nil imp
AuthAndResignAsIMP x0, x10, x1, x16 // authenticate imp and re-sign as IMP
9: ret // return IMP
.elseif $0 == LOOKUP
// No nil check for ptrauth: the caller would crash anyway when they
// jump to a nil IMP. We don't care if that jump also fails ptrauth.
AuthAndResignAsIMP x17, x10, x1, x16 // authenticate imp and re-sign as IMP
cmp x16, x15
cinc x16, x16, ne // x16 += 1 when x15 != x16 (for instrumentation; fell back to the parent class)
ret // return imp via x17
.else
.abort oops
.endif
.endmacro
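// The cmp/cinc pair in the LOOKUP case implements the x16 low-bit
// encoding documented in the CacheLookup header above. A C mirror of
// those exit states (the enum and its names are mine, not objc4's):
enum lookup_exit_bits {
    LOOKUP_EXIT_DYNAMIC_HIT = 0,  // hit in the class's own method cache
    LOOKUP_EXIT_FALLBACK    = 1,  // preoptimized cache fell back to a parent class
    LOOKUP_EXIT_SLOWPATH    = 2,  // went through __objc_msgSend/__objc_msgLookup_uncached
    LOOKUP_EXIT_PREOPT_HIT  = 3,  // hit in a preoptimized (shared cache) cache
};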
// wrap-around:
// p10 = first bucket
// p11 = mask (and maybe other bits on LP64)
// p12 = _cmd & mask
//
// A full cache can happen with CACHE_ALLOW_FULL_UTILIZATION.
// So stop when we circle back to the first probed bucket
// rather than when hitting the first bucket again.
//
// Note that we might probe the initial bucket twice
// when the first probed slot is the last entry.
...
add p13, p10, p11, LSR #(48 - (1+PTRSHIFT))
// p13 = buckets + (mask << 1+PTRSHIFT)
// see comment about maskZeroBits
...
add p12, p10, p12, LSL #(1+PTRSHIFT)
// p12 = first probed bucket
// do {
4: ldp p17, p9, [x13], #-BUCKET_SIZE // {imp, sel} = *bucket--
cmp p9, p1 // if (sel == _cmd)
b.eq 2b // goto hit
cmp p9, #0 // } while (sel != 0 &&
ccmp p13, p12, #0, ne // bucket > first_probed)
b.hi 4b
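// Hedged C model of the backward probe above, assuming the LP64 bucket
// layout {imp, sel}; bucket_t and the helper are mine. The loop walks
// down from the last bucket and stops on a hit, an empty slot, or when
// it circles back to the first probed bucket.
#include <stdint.h>

typedef struct { uintptr_t imp; uintptr_t sel; } bucket_t;

static uintptr_t probe_after_wraparound(bucket_t *last,         // p13
                                        bucket_t *first_probed, // p12
                                        uintptr_t cmd) {        // p1
    bucket_t *bucket = last;
    bucket_t b;
    do {
        b = *bucket--;                     // {imp, sel} = *bucket--
        if (b.sel == cmd) return b.imp;    // goto hit
    } while (b.sel != 0 &&                 // } while (sel != 0 &&
             bucket > first_probed);       //          bucket > first_probed)
    return 0;                              // miss
}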
#if CONFIG_USE_PREOPT_CACHES
...
LLookupPreopt\Function:
#if __has_feature(ptrauth_calls)
// store cache_t (p11) & #0x007ffffffffffffe into p10
and p10, p11, #0x007ffffffffffffe // p10 = buckets
// autdb authenticates the data pointer in x10 (PAC key B), using x16 as the modifier
autdb x10, x16 // auth as early as possible
#endif
// look up the selector's entry in the shared cache
// x12 = (_cmd - first_shared_cache_sel)
adrp x9, _MagicSelRef@PAGE
ldr p9, [x9, _MagicSelRef@PAGEOFF]
sub p12, p1, p9
// w9 = ((_cmd - first_shared_cache_sel) >> hash_shift & hash_mask)
#if __has_feature(ptrauth_calls)
// bits 63..60 of x11 are the number of bits in hash_mask
// bits 59..55 of x11 is hash_shift
lsr x17, x11, #55 // w17 = (hash_shift, ...)
lsr w9, w12, w17 // >>= shift
lsr x17, x11, #60 // w17 = mask_bits
mov x11, #0x7fff
lsr x11, x11, x17 // p11 = mask (0x7fff >> mask_bits)
and x9, x9, x11 // &= mask
#else
// bits 63..53 of x11 is hash_mask
// bits 52..48 of x11 is hash_shift
lsr x17, x11, #48 // w17 = (hash_shift, hash_mask)
lsr w9, w12, w17 // >>= shift
and x9, x9, x11, LSR #53 // &= mask
#endif
ldr x17, [x10, x9, LSL #3] // x17 == sel_offs | (imp_offs << 32)
cmp x12, w17, uxtw
.if \Mode == GETIMP
...
.else
b.ne 5f // cache miss
sub x17, x16, x17, LSR #32 // imp = isa - imp_offs
.if \Mode == NORMAL
br x17
.elseif \Mode == LOOKUP
...
5: ldursw x9, [x10, #-8] // offset -8 is the fallback offset
add x16, x16, x9 // compute the fallback isa
b LLookupStart\Function // lookup again with a new isa
.endif
#endif // CONFIG_USE_PREOPT_CACHES
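// Hedged C model of the non-ptrauth index computation above: the packed
// cache header word (x11) carries hash_mask in bits 63..53 and
// hash_shift in bits 52..48, and the table index is
// ((_cmd - first_shared_cache_sel) >> hash_shift) & hash_mask.
// The helper name is mine.
#include <stdint.h>

static uint32_t preopt_cache_index(uint64_t header, uint32_t sel_delta) {
    uint32_t shift = (uint32_t)(header >> 48) & 0x1f;  // hash_shift (bits 52..48)
    uint64_t mask  = header >> 53;                     // hash_mask  (bits 63..53)
    return (uint32_t)((sel_delta >> shift) & mask);    // w9
}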
// Look up the class for a tagged pointer in x0, placing it in x16.
.macro GetTaggedClass
and x10, x0, #0x7 // x10 = small tag
asr x11, x0, #55 // x11 = large tag with 1s filling the top (because bit 63 is 1 on a tagged pointer)
cmp x10, #7 // tag == 7?
csel x12, x11, x10, eq // x12 = index in tagged pointer classes array, negative for extended tags.
// The extended tag array is placed immediately before the basic tag array
// so this looks into the right place either way. The sign extension done
// by the asr instruction produces the value extended_tag - 256, which produces
// the correct index in the extended tagged pointer classes array.
// x16 = _objc_debug_taggedpointer_classes[x12]
adrp x10, _objc_debug_taggedpointer_classes@PAGE
add x10, x10, _objc_debug_taggedpointer_classes@PAGEOFF
ldr x16, [x10, x12, LSL #3]
.endmacro
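// Hedged C model of GetTaggedClass; the helper name is mine, the table
// symbol is real. It relies on >> of a negative signed value being
// arithmetic (true on Clang/arm64): since bit 63 of a tagged pointer
// is 1, (intptr_t)ptr >> 55 yields extended_tag - 256, which indexes
// the extended-tag table placed immediately before the basic one.
#include <stdint.h>

extern void *_objc_debug_taggedpointer_classes[];

static void *tagged_pointer_class(uintptr_t ptr) {
    uintptr_t small = ptr & 0x7;              // x10 = small tag
    intptr_t  large = (intptr_t)ptr >> 55;    // x11 = extended_tag - 256
    intptr_t  index = (small == 7) ? large : (intptr_t)small;
    return _objc_debug_taggedpointer_classes[index];  // x16
}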
STATIC_ENTRY __objc_msgSend_uncached
UNWIND __objc_msgSend_uncached, FrameWithNoSaves
// THIS IS NOT A CALLABLE C FUNCTION
// Out-of-band r10 is the searched class
// r10 is already the class to search
MethodTableLookup NORMAL // r11 = IMP
jmp *%r11 // goto *imp
END_ENTRY __objc_msgSend_uncached
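// When the cache misses, MethodTableLookup crosses into the C runtime.
// A hedged sketch of that boundary: lookUpImpOrForward is objc4's
// private slow-path entry (signature per objc4-818); the flag value is
// illustrative (LOOKUP_INITIALIZE | LOOKUP_RESOLVER).
#include <objc/runtime.h>

extern IMP lookUpImpOrForward(id inst, SEL sel, Class cls, int behavior);

static IMP uncached_lookup(id receiver, SEL sel, Class cls) {
    // __objc_msgSend_uncached then jumps to the returned IMP (jmp *%r11).
    return lookUpImpOrForward(receiver, sel, cls, 1 | 2);
}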