Pull locking updates from Ingo Molnar:

 - rtmutex cleanup & spring cleaning pass that removes ~400 lines of
   code

 - Futex simplifications & cleanups

 - Add debugging to the CSD code, to help track down a tenacious race
   (or hw problem)

 - Add lockdep_assert_not_held(), to allow code to require a lock to
   not be held, and propagate this into the ath10k driver (a usage
   sketch follows below)

 - Misc LKMM documentation updates

 - Misc KCSAN updates: cleanups & documentation updates

 - Misc fixes and cleanups

 - Fix locktorture bugs with ww_mutexes

* tag 'locking-core-2021-04-28' of git://git.kernel.org/pub/scm/linux/kernel/git/tip/tip: (44 commits)
  kcsan: Fix printk format string
  static_call: Relax static_call_update() function argument type
  static_call: Fix unused variable warn w/o MODULE
  locking/rtmutex: Clean up signal handling in __rt_mutex_slowlock()
  locking/rtmutex: Restrict the trylock WARN_ON() to debug
  locking/rtmutex: Fix misleading comment in rt_mutex_postunlock()
  locking/rtmutex: Consolidate the fast/slowpath invocation
  locking/rtmutex: Make text section and inlining consistent
  locking/rtmutex: Move debug functions as inlines into common header
  locking/rtmutex: Decrapify __rt_mutex_init()
  locking/rtmutex: Remove pointless CONFIG_RT_MUTEXES=n stubs
  locking/rtmutex: Inline chainwalk depth check
  locking/rtmutex: Move rt_mutex_debug_task_free() to rtmutex.c
  locking/rtmutex: Remove empty and unused debug stubs
  locking/rtmutex: Consolidate rt_mutex_init()
  locking/rtmutex: Remove output from deadlock detector
  locking/rtmutex: Remove rtmutex deadlock tester leftovers
  locking/rtmutex: Remove rt_mutex_timed_lock()
  MAINTAINERS: Add myself as futex reviewer
  locking/mutex: Remove repeated declaration
  ...
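Of the items above, lockdep_assert_not_held() is the one new annotation API: it is the inverse of the long-standing lockdep_assert_held() and warns if the given lock *is* held at that point (it compiles away without lockdep). A minimal usage sketch, with a hypothetical function and mutex standing in for the real ath10k call sites:

	#include <linux/lockdep.h>
	#include <linux/mutex.h>

	static DEFINE_MUTEX(conf_mutex);	/* hypothetical lock */

	/* Callers must not hold conf_mutex: the function takes it
	 * itself, so entering with it held would self-deadlock. */
	static void example_reconfig(void)
	{
		lockdep_assert_not_held(&conf_mutex);

		mutex_lock(&conf_mutex);
		/* ... reconfigure under the lock ... */
		mutex_unlock(&conf_mutex);
	}

The header below (arch/x86/include/asm/jump_label.h, judging by its include guard) is the x86 side of the static branch (jump label) mechanism.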
/* SPDX-License-Identifier: GPL-2.0 */
#ifndef _ASM_X86_JUMP_LABEL_H
#define _ASM_X86_JUMP_LABEL_H

/* This architecture supports batched patching of jump label sites. */
#define HAVE_JUMP_LABEL_BATCH

/* Both the NOP and the JMP emitted below are exactly five bytes. */
#define JUMP_LABEL_NOP_SIZE 5

#include <asm/asm.h>
#include <asm/nops.h>

#ifndef __ASSEMBLY__

#include <linux/stringify.h>
#include <linux/types.h>

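/*
 * Default-disabled branch site: emit a 5-byte NOP, so the straight-line
 * (key disabled) path costs nothing at runtime. The site, the target label
 * and the key are recorded as relative offsets in __jump_table so the jump
 * label code can later patch the NOP into a JMP; the "branch" polarity is
 * folded into the low bit of the key entry ("%c0 + %c1").
 */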
static __always_inline bool arch_static_branch(struct static_key * const key, const bool branch)
{
	asm_volatile_goto("1:"
		".byte " __stringify(BYTES_NOP5) "\n\t"
		".pushsection __jump_table, \"aw\" \n\t"
		_ASM_ALIGN "\n\t"
		".long 1b - ., %l[l_yes] - . \n\t"
		_ASM_PTR "%c0 + %c1 - .\n\t"
		".popsection \n\t"
		: : "i" (key), "i" (branch) : : l_yes);

	return false;
l_yes:
	return true;
}

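/*
 * Default-enabled variant: emit an unconditional JMP (0xe9 plus a rel32
 * displacement) to l_yes instead of a NOP; the __jump_table bookkeeping
 * is identical to arch_static_branch() above.
 */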
static __always_inline bool arch_static_branch_jump(struct static_key * const key, const bool branch)
{
	asm_volatile_goto("1:"
		".byte 0xe9\n\t .long %l[l_yes] - 2f\n\t"
		"2:\n\t"
		".pushsection __jump_table, \"aw\" \n\t"
		_ASM_ALIGN "\n\t"
		".long 1b - ., %l[l_yes] - . \n\t"
		_ASM_PTR "%c0 + %c1 - .\n\t"
		".popsection \n\t"
		: : "i" (key), "i" (branch) : : l_yes);

	return false;
l_yes:
	return true;
}

#else	/* __ASSEMBLY__ */

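/*
 * Assembly-side equivalents of the C helpers above, for use from .S files:
 * each macro emits either the 5-byte JMP or the 5-byte NOP depending on the
 * build-time default \def, and records the site in __jump_table.
 */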
.macro STATIC_JUMP_IF_TRUE target, key, def
.Lstatic_jump_\@:
	.if \def
	/* Equivalent to "jmp.d32 \target" */
	.byte		0xe9
	.long		\target - .Lstatic_jump_after_\@
.Lstatic_jump_after_\@:
	.else
	.byte		BYTES_NOP5
	.endif
	.pushsection __jump_table, "aw"
	_ASM_ALIGN
	.long		.Lstatic_jump_\@ - ., \target - .
	_ASM_PTR	\key - .
	.popsection
.endm

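/* Inverted-polarity variant: "\key + 1" sets the branch-type bit (bit 0). */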
.macro STATIC_JUMP_IF_FALSE target, key, def
.Lstatic_jump_\@:
	.if \def
	.byte		BYTES_NOP5
	.else
	/* Equivalent to "jmp.d32 \target" */
	.byte		0xe9
	.long		\target - .Lstatic_jump_after_\@
.Lstatic_jump_after_\@:
	.endif
	.pushsection __jump_table, "aw"
	_ASM_ALIGN
	.long		.Lstatic_jump_\@ - ., \target - .
	_ASM_PTR	\key + 1 - .
	.popsection
.endm

#endif /* __ASSEMBLY__ */

#endif /* _ASM_X86_JUMP_LABEL_H */
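For context, these arch_* helpers are not called directly; they sit underneath the generic static key API in <linux/jump_label.h>. A minimal consumer sketch (key and function names are made up for illustration):

	#include <linux/jump_label.h>
	#include <linux/printk.h>

	/* Hypothetical key, false by default: the branch below compiles
	 * to the 5-byte NOP emitted by arch_static_branch(). */
	static DEFINE_STATIC_KEY_FALSE(example_key);

	void example_fast_path(void)
	{
		/* No memory load, no conditional: just a patchable NOP/JMP. */
		if (static_branch_unlikely(&example_key))
			pr_info("slow path enabled\n");
	}

	void example_enable(void)
	{
		static_branch_enable(&example_key);	/* patches NOP -> JMP */
	}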