aboutsummaryrefslogtreecommitdiffstats
diff options
context:
space:
mode:
authorMathieu Desnoyers <mathieu.desnoyers@efficios.com>2024-02-27 15:46:58 -0500
committerMathieu Desnoyers <mathieu.desnoyers@efficios.com>2024-02-27 17:28:07 -0500
commit21b58a3bb5e83b30729863ad3ec8becb16a1f1aa (patch)
treef3180b5e20727d7a07c2d8cfa30dfcd2265193eb
parent1bb8dd7bf0179d345b55c57e809f399b055ed80a (diff)
downloadlibrseq-21b58a3bb5e83b30729863ad3ec8becb16a1f1aa.tar.gz
Comment arm64 macros implementation
Signed-off-by: Mathieu Desnoyers <mathieu.desnoyers@efficios.com> Change-Id: Id918fa702bfbee014766f949329d1068a0404340
-rw-r--r--include/rseq/rseq-arm64.h103
1 file changed, 93 insertions, 10 deletions
diff --git a/include/rseq/rseq-arm64.h b/include/rseq/rseq-arm64.h
index d1da1ec..0ec44f0 100644
--- a/include/rseq/rseq-arm64.h
+++ b/include/rseq/rseq-arm64.h
@@ -1,5 +1,5 @@
/* SPDX-License-Identifier: MIT */
-/* SPDX-FileCopyrightText: 2016-2018 Mathieu Desnoyers <mathieu.desnoyers@efficios.com> */
+/* SPDX-FileCopyrightText: 2016-2024 Mathieu Desnoyers <mathieu.desnoyers@efficios.com> */
/* SPDX-FileCopyrightText: 2018 Will Deacon <will.deacon@arm.com> */
/*
@@ -7,6 +7,11 @@
*/
/*
+ * RSEQ_ASM_*() macro helpers are internal to the librseq headers. Those
+ * are not part of the public API.
+ */
+
+/*
* aarch64 -mbig-endian generates mixed endianness code vs data:
* little-endian code and big-endian data. Ensure the RSEQ_SIG signature
* matches code endianness.
@@ -21,10 +26,19 @@
#define RSEQ_SIG RSEQ_SIG_DATA
+/*
+ * Refer to the Linux kernel memory model (LKMM) for documentation of
+ * the memory barriers.
+ */
+
+/* CPU memory barrier. */
#define rseq_smp_mb() __asm__ __volatile__ ("dmb ish" ::: "memory")
+/* CPU read memory barrier */
#define rseq_smp_rmb() __asm__ __volatile__ ("dmb ishld" ::: "memory")
+/* CPU write memory barrier */
#define rseq_smp_wmb() __asm__ __volatile__ ("dmb ishst" ::: "memory")
+/* Acquire: One-way permeable barrier. */
#define rseq_smp_load_acquire(p) \
__extension__ ({ \
union { rseq_unqual_scalar_typeof(*(p)) __val; char __c[sizeof(*(p))]; } __u; \
@@ -53,8 +67,10 @@ __extension__ ({ \
(rseq_unqual_scalar_typeof(*(p)))__u.__val; \
})
+/* Acquire barrier after control dependency. */
#define rseq_smp_acquire__after_ctrl_dep() rseq_smp_rmb()
+/* Release: One-way permeable barrier. */
#define rseq_smp_store_release(p, v) \
do { \
union { rseq_unqual_scalar_typeof(*(p)) __val; char __c[sizeof(*(p))]; } __u = \
@@ -87,10 +103,12 @@ do { \
} \
} while (0)
+/* Temporary scratch registers. */
#define RSEQ_ASM_TMP_REG32 "w15"
#define RSEQ_ASM_TMP_REG "x15"
#define RSEQ_ASM_TMP_REG_2 "x14"
+/* Only used in RSEQ_ASM_DEFINE_TABLE. */
#define __RSEQ_ASM_DEFINE_TABLE(label, version, flags, start_ip, \
post_commit_offset, abort_ip) \
" .pushsection __rseq_cs, \"aw\"\n" \
@@ -101,15 +119,40 @@ do { \
__rseq_str(post_commit_offset) ", " \
__rseq_str(abort_ip) "\n" \
" .popsection\n\t" \
- " .pushsection __rseq_cs_ptr_array, \"aw\"\n" \
+ " .pushsection __rseq_cs_ptr_array, \"aw\"\n" \
" .quad " __rseq_str(label) "b\n" \
" .popsection\n"
+/*
+ * Define an rseq critical section structure of version 0 with no flags.
+ *
+ * @label:
+ * Local label for the beginning of the critical section descriptor
+ * structure.
+ * @start_ip:
+ * Pointer to the first instruction of the sequence of consecutive assembly
+ * instructions.
+ * @post_commit_ip:
+ * Pointer to the instruction after the last instruction of the sequence of
+ * consecutive assembly instructions.
+ * @abort_ip:
+ * Pointer to the instruction where to move the execution flow in case of
+ * abort of the sequence of consecutive assembly instructions.
+ */
#define RSEQ_ASM_DEFINE_TABLE(label, start_ip, post_commit_ip, abort_ip) \
__RSEQ_ASM_DEFINE_TABLE(label, 0x0, 0x0, start_ip, \
(post_commit_ip - start_ip), abort_ip)
/*
+ * Define the @exit_ip pointer as an exit point for the sequence of consecutive
+ * assembly instructions at @start_ip.
+ *
+ * @start_ip:
+ * Pointer to the first instruction of the sequence of consecutive assembly
+ * instructions.
+ * @exit_ip:
+ * Pointer to an exit point instruction.
+ *
* Exit points of a rseq critical section consist of all instructions outside
* of the critical section where a critical section can either branch to or
* reach through the normal course of its execution. The abort IP and the
@@ -122,14 +165,16 @@ do { \
" .quad " __rseq_str(start_ip) ", " __rseq_str(exit_ip) "\n" \
" .popsection\n"
-#define RSEQ_ASM_STORE_RSEQ_CS(label, cs_label, rseq_cs) \
- RSEQ_INJECT_ASM(1) \
- " adrp " RSEQ_ASM_TMP_REG ", " __rseq_str(cs_label) "\n" \
- " add " RSEQ_ASM_TMP_REG ", " RSEQ_ASM_TMP_REG \
- ", :lo12:" __rseq_str(cs_label) "\n" \
- " str " RSEQ_ASM_TMP_REG ", %[" __rseq_str(rseq_cs) "]\n" \
- __rseq_str(label) ":\n"
-
+/*
+ * Define a critical section abort handler.
+ *
+ * @label:
+ * Local label to the abort handler.
+ * @teardown:
+ * Sequence of instructions to run on abort.
+ * @abort_label:
+ * C label to jump to at the end of the sequence.
+ */
#define RSEQ_ASM_DEFINE_ABORT(label, teardown, abort_label) \
" b 222f\n" \
" .inst " __rseq_str(RSEQ_SIG_CODE) "\n" \
@@ -138,60 +183,98 @@ do { \
" b %l[" __rseq_str(abort_label) "]\n" \
"222:\n"
+/* Store the address of the rseq critical section descriptor @cs_label into the @rseq_cs field, then emit local label @label. */
+#define RSEQ_ASM_STORE_RSEQ_CS(label, cs_label, rseq_cs) \
+ RSEQ_INJECT_ASM(1) \
+ " adrp " RSEQ_ASM_TMP_REG ", " __rseq_str(cs_label) "\n" \
+ " add " RSEQ_ASM_TMP_REG ", " RSEQ_ASM_TMP_REG \
+ ", :lo12:" __rseq_str(cs_label) "\n" \
+ " str " RSEQ_ASM_TMP_REG ", %[" __rseq_str(rseq_cs) "]\n" \
+ __rseq_str(label) ":\n"
+
+/* Store @value to address @var. */
#define RSEQ_ASM_OP_STORE(value, var) \
" str %[" __rseq_str(value) "], %[" __rseq_str(var) "]\n"
+/* Store-release @value to address @var. */
#define RSEQ_ASM_OP_STORE_RELEASE(value, var) \
" stlr %[" __rseq_str(value) "], %[" __rseq_str(var) "]\n"
+/*
+ * End-of-sequence store of @value to address @var. Emit
+ * @post_commit_label label after the store instruction.
+ */
#define RSEQ_ASM_OP_FINAL_STORE(value, var, post_commit_label) \
RSEQ_ASM_OP_STORE(value, var) \
__rseq_str(post_commit_label) ":\n"
+/*
+ * End-of-sequence store-release of @value to address @var. Emit
+ * @post_commit_label label after the store instruction.
+ */
#define RSEQ_ASM_OP_FINAL_STORE_RELEASE(value, var, post_commit_label) \
RSEQ_ASM_OP_STORE_RELEASE(value, var) \
__rseq_str(post_commit_label) ":\n"
+/* Jump to local label @label when @var != @expect. */
#define RSEQ_ASM_OP_CBNE(var, expect, label) \
" ldr " RSEQ_ASM_TMP_REG ", %[" __rseq_str(var) "]\n" \
" sub " RSEQ_ASM_TMP_REG ", " RSEQ_ASM_TMP_REG \
", %[" __rseq_str(expect) "]\n" \
" cbnz " RSEQ_ASM_TMP_REG ", " __rseq_str(label) "\n"
+/*
+ * Jump to local label @label when @var != @expect (32-bit register
+ * comparison).
+ */
#define RSEQ_ASM_OP_CBNE32(var, expect, label) \
" ldr " RSEQ_ASM_TMP_REG32 ", %[" __rseq_str(var) "]\n" \
" sub " RSEQ_ASM_TMP_REG32 ", " RSEQ_ASM_TMP_REG32 \
", %w[" __rseq_str(expect) "]\n" \
" cbnz " RSEQ_ASM_TMP_REG32 ", " __rseq_str(label) "\n"
+/* Jump to local label @label when @var == @expect. */
#define RSEQ_ASM_OP_CBEQ(var, expect, label) \
" ldr " RSEQ_ASM_TMP_REG ", %[" __rseq_str(var) "]\n" \
" sub " RSEQ_ASM_TMP_REG ", " RSEQ_ASM_TMP_REG \
", %[" __rseq_str(expect) "]\n" \
" cbz " RSEQ_ASM_TMP_REG ", " __rseq_str(label) "\n"
+/* Jump to local label @label when @cpu_id != @current_cpu_id. */
#define RSEQ_ASM_CBNE_CPU_ID(cpu_id, current_cpu_id, label) \
RSEQ_INJECT_ASM(2) \
RSEQ_ASM_OP_CBNE32(current_cpu_id, cpu_id, label)
+/* Load @var into temporary register. */
#define RSEQ_ASM_OP_R_LOAD(var) \
" ldr " RSEQ_ASM_TMP_REG ", %[" __rseq_str(var) "]\n"
+/* Store from temporary register into @var. */
#define RSEQ_ASM_OP_R_STORE(var) \
" str " RSEQ_ASM_TMP_REG ", %[" __rseq_str(var) "]\n"
+/* Load from address in temporary register+@offset into temporary register. */
#define RSEQ_ASM_OP_R_LOAD_OFF(offset) \
" ldr " RSEQ_ASM_TMP_REG ", [" RSEQ_ASM_TMP_REG \
", %[" __rseq_str(offset) "]]\n"
+/* Add @count to temporary register. */
#define RSEQ_ASM_OP_R_ADD(count) \
" add " RSEQ_ASM_TMP_REG ", " RSEQ_ASM_TMP_REG \
", %[" __rseq_str(count) "]\n"
+/*
+ * End-of-sequence store of temporary register to address @var. Emit
+ * @post_commit_label label after the store instruction.
+ */
#define RSEQ_ASM_OP_R_FINAL_STORE(var, post_commit_label) \
" str " RSEQ_ASM_TMP_REG ", %[" __rseq_str(var) "]\n" \
__rseq_str(post_commit_label) ":\n"
+/*
+ * Copy @len bytes from @src to @dst. This is an inefficient bytewise
+ * copy and could be improved in the future.
+ */
#define RSEQ_ASM_OP_R_BYTEWISE_MEMCPY(dst, src, len) \
" cbz %[" __rseq_str(len) "], 333f\n" \
" mov " RSEQ_ASM_TMP_REG_2 ", %[" __rseq_str(len) "]\n" \