path: root/arch/arm/kernel
author     Russell King <rmk+kernel@arm.linux.org.uk>	2011-07-22 23:09:07 +0100
committer  Russell King <rmk+kernel@arm.linux.org.uk>	2011-07-22 23:09:07 +0100
commit     3ad55155b222f2a901405dea20ff7c68828ecd92 (patch)
tree       53b24c981387b037084a333dc5ae23be8e82ef4a /arch/arm/kernel
parent     06f365acef5ca54fd5708a0d853c4a89609536f1 (diff)
parent     6645cb61f3a1186a71475385d33f875dd8fb38bf (diff)
Merge branch 'devel-stable' into for-next
Conflicts: arch/arm/kernel/entry-armv.S
Diffstat (limited to 'arch/arm/kernel')
-rw-r--r--  arch/arm/kernel/Makefile         |    7
-rw-r--r--  arch/arm/kernel/entry-armv.S     |  246
-rw-r--r--  arch/arm/kernel/entry-header.S   |   12
-rw-r--r--  arch/arm/kernel/kprobes-arm.c    |  999
-rw-r--r--  arch/arm/kernel/kprobes-common.c |  577
-rw-r--r--  arch/arm/kernel/kprobes-decode.c | 1670
-rw-r--r--  arch/arm/kernel/kprobes-thumb.c  | 1462
-rw-r--r--  arch/arm/kernel/kprobes.c        |  222
-rw-r--r--  arch/arm/kernel/kprobes.h        |  420
-rw-r--r--  arch/arm/kernel/perf_event.c     |    6
-rw-r--r--  arch/arm/kernel/perf_event_v7.c  |  344
-rw-r--r--  arch/arm/kernel/ptrace.c         |   28
-rw-r--r--  arch/arm/kernel/setup.c          |   10
-rw-r--r--  arch/arm/kernel/traps.c          |   17
14 files changed, 4123 insertions, 1897 deletions
diff --git a/arch/arm/kernel/Makefile b/arch/arm/kernel/Makefile
index a5b31af5c2b8..f7887dc53c1f 100644
--- a/arch/arm/kernel/Makefile
+++ b/arch/arm/kernel/Makefile
@@ -37,7 +37,12 @@ obj-$(CONFIG_HAVE_ARM_TWD) += smp_twd.o
obj-$(CONFIG_DYNAMIC_FTRACE) += ftrace.o
obj-$(CONFIG_FUNCTION_GRAPH_TRACER) += ftrace.o
obj-$(CONFIG_KEXEC) += machine_kexec.o relocate_kernel.o
-obj-$(CONFIG_KPROBES) += kprobes.o kprobes-decode.o
+obj-$(CONFIG_KPROBES) += kprobes.o kprobes-common.o
+ifdef CONFIG_THUMB2_KERNEL
+obj-$(CONFIG_KPROBES) += kprobes-thumb.o
+else
+obj-$(CONFIG_KPROBES) += kprobes-arm.o
+endif
obj-$(CONFIG_ATAGS_PROC) += atags.o
obj-$(CONFIG_OABI_COMPAT) += sys_oabi-compat.o
obj-$(CONFIG_ARM_THUMBEE) += thumbee.o
diff --git a/arch/arm/kernel/entry-armv.S b/arch/arm/kernel/entry-armv.S
index fa02a22a4c4b..a87cbf889ff4 100644
--- a/arch/arm/kernel/entry-armv.S
+++ b/arch/arm/kernel/entry-armv.S
@@ -377,7 +377,7 @@ ENDPROC(__pabt_svc)
.endm
.macro kuser_cmpxchg_check
-#if __LINUX_ARM_ARCH__ < 6 && !defined(CONFIG_NEEDS_SYSCALL_FOR_CMPXCHG)
+#if !defined(CONFIG_CPU_32v6K) && !defined(CONFIG_NEEDS_SYSCALL_FOR_CMPXCHG)
#ifndef CONFIG_MMU
#warning "NPTL on non MMU needs fixing"
#else
@@ -386,7 +386,7 @@ ENDPROC(__pabt_svc)
@ perform a quick test inline since it should be false
@ 99.9999% of the time. The rest is done out of line.
cmp r4, #TASK_SIZE
- blhs kuser_cmpxchg_fixup
+ blhs kuser_cmpxchg64_fixup
#endif
#endif
.endm
@@ -701,31 +701,12 @@ ENDPROC(__switch_to)
/*
* User helpers.
*
- * These are segment of kernel provided user code reachable from user space
- * at a fixed address in kernel memory. This is used to provide user space
- * with some operations which require kernel help because of unimplemented
- * native feature and/or instructions in many ARM CPUs. The idea is for
- * this code to be executed directly in user mode for best efficiency but
- * which is too intimate with the kernel counter part to be left to user
- * libraries. In fact this code might even differ from one CPU to another
- * depending on the available instruction set and restrictions like on
- * SMP systems. In other words, the kernel reserves the right to change
- * this code as needed without warning. Only the entry points and their
- * results are guaranteed to be stable.
- *
* Each segment is 32-byte aligned and will be moved to the top of the high
* vector page. New segments (if ever needed) must be added in front of
* existing ones. This mechanism should be used only for things that are
* really small and justified, and not be abused freely.
*
- * User space is expected to implement those things inline when optimizing
- * for a processor that has the necessary native support, but only if such
- * resulting binaries are already to be incompatible with earlier ARM
- * processors due to the use of unsupported instructions other than what
- * is provided here. In other words don't make binaries unable to run on
- * earlier processors just for the sake of not using these kernel helpers
- * if your compiled code is not going to use the new instructions for other
- * purpose.
+ * See Documentation/arm/kernel_user_helpers.txt for formal definitions.
*/
THUMB( .arm )
@@ -742,96 +723,103 @@ ENDPROC(__switch_to)
__kuser_helper_start:
/*
- * Reference prototype:
- *
- * void __kernel_memory_barrier(void)
- *
- * Input:
- *
- * lr = return address
- *
- * Output:
- *
- * none
- *
- * Clobbered:
- *
- * none
- *
- * Definition and user space usage example:
- *
- * typedef void (__kernel_dmb_t)(void);
- * #define __kernel_dmb (*(__kernel_dmb_t *)0xffff0fa0)
- *
- * Apply any needed memory barrier to preserve consistency with data modified
- * manually and __kuser_cmpxchg usage.
- *
- * This could be used as follows:
- *
- * #define __kernel_dmb() \
- * asm volatile ( "mov r0, #0xffff0fff; mov lr, pc; sub pc, r0, #95" \
- * : : : "r0", "lr","cc" )
+ * Due to the length of some sequences, __kuser_cmpxchg64 spans 2 regular
+ * kuser "slots", therefore 0xffff0f80 is not used as a valid entry point.
*/
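As an aside (not part of the patch): user space reaches this helper only through its fixed address. A minimal wrapper, following the prototype given in Documentation/arm/kernel_user_helpers.txt (the calling convention below is taken from that document, not from this file), might look like:

#include <stdint.h>

/* Returns 0 (and sets the C flag) if *ptr was updated, non-zero otherwise. */
typedef int (__kuser_cmpxchg64_t)(const int64_t *oldval,
                                  const int64_t *newval,
                                  volatile int64_t *ptr);
#define __kuser_cmpxchg64 (*(__kuser_cmpxchg64_t *)0xffff0f60)

static void atomic_add64(volatile int64_t *ptr, int64_t val)
{
        int64_t oldval, newval;

        do {
                oldval = *ptr;
                newval = oldval + val;
        } while (__kuser_cmpxchg64(&oldval, &newval, ptr) != 0);
}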
-__kuser_memory_barrier: @ 0xffff0fa0
+__kuser_cmpxchg64: @ 0xffff0f60
+
+#if defined(CONFIG_NEEDS_SYSCALL_FOR_CMPXCHG)
+
+ /*
+ * Poor you. No fast solution possible...
+ * The kernel itself must perform the operation.
+ * A special ghost syscall is used for that (see traps.c).
+ */
+ stmfd sp!, {r7, lr}
+ ldr r7, 1f @ it's 20 bits
+ swi __ARM_NR_cmpxchg64
+ ldmfd sp!, {r7, pc}
+1: .word __ARM_NR_cmpxchg64
+
+#elif defined(CONFIG_CPU_32v6K)
+
+ stmfd sp!, {r4, r5, r6, r7}
+ ldrd r4, r5, [r0] @ load old val
+ ldrd r6, r7, [r1] @ load new val
+ smp_dmb arm
+1: ldrexd r0, r1, [r2] @ load current val
+ eors r3, r0, r4 @ compare with oldval (1)
+ eoreqs r3, r1, r5 @ compare with oldval (2)
+ strexdeq r3, r6, r7, [r2] @ store newval if eq
+ teqeq r3, #1 @ success?
+ beq 1b @ if no then retry
smp_dmb arm
+ rsbs r0, r3, #0 @ set returned val and C flag
+ ldmfd sp!, {r4, r5, r6, r7}
+ bx lr
+
+#elif !defined(CONFIG_SMP)
+
+#ifdef CONFIG_MMU
+
+ /*
+ * The only thing that can break atomicity in this cmpxchg64
+ * implementation is either an IRQ or a data abort exception
+ * causing another process/thread to be scheduled in the middle of
+ * the critical sequence. The same strategy as for cmpxchg is used.
+ */
+ stmfd sp!, {r4, r5, r6, lr}
+ ldmia r0, {r4, r5} @ load old val
+ ldmia r1, {r6, lr} @ load new val
+1: ldmia r2, {r0, r1} @ load current val
+ eors r3, r0, r4 @ compare with oldval (1)
+ eoreqs r3, r1, r5 @ compare with oldval (2)
+2: stmeqia r2, {r6, lr} @ store newval if eq
+ rsbs r0, r3, #0 @ set return val and C flag
+ ldmfd sp!, {r4, r5, r6, pc}
+
+ .text
+kuser_cmpxchg64_fixup:
+ @ Called from kuser_cmpxchg_fixup.
+ @ r4 = address of interrupted insn (must be preserved).
+ @ sp = saved regs. r7 and r8 are clobbered.
+ @ 1b = first critical insn, 2b = last critical insn.
+ @ If r4 >= 1b and r4 <= 2b then saved pc_usr is set to 1b.
+ mov r7, #0xffff0fff
+ sub r7, r7, #(0xffff0fff - (0xffff0f60 + (1b - __kuser_cmpxchg64)))
+ subs r8, r4, r7
+ rsbcss r8, r8, #(2b - 1b)
+ strcs r7, [sp, #S_PC]
+#if __LINUX_ARM_ARCH__ < 6
+ bcc kuser_cmpxchg32_fixup
+#endif
+ mov pc, lr
+ .previous
+
+#else
+#warning "NPTL on non MMU needs fixing"
+ mov r0, #-1
+ adds r0, r0, #0
usr_ret lr
+#endif
+
+#else
+#error "incoherent kernel configuration"
+#endif
+
+ /* pad to next slot */
+ .rept (16 - (. - __kuser_cmpxchg64)/4)
+ .word 0
+ .endr
.align 5
-/*
- * Reference prototype:
- *
- * int __kernel_cmpxchg(int oldval, int newval, int *ptr)
- *
- * Input:
- *
- * r0 = oldval
- * r1 = newval
- * r2 = ptr
- * lr = return address
- *
- * Output:
- *
- * r0 = returned value (zero or non-zero)
- * C flag = set if r0 == 0, clear if r0 != 0
- *
- * Clobbered:
- *
- * r3, ip, flags
- *
- * Definition and user space usage example:
- *
- * typedef int (__kernel_cmpxchg_t)(int oldval, int newval, int *ptr);
- * #define __kernel_cmpxchg (*(__kernel_cmpxchg_t *)0xffff0fc0)
- *
- * Atomically store newval in *ptr if *ptr is equal to oldval for user space.
- * Return zero if *ptr was changed or non-zero if no exchange happened.
- * The C flag is also set if *ptr was changed to allow for assembly
- * optimization in the calling code.
- *
- * Notes:
- *
- * - This routine already includes memory barriers as needed.
- *
- * For example, a user space atomic_add implementation could look like this:
- *
- * #define atomic_add(ptr, val) \
- * ({ register unsigned int *__ptr asm("r2") = (ptr); \
- * register unsigned int __result asm("r1"); \
- * asm volatile ( \
- * "1: @ atomic_add\n\t" \
- * "ldr r0, [r2]\n\t" \
- * "mov r3, #0xffff0fff\n\t" \
- * "add lr, pc, #4\n\t" \
- * "add r1, r0, %2\n\t" \
- * "add pc, r3, #(0xffff0fc0 - 0xffff0fff)\n\t" \
- * "bcc 1b" \
- * : "=&r" (__result) \
- * : "r" (__ptr), "rIL" (val) \
- * : "r0","r3","ip","lr","cc","memory" ); \
- * __result; })
- */
+__kuser_memory_barrier: @ 0xffff0fa0
+ smp_dmb arm
+ usr_ret lr
+
+ .align 5
__kuser_cmpxchg: @ 0xffff0fc0
@@ -868,7 +856,7 @@ __kuser_cmpxchg: @ 0xffff0fc0
usr_ret lr
.text
-kuser_cmpxchg_fixup:
+kuser_cmpxchg32_fixup:
@ Called from kuser_cmpxchg_check macro.
@ r4 = address of interrupted insn (must be preserved).
@ sp = saved regs. r7 and r8 are clobbered.
@@ -906,39 +894,6 @@ kuser_cmpxchg_fixup:
.align 5
-/*
- * Reference prototype:
- *
- * int __kernel_get_tls(void)
- *
- * Input:
- *
- * lr = return address
- *
- * Output:
- *
- * r0 = TLS value
- *
- * Clobbered:
- *
- * none
- *
- * Definition and user space usage example:
- *
- * typedef int (__kernel_get_tls_t)(void);
- * #define __kernel_get_tls (*(__kernel_get_tls_t *)0xffff0fe0)
- *
- * Get the TLS value as previously set via the __ARM_NR_set_tls syscall.
- *
- * This could be used as follows:
- *
- * #define __kernel_get_tls() \
- * ({ register unsigned int __val asm("r0"); \
- * asm( "mov r0, #0xffff0fff; mov lr, pc; sub pc, r0, #31" \
- * : "=r" (__val) : : "lr","cc" ); \
- * __val; })
- */
-
__kuser_get_tls: @ 0xffff0fe0
ldr r0, [pc, #(16 - 8)] @ read TLS, set in kuser_get_tls_init
usr_ret lr
@@ -947,19 +902,6 @@ __kuser_get_tls: @ 0xffff0fe0
.word 0 @ 0xffff0ff0 software TLS value, then
.endr @ pad up to __kuser_helper_version
-/*
- * Reference declaration:
- *
- * extern unsigned int __kernel_helper_version;
- *
- * Definition and user space usage example:
- *
- * #define __kernel_helper_version (*(unsigned int *)0xffff0ffc)
- *
- * User space may read this to determine the curent number of helpers
- * available.
- */
-
__kuser_helper_version: @ 0xffff0ffc
.word ((__kuser_helper_end - __kuser_helper_start) >> 5)
diff --git a/arch/arm/kernel/entry-header.S b/arch/arm/kernel/entry-header.S
index 4d6ad8348e89..9a8531eadd3d 100644
--- a/arch/arm/kernel/entry-header.S
+++ b/arch/arm/kernel/entry-header.S
@@ -121,15 +121,13 @@
.endm
#else /* CONFIG_THUMB2_KERNEL */
.macro svc_exit, rpsr
+ ldr lr, [sp, #S_SP] @ top of the stack
+ ldrd r0, r1, [sp, #S_LR] @ calling lr and pc
clrex @ clear the exclusive monitor
- ldr r0, [sp, #S_SP] @ top of the stack
- ldr r1, [sp, #S_PC] @ return address
- tst r0, #4 @ orig stack 8-byte aligned?
- stmdb r0, {r1, \rpsr} @ rfe context
+ stmdb lr!, {r0, r1, \rpsr} @ calling lr and rfe context
ldmia sp, {r0 - r12}
- ldr lr, [sp, #S_LR]
- addeq sp, sp, #S_FRAME_SIZE - 8 @ aligned
- addne sp, sp, #S_FRAME_SIZE - 4 @ not aligned
+ mov sp, lr
+ ldr lr, [sp], #4
rfeia sp!
.endm
diff --git a/arch/arm/kernel/kprobes-arm.c b/arch/arm/kernel/kprobes-arm.c
new file mode 100644
index 000000000000..79203ee1d039
--- /dev/null
+++ b/arch/arm/kernel/kprobes-arm.c
@@ -0,0 +1,999 @@
+/*
+ * arch/arm/kernel/kprobes-decode.c
+ *
+ * Copyright (C) 2006, 2007 Motorola Inc.
+ *
+ * This program is free software; you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License version 2 as
+ * published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * General Public License for more details.
+ */
+
+/*
+ * We do not have hardware single-stepping on ARM. This
+ * effort is further complicated by the ARM not having a
+ * "next PC" register. Instructions that change the PC
+ * can't be safely single-stepped in an MP environment, so
+ * we have a lot of work to do:
+ *
+ * In the prepare phase:
+ * *) If it is an instruction that does anything
+ * with the CPU mode, we reject it for a kprobe.
+ * (This is out of laziness rather than need. The
+ * instructions could be simulated.)
+ *
+ * *) Otherwise, decode the instruction rewriting its
+ * registers to take fixed, ordered registers and
+ * setting a handler for it to run the instruction.
+ *
+ * In the execution phase by an instruction's handler:
+ *
+ * *) If the PC is written to by the instruction, the
+ * instruction must be fully simulated in software.
+ *
+ * *) Otherwise, a modified form of the instruction is
+ * directly executed. Its handler calls the
+ * instruction in insn[0]. In insn[1] is a
+ * "mov pc, lr" to return.
+ *
+ * Before calling, load up the reordered registers
+ * from the original instruction's registers. If one
+ * of the original input registers is the PC, compute
+ * and adjust the appropriate input register.
+ *
+ * After call completes, copy the output registers to
+ * the original instruction's original registers.
+ *
+ * We don't use a real breakpoint instruction since that
+ * would have us in the kernel go from SVC mode to SVC
+ * mode losing the link register. Instead we use an
+ * undefined instruction. To simplify processing, the
+ * undefined instruction used for kprobes must be reserved
+ * exclusively for kprobes use.
+ *
+ * TODO: ifdef out some instruction decoding based on architecture.
+ */
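A simplified sketch (illustration only, not part of the patch) of how the pieces described above fit together at probe-hit time; the field names are the ones this file fills in through struct arch_specific_insn, while the surrounding locking and kprobe bookkeeping live in kprobes.c:

static void probe_hit_sketch(struct kprobe *p, struct pt_regs *regs)
{
        if (!p->ainsn.insn_check_cc(regs->ARM_cpsr)) {
                /* Condition failed: treat the probed instruction as a no-op. */
                regs->ARM_pc += 4;
                return;
        }

        /*
         * insn_singlestep (arm_singlestep at the end of this file) advances
         * the PC and then either simulates the instruction in C or branches
         * to the rewritten copy in p->ainsn.insn[], so the original
         * instruction never has to be re-executed in place.
         */
        p->ainsn.insn_singlestep(p, regs);
}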
+
+#include <linux/kernel.h>
+#include <linux/kprobes.h>
+
+#include "kprobes.h"
+
+#define sign_extend(x, signbit) ((x) | (0 - ((x) & (1 << (signbit)))))
+
+#define branch_displacement(insn) sign_extend(((insn) & 0xffffff) << 2, 25)
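Two worked values for the displacement macro above, using ordinary B encodings chosen purely for illustration:

/* Illustration only, not part of the patch. */
static void branch_displacement_examples(void)
{
        /* 0xea000001: imm24 = 1, disp = +4, so target = insn address + 12 */
        BUG_ON((int)branch_displacement(0xea000001) != 4);

        /* 0xeafffffe: "b .", imm24 sign-extends to disp = -8, target = insn address */
        BUG_ON((int)branch_displacement(0xeafffffe) != -8);
}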
+
+#if __LINUX_ARM_ARCH__ >= 6
+#define BLX(reg) "blx "reg" \n\t"
+#else
+#define BLX(reg) "mov lr, pc \n\t" \
+ "mov pc, "reg" \n\t"
+#endif
+
+/*
+ * To avoid the complications of mimicking single-stepping on a
+ * processor without a Next-PC or a single-step mode, and to
+ * avoid having to deal with the side-effects of boosting, we
+ * simulate or emulate (almost) all ARM instructions.
+ *
+ * "Simulation" is where the instruction's behavior is duplicated in
+ * C code. "Emulation" is where the original instruction is rewritten
+ * and executed, often by altering its registers.
+ *
+ * By having all behavior of the kprobe'd instruction completed before
+ * returning from the kprobe_handler(), all locks (scheduler and
+ * interrupt) can safely be released. There is no need for secondary
+ * breakpoints, no race with MP or preemptable kernels, nor having to
+ * clean up resource counts at a later time, impacting overall system
+ * performance. By rewriting the instruction, only the minimum registers
+ * need to be loaded and saved back optimizing performance.
+ *
+ * Calling the insnslot_*_rwflags version of a function doesn't hurt
+ * anything even when the CPSR flags aren't updated by the
+ * instruction. It's just a little slower in return for saving
+ * a little space by not having a duplicate function that doesn't
+ * update the flags. (The same optimization can be said for
+ * instructions that do or don't perform register writeback)
+ * Also, instructions can either read the flags, only write the
+ * flags, or read and write the flags. To save combinations
+ * rather than for sheer performance, flag functions just assume
+ * read and write of flags.
+ */
+
+static void __kprobes simulate_bbl(struct kprobe *p, struct pt_regs *regs)
+{
+ kprobe_opcode_t insn = p->opcode;
+ long iaddr = (long)p->addr;
+ int disp = branch_displacement(insn);
+
+ if (insn & (1 << 24))
+ regs->ARM_lr = iaddr + 4;
+
+ regs->ARM_pc = iaddr + 8 + disp;
+}
+
+static void __kprobes simulate_blx1(struct kprobe *p, struct pt_regs *regs)
+{
+ kprobe_opcode_t insn = p->opcode;
+ long iaddr = (long)p->addr;
+ int disp = branch_displacement(insn);
+
+ regs->ARM_lr = iaddr + 4;
+ regs->ARM_pc = iaddr + 8 + disp + ((insn >> 23) & 0x2);
+ regs->ARM_cpsr |= PSR_T_BIT;
+}
+
+static void __kprobes simulate_blx2bx(struct kprobe *p, struct pt_regs *regs)
+{
+ kprobe_opcode_t insn = p->opcode;
+ int rm = insn & 0xf;
+ long rmv = regs->uregs[rm];
+
+ if (insn & (1 << 5))
+ regs->ARM_lr = (long)p->addr + 4;
+
+ regs->ARM_pc = rmv & ~0x1;
+ regs->ARM_cpsr &= ~PSR_T_BIT;
+ if (rmv & 0x1)
+ regs->ARM_cpsr |= PSR_T_BIT;
+}
+
+static void __kprobes simulate_mrs(struct kprobe *p, struct pt_regs *regs)
+{
+ kprobe_opcode_t insn = p->opcode;
+ int rd = (insn >> 12) & 0xf;
+ unsigned long mask = 0xf8ff03df; /* Mask out execution state */
+ regs->uregs[rd] = regs->ARM_cpsr & mask;
+}
+
+static void __kprobes simulate_mov_ipsp(struct kprobe *p, struct pt_regs *regs)
+{
+ regs->uregs[12] = regs->uregs[13];
+}
+
+static void __kprobes
+emulate_ldrdstrd(struct kprobe *p, struct pt_regs *regs)
+{
+ kprobe_opcode_t insn = p->opcode;
+ unsigned long pc = (unsigned long)p->addr + 8;
+ int rt = (insn >> 12) & 0xf;
+ int rn = (insn >> 16) & 0xf;
+ int rm = insn & 0xf;
+
+ register unsigned long rtv asm("r0") = regs->uregs[rt];
+ register unsigned long rt2v asm("r1") = regs->uregs[rt+1];
+ register unsigned long rnv asm("r2") = (rn == 15) ? pc
+ : regs->uregs[rn];
+ register unsigned long rmv asm("r3") = regs->uregs[rm];
+
+ __asm__ __volatile__ (
+ BLX("%[fn]")
+ : "=r" (rtv), "=r" (rt2v), "=r" (rnv)
+ : "0" (rtv), "1" (rt2v), "2" (rnv), "r" (rmv),
+ [fn] "r" (p->ainsn.insn_fn)
+ : "lr", "memory", "cc"
+ );
+
+ regs->uregs[rt] = rtv;
+ regs->uregs[rt+1] = rt2v;
+ if (is_writeback(insn))
+ regs->uregs[rn] = rnv;
+}
+
+static void __kprobes
+emulate_ldr(struct kprobe *p, struct pt_regs *regs)
+{
+ kprobe_opcode_t insn = p->opcode;
+ unsigned long pc = (unsigned long)p->addr + 8;
+ int rt = (insn >> 12) & 0xf;
+ int rn = (insn >> 16) & 0xf;
+ int rm = insn & 0xf;
+
+ register unsigned long rtv asm("r0");
+ register unsigned long rnv asm("r2") = (rn == 15) ? pc
+ : regs->uregs[rn];
+ register unsigned long rmv asm("r3") = regs->uregs[rm];
+
+ __asm__ __volatile__ (
+ BLX("%[fn]")
+ : "=r" (rtv), "=r" (rnv)
+ : "1" (rnv), "r" (rmv), [fn] "r" (p->ainsn.insn_fn)
+ : "lr", "memory", "cc"
+ );
+
+ if (rt == 15)
+ load_write_pc(rtv, regs);
+ else
+ regs->uregs[rt] = rtv;
+
+ if (is_writeback(insn))
+ regs->uregs[rn] = rnv;
+}
+
+static void __kprobes
+emulate_str(struct kprobe *p, struct pt_regs *regs)
+{
+ kprobe_opcode_t insn = p->opcode;
+ unsigned long rtpc = (unsigned long)p->addr + str_pc_offset;
+ unsigned long rnpc = (unsigned long)p->addr + 8;
+ int rt = (insn >> 12) & 0xf;
+ int rn = (insn >> 16) & 0xf;
+ int rm = insn & 0xf;
+
+ register unsigned long rtv asm("r0") = (rt == 15) ? rtpc
+ : regs->uregs[rt];
+ register unsigned long rnv asm("r2") = (rn == 15) ? rnpc
+ : regs->uregs[rn];
+ register unsigned long rmv asm("r3") = regs->uregs[rm];
+
+ __asm__ __volatile__ (
+ BLX("%[fn]")
+ : "=r" (rnv)
+ : "r" (rtv), "0" (rnv), "r" (rmv), [fn] "r" (p->ainsn.insn_fn)
+ : "lr", "memory", "cc"
+ );
+
+ if (is_writeback(insn))
+ regs->uregs[rn] = rnv;
+}
+
+static void __kprobes
+emulate_rd12rn16rm0rs8_rwflags(struct kprobe *p, struct pt_regs *regs)
+{
+ kprobe_opcode_t insn = p->opcode;
+ unsigned long pc = (unsigned long)p->addr + 8;
+ int rd = (insn >> 12) & 0xf;
+ int rn = (insn >> 16) & 0xf;
+ int rm = insn & 0xf;
+ int rs = (insn >> 8) & 0xf;
+
+ register unsigned long rdv asm("r0") = regs->uregs[rd];
+ register unsigned long rnv asm("r2") = (rn == 15) ? pc
+ : regs->uregs[rn];
+ register unsigned long rmv asm("r3") = (rm == 15) ? pc
+ : regs->uregs[rm];
+ register unsigned long rsv asm("r1") = regs->uregs[rs];
+ unsigned long cpsr = regs->ARM_cpsr;
+
+ __asm__ __volatile__ (
+ "msr cpsr_fs, %[cpsr] \n\t"
+ BLX("%[fn]")
+ "mrs %[cpsr], cpsr \n\t"
+ : "=r" (rdv), [cpsr] "=r" (cpsr)
+ : "0" (rdv), "r" (rnv), "r" (rmv), "r" (rsv),
+ "1" (cpsr), [fn] "r" (p->ainsn.insn_fn)
+ : "lr", "memory", "cc"
+ );
+
+ if (rd == 15)
+ alu_write_pc(rdv, regs);
+ else
+ regs->uregs[rd] = rdv;
+ regs->ARM_cpsr = (regs->ARM_cpsr & ~APSR_MASK) | (cpsr & APSR_MASK);
+}
+
+static void __kprobes
+emulate_rd12rn16rm0_rwflags_nopc(struct kprobe *p, struct pt_regs *regs)
+{
+ kprobe_opcode_t insn = p->opcode;
+ int rd = (insn >> 12) & 0xf;
+ int rn = (insn >> 16) & 0xf;
+ int rm = insn & 0xf;
+
+ register unsigned long rdv asm("r0") = regs->uregs[rd];
+ register unsigned long rnv asm("r2") = regs->uregs[rn];
+ register unsigned long rmv asm("r3") = regs->uregs[rm];
+ unsigned long cpsr = regs->ARM_cpsr;
+
+ __asm__ __volatile__ (
+ "msr cpsr_fs, %[cpsr] \n\t"
+ BLX("%[fn]")
+ "mrs %[cpsr], cpsr \n\t"
+ : "=r" (rdv), [cpsr] "=r" (cpsr)
+ : "0" (rdv), "r" (rnv), "r" (rmv),
+ "1" (cpsr), [fn] "r" (p->ainsn.insn_fn)
+ : "lr", "memory", "cc"
+ );
+
+ regs->uregs[rd] = rdv;
+ regs->ARM_cpsr = (regs->ARM_cpsr & ~APSR_MASK) | (cpsr & APSR_MASK);
+}
+
+static void __kprobes
+emulate_rd16rn12rm0rs8_rwflags_nopc(struct kprobe *p, struct pt_regs *regs)
+{
+ kprobe_opcode_t insn = p->opcode;
+ int rd = (insn >> 16) & 0xf;
+ int rn = (insn >> 12) & 0xf;
+ int rm = insn & 0xf;
+ int rs = (insn >> 8) & 0xf;
+
+ register unsigned long rdv asm("r2") = regs->uregs[rd];
+ register unsigned long rnv asm("r0") = regs->uregs[rn];
+ register unsigned long rmv asm("r3") = regs->uregs[rm];
+ register unsigned long rsv asm("r1") = regs->uregs[rs];
+ unsigned long cpsr = regs->ARM_cpsr;
+
+ __asm__ __volatile__ (
+ "msr cpsr_fs, %[cpsr] \n\t"
+ BLX("%[fn]")
+ "mrs %[cpsr], cpsr \n\t"
+ : "=r" (rdv), [cpsr] "=r" (cpsr)
+ : "0" (rdv), "r" (rnv), "r" (rmv), "r" (rsv),
+ "1" (cpsr), [fn] "r" (p->ainsn.insn_fn)
+ : "lr", "memory", "cc"
+ );
+
+ regs->uregs[rd] = rdv;
+ regs->ARM_cpsr = (regs->ARM_cpsr & ~APSR_MASK) | (cpsr & APSR_MASK);
+}
+
+static void __kprobes
+emulate_rd12rm0_noflags_nopc(struct kprobe *p, struct pt_regs *regs)
+{
+ kprobe_opcode_t insn = p->opcode;
+ int rd = (insn >> 12) & 0xf;
+ int rm = insn & 0xf;
+
+ register unsigned long rdv asm("r0") = regs->uregs[rd];
+ register unsigned long rmv asm("r3") = regs->uregs[rm];
+
+ __asm__ __volatile__ (
+ BLX("%[fn]")
+ : "=r" (rdv)
+ : "0" (rdv), "r" (rmv), [fn] "r" (p->ainsn.insn_fn)
+ : "lr", "memory", "cc"
+ );
+
+ regs->uregs[rd] = rdv;
+}
+
+static void __kprobes
+emulate_rdlo12rdhi16rn0rm8_rwflags_nopc(struct kprobe *p, struct pt_regs *regs)
+{
+ kprobe_opcode_t insn = p->opcode;
+ int rdlo = (insn >> 12) & 0xf;
+ int rdhi = (insn >> 16) & 0xf;
+ int rn = insn & 0xf;
+ int rm = (insn >> 8) & 0xf;
+
+ register unsigned long rdlov asm("r0") = regs->uregs[rdlo];
+ register unsigned long rdhiv asm("r2") = regs->uregs[rdhi];
+ register unsigned long rnv asm("r3") = regs->uregs[rn];
+ register unsigned long rmv asm("r1") = regs->uregs[rm];
+ unsigned long cpsr = regs->ARM_cpsr;
+
+ __asm__ __volatile__ (
+ "msr cpsr_fs, %[cpsr] \n\t"
+ BLX("%[fn]")
+ "mrs %[cpsr], cpsr \n\t"
+ : "=r" (rdlov), "=r" (rdhiv), [cpsr] "=r" (cpsr)
+ : "0" (rdlov), "1" (rdhiv), "r" (rnv), "r" (rmv),
+ "2" (cpsr), [fn] "r" (p->ainsn.insn_fn)
+ : "lr", "memory", "cc"
+ );
+
+ regs->uregs[rdlo] = rdlov;
+ regs->uregs[rdhi] = rdhiv;
+ regs->ARM_cpsr = (regs->ARM_cpsr & ~APSR_MASK) | (cpsr & APSR_MASK);
+}
+
+/*
+ * For the instruction masking and comparisons in all the "space_*"
+ * functions below, do _not_ rearrange the order of tests unless
+ * you're very, very sure of what you are doing. For the sake of
+ * efficiency, the masks for some tests sometimes assume other tests
+ * have been done prior to them so the number of patterns to test
+ * for an instruction set can be as broad as possible to reduce the
+ * number of tests needed.
+ */
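The rule the tables rely on is simply first-match on (insn & mask) == value, which is why ordering matters; a minimal sketch of that walk (field names simplified, not the real decode_item layout from kprobes.h):

struct sketch_entry {
        u32 mask;
        u32 value;
        int action;
};

static int sketch_decode(u32 insn, const struct sketch_entry *tbl, int n)
{
        int i;

        for (i = 0; i < n; i++)
                if ((insn & tbl[i].mask) == tbl[i].value)
                        return tbl[i].action;   /* first hit wins */
        return -1;                              /* no match: reject */
}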
+
+static const union decode_item arm_1111_table[] = {
+ /* Unconditional instructions */
+
+ /* memory hint 1111 0100 x001 xxxx xxxx xxxx xxxx xxxx */
+ /* PLDI (immediate) 1111 0100 x101 xxxx xxxx xxxx xxxx xxxx */
+ /* PLDW (immediate) 1111 0101 x001 xxxx xxxx xxxx xxxx xxxx */
+ /* PLD (immediate) 1111 0101 x101 xxxx xxxx xxxx xxxx xxxx */
+ DECODE_SIMULATE (0xfe300000, 0xf4100000, kprobe_simulate_nop),
+
+ /* memory hint 1111 0110 x001 xxxx xxxx xxxx xxx0 xxxx */
+ /* PLDI (register) 1111 0110 x101 xxxx xxxx xxxx xxx0 xxxx */
+ /* PLDW (register) 1111 0111 x001 xxxx xxxx xxxx xxx0 xxxx */
+ /* PLD (register) 1111 0111 x101 xxxx xxxx xxxx xxx0 xxxx */
+ DECODE_SIMULATE (0xfe300010, 0xf6100000, kprobe_simulate_nop),
+
+ /* BLX (immediate) 1111 101x xxxx xxxx xxxx xxxx xxxx xxxx */
+ DECODE_SIMULATE (0xfe000000, 0xfa000000, simulate_blx1),
+
+ /* CPS 1111 0001 0000 xxx0 xxxx xxxx xx0x xxxx */
+ /* SETEND 1111 0001 0000 0001 xxxx xxxx 0000 xxxx */
+ /* SRS 1111 100x x1x0 xxxx xxxx xxxx xxxx xxxx */
+ /* RFE 1111 100x x0x1 xxxx xxxx xxxx xxxx xxxx */
+
+ /* Coprocessor instructions... */
+ /* MCRR2 1111 1100 0100 xxxx xxxx xxxx xxxx xxxx */
+ /* MRRC2 1111 1100 0101 xxxx xxxx xxxx xxxx xxxx */
+ /* LDC2 1111 110x xxx1 xxxx xxxx xxxx xxxx xxxx */
+ /* STC2 1111 110x xxx0 xxxx xxxx xxxx xxxx xxxx */
+ /* CDP2 1111 1110 xxxx xxxx xxxx xxxx xxx0 xxxx */
+ /* MCR2 1111 1110 xxx0 xxxx xxxx xxxx xxx1 xxxx */
+ /* MRC2 1111 1110 xxx1 xxxx xxxx xxxx xxx1 xxxx */
+
+ /* Other unallocated instructions... */
+ DECODE_END
+};
+
+static const union decode_item arm_cccc_0001_0xx0____0xxx_table[] = {
+ /* Miscellaneous instructions */
+
+ /* MRS cpsr cccc 0001 0000 xxxx xxxx xxxx 0000 xxxx */
+ DECODE_SIMULATEX(0x0ff000f0, 0x01000000, simulate_mrs,
+ REGS(0, NOPC, 0, 0, 0)),
+
+ /* BX cccc 0001 0010 xxxx xxxx xxxx 0001 xxxx */
+ DECODE_SIMULATE (0x0ff000f0, 0x01200010, simulate_blx2bx),
+
+ /* BLX (register) cccc 0001 0010 xxxx xxxx xxxx 0011 xxxx */
+ DECODE_SIMULATEX(0x0ff000f0, 0x01200030, simulate_blx2bx,
+ REGS(0, 0, 0, 0, NOPC)),
+
+ /* CLZ cccc 0001 0110 xxxx xxxx xxxx 0001 xxxx */
+ DECODE_EMULATEX (0x0ff000f0, 0x01600010, emulate_rd12rm0_noflags_nopc,
+ REGS(0, NOPC, 0, 0, NOPC)),
+
+ /* QADD cccc 0001 0000 xxxx xxxx xxxx 0101 xxxx */
+ /* QSUB cccc 0001 0010 xxxx xxxx xxxx 0101 xxxx */
+ /* QDADD cccc 0001 0100 xxxx xxxx xxxx 0101 xxxx */
+ /* QDSUB cccc 0001 0110 xxxx xxxx xxxx 0101 xxxx */
+ DECODE_EMULATEX (0x0f9000f0, 0x01000050, emulate_rd12rn16rm0_rwflags_nopc,
+ REGS(NOPC, NOPC, 0, 0, NOPC)),
+
+ /* BXJ cccc 0001 0010 xxxx xxxx xxxx 0010 xxxx */
+ /* MSR cccc 0001 0x10 xxxx xxxx xxxx 0000 xxxx */
+ /* MRS spsr cccc 0001 0100 xxxx xxxx xxxx 0000 xxxx */
+ /* BKPT 1110 0001 0010 xxxx xxxx xxxx 0111 xxxx */
+ /* SMC cccc 0001 0110 xxxx xxxx xxxx 0111 xxxx */
+ /* And unallocated instructions... */
+ DECODE_END
+};
+
+static const union decode_item arm_cccc_0001_0xx0____1xx0_table[] = {
+ /* Halfword multiply and multiply-accumulate */
+
+ /* SMLALxy cccc 0001 0100 xxxx xxxx xxxx 1xx0 xxxx */
+ DECODE_EMULATEX (0x0ff00090, 0x01400080, emulate_rdlo12rdhi16rn0rm8_rwflags_nopc,
+ REGS(NOPC, NOPC, NOPC, 0, NOPC)),
+
+ /* SMULWy cccc 0001 0010 xxxx xxxx xxxx 1x10 xxxx */
+ DECODE_OR (0x0ff000b0, 0x012000a0),
+ /* SMULxy cccc 0001 0110 xxxx xxxx xxxx 1xx0 xxxx */
+ DECODE_EMULATEX (0x0ff00090, 0x01600080, emulate_rd16rn12rm0rs8_rwflags_nopc,
+ REGS(NOPC, 0, NOPC, 0, NOPC)),
+
+ /* SMLAxy cccc 0001 0000 xxxx xxxx xxxx 1xx0 xxxx */
+ DECODE_OR (0x0ff00090, 0x01000080),
+ /* SMLAWy cccc 0001 0010 xxxx xxxx xxxx 1x00 xxxx */
+ DECODE_EMULATEX (0x0ff000b0, 0x01200080, emulate_rd16rn12rm0rs8_rwflags_nopc,
+ REGS(NOPC, NOPC, NOPC, 0, NOPC)),
+
+ DECODE_END
+};
+
+static const union decode_item arm_cccc_0000_____1001_table[] = {
+ /* Multiply and multiply-accumulate */
+
+ /* MUL cccc 0000 0000 xxxx xxxx xxxx 1001 xxxx */
+ /* MULS cccc 0000 0001 xxxx xxxx xxxx 1001 xxxx */
+ DECODE_EMULATEX (0x0fe000f0, 0x00000090, emulate_rd16rn12rm0rs8_rwflags_nopc,
+ REGS(NOPC, 0, NOPC, 0, NOPC)),
+
+ /* MLA cccc 0000 0010 xxxx xxxx xxxx 1001 xxxx */
+ /* MLAS cccc 0000 0011 xxxx xxxx xxxx 1001 xxxx */
+ DECODE_OR (0x0fe000f0, 0x00200090),
+ /* MLS cccc 0000 0110 xxxx xxxx xxxx 1001 xxxx */
+ DECODE_EMULATEX (0x0ff000f0, 0x00600090, emulate_rd16rn12rm0rs8_rwflags_nopc,
+ REGS(NOPC, NOPC, NOPC, 0, NOPC)),
+
+ /* UMAAL cccc 0000 0100 xxxx xxxx xxxx 1001 xxxx */
+ DECODE_OR (0x0ff000f0, 0x00400090),
+ /* UMULL cccc 0000 1000 xxxx xxxx xxxx 1001 xxxx */
+ /* UMULLS cccc 0000 1001 xxxx xxxx xxxx 1001 xxxx */
+ /* UMLAL cccc 0000 1010 xxxx xxxx xxxx 1001 xxxx */
+ /* UMLALS cccc 0000 1011 xxxx xxxx xxxx 1001 xxxx */
+ /* SMULL cccc 0000 1100 xxxx xxxx xxxx 1001 xxxx */
+ /* SMULLS cccc 0000 1101 xxxx xxxx xxxx 1001 xxxx */
+ /* SMLAL cccc 0000 1110 xxxx xxxx xxxx 1001 xxxx */
+ /* SMLALS cccc 0000 1111 xxxx xxxx xxxx 1001 xxxx */
+ DECODE_EMULATEX (0x0f8000f0, 0x00800090, emulate_rdlo12rdhi16rn0rm8_rwflags_nopc,
+ REGS(NOPC, NOPC, NOPC, 0, NOPC)),
+
+ DECODE_END
+};
+
+static const union decode_item arm_cccc_0001_____1001_table[] = {
+ /* Synchronization primitives */
+
+ /* SWP/SWPB cccc 0001 0x00 xxxx xxxx xxxx 1001 xxxx */
+ DECODE_EMULATEX (0x0fb000f0, 0x01000090, emulate_rd12rn16rm0_rwflags_nopc,
+ REGS(NOPC, NOPC, 0, 0, NOPC)),
+
+ /* LDREX/STREX{,D,B,H} cccc 0001 1xxx xxxx xxxx xxxx 1001 xxxx */
+ /* And unallocated instructions... */
+ DECODE_END
+};
+
+static const union decode_item arm_cccc_000x_____1xx1_table[] = {
+ /* Extra load/store instructions */
+
+ /* STRHT cccc 0000 xx10 xxxx xxxx xxxx 1011 xxxx */
+ /* ??? cccc 0000 xx10 xxxx xxxx xxxx 11x1 xxxx */
+ /* LDRHT cccc 0000 xx11 xxxx xxxx xxxx 1011 xxxx */
+ /* LDRSBT cccc 0000 xx11 xxxx xxxx xxxx 1101 xxxx */
+ /* LDRSHT cccc 0000 xx11 xxxx xxxx xxxx 1111 xxxx */
+ DECODE_REJECT (0x0f200090, 0x00200090),
+
+ /* LDRD/STRD lr,pc,{... cccc 000x x0x0 xxxx 111x xxxx 1101 xxxx */
+ DECODE_REJECT (0x0e10e0d0, 0x0000e0d0),
+
+ /* LDRD (register) cccc 000x x0x0 xxxx xxxx xxxx 1101 xxxx */
+ /* STRD (register) cccc 000x x0x0 xxxx xxxx xxxx 1111 xxxx */
+ DECODE_EMULATEX (0x0e5000d0, 0x000000d0, emulate_ldrdstrd,
+ REGS(NOPCWB, NOPCX, 0, 0, NOPC)),
+
+ /* LDRD (immediate) cccc 000x x1x0 xxxx xxxx xxxx 1101 xxxx */
+ /* STRD (immediate) cccc 000x x1x0 xxxx xxxx xxxx 1111 xxxx */
+ DECODE_EMULATEX (0x0e5000d0, 0x004000d0, emulate_ldrdstrd,
+ REGS(NOPCWB, NOPCX, 0, 0, 0)),
+
+ /* STRH (register) cccc 000x x0x0 xxxx xxxx xxxx 1011 xxxx */
+ DECODE_EMULATEX (0x0e5000f0, 0x000000b0, emulate_str,
+ REGS(NOPCWB, NOPC, 0, 0, NOPC)),
+
+ /* LDRH (register) cccc 000x x0x1 xxxx xxxx xxxx 1011 xxxx */
+ /* LDRSB (register) cccc 000x x0x1 xxxx xxxx xxxx 1101 xxxx */
+ /* LDRSH (register) cccc 000x x0x1 xxxx xxxx xxxx 1111 xxxx */
+ DECODE_EMULATEX (0x0e500090, 0x00100090, emulate_ldr,
+ REGS(NOPCWB, NOPC, 0, 0, NOPC)),
+
+ /* STRH (immediate) cccc 000x x1x0 xxxx xxxx xxxx 1011 xxxx */
+ DECODE_EMULATEX (0x0e5000f0, 0x004000b0, emulate_str,
+ REGS(NOPCWB, NOPC, 0, 0, 0)),
+
+ /* LDRH (immediate) cccc 000x x1x1 xxxx xxxx xxxx 1011 xxxx */
+ /* LDRSB (immediate) cccc 000x x1x1 xxxx xxxx xxxx 1101 xxxx */
+ /* LDRSH (immediate) cccc 000x x1x1 xxxx xxxx xxxx 1111 xxxx */
+ DECODE_EMULATEX (0x0e500090, 0x00500090, emulate_ldr,
+ REGS(NOPCWB, NOPC, 0, 0, 0)),
+
+ DECODE_END
+};
+
+static const union decode_item arm_cccc_000x_table[] = {
+ /* Data-processing (register) */
+
+ /* <op>S PC, ... cccc 000x xxx1 xxxx 1111 xxxx xxxx xxxx */
+ DECODE_REJECT (0x0e10f000, 0x0010f000),
+
+ /* MOV IP, SP 1110 0001 1010 0000 1100 0000 0000 1101 */
+ DECODE_SIMULATE (0xffffffff, 0xe1a0c00d, simulate_mov_ipsp),
+
+ /* TST (register) cccc 0001 0001 xxxx xxxx xxxx xxx0 xxxx */
+ /* TEQ (register) cccc 0001 0011 xxxx xxxx xxxx xxx0 xxxx */
+ /* CMP (register) cccc 0001 0101 xxxx xxxx xxxx xxx0 xxxx */
+ /* CMN (register) cccc 0001 0111 xxxx xxxx xxxx xxx0 xxxx */
+ DECODE_EMULATEX (0x0f900010, 0x01100000, emulate_rd12rn16rm0rs8_rwflags,
+ REGS(ANY, 0, 0, 0, ANY)),
+
+ /* MOV (register) cccc 0001 101x xxxx xxxx xxxx xxx0 xxxx */
+ /* MVN (register) cccc 0001 111x xxxx xxxx xxxx xxx0 xxxx */
+ DECODE_EMULATEX (0x0fa00010, 0x01a00000, emulate_rd12rn16rm0rs8_rwflags,
+ REGS(0, ANY, 0, 0, ANY)),
+
+ /* AND (register) cccc 0000 000x xxxx xxxx xxxx xxx0 xxxx */
+ /* EOR (register) cccc 0000 001x xxxx xxxx xxxx xxx0 xxxx */
+ /* SUB (register) cccc 0000 010x xxxx xxxx xxxx xxx0 xxxx */
+ /* RSB (register) cccc 0000 011x xxxx xxxx xxxx xxx0 xxxx */
+ /* ADD (register) cccc 0000 100x xxxx xxxx xxxx xxx0 xxxx */
+ /* ADC (register) cccc 0000 101x xxxx xxxx xxxx xxx0 xxxx */
+ /* SBC (register) cccc 0000 110x xxxx xxxx xxxx xxx0 xxxx */
+ /* RSC (register) cccc 0000 111x xxxx xxxx xxxx xxx0 xxxx */
+ /* ORR (register) cccc 0001 100x xxxx xxxx xxxx xxx0 xxxx */
+ /* BIC (register) cccc 0001 110x xxxx xxxx xxxx xxx0 xxxx */
+ DECODE_EMULATEX (0x0e000010, 0x00000000, emulate_rd12rn16rm0rs8_rwflags,
+ REGS(ANY, ANY, 0, 0, ANY)),
+
+ /* TST (reg-shift reg) cccc 0001 0001 xxxx xxxx xxxx 0xx1 xxxx */
+ /* TEQ (reg-shift reg) cccc 0001 0011 xxxx xxxx xxxx 0xx1 xxxx */
+ /* CMP (reg-shift reg) cccc 0001 0101 xxxx xxxx xxxx 0xx1 xxxx */
+ /* CMN (reg-shift reg) cccc 0001 0111 xxxx xxxx xxxx 0xx1 xxxx */
+ DECODE_EMULATEX (0x0f900090, 0x01100010, emulate_rd12rn16rm0rs8_rwflags,
+ REGS(ANY, 0, NOPC, 0, ANY)),
+
+ /* MOV (reg-shift reg) cccc 0001 101x xxxx xxxx xxxx 0xx1 xxxx */
+ /* MVN (reg-shift reg) cccc 0001 111x xxxx xxxx xxxx 0xx1 xxxx */
+ DECODE_EMULATEX (0x0fa00090, 0x01a00010, emulate_rd12rn16rm0rs8_rwflags,
+ REGS(0, ANY, NOPC, 0, ANY)),
+
+ /* AND (reg-shift reg) cccc 0000 000x xxxx xxxx xxxx 0xx1 xxxx */
+ /* EOR (reg-shift reg) cccc 0000 001x xxxx xxxx xxxx 0xx1 xxxx */
+ /* SUB (reg-shift reg) cccc 0000 010x xxxx xxxx xxxx 0xx1 xxxx */
+ /* RSB (reg-shift reg) cccc 0000 011x xxxx xxxx xxxx 0xx1 xxxx */
+ /* ADD (reg-shift reg) cccc 0000 100x xxxx xxxx xxxx 0xx1 xxxx */
+ /* ADC (reg-shift reg) cccc 0000 101x xxxx xxxx xxxx 0xx1 xxxx */
+ /* SBC (reg-shift reg) cccc 0000 110x xxxx xxxx xxxx 0xx1 xxxx */
+ /* RSC (reg-shift reg) cccc 0000 111x xxxx xxxx xxxx 0xx1 xxxx */
+ /* ORR (reg-shift reg) cccc 0001 100x xxxx xxxx xxxx 0xx1 xxxx */
+ /* BIC (reg-shift reg) cccc 0001 110x xxxx xxxx xxxx 0xx1 xxxx */
+ DECODE_EMULATEX (0x0e000090, 0x00000010, emulate_rd12rn16rm0rs8_rwflags,
+ REGS(ANY, ANY, NOPC, 0, ANY)),
+
+ DECODE_END
+};
+
+static const union decode_item arm_cccc_001x_table[] = {
+ /* Data-processing (immediate) */
+
+ /* MOVW cccc 0011 0000 xxxx xxxx xxxx xxxx xxxx */
+ /* MOVT cccc 0011 0100 xxxx xxxx xxxx xxxx xxxx */
+ DECODE_EMULATEX (0x0fb00000, 0x03000000, emulate_rd12rm0_noflags_nopc,
+ REGS(0, NOPC, 0, 0, 0)),
+
+ /* YIELD cccc 0011 0010 0000 xxxx xxxx 0000 0001 */
+ DECODE_OR (0x0fff00ff, 0x03200001),
+ /* SEV cccc 0011 0010 0000 xxxx xxxx 0000 0100 */
+ DECODE_EMULATE (0x0fff00ff, 0x03200004, kprobe_emulate_none),
+ /* NOP cccc 0011 0010 0000 xxxx xxxx 0000 0000 */
+ /* WFE cccc 0011 0010 0000 xxxx xxxx 0000 0010 */
+ /* WFI cccc 0011 0010 0000 xxxx xxxx 0000 0011 */
+ DECODE_SIMULATE (0x0fff00fc, 0x03200000, kprobe_simulate_nop),
+ /* DBG cccc 0011 0010 0000 xxxx xxxx ffff xxxx */
+ /* unallocated hints cccc 0011 0010 0000 xxxx xxxx xxxx xxxx */
+ /* MSR (immediate) cccc 0011 0x10 xxxx xxxx xxxx xxxx xxxx */
+ DECODE_REJECT (0x0fb00000, 0x03200000),
+
+ /* <op>S PC, ... cccc 001x xxx1 xxxx 1111 xxxx xxxx xxxx */
+ DECODE_REJECT (0x0e10f000, 0x0210f000),
+
+ /* TST (immediate) cccc 0011 0001 xxxx xxxx xxxx xxxx xxxx */
+ /* TEQ (immediate) cccc 0011 0011 xxxx xxxx xxxx xxxx xxxx */
+ /* CMP (immediate) cccc 0011 0101 xxxx xxxx xxxx xxxx xxxx */
+ /* CMN (immediate) cccc 0011 0111 xxxx xxxx xxxx xxxx xxxx */
+ DECODE_EMULATEX (0x0f900000, 0x03100000, emulate_rd12rn16rm0rs8_rwflags,
+ REGS(ANY, 0, 0, 0, 0)),
+
+ /* MOV (immediate) cccc 0011 101x xxxx xxxx xxxx xxxx xxxx */
+ /* MVN (immediate) cccc 0011 111x xxxx xxxx xxxx xxxx xxxx */
+ DECODE_EMULATEX (0x0fa00000, 0x03a00000, emulate_rd12rn16rm0rs8_rwflags,
+ REGS(0, ANY, 0, 0, 0)),
+
+ /* AND (immediate) cccc 0010 000x xxxx xxxx xxxx xxxx xxxx */
+ /* EOR (immediate) cccc 0010 001x xxxx xxxx xxxx xxxx xxxx */
+ /* SUB (immediate) cccc 0010 010x xxxx xxxx xxxx xxxx xxxx */
+ /* RSB (immediate) cccc 0010 011x xxxx xxxx xxxx xxxx xxxx */
+ /* ADD (immediate) cccc 0010 100x xxxx xxxx xxxx xxxx xxxx */
+ /* ADC (immediate) cccc 0010 101x xxxx xxxx xxxx xxxx xxxx */
+ /* SBC (immediate) cccc 0010 110x xxxx xxxx xxxx xxxx xxxx */
+ /* RSC (immediate) cccc 0010 111x xxxx xxxx xxxx xxxx xxxx */
+ /* ORR (immediate) cccc 0011 100x xxxx xxxx xxxx xxxx xxxx */
+ /* BIC (immediate) cccc 0011 110x xxxx xxxx xxxx xxxx xxxx */
+ DECODE_EMULATEX (0x0e000000, 0x02000000, emulate_rd12rn16rm0rs8_rwflags,
+ REGS(ANY, ANY, 0, 0, 0)),
+
+ DECODE_END
+};
+
+static const union decode_item arm_cccc_0110_____xxx1_table[] = {
+ /* Media instructions */
+
+ /* SEL cccc 0110 1000 xxxx xxxx xxxx 1011 xxxx */
+ DECODE_EMULATEX (0x0ff000f0, 0x068000b0, emulate_rd12rn16rm0_rwflags_nopc,
+ REGS(NOPC, NOPC, 0, 0, NOPC)),
+
+ /* SSAT cccc 0110 101x xxxx xxxx xxxx xx01 xxxx */
+ /* USAT cccc 0110 111x xxxx xxxx xxxx xx01 xxxx */
+ DECODE_OR(0x0fa00030, 0x06a00010),
+ /* SSAT16 cccc 0110 1010 xxxx xxxx xxxx 0011 xxxx */
+ /* USAT16 cccc 0110 1110 xxxx xxxx xxxx 0011 xxxx */
+ DECODE_EMULATEX (0x0fb000f0, 0x06a00030, emulate_rd12rn16rm0_rwflags_nopc,
+ REGS(0, NOPC, 0, 0, NOPC)),
+
+ /* REV cccc 0110 1011 xxxx xxxx xxxx 0011 xxxx */
+ /* REV16 cccc 0110 1011 xxxx xxxx xxxx 1011 xxxx */
+ /* RBIT cccc 0110 1111 xxxx xxxx xxxx 0011 xxxx */
+ /* REVSH cccc 0110 1111 xxxx xxxx xxxx 1011 xxxx */
+ DECODE_EMULATEX (0x0fb00070, 0x06b00030, emulate_rd12rm0_noflags_nopc,
+ REGS(0, NOPC, 0, 0, NOPC)),
+
+ /* ??? cccc 0110 0x00 xxxx xxxx xxxx xxx1 xxxx */
+ DECODE_REJECT (0x0fb00010, 0x06000010),
+ /* ??? cccc 0110 0xxx xxxx xxxx xxxx 1011 xxxx */
+ DECODE_REJECT (0x0f8000f0, 0x060000b0),
+ /* ??? cccc 0110 0xxx xxxx xxxx xxxx 1101 xxxx */
+ DECODE_REJECT (0x0f8000f0, 0x060000d0),
+ /* SADD16 cccc 0110 0001 xxxx xxxx xxxx 0001 xxxx */
+ /* SADDSUBX cccc 0110 0001 xxxx xxxx xxxx 0011 xxxx */
+ /* SSUBADDX cccc 0110 0001 xxxx xxxx xxxx 0101 xxxx */
+ /* SSUB16 cccc 0110 0001 xxxx xxxx xxxx 0111 xxxx */
+ /* SADD8 cccc 0110 0001 xxxx xxxx xxxx 1001 xxxx */
+ /* SSUB8 cccc 0110 0001 xxxx xxxx xxxx 1111 xxxx */
+ /* QADD16 cccc 0110 0010 xxxx xxxx xxxx 0001 xxxx */
+ /* QADDSUBX cccc 0110 0010 xxxx xxxx xxxx 0011 xxxx */
+ /* QSUBADDX cccc 0110 0010 xxxx xxxx xxxx 0101 xxxx */
+ /* QSUB16 cccc 0110 0010 xxxx xxxx xxxx 0111 xxxx */
+ /* QADD8 cccc 0110 0010 xxxx xxxx xxxx 1001 xxxx */
+ /* QSUB8 cccc 0110 0010 xxxx xxxx xxxx 1111 xxxx */
+ /* SHADD16 cccc 0110 0011 xxxx xxxx xxxx 0001 xxxx */
+ /* SHADDSUBX cccc 0110 0011 xxxx xxxx xxxx 0011 xxxx */
+ /* SHSUBADDX cccc 0110 0011 xxxx xxxx xxxx 0101 xxxx */
+ /* SHSUB16 cccc 0110 0011 xxxx xxxx xxxx 0111 xxxx */
+ /* SHADD8 cccc 0110 0011 xxxx xxxx xxxx 1001 xxxx */
+ /* SHSUB8 cccc 0110 0011 xxxx xxxx xxxx 1111 xxxx */
+ /* UADD16 cccc 0110 0101 xxxx xxxx xxxx 0001 xxxx */
+ /* UADDSUBX cccc 0110 0101 xxxx xxxx xxxx 0011 xxxx */
+ /* USUBADDX cccc 0110 0101 xxxx xxxx xxxx 0101 xxxx */
+ /* USUB16 cccc 0110 0101 xxxx xxxx xxxx 0111 xxxx */
+ /* UADD8 cccc 0110 0101 xxxx xxxx xxxx 1001 xxxx */
+ /* USUB8 cccc 0110 0101 xxxx xxxx xxxx 1111 xxxx */
+ /* UQADD16 cccc 0110 0110 xxxx xxxx xxxx 0001 xxxx */
+ /* UQADDSUBX cccc 0110 0110 xxxx xxxx xxxx 0011 xxxx */
+ /* UQSUBADDX cccc 0110 0110 xxxx xxxx xxxx 0101 xxxx */
+ /* UQSUB16 cccc 0110 0110 xxxx xxxx xxxx 0111 xxxx */
+ /* UQADD8 cccc 0110 0110 xxxx xxxx xxxx 1001 xxxx */
+ /* UQSUB8 cccc 0110 0110 xxxx xxxx xxxx 1111 xxxx */
+ /* UHADD16 cccc 0110 0111 xxxx xxxx xxxx 0001 xxxx */
+ /* UHADDSUBX cccc 0110 0111 xxxx xxxx xxxx 0011 xxxx */
+ /* UHSUBADDX cccc 0110 0111 xxxx xxxx xxxx 0101 xxxx */
+ /* UHSUB16 cccc 0110 0111 xxxx xxxx xxxx 0111 xxxx */
+ /* UHADD8 cccc 0110 0111 xxxx xxxx xxxx 1001 xxxx */
+ /* UHSUB8 cccc 0110 0111 xxxx xxxx xxxx 1111 xxxx */
+ DECODE_EMULATEX (0x0f800010, 0x06000010, emulate_rd12rn16rm0_rwflags_nopc,
+ REGS(NOPC, NOPC, 0, 0, NOPC)),
+
+ /* PKHBT cccc 0110 1000 xxxx xxxx xxxx x001 xxxx */
+ /* PKHTB cccc 0110 1000 xxxx xxxx xxxx x101 xxxx */
+ DECODE_EMULATEX (0x0ff00030, 0x06800010, emulate_rd12rn16rm0_rwflags_nopc,
+ REGS(NOPC, NOPC, 0, 0, NOPC)),
+
+ /* ??? cccc 0110 1001 xxxx xxxx xxxx 0111 xxxx */
+ /* ??? cccc 0110 1101 xxxx xxxx xxxx 0111 xxxx */
+ DECODE_REJECT (0x0fb000f0, 0x06900070),
+
+ /* SXTB16 cccc 0110 1000 1111 xxxx xxxx 0111 xxxx */
+ /* SXTB cccc 0110 1010 1111 xxxx xxxx 0111 xxxx */
+ /* SXTH cccc 0110 1011 1111 xxxx xxxx 0111 xxxx */
+ /* UXTB16 cccc 0110 1100 1111 xxxx xxxx 0111 xxxx */
+ /* UXTB cccc 0110 1110 1111 xxxx xxxx 0111 xxxx */
+ /* UXTH cccc 0110 1111 1111 xxxx xxxx 0111 xxxx */
+ DECODE_EMULATEX (0x0f8f00f0, 0x068f0070, emulate_rd12rm0_noflags_nopc,
+ REGS(0, NOPC, 0, 0, NOPC)),
+
+ /* SXTAB16 cccc 0110 1000 xxxx xxxx xxxx 0111 xxxx */
+ /* SXTAB cccc 0110 1010 xxxx xxxx xxxx 0111 xxxx */
+ /* SXTAH cccc 0110 1011 xxxx xxxx xxxx 0111 xxxx */
+ /* UXTAB16 cccc 0110 1100 xxxx xxxx xxxx 0111 xxxx */
+ /* UXTAB cccc 0110 1110 xxxx xxxx xxxx 0111 xxxx */
+ /* UXTAH cccc 0110 1111 xxxx xxxx xxxx 0111 xxxx */
+ DECODE_EMULATEX (0x0f8000f0, 0x06800070, emulate_rd12rn16rm0_rwflags_nopc,
+ REGS(NOPCX, NOPC, 0, 0, NOPC)),
+
+ DECODE_END
+};
+
+static const union decode_item arm_cccc_0111_____xxx1_table[] = {
+ /* Media instructions */
+
+ /* UNDEFINED cccc 0111 1111 xxxx xxxx xxxx 1111 xxxx */
+ DECODE_REJECT (0x0ff000f0, 0x07f000f0),
+
+ /* SMLALD cccc 0111 0100 xxxx xxxx xxxx 00x1 xxxx */
+ /* SMLSLD cccc 0111 0100 xxxx xxxx xxxx 01x1 xxxx */
+ DECODE_EMULATEX (0x0ff00090, 0x07400010, emulate_rdlo12rdhi16rn0rm8_rwflags_nopc,
+ REGS(NOPC, NOPC, NOPC, 0, NOPC)),
+
+ /* SMUAD cccc 0111 0000 xxxx 1111 xxxx 00x1 xxxx */
+ /* SMUSD cccc 0111 0000 xxxx 1111 xxxx 01x1 xxxx */
+ DECODE_OR (0x0ff0f090, 0x0700f010),
+ /* SMMUL cccc 0111 0101 xxxx 1111 xxxx 00x1 xxxx */
+ DECODE_OR (0x0ff0f0d0, 0x0750f010),
+ /* USAD8 cccc 0111 1000 xxxx 1111 xxxx 0001 xxxx */
+ DECODE_EMULATEX (0x0ff0f0f0, 0x0780f010, emulate_rd16rn12rm0rs8_rwflags_nopc,
+ REGS(NOPC, 0, NOPC, 0, NOPC)),
+
+ /* SMLAD cccc 0111 0000 xxxx xxxx xxxx 00x1 xxxx */
+ /* SMLSD cccc 0111 0000 xxxx xxxx xxxx 01x1 xxxx */
+ DECODE_OR (0x0ff00090, 0x07000010),
+ /* SMMLA cccc 0111 0101 xxxx xxxx xxxx 00x1 xxxx */
+ DECODE_OR (0x0ff000d0, 0x07500010),
+ /* USADA8 cccc 0111 1000 xxxx xxxx xxxx 0001 xxxx */
+ DECODE_EMULATEX (0x0ff000f0, 0x07800010, emulate_rd16rn12rm0rs8_rwflags_nopc,
+ REGS(NOPC, NOPCX, NOPC, 0, NOPC)),
+
+ /* SMMLS cccc 0111 0101 xxxx xxxx xxxx 11x1 xxxx */
+ DECODE_EMULATEX (0x0ff000d0, 0x075000d0, emulate_rd16rn12rm0rs8_rwflags_nopc,
+ REGS(NOPC, NOPC, NOPC, 0, NOPC)),
+
+ /* SBFX cccc 0111 101x xxxx xxxx xxxx x101 xxxx */
+ /* UBFX cccc 0111 111x xxxx xxxx xxxx x101 xxxx */
+ DECODE_EMULATEX (0x0fa00070, 0x07a00050, emulate_rd12rm0_noflags_nopc,
+ REGS(0, NOPC, 0, 0, NOPC)),
+
+ /* BFC cccc 0111 110x xxxx xxxx xxxx x001 1111 */
+ DECODE_EMULATEX (0x0fe0007f, 0x07c0001f, emulate_rd12rm0_noflags_nopc,
+ REGS(0, NOPC, 0, 0, 0)),
+
+ /* BFI cccc 0111 110x xxxx xxxx xxxx x001 xxxx */
+ DECODE_EMULATEX (0x0fe00070, 0x07c00010, emulate_rd12rm0_noflags_nopc,
+ REGS(0, NOPC, 0, 0, NOPCX)),
+
+ DECODE_END
+};
+
+static const union decode_item arm_cccc_01xx_table[] = {
+ /* Load/store word and unsigned byte */
+
+ /* LDRB/STRB pc,[...] cccc 01xx x0xx xxxx xxxx xxxx xxxx xxxx */
+ DECODE_REJECT (0x0c40f000, 0x0440f000),
+
+ /* STRT cccc 01x0 x010 xxxx xxxx xxxx xxxx xxxx */
+ /* LDRT cccc 01x0 x011 xxxx xxxx xxxx xxxx xxxx */
+ /* STRBT cccc 01x0 x110 xxxx xxxx xxxx xxxx xxxx */
+ /* LDRBT cccc 01x0 x111 xxxx xxxx xxxx xxxx xxxx */
+ DECODE_REJECT (0x0d200000, 0x04200000),
+
+ /* STR (immediate) cccc 010x x0x0 xxxx xxxx xxxx xxxx xxxx */
+ /* STRB (immediate) cccc 010x x1x0 xxxx xxxx xxxx xxxx xxxx */
+ DECODE_EMULATEX (0x0e100000, 0x04000000, emulate_str,
+ REGS(NOPCWB, ANY, 0, 0, 0)),
+
+ /* LDR (immediate) cccc 010x x0x1 xxxx xxxx xxxx xxxx xxxx */
+ /* LDRB (immediate) cccc 010x x1x1 xxxx xxxx xxxx xxxx xxxx */
+ DECODE_EMULATEX (0x0e100000, 0x04100000, emulate_ldr,
+ REGS(NOPCWB, ANY, 0, 0, 0)),
+
+ /* STR (register) cccc 011x x0x0 xxxx xxxx xxxx xxxx xxxx */
+ /* STRB (register) cccc 011x x1x0 xxxx xxxx xxxx xxxx xxxx */
+ DECODE_EMULATEX (0x0e100000, 0x06000000, emulate_str,
+ REGS(NOPCWB, ANY, 0, 0, NOPC)),
+
+ /* LDR (register) cccc 011x x0x1 xxxx xxxx xxxx xxxx xxxx */
+ /* LDRB (register) cccc 011x x1x1 xxxx xxxx xxxx xxxx xxxx */
+ DECODE_EMULATEX (0x0e100000, 0x06100000, emulate_ldr,
+ REGS(NOPCWB, ANY, 0, 0, NOPC)),
+
+ DECODE_END
+};
+
+static const union decode_item arm_cccc_100x_table[] = {
+ /* Block data transfer instructions */
+
+ /* LDM cccc 100x x0x1 xxxx xxxx xxxx xxxx xxxx */
+ /* STM cccc 100x x0x0 xxxx xxxx xxxx xxxx xxxx */
+ DECODE_CUSTOM (0x0e400000, 0x08000000, kprobe_decode_ldmstm),
+
+ /* STM (user registers) cccc 100x x1x0 xxxx xxxx xxxx xxxx xxxx */
+ /* LDM (user registers) cccc 100x x1x1 xxxx 0xxx xxxx xxxx xxxx */
+ /* LDM (exception ret) cccc 100x x1x1 xxxx 1xxx xxxx xxxx xxxx */
+ DECODE_END
+};
+
+const union decode_item kprobe_decode_arm_table[] = {
+ /*
+ * Unconditional instructions
+ * 1111 xxxx xxxx xxxx xxxx xxxx xxxx xxxx
+ */
+ DECODE_TABLE (0xf0000000, 0xf0000000, arm_1111_table),
+
+ /*
+ * Miscellaneous instructions
+ * cccc 0001 0xx0 xxxx xxxx xxxx 0xxx xxxx
+ */
+ DECODE_TABLE (0x0f900080, 0x01000000, arm_cccc_0001_0xx0____0xxx_table),
+
+ /*
+ * Halfword multiply and multiply-accumulate
+ * cccc 0001 0xx0 xxxx xxxx xxxx 1xx0 xxxx
+ */
+ DECODE_TABLE (0x0f900090, 0x01000080, arm_cccc_0001_0xx0____1xx0_table),
+
+ /*
+ * Multiply and multiply-accumulate
+ * cccc 0000 xxxx xxxx xxxx xxxx 1001 xxxx
+ */
+ DECODE_TABLE (0x0f0000f0, 0x00000090, arm_cccc_0000_____1001_table),
+
+ /*
+ * Synchronization primitives
+ * cccc 0001 xxxx xxxx xxxx xxxx 1001 xxxx
+ */
+ DECODE_TABLE (0x0f0000f0, 0x01000090, arm_cccc_0001_____1001_table),
+
+ /*
+ * Extra load/store instructions
+ * cccc 000x xxxx xxxx xxxx xxxx 1xx1 xxxx
+ */
+ DECODE_TABLE (0x0e000090, 0x00000090, arm_cccc_000x_____1xx1_table),
+
+ /*
+ * Data-processing (register)
+ * cccc 000x xxxx xxxx xxxx xxxx xxx0 xxxx
+ * Data-processing (register-shifted register)
+ * cccc 000x xxxx xxxx xxxx xxxx 0xx1 xxxx
+ */
+ DECODE_TABLE (0x0e000000, 0x00000000, arm_cccc_000x_table),
+
+ /*
+ * Data-processing (immediate)
+ * cccc 001x xxxx xxxx xxxx xxxx xxxx xxxx
+ */
+ DECODE_TABLE (0x0e000000, 0x02000000, arm_cccc_001x_table),
+
+ /*
+ * Media instructions
+ * cccc 011x xxxx xxxx xxxx xxxx xxx1 xxxx
+ */
+ DECODE_TABLE (0x0f000010, 0x06000010, arm_cccc_0110_____xxx1_table),
+ DECODE_TABLE (0x0f000010, 0x07000010, arm_cccc_0111_____xxx1_table),
+
+ /*
+ * Load/store word and unsigned byte
+ * cccc 01xx xxxx xxxx xxxx xxxx xxxx xxxx
+ */
+ DECODE_TABLE (0x0c000000, 0x04000000, arm_cccc_01xx_table),
+
+ /*
+ * Block data transfer instructions
+ * cccc 100x xxxx xxxx xxxx xxxx xxxx xxxx
+ */
+ DECODE_TABLE (0x0e000000, 0x08000000, arm_cccc_100x_table),
+
+ /* B cccc 1010 xxxx xxxx xxxx xxxx xxxx xxxx */
+ /* BL cccc 1011 xxxx xxxx xxxx xxxx xxxx xxxx */
+ DECODE_SIMULATE (0x0e000000, 0x0a000000, simulate_bbl),
+
+ /*
+ * Supervisor Call, and coprocessor instructions
+ */
+
+ /* MCRR cccc 1100 0100 xxxx xxxx xxxx xxxx xxxx */
+ /* MRRC cccc 1100 0101 xxxx xxxx xxxx xxxx xxxx */
+ /* LDC cccc 110x xxx1 xxxx xxxx xxxx xxxx xxxx */
+ /* STC cccc 110x xxx0 xxxx xxxx xxxx xxxx xxxx */
+ /* CDP cccc 1110 xxxx xxxx xxxx xxxx xxx0 xxxx */
+ /* MCR cccc 1110 xxx0 xxxx xxxx xxxx xxx1 xxxx */
+ /* MRC cccc 1110 xxx1 xxxx xxxx xxxx xxx1 xxxx */
+ /* SVC cccc 1111 xxxx xxxx xxxx xxxx xxxx xxxx */
+ DECODE_REJECT (0x0c000000, 0x0c000000),
+
+ DECODE_END
+};
+
+static void __kprobes arm_singlestep(struct kprobe *p, struct pt_regs *regs)
+{
+ regs->ARM_pc += 4;
+ p->ainsn.insn_handler(p, regs);
+}
+
+/* Return:
+ * INSN_REJECTED If instruction is one not allowed to kprobe,
+ * INSN_GOOD If instruction is supported and uses instruction slot,
+ * INSN_GOOD_NO_SLOT If instruction is supported but doesn't use its slot.
+ *
+ * For instructions we don't want to kprobe (INSN_REJECTED return result):
+ * These are generally ones that modify the processor state, making
+ * them "hard" to simulate, such as switching processor modes or
+ * making accesses in alternate modes. Any of these could be simulated
+ * if the work was put into it, but the return would be low considering
+ * they should also be very rare.
+ */
+enum kprobe_insn __kprobes
+arm_kprobe_decode_insn(kprobe_opcode_t insn, struct arch_specific_insn *asi)
+{
+ asi->insn_singlestep = arm_singlestep;
+ asi->insn_check_cc = kprobe_condition_checks[insn>>28];
+ return kprobe_decode_insn(insn, asi, kprobe_decode_arm_table, false);
+}
diff --git a/arch/arm/kernel/kprobes-common.c b/arch/arm/kernel/kprobes-common.c
new file mode 100644
index 000000000000..a5394fb4e4e0
--- /dev/null
+++ b/arch/arm/kernel/kprobes-common.c
@@ -0,0 +1,577 @@
+/*
+ * arch/arm/kernel/kprobes-common.c
+ *
+ * Copyright (C) 2011 Jon Medhurst <tixy@yxit.co.uk>.
+ *
+ * Some contents moved here from arch/arm/include/asm/kprobes-arm.c which is
+ * Copyright (C) 2006, 2007 Motorola Inc.
+ *
+ * This program is free software; you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License version 2 as
+ * published by the Free Software Foundation.
+ */
+
+#include <linux/kernel.h>
+#include <linux/kprobes.h>
+
+#include "kprobes.h"
+
+
+#ifndef find_str_pc_offset
+
+/*
+ * For STR and STM instructions, an ARM core may choose to use either
+ * a +8 or a +12 displacement from the current instruction's address.
+ * Whichever value is chosen for a given core, it must be the same for
+ * both instructions and may not change. This function measures it.
+ */
+
+int str_pc_offset;
+
+void __init find_str_pc_offset(void)
+{
+ int addr, scratch, ret;
+
+ __asm__ (
+ "sub %[ret], pc, #4 \n\t"
+ "str pc, %[addr] \n\t"
+ "ldr %[scr], %[addr] \n\t"
+ "sub %[ret], %[scr], %[ret] \n\t"
+ : [ret] "=r" (ret), [scr] "=r" (scratch), [addr] "+m" (addr));
+
+ str_pc_offset = ret;
+}
+
+#endif /* !find_str_pc_offset */
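In other words, the probe stores the PC with a plain str and subtracts the instruction's own address; on any given core the answer is expected to be either 8 or 12 (an illustrative check, not part of the patch):

static void __init check_str_pc_offset(void)
{
        BUG_ON(str_pc_offset != 8 && str_pc_offset != 12);
}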
+
+
+#ifndef test_load_write_pc_interworking
+
+bool load_write_pc_interworks;
+
+void __init test_load_write_pc_interworking(void)
+{
+ int arch = cpu_architecture();
+ BUG_ON(arch == CPU_ARCH_UNKNOWN);
+ load_write_pc_interworks = arch >= CPU_ARCH_ARMv5T;
+}
+
+#endif /* !test_load_write_pc_interworking */
+
+
+#ifndef test_alu_write_pc_interworking
+
+bool alu_write_pc_interworks;
+
+void __init test_alu_write_pc_interworking(void)
+{
+ int arch = cpu_architecture();
+ BUG_ON(arch == CPU_ARCH_UNKNOWN);
+ alu_write_pc_interworks = arch >= CPU_ARCH_ARMv7;
+}
+
+#endif /* !test_alu_write_pc_interworking */
+
+
+void __init arm_kprobe_decode_init(void)
+{
+ find_str_pc_offset();
+ test_load_write_pc_interworking();
+ test_alu_write_pc_interworking();
+}
+
+
+static unsigned long __kprobes __check_eq(unsigned long cpsr)
+{
+ return cpsr & PSR_Z_BIT;
+}
+
+static unsigned long __kprobes __check_ne(unsigned long cpsr)
+{
+ return (~cpsr) & PSR_Z_BIT;
+}
+
+static unsigned long __kprobes __check_cs(unsigned long cpsr)
+{
+ return cpsr & PSR_C_BIT;
+}
+
+static unsigned long __kprobes __check_cc(unsigned long cpsr)
+{
+ return (~cpsr) & PSR_C_BIT;
+}
+
+static unsigned long __kprobes __check_mi(unsigned long cpsr)
+{
+ return cpsr & PSR_N_BIT;
+}
+
+static unsigned long __kprobes __check_pl(unsigned long cpsr)
+{
+ return (~cpsr) & PSR_N_BIT;
+}
+
+static unsigned long __kprobes __check_vs(unsigned long cpsr)
+{
+ return cpsr & PSR_V_BIT;
+}
+
+static unsigned long __kprobes __check_vc(unsigned long cpsr)
+{
+ return (~cpsr) & PSR_V_BIT;
+}
+
+static unsigned long __kprobes __check_hi(unsigned long cpsr)
+{
+ cpsr &= ~(cpsr >> 1); /* PSR_C_BIT &= ~PSR_Z_BIT */
+ return cpsr & PSR_C_BIT;
+}
+
+static unsigned long __kprobes __check_ls(unsigned long cpsr)
+{
+ cpsr &= ~(cpsr >> 1); /* PSR_C_BIT &= ~PSR_Z_BIT */
+ return (~cpsr) & PSR_C_BIT;
+}
+
+static unsigned long __kprobes __check_ge(unsigned long cpsr)
+{
+ cpsr ^= (cpsr << 3); /* PSR_N_BIT ^= PSR_V_BIT */
+ return (~cpsr) & PSR_N_BIT;
+}
+
+static unsigned long __kprobes __check_lt(unsigned long cpsr)
+{
+ cpsr ^= (cpsr << 3); /* PSR_N_BIT ^= PSR_V_BIT */
+ return cpsr & PSR_N_BIT;
+}
+
+static unsigned long __kprobes __check_gt(unsigned long cpsr)
+{
+ unsigned long temp = cpsr ^ (cpsr << 3); /* PSR_N_BIT ^= PSR_V_BIT */
+ temp |= (cpsr << 1); /* PSR_N_BIT |= PSR_Z_BIT */
+ return (~temp) & PSR_N_BIT;
+}
+
+static unsigned long __kprobes __check_le(unsigned long cpsr)
+{
+ unsigned long temp = cpsr ^ (cpsr << 3); /* PSR_N_BIT ^= PSR_V_BIT */
+ temp |= (cpsr << 1); /* PSR_N_BIT |= PSR_Z_BIT */
+ return temp & PSR_N_BIT;
+}
+
+static unsigned long __kprobes __check_al(unsigned long cpsr)
+{
+ return true;
+}
+
+kprobe_check_cc * const kprobe_condition_checks[16] = {
+ &__check_eq, &__check_ne, &__check_cs, &__check_cc,
+ &__check_mi, &__check_pl, &__check_vs, &__check_vc,
+ &__check_hi, &__check_ls, &__check_ge, &__check_lt,
+ &__check_gt, &__check_le, &__check_al, &__check_al
+};
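The shift tricks above work because N, Z, C and V occupy adjacent CPSR bits 31..28, so shifting by 1 or 3 lines one flag up underneath another. A worked example for the HI condition (C set and Z clear), purely for illustration:

static void check_hi_example(void)
{
        unsigned long c_only  = PSR_C_BIT;              /* C=1, Z=0: HI true  */
        unsigned long c_and_z = PSR_C_BIT | PSR_Z_BIT;  /* C=1, Z=1: HI false */

        /* (cpsr >> 1) moves Z into the C position, so one AND clears C when Z is set. */
        BUG_ON(!__check_hi(c_only));
        BUG_ON(__check_hi(c_and_z));
}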
+
+
+void __kprobes kprobe_simulate_nop(struct kprobe *p, struct pt_regs *regs)
+{
+}
+
+void __kprobes kprobe_emulate_none(struct kprobe *p, struct pt_regs *regs)
+{
+ p->ainsn.insn_fn();
+}
+
+static void __kprobes simulate_ldm1stm1(struct kprobe *p, struct pt_regs *regs)
+{
+ kprobe_opcode_t insn = p->opcode;
+ int rn = (insn >> 16) & 0xf;
+ int lbit = insn & (1 << 20);
+ int wbit = insn & (1 << 21);
+ int ubit = insn & (1 << 23);
+ int pbit = insn & (1 << 24);
+ long *addr = (long *)regs->uregs[rn];
+ int reg_bit_vector;
+ int reg_count;
+
+ reg_count = 0;
+ reg_bit_vector = insn & 0xffff;
+ while (reg_bit_vector) {
+ reg_bit_vector &= (reg_bit_vector - 1);
+ ++reg_count;
+ }
+
+ if (!ubit)
+ addr -= reg_count;
+ addr += (!pbit == !ubit);
+
+ reg_bit_vector = insn & 0xffff;
+ while (reg_bit_vector) {
+ int reg = __ffs(reg_bit_vector);
+ reg_bit_vector &= (reg_bit_vector - 1);
+ if (lbit)
+ regs->uregs[reg] = *addr++;
+ else
+ *addr++ = regs->uregs[reg];
+ }
+
+ if (wbit) {
+ if (!ubit)
+ addr -= reg_count;
+ addr -= (!pbit == !ubit);
+ regs->uregs[rn] = (long)addr;
+ }
+}
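The addr += (!pbit == !ubit) step above folds the four LDM/STM addressing modes into one loop. Working it through for an n-register transfer from base (word-sized pointer arithmetic, illustration only):

static long *ldmstm_start_addr(long *base, int n, int pbit, int ubit)
{
        long *addr = base;

        if (!ubit)
                addr -= n;
        addr += (!pbit == !ubit);

        /* IA: base    IB: base + 1    DA: base - (n - 1)    DB: base - n */
        return addr;
}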
+
+static void __kprobes simulate_stm1_pc(struct kprobe *p, struct pt_regs *regs)
+{
+ regs->ARM_pc = (long)p->addr + str_pc_offset;
+ simulate_ldm1stm1(p, regs);
+ regs->ARM_pc = (long)p->addr + 4;
+}
+
+static void __kprobes simulate_ldm1_pc(struct kprobe *p, struct pt_regs *regs)
+{
+ simulate_ldm1stm1(p, regs);
+ load_write_pc(regs->ARM_pc, regs);
+}
+
+static void __kprobes
+emulate_generic_r0_12_noflags(struct kprobe *p, struct pt_regs *regs)
+{
+ register void *rregs asm("r1") = regs;
+ register void *rfn asm("lr") = p->ainsn.insn_fn;
+
+ __asm__ __volatile__ (
+ "stmdb sp!, {%[regs], r11} \n\t"
+ "ldmia %[regs], {r0-r12} \n\t"
+#if __LINUX_ARM_ARCH__ >= 6
+ "blx %[fn] \n\t"
+#else
+ "str %[fn], [sp, #-4]! \n\t"
+ "adr lr, 1f \n\t"
+ "ldr pc, [sp], #4 \n\t"
+ "1: \n\t"
+#endif
+ "ldr lr, [sp], #4 \n\t" /* lr = regs */
+ "stmia lr, {r0-r12} \n\t"
+ "ldr r11, [sp], #4 \n\t"
+ : [regs] "=r" (rregs), [fn] "=r" (rfn)
+ : "0" (rregs), "1" (rfn)
+ : "r0", "r2", "r3", "r4", "r5", "r6", "r7",
+ "r8", "r9", "r10", "r12", "memory", "cc"
+ );
+}
+
+static void __kprobes
+emulate_generic_r2_14_noflags(struct kprobe *p, struct pt_regs *regs)
+{
+ emulate_generic_r0_12_noflags(p, (struct pt_regs *)(regs->uregs+2));
+}
+
+static void __kprobes
+emulate_ldm_r3_15(struct kprobe *p, struct pt_regs *regs)
+{
+ emulate_generic_r0_12_noflags(p, (struct pt_regs *)(regs->uregs+3));
+ load_write_pc(regs->ARM_pc, regs);
+}
+
+enum kprobe_insn __kprobes
+kprobe_decode_ldmstm(kprobe_opcode_t insn, struct arch_specific_insn *asi)
+{
+ kprobe_insn_handler_t *handler = 0;
+ unsigned reglist = insn & 0xffff;
+ int is_ldm = insn & 0x100000;
+ int rn = (insn >> 16) & 0xf;
+
+ if (rn <= 12 && (reglist & 0xe000) == 0) {
+ /* Instruction only uses registers in the range R0..R12 */
+ handler = emulate_generic_r0_12_noflags;
+
+ } else if (rn >= 2 && (reglist & 0x8003) == 0) {
+ /* Instruction only uses registers in the range R2..R14 */
+ rn -= 2;
+ reglist >>= 2;
+ handler = emulate_generic_r2_14_noflags;
+
+ } else if (rn >= 3 && (reglist & 0x0007) == 0) {
+ /* Instruction only uses registers in the range R3..R15 */
+ if (is_ldm && (reglist & 0x8000)) {
+ rn -= 3;
+ reglist >>= 3;
+ handler = emulate_ldm_r3_15;
+ }
+ }
+
+ if (handler) {
+ /* We can emulate the instruction in (possibly) modified form */
+ asi->insn[0] = (insn & 0xfff00000) | (rn << 16) | reglist;
+ asi->insn_handler = handler;
+ return INSN_GOOD;
+ }
+
+ /* Fallback to slower simulation... */
+ if (reglist & 0x8000)
+ handler = is_ldm ? simulate_ldm1_pc : simulate_stm1_pc;
+ else
+ handler = simulate_ldm1stm1;
+ asi->insn_handler = handler;
+ return INSN_GOOD_NO_SLOT;
+}
+
+
+/*
+ * Prepare an instruction slot to receive an instruction for emulating.
+ * This is done by placing a subroutine return after the location where the
+ * instruction will be placed. We also modify ARM instructions to be
+ * unconditional as the condition code will already be checked before any
+ * emulation handler is called.
+ */
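+/*
+ * As an illustrative example (values not taken from the decoding tables):
+ * probing the ARM instruction "addeq r1, r2, r3" (0x00821003) leaves the
+ * slot holding 0xe0821003 ("add r1, r2, r3", made unconditional), followed
+ * by the subroutine return set up below, so calling insn_fn() executes just
+ * that one instruction and returns.
+ */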
+static kprobe_opcode_t __kprobes
+prepare_emulated_insn(kprobe_opcode_t insn, struct arch_specific_insn *asi,
+ bool thumb)
+{
+#ifdef CONFIG_THUMB2_KERNEL
+ if (thumb) {
+ u16 *thumb_insn = (u16 *)asi->insn;
+ thumb_insn[1] = 0x4770; /* Thumb bx lr */
+ thumb_insn[2] = 0x4770; /* Thumb bx lr */
+ return insn;
+ }
+ asi->insn[1] = 0xe12fff1e; /* ARM bx lr */
+#else
+ asi->insn[1] = 0xe1a0f00e; /* mov pc, lr */
+#endif
+ /* Make an ARM instruction unconditional */
+ if (insn < 0xe0000000)
+ insn = (insn | 0xe0000000) & ~0x10000000;
+ return insn;
+}
+
+/*
+ * Write a (probably modified) instruction into the slot previously prepared by
+ * prepare_emulated_insn
+ */
+static void __kprobes
+set_emulated_insn(kprobe_opcode_t insn, struct arch_specific_insn *asi,
+ bool thumb)
+{
+#ifdef CONFIG_THUMB2_KERNEL
+ if (thumb) {
+ u16 *ip = (u16 *)asi->insn;
+ if (is_wide_instruction(insn))
+ *ip++ = insn >> 16;
+ *ip++ = insn;
+ return;
+ }
+#endif
+ asi->insn[0] = insn;
+}
+
+/*
+ * When we modify the register numbers encoded in an instruction to be emulated,
+ * the new values come from this define. For ARM and 32-bit Thumb instructions
+ * this gives...
+ *
+ * bit position 16 12 8 4 0
+ * ---------------+---+---+---+---+---+
+ * register r2 r0 r1 -- r3
+ */
+#define INSN_NEW_BITS 0x00020103
+
+/* Each nibble has same value as that at INSN_NEW_BITS bit 16 */
+#define INSN_SAMEAS16_BITS 0x22222222
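+/*
+ * Illustrative example (assuming every register nibble is accepted as a
+ * general register): "mla r10, r6, r7, r8" encodes its registers in the
+ * nibbles at bits 16, 12, 8 and 0 of 0xe02a8796. Substituting the values
+ * above gives 0xe0220193, i.e. "mla r2, r3, r1, r0", which is what ends up
+ * in the instruction slot for the emulation handler to execute.
+ */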
+
+/*
+ * Validate and modify each of the registers encoded in an instruction.
+ *
+ * Each nibble in regs contains a value from enum decode_reg_type. For each
+ * non-zero value, the corresponding nibble in pinsn is validated and modified
+ * according to the type.
+ */
+static bool __kprobes decode_regs(kprobe_opcode_t* pinsn, u32 regs)
+{
+ kprobe_opcode_t insn = *pinsn;
+ kprobe_opcode_t mask = 0xf; /* Start at least significant nibble */
+
+ for (; regs != 0; regs >>= 4, mask <<= 4) {
+
+ kprobe_opcode_t new_bits = INSN_NEW_BITS;
+
+ switch (regs & 0xf) {
+
+ case REG_TYPE_NONE:
+ /* Nibble not a register, skip to next */
+ continue;
+
+ case REG_TYPE_ANY:
+ /* Any register is allowed */
+ break;
+
+ case REG_TYPE_SAMEAS16:
+ /* Replace register with same as at bit position 16 */
+ new_bits = INSN_SAMEAS16_BITS;
+ break;
+
+ case REG_TYPE_SP:
+ /* Only allow SP (R13) */
+ if ((insn ^ 0xdddddddd) & mask)
+ goto reject;
+ break;
+
+ case REG_TYPE_PC:
+ /* Only allow PC (R15) */
+ if ((insn ^ 0xffffffff) & mask)
+ goto reject;
+ break;
+
+ case REG_TYPE_NOSP:
+ /* Reject SP (R13) */
+ if (((insn ^ 0xdddddddd) & mask) == 0)
+ goto reject;
+ break;
+
+ case REG_TYPE_NOSPPC:
+ case REG_TYPE_NOSPPCX:
+ /* Reject SP and PC (R13 and R15) */
+ if (((insn ^ 0xdddddddd) & 0xdddddddd & mask) == 0)
+ goto reject;
+ break;
+
+ case REG_TYPE_NOPCWB:
+ if (!is_writeback(insn))
+ break; /* No writeback, so any register is OK */
+ /* fall through... */
+ case REG_TYPE_NOPC:
+ case REG_TYPE_NOPCX:
+ /* Reject PC (R15) */
+ if (((insn ^ 0xffffffff) & mask) == 0)
+ goto reject;
+ break;
+ }
+
+ /* Replace value of nibble with new register number... */
+ insn &= ~mask;
+ insn |= new_bits & mask;
+ }
+
+ *pinsn = insn;
+ return true;
+
+reject:
+ return false;
+}
+
+static const int decode_struct_sizes[NUM_DECODE_TYPES] = {
+ [DECODE_TYPE_TABLE] = sizeof(struct decode_table),
+ [DECODE_TYPE_CUSTOM] = sizeof(struct decode_custom),
+ [DECODE_TYPE_SIMULATE] = sizeof(struct decode_simulate),
+ [DECODE_TYPE_EMULATE] = sizeof(struct decode_emulate),
+ [DECODE_TYPE_OR] = sizeof(struct decode_or),
+ [DECODE_TYPE_REJECT] = sizeof(struct decode_reject)
+};
+
+/*
+ * kprobe_decode_insn operates on data tables in order to decode an ARM
+ * architecture instruction onto which a kprobe has been placed.
+ *
+ * These instruction decoding tables are a concatenation of entries, each
+ * of which consists of one of the following structs:
+ *
+ * decode_table
+ * decode_custom
+ * decode_simulate
+ * decode_emulate
+ * decode_or
+ * decode_reject
+ *
+ * Each of these starts with a struct decode_header which has the following
+ * fields:
+ *
+ * type_regs
+ * mask
+ * value
+ *
+ * The least significant DECODE_TYPE_BITS of type_regs contains a value
+ * from enum decode_type; this indicates which of the decode_* structs
+ * the entry contains. The value DECODE_TYPE_END indicates the end of the
+ * table.
+ *
+ * When the table is parsed, each entry is checked in turn to see if it
+ * matches the instruction to be decoded using the test:
+ *
+ * (insn & mask) == value
+ *
+ * If no match is found before the end of the table is reached then decoding
+ * fails with INSN_REJECTED.
+ *
+ * When a match is found, decode_regs() is called to validate and modify each
+ * of the registers encoded in the instruction; the data it uses to do this
+ * is (type_regs >> DECODE_TYPE_BITS). A validation failure will cause decoding
+ * to fail with INSN_REJECTED.
+ *
+ * Once the instruction has passed the above tests, further processing
+ * depends on the type of the table entry's decode struct.
+ *
+ */
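+/*
+ * As an illustrative example (not an excerpt from the actual tables): an
+ * entry with mask 0x0fe000f0 and value 0x00200090 would match the MLA
+ * instruction "cccc 0000 001S dddd nnnn ssss 1001 mmmm"; the registers part
+ * of its type_regs field would then tell decode_regs() how to check and
+ * renumber the four register nibbles before the entry's handler is used.
+ */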
+int __kprobes
+kprobe_decode_insn(kprobe_opcode_t insn, struct arch_specific_insn *asi,
+ const union decode_item *table, bool thumb)
+{
+ const struct decode_header *h = (struct decode_header *)table;
+ const struct decode_header *next;
+ bool matched = false;
+
+ insn = prepare_emulated_insn(insn, asi, thumb);
+
+ for (;; h = next) {
+ enum decode_type type = h->type_regs.bits & DECODE_TYPE_MASK;
+ u32 regs = h->type_regs.bits >> DECODE_TYPE_BITS;
+
+ if (type == DECODE_TYPE_END)
+ return INSN_REJECTED;
+
+ next = (struct decode_header *)
+ ((uintptr_t)h + decode_struct_sizes[type]);
+
+ if (!matched && (insn & h->mask.bits) != h->value.bits)
+ continue;
+
+ if (!decode_regs(&insn, regs))
+ return INSN_REJECTED;
+
+ switch (type) {
+
+ case DECODE_TYPE_TABLE: {
+ struct decode_table *d = (struct decode_table *)h;
+ next = (struct decode_header *)d->table.table;
+ break;
+ }
+
+ case DECODE_TYPE_CUSTOM: {
+ struct decode_custom *d = (struct decode_custom *)h;
+ return (*d->decoder.decoder)(insn, asi);
+ }
+
+ case DECODE_TYPE_SIMULATE: {
+ struct decode_simulate *d = (struct decode_simulate *)h;
+ asi->insn_handler = d->handler.handler;
+ return INSN_GOOD_NO_SLOT;
+ }
+
+ case DECODE_TYPE_EMULATE: {
+ struct decode_emulate *d = (struct decode_emulate *)h;
+ asi->insn_handler = d->handler.handler;
+ set_emulated_insn(insn, asi, thumb);
+ return INSN_GOOD;
+ }
+
+ case DECODE_TYPE_OR:
+ matched = true;
+ break;
+
+ case DECODE_TYPE_REJECT:
+ default:
+ return INSN_REJECTED;
+ }
+ }
+ }
diff --git a/arch/arm/kernel/kprobes-decode.c b/arch/arm/kernel/kprobes-decode.c
deleted file mode 100644
index 15eeff6aea0e..000000000000
--- a/arch/arm/kernel/kprobes-decode.c
+++ /dev/null
@@ -1,1670 +0,0 @@
-/*
- * arch/arm/kernel/kprobes-decode.c
- *
- * Copyright (C) 2006, 2007 Motorola Inc.
- *
- * This program is free software; you can redistribute it and/or modify
- * it under the terms of the GNU General Public License version 2 as
- * published by the Free Software Foundation.
- *
- * This program is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
- * General Public License for more details.
- */
-
-/*
- * We do not have hardware single-stepping on ARM. This
- * effort is further complicated by the ARM not having a
- * "next PC" register. Instructions that change the PC
- * can't be safely single-stepped in a MP environment, so
- * we have a lot of work to do:
- *
- * In the prepare phase:
- * *) If it is an instruction that does anything
- * with the CPU mode, we reject it for a kprobe.
- * (This is out of laziness rather than need. The
- * instructions could be simulated.)
- *
- * *) Otherwise, decode the instruction rewriting its
- * registers to take fixed, ordered registers and
- * setting a handler for it to run the instruction.
- *
- * In the execution phase by an instruction's handler:
- *
- * *) If the PC is written to by the instruction, the
- * instruction must be fully simulated in software.
- *
- * *) Otherwise, a modified form of the instruction is
- * directly executed. Its handler calls the
- * instruction in insn[0]. In insn[1] is a
- * "mov pc, lr" to return.
- *
- * Before calling, load up the reordered registers
- * from the original instruction's registers. If one
- * of the original input registers is the PC, compute
- * and adjust the appropriate input register.
- *
- * After call completes, copy the output registers to
- * the original instruction's original registers.
- *
- * We don't use a real breakpoint instruction since that
- * would, in the kernel, take us from SVC mode to SVC
- * mode and lose the link register. Instead we use an
- * undefined instruction. To simplify processing, the
- * undefined instruction used for kprobes must be reserved
- * exclusively for kprobes use.
- *
- * TODO: ifdef out some instruction decoding based on architecture.
- */
-
-#include <linux/kernel.h>
-#include <linux/kprobes.h>
-
-#define sign_extend(x, signbit) ((x) | (0 - ((x) & (1 << (signbit)))))
-
-#define branch_displacement(insn) sign_extend(((insn) & 0xffffff) << 2, 25)
-
-#define is_r15(insn, bitpos) (((insn) & (0xf << bitpos)) == (0xf << bitpos))
-
-/*
- * Test if load/store instructions write back the address register,
- * i.e. if P (bit 24) == 0 or W (bit 21) == 1.
- */
-#define is_writeback(insn) ((insn ^ 0x01000000) & 0x01200000)
-
-#define PSR_fs (PSR_f|PSR_s)
-
-#define KPROBE_RETURN_INSTRUCTION 0xe1a0f00e /* mov pc, lr */
-
-typedef long (insn_0arg_fn_t)(void);
-typedef long (insn_1arg_fn_t)(long);
-typedef long (insn_2arg_fn_t)(long, long);
-typedef long (insn_3arg_fn_t)(long, long, long);
-typedef long (insn_4arg_fn_t)(long, long, long, long);
-typedef long long (insn_llret_0arg_fn_t)(void);
-typedef long long (insn_llret_3arg_fn_t)(long, long, long);
-typedef long long (insn_llret_4arg_fn_t)(long, long, long, long);
-
-union reg_pair {
- long long dr;
-#ifdef __LITTLE_ENDIAN
- struct { long r0, r1; };
-#else
- struct { long r1, r0; };
-#endif
-};
-
-/*
- * For STR and STM instructions, an ARM core may choose to use either
- * a +8 or a +12 displacement from the current instruction's address.
- * Whichever value is chosen for a given core, it must be the same for
- * both instructions and may not change. This function measures it.
- */
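-/*
- * In find_str_pc_offset() below, "ret" is first set to the address of the
- * following str instruction; that str then stores its own pc value (its
- * address plus 8 or 12), so the final subtraction leaves the displacement,
- * 8 or 12, in str_pc_offset.
- */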
-
-static int str_pc_offset;
-
-static void __init find_str_pc_offset(void)
-{
- int addr, scratch, ret;
-
- __asm__ (
- "sub %[ret], pc, #4 \n\t"
- "str pc, %[addr] \n\t"
- "ldr %[scr], %[addr] \n\t"
- "sub %[ret], %[scr], %[ret] \n\t"
- : [ret] "=r" (ret), [scr] "=r" (scratch), [addr] "+m" (addr));
-
- str_pc_offset = ret;
-}
-
-/*
- * The insnslot_?arg_r[w]flags() functions below are to keep the
- * msr -> *fn -> mrs instruction sequences indivisible so that
- * the state of the CPSR flags isn't inadvertently modified
- * just before or just after the call.
- */
-
-static inline long __kprobes
-insnslot_0arg_rflags(long cpsr, insn_0arg_fn_t *fn)
-{
- register long ret asm("r0");
-
- __asm__ __volatile__ (
- "msr cpsr_fs, %[cpsr] \n\t"
- "mov lr, pc \n\t"
- "mov pc, %[fn] \n\t"
- : "=r" (ret)
- : [cpsr] "r" (cpsr), [fn] "r" (fn)
- : "lr", "cc"
- );
- return ret;
-}
-
-static inline long long __kprobes
-insnslot_llret_0arg_rflags(long cpsr, insn_llret_0arg_fn_t *fn)
-{
- register long ret0 asm("r0");
- register long ret1 asm("r1");
- union reg_pair fnr;
-
- __asm__ __volatile__ (
- "msr cpsr_fs, %[cpsr] \n\t"
- "mov lr, pc \n\t"
- "mov pc, %[fn] \n\t"
- : "=r" (ret0), "=r" (ret1)
- : [cpsr] "r" (cpsr), [fn] "r" (fn)
- : "lr", "cc"
- );
- fnr.r0 = ret0;
- fnr.r1 = ret1;
- return fnr.dr;
-}
-
-static inline long __kprobes
-insnslot_1arg_rflags(long r0, long cpsr, insn_1arg_fn_t *fn)
-{
- register long rr0 asm("r0") = r0;
- register long ret asm("r0");
-
- __asm__ __volatile__ (
- "msr cpsr_fs, %[cpsr] \n\t"
- "mov lr, pc \n\t"
- "mov pc, %[fn] \n\t"
- : "=r" (ret)
- : "0" (rr0), [cpsr] "r" (cpsr), [fn] "r" (fn)
- : "lr", "cc"
- );
- return ret;
-}
-
-static inline long __kprobes
-insnslot_2arg_rflags(long r0, long r1, long cpsr, insn_2arg_fn_t *fn)
-{
- register long rr0 asm("r0") = r0;
- register long rr1 asm("r1") = r1;
- register long ret asm("r0");
-
- __asm__ __volatile__ (
- "msr cpsr_fs, %[cpsr] \n\t"
- "mov lr, pc \n\t"
- "mov pc, %[fn] \n\t"
- : "=r" (ret)
- : "0" (rr0), "r" (rr1),
- [cpsr] "r" (cpsr), [fn] "r" (fn)
- : "lr", "cc"
- );
- return ret;
-}
-
-static inline long __kprobes
-insnslot_3arg_rflags(long r0, long r1, long r2, long cpsr, insn_3arg_fn_t *fn)
-{
- register long rr0 asm("r0") = r0;
- register long rr1 asm("r1") = r1;
- register long rr2 asm("r2") = r2;
- register long ret asm("r0");
-
- __asm__ __volatile__ (
- "msr cpsr_fs, %[cpsr] \n\t"
- "mov lr, pc \n\t"
- "mov pc, %[fn] \n\t"
- : "=r" (ret)
- : "0" (rr0), "r" (rr1), "r" (rr2),
- [cpsr] "r" (cpsr), [fn] "r" (fn)
- : "lr", "cc"
- );
- return ret;
-}
-
-static inline long long __kprobes
-insnslot_llret_3arg_rflags(long r0, long r1, long r2, long cpsr,
- insn_llret_3arg_fn_t *fn)
-{
- register long rr0 asm("r0") = r0;
- register long rr1 asm("r1") = r1;
- register long rr2 asm("r2") = r2;
- register long ret0 asm("r0");
- register long ret1 asm("r1");
- union reg_pair fnr;
-
- __asm__ __volatile__ (
- "msr cpsr_fs, %[cpsr] \n\t"
- "mov lr, pc \n\t"
- "mov pc, %[fn] \n\t"
- : "=r" (ret0), "=r" (ret1)
- : "0" (rr0), "r" (rr1), "r" (rr2),
- [cpsr] "r" (cpsr), [fn] "r" (fn)
- : "lr", "cc"
- );
- fnr.r0 = ret0;
- fnr.r1 = ret1;
- return fnr.dr;
-}
-
-static inline long __kprobes
-insnslot_4arg_rflags(long r0, long r1, long r2, long r3, long cpsr,
- insn_4arg_fn_t *fn)
-{
- register long rr0 asm("r0") = r0;
- register long rr1 asm("r1") = r1;
- register long rr2 asm("r2") = r2;
- register long rr3 asm("r3") = r3;
- register long ret asm("r0");
-
- __asm__ __volatile__ (
- "msr cpsr_fs, %[cpsr] \n\t"
- "mov lr, pc \n\t"
- "mov pc, %[fn] \n\t"
- : "=r" (ret)
- : "0" (rr0), "r" (rr1), "r" (rr2), "r" (rr3),
- [cpsr] "r" (cpsr), [fn] "r" (fn)
- : "lr", "cc"
- );
- return ret;
-}
-
-static inline long __kprobes
-insnslot_1arg_rwflags(long r0, long *cpsr, insn_1arg_fn_t *fn)
-{
- register long rr0 asm("r0") = r0;
- register long ret asm("r0");
- long oldcpsr = *cpsr;
- long newcpsr;
-
- __asm__ __volatile__ (
- "msr cpsr_fs, %[oldcpsr] \n\t"
- "mov lr, pc \n\t"
- "mov pc, %[fn] \n\t"
- "mrs %[newcpsr], cpsr \n\t"
- : "=r" (ret), [newcpsr] "=r" (newcpsr)
- : "0" (rr0), [oldcpsr] "r" (oldcpsr), [fn] "r" (fn)
- : "lr", "cc"
- );
- *cpsr = (oldcpsr & ~PSR_fs) | (newcpsr & PSR_fs);
- return ret;
-}
-
-static inline long __kprobes
-insnslot_2arg_rwflags(long r0, long r1, long *cpsr, insn_2arg_fn_t *fn)
-{
- register long rr0 asm("r0") = r0;
- register long rr1 asm("r1") = r1;
- register long ret asm("r0");
- long oldcpsr = *cpsr;
- long newcpsr;
-
- __asm__ __volatile__ (
- "msr cpsr_fs, %[oldcpsr] \n\t"
- "mov lr, pc \n\t"
- "mov pc, %[fn] \n\t"
- "mrs %[newcpsr], cpsr \n\t"
- : "=r" (ret), [newcpsr] "=r" (newcpsr)
- : "0" (rr0), "r" (rr1), [oldcpsr] "r" (oldcpsr), [fn] "r" (fn)
- : "lr", "cc"
- );
- *cpsr = (oldcpsr & ~PSR_fs) | (newcpsr & PSR_fs);
- return ret;
-}
-
-static inline long __kprobes
-insnslot_3arg_rwflags(long r0, long r1, long r2, long *cpsr,
- insn_3arg_fn_t *fn)
-{
- register long rr0 asm("r0") = r0;
- register long rr1 asm("r1") = r1;
- register long rr2 asm("r2") = r2;
- register long ret asm("r0");
- long oldcpsr = *cpsr;
- long newcpsr;
-
- __asm__ __volatile__ (
- "msr cpsr_fs, %[oldcpsr] \n\t"
- "mov lr, pc \n\t"
- "mov pc, %[fn] \n\t"
- "mrs %[newcpsr], cpsr \n\t"
- : "=r" (ret), [newcpsr] "=r" (newcpsr)
- : "0" (rr0), "r" (rr1), "r" (rr2),
- [oldcpsr] "r" (oldcpsr), [fn] "r" (fn)
- : "lr", "cc"
- );
- *cpsr = (oldcpsr & ~PSR_fs) | (newcpsr & PSR_fs);
- return ret;
-}
-
-static inline long __kprobes
-insnslot_4arg_rwflags(long r0, long r1, long r2, long r3, long *cpsr,
- insn_4arg_fn_t *fn)
-{
- register long rr0 asm("r0") = r0;
- register long rr1 asm("r1") = r1;
- register long rr2 asm("r2") = r2;
- register long rr3 asm("r3") = r3;
- register long ret asm("r0");
- long oldcpsr = *cpsr;
- long newcpsr;
-
- __asm__ __volatile__ (
- "msr cpsr_fs, %[oldcpsr] \n\t"
- "mov lr, pc \n\t"
- "mov pc, %[fn] \n\t"
- "mrs %[newcpsr], cpsr \n\t"
- : "=r" (ret), [newcpsr] "=r" (newcpsr)
- : "0" (rr0), "r" (rr1), "r" (rr2), "r" (rr3),
- [oldcpsr] "r" (oldcpsr), [fn] "r" (fn)
- : "lr", "cc"
- );
- *cpsr = (oldcpsr & ~PSR_fs) | (newcpsr & PSR_fs);
- return ret;
-}
-
-static inline long long __kprobes
-insnslot_llret_4arg_rwflags(long r0, long r1, long r2, long r3, long *cpsr,
- insn_llret_4arg_fn_t *fn)
-{
- register long rr0 asm("r0") = r0;
- register long rr1 asm("r1") = r1;
- register long rr2 asm("r2") = r2;
- register long rr3 asm("r3") = r3;
- register long ret0 asm("r0");
- register long ret1 asm("r1");
- long oldcpsr = *cpsr;
- long newcpsr;
- union reg_pair fnr;
-
- __asm__ __volatile__ (
- "msr cpsr_fs, %[oldcpsr] \n\t"
- "mov lr, pc \n\t"
- "mov pc, %[fn] \n\t"
- "mrs %[newcpsr], cpsr \n\t"
- : "=r" (ret0), "=r" (ret1), [newcpsr] "=r" (newcpsr)
- : "0" (rr0), "r" (rr1), "r" (rr2), "r" (rr3),
- [oldcpsr] "r" (oldcpsr), [fn] "r" (fn)
- : "lr", "cc"
- );
- *cpsr = (oldcpsr & ~PSR_fs) | (newcpsr & PSR_fs);
- fnr.r0 = ret0;
- fnr.r1 = ret1;
- return fnr.dr;
-}
-
-/*
- * To avoid the complications of mimicking single-stepping on a
- * processor without a Next-PC or a single-step mode, and to
- * avoid having to deal with the side-effects of boosting, we
- * simulate or emulate (almost) all ARM instructions.
- *
- * "Simulation" is where the instruction's behavior is duplicated in
- * C code. "Emulation" is where the original instruction is rewritten
- * and executed, often by altering its registers.
- *
- * By having all behavior of the kprobe'd instruction completed before
- * returning from the kprobe_handler(), all locks (scheduler and
- * interrupt) can safely be released. There is no need for secondary
- * breakpoints, no race with MP or preemptible kernels, nor any need to
- * clean up resource counts at a later time, which would impact overall
- * system performance. By rewriting the instruction, only the minimum
- * registers need to be loaded and saved back, optimizing performance.
- *
- * Calling the insnslot_*_rwflags version of a function doesn't hurt
- * anything even when the CPSR flags aren't updated by the
- * instruction. It's just a little slower in return for saving
- * a little space by not having a duplicate function that doesn't
- * update the flags. (The same optimization applies to
- * instructions that do or don't perform register writeback.)
- * Also, instructions can either read the flags, only write the
- * flags, or read and write the flags. To reduce the number of
- * combinations rather than for sheer performance, the flag
- * functions simply assume both a read and a write of the flags.
- */
-
-static void __kprobes simulate_bbl(struct kprobe *p, struct pt_regs *regs)
-{
- kprobe_opcode_t insn = p->opcode;
- long iaddr = (long)p->addr;
- int disp = branch_displacement(insn);
-
- if (insn & (1 << 24))
- regs->ARM_lr = iaddr + 4;
-
- regs->ARM_pc = iaddr + 8 + disp;
-}
-
-static void __kprobes simulate_blx1(struct kprobe *p, struct pt_regs *regs)
-{
- kprobe_opcode_t insn = p->opcode;
- long iaddr = (long)p->addr;
- int disp = branch_displacement(insn);
-
- regs->ARM_lr = iaddr + 4;
- regs->ARM_pc = iaddr + 8 + disp + ((insn >> 23) & 0x2);
- regs->ARM_cpsr |= PSR_T_BIT;
-}
-
-static void __kprobes simulate_blx2bx(struct kprobe *p, struct pt_regs *regs)
-{
- kprobe_opcode_t insn = p->opcode;
- int rm = insn & 0xf;
- long rmv = regs->uregs[rm];
-
- if (insn & (1 << 5))
- regs->ARM_lr = (long)p->addr + 4;
-
- regs->ARM_pc = rmv & ~0x1;
- regs->ARM_cpsr &= ~PSR_T_BIT;
- if (rmv & 0x1)
- regs->ARM_cpsr |= PSR_T_BIT;
-}
-
-static void __kprobes simulate_mrs(struct kprobe *p, struct pt_regs *regs)
-{
- kprobe_opcode_t insn = p->opcode;
- int rd = (insn >> 12) & 0xf;
- unsigned long mask = 0xf8ff03df; /* Mask out execution state */
- regs->uregs[rd] = regs->ARM_cpsr & mask;
-}
-
-static void __kprobes simulate_ldm1stm1(struct kprobe *p, struct pt_regs *regs)
-{
- kprobe_opcode_t insn = p->opcode;
- int rn = (insn >> 16) & 0xf;
- int lbit = insn & (1 << 20);
- int wbit = insn & (1 << 21);
- int ubit = insn & (1 << 23);
- int pbit = insn & (1 << 24);
- long *addr = (long *)regs->uregs[rn];
- int reg_bit_vector;
- int reg_count;
-
- reg_count = 0;
- reg_bit_vector = insn & 0xffff;
- while (reg_bit_vector) {
- reg_bit_vector &= (reg_bit_vector - 1);
- ++reg_count;
- }
-
- if (!ubit)
- addr -= reg_count;
- addr += (!pbit == !ubit);
-
- reg_bit_vector = insn & 0xffff;
- while (reg_bit_vector) {
- int reg = __ffs(reg_bit_vector);
- reg_bit_vector &= (reg_bit_vector - 1);
- if (lbit)
- regs->uregs[reg] = *addr++;
- else
- *addr++ = regs->uregs[reg];
- }
-
- if (wbit) {
- if (!ubit)
- addr -= reg_count;
- addr -= (!pbit == !ubit);
- regs->uregs[rn] = (long)addr;
- }
-}
-
-static void __kprobes simulate_stm1_pc(struct kprobe *p, struct pt_regs *regs)
-{
- regs->ARM_pc = (long)p->addr + str_pc_offset;
- simulate_ldm1stm1(p, regs);
- regs->ARM_pc = (long)p->addr + 4;
-}
-
-static void __kprobes simulate_mov_ipsp(struct kprobe *p, struct pt_regs *regs)
-{
- regs->uregs[12] = regs->uregs[13];
-}
-
-static void __kprobes emulate_ldrd(struct kprobe *p, struct pt_regs *regs)
-{
- insn_2arg_fn_t *i_fn = (insn_2arg_fn_t *)&p->ainsn.insn[0];
- kprobe_opcode_t insn = p->opcode;
- long ppc = (long)p->addr + 8;
- int rd = (insn >> 12) & 0xf;
- int rn = (insn >> 16) & 0xf;
- int rm = insn & 0xf; /* rm may be invalid, don't care. */
- long rmv = (rm == 15) ? ppc : regs->uregs[rm];
- long rnv = (rn == 15) ? ppc : regs->uregs[rn];
-
- /* Not following the C calling convention here, so need asm(). */
- __asm__ __volatile__ (
- "ldr r0, %[rn] \n\t"
- "ldr r1, %[rm] \n\t"
- "msr cpsr_fs, %[cpsr]\n\t"
- "mov lr, pc \n\t"
- "mov pc, %[i_fn] \n\t"
- "str r0, %[rn] \n\t" /* in case of writeback */
- "str r2, %[rd0] \n\t"
- "str r3, %[rd1] \n\t"
- : [rn] "+m" (rnv),
- [rd0] "=m" (regs->uregs[rd]),
- [rd1] "=m" (regs->uregs[rd+1])
- : [rm] "m" (rmv),
- [cpsr] "r" (regs->ARM_cpsr),
- [i_fn] "r" (i_fn)
- : "r0", "r1", "r2", "r3", "lr", "cc"
- );
- if (is_writeback(insn))
- regs->uregs[rn] = rnv;
-}
-
-static void __kprobes emulate_strd(struct kprobe *p, struct pt_regs *regs)
-{
- insn_4arg_fn_t *i_fn = (insn_4arg_fn_t *)&p->ainsn.insn[0];
- kprobe_opcode_t insn = p->opcode;
- long ppc = (long)p->addr + 8;
- int rd = (insn >> 12) & 0xf;
- int rn = (insn >> 16) & 0xf;
- int rm = insn & 0xf;
- long rnv = (rn == 15) ? ppc : regs->uregs[rn];
- /* rm/rmv may be invalid, don't care. */
- long rmv = (rm == 15) ? ppc : regs->uregs[rm];
- long rnv_wb;
-
- rnv_wb = insnslot_4arg_rflags(rnv, rmv, regs->uregs[rd],
- regs->uregs[rd+1],
- regs->ARM_cpsr, i_fn);
- if (is_writeback(insn))
- regs->uregs[rn] = rnv_wb;
-}
-
-static void __kprobes emulate_ldr(struct kprobe *p, struct pt_regs *regs)
-{
- insn_llret_3arg_fn_t *i_fn = (insn_llret_3arg_fn_t *)&p->ainsn.insn[0];
- kprobe_opcode_t insn = p->opcode;
- long ppc = (long)p->addr + 8;
- union reg_pair fnr;
- int rd = (insn >> 12) & 0xf;
- int rn = (insn >> 16) & 0xf;
- int rm = insn & 0xf;
- long rdv;
- long rnv = (rn == 15) ? ppc : regs->uregs[rn];
- long rmv = (rm == 15) ? ppc : regs->uregs[rm];
- long cpsr = regs->ARM_cpsr;
-
- fnr.dr = insnslot_llret_3arg_rflags(rnv, 0, rmv, cpsr, i_fn);
- if (rn != 15)
- regs->uregs[rn] = fnr.r0; /* Save Rn in case of writeback. */
- rdv = fnr.r1;
-
- if (rd == 15) {
-#if __LINUX_ARM_ARCH__ >= 5
- cpsr &= ~PSR_T_BIT;
- if (rdv & 0x1)
- cpsr |= PSR_T_BIT;
- regs->ARM_cpsr = cpsr;
- rdv &= ~0x1;
-#else
- rdv &= ~0x2;
-#endif
- }
- regs->uregs[rd] = rdv;
-}
-
-static void __kprobes emulate_str(struct kprobe *p, struct pt_regs *regs)
-{
- insn_3arg_fn_t *i_fn = (insn_3arg_fn_t *)&p->ainsn.insn[0];
- kprobe_opcode_t insn = p->opcode;
- long iaddr = (long)p->addr;
- int rd = (insn >> 12) & 0xf;
- int rn = (insn >> 16) & 0xf;
- int rm = insn & 0xf;
- long rdv = (rd == 15) ? iaddr + str_pc_offset : regs->uregs[rd];
- long rnv = (rn == 15) ? iaddr + 8 : regs->uregs[rn];
- long rmv = regs->uregs[rm]; /* rm/rmv may be invalid, don't care. */
- long rnv_wb;
-
- rnv_wb = insnslot_3arg_rflags(rnv, rdv, rmv, regs->ARM_cpsr, i_fn);
- if (rn != 15)
- regs->uregs[rn] = rnv_wb; /* Save Rn in case of writeback. */
-}
-
-static void __kprobes emulate_sat(struct kprobe *p, struct pt_regs *regs)
-{
- insn_1arg_fn_t *i_fn = (insn_1arg_fn_t *)&p->ainsn.insn[0];
- kprobe_opcode_t insn = p->opcode;
- int rd = (insn >> 12) & 0xf;
- int rm = insn & 0xf;
- long rmv = regs->uregs[rm];
-
- /* Writes Q flag */
- regs->uregs[rd] = insnslot_1arg_rwflags(rmv, &regs->ARM_cpsr, i_fn);
-}
-
-static void __kprobes emulate_sel(struct kprobe *p, struct pt_regs *regs)
-{
- insn_2arg_fn_t *i_fn = (insn_2arg_fn_t *)&p->ainsn.insn[0];
- kprobe_opcode_t insn = p->opcode;
- int rd = (insn >> 12) & 0xf;
- int rn = (insn >> 16) & 0xf;
- int rm = insn & 0xf;
- long rnv = regs->uregs[rn];
- long rmv = regs->uregs[rm];
-
- /* Reads GE bits */
- regs->uregs[rd] = insnslot_2arg_rflags(rnv, rmv, regs->ARM_cpsr, i_fn);
-}
-
-static void __kprobes emulate_none(struct kprobe *p, struct pt_regs *regs)
-{
- insn_0arg_fn_t *i_fn = (insn_0arg_fn_t *)&p->ainsn.insn[0];
-
- insnslot_0arg_rflags(regs->ARM_cpsr, i_fn);
-}
-
-static void __kprobes emulate_nop(struct kprobe *p, struct pt_regs *regs)
-{
-}
-
-static void __kprobes
-emulate_rd12_modify(struct kprobe *p, struct pt_regs *regs)
-{
- insn_1arg_fn_t *i_fn = (insn_1arg_fn_t *)&p->ainsn.insn[0];
- kprobe_opcode_t insn = p->opcode;
- int rd = (insn >> 12) & 0xf;
- long rdv = regs->uregs[rd];
-
- regs->uregs[rd] = insnslot_1arg_rflags(rdv, regs->ARM_cpsr, i_fn);
-}
-
-static void __kprobes
-emulate_rd12rn0_modify(struct kprobe *p, struct pt_regs *regs)
-{
- insn_2arg_fn_t *i_fn = (insn_2arg_fn_t *)&p->ainsn.insn[0];
- kprobe_opcode_t insn = p->opcode;
- int rd = (insn >> 12) & 0xf;
- int rn = insn & 0xf;
- long rdv = regs->uregs[rd];
- long rnv = regs->uregs[rn];
-
- regs->uregs[rd] = insnslot_2arg_rflags(rdv, rnv, regs->ARM_cpsr, i_fn);
-}
-
-static void __kprobes emulate_rd12rm0(struct kprobe *p, struct pt_regs *regs)
-{
- insn_1arg_fn_t *i_fn = (insn_1arg_fn_t *)&p->ainsn.insn[0];
- kprobe_opcode_t insn = p->opcode;
- int rd = (insn >> 12) & 0xf;
- int rm = insn & 0xf;
- long rmv = regs->uregs[rm];
-
- regs->uregs[rd] = insnslot_1arg_rflags(rmv, regs->ARM_cpsr, i_fn);
-}
-
-static void __kprobes
-emulate_rd12rn16rm0_rwflags(struct kprobe *p, struct pt_regs *regs)
-{
- insn_2arg_fn_t *i_fn = (insn_2arg_fn_t *)&p->ainsn.insn[0];
- kprobe_opcode_t insn = p->opcode;
- int rd = (insn >> 12) & 0xf;
- int rn = (insn >> 16) & 0xf;
- int rm = insn & 0xf;
- long rnv = regs->uregs[rn];
- long rmv = regs->uregs[rm];
-
- regs->uregs[rd] =
- insnslot_2arg_rwflags(rnv, rmv, &regs->ARM_cpsr, i_fn);
-}
-
-static void __kprobes
-emulate_rd16rn12rs8rm0_rwflags(struct kprobe *p, struct pt_regs *regs)
-{
- insn_3arg_fn_t *i_fn = (insn_3arg_fn_t *)&p->ainsn.insn[0];
- kprobe_opcode_t insn = p->opcode;
- int rd = (insn >> 16) & 0xf;
- int rn = (insn >> 12) & 0xf;
- int rs = (insn >> 8) & 0xf;
- int rm = insn & 0xf;
- long rnv = regs->uregs[rn];
- long rsv = regs->uregs[rs];
- long rmv = regs->uregs[rm];
-
- regs->uregs[rd] =
- insnslot_3arg_rwflags(rnv, rsv, rmv, &regs->ARM_cpsr, i_fn);
-}
-
-static void __kprobes
-emulate_rd16rs8rm0_rwflags(struct kprobe *p, struct pt_regs *regs)
-{
- insn_2arg_fn_t *i_fn = (insn_2arg_fn_t *)&p->ainsn.insn[0];
- kprobe_opcode_t insn = p->opcode;
- int rd = (insn >> 16) & 0xf;
- int rs = (insn >> 8) & 0xf;
- int rm = insn & 0xf;
- long rsv = regs->uregs[rs];
- long rmv = regs->uregs[rm];
-
- regs->uregs[rd] =
- insnslot_2arg_rwflags(rsv, rmv, &regs->ARM_cpsr, i_fn);
-}
-
-static void __kprobes
-emulate_rdhi16rdlo12rs8rm0_rwflags(struct kprobe *p, struct pt_regs *regs)
-{
- insn_llret_4arg_fn_t *i_fn = (insn_llret_4arg_fn_t *)&p->ainsn.insn[0];
- kprobe_opcode_t insn = p->opcode;
- union reg_pair fnr;
- int rdhi = (insn >> 16) & 0xf;
- int rdlo = (insn >> 12) & 0xf;
- int rs = (insn >> 8) & 0xf;
- int rm = insn & 0xf;
- long rsv = regs->uregs[rs];
- long rmv = regs->uregs[rm];
-
- fnr.dr = insnslot_llret_4arg_rwflags(regs->uregs[rdhi],
- regs->uregs[rdlo], rsv, rmv,
- &regs->ARM_cpsr, i_fn);
- regs->uregs[rdhi] = fnr.r0;
- regs->uregs[rdlo] = fnr.r1;
-}
-
-static void __kprobes
-emulate_alu_imm_rflags(struct kprobe *p, struct pt_regs *regs)
-{
- insn_1arg_fn_t *i_fn = (insn_1arg_fn_t *)&p->ainsn.insn[0];
- kprobe_opcode_t insn = p->opcode;
- int rd = (insn >> 12) & 0xf;
- int rn = (insn >> 16) & 0xf;
- long rnv = (rn == 15) ? (long)p->addr + 8 : regs->uregs[rn];
-
- regs->uregs[rd] = insnslot_1arg_rflags(rnv, regs->ARM_cpsr, i_fn);
-}
-
-static void __kprobes
-emulate_alu_imm_rwflags(struct kprobe *p, struct pt_regs *regs)
-{
- insn_1arg_fn_t *i_fn = (insn_1arg_fn_t *)&p->ainsn.insn[0];
- kprobe_opcode_t insn = p->opcode;
- int rd = (insn >> 12) & 0xf;
- int rn = (insn >> 16) & 0xf;
- long rnv = (rn == 15) ? (long)p->addr + 8 : regs->uregs[rn];
-
- regs->uregs[rd] = insnslot_1arg_rwflags(rnv, &regs->ARM_cpsr, i_fn);
-}
-
-static void __kprobes
-emulate_alu_tests_imm(struct kprobe *p, struct pt_regs *regs)
-{
- insn_1arg_fn_t *i_fn = (insn_1arg_fn_t *)&p->ainsn.insn[0];
- kprobe_opcode_t insn = p->opcode;
- int rn = (insn >> 16) & 0xf;
- long rnv = (rn == 15) ? (long)p->addr + 8 : regs->uregs[rn];
-
- insnslot_1arg_rwflags(rnv, &regs->ARM_cpsr, i_fn);
-}
-
-static void __kprobes
-emulate_alu_rflags(struct kprobe *p, struct pt_regs *regs)
-{
- insn_3arg_fn_t *i_fn = (insn_3arg_fn_t *)&p->ainsn.insn[0];
- kprobe_opcode_t insn = p->opcode;
- long ppc = (long)p->addr + 8;
- int rd = (insn >> 12) & 0xf;
- int rn = (insn >> 16) & 0xf; /* rn/rnv/rs/rsv may be */
- int rs = (insn >> 8) & 0xf; /* invalid, don't care. */
- int rm = insn & 0xf;
- long rnv = (rn == 15) ? ppc : regs->uregs[rn];
- long rmv = (rm == 15) ? ppc : regs->uregs[rm];
- long rsv = regs->uregs[rs];
-
- regs->uregs[rd] =
- insnslot_3arg_rflags(rnv, rmv, rsv, regs->ARM_cpsr, i_fn);
-}
-
-static void __kprobes
-emulate_alu_rwflags(struct kprobe *p, struct pt_regs *regs)
-{
- insn_3arg_fn_t *i_fn = (insn_3arg_fn_t *)&p->ainsn.insn[0];
- kprobe_opcode_t insn = p->opcode;
- long ppc = (long)p->addr + 8;
- int rd = (insn >> 12) & 0xf;
- int rn = (insn >> 16) & 0xf; /* rn/rnv/rs/rsv may be */
- int rs = (insn >> 8) & 0xf; /* invalid, don't care. */
- int rm = insn & 0xf;
- long rnv = (rn == 15) ? ppc : regs->uregs[rn];
- long rmv = (rm == 15) ? ppc : regs->uregs[rm];
- long rsv = regs->uregs[rs];
-
- regs->uregs[rd] =
- insnslot_3arg_rwflags(rnv, rmv, rsv, &regs->ARM_cpsr, i_fn);
-}
-
-static void __kprobes
-emulate_alu_tests(struct kprobe *p, struct pt_regs *regs)
-{
- insn_3arg_fn_t *i_fn = (insn_3arg_fn_t *)&p->ainsn.insn[0];
- kprobe_opcode_t insn = p->opcode;
- long ppc = (long)p->addr + 8;
- int rn = (insn >> 16) & 0xf;
- int rs = (insn >> 8) & 0xf; /* rs/rsv may be invalid, don't care. */
- int rm = insn & 0xf;
- long rnv = (rn == 15) ? ppc : regs->uregs[rn];
- long rmv = (rm == 15) ? ppc : regs->uregs[rm];
- long rsv = regs->uregs[rs];
-
- insnslot_3arg_rwflags(rnv, rmv, rsv, &regs->ARM_cpsr, i_fn);
-}
-
-static enum kprobe_insn __kprobes
-prep_emulate_ldr_str(kprobe_opcode_t insn, struct arch_specific_insn *asi)
-{
- int not_imm = (insn & (1 << 26)) ? (insn & (1 << 25))
- : (~insn & (1 << 22));
-
- if (is_writeback(insn) && is_r15(insn, 16))
- return INSN_REJECTED; /* Writeback to PC */
-
- insn &= 0xfff00fff;
- insn |= 0x00001000; /* Rn = r0, Rd = r1 */
- if (not_imm) {
- insn &= ~0xf;
- insn |= 2; /* Rm = r2 */
- }
- asi->insn[0] = insn;
- asi->insn_handler = (insn & (1 << 20)) ? emulate_ldr : emulate_str;
- return INSN_GOOD;
-}
-
-static enum kprobe_insn __kprobes
-prep_emulate_rd12_modify(kprobe_opcode_t insn, struct arch_specific_insn *asi)
-{
- if (is_r15(insn, 12))
- return INSN_REJECTED; /* Rd is PC */
-
- insn &= 0xffff0fff; /* Rd = r0 */
- asi->insn[0] = insn;
- asi->insn_handler = emulate_rd12_modify;
- return INSN_GOOD;
-}
-
-static enum kprobe_insn __kprobes
-prep_emulate_rd12rn0_modify(kprobe_opcode_t insn,
- struct arch_specific_insn *asi)
-{
- if (is_r15(insn, 12))
- return INSN_REJECTED; /* Rd is PC */
-
- insn &= 0xffff0ff0; /* Rd = r0 */
- insn |= 0x00000001; /* Rn = r1 */
- asi->insn[0] = insn;
- asi->insn_handler = emulate_rd12rn0_modify;
- return INSN_GOOD;
-}
-
-static enum kprobe_insn __kprobes
-prep_emulate_rd12rm0(kprobe_opcode_t insn, struct arch_specific_insn *asi)
-{
- if (is_r15(insn, 12))
- return INSN_REJECTED; /* Rd is PC */
-
- insn &= 0xffff0ff0; /* Rd = r0, Rm = r0 */
- asi->insn[0] = insn;
- asi->insn_handler = emulate_rd12rm0;
- return INSN_GOOD;
-}
-
-static enum kprobe_insn __kprobes
-prep_emulate_rd12rn16rm0_wflags(kprobe_opcode_t insn,
- struct arch_specific_insn *asi)
-{
- if (is_r15(insn, 12))
- return INSN_REJECTED; /* Rd is PC */
-
- insn &= 0xfff00ff0; /* Rd = r0, Rn = r0 */
- insn |= 0x00000001; /* Rm = r1 */
- asi->insn[0] = insn;
- asi->insn_handler = emulate_rd12rn16rm0_rwflags;
- return INSN_GOOD;
-}
-
-static enum kprobe_insn __kprobes
-prep_emulate_rd16rs8rm0_wflags(kprobe_opcode_t insn,
- struct arch_specific_insn *asi)
-{
- if (is_r15(insn, 16))
- return INSN_REJECTED; /* Rd is PC */
-
- insn &= 0xfff0f0f0; /* Rd = r0, Rs = r0 */
- insn |= 0x00000001; /* Rm = r1 */
- asi->insn[0] = insn;
- asi->insn_handler = emulate_rd16rs8rm0_rwflags;
- return INSN_GOOD;
-}
-
-static enum kprobe_insn __kprobes
-prep_emulate_rd16rn12rs8rm0_wflags(kprobe_opcode_t insn,
- struct arch_specific_insn *asi)
-{
- if (is_r15(insn, 16))
- return INSN_REJECTED; /* Rd is PC */
-
- insn &= 0xfff000f0; /* Rd = r0, Rn = r0 */
- insn |= 0x00000102; /* Rs = r1, Rm = r2 */
- asi->insn[0] = insn;
- asi->insn_handler = emulate_rd16rn12rs8rm0_rwflags;
- return INSN_GOOD;
-}
-
-static enum kprobe_insn __kprobes
-prep_emulate_rdhi16rdlo12rs8rm0_wflags(kprobe_opcode_t insn,
- struct arch_specific_insn *asi)
-{
- if (is_r15(insn, 16) || is_r15(insn, 12))
- return INSN_REJECTED; /* RdHi or RdLo is PC */
-
- insn &= 0xfff000f0; /* RdHi = r0, RdLo = r1 */
- insn |= 0x00001203; /* Rs = r2, Rm = r3 */
- asi->insn[0] = insn;
- asi->insn_handler = emulate_rdhi16rdlo12rs8rm0_rwflags;
- return INSN_GOOD;
-}
-
-/*
- * For the instruction masking and comparisons in all the "space_*"
- * functions below, Do _not_ rearrange the order of tests unless
- * you're very, very sure of what you are doing. For the sake of
- * efficiency, the masks for some tests sometimes assume other test
- * have been done prior to them so the number of patterns to test
- * for an instruction set can be as broad as possible to reduce the
- * number of tests needed.
- */
-
-static enum kprobe_insn __kprobes
-space_1111(kprobe_opcode_t insn, struct arch_specific_insn *asi)
-{
- /* memory hint : 1111 0100 x001 xxxx xxxx xxxx xxxx xxxx : */
- /* PLDI : 1111 0100 x101 xxxx xxxx xxxx xxxx xxxx : */
- /* PLDW : 1111 0101 x001 xxxx xxxx xxxx xxxx xxxx : */
- /* PLD : 1111 0101 x101 xxxx xxxx xxxx xxxx xxxx : */
- if ((insn & 0xfe300000) == 0xf4100000) {
- asi->insn_handler = emulate_nop;
- return INSN_GOOD_NO_SLOT;
- }
-
- /* BLX(1) : 1111 101x xxxx xxxx xxxx xxxx xxxx xxxx : */
- if ((insn & 0xfe000000) == 0xfa000000) {
- asi->insn_handler = simulate_blx1;
- return INSN_GOOD_NO_SLOT;
- }
-
- /* CPS : 1111 0001 0000 xxx0 xxxx xxxx xx0x xxxx */
- /* SETEND: 1111 0001 0000 0001 xxxx xxxx 0000 xxxx */
-
- /* SRS : 1111 100x x1x0 xxxx xxxx xxxx xxxx xxxx */
- /* RFE : 1111 100x x0x1 xxxx xxxx xxxx xxxx xxxx */
-
- /* Coprocessor instructions... */
- /* MCRR2 : 1111 1100 0100 xxxx xxxx xxxx xxxx xxxx : (Rd != Rn) */
- /* MRRC2 : 1111 1100 0101 xxxx xxxx xxxx xxxx xxxx : (Rd != Rn) */
- /* LDC2 : 1111 110x xxx1 xxxx xxxx xxxx xxxx xxxx */
- /* STC2 : 1111 110x xxx0 xxxx xxxx xxxx xxxx xxxx */
- /* CDP2 : 1111 1110 xxxx xxxx xxxx xxxx xxx0 xxxx */
- /* MCR2 : 1111 1110 xxx0 xxxx xxxx xxxx xxx1 xxxx */
- /* MRC2 : 1111 1110 xxx1 xxxx xxxx xxxx xxx1 xxxx */
-
- return INSN_REJECTED;
-}
-
-static enum kprobe_insn __kprobes
-space_cccc_000x(kprobe_opcode_t insn, struct arch_specific_insn *asi)
-{
- /* cccc 0001 0xx0 xxxx xxxx xxxx xxxx xxx0 xxxx */
- if ((insn & 0x0f900010) == 0x01000000) {
-
- /* MRS cpsr : cccc 0001 0000 xxxx xxxx xxxx 0000 xxxx */
- if ((insn & 0x0ff000f0) == 0x01000000) {
- if (is_r15(insn, 12))
- return INSN_REJECTED; /* Rd is PC */
- asi->insn_handler = simulate_mrs;
- return INSN_GOOD_NO_SLOT;
- }
-
- /* SMLALxy : cccc 0001 0100 xxxx xxxx xxxx 1xx0 xxxx */
- if ((insn & 0x0ff00090) == 0x01400080)
- return prep_emulate_rdhi16rdlo12rs8rm0_wflags(insn,
- asi);
-
- /* SMULWy : cccc 0001 0010 xxxx xxxx xxxx 1x10 xxxx */
- /* SMULxy : cccc 0001 0110 xxxx xxxx xxxx 1xx0 xxxx */
- if ((insn & 0x0ff000b0) == 0x012000a0 ||
- (insn & 0x0ff00090) == 0x01600080)
- return prep_emulate_rd16rs8rm0_wflags(insn, asi);
-
- /* SMLAxy : cccc 0001 0000 xxxx xxxx xxxx 1xx0 xxxx : Q */
- /* SMLAWy : cccc 0001 0010 xxxx xxxx xxxx 1x00 xxxx : Q */
- if ((insn & 0x0ff00090) == 0x01000080 ||
- (insn & 0x0ff000b0) == 0x01200080)
- return prep_emulate_rd16rn12rs8rm0_wflags(insn, asi);
-
- /* BXJ : cccc 0001 0010 xxxx xxxx xxxx 0010 xxxx */
- /* MSR : cccc 0001 0x10 xxxx xxxx xxxx 0000 xxxx */
- /* MRS spsr : cccc 0001 0100 xxxx xxxx xxxx 0000 xxxx */
-
- /* Other instruction encodings aren't yet defined */
- return INSN_REJECTED;
- }
-
- /* cccc 0001 0xx0 xxxx xxxx xxxx xxxx 0xx1 xxxx */
- else if ((insn & 0x0f900090) == 0x01000010) {
-
- /* BLX(2) : cccc 0001 0010 xxxx xxxx xxxx 0011 xxxx */
- /* BX : cccc 0001 0010 xxxx xxxx xxxx 0001 xxxx */
- if ((insn & 0x0ff000d0) == 0x01200010) {
- if ((insn & 0x0ff000ff) == 0x0120003f)
- return INSN_REJECTED; /* BLX pc */
- asi->insn_handler = simulate_blx2bx;
- return INSN_GOOD_NO_SLOT;
- }
-
- /* CLZ : cccc 0001 0110 xxxx xxxx xxxx 0001 xxxx */
- if ((insn & 0x0ff000f0) == 0x01600010)
- return prep_emulate_rd12rm0(insn, asi);
-
- /* QADD : cccc 0001 0000 xxxx xxxx xxxx 0101 xxxx :Q */
- /* QSUB : cccc 0001 0010 xxxx xxxx xxxx 0101 xxxx :Q */
- /* QDADD : cccc 0001 0100 xxxx xxxx xxxx 0101 xxxx :Q */
- /* QDSUB : cccc 0001 0110 xxxx xxxx xxxx 0101 xxxx :Q */
- if ((insn & 0x0f9000f0) == 0x01000050)
- return prep_emulate_rd12rn16rm0_wflags(insn, asi);
-
- /* BKPT : 1110 0001 0010 xxxx xxxx xxxx 0111 xxxx */
- /* SMC : cccc 0001 0110 xxxx xxxx xxxx 0111 xxxx */
-
- /* Other instruction encodings aren't yet defined */
- return INSN_REJECTED;
- }
-
- /* cccc 0000 xxxx xxxx xxxx xxxx xxxx 1001 xxxx */
- else if ((insn & 0x0f0000f0) == 0x00000090) {
-
- /* MUL : cccc 0000 0000 xxxx xxxx xxxx 1001 xxxx : */
- /* MULS : cccc 0000 0001 xxxx xxxx xxxx 1001 xxxx :cc */
- /* MLA : cccc 0000 0010 xxxx xxxx xxxx 1001 xxxx : */
- /* MLAS : cccc 0000 0011 xxxx xxxx xxxx 1001 xxxx :cc */
- /* UMAAL : cccc 0000 0100 xxxx xxxx xxxx 1001 xxxx : */
- /* undef : cccc 0000 0101 xxxx xxxx xxxx 1001 xxxx : */
- /* MLS : cccc 0000 0110 xxxx xxxx xxxx 1001 xxxx : */
- /* undef : cccc 0000 0111 xxxx xxxx xxxx 1001 xxxx : */
- /* UMULL : cccc 0000 1000 xxxx xxxx xxxx 1001 xxxx : */
- /* UMULLS : cccc 0000 1001 xxxx xxxx xxxx 1001 xxxx :cc */
- /* UMLAL : cccc 0000 1010 xxxx xxxx xxxx 1001 xxxx : */
- /* UMLALS : cccc 0000 1011 xxxx xxxx xxxx 1001 xxxx :cc */
- /* SMULL : cccc 0000 1100 xxxx xxxx xxxx 1001 xxxx : */
- /* SMULLS : cccc 0000 1101 xxxx xxxx xxxx 1001 xxxx :cc */
- /* SMLAL : cccc 0000 1110 xxxx xxxx xxxx 1001 xxxx : */
- /* SMLALS : cccc 0000 1111 xxxx xxxx xxxx 1001 xxxx :cc */
- if ((insn & 0x00d00000) == 0x00500000)
- return INSN_REJECTED;
- else if ((insn & 0x00e00000) == 0x00000000)
- return prep_emulate_rd16rs8rm0_wflags(insn, asi);
- else if ((insn & 0x00a00000) == 0x00200000)
- return prep_emulate_rd16rn12rs8rm0_wflags(insn, asi);
- else
- return prep_emulate_rdhi16rdlo12rs8rm0_wflags(insn,
- asi);
- }
-
- /* cccc 000x xxxx xxxx xxxx xxxx xxxx 1xx1 xxxx */
- else if ((insn & 0x0e000090) == 0x00000090) {
-
- /* SWP : cccc 0001 0000 xxxx xxxx xxxx 1001 xxxx */
- /* SWPB : cccc 0001 0100 xxxx xxxx xxxx 1001 xxxx */
- /* ??? : cccc 0001 0x01 xxxx xxxx xxxx 1001 xxxx */
- /* ??? : cccc 0001 0x10 xxxx xxxx xxxx 1001 xxxx */
- /* ??? : cccc 0001 0x11 xxxx xxxx xxxx 1001 xxxx */
- /* STREX : cccc 0001 1000 xxxx xxxx xxxx 1001 xxxx */
- /* LDREX : cccc 0001 1001 xxxx xxxx xxxx 1001 xxxx */
- /* STREXD: cccc 0001 1010 xxxx xxxx xxxx 1001 xxxx */
- /* LDREXD: cccc 0001 1011 xxxx xxxx xxxx 1001 xxxx */
- /* STREXB: cccc 0001 1100 xxxx xxxx xxxx 1001 xxxx */
- /* LDREXB: cccc 0001 1101 xxxx xxxx xxxx 1001 xxxx */
- /* STREXH: cccc 0001 1110 xxxx xxxx xxxx 1001 xxxx */
- /* LDREXH: cccc 0001 1111 xxxx xxxx xxxx 1001 xxxx */
-
- /* LDRD : cccc 000x xxx0 xxxx xxxx xxxx 1101 xxxx */
- /* STRD : cccc 000x xxx0 xxxx xxxx xxxx 1111 xxxx */
- /* LDRH : cccc 000x xxx1 xxxx xxxx xxxx 1011 xxxx */
- /* STRH : cccc 000x xxx0 xxxx xxxx xxxx 1011 xxxx */
- /* LDRSB : cccc 000x xxx1 xxxx xxxx xxxx 1101 xxxx */
- /* LDRSH : cccc 000x xxx1 xxxx xxxx xxxx 1111 xxxx */
- if ((insn & 0x0f0000f0) == 0x01000090) {
- if ((insn & 0x0fb000f0) == 0x01000090) {
- /* SWP/SWPB */
- return prep_emulate_rd12rn16rm0_wflags(insn,
- asi);
- } else {
- /* STREX/LDREX variants and unallocated space */
- return INSN_REJECTED;
- }
-
- } else if ((insn & 0x0e1000d0) == 0x00000d0) {
- /* STRD/LDRD */
- if ((insn & 0x0000e000) == 0x0000e000)
- return INSN_REJECTED; /* Rd is LR or PC */
- if (is_writeback(insn) && is_r15(insn, 16))
- return INSN_REJECTED; /* Writeback to PC */
-
- insn &= 0xfff00fff;
- insn |= 0x00002000; /* Rn = r0, Rd = r2 */
- if (!(insn & (1 << 22))) {
- /* Register index */
- insn &= ~0xf;
- insn |= 1; /* Rm = r1 */
- }
- asi->insn[0] = insn;
- asi->insn_handler =
- (insn & (1 << 5)) ? emulate_strd : emulate_ldrd;
- return INSN_GOOD;
- }
-
- /* LDRH/STRH/LDRSB/LDRSH */
- if (is_r15(insn, 12))
- return INSN_REJECTED; /* Rd is PC */
- return prep_emulate_ldr_str(insn, asi);
- }
-
- /* cccc 000x xxxx xxxx xxxx xxxx xxxx xxxx xxxx */
-
- /*
- * ALU op with S bit and Rd == 15 :
- * cccc 000x xxx1 xxxx 1111 xxxx xxxx xxxx
- */
- if ((insn & 0x0e10f000) == 0x0010f000)
- return INSN_REJECTED;
-
- /*
- * "mov ip, sp" is the most common kprobe'd instruction by far.
- * Check and optimize for it explicitly.
- */
- if (insn == 0xe1a0c00d) {
- asi->insn_handler = simulate_mov_ipsp;
- return INSN_GOOD_NO_SLOT;
- }
-
- /*
- * Data processing: Immediate-shift / Register-shift
- * ALU op : cccc 000x xxxx xxxx xxxx xxxx xxxx xxxx
- * CPY : cccc 0001 1010 xxxx xxxx 0000 0000 xxxx
- * MOV : cccc 0001 101x xxxx xxxx xxxx xxxx xxxx
- * *S (bit 20) updates condition codes
- * ADC/SBC/RSC reads the C flag
- */
- insn &= 0xfff00ff0; /* Rn = r0, Rd = r0 */
- insn |= 0x00000001; /* Rm = r1 */
- if (insn & 0x010) {
- insn &= 0xfffff0ff; /* register shift */
- insn |= 0x00000200; /* Rs = r2 */
- }
- asi->insn[0] = insn;
-
- if ((insn & 0x0f900000) == 0x01100000) {
- /*
- * TST : cccc 0001 0001 xxxx xxxx xxxx xxxx xxxx
- * TEQ : cccc 0001 0011 xxxx xxxx xxxx xxxx xxxx
- * CMP : cccc 0001 0101 xxxx xxxx xxxx xxxx xxxx
- * CMN : cccc 0001 0111 xxxx xxxx xxxx xxxx xxxx
- */
- asi->insn_handler = emulate_alu_tests;
- } else {
- /* ALU ops which write to Rd */
- asi->insn_handler = (insn & (1 << 20)) ? /* S-bit */
- emulate_alu_rwflags : emulate_alu_rflags;
- }
- return INSN_GOOD;
-}
-
-static enum kprobe_insn __kprobes
-space_cccc_001x(kprobe_opcode_t insn, struct arch_specific_insn *asi)
-{
- /* MOVW : cccc 0011 0000 xxxx xxxx xxxx xxxx xxxx */
- /* MOVT : cccc 0011 0100 xxxx xxxx xxxx xxxx xxxx */
- if ((insn & 0x0fb00000) == 0x03000000)
- return prep_emulate_rd12_modify(insn, asi);
-
- /* hints : cccc 0011 0010 0000 xxxx xxxx xxxx xxxx */
- if ((insn & 0x0fff0000) == 0x03200000) {
- unsigned op2 = insn & 0x000000ff;
- if (op2 == 0x01 || op2 == 0x04) {
- /* YIELD : cccc 0011 0010 0000 xxxx xxxx 0000 0001 */
- /* SEV : cccc 0011 0010 0000 xxxx xxxx 0000 0100 */
- asi->insn[0] = insn;
- asi->insn_handler = emulate_none;
- return INSN_GOOD;
- } else if (op2 <= 0x03) {
- /* NOP : cccc 0011 0010 0000 xxxx xxxx 0000 0000 */
- /* WFE : cccc 0011 0010 0000 xxxx xxxx 0000 0010 */
- /* WFI : cccc 0011 0010 0000 xxxx xxxx 0000 0011 */
- /*
- * We make WFE and WFI true NOPs to avoid stalls due
- * to missing events whilst processing the probe.
- */
- asi->insn_handler = emulate_nop;
- return INSN_GOOD_NO_SLOT;
- }
- /* For DBG and unallocated hints it's safest to reject them */
- return INSN_REJECTED;
- }
-
- /*
- * MSR : cccc 0011 0x10 xxxx xxxx xxxx xxxx xxxx
- * ALU op with S bit and Rd == 15 :
- * cccc 001x xxx1 xxxx 1111 xxxx xxxx xxxx
- */
- if ((insn & 0x0fb00000) == 0x03200000 || /* MSR */
- (insn & 0x0e10f000) == 0x0210f000) /* ALU s-bit, R15 */
- return INSN_REJECTED;
-
- /*
- * Data processing: 32-bit Immediate
- * ALU op : cccc 001x xxxx xxxx xxxx xxxx xxxx xxxx
- * MOV : cccc 0011 101x xxxx xxxx xxxx xxxx xxxx
- * *S (bit 20) updates condition codes
- * ADC/SBC/RSC reads the C flag
- */
- insn &= 0xfff00fff; /* Rn = r0 and Rd = r0 */
- asi->insn[0] = insn;
-
- if ((insn & 0x0f900000) == 0x03100000) {
- /*
- * TST : cccc 0011 0001 xxxx xxxx xxxx xxxx xxxx
- * TEQ : cccc 0011 0011 xxxx xxxx xxxx xxxx xxxx
- * CMP : cccc 0011 0101 xxxx xxxx xxxx xxxx xxxx
- * CMN : cccc 0011 0111 xxxx xxxx xxxx xxxx xxxx
- */
- asi->insn_handler = emulate_alu_tests_imm;
- } else {
- /* ALU ops which write to Rd */
- asi->insn_handler = (insn & (1 << 20)) ? /* S-bit */
- emulate_alu_imm_rwflags : emulate_alu_imm_rflags;
- }
- return INSN_GOOD;
-}
-
-static enum kprobe_insn __kprobes
-space_cccc_0110__1(kprobe_opcode_t insn, struct arch_specific_insn *asi)
-{
- /* SEL : cccc 0110 1000 xxxx xxxx xxxx 1011 xxxx GE: !!! */
- if ((insn & 0x0ff000f0) == 0x068000b0) {
- if (is_r15(insn, 12))
- return INSN_REJECTED; /* Rd is PC */
- insn &= 0xfff00ff0; /* Rd = r0, Rn = r0 */
- insn |= 0x00000001; /* Rm = r1 */
- asi->insn[0] = insn;
- asi->insn_handler = emulate_sel;
- return INSN_GOOD;
- }
-
- /* SSAT : cccc 0110 101x xxxx xxxx xxxx xx01 xxxx :Q */
- /* USAT : cccc 0110 111x xxxx xxxx xxxx xx01 xxxx :Q */
- /* SSAT16 : cccc 0110 1010 xxxx xxxx xxxx 0011 xxxx :Q */
- /* USAT16 : cccc 0110 1110 xxxx xxxx xxxx 0011 xxxx :Q */
- if ((insn & 0x0fa00030) == 0x06a00010 ||
- (insn & 0x0fb000f0) == 0x06a00030) {
- if (is_r15(insn, 12))
- return INSN_REJECTED; /* Rd is PC */
- insn &= 0xffff0ff0; /* Rd = r0, Rm = r0 */
- asi->insn[0] = insn;
- asi->insn_handler = emulate_sat;
- return INSN_GOOD;
- }
-
- /* REV : cccc 0110 1011 xxxx xxxx xxxx 0011 xxxx */
- /* REV16 : cccc 0110 1011 xxxx xxxx xxxx 1011 xxxx */
- /* RBIT : cccc 0110 1111 xxxx xxxx xxxx 0011 xxxx */
- /* REVSH : cccc 0110 1111 xxxx xxxx xxxx 1011 xxxx */
- if ((insn & 0x0ff00070) == 0x06b00030 ||
- (insn & 0x0ff00070) == 0x06f00030)
- return prep_emulate_rd12rm0(insn, asi);
-
- /* ??? : cccc 0110 0000 xxxx xxxx xxxx xxx1 xxxx : */
- /* SADD16 : cccc 0110 0001 xxxx xxxx xxxx 0001 xxxx :GE */
- /* SADDSUBX : cccc 0110 0001 xxxx xxxx xxxx 0011 xxxx :GE */
- /* SSUBADDX : cccc 0110 0001 xxxx xxxx xxxx 0101 xxxx :GE */
- /* SSUB16 : cccc 0110 0001 xxxx xxxx xxxx 0111 xxxx :GE */
- /* SADD8 : cccc 0110 0001 xxxx xxxx xxxx 1001 xxxx :GE */
- /* ??? : cccc 0110 0001 xxxx xxxx xxxx 1011 xxxx : */
- /* ??? : cccc 0110 0001 xxxx xxxx xxxx 1101 xxxx : */
- /* SSUB8 : cccc 0110 0001 xxxx xxxx xxxx 1111 xxxx :GE */
- /* QADD16 : cccc 0110 0010 xxxx xxxx xxxx 0001 xxxx : */
- /* QADDSUBX : cccc 0110 0010 xxxx xxxx xxxx 0011 xxxx : */
- /* QSUBADDX : cccc 0110 0010 xxxx xxxx xxxx 0101 xxxx : */
- /* QSUB16 : cccc 0110 0010 xxxx xxxx xxxx 0111 xxxx : */
- /* QADD8 : cccc 0110 0010 xxxx xxxx xxxx 1001 xxxx : */
- /* ??? : cccc 0110 0010 xxxx xxxx xxxx 1011 xxxx : */
- /* ??? : cccc 0110 0010 xxxx xxxx xxxx 1101 xxxx : */
- /* QSUB8 : cccc 0110 0010 xxxx xxxx xxxx 1111 xxxx : */
- /* SHADD16 : cccc 0110 0011 xxxx xxxx xxxx 0001 xxxx : */
- /* SHADDSUBX : cccc 0110 0011 xxxx xxxx xxxx 0011 xxxx : */
- /* SHSUBADDX : cccc 0110 0011 xxxx xxxx xxxx 0101 xxxx : */
- /* SHSUB16 : cccc 0110 0011 xxxx xxxx xxxx 0111 xxxx : */
- /* SHADD8 : cccc 0110 0011 xxxx xxxx xxxx 1001 xxxx : */
- /* ??? : cccc 0110 0011 xxxx xxxx xxxx 1011 xxxx : */
- /* ??? : cccc 0110 0011 xxxx xxxx xxxx 1101 xxxx : */
- /* SHSUB8 : cccc 0110 0011 xxxx xxxx xxxx 1111 xxxx : */
- /* ??? : cccc 0110 0100 xxxx xxxx xxxx xxx1 xxxx : */
- /* UADD16 : cccc 0110 0101 xxxx xxxx xxxx 0001 xxxx :GE */
- /* UADDSUBX : cccc 0110 0101 xxxx xxxx xxxx 0011 xxxx :GE */
- /* USUBADDX : cccc 0110 0101 xxxx xxxx xxxx 0101 xxxx :GE */
- /* USUB16 : cccc 0110 0101 xxxx xxxx xxxx 0111 xxxx :GE */
- /* UADD8 : cccc 0110 0101 xxxx xxxx xxxx 1001 xxxx :GE */
- /* ??? : cccc 0110 0101 xxxx xxxx xxxx 1011 xxxx : */
- /* ??? : cccc 0110 0101 xxxx xxxx xxxx 1101 xxxx : */
- /* USUB8 : cccc 0110 0101 xxxx xxxx xxxx 1111 xxxx :GE */
- /* UQADD16 : cccc 0110 0110 xxxx xxxx xxxx 0001 xxxx : */
- /* UQADDSUBX : cccc 0110 0110 xxxx xxxx xxxx 0011 xxxx : */
- /* UQSUBADDX : cccc 0110 0110 xxxx xxxx xxxx 0101 xxxx : */
- /* UQSUB16 : cccc 0110 0110 xxxx xxxx xxxx 0111 xxxx : */
- /* UQADD8 : cccc 0110 0110 xxxx xxxx xxxx 1001 xxxx : */
- /* ??? : cccc 0110 0110 xxxx xxxx xxxx 1011 xxxx : */
- /* ??? : cccc 0110 0110 xxxx xxxx xxxx 1101 xxxx : */
- /* UQSUB8 : cccc 0110 0110 xxxx xxxx xxxx 1111 xxxx : */
- /* UHADD16 : cccc 0110 0111 xxxx xxxx xxxx 0001 xxxx : */
- /* UHADDSUBX : cccc 0110 0111 xxxx xxxx xxxx 0011 xxxx : */
- /* UHSUBADDX : cccc 0110 0111 xxxx xxxx xxxx 0101 xxxx : */
- /* UHSUB16 : cccc 0110 0111 xxxx xxxx xxxx 0111 xxxx : */
- /* UHADD8 : cccc 0110 0111 xxxx xxxx xxxx 1001 xxxx : */
- /* ??? : cccc 0110 0111 xxxx xxxx xxxx 1011 xxxx : */
- /* ??? : cccc 0110 0111 xxxx xxxx xxxx 1101 xxxx : */
- /* UHSUB8 : cccc 0110 0111 xxxx xxxx xxxx 1111 xxxx : */
- if ((insn & 0x0f800010) == 0x06000010) {
- if ((insn & 0x00300000) == 0x00000000 ||
- (insn & 0x000000e0) == 0x000000a0 ||
- (insn & 0x000000e0) == 0x000000c0)
- return INSN_REJECTED; /* Unallocated space */
- return prep_emulate_rd12rn16rm0_wflags(insn, asi);
- }
-
- /* PKHBT : cccc 0110 1000 xxxx xxxx xxxx x001 xxxx : */
- /* PKHTB : cccc 0110 1000 xxxx xxxx xxxx x101 xxxx : */
- if ((insn & 0x0ff00030) == 0x06800010)
- return prep_emulate_rd12rn16rm0_wflags(insn, asi);
-
- /* SXTAB16 : cccc 0110 1000 xxxx xxxx xxxx 0111 xxxx : */
- /* SXTB16 : cccc 0110 1000 1111 xxxx xxxx 0111 xxxx : */
- /* ??? : cccc 0110 1001 xxxx xxxx xxxx 0111 xxxx : */
- /* SXTAB : cccc 0110 1010 xxxx xxxx xxxx 0111 xxxx : */
- /* SXTB : cccc 0110 1010 1111 xxxx xxxx 0111 xxxx : */
- /* SXTAH : cccc 0110 1011 xxxx xxxx xxxx 0111 xxxx : */
- /* SXTH : cccc 0110 1011 1111 xxxx xxxx 0111 xxxx : */
- /* UXTAB16 : cccc 0110 1100 xxxx xxxx xxxx 0111 xxxx : */
- /* UXTB16 : cccc 0110 1100 1111 xxxx xxxx 0111 xxxx : */
- /* ??? : cccc 0110 1101 xxxx xxxx xxxx 0111 xxxx : */
- /* UXTAB : cccc 0110 1110 xxxx xxxx xxxx 0111 xxxx : */
- /* UXTB : cccc 0110 1110 1111 xxxx xxxx 0111 xxxx : */
- /* UXTAH : cccc 0110 1111 xxxx xxxx xxxx 0111 xxxx : */
- /* UXTH : cccc 0110 1111 1111 xxxx xxxx 0111 xxxx : */
- if ((insn & 0x0f8000f0) == 0x06800070) {
- if ((insn & 0x00300000) == 0x00100000)
- return INSN_REJECTED; /* Unallocated space */
-
- if ((insn & 0x000f0000) == 0x000f0000)
- return prep_emulate_rd12rm0(insn, asi);
- else
- return prep_emulate_rd12rn16rm0_wflags(insn, asi);
- }
-
- /* Other instruction encodings aren't yet defined */
- return INSN_REJECTED;
-}
-
-static enum kprobe_insn __kprobes
-space_cccc_0111__1(kprobe_opcode_t insn, struct arch_specific_insn *asi)
-{
- /* Undef : cccc 0111 1111 xxxx xxxx xxxx 1111 xxxx */
- if ((insn & 0x0ff000f0) == 0x03f000f0)
- return INSN_REJECTED;
-
- /* SMLALD : cccc 0111 0100 xxxx xxxx xxxx 00x1 xxxx */
- /* SMLSLD : cccc 0111 0100 xxxx xxxx xxxx 01x1 xxxx */
- if ((insn & 0x0ff00090) == 0x07400010)
- return prep_emulate_rdhi16rdlo12rs8rm0_wflags(insn, asi);
-
- /* SMLAD : cccc 0111 0000 xxxx xxxx xxxx 00x1 xxxx :Q */
- /* SMUAD : cccc 0111 0000 xxxx 1111 xxxx 00x1 xxxx :Q */
- /* SMLSD : cccc 0111 0000 xxxx xxxx xxxx 01x1 xxxx :Q */
- /* SMUSD : cccc 0111 0000 xxxx 1111 xxxx 01x1 xxxx : */
- /* SMMLA : cccc 0111 0101 xxxx xxxx xxxx 00x1 xxxx : */
- /* SMMUL : cccc 0111 0101 xxxx 1111 xxxx 00x1 xxxx : */
- /* USADA8 : cccc 0111 1000 xxxx xxxx xxxx 0001 xxxx : */
- /* USAD8 : cccc 0111 1000 xxxx 1111 xxxx 0001 xxxx : */
- if ((insn & 0x0ff00090) == 0x07000010 ||
- (insn & 0x0ff000d0) == 0x07500010 ||
- (insn & 0x0ff000f0) == 0x07800010) {
-
- if ((insn & 0x0000f000) == 0x0000f000)
- return prep_emulate_rd16rs8rm0_wflags(insn, asi);
- else
- return prep_emulate_rd16rn12rs8rm0_wflags(insn, asi);
- }
-
- /* SMMLS : cccc 0111 0101 xxxx xxxx xxxx 11x1 xxxx : */
- if ((insn & 0x0ff000d0) == 0x075000d0)
- return prep_emulate_rd16rn12rs8rm0_wflags(insn, asi);
-
- /* SBFX : cccc 0111 101x xxxx xxxx xxxx x101 xxxx : */
- /* UBFX : cccc 0111 111x xxxx xxxx xxxx x101 xxxx : */
- if ((insn & 0x0fa00070) == 0x07a00050)
- return prep_emulate_rd12rm0(insn, asi);
-
- /* BFI : cccc 0111 110x xxxx xxxx xxxx x001 xxxx : */
- /* BFC : cccc 0111 110x xxxx xxxx xxxx x001 1111 : */
- if ((insn & 0x0fe00070) == 0x07c00010) {
-
- if ((insn & 0x0000000f) == 0x0000000f)
- return prep_emulate_rd12_modify(insn, asi);
- else
- return prep_emulate_rd12rn0_modify(insn, asi);
- }
-
- return INSN_REJECTED;
-}
-
-static enum kprobe_insn __kprobes
-space_cccc_01xx(kprobe_opcode_t insn, struct arch_specific_insn *asi)
-{
- /* LDR : cccc 01xx x0x1 xxxx xxxx xxxx xxxx xxxx */
- /* LDRB : cccc 01xx x1x1 xxxx xxxx xxxx xxxx xxxx */
- /* LDRBT : cccc 01x0 x111 xxxx xxxx xxxx xxxx xxxx */
- /* LDRT : cccc 01x0 x011 xxxx xxxx xxxx xxxx xxxx */
- /* STR : cccc 01xx x0x0 xxxx xxxx xxxx xxxx xxxx */
- /* STRB : cccc 01xx x1x0 xxxx xxxx xxxx xxxx xxxx */
- /* STRBT : cccc 01x0 x110 xxxx xxxx xxxx xxxx xxxx */
- /* STRT : cccc 01x0 x010 xxxx xxxx xxxx xxxx xxxx */
-
- if ((insn & 0x00500000) == 0x00500000 && is_r15(insn, 12))
- return INSN_REJECTED; /* LDRB into PC */
-
- return prep_emulate_ldr_str(insn, asi);
-}
-
-static enum kprobe_insn __kprobes
-space_cccc_100x(kprobe_opcode_t insn, struct arch_specific_insn *asi)
-{
- /* LDM(2) : cccc 100x x101 xxxx 0xxx xxxx xxxx xxxx */
- /* LDM(3) : cccc 100x x1x1 xxxx 1xxx xxxx xxxx xxxx */
- if ((insn & 0x0e708000) == 0x85000000 ||
- (insn & 0x0e508000) == 0x85010000)
- return INSN_REJECTED;
-
- /* LDM(1) : cccc 100x x0x1 xxxx xxxx xxxx xxxx xxxx */
- /* STM(1) : cccc 100x x0x0 xxxx xxxx xxxx xxxx xxxx */
- asi->insn_handler = ((insn & 0x108000) == 0x008000) ? /* STM & R15 */
- simulate_stm1_pc : simulate_ldm1stm1;
- return INSN_GOOD_NO_SLOT;
-}
-
-static enum kprobe_insn __kprobes
-space_cccc_101x(kprobe_opcode_t insn, struct arch_specific_insn *asi)
-{
- /* B : cccc 1010 xxxx xxxx xxxx xxxx xxxx xxxx */
- /* BL : cccc 1011 xxxx xxxx xxxx xxxx xxxx xxxx */
- asi->insn_handler = simulate_bbl;
- return INSN_GOOD_NO_SLOT;
-}
-
-static enum kprobe_insn __kprobes
-space_cccc_11xx(kprobe_opcode_t insn, struct arch_specific_insn *asi)
-{
- /* Coprocessor instructions... */
- /* MCRR : cccc 1100 0100 xxxx xxxx xxxx xxxx xxxx : (Rd!=Rn) */
- /* MRRC : cccc 1100 0101 xxxx xxxx xxxx xxxx xxxx : (Rd!=Rn) */
- /* LDC : cccc 110x xxx1 xxxx xxxx xxxx xxxx xxxx */
- /* STC : cccc 110x xxx0 xxxx xxxx xxxx xxxx xxxx */
- /* CDP : cccc 1110 xxxx xxxx xxxx xxxx xxx0 xxxx */
- /* MCR : cccc 1110 xxx0 xxxx xxxx xxxx xxx1 xxxx */
- /* MRC : cccc 1110 xxx1 xxxx xxxx xxxx xxx1 xxxx */
-
- /* SVC : cccc 1111 xxxx xxxx xxxx xxxx xxxx xxxx */
-
- return INSN_REJECTED;
-}
-
-static unsigned long __kprobes __check_eq(unsigned long cpsr)
-{
- return cpsr & PSR_Z_BIT;
-}
-
-static unsigned long __kprobes __check_ne(unsigned long cpsr)
-{
- return (~cpsr) & PSR_Z_BIT;
-}
-
-static unsigned long __kprobes __check_cs(unsigned long cpsr)
-{
- return cpsr & PSR_C_BIT;
-}
-
-static unsigned long __kprobes __check_cc(unsigned long cpsr)
-{
- return (~cpsr) & PSR_C_BIT;
-}
-
-static unsigned long __kprobes __check_mi(unsigned long cpsr)
-{
- return cpsr & PSR_N_BIT;
-}
-
-static unsigned long __kprobes __check_pl(unsigned long cpsr)
-{
- return (~cpsr) & PSR_N_BIT;
-}
-
-static unsigned long __kprobes __check_vs(unsigned long cpsr)
-{
- return cpsr & PSR_V_BIT;
-}
-
-static unsigned long __kprobes __check_vc(unsigned long cpsr)
-{
- return (~cpsr) & PSR_V_BIT;
-}
-
-static unsigned long __kprobes __check_hi(unsigned long cpsr)
-{
- cpsr &= ~(cpsr >> 1); /* PSR_C_BIT &= ~PSR_Z_BIT */
- return cpsr & PSR_C_BIT;
-}
-
-static unsigned long __kprobes __check_ls(unsigned long cpsr)
-{
- cpsr &= ~(cpsr >> 1); /* PSR_C_BIT &= ~PSR_Z_BIT */
- return (~cpsr) & PSR_C_BIT;
-}
-
-static unsigned long __kprobes __check_ge(unsigned long cpsr)
-{
- cpsr ^= (cpsr << 3); /* PSR_N_BIT ^= PSR_V_BIT */
- return (~cpsr) & PSR_N_BIT;
-}
-
-static unsigned long __kprobes __check_lt(unsigned long cpsr)
-{
- cpsr ^= (cpsr << 3); /* PSR_N_BIT ^= PSR_V_BIT */
- return cpsr & PSR_N_BIT;
-}
-
-static unsigned long __kprobes __check_gt(unsigned long cpsr)
-{
- unsigned long temp = cpsr ^ (cpsr << 3); /* PSR_N_BIT ^= PSR_V_BIT */
- temp |= (cpsr << 1); /* PSR_N_BIT |= PSR_Z_BIT */
- return (~temp) & PSR_N_BIT;
-}
-
-static unsigned long __kprobes __check_le(unsigned long cpsr)
-{
- unsigned long temp = cpsr ^ (cpsr << 3); /* PSR_N_BIT ^= PSR_V_BIT */
- temp |= (cpsr << 1); /* PSR_N_BIT |= PSR_Z_BIT */
- return temp & PSR_N_BIT;
-}
-
-static unsigned long __kprobes __check_al(unsigned long cpsr)
-{
- return true;
-}
-
-static kprobe_check_cc * const condition_checks[16] = {
- &__check_eq, &__check_ne, &__check_cs, &__check_cc,
- &__check_mi, &__check_pl, &__check_vs, &__check_vc,
- &__check_hi, &__check_ls, &__check_ge, &__check_lt,
- &__check_gt, &__check_le, &__check_al, &__check_al
-};
-
-/* Return:
- * INSN_REJECTED If instruction is one not allowed to kprobe,
- * INSN_GOOD If instruction is supported and uses instruction slot,
- * INSN_GOOD_NO_SLOT If instruction is supported but doesn't use its slot.
- *
- * For instructions we don't want to kprobe (INSN_REJECTED return result):
- * These are generally ones that modify the processor state making
- * them "hard" to simulate such as switches processor modes or
- * make accesses in alternate modes. Any of these could be simulated
- * if the work was put into it, but low return considering they
- * should also be very rare.
- */
-enum kprobe_insn __kprobes
-arm_kprobe_decode_insn(kprobe_opcode_t insn, struct arch_specific_insn *asi)
-{
- asi->insn_check_cc = condition_checks[insn>>28];
- asi->insn[1] = KPROBE_RETURN_INSTRUCTION;
-
- if ((insn & 0xf0000000) == 0xf0000000)
-
- return space_1111(insn, asi);
-
- else if ((insn & 0x0e000000) == 0x00000000)
-
- return space_cccc_000x(insn, asi);
-
- else if ((insn & 0x0e000000) == 0x02000000)
-
- return space_cccc_001x(insn, asi);
-
- else if ((insn & 0x0f000010) == 0x06000010)
-
- return space_cccc_0110__1(insn, asi);
-
- else if ((insn & 0x0f000010) == 0x07000010)
-
- return space_cccc_0111__1(insn, asi);
-
- else if ((insn & 0x0c000000) == 0x04000000)
-
- return space_cccc_01xx(insn, asi);
-
- else if ((insn & 0x0e000000) == 0x08000000)
-
- return space_cccc_100x(insn, asi);
-
- else if ((insn & 0x0e000000) == 0x0a000000)
-
- return space_cccc_101x(insn, asi);
-
- return space_cccc_11xx(insn, asi);
-}
-
-void __init arm_kprobe_decode_init(void)
-{
- find_str_pc_offset();
-}
diff --git a/arch/arm/kernel/kprobes-thumb.c b/arch/arm/kernel/kprobes-thumb.c
new file mode 100644
index 000000000000..902ca59e8b11
--- /dev/null
+++ b/arch/arm/kernel/kprobes-thumb.c
@@ -0,0 +1,1462 @@
+/*
+ * arch/arm/kernel/kprobes-thumb.c
+ *
+ * Copyright (C) 2011 Jon Medhurst <tixy@yxit.co.uk>.
+ *
+ * This program is free software; you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License version 2 as
+ * published by the Free Software Foundation.
+ */
+
+#include <linux/kernel.h>
+#include <linux/kprobes.h>
+
+#include "kprobes.h"
+
+
+/*
+ * True if current instruction is in an IT block.
+ */
+#define in_it_block(cpsr) ((cpsr & 0x06000c00) != 0x00000000)
+
+/*
+ * Return the condition code to check for the currently executing instruction.
+ * This is in ITSTATE<7:4> which is in CPSR<15:12> but is only valid if
+ * in_it_block returns true.
+ */
+#define current_cond(cpsr) ((cpsr >> 12) & 0xf)
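A quick worked example of how these two macros combine; the CPSR value and the helper below are illustrative only, not part of the patch:

	static inline int example_current_it_cond(void)
	{
		/*
		 * Directly after an "IT NE" instruction ITSTATE is 0b00011000
		 * (firstcond NE, mask 1000), which sits in the CPSR IT bits as
		 * 0x00001800.
		 */
		unsigned long cpsr = 0x00001800;

		return in_it_block(cpsr) ? current_cond(cpsr) : -1; /* 0x1 == NE */
	}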
+
+/*
+ * Return the PC value for a probe in thumb code.
+ * This is the address of the probed instruction plus 4.
+ * We subtract one because the address will have bit zero set to indicate
+ * a pointer to thumb code.
+ */
+static inline unsigned long __kprobes thumb_probe_pc(struct kprobe *p)
+{
+ return (unsigned long)p->addr - 1 + 4;
+}
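As a concrete case (addresses hypothetical, not from the patch): a probe registered at 0x8001, i.e. a Thumb instruction at 0x8000 with bit 0 set in the pointer, gives 0x8001 - 1 + 4 = 0x8004, the PC value the probed instruction itself would see:

	static inline unsigned long example_thumb_probe_pc(void)
	{
		struct kprobe example = { .addr = (kprobe_opcode_t *)0x8001 };

		return thumb_probe_pc(&example); /* 0x8001 - 1 + 4 == 0x8004 */
	}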
+
+static void __kprobes
+t32_simulate_table_branch(struct kprobe *p, struct pt_regs *regs)
+{
+ kprobe_opcode_t insn = p->opcode;
+ unsigned long pc = thumb_probe_pc(p);
+ int rn = (insn >> 16) & 0xf;
+ int rm = insn & 0xf;
+
+ unsigned long rnv = (rn == 15) ? pc : regs->uregs[rn];
+ unsigned long rmv = regs->uregs[rm];
+ unsigned int halfwords;
+
+ if (insn & 0x10) /* TBH */
+ halfwords = ((u16 *)rnv)[rmv];
+ else /* TBB */
+ halfwords = ((u8 *)rnv)[rmv];
+
+ regs->ARM_pc = pc + 2 * halfwords;
+}
+
+static void __kprobes
+t32_simulate_mrs(struct kprobe *p, struct pt_regs *regs)
+{
+ kprobe_opcode_t insn = p->opcode;
+ int rd = (insn >> 8) & 0xf;
+ unsigned long mask = 0xf8ff03df; /* Mask out execution state */
+ regs->uregs[rd] = regs->ARM_cpsr & mask;
+}
+
+static void __kprobes
+t32_simulate_cond_branch(struct kprobe *p, struct pt_regs *regs)
+{
+ kprobe_opcode_t insn = p->opcode;
+ unsigned long pc = thumb_probe_pc(p);
+
+ long offset = insn & 0x7ff; /* imm11 */
+ offset += (insn & 0x003f0000) >> 5; /* imm6 */
+ offset += (insn & 0x00002000) << 4; /* J1 */
+ offset += (insn & 0x00000800) << 7; /* J2 */
+ offset -= (insn & 0x04000000) >> 7; /* Apply sign bit */
+
+ regs->ARM_pc = pc + (offset * 2);
+}
+
+static enum kprobe_insn __kprobes
+t32_decode_cond_branch(kprobe_opcode_t insn, struct arch_specific_insn *asi)
+{
+ int cc = (insn >> 22) & 0xf;
+ asi->insn_check_cc = kprobe_condition_checks[cc];
+ asi->insn_handler = t32_simulate_cond_branch;
+ return INSN_GOOD_NO_SLOT;
+}
+
+static void __kprobes
+t32_simulate_branch(struct kprobe *p, struct pt_regs *regs)
+{
+ kprobe_opcode_t insn = p->opcode;
+ unsigned long pc = thumb_probe_pc(p);
+
+ long offset = insn & 0x7ff; /* imm11 */
+ offset += (insn & 0x03ff0000) >> 5; /* imm10 */
+ offset += (insn & 0x00002000) << 9; /* J1 */
+ offset += (insn & 0x00000800) << 10; /* J2 */
+ if (insn & 0x04000000)
+ offset -= 0x00800000; /* Apply sign bit */
+ else
+ offset ^= 0x00600000; /* Invert J1 and J2 */
+
+ if (insn & (1 << 14)) {
+ /* BL or BLX */
+ regs->ARM_lr = (unsigned long)p->addr + 4;
+ if (!(insn & (1 << 12))) {
+ /* BLX so switch to ARM mode */
+ regs->ARM_cpsr &= ~PSR_T_BIT;
+ pc &= ~3;
+ }
+ }
+
+ regs->ARM_pc = pc + (offset * 2);
+}
+
+static void __kprobes
+t32_simulate_ldr_literal(struct kprobe *p, struct pt_regs *regs)
+{
+ kprobe_opcode_t insn = p->opcode;
+ unsigned long addr = thumb_probe_pc(p) & ~3;
+ int rt = (insn >> 12) & 0xf;
+ unsigned long rtv;
+
+ long offset = insn & 0xfff;
+ if (insn & 0x00800000)
+ addr += offset;
+ else
+ addr -= offset;
+
+ if (insn & 0x00400000) {
+ /* LDR */
+ rtv = *(unsigned long *)addr;
+ if (rt == 15) {
+ bx_write_pc(rtv, regs);
+ return;
+ }
+ } else if (insn & 0x00200000) {
+ /* LDRH */
+ if (insn & 0x01000000)
+ rtv = *(s16 *)addr;
+ else
+ rtv = *(u16 *)addr;
+ } else {
+ /* LDRB */
+ if (insn & 0x01000000)
+ rtv = *(s8 *)addr;
+ else
+ rtv = *(u8 *)addr;
+ }
+
+ regs->uregs[rt] = rtv;
+}
+
+static enum kprobe_insn __kprobes
+t32_decode_ldmstm(kprobe_opcode_t insn, struct arch_specific_insn *asi)
+{
+ enum kprobe_insn ret = kprobe_decode_ldmstm(insn, asi);
+
+ /* Fixup modified instruction to have halfwords in correct order...*/
+ insn = asi->insn[0];
+ ((u16 *)asi->insn)[0] = insn >> 16;
+ ((u16 *)asi->insn)[1] = insn & 0xffff;
+
+ return ret;
+}
+
+static void __kprobes
+t32_emulate_ldrdstrd(struct kprobe *p, struct pt_regs *regs)
+{
+ kprobe_opcode_t insn = p->opcode;
+ unsigned long pc = thumb_probe_pc(p) & ~3;
+ int rt1 = (insn >> 12) & 0xf;
+ int rt2 = (insn >> 8) & 0xf;
+ int rn = (insn >> 16) & 0xf;
+
+ register unsigned long rt1v asm("r0") = regs->uregs[rt1];
+ register unsigned long rt2v asm("r1") = regs->uregs[rt2];
+ register unsigned long rnv asm("r2") = (rn == 15) ? pc
+ : regs->uregs[rn];
+
+ __asm__ __volatile__ (
+ "blx %[fn]"
+ : "=r" (rt1v), "=r" (rt2v), "=r" (rnv)
+ : "0" (rt1v), "1" (rt2v), "2" (rnv), [fn] "r" (p->ainsn.insn_fn)
+ : "lr", "memory", "cc"
+ );
+
+ if (rn != 15)
+ regs->uregs[rn] = rnv; /* Writeback base register */
+ regs->uregs[rt1] = rt1v;
+ regs->uregs[rt2] = rt2v;
+}
+
+static void __kprobes
+t32_emulate_ldrstr(struct kprobe *p, struct pt_regs *regs)
+{
+ kprobe_opcode_t insn = p->opcode;
+ int rt = (insn >> 12) & 0xf;
+ int rn = (insn >> 16) & 0xf;
+ int rm = insn & 0xf;
+
+ register unsigned long rtv asm("r0") = regs->uregs[rt];
+ register unsigned long rnv asm("r2") = regs->uregs[rn];
+ register unsigned long rmv asm("r3") = regs->uregs[rm];
+
+ __asm__ __volatile__ (
+ "blx %[fn]"
+ : "=r" (rtv), "=r" (rnv)
+ : "0" (rtv), "1" (rnv), "r" (rmv), [fn] "r" (p->ainsn.insn_fn)
+ : "lr", "memory", "cc"
+ );
+
+ regs->uregs[rn] = rnv; /* Writeback base register */
+ if (rt == 15) /* Can't be true for a STR as they aren't allowed */
+ bx_write_pc(rtv, regs);
+ else
+ regs->uregs[rt] = rtv;
+}
+
+static void __kprobes
+t32_emulate_rd8rn16rm0_rwflags(struct kprobe *p, struct pt_regs *regs)
+{
+ kprobe_opcode_t insn = p->opcode;
+ int rd = (insn >> 8) & 0xf;
+ int rn = (insn >> 16) & 0xf;
+ int rm = insn & 0xf;
+
+ register unsigned long rdv asm("r1") = regs->uregs[rd];
+ register unsigned long rnv asm("r2") = regs->uregs[rn];
+ register unsigned long rmv asm("r3") = regs->uregs[rm];
+ unsigned long cpsr = regs->ARM_cpsr;
+
+ __asm__ __volatile__ (
+ "msr cpsr_fs, %[cpsr] \n\t"
+ "blx %[fn] \n\t"
+ "mrs %[cpsr], cpsr \n\t"
+ : "=r" (rdv), [cpsr] "=r" (cpsr)
+ : "0" (rdv), "r" (rnv), "r" (rmv),
+ "1" (cpsr), [fn] "r" (p->ainsn.insn_fn)
+ : "lr", "memory", "cc"
+ );
+
+ regs->uregs[rd] = rdv;
+ regs->ARM_cpsr = (regs->ARM_cpsr & ~APSR_MASK) | (cpsr & APSR_MASK);
+}
+
+static void __kprobes
+t32_emulate_rd8pc16_noflags(struct kprobe *p, struct pt_regs *regs)
+{
+ kprobe_opcode_t insn = p->opcode;
+ unsigned long pc = thumb_probe_pc(p);
+ int rd = (insn >> 8) & 0xf;
+
+ register unsigned long rdv asm("r1") = regs->uregs[rd];
+ register unsigned long rnv asm("r2") = pc & ~3;
+
+ __asm__ __volatile__ (
+ "blx %[fn]"
+ : "=r" (rdv)
+ : "0" (rdv), "r" (rnv), [fn] "r" (p->ainsn.insn_fn)
+ : "lr", "memory", "cc"
+ );
+
+ regs->uregs[rd] = rdv;
+}
+
+static void __kprobes
+t32_emulate_rd8rn16_noflags(struct kprobe *p, struct pt_regs *regs)
+{
+ kprobe_opcode_t insn = p->opcode;
+ int rd = (insn >> 8) & 0xf;
+ int rn = (insn >> 16) & 0xf;
+
+ register unsigned long rdv asm("r1") = regs->uregs[rd];
+ register unsigned long rnv asm("r2") = regs->uregs[rn];
+
+ __asm__ __volatile__ (
+ "blx %[fn]"
+ : "=r" (rdv)
+ : "0" (rdv), "r" (rnv), [fn] "r" (p->ainsn.insn_fn)
+ : "lr", "memory", "cc"
+ );
+
+ regs->uregs[rd] = rdv;
+}
+
+static void __kprobes
+t32_emulate_rdlo12rdhi8rn16rm0_noflags(struct kprobe *p, struct pt_regs *regs)
+{
+ kprobe_opcode_t insn = p->opcode;
+ int rdlo = (insn >> 12) & 0xf;
+ int rdhi = (insn >> 8) & 0xf;
+ int rn = (insn >> 16) & 0xf;
+ int rm = insn & 0xf;
+
+ register unsigned long rdlov asm("r0") = regs->uregs[rdlo];
+ register unsigned long rdhiv asm("r1") = regs->uregs[rdhi];
+ register unsigned long rnv asm("r2") = regs->uregs[rn];
+ register unsigned long rmv asm("r3") = regs->uregs[rm];
+
+ __asm__ __volatile__ (
+ "blx %[fn]"
+ : "=r" (rdlov), "=r" (rdhiv)
+ : "0" (rdlov), "1" (rdhiv), "r" (rnv), "r" (rmv),
+ [fn] "r" (p->ainsn.insn_fn)
+ : "lr", "memory", "cc"
+ );
+
+ regs->uregs[rdlo] = rdlov;
+ regs->uregs[rdhi] = rdhiv;
+}
+
+/* These emulation encodings are functionally equivalent... */
+#define t32_emulate_rd8rn16rm0ra12_noflags \
+ t32_emulate_rdlo12rdhi8rn16rm0_noflags
+
+static const union decode_item t32_table_1110_100x_x0xx[] = {
+ /* Load/store multiple instructions */
+
+ /* Rn is PC 1110 100x x0xx 1111 xxxx xxxx xxxx xxxx */
+ DECODE_REJECT (0xfe4f0000, 0xe80f0000),
+
+ /* SRS 1110 1000 00x0 xxxx xxxx xxxx xxxx xxxx */
+ /* RFE 1110 1000 00x1 xxxx xxxx xxxx xxxx xxxx */
+ DECODE_REJECT (0xffc00000, 0xe8000000),
+ /* SRS 1110 1001 10x0 xxxx xxxx xxxx xxxx xxxx */
+ /* RFE 1110 1001 10x1 xxxx xxxx xxxx xxxx xxxx */
+ DECODE_REJECT (0xffc00000, 0xe9800000),
+
+ /* STM Rn, {...pc} 1110 100x x0x0 xxxx 1xxx xxxx xxxx xxxx */
+ DECODE_REJECT (0xfe508000, 0xe8008000),
+ /* LDM Rn, {...lr,pc} 1110 100x x0x1 xxxx 11xx xxxx xxxx xxxx */
+ DECODE_REJECT (0xfe50c000, 0xe810c000),
+ /* LDM/STM Rn, {...sp} 1110 100x x0xx xxxx xx1x xxxx xxxx xxxx */
+ DECODE_REJECT (0xfe402000, 0xe8002000),
+
+ /* STMIA 1110 1000 10x0 xxxx xxxx xxxx xxxx xxxx */
+ /* LDMIA 1110 1000 10x1 xxxx xxxx xxxx xxxx xxxx */
+ /* STMDB 1110 1001 00x0 xxxx xxxx xxxx xxxx xxxx */
+ /* LDMDB 1110 1001 00x1 xxxx xxxx xxxx xxxx xxxx */
+ DECODE_CUSTOM (0xfe400000, 0xe8000000, t32_decode_ldmstm),
+
+ DECODE_END
+};
+
+static const union decode_item t32_table_1110_100x_x1xx[] = {
+ /* Load/store dual, load/store exclusive, table branch */
+
+ /* STRD (immediate) 1110 1000 x110 xxxx xxxx xxxx xxxx xxxx */
+ /* LDRD (immediate) 1110 1000 x111 xxxx xxxx xxxx xxxx xxxx */
+ DECODE_OR (0xff600000, 0xe8600000),
+ /* STRD (immediate) 1110 1001 x1x0 xxxx xxxx xxxx xxxx xxxx */
+ /* LDRD (immediate) 1110 1001 x1x1 xxxx xxxx xxxx xxxx xxxx */
+ DECODE_EMULATEX (0xff400000, 0xe9400000, t32_emulate_ldrdstrd,
+ REGS(NOPCWB, NOSPPC, NOSPPC, 0, 0)),
+
+ /* TBB 1110 1000 1101 xxxx xxxx xxxx 0000 xxxx */
+ /* TBH 1110 1000 1101 xxxx xxxx xxxx 0001 xxxx */
+ DECODE_SIMULATEX(0xfff000e0, 0xe8d00000, t32_simulate_table_branch,
+ REGS(NOSP, 0, 0, 0, NOSPPC)),
+
+ /* STREX 1110 1000 0100 xxxx xxxx xxxx xxxx xxxx */
+ /* LDREX 1110 1000 0101 xxxx xxxx xxxx xxxx xxxx */
+ /* STREXB 1110 1000 1100 xxxx xxxx xxxx 0100 xxxx */
+ /* STREXH 1110 1000 1100 xxxx xxxx xxxx 0101 xxxx */
+ /* STREXD 1110 1000 1100 xxxx xxxx xxxx 0111 xxxx */
+ /* LDREXB 1110 1000 1101 xxxx xxxx xxxx 0100 xxxx */
+ /* LDREXH 1110 1000 1101 xxxx xxxx xxxx 0101 xxxx */
+ /* LDREXD 1110 1000 1101 xxxx xxxx xxxx 0111 xxxx */
+ /* And unallocated instructions... */
+ DECODE_END
+};
+
+static const union decode_item t32_table_1110_101x[] = {
+ /* Data-processing (shifted register) */
+
+ /* TST 1110 1010 0001 xxxx xxxx 1111 xxxx xxxx */
+ /* TEQ 1110 1010 1001 xxxx xxxx 1111 xxxx xxxx */
+ DECODE_EMULATEX (0xff700f00, 0xea100f00, t32_emulate_rd8rn16rm0_rwflags,
+ REGS(NOSPPC, 0, 0, 0, NOSPPC)),
+
+ /* CMN 1110 1011 0001 xxxx xxxx 1111 xxxx xxxx */
+ DECODE_OR (0xfff00f00, 0xeb100f00),
+ /* CMP 1110 1011 1011 xxxx xxxx 1111 xxxx xxxx */
+ DECODE_EMULATEX (0xfff00f00, 0xebb00f00, t32_emulate_rd8rn16rm0_rwflags,
+ REGS(NOPC, 0, 0, 0, NOSPPC)),
+
+ /* MOV 1110 1010 010x 1111 xxxx xxxx xxxx xxxx */
+ /* MVN 1110 1010 011x 1111 xxxx xxxx xxxx xxxx */
+ DECODE_EMULATEX (0xffcf0000, 0xea4f0000, t32_emulate_rd8rn16rm0_rwflags,
+ REGS(0, 0, NOSPPC, 0, NOSPPC)),
+
+ /* ??? 1110 1010 101x xxxx xxxx xxxx xxxx xxxx */
+ /* ??? 1110 1010 111x xxxx xxxx xxxx xxxx xxxx */
+ DECODE_REJECT (0xffa00000, 0xeaa00000),
+ /* ??? 1110 1011 001x xxxx xxxx xxxx xxxx xxxx */
+ DECODE_REJECT (0xffe00000, 0xeb200000),
+ /* ??? 1110 1011 100x xxxx xxxx xxxx xxxx xxxx */
+ DECODE_REJECT (0xffe00000, 0xeb800000),
+ /* ??? 1110 1011 111x xxxx xxxx xxxx xxxx xxxx */
+ DECODE_REJECT (0xffe00000, 0xebe00000),
+
+ /* ADD/SUB SP, SP, Rm, LSL #0..3 */
+ /* 1110 1011 x0xx 1101 x000 1101 xx00 xxxx */
+ DECODE_EMULATEX (0xff4f7f30, 0xeb0d0d00, t32_emulate_rd8rn16rm0_rwflags,
+ REGS(SP, 0, SP, 0, NOSPPC)),
+
+ /* ADD/SUB SP, SP, Rm, shift */
+ /* 1110 1011 x0xx 1101 xxxx 1101 xxxx xxxx */
+ DECODE_REJECT (0xff4f0f00, 0xeb0d0d00),
+
+ /* ADD/SUB Rd, SP, Rm, shift */
+ /* 1110 1011 x0xx 1101 xxxx xxxx xxxx xxxx */
+ DECODE_EMULATEX (0xff4f0000, 0xeb0d0000, t32_emulate_rd8rn16rm0_rwflags,
+ REGS(SP, 0, NOPC, 0, NOSPPC)),
+
+ /* AND 1110 1010 000x xxxx xxxx xxxx xxxx xxxx */
+ /* BIC 1110 1010 001x xxxx xxxx xxxx xxxx xxxx */
+ /* ORR 1110 1010 010x xxxx xxxx xxxx xxxx xxxx */
+ /* ORN 1110 1010 011x xxxx xxxx xxxx xxxx xxxx */
+ /* EOR 1110 1010 100x xxxx xxxx xxxx xxxx xxxx */
+ /* PKH 1110 1010 110x xxxx xxxx xxxx xxxx xxxx */
+ /* ADD 1110 1011 000x xxxx xxxx xxxx xxxx xxxx */
+ /* ADC 1110 1011 010x xxxx xxxx xxxx xxxx xxxx */
+ /* SBC 1110 1011 011x xxxx xxxx xxxx xxxx xxxx */
+ /* SUB 1110 1011 101x xxxx xxxx xxxx xxxx xxxx */
+ /* RSB 1110 1011 110x xxxx xxxx xxxx xxxx xxxx */
+ DECODE_EMULATEX (0xfe000000, 0xea000000, t32_emulate_rd8rn16rm0_rwflags,
+ REGS(NOSPPC, 0, NOSPPC, 0, NOSPPC)),
+
+ DECODE_END
+};
+
+static const union decode_item t32_table_1111_0x0x___0[] = {
+ /* Data-processing (modified immediate) */
+
+ /* TST 1111 0x00 0001 xxxx 0xxx 1111 xxxx xxxx */
+ /* TEQ 1111 0x00 1001 xxxx 0xxx 1111 xxxx xxxx */
+ DECODE_EMULATEX (0xfb708f00, 0xf0100f00, t32_emulate_rd8rn16rm0_rwflags,
+ REGS(NOSPPC, 0, 0, 0, 0)),
+
+ /* CMN 1111 0x01 0001 xxxx 0xxx 1111 xxxx xxxx */
+ DECODE_OR (0xfbf08f00, 0xf1100f00),
+ /* CMP 1111 0x01 1011 xxxx 0xxx 1111 xxxx xxxx */
+ DECODE_EMULATEX (0xfbf08f00, 0xf1b00f00, t32_emulate_rd8rn16rm0_rwflags,
+ REGS(NOPC, 0, 0, 0, 0)),
+
+ /* MOV 1111 0x00 010x 1111 0xxx xxxx xxxx xxxx */
+ /* MVN 1111 0x00 011x 1111 0xxx xxxx xxxx xxxx */
+ DECODE_EMULATEX (0xfbcf8000, 0xf04f0000, t32_emulate_rd8rn16rm0_rwflags,
+ REGS(0, 0, NOSPPC, 0, 0)),
+
+ /* ??? 1111 0x00 101x xxxx 0xxx xxxx xxxx xxxx */
+ DECODE_REJECT (0xfbe08000, 0xf0a00000),
+ /* ??? 1111 0x00 110x xxxx 0xxx xxxx xxxx xxxx */
+ /* ??? 1111 0x00 111x xxxx 0xxx xxxx xxxx xxxx */
+ DECODE_REJECT (0xfbc08000, 0xf0c00000),
+ /* ??? 1111 0x01 001x xxxx 0xxx xxxx xxxx xxxx */
+ DECODE_REJECT (0xfbe08000, 0xf1200000),
+ /* ??? 1111 0x01 100x xxxx 0xxx xxxx xxxx xxxx */
+ DECODE_REJECT (0xfbe08000, 0xf1800000),
+ /* ??? 1111 0x01 111x xxxx 0xxx xxxx xxxx xxxx */
+ DECODE_REJECT (0xfbe08000, 0xf1e00000),
+
+ /* ADD Rd, SP, #imm 1111 0x01 000x 1101 0xxx xxxx xxxx xxxx */
+ /* SUB Rd, SP, #imm 1111 0x01 101x 1101 0xxx xxxx xxxx xxxx */
+ DECODE_EMULATEX (0xfb4f8000, 0xf10d0000, t32_emulate_rd8rn16rm0_rwflags,
+ REGS(SP, 0, NOPC, 0, 0)),
+
+ /* AND 1111 0x00 000x xxxx 0xxx xxxx xxxx xxxx */
+ /* BIC 1111 0x00 001x xxxx 0xxx xxxx xxxx xxxx */
+ /* ORR 1111 0x00 010x xxxx 0xxx xxxx xxxx xxxx */
+ /* ORN 1111 0x00 011x xxxx 0xxx xxxx xxxx xxxx */
+ /* EOR 1111 0x00 100x xxxx 0xxx xxxx xxxx xxxx */
+ /* ADD 1111 0x01 000x xxxx 0xxx xxxx xxxx xxxx */
+ /* ADC 1111 0x01 010x xxxx 0xxx xxxx xxxx xxxx */
+ /* SBC 1111 0x01 011x xxxx 0xxx xxxx xxxx xxxx */
+ /* SUB 1111 0x01 101x xxxx 0xxx xxxx xxxx xxxx */
+ /* RSB 1111 0x01 110x xxxx 0xxx xxxx xxxx xxxx */
+ DECODE_EMULATEX (0xfa008000, 0xf0000000, t32_emulate_rd8rn16rm0_rwflags,
+ REGS(NOSPPC, 0, NOSPPC, 0, 0)),
+
+ DECODE_END
+};
+
+static const union decode_item t32_table_1111_0x1x___0[] = {
+ /* Data-processing (plain binary immediate) */
+
+ /* ADDW Rd, PC, #imm 1111 0x10 0000 1111 0xxx xxxx xxxx xxxx */
+ DECODE_OR (0xfbff8000, 0xf20f0000),
+ /* SUBW Rd, PC, #imm 1111 0x10 1010 1111 0xxx xxxx xxxx xxxx */
+ DECODE_EMULATEX (0xfbff8000, 0xf2af0000, t32_emulate_rd8pc16_noflags,
+ REGS(PC, 0, NOSPPC, 0, 0)),
+
+ /* ADDW SP, SP, #imm 1111 0x10 0000 1101 0xxx 1101 xxxx xxxx */
+ DECODE_OR (0xfbff8f00, 0xf20d0d00),
+ /* SUBW SP, SP, #imm 1111 0x10 1010 1101 0xxx 1101 xxxx xxxx */
+ DECODE_EMULATEX (0xfbff8f00, 0xf2ad0d00, t32_emulate_rd8rn16_noflags,
+ REGS(SP, 0, SP, 0, 0)),
+
+ /* ADDW 1111 0x10 0000 xxxx 0xxx xxxx xxxx xxxx */
+ DECODE_OR (0xfbf08000, 0xf2000000),
+ /* SUBW 1111 0x10 1010 xxxx 0xxx xxxx xxxx xxxx */
+ DECODE_EMULATEX (0xfbf08000, 0xf2a00000, t32_emulate_rd8rn16_noflags,
+ REGS(NOPCX, 0, NOSPPC, 0, 0)),
+
+ /* MOVW 1111 0x10 0100 xxxx 0xxx xxxx xxxx xxxx */
+ /* MOVT 1111 0x10 1100 xxxx 0xxx xxxx xxxx xxxx */
+ DECODE_EMULATEX (0xfb708000, 0xf2400000, t32_emulate_rd8rn16_noflags,
+ REGS(0, 0, NOSPPC, 0, 0)),
+
+ /* SSAT16 1111 0x11 0010 xxxx 0000 xxxx 00xx xxxx */
+ /* SSAT 1111 0x11 00x0 xxxx 0xxx xxxx xxxx xxxx */
+ /* USAT16 1111 0x11 1010 xxxx 0000 xxxx 00xx xxxx */
+ /* USAT 1111 0x11 10x0 xxxx 0xxx xxxx xxxx xxxx */
+ DECODE_EMULATEX (0xfb508000, 0xf3000000, t32_emulate_rd8rn16rm0_rwflags,
+ REGS(NOSPPC, 0, NOSPPC, 0, 0)),
+
+ /* SBFX 1111 0x11 0100 xxxx 0xxx xxxx xxxx xxxx */
+ /* UBFX 1111 0x11 1100 xxxx 0xxx xxxx xxxx xxxx */
+ DECODE_EMULATEX (0xfb708000, 0xf3400000, t32_emulate_rd8rn16_noflags,
+ REGS(NOSPPC, 0, NOSPPC, 0, 0)),
+
+ /* BFC 1111 0x11 0110 1111 0xxx xxxx xxxx xxxx */
+ DECODE_EMULATEX (0xfbff8000, 0xf36f0000, t32_emulate_rd8rn16_noflags,
+ REGS(0, 0, NOSPPC, 0, 0)),
+
+ /* BFI 1111 0x11 0110 xxxx 0xxx xxxx xxxx xxxx */
+ DECODE_EMULATEX (0xfbf08000, 0xf3600000, t32_emulate_rd8rn16_noflags,
+ REGS(NOSPPCX, 0, NOSPPC, 0, 0)),
+
+ DECODE_END
+};
+
+static const union decode_item t32_table_1111_0xxx___1[] = {
+ /* Branches and miscellaneous control */
+
+ /* YIELD 1111 0011 1010 xxxx 10x0 x000 0000 0001 */
+ DECODE_OR (0xfff0d7ff, 0xf3a08001),
+ /* SEV 1111 0011 1010 xxxx 10x0 x000 0000 0100 */
+ DECODE_EMULATE (0xfff0d7ff, 0xf3a08004, kprobe_emulate_none),
+ /* NOP 1111 0011 1010 xxxx 10x0 x000 0000 0000 */
+ /* WFE 1111 0011 1010 xxxx 10x0 x000 0000 0010 */
+ /* WFI 1111 0011 1010 xxxx 10x0 x000 0000 0011 */
+ DECODE_SIMULATE (0xfff0d7fc, 0xf3a08000, kprobe_simulate_nop),
+
+ /* MRS Rd, CPSR 1111 0011 1110 xxxx 10x0 xxxx xxxx xxxx */
+ DECODE_SIMULATEX(0xfff0d000, 0xf3e08000, t32_simulate_mrs,
+ REGS(0, 0, NOSPPC, 0, 0)),
+
+ /*
+ * Unsupported instructions
+ * 1111 0x11 1xxx xxxx 10x0 xxxx xxxx xxxx
+ *
+ * MSR 1111 0011 100x xxxx 10x0 xxxx xxxx xxxx
+ * DBG hint 1111 0011 1010 xxxx 10x0 x000 1111 xxxx
+ * Unallocated hints 1111 0011 1010 xxxx 10x0 x000 xxxx xxxx
+ * CPS 1111 0011 1010 xxxx 10x0 xxxx xxxx xxxx
+ * CLREX/DSB/DMB/ISB 1111 0011 1011 xxxx 10x0 xxxx xxxx xxxx
+ * BXJ 1111 0011 1100 xxxx 10x0 xxxx xxxx xxxx
+ * SUBS PC,LR,#<imm8> 1111 0011 1101 xxxx 10x0 xxxx xxxx xxxx
+ * MRS Rd, SPSR 1111 0011 1111 xxxx 10x0 xxxx xxxx xxxx
+ * SMC 1111 0111 1111 xxxx 1000 xxxx xxxx xxxx
+ * UNDEFINED 1111 0111 1111 xxxx 1010 xxxx xxxx xxxx
+ * ??? 1111 0111 1xxx xxxx 1010 xxxx xxxx xxxx
+ */
+ DECODE_REJECT (0xfb80d000, 0xf3808000),
+
+ /* Bcc 1111 0xxx xxxx xxxx 10x0 xxxx xxxx xxxx */
+ DECODE_CUSTOM (0xf800d000, 0xf0008000, t32_decode_cond_branch),
+
+ /* BLX 1111 0xxx xxxx xxxx 11x0 xxxx xxxx xxx0 */
+ DECODE_OR (0xf800d001, 0xf000c000),
+ /* B 1111 0xxx xxxx xxxx 10x1 xxxx xxxx xxxx */
+ /* BL 1111 0xxx xxxx xxxx 11x1 xxxx xxxx xxxx */
+ DECODE_SIMULATE (0xf8009000, 0xf0009000, t32_simulate_branch),
+
+ DECODE_END
+};
+
+static const union decode_item t32_table_1111_100x_x0x1__1111[] = {
+ /* Memory hints */
+
+ /* PLD (literal) 1111 1000 x001 1111 1111 xxxx xxxx xxxx */
+ /* PLI (literal) 1111 1001 x001 1111 1111 xxxx xxxx xxxx */
+ DECODE_SIMULATE (0xfe7ff000, 0xf81ff000, kprobe_simulate_nop),
+
+ /* PLD{W} (immediate) 1111 1000 10x1 xxxx 1111 xxxx xxxx xxxx */
+ DECODE_OR (0xffd0f000, 0xf890f000),
+ /* PLD{W} (immediate) 1111 1000 00x1 xxxx 1111 1100 xxxx xxxx */
+ DECODE_OR (0xffd0ff00, 0xf810fc00),
+ /* PLI (immediate) 1111 1001 1001 xxxx 1111 xxxx xxxx xxxx */
+ DECODE_OR (0xfff0f000, 0xf990f000),
+ /* PLI (immediate) 1111 1001 0001 xxxx 1111 1100 xxxx xxxx */
+ DECODE_SIMULATEX(0xfff0ff00, 0xf910fc00, kprobe_simulate_nop,
+ REGS(NOPCX, 0, 0, 0, 0)),
+
+ /* PLD{W} (register) 1111 1000 00x1 xxxx 1111 0000 00xx xxxx */
+ DECODE_OR (0xffd0ffc0, 0xf810f000),
+ /* PLI (register) 1111 1001 0001 xxxx 1111 0000 00xx xxxx */
+ DECODE_SIMULATEX(0xfff0ffc0, 0xf910f000, kprobe_simulate_nop,
+ REGS(NOPCX, 0, 0, 0, NOSPPC)),
+
+ /* Other unallocated instructions... */
+ DECODE_END
+};
+
+static const union decode_item t32_table_1111_100x[] = {
+ /* Store/Load single data item */
+
+ /* ??? 1111 100x x11x xxxx xxxx xxxx xxxx xxxx */
+ DECODE_REJECT (0xfe600000, 0xf8600000),
+
+ /* ??? 1111 1001 0101 xxxx xxxx xxxx xxxx xxxx */
+ DECODE_REJECT (0xfff00000, 0xf9500000),
+
+ /* ??? 1111 100x 0xxx xxxx xxxx 10x0 xxxx xxxx */
+ DECODE_REJECT (0xfe800d00, 0xf8000800),
+
+ /* STRBT 1111 1000 0000 xxxx xxxx 1110 xxxx xxxx */
+ /* STRHT 1111 1000 0010 xxxx xxxx 1110 xxxx xxxx */
+ /* STRT 1111 1000 0100 xxxx xxxx 1110 xxxx xxxx */
+ /* LDRBT 1111 1000 0001 xxxx xxxx 1110 xxxx xxxx */
+ /* LDRSBT 1111 1001 0001 xxxx xxxx 1110 xxxx xxxx */
+ /* LDRHT 1111 1000 0011 xxxx xxxx 1110 xxxx xxxx */
+ /* LDRSHT 1111 1001 0011 xxxx xxxx 1110 xxxx xxxx */
+ /* LDRT 1111 1000 0101 xxxx xxxx 1110 xxxx xxxx */
+ DECODE_REJECT (0xfe800f00, 0xf8000e00),
+
+ /* STR{,B,H} Rn,[PC...] 1111 1000 xxx0 1111 xxxx xxxx xxxx xxxx */
+ DECODE_REJECT (0xff1f0000, 0xf80f0000),
+
+ /* STR{,B,H} PC,[Rn...] 1111 1000 xxx0 xxxx 1111 xxxx xxxx xxxx */
+ DECODE_REJECT (0xff10f000, 0xf800f000),
+
+ /* LDR (literal) 1111 1000 x101 1111 xxxx xxxx xxxx xxxx */
+ DECODE_SIMULATEX(0xff7f0000, 0xf85f0000, t32_simulate_ldr_literal,
+ REGS(PC, ANY, 0, 0, 0)),
+
+ /* STR (immediate) 1111 1000 0100 xxxx xxxx 1xxx xxxx xxxx */
+ /* LDR (immediate) 1111 1000 0101 xxxx xxxx 1xxx xxxx xxxx */
+ DECODE_OR (0xffe00800, 0xf8400800),
+ /* STR (immediate) 1111 1000 1100 xxxx xxxx xxxx xxxx xxxx */
+ /* LDR (immediate) 1111 1000 1101 xxxx xxxx xxxx xxxx xxxx */
+ DECODE_EMULATEX (0xffe00000, 0xf8c00000, t32_emulate_ldrstr,
+ REGS(NOPCX, ANY, 0, 0, 0)),
+
+ /* STR (register) 1111 1000 0100 xxxx xxxx 0000 00xx xxxx */
+ /* LDR (register) 1111 1000 0101 xxxx xxxx 0000 00xx xxxx */
+ DECODE_EMULATEX (0xffe00fc0, 0xf8400000, t32_emulate_ldrstr,
+ REGS(NOPCX, ANY, 0, 0, NOSPPC)),
+
+ /* LDRB (literal) 1111 1000 x001 1111 xxxx xxxx xxxx xxxx */
+ /* LDRSB (literal) 1111 1001 x001 1111 xxxx xxxx xxxx xxxx */
+ /* LDRH (literal) 1111 1000 x011 1111 xxxx xxxx xxxx xxxx */
+ /* LDRSH (literal) 1111 1001 x011 1111 xxxx xxxx xxxx xxxx */
+ DECODE_EMULATEX (0xfe5f0000, 0xf81f0000, t32_simulate_ldr_literal,
+ REGS(PC, NOSPPCX, 0, 0, 0)),
+
+ /* STRB (immediate) 1111 1000 0000 xxxx xxxx 1xxx xxxx xxxx */
+ /* STRH (immediate) 1111 1000 0010 xxxx xxxx 1xxx xxxx xxxx */
+ /* LDRB (immediate) 1111 1000 0001 xxxx xxxx 1xxx xxxx xxxx */
+ /* LDRSB (immediate) 1111 1001 0001 xxxx xxxx 1xxx xxxx xxxx */
+ /* LDRH (immediate) 1111 1000 0011 xxxx xxxx 1xxx xxxx xxxx */
+ /* LDRSH (immediate) 1111 1001 0011 xxxx xxxx 1xxx xxxx xxxx */
+ DECODE_OR (0xfec00800, 0xf8000800),
+ /* STRB (immediate) 1111 1000 1000 xxxx xxxx xxxx xxxx xxxx */
+ /* STRH (immediate) 1111 1000 1010 xxxx xxxx xxxx xxxx xxxx */
+ /* LDRB (immediate) 1111 1000 1001 xxxx xxxx xxxx xxxx xxxx */
+ /* LDRSB (immediate) 1111 1001 1001 xxxx xxxx xxxx xxxx xxxx */
+ /* LDRH (immediate) 1111 1000 1011 xxxx xxxx xxxx xxxx xxxx */
+ /* LDRSH (immediate) 1111 1001 1011 xxxx xxxx xxxx xxxx xxxx */
+ DECODE_EMULATEX (0xfec00000, 0xf8800000, t32_emulate_ldrstr,
+ REGS(NOPCX, NOSPPCX, 0, 0, 0)),
+
+ /* STRB (register) 1111 1000 0000 xxxx xxxx 0000 00xx xxxx */
+ /* STRH (register) 1111 1000 0010 xxxx xxxx 0000 00xx xxxx */
+ /* LDRB (register) 1111 1000 0001 xxxx xxxx 0000 00xx xxxx */
+ /* LDRSB (register) 1111 1001 0001 xxxx xxxx 0000 00xx xxxx */
+ /* LDRH (register) 1111 1000 0011 xxxx xxxx 0000 00xx xxxx */
+ /* LDRSH (register) 1111 1001 0011 xxxx xxxx 0000 00xx xxxx */
+ DECODE_EMULATEX (0xfe800fc0, 0xf8000000, t32_emulate_ldrstr,
+ REGS(NOPCX, NOSPPCX, 0, 0, NOSPPC)),
+
+ /* Other unallocated instructions... */
+ DECODE_END
+};
+
+static const union decode_item t32_table_1111_1010___1111[] = {
+ /* Data-processing (register) */
+
+ /* ??? 1111 1010 011x xxxx 1111 xxxx 1xxx xxxx */
+ DECODE_REJECT (0xffe0f080, 0xfa60f080),
+
+ /* SXTH 1111 1010 0000 1111 1111 xxxx 1xxx xxxx */
+ /* UXTH 1111 1010 0001 1111 1111 xxxx 1xxx xxxx */
+ /* SXTB16 1111 1010 0010 1111 1111 xxxx 1xxx xxxx */
+ /* UXTB16 1111 1010 0011 1111 1111 xxxx 1xxx xxxx */
+ /* SXTB 1111 1010 0100 1111 1111 xxxx 1xxx xxxx */
+ /* UXTB 1111 1010 0101 1111 1111 xxxx 1xxx xxxx */
+ DECODE_EMULATEX (0xff8ff080, 0xfa0ff080, t32_emulate_rd8rn16rm0_rwflags,
+ REGS(0, 0, NOSPPC, 0, NOSPPC)),
+
+
+ /* ??? 1111 1010 1xxx xxxx 1111 xxxx 0x11 xxxx */
+ DECODE_REJECT (0xff80f0b0, 0xfa80f030),
+ /* ??? 1111 1010 1x11 xxxx 1111 xxxx 0xxx xxxx */
+ DECODE_REJECT (0xffb0f080, 0xfab0f000),
+
+ /* SADD16 1111 1010 1001 xxxx 1111 xxxx 0000 xxxx */
+ /* SASX 1111 1010 1010 xxxx 1111 xxxx 0000 xxxx */
+ /* SSAX 1111 1010 1110 xxxx 1111 xxxx 0000 xxxx */
+ /* SSUB16 1111 1010 1101 xxxx 1111 xxxx 0000 xxxx */
+ /* SADD8 1111 1010 1000 xxxx 1111 xxxx 0000 xxxx */
+ /* SSUB8 1111 1010 1100 xxxx 1111 xxxx 0000 xxxx */
+
+ /* QADD16 1111 1010 1001 xxxx 1111 xxxx 0001 xxxx */
+ /* QASX 1111 1010 1010 xxxx 1111 xxxx 0001 xxxx */
+ /* QSAX 1111 1010 1110 xxxx 1111 xxxx 0001 xxxx */
+ /* QSUB16 1111 1010 1101 xxxx 1111 xxxx 0001 xxxx */
+ /* QADD8 1111 1010 1000 xxxx 1111 xxxx 0001 xxxx */
+ /* QSUB8 1111 1010 1100 xxxx 1111 xxxx 0001 xxxx */
+
+ /* SHADD16 1111 1010 1001 xxxx 1111 xxxx 0010 xxxx */
+ /* SHASX 1111 1010 1010 xxxx 1111 xxxx 0010 xxxx */
+ /* SHSAX 1111 1010 1110 xxxx 1111 xxxx 0010 xxxx */
+ /* SHSUB16 1111 1010 1101 xxxx 1111 xxxx 0010 xxxx */
+ /* SHADD8 1111 1010 1000 xxxx 1111 xxxx 0010 xxxx */
+ /* SHSUB8 1111 1010 1100 xxxx 1111 xxxx 0010 xxxx */
+
+ /* UADD16 1111 1010 1001 xxxx 1111 xxxx 0100 xxxx */
+ /* UASX 1111 1010 1010 xxxx 1111 xxxx 0100 xxxx */
+ /* USAX 1111 1010 1110 xxxx 1111 xxxx 0100 xxxx */
+ /* USUB16 1111 1010 1101 xxxx 1111 xxxx 0100 xxxx */
+ /* UADD8 1111 1010 1000 xxxx 1111 xxxx 0100 xxxx */
+ /* USUB8 1111 1010 1100 xxxx 1111 xxxx 0100 xxxx */
+
+ /* UQADD16 1111 1010 1001 xxxx 1111 xxxx 0101 xxxx */
+ /* UQASX 1111 1010 1010 xxxx 1111 xxxx 0101 xxxx */
+ /* UQSAX 1111 1010 1110 xxxx 1111 xxxx 0101 xxxx */
+ /* UQSUB16 1111 1010 1101 xxxx 1111 xxxx 0101 xxxx */
+ /* UQADD8 1111 1010 1000 xxxx 1111 xxxx 0101 xxxx */
+ /* UQSUB8 1111 1010 1100 xxxx 1111 xxxx 0101 xxxx */
+
+ /* UHADD16 1111 1010 1001 xxxx 1111 xxxx 0110 xxxx */
+ /* UHASX 1111 1010 1010 xxxx 1111 xxxx 0110 xxxx */
+ /* UHSAX 1111 1010 1110 xxxx 1111 xxxx 0110 xxxx */
+ /* UHSUB16 1111 1010 1101 xxxx 1111 xxxx 0110 xxxx */
+ /* UHADD8 1111 1010 1000 xxxx 1111 xxxx 0110 xxxx */
+ /* UHSUB8 1111 1010 1100 xxxx 1111 xxxx 0110 xxxx */
+ DECODE_OR (0xff80f080, 0xfa80f000),
+
+ /* SXTAH 1111 1010 0000 xxxx 1111 xxxx 1xxx xxxx */
+ /* UXTAH 1111 1010 0001 xxxx 1111 xxxx 1xxx xxxx */
+ /* SXTAB16 1111 1010 0010 xxxx 1111 xxxx 1xxx xxxx */
+ /* UXTAB16 1111 1010 0011 xxxx 1111 xxxx 1xxx xxxx */
+ /* SXTAB 1111 1010 0100 xxxx 1111 xxxx 1xxx xxxx */
+ /* UXTAB 1111 1010 0101 xxxx 1111 xxxx 1xxx xxxx */
+ DECODE_OR (0xff80f080, 0xfa00f080),
+
+ /* QADD 1111 1010 1000 xxxx 1111 xxxx 1000 xxxx */
+ /* QDADD 1111 1010 1000 xxxx 1111 xxxx 1001 xxxx */
+ /* QSUB 1111 1010 1000 xxxx 1111 xxxx 1010 xxxx */
+ /* QDSUB 1111 1010 1000 xxxx 1111 xxxx 1011 xxxx */
+ DECODE_OR (0xfff0f0c0, 0xfa80f080),
+
+ /* SEL 1111 1010 1010 xxxx 1111 xxxx 1000 xxxx */
+ DECODE_OR (0xfff0f0f0, 0xfaa0f080),
+
+ /* LSL 1111 1010 000x xxxx 1111 xxxx 0000 xxxx */
+ /* LSR 1111 1010 001x xxxx 1111 xxxx 0000 xxxx */
+ /* ASR 1111 1010 010x xxxx 1111 xxxx 0000 xxxx */
+ /* ROR 1111 1010 011x xxxx 1111 xxxx 0000 xxxx */
+ DECODE_EMULATEX (0xff80f0f0, 0xfa00f000, t32_emulate_rd8rn16rm0_rwflags,
+ REGS(NOSPPC, 0, NOSPPC, 0, NOSPPC)),
+
+ /* CLZ 1111 1010 1011 xxxx 1111 xxxx 1000 xxxx */
+ DECODE_OR (0xfff0f0f0, 0xfab0f080),
+
+ /* REV 1111 1010 1001 xxxx 1111 xxxx 1000 xxxx */
+ /* REV16 1111 1010 1001 xxxx 1111 xxxx 1001 xxxx */
+ /* RBIT 1111 1010 1001 xxxx 1111 xxxx 1010 xxxx */
+ /* REVSH 1111 1010 1001 xxxx 1111 xxxx 1011 xxxx */
+ DECODE_EMULATEX (0xfff0f0c0, 0xfa90f080, t32_emulate_rd8rn16_noflags,
+ REGS(NOSPPC, 0, NOSPPC, 0, SAMEAS16)),
+
+ /* Other unallocated instructions... */
+ DECODE_END
+};
+
+static const union decode_item t32_table_1111_1011_0[] = {
+ /* Multiply, multiply accumulate, and absolute difference */
+
+ /* ??? 1111 1011 0000 xxxx 1111 xxxx 0001 xxxx */
+ DECODE_REJECT (0xfff0f0f0, 0xfb00f010),
+ /* ??? 1111 1011 0111 xxxx 1111 xxxx 0001 xxxx */
+ DECODE_REJECT (0xfff0f0f0, 0xfb70f010),
+
+ /* SMULxy 1111 1011 0001 xxxx 1111 xxxx 00xx xxxx */
+ DECODE_OR (0xfff0f0c0, 0xfb10f000),
+ /* MUL 1111 1011 0000 xxxx 1111 xxxx 0000 xxxx */
+ /* SMUAD{X} 1111 1011 0010 xxxx 1111 xxxx 000x xxxx */
+ /* SMULWy 1111 1011 0011 xxxx 1111 xxxx 000x xxxx */
+ /* SMUSD{X} 1111 1011 0100 xxxx 1111 xxxx 000x xxxx */
+ /* SMMUL{R} 1111 1011 0101 xxxx 1111 xxxx 000x xxxx */
+ /* USAD8 1111 1011 0111 xxxx 1111 xxxx 0000 xxxx */
+ DECODE_EMULATEX (0xff80f0e0, 0xfb00f000, t32_emulate_rd8rn16rm0_rwflags,
+ REGS(NOSPPC, 0, NOSPPC, 0, NOSPPC)),
+
+ /* ??? 1111 1011 0111 xxxx xxxx xxxx 0001 xxxx */
+ DECODE_REJECT (0xfff000f0, 0xfb700010),
+
+ /* SMLAxy 1111 1011 0001 xxxx xxxx xxxx 00xx xxxx */
+ DECODE_OR (0xfff000c0, 0xfb100000),
+ /* MLA 1111 1011 0000 xxxx xxxx xxxx 0000 xxxx */
+ /* MLS 1111 1011 0000 xxxx xxxx xxxx 0001 xxxx */
+ /* SMLAD{X} 1111 1011 0010 xxxx xxxx xxxx 000x xxxx */
+ /* SMLAWy 1111 1011 0011 xxxx xxxx xxxx 000x xxxx */
+ /* SMLSD{X} 1111 1011 0100 xxxx xxxx xxxx 000x xxxx */
+ /* SMMLA{R} 1111 1011 0101 xxxx xxxx xxxx 000x xxxx */
+ /* SMMLS{R} 1111 1011 0110 xxxx xxxx xxxx 000x xxxx */
+ /* USADA8 1111 1011 0111 xxxx xxxx xxxx 0000 xxxx */
+ DECODE_EMULATEX (0xff8000c0, 0xfb000000, t32_emulate_rd8rn16rm0ra12_noflags,
+ REGS(NOSPPC, NOSPPCX, NOSPPC, 0, NOSPPC)),
+
+ /* Other unallocated instructions... */
+ DECODE_END
+};
+
+static const union decode_item t32_table_1111_1011_1[] = {
+ /* Long multiply, long multiply accumulate, and divide */
+
+ /* UMAAL 1111 1011 1110 xxxx xxxx xxxx 0110 xxxx */
+ DECODE_OR (0xfff000f0, 0xfbe00060),
+ /* SMLALxy 1111 1011 1100 xxxx xxxx xxxx 10xx xxxx */
+ DECODE_OR (0xfff000c0, 0xfbc00080),
+ /* SMLALD{X} 1111 1011 1100 xxxx xxxx xxxx 110x xxxx */
+ /* SMLSLD{X} 1111 1011 1101 xxxx xxxx xxxx 110x xxxx */
+ DECODE_OR (0xffe000e0, 0xfbc000c0),
+ /* SMULL 1111 1011 1000 xxxx xxxx xxxx 0000 xxxx */
+ /* UMULL 1111 1011 1010 xxxx xxxx xxxx 0000 xxxx */
+ /* SMLAL 1111 1011 1100 xxxx xxxx xxxx 0000 xxxx */
+ /* UMLAL 1111 1011 1110 xxxx xxxx xxxx 0000 xxxx */
+ DECODE_EMULATEX (0xff9000f0, 0xfb800000, t32_emulate_rdlo12rdhi8rn16rm0_noflags,
+ REGS(NOSPPC, NOSPPC, NOSPPC, 0, NOSPPC)),
+
+ /* SDIV 1111 1011 1001 xxxx xxxx xxxx 1111 xxxx */
+ /* UDIV 1111 1011 1011 xxxx xxxx xxxx 1111 xxxx */
+ /* Other unallocated instructions... */
+ DECODE_END
+};
+
+const union decode_item kprobe_decode_thumb32_table[] = {
+
+ /*
+ * Load/store multiple instructions
+ * 1110 100x x0xx xxxx xxxx xxxx xxxx xxxx
+ */
+ DECODE_TABLE (0xfe400000, 0xe8000000, t32_table_1110_100x_x0xx),
+
+ /*
+ * Load/store dual, load/store exclusive, table branch
+ * 1110 100x x1xx xxxx xxxx xxxx xxxx xxxx
+ */
+ DECODE_TABLE (0xfe400000, 0xe8400000, t32_table_1110_100x_x1xx),
+
+ /*
+ * Data-processing (shifted register)
+ * 1110 101x xxxx xxxx xxxx xxxx xxxx xxxx
+ */
+ DECODE_TABLE (0xfe000000, 0xea000000, t32_table_1110_101x),
+
+ /*
+ * Coprocessor instructions
+ * 1110 11xx xxxx xxxx xxxx xxxx xxxx xxxx
+ */
+ DECODE_REJECT (0xfc000000, 0xec000000),
+
+ /*
+ * Data-processing (modified immediate)
+ * 1111 0x0x xxxx xxxx 0xxx xxxx xxxx xxxx
+ */
+ DECODE_TABLE (0xfa008000, 0xf0000000, t32_table_1111_0x0x___0),
+
+ /*
+ * Data-processing (plain binary immediate)
+ * 1111 0x1x xxxx xxxx 0xxx xxxx xxxx xxxx
+ */
+ DECODE_TABLE (0xfa008000, 0xf2000000, t32_table_1111_0x1x___0),
+
+ /*
+ * Branches and miscellaneous control
+ * 1111 0xxx xxxx xxxx 1xxx xxxx xxxx xxxx
+ */
+ DECODE_TABLE (0xf8008000, 0xf0008000, t32_table_1111_0xxx___1),
+
+ /*
+ * Advanced SIMD element or structure load/store instructions
+ * 1111 1001 xxx0 xxxx xxxx xxxx xxxx xxxx
+ */
+ DECODE_REJECT (0xff100000, 0xf9000000),
+
+ /*
+ * Memory hints
+ * 1111 100x x0x1 xxxx 1111 xxxx xxxx xxxx
+ */
+ DECODE_TABLE (0xfe50f000, 0xf810f000, t32_table_1111_100x_x0x1__1111),
+
+ /*
+ * Store single data item
+ * 1111 1000 xxx0 xxxx xxxx xxxx xxxx xxxx
+ * Load single data items
+ * 1111 100x xxx1 xxxx xxxx xxxx xxxx xxxx
+ */
+ DECODE_TABLE (0xfe000000, 0xf8000000, t32_table_1111_100x),
+
+ /*
+ * Data-processing (register)
+ * 1111 1010 xxxx xxxx 1111 xxxx xxxx xxxx
+ */
+ DECODE_TABLE (0xff00f000, 0xfa00f000, t32_table_1111_1010___1111),
+
+ /*
+ * Multiply, multiply accumulate, and absolute difference
+ * 1111 1011 0xxx xxxx xxxx xxxx xxxx xxxx
+ */
+ DECODE_TABLE (0xff800000, 0xfb000000, t32_table_1111_1011_0),
+
+ /*
+ * Long multiply, long multiply accumulate, and divide
+ * 1111 1011 1xxx xxxx xxxx xxxx xxxx xxxx
+ */
+ DECODE_TABLE (0xff800000, 0xfb800000, t32_table_1111_1011_1),
+
+ /*
+ * Coprocessor instructions
+ * 1111 11xx xxxx xxxx xxxx xxxx xxxx xxxx
+ */
+ DECODE_END
+};
+
+static void __kprobes
+t16_simulate_bxblx(struct kprobe *p, struct pt_regs *regs)
+{
+ kprobe_opcode_t insn = p->opcode;
+ unsigned long pc = thumb_probe_pc(p);
+ int rm = (insn >> 3) & 0xf;
+ unsigned long rmv = (rm == 15) ? pc : regs->uregs[rm];
+
+ if (insn & (1 << 7)) /* BLX ? */
+ regs->ARM_lr = (unsigned long)p->addr + 2;
+
+ bx_write_pc(rmv, regs);
+}
+
+static void __kprobes
+t16_simulate_ldr_literal(struct kprobe *p, struct pt_regs *regs)
+{
+ kprobe_opcode_t insn = p->opcode;
+ unsigned long* base = (unsigned long *)(thumb_probe_pc(p) & ~3);
+ long index = insn & 0xff;
+ int rt = (insn >> 8) & 0x7;
+ regs->uregs[rt] = base[index];
+}
+
+static void __kprobes
+t16_simulate_ldrstr_sp_relative(struct kprobe *p, struct pt_regs *regs)
+{
+ kprobe_opcode_t insn = p->opcode;
+ unsigned long* base = (unsigned long *)regs->ARM_sp;
+ long index = insn & 0xff;
+ int rt = (insn >> 8) & 0x7;
+ if (insn & 0x800) /* LDR */
+ regs->uregs[rt] = base[index];
+ else /* STR */
+ base[index] = regs->uregs[rt];
+}
+
+static void __kprobes
+t16_simulate_reladr(struct kprobe *p, struct pt_regs *regs)
+{
+ kprobe_opcode_t insn = p->opcode;
+ unsigned long base = (insn & 0x800) ? regs->ARM_sp
+ : (thumb_probe_pc(p) & ~3);
+ long offset = insn & 0xff;
+ int rt = (insn >> 8) & 0x7;
+ regs->uregs[rt] = base + offset * 4;
+}
+
+static void __kprobes
+t16_simulate_add_sp_imm(struct kprobe *p, struct pt_regs *regs)
+{
+ kprobe_opcode_t insn = p->opcode;
+ long imm = insn & 0x7f;
+ if (insn & 0x80) /* SUB */
+ regs->ARM_sp -= imm * 4;
+ else /* ADD */
+ regs->ARM_sp += imm * 4;
+}
+
+static void __kprobes
+t16_simulate_cbz(struct kprobe *p, struct pt_regs *regs)
+{
+ kprobe_opcode_t insn = p->opcode;
+ int rn = insn & 0x7;
+ kprobe_opcode_t nonzero = regs->uregs[rn] ? insn : ~insn;
+ if (nonzero & 0x800) {
+ long i = insn & 0x200;
+ long imm5 = insn & 0xf8;
+ unsigned long pc = thumb_probe_pc(p);
+ regs->ARM_pc = pc + (i >> 3) + (imm5 >> 2);
+ }
+}
+
+static void __kprobes
+t16_simulate_it(struct kprobe *p, struct pt_regs *regs)
+{
+ /*
+ * The 8 IT state bits are split into two parts in CPSR:
+ * ITSTATE<1:0> are in CPSR<26:25>
+ * ITSTATE<7:2> are in CPSR<15:10>
+ * The new IT state is in the lower byte of insn.
+ */
+ kprobe_opcode_t insn = p->opcode;
+ unsigned long cpsr = regs->ARM_cpsr;
+ cpsr &= ~PSR_IT_MASK;
+ cpsr |= (insn & 0xfc) << 8;
+ cpsr |= (insn & 0x03) << 25;
+ regs->ARM_cpsr = cpsr;
+}
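For reference, the inverse mapping, reassembling the 8-bit ITSTATE from CPSR; a minimal sketch assuming only the bit positions listed in the comment above (helper name invented):

	static inline unsigned long example_extract_itstate(unsigned long cpsr)
	{
		return ((cpsr >> 8) & 0xfc)   /* ITSTATE<7:2> from CPSR<15:10> */
		     | ((cpsr >> 25) & 0x03); /* ITSTATE<1:0> from CPSR<26:25> */
	}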
+
+static void __kprobes
+t16_singlestep_it(struct kprobe *p, struct pt_regs *regs)
+{
+ regs->ARM_pc += 2;
+ t16_simulate_it(p, regs);
+}
+
+static enum kprobe_insn __kprobes
+t16_decode_it(kprobe_opcode_t insn, struct arch_specific_insn *asi)
+{
+ asi->insn_singlestep = t16_singlestep_it;
+ return INSN_GOOD_NO_SLOT;
+}
+
+static void __kprobes
+t16_simulate_cond_branch(struct kprobe *p, struct pt_regs *regs)
+{
+ kprobe_opcode_t insn = p->opcode;
+ unsigned long pc = thumb_probe_pc(p);
+ long offset = insn & 0x7f;
+ offset -= insn & 0x80; /* Apply sign bit */
+ regs->ARM_pc = pc + (offset * 2);
+}
+
+static enum kprobe_insn __kprobes
+t16_decode_cond_branch(kprobe_opcode_t insn, struct arch_specific_insn *asi)
+{
+ int cc = (insn >> 8) & 0xf;
+ asi->insn_check_cc = kprobe_condition_checks[cc];
+ asi->insn_handler = t16_simulate_cond_branch;
+ return INSN_GOOD_NO_SLOT;
+}
+
+static void __kprobes
+t16_simulate_branch(struct kprobe *p, struct pt_regs *regs)
+{
+ kprobe_opcode_t insn = p->opcode;
+ unsigned long pc = thumb_probe_pc(p);
+ long offset = insn & 0x3ff;
+ offset -= insn & 0x400; /* Apply sign bit */
+ regs->ARM_pc = pc + (offset * 2);
+}
+
+static unsigned long __kprobes
+t16_emulate_loregs(struct kprobe *p, struct pt_regs *regs)
+{
+ unsigned long oldcpsr = regs->ARM_cpsr;
+ unsigned long newcpsr;
+
+ __asm__ __volatile__ (
+ "msr cpsr_fs, %[oldcpsr] \n\t"
+ "ldmia %[regs], {r0-r7} \n\t"
+ "blx %[fn] \n\t"
+ "stmia %[regs], {r0-r7} \n\t"
+ "mrs %[newcpsr], cpsr \n\t"
+ : [newcpsr] "=r" (newcpsr)
+ : [oldcpsr] "r" (oldcpsr), [regs] "r" (regs),
+ [fn] "r" (p->ainsn.insn_fn)
+ : "r0", "r1", "r2", "r3", "r4", "r5", "r6", "r7",
+ "lr", "memory", "cc"
+ );
+
+ return (oldcpsr & ~APSR_MASK) | (newcpsr & APSR_MASK);
+}
+
+static void __kprobes
+t16_emulate_loregs_rwflags(struct kprobe *p, struct pt_regs *regs)
+{
+ regs->ARM_cpsr = t16_emulate_loregs(p, regs);
+}
+
+static void __kprobes
+t16_emulate_loregs_noitrwflags(struct kprobe *p, struct pt_regs *regs)
+{
+ unsigned long cpsr = t16_emulate_loregs(p, regs);
+ if (!in_it_block(cpsr))
+ regs->ARM_cpsr = cpsr;
+}
+
+static void __kprobes
+t16_emulate_hiregs(struct kprobe *p, struct pt_regs *regs)
+{
+ kprobe_opcode_t insn = p->opcode;
+ unsigned long pc = thumb_probe_pc(p);
+ int rdn = (insn & 0x7) | ((insn & 0x80) >> 4);
+ int rm = (insn >> 3) & 0xf;
+
+ register unsigned long rdnv asm("r1");
+ register unsigned long rmv asm("r0");
+ unsigned long cpsr = regs->ARM_cpsr;
+
+ rdnv = (rdn == 15) ? pc : regs->uregs[rdn];
+ rmv = (rm == 15) ? pc : regs->uregs[rm];
+
+ __asm__ __volatile__ (
+ "msr cpsr_fs, %[cpsr] \n\t"
+ "blx %[fn] \n\t"
+ "mrs %[cpsr], cpsr \n\t"
+ : "=r" (rdnv), [cpsr] "=r" (cpsr)
+ : "0" (rdnv), "r" (rmv), "1" (cpsr), [fn] "r" (p->ainsn.insn_fn)
+ : "lr", "memory", "cc"
+ );
+
+ if (rdn == 15)
+ rdnv &= ~1;
+
+ regs->uregs[rdn] = rdnv;
+ regs->ARM_cpsr = (regs->ARM_cpsr & ~APSR_MASK) | (cpsr & APSR_MASK);
+}
+
+static enum kprobe_insn __kprobes
+t16_decode_hiregs(kprobe_opcode_t insn, struct arch_specific_insn *asi)
+{
+ insn &= ~0x00ff;
+ insn |= 0x001; /* Set Rdn = R1 and Rm = R0 */
+ ((u16 *)asi->insn)[0] = insn;
+ asi->insn_handler = t16_emulate_hiregs;
+ return INSN_GOOD;
+}
+
+static void __kprobes
+t16_emulate_push(struct kprobe *p, struct pt_regs *regs)
+{
+ __asm__ __volatile__ (
+ "ldr r9, [%[regs], #13*4] \n\t"
+ "ldr r8, [%[regs], #14*4] \n\t"
+ "ldmia %[regs], {r0-r7} \n\t"
+ "blx %[fn] \n\t"
+ "str r9, [%[regs], #13*4] \n\t"
+ :
+ : [regs] "r" (regs), [fn] "r" (p->ainsn.insn_fn)
+ : "r0", "r1", "r2", "r3", "r4", "r5", "r6", "r7", "r8", "r9",
+ "lr", "memory", "cc"
+ );
+}
+
+static enum kprobe_insn __kprobes
+t16_decode_push(kprobe_opcode_t insn, struct arch_specific_insn *asi)
+{
+ /*
+ * To simulate a PUSH we use a Thumb-2 "STMDB R9!, {registers}"
+ * and call it with R9=SP and LR in the register list represented
+ * by R8.
+ */
+ ((u16 *)asi->insn)[0] = 0xe929; /* 1st half STMDB R9!,{} */
+ ((u16 *)asi->insn)[1] = insn & 0x1ff; /* 2nd half (register list) */
+ asi->insn_handler = t16_emulate_push;
+ return INSN_GOOD;
+}
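To make the trick concrete, a worked example (probed instruction chosen arbitrarily, not from the patch):

	/*
	 * Probing "push {r0-r3, lr}" (0xb50f) gives insn & 0x1ff == 0x010f,
	 * so the slot is written as the halfwords 0xe929 0x010f, i.e.
	 *
	 *	stmdb	r9!, {r0-r3, r8}
	 *
	 * t16_emulate_push() runs the slot with r9 holding the saved SP and
	 * r8 holding the saved LR, so the stores match what the original
	 * PUSH would have done, and the decremented r9 becomes the new SP.
	 */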
+
+static void __kprobes
+t16_emulate_pop_nopc(struct kprobe *p, struct pt_regs *regs)
+{
+ __asm__ __volatile__ (
+ "ldr r9, [%[regs], #13*4] \n\t"
+ "ldmia %[regs], {r0-r7} \n\t"
+ "blx %[fn] \n\t"
+ "stmia %[regs], {r0-r7} \n\t"
+ "str r9, [%[regs], #13*4] \n\t"
+ :
+ : [regs] "r" (regs), [fn] "r" (p->ainsn.insn_fn)
+ : "r0", "r1", "r2", "r3", "r4", "r5", "r6", "r7", "r9",
+ "lr", "memory", "cc"
+ );
+}
+
+static void __kprobes
+t16_emulate_pop_pc(struct kprobe *p, struct pt_regs *regs)
+{
+ register unsigned long pc asm("r8");
+
+ __asm__ __volatile__ (
+ "ldr r9, [%[regs], #13*4] \n\t"
+ "ldmia %[regs], {r0-r7} \n\t"
+ "blx %[fn] \n\t"
+ "stmia %[regs], {r0-r7} \n\t"
+ "str r9, [%[regs], #13*4] \n\t"
+ : "=r" (pc)
+ : [regs] "r" (regs), [fn] "r" (p->ainsn.insn_fn)
+ : "r0", "r1", "r2", "r3", "r4", "r5", "r6", "r7", "r9",
+ "lr", "memory", "cc"
+ );
+
+ bx_write_pc(pc, regs);
+}
+
+static enum kprobe_insn __kprobes
+t16_decode_pop(kprobe_opcode_t insn, struct arch_specific_insn *asi)
+{
+ /*
+ * To simulate a POP we use a Thumb-2 "LDMIA R9!, {registers}"
+ * and call it with R9=SP and PC in the register list represented
+ * by R8.
+ */
+ ((u16 *)asi->insn)[0] = 0xe8b9; /* 1st half LDMIA R9!,{} */
+ ((u16 *)asi->insn)[1] = insn & 0x1ff; /* 2nd half (register list) */
+ asi->insn_handler = insn & 0x100 ? t16_emulate_pop_pc
+ : t16_emulate_pop_nopc;
+ return INSN_GOOD;
+}
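And the matching POP case (again illustrative only):

	/*
	 * Probing "pop {r0-r3, pc}" (0xbd0f) gives a slot of 0xe8b9 0x010f,
	 * i.e. "ldmia r9!, {r0-r3, r8}".  Bit 8 of the probed opcode selects
	 * t16_emulate_pop_pc(), which picks the return address out of r8 and
	 * passes it to bx_write_pc(), while r9 provides the updated SP.
	 */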
+
+static const union decode_item t16_table_1011[] = {
+ /* Miscellaneous 16-bit instructions */
+
+ /* ADD (SP plus immediate) 1011 0000 0xxx xxxx */
+ /* SUB (SP minus immediate) 1011 0000 1xxx xxxx */
+ DECODE_SIMULATE (0xff00, 0xb000, t16_simulate_add_sp_imm),
+
+ /* CBZ 1011 00x1 xxxx xxxx */
+ /* CBNZ 1011 10x1 xxxx xxxx */
+ DECODE_SIMULATE (0xf500, 0xb100, t16_simulate_cbz),
+
+ /* SXTH 1011 0010 00xx xxxx */
+ /* SXTB 1011 0010 01xx xxxx */
+ /* UXTH 1011 0010 10xx xxxx */
+ /* UXTB 1011 0010 11xx xxxx */
+ /* REV 1011 1010 00xx xxxx */
+ /* REV16 1011 1010 01xx xxxx */
+ /* ??? 1011 1010 10xx xxxx */
+ /* REVSH 1011 1010 11xx xxxx */
+ DECODE_REJECT (0xffc0, 0xba80),
+ DECODE_EMULATE (0xf500, 0xb000, t16_emulate_loregs_rwflags),
+
+ /* PUSH 1011 010x xxxx xxxx */
+ DECODE_CUSTOM (0xfe00, 0xb400, t16_decode_push),
+ /* POP 1011 110x xxxx xxxx */
+ DECODE_CUSTOM (0xfe00, 0xbc00, t16_decode_pop),
+
+ /*
+ * If-Then, and hints
+ * 1011 1111 xxxx xxxx
+ */
+
+ /* YIELD 1011 1111 0001 0000 */
+ DECODE_OR (0xffff, 0xbf10),
+ /* SEV 1011 1111 0100 0000 */
+ DECODE_EMULATE (0xffff, 0xbf40, kprobe_emulate_none),
+ /* NOP 1011 1111 0000 0000 */
+ /* WFE 1011 1111 0010 0000 */
+ /* WFI 1011 1111 0011 0000 */
+ DECODE_SIMULATE (0xffcf, 0xbf00, kprobe_simulate_nop),
+ /* Unassigned hints 1011 1111 xxxx 0000 */
+ DECODE_REJECT (0xff0f, 0xbf00),
+ /* IT 1011 1111 xxxx xxxx */
+ DECODE_CUSTOM (0xff00, 0xbf00, t16_decode_it),
+
+ /* SETEND 1011 0110 010x xxxx */
+ /* CPS 1011 0110 011x xxxx */
+ /* BKPT 1011 1110 xxxx xxxx */
+ /* And unallocated instructions... */
+ DECODE_END
+};
+
+const union decode_item kprobe_decode_thumb16_table[] = {
+
+ /*
+ * Shift (immediate), add, subtract, move, and compare
+ * 00xx xxxx xxxx xxxx
+ */
+
+ /* CMP (immediate) 0010 1xxx xxxx xxxx */
+ DECODE_EMULATE (0xf800, 0x2800, t16_emulate_loregs_rwflags),
+
+ /* ADD (register) 0001 100x xxxx xxxx */
+ /* SUB (register) 0001 101x xxxx xxxx */
+ /* LSL (immediate) 0000 0xxx xxxx xxxx */
+ /* LSR (immediate) 0000 1xxx xxxx xxxx */
+ /* ASR (immediate) 0001 0xxx xxxx xxxx */
+ /* ADD (immediate, Thumb) 0001 110x xxxx xxxx */
+ /* SUB (immediate, Thumb) 0001 111x xxxx xxxx */
+ /* MOV (immediate) 0010 0xxx xxxx xxxx */
+ /* ADD (immediate, Thumb) 0011 0xxx xxxx xxxx */
+ /* SUB (immediate, Thumb) 0011 1xxx xxxx xxxx */
+ DECODE_EMULATE (0xc000, 0x0000, t16_emulate_loregs_noitrwflags),
+
+ /*
+ * 16-bit Thumb data-processing instructions
+ * 0100 00xx xxxx xxxx
+ */
+
+ /* TST (register) 0100 0010 00xx xxxx */
+ DECODE_EMULATE (0xffc0, 0x4200, t16_emulate_loregs_rwflags),
+ /* CMP (register) 0100 0010 10xx xxxx */
+ /* CMN (register) 0100 0010 11xx xxxx */
+ DECODE_EMULATE (0xff80, 0x4280, t16_emulate_loregs_rwflags),
+ /* AND (register) 0100 0000 00xx xxxx */
+ /* EOR (register) 0100 0000 01xx xxxx */
+ /* LSL (register) 0100 0000 10xx xxxx */
+ /* LSR (register) 0100 0000 11xx xxxx */
+ /* ASR (register) 0100 0001 00xx xxxx */
+ /* ADC (register) 0100 0001 01xx xxxx */
+ /* SBC (register) 0100 0001 10xx xxxx */
+ /* ROR (register) 0100 0001 11xx xxxx */
+ /* RSB (immediate) 0100 0010 01xx xxxx */
+ /* ORR (register) 0100 0011 00xx xxxx */
+ /* MUL 0100 0011 01xx xxxx */
+ /* BIC (register) 0100 0011 10xx xxxx */
+ /* MVN (register) 0100 0011 11xx xxxx */
+ DECODE_EMULATE (0xfc00, 0x4000, t16_emulate_loregs_noitrwflags),
+
+ /*
+ * Special data instructions and branch and exchange
+ * 0100 01xx xxxx xxxx
+ */
+
+ /* BLX pc 0100 0111 1111 1xxx */
+ DECODE_REJECT (0xfff8, 0x47f8),
+
+ /* BX (register) 0100 0111 0xxx xxxx */
+ /* BLX (register) 0100 0111 1xxx xxxx */
+ DECODE_SIMULATE (0xff00, 0x4700, t16_simulate_bxblx),
+
+ /* ADD pc, pc 0100 0100 1111 1111 */
+ DECODE_REJECT (0xffff, 0x44ff),
+
+ /* ADD (register) 0100 0100 xxxx xxxx */
+ /* CMP (register) 0100 0101 xxxx xxxx */
+ /* MOV (register) 0100 0110 xxxx xxxx */
+ DECODE_CUSTOM (0xfc00, 0x4400, t16_decode_hiregs),
+
+ /*
+ * Load from Literal Pool
+ * LDR (literal) 0100 1xxx xxxx xxxx
+ */
+ DECODE_SIMULATE (0xf800, 0x4800, t16_simulate_ldr_literal),
+
+ /*
+ * 16-bit Thumb Load/store instructions
+ * 0101 xxxx xxxx xxxx
+ * 011x xxxx xxxx xxxx
+ * 100x xxxx xxxx xxxx
+ */
+
+ /* STR (register) 0101 000x xxxx xxxx */
+ /* STRH (register) 0101 001x xxxx xxxx */
+ /* STRB (register) 0101 010x xxxx xxxx */
+ /* LDRSB (register) 0101 011x xxxx xxxx */
+ /* LDR (register) 0101 100x xxxx xxxx */
+ /* LDRH (register) 0101 101x xxxx xxxx */
+ /* LDRB (register) 0101 110x xxxx xxxx */
+ /* LDRSH (register) 0101 111x xxxx xxxx */
+ /* STR (immediate, Thumb) 0110 0xxx xxxx xxxx */
+ /* LDR (immediate, Thumb) 0110 1xxx xxxx xxxx */
+ /* STRB (immediate, Thumb) 0111 0xxx xxxx xxxx */
+ /* LDRB (immediate, Thumb) 0111 1xxx xxxx xxxx */
+ DECODE_EMULATE (0xc000, 0x4000, t16_emulate_loregs_rwflags),
+ /* STRH (immediate, Thumb) 1000 0xxx xxxx xxxx */
+ /* LDRH (immediate, Thumb) 1000 1xxx xxxx xxxx */
+ DECODE_EMULATE (0xf000, 0x8000, t16_emulate_loregs_rwflags),
+ /* STR (immediate, Thumb) 1001 0xxx xxxx xxxx */
+ /* LDR (immediate, Thumb) 1001 1xxx xxxx xxxx */
+ DECODE_SIMULATE (0xf000, 0x9000, t16_simulate_ldrstr_sp_relative),
+
+ /*
+ * Generate PC-/SP-relative address
+ * ADR (literal) 1010 0xxx xxxx xxxx
+ * ADD (SP plus immediate) 1010 1xxx xxxx xxxx
+ */
+ DECODE_SIMULATE (0xf000, 0xa000, t16_simulate_reladr),
+
+ /*
+ * Miscellaneous 16-bit instructions
+ * 1011 xxxx xxxx xxxx
+ */
+ DECODE_TABLE (0xf000, 0xb000, t16_table_1011),
+
+ /* STM 1100 0xxx xxxx xxxx */
+ /* LDM 1100 1xxx xxxx xxxx */
+ DECODE_EMULATE (0xf000, 0xc000, t16_emulate_loregs_rwflags),
+
+ /*
+ * Conditional branch, and Supervisor Call
+ */
+
+ /* Permanently UNDEFINED 1101 1110 xxxx xxxx */
+ /* SVC 1101 1111 xxxx xxxx */
+ DECODE_REJECT (0xfe00, 0xde00),
+
+ /* Conditional branch 1101 xxxx xxxx xxxx */
+ DECODE_CUSTOM (0xf000, 0xd000, t16_decode_cond_branch),
+
+ /*
+ * Unconditional branch
+ * B 1110 0xxx xxxx xxxx
+ */
+ DECODE_SIMULATE (0xf800, 0xe000, t16_simulate_branch),
+
+ DECODE_END
+};
+
+static unsigned long __kprobes thumb_check_cc(unsigned long cpsr)
+{
+ if (unlikely(in_it_block(cpsr)))
+ return kprobe_condition_checks[current_cond(cpsr)](cpsr);
+ return true;
+}
+
+static void __kprobes thumb16_singlestep(struct kprobe *p, struct pt_regs *regs)
+{
+ regs->ARM_pc += 2;
+ p->ainsn.insn_handler(p, regs);
+ regs->ARM_cpsr = it_advance(regs->ARM_cpsr);
+}
+
+static void __kprobes thumb32_singlestep(struct kprobe *p, struct pt_regs *regs)
+{
+ regs->ARM_pc += 4;
+ p->ainsn.insn_handler(p, regs);
+ regs->ARM_cpsr = it_advance(regs->ARM_cpsr);
+}
+
+enum kprobe_insn __kprobes
+thumb16_kprobe_decode_insn(kprobe_opcode_t insn, struct arch_specific_insn *asi)
+{
+ asi->insn_singlestep = thumb16_singlestep;
+ asi->insn_check_cc = thumb_check_cc;
+ return kprobe_decode_insn(insn, asi, kprobe_decode_thumb16_table, true);
+}
+
+enum kprobe_insn __kprobes
+thumb32_kprobe_decode_insn(kprobe_opcode_t insn, struct arch_specific_insn *asi)
+{
+ asi->insn_singlestep = thumb32_singlestep;
+ asi->insn_check_cc = thumb_check_cc;
+ return kprobe_decode_insn(insn, asi, kprobe_decode_thumb32_table, true);
+}
diff --git a/arch/arm/kernel/kprobes.c b/arch/arm/kernel/kprobes.c
index 1656c87501c0..129c1163248b 100644
--- a/arch/arm/kernel/kprobes.c
+++ b/arch/arm/kernel/kprobes.c
@@ -28,14 +28,16 @@
#include <asm/traps.h>
#include <asm/cacheflush.h>
+#include "kprobes.h"
+
#define MIN_STACK_SIZE(addr) \
min((unsigned long)MAX_STACK_SIZE, \
(unsigned long)current_thread_info() + THREAD_START_SP - (addr))
-#define flush_insns(addr, cnt) \
+#define flush_insns(addr, size) \
flush_icache_range((unsigned long)(addr), \
(unsigned long)(addr) + \
- sizeof(kprobe_opcode_t) * (cnt))
+ (size))
/* Used as a marker in ARM_pc to note when we're in a jprobe. */
#define JPROBE_MAGIC_ADDR 0xffffffff
@@ -49,16 +51,35 @@ int __kprobes arch_prepare_kprobe(struct kprobe *p)
kprobe_opcode_t insn;
kprobe_opcode_t tmp_insn[MAX_INSN_SIZE];
unsigned long addr = (unsigned long)p->addr;
+ bool thumb;
+ kprobe_decode_insn_t *decode_insn;
int is;
- if (addr & 0x3 || in_exception_text(addr))
+ if (in_exception_text(addr))
return -EINVAL;
+#ifdef CONFIG_THUMB2_KERNEL
+ thumb = true;
+ addr &= ~1; /* Bit 0 would normally be set to indicate Thumb code */
+ insn = ((u16 *)addr)[0];
+ if (is_wide_instruction(insn)) {
+ insn <<= 16;
+ insn |= ((u16 *)addr)[1];
+ decode_insn = thumb32_kprobe_decode_insn;
+ } else
+ decode_insn = thumb16_kprobe_decode_insn;
+#else /* !CONFIG_THUMB2_KERNEL */
+ thumb = false;
+ if (addr & 0x3)
+ return -EINVAL;
insn = *p->addr;
+ decode_insn = arm_kprobe_decode_insn;
+#endif
+
p->opcode = insn;
p->ainsn.insn = tmp_insn;
- switch (arm_kprobe_decode_insn(insn, &p->ainsn)) {
+ switch ((*decode_insn)(insn, &p->ainsn)) {
case INSN_REJECTED: /* not supported */
return -EINVAL;
@@ -68,7 +89,10 @@ int __kprobes arch_prepare_kprobe(struct kprobe *p)
return -ENOMEM;
for (is = 0; is < MAX_INSN_SIZE; ++is)
p->ainsn.insn[is] = tmp_insn[is];
- flush_insns(p->ainsn.insn, MAX_INSN_SIZE);
+ flush_insns(p->ainsn.insn,
+ sizeof(p->ainsn.insn[0]) * MAX_INSN_SIZE);
+ p->ainsn.insn_fn = (kprobe_insn_fn_t *)
+ ((uintptr_t)p->ainsn.insn | thumb);
break;
case INSN_GOOD_NO_SLOT: /* instruction doesn't need insn slot */
@@ -79,24 +103,88 @@ int __kprobes arch_prepare_kprobe(struct kprobe *p)
return 0;
}
+#ifdef CONFIG_THUMB2_KERNEL
+
+/*
+ * For a 32-bit Thumb breakpoint spanning two memory words we need to take
+ * special precautions to insert the breakpoint atomically, especially on SMP
+ * systems. This is achieved by calling this arming function using stop_machine.
+ */
+static int __kprobes set_t32_breakpoint(void *addr)
+{
+ ((u16 *)addr)[0] = KPROBE_THUMB32_BREAKPOINT_INSTRUCTION >> 16;
+ ((u16 *)addr)[1] = KPROBE_THUMB32_BREAKPOINT_INSTRUCTION & 0xffff;
+ flush_insns(addr, 2*sizeof(u16));
+ return 0;
+}
+
+void __kprobes arch_arm_kprobe(struct kprobe *p)
+{
+ uintptr_t addr = (uintptr_t)p->addr & ~1; /* Remove any Thumb flag */
+
+ if (!is_wide_instruction(p->opcode)) {
+ *(u16 *)addr = KPROBE_THUMB16_BREAKPOINT_INSTRUCTION;
+ flush_insns(addr, sizeof(u16));
+ } else if (addr & 2) {
+ /* A 32-bit instruction spanning two words needs special care */
+ stop_machine(set_t32_breakpoint, (void *)addr, &cpu_online_map);
+ } else {
+ /* Word aligned 32-bit instruction can be written atomically */
+ u32 bkp = KPROBE_THUMB32_BREAKPOINT_INSTRUCTION;
+#ifndef __ARMEB__ /* Swap halfwords for little-endian */
+ bkp = (bkp >> 16) | (bkp << 16);
+#endif
+ *(u32 *)addr = bkp;
+ flush_insns(addr, sizeof(u32));
+ }
+}
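The halfword swap above is purely an endianness detail; a worked example with a made-up breakpoint value:

	/*
	 * If the 32-bit breakpoint value were 0xAABBCCDD, the instruction
	 * stream needs halfword 0xAABB at the lower address and 0xCCDD at
	 * the higher one.  A little-endian word store of 0xAABBCCDD would
	 * put 0xCCDD first, so the value is rotated to 0xCCDDAABB before
	 * the single (atomic) word write; big-endian needs no swap.
	 */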
+
+#else /* !CONFIG_THUMB2_KERNEL */
+
void __kprobes arch_arm_kprobe(struct kprobe *p)
{
- *p->addr = KPROBE_BREAKPOINT_INSTRUCTION;
- flush_insns(p->addr, 1);
+ kprobe_opcode_t insn = p->opcode;
+ kprobe_opcode_t brkp = KPROBE_ARM_BREAKPOINT_INSTRUCTION;
+ if (insn >= 0xe0000000)
+ brkp |= 0xe0000000; /* Unconditional instruction */
+ else
+ brkp |= insn & 0xf0000000; /* Copy condition from insn */
+ *p->addr = brkp;
+ flush_insns(p->addr, sizeof(p->addr[0]));
}
+#endif /* !CONFIG_THUMB2_KERNEL */
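Copying the condition field into the ARM breakpoint is deliberate; a short worked example (encodings illustrative):

	/*
	 * Probing a "bne" (condition field 0x1) produces a breakpoint that is
	 * itself conditional on NE.  If the CPU skips the conditional
	 * undefined instruction when NE fails, the probed instruction is
	 * correctly "not executed" at no cost; if the trap is taken anyway,
	 * the insn_check_cc test in kprobe_handler() spots the failed
	 * condition and just steps past the instruction.  Unconditional
	 * encodings (cond 0xf, e.g. PLD) get an always-executed 0xe
	 * breakpoint instead.
	 */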
+
/*
* The actual disarming is done here on each CPU and synchronized using
* stop_machine. This synchronization is necessary on SMP to avoid removing
* a probe between the moment the 'Undefined Instruction' exception is raised
* and the moment the exception handler reads the faulting instruction from
- * memory.
+ * memory. It is also needed to atomically set the two half-words of a 32-bit
+ * Thumb breakpoint.
*/
int __kprobes __arch_disarm_kprobe(void *p)
{
struct kprobe *kp = p;
+#ifdef CONFIG_THUMB2_KERNEL
+ u16 *addr = (u16 *)((uintptr_t)kp->addr & ~1);
+ kprobe_opcode_t insn = kp->opcode;
+ unsigned int len;
+
+ if (is_wide_instruction(insn)) {
+ ((u16 *)addr)[0] = insn>>16;
+ ((u16 *)addr)[1] = insn;
+ len = 2*sizeof(u16);
+ } else {
+ ((u16 *)addr)[0] = insn;
+ len = sizeof(u16);
+ }
+ flush_insns(addr, len);
+
+#else /* !CONFIG_THUMB2_KERNEL */
*kp->addr = kp->opcode;
- flush_insns(kp->addr, 1);
+ flush_insns(kp->addr, sizeof(kp->addr[0]));
+#endif
return 0;
}
@@ -130,12 +218,24 @@ static void __kprobes set_current_kprobe(struct kprobe *p)
__get_cpu_var(current_kprobe) = p;
}
-static void __kprobes singlestep(struct kprobe *p, struct pt_regs *regs,
- struct kprobe_ctlblk *kcb)
+static void __kprobes
+singlestep_skip(struct kprobe *p, struct pt_regs *regs)
{
+#ifdef CONFIG_THUMB2_KERNEL
+ regs->ARM_cpsr = it_advance(regs->ARM_cpsr);
+ if (is_wide_instruction(p->opcode))
+ regs->ARM_pc += 4;
+ else
+ regs->ARM_pc += 2;
+#else
regs->ARM_pc += 4;
- if (p->ainsn.insn_check_cc(regs->ARM_cpsr))
- p->ainsn.insn_handler(p, regs);
+#endif
+}
+
+static inline void __kprobes
+singlestep(struct kprobe *p, struct pt_regs *regs, struct kprobe_ctlblk *kcb)
+{
+ p->ainsn.insn_singlestep(p, regs);
}
/*
@@ -149,11 +249,23 @@ void __kprobes kprobe_handler(struct pt_regs *regs)
{
struct kprobe *p, *cur;
struct kprobe_ctlblk *kcb;
- kprobe_opcode_t *addr = (kprobe_opcode_t *)regs->ARM_pc;
kcb = get_kprobe_ctlblk();
cur = kprobe_running();
- p = get_kprobe(addr);
+
+#ifdef CONFIG_THUMB2_KERNEL
+ /*
+ * First look for a probe which was registered using an address with
+ * bit 0 set; this is the usual situation for pointers to Thumb code.
+ * If not found, fall back to looking for one with bit 0 clear.
+ */
+ p = get_kprobe((kprobe_opcode_t *)(regs->ARM_pc | 1));
+ if (!p)
+ p = get_kprobe((kprobe_opcode_t *)regs->ARM_pc);
+
+#else /* !CONFIG_THUMB2_KERNEL */
+ p = get_kprobe((kprobe_opcode_t *)regs->ARM_pc);
+#endif
if (p) {
if (cur) {
@@ -173,7 +285,8 @@ void __kprobes kprobe_handler(struct pt_regs *regs)
/* impossible cases */
BUG();
}
- } else {
+ } else if (p->ainsn.insn_check_cc(regs->ARM_cpsr)) {
+ /* Probe hit and conditional execution check ok. */
set_current_kprobe(p);
kcb->kprobe_status = KPROBE_HIT_ACTIVE;
@@ -193,6 +306,13 @@ void __kprobes kprobe_handler(struct pt_regs *regs)
}
reset_current_kprobe();
}
+ } else {
+ /*
+ * Probe hit but conditional execution check failed,
+ * so just skip the instruction and continue as if
+ * nothing had happened.
+ */
+ singlestep_skip(p, regs);
}
} else if (cur) {
/* We probably hit a jprobe. Call its break handler. */
@@ -300,7 +420,11 @@ void __naked __kprobes kretprobe_trampoline(void)
"bl trampoline_handler \n\t"
"mov lr, r0 \n\t"
"ldmia sp!, {r0 - r11} \n\t"
+#ifdef CONFIG_THUMB2_KERNEL
+ "bx lr \n\t"
+#else
"mov pc, lr \n\t"
+#endif
: : : "memory");
}
@@ -378,11 +502,22 @@ int __kprobes setjmp_pre_handler(struct kprobe *p, struct pt_regs *regs)
struct jprobe *jp = container_of(p, struct jprobe, kp);
struct kprobe_ctlblk *kcb = get_kprobe_ctlblk();
long sp_addr = regs->ARM_sp;
+ long cpsr;
kcb->jprobe_saved_regs = *regs;
memcpy(kcb->jprobes_stack, (void *)sp_addr, MIN_STACK_SIZE(sp_addr));
regs->ARM_pc = (long)jp->entry;
- regs->ARM_cpsr |= PSR_I_BIT;
+
+ cpsr = regs->ARM_cpsr | PSR_I_BIT;
+#ifdef CONFIG_THUMB2_KERNEL
+ /* Set correct Thumb state in cpsr */
+ if (regs->ARM_pc & 1)
+ cpsr |= PSR_T_BIT;
+ else
+ cpsr &= ~PSR_T_BIT;
+#endif
+ regs->ARM_cpsr = cpsr;
+
preempt_disable();
return 1;
}
@@ -404,7 +539,12 @@ void __kprobes jprobe_return(void)
* This is to prevent any simulated instruction from writing
* over the regs when they are accessing the stack.
*/
+#ifdef CONFIG_THUMB2_KERNEL
+ "sub r0, %0, %1 \n\t"
+ "mov sp, r0 \n\t"
+#else
"sub sp, %0, %1 \n\t"
+#endif
"ldr r0, ="__stringify(JPROBE_MAGIC_ADDR)"\n\t"
"str %0, [sp, %2] \n\t"
"str r0, [sp, %3] \n\t"
@@ -415,15 +555,28 @@ void __kprobes jprobe_return(void)
* Return to the context saved by setjmp_pre_handler
* and restored by longjmp_break_handler.
*/
+#ifdef CONFIG_THUMB2_KERNEL
+ "ldr lr, [sp, %2] \n\t" /* lr = saved sp */
+ "ldrd r0, r1, [sp, %5] \n\t" /* r0,r1 = saved lr,pc */
+ "ldr r2, [sp, %4] \n\t" /* r2 = saved psr */
+ "stmdb lr!, {r0, r1, r2} \n\t" /* push saved lr and */
+ /* rfe context */
+ "ldmia sp, {r0 - r12} \n\t"
+ "mov sp, lr \n\t"
+ "ldr lr, [sp], #4 \n\t"
+ "rfeia sp! \n\t"
+#else
"ldr r0, [sp, %4] \n\t"
"msr cpsr_cxsf, r0 \n\t"
"ldmia sp, {r0 - pc} \n\t"
+#endif
:
: "r" (kcb->jprobe_saved_regs.ARM_sp),
"I" (sizeof(struct pt_regs) * 2),
"J" (offsetof(struct pt_regs, ARM_sp)),
"J" (offsetof(struct pt_regs, ARM_pc)),
- "J" (offsetof(struct pt_regs, ARM_cpsr))
+ "J" (offsetof(struct pt_regs, ARM_cpsr)),
+ "J" (offsetof(struct pt_regs, ARM_lr))
: "memory", "cc");
}
@@ -460,17 +613,44 @@ int __kprobes arch_trampoline_kprobe(struct kprobe *p)
return 0;
}
-static struct undef_hook kprobes_break_hook = {
+#ifdef CONFIG_THUMB2_KERNEL
+
+static struct undef_hook kprobes_thumb16_break_hook = {
+ .instr_mask = 0xffff,
+ .instr_val = KPROBE_THUMB16_BREAKPOINT_INSTRUCTION,
+ .cpsr_mask = MODE_MASK,
+ .cpsr_val = SVC_MODE,
+ .fn = kprobe_trap_handler,
+};
+
+static struct undef_hook kprobes_thumb32_break_hook = {
.instr_mask = 0xffffffff,
- .instr_val = KPROBE_BREAKPOINT_INSTRUCTION,
+ .instr_val = KPROBE_THUMB32_BREAKPOINT_INSTRUCTION,
.cpsr_mask = MODE_MASK,
.cpsr_val = SVC_MODE,
.fn = kprobe_trap_handler,
};
+#else /* !CONFIG_THUMB2_KERNEL */
+
+static struct undef_hook kprobes_arm_break_hook = {
+ .instr_mask = 0x0fffffff,
+ .instr_val = KPROBE_ARM_BREAKPOINT_INSTRUCTION,
+ .cpsr_mask = MODE_MASK,
+ .cpsr_val = SVC_MODE,
+ .fn = kprobe_trap_handler,
+};
+
+#endif /* !CONFIG_THUMB2_KERNEL */
+
int __init arch_init_kprobes()
{
arm_kprobe_decode_init();
- register_undef_hook(&kprobes_break_hook);
+#ifdef CONFIG_THUMB2_KERNEL
+ register_undef_hook(&kprobes_thumb16_break_hook);
+ register_undef_hook(&kprobes_thumb32_break_hook);
+#else
+ register_undef_hook(&kprobes_arm_break_hook);
+#endif
return 0;
}
diff --git a/arch/arm/kernel/kprobes.h b/arch/arm/kernel/kprobes.h
new file mode 100644
index 000000000000..a6aeda0a6c7f
--- /dev/null
+++ b/arch/arm/kernel/kprobes.h
@@ -0,0 +1,420 @@
+/*
+ * arch/arm/kernel/kprobes.h
+ *
+ * Copyright (C) 2011 Jon Medhurst <tixy@yxit.co.uk>.
+ *
+ * Some contents moved here from arch/arm/include/asm/kprobes.h which is
+ * Copyright (C) 2006, 2007 Motorola Inc.
+ *
+ * This program is free software; you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License version 2 as
+ * published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * General Public License for more details.
+ */
+
+#ifndef _ARM_KERNEL_KPROBES_H
+#define _ARM_KERNEL_KPROBES_H
+
+/*
+ * These undefined instructions must be unique and
+ * reserved solely for kprobes' use.
+ */
+#define KPROBE_ARM_BREAKPOINT_INSTRUCTION 0x07f001f8
+#define KPROBE_THUMB16_BREAKPOINT_INSTRUCTION 0xde18
+#define KPROBE_THUMB32_BREAKPOINT_INSTRUCTION 0xf7f0a018
+
+
+enum kprobe_insn {
+ INSN_REJECTED,
+ INSN_GOOD,
+ INSN_GOOD_NO_SLOT
+};
+
+typedef enum kprobe_insn (kprobe_decode_insn_t)(kprobe_opcode_t,
+ struct arch_specific_insn *);
+
+#ifdef CONFIG_THUMB2_KERNEL
+
+enum kprobe_insn thumb16_kprobe_decode_insn(kprobe_opcode_t,
+ struct arch_specific_insn *);
+enum kprobe_insn thumb32_kprobe_decode_insn(kprobe_opcode_t,
+ struct arch_specific_insn *);
+
+#else /* !CONFIG_THUMB2_KERNEL */
+
+enum kprobe_insn arm_kprobe_decode_insn(kprobe_opcode_t,
+ struct arch_specific_insn *);
+#endif
+
+void __init arm_kprobe_decode_init(void);
+
+extern kprobe_check_cc * const kprobe_condition_checks[16];
+
+
+#if __LINUX_ARM_ARCH__ >= 7
+
+/* str_pc_offset is architecturally defined from ARMv7 onwards */
+#define str_pc_offset 8
+#define find_str_pc_offset()
+
+#else /* __LINUX_ARM_ARCH__ < 7 */
+
+/* We need a run-time check to determine str_pc_offset */
+extern int str_pc_offset;
+void __init find_str_pc_offset(void);
+
+#endif
+
+
+/*
+ * Update ITSTATE after normal execution of an IT block instruction.
+ *
+ * The 8 IT state bits are split into two parts in CPSR:
+ * ITSTATE<1:0> are in CPSR<26:25>
+ * ITSTATE<7:2> are in CPSR<15:10>
+ */
+static inline unsigned long it_advance(unsigned long cpsr)
+ {
+ if ((cpsr & 0x06000400) == 0) {
+ /* ITSTATE<2:0> == 0 means end of IT block, so clear IT state */
+ cpsr &= ~PSR_IT_MASK;
+ } else {
+ /* We need to shift left ITSTATE<4:0> */
+ const unsigned long mask = 0x06001c00; /* Mask ITSTATE<4:0> */
+ unsigned long it = cpsr & mask;
+ it <<= 1;
+ it |= it >> (27 - 10); /* Carry ITSTATE<2> to correct place */
+ it &= mask;
+ cpsr &= ~mask;
+ cpsr |= it;
+ }
+ return cpsr;
+}
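
Editorial note: the IT-state handling above is easiest to see with a worked example. The sketch below copies it_advance() and the standard PSR_IT_MASK value (0x0600fc00) to host code and steps a two-instruction "ITT EQ" block, whose initial ITSTATE of 0x04 maps to CPSR bit 10:

	#include <assert.h>

	#define PSR_IT_MASK	0x0600fc00	/* CPSR<26:25> and CPSR<15:10> */

	static unsigned long it_advance(unsigned long cpsr)
	{
		if ((cpsr & 0x06000400) == 0) {
			/* ITSTATE<2:0> == 0 means end of IT block, so clear IT state */
			cpsr &= ~PSR_IT_MASK;
		} else {
			const unsigned long mask = 0x06001c00;	/* Mask ITSTATE<4:0> */
			unsigned long it = cpsr & mask;
			it <<= 1;
			it |= it >> (27 - 10);
			it &= mask;
			cpsr &= ~mask;
			cpsr |= it;
		}
		return cpsr;
	}

	int main(void)
	{
		/*
		 * "ITT EQ": ITSTATE = 0x04, i.e. CPSR<10> set.  After the first
		 * instruction in the block the state becomes ITSTATE = 0x08
		 * (CPSR<11> set); after the second it clears completely.
		 */
		unsigned long cpsr = 0x00000400;

		cpsr = it_advance(cpsr);
		assert((cpsr & PSR_IT_MASK) == 0x00000800);
		cpsr = it_advance(cpsr);
		assert((cpsr & PSR_IT_MASK) == 0);
		return 0;
	}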
+
+static inline void __kprobes bx_write_pc(long pcv, struct pt_regs *regs)
+{
+ long cpsr = regs->ARM_cpsr;
+ if (pcv & 0x1) {
+ cpsr |= PSR_T_BIT;
+ pcv &= ~0x1;
+ } else {
+ cpsr &= ~PSR_T_BIT;
+ pcv &= ~0x2; /* Avoid UNPREDICTABLE address alignment */
+ }
+ regs->ARM_cpsr = cpsr;
+ regs->ARM_pc = pcv;
+}
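
Editorial note: a host-side sketch of bx_write_pc() with a mocked-up pt_regs containing only the two fields used here and the standard PSR_T_BIT value (0x20); the target addresses are arbitrary examples:

	#include <assert.h>

	#define PSR_T_BIT	0x00000020

	struct pt_regs {
		long ARM_cpsr;
		long ARM_pc;
	};

	static void bx_write_pc(long pcv, struct pt_regs *regs)
	{
		long cpsr = regs->ARM_cpsr;
		if (pcv & 0x1) {
			cpsr |= PSR_T_BIT;	/* odd target: switch to Thumb state */
			pcv &= ~0x1;
		} else {
			cpsr &= ~PSR_T_BIT;	/* even target: ARM state */
			pcv &= ~0x2;		/* avoid UNPREDICTABLE alignment */
		}
		regs->ARM_cpsr = cpsr;
		regs->ARM_pc = pcv;
	}

	int main(void)
	{
		struct pt_regs regs = { .ARM_cpsr = 0, .ARM_pc = 0 };

		bx_write_pc(0x8001, &regs);		/* Thumb target */
		assert(regs.ARM_pc == 0x8000 && (regs.ARM_cpsr & PSR_T_BIT));

		bx_write_pc(0x8006, &regs);		/* ARM target, bit 1 dropped */
		assert(regs.ARM_pc == 0x8004 && !(regs.ARM_cpsr & PSR_T_BIT));
		return 0;
	}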
+
+
+#if __LINUX_ARM_ARCH__ >= 6
+
+/* Kernels built for >= ARMv6 should never run on <= ARMv5 hardware, so... */
+#define load_write_pc_interworks true
+#define test_load_write_pc_interworking()
+
+#else /* __LINUX_ARM_ARCH__ < 6 */
+
+/* We need run-time testing to determine if load_write_pc() should interwork. */
+extern bool load_write_pc_interworks;
+void __init test_load_write_pc_interworking(void);
+
+#endif
+
+static inline void __kprobes load_write_pc(long pcv, struct pt_regs *regs)
+{
+ if (load_write_pc_interworks)
+ bx_write_pc(pcv, regs);
+ else
+ regs->ARM_pc = pcv;
+}
+
+
+#if __LINUX_ARM_ARCH__ >= 7
+
+#define alu_write_pc_interworks true
+#define test_alu_write_pc_interworking()
+
+#elif __LINUX_ARM_ARCH__ <= 5
+
+/* Kernels built for <= ARMv5 should never run on >= ARMv6 hardware, so... */
+#define alu_write_pc_interworks false
+#define test_alu_write_pc_interworking()
+
+#else /* __LINUX_ARM_ARCH__ == 6 */
+
+/* We could be an ARMv6 binary on ARMv7 hardware so we need a run-time check. */
+extern bool alu_write_pc_interworks;
+void __init test_alu_write_pc_interworking(void);
+
+#endif /* __LINUX_ARM_ARCH__ == 6 */
+
+static inline void __kprobes alu_write_pc(long pcv, struct pt_regs *regs)
+{
+ if (alu_write_pc_interworks)
+ bx_write_pc(pcv, regs);
+ else
+ regs->ARM_pc = pcv;
+}
+
+
+void __kprobes kprobe_simulate_nop(struct kprobe *p, struct pt_regs *regs);
+void __kprobes kprobe_emulate_none(struct kprobe *p, struct pt_regs *regs);
+
+enum kprobe_insn __kprobes
+kprobe_decode_ldmstm(kprobe_opcode_t insn, struct arch_specific_insn *asi);
+
+/*
+ * Test if a load/store instruction writes back the address register,
+ * i.e. if P (bit 24) == 0 or W (bit 21) == 1
+ */
+#define is_writeback(insn) ((insn ^ 0x01000000) & 0x01200000)
+
+/*
+ * The following definitions and macros are used to build instruction
+ * decoding tables for use by kprobe_decode_insn.
+ *
+ * These tables are a concatenation of entries each of which consist of one of
+ * the decode_* structs. All of the fields in every type of decode structure
+ * are of the union type decode_item, therefore the entire decode table can be
+ * viewed as an array of these and declared like:
+ *
+ * static const union decode_item table_name[] = {};
+ *
+ * In order to construct each entry in the table, macros are used to
+ * initialise a number of sequential decode_item values in a layout which
+ * matches the relevant struct. E.g. DECODE_SIMULATE initialises a struct
+ * decode_simulate by initialising four decode_item objects like this...
+ *
+ * {.bits = _type},
+ * {.bits = _mask},
+ * {.bits = _value},
+ * {.handler = _handler},
+ *
+ * Initialising a specified member of the union means that the compiler
+ * will produce a warning if the argument is of an incorrect type.
+ *
+ * Below is a list of each of the macros used to initialise entries and a
+ * description of the action performed when that entry is matched to an
+ * instruction. A match is found when (instruction & mask) == value.
+ *
+ * DECODE_TABLE(mask, value, table)
+ * Instruction decoding jumps to parsing the new sub-table 'table'.
+ *
+ * DECODE_CUSTOM(mask, value, decoder)
+ * The custom function 'decoder' is called to perform the complete
+ * decoding of an instruction.
+ *
+ * DECODE_SIMULATE(mask, value, handler)
+ * Set the probe's instruction handler to 'handler'; this will be used
+ * to simulate the instruction when the probe is hit. Decoding returns
+ * with INSN_GOOD_NO_SLOT.
+ *
+ * DECODE_EMULATE(mask, value, handler)
+ * Set the probe's instruction handler to 'handler'; this will be used
+ * to emulate the instruction when the probe is hit. The modified
+ * instruction (see below) is placed in the probes instruction slot so it
+ * may be called by the emulation code. Decoding returns with INSN_GOOD.
+ *
+ * DECODE_REJECT(mask, value)
+ * Instruction decoding fails with INSN_REJECTED
+ *
+ * DECODE_OR(mask, value)
+ * This allows the mask/value test of multiple table entries to be
+ * logically ORed. Once an 'or' entry is matched the decoding action to
+ * be performed is that of the next entry which isn't an 'or'. E.g.
+ *
+ * DECODE_OR (mask1, value1)
+ * DECODE_OR (mask2, value2)
+ * DECODE_SIMULATE (mask3, value3, simulation_handler)
+ *
+ * This means that if any of the three mask/value pairs match the
+ * instruction being decoded, then 'simulation_handler' will be used
+ * for it.
+ *
+ * Both the SIMULATE and EMULATE macros have a second form which takes an
+ * additional 'regs' argument.
+ *
+ * DECODE_SIMULATEX(mask, value, handler, regs)
+ * DECODE_EMULATEX (mask, value, handler, regs)
+ *
+ * These are used to specify what kind of CPU register is encoded in each of the
+ * least significant 5 nibbles of the instruction being decoded. The regs value
+ * is specified using the REGS macro, this takes any of the REG_TYPE_* values
+ * from enum decode_reg_type as arguments; only the '*' part of the name is
+ * given. E.g.
+ *
+ * REGS(0, ANY, NOPC, 0, ANY)
+ *
+ * This indicates an instruction is encoded like:
+ *
+ * bits 19..16 ignore
+ * bits 15..12 any register allowed here
+ * bits 11.. 8 any register except PC allowed here
+ * bits 7.. 4 ignore
+ * bits 3.. 0 any register allowed here
+ *
+ * This register specification is checked after a decode table entry is found to
+ * match an instruction (through the mask/value test). Any invalid register then
+ * found in the instruction will cause decoding to fail with INSN_REJECTED. In
+ * the above example this would happen if bits 11..8 of the instruction were
+ * 1111, indicating R15 or PC.
+ *
+ * As well as checking for legal combinations of registers, this data is also
+ * used to modify the registers encoded in the instructions so that an
+ * emulation routine can use it. (See decode_regs() and INSN_NEW_BITS.)
+ *
+ * Here is a real example which matches ARM instructions of the form
+ * "AND <Rd>,<Rn>,<Rm>,<shift> <Rs>"
+ *
+ * DECODE_EMULATEX (0x0e000090, 0x00000010, emulate_rd12rn16rm0rs8_rwflags,
+ * REGS(ANY, ANY, NOPC, 0, ANY)),
+ * ^ ^ ^ ^
+ * Rn Rd Rs Rm
+ *
+ * Decoding the instruction "AND R4, R5, R6, ASL R15" will be rejected because
+ * Rs == R15
+ *
+ * Decoding the instruction "AND R4, R5, R6, ASL R7" will be accepted and the
+ * instruction will be modified to "AND R0, R2, R3, ASL R1" and then placed into
+ * the kprobes instruction slot. This can then be called later by the handler
+ * function emulate_rd12rn16rm0rs8_rwflags in order to emulate the instruction.
+ */
+
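
Editorial illustration, not one of the tables this patch actually adds: a fragment built with these macros might look like the following. The NOP value is the canonical ARM NOP encoding with the condition field masked out, the handler is the kprobe_simulate_nop() declared above, and the OR/REJECT pair shows how several encodings can share one action:

	static const union decode_item example_arm_table[] = {
		/* NOP			cccc 0011 0010 0000 1111 0000 0000 0000 */
		DECODE_SIMULATE	(0x0fffffff, 0x0320f000, kprobe_simulate_nop),

		/* SWP/SWPB		cccc 0001 0x00 xxxx xxxx xxxx 1001 xxxx */
		DECODE_OR	(0x0fb000f0, 0x01000090),
		/* BKPT			1110 0001 0010 xxxx xxxx xxxx 0111 xxxx */
		DECODE_REJECT	(0xfff000f0, 0xe1200070),

		DECODE_END
	};

With the OR entry in place, both an SWP/SWPB and a BKPT decode to INSN_REJECTED, while a NOP is simulated without needing an instruction slot.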
+enum decode_type {
+ DECODE_TYPE_END,
+ DECODE_TYPE_TABLE,
+ DECODE_TYPE_CUSTOM,
+ DECODE_TYPE_SIMULATE,
+ DECODE_TYPE_EMULATE,
+ DECODE_TYPE_OR,
+ DECODE_TYPE_REJECT,
+ NUM_DECODE_TYPES /* Must be last enum */
+};
+
+#define DECODE_TYPE_BITS 4
+#define DECODE_TYPE_MASK ((1 << DECODE_TYPE_BITS) - 1)
+
+enum decode_reg_type {
+ REG_TYPE_NONE = 0, /* Not a register, ignore */
+ REG_TYPE_ANY, /* Any register allowed */
+ REG_TYPE_SAMEAS16, /* Register should be same as that at bits 19..16 */
+ REG_TYPE_SP, /* Register must be SP */
+ REG_TYPE_PC, /* Register must be PC */
+ REG_TYPE_NOSP, /* Register must not be SP */
+ REG_TYPE_NOSPPC, /* Register must not be SP or PC */
+ REG_TYPE_NOPC, /* Register must not be PC */
+ REG_TYPE_NOPCWB, /* No PC if load/store write-back flag also set */
+
+ /* The following types are used when the encoding for PC indicates
+ * another instruction form. This distinction only matters for test
+ * case coverage checks.
+ */
+ REG_TYPE_NOPCX, /* Register must not be PC */
+ REG_TYPE_NOSPPCX, /* Register must not be SP or PC */
+
+ /* Alias to allow '0' arg to be used in REGS macro. */
+ REG_TYPE_0 = REG_TYPE_NONE
+};
+
+#define REGS(r16, r12, r8, r4, r0) \
+ ((REG_TYPE_##r16) << 16) + \
+ ((REG_TYPE_##r12) << 12) + \
+ ((REG_TYPE_##r8) << 8) + \
+ ((REG_TYPE_##r4) << 4) + \
+ (REG_TYPE_##r0)
+
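
Editorial note: the REGS() packing is easy to sanity-check on the host by copying the enum and the macro verbatim; the expected values below correspond to the REGS(ANY, ANY, NOPC, 0, ANY) form used in the documentation comment above:

	#include <assert.h>

	/* Copied from the definitions above, host-side, to show the packing. */
	enum decode_reg_type {
		REG_TYPE_NONE = 0, REG_TYPE_ANY, REG_TYPE_SAMEAS16, REG_TYPE_SP,
		REG_TYPE_PC, REG_TYPE_NOSP, REG_TYPE_NOSPPC, REG_TYPE_NOPC,
		REG_TYPE_NOPCWB, REG_TYPE_NOPCX, REG_TYPE_NOSPPCX,
		REG_TYPE_0 = REG_TYPE_NONE
	};

	#define REGS(r16, r12, r8, r4, r0) \
		((REG_TYPE_##r16) << 16) + \
		((REG_TYPE_##r12) << 12) + \
		((REG_TYPE_##r8) << 8) + \
		((REG_TYPE_##r4) << 4) + \
		(REG_TYPE_##r0)

	int main(void)
	{
		/* One register-type nibble per operand field of the instruction. */
		assert(REGS(ANY, ANY, NOPC, 0, ANY) == 0x11701);
		assert(REGS(0, ANY, NOPC, 0, ANY) == 0x01701);
		return 0;
	}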
+union decode_item {
+ u32 bits;
+ const union decode_item *table;
+ kprobe_insn_handler_t *handler;
+ kprobe_decode_insn_t *decoder;
+};
+
+
+#define DECODE_END \
+ {.bits = DECODE_TYPE_END}
+
+
+struct decode_header {
+ union decode_item type_regs;
+ union decode_item mask;
+ union decode_item value;
+};
+
+#define DECODE_HEADER(_type, _mask, _value, _regs) \
+ {.bits = (_type) | ((_regs) << DECODE_TYPE_BITS)}, \
+ {.bits = (_mask)}, \
+ {.bits = (_value)}
+
+
+struct decode_table {
+ struct decode_header header;
+ union decode_item table;
+};
+
+#define DECODE_TABLE(_mask, _value, _table) \
+ DECODE_HEADER(DECODE_TYPE_TABLE, _mask, _value, 0), \
+ {.table = (_table)}
+
+
+struct decode_custom {
+ struct decode_header header;
+ union decode_item decoder;
+};
+
+#define DECODE_CUSTOM(_mask, _value, _decoder) \
+ DECODE_HEADER(DECODE_TYPE_CUSTOM, _mask, _value, 0), \
+ {.decoder = (_decoder)}
+
+
+struct decode_simulate {
+ struct decode_header header;
+ union decode_item handler;
+};
+
+#define DECODE_SIMULATEX(_mask, _value, _handler, _regs) \
+ DECODE_HEADER(DECODE_TYPE_SIMULATE, _mask, _value, _regs), \
+ {.handler = (_handler)}
+
+#define DECODE_SIMULATE(_mask, _value, _handler) \
+ DECODE_SIMULATEX(_mask, _value, _handler, 0)
+
+
+struct decode_emulate {
+ struct decode_header header;
+ union decode_item handler;
+};
+
+#define DECODE_EMULATEX(_mask, _value, _handler, _regs) \
+ DECODE_HEADER(DECODE_TYPE_EMULATE, _mask, _value, _regs), \
+ {.handler = (_handler)}
+
+#define DECODE_EMULATE(_mask, _value, _handler) \
+ DECODE_EMULATEX(_mask, _value, _handler, 0)
+
+
+struct decode_or {
+ struct decode_header header;
+};
+
+#define DECODE_OR(_mask, _value) \
+ DECODE_HEADER(DECODE_TYPE_OR, _mask, _value, 0)
+
+
+struct decode_reject {
+ struct decode_header header;
+};
+
+#define DECODE_REJECT(_mask, _value) \
+ DECODE_HEADER(DECODE_TYPE_REJECT, _mask, _value, 0)
+
+
+int kprobe_decode_insn(kprobe_opcode_t insn, struct arch_specific_insn *asi,
+ const union decode_item *table, bool thumb16);
+
+
+#endif /* _ARM_KERNEL_KPROBES_H */
diff --git a/arch/arm/kernel/perf_event.c b/arch/arm/kernel/perf_event.c
index 8d8507858e5c..53c9c2610cbc 100644
--- a/arch/arm/kernel/perf_event.c
+++ b/arch/arm/kernel/perf_event.c
@@ -662,6 +662,12 @@ init_hw_perf_events(void)
case 0xC090: /* Cortex-A9 */
armpmu = armv7_a9_pmu_init();
break;
+ case 0xC050: /* Cortex-A5 */
+ armpmu = armv7_a5_pmu_init();
+ break;
+ case 0xC0F0: /* Cortex-A15 */
+ armpmu = armv7_a15_pmu_init();
+ break;
}
/* Intel CPUs [xscale]. */
} else if (0x69 == implementor) {
diff --git a/arch/arm/kernel/perf_event_v7.c b/arch/arm/kernel/perf_event_v7.c
index 4960686afb58..963317896c80 100644
--- a/arch/arm/kernel/perf_event_v7.c
+++ b/arch/arm/kernel/perf_event_v7.c
@@ -17,17 +17,23 @@
*/
#ifdef CONFIG_CPU_V7
-/* Common ARMv7 event types */
+/*
+ * Common ARMv7 event types
+ *
+ * Note: An implementation may not be able to count all of these events
+ * but the encodings are considered to be `reserved' in the case that
+ * they are not available.
+ */
enum armv7_perf_types {
ARMV7_PERFCTR_PMNC_SW_INCR = 0x00,
ARMV7_PERFCTR_IFETCH_MISS = 0x01,
ARMV7_PERFCTR_ITLB_MISS = 0x02,
- ARMV7_PERFCTR_DCACHE_REFILL = 0x03,
- ARMV7_PERFCTR_DCACHE_ACCESS = 0x04,
+ ARMV7_PERFCTR_DCACHE_REFILL = 0x03, /* L1 */
+ ARMV7_PERFCTR_DCACHE_ACCESS = 0x04, /* L1 */
ARMV7_PERFCTR_DTLB_REFILL = 0x05,
ARMV7_PERFCTR_DREAD = 0x06,
ARMV7_PERFCTR_DWRITE = 0x07,
-
+ ARMV7_PERFCTR_INSTR_EXECUTED = 0x08,
ARMV7_PERFCTR_EXC_TAKEN = 0x09,
ARMV7_PERFCTR_EXC_EXECUTED = 0x0A,
ARMV7_PERFCTR_CID_WRITE = 0x0B,
@@ -39,21 +45,30 @@ enum armv7_perf_types {
*/
ARMV7_PERFCTR_PC_WRITE = 0x0C,
ARMV7_PERFCTR_PC_IMM_BRANCH = 0x0D,
+ ARMV7_PERFCTR_PC_PROC_RETURN = 0x0E,
ARMV7_PERFCTR_UNALIGNED_ACCESS = 0x0F,
+
+ /* These events are defined by the PMUv2 supplement (ARM DDI 0457A). */
ARMV7_PERFCTR_PC_BRANCH_MIS_PRED = 0x10,
ARMV7_PERFCTR_CLOCK_CYCLES = 0x11,
-
- ARMV7_PERFCTR_PC_BRANCH_MIS_USED = 0x12,
+ ARMV7_PERFCTR_PC_BRANCH_PRED = 0x12,
+ ARMV7_PERFCTR_MEM_ACCESS = 0x13,
+ ARMV7_PERFCTR_L1_ICACHE_ACCESS = 0x14,
+ ARMV7_PERFCTR_L1_DCACHE_WB = 0x15,
+ ARMV7_PERFCTR_L2_DCACHE_ACCESS = 0x16,
+ ARMV7_PERFCTR_L2_DCACHE_REFILL = 0x17,
+ ARMV7_PERFCTR_L2_DCACHE_WB = 0x18,
+ ARMV7_PERFCTR_BUS_ACCESS = 0x19,
+ ARMV7_PERFCTR_MEMORY_ERROR = 0x1A,
+ ARMV7_PERFCTR_INSTR_SPEC = 0x1B,
+ ARMV7_PERFCTR_TTBR_WRITE = 0x1C,
+ ARMV7_PERFCTR_BUS_CYCLES = 0x1D,
ARMV7_PERFCTR_CPU_CYCLES = 0xFF
};
/* ARMv7 Cortex-A8 specific event types */
enum armv7_a8_perf_types {
- ARMV7_PERFCTR_INSTR_EXECUTED = 0x08,
-
- ARMV7_PERFCTR_PC_PROC_RETURN = 0x0E,
-
ARMV7_PERFCTR_WRITE_BUFFER_FULL = 0x40,
ARMV7_PERFCTR_L2_STORE_MERGED = 0x41,
ARMV7_PERFCTR_L2_STORE_BUFF = 0x42,
@@ -138,6 +153,39 @@ enum armv7_a9_perf_types {
ARMV7_PERFCTR_PLE_RQST_PROG = 0xA5
};
+/* ARMv7 Cortex-A5 specific event types */
+enum armv7_a5_perf_types {
+ ARMV7_PERFCTR_IRQ_TAKEN = 0x86,
+ ARMV7_PERFCTR_FIQ_TAKEN = 0x87,
+
+ ARMV7_PERFCTR_EXT_MEM_RQST = 0xc0,
+ ARMV7_PERFCTR_NC_EXT_MEM_RQST = 0xc1,
+ ARMV7_PERFCTR_PREFETCH_LINEFILL = 0xc2,
+ ARMV7_PERFCTR_PREFETCH_LINEFILL_DROP = 0xc3,
+ ARMV7_PERFCTR_ENTER_READ_ALLOC = 0xc4,
+ ARMV7_PERFCTR_READ_ALLOC = 0xc5,
+
+ ARMV7_PERFCTR_STALL_SB_FULL = 0xc9,
+};
+
+/* ARMv7 Cortex-A15 specific event types */
+enum armv7_a15_perf_types {
+ ARMV7_PERFCTR_L1_DCACHE_READ_ACCESS = 0x40,
+ ARMV7_PERFCTR_L1_DCACHE_WRITE_ACCESS = 0x41,
+ ARMV7_PERFCTR_L1_DCACHE_READ_REFILL = 0x42,
+ ARMV7_PERFCTR_L1_DCACHE_WRITE_REFILL = 0x43,
+
+ ARMV7_PERFCTR_L1_DTLB_READ_REFILL = 0x4C,
+ ARMV7_PERFCTR_L1_DTLB_WRITE_REFILL = 0x4D,
+
+ ARMV7_PERFCTR_L2_DCACHE_READ_ACCESS = 0x50,
+ ARMV7_PERFCTR_L2_DCACHE_WRITE_ACCESS = 0x51,
+ ARMV7_PERFCTR_L2_DCACHE_READ_REFILL = 0x52,
+ ARMV7_PERFCTR_L2_DCACHE_WRITE_REFILL = 0x53,
+
+ ARMV7_PERFCTR_SPEC_PC_WRITE = 0x76,
+};
+
/*
* Cortex-A8 HW events mapping
*
@@ -207,11 +255,6 @@ static const unsigned armv7_a8_perf_cache_map[PERF_COUNT_HW_CACHE_MAX]
},
},
[C(DTLB)] = {
- /*
- * Only ITLB misses and DTLB refills are supported.
- * If users want the DTLB refills misses a raw counter
- * must be used.
- */
[C(OP_READ)] = {
[C(RESULT_ACCESS)] = CACHE_OP_UNSUPPORTED,
[C(RESULT_MISS)] = ARMV7_PERFCTR_DTLB_REFILL,
@@ -323,11 +366,6 @@ static const unsigned armv7_a9_perf_cache_map[PERF_COUNT_HW_CACHE_MAX]
},
},
[C(DTLB)] = {
- /*
- * Only ITLB misses and DTLB refills are supported.
- * If users want the DTLB refills misses a raw counter
- * must be used.
- */
[C(OP_READ)] = {
[C(RESULT_ACCESS)] = CACHE_OP_UNSUPPORTED,
[C(RESULT_MISS)] = ARMV7_PERFCTR_DTLB_REFILL,
@@ -374,6 +412,242 @@ static const unsigned armv7_a9_perf_cache_map[PERF_COUNT_HW_CACHE_MAX]
};
/*
+ * Cortex-A5 HW events mapping
+ */
+static const unsigned armv7_a5_perf_map[PERF_COUNT_HW_MAX] = {
+ [PERF_COUNT_HW_CPU_CYCLES] = ARMV7_PERFCTR_CPU_CYCLES,
+ [PERF_COUNT_HW_INSTRUCTIONS] = ARMV7_PERFCTR_INSTR_EXECUTED,
+ [PERF_COUNT_HW_CACHE_REFERENCES] = HW_OP_UNSUPPORTED,
+ [PERF_COUNT_HW_CACHE_MISSES] = HW_OP_UNSUPPORTED,
+ [PERF_COUNT_HW_BRANCH_INSTRUCTIONS] = ARMV7_PERFCTR_PC_WRITE,
+ [PERF_COUNT_HW_BRANCH_MISSES] = ARMV7_PERFCTR_PC_BRANCH_MIS_PRED,
+ [PERF_COUNT_HW_BUS_CYCLES] = HW_OP_UNSUPPORTED,
+};
+
+static const unsigned armv7_a5_perf_cache_map[PERF_COUNT_HW_CACHE_MAX]
+ [PERF_COUNT_HW_CACHE_OP_MAX]
+ [PERF_COUNT_HW_CACHE_RESULT_MAX] = {
+ [C(L1D)] = {
+ [C(OP_READ)] = {
+ [C(RESULT_ACCESS)]
+ = ARMV7_PERFCTR_DCACHE_ACCESS,
+ [C(RESULT_MISS)]
+ = ARMV7_PERFCTR_DCACHE_REFILL,
+ },
+ [C(OP_WRITE)] = {
+ [C(RESULT_ACCESS)]
+ = ARMV7_PERFCTR_DCACHE_ACCESS,
+ [C(RESULT_MISS)]
+ = ARMV7_PERFCTR_DCACHE_REFILL,
+ },
+ [C(OP_PREFETCH)] = {
+ [C(RESULT_ACCESS)]
+ = ARMV7_PERFCTR_PREFETCH_LINEFILL,
+ [C(RESULT_MISS)]
+ = ARMV7_PERFCTR_PREFETCH_LINEFILL_DROP,
+ },
+ },
+ [C(L1I)] = {
+ [C(OP_READ)] = {
+ [C(RESULT_ACCESS)] = ARMV7_PERFCTR_L1_ICACHE_ACCESS,
+ [C(RESULT_MISS)] = ARMV7_PERFCTR_IFETCH_MISS,
+ },
+ [C(OP_WRITE)] = {
+ [C(RESULT_ACCESS)] = ARMV7_PERFCTR_L1_ICACHE_ACCESS,
+ [C(RESULT_MISS)] = ARMV7_PERFCTR_IFETCH_MISS,
+ },
+ /*
+ * The prefetch counters don't differentiate between the I
+ * side and the D side.
+ */
+ [C(OP_PREFETCH)] = {
+ [C(RESULT_ACCESS)]
+ = ARMV7_PERFCTR_PREFETCH_LINEFILL,
+ [C(RESULT_MISS)]
+ = ARMV7_PERFCTR_PREFETCH_LINEFILL_DROP,
+ },
+ },
+ [C(LL)] = {
+ [C(OP_READ)] = {
+ [C(RESULT_ACCESS)] = CACHE_OP_UNSUPPORTED,
+ [C(RESULT_MISS)] = CACHE_OP_UNSUPPORTED,
+ },
+ [C(OP_WRITE)] = {
+ [C(RESULT_ACCESS)] = CACHE_OP_UNSUPPORTED,
+ [C(RESULT_MISS)] = CACHE_OP_UNSUPPORTED,
+ },
+ [C(OP_PREFETCH)] = {
+ [C(RESULT_ACCESS)] = CACHE_OP_UNSUPPORTED,
+ [C(RESULT_MISS)] = CACHE_OP_UNSUPPORTED,
+ },
+ },
+ [C(DTLB)] = {
+ [C(OP_READ)] = {
+ [C(RESULT_ACCESS)] = CACHE_OP_UNSUPPORTED,
+ [C(RESULT_MISS)] = ARMV7_PERFCTR_DTLB_REFILL,
+ },
+ [C(OP_WRITE)] = {
+ [C(RESULT_ACCESS)] = CACHE_OP_UNSUPPORTED,
+ [C(RESULT_MISS)] = ARMV7_PERFCTR_DTLB_REFILL,
+ },
+ [C(OP_PREFETCH)] = {
+ [C(RESULT_ACCESS)] = CACHE_OP_UNSUPPORTED,
+ [C(RESULT_MISS)] = CACHE_OP_UNSUPPORTED,
+ },
+ },
+ [C(ITLB)] = {
+ [C(OP_READ)] = {
+ [C(RESULT_ACCESS)] = CACHE_OP_UNSUPPORTED,
+ [C(RESULT_MISS)] = ARMV7_PERFCTR_ITLB_MISS,
+ },
+ [C(OP_WRITE)] = {
+ [C(RESULT_ACCESS)] = CACHE_OP_UNSUPPORTED,
+ [C(RESULT_MISS)] = ARMV7_PERFCTR_ITLB_MISS,
+ },
+ [C(OP_PREFETCH)] = {
+ [C(RESULT_ACCESS)] = CACHE_OP_UNSUPPORTED,
+ [C(RESULT_MISS)] = CACHE_OP_UNSUPPORTED,
+ },
+ },
+ [C(BPU)] = {
+ [C(OP_READ)] = {
+ [C(RESULT_ACCESS)] = ARMV7_PERFCTR_PC_BRANCH_PRED,
+ [C(RESULT_MISS)]
+ = ARMV7_PERFCTR_PC_BRANCH_MIS_PRED,
+ },
+ [C(OP_WRITE)] = {
+ [C(RESULT_ACCESS)] = ARMV7_PERFCTR_PC_BRANCH_PRED,
+ [C(RESULT_MISS)]
+ = ARMV7_PERFCTR_PC_BRANCH_MIS_PRED,
+ },
+ [C(OP_PREFETCH)] = {
+ [C(RESULT_ACCESS)] = CACHE_OP_UNSUPPORTED,
+ [C(RESULT_MISS)] = CACHE_OP_UNSUPPORTED,
+ },
+ },
+};
+
+/*
+ * Cortex-A15 HW events mapping
+ */
+static const unsigned armv7_a15_perf_map[PERF_COUNT_HW_MAX] = {
+ [PERF_COUNT_HW_CPU_CYCLES] = ARMV7_PERFCTR_CPU_CYCLES,
+ [PERF_COUNT_HW_INSTRUCTIONS] = ARMV7_PERFCTR_INSTR_EXECUTED,
+ [PERF_COUNT_HW_CACHE_REFERENCES] = HW_OP_UNSUPPORTED,
+ [PERF_COUNT_HW_CACHE_MISSES] = HW_OP_UNSUPPORTED,
+ [PERF_COUNT_HW_BRANCH_INSTRUCTIONS] = ARMV7_PERFCTR_SPEC_PC_WRITE,
+ [PERF_COUNT_HW_BRANCH_MISSES] = ARMV7_PERFCTR_PC_BRANCH_MIS_PRED,
+ [PERF_COUNT_HW_BUS_CYCLES] = ARMV7_PERFCTR_BUS_CYCLES,
+};
+
+static const unsigned armv7_a15_perf_cache_map[PERF_COUNT_HW_CACHE_MAX]
+ [PERF_COUNT_HW_CACHE_OP_MAX]
+ [PERF_COUNT_HW_CACHE_RESULT_MAX] = {
+ [C(L1D)] = {
+ [C(OP_READ)] = {
+ [C(RESULT_ACCESS)]
+ = ARMV7_PERFCTR_L1_DCACHE_READ_ACCESS,
+ [C(RESULT_MISS)]
+ = ARMV7_PERFCTR_L1_DCACHE_READ_REFILL,
+ },
+ [C(OP_WRITE)] = {
+ [C(RESULT_ACCESS)]
+ = ARMV7_PERFCTR_L1_DCACHE_WRITE_ACCESS,
+ [C(RESULT_MISS)]
+ = ARMV7_PERFCTR_L1_DCACHE_WRITE_REFILL,
+ },
+ [C(OP_PREFETCH)] = {
+ [C(RESULT_ACCESS)] = CACHE_OP_UNSUPPORTED,
+ [C(RESULT_MISS)] = CACHE_OP_UNSUPPORTED,
+ },
+ },
+ [C(L1I)] = {
+ /*
+ * Not all performance counters differentiate between read
+ * and write accesses/misses so we're not always strictly
+ * correct, but it's the best we can do. Writes and reads get
+ * combined in these cases.
+ */
+ [C(OP_READ)] = {
+ [C(RESULT_ACCESS)] = ARMV7_PERFCTR_L1_ICACHE_ACCESS,
+ [C(RESULT_MISS)] = ARMV7_PERFCTR_IFETCH_MISS,
+ },
+ [C(OP_WRITE)] = {
+ [C(RESULT_ACCESS)] = ARMV7_PERFCTR_L1_ICACHE_ACCESS,
+ [C(RESULT_MISS)] = ARMV7_PERFCTR_IFETCH_MISS,
+ },
+ [C(OP_PREFETCH)] = {
+ [C(RESULT_ACCESS)] = CACHE_OP_UNSUPPORTED,
+ [C(RESULT_MISS)] = CACHE_OP_UNSUPPORTED,
+ },
+ },
+ [C(LL)] = {
+ [C(OP_READ)] = {
+ [C(RESULT_ACCESS)]
+ = ARMV7_PERFCTR_L2_DCACHE_READ_ACCESS,
+ [C(RESULT_MISS)]
+ = ARMV7_PERFCTR_L2_DCACHE_READ_REFILL,
+ },
+ [C(OP_WRITE)] = {
+ [C(RESULT_ACCESS)]
+ = ARMV7_PERFCTR_L2_DCACHE_WRITE_ACCESS,
+ [C(RESULT_MISS)]
+ = ARMV7_PERFCTR_L2_DCACHE_WRITE_REFILL,
+ },
+ [C(OP_PREFETCH)] = {
+ [C(RESULT_ACCESS)] = CACHE_OP_UNSUPPORTED,
+ [C(RESULT_MISS)] = CACHE_OP_UNSUPPORTED,
+ },
+ },
+ [C(DTLB)] = {
+ [C(OP_READ)] = {
+ [C(RESULT_ACCESS)] = CACHE_OP_UNSUPPORTED,
+ [C(RESULT_MISS)]
+ = ARMV7_PERFCTR_L1_DTLB_READ_REFILL,
+ },
+ [C(OP_WRITE)] = {
+ [C(RESULT_ACCESS)] = CACHE_OP_UNSUPPORTED,
+ [C(RESULT_MISS)]
+ = ARMV7_PERFCTR_L1_DTLB_WRITE_REFILL,
+ },
+ [C(OP_PREFETCH)] = {
+ [C(RESULT_ACCESS)] = CACHE_OP_UNSUPPORTED,
+ [C(RESULT_MISS)] = CACHE_OP_UNSUPPORTED,
+ },
+ },
+ [C(ITLB)] = {
+ [C(OP_READ)] = {
+ [C(RESULT_ACCESS)] = CACHE_OP_UNSUPPORTED,
+ [C(RESULT_MISS)] = ARMV7_PERFCTR_ITLB_MISS,
+ },
+ [C(OP_WRITE)] = {
+ [C(RESULT_ACCESS)] = CACHE_OP_UNSUPPORTED,
+ [C(RESULT_MISS)] = ARMV7_PERFCTR_ITLB_MISS,
+ },
+ [C(OP_PREFETCH)] = {
+ [C(RESULT_ACCESS)] = CACHE_OP_UNSUPPORTED,
+ [C(RESULT_MISS)] = CACHE_OP_UNSUPPORTED,
+ },
+ },
+ [C(BPU)] = {
+ [C(OP_READ)] = {
+ [C(RESULT_ACCESS)] = ARMV7_PERFCTR_PC_BRANCH_PRED,
+ [C(RESULT_MISS)]
+ = ARMV7_PERFCTR_PC_BRANCH_MIS_PRED,
+ },
+ [C(OP_WRITE)] = {
+ [C(RESULT_ACCESS)] = ARMV7_PERFCTR_PC_BRANCH_PRED,
+ [C(RESULT_MISS)]
+ = ARMV7_PERFCTR_PC_BRANCH_MIS_PRED,
+ },
+ [C(OP_PREFETCH)] = {
+ [C(RESULT_ACCESS)] = CACHE_OP_UNSUPPORTED,
+ [C(RESULT_MISS)] = CACHE_OP_UNSUPPORTED,
+ },
+ },
+};
+
+/*
* Perf Events counters
*/
enum armv7_counters {
@@ -905,6 +1179,26 @@ static const struct arm_pmu *__init armv7_a9_pmu_init(void)
armv7pmu.num_events = armv7_read_num_pmnc_events();
return &armv7pmu;
}
+
+static const struct arm_pmu *__init armv7_a5_pmu_init(void)
+{
+ armv7pmu.id = ARM_PERF_PMU_ID_CA5;
+ armv7pmu.name = "ARMv7 Cortex-A5";
+ armv7pmu.cache_map = &armv7_a5_perf_cache_map;
+ armv7pmu.event_map = &armv7_a5_perf_map;
+ armv7pmu.num_events = armv7_read_num_pmnc_events();
+ return &armv7pmu;
+}
+
+static const struct arm_pmu *__init armv7_a15_pmu_init(void)
+{
+ armv7pmu.id = ARM_PERF_PMU_ID_CA15;
+ armv7pmu.name = "ARMv7 Cortex-A15";
+ armv7pmu.cache_map = &armv7_a15_perf_cache_map;
+ armv7pmu.event_map = &armv7_a15_perf_map;
+ armv7pmu.num_events = armv7_read_num_pmnc_events();
+ return &armv7pmu;
+}
#else
static const struct arm_pmu *__init armv7_a8_pmu_init(void)
{
@@ -915,4 +1209,14 @@ static const struct arm_pmu *__init armv7_a9_pmu_init(void)
{
return NULL;
}
+
+static const struct arm_pmu *__init armv7_a5_pmu_init(void)
+{
+ return NULL;
+}
+
+static const struct arm_pmu *__init armv7_a15_pmu_init(void)
+{
+ return NULL;
+}
#endif /* CONFIG_CPU_V7 */
diff --git a/arch/arm/kernel/ptrace.c b/arch/arm/kernel/ptrace.c
index 97260060bf26..897ade059f58 100644
--- a/arch/arm/kernel/ptrace.c
+++ b/arch/arm/kernel/ptrace.c
@@ -228,34 +228,12 @@ static struct undef_hook thumb_break_hook = {
.fn = break_trap,
};
-static int thumb2_break_trap(struct pt_regs *regs, unsigned int instr)
-{
- unsigned int instr2;
- void __user *pc;
-
- /* Check the second half of the instruction. */
- pc = (void __user *)(instruction_pointer(regs) + 2);
-
- if (processor_mode(regs) == SVC_MODE) {
- instr2 = *(u16 *) pc;
- } else {
- get_user(instr2, (u16 __user *)pc);
- }
-
- if (instr2 == 0xa000) {
- ptrace_break(current, regs);
- return 0;
- } else {
- return 1;
- }
-}
-
static struct undef_hook thumb2_break_hook = {
- .instr_mask = 0xffff,
- .instr_val = 0xf7f0,
+ .instr_mask = 0xffffffff,
+ .instr_val = 0xf7f0a000,
.cpsr_mask = PSR_T_BIT,
.cpsr_val = PSR_T_BIT,
- .fn = thumb2_break_trap,
+ .fn = break_trap,
};
static int __init ptrace_break_init(void)
diff --git a/arch/arm/kernel/setup.c b/arch/arm/kernel/setup.c
index 9c3278f37796..70bca649e925 100644
--- a/arch/arm/kernel/setup.c
+++ b/arch/arm/kernel/setup.c
@@ -919,6 +919,12 @@ void __init setup_arch(char **cmdline_p)
tcm_init();
+#ifdef CONFIG_ZONE_DMA
+ if (mdesc->dma_zone_size) {
+ extern unsigned long arm_dma_zone_size;
+ arm_dma_zone_size = mdesc->dma_zone_size;
+ }
+#endif
#ifdef CONFIG_MULTI_IRQ_HANDLER
handle_arch_irq = mdesc->handle_irq;
#endif
@@ -980,6 +986,10 @@ static const char *hwcap_str[] = {
"neon",
"vfpv3",
"vfpv3d16",
+ "tls",
+ "vfpv4",
+ "idiva",
+ "idivt",
NULL
};
diff --git a/arch/arm/kernel/traps.c b/arch/arm/kernel/traps.c
index 6807cb1e76dd..2d3436e9f71f 100644
--- a/arch/arm/kernel/traps.c
+++ b/arch/arm/kernel/traps.c
@@ -355,9 +355,24 @@ asmlinkage void __exception do_undefinstr(struct pt_regs *regs)
pc = (void __user *)instruction_pointer(regs);
if (processor_mode(regs) == SVC_MODE) {
- instr = *(u32 *) pc;
+#ifdef CONFIG_THUMB2_KERNEL
+ if (thumb_mode(regs)) {
+ instr = ((u16 *)pc)[0];
+ if (is_wide_instruction(instr)) {
+ instr <<= 16;
+ instr |= ((u16 *)pc)[1];
+ }
+ } else
+#endif
+ instr = *(u32 *) pc;
} else if (thumb_mode(regs)) {
get_user(instr, (u16 __user *)pc);
+ if (is_wide_instruction(instr)) {
+ unsigned int instr2;
+ get_user(instr2, (u16 __user *)pc+1);
+ instr <<= 16;
+ instr |= instr2;
+ }
} else {
get_user(instr, (u32 __user *)pc);
}
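
Editorial note: is_wide_instruction() lives in the ARM ptrace headers; the check amounts to testing whether the first halfword falls in the 0xe800-0xffff range reserved for 32-bit Thumb encodings. A host-side sketch of the reassembly done above, with the helper re-declared locally rather than pulled from kernel headers:

	#include <assert.h>
	#include <stdint.h>

	/* 32-bit Thumb encodings have their first halfword in 0xe800..0xffff. */
	static int is_wide_instruction(uint32_t first_half)
	{
		return first_half >= 0xe800;
	}

	static uint32_t read_thumb_insn(const uint16_t *pc)
	{
		uint32_t instr = pc[0];

		if (is_wide_instruction(instr)) {
			instr <<= 16;
			instr |= pc[1];
		}
		return instr;
	}

	int main(void)
	{
		const uint16_t nop16[] = { 0xbf00 };		/* 16-bit Thumb NOP */
		const uint16_t bkpt32[] = { 0xf7f0, 0xa018 };	/* Thumb32 kprobe breakpoint */

		assert(read_thumb_insn(nop16) == 0xbf00);
		assert(read_thumb_insn(bkpt32) == 0xf7f0a018);
		return 0;
	}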