// SPDX-License-Identifier: GPL-2.0-only
/*
 * Copyright (C) 2013 Huawei Ltd.
 * Author: Jiang Liu <liuj97@gmail.com>
 *
 * Copyright (C) 2014-2016 Zi Shen Lim <zlim.lnx@gmail.com>
 */
8 #include <linux/bitops.h>
10 #include <linux/printk.h>
11 #include <linux/sizes.h>
12 #include <linux/types.h>
14 #include <asm/debug-monitors.h>
15 #include <asm/errno.h>
17 #include <asm/kprobes.h>
/* sf bit (bit 31): selects 64-bit vs 32-bit variant in many A64 encodings */
#define AARCH64_INSN_SF_BIT BIT(31)
/* N bit (bit 22) of logical-immediate / bitfield encodings */
#define AARCH64_INSN_N_BIT BIT(22)
/* sh bit (bit 22) of add/sub (immediate): shift imm12 left by 12 */
#define AARCH64_INSN_LSL_12 BIT(22)
/*
 * Instruction-class lookup table, indexed by insn bits [28:25]
 * (see aarch64_get_insn_class() below, which extracts that nibble).
 * NOTE(review): a 4-bit index needs 16 entries; only 15 are visible in
 * this chunk and the closing brace is missing — verify against the
 * complete file.
 */
static const int aarch64_insn_encoding_class[] = {
AARCH64_INSN_CLS_UNKNOWN,
AARCH64_INSN_CLS_UNKNOWN,
AARCH64_INSN_CLS_UNKNOWN,
AARCH64_INSN_CLS_LDST,
AARCH64_INSN_CLS_DP_REG,
AARCH64_INSN_CLS_LDST,
AARCH64_INSN_CLS_DP_FPSIMD,
AARCH64_INSN_CLS_DP_IMM,
AARCH64_INSN_CLS_DP_IMM,
AARCH64_INSN_CLS_BR_SYS,
AARCH64_INSN_CLS_BR_SYS,
AARCH64_INSN_CLS_LDST,
AARCH64_INSN_CLS_DP_REG,
AARCH64_INSN_CLS_LDST,
AARCH64_INSN_CLS_DP_FPSIMD,
/* Classify @insn by its encoding-group nibble, bits [28:25]. */
enum aarch64_insn_encoding_class __kprobes aarch64_get_insn_class(u32 insn)
return aarch64_insn_encoding_class[(insn >> 25) & 0xf];
/*
 * Return true if @insn is a HINT instruction that is safe to single-step
 * out of line (NOP and the listed PAC/BTI hints). Anything that is not a
 * HINT, or a HINT with other side effects, is rejected.
 */
bool __kprobes aarch64_insn_is_steppable_hint(u32 insn)
if (!aarch64_insn_is_hint(insn))
/* 0xFE0 masks the CRm:op2 field (bits [11:5]) that selects the hint */
switch (insn & 0xFE0) {
case AARCH64_INSN_HINT_XPACLRI:
case AARCH64_INSN_HINT_PACIA_1716:
case AARCH64_INSN_HINT_PACIB_1716:
case AARCH64_INSN_HINT_PACIAZ:
case AARCH64_INSN_HINT_PACIASP:
case AARCH64_INSN_HINT_PACIBZ:
case AARCH64_INSN_HINT_PACIBSP:
case AARCH64_INSN_HINT_BTI:
case AARCH64_INSN_HINT_BTIC:
case AARCH64_INSN_HINT_BTIJ:
case AARCH64_INSN_HINT_BTIJC:
case AARCH64_INSN_HINT_NOP:
/* True for any branch whose target is PC-relative: b, bl, tb(n)z, cb(n)z, b.cond. */
bool aarch64_insn_is_branch_imm(u32 insn)
return (aarch64_insn_is_b(insn) || aarch64_insn_is_bl(insn) ||
aarch64_insn_is_tbz(insn) || aarch64_insn_is_tbnz(insn) ||
aarch64_insn_is_cbz(insn) || aarch64_insn_is_cbnz(insn) ||
aarch64_insn_is_bcond(insn));
/* True if @insn reads a PC-relative literal (so it can't be executed elsewhere). */
bool __kprobes aarch64_insn_uses_literal(u32 insn)
/* ldr/ldrsw (literal), prfm */
return aarch64_insn_is_ldr_lit(insn) ||
aarch64_insn_is_ldrsw_lit(insn) ||
aarch64_insn_is_adr_adrp(insn) ||
aarch64_insn_is_prfm_lit(insn);
/* True for any control-transfer instruction, register- or immediate-based. */
bool __kprobes aarch64_insn_is_branch(u32 insn)
/* b, bl, cb*, tb*, ret*, b.cond, br*, blr* */
return aarch64_insn_is_b(insn) ||
aarch64_insn_is_bl(insn) ||
aarch64_insn_is_cbz(insn) ||
aarch64_insn_is_cbnz(insn) ||
aarch64_insn_is_tbz(insn) ||
aarch64_insn_is_tbnz(insn) ||
aarch64_insn_is_ret(insn) ||
aarch64_insn_is_ret_auth(insn) ||
aarch64_insn_is_br(insn) ||
aarch64_insn_is_br_auth(insn) ||
aarch64_insn_is_blr(insn) ||
aarch64_insn_is_blr_auth(insn) ||
aarch64_insn_is_bcond(insn);
/*
 * For immediate field @type, report through @maskp/@shiftp the bit mask
 * (unshifted) and the bit position of the field within an instruction
 * word; returns < 0 for an unrecognised type.
 * NOTE(review): the per-case mask/shift assignments are not visible in
 * this chunk — only the case labels survived extraction.
 */
static int __kprobes aarch64_get_imm_shift_mask(enum aarch64_insn_imm_type type,
u32 *maskp, int *shiftp)
case AARCH64_INSN_IMM_26:
case AARCH64_INSN_IMM_19:
case AARCH64_INSN_IMM_16:
case AARCH64_INSN_IMM_14:
case AARCH64_INSN_IMM_12:
case AARCH64_INSN_IMM_9:
case AARCH64_INSN_IMM_7:
case AARCH64_INSN_IMM_6:
case AARCH64_INSN_IMM_S:
case AARCH64_INSN_IMM_R:
case AARCH64_INSN_IMM_N:
/*
 * ADR/ADRP split their 21-bit immediate into immlo (2 bits at [30:29])
 * and immhi (19 bits at [23:5]); the macros below describe that split.
 */
#define ADR_IMM_HILOSPLIT 2
#define ADR_IMM_SIZE SZ_2M
#define ADR_IMM_LOMASK ((1 << ADR_IMM_HILOSPLIT) - 1)
#define ADR_IMM_HIMASK ((ADR_IMM_SIZE >> ADR_IMM_HILOSPLIT) - 1)
#define ADR_IMM_LOSHIFT 29
#define ADR_IMM_HISHIFT 5

/*
 * Extract the immediate field @type from @insn. The ADR case reassembles
 * the split immlo/immhi halves; all other types are handled through
 * aarch64_get_imm_shift_mask().
 */
u64 aarch64_insn_decode_immediate(enum aarch64_insn_imm_type type, u32 insn)
u32 immlo, immhi, mask;
case AARCH64_INSN_IMM_ADR:
immlo = (insn >> ADR_IMM_LOSHIFT) & ADR_IMM_LOMASK;
immhi = (insn >> ADR_IMM_HISHIFT) & ADR_IMM_HIMASK;
/* recombine: immhi:immlo */
insn = (immhi << ADR_IMM_HILOSPLIT) | immlo;
mask = ADR_IMM_SIZE - 1;
if (aarch64_get_imm_shift_mask(type, &mask, &shift) < 0) {
pr_err("%s: unknown immediate encoding %d\n", __func__,
return (insn >> shift) & mask;
/*
 * Patch immediate field @type of @insn with @imm and return the new
 * instruction word; AARCH64_BREAK_FAULT on error (and a faulting input
 * instruction is propagated unchanged).
 */
u32 __kprobes aarch64_insn_encode_immediate(enum aarch64_insn_imm_type type,
u32 immlo, immhi, mask;
if (insn == AARCH64_BREAK_FAULT)
return AARCH64_BREAK_FAULT;
case AARCH64_INSN_IMM_ADR:
/* split imm into the discontiguous immlo/immhi fields */
immlo = (imm & ADR_IMM_LOMASK) << ADR_IMM_LOSHIFT;
imm >>= ADR_IMM_HILOSPLIT;
immhi = (imm & ADR_IMM_HIMASK) << ADR_IMM_HISHIFT;
mask = ((ADR_IMM_LOMASK << ADR_IMM_LOSHIFT) |
(ADR_IMM_HIMASK << ADR_IMM_HISHIFT));
if (aarch64_get_imm_shift_mask(type, &mask, &shift) < 0) {
pr_err("%s: unknown immediate encoding %d\n", __func__,
return AARCH64_BREAK_FAULT;
/* Update the immediate field. */
insn &= ~(mask << shift);
insn |= (imm & mask) << shift;
/*
 * Extract the 5-bit register number held in field @type of @insn.
 * NOTE(review): the per-case shift assignments are missing from this
 * chunk; only the case labels and final extraction are visible.
 */
u32 aarch64_insn_decode_register(enum aarch64_insn_register_type type,
case AARCH64_INSN_REGTYPE_RT:
case AARCH64_INSN_REGTYPE_RD:
case AARCH64_INSN_REGTYPE_RN:
case AARCH64_INSN_REGTYPE_RT2:
case AARCH64_INSN_REGTYPE_RA:
case AARCH64_INSN_REGTYPE_RM:
pr_err("%s: unknown register type encoding %d\n", __func__,
/* register fields are always 5 bits wide */
return (insn >> shift) & GENMASK(4, 0);
/*
 * Patch register field @type of @insn with @reg; returns the updated
 * instruction or AARCH64_BREAK_FAULT on a bad register/field type.
 */
static u32 aarch64_insn_encode_register(enum aarch64_insn_register_type type,
enum aarch64_insn_register reg)
if (insn == AARCH64_BREAK_FAULT)
return AARCH64_BREAK_FAULT;
if (reg < AARCH64_INSN_REG_0 || reg > AARCH64_INSN_REG_SP) {
pr_err("%s: unknown register encoding %d\n", __func__, reg);
return AARCH64_BREAK_FAULT;
case AARCH64_INSN_REGTYPE_RT:
case AARCH64_INSN_REGTYPE_RD:
case AARCH64_INSN_REGTYPE_RN:
case AARCH64_INSN_REGTYPE_RT2:
case AARCH64_INSN_REGTYPE_RA:
case AARCH64_INSN_REGTYPE_RM:
case AARCH64_INSN_REGTYPE_RS:
pr_err("%s: unknown register type encoding %d\n", __func__,
return AARCH64_BREAK_FAULT;
/* clear then set the 5-bit register field */
insn &= ~(GENMASK(4, 0) << shift);
insn |= reg << shift;
/* Map access-size enum to the 2-bit "size" field (log2 of byte width). */
static const u32 aarch64_insn_ldst_size[] = {
[AARCH64_INSN_SIZE_8] = 0,
[AARCH64_INSN_SIZE_16] = 1,
[AARCH64_INSN_SIZE_32] = 2,
[AARCH64_INSN_SIZE_64] = 3,
/* Encode access size @type into the size field (bits [31:30]) of a ld/st insn. */
static u32 aarch64_insn_encode_ldst_size(enum aarch64_insn_size_type type,
if (type < AARCH64_INSN_SIZE_8 || type > AARCH64_INSN_SIZE_64) {
pr_err("%s: unknown size encoding %d\n", __func__, type);
return AARCH64_BREAK_FAULT;
size = aarch64_insn_ldst_size[type];
insn &= ~GENMASK(31, 30);
/*
 * Common validation for PC-relative targets: both @pc and @addr must be
 * 4-byte aligned and the byte offset must lie in [-range, range).
 */
static inline long label_imm_common(unsigned long pc, unsigned long addr,
if ((pc & 0x3) || (addr & 0x3)) {
pr_err("%s: A64 instructions must be word aligned\n", __func__);
offset = ((long)addr - (long)pc);
if (offset < -range || offset >= range) {
pr_err("%s: offset out of range\n", __func__);
/* Generate a B or BL from @pc to @addr; AARCH64_BREAK_FAULT on bad range/type. */
u32 __kprobes aarch64_insn_gen_branch_imm(unsigned long pc, unsigned long addr,
enum aarch64_insn_branch_type type)
* B/BL support [-128M, 128M) offset
* ARM64 virtual address arrangement guarantees all kernel and module
* texts are within +/-128M.
offset = label_imm_common(pc, addr, SZ_128M);
if (offset >= SZ_128M)
return AARCH64_BREAK_FAULT;
case AARCH64_INSN_BRANCH_LINK:
insn = aarch64_insn_get_bl_value();
case AARCH64_INSN_BRANCH_NOLINK:
insn = aarch64_insn_get_b_value();
pr_err("%s: unknown branch encoding %d\n", __func__, type);
return AARCH64_BREAK_FAULT;
/* imm26 is the word (offset/4) displacement */
return aarch64_insn_encode_immediate(AARCH64_INSN_IMM_26, insn,
/* Generate CBZ/CBNZ on @reg branching from @pc to @addr (±1M range). */
u32 aarch64_insn_gen_comp_branch_imm(unsigned long pc, unsigned long addr,
enum aarch64_insn_register reg,
enum aarch64_insn_variant variant,
enum aarch64_insn_branch_type type)
offset = label_imm_common(pc, addr, SZ_1M);
return AARCH64_BREAK_FAULT;
case AARCH64_INSN_BRANCH_COMP_ZERO:
insn = aarch64_insn_get_cbz_value();
case AARCH64_INSN_BRANCH_COMP_NONZERO:
insn = aarch64_insn_get_cbnz_value();
pr_err("%s: unknown branch encoding %d\n", __func__, type);
return AARCH64_BREAK_FAULT;
case AARCH64_INSN_VARIANT_32BIT:
case AARCH64_INSN_VARIANT_64BIT:
/* sf bit selects the 64-bit compare */
insn |= AARCH64_INSN_SF_BIT;
pr_err("%s: unknown variant encoding %d\n", __func__, variant);
return AARCH64_BREAK_FAULT;
insn = aarch64_insn_encode_register(AARCH64_INSN_REGTYPE_RT, insn, reg);
return aarch64_insn_encode_immediate(AARCH64_INSN_IMM_19, insn,
/* Generate B.cond from @pc to @addr (±1M range) for condition @cond. */
u32 aarch64_insn_gen_cond_branch_imm(unsigned long pc, unsigned long addr,
enum aarch64_insn_condition cond)
offset = label_imm_common(pc, addr, SZ_1M);
insn = aarch64_insn_get_bcond_value();
if (cond < AARCH64_INSN_COND_EQ || cond > AARCH64_INSN_COND_AL) {
pr_err("%s: unknown condition encoding %d\n", __func__, cond);
return AARCH64_BREAK_FAULT;
return aarch64_insn_encode_immediate(AARCH64_INSN_IMM_19, insn,
/* Generate a HINT instruction with CRm:op2 selector @op. */
u32 __kprobes aarch64_insn_gen_hint(enum aarch64_insn_hint_cr_op op)
return aarch64_insn_get_hint_value() | op;

/* NOP is HINT #0. */
u32 __kprobes aarch64_insn_gen_nop(void)
return aarch64_insn_gen_hint(AARCH64_INSN_HINT_NOP);
/* Generate BR/BLR/RET with target register @reg. */
u32 aarch64_insn_gen_branch_reg(enum aarch64_insn_register reg,
enum aarch64_insn_branch_type type)
case AARCH64_INSN_BRANCH_NOLINK:
insn = aarch64_insn_get_br_value();
case AARCH64_INSN_BRANCH_LINK:
insn = aarch64_insn_get_blr_value();
case AARCH64_INSN_BRANCH_RETURN:
insn = aarch64_insn_get_ret_value();
pr_err("%s: unknown branch encoding %d\n", __func__, type);
return AARCH64_BREAK_FAULT;
/* the target register lives in the Rn field for register branches */
return aarch64_insn_encode_register(AARCH64_INSN_REGTYPE_RN, insn, reg);
/* Generate LDR/STR (register offset): @reg <-> [@base, @offset], width @size. */
u32 aarch64_insn_gen_load_store_reg(enum aarch64_insn_register reg,
enum aarch64_insn_register base,
enum aarch64_insn_register offset,
enum aarch64_insn_size_type size,
enum aarch64_insn_ldst_type type)
case AARCH64_INSN_LDST_LOAD_REG_OFFSET:
insn = aarch64_insn_get_ldr_reg_value();
case AARCH64_INSN_LDST_STORE_REG_OFFSET:
insn = aarch64_insn_get_str_reg_value();
pr_err("%s: unknown load/store encoding %d\n", __func__, type);
return AARCH64_BREAK_FAULT;
insn = aarch64_insn_encode_ldst_size(size, insn);
insn = aarch64_insn_encode_register(AARCH64_INSN_REGTYPE_RT, insn, reg);
insn = aarch64_insn_encode_register(AARCH64_INSN_REGTYPE_RN, insn,
return aarch64_insn_encode_register(AARCH64_INSN_REGTYPE_RM, insn,
/*
 * Generate LDR/STR (unsigned immediate offset): @reg <-> [@base, #imm].
 * The immediate must be a multiple of the access size and fit in the
 * scaled 12-bit field.
 */
u32 aarch64_insn_gen_load_store_imm(enum aarch64_insn_register reg,
enum aarch64_insn_register base,
enum aarch64_insn_size_type size,
enum aarch64_insn_ldst_type type)
if (size < AARCH64_INSN_SIZE_8 || size > AARCH64_INSN_SIZE_64) {
/* NOTE(review): this reports 'type' although the check is on 'size' — confirm intent */
pr_err("%s: unknown size encoding %d\n", __func__, type);
return AARCH64_BREAK_FAULT;
shift = aarch64_insn_ldst_size[size];
/* imm must be size-aligned and fit in 12 bits after scaling */
if (imm & ~(BIT(12 + shift) - BIT(shift))) {
pr_err("%s: invalid imm: %d\n", __func__, imm);
return AARCH64_BREAK_FAULT;
case AARCH64_INSN_LDST_LOAD_IMM_OFFSET:
insn = aarch64_insn_get_ldr_imm_value();
case AARCH64_INSN_LDST_STORE_IMM_OFFSET:
insn = aarch64_insn_get_str_imm_value();
pr_err("%s: unknown load/store encoding %d\n", __func__, type);
return AARCH64_BREAK_FAULT;
insn = aarch64_insn_encode_ldst_size(size, insn);
insn = aarch64_insn_encode_register(AARCH64_INSN_REGTYPE_RT, insn, reg);
insn = aarch64_insn_encode_register(AARCH64_INSN_REGTYPE_RN, insn,
return aarch64_insn_encode_immediate(AARCH64_INSN_IMM_12, insn, imm);
/* Generate LDR (literal) loading @reg from PC-relative @addr (±1M range). */
u32 aarch64_insn_gen_load_literal(unsigned long pc, unsigned long addr,
enum aarch64_insn_register reg,
offset = label_imm_common(pc, addr, SZ_1M);
return AARCH64_BREAK_FAULT;
insn = aarch64_insn_get_ldr_lit_value();
insn = aarch64_insn_encode_register(AARCH64_INSN_REGTYPE_RT, insn, reg);
return aarch64_insn_encode_immediate(AARCH64_INSN_IMM_19, insn,
/*
 * Generate LDP/STP (pre/post-indexed): @reg1/@reg2 <-> [@base], with a
 * signed, size-scaled 7-bit offset.
 */
u32 aarch64_insn_gen_load_store_pair(enum aarch64_insn_register reg1,
enum aarch64_insn_register reg2,
enum aarch64_insn_register base,
enum aarch64_insn_variant variant,
enum aarch64_insn_ldst_type type)
case AARCH64_INSN_LDST_LOAD_PAIR_PRE_INDEX:
insn = aarch64_insn_get_ldp_pre_value();
case AARCH64_INSN_LDST_STORE_PAIR_PRE_INDEX:
insn = aarch64_insn_get_stp_pre_value();
case AARCH64_INSN_LDST_LOAD_PAIR_POST_INDEX:
insn = aarch64_insn_get_ldp_post_value();
case AARCH64_INSN_LDST_STORE_PAIR_POST_INDEX:
insn = aarch64_insn_get_stp_post_value();
pr_err("%s: unknown load/store encoding %d\n", __func__, type);
return AARCH64_BREAK_FAULT;
case AARCH64_INSN_VARIANT_32BIT:
/* imm7 is scaled by 4 for 32-bit registers */
if ((offset & 0x3) || (offset < -256) || (offset > 252)) {
pr_err("%s: offset must be multiples of 4 in the range of [-256, 252] %d\n",
return AARCH64_BREAK_FAULT;
case AARCH64_INSN_VARIANT_64BIT:
/* imm7 is scaled by 8 for 64-bit registers */
if ((offset & 0x7) || (offset < -512) || (offset > 504)) {
pr_err("%s: offset must be multiples of 8 in the range of [-512, 504] %d\n",
return AARCH64_BREAK_FAULT;
insn |= AARCH64_INSN_SF_BIT;
pr_err("%s: unknown variant encoding %d\n", __func__, variant);
return AARCH64_BREAK_FAULT;
insn = aarch64_insn_encode_register(AARCH64_INSN_REGTYPE_RT, insn,
insn = aarch64_insn_encode_register(AARCH64_INSN_REGTYPE_RT2, insn,
insn = aarch64_insn_encode_register(AARCH64_INSN_REGTYPE_RN, insn,
return aarch64_insn_encode_immediate(AARCH64_INSN_IMM_7, insn,
/*
 * Generate a load/store-exclusive (optionally acquire/release):
 * LDXR/LDAXR or STXR/STLXR with status register @state; Rt2 is always ZR.
 */
u32 aarch64_insn_gen_load_store_ex(enum aarch64_insn_register reg,
enum aarch64_insn_register base,
enum aarch64_insn_register state,
enum aarch64_insn_size_type size,
enum aarch64_insn_ldst_type type)
case AARCH64_INSN_LDST_LOAD_EX:
case AARCH64_INSN_LDST_LOAD_ACQ_EX:
insn = aarch64_insn_get_load_ex_value();
if (type == AARCH64_INSN_LDST_LOAD_ACQ_EX)
case AARCH64_INSN_LDST_STORE_EX:
case AARCH64_INSN_LDST_STORE_REL_EX:
insn = aarch64_insn_get_store_ex_value();
if (type == AARCH64_INSN_LDST_STORE_REL_EX)
pr_err("%s: unknown load/store exclusive encoding %d\n", __func__, type);
return AARCH64_BREAK_FAULT;
insn = aarch64_insn_encode_ldst_size(size, insn);
insn = aarch64_insn_encode_register(AARCH64_INSN_REGTYPE_RT, insn,
insn = aarch64_insn_encode_register(AARCH64_INSN_REGTYPE_RN, insn,
/* Rt2 unused by the single-register exclusives: encode ZR */
insn = aarch64_insn_encode_register(AARCH64_INSN_REGTYPE_RT2, insn,
AARCH64_INSN_REG_ZR);
return aarch64_insn_encode_register(AARCH64_INSN_REGTYPE_RS, insn,
673 #ifdef CONFIG_ARM64_LSE_ATOMICS
/* Encode memory-ordering semantics @type into bits [23:22] of an LSE insn. */
static u32 aarch64_insn_encode_ldst_order(enum aarch64_insn_mem_order_type type,
case AARCH64_INSN_MEM_ORDER_NONE:
case AARCH64_INSN_MEM_ORDER_ACQ:
case AARCH64_INSN_MEM_ORDER_REL:
case AARCH64_INSN_MEM_ORDER_ACQREL:
pr_err("%s: unknown mem order %d\n", __func__, type);
return AARCH64_BREAK_FAULT;
insn &= ~GENMASK(23, 22);
/*
 * Generate an LSE atomic memory op (LDADD/LDCLR/LDEOR/LDSET/SWP):
 * old value of [@address] -> @result, combined with @value; only 32/64-bit
 * sizes are supported.
 */
u32 aarch64_insn_gen_atomic_ld_op(enum aarch64_insn_register result,
enum aarch64_insn_register address,
enum aarch64_insn_register value,
enum aarch64_insn_size_type size,
enum aarch64_insn_mem_atomic_op op,
enum aarch64_insn_mem_order_type order)
case AARCH64_INSN_MEM_ATOMIC_ADD:
insn = aarch64_insn_get_ldadd_value();
case AARCH64_INSN_MEM_ATOMIC_CLR:
insn = aarch64_insn_get_ldclr_value();
case AARCH64_INSN_MEM_ATOMIC_EOR:
insn = aarch64_insn_get_ldeor_value();
case AARCH64_INSN_MEM_ATOMIC_SET:
insn = aarch64_insn_get_ldset_value();
case AARCH64_INSN_MEM_ATOMIC_SWP:
insn = aarch64_insn_get_swp_value();
pr_err("%s: unimplemented mem atomic op %d\n", __func__, op);
return AARCH64_BREAK_FAULT;
case AARCH64_INSN_SIZE_32:
case AARCH64_INSN_SIZE_64:
pr_err("%s: unimplemented size encoding %d\n", __func__, size);
return AARCH64_BREAK_FAULT;
insn = aarch64_insn_encode_ldst_size(size, insn);
insn = aarch64_insn_encode_ldst_order(order, insn);
insn = aarch64_insn_encode_register(AARCH64_INSN_REGTYPE_RT, insn,
insn = aarch64_insn_encode_register(AARCH64_INSN_REGTYPE_RN, insn,
return aarch64_insn_encode_register(AARCH64_INSN_REGTYPE_RS, insn,
/*
 * Encode ordering for CAS: acquire is bit 22 (L), release is bit 15 (o0),
 * acquire-release sets both.
 */
static u32 aarch64_insn_encode_cas_order(enum aarch64_insn_mem_order_type type,
case AARCH64_INSN_MEM_ORDER_NONE:
case AARCH64_INSN_MEM_ORDER_ACQ:
case AARCH64_INSN_MEM_ORDER_REL:
case AARCH64_INSN_MEM_ORDER_ACQREL:
order = BIT(15) | BIT(22);
pr_err("%s: unknown mem order %d\n", __func__, type);
return AARCH64_BREAK_FAULT;
insn &= ~(BIT(15) | BIT(22));
/* Generate a CAS instruction (32/64-bit only) with ordering @order. */
u32 aarch64_insn_gen_cas(enum aarch64_insn_register result,
enum aarch64_insn_register address,
enum aarch64_insn_register value,
enum aarch64_insn_size_type size,
enum aarch64_insn_mem_order_type order)
case AARCH64_INSN_SIZE_32:
case AARCH64_INSN_SIZE_64:
pr_err("%s: unimplemented size encoding %d\n", __func__, size);
return AARCH64_BREAK_FAULT;
insn = aarch64_insn_get_cas_value();
insn = aarch64_insn_encode_ldst_size(size, insn);
insn = aarch64_insn_encode_cas_order(order, insn);
insn = aarch64_insn_encode_register(AARCH64_INSN_REGTYPE_RT, insn,
insn = aarch64_insn_encode_register(AARCH64_INSN_REGTYPE_RN, insn,
return aarch64_insn_encode_register(AARCH64_INSN_REGTYPE_RS, insn,
/*
 * Build the PRFM prfop immediate from @type (PLD/PLI/PST), @target cache
 * level (L1-L3) and @policy (KEEP/STRM), and encode it into the Rt field.
 */
static u32 aarch64_insn_encode_prfm_imm(enum aarch64_insn_prfm_type type,
enum aarch64_insn_prfm_target target,
enum aarch64_insn_prfm_policy policy,
u32 imm_type = 0, imm_target = 0, imm_policy = 0;
case AARCH64_INSN_PRFM_TYPE_PLD:
case AARCH64_INSN_PRFM_TYPE_PLI:
case AARCH64_INSN_PRFM_TYPE_PST:
pr_err("%s: unknown prfm type encoding %d\n", __func__, type);
return AARCH64_BREAK_FAULT;
case AARCH64_INSN_PRFM_TARGET_L1:
case AARCH64_INSN_PRFM_TARGET_L2:
case AARCH64_INSN_PRFM_TARGET_L3:
pr_err("%s: unknown prfm target encoding %d\n", __func__, target);
return AARCH64_BREAK_FAULT;
case AARCH64_INSN_PRFM_POLICY_KEEP:
case AARCH64_INSN_PRFM_POLICY_STRM:
pr_err("%s: unknown prfm policy encoding %d\n", __func__, policy);
return AARCH64_BREAK_FAULT;
/* In this case, imm5 is encoded into Rt field. */
insn &= ~GENMASK(4, 0);
insn |= imm_policy | (imm_target << 1) | (imm_type << 3);
/* Generate PRFM [base, #0] with the given type/target/policy hint. */
u32 aarch64_insn_gen_prefetch(enum aarch64_insn_register base,
enum aarch64_insn_prfm_type type,
enum aarch64_insn_prfm_target target,
enum aarch64_insn_prfm_policy policy)
u32 insn = aarch64_insn_get_prfm_value();
insn = aarch64_insn_encode_ldst_size(AARCH64_INSN_SIZE_64, insn);
insn = aarch64_insn_encode_prfm_imm(type, target, policy, insn);
insn = aarch64_insn_encode_register(AARCH64_INSN_REGTYPE_RN, insn,
/* zero immediate offset */
return aarch64_insn_encode_immediate(AARCH64_INSN_IMM_12, insn, 0);
/*
 * Generate ADD/SUB(S) (immediate): @dst = @src op #imm. The immediate is
 * 12 bits, optionally shifted left by 12 (so a 24-bit value is accepted
 * only when its low 12 bits are zero).
 */
u32 aarch64_insn_gen_add_sub_imm(enum aarch64_insn_register dst,
enum aarch64_insn_register src,
int imm, enum aarch64_insn_variant variant,
enum aarch64_insn_adsb_type type)
case AARCH64_INSN_ADSB_ADD:
insn = aarch64_insn_get_add_imm_value();
case AARCH64_INSN_ADSB_SUB:
insn = aarch64_insn_get_sub_imm_value();
case AARCH64_INSN_ADSB_ADD_SETFLAGS:
insn = aarch64_insn_get_adds_imm_value();
case AARCH64_INSN_ADSB_SUB_SETFLAGS:
insn = aarch64_insn_get_subs_imm_value();
pr_err("%s: unknown add/sub encoding %d\n", __func__, type);
return AARCH64_BREAK_FAULT;
case AARCH64_INSN_VARIANT_32BIT:
case AARCH64_INSN_VARIANT_64BIT:
insn |= AARCH64_INSN_SF_BIT;
pr_err("%s: unknown variant encoding %d\n", __func__, variant);
return AARCH64_BREAK_FAULT;
/* We can't encode more than a 24bit value (12bit + 12bit shift) */
if (imm & ~(BIT(24) - 1))
/* If we have something in the top 12 bits... */
if (imm & ~(SZ_4K - 1)) {
/* ... and in the low 12 bits -> error */
if (imm & (SZ_4K - 1))
/* use the LSL #12 form of the immediate */
insn |= AARCH64_INSN_LSL_12;
insn = aarch64_insn_encode_register(AARCH64_INSN_REGTYPE_RD, insn, dst);
insn = aarch64_insn_encode_register(AARCH64_INSN_REGTYPE_RN, insn, src);
return aarch64_insn_encode_immediate(AARCH64_INSN_IMM_12, insn, imm);
/* shared error exit for the immediate-range checks above */
pr_err("%s: invalid immediate encoding %d\n", __func__, imm);
return AARCH64_BREAK_FAULT;
/*
 * Generate BFM/UBFM/SBFM: @dst = bitfield-move(@src, immr, imms). immr
 * and imms are limited to 5 bits (32-bit) or 6 bits (64-bit, which also
 * sets the N bit).
 */
u32 aarch64_insn_gen_bitfield(enum aarch64_insn_register dst,
enum aarch64_insn_register src,
enum aarch64_insn_variant variant,
enum aarch64_insn_bitfield_type type)
case AARCH64_INSN_BITFIELD_MOVE:
insn = aarch64_insn_get_bfm_value();
case AARCH64_INSN_BITFIELD_MOVE_UNSIGNED:
insn = aarch64_insn_get_ubfm_value();
case AARCH64_INSN_BITFIELD_MOVE_SIGNED:
insn = aarch64_insn_get_sbfm_value();
pr_err("%s: unknown bitfield encoding %d\n", __func__, type);
return AARCH64_BREAK_FAULT;
case AARCH64_INSN_VARIANT_32BIT:
mask = GENMASK(4, 0);
case AARCH64_INSN_VARIANT_64BIT:
insn |= AARCH64_INSN_SF_BIT | AARCH64_INSN_N_BIT;
mask = GENMASK(5, 0);
pr_err("%s: unknown variant encoding %d\n", __func__, variant);
return AARCH64_BREAK_FAULT;
pr_err("%s: invalid immr encoding %d\n", __func__, immr);
return AARCH64_BREAK_FAULT;
pr_err("%s: invalid imms encoding %d\n", __func__, imms);
return AARCH64_BREAK_FAULT;
insn = aarch64_insn_encode_register(AARCH64_INSN_REGTYPE_RD, insn, dst);
insn = aarch64_insn_encode_register(AARCH64_INSN_REGTYPE_RN, insn, src);
insn = aarch64_insn_encode_immediate(AARCH64_INSN_IMM_R, insn, immr);
return aarch64_insn_encode_immediate(AARCH64_INSN_IMM_S, insn, imms);
/*
 * Generate MOVZ/MOVK/MOVN: load a 16-bit immediate into @dst at position
 * @shift (0/16 for 32-bit, 0/16/32/48 for 64-bit).
 */
u32 aarch64_insn_gen_movewide(enum aarch64_insn_register dst,
enum aarch64_insn_variant variant,
enum aarch64_insn_movewide_type type)
case AARCH64_INSN_MOVEWIDE_ZERO:
insn = aarch64_insn_get_movz_value();
case AARCH64_INSN_MOVEWIDE_KEEP:
insn = aarch64_insn_get_movk_value();
case AARCH64_INSN_MOVEWIDE_INVERSE:
insn = aarch64_insn_get_movn_value();
pr_err("%s: unknown movewide encoding %d\n", __func__, type);
return AARCH64_BREAK_FAULT;
/* the payload must fit in 16 bits */
if (imm & ~(SZ_64K - 1)) {
pr_err("%s: invalid immediate encoding %d\n", __func__, imm);
return AARCH64_BREAK_FAULT;
case AARCH64_INSN_VARIANT_32BIT:
if (shift != 0 && shift != 16) {
pr_err("%s: invalid shift encoding %d\n", __func__,
return AARCH64_BREAK_FAULT;
case AARCH64_INSN_VARIANT_64BIT:
insn |= AARCH64_INSN_SF_BIT;
if (shift != 0 && shift != 16 && shift != 32 && shift != 48) {
pr_err("%s: invalid shift encoding %d\n", __func__,
return AARCH64_BREAK_FAULT;
pr_err("%s: unknown variant encoding %d\n", __func__, variant);
return AARCH64_BREAK_FAULT;
/* hw field (bits [22:21]) = shift / 16 */
insn |= (shift >> 4) << 21;
insn = aarch64_insn_encode_register(AARCH64_INSN_REGTYPE_RD, insn, dst);
return aarch64_insn_encode_immediate(AARCH64_INSN_IMM_16, insn, imm);
/*
 * Generate ADD/SUB(S) (shifted register): @dst = @src op (@reg LSL #shift).
 * The shift must fit the register width (0-31 or 0-63).
 */
u32 aarch64_insn_gen_add_sub_shifted_reg(enum aarch64_insn_register dst,
enum aarch64_insn_register src,
enum aarch64_insn_register reg,
enum aarch64_insn_variant variant,
enum aarch64_insn_adsb_type type)
case AARCH64_INSN_ADSB_ADD:
insn = aarch64_insn_get_add_value();
case AARCH64_INSN_ADSB_SUB:
insn = aarch64_insn_get_sub_value();
case AARCH64_INSN_ADSB_ADD_SETFLAGS:
insn = aarch64_insn_get_adds_value();
case AARCH64_INSN_ADSB_SUB_SETFLAGS:
insn = aarch64_insn_get_subs_value();
pr_err("%s: unknown add/sub encoding %d\n", __func__, type);
return AARCH64_BREAK_FAULT;
case AARCH64_INSN_VARIANT_32BIT:
if (shift & ~(SZ_32 - 1)) {
pr_err("%s: invalid shift encoding %d\n", __func__,
return AARCH64_BREAK_FAULT;
case AARCH64_INSN_VARIANT_64BIT:
insn |= AARCH64_INSN_SF_BIT;
if (shift & ~(SZ_64 - 1)) {
pr_err("%s: invalid shift encoding %d\n", __func__,
return AARCH64_BREAK_FAULT;
pr_err("%s: unknown variant encoding %d\n", __func__, variant);
return AARCH64_BREAK_FAULT;
insn = aarch64_insn_encode_register(AARCH64_INSN_REGTYPE_RD, insn, dst);
insn = aarch64_insn_encode_register(AARCH64_INSN_REGTYPE_RN, insn, src);
insn = aarch64_insn_encode_register(AARCH64_INSN_REGTYPE_RM, insn, reg);
return aarch64_insn_encode_immediate(AARCH64_INSN_IMM_6, insn, shift);
/*
 * Generate a 1-source data op (REV16/REV32/REV64): @dst = op(@src).
 * REV64 requires the 64-bit variant.
 */
u32 aarch64_insn_gen_data1(enum aarch64_insn_register dst,
enum aarch64_insn_register src,
enum aarch64_insn_variant variant,
enum aarch64_insn_data1_type type)
case AARCH64_INSN_DATA1_REVERSE_16:
insn = aarch64_insn_get_rev16_value();
case AARCH64_INSN_DATA1_REVERSE_32:
insn = aarch64_insn_get_rev32_value();
case AARCH64_INSN_DATA1_REVERSE_64:
if (variant != AARCH64_INSN_VARIANT_64BIT) {
pr_err("%s: invalid variant for reverse64 %d\n",
return AARCH64_BREAK_FAULT;
insn = aarch64_insn_get_rev64_value();
pr_err("%s: unknown data1 encoding %d\n", __func__, type);
return AARCH64_BREAK_FAULT;
case AARCH64_INSN_VARIANT_32BIT:
case AARCH64_INSN_VARIANT_64BIT:
insn |= AARCH64_INSN_SF_BIT;
pr_err("%s: unknown variant encoding %d\n", __func__, variant);
return AARCH64_BREAK_FAULT;
insn = aarch64_insn_encode_register(AARCH64_INSN_REGTYPE_RD, insn, dst);
return aarch64_insn_encode_register(AARCH64_INSN_REGTYPE_RN, insn, src);
/* Generate a 2-source data op (UDIV/SDIV/LSLV/LSRV/ASRV/RORV): @dst = @src op @reg. */
u32 aarch64_insn_gen_data2(enum aarch64_insn_register dst,
enum aarch64_insn_register src,
enum aarch64_insn_register reg,
enum aarch64_insn_variant variant,
enum aarch64_insn_data2_type type)
case AARCH64_INSN_DATA2_UDIV:
insn = aarch64_insn_get_udiv_value();
case AARCH64_INSN_DATA2_SDIV:
insn = aarch64_insn_get_sdiv_value();
case AARCH64_INSN_DATA2_LSLV:
insn = aarch64_insn_get_lslv_value();
case AARCH64_INSN_DATA2_LSRV:
insn = aarch64_insn_get_lsrv_value();
case AARCH64_INSN_DATA2_ASRV:
insn = aarch64_insn_get_asrv_value();
case AARCH64_INSN_DATA2_RORV:
insn = aarch64_insn_get_rorv_value();
pr_err("%s: unknown data2 encoding %d\n", __func__, type);
return AARCH64_BREAK_FAULT;
case AARCH64_INSN_VARIANT_32BIT:
case AARCH64_INSN_VARIANT_64BIT:
insn |= AARCH64_INSN_SF_BIT;
pr_err("%s: unknown variant encoding %d\n", __func__, variant);
return AARCH64_BREAK_FAULT;
insn = aarch64_insn_encode_register(AARCH64_INSN_REGTYPE_RD, insn, dst);
insn = aarch64_insn_encode_register(AARCH64_INSN_REGTYPE_RN, insn, src);
return aarch64_insn_encode_register(AARCH64_INSN_REGTYPE_RM, insn, reg);
/* Generate a 3-source data op (MADD/MSUB): @dst = @src +/- @reg1 * @reg2. */
u32 aarch64_insn_gen_data3(enum aarch64_insn_register dst,
enum aarch64_insn_register src,
enum aarch64_insn_register reg1,
enum aarch64_insn_register reg2,
enum aarch64_insn_variant variant,
enum aarch64_insn_data3_type type)
case AARCH64_INSN_DATA3_MADD:
insn = aarch64_insn_get_madd_value();
case AARCH64_INSN_DATA3_MSUB:
insn = aarch64_insn_get_msub_value();
pr_err("%s: unknown data3 encoding %d\n", __func__, type);
return AARCH64_BREAK_FAULT;
case AARCH64_INSN_VARIANT_32BIT:
case AARCH64_INSN_VARIANT_64BIT:
insn |= AARCH64_INSN_SF_BIT;
pr_err("%s: unknown variant encoding %d\n", __func__, variant);
return AARCH64_BREAK_FAULT;
insn = aarch64_insn_encode_register(AARCH64_INSN_REGTYPE_RD, insn, dst);
/* the addend goes in Ra; the multiplicands in Rn/Rm */
insn = aarch64_insn_encode_register(AARCH64_INSN_REGTYPE_RA, insn, src);
insn = aarch64_insn_encode_register(AARCH64_INSN_REGTYPE_RN, insn,
return aarch64_insn_encode_register(AARCH64_INSN_REGTYPE_RM, insn,
/*
 * Generate a logical (shifted register) op: @dst = @src op (@reg LSL #shift)
 * for AND/BIC/ORR/ORN/EOR/EON/ANDS/BICS.
 */
u32 aarch64_insn_gen_logical_shifted_reg(enum aarch64_insn_register dst,
enum aarch64_insn_register src,
enum aarch64_insn_register reg,
enum aarch64_insn_variant variant,
enum aarch64_insn_logic_type type)
case AARCH64_INSN_LOGIC_AND:
insn = aarch64_insn_get_and_value();
case AARCH64_INSN_LOGIC_BIC:
insn = aarch64_insn_get_bic_value();
case AARCH64_INSN_LOGIC_ORR:
insn = aarch64_insn_get_orr_value();
case AARCH64_INSN_LOGIC_ORN:
insn = aarch64_insn_get_orn_value();
case AARCH64_INSN_LOGIC_EOR:
insn = aarch64_insn_get_eor_value();
case AARCH64_INSN_LOGIC_EON:
insn = aarch64_insn_get_eon_value();
case AARCH64_INSN_LOGIC_AND_SETFLAGS:
insn = aarch64_insn_get_ands_value();
case AARCH64_INSN_LOGIC_BIC_SETFLAGS:
insn = aarch64_insn_get_bics_value();
pr_err("%s: unknown logical encoding %d\n", __func__, type);
return AARCH64_BREAK_FAULT;
case AARCH64_INSN_VARIANT_32BIT:
if (shift & ~(SZ_32 - 1)) {
pr_err("%s: invalid shift encoding %d\n", __func__,
return AARCH64_BREAK_FAULT;
case AARCH64_INSN_VARIANT_64BIT:
insn |= AARCH64_INSN_SF_BIT;
if (shift & ~(SZ_64 - 1)) {
pr_err("%s: invalid shift encoding %d\n", __func__,
return AARCH64_BREAK_FAULT;
pr_err("%s: unknown variant encoding %d\n", __func__, variant);
return AARCH64_BREAK_FAULT;
insn = aarch64_insn_encode_register(AARCH64_INSN_REGTYPE_RD, insn, dst);
insn = aarch64_insn_encode_register(AARCH64_INSN_REGTYPE_RN, insn, src);
insn = aarch64_insn_encode_register(AARCH64_INSN_REGTYPE_RM, insn, reg);
return aarch64_insn_encode_immediate(AARCH64_INSN_IMM_6, insn, shift);
/*
 * MOV (register) is architecturally an alias of ORR (shifted register) where
 * MOV <*d>, <*m> is equivalent to ORR <*d>, <*ZR>, <*m>
 */
u32 aarch64_insn_gen_move_reg(enum aarch64_insn_register dst,
enum aarch64_insn_register src,
enum aarch64_insn_variant variant)
return aarch64_insn_gen_logical_shifted_reg(dst, AARCH64_INSN_REG_ZR,
AARCH64_INSN_LOGIC_ORR);
/*
 * Generate ADR or ADRP loading @reg with @addr relative to @pc. For ADRP
 * the offset is in 4K pages from the page-aligned PC; either way the
 * 21-bit signed immediate limits the range to ±1M units.
 */
u32 aarch64_insn_gen_adr(unsigned long pc, unsigned long addr,
enum aarch64_insn_register reg,
enum aarch64_insn_adr_type type)
case AARCH64_INSN_ADR_TYPE_ADR:
insn = aarch64_insn_get_adr_value();
case AARCH64_INSN_ADR_TYPE_ADRP:
insn = aarch64_insn_get_adrp_value();
/* page delta from the PC's 4K page */
offset = (addr - ALIGN_DOWN(pc, SZ_4K)) >> 12;
pr_err("%s: unknown adr encoding %d\n", __func__, type);
return AARCH64_BREAK_FAULT;
if (offset < -SZ_1M || offset >= SZ_1M)
return AARCH64_BREAK_FAULT;
insn = aarch64_insn_encode_register(AARCH64_INSN_REGTYPE_RD, insn, reg);
return aarch64_insn_encode_immediate(AARCH64_INSN_IMM_ADR, insn, offset);
/*
 * Decode the imm field of a branch, and return the byte offset as a
 * signed value (so it can be used when computing a new branch
 * target).
 */
s32 aarch64_get_branch_offset(u32 insn)
if (aarch64_insn_is_b(insn) || aarch64_insn_is_bl(insn)) {
imm = aarch64_insn_decode_immediate(AARCH64_INSN_IMM_26, insn);
/* sign-extend the 26-bit word offset and multiply by 4 */
return (imm << 6) >> 4;
if (aarch64_insn_is_cbz(insn) || aarch64_insn_is_cbnz(insn) ||
aarch64_insn_is_bcond(insn)) {
imm = aarch64_insn_decode_immediate(AARCH64_INSN_IMM_19, insn);
return (imm << 13) >> 11;
if (aarch64_insn_is_tbz(insn) || aarch64_insn_is_tbnz(insn)) {
imm = aarch64_insn_decode_immediate(AARCH64_INSN_IMM_14, insn);
return (imm << 18) >> 16;
/* Unhandled instruction */
/*
 * Encode the displacement of a branch in the imm field and return the
 * updated instruction.
 */
u32 aarch64_set_branch_offset(u32 insn, s32 offset)
if (aarch64_insn_is_b(insn) || aarch64_insn_is_bl(insn))
return aarch64_insn_encode_immediate(AARCH64_INSN_IMM_26, insn,
if (aarch64_insn_is_cbz(insn) || aarch64_insn_is_cbnz(insn) ||
aarch64_insn_is_bcond(insn))
return aarch64_insn_encode_immediate(AARCH64_INSN_IMM_19, insn,
if (aarch64_insn_is_tbz(insn) || aarch64_insn_is_tbnz(insn))
return aarch64_insn_encode_immediate(AARCH64_INSN_IMM_14, insn,
/* Unhandled instruction */
/* Return the byte offset encoded in an ADRP (page immediate << 12). */
s32 aarch64_insn_adrp_get_offset(u32 insn)
BUG_ON(!aarch64_insn_is_adrp(insn));
return aarch64_insn_decode_immediate(AARCH64_INSN_IMM_ADR, insn) << 12;

/* Re-encode an ADRP with the page offset derived from byte @offset. */
u32 aarch64_insn_adrp_set_offset(u32 insn, s32 offset)
BUG_ON(!aarch64_insn_is_adrp(insn));
return aarch64_insn_encode_immediate(AARCH64_INSN_IMM_ADR, insn,
/*
 * Extract the Op/CR data from a msr/mrs instruction
 * (the system-register specifier in bits [20:5]).
 */
u32 aarch64_insn_extract_system_reg(u32 insn)
return (insn & 0x1FFFE0) >> 5;
/* True if the first T32 halfword indicates a 32-bit (wide) encoding. */
bool aarch32_insn_is_wide(u32 insn)
return insn >= 0xe800;

/*
 * Macros/defines for extracting register numbers from instruction.
 */
u32 aarch32_insn_extract_reg_num(u32 insn, int offset)
return (insn & (0xf << offset)) >> offset;
1452 #define OPC2_MASK 0x7
1453 #define OPC2_OFFSET 5
1454 u32 aarch32_insn_mcr_extract_opc2(u32 insn)
1456 return (insn & (OPC2_MASK << OPC2_OFFSET)) >> OPC2_OFFSET;
1459 #define CRM_MASK 0xf
1460 u32 aarch32_insn_mcr_extract_crm(u32 insn)
1462 return insn & CRM_MASK;
1465 static bool range_of_ones(u64 val)
1467 /* Doesn't handle full ones or full zeroes */
1468 u64 sval = val >> __ffs64(val);
1470 /* One of Sean Eron Anderson's bithack tricks */
1471 return ((sval + 1) & (sval)) == 0;
1474 static u32 aarch64_encode_immediate(u64 imm,
1475 enum aarch64_insn_variant variant,
1478 unsigned int immr, imms, n, ones, ror, esz, tmp;
1482 case AARCH64_INSN_VARIANT_32BIT:
1485 case AARCH64_INSN_VARIANT_64BIT:
1486 insn |= AARCH64_INSN_SF_BIT;
1490 pr_err("%s: unknown variant encoding %d\n", __func__, variant);
1491 return AARCH64_BREAK_FAULT;
1494 mask = GENMASK(esz - 1, 0);
1496 /* Can't encode full zeroes, full ones, or value wider than the mask */
1497 if (!imm || imm == mask || imm & ~mask)
1498 return AARCH64_BREAK_FAULT;
1501 * Inverse of Replicate(). Try to spot a repeating pattern
1502 * with a pow2 stride.
1504 for (tmp = esz / 2; tmp >= 2; tmp /= 2) {
1505 u64 emask = BIT(tmp) - 1;
1507 if ((imm & emask) != ((imm >> tmp) & emask))
1514 /* N is only set if we're encoding a 64bit value */
1517 /* Trim imm to the element size */
1520 /* That's how many ones we need to encode */
1521 ones = hweight64(imm);
1524 * imms is set to (ones - 1), prefixed with a string of ones
1525 * and a zero if they fit. Cap it to 6 bits.
1528 imms |= 0xf << ffs(esz);
1531 /* Compute the rotation */
1532 if (range_of_ones(imm)) {
1534 * Pattern: 0..01..10..0
1536 * Compute how many rotate we need to align it right
1541 * Pattern: 0..01..10..01..1
1543 * Fill the unused top bits with ones, and check if
1544 * the result is a valid immediate (all ones with a
1545 * contiguous ranges of zeroes).
1548 if (!range_of_ones(~imm))
1549 return AARCH64_BREAK_FAULT;
1552 * Compute the rotation to get a continuous set of
1553 * ones, with the first bit set at position 0
1559 * immr is the number of bits we need to rotate back to the
1560 * original set of ones. Note that this is relative to the
1563 immr = (esz - ror) % esz;
1565 insn = aarch64_insn_encode_immediate(AARCH64_INSN_IMM_N, insn, n);
1566 insn = aarch64_insn_encode_immediate(AARCH64_INSN_IMM_R, insn, immr);
1567 return aarch64_insn_encode_immediate(AARCH64_INSN_IMM_S, insn, imms);
1570 u32 aarch64_insn_gen_logical_immediate(enum aarch64_insn_logic_type type,
1571 enum aarch64_insn_variant variant,
1572 enum aarch64_insn_register Rn,
1573 enum aarch64_insn_register Rd,
1579 case AARCH64_INSN_LOGIC_AND:
1580 insn = aarch64_insn_get_and_imm_value();
1582 case AARCH64_INSN_LOGIC_ORR:
1583 insn = aarch64_insn_get_orr_imm_value();
1585 case AARCH64_INSN_LOGIC_EOR:
1586 insn = aarch64_insn_get_eor_imm_value();
1588 case AARCH64_INSN_LOGIC_AND_SETFLAGS:
1589 insn = aarch64_insn_get_ands_imm_value();
1592 pr_err("%s: unknown logical encoding %d\n", __func__, type);
1593 return AARCH64_BREAK_FAULT;
1596 insn = aarch64_insn_encode_register(AARCH64_INSN_REGTYPE_RD, insn, Rd);
1597 insn = aarch64_insn_encode_register(AARCH64_INSN_REGTYPE_RN, insn, Rn);
1598 return aarch64_encode_immediate(imm, variant, insn);
1601 u32 aarch64_insn_gen_extr(enum aarch64_insn_variant variant,
1602 enum aarch64_insn_register Rm,
1603 enum aarch64_insn_register Rn,
1604 enum aarch64_insn_register Rd,
1609 insn = aarch64_insn_get_extr_value();
1612 case AARCH64_INSN_VARIANT_32BIT:
1614 return AARCH64_BREAK_FAULT;
1616 case AARCH64_INSN_VARIANT_64BIT:
1618 return AARCH64_BREAK_FAULT;
1619 insn |= AARCH64_INSN_SF_BIT;
1620 insn = aarch64_insn_encode_immediate(AARCH64_INSN_IMM_N, insn, 1);
1623 pr_err("%s: unknown variant encoding %d\n", __func__, variant);
1624 return AARCH64_BREAK_FAULT;
1627 insn = aarch64_insn_encode_immediate(AARCH64_INSN_IMM_S, insn, lsb);
1628 insn = aarch64_insn_encode_register(AARCH64_INSN_REGTYPE_RD, insn, Rd);
1629 insn = aarch64_insn_encode_register(AARCH64_INSN_REGTYPE_RN, insn, Rn);
1630 return aarch64_insn_encode_register(AARCH64_INSN_REGTYPE_RM, insn, Rm);
1633 u32 aarch64_insn_gen_dmb(enum aarch64_insn_mb_type type)
1639 case AARCH64_INSN_MB_SY:
1642 case AARCH64_INSN_MB_ST:
1645 case AARCH64_INSN_MB_LD:
1648 case AARCH64_INSN_MB_ISH:
1651 case AARCH64_INSN_MB_ISHST:
1654 case AARCH64_INSN_MB_ISHLD:
1657 case AARCH64_INSN_MB_NSH:
1660 case AARCH64_INSN_MB_NSHST:
1663 case AARCH64_INSN_MB_NSHLD:
1667 pr_err("%s: unknown dmb type %d\n", __func__, type);
1668 return AARCH64_BREAK_FAULT;
1671 insn = aarch64_insn_get_dmb_value();
1672 insn &= ~GENMASK(11, 8);