/*
 * SPDX-License-Identifier: BSD-2-Clause
 *
 * Copyright (c) 2019 Western Digital Corporation or its affiliates.
 *
 * Authors:
 *   Anup Patel <anup.patel@wdc.com>
 */

#ifndef __RISCV_ENCODING_H__
#define __RISCV_ENCODING_H__

#include <sbi/sbi_const.h>

/* clang-format off */
#define MSTATUS_SIE _UL(0x00000002)
#define MSTATUS_MIE _UL(0x00000008)
#define MSTATUS_SPIE_SHIFT 5
#define MSTATUS_SPIE (_UL(1) << MSTATUS_SPIE_SHIFT)
#define MSTATUS_UBE _UL(0x00000040)
#define MSTATUS_MPIE _UL(0x00000080)
#define MSTATUS_SPP_SHIFT 8
#define MSTATUS_SPP (_UL(1) << MSTATUS_SPP_SHIFT)
#define MSTATUS_MPP_SHIFT 11
#define MSTATUS_MPP (_UL(3) << MSTATUS_MPP_SHIFT)
#define MSTATUS_FS _UL(0x00006000)
#define MSTATUS_XS _UL(0x00018000)
#define MSTATUS_VS _UL(0x00000600)
#define MSTATUS_MPRV _UL(0x00020000)
#define MSTATUS_SUM _UL(0x00040000)
#define MSTATUS_MXR _UL(0x00080000)
#define MSTATUS_TVM _UL(0x00100000)
#define MSTATUS_TW _UL(0x00200000)
#define MSTATUS_TSR _UL(0x00400000)
#define MSTATUS_SPELP _UL(0x00800000)
#define MSTATUS_SDT _UL(0x01000000)
#define MSTATUS32_SD _UL(0x80000000)
#if __riscv_xlen == 64
#define MSTATUS_UXL _ULL(0x0000000300000000)
#define MSTATUS_SXL _ULL(0x0000000C00000000)
#define MSTATUS_SBE _ULL(0x0000001000000000)
#define MSTATUS_MBE _ULL(0x0000002000000000)
#define MSTATUS_GVA _ULL(0x0000004000000000)
#define MSTATUS_GVA_SHIFT 38
#define MSTATUS_MPV _ULL(0x0000008000000000)
#define MSTATUS_MPELP _ULL(0x0000020000000000)
#define MSTATUS_MDT _ULL(0x0000040000000000)
#else
#define MSTATUSH_SBE _UL(0x00000010)
#define MSTATUSH_MBE _UL(0x00000020)
#define MSTATUSH_GVA _UL(0x00000040)
#define MSTATUSH_GVA_SHIFT 6
#define MSTATUSH_MPV _UL(0x00000080)
#define MSTATUSH_MPELP _UL(0x00000200)
#define MSTATUSH_MDT _UL(0x00000400)
#endif
#define MSTATUS32_SD _UL(0x80000000)
#define MSTATUS64_SD _ULL(0x8000000000000000)

#define SSTATUS_SIE MSTATUS_SIE
#define SSTATUS_SPIE_SHIFT MSTATUS_SPIE_SHIFT
#define SSTATUS_SPIE MSTATUS_SPIE
#define SSTATUS_SPP_SHIFT MSTATUS_SPP_SHIFT
#define SSTATUS_SPP MSTATUS_SPP
#define SSTATUS_FS MSTATUS_FS
#define SSTATUS_XS MSTATUS_XS
#define SSTATUS_VS MSTATUS_VS
#define SSTATUS_SUM MSTATUS_SUM
#define SSTATUS_MXR MSTATUS_MXR
#define SSTATUS32_SD MSTATUS32_SD
#define SSTATUS64_UXL MSTATUS_UXL
#define SSTATUS64_SD MSTATUS64_SD

#if __riscv_xlen == 64
#define HSTATUS_VSXL _UL(0x300000000)
#define HSTATUS_VSXL_SHIFT 32
#endif
#define HSTATUS_VTSR _UL(0x00400000)
#define HSTATUS_VTW _UL(0x00200000)
#define HSTATUS_VTVM _UL(0x00100000)
#define HSTATUS_VGEIN _UL(0x0003f000)
#define HSTATUS_VGEIN_SHIFT 12
#define HSTATUS_HU _UL(0x00000200)
#define HSTATUS_SPVP _UL(0x00000100)
#define HSTATUS_SPV _UL(0x00000080)
#define HSTATUS_GVA _UL(0x00000040)
#define HSTATUS_VSBE _UL(0x00000020)

#define MTVEC_MODE _UL(0x00000003)

#define MCAUSE_IRQ_MASK (_UL(1) << (__riscv_xlen - 1))
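
/*
 * Illustrative sketch (not part of the upstream header): mcause keeps an
 * interrupt flag in its top bit and the cause code in the remaining bits,
 * so a trap handler typically splits it as below. The helper names are
 * hypothetical.
 */
#ifndef __ASSEMBLER__
static inline int mcause_is_interrupt(unsigned long cause)
{
	return (cause & MCAUSE_IRQ_MASK) != 0;
}

static inline unsigned long mcause_exception_code(unsigned long cause)
{
	return cause & ~MCAUSE_IRQ_MASK;
}
#endif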

#define IRQ_S_SOFT 1
#define IRQ_VS_SOFT 2
#define IRQ_M_SOFT 3
#define IRQ_S_TIMER 5
#define IRQ_VS_TIMER 6
#define IRQ_M_TIMER 7
#define IRQ_S_EXT 9
#define IRQ_VS_EXT 10
#define IRQ_M_EXT 11
#define IRQ_S_GEXT 12
#define IRQ_PMU_OVF 13

#define MIP_SSIP (_UL(1) << IRQ_S_SOFT)
#define MIP_VSSIP (_UL(1) << IRQ_VS_SOFT)
#define MIP_MSIP (_UL(1) << IRQ_M_SOFT)
#define MIP_STIP (_UL(1) << IRQ_S_TIMER)
#define MIP_VSTIP (_UL(1) << IRQ_VS_TIMER)
#define MIP_MTIP (_UL(1) << IRQ_M_TIMER)
#define MIP_SEIP (_UL(1) << IRQ_S_EXT)
#define MIP_VSEIP (_UL(1) << IRQ_VS_EXT)
#define MIP_MEIP (_UL(1) << IRQ_M_EXT)
#define MIP_SGEIP (_UL(1) << IRQ_S_GEXT)
#define MIP_LCOFIP (_UL(1) << IRQ_PMU_OVF)

#define SIP_SSIP MIP_SSIP
#define SIP_STIP MIP_STIP

#define PRV_U _UL(0)
#define PRV_S _UL(1)
#define PRV_M _UL(3)

#define SATP32_MODE _UL(0x80000000)
#define SATP32_ASID _UL(0x7FC00000)
#define SATP32_PPN _UL(0x003FFFFF)
#define SATP64_MODE _ULL(0xF000000000000000)
#define SATP64_ASID _ULL(0x0FFFF00000000000)
#define SATP64_PPN _ULL(0x00000FFFFFFFFFFF)

#define SATP_MODE_OFF _UL(0)
#define SATP_MODE_SV32 _UL(1)
#define SATP_MODE_SV39 _UL(8)
#define SATP_MODE_SV48 _UL(9)
#define SATP_MODE_SV57 _UL(10)
#define SATP_MODE_SV64 _UL(11)

#define HGATP_MODE_OFF _UL(0)
#define HGATP_MODE_SV32X4 _UL(1)
#define HGATP_MODE_SV39X4 _UL(8)
#define HGATP_MODE_SV48X4 _UL(9)

#define HGATP32_MODE_SHIFT 31
#define HGATP32_VMID_SHIFT 22
#define HGATP32_VMID_MASK _UL(0x1FC00000)
#define HGATP32_PPN _UL(0x003FFFFF)

#define HGATP64_MODE_SHIFT 60
#define HGATP64_VMID_SHIFT 44
#define HGATP64_VMID_MASK _ULL(0x03FFF00000000000)
#define HGATP64_PPN _ULL(0x00000FFFFFFFFFFF)

#define PMP_R _UL(0x01)
#define PMP_W _UL(0x02)
#define PMP_X _UL(0x04)
#define PMP_A _UL(0x18)
#define PMP_A_TOR _UL(0x08)
#define PMP_A_NA4 _UL(0x10)
#define PMP_A_NAPOT _UL(0x18)
#define PMP_L _UL(0x80)

#define PMP_SHIFT 2
#define PMP_COUNT 64
#if __riscv_xlen == 64
#define PMP_ADDR_MASK ((_ULL(0x1) << 54) - 1)
#else
#define PMP_ADDR_MASK _UL(0xFFFFFFFF)
#endif
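
/*
 * Illustrative sketch (not part of the upstream header): a pmpaddr CSR
 * holds a physical address shifted right by PMP_SHIFT, and a NAPOT region
 * additionally encodes its size in the low address bits. The helper below
 * is a hypothetical example of building such a pmpaddr value for a
 * naturally aligned, power-of-two sized region (size >= 8 bytes).
 */
#ifndef __ASSEMBLER__
static inline unsigned long pmp_napot_addr(unsigned long base,
					   unsigned long size)
{
	/* base must be aligned to size; size/2 - 1 sets the NAPOT size bits */
	return ((base | (size / 2 - 1)) >> PMP_SHIFT) & PMP_ADDR_MASK;
}
#endif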

#if __riscv_xlen == 64
#define MSTATUS_SD MSTATUS64_SD
#define SSTATUS_SD SSTATUS64_SD
#define SATP_MODE SATP64_MODE

#define HGATP_PPN HGATP64_PPN
#define HGATP_VMID_SHIFT HGATP64_VMID_SHIFT
#define HGATP_VMID_MASK HGATP64_VMID_MASK
#define HGATP_MODE_SHIFT HGATP64_MODE_SHIFT
#else
#define MSTATUS_SD MSTATUS32_SD
#define SSTATUS_SD SSTATUS32_SD
#define SATP_MODE SATP32_MODE

#define HGATP_PPN HGATP32_PPN
#define HGATP_VMID_SHIFT HGATP32_VMID_SHIFT
#define HGATP_VMID_MASK HGATP32_VMID_MASK
#define HGATP_MODE_SHIFT HGATP32_MODE_SHIFT
#endif
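
/*
 * Illustrative sketch (not part of the upstream header): the xlen-neutral
 * HGATP_* aliases above let G-stage setup code build an hgatp value the
 * same way on RV32 and RV64. The helper name is hypothetical.
 */
#ifndef __ASSEMBLER__
static inline unsigned long hgatp_value(unsigned long mode,
					unsigned long vmid,
					unsigned long root_ppn)
{
	return (mode << HGATP_MODE_SHIFT) |
	       ((vmid << HGATP_VMID_SHIFT) & HGATP_VMID_MASK) |
	       (root_ppn & HGATP_PPN);
}
#endif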

#define TOPI_IID_SHIFT 16
#define TOPI_IID_MASK 0xfff
#define TOPI_IPRIO_MASK 0xff

#if __riscv_xlen == 64
#define MHPMEVENT_OF (_UL(1) << 63)
#define MHPMEVENT_MINH (_UL(1) << 62)
#define MHPMEVENT_SINH (_UL(1) << 61)
#define MHPMEVENT_UINH (_UL(1) << 60)
#define MHPMEVENT_VSINH (_UL(1) << 59)
#define MHPMEVENT_VUINH (_UL(1) << 58)
#else
#define MHPMEVENTH_OF (_ULL(1) << 31)
#define MHPMEVENTH_MINH (_ULL(1) << 30)
#define MHPMEVENTH_SINH (_ULL(1) << 29)
#define MHPMEVENTH_UINH (_ULL(1) << 28)
#define MHPMEVENTH_VSINH (_ULL(1) << 27)
#define MHPMEVENTH_VUINH (_ULL(1) << 26)

#define MHPMEVENT_OF (MHPMEVENTH_OF << 32)
#define MHPMEVENT_MINH (MHPMEVENTH_MINH << 32)
#define MHPMEVENT_SINH (MHPMEVENTH_SINH << 32)
#define MHPMEVENT_UINH (MHPMEVENTH_UINH << 32)
#define MHPMEVENT_VSINH (MHPMEVENTH_VSINH << 32)
#define MHPMEVENT_VUINH (MHPMEVENTH_VUINH << 32)

#endif

#define MHPMEVENT_SSCOF_MASK _ULL(0xFF00000000000000)

#define ENVCFG_STCE (_ULL(1) << 63)
#define ENVCFG_PBMTE (_ULL(1) << 62)
#define ENVCFG_ADUE_SHIFT 61
#define ENVCFG_ADUE (_ULL(1) << ENVCFG_ADUE_SHIFT)
#define ENVCFG_CDE (_ULL(1) << 60)
#define ENVCFG_DTE_SHIFT 59
#define ENVCFG_DTE (_ULL(1) << ENVCFG_DTE_SHIFT)
#define ENVCFG_PMM (_ULL(0x3) << 32)
#define ENVCFG_PMM_PMLEN_0 (_ULL(0x0) << 32)
#define ENVCFG_PMM_PMLEN_7 (_ULL(0x2) << 32)
#define ENVCFG_PMM_PMLEN_16 (_ULL(0x3) << 32)
#define ENVCFG_CBZE (_UL(1) << 7)
#define ENVCFG_CBCFE (_UL(1) << 6)
#define ENVCFG_CBIE_SHIFT 4
#define ENVCFG_CBIE (_UL(0x3) << ENVCFG_CBIE_SHIFT)
#define ENVCFG_CBIE_ILL _UL(0x0)
#define ENVCFG_CBIE_FLUSH _UL(0x1)
#define ENVCFG_CBIE_INV _UL(0x3)
#define ENVCFG_SSE_SHIFT 3
#define ENVCFG_SSE (_UL(1) << ENVCFG_SSE_SHIFT)
#define ENVCFG_LPE_SHIFT 2
#define ENVCFG_LPE (_UL(1) << ENVCFG_LPE_SHIFT)
#define ENVCFG_FIOM _UL(0x1)
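
/*
 * Illustrative sketch (not part of the upstream header): *envcfg fields are
 * combined by OR-ing the single-bit enables with the multi-bit CBIE field
 * shifted into place. The value below would permit cbo.zero, cbo.clean/flush
 * and cbo.inval (as invalidate) plus FIOM for the next lower privilege
 * level; it is only a hypothetical example value.
 */
#define ENVCFG_EXAMPLE_CMO_VALUE \
	(ENVCFG_CBZE | ENVCFG_CBCFE | \
	 (ENVCFG_CBIE_INV << ENVCFG_CBIE_SHIFT) | ENVCFG_FIOM)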

/* ===== User-level CSRs ===== */

/* User Trap Setup (N-extension) */
#define CSR_USTATUS 0x000
#define CSR_UIE 0x004
#define CSR_UTVEC 0x005
#define CSR_SSP 0x011

/* User Trap Handling (N-extension) */
#define CSR_USCRATCH 0x040
#define CSR_UEPC 0x041
#define CSR_UCAUSE 0x042
#define CSR_UTVAL 0x043
#define CSR_UIP 0x044

/* User Floating-point CSRs */
#define CSR_FFLAGS 0x001
#define CSR_FRM 0x002
#define CSR_FCSR 0x003

/* User Counters/Timers */
#define CSR_CYCLE 0xc00
#define CSR_TIME 0xc01
#define CSR_INSTRET 0xc02
#define CSR_HPMCOUNTER3 0xc03
#define CSR_HPMCOUNTER4 0xc04
#define CSR_HPMCOUNTER5 0xc05
#define CSR_HPMCOUNTER6 0xc06
#define CSR_HPMCOUNTER7 0xc07
#define CSR_HPMCOUNTER8 0xc08
#define CSR_HPMCOUNTER9 0xc09
#define CSR_HPMCOUNTER10 0xc0a
#define CSR_HPMCOUNTER11 0xc0b
#define CSR_HPMCOUNTER12 0xc0c
#define CSR_HPMCOUNTER13 0xc0d
#define CSR_HPMCOUNTER14 0xc0e
#define CSR_HPMCOUNTER15 0xc0f
#define CSR_HPMCOUNTER16 0xc10
#define CSR_HPMCOUNTER17 0xc11
#define CSR_HPMCOUNTER18 0xc12
#define CSR_HPMCOUNTER19 0xc13
#define CSR_HPMCOUNTER20 0xc14
#define CSR_HPMCOUNTER21 0xc15
#define CSR_HPMCOUNTER22 0xc16
#define CSR_HPMCOUNTER23 0xc17
#define CSR_HPMCOUNTER24 0xc18
#define CSR_HPMCOUNTER25 0xc19
#define CSR_HPMCOUNTER26 0xc1a
#define CSR_HPMCOUNTER27 0xc1b
#define CSR_HPMCOUNTER28 0xc1c
#define CSR_HPMCOUNTER29 0xc1d
#define CSR_HPMCOUNTER30 0xc1e
#define CSR_HPMCOUNTER31 0xc1f
#define CSR_CYCLEH 0xc80
#define CSR_TIMEH 0xc81
#define CSR_INSTRETH 0xc82
#define CSR_HPMCOUNTER3H 0xc83
#define CSR_HPMCOUNTER4H 0xc84
#define CSR_HPMCOUNTER5H 0xc85
#define CSR_HPMCOUNTER6H 0xc86
#define CSR_HPMCOUNTER7H 0xc87
#define CSR_HPMCOUNTER8H 0xc88
#define CSR_HPMCOUNTER9H 0xc89
#define CSR_HPMCOUNTER10H 0xc8a
#define CSR_HPMCOUNTER11H 0xc8b
#define CSR_HPMCOUNTER12H 0xc8c
#define CSR_HPMCOUNTER13H 0xc8d
#define CSR_HPMCOUNTER14H 0xc8e
#define CSR_HPMCOUNTER15H 0xc8f
#define CSR_HPMCOUNTER16H 0xc90
#define CSR_HPMCOUNTER17H 0xc91
#define CSR_HPMCOUNTER18H 0xc92
#define CSR_HPMCOUNTER19H 0xc93
#define CSR_HPMCOUNTER20H 0xc94
#define CSR_HPMCOUNTER21H 0xc95
#define CSR_HPMCOUNTER22H 0xc96
#define CSR_HPMCOUNTER23H 0xc97
#define CSR_HPMCOUNTER24H 0xc98
#define CSR_HPMCOUNTER25H 0xc99
#define CSR_HPMCOUNTER26H 0xc9a
#define CSR_HPMCOUNTER27H 0xc9b
#define CSR_HPMCOUNTER28H 0xc9c
#define CSR_HPMCOUNTER29H 0xc9d
#define CSR_HPMCOUNTER30H 0xc9e
#define CSR_HPMCOUNTER31H 0xc9f

/* ===== Supervisor-level CSRs ===== */

/* Supervisor Trap Setup */
#define CSR_SSTATUS 0x100
#define CSR_SIE 0x104
#define CSR_STVEC 0x105
#define CSR_SCOUNTEREN 0x106

/* Supervisor Configuration */
#define CSR_SENVCFG 0x10a

/* Supervisor Counter Inhibit */
#define CSR_SCOUNTINHIBIT 0x120

/* Supervisor Trap Handling */
#define CSR_SSCRATCH 0x140
#define CSR_SEPC 0x141
#define CSR_SCAUSE 0x142
#define CSR_STVAL 0x143
#define CSR_SIP 0x144

/* Sstc extension */
#define CSR_STIMECMP 0x14D
#define CSR_STIMECMPH 0x15D

/* Supervisor Protection and Translation */
#define CSR_SATP 0x180

/* Supervisor Indirect Register Alias */
#define CSR_SISELECT 0x150
#define CSR_SIREG 0x151
#define CSR_SIREG2 0x152
#define CSR_SIREG3 0x153
#define CSR_SIREG4 0x155
#define CSR_SIREG5 0x156
#define CSR_SIREG6 0x157

/* Supervisor-Level Interrupts (AIA) */
#define CSR_STOPEI 0x15c
#define CSR_STOPI 0xdb0

/* Supervisor-Level High-Half CSRs (AIA) */
#define CSR_SIEH 0x114
#define CSR_SIPH 0x154

/* Supervisor stateen CSRs */
#define CSR_SSTATEEN0 0x10C
#define CSR_SSTATEEN1 0x10D
#define CSR_SSTATEEN2 0x10E
#define CSR_SSTATEEN3 0x10F

/* Machine-Level Control transfer records CSRs */
#define CSR_MCTRCTL 0x34e

/* Supervisor-Level Control transfer records CSRs */
#define CSR_SCTRCTL 0x14e
#define CSR_SCTRSTATUS 0x14f
#define CSR_SCTRDEPTH 0x15f

/* VS-Level Control transfer records CSRs */
#define CSR_VSCTRCTL 0x24e

/* ===== Hypervisor-level CSRs ===== */

/* Hypervisor Trap Setup (H-extension) */
#define CSR_HSTATUS 0x600
#define CSR_HEDELEG 0x602
#define CSR_HIDELEG 0x603
#define CSR_HIE 0x604
#define CSR_HCOUNTEREN 0x606
#define CSR_HGEIE 0x607

/* Hypervisor Configuration */
#define CSR_HENVCFG 0x60a
#define CSR_HENVCFGH 0x61a

/* Hypervisor Trap Handling (H-extension) */
#define CSR_HTVAL 0x643
#define CSR_HIP 0x644
#define CSR_HVIP 0x645
#define CSR_HTINST 0x64a
#define CSR_HGEIP 0xe12

/* Hypervisor Protection and Translation (H-extension) */
#define CSR_HGATP 0x680

/* Hypervisor Counter/Timer Virtualization Registers (H-extension) */
#define CSR_HTIMEDELTA 0x605
#define CSR_HTIMEDELTAH 0x615

/* Virtual Supervisor Registers (H-extension) */
#define CSR_VSSTATUS 0x200
#define CSR_VSIE 0x204
#define CSR_VSTVEC 0x205
#define CSR_VSSCRATCH 0x240
#define CSR_VSEPC 0x241
#define CSR_VSCAUSE 0x242
#define CSR_VSTVAL 0x243
#define CSR_VSIP 0x244
#define CSR_VSATP 0x280

/* Virtual Interrupts and Interrupt Priorities (H-extension with AIA) */
#define CSR_HVIEN 0x608
#define CSR_HVICTL 0x609
#define CSR_HVIPRIO1 0x646
#define CSR_HVIPRIO2 0x647

/* Virtual Supervisor Indirect Alias */
#define CSR_VSISELECT 0x250
#define CSR_VSIREG 0x251
#define CSR_VSIREG2 0x252
#define CSR_VSIREG3 0x253
#define CSR_VSIREG4 0x255
#define CSR_VSIREG5 0x256
#define CSR_VSIREG6 0x257

/* VS-Level Interrupts (H-extension with AIA) */
#define CSR_VSTOPEI 0x25c
#define CSR_VSTOPI 0xeb0

/* Hypervisor and VS-Level High-Half CSRs (H-extension with AIA) */
#define CSR_HIDELEGH 0x613
#define CSR_HVIENH 0x618
#define CSR_HVIPH 0x655
#define CSR_HVIPRIO1H 0x656
#define CSR_HVIPRIO2H 0x657
#define CSR_VSIEH 0x214
#define CSR_VSIPH 0x254

/* Hypervisor stateen CSRs */
#define CSR_HSTATEEN0 0x60C
#define CSR_HSTATEEN0H 0x61C
#define CSR_HSTATEEN1 0x60D
#define CSR_HSTATEEN1H 0x61D
#define CSR_HSTATEEN2 0x60E
#define CSR_HSTATEEN2H 0x61E
#define CSR_HSTATEEN3 0x60F
#define CSR_HSTATEEN3H 0x61F

/* ===== Machine-level CSRs ===== */

/* Machine Information Registers */
#define CSR_MVENDORID 0xf11
#define CSR_MARCHID 0xf12
#define CSR_MIMPID 0xf13
#define CSR_MHARTID 0xf14
#define CSR_MCONFIGPTR 0xf15

/* Machine Trap Setup */
#define CSR_MSTATUS 0x300
#define CSR_MISA 0x301
#define CSR_MEDELEG 0x302
#define CSR_MIDELEG 0x303
#define CSR_MIE 0x304
#define CSR_MTVEC 0x305
#define CSR_MCOUNTEREN 0x306
#define CSR_MSTATUSH 0x310

/* Machine Configuration */
#define CSR_MENVCFG 0x30a
#define CSR_MENVCFGH 0x31a

/* Machine Trap Handling */
#define CSR_MSCRATCH 0x340
#define CSR_MEPC 0x341
#define CSR_MCAUSE 0x342
#define CSR_MTVAL 0x343
#define CSR_MIP 0x344
#define CSR_MTINST 0x34a
#define CSR_MTVAL2 0x34b

/* Machine Memory Protection */
#define CSR_PMPCFG0 0x3a0
#define CSR_PMPCFG1 0x3a1
#define CSR_PMPCFG2 0x3a2
#define CSR_PMPCFG3 0x3a3
#define CSR_PMPCFG4 0x3a4
#define CSR_PMPCFG5 0x3a5
#define CSR_PMPCFG6 0x3a6
#define CSR_PMPCFG7 0x3a7
#define CSR_PMPCFG8 0x3a8
#define CSR_PMPCFG9 0x3a9
#define CSR_PMPCFG10 0x3aa
#define CSR_PMPCFG11 0x3ab
#define CSR_PMPCFG12 0x3ac
#define CSR_PMPCFG13 0x3ad
#define CSR_PMPCFG14 0x3ae
#define CSR_PMPCFG15 0x3af
#define CSR_PMPADDR0 0x3b0
#define CSR_PMPADDR1 0x3b1
#define CSR_PMPADDR2 0x3b2
#define CSR_PMPADDR3 0x3b3
#define CSR_PMPADDR4 0x3b4
#define CSR_PMPADDR5 0x3b5
#define CSR_PMPADDR6 0x3b6
#define CSR_PMPADDR7 0x3b7
#define CSR_PMPADDR8 0x3b8
#define CSR_PMPADDR9 0x3b9
#define CSR_PMPADDR10 0x3ba
#define CSR_PMPADDR11 0x3bb
#define CSR_PMPADDR12 0x3bc
#define CSR_PMPADDR13 0x3bd
#define CSR_PMPADDR14 0x3be
#define CSR_PMPADDR15 0x3bf
#define CSR_PMPADDR16 0x3c0
#define CSR_PMPADDR17 0x3c1
#define CSR_PMPADDR18 0x3c2
#define CSR_PMPADDR19 0x3c3
#define CSR_PMPADDR20 0x3c4
#define CSR_PMPADDR21 0x3c5
#define CSR_PMPADDR22 0x3c6
#define CSR_PMPADDR23 0x3c7
#define CSR_PMPADDR24 0x3c8
#define CSR_PMPADDR25 0x3c9
#define CSR_PMPADDR26 0x3ca
#define CSR_PMPADDR27 0x3cb
#define CSR_PMPADDR28 0x3cc
#define CSR_PMPADDR29 0x3cd
#define CSR_PMPADDR30 0x3ce
#define CSR_PMPADDR31 0x3cf
#define CSR_PMPADDR32 0x3d0
#define CSR_PMPADDR33 0x3d1
#define CSR_PMPADDR34 0x3d2
#define CSR_PMPADDR35 0x3d3
#define CSR_PMPADDR36 0x3d4
#define CSR_PMPADDR37 0x3d5
#define CSR_PMPADDR38 0x3d6
#define CSR_PMPADDR39 0x3d7
#define CSR_PMPADDR40 0x3d8
#define CSR_PMPADDR41 0x3d9
#define CSR_PMPADDR42 0x3da
#define CSR_PMPADDR43 0x3db
#define CSR_PMPADDR44 0x3dc
#define CSR_PMPADDR45 0x3dd
#define CSR_PMPADDR46 0x3de
#define CSR_PMPADDR47 0x3df
#define CSR_PMPADDR48 0x3e0
#define CSR_PMPADDR49 0x3e1
#define CSR_PMPADDR50 0x3e2
#define CSR_PMPADDR51 0x3e3
#define CSR_PMPADDR52 0x3e4
#define CSR_PMPADDR53 0x3e5
#define CSR_PMPADDR54 0x3e6
#define CSR_PMPADDR55 0x3e7
#define CSR_PMPADDR56 0x3e8
#define CSR_PMPADDR57 0x3e9
#define CSR_PMPADDR58 0x3ea
#define CSR_PMPADDR59 0x3eb
#define CSR_PMPADDR60 0x3ec
#define CSR_PMPADDR61 0x3ed
#define CSR_PMPADDR62 0x3ee
#define CSR_PMPADDR63 0x3ef

/* Machine Counters/Timers */
#define CSR_MCYCLE 0xb00
#define CSR_MINSTRET 0xb02
#define CSR_MHPMCOUNTER3 0xb03
#define CSR_MHPMCOUNTER4 0xb04
#define CSR_MHPMCOUNTER5 0xb05
#define CSR_MHPMCOUNTER6 0xb06
#define CSR_MHPMCOUNTER7 0xb07
#define CSR_MHPMCOUNTER8 0xb08
#define CSR_MHPMCOUNTER9 0xb09
#define CSR_MHPMCOUNTER10 0xb0a
#define CSR_MHPMCOUNTER11 0xb0b
#define CSR_MHPMCOUNTER12 0xb0c
#define CSR_MHPMCOUNTER13 0xb0d
#define CSR_MHPMCOUNTER14 0xb0e
#define CSR_MHPMCOUNTER15 0xb0f
#define CSR_MHPMCOUNTER16 0xb10
#define CSR_MHPMCOUNTER17 0xb11
#define CSR_MHPMCOUNTER18 0xb12
#define CSR_MHPMCOUNTER19 0xb13
#define CSR_MHPMCOUNTER20 0xb14
#define CSR_MHPMCOUNTER21 0xb15
#define CSR_MHPMCOUNTER22 0xb16
#define CSR_MHPMCOUNTER23 0xb17
#define CSR_MHPMCOUNTER24 0xb18
#define CSR_MHPMCOUNTER25 0xb19
#define CSR_MHPMCOUNTER26 0xb1a
#define CSR_MHPMCOUNTER27 0xb1b
#define CSR_MHPMCOUNTER28 0xb1c
#define CSR_MHPMCOUNTER29 0xb1d
#define CSR_MHPMCOUNTER30 0xb1e
#define CSR_MHPMCOUNTER31 0xb1f
#define CSR_MCYCLEH 0xb80
#define CSR_MINSTRETH 0xb82
#define CSR_MHPMCOUNTER3H 0xb83
#define CSR_MHPMCOUNTER4H 0xb84
#define CSR_MHPMCOUNTER5H 0xb85
#define CSR_MHPMCOUNTER6H 0xb86
#define CSR_MHPMCOUNTER7H 0xb87
#define CSR_MHPMCOUNTER8H 0xb88
#define CSR_MHPMCOUNTER9H 0xb89
#define CSR_MHPMCOUNTER10H 0xb8a
#define CSR_MHPMCOUNTER11H 0xb8b
#define CSR_MHPMCOUNTER12H 0xb8c
#define CSR_MHPMCOUNTER13H 0xb8d
#define CSR_MHPMCOUNTER14H 0xb8e
#define CSR_MHPMCOUNTER15H 0xb8f
#define CSR_MHPMCOUNTER16H 0xb90
#define CSR_MHPMCOUNTER17H 0xb91
#define CSR_MHPMCOUNTER18H 0xb92
#define CSR_MHPMCOUNTER19H 0xb93
#define CSR_MHPMCOUNTER20H 0xb94
#define CSR_MHPMCOUNTER21H 0xb95
#define CSR_MHPMCOUNTER22H 0xb96
#define CSR_MHPMCOUNTER23H 0xb97
#define CSR_MHPMCOUNTER24H 0xb98
#define CSR_MHPMCOUNTER25H 0xb99
#define CSR_MHPMCOUNTER26H 0xb9a
#define CSR_MHPMCOUNTER27H 0xb9b
#define CSR_MHPMCOUNTER28H 0xb9c
#define CSR_MHPMCOUNTER29H 0xb9d
#define CSR_MHPMCOUNTER30H 0xb9e
#define CSR_MHPMCOUNTER31H 0xb9f

/* Machine Counter Setup */
#define CSR_MCOUNTINHIBIT 0x320
#define CSR_MCYCLECFG 0x321
#define CSR_MINSTRETCFG 0x322
#define CSR_MHPMEVENT3 0x323
#define CSR_MHPMEVENT4 0x324
#define CSR_MHPMEVENT5 0x325
#define CSR_MHPMEVENT6 0x326
#define CSR_MHPMEVENT7 0x327
#define CSR_MHPMEVENT8 0x328
#define CSR_MHPMEVENT9 0x329
#define CSR_MHPMEVENT10 0x32a
#define CSR_MHPMEVENT11 0x32b
#define CSR_MHPMEVENT12 0x32c
#define CSR_MHPMEVENT13 0x32d
#define CSR_MHPMEVENT14 0x32e
#define CSR_MHPMEVENT15 0x32f
#define CSR_MHPMEVENT16 0x330
#define CSR_MHPMEVENT17 0x331
#define CSR_MHPMEVENT18 0x332
#define CSR_MHPMEVENT19 0x333
#define CSR_MHPMEVENT20 0x334
#define CSR_MHPMEVENT21 0x335
#define CSR_MHPMEVENT22 0x336
#define CSR_MHPMEVENT23 0x337
#define CSR_MHPMEVENT24 0x338
#define CSR_MHPMEVENT25 0x339
#define CSR_MHPMEVENT26 0x33a
#define CSR_MHPMEVENT27 0x33b
#define CSR_MHPMEVENT28 0x33c
#define CSR_MHPMEVENT29 0x33d
#define CSR_MHPMEVENT30 0x33e
#define CSR_MHPMEVENT31 0x33f

/* For RV32 */
#define CSR_MCYCLECFGH 0x721
#define CSR_MINSTRETCFGH 0x722
#define CSR_MHPMEVENT3H 0x723
#define CSR_MHPMEVENT4H 0x724
#define CSR_MHPMEVENT5H 0x725
#define CSR_MHPMEVENT6H 0x726
#define CSR_MHPMEVENT7H 0x727
#define CSR_MHPMEVENT8H 0x728
#define CSR_MHPMEVENT9H 0x729
#define CSR_MHPMEVENT10H 0x72a
#define CSR_MHPMEVENT11H 0x72b
#define CSR_MHPMEVENT12H 0x72c
#define CSR_MHPMEVENT13H 0x72d
#define CSR_MHPMEVENT14H 0x72e
#define CSR_MHPMEVENT15H 0x72f
#define CSR_MHPMEVENT16H 0x730
#define CSR_MHPMEVENT17H 0x731
#define CSR_MHPMEVENT18H 0x732
#define CSR_MHPMEVENT19H 0x733
#define CSR_MHPMEVENT20H 0x734
#define CSR_MHPMEVENT21H 0x735
#define CSR_MHPMEVENT22H 0x736
#define CSR_MHPMEVENT23H 0x737
#define CSR_MHPMEVENT24H 0x738
#define CSR_MHPMEVENT25H 0x739
#define CSR_MHPMEVENT26H 0x73a
#define CSR_MHPMEVENT27H 0x73b
#define CSR_MHPMEVENT28H 0x73c
#define CSR_MHPMEVENT29H 0x73d
#define CSR_MHPMEVENT30H 0x73e
#define CSR_MHPMEVENT31H 0x73f

/* Machine Security Configuration CSR (mseccfg) */
#define CSR_MSECCFG 0x747
#define CSR_MSECCFGH 0x757

#define MSECCFG_MML_SHIFT (0)
#define MSECCFG_MML (_UL(1) << MSECCFG_MML_SHIFT)
#define MSECCFG_MMWP_SHIFT (1)
#define MSECCFG_MMWP (_UL(1) << MSECCFG_MMWP_SHIFT)
#define MSECCFG_RLB_SHIFT (2)
#define MSECCFG_RLB (_UL(1) << MSECCFG_RLB_SHIFT)
#define MSECCFG_USEED_SHIFT (8)
#define MSECCFG_USEED (_UL(1) << MSECCFG_USEED_SHIFT)
#define MSECCFG_SSEED_SHIFT (9)
#define MSECCFG_SSEED (_UL(1) << MSECCFG_SSEED_SHIFT)

/* Counter Overflow CSR */
#define CSR_SCOUNTOVF 0xda0

/* Debug/Trace Registers */
#define CSR_TSELECT 0x7a0
#define CSR_TDATA1 0x7a1
#define CSR_TDATA2 0x7a2
#define CSR_TDATA3 0x7a3
#define CSR_TINFO 0x7a4

/* Debug Mode Registers */
#define CSR_DCSR 0x7b0
#define CSR_DPC 0x7b1
#define CSR_DSCRATCH0 0x7b2
#define CSR_DSCRATCH1 0x7b3

/* Machine Indirect Register Alias */
#define CSR_MISELECT 0x350
#define CSR_MIREG 0x351
#define CSR_MIREG2 0x352
#define CSR_MIREG3 0x353
#define CSR_MIREG4 0x355
#define CSR_MIREG5 0x356
#define CSR_MIREG6 0x357

/* Machine-Level Interrupts (AIA) */
#define CSR_MTOPEI 0x35c
#define CSR_MTOPI 0xfb0

/* Virtual Interrupts for Supervisor Level (AIA) */
#define CSR_MVIEN 0x308
#define CSR_MVIP 0x309

/* Smstateen extension registers */
/* Machine stateen CSRs */
#define CSR_MSTATEEN0 0x30C
#define CSR_MSTATEEN0H 0x31C
#define CSR_MSTATEEN1 0x30D
#define CSR_MSTATEEN1H 0x31D
#define CSR_MSTATEEN2 0x30E
#define CSR_MSTATEEN2H 0x31E
#define CSR_MSTATEEN3 0x30F
#define CSR_MSTATEEN3H 0x31F

/* Machine-Level High-Half CSRs (AIA) */
#define CSR_MIDELEGH 0x313
#define CSR_MIEH 0x314
#define CSR_MVIENH 0x318
#define CSR_MVIPH 0x319
#define CSR_MIPH 0x354

/* Vector extension registers */
#define CSR_VSTART 0x8
#define CSR_VL 0xc20
#define CSR_VTYPE 0xc21
#define CSR_VLENB 0xc22

/* ===== Trap/Exception Causes ===== */

#define CAUSE_MISALIGNED_FETCH 0x0
#define CAUSE_FETCH_ACCESS 0x1
#define CAUSE_ILLEGAL_INSTRUCTION 0x2
#define CAUSE_BREAKPOINT 0x3
#define CAUSE_MISALIGNED_LOAD 0x4
#define CAUSE_LOAD_ACCESS 0x5
#define CAUSE_MISALIGNED_STORE 0x6
#define CAUSE_STORE_ACCESS 0x7
#define CAUSE_USER_ECALL 0x8
#define CAUSE_SUPERVISOR_ECALL 0x9
#define CAUSE_VIRTUAL_SUPERVISOR_ECALL 0xa
#define CAUSE_MACHINE_ECALL 0xb
#define CAUSE_FETCH_PAGE_FAULT 0xc
#define CAUSE_LOAD_PAGE_FAULT 0xd
#define CAUSE_STORE_PAGE_FAULT 0xf
#define CAUSE_DOUBLE_TRAP 0x10
#define CAUSE_SW_CHECK_EXCP 0x12
#define CAUSE_FETCH_GUEST_PAGE_FAULT 0x14
#define CAUSE_LOAD_GUEST_PAGE_FAULT 0x15
#define CAUSE_VIRTUAL_INST_FAULT 0x16
#define CAUSE_STORE_GUEST_PAGE_FAULT 0x17

/* Common defines for all smstateen */
#define SMSTATEEN_MAX_COUNT 4
#define SMSTATEEN0_CS_SHIFT 0
#define SMSTATEEN0_CS (_ULL(1) << SMSTATEEN0_CS_SHIFT)
#define SMSTATEEN0_FCSR_SHIFT 1
#define SMSTATEEN0_FCSR (_ULL(1) << SMSTATEEN0_FCSR_SHIFT)
#define SMSTATEEN0_CTR_SHIFT 54
#define SMSTATEEN0_CTR (_ULL(1) << SMSTATEEN0_CTR_SHIFT)
#define SMSTATEEN0_CONTEXT_SHIFT 57
#define SMSTATEEN0_CONTEXT (_ULL(1) << SMSTATEEN0_CONTEXT_SHIFT)
#define SMSTATEEN0_IMSIC_SHIFT 58
#define SMSTATEEN0_IMSIC (_ULL(1) << SMSTATEEN0_IMSIC_SHIFT)
#define SMSTATEEN0_AIA_SHIFT 59
#define SMSTATEEN0_AIA (_ULL(1) << SMSTATEEN0_AIA_SHIFT)
#define SMSTATEEN0_SVSLCT_SHIFT 60
#define SMSTATEEN0_SVSLCT (_ULL(1) << SMSTATEEN0_SVSLCT_SHIFT)
#define SMSTATEEN0_HSENVCFG_SHIFT 62
#define SMSTATEEN0_HSENVCFG (_ULL(1) << SMSTATEEN0_HSENVCFG_SHIFT)
#define SMSTATEEN_STATEN_SHIFT 63
#define SMSTATEEN_STATEN (_ULL(1) << SMSTATEEN_STATEN_SHIFT)

/* ===== Instruction Encodings ===== */

#define INSN_MATCH_LB 0x3
#define INSN_MASK_LB 0x707f
#define INSN_MATCH_LH 0x1003
#define INSN_MASK_LH 0x707f
#define INSN_MATCH_LW 0x2003
#define INSN_MASK_LW 0x707f
#define INSN_MATCH_LD 0x3003
#define INSN_MASK_LD 0x707f
#define INSN_MATCH_LBU 0x4003
#define INSN_MASK_LBU 0x707f
#define INSN_MATCH_LHU 0x5003
#define INSN_MASK_LHU 0x707f
#define INSN_MATCH_LWU 0x6003
#define INSN_MASK_LWU 0x707f
#define INSN_MATCH_SB 0x23
#define INSN_MASK_SB 0x707f
#define INSN_MATCH_SH 0x1023
#define INSN_MASK_SH 0x707f
#define INSN_MATCH_SW 0x2023
#define INSN_MASK_SW 0x707f
#define INSN_MATCH_SD 0x3023
#define INSN_MASK_SD 0x707f

#define INSN_MATCH_FLW 0x2007
#define INSN_MASK_FLW 0x707f
#define INSN_MATCH_FLD 0x3007
#define INSN_MASK_FLD 0x707f
#define INSN_MATCH_FLQ 0x4007
#define INSN_MASK_FLQ 0x707f
#define INSN_MATCH_FSW 0x2027
#define INSN_MASK_FSW 0x707f
#define INSN_MATCH_FSD 0x3027
#define INSN_MASK_FSD 0x707f
#define INSN_MATCH_FSQ 0x4027
#define INSN_MASK_FSQ 0x707f

#define INSN_MATCH_C_LD 0x6000
#define INSN_MASK_C_LD 0xe003
#define INSN_MATCH_C_SD 0xe000
#define INSN_MASK_C_SD 0xe003
#define INSN_MATCH_C_LW 0x4000
#define INSN_MASK_C_LW 0xe003
#define INSN_MATCH_C_SW 0xc000
#define INSN_MASK_C_SW 0xe003
#define INSN_MATCH_C_LDSP 0x6002
#define INSN_MASK_C_LDSP 0xe003
#define INSN_MATCH_C_SDSP 0xe002
#define INSN_MASK_C_SDSP 0xe003
#define INSN_MATCH_C_LWSP 0x4002
#define INSN_MASK_C_LWSP 0xe003
#define INSN_MATCH_C_SWSP 0xc002
#define INSN_MASK_C_SWSP 0xe003

#define INSN_MATCH_C_FLD 0x2000
#define INSN_MASK_C_FLD 0xe003
#define INSN_MATCH_C_FLW 0x6000
#define INSN_MASK_C_FLW 0xe003
#define INSN_MATCH_C_FSD 0xa000
#define INSN_MASK_C_FSD 0xe003
#define INSN_MATCH_C_FSW 0xe000
#define INSN_MASK_C_FSW 0xe003
#define INSN_MATCH_C_FLDSP 0x2002
#define INSN_MASK_C_FLDSP 0xe003
#define INSN_MATCH_C_FSDSP 0xa002
#define INSN_MASK_C_FSDSP 0xe003
#define INSN_MATCH_C_FLWSP 0x6002
#define INSN_MASK_C_FLWSP 0xe003
#define INSN_MATCH_C_FSWSP 0xe002
#define INSN_MASK_C_FSWSP 0xe003

#define INSN_MATCH_C_LHU 0x8400
#define INSN_MASK_C_LHU 0xfc43
#define INSN_MATCH_C_LH 0x8440
#define INSN_MASK_C_LH 0xfc43
#define INSN_MATCH_C_SH 0x8c00
#define INSN_MASK_C_SH 0xfc43

#define INSN_MASK_WFI 0xffffff00
#define INSN_MATCH_WFI 0x10500000

#define INSN_MASK_FENCE_TSO 0xffffffff
#define INSN_MATCH_FENCE_TSO 0x8330000f

#define INSN_MASK_VECTOR_UNIT_STRIDE 0xfdf0707f
#define INSN_MASK_VECTOR_FAULT_ONLY_FIRST 0xfdf0707f
#define INSN_MASK_VECTOR_STRIDE 0xfc00707f
#define INSN_MASK_VECTOR_WHOLE_REG 0xfff0707f
#define INSN_MASK_VECTOR_INDEXED 0xfc00707f

#define INSN_MATCH_VLUXSEG(n, bits) ((((n) - 1) << 29) | 0x04000007 | \
	((bits) == 16 ? 5 : (bits) == 32 ? 6 : 7) << 12)
#define INSN_MATCH_VSUXSEG(n, bits) ((((n) - 1) << 29) | 0x04000027 | \
	((bits) == 16 ? 5 : (bits) == 32 ? 6 : 7) << 12)
#define INSN_MATCH_VLOXSEG(n, bits) ((((n) - 1) << 29) | 0x0c000007 | \
	((bits) == 16 ? 5 : (bits) == 32 ? 6 : 7) << 12)
#define INSN_MATCH_VSOXSEG(n, bits) ((((n) - 1) << 29) | 0x0c000027 | \
	((bits) == 16 ? 5 : (bits) == 32 ? 6 : 7) << 12)
#define INSN_MATCH_VLSSEG(n, bits) ((((n) - 1) << 29) | 0x08000007 | \
	((bits) == 16 ? 5 : (bits) == 32 ? 6 : 7) << 12)
#define INSN_MATCH_VSSSEG(n, bits) ((((n) - 1) << 29) | 0x08000027 | \
	((bits) == 16 ? 5 : (bits) == 32 ? 6 : 7) << 12)
#define INSN_MATCH_VSSEG(n, bits) ((((n) - 1) << 29) | 0x00004027 | \
	((bits) == 16 ? 5 : (bits) == 32 ? 6 : 7) << 12)
#define INSN_MATCH_VLSEG(n, bits) ((((n) - 1) << 29) | 0x00004007 | \
	((bits) == 16 ? 5 : (bits) == 32 ? 6 : 7) << 12)
#define INSN_MATCH_VLSEGFF(n, bits) ((((n) - 1) << 29) | 0x1000007 | \
	((bits) == 16 ? 5 : (bits) == 32 ? 6 : 7) << 12)

#define INSN_MATCH_VLE16V 0x00005007
#define INSN_MATCH_VLE32V 0x00006007
#define INSN_MATCH_VLE64V 0x00007007
#define INSN_MATCH_VSE16V 0x00005027
#define INSN_MATCH_VSE32V 0x00006027
#define INSN_MATCH_VSE64V 0x00007027
#define INSN_MATCH_VLSE16V 0x08005007
#define INSN_MATCH_VLSE32V 0x08006007
#define INSN_MATCH_VLSE64V 0x08007007
#define INSN_MATCH_VSSE16V 0x08005027
#define INSN_MATCH_VSSE32V 0x08006027
#define INSN_MATCH_VSSE64V 0x08007027
#define INSN_MATCH_VLOXEI16V 0x0c005007
#define INSN_MATCH_VLOXEI32V 0x0c006007
#define INSN_MATCH_VLOXEI64V 0x0c007007
#define INSN_MATCH_VSOXEI16V 0x0c005027
#define INSN_MATCH_VSOXEI32V 0x0c006027
#define INSN_MATCH_VSOXEI64V 0x0c007027
#define INSN_MATCH_VLUXEI16V 0x04005007
#define INSN_MATCH_VLUXEI32V 0x04006007
#define INSN_MATCH_VLUXEI64V 0x04007007
#define INSN_MATCH_VSUXEI16V 0x04005027
#define INSN_MATCH_VSUXEI32V 0x04006027
#define INSN_MATCH_VSUXEI64V 0x04007027
#define INSN_MATCH_VLE16FFV 0x01005007
#define INSN_MATCH_VLE32FFV 0x01006007
#define INSN_MATCH_VLE64FFV 0x01007007
#define INSN_MATCH_VL1RE8V 0x02800007
#define INSN_MATCH_VL1RE16V 0x02805007
#define INSN_MATCH_VL1RE32V 0x02806007
#define INSN_MATCH_VL1RE64V 0x02807007
#define INSN_MATCH_VL2RE8V 0x22800007
#define INSN_MATCH_VL2RE16V 0x22805007
#define INSN_MATCH_VL2RE32V 0x22806007
#define INSN_MATCH_VL2RE64V 0x22807007
#define INSN_MATCH_VL4RE8V 0x62800007
#define INSN_MATCH_VL4RE16V 0x62805007
#define INSN_MATCH_VL4RE32V 0x62806007
#define INSN_MATCH_VL4RE64V 0x62807007
#define INSN_MATCH_VL8RE8V 0xe2800007
#define INSN_MATCH_VL8RE16V 0xe2805007
#define INSN_MATCH_VL8RE32V 0xe2806007
#define INSN_MATCH_VL8RE64V 0xe2807007
#define INSN_MATCH_VS1RV 0x02800027
#define INSN_MATCH_VS2RV 0x22800027
#define INSN_MATCH_VS4RV 0x62800027
#define INSN_MATCH_VS8RV 0xe2800027
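
/*
 * Illustrative note (not part of the upstream header): the parametric
 * INSN_MATCH_V*SEG() macros above generate the same encodings as the fixed
 * INSN_MATCH_* values for the single-field (NF = 1) case, for example
 * INSN_MATCH_VLSEG(1, 32) expands to 0x00006007 == INSN_MATCH_VLE32V and
 * INSN_MATCH_VLSSEG(1, 64) expands to 0x08007007 == INSN_MATCH_VLSE64V.
 */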

#define INSN_OPCODE_MASK 0x7f
#define INSN_OPCODE_VECTOR_LOAD 0x07
#define INSN_OPCODE_VECTOR_STORE 0x27
#define INSN_OPCODE_AMO 0x2f

#define IS_VECTOR_LOAD_STORE(insn) \
	((((insn) & INSN_OPCODE_MASK) == INSN_OPCODE_VECTOR_LOAD) || \
	(((insn) & INSN_OPCODE_MASK) == INSN_OPCODE_VECTOR_STORE))

#define IS_VECTOR_INSN_MATCH(insn, match, mask) \
	(((insn) & (mask)) == ((match) & (mask)))

#define IS_UNIT_STRIDE_MATCH(insn, match) \
	IS_VECTOR_INSN_MATCH(insn, match, INSN_MASK_VECTOR_UNIT_STRIDE)

#define IS_STRIDE_MATCH(insn, match) \
	IS_VECTOR_INSN_MATCH(insn, match, INSN_MASK_VECTOR_STRIDE)

#define IS_INDEXED_MATCH(insn, match) \
	IS_VECTOR_INSN_MATCH(insn, match, INSN_MASK_VECTOR_INDEXED)

#define IS_FAULT_ONLY_FIRST_MATCH(insn, match) \
	IS_VECTOR_INSN_MATCH(insn, match, INSN_MASK_VECTOR_FAULT_ONLY_FIRST)

#define IS_WHOLE_REG_MATCH(insn, match) \
	IS_VECTOR_INSN_MATCH(insn, match, INSN_MASK_VECTOR_WHOLE_REG)
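
/*
 * Illustrative sketch (not part of the upstream header): a trap handler
 * typically uses IS_VECTOR_LOAD_STORE() as a cheap opcode filter before
 * applying one of the exact-form IS_*_MATCH() checks above. The helper
 * name below is hypothetical.
 */
#ifndef __ASSEMBLER__
static inline int insn_is_vle64(unsigned long insn)
{
	return IS_VECTOR_LOAD_STORE(insn) &&
	       IS_UNIT_STRIDE_MATCH(insn, INSN_MATCH_VLE64V);
}
#endif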

#define IS_UNIT_STRIDE_LOAD(insn) ( \
	IS_UNIT_STRIDE_MATCH(insn, INSN_MATCH_VLE16V) || \
	IS_UNIT_STRIDE_MATCH(insn, INSN_MATCH_VLE32V) || \
	IS_UNIT_STRIDE_MATCH(insn, INSN_MATCH_VLE64V) || \
	IS_UNIT_STRIDE_MATCH(insn, INSN_MATCH_VLSEG(2, 16)) || \
	IS_UNIT_STRIDE_MATCH(insn, INSN_MATCH_VLSEG(3, 16)) || \
	IS_UNIT_STRIDE_MATCH(insn, INSN_MATCH_VLSEG(4, 16)) || \
	IS_UNIT_STRIDE_MATCH(insn, INSN_MATCH_VLSEG(5, 16)) || \
	IS_UNIT_STRIDE_MATCH(insn, INSN_MATCH_VLSEG(6, 16)) || \
	IS_UNIT_STRIDE_MATCH(insn, INSN_MATCH_VLSEG(7, 16)) || \
	IS_UNIT_STRIDE_MATCH(insn, INSN_MATCH_VLSEG(8, 16)) || \
	IS_UNIT_STRIDE_MATCH(insn, INSN_MATCH_VLSEG(2, 32)) || \
	IS_UNIT_STRIDE_MATCH(insn, INSN_MATCH_VLSEG(3, 32)) || \
	IS_UNIT_STRIDE_MATCH(insn, INSN_MATCH_VLSEG(4, 32)) || \
	IS_UNIT_STRIDE_MATCH(insn, INSN_MATCH_VLSEG(5, 32)) || \
	IS_UNIT_STRIDE_MATCH(insn, INSN_MATCH_VLSEG(6, 32)) || \
	IS_UNIT_STRIDE_MATCH(insn, INSN_MATCH_VLSEG(7, 32)) || \
	IS_UNIT_STRIDE_MATCH(insn, INSN_MATCH_VLSEG(8, 32)) || \
	IS_UNIT_STRIDE_MATCH(insn, INSN_MATCH_VLSEG(2, 64)) || \
	IS_UNIT_STRIDE_MATCH(insn, INSN_MATCH_VLSEG(3, 64)) || \
	IS_UNIT_STRIDE_MATCH(insn, INSN_MATCH_VLSEG(4, 64)) || \
	IS_UNIT_STRIDE_MATCH(insn, INSN_MATCH_VLSEG(5, 64)) || \
	IS_UNIT_STRIDE_MATCH(insn, INSN_MATCH_VLSEG(6, 64)) || \
	IS_UNIT_STRIDE_MATCH(insn, INSN_MATCH_VLSEG(7, 64)) || \
	IS_UNIT_STRIDE_MATCH(insn, INSN_MATCH_VLSEG(8, 64)))

#define IS_UNIT_STRIDE_STORE(insn) ( \
	IS_UNIT_STRIDE_MATCH(insn, INSN_MATCH_VSE16V) || \
	IS_UNIT_STRIDE_MATCH(insn, INSN_MATCH_VSE32V) || \
	IS_UNIT_STRIDE_MATCH(insn, INSN_MATCH_VSE64V) || \
	IS_UNIT_STRIDE_MATCH(insn, INSN_MATCH_VSSEG(2, 16)) || \
	IS_UNIT_STRIDE_MATCH(insn, INSN_MATCH_VSSEG(3, 16)) || \
	IS_UNIT_STRIDE_MATCH(insn, INSN_MATCH_VSSEG(4, 16)) || \
	IS_UNIT_STRIDE_MATCH(insn, INSN_MATCH_VSSEG(5, 16)) || \
	IS_UNIT_STRIDE_MATCH(insn, INSN_MATCH_VSSEG(6, 16)) || \
	IS_UNIT_STRIDE_MATCH(insn, INSN_MATCH_VSSEG(7, 16)) || \
	IS_UNIT_STRIDE_MATCH(insn, INSN_MATCH_VSSEG(8, 16)) || \
	IS_UNIT_STRIDE_MATCH(insn, INSN_MATCH_VSSEG(2, 32)) || \
	IS_UNIT_STRIDE_MATCH(insn, INSN_MATCH_VSSEG(3, 32)) || \
	IS_UNIT_STRIDE_MATCH(insn, INSN_MATCH_VSSEG(4, 32)) || \
	IS_UNIT_STRIDE_MATCH(insn, INSN_MATCH_VSSEG(5, 32)) || \
	IS_UNIT_STRIDE_MATCH(insn, INSN_MATCH_VSSEG(6, 32)) || \
	IS_UNIT_STRIDE_MATCH(insn, INSN_MATCH_VSSEG(7, 32)) || \
	IS_UNIT_STRIDE_MATCH(insn, INSN_MATCH_VSSEG(8, 32)) || \
	IS_UNIT_STRIDE_MATCH(insn, INSN_MATCH_VSSEG(2, 64)) || \
	IS_UNIT_STRIDE_MATCH(insn, INSN_MATCH_VSSEG(3, 64)) || \
	IS_UNIT_STRIDE_MATCH(insn, INSN_MATCH_VSSEG(4, 64)) || \
	IS_UNIT_STRIDE_MATCH(insn, INSN_MATCH_VSSEG(5, 64)) || \
	IS_UNIT_STRIDE_MATCH(insn, INSN_MATCH_VSSEG(6, 64)) || \
	IS_UNIT_STRIDE_MATCH(insn, INSN_MATCH_VSSEG(7, 64)) || \
	IS_UNIT_STRIDE_MATCH(insn, INSN_MATCH_VSSEG(8, 64)))

#define IS_STRIDE_LOAD(insn) ( \
	IS_STRIDE_MATCH(insn, INSN_MATCH_VLSE16V) || \
	IS_STRIDE_MATCH(insn, INSN_MATCH_VLSE32V) || \
	IS_STRIDE_MATCH(insn, INSN_MATCH_VLSE64V) || \
	IS_STRIDE_MATCH(insn, INSN_MATCH_VLSSEG(2, 16)) || \
	IS_STRIDE_MATCH(insn, INSN_MATCH_VLSSEG(3, 16)) || \
	IS_STRIDE_MATCH(insn, INSN_MATCH_VLSSEG(4, 16)) || \
	IS_STRIDE_MATCH(insn, INSN_MATCH_VLSSEG(5, 16)) || \
	IS_STRIDE_MATCH(insn, INSN_MATCH_VLSSEG(6, 16)) || \
	IS_STRIDE_MATCH(insn, INSN_MATCH_VLSSEG(7, 16)) || \
	IS_STRIDE_MATCH(insn, INSN_MATCH_VLSSEG(8, 16)) || \
	IS_STRIDE_MATCH(insn, INSN_MATCH_VLSSEG(2, 32)) || \
	IS_STRIDE_MATCH(insn, INSN_MATCH_VLSSEG(3, 32)) || \
	IS_STRIDE_MATCH(insn, INSN_MATCH_VLSSEG(4, 32)) || \
	IS_STRIDE_MATCH(insn, INSN_MATCH_VLSSEG(5, 32)) || \
	IS_STRIDE_MATCH(insn, INSN_MATCH_VLSSEG(6, 32)) || \
	IS_STRIDE_MATCH(insn, INSN_MATCH_VLSSEG(7, 32)) || \
	IS_STRIDE_MATCH(insn, INSN_MATCH_VLSSEG(8, 32)) || \
	IS_STRIDE_MATCH(insn, INSN_MATCH_VLSSEG(2, 64)) || \
	IS_STRIDE_MATCH(insn, INSN_MATCH_VLSSEG(3, 64)) || \
	IS_STRIDE_MATCH(insn, INSN_MATCH_VLSSEG(4, 64)) || \
	IS_STRIDE_MATCH(insn, INSN_MATCH_VLSSEG(5, 64)) || \
	IS_STRIDE_MATCH(insn, INSN_MATCH_VLSSEG(6, 64)) || \
	IS_STRIDE_MATCH(insn, INSN_MATCH_VLSSEG(7, 64)) || \
	IS_STRIDE_MATCH(insn, INSN_MATCH_VLSSEG(8, 64)))

#define IS_STRIDE_STORE(insn) ( \
	IS_STRIDE_MATCH(insn, INSN_MATCH_VSSE16V) || \
	IS_STRIDE_MATCH(insn, INSN_MATCH_VSSE32V) || \
	IS_STRIDE_MATCH(insn, INSN_MATCH_VSSE64V) || \
	IS_STRIDE_MATCH(insn, INSN_MATCH_VSSSEG(2, 16)) || \
	IS_STRIDE_MATCH(insn, INSN_MATCH_VSSSEG(3, 16)) || \
	IS_STRIDE_MATCH(insn, INSN_MATCH_VSSSEG(4, 16)) || \
	IS_STRIDE_MATCH(insn, INSN_MATCH_VSSSEG(5, 16)) || \
	IS_STRIDE_MATCH(insn, INSN_MATCH_VSSSEG(6, 16)) || \
	IS_STRIDE_MATCH(insn, INSN_MATCH_VSSSEG(7, 16)) || \
	IS_STRIDE_MATCH(insn, INSN_MATCH_VSSSEG(8, 16)) || \
	IS_STRIDE_MATCH(insn, INSN_MATCH_VSSSEG(2, 32)) || \
	IS_STRIDE_MATCH(insn, INSN_MATCH_VSSSEG(3, 32)) || \
	IS_STRIDE_MATCH(insn, INSN_MATCH_VSSSEG(4, 32)) || \
	IS_STRIDE_MATCH(insn, INSN_MATCH_VSSSEG(5, 32)) || \
	IS_STRIDE_MATCH(insn, INSN_MATCH_VSSSEG(6, 32)) || \
	IS_STRIDE_MATCH(insn, INSN_MATCH_VSSSEG(7, 32)) || \
	IS_STRIDE_MATCH(insn, INSN_MATCH_VSSSEG(8, 32)) || \
	IS_STRIDE_MATCH(insn, INSN_MATCH_VSSSEG(2, 64)) || \
	IS_STRIDE_MATCH(insn, INSN_MATCH_VSSSEG(3, 64)) || \
	IS_STRIDE_MATCH(insn, INSN_MATCH_VSSSEG(4, 64)) || \
	IS_STRIDE_MATCH(insn, INSN_MATCH_VSSSEG(5, 64)) || \
	IS_STRIDE_MATCH(insn, INSN_MATCH_VSSSEG(6, 64)) || \
	IS_STRIDE_MATCH(insn, INSN_MATCH_VSSSEG(7, 64)) || \
	IS_STRIDE_MATCH(insn, INSN_MATCH_VSSSEG(8, 64)))

#define IS_INDEXED_LOAD(insn) ( \
	IS_INDEXED_MATCH(insn, INSN_MATCH_VLOXEI16V) || \
	IS_INDEXED_MATCH(insn, INSN_MATCH_VLOXEI32V) || \
	IS_INDEXED_MATCH(insn, INSN_MATCH_VLOXEI64V) || \
	IS_INDEXED_MATCH(insn, INSN_MATCH_VLUXEI16V) || \
	IS_INDEXED_MATCH(insn, INSN_MATCH_VLUXEI32V) || \
	IS_INDEXED_MATCH(insn, INSN_MATCH_VLUXEI64V) || \
	IS_INDEXED_MATCH(insn, INSN_MATCH_VLUXSEG(2, 16)) || \
	IS_INDEXED_MATCH(insn, INSN_MATCH_VLUXSEG(3, 16)) || \
	IS_INDEXED_MATCH(insn, INSN_MATCH_VLUXSEG(4, 16)) || \
	IS_INDEXED_MATCH(insn, INSN_MATCH_VLUXSEG(5, 16)) || \
	IS_INDEXED_MATCH(insn, INSN_MATCH_VLUXSEG(6, 16)) || \
	IS_INDEXED_MATCH(insn, INSN_MATCH_VLUXSEG(7, 16)) || \
	IS_INDEXED_MATCH(insn, INSN_MATCH_VLUXSEG(8, 16)) || \
	IS_INDEXED_MATCH(insn, INSN_MATCH_VLUXSEG(2, 32)) || \
	IS_INDEXED_MATCH(insn, INSN_MATCH_VLUXSEG(3, 32)) || \
	IS_INDEXED_MATCH(insn, INSN_MATCH_VLUXSEG(4, 32)) || \
	IS_INDEXED_MATCH(insn, INSN_MATCH_VLUXSEG(5, 32)) || \
	IS_INDEXED_MATCH(insn, INSN_MATCH_VLUXSEG(6, 32)) || \
	IS_INDEXED_MATCH(insn, INSN_MATCH_VLUXSEG(7, 32)) || \
	IS_INDEXED_MATCH(insn, INSN_MATCH_VLUXSEG(8, 32)) || \
	IS_INDEXED_MATCH(insn, INSN_MATCH_VLUXSEG(2, 64)) || \
	IS_INDEXED_MATCH(insn, INSN_MATCH_VLUXSEG(3, 64)) || \
	IS_INDEXED_MATCH(insn, INSN_MATCH_VLUXSEG(4, 64)) || \
	IS_INDEXED_MATCH(insn, INSN_MATCH_VLUXSEG(5, 64)) || \
	IS_INDEXED_MATCH(insn, INSN_MATCH_VLUXSEG(6, 64)) || \
	IS_INDEXED_MATCH(insn, INSN_MATCH_VLUXSEG(7, 64)) || \
	IS_INDEXED_MATCH(insn, INSN_MATCH_VLUXSEG(8, 64)) || \
	IS_INDEXED_MATCH(insn, INSN_MATCH_VLOXSEG(2, 16)) || \
	IS_INDEXED_MATCH(insn, INSN_MATCH_VLOXSEG(3, 16)) || \
	IS_INDEXED_MATCH(insn, INSN_MATCH_VLOXSEG(4, 16)) || \
	IS_INDEXED_MATCH(insn, INSN_MATCH_VLOXSEG(5, 16)) || \
	IS_INDEXED_MATCH(insn, INSN_MATCH_VLOXSEG(6, 16)) || \
	IS_INDEXED_MATCH(insn, INSN_MATCH_VLOXSEG(7, 16)) || \
	IS_INDEXED_MATCH(insn, INSN_MATCH_VLOXSEG(8, 16)) || \
	IS_INDEXED_MATCH(insn, INSN_MATCH_VLOXSEG(2, 32)) || \
	IS_INDEXED_MATCH(insn, INSN_MATCH_VLOXSEG(3, 32)) || \
	IS_INDEXED_MATCH(insn, INSN_MATCH_VLOXSEG(4, 32)) || \
	IS_INDEXED_MATCH(insn, INSN_MATCH_VLOXSEG(5, 32)) || \
	IS_INDEXED_MATCH(insn, INSN_MATCH_VLOXSEG(6, 32)) || \
	IS_INDEXED_MATCH(insn, INSN_MATCH_VLOXSEG(7, 32)) || \
	IS_INDEXED_MATCH(insn, INSN_MATCH_VLOXSEG(8, 32)) || \
	IS_INDEXED_MATCH(insn, INSN_MATCH_VLOXSEG(2, 64)) || \
	IS_INDEXED_MATCH(insn, INSN_MATCH_VLOXSEG(3, 64)) || \
	IS_INDEXED_MATCH(insn, INSN_MATCH_VLOXSEG(4, 64)) || \
	IS_INDEXED_MATCH(insn, INSN_MATCH_VLOXSEG(5, 64)) || \
	IS_INDEXED_MATCH(insn, INSN_MATCH_VLOXSEG(6, 64)) || \
	IS_INDEXED_MATCH(insn, INSN_MATCH_VLOXSEG(7, 64)) || \
	IS_INDEXED_MATCH(insn, INSN_MATCH_VLOXSEG(8, 64)))

#define IS_INDEXED_STORE(insn) ( \
	IS_INDEXED_MATCH(insn, INSN_MATCH_VSUXEI16V) || \
	IS_INDEXED_MATCH(insn, INSN_MATCH_VSUXEI32V) || \
	IS_INDEXED_MATCH(insn, INSN_MATCH_VSUXEI64V) || \
	IS_INDEXED_MATCH(insn, INSN_MATCH_VSOXEI16V) || \
	IS_INDEXED_MATCH(insn, INSN_MATCH_VSOXEI32V) || \
	IS_INDEXED_MATCH(insn, INSN_MATCH_VSOXEI64V) || \
	IS_INDEXED_MATCH(insn, INSN_MATCH_VSUXSEG(2, 16)) || \
	IS_INDEXED_MATCH(insn, INSN_MATCH_VSUXSEG(3, 16)) || \
	IS_INDEXED_MATCH(insn, INSN_MATCH_VSUXSEG(4, 16)) || \
	IS_INDEXED_MATCH(insn, INSN_MATCH_VSUXSEG(5, 16)) || \
	IS_INDEXED_MATCH(insn, INSN_MATCH_VSUXSEG(6, 16)) || \
	IS_INDEXED_MATCH(insn, INSN_MATCH_VSUXSEG(7, 16)) || \
	IS_INDEXED_MATCH(insn, INSN_MATCH_VSUXSEG(8, 16)) || \
	IS_INDEXED_MATCH(insn, INSN_MATCH_VSUXSEG(2, 32)) || \
	IS_INDEXED_MATCH(insn, INSN_MATCH_VSUXSEG(3, 32)) || \
	IS_INDEXED_MATCH(insn, INSN_MATCH_VSUXSEG(4, 32)) || \
	IS_INDEXED_MATCH(insn, INSN_MATCH_VSUXSEG(5, 32)) || \
	IS_INDEXED_MATCH(insn, INSN_MATCH_VSUXSEG(6, 32)) || \
	IS_INDEXED_MATCH(insn, INSN_MATCH_VSUXSEG(7, 32)) || \
	IS_INDEXED_MATCH(insn, INSN_MATCH_VSUXSEG(8, 32)) || \
	IS_INDEXED_MATCH(insn, INSN_MATCH_VSUXSEG(2, 64)) || \
	IS_INDEXED_MATCH(insn, INSN_MATCH_VSUXSEG(3, 64)) || \
	IS_INDEXED_MATCH(insn, INSN_MATCH_VSUXSEG(4, 64)) || \
	IS_INDEXED_MATCH(insn, INSN_MATCH_VSUXSEG(5, 64)) || \
	IS_INDEXED_MATCH(insn, INSN_MATCH_VSUXSEG(6, 64)) || \
	IS_INDEXED_MATCH(insn, INSN_MATCH_VSUXSEG(7, 64)) || \
	IS_INDEXED_MATCH(insn, INSN_MATCH_VSUXSEG(8, 64)) || \
	IS_INDEXED_MATCH(insn, INSN_MATCH_VSOXSEG(2, 16)) || \
	IS_INDEXED_MATCH(insn, INSN_MATCH_VSOXSEG(3, 16)) || \
	IS_INDEXED_MATCH(insn, INSN_MATCH_VSOXSEG(4, 16)) || \
	IS_INDEXED_MATCH(insn, INSN_MATCH_VSOXSEG(5, 16)) || \
	IS_INDEXED_MATCH(insn, INSN_MATCH_VSOXSEG(6, 16)) || \
	IS_INDEXED_MATCH(insn, INSN_MATCH_VSOXSEG(7, 16)) || \
	IS_INDEXED_MATCH(insn, INSN_MATCH_VSOXSEG(8, 16)) || \
	IS_INDEXED_MATCH(insn, INSN_MATCH_VSOXSEG(2, 32)) || \
	IS_INDEXED_MATCH(insn, INSN_MATCH_VSOXSEG(3, 32)) || \
	IS_INDEXED_MATCH(insn, INSN_MATCH_VSOXSEG(4, 32)) || \
	IS_INDEXED_MATCH(insn, INSN_MATCH_VSOXSEG(5, 32)) || \
	IS_INDEXED_MATCH(insn, INSN_MATCH_VSOXSEG(6, 32)) || \
	IS_INDEXED_MATCH(insn, INSN_MATCH_VSOXSEG(7, 32)) || \
	IS_INDEXED_MATCH(insn, INSN_MATCH_VSOXSEG(8, 32)) || \
	IS_INDEXED_MATCH(insn, INSN_MATCH_VSOXSEG(2, 64)) || \
	IS_INDEXED_MATCH(insn, INSN_MATCH_VSOXSEG(3, 64)) || \
	IS_INDEXED_MATCH(insn, INSN_MATCH_VSOXSEG(4, 64)) || \
	IS_INDEXED_MATCH(insn, INSN_MATCH_VSOXSEG(5, 64)) || \
	IS_INDEXED_MATCH(insn, INSN_MATCH_VSOXSEG(6, 64)) || \
	IS_INDEXED_MATCH(insn, INSN_MATCH_VSOXSEG(7, 64)) || \
	IS_INDEXED_MATCH(insn, INSN_MATCH_VSOXSEG(8, 64)))

#define IS_FAULT_ONLY_FIRST_LOAD(insn) ( \
	IS_FAULT_ONLY_FIRST_MATCH(insn, INSN_MATCH_VLE16FFV) || \
	IS_FAULT_ONLY_FIRST_MATCH(insn, INSN_MATCH_VLE32FFV) || \
	IS_FAULT_ONLY_FIRST_MATCH(insn, INSN_MATCH_VLE64FFV) || \
	IS_FAULT_ONLY_FIRST_MATCH(insn, INSN_MATCH_VLSEGFF(2, 16)) || \
	IS_FAULT_ONLY_FIRST_MATCH(insn, INSN_MATCH_VLSEGFF(3, 16)) || \
	IS_FAULT_ONLY_FIRST_MATCH(insn, INSN_MATCH_VLSEGFF(4, 16)) || \
	IS_FAULT_ONLY_FIRST_MATCH(insn, INSN_MATCH_VLSEGFF(5, 16)) || \
	IS_FAULT_ONLY_FIRST_MATCH(insn, INSN_MATCH_VLSEGFF(6, 16)) || \
	IS_FAULT_ONLY_FIRST_MATCH(insn, INSN_MATCH_VLSEGFF(7, 16)) || \
	IS_FAULT_ONLY_FIRST_MATCH(insn, INSN_MATCH_VLSEGFF(8, 16)) || \
	IS_FAULT_ONLY_FIRST_MATCH(insn, INSN_MATCH_VLSEGFF(2, 32)) || \
	IS_FAULT_ONLY_FIRST_MATCH(insn, INSN_MATCH_VLSEGFF(3, 32)) || \
	IS_FAULT_ONLY_FIRST_MATCH(insn, INSN_MATCH_VLSEGFF(4, 32)) || \
	IS_FAULT_ONLY_FIRST_MATCH(insn, INSN_MATCH_VLSEGFF(5, 32)) || \
	IS_FAULT_ONLY_FIRST_MATCH(insn, INSN_MATCH_VLSEGFF(6, 32)) || \
	IS_FAULT_ONLY_FIRST_MATCH(insn, INSN_MATCH_VLSEGFF(7, 32)) || \
	IS_FAULT_ONLY_FIRST_MATCH(insn, INSN_MATCH_VLSEGFF(8, 32)) || \
	IS_FAULT_ONLY_FIRST_MATCH(insn, INSN_MATCH_VLSEGFF(2, 64)) || \
	IS_FAULT_ONLY_FIRST_MATCH(insn, INSN_MATCH_VLSEGFF(3, 64)) || \
	IS_FAULT_ONLY_FIRST_MATCH(insn, INSN_MATCH_VLSEGFF(4, 64)) || \
	IS_FAULT_ONLY_FIRST_MATCH(insn, INSN_MATCH_VLSEGFF(5, 64)) || \
	IS_FAULT_ONLY_FIRST_MATCH(insn, INSN_MATCH_VLSEGFF(6, 64)) || \
	IS_FAULT_ONLY_FIRST_MATCH(insn, INSN_MATCH_VLSEGFF(7, 64)) || \
	IS_FAULT_ONLY_FIRST_MATCH(insn, INSN_MATCH_VLSEGFF(8, 64)))

#define IS_WHOLE_REG_LOAD(insn) ( \
	IS_WHOLE_REG_MATCH(insn, INSN_MATCH_VL1RE8V) || \
	IS_WHOLE_REG_MATCH(insn, INSN_MATCH_VL1RE16V) || \
	IS_WHOLE_REG_MATCH(insn, INSN_MATCH_VL1RE32V) || \
	IS_WHOLE_REG_MATCH(insn, INSN_MATCH_VL1RE64V) || \
	IS_WHOLE_REG_MATCH(insn, INSN_MATCH_VL2RE8V) || \
	IS_WHOLE_REG_MATCH(insn, INSN_MATCH_VL2RE16V) || \
	IS_WHOLE_REG_MATCH(insn, INSN_MATCH_VL2RE32V) || \
	IS_WHOLE_REG_MATCH(insn, INSN_MATCH_VL2RE64V) || \
	IS_WHOLE_REG_MATCH(insn, INSN_MATCH_VL4RE8V) || \
	IS_WHOLE_REG_MATCH(insn, INSN_MATCH_VL4RE16V) || \
	IS_WHOLE_REG_MATCH(insn, INSN_MATCH_VL4RE32V) || \
	IS_WHOLE_REG_MATCH(insn, INSN_MATCH_VL4RE64V) || \
	IS_WHOLE_REG_MATCH(insn, INSN_MATCH_VL8RE8V) || \
	IS_WHOLE_REG_MATCH(insn, INSN_MATCH_VL8RE16V) || \
	IS_WHOLE_REG_MATCH(insn, INSN_MATCH_VL8RE32V) || \
	IS_WHOLE_REG_MATCH(insn, INSN_MATCH_VL8RE64V))

#define IS_WHOLE_REG_STORE(insn) ( \
	IS_WHOLE_REG_MATCH(insn, INSN_MATCH_VS1RV) || \
	IS_WHOLE_REG_MATCH(insn, INSN_MATCH_VS2RV) || \
	IS_WHOLE_REG_MATCH(insn, INSN_MATCH_VS4RV) || \
	IS_WHOLE_REG_MATCH(insn, INSN_MATCH_VS8RV))


#if __riscv_xlen == 64

/* 64-bit read for VS-stage address translation (RV64) */
#define INSN_PSEUDO_VS_LOAD 0x00003000
/* 64-bit write for VS-stage address translation (RV64) */
#define INSN_PSEUDO_VS_STORE 0x00003020

#elif __riscv_xlen == 32

/* 32-bit read for VS-stage address translation (RV32) */
#define INSN_PSEUDO_VS_LOAD 0x00002000

/* 32-bit write for VS-stage address translation (RV32) */
#define INSN_PSEUDO_VS_STORE 0x00002020

#else
#error "Unexpected __riscv_xlen"
#endif

#define MASK_FUNCT3 0x7000
#define SHIFT_FUNCT3 12

#define MASK_RS1 0xf8000
#define MASK_RS2 0x1f00000
#define MASK_RD 0xf80

#define MASK_CSR 0xfff00000
#define SHIFT_CSR 20

#define MASK_AQRL 0x06000000
#define SHIFT_AQRL 25

#define VM_MASK 0x1
#define VIEW_MASK 0x3
#define VSEW_MASK 0x3
#define VLMUL_MASK 0x7
#define VD_MASK 0x1f
#define VS2_MASK 0x1f
#define INSN_16BIT_MASK 0x3
#define INSN_32BIT_MASK 0x1c

#define INSN_IS_16BIT(insn) \
	(((insn) & INSN_16BIT_MASK) != INSN_16BIT_MASK)
#define INSN_IS_32BIT(insn) \
	(((insn) & INSN_16BIT_MASK) == INSN_16BIT_MASK && \
	((insn) & INSN_32BIT_MASK) != INSN_32BIT_MASK)

#define INSN_LEN(insn) (INSN_IS_16BIT(insn) ? 2 : 4)
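
/*
 * Illustrative sketch (not part of the upstream header): INSN_IS_16BIT()
 * and INSN_LEN() are typically used when stepping over a trapping
 * instruction, e.g. to advance the saved epc. The helper name is
 * hypothetical.
 */
#ifndef __ASSEMBLER__
static inline unsigned long epc_after_insn(unsigned long epc,
					   unsigned long insn)
{
	/* Compressed encodings are 2 bytes; all others handled here are 4. */
	return epc + INSN_LEN(insn);
}
#endif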

#if __riscv_xlen == 64
#define LOG_REGBYTES 3
#else
#define LOG_REGBYTES 2
#endif
#define REGBYTES (1 << LOG_REGBYTES)

#define SH_VSEW 3
#define SH_VIEW 12
#define SH_VD 7
#define SH_VS2 20
#define SH_VM 25
#define SH_MEW 28
#define SH_RD 7
#define SH_RS1 15
#define SH_RS2 20
#define SH_RS2C 2

#define RV_X(x, s, n) (((x) >> (s)) & ((1 << (n)) - 1))
#define RVC_LW_IMM(x) ((RV_X(x, 6, 1) << 2) | \
	(RV_X(x, 10, 3) << 3) | \
	(RV_X(x, 5, 1) << 6))
#define RVC_LD_IMM(x) ((RV_X(x, 10, 3) << 3) | \
	(RV_X(x, 5, 2) << 6))
#define RVC_LWSP_IMM(x) ((RV_X(x, 4, 3) << 2) | \
	(RV_X(x, 12, 1) << 5) | \
	(RV_X(x, 2, 2) << 6))
#define RVC_LDSP_IMM(x) ((RV_X(x, 5, 2) << 3) | \
	(RV_X(x, 12, 1) << 5) | \
	(RV_X(x, 2, 3) << 6))
#define RVC_SWSP_IMM(x) ((RV_X(x, 9, 4) << 2) | \
	(RV_X(x, 7, 2) << 6))
#define RVC_SDSP_IMM(x) ((RV_X(x, 10, 3) << 3) | \
	(RV_X(x, 7, 3) << 6))
#define RVC_RS1S(insn) (8 + RV_X(insn, SH_RD, 3))
#define RVC_RS2S(insn) (8 + RV_X(insn, SH_RS2C, 3))
#define RVC_RS2(insn) RV_X(insn, SH_RS2C, 5)
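
/*
 * Illustrative sketch (not part of the upstream header): RV_X() extracts a
 * bit-field, and the RVC_*_IMM()/RVC_RS*() helpers above reassemble the
 * scattered compressed-instruction fields. A hypothetical decode of the
 * effective address of a c.lwsp, given the current stack pointer, is shown
 * below.
 */
#ifndef __ASSEMBLER__
static inline unsigned long c_lwsp_address(unsigned long insn,
					   unsigned long sp)
{
	/* c.lwsp loads from sp plus a zero-extended, word-scaled offset. */
	return sp + RVC_LWSP_IMM(insn);
}
#endif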

#define SHIFT_RIGHT(x, y) \
	((y) < 0 ? ((x) << -(y)) : ((x) >> (y)))

#define GET_FUNC3(insn) ((insn & MASK_FUNCT3) >> SHIFT_FUNCT3)
#define GET_RM(insn) GET_FUNC3(insn)
#define GET_RS1_NUM(insn) ((insn & MASK_RS1) >> SH_RS1)
#define GET_RS2_NUM(insn) ((insn & MASK_RS2) >> SH_RS2)
#define GET_RS1S_NUM(insn) RVC_RS1S(insn)
#define GET_RS2S_NUM(insn) RVC_RS2S(insn)
#define GET_RS2C_NUM(insn) RVC_RS2(insn)
#define GET_RD_NUM(insn) ((insn & MASK_RD) >> SH_RD)
#define GET_CSR_NUM(insn) ((insn & MASK_CSR) >> SHIFT_CSR)
#define GET_AQRL(insn) ((insn & MASK_AQRL) >> SHIFT_AQRL)

#define IMM_I(insn) ((s32)(insn) >> 20)
#define IMM_S(insn) (((s32)(insn) >> 25 << 5) | \
	(s32)(((insn) >> 7) & 0x1f))
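
/*
 * Illustrative sketch (not part of the upstream header): the GET_*_NUM()
 * accessors and the sign-extending IMM_I()/IMM_S() helpers are combined
 * with the trap-time GPR file to compute a faulting access address. The
 * GPR-array parameter is a hypothetical stand-in for however the caller
 * saves registers, and the sketch assumes the s32 type used by IMM_I()/
 * IMM_S() (from <sbi/sbi_types.h>) is visible, as it is for OpenSBI C
 * sources.
 */
#ifndef __ASSEMBLER__
static inline unsigned long load_store_address(unsigned long insn,
					       const unsigned long *gprs)
{
	/* Stores use the S-type immediate, loads the I-type immediate. */
	long offset = ((insn & INSN_OPCODE_MASK) ==
		       (INSN_MATCH_SB & INSN_OPCODE_MASK)) ?
		      IMM_S(insn) : IMM_I(insn);

	return gprs[GET_RS1_NUM(insn)] + offset;
}
#endif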

#define IS_MASKED(insn) (((insn >> SH_VM) & VM_MASK) == 0)
#define GET_VD(insn) ((insn >> SH_VD) & VD_MASK)
#define GET_VS2(insn) ((insn >> SH_VS2) & VS2_MASK)
#define GET_VIEW(insn) (((insn) >> SH_VIEW) & VIEW_MASK)
#define GET_MEW(insn) (((insn) >> SH_MEW) & 1)
#define GET_VSEW(vtype) (((vtype) >> SH_VSEW) & VSEW_MASK)
#define GET_VLMUL(vtype) ((vtype) & VLMUL_MASK)
#define GET_LEN(view) (1UL << (view))
#define GET_NF(insn) (1 + ((insn >> 29) & 7))
#define GET_VEMUL(vlmul, view, vsew) ((vlmul + view - vsew) & 7)
#define GET_EMUL(vemul) (1UL << ((vemul) >= 4 ? 0 : (vemul)))
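
/*
 * Illustrative sketch (not part of the upstream header): for vector
 * loads/stores, GET_VIEW() recovers the encoded element-width field and
 * GET_LEN() converts it into a size in bytes. The helper name is
 * hypothetical.
 */
#ifndef __ASSEMBLER__
static inline unsigned long vector_mem_element_bytes(unsigned long insn)
{
	/* view 0/1/2/3 maps to 1/2/4/8 byte elements. */
	return GET_LEN(GET_VIEW(insn));
}
#endif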

#define CSRRW 1
#define CSRRS 2
#define CSRRC 3
#define CSRRWI 5
#define CSRRSI 6
#define CSRRCI 7

/* clang-format on */

#endif