lib: sbi_misaligned_ldst: Add handling of vector load/store

Add misaligned load/store handling for the vector extension
to the sbi_misaligned_ldst library.

This implementation is inspired by the misaligned_vec_ldst
implementation in the riscv-pk project.

Co-developed-by: Zong Li <zong.li@sifive.com>
Signed-off-by: Zong Li <zong.li@sifive.com>
Signed-off-by: Nylon Chen <nylon.chen@sifive.com>
Reviewed-by: Andy Chiu <andy.chiu@sifive.com>
Reviewed-by: Anup Patel <anup@brainfault.org>

@@ -763,6 +763,12 @@
#define CSR_MVIPH 0x319
#define CSR_MIPH 0x354
/* Vector extension registers */
#define CSR_VSTART 0x8
#define CSR_VL 0xc20
#define CSR_VTYPE 0xc21
#define CSR_VLENB 0xc22
/* ===== Trap/Exception Causes ===== */
#define CAUSE_MISALIGNED_FETCH 0x0
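
The vector CSR numbers added above are what lets the emulation code inspect the hart's vector configuration. A minimal sketch of how they could be read, assuming OpenSBI's csr_read() helper from sbi/riscv_asm.h and a hypothetical snapshot_vector_state() helper (the vector unit must be enabled via mstatus.VS for these reads to succeed):

#include <sbi/riscv_asm.h>
#include <sbi/riscv_encoding.h>
#include <sbi/sbi_types.h>

/* Illustrative only: capture the vector state a misaligned-access
 * emulator needs. vl/vtype/vlenb describe the current configuration,
 * vstart is the element index at which emulation must resume. */
static void snapshot_vector_state(ulong *vl, ulong *vtype,
				  ulong *vlenb, ulong *vstart)
{
	*vl     = csr_read(CSR_VL);
	*vtype  = csr_read(CSR_VTYPE);
	*vlenb  = csr_read(CSR_VLENB);
	*vstart = csr_read(CSR_VSTART);
}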
@@ -891,11 +897,364 @@
#define INSN_MASK_FENCE_TSO 0xffffffff
#define INSN_MATCH_FENCE_TSO 0x8330000f
#define INSN_MASK_VECTOR_UNIT_STRIDE 0xfdf0707f
#define INSN_MASK_VECTOR_FAULT_ONLY_FIRST 0xfdf0707f
#define INSN_MASK_VECTOR_STRIDE 0xfc00707f
#define INSN_MASK_VECTOR_WHOLE_REG 0xfff0707f
#define INSN_MASK_VECTOR_INDEXED 0xfc00707f
#define INSN_MATCH_VLUXSEG(n, bits) ((((n) - 1) << 29) | 0x04000007 | \
((bits) == 16 ? 5 : (bits) == 32 ? 6 : 7) << 12)
#define INSN_MATCH_VSUXSEG(n, bits) ((((n) - 1) << 29) | 0x04000027 | \
((bits) == 16 ? 5 : (bits) == 32 ? 6 : 7) << 12)
#define INSN_MATCH_VLOXSEG(n, bits) ((((n) - 1) << 29) | 0x0c000007 | \
((bits) == 16 ? 5 : (bits) == 32 ? 6 : 7) << 12)
#define INSN_MATCH_VSOXSEG(n, bits) ((((n) - 1) << 29) | 0x0c000027 | \
((bits) == 16 ? 5 : (bits) == 32 ? 6 : 7) << 12)
#define INSN_MATCH_VLSSEG(n, bits) ((((n) - 1) << 29) | 0x08000007 | \
((bits) == 16 ? 5 : (bits) == 32 ? 6 : 7) << 12)
#define INSN_MATCH_VSSSEG(n, bits) ((((n) - 1) << 29) | 0x08000027 | \
((bits) == 16 ? 5 : (bits) == 32 ? 6 : 7) << 12)
#define INSN_MATCH_VSSEG(n, bits) ((((n) - 1) << 29) | 0x00004027 | \
((bits) == 16 ? 5 : (bits) == 32 ? 6 : 7) << 12)
#define INSN_MATCH_VLSEG(n, bits) ((((n) - 1) << 29) | 0x00004007 | \
((bits) == 16 ? 5 : (bits) == 32 ? 6 : 7) << 12)
#define INSN_MATCH_VLSEGFF(n, bits) ((((n) - 1) << 29) | 0x1000007 | \
((bits) == 16 ? 5 : (bits) == 32 ? 6 : 7) << 12)
#define INSN_MATCH_VLE16V 0x00005007
#define INSN_MATCH_VLE32V 0x00006007
#define INSN_MATCH_VLE64V 0x00007007
#define INSN_MATCH_VSE16V 0x00005027
#define INSN_MATCH_VSE32V 0x00006027
#define INSN_MATCH_VSE64V 0x00007027
#define INSN_MATCH_VLSE16V 0x08005007
#define INSN_MATCH_VLSE32V 0x08006007
#define INSN_MATCH_VLSE64V 0x08007007
#define INSN_MATCH_VSSE16V 0x08005027
#define INSN_MATCH_VSSE32V 0x08006027
#define INSN_MATCH_VSSE64V 0x08007027
#define INSN_MATCH_VLOXEI16V 0x0c005007
#define INSN_MATCH_VLOXEI32V 0x0c006007
#define INSN_MATCH_VLOXEI64V 0x0c007007
#define INSN_MATCH_VSOXEI16V 0x0c005027
#define INSN_MATCH_VSOXEI32V 0x0c006027
#define INSN_MATCH_VSOXEI64V 0x0c007027
#define INSN_MATCH_VLUXEI16V 0x04005007
#define INSN_MATCH_VLUXEI32V 0x04006007
#define INSN_MATCH_VLUXEI64V 0x04007007
#define INSN_MATCH_VSUXEI16V 0x04005027
#define INSN_MATCH_VSUXEI32V 0x04006027
#define INSN_MATCH_VSUXEI64V 0x04007027
#define INSN_MATCH_VLE16FFV 0x01005007
#define INSN_MATCH_VLE32FFV 0x01006007
#define INSN_MATCH_VLE64FFV 0x01007007
#define INSN_MATCH_VL1RE8V 0x02800007
#define INSN_MATCH_VL1RE16V 0x02805007
#define INSN_MATCH_VL1RE32V 0x02806007
#define INSN_MATCH_VL1RE64V 0x02807007
#define INSN_MATCH_VL2RE8V 0x22800007
#define INSN_MATCH_VL2RE16V 0x22805007
#define INSN_MATCH_VL2RE32V 0x22806007
#define INSN_MATCH_VL2RE64V 0x22807007
#define INSN_MATCH_VL4RE8V 0x62800007
#define INSN_MATCH_VL4RE16V 0x62805007
#define INSN_MATCH_VL4RE32V 0x62806007
#define INSN_MATCH_VL4RE64V 0x62807007
#define INSN_MATCH_VL8RE8V 0xe2800007
#define INSN_MATCH_VL8RE16V 0xe2805007
#define INSN_MATCH_VL8RE32V 0xe2806007
#define INSN_MATCH_VL8RE64V 0xe2807007
#define INSN_MATCH_VS1RV 0x02800027
#define INSN_MATCH_VS2RV 0x22800027
#define INSN_MATCH_VS4RV 0x62800027
#define INSN_MATCH_VS8RV 0xe2800027
#define INSN_MASK_VECTOR_LOAD_STORE 0x7f
#define INSN_MATCH_VECTOR_LOAD 0x07
#define INSN_MATCH_VECTOR_STORE 0x27
#define IS_VECTOR_LOAD_STORE(insn) \
((((insn) & INSN_MASK_VECTOR_LOAD_STORE) == INSN_MATCH_VECTOR_LOAD) || \
(((insn) & INSN_MASK_VECTOR_LOAD_STORE) == INSN_MATCH_VECTOR_STORE))
#define IS_VECTOR_INSN_MATCH(insn, match, mask) \
(((insn) & (mask)) == ((match) & (mask)))
#define IS_UNIT_STRIDE_MATCH(insn, match) \
IS_VECTOR_INSN_MATCH(insn, match, INSN_MASK_VECTOR_UNIT_STRIDE)
#define IS_STRIDE_MATCH(insn, match) \
IS_VECTOR_INSN_MATCH(insn, match, INSN_MASK_VECTOR_STRIDE)
#define IS_INDEXED_MATCH(insn, match) \
IS_VECTOR_INSN_MATCH(insn, match, INSN_MASK_VECTOR_INDEXED)
#define IS_FAULT_ONLY_FIRST_MATCH(insn, match) \
IS_VECTOR_INSN_MATCH(insn, match, INSN_MASK_VECTOR_FAULT_ONLY_FIRST)
#define IS_WHOLE_REG_MATCH(insn, match) \
IS_VECTOR_INSN_MATCH(insn, match, INSN_MASK_VECTOR_WHOLE_REG)
#define IS_UNIT_STRIDE_LOAD(insn) ( \
IS_UNIT_STRIDE_MATCH(insn, INSN_MATCH_VLE16V) || \
IS_UNIT_STRIDE_MATCH(insn, INSN_MATCH_VLE32V) || \
IS_UNIT_STRIDE_MATCH(insn, INSN_MATCH_VLE64V) || \
IS_UNIT_STRIDE_MATCH(insn, INSN_MATCH_VLSEG(2, 16)) || \
IS_UNIT_STRIDE_MATCH(insn, INSN_MATCH_VLSEG(3, 16)) || \
IS_UNIT_STRIDE_MATCH(insn, INSN_MATCH_VLSEG(4, 16)) || \
IS_UNIT_STRIDE_MATCH(insn, INSN_MATCH_VLSEG(5, 16)) || \
IS_UNIT_STRIDE_MATCH(insn, INSN_MATCH_VLSEG(6, 16)) || \
IS_UNIT_STRIDE_MATCH(insn, INSN_MATCH_VLSEG(7, 16)) || \
IS_UNIT_STRIDE_MATCH(insn, INSN_MATCH_VLSEG(8, 16)) || \
IS_UNIT_STRIDE_MATCH(insn, INSN_MATCH_VLSEG(2, 32)) || \
IS_UNIT_STRIDE_MATCH(insn, INSN_MATCH_VLSEG(3, 32)) || \
IS_UNIT_STRIDE_MATCH(insn, INSN_MATCH_VLSEG(4, 32)) || \
IS_UNIT_STRIDE_MATCH(insn, INSN_MATCH_VLSEG(5, 32)) || \
IS_UNIT_STRIDE_MATCH(insn, INSN_MATCH_VLSEG(6, 32)) || \
IS_UNIT_STRIDE_MATCH(insn, INSN_MATCH_VLSEG(7, 32)) || \
IS_UNIT_STRIDE_MATCH(insn, INSN_MATCH_VLSEG(8, 32)) || \
IS_UNIT_STRIDE_MATCH(insn, INSN_MATCH_VLSEG(2, 64)) || \
IS_UNIT_STRIDE_MATCH(insn, INSN_MATCH_VLSEG(3, 64)) || \
IS_UNIT_STRIDE_MATCH(insn, INSN_MATCH_VLSEG(4, 64)) || \
IS_UNIT_STRIDE_MATCH(insn, INSN_MATCH_VLSEG(5, 64)) || \
IS_UNIT_STRIDE_MATCH(insn, INSN_MATCH_VLSEG(6, 64)) || \
IS_UNIT_STRIDE_MATCH(insn, INSN_MATCH_VLSEG(7, 64)) || \
IS_UNIT_STRIDE_MATCH(insn, INSN_MATCH_VLSEG(8, 64)))
#define IS_UNIT_STRIDE_STORE(insn) ( \
IS_UNIT_STRIDE_MATCH(insn, INSN_MATCH_VSE16V) || \
IS_UNIT_STRIDE_MATCH(insn, INSN_MATCH_VSE32V) || \
IS_UNIT_STRIDE_MATCH(insn, INSN_MATCH_VSE64V) || \
IS_UNIT_STRIDE_MATCH(insn, INSN_MATCH_VSSEG(2, 16)) || \
IS_UNIT_STRIDE_MATCH(insn, INSN_MATCH_VSSEG(3, 16)) || \
IS_UNIT_STRIDE_MATCH(insn, INSN_MATCH_VSSEG(4, 16)) || \
IS_UNIT_STRIDE_MATCH(insn, INSN_MATCH_VSSEG(5, 16)) || \
IS_UNIT_STRIDE_MATCH(insn, INSN_MATCH_VSSEG(6, 16)) || \
IS_UNIT_STRIDE_MATCH(insn, INSN_MATCH_VSSEG(7, 16)) || \
IS_UNIT_STRIDE_MATCH(insn, INSN_MATCH_VSSEG(8, 16)) || \
IS_UNIT_STRIDE_MATCH(insn, INSN_MATCH_VSSEG(2, 32)) || \
IS_UNIT_STRIDE_MATCH(insn, INSN_MATCH_VSSEG(3, 32)) || \
IS_UNIT_STRIDE_MATCH(insn, INSN_MATCH_VSSEG(4, 32)) || \
IS_UNIT_STRIDE_MATCH(insn, INSN_MATCH_VSSEG(5, 32)) || \
IS_UNIT_STRIDE_MATCH(insn, INSN_MATCH_VSSEG(6, 32)) || \
IS_UNIT_STRIDE_MATCH(insn, INSN_MATCH_VSSEG(7, 32)) || \
IS_UNIT_STRIDE_MATCH(insn, INSN_MATCH_VSSEG(8, 32)) || \
IS_UNIT_STRIDE_MATCH(insn, INSN_MATCH_VSSEG(2, 64)) || \
IS_UNIT_STRIDE_MATCH(insn, INSN_MATCH_VSSEG(3, 64)) || \
IS_UNIT_STRIDE_MATCH(insn, INSN_MATCH_VSSEG(4, 64)) || \
IS_UNIT_STRIDE_MATCH(insn, INSN_MATCH_VSSEG(5, 64)) || \
IS_UNIT_STRIDE_MATCH(insn, INSN_MATCH_VSSEG(6, 64)) || \
IS_UNIT_STRIDE_MATCH(insn, INSN_MATCH_VSSEG(7, 64)) || \
IS_UNIT_STRIDE_MATCH(insn, INSN_MATCH_VSSEG(8, 64)))
#define IS_STRIDE_LOAD(insn) ( \
IS_STRIDE_MATCH(insn, INSN_MATCH_VLSE16V) || \
IS_STRIDE_MATCH(insn, INSN_MATCH_VLSE32V) || \
IS_STRIDE_MATCH(insn, INSN_MATCH_VLSE64V) || \
IS_STRIDE_MATCH(insn, INSN_MATCH_VLSSEG(2, 16)) || \
IS_STRIDE_MATCH(insn, INSN_MATCH_VLSSEG(3, 16)) || \
IS_STRIDE_MATCH(insn, INSN_MATCH_VLSSEG(4, 16)) || \
IS_STRIDE_MATCH(insn, INSN_MATCH_VLSSEG(5, 16)) || \
IS_STRIDE_MATCH(insn, INSN_MATCH_VLSSEG(6, 16)) || \
IS_STRIDE_MATCH(insn, INSN_MATCH_VLSSEG(7, 16)) || \
IS_STRIDE_MATCH(insn, INSN_MATCH_VLSSEG(8, 16)) || \
IS_STRIDE_MATCH(insn, INSN_MATCH_VLSSEG(2, 32)) || \
IS_STRIDE_MATCH(insn, INSN_MATCH_VLSSEG(3, 32)) || \
IS_STRIDE_MATCH(insn, INSN_MATCH_VLSSEG(4, 32)) || \
IS_STRIDE_MATCH(insn, INSN_MATCH_VLSSEG(5, 32)) || \
IS_STRIDE_MATCH(insn, INSN_MATCH_VLSSEG(6, 32)) || \
IS_STRIDE_MATCH(insn, INSN_MATCH_VLSSEG(7, 32)) || \
IS_STRIDE_MATCH(insn, INSN_MATCH_VLSSEG(8, 32)) || \
IS_STRIDE_MATCH(insn, INSN_MATCH_VLSSEG(2, 64)) || \
IS_STRIDE_MATCH(insn, INSN_MATCH_VLSSEG(3, 64)) || \
IS_STRIDE_MATCH(insn, INSN_MATCH_VLSSEG(4, 64)) || \
IS_STRIDE_MATCH(insn, INSN_MATCH_VLSSEG(5, 64)) || \
IS_STRIDE_MATCH(insn, INSN_MATCH_VLSSEG(6, 64)) || \
IS_STRIDE_MATCH(insn, INSN_MATCH_VLSSEG(7, 64)) || \
IS_STRIDE_MATCH(insn, INSN_MATCH_VLSSEG(8, 64)))
#define IS_STRIDE_STORE(insn) ( \
IS_STRIDE_MATCH(insn, INSN_MATCH_VSSE16V) || \
IS_STRIDE_MATCH(insn, INSN_MATCH_VSSE32V) || \
IS_STRIDE_MATCH(insn, INSN_MATCH_VSSE64V) || \
IS_STRIDE_MATCH(insn, INSN_MATCH_VSSSEG(2, 16)) || \
IS_STRIDE_MATCH(insn, INSN_MATCH_VSSSEG(3, 16)) || \
IS_STRIDE_MATCH(insn, INSN_MATCH_VSSSEG(4, 16)) || \
IS_STRIDE_MATCH(insn, INSN_MATCH_VSSSEG(5, 16)) || \
IS_STRIDE_MATCH(insn, INSN_MATCH_VSSSEG(6, 16)) || \
IS_STRIDE_MATCH(insn, INSN_MATCH_VSSSEG(7, 16)) || \
IS_STRIDE_MATCH(insn, INSN_MATCH_VSSSEG(8, 16)) || \
IS_STRIDE_MATCH(insn, INSN_MATCH_VSSSEG(2, 32)) || \
IS_STRIDE_MATCH(insn, INSN_MATCH_VSSSEG(3, 32)) || \
IS_STRIDE_MATCH(insn, INSN_MATCH_VSSSEG(4, 32)) || \
IS_STRIDE_MATCH(insn, INSN_MATCH_VSSSEG(5, 32)) || \
IS_STRIDE_MATCH(insn, INSN_MATCH_VSSSEG(6, 32)) || \
IS_STRIDE_MATCH(insn, INSN_MATCH_VSSSEG(7, 32)) || \
IS_STRIDE_MATCH(insn, INSN_MATCH_VSSSEG(8, 32)) || \
IS_STRIDE_MATCH(insn, INSN_MATCH_VSSSEG(2, 64)) || \
IS_STRIDE_MATCH(insn, INSN_MATCH_VSSSEG(3, 64)) || \
IS_STRIDE_MATCH(insn, INSN_MATCH_VSSSEG(4, 64)) || \
IS_STRIDE_MATCH(insn, INSN_MATCH_VSSSEG(5, 64)) || \
IS_STRIDE_MATCH(insn, INSN_MATCH_VSSSEG(6, 64)) || \
IS_STRIDE_MATCH(insn, INSN_MATCH_VSSSEG(7, 64)) || \
IS_STRIDE_MATCH(insn, INSN_MATCH_VSSSEG(8, 64)))
#define IS_INDEXED_LOAD(insn) ( \
IS_INDEXED_MATCH(insn, INSN_MATCH_VLOXEI16V) || \
IS_INDEXED_MATCH(insn, INSN_MATCH_VLOXEI32V) || \
IS_INDEXED_MATCH(insn, INSN_MATCH_VLOXEI64V) || \
IS_INDEXED_MATCH(insn, INSN_MATCH_VLUXEI16V) || \
IS_INDEXED_MATCH(insn, INSN_MATCH_VLUXEI32V) || \
IS_INDEXED_MATCH(insn, INSN_MATCH_VLUXEI64V) || \
IS_INDEXED_MATCH(insn, INSN_MATCH_VLUXSEG(2, 16)) || \
IS_INDEXED_MATCH(insn, INSN_MATCH_VLUXSEG(3, 16)) || \
IS_INDEXED_MATCH(insn, INSN_MATCH_VLUXSEG(4, 16)) || \
IS_INDEXED_MATCH(insn, INSN_MATCH_VLUXSEG(5, 16)) || \
IS_INDEXED_MATCH(insn, INSN_MATCH_VLUXSEG(6, 16)) || \
IS_INDEXED_MATCH(insn, INSN_MATCH_VLUXSEG(7, 16)) || \
IS_INDEXED_MATCH(insn, INSN_MATCH_VLUXSEG(8, 16)) || \
IS_INDEXED_MATCH(insn, INSN_MATCH_VLUXSEG(2, 32)) || \
IS_INDEXED_MATCH(insn, INSN_MATCH_VLUXSEG(3, 32)) || \
IS_INDEXED_MATCH(insn, INSN_MATCH_VLUXSEG(4, 32)) || \
IS_INDEXED_MATCH(insn, INSN_MATCH_VLUXSEG(5, 32)) || \
IS_INDEXED_MATCH(insn, INSN_MATCH_VLUXSEG(6, 32)) || \
IS_INDEXED_MATCH(insn, INSN_MATCH_VLUXSEG(7, 32)) || \
IS_INDEXED_MATCH(insn, INSN_MATCH_VLUXSEG(8, 32)) || \
IS_INDEXED_MATCH(insn, INSN_MATCH_VLUXSEG(2, 64)) || \
IS_INDEXED_MATCH(insn, INSN_MATCH_VLUXSEG(3, 64)) || \
IS_INDEXED_MATCH(insn, INSN_MATCH_VLUXSEG(4, 64)) || \
IS_INDEXED_MATCH(insn, INSN_MATCH_VLUXSEG(5, 64)) || \
IS_INDEXED_MATCH(insn, INSN_MATCH_VLUXSEG(6, 64)) || \
IS_INDEXED_MATCH(insn, INSN_MATCH_VLUXSEG(7, 64)) || \
IS_INDEXED_MATCH(insn, INSN_MATCH_VLUXSEG(8, 64)) || \
IS_INDEXED_MATCH(insn, INSN_MATCH_VLOXSEG(2, 16)) || \
IS_INDEXED_MATCH(insn, INSN_MATCH_VLOXSEG(3, 16)) || \
IS_INDEXED_MATCH(insn, INSN_MATCH_VLOXSEG(4, 16)) || \
IS_INDEXED_MATCH(insn, INSN_MATCH_VLOXSEG(5, 16)) || \
IS_INDEXED_MATCH(insn, INSN_MATCH_VLOXSEG(6, 16)) || \
IS_INDEXED_MATCH(insn, INSN_MATCH_VLOXSEG(7, 16)) || \
IS_INDEXED_MATCH(insn, INSN_MATCH_VLOXSEG(8, 16)) || \
IS_INDEXED_MATCH(insn, INSN_MATCH_VLOXSEG(2, 32)) || \
IS_INDEXED_MATCH(insn, INSN_MATCH_VLOXSEG(3, 32)) || \
IS_INDEXED_MATCH(insn, INSN_MATCH_VLOXSEG(4, 32)) || \
IS_INDEXED_MATCH(insn, INSN_MATCH_VLOXSEG(5, 32)) || \
IS_INDEXED_MATCH(insn, INSN_MATCH_VLOXSEG(6, 32)) || \
IS_INDEXED_MATCH(insn, INSN_MATCH_VLOXSEG(7, 32)) || \
IS_INDEXED_MATCH(insn, INSN_MATCH_VLOXSEG(8, 32)) || \
IS_INDEXED_MATCH(insn, INSN_MATCH_VLOXSEG(2, 64)) || \
IS_INDEXED_MATCH(insn, INSN_MATCH_VLOXSEG(3, 64)) || \
IS_INDEXED_MATCH(insn, INSN_MATCH_VLOXSEG(4, 64)) || \
IS_INDEXED_MATCH(insn, INSN_MATCH_VLOXSEG(5, 64)) || \
IS_INDEXED_MATCH(insn, INSN_MATCH_VLOXSEG(6, 64)) || \
IS_INDEXED_MATCH(insn, INSN_MATCH_VLOXSEG(7, 64)) || \
IS_INDEXED_MATCH(insn, INSN_MATCH_VLOXSEG(8, 64)))
#define IS_INDEXED_STORE(insn) ( \
IS_INDEXED_MATCH(insn, INSN_MATCH_VSUXEI16V) || \
IS_INDEXED_MATCH(insn, INSN_MATCH_VSUXEI32V) || \
IS_INDEXED_MATCH(insn, INSN_MATCH_VSUXEI64V) || \
IS_INDEXED_MATCH(insn, INSN_MATCH_VSOXEI16V) || \
IS_INDEXED_MATCH(insn, INSN_MATCH_VSOXEI32V) || \
IS_INDEXED_MATCH(insn, INSN_MATCH_VSOXEI64V) || \
IS_INDEXED_MATCH(insn, INSN_MATCH_VSUXSEG(2, 16)) || \
IS_INDEXED_MATCH(insn, INSN_MATCH_VSUXSEG(3, 16)) || \
IS_INDEXED_MATCH(insn, INSN_MATCH_VSUXSEG(4, 16)) || \
IS_INDEXED_MATCH(insn, INSN_MATCH_VSUXSEG(5, 16)) || \
IS_INDEXED_MATCH(insn, INSN_MATCH_VSUXSEG(6, 16)) || \
IS_INDEXED_MATCH(insn, INSN_MATCH_VSUXSEG(7, 16)) || \
IS_INDEXED_MATCH(insn, INSN_MATCH_VSUXSEG(8, 16)) || \
IS_INDEXED_MATCH(insn, INSN_MATCH_VSUXSEG(2, 32)) || \
IS_INDEXED_MATCH(insn, INSN_MATCH_VSUXSEG(3, 32)) || \
IS_INDEXED_MATCH(insn, INSN_MATCH_VSUXSEG(4, 32)) || \
IS_INDEXED_MATCH(insn, INSN_MATCH_VSUXSEG(5, 32)) || \
IS_INDEXED_MATCH(insn, INSN_MATCH_VSUXSEG(6, 32)) || \
IS_INDEXED_MATCH(insn, INSN_MATCH_VSUXSEG(7, 32)) || \
IS_INDEXED_MATCH(insn, INSN_MATCH_VSUXSEG(8, 32)) || \
IS_INDEXED_MATCH(insn, INSN_MATCH_VSUXSEG(2, 64)) || \
IS_INDEXED_MATCH(insn, INSN_MATCH_VSUXSEG(3, 64)) || \
IS_INDEXED_MATCH(insn, INSN_MATCH_VSUXSEG(4, 64)) || \
IS_INDEXED_MATCH(insn, INSN_MATCH_VSUXSEG(5, 64)) || \
IS_INDEXED_MATCH(insn, INSN_MATCH_VSUXSEG(6, 64)) || \
IS_INDEXED_MATCH(insn, INSN_MATCH_VSUXSEG(7, 64)) || \
IS_INDEXED_MATCH(insn, INSN_MATCH_VSUXSEG(8, 64)) || \
IS_INDEXED_MATCH(insn, INSN_MATCH_VSOXSEG(2, 16)) || \
IS_INDEXED_MATCH(insn, INSN_MATCH_VSOXSEG(3, 16)) || \
IS_INDEXED_MATCH(insn, INSN_MATCH_VSOXSEG(4, 16)) || \
IS_INDEXED_MATCH(insn, INSN_MATCH_VSOXSEG(5, 16)) || \
IS_INDEXED_MATCH(insn, INSN_MATCH_VSOXSEG(6, 16)) || \
IS_INDEXED_MATCH(insn, INSN_MATCH_VSOXSEG(7, 16)) || \
IS_INDEXED_MATCH(insn, INSN_MATCH_VSOXSEG(8, 16)) || \
IS_INDEXED_MATCH(insn, INSN_MATCH_VSOXSEG(2, 32)) || \
IS_INDEXED_MATCH(insn, INSN_MATCH_VSOXSEG(3, 32)) || \
IS_INDEXED_MATCH(insn, INSN_MATCH_VSOXSEG(4, 32)) || \
IS_INDEXED_MATCH(insn, INSN_MATCH_VSOXSEG(5, 32)) || \
IS_INDEXED_MATCH(insn, INSN_MATCH_VSOXSEG(6, 32)) || \
IS_INDEXED_MATCH(insn, INSN_MATCH_VSOXSEG(7, 32)) || \
IS_INDEXED_MATCH(insn, INSN_MATCH_VSOXSEG(8, 32)) || \
IS_INDEXED_MATCH(insn, INSN_MATCH_VSOXSEG(2, 64)) || \
IS_INDEXED_MATCH(insn, INSN_MATCH_VSOXSEG(3, 64)) || \
IS_INDEXED_MATCH(insn, INSN_MATCH_VSOXSEG(4, 64)) || \
IS_INDEXED_MATCH(insn, INSN_MATCH_VSOXSEG(5, 64)) || \
IS_INDEXED_MATCH(insn, INSN_MATCH_VSOXSEG(6, 64)) || \
IS_INDEXED_MATCH(insn, INSN_MATCH_VSOXSEG(7, 64)) || \
IS_INDEXED_MATCH(insn, INSN_MATCH_VSOXSEG(8, 64)))
#define IS_FAULT_ONLY_FIRST_LOAD(insn) ( \
IS_FAULT_ONLY_FIRST_MATCH(insn, INSN_MATCH_VLE16FFV) || \
IS_FAULT_ONLY_FIRST_MATCH(insn, INSN_MATCH_VLE32FFV) || \
IS_FAULT_ONLY_FIRST_MATCH(insn, INSN_MATCH_VLE64FFV) || \
IS_FAULT_ONLY_FIRST_MATCH(insn, INSN_MATCH_VLSEGFF(2, 16)) || \
IS_FAULT_ONLY_FIRST_MATCH(insn, INSN_MATCH_VLSEGFF(3, 16)) || \
IS_FAULT_ONLY_FIRST_MATCH(insn, INSN_MATCH_VLSEGFF(4, 16)) || \
IS_FAULT_ONLY_FIRST_MATCH(insn, INSN_MATCH_VLSEGFF(5, 16)) || \
IS_FAULT_ONLY_FIRST_MATCH(insn, INSN_MATCH_VLSEGFF(6, 16)) || \
IS_FAULT_ONLY_FIRST_MATCH(insn, INSN_MATCH_VLSEGFF(7, 16)) || \
IS_FAULT_ONLY_FIRST_MATCH(insn, INSN_MATCH_VLSEGFF(8, 16)) || \
IS_FAULT_ONLY_FIRST_MATCH(insn, INSN_MATCH_VLSEGFF(2, 32)) || \
IS_FAULT_ONLY_FIRST_MATCH(insn, INSN_MATCH_VLSEGFF(3, 32)) || \
IS_FAULT_ONLY_FIRST_MATCH(insn, INSN_MATCH_VLSEGFF(4, 32)) || \
IS_FAULT_ONLY_FIRST_MATCH(insn, INSN_MATCH_VLSEGFF(5, 32)) || \
IS_FAULT_ONLY_FIRST_MATCH(insn, INSN_MATCH_VLSEGFF(6, 32)) || \
IS_FAULT_ONLY_FIRST_MATCH(insn, INSN_MATCH_VLSEGFF(7, 32)) || \
IS_FAULT_ONLY_FIRST_MATCH(insn, INSN_MATCH_VLSEGFF(8, 32)) || \
IS_FAULT_ONLY_FIRST_MATCH(insn, INSN_MATCH_VLSEGFF(2, 64)) || \
IS_FAULT_ONLY_FIRST_MATCH(insn, INSN_MATCH_VLSEGFF(3, 64)) || \
IS_FAULT_ONLY_FIRST_MATCH(insn, INSN_MATCH_VLSEGFF(4, 64)) || \
IS_FAULT_ONLY_FIRST_MATCH(insn, INSN_MATCH_VLSEGFF(5, 64)) || \
IS_FAULT_ONLY_FIRST_MATCH(insn, INSN_MATCH_VLSEGFF(6, 64)) || \
IS_FAULT_ONLY_FIRST_MATCH(insn, INSN_MATCH_VLSEGFF(7, 64)) || \
IS_FAULT_ONLY_FIRST_MATCH(insn, INSN_MATCH_VLSEGFF(8, 64)))
#define IS_WHOLE_REG_LOAD(insn) ( \
IS_WHOLE_REG_MATCH(insn, INSN_MATCH_VL1RE8V) || \
IS_WHOLE_REG_MATCH(insn, INSN_MATCH_VL1RE16V) || \
IS_WHOLE_REG_MATCH(insn, INSN_MATCH_VL1RE32V) || \
IS_WHOLE_REG_MATCH(insn, INSN_MATCH_VL1RE64V) || \
IS_WHOLE_REG_MATCH(insn, INSN_MATCH_VL2RE8V) || \
IS_WHOLE_REG_MATCH(insn, INSN_MATCH_VL2RE16V) || \
IS_WHOLE_REG_MATCH(insn, INSN_MATCH_VL2RE32V) || \
IS_WHOLE_REG_MATCH(insn, INSN_MATCH_VL2RE64V) || \
IS_WHOLE_REG_MATCH(insn, INSN_MATCH_VL4RE8V) || \
IS_WHOLE_REG_MATCH(insn, INSN_MATCH_VL4RE16V) || \
IS_WHOLE_REG_MATCH(insn, INSN_MATCH_VL4RE32V) || \
IS_WHOLE_REG_MATCH(insn, INSN_MATCH_VL4RE64V) || \
IS_WHOLE_REG_MATCH(insn, INSN_MATCH_VL8RE8V) || \
IS_WHOLE_REG_MATCH(insn, INSN_MATCH_VL8RE16V) || \
IS_WHOLE_REG_MATCH(insn, INSN_MATCH_VL8RE32V) || \
IS_WHOLE_REG_MATCH(insn, INSN_MATCH_VL8RE64V))
#define IS_WHOLE_REG_STORE(insn) ( \
IS_WHOLE_REG_MATCH(insn, INSN_MATCH_VS1RV) || \
IS_WHOLE_REG_MATCH(insn, INSN_MATCH_VS2RV) || \
IS_WHOLE_REG_MATCH(insn, INSN_MATCH_VS4RV) || \
IS_WHOLE_REG_MATCH(insn, INSN_MATCH_VS8RV))
#if __riscv_xlen == 64
/* 64-bit read for VS-stage address translation (RV64) */
#define INSN_PSEUDO_VS_LOAD 0x00003000
/* 64-bit write for VS-stage address translation (RV64) */
#define INSN_PSEUDO_VS_STORE 0x00003020
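
As a quick sanity check on the segment-access match macros above (not part of the patch), the standalone program below expands one of them by hand; the constant it prints is only an illustration of the encoding arithmetic:

#include <stdint.h>
#include <stdio.h>

/* INSN_MATCH_VLSEG(2, 16) ORs together:
 *   (2 - 1) << 29  -> nf field = 1, i.e. two fields per segment
 *   0x00004007     -> LOAD-FP opcode plus the unit-stride base bits
 *   5 << 12        -> funct3 = 101, i.e. a 16-bit element width
 * giving 0x20005007. A vlseg2e16.v encoding masked with
 * INSN_MASK_VECTOR_UNIT_STRIDE (0xfdf0707f) compares equal to it. */
int main(void)
{
	uint32_t match = ((2 - 1) << 29) | 0x00004007 | (5 << 12);

	printf("vlseg2e16 match pattern: 0x%08x\n", (unsigned int)match);
	return 0;
}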
@@ -911,6 +1270,12 @@
#error "Unexpected __riscv_xlen"
#endif
#define VM_MASK 0x1
#define VIEW_MASK 0x3
#define VSEW_MASK 0x3
#define VLMUL_MASK 0x7
#define VD_MASK 0x1f
#define VS2_MASK 0x1f
#define INSN_16BIT_MASK 0x3
#define INSN_32BIT_MASK 0x1c
@@ -929,6 +1294,12 @@
#endif
#define REGBYTES (1 << LOG_REGBYTES)
#define SH_VSEW 3
#define SH_VIEW 12
#define SH_VD 7
#define SH_VS2 20
#define SH_VM 25
#define SH_MEW 28
#define SH_RD 7
#define SH_RS1 15
#define SH_RS2 20
@@ -982,6 +1353,18 @@
#define IMM_S(insn) (((s32)(insn) >> 25 << 5) | \
(s32)(((insn) >> 7) & 0x1f))
#define IS_MASKED(insn) (((insn >> SH_VM) & VM_MASK) == 0)
#define GET_VD(insn) ((insn >> SH_VD) & VD_MASK)
#define GET_VS2(insn) ((insn >> SH_VS2) & VS2_MASK)
#define GET_VIEW(insn) (((insn) >> SH_VIEW) & VIEW_MASK)
#define GET_MEW(insn) (((insn) >> SH_MEW) & 1)
#define GET_VSEW(vtype) (((vtype) >> SH_VSEW) & VSEW_MASK)
#define GET_VLMUL(vtype) ((vtype) & VLMUL_MASK)
#define GET_LEN(view) (1UL << (view))
#define GET_NF(insn) (1 + ((insn >> 29) & 7))
#define GET_VEMUL(vlmul, view, vsew) ((vlmul + view - vsew) & 7)
#define GET_EMUL(vemul) (1UL << ((vemul) >= 4 ? 0 : (vemul)))
#define MASK_FUNCT3 0x7000
#define MASK_RS1 0xf8000
#define MASK_CSR 0xfff00000
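
GET_VEMUL() and GET_EMUL() above derive the effective register-group multiplier for the instruction's memory element width, i.e. the vector spec's EMUL = (EEW / SEW) * LMUL computed in the encoded log2 domain. A small worked example with assumed inputs, purely illustrative:

#include <stdio.h>

/* Assumed configuration: SEW = 32 (vsew = 2), LMUL = 2 (vlmul = 1),
 * and a 16-bit memory EEW (view = 1, so each element is 1 << 1 bytes). */
int main(void)
{
	unsigned long vsew = 2, vlmul = 1, view = 1;
	unsigned long vemul = (vlmul + view - vsew) & 7;	/* GET_VEMUL -> 0 */
	unsigned long emul = 1UL << (vemul >= 4 ? 0 : vemul);	/* GET_EMUL  -> 1 */

	/* Encodings 4..7 would mean a fractional EMUL and are clamped to a
	 * single register, matching the GET_EMUL() definition. Here
	 * EMUL = (16 / 32) * 2 = 1 register per field. */
	printf("vemul=%lu emul=%lu\n", vemul, emul);
	return 0;
}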

@@ -30,4 +30,13 @@ int sbi_store_access_handler(struct sbi_trap_context *tcntx);
int sbi_double_trap_handler(struct sbi_trap_context *tcntx);
ulong sbi_misaligned_tinst_fixup(ulong orig_tinst, ulong new_tinst,
ulong addr_offset);
int sbi_misaligned_v_ld_emulator(int rlen, union sbi_ldst_data *out_val,
struct sbi_trap_context *tcntx);
int sbi_misaligned_v_st_emulator(int wlen, union sbi_ldst_data in_val,
struct sbi_trap_context *tcntx);
#endif
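
The two prototypes above are the entry points a misaligned load/store trap handler can use once IS_VECTOR_LOAD_STORE() identifies the faulting instruction as a vector access. A hedged dispatch sketch, not the actual lib/sbi integration; insn, len, data and tcntx are assumed to come from the surrounding handler:

/* Sketch only: route a misaligned vector access to the new emulators. */
union sbi_ldst_data data = { 0 };

if (IS_VECTOR_LOAD_STORE(insn)) {
	if ((insn & INSN_MASK_VECTOR_LOAD_STORE) == INSN_MATCH_VECTOR_LOAD)
		/* The emulator updates the vector register file itself;
		 * 'data' is only scratch space here. */
		return sbi_misaligned_v_ld_emulator(len, &data, tcntx);
	else
		return sbi_misaligned_v_st_emulator(len, data, tcntx);
}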