From e2c3f01af4329aa7e9009edee13b7cfdf379071f Mon Sep 17 00:00:00 2001
From: Anup Patel
Date: Sat, 6 Jun 2020 17:33:48 +0530
Subject: [PATCH] lib: Fix __sbi_hfence_gvma_vmid_gpa() and
 __sbi_hfence_vvma_asid_va()

The arguments of the __sbi_hfence_gvma_vmid_gpa() and
__sbi_hfence_vvma_asid_va() functions are swapped, so we fix them.
So far we have not faced any issues because QEMU does a full TLB
flush for all HFENCE instructions.

We also improve the documentation of the HFENCE.GVMA and HFENCE.VVMA
instruction encodings.

Signed-off-by: Anup Patel
Reviewed-by: Atish Patra
---
 include/sbi/sbi_hfence.h |  6 ++-
 lib/sbi/sbi_hfence.S     | 84 ++++++++++++++++++++++++++++++++++------
 lib/sbi/sbi_tlb.c        |  4 +-
 3 files changed, 78 insertions(+), 16 deletions(-)

diff --git a/include/sbi/sbi_hfence.h b/include/sbi/sbi_hfence.h
index 824a8d60..4420f279 100644
--- a/include/sbi/sbi_hfence.h
+++ b/include/sbi/sbi_hfence.h
@@ -10,8 +10,9 @@
 #ifndef __SBI_FENCE_H__
 #define __SBI_FENCE_H__
 
+
 /** Invalidate Stage2 TLBs for given VMID and guest physical address */
-void __sbi_hfence_gvma_vmid_gpa(unsigned long vmid, unsigned long gpa);
+void __sbi_hfence_gvma_vmid_gpa(unsigned long gpa, unsigned long vmid);
 
 /** Invalidate Stage2 TLBs for given VMID */
 void __sbi_hfence_gvma_vmid(unsigned long vmid);
@@ -23,7 +24,7 @@ void __sbi_hfence_gvma_gpa(unsigned long gpa);
 void __sbi_hfence_gvma_all(void);
 
 /** Invalidate unified TLB entries for given asid and guest virtual address */
-void __sbi_hfence_vvma_asid_va(unsigned long asid, unsigned long va);
+void __sbi_hfence_vvma_asid_va(unsigned long va, unsigned long asid);
 
 /** Invalidate unified TLB entries for given ASID for a guest*/
 void __sbi_hfence_vvma_asid(unsigned long asid);
@@ -33,4 +34,5 @@ void __sbi_hfence_vvma_va(unsigned long va);
 
 /** Invalidate all possible Stage2 TLBs */
 void __sbi_hfence_vvma_all(void);
+
 #endif
diff --git a/lib/sbi/sbi_hfence.S b/lib/sbi/sbi_hfence.S
index 30a6e9fa..d05becbf 100644
--- a/lib/sbi/sbi_hfence.S
+++ b/lib/sbi/sbi_hfence.S
@@ -9,67 +9,127 @@
  */
 
 /*
- * Instruction encoding of hfence.gvma is:
+ * HFENCE.GVMA rs1, rs2
+ * HFENCE.GVMA zero, rs2
+ * HFENCE.GVMA rs1
+ * HFENCE.GVMA
+ *
+ * rs1!=zero and rs2!=zero ==> HFENCE.GVMA rs1, rs2
+ * rs1==zero and rs2!=zero ==> HFENCE.GVMA zero, rs2
+ * rs1!=zero and rs2==zero ==> HFENCE.GVMA rs1
+ * rs1==zero and rs2==zero ==> HFENCE.GVMA
+ *
+ * Instruction encoding of HFENCE.GVMA is:
  * 0110001 rs2(5) rs1(5) 000 00000 1110011
  */
 
 	.align 3
 	.global __sbi_hfence_gvma_vmid_gpa
 __sbi_hfence_gvma_vmid_gpa:
-	/* hfence.gvma a1, a0 */
-	.word 0x62a60073
+	/*
+	 * rs1 = a0 (GPA)
+	 * rs2 = a1 (VMID)
+	 * HFENCE.GVMA a0, a1
+	 * 0110001 01011 01010 000 00000 1110011
+	 */
+	.word 0x62b50073
 	ret
 
 	.align 3
 	.global __sbi_hfence_gvma_vmid
 __sbi_hfence_gvma_vmid:
-	/* hfence.gvma zero, a0 */
+	/*
+	 * rs1 = zero
+	 * rs2 = a0 (VMID)
+	 * HFENCE.GVMA zero, a0
+	 * 0110001 01010 00000 000 00000 1110011
+	 */
 	.word 0x62a00073
 	ret
 
 	.align 3
 	.global __sbi_hfence_gvma_gpa
 __sbi_hfence_gvma_gpa:
-	/* hfence.gvma a0 */
+	/*
+	 * rs1 = a0 (GPA)
+	 * rs2 = zero
+	 * HFENCE.GVMA a0
+	 * 0110001 00000 01010 000 00000 1110011
+	 */
 	.word 0x62050073
 	ret
 
 	.align 3
 	.global __sbi_hfence_gvma_all
 __sbi_hfence_gvma_all:
-	/* hfence.gvma */
+	/*
+	 * rs1 = zero
+	 * rs2 = zero
+	 * HFENCE.GVMA
+	 * 0110001 00000 00000 000 00000 1110011
+	 */
 	.word 0x62000073
 	ret
 
 /*
- * Instruction encoding of hfence.bvma is:
+ * HFENCE.VVMA rs1, rs2
+ * HFENCE.VVMA zero, rs2
+ * HFENCE.VVMA rs1
+ * HFENCE.VVMA
+ *
+ * rs1!=zero and rs2!=zero ==> HFENCE.VVMA rs1, rs2
+ * rs1==zero and rs2!=zero ==> HFENCE.VVMA zero, rs2
+ * rs1!=zero and rs2==zero ==> HFENCE.VVMA rs1
+ * rs1==zero and rs2==zero ==> HFENCE.VVMA
+ *
+ * Instruction encoding of HFENCE.VVMA is:
  * 0010001 rs2(5) rs1(5) 000 00000 1110011
  */
 
 	.align 3
 	.global __sbi_hfence_vvma_asid_va
 __sbi_hfence_vvma_asid_va:
-	/* hfence.bvma a1, a0 */
-	.word 0x22a60073
+	/*
+	 * rs1 = a0 (VA)
+	 * rs2 = a1 (ASID)
+	 * HFENCE.VVMA a0, a1
+	 * 0010001 01011 01010 000 00000 1110011
+	 */
+	.word 0x22b50073
 	ret
 
 	.align 3
 	.global __sbi_hfence_vvma_asid
 __sbi_hfence_vvma_asid:
-	/* hfence.bvma zero, a0 */
+	/*
+	 * rs1 = zero
+	 * rs2 = a0 (ASID)
+	 * HFENCE.VVMA zero, a0
+	 * 0010001 01010 00000 000 00000 1110011
+	 */
 	.word 0x22a00073
 	ret
 
 	.align 3
 	.global __sbi_hfence_vvma_va
__sbi_hfence_vvma_va:
-	/* hfence.bvma a0 */
+	/*
+	 * rs1 = a0 (VA)
+	 * rs2 = zero
+	 * HFENCE.VVMA a0
+	 * 0010001 00000 01010 000 00000 1110011
+	 */
 	.word 0x22050073
 	ret
 
 	.align 3
 	.global __sbi_hfence_vvma_all
 __sbi_hfence_vvma_all:
-	/* hfence.bvma */
+	/*
+	 * rs1 = zero
+	 * rs2 = zero
+	 * HFENCE.VVMA
+	 * 0010001 00000 00000 000 00000 1110011
+	 */
 	.word 0x22000073
 	ret
diff --git a/lib/sbi/sbi_tlb.c b/lib/sbi/sbi_tlb.c
index c6ca7b19..c8e62cdd 100644
--- a/lib/sbi/sbi_tlb.c
+++ b/lib/sbi/sbi_tlb.c
@@ -112,7 +112,7 @@ static void sbi_tlb_hfence_vvma_asid(struct sbi_tlb_info *tinfo)
 	}
 
 	for (i = 0; i < size; i += PAGE_SIZE) {
-		__sbi_hfence_vvma_asid_va(asid, start + i);
+		__sbi_hfence_vvma_asid_va(start + i, asid);
 	}
 
 done:
@@ -137,7 +137,7 @@ static void sbi_tlb_hfence_gvma_vmid(struct sbi_tlb_info *tinfo)
 	}
 
 	for (i = 0; i < size; i += PAGE_SIZE) {
-		__sbi_hfence_gvma_vmid_gpa(vmid, start+i);
+		__sbi_hfence_gvma_vmid_gpa(start + i, vmid);
 	}
 }
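Reviewer note (not part of the patch): the hand-assembled .word values above can be cross-checked by recomputing them from the funct7/rs2/rs1 fields quoted in the new comments. The standalone C sketch below does exactly that; the helper name hfence_encode and the register constants are invented here purely for illustration.

/*
 * Sketch: rebuild the HFENCE.GVMA / HFENCE.VVMA encodings used in
 * lib/sbi/sbi_hfence.S from their fields and compare against the
 * .word values in the patch.  Layout (R-type-like):
 *   funct7[31:25] rs2[24:20] rs1[19:15] funct3=000 rd=00000 opcode=1110011
 */
#include <assert.h>
#include <stdint.h>
#include <stdio.h>

#define OPC_SYSTEM      0x73u  /* 1110011 */
#define F7_HFENCE_GVMA  0x31u  /* 0110001 */
#define F7_HFENCE_VVMA  0x11u  /* 0010001 */

#define REG_ZERO  0u   /* x0  */
#define REG_A0    10u  /* x10 */
#define REG_A1    11u  /* x11 */

/* Illustrative helper: pack funct7, rs2, rs1 into the 32-bit instruction. */
static uint32_t hfence_encode(uint32_t funct7, uint32_t rs2, uint32_t rs1)
{
	return (funct7 << 25) | (rs2 << 20) | (rs1 << 15) | OPC_SYSTEM;
}

int main(void)
{
	/* HFENCE.GVMA a0, a1: rs1 = a0 (GPA), rs2 = a1 (VMID) */
	assert(hfence_encode(F7_HFENCE_GVMA, REG_A1, REG_A0) == 0x62b50073u);
	/* HFENCE.GVMA zero, a0: rs2 = a0 (VMID) only */
	assert(hfence_encode(F7_HFENCE_GVMA, REG_A0, REG_ZERO) == 0x62a00073u);
	/* HFENCE.GVMA a0: rs1 = a0 (GPA) only */
	assert(hfence_encode(F7_HFENCE_GVMA, REG_ZERO, REG_A0) == 0x62050073u);
	/* HFENCE.VVMA a0, a1: rs1 = a0 (VA), rs2 = a1 (ASID) */
	assert(hfence_encode(F7_HFENCE_VVMA, REG_A1, REG_A0) == 0x22b50073u);
	/* HFENCE.VVMA with both registers zero flushes everything */
	assert(hfence_encode(F7_HFENCE_VVMA, REG_ZERO, REG_ZERO) == 0x22000073u);

	printf("all encodings match the .word values in the patch\n");
	return 0;
}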