lib: Fix __sbi_hfence_gvma_vmid_gpa() and __sbi_hfence_vvma_asid_va()

The arguments/parameters of __sbi_hfence_gvma_vmid_gpa() and
__sbi_hfence_vvma_asid_va() functions are swapped so we fix it.

So far, we have not faced any issues because QEMU does a
full TLB flush for all HFENCE instructions.

We also improve documentation of HFENCE.GVMA and HFENCE.VVMA
instruction encoding.

Signed-off-by: Anup Patel <anup.patel@wdc.com>
Reviewed-by: Atish Patra <atish.patra@wdc.com>
This commit is contained in:
Anup Patel
2020-06-06 17:33:48 +05:30
committed by Anup Patel
parent 32f87e5a86
commit e2c3f01af4
3 changed files with 78 additions and 16 deletions

View File

@@ -10,8 +10,9 @@
#ifndef __SBI_FENCE_H__ #ifndef __SBI_FENCE_H__
#define __SBI_FENCE_H__ #define __SBI_FENCE_H__
/** Invalidate Stage2 TLBs for given VMID and guest physical address */ /** Invalidate Stage2 TLBs for given VMID and guest physical address */
void __sbi_hfence_gvma_vmid_gpa(unsigned long vmid, unsigned long gpa); void __sbi_hfence_gvma_vmid_gpa(unsigned long gpa, unsigned long vmid);
/** Invalidate Stage2 TLBs for given VMID */ /** Invalidate Stage2 TLBs for given VMID */
void __sbi_hfence_gvma_vmid(unsigned long vmid); void __sbi_hfence_gvma_vmid(unsigned long vmid);
@@ -23,7 +24,7 @@ void __sbi_hfence_gvma_gpa(unsigned long gpa);
void __sbi_hfence_gvma_all(void); void __sbi_hfence_gvma_all(void);
/** Invalidate unified TLB entries for given asid and guest virtual address */ /** Invalidate unified TLB entries for given asid and guest virtual address */
void __sbi_hfence_vvma_asid_va(unsigned long asid, unsigned long va); void __sbi_hfence_vvma_asid_va(unsigned long va, unsigned long asid);
/** Invalidate unified TLB entries for given ASID for a guest */ /** Invalidate unified TLB entries for given ASID for a guest */
void __sbi_hfence_vvma_asid(unsigned long asid); void __sbi_hfence_vvma_asid(unsigned long asid);
@@ -33,4 +34,5 @@ void __sbi_hfence_vvma_va(unsigned long va);
/** Invalidate all possible Stage2 TLBs */ /** Invalidate all possible Stage2 TLBs */
void __sbi_hfence_vvma_all(void); void __sbi_hfence_vvma_all(void);
#endif #endif

View File

@@ -9,67 +9,127 @@
*/ */
/* /*
* Instruction encoding of hfence.gvma is: * HFENCE.GVMA rs1, rs2
* HFENCE.GVMA zero, rs2
* HFENCE.GVMA rs1
* HFENCE.GVMA
*
* rs1!=zero and rs2!=zero ==> HFENCE.GVMA rs1, rs2
* rs1==zero and rs2!=zero ==> HFENCE.GVMA zero, rs2
* rs1!=zero and rs2==zero ==> HFENCE.GVMA rs1
* rs1==zero and rs2==zero ==> HFENCE.GVMA
*
* Instruction encoding of HFENCE.GVMA is:
* 0110001 rs2(5) rs1(5) 000 00000 1110011 * 0110001 rs2(5) rs1(5) 000 00000 1110011
*/ */
.align 3 .align 3
.global __sbi_hfence_gvma_vmid_gpa .global __sbi_hfence_gvma_vmid_gpa
__sbi_hfence_gvma_vmid_gpa: __sbi_hfence_gvma_vmid_gpa:
/* hfence.gvma a1, a0 */ /*
.word 0x62a60073 * rs1 = a0 (GPA)
* rs2 = a1 (VMID)
* HFENCE.GVMA a0, a1
* 0110001 01011 01010 000 00000 1110011
*/
.word 0x62b50073
ret ret
.align 3 .align 3
.global __sbi_hfence_gvma_vmid .global __sbi_hfence_gvma_vmid
__sbi_hfence_gvma_vmid: __sbi_hfence_gvma_vmid:
/* hfence.gvma zero, a0 */ /*
* rs1 = zero
* rs2 = a0 (VMID)
* HFENCE.GVMA zero, a0
* 0110001 01010 00000 000 00000 1110011
*/
.word 0x62a00073 .word 0x62a00073
ret ret
.align 3 .align 3
.global __sbi_hfence_gvma_gpa .global __sbi_hfence_gvma_gpa
__sbi_hfence_gvma_gpa: __sbi_hfence_gvma_gpa:
/* hfence.gvma a0 */ /*
* rs1 = a0 (GPA)
* rs2 = zero
* HFENCE.GVMA a0
* 0110001 00000 01010 000 00000 1110011
*/
.word 0x62050073 .word 0x62050073
ret ret
.align 3 .align 3
.global __sbi_hfence_gvma_all .global __sbi_hfence_gvma_all
__sbi_hfence_gvma_all: __sbi_hfence_gvma_all:
/* hfence.gvma */ /*
* rs1 = zero
* rs2 = zero
* HFENCE.GVMA
* 0110001 00000 00000 000 00000 1110011
*/
.word 0x62000073 .word 0x62000073
ret ret
/* /*
* Instruction encoding of hfence.bvma is: * HFENCE.VVMA rs1, rs2
* HFENCE.VVMA zero, rs2
* HFENCE.VVMA rs1
* HFENCE.VVMA
*
* rs1!=zero and rs2!=zero ==> HFENCE.VVMA rs1, rs2
* rs1==zero and rs2!=zero ==> HFENCE.VVMA zero, rs2
* rs1!=zero and rs2==zero ==> HFENCE.VVMA rs1
* rs1==zero and rs2==zero ==> HFENCE.VVMA
*
* Instruction encoding of HFENCE.VVMA is:
* 0010001 rs2(5) rs1(5) 000 00000 1110011 * 0010001 rs2(5) rs1(5) 000 00000 1110011
*/ */
.align 3 .align 3
.global __sbi_hfence_vvma_asid_va .global __sbi_hfence_vvma_asid_va
__sbi_hfence_vvma_asid_va: __sbi_hfence_vvma_asid_va:
/* hfence.bvma a1, a0 */ /*
.word 0x22a60073 * rs1 = a0 (VA)
* rs2 = a1 (ASID)
* HFENCE.VVMA a0, a1
* 0010001 01011 01010 000 00000 1110011
*/
.word 0x22b50073
ret ret
.align 3 .align 3
.global __sbi_hfence_vvma_asid .global __sbi_hfence_vvma_asid
__sbi_hfence_vvma_asid: __sbi_hfence_vvma_asid:
/* hfence.bvma zero, a0 */ /*
* rs1 = zero
* rs2 = a0 (ASID)
* HFENCE.VVMA zero, a0
* 0010001 01010 00000 000 00000 1110011
*/
.word 0x22a00073 .word 0x22a00073
ret ret
.align 3 .align 3
.global __sbi_hfence_vvma_va .global __sbi_hfence_vvma_va
__sbi_hfence_vvma_va: __sbi_hfence_vvma_va:
/* hfence.bvma a0 */ /*
* rs1 = a0 (VA)
* rs2 = zero
* HFENCE.VVMA a0
* 0010001 00000 01010 000 00000 1110011
*/
.word 0x22050073 .word 0x22050073
ret ret
.align 3 .align 3
.global __sbi_hfence_vvma_all .global __sbi_hfence_vvma_all
__sbi_hfence_vvma_all: __sbi_hfence_vvma_all:
/* hfence.bvma */ /*
* rs1 = zero
* rs2 = zero
* HFENCE.VVMA
* 0010001 00000 00000 000 00000 1110011
*/
.word 0x22000073 .word 0x22000073
ret ret

View File

@@ -112,7 +112,7 @@ static void sbi_tlb_hfence_vvma_asid(struct sbi_tlb_info *tinfo)
} }
for (i = 0; i < size; i += PAGE_SIZE) { for (i = 0; i < size; i += PAGE_SIZE) {
__sbi_hfence_vvma_asid_va(asid, start + i); __sbi_hfence_vvma_asid_va(start + i, asid);
} }
done: done:
@@ -137,7 +137,7 @@ static void sbi_tlb_hfence_gvma_vmid(struct sbi_tlb_info *tinfo)
} }
for (i = 0; i < size; i += PAGE_SIZE) { for (i = 0; i < size; i += PAGE_SIZE) {
__sbi_hfence_gvma_vmid_gpa(vmid, start+i); __sbi_hfence_gvma_vmid_gpa(start + i, vmid);
} }
} }