KVM: arm64: Add fgt trap masks for pKVM but do not activate them yet
Add the definitions needed for fine-grained trapping for protected
VMs, so that they can be used in a subsequent patch. This patch
does not activate any of the fine-grained traps yet.
Signed-off-by: Fuad Tabba <tabba@google.com>
diff --git a/arch/arm64/include/asm/kvm_pkvm.h b/arch/arm64/include/asm/kvm_pkvm.h
index a61711f..89ee538 100644
--- a/arch/arm64/include/asm/kvm_pkvm.h
+++ b/arch/arm64/include/asm/kvm_pkvm.h
@@ -214,6 +214,220 @@ bool pkvm_is_hyp_created(struct kvm *kvm);
FIELD_PREP(ARM64_FEATURE_MASK(ID_AA64ISAR2_EL1_APA3), ID_AA64ISAR2_EL1_APA3_PAuth) \
)
+
+/* All HAFGRTR_EL2 bits are AMU */
+#define HAFGRTR_AMU __HAFGRTR_EL2_MASK
+
+#define PVM_HAFGRTR_EL2_SET \
+ (FIELD_GET(ARM64_FEATURE_MASK(ID_AA64PFR0_EL1_AMU), PVM_ID_AA64PFR0_ALLOW) ? 0ULL : HAFGRTR_AMU)
+
+#define PVM_HAFGRTR_EL2_CLR (0ULL)
+
+/* No support for debug, trace, or PMU for protected VMs */
+#define PVM_HDFGRTR_EL2_SET __HDFGRTR_EL2_MASK
+#define PVM_HDFGRTR_EL2_CLR __HDFGRTR_EL2_nMASK
+
+#define PVM_HDFGWTR_EL2_SET __HDFGWTR_EL2_MASK
+#define PVM_HDFGWTR_EL2_CLR __HDFGWTR_EL2_nMASK
+
+#define HFGxTR_RAS_IMP (\
+ HFGxTR_EL2_ERXADDR_EL1 | \
+ HFGxTR_EL2_ERXPFGF_EL1 | \
+ HFGxTR_EL2_ERXMISCn_EL1 | \
+ HFGxTR_EL2_ERXSTATUS_EL1 | \
+ HFGxTR_EL2_ERXCTLR_EL1 | \
+ HFGxTR_EL2_ERXFR_EL1 | \
+ HFGxTR_EL2_ERRSELR_EL1 | \
+ HFGxTR_EL2_ERRIDR_EL1 \
+ )
+#define HFGxTR_RAS_V1P1 (\
+ HFGxTR_EL2_ERXPFGCDN_EL1 | \
+ HFGxTR_EL2_ERXPFGCTL_EL1 \
+ )
+#define HFGxTR_GIC HFGxTR_EL2_ICC_IGRPENn_EL1
+#define HFGxTR_CSV2 (\
+ HFGxTR_EL2_SCXTNUM_EL0 | \
+ HFGxTR_EL2_SCXTNUM_EL1 \
+ )
+#define HFGxTR_LOR (\
+ HFGxTR_EL2_LORSA_EL1 | \
+ HFGxTR_EL2_LORN_EL1 | \
+ HFGxTR_EL2_LORID_EL1 | \
+ HFGxTR_EL2_LOREA_EL1 | \
+ HFGxTR_EL2_LORC_EL1 \
+ )
+#define HFGxTR_PAUTH (\
+ HFGxTR_EL2_APIBKey | \
+ HFGxTR_EL2_APIAKey | \
+ HFGxTR_EL2_APGAKey | \
+ HFGxTR_EL2_APDBKey | \
+ HFGxTR_EL2_APDAKey \
+ )
+#define HFGxTR_nAIE (\
+ HFGxTR_EL2_nAMAIR2_EL1 | \
+ HFGxTR_EL2_nMAIR2_EL1 \
+ )
+#define HFGxTR_nS2POE HFGxTR_EL2_nS2POR_EL1
+#define HFGxTR_nS1POE (\
+ HFGxTR_EL2_nPOR_EL1 | \
+ HFGxTR_EL2_nPOR_EL0 \
+ )
+#define HFGxTR_nS1PIE (\
+ HFGxTR_EL2_nPIR_EL1 | \
+ HFGxTR_EL2_nPIRE0_EL1 \
+ )
+#define HFGxTR_nTHE HFGxTR_EL2_nRCWMASK_EL1
+#define HFGxTR_nSME (\
+ HFGxTR_EL2_nTPIDR2_EL0 | \
+ HFGxTR_EL2_nSMPRI_EL1 \
+ )
+#define HFGxTR_nGCS (\
+ HFGxTR_EL2_nGCS_EL1 | \
+ HFGxTR_EL2_nGCS_EL0 \
+ )
+#define HFGxTR_nLS64 HFGxTR_EL2_nACCDATA_EL1
+
+#define PVM_HFGXTR_EL2_SET ( \
+ (FIELD_GET(ARM64_FEATURE_MASK(ID_AA64PFR0_EL1_RAS), PVM_ID_AA64PFR0_ALLOW) >= ID_AA64PFR0_EL1_RAS_IMP ? 0ULL : HFGxTR_RAS_IMP) | \
+ (FIELD_GET(ARM64_FEATURE_MASK(ID_AA64PFR0_EL1_RAS), PVM_ID_AA64PFR0_ALLOW) >= ID_AA64PFR0_EL1_RAS_V1P1 ? 0ULL : HFGxTR_RAS_V1P1) | \
+ (FIELD_GET(ARM64_FEATURE_MASK(ID_AA64PFR0_EL1_GIC), PVM_ID_AA64PFR0_ALLOW) ? 0ULL : HFGxTR_GIC) | \
+ (FIELD_GET(ARM64_FEATURE_MASK(ID_AA64PFR0_EL1_CSV2), PVM_ID_AA64PFR0_ALLOW) ? 0ULL : HFGxTR_CSV2) | \
+ (FIELD_GET(ARM64_FEATURE_MASK(ID_AA64MMFR1_EL1_LO), PVM_ID_AA64MMFR1_ALLOW) ? 0ULL : HFGxTR_LOR) | \
+ (FIELD_GET(ARM64_FEATURE_MASK(ID_AA64ISAR1_EL1_APA), PVM_ID_AA64ISAR1_ALLOW) ? 0ULL : HFGxTR_PAUTH) | \
+ (FIELD_GET(ARM64_FEATURE_MASK(ID_AA64ISAR1_EL1_API), PVM_ID_AA64ISAR1_ALLOW) ? 0ULL : HFGxTR_PAUTH) | \
+ 0)
+
+#define PVM_HFGXTR_EL2_CLR ( \
+ (FIELD_GET(ARM64_FEATURE_MASK(ID_AA64MMFR3_EL1_AIE), PVM_ID_AA64MMFR3_ALLOW) ? 0ULL : HFGxTR_nAIE) | \
+ (FIELD_GET(ARM64_FEATURE_MASK(ID_AA64MMFR3_EL1_S2POE), PVM_ID_AA64MMFR3_ALLOW) ? 0ULL : HFGxTR_nS2POE) | \
+ (FIELD_GET(ARM64_FEATURE_MASK(ID_AA64MMFR3_EL1_S1POE), PVM_ID_AA64MMFR3_ALLOW) ? 0ULL : HFGxTR_nS1POE) | \
+ (FIELD_GET(ARM64_FEATURE_MASK(ID_AA64MMFR3_EL1_S1PIE), PVM_ID_AA64MMFR3_ALLOW) ? 0ULL : HFGxTR_nS1PIE) | \
+ (FIELD_GET(ARM64_FEATURE_MASK(ID_AA64PFR1_EL1_THE), PVM_ID_AA64PFR1_ALLOW) ? 0ULL : HFGxTR_nTHE) | \
+ (FIELD_GET(ARM64_FEATURE_MASK(ID_AA64PFR1_EL1_SME), PVM_ID_AA64PFR1_ALLOW) ? 0ULL : HFGxTR_nSME) | \
+ (FIELD_GET(ARM64_FEATURE_MASK(ID_AA64PFR1_EL1_GCS), PVM_ID_AA64PFR1_ALLOW) ? 0ULL : HFGxTR_nGCS) | \
+ (FIELD_GET(ARM64_FEATURE_MASK(ID_AA64ISAR1_EL1_LS64), PVM_ID_AA64ISAR1_ALLOW) ? 0ULL : HFGxTR_nLS64) | \
+ 0)
+
+#define PVM_HFGRTR_EL2_SET PVM_HFGXTR_EL2_SET
+#define PVM_HFGWTR_EL2_SET PVM_HFGXTR_EL2_SET
+#define PVM_HFGRTR_EL2_CLR PVM_HFGXTR_EL2_CLR
+#define PVM_HFGWTR_EL2_CLR PVM_HFGXTR_EL2_CLR
+
+#define HFGITR_SPECRES (\
+ HFGITR_EL2_CPPRCTX | \
+ HFGITR_EL2_DVPRCTX | \
+ HFGITR_EL2_CFPRCTX \
+ )
+#define HFGITR_TLBIOS (\
+ HFGITR_EL2_TLBIVAALE1OS | \
+ HFGITR_EL2_TLBIVALE1OS | \
+ HFGITR_EL2_TLBIVAAE1OS | \
+ HFGITR_EL2_TLBIASIDE1OS | \
+ HFGITR_EL2_TLBIVAE1OS | \
+ HFGITR_EL2_TLBIVMALLE1OS \
+ )
+#define HFGITR_TLBIRANGE \
+ (\
+ HFGITR_TLBIOS | \
+ HFGITR_EL2_TLBIRVAALE1 | \
+ HFGITR_EL2_TLBIRVALE1 | \
+ HFGITR_EL2_TLBIRVAAE1 | \
+ HFGITR_EL2_TLBIRVAE1 | \
+ HFGITR_EL2_TLBIRVAALE1IS | \
+ HFGITR_EL2_TLBIRVALE1IS | \
+ HFGITR_EL2_TLBIRVAAE1IS | \
+ HFGITR_EL2_TLBIRVAE1IS | \
+ HFGITR_EL2_TLBIVAALE1IS | \
+ HFGITR_EL2_TLBIVALE1IS | \
+ HFGITR_EL2_TLBIVAAE1IS | \
+ HFGITR_EL2_TLBIASIDE1IS | \
+ HFGITR_EL2_TLBIVAE1IS | \
+ HFGITR_EL2_TLBIVMALLE1IS | \
+ HFGITR_EL2_TLBIRVAALE1OS | \
+ HFGITR_EL2_TLBIRVALE1OS | \
+ HFGITR_EL2_TLBIRVAAE1OS | \
+ HFGITR_EL2_TLBIRVAE1OS \
+ )
+#define HFGITR_TLB HFGITR_TLBIRANGE
+#define HFGITR_PAN2 (\
+ HFGITR_EL2_ATS1E1WP | \
+ HFGITR_EL2_ATS1E1RP | \
+ HFGITR_EL2_ATS1E0W | \
+ HFGITR_EL2_ATS1E0R | \
+ HFGITR_EL2_ATS1E1W | \
+ HFGITR_EL2_ATS1E1R \
+ )
+#define HFGITR_PAN HFGITR_PAN2
+#define HFGITR_DPB2 HFGITR_EL2_DCCVADP
+#define HFGITR_DPB_IMP HFGITR_EL2_DCCVAP
+#define HFGITR_DPB (HFGITR_DPB_IMP | HFGITR_DPB2)
+#define HFGITR_nGCS (\
+ HFGITR_EL2_nGCSEPP | \
+ HFGITR_EL2_nGCSSTR_EL1 | \
+ HFGITR_EL2_nGCSPUSHM_EL1 \
+ )
+#define HFGITR_nBRBE (\
+ HFGITR_EL2_nBRBIALL | \
+ HFGITR_EL2_nBRBINJ \
+ )
+
+#define PVM_HFGITR_EL2_SET ( \
+ (FIELD_GET(ARM64_FEATURE_MASK(ID_AA64ISAR2_EL1_ATS1A), PVM_ID_AA64ISAR2_ALLOW) ? 0ULL : HFGITR_EL2_ATS1E1A) | \
+ (FIELD_GET(ARM64_FEATURE_MASK(ID_AA64ISAR1_EL1_SPECRES), PVM_ID_AA64ISAR1_ALLOW) ? 0ULL : HFGITR_SPECRES) | \
+ (FIELD_GET(ARM64_FEATURE_MASK(ID_AA64ISAR0_EL1_TLB), PVM_ID_AA64ISAR0_ALLOW) ? 0ULL : HFGITR_TLB) | \
+ (FIELD_GET(ARM64_FEATURE_MASK(ID_AA64MMFR1_EL1_PAN), PVM_ID_AA64MMFR1_ALLOW) ? 0ULL : HFGITR_PAN) | \
+ (FIELD_GET(ARM64_FEATURE_MASK(ID_AA64ISAR1_EL1_DPB), PVM_ID_AA64ISAR1_ALLOW) ? 0ULL : HFGITR_DPB) | \
+ 0)
+
+#define PVM_HFGITR_EL2_CLR ( \
+ (FIELD_GET(ARM64_FEATURE_MASK(ID_AA64PFR1_EL1_GCS), PVM_ID_AA64PFR1_ALLOW) ? 0ULL : HFGITR_nGCS) | \
+ (FIELD_GET(ARM64_FEATURE_MASK(ID_AA64DFR0_EL1_BRBE), PVM_ID_AA64DFR0_ALLOW) ? 0ULL : HFGITR_nBRBE) | \
+ 0)
+
+#define HCRX_NMI HCRX_EL2_TALLINT
+
+#define HCRX_nPAuth_LR HCRX_EL2_PACMEn
+#define HCRX_nFPMR HCRX_EL2_EnFPM
+#define HCRX_nGCS HCRX_EL2_GCSEn
+#define HCRX_nSYSREG128 HCRX_EL2_EnIDCP128
+#define HCRX_nADERR HCRX_EL2_EnSDERR
+#define HCRX_nDoubleFault2 HCRX_EL2_TMEA
+#define HCRX_nANERR HCRX_EL2_EnSNERR
+#define HCRX_nD128 HCRX_EL2_D128En
+#define HCRX_nTHE HCRX_EL2_PTTWI
+#define HCRX_nSCTLR2 HCRX_EL2_SCTLR2En
+#define HCRX_nTCR2 HCRX_EL2_TCR2En
+#define HCRX_nMOPS (HCRX_EL2_MSCEn | HCRX_EL2_MCE2)
+#define HCRX_nCMOW HCRX_EL2_CMOW
+#define HCRX_nNMI (HCRX_EL2_VFNMI | HCRX_EL2_VINMI)
+#define HCRX_SME HCRX_EL2_SMPME
+#define HCRX_nXS (HCRX_EL2_FGTnXS | HCRX_EL2_FnXS)
+#define HCRX_nLS64 (HCRX_EL2_EnASR | HCRX_EL2_EnALS | HCRX_EL2_EnAS0)
+
+#define PVM_HCRX_EL2_SET ( \
+ (FIELD_GET(ARM64_FEATURE_MASK(ID_AA64PFR1_EL1_NMI), PVM_ID_AA64PFR1_ALLOW) ? 0ULL : HCRX_NMI) | \
+ (FIELD_GET(ARM64_FEATURE_MASK(ID_AA64PFR1_EL1_SME), PVM_ID_AA64PFR1_ALLOW) ? 0ULL : HCRX_SME) | \
+ 0)
+
+#define PVM_HCRX_EL2_CLR ( \
+ (FIELD_GET(ARM64_FEATURE_MASK(ID_AA64ISAR1_EL1_APA), PVM_ID_AA64ISAR1_ALLOW) >= ID_AA64ISAR1_EL1_APA_PAuth_LR ? 0ULL : HCRX_nPAuth_LR) | \
+ (FIELD_GET(ARM64_FEATURE_MASK(ID_AA64ISAR1_EL1_API), PVM_ID_AA64ISAR1_ALLOW) >= ID_AA64ISAR1_EL1_API_PAuth_LR ? 0ULL : HCRX_nPAuth_LR) | \
+ (FIELD_GET(ARM64_FEATURE_MASK(ID_AA64PFR1_EL1_GCS), PVM_ID_AA64PFR1_ALLOW) ? 0ULL : HCRX_nGCS) | \
+ (FIELD_GET(ARM64_FEATURE_MASK(ID_AA64ISAR2_EL1_SYSREG_128), PVM_ID_AA64ISAR2_ALLOW) ? 0ULL : HCRX_nSYSREG128) | \
+ (FIELD_GET(ARM64_FEATURE_MASK(ID_AA64MMFR3_EL1_ADERR), PVM_ID_AA64MMFR3_ALLOW) ? 0ULL : HCRX_nADERR) | \
+ (FIELD_GET(ARM64_FEATURE_MASK(ID_AA64PFR1_EL1_DF2), PVM_ID_AA64PFR1_ALLOW) ? 0ULL : HCRX_nDoubleFault2) | \
+ (FIELD_GET(ARM64_FEATURE_MASK(ID_AA64MMFR3_EL1_ANERR), PVM_ID_AA64MMFR3_ALLOW) ? 0ULL : HCRX_nANERR) | \
+ (FIELD_GET(ARM64_FEATURE_MASK(ID_AA64MMFR0_EL1_PARANGE), PVM_ID_AA64MMFR0_ALLOW) ? 0ULL : HCRX_nD128) | \
+ (FIELD_GET(ARM64_FEATURE_MASK(ID_AA64PFR1_EL1_THE), PVM_ID_AA64PFR1_ALLOW) ? 0ULL : HCRX_nTHE) | \
+ (FIELD_GET(ARM64_FEATURE_MASK(ID_AA64MMFR3_EL1_SCTLRX), PVM_ID_AA64MMFR3_ALLOW) ? 0ULL : HCRX_nSCTLR2) | \
+ (FIELD_GET(ARM64_FEATURE_MASK(ID_AA64MMFR3_EL1_TCRX), PVM_ID_AA64MMFR3_ALLOW) ? 0ULL : HCRX_nTCR2) | \
+ (FIELD_GET(ARM64_FEATURE_MASK(ID_AA64ISAR2_EL1_MOPS), PVM_ID_AA64ISAR2_ALLOW) ? 0ULL : HCRX_nMOPS) | \
+ (FIELD_GET(ARM64_FEATURE_MASK(ID_AA64MMFR1_EL1_CMOW), PVM_ID_AA64MMFR1_ALLOW) ? 0ULL : HCRX_nCMOW) | \
+ (FIELD_GET(ARM64_FEATURE_MASK(ID_AA64PFR1_EL1_NMI), PVM_ID_AA64PFR1_ALLOW) ? 0ULL : HCRX_nNMI) | \
+ (FIELD_GET(ARM64_FEATURE_MASK(ID_AA64ISAR1_EL1_XS), PVM_ID_AA64ISAR1_ALLOW) ? 0ULL : HCRX_nXS) | \
+ (FIELD_GET(ARM64_FEATURE_MASK(ID_AA64ISAR1_EL1_LS64), PVM_ID_AA64ISAR1_ALLOW) ? 0ULL : HCRX_nLS64) | \
+ 0)
+
/*
* Returns the maximum number of breakpoints supported for protected VMs.
*/