AArch64: Disable Secure Cycle Counter
authorAlexei Fedorov <Alexei.Fedorov@arm.com>
Tue, 13 Aug 2019 14:17:53 +0000 (15:17 +0100)
committerAlexei Fedorov <Alexei.Fedorov@arm.com>
Wed, 21 Aug 2019 14:43:24 +0000 (15:43 +0100)
This patch fixes an issue when secure world timing information
can be leaked because Secure Cycle Counter is not disabled.
For ARMv8.5 the counter gets disabled by setting MDCR_EL3.SCCD
bit on CPU cold/warm boot.
For the earlier architectures PMCR_EL0 register is saved/restored
on secure world entry/exit from/to Non-secure state, and cycle
counting gets disabled by setting PMCR_EL0.DP bit.
'include/arch/aarch64/arch.h' header file was tidied up and new
ARMv8.5-PMU related definitions were added.

Change-Id: I6f56db6bc77504634a352388990ad925a69ebbfa
Signed-off-by: Alexei Fedorov <Alexei.Fedorov@arm.com>
bl1/aarch64/bl1_exceptions.S
bl31/aarch64/ea_delegate.S
bl31/aarch64/runtime_exceptions.S
include/arch/aarch64/arch.h
include/arch/aarch64/el3_common_macros.S
include/lib/el3_runtime/aarch64/context.h
lib/el3_runtime/aarch64/context.S
lib/el3_runtime/aarch64/context_mgmt.c

index 19a0ac27a77e803a69fb51f1e5afd655ad3307fd..ed7c27a184032a2ea810c2bef6bb1d79b74a8a6a 100644 (file)
@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 2013-2018, ARM Limited and Contributors. All rights reserved.
+ * Copyright (c) 2013-2019, ARM Limited and Contributors. All rights reserved.
  *
  * SPDX-License-Identifier: BSD-3-Clause
  */
@@ -223,6 +223,14 @@ smc_handler:
         */
        bl      save_gp_registers
 
+       /* -----------------------------------------------------
+        * If Secure Cycle Counter is not disabled in MDCR_EL3
+        * when ARMv8.5-PMU is implemented, save PMCR_EL0 and
+        * disable all event counters and cycle counter.
+        * -----------------------------------------------------
+        */
+       bl      save_pmcr_disable_pmu
+
        /* -----------------------------------------------------
         * Populate the parameters for the SMC handler. We
         * already have x0-x4 in place. x5 will point to a
index 40c3191ac5f719a3caf8aec1273ce1bd45f321e2..8dca10cf93385051818289163bae3a5b9fb2499b 100644 (file)
@@ -68,6 +68,13 @@ func enter_lower_el_sync_ea
        /* Save GP registers */
        bl      save_gp_registers
 
+       /*
+        * If Secure Cycle Counter is not disabled in MDCR_EL3
+        * when ARMv8.5-PMU is implemented, save PMCR_EL0 and
+        * disable all event counters and cycle counter.
+        */
+       bl      save_pmcr_disable_pmu
+
        /* Save ARMv8.3-PAuth registers and load firmware key */
 #if CTX_INCLUDE_PAUTH_REGS
        bl      pauth_context_save
@@ -106,6 +113,13 @@ func enter_lower_el_async_ea
        /* Save GP registers */
        bl      save_gp_registers
 
+       /*
+        * If Secure Cycle Counter is not disabled in MDCR_EL3
+        * when ARMv8.5-PMU is implemented, save PMCR_EL0 and
+        * disable all event counters and cycle counter.
+        */
+       bl      save_pmcr_disable_pmu
+
        /* Save ARMv8.3-PAuth registers and load firmware key */
 #if CTX_INCLUDE_PAUTH_REGS
        bl      pauth_context_save
index 6ffd9955555e91ed20c7817f10b878df57cd2d5c..fd7656e2ce24ca08f1ecba5d3b629980e6ea46c3 100644 (file)
 
        /* Save GP registers and restore them afterwards */
        bl      save_gp_registers
+
+       /*
+        * If Secure Cycle Counter is not disabled in MDCR_EL3
+        * when ARMv8.5-PMU is implemented, save PMCR_EL0 and
+        * disable all event counters and cycle counter.
+        */
+       bl      save_pmcr_disable_pmu
+
        bl      handle_lower_el_ea_esb
        bl      restore_gp_registers
 
 
        bl      save_gp_registers
 
+       /*
+        * If Secure Cycle Counter is not disabled in MDCR_EL3
+        * when ARMv8.5-PMU is implemented, save PMCR_EL0 and
+        * disable all event counters and cycle counter.
+        */
+       bl      save_pmcr_disable_pmu
+
        /* Save ARMv8.3-PAuth registers and load firmware key */
 #if CTX_INCLUDE_PAUTH_REGS
        bl      pauth_context_save
@@ -335,6 +350,13 @@ smc_handler64:
        /* Save general purpose registers */
        bl      save_gp_registers
 
+       /*
+        * If Secure Cycle Counter is not disabled in MDCR_EL3
+        * when ARMv8.5-PMU is implemented, save PMCR_EL0 and
+        * disable all event counters and cycle counter.
+        */
+       bl      save_pmcr_disable_pmu
+
        /* Save ARMv8.3-PAuth registers and load firmware key */
 #if CTX_INCLUDE_PAUTH_REGS
        bl      pauth_context_save
index fa857fb1b5c1341837a27f9bbcf2e05678c43278..98a8590b3c0a986c861074797a198e34ccee5f42 100644 (file)
 #define ID_AA64PFR0_EL2_SHIFT  U(8)
 #define ID_AA64PFR0_EL3_SHIFT  U(12)
 #define ID_AA64PFR0_AMU_SHIFT  U(44)
-#define ID_AA64PFR0_AMU_LENGTH U(4)
 #define ID_AA64PFR0_AMU_MASK   ULL(0xf)
 #define ID_AA64PFR0_ELX_MASK   ULL(0xf)
+#define ID_AA64PFR0_GIC_SHIFT  U(24)
+#define ID_AA64PFR0_GIC_WIDTH  U(4)
+#define ID_AA64PFR0_GIC_MASK   ULL(0xf)
 #define ID_AA64PFR0_SVE_SHIFT  U(32)
 #define ID_AA64PFR0_SVE_MASK   ULL(0xf)
-#define ID_AA64PFR0_SVE_LENGTH U(4)
 #define ID_AA64PFR0_MPAM_SHIFT U(40)
 #define ID_AA64PFR0_MPAM_MASK  ULL(0xf)
 #define ID_AA64PFR0_DIT_SHIFT  U(48)
 #define ID_AA64PFR0_CSV2_MASK  ULL(0xf)
 #define ID_AA64PFR0_CSV2_LENGTH        U(4)
 
-/* ID_AA64DFR0_EL1.PMS definitions (for ARMv8.2+) */
-#define ID_AA64DFR0_PMS_SHIFT  U(32)
-#define ID_AA64DFR0_PMS_LENGTH U(4)
-#define ID_AA64DFR0_PMS_MASK   ULL(0xf)
-
+/* Exception level handling */
 #define EL_IMPL_NONE           ULL(0)
 #define EL_IMPL_A64ONLY                ULL(1)
 #define EL_IMPL_A64_A32                ULL(2)
 
-#define ID_AA64PFR0_GIC_SHIFT  U(24)
-#define ID_AA64PFR0_GIC_WIDTH  U(4)
-#define ID_AA64PFR0_GIC_MASK   ULL(0xf)
+/* ID_AA64DFR0_EL1.PMS definitions (for ARMv8.2+) */
+#define ID_AA64DFR0_PMS_SHIFT  U(32)
+#define ID_AA64DFR0_PMS_MASK   ULL(0xf)
 
 /* ID_AA64ISAR1_EL1 definitions */
 #define ID_AA64ISAR1_EL1       S3_0_C0_C6_1
 #define SCR_RESET_VAL          SCR_RES1_BITS
 
 /* MDCR_EL3 definitions */
+#define MDCR_SCCD_BIT          (ULL(1) << 23)
+#define MDCR_SPME_BIT          (ULL(1) << 17)
+#define MDCR_SDD_BIT           (ULL(1) << 16)
 #define MDCR_SPD32(x)          ((x) << 14)
 #define MDCR_SPD32_LEGACY      ULL(0x0)
 #define MDCR_SPD32_DISABLE     ULL(0x2)
 #define MDCR_SPD32_ENABLE      ULL(0x3)
-#define MDCR_SDD_BIT           (ULL(1) << 16)
 #define MDCR_NSPB(x)           ((x) << 12)
 #define MDCR_NSPB_EL1          ULL(0x3)
 #define MDCR_TDOSA_BIT         (ULL(1) << 10)
 #define MDCR_TDA_BIT           (ULL(1) << 9)
 #define MDCR_TPM_BIT           (ULL(1) << 6)
-#define MDCR_SCCD_BIT          (ULL(1) << 23)
 #define MDCR_EL3_RESET_VAL     ULL(0x0)
 
 /* MDCR_EL2 definitions */
+#define MDCR_EL2_HLP           (U(1) << 26)
+#define MDCR_EL2_HCCD          (U(1) << 23)
+#define MDCR_EL2_TTRF          (U(1) << 19)
+#define MDCR_EL2_HPMD          (U(1) << 17)
 #define MDCR_EL2_TPMS          (U(1) << 14)
 #define MDCR_EL2_E2PB(x)       ((x) << 12)
 #define MDCR_EL2_E2PB_EL1      U(0x3)
 #define PMCR_EL0_N_SHIFT       U(11)
 #define PMCR_EL0_N_MASK                U(0x1f)
 #define PMCR_EL0_N_BITS                (PMCR_EL0_N_MASK << PMCR_EL0_N_SHIFT)
+#define PMCR_EL0_LP_BIT                (U(1) << 7)
 #define PMCR_EL0_LC_BIT                (U(1) << 6)
 #define PMCR_EL0_DP_BIT                (U(1) << 5)
 #define PMCR_EL0_X_BIT         (U(1) << 4)
 #define PMCR_EL0_D_BIT         (U(1) << 3)
+#define PMCR_EL0_C_BIT         (U(1) << 2)
+#define PMCR_EL0_P_BIT         (U(1) << 1)
+#define PMCR_EL0_E_BIT         (U(1) << 0)
 
 /*******************************************************************************
  * Definitions for system register interface to SVE
index 22b32b491c53df0b0b4c20dc88375e28065f1753..a36b7da79f1433c799ff69fc3f9ce988a67a5d14 100644 (file)
         * ---------------------------------------------------------------------
         */
        mov_imm x0, ((MDCR_EL3_RESET_VAL | MDCR_SDD_BIT | \
-                     MDCR_SPD32(MDCR_SPD32_DISABLE) | MDCR_SCCD_BIT) \
-                   ~(MDCR_TDOSA_BIT | MDCR_TDA_BIT | MDCR_TPM_BIT))
+                     MDCR_SPD32(MDCR_SPD32_DISABLE) | MDCR_SCCD_BIT) \
+                   ~(MDCR_TDOSA_BIT | MDCR_TDA_BIT | MDCR_TPM_BIT))
 
        msr     mdcr_el3, x0
 
+       /* ---------------------------------------------------------------------
+        * Initialise PMCR_EL0 setting all fields rather than relying
+        * on hw. Some fields are architecturally UNKNOWN on reset.
+        *
+        * PMCR_EL0.LP: Set to one so that event counter overflow, that
+        *  is recorded in PMOVSCLR_EL0[0-30], occurs on the increment
+        *  that changes PMEVCNTR<n>_EL0[63] from 1 to 0, when ARMv8.5-PMU
+        *  is implemented. This bit is RES0 in versions of the architecture
+        *  earlier than ARMv8.5, setting it to 1 doesn't have any effect
+        *  on them.
+        *
+        * PMCR_EL0.LC: Set to one so that cycle counter overflow, that
+        *  is recorded in PMOVSCLR_EL0[31], occurs on the increment
+        *  that changes PMCCNTR_EL0[63] from 1 to 0.
+        *
+        * PMCR_EL0.DP: Set to one so that the cycle counter,
+        *  PMCCNTR_EL0 does not count when event counting is prohibited.
+        *
+        * PMCR_EL0.X: Set to zero to disable export of events.
+        *
+        * PMCR_EL0.D: Set to zero so that, when enabled, PMCCNTR_EL0
+        *  counts on every clock cycle.
+        * ---------------------------------------------------------------------
+        */
+       mov_imm x0, ((PMCR_EL0_RESET_VAL | PMCR_EL0_LP_BIT | \
+                     PMCR_EL0_LC_BIT | PMCR_EL0_DP_BIT) & \
+                   ~(PMCR_EL0_X_BIT | PMCR_EL0_D_BIT))
+
+       msr     pmcr_el0, x0
+
        /* ---------------------------------------------------------------------
         * Enable External Aborts and SError Interrupts now that the exception
         * vectors have been setup.
index a76a59b78580b01924376fe0a43dd0e66137b103..64fa8a9ea4dbc41ec96f09c9277fe0e7e44e5950 100644 (file)
@@ -59,7 +59,7 @@
 #define CTX_RUNTIME_SP         U(0x10)
 #define CTX_SPSR_EL3           U(0x18)
 #define CTX_ELR_EL3            U(0x20)
-#define CTX_UNUSED             U(0x28)
+#define CTX_PMCR_EL0           U(0x28)
 #define CTX_EL3STATE_END       U(0x30)
 
 /*******************************************************************************
 #define CTX_AFSR1_EL1          U(0x98)
 #define CTX_CONTEXTIDR_EL1     U(0xa0)
 #define CTX_VBAR_EL1           U(0xa8)
-#define CTX_PMCR_EL0           U(0xb0)
 
 /*
  * If the platform is AArch64-only, there is no need to save and restore these
  * AArch32 registers.
  */
 #if CTX_INCLUDE_AARCH32_REGS
-#define CTX_SPSR_ABT           U(0xc0)  /* Align to the next 16 byte boundary */
-#define CTX_SPSR_UND           U(0xc8)
-#define CTX_SPSR_IRQ           U(0xd0)
-#define CTX_SPSR_FIQ           U(0xd8)
-#define CTX_DACR32_EL2         U(0xe0)
-#define CTX_IFSR32_EL2         U(0xe8)
-#define CTX_AARCH32_END                U(0xf0) /* Align to the next 16 byte boundary */
+#define CTX_SPSR_ABT           U(0xb0) /* Align to the next 16 byte boundary */
+#define CTX_SPSR_UND           U(0xb8)
+#define CTX_SPSR_IRQ           U(0xc0)
+#define CTX_SPSR_FIQ           U(0xc8)
+#define CTX_DACR32_EL2         U(0xd0)
+#define CTX_IFSR32_EL2         U(0xd8)
+#define CTX_AARCH32_END                U(0xe0) /* Align to the next 16 byte boundary */
 #else
-#define CTX_AARCH32_END                U(0xc0)  /* Align to the next 16 byte boundary */
+#define CTX_AARCH32_END                U(0xb0) /* Align to the next 16 byte boundary */
 #endif /* CTX_INCLUDE_AARCH32_REGS */
 
 /*
index e6ab19bccb21bf755789a182642e8955efa12946..53dc02e64e2ad269e13d1e85dfede308f1ff70bd 100644 (file)
        .global save_gp_registers
        .global restore_gp_registers
        .global restore_gp_registers_eret
+       .global save_pmcr_disable_pmu
        .global el3_exit
 
+/* -----------------------------------------------------
+ * If ARMv8.5-PMU is implemented, cycle counting is
+ * disabled by setting MDCR_EL3.SCCD to 1.
+ * -----------------------------------------------------
+ */
+func save_pmcr_disable_pmu
+       /* -----------------------------------------------------
+        * Check if earlier initialization of MDCR_EL3.SCCD to 1
+        * failed, meaning that ARMv8.5-PMU is not implemented and
+        * PMCR_EL0 should be saved in non-secure context.
+        * -----------------------------------------------------
+        */
+       mrs     x9, mdcr_el3
+       tst     x9, #MDCR_SCCD_BIT
+       bne     1f
+
+       /* Secure Cycle Counter is not disabled */
+       mrs     x9, pmcr_el0
+
+       /* Check caller's security state */
+       mrs     x10, scr_el3
+       tst     x10, #SCR_NS_BIT
+       beq     2f
+
+       /* Save PMCR_EL0 if called from Non-secure state */
+       str     x9, [sp, #CTX_EL3STATE_OFFSET + CTX_PMCR_EL0]
+
+       /* Disable cycle counter when event counting is prohibited */
+2:     orr     x9, x9, #PMCR_EL0_DP_BIT
+       msr     pmcr_el0, x9
+
+       isb
+1:     ret
+endfunc save_pmcr_disable_pmu
+
 /* -----------------------------------------------------
  * The following function strictly follows the AArch64
  * PCS to use x9-x17 (temporary caller-saved registers)
@@ -80,9 +116,6 @@ func el1_sysregs_context_save
        mrs     x9, vbar_el1
        stp     x17, x9, [x0, #CTX_CONTEXTIDR_EL1]
 
-       mrs     x10, pmcr_el0
-       str     x10, [x0, #CTX_PMCR_EL0]
-
        /* Save AArch32 system registers if the build has instructed so */
 #if CTX_INCLUDE_AARCH32_REGS
        mrs     x11, spsr_abt
@@ -169,9 +202,6 @@ func el1_sysregs_context_restore
        msr     contextidr_el1, x17
        msr     vbar_el1, x9
 
-       ldr     x10, [x0, #CTX_PMCR_EL0]
-       msr     pmcr_el0, x10
-
        /* Restore AArch32 system registers if the build has instructed so */
 #if CTX_INCLUDE_AARCH32_REGS
        ldp     x11, x12, [x0, #CTX_SPSR_ABT]
@@ -503,6 +533,29 @@ func el3_exit
        msr     spsr_el3, x16
        msr     elr_el3, x17
 
+       /* -----------------------------------------------------
+        * Restore PMCR_EL0 when returning to Non-secure state
+        * if Secure Cycle Counter is not disabled in MDCR_EL3
+        * when ARMv8.5-PMU is implemented
+        * -----------------------------------------------------
+        */
+       tst     x18, #SCR_NS_BIT
+       beq     2f
+
+       /* -----------------------------------------------------
+        * Back to Non-secure state.
+        * Check if earlier initialization of MDCR_EL3.SCCD to 1
+        * failed, meaning that ARMv8.5-PMU is not implemented and
+        * PMCR_EL0 should be restored from non-secure context.
+        * -----------------------------------------------------
+        */
+       mrs     x17, mdcr_el3
+       tst     x17, #MDCR_SCCD_BIT
+       bne     2f
+       ldr     x17, [sp, #CTX_EL3STATE_OFFSET + CTX_PMCR_EL0]
+       msr     pmcr_el0, x17
+2:
+
 #if IMAGE_BL31 && DYNAMIC_WORKAROUND_CVE_2018_3639
        /* Restore mitigation state as it was on entry to EL3 */
        ldr     x17, [sp, #CTX_CVE_2018_3639_OFFSET + CTX_CVE_2018_3639_DISABLE]
index 05ba5ed6c31549ff4a33bcbaea8baa995eea1124..bd5b3aa6c5de8de0045b1ff385aa8508809b5f0c 100644 (file)
@@ -66,7 +66,7 @@ void __init cm_init(void)
 void cm_setup_context(cpu_context_t *ctx, const entry_point_info_t *ep)
 {
        unsigned int security_state;
-       uint32_t scr_el3, pmcr_el0;
+       uint32_t scr_el3;
        el3_state_t *state;
        gp_regs_t *gp_regs;
        unsigned long sctlr_elx, actlr_elx;
@@ -225,31 +225,10 @@ void cm_setup_context(cpu_context_t *ctx, const entry_point_info_t *ep)
        actlr_elx = read_actlr_el1();
        write_ctx_reg((get_sysregs_ctx(ctx)), (CTX_ACTLR_EL1), (actlr_elx));
 
-       if (security_state == SECURE) {
-               /*
-                * Initialise PMCR_EL0 for secure context only, setting all
-                * fields rather than relying on hw. Some fields are
-                * architecturally UNKNOWN on reset.
-                *
-                * PMCR_EL0.LC: Set to one so that cycle counter overflow, that
-                *  is recorded in PMOVSCLR_EL0[31], occurs on the increment
-                *  that changes PMCCNTR_EL0[63] from 1 to 0.
-                *
-                * PMCR_EL0.DP: Set to one so that the cycle counter,
-                *  PMCCNTR_EL0 does not count when event counting is prohibited.
-                *
-                * PMCR_EL0.X: Set to zero to disable export of events.
-                *
-                * PMCR_EL0.D: Set to zero so that, when enabled, PMCCNTR_EL0
-                *  counts on every clock cycle.
-                */
-               pmcr_el0 = ((PMCR_EL0_RESET_VAL | PMCR_EL0_LC_BIT
-                               | PMCR_EL0_DP_BIT)
-                               & ~(PMCR_EL0_X_BIT | PMCR_EL0_D_BIT));
-               write_ctx_reg(get_sysregs_ctx(ctx), CTX_PMCR_EL0, pmcr_el0);
-       }
-
-       /* Populate EL3 state so that we've the right context before doing ERET */
+       /*
+        * Populate EL3 state so that we've the right context
+        * before doing ERET
+        */
        state = get_el3state_ctx(ctx);
        write_ctx_reg(state, CTX_SCR_EL3, scr_el3);
        write_ctx_reg(state, CTX_ELR_EL3, ep->pc);
@@ -441,6 +420,29 @@ void cm_prepare_el3_exit(uint32_t security_state)
                         * relying on hw. Some fields are architecturally
                         * UNKNOWN on reset.
                         *
+                        * MDCR_EL2.HLP: Set to one so that event counter
+                        *  overflow, that is recorded in PMOVSCLR_EL0[0-30],
+                        *  occurs on the increment that changes
+                        *  PMEVCNTR<n>_EL0[63] from 1 to 0, when ARMv8.5-PMU is
+                        *  implemented. This bit is RES0 in versions of the
+                        *  architecture earlier than ARMv8.5, setting it to 1
+                        *  doesn't have any effect on them.
+                        *
+                        * MDCR_EL2.TTRF: Set to zero so that access to Trace
+                        *  Filter Control register TRFCR_EL1 at EL1 is not
+                        *  trapped to EL2. This bit is RES0 in versions of
+                        *  the architecture earlier than ARMv8.4.
+                        *
+                        * MDCR_EL2.HPMD: Set to one so that event counting is
+                        *  prohibited at EL2. This bit is RES0 in versions of
+                        *  the architecture earlier than ARMv8.1, setting it
+                        *  to 1 doesn't have any effect on them.
+                        *
+                        * MDCR_EL2.TPMS: Set to zero so that accesses to
+                        *  Statistical Profiling control registers from EL1
+                        *  do not trap to EL2. This bit is RES0 when SPE is
+                        *  not implemented.
+                        *
                         * MDCR_EL2.TDRA: Set to zero so that Non-secure EL0 and
                         *  EL1 System register accesses to the Debug ROM
                         *  registers are not trapped to EL2.
@@ -469,13 +471,15 @@ void cm_prepare_el3_exit(uint32_t security_state)
                         * MDCR_EL2.HPMN: Set to value of PMCR_EL0.N which is the
                         *  architecturally-defined reset value.
                         */
-                       mdcr_el2 = ((MDCR_EL2_RESET_VAL |
-                                       ((read_pmcr_el0() & PMCR_EL0_N_BITS)
-                                       >> PMCR_EL0_N_SHIFT)) &
-                                       ~(MDCR_EL2_TDRA_BIT | MDCR_EL2_TDOSA_BIT
-                                       | MDCR_EL2_TDA_BIT | MDCR_EL2_TDE_BIT
-                                       | MDCR_EL2_HPME_BIT | MDCR_EL2_TPM_BIT
-                                       | MDCR_EL2_TPMCR_BIT));
+                       mdcr_el2 = ((MDCR_EL2_RESET_VAL | MDCR_EL2_HLP |
+                                    MDCR_EL2_HPMD) |
+                                  ((read_pmcr_el0() & PMCR_EL0_N_BITS)
+                                  >> PMCR_EL0_N_SHIFT)) &
+                                  ~(MDCR_EL2_TTRF | MDCR_EL2_TPMS |
+                                    MDCR_EL2_TDRA_BIT | MDCR_EL2_TDOSA_BIT |
+                                    MDCR_EL2_TDA_BIT | MDCR_EL2_TDE_BIT |
+                                    MDCR_EL2_HPME_BIT | MDCR_EL2_TPM_BIT |
+                                    MDCR_EL2_TPMCR_BIT);
 
                        write_mdcr_el2(mdcr_el2);