6e71a063a2550ee98c3c9f2668ff6676e97da043
[project/bcm63xx/atf.git] / bl31 / aarch64 / ea_delegate.S
1 /*
2 * Copyright (c) 2018-2019, ARM Limited and Contributors. All rights reserved.
3 *
4 * SPDX-License-Identifier: BSD-3-Clause
5 */
6
7
/* Deduplicated: assert_macros.S was previously included twice; headers sorted alphabetically */
8 #include <asm_macros.S>
9 #include <assert_macros.S>
10 #include <bl31/ea_handle.h>
11 #include <context.h>
12 #include <lib/extensions/ras_arch.h>

14
15
16 .globl handle_lower_el_ea_esb
17 .globl enter_lower_el_sync_ea
18 .globl enter_lower_el_async_ea
19
20
21 /*
22 * Function to delegate External Aborts synchronized by ESB instruction at EL3
23 * vector entry. This function assumes GP registers x0-x29 have been saved, and
24 * are available for use. It delegates the handling of the EA to platform
25 * handler, and returns only upon successfully handling the EA; otherwise
26 * panics. On return from this function, the original exception handler is
27 * expected to resume.
28 */
29 func handle_lower_el_ea_esb
/* x0 = reason code: tells the platform handler this EA was synchronized by an ESB */
30 mov x0, #ERROR_EA_ESB
/* x1 = deferred error syndrome recorded in DISR_EL1 by the ESB instruction */
31 mrs x1, DISR_EL1
/* Tail-call: x30 is untouched here, so ea_proceed returns directly to our caller */
32 b ea_proceed
33 endfunc handle_lower_el_ea_esb
34
35
36 /*
37 * This function forms the tail end of Synchronous Exception entry from lower
38 * EL, and expects to handle only Synchronous External Aborts from lower EL. If
39 * any other kind of exception is detected, then this function reports unhandled
40 * exception.
41 *
42 * Since it's part of exception vector, this function doesn't expect any GP
43 * registers to have been saved. It delegates the handling of the EA to platform
44 * handler, and upon successfully handling the EA, exits EL3; otherwise panics.
45 */
46 func enter_lower_el_sync_ea
47 /*
48 * Explicitly save x30 so as to free up a register and to enable
49 * branching.
50 */
51 str x30, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_LR]
52
/* x30 = Exception Class (EC) field extracted from ESR_EL3 */
53 mrs x30, esr_el3
54 ubfx x30, x30, #ESR_EC_SHIFT, #ESR_EC_LENGTH
55
56 /* Check for I/D aborts from lower EL */
57 cmp x30, #EC_IABORT_LOWER_EL
58 b.eq 1f
59
/* Not an instruction or data abort from lower EL: report as unhandled */
60 cmp x30, #EC_DABORT_LOWER_EL
61 b.ne 2f
62
63 1:
64 /* Test for EA bit in the instruction syndrome */
/* Aborts without ESR_EL3.EA set (e.g. translation faults) are not External Aborts */
65 mrs x30, esr_el3
66 tbz x30, #ESR_ISS_EABORT_EA_BIT, 2f
67
68 /*
69 * Save general purpose and ARMv8.3-PAuth registers (if enabled).
70 * If Secure Cycle Counter is not disabled in MDCR_EL3 when
71 * ARMv8.5-PMU is implemented, save PMCR_EL0 and disable Cycle Counter.
72 */
73 bl save_gp_pmcr_pauth_regs
74
75 #if ENABLE_PAUTH
76 /* Load and program APIAKey firmware key */
77 bl pauth_load_bl31_apiakey
78 #endif
79
80 /* Setup exception class and syndrome arguments for platform handler */
81 mov x0, #ERROR_EA_SYNC
82 mrs x1, esr_el3
/* Point the delegate's return address at el3_exit: successful handling resumes lower EL */
83 adr x30, el3_exit
84 b delegate_sync_ea
85
86 2:
87 /* Synchronous exceptions other than an External Abort are unexpected here: restore x30 and report an unhandled exception (does not return) */
88 ldr x30, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_LR]
89 no_ret report_unhandled_exception
90 endfunc enter_lower_el_sync_ea
91
92
93 /*
94 * This function handles SErrors from lower ELs.
95 *
96 * Since it's part of exception vector, this function doesn't expect any GP
97 * registers to have been saved. It delegates the handling of the EA to platform
98 * handler, and upon successfully handling the EA, exits EL3; otherwise panics.
99 */
100 func enter_lower_el_async_ea
101 /*
102 * Explicitly save x30 so as to free up a register and to enable
103 * branching
104 */
105 str x30, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_LR]
106
107 /*
108 * Save general purpose and ARMv8.3-PAuth registers (if enabled).
109 * If Secure Cycle Counter is not disabled in MDCR_EL3 when
110 * ARMv8.5-PMU is implemented, save PMCR_EL0 and disable Cycle Counter.
111 */
112 bl save_gp_pmcr_pauth_regs
113
114 #if ENABLE_PAUTH
115 /* Load and program APIAKey firmware key */
116 bl pauth_load_bl31_apiakey
117 #endif
118
119 /* Setup exception class and syndrome arguments for platform handler */
/* Unlike the sync path, all lower-EL SErrors are delegated without an EC check */
120 mov x0, #ERROR_EA_ASYNC
121 mrs x1, esr_el3
/* Point the delegate's return address at el3_exit: successful handling resumes lower EL */
122 adr x30, el3_exit
123 b delegate_async_ea
124 endfunc enter_lower_el_async_ea
125
126
127 /*
128 * Prelude for Synchronous External Abort handling. This function assumes that
129 * all GP registers have been saved by the caller.
130 *
131 * x0: EA reason
132 * x1: EA syndrome
133 */
134 func delegate_sync_ea
135 #if RAS_EXTENSION
136 /*
137 * Check for Uncontainable error type. If so, route to the platform
138 * fatal error handler rather than the generic EA one.
139 */
/* x2 = Synchronous Error Type (SET) field of the abort syndrome in x1 */
140 ubfx x2, x1, #EABORT_SET_SHIFT, #EABORT_SET_WIDTH
141 cmp x2, #ERROR_STATUS_SET_UC
142 b.ne 1f
143
144 /* Check fault status code */
/* Only treat the error as uncontainable when the FSC indicates a synchronous External Abort */
145 ubfx x3, x1, #EABORT_DFSC_SHIFT, #EABORT_DFSC_WIDTH
146 cmp x3, #SYNC_EA_FSC
147 b.ne 1f
148
/* Uncontainable: platform fatal handler is expected not to return */
149 no_ret plat_handle_uncontainable_ea
150 1:
151 #endif
152
/* Tail-call the common delegation path; x0 (reason) and x1 (syndrome) already set */
153 b ea_proceed
154 endfunc delegate_sync_ea
155
156
157 /*
158 * Prelude for Asynchronous External Abort handling. This function assumes that
159 * all GP registers have been saved by the caller.
160 *
161 * x0: EA reason
162 * x1: EA syndrome
163 */
164 func delegate_async_ea
165 #if RAS_EXTENSION
166 /*
167 * Check for Implementation Defined Syndrome. If so, skip checking
168 * Uncontainable error type from the syndrome as the format is unknown.
169 */
170 tbnz x1, #SERROR_IDS_BIT, 1f
171
172 /*
173 * Check for Uncontainable error type. If so, route to the platform
174 * fatal error handler rather than the generic EA one.
175 */
/* x2 = Asynchronous Error Type (AET) field of the SError syndrome in x1 */
176 ubfx x2, x1, #EABORT_AET_SHIFT, #EABORT_AET_WIDTH
177 cmp x2, #ERROR_STATUS_UET_UC
178 b.ne 1f
179
180 /* Check DFSC for SError type */
/* AET is only meaningful when the fault status code reports an SError */
181 ubfx x3, x1, #EABORT_DFSC_SHIFT, #EABORT_DFSC_WIDTH
182 cmp x3, #DFSC_SERROR
183 b.ne 1f
184
/* Uncontainable: platform fatal handler is expected not to return */
185 no_ret plat_handle_uncontainable_ea
186 1:
187 #endif
188
/* Tail-call the common delegation path; x0 (reason) and x1 (syndrome) already set */
189 b ea_proceed
190 endfunc delegate_async_ea
191
192
193 /*
194 * Delegate External Abort handling to platform's EA handler. This function
195 * assumes that all GP registers have been saved by the caller.
196 *
197 * x0: EA reason
198 * x1: EA syndrome
199 */
200 func ea_proceed
201 /*
202 * If the ESR loaded earlier is not zero, we were processing an EA
203 * already, and this is a double fault.
204 */
/* The context's ESR slot doubles as an "EA in progress" flag (cleared at the end of this function) */
205 ldr x5, [sp, #CTX_EL3STATE_OFFSET + CTX_ESR_EL3]
206 cbz x5, 1f
207 no_ret plat_handle_double_fault
208
209 1:
210 /* Save EL3 state */
211 mrs x2, spsr_el3
212 mrs x3, elr_el3
213 stp x2, x3, [sp, #CTX_EL3STATE_OFFSET + CTX_SPSR_EL3]
214
215 /*
216 * Save ESR as handling might involve lower ELs, and returning back to
217 * EL3 from there would trample the original ESR.
218 */
219 mrs x4, scr_el3
220 mrs x5, esr_el3
221 stp x4, x5, [sp, #CTX_EL3STATE_OFFSET + CTX_SCR_EL3]
222
223 /*
224 * Setup rest of arguments, and call platform External Abort handler.
225 *
226 * x0: EA reason (already in place)
227 * x1: Exception syndrome (already in place).
228 * x2: Cookie (unused for now).
229 * x3: Context pointer.
230 * x4: Flags (security state from SCR for now).
231 */
232 mov x2, xzr
233 mov x3, sp
/* x4 = SCR_EL3 bit 0 (NS): the security state the abort came from */
234 ubfx x4, x4, #0, #1
235
236 /* Switch to runtime stack */
237 ldr x5, [sp, #CTX_EL3STATE_OFFSET + CTX_RUNTIME_SP]
238 msr spsel, #MODE_SP_EL0
239 mov sp, x5
240
/* Preserve the return address in callee-saved x29: bl below clobbers x30 */
241 mov x29, x30
242 #if ENABLE_ASSERTIONS
243 /* Stash the stack pointer */
244 mov x28, sp
245 #endif
246 bl plat_ea_handler
247
248 #if ENABLE_ASSERTIONS
249 /*
250 * Error handling flows might involve long jumps; so upon returning from
251 * the platform error handler, validate that the we've completely
252 * unwound the stack.
253 */
254 mov x27, sp
255 cmp x28, x27
256 ASM_ASSERT(eq)
257 #endif
258
259 /* Make SP point to context */
260 msr spsel, #MODE_SP_ELX
261
262 /* Restore EL3 state and ESR */
263 ldp x1, x2, [sp, #CTX_EL3STATE_OFFSET + CTX_SPSR_EL3]
264 msr spsr_el3, x1
265 msr elr_el3, x2
266
267 /* Restore ESR_EL3 and SCR_EL3 */
268 ldp x3, x4, [sp, #CTX_EL3STATE_OFFSET + CTX_SCR_EL3]
269 msr scr_el3, x3
270 msr esr_el3, x4
271
272 #if ENABLE_ASSERTIONS
/* Sanity check: the ESR saved for double-fault detection must have been non-zero */
273 cmp x4, xzr
274 ASM_ASSERT(ne)
275 #endif
276
277 /* Clear ESR storage */
/* ... so a subsequent EA is not misdiagnosed as a double fault at the top of this function */
278 str xzr, [sp, #CTX_EL3STATE_OFFSET + CTX_ESR_EL3]
279
/* Return to the address captured in x29 before the handler call */
280 ret x29
281 endfunc ea_proceed