/*
 * ppc_exc_asm_macros.h - RTEMS 7.0-rc1
 */
1/* SPDX-License-Identifier: BSD-2-Clause */
2
12/*
13 * Copyright (c) 1999 Eric Valette <eric.valette@free.fr>
14 *
15 * Modified and partially rewritten by Till Straumann, 2007-2008
16 *
17 * Modified by Sebastian Huber <sebastian.huber@embedded-brains.de>, 2008.
18 *
19 * Redistribution and use in source and binary forms, with or without
20 * modification, are permitted provided that the following conditions
21 * are met:
22 * 1. Redistributions of source code must retain the above copyright
23 * notice, this list of conditions and the following disclaimer.
24 * 2. Redistributions in binary form must reproduce the above copyright
25 * notice, this list of conditions and the following disclaimer in the
26 * documentation and/or other materials provided with the distribution.
27 *
28 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
29 * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
30 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
31 * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
32 * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
33 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
34 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
35 * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
36 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
37 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
38 * POSSIBILITY OF SUCH DAMAGE.
39 */
40
41#include <bspopts.h>
42#include <bsp/vectors.h>
44
/* Bit numbers of the LT/GT/EQ bits within condition register field 'cr'
 * (each CR field is 4 bits wide), for use with CR-logical instructions.
 */
#define LT(cr) ((cr)*4+0)
#define GT(cr) ((cr)*4+1)
#define EQ(cr) ((cr)*4+2)

/* Opcode of 'stw r1, off(r13)' */
/* Upper half-word: primary opcode 36 (stw), source register r1, base
 * register r13; lower half-word: the 16-bit displacement 'off'.
 * (r1/r13 are assembler symbols evaluating to 1 and 13 here.)
 */
#define STW_R1_R13(off) ((((36<<10)|(r1<<5)|(r13))<<16) | ((off)&0xffff))

/* Non-volatile register used as the exception frame pointer, plus the
 * volatile registers reserved as vector/scratch registers by the
 * prologue and wrapper code below.
 */
#define FRAME_REGISTER r14
#define VECTOR_REGISTER r4
#define SCRATCH_REGISTER_0 r5
#define SCRATCH_REGISTER_1 r6
#define SCRATCH_REGISTER_2 r7

/* Exception frame offsets corresponding to the register aliases above */
#define FRAME_OFFSET( r) GPR14_OFFSET( r)
#define VECTOR_OFFSET( r) GPR4_OFFSET( r)
#define SCRATCH_REGISTER_0_OFFSET( r) GPR5_OFFSET( r)
#define SCRATCH_REGISTER_1_OFFSET( r) GPR6_OFFSET( r)
#define SCRATCH_REGISTER_2_OFFSET( r) GPR7_OFFSET( r)

/* Non-volatile condition register fields used to remember, across calls,
 * the exception type (CR_TYPE), whether the MSR was changed (CR_MSR) and
 * the lower-priority lock test result (CR_LOCK).
 */
#define CR_TYPE 2
#define CR_MSR 3
#define CR_LOCK 4
67
68 /*
69 * Minimal prologue snippets:
70 *
71 * Rationale: on some PPCs the vector offsets are spaced
72 * as closely as 16 bytes.
73 *
74 * If we deal with asynchronous exceptions ('interrupts')
75 * then we can use 4 instructions to
76 * 1. atomically write lock to indicate ISR is in progress
77 * (we cannot atomically increase the Thread_Dispatch_disable_level,
78 * see README)
79 * 2. save a register in special area
80 * 3. load register with vector info
81 * 4. branch
82 *
83 * If we deal with a synchronous exception (no stack switch
84 * nor dispatch-disabling necessary) then it's easier:
85 * 1. push stack frame
86 * 2. save register on stack
87 * 3. load register with vector info
88 * 4. branch
89 *
90 */
91
92/*
93 *****************************************************************************
94 * MACRO: PPC_EXC_MIN_PROLOG_ASYNC
95 *****************************************************************************
96 * USES: VECTOR_REGISTER
97 * ON EXIT: Vector in VECTOR_REGISTER
98 *
99 * NOTES: VECTOR_REGISTER saved in special variable
100 * 'ppc_exc_vector_register_\_PRI'.
101 *
102 */
/*
	 * Minimal prologue for asynchronous (interrupt-like) exceptions.
	 *
	 * USES:    VECTOR_REGISTER
	 * ON EXIT: vector (with MSB set = asynchronous type) in VECTOR_REGISTER
	 *
	 * NOTES:   VECTOR_REGISTER saved in special variable
	 *          'ppc_exc_vector_register_\_PRI'.
	 */
	.macro	PPC_EXC_MIN_PROLOG_ASYNC _NAME _VEC _PRI _FLVR

	.global	ppc_exc_min_prolog_async_\_NAME
ppc_exc_min_prolog_async_\_NAME:
	/* Atomically write lock variable in 1st instruction with non-zero
	 * value (r1 is always nonzero; r13 could also be used)
	 *
	 * NOTE: raising an exception and executing this first instruction
	 *       of the exception handler is apparently NOT atomic, i.e., a
	 *       low-priority IRQ could set the PC to this location and a
	 *       critical IRQ could intervene just at this point.
	 *
	 *       We check against this pathological case by checking the
	 *       opcode/instruction at the interrupted PC for matching
	 *
	 *         stw r1, ppc_exc_lock_XXX@sdarel(r13)
	 *
	 *       (see TEST_1ST_OPCODE_crit below, which performs that check)
	 *
	 *       ASSUMPTION:
	 *          1) ALL 'asynchronous' exceptions (which disable thread-
	 *             dispatching) execute THIS 'magical' instruction
	 *             FIRST.
	 *          2) This instruction (including the address offset)
	 *             is not used anywhere else (probably a safe assumption).
	 */
	stw	r1, ppc_exc_lock_\_PRI@sdarel(r13)
	/* We have no stack frame yet; store VECTOR_REGISTER in special area;
	 * a higher-priority (critical) interrupt uses a different area
	 * (hence the different prologue snippets) (\PRI)
	 */
	stw	VECTOR_REGISTER, ppc_exc_vector_register_\_PRI@sdarel(r13)
	/* Load vector; OR-ing in 0xffff8000 sets the MSB, which marks this
	 * as an asynchronous exception for the CR_TYPE test in WRAP.
	 */
	li	VECTOR_REGISTER, ( \_VEC | 0xffff8000 )

	/*
	 * We store the absolute branch target address here. It will be used
	 * to generate the branch operation in ppc_exc_make_prologue().
	 */
	.int	ppc_exc_wrap_\_FLVR

	.endm
144
145/*
146 *****************************************************************************
147 * MACRO: PPC_EXC_MIN_PROLOG_SYNC
148 *****************************************************************************
149 * USES: VECTOR_REGISTER
150 * ON EXIT: vector in VECTOR_REGISTER
151 *
152 * NOTES: exception stack frame pushed; VECTOR_REGISTER saved in frame
153 *
154 */
/*
	 * Minimal prologue for synchronous exceptions.
	 *
	 * USES:    VECTOR_REGISTER
	 * ON EXIT: vector (MSB clear = synchronous type) in VECTOR_REGISTER
	 *
	 * NOTES:   exception stack frame pushed; VECTOR_REGISTER saved in frame
	 */
	.macro	PPC_EXC_MIN_PROLOG_SYNC _NAME _VEC _PRI _FLVR

	.global	ppc_exc_min_prolog_sync_\_NAME
ppc_exc_min_prolog_sync_\_NAME:
	/* Push exception frame on the interrupted context's stack */
	stwu	r1, -EXCEPTION_FRAME_END(r1)
	/* Save VECTOR_REGISTER in the frame (no special area needed here) */
	stw	VECTOR_REGISTER, VECTOR_OFFSET(r1)
	li	VECTOR_REGISTER, \_VEC

	/*
	 * We store the absolute branch target address here. It will be used
	 * to generate the branch operation in ppc_exc_make_prologue().
	 * Note: the 'nopush' entry is used since the frame was pushed above.
	 */
	.int	ppc_exc_wrap_nopush_\_FLVR

	.endm
170
171/*
172 *****************************************************************************
173 * MACRO: TEST_1ST_OPCODE_crit
174 *****************************************************************************
175 *
176 * USES: REG, cr0
177 * ON EXIT: REG available (contains *pc - STW_R1_R13(0)),
178 * return value in cr0.
179 *
180 * test opcode interrupted by critical (asynchronous) exception; set CR_LOCK if
181 *
182 * *SRR0 == 'stw r1, ppc_exc_lock_std@sdarel(r13)'
183 *
184 */
/*
	 * USES:    REG, cr0
	 * ON EXIT: REG available (contains *pc - STW_R1_R13(0)),
	 *          return value in cr0.
	 *
	 * Test opcode interrupted by critical (asynchronous) exception;
	 * EQ(cr0) is set if
	 *
	 *   *SRR0 == 'stw r1, ppc_exc_lock_std@sdarel(r13)'
	 *
	 * i.e., the std prologue was interrupted before its first (lock-
	 * writing) instruction took effect.
	 */
	.macro	TEST_1ST_OPCODE_crit _REG

	/* Fetch interrupted PC from the frame, then the opcode at that PC */
	lwz	\_REG, SRR0_FRAME_OFFSET(FRAME_REGISTER)
	lwz	\_REG, 0(\_REG)
	/* opcode now in REG */

	/* subtract upper 16bits of 'stw r1, 0(r13)' instruction */
	subis	\_REG, \_REG, STW_R1_R13(0)@h
	/*
	 * if what's left compares against the 'ppc_exc_lock_std@sdarel'
	 * address offset then we have a match...
	 */
	cmplwi	cr0, \_REG, ppc_exc_lock_std@sdarel

	.endm
200
201/*
202 *****************************************************************************
203 * MACRO: TEST_LOCK_std
204 *****************************************************************************
205 *
206 * USES: CR_LOCK
207 * ON EXIT: CR_LOCK is set (indicates no lower-priority locks are engaged)
208 *
209 */
/*
	 * USES:    CR_LOCK
	 * ON EXIT: EQ(CR_LOCK) is set (indicates no lower-priority locks are
	 *          engaged)
	 */
	.macro	TEST_LOCK_std _FLVR
	/* 'std' is lowest level, i.e., can not be locked -> EQ(CR_LOCK) = 1;
	 * creqv of a bit with itself (x XNOR x) always yields 1.
	 */
	creqv	EQ(CR_LOCK), EQ(CR_LOCK), EQ(CR_LOCK)
	.endm
214
215/*
216 ******************************************************************************
217 * MACRO: TEST_LOCK_crit
218 ******************************************************************************
219 *
220 * USES: CR_LOCK, cr0, SCRATCH_REGISTER_0, SCRATCH_REGISTER_1
221 * ON EXIT: cr0, SCRATCH_REGISTER_0, SCRATCH_REGISTER_1 available,
222 * returns result in CR_LOCK.
223 *
224 * critical-exception wrapper has to check 'std' lock:
225 *
226 * Return CR_LOCK = ( (interrupt_mask & MSR_CE) != 0
227 && ppc_lock_std == 0
228 * && * SRR0 != <write std lock instruction> )
229 *
230 */
/*
	 * USES:    CR_LOCK, cr0, SCRATCH_REGISTER_0, SCRATCH_REGISTER_1
	 * ON EXIT: cr0, SCRATCH_REGISTER_0, SCRATCH_REGISTER_1 available,
	 *          returns result in EQ(CR_LOCK).
	 *
	 * The critical-exception wrapper has to check the 'std' lock:
	 *
	 *   EQ(CR_LOCK) = (    (interrupt_mask & MSR_CE) != 0
	 *                   && ppc_exc_lock_std == 0
	 *                   && *SRR0 != <write std lock instruction> )
	 */
	.macro	TEST_LOCK_crit _FLVR
	/* If MSR_CE is not in the IRQ mask then we must never allow
	 * thread-dispatching!
	 */
	GET_INTERRUPT_MASK mask=SCRATCH_REGISTER_1
	/* EQ(cr0) = ((interrupt_mask & MSR_CE) == 0) */
	andis.	SCRATCH_REGISTER_1, SCRATCH_REGISTER_1, MSR_CE@h
	/* Skip the lock/opcode tests entirely in that case; EQ(cr0) stays
	 * set so the final crandc below clears EQ(CR_LOCK).
	 */
	beq	TEST_LOCK_crit_done_\_FLVR

	/* STD interrupt could have been interrupted before executing the 1st
	 * instruction which sets the lock; check this case by looking at the
	 * opcode present at the interrupted PC location.
	 */
	TEST_1ST_OPCODE_crit	_REG=SCRATCH_REGISTER_0
	/*
	 * At this point EQ(cr0) is set if
	 *
	 *   *(PC) == 'stw r1, ppc_exc_lock_std@sdarel(r13)'
	 */

	/* Check lock: EQ(CR_LOCK) = (ppc_exc_lock_std == 0) */
	lwz	SCRATCH_REGISTER_1, ppc_exc_lock_std@sdarel(r13)
	cmplwi	CR_LOCK, SCRATCH_REGISTER_1, 0

	/* set EQ(CR_LOCK) to result */
TEST_LOCK_crit_done_\_FLVR:
	/* If we end up here because the interrupt mask did not contain
	 * MSR_CE then cr0 is set and therefore the value of CR_LOCK
	 * does not matter since x && !1 == 0:
	 *
	 *   if ( (interrupt_mask & MSR_CE) == 0 ) {
	 *     EQ(CR_LOCK) = EQ(CR_LOCK) && ! ((interrupt_mask & MSR_CE) == 0)
	 *   } else {
	 *     EQ(CR_LOCK) = (ppc_exc_lock_std == 0) && ! (*pc == <write std lock instruction>)
	 *   }
	 */
	crandc	EQ(CR_LOCK), EQ(CR_LOCK), EQ(0 /* cr0 */)

	.endm
271
272/*
273 ******************************************************************************
274 * MACRO: TEST_LOCK_mchk
275 ******************************************************************************
276 *
277 * USES: CR_LOCK
278 * ON EXIT: CR_LOCK is cleared.
279 *
280 * We never want to disable machine-check exceptions to avoid a checkstop. This
281 * means that we cannot use enabling/disabling this type of exception for
282 * protection of critical OS data structures. Therefore, calling OS primitives
283 * from a machine-check handler is ILLEGAL. Since machine-checks can happen
284 * anytime it is not legal to perform a context switch (since the exception
285 * could hit a IRQ protected section of code). We simply let this test return
286 * 0 so that ppc_exc_wrapup is never called after handling a machine-check.
287 */
/*
	 * USES:    CR_LOCK
	 * ON EXIT: EQ(CR_LOCK) is cleared.
	 *
	 * We never want to disable machine-check exceptions to avoid a
	 * checkstop.  This means that we cannot use enabling/disabling this
	 * type of exception for protection of critical OS data structures.
	 * Therefore, calling OS primitives from a machine-check handler is
	 * ILLEGAL.  Since machine-checks can happen anytime it is not legal
	 * to perform a context switch (since the exception could hit a IRQ
	 * protected section of code).  We simply let this test return 0 so
	 * that ppc_exc_wrapup is never called after handling a machine-check.
	 */
	.macro	TEST_LOCK_mchk _SRR0 _FLVR

	/* crxor of a bit with itself always yields 0 -> EQ(CR_LOCK) = 0 */
	crxor	EQ(CR_LOCK), EQ(CR_LOCK), EQ(CR_LOCK)

	.endm
293
294/*
295 ******************************************************************************
296 * MACRO: RECOVER_CHECK_\PRI
297 ******************************************************************************
298 *
299 * USES: cr0, SCRATCH_REGISTER_0, SCRATCH_REGISTER_1
300 * ON EXIT: cr0, SCRATCH_REGISTER_0, SCRATCH_REGISTER_1 available
301 *
302 * Checks if the exception is recoverable for exceptions which need such a
303 * test.
304 */
305
/* Standard flavor recoverability check.
 *
 * USES:    cr0, SCRATCH_REGISTER_0, SCRATCH_REGISTER_1
 * ON EXIT: cr0, SCRATCH_REGISTER_0, SCRATCH_REGISTER_1 available
 *
 * Compares the saved SRR1 against the expected ppc_exc_msr_bits; if the
 * MSR_RI bit differs the exception is not recoverable and we deliberately
 * spin forever ("twiddle") rather than attempt a broken return.
 */
	.macro	RECOVER_CHECK_std _FLVR

#ifndef PPC_EXC_CONFIG_BOOKE_ONLY

	/* Check if exception is recoverable: XOR the saved SRR1 with the
	 * expected MSR bits and isolate MSR_RI; non-zero -> mismatch.
	 */
	lwz	SCRATCH_REGISTER_0, SRR1_FRAME_OFFSET(FRAME_REGISTER)
	lwz	SCRATCH_REGISTER_1, ppc_exc_msr_bits@sdarel(r13)
	xor	SCRATCH_REGISTER_1, SCRATCH_REGISTER_1, SCRATCH_REGISTER_0
	andi.	SCRATCH_REGISTER_0, SCRATCH_REGISTER_1, MSR_RI

recover_check_twiddle_std_\_FLVR:

	/* Not recoverable? Then loop here forever (bne branches to itself). */
	bne	recover_check_twiddle_std_\_FLVR

#endif /* PPC_EXC_CONFIG_BOOKE_ONLY */

	.endm
325
/* Critical flavor recoverability check: no test is required for this
 * flavor, so the macro expands to nothing.
 */
	.macro	RECOVER_CHECK_crit _FLVR

	/* Nothing to do */

	.endm
332
/* Machine check flavor recoverability check.
 *
 * USES:    cr0, SCRATCH_REGISTER_0, SCRATCH_REGISTER_1
 * ON EXIT: cr0, SCRATCH_REGISTER_0, SCRATCH_REGISTER_1 available
 *
 * Same MSR_RI test as RECOVER_CHECK_std: spin forever if the exception
 * is not recoverable.
 */
	.macro	RECOVER_CHECK_mchk _FLVR

#ifndef PPC_EXC_CONFIG_BOOKE_ONLY

	/* Check if exception is recoverable: XOR the saved SRR1 with the
	 * expected MSR bits and isolate MSR_RI; non-zero -> mismatch.
	 */
	lwz	SCRATCH_REGISTER_0, SRR1_FRAME_OFFSET(FRAME_REGISTER)
	lwz	SCRATCH_REGISTER_1, ppc_exc_msr_bits@sdarel(r13)
	xor	SCRATCH_REGISTER_1, SCRATCH_REGISTER_1, SCRATCH_REGISTER_0
	andi.	SCRATCH_REGISTER_0, SCRATCH_REGISTER_1, MSR_RI

recover_check_twiddle_mchk_\_FLVR:

	/* Not recoverable? Then loop here forever (bne branches to itself). */
	bne	recover_check_twiddle_mchk_\_FLVR

#endif /* PPC_EXC_CONFIG_BOOKE_ONLY */

	.endm
352
353/*
354 ******************************************************************************
355 * MACRO: WRAP
356 ******************************************************************************
357 *
358 * Minimal prologue snippets jump into WRAP which calls the high level
359 * exception handler. We must have this macro instantiated for each possible
360 * flavor of exception so that we use the proper lock variable, SRR register
361 * pair and RFI instruction.
362 *
363 * We have two types of exceptions: synchronous and asynchronous (= interrupt
364 * like). The type is encoded in the vector register (= VECTOR_REGISTER). For
365 * interrupt like exceptions the MSB in the vector register is set. The
366 * exception type is kept in the comparison register CR_TYPE. Normal
367 * exceptions (MSB is clear) use the task stack and a context switch may happen
368 * at any time. The interrupt like exceptions disable thread dispatching and
369 * switch to the interrupt stack (base address is in SPRG1).
370 *
371 * +
372 * |
373 * | Minimal prologue
374 * |
375 * +
376 * |
377 * | o Setup frame pointer
378 * | o Save basic registers
379 * | o Determine exception type:
380 * | synchronous or asynchronous
381 * |
382 * +-----+
383 * Synchronous exceptions: | | Asynchronous exceptions:
384 * | |
385 * Save non-volatile registers | | o Increment thread dispatch
386 * | | disable level
387 * | | o Increment ISR nest level
388 * | | o Clear lock
389 * | | o Switch stack if necessary
390 * | |
391 * +---->+
392 * |
393 * | o Save volatile registers
394 * | o Change MSR if necessary
395 * | o Call high level handler
396 * | o Call global handler if necessary
397 * | o Check if exception is recoverable
398 * |
399 * +-----+
400 * Synchronous exceptions: | | Asynchronous exceptions:
401 * | |
402 * Restore non-volatile registers | | o Decrement ISR nest level
403 * | | o Switch stack
404 * | | o Decrement thread dispatch
405 * | | disable level
406 * | | o Test lock
407 * | | o May do a context switch
408 * | |
409 * +---->+
410 * |
411 * | o Restore MSR if necessary
412 * | o Restore volatile registers
413 * | o Restore frame pointer
414 * | o Return
415 * |
416 * +
417 */
418 .macro WRAP _FLVR _PRI _SRR0 _SRR1 _RFI
419
420 .global ppc_exc_wrap_\_FLVR
421ppc_exc_wrap_\_FLVR:
422
423 /* Push exception frame */
424 stwu r1, -EXCEPTION_FRAME_END(r1)
425
426 .global ppc_exc_wrap_nopush_\_FLVR
427ppc_exc_wrap_nopush_\_FLVR:
428
429 /* Save frame register */
430 stw FRAME_REGISTER, FRAME_OFFSET(r1)
431
432wrap_no_save_frame_register_\_FLVR:
433
434 /*
435 * We save at first only some scratch registers
436 * and the CR. We use a non-volatile register
437 * for the exception frame pointer (= FRAME_REGISTER).
438 */
439
440 /* Move frame address in non-volatile FRAME_REGISTER */
441 mr FRAME_REGISTER, r1
442
443 /* Save scratch registers */
444 stw SCRATCH_REGISTER_0, SCRATCH_REGISTER_0_OFFSET(FRAME_REGISTER)
445 stw SCRATCH_REGISTER_1, SCRATCH_REGISTER_1_OFFSET(FRAME_REGISTER)
446 stw SCRATCH_REGISTER_2, SCRATCH_REGISTER_2_OFFSET(FRAME_REGISTER)
447
448 /* Save CR */
449 mfcr SCRATCH_REGISTER_0
450 stw SCRATCH_REGISTER_0, EXC_CR_OFFSET(FRAME_REGISTER)
451
452 /* Check exception type and remember it in non-volatile CR_TYPE */
453 cmpwi CR_TYPE, VECTOR_REGISTER, 0
454
455#if defined(PPC_MULTILIB_FPU) || defined(PPC_MULTILIB_ALTIVEC)
456 /* Enable FPU and/or AltiVec */
457 mfmsr SCRATCH_REGISTER_0
458#ifdef PPC_MULTILIB_FPU
459 ori SCRATCH_REGISTER_0, SCRATCH_REGISTER_0, MSR_FP
460#endif
461#ifdef PPC_MULTILIB_ALTIVEC
462 oris SCRATCH_REGISTER_0, SCRATCH_REGISTER_0, MSR_VE >> 16
463#endif
464 mtmsr SCRATCH_REGISTER_0
465 isync
466#endif
467
468 /*
469 * Depending on the exception type we do now save the non-volatile
470 * registers or disable thread dispatching and switch to the ISR stack.
471 */
472
473 /* Branch for synchronous exceptions */
474 bge CR_TYPE, wrap_save_non_volatile_regs_\_FLVR
475
476 /*
477 * Increment the thread dispatch disable level in case a higher
478 * priority exception occurs we don't want it to run the scheduler. It
479 * is safe to increment this without disabling higher priority
480 * exceptions since those will see that we wrote the lock anyways.
481 */
482
483 /* Increment ISR nest level and thread dispatch disable level */
484 GET_SELF_CPU_CONTROL SCRATCH_REGISTER_2
485 lwz SCRATCH_REGISTER_0, PER_CPU_ISR_NEST_LEVEL@l(SCRATCH_REGISTER_2)
486 lwz SCRATCH_REGISTER_1, PER_CPU_THREAD_DISPATCH_DISABLE_LEVEL(SCRATCH_REGISTER_2)
487 addi SCRATCH_REGISTER_0, SCRATCH_REGISTER_0, 1
488 addi SCRATCH_REGISTER_1, SCRATCH_REGISTER_1, 1
489 stw SCRATCH_REGISTER_0, PER_CPU_ISR_NEST_LEVEL@l(SCRATCH_REGISTER_2)
490 stw SCRATCH_REGISTER_1, PER_CPU_THREAD_DISPATCH_DISABLE_LEVEL(SCRATCH_REGISTER_2)
491
492 /*
493 * No higher-priority exception occurring after this point
494 * can cause a context switch.
495 */
496
497 /* Clear lock */
498 li SCRATCH_REGISTER_0, 0
499 stw SCRATCH_REGISTER_0, ppc_exc_lock_\_PRI@sdarel(r13)
500
501 /* Switch stack if necessary */
502 mfspr SCRATCH_REGISTER_0, SPRG1
503 cmpw SCRATCH_REGISTER_0, r1
504 blt wrap_stack_switch_\_FLVR
505 mfspr SCRATCH_REGISTER_1, SPRG2
506 cmpw SCRATCH_REGISTER_1, r1
507 blt wrap_stack_switch_done_\_FLVR
508
509wrap_stack_switch_\_FLVR:
510
511 mr r1, SCRATCH_REGISTER_0
512
513wrap_stack_switch_done_\_FLVR:
514
515 /*
516 * Load the pristine VECTOR_REGISTER from a special location for
517 * asynchronous exceptions. The synchronous exceptions save the
518 * VECTOR_REGISTER in their minimal prologue.
519 */
520 lwz SCRATCH_REGISTER_2, ppc_exc_vector_register_\_PRI@sdarel(r13)
521
522 /* Save pristine vector register */
523 stw SCRATCH_REGISTER_2, VECTOR_OFFSET(FRAME_REGISTER)
524
525wrap_disable_thread_dispatching_done_\_FLVR:
526
527 /*
528 * We now have SCRATCH_REGISTER_0, SCRATCH_REGISTER_1,
529 * SCRATCH_REGISTER_2 and CR available. VECTOR_REGISTER still holds
530 * the vector (and exception type). FRAME_REGISTER is a pointer to the
531 * exception frame (always on the stack of the interrupted context).
532 * r1 is the stack pointer, either on the task stack or on the ISR
533 * stack. CR_TYPE holds the exception type.
534 */
535
536 /* Save SRR0 */
537 mfspr SCRATCH_REGISTER_0, \_SRR0
538 stw SCRATCH_REGISTER_0, SRR0_FRAME_OFFSET(FRAME_REGISTER)
539
540 /* Save SRR1 */
541 mfspr SCRATCH_REGISTER_0, \_SRR1
542 stw SCRATCH_REGISTER_0, SRR1_FRAME_OFFSET(FRAME_REGISTER)
543
544 /* Save CTR */
545 mfctr SCRATCH_REGISTER_0
546 stw SCRATCH_REGISTER_0, EXC_CTR_OFFSET(FRAME_REGISTER)
547
548 /* Save XER */
549 mfxer SCRATCH_REGISTER_0
550 stw SCRATCH_REGISTER_0, EXC_XER_OFFSET(FRAME_REGISTER)
551
552 /* Save LR */
553 mflr SCRATCH_REGISTER_0
554 stw SCRATCH_REGISTER_0, EXC_LR_OFFSET(FRAME_REGISTER)
555
556 /* Save volatile registers */
557 stw r0, GPR0_OFFSET(FRAME_REGISTER)
558 stw r3, GPR3_OFFSET(FRAME_REGISTER)
559 stw r8, GPR8_OFFSET(FRAME_REGISTER)
560 stw r9, GPR9_OFFSET(FRAME_REGISTER)
561 stw r10, GPR10_OFFSET(FRAME_REGISTER)
562 stw r11, GPR11_OFFSET(FRAME_REGISTER)
563 stw r12, GPR12_OFFSET(FRAME_REGISTER)
564
565 /* Save read-only small data area anchor (EABI) */
566 stw r2, GPR2_OFFSET(FRAME_REGISTER)
567
568 /* Save vector number and exception type */
569 stw VECTOR_REGISTER, EXCEPTION_NUMBER_OFFSET(FRAME_REGISTER)
570
571#ifndef PPC_EXC_CONFIG_BOOKE_ONLY
572
573 /* Load MSR bit mask */
574 lwz SCRATCH_REGISTER_0, ppc_exc_msr_bits@sdarel(r13)
575
576 /*
577 * Change the MSR if necessary (MMU, RI),
578 * remember decision in non-volatile CR_MSR
579 */
580 cmpwi CR_MSR, SCRATCH_REGISTER_0, 0
581 bne CR_MSR, wrap_change_msr_\_FLVR
582
583wrap_change_msr_done_\_FLVR:
584
585#endif /* PPC_EXC_CONFIG_BOOKE_ONLY */
586
587#if defined(__ALTIVEC__) && !defined(PPC_MULTILIB_ALTIVEC)
588 LA SCRATCH_REGISTER_0, _CPU_save_altivec_volatile
589 mtctr SCRATCH_REGISTER_0
590 addi r3, FRAME_REGISTER, EXC_VEC_OFFSET
591 bctrl
592 /*
593 * Establish defaults for vrsave and vscr
594 */
595 li SCRATCH_REGISTER_0, 0
596 mtvrsave SCRATCH_REGISTER_0
597 /*
598 * Use java/c9x mode; clear saturation bit
599 */
600 vxor 0, 0, 0
601 mtvscr 0
602 /*
603 * Reload VECTOR_REGISTER
604 */
605 lwz VECTOR_REGISTER, EXCEPTION_NUMBER_OFFSET(FRAME_REGISTER)
606#endif
607
#ifdef PPC_MULTILIB_ALTIVEC
	/* Save volatile AltiVec registers v0..v19 and the VSCR.
	 *
	 * v0 is saved first so that it can then be reused to fetch the VSCR
	 * via mfvscr; the VSCR value held in v0 is stored last (stvewx).
	 *
	 * BUG FIX: the v10 slot used PPC_EXC_VR_OFFSET(0), which overwrote
	 * the saved v0 and left v10's slot uninitialized; it must be
	 * PPC_EXC_VR_OFFSET(10) to follow the 0..19 sequence.
	 */
	li	SCRATCH_REGISTER_0, PPC_EXC_VR_OFFSET(0)
	stvx	v0, FRAME_REGISTER, SCRATCH_REGISTER_0
	mfvscr	v0
	li	SCRATCH_REGISTER_0, PPC_EXC_VR_OFFSET(1)
	stvx	v1, FRAME_REGISTER, SCRATCH_REGISTER_0
	li	SCRATCH_REGISTER_0, PPC_EXC_VR_OFFSET(2)
	stvx	v2, FRAME_REGISTER, SCRATCH_REGISTER_0
	li	SCRATCH_REGISTER_0, PPC_EXC_VR_OFFSET(3)
	stvx	v3, FRAME_REGISTER, SCRATCH_REGISTER_0
	li	SCRATCH_REGISTER_0, PPC_EXC_VR_OFFSET(4)
	stvx	v4, FRAME_REGISTER, SCRATCH_REGISTER_0
	li	SCRATCH_REGISTER_0, PPC_EXC_VR_OFFSET(5)
	stvx	v5, FRAME_REGISTER, SCRATCH_REGISTER_0
	li	SCRATCH_REGISTER_0, PPC_EXC_VR_OFFSET(6)
	stvx	v6, FRAME_REGISTER, SCRATCH_REGISTER_0
	li	SCRATCH_REGISTER_0, PPC_EXC_VR_OFFSET(7)
	stvx	v7, FRAME_REGISTER, SCRATCH_REGISTER_0
	li	SCRATCH_REGISTER_0, PPC_EXC_VR_OFFSET(8)
	stvx	v8, FRAME_REGISTER, SCRATCH_REGISTER_0
	li	SCRATCH_REGISTER_0, PPC_EXC_VR_OFFSET(9)
	stvx	v9, FRAME_REGISTER, SCRATCH_REGISTER_0
	li	SCRATCH_REGISTER_0, PPC_EXC_VR_OFFSET(10)
	stvx	v10, FRAME_REGISTER, SCRATCH_REGISTER_0
	li	SCRATCH_REGISTER_0, PPC_EXC_VR_OFFSET(11)
	stvx	v11, FRAME_REGISTER, SCRATCH_REGISTER_0
	li	SCRATCH_REGISTER_0, PPC_EXC_VR_OFFSET(12)
	stvx	v12, FRAME_REGISTER, SCRATCH_REGISTER_0
	li	SCRATCH_REGISTER_0, PPC_EXC_VR_OFFSET(13)
	stvx	v13, FRAME_REGISTER, SCRATCH_REGISTER_0
	li	SCRATCH_REGISTER_0, PPC_EXC_VR_OFFSET(14)
	stvx	v14, FRAME_REGISTER, SCRATCH_REGISTER_0
	li	SCRATCH_REGISTER_0, PPC_EXC_VR_OFFSET(15)
	stvx	v15, FRAME_REGISTER, SCRATCH_REGISTER_0
	li	SCRATCH_REGISTER_0, PPC_EXC_VR_OFFSET(16)
	stvx	v16, FRAME_REGISTER, SCRATCH_REGISTER_0
	li	SCRATCH_REGISTER_0, PPC_EXC_VR_OFFSET(17)
	stvx	v17, FRAME_REGISTER, SCRATCH_REGISTER_0
	li	SCRATCH_REGISTER_0, PPC_EXC_VR_OFFSET(18)
	stvx	v18, FRAME_REGISTER, SCRATCH_REGISTER_0
	li	SCRATCH_REGISTER_0, PPC_EXC_VR_OFFSET(19)
	stvx	v19, FRAME_REGISTER, SCRATCH_REGISTER_0
	/* Store VSCR (held in v0 since the mfvscr above) */
	li	SCRATCH_REGISTER_0, PPC_EXC_VSCR_OFFSET
	stvewx	v0, r1, SCRATCH_REGISTER_0
#endif
653
#ifdef PPC_MULTILIB_FPU
	/* Save volatile FP registers f0..f13 and the FPSCR.
	 * f0 is saved first so it can be reused to fetch the FPSCR (mffs);
	 * the FPSCR value held in f0 is stored last.
	 */
	stfd	f0, PPC_EXC_FR_OFFSET(0)(FRAME_REGISTER)
	mffs	f0
	stfd	f1, PPC_EXC_FR_OFFSET(1)(FRAME_REGISTER)
	stfd	f2, PPC_EXC_FR_OFFSET(2)(FRAME_REGISTER)
	stfd	f3, PPC_EXC_FR_OFFSET(3)(FRAME_REGISTER)
	stfd	f4, PPC_EXC_FR_OFFSET(4)(FRAME_REGISTER)
	stfd	f5, PPC_EXC_FR_OFFSET(5)(FRAME_REGISTER)
	stfd	f6, PPC_EXC_FR_OFFSET(6)(FRAME_REGISTER)
	stfd	f7, PPC_EXC_FR_OFFSET(7)(FRAME_REGISTER)
	stfd	f8, PPC_EXC_FR_OFFSET(8)(FRAME_REGISTER)
	stfd	f9, PPC_EXC_FR_OFFSET(9)(FRAME_REGISTER)
	stfd	f10, PPC_EXC_FR_OFFSET(10)(FRAME_REGISTER)
	stfd	f11, PPC_EXC_FR_OFFSET(11)(FRAME_REGISTER)
	stfd	f12, PPC_EXC_FR_OFFSET(12)(FRAME_REGISTER)
	stfd	f13, PPC_EXC_FR_OFFSET(13)(FRAME_REGISTER)
	/* Store FPSCR (held in f0 since the mffs above) */
	stfd	f0, PPC_EXC_FPSCR_OFFSET(FRAME_REGISTER)
#endif
672
673 /*
674 * Call high level exception handler
675 */
676
677 /*
678 * Get the handler table index from the vector number. We have to
679 * discard the exception type. Take only the least significant five
680 * bits (= LAST_VALID_EXC + 1) from the vector register. Multiply by
681 * four (= size of function pointer).
682 */
683 rlwinm SCRATCH_REGISTER_1, VECTOR_REGISTER, 2, 25, 29
684
685 /* Load handler table address */
686 LA SCRATCH_REGISTER_0, ppc_exc_handler_table
687
688 /* Load handler address */
689 lwzx SCRATCH_REGISTER_0, SCRATCH_REGISTER_0, SCRATCH_REGISTER_1
690
691 /*
692 * First parameter = exception frame pointer + FRAME_LINK_SPACE
693 *
694 * We add FRAME_LINK_SPACE to the frame pointer because the high level
695 * handler expects a BSP_Exception_frame structure.
696 */
697 addi r3, FRAME_REGISTER, FRAME_LINK_SPACE
698
699 /*
700 * Second parameter = vector number (r4 is the VECTOR_REGISTER)
701 *
702 * Discard the exception type and store the vector number
703 * in the vector register. Take only the least significant
704 * five bits (= LAST_VALID_EXC + 1).
705 */
706 rlwinm VECTOR_REGISTER, VECTOR_REGISTER, 0, 27, 31
707
708 /* Call handler */
709 mtctr SCRATCH_REGISTER_0
710 bctrl
711
712 /* Check return value and call global handler if necessary */
713 cmpwi r3, 0
714 bne wrap_call_global_handler_\_FLVR
715
716wrap_handler_done_\_FLVR:
717
718 /* Check if exception is recoverable */
719 RECOVER_CHECK_\_PRI _FLVR=\_FLVR
720
721 /*
722 * Depending on the exception type we do now restore the non-volatile
723 * registers or enable thread dispatching and switch back from the ISR
724 * stack.
725 */
726
727 /* Branch for synchronous exceptions */
728 bge CR_TYPE, wrap_restore_non_volatile_regs_\_FLVR
729
730 /*
731 * Switch back to original stack (FRAME_REGISTER == r1 if we are still
732 * on the IRQ stack).
733 */
734 mr r1, FRAME_REGISTER
735
736 /*
737 * Check thread dispatch disable level AND lower priority locks (in
738 * CR_LOCK): ONLY if the thread dispatch disable level == 0 AND no lock
739 * is set then call ppc_exc_wrapup() which may do a context switch. We
740 * can skip TEST_LOCK, because it has no side effects.
741 */
742
743 /* Decrement ISR nest level and thread dispatch disable level */
744 GET_SELF_CPU_CONTROL SCRATCH_REGISTER_2
745 lwz SCRATCH_REGISTER_0, PER_CPU_ISR_NEST_LEVEL@l(SCRATCH_REGISTER_2)
746 lwz SCRATCH_REGISTER_1, PER_CPU_THREAD_DISPATCH_DISABLE_LEVEL(SCRATCH_REGISTER_2)
747 subi SCRATCH_REGISTER_0, SCRATCH_REGISTER_0, 1
748 subic. SCRATCH_REGISTER_1, SCRATCH_REGISTER_1, 1
749 stw SCRATCH_REGISTER_0, PER_CPU_ISR_NEST_LEVEL@l(SCRATCH_REGISTER_2)
750 stw SCRATCH_REGISTER_1, PER_CPU_THREAD_DISPATCH_DISABLE_LEVEL(SCRATCH_REGISTER_2)
751
752 /* Branch to skip thread dispatching */
753 bne wrap_thread_dispatching_done_\_FLVR
754
755 /* Test lower-priority locks (result in non-volatile CR_LOCK) */
756 TEST_LOCK_\_PRI _FLVR=\_FLVR
757
758 /* Branch to skip thread dispatching */
759 bne CR_LOCK, wrap_thread_dispatching_done_\_FLVR
760
761 /* Load address of ppc_exc_wrapup() */
762 LA SCRATCH_REGISTER_0, ppc_exc_wrapup
763
764 /* First parameter = exception frame pointer + FRAME_LINK_SPACE */
765 addi r3, FRAME_REGISTER, FRAME_LINK_SPACE
766
767 /* Call ppc_exc_wrapup() */
768 mtctr SCRATCH_REGISTER_0
769 bctrl
770
771wrap_thread_dispatching_done_\_FLVR:
772
773#if defined(__ALTIVEC__) && !defined(PPC_MULTILIB_ALTIVEC)
774 LA SCRATCH_REGISTER_0, _CPU_load_altivec_volatile
775 mtctr SCRATCH_REGISTER_0
776 addi r3, FRAME_REGISTER, EXC_VEC_OFFSET
777 bctrl
778#endif
779
#ifdef PPC_MULTILIB_ALTIVEC
	/* Restore the VSCR first (via v0), then the volatile AltiVec
	 * registers v0..v19.
	 *
	 * BUG FIX: the VSCR was reloaded from PPC_EXC_MIN_VSCR_OFFSET while
	 * the save path of this same macro stored it at PPC_EXC_VSCR_OFFSET;
	 * the restore must read the same slot that was written.
	 *
	 * BUG FIX: the v10 slot used PPC_EXC_VR_OFFSET(0) (the v0 slot); it
	 * must be PPC_EXC_VR_OFFSET(10) to follow the 0..19 sequence.
	 */
	li	SCRATCH_REGISTER_0, PPC_EXC_VSCR_OFFSET
	lvewx	v0, r1, SCRATCH_REGISTER_0
	mtvscr	v0
	li	SCRATCH_REGISTER_0, PPC_EXC_VR_OFFSET(0)
	lvx	v0, FRAME_REGISTER, SCRATCH_REGISTER_0
	li	SCRATCH_REGISTER_0, PPC_EXC_VR_OFFSET(1)
	lvx	v1, FRAME_REGISTER, SCRATCH_REGISTER_0
	li	SCRATCH_REGISTER_0, PPC_EXC_VR_OFFSET(2)
	lvx	v2, FRAME_REGISTER, SCRATCH_REGISTER_0
	li	SCRATCH_REGISTER_0, PPC_EXC_VR_OFFSET(3)
	lvx	v3, FRAME_REGISTER, SCRATCH_REGISTER_0
	li	SCRATCH_REGISTER_0, PPC_EXC_VR_OFFSET(4)
	lvx	v4, FRAME_REGISTER, SCRATCH_REGISTER_0
	li	SCRATCH_REGISTER_0, PPC_EXC_VR_OFFSET(5)
	lvx	v5, FRAME_REGISTER, SCRATCH_REGISTER_0
	li	SCRATCH_REGISTER_0, PPC_EXC_VR_OFFSET(6)
	lvx	v6, FRAME_REGISTER, SCRATCH_REGISTER_0
	li	SCRATCH_REGISTER_0, PPC_EXC_VR_OFFSET(7)
	lvx	v7, FRAME_REGISTER, SCRATCH_REGISTER_0
	li	SCRATCH_REGISTER_0, PPC_EXC_VR_OFFSET(8)
	lvx	v8, FRAME_REGISTER, SCRATCH_REGISTER_0
	li	SCRATCH_REGISTER_0, PPC_EXC_VR_OFFSET(9)
	lvx	v9, FRAME_REGISTER, SCRATCH_REGISTER_0
	li	SCRATCH_REGISTER_0, PPC_EXC_VR_OFFSET(10)
	lvx	v10, FRAME_REGISTER, SCRATCH_REGISTER_0
	li	SCRATCH_REGISTER_0, PPC_EXC_VR_OFFSET(11)
	lvx	v11, FRAME_REGISTER, SCRATCH_REGISTER_0
	li	SCRATCH_REGISTER_0, PPC_EXC_VR_OFFSET(12)
	lvx	v12, FRAME_REGISTER, SCRATCH_REGISTER_0
	li	SCRATCH_REGISTER_0, PPC_EXC_VR_OFFSET(13)
	lvx	v13, FRAME_REGISTER, SCRATCH_REGISTER_0
	li	SCRATCH_REGISTER_0, PPC_EXC_VR_OFFSET(14)
	lvx	v14, FRAME_REGISTER, SCRATCH_REGISTER_0
	li	SCRATCH_REGISTER_0, PPC_EXC_VR_OFFSET(15)
	lvx	v15, FRAME_REGISTER, SCRATCH_REGISTER_0
	li	SCRATCH_REGISTER_0, PPC_EXC_VR_OFFSET(16)
	lvx	v16, FRAME_REGISTER, SCRATCH_REGISTER_0
	li	SCRATCH_REGISTER_0, PPC_EXC_VR_OFFSET(17)
	lvx	v17, FRAME_REGISTER, SCRATCH_REGISTER_0
	li	SCRATCH_REGISTER_0, PPC_EXC_VR_OFFSET(18)
	lvx	v18, FRAME_REGISTER, SCRATCH_REGISTER_0
	li	SCRATCH_REGISTER_0, PPC_EXC_VR_OFFSET(19)
	lvx	v19, FRAME_REGISTER, SCRATCH_REGISTER_0
#endif
825
#ifdef PPC_MULTILIB_FPU
	/* Restore the FPSCR first (via f0, mtfsf with full field mask 0xff),
	 * then the volatile FP registers f0..f13.
	 */
	lfd	f0, PPC_EXC_FPSCR_OFFSET(FRAME_REGISTER)
	mtfsf	0xff, f0
	lfd	f0, PPC_EXC_FR_OFFSET(0)(FRAME_REGISTER)
	lfd	f1, PPC_EXC_FR_OFFSET(1)(FRAME_REGISTER)
	lfd	f2, PPC_EXC_FR_OFFSET(2)(FRAME_REGISTER)
	lfd	f3, PPC_EXC_FR_OFFSET(3)(FRAME_REGISTER)
	lfd	f4, PPC_EXC_FR_OFFSET(4)(FRAME_REGISTER)
	lfd	f5, PPC_EXC_FR_OFFSET(5)(FRAME_REGISTER)
	lfd	f6, PPC_EXC_FR_OFFSET(6)(FRAME_REGISTER)
	lfd	f7, PPC_EXC_FR_OFFSET(7)(FRAME_REGISTER)
	lfd	f8, PPC_EXC_FR_OFFSET(8)(FRAME_REGISTER)
	lfd	f9, PPC_EXC_FR_OFFSET(9)(FRAME_REGISTER)
	lfd	f10, PPC_EXC_FR_OFFSET(10)(FRAME_REGISTER)
	lfd	f11, PPC_EXC_FR_OFFSET(11)(FRAME_REGISTER)
	lfd	f12, PPC_EXC_FR_OFFSET(12)(FRAME_REGISTER)
	lfd	f13, PPC_EXC_FR_OFFSET(13)(FRAME_REGISTER)
#endif
844
845#ifndef PPC_EXC_CONFIG_BOOKE_ONLY
846
847 /* Restore MSR? */
848 bne CR_MSR, wrap_restore_msr_\_FLVR
849
850wrap_restore_msr_done_\_FLVR:
851
852#endif /* PPC_EXC_CONFIG_BOOKE_ONLY */
853
854 /*
855 * At this point r1 is a valid exception frame pointer and
856 * FRAME_REGISTER is no longer needed.
857 */
858
859 /* Restore frame register */
860 lwz FRAME_REGISTER, FRAME_OFFSET(r1)
861
862 /* Restore XER and CTR */
863 lwz SCRATCH_REGISTER_0, EXC_XER_OFFSET(r1)
864 lwz SCRATCH_REGISTER_1, EXC_CTR_OFFSET(r1)
865 mtxer SCRATCH_REGISTER_0
866 mtctr SCRATCH_REGISTER_1
867
868 /* Restore CR and LR */
869 lwz SCRATCH_REGISTER_0, EXC_CR_OFFSET(r1)
870 lwz SCRATCH_REGISTER_1, EXC_LR_OFFSET(r1)
871 mtcr SCRATCH_REGISTER_0
872 mtlr SCRATCH_REGISTER_1
873
874 /* Restore volatile registers */
875 lwz r0, GPR0_OFFSET(r1)
876 lwz r3, GPR3_OFFSET(r1)
877 lwz r8, GPR8_OFFSET(r1)
878 lwz r9, GPR9_OFFSET(r1)
879 lwz r10, GPR10_OFFSET(r1)
880 lwz r11, GPR11_OFFSET(r1)
881 lwz r12, GPR12_OFFSET(r1)
882
883 /* Restore read-only small data area anchor (EABI) */
884 lwz r2, GPR2_OFFSET(r1)
885
886 /* Restore vector register */
887 lwz VECTOR_REGISTER, VECTOR_OFFSET(r1)
888
889 /*
890 * Disable all asynchronous exceptions which can do a thread dispatch.
891 * See README.
892 */
893 INTERRUPT_DISABLE SCRATCH_REGISTER_1, SCRATCH_REGISTER_0
894
895 /* Restore scratch registers and SRRs */
896 lwz SCRATCH_REGISTER_0, SRR0_FRAME_OFFSET(r1)
897 lwz SCRATCH_REGISTER_1, SRR1_FRAME_OFFSET(r1)
898 lwz SCRATCH_REGISTER_2, SCRATCH_REGISTER_2_OFFSET(r1)
899 mtspr \_SRR0, SCRATCH_REGISTER_0
900 lwz SCRATCH_REGISTER_0, SCRATCH_REGISTER_0_OFFSET(r1)
901 mtspr \_SRR1, SCRATCH_REGISTER_1
902 lwz SCRATCH_REGISTER_1, SCRATCH_REGISTER_1_OFFSET(r1)
903
904 /*
905 * We restore r1 from the frame rather than just popping (adding to
906 * current r1) since the exception handler might have done strange
907 * things (e.g. a debugger moving and relocating the stack).
908 */
909 lwz r1, 0(r1)
910
911 /* Return */
912 \_RFI
913
914#ifndef PPC_EXC_CONFIG_BOOKE_ONLY
915
/*
 * Out-of-line helper: OR additional bits into the MSR.
 * SCRATCH_REGISTER_0 is expected to hold the bit mask to set --
 * presumably loaded by the branch site, which is outside this excerpt
 * (TODO confirm).  Branches back into the main wrapper when done.
 */
916wrap_change_msr_\_FLVR:
917
918 mfmsr SCRATCH_REGISTER_1
919 or SCRATCH_REGISTER_1, SCRATCH_REGISTER_1, SCRATCH_REGISTER_0
920 mtmsr SCRATCH_REGISTER_1
 /* Context-synchronize so the new MSR takes effect before continuing */
921 msync
922 isync
923 b wrap_change_msr_done_\_FLVR
924
/*
 * Out-of-line helper: clear the MSR bits given by the global variable
 * ppc_exc_msr_bits, accessed via the small data area anchor (r13).
 * Inverse of wrap_change_msr above.
 */
925wrap_restore_msr_\_FLVR:
926
927 lwz SCRATCH_REGISTER_0, ppc_exc_msr_bits@sdarel(r13)
928 mfmsr SCRATCH_REGISTER_1
929 andc SCRATCH_REGISTER_1, SCRATCH_REGISTER_1, SCRATCH_REGISTER_0
930 mtmsr SCRATCH_REGISTER_1
 /* Context-synchronize so the new MSR takes effect before continuing */
931 msync
932 isync
933 b wrap_restore_msr_done_\_FLVR
934
935#endif /* PPC_EXC_CONFIG_BOOKE_ONLY */
936
/*
 * Out-of-line path: save all non-volatile (callee-saved) register state
 * into the exception frame addressed by FRAME_REGISTER (r14, which was
 * saved separately).  Covers r13, the pristine r1, r15..r31, and --
 * depending on multilib configuration -- the non-volatile AltiVec and
 * FPU registers.  Returns to the main wrapper when done.
 */
937wrap_save_non_volatile_regs_\_FLVR:
938
939 /* Load pristine stack pointer */
940 lwz SCRATCH_REGISTER_1, 0(FRAME_REGISTER)
941
942 /* Save small data area anchor (SYSV) */
943 stw r13, GPR13_OFFSET(FRAME_REGISTER)
944
945 /* Save pristine stack pointer */
946 stw SCRATCH_REGISTER_1, GPR1_OFFSET(FRAME_REGISTER)
947
948 /* r14 is the FRAME_REGISTER and will be saved elsewhere */
949
950 /* Save non-volatile registers r15 .. r31 */
951#ifndef __SPE__
952 stmw r15, GPR15_OFFSET(FRAME_REGISTER)
953#else
 /*
  * Individual stores instead of stmw -- presumably because the
  * store-multiple instruction is not suitable/available on SPE-enabled
  * cores (TODO confirm; only the low 32 bits of the 64-bit SPE GPRs are
  * saved here).
  */
954 stw r15, GPR15_OFFSET(FRAME_REGISTER)
955 stw r16, GPR16_OFFSET(FRAME_REGISTER)
956 stw r17, GPR17_OFFSET(FRAME_REGISTER)
957 stw r18, GPR18_OFFSET(FRAME_REGISTER)
958 stw r19, GPR19_OFFSET(FRAME_REGISTER)
959 stw r20, GPR20_OFFSET(FRAME_REGISTER)
960 stw r21, GPR21_OFFSET(FRAME_REGISTER)
961 stw r22, GPR22_OFFSET(FRAME_REGISTER)
962 stw r23, GPR23_OFFSET(FRAME_REGISTER)
963 stw r24, GPR24_OFFSET(FRAME_REGISTER)
964 stw r25, GPR25_OFFSET(FRAME_REGISTER)
965 stw r26, GPR26_OFFSET(FRAME_REGISTER)
966 stw r27, GPR27_OFFSET(FRAME_REGISTER)
967 stw r28, GPR28_OFFSET(FRAME_REGISTER)
968 stw r29, GPR29_OFFSET(FRAME_REGISTER)
969 stw r30, GPR30_OFFSET(FRAME_REGISTER)
970 stw r31, GPR31_OFFSET(FRAME_REGISTER)
971#endif
972
973#ifdef PPC_MULTILIB_ALTIVEC
 /*
  * Save the non-volatile vector registers v20..v31 and VRSAVE.  stvx
  * only supports register+register addressing, so each frame offset is
  * first materialized in a GPR with li.
  */
974 li SCRATCH_REGISTER_1, PPC_EXC_VR_OFFSET(20)
975 stvx v20, FRAME_REGISTER, SCRATCH_REGISTER_1
976 li SCRATCH_REGISTER_1, PPC_EXC_VR_OFFSET(21)
977 stvx v21, FRAME_REGISTER, SCRATCH_REGISTER_1
978 li SCRATCH_REGISTER_1, PPC_EXC_VR_OFFSET(22)
979 stvx v22, FRAME_REGISTER, SCRATCH_REGISTER_1
980 li SCRATCH_REGISTER_1, PPC_EXC_VR_OFFSET(23)
981 stvx v23, FRAME_REGISTER, SCRATCH_REGISTER_1
982 li SCRATCH_REGISTER_1, PPC_EXC_VR_OFFSET(24)
983 stvx v24, FRAME_REGISTER, SCRATCH_REGISTER_1
984 li SCRATCH_REGISTER_1, PPC_EXC_VR_OFFSET(25)
985 stvx v25, FRAME_REGISTER, SCRATCH_REGISTER_1
986 li SCRATCH_REGISTER_1, PPC_EXC_VR_OFFSET(26)
987 stvx v26, FRAME_REGISTER, SCRATCH_REGISTER_1
988 li SCRATCH_REGISTER_1, PPC_EXC_VR_OFFSET(27)
989 stvx v27, FRAME_REGISTER, SCRATCH_REGISTER_1
990 li SCRATCH_REGISTER_1, PPC_EXC_VR_OFFSET(28)
991 stvx v28, FRAME_REGISTER, SCRATCH_REGISTER_1
992 li SCRATCH_REGISTER_1, PPC_EXC_VR_OFFSET(29)
993 stvx v29, FRAME_REGISTER, SCRATCH_REGISTER_1
994 li SCRATCH_REGISTER_1, PPC_EXC_VR_OFFSET(30)
995 stvx v30, FRAME_REGISTER, SCRATCH_REGISTER_1
996 li SCRATCH_REGISTER_1, PPC_EXC_VR_OFFSET(31)
997 stvx v31, FRAME_REGISTER, SCRATCH_REGISTER_1
998 mfvrsave SCRATCH_REGISTER_1
999 stw SCRATCH_REGISTER_1, PPC_EXC_VRSAVE_OFFSET(FRAME_REGISTER)
1000#endif
1001
1002#ifdef PPC_MULTILIB_FPU
 /* Save the non-volatile floating-point registers f14..f31 */
1003 stfd f14, PPC_EXC_FR_OFFSET(14)(FRAME_REGISTER)
1004 stfd f15, PPC_EXC_FR_OFFSET(15)(FRAME_REGISTER)
1005 stfd f16, PPC_EXC_FR_OFFSET(16)(FRAME_REGISTER)
1006 stfd f17, PPC_EXC_FR_OFFSET(17)(FRAME_REGISTER)
1007 stfd f18, PPC_EXC_FR_OFFSET(18)(FRAME_REGISTER)
1008 stfd f19, PPC_EXC_FR_OFFSET(19)(FRAME_REGISTER)
1009 stfd f20, PPC_EXC_FR_OFFSET(20)(FRAME_REGISTER)
1010 stfd f21, PPC_EXC_FR_OFFSET(21)(FRAME_REGISTER)
1011 stfd f22, PPC_EXC_FR_OFFSET(22)(FRAME_REGISTER)
1012 stfd f23, PPC_EXC_FR_OFFSET(23)(FRAME_REGISTER)
1013 stfd f24, PPC_EXC_FR_OFFSET(24)(FRAME_REGISTER)
1014 stfd f25, PPC_EXC_FR_OFFSET(25)(FRAME_REGISTER)
1015 stfd f26, PPC_EXC_FR_OFFSET(26)(FRAME_REGISTER)
1016 stfd f27, PPC_EXC_FR_OFFSET(27)(FRAME_REGISTER)
1017 stfd f28, PPC_EXC_FR_OFFSET(28)(FRAME_REGISTER)
1018 stfd f29, PPC_EXC_FR_OFFSET(29)(FRAME_REGISTER)
1019 stfd f30, PPC_EXC_FR_OFFSET(30)(FRAME_REGISTER)
1020 stfd f31, PPC_EXC_FR_OFFSET(31)(FRAME_REGISTER)
1021#endif
1022
1023 b wrap_disable_thread_dispatching_done_\_FLVR
1024
/*
 * Out-of-line path: restore the non-volatile register state saved by
 * wrap_save_non_volatile_regs above, then rejoin the main wrapper.
 * NOTE(review): the lmw path reads from r1 while the SPE/AltiVec/FPU
 * paths read from FRAME_REGISTER -- this appears to assume both point
 * at the exception frame here; confirm against the out-of-view wrapper
 * code that branches to this label.
 */
1025wrap_restore_non_volatile_regs_\_FLVR:
1026
1027 /* Load stack pointer */
1028 lwz SCRATCH_REGISTER_0, GPR1_OFFSET(r1)
1029
1030 /* Restore small data area anchor (SYSV) */
1031 lwz r13, GPR13_OFFSET(r1)
1032
1033 /* r14 is the FRAME_REGISTER and will be restored elsewhere */
1034
1035 /* Restore non-volatile registers r15 .. r31 */
1036#ifndef __SPE__
1037 lmw r15, GPR15_OFFSET(r1)
1038#else
 /* Individual loads instead of lmw on SPE-enabled cores (mirrors the
  * save path; restores the low 32 bits only). */
1039 lwz r15, GPR15_OFFSET(FRAME_REGISTER)
1040 lwz r16, GPR16_OFFSET(FRAME_REGISTER)
1041 lwz r17, GPR17_OFFSET(FRAME_REGISTER)
1042 lwz r18, GPR18_OFFSET(FRAME_REGISTER)
1043 lwz r19, GPR19_OFFSET(FRAME_REGISTER)
1044 lwz r20, GPR20_OFFSET(FRAME_REGISTER)
1045 lwz r21, GPR21_OFFSET(FRAME_REGISTER)
1046 lwz r22, GPR22_OFFSET(FRAME_REGISTER)
1047 lwz r23, GPR23_OFFSET(FRAME_REGISTER)
1048 lwz r24, GPR24_OFFSET(FRAME_REGISTER)
1049 lwz r25, GPR25_OFFSET(FRAME_REGISTER)
1050 lwz r26, GPR26_OFFSET(FRAME_REGISTER)
1051 lwz r27, GPR27_OFFSET(FRAME_REGISTER)
1052 lwz r28, GPR28_OFFSET(FRAME_REGISTER)
1053 lwz r29, GPR29_OFFSET(FRAME_REGISTER)
1054 lwz r30, GPR30_OFFSET(FRAME_REGISTER)
1055 lwz r31, GPR31_OFFSET(FRAME_REGISTER)
1056#endif
1057
1058 /* Restore stack pointer: write the saved pristine stack pointer back
 * into the frame link word at 0(r1) so the final epilogue reload of r1
 * picks it up */
1059 stw SCRATCH_REGISTER_0, 0(r1)
1060
1061#ifdef PPC_MULTILIB_ALTIVEC
 /*
  * Restore the non-volatile vector registers v20..v31 and VRSAVE.
  * lvx only supports register+register addressing, so each frame
  * offset is first materialized in a GPR with li.
  */
1062 li SCRATCH_REGISTER_1, PPC_EXC_VR_OFFSET(20)
1063 lvx v20, FRAME_REGISTER, SCRATCH_REGISTER_1
1064 li SCRATCH_REGISTER_1, PPC_EXC_VR_OFFSET(21)
1065 lvx v21, FRAME_REGISTER, SCRATCH_REGISTER_1
1066 li SCRATCH_REGISTER_1, PPC_EXC_VR_OFFSET(22)
1067 lvx v22, FRAME_REGISTER, SCRATCH_REGISTER_1
1068 li SCRATCH_REGISTER_1, PPC_EXC_VR_OFFSET(23)
1069 lvx v23, FRAME_REGISTER, SCRATCH_REGISTER_1
1070 li SCRATCH_REGISTER_1, PPC_EXC_VR_OFFSET(24)
1071 lvx v24, FRAME_REGISTER, SCRATCH_REGISTER_1
1072 li SCRATCH_REGISTER_1, PPC_EXC_VR_OFFSET(25)
1073 lvx v25, FRAME_REGISTER, SCRATCH_REGISTER_1
1074 li SCRATCH_REGISTER_1, PPC_EXC_VR_OFFSET(26)
1075 lvx v26, FRAME_REGISTER, SCRATCH_REGISTER_1
1076 li SCRATCH_REGISTER_1, PPC_EXC_VR_OFFSET(27)
1077 lvx v27, FRAME_REGISTER, SCRATCH_REGISTER_1
1078 li SCRATCH_REGISTER_1, PPC_EXC_VR_OFFSET(28)
1079 lvx v28, FRAME_REGISTER, SCRATCH_REGISTER_1
1080 li SCRATCH_REGISTER_1, PPC_EXC_VR_OFFSET(29)
1081 lvx v29, FRAME_REGISTER, SCRATCH_REGISTER_1
1082 li SCRATCH_REGISTER_1, PPC_EXC_VR_OFFSET(30)
1083 lvx v30, FRAME_REGISTER, SCRATCH_REGISTER_1
1084 li SCRATCH_REGISTER_1, PPC_EXC_VR_OFFSET(31)
1085 lvx v31, FRAME_REGISTER, SCRATCH_REGISTER_1
1086 lwz SCRATCH_REGISTER_1, PPC_EXC_VRSAVE_OFFSET(FRAME_REGISTER)
1087 mtvrsave SCRATCH_REGISTER_1
1088#endif
1089
1090#ifdef PPC_MULTILIB_FPU
 /* Restore the non-volatile floating-point registers f14..f31 */
1091 lfd f14, PPC_EXC_FR_OFFSET(14)(FRAME_REGISTER)
1092 lfd f15, PPC_EXC_FR_OFFSET(15)(FRAME_REGISTER)
1093 lfd f16, PPC_EXC_FR_OFFSET(16)(FRAME_REGISTER)
1094 lfd f17, PPC_EXC_FR_OFFSET(17)(FRAME_REGISTER)
1095 lfd f18, PPC_EXC_FR_OFFSET(18)(FRAME_REGISTER)
1096 lfd f19, PPC_EXC_FR_OFFSET(19)(FRAME_REGISTER)
1097 lfd f20, PPC_EXC_FR_OFFSET(20)(FRAME_REGISTER)
1098 lfd f21, PPC_EXC_FR_OFFSET(21)(FRAME_REGISTER)
1099 lfd f22, PPC_EXC_FR_OFFSET(22)(FRAME_REGISTER)
1100 lfd f23, PPC_EXC_FR_OFFSET(23)(FRAME_REGISTER)
1101 lfd f24, PPC_EXC_FR_OFFSET(24)(FRAME_REGISTER)
1102 lfd f25, PPC_EXC_FR_OFFSET(25)(FRAME_REGISTER)
1103 lfd f26, PPC_EXC_FR_OFFSET(26)(FRAME_REGISTER)
1104 lfd f27, PPC_EXC_FR_OFFSET(27)(FRAME_REGISTER)
1105 lfd f28, PPC_EXC_FR_OFFSET(28)(FRAME_REGISTER)
1106 lfd f29, PPC_EXC_FR_OFFSET(29)(FRAME_REGISTER)
1107 lfd f30, PPC_EXC_FR_OFFSET(30)(FRAME_REGISTER)
1108 lfd f31, PPC_EXC_FR_OFFSET(31)(FRAME_REGISTER)
1109#endif
1110
1111 b wrap_thread_dispatching_done_\_FLVR
1112
/*
 * Out-of-line path: invoke the global exception handler with the
 * exception frame as first parameter, then rejoin the main wrapper at
 * wrap_handler_done.  Two configurations: the default loads the handler
 * pointer from the variable globalExceptHdl (skipping the call if it is
 * NULL); PPC_EXC_CONFIG_USE_FIXED_HANDLER calls C_exception_handler
 * directly.
 */
1113wrap_call_global_handler_\_FLVR:
1114
1115 /* First parameter = exception frame pointer + FRAME_LINK_SPACE */
1116 addi r3, FRAME_REGISTER, FRAME_LINK_SPACE
1117
1118#ifndef PPC_EXC_CONFIG_USE_FIXED_HANDLER
1119
1120 /* Load global handler address */
1121 LW SCRATCH_REGISTER_0, globalExceptHdl
1122
1123 /* Check address: a NULL handler means there is nothing to call */
1124 cmpwi SCRATCH_REGISTER_0, 0
1125 beq wrap_handler_done_\_FLVR
1126
1127 /* Call global handler indirectly through CTR */
1128 mtctr SCRATCH_REGISTER_0
1129 bctrl
1130
1131#else /* PPC_EXC_CONFIG_USE_FIXED_HANDLER */
1132
1133 /* Call fixed global handler */
1134 bl C_exception_handler
1135
1136#endif /* PPC_EXC_CONFIG_USE_FIXED_HANDLER */
1137
1138 b wrap_handler_done_\_FLVR
1139
1140 .endm
Cross-references (Doxygen): PowerPC Exceptions API.
- ppc_exc_handler_t ppc_exc_handler_table[LAST_VALID_EXC+1] — high-level exception handler table. Definition: ppc_exc_hdl.c:60
- void C_exception_handler(BSP_Exception_frame *excPtr) — default global exception handler. Definition: ppc_exc_global_handler.c:24
- exception_handler_t globalExceptHdl — global exception handler. Definition: ppc_exc_hdl.c:57
- uint32_t ppc_exc_msr_bits — bits for MSR update. Definition: ppc_exc_hdl.c:43
General purpose assembler macros, linker command file support and some inline functions for direct register access.