/*
 * RTEMS 6.1-rc4 — aarch64-system-registers.h
 * (Doxygen page header; viewer navigation text removed.)
 */
/* SPDX-License-Identifier: BSD-2-Clause */

/*
 * Copyright (C) 2020 embedded brains GmbH & Co. KG
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
 * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
 * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
 * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
 * POSSIBILITY OF SUCH DAMAGE.
 */
36
#ifndef _RTEMS_SCORE_AARCH64_SYSTEM_REGISTERS_H
#define _RTEMS_SCORE_AARCH64_SYSTEM_REGISTERS_H

#include <stdint.h>

#ifdef __cplusplus
extern "C" {
#endif
45
/* ACTLR_EL1, Auxiliary Control Register (EL1) */

/* Read ACTLR_EL1 via MRS. */
static inline uint64_t _AArch64_Read_actlr_el1( void )
{
  uint64_t reg;

  __asm__ volatile ( "mrs %0, ACTLR_EL1" : "=&r" ( reg ) : : "memory" );

  return reg;
}

/* Write ACTLR_EL1 via MSR. */
static inline void _AArch64_Write_actlr_el1( uint64_t value )
{
  __asm__ volatile ( "msr ACTLR_EL1, %0" : : "r" ( value ) : "memory" );
}

/* ACTLR_EL2, Auxiliary Control Register (EL2) */

/* Read ACTLR_EL2 via MRS. */
static inline uint64_t _AArch64_Read_actlr_el2( void )
{
  uint64_t reg;

  __asm__ volatile ( "mrs %0, ACTLR_EL2" : "=&r" ( reg ) : : "memory" );

  return reg;
}

/* Write ACTLR_EL2 via MSR. */
static inline void _AArch64_Write_actlr_el2( uint64_t value )
{
  __asm__ volatile ( "msr ACTLR_EL2, %0" : : "r" ( value ) : "memory" );
}

/* ACTLR_EL3, Auxiliary Control Register (EL3) */

/* Read ACTLR_EL3 via MRS. */
static inline uint64_t _AArch64_Read_actlr_el3( void )
{
  uint64_t reg;

  __asm__ volatile ( "mrs %0, ACTLR_EL3" : "=&r" ( reg ) : : "memory" );

  return reg;
}

/* Write ACTLR_EL3 via MSR. */
static inline void _AArch64_Write_actlr_el3( uint64_t value )
{
  __asm__ volatile ( "msr ACTLR_EL3, %0" : : "r" ( value ) : "memory" );
}
105
/* AFSR0_EL1, Auxiliary Fault Status Register 0 (EL1) */

/* Read AFSR0_EL1 via MRS. */
static inline uint64_t _AArch64_Read_afsr0_el1( void )
{
  uint64_t reg;

  __asm__ volatile ( "mrs %0, AFSR0_EL1" : "=&r" ( reg ) : : "memory" );

  return reg;
}

/* Write AFSR0_EL1 via MSR. */
static inline void _AArch64_Write_afsr0_el1( uint64_t value )
{
  __asm__ volatile ( "msr AFSR0_EL1, %0" : : "r" ( value ) : "memory" );
}

/* AFSR0_EL2, Auxiliary Fault Status Register 0 (EL2) */

/* Read AFSR0_EL2 via MRS. */
static inline uint64_t _AArch64_Read_afsr0_el2( void )
{
  uint64_t reg;

  __asm__ volatile ( "mrs %0, AFSR0_EL2" : "=&r" ( reg ) : : "memory" );

  return reg;
}

/* Write AFSR0_EL2 via MSR. */
static inline void _AArch64_Write_afsr0_el2( uint64_t value )
{
  __asm__ volatile ( "msr AFSR0_EL2, %0" : : "r" ( value ) : "memory" );
}

/* AFSR0_EL3, Auxiliary Fault Status Register 0 (EL3) */

/* Read AFSR0_EL3 via MRS. */
static inline uint64_t _AArch64_Read_afsr0_el3( void )
{
  uint64_t reg;

  __asm__ volatile ( "mrs %0, AFSR0_EL3" : "=&r" ( reg ) : : "memory" );

  return reg;
}

/* Write AFSR0_EL3 via MSR. */
static inline void _AArch64_Write_afsr0_el3( uint64_t value )
{
  __asm__ volatile ( "msr AFSR0_EL3, %0" : : "r" ( value ) : "memory" );
}
165
/* AFSR1_EL1, Auxiliary Fault Status Register 1 (EL1) */

/* Read AFSR1_EL1 via MRS. */
static inline uint64_t _AArch64_Read_afsr1_el1( void )
{
  uint64_t reg;

  __asm__ volatile ( "mrs %0, AFSR1_EL1" : "=&r" ( reg ) : : "memory" );

  return reg;
}

/* Write AFSR1_EL1 via MSR. */
static inline void _AArch64_Write_afsr1_el1( uint64_t value )
{
  __asm__ volatile ( "msr AFSR1_EL1, %0" : : "r" ( value ) : "memory" );
}

/* AFSR1_EL2, Auxiliary Fault Status Register 1 (EL2) */

/* Read AFSR1_EL2 via MRS. */
static inline uint64_t _AArch64_Read_afsr1_el2( void )
{
  uint64_t reg;

  __asm__ volatile ( "mrs %0, AFSR1_EL2" : "=&r" ( reg ) : : "memory" );

  return reg;
}

/* Write AFSR1_EL2 via MSR. */
static inline void _AArch64_Write_afsr1_el2( uint64_t value )
{
  __asm__ volatile ( "msr AFSR1_EL2, %0" : : "r" ( value ) : "memory" );
}

/* AFSR1_EL3, Auxiliary Fault Status Register 1 (EL3) */

/* Read AFSR1_EL3 via MRS. */
static inline uint64_t _AArch64_Read_afsr1_el3( void )
{
  uint64_t reg;

  __asm__ volatile ( "mrs %0, AFSR1_EL3" : "=&r" ( reg ) : : "memory" );

  return reg;
}

/* Write AFSR1_EL3 via MSR. */
static inline void _AArch64_Write_afsr1_el3( uint64_t value )
{
  __asm__ volatile ( "msr AFSR1_EL3, %0" : : "r" ( value ) : "memory" );
}
225
/* AIDR_EL1, Auxiliary ID Register */

/* Read AIDR_EL1 via MRS (read-only register, so no writer is provided). */
static inline uint64_t _AArch64_Read_aidr_el1( void )
{
  uint64_t reg;

  __asm__ volatile ( "mrs %0, AIDR_EL1" : "=&r" ( reg ) : : "memory" );

  return reg;
}
238
/* AMAIR_EL1, Auxiliary Memory Attribute Indirection Register (EL1) */

/* Read AMAIR_EL1 via MRS. */
static inline uint64_t _AArch64_Read_amair_el1( void )
{
  uint64_t reg;

  __asm__ volatile ( "mrs %0, AMAIR_EL1" : "=&r" ( reg ) : : "memory" );

  return reg;
}

/* Write AMAIR_EL1 via MSR. */
static inline void _AArch64_Write_amair_el1( uint64_t value )
{
  __asm__ volatile ( "msr AMAIR_EL1, %0" : : "r" ( value ) : "memory" );
}

/* AMAIR_EL2, Auxiliary Memory Attribute Indirection Register (EL2) */

/* Read AMAIR_EL2 via MRS. */
static inline uint64_t _AArch64_Read_amair_el2( void )
{
  uint64_t reg;

  __asm__ volatile ( "mrs %0, AMAIR_EL2" : "=&r" ( reg ) : : "memory" );

  return reg;
}

/* Write AMAIR_EL2 via MSR. */
static inline void _AArch64_Write_amair_el2( uint64_t value )
{
  __asm__ volatile ( "msr AMAIR_EL2, %0" : : "r" ( value ) : "memory" );
}

/* AMAIR_EL3, Auxiliary Memory Attribute Indirection Register (EL3) */

/* Read AMAIR_EL3 via MRS. */
static inline uint64_t _AArch64_Read_amair_el3( void )
{
  uint64_t reg;

  __asm__ volatile ( "mrs %0, AMAIR_EL3" : "=&r" ( reg ) : : "memory" );

  return reg;
}

/* Write AMAIR_EL3 via MSR. */
static inline void _AArch64_Write_amair_el3( uint64_t value )
{
  __asm__ volatile ( "msr AMAIR_EL3, %0" : : "r" ( value ) : "memory" );
}
298
/* APDAKEYHI_EL1, Pointer Authentication Key A for Data (bits[127:64]) */

/* Read APDAKEYHI_EL1 via MRS. */
static inline uint64_t _AArch64_Read_apdakeyhi_el1( void )
{
  uint64_t reg;

  __asm__ volatile ( "mrs %0, APDAKEYHI_EL1" : "=&r" ( reg ) : : "memory" );

  return reg;
}

/* Write APDAKEYHI_EL1 via MSR. */
static inline void _AArch64_Write_apdakeyhi_el1( uint64_t value )
{
  __asm__ volatile ( "msr APDAKEYHI_EL1, %0" : : "r" ( value ) : "memory" );
}

/* APDAKEYLO_EL1, Pointer Authentication Key A for Data (bits[63:0]) */

/* Read APDAKEYLO_EL1 via MRS. */
static inline uint64_t _AArch64_Read_apdakeylo_el1( void )
{
  uint64_t reg;

  __asm__ volatile ( "mrs %0, APDAKEYLO_EL1" : "=&r" ( reg ) : : "memory" );

  return reg;
}

/* Write APDAKEYLO_EL1 via MSR. */
static inline void _AArch64_Write_apdakeylo_el1( uint64_t value )
{
  __asm__ volatile ( "msr APDAKEYLO_EL1, %0" : : "r" ( value ) : "memory" );
}

/* APDBKEYHI_EL1, Pointer Authentication Key B for Data (bits[127:64]) */

/* Read APDBKEYHI_EL1 via MRS. */
static inline uint64_t _AArch64_Read_apdbkeyhi_el1( void )
{
  uint64_t reg;

  __asm__ volatile ( "mrs %0, APDBKEYHI_EL1" : "=&r" ( reg ) : : "memory" );

  return reg;
}

/* Write APDBKEYHI_EL1 via MSR. */
static inline void _AArch64_Write_apdbkeyhi_el1( uint64_t value )
{
  __asm__ volatile ( "msr APDBKEYHI_EL1, %0" : : "r" ( value ) : "memory" );
}

/* APDBKEYLO_EL1, Pointer Authentication Key B for Data (bits[63:0]) */

/* Read APDBKEYLO_EL1 via MRS. */
static inline uint64_t _AArch64_Read_apdbkeylo_el1( void )
{
  uint64_t reg;

  __asm__ volatile ( "mrs %0, APDBKEYLO_EL1" : "=&r" ( reg ) : : "memory" );

  return reg;
}

/* Write APDBKEYLO_EL1 via MSR. */
static inline void _AArch64_Write_apdbkeylo_el1( uint64_t value )
{
  __asm__ volatile ( "msr APDBKEYLO_EL1, %0" : : "r" ( value ) : "memory" );
}
378
/* APGAKEYHI_EL1, Pointer Authentication Key A for Code (bits[127:64]) */

/* Read APGAKEYHI_EL1 via MRS. */
static inline uint64_t _AArch64_Read_apgakeyhi_el1( void )
{
  uint64_t reg;

  __asm__ volatile ( "mrs %0, APGAKEYHI_EL1" : "=&r" ( reg ) : : "memory" );

  return reg;
}

/* Write APGAKEYHI_EL1 via MSR. */
static inline void _AArch64_Write_apgakeyhi_el1( uint64_t value )
{
  __asm__ volatile ( "msr APGAKEYHI_EL1, %0" : : "r" ( value ) : "memory" );
}

/* APGAKEYLO_EL1, Pointer Authentication Key A for Code (bits[63:0]) */

/* Read APGAKEYLO_EL1 via MRS. */
static inline uint64_t _AArch64_Read_apgakeylo_el1( void )
{
  uint64_t reg;

  __asm__ volatile ( "mrs %0, APGAKEYLO_EL1" : "=&r" ( reg ) : : "memory" );

  return reg;
}

/* Write APGAKEYLO_EL1 via MSR. */
static inline void _AArch64_Write_apgakeylo_el1( uint64_t value )
{
  __asm__ volatile ( "msr APGAKEYLO_EL1, %0" : : "r" ( value ) : "memory" );
}

/* APIAKEYHI_EL1, Pointer Authentication Key A for Instruction (bits[127:64]) */

/* Read APIAKEYHI_EL1 via MRS. */
static inline uint64_t _AArch64_Read_apiakeyhi_el1( void )
{
  uint64_t reg;

  __asm__ volatile ( "mrs %0, APIAKEYHI_EL1" : "=&r" ( reg ) : : "memory" );

  return reg;
}

/* Write APIAKEYHI_EL1 via MSR. */
static inline void _AArch64_Write_apiakeyhi_el1( uint64_t value )
{
  __asm__ volatile ( "msr APIAKEYHI_EL1, %0" : : "r" ( value ) : "memory" );
}

/* APIAKEYLO_EL1, Pointer Authentication Key A for Instruction (bits[63:0]) */

/* Read APIAKEYLO_EL1 via MRS. */
static inline uint64_t _AArch64_Read_apiakeylo_el1( void )
{
  uint64_t reg;

  __asm__ volatile ( "mrs %0, APIAKEYLO_EL1" : "=&r" ( reg ) : : "memory" );

  return reg;
}

/* Write APIAKEYLO_EL1 via MSR. */
static inline void _AArch64_Write_apiakeylo_el1( uint64_t value )
{
  __asm__ volatile ( "msr APIAKEYLO_EL1, %0" : : "r" ( value ) : "memory" );
}
458
/* APIBKEYHI_EL1, Pointer Authentication Key B for Instruction (bits[127:64]) */

/* Read APIBKEYHI_EL1 via MRS. */
static inline uint64_t _AArch64_Read_apibkeyhi_el1( void )
{
  uint64_t reg;

  __asm__ volatile ( "mrs %0, APIBKEYHI_EL1" : "=&r" ( reg ) : : "memory" );

  return reg;
}

/* Write APIBKEYHI_EL1 via MSR. */
static inline void _AArch64_Write_apibkeyhi_el1( uint64_t value )
{
  __asm__ volatile ( "msr APIBKEYHI_EL1, %0" : : "r" ( value ) : "memory" );
}

/* APIBKEYLO_EL1, Pointer Authentication Key B for Instruction (bits[63:0]) */

/* Read APIBKEYLO_EL1 via MRS. */
static inline uint64_t _AArch64_Read_apibkeylo_el1( void )
{
  uint64_t reg;

  __asm__ volatile ( "mrs %0, APIBKEYLO_EL1" : "=&r" ( reg ) : : "memory" );

  return reg;
}

/* Write APIBKEYLO_EL1 via MSR. */
static inline void _AArch64_Write_apibkeylo_el1( uint64_t value )
{
  __asm__ volatile ( "msr APIBKEYLO_EL1, %0" : : "r" ( value ) : "memory" );
}
498
/* CCSIDR2_EL1, Current Cache Size ID Register 2 */

/* NUMSETS field, bits [23:0]. */
#define AARCH64_CCSIDR2_EL1_NUMSETS( _val ) ( ( _val ) << 0 )
#define AARCH64_CCSIDR2_EL1_NUMSETS_SHIFT 0
#define AARCH64_CCSIDR2_EL1_NUMSETS_MASK 0xffffffU
#define AARCH64_CCSIDR2_EL1_NUMSETS_GET( _reg ) ( ( ( _reg ) >> 0 ) & 0xffffffU )

/* Read CCSIDR2_EL1 via MRS (read-only register). */
static inline uint64_t _AArch64_Read_ccsidr2_el1( void )
{
  uint64_t reg;

  __asm__ volatile ( "mrs %0, CCSIDR2_EL1" : "=&r" ( reg ) : : "memory" );

  return reg;
}
517
/* CCSIDR_EL1, Current Cache Size ID Register */

/*
 * NOTE(review): the _0/_1 suffixed macros appear to describe two alternative
 * field layouts of this register (narrow vs. wide associativity/set counts) —
 * confirm against the generator/architecture manual.
 */

/* LineSize field, bits [2:0]. */
#define AARCH64_CCSIDR_EL1_LINESIZE( _val ) ( ( _val ) << 0 )
#define AARCH64_CCSIDR_EL1_LINESIZE_SHIFT 0
#define AARCH64_CCSIDR_EL1_LINESIZE_MASK 0x7U
#define AARCH64_CCSIDR_EL1_LINESIZE_GET( _reg ) \
  ( ( ( _reg ) >> 0 ) & 0x7U )

/* Associativity field, layout 0: bits [12:3]. */
#define AARCH64_CCSIDR_EL1_ASSOCIATIVITY_0( _val ) ( ( _val ) << 3 )
#define AARCH64_CCSIDR_EL1_ASSOCIATIVITY_SHIFT_0 3
#define AARCH64_CCSIDR_EL1_ASSOCIATIVITY_MASK_0 0x1ff8U
#define AARCH64_CCSIDR_EL1_ASSOCIATIVITY_GET_0( _reg ) \
  ( ( ( _reg ) >> 3 ) & 0x3ffU )

/* Associativity field, layout 1: bits [23:3]. */
#define AARCH64_CCSIDR_EL1_ASSOCIATIVITY_1( _val ) ( ( _val ) << 3 )
#define AARCH64_CCSIDR_EL1_ASSOCIATIVITY_SHIFT_1 3
#define AARCH64_CCSIDR_EL1_ASSOCIATIVITY_MASK_1 0xfffff8U
#define AARCH64_CCSIDR_EL1_ASSOCIATIVITY_GET_1( _reg ) \
  ( ( ( _reg ) >> 3 ) & 0x1fffffU )

/* NumSets field, layout 0: bits [27:13]. */
#define AARCH64_CCSIDR_EL1_NUMSETS_0( _val ) ( ( _val ) << 13 )
#define AARCH64_CCSIDR_EL1_NUMSETS_SHIFT_0 13
#define AARCH64_CCSIDR_EL1_NUMSETS_MASK_0 0xfffe000U
#define AARCH64_CCSIDR_EL1_NUMSETS_GET_0( _reg ) \
  ( ( ( _reg ) >> 13 ) & 0x7fffU )

/*
 * NumSets field, layout 1: bits [55:32].  The cast is required: shifting a
 * 32-bit value by 32 is undefined behavior in C (shift count >= type width),
 * so without it the setter was broken for any 32-bit argument.
 */
#define AARCH64_CCSIDR_EL1_NUMSETS_1( _val ) ( ( uint64_t ) ( _val ) << 32 )
#define AARCH64_CCSIDR_EL1_NUMSETS_SHIFT_1 32
#define AARCH64_CCSIDR_EL1_NUMSETS_MASK_1 0xffffff00000000ULL
#define AARCH64_CCSIDR_EL1_NUMSETS_GET_1( _reg ) \
  ( ( ( _reg ) >> 32 ) & 0xffffffULL )

/* Read CCSIDR_EL1 via MRS (read-only register). */
static inline uint64_t _AArch64_Read_ccsidr_el1( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, CCSIDR_EL1" : "=&r" ( value ) : : "memory"
  );

  return value;
}
560
/* CLIDR_EL1, Cache Level ID Register */

/* Ctype<n> fields: cache type for level n, 3 bits each starting at bit 0. */
#define AARCH64_CLIDR_EL1_CTYPE1( _val ) ( ( _val ) << 0 )
#define AARCH64_CLIDR_EL1_CTYPE1_SHIFT 0
#define AARCH64_CLIDR_EL1_CTYPE1_MASK ( 0x7U << 0 )
#define AARCH64_CLIDR_EL1_CTYPE1_GET( _reg ) \
  ( ( ( _reg ) >> 0 ) & 0x7U )

#define AARCH64_CLIDR_EL1_CTYPE2( _val ) ( ( _val ) << 3 )
#define AARCH64_CLIDR_EL1_CTYPE2_SHIFT 3
#define AARCH64_CLIDR_EL1_CTYPE2_MASK ( 0x7U << 3 )
#define AARCH64_CLIDR_EL1_CTYPE2_GET( _reg ) \
  ( ( ( _reg ) >> 3 ) & 0x7U )

#define AARCH64_CLIDR_EL1_CTYPE3( _val ) ( ( _val ) << 6 )
#define AARCH64_CLIDR_EL1_CTYPE3_SHIFT 6
#define AARCH64_CLIDR_EL1_CTYPE3_MASK ( 0x7U << 6 )
#define AARCH64_CLIDR_EL1_CTYPE3_GET( _reg ) \
  ( ( ( _reg ) >> 6 ) & 0x7U )

#define AARCH64_CLIDR_EL1_CTYPE4( _val ) ( ( _val ) << 9 )
#define AARCH64_CLIDR_EL1_CTYPE4_SHIFT 9
#define AARCH64_CLIDR_EL1_CTYPE4_MASK ( 0x7U << 9 )
#define AARCH64_CLIDR_EL1_CTYPE4_GET( _reg ) \
  ( ( ( _reg ) >> 9 ) & 0x7U )

#define AARCH64_CLIDR_EL1_CTYPE5( _val ) ( ( _val ) << 12 )
#define AARCH64_CLIDR_EL1_CTYPE5_SHIFT 12
#define AARCH64_CLIDR_EL1_CTYPE5_MASK ( 0x7U << 12 )
#define AARCH64_CLIDR_EL1_CTYPE5_GET( _reg ) \
  ( ( ( _reg ) >> 12 ) & 0x7U )

#define AARCH64_CLIDR_EL1_CTYPE6( _val ) ( ( _val ) << 15 )
#define AARCH64_CLIDR_EL1_CTYPE6_SHIFT 15
#define AARCH64_CLIDR_EL1_CTYPE6_MASK ( 0x7U << 15 )
#define AARCH64_CLIDR_EL1_CTYPE6_GET( _reg ) \
  ( ( ( _reg ) >> 15 ) & 0x7U )

#define AARCH64_CLIDR_EL1_CTYPE7( _val ) ( ( _val ) << 18 )
#define AARCH64_CLIDR_EL1_CTYPE7_SHIFT 18
#define AARCH64_CLIDR_EL1_CTYPE7_MASK ( 0x7U << 18 )
#define AARCH64_CLIDR_EL1_CTYPE7_GET( _reg ) \
  ( ( ( _reg ) >> 18 ) & 0x7U )

/* LoUIS field, bits [23:21]. */
#define AARCH64_CLIDR_EL1_LOUIS( _val ) ( ( _val ) << 21 )
#define AARCH64_CLIDR_EL1_LOUIS_SHIFT 21
#define AARCH64_CLIDR_EL1_LOUIS_MASK 0xe00000U
#define AARCH64_CLIDR_EL1_LOUIS_GET( _reg ) \
  ( ( ( _reg ) >> 21 ) & 0x7U )

/* LoC field, bits [26:24]. */
#define AARCH64_CLIDR_EL1_LOC( _val ) ( ( _val ) << 24 )
#define AARCH64_CLIDR_EL1_LOC_SHIFT 24
#define AARCH64_CLIDR_EL1_LOC_MASK 0x7000000U
#define AARCH64_CLIDR_EL1_LOC_GET( _reg ) \
  ( ( ( _reg ) >> 24 ) & 0x7U )

/* LoUU field, bits [29:27]. */
#define AARCH64_CLIDR_EL1_LOUU( _val ) ( ( _val ) << 27 )
#define AARCH64_CLIDR_EL1_LOUU_SHIFT 27
#define AARCH64_CLIDR_EL1_LOUU_MASK 0x38000000U
#define AARCH64_CLIDR_EL1_LOUU_GET( _reg ) \
  ( ( ( _reg ) >> 27 ) & 0x7U )

/*
 * ICB field, bits [32:30].  The cast is required: the field crosses bit 31,
 * so with a 32-bit argument the unfixed shift either truncated the top bit
 * (unsigned) or overflowed a signed int (undefined behavior), e.g.
 * 0x7U << 30 yielded 0xC0000000 instead of 0x1C0000000.
 */
#define AARCH64_CLIDR_EL1_ICB( _val ) ( ( uint64_t ) ( _val ) << 30 )
#define AARCH64_CLIDR_EL1_ICB_SHIFT 30
#define AARCH64_CLIDR_EL1_ICB_MASK 0x1c0000000ULL
#define AARCH64_CLIDR_EL1_ICB_GET( _reg ) \
  ( ( ( _reg ) >> 30 ) & 0x7ULL )

/* Read CLIDR_EL1 via MRS (read-only register). */
static inline uint64_t _AArch64_Read_clidr_el1( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, CLIDR_EL1" : "=&r" ( value ) : : "memory"
  );

  return value;
}
639
/* CONTEXTIDR_EL1, Context ID Register (EL1) */

/* PROCID field, bits [31:0]. */
#define AARCH64_CONTEXTIDR_EL1_PROCID( _val ) ( ( _val ) << 0 )
#define AARCH64_CONTEXTIDR_EL1_PROCID_SHIFT 0
#define AARCH64_CONTEXTIDR_EL1_PROCID_MASK 0xffffffffU
#define AARCH64_CONTEXTIDR_EL1_PROCID_GET( _reg ) \
  ( ( ( _reg ) >> 0 ) & 0xffffffffU )

/* Read CONTEXTIDR_EL1 via MRS. */
static inline uint64_t _AArch64_Read_contextidr_el1( void )
{
  uint64_t reg;

  __asm__ volatile ( "mrs %0, CONTEXTIDR_EL1" : "=&r" ( reg ) : : "memory" );

  return reg;
}

/* Write CONTEXTIDR_EL1 via MSR. */
static inline void _AArch64_Write_contextidr_el1( uint64_t value )
{
  __asm__ volatile ( "msr CONTEXTIDR_EL1, %0" : : "r" ( value ) : "memory" );
}

/* CONTEXTIDR_EL2, Context ID Register (EL2) */

/* PROCID field, bits [31:0]. */
#define AARCH64_CONTEXTIDR_EL2_PROCID( _val ) ( ( _val ) << 0 )
#define AARCH64_CONTEXTIDR_EL2_PROCID_SHIFT 0
#define AARCH64_CONTEXTIDR_EL2_PROCID_MASK 0xffffffffU
#define AARCH64_CONTEXTIDR_EL2_PROCID_GET( _reg ) \
  ( ( ( _reg ) >> 0 ) & 0xffffffffU )

/* Read CONTEXTIDR_EL2 via MRS. */
static inline uint64_t _AArch64_Read_contextidr_el2( void )
{
  uint64_t reg;

  __asm__ volatile ( "mrs %0, CONTEXTIDR_EL2" : "=&r" ( reg ) : : "memory" );

  return reg;
}

/* Write CONTEXTIDR_EL2 via MSR. */
static inline void _AArch64_Write_contextidr_el2( uint64_t value )
{
  __asm__ volatile ( "msr CONTEXTIDR_EL2, %0" : : "r" ( value ) : "memory" );
}
691
/* CPACR_EL1, Architectural Feature Access Control Register */

/* ZEN field, bits [17:16]. */
#define AARCH64_CPACR_EL1_ZEN( _val ) ( ( _val ) << 16 )
#define AARCH64_CPACR_EL1_ZEN_SHIFT 16
#define AARCH64_CPACR_EL1_ZEN_MASK 0x30000U
#define AARCH64_CPACR_EL1_ZEN_GET( _reg ) ( ( ( _reg ) >> 16 ) & 0x3U )

/* FPEN field, bits [21:20]. */
#define AARCH64_CPACR_EL1_FPEN( _val ) ( ( _val ) << 20 )
#define AARCH64_CPACR_EL1_FPEN_SHIFT 20
#define AARCH64_CPACR_EL1_FPEN_MASK 0x300000U
#define AARCH64_CPACR_EL1_FPEN_GET( _reg ) ( ( ( _reg ) >> 20 ) & 0x3U )

/* TTA flag, bit [28]. */
#define AARCH64_CPACR_EL1_TTA 0x10000000U

/* Read CPACR_EL1 via MRS. */
static inline uint64_t _AArch64_Read_cpacr_el1( void )
{
  uint64_t reg;

  __asm__ volatile ( "mrs %0, CPACR_EL1" : "=&r" ( reg ) : : "memory" );

  return reg;
}

/* Write CPACR_EL1 via MSR. */
static inline void _AArch64_Write_cpacr_el1( uint64_t value )
{
  __asm__ volatile ( "msr CPACR_EL1, %0" : : "r" ( value ) : "memory" );
}
725
/* CPTR_EL2, Architectural Feature Trap Register (EL2) */

/* TZ flag, bit [8]. */
#define AARCH64_CPTR_EL2_TZ 0x100U

/* TFP flag, bit [10]. */
#define AARCH64_CPTR_EL2_TFP 0x400U

/* ZEN field, bits [17:16]. */
#define AARCH64_CPTR_EL2_ZEN( _val ) ( ( _val ) << 16 )
#define AARCH64_CPTR_EL2_ZEN_SHIFT 16
#define AARCH64_CPTR_EL2_ZEN_MASK 0x30000U
#define AARCH64_CPTR_EL2_ZEN_GET( _reg ) ( ( ( _reg ) >> 16 ) & 0x3U )

/* TTA flag, variant 0: bit [20]. */
#define AARCH64_CPTR_EL2_TTA_0 0x100000U

/* FPEN field, bits [21:20]. */
#define AARCH64_CPTR_EL2_FPEN( _val ) ( ( _val ) << 20 )
#define AARCH64_CPTR_EL2_FPEN_SHIFT 20
#define AARCH64_CPTR_EL2_FPEN_MASK 0x300000U
#define AARCH64_CPTR_EL2_FPEN_GET( _reg ) ( ( ( _reg ) >> 20 ) & 0x3U )

/* TTA flag, variant 1: bit [28]. */
#define AARCH64_CPTR_EL2_TTA_1 0x10000000U

/* TAM flag, bit [30]. */
#define AARCH64_CPTR_EL2_TAM 0x40000000U

/* TCPAC flag, bit [31]. */
#define AARCH64_CPTR_EL2_TCPAC 0x80000000U

/* Read CPTR_EL2 via MRS. */
static inline uint64_t _AArch64_Read_cptr_el2( void )
{
  uint64_t reg;

  __asm__ volatile ( "mrs %0, CPTR_EL2" : "=&r" ( reg ) : : "memory" );

  return reg;
}

/* Write CPTR_EL2 via MSR. */
static inline void _AArch64_Write_cptr_el2( uint64_t value )
{
  __asm__ volatile ( "msr CPTR_EL2, %0" : : "r" ( value ) : "memory" );
}
769
/* CPTR_EL3, Architectural Feature Trap Register (EL3) */

/* EZ flag, bit [8]. */
#define AARCH64_CPTR_EL3_EZ 0x100U

/* TFP flag, bit [10]. */
#define AARCH64_CPTR_EL3_TFP 0x400U

/* TTA flag, bit [20]. */
#define AARCH64_CPTR_EL3_TTA 0x100000U

/* TAM flag, bit [30]. */
#define AARCH64_CPTR_EL3_TAM 0x40000000U

/* TCPAC flag, bit [31]. */
#define AARCH64_CPTR_EL3_TCPAC 0x80000000U

/* Read CPTR_EL3 via MRS. */
static inline uint64_t _AArch64_Read_cptr_el3( void )
{
  uint64_t reg;

  __asm__ volatile ( "mrs %0, CPTR_EL3" : "=&r" ( reg ) : : "memory" );

  return reg;
}

/* Write CPTR_EL3 via MSR. */
static inline void _AArch64_Write_cptr_el3( uint64_t value )
{
  __asm__ volatile ( "msr CPTR_EL3, %0" : : "r" ( value ) : "memory" );
}
799
/* CSSELR_EL1, Cache Size Selection Register */

/* InD flag, bit [0]. */
#define AARCH64_CSSELR_EL1_IND 0x1U

/* Level field, bits [3:1]. */
#define AARCH64_CSSELR_EL1_LEVEL( _val ) ( ( _val ) << 1 )
#define AARCH64_CSSELR_EL1_LEVEL_SHIFT 1
#define AARCH64_CSSELR_EL1_LEVEL_MASK 0xeU
#define AARCH64_CSSELR_EL1_LEVEL_GET( _reg ) ( ( ( _reg ) >> 1 ) & 0x7U )

/* TnD flag, bit [4]. */
#define AARCH64_CSSELR_EL1_TND 0x10U

/* Read CSSELR_EL1 via MRS. */
static inline uint64_t _AArch64_Read_csselr_el1( void )
{
  uint64_t reg;

  __asm__ volatile ( "mrs %0, CSSELR_EL1" : "=&r" ( reg ) : : "memory" );

  return reg;
}

/* Write CSSELR_EL1 via MSR. */
static inline void _AArch64_Write_csselr_el1( uint64_t value )
{
  __asm__ volatile ( "msr CSSELR_EL1, %0" : : "r" ( value ) : "memory" );
}
829
/* CTR_EL0, Cache Type Register */

/* IminLine field, bits [3:0]. */
#define AARCH64_CTR_EL0_IMINLINE( _val ) ( ( _val ) << 0 )
#define AARCH64_CTR_EL0_IMINLINE_SHIFT 0
#define AARCH64_CTR_EL0_IMINLINE_MASK 0xfU
#define AARCH64_CTR_EL0_IMINLINE_GET( _reg ) \
  ( ( ( _reg ) >> 0 ) & 0xfU )

/* L1Ip field, bits [15:14]. */
#define AARCH64_CTR_EL0_L1IP( _val ) ( ( _val ) << 14 )
#define AARCH64_CTR_EL0_L1IP_SHIFT 14
#define AARCH64_CTR_EL0_L1IP_MASK 0xc000U
#define AARCH64_CTR_EL0_L1IP_GET( _reg ) \
  ( ( ( _reg ) >> 14 ) & 0x3U )

/* DminLine field, bits [19:16]. */
#define AARCH64_CTR_EL0_DMINLINE( _val ) ( ( _val ) << 16 )
#define AARCH64_CTR_EL0_DMINLINE_SHIFT 16
#define AARCH64_CTR_EL0_DMINLINE_MASK 0xf0000U
#define AARCH64_CTR_EL0_DMINLINE_GET( _reg ) \
  ( ( ( _reg ) >> 16 ) & 0xfU )

/* ERG field, bits [23:20]. */
#define AARCH64_CTR_EL0_ERG( _val ) ( ( _val ) << 20 )
#define AARCH64_CTR_EL0_ERG_SHIFT 20
#define AARCH64_CTR_EL0_ERG_MASK 0xf00000U
#define AARCH64_CTR_EL0_ERG_GET( _reg ) \
  ( ( ( _reg ) >> 20 ) & 0xfU )

/* CWG field, bits [27:24]. */
#define AARCH64_CTR_EL0_CWG( _val ) ( ( _val ) << 24 )
#define AARCH64_CTR_EL0_CWG_SHIFT 24
#define AARCH64_CTR_EL0_CWG_MASK 0xf000000U
#define AARCH64_CTR_EL0_CWG_GET( _reg ) \
  ( ( ( _reg ) >> 24 ) & 0xfU )

/* IDC flag, bit [28]. */
#define AARCH64_CTR_EL0_IDC 0x10000000U

/* DIC flag, bit [29]. */
#define AARCH64_CTR_EL0_DIC 0x20000000U

/*
 * TminLine field, bits [37:32].  The cast is required: shifting a 32-bit
 * value by 32 is undefined behavior in C (shift count >= type width), so
 * without it the setter was broken for any 32-bit argument.
 */
#define AARCH64_CTR_EL0_TMINLINE( _val ) ( ( uint64_t ) ( _val ) << 32 )
#define AARCH64_CTR_EL0_TMINLINE_SHIFT 32
#define AARCH64_CTR_EL0_TMINLINE_MASK 0x3f00000000ULL
#define AARCH64_CTR_EL0_TMINLINE_GET( _reg ) \
  ( ( ( _reg ) >> 32 ) & 0x3fULL )

/* Read CTR_EL0 via MRS (read-only register). */
static inline uint64_t _AArch64_Read_ctr_el0( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, CTR_EL0" : "=&r" ( value ) : : "memory"
  );

  return value;
}
882
/* DACR32_EL2, Domain Access Control Register */

/* Read DACR32_EL2 via MRS. */
static inline uint64_t _AArch64_Read_dacr32_el2( void )
{
  uint64_t reg;

  __asm__ volatile ( "mrs %0, DACR32_EL2" : "=&r" ( reg ) : : "memory" );

  return reg;
}

/* Write DACR32_EL2 via MSR. */
static inline void _AArch64_Write_dacr32_el2( uint64_t value )
{
  __asm__ volatile ( "msr DACR32_EL2, %0" : : "r" ( value ) : "memory" );
}
902
/* DCZID_EL0, Data Cache Zero ID Register */

/* BS field, bits [3:0]. */
#define AARCH64_DCZID_EL0_BS( _val ) ( ( _val ) << 0 )
#define AARCH64_DCZID_EL0_BS_SHIFT 0
#define AARCH64_DCZID_EL0_BS_MASK 0xfU
#define AARCH64_DCZID_EL0_BS_GET( _reg ) ( ( ( _reg ) >> 0 ) & 0xfU )

/* DZP flag, bit [4]. */
#define AARCH64_DCZID_EL0_DZP 0x10U

/* Read DCZID_EL0 via MRS (read-only register). */
static inline uint64_t _AArch64_Read_dczid_el0( void )
{
  uint64_t reg;

  __asm__ volatile ( "mrs %0, DCZID_EL0" : "=&r" ( reg ) : : "memory" );

  return reg;
}
923
/* ESR_EL1, Exception Syndrome Register (EL1) */

/*
 * Several macros alias the same bit positions; which one applies depends on
 * the syndrome encoding selected by the EC field.
 */

#define AARCH64_ESR_EL1_DIRECTION 0x1U

#define AARCH64_ESR_EL1_ERETA 0x1U

#define AARCH64_ESR_EL1_IOF 0x1U

#define AARCH64_ESR_EL1_TI 0x1U

#define AARCH64_ESR_EL1_BTYPE( _val ) ( ( _val ) << 0 )
#define AARCH64_ESR_EL1_BTYPE_SHIFT 0
#define AARCH64_ESR_EL1_BTYPE_MASK 0x3U
#define AARCH64_ESR_EL1_BTYPE_GET( _reg ) \
  ( ( ( _reg ) >> 0 ) & 0x3U )

#define AARCH64_ESR_EL1_DFSC( _val ) ( ( _val ) << 0 )
#define AARCH64_ESR_EL1_DFSC_SHIFT 0
#define AARCH64_ESR_EL1_DFSC_MASK 0x3fU
#define AARCH64_ESR_EL1_DFSC_GET( _reg ) \
  ( ( ( _reg ) >> 0 ) & 0x3fU )

#define AARCH64_ESR_EL1_IFSC( _val ) ( ( _val ) << 0 )
#define AARCH64_ESR_EL1_IFSC_SHIFT 0
#define AARCH64_ESR_EL1_IFSC_MASK 0x3fU
#define AARCH64_ESR_EL1_IFSC_GET( _reg ) \
  ( ( ( _reg ) >> 0 ) & 0x3fU )

#define AARCH64_ESR_EL1_COMMENT( _val ) ( ( _val ) << 0 )
#define AARCH64_ESR_EL1_COMMENT_SHIFT 0
#define AARCH64_ESR_EL1_COMMENT_MASK 0xffffU
#define AARCH64_ESR_EL1_COMMENT_GET( _reg ) \
  ( ( ( _reg ) >> 0 ) & 0xffffU )

#define AARCH64_ESR_EL1_IMM16( _val ) ( ( _val ) << 0 )
#define AARCH64_ESR_EL1_IMM16_SHIFT 0
#define AARCH64_ESR_EL1_IMM16_MASK 0xffffU
#define AARCH64_ESR_EL1_IMM16_GET( _reg ) \
  ( ( ( _reg ) >> 0 ) & 0xffffU )

#define AARCH64_ESR_EL1_ISS( _val ) ( ( _val ) << 0 )
#define AARCH64_ESR_EL1_ISS_SHIFT 0
#define AARCH64_ESR_EL1_ISS_MASK 0x1ffffffU
#define AARCH64_ESR_EL1_ISS_GET( _reg ) \
  ( ( ( _reg ) >> 0 ) & 0x1ffffffU )

#define AARCH64_ESR_EL1_DZF 0x2U

#define AARCH64_ESR_EL1_ERET 0x2U

#define AARCH64_ESR_EL1_AM( _val ) ( ( _val ) << 1 )
#define AARCH64_ESR_EL1_AM_SHIFT 1
#define AARCH64_ESR_EL1_AM_MASK 0xeU
#define AARCH64_ESR_EL1_AM_GET( _reg ) \
  ( ( ( _reg ) >> 1 ) & 0x7U )

#define AARCH64_ESR_EL1_CRM( _val ) ( ( _val ) << 1 )
#define AARCH64_ESR_EL1_CRM_SHIFT 1
#define AARCH64_ESR_EL1_CRM_MASK 0x1eU
#define AARCH64_ESR_EL1_CRM_GET( _reg ) \
  ( ( ( _reg ) >> 1 ) & 0xfU )

#define AARCH64_ESR_EL1_OFF 0x4U

#define AARCH64_ESR_EL1_UFF 0x8U

#define AARCH64_ESR_EL1_IXF 0x10U

#define AARCH64_ESR_EL1_OFFSET 0x10U

#define AARCH64_ESR_EL1_RN( _val ) ( ( _val ) << 5 )
#define AARCH64_ESR_EL1_RN_SHIFT 5
#define AARCH64_ESR_EL1_RN_MASK 0x3e0U
#define AARCH64_ESR_EL1_RN_GET( _reg ) \
  ( ( ( _reg ) >> 5 ) & 0x1fU )

#define AARCH64_ESR_EL1_RT( _val ) ( ( _val ) << 5 )
#define AARCH64_ESR_EL1_RT_SHIFT 5
#define AARCH64_ESR_EL1_RT_MASK 0x3e0U
#define AARCH64_ESR_EL1_RT_GET( _reg ) \
  ( ( ( _reg ) >> 5 ) & 0x1fU )

#define AARCH64_ESR_EL1_EX 0x40U

#define AARCH64_ESR_EL1_WNR 0x40U

#define AARCH64_ESR_EL1_IDF 0x80U

#define AARCH64_ESR_EL1_S1PTW 0x80U

#define AARCH64_ESR_EL1_CM 0x100U

#define AARCH64_ESR_EL1_VECITR( _val ) ( ( _val ) << 8 )
#define AARCH64_ESR_EL1_VECITR_SHIFT 8
#define AARCH64_ESR_EL1_VECITR_MASK 0x700U
#define AARCH64_ESR_EL1_VECITR_GET( _reg ) \
  ( ( ( _reg ) >> 8 ) & 0x7U )

#define AARCH64_ESR_EL1_EA 0x200U

#define AARCH64_ESR_EL1_FNV 0x400U

#define AARCH64_ESR_EL1_AET( _val ) ( ( _val ) << 10 )
#define AARCH64_ESR_EL1_AET_SHIFT 10
#define AARCH64_ESR_EL1_AET_MASK 0x1c00U
#define AARCH64_ESR_EL1_AET_GET( _reg ) \
  ( ( ( _reg ) >> 10 ) & 0x7U )

#define AARCH64_ESR_EL1_CRN( _val ) ( ( _val ) << 10 )
#define AARCH64_ESR_EL1_CRN_SHIFT 10
#define AARCH64_ESR_EL1_CRN_MASK 0x3c00U
#define AARCH64_ESR_EL1_CRN_GET( _reg ) \
  ( ( ( _reg ) >> 10 ) & 0xfU )

#define AARCH64_ESR_EL1_RT2( _val ) ( ( _val ) << 10 )
#define AARCH64_ESR_EL1_RT2_SHIFT 10
#define AARCH64_ESR_EL1_RT2_MASK 0x7c00U
#define AARCH64_ESR_EL1_RT2_GET( _reg ) \
  ( ( ( _reg ) >> 10 ) & 0x1fU )

#define AARCH64_ESR_EL1_SET( _val ) ( ( _val ) << 11 )
#define AARCH64_ESR_EL1_SET_SHIFT 11
#define AARCH64_ESR_EL1_SET_MASK 0x1800U
#define AARCH64_ESR_EL1_SET_GET( _reg ) \
  ( ( ( _reg ) >> 11 ) & 0x3U )

#define AARCH64_ESR_EL1_IMM8( _val ) ( ( _val ) << 12 )
#define AARCH64_ESR_EL1_IMM8_SHIFT 12
#define AARCH64_ESR_EL1_IMM8_MASK 0xff000U
#define AARCH64_ESR_EL1_IMM8_GET( _reg ) \
  ( ( ( _reg ) >> 12 ) & 0xffU )

#define AARCH64_ESR_EL1_IESB 0x2000U

#define AARCH64_ESR_EL1_VNCR 0x2000U

#define AARCH64_ESR_EL1_AR 0x4000U

#define AARCH64_ESR_EL1_OP1( _val ) ( ( _val ) << 14 )
#define AARCH64_ESR_EL1_OP1_SHIFT 14
#define AARCH64_ESR_EL1_OP1_MASK 0x1c000U
#define AARCH64_ESR_EL1_OP1_GET( _reg ) \
  ( ( ( _reg ) >> 14 ) & 0x7U )

#define AARCH64_ESR_EL1_OPC1_0( _val ) ( ( _val ) << 14 )
#define AARCH64_ESR_EL1_OPC1_SHIFT_0 14
#define AARCH64_ESR_EL1_OPC1_MASK_0 0x1c000U
#define AARCH64_ESR_EL1_OPC1_GET_0( _reg ) \
  ( ( ( _reg ) >> 14 ) & 0x7U )

#define AARCH64_ESR_EL1_SF 0x8000U

#define AARCH64_ESR_EL1_OPC1_1( _val ) ( ( _val ) << 16 )
#define AARCH64_ESR_EL1_OPC1_SHIFT_1 16
#define AARCH64_ESR_EL1_OPC1_MASK_1 0xf0000U
#define AARCH64_ESR_EL1_OPC1_GET_1( _reg ) \
  ( ( ( _reg ) >> 16 ) & 0xfU )

#define AARCH64_ESR_EL1_SRT( _val ) ( ( _val ) << 16 )
#define AARCH64_ESR_EL1_SRT_SHIFT 16
#define AARCH64_ESR_EL1_SRT_MASK 0x1f0000U
#define AARCH64_ESR_EL1_SRT_GET( _reg ) \
  ( ( ( _reg ) >> 16 ) & 0x1fU )

#define AARCH64_ESR_EL1_OP2( _val ) ( ( _val ) << 17 )
#define AARCH64_ESR_EL1_OP2_SHIFT 17
#define AARCH64_ESR_EL1_OP2_MASK 0xe0000U
#define AARCH64_ESR_EL1_OP2_GET( _reg ) \
  ( ( ( _reg ) >> 17 ) & 0x7U )

#define AARCH64_ESR_EL1_OPC2( _val ) ( ( _val ) << 17 )
#define AARCH64_ESR_EL1_OPC2_SHIFT 17
#define AARCH64_ESR_EL1_OPC2_MASK 0xe0000U
#define AARCH64_ESR_EL1_OPC2_GET( _reg ) \
  ( ( ( _reg ) >> 17 ) & 0x7U )

#define AARCH64_ESR_EL1_CCKNOWNPASS 0x80000U

#define AARCH64_ESR_EL1_OP0( _val ) ( ( _val ) << 20 )
#define AARCH64_ESR_EL1_OP0_SHIFT 20
#define AARCH64_ESR_EL1_OP0_MASK 0x300000U
#define AARCH64_ESR_EL1_OP0_GET( _reg ) \
  ( ( ( _reg ) >> 20 ) & 0x3U )

#define AARCH64_ESR_EL1_COND( _val ) ( ( _val ) << 20 )
#define AARCH64_ESR_EL1_COND_SHIFT 20
#define AARCH64_ESR_EL1_COND_MASK 0xf00000U
#define AARCH64_ESR_EL1_COND_GET( _reg ) \
  ( ( ( _reg ) >> 20 ) & 0xfU )

#define AARCH64_ESR_EL1_SSE 0x200000U

#define AARCH64_ESR_EL1_SAS( _val ) ( ( _val ) << 22 )
#define AARCH64_ESR_EL1_SAS_SHIFT 22
#define AARCH64_ESR_EL1_SAS_MASK 0xc00000U
#define AARCH64_ESR_EL1_SAS_GET( _reg ) \
  ( ( ( _reg ) >> 22 ) & 0x3U )

#define AARCH64_ESR_EL1_TFV 0x800000U

#define AARCH64_ESR_EL1_CV 0x1000000U

#define AARCH64_ESR_EL1_IDS 0x1000000U

#define AARCH64_ESR_EL1_ISV 0x1000000U

#define AARCH64_ESR_EL1_IL 0x2000000U

/*
 * EC field, bits [31:26].  The cast is required: with a plain int argument
 * (e.g. 0x3f), ( _val ) << 26 overflows a signed 32-bit int, which is
 * undefined behavior in C.
 */
#define AARCH64_ESR_EL1_EC( _val ) ( ( uint64_t ) ( _val ) << 26 )
#define AARCH64_ESR_EL1_EC_SHIFT 26
#define AARCH64_ESR_EL1_EC_MASK 0xfc000000U
#define AARCH64_ESR_EL1_EC_GET( _reg ) \
  ( ( ( _reg ) >> 26 ) & 0x3fU )

/* Read ESR_EL1 via MRS. */
static inline uint64_t _AArch64_Read_esr_el1( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, ESR_EL1" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* Write ESR_EL1 via MSR. */
static inline void _AArch64_Write_esr_el1( uint64_t value )
{
  __asm__ volatile (
    "msr ESR_EL1, %0" : : "r" ( value ) : "memory"
  );
}
1155
/* ESR_EL2, Exception Syndrome Register (EL2) */

/*
 * Field accessor convention: FIELD( _val ) places a value at the field
 * position, FIELD_SHIFT/FIELD_MASK describe the bit position, and
 * FIELD_GET( _reg ) extracts the field from a register value.  Several
 * names share one bit position because the ISS encoding depends on the
 * exception class (EC).
 */

#define AARCH64_ESR_EL2_DIRECTION 0x1U

#define AARCH64_ESR_EL2_ERETA 0x1U

#define AARCH64_ESR_EL2_IOF 0x1U

#define AARCH64_ESR_EL2_TI 0x1U

#define AARCH64_ESR_EL2_BTYPE( _val ) ( ( _val ) << 0 )
#define AARCH64_ESR_EL2_BTYPE_SHIFT 0
#define AARCH64_ESR_EL2_BTYPE_MASK 0x3U
#define AARCH64_ESR_EL2_BTYPE_GET( _reg ) \
  ( ( ( _reg ) >> 0 ) & 0x3U )

#define AARCH64_ESR_EL2_DFSC( _val ) ( ( _val ) << 0 )
#define AARCH64_ESR_EL2_DFSC_SHIFT 0
#define AARCH64_ESR_EL2_DFSC_MASK 0x3fU
#define AARCH64_ESR_EL2_DFSC_GET( _reg ) \
  ( ( ( _reg ) >> 0 ) & 0x3fU )

#define AARCH64_ESR_EL2_IFSC( _val ) ( ( _val ) << 0 )
#define AARCH64_ESR_EL2_IFSC_SHIFT 0
#define AARCH64_ESR_EL2_IFSC_MASK 0x3fU
#define AARCH64_ESR_EL2_IFSC_GET( _reg ) \
  ( ( ( _reg ) >> 0 ) & 0x3fU )

#define AARCH64_ESR_EL2_COMMENT( _val ) ( ( _val ) << 0 )
#define AARCH64_ESR_EL2_COMMENT_SHIFT 0
#define AARCH64_ESR_EL2_COMMENT_MASK 0xffffU
#define AARCH64_ESR_EL2_COMMENT_GET( _reg ) \
  ( ( ( _reg ) >> 0 ) & 0xffffU )

#define AARCH64_ESR_EL2_IMM16( _val ) ( ( _val ) << 0 )
#define AARCH64_ESR_EL2_IMM16_SHIFT 0
#define AARCH64_ESR_EL2_IMM16_MASK 0xffffU
#define AARCH64_ESR_EL2_IMM16_GET( _reg ) \
  ( ( ( _reg ) >> 0 ) & 0xffffU )

/* ISS, Instruction Specific Syndrome, bits [24:0] */
#define AARCH64_ESR_EL2_ISS( _val ) ( ( _val ) << 0 )
#define AARCH64_ESR_EL2_ISS_SHIFT 0
#define AARCH64_ESR_EL2_ISS_MASK 0x1ffffffU
#define AARCH64_ESR_EL2_ISS_GET( _reg ) \
  ( ( ( _reg ) >> 0 ) & 0x1ffffffU )

#define AARCH64_ESR_EL2_DZF 0x2U

#define AARCH64_ESR_EL2_ERET 0x2U

#define AARCH64_ESR_EL2_AM( _val ) ( ( _val ) << 1 )
#define AARCH64_ESR_EL2_AM_SHIFT 1
#define AARCH64_ESR_EL2_AM_MASK 0xeU
#define AARCH64_ESR_EL2_AM_GET( _reg ) \
  ( ( ( _reg ) >> 1 ) & 0x7U )

#define AARCH64_ESR_EL2_CRM( _val ) ( ( _val ) << 1 )
#define AARCH64_ESR_EL2_CRM_SHIFT 1
#define AARCH64_ESR_EL2_CRM_MASK 0x1eU
#define AARCH64_ESR_EL2_CRM_GET( _reg ) \
  ( ( ( _reg ) >> 1 ) & 0xfU )

#define AARCH64_ESR_EL2_OFF 0x4U

#define AARCH64_ESR_EL2_UFF 0x8U

#define AARCH64_ESR_EL2_IXF 0x10U

#define AARCH64_ESR_EL2_OFFSET 0x10U

#define AARCH64_ESR_EL2_RN( _val ) ( ( _val ) << 5 )
#define AARCH64_ESR_EL2_RN_SHIFT 5
#define AARCH64_ESR_EL2_RN_MASK 0x3e0U
#define AARCH64_ESR_EL2_RN_GET( _reg ) \
  ( ( ( _reg ) >> 5 ) & 0x1fU )

#define AARCH64_ESR_EL2_RT( _val ) ( ( _val ) << 5 )
#define AARCH64_ESR_EL2_RT_SHIFT 5
#define AARCH64_ESR_EL2_RT_MASK 0x3e0U
#define AARCH64_ESR_EL2_RT_GET( _reg ) \
  ( ( ( _reg ) >> 5 ) & 0x1fU )

#define AARCH64_ESR_EL2_EX 0x40U

#define AARCH64_ESR_EL2_WNR 0x40U

#define AARCH64_ESR_EL2_IDF 0x80U

#define AARCH64_ESR_EL2_S1PTW 0x80U

#define AARCH64_ESR_EL2_CM 0x100U

#define AARCH64_ESR_EL2_VECITR( _val ) ( ( _val ) << 8 )
#define AARCH64_ESR_EL2_VECITR_SHIFT 8
#define AARCH64_ESR_EL2_VECITR_MASK 0x700U
#define AARCH64_ESR_EL2_VECITR_GET( _reg ) \
  ( ( ( _reg ) >> 8 ) & 0x7U )

#define AARCH64_ESR_EL2_EA 0x200U

#define AARCH64_ESR_EL2_FNV 0x400U

#define AARCH64_ESR_EL2_AET( _val ) ( ( _val ) << 10 )
#define AARCH64_ESR_EL2_AET_SHIFT 10
#define AARCH64_ESR_EL2_AET_MASK 0x1c00U
#define AARCH64_ESR_EL2_AET_GET( _reg ) \
  ( ( ( _reg ) >> 10 ) & 0x7U )

#define AARCH64_ESR_EL2_CRN( _val ) ( ( _val ) << 10 )
#define AARCH64_ESR_EL2_CRN_SHIFT 10
#define AARCH64_ESR_EL2_CRN_MASK 0x3c00U
#define AARCH64_ESR_EL2_CRN_GET( _reg ) \
  ( ( ( _reg ) >> 10 ) & 0xfU )

#define AARCH64_ESR_EL2_RT2( _val ) ( ( _val ) << 10 )
#define AARCH64_ESR_EL2_RT2_SHIFT 10
#define AARCH64_ESR_EL2_RT2_MASK 0x7c00U
#define AARCH64_ESR_EL2_RT2_GET( _reg ) \
  ( ( ( _reg ) >> 10 ) & 0x1fU )

#define AARCH64_ESR_EL2_SET( _val ) ( ( _val ) << 11 )
#define AARCH64_ESR_EL2_SET_SHIFT 11
#define AARCH64_ESR_EL2_SET_MASK 0x1800U
#define AARCH64_ESR_EL2_SET_GET( _reg ) \
  ( ( ( _reg ) >> 11 ) & 0x3U )

#define AARCH64_ESR_EL2_IMM8( _val ) ( ( _val ) << 12 )
#define AARCH64_ESR_EL2_IMM8_SHIFT 12
#define AARCH64_ESR_EL2_IMM8_MASK 0xff000U
#define AARCH64_ESR_EL2_IMM8_GET( _reg ) \
  ( ( ( _reg ) >> 12 ) & 0xffU )

#define AARCH64_ESR_EL2_IESB 0x2000U

#define AARCH64_ESR_EL2_VNCR 0x2000U

#define AARCH64_ESR_EL2_AR 0x4000U

#define AARCH64_ESR_EL2_OP1( _val ) ( ( _val ) << 14 )
#define AARCH64_ESR_EL2_OP1_SHIFT 14
#define AARCH64_ESR_EL2_OP1_MASK 0x1c000U
#define AARCH64_ESR_EL2_OP1_GET( _reg ) \
  ( ( ( _reg ) >> 14 ) & 0x7U )

#define AARCH64_ESR_EL2_OPC1_0( _val ) ( ( _val ) << 14 )
#define AARCH64_ESR_EL2_OPC1_SHIFT_0 14
#define AARCH64_ESR_EL2_OPC1_MASK_0 0x1c000U
#define AARCH64_ESR_EL2_OPC1_GET_0( _reg ) \
  ( ( ( _reg ) >> 14 ) & 0x7U )

#define AARCH64_ESR_EL2_SF 0x8000U

#define AARCH64_ESR_EL2_OPC1_1( _val ) ( ( _val ) << 16 )
#define AARCH64_ESR_EL2_OPC1_SHIFT_1 16
#define AARCH64_ESR_EL2_OPC1_MASK_1 0xf0000U
#define AARCH64_ESR_EL2_OPC1_GET_1( _reg ) \
  ( ( ( _reg ) >> 16 ) & 0xfU )

#define AARCH64_ESR_EL2_SRT( _val ) ( ( _val ) << 16 )
#define AARCH64_ESR_EL2_SRT_SHIFT 16
#define AARCH64_ESR_EL2_SRT_MASK 0x1f0000U
#define AARCH64_ESR_EL2_SRT_GET( _reg ) \
  ( ( ( _reg ) >> 16 ) & 0x1fU )

#define AARCH64_ESR_EL2_OP2( _val ) ( ( _val ) << 17 )
#define AARCH64_ESR_EL2_OP2_SHIFT 17
#define AARCH64_ESR_EL2_OP2_MASK 0xe0000U
#define AARCH64_ESR_EL2_OP2_GET( _reg ) \
  ( ( ( _reg ) >> 17 ) & 0x7U )

#define AARCH64_ESR_EL2_OPC2( _val ) ( ( _val ) << 17 )
#define AARCH64_ESR_EL2_OPC2_SHIFT 17
#define AARCH64_ESR_EL2_OPC2_MASK 0xe0000U
#define AARCH64_ESR_EL2_OPC2_GET( _reg ) \
  ( ( ( _reg ) >> 17 ) & 0x7U )

#define AARCH64_ESR_EL2_CCKNOWNPASS 0x80000U

#define AARCH64_ESR_EL2_OP0( _val ) ( ( _val ) << 20 )
#define AARCH64_ESR_EL2_OP0_SHIFT 20
#define AARCH64_ESR_EL2_OP0_MASK 0x300000U
#define AARCH64_ESR_EL2_OP0_GET( _reg ) \
  ( ( ( _reg ) >> 20 ) & 0x3U )

#define AARCH64_ESR_EL2_COND( _val ) ( ( _val ) << 20 )
#define AARCH64_ESR_EL2_COND_SHIFT 20
#define AARCH64_ESR_EL2_COND_MASK 0xf00000U
#define AARCH64_ESR_EL2_COND_GET( _reg ) \
  ( ( ( _reg ) >> 20 ) & 0xfU )

#define AARCH64_ESR_EL2_SSE 0x200000U

#define AARCH64_ESR_EL2_SAS( _val ) ( ( _val ) << 22 )
#define AARCH64_ESR_EL2_SAS_SHIFT 22
#define AARCH64_ESR_EL2_SAS_MASK 0xc00000U
#define AARCH64_ESR_EL2_SAS_GET( _reg ) \
  ( ( ( _reg ) >> 22 ) & 0x3U )

#define AARCH64_ESR_EL2_TFV 0x800000U

#define AARCH64_ESR_EL2_CV 0x1000000U

#define AARCH64_ESR_EL2_IDS 0x1000000U

#define AARCH64_ESR_EL2_ISV 0x1000000U

#define AARCH64_ESR_EL2_IL 0x2000000U

/* EC, Exception Class, bits [31:26] */
#define AARCH64_ESR_EL2_EC( _val ) ( ( _val ) << 26 )
#define AARCH64_ESR_EL2_EC_SHIFT 26
#define AARCH64_ESR_EL2_EC_MASK 0xfc000000U
#define AARCH64_ESR_EL2_EC_GET( _reg ) \
  ( ( ( _reg ) >> 26 ) & 0x3fU )

/* Reads the ESR_EL2 system register via MRS. */
static inline uint64_t _AArch64_Read_esr_el2( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, ESR_EL2" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* Writes @a value to the ESR_EL2 system register via MSR. */
static inline void _AArch64_Write_esr_el2( uint64_t value )
{
  __asm__ volatile (
    "msr ESR_EL2, %0" : : "r" ( value ) : "memory"
  );
}
1387
/* ESR_EL3, Exception Syndrome Register (EL3) */

/*
 * Field accessor convention: FIELD( _val ) places a value at the field
 * position, FIELD_SHIFT/FIELD_MASK describe the bit position, and
 * FIELD_GET( _reg ) extracts the field from a register value.  Several
 * names share one bit position because the ISS encoding depends on the
 * exception class (EC).
 */

#define AARCH64_ESR_EL3_DIRECTION 0x1U

#define AARCH64_ESR_EL3_ERETA 0x1U

#define AARCH64_ESR_EL3_IOF 0x1U

#define AARCH64_ESR_EL3_TI 0x1U

#define AARCH64_ESR_EL3_BTYPE( _val ) ( ( _val ) << 0 )
#define AARCH64_ESR_EL3_BTYPE_SHIFT 0
#define AARCH64_ESR_EL3_BTYPE_MASK 0x3U
#define AARCH64_ESR_EL3_BTYPE_GET( _reg ) \
  ( ( ( _reg ) >> 0 ) & 0x3U )

#define AARCH64_ESR_EL3_DFSC( _val ) ( ( _val ) << 0 )
#define AARCH64_ESR_EL3_DFSC_SHIFT 0
#define AARCH64_ESR_EL3_DFSC_MASK 0x3fU
#define AARCH64_ESR_EL3_DFSC_GET( _reg ) \
  ( ( ( _reg ) >> 0 ) & 0x3fU )

#define AARCH64_ESR_EL3_IFSC( _val ) ( ( _val ) << 0 )
#define AARCH64_ESR_EL3_IFSC_SHIFT 0
#define AARCH64_ESR_EL3_IFSC_MASK 0x3fU
#define AARCH64_ESR_EL3_IFSC_GET( _reg ) \
  ( ( ( _reg ) >> 0 ) & 0x3fU )

#define AARCH64_ESR_EL3_COMMENT( _val ) ( ( _val ) << 0 )
#define AARCH64_ESR_EL3_COMMENT_SHIFT 0
#define AARCH64_ESR_EL3_COMMENT_MASK 0xffffU
#define AARCH64_ESR_EL3_COMMENT_GET( _reg ) \
  ( ( ( _reg ) >> 0 ) & 0xffffU )

#define AARCH64_ESR_EL3_IMM16( _val ) ( ( _val ) << 0 )
#define AARCH64_ESR_EL3_IMM16_SHIFT 0
#define AARCH64_ESR_EL3_IMM16_MASK 0xffffU
#define AARCH64_ESR_EL3_IMM16_GET( _reg ) \
  ( ( ( _reg ) >> 0 ) & 0xffffU )

/* ISS, Instruction Specific Syndrome, bits [24:0] */
#define AARCH64_ESR_EL3_ISS( _val ) ( ( _val ) << 0 )
#define AARCH64_ESR_EL3_ISS_SHIFT 0
#define AARCH64_ESR_EL3_ISS_MASK 0x1ffffffU
#define AARCH64_ESR_EL3_ISS_GET( _reg ) \
  ( ( ( _reg ) >> 0 ) & 0x1ffffffU )

#define AARCH64_ESR_EL3_DZF 0x2U

#define AARCH64_ESR_EL3_ERET 0x2U

#define AARCH64_ESR_EL3_AM( _val ) ( ( _val ) << 1 )
#define AARCH64_ESR_EL3_AM_SHIFT 1
#define AARCH64_ESR_EL3_AM_MASK 0xeU
#define AARCH64_ESR_EL3_AM_GET( _reg ) \
  ( ( ( _reg ) >> 1 ) & 0x7U )

#define AARCH64_ESR_EL3_CRM( _val ) ( ( _val ) << 1 )
#define AARCH64_ESR_EL3_CRM_SHIFT 1
#define AARCH64_ESR_EL3_CRM_MASK 0x1eU
#define AARCH64_ESR_EL3_CRM_GET( _reg ) \
  ( ( ( _reg ) >> 1 ) & 0xfU )

#define AARCH64_ESR_EL3_OFF 0x4U

#define AARCH64_ESR_EL3_UFF 0x8U

#define AARCH64_ESR_EL3_IXF 0x10U

#define AARCH64_ESR_EL3_OFFSET 0x10U

#define AARCH64_ESR_EL3_RN( _val ) ( ( _val ) << 5 )
#define AARCH64_ESR_EL3_RN_SHIFT 5
#define AARCH64_ESR_EL3_RN_MASK 0x3e0U
#define AARCH64_ESR_EL3_RN_GET( _reg ) \
  ( ( ( _reg ) >> 5 ) & 0x1fU )

#define AARCH64_ESR_EL3_RT( _val ) ( ( _val ) << 5 )
#define AARCH64_ESR_EL3_RT_SHIFT 5
#define AARCH64_ESR_EL3_RT_MASK 0x3e0U
#define AARCH64_ESR_EL3_RT_GET( _reg ) \
  ( ( ( _reg ) >> 5 ) & 0x1fU )

#define AARCH64_ESR_EL3_EX 0x40U

#define AARCH64_ESR_EL3_WNR 0x40U

#define AARCH64_ESR_EL3_IDF 0x80U

#define AARCH64_ESR_EL3_S1PTW 0x80U

#define AARCH64_ESR_EL3_CM 0x100U

#define AARCH64_ESR_EL3_VECITR( _val ) ( ( _val ) << 8 )
#define AARCH64_ESR_EL3_VECITR_SHIFT 8
#define AARCH64_ESR_EL3_VECITR_MASK 0x700U
#define AARCH64_ESR_EL3_VECITR_GET( _reg ) \
  ( ( ( _reg ) >> 8 ) & 0x7U )

#define AARCH64_ESR_EL3_EA 0x200U

#define AARCH64_ESR_EL3_FNV 0x400U

#define AARCH64_ESR_EL3_AET( _val ) ( ( _val ) << 10 )
#define AARCH64_ESR_EL3_AET_SHIFT 10
#define AARCH64_ESR_EL3_AET_MASK 0x1c00U
#define AARCH64_ESR_EL3_AET_GET( _reg ) \
  ( ( ( _reg ) >> 10 ) & 0x7U )

#define AARCH64_ESR_EL3_CRN( _val ) ( ( _val ) << 10 )
#define AARCH64_ESR_EL3_CRN_SHIFT 10
#define AARCH64_ESR_EL3_CRN_MASK 0x3c00U
#define AARCH64_ESR_EL3_CRN_GET( _reg ) \
  ( ( ( _reg ) >> 10 ) & 0xfU )

#define AARCH64_ESR_EL3_RT2( _val ) ( ( _val ) << 10 )
#define AARCH64_ESR_EL3_RT2_SHIFT 10
#define AARCH64_ESR_EL3_RT2_MASK 0x7c00U
#define AARCH64_ESR_EL3_RT2_GET( _reg ) \
  ( ( ( _reg ) >> 10 ) & 0x1fU )

#define AARCH64_ESR_EL3_SET( _val ) ( ( _val ) << 11 )
#define AARCH64_ESR_EL3_SET_SHIFT 11
#define AARCH64_ESR_EL3_SET_MASK 0x1800U
#define AARCH64_ESR_EL3_SET_GET( _reg ) \
  ( ( ( _reg ) >> 11 ) & 0x3U )

#define AARCH64_ESR_EL3_IMM8( _val ) ( ( _val ) << 12 )
#define AARCH64_ESR_EL3_IMM8_SHIFT 12
#define AARCH64_ESR_EL3_IMM8_MASK 0xff000U
#define AARCH64_ESR_EL3_IMM8_GET( _reg ) \
  ( ( ( _reg ) >> 12 ) & 0xffU )

#define AARCH64_ESR_EL3_IESB 0x2000U

#define AARCH64_ESR_EL3_VNCR 0x2000U

#define AARCH64_ESR_EL3_AR 0x4000U

#define AARCH64_ESR_EL3_OP1( _val ) ( ( _val ) << 14 )
#define AARCH64_ESR_EL3_OP1_SHIFT 14
#define AARCH64_ESR_EL3_OP1_MASK 0x1c000U
#define AARCH64_ESR_EL3_OP1_GET( _reg ) \
  ( ( ( _reg ) >> 14 ) & 0x7U )

#define AARCH64_ESR_EL3_OPC1_0( _val ) ( ( _val ) << 14 )
#define AARCH64_ESR_EL3_OPC1_SHIFT_0 14
#define AARCH64_ESR_EL3_OPC1_MASK_0 0x1c000U
#define AARCH64_ESR_EL3_OPC1_GET_0( _reg ) \
  ( ( ( _reg ) >> 14 ) & 0x7U )

#define AARCH64_ESR_EL3_SF 0x8000U

#define AARCH64_ESR_EL3_OPC1_1( _val ) ( ( _val ) << 16 )
#define AARCH64_ESR_EL3_OPC1_SHIFT_1 16
#define AARCH64_ESR_EL3_OPC1_MASK_1 0xf0000U
#define AARCH64_ESR_EL3_OPC1_GET_1( _reg ) \
  ( ( ( _reg ) >> 16 ) & 0xfU )

#define AARCH64_ESR_EL3_SRT( _val ) ( ( _val ) << 16 )
#define AARCH64_ESR_EL3_SRT_SHIFT 16
#define AARCH64_ESR_EL3_SRT_MASK 0x1f0000U
#define AARCH64_ESR_EL3_SRT_GET( _reg ) \
  ( ( ( _reg ) >> 16 ) & 0x1fU )

#define AARCH64_ESR_EL3_OP2( _val ) ( ( _val ) << 17 )
#define AARCH64_ESR_EL3_OP2_SHIFT 17
#define AARCH64_ESR_EL3_OP2_MASK 0xe0000U
#define AARCH64_ESR_EL3_OP2_GET( _reg ) \
  ( ( ( _reg ) >> 17 ) & 0x7U )

#define AARCH64_ESR_EL3_OPC2( _val ) ( ( _val ) << 17 )
#define AARCH64_ESR_EL3_OPC2_SHIFT 17
#define AARCH64_ESR_EL3_OPC2_MASK 0xe0000U
#define AARCH64_ESR_EL3_OPC2_GET( _reg ) \
  ( ( ( _reg ) >> 17 ) & 0x7U )

#define AARCH64_ESR_EL3_CCKNOWNPASS 0x80000U

#define AARCH64_ESR_EL3_OP0( _val ) ( ( _val ) << 20 )
#define AARCH64_ESR_EL3_OP0_SHIFT 20
#define AARCH64_ESR_EL3_OP0_MASK 0x300000U
#define AARCH64_ESR_EL3_OP0_GET( _reg ) \
  ( ( ( _reg ) >> 20 ) & 0x3U )

#define AARCH64_ESR_EL3_COND( _val ) ( ( _val ) << 20 )
#define AARCH64_ESR_EL3_COND_SHIFT 20
#define AARCH64_ESR_EL3_COND_MASK 0xf00000U
#define AARCH64_ESR_EL3_COND_GET( _reg ) \
  ( ( ( _reg ) >> 20 ) & 0xfU )

#define AARCH64_ESR_EL3_SSE 0x200000U

#define AARCH64_ESR_EL3_SAS( _val ) ( ( _val ) << 22 )
#define AARCH64_ESR_EL3_SAS_SHIFT 22
#define AARCH64_ESR_EL3_SAS_MASK 0xc00000U
#define AARCH64_ESR_EL3_SAS_GET( _reg ) \
  ( ( ( _reg ) >> 22 ) & 0x3U )

#define AARCH64_ESR_EL3_TFV 0x800000U

#define AARCH64_ESR_EL3_CV 0x1000000U

#define AARCH64_ESR_EL3_IDS 0x1000000U

#define AARCH64_ESR_EL3_ISV 0x1000000U

#define AARCH64_ESR_EL3_IL 0x2000000U

/* EC, Exception Class, bits [31:26] */
#define AARCH64_ESR_EL3_EC( _val ) ( ( _val ) << 26 )
#define AARCH64_ESR_EL3_EC_SHIFT 26
#define AARCH64_ESR_EL3_EC_MASK 0xfc000000U
#define AARCH64_ESR_EL3_EC_GET( _reg ) \
  ( ( ( _reg ) >> 26 ) & 0x3fU )

/* Reads the ESR_EL3 system register via MRS. */
static inline uint64_t _AArch64_Read_esr_el3( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, ESR_EL3" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* Writes @a value to the ESR_EL3 system register via MSR. */
static inline void _AArch64_Write_esr_el3( uint64_t value )
{
  __asm__ volatile (
    "msr ESR_EL3, %0" : : "r" ( value ) : "memory"
  );
}
1619
/* FAR_EL1, Fault Address Register (EL1) */

/* Reads the FAR_EL1 system register via MRS. */
static inline uint64_t _AArch64_Read_far_el1( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, FAR_EL1" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* Writes @a value to the FAR_EL1 system register via MSR. */
static inline void _AArch64_Write_far_el1( uint64_t value )
{
  __asm__ volatile (
    "msr FAR_EL1, %0" : : "r" ( value ) : "memory"
  );
}
1639
/* FAR_EL2, Fault Address Register (EL2) */

/* Reads the FAR_EL2 system register via MRS. */
static inline uint64_t _AArch64_Read_far_el2( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, FAR_EL2" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* Writes @a value to the FAR_EL2 system register via MSR. */
static inline void _AArch64_Write_far_el2( uint64_t value )
{
  __asm__ volatile (
    "msr FAR_EL2, %0" : : "r" ( value ) : "memory"
  );
}
1659
/* FAR_EL3, Fault Address Register (EL3) */

/* Reads the FAR_EL3 system register via MRS. */
static inline uint64_t _AArch64_Read_far_el3( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, FAR_EL3" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* Writes @a value to the FAR_EL3 system register via MSR. */
static inline void _AArch64_Write_far_el3( uint64_t value )
{
  __asm__ volatile (
    "msr FAR_EL3, %0" : : "r" ( value ) : "memory"
  );
}
1679
/* FPEXC32_EL2, Floating-Point Exception Control Register */

#define AARCH64_FPEXC32_EL2_IOF 0x1U

#define AARCH64_FPEXC32_EL2_DZF 0x2U

#define AARCH64_FPEXC32_EL2_OFF 0x4U

#define AARCH64_FPEXC32_EL2_UFF 0x8U

#define AARCH64_FPEXC32_EL2_IXF 0x10U

#define AARCH64_FPEXC32_EL2_IDF 0x80U

#define AARCH64_FPEXC32_EL2_VECITR( _val ) ( ( _val ) << 8 )
#define AARCH64_FPEXC32_EL2_VECITR_SHIFT 8
#define AARCH64_FPEXC32_EL2_VECITR_MASK 0x700U
#define AARCH64_FPEXC32_EL2_VECITR_GET( _reg ) \
  ( ( ( _reg ) >> 8 ) & 0x7U )

#define AARCH64_FPEXC32_EL2_TFV 0x4000000U

#define AARCH64_FPEXC32_EL2_VV 0x8000000U

#define AARCH64_FPEXC32_EL2_FP2V 0x10000000U

#define AARCH64_FPEXC32_EL2_DEX 0x20000000U

#define AARCH64_FPEXC32_EL2_EN 0x40000000U

#define AARCH64_FPEXC32_EL2_EX 0x80000000U

/* Reads the FPEXC32_EL2 system register via MRS. */
static inline uint64_t _AArch64_Read_fpexc32_el2( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, FPEXC32_EL2" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* Writes @a value to the FPEXC32_EL2 system register via MSR. */
static inline void _AArch64_Write_fpexc32_el2( uint64_t value )
{
  __asm__ volatile (
    "msr FPEXC32_EL2, %0" : : "r" ( value ) : "memory"
  );
}
1729
/* GCR_EL1, Tag Control Register. */

/* Exclude, bits [15:0] */
#define AARCH64_GCR_EL1_EXCLUDE( _val ) ( ( _val ) << 0 )
#define AARCH64_GCR_EL1_EXCLUDE_SHIFT 0
#define AARCH64_GCR_EL1_EXCLUDE_MASK 0xffffU
#define AARCH64_GCR_EL1_EXCLUDE_GET( _reg ) \
  ( ( ( _reg ) >> 0 ) & 0xffffU )

#define AARCH64_GCR_EL1_RRND 0x10000U

/* Reads the GCR_EL1 system register via MRS. */
static inline uint64_t _AArch64_Read_gcr_el1( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, GCR_EL1" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* Writes @a value to the GCR_EL1 system register via MSR. */
static inline void _AArch64_Write_gcr_el1( uint64_t value )
{
  __asm__ volatile (
    "msr GCR_EL1, %0" : : "r" ( value ) : "memory"
  );
}
1757
/* GMID_EL1, Multiple tag transfer ID Register */

/* BS, Block Size, bits [3:0] */
#define AARCH64_GMID_EL1_BS( _val ) ( ( _val ) << 0 )
#define AARCH64_GMID_EL1_BS_SHIFT 0
#define AARCH64_GMID_EL1_BS_MASK 0xfU
#define AARCH64_GMID_EL1_BS_GET( _reg ) \
  ( ( ( _reg ) >> 0 ) & 0xfU )

/* Reads the GMID_EL1 system register via MRS (no write accessor is
   provided for this register in this header). */
static inline uint64_t _AArch64_Read_gmid_el1( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, GMID_EL1" : "=&r" ( value ) : : "memory"
  );

  return value;
}
1776
/* HACR_EL2, Hypervisor Auxiliary Control Register */

/* Reads the HACR_EL2 system register via MRS. */
static inline uint64_t _AArch64_Read_hacr_el2( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, HACR_EL2" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* Writes @a value to the HACR_EL2 system register via MSR. */
static inline void _AArch64_Write_hacr_el2( uint64_t value )
{
  __asm__ volatile (
    "msr HACR_EL2, %0" : : "r" ( value ) : "memory"
  );
}
1796
/* HAFGRTR_EL2, Hypervisor Activity Monitors Fine-Grained Read Trap Register */

/* Single-bit trap enables; bits above 31 use ULL constants. */

#define AARCH64_HAFGRTR_EL2_AMCNTEN0 0x1U

#define AARCH64_HAFGRTR_EL2_AMCNTEN1 0x20000U

#define AARCH64_HAFGRTR_EL2_AMEVCNTR10_EL0 0x40000U

#define AARCH64_HAFGRTR_EL2_AMEVTYPER10_EL0 0x80000U

#define AARCH64_HAFGRTR_EL2_AMEVCNTR11_EL0 0x100000U

#define AARCH64_HAFGRTR_EL2_AMEVTYPER11_EL0 0x200000U

#define AARCH64_HAFGRTR_EL2_AMEVCNTR12_EL0 0x400000U

#define AARCH64_HAFGRTR_EL2_AMEVTYPER12_EL0 0x800000U

#define AARCH64_HAFGRTR_EL2_AMEVCNTR13_EL0 0x1000000U

#define AARCH64_HAFGRTR_EL2_AMEVTYPER13_EL0 0x2000000U

#define AARCH64_HAFGRTR_EL2_AMEVCNTR14_EL0 0x4000000U

#define AARCH64_HAFGRTR_EL2_AMEVTYPER14_EL0 0x8000000U

#define AARCH64_HAFGRTR_EL2_AMEVCNTR15_EL0 0x10000000U

#define AARCH64_HAFGRTR_EL2_AMEVTYPER15_EL0 0x20000000U

#define AARCH64_HAFGRTR_EL2_AMEVCNTR16_EL0 0x40000000U

#define AARCH64_HAFGRTR_EL2_AMEVTYPER16_EL0 0x80000000U

#define AARCH64_HAFGRTR_EL2_AMEVCNTR17_EL0 0x100000000ULL

#define AARCH64_HAFGRTR_EL2_AMEVTYPER17_EL0 0x200000000ULL

#define AARCH64_HAFGRTR_EL2_AMEVCNTR18_EL0 0x400000000ULL

#define AARCH64_HAFGRTR_EL2_AMEVTYPER18_EL0 0x800000000ULL

#define AARCH64_HAFGRTR_EL2_AMEVCNTR19_EL0 0x1000000000ULL

#define AARCH64_HAFGRTR_EL2_AMEVTYPER19_EL0 0x2000000000ULL

#define AARCH64_HAFGRTR_EL2_AMEVCNTR110_EL0 0x4000000000ULL

#define AARCH64_HAFGRTR_EL2_AMEVTYPER110_EL0 0x8000000000ULL

#define AARCH64_HAFGRTR_EL2_AMEVCNTR111_EL0 0x10000000000ULL

#define AARCH64_HAFGRTR_EL2_AMEVTYPER111_EL0 0x20000000000ULL

#define AARCH64_HAFGRTR_EL2_AMEVCNTR112_EL0 0x40000000000ULL

#define AARCH64_HAFGRTR_EL2_AMEVTYPER112_EL0 0x80000000000ULL

#define AARCH64_HAFGRTR_EL2_AMEVCNTR113_EL0 0x100000000000ULL

#define AARCH64_HAFGRTR_EL2_AMEVTYPER113_EL0 0x200000000000ULL

#define AARCH64_HAFGRTR_EL2_AMEVCNTR114_EL0 0x400000000000ULL

#define AARCH64_HAFGRTR_EL2_AMEVTYPER114_EL0 0x800000000000ULL

#define AARCH64_HAFGRTR_EL2_AMEVCNTR115_EL0 0x1000000000000ULL

#define AARCH64_HAFGRTR_EL2_AMEVTYPER115_EL0 0x2000000000000ULL

/* Reads the HAFGRTR_EL2 system register via MRS. */
static inline uint64_t _AArch64_Read_hafgrtr_el2( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, HAFGRTR_EL2" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* Writes @a value to the HAFGRTR_EL2 system register via MSR. */
static inline void _AArch64_Write_hafgrtr_el2( uint64_t value )
{
  __asm__ volatile (
    "msr HAFGRTR_EL2, %0" : : "r" ( value ) : "memory"
  );
}
1884
/* HCR_EL2, Hypervisor Configuration Register */

#define AARCH64_HCR_EL2_VM 0x1U

#define AARCH64_HCR_EL2_SWIO 0x2U

#define AARCH64_HCR_EL2_PTW 0x4U

#define AARCH64_HCR_EL2_FMO 0x8U

#define AARCH64_HCR_EL2_IMO 0x10U

#define AARCH64_HCR_EL2_AMO 0x20U

#define AARCH64_HCR_EL2_VF 0x40U

#define AARCH64_HCR_EL2_VI 0x80U

#define AARCH64_HCR_EL2_VSE 0x100U

#define AARCH64_HCR_EL2_FB 0x200U

/* BSU, Barrier Shareability Upgrade, bits [11:10] */
#define AARCH64_HCR_EL2_BSU( _val ) ( ( _val ) << 10 )
#define AARCH64_HCR_EL2_BSU_SHIFT 10
#define AARCH64_HCR_EL2_BSU_MASK 0xc00U
#define AARCH64_HCR_EL2_BSU_GET( _reg ) \
  ( ( ( _reg ) >> 10 ) & 0x3U )

#define AARCH64_HCR_EL2_DC 0x1000U

#define AARCH64_HCR_EL2_TWI 0x2000U

#define AARCH64_HCR_EL2_TWE 0x4000U

#define AARCH64_HCR_EL2_TID0 0x8000U

#define AARCH64_HCR_EL2_TID1 0x10000U

#define AARCH64_HCR_EL2_TID2 0x20000U

#define AARCH64_HCR_EL2_TID3 0x40000U

#define AARCH64_HCR_EL2_TSC 0x80000U

#define AARCH64_HCR_EL2_TIDCP 0x100000U

#define AARCH64_HCR_EL2_TACR 0x200000U

#define AARCH64_HCR_EL2_TSW 0x400000U

#define AARCH64_HCR_EL2_TPCP 0x800000U

#define AARCH64_HCR_EL2_TPU 0x1000000U

#define AARCH64_HCR_EL2_TTLB 0x2000000U

#define AARCH64_HCR_EL2_TVM 0x4000000U

#define AARCH64_HCR_EL2_TGE 0x8000000U

#define AARCH64_HCR_EL2_TDZ 0x10000000U

#define AARCH64_HCR_EL2_HCD 0x20000000U

#define AARCH64_HCR_EL2_TRVM 0x40000000U

#define AARCH64_HCR_EL2_RW 0x80000000U

#define AARCH64_HCR_EL2_CD 0x100000000ULL

#define AARCH64_HCR_EL2_ID 0x200000000ULL

#define AARCH64_HCR_EL2_E2H 0x400000000ULL

#define AARCH64_HCR_EL2_TLOR 0x800000000ULL

#define AARCH64_HCR_EL2_TERR 0x1000000000ULL

#define AARCH64_HCR_EL2_TEA 0x2000000000ULL

#define AARCH64_HCR_EL2_MIOCNCE 0x4000000000ULL

#define AARCH64_HCR_EL2_APK 0x10000000000ULL

#define AARCH64_HCR_EL2_API 0x20000000000ULL

#define AARCH64_HCR_EL2_NV 0x40000000000ULL

#define AARCH64_HCR_EL2_NV1 0x80000000000ULL

#define AARCH64_HCR_EL2_AT 0x100000000000ULL

#define AARCH64_HCR_EL2_NV2 0x200000000000ULL

#define AARCH64_HCR_EL2_FWB 0x400000000000ULL

#define AARCH64_HCR_EL2_FIEN 0x800000000000ULL

#define AARCH64_HCR_EL2_TID4 0x2000000000000ULL

#define AARCH64_HCR_EL2_TICAB 0x4000000000000ULL

#define AARCH64_HCR_EL2_AMVOFFEN 0x8000000000000ULL

#define AARCH64_HCR_EL2_TOCU 0x10000000000000ULL

#define AARCH64_HCR_EL2_ENSCXT 0x20000000000000ULL

#define AARCH64_HCR_EL2_TTLBIS 0x40000000000000ULL

#define AARCH64_HCR_EL2_TTLBOS 0x80000000000000ULL

#define AARCH64_HCR_EL2_ATA 0x100000000000000ULL

#define AARCH64_HCR_EL2_DCT 0x200000000000000ULL

#define AARCH64_HCR_EL2_TID5 0x400000000000000ULL

#define AARCH64_HCR_EL2_TWEDEN 0x800000000000000ULL

/* TWEDEL, bits [63:60]; pass a 64-bit value to AARCH64_HCR_EL2_TWEDEL() */
#define AARCH64_HCR_EL2_TWEDEL( _val ) ( ( _val ) << 60 )
#define AARCH64_HCR_EL2_TWEDEL_SHIFT 60
#define AARCH64_HCR_EL2_TWEDEL_MASK 0xf000000000000000ULL
#define AARCH64_HCR_EL2_TWEDEL_GET( _reg ) \
  ( ( ( _reg ) >> 60 ) & 0xfULL )

/* Reads the HCR_EL2 system register via MRS. */
static inline uint64_t _AArch64_Read_hcr_el2( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, HCR_EL2" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* Writes @a value to the HCR_EL2 system register via MSR. */
static inline void _AArch64_Write_hcr_el2( uint64_t value )
{
  __asm__ volatile (
    "msr HCR_EL2, %0" : : "r" ( value ) : "memory"
  );
}
2028
/* HDFGRTR_EL2, Hypervisor Debug Fine-Grained Read Trap Register */

/* Single-bit read-trap enables; bits above 31 use ULL constants. */

#define AARCH64_HDFGRTR_EL2_DBGBCRN_EL1 0x1U

#define AARCH64_HDFGRTR_EL2_DBGBVRN_EL1 0x2U

#define AARCH64_HDFGRTR_EL2_DBGWCRN_EL1 0x4U

#define AARCH64_HDFGRTR_EL2_DBGWVRN_EL1 0x8U

#define AARCH64_HDFGRTR_EL2_MDSCR_EL1 0x10U

#define AARCH64_HDFGRTR_EL2_DBGCLAIM 0x20U

#define AARCH64_HDFGRTR_EL2_DBGAUTHSTATUS_EL1 0x40U

#define AARCH64_HDFGRTR_EL2_DBGPRCR_EL1 0x80U

#define AARCH64_HDFGRTR_EL2_OSLSR_EL1 0x200U

#define AARCH64_HDFGRTR_EL2_OSECCR_EL1 0x400U

#define AARCH64_HDFGRTR_EL2_OSDLR_EL1 0x800U

#define AARCH64_HDFGRTR_EL2_PMEVCNTRN_EL0 0x1000U

#define AARCH64_HDFGRTR_EL2_PMEVTYPERN_EL0 0x2000U

#define AARCH64_HDFGRTR_EL2_PMCCFILTR_EL0 0x4000U

#define AARCH64_HDFGRTR_EL2_PMCCNTR_EL0 0x8000U

#define AARCH64_HDFGRTR_EL2_PMCNTEN 0x10000U

#define AARCH64_HDFGRTR_EL2_PMINTEN 0x20000U

#define AARCH64_HDFGRTR_EL2_PMOVS 0x40000U

#define AARCH64_HDFGRTR_EL2_PMSELR_EL0 0x80000U

#define AARCH64_HDFGRTR_EL2_PMMIR_EL1 0x400000U

#define AARCH64_HDFGRTR_EL2_PMBLIMITR_EL1 0x800000U

#define AARCH64_HDFGRTR_EL2_PMBPTR_EL1 0x1000000U

#define AARCH64_HDFGRTR_EL2_PMBSR_EL1 0x2000000U

#define AARCH64_HDFGRTR_EL2_PMSCR_EL1 0x4000000U

#define AARCH64_HDFGRTR_EL2_PMSEVFR_EL1 0x8000000U

#define AARCH64_HDFGRTR_EL2_PMSFCR_EL1 0x10000000U

#define AARCH64_HDFGRTR_EL2_PMSICR_EL1 0x20000000U

#define AARCH64_HDFGRTR_EL2_PMSIDR_EL1 0x40000000U

#define AARCH64_HDFGRTR_EL2_PMSIRR_EL1 0x80000000U

#define AARCH64_HDFGRTR_EL2_PMSLATFR_EL1 0x100000000ULL

#define AARCH64_HDFGRTR_EL2_TRC 0x200000000ULL

#define AARCH64_HDFGRTR_EL2_TRCAUTHSTATUS 0x400000000ULL

#define AARCH64_HDFGRTR_EL2_TRCAUXCTLR 0x800000000ULL

#define AARCH64_HDFGRTR_EL2_TRCCLAIM 0x1000000000ULL

#define AARCH64_HDFGRTR_EL2_TRCCNTVRN 0x2000000000ULL

#define AARCH64_HDFGRTR_EL2_TRCID 0x10000000000ULL

#define AARCH64_HDFGRTR_EL2_TRCIMSPECN 0x20000000000ULL

#define AARCH64_HDFGRTR_EL2_TRCOSLSR 0x80000000000ULL

#define AARCH64_HDFGRTR_EL2_TRCPRGCTLR 0x100000000000ULL

#define AARCH64_HDFGRTR_EL2_TRCSEQSTR 0x200000000000ULL

#define AARCH64_HDFGRTR_EL2_TRCSSCSRN 0x400000000000ULL

#define AARCH64_HDFGRTR_EL2_TRCSTATR 0x800000000000ULL

#define AARCH64_HDFGRTR_EL2_TRCVICTLR 0x1000000000000ULL

#define AARCH64_HDFGRTR_EL2_PMUSERENR_EL0 0x200000000000000ULL

#define AARCH64_HDFGRTR_EL2_PMCEIDN_EL0 0x400000000000000ULL

/* Reads the HDFGRTR_EL2 system register via MRS. */
static inline uint64_t _AArch64_Read_hdfgrtr_el2( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, HDFGRTR_EL2" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* Writes @a value to the HDFGRTR_EL2 system register via MSR. */
static inline void _AArch64_Write_hdfgrtr_el2( uint64_t value )
{
  __asm__ volatile (
    "msr HDFGRTR_EL2, %0" : : "r" ( value ) : "memory"
  );
}
2138
2139/* HDFGWTR_EL2, Hypervisor Debug Fine-Grained Write Trap Register */
2140
2141#define AARCH64_HDFGWTR_EL2_DBGBCRN_EL1 0x1U
2142
2143#define AARCH64_HDFGWTR_EL2_DBGBVRN_EL1 0x2U
2144
2145#define AARCH64_HDFGWTR_EL2_DBGWCRN_EL1 0x4U
2146
2147#define AARCH64_HDFGWTR_EL2_DBGWVRN_EL1 0x8U
2148
2149#define AARCH64_HDFGWTR_EL2_MDSCR_EL1 0x10U
2150
2151#define AARCH64_HDFGWTR_EL2_DBGCLAIM 0x20U
2152
/*
 * Trap-enable flags for HDFGWTR_EL2 (continued from the section comment
 * above).  Each single-bit flag, when set, traps EL1/EL0 writes to the
 * like-named debug, PMU, statistical-profiling or trace register to EL2.
 * Gaps in the bit positions correspond to registers that are read-only and
 * therefore have no write-trap bit.
 */

#define AARCH64_HDFGWTR_EL2_DBGPRCR_EL1 0x80U

#define AARCH64_HDFGWTR_EL2_OSLAR_EL1 0x100U

#define AARCH64_HDFGWTR_EL2_OSECCR_EL1 0x400U

#define AARCH64_HDFGWTR_EL2_OSDLR_EL1 0x800U

#define AARCH64_HDFGWTR_EL2_PMEVCNTRN_EL0 0x1000U

#define AARCH64_HDFGWTR_EL2_PMEVTYPERN_EL0 0x2000U

#define AARCH64_HDFGWTR_EL2_PMCCFILTR_EL0 0x4000U

#define AARCH64_HDFGWTR_EL2_PMCCNTR_EL0 0x8000U

#define AARCH64_HDFGWTR_EL2_PMCNTEN 0x10000U

#define AARCH64_HDFGWTR_EL2_PMINTEN 0x20000U

#define AARCH64_HDFGWTR_EL2_PMOVS 0x40000U

#define AARCH64_HDFGWTR_EL2_PMSELR_EL0 0x80000U

#define AARCH64_HDFGWTR_EL2_PMSWINC_EL0 0x100000U

#define AARCH64_HDFGWTR_EL2_PMCR_EL0 0x200000U

#define AARCH64_HDFGWTR_EL2_PMBLIMITR_EL1 0x800000U

#define AARCH64_HDFGWTR_EL2_PMBPTR_EL1 0x1000000U

#define AARCH64_HDFGWTR_EL2_PMBSR_EL1 0x2000000U

#define AARCH64_HDFGWTR_EL2_PMSCR_EL1 0x4000000U

#define AARCH64_HDFGWTR_EL2_PMSEVFR_EL1 0x8000000U

#define AARCH64_HDFGWTR_EL2_PMSFCR_EL1 0x10000000U

#define AARCH64_HDFGWTR_EL2_PMSICR_EL1 0x20000000U

#define AARCH64_HDFGWTR_EL2_PMSIRR_EL1 0x80000000U

#define AARCH64_HDFGWTR_EL2_PMSLATFR_EL1 0x100000000ULL

#define AARCH64_HDFGWTR_EL2_TRC 0x200000000ULL

#define AARCH64_HDFGWTR_EL2_TRCAUXCTLR 0x800000000ULL

#define AARCH64_HDFGWTR_EL2_TRCCLAIM 0x1000000000ULL

#define AARCH64_HDFGWTR_EL2_TRCCNTVRN 0x2000000000ULL

#define AARCH64_HDFGWTR_EL2_TRCIMSPECN 0x20000000000ULL

#define AARCH64_HDFGWTR_EL2_TRCOSLAR 0x40000000000ULL

#define AARCH64_HDFGWTR_EL2_TRCPRGCTLR 0x100000000000ULL

#define AARCH64_HDFGWTR_EL2_TRCSEQSTR 0x200000000000ULL

#define AARCH64_HDFGWTR_EL2_TRCSSCSRN 0x400000000000ULL

#define AARCH64_HDFGWTR_EL2_TRCVICTLR 0x1000000000000ULL

#define AARCH64_HDFGWTR_EL2_TRFCR_EL1 0x2000000000000ULL

#define AARCH64_HDFGWTR_EL2_PMUSERENR_EL0 0x200000000000000ULL
2222
/* Returns the current contents of the HDFGWTR_EL2 system register. */
static inline uint64_t _AArch64_Read_hdfgwtr_el2( void )
{
  uint64_t reg_val;

  __asm__ volatile ( "mrs %0, HDFGWTR_EL2" : "=&r" ( reg_val ) : : "memory" );

  return reg_val;
}
2233
/* Loads the HDFGWTR_EL2 system register with the given value. */
static inline void _AArch64_Write_hdfgwtr_el2( uint64_t value )
{
  __asm__ volatile ( "msr HDFGWTR_EL2, %0" : : "r" ( value ) : "memory" );
}
2240
/* HFGITR_EL2, Hypervisor Fine-Grained Instruction Trap Register */

/*
 * Each single-bit flag, when set, traps execution at EL1/EL0 of the
 * like-named instruction (cache maintenance, address translation, TLB
 * invalidation, prediction restriction, ERET or SVC) to EL2.
 */

#define AARCH64_HFGITR_EL2_ICIALLUIS 0x1U

#define AARCH64_HFGITR_EL2_ICIALLU 0x2U

#define AARCH64_HFGITR_EL2_ICIVAU 0x4U

#define AARCH64_HFGITR_EL2_DCIVAC 0x8U

#define AARCH64_HFGITR_EL2_DCISW 0x10U

#define AARCH64_HFGITR_EL2_DCCSW 0x20U

#define AARCH64_HFGITR_EL2_DCCISW 0x40U

#define AARCH64_HFGITR_EL2_DCCVAU 0x80U

#define AARCH64_HFGITR_EL2_DCCVAP 0x100U

#define AARCH64_HFGITR_EL2_DCCVADP 0x200U

#define AARCH64_HFGITR_EL2_DCCIVAC 0x400U

#define AARCH64_HFGITR_EL2_DCZVA 0x800U

#define AARCH64_HFGITR_EL2_ATS1E1R 0x1000U

#define AARCH64_HFGITR_EL2_ATS1E1W 0x2000U

#define AARCH64_HFGITR_EL2_ATS1E0R 0x4000U

#define AARCH64_HFGITR_EL2_ATS1E0W 0x8000U

#define AARCH64_HFGITR_EL2_ATS1E1RP 0x10000U

#define AARCH64_HFGITR_EL2_ATS1E1WP 0x20000U

#define AARCH64_HFGITR_EL2_TLBIVMALLE1OS 0x40000U

#define AARCH64_HFGITR_EL2_TLBIVAE1OS 0x80000U

#define AARCH64_HFGITR_EL2_TLBIASIDE1OS 0x100000U

#define AARCH64_HFGITR_EL2_TLBIVAAE1OS 0x200000U

#define AARCH64_HFGITR_EL2_TLBIVALE1OS 0x400000U

#define AARCH64_HFGITR_EL2_TLBIVAALE1OS 0x800000U

#define AARCH64_HFGITR_EL2_TLBIRVAE1OS 0x1000000U

#define AARCH64_HFGITR_EL2_TLBIRVAAE1OS 0x2000000U

#define AARCH64_HFGITR_EL2_TLBIRVALE1OS 0x4000000U

#define AARCH64_HFGITR_EL2_TLBIRVAALE1OS 0x8000000U

#define AARCH64_HFGITR_EL2_TLBIVMALLE1IS 0x10000000U

#define AARCH64_HFGITR_EL2_TLBIVAE1IS 0x20000000U

#define AARCH64_HFGITR_EL2_TLBIASIDE1IS 0x40000000U

#define AARCH64_HFGITR_EL2_TLBIVAAE1IS 0x80000000U

#define AARCH64_HFGITR_EL2_TLBIVALE1IS 0x100000000ULL

#define AARCH64_HFGITR_EL2_TLBIVAALE1IS 0x200000000ULL

#define AARCH64_HFGITR_EL2_TLBIRVAE1IS 0x400000000ULL

#define AARCH64_HFGITR_EL2_TLBIRVAAE1IS 0x800000000ULL

#define AARCH64_HFGITR_EL2_TLBIRVALE1IS 0x1000000000ULL

#define AARCH64_HFGITR_EL2_TLBIRVAALE1IS 0x2000000000ULL

#define AARCH64_HFGITR_EL2_TLBIRVAE1 0x4000000000ULL

#define AARCH64_HFGITR_EL2_TLBIRVAAE1 0x8000000000ULL

#define AARCH64_HFGITR_EL2_TLBIRVALE1 0x10000000000ULL

#define AARCH64_HFGITR_EL2_TLBIRVAALE1 0x20000000000ULL

#define AARCH64_HFGITR_EL2_TLBIVMALLE1 0x40000000000ULL

#define AARCH64_HFGITR_EL2_TLBIVAE1 0x80000000000ULL

#define AARCH64_HFGITR_EL2_TLBIASIDE1 0x100000000000ULL

#define AARCH64_HFGITR_EL2_TLBIVAAE1 0x200000000000ULL

#define AARCH64_HFGITR_EL2_TLBIVALE1 0x400000000000ULL

#define AARCH64_HFGITR_EL2_TLBIVAALE1 0x800000000000ULL

#define AARCH64_HFGITR_EL2_CFPRCTX 0x1000000000000ULL

#define AARCH64_HFGITR_EL2_DVPRCTX 0x2000000000000ULL

#define AARCH64_HFGITR_EL2_CPPRCTX 0x4000000000000ULL

#define AARCH64_HFGITR_EL2_ERET 0x8000000000000ULL

#define AARCH64_HFGITR_EL2_SVC_EL0 0x10000000000000ULL

#define AARCH64_HFGITR_EL2_SVC_EL1 0x20000000000000ULL

#define AARCH64_HFGITR_EL2_DCCVAC 0x40000000000000ULL
2352
/* Returns the current contents of the HFGITR_EL2 system register. */
static inline uint64_t _AArch64_Read_hfgitr_el2( void )
{
  uint64_t reg_val;

  __asm__ volatile ( "mrs %0, HFGITR_EL2" : "=&r" ( reg_val ) : : "memory" );

  return reg_val;
}
2363
/* Loads the HFGITR_EL2 system register with the given value. */
static inline void _AArch64_Write_hfgitr_el2( uint64_t value )
{
  __asm__ volatile ( "msr HFGITR_EL2, %0" : : "r" ( value ) : "memory" );
}
2370
/* HFGRTR_EL2, Hypervisor Fine-Grained Read Trap Register */

/*
 * Each single-bit flag, when set, traps EL1/EL0 reads of the like-named
 * system register (or register group, for the APxxKEY pointer-authentication
 * keys and the ICC_IGRPEN<n>_EL1 pair) to EL2.
 */

#define AARCH64_HFGRTR_EL2_AFSR0_EL1 0x1U

#define AARCH64_HFGRTR_EL2_AFSR1_EL1 0x2U

#define AARCH64_HFGRTR_EL2_AIDR_EL1 0x4U

#define AARCH64_HFGRTR_EL2_AMAIR_EL1 0x8U

#define AARCH64_HFGRTR_EL2_APDAKEY 0x10U

#define AARCH64_HFGRTR_EL2_APDBKEY 0x20U

#define AARCH64_HFGRTR_EL2_APGAKEY 0x40U

#define AARCH64_HFGRTR_EL2_APIAKEY 0x80U

#define AARCH64_HFGRTR_EL2_APIBKEY 0x100U

#define AARCH64_HFGRTR_EL2_CCSIDR_EL1 0x200U

#define AARCH64_HFGRTR_EL2_CLIDR_EL1 0x400U

#define AARCH64_HFGRTR_EL2_CONTEXTIDR_EL1 0x800U

#define AARCH64_HFGRTR_EL2_CPACR_EL1 0x1000U

#define AARCH64_HFGRTR_EL2_CSSELR_EL1 0x2000U

#define AARCH64_HFGRTR_EL2_CTR_EL0 0x4000U

#define AARCH64_HFGRTR_EL2_DCZID_EL0 0x8000U

#define AARCH64_HFGRTR_EL2_ESR_EL1 0x10000U

#define AARCH64_HFGRTR_EL2_FAR_EL1 0x20000U

#define AARCH64_HFGRTR_EL2_ISR_EL1 0x40000U

#define AARCH64_HFGRTR_EL2_LORC_EL1 0x80000U

#define AARCH64_HFGRTR_EL2_LOREA_EL1 0x100000U

#define AARCH64_HFGRTR_EL2_LORID_EL1 0x200000U

#define AARCH64_HFGRTR_EL2_LORN_EL1 0x400000U

#define AARCH64_HFGRTR_EL2_LORSA_EL1 0x800000U

#define AARCH64_HFGRTR_EL2_MAIR_EL1 0x1000000U

#define AARCH64_HFGRTR_EL2_MIDR_EL1 0x2000000U

#define AARCH64_HFGRTR_EL2_MPIDR_EL1 0x4000000U

#define AARCH64_HFGRTR_EL2_PAR_EL1 0x8000000U

#define AARCH64_HFGRTR_EL2_REVIDR_EL1 0x10000000U

#define AARCH64_HFGRTR_EL2_SCTLR_EL1 0x20000000U

#define AARCH64_HFGRTR_EL2_SCXTNUM_EL1 0x40000000U

#define AARCH64_HFGRTR_EL2_SCXTNUM_EL0 0x80000000U

#define AARCH64_HFGRTR_EL2_TCR_EL1 0x100000000ULL

#define AARCH64_HFGRTR_EL2_TPIDR_EL1 0x200000000ULL

#define AARCH64_HFGRTR_EL2_TPIDRRO_EL0 0x400000000ULL

#define AARCH64_HFGRTR_EL2_TPIDR_EL0 0x800000000ULL

#define AARCH64_HFGRTR_EL2_TTBR0_EL1 0x1000000000ULL

#define AARCH64_HFGRTR_EL2_TTBR1_EL1 0x2000000000ULL

#define AARCH64_HFGRTR_EL2_VBAR_EL1 0x4000000000ULL

#define AARCH64_HFGRTR_EL2_ICC_IGRPENN_EL1 0x8000000000ULL

#define AARCH64_HFGRTR_EL2_ERRIDR_EL1 0x10000000000ULL

#define AARCH64_HFGRTR_EL2_ERRSELR_EL1 0x20000000000ULL

#define AARCH64_HFGRTR_EL2_ERXFR_EL1 0x40000000000ULL

#define AARCH64_HFGRTR_EL2_ERXCTLR_EL1 0x80000000000ULL

#define AARCH64_HFGRTR_EL2_ERXSTATUS_EL1 0x100000000000ULL

#define AARCH64_HFGRTR_EL2_ERXMISCN_EL1 0x200000000000ULL

#define AARCH64_HFGRTR_EL2_ERXPFGF_EL1 0x400000000000ULL

#define AARCH64_HFGRTR_EL2_ERXPFGCTL_EL1 0x800000000000ULL

#define AARCH64_HFGRTR_EL2_ERXPFGCDN_EL1 0x1000000000000ULL

#define AARCH64_HFGRTR_EL2_ERXADDR_EL1 0x2000000000000ULL
2472
/* Returns the current contents of the HFGRTR_EL2 system register. */
static inline uint64_t _AArch64_Read_hfgrtr_el2( void )
{
  uint64_t reg_val;

  __asm__ volatile ( "mrs %0, HFGRTR_EL2" : "=&r" ( reg_val ) : : "memory" );

  return reg_val;
}
2483
/* Loads the HFGRTR_EL2 system register with the given value. */
static inline void _AArch64_Write_hfgrtr_el2( uint64_t value )
{
  __asm__ volatile ( "msr HFGRTR_EL2, %0" : : "r" ( value ) : "memory" );
}
2490
/* HFGWTR_EL2, Hypervisor Fine-Grained Write Trap Register */

/*
 * Each single-bit flag, when set, traps EL1/EL0 writes of the like-named
 * system register to EL2.  The bit layout mirrors HFGRTR_EL2; positions that
 * correspond to read-only registers (e.g. MIDR_EL1, CTR_EL0) are absent here.
 */

#define AARCH64_HFGWTR_EL2_AFSR0_EL1 0x1U

#define AARCH64_HFGWTR_EL2_AFSR1_EL1 0x2U

#define AARCH64_HFGWTR_EL2_AMAIR_EL1 0x8U

#define AARCH64_HFGWTR_EL2_APDAKEY 0x10U

#define AARCH64_HFGWTR_EL2_APDBKEY 0x20U

#define AARCH64_HFGWTR_EL2_APGAKEY 0x40U

#define AARCH64_HFGWTR_EL2_APIAKEY 0x80U

#define AARCH64_HFGWTR_EL2_APIBKEY 0x100U

#define AARCH64_HFGWTR_EL2_CONTEXTIDR_EL1 0x800U

#define AARCH64_HFGWTR_EL2_CPACR_EL1 0x1000U

#define AARCH64_HFGWTR_EL2_CSSELR_EL1 0x2000U

#define AARCH64_HFGWTR_EL2_ESR_EL1 0x10000U

#define AARCH64_HFGWTR_EL2_FAR_EL1 0x20000U

#define AARCH64_HFGWTR_EL2_LORC_EL1 0x80000U

#define AARCH64_HFGWTR_EL2_LOREA_EL1 0x100000U

#define AARCH64_HFGWTR_EL2_LORN_EL1 0x400000U

#define AARCH64_HFGWTR_EL2_LORSA_EL1 0x800000U

#define AARCH64_HFGWTR_EL2_MAIR_EL1 0x1000000U

#define AARCH64_HFGWTR_EL2_PAR_EL1 0x8000000U

#define AARCH64_HFGWTR_EL2_SCTLR_EL1 0x20000000U

#define AARCH64_HFGWTR_EL2_SCXTNUM_EL1 0x40000000U

#define AARCH64_HFGWTR_EL2_SCXTNUM_EL0 0x80000000U

#define AARCH64_HFGWTR_EL2_TCR_EL1 0x100000000ULL

#define AARCH64_HFGWTR_EL2_TPIDR_EL1 0x200000000ULL

#define AARCH64_HFGWTR_EL2_TPIDRRO_EL0 0x400000000ULL

#define AARCH64_HFGWTR_EL2_TPIDR_EL0 0x800000000ULL

#define AARCH64_HFGWTR_EL2_TTBR0_EL1 0x1000000000ULL

#define AARCH64_HFGWTR_EL2_TTBR1_EL1 0x2000000000ULL

#define AARCH64_HFGWTR_EL2_VBAR_EL1 0x4000000000ULL

#define AARCH64_HFGWTR_EL2_ICC_IGRPENN_EL1 0x8000000000ULL

#define AARCH64_HFGWTR_EL2_ERRSELR_EL1 0x20000000000ULL

#define AARCH64_HFGWTR_EL2_ERXCTLR_EL1 0x80000000000ULL

#define AARCH64_HFGWTR_EL2_ERXSTATUS_EL1 0x100000000000ULL

#define AARCH64_HFGWTR_EL2_ERXMISCN_EL1 0x200000000000ULL

#define AARCH64_HFGWTR_EL2_ERXPFGCTL_EL1 0x800000000000ULL

#define AARCH64_HFGWTR_EL2_ERXPFGCDN_EL1 0x1000000000000ULL

#define AARCH64_HFGWTR_EL2_ERXADDR_EL1 0x2000000000000ULL
2566
/* Returns the current contents of the HFGWTR_EL2 system register. */
static inline uint64_t _AArch64_Read_hfgwtr_el2( void )
{
  uint64_t reg_val;

  __asm__ volatile ( "mrs %0, HFGWTR_EL2" : "=&r" ( reg_val ) : : "memory" );

  return reg_val;
}
2577
/* Loads the HFGWTR_EL2 system register with the given value. */
static inline void _AArch64_Write_hfgwtr_el2( uint64_t value )
{
  __asm__ volatile ( "msr HFGWTR_EL2, %0" : : "r" ( value ) : "memory" );
}
2584
/* HPFAR_EL2, Hypervisor IPA Fault Address Register */

/* FIPA[47:12], faulting intermediate physical address bits 47..12 */
#define AARCH64_HPFAR_EL2_FIPA_47_12_SHIFT 4
#define AARCH64_HPFAR_EL2_FIPA_47_12_MASK 0xfffffffff0ULL
#define AARCH64_HPFAR_EL2_FIPA_47_12( _val ) \
  ( ( _val ) << AARCH64_HPFAR_EL2_FIPA_47_12_SHIFT )
#define AARCH64_HPFAR_EL2_FIPA_47_12_GET( _reg ) \
  ( ( ( _reg ) >> AARCH64_HPFAR_EL2_FIPA_47_12_SHIFT ) & 0xfffffffffULL )

/* FIPA[51:48], faulting intermediate physical address bits 51..48 */
#define AARCH64_HPFAR_EL2_FIPA_51_48_SHIFT 40
#define AARCH64_HPFAR_EL2_FIPA_51_48_MASK 0xf0000000000ULL
#define AARCH64_HPFAR_EL2_FIPA_51_48( _val ) \
  ( ( _val ) << AARCH64_HPFAR_EL2_FIPA_51_48_SHIFT )
#define AARCH64_HPFAR_EL2_FIPA_51_48_GET( _reg ) \
  ( ( ( _reg ) >> AARCH64_HPFAR_EL2_FIPA_51_48_SHIFT ) & 0xfULL )

#define AARCH64_HPFAR_EL2_NS 0x8000000000000000ULL
2600
/* Returns the current contents of the HPFAR_EL2 system register. */
static inline uint64_t _AArch64_Read_hpfar_el2( void )
{
  uint64_t reg_val;

  __asm__ volatile ( "mrs %0, HPFAR_EL2" : "=&r" ( reg_val ) : : "memory" );

  return reg_val;
}
2611
/* Loads the HPFAR_EL2 system register with the given value. */
static inline void _AArch64_Write_hpfar_el2( uint64_t value )
{
  __asm__ volatile ( "msr HPFAR_EL2, %0" : : "r" ( value ) : "memory" );
}
2618
2619/* HSTR_EL2, Hypervisor System Trap Register */
2620
/* Returns the current contents of the HSTR_EL2 system register. */
static inline uint64_t _AArch64_Read_hstr_el2( void )
{
  uint64_t reg_val;

  __asm__ volatile ( "mrs %0, HSTR_EL2" : "=&r" ( reg_val ) : : "memory" );

  return reg_val;
}
2631
/* Loads the HSTR_EL2 system register with the given value. */
static inline void _AArch64_Write_hstr_el2( uint64_t value )
{
  __asm__ volatile ( "msr HSTR_EL2, %0" : : "r" ( value ) : "memory" );
}
2638
2639/* ID_AA64AFR0_EL1, AArch64 Auxiliary Feature Register 0 */
2640
/* Returns the current contents of the read-only ID_AA64AFR0_EL1 register. */
static inline uint64_t _AArch64_Read_id_aa64afr0_el1( void )
{
  uint64_t reg_val;

  __asm__ volatile ( "mrs %0, ID_AA64AFR0_EL1" : "=&r" ( reg_val ) : : "memory" );

  return reg_val;
}
2651
2652/* ID_AA64AFR1_EL1, AArch64 Auxiliary Feature Register 1 */
2653
/* Returns the current contents of the read-only ID_AA64AFR1_EL1 register. */
static inline uint64_t _AArch64_Read_id_aa64afr1_el1( void )
{
  uint64_t reg_val;

  __asm__ volatile ( "mrs %0, ID_AA64AFR1_EL1" : "=&r" ( reg_val ) : : "memory" );

  return reg_val;
}
2664
/* ID_AA64DFR0_EL1, AArch64 Debug Feature Register 0 */

/*
 * For each 4-bit field there are four helpers: FIELD( _val ) positions a
 * value in the field, FIELD_SHIFT is the bit offset, FIELD_MASK selects the
 * field, and FIELD_GET( _reg ) extracts it from a register value.
 */

#define AARCH64_ID_AA64DFR0_EL1_DEBUGVER( _val ) ( ( _val ) << 0 )
#define AARCH64_ID_AA64DFR0_EL1_DEBUGVER_SHIFT 0
#define AARCH64_ID_AA64DFR0_EL1_DEBUGVER_MASK 0xfU
#define AARCH64_ID_AA64DFR0_EL1_DEBUGVER_GET( _reg ) \
  ( ( ( _reg ) >> 0 ) & 0xfU )

#define AARCH64_ID_AA64DFR0_EL1_TRACEVER( _val ) ( ( _val ) << 4 )
#define AARCH64_ID_AA64DFR0_EL1_TRACEVER_SHIFT 4
#define AARCH64_ID_AA64DFR0_EL1_TRACEVER_MASK 0xf0U
#define AARCH64_ID_AA64DFR0_EL1_TRACEVER_GET( _reg ) \
  ( ( ( _reg ) >> 4 ) & 0xfU )

#define AARCH64_ID_AA64DFR0_EL1_PMUVER( _val ) ( ( _val ) << 8 )
#define AARCH64_ID_AA64DFR0_EL1_PMUVER_SHIFT 8
#define AARCH64_ID_AA64DFR0_EL1_PMUVER_MASK 0xf00U
#define AARCH64_ID_AA64DFR0_EL1_PMUVER_GET( _reg ) \
  ( ( ( _reg ) >> 8 ) & 0xfU )

#define AARCH64_ID_AA64DFR0_EL1_BRPS( _val ) ( ( _val ) << 12 )
#define AARCH64_ID_AA64DFR0_EL1_BRPS_SHIFT 12
#define AARCH64_ID_AA64DFR0_EL1_BRPS_MASK 0xf000U
#define AARCH64_ID_AA64DFR0_EL1_BRPS_GET( _reg ) \
  ( ( ( _reg ) >> 12 ) & 0xfU )

#define AARCH64_ID_AA64DFR0_EL1_WRPS( _val ) ( ( _val ) << 20 )
#define AARCH64_ID_AA64DFR0_EL1_WRPS_SHIFT 20
#define AARCH64_ID_AA64DFR0_EL1_WRPS_MASK 0xf00000U
#define AARCH64_ID_AA64DFR0_EL1_WRPS_GET( _reg ) \
  ( ( ( _reg ) >> 20 ) & 0xfU )

#define AARCH64_ID_AA64DFR0_EL1_CTX_CMPS( _val ) ( ( _val ) << 28 )
#define AARCH64_ID_AA64DFR0_EL1_CTX_CMPS_SHIFT 28
#define AARCH64_ID_AA64DFR0_EL1_CTX_CMPS_MASK 0xf0000000U
#define AARCH64_ID_AA64DFR0_EL1_CTX_CMPS_GET( _reg ) \
  ( ( ( _reg ) >> 28 ) & 0xfU )

#define AARCH64_ID_AA64DFR0_EL1_PMSVER( _val ) ( ( _val ) << 32 )
#define AARCH64_ID_AA64DFR0_EL1_PMSVER_SHIFT 32
#define AARCH64_ID_AA64DFR0_EL1_PMSVER_MASK 0xf00000000ULL
#define AARCH64_ID_AA64DFR0_EL1_PMSVER_GET( _reg ) \
  ( ( ( _reg ) >> 32 ) & 0xfULL )

#define AARCH64_ID_AA64DFR0_EL1_DOUBLELOCK( _val ) ( ( _val ) << 36 )
#define AARCH64_ID_AA64DFR0_EL1_DOUBLELOCK_SHIFT 36
#define AARCH64_ID_AA64DFR0_EL1_DOUBLELOCK_MASK 0xf000000000ULL
#define AARCH64_ID_AA64DFR0_EL1_DOUBLELOCK_GET( _reg ) \
  ( ( ( _reg ) >> 36 ) & 0xfULL )

#define AARCH64_ID_AA64DFR0_EL1_TRACEFILT( _val ) ( ( _val ) << 40 )
#define AARCH64_ID_AA64DFR0_EL1_TRACEFILT_SHIFT 40
#define AARCH64_ID_AA64DFR0_EL1_TRACEFILT_MASK 0xf0000000000ULL
#define AARCH64_ID_AA64DFR0_EL1_TRACEFILT_GET( _reg ) \
  ( ( ( _reg ) >> 40 ) & 0xfULL )

#define AARCH64_ID_AA64DFR0_EL1_MTPMU( _val ) ( ( _val ) << 48 )
#define AARCH64_ID_AA64DFR0_EL1_MTPMU_SHIFT 48
#define AARCH64_ID_AA64DFR0_EL1_MTPMU_MASK 0xf000000000000ULL
#define AARCH64_ID_AA64DFR0_EL1_MTPMU_GET( _reg ) \
  ( ( ( _reg ) >> 48 ) & 0xfULL )
2726
/* Returns the current contents of the read-only ID_AA64DFR0_EL1 register. */
static inline uint64_t _AArch64_Read_id_aa64dfr0_el1( void )
{
  uint64_t reg_val;

  __asm__ volatile ( "mrs %0, ID_AA64DFR0_EL1" : "=&r" ( reg_val ) : : "memory" );

  return reg_val;
}
2737
2738/* ID_AA64DFR1_EL1, AArch64 Debug Feature Register 1 */
2739
/* Returns the current contents of the read-only ID_AA64DFR1_EL1 register. */
static inline uint64_t _AArch64_Read_id_aa64dfr1_el1( void )
{
  uint64_t reg_val;

  __asm__ volatile ( "mrs %0, ID_AA64DFR1_EL1" : "=&r" ( reg_val ) : : "memory" );

  return reg_val;
}
2750
/* ID_AA64ISAR0_EL1, AArch64 Instruction Set Attribute Register 0 */

/*
 * For each 4-bit field there are four helpers: FIELD( _val ) positions a
 * value in the field, FIELD_SHIFT is the bit offset, FIELD_MASK selects the
 * field, and FIELD_GET( _reg ) extracts it from a register value.
 */

#define AARCH64_ID_AA64ISAR0_EL1_AES( _val ) ( ( _val ) << 4 )
#define AARCH64_ID_AA64ISAR0_EL1_AES_SHIFT 4
#define AARCH64_ID_AA64ISAR0_EL1_AES_MASK 0xf0U
#define AARCH64_ID_AA64ISAR0_EL1_AES_GET( _reg ) \
  ( ( ( _reg ) >> 4 ) & 0xfU )

#define AARCH64_ID_AA64ISAR0_EL1_SHA1( _val ) ( ( _val ) << 8 )
#define AARCH64_ID_AA64ISAR0_EL1_SHA1_SHIFT 8
#define AARCH64_ID_AA64ISAR0_EL1_SHA1_MASK 0xf00U
#define AARCH64_ID_AA64ISAR0_EL1_SHA1_GET( _reg ) \
  ( ( ( _reg ) >> 8 ) & 0xfU )

#define AARCH64_ID_AA64ISAR0_EL1_SHA2( _val ) ( ( _val ) << 12 )
#define AARCH64_ID_AA64ISAR0_EL1_SHA2_SHIFT 12
#define AARCH64_ID_AA64ISAR0_EL1_SHA2_MASK 0xf000U
#define AARCH64_ID_AA64ISAR0_EL1_SHA2_GET( _reg ) \
  ( ( ( _reg ) >> 12 ) & 0xfU )

#define AARCH64_ID_AA64ISAR0_EL1_CRC32( _val ) ( ( _val ) << 16 )
#define AARCH64_ID_AA64ISAR0_EL1_CRC32_SHIFT 16
#define AARCH64_ID_AA64ISAR0_EL1_CRC32_MASK 0xf0000U
#define AARCH64_ID_AA64ISAR0_EL1_CRC32_GET( _reg ) \
  ( ( ( _reg ) >> 16 ) & 0xfU )

#define AARCH64_ID_AA64ISAR0_EL1_ATOMIC( _val ) ( ( _val ) << 20 )
#define AARCH64_ID_AA64ISAR0_EL1_ATOMIC_SHIFT 20
#define AARCH64_ID_AA64ISAR0_EL1_ATOMIC_MASK 0xf00000U
#define AARCH64_ID_AA64ISAR0_EL1_ATOMIC_GET( _reg ) \
  ( ( ( _reg ) >> 20 ) & 0xfU )

#define AARCH64_ID_AA64ISAR0_EL1_RDM( _val ) ( ( _val ) << 28 )
#define AARCH64_ID_AA64ISAR0_EL1_RDM_SHIFT 28
#define AARCH64_ID_AA64ISAR0_EL1_RDM_MASK 0xf0000000U
#define AARCH64_ID_AA64ISAR0_EL1_RDM_GET( _reg ) \
  ( ( ( _reg ) >> 28 ) & 0xfU )

#define AARCH64_ID_AA64ISAR0_EL1_SHA3( _val ) ( ( _val ) << 32 )
#define AARCH64_ID_AA64ISAR0_EL1_SHA3_SHIFT 32
#define AARCH64_ID_AA64ISAR0_EL1_SHA3_MASK 0xf00000000ULL
#define AARCH64_ID_AA64ISAR0_EL1_SHA3_GET( _reg ) \
  ( ( ( _reg ) >> 32 ) & 0xfULL )

#define AARCH64_ID_AA64ISAR0_EL1_SM3( _val ) ( ( _val ) << 36 )
#define AARCH64_ID_AA64ISAR0_EL1_SM3_SHIFT 36
#define AARCH64_ID_AA64ISAR0_EL1_SM3_MASK 0xf000000000ULL
#define AARCH64_ID_AA64ISAR0_EL1_SM3_GET( _reg ) \
  ( ( ( _reg ) >> 36 ) & 0xfULL )

#define AARCH64_ID_AA64ISAR0_EL1_SM4( _val ) ( ( _val ) << 40 )
#define AARCH64_ID_AA64ISAR0_EL1_SM4_SHIFT 40
#define AARCH64_ID_AA64ISAR0_EL1_SM4_MASK 0xf0000000000ULL
#define AARCH64_ID_AA64ISAR0_EL1_SM4_GET( _reg ) \
  ( ( ( _reg ) >> 40 ) & 0xfULL )

#define AARCH64_ID_AA64ISAR0_EL1_DP( _val ) ( ( _val ) << 44 )
#define AARCH64_ID_AA64ISAR0_EL1_DP_SHIFT 44
#define AARCH64_ID_AA64ISAR0_EL1_DP_MASK 0xf00000000000ULL
#define AARCH64_ID_AA64ISAR0_EL1_DP_GET( _reg ) \
  ( ( ( _reg ) >> 44 ) & 0xfULL )

#define AARCH64_ID_AA64ISAR0_EL1_FHM( _val ) ( ( _val ) << 48 )
#define AARCH64_ID_AA64ISAR0_EL1_FHM_SHIFT 48
#define AARCH64_ID_AA64ISAR0_EL1_FHM_MASK 0xf000000000000ULL
#define AARCH64_ID_AA64ISAR0_EL1_FHM_GET( _reg ) \
  ( ( ( _reg ) >> 48 ) & 0xfULL )

#define AARCH64_ID_AA64ISAR0_EL1_TS( _val ) ( ( _val ) << 52 )
#define AARCH64_ID_AA64ISAR0_EL1_TS_SHIFT 52
#define AARCH64_ID_AA64ISAR0_EL1_TS_MASK 0xf0000000000000ULL
#define AARCH64_ID_AA64ISAR0_EL1_TS_GET( _reg ) \
  ( ( ( _reg ) >> 52 ) & 0xfULL )

#define AARCH64_ID_AA64ISAR0_EL1_TLB( _val ) ( ( _val ) << 56 )
#define AARCH64_ID_AA64ISAR0_EL1_TLB_SHIFT 56
#define AARCH64_ID_AA64ISAR0_EL1_TLB_MASK 0xf00000000000000ULL
#define AARCH64_ID_AA64ISAR0_EL1_TLB_GET( _reg ) \
  ( ( ( _reg ) >> 56 ) & 0xfULL )

#define AARCH64_ID_AA64ISAR0_EL1_RNDR( _val ) ( ( _val ) << 60 )
#define AARCH64_ID_AA64ISAR0_EL1_RNDR_SHIFT 60
#define AARCH64_ID_AA64ISAR0_EL1_RNDR_MASK 0xf000000000000000ULL
#define AARCH64_ID_AA64ISAR0_EL1_RNDR_GET( _reg ) \
  ( ( ( _reg ) >> 60 ) & 0xfULL )
2836
/* Returns the current contents of the read-only ID_AA64ISAR0_EL1 register. */
static inline uint64_t _AArch64_Read_id_aa64isar0_el1( void )
{
  uint64_t reg_val;

  __asm__ volatile ( "mrs %0, ID_AA64ISAR0_EL1" : "=&r" ( reg_val ) : : "memory" );

  return reg_val;
}
2847
/* ID_AA64ISAR1_EL1, AArch64 Instruction Set Attribute Register 1 */

/*
 * For each 4-bit field there are four helpers: FIELD( _val ) positions a
 * value in the field, FIELD_SHIFT is the bit offset, FIELD_MASK selects the
 * field, and FIELD_GET( _reg ) extracts it from a register value.
 */

#define AARCH64_ID_AA64ISAR1_EL1_DPB( _val ) ( ( _val ) << 0 )
#define AARCH64_ID_AA64ISAR1_EL1_DPB_SHIFT 0
#define AARCH64_ID_AA64ISAR1_EL1_DPB_MASK 0xfU
#define AARCH64_ID_AA64ISAR1_EL1_DPB_GET( _reg ) \
  ( ( ( _reg ) >> 0 ) & 0xfU )

#define AARCH64_ID_AA64ISAR1_EL1_APA( _val ) ( ( _val ) << 4 )
#define AARCH64_ID_AA64ISAR1_EL1_APA_SHIFT 4
#define AARCH64_ID_AA64ISAR1_EL1_APA_MASK 0xf0U
#define AARCH64_ID_AA64ISAR1_EL1_APA_GET( _reg ) \
  ( ( ( _reg ) >> 4 ) & 0xfU )

#define AARCH64_ID_AA64ISAR1_EL1_API( _val ) ( ( _val ) << 8 )
#define AARCH64_ID_AA64ISAR1_EL1_API_SHIFT 8
#define AARCH64_ID_AA64ISAR1_EL1_API_MASK 0xf00U
#define AARCH64_ID_AA64ISAR1_EL1_API_GET( _reg ) \
  ( ( ( _reg ) >> 8 ) & 0xfU )

#define AARCH64_ID_AA64ISAR1_EL1_JSCVT( _val ) ( ( _val ) << 12 )
#define AARCH64_ID_AA64ISAR1_EL1_JSCVT_SHIFT 12
#define AARCH64_ID_AA64ISAR1_EL1_JSCVT_MASK 0xf000U
#define AARCH64_ID_AA64ISAR1_EL1_JSCVT_GET( _reg ) \
  ( ( ( _reg ) >> 12 ) & 0xfU )

#define AARCH64_ID_AA64ISAR1_EL1_FCMA( _val ) ( ( _val ) << 16 )
#define AARCH64_ID_AA64ISAR1_EL1_FCMA_SHIFT 16
#define AARCH64_ID_AA64ISAR1_EL1_FCMA_MASK 0xf0000U
#define AARCH64_ID_AA64ISAR1_EL1_FCMA_GET( _reg ) \
  ( ( ( _reg ) >> 16 ) & 0xfU )

#define AARCH64_ID_AA64ISAR1_EL1_LRCPC( _val ) ( ( _val ) << 20 )
#define AARCH64_ID_AA64ISAR1_EL1_LRCPC_SHIFT 20
#define AARCH64_ID_AA64ISAR1_EL1_LRCPC_MASK 0xf00000U
#define AARCH64_ID_AA64ISAR1_EL1_LRCPC_GET( _reg ) \
  ( ( ( _reg ) >> 20 ) & 0xfU )

#define AARCH64_ID_AA64ISAR1_EL1_GPA( _val ) ( ( _val ) << 24 )
#define AARCH64_ID_AA64ISAR1_EL1_GPA_SHIFT 24
#define AARCH64_ID_AA64ISAR1_EL1_GPA_MASK 0xf000000U
#define AARCH64_ID_AA64ISAR1_EL1_GPA_GET( _reg ) \
  ( ( ( _reg ) >> 24 ) & 0xfU )

#define AARCH64_ID_AA64ISAR1_EL1_GPI( _val ) ( ( _val ) << 28 )
#define AARCH64_ID_AA64ISAR1_EL1_GPI_SHIFT 28
#define AARCH64_ID_AA64ISAR1_EL1_GPI_MASK 0xf0000000U
#define AARCH64_ID_AA64ISAR1_EL1_GPI_GET( _reg ) \
  ( ( ( _reg ) >> 28 ) & 0xfU )

#define AARCH64_ID_AA64ISAR1_EL1_FRINTTS( _val ) ( ( _val ) << 32 )
#define AARCH64_ID_AA64ISAR1_EL1_FRINTTS_SHIFT 32
#define AARCH64_ID_AA64ISAR1_EL1_FRINTTS_MASK 0xf00000000ULL
#define AARCH64_ID_AA64ISAR1_EL1_FRINTTS_GET( _reg ) \
  ( ( ( _reg ) >> 32 ) & 0xfULL )

#define AARCH64_ID_AA64ISAR1_EL1_SB( _val ) ( ( _val ) << 36 )
#define AARCH64_ID_AA64ISAR1_EL1_SB_SHIFT 36
#define AARCH64_ID_AA64ISAR1_EL1_SB_MASK 0xf000000000ULL
#define AARCH64_ID_AA64ISAR1_EL1_SB_GET( _reg ) \
  ( ( ( _reg ) >> 36 ) & 0xfULL )

#define AARCH64_ID_AA64ISAR1_EL1_SPECRES( _val ) ( ( _val ) << 40 )
#define AARCH64_ID_AA64ISAR1_EL1_SPECRES_SHIFT 40
#define AARCH64_ID_AA64ISAR1_EL1_SPECRES_MASK 0xf0000000000ULL
#define AARCH64_ID_AA64ISAR1_EL1_SPECRES_GET( _reg ) \
  ( ( ( _reg ) >> 40 ) & 0xfULL )

#define AARCH64_ID_AA64ISAR1_EL1_BF16( _val ) ( ( _val ) << 44 )
#define AARCH64_ID_AA64ISAR1_EL1_BF16_SHIFT 44
#define AARCH64_ID_AA64ISAR1_EL1_BF16_MASK 0xf00000000000ULL
#define AARCH64_ID_AA64ISAR1_EL1_BF16_GET( _reg ) \
  ( ( ( _reg ) >> 44 ) & 0xfULL )

#define AARCH64_ID_AA64ISAR1_EL1_DGH( _val ) ( ( _val ) << 48 )
#define AARCH64_ID_AA64ISAR1_EL1_DGH_SHIFT 48
#define AARCH64_ID_AA64ISAR1_EL1_DGH_MASK 0xf000000000000ULL
#define AARCH64_ID_AA64ISAR1_EL1_DGH_GET( _reg ) \
  ( ( ( _reg ) >> 48 ) & 0xfULL )

#define AARCH64_ID_AA64ISAR1_EL1_I8MM( _val ) ( ( _val ) << 52 )
#define AARCH64_ID_AA64ISAR1_EL1_I8MM_SHIFT 52
#define AARCH64_ID_AA64ISAR1_EL1_I8MM_MASK 0xf0000000000000ULL
#define AARCH64_ID_AA64ISAR1_EL1_I8MM_GET( _reg ) \
  ( ( ( _reg ) >> 52 ) & 0xfULL )
2933
/* Returns the current contents of the read-only ID_AA64ISAR1_EL1 register. */
static inline uint64_t _AArch64_Read_id_aa64isar1_el1( void )
{
  uint64_t reg_val;

  __asm__ volatile ( "mrs %0, ID_AA64ISAR1_EL1" : "=&r" ( reg_val ) : : "memory" );

  return reg_val;
}
2944
2945/* ID_AA64MMFR0_EL1, AArch64 Memory Model Feature Register 0 */
2946
2947#define AARCH64_ID_AA64MMFR0_EL1_PARANGE( _val ) ( ( _val ) << 0 )
2948#define AARCH64_ID_AA64MMFR0_EL1_PARANGE_SHIFT 0
2949#define AARCH64_ID_AA64MMFR0_EL1_PARANGE_MASK 0xfU
2950#define AARCH64_ID_AA64MMFR0_EL1_PARANGE_GET( _reg ) \
2951 ( ( ( _reg ) >> 0 ) & 0xfU )
2952
2953#define AARCH64_ID_AA64MMFR0_EL1_ASIDBITS( _val ) ( ( _val ) << 4 )
2954#define AARCH64_ID_AA64MMFR0_EL1_ASIDBITS_SHIFT 4
2955#define AARCH64_ID_AA64MMFR0_EL1_ASIDBITS_MASK 0xf0U
2956#define AARCH64_ID_AA64MMFR0_EL1_ASIDBITS_GET( _reg ) \
2957 ( ( ( _reg ) >> 4 ) & 0xfU )
2958
2959#define AARCH64_ID_AA64MMFR0_EL1_BIGEND( _val ) ( ( _val ) << 8 )
2960#define AARCH64_ID_AA64MMFR0_EL1_BIGEND_SHIFT 8
2961#define AARCH64_ID_AA64MMFR0_EL1_BIGEND_MASK 0xf00U
2962#define AARCH64_ID_AA64MMFR0_EL1_BIGEND_GET( _reg ) \
2963 ( ( ( _reg ) >> 8 ) & 0xfU )
2964
2965#define AARCH64_ID_AA64MMFR0_EL1_SNSMEM( _val ) ( ( _val ) << 12 )
2966#define AARCH64_ID_AA64MMFR0_EL1_SNSMEM_SHIFT 12
2967#define AARCH64_ID_AA64MMFR0_EL1_SNSMEM_MASK 0xf000U
2968#define AARCH64_ID_AA64MMFR0_EL1_SNSMEM_GET( _reg ) \
2969 ( ( ( _reg ) >> 12 ) & 0xfU )
2970
2971#define AARCH64_ID_AA64MMFR0_EL1_BIGENDEL0( _val ) ( ( _val ) << 16 )
2972#define AARCH64_ID_AA64MMFR0_EL1_BIGENDEL0_SHIFT 16
2973#define AARCH64_ID_AA64MMFR0_EL1_BIGENDEL0_MASK 0xf0000U
2974#define AARCH64_ID_AA64MMFR0_EL1_BIGENDEL0_GET( _reg ) \
2975 ( ( ( _reg ) >> 16 ) & 0xfU )
2976
2977#define AARCH64_ID_AA64MMFR0_EL1_TGRAN16( _val ) ( ( _val ) << 20 )
2978#define AARCH64_ID_AA64MMFR0_EL1_TGRAN16_SHIFT 20
2979#define AARCH64_ID_AA64MMFR0_EL1_TGRAN16_MASK 0xf00000U
2980#define AARCH64_ID_AA64MMFR0_EL1_TGRAN16_GET( _reg ) \
2981 ( ( ( _reg ) >> 20 ) & 0xfU )
2982
2983#define AARCH64_ID_AA64MMFR0_EL1_TGRAN64( _val ) ( ( _val ) << 24 )
2984#define AARCH64_ID_AA64MMFR0_EL1_TGRAN64_SHIFT 24
2985#define AARCH64_ID_AA64MMFR0_EL1_TGRAN64_MASK 0xf000000U
2986#define AARCH64_ID_AA64MMFR0_EL1_TGRAN64_GET( _reg ) \
2987 ( ( ( _reg ) >> 24 ) & 0xfU )
2988
2989#define AARCH64_ID_AA64MMFR0_EL1_TGRAN4( _val ) ( ( _val ) << 28 )
2990#define AARCH64_ID_AA64MMFR0_EL1_TGRAN4_SHIFT 28
2991#define AARCH64_ID_AA64MMFR0_EL1_TGRAN4_MASK 0xf0000000U
2992#define AARCH64_ID_AA64MMFR0_EL1_TGRAN4_GET( _reg ) \
2993 ( ( ( _reg ) >> 28 ) & 0xfU )
2994
2995#define AARCH64_ID_AA64MMFR0_EL1_TGRAN16_2( _val ) ( ( _val ) << 32 )
2996#define AARCH64_ID_AA64MMFR0_EL1_TGRAN16_2_SHIFT 32
2997#define AARCH64_ID_AA64MMFR0_EL1_TGRAN16_2_MASK 0xf00000000ULL
2998#define AARCH64_ID_AA64MMFR0_EL1_TGRAN16_2_GET( _reg ) \
2999 ( ( ( _reg ) >> 32 ) & 0xfULL )
3000
3001#define AARCH64_ID_AA64MMFR0_EL1_TGRAN64_2( _val ) ( ( _val ) << 36 )
3002#define AARCH64_ID_AA64MMFR0_EL1_TGRAN64_2_SHIFT 36
3003#define AARCH64_ID_AA64MMFR0_EL1_TGRAN64_2_MASK 0xf000000000ULL
3004#define AARCH64_ID_AA64MMFR0_EL1_TGRAN64_2_GET( _reg ) \
3005 ( ( ( _reg ) >> 36 ) & 0xfULL )
3006
3007#define AARCH64_ID_AA64MMFR0_EL1_TGRAN4_2( _val ) ( ( _val ) << 40 )
3008#define AARCH64_ID_AA64MMFR0_EL1_TGRAN4_2_SHIFT 40
3009#define AARCH64_ID_AA64MMFR0_EL1_TGRAN4_2_MASK 0xf0000000000ULL
3010#define AARCH64_ID_AA64MMFR0_EL1_TGRAN4_2_GET( _reg ) \
3011 ( ( ( _reg ) >> 40 ) & 0xfULL )
3012
3013#define AARCH64_ID_AA64MMFR0_EL1_EXS( _val ) ( ( _val ) << 44 )
3014#define AARCH64_ID_AA64MMFR0_EL1_EXS_SHIFT 44
3015#define AARCH64_ID_AA64MMFR0_EL1_EXS_MASK 0xf00000000000ULL
3016#define AARCH64_ID_AA64MMFR0_EL1_EXS_GET( _reg ) \
3017 ( ( ( _reg ) >> 44 ) & 0xfULL )
3018
3019#define AARCH64_ID_AA64MMFR0_EL1_FGT( _val ) ( ( _val ) << 56 )
3020#define AARCH64_ID_AA64MMFR0_EL1_FGT_SHIFT 56
3021#define AARCH64_ID_AA64MMFR0_EL1_FGT_MASK 0xf00000000000000ULL
3022#define AARCH64_ID_AA64MMFR0_EL1_FGT_GET( _reg ) \
3023 ( ( ( _reg ) >> 56 ) & 0xfULL )
3024
3025#define AARCH64_ID_AA64MMFR0_EL1_ECV( _val ) ( ( _val ) << 60 )
3026#define AARCH64_ID_AA64MMFR0_EL1_ECV_SHIFT 60
3027#define AARCH64_ID_AA64MMFR0_EL1_ECV_MASK 0xf000000000000000ULL
3028#define AARCH64_ID_AA64MMFR0_EL1_ECV_GET( _reg ) \
3029 ( ( ( _reg ) >> 60 ) & 0xfULL )
3030
/* Returns the current contents of the read-only ID_AA64MMFR0_EL1 register. */
static inline uint64_t _AArch64_Read_id_aa64mmfr0_el1( void )
{
  uint64_t reg_val;

  __asm__ volatile ( "mrs %0, ID_AA64MMFR0_EL1" : "=&r" ( reg_val ) : : "memory" );

  return reg_val;
}
3041
/* ID_AA64MMFR1_EL1, AArch64 Memory Model Feature Register 1 */

/*
 * For each 4-bit field there are four helpers: FIELD( _val ) positions a
 * value in the field, FIELD_SHIFT is the bit offset, FIELD_MASK selects the
 * field, and FIELD_GET( _reg ) extracts it from a register value.
 */

#define AARCH64_ID_AA64MMFR1_EL1_HAFDBS( _val ) ( ( _val ) << 0 )
#define AARCH64_ID_AA64MMFR1_EL1_HAFDBS_SHIFT 0
#define AARCH64_ID_AA64MMFR1_EL1_HAFDBS_MASK 0xfU
#define AARCH64_ID_AA64MMFR1_EL1_HAFDBS_GET( _reg ) \
  ( ( ( _reg ) >> 0 ) & 0xfU )

#define AARCH64_ID_AA64MMFR1_EL1_VMIDBITS( _val ) ( ( _val ) << 4 )
#define AARCH64_ID_AA64MMFR1_EL1_VMIDBITS_SHIFT 4
#define AARCH64_ID_AA64MMFR1_EL1_VMIDBITS_MASK 0xf0U
#define AARCH64_ID_AA64MMFR1_EL1_VMIDBITS_GET( _reg ) \
  ( ( ( _reg ) >> 4 ) & 0xfU )

#define AARCH64_ID_AA64MMFR1_EL1_VH( _val ) ( ( _val ) << 8 )
#define AARCH64_ID_AA64MMFR1_EL1_VH_SHIFT 8
#define AARCH64_ID_AA64MMFR1_EL1_VH_MASK 0xf00U
#define AARCH64_ID_AA64MMFR1_EL1_VH_GET( _reg ) \
  ( ( ( _reg ) >> 8 ) & 0xfU )

#define AARCH64_ID_AA64MMFR1_EL1_HPDS( _val ) ( ( _val ) << 12 )
#define AARCH64_ID_AA64MMFR1_EL1_HPDS_SHIFT 12
#define AARCH64_ID_AA64MMFR1_EL1_HPDS_MASK 0xf000U
#define AARCH64_ID_AA64MMFR1_EL1_HPDS_GET( _reg ) \
  ( ( ( _reg ) >> 12 ) & 0xfU )

#define AARCH64_ID_AA64MMFR1_EL1_LO( _val ) ( ( _val ) << 16 )
#define AARCH64_ID_AA64MMFR1_EL1_LO_SHIFT 16
#define AARCH64_ID_AA64MMFR1_EL1_LO_MASK 0xf0000U
#define AARCH64_ID_AA64MMFR1_EL1_LO_GET( _reg ) \
  ( ( ( _reg ) >> 16 ) & 0xfU )

#define AARCH64_ID_AA64MMFR1_EL1_PAN( _val ) ( ( _val ) << 20 )
#define AARCH64_ID_AA64MMFR1_EL1_PAN_SHIFT 20
#define AARCH64_ID_AA64MMFR1_EL1_PAN_MASK 0xf00000U
#define AARCH64_ID_AA64MMFR1_EL1_PAN_GET( _reg ) \
  ( ( ( _reg ) >> 20 ) & 0xfU )

#define AARCH64_ID_AA64MMFR1_EL1_SPECSEI( _val ) ( ( _val ) << 24 )
#define AARCH64_ID_AA64MMFR1_EL1_SPECSEI_SHIFT 24
#define AARCH64_ID_AA64MMFR1_EL1_SPECSEI_MASK 0xf000000U
#define AARCH64_ID_AA64MMFR1_EL1_SPECSEI_GET( _reg ) \
  ( ( ( _reg ) >> 24 ) & 0xfU )

#define AARCH64_ID_AA64MMFR1_EL1_XNX( _val ) ( ( _val ) << 28 )
#define AARCH64_ID_AA64MMFR1_EL1_XNX_SHIFT 28
#define AARCH64_ID_AA64MMFR1_EL1_XNX_MASK 0xf0000000U
#define AARCH64_ID_AA64MMFR1_EL1_XNX_GET( _reg ) \
  ( ( ( _reg ) >> 28 ) & 0xfU )

#define AARCH64_ID_AA64MMFR1_EL1_TWED( _val ) ( ( _val ) << 32 )
#define AARCH64_ID_AA64MMFR1_EL1_TWED_SHIFT 32
#define AARCH64_ID_AA64MMFR1_EL1_TWED_MASK 0xf00000000ULL
#define AARCH64_ID_AA64MMFR1_EL1_TWED_GET( _reg ) \
  ( ( ( _reg ) >> 32 ) & 0xfULL )

#define AARCH64_ID_AA64MMFR1_EL1_ETS( _val ) ( ( _val ) << 36 )
#define AARCH64_ID_AA64MMFR1_EL1_ETS_SHIFT 36
#define AARCH64_ID_AA64MMFR1_EL1_ETS_MASK 0xf000000000ULL
#define AARCH64_ID_AA64MMFR1_EL1_ETS_GET( _reg ) \
  ( ( ( _reg ) >> 36 ) & 0xfULL )
3103
/* Returns the current contents of the read-only ID_AA64MMFR1_EL1 register. */
static inline uint64_t _AArch64_Read_id_aa64mmfr1_el1( void )
{
  uint64_t reg_val;

  __asm__ volatile ( "mrs %0, ID_AA64MMFR1_EL1" : "=&r" ( reg_val ) : : "memory" );

  return reg_val;
}
3114
/*
 * ID_AA64MMFR2_EL1, AArch64 Memory Model Feature Register 2.
 *
 * Each 4-bit field has a setter, a _SHIFT, a _MASK, and a _GET accessor.
 * The setters cast _val to uint64_t before shifting: shifting a plain int
 * by 32 or more bits, or into bit 31, is undefined behavior (CERT INT34-C).
 */

#define AARCH64_ID_AA64MMFR2_EL1_CNP( _val ) ( ( uint64_t ) ( _val ) << 0 )
#define AARCH64_ID_AA64MMFR2_EL1_CNP_SHIFT 0
#define AARCH64_ID_AA64MMFR2_EL1_CNP_MASK 0xfU
#define AARCH64_ID_AA64MMFR2_EL1_CNP_GET( _reg ) \
  ( ( ( _reg ) >> 0 ) & 0xfU )

#define AARCH64_ID_AA64MMFR2_EL1_UAO( _val ) ( ( uint64_t ) ( _val ) << 4 )
#define AARCH64_ID_AA64MMFR2_EL1_UAO_SHIFT 4
#define AARCH64_ID_AA64MMFR2_EL1_UAO_MASK 0xf0U
#define AARCH64_ID_AA64MMFR2_EL1_UAO_GET( _reg ) \
  ( ( ( _reg ) >> 4 ) & 0xfU )

#define AARCH64_ID_AA64MMFR2_EL1_LSM( _val ) ( ( uint64_t ) ( _val ) << 8 )
#define AARCH64_ID_AA64MMFR2_EL1_LSM_SHIFT 8
#define AARCH64_ID_AA64MMFR2_EL1_LSM_MASK 0xf00U
#define AARCH64_ID_AA64MMFR2_EL1_LSM_GET( _reg ) \
  ( ( ( _reg ) >> 8 ) & 0xfU )

#define AARCH64_ID_AA64MMFR2_EL1_IESB( _val ) ( ( uint64_t ) ( _val ) << 12 )
#define AARCH64_ID_AA64MMFR2_EL1_IESB_SHIFT 12
#define AARCH64_ID_AA64MMFR2_EL1_IESB_MASK 0xf000U
#define AARCH64_ID_AA64MMFR2_EL1_IESB_GET( _reg ) \
  ( ( ( _reg ) >> 12 ) & 0xfU )

#define AARCH64_ID_AA64MMFR2_EL1_VARANGE( _val ) ( ( uint64_t ) ( _val ) << 16 )
#define AARCH64_ID_AA64MMFR2_EL1_VARANGE_SHIFT 16
#define AARCH64_ID_AA64MMFR2_EL1_VARANGE_MASK 0xf0000U
#define AARCH64_ID_AA64MMFR2_EL1_VARANGE_GET( _reg ) \
  ( ( ( _reg ) >> 16 ) & 0xfU )

#define AARCH64_ID_AA64MMFR2_EL1_CCIDX( _val ) ( ( uint64_t ) ( _val ) << 20 )
#define AARCH64_ID_AA64MMFR2_EL1_CCIDX_SHIFT 20
#define AARCH64_ID_AA64MMFR2_EL1_CCIDX_MASK 0xf00000U
#define AARCH64_ID_AA64MMFR2_EL1_CCIDX_GET( _reg ) \
  ( ( ( _reg ) >> 20 ) & 0xfU )

#define AARCH64_ID_AA64MMFR2_EL1_NV( _val ) ( ( uint64_t ) ( _val ) << 24 )
#define AARCH64_ID_AA64MMFR2_EL1_NV_SHIFT 24
#define AARCH64_ID_AA64MMFR2_EL1_NV_MASK 0xf000000U
#define AARCH64_ID_AA64MMFR2_EL1_NV_GET( _reg ) \
  ( ( ( _reg ) >> 24 ) & 0xfU )

#define AARCH64_ID_AA64MMFR2_EL1_ST( _val ) ( ( uint64_t ) ( _val ) << 28 )
#define AARCH64_ID_AA64MMFR2_EL1_ST_SHIFT 28
#define AARCH64_ID_AA64MMFR2_EL1_ST_MASK 0xf0000000U
#define AARCH64_ID_AA64MMFR2_EL1_ST_GET( _reg ) \
  ( ( ( _reg ) >> 28 ) & 0xfU )

#define AARCH64_ID_AA64MMFR2_EL1_AT( _val ) ( ( uint64_t ) ( _val ) << 32 )
#define AARCH64_ID_AA64MMFR2_EL1_AT_SHIFT 32
#define AARCH64_ID_AA64MMFR2_EL1_AT_MASK 0xf00000000ULL
#define AARCH64_ID_AA64MMFR2_EL1_AT_GET( _reg ) \
  ( ( ( _reg ) >> 32 ) & 0xfULL )

#define AARCH64_ID_AA64MMFR2_EL1_IDS( _val ) ( ( uint64_t ) ( _val ) << 36 )
#define AARCH64_ID_AA64MMFR2_EL1_IDS_SHIFT 36
#define AARCH64_ID_AA64MMFR2_EL1_IDS_MASK 0xf000000000ULL
#define AARCH64_ID_AA64MMFR2_EL1_IDS_GET( _reg ) \
  ( ( ( _reg ) >> 36 ) & 0xfULL )

#define AARCH64_ID_AA64MMFR2_EL1_FWB( _val ) ( ( uint64_t ) ( _val ) << 40 )
#define AARCH64_ID_AA64MMFR2_EL1_FWB_SHIFT 40
#define AARCH64_ID_AA64MMFR2_EL1_FWB_MASK 0xf0000000000ULL
#define AARCH64_ID_AA64MMFR2_EL1_FWB_GET( _reg ) \
  ( ( ( _reg ) >> 40 ) & 0xfULL )

#define AARCH64_ID_AA64MMFR2_EL1_TTL( _val ) ( ( uint64_t ) ( _val ) << 48 )
#define AARCH64_ID_AA64MMFR2_EL1_TTL_SHIFT 48
#define AARCH64_ID_AA64MMFR2_EL1_TTL_MASK 0xf000000000000ULL
#define AARCH64_ID_AA64MMFR2_EL1_TTL_GET( _reg ) \
  ( ( ( _reg ) >> 48 ) & 0xfULL )

#define AARCH64_ID_AA64MMFR2_EL1_BBM( _val ) ( ( uint64_t ) ( _val ) << 52 )
#define AARCH64_ID_AA64MMFR2_EL1_BBM_SHIFT 52
#define AARCH64_ID_AA64MMFR2_EL1_BBM_MASK 0xf0000000000000ULL
#define AARCH64_ID_AA64MMFR2_EL1_BBM_GET( _reg ) \
  ( ( ( _reg ) >> 52 ) & 0xfULL )

#define AARCH64_ID_AA64MMFR2_EL1_EVT( _val ) ( ( uint64_t ) ( _val ) << 56 )
#define AARCH64_ID_AA64MMFR2_EL1_EVT_SHIFT 56
#define AARCH64_ID_AA64MMFR2_EL1_EVT_MASK 0xf00000000000000ULL
#define AARCH64_ID_AA64MMFR2_EL1_EVT_GET( _reg ) \
  ( ( ( _reg ) >> 56 ) & 0xfULL )

#define AARCH64_ID_AA64MMFR2_EL1_E0PD( _val ) ( ( uint64_t ) ( _val ) << 60 )
#define AARCH64_ID_AA64MMFR2_EL1_E0PD_SHIFT 60
#define AARCH64_ID_AA64MMFR2_EL1_E0PD_MASK 0xf000000000000000ULL
#define AARCH64_ID_AA64MMFR2_EL1_E0PD_GET( _reg ) \
  ( ( ( _reg ) >> 60 ) & 0xfULL )
3206
/* Return the current value of the ID_AA64MMFR2_EL1 system register. */
static inline uint64_t _AArch64_Read_id_aa64mmfr2_el1( void )
{
  uint64_t result;

  __asm__ volatile ( "mrs %0, ID_AA64MMFR2_EL1" : "=&r" ( result ) : : "memory" );

  return result;
}
3217
/*
 * ID_AA64PFR0_EL1, AArch64 Processor Feature Register 0.
 *
 * Each 4-bit field has a setter, a _SHIFT, a _MASK, and a _GET accessor.
 * The setters cast _val to uint64_t before shifting: shifting a plain int
 * by 32 or more bits, or into bit 31, is undefined behavior (CERT INT34-C).
 */

#define AARCH64_ID_AA64PFR0_EL1_EL0( _val ) ( ( uint64_t ) ( _val ) << 0 )
#define AARCH64_ID_AA64PFR0_EL1_EL0_SHIFT 0
#define AARCH64_ID_AA64PFR0_EL1_EL0_MASK 0xfU
#define AARCH64_ID_AA64PFR0_EL1_EL0_GET( _reg ) \
  ( ( ( _reg ) >> 0 ) & 0xfU )

#define AARCH64_ID_AA64PFR0_EL1_EL1( _val ) ( ( uint64_t ) ( _val ) << 4 )
#define AARCH64_ID_AA64PFR0_EL1_EL1_SHIFT 4
#define AARCH64_ID_AA64PFR0_EL1_EL1_MASK 0xf0U
#define AARCH64_ID_AA64PFR0_EL1_EL1_GET( _reg ) \
  ( ( ( _reg ) >> 4 ) & 0xfU )

#define AARCH64_ID_AA64PFR0_EL1_EL2( _val ) ( ( uint64_t ) ( _val ) << 8 )
#define AARCH64_ID_AA64PFR0_EL1_EL2_SHIFT 8
#define AARCH64_ID_AA64PFR0_EL1_EL2_MASK 0xf00U
#define AARCH64_ID_AA64PFR0_EL1_EL2_GET( _reg ) \
  ( ( ( _reg ) >> 8 ) & 0xfU )

#define AARCH64_ID_AA64PFR0_EL1_EL3( _val ) ( ( uint64_t ) ( _val ) << 12 )
#define AARCH64_ID_AA64PFR0_EL1_EL3_SHIFT 12
#define AARCH64_ID_AA64PFR0_EL1_EL3_MASK 0xf000U
#define AARCH64_ID_AA64PFR0_EL1_EL3_GET( _reg ) \
  ( ( ( _reg ) >> 12 ) & 0xfU )

#define AARCH64_ID_AA64PFR0_EL1_FP( _val ) ( ( uint64_t ) ( _val ) << 16 )
#define AARCH64_ID_AA64PFR0_EL1_FP_SHIFT 16
#define AARCH64_ID_AA64PFR0_EL1_FP_MASK 0xf0000U
#define AARCH64_ID_AA64PFR0_EL1_FP_GET( _reg ) \
  ( ( ( _reg ) >> 16 ) & 0xfU )

#define AARCH64_ID_AA64PFR0_EL1_ADVSIMD( _val ) ( ( uint64_t ) ( _val ) << 20 )
#define AARCH64_ID_AA64PFR0_EL1_ADVSIMD_SHIFT 20
#define AARCH64_ID_AA64PFR0_EL1_ADVSIMD_MASK 0xf00000U
#define AARCH64_ID_AA64PFR0_EL1_ADVSIMD_GET( _reg ) \
  ( ( ( _reg ) >> 20 ) & 0xfU )

#define AARCH64_ID_AA64PFR0_EL1_GIC( _val ) ( ( uint64_t ) ( _val ) << 24 )
#define AARCH64_ID_AA64PFR0_EL1_GIC_SHIFT 24
#define AARCH64_ID_AA64PFR0_EL1_GIC_MASK 0xf000000U
#define AARCH64_ID_AA64PFR0_EL1_GIC_GET( _reg ) \
  ( ( ( _reg ) >> 24 ) & 0xfU )

#define AARCH64_ID_AA64PFR0_EL1_RAS( _val ) ( ( uint64_t ) ( _val ) << 28 )
#define AARCH64_ID_AA64PFR0_EL1_RAS_SHIFT 28
#define AARCH64_ID_AA64PFR0_EL1_RAS_MASK 0xf0000000U
#define AARCH64_ID_AA64PFR0_EL1_RAS_GET( _reg ) \
  ( ( ( _reg ) >> 28 ) & 0xfU )

#define AARCH64_ID_AA64PFR0_EL1_SVE( _val ) ( ( uint64_t ) ( _val ) << 32 )
#define AARCH64_ID_AA64PFR0_EL1_SVE_SHIFT 32
#define AARCH64_ID_AA64PFR0_EL1_SVE_MASK 0xf00000000ULL
#define AARCH64_ID_AA64PFR0_EL1_SVE_GET( _reg ) \
  ( ( ( _reg ) >> 32 ) & 0xfULL )

#define AARCH64_ID_AA64PFR0_EL1_SEL2( _val ) ( ( uint64_t ) ( _val ) << 36 )
#define AARCH64_ID_AA64PFR0_EL1_SEL2_SHIFT 36
#define AARCH64_ID_AA64PFR0_EL1_SEL2_MASK 0xf000000000ULL
#define AARCH64_ID_AA64PFR0_EL1_SEL2_GET( _reg ) \
  ( ( ( _reg ) >> 36 ) & 0xfULL )

#define AARCH64_ID_AA64PFR0_EL1_MPAM( _val ) ( ( uint64_t ) ( _val ) << 40 )
#define AARCH64_ID_AA64PFR0_EL1_MPAM_SHIFT 40
#define AARCH64_ID_AA64PFR0_EL1_MPAM_MASK 0xf0000000000ULL
#define AARCH64_ID_AA64PFR0_EL1_MPAM_GET( _reg ) \
  ( ( ( _reg ) >> 40 ) & 0xfULL )

#define AARCH64_ID_AA64PFR0_EL1_AMU( _val ) ( ( uint64_t ) ( _val ) << 44 )
#define AARCH64_ID_AA64PFR0_EL1_AMU_SHIFT 44
#define AARCH64_ID_AA64PFR0_EL1_AMU_MASK 0xf00000000000ULL
#define AARCH64_ID_AA64PFR0_EL1_AMU_GET( _reg ) \
  ( ( ( _reg ) >> 44 ) & 0xfULL )

#define AARCH64_ID_AA64PFR0_EL1_DIT( _val ) ( ( uint64_t ) ( _val ) << 48 )
#define AARCH64_ID_AA64PFR0_EL1_DIT_SHIFT 48
#define AARCH64_ID_AA64PFR0_EL1_DIT_MASK 0xf000000000000ULL
#define AARCH64_ID_AA64PFR0_EL1_DIT_GET( _reg ) \
  ( ( ( _reg ) >> 48 ) & 0xfULL )

#define AARCH64_ID_AA64PFR0_EL1_CSV2( _val ) ( ( uint64_t ) ( _val ) << 56 )
#define AARCH64_ID_AA64PFR0_EL1_CSV2_SHIFT 56
#define AARCH64_ID_AA64PFR0_EL1_CSV2_MASK 0xf00000000000000ULL
#define AARCH64_ID_AA64PFR0_EL1_CSV2_GET( _reg ) \
  ( ( ( _reg ) >> 56 ) & 0xfULL )

#define AARCH64_ID_AA64PFR0_EL1_CSV3( _val ) ( ( uint64_t ) ( _val ) << 60 )
#define AARCH64_ID_AA64PFR0_EL1_CSV3_SHIFT 60
#define AARCH64_ID_AA64PFR0_EL1_CSV3_MASK 0xf000000000000000ULL
#define AARCH64_ID_AA64PFR0_EL1_CSV3_GET( _reg ) \
  ( ( ( _reg ) >> 60 ) & 0xfULL )
3309
/* Return the current value of the ID_AA64PFR0_EL1 system register. */
static inline uint64_t _AArch64_Read_id_aa64pfr0_el1( void )
{
  uint64_t result;

  __asm__ volatile ( "mrs %0, ID_AA64PFR0_EL1" : "=&r" ( result ) : : "memory" );

  return result;
}
3320
/*
 * ID_AA64PFR1_EL1, AArch64 Processor Feature Register 1.
 *
 * Each 4-bit field has a setter, a _SHIFT, a _MASK, and a _GET accessor.
 * The setters cast _val to uint64_t before shifting so the result is a
 * well-defined 64-bit value even for wide or high _val arguments.
 */

#define AARCH64_ID_AA64PFR1_EL1_BT( _val ) ( ( uint64_t ) ( _val ) << 0 )
#define AARCH64_ID_AA64PFR1_EL1_BT_SHIFT 0
#define AARCH64_ID_AA64PFR1_EL1_BT_MASK 0xfU
#define AARCH64_ID_AA64PFR1_EL1_BT_GET( _reg ) \
  ( ( ( _reg ) >> 0 ) & 0xfU )

#define AARCH64_ID_AA64PFR1_EL1_SSBS( _val ) ( ( uint64_t ) ( _val ) << 4 )
#define AARCH64_ID_AA64PFR1_EL1_SSBS_SHIFT 4
#define AARCH64_ID_AA64PFR1_EL1_SSBS_MASK 0xf0U
#define AARCH64_ID_AA64PFR1_EL1_SSBS_GET( _reg ) \
  ( ( ( _reg ) >> 4 ) & 0xfU )

#define AARCH64_ID_AA64PFR1_EL1_MTE( _val ) ( ( uint64_t ) ( _val ) << 8 )
#define AARCH64_ID_AA64PFR1_EL1_MTE_SHIFT 8
#define AARCH64_ID_AA64PFR1_EL1_MTE_MASK 0xf00U
#define AARCH64_ID_AA64PFR1_EL1_MTE_GET( _reg ) \
  ( ( ( _reg ) >> 8 ) & 0xfU )

#define AARCH64_ID_AA64PFR1_EL1_RAS_FRAC( _val ) ( ( uint64_t ) ( _val ) << 12 )
#define AARCH64_ID_AA64PFR1_EL1_RAS_FRAC_SHIFT 12
#define AARCH64_ID_AA64PFR1_EL1_RAS_FRAC_MASK 0xf000U
#define AARCH64_ID_AA64PFR1_EL1_RAS_FRAC_GET( _reg ) \
  ( ( ( _reg ) >> 12 ) & 0xfU )

#define AARCH64_ID_AA64PFR1_EL1_MPAM_FRAC( _val ) ( ( uint64_t ) ( _val ) << 16 )
#define AARCH64_ID_AA64PFR1_EL1_MPAM_FRAC_SHIFT 16
#define AARCH64_ID_AA64PFR1_EL1_MPAM_FRAC_MASK 0xf0000U
#define AARCH64_ID_AA64PFR1_EL1_MPAM_FRAC_GET( _reg ) \
  ( ( ( _reg ) >> 16 ) & 0xfU )
3352
/* Return the current value of the ID_AA64PFR1_EL1 system register. */
static inline uint64_t _AArch64_Read_id_aa64pfr1_el1( void )
{
  uint64_t result;

  __asm__ volatile ( "mrs %0, ID_AA64PFR1_EL1" : "=&r" ( result ) : : "memory" );

  return result;
}
3363
3364/* ID_AFR0_EL1, AArch64 Auxiliary Feature Register 0 */
3365
/* Return the current value of the ID_AFR0_EL1 system register. */
static inline uint64_t _AArch64_Read_id_afr0_el1( void )
{
  uint64_t result;

  __asm__ volatile ( "mrs %0, ID_AFR0_EL1" : "=&r" ( result ) : : "memory" );

  return result;
}
3376
/*
 * ID_DFR0_EL1, AArch64 Debug Feature Register 0.
 *
 * Each 4-bit field has a setter, a _SHIFT, a _MASK, and a _GET accessor.
 * The setters cast _val to uint64_t before shifting: shifting a plain int
 * into bit 31 (e.g. TRACEFILT with _val >= 8) is undefined behavior
 * (CERT INT34-C).
 */

#define AARCH64_ID_DFR0_EL1_COPDBG( _val ) ( ( uint64_t ) ( _val ) << 0 )
#define AARCH64_ID_DFR0_EL1_COPDBG_SHIFT 0
#define AARCH64_ID_DFR0_EL1_COPDBG_MASK 0xfU
#define AARCH64_ID_DFR0_EL1_COPDBG_GET( _reg ) \
  ( ( ( _reg ) >> 0 ) & 0xfU )

#define AARCH64_ID_DFR0_EL1_COPSDBG( _val ) ( ( uint64_t ) ( _val ) << 4 )
#define AARCH64_ID_DFR0_EL1_COPSDBG_SHIFT 4
#define AARCH64_ID_DFR0_EL1_COPSDBG_MASK 0xf0U
#define AARCH64_ID_DFR0_EL1_COPSDBG_GET( _reg ) \
  ( ( ( _reg ) >> 4 ) & 0xfU )

#define AARCH64_ID_DFR0_EL1_MMAPDBG( _val ) ( ( uint64_t ) ( _val ) << 8 )
#define AARCH64_ID_DFR0_EL1_MMAPDBG_SHIFT 8
#define AARCH64_ID_DFR0_EL1_MMAPDBG_MASK 0xf00U
#define AARCH64_ID_DFR0_EL1_MMAPDBG_GET( _reg ) \
  ( ( ( _reg ) >> 8 ) & 0xfU )

#define AARCH64_ID_DFR0_EL1_COPTRC( _val ) ( ( uint64_t ) ( _val ) << 12 )
#define AARCH64_ID_DFR0_EL1_COPTRC_SHIFT 12
#define AARCH64_ID_DFR0_EL1_COPTRC_MASK 0xf000U
#define AARCH64_ID_DFR0_EL1_COPTRC_GET( _reg ) \
  ( ( ( _reg ) >> 12 ) & 0xfU )

#define AARCH64_ID_DFR0_EL1_MMAPTRC( _val ) ( ( uint64_t ) ( _val ) << 16 )
#define AARCH64_ID_DFR0_EL1_MMAPTRC_SHIFT 16
#define AARCH64_ID_DFR0_EL1_MMAPTRC_MASK 0xf0000U
#define AARCH64_ID_DFR0_EL1_MMAPTRC_GET( _reg ) \
  ( ( ( _reg ) >> 16 ) & 0xfU )

#define AARCH64_ID_DFR0_EL1_MPROFDBG( _val ) ( ( uint64_t ) ( _val ) << 20 )
#define AARCH64_ID_DFR0_EL1_MPROFDBG_SHIFT 20
#define AARCH64_ID_DFR0_EL1_MPROFDBG_MASK 0xf00000U
#define AARCH64_ID_DFR0_EL1_MPROFDBG_GET( _reg ) \
  ( ( ( _reg ) >> 20 ) & 0xfU )

#define AARCH64_ID_DFR0_EL1_PERFMON( _val ) ( ( uint64_t ) ( _val ) << 24 )
#define AARCH64_ID_DFR0_EL1_PERFMON_SHIFT 24
#define AARCH64_ID_DFR0_EL1_PERFMON_MASK 0xf000000U
#define AARCH64_ID_DFR0_EL1_PERFMON_GET( _reg ) \
  ( ( ( _reg ) >> 24 ) & 0xfU )

#define AARCH64_ID_DFR0_EL1_TRACEFILT( _val ) ( ( uint64_t ) ( _val ) << 28 )
#define AARCH64_ID_DFR0_EL1_TRACEFILT_SHIFT 28
#define AARCH64_ID_DFR0_EL1_TRACEFILT_MASK 0xf0000000U
#define AARCH64_ID_DFR0_EL1_TRACEFILT_GET( _reg ) \
  ( ( ( _reg ) >> 28 ) & 0xfU )
3426
/* Return the current value of the ID_DFR0_EL1 system register. */
static inline uint64_t _AArch64_Read_id_dfr0_el1( void )
{
  uint64_t result;

  __asm__ volatile ( "mrs %0, ID_DFR0_EL1" : "=&r" ( result ) : : "memory" );

  return result;
}
3437
/*
 * ID_DFR1_EL1, Debug Feature Register 1.
 *
 * The MTPMU setter casts _val to uint64_t so the result is a well-defined
 * 64-bit value, consistent with the other register field setters.
 */

#define AARCH64_ID_DFR1_EL1_MTPMU( _val ) ( ( uint64_t ) ( _val ) << 0 )
#define AARCH64_ID_DFR1_EL1_MTPMU_SHIFT 0
#define AARCH64_ID_DFR1_EL1_MTPMU_MASK 0xfU
#define AARCH64_ID_DFR1_EL1_MTPMU_GET( _reg ) \
  ( ( ( _reg ) >> 0 ) & 0xfU )
3445
/* Return the current value of the ID_DFR1_EL1 system register. */
static inline uint64_t _AArch64_Read_id_dfr1_el1( void )
{
  uint64_t result;

  __asm__ volatile ( "mrs %0, ID_DFR1_EL1" : "=&r" ( result ) : : "memory" );

  return result;
}
3456
/*
 * ID_ISAR0_EL1, AArch64 Instruction Set Attribute Register 0.
 *
 * Each 4-bit field has a setter, a _SHIFT, a _MASK, and a _GET accessor.
 * The setters cast _val to uint64_t before shifting so a plain int
 * argument cannot be shifted into the sign bit (CERT INT34-C).
 */

#define AARCH64_ID_ISAR0_EL1_SWAP( _val ) ( ( uint64_t ) ( _val ) << 0 )
#define AARCH64_ID_ISAR0_EL1_SWAP_SHIFT 0
#define AARCH64_ID_ISAR0_EL1_SWAP_MASK 0xfU
#define AARCH64_ID_ISAR0_EL1_SWAP_GET( _reg ) \
  ( ( ( _reg ) >> 0 ) & 0xfU )

#define AARCH64_ID_ISAR0_EL1_BITCOUNT( _val ) ( ( uint64_t ) ( _val ) << 4 )
#define AARCH64_ID_ISAR0_EL1_BITCOUNT_SHIFT 4
#define AARCH64_ID_ISAR0_EL1_BITCOUNT_MASK 0xf0U
#define AARCH64_ID_ISAR0_EL1_BITCOUNT_GET( _reg ) \
  ( ( ( _reg ) >> 4 ) & 0xfU )

#define AARCH64_ID_ISAR0_EL1_BITFIELD( _val ) ( ( uint64_t ) ( _val ) << 8 )
#define AARCH64_ID_ISAR0_EL1_BITFIELD_SHIFT 8
#define AARCH64_ID_ISAR0_EL1_BITFIELD_MASK 0xf00U
#define AARCH64_ID_ISAR0_EL1_BITFIELD_GET( _reg ) \
  ( ( ( _reg ) >> 8 ) & 0xfU )

#define AARCH64_ID_ISAR0_EL1_CMPBRANCH( _val ) ( ( uint64_t ) ( _val ) << 12 )
#define AARCH64_ID_ISAR0_EL1_CMPBRANCH_SHIFT 12
#define AARCH64_ID_ISAR0_EL1_CMPBRANCH_MASK 0xf000U
#define AARCH64_ID_ISAR0_EL1_CMPBRANCH_GET( _reg ) \
  ( ( ( _reg ) >> 12 ) & 0xfU )

#define AARCH64_ID_ISAR0_EL1_COPROC( _val ) ( ( uint64_t ) ( _val ) << 16 )
#define AARCH64_ID_ISAR0_EL1_COPROC_SHIFT 16
#define AARCH64_ID_ISAR0_EL1_COPROC_MASK 0xf0000U
#define AARCH64_ID_ISAR0_EL1_COPROC_GET( _reg ) \
  ( ( ( _reg ) >> 16 ) & 0xfU )

#define AARCH64_ID_ISAR0_EL1_DEBUG( _val ) ( ( uint64_t ) ( _val ) << 20 )
#define AARCH64_ID_ISAR0_EL1_DEBUG_SHIFT 20
#define AARCH64_ID_ISAR0_EL1_DEBUG_MASK 0xf00000U
#define AARCH64_ID_ISAR0_EL1_DEBUG_GET( _reg ) \
  ( ( ( _reg ) >> 20 ) & 0xfU )

#define AARCH64_ID_ISAR0_EL1_DIVIDE( _val ) ( ( uint64_t ) ( _val ) << 24 )
#define AARCH64_ID_ISAR0_EL1_DIVIDE_SHIFT 24
#define AARCH64_ID_ISAR0_EL1_DIVIDE_MASK 0xf000000U
#define AARCH64_ID_ISAR0_EL1_DIVIDE_GET( _reg ) \
  ( ( ( _reg ) >> 24 ) & 0xfU )
3500
/* Return the current value of the ID_ISAR0_EL1 system register. */
static inline uint64_t _AArch64_Read_id_isar0_el1( void )
{
  uint64_t result;

  __asm__ volatile ( "mrs %0, ID_ISAR0_EL1" : "=&r" ( result ) : : "memory" );

  return result;
}
3511
/*
 * ID_ISAR1_EL1, AArch64 Instruction Set Attribute Register 1.
 *
 * Each 4-bit field has a setter, a _SHIFT, a _MASK, and a _GET accessor.
 * The setters cast _val to uint64_t before shifting: shifting a plain int
 * into bit 31 (e.g. JAZELLE with _val >= 8) is undefined behavior
 * (CERT INT34-C).
 */

#define AARCH64_ID_ISAR1_EL1_ENDIAN( _val ) ( ( uint64_t ) ( _val ) << 0 )
#define AARCH64_ID_ISAR1_EL1_ENDIAN_SHIFT 0
#define AARCH64_ID_ISAR1_EL1_ENDIAN_MASK 0xfU
#define AARCH64_ID_ISAR1_EL1_ENDIAN_GET( _reg ) \
  ( ( ( _reg ) >> 0 ) & 0xfU )

#define AARCH64_ID_ISAR1_EL1_EXCEPT( _val ) ( ( uint64_t ) ( _val ) << 4 )
#define AARCH64_ID_ISAR1_EL1_EXCEPT_SHIFT 4
#define AARCH64_ID_ISAR1_EL1_EXCEPT_MASK 0xf0U
#define AARCH64_ID_ISAR1_EL1_EXCEPT_GET( _reg ) \
  ( ( ( _reg ) >> 4 ) & 0xfU )

#define AARCH64_ID_ISAR1_EL1_EXCEPT_AR( _val ) ( ( uint64_t ) ( _val ) << 8 )
#define AARCH64_ID_ISAR1_EL1_EXCEPT_AR_SHIFT 8
#define AARCH64_ID_ISAR1_EL1_EXCEPT_AR_MASK 0xf00U
#define AARCH64_ID_ISAR1_EL1_EXCEPT_AR_GET( _reg ) \
  ( ( ( _reg ) >> 8 ) & 0xfU )

#define AARCH64_ID_ISAR1_EL1_EXTEND( _val ) ( ( uint64_t ) ( _val ) << 12 )
#define AARCH64_ID_ISAR1_EL1_EXTEND_SHIFT 12
#define AARCH64_ID_ISAR1_EL1_EXTEND_MASK 0xf000U
#define AARCH64_ID_ISAR1_EL1_EXTEND_GET( _reg ) \
  ( ( ( _reg ) >> 12 ) & 0xfU )

#define AARCH64_ID_ISAR1_EL1_IFTHEN( _val ) ( ( uint64_t ) ( _val ) << 16 )
#define AARCH64_ID_ISAR1_EL1_IFTHEN_SHIFT 16
#define AARCH64_ID_ISAR1_EL1_IFTHEN_MASK 0xf0000U
#define AARCH64_ID_ISAR1_EL1_IFTHEN_GET( _reg ) \
  ( ( ( _reg ) >> 16 ) & 0xfU )

#define AARCH64_ID_ISAR1_EL1_IMMEDIATE( _val ) ( ( uint64_t ) ( _val ) << 20 )
#define AARCH64_ID_ISAR1_EL1_IMMEDIATE_SHIFT 20
#define AARCH64_ID_ISAR1_EL1_IMMEDIATE_MASK 0xf00000U
#define AARCH64_ID_ISAR1_EL1_IMMEDIATE_GET( _reg ) \
  ( ( ( _reg ) >> 20 ) & 0xfU )

#define AARCH64_ID_ISAR1_EL1_INTERWORK( _val ) ( ( uint64_t ) ( _val ) << 24 )
#define AARCH64_ID_ISAR1_EL1_INTERWORK_SHIFT 24
#define AARCH64_ID_ISAR1_EL1_INTERWORK_MASK 0xf000000U
#define AARCH64_ID_ISAR1_EL1_INTERWORK_GET( _reg ) \
  ( ( ( _reg ) >> 24 ) & 0xfU )

#define AARCH64_ID_ISAR1_EL1_JAZELLE( _val ) ( ( uint64_t ) ( _val ) << 28 )
#define AARCH64_ID_ISAR1_EL1_JAZELLE_SHIFT 28
#define AARCH64_ID_ISAR1_EL1_JAZELLE_MASK 0xf0000000U
#define AARCH64_ID_ISAR1_EL1_JAZELLE_GET( _reg ) \
  ( ( ( _reg ) >> 28 ) & 0xfU )
3561
/* Return the current value of the ID_ISAR1_EL1 system register. */
static inline uint64_t _AArch64_Read_id_isar1_el1( void )
{
  uint64_t result;

  __asm__ volatile ( "mrs %0, ID_ISAR1_EL1" : "=&r" ( result ) : : "memory" );

  return result;
}
3572
/*
 * ID_ISAR2_EL1, AArch64 Instruction Set Attribute Register 2.
 *
 * Each 4-bit field has a setter, a _SHIFT, a _MASK, and a _GET accessor.
 * The setters cast _val to uint64_t before shifting: shifting a plain int
 * into bit 31 (e.g. REVERSAL with _val >= 8) is undefined behavior
 * (CERT INT34-C).
 */

#define AARCH64_ID_ISAR2_EL1_LOADSTORE( _val ) ( ( uint64_t ) ( _val ) << 0 )
#define AARCH64_ID_ISAR2_EL1_LOADSTORE_SHIFT 0
#define AARCH64_ID_ISAR2_EL1_LOADSTORE_MASK 0xfU
#define AARCH64_ID_ISAR2_EL1_LOADSTORE_GET( _reg ) \
  ( ( ( _reg ) >> 0 ) & 0xfU )

#define AARCH64_ID_ISAR2_EL1_MEMHINT( _val ) ( ( uint64_t ) ( _val ) << 4 )
#define AARCH64_ID_ISAR2_EL1_MEMHINT_SHIFT 4
#define AARCH64_ID_ISAR2_EL1_MEMHINT_MASK 0xf0U
#define AARCH64_ID_ISAR2_EL1_MEMHINT_GET( _reg ) \
  ( ( ( _reg ) >> 4 ) & 0xfU )

#define AARCH64_ID_ISAR2_EL1_MULTIACCESSINT( _val ) ( ( uint64_t ) ( _val ) << 8 )
#define AARCH64_ID_ISAR2_EL1_MULTIACCESSINT_SHIFT 8
#define AARCH64_ID_ISAR2_EL1_MULTIACCESSINT_MASK 0xf00U
#define AARCH64_ID_ISAR2_EL1_MULTIACCESSINT_GET( _reg ) \
  ( ( ( _reg ) >> 8 ) & 0xfU )

#define AARCH64_ID_ISAR2_EL1_MULT( _val ) ( ( uint64_t ) ( _val ) << 12 )
#define AARCH64_ID_ISAR2_EL1_MULT_SHIFT 12
#define AARCH64_ID_ISAR2_EL1_MULT_MASK 0xf000U
#define AARCH64_ID_ISAR2_EL1_MULT_GET( _reg ) \
  ( ( ( _reg ) >> 12 ) & 0xfU )

#define AARCH64_ID_ISAR2_EL1_MULTS( _val ) ( ( uint64_t ) ( _val ) << 16 )
#define AARCH64_ID_ISAR2_EL1_MULTS_SHIFT 16
#define AARCH64_ID_ISAR2_EL1_MULTS_MASK 0xf0000U
#define AARCH64_ID_ISAR2_EL1_MULTS_GET( _reg ) \
  ( ( ( _reg ) >> 16 ) & 0xfU )

#define AARCH64_ID_ISAR2_EL1_MULTU( _val ) ( ( uint64_t ) ( _val ) << 20 )
#define AARCH64_ID_ISAR2_EL1_MULTU_SHIFT 20
#define AARCH64_ID_ISAR2_EL1_MULTU_MASK 0xf00000U
#define AARCH64_ID_ISAR2_EL1_MULTU_GET( _reg ) \
  ( ( ( _reg ) >> 20 ) & 0xfU )

#define AARCH64_ID_ISAR2_EL1_PSR_AR( _val ) ( ( uint64_t ) ( _val ) << 24 )
#define AARCH64_ID_ISAR2_EL1_PSR_AR_SHIFT 24
#define AARCH64_ID_ISAR2_EL1_PSR_AR_MASK 0xf000000U
#define AARCH64_ID_ISAR2_EL1_PSR_AR_GET( _reg ) \
  ( ( ( _reg ) >> 24 ) & 0xfU )

#define AARCH64_ID_ISAR2_EL1_REVERSAL( _val ) ( ( uint64_t ) ( _val ) << 28 )
#define AARCH64_ID_ISAR2_EL1_REVERSAL_SHIFT 28
#define AARCH64_ID_ISAR2_EL1_REVERSAL_MASK 0xf0000000U
#define AARCH64_ID_ISAR2_EL1_REVERSAL_GET( _reg ) \
  ( ( ( _reg ) >> 28 ) & 0xfU )
3622
/* Return the current value of the ID_ISAR2_EL1 system register. */
static inline uint64_t _AArch64_Read_id_isar2_el1( void )
{
  uint64_t result;

  __asm__ volatile ( "mrs %0, ID_ISAR2_EL1" : "=&r" ( result ) : : "memory" );

  return result;
}
3633
/*
 * ID_ISAR3_EL1, AArch64 Instruction Set Attribute Register 3.
 *
 * Each 4-bit field has a setter, a _SHIFT, a _MASK, and a _GET accessor.
 * The setters cast _val to uint64_t before shifting: shifting a plain int
 * into bit 31 (e.g. T32EE with _val >= 8) is undefined behavior
 * (CERT INT34-C).
 */

#define AARCH64_ID_ISAR3_EL1_SATURATE( _val ) ( ( uint64_t ) ( _val ) << 0 )
#define AARCH64_ID_ISAR3_EL1_SATURATE_SHIFT 0
#define AARCH64_ID_ISAR3_EL1_SATURATE_MASK 0xfU
#define AARCH64_ID_ISAR3_EL1_SATURATE_GET( _reg ) \
  ( ( ( _reg ) >> 0 ) & 0xfU )

#define AARCH64_ID_ISAR3_EL1_SIMD( _val ) ( ( uint64_t ) ( _val ) << 4 )
#define AARCH64_ID_ISAR3_EL1_SIMD_SHIFT 4
#define AARCH64_ID_ISAR3_EL1_SIMD_MASK 0xf0U
#define AARCH64_ID_ISAR3_EL1_SIMD_GET( _reg ) \
  ( ( ( _reg ) >> 4 ) & 0xfU )

#define AARCH64_ID_ISAR3_EL1_SVC( _val ) ( ( uint64_t ) ( _val ) << 8 )
#define AARCH64_ID_ISAR3_EL1_SVC_SHIFT 8
#define AARCH64_ID_ISAR3_EL1_SVC_MASK 0xf00U
#define AARCH64_ID_ISAR3_EL1_SVC_GET( _reg ) \
  ( ( ( _reg ) >> 8 ) & 0xfU )

#define AARCH64_ID_ISAR3_EL1_SYNCHPRIM( _val ) ( ( uint64_t ) ( _val ) << 12 )
#define AARCH64_ID_ISAR3_EL1_SYNCHPRIM_SHIFT 12
#define AARCH64_ID_ISAR3_EL1_SYNCHPRIM_MASK 0xf000U
#define AARCH64_ID_ISAR3_EL1_SYNCHPRIM_GET( _reg ) \
  ( ( ( _reg ) >> 12 ) & 0xfU )

#define AARCH64_ID_ISAR3_EL1_TABBRANCH( _val ) ( ( uint64_t ) ( _val ) << 16 )
#define AARCH64_ID_ISAR3_EL1_TABBRANCH_SHIFT 16
#define AARCH64_ID_ISAR3_EL1_TABBRANCH_MASK 0xf0000U
#define AARCH64_ID_ISAR3_EL1_TABBRANCH_GET( _reg ) \
  ( ( ( _reg ) >> 16 ) & 0xfU )

#define AARCH64_ID_ISAR3_EL1_T32COPY( _val ) ( ( uint64_t ) ( _val ) << 20 )
#define AARCH64_ID_ISAR3_EL1_T32COPY_SHIFT 20
#define AARCH64_ID_ISAR3_EL1_T32COPY_MASK 0xf00000U
#define AARCH64_ID_ISAR3_EL1_T32COPY_GET( _reg ) \
  ( ( ( _reg ) >> 20 ) & 0xfU )

#define AARCH64_ID_ISAR3_EL1_TRUENOP( _val ) ( ( uint64_t ) ( _val ) << 24 )
#define AARCH64_ID_ISAR3_EL1_TRUENOP_SHIFT 24
#define AARCH64_ID_ISAR3_EL1_TRUENOP_MASK 0xf000000U
#define AARCH64_ID_ISAR3_EL1_TRUENOP_GET( _reg ) \
  ( ( ( _reg ) >> 24 ) & 0xfU )

#define AARCH64_ID_ISAR3_EL1_T32EE( _val ) ( ( uint64_t ) ( _val ) << 28 )
#define AARCH64_ID_ISAR3_EL1_T32EE_SHIFT 28
#define AARCH64_ID_ISAR3_EL1_T32EE_MASK 0xf0000000U
#define AARCH64_ID_ISAR3_EL1_T32EE_GET( _reg ) \
  ( ( ( _reg ) >> 28 ) & 0xfU )
3683
/* Return the current value of the ID_ISAR3_EL1 system register. */
static inline uint64_t _AArch64_Read_id_isar3_el1( void )
{
  uint64_t result;

  __asm__ volatile ( "mrs %0, ID_ISAR3_EL1" : "=&r" ( result ) : : "memory" );

  return result;
}
3694
/*
 * ID_ISAR4_EL1, AArch64 Instruction Set Attribute Register 4.
 *
 * Each 4-bit field has a setter, a _SHIFT, a _MASK, and a _GET accessor.
 * The setters cast _val to uint64_t before shifting: shifting a plain int
 * into bit 31 (e.g. SWP_FRAC with _val >= 8) is undefined behavior
 * (CERT INT34-C).
 */

#define AARCH64_ID_ISAR4_EL1_UNPRIV( _val ) ( ( uint64_t ) ( _val ) << 0 )
#define AARCH64_ID_ISAR4_EL1_UNPRIV_SHIFT 0
#define AARCH64_ID_ISAR4_EL1_UNPRIV_MASK 0xfU
#define AARCH64_ID_ISAR4_EL1_UNPRIV_GET( _reg ) \
  ( ( ( _reg ) >> 0 ) & 0xfU )

#define AARCH64_ID_ISAR4_EL1_WITHSHIFTS( _val ) ( ( uint64_t ) ( _val ) << 4 )
#define AARCH64_ID_ISAR4_EL1_WITHSHIFTS_SHIFT 4
#define AARCH64_ID_ISAR4_EL1_WITHSHIFTS_MASK 0xf0U
#define AARCH64_ID_ISAR4_EL1_WITHSHIFTS_GET( _reg ) \
  ( ( ( _reg ) >> 4 ) & 0xfU )

#define AARCH64_ID_ISAR4_EL1_WRITEBACK( _val ) ( ( uint64_t ) ( _val ) << 8 )
#define AARCH64_ID_ISAR4_EL1_WRITEBACK_SHIFT 8
#define AARCH64_ID_ISAR4_EL1_WRITEBACK_MASK 0xf00U
#define AARCH64_ID_ISAR4_EL1_WRITEBACK_GET( _reg ) \
  ( ( ( _reg ) >> 8 ) & 0xfU )

#define AARCH64_ID_ISAR4_EL1_SMC( _val ) ( ( uint64_t ) ( _val ) << 12 )
#define AARCH64_ID_ISAR4_EL1_SMC_SHIFT 12
#define AARCH64_ID_ISAR4_EL1_SMC_MASK 0xf000U
#define AARCH64_ID_ISAR4_EL1_SMC_GET( _reg ) \
  ( ( ( _reg ) >> 12 ) & 0xfU )

#define AARCH64_ID_ISAR4_EL1_BARRIER( _val ) ( ( uint64_t ) ( _val ) << 16 )
#define AARCH64_ID_ISAR4_EL1_BARRIER_SHIFT 16
#define AARCH64_ID_ISAR4_EL1_BARRIER_MASK 0xf0000U
#define AARCH64_ID_ISAR4_EL1_BARRIER_GET( _reg ) \
  ( ( ( _reg ) >> 16 ) & 0xfU )

#define AARCH64_ID_ISAR4_EL1_SYNCHPRIM_FRAC( _val ) ( ( uint64_t ) ( _val ) << 20 )
#define AARCH64_ID_ISAR4_EL1_SYNCHPRIM_FRAC_SHIFT 20
#define AARCH64_ID_ISAR4_EL1_SYNCHPRIM_FRAC_MASK 0xf00000U
#define AARCH64_ID_ISAR4_EL1_SYNCHPRIM_FRAC_GET( _reg ) \
  ( ( ( _reg ) >> 20 ) & 0xfU )

#define AARCH64_ID_ISAR4_EL1_PSR_M( _val ) ( ( uint64_t ) ( _val ) << 24 )
#define AARCH64_ID_ISAR4_EL1_PSR_M_SHIFT 24
#define AARCH64_ID_ISAR4_EL1_PSR_M_MASK 0xf000000U
#define AARCH64_ID_ISAR4_EL1_PSR_M_GET( _reg ) \
  ( ( ( _reg ) >> 24 ) & 0xfU )

#define AARCH64_ID_ISAR4_EL1_SWP_FRAC( _val ) ( ( uint64_t ) ( _val ) << 28 )
#define AARCH64_ID_ISAR4_EL1_SWP_FRAC_SHIFT 28
#define AARCH64_ID_ISAR4_EL1_SWP_FRAC_MASK 0xf0000000U
#define AARCH64_ID_ISAR4_EL1_SWP_FRAC_GET( _reg ) \
  ( ( ( _reg ) >> 28 ) & 0xfU )
3744
/* Return the current value of the ID_ISAR4_EL1 system register. */
static inline uint64_t _AArch64_Read_id_isar4_el1( void )
{
  uint64_t result;

  __asm__ volatile ( "mrs %0, ID_ISAR4_EL1" : "=&r" ( result ) : : "memory" );

  return result;
}
3755
/*
 * ID_ISAR5_EL1, AArch64 Instruction Set Attribute Register 5.
 *
 * Each 4-bit field has a setter, a _SHIFT, a _MASK, and a _GET accessor.
 * The setters cast _val to uint64_t before shifting: shifting a plain int
 * into bit 31 (e.g. VCMA with _val >= 8) is undefined behavior
 * (CERT INT34-C).
 */

#define AARCH64_ID_ISAR5_EL1_SEVL( _val ) ( ( uint64_t ) ( _val ) << 0 )
#define AARCH64_ID_ISAR5_EL1_SEVL_SHIFT 0
#define AARCH64_ID_ISAR5_EL1_SEVL_MASK 0xfU
#define AARCH64_ID_ISAR5_EL1_SEVL_GET( _reg ) \
  ( ( ( _reg ) >> 0 ) & 0xfU )

#define AARCH64_ID_ISAR5_EL1_AES( _val ) ( ( uint64_t ) ( _val ) << 4 )
#define AARCH64_ID_ISAR5_EL1_AES_SHIFT 4
#define AARCH64_ID_ISAR5_EL1_AES_MASK 0xf0U
#define AARCH64_ID_ISAR5_EL1_AES_GET( _reg ) \
  ( ( ( _reg ) >> 4 ) & 0xfU )

#define AARCH64_ID_ISAR5_EL1_SHA1( _val ) ( ( uint64_t ) ( _val ) << 8 )
#define AARCH64_ID_ISAR5_EL1_SHA1_SHIFT 8
#define AARCH64_ID_ISAR5_EL1_SHA1_MASK 0xf00U
#define AARCH64_ID_ISAR5_EL1_SHA1_GET( _reg ) \
  ( ( ( _reg ) >> 8 ) & 0xfU )

#define AARCH64_ID_ISAR5_EL1_SHA2( _val ) ( ( uint64_t ) ( _val ) << 12 )
#define AARCH64_ID_ISAR5_EL1_SHA2_SHIFT 12
#define AARCH64_ID_ISAR5_EL1_SHA2_MASK 0xf000U
#define AARCH64_ID_ISAR5_EL1_SHA2_GET( _reg ) \
  ( ( ( _reg ) >> 12 ) & 0xfU )

#define AARCH64_ID_ISAR5_EL1_CRC32( _val ) ( ( uint64_t ) ( _val ) << 16 )
#define AARCH64_ID_ISAR5_EL1_CRC32_SHIFT 16
#define AARCH64_ID_ISAR5_EL1_CRC32_MASK 0xf0000U
#define AARCH64_ID_ISAR5_EL1_CRC32_GET( _reg ) \
  ( ( ( _reg ) >> 16 ) & 0xfU )

#define AARCH64_ID_ISAR5_EL1_RDM( _val ) ( ( uint64_t ) ( _val ) << 24 )
#define AARCH64_ID_ISAR5_EL1_RDM_SHIFT 24
#define AARCH64_ID_ISAR5_EL1_RDM_MASK 0xf000000U
#define AARCH64_ID_ISAR5_EL1_RDM_GET( _reg ) \
  ( ( ( _reg ) >> 24 ) & 0xfU )

#define AARCH64_ID_ISAR5_EL1_VCMA( _val ) ( ( uint64_t ) ( _val ) << 28 )
#define AARCH64_ID_ISAR5_EL1_VCMA_SHIFT 28
#define AARCH64_ID_ISAR5_EL1_VCMA_MASK 0xf0000000U
#define AARCH64_ID_ISAR5_EL1_VCMA_GET( _reg ) \
  ( ( ( _reg ) >> 28 ) & 0xfU )
3799
/* Return the current value of the ID_ISAR5_EL1 system register. */
static inline uint64_t _AArch64_Read_id_isar5_el1( void )
{
  uint64_t result;

  __asm__ volatile ( "mrs %0, ID_ISAR5_EL1" : "=&r" ( result ) : : "memory" );

  return result;
}
3810
/*
 * ID_ISAR6_EL1, AArch64 Instruction Set Attribute Register 6.
 *
 * Each 4-bit field has a setter, a _SHIFT, a _MASK, and a _GET accessor.
 * The setters cast _val to uint64_t before shifting so a plain int
 * argument cannot be shifted into the sign bit (CERT INT34-C).
 */

#define AARCH64_ID_ISAR6_EL1_JSCVT( _val ) ( ( uint64_t ) ( _val ) << 0 )
#define AARCH64_ID_ISAR6_EL1_JSCVT_SHIFT 0
#define AARCH64_ID_ISAR6_EL1_JSCVT_MASK 0xfU
#define AARCH64_ID_ISAR6_EL1_JSCVT_GET( _reg ) \
  ( ( ( _reg ) >> 0 ) & 0xfU )

#define AARCH64_ID_ISAR6_EL1_DP( _val ) ( ( uint64_t ) ( _val ) << 4 )
#define AARCH64_ID_ISAR6_EL1_DP_SHIFT 4
#define AARCH64_ID_ISAR6_EL1_DP_MASK 0xf0U
#define AARCH64_ID_ISAR6_EL1_DP_GET( _reg ) \
  ( ( ( _reg ) >> 4 ) & 0xfU )

#define AARCH64_ID_ISAR6_EL1_FHM( _val ) ( ( uint64_t ) ( _val ) << 8 )
#define AARCH64_ID_ISAR6_EL1_FHM_SHIFT 8
#define AARCH64_ID_ISAR6_EL1_FHM_MASK 0xf00U
#define AARCH64_ID_ISAR6_EL1_FHM_GET( _reg ) \
  ( ( ( _reg ) >> 8 ) & 0xfU )

#define AARCH64_ID_ISAR6_EL1_SB( _val ) ( ( uint64_t ) ( _val ) << 12 )
#define AARCH64_ID_ISAR6_EL1_SB_SHIFT 12
#define AARCH64_ID_ISAR6_EL1_SB_MASK 0xf000U
#define AARCH64_ID_ISAR6_EL1_SB_GET( _reg ) \
  ( ( ( _reg ) >> 12 ) & 0xfU )

#define AARCH64_ID_ISAR6_EL1_SPECRES( _val ) ( ( uint64_t ) ( _val ) << 16 )
#define AARCH64_ID_ISAR6_EL1_SPECRES_SHIFT 16
#define AARCH64_ID_ISAR6_EL1_SPECRES_MASK 0xf0000U
#define AARCH64_ID_ISAR6_EL1_SPECRES_GET( _reg ) \
  ( ( ( _reg ) >> 16 ) & 0xfU )

#define AARCH64_ID_ISAR6_EL1_BF16( _val ) ( ( uint64_t ) ( _val ) << 20 )
#define AARCH64_ID_ISAR6_EL1_BF16_SHIFT 20
#define AARCH64_ID_ISAR6_EL1_BF16_MASK 0xf00000U
#define AARCH64_ID_ISAR6_EL1_BF16_GET( _reg ) \
  ( ( ( _reg ) >> 20 ) & 0xfU )

#define AARCH64_ID_ISAR6_EL1_I8MM( _val ) ( ( uint64_t ) ( _val ) << 24 )
#define AARCH64_ID_ISAR6_EL1_I8MM_SHIFT 24
#define AARCH64_ID_ISAR6_EL1_I8MM_MASK 0xf000000U
#define AARCH64_ID_ISAR6_EL1_I8MM_GET( _reg ) \
  ( ( ( _reg ) >> 24 ) & 0xfU )
3854
/* Return the current value of the ID_ISAR6_EL1 system register. */
static inline uint64_t _AArch64_Read_id_isar6_el1( void )
{
  uint64_t result;

  __asm__ volatile ( "mrs %0, ID_ISAR6_EL1" : "=&r" ( result ) : : "memory" );

  return result;
}
3865
/*
 * ID_MMFR0_EL1, AArch64 Memory Model Feature Register 0.
 *
 * Each 4-bit field has a setter, a _SHIFT, a _MASK, and a _GET accessor.
 * The setters cast _val to uint64_t before shifting: shifting a plain int
 * into bit 31 (e.g. INNERSHR with _val >= 8) is undefined behavior
 * (CERT INT34-C).
 */

#define AARCH64_ID_MMFR0_EL1_VMSA( _val ) ( ( uint64_t ) ( _val ) << 0 )
#define AARCH64_ID_MMFR0_EL1_VMSA_SHIFT 0
#define AARCH64_ID_MMFR0_EL1_VMSA_MASK 0xfU
#define AARCH64_ID_MMFR0_EL1_VMSA_GET( _reg ) \
  ( ( ( _reg ) >> 0 ) & 0xfU )

#define AARCH64_ID_MMFR0_EL1_PMSA( _val ) ( ( uint64_t ) ( _val ) << 4 )
#define AARCH64_ID_MMFR0_EL1_PMSA_SHIFT 4
#define AARCH64_ID_MMFR0_EL1_PMSA_MASK 0xf0U
#define AARCH64_ID_MMFR0_EL1_PMSA_GET( _reg ) \
  ( ( ( _reg ) >> 4 ) & 0xfU )

#define AARCH64_ID_MMFR0_EL1_OUTERSHR( _val ) ( ( uint64_t ) ( _val ) << 8 )
#define AARCH64_ID_MMFR0_EL1_OUTERSHR_SHIFT 8
#define AARCH64_ID_MMFR0_EL1_OUTERSHR_MASK 0xf00U
#define AARCH64_ID_MMFR0_EL1_OUTERSHR_GET( _reg ) \
  ( ( ( _reg ) >> 8 ) & 0xfU )

#define AARCH64_ID_MMFR0_EL1_SHARELVL( _val ) ( ( uint64_t ) ( _val ) << 12 )
#define AARCH64_ID_MMFR0_EL1_SHARELVL_SHIFT 12
#define AARCH64_ID_MMFR0_EL1_SHARELVL_MASK 0xf000U
#define AARCH64_ID_MMFR0_EL1_SHARELVL_GET( _reg ) \
  ( ( ( _reg ) >> 12 ) & 0xfU )

#define AARCH64_ID_MMFR0_EL1_TCM( _val ) ( ( uint64_t ) ( _val ) << 16 )
#define AARCH64_ID_MMFR0_EL1_TCM_SHIFT 16
#define AARCH64_ID_MMFR0_EL1_TCM_MASK 0xf0000U
#define AARCH64_ID_MMFR0_EL1_TCM_GET( _reg ) \
  ( ( ( _reg ) >> 16 ) & 0xfU )

#define AARCH64_ID_MMFR0_EL1_AUXREG( _val ) ( ( uint64_t ) ( _val ) << 20 )
#define AARCH64_ID_MMFR0_EL1_AUXREG_SHIFT 20
#define AARCH64_ID_MMFR0_EL1_AUXREG_MASK 0xf00000U
#define AARCH64_ID_MMFR0_EL1_AUXREG_GET( _reg ) \
  ( ( ( _reg ) >> 20 ) & 0xfU )

#define AARCH64_ID_MMFR0_EL1_FCSE( _val ) ( ( uint64_t ) ( _val ) << 24 )
#define AARCH64_ID_MMFR0_EL1_FCSE_SHIFT 24
#define AARCH64_ID_MMFR0_EL1_FCSE_MASK 0xf000000U
#define AARCH64_ID_MMFR0_EL1_FCSE_GET( _reg ) \
  ( ( ( _reg ) >> 24 ) & 0xfU )

#define AARCH64_ID_MMFR0_EL1_INNERSHR( _val ) ( ( uint64_t ) ( _val ) << 28 )
#define AARCH64_ID_MMFR0_EL1_INNERSHR_SHIFT 28
#define AARCH64_ID_MMFR0_EL1_INNERSHR_MASK 0xf0000000U
#define AARCH64_ID_MMFR0_EL1_INNERSHR_GET( _reg ) \
  ( ( ( _reg ) >> 28 ) & 0xfU )
3915
/* Return the current value of the ID_MMFR0_EL1 system register. */
static inline uint64_t _AArch64_Read_id_mmfr0_el1( void )
{
  uint64_t result;

  __asm__ volatile ( "mrs %0, ID_MMFR0_EL1" : "=&r" ( result ) : : "memory" );

  return result;
}
3926
/*
 * ID_MMFR1_EL1, AArch64 Memory Model Feature Register 1.
 *
 * Each 4-bit field has a setter, a _SHIFT, a _MASK, and a _GET accessor.
 * The setters cast _val to uint64_t before shifting: shifting a plain int
 * into bit 31 (e.g. BPRED with _val >= 8) is undefined behavior
 * (CERT INT34-C).
 */

#define AARCH64_ID_MMFR1_EL1_L1HVDVA( _val ) ( ( uint64_t ) ( _val ) << 0 )
#define AARCH64_ID_MMFR1_EL1_L1HVDVA_SHIFT 0
#define AARCH64_ID_MMFR1_EL1_L1HVDVA_MASK 0xfU
#define AARCH64_ID_MMFR1_EL1_L1HVDVA_GET( _reg ) \
  ( ( ( _reg ) >> 0 ) & 0xfU )

#define AARCH64_ID_MMFR1_EL1_L1UNIVA( _val ) ( ( uint64_t ) ( _val ) << 4 )
#define AARCH64_ID_MMFR1_EL1_L1UNIVA_SHIFT 4
#define AARCH64_ID_MMFR1_EL1_L1UNIVA_MASK 0xf0U
#define AARCH64_ID_MMFR1_EL1_L1UNIVA_GET( _reg ) \
  ( ( ( _reg ) >> 4 ) & 0xfU )

#define AARCH64_ID_MMFR1_EL1_L1HVDSW( _val ) ( ( uint64_t ) ( _val ) << 8 )
#define AARCH64_ID_MMFR1_EL1_L1HVDSW_SHIFT 8
#define AARCH64_ID_MMFR1_EL1_L1HVDSW_MASK 0xf00U
#define AARCH64_ID_MMFR1_EL1_L1HVDSW_GET( _reg ) \
  ( ( ( _reg ) >> 8 ) & 0xfU )

#define AARCH64_ID_MMFR1_EL1_L1UNISW( _val ) ( ( uint64_t ) ( _val ) << 12 )
#define AARCH64_ID_MMFR1_EL1_L1UNISW_SHIFT 12
#define AARCH64_ID_MMFR1_EL1_L1UNISW_MASK 0xf000U
#define AARCH64_ID_MMFR1_EL1_L1UNISW_GET( _reg ) \
  ( ( ( _reg ) >> 12 ) & 0xfU )

#define AARCH64_ID_MMFR1_EL1_L1HVD( _val ) ( ( uint64_t ) ( _val ) << 16 )
#define AARCH64_ID_MMFR1_EL1_L1HVD_SHIFT 16
#define AARCH64_ID_MMFR1_EL1_L1HVD_MASK 0xf0000U
#define AARCH64_ID_MMFR1_EL1_L1HVD_GET( _reg ) \
  ( ( ( _reg ) >> 16 ) & 0xfU )

#define AARCH64_ID_MMFR1_EL1_L1UNI( _val ) ( ( uint64_t ) ( _val ) << 20 )
#define AARCH64_ID_MMFR1_EL1_L1UNI_SHIFT 20
#define AARCH64_ID_MMFR1_EL1_L1UNI_MASK 0xf00000U
#define AARCH64_ID_MMFR1_EL1_L1UNI_GET( _reg ) \
  ( ( ( _reg ) >> 20 ) & 0xfU )

#define AARCH64_ID_MMFR1_EL1_L1TSTCLN( _val ) ( ( uint64_t ) ( _val ) << 24 )
#define AARCH64_ID_MMFR1_EL1_L1TSTCLN_SHIFT 24
#define AARCH64_ID_MMFR1_EL1_L1TSTCLN_MASK 0xf000000U
#define AARCH64_ID_MMFR1_EL1_L1TSTCLN_GET( _reg ) \
  ( ( ( _reg ) >> 24 ) & 0xfU )

#define AARCH64_ID_MMFR1_EL1_BPRED( _val ) ( ( uint64_t ) ( _val ) << 28 )
#define AARCH64_ID_MMFR1_EL1_BPRED_SHIFT 28
#define AARCH64_ID_MMFR1_EL1_BPRED_MASK 0xf0000000U
#define AARCH64_ID_MMFR1_EL1_BPRED_GET( _reg ) \
  ( ( ( _reg ) >> 28 ) & 0xfU )
3976
/* Read the ID_MMFR1_EL1 register via MRS (read-only ID register). */
static inline uint64_t _AArch64_Read_id_mmfr1_el1( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, ID_MMFR1_EL1" : "=&r" ( value ) : : "memory"
  );

  return value;
}
3987
/* ID_MMFR2_EL1, AArch64 Memory Model Feature Register 2 */

/*
 * Each field FOO provides: FOO( _val ) to position a value, FOO_SHIFT,
 * FOO_MASK, and FOO_GET( _reg ) to extract the field from a read value.
 */

#define AARCH64_ID_MMFR2_EL1_L1HVDFG( _val ) ( ( _val ) << 0 )
#define AARCH64_ID_MMFR2_EL1_L1HVDFG_SHIFT 0
#define AARCH64_ID_MMFR2_EL1_L1HVDFG_MASK 0xfU
#define AARCH64_ID_MMFR2_EL1_L1HVDFG_GET( _reg ) \
  ( ( ( _reg ) >> 0 ) & 0xfU )

#define AARCH64_ID_MMFR2_EL1_L1HVDBG( _val ) ( ( _val ) << 4 )
#define AARCH64_ID_MMFR2_EL1_L1HVDBG_SHIFT 4
#define AARCH64_ID_MMFR2_EL1_L1HVDBG_MASK 0xf0U
#define AARCH64_ID_MMFR2_EL1_L1HVDBG_GET( _reg ) \
  ( ( ( _reg ) >> 4 ) & 0xfU )

#define AARCH64_ID_MMFR2_EL1_L1HVDRNG( _val ) ( ( _val ) << 8 )
#define AARCH64_ID_MMFR2_EL1_L1HVDRNG_SHIFT 8
#define AARCH64_ID_MMFR2_EL1_L1HVDRNG_MASK 0xf00U
#define AARCH64_ID_MMFR2_EL1_L1HVDRNG_GET( _reg ) \
  ( ( ( _reg ) >> 8 ) & 0xfU )

#define AARCH64_ID_MMFR2_EL1_HVDTLB( _val ) ( ( _val ) << 12 )
#define AARCH64_ID_MMFR2_EL1_HVDTLB_SHIFT 12
#define AARCH64_ID_MMFR2_EL1_HVDTLB_MASK 0xf000U
#define AARCH64_ID_MMFR2_EL1_HVDTLB_GET( _reg ) \
  ( ( ( _reg ) >> 12 ) & 0xfU )

#define AARCH64_ID_MMFR2_EL1_UNITLB( _val ) ( ( _val ) << 16 )
#define AARCH64_ID_MMFR2_EL1_UNITLB_SHIFT 16
#define AARCH64_ID_MMFR2_EL1_UNITLB_MASK 0xf0000U
#define AARCH64_ID_MMFR2_EL1_UNITLB_GET( _reg ) \
  ( ( ( _reg ) >> 16 ) & 0xfU )

#define AARCH64_ID_MMFR2_EL1_MEMBARR( _val ) ( ( _val ) << 20 )
#define AARCH64_ID_MMFR2_EL1_MEMBARR_SHIFT 20
#define AARCH64_ID_MMFR2_EL1_MEMBARR_MASK 0xf00000U
#define AARCH64_ID_MMFR2_EL1_MEMBARR_GET( _reg ) \
  ( ( ( _reg ) >> 20 ) & 0xfU )

#define AARCH64_ID_MMFR2_EL1_WFISTALL( _val ) ( ( _val ) << 24 )
#define AARCH64_ID_MMFR2_EL1_WFISTALL_SHIFT 24
#define AARCH64_ID_MMFR2_EL1_WFISTALL_MASK 0xf000000U
#define AARCH64_ID_MMFR2_EL1_WFISTALL_GET( _reg ) \
  ( ( ( _reg ) >> 24 ) & 0xfU )

#define AARCH64_ID_MMFR2_EL1_HWACCFLG( _val ) ( ( _val ) << 28 )
#define AARCH64_ID_MMFR2_EL1_HWACCFLG_SHIFT 28
#define AARCH64_ID_MMFR2_EL1_HWACCFLG_MASK 0xf0000000U
#define AARCH64_ID_MMFR2_EL1_HWACCFLG_GET( _reg ) \
  ( ( ( _reg ) >> 28 ) & 0xfU )
4037
/* Read the ID_MMFR2_EL1 register via MRS (read-only ID register). */
static inline uint64_t _AArch64_Read_id_mmfr2_el1( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, ID_MMFR2_EL1" : "=&r" ( value ) : : "memory"
  );

  return value;
}
4048
/* ID_MMFR3_EL1, AArch64 Memory Model Feature Register 3 */

/*
 * Each field FOO provides: FOO( _val ) to position a value, FOO_SHIFT,
 * FOO_MASK, and FOO_GET( _reg ) to extract the field from a read value.
 */

#define AARCH64_ID_MMFR3_EL1_CMAINTVA( _val ) ( ( _val ) << 0 )
#define AARCH64_ID_MMFR3_EL1_CMAINTVA_SHIFT 0
#define AARCH64_ID_MMFR3_EL1_CMAINTVA_MASK 0xfU
#define AARCH64_ID_MMFR3_EL1_CMAINTVA_GET( _reg ) \
  ( ( ( _reg ) >> 0 ) & 0xfU )

#define AARCH64_ID_MMFR3_EL1_CMAINTSW( _val ) ( ( _val ) << 4 )
#define AARCH64_ID_MMFR3_EL1_CMAINTSW_SHIFT 4
#define AARCH64_ID_MMFR3_EL1_CMAINTSW_MASK 0xf0U
#define AARCH64_ID_MMFR3_EL1_CMAINTSW_GET( _reg ) \
  ( ( ( _reg ) >> 4 ) & 0xfU )

#define AARCH64_ID_MMFR3_EL1_BPMAINT( _val ) ( ( _val ) << 8 )
#define AARCH64_ID_MMFR3_EL1_BPMAINT_SHIFT 8
#define AARCH64_ID_MMFR3_EL1_BPMAINT_MASK 0xf00U
#define AARCH64_ID_MMFR3_EL1_BPMAINT_GET( _reg ) \
  ( ( ( _reg ) >> 8 ) & 0xfU )

#define AARCH64_ID_MMFR3_EL1_MAINTBCST( _val ) ( ( _val ) << 12 )
#define AARCH64_ID_MMFR3_EL1_MAINTBCST_SHIFT 12
#define AARCH64_ID_MMFR3_EL1_MAINTBCST_MASK 0xf000U
#define AARCH64_ID_MMFR3_EL1_MAINTBCST_GET( _reg ) \
  ( ( ( _reg ) >> 12 ) & 0xfU )

#define AARCH64_ID_MMFR3_EL1_PAN( _val ) ( ( _val ) << 16 )
#define AARCH64_ID_MMFR3_EL1_PAN_SHIFT 16
#define AARCH64_ID_MMFR3_EL1_PAN_MASK 0xf0000U
#define AARCH64_ID_MMFR3_EL1_PAN_GET( _reg ) \
  ( ( ( _reg ) >> 16 ) & 0xfU )

#define AARCH64_ID_MMFR3_EL1_COHWALK( _val ) ( ( _val ) << 20 )
#define AARCH64_ID_MMFR3_EL1_COHWALK_SHIFT 20
#define AARCH64_ID_MMFR3_EL1_COHWALK_MASK 0xf00000U
#define AARCH64_ID_MMFR3_EL1_COHWALK_GET( _reg ) \
  ( ( ( _reg ) >> 20 ) & 0xfU )

#define AARCH64_ID_MMFR3_EL1_CMEMSZ( _val ) ( ( _val ) << 24 )
#define AARCH64_ID_MMFR3_EL1_CMEMSZ_SHIFT 24
#define AARCH64_ID_MMFR3_EL1_CMEMSZ_MASK 0xf000000U
#define AARCH64_ID_MMFR3_EL1_CMEMSZ_GET( _reg ) \
  ( ( ( _reg ) >> 24 ) & 0xfU )

#define AARCH64_ID_MMFR3_EL1_SUPERSEC( _val ) ( ( _val ) << 28 )
#define AARCH64_ID_MMFR3_EL1_SUPERSEC_SHIFT 28
#define AARCH64_ID_MMFR3_EL1_SUPERSEC_MASK 0xf0000000U
#define AARCH64_ID_MMFR3_EL1_SUPERSEC_GET( _reg ) \
  ( ( ( _reg ) >> 28 ) & 0xfU )
4098
/* Read the ID_MMFR3_EL1 register via MRS (read-only ID register). */
static inline uint64_t _AArch64_Read_id_mmfr3_el1( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, ID_MMFR3_EL1" : "=&r" ( value ) : : "memory"
  );

  return value;
}
4109
/* ID_MMFR4_EL1, AArch64 Memory Model Feature Register 4 */

/*
 * Each field FOO provides: FOO( _val ) to position a value, FOO_SHIFT,
 * FOO_MASK, and FOO_GET( _reg ) to extract the field from a read value.
 */

#define AARCH64_ID_MMFR4_EL1_SPECSEI( _val ) ( ( _val ) << 0 )
#define AARCH64_ID_MMFR4_EL1_SPECSEI_SHIFT 0
#define AARCH64_ID_MMFR4_EL1_SPECSEI_MASK 0xfU
#define AARCH64_ID_MMFR4_EL1_SPECSEI_GET( _reg ) \
  ( ( ( _reg ) >> 0 ) & 0xfU )

#define AARCH64_ID_MMFR4_EL1_AC2( _val ) ( ( _val ) << 4 )
#define AARCH64_ID_MMFR4_EL1_AC2_SHIFT 4
#define AARCH64_ID_MMFR4_EL1_AC2_MASK 0xf0U
#define AARCH64_ID_MMFR4_EL1_AC2_GET( _reg ) \
  ( ( ( _reg ) >> 4 ) & 0xfU )

#define AARCH64_ID_MMFR4_EL1_XNX( _val ) ( ( _val ) << 8 )
#define AARCH64_ID_MMFR4_EL1_XNX_SHIFT 8
#define AARCH64_ID_MMFR4_EL1_XNX_MASK 0xf00U
#define AARCH64_ID_MMFR4_EL1_XNX_GET( _reg ) \
  ( ( ( _reg ) >> 8 ) & 0xfU )

#define AARCH64_ID_MMFR4_EL1_CNP( _val ) ( ( _val ) << 12 )
#define AARCH64_ID_MMFR4_EL1_CNP_SHIFT 12
#define AARCH64_ID_MMFR4_EL1_CNP_MASK 0xf000U
#define AARCH64_ID_MMFR4_EL1_CNP_GET( _reg ) \
  ( ( ( _reg ) >> 12 ) & 0xfU )

#define AARCH64_ID_MMFR4_EL1_HPDS( _val ) ( ( _val ) << 16 )
#define AARCH64_ID_MMFR4_EL1_HPDS_SHIFT 16
#define AARCH64_ID_MMFR4_EL1_HPDS_MASK 0xf0000U
#define AARCH64_ID_MMFR4_EL1_HPDS_GET( _reg ) \
  ( ( ( _reg ) >> 16 ) & 0xfU )

#define AARCH64_ID_MMFR4_EL1_LSM( _val ) ( ( _val ) << 20 )
#define AARCH64_ID_MMFR4_EL1_LSM_SHIFT 20
#define AARCH64_ID_MMFR4_EL1_LSM_MASK 0xf00000U
#define AARCH64_ID_MMFR4_EL1_LSM_GET( _reg ) \
  ( ( ( _reg ) >> 20 ) & 0xfU )

#define AARCH64_ID_MMFR4_EL1_CCIDX( _val ) ( ( _val ) << 24 )
#define AARCH64_ID_MMFR4_EL1_CCIDX_SHIFT 24
#define AARCH64_ID_MMFR4_EL1_CCIDX_MASK 0xf000000U
#define AARCH64_ID_MMFR4_EL1_CCIDX_GET( _reg ) \
  ( ( ( _reg ) >> 24 ) & 0xfU )

#define AARCH64_ID_MMFR4_EL1_EVT( _val ) ( ( _val ) << 28 )
#define AARCH64_ID_MMFR4_EL1_EVT_SHIFT 28
#define AARCH64_ID_MMFR4_EL1_EVT_MASK 0xf0000000U
#define AARCH64_ID_MMFR4_EL1_EVT_GET( _reg ) \
  ( ( ( _reg ) >> 28 ) & 0xfU )
4159
/* Read the ID_MMFR4_EL1 register via MRS (read-only ID register). */
static inline uint64_t _AArch64_Read_id_mmfr4_el1( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, ID_MMFR4_EL1" : "=&r" ( value ) : : "memory"
  );

  return value;
}
4170
/* ID_MMFR5_EL1, AArch64 Memory Model Feature Register 5 */

/* ETS field: encode with ETS( _val ), extract with ETS_GET( _reg ). */
#define AARCH64_ID_MMFR5_EL1_ETS( _val ) ( ( _val ) << 0 )
#define AARCH64_ID_MMFR5_EL1_ETS_SHIFT 0
#define AARCH64_ID_MMFR5_EL1_ETS_MASK 0xfU
#define AARCH64_ID_MMFR5_EL1_ETS_GET( _reg ) \
  ( ( ( _reg ) >> 0 ) & 0xfU )
4178
/* Read the ID_MMFR5_EL1 register via MRS (read-only ID register). */
static inline uint64_t _AArch64_Read_id_mmfr5_el1( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, ID_MMFR5_EL1" : "=&r" ( value ) : : "memory"
  );

  return value;
}
4189
/* ID_PFR0_EL1, AArch64 Processor Feature Register 0 */

/*
 * Each field FOO provides: FOO( _val ) to position a value, FOO_SHIFT,
 * FOO_MASK, and FOO_GET( _reg ) to extract the field from a read value.
 */

#define AARCH64_ID_PFR0_EL1_STATE0( _val ) ( ( _val ) << 0 )
#define AARCH64_ID_PFR0_EL1_STATE0_SHIFT 0
#define AARCH64_ID_PFR0_EL1_STATE0_MASK 0xfU
#define AARCH64_ID_PFR0_EL1_STATE0_GET( _reg ) \
  ( ( ( _reg ) >> 0 ) & 0xfU )

#define AARCH64_ID_PFR0_EL1_STATE1( _val ) ( ( _val ) << 4 )
#define AARCH64_ID_PFR0_EL1_STATE1_SHIFT 4
#define AARCH64_ID_PFR0_EL1_STATE1_MASK 0xf0U
#define AARCH64_ID_PFR0_EL1_STATE1_GET( _reg ) \
  ( ( ( _reg ) >> 4 ) & 0xfU )

#define AARCH64_ID_PFR0_EL1_STATE2( _val ) ( ( _val ) << 8 )
#define AARCH64_ID_PFR0_EL1_STATE2_SHIFT 8
#define AARCH64_ID_PFR0_EL1_STATE2_MASK 0xf00U
#define AARCH64_ID_PFR0_EL1_STATE2_GET( _reg ) \
  ( ( ( _reg ) >> 8 ) & 0xfU )

#define AARCH64_ID_PFR0_EL1_STATE3( _val ) ( ( _val ) << 12 )
#define AARCH64_ID_PFR0_EL1_STATE3_SHIFT 12
#define AARCH64_ID_PFR0_EL1_STATE3_MASK 0xf000U
#define AARCH64_ID_PFR0_EL1_STATE3_GET( _reg ) \
  ( ( ( _reg ) >> 12 ) & 0xfU )

#define AARCH64_ID_PFR0_EL1_CSV2( _val ) ( ( _val ) << 16 )
#define AARCH64_ID_PFR0_EL1_CSV2_SHIFT 16
#define AARCH64_ID_PFR0_EL1_CSV2_MASK 0xf0000U
#define AARCH64_ID_PFR0_EL1_CSV2_GET( _reg ) \
  ( ( ( _reg ) >> 16 ) & 0xfU )

#define AARCH64_ID_PFR0_EL1_AMU( _val ) ( ( _val ) << 20 )
#define AARCH64_ID_PFR0_EL1_AMU_SHIFT 20
#define AARCH64_ID_PFR0_EL1_AMU_MASK 0xf00000U
#define AARCH64_ID_PFR0_EL1_AMU_GET( _reg ) \
  ( ( ( _reg ) >> 20 ) & 0xfU )

#define AARCH64_ID_PFR0_EL1_DIT( _val ) ( ( _val ) << 24 )
#define AARCH64_ID_PFR0_EL1_DIT_SHIFT 24
#define AARCH64_ID_PFR0_EL1_DIT_MASK 0xf000000U
#define AARCH64_ID_PFR0_EL1_DIT_GET( _reg ) \
  ( ( ( _reg ) >> 24 ) & 0xfU )

#define AARCH64_ID_PFR0_EL1_RAS( _val ) ( ( _val ) << 28 )
#define AARCH64_ID_PFR0_EL1_RAS_SHIFT 28
#define AARCH64_ID_PFR0_EL1_RAS_MASK 0xf0000000U
#define AARCH64_ID_PFR0_EL1_RAS_GET( _reg ) \
  ( ( ( _reg ) >> 28 ) & 0xfU )
4239
/* Read the ID_PFR0_EL1 register via MRS (read-only ID register). */
static inline uint64_t _AArch64_Read_id_pfr0_el1( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, ID_PFR0_EL1" : "=&r" ( value ) : : "memory"
  );

  return value;
}
4250
/* ID_PFR1_EL1, AArch64 Processor Feature Register 1 */

/*
 * Each field FOO provides: FOO( _val ) to position a value, FOO_SHIFT,
 * FOO_MASK, and FOO_GET( _reg ) to extract the field from a read value.
 */

#define AARCH64_ID_PFR1_EL1_PROGMOD( _val ) ( ( _val ) << 0 )
#define AARCH64_ID_PFR1_EL1_PROGMOD_SHIFT 0
#define AARCH64_ID_PFR1_EL1_PROGMOD_MASK 0xfU
#define AARCH64_ID_PFR1_EL1_PROGMOD_GET( _reg ) \
  ( ( ( _reg ) >> 0 ) & 0xfU )

#define AARCH64_ID_PFR1_EL1_SECURITY( _val ) ( ( _val ) << 4 )
#define AARCH64_ID_PFR1_EL1_SECURITY_SHIFT 4
#define AARCH64_ID_PFR1_EL1_SECURITY_MASK 0xf0U
#define AARCH64_ID_PFR1_EL1_SECURITY_GET( _reg ) \
  ( ( ( _reg ) >> 4 ) & 0xfU )

#define AARCH64_ID_PFR1_EL1_MPROGMOD( _val ) ( ( _val ) << 8 )
#define AARCH64_ID_PFR1_EL1_MPROGMOD_SHIFT 8
#define AARCH64_ID_PFR1_EL1_MPROGMOD_MASK 0xf00U
#define AARCH64_ID_PFR1_EL1_MPROGMOD_GET( _reg ) \
  ( ( ( _reg ) >> 8 ) & 0xfU )

#define AARCH64_ID_PFR1_EL1_VIRTUALIZATION( _val ) ( ( _val ) << 12 )
#define AARCH64_ID_PFR1_EL1_VIRTUALIZATION_SHIFT 12
#define AARCH64_ID_PFR1_EL1_VIRTUALIZATION_MASK 0xf000U
#define AARCH64_ID_PFR1_EL1_VIRTUALIZATION_GET( _reg ) \
  ( ( ( _reg ) >> 12 ) & 0xfU )

#define AARCH64_ID_PFR1_EL1_GENTIMER( _val ) ( ( _val ) << 16 )
#define AARCH64_ID_PFR1_EL1_GENTIMER_SHIFT 16
#define AARCH64_ID_PFR1_EL1_GENTIMER_MASK 0xf0000U
#define AARCH64_ID_PFR1_EL1_GENTIMER_GET( _reg ) \
  ( ( ( _reg ) >> 16 ) & 0xfU )

#define AARCH64_ID_PFR1_EL1_SEC_FRAC( _val ) ( ( _val ) << 20 )
#define AARCH64_ID_PFR1_EL1_SEC_FRAC_SHIFT 20
#define AARCH64_ID_PFR1_EL1_SEC_FRAC_MASK 0xf00000U
#define AARCH64_ID_PFR1_EL1_SEC_FRAC_GET( _reg ) \
  ( ( ( _reg ) >> 20 ) & 0xfU )

#define AARCH64_ID_PFR1_EL1_VIRT_FRAC( _val ) ( ( _val ) << 24 )
#define AARCH64_ID_PFR1_EL1_VIRT_FRAC_SHIFT 24
#define AARCH64_ID_PFR1_EL1_VIRT_FRAC_MASK 0xf000000U
#define AARCH64_ID_PFR1_EL1_VIRT_FRAC_GET( _reg ) \
  ( ( ( _reg ) >> 24 ) & 0xfU )

#define AARCH64_ID_PFR1_EL1_GIC( _val ) ( ( _val ) << 28 )
#define AARCH64_ID_PFR1_EL1_GIC_SHIFT 28
#define AARCH64_ID_PFR1_EL1_GIC_MASK 0xf0000000U
#define AARCH64_ID_PFR1_EL1_GIC_GET( _reg ) \
  ( ( ( _reg ) >> 28 ) & 0xfU )
4300
/* Read the ID_PFR1_EL1 register via MRS (read-only ID register). */
static inline uint64_t _AArch64_Read_id_pfr1_el1( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, ID_PFR1_EL1" : "=&r" ( value ) : : "memory"
  );

  return value;
}
4311
/* ID_PFR2_EL1, AArch64 Processor Feature Register 2 */

/*
 * Each field FOO provides: FOO( _val ) to position a value, FOO_SHIFT,
 * FOO_MASK, and FOO_GET( _reg ) to extract the field from a read value.
 */

#define AARCH64_ID_PFR2_EL1_CSV3( _val ) ( ( _val ) << 0 )
#define AARCH64_ID_PFR2_EL1_CSV3_SHIFT 0
#define AARCH64_ID_PFR2_EL1_CSV3_MASK 0xfU
#define AARCH64_ID_PFR2_EL1_CSV3_GET( _reg ) \
  ( ( ( _reg ) >> 0 ) & 0xfU )

#define AARCH64_ID_PFR2_EL1_SSBS( _val ) ( ( _val ) << 4 )
#define AARCH64_ID_PFR2_EL1_SSBS_SHIFT 4
#define AARCH64_ID_PFR2_EL1_SSBS_MASK 0xf0U
#define AARCH64_ID_PFR2_EL1_SSBS_GET( _reg ) \
  ( ( ( _reg ) >> 4 ) & 0xfU )

#define AARCH64_ID_PFR2_EL1_RAS_FRAC( _val ) ( ( _val ) << 8 )
#define AARCH64_ID_PFR2_EL1_RAS_FRAC_SHIFT 8
#define AARCH64_ID_PFR2_EL1_RAS_FRAC_MASK 0xf00U
#define AARCH64_ID_PFR2_EL1_RAS_FRAC_GET( _reg ) \
  ( ( ( _reg ) >> 8 ) & 0xfU )
4331
/* Read the ID_PFR2_EL1 register via MRS (read-only ID register). */
static inline uint64_t _AArch64_Read_id_pfr2_el1( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, ID_PFR2_EL1" : "=&r" ( value ) : : "memory"
  );

  return value;
}
4342
/* IFSR32_EL2, Instruction Fault Status Register (EL2) */

/*
 * FS[3:0] and STATUS overlap at bit 0: which layout applies depends on
 * the translation-table format in use (short vs. long descriptor).
 */
#define AARCH64_IFSR32_EL2_FS_3_0( _val ) ( ( _val ) << 0 )
#define AARCH64_IFSR32_EL2_FS_3_0_SHIFT 0
#define AARCH64_IFSR32_EL2_FS_3_0_MASK 0xfU
#define AARCH64_IFSR32_EL2_FS_3_0_GET( _reg ) \
  ( ( ( _reg ) >> 0 ) & 0xfU )

#define AARCH64_IFSR32_EL2_STATUS( _val ) ( ( _val ) << 0 )
#define AARCH64_IFSR32_EL2_STATUS_SHIFT 0
#define AARCH64_IFSR32_EL2_STATUS_MASK 0x3fU
#define AARCH64_IFSR32_EL2_STATUS_GET( _reg ) \
  ( ( ( _reg ) >> 0 ) & 0x3fU )

/* Single-bit flags */
#define AARCH64_IFSR32_EL2_LPAE 0x200U

#define AARCH64_IFSR32_EL2_FS_4 0x400U

#define AARCH64_IFSR32_EL2_EXT 0x1000U

#define AARCH64_IFSR32_EL2_FNV 0x10000U
4364
/* Read the IFSR32_EL2 register via MRS. */
static inline uint64_t _AArch64_Read_ifsr32_el2( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, IFSR32_EL2" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* Write the IFSR32_EL2 register via MSR. */
static inline void _AArch64_Write_ifsr32_el2( uint64_t value )
{
  __asm__ volatile (
    "msr IFSR32_EL2, %0" : : "r" ( value ) : "memory"
  );
}
4382
/* ISR_EL1, Interrupt Status Register */

/* Pending-interrupt flags: FIQ, IRQ, and SError (asynchronous abort). */
#define AARCH64_ISR_EL1_F 0x40U

#define AARCH64_ISR_EL1_I 0x80U

#define AARCH64_ISR_EL1_A 0x100U
4390
/* Read the ISR_EL1 register via MRS (read-only status register). */
static inline uint64_t _AArch64_Read_isr_el1( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, ISR_EL1" : "=&r" ( value ) : : "memory"
  );

  return value;
}
4401
/* LORC_EL1, LORegion Control (EL1) */

#define AARCH64_LORC_EL1_EN 0x1U

/* DS: descriptor-select field, bits [9:2]. */
#define AARCH64_LORC_EL1_DS( _val ) ( ( _val ) << 2 )
#define AARCH64_LORC_EL1_DS_SHIFT 2
#define AARCH64_LORC_EL1_DS_MASK 0x3fcU
#define AARCH64_LORC_EL1_DS_GET( _reg ) \
  ( ( ( _reg ) >> 2 ) & 0xffU )
4411
/* Read the LORC_EL1 register via MRS. */
static inline uint64_t _AArch64_Read_lorc_el1( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, LORC_EL1" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* Write the LORC_EL1 register via MSR. */
static inline void _AArch64_Write_lorc_el1( uint64_t value )
{
  __asm__ volatile (
    "msr LORC_EL1, %0" : : "r" ( value ) : "memory"
  );
}
4429
/* LOREA_EL1, LORegion End Address (EL1) */

/*
 * NOTE: the encode macros shift by 16 and 48; pass a 64-bit value
 * (e.g. a ULL literal) — shifting a 32-bit int by >= 32 is undefined.
 */
#define AARCH64_LOREA_EL1_EA_47_16( _val ) ( ( _val ) << 16 )
#define AARCH64_LOREA_EL1_EA_47_16_SHIFT 16
#define AARCH64_LOREA_EL1_EA_47_16_MASK 0xffffffff0000ULL
#define AARCH64_LOREA_EL1_EA_47_16_GET( _reg ) \
  ( ( ( _reg ) >> 16 ) & 0xffffffffULL )

#define AARCH64_LOREA_EL1_EA_51_48( _val ) ( ( _val ) << 48 )
#define AARCH64_LOREA_EL1_EA_51_48_SHIFT 48
#define AARCH64_LOREA_EL1_EA_51_48_MASK 0xf000000000000ULL
#define AARCH64_LOREA_EL1_EA_51_48_GET( _reg ) \
  ( ( ( _reg ) >> 48 ) & 0xfULL )
4443
/* Read the LOREA_EL1 register via MRS. */
static inline uint64_t _AArch64_Read_lorea_el1( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, LOREA_EL1" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* Write the LOREA_EL1 register via MSR. */
static inline void _AArch64_Write_lorea_el1( uint64_t value )
{
  __asm__ volatile (
    "msr LOREA_EL1, %0" : : "r" ( value ) : "memory"
  );
}
4461
/* LORID_EL1, LORegionID (EL1) */

/* LR: number of LORegions supported; LD: number of LORegion descriptors. */
#define AARCH64_LORID_EL1_LR( _val ) ( ( _val ) << 0 )
#define AARCH64_LORID_EL1_LR_SHIFT 0
#define AARCH64_LORID_EL1_LR_MASK 0xffU
#define AARCH64_LORID_EL1_LR_GET( _reg ) \
  ( ( ( _reg ) >> 0 ) & 0xffU )

#define AARCH64_LORID_EL1_LD( _val ) ( ( _val ) << 16 )
#define AARCH64_LORID_EL1_LD_SHIFT 16
#define AARCH64_LORID_EL1_LD_MASK 0xff0000U
#define AARCH64_LORID_EL1_LD_GET( _reg ) \
  ( ( ( _reg ) >> 16 ) & 0xffU )
4475
/* Read the LORID_EL1 register via MRS (read-only ID register). */
static inline uint64_t _AArch64_Read_lorid_el1( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, LORID_EL1" : "=&r" ( value ) : : "memory"
  );

  return value;
}
4486
/* LORN_EL1, LORegion Number (EL1) */

/* NUM: selects the LORegion descriptor to access, bits [7:0]. */
#define AARCH64_LORN_EL1_NUM( _val ) ( ( _val ) << 0 )
#define AARCH64_LORN_EL1_NUM_SHIFT 0
#define AARCH64_LORN_EL1_NUM_MASK 0xffU
#define AARCH64_LORN_EL1_NUM_GET( _reg ) \
  ( ( ( _reg ) >> 0 ) & 0xffU )
4494
/* Read the LORN_EL1 register via MRS. */
static inline uint64_t _AArch64_Read_lorn_el1( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, LORN_EL1" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* Write the LORN_EL1 register via MSR. */
static inline void _AArch64_Write_lorn_el1( uint64_t value )
{
  __asm__ volatile (
    "msr LORN_EL1, %0" : : "r" ( value ) : "memory"
  );
}
4512
/* LORSA_EL1, LORegion Start Address (EL1) */

#define AARCH64_LORSA_EL1_VALID 0x1U

/*
 * NOTE: the encode macros shift by 16 and 48; pass a 64-bit value
 * (e.g. a ULL literal) — shifting a 32-bit int by >= 32 is undefined.
 */
#define AARCH64_LORSA_EL1_SA_47_16( _val ) ( ( _val ) << 16 )
#define AARCH64_LORSA_EL1_SA_47_16_SHIFT 16
#define AARCH64_LORSA_EL1_SA_47_16_MASK 0xffffffff0000ULL
#define AARCH64_LORSA_EL1_SA_47_16_GET( _reg ) \
  ( ( ( _reg ) >> 16 ) & 0xffffffffULL )

#define AARCH64_LORSA_EL1_SA_51_48( _val ) ( ( _val ) << 48 )
#define AARCH64_LORSA_EL1_SA_51_48_SHIFT 48
#define AARCH64_LORSA_EL1_SA_51_48_MASK 0xf000000000000ULL
#define AARCH64_LORSA_EL1_SA_51_48_GET( _reg ) \
  ( ( ( _reg ) >> 48 ) & 0xfULL )
4528
/* Read the LORSA_EL1 register via MRS. */
static inline uint64_t _AArch64_Read_lorsa_el1( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, LORSA_EL1" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* Write the LORSA_EL1 register via MSR. */
static inline void _AArch64_Write_lorsa_el1( uint64_t value )
{
  __asm__ volatile (
    "msr LORSA_EL1, %0" : : "r" ( value ) : "memory"
  );
}
4546
/* MAIR_EL1, Memory Attribute Indirection Register (EL1) */

/*
 * ATTRn positions an 8-bit memory attribute encoding at byte n.
 * NOTE: ATTR4..ATTR7 shift by >= 32; pass a 64-bit value (e.g. a ULL
 * literal) — shifting a 32-bit int by >= 32 is undefined.
 */
#define AARCH64_MAIR_EL1_ATTR0( _val ) ( ( _val ) << 0 )
#define AARCH64_MAIR_EL1_ATTR1( _val ) ( ( _val ) << 8 )
#define AARCH64_MAIR_EL1_ATTR2( _val ) ( ( _val ) << 16 )
#define AARCH64_MAIR_EL1_ATTR3( _val ) ( ( _val ) << 24 )
#define AARCH64_MAIR_EL1_ATTR4( _val ) ( ( _val ) << 32 )
#define AARCH64_MAIR_EL1_ATTR5( _val ) ( ( _val ) << 40 )
#define AARCH64_MAIR_EL1_ATTR6( _val ) ( ( _val ) << 48 )
#define AARCH64_MAIR_EL1_ATTR7( _val ) ( ( _val ) << 56 )
4557
/* Read the MAIR_EL1 register via MRS. */
static inline uint64_t _AArch64_Read_mair_el1( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, MAIR_EL1" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* Write the MAIR_EL1 register via MSR. */
static inline void _AArch64_Write_mair_el1( uint64_t value )
{
  __asm__ volatile (
    "msr MAIR_EL1, %0" : : "r" ( value ) : "memory"
  );
}
4575
/* MAIR_EL2, Memory Attribute Indirection Register (EL2) */

/* Read the MAIR_EL2 register via MRS. */
static inline uint64_t _AArch64_Read_mair_el2( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, MAIR_EL2" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* Write the MAIR_EL2 register via MSR. */
static inline void _AArch64_Write_mair_el2( uint64_t value )
{
  __asm__ volatile (
    "msr MAIR_EL2, %0" : : "r" ( value ) : "memory"
  );
}
4595
/* MAIR_EL3, Memory Attribute Indirection Register (EL3) */

/* Read the MAIR_EL3 register via MRS. */
static inline uint64_t _AArch64_Read_mair_el3( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, MAIR_EL3" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* Write the MAIR_EL3 register via MSR. */
static inline void _AArch64_Write_mair_el3( uint64_t value )
{
  __asm__ volatile (
    "msr MAIR_EL3, %0" : : "r" ( value ) : "memory"
  );
}
4615
/* MIDR_EL1, Main ID Register */

/*
 * Each field FOO provides: FOO( _val ) to position a value, FOO_SHIFT,
 * FOO_MASK, and FOO_GET( _reg ) to extract the field from a read value.
 */

#define AARCH64_MIDR_EL1_REVISION( _val ) ( ( _val ) << 0 )
#define AARCH64_MIDR_EL1_REVISION_SHIFT 0
#define AARCH64_MIDR_EL1_REVISION_MASK 0xfU
#define AARCH64_MIDR_EL1_REVISION_GET( _reg ) \
  ( ( ( _reg ) >> 0 ) & 0xfU )

#define AARCH64_MIDR_EL1_PARTNUM( _val ) ( ( _val ) << 4 )
#define AARCH64_MIDR_EL1_PARTNUM_SHIFT 4
#define AARCH64_MIDR_EL1_PARTNUM_MASK 0xfff0U
#define AARCH64_MIDR_EL1_PARTNUM_GET( _reg ) \
  ( ( ( _reg ) >> 4 ) & 0xfffU )

#define AARCH64_MIDR_EL1_ARCHITECTURE( _val ) ( ( _val ) << 16 )
#define AARCH64_MIDR_EL1_ARCHITECTURE_SHIFT 16
#define AARCH64_MIDR_EL1_ARCHITECTURE_MASK 0xf0000U
#define AARCH64_MIDR_EL1_ARCHITECTURE_GET( _reg ) \
  ( ( ( _reg ) >> 16 ) & 0xfU )

#define AARCH64_MIDR_EL1_VARIANT( _val ) ( ( _val ) << 20 )
#define AARCH64_MIDR_EL1_VARIANT_SHIFT 20
#define AARCH64_MIDR_EL1_VARIANT_MASK 0xf00000U
#define AARCH64_MIDR_EL1_VARIANT_GET( _reg ) \
  ( ( ( _reg ) >> 20 ) & 0xfU )

#define AARCH64_MIDR_EL1_IMPLEMENTER( _val ) ( ( _val ) << 24 )
#define AARCH64_MIDR_EL1_IMPLEMENTER_SHIFT 24
#define AARCH64_MIDR_EL1_IMPLEMENTER_MASK 0xff000000U
#define AARCH64_MIDR_EL1_IMPLEMENTER_GET( _reg ) \
  ( ( ( _reg ) >> 24 ) & 0xffU )
4647
/* Read the MIDR_EL1 register via MRS (read-only ID register). */
static inline uint64_t _AArch64_Read_midr_el1( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, MIDR_EL1" : "=&r" ( value ) : : "memory"
  );

  return value;
}
4658
/* MPIDR_EL1, Multiprocessor Affinity Register */

/*
 * Affinity level fields Aff0..Aff3 plus the MT and U flag bits.
 * NOTE: AFF3 shifts by 32; pass a 64-bit value (e.g. a ULL literal) —
 * shifting a 32-bit int by >= 32 is undefined.
 */
#define AARCH64_MPIDR_EL1_AFF0( _val ) ( ( _val ) << 0 )
#define AARCH64_MPIDR_EL1_AFF0_SHIFT 0
#define AARCH64_MPIDR_EL1_AFF0_MASK 0xffU
#define AARCH64_MPIDR_EL1_AFF0_GET( _reg ) \
  ( ( ( _reg ) >> 0 ) & 0xffU )

#define AARCH64_MPIDR_EL1_AFF1( _val ) ( ( _val ) << 8 )
#define AARCH64_MPIDR_EL1_AFF1_SHIFT 8
#define AARCH64_MPIDR_EL1_AFF1_MASK 0xff00U
#define AARCH64_MPIDR_EL1_AFF1_GET( _reg ) \
  ( ( ( _reg ) >> 8 ) & 0xffU )

#define AARCH64_MPIDR_EL1_AFF2( _val ) ( ( _val ) << 16 )
#define AARCH64_MPIDR_EL1_AFF2_SHIFT 16
#define AARCH64_MPIDR_EL1_AFF2_MASK 0xff0000U
#define AARCH64_MPIDR_EL1_AFF2_GET( _reg ) \
  ( ( ( _reg ) >> 16 ) & 0xffU )

#define AARCH64_MPIDR_EL1_MT 0x1000000U

#define AARCH64_MPIDR_EL1_U 0x40000000U

#define AARCH64_MPIDR_EL1_AFF3( _val ) ( ( _val ) << 32 )
#define AARCH64_MPIDR_EL1_AFF3_SHIFT 32
#define AARCH64_MPIDR_EL1_AFF3_MASK 0xff00000000ULL
#define AARCH64_MPIDR_EL1_AFF3_GET( _reg ) \
  ( ( ( _reg ) >> 32 ) & 0xffULL )
4688
/* Read the MPIDR_EL1 register via MRS (read-only ID register). */
static inline uint64_t _AArch64_Read_mpidr_el1( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, MPIDR_EL1" : "=&r" ( value ) : : "memory"
  );

  return value;
}
4699
/* MVFR0_EL1, AArch64 Media and VFP Feature Register 0 */

/*
 * Each field FOO provides: FOO( _val ) to position a value, FOO_SHIFT,
 * FOO_MASK, and FOO_GET( _reg ) to extract the field from a read value.
 */

#define AARCH64_MVFR0_EL1_SIMDREG( _val ) ( ( _val ) << 0 )
#define AARCH64_MVFR0_EL1_SIMDREG_SHIFT 0
#define AARCH64_MVFR0_EL1_SIMDREG_MASK 0xfU
#define AARCH64_MVFR0_EL1_SIMDREG_GET( _reg ) \
  ( ( ( _reg ) >> 0 ) & 0xfU )

#define AARCH64_MVFR0_EL1_FPSP( _val ) ( ( _val ) << 4 )
#define AARCH64_MVFR0_EL1_FPSP_SHIFT 4
#define AARCH64_MVFR0_EL1_FPSP_MASK 0xf0U
#define AARCH64_MVFR0_EL1_FPSP_GET( _reg ) \
  ( ( ( _reg ) >> 4 ) & 0xfU )

#define AARCH64_MVFR0_EL1_FPDP( _val ) ( ( _val ) << 8 )
#define AARCH64_MVFR0_EL1_FPDP_SHIFT 8
#define AARCH64_MVFR0_EL1_FPDP_MASK 0xf00U
#define AARCH64_MVFR0_EL1_FPDP_GET( _reg ) \
  ( ( ( _reg ) >> 8 ) & 0xfU )

#define AARCH64_MVFR0_EL1_FPTRAP( _val ) ( ( _val ) << 12 )
#define AARCH64_MVFR0_EL1_FPTRAP_SHIFT 12
#define AARCH64_MVFR0_EL1_FPTRAP_MASK 0xf000U
#define AARCH64_MVFR0_EL1_FPTRAP_GET( _reg ) \
  ( ( ( _reg ) >> 12 ) & 0xfU )

#define AARCH64_MVFR0_EL1_FPDIVIDE( _val ) ( ( _val ) << 16 )
#define AARCH64_MVFR0_EL1_FPDIVIDE_SHIFT 16
#define AARCH64_MVFR0_EL1_FPDIVIDE_MASK 0xf0000U
#define AARCH64_MVFR0_EL1_FPDIVIDE_GET( _reg ) \
  ( ( ( _reg ) >> 16 ) & 0xfU )

#define AARCH64_MVFR0_EL1_FPSQRT( _val ) ( ( _val ) << 20 )
#define AARCH64_MVFR0_EL1_FPSQRT_SHIFT 20
#define AARCH64_MVFR0_EL1_FPSQRT_MASK 0xf00000U
#define AARCH64_MVFR0_EL1_FPSQRT_GET( _reg ) \
  ( ( ( _reg ) >> 20 ) & 0xfU )

#define AARCH64_MVFR0_EL1_FPSHVEC( _val ) ( ( _val ) << 24 )
#define AARCH64_MVFR0_EL1_FPSHVEC_SHIFT 24
#define AARCH64_MVFR0_EL1_FPSHVEC_MASK 0xf000000U
#define AARCH64_MVFR0_EL1_FPSHVEC_GET( _reg ) \
  ( ( ( _reg ) >> 24 ) & 0xfU )

#define AARCH64_MVFR0_EL1_FPROUND( _val ) ( ( _val ) << 28 )
#define AARCH64_MVFR0_EL1_FPROUND_SHIFT 28
#define AARCH64_MVFR0_EL1_FPROUND_MASK 0xf0000000U
#define AARCH64_MVFR0_EL1_FPROUND_GET( _reg ) \
  ( ( ( _reg ) >> 28 ) & 0xfU )
4749
/* Read the MVFR0_EL1 register via MRS (read-only ID register). */
static inline uint64_t _AArch64_Read_mvfr0_el1( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, MVFR0_EL1" : "=&r" ( value ) : : "memory"
  );

  return value;
}
4760
/* MVFR1_EL1, AArch64 Media and VFP Feature Register 1 */

/*
 * Each field FOO provides: FOO( _val ) to position a value, FOO_SHIFT,
 * FOO_MASK, and FOO_GET( _reg ) to extract the field from a read value.
 */

#define AARCH64_MVFR1_EL1_FPFTZ( _val ) ( ( _val ) << 0 )
#define AARCH64_MVFR1_EL1_FPFTZ_SHIFT 0
#define AARCH64_MVFR1_EL1_FPFTZ_MASK 0xfU
#define AARCH64_MVFR1_EL1_FPFTZ_GET( _reg ) \
  ( ( ( _reg ) >> 0 ) & 0xfU )

#define AARCH64_MVFR1_EL1_FPDNAN( _val ) ( ( _val ) << 4 )
#define AARCH64_MVFR1_EL1_FPDNAN_SHIFT 4
#define AARCH64_MVFR1_EL1_FPDNAN_MASK 0xf0U
#define AARCH64_MVFR1_EL1_FPDNAN_GET( _reg ) \
  ( ( ( _reg ) >> 4 ) & 0xfU )

#define AARCH64_MVFR1_EL1_SIMDLS( _val ) ( ( _val ) << 8 )
#define AARCH64_MVFR1_EL1_SIMDLS_SHIFT 8
#define AARCH64_MVFR1_EL1_SIMDLS_MASK 0xf00U
#define AARCH64_MVFR1_EL1_SIMDLS_GET( _reg ) \
  ( ( ( _reg ) >> 8 ) & 0xfU )

#define AARCH64_MVFR1_EL1_SIMDINT( _val ) ( ( _val ) << 12 )
#define AARCH64_MVFR1_EL1_SIMDINT_SHIFT 12
#define AARCH64_MVFR1_EL1_SIMDINT_MASK 0xf000U
#define AARCH64_MVFR1_EL1_SIMDINT_GET( _reg ) \
  ( ( ( _reg ) >> 12 ) & 0xfU )

#define AARCH64_MVFR1_EL1_SIMDSP( _val ) ( ( _val ) << 16 )
#define AARCH64_MVFR1_EL1_SIMDSP_SHIFT 16
#define AARCH64_MVFR1_EL1_SIMDSP_MASK 0xf0000U
#define AARCH64_MVFR1_EL1_SIMDSP_GET( _reg ) \
  ( ( ( _reg ) >> 16 ) & 0xfU )

#define AARCH64_MVFR1_EL1_SIMDHP( _val ) ( ( _val ) << 20 )
#define AARCH64_MVFR1_EL1_SIMDHP_SHIFT 20
#define AARCH64_MVFR1_EL1_SIMDHP_MASK 0xf00000U
#define AARCH64_MVFR1_EL1_SIMDHP_GET( _reg ) \
  ( ( ( _reg ) >> 20 ) & 0xfU )

#define AARCH64_MVFR1_EL1_FPHP( _val ) ( ( _val ) << 24 )
#define AARCH64_MVFR1_EL1_FPHP_SHIFT 24
#define AARCH64_MVFR1_EL1_FPHP_MASK 0xf000000U
#define AARCH64_MVFR1_EL1_FPHP_GET( _reg ) \
  ( ( ( _reg ) >> 24 ) & 0xfU )

#define AARCH64_MVFR1_EL1_SIMDFMAC( _val ) ( ( _val ) << 28 )
#define AARCH64_MVFR1_EL1_SIMDFMAC_SHIFT 28
#define AARCH64_MVFR1_EL1_SIMDFMAC_MASK 0xf0000000U
#define AARCH64_MVFR1_EL1_SIMDFMAC_GET( _reg ) \
  ( ( ( _reg ) >> 28 ) & 0xfU )
4810
/* Read the MVFR1_EL1 register via MRS (read-only ID register). */
static inline uint64_t _AArch64_Read_mvfr1_el1( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, MVFR1_EL1" : "=&r" ( value ) : : "memory"
  );

  return value;
}
4821
/* MVFR2_EL1, AArch64 Media and VFP Feature Register 2 */

/*
 * Each field FOO provides: FOO( _val ) to position a value, FOO_SHIFT,
 * FOO_MASK, and FOO_GET( _reg ) to extract the field from a read value.
 */

#define AARCH64_MVFR2_EL1_SIMDMISC( _val ) ( ( _val ) << 0 )
#define AARCH64_MVFR2_EL1_SIMDMISC_SHIFT 0
#define AARCH64_MVFR2_EL1_SIMDMISC_MASK 0xfU
#define AARCH64_MVFR2_EL1_SIMDMISC_GET( _reg ) \
  ( ( ( _reg ) >> 0 ) & 0xfU )

#define AARCH64_MVFR2_EL1_FPMISC( _val ) ( ( _val ) << 4 )
#define AARCH64_MVFR2_EL1_FPMISC_SHIFT 4
#define AARCH64_MVFR2_EL1_FPMISC_MASK 0xf0U
#define AARCH64_MVFR2_EL1_FPMISC_GET( _reg ) \
  ( ( ( _reg ) >> 4 ) & 0xfU )
4835
/* Read the MVFR2_EL1 register via MRS (read-only ID register). */
static inline uint64_t _AArch64_Read_mvfr2_el1( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, MVFR2_EL1" : "=&r" ( value ) : : "memory"
  );

  return value;
}
4846
/* PAR_EL1, Physical Address Register */

#define AARCH64_PAR_EL1_F 0x1U

#define AARCH64_PAR_EL1_FST( _val ) ( ( _val ) << 1 )
#define AARCH64_PAR_EL1_FST_SHIFT 1
#define AARCH64_PAR_EL1_FST_MASK 0x7eU
#define AARCH64_PAR_EL1_FST_GET( _reg ) \
  ( ( ( _reg ) >> 1 ) & 0x3fU )

#define AARCH64_PAR_EL1_SH( _val ) ( ( _val ) << 7 )
#define AARCH64_PAR_EL1_SH_SHIFT 7
#define AARCH64_PAR_EL1_SH_MASK 0x180U
#define AARCH64_PAR_EL1_SH_GET( _reg ) \
  ( ( ( _reg ) >> 7 ) & 0x3U )

#define AARCH64_PAR_EL1_PTW 0x100U

/* NS and S overlap at bit 9: the meaning depends on the F flag. */
#define AARCH64_PAR_EL1_NS 0x200U

#define AARCH64_PAR_EL1_S 0x200U

/*
 * NOTE: the PA/ATTR encode macros shift by 12, 48, and 56; pass a
 * 64-bit value (e.g. a ULL literal) for the high fields — shifting a
 * 32-bit int by >= 32 is undefined.
 */
#define AARCH64_PAR_EL1_PA_47_12( _val ) ( ( _val ) << 12 )
#define AARCH64_PAR_EL1_PA_47_12_SHIFT 12
#define AARCH64_PAR_EL1_PA_47_12_MASK 0xfffffffff000ULL
#define AARCH64_PAR_EL1_PA_47_12_GET( _reg ) \
  ( ( ( _reg ) >> 12 ) & 0xfffffffffULL )

#define AARCH64_PAR_EL1_PA_51_48( _val ) ( ( _val ) << 48 )
#define AARCH64_PAR_EL1_PA_51_48_SHIFT 48
#define AARCH64_PAR_EL1_PA_51_48_MASK 0xf000000000000ULL
#define AARCH64_PAR_EL1_PA_51_48_GET( _reg ) \
  ( ( ( _reg ) >> 48 ) & 0xfULL )

#define AARCH64_PAR_EL1_ATTR( _val ) ( ( _val ) << 56 )
#define AARCH64_PAR_EL1_ATTR_SHIFT 56
#define AARCH64_PAR_EL1_ATTR_MASK 0xff00000000000000ULL
#define AARCH64_PAR_EL1_ATTR_GET( _reg ) \
  ( ( ( _reg ) >> 56 ) & 0xffULL )
4886
4887static inline uint64_t _AArch64_Read_par_el1( void )
4888{
4889 uint64_t value;
4890
4891 __asm__ volatile (
4892 "mrs %0, PAR_EL1" : "=&r" ( value ) : : "memory"
4893 );
4894
4895 return value;
4896}
4897
4898static inline void _AArch64_Write_par_el1( uint64_t value )
4899{
4900 __asm__ volatile (
4901 "msr PAR_EL1, %0" : : "r" ( value ) : "memory"
4902 );
4903}
4904
/* REVIDR_EL1, Revision ID Register */

/* Reads the REVIDR_EL1 ID register (read-only, no writer provided). */
static inline uint64_t _AArch64_Read_revidr_el1( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, REVIDR_EL1" : "=&r" ( value ) : : "memory"
  );

  return value;
}
4917
/* RGSR_EL1, Random Allocation Tag Seed Register. */

#define AARCH64_RGSR_EL1_TAG( _val ) ( ( _val ) << 0 )
#define AARCH64_RGSR_EL1_TAG_SHIFT 0
#define AARCH64_RGSR_EL1_TAG_MASK 0xfU
#define AARCH64_RGSR_EL1_TAG_GET( _reg ) \
  ( ( ( _reg ) >> 0 ) & 0xfU )

#define AARCH64_RGSR_EL1_SEED( _val ) ( ( _val ) << 8 )
#define AARCH64_RGSR_EL1_SEED_SHIFT 8
#define AARCH64_RGSR_EL1_SEED_MASK 0xffff00U
#define AARCH64_RGSR_EL1_SEED_GET( _reg ) \
  ( ( ( _reg ) >> 8 ) & 0xffffU )

/* Reads the RGSR_EL1 register. */
static inline uint64_t _AArch64_Read_rgsr_el1( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, RGSR_EL1" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* Writes the RGSR_EL1 register. */
static inline void _AArch64_Write_rgsr_el1( uint64_t value )
{
  __asm__ volatile (
    "msr RGSR_EL1, %0" : : "r" ( value ) : "memory"
  );
}
4949
/* RMR_EL1, Reset Management Register (EL1) */

#define AARCH64_RMR_EL1_AA64 0x1U

#define AARCH64_RMR_EL1_RR 0x2U

/* Reads the RMR_EL1 register. */
static inline uint64_t _AArch64_Read_rmr_el1( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, RMR_EL1" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* Writes the RMR_EL1 register. */
static inline void _AArch64_Write_rmr_el1( uint64_t value )
{
  __asm__ volatile (
    "msr RMR_EL1, %0" : : "r" ( value ) : "memory"
  );
}

/* RMR_EL2, Reset Management Register (EL2) */

#define AARCH64_RMR_EL2_AA64 0x1U

#define AARCH64_RMR_EL2_RR 0x2U

/* Reads the RMR_EL2 register. */
static inline uint64_t _AArch64_Read_rmr_el2( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, RMR_EL2" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* Writes the RMR_EL2 register. */
static inline void _AArch64_Write_rmr_el2( uint64_t value )
{
  __asm__ volatile (
    "msr RMR_EL2, %0" : : "r" ( value ) : "memory"
  );
}

/* RMR_EL3, Reset Management Register (EL3) */

#define AARCH64_RMR_EL3_AA64 0x1U

#define AARCH64_RMR_EL3_RR 0x2U

/* Reads the RMR_EL3 register. */
static inline uint64_t _AArch64_Read_rmr_el3( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, RMR_EL3" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* Writes the RMR_EL3 register. */
static inline void _AArch64_Write_rmr_el3( uint64_t value )
{
  __asm__ volatile (
    "msr RMR_EL3, %0" : : "r" ( value ) : "memory"
  );
}
5021
/* RNDR, Random Number */

/* Reads the RNDR register (read-only random-number source). */
static inline uint64_t _AArch64_Read_rndr( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, RNDR" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* RNDRRS, Reseeded Random Number */

/* Reads the RNDRRS register (read-only reseeded random-number source). */
static inline uint64_t _AArch64_Read_rndrrs( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, RNDRRS" : "=&r" ( value ) : : "memory"
  );

  return value;
}
5047
/* RVBAR_EL1, Reset Vector Base Address Register (if EL2 and EL3 not implemented) */

/* Reads the RVBAR_EL1 register (read-only, no writer provided). */
static inline uint64_t _AArch64_Read_rvbar_el1( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, RVBAR_EL1" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* RVBAR_EL2, Reset Vector Base Address Register (if EL3 not implemented) */

/* Reads the RVBAR_EL2 register (read-only, no writer provided). */
static inline uint64_t _AArch64_Read_rvbar_el2( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, RVBAR_EL2" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* RVBAR_EL3, Reset Vector Base Address Register (if EL3 implemented) */

/* Reads the RVBAR_EL3 register (read-only, no writer provided). */
static inline uint64_t _AArch64_Read_rvbar_el3( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, RVBAR_EL3" : "=&r" ( value ) : : "memory"
  );

  return value;
}
5086
/* SCR_EL3, Secure Configuration Register */

#define AARCH64_SCR_EL3_NS 0x1U

#define AARCH64_SCR_EL3_IRQ 0x2U

#define AARCH64_SCR_EL3_FIQ 0x4U

#define AARCH64_SCR_EL3_EA 0x8U

#define AARCH64_SCR_EL3_SMD 0x80U

#define AARCH64_SCR_EL3_HCE 0x100U

#define AARCH64_SCR_EL3_SIF 0x200U

#define AARCH64_SCR_EL3_RW 0x400U

#define AARCH64_SCR_EL3_ST 0x800U

#define AARCH64_SCR_EL3_TWI 0x1000U

#define AARCH64_SCR_EL3_TWE 0x2000U

#define AARCH64_SCR_EL3_TLOR 0x4000U

#define AARCH64_SCR_EL3_TERR 0x8000U

#define AARCH64_SCR_EL3_APK 0x10000U

#define AARCH64_SCR_EL3_API 0x20000U

#define AARCH64_SCR_EL3_EEL2 0x40000U

#define AARCH64_SCR_EL3_EASE 0x80000U

#define AARCH64_SCR_EL3_NMEA 0x100000U

#define AARCH64_SCR_EL3_FIEN 0x200000U

#define AARCH64_SCR_EL3_ENSCXT 0x2000000U

#define AARCH64_SCR_EL3_ATA 0x4000000U

#define AARCH64_SCR_EL3_FGTEN 0x8000000U

#define AARCH64_SCR_EL3_ECVEN 0x10000000U

#define AARCH64_SCR_EL3_TWEDEN 0x20000000U

/*
 * The cast avoids undefined behavior: shifting a plain int left by 30 can
 * overflow into the sign bit for TWEDEL values >= 2.
 */
#define AARCH64_SCR_EL3_TWEDEL( _val ) ( ( ( uint64_t ) ( _val ) ) << 30 )
#define AARCH64_SCR_EL3_TWEDEL_SHIFT 30
#define AARCH64_SCR_EL3_TWEDEL_MASK 0x3c0000000ULL
#define AARCH64_SCR_EL3_TWEDEL_GET( _reg ) \
  ( ( ( _reg ) >> 30 ) & 0xfULL )

#define AARCH64_SCR_EL3_AMVOFFEN 0x800000000ULL

/* Reads the SCR_EL3 register. */
static inline uint64_t _AArch64_Read_scr_el3( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, SCR_EL3" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* Writes the SCR_EL3 register. */
static inline void _AArch64_Write_scr_el3( uint64_t value )
{
  __asm__ volatile (
    "msr SCR_EL3, %0" : : "r" ( value ) : "memory"
  );
}
5162
/* SCTLR_EL1, System Control Register (EL1) */

#define AARCH64_SCTLR_EL1_M 0x1U

#define AARCH64_SCTLR_EL1_A 0x2U

#define AARCH64_SCTLR_EL1_C 0x4U

#define AARCH64_SCTLR_EL1_SA 0x8U

#define AARCH64_SCTLR_EL1_SA0 0x10U

#define AARCH64_SCTLR_EL1_CP15BEN 0x20U

#define AARCH64_SCTLR_EL1_NAA 0x40U

#define AARCH64_SCTLR_EL1_ITD 0x80U

#define AARCH64_SCTLR_EL1_SED 0x100U

#define AARCH64_SCTLR_EL1_UMA 0x200U

#define AARCH64_SCTLR_EL1_ENRCTX 0x400U

#define AARCH64_SCTLR_EL1_EOS 0x800U

#define AARCH64_SCTLR_EL1_I 0x1000U

#define AARCH64_SCTLR_EL1_ENDB 0x2000U

#define AARCH64_SCTLR_EL1_DZE 0x4000U

#define AARCH64_SCTLR_EL1_UCT 0x8000U

#define AARCH64_SCTLR_EL1_NTWI 0x10000U

#define AARCH64_SCTLR_EL1_NTWE 0x40000U

#define AARCH64_SCTLR_EL1_WXN 0x80000U

#define AARCH64_SCTLR_EL1_TSCXT 0x100000U

#define AARCH64_SCTLR_EL1_IESB 0x200000U

#define AARCH64_SCTLR_EL1_EIS 0x400000U

#define AARCH64_SCTLR_EL1_SPAN 0x800000U

#define AARCH64_SCTLR_EL1_E0E 0x1000000U

#define AARCH64_SCTLR_EL1_EE 0x2000000U

#define AARCH64_SCTLR_EL1_UCI 0x4000000U

#define AARCH64_SCTLR_EL1_ENDA 0x8000000U

#define AARCH64_SCTLR_EL1_NTLSMD 0x10000000U

#define AARCH64_SCTLR_EL1_LSMAOE 0x20000000U

#define AARCH64_SCTLR_EL1_ENIB 0x40000000U

#define AARCH64_SCTLR_EL1_ENIA 0x80000000U

#define AARCH64_SCTLR_EL1_BT0 0x800000000ULL

#define AARCH64_SCTLR_EL1_BT1 0x1000000000ULL

#define AARCH64_SCTLR_EL1_ITFSB 0x2000000000ULL

/*
 * Field macros cast _val to uint64_t before shifting: shifting a plain int
 * by 38 or more bits is undefined behavior in C.
 */
#define AARCH64_SCTLR_EL1_TCF0( _val ) ( ( ( uint64_t ) ( _val ) ) << 38 )
#define AARCH64_SCTLR_EL1_TCF0_SHIFT 38
#define AARCH64_SCTLR_EL1_TCF0_MASK 0xc000000000ULL
#define AARCH64_SCTLR_EL1_TCF0_GET( _reg ) \
  ( ( ( _reg ) >> 38 ) & 0x3ULL )

#define AARCH64_SCTLR_EL1_TCF( _val ) ( ( ( uint64_t ) ( _val ) ) << 40 )
#define AARCH64_SCTLR_EL1_TCF_SHIFT 40
#define AARCH64_SCTLR_EL1_TCF_MASK 0x30000000000ULL
#define AARCH64_SCTLR_EL1_TCF_GET( _reg ) \
  ( ( ( _reg ) >> 40 ) & 0x3ULL )

#define AARCH64_SCTLR_EL1_ATA0 0x40000000000ULL

#define AARCH64_SCTLR_EL1_ATA 0x80000000000ULL

#define AARCH64_SCTLR_EL1_DSSBS 0x100000000000ULL

#define AARCH64_SCTLR_EL1_TWEDEN 0x200000000000ULL

#define AARCH64_SCTLR_EL1_TWEDEL( _val ) ( ( ( uint64_t ) ( _val ) ) << 46 )
#define AARCH64_SCTLR_EL1_TWEDEL_SHIFT 46
#define AARCH64_SCTLR_EL1_TWEDEL_MASK 0x3c00000000000ULL
#define AARCH64_SCTLR_EL1_TWEDEL_GET( _reg ) \
  ( ( ( _reg ) >> 46 ) & 0xfULL )

/* Reads the SCTLR_EL1 register. */
static inline uint64_t _AArch64_Read_sctlr_el1( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, SCTLR_EL1" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* Writes the SCTLR_EL1 register. */
static inline void _AArch64_Write_sctlr_el1( uint64_t value )
{
  __asm__ volatile (
    "msr SCTLR_EL1, %0" : : "r" ( value ) : "memory"
  );
}
5276
/* SCTLR_EL2, System Control Register (EL2) */

#define AARCH64_SCTLR_EL2_M 0x1U

#define AARCH64_SCTLR_EL2_A 0x2U

#define AARCH64_SCTLR_EL2_C 0x4U

#define AARCH64_SCTLR_EL2_SA 0x8U

#define AARCH64_SCTLR_EL2_SA0 0x10U

#define AARCH64_SCTLR_EL2_CP15BEN 0x20U

#define AARCH64_SCTLR_EL2_NAA 0x40U

#define AARCH64_SCTLR_EL2_ITD 0x80U

#define AARCH64_SCTLR_EL2_SED 0x100U

#define AARCH64_SCTLR_EL2_ENRCTX 0x400U

#define AARCH64_SCTLR_EL2_EOS 0x800U

#define AARCH64_SCTLR_EL2_I 0x1000U

#define AARCH64_SCTLR_EL2_ENDB 0x2000U

#define AARCH64_SCTLR_EL2_DZE 0x4000U

#define AARCH64_SCTLR_EL2_UCT 0x8000U

#define AARCH64_SCTLR_EL2_NTWI 0x10000U

#define AARCH64_SCTLR_EL2_NTWE 0x40000U

#define AARCH64_SCTLR_EL2_WXN 0x80000U

#define AARCH64_SCTLR_EL2_TSCXT 0x100000U

#define AARCH64_SCTLR_EL2_IESB 0x200000U

#define AARCH64_SCTLR_EL2_EIS 0x400000U

#define AARCH64_SCTLR_EL2_SPAN 0x800000U

#define AARCH64_SCTLR_EL2_E0E 0x1000000U

#define AARCH64_SCTLR_EL2_EE 0x2000000U

#define AARCH64_SCTLR_EL2_UCI 0x4000000U

#define AARCH64_SCTLR_EL2_ENDA 0x8000000U

#define AARCH64_SCTLR_EL2_NTLSMD 0x10000000U

#define AARCH64_SCTLR_EL2_LSMAOE 0x20000000U

#define AARCH64_SCTLR_EL2_ENIB 0x40000000U

#define AARCH64_SCTLR_EL2_ENIA 0x80000000U

#define AARCH64_SCTLR_EL2_BT0 0x800000000ULL

/* BT and BT1 are aliases for the same bit (bit 36). */
#define AARCH64_SCTLR_EL2_BT 0x1000000000ULL

#define AARCH64_SCTLR_EL2_BT1 0x1000000000ULL

#define AARCH64_SCTLR_EL2_ITFSB 0x2000000000ULL

/*
 * Field macros cast _val to uint64_t before shifting: shifting a plain int
 * by 38 or more bits is undefined behavior in C.
 */
#define AARCH64_SCTLR_EL2_TCF0( _val ) ( ( ( uint64_t ) ( _val ) ) << 38 )
#define AARCH64_SCTLR_EL2_TCF0_SHIFT 38
#define AARCH64_SCTLR_EL2_TCF0_MASK 0xc000000000ULL
#define AARCH64_SCTLR_EL2_TCF0_GET( _reg ) \
  ( ( ( _reg ) >> 38 ) & 0x3ULL )

#define AARCH64_SCTLR_EL2_TCF( _val ) ( ( ( uint64_t ) ( _val ) ) << 40 )
#define AARCH64_SCTLR_EL2_TCF_SHIFT 40
#define AARCH64_SCTLR_EL2_TCF_MASK 0x30000000000ULL
#define AARCH64_SCTLR_EL2_TCF_GET( _reg ) \
  ( ( ( _reg ) >> 40 ) & 0x3ULL )

#define AARCH64_SCTLR_EL2_ATA0 0x40000000000ULL

#define AARCH64_SCTLR_EL2_ATA 0x80000000000ULL

#define AARCH64_SCTLR_EL2_DSSBS 0x100000000000ULL

#define AARCH64_SCTLR_EL2_TWEDEN 0x200000000000ULL

#define AARCH64_SCTLR_EL2_TWEDEL( _val ) ( ( ( uint64_t ) ( _val ) ) << 46 )
#define AARCH64_SCTLR_EL2_TWEDEL_SHIFT 46
#define AARCH64_SCTLR_EL2_TWEDEL_MASK 0x3c00000000000ULL
#define AARCH64_SCTLR_EL2_TWEDEL_GET( _reg ) \
  ( ( ( _reg ) >> 46 ) & 0xfULL )

/* Reads the SCTLR_EL2 register. */
static inline uint64_t _AArch64_Read_sctlr_el2( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, SCTLR_EL2" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* Writes the SCTLR_EL2 register. */
static inline void _AArch64_Write_sctlr_el2( uint64_t value )
{
  __asm__ volatile (
    "msr SCTLR_EL2, %0" : : "r" ( value ) : "memory"
  );
}
5390
/* SCTLR_EL3, System Control Register (EL3) */

#define AARCH64_SCTLR_EL3_M 0x1U

#define AARCH64_SCTLR_EL3_A 0x2U

#define AARCH64_SCTLR_EL3_C 0x4U

#define AARCH64_SCTLR_EL3_SA 0x8U

#define AARCH64_SCTLR_EL3_NAA 0x40U

#define AARCH64_SCTLR_EL3_EOS 0x800U

#define AARCH64_SCTLR_EL3_I 0x1000U

#define AARCH64_SCTLR_EL3_ENDB 0x2000U

#define AARCH64_SCTLR_EL3_WXN 0x80000U

#define AARCH64_SCTLR_EL3_IESB 0x200000U

#define AARCH64_SCTLR_EL3_EIS 0x400000U

#define AARCH64_SCTLR_EL3_EE 0x2000000U

#define AARCH64_SCTLR_EL3_ENDA 0x8000000U

#define AARCH64_SCTLR_EL3_ENIB 0x40000000U

#define AARCH64_SCTLR_EL3_ENIA 0x80000000U

#define AARCH64_SCTLR_EL3_BT 0x1000000000ULL

#define AARCH64_SCTLR_EL3_ITFSB 0x2000000000ULL

/*
 * The cast avoids undefined behavior: shifting a plain int left by 40 bits
 * exceeds the width of int.
 */
#define AARCH64_SCTLR_EL3_TCF( _val ) ( ( ( uint64_t ) ( _val ) ) << 40 )
#define AARCH64_SCTLR_EL3_TCF_SHIFT 40
#define AARCH64_SCTLR_EL3_TCF_MASK 0x30000000000ULL
#define AARCH64_SCTLR_EL3_TCF_GET( _reg ) \
  ( ( ( _reg ) >> 40 ) & 0x3ULL )

#define AARCH64_SCTLR_EL3_ATA 0x80000000000ULL

#define AARCH64_SCTLR_EL3_DSSBS 0x100000000000ULL

/* Reads the SCTLR_EL3 register. */
static inline uint64_t _AArch64_Read_sctlr_el3( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, SCTLR_EL3" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* Writes the SCTLR_EL3 register. */
static inline void _AArch64_Write_sctlr_el3( uint64_t value )
{
  __asm__ volatile (
    "msr SCTLR_EL3, %0" : : "r" ( value ) : "memory"
  );
}
5454
/* SCXTNUM_EL0, EL0 Read/Write Software Context Number */

/* Reads the SCXTNUM_EL0 register. */
static inline uint64_t _AArch64_Read_scxtnum_el0( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, SCXTNUM_EL0" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* Writes the SCXTNUM_EL0 register. */
static inline void _AArch64_Write_scxtnum_el0( uint64_t value )
{
  __asm__ volatile (
    "msr SCXTNUM_EL0, %0" : : "r" ( value ) : "memory"
  );
}

/* SCXTNUM_EL1, EL1 Read/Write Software Context Number */

/* Reads the SCXTNUM_EL1 register. */
static inline uint64_t _AArch64_Read_scxtnum_el1( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, SCXTNUM_EL1" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* Writes the SCXTNUM_EL1 register. */
static inline void _AArch64_Write_scxtnum_el1( uint64_t value )
{
  __asm__ volatile (
    "msr SCXTNUM_EL1, %0" : : "r" ( value ) : "memory"
  );
}

/* SCXTNUM_EL2, EL2 Read/Write Software Context Number */

/* Reads the SCXTNUM_EL2 register. */
static inline uint64_t _AArch64_Read_scxtnum_el2( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, SCXTNUM_EL2" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* Writes the SCXTNUM_EL2 register. */
static inline void _AArch64_Write_scxtnum_el2( uint64_t value )
{
  __asm__ volatile (
    "msr SCXTNUM_EL2, %0" : : "r" ( value ) : "memory"
  );
}

/* SCXTNUM_EL3, EL3 Read/Write Software Context Number */

/* Reads the SCXTNUM_EL3 register. */
static inline uint64_t _AArch64_Read_scxtnum_el3( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, SCXTNUM_EL3" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* Writes the SCXTNUM_EL3 register. */
static inline void _AArch64_Write_scxtnum_el3( uint64_t value )
{
  __asm__ volatile (
    "msr SCXTNUM_EL3, %0" : : "r" ( value ) : "memory"
  );
}
5534
/* TCR_EL1, Translation Control Register (EL1) */

/*
 * Field macros cast _val to uint64_t before shifting: for TG1 (shift 30) and
 * IPS (shift 32) a plain int operand would overflow or exceed the width of
 * int, which is undefined behavior in C; the remaining fields are cast for
 * consistency.
 */
#define AARCH64_TCR_EL1_T0SZ( _val ) ( ( ( uint64_t ) ( _val ) ) << 0 )
#define AARCH64_TCR_EL1_T0SZ_SHIFT 0
#define AARCH64_TCR_EL1_T0SZ_MASK 0x3fU
#define AARCH64_TCR_EL1_T0SZ_GET( _reg ) \
  ( ( ( _reg ) >> 0 ) & 0x3fU )

#define AARCH64_TCR_EL1_EPD0 0x80U

#define AARCH64_TCR_EL1_IRGN0( _val ) ( ( ( uint64_t ) ( _val ) ) << 8 )
#define AARCH64_TCR_EL1_IRGN0_SHIFT 8
#define AARCH64_TCR_EL1_IRGN0_MASK 0x300U
#define AARCH64_TCR_EL1_IRGN0_GET( _reg ) \
  ( ( ( _reg ) >> 8 ) & 0x3U )

#define AARCH64_TCR_EL1_ORGN0( _val ) ( ( ( uint64_t ) ( _val ) ) << 10 )
#define AARCH64_TCR_EL1_ORGN0_SHIFT 10
#define AARCH64_TCR_EL1_ORGN0_MASK 0xc00U
#define AARCH64_TCR_EL1_ORGN0_GET( _reg ) \
  ( ( ( _reg ) >> 10 ) & 0x3U )

#define AARCH64_TCR_EL1_SH0( _val ) ( ( ( uint64_t ) ( _val ) ) << 12 )
#define AARCH64_TCR_EL1_SH0_SHIFT 12
#define AARCH64_TCR_EL1_SH0_MASK 0x3000U
#define AARCH64_TCR_EL1_SH0_GET( _reg ) \
  ( ( ( _reg ) >> 12 ) & 0x3U )

#define AARCH64_TCR_EL1_TG0( _val ) ( ( ( uint64_t ) ( _val ) ) << 14 )
#define AARCH64_TCR_EL1_TG0_SHIFT 14
#define AARCH64_TCR_EL1_TG0_MASK 0xc000U
#define AARCH64_TCR_EL1_TG0_GET( _reg ) \
  ( ( ( _reg ) >> 14 ) & 0x3U )

#define AARCH64_TCR_EL1_T1SZ( _val ) ( ( ( uint64_t ) ( _val ) ) << 16 )
#define AARCH64_TCR_EL1_T1SZ_SHIFT 16
#define AARCH64_TCR_EL1_T1SZ_MASK 0x3f0000U
#define AARCH64_TCR_EL1_T1SZ_GET( _reg ) \
  ( ( ( _reg ) >> 16 ) & 0x3fU )

#define AARCH64_TCR_EL1_A1 0x400000U

#define AARCH64_TCR_EL1_EPD1 0x800000U

#define AARCH64_TCR_EL1_IRGN1( _val ) ( ( ( uint64_t ) ( _val ) ) << 24 )
#define AARCH64_TCR_EL1_IRGN1_SHIFT 24
#define AARCH64_TCR_EL1_IRGN1_MASK 0x3000000U
#define AARCH64_TCR_EL1_IRGN1_GET( _reg ) \
  ( ( ( _reg ) >> 24 ) & 0x3U )

#define AARCH64_TCR_EL1_ORGN1( _val ) ( ( ( uint64_t ) ( _val ) ) << 26 )
#define AARCH64_TCR_EL1_ORGN1_SHIFT 26
#define AARCH64_TCR_EL1_ORGN1_MASK 0xc000000U
#define AARCH64_TCR_EL1_ORGN1_GET( _reg ) \
  ( ( ( _reg ) >> 26 ) & 0x3U )

#define AARCH64_TCR_EL1_SH1( _val ) ( ( ( uint64_t ) ( _val ) ) << 28 )
#define AARCH64_TCR_EL1_SH1_SHIFT 28
#define AARCH64_TCR_EL1_SH1_MASK 0x30000000U
#define AARCH64_TCR_EL1_SH1_GET( _reg ) \
  ( ( ( _reg ) >> 28 ) & 0x3U )

#define AARCH64_TCR_EL1_TG1( _val ) ( ( ( uint64_t ) ( _val ) ) << 30 )
#define AARCH64_TCR_EL1_TG1_SHIFT 30
#define AARCH64_TCR_EL1_TG1_MASK 0xc0000000U
#define AARCH64_TCR_EL1_TG1_GET( _reg ) \
  ( ( ( _reg ) >> 30 ) & 0x3U )

#define AARCH64_TCR_EL1_IPS( _val ) ( ( ( uint64_t ) ( _val ) ) << 32 )
#define AARCH64_TCR_EL1_IPS_SHIFT 32
#define AARCH64_TCR_EL1_IPS_MASK 0x700000000ULL
#define AARCH64_TCR_EL1_IPS_GET( _reg ) \
  ( ( ( _reg ) >> 32 ) & 0x7ULL )

#define AARCH64_TCR_EL1_AS 0x1000000000ULL

#define AARCH64_TCR_EL1_TBI0 0x2000000000ULL

#define AARCH64_TCR_EL1_TBI1 0x4000000000ULL

#define AARCH64_TCR_EL1_HA 0x8000000000ULL

#define AARCH64_TCR_EL1_HD 0x10000000000ULL

#define AARCH64_TCR_EL1_HPD0 0x20000000000ULL

#define AARCH64_TCR_EL1_HPD1 0x40000000000ULL

#define AARCH64_TCR_EL1_HWU059 0x80000000000ULL

#define AARCH64_TCR_EL1_HWU060 0x100000000000ULL

#define AARCH64_TCR_EL1_HWU061 0x200000000000ULL

#define AARCH64_TCR_EL1_HWU062 0x400000000000ULL

#define AARCH64_TCR_EL1_HWU159 0x800000000000ULL

#define AARCH64_TCR_EL1_HWU160 0x1000000000000ULL

#define AARCH64_TCR_EL1_HWU161 0x2000000000000ULL

#define AARCH64_TCR_EL1_HWU162 0x4000000000000ULL

#define AARCH64_TCR_EL1_TBID0 0x8000000000000ULL

#define AARCH64_TCR_EL1_TBID1 0x10000000000000ULL

#define AARCH64_TCR_EL1_NFD0 0x20000000000000ULL

#define AARCH64_TCR_EL1_NFD1 0x40000000000000ULL

#define AARCH64_TCR_EL1_E0PD0 0x80000000000000ULL

#define AARCH64_TCR_EL1_E0PD1 0x100000000000000ULL

#define AARCH64_TCR_EL1_TCMA0 0x200000000000000ULL

#define AARCH64_TCR_EL1_TCMA1 0x400000000000000ULL

/* Reads the TCR_EL1 register. */
static inline uint64_t _AArch64_Read_tcr_el1( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, TCR_EL1" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* Writes the TCR_EL1 register. */
static inline void _AArch64_Write_tcr_el1( uint64_t value )
{
  __asm__ volatile (
    "msr TCR_EL1, %0" : : "r" ( value ) : "memory"
  );
}
5672
/* TCR_EL2, Translation Control Register (EL2) */

/*
 * Field macros cast _val to uint64_t before shifting: for TG1 (shift 30) and
 * IPS (shift 32) a plain int operand would overflow or exceed the width of
 * int, which is undefined behavior in C; the remaining fields are cast for
 * consistency.
 *
 * NOTE(review): several encodings overlap (PS/T1SZ at shift 16, A1/HD_0 at
 * bit 22) — presumably the two layouts of TCR_EL2 selected by HCR_EL2.E2H;
 * confirm against the Arm ARM.
 */
#define AARCH64_TCR_EL2_T0SZ( _val ) ( ( ( uint64_t ) ( _val ) ) << 0 )
#define AARCH64_TCR_EL2_T0SZ_SHIFT 0
#define AARCH64_TCR_EL2_T0SZ_MASK 0x3fU
#define AARCH64_TCR_EL2_T0SZ_GET( _reg ) \
  ( ( ( _reg ) >> 0 ) & 0x3fU )

#define AARCH64_TCR_EL2_EPD0 0x80U

#define AARCH64_TCR_EL2_IRGN0( _val ) ( ( ( uint64_t ) ( _val ) ) << 8 )
#define AARCH64_TCR_EL2_IRGN0_SHIFT 8
#define AARCH64_TCR_EL2_IRGN0_MASK 0x300U
#define AARCH64_TCR_EL2_IRGN0_GET( _reg ) \
  ( ( ( _reg ) >> 8 ) & 0x3U )

#define AARCH64_TCR_EL2_ORGN0( _val ) ( ( ( uint64_t ) ( _val ) ) << 10 )
#define AARCH64_TCR_EL2_ORGN0_SHIFT 10
#define AARCH64_TCR_EL2_ORGN0_MASK 0xc00U
#define AARCH64_TCR_EL2_ORGN0_GET( _reg ) \
  ( ( ( _reg ) >> 10 ) & 0x3U )

#define AARCH64_TCR_EL2_SH0( _val ) ( ( ( uint64_t ) ( _val ) ) << 12 )
#define AARCH64_TCR_EL2_SH0_SHIFT 12
#define AARCH64_TCR_EL2_SH0_MASK 0x3000U
#define AARCH64_TCR_EL2_SH0_GET( _reg ) \
  ( ( ( _reg ) >> 12 ) & 0x3U )

#define AARCH64_TCR_EL2_TG0( _val ) ( ( ( uint64_t ) ( _val ) ) << 14 )
#define AARCH64_TCR_EL2_TG0_SHIFT 14
#define AARCH64_TCR_EL2_TG0_MASK 0xc000U
#define AARCH64_TCR_EL2_TG0_GET( _reg ) \
  ( ( ( _reg ) >> 14 ) & 0x3U )

#define AARCH64_TCR_EL2_PS( _val ) ( ( ( uint64_t ) ( _val ) ) << 16 )
#define AARCH64_TCR_EL2_PS_SHIFT 16
#define AARCH64_TCR_EL2_PS_MASK 0x70000U
#define AARCH64_TCR_EL2_PS_GET( _reg ) \
  ( ( ( _reg ) >> 16 ) & 0x7U )

#define AARCH64_TCR_EL2_T1SZ( _val ) ( ( ( uint64_t ) ( _val ) ) << 16 )
#define AARCH64_TCR_EL2_T1SZ_SHIFT 16
#define AARCH64_TCR_EL2_T1SZ_MASK 0x3f0000U
#define AARCH64_TCR_EL2_T1SZ_GET( _reg ) \
  ( ( ( _reg ) >> 16 ) & 0x3fU )

#define AARCH64_TCR_EL2_TBI 0x100000U

#define AARCH64_TCR_EL2_HA_0 0x200000U

#define AARCH64_TCR_EL2_A1 0x400000U

#define AARCH64_TCR_EL2_HD_0 0x400000U

#define AARCH64_TCR_EL2_EPD1 0x800000U

#define AARCH64_TCR_EL2_HPD 0x1000000U

#define AARCH64_TCR_EL2_IRGN1( _val ) ( ( ( uint64_t ) ( _val ) ) << 24 )
#define AARCH64_TCR_EL2_IRGN1_SHIFT 24
#define AARCH64_TCR_EL2_IRGN1_MASK 0x3000000U
#define AARCH64_TCR_EL2_IRGN1_GET( _reg ) \
  ( ( ( _reg ) >> 24 ) & 0x3U )

#define AARCH64_TCR_EL2_HWU59 0x2000000U

#define AARCH64_TCR_EL2_HWU60 0x4000000U

#define AARCH64_TCR_EL2_ORGN1( _val ) ( ( ( uint64_t ) ( _val ) ) << 26 )
#define AARCH64_TCR_EL2_ORGN1_SHIFT 26
#define AARCH64_TCR_EL2_ORGN1_MASK 0xc000000U
#define AARCH64_TCR_EL2_ORGN1_GET( _reg ) \
  ( ( ( _reg ) >> 26 ) & 0x3U )

#define AARCH64_TCR_EL2_HWU61 0x8000000U

#define AARCH64_TCR_EL2_HWU62 0x10000000U

#define AARCH64_TCR_EL2_SH1( _val ) ( ( ( uint64_t ) ( _val ) ) << 28 )
#define AARCH64_TCR_EL2_SH1_SHIFT 28
#define AARCH64_TCR_EL2_SH1_MASK 0x30000000U
#define AARCH64_TCR_EL2_SH1_GET( _reg ) \
  ( ( ( _reg ) >> 28 ) & 0x3U )

#define AARCH64_TCR_EL2_TBID 0x20000000U

#define AARCH64_TCR_EL2_TCMA 0x40000000U

#define AARCH64_TCR_EL2_TG1( _val ) ( ( ( uint64_t ) ( _val ) ) << 30 )
#define AARCH64_TCR_EL2_TG1_SHIFT 30
#define AARCH64_TCR_EL2_TG1_MASK 0xc0000000U
#define AARCH64_TCR_EL2_TG1_GET( _reg ) \
  ( ( ( _reg ) >> 30 ) & 0x3U )

#define AARCH64_TCR_EL2_IPS( _val ) ( ( ( uint64_t ) ( _val ) ) << 32 )
#define AARCH64_TCR_EL2_IPS_SHIFT 32
#define AARCH64_TCR_EL2_IPS_MASK 0x700000000ULL
#define AARCH64_TCR_EL2_IPS_GET( _reg ) \
  ( ( ( _reg ) >> 32 ) & 0x7ULL )

#define AARCH64_TCR_EL2_AS 0x1000000000ULL

#define AARCH64_TCR_EL2_TBI0 0x2000000000ULL

#define AARCH64_TCR_EL2_TBI1 0x4000000000ULL

#define AARCH64_TCR_EL2_HA_1 0x8000000000ULL

#define AARCH64_TCR_EL2_HD_1 0x10000000000ULL

#define AARCH64_TCR_EL2_HPD0 0x20000000000ULL

#define AARCH64_TCR_EL2_HPD1 0x40000000000ULL

#define AARCH64_TCR_EL2_HWU059 0x80000000000ULL

#define AARCH64_TCR_EL2_HWU060 0x100000000000ULL

#define AARCH64_TCR_EL2_HWU061 0x200000000000ULL

#define AARCH64_TCR_EL2_HWU062 0x400000000000ULL

#define AARCH64_TCR_EL2_HWU159 0x800000000000ULL

#define AARCH64_TCR_EL2_HWU160 0x1000000000000ULL

#define AARCH64_TCR_EL2_HWU161 0x2000000000000ULL

#define AARCH64_TCR_EL2_HWU162 0x4000000000000ULL

#define AARCH64_TCR_EL2_TBID0 0x8000000000000ULL

#define AARCH64_TCR_EL2_TBID1 0x10000000000000ULL

#define AARCH64_TCR_EL2_NFD0 0x20000000000000ULL

#define AARCH64_TCR_EL2_NFD1 0x40000000000000ULL

#define AARCH64_TCR_EL2_E0PD0 0x80000000000000ULL

#define AARCH64_TCR_EL2_E0PD1 0x100000000000000ULL

#define AARCH64_TCR_EL2_TCMA0 0x200000000000000ULL

#define AARCH64_TCR_EL2_TCMA1 0x400000000000000ULL

/* Reads the TCR_EL2 register. */
static inline uint64_t _AArch64_Read_tcr_el2( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, TCR_EL2" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* Writes the TCR_EL2 register. */
static inline void _AArch64_Write_tcr_el2( uint64_t value )
{
  __asm__ volatile (
    "msr TCR_EL2, %0" : : "r" ( value ) : "memory"
  );
}
5836
/* TCR_EL3, Translation Control Register (EL3) */

#define AARCH64_TCR_EL3_T0SZ( _val ) ( ( _val ) << 0 )
#define AARCH64_TCR_EL3_T0SZ_SHIFT 0
#define AARCH64_TCR_EL3_T0SZ_MASK 0x3fU
#define AARCH64_TCR_EL3_T0SZ_GET( _reg ) \
  ( ( ( _reg ) >> 0 ) & 0x3fU )

#define AARCH64_TCR_EL3_IRGN0( _val ) ( ( _val ) << 8 )
#define AARCH64_TCR_EL3_IRGN0_SHIFT 8
#define AARCH64_TCR_EL3_IRGN0_MASK 0x300U
#define AARCH64_TCR_EL3_IRGN0_GET( _reg ) \
  ( ( ( _reg ) >> 8 ) & 0x3U )

#define AARCH64_TCR_EL3_ORGN0( _val ) ( ( _val ) << 10 )
#define AARCH64_TCR_EL3_ORGN0_SHIFT 10
#define AARCH64_TCR_EL3_ORGN0_MASK 0xc00U
#define AARCH64_TCR_EL3_ORGN0_GET( _reg ) \
  ( ( ( _reg ) >> 10 ) & 0x3U )

#define AARCH64_TCR_EL3_SH0( _val ) ( ( _val ) << 12 )
#define AARCH64_TCR_EL3_SH0_SHIFT 12
#define AARCH64_TCR_EL3_SH0_MASK 0x3000U
#define AARCH64_TCR_EL3_SH0_GET( _reg ) \
  ( ( ( _reg ) >> 12 ) & 0x3U )

#define AARCH64_TCR_EL3_TG0( _val ) ( ( _val ) << 14 )
#define AARCH64_TCR_EL3_TG0_SHIFT 14
#define AARCH64_TCR_EL3_TG0_MASK 0xc000U
#define AARCH64_TCR_EL3_TG0_GET( _reg ) \
  ( ( ( _reg ) >> 14 ) & 0x3U )

#define AARCH64_TCR_EL3_PS( _val ) ( ( _val ) << 16 )
#define AARCH64_TCR_EL3_PS_SHIFT 16
#define AARCH64_TCR_EL3_PS_MASK 0x70000U
#define AARCH64_TCR_EL3_PS_GET( _reg ) \
  ( ( ( _reg ) >> 16 ) & 0x7U )

#define AARCH64_TCR_EL3_TBI 0x100000U

#define AARCH64_TCR_EL3_HA 0x200000U

#define AARCH64_TCR_EL3_HD 0x400000U

#define AARCH64_TCR_EL3_HPD 0x1000000U

#define AARCH64_TCR_EL3_HWU59 0x2000000U

#define AARCH64_TCR_EL3_HWU60 0x4000000U

#define AARCH64_TCR_EL3_HWU61 0x8000000U

#define AARCH64_TCR_EL3_HWU62 0x10000000U

#define AARCH64_TCR_EL3_TBID 0x20000000U

#define AARCH64_TCR_EL3_TCMA 0x40000000U

/* Reads the TCR_EL3 register. */
static inline uint64_t _AArch64_Read_tcr_el3( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, TCR_EL3" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* Writes the TCR_EL3 register. */
static inline void _AArch64_Write_tcr_el3( uint64_t value )
{
  __asm__ volatile (
    "msr TCR_EL3, %0" : : "r" ( value ) : "memory"
  );
}
5912
/* TFSRE0_EL1, Tag Fault Status Register (EL0). */

#define AARCH64_TFSRE0_EL1_TF0 0x1U

#define AARCH64_TFSRE0_EL1_TF1 0x2U

/* Reads the TFSRE0_EL1 register. */
static inline uint64_t _AArch64_Read_tfsre0_el1( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, TFSRE0_EL1" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* Writes the TFSRE0_EL1 register. */
static inline void _AArch64_Write_tfsre0_el1( uint64_t value )
{
  __asm__ volatile (
    "msr TFSRE0_EL1, %0" : : "r" ( value ) : "memory"
  );
}

/* TFSR_EL1, Tag Fault Status Register (EL1) */

#define AARCH64_TFSR_EL1_TF0 0x1U

#define AARCH64_TFSR_EL1_TF1 0x2U

/* Reads the TFSR_EL1 register. */
static inline uint64_t _AArch64_Read_tfsr_el1( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, TFSR_EL1" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* Writes the TFSR_EL1 register. */
static inline void _AArch64_Write_tfsr_el1( uint64_t value )
{
  __asm__ volatile (
    "msr TFSR_EL1, %0" : : "r" ( value ) : "memory"
  );
}

/* TFSR_EL2, Tag Fault Status Register (EL2) */

#define AARCH64_TFSR_EL2_TF0 0x1U

#define AARCH64_TFSR_EL2_TF1 0x2U

/* Reads the TFSR_EL2 register. */
static inline uint64_t _AArch64_Read_tfsr_el2( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, TFSR_EL2" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* Writes the TFSR_EL2 register. */
static inline void _AArch64_Write_tfsr_el2( uint64_t value )
{
  __asm__ volatile (
    "msr TFSR_EL2, %0" : : "r" ( value ) : "memory"
  );
}

/* TFSR_EL3, Tag Fault Status Register (EL3) */

#define AARCH64_TFSR_EL3_TF0 0x1U

/* Reads the TFSR_EL3 register. */
static inline uint64_t _AArch64_Read_tfsr_el3( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, TFSR_EL3" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* Writes the TFSR_EL3 register. */
static inline void _AArch64_Write_tfsr_el3( uint64_t value )
{
  __asm__ volatile (
    "msr TFSR_EL3, %0" : : "r" ( value ) : "memory"
  );
}
6006
/* TPIDR_EL0, EL0 Read/Write Software Thread ID Register */

/* Reads the TPIDR_EL0 register. */
static inline uint64_t _AArch64_Read_tpidr_el0( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, TPIDR_EL0" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* Writes the TPIDR_EL0 register. */
static inline void _AArch64_Write_tpidr_el0( uint64_t value )
{
  __asm__ volatile (
    "msr TPIDR_EL0, %0" : : "r" ( value ) : "memory"
  );
}

/* TPIDR_EL1, EL1 Software Thread ID Register */

/* Reads the TPIDR_EL1 register. */
static inline uint64_t _AArch64_Read_tpidr_el1( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, TPIDR_EL1" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* Writes the TPIDR_EL1 register. */
static inline void _AArch64_Write_tpidr_el1( uint64_t value )
{
  __asm__ volatile (
    "msr TPIDR_EL1, %0" : : "r" ( value ) : "memory"
  );
}

/* TPIDR_EL2, EL2 Software Thread ID Register */

/* Reads the TPIDR_EL2 register. */
static inline uint64_t _AArch64_Read_tpidr_el2( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, TPIDR_EL2" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* Writes the TPIDR_EL2 register. */
static inline void _AArch64_Write_tpidr_el2( uint64_t value )
{
  __asm__ volatile (
    "msr TPIDR_EL2, %0" : : "r" ( value ) : "memory"
  );
}

/* TPIDR_EL3, EL3 Software Thread ID Register */

/* Reads the TPIDR_EL3 register. */
static inline uint64_t _AArch64_Read_tpidr_el3( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, TPIDR_EL3" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* Writes the TPIDR_EL3 register. */
static inline void _AArch64_Write_tpidr_el3( uint64_t value )
{
  __asm__ volatile (
    "msr TPIDR_EL3, %0" : : "r" ( value ) : "memory"
  );
}

/* TPIDRRO_EL0, EL0 Read-Only Software Thread ID Register */

/* Reads the TPIDRRO_EL0 register (read-only from EL0; writable from EL1+). */
static inline uint64_t _AArch64_Read_tpidrro_el0( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, TPIDRRO_EL0" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* Writes the TPIDRRO_EL0 register. */
static inline void _AArch64_Write_tpidrro_el0( uint64_t value )
{
  __asm__ volatile (
    "msr TPIDRRO_EL0, %0" : : "r" ( value ) : "memory"
  );
}
6106
6107/* TTBR0_EL1, Translation Table Base Register 0 (EL1) */
6108
6109#define AARCH64_TTBR0_EL1_CNP 0x1U
6110
6111#define AARCH64_TTBR0_EL1_BADDR( _val ) ( ( _val ) << 1 )
6112#define AARCH64_TTBR0_EL1_BADDR_SHIFT 1
6113#define AARCH64_TTBR0_EL1_BADDR_MASK 0xfffffffffffeULL
6114#define AARCH64_TTBR0_EL1_BADDR_GET( _reg ) \
6115 ( ( ( _reg ) >> 1 ) & 0x7fffffffffffULL )
6116
6117#define AARCH64_TTBR0_EL1_ASID( _val ) ( ( _val ) << 48 )
6118#define AARCH64_TTBR0_EL1_ASID_SHIFT 48
6119#define AARCH64_TTBR0_EL1_ASID_MASK 0xffff000000000000ULL
6120#define AARCH64_TTBR0_EL1_ASID_GET( _reg ) \
6121 ( ( ( _reg ) >> 48 ) & 0xffffULL )
6122
6123static inline uint64_t _AArch64_Read_ttbr0_el1( void )
6124{
6125 uint64_t value;
6126
6127 __asm__ volatile (
6128 "mrs %0, TTBR0_EL1" : "=&r" ( value ) : : "memory"
6129 );
6130
6131 return value;
6132}
6133
6134static inline void _AArch64_Write_ttbr0_el1( uint64_t value )
6135{
6136 __asm__ volatile (
6137 "msr TTBR0_EL1, %0" : : "r" ( value ) : "memory"
6138 );
6139}
6140
6141/* TTBR0_EL2, Translation Table Base Register 0 (EL2) */
6142
6143#define AARCH64_TTBR0_EL2_CNP 0x1U
6144
6145#define AARCH64_TTBR0_EL2_BADDR( _val ) ( ( _val ) << 1 )
6146#define AARCH64_TTBR0_EL2_BADDR_SHIFT 1
6147#define AARCH64_TTBR0_EL2_BADDR_MASK 0xfffffffffffeULL
6148#define AARCH64_TTBR0_EL2_BADDR_GET( _reg ) \
6149 ( ( ( _reg ) >> 1 ) & 0x7fffffffffffULL )
6150
6151#define AARCH64_TTBR0_EL2_ASID( _val ) ( ( _val ) << 48 )
6152#define AARCH64_TTBR0_EL2_ASID_SHIFT 48
6153#define AARCH64_TTBR0_EL2_ASID_MASK 0xffff000000000000ULL
6154#define AARCH64_TTBR0_EL2_ASID_GET( _reg ) \
6155 ( ( ( _reg ) >> 48 ) & 0xffffULL )
6156
6157static inline uint64_t _AArch64_Read_ttbr0_el2( void )
6158{
6159 uint64_t value;
6160
6161 __asm__ volatile (
6162 "mrs %0, TTBR0_EL2" : "=&r" ( value ) : : "memory"
6163 );
6164
6165 return value;
6166}
6167
6168static inline void _AArch64_Write_ttbr0_el2( uint64_t value )
6169{
6170 __asm__ volatile (
6171 "msr TTBR0_EL2, %0" : : "r" ( value ) : "memory"
6172 );
6173}
6174
6175/* TTBR0_EL3, Translation Table Base Register 0 (EL3) */
6176
6177#define AARCH64_TTBR0_EL3_CNP 0x1U
6178
6179#define AARCH64_TTBR0_EL3_BADDR( _val ) ( ( _val ) << 1 )
6180#define AARCH64_TTBR0_EL3_BADDR_SHIFT 1
6181#define AARCH64_TTBR0_EL3_BADDR_MASK 0xfffffffffffeULL
6182#define AARCH64_TTBR0_EL3_BADDR_GET( _reg ) \
6183 ( ( ( _reg ) >> 1 ) & 0x7fffffffffffULL )
6184
6185static inline uint64_t _AArch64_Read_ttbr0_el3( void )
6186{
6187 uint64_t value;
6188
6189 __asm__ volatile (
6190 "mrs %0, TTBR0_EL3" : "=&r" ( value ) : : "memory"
6191 );
6192
6193 return value;
6194}
6195
6196static inline void _AArch64_Write_ttbr0_el3( uint64_t value )
6197{
6198 __asm__ volatile (
6199 "msr TTBR0_EL3, %0" : : "r" ( value ) : "memory"
6200 );
6201}
6202
6203/* TTBR1_EL1, Translation Table Base Register 1 (EL1) */
6204
6205#define AARCH64_TTBR1_EL1_CNP 0x1U
6206
6207#define AARCH64_TTBR1_EL1_BADDR( _val ) ( ( _val ) << 1 )
6208#define AARCH64_TTBR1_EL1_BADDR_SHIFT 1
6209#define AARCH64_TTBR1_EL1_BADDR_MASK 0xfffffffffffeULL
6210#define AARCH64_TTBR1_EL1_BADDR_GET( _reg ) \
6211 ( ( ( _reg ) >> 1 ) & 0x7fffffffffffULL )
6212
6213#define AARCH64_TTBR1_EL1_ASID( _val ) ( ( _val ) << 48 )
6214#define AARCH64_TTBR1_EL1_ASID_SHIFT 48
6215#define AARCH64_TTBR1_EL1_ASID_MASK 0xffff000000000000ULL
6216#define AARCH64_TTBR1_EL1_ASID_GET( _reg ) \
6217 ( ( ( _reg ) >> 48 ) & 0xffffULL )
6218
6219static inline uint64_t _AArch64_Read_ttbr1_el1( void )
6220{
6221 uint64_t value;
6222
6223 __asm__ volatile (
6224 "mrs %0, TTBR1_EL1" : "=&r" ( value ) : : "memory"
6225 );
6226
6227 return value;
6228}
6229
6230static inline void _AArch64_Write_ttbr1_el1( uint64_t value )
6231{
6232 __asm__ volatile (
6233 "msr TTBR1_EL1, %0" : : "r" ( value ) : "memory"
6234 );
6235}
6236
6237/* TTBR1_EL2, Translation Table Base Register 1 (EL2) */
6238
6239#define AARCH64_TTBR1_EL2_CNP 0x1U
6240
6241#define AARCH64_TTBR1_EL2_BADDR( _val ) ( ( _val ) << 1 )
6242#define AARCH64_TTBR1_EL2_BADDR_SHIFT 1
6243#define AARCH64_TTBR1_EL2_BADDR_MASK 0xfffffffffffeULL
6244#define AARCH64_TTBR1_EL2_BADDR_GET( _reg ) \
6245 ( ( ( _reg ) >> 1 ) & 0x7fffffffffffULL )
6246
6247#define AARCH64_TTBR1_EL2_ASID( _val ) ( ( _val ) << 48 )
6248#define AARCH64_TTBR1_EL2_ASID_SHIFT 48
6249#define AARCH64_TTBR1_EL2_ASID_MASK 0xffff000000000000ULL
6250#define AARCH64_TTBR1_EL2_ASID_GET( _reg ) \
6251 ( ( ( _reg ) >> 48 ) & 0xffffULL )
6252
6253static inline uint64_t _AArch64_Read_ttbr1_el2( void )
6254{
6255 uint64_t value;
6256
6257 __asm__ volatile (
6258 "mrs %0, TTBR1_EL2" : "=&r" ( value ) : : "memory"
6259 );
6260
6261 return value;
6262}
6263
6264static inline void _AArch64_Write_ttbr1_el2( uint64_t value )
6265{
6266 __asm__ volatile (
6267 "msr TTBR1_EL2, %0" : : "r" ( value ) : "memory"
6268 );
6269}
6270
6271/* VBAR_EL1, Vector Base Address Register (EL1) */
6272
6273static inline uint64_t _AArch64_Read_vbar_el1( void )
6274{
6275 uint64_t value;
6276
6277 __asm__ volatile (
6278 "mrs %0, VBAR_EL1" : "=&r" ( value ) : : "memory"
6279 );
6280
6281 return value;
6282}
6283
6284static inline void _AArch64_Write_vbar_el1( uint64_t value )
6285{
6286 __asm__ volatile (
6287 "msr VBAR_EL1, %0" : : "r" ( value ) : "memory"
6288 );
6289}
6290
6291/* VBAR_EL2, Vector Base Address Register (EL2) */
6292
6293static inline uint64_t _AArch64_Read_vbar_el2( void )
6294{
6295 uint64_t value;
6296
6297 __asm__ volatile (
6298 "mrs %0, VBAR_EL2" : "=&r" ( value ) : : "memory"
6299 );
6300
6301 return value;
6302}
6303
6304static inline void _AArch64_Write_vbar_el2( uint64_t value )
6305{
6306 __asm__ volatile (
6307 "msr VBAR_EL2, %0" : : "r" ( value ) : "memory"
6308 );
6309}
6310
6311/* VBAR_EL3, Vector Base Address Register (EL3) */
6312
6313static inline uint64_t _AArch64_Read_vbar_el3( void )
6314{
6315 uint64_t value;
6316
6317 __asm__ volatile (
6318 "mrs %0, VBAR_EL3" : "=&r" ( value ) : : "memory"
6319 );
6320
6321 return value;
6322}
6323
6324static inline void _AArch64_Write_vbar_el3( uint64_t value )
6325{
6326 __asm__ volatile (
6327 "msr VBAR_EL3, %0" : : "r" ( value ) : "memory"
6328 );
6329}
6330
6331/* VMPIDR_EL2, Virtualization Multiprocessor ID Register */
6332
6333#define AARCH64_VMPIDR_EL2_AFF0( _val ) ( ( _val ) << 0 )
6334#define AARCH64_VMPIDR_EL2_AFF0_SHIFT 0
6335#define AARCH64_VMPIDR_EL2_AFF0_MASK 0xffU
6336#define AARCH64_VMPIDR_EL2_AFF0_GET( _reg ) \
6337 ( ( ( _reg ) >> 0 ) & 0xffU )
6338
6339#define AARCH64_VMPIDR_EL2_AFF1( _val ) ( ( _val ) << 8 )
6340#define AARCH64_VMPIDR_EL2_AFF1_SHIFT 8
6341#define AARCH64_VMPIDR_EL2_AFF1_MASK 0xff00U
6342#define AARCH64_VMPIDR_EL2_AFF1_GET( _reg ) \
6343 ( ( ( _reg ) >> 8 ) & 0xffU )
6344
6345#define AARCH64_VMPIDR_EL2_AFF2( _val ) ( ( _val ) << 16 )
6346#define AARCH64_VMPIDR_EL2_AFF2_SHIFT 16
6347#define AARCH64_VMPIDR_EL2_AFF2_MASK 0xff0000U
6348#define AARCH64_VMPIDR_EL2_AFF2_GET( _reg ) \
6349 ( ( ( _reg ) >> 16 ) & 0xffU )
6350
6351#define AARCH64_VMPIDR_EL2_MT 0x1000000U
6352
6353#define AARCH64_VMPIDR_EL2_U 0x40000000U
6354
6355#define AARCH64_VMPIDR_EL2_AFF3( _val ) ( ( _val ) << 32 )
6356#define AARCH64_VMPIDR_EL2_AFF3_SHIFT 32
6357#define AARCH64_VMPIDR_EL2_AFF3_MASK 0xff00000000ULL
6358#define AARCH64_VMPIDR_EL2_AFF3_GET( _reg ) \
6359 ( ( ( _reg ) >> 32 ) & 0xffULL )
6360
6361static inline uint64_t _AArch64_Read_vmpidr_el2( void )
6362{
6363 uint64_t value;
6364
6365 __asm__ volatile (
6366 "mrs %0, VMPIDR_EL2" : "=&r" ( value ) : : "memory"
6367 );
6368
6369 return value;
6370}
6371
6372static inline void _AArch64_Write_vmpidr_el2( uint64_t value )
6373{
6374 __asm__ volatile (
6375 "msr VMPIDR_EL2, %0" : : "r" ( value ) : "memory"
6376 );
6377}
6378
6379/* VNCR_EL2, Virtual Nested Control Register */
6380
6381#define AARCH64_VNCR_EL2_BADDR( _val ) ( ( _val ) << 12 )
6382#define AARCH64_VNCR_EL2_BADDR_SHIFT 12
6383#define AARCH64_VNCR_EL2_BADDR_MASK 0x1ffffffffff000ULL
6384#define AARCH64_VNCR_EL2_BADDR_GET( _reg ) \
6385 ( ( ( _reg ) >> 12 ) & 0x1ffffffffffULL )
6386
6387#define AARCH64_VNCR_EL2_RESS( _val ) ( ( _val ) << 53 )
6388#define AARCH64_VNCR_EL2_RESS_SHIFT 53
6389#define AARCH64_VNCR_EL2_RESS_MASK 0xffe0000000000000ULL
6390#define AARCH64_VNCR_EL2_RESS_GET( _reg ) \
6391 ( ( ( _reg ) >> 53 ) & 0x7ffULL )
6392
6393static inline uint64_t _AArch64_Read_vncr_el2( void )
6394{
6395 uint64_t value;
6396
6397 __asm__ volatile (
6398 "mrs %0, VNCR_EL2" : "=&r" ( value ) : : "memory"
6399 );
6400
6401 return value;
6402}
6403
6404static inline void _AArch64_Write_vncr_el2( uint64_t value )
6405{
6406 __asm__ volatile (
6407 "msr VNCR_EL2, %0" : : "r" ( value ) : "memory"
6408 );
6409}
6410
6411/* VPIDR_EL2, Virtualization Processor ID Register */
6412
6413#define AARCH64_VPIDR_EL2_REVISION( _val ) ( ( _val ) << 0 )
6414#define AARCH64_VPIDR_EL2_REVISION_SHIFT 0
6415#define AARCH64_VPIDR_EL2_REVISION_MASK 0xfU
6416#define AARCH64_VPIDR_EL2_REVISION_GET( _reg ) \
6417 ( ( ( _reg ) >> 0 ) & 0xfU )
6418
6419#define AARCH64_VPIDR_EL2_PARTNUM( _val ) ( ( _val ) << 4 )
6420#define AARCH64_VPIDR_EL2_PARTNUM_SHIFT 4
6421#define AARCH64_VPIDR_EL2_PARTNUM_MASK 0xfff0U
6422#define AARCH64_VPIDR_EL2_PARTNUM_GET( _reg ) \
6423 ( ( ( _reg ) >> 4 ) & 0xfffU )
6424
6425#define AARCH64_VPIDR_EL2_ARCHITECTURE( _val ) ( ( _val ) << 16 )
6426#define AARCH64_VPIDR_EL2_ARCHITECTURE_SHIFT 16
6427#define AARCH64_VPIDR_EL2_ARCHITECTURE_MASK 0xf0000U
6428#define AARCH64_VPIDR_EL2_ARCHITECTURE_GET( _reg ) \
6429 ( ( ( _reg ) >> 16 ) & 0xfU )
6430
6431#define AARCH64_VPIDR_EL2_VARIANT( _val ) ( ( _val ) << 20 )
6432#define AARCH64_VPIDR_EL2_VARIANT_SHIFT 20
6433#define AARCH64_VPIDR_EL2_VARIANT_MASK 0xf00000U
6434#define AARCH64_VPIDR_EL2_VARIANT_GET( _reg ) \
6435 ( ( ( _reg ) >> 20 ) & 0xfU )
6436
6437#define AARCH64_VPIDR_EL2_IMPLEMENTER( _val ) ( ( _val ) << 24 )
6438#define AARCH64_VPIDR_EL2_IMPLEMENTER_SHIFT 24
6439#define AARCH64_VPIDR_EL2_IMPLEMENTER_MASK 0xff000000U
6440#define AARCH64_VPIDR_EL2_IMPLEMENTER_GET( _reg ) \
6441 ( ( ( _reg ) >> 24 ) & 0xffU )
6442
6443static inline uint64_t _AArch64_Read_vpidr_el2( void )
6444{
6445 uint64_t value;
6446
6447 __asm__ volatile (
6448 "mrs %0, VPIDR_EL2" : "=&r" ( value ) : : "memory"
6449 );
6450
6451 return value;
6452}
6453
6454static inline void _AArch64_Write_vpidr_el2( uint64_t value )
6455{
6456 __asm__ volatile (
6457 "msr VPIDR_EL2, %0" : : "r" ( value ) : "memory"
6458 );
6459}
6460
6461/* VSTCR_EL2, Virtualization Secure Translation Control Register */
6462
6463#define AARCH64_VSTCR_EL2_T0SZ( _val ) ( ( _val ) << 0 )
6464#define AARCH64_VSTCR_EL2_T0SZ_SHIFT 0
6465#define AARCH64_VSTCR_EL2_T0SZ_MASK 0x3fU
6466#define AARCH64_VSTCR_EL2_T0SZ_GET( _reg ) \
6467 ( ( ( _reg ) >> 0 ) & 0x3fU )
6468
6469#define AARCH64_VSTCR_EL2_SL0( _val ) ( ( _val ) << 6 )
6470#define AARCH64_VSTCR_EL2_SL0_SHIFT 6
6471#define AARCH64_VSTCR_EL2_SL0_MASK 0xc0U
6472#define AARCH64_VSTCR_EL2_SL0_GET( _reg ) \
6473 ( ( ( _reg ) >> 6 ) & 0x3U )
6474
6475#define AARCH64_VSTCR_EL2_TG0( _val ) ( ( _val ) << 14 )
6476#define AARCH64_VSTCR_EL2_TG0_SHIFT 14
6477#define AARCH64_VSTCR_EL2_TG0_MASK 0xc000U
6478#define AARCH64_VSTCR_EL2_TG0_GET( _reg ) \
6479 ( ( ( _reg ) >> 14 ) & 0x3U )
6480
6481#define AARCH64_VSTCR_EL2_SW 0x20000000U
6482
6483#define AARCH64_VSTCR_EL2_SA 0x40000000U
6484
6485static inline uint64_t _AArch64_Read_vstcr_el2( void )
6486{
6487 uint64_t value;
6488
6489 __asm__ volatile (
6490 "mrs %0, VSTCR_EL2" : "=&r" ( value ) : : "memory"
6491 );
6492
6493 return value;
6494}
6495
6496static inline void _AArch64_Write_vstcr_el2( uint64_t value )
6497{
6498 __asm__ volatile (
6499 "msr VSTCR_EL2, %0" : : "r" ( value ) : "memory"
6500 );
6501}
6502
6503/* VSTTBR_EL2, Virtualization Secure Translation Table Base Register */
6504
6505#define AARCH64_VSTTBR_EL2_CNP 0x1U
6506
6507#define AARCH64_VSTTBR_EL2_BADDR( _val ) ( ( _val ) << 1 )
6508#define AARCH64_VSTTBR_EL2_BADDR_SHIFT 1
6509#define AARCH64_VSTTBR_EL2_BADDR_MASK 0xfffffffffffeULL
6510#define AARCH64_VSTTBR_EL2_BADDR_GET( _reg ) \
6511 ( ( ( _reg ) >> 1 ) & 0x7fffffffffffULL )
6512
6513static inline uint64_t _AArch64_Read_vsttbr_el2( void )
6514{
6515 uint64_t value;
6516
6517 __asm__ volatile (
6518 "mrs %0, VSTTBR_EL2" : "=&r" ( value ) : : "memory"
6519 );
6520
6521 return value;
6522}
6523
6524static inline void _AArch64_Write_vsttbr_el2( uint64_t value )
6525{
6526 __asm__ volatile (
6527 "msr VSTTBR_EL2, %0" : : "r" ( value ) : "memory"
6528 );
6529}
6530
6531/* VTCR_EL2, Virtualization Translation Control Register */
6532
6533#define AARCH64_VTCR_EL2_T0SZ( _val ) ( ( _val ) << 0 )
6534#define AARCH64_VTCR_EL2_T0SZ_SHIFT 0
6535#define AARCH64_VTCR_EL2_T0SZ_MASK 0x3fU
6536#define AARCH64_VTCR_EL2_T0SZ_GET( _reg ) \
6537 ( ( ( _reg ) >> 0 ) & 0x3fU )
6538
6539#define AARCH64_VTCR_EL2_SL0( _val ) ( ( _val ) << 6 )
6540#define AARCH64_VTCR_EL2_SL0_SHIFT 6
6541#define AARCH64_VTCR_EL2_SL0_MASK 0xc0U
6542#define AARCH64_VTCR_EL2_SL0_GET( _reg ) \
6543 ( ( ( _reg ) >> 6 ) & 0x3U )
6544
6545#define AARCH64_VTCR_EL2_IRGN0( _val ) ( ( _val ) << 8 )
6546#define AARCH64_VTCR_EL2_IRGN0_SHIFT 8
6547#define AARCH64_VTCR_EL2_IRGN0_MASK 0x300U
6548#define AARCH64_VTCR_EL2_IRGN0_GET( _reg ) \
6549 ( ( ( _reg ) >> 8 ) & 0x3U )
6550
6551#define AARCH64_VTCR_EL2_ORGN0( _val ) ( ( _val ) << 10 )
6552#define AARCH64_VTCR_EL2_ORGN0_SHIFT 10
6553#define AARCH64_VTCR_EL2_ORGN0_MASK 0xc00U
6554#define AARCH64_VTCR_EL2_ORGN0_GET( _reg ) \
6555 ( ( ( _reg ) >> 10 ) & 0x3U )
6556
6557#define AARCH64_VTCR_EL2_SH0( _val ) ( ( _val ) << 12 )
6558#define AARCH64_VTCR_EL2_SH0_SHIFT 12
6559#define AARCH64_VTCR_EL2_SH0_MASK 0x3000U
6560#define AARCH64_VTCR_EL2_SH0_GET( _reg ) \
6561 ( ( ( _reg ) >> 12 ) & 0x3U )
6562
6563#define AARCH64_VTCR_EL2_TG0( _val ) ( ( _val ) << 14 )
6564#define AARCH64_VTCR_EL2_TG0_SHIFT 14
6565#define AARCH64_VTCR_EL2_TG0_MASK 0xc000U
6566#define AARCH64_VTCR_EL2_TG0_GET( _reg ) \
6567 ( ( ( _reg ) >> 14 ) & 0x3U )
6568
6569#define AARCH64_VTCR_EL2_PS( _val ) ( ( _val ) << 16 )
6570#define AARCH64_VTCR_EL2_PS_SHIFT 16
6571#define AARCH64_VTCR_EL2_PS_MASK 0x70000U
6572#define AARCH64_VTCR_EL2_PS_GET( _reg ) \
6573 ( ( ( _reg ) >> 16 ) & 0x7U )
6574
6575#define AARCH64_VTCR_EL2_VS 0x80000U
6576
6577#define AARCH64_VTCR_EL2_HA 0x200000U
6578
6579#define AARCH64_VTCR_EL2_HD 0x400000U
6580
6581#define AARCH64_VTCR_EL2_HWU59 0x2000000U
6582
6583#define AARCH64_VTCR_EL2_HWU60 0x4000000U
6584
6585#define AARCH64_VTCR_EL2_HWU61 0x8000000U
6586
6587#define AARCH64_VTCR_EL2_HWU62 0x10000000U
6588
6589#define AARCH64_VTCR_EL2_NSW 0x20000000U
6590
6591#define AARCH64_VTCR_EL2_NSA 0x40000000U
6592
6593static inline uint64_t _AArch64_Read_vtcr_el2( void )
6594{
6595 uint64_t value;
6596
6597 __asm__ volatile (
6598 "mrs %0, VTCR_EL2" : "=&r" ( value ) : : "memory"
6599 );
6600
6601 return value;
6602}
6603
6604static inline void _AArch64_Write_vtcr_el2( uint64_t value )
6605{
6606 __asm__ volatile (
6607 "msr VTCR_EL2, %0" : : "r" ( value ) : "memory"
6608 );
6609}
6610
6611/* VTTBR_EL2, Virtualization Translation Table Base Register */
6612
6613#define AARCH64_VTTBR_EL2_CNP 0x1U
6614
6615#define AARCH64_VTTBR_EL2_BADDR( _val ) ( ( _val ) << 1 )
6616#define AARCH64_VTTBR_EL2_BADDR_SHIFT 1
6617#define AARCH64_VTTBR_EL2_BADDR_MASK 0xfffffffffffeULL
6618#define AARCH64_VTTBR_EL2_BADDR_GET( _reg ) \
6619 ( ( ( _reg ) >> 1 ) & 0x7fffffffffffULL )
6620
6621#define AARCH64_VTTBR_EL2_VMID_7_0( _val ) ( ( _val ) << 48 )
6622#define AARCH64_VTTBR_EL2_VMID_7_0_SHIFT 48
6623#define AARCH64_VTTBR_EL2_VMID_7_0_MASK 0xff000000000000ULL
6624#define AARCH64_VTTBR_EL2_VMID_7_0_GET( _reg ) \
6625 ( ( ( _reg ) >> 48 ) & 0xffULL )
6626
6627#define AARCH64_VTTBR_EL2_VMID_15_8( _val ) ( ( _val ) << 56 )
6628#define AARCH64_VTTBR_EL2_VMID_15_8_SHIFT 56
6629#define AARCH64_VTTBR_EL2_VMID_15_8_MASK 0xff00000000000000ULL
6630#define AARCH64_VTTBR_EL2_VMID_15_8_GET( _reg ) \
6631 ( ( ( _reg ) >> 56 ) & 0xffULL )
6632
6633static inline uint64_t _AArch64_Read_vttbr_el2( void )
6634{
6635 uint64_t value;
6636
6637 __asm__ volatile (
6638 "mrs %0, VTTBR_EL2" : "=&r" ( value ) : : "memory"
6639 );
6640
6641 return value;
6642}
6643
6644static inline void _AArch64_Write_vttbr_el2( uint64_t value )
6645{
6646 __asm__ volatile (
6647 "msr VTTBR_EL2, %0" : : "r" ( value ) : "memory"
6648 );
6649}
6650
6651/* DBGAUTHSTATUS_EL1, Debug Authentication Status Register */
6652
6653#define AARCH64_DBGAUTHSTATUS_EL1_NSID( _val ) ( ( _val ) << 0 )
6654#define AARCH64_DBGAUTHSTATUS_EL1_NSID_SHIFT 0
6655#define AARCH64_DBGAUTHSTATUS_EL1_NSID_MASK 0x3U
6656#define AARCH64_DBGAUTHSTATUS_EL1_NSID_GET( _reg ) \
6657 ( ( ( _reg ) >> 0 ) & 0x3U )
6658
6659#define AARCH64_DBGAUTHSTATUS_EL1_NSNID( _val ) ( ( _val ) << 2 )
6660#define AARCH64_DBGAUTHSTATUS_EL1_NSNID_SHIFT 2
6661#define AARCH64_DBGAUTHSTATUS_EL1_NSNID_MASK 0xcU
6662#define AARCH64_DBGAUTHSTATUS_EL1_NSNID_GET( _reg ) \
6663 ( ( ( _reg ) >> 2 ) & 0x3U )
6664
6665#define AARCH64_DBGAUTHSTATUS_EL1_SID( _val ) ( ( _val ) << 4 )
6666#define AARCH64_DBGAUTHSTATUS_EL1_SID_SHIFT 4
6667#define AARCH64_DBGAUTHSTATUS_EL1_SID_MASK 0x30U
6668#define AARCH64_DBGAUTHSTATUS_EL1_SID_GET( _reg ) \
6669 ( ( ( _reg ) >> 4 ) & 0x3U )
6670
6671#define AARCH64_DBGAUTHSTATUS_EL1_SNID( _val ) ( ( _val ) << 6 )
6672#define AARCH64_DBGAUTHSTATUS_EL1_SNID_SHIFT 6
6673#define AARCH64_DBGAUTHSTATUS_EL1_SNID_MASK 0xc0U
6674#define AARCH64_DBGAUTHSTATUS_EL1_SNID_GET( _reg ) \
6675 ( ( ( _reg ) >> 6 ) & 0x3U )
6676
6677static inline uint64_t _AArch64_Read_dbgauthstatus_el1( void )
6678{
6679 uint64_t value;
6680
6681 __asm__ volatile (
6682 "mrs %0, DBGAUTHSTATUS_EL1" : "=&r" ( value ) : : "memory"
6683 );
6684
6685 return value;
6686}
6687
6688/* DBGBCR_N_EL1, Debug Breakpoint Control Registers, n = 0 - 15 */
6689
6690#define AARCH64_DBGBCR_N_EL1_E 0x1U
6691
6692#define AARCH64_DBGBCR_N_EL1_PMC( _val ) ( ( _val ) << 1 )
6693#define AARCH64_DBGBCR_N_EL1_PMC_SHIFT 1
6694#define AARCH64_DBGBCR_N_EL1_PMC_MASK 0x6U
6695#define AARCH64_DBGBCR_N_EL1_PMC_GET( _reg ) \
6696 ( ( ( _reg ) >> 1 ) & 0x3U )
6697
6698#define AARCH64_DBGBCR_N_EL1_BAS( _val ) ( ( _val ) << 5 )
6699#define AARCH64_DBGBCR_N_EL1_BAS_SHIFT 5
6700#define AARCH64_DBGBCR_N_EL1_BAS_MASK 0x1e0U
6701#define AARCH64_DBGBCR_N_EL1_BAS_GET( _reg ) \
6702 ( ( ( _reg ) >> 5 ) & 0xfU )
6703
6704#define AARCH64_DBGBCR_N_EL1_HMC 0x2000U
6705
6706#define AARCH64_DBGBCR_N_EL1_SSC( _val ) ( ( _val ) << 14 )
6707#define AARCH64_DBGBCR_N_EL1_SSC_SHIFT 14
6708#define AARCH64_DBGBCR_N_EL1_SSC_MASK 0xc000U
6709#define AARCH64_DBGBCR_N_EL1_SSC_GET( _reg ) \
6710 ( ( ( _reg ) >> 14 ) & 0x3U )
6711
6712#define AARCH64_DBGBCR_N_EL1_LBN( _val ) ( ( _val ) << 16 )
6713#define AARCH64_DBGBCR_N_EL1_LBN_SHIFT 16
6714#define AARCH64_DBGBCR_N_EL1_LBN_MASK 0xf0000U
6715#define AARCH64_DBGBCR_N_EL1_LBN_GET( _reg ) \
6716 ( ( ( _reg ) >> 16 ) & 0xfU )
6717
6718#define AARCH64_DBGBCR_N_EL1_BT( _val ) ( ( _val ) << 20 )
6719#define AARCH64_DBGBCR_N_EL1_BT_SHIFT 20
6720#define AARCH64_DBGBCR_N_EL1_BT_MASK 0xf00000U
6721#define AARCH64_DBGBCR_N_EL1_BT_GET( _reg ) \
6722 ( ( ( _reg ) >> 20 ) & 0xfU )
6723
6724static inline uint64_t _AArch64_Read_dbgbcr0_el1( void )
6725{
6726 uint64_t value;
6727
6728 __asm__ volatile (
6729 "mrs %0, DBGBCR0_EL1" : "=&r" ( value ) : : "memory"
6730 );
6731
6732 return value;
6733}
6734
6735static inline void _AArch64_Write_dbgbcr0_el1( uint64_t value )
6736{
6737 __asm__ volatile (
6738 "msr DBGBCR0_EL1, %0" : : "r" ( value ) : "memory"
6739 );
6740}
6741
6742static inline uint64_t _AArch64_Read_dbgbcr1_el1( void )
6743{
6744 uint64_t value;
6745
6746 __asm__ volatile (
6747 "mrs %0, DBGBCR1_EL1" : "=&r" ( value ) : : "memory"
6748 );
6749
6750 return value;
6751}
6752
6753static inline void _AArch64_Write_dbgbcr1_el1( uint64_t value )
6754{
6755 __asm__ volatile (
6756 "msr DBGBCR1_EL1, %0" : : "r" ( value ) : "memory"
6757 );
6758}
6759
6760static inline uint64_t _AArch64_Read_dbgbcr2_el1( void )
6761{
6762 uint64_t value;
6763
6764 __asm__ volatile (
6765 "mrs %0, DBGBCR2_EL1" : "=&r" ( value ) : : "memory"
6766 );
6767
6768 return value;
6769}
6770
6771static inline void _AArch64_Write_dbgbcr2_el1( uint64_t value )
6772{
6773 __asm__ volatile (
6774 "msr DBGBCR2_EL1, %0" : : "r" ( value ) : "memory"
6775 );
6776}
6777
6778static inline uint64_t _AArch64_Read_dbgbcr3_el1( void )
6779{
6780 uint64_t value;
6781
6782 __asm__ volatile (
6783 "mrs %0, DBGBCR3_EL1" : "=&r" ( value ) : : "memory"
6784 );
6785
6786 return value;
6787}
6788
6789static inline void _AArch64_Write_dbgbcr3_el1( uint64_t value )
6790{
6791 __asm__ volatile (
6792 "msr DBGBCR3_EL1, %0" : : "r" ( value ) : "memory"
6793 );
6794}
6795
6796static inline uint64_t _AArch64_Read_dbgbcr4_el1( void )
6797{
6798 uint64_t value;
6799
6800 __asm__ volatile (
6801 "mrs %0, DBGBCR4_EL1" : "=&r" ( value ) : : "memory"
6802 );
6803
6804 return value;
6805}
6806
6807static inline void _AArch64_Write_dbgbcr4_el1( uint64_t value )
6808{
6809 __asm__ volatile (
6810 "msr DBGBCR4_EL1, %0" : : "r" ( value ) : "memory"
6811 );
6812}
6813
6814static inline uint64_t _AArch64_Read_dbgbcr5_el1( void )
6815{
6816 uint64_t value;
6817
6818 __asm__ volatile (
6819 "mrs %0, DBGBCR5_EL1" : "=&r" ( value ) : : "memory"
6820 );
6821
6822 return value;
6823}
6824
6825static inline void _AArch64_Write_dbgbcr5_el1( uint64_t value )
6826{
6827 __asm__ volatile (
6828 "msr DBGBCR5_EL1, %0" : : "r" ( value ) : "memory"
6829 );
6830}
6831
6832static inline uint64_t _AArch64_Read_dbgbcr6_el1( void )
6833{
6834 uint64_t value;
6835
6836 __asm__ volatile (
6837 "mrs %0, DBGBCR6_EL1" : "=&r" ( value ) : : "memory"
6838 );
6839
6840 return value;
6841}
6842
6843static inline void _AArch64_Write_dbgbcr6_el1( uint64_t value )
6844{
6845 __asm__ volatile (
6846 "msr DBGBCR6_EL1, %0" : : "r" ( value ) : "memory"
6847 );
6848}
6849
6850static inline uint64_t _AArch64_Read_dbgbcr7_el1( void )
6851{
6852 uint64_t value;
6853
6854 __asm__ volatile (
6855 "mrs %0, DBGBCR7_EL1" : "=&r" ( value ) : : "memory"
6856 );
6857
6858 return value;
6859}
6860
6861static inline void _AArch64_Write_dbgbcr7_el1( uint64_t value )
6862{
6863 __asm__ volatile (
6864 "msr DBGBCR7_EL1, %0" : : "r" ( value ) : "memory"
6865 );
6866}
6867
6868static inline uint64_t _AArch64_Read_dbgbcr8_el1( void )
6869{
6870 uint64_t value;
6871
6872 __asm__ volatile (
6873 "mrs %0, DBGBCR8_EL1" : "=&r" ( value ) : : "memory"
6874 );
6875
6876 return value;
6877}
6878
6879static inline void _AArch64_Write_dbgbcr8_el1( uint64_t value )
6880{
6881 __asm__ volatile (
6882 "msr DBGBCR8_EL1, %0" : : "r" ( value ) : "memory"
6883 );
6884}
6885
6886static inline uint64_t _AArch64_Read_dbgbcr9_el1( void )
6887{
6888 uint64_t value;
6889
6890 __asm__ volatile (
6891 "mrs %0, DBGBCR9_EL1" : "=&r" ( value ) : : "memory"
6892 );
6893
6894 return value;
6895}
6896
6897static inline void _AArch64_Write_dbgbcr9_el1( uint64_t value )
6898{
6899 __asm__ volatile (
6900 "msr DBGBCR9_EL1, %0" : : "r" ( value ) : "memory"
6901 );
6902}
6903
6904static inline uint64_t _AArch64_Read_dbgbcr10_el1( void )
6905{
6906 uint64_t value;
6907
6908 __asm__ volatile (
6909 "mrs %0, DBGBCR10_EL1" : "=&r" ( value ) : : "memory"
6910 );
6911
6912 return value;
6913}
6914
6915static inline void _AArch64_Write_dbgbcr10_el1( uint64_t value )
6916{
6917 __asm__ volatile (
6918 "msr DBGBCR10_EL1, %0" : : "r" ( value ) : "memory"
6919 );
6920}
6921
6922static inline uint64_t _AArch64_Read_dbgbcr11_el1( void )
6923{
6924 uint64_t value;
6925
6926 __asm__ volatile (
6927 "mrs %0, DBGBCR11_EL1" : "=&r" ( value ) : : "memory"
6928 );
6929
6930 return value;
6931}
6932
6933static inline void _AArch64_Write_dbgbcr11_el1( uint64_t value )
6934{
6935 __asm__ volatile (
6936 "msr DBGBCR11_EL1, %0" : : "r" ( value ) : "memory"
6937 );
6938}
6939
6940static inline uint64_t _AArch64_Read_dbgbcr12_el1( void )
6941{
6942 uint64_t value;
6943
6944 __asm__ volatile (
6945 "mrs %0, DBGBCR12_EL1" : "=&r" ( value ) : : "memory"
6946 );
6947
6948 return value;
6949}
6950
6951static inline void _AArch64_Write_dbgbcr12_el1( uint64_t value )
6952{
6953 __asm__ volatile (
6954 "msr DBGBCR12_EL1, %0" : : "r" ( value ) : "memory"
6955 );
6956}
6957
6958static inline uint64_t _AArch64_Read_dbgbcr13_el1( void )
6959{
6960 uint64_t value;
6961
6962 __asm__ volatile (
6963 "mrs %0, DBGBCR13_EL1" : "=&r" ( value ) : : "memory"
6964 );
6965
6966 return value;
6967}
6968
6969static inline void _AArch64_Write_dbgbcr13_el1( uint64_t value )
6970{
6971 __asm__ volatile (
6972 "msr DBGBCR13_EL1, %0" : : "r" ( value ) : "memory"
6973 );
6974}
6975
6976static inline uint64_t _AArch64_Read_dbgbcr14_el1( void )
6977{
6978 uint64_t value;
6979
6980 __asm__ volatile (
6981 "mrs %0, DBGBCR14_EL1" : "=&r" ( value ) : : "memory"
6982 );
6983
6984 return value;
6985}
6986
6987static inline void _AArch64_Write_dbgbcr14_el1( uint64_t value )
6988{
6989 __asm__ volatile (
6990 "msr DBGBCR14_EL1, %0" : : "r" ( value ) : "memory"
6991 );
6992}
6993
6994static inline uint64_t _AArch64_Read_dbgbcr15_el1( void )
6995{
6996 uint64_t value;
6997
6998 __asm__ volatile (
6999 "mrs %0, DBGBCR15_EL1" : "=&r" ( value ) : : "memory"
7000 );
7001
7002 return value;
7003}
7004
7005static inline void _AArch64_Write_dbgbcr15_el1( uint64_t value )
7006{
7007 __asm__ volatile (
7008 "msr DBGBCR15_EL1, %0" : : "r" ( value ) : "memory"
7009 );
7010}
7011
7012/* DBGBVR_N_EL1, Debug Breakpoint Value Registers, n = 0 - 15 */
7013
7014#define AARCH64_DBGBVR_N_EL1_CONTEXTID( _val ) ( ( _val ) << 0 )
7015#define AARCH64_DBGBVR_N_EL1_CONTEXTID_SHIFT 0
7016#define AARCH64_DBGBVR_N_EL1_CONTEXTID_MASK 0xffffffffU
7017#define AARCH64_DBGBVR_N_EL1_CONTEXTID_GET( _reg ) \
7018 ( ( ( _reg ) >> 0 ) & 0xffffffffU )
7019
7020#define AARCH64_DBGBVR_N_EL1_VA_48_2( _val ) ( ( _val ) << 2 )
7021#define AARCH64_DBGBVR_N_EL1_VA_48_2_SHIFT 2
7022#define AARCH64_DBGBVR_N_EL1_VA_48_2_MASK 0x1fffffffffffcULL
7023#define AARCH64_DBGBVR_N_EL1_VA_48_2_GET( _reg ) \
7024 ( ( ( _reg ) >> 2 ) & 0x7fffffffffffULL )
7025
7026#define AARCH64_DBGBVR_N_EL1_VMID_7_0( _val ) ( ( _val ) << 32 )
7027#define AARCH64_DBGBVR_N_EL1_VMID_7_0_SHIFT 32
7028#define AARCH64_DBGBVR_N_EL1_VMID_7_0_MASK 0xff00000000ULL
7029#define AARCH64_DBGBVR_N_EL1_VMID_7_0_GET( _reg ) \
7030 ( ( ( _reg ) >> 32 ) & 0xffULL )
7031
7032#define AARCH64_DBGBVR_N_EL1_CONTEXTID2( _val ) ( ( _val ) << 32 )
7033#define AARCH64_DBGBVR_N_EL1_CONTEXTID2_SHIFT 32
7034#define AARCH64_DBGBVR_N_EL1_CONTEXTID2_MASK 0xffffffff00000000ULL
7035#define AARCH64_DBGBVR_N_EL1_CONTEXTID2_GET( _reg ) \
7036 ( ( ( _reg ) >> 32 ) & 0xffffffffULL )
7037
7038#define AARCH64_DBGBVR_N_EL1_VMID_15_8( _val ) ( ( _val ) << 40 )
7039#define AARCH64_DBGBVR_N_EL1_VMID_15_8_SHIFT 40
7040#define AARCH64_DBGBVR_N_EL1_VMID_15_8_MASK 0xff0000000000ULL
7041#define AARCH64_DBGBVR_N_EL1_VMID_15_8_GET( _reg ) \
7042 ( ( ( _reg ) >> 40 ) & 0xffULL )
7043
7044#define AARCH64_DBGBVR_N_EL1_VA_52_49( _val ) ( ( _val ) << 49 )
7045#define AARCH64_DBGBVR_N_EL1_VA_52_49_SHIFT 49
7046#define AARCH64_DBGBVR_N_EL1_VA_52_49_MASK 0x1e000000000000ULL
7047#define AARCH64_DBGBVR_N_EL1_VA_52_49_GET( _reg ) \
7048 ( ( ( _reg ) >> 49 ) & 0xfULL )
7049
7050#define AARCH64_DBGBVR_N_EL1_RESS_14_4( _val ) ( ( _val ) << 53 )
7051#define AARCH64_DBGBVR_N_EL1_RESS_14_4_SHIFT 53
7052#define AARCH64_DBGBVR_N_EL1_RESS_14_4_MASK 0xffe0000000000000ULL
7053#define AARCH64_DBGBVR_N_EL1_RESS_14_4_GET( _reg ) \
7054 ( ( ( _reg ) >> 53 ) & 0x7ffULL )
7055
7056static inline uint64_t _AArch64_Read_dbgbvr0_el1( void )
7057{
7058 uint64_t value;
7059
7060 __asm__ volatile (
7061 "mrs %0, DBGBVR0_EL1" : "=&r" ( value ) : : "memory"
7062 );
7063
7064 return value;
7065}
7066
7067static inline void _AArch64_Write_dbgbvr0_el1( uint64_t value )
7068{
7069 __asm__ volatile (
7070 "msr DBGBVR0_EL1, %0" : : "r" ( value ) : "memory"
7071 );
7072}
7073
7074static inline uint64_t _AArch64_Read_dbgbvr1_el1( void )
7075{
7076 uint64_t value;
7077
7078 __asm__ volatile (
7079 "mrs %0, DBGBVR1_EL1" : "=&r" ( value ) : : "memory"
7080 );
7081
7082 return value;
7083}
7084
7085static inline void _AArch64_Write_dbgbvr1_el1( uint64_t value )
7086{
7087 __asm__ volatile (
7088 "msr DBGBVR1_EL1, %0" : : "r" ( value ) : "memory"
7089 );
7090}
7091
7092static inline uint64_t _AArch64_Read_dbgbvr2_el1( void )
7093{
7094 uint64_t value;
7095
7096 __asm__ volatile (
7097 "mrs %0, DBGBVR2_EL1" : "=&r" ( value ) : : "memory"
7098 );
7099
7100 return value;
7101}
7102
7103static inline void _AArch64_Write_dbgbvr2_el1( uint64_t value )
7104{
7105 __asm__ volatile (
7106 "msr DBGBVR2_EL1, %0" : : "r" ( value ) : "memory"
7107 );
7108}
7109
7110static inline uint64_t _AArch64_Read_dbgbvr3_el1( void )
7111{
7112 uint64_t value;
7113
7114 __asm__ volatile (
7115 "mrs %0, DBGBVR3_EL1" : "=&r" ( value ) : : "memory"
7116 );
7117
7118 return value;
7119}
7120
7121static inline void _AArch64_Write_dbgbvr3_el1( uint64_t value )
7122{
7123 __asm__ volatile (
7124 "msr DBGBVR3_EL1, %0" : : "r" ( value ) : "memory"
7125 );
7126}
7127
7128static inline uint64_t _AArch64_Read_dbgbvr4_el1( void )
7129{
7130 uint64_t value;
7131
7132 __asm__ volatile (
7133 "mrs %0, DBGBVR4_EL1" : "=&r" ( value ) : : "memory"
7134 );
7135
7136 return value;
7137}
7138
7139static inline void _AArch64_Write_dbgbvr4_el1( uint64_t value )
7140{
7141 __asm__ volatile (
7142 "msr DBGBVR4_EL1, %0" : : "r" ( value ) : "memory"
7143 );
7144}
7145
7146static inline uint64_t _AArch64_Read_dbgbvr5_el1( void )
7147{
7148 uint64_t value;
7149
7150 __asm__ volatile (
7151 "mrs %0, DBGBVR5_EL1" : "=&r" ( value ) : : "memory"
7152 );
7153
7154 return value;
7155}
7156
7157static inline void _AArch64_Write_dbgbvr5_el1( uint64_t value )
7158{
7159 __asm__ volatile (
7160 "msr DBGBVR5_EL1, %0" : : "r" ( value ) : "memory"
7161 );
7162}
7163
7164static inline uint64_t _AArch64_Read_dbgbvr6_el1( void )
7165{
7166 uint64_t value;
7167
7168 __asm__ volatile (
7169 "mrs %0, DBGBVR6_EL1" : "=&r" ( value ) : : "memory"
7170 );
7171
7172 return value;
7173}
7174
7175static inline void _AArch64_Write_dbgbvr6_el1( uint64_t value )
7176{
7177 __asm__ volatile (
7178 "msr DBGBVR6_EL1, %0" : : "r" ( value ) : "memory"
7179 );
7180}
7181
7182static inline uint64_t _AArch64_Read_dbgbvr7_el1( void )
7183{
7184 uint64_t value;
7185
7186 __asm__ volatile (
7187 "mrs %0, DBGBVR7_EL1" : "=&r" ( value ) : : "memory"
7188 );
7189
7190 return value;
7191}
7192
7193static inline void _AArch64_Write_dbgbvr7_el1( uint64_t value )
7194{
7195 __asm__ volatile (
7196 "msr DBGBVR7_EL1, %0" : : "r" ( value ) : "memory"
7197 );
7198}
7199
7200static inline uint64_t _AArch64_Read_dbgbvr8_el1( void )
7201{
7202 uint64_t value;
7203
7204 __asm__ volatile (
7205 "mrs %0, DBGBVR8_EL1" : "=&r" ( value ) : : "memory"
7206 );
7207
7208 return value;
7209}
7210
7211static inline void _AArch64_Write_dbgbvr8_el1( uint64_t value )
7212{
7213 __asm__ volatile (
7214 "msr DBGBVR8_EL1, %0" : : "r" ( value ) : "memory"
7215 );
7216}
7217
7218static inline uint64_t _AArch64_Read_dbgbvr9_el1( void )
7219{
7220 uint64_t value;
7221
7222 __asm__ volatile (
7223 "mrs %0, DBGBVR9_EL1" : "=&r" ( value ) : : "memory"
7224 );
7225
7226 return value;
7227}
7228
7229static inline void _AArch64_Write_dbgbvr9_el1( uint64_t value )
7230{
7231 __asm__ volatile (
7232 "msr DBGBVR9_EL1, %0" : : "r" ( value ) : "memory"
7233 );
7234}
7235
7236static inline uint64_t _AArch64_Read_dbgbvr10_el1( void )
7237{
7238 uint64_t value;
7239
7240 __asm__ volatile (
7241 "mrs %0, DBGBVR10_EL1" : "=&r" ( value ) : : "memory"
7242 );
7243
7244 return value;
7245}
7246
7247static inline void _AArch64_Write_dbgbvr10_el1( uint64_t value )
7248{
7249 __asm__ volatile (
7250 "msr DBGBVR10_EL1, %0" : : "r" ( value ) : "memory"
7251 );
7252}
7253
7254static inline uint64_t _AArch64_Read_dbgbvr11_el1( void )
7255{
7256 uint64_t value;
7257
7258 __asm__ volatile (
7259 "mrs %0, DBGBVR11_EL1" : "=&r" ( value ) : : "memory"
7260 );
7261
7262 return value;
7263}
7264
7265static inline void _AArch64_Write_dbgbvr11_el1( uint64_t value )
7266{
7267 __asm__ volatile (
7268 "msr DBGBVR11_EL1, %0" : : "r" ( value ) : "memory"
7269 );
7270}
7271
7272static inline uint64_t _AArch64_Read_dbgbvr12_el1( void )
7273{
7274 uint64_t value;
7275
7276 __asm__ volatile (
7277 "mrs %0, DBGBVR12_EL1" : "=&r" ( value ) : : "memory"
7278 );
7279
7280 return value;
7281}
7282
7283static inline void _AArch64_Write_dbgbvr12_el1( uint64_t value )
7284{
7285 __asm__ volatile (
7286 "msr DBGBVR12_EL1, %0" : : "r" ( value ) : "memory"
7287 );
7288}
7289
7290static inline uint64_t _AArch64_Read_dbgbvr13_el1( void )
7291{
7292 uint64_t value;
7293
7294 __asm__ volatile (
7295 "mrs %0, DBGBVR13_EL1" : "=&r" ( value ) : : "memory"
7296 );
7297
7298 return value;
7299}
7300
7301static inline void _AArch64_Write_dbgbvr13_el1( uint64_t value )
7302{
7303 __asm__ volatile (
7304 "msr DBGBVR13_EL1, %0" : : "r" ( value ) : "memory"
7305 );
7306}
7307
7308static inline uint64_t _AArch64_Read_dbgbvr14_el1( void )
7309{
7310 uint64_t value;
7311
7312 __asm__ volatile (
7313 "mrs %0, DBGBVR14_EL1" : "=&r" ( value ) : : "memory"
7314 );
7315
7316 return value;
7317}
7318
7319static inline void _AArch64_Write_dbgbvr14_el1( uint64_t value )
7320{
7321 __asm__ volatile (
7322 "msr DBGBVR14_EL1, %0" : : "r" ( value ) : "memory"
7323 );
7324}
7325
7326static inline uint64_t _AArch64_Read_dbgbvr15_el1( void )
7327{
7328 uint64_t value;
7329
7330 __asm__ volatile (
7331 "mrs %0, DBGBVR15_EL1" : "=&r" ( value ) : : "memory"
7332 );
7333
7334 return value;
7335}
7336
7337static inline void _AArch64_Write_dbgbvr15_el1( uint64_t value )
7338{
7339 __asm__ volatile (
7340 "msr DBGBVR15_EL1, %0" : : "r" ( value ) : "memory"
7341 );
7342}
7343
/* DBGCLAIMCLR_EL1, Debug CLAIM Tag Clear Register */

#define AARCH64_DBGCLAIMCLR_EL1_CLAIM( _val ) ( ( _val ) << 0 )
#define AARCH64_DBGCLAIMCLR_EL1_CLAIM_SHIFT 0
#define AARCH64_DBGCLAIMCLR_EL1_CLAIM_MASK 0xffU
#define AARCH64_DBGCLAIMCLR_EL1_CLAIM_GET( _reg ) \
  ( ( ( _reg ) >> 0 ) & 0xffU )

/* Read the DBGCLAIMCLR_EL1 system register. */
static inline uint64_t _AArch64_Read_dbgclaimclr_el1( void )
{
  uint64_t result;
  __asm__ volatile ( "mrs %0, DBGCLAIMCLR_EL1" : "=&r" ( result ) : : "memory" );
  return result;
}

/* Write the DBGCLAIMCLR_EL1 system register. */
static inline void _AArch64_Write_dbgclaimclr_el1( uint64_t value )
{
  __asm__ volatile ( "msr DBGCLAIMCLR_EL1, %0" : : "r" ( value ) : "memory" );
}
7369
/* DBGCLAIMSET_EL1, Debug CLAIM Tag Set Register */

#define AARCH64_DBGCLAIMSET_EL1_CLAIM( _val ) ( ( _val ) << 0 )
#define AARCH64_DBGCLAIMSET_EL1_CLAIM_SHIFT 0
#define AARCH64_DBGCLAIMSET_EL1_CLAIM_MASK 0xffU
#define AARCH64_DBGCLAIMSET_EL1_CLAIM_GET( _reg ) \
  ( ( ( _reg ) >> 0 ) & 0xffU )

/* Read the DBGCLAIMSET_EL1 system register. */
static inline uint64_t _AArch64_Read_dbgclaimset_el1( void )
{
  uint64_t result;
  __asm__ volatile ( "mrs %0, DBGCLAIMSET_EL1" : "=&r" ( result ) : : "memory" );
  return result;
}

/* Write the DBGCLAIMSET_EL1 system register. */
static inline void _AArch64_Write_dbgclaimset_el1( uint64_t value )
{
  __asm__ volatile ( "msr DBGCLAIMSET_EL1, %0" : : "r" ( value ) : "memory" );
}
7395
/* DBGDTR_EL0, Debug Data Transfer Register, half-duplex */

#define AARCH64_DBGDTR_EL0_LOWWORD( _val ) ( ( _val ) << 0 )
#define AARCH64_DBGDTR_EL0_LOWWORD_SHIFT 0
#define AARCH64_DBGDTR_EL0_LOWWORD_MASK 0xffffffffU
#define AARCH64_DBGDTR_EL0_LOWWORD_GET( _reg ) \
  ( ( ( _reg ) >> 0 ) & 0xffffffffU )

#define AARCH64_DBGDTR_EL0_HIGHWORD( _val ) ( ( _val ) << 32 )
#define AARCH64_DBGDTR_EL0_HIGHWORD_SHIFT 32
#define AARCH64_DBGDTR_EL0_HIGHWORD_MASK 0xffffffff00000000ULL
#define AARCH64_DBGDTR_EL0_HIGHWORD_GET( _reg ) \
  ( ( ( _reg ) >> 32 ) & 0xffffffffULL )

/* Read the DBGDTR_EL0 system register. */
static inline uint64_t _AArch64_Read_dbgdtr_el0( void )
{
  uint64_t result;
  __asm__ volatile ( "mrs %0, DBGDTR_EL0" : "=&r" ( result ) : : "memory" );
  return result;
}

/* Write the DBGDTR_EL0 system register. */
static inline void _AArch64_Write_dbgdtr_el0( uint64_t value )
{
  __asm__ volatile ( "msr DBGDTR_EL0, %0" : : "r" ( value ) : "memory" );
}
7427
/* DBGDTRRX_EL0, Debug Data Transfer Register, Receive */

/* Read the DBGDTRRX_EL0 system register (read-only accessor, no writer defined). */
static inline uint64_t _AArch64_Read_dbgdtrrx_el0( void )
{
  uint64_t result;
  __asm__ volatile ( "mrs %0, DBGDTRRX_EL0" : "=&r" ( result ) : : "memory" );
  return result;
}

/* DBGDTRTX_EL0, Debug Data Transfer Register, Transmit */

/* Write the DBGDTRTX_EL0 system register (write-only accessor, no reader defined). */
static inline void _AArch64_Write_dbgdtrtx_el0( uint64_t value )
{
  __asm__ volatile ( "msr DBGDTRTX_EL0, %0" : : "r" ( value ) : "memory" );
}
7449
/* DBGPRCR_EL1, Debug Power Control Register */

#define AARCH64_DBGPRCR_EL1_CORENPDRQ 0x1U

/* Read the DBGPRCR_EL1 system register. */
static inline uint64_t _AArch64_Read_dbgprcr_el1( void )
{
  uint64_t result;
  __asm__ volatile ( "mrs %0, DBGPRCR_EL1" : "=&r" ( result ) : : "memory" );
  return result;
}

/* Write the DBGPRCR_EL1 system register. */
static inline void _AArch64_Write_dbgprcr_el1( uint64_t value )
{
  __asm__ volatile ( "msr DBGPRCR_EL1, %0" : : "r" ( value ) : "memory" );
}
7471
/* DBGVCR32_EL2, Debug Vector Catch Register */

/* Several flags share a bit position: they are the Secure/Non-secure and
   short-name/long-name aliases of the same vector-catch enable bit. */
#define AARCH64_DBGVCR32_EL2_SU 0x2U

#define AARCH64_DBGVCR32_EL2_U 0x2U

#define AARCH64_DBGVCR32_EL2_S 0x4U

#define AARCH64_DBGVCR32_EL2_SS 0x4U

#define AARCH64_DBGVCR32_EL2_P 0x8U

#define AARCH64_DBGVCR32_EL2_SP 0x8U

#define AARCH64_DBGVCR32_EL2_D 0x10U

#define AARCH64_DBGVCR32_EL2_SD 0x10U

#define AARCH64_DBGVCR32_EL2_I 0x40U

#define AARCH64_DBGVCR32_EL2_SI 0x40U

#define AARCH64_DBGVCR32_EL2_F 0x80U

#define AARCH64_DBGVCR32_EL2_SF 0x80U

#define AARCH64_DBGVCR32_EL2_NSU 0x2000000U

#define AARCH64_DBGVCR32_EL2_NSS 0x4000000U

#define AARCH64_DBGVCR32_EL2_NSP 0x8000000U

#define AARCH64_DBGVCR32_EL2_NSD 0x10000000U

#define AARCH64_DBGVCR32_EL2_NSI 0x40000000U

#define AARCH64_DBGVCR32_EL2_NSF 0x80000000U

/* Read the DBGVCR32_EL2 system register. */
static inline uint64_t _AArch64_Read_dbgvcr32_el2( void )
{
  uint64_t result;
  __asm__ volatile ( "mrs %0, DBGVCR32_EL2" : "=&r" ( result ) : : "memory" );
  return result;
}

/* Write the DBGVCR32_EL2 system register. */
static inline void _AArch64_Write_dbgvcr32_el2( uint64_t value )
{
  __asm__ volatile ( "msr DBGVCR32_EL2, %0" : : "r" ( value ) : "memory" );
}
7527
/* DBGWCR_N_EL1, Debug Watchpoint Control Registers, n = 0 - 15 */

/* E: watchpoint enable */
#define AARCH64_DBGWCR_N_EL1_E 0x1U

#define AARCH64_DBGWCR_N_EL1_PAC( _val ) ( ( _val ) << 1 )
#define AARCH64_DBGWCR_N_EL1_PAC_SHIFT 1
#define AARCH64_DBGWCR_N_EL1_PAC_MASK 0x6U
#define AARCH64_DBGWCR_N_EL1_PAC_GET( _reg ) \
  ( ( ( _reg ) >> 1 ) & 0x3U )

#define AARCH64_DBGWCR_N_EL1_LSC( _val ) ( ( _val ) << 3 )
#define AARCH64_DBGWCR_N_EL1_LSC_SHIFT 3
#define AARCH64_DBGWCR_N_EL1_LSC_MASK 0x18U
#define AARCH64_DBGWCR_N_EL1_LSC_GET( _reg ) \
  ( ( ( _reg ) >> 3 ) & 0x3U )

#define AARCH64_DBGWCR_N_EL1_BAS( _val ) ( ( _val ) << 5 )
#define AARCH64_DBGWCR_N_EL1_BAS_SHIFT 5
#define AARCH64_DBGWCR_N_EL1_BAS_MASK 0x1fe0U
#define AARCH64_DBGWCR_N_EL1_BAS_GET( _reg ) \
  ( ( ( _reg ) >> 5 ) & 0xffU )

#define AARCH64_DBGWCR_N_EL1_HMC 0x2000U

#define AARCH64_DBGWCR_N_EL1_SSC( _val ) ( ( _val ) << 14 )
#define AARCH64_DBGWCR_N_EL1_SSC_SHIFT 14
#define AARCH64_DBGWCR_N_EL1_SSC_MASK 0xc000U
#define AARCH64_DBGWCR_N_EL1_SSC_GET( _reg ) \
  ( ( ( _reg ) >> 14 ) & 0x3U )

#define AARCH64_DBGWCR_N_EL1_LBN( _val ) ( ( _val ) << 16 )
#define AARCH64_DBGWCR_N_EL1_LBN_SHIFT 16
#define AARCH64_DBGWCR_N_EL1_LBN_MASK 0xf0000U
#define AARCH64_DBGWCR_N_EL1_LBN_GET( _reg ) \
  ( ( ( _reg ) >> 16 ) & 0xfU )

#define AARCH64_DBGWCR_N_EL1_WT 0x100000U

#define AARCH64_DBGWCR_N_EL1_MASK( _val ) ( ( _val ) << 24 )
#define AARCH64_DBGWCR_N_EL1_MASK_SHIFT 24
#define AARCH64_DBGWCR_N_EL1_MASK_MASK 0x1f000000U
#define AARCH64_DBGWCR_N_EL1_MASK_GET( _reg ) \
  ( ( ( _reg ) >> 24 ) & 0x1fU )
7571
/* Read/write accessors for DBGWCR0_EL1 .. DBGWCR15_EL1. */

static inline uint64_t _AArch64_Read_dbgwcr0_el1( void )
{
  uint64_t result;
  __asm__ volatile ( "mrs %0, DBGWCR0_EL1" : "=&r" ( result ) : : "memory" );
  return result;
}

static inline void _AArch64_Write_dbgwcr0_el1( uint64_t value )
{
  __asm__ volatile ( "msr DBGWCR0_EL1, %0" : : "r" ( value ) : "memory" );
}

static inline uint64_t _AArch64_Read_dbgwcr1_el1( void )
{
  uint64_t result;
  __asm__ volatile ( "mrs %0, DBGWCR1_EL1" : "=&r" ( result ) : : "memory" );
  return result;
}

static inline void _AArch64_Write_dbgwcr1_el1( uint64_t value )
{
  __asm__ volatile ( "msr DBGWCR1_EL1, %0" : : "r" ( value ) : "memory" );
}

static inline uint64_t _AArch64_Read_dbgwcr2_el1( void )
{
  uint64_t result;
  __asm__ volatile ( "mrs %0, DBGWCR2_EL1" : "=&r" ( result ) : : "memory" );
  return result;
}

static inline void _AArch64_Write_dbgwcr2_el1( uint64_t value )
{
  __asm__ volatile ( "msr DBGWCR2_EL1, %0" : : "r" ( value ) : "memory" );
}

static inline uint64_t _AArch64_Read_dbgwcr3_el1( void )
{
  uint64_t result;
  __asm__ volatile ( "mrs %0, DBGWCR3_EL1" : "=&r" ( result ) : : "memory" );
  return result;
}

static inline void _AArch64_Write_dbgwcr3_el1( uint64_t value )
{
  __asm__ volatile ( "msr DBGWCR3_EL1, %0" : : "r" ( value ) : "memory" );
}

static inline uint64_t _AArch64_Read_dbgwcr4_el1( void )
{
  uint64_t result;
  __asm__ volatile ( "mrs %0, DBGWCR4_EL1" : "=&r" ( result ) : : "memory" );
  return result;
}

static inline void _AArch64_Write_dbgwcr4_el1( uint64_t value )
{
  __asm__ volatile ( "msr DBGWCR4_EL1, %0" : : "r" ( value ) : "memory" );
}

static inline uint64_t _AArch64_Read_dbgwcr5_el1( void )
{
  uint64_t result;
  __asm__ volatile ( "mrs %0, DBGWCR5_EL1" : "=&r" ( result ) : : "memory" );
  return result;
}

static inline void _AArch64_Write_dbgwcr5_el1( uint64_t value )
{
  __asm__ volatile ( "msr DBGWCR5_EL1, %0" : : "r" ( value ) : "memory" );
}

static inline uint64_t _AArch64_Read_dbgwcr6_el1( void )
{
  uint64_t result;
  __asm__ volatile ( "mrs %0, DBGWCR6_EL1" : "=&r" ( result ) : : "memory" );
  return result;
}

static inline void _AArch64_Write_dbgwcr6_el1( uint64_t value )
{
  __asm__ volatile ( "msr DBGWCR6_EL1, %0" : : "r" ( value ) : "memory" );
}

static inline uint64_t _AArch64_Read_dbgwcr7_el1( void )
{
  uint64_t result;
  __asm__ volatile ( "mrs %0, DBGWCR7_EL1" : "=&r" ( result ) : : "memory" );
  return result;
}

static inline void _AArch64_Write_dbgwcr7_el1( uint64_t value )
{
  __asm__ volatile ( "msr DBGWCR7_EL1, %0" : : "r" ( value ) : "memory" );
}

static inline uint64_t _AArch64_Read_dbgwcr8_el1( void )
{
  uint64_t result;
  __asm__ volatile ( "mrs %0, DBGWCR8_EL1" : "=&r" ( result ) : : "memory" );
  return result;
}

static inline void _AArch64_Write_dbgwcr8_el1( uint64_t value )
{
  __asm__ volatile ( "msr DBGWCR8_EL1, %0" : : "r" ( value ) : "memory" );
}

static inline uint64_t _AArch64_Read_dbgwcr9_el1( void )
{
  uint64_t result;
  __asm__ volatile ( "mrs %0, DBGWCR9_EL1" : "=&r" ( result ) : : "memory" );
  return result;
}

static inline void _AArch64_Write_dbgwcr9_el1( uint64_t value )
{
  __asm__ volatile ( "msr DBGWCR9_EL1, %0" : : "r" ( value ) : "memory" );
}

static inline uint64_t _AArch64_Read_dbgwcr10_el1( void )
{
  uint64_t result;
  __asm__ volatile ( "mrs %0, DBGWCR10_EL1" : "=&r" ( result ) : : "memory" );
  return result;
}

static inline void _AArch64_Write_dbgwcr10_el1( uint64_t value )
{
  __asm__ volatile ( "msr DBGWCR10_EL1, %0" : : "r" ( value ) : "memory" );
}

static inline uint64_t _AArch64_Read_dbgwcr11_el1( void )
{
  uint64_t result;
  __asm__ volatile ( "mrs %0, DBGWCR11_EL1" : "=&r" ( result ) : : "memory" );
  return result;
}

static inline void _AArch64_Write_dbgwcr11_el1( uint64_t value )
{
  __asm__ volatile ( "msr DBGWCR11_EL1, %0" : : "r" ( value ) : "memory" );
}

static inline uint64_t _AArch64_Read_dbgwcr12_el1( void )
{
  uint64_t result;
  __asm__ volatile ( "mrs %0, DBGWCR12_EL1" : "=&r" ( result ) : : "memory" );
  return result;
}

static inline void _AArch64_Write_dbgwcr12_el1( uint64_t value )
{
  __asm__ volatile ( "msr DBGWCR12_EL1, %0" : : "r" ( value ) : "memory" );
}

static inline uint64_t _AArch64_Read_dbgwcr13_el1( void )
{
  uint64_t result;
  __asm__ volatile ( "mrs %0, DBGWCR13_EL1" : "=&r" ( result ) : : "memory" );
  return result;
}

static inline void _AArch64_Write_dbgwcr13_el1( uint64_t value )
{
  __asm__ volatile ( "msr DBGWCR13_EL1, %0" : : "r" ( value ) : "memory" );
}

static inline uint64_t _AArch64_Read_dbgwcr14_el1( void )
{
  uint64_t result;
  __asm__ volatile ( "mrs %0, DBGWCR14_EL1" : "=&r" ( result ) : : "memory" );
  return result;
}

static inline void _AArch64_Write_dbgwcr14_el1( uint64_t value )
{
  __asm__ volatile ( "msr DBGWCR14_EL1, %0" : : "r" ( value ) : "memory" );
}

static inline uint64_t _AArch64_Read_dbgwcr15_el1( void )
{
  uint64_t result;
  __asm__ volatile ( "mrs %0, DBGWCR15_EL1" : "=&r" ( result ) : : "memory" );
  return result;
}

static inline void _AArch64_Write_dbgwcr15_el1( uint64_t value )
{
  __asm__ volatile ( "msr DBGWCR15_EL1, %0" : : "r" ( value ) : "memory" );
}
7859
/* DBGWVR_N_EL1, Debug Watchpoint Value Registers, n = 0 - 15 */

#define AARCH64_DBGWVR_N_EL1_VA_48_2( _val ) ( ( _val ) << 2 )
#define AARCH64_DBGWVR_N_EL1_VA_48_2_SHIFT 2
#define AARCH64_DBGWVR_N_EL1_VA_48_2_MASK 0x1fffffffffffcULL
#define AARCH64_DBGWVR_N_EL1_VA_48_2_GET( _reg ) \
  ( ( ( _reg ) >> 2 ) & 0x7fffffffffffULL )

#define AARCH64_DBGWVR_N_EL1_VA_52_49( _val ) ( ( _val ) << 49 )
#define AARCH64_DBGWVR_N_EL1_VA_52_49_SHIFT 49
#define AARCH64_DBGWVR_N_EL1_VA_52_49_MASK 0x1e000000000000ULL
#define AARCH64_DBGWVR_N_EL1_VA_52_49_GET( _reg ) \
  ( ( ( _reg ) >> 49 ) & 0xfULL )

#define AARCH64_DBGWVR_N_EL1_RESS_14_4( _val ) ( ( _val ) << 53 )
#define AARCH64_DBGWVR_N_EL1_RESS_14_4_SHIFT 53
#define AARCH64_DBGWVR_N_EL1_RESS_14_4_MASK 0xffe0000000000000ULL
#define AARCH64_DBGWVR_N_EL1_RESS_14_4_GET( _reg ) \
  ( ( ( _reg ) >> 53 ) & 0x7ffULL )
7879
/* Read/write accessors for DBGWVR0_EL1 .. DBGWVR15_EL1. */

static inline uint64_t _AArch64_Read_dbgwvr0_el1( void )
{
  uint64_t result;
  __asm__ volatile ( "mrs %0, DBGWVR0_EL1" : "=&r" ( result ) : : "memory" );
  return result;
}

static inline void _AArch64_Write_dbgwvr0_el1( uint64_t value )
{
  __asm__ volatile ( "msr DBGWVR0_EL1, %0" : : "r" ( value ) : "memory" );
}

static inline uint64_t _AArch64_Read_dbgwvr1_el1( void )
{
  uint64_t result;
  __asm__ volatile ( "mrs %0, DBGWVR1_EL1" : "=&r" ( result ) : : "memory" );
  return result;
}

static inline void _AArch64_Write_dbgwvr1_el1( uint64_t value )
{
  __asm__ volatile ( "msr DBGWVR1_EL1, %0" : : "r" ( value ) : "memory" );
}

static inline uint64_t _AArch64_Read_dbgwvr2_el1( void )
{
  uint64_t result;
  __asm__ volatile ( "mrs %0, DBGWVR2_EL1" : "=&r" ( result ) : : "memory" );
  return result;
}

static inline void _AArch64_Write_dbgwvr2_el1( uint64_t value )
{
  __asm__ volatile ( "msr DBGWVR2_EL1, %0" : : "r" ( value ) : "memory" );
}

static inline uint64_t _AArch64_Read_dbgwvr3_el1( void )
{
  uint64_t result;
  __asm__ volatile ( "mrs %0, DBGWVR3_EL1" : "=&r" ( result ) : : "memory" );
  return result;
}

static inline void _AArch64_Write_dbgwvr3_el1( uint64_t value )
{
  __asm__ volatile ( "msr DBGWVR3_EL1, %0" : : "r" ( value ) : "memory" );
}

static inline uint64_t _AArch64_Read_dbgwvr4_el1( void )
{
  uint64_t result;
  __asm__ volatile ( "mrs %0, DBGWVR4_EL1" : "=&r" ( result ) : : "memory" );
  return result;
}

static inline void _AArch64_Write_dbgwvr4_el1( uint64_t value )
{
  __asm__ volatile ( "msr DBGWVR4_EL1, %0" : : "r" ( value ) : "memory" );
}

static inline uint64_t _AArch64_Read_dbgwvr5_el1( void )
{
  uint64_t result;
  __asm__ volatile ( "mrs %0, DBGWVR5_EL1" : "=&r" ( result ) : : "memory" );
  return result;
}

static inline void _AArch64_Write_dbgwvr5_el1( uint64_t value )
{
  __asm__ volatile ( "msr DBGWVR5_EL1, %0" : : "r" ( value ) : "memory" );
}

static inline uint64_t _AArch64_Read_dbgwvr6_el1( void )
{
  uint64_t result;
  __asm__ volatile ( "mrs %0, DBGWVR6_EL1" : "=&r" ( result ) : : "memory" );
  return result;
}

static inline void _AArch64_Write_dbgwvr6_el1( uint64_t value )
{
  __asm__ volatile ( "msr DBGWVR6_EL1, %0" : : "r" ( value ) : "memory" );
}

static inline uint64_t _AArch64_Read_dbgwvr7_el1( void )
{
  uint64_t result;
  __asm__ volatile ( "mrs %0, DBGWVR7_EL1" : "=&r" ( result ) : : "memory" );
  return result;
}

static inline void _AArch64_Write_dbgwvr7_el1( uint64_t value )
{
  __asm__ volatile ( "msr DBGWVR7_EL1, %0" : : "r" ( value ) : "memory" );
}

static inline uint64_t _AArch64_Read_dbgwvr8_el1( void )
{
  uint64_t result;
  __asm__ volatile ( "mrs %0, DBGWVR8_EL1" : "=&r" ( result ) : : "memory" );
  return result;
}

static inline void _AArch64_Write_dbgwvr8_el1( uint64_t value )
{
  __asm__ volatile ( "msr DBGWVR8_EL1, %0" : : "r" ( value ) : "memory" );
}

static inline uint64_t _AArch64_Read_dbgwvr9_el1( void )
{
  uint64_t result;
  __asm__ volatile ( "mrs %0, DBGWVR9_EL1" : "=&r" ( result ) : : "memory" );
  return result;
}

static inline void _AArch64_Write_dbgwvr9_el1( uint64_t value )
{
  __asm__ volatile ( "msr DBGWVR9_EL1, %0" : : "r" ( value ) : "memory" );
}

static inline uint64_t _AArch64_Read_dbgwvr10_el1( void )
{
  uint64_t result;
  __asm__ volatile ( "mrs %0, DBGWVR10_EL1" : "=&r" ( result ) : : "memory" );
  return result;
}

static inline void _AArch64_Write_dbgwvr10_el1( uint64_t value )
{
  __asm__ volatile ( "msr DBGWVR10_EL1, %0" : : "r" ( value ) : "memory" );
}

static inline uint64_t _AArch64_Read_dbgwvr11_el1( void )
{
  uint64_t result;
  __asm__ volatile ( "mrs %0, DBGWVR11_EL1" : "=&r" ( result ) : : "memory" );
  return result;
}

static inline void _AArch64_Write_dbgwvr11_el1( uint64_t value )
{
  __asm__ volatile ( "msr DBGWVR11_EL1, %0" : : "r" ( value ) : "memory" );
}

static inline uint64_t _AArch64_Read_dbgwvr12_el1( void )
{
  uint64_t result;
  __asm__ volatile ( "mrs %0, DBGWVR12_EL1" : "=&r" ( result ) : : "memory" );
  return result;
}

static inline void _AArch64_Write_dbgwvr12_el1( uint64_t value )
{
  __asm__ volatile ( "msr DBGWVR12_EL1, %0" : : "r" ( value ) : "memory" );
}

static inline uint64_t _AArch64_Read_dbgwvr13_el1( void )
{
  uint64_t result;
  __asm__ volatile ( "mrs %0, DBGWVR13_EL1" : "=&r" ( result ) : : "memory" );
  return result;
}

static inline void _AArch64_Write_dbgwvr13_el1( uint64_t value )
{
  __asm__ volatile ( "msr DBGWVR13_EL1, %0" : : "r" ( value ) : "memory" );
}

static inline uint64_t _AArch64_Read_dbgwvr14_el1( void )
{
  uint64_t result;
  __asm__ volatile ( "mrs %0, DBGWVR14_EL1" : "=&r" ( result ) : : "memory" );
  return result;
}

static inline void _AArch64_Write_dbgwvr14_el1( uint64_t value )
{
  __asm__ volatile ( "msr DBGWVR14_EL1, %0" : : "r" ( value ) : "memory" );
}

static inline uint64_t _AArch64_Read_dbgwvr15_el1( void )
{
  uint64_t result;
  __asm__ volatile ( "mrs %0, DBGWVR15_EL1" : "=&r" ( result ) : : "memory" );
  return result;
}

static inline void _AArch64_Write_dbgwvr15_el1( uint64_t value )
{
  __asm__ volatile ( "msr DBGWVR15_EL1, %0" : : "r" ( value ) : "memory" );
}
8167
/* DLR_EL0, Debug Link Register */

/* Read the DLR_EL0 system register. */
static inline uint64_t _AArch64_Read_dlr_el0( void )
{
  uint64_t result;
  __asm__ volatile ( "mrs %0, DLR_EL0" : "=&r" ( result ) : : "memory" );
  return result;
}

/* Write the DLR_EL0 system register. */
static inline void _AArch64_Write_dlr_el0( uint64_t value )
{
  __asm__ volatile ( "msr DLR_EL0, %0" : : "r" ( value ) : "memory" );
}
8187
/* DSPSR_EL0, Debug Saved Program Status Register */

/* Note: some flags share a bit position; they are the AArch64 and AArch32
   views of the same saved-state bit (e.g. D/E at 0x200, SSBS_1/UAO at 0x800000). */
#define AARCH64_DSPSR_EL0_M_3_0( _val ) ( ( _val ) << 0 )
#define AARCH64_DSPSR_EL0_M_3_0_SHIFT 0
#define AARCH64_DSPSR_EL0_M_3_0_MASK 0xfU
#define AARCH64_DSPSR_EL0_M_3_0_GET( _reg ) \
  ( ( ( _reg ) >> 0 ) & 0xfU )

#define AARCH64_DSPSR_EL0_M_4 0x10U

#define AARCH64_DSPSR_EL0_T 0x20U

#define AARCH64_DSPSR_EL0_F 0x40U

#define AARCH64_DSPSR_EL0_I 0x80U

#define AARCH64_DSPSR_EL0_A 0x100U

#define AARCH64_DSPSR_EL0_D 0x200U

#define AARCH64_DSPSR_EL0_E 0x200U

#define AARCH64_DSPSR_EL0_BTYPE( _val ) ( ( _val ) << 10 )
#define AARCH64_DSPSR_EL0_BTYPE_SHIFT 10
#define AARCH64_DSPSR_EL0_BTYPE_MASK 0xc00U
#define AARCH64_DSPSR_EL0_BTYPE_GET( _reg ) \
  ( ( ( _reg ) >> 10 ) & 0x3U )

#define AARCH64_DSPSR_EL0_IT_7_2( _val ) ( ( _val ) << 10 )
#define AARCH64_DSPSR_EL0_IT_7_2_SHIFT 10
#define AARCH64_DSPSR_EL0_IT_7_2_MASK 0xfc00U
#define AARCH64_DSPSR_EL0_IT_7_2_GET( _reg ) \
  ( ( ( _reg ) >> 10 ) & 0x3fU )

#define AARCH64_DSPSR_EL0_SSBS_0 0x1000U

#define AARCH64_DSPSR_EL0_GE( _val ) ( ( _val ) << 16 )
#define AARCH64_DSPSR_EL0_GE_SHIFT 16
#define AARCH64_DSPSR_EL0_GE_MASK 0xf0000U
#define AARCH64_DSPSR_EL0_GE_GET( _reg ) \
  ( ( ( _reg ) >> 16 ) & 0xfU )

#define AARCH64_DSPSR_EL0_IL 0x100000U

#define AARCH64_DSPSR_EL0_SS 0x200000U

#define AARCH64_DSPSR_EL0_PAN 0x400000U

#define AARCH64_DSPSR_EL0_SSBS_1 0x800000U

#define AARCH64_DSPSR_EL0_UAO 0x800000U

#define AARCH64_DSPSR_EL0_DIT 0x1000000U

#define AARCH64_DSPSR_EL0_TCO 0x2000000U

#define AARCH64_DSPSR_EL0_IT_1_0( _val ) ( ( _val ) << 25 )
#define AARCH64_DSPSR_EL0_IT_1_0_SHIFT 25
#define AARCH64_DSPSR_EL0_IT_1_0_MASK 0x6000000U
#define AARCH64_DSPSR_EL0_IT_1_0_GET( _reg ) \
  ( ( ( _reg ) >> 25 ) & 0x3U )

#define AARCH64_DSPSR_EL0_Q 0x8000000U

#define AARCH64_DSPSR_EL0_V 0x10000000U

#define AARCH64_DSPSR_EL0_C 0x20000000U

#define AARCH64_DSPSR_EL0_Z 0x40000000U

#define AARCH64_DSPSR_EL0_N 0x80000000U

/* Read the DSPSR_EL0 system register. */
static inline uint64_t _AArch64_Read_dspsr_el0( void )
{
  uint64_t result;
  __asm__ volatile ( "mrs %0, DSPSR_EL0" : "=&r" ( result ) : : "memory" );
  return result;
}

/* Write the DSPSR_EL0 system register. */
static inline void _AArch64_Write_dspsr_el0( uint64_t value )
{
  __asm__ volatile ( "msr DSPSR_EL0, %0" : : "r" ( value ) : "memory" );
}
8277
/* MDCCINT_EL1, Monitor DCC Interrupt Enable Register */

#define AARCH64_MDCCINT_EL1_TX 0x20000000U

#define AARCH64_MDCCINT_EL1_RX 0x40000000U

/* Read the MDCCINT_EL1 system register. */
static inline uint64_t _AArch64_Read_mdccint_el1( void )
{
  uint64_t result;
  __asm__ volatile ( "mrs %0, MDCCINT_EL1" : "=&r" ( result ) : : "memory" );
  return result;
}

/* Write the MDCCINT_EL1 system register. */
static inline void _AArch64_Write_mdccint_el1( uint64_t value )
{
  __asm__ volatile ( "msr MDCCINT_EL1, %0" : : "r" ( value ) : "memory" );
}
8301
/* MDCCSR_EL0, Monitor DCC Status Register */

#define AARCH64_MDCCSR_EL0_TXFULL 0x20000000U

#define AARCH64_MDCCSR_EL0_RXFULL 0x40000000U

/* Read the MDCCSR_EL0 system register (read-only accessor, no writer defined). */
static inline uint64_t _AArch64_Read_mdccsr_el0( void )
{
  uint64_t result;
  __asm__ volatile ( "mrs %0, MDCCSR_EL0" : "=&r" ( result ) : : "memory" );
  return result;
}
8318
/* MDCR_EL2, Monitor Debug Configuration Register (EL2) */

#define AARCH64_MDCR_EL2_HPMN( _val ) ( ( _val ) << 0 )
#define AARCH64_MDCR_EL2_HPMN_SHIFT 0
#define AARCH64_MDCR_EL2_HPMN_MASK 0x1fU
#define AARCH64_MDCR_EL2_HPMN_GET( _reg ) \
  ( ( ( _reg ) >> 0 ) & 0x1fU )

#define AARCH64_MDCR_EL2_TPMCR 0x20U

#define AARCH64_MDCR_EL2_TPM 0x40U

#define AARCH64_MDCR_EL2_HPME 0x80U

#define AARCH64_MDCR_EL2_TDE 0x100U

#define AARCH64_MDCR_EL2_TDA 0x200U

#define AARCH64_MDCR_EL2_TDOSA 0x400U

#define AARCH64_MDCR_EL2_TDRA 0x800U

#define AARCH64_MDCR_EL2_E2PB( _val ) ( ( _val ) << 12 )
#define AARCH64_MDCR_EL2_E2PB_SHIFT 12
#define AARCH64_MDCR_EL2_E2PB_MASK 0x3000U
#define AARCH64_MDCR_EL2_E2PB_GET( _reg ) \
  ( ( ( _reg ) >> 12 ) & 0x3U )

#define AARCH64_MDCR_EL2_TPMS 0x4000U

#define AARCH64_MDCR_EL2_HPMD 0x20000U

#define AARCH64_MDCR_EL2_TTRF 0x80000U

#define AARCH64_MDCR_EL2_HCCD 0x800000U

#define AARCH64_MDCR_EL2_HLP 0x4000000U

#define AARCH64_MDCR_EL2_TDCC 0x8000000U

#define AARCH64_MDCR_EL2_MTPME 0x10000000U

/* Read the MDCR_EL2 system register. */
static inline uint64_t _AArch64_Read_mdcr_el2( void )
{
  uint64_t result;
  __asm__ volatile ( "mrs %0, MDCR_EL2" : "=&r" ( result ) : : "memory" );
  return result;
}

/* Write the MDCR_EL2 system register. */
static inline void _AArch64_Write_mdcr_el2( uint64_t value )
{
  __asm__ volatile ( "msr MDCR_EL2, %0" : : "r" ( value ) : "memory" );
}
8378
/* MDCR_EL3, Monitor Debug Configuration Register (EL3) */

#define AARCH64_MDCR_EL3_TPM 0x40U

#define AARCH64_MDCR_EL3_TDA 0x200U

#define AARCH64_MDCR_EL3_TDOSA 0x400U

#define AARCH64_MDCR_EL3_NSPB( _val ) ( ( _val ) << 12 )
#define AARCH64_MDCR_EL3_NSPB_SHIFT 12
#define AARCH64_MDCR_EL3_NSPB_MASK 0x3000U
#define AARCH64_MDCR_EL3_NSPB_GET( _reg ) \
  ( ( ( _reg ) >> 12 ) & 0x3U )

#define AARCH64_MDCR_EL3_SPD32( _val ) ( ( _val ) << 14 )
#define AARCH64_MDCR_EL3_SPD32_SHIFT 14
#define AARCH64_MDCR_EL3_SPD32_MASK 0xc000U
#define AARCH64_MDCR_EL3_SPD32_GET( _reg ) \
  ( ( ( _reg ) >> 14 ) & 0x3U )

#define AARCH64_MDCR_EL3_SDD 0x10000U

#define AARCH64_MDCR_EL3_SPME 0x20000U

#define AARCH64_MDCR_EL3_STE 0x40000U

#define AARCH64_MDCR_EL3_TTRF 0x80000U

#define AARCH64_MDCR_EL3_EDAD 0x100000U

#define AARCH64_MDCR_EL3_EPMAD 0x200000U

#define AARCH64_MDCR_EL3_SCCD 0x800000U

#define AARCH64_MDCR_EL3_TDCC 0x8000000U

#define AARCH64_MDCR_EL3_MTPME 0x10000000U

/* Read the MDCR_EL3 system register. */
static inline uint64_t _AArch64_Read_mdcr_el3( void )
{
  uint64_t result;
  __asm__ volatile ( "mrs %0, MDCR_EL3" : "=&r" ( result ) : : "memory" );
  return result;
}

/* Write the MDCR_EL3 system register. */
static inline void _AArch64_Write_mdcr_el3( uint64_t value )
{
  __asm__ volatile ( "msr MDCR_EL3, %0" : : "r" ( value ) : "memory" );
}
8434
/* MDRAR_EL1, Monitor Debug ROM Address Register */

#define AARCH64_MDRAR_EL1_VALID( _val ) ( ( _val ) << 0 )
#define AARCH64_MDRAR_EL1_VALID_SHIFT 0
#define AARCH64_MDRAR_EL1_VALID_MASK 0x3U
#define AARCH64_MDRAR_EL1_VALID_GET( _reg ) \
  ( ( ( _reg ) >> 0 ) & 0x3U )

#define AARCH64_MDRAR_EL1_ROMADDR_47_12( _val ) ( ( _val ) << 12 )
#define AARCH64_MDRAR_EL1_ROMADDR_47_12_SHIFT 12
#define AARCH64_MDRAR_EL1_ROMADDR_47_12_MASK 0xfffffffff000ULL
#define AARCH64_MDRAR_EL1_ROMADDR_47_12_GET( _reg ) \
  ( ( ( _reg ) >> 12 ) & 0xfffffffffULL )

#define AARCH64_MDRAR_EL1_ROMADDR_51_48( _val ) ( ( _val ) << 48 )
#define AARCH64_MDRAR_EL1_ROMADDR_51_48_SHIFT 48
#define AARCH64_MDRAR_EL1_ROMADDR_51_48_MASK 0xf000000000000ULL
#define AARCH64_MDRAR_EL1_ROMADDR_51_48_GET( _reg ) \
  ( ( ( _reg ) >> 48 ) & 0xfULL )

/* Read the MDRAR_EL1 system register (read-only accessor, no writer defined). */
static inline uint64_t _AArch64_Read_mdrar_el1( void )
{
  uint64_t result;
  __asm__ volatile ( "mrs %0, MDRAR_EL1" : "=&r" ( result ) : : "memory" );
  return result;
}
8465
/* MDSCR_EL1, Monitor Debug System Control Register */

#define AARCH64_MDSCR_EL1_SS 0x1U

#define AARCH64_MDSCR_EL1_ERR 0x40U

#define AARCH64_MDSCR_EL1_TDCC 0x1000U

#define AARCH64_MDSCR_EL1_KDE 0x2000U

#define AARCH64_MDSCR_EL1_HDE 0x4000U

#define AARCH64_MDSCR_EL1_MDE 0x8000U

#define AARCH64_MDSCR_EL1_SC2 0x80000U

#define AARCH64_MDSCR_EL1_TDA 0x200000U

#define AARCH64_MDSCR_EL1_INTDIS( _val ) ( ( _val ) << 22 )
#define AARCH64_MDSCR_EL1_INTDIS_SHIFT 22
#define AARCH64_MDSCR_EL1_INTDIS_MASK 0xc00000U
#define AARCH64_MDSCR_EL1_INTDIS_GET( _reg ) \
  ( ( ( _reg ) >> 22 ) & 0x3U )

#define AARCH64_MDSCR_EL1_TXU 0x4000000U

#define AARCH64_MDSCR_EL1_RXO 0x8000000U

#define AARCH64_MDSCR_EL1_TXFULL 0x20000000U

#define AARCH64_MDSCR_EL1_RXFULL 0x40000000U

#define AARCH64_MDSCR_EL1_TFO 0x80000000U

/* Read the MDSCR_EL1 system register. */
static inline uint64_t _AArch64_Read_mdscr_el1( void )
{
  uint64_t result;
  __asm__ volatile ( "mrs %0, MDSCR_EL1" : "=&r" ( result ) : : "memory" );
  return result;
}

/* Write the MDSCR_EL1 system register. */
static inline void _AArch64_Write_mdscr_el1( uint64_t value )
{
  __asm__ volatile ( "msr MDSCR_EL1, %0" : : "r" ( value ) : "memory" );
}
8517
/* OSDLR_EL1, OS Double Lock Register */

#define AARCH64_OSDLR_EL1_DLK 0x1U

/* Read the OSDLR_EL1 system register. */
static inline uint64_t _AArch64_Read_osdlr_el1( void )
{
  uint64_t result;
  __asm__ volatile ( "mrs %0, OSDLR_EL1" : "=&r" ( result ) : : "memory" );
  return result;
}

/* Write the OSDLR_EL1 system register. */
static inline void _AArch64_Write_osdlr_el1( uint64_t value )
{
  __asm__ volatile ( "msr OSDLR_EL1, %0" : : "r" ( value ) : "memory" );
}
8539
8540/* OSDTRRX_EL1, OS Lock Data Transfer Register, Receive */
8541
8542static inline uint64_t _AArch64_Read_osdtrrx_el1( void )
8543{
8544 uint64_t value;
8545
8546 __asm__ volatile (
8547 "mrs %0, OSDTRRX_EL1" : "=&r" ( value ) : : "memory"
8548 );
8549
8550 return value;
8551}
8552
8553static inline void _AArch64_Write_osdtrrx_el1( uint64_t value )
8554{
8555 __asm__ volatile (
8556 "msr OSDTRRX_EL1, %0" : : "r" ( value ) : "memory"
8557 );
8558}
8559
8560/* OSDTRTX_EL1, OS Lock Data Transfer Register, Transmit */
8561
8562static inline uint64_t _AArch64_Read_osdtrtx_el1( void )
8563{
8564 uint64_t value;
8565
8566 __asm__ volatile (
8567 "mrs %0, OSDTRTX_EL1" : "=&r" ( value ) : : "memory"
8568 );
8569
8570 return value;
8571}
8572
8573static inline void _AArch64_Write_osdtrtx_el1( uint64_t value )
8574{
8575 __asm__ volatile (
8576 "msr OSDTRTX_EL1, %0" : : "r" ( value ) : "memory"
8577 );
8578}
8579
/* OSECCR_EL1, OS Lock Exception Catch Control Register */

/* EDECCR, bits [31:0] */
#define AARCH64_OSECCR_EL1_EDECCR( _val ) ( ( _val ) << 0 )
#define AARCH64_OSECCR_EL1_EDECCR_SHIFT 0
#define AARCH64_OSECCR_EL1_EDECCR_MASK 0xffffffffU
#define AARCH64_OSECCR_EL1_EDECCR_GET( _reg ) \
  ( ( ( _reg ) >> 0 ) & 0xffffffffU )

/* Read the OSECCR_EL1 system register. */
static inline uint64_t _AArch64_Read_oseccr_el1( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, OSECCR_EL1" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* Write @a value to the OSECCR_EL1 system register. */
static inline void _AArch64_Write_oseccr_el1( uint64_t value )
{
  __asm__ volatile (
    "msr OSECCR_EL1, %0" : : "r" ( value ) : "memory"
  );
}

/* OSLAR_EL1, OS Lock Access Register */

#define AARCH64_OSLAR_EL1_OSLK 0x1U

/* Write @a value to the OSLAR_EL1 system register (write-only). */
static inline void _AArch64_Write_oslar_el1( uint64_t value )
{
  __asm__ volatile (
    "msr OSLAR_EL1, %0" : : "r" ( value ) : "memory"
  );
}

/* OSLSR_EL1, OS Lock Status Register */

#define AARCH64_OSLSR_EL1_OSLM_0 0x1U

#define AARCH64_OSLSR_EL1_OSLK 0x2U

#define AARCH64_OSLSR_EL1_NTT 0x4U

#define AARCH64_OSLSR_EL1_OSLM_1 0x8U

/* Read the OSLSR_EL1 system register (read-only). */
static inline uint64_t _AArch64_Read_oslsr_el1( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, OSLSR_EL1" : "=&r" ( value ) : : "memory"
  );

  return value;
}
8637
/* SDER32_EL2, AArch64 Secure Debug Enable Register */

#define AARCH64_SDER32_EL2_SUIDEN 0x1U

#define AARCH64_SDER32_EL2_SUNIDEN 0x2U

/* Read the SDER32_EL2 system register. */
static inline uint64_t _AArch64_Read_sder32_el2( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, SDER32_EL2" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* Write @a value to the SDER32_EL2 system register. */
static inline void _AArch64_Write_sder32_el2( uint64_t value )
{
  __asm__ volatile (
    "msr SDER32_EL2, %0" : : "r" ( value ) : "memory"
  );
}

/* SDER32_EL3, AArch64 Secure Debug Enable Register */

#define AARCH64_SDER32_EL3_SUIDEN 0x1U

#define AARCH64_SDER32_EL3_SUNIDEN 0x2U

/* Read the SDER32_EL3 system register. */
static inline uint64_t _AArch64_Read_sder32_el3( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, SDER32_EL3" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* Write @a value to the SDER32_EL3 system register. */
static inline void _AArch64_Write_sder32_el3( uint64_t value )
{
  __asm__ volatile (
    "msr SDER32_EL3, %0" : : "r" ( value ) : "memory"
  );
}
8685
/* TRFCR_EL1, Trace Filter Control Register (EL1) */

#define AARCH64_TRFCR_EL1_E0TRE 0x1U

#define AARCH64_TRFCR_EL1_E1TRE 0x2U

/* TS, bits [6:5]: timestamp control */
#define AARCH64_TRFCR_EL1_TS( _val ) ( ( _val ) << 5 )
#define AARCH64_TRFCR_EL1_TS_SHIFT 5
#define AARCH64_TRFCR_EL1_TS_MASK 0x60U
#define AARCH64_TRFCR_EL1_TS_GET( _reg ) \
  ( ( ( _reg ) >> 5 ) & 0x3U )

/* Read the TRFCR_EL1 system register. */
static inline uint64_t _AArch64_Read_trfcr_el1( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, TRFCR_EL1" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* Write @a value to the TRFCR_EL1 system register. */
static inline void _AArch64_Write_trfcr_el1( uint64_t value )
{
  __asm__ volatile (
    "msr TRFCR_EL1, %0" : : "r" ( value ) : "memory"
  );
}

/* TRFCR_EL2, Trace Filter Control Register (EL2) */

#define AARCH64_TRFCR_EL2_E0HTRE 0x1U

#define AARCH64_TRFCR_EL2_E2TRE 0x2U

#define AARCH64_TRFCR_EL2_CX 0x8U

/* TS, bits [6:5]: timestamp control */
#define AARCH64_TRFCR_EL2_TS( _val ) ( ( _val ) << 5 )
#define AARCH64_TRFCR_EL2_TS_SHIFT 5
#define AARCH64_TRFCR_EL2_TS_MASK 0x60U
#define AARCH64_TRFCR_EL2_TS_GET( _reg ) \
  ( ( ( _reg ) >> 5 ) & 0x3U )

/* Read the TRFCR_EL2 system register. */
static inline uint64_t _AArch64_Read_trfcr_el2( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, TRFCR_EL2" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* Write @a value to the TRFCR_EL2 system register. */
static inline void _AArch64_Write_trfcr_el2( uint64_t value )
{
  __asm__ volatile (
    "msr TRFCR_EL2, %0" : : "r" ( value ) : "memory"
  );
}
8747
/* PMCCFILTR_EL0, Performance Monitors Cycle Count Filter Register */

#define AARCH64_PMCCFILTR_EL0_SH 0x1000000U

#define AARCH64_PMCCFILTR_EL0_M 0x4000000U

#define AARCH64_PMCCFILTR_EL0_NSH 0x8000000U

#define AARCH64_PMCCFILTR_EL0_NSU 0x10000000U

#define AARCH64_PMCCFILTR_EL0_NSK 0x20000000U

#define AARCH64_PMCCFILTR_EL0_U 0x40000000U

#define AARCH64_PMCCFILTR_EL0_P 0x80000000U

/* Read the PMCCFILTR_EL0 system register. */
static inline uint64_t _AArch64_Read_pmccfiltr_el0( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, PMCCFILTR_EL0" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* Write @a value to the PMCCFILTR_EL0 system register. */
static inline void _AArch64_Write_pmccfiltr_el0( uint64_t value )
{
  __asm__ volatile (
    "msr PMCCFILTR_EL0, %0" : : "r" ( value ) : "memory"
  );
}

/* PMCCNTR_EL0, Performance Monitors Cycle Count Register */

/* Read the PMCCNTR_EL0 system register. */
static inline uint64_t _AArch64_Read_pmccntr_el0( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, PMCCNTR_EL0" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* Write @a value to the PMCCNTR_EL0 system register. */
static inline void _AArch64_Write_pmccntr_el0( uint64_t value )
{
  __asm__ volatile (
    "msr PMCCNTR_EL0, %0" : : "r" ( value ) : "memory"
  );
}
8801
/* PMCEID0_EL0, Performance Monitors Common Event Identification Register 0 */

/* Read the PMCEID0_EL0 system register (read-only). */
static inline uint64_t _AArch64_Read_pmceid0_el0( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, PMCEID0_EL0" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* PMCEID1_EL0, Performance Monitors Common Event Identification Register 1 */

/* Read the PMCEID1_EL0 system register (read-only). */
static inline uint64_t _AArch64_Read_pmceid1_el0( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, PMCEID1_EL0" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* PMCNTENCLR_EL0, Performance Monitors Count Enable Clear Register */

#define AARCH64_PMCNTENCLR_EL0_C 0x80000000U

/* Read the PMCNTENCLR_EL0 system register. */
static inline uint64_t _AArch64_Read_pmcntenclr_el0( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, PMCNTENCLR_EL0" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* Write @a value to the PMCNTENCLR_EL0 system register. */
static inline void _AArch64_Write_pmcntenclr_el0( uint64_t value )
{
  __asm__ volatile (
    "msr PMCNTENCLR_EL0, %0" : : "r" ( value ) : "memory"
  );
}

/* PMCNTENSET_EL0, Performance Monitors Count Enable Set Register */

#define AARCH64_PMCNTENSET_EL0_C 0x80000000U

/* Read the PMCNTENSET_EL0 system register. */
static inline uint64_t _AArch64_Read_pmcntenset_el0( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, PMCNTENSET_EL0" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* Write @a value to the PMCNTENSET_EL0 system register. */
static inline void _AArch64_Write_pmcntenset_el0( uint64_t value )
{
  __asm__ volatile (
    "msr PMCNTENSET_EL0, %0" : : "r" ( value ) : "memory"
  );
}
8871
/* PMCR_EL0, Performance Monitors Control Register */

#define AARCH64_PMCR_EL0_E 0x1U

#define AARCH64_PMCR_EL0_P 0x2U

#define AARCH64_PMCR_EL0_C 0x4U

#define AARCH64_PMCR_EL0_D 0x8U

#define AARCH64_PMCR_EL0_X 0x10U

#define AARCH64_PMCR_EL0_DP 0x20U

#define AARCH64_PMCR_EL0_LC 0x40U

#define AARCH64_PMCR_EL0_LP 0x80U

/* N, bits [15:11]: number of event counters (read-only field) */
#define AARCH64_PMCR_EL0_N( _val ) ( ( _val ) << 11 )
#define AARCH64_PMCR_EL0_N_SHIFT 11
#define AARCH64_PMCR_EL0_N_MASK 0xf800U
#define AARCH64_PMCR_EL0_N_GET( _reg ) \
  ( ( ( _reg ) >> 11 ) & 0x1fU )

/* IDCODE, bits [23:16] */
#define AARCH64_PMCR_EL0_IDCODE( _val ) ( ( _val ) << 16 )
#define AARCH64_PMCR_EL0_IDCODE_SHIFT 16
#define AARCH64_PMCR_EL0_IDCODE_MASK 0xff0000U
#define AARCH64_PMCR_EL0_IDCODE_GET( _reg ) \
  ( ( ( _reg ) >> 16 ) & 0xffU )

/* IMP, bits [31:24] */
#define AARCH64_PMCR_EL0_IMP( _val ) ( ( _val ) << 24 )
#define AARCH64_PMCR_EL0_IMP_SHIFT 24
#define AARCH64_PMCR_EL0_IMP_MASK 0xff000000U
#define AARCH64_PMCR_EL0_IMP_GET( _reg ) \
  ( ( ( _reg ) >> 24 ) & 0xffU )

/* Read the PMCR_EL0 system register. */
static inline uint64_t _AArch64_Read_pmcr_el0( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, PMCR_EL0" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* Write @a value to the PMCR_EL0 system register. */
static inline void _AArch64_Write_pmcr_el0( uint64_t value )
{
  __asm__ volatile (
    "msr PMCR_EL0, %0" : : "r" ( value ) : "memory"
  );
}
8925
/* PMEVCNTR_N_EL0, Performance Monitors Event Count Registers, n = 0 - 30 */

/* Read the PMEVCNTR<n>_EL0 system register. */
static inline uint64_t _AArch64_Read_pmevcntr_n_el0( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, PMEVCNTR_N_EL0" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* Write @a value to the PMEVCNTR<n>_EL0 system register. */
static inline void _AArch64_Write_pmevcntr_n_el0( uint64_t value )
{
  __asm__ volatile (
    "msr PMEVCNTR_N_EL0, %0" : : "r" ( value ) : "memory"
  );
}

/* PMEVTYPER_N_EL0, Performance Monitors Event Type Registers, n = 0 - 30 */

/* evtCount[9:0], bits [9:0] */
#define AARCH64_PMEVTYPER_N_EL0_EVTCOUNT_9_0( _val ) ( ( _val ) << 0 )
#define AARCH64_PMEVTYPER_N_EL0_EVTCOUNT_9_0_SHIFT 0
#define AARCH64_PMEVTYPER_N_EL0_EVTCOUNT_9_0_MASK 0x3ffU
#define AARCH64_PMEVTYPER_N_EL0_EVTCOUNT_9_0_GET( _reg ) \
  ( ( ( _reg ) >> 0 ) & 0x3ffU )

/* evtCount[15:10], bits [15:10] */
#define AARCH64_PMEVTYPER_N_EL0_EVTCOUNT_15_10( _val ) ( ( _val ) << 10 )
#define AARCH64_PMEVTYPER_N_EL0_EVTCOUNT_15_10_SHIFT 10
#define AARCH64_PMEVTYPER_N_EL0_EVTCOUNT_15_10_MASK 0xfc00U
#define AARCH64_PMEVTYPER_N_EL0_EVTCOUNT_15_10_GET( _reg ) \
  ( ( ( _reg ) >> 10 ) & 0x3fU )

#define AARCH64_PMEVTYPER_N_EL0_SH 0x1000000U

#define AARCH64_PMEVTYPER_N_EL0_MT 0x2000000U

#define AARCH64_PMEVTYPER_N_EL0_M 0x4000000U

#define AARCH64_PMEVTYPER_N_EL0_NSH 0x8000000U

#define AARCH64_PMEVTYPER_N_EL0_NSU 0x10000000U

#define AARCH64_PMEVTYPER_N_EL0_NSK 0x20000000U

#define AARCH64_PMEVTYPER_N_EL0_U 0x40000000U

#define AARCH64_PMEVTYPER_N_EL0_P 0x80000000U

/* Read the PMEVTYPER<n>_EL0 system register. */
static inline uint64_t _AArch64_Read_pmevtyper_n_el0( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, PMEVTYPER_N_EL0" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* Write @a value to the PMEVTYPER<n>_EL0 system register. */
static inline void _AArch64_Write_pmevtyper_n_el0( uint64_t value )
{
  __asm__ volatile (
    "msr PMEVTYPER_N_EL0, %0" : : "r" ( value ) : "memory"
  );
}
8993
/* PMINTENCLR_EL1, Performance Monitors Interrupt Enable Clear Register */

#define AARCH64_PMINTENCLR_EL1_C 0x80000000U

/* Read the PMINTENCLR_EL1 system register. */
static inline uint64_t _AArch64_Read_pmintenclr_el1( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, PMINTENCLR_EL1" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* Write @a value to the PMINTENCLR_EL1 system register. */
static inline void _AArch64_Write_pmintenclr_el1( uint64_t value )
{
  __asm__ volatile (
    "msr PMINTENCLR_EL1, %0" : : "r" ( value ) : "memory"
  );
}

/* PMINTENSET_EL1, Performance Monitors Interrupt Enable Set Register */

#define AARCH64_PMINTENSET_EL1_C 0x80000000U

/* Read the PMINTENSET_EL1 system register. */
static inline uint64_t _AArch64_Read_pmintenset_el1( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, PMINTENSET_EL1" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* Write @a value to the PMINTENSET_EL1 system register. */
static inline void _AArch64_Write_pmintenset_el1( uint64_t value )
{
  __asm__ volatile (
    "msr PMINTENSET_EL1, %0" : : "r" ( value ) : "memory"
  );
}

/* PMMIR_EL1, Performance Monitors Machine Identification Register */

/* SLOTS, bits [7:0] */
#define AARCH64_PMMIR_EL1_SLOTS( _val ) ( ( _val ) << 0 )
#define AARCH64_PMMIR_EL1_SLOTS_SHIFT 0
#define AARCH64_PMMIR_EL1_SLOTS_MASK 0xffU
#define AARCH64_PMMIR_EL1_SLOTS_GET( _reg ) \
  ( ( ( _reg ) >> 0 ) & 0xffU )

/* Read the PMMIR_EL1 system register (read-only). */
static inline uint64_t _AArch64_Read_pmmir_el1( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, PMMIR_EL1" : "=&r" ( value ) : : "memory"
  );

  return value;
}
9056
/* PMOVSCLR_EL0, Performance Monitors Overflow Flag Status Clear Register */

#define AARCH64_PMOVSCLR_EL0_C 0x80000000U

/* Read the PMOVSCLR_EL0 system register. */
static inline uint64_t _AArch64_Read_pmovsclr_el0( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, PMOVSCLR_EL0" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* Write @a value to the PMOVSCLR_EL0 system register. */
static inline void _AArch64_Write_pmovsclr_el0( uint64_t value )
{
  __asm__ volatile (
    "msr PMOVSCLR_EL0, %0" : : "r" ( value ) : "memory"
  );
}

/* PMOVSSET_EL0, Performance Monitors Overflow Flag Status Set Register */

#define AARCH64_PMOVSSET_EL0_C 0x80000000U

/* Read the PMOVSSET_EL0 system register. */
static inline uint64_t _AArch64_Read_pmovsset_el0( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, PMOVSSET_EL0" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* Write @a value to the PMOVSSET_EL0 system register. */
static inline void _AArch64_Write_pmovsset_el0( uint64_t value )
{
  __asm__ volatile (
    "msr PMOVSSET_EL0, %0" : : "r" ( value ) : "memory"
  );
}

/* PMSELR_EL0, Performance Monitors Event Counter Selection Register */

/* SEL, bits [4:0]: selected event counter */
#define AARCH64_PMSELR_EL0_SEL( _val ) ( ( _val ) << 0 )
#define AARCH64_PMSELR_EL0_SEL_SHIFT 0
#define AARCH64_PMSELR_EL0_SEL_MASK 0x1fU
#define AARCH64_PMSELR_EL0_SEL_GET( _reg ) \
  ( ( ( _reg ) >> 0 ) & 0x1fU )

/* Read the PMSELR_EL0 system register. */
static inline uint64_t _AArch64_Read_pmselr_el0( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, PMSELR_EL0" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* Write @a value to the PMSELR_EL0 system register. */
static inline void _AArch64_Write_pmselr_el0( uint64_t value )
{
  __asm__ volatile (
    "msr PMSELR_EL0, %0" : : "r" ( value ) : "memory"
  );
}

/* PMSWINC_EL0, Performance Monitors Software Increment Register */

/* Write @a value to the PMSWINC_EL0 system register (write-only). */
static inline void _AArch64_Write_pmswinc_el0( uint64_t value )
{
  __asm__ volatile (
    "msr PMSWINC_EL0, %0" : : "r" ( value ) : "memory"
  );
}
9135
/* PMUSERENR_EL0, Performance Monitors User Enable Register */

#define AARCH64_PMUSERENR_EL0_EN 0x1U

#define AARCH64_PMUSERENR_EL0_SW 0x2U

#define AARCH64_PMUSERENR_EL0_CR 0x4U

#define AARCH64_PMUSERENR_EL0_ER 0x8U

/* Read the PMUSERENR_EL0 system register. */
static inline uint64_t _AArch64_Read_pmuserenr_el0( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, PMUSERENR_EL0" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* Write @a value to the PMUSERENR_EL0 system register. */
static inline void _AArch64_Write_pmuserenr_el0( uint64_t value )
{
  __asm__ volatile (
    "msr PMUSERENR_EL0, %0" : : "r" ( value ) : "memory"
  );
}

/* PMXEVCNTR_EL0, Performance Monitors Selected Event Count Register */

/* PMEVCNTR<n>, bits [31:0] */
#define AARCH64_PMXEVCNTR_EL0_PMEVCNTR_N( _val ) ( ( _val ) << 0 )
#define AARCH64_PMXEVCNTR_EL0_PMEVCNTR_N_SHIFT 0
#define AARCH64_PMXEVCNTR_EL0_PMEVCNTR_N_MASK 0xffffffffU
#define AARCH64_PMXEVCNTR_EL0_PMEVCNTR_N_GET( _reg ) \
  ( ( ( _reg ) >> 0 ) & 0xffffffffU )

/* Read the PMXEVCNTR_EL0 system register. */
static inline uint64_t _AArch64_Read_pmxevcntr_el0( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, PMXEVCNTR_EL0" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* Write @a value to the PMXEVCNTR_EL0 system register. */
static inline void _AArch64_Write_pmxevcntr_el0( uint64_t value )
{
  __asm__ volatile (
    "msr PMXEVCNTR_EL0, %0" : : "r" ( value ) : "memory"
  );
}

/* PMXEVTYPER_EL0, Performance Monitors Selected Event Type Register */

/* Read the PMXEVTYPER_EL0 system register. */
static inline uint64_t _AArch64_Read_pmxevtyper_el0( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, PMXEVTYPER_EL0" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* Write @a value to the PMXEVTYPER_EL0 system register. */
static inline void _AArch64_Write_pmxevtyper_el0( uint64_t value )
{
  __asm__ volatile (
    "msr PMXEVTYPER_EL0, %0" : : "r" ( value ) : "memory"
  );
}
9209
/* AMCFGR_EL0, Activity Monitors Configuration Register */

/* N, bits [7:0] */
#define AARCH64_AMCFGR_EL0_N( _val ) ( ( _val ) << 0 )
#define AARCH64_AMCFGR_EL0_N_SHIFT 0
#define AARCH64_AMCFGR_EL0_N_MASK 0xffU
#define AARCH64_AMCFGR_EL0_N_GET( _reg ) \
  ( ( ( _reg ) >> 0 ) & 0xffU )

/* SIZE, bits [13:8] */
#define AARCH64_AMCFGR_EL0_SIZE( _val ) ( ( _val ) << 8 )
#define AARCH64_AMCFGR_EL0_SIZE_SHIFT 8
#define AARCH64_AMCFGR_EL0_SIZE_MASK 0x3f00U
#define AARCH64_AMCFGR_EL0_SIZE_GET( _reg ) \
  ( ( ( _reg ) >> 8 ) & 0x3fU )

#define AARCH64_AMCFGR_EL0_HDBG 0x1000000U

/* NCG, bits [31:28] */
#define AARCH64_AMCFGR_EL0_NCG( _val ) ( ( _val ) << 28 )
#define AARCH64_AMCFGR_EL0_NCG_SHIFT 28
#define AARCH64_AMCFGR_EL0_NCG_MASK 0xf0000000U
#define AARCH64_AMCFGR_EL0_NCG_GET( _reg ) \
  ( ( ( _reg ) >> 28 ) & 0xfU )

/* Read the AMCFGR_EL0 system register (read-only). */
static inline uint64_t _AArch64_Read_amcfgr_el0( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, AMCFGR_EL0" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* AMCG1IDR_EL0, Activity Monitors Counter Group 1 Identification Register */

/* Read the AMCG1IDR_EL0 system register (read-only). */
static inline uint64_t _AArch64_Read_amcg1idr_el0( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, AMCG1IDR_EL0" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* AMCGCR_EL0, Activity Monitors Counter Group Configuration Register */

/* CG0NC, bits [7:0] */
#define AARCH64_AMCGCR_EL0_CG0NC( _val ) ( ( _val ) << 0 )
#define AARCH64_AMCGCR_EL0_CG0NC_SHIFT 0
#define AARCH64_AMCGCR_EL0_CG0NC_MASK 0xffU
#define AARCH64_AMCGCR_EL0_CG0NC_GET( _reg ) \
  ( ( ( _reg ) >> 0 ) & 0xffU )

/* CG1NC, bits [15:8] */
#define AARCH64_AMCGCR_EL0_CG1NC( _val ) ( ( _val ) << 8 )
#define AARCH64_AMCGCR_EL0_CG1NC_SHIFT 8
#define AARCH64_AMCGCR_EL0_CG1NC_MASK 0xff00U
#define AARCH64_AMCGCR_EL0_CG1NC_GET( _reg ) \
  ( ( ( _reg ) >> 8 ) & 0xffU )

/* Read the AMCGCR_EL0 system register (read-only). */
static inline uint64_t _AArch64_Read_amcgcr_el0( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, AMCGCR_EL0" : "=&r" ( value ) : : "memory"
  );

  return value;
}
9280
/* AMCNTENCLR0_EL0, Activity Monitors Count Enable Clear Register 0 */

/* Read the AMCNTENCLR0_EL0 system register. */
static inline uint64_t _AArch64_Read_amcntenclr0_el0( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, AMCNTENCLR0_EL0" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* Write @a value to the AMCNTENCLR0_EL0 system register. */
static inline void _AArch64_Write_amcntenclr0_el0( uint64_t value )
{
  __asm__ volatile (
    "msr AMCNTENCLR0_EL0, %0" : : "r" ( value ) : "memory"
  );
}

/* AMCNTENCLR1_EL0, Activity Monitors Count Enable Clear Register 1 */

/* Read the AMCNTENCLR1_EL0 system register. */
static inline uint64_t _AArch64_Read_amcntenclr1_el0( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, AMCNTENCLR1_EL0" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* Write @a value to the AMCNTENCLR1_EL0 system register. */
static inline void _AArch64_Write_amcntenclr1_el0( uint64_t value )
{
  __asm__ volatile (
    "msr AMCNTENCLR1_EL0, %0" : : "r" ( value ) : "memory"
  );
}

/* AMCNTENSET0_EL0, Activity Monitors Count Enable Set Register 0 */

/* Read the AMCNTENSET0_EL0 system register. */
static inline uint64_t _AArch64_Read_amcntenset0_el0( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, AMCNTENSET0_EL0" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* Write @a value to the AMCNTENSET0_EL0 system register. */
static inline void _AArch64_Write_amcntenset0_el0( uint64_t value )
{
  __asm__ volatile (
    "msr AMCNTENSET0_EL0, %0" : : "r" ( value ) : "memory"
  );
}

/* AMCNTENSET1_EL0, Activity Monitors Count Enable Set Register 1 */

/* Read the AMCNTENSET1_EL0 system register. */
static inline uint64_t _AArch64_Read_amcntenset1_el0( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, AMCNTENSET1_EL0" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* Write @a value to the AMCNTENSET1_EL0 system register. */
static inline void _AArch64_Write_amcntenset1_el0( uint64_t value )
{
  __asm__ volatile (
    "msr AMCNTENSET1_EL0, %0" : : "r" ( value ) : "memory"
  );
}

/* AMCR_EL0, Activity Monitors Control Register */

#define AARCH64_AMCR_EL0_HDBG 0x400U

#define AARCH64_AMCR_EL0_CG1RZ 0x20000U

/* Read the AMCR_EL0 system register. */
static inline uint64_t _AArch64_Read_amcr_el0( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, AMCR_EL0" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* Write @a value to the AMCR_EL0 system register. */
static inline void _AArch64_Write_amcr_el0( uint64_t value )
{
  __asm__ volatile (
    "msr AMCR_EL0, %0" : : "r" ( value ) : "memory"
  );
}
9384
/* AMEVCNTR0_N_EL0, Activity Monitors Event Counter Registers 0, n = 0 - 15 */

/* Read the AMEVCNTR0<n>_EL0 system register. */
static inline uint64_t _AArch64_Read_amevcntr0_n_el0( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, AMEVCNTR0_N_EL0" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* Write @a value to the AMEVCNTR0<n>_EL0 system register. */
static inline void _AArch64_Write_amevcntr0_n_el0( uint64_t value )
{
  __asm__ volatile (
    "msr AMEVCNTR0_N_EL0, %0" : : "r" ( value ) : "memory"
  );
}

/* AMEVCNTR1_N_EL0, Activity Monitors Event Counter Registers 1, n = 0 - 15 */

/* Read the AMEVCNTR1<n>_EL0 system register. */
static inline uint64_t _AArch64_Read_amevcntr1_n_el0( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, AMEVCNTR1_N_EL0" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* Write @a value to the AMEVCNTR1<n>_EL0 system register. */
static inline void _AArch64_Write_amevcntr1_n_el0( uint64_t value )
{
  __asm__ volatile (
    "msr AMEVCNTR1_N_EL0, %0" : : "r" ( value ) : "memory"
  );
}

/* AMEVCNTVOFF0_N_EL2, Activity Monitors Event Counter Virtual Offset Registers 0, n = 0 - */

/* Read the AMEVCNTVOFF0<n>_EL2 system register. */
static inline uint64_t _AArch64_Read_amevcntvoff0_n_el2( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, AMEVCNTVOFF0_N_EL2" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* Write @a value to the AMEVCNTVOFF0<n>_EL2 system register. */
static inline void _AArch64_Write_amevcntvoff0_n_el2( uint64_t value )
{
  __asm__ volatile (
    "msr AMEVCNTVOFF0_N_EL2, %0" : : "r" ( value ) : "memory"
  );
}

/* AMEVCNTVOFF1_N_EL2, Activity Monitors Event Counter Virtual Offset Registers 1, n = 0 - */

/* Read the AMEVCNTVOFF1<n>_EL2 system register. */
static inline uint64_t _AArch64_Read_amevcntvoff1_n_el2( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, AMEVCNTVOFF1_N_EL2" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* Write @a value to the AMEVCNTVOFF1<n>_EL2 system register. */
static inline void _AArch64_Write_amevcntvoff1_n_el2( uint64_t value )
{
  __asm__ volatile (
    "msr AMEVCNTVOFF1_N_EL2, %0" : : "r" ( value ) : "memory"
  );
}
9464
/* AMEVTYPER0_N_EL0, Activity Monitors Event Type Registers 0, n = 0 - 15 */

/* evtCount, bits [15:0] */
#define AARCH64_AMEVTYPER0_N_EL0_EVTCOUNT( _val ) ( ( _val ) << 0 )
#define AARCH64_AMEVTYPER0_N_EL0_EVTCOUNT_SHIFT 0
#define AARCH64_AMEVTYPER0_N_EL0_EVTCOUNT_MASK 0xffffU
#define AARCH64_AMEVTYPER0_N_EL0_EVTCOUNT_GET( _reg ) \
  ( ( ( _reg ) >> 0 ) & 0xffffU )

/* Read the AMEVTYPER0<n>_EL0 system register (read-only). */
static inline uint64_t _AArch64_Read_amevtyper0_n_el0( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, AMEVTYPER0_N_EL0" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* AMEVTYPER1_N_EL0, Activity Monitors Event Type Registers 1, n = 0 - 15 */

/* evtCount, bits [15:0] */
#define AARCH64_AMEVTYPER1_N_EL0_EVTCOUNT( _val ) ( ( _val ) << 0 )
#define AARCH64_AMEVTYPER1_N_EL0_EVTCOUNT_SHIFT 0
#define AARCH64_AMEVTYPER1_N_EL0_EVTCOUNT_MASK 0xffffU
#define AARCH64_AMEVTYPER1_N_EL0_EVTCOUNT_GET( _reg ) \
  ( ( ( _reg ) >> 0 ) & 0xffffU )

/* Read the AMEVTYPER1<n>_EL0 system register. */
static inline uint64_t _AArch64_Read_amevtyper1_n_el0( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, AMEVTYPER1_N_EL0" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* Write @a value to the AMEVTYPER1<n>_EL0 system register. */
static inline void _AArch64_Write_amevtyper1_n_el0( uint64_t value )
{
  __asm__ volatile (
    "msr AMEVTYPER1_N_EL0, %0" : : "r" ( value ) : "memory"
  );
}

/* AMUSERENR_EL0, Activity Monitors User Enable Register */

#define AARCH64_AMUSERENR_EL0_EN 0x1U

/* Read the AMUSERENR_EL0 system register. */
static inline uint64_t _AArch64_Read_amuserenr_el0( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, AMUSERENR_EL0" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* Write @a value to the AMUSERENR_EL0 system register. */
static inline void _AArch64_Write_amuserenr_el0( uint64_t value )
{
  __asm__ volatile (
    "msr AMUSERENR_EL0, %0" : : "r" ( value ) : "memory"
  );
}
9531
/* PMBIDR_EL1, Profiling Buffer ID Register */

/* Align, bits [3:0] */
#define AARCH64_PMBIDR_EL1_ALIGN( _val ) ( ( _val ) << 0 )
#define AARCH64_PMBIDR_EL1_ALIGN_SHIFT 0
#define AARCH64_PMBIDR_EL1_ALIGN_MASK 0xfU
#define AARCH64_PMBIDR_EL1_ALIGN_GET( _reg ) \
  ( ( ( _reg ) >> 0 ) & 0xfU )

#define AARCH64_PMBIDR_EL1_P 0x10U

#define AARCH64_PMBIDR_EL1_F 0x20U

/* Read the PMBIDR_EL1 system register (read-only). */
static inline uint64_t _AArch64_Read_pmbidr_el1( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, PMBIDR_EL1" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* PMBLIMITR_EL1, Profiling Buffer Limit Address Register */

#define AARCH64_PMBLIMITR_EL1_E 0x1U

/* FM, bits [2:1]: fill mode */
#define AARCH64_PMBLIMITR_EL1_FM( _val ) ( ( _val ) << 1 )
#define AARCH64_PMBLIMITR_EL1_FM_SHIFT 1
#define AARCH64_PMBLIMITR_EL1_FM_MASK 0x6U
#define AARCH64_PMBLIMITR_EL1_FM_GET( _reg ) \
  ( ( ( _reg ) >> 1 ) & 0x3U )

/* LIMIT, bits [63:12]: buffer limit address (page aligned) */
#define AARCH64_PMBLIMITR_EL1_LIMIT( _val ) ( ( _val ) << 12 )
#define AARCH64_PMBLIMITR_EL1_LIMIT_SHIFT 12
#define AARCH64_PMBLIMITR_EL1_LIMIT_MASK 0xfffffffffffff000ULL
#define AARCH64_PMBLIMITR_EL1_LIMIT_GET( _reg ) \
  ( ( ( _reg ) >> 12 ) & 0xfffffffffffffULL )

/* Read the PMBLIMITR_EL1 system register. */
static inline uint64_t _AArch64_Read_pmblimitr_el1( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, PMBLIMITR_EL1" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* Write @a value to the PMBLIMITR_EL1 system register. */
static inline void _AArch64_Write_pmblimitr_el1( uint64_t value )
{
  __asm__ volatile (
    "msr PMBLIMITR_EL1, %0" : : "r" ( value ) : "memory"
  );
}

/* PMBPTR_EL1, Profiling Buffer Write Pointer Register */

/* Read the PMBPTR_EL1 system register. */
static inline uint64_t _AArch64_Read_pmbptr_el1( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, PMBPTR_EL1" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* Write @a value to the PMBPTR_EL1 system register. */
static inline void _AArch64_Write_pmbptr_el1( uint64_t value )
{
  __asm__ volatile (
    "msr PMBPTR_EL1, %0" : : "r" ( value ) : "memory"
  );
}
9608
/* PMBSR_EL1, Profiling Buffer Status/syndrome Register */

/* BSC, bits [5:0]: buffer status code (overlaps MSS) */
#define AARCH64_PMBSR_EL1_BSC( _val ) ( ( _val ) << 0 )
#define AARCH64_PMBSR_EL1_BSC_SHIFT 0
#define AARCH64_PMBSR_EL1_BSC_MASK 0x3fU
#define AARCH64_PMBSR_EL1_BSC_GET( _reg ) \
  ( ( ( _reg ) >> 0 ) & 0x3fU )

/* FSC, bits [5:0]: fault status code (overlaps MSS) */
#define AARCH64_PMBSR_EL1_FSC( _val ) ( ( _val ) << 0 )
#define AARCH64_PMBSR_EL1_FSC_SHIFT 0
#define AARCH64_PMBSR_EL1_FSC_MASK 0x3fU
#define AARCH64_PMBSR_EL1_FSC_GET( _reg ) \
  ( ( ( _reg ) >> 0 ) & 0x3fU )

/* MSS, bits [15:0]: management event specific syndrome */
#define AARCH64_PMBSR_EL1_MSS( _val ) ( ( _val ) << 0 )
#define AARCH64_PMBSR_EL1_MSS_SHIFT 0
#define AARCH64_PMBSR_EL1_MSS_MASK 0xffffU
#define AARCH64_PMBSR_EL1_MSS_GET( _reg ) \
  ( ( ( _reg ) >> 0 ) & 0xffffU )

#define AARCH64_PMBSR_EL1_COLL 0x10000U

#define AARCH64_PMBSR_EL1_S 0x20000U

#define AARCH64_PMBSR_EL1_EA 0x40000U

#define AARCH64_PMBSR_EL1_DL 0x80000U

/* EC, bits [31:26]: event class */
#define AARCH64_PMBSR_EL1_EC( _val ) ( ( _val ) << 26 )
#define AARCH64_PMBSR_EL1_EC_SHIFT 26
#define AARCH64_PMBSR_EL1_EC_MASK 0xfc000000U
#define AARCH64_PMBSR_EL1_EC_GET( _reg ) \
  ( ( ( _reg ) >> 26 ) & 0x3fU )

/* Read the PMBSR_EL1 system register. */
static inline uint64_t _AArch64_Read_pmbsr_el1( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, PMBSR_EL1" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* Write @a value to the PMBSR_EL1 system register. */
static inline void _AArch64_Write_pmbsr_el1( uint64_t value )
{
  __asm__ volatile (
    "msr PMBSR_EL1, %0" : : "r" ( value ) : "memory"
  );
}
9660
/* PMSCR_EL1, Statistical Profiling Control Register (EL1) */

#define AARCH64_PMSCR_EL1_E0SPE 0x1U

#define AARCH64_PMSCR_EL1_E1SPE 0x2U

#define AARCH64_PMSCR_EL1_CX 0x8U

#define AARCH64_PMSCR_EL1_PA 0x10U

#define AARCH64_PMSCR_EL1_TS 0x20U

/* PCT, bits [7:6]: physical timestamp control */
#define AARCH64_PMSCR_EL1_PCT( _val ) ( ( _val ) << 6 )
#define AARCH64_PMSCR_EL1_PCT_SHIFT 6
#define AARCH64_PMSCR_EL1_PCT_MASK 0xc0U
#define AARCH64_PMSCR_EL1_PCT_GET( _reg ) \
  ( ( ( _reg ) >> 6 ) & 0x3U )

/* Read the PMSCR_EL1 system register. */
static inline uint64_t _AArch64_Read_pmscr_el1( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, PMSCR_EL1" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* Write @a value to the PMSCR_EL1 system register. */
static inline void _AArch64_Write_pmscr_el1( uint64_t value )
{
  __asm__ volatile (
    "msr PMSCR_EL1, %0" : : "r" ( value ) : "memory"
  );
}

/* PMSCR_EL2, Statistical Profiling Control Register (EL2) */

#define AARCH64_PMSCR_EL2_E0HSPE 0x1U

#define AARCH64_PMSCR_EL2_E2SPE 0x2U

#define AARCH64_PMSCR_EL2_CX 0x8U

#define AARCH64_PMSCR_EL2_PA 0x10U

#define AARCH64_PMSCR_EL2_TS 0x20U

/* PCT, bits [7:6]: physical timestamp control */
#define AARCH64_PMSCR_EL2_PCT( _val ) ( ( _val ) << 6 )
#define AARCH64_PMSCR_EL2_PCT_SHIFT 6
#define AARCH64_PMSCR_EL2_PCT_MASK 0xc0U
#define AARCH64_PMSCR_EL2_PCT_GET( _reg ) \
  ( ( ( _reg ) >> 6 ) & 0x3U )

/* Read the PMSCR_EL2 system register. */
static inline uint64_t _AArch64_Read_pmscr_el2( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, PMSCR_EL2" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* Write @a value to the PMSCR_EL2 system register. */
static inline void _AArch64_Write_pmscr_el2( uint64_t value )
{
  __asm__ volatile (
    "msr PMSCR_EL2, %0" : : "r" ( value ) : "memory"
  );
}
9732
/* PMSEVFR_EL1, Sampling Event Filter Register */

/* E[n] bits: one enable bit per filterable event number n */
#define AARCH64_PMSEVFR_EL1_E_1 0x2U

#define AARCH64_PMSEVFR_EL1_E_3 0x8U

#define AARCH64_PMSEVFR_EL1_E_5 0x20U

#define AARCH64_PMSEVFR_EL1_E_7 0x80U

#define AARCH64_PMSEVFR_EL1_E_11 0x800U

#define AARCH64_PMSEVFR_EL1_E_12 0x1000U

#define AARCH64_PMSEVFR_EL1_E_13 0x2000U

#define AARCH64_PMSEVFR_EL1_E_14 0x4000U

#define AARCH64_PMSEVFR_EL1_E_15 0x8000U

#define AARCH64_PMSEVFR_EL1_E_17 0x20000U

#define AARCH64_PMSEVFR_EL1_E_18 0x40000U

#define AARCH64_PMSEVFR_EL1_E_24 0x1000000U

#define AARCH64_PMSEVFR_EL1_E_25 0x2000000U

#define AARCH64_PMSEVFR_EL1_E_26 0x4000000U

#define AARCH64_PMSEVFR_EL1_E_27 0x8000000U

#define AARCH64_PMSEVFR_EL1_E_28 0x10000000U

#define AARCH64_PMSEVFR_EL1_E_29 0x20000000U

#define AARCH64_PMSEVFR_EL1_E_30 0x40000000U

#define AARCH64_PMSEVFR_EL1_E_31 0x80000000U

#define AARCH64_PMSEVFR_EL1_E_48 0x1000000000000ULL

#define AARCH64_PMSEVFR_EL1_E_49 0x2000000000000ULL

#define AARCH64_PMSEVFR_EL1_E_50 0x4000000000000ULL

#define AARCH64_PMSEVFR_EL1_E_51 0x8000000000000ULL

#define AARCH64_PMSEVFR_EL1_E_52 0x10000000000000ULL

#define AARCH64_PMSEVFR_EL1_E_53 0x20000000000000ULL

#define AARCH64_PMSEVFR_EL1_E_54 0x40000000000000ULL

#define AARCH64_PMSEVFR_EL1_E_55 0x80000000000000ULL

#define AARCH64_PMSEVFR_EL1_E_56 0x100000000000000ULL

#define AARCH64_PMSEVFR_EL1_E_57 0x200000000000000ULL

#define AARCH64_PMSEVFR_EL1_E_58 0x400000000000000ULL

#define AARCH64_PMSEVFR_EL1_E_59 0x800000000000000ULL

#define AARCH64_PMSEVFR_EL1_E_60 0x1000000000000000ULL

#define AARCH64_PMSEVFR_EL1_E_61 0x2000000000000000ULL

#define AARCH64_PMSEVFR_EL1_E_62 0x4000000000000000ULL

#define AARCH64_PMSEVFR_EL1_E_63 0x8000000000000000ULL

/* Read the PMSEVFR_EL1 system register. */
static inline uint64_t _AArch64_Read_pmsevfr_el1( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, PMSEVFR_EL1" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* Write @a value to the PMSEVFR_EL1 system register. */
static inline void _AArch64_Write_pmsevfr_el1( uint64_t value )
{
  __asm__ volatile (
    "msr PMSEVFR_EL1, %0" : : "r" ( value ) : "memory"
  );
}
9822
/* PMSFCR_EL1, Sampling Filter Control Register */

#define AARCH64_PMSFCR_EL1_FE 0x1U

#define AARCH64_PMSFCR_EL1_FT 0x2U

#define AARCH64_PMSFCR_EL1_FL 0x4U

#define AARCH64_PMSFCR_EL1_B 0x10000U

#define AARCH64_PMSFCR_EL1_LD 0x20000U

#define AARCH64_PMSFCR_EL1_ST 0x40000U

/* Reads the PMSFCR_EL1 system register. */
static inline uint64_t _AArch64_Read_pmsfcr_el1( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, PMSFCR_EL1" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* Writes the PMSFCR_EL1 system register. */
static inline void _AArch64_Write_pmsfcr_el1( uint64_t value )
{
  __asm__ volatile (
    "msr PMSFCR_EL1, %0" : : "r" ( value ) : "memory"
  );
}
9854
/* PMSICR_EL1, Sampling Interval Counter Register */

/* COUNT field, bits [31:0]. */
#define AARCH64_PMSICR_EL1_COUNT( _val ) ( ( _val ) << 0 )
#define AARCH64_PMSICR_EL1_COUNT_SHIFT 0
#define AARCH64_PMSICR_EL1_COUNT_MASK 0xffffffffU
#define AARCH64_PMSICR_EL1_COUNT_GET( _reg ) \
  ( ( ( _reg ) >> 0 ) & 0xffffffffU )

/* ECOUNT field, bits [63:56]; use a 64-bit _val to avoid undefined
 * behavior in the 56-bit shift. */
#define AARCH64_PMSICR_EL1_ECOUNT( _val ) ( ( _val ) << 56 )
#define AARCH64_PMSICR_EL1_ECOUNT_SHIFT 56
#define AARCH64_PMSICR_EL1_ECOUNT_MASK 0xff00000000000000ULL
#define AARCH64_PMSICR_EL1_ECOUNT_GET( _reg ) \
  ( ( ( _reg ) >> 56 ) & 0xffULL )

/* Reads the PMSICR_EL1 system register. */
static inline uint64_t _AArch64_Read_pmsicr_el1( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, PMSICR_EL1" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* Writes the PMSICR_EL1 system register. */
static inline void _AArch64_Write_pmsicr_el1( uint64_t value )
{
  __asm__ volatile (
    "msr PMSICR_EL1, %0" : : "r" ( value ) : "memory"
  );
}
9886
/* PMSIDR_EL1, Sampling Profiling ID Register */

#define AARCH64_PMSIDR_EL1_FE 0x1U

#define AARCH64_PMSIDR_EL1_FT 0x2U

#define AARCH64_PMSIDR_EL1_FL 0x4U

#define AARCH64_PMSIDR_EL1_ARCHINST 0x8U

#define AARCH64_PMSIDR_EL1_LDS 0x10U

#define AARCH64_PMSIDR_EL1_ERND 0x20U

/* INTERVAL field, bits [11:8]. */
#define AARCH64_PMSIDR_EL1_INTERVAL( _val ) ( ( _val ) << 8 )
#define AARCH64_PMSIDR_EL1_INTERVAL_SHIFT 8
#define AARCH64_PMSIDR_EL1_INTERVAL_MASK 0xf00U
#define AARCH64_PMSIDR_EL1_INTERVAL_GET( _reg ) \
  ( ( ( _reg ) >> 8 ) & 0xfU )

/* MAXSIZE field, bits [15:12]. */
#define AARCH64_PMSIDR_EL1_MAXSIZE( _val ) ( ( _val ) << 12 )
#define AARCH64_PMSIDR_EL1_MAXSIZE_SHIFT 12
#define AARCH64_PMSIDR_EL1_MAXSIZE_MASK 0xf000U
#define AARCH64_PMSIDR_EL1_MAXSIZE_GET( _reg ) \
  ( ( ( _reg ) >> 12 ) & 0xfU )

/* COUNTSIZE field, bits [19:16]. */
#define AARCH64_PMSIDR_EL1_COUNTSIZE( _val ) ( ( _val ) << 16 )
#define AARCH64_PMSIDR_EL1_COUNTSIZE_SHIFT 16
#define AARCH64_PMSIDR_EL1_COUNTSIZE_MASK 0xf0000U
#define AARCH64_PMSIDR_EL1_COUNTSIZE_GET( _reg ) \
  ( ( ( _reg ) >> 16 ) & 0xfU )

/* Reads the PMSIDR_EL1 system register.  No write accessor is generated
 * for this ID register. */
static inline uint64_t _AArch64_Read_pmsidr_el1( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, PMSIDR_EL1" : "=&r" ( value ) : : "memory"
  );

  return value;
}
9929
/* PMSIRR_EL1, Sampling Interval Reload Register */

#define AARCH64_PMSIRR_EL1_RND 0x1U

/* INTERVAL field, bits [31:8]. */
#define AARCH64_PMSIRR_EL1_INTERVAL( _val ) ( ( _val ) << 8 )
#define AARCH64_PMSIRR_EL1_INTERVAL_SHIFT 8
#define AARCH64_PMSIRR_EL1_INTERVAL_MASK 0xffffff00U
#define AARCH64_PMSIRR_EL1_INTERVAL_GET( _reg ) \
  ( ( ( _reg ) >> 8 ) & 0xffffffU )

/* Reads the PMSIRR_EL1 system register. */
static inline uint64_t _AArch64_Read_pmsirr_el1( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, PMSIRR_EL1" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* Writes the PMSIRR_EL1 system register. */
static inline void _AArch64_Write_pmsirr_el1( uint64_t value )
{
  __asm__ volatile (
    "msr PMSIRR_EL1, %0" : : "r" ( value ) : "memory"
  );
}
9957
/* PMSLATFR_EL1, Sampling Latency Filter Register */

/* MINLAT field, bits [11:0]. */
#define AARCH64_PMSLATFR_EL1_MINLAT( _val ) ( ( _val ) << 0 )
#define AARCH64_PMSLATFR_EL1_MINLAT_SHIFT 0
#define AARCH64_PMSLATFR_EL1_MINLAT_MASK 0xfffU
#define AARCH64_PMSLATFR_EL1_MINLAT_GET( _reg ) \
  ( ( ( _reg ) >> 0 ) & 0xfffU )

/* Reads the PMSLATFR_EL1 system register. */
static inline uint64_t _AArch64_Read_pmslatfr_el1( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, PMSLATFR_EL1" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* Writes the PMSLATFR_EL1 system register. */
static inline void _AArch64_Write_pmslatfr_el1( uint64_t value )
{
  __asm__ volatile (
    "msr PMSLATFR_EL1, %0" : : "r" ( value ) : "memory"
  );
}
9983
/* DISR_EL1, Deferred Interrupt Status Register */

/* NOTE(review): DFSC (bits [5:0]) and ISS (bits [23:0]) overlap; which
 * layout applies presumably depends on the recorded syndrome format —
 * confirm against the Arm ARM before decoding. */
#define AARCH64_DISR_EL1_DFSC( _val ) ( ( _val ) << 0 )
#define AARCH64_DISR_EL1_DFSC_SHIFT 0
#define AARCH64_DISR_EL1_DFSC_MASK 0x3fU
#define AARCH64_DISR_EL1_DFSC_GET( _reg ) \
  ( ( ( _reg ) >> 0 ) & 0x3fU )

#define AARCH64_DISR_EL1_ISS( _val ) ( ( _val ) << 0 )
#define AARCH64_DISR_EL1_ISS_SHIFT 0
#define AARCH64_DISR_EL1_ISS_MASK 0xffffffU
#define AARCH64_DISR_EL1_ISS_GET( _reg ) \
  ( ( ( _reg ) >> 0 ) & 0xffffffU )

#define AARCH64_DISR_EL1_EA 0x200U

/* AET field, bits [12:10]. */
#define AARCH64_DISR_EL1_AET( _val ) ( ( _val ) << 10 )
#define AARCH64_DISR_EL1_AET_SHIFT 10
#define AARCH64_DISR_EL1_AET_MASK 0x1c00U
#define AARCH64_DISR_EL1_AET_GET( _reg ) \
  ( ( ( _reg ) >> 10 ) & 0x7U )

#define AARCH64_DISR_EL1_IDS 0x1000000U

#define AARCH64_DISR_EL1_A 0x80000000U

/* Reads the DISR_EL1 system register. */
static inline uint64_t _AArch64_Read_disr_el1( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, DISR_EL1" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* Writes the DISR_EL1 system register. */
static inline void _AArch64_Write_disr_el1( uint64_t value )
{
  __asm__ volatile (
    "msr DISR_EL1, %0" : : "r" ( value ) : "memory"
  );
}
10027
/* ERRIDR_EL1, Error Record ID Register */

/* NUM field, bits [15:0]. */
#define AARCH64_ERRIDR_EL1_NUM( _val ) ( ( _val ) << 0 )
#define AARCH64_ERRIDR_EL1_NUM_SHIFT 0
#define AARCH64_ERRIDR_EL1_NUM_MASK 0xffffU
#define AARCH64_ERRIDR_EL1_NUM_GET( _reg ) \
  ( ( ( _reg ) >> 0 ) & 0xffffU )

/* Reads the ERRIDR_EL1 system register.  No write accessor is generated
 * for this ID register. */
static inline uint64_t _AArch64_Read_erridr_el1( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, ERRIDR_EL1" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* ERRSELR_EL1, Error Record Select Register */

/* SEL field, bits [15:0]; selects the record the ERX* registers access. */
#define AARCH64_ERRSELR_EL1_SEL( _val ) ( ( _val ) << 0 )
#define AARCH64_ERRSELR_EL1_SEL_SHIFT 0
#define AARCH64_ERRSELR_EL1_SEL_MASK 0xffffU
#define AARCH64_ERRSELR_EL1_SEL_GET( _reg ) \
  ( ( ( _reg ) >> 0 ) & 0xffffU )

/* Reads the ERRSELR_EL1 system register. */
static inline uint64_t _AArch64_Read_errselr_el1( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, ERRSELR_EL1" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* Writes the ERRSELR_EL1 system register. */
static inline void _AArch64_Write_errselr_el1( uint64_t value )
{
  __asm__ volatile (
    "msr ERRSELR_EL1, %0" : : "r" ( value ) : "memory"
  );
}
10072
/* ERXADDR_EL1, Selected Error Record Address Register */

/* The ERX* accessors below operate on the error record selected via
 * ERRSELR_EL1 ("Selected" in the register names). */

/* Reads the ERXADDR_EL1 system register. */
static inline uint64_t _AArch64_Read_erxaddr_el1( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, ERXADDR_EL1" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* Writes the ERXADDR_EL1 system register. */
static inline void _AArch64_Write_erxaddr_el1( uint64_t value )
{
  __asm__ volatile (
    "msr ERXADDR_EL1, %0" : : "r" ( value ) : "memory"
  );
}

/* ERXCTLR_EL1, Selected Error Record Control Register */

/* Reads the ERXCTLR_EL1 system register. */
static inline uint64_t _AArch64_Read_erxctlr_el1( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, ERXCTLR_EL1" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* Writes the ERXCTLR_EL1 system register. */
static inline void _AArch64_Write_erxctlr_el1( uint64_t value )
{
  __asm__ volatile (
    "msr ERXCTLR_EL1, %0" : : "r" ( value ) : "memory"
  );
}

/* ERXFR_EL1, Selected Error Record Feature Register */

/* Reads the ERXFR_EL1 system register.  No write accessor is generated
 * for this feature register. */
static inline uint64_t _AArch64_Read_erxfr_el1( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, ERXFR_EL1" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* ERXMISC0_EL1, Selected Error Record Miscellaneous Register 0 */

/* Reads the ERXMISC0_EL1 system register. */
static inline uint64_t _AArch64_Read_erxmisc0_el1( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, ERXMISC0_EL1" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* Writes the ERXMISC0_EL1 system register. */
static inline void _AArch64_Write_erxmisc0_el1( uint64_t value )
{
  __asm__ volatile (
    "msr ERXMISC0_EL1, %0" : : "r" ( value ) : "memory"
  );
}

/* ERXMISC1_EL1, Selected Error Record Miscellaneous Register 1 */

/* Reads the ERXMISC1_EL1 system register. */
static inline uint64_t _AArch64_Read_erxmisc1_el1( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, ERXMISC1_EL1" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* Writes the ERXMISC1_EL1 system register. */
static inline void _AArch64_Write_erxmisc1_el1( uint64_t value )
{
  __asm__ volatile (
    "msr ERXMISC1_EL1, %0" : : "r" ( value ) : "memory"
  );
}

/* ERXMISC2_EL1, Selected Error Record Miscellaneous Register 2 */

/* Reads the ERXMISC2_EL1 system register. */
static inline uint64_t _AArch64_Read_erxmisc2_el1( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, ERXMISC2_EL1" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* Writes the ERXMISC2_EL1 system register. */
static inline void _AArch64_Write_erxmisc2_el1( uint64_t value )
{
  __asm__ volatile (
    "msr ERXMISC2_EL1, %0" : : "r" ( value ) : "memory"
  );
}

/* ERXMISC3_EL1, Selected Error Record Miscellaneous Register 3 */

/* Reads the ERXMISC3_EL1 system register. */
static inline uint64_t _AArch64_Read_erxmisc3_el1( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, ERXMISC3_EL1" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* Writes the ERXMISC3_EL1 system register. */
static inline void _AArch64_Write_erxmisc3_el1( uint64_t value )
{
  __asm__ volatile (
    "msr ERXMISC3_EL1, %0" : : "r" ( value ) : "memory"
  );
}

/* ERXPFGCDN_EL1, Selected Pseudo-fault Generation Countdown Register */

/* Reads the ERXPFGCDN_EL1 system register. */
static inline uint64_t _AArch64_Read_erxpfgcdn_el1( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, ERXPFGCDN_EL1" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* Writes the ERXPFGCDN_EL1 system register. */
static inline void _AArch64_Write_erxpfgcdn_el1( uint64_t value )
{
  __asm__ volatile (
    "msr ERXPFGCDN_EL1, %0" : : "r" ( value ) : "memory"
  );
}

/* ERXPFGCTL_EL1, Selected Pseudo-fault Generation Control Register */

/* Reads the ERXPFGCTL_EL1 system register. */
static inline uint64_t _AArch64_Read_erxpfgctl_el1( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, ERXPFGCTL_EL1" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* Writes the ERXPFGCTL_EL1 system register. */
static inline void _AArch64_Write_erxpfgctl_el1( uint64_t value )
{
  __asm__ volatile (
    "msr ERXPFGCTL_EL1, %0" : : "r" ( value ) : "memory"
  );
}

/* ERXPFGF_EL1, Selected Pseudo-fault Generation Feature Register */

/* Reads the ERXPFGF_EL1 system register.  No write accessor is generated
 * for this feature register. */
static inline uint64_t _AArch64_Read_erxpfgf_el1( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, ERXPFGF_EL1" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* ERXSTATUS_EL1, Selected Error Record Primary Status Register */

/* Reads the ERXSTATUS_EL1 system register. */
static inline uint64_t _AArch64_Read_erxstatus_el1( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, ERXSTATUS_EL1" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* Writes the ERXSTATUS_EL1 system register. */
static inline void _AArch64_Write_erxstatus_el1( uint64_t value )
{
  __asm__ volatile (
    "msr ERXSTATUS_EL1, %0" : : "r" ( value ) : "memory"
  );
}
10278
/* VDISR_EL2, Virtual Deferred Interrupt Status Register */

/* NOTE(review): FS_3_0, STATUS, and ISS all start at bit 0 and overlap;
 * the applicable layout presumably depends on the guest's execution state
 * and syndrome format — confirm against the Arm ARM before decoding. */
#define AARCH64_VDISR_EL2_FS_3_0( _val ) ( ( _val ) << 0 )
#define AARCH64_VDISR_EL2_FS_3_0_SHIFT 0
#define AARCH64_VDISR_EL2_FS_3_0_MASK 0xfU
#define AARCH64_VDISR_EL2_FS_3_0_GET( _reg ) \
  ( ( ( _reg ) >> 0 ) & 0xfU )

#define AARCH64_VDISR_EL2_STATUS( _val ) ( ( _val ) << 0 )
#define AARCH64_VDISR_EL2_STATUS_SHIFT 0
#define AARCH64_VDISR_EL2_STATUS_MASK 0x3fU
#define AARCH64_VDISR_EL2_STATUS_GET( _reg ) \
  ( ( ( _reg ) >> 0 ) & 0x3fU )

#define AARCH64_VDISR_EL2_ISS( _val ) ( ( _val ) << 0 )
#define AARCH64_VDISR_EL2_ISS_SHIFT 0
#define AARCH64_VDISR_EL2_ISS_MASK 0xffffffU
#define AARCH64_VDISR_EL2_ISS_GET( _reg ) \
  ( ( ( _reg ) >> 0 ) & 0xffffffU )

#define AARCH64_VDISR_EL2_LPAE 0x200U

#define AARCH64_VDISR_EL2_FS_4 0x400U

#define AARCH64_VDISR_EL2_EXT 0x1000U

/* AET field, bits [15:14]. */
#define AARCH64_VDISR_EL2_AET( _val ) ( ( _val ) << 14 )
#define AARCH64_VDISR_EL2_AET_SHIFT 14
#define AARCH64_VDISR_EL2_AET_MASK 0xc000U
#define AARCH64_VDISR_EL2_AET_GET( _reg ) \
  ( ( ( _reg ) >> 14 ) & 0x3U )

#define AARCH64_VDISR_EL2_IDS 0x1000000U

#define AARCH64_VDISR_EL2_A 0x80000000U

/* Reads the VDISR_EL2 system register. */
static inline uint64_t _AArch64_Read_vdisr_el2( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, VDISR_EL2" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* Writes the VDISR_EL2 system register. */
static inline void _AArch64_Write_vdisr_el2( uint64_t value )
{
  __asm__ volatile (
    "msr VDISR_EL2, %0" : : "r" ( value ) : "memory"
  );
}
10332
/* VSESR_EL2, Virtual SError Exception Syndrome Register */

/* ISS field, bits [23:0]. */
#define AARCH64_VSESR_EL2_ISS( _val ) ( ( _val ) << 0 )
#define AARCH64_VSESR_EL2_ISS_SHIFT 0
#define AARCH64_VSESR_EL2_ISS_MASK 0xffffffU
#define AARCH64_VSESR_EL2_ISS_GET( _reg ) \
  ( ( ( _reg ) >> 0 ) & 0xffffffU )

#define AARCH64_VSESR_EL2_EXT 0x1000U

/* AET field, bits [15:14]. */
#define AARCH64_VSESR_EL2_AET( _val ) ( ( _val ) << 14 )
#define AARCH64_VSESR_EL2_AET_SHIFT 14
#define AARCH64_VSESR_EL2_AET_MASK 0xc000U
#define AARCH64_VSESR_EL2_AET_GET( _reg ) \
  ( ( ( _reg ) >> 14 ) & 0x3U )

#define AARCH64_VSESR_EL2_IDS 0x1000000U

/* Reads the VSESR_EL2 system register. */
static inline uint64_t _AArch64_Read_vsesr_el2( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, VSESR_EL2" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* Writes the VSESR_EL2 system register. */
static inline void _AArch64_Write_vsesr_el2( uint64_t value )
{
  __asm__ volatile (
    "msr VSESR_EL2, %0" : : "r" ( value ) : "memory"
  );
}
10368
/* CNTFRQ_EL0, Counter-timer Frequency Register */

/* Reads the CNTFRQ_EL0 system register (system counter frequency). */
static inline uint64_t _AArch64_Read_cntfrq_el0( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, CNTFRQ_EL0" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* Writes the CNTFRQ_EL0 system register. */
static inline void _AArch64_Write_cntfrq_el0( uint64_t value )
{
  __asm__ volatile (
    "msr CNTFRQ_EL0, %0" : : "r" ( value ) : "memory"
  );
}
10388
/* CNTHCTL_EL2, Counter-timer Hypervisor Control Register */

/* NOTE(review): several fields below exist at two bit positions (the _0/_1
 * suffixes) or share a position under different names; which encoding
 * applies presumably depends on the EL2 configuration (e.g. HCR_EL2.E2H) —
 * confirm against the Arm ARM. */
#define AARCH64_CNTHCTL_EL2_EL0PCTEN 0x1U

#define AARCH64_CNTHCTL_EL2_EL1PCTEN_0 0x1U

#define AARCH64_CNTHCTL_EL2_EL0VCTEN 0x2U

#define AARCH64_CNTHCTL_EL2_EL1PCEN 0x2U

#define AARCH64_CNTHCTL_EL2_EVNTEN 0x4U

#define AARCH64_CNTHCTL_EL2_EVNTDIR 0x8U

/* EVNTI field, bits [7:4]. */
#define AARCH64_CNTHCTL_EL2_EVNTI( _val ) ( ( _val ) << 4 )
#define AARCH64_CNTHCTL_EL2_EVNTI_SHIFT 4
#define AARCH64_CNTHCTL_EL2_EVNTI_MASK 0xf0U
#define AARCH64_CNTHCTL_EL2_EVNTI_GET( _reg ) \
  ( ( ( _reg ) >> 4 ) & 0xfU )

#define AARCH64_CNTHCTL_EL2_EL0VTEN 0x100U

#define AARCH64_CNTHCTL_EL2_EL0PTEN 0x200U

#define AARCH64_CNTHCTL_EL2_EL1PCTEN_1 0x400U

#define AARCH64_CNTHCTL_EL2_EL1PTEN 0x800U

#define AARCH64_CNTHCTL_EL2_ECV 0x1000U

#define AARCH64_CNTHCTL_EL2_EL1TVT 0x2000U

#define AARCH64_CNTHCTL_EL2_EL1TVCT 0x4000U

#define AARCH64_CNTHCTL_EL2_EL1NVPCT 0x8000U

#define AARCH64_CNTHCTL_EL2_EL1NVVCT 0x10000U

#define AARCH64_CNTHCTL_EL2_EVNTIS 0x20000U

/* Reads the CNTHCTL_EL2 system register. */
static inline uint64_t _AArch64_Read_cnthctl_el2( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, CNTHCTL_EL2" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* Writes the CNTHCTL_EL2 system register. */
static inline void _AArch64_Write_cnthctl_el2( uint64_t value )
{
  __asm__ volatile (
    "msr CNTHCTL_EL2, %0" : : "r" ( value ) : "memory"
  );
}
10446
/* CNTHP_CTL_EL2, Counter-timer Hypervisor Physical Timer Control Register */

#define AARCH64_CNTHP_CTL_EL2_ENABLE 0x1U

#define AARCH64_CNTHP_CTL_EL2_IMASK 0x2U

#define AARCH64_CNTHP_CTL_EL2_ISTATUS 0x4U

/* Reads the CNTHP_CTL_EL2 system register. */
static inline uint64_t _AArch64_Read_cnthp_ctl_el2( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, CNTHP_CTL_EL2" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* Writes the CNTHP_CTL_EL2 system register. */
static inline void _AArch64_Write_cnthp_ctl_el2( uint64_t value )
{
  __asm__ volatile (
    "msr CNTHP_CTL_EL2, %0" : : "r" ( value ) : "memory"
  );
}

/* CNTHP_CVAL_EL2, Counter-timer Physical Timer CompareValue Register (EL2) */

/* Reads the CNTHP_CVAL_EL2 system register. */
static inline uint64_t _AArch64_Read_cnthp_cval_el2( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, CNTHP_CVAL_EL2" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* Writes the CNTHP_CVAL_EL2 system register. */
static inline void _AArch64_Write_cnthp_cval_el2( uint64_t value )
{
  __asm__ volatile (
    "msr CNTHP_CVAL_EL2, %0" : : "r" ( value ) : "memory"
  );
}

/* CNTHP_TVAL_EL2, Counter-timer Physical Timer TimerValue Register (EL2) */

/* TIMERVALUE field, bits [31:0]. */
#define AARCH64_CNTHP_TVAL_EL2_TIMERVALUE( _val ) ( ( _val ) << 0 )
#define AARCH64_CNTHP_TVAL_EL2_TIMERVALUE_SHIFT 0
#define AARCH64_CNTHP_TVAL_EL2_TIMERVALUE_MASK 0xffffffffU
#define AARCH64_CNTHP_TVAL_EL2_TIMERVALUE_GET( _reg ) \
  ( ( ( _reg ) >> 0 ) & 0xffffffffU )

/* Reads the CNTHP_TVAL_EL2 system register. */
static inline uint64_t _AArch64_Read_cnthp_tval_el2( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, CNTHP_TVAL_EL2" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* Writes the CNTHP_TVAL_EL2 system register. */
static inline void _AArch64_Write_cnthp_tval_el2( uint64_t value )
{
  __asm__ volatile (
    "msr CNTHP_TVAL_EL2, %0" : : "r" ( value ) : "memory"
  );
}
10518
/* CNTHPS_CTL_EL2, Counter-timer Secure Physical Timer Control Register (EL2) */

#define AARCH64_CNTHPS_CTL_EL2_ENABLE 0x1U

#define AARCH64_CNTHPS_CTL_EL2_IMASK 0x2U

#define AARCH64_CNTHPS_CTL_EL2_ISTATUS 0x4U

/* Reads the CNTHPS_CTL_EL2 system register. */
static inline uint64_t _AArch64_Read_cnthps_ctl_el2( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, CNTHPS_CTL_EL2" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* Writes the CNTHPS_CTL_EL2 system register. */
static inline void _AArch64_Write_cnthps_ctl_el2( uint64_t value )
{
  __asm__ volatile (
    "msr CNTHPS_CTL_EL2, %0" : : "r" ( value ) : "memory"
  );
}

/* CNTHPS_CVAL_EL2, Counter-timer Secure Physical Timer CompareValue Register (EL2) */

/* Reads the CNTHPS_CVAL_EL2 system register. */
static inline uint64_t _AArch64_Read_cnthps_cval_el2( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, CNTHPS_CVAL_EL2" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* Writes the CNTHPS_CVAL_EL2 system register. */
static inline void _AArch64_Write_cnthps_cval_el2( uint64_t value )
{
  __asm__ volatile (
    "msr CNTHPS_CVAL_EL2, %0" : : "r" ( value ) : "memory"
  );
}

/* CNTHPS_TVAL_EL2, Counter-timer Secure Physical Timer TimerValue Register (EL2) */

/* TIMERVALUE field, bits [31:0]. */
#define AARCH64_CNTHPS_TVAL_EL2_TIMERVALUE( _val ) ( ( _val ) << 0 )
#define AARCH64_CNTHPS_TVAL_EL2_TIMERVALUE_SHIFT 0
#define AARCH64_CNTHPS_TVAL_EL2_TIMERVALUE_MASK 0xffffffffU
#define AARCH64_CNTHPS_TVAL_EL2_TIMERVALUE_GET( _reg ) \
  ( ( ( _reg ) >> 0 ) & 0xffffffffU )

/* Reads the CNTHPS_TVAL_EL2 system register. */
static inline uint64_t _AArch64_Read_cnthps_tval_el2( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, CNTHPS_TVAL_EL2" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* Writes the CNTHPS_TVAL_EL2 system register. */
static inline void _AArch64_Write_cnthps_tval_el2( uint64_t value )
{
  __asm__ volatile (
    "msr CNTHPS_TVAL_EL2, %0" : : "r" ( value ) : "memory"
  );
}
10590
/* CNTHV_CTL_EL2, Counter-timer Virtual Timer Control Register (EL2) */

#define AARCH64_CNTHV_CTL_EL2_ENABLE 0x1U

#define AARCH64_CNTHV_CTL_EL2_IMASK 0x2U

#define AARCH64_CNTHV_CTL_EL2_ISTATUS 0x4U

/* Reads the CNTHV_CTL_EL2 system register. */
static inline uint64_t _AArch64_Read_cnthv_ctl_el2( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, CNTHV_CTL_EL2" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* Writes the CNTHV_CTL_EL2 system register. */
static inline void _AArch64_Write_cnthv_ctl_el2( uint64_t value )
{
  __asm__ volatile (
    "msr CNTHV_CTL_EL2, %0" : : "r" ( value ) : "memory"
  );
}

/* CNTHV_CVAL_EL2, Counter-timer Virtual Timer CompareValue Register (EL2) */

/* Reads the CNTHV_CVAL_EL2 system register. */
static inline uint64_t _AArch64_Read_cnthv_cval_el2( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, CNTHV_CVAL_EL2" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* Writes the CNTHV_CVAL_EL2 system register. */
static inline void _AArch64_Write_cnthv_cval_el2( uint64_t value )
{
  __asm__ volatile (
    "msr CNTHV_CVAL_EL2, %0" : : "r" ( value ) : "memory"
  );
}

/* CNTHV_TVAL_EL2, Counter-timer Virtual Timer TimerValue Register (EL2) */

/* TIMERVALUE field, bits [31:0]. */
#define AARCH64_CNTHV_TVAL_EL2_TIMERVALUE( _val ) ( ( _val ) << 0 )
#define AARCH64_CNTHV_TVAL_EL2_TIMERVALUE_SHIFT 0
#define AARCH64_CNTHV_TVAL_EL2_TIMERVALUE_MASK 0xffffffffU
#define AARCH64_CNTHV_TVAL_EL2_TIMERVALUE_GET( _reg ) \
  ( ( ( _reg ) >> 0 ) & 0xffffffffU )

/* Reads the CNTHV_TVAL_EL2 system register. */
static inline uint64_t _AArch64_Read_cnthv_tval_el2( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, CNTHV_TVAL_EL2" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* Writes the CNTHV_TVAL_EL2 system register. */
static inline void _AArch64_Write_cnthv_tval_el2( uint64_t value )
{
  __asm__ volatile (
    "msr CNTHV_TVAL_EL2, %0" : : "r" ( value ) : "memory"
  );
}
10662
/* CNTHVS_CTL_EL2, Counter-timer Secure Virtual Timer Control Register (EL2) */

#define AARCH64_CNTHVS_CTL_EL2_ENABLE 0x1U

#define AARCH64_CNTHVS_CTL_EL2_IMASK 0x2U

#define AARCH64_CNTHVS_CTL_EL2_ISTATUS 0x4U

/* Reads the CNTHVS_CTL_EL2 system register. */
static inline uint64_t _AArch64_Read_cnthvs_ctl_el2( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, CNTHVS_CTL_EL2" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* Writes the CNTHVS_CTL_EL2 system register. */
static inline void _AArch64_Write_cnthvs_ctl_el2( uint64_t value )
{
  __asm__ volatile (
    "msr CNTHVS_CTL_EL2, %0" : : "r" ( value ) : "memory"
  );
}

/* CNTHVS_CVAL_EL2, Counter-timer Secure Virtual Timer CompareValue Register (EL2) */

/* Reads the CNTHVS_CVAL_EL2 system register. */
static inline uint64_t _AArch64_Read_cnthvs_cval_el2( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, CNTHVS_CVAL_EL2" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* Writes the CNTHVS_CVAL_EL2 system register. */
static inline void _AArch64_Write_cnthvs_cval_el2( uint64_t value )
{
  __asm__ volatile (
    "msr CNTHVS_CVAL_EL2, %0" : : "r" ( value ) : "memory"
  );
}

/* CNTHVS_TVAL_EL2, Counter-timer Secure Virtual Timer TimerValue Register (EL2) */

/* TIMERVALUE field, bits [31:0]. */
#define AARCH64_CNTHVS_TVAL_EL2_TIMERVALUE( _val ) ( ( _val ) << 0 )
#define AARCH64_CNTHVS_TVAL_EL2_TIMERVALUE_SHIFT 0
#define AARCH64_CNTHVS_TVAL_EL2_TIMERVALUE_MASK 0xffffffffU
#define AARCH64_CNTHVS_TVAL_EL2_TIMERVALUE_GET( _reg ) \
  ( ( ( _reg ) >> 0 ) & 0xffffffffU )

/* Reads the CNTHVS_TVAL_EL2 system register. */
static inline uint64_t _AArch64_Read_cnthvs_tval_el2( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, CNTHVS_TVAL_EL2" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* Writes the CNTHVS_TVAL_EL2 system register. */
static inline void _AArch64_Write_cnthvs_tval_el2( uint64_t value )
{
  __asm__ volatile (
    "msr CNTHVS_TVAL_EL2, %0" : : "r" ( value ) : "memory"
  );
}
10734
/* CNTKCTL_EL1, Counter-timer Kernel Control Register */

#define AARCH64_CNTKCTL_EL1_EL0PCTEN 0x1U

#define AARCH64_CNTKCTL_EL1_EL0VCTEN 0x2U

#define AARCH64_CNTKCTL_EL1_EVNTEN 0x4U

#define AARCH64_CNTKCTL_EL1_EVNTDIR 0x8U

/* EVNTI field, bits [7:4]. */
#define AARCH64_CNTKCTL_EL1_EVNTI( _val ) ( ( _val ) << 4 )
#define AARCH64_CNTKCTL_EL1_EVNTI_SHIFT 4
#define AARCH64_CNTKCTL_EL1_EVNTI_MASK 0xf0U
#define AARCH64_CNTKCTL_EL1_EVNTI_GET( _reg ) \
  ( ( ( _reg ) >> 4 ) & 0xfU )

#define AARCH64_CNTKCTL_EL1_EL0VTEN 0x100U

#define AARCH64_CNTKCTL_EL1_EL0PTEN 0x200U

#define AARCH64_CNTKCTL_EL1_EVNTIS 0x20000U

/* Reads the CNTKCTL_EL1 system register. */
static inline uint64_t _AArch64_Read_cntkctl_el1( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, CNTKCTL_EL1" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* Writes the CNTKCTL_EL1 system register. */
static inline void _AArch64_Write_cntkctl_el1( uint64_t value )
{
  __asm__ volatile (
    "msr CNTKCTL_EL1, %0" : : "r" ( value ) : "memory"
  );
}
10774
/* CNTP_CTL_EL0, Counter-timer Physical Timer Control Register */

#define AARCH64_CNTP_CTL_EL0_ENABLE 0x1U

#define AARCH64_CNTP_CTL_EL0_IMASK 0x2U

#define AARCH64_CNTP_CTL_EL0_ISTATUS 0x4U

/* Reads the CNTP_CTL_EL0 system register. */
static inline uint64_t _AArch64_Read_cntp_ctl_el0( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, CNTP_CTL_EL0" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* Writes the CNTP_CTL_EL0 system register. */
static inline void _AArch64_Write_cntp_ctl_el0( uint64_t value )
{
  __asm__ volatile (
    "msr CNTP_CTL_EL0, %0" : : "r" ( value ) : "memory"
  );
}

/* CNTP_CVAL_EL0, Counter-timer Physical Timer CompareValue Register */

/* Reads the CNTP_CVAL_EL0 system register. */
static inline uint64_t _AArch64_Read_cntp_cval_el0( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, CNTP_CVAL_EL0" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* Writes the CNTP_CVAL_EL0 system register. */
static inline void _AArch64_Write_cntp_cval_el0( uint64_t value )
{
  __asm__ volatile (
    "msr CNTP_CVAL_EL0, %0" : : "r" ( value ) : "memory"
  );
}

/* CNTP_TVAL_EL0, Counter-timer Physical Timer TimerValue Register */

/* TIMERVALUE field, bits [31:0]. */
#define AARCH64_CNTP_TVAL_EL0_TIMERVALUE( _val ) ( ( _val ) << 0 )
#define AARCH64_CNTP_TVAL_EL0_TIMERVALUE_SHIFT 0
#define AARCH64_CNTP_TVAL_EL0_TIMERVALUE_MASK 0xffffffffU
#define AARCH64_CNTP_TVAL_EL0_TIMERVALUE_GET( _reg ) \
  ( ( ( _reg ) >> 0 ) & 0xffffffffU )

/* Reads the CNTP_TVAL_EL0 system register. */
static inline uint64_t _AArch64_Read_cntp_tval_el0( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, CNTP_TVAL_EL0" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* Writes the CNTP_TVAL_EL0 system register. */
static inline void _AArch64_Write_cntp_tval_el0( uint64_t value )
{
  __asm__ volatile (
    "msr CNTP_TVAL_EL0, %0" : : "r" ( value ) : "memory"
  );
}
10846
/* CNTPCTSS_EL0, Counter-timer Self-Synchronized Physical Count Register */

/* Reads the CNTPCTSS_EL0 system register.  Read-only count value; no
 * write accessor is generated. */
static inline uint64_t _AArch64_Read_cntpctss_el0( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, CNTPCTSS_EL0" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* CNTPCT_EL0, Counter-timer Physical Count Register */

/* Reads the CNTPCT_EL0 system register.  Read-only count value; no write
 * accessor is generated. */
static inline uint64_t _AArch64_Read_cntpct_el0( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, CNTPCT_EL0" : "=&r" ( value ) : : "memory"
  );

  return value;
}
10872
/* CNTPS_CTL_EL1, Counter-timer Physical Secure Timer Control Register */

#define AARCH64_CNTPS_CTL_EL1_ENABLE 0x1U

#define AARCH64_CNTPS_CTL_EL1_IMASK 0x2U

#define AARCH64_CNTPS_CTL_EL1_ISTATUS 0x4U

/* Reads the CNTPS_CTL_EL1 system register. */
static inline uint64_t _AArch64_Read_cntps_ctl_el1( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, CNTPS_CTL_EL1" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* Writes the CNTPS_CTL_EL1 system register. */
static inline void _AArch64_Write_cntps_ctl_el1( uint64_t value )
{
  __asm__ volatile (
    "msr CNTPS_CTL_EL1, %0" : : "r" ( value ) : "memory"
  );
}

/* CNTPOFF_EL2, Counter-timer Physical Offset Register */

/* Reads the CNTPOFF_EL2 system register. */
static inline uint64_t _AArch64_Read_cntpoff_el2( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, CNTPOFF_EL2" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* Writes the CNTPOFF_EL2 system register. */
static inline void _AArch64_Write_cntpoff_el2( uint64_t value )
{
  __asm__ volatile (
    "msr CNTPOFF_EL2, %0" : : "r" ( value ) : "memory"
  );
}

/* CNTPS_CVAL_EL1, Counter-timer Physical Secure Timer CompareValue Register */

/* Reads the CNTPS_CVAL_EL1 system register. */
static inline uint64_t _AArch64_Read_cntps_cval_el1( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, CNTPS_CVAL_EL1" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* Writes the CNTPS_CVAL_EL1 system register. */
static inline void _AArch64_Write_cntps_cval_el1( uint64_t value )
{
  __asm__ volatile (
    "msr CNTPS_CVAL_EL1, %0" : : "r" ( value ) : "memory"
  );
}
10938
/* CNTPS_TVAL_EL1, Counter-timer Physical Secure Timer TimerValue Register */

/* TIMERVALUE field, bits [31:0] */
#define AARCH64_CNTPS_TVAL_EL1_TIMERVALUE( _val ) ( ( _val ) << 0 )
#define AARCH64_CNTPS_TVAL_EL1_TIMERVALUE_SHIFT 0
#define AARCH64_CNTPS_TVAL_EL1_TIMERVALUE_MASK 0xffffffffU
#define AARCH64_CNTPS_TVAL_EL1_TIMERVALUE_GET( _reg ) \
  ( ( ( _reg ) >> 0 ) & 0xffffffffU )

/* Read the current value of CNTPS_TVAL_EL1. */
static inline uint64_t _AArch64_Read_cntps_tval_el1( void )
{
  uint64_t result;

  __asm__ volatile ( "mrs %0, CNTPS_TVAL_EL1" : "=&r" ( result ) : : "memory" );

  return result;
}

/* Write @a value to CNTPS_TVAL_EL1. */
static inline void _AArch64_Write_cntps_tval_el1( uint64_t value )
{
  __asm__ volatile ( "msr CNTPS_TVAL_EL1, %0" : : "r" ( value ) : "memory" );
}
10964
/* CNTV_CTL_EL0, Counter-timer Virtual Timer Control Register */

/* ENABLE bit */
#define AARCH64_CNTV_CTL_EL0_ENABLE 0x1U

/* IMASK bit */
#define AARCH64_CNTV_CTL_EL0_IMASK 0x2U

/* ISTATUS bit */
#define AARCH64_CNTV_CTL_EL0_ISTATUS 0x4U

/* Read the current value of CNTV_CTL_EL0. */
static inline uint64_t _AArch64_Read_cntv_ctl_el0( void )
{
  uint64_t result;

  __asm__ volatile ( "mrs %0, CNTV_CTL_EL0" : "=&r" ( result ) : : "memory" );

  return result;
}

/* Write @a value to CNTV_CTL_EL0. */
static inline void _AArch64_Write_cntv_ctl_el0( uint64_t value )
{
  __asm__ volatile ( "msr CNTV_CTL_EL0, %0" : : "r" ( value ) : "memory" );
}
10990
/* CNTV_CVAL_EL0, Counter-timer Virtual Timer CompareValue Register */

/* Read the current value of CNTV_CVAL_EL0. */
static inline uint64_t _AArch64_Read_cntv_cval_el0( void )
{
  uint64_t result;

  __asm__ volatile ( "mrs %0, CNTV_CVAL_EL0" : "=&r" ( result ) : : "memory" );

  return result;
}

/* Write @a value to CNTV_CVAL_EL0. */
static inline void _AArch64_Write_cntv_cval_el0( uint64_t value )
{
  __asm__ volatile ( "msr CNTV_CVAL_EL0, %0" : : "r" ( value ) : "memory" );
}
11010
/* CNTV_TVAL_EL0, Counter-timer Virtual Timer TimerValue Register */

/* TIMERVALUE field, bits [31:0] */
#define AARCH64_CNTV_TVAL_EL0_TIMERVALUE( _val ) ( ( _val ) << 0 )
#define AARCH64_CNTV_TVAL_EL0_TIMERVALUE_SHIFT 0
#define AARCH64_CNTV_TVAL_EL0_TIMERVALUE_MASK 0xffffffffU
#define AARCH64_CNTV_TVAL_EL0_TIMERVALUE_GET( _reg ) \
  ( ( ( _reg ) >> 0 ) & 0xffffffffU )

/* Read the current value of CNTV_TVAL_EL0. */
static inline uint64_t _AArch64_Read_cntv_tval_el0( void )
{
  uint64_t result;

  __asm__ volatile ( "mrs %0, CNTV_TVAL_EL0" : "=&r" ( result ) : : "memory" );

  return result;
}

/* Write @a value to CNTV_TVAL_EL0. */
static inline void _AArch64_Write_cntv_tval_el0( uint64_t value )
{
  __asm__ volatile ( "msr CNTV_TVAL_EL0, %0" : : "r" ( value ) : "memory" );
}
11036
/* CNTVCTSS_EL0, Counter-timer Self-Synchronized Virtual Count Register */

/* Read the current value of CNTVCTSS_EL0. */
static inline uint64_t _AArch64_Read_cntvctss_el0( void )
{
  uint64_t result;

  __asm__ volatile ( "mrs %0, CNTVCTSS_EL0" : "=&r" ( result ) : : "memory" );

  return result;
}
11049
/* CNTVCT_EL0, Counter-timer Virtual Count Register */

/* Read the current value of CNTVCT_EL0. */
static inline uint64_t _AArch64_Read_cntvct_el0( void )
{
  uint64_t result;

  __asm__ volatile ( "mrs %0, CNTVCT_EL0" : "=&r" ( result ) : : "memory" );

  return result;
}
11062
/* CPUMERRSR_EL1, CPU Memory Error Syndrome Register, Cortex-A53-specific */

/* ADDR field, bits [11:0] */
#define AARCH64_CPUMERRSR_EL1_ADDR_SHIFT 0
#define AARCH64_CPUMERRSR_EL1_ADDR_MASK 0xfffLLU
#define AARCH64_CPUMERRSR_EL1_ADDR( _val ) \
  ( ( _val ) << AARCH64_CPUMERRSR_EL1_ADDR_SHIFT )
#define AARCH64_CPUMERRSR_EL1_ADDR_GET( _reg ) \
  ( ( ( _reg ) >> AARCH64_CPUMERRSR_EL1_ADDR_SHIFT ) \
    & AARCH64_CPUMERRSR_EL1_ADDR_MASK )

/* CPUID/WAY field, bits [20:18] */
#define AARCH64_CPUMERRSR_EL1_CPUIDWAY_SHIFT 18
#define AARCH64_CPUMERRSR_EL1_CPUIDWAY_MASK 0x7LLU
#define AARCH64_CPUMERRSR_EL1_CPUIDWAY( _val ) \
  ( ( _val ) << AARCH64_CPUMERRSR_EL1_CPUIDWAY_SHIFT )
#define AARCH64_CPUMERRSR_EL1_CPUIDWAY_GET( _reg ) \
  ( ( ( _reg ) >> AARCH64_CPUMERRSR_EL1_CPUIDWAY_SHIFT ) \
    & AARCH64_CPUMERRSR_EL1_CPUIDWAY_MASK )

/* RAMID field, bits [30:24] */
#define AARCH64_CPUMERRSR_EL1_RAMID_SHIFT 24
#define AARCH64_CPUMERRSR_EL1_RAMID_MASK 0x7fLLU
#define AARCH64_CPUMERRSR_EL1_RAMID( _val ) \
  ( ( _val ) << AARCH64_CPUMERRSR_EL1_RAMID_SHIFT )
#define AARCH64_CPUMERRSR_EL1_RAMID_GET( _reg ) \
  ( ( ( _reg ) >> AARCH64_CPUMERRSR_EL1_RAMID_SHIFT ) \
    & AARCH64_CPUMERRSR_EL1_RAMID_MASK )

/* VALID bit, bit [31] */
#define AARCH64_CPUMERRSR_EL1_VALID 0x80000000LLU

/* REPEATERR field, bits [39:32] */
#define AARCH64_CPUMERRSR_EL1_REPEATERR_SHIFT 32
#define AARCH64_CPUMERRSR_EL1_REPEATERR_MASK 0xffLLU
#define AARCH64_CPUMERRSR_EL1_REPEATERR( _val ) \
  ( ( _val ) << AARCH64_CPUMERRSR_EL1_REPEATERR_SHIFT )
#define AARCH64_CPUMERRSR_EL1_REPEATERR_GET( _reg ) \
  ( ( ( _reg ) >> AARCH64_CPUMERRSR_EL1_REPEATERR_SHIFT ) \
    & AARCH64_CPUMERRSR_EL1_REPEATERR_MASK )

/* OTHERERR field, bits [47:40] */
#define AARCH64_CPUMERRSR_EL1_OTHERERR_SHIFT 40
#define AARCH64_CPUMERRSR_EL1_OTHERERR_MASK 0xffLLU
#define AARCH64_CPUMERRSR_EL1_OTHERERR( _val ) \
  ( ( _val ) << AARCH64_CPUMERRSR_EL1_OTHERERR_SHIFT )
#define AARCH64_CPUMERRSR_EL1_OTHERERR_GET( _reg ) \
  ( ( ( _reg ) >> AARCH64_CPUMERRSR_EL1_OTHERERR_SHIFT ) \
    & AARCH64_CPUMERRSR_EL1_OTHERERR_MASK )

/* FATAL bit, bit [63] */
#define AARCH64_CPUMERRSR_EL1_FATAL 0x8000000000000000LLU

/*
 * Read the current value of CPUMERRSR_EL1 via its implementation-defined
 * encoding S3_1_c15_c2_2.
 */
static inline uint64_t _AArch64_Read_cpumerrsr_el1( void )
{
  uint64_t result;

  __asm__ volatile ( "mrs %0, S3_1_c15_c2_2" : "=&r" ( result ) : : "memory" );

  return result;
}

/*
 * Write @a value to CPUMERRSR_EL1 via its implementation-defined encoding
 * S3_1_c15_c2_2.
 */
static inline void _AArch64_Write_cpumerrsr_el1( uint64_t value )
{
  __asm__ volatile ( "msr S3_1_c15_c2_2, %0" : : "r" ( value ) : "memory" );
}
11126
/* L2MERRSR_EL1, L2 Memory Error Syndrome Register, Cortex-A53-specific */

/* ADDR field, bits [16:3] */
#define AARCH64_L2MERRSR_EL1_ADDR_SHIFT 3
#define AARCH64_L2MERRSR_EL1_ADDR_MASK 0x3fffLLU
#define AARCH64_L2MERRSR_EL1_ADDR( _val ) \
  ( ( _val ) << AARCH64_L2MERRSR_EL1_ADDR_SHIFT )
#define AARCH64_L2MERRSR_EL1_ADDR_GET( _reg ) \
  ( ( ( _reg ) >> AARCH64_L2MERRSR_EL1_ADDR_SHIFT ) \
    & AARCH64_L2MERRSR_EL1_ADDR_MASK )

/* CPUID/WAY field, bits [21:18] */
#define AARCH64_L2MERRSR_EL1_CPUIDWAY_SHIFT 18
#define AARCH64_L2MERRSR_EL1_CPUIDWAY_MASK 0xfLLU
#define AARCH64_L2MERRSR_EL1_CPUIDWAY( _val ) \
  ( ( _val ) << AARCH64_L2MERRSR_EL1_CPUIDWAY_SHIFT )
#define AARCH64_L2MERRSR_EL1_CPUIDWAY_GET( _reg ) \
  ( ( ( _reg ) >> AARCH64_L2MERRSR_EL1_CPUIDWAY_SHIFT ) \
    & AARCH64_L2MERRSR_EL1_CPUIDWAY_MASK )

/* RAMID field, bits [30:24] */
#define AARCH64_L2MERRSR_EL1_RAMID_SHIFT 24
#define AARCH64_L2MERRSR_EL1_RAMID_MASK 0x7fLLU
#define AARCH64_L2MERRSR_EL1_RAMID( _val ) \
  ( ( _val ) << AARCH64_L2MERRSR_EL1_RAMID_SHIFT )
#define AARCH64_L2MERRSR_EL1_RAMID_GET( _reg ) \
  ( ( ( _reg ) >> AARCH64_L2MERRSR_EL1_RAMID_SHIFT ) \
    & AARCH64_L2MERRSR_EL1_RAMID_MASK )

/* VALID bit, bit [31] */
#define AARCH64_L2MERRSR_EL1_VALID 0x80000000LLU

/* REPEATERR field, bits [39:32] */
#define AARCH64_L2MERRSR_EL1_REPEATERR_SHIFT 32
#define AARCH64_L2MERRSR_EL1_REPEATERR_MASK 0xffLLU
#define AARCH64_L2MERRSR_EL1_REPEATERR( _val ) \
  ( ( _val ) << AARCH64_L2MERRSR_EL1_REPEATERR_SHIFT )
#define AARCH64_L2MERRSR_EL1_REPEATERR_GET( _reg ) \
  ( ( ( _reg ) >> AARCH64_L2MERRSR_EL1_REPEATERR_SHIFT ) \
    & AARCH64_L2MERRSR_EL1_REPEATERR_MASK )

/* OTHERERR field, bits [47:40] */
#define AARCH64_L2MERRSR_EL1_OTHERERR_SHIFT 40
#define AARCH64_L2MERRSR_EL1_OTHERERR_MASK 0xffLLU
#define AARCH64_L2MERRSR_EL1_OTHERERR( _val ) \
  ( ( _val ) << AARCH64_L2MERRSR_EL1_OTHERERR_SHIFT )
#define AARCH64_L2MERRSR_EL1_OTHERERR_GET( _reg ) \
  ( ( ( _reg ) >> AARCH64_L2MERRSR_EL1_OTHERERR_SHIFT ) \
    & AARCH64_L2MERRSR_EL1_OTHERERR_MASK )

/* FATAL bit, bit [63] */
#define AARCH64_L2MERRSR_EL1_FATAL 0x8000000000000000LLU

/*
 * Read the current value of L2MERRSR_EL1 via its implementation-defined
 * encoding S3_1_c15_c2_3.
 */
static inline uint64_t _AArch64_Read_l2merrsr_el1( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, S3_1_c15_c2_3" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/*
 * Write @a value to L2MERRSR_EL1 via its implementation-defined encoding
 * S3_1_c15_c2_3.
 */
static inline void _AArch64_Write_l2merrsr_el1( uint64_t value )
{
  __asm__ volatile (
    "msr S3_1_c15_c2_3, %0" : : "r" ( value ) : "memory"
  );
}
11190
11191#ifdef __cplusplus
11192}
11193#endif
11194
11195#endif /* _RTEMS_SCORE_AARCH64_SYSTEM_REGISTERS_H */