RTEMS 6.1-rc2
Loading...
Searching...
No Matches
aarch64-system-registers.h
Go to the documentation of this file.
1/* SPDX-License-Identifier: BSD-2-Clause */
2
10/*
11 * Copyright (C) 2020 embedded brains GmbH & Co. KG
12 *
13 * Redistribution and use in source and binary forms, with or without
14 * modification, are permitted provided that the following conditions
15 * are met:
16 * 1. Redistributions of source code must retain the above copyright
17 * notice, this list of conditions and the following disclaimer.
18 * 2. Redistributions in binary form must reproduce the above copyright
19 * notice, this list of conditions and the following disclaimer in the
20 * documentation and/or other materials provided with the distribution.
21 *
22 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
23 * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
24 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
25 * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
26 * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
27 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
28 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
29 * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
30 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
31 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
32 * POSSIBILITY OF SUCH DAMAGE.
33 */
34
35#ifndef _RTEMS_SCORE_AARCH64_SYSTEM_REGISTERS_H
36#define _RTEMS_SCORE_AARCH64_SYSTEM_REGISTERS_H
37
38#include <stdint.h>
39
40#ifdef __cplusplus
41extern "C" {
42#endif
43
/* ACTLR_EL1, Auxiliary Control Register (EL1) */

/*
 * Accessor pattern used throughout this file: each system register gets a
 * read accessor (MRS) and, for writable registers, a write accessor (MSR).
 * The asm is volatile and carries a "memory" clobber so the register access
 * is neither folded with other accesses nor reordered across surrounding
 * memory operations.
 */

static inline uint64_t _AArch64_Read_actlr_el1( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, ACTLR_EL1" : "=&r" ( value ) : : "memory"
  );

  return value;
}

static inline void _AArch64_Write_actlr_el1( uint64_t value )
{
  __asm__ volatile (
    "msr ACTLR_EL1, %0" : : "r" ( value ) : "memory"
  );
}

/* ACTLR_EL2, Auxiliary Control Register (EL2) */

static inline uint64_t _AArch64_Read_actlr_el2( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, ACTLR_EL2" : "=&r" ( value ) : : "memory"
  );

  return value;
}

static inline void _AArch64_Write_actlr_el2( uint64_t value )
{
  __asm__ volatile (
    "msr ACTLR_EL2, %0" : : "r" ( value ) : "memory"
  );
}

/* ACTLR_EL3, Auxiliary Control Register (EL3) */

static inline uint64_t _AArch64_Read_actlr_el3( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, ACTLR_EL3" : "=&r" ( value ) : : "memory"
  );

  return value;
}

static inline void _AArch64_Write_actlr_el3( uint64_t value )
{
  __asm__ volatile (
    "msr ACTLR_EL3, %0" : : "r" ( value ) : "memory"
  );
}

/* AFSR0_EL1, Auxiliary Fault Status Register 0 (EL1) */

static inline uint64_t _AArch64_Read_afsr0_el1( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, AFSR0_EL1" : "=&r" ( value ) : : "memory"
  );

  return value;
}

static inline void _AArch64_Write_afsr0_el1( uint64_t value )
{
  __asm__ volatile (
    "msr AFSR0_EL1, %0" : : "r" ( value ) : "memory"
  );
}

/* AFSR0_EL2, Auxiliary Fault Status Register 0 (EL2) */

static inline uint64_t _AArch64_Read_afsr0_el2( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, AFSR0_EL2" : "=&r" ( value ) : : "memory"
  );

  return value;
}

static inline void _AArch64_Write_afsr0_el2( uint64_t value )
{
  __asm__ volatile (
    "msr AFSR0_EL2, %0" : : "r" ( value ) : "memory"
  );
}

/* AFSR0_EL3, Auxiliary Fault Status Register 0 (EL3) */

static inline uint64_t _AArch64_Read_afsr0_el3( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, AFSR0_EL3" : "=&r" ( value ) : : "memory"
  );

  return value;
}

static inline void _AArch64_Write_afsr0_el3( uint64_t value )
{
  __asm__ volatile (
    "msr AFSR0_EL3, %0" : : "r" ( value ) : "memory"
  );
}

/* AFSR1_EL1, Auxiliary Fault Status Register 1 (EL1) */

static inline uint64_t _AArch64_Read_afsr1_el1( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, AFSR1_EL1" : "=&r" ( value ) : : "memory"
  );

  return value;
}

static inline void _AArch64_Write_afsr1_el1( uint64_t value )
{
  __asm__ volatile (
    "msr AFSR1_EL1, %0" : : "r" ( value ) : "memory"
  );
}

/* AFSR1_EL2, Auxiliary Fault Status Register 1 (EL2) */

static inline uint64_t _AArch64_Read_afsr1_el2( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, AFSR1_EL2" : "=&r" ( value ) : : "memory"
  );

  return value;
}

static inline void _AArch64_Write_afsr1_el2( uint64_t value )
{
  __asm__ volatile (
    "msr AFSR1_EL2, %0" : : "r" ( value ) : "memory"
  );
}

/* AFSR1_EL3, Auxiliary Fault Status Register 1 (EL3) */

static inline uint64_t _AArch64_Read_afsr1_el3( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, AFSR1_EL3" : "=&r" ( value ) : : "memory"
  );

  return value;
}

static inline void _AArch64_Write_afsr1_el3( uint64_t value )
{
  __asm__ volatile (
    "msr AFSR1_EL3, %0" : : "r" ( value ) : "memory"
  );
}
223
/* AIDR_EL1, Auxiliary ID Register */

/* Read-only identification register: no write accessor is generated. */
static inline uint64_t _AArch64_Read_aidr_el1( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, AIDR_EL1" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* AMAIR_EL1, Auxiliary Memory Attribute Indirection Register (EL1) */

static inline uint64_t _AArch64_Read_amair_el1( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, AMAIR_EL1" : "=&r" ( value ) : : "memory"
  );

  return value;
}

static inline void _AArch64_Write_amair_el1( uint64_t value )
{
  __asm__ volatile (
    "msr AMAIR_EL1, %0" : : "r" ( value ) : "memory"
  );
}

/* AMAIR_EL2, Auxiliary Memory Attribute Indirection Register (EL2) */

static inline uint64_t _AArch64_Read_amair_el2( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, AMAIR_EL2" : "=&r" ( value ) : : "memory"
  );

  return value;
}

static inline void _AArch64_Write_amair_el2( uint64_t value )
{
  __asm__ volatile (
    "msr AMAIR_EL2, %0" : : "r" ( value ) : "memory"
  );
}

/* AMAIR_EL3, Auxiliary Memory Attribute Indirection Register (EL3) */

static inline uint64_t _AArch64_Read_amair_el3( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, AMAIR_EL3" : "=&r" ( value ) : : "memory"
  );

  return value;
}

static inline void _AArch64_Write_amair_el3( uint64_t value )
{
  __asm__ volatile (
    "msr AMAIR_EL3, %0" : : "r" ( value ) : "memory"
  );
}
296
/* APDAKEYHI_EL1, Pointer Authentication Key A for Data (bits[127:64]) */

/*
 * Pointer authentication key registers: each 128-bit key is split across a
 * HI (bits [127:64]) and a LO (bits [63:0]) system register pair.
 */

static inline uint64_t _AArch64_Read_apdakeyhi_el1( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, APDAKEYHI_EL1" : "=&r" ( value ) : : "memory"
  );

  return value;
}

static inline void _AArch64_Write_apdakeyhi_el1( uint64_t value )
{
  __asm__ volatile (
    "msr APDAKEYHI_EL1, %0" : : "r" ( value ) : "memory"
  );
}

/* APDAKEYLO_EL1, Pointer Authentication Key A for Data (bits[63:0]) */

static inline uint64_t _AArch64_Read_apdakeylo_el1( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, APDAKEYLO_EL1" : "=&r" ( value ) : : "memory"
  );

  return value;
}

static inline void _AArch64_Write_apdakeylo_el1( uint64_t value )
{
  __asm__ volatile (
    "msr APDAKEYLO_EL1, %0" : : "r" ( value ) : "memory"
  );
}

/* APDBKEYHI_EL1, Pointer Authentication Key B for Data (bits[127:64]) */

static inline uint64_t _AArch64_Read_apdbkeyhi_el1( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, APDBKEYHI_EL1" : "=&r" ( value ) : : "memory"
  );

  return value;
}

static inline void _AArch64_Write_apdbkeyhi_el1( uint64_t value )
{
  __asm__ volatile (
    "msr APDBKEYHI_EL1, %0" : : "r" ( value ) : "memory"
  );
}

/* APDBKEYLO_EL1, Pointer Authentication Key B for Data (bits[63:0]) */

static inline uint64_t _AArch64_Read_apdbkeylo_el1( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, APDBKEYLO_EL1" : "=&r" ( value ) : : "memory"
  );

  return value;
}

static inline void _AArch64_Write_apdbkeylo_el1( uint64_t value )
{
  __asm__ volatile (
    "msr APDBKEYLO_EL1, %0" : : "r" ( value ) : "memory"
  );
}

/* APGAKEYHI_EL1, Pointer Authentication Key A for Code (bits[127:64]) */

static inline uint64_t _AArch64_Read_apgakeyhi_el1( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, APGAKEYHI_EL1" : "=&r" ( value ) : : "memory"
  );

  return value;
}

static inline void _AArch64_Write_apgakeyhi_el1( uint64_t value )
{
  __asm__ volatile (
    "msr APGAKEYHI_EL1, %0" : : "r" ( value ) : "memory"
  );
}

/* APGAKEYLO_EL1, Pointer Authentication Key A for Code (bits[63:0]) */

static inline uint64_t _AArch64_Read_apgakeylo_el1( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, APGAKEYLO_EL1" : "=&r" ( value ) : : "memory"
  );

  return value;
}

static inline void _AArch64_Write_apgakeylo_el1( uint64_t value )
{
  __asm__ volatile (
    "msr APGAKEYLO_EL1, %0" : : "r" ( value ) : "memory"
  );
}

/* APIAKEYHI_EL1, Pointer Authentication Key A for Instruction (bits[127:64]) */

static inline uint64_t _AArch64_Read_apiakeyhi_el1( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, APIAKEYHI_EL1" : "=&r" ( value ) : : "memory"
  );

  return value;
}

static inline void _AArch64_Write_apiakeyhi_el1( uint64_t value )
{
  __asm__ volatile (
    "msr APIAKEYHI_EL1, %0" : : "r" ( value ) : "memory"
  );
}

/* APIAKEYLO_EL1, Pointer Authentication Key A for Instruction (bits[63:0]) */

static inline uint64_t _AArch64_Read_apiakeylo_el1( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, APIAKEYLO_EL1" : "=&r" ( value ) : : "memory"
  );

  return value;
}

static inline void _AArch64_Write_apiakeylo_el1( uint64_t value )
{
  __asm__ volatile (
    "msr APIAKEYLO_EL1, %0" : : "r" ( value ) : "memory"
  );
}

/* APIBKEYHI_EL1, Pointer Authentication Key B for Instruction (bits[127:64]) */

static inline uint64_t _AArch64_Read_apibkeyhi_el1( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, APIBKEYHI_EL1" : "=&r" ( value ) : : "memory"
  );

  return value;
}

static inline void _AArch64_Write_apibkeyhi_el1( uint64_t value )
{
  __asm__ volatile (
    "msr APIBKEYHI_EL1, %0" : : "r" ( value ) : "memory"
  );
}

/* APIBKEYLO_EL1, Pointer Authentication Key B for Instruction (bits[63:0]) */

static inline uint64_t _AArch64_Read_apibkeylo_el1( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, APIBKEYLO_EL1" : "=&r" ( value ) : : "memory"
  );

  return value;
}

static inline void _AArch64_Write_apibkeylo_el1( uint64_t value )
{
  __asm__ volatile (
    "msr APIBKEYLO_EL1, %0" : : "r" ( value ) : "memory"
  );
}
496
/* CCSIDR2_EL1, Current Cache Size ID Register 2 */

/* NUMSETS: (number of sets in the currently selected cache) - 1 */
#define AARCH64_CCSIDR2_EL1_NUMSETS( _val ) ( ( _val ) << 0 )
#define AARCH64_CCSIDR2_EL1_NUMSETS_SHIFT 0
#define AARCH64_CCSIDR2_EL1_NUMSETS_MASK 0xffffffU
#define AARCH64_CCSIDR2_EL1_NUMSETS_GET( _reg ) \
  ( ( ( _reg ) >> 0 ) & 0xffffffU )

/* Read-only: reflects the cache selected via CSSELR_EL1. */
static inline uint64_t _AArch64_Read_ccsidr2_el1( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, CCSIDR2_EL1" : "=&r" ( value ) : : "memory"
  );

  return value;
}
515
/* CCSIDR_EL1, Current Cache Size ID Register */

/*
 * The _0/_1 macro variants describe the two register layouts: _0 is the
 * 32-bit format and _1 the 64-bit format (with FEAT_CCIDX implemented).
 */

/* LINESIZE: log2(words per cache line) - 2 */
#define AARCH64_CCSIDR_EL1_LINESIZE( _val ) ( ( _val ) << 0 )
#define AARCH64_CCSIDR_EL1_LINESIZE_SHIFT 0
#define AARCH64_CCSIDR_EL1_LINESIZE_MASK 0x7U
#define AARCH64_CCSIDR_EL1_LINESIZE_GET( _reg ) \
  ( ( ( _reg ) >> 0 ) & 0x7U )

#define AARCH64_CCSIDR_EL1_ASSOCIATIVITY_0( _val ) ( ( _val ) << 3 )
#define AARCH64_CCSIDR_EL1_ASSOCIATIVITY_SHIFT_0 3
#define AARCH64_CCSIDR_EL1_ASSOCIATIVITY_MASK_0 0x1ff8U
#define AARCH64_CCSIDR_EL1_ASSOCIATIVITY_GET_0( _reg ) \
  ( ( ( _reg ) >> 3 ) & 0x3ffU )

#define AARCH64_CCSIDR_EL1_ASSOCIATIVITY_1( _val ) ( ( _val ) << 3 )
#define AARCH64_CCSIDR_EL1_ASSOCIATIVITY_SHIFT_1 3
#define AARCH64_CCSIDR_EL1_ASSOCIATIVITY_MASK_1 0xfffff8U
#define AARCH64_CCSIDR_EL1_ASSOCIATIVITY_GET_1( _reg ) \
  ( ( ( _reg ) >> 3 ) & 0x1fffffU )

#define AARCH64_CCSIDR_EL1_NUMSETS_0( _val ) ( ( _val ) << 13 )
#define AARCH64_CCSIDR_EL1_NUMSETS_SHIFT_0 13
#define AARCH64_CCSIDR_EL1_NUMSETS_MASK_0 0xfffe000U
#define AARCH64_CCSIDR_EL1_NUMSETS_GET_0( _reg ) \
  ( ( ( _reg ) >> 13 ) & 0x7fffU )

/*
 * NUMSETS (64-bit layout) occupies bits [55:32].  Widen the operand before
 * shifting: a shift by 32 of a 32-bit value is undefined behavior in C
 * (C11 6.5.7), so the cast is required for the setter to be usable with
 * plain integer arguments.
 */
#define AARCH64_CCSIDR_EL1_NUMSETS_1( _val ) ( ( uint64_t ) ( _val ) << 32 )
#define AARCH64_CCSIDR_EL1_NUMSETS_SHIFT_1 32
#define AARCH64_CCSIDR_EL1_NUMSETS_MASK_1 0xffffff00000000ULL
#define AARCH64_CCSIDR_EL1_NUMSETS_GET_1( _reg ) \
  ( ( ( _reg ) >> 32 ) & 0xffffffULL )

/* Read-only: reflects the cache selected via CSSELR_EL1. */
static inline uint64_t _AArch64_Read_ccsidr_el1( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, CCSIDR_EL1" : "=&r" ( value ) : : "memory"
  );

  return value;
}
558
/* CLIDR_EL1, Cache Level ID Register */

/* CTYPEn: cache type implemented at level n (3 bits per level) */
#define AARCH64_CLIDR_EL1_CTYPE1( _val ) ( ( _val ) << 0 )
#define AARCH64_CLIDR_EL1_CTYPE1_SHIFT 0
#define AARCH64_CLIDR_EL1_CTYPE1_MASK ( 0x7U << 0 )
#define AARCH64_CLIDR_EL1_CTYPE1_GET( _reg ) \
  ( ( ( _reg ) >> 0 ) & 0x7U )

#define AARCH64_CLIDR_EL1_CTYPE2( _val ) ( ( _val ) << 3 )
#define AARCH64_CLIDR_EL1_CTYPE2_SHIFT 3
#define AARCH64_CLIDR_EL1_CTYPE2_MASK ( 0x7U << 3 )
#define AARCH64_CLIDR_EL1_CTYPE2_GET( _reg ) \
  ( ( ( _reg ) >> 3 ) & 0x7U )

#define AARCH64_CLIDR_EL1_CTYPE3( _val ) ( ( _val ) << 6 )
#define AARCH64_CLIDR_EL1_CTYPE3_SHIFT 6
#define AARCH64_CLIDR_EL1_CTYPE3_MASK ( 0x7U << 6 )
#define AARCH64_CLIDR_EL1_CTYPE3_GET( _reg ) \
  ( ( ( _reg ) >> 6 ) & 0x7U )

#define AARCH64_CLIDR_EL1_CTYPE4( _val ) ( ( _val ) << 9 )
#define AARCH64_CLIDR_EL1_CTYPE4_SHIFT 9
#define AARCH64_CLIDR_EL1_CTYPE4_MASK ( 0x7U << 9 )
#define AARCH64_CLIDR_EL1_CTYPE4_GET( _reg ) \
  ( ( ( _reg ) >> 9 ) & 0x7U )

#define AARCH64_CLIDR_EL1_CTYPE5( _val ) ( ( _val ) << 12 )
#define AARCH64_CLIDR_EL1_CTYPE5_SHIFT 12
#define AARCH64_CLIDR_EL1_CTYPE5_MASK ( 0x7U << 12 )
#define AARCH64_CLIDR_EL1_CTYPE5_GET( _reg ) \
  ( ( ( _reg ) >> 12 ) & 0x7U )

#define AARCH64_CLIDR_EL1_CTYPE6( _val ) ( ( _val ) << 15 )
#define AARCH64_CLIDR_EL1_CTYPE6_SHIFT 15
#define AARCH64_CLIDR_EL1_CTYPE6_MASK ( 0x7U << 15 )
#define AARCH64_CLIDR_EL1_CTYPE6_GET( _reg ) \
  ( ( ( _reg ) >> 15 ) & 0x7U )

#define AARCH64_CLIDR_EL1_CTYPE7( _val ) ( ( _val ) << 18 )
#define AARCH64_CLIDR_EL1_CTYPE7_SHIFT 18
#define AARCH64_CLIDR_EL1_CTYPE7_MASK ( 0x7U << 18 )
#define AARCH64_CLIDR_EL1_CTYPE7_GET( _reg ) \
  ( ( ( _reg ) >> 18 ) & 0x7U )

/* LoUIS: Level of Unification Inner Shareable */
#define AARCH64_CLIDR_EL1_LOUIS( _val ) ( ( _val ) << 21 )
#define AARCH64_CLIDR_EL1_LOUIS_SHIFT 21
#define AARCH64_CLIDR_EL1_LOUIS_MASK 0xe00000U
#define AARCH64_CLIDR_EL1_LOUIS_GET( _reg ) \
  ( ( ( _reg ) >> 21 ) & 0x7U )

/* LoC: Level of Coherence */
#define AARCH64_CLIDR_EL1_LOC( _val ) ( ( _val ) << 24 )
#define AARCH64_CLIDR_EL1_LOC_SHIFT 24
#define AARCH64_CLIDR_EL1_LOC_MASK 0x7000000U
#define AARCH64_CLIDR_EL1_LOC_GET( _reg ) \
  ( ( ( _reg ) >> 24 ) & 0x7U )

/* LoUU: Level of Unification Uniprocessor */
#define AARCH64_CLIDR_EL1_LOUU( _val ) ( ( _val ) << 27 )
#define AARCH64_CLIDR_EL1_LOUU_SHIFT 27
#define AARCH64_CLIDR_EL1_LOUU_MASK 0x38000000U
#define AARCH64_CLIDR_EL1_LOUU_GET( _reg ) \
  ( ( ( _reg ) >> 27 ) & 0x7U )

/*
 * ICB occupies bits [32:30], so the setter must widen before shifting:
 * e.g. 0x7 << 30 overflows a 32-bit int (undefined behavior, C11 6.5.7)
 * and would lose bit 32 even with an unsigned 32-bit operand.  The mask
 * was already 64-bit (0x1c0000000ULL); the cast makes the setter match.
 */
#define AARCH64_CLIDR_EL1_ICB( _val ) ( ( uint64_t ) ( _val ) << 30 )
#define AARCH64_CLIDR_EL1_ICB_SHIFT 30
#define AARCH64_CLIDR_EL1_ICB_MASK 0x1c0000000ULL
#define AARCH64_CLIDR_EL1_ICB_GET( _reg ) \
  ( ( ( _reg ) >> 30 ) & 0x7ULL )

/* Read-only identification register. */
static inline uint64_t _AArch64_Read_clidr_el1( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, CLIDR_EL1" : "=&r" ( value ) : : "memory"
  );

  return value;
}
637
/* CONTEXTIDR_EL1, Context ID Register (EL1) */

/* PROCID: process identifier, full 32-bit field at bit 0 */
#define AARCH64_CONTEXTIDR_EL1_PROCID( _val ) ( ( _val ) << 0 )
#define AARCH64_CONTEXTIDR_EL1_PROCID_SHIFT 0
#define AARCH64_CONTEXTIDR_EL1_PROCID_MASK 0xffffffffU
#define AARCH64_CONTEXTIDR_EL1_PROCID_GET( _reg ) \
  ( ( ( _reg ) >> 0 ) & 0xffffffffU )

static inline uint64_t _AArch64_Read_contextidr_el1( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, CONTEXTIDR_EL1" : "=&r" ( value ) : : "memory"
  );

  return value;
}

static inline void _AArch64_Write_contextidr_el1( uint64_t value )
{
  __asm__ volatile (
    "msr CONTEXTIDR_EL1, %0" : : "r" ( value ) : "memory"
  );
}

/* CONTEXTIDR_EL2, Context ID Register (EL2) */

#define AARCH64_CONTEXTIDR_EL2_PROCID( _val ) ( ( _val ) << 0 )
#define AARCH64_CONTEXTIDR_EL2_PROCID_SHIFT 0
#define AARCH64_CONTEXTIDR_EL2_PROCID_MASK 0xffffffffU
#define AARCH64_CONTEXTIDR_EL2_PROCID_GET( _reg ) \
  ( ( ( _reg ) >> 0 ) & 0xffffffffU )

static inline uint64_t _AArch64_Read_contextidr_el2( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, CONTEXTIDR_EL2" : "=&r" ( value ) : : "memory"
  );

  return value;
}

static inline void _AArch64_Write_contextidr_el2( uint64_t value )
{
  __asm__ volatile (
    "msr CONTEXTIDR_EL2, %0" : : "r" ( value ) : "memory"
  );
}
689
/* CPACR_EL1, Architectural Feature Access Control Register */

/* ZEN: SVE access control for EL0/EL1 (2-bit trap-enable encoding) */
#define AARCH64_CPACR_EL1_ZEN( _val ) ( ( _val ) << 16 )
#define AARCH64_CPACR_EL1_ZEN_SHIFT 16
#define AARCH64_CPACR_EL1_ZEN_MASK 0x30000U
#define AARCH64_CPACR_EL1_ZEN_GET( _reg ) \
  ( ( ( _reg ) >> 16 ) & 0x3U )

/* FPEN: FP/Advanced SIMD access control for EL0/EL1 */
#define AARCH64_CPACR_EL1_FPEN( _val ) ( ( _val ) << 20 )
#define AARCH64_CPACR_EL1_FPEN_SHIFT 20
#define AARCH64_CPACR_EL1_FPEN_MASK 0x300000U
#define AARCH64_CPACR_EL1_FPEN_GET( _reg ) \
  ( ( ( _reg ) >> 20 ) & 0x3U )

/* TTA: trap trace register accesses */
#define AARCH64_CPACR_EL1_TTA 0x10000000U

static inline uint64_t _AArch64_Read_cpacr_el1( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, CPACR_EL1" : "=&r" ( value ) : : "memory"
  );

  return value;
}

static inline void _AArch64_Write_cpacr_el1( uint64_t value )
{
  __asm__ volatile (
    "msr CPACR_EL1, %0" : : "r" ( value ) : "memory"
  );
}
723
/* CPTR_EL2, Architectural Feature Trap Register (EL2) */

/*
 * CPTR_EL2 has two layouts depending on HCR_EL2.E2H; the _0/_1 suffixed
 * TTA definitions below cover the bit position in each layout.
 */

#define AARCH64_CPTR_EL2_TZ 0x100U

#define AARCH64_CPTR_EL2_TFP 0x400U

#define AARCH64_CPTR_EL2_ZEN( _val ) ( ( _val ) << 16 )
#define AARCH64_CPTR_EL2_ZEN_SHIFT 16
#define AARCH64_CPTR_EL2_ZEN_MASK 0x30000U
#define AARCH64_CPTR_EL2_ZEN_GET( _reg ) \
  ( ( ( _reg ) >> 16 ) & 0x3U )

#define AARCH64_CPTR_EL2_TTA_0 0x100000U

#define AARCH64_CPTR_EL2_FPEN( _val ) ( ( _val ) << 20 )
#define AARCH64_CPTR_EL2_FPEN_SHIFT 20
#define AARCH64_CPTR_EL2_FPEN_MASK 0x300000U
#define AARCH64_CPTR_EL2_FPEN_GET( _reg ) \
  ( ( ( _reg ) >> 20 ) & 0x3U )

#define AARCH64_CPTR_EL2_TTA_1 0x10000000U

#define AARCH64_CPTR_EL2_TAM 0x40000000U

#define AARCH64_CPTR_EL2_TCPAC 0x80000000U

static inline uint64_t _AArch64_Read_cptr_el2( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, CPTR_EL2" : "=&r" ( value ) : : "memory"
  );

  return value;
}

static inline void _AArch64_Write_cptr_el2( uint64_t value )
{
  __asm__ volatile (
    "msr CPTR_EL2, %0" : : "r" ( value ) : "memory"
  );
}

/* CPTR_EL3, Architectural Feature Trap Register (EL3) */

#define AARCH64_CPTR_EL3_EZ 0x100U

#define AARCH64_CPTR_EL3_TFP 0x400U

#define AARCH64_CPTR_EL3_TTA 0x100000U

#define AARCH64_CPTR_EL3_TAM 0x40000000U

#define AARCH64_CPTR_EL3_TCPAC 0x80000000U

static inline uint64_t _AArch64_Read_cptr_el3( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, CPTR_EL3" : "=&r" ( value ) : : "memory"
  );

  return value;
}

static inline void _AArch64_Write_cptr_el3( uint64_t value )
{
  __asm__ volatile (
    "msr CPTR_EL3, %0" : : "r" ( value ) : "memory"
  );
}
797
/* CSSELR_EL1, Cache Size Selection Register */

/* InD: select instruction (1) vs data/unified (0) cache */
#define AARCH64_CSSELR_EL1_IND 0x1U

/* Level: (cache level to select) - 1 */
#define AARCH64_CSSELR_EL1_LEVEL( _val ) ( ( _val ) << 1 )
#define AARCH64_CSSELR_EL1_LEVEL_SHIFT 1
#define AARCH64_CSSELR_EL1_LEVEL_MASK 0xeU
#define AARCH64_CSSELR_EL1_LEVEL_GET( _reg ) \
  ( ( ( _reg ) >> 1 ) & 0x7U )

#define AARCH64_CSSELR_EL1_TND 0x10U

/* Selects which cache CCSIDR_EL1/CCSIDR2_EL1 describe. */
static inline uint64_t _AArch64_Read_csselr_el1( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, CSSELR_EL1" : "=&r" ( value ) : : "memory"
  );

  return value;
}

static inline void _AArch64_Write_csselr_el1( uint64_t value )
{
  __asm__ volatile (
    "msr CSSELR_EL1, %0" : : "r" ( value ) : "memory"
  );
}
827
/* CTR_EL0, Cache Type Register */

/* IminLine: log2(words in smallest instruction cache line) */
#define AARCH64_CTR_EL0_IMINLINE( _val ) ( ( _val ) << 0 )
#define AARCH64_CTR_EL0_IMINLINE_SHIFT 0
#define AARCH64_CTR_EL0_IMINLINE_MASK 0xfU
#define AARCH64_CTR_EL0_IMINLINE_GET( _reg ) \
  ( ( ( _reg ) >> 0 ) & 0xfU )

/* L1Ip: level 1 instruction cache indexing/tagging policy */
#define AARCH64_CTR_EL0_L1IP( _val ) ( ( _val ) << 14 )
#define AARCH64_CTR_EL0_L1IP_SHIFT 14
#define AARCH64_CTR_EL0_L1IP_MASK 0xc000U
#define AARCH64_CTR_EL0_L1IP_GET( _reg ) \
  ( ( ( _reg ) >> 14 ) & 0x3U )

/* DminLine: log2(words in smallest data cache line) */
#define AARCH64_CTR_EL0_DMINLINE( _val ) ( ( _val ) << 16 )
#define AARCH64_CTR_EL0_DMINLINE_SHIFT 16
#define AARCH64_CTR_EL0_DMINLINE_MASK 0xf0000U
#define AARCH64_CTR_EL0_DMINLINE_GET( _reg ) \
  ( ( ( _reg ) >> 16 ) & 0xfU )

/* ERG: exclusives reservation granule */
#define AARCH64_CTR_EL0_ERG( _val ) ( ( _val ) << 20 )
#define AARCH64_CTR_EL0_ERG_SHIFT 20
#define AARCH64_CTR_EL0_ERG_MASK 0xf00000U
#define AARCH64_CTR_EL0_ERG_GET( _reg ) \
  ( ( ( _reg ) >> 20 ) & 0xfU )

/* CWG: cache writeback granule */
#define AARCH64_CTR_EL0_CWG( _val ) ( ( _val ) << 24 )
#define AARCH64_CTR_EL0_CWG_SHIFT 24
#define AARCH64_CTR_EL0_CWG_MASK 0xf000000U
#define AARCH64_CTR_EL0_CWG_GET( _reg ) \
  ( ( ( _reg ) >> 24 ) & 0xfU )

#define AARCH64_CTR_EL0_IDC 0x10000000U

#define AARCH64_CTR_EL0_DIC 0x20000000U

/*
 * TMinLine occupies bits [37:32].  Widen the operand before shifting: a
 * shift by 32 of a 32-bit value is undefined behavior in C (C11 6.5.7).
 * The mask was already 64-bit (0x3f00000000ULL); the cast makes the
 * setter consistent with it.
 */
#define AARCH64_CTR_EL0_TMINLINE( _val ) ( ( uint64_t ) ( _val ) << 32 )
#define AARCH64_CTR_EL0_TMINLINE_SHIFT 32
#define AARCH64_CTR_EL0_TMINLINE_MASK 0x3f00000000ULL
#define AARCH64_CTR_EL0_TMINLINE_GET( _reg ) \
  ( ( ( _reg ) >> 32 ) & 0x3fULL )

/* Read-only identification register. */
static inline uint64_t _AArch64_Read_ctr_el0( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, CTR_EL0" : "=&r" ( value ) : : "memory"
  );

  return value;
}
880
/* DACR32_EL2, Domain Access Control Register */

/* AArch32 compatibility register, accessible from AArch64 EL2. */
static inline uint64_t _AArch64_Read_dacr32_el2( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, DACR32_EL2" : "=&r" ( value ) : : "memory"
  );

  return value;
}

static inline void _AArch64_Write_dacr32_el2( uint64_t value )
{
  __asm__ volatile (
    "msr DACR32_EL2, %0" : : "r" ( value ) : "memory"
  );
}

/* DCZID_EL0, Data Cache Zero ID Register */

/* BS: log2(words zeroed by DC ZVA) */
#define AARCH64_DCZID_EL0_BS( _val ) ( ( _val ) << 0 )
#define AARCH64_DCZID_EL0_BS_SHIFT 0
#define AARCH64_DCZID_EL0_BS_MASK 0xfU
#define AARCH64_DCZID_EL0_BS_GET( _reg ) \
  ( ( ( _reg ) >> 0 ) & 0xfU )

/* DZP: DC ZVA prohibited when set */
#define AARCH64_DCZID_EL0_DZP 0x10U

/* Read-only identification register. */
static inline uint64_t _AArch64_Read_dczid_el0( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, DCZID_EL0" : "=&r" ( value ) : : "memory"
  );

  return value;
}
921
922/* ESR_EL1, Exception Syndrome Register (EL1) */
923
924#define AARCH64_ESR_EL1_DIRECTION 0x1U
925
926#define AARCH64_ESR_EL1_ERETA 0x1U
927
928#define AARCH64_ESR_EL1_IOF 0x1U
929
930#define AARCH64_ESR_EL1_TI 0x1U
931
932#define AARCH64_ESR_EL1_BTYPE( _val ) ( ( _val ) << 0 )
933#define AARCH64_ESR_EL1_BTYPE_SHIFT 0
934#define AARCH64_ESR_EL1_BTYPE_MASK 0x3U
935#define AARCH64_ESR_EL1_BTYPE_GET( _reg ) \
936 ( ( ( _reg ) >> 0 ) & 0x3U )
937
938#define AARCH64_ESR_EL1_DFSC( _val ) ( ( _val ) << 0 )
939#define AARCH64_ESR_EL1_DFSC_SHIFT 0
940#define AARCH64_ESR_EL1_DFSC_MASK 0x3fU
941#define AARCH64_ESR_EL1_DFSC_GET( _reg ) \
942 ( ( ( _reg ) >> 0 ) & 0x3fU )
943
944#define AARCH64_ESR_EL1_IFSC( _val ) ( ( _val ) << 0 )
945#define AARCH64_ESR_EL1_IFSC_SHIFT 0
946#define AARCH64_ESR_EL1_IFSC_MASK 0x3fU
947#define AARCH64_ESR_EL1_IFSC_GET( _reg ) \
948 ( ( ( _reg ) >> 0 ) & 0x3fU )
949
950#define AARCH64_ESR_EL1_COMMENT( _val ) ( ( _val ) << 0 )
951#define AARCH64_ESR_EL1_COMMENT_SHIFT 0
952#define AARCH64_ESR_EL1_COMMENT_MASK 0xffffU
953#define AARCH64_ESR_EL1_COMMENT_GET( _reg ) \
954 ( ( ( _reg ) >> 0 ) & 0xffffU )
955
956#define AARCH64_ESR_EL1_IMM16( _val ) ( ( _val ) << 0 )
957#define AARCH64_ESR_EL1_IMM16_SHIFT 0
958#define AARCH64_ESR_EL1_IMM16_MASK 0xffffU
959#define AARCH64_ESR_EL1_IMM16_GET( _reg ) \
960 ( ( ( _reg ) >> 0 ) & 0xffffU )
961
962#define AARCH64_ESR_EL1_ISS( _val ) ( ( _val ) << 0 )
963#define AARCH64_ESR_EL1_ISS_SHIFT 0
964#define AARCH64_ESR_EL1_ISS_MASK 0x1ffffffU
965#define AARCH64_ESR_EL1_ISS_GET( _reg ) \
966 ( ( ( _reg ) >> 0 ) & 0x1ffffffU )
967
968#define AARCH64_ESR_EL1_DZF 0x2U
969
970#define AARCH64_ESR_EL1_ERET 0x2U
971
972#define AARCH64_ESR_EL1_AM( _val ) ( ( _val ) << 1 )
973#define AARCH64_ESR_EL1_AM_SHIFT 1
974#define AARCH64_ESR_EL1_AM_MASK 0xeU
975#define AARCH64_ESR_EL1_AM_GET( _reg ) \
976 ( ( ( _reg ) >> 1 ) & 0x7U )
977
978#define AARCH64_ESR_EL1_CRM( _val ) ( ( _val ) << 1 )
979#define AARCH64_ESR_EL1_CRM_SHIFT 1
980#define AARCH64_ESR_EL1_CRM_MASK 0x1eU
981#define AARCH64_ESR_EL1_CRM_GET( _reg ) \
982 ( ( ( _reg ) >> 1 ) & 0xfU )
983
984#define AARCH64_ESR_EL1_OFF 0x4U
985
986#define AARCH64_ESR_EL1_UFF 0x8U
987
988#define AARCH64_ESR_EL1_IXF 0x10U
989
990#define AARCH64_ESR_EL1_OFFSET 0x10U
991
992#define AARCH64_ESR_EL1_RN( _val ) ( ( _val ) << 5 )
993#define AARCH64_ESR_EL1_RN_SHIFT 5
994#define AARCH64_ESR_EL1_RN_MASK 0x3e0U
995#define AARCH64_ESR_EL1_RN_GET( _reg ) \
996 ( ( ( _reg ) >> 5 ) & 0x1fU )
997
998#define AARCH64_ESR_EL1_RT( _val ) ( ( _val ) << 5 )
999#define AARCH64_ESR_EL1_RT_SHIFT 5
1000#define AARCH64_ESR_EL1_RT_MASK 0x3e0U
1001#define AARCH64_ESR_EL1_RT_GET( _reg ) \
1002 ( ( ( _reg ) >> 5 ) & 0x1fU )
1003
1004#define AARCH64_ESR_EL1_EX 0x40U
1005
1006#define AARCH64_ESR_EL1_WNR 0x40U
1007
1008#define AARCH64_ESR_EL1_IDF 0x80U
1009
1010#define AARCH64_ESR_EL1_S1PTW 0x80U
1011
1012#define AARCH64_ESR_EL1_CM 0x100U
1013
1014#define AARCH64_ESR_EL1_VECITR( _val ) ( ( _val ) << 8 )
1015#define AARCH64_ESR_EL1_VECITR_SHIFT 8
1016#define AARCH64_ESR_EL1_VECITR_MASK 0x700U
1017#define AARCH64_ESR_EL1_VECITR_GET( _reg ) \
1018 ( ( ( _reg ) >> 8 ) & 0x7U )
1019
1020#define AARCH64_ESR_EL1_EA 0x200U
1021
1022#define AARCH64_ESR_EL1_FNV 0x400U
1023
1024#define AARCH64_ESR_EL1_AET( _val ) ( ( _val ) << 10 )
1025#define AARCH64_ESR_EL1_AET_SHIFT 10
1026#define AARCH64_ESR_EL1_AET_MASK 0x1c00U
1027#define AARCH64_ESR_EL1_AET_GET( _reg ) \
1028 ( ( ( _reg ) >> 10 ) & 0x7U )
1029
1030#define AARCH64_ESR_EL1_CRN( _val ) ( ( _val ) << 10 )
1031#define AARCH64_ESR_EL1_CRN_SHIFT 10
1032#define AARCH64_ESR_EL1_CRN_MASK 0x3c00U
1033#define AARCH64_ESR_EL1_CRN_GET( _reg ) \
1034 ( ( ( _reg ) >> 10 ) & 0xfU )
1035
1036#define AARCH64_ESR_EL1_RT2( _val ) ( ( _val ) << 10 )
1037#define AARCH64_ESR_EL1_RT2_SHIFT 10
1038#define AARCH64_ESR_EL1_RT2_MASK 0x7c00U
1039#define AARCH64_ESR_EL1_RT2_GET( _reg ) \
1040 ( ( ( _reg ) >> 10 ) & 0x1fU )
1041
1042#define AARCH64_ESR_EL1_SET( _val ) ( ( _val ) << 11 )
1043#define AARCH64_ESR_EL1_SET_SHIFT 11
1044#define AARCH64_ESR_EL1_SET_MASK 0x1800U
1045#define AARCH64_ESR_EL1_SET_GET( _reg ) \
1046 ( ( ( _reg ) >> 11 ) & 0x3U )
1047
1048#define AARCH64_ESR_EL1_IMM8( _val ) ( ( _val ) << 12 )
1049#define AARCH64_ESR_EL1_IMM8_SHIFT 12
1050#define AARCH64_ESR_EL1_IMM8_MASK 0xff000U
1051#define AARCH64_ESR_EL1_IMM8_GET( _reg ) \
1052 ( ( ( _reg ) >> 12 ) & 0xffU )
1053
1054#define AARCH64_ESR_EL1_IESB 0x2000U
1055
1056#define AARCH64_ESR_EL1_VNCR 0x2000U
1057
1058#define AARCH64_ESR_EL1_AR 0x4000U
1059
1060#define AARCH64_ESR_EL1_OP1( _val ) ( ( _val ) << 14 )
1061#define AARCH64_ESR_EL1_OP1_SHIFT 14
1062#define AARCH64_ESR_EL1_OP1_MASK 0x1c000U
1063#define AARCH64_ESR_EL1_OP1_GET( _reg ) \
1064 ( ( ( _reg ) >> 14 ) & 0x7U )
1065
1066#define AARCH64_ESR_EL1_OPC1_0( _val ) ( ( _val ) << 14 )
1067#define AARCH64_ESR_EL1_OPC1_SHIFT_0 14
1068#define AARCH64_ESR_EL1_OPC1_MASK_0 0x1c000U
1069#define AARCH64_ESR_EL1_OPC1_GET_0( _reg ) \
1070 ( ( ( _reg ) >> 14 ) & 0x7U )
1071
1072#define AARCH64_ESR_EL1_SF 0x8000U
1073
1074#define AARCH64_ESR_EL1_OPC1_1( _val ) ( ( _val ) << 16 )
1075#define AARCH64_ESR_EL1_OPC1_SHIFT_1 16
1076#define AARCH64_ESR_EL1_OPC1_MASK_1 0xf0000U
1077#define AARCH64_ESR_EL1_OPC1_GET_1( _reg ) \
1078 ( ( ( _reg ) >> 16 ) & 0xfU )
1079
1080#define AARCH64_ESR_EL1_SRT( _val ) ( ( _val ) << 16 )
1081#define AARCH64_ESR_EL1_SRT_SHIFT 16
1082#define AARCH64_ESR_EL1_SRT_MASK 0x1f0000U
1083#define AARCH64_ESR_EL1_SRT_GET( _reg ) \
1084 ( ( ( _reg ) >> 16 ) & 0x1fU )
1085
1086#define AARCH64_ESR_EL1_OP2( _val ) ( ( _val ) << 17 )
1087#define AARCH64_ESR_EL1_OP2_SHIFT 17
1088#define AARCH64_ESR_EL1_OP2_MASK 0xe0000U
1089#define AARCH64_ESR_EL1_OP2_GET( _reg ) \
1090 ( ( ( _reg ) >> 17 ) & 0x7U )
1091
1092#define AARCH64_ESR_EL1_OPC2( _val ) ( ( _val ) << 17 )
1093#define AARCH64_ESR_EL1_OPC2_SHIFT 17
1094#define AARCH64_ESR_EL1_OPC2_MASK 0xe0000U
1095#define AARCH64_ESR_EL1_OPC2_GET( _reg ) \
1096 ( ( ( _reg ) >> 17 ) & 0x7U )
1097
1098#define AARCH64_ESR_EL1_CCKNOWNPASS 0x80000U
1099
1100#define AARCH64_ESR_EL1_OP0( _val ) ( ( _val ) << 20 )
1101#define AARCH64_ESR_EL1_OP0_SHIFT 20
1102#define AARCH64_ESR_EL1_OP0_MASK 0x300000U
1103#define AARCH64_ESR_EL1_OP0_GET( _reg ) \
1104 ( ( ( _reg ) >> 20 ) & 0x3U )
1105
1106#define AARCH64_ESR_EL1_COND( _val ) ( ( _val ) << 20 )
1107#define AARCH64_ESR_EL1_COND_SHIFT 20
1108#define AARCH64_ESR_EL1_COND_MASK 0xf00000U
1109#define AARCH64_ESR_EL1_COND_GET( _reg ) \
1110 ( ( ( _reg ) >> 20 ) & 0xfU )
1111
1112#define AARCH64_ESR_EL1_SSE 0x200000U
1113
1114#define AARCH64_ESR_EL1_SAS( _val ) ( ( _val ) << 22 )
1115#define AARCH64_ESR_EL1_SAS_SHIFT 22
1116#define AARCH64_ESR_EL1_SAS_MASK 0xc00000U
1117#define AARCH64_ESR_EL1_SAS_GET( _reg ) \
1118 ( ( ( _reg ) >> 22 ) & 0x3U )
1119
1120#define AARCH64_ESR_EL1_TFV 0x800000U
1121
1122#define AARCH64_ESR_EL1_CV 0x1000000U
1123
1124#define AARCH64_ESR_EL1_IDS 0x1000000U
1125
1126#define AARCH64_ESR_EL1_ISV 0x1000000U
1127
1128#define AARCH64_ESR_EL1_IL 0x2000000U
1129
1130#define AARCH64_ESR_EL1_EC( _val ) ( ( _val ) << 26 )
1131#define AARCH64_ESR_EL1_EC_SHIFT 26
1132#define AARCH64_ESR_EL1_EC_MASK 0xfc000000U
1133#define AARCH64_ESR_EL1_EC_GET( _reg ) \
1134 ( ( ( _reg ) >> 26 ) & 0x3fU )
1135
/* Read ESR_EL1, the Exception Syndrome Register (EL1). */
static inline uint64_t _AArch64_Read_esr_el1( void )
{
  uint64_t value;

  /* "memory" clobber orders the read against surrounding accesses */
  __asm__ volatile (
    "mrs %0, ESR_EL1" : "=&r" ( value ) : : "memory"
  );

  return value;
}
1146
/* Write ESR_EL1, the Exception Syndrome Register (EL1). */
static inline void _AArch64_Write_esr_el1( uint64_t value )
{
  __asm__ volatile (
    "msr ESR_EL1, %0" : : "r" ( value ) : "memory"
  );
}
1153
/* ESR_EL2, Exception Syndrome Register (EL2) */

/*
 * Field macros.  Several positions carry different names depending on the
 * exception class encoded in EC, hence multiple macros with equal values
 * (e.g. DIRECTION/ERETA/IOF/TI all at bit 0).
 */
#define AARCH64_ESR_EL2_DIRECTION 0x1U

#define AARCH64_ESR_EL2_ERETA 0x1U

#define AARCH64_ESR_EL2_IOF 0x1U

#define AARCH64_ESR_EL2_TI 0x1U

#define AARCH64_ESR_EL2_BTYPE( _val ) ( ( _val ) << 0 )
#define AARCH64_ESR_EL2_BTYPE_SHIFT 0
#define AARCH64_ESR_EL2_BTYPE_MASK 0x3U
#define AARCH64_ESR_EL2_BTYPE_GET( _reg ) \
  ( ( ( _reg ) >> 0 ) & 0x3U )

#define AARCH64_ESR_EL2_DFSC( _val ) ( ( _val ) << 0 )
#define AARCH64_ESR_EL2_DFSC_SHIFT 0
#define AARCH64_ESR_EL2_DFSC_MASK 0x3fU
#define AARCH64_ESR_EL2_DFSC_GET( _reg ) \
  ( ( ( _reg ) >> 0 ) & 0x3fU )

#define AARCH64_ESR_EL2_IFSC( _val ) ( ( _val ) << 0 )
#define AARCH64_ESR_EL2_IFSC_SHIFT 0
#define AARCH64_ESR_EL2_IFSC_MASK 0x3fU
#define AARCH64_ESR_EL2_IFSC_GET( _reg ) \
  ( ( ( _reg ) >> 0 ) & 0x3fU )

#define AARCH64_ESR_EL2_COMMENT( _val ) ( ( _val ) << 0 )
#define AARCH64_ESR_EL2_COMMENT_SHIFT 0
#define AARCH64_ESR_EL2_COMMENT_MASK 0xffffU
#define AARCH64_ESR_EL2_COMMENT_GET( _reg ) \
  ( ( ( _reg ) >> 0 ) & 0xffffU )

#define AARCH64_ESR_EL2_IMM16( _val ) ( ( _val ) << 0 )
#define AARCH64_ESR_EL2_IMM16_SHIFT 0
#define AARCH64_ESR_EL2_IMM16_MASK 0xffffU
#define AARCH64_ESR_EL2_IMM16_GET( _reg ) \
  ( ( ( _reg ) >> 0 ) & 0xffffU )

#define AARCH64_ESR_EL2_ISS( _val ) ( ( _val ) << 0 )
#define AARCH64_ESR_EL2_ISS_SHIFT 0
#define AARCH64_ESR_EL2_ISS_MASK 0x1ffffffU
#define AARCH64_ESR_EL2_ISS_GET( _reg ) \
  ( ( ( _reg ) >> 0 ) & 0x1ffffffU )

#define AARCH64_ESR_EL2_DZF 0x2U

#define AARCH64_ESR_EL2_ERET 0x2U

#define AARCH64_ESR_EL2_AM( _val ) ( ( _val ) << 1 )
#define AARCH64_ESR_EL2_AM_SHIFT 1
#define AARCH64_ESR_EL2_AM_MASK 0xeU
#define AARCH64_ESR_EL2_AM_GET( _reg ) \
  ( ( ( _reg ) >> 1 ) & 0x7U )

#define AARCH64_ESR_EL2_CRM( _val ) ( ( _val ) << 1 )
#define AARCH64_ESR_EL2_CRM_SHIFT 1
#define AARCH64_ESR_EL2_CRM_MASK 0x1eU
#define AARCH64_ESR_EL2_CRM_GET( _reg ) \
  ( ( ( _reg ) >> 1 ) & 0xfU )

#define AARCH64_ESR_EL2_OFF 0x4U

#define AARCH64_ESR_EL2_UFF 0x8U

#define AARCH64_ESR_EL2_IXF 0x10U

#define AARCH64_ESR_EL2_OFFSET 0x10U

#define AARCH64_ESR_EL2_RN( _val ) ( ( _val ) << 5 )
#define AARCH64_ESR_EL2_RN_SHIFT 5
#define AARCH64_ESR_EL2_RN_MASK 0x3e0U
#define AARCH64_ESR_EL2_RN_GET( _reg ) \
  ( ( ( _reg ) >> 5 ) & 0x1fU )

#define AARCH64_ESR_EL2_RT( _val ) ( ( _val ) << 5 )
#define AARCH64_ESR_EL2_RT_SHIFT 5
#define AARCH64_ESR_EL2_RT_MASK 0x3e0U
#define AARCH64_ESR_EL2_RT_GET( _reg ) \
  ( ( ( _reg ) >> 5 ) & 0x1fU )

#define AARCH64_ESR_EL2_EX 0x40U

#define AARCH64_ESR_EL2_WNR 0x40U

#define AARCH64_ESR_EL2_IDF 0x80U

#define AARCH64_ESR_EL2_S1PTW 0x80U

#define AARCH64_ESR_EL2_CM 0x100U

#define AARCH64_ESR_EL2_VECITR( _val ) ( ( _val ) << 8 )
#define AARCH64_ESR_EL2_VECITR_SHIFT 8
#define AARCH64_ESR_EL2_VECITR_MASK 0x700U
#define AARCH64_ESR_EL2_VECITR_GET( _reg ) \
  ( ( ( _reg ) >> 8 ) & 0x7U )

#define AARCH64_ESR_EL2_EA 0x200U

#define AARCH64_ESR_EL2_FNV 0x400U

#define AARCH64_ESR_EL2_AET( _val ) ( ( _val ) << 10 )
#define AARCH64_ESR_EL2_AET_SHIFT 10
#define AARCH64_ESR_EL2_AET_MASK 0x1c00U
#define AARCH64_ESR_EL2_AET_GET( _reg ) \
  ( ( ( _reg ) >> 10 ) & 0x7U )

#define AARCH64_ESR_EL2_CRN( _val ) ( ( _val ) << 10 )
#define AARCH64_ESR_EL2_CRN_SHIFT 10
#define AARCH64_ESR_EL2_CRN_MASK 0x3c00U
#define AARCH64_ESR_EL2_CRN_GET( _reg ) \
  ( ( ( _reg ) >> 10 ) & 0xfU )

#define AARCH64_ESR_EL2_RT2( _val ) ( ( _val ) << 10 )
#define AARCH64_ESR_EL2_RT2_SHIFT 10
#define AARCH64_ESR_EL2_RT2_MASK 0x7c00U
#define AARCH64_ESR_EL2_RT2_GET( _reg ) \
  ( ( ( _reg ) >> 10 ) & 0x1fU )

#define AARCH64_ESR_EL2_SET( _val ) ( ( _val ) << 11 )
#define AARCH64_ESR_EL2_SET_SHIFT 11
#define AARCH64_ESR_EL2_SET_MASK 0x1800U
#define AARCH64_ESR_EL2_SET_GET( _reg ) \
  ( ( ( _reg ) >> 11 ) & 0x3U )

#define AARCH64_ESR_EL2_IMM8( _val ) ( ( _val ) << 12 )
#define AARCH64_ESR_EL2_IMM8_SHIFT 12
#define AARCH64_ESR_EL2_IMM8_MASK 0xff000U
#define AARCH64_ESR_EL2_IMM8_GET( _reg ) \
  ( ( ( _reg ) >> 12 ) & 0xffU )

#define AARCH64_ESR_EL2_IESB 0x2000U

#define AARCH64_ESR_EL2_VNCR 0x2000U

#define AARCH64_ESR_EL2_AR 0x4000U

#define AARCH64_ESR_EL2_OP1( _val ) ( ( _val ) << 14 )
#define AARCH64_ESR_EL2_OP1_SHIFT 14
#define AARCH64_ESR_EL2_OP1_MASK 0x1c000U
#define AARCH64_ESR_EL2_OP1_GET( _reg ) \
  ( ( ( _reg ) >> 14 ) & 0x7U )

/* OPC1 has two encodings (_0 at bit 14, _1 at bit 16), class dependent. */
#define AARCH64_ESR_EL2_OPC1_0( _val ) ( ( _val ) << 14 )
#define AARCH64_ESR_EL2_OPC1_SHIFT_0 14
#define AARCH64_ESR_EL2_OPC1_MASK_0 0x1c000U
#define AARCH64_ESR_EL2_OPC1_GET_0( _reg ) \
  ( ( ( _reg ) >> 14 ) & 0x7U )

#define AARCH64_ESR_EL2_SF 0x8000U

#define AARCH64_ESR_EL2_OPC1_1( _val ) ( ( _val ) << 16 )
#define AARCH64_ESR_EL2_OPC1_SHIFT_1 16
#define AARCH64_ESR_EL2_OPC1_MASK_1 0xf0000U
#define AARCH64_ESR_EL2_OPC1_GET_1( _reg ) \
  ( ( ( _reg ) >> 16 ) & 0xfU )

#define AARCH64_ESR_EL2_SRT( _val ) ( ( _val ) << 16 )
#define AARCH64_ESR_EL2_SRT_SHIFT 16
#define AARCH64_ESR_EL2_SRT_MASK 0x1f0000U
#define AARCH64_ESR_EL2_SRT_GET( _reg ) \
  ( ( ( _reg ) >> 16 ) & 0x1fU )

#define AARCH64_ESR_EL2_OP2( _val ) ( ( _val ) << 17 )
#define AARCH64_ESR_EL2_OP2_SHIFT 17
#define AARCH64_ESR_EL2_OP2_MASK 0xe0000U
#define AARCH64_ESR_EL2_OP2_GET( _reg ) \
  ( ( ( _reg ) >> 17 ) & 0x7U )

#define AARCH64_ESR_EL2_OPC2( _val ) ( ( _val ) << 17 )
#define AARCH64_ESR_EL2_OPC2_SHIFT 17
#define AARCH64_ESR_EL2_OPC2_MASK 0xe0000U
#define AARCH64_ESR_EL2_OPC2_GET( _reg ) \
  ( ( ( _reg ) >> 17 ) & 0x7U )

#define AARCH64_ESR_EL2_CCKNOWNPASS 0x80000U

#define AARCH64_ESR_EL2_OP0( _val ) ( ( _val ) << 20 )
#define AARCH64_ESR_EL2_OP0_SHIFT 20
#define AARCH64_ESR_EL2_OP0_MASK 0x300000U
#define AARCH64_ESR_EL2_OP0_GET( _reg ) \
  ( ( ( _reg ) >> 20 ) & 0x3U )

#define AARCH64_ESR_EL2_COND( _val ) ( ( _val ) << 20 )
#define AARCH64_ESR_EL2_COND_SHIFT 20
#define AARCH64_ESR_EL2_COND_MASK 0xf00000U
#define AARCH64_ESR_EL2_COND_GET( _reg ) \
  ( ( ( _reg ) >> 20 ) & 0xfU )

#define AARCH64_ESR_EL2_SSE 0x200000U

#define AARCH64_ESR_EL2_SAS( _val ) ( ( _val ) << 22 )
#define AARCH64_ESR_EL2_SAS_SHIFT 22
#define AARCH64_ESR_EL2_SAS_MASK 0xc00000U
#define AARCH64_ESR_EL2_SAS_GET( _reg ) \
  ( ( ( _reg ) >> 22 ) & 0x3U )

#define AARCH64_ESR_EL2_TFV 0x800000U

#define AARCH64_ESR_EL2_CV 0x1000000U

#define AARCH64_ESR_EL2_IDS 0x1000000U

#define AARCH64_ESR_EL2_ISV 0x1000000U

#define AARCH64_ESR_EL2_IL 0x2000000U

#define AARCH64_ESR_EL2_EC( _val ) ( ( _val ) << 26 )
#define AARCH64_ESR_EL2_EC_SHIFT 26
#define AARCH64_ESR_EL2_EC_MASK 0xfc000000U
#define AARCH64_ESR_EL2_EC_GET( _reg ) \
  ( ( ( _reg ) >> 26 ) & 0x3fU )
1367
/* Read ESR_EL2, the Exception Syndrome Register (EL2). */
static inline uint64_t _AArch64_Read_esr_el2( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, ESR_EL2" : "=&r" ( value ) : : "memory"
  );

  return value;
}
1378
/* Write ESR_EL2, the Exception Syndrome Register (EL2). */
static inline void _AArch64_Write_esr_el2( uint64_t value )
{
  __asm__ volatile (
    "msr ESR_EL2, %0" : : "r" ( value ) : "memory"
  );
}
1385
/* ESR_EL3, Exception Syndrome Register (EL3) */

/*
 * Field macros; same layout as ESR_EL1/ESR_EL2.  Positions shared by
 * several exception classes get one macro per class-specific name.
 */
#define AARCH64_ESR_EL3_DIRECTION 0x1U

#define AARCH64_ESR_EL3_ERETA 0x1U

#define AARCH64_ESR_EL3_IOF 0x1U

#define AARCH64_ESR_EL3_TI 0x1U

#define AARCH64_ESR_EL3_BTYPE( _val ) ( ( _val ) << 0 )
#define AARCH64_ESR_EL3_BTYPE_SHIFT 0
#define AARCH64_ESR_EL3_BTYPE_MASK 0x3U
#define AARCH64_ESR_EL3_BTYPE_GET( _reg ) \
  ( ( ( _reg ) >> 0 ) & 0x3U )

#define AARCH64_ESR_EL3_DFSC( _val ) ( ( _val ) << 0 )
#define AARCH64_ESR_EL3_DFSC_SHIFT 0
#define AARCH64_ESR_EL3_DFSC_MASK 0x3fU
#define AARCH64_ESR_EL3_DFSC_GET( _reg ) \
  ( ( ( _reg ) >> 0 ) & 0x3fU )

#define AARCH64_ESR_EL3_IFSC( _val ) ( ( _val ) << 0 )
#define AARCH64_ESR_EL3_IFSC_SHIFT 0
#define AARCH64_ESR_EL3_IFSC_MASK 0x3fU
#define AARCH64_ESR_EL3_IFSC_GET( _reg ) \
  ( ( ( _reg ) >> 0 ) & 0x3fU )

#define AARCH64_ESR_EL3_COMMENT( _val ) ( ( _val ) << 0 )
#define AARCH64_ESR_EL3_COMMENT_SHIFT 0
#define AARCH64_ESR_EL3_COMMENT_MASK 0xffffU
#define AARCH64_ESR_EL3_COMMENT_GET( _reg ) \
  ( ( ( _reg ) >> 0 ) & 0xffffU )

#define AARCH64_ESR_EL3_IMM16( _val ) ( ( _val ) << 0 )
#define AARCH64_ESR_EL3_IMM16_SHIFT 0
#define AARCH64_ESR_EL3_IMM16_MASK 0xffffU
#define AARCH64_ESR_EL3_IMM16_GET( _reg ) \
  ( ( ( _reg ) >> 0 ) & 0xffffU )

#define AARCH64_ESR_EL3_ISS( _val ) ( ( _val ) << 0 )
#define AARCH64_ESR_EL3_ISS_SHIFT 0
#define AARCH64_ESR_EL3_ISS_MASK 0x1ffffffU
#define AARCH64_ESR_EL3_ISS_GET( _reg ) \
  ( ( ( _reg ) >> 0 ) & 0x1ffffffU )

#define AARCH64_ESR_EL3_DZF 0x2U

#define AARCH64_ESR_EL3_ERET 0x2U

#define AARCH64_ESR_EL3_AM( _val ) ( ( _val ) << 1 )
#define AARCH64_ESR_EL3_AM_SHIFT 1
#define AARCH64_ESR_EL3_AM_MASK 0xeU
#define AARCH64_ESR_EL3_AM_GET( _reg ) \
  ( ( ( _reg ) >> 1 ) & 0x7U )

#define AARCH64_ESR_EL3_CRM( _val ) ( ( _val ) << 1 )
#define AARCH64_ESR_EL3_CRM_SHIFT 1
#define AARCH64_ESR_EL3_CRM_MASK 0x1eU
#define AARCH64_ESR_EL3_CRM_GET( _reg ) \
  ( ( ( _reg ) >> 1 ) & 0xfU )

#define AARCH64_ESR_EL3_OFF 0x4U

#define AARCH64_ESR_EL3_UFF 0x8U

#define AARCH64_ESR_EL3_IXF 0x10U

#define AARCH64_ESR_EL3_OFFSET 0x10U

#define AARCH64_ESR_EL3_RN( _val ) ( ( _val ) << 5 )
#define AARCH64_ESR_EL3_RN_SHIFT 5
#define AARCH64_ESR_EL3_RN_MASK 0x3e0U
#define AARCH64_ESR_EL3_RN_GET( _reg ) \
  ( ( ( _reg ) >> 5 ) & 0x1fU )

#define AARCH64_ESR_EL3_RT( _val ) ( ( _val ) << 5 )
#define AARCH64_ESR_EL3_RT_SHIFT 5
#define AARCH64_ESR_EL3_RT_MASK 0x3e0U
#define AARCH64_ESR_EL3_RT_GET( _reg ) \
  ( ( ( _reg ) >> 5 ) & 0x1fU )

#define AARCH64_ESR_EL3_EX 0x40U

#define AARCH64_ESR_EL3_WNR 0x40U

#define AARCH64_ESR_EL3_IDF 0x80U

#define AARCH64_ESR_EL3_S1PTW 0x80U

#define AARCH64_ESR_EL3_CM 0x100U

#define AARCH64_ESR_EL3_VECITR( _val ) ( ( _val ) << 8 )
#define AARCH64_ESR_EL3_VECITR_SHIFT 8
#define AARCH64_ESR_EL3_VECITR_MASK 0x700U
#define AARCH64_ESR_EL3_VECITR_GET( _reg ) \
  ( ( ( _reg ) >> 8 ) & 0x7U )

#define AARCH64_ESR_EL3_EA 0x200U

#define AARCH64_ESR_EL3_FNV 0x400U

#define AARCH64_ESR_EL3_AET( _val ) ( ( _val ) << 10 )
#define AARCH64_ESR_EL3_AET_SHIFT 10
#define AARCH64_ESR_EL3_AET_MASK 0x1c00U
#define AARCH64_ESR_EL3_AET_GET( _reg ) \
  ( ( ( _reg ) >> 10 ) & 0x7U )

#define AARCH64_ESR_EL3_CRN( _val ) ( ( _val ) << 10 )
#define AARCH64_ESR_EL3_CRN_SHIFT 10
#define AARCH64_ESR_EL3_CRN_MASK 0x3c00U
#define AARCH64_ESR_EL3_CRN_GET( _reg ) \
  ( ( ( _reg ) >> 10 ) & 0xfU )

#define AARCH64_ESR_EL3_RT2( _val ) ( ( _val ) << 10 )
#define AARCH64_ESR_EL3_RT2_SHIFT 10
#define AARCH64_ESR_EL3_RT2_MASK 0x7c00U
#define AARCH64_ESR_EL3_RT2_GET( _reg ) \
  ( ( ( _reg ) >> 10 ) & 0x1fU )

#define AARCH64_ESR_EL3_SET( _val ) ( ( _val ) << 11 )
#define AARCH64_ESR_EL3_SET_SHIFT 11
#define AARCH64_ESR_EL3_SET_MASK 0x1800U
#define AARCH64_ESR_EL3_SET_GET( _reg ) \
  ( ( ( _reg ) >> 11 ) & 0x3U )

#define AARCH64_ESR_EL3_IMM8( _val ) ( ( _val ) << 12 )
#define AARCH64_ESR_EL3_IMM8_SHIFT 12
#define AARCH64_ESR_EL3_IMM8_MASK 0xff000U
#define AARCH64_ESR_EL3_IMM8_GET( _reg ) \
  ( ( ( _reg ) >> 12 ) & 0xffU )

#define AARCH64_ESR_EL3_IESB 0x2000U

#define AARCH64_ESR_EL3_VNCR 0x2000U

#define AARCH64_ESR_EL3_AR 0x4000U

#define AARCH64_ESR_EL3_OP1( _val ) ( ( _val ) << 14 )
#define AARCH64_ESR_EL3_OP1_SHIFT 14
#define AARCH64_ESR_EL3_OP1_MASK 0x1c000U
#define AARCH64_ESR_EL3_OP1_GET( _reg ) \
  ( ( ( _reg ) >> 14 ) & 0x7U )

/* OPC1 has two encodings (_0 at bit 14, _1 at bit 16), class dependent. */
#define AARCH64_ESR_EL3_OPC1_0( _val ) ( ( _val ) << 14 )
#define AARCH64_ESR_EL3_OPC1_SHIFT_0 14
#define AARCH64_ESR_EL3_OPC1_MASK_0 0x1c000U
#define AARCH64_ESR_EL3_OPC1_GET_0( _reg ) \
  ( ( ( _reg ) >> 14 ) & 0x7U )

#define AARCH64_ESR_EL3_SF 0x8000U

#define AARCH64_ESR_EL3_OPC1_1( _val ) ( ( _val ) << 16 )
#define AARCH64_ESR_EL3_OPC1_SHIFT_1 16
#define AARCH64_ESR_EL3_OPC1_MASK_1 0xf0000U
#define AARCH64_ESR_EL3_OPC1_GET_1( _reg ) \
  ( ( ( _reg ) >> 16 ) & 0xfU )

#define AARCH64_ESR_EL3_SRT( _val ) ( ( _val ) << 16 )
#define AARCH64_ESR_EL3_SRT_SHIFT 16
#define AARCH64_ESR_EL3_SRT_MASK 0x1f0000U
#define AARCH64_ESR_EL3_SRT_GET( _reg ) \
  ( ( ( _reg ) >> 16 ) & 0x1fU )

#define AARCH64_ESR_EL3_OP2( _val ) ( ( _val ) << 17 )
#define AARCH64_ESR_EL3_OP2_SHIFT 17
#define AARCH64_ESR_EL3_OP2_MASK 0xe0000U
#define AARCH64_ESR_EL3_OP2_GET( _reg ) \
  ( ( ( _reg ) >> 17 ) & 0x7U )

#define AARCH64_ESR_EL3_OPC2( _val ) ( ( _val ) << 17 )
#define AARCH64_ESR_EL3_OPC2_SHIFT 17
#define AARCH64_ESR_EL3_OPC2_MASK 0xe0000U
#define AARCH64_ESR_EL3_OPC2_GET( _reg ) \
  ( ( ( _reg ) >> 17 ) & 0x7U )

#define AARCH64_ESR_EL3_CCKNOWNPASS 0x80000U

#define AARCH64_ESR_EL3_OP0( _val ) ( ( _val ) << 20 )
#define AARCH64_ESR_EL3_OP0_SHIFT 20
#define AARCH64_ESR_EL3_OP0_MASK 0x300000U
#define AARCH64_ESR_EL3_OP0_GET( _reg ) \
  ( ( ( _reg ) >> 20 ) & 0x3U )

#define AARCH64_ESR_EL3_COND( _val ) ( ( _val ) << 20 )
#define AARCH64_ESR_EL3_COND_SHIFT 20
#define AARCH64_ESR_EL3_COND_MASK 0xf00000U
#define AARCH64_ESR_EL3_COND_GET( _reg ) \
  ( ( ( _reg ) >> 20 ) & 0xfU )

#define AARCH64_ESR_EL3_SSE 0x200000U

#define AARCH64_ESR_EL3_SAS( _val ) ( ( _val ) << 22 )
#define AARCH64_ESR_EL3_SAS_SHIFT 22
#define AARCH64_ESR_EL3_SAS_MASK 0xc00000U
#define AARCH64_ESR_EL3_SAS_GET( _reg ) \
  ( ( ( _reg ) >> 22 ) & 0x3U )

#define AARCH64_ESR_EL3_TFV 0x800000U

#define AARCH64_ESR_EL3_CV 0x1000000U

#define AARCH64_ESR_EL3_IDS 0x1000000U

#define AARCH64_ESR_EL3_ISV 0x1000000U

#define AARCH64_ESR_EL3_IL 0x2000000U

#define AARCH64_ESR_EL3_EC( _val ) ( ( _val ) << 26 )
#define AARCH64_ESR_EL3_EC_SHIFT 26
#define AARCH64_ESR_EL3_EC_MASK 0xfc000000U
#define AARCH64_ESR_EL3_EC_GET( _reg ) \
  ( ( ( _reg ) >> 26 ) & 0x3fU )
1599
/* Read ESR_EL3, the Exception Syndrome Register (EL3). */
static inline uint64_t _AArch64_Read_esr_el3( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, ESR_EL3" : "=&r" ( value ) : : "memory"
  );

  return value;
}
1610
/* Write ESR_EL3, the Exception Syndrome Register (EL3). */
static inline void _AArch64_Write_esr_el3( uint64_t value )
{
  __asm__ volatile (
    "msr ESR_EL3, %0" : : "r" ( value ) : "memory"
  );
}
1617
/* FAR_EL1, Fault Address Register (EL1) */

/* Read FAR_EL1. */
static inline uint64_t _AArch64_Read_far_el1( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, FAR_EL1" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* Write FAR_EL1. */
static inline void _AArch64_Write_far_el1( uint64_t value )
{
  __asm__ volatile (
    "msr FAR_EL1, %0" : : "r" ( value ) : "memory"
  );
}
1637
/* FAR_EL2, Fault Address Register (EL2) */

/* Read FAR_EL2. */
static inline uint64_t _AArch64_Read_far_el2( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, FAR_EL2" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* Write FAR_EL2. */
static inline void _AArch64_Write_far_el2( uint64_t value )
{
  __asm__ volatile (
    "msr FAR_EL2, %0" : : "r" ( value ) : "memory"
  );
}
1657
/* FAR_EL3, Fault Address Register (EL3) */

/* Read FAR_EL3. */
static inline uint64_t _AArch64_Read_far_el3( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, FAR_EL3" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* Write FAR_EL3. */
static inline void _AArch64_Write_far_el3( uint64_t value )
{
  __asm__ volatile (
    "msr FAR_EL3, %0" : : "r" ( value ) : "memory"
  );
}
1677
/* FPEXC32_EL2, Floating-Point Exception Control Register */

#define AARCH64_FPEXC32_EL2_IOF 0x1U

#define AARCH64_FPEXC32_EL2_DZF 0x2U

#define AARCH64_FPEXC32_EL2_OFF 0x4U

#define AARCH64_FPEXC32_EL2_UFF 0x8U

#define AARCH64_FPEXC32_EL2_IXF 0x10U

#define AARCH64_FPEXC32_EL2_IDF 0x80U

#define AARCH64_FPEXC32_EL2_VECITR( _val ) ( ( _val ) << 8 )
#define AARCH64_FPEXC32_EL2_VECITR_SHIFT 8
#define AARCH64_FPEXC32_EL2_VECITR_MASK 0x700U
#define AARCH64_FPEXC32_EL2_VECITR_GET( _reg ) \
  ( ( ( _reg ) >> 8 ) & 0x7U )

#define AARCH64_FPEXC32_EL2_TFV 0x4000000U

#define AARCH64_FPEXC32_EL2_VV 0x8000000U

#define AARCH64_FPEXC32_EL2_FP2V 0x10000000U

#define AARCH64_FPEXC32_EL2_DEX 0x20000000U

#define AARCH64_FPEXC32_EL2_EN 0x40000000U

#define AARCH64_FPEXC32_EL2_EX 0x80000000U
1709
/* Read FPEXC32_EL2. */
static inline uint64_t _AArch64_Read_fpexc32_el2( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, FPEXC32_EL2" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* Write FPEXC32_EL2. */
static inline void _AArch64_Write_fpexc32_el2( uint64_t value )
{
  __asm__ volatile (
    "msr FPEXC32_EL2, %0" : : "r" ( value ) : "memory"
  );
}
1727
/* GCR_EL1, Tag Control Register. */

#define AARCH64_GCR_EL1_EXCLUDE( _val ) ( ( _val ) << 0 )
#define AARCH64_GCR_EL1_EXCLUDE_SHIFT 0
#define AARCH64_GCR_EL1_EXCLUDE_MASK 0xffffU
#define AARCH64_GCR_EL1_EXCLUDE_GET( _reg ) \
  ( ( ( _reg ) >> 0 ) & 0xffffU )

#define AARCH64_GCR_EL1_RRND 0x10000U
1737
/* Read GCR_EL1. */
static inline uint64_t _AArch64_Read_gcr_el1( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, GCR_EL1" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* Write GCR_EL1. */
static inline void _AArch64_Write_gcr_el1( uint64_t value )
{
  __asm__ volatile (
    "msr GCR_EL1, %0" : : "r" ( value ) : "memory"
  );
}
1755
/* GMID_EL1, Multiple tag transfer ID Register */

#define AARCH64_GMID_EL1_BS( _val ) ( ( _val ) << 0 )
#define AARCH64_GMID_EL1_BS_SHIFT 0
#define AARCH64_GMID_EL1_BS_MASK 0xfU
#define AARCH64_GMID_EL1_BS_GET( _reg ) \
  ( ( ( _reg ) >> 0 ) & 0xfU )
1763
/* Read GMID_EL1 (no write accessor is provided for this register here). */
static inline uint64_t _AArch64_Read_gmid_el1( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, GMID_EL1" : "=&r" ( value ) : : "memory"
  );

  return value;
}
1774
/* HACR_EL2, Hypervisor Auxiliary Control Register */

/* Read HACR_EL2. */
static inline uint64_t _AArch64_Read_hacr_el2( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, HACR_EL2" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* Write HACR_EL2. */
static inline void _AArch64_Write_hacr_el2( uint64_t value )
{
  __asm__ volatile (
    "msr HACR_EL2, %0" : : "r" ( value ) : "memory"
  );
}
1794
/* HAFGRTR_EL2, Hypervisor Activity Monitors Fine-Grained Read Trap Register */

#define AARCH64_HAFGRTR_EL2_AMCNTEN0 0x1U

#define AARCH64_HAFGRTR_EL2_AMCNTEN1 0x20000U

#define AARCH64_HAFGRTR_EL2_AMEVCNTR10_EL0 0x40000U

#define AARCH64_HAFGRTR_EL2_AMEVTYPER10_EL0 0x80000U

#define AARCH64_HAFGRTR_EL2_AMEVCNTR11_EL0 0x100000U

#define AARCH64_HAFGRTR_EL2_AMEVTYPER11_EL0 0x200000U

#define AARCH64_HAFGRTR_EL2_AMEVCNTR12_EL0 0x400000U

#define AARCH64_HAFGRTR_EL2_AMEVTYPER12_EL0 0x800000U

#define AARCH64_HAFGRTR_EL2_AMEVCNTR13_EL0 0x1000000U

#define AARCH64_HAFGRTR_EL2_AMEVTYPER13_EL0 0x2000000U

#define AARCH64_HAFGRTR_EL2_AMEVCNTR14_EL0 0x4000000U

#define AARCH64_HAFGRTR_EL2_AMEVTYPER14_EL0 0x8000000U

#define AARCH64_HAFGRTR_EL2_AMEVCNTR15_EL0 0x10000000U

#define AARCH64_HAFGRTR_EL2_AMEVTYPER15_EL0 0x20000000U

#define AARCH64_HAFGRTR_EL2_AMEVCNTR16_EL0 0x40000000U

#define AARCH64_HAFGRTR_EL2_AMEVTYPER16_EL0 0x80000000U

#define AARCH64_HAFGRTR_EL2_AMEVCNTR17_EL0 0x100000000ULL

#define AARCH64_HAFGRTR_EL2_AMEVTYPER17_EL0 0x200000000ULL

#define AARCH64_HAFGRTR_EL2_AMEVCNTR18_EL0 0x400000000ULL

#define AARCH64_HAFGRTR_EL2_AMEVTYPER18_EL0 0x800000000ULL

#define AARCH64_HAFGRTR_EL2_AMEVCNTR19_EL0 0x1000000000ULL

#define AARCH64_HAFGRTR_EL2_AMEVTYPER19_EL0 0x2000000000ULL

#define AARCH64_HAFGRTR_EL2_AMEVCNTR110_EL0 0x4000000000ULL

#define AARCH64_HAFGRTR_EL2_AMEVTYPER110_EL0 0x8000000000ULL

#define AARCH64_HAFGRTR_EL2_AMEVCNTR111_EL0 0x10000000000ULL

#define AARCH64_HAFGRTR_EL2_AMEVTYPER111_EL0 0x20000000000ULL

#define AARCH64_HAFGRTR_EL2_AMEVCNTR112_EL0 0x40000000000ULL

#define AARCH64_HAFGRTR_EL2_AMEVTYPER112_EL0 0x80000000000ULL

#define AARCH64_HAFGRTR_EL2_AMEVCNTR113_EL0 0x100000000000ULL

#define AARCH64_HAFGRTR_EL2_AMEVTYPER113_EL0 0x200000000000ULL

#define AARCH64_HAFGRTR_EL2_AMEVCNTR114_EL0 0x400000000000ULL

#define AARCH64_HAFGRTR_EL2_AMEVTYPER114_EL0 0x800000000000ULL

#define AARCH64_HAFGRTR_EL2_AMEVCNTR115_EL0 0x1000000000000ULL

#define AARCH64_HAFGRTR_EL2_AMEVTYPER115_EL0 0x2000000000000ULL
1864
/* Read HAFGRTR_EL2. */
static inline uint64_t _AArch64_Read_hafgrtr_el2( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, HAFGRTR_EL2" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* Write HAFGRTR_EL2. */
static inline void _AArch64_Write_hafgrtr_el2( uint64_t value )
{
  __asm__ volatile (
    "msr HAFGRTR_EL2, %0" : : "r" ( value ) : "memory"
  );
}
1882
/* HCR_EL2, Hypervisor Configuration Register */

#define AARCH64_HCR_EL2_VM 0x1U

#define AARCH64_HCR_EL2_SWIO 0x2U

#define AARCH64_HCR_EL2_PTW 0x4U

#define AARCH64_HCR_EL2_FMO 0x8U

#define AARCH64_HCR_EL2_IMO 0x10U

#define AARCH64_HCR_EL2_AMO 0x20U

#define AARCH64_HCR_EL2_VF 0x40U

#define AARCH64_HCR_EL2_VI 0x80U

#define AARCH64_HCR_EL2_VSE 0x100U

#define AARCH64_HCR_EL2_FB 0x200U

#define AARCH64_HCR_EL2_BSU( _val ) ( ( _val ) << 10 )
#define AARCH64_HCR_EL2_BSU_SHIFT 10
#define AARCH64_HCR_EL2_BSU_MASK 0xc00U
#define AARCH64_HCR_EL2_BSU_GET( _reg ) \
  ( ( ( _reg ) >> 10 ) & 0x3U )

#define AARCH64_HCR_EL2_DC 0x1000U

#define AARCH64_HCR_EL2_TWI 0x2000U

#define AARCH64_HCR_EL2_TWE 0x4000U

#define AARCH64_HCR_EL2_TID0 0x8000U

#define AARCH64_HCR_EL2_TID1 0x10000U

#define AARCH64_HCR_EL2_TID2 0x20000U

#define AARCH64_HCR_EL2_TID3 0x40000U

#define AARCH64_HCR_EL2_TSC 0x80000U

#define AARCH64_HCR_EL2_TIDCP 0x100000U

#define AARCH64_HCR_EL2_TACR 0x200000U

#define AARCH64_HCR_EL2_TSW 0x400000U

#define AARCH64_HCR_EL2_TPCP 0x800000U

#define AARCH64_HCR_EL2_TPU 0x1000000U

#define AARCH64_HCR_EL2_TTLB 0x2000000U

#define AARCH64_HCR_EL2_TVM 0x4000000U

#define AARCH64_HCR_EL2_TGE 0x8000000U

#define AARCH64_HCR_EL2_TDZ 0x10000000U

#define AARCH64_HCR_EL2_HCD 0x20000000U

#define AARCH64_HCR_EL2_TRVM 0x40000000U

#define AARCH64_HCR_EL2_RW 0x80000000U

#define AARCH64_HCR_EL2_CD 0x100000000ULL

#define AARCH64_HCR_EL2_ID 0x200000000ULL

#define AARCH64_HCR_EL2_E2H 0x400000000ULL

#define AARCH64_HCR_EL2_TLOR 0x800000000ULL

#define AARCH64_HCR_EL2_TERR 0x1000000000ULL

#define AARCH64_HCR_EL2_TEA 0x2000000000ULL

#define AARCH64_HCR_EL2_MIOCNCE 0x4000000000ULL

#define AARCH64_HCR_EL2_APK 0x10000000000ULL

#define AARCH64_HCR_EL2_API 0x20000000000ULL

#define AARCH64_HCR_EL2_NV 0x40000000000ULL

#define AARCH64_HCR_EL2_NV1 0x80000000000ULL

#define AARCH64_HCR_EL2_AT 0x100000000000ULL

#define AARCH64_HCR_EL2_NV2 0x200000000000ULL

#define AARCH64_HCR_EL2_FWB 0x400000000000ULL

#define AARCH64_HCR_EL2_FIEN 0x800000000000ULL

#define AARCH64_HCR_EL2_TID4 0x2000000000000ULL

#define AARCH64_HCR_EL2_TICAB 0x4000000000000ULL

#define AARCH64_HCR_EL2_AMVOFFEN 0x8000000000000ULL

#define AARCH64_HCR_EL2_TOCU 0x10000000000000ULL

#define AARCH64_HCR_EL2_ENSCXT 0x20000000000000ULL

#define AARCH64_HCR_EL2_TTLBIS 0x40000000000000ULL

#define AARCH64_HCR_EL2_TTLBOS 0x80000000000000ULL

#define AARCH64_HCR_EL2_ATA 0x100000000000000ULL

#define AARCH64_HCR_EL2_DCT 0x200000000000000ULL

#define AARCH64_HCR_EL2_TID5 0x400000000000000ULL

#define AARCH64_HCR_EL2_TWEDEN 0x800000000000000ULL

/*
 * TWEDEL occupies bits [63:60]; callers of the _val macro must pass a
 * 64-bit operand (e.g. 0xaULL) to avoid shifting a 32-bit value by 60.
 */
#define AARCH64_HCR_EL2_TWEDEL( _val ) ( ( _val ) << 60 )
#define AARCH64_HCR_EL2_TWEDEL_SHIFT 60
#define AARCH64_HCR_EL2_TWEDEL_MASK 0xf000000000000000ULL
#define AARCH64_HCR_EL2_TWEDEL_GET( _reg ) \
  ( ( ( _reg ) >> 60 ) & 0xfULL )
2008
/* Read HCR_EL2. */
static inline uint64_t _AArch64_Read_hcr_el2( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, HCR_EL2" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* Write HCR_EL2. */
static inline void _AArch64_Write_hcr_el2( uint64_t value )
{
  __asm__ volatile (
    "msr HCR_EL2, %0" : : "r" ( value ) : "memory"
  );
}
2026
/* HDFGRTR_EL2, Hypervisor Debug Fine-Grained Read Trap Register */

#define AARCH64_HDFGRTR_EL2_DBGBCRN_EL1 0x1U

#define AARCH64_HDFGRTR_EL2_DBGBVRN_EL1 0x2U

#define AARCH64_HDFGRTR_EL2_DBGWCRN_EL1 0x4U

#define AARCH64_HDFGRTR_EL2_DBGWVRN_EL1 0x8U

#define AARCH64_HDFGRTR_EL2_MDSCR_EL1 0x10U

#define AARCH64_HDFGRTR_EL2_DBGCLAIM 0x20U

#define AARCH64_HDFGRTR_EL2_DBGAUTHSTATUS_EL1 0x40U

#define AARCH64_HDFGRTR_EL2_DBGPRCR_EL1 0x80U

#define AARCH64_HDFGRTR_EL2_OSLSR_EL1 0x200U

#define AARCH64_HDFGRTR_EL2_OSECCR_EL1 0x400U

#define AARCH64_HDFGRTR_EL2_OSDLR_EL1 0x800U

#define AARCH64_HDFGRTR_EL2_PMEVCNTRN_EL0 0x1000U

#define AARCH64_HDFGRTR_EL2_PMEVTYPERN_EL0 0x2000U

#define AARCH64_HDFGRTR_EL2_PMCCFILTR_EL0 0x4000U

#define AARCH64_HDFGRTR_EL2_PMCCNTR_EL0 0x8000U

#define AARCH64_HDFGRTR_EL2_PMCNTEN 0x10000U

#define AARCH64_HDFGRTR_EL2_PMINTEN 0x20000U

#define AARCH64_HDFGRTR_EL2_PMOVS 0x40000U

#define AARCH64_HDFGRTR_EL2_PMSELR_EL0 0x80000U

#define AARCH64_HDFGRTR_EL2_PMMIR_EL1 0x400000U

#define AARCH64_HDFGRTR_EL2_PMBLIMITR_EL1 0x800000U

#define AARCH64_HDFGRTR_EL2_PMBPTR_EL1 0x1000000U

#define AARCH64_HDFGRTR_EL2_PMBSR_EL1 0x2000000U

#define AARCH64_HDFGRTR_EL2_PMSCR_EL1 0x4000000U

#define AARCH64_HDFGRTR_EL2_PMSEVFR_EL1 0x8000000U

#define AARCH64_HDFGRTR_EL2_PMSFCR_EL1 0x10000000U

#define AARCH64_HDFGRTR_EL2_PMSICR_EL1 0x20000000U

#define AARCH64_HDFGRTR_EL2_PMSIDR_EL1 0x40000000U

#define AARCH64_HDFGRTR_EL2_PMSIRR_EL1 0x80000000U

#define AARCH64_HDFGRTR_EL2_PMSLATFR_EL1 0x100000000ULL

#define AARCH64_HDFGRTR_EL2_TRC 0x200000000ULL

#define AARCH64_HDFGRTR_EL2_TRCAUTHSTATUS 0x400000000ULL

#define AARCH64_HDFGRTR_EL2_TRCAUXCTLR 0x800000000ULL

#define AARCH64_HDFGRTR_EL2_TRCCLAIM 0x1000000000ULL

#define AARCH64_HDFGRTR_EL2_TRCCNTVRN 0x2000000000ULL

#define AARCH64_HDFGRTR_EL2_TRCID 0x10000000000ULL

#define AARCH64_HDFGRTR_EL2_TRCIMSPECN 0x20000000000ULL

#define AARCH64_HDFGRTR_EL2_TRCOSLSR 0x80000000000ULL

#define AARCH64_HDFGRTR_EL2_TRCPRGCTLR 0x100000000000ULL

#define AARCH64_HDFGRTR_EL2_TRCSEQSTR 0x200000000000ULL

#define AARCH64_HDFGRTR_EL2_TRCSSCSRN 0x400000000000ULL

#define AARCH64_HDFGRTR_EL2_TRCSTATR 0x800000000000ULL

#define AARCH64_HDFGRTR_EL2_TRCVICTLR 0x1000000000000ULL

#define AARCH64_HDFGRTR_EL2_PMUSERENR_EL0 0x200000000000000ULL

#define AARCH64_HDFGRTR_EL2_PMCEIDN_EL0 0x400000000000000ULL
2118
/* Read HDFGRTR_EL2. */
static inline uint64_t _AArch64_Read_hdfgrtr_el2( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, HDFGRTR_EL2" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* Write HDFGRTR_EL2. */
static inline void _AArch64_Write_hdfgrtr_el2( uint64_t value )
{
  __asm__ volatile (
    "msr HDFGRTR_EL2, %0" : : "r" ( value ) : "memory"
  );
}
2136
/* HDFGWTR_EL2, Hypervisor Debug Fine-Grained Write Trap Register */

#define AARCH64_HDFGWTR_EL2_DBGBCRN_EL1 0x1U

#define AARCH64_HDFGWTR_EL2_DBGBVRN_EL1 0x2U

#define AARCH64_HDFGWTR_EL2_DBGWCRN_EL1 0x4U

#define AARCH64_HDFGWTR_EL2_DBGWVRN_EL1 0x8U

#define AARCH64_HDFGWTR_EL2_MDSCR_EL1 0x10U

#define AARCH64_HDFGWTR_EL2_DBGCLAIM 0x20U

#define AARCH64_HDFGWTR_EL2_DBGPRCR_EL1 0x80U

#define AARCH64_HDFGWTR_EL2_OSLAR_EL1 0x100U

#define AARCH64_HDFGWTR_EL2_OSECCR_EL1 0x400U

#define AARCH64_HDFGWTR_EL2_OSDLR_EL1 0x800U

#define AARCH64_HDFGWTR_EL2_PMEVCNTRN_EL0 0x1000U

#define AARCH64_HDFGWTR_EL2_PMEVTYPERN_EL0 0x2000U

#define AARCH64_HDFGWTR_EL2_PMCCFILTR_EL0 0x4000U

#define AARCH64_HDFGWTR_EL2_PMCCNTR_EL0 0x8000U

#define AARCH64_HDFGWTR_EL2_PMCNTEN 0x10000U

#define AARCH64_HDFGWTR_EL2_PMINTEN 0x20000U

#define AARCH64_HDFGWTR_EL2_PMOVS 0x40000U

#define AARCH64_HDFGWTR_EL2_PMSELR_EL0 0x80000U

#define AARCH64_HDFGWTR_EL2_PMSWINC_EL0 0x100000U

#define AARCH64_HDFGWTR_EL2_PMCR_EL0 0x200000U

#define AARCH64_HDFGWTR_EL2_PMBLIMITR_EL1 0x800000U

#define AARCH64_HDFGWTR_EL2_PMBPTR_EL1 0x1000000U

#define AARCH64_HDFGWTR_EL2_PMBSR_EL1 0x2000000U

#define AARCH64_HDFGWTR_EL2_PMSCR_EL1 0x4000000U

#define AARCH64_HDFGWTR_EL2_PMSEVFR_EL1 0x8000000U

#define AARCH64_HDFGWTR_EL2_PMSFCR_EL1 0x10000000U

#define AARCH64_HDFGWTR_EL2_PMSICR_EL1 0x20000000U

#define AARCH64_HDFGWTR_EL2_PMSIRR_EL1 0x80000000U

#define AARCH64_HDFGWTR_EL2_PMSLATFR_EL1 0x100000000ULL

#define AARCH64_HDFGWTR_EL2_TRC 0x200000000ULL

#define AARCH64_HDFGWTR_EL2_TRCAUXCTLR 0x800000000ULL

#define AARCH64_HDFGWTR_EL2_TRCCLAIM 0x1000000000ULL

#define AARCH64_HDFGWTR_EL2_TRCCNTVRN 0x2000000000ULL

#define AARCH64_HDFGWTR_EL2_TRCIMSPECN 0x20000000000ULL

#define AARCH64_HDFGWTR_EL2_TRCOSLAR 0x40000000000ULL

#define AARCH64_HDFGWTR_EL2_TRCPRGCTLR 0x100000000000ULL

#define AARCH64_HDFGWTR_EL2_TRCSEQSTR 0x200000000000ULL

#define AARCH64_HDFGWTR_EL2_TRCSSCSRN 0x400000000000ULL

#define AARCH64_HDFGWTR_EL2_TRCVICTLR 0x1000000000000ULL

#define AARCH64_HDFGWTR_EL2_TRFCR_EL1 0x2000000000000ULL

#define AARCH64_HDFGWTR_EL2_PMUSERENR_EL0 0x200000000000000ULL
2220
/* Reads HDFGWTR_EL2; requires execution at EL2 or higher. */
static inline uint64_t _AArch64_Read_hdfgwtr_el2( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, HDFGWTR_EL2" : "=&r" ( value ) : : "memory"
  );

  return value;
}
2231
/* Writes HDFGWTR_EL2; requires execution at EL2 or higher. */
static inline void _AArch64_Write_hdfgwtr_el2( uint64_t value )
{
  __asm__ volatile (
    "msr HDFGWTR_EL2, %0" : : "r" ( value ) : "memory"
  );
}
2238
/*
 * HFGITR_EL2, Hypervisor Fine-Grained Instruction Trap Register.
 * One trap-enable bit per trapped instruction (cache maintenance,
 * address translation, TLB invalidation, prediction restriction,
 * ERET, and SVC); reserved bit positions have no macro.
 */
#define AARCH64_HFGITR_EL2_ICIALLUIS 0x1U
#define AARCH64_HFGITR_EL2_ICIALLU 0x2U
#define AARCH64_HFGITR_EL2_ICIVAU 0x4U
#define AARCH64_HFGITR_EL2_DCIVAC 0x8U
#define AARCH64_HFGITR_EL2_DCISW 0x10U
#define AARCH64_HFGITR_EL2_DCCSW 0x20U
#define AARCH64_HFGITR_EL2_DCCISW 0x40U
#define AARCH64_HFGITR_EL2_DCCVAU 0x80U
#define AARCH64_HFGITR_EL2_DCCVAP 0x100U
#define AARCH64_HFGITR_EL2_DCCVADP 0x200U
#define AARCH64_HFGITR_EL2_DCCIVAC 0x400U
#define AARCH64_HFGITR_EL2_DCZVA 0x800U
#define AARCH64_HFGITR_EL2_ATS1E1R 0x1000U
#define AARCH64_HFGITR_EL2_ATS1E1W 0x2000U
#define AARCH64_HFGITR_EL2_ATS1E0R 0x4000U
#define AARCH64_HFGITR_EL2_ATS1E0W 0x8000U
#define AARCH64_HFGITR_EL2_ATS1E1RP 0x10000U
#define AARCH64_HFGITR_EL2_ATS1E1WP 0x20000U
#define AARCH64_HFGITR_EL2_TLBIVMALLE1OS 0x40000U
#define AARCH64_HFGITR_EL2_TLBIVAE1OS 0x80000U
#define AARCH64_HFGITR_EL2_TLBIASIDE1OS 0x100000U
#define AARCH64_HFGITR_EL2_TLBIVAAE1OS 0x200000U
#define AARCH64_HFGITR_EL2_TLBIVALE1OS 0x400000U
#define AARCH64_HFGITR_EL2_TLBIVAALE1OS 0x800000U
#define AARCH64_HFGITR_EL2_TLBIRVAE1OS 0x1000000U
#define AARCH64_HFGITR_EL2_TLBIRVAAE1OS 0x2000000U
#define AARCH64_HFGITR_EL2_TLBIRVALE1OS 0x4000000U
#define AARCH64_HFGITR_EL2_TLBIRVAALE1OS 0x8000000U
#define AARCH64_HFGITR_EL2_TLBIVMALLE1IS 0x10000000U
#define AARCH64_HFGITR_EL2_TLBIVAE1IS 0x20000000U
#define AARCH64_HFGITR_EL2_TLBIASIDE1IS 0x40000000U
#define AARCH64_HFGITR_EL2_TLBIVAAE1IS 0x80000000U
#define AARCH64_HFGITR_EL2_TLBIVALE1IS 0x100000000ULL
#define AARCH64_HFGITR_EL2_TLBIVAALE1IS 0x200000000ULL
#define AARCH64_HFGITR_EL2_TLBIRVAE1IS 0x400000000ULL
#define AARCH64_HFGITR_EL2_TLBIRVAAE1IS 0x800000000ULL
#define AARCH64_HFGITR_EL2_TLBIRVALE1IS 0x1000000000ULL
#define AARCH64_HFGITR_EL2_TLBIRVAALE1IS 0x2000000000ULL
#define AARCH64_HFGITR_EL2_TLBIRVAE1 0x4000000000ULL
#define AARCH64_HFGITR_EL2_TLBIRVAAE1 0x8000000000ULL
#define AARCH64_HFGITR_EL2_TLBIRVALE1 0x10000000000ULL
#define AARCH64_HFGITR_EL2_TLBIRVAALE1 0x20000000000ULL
#define AARCH64_HFGITR_EL2_TLBIVMALLE1 0x40000000000ULL
#define AARCH64_HFGITR_EL2_TLBIVAE1 0x80000000000ULL
#define AARCH64_HFGITR_EL2_TLBIASIDE1 0x100000000000ULL
#define AARCH64_HFGITR_EL2_TLBIVAAE1 0x200000000000ULL
#define AARCH64_HFGITR_EL2_TLBIVALE1 0x400000000000ULL
#define AARCH64_HFGITR_EL2_TLBIVAALE1 0x800000000000ULL
#define AARCH64_HFGITR_EL2_CFPRCTX 0x1000000000000ULL
#define AARCH64_HFGITR_EL2_DVPRCTX 0x2000000000000ULL
#define AARCH64_HFGITR_EL2_CPPRCTX 0x4000000000000ULL
#define AARCH64_HFGITR_EL2_ERET 0x8000000000000ULL
#define AARCH64_HFGITR_EL2_SVC_EL0 0x10000000000000ULL
#define AARCH64_HFGITR_EL2_SVC_EL1 0x20000000000000ULL
#define AARCH64_HFGITR_EL2_DCCVAC 0x40000000000000ULL
2350
/* Reads HFGITR_EL2; requires execution at EL2 or higher. */
static inline uint64_t _AArch64_Read_hfgitr_el2( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, HFGITR_EL2" : "=&r" ( value ) : : "memory"
  );

  return value;
}
2361
/* Writes HFGITR_EL2; requires execution at EL2 or higher. */
static inline void _AArch64_Write_hfgitr_el2( uint64_t value )
{
  __asm__ volatile (
    "msr HFGITR_EL2, %0" : : "r" ( value ) : "memory"
  );
}
2368
/*
 * HFGRTR_EL2, Hypervisor Fine-Grained Read Trap Register.
 * One trap-enable bit per trapped EL1/EL0 system-register read;
 * reserved bit positions have no macro.
 */
#define AARCH64_HFGRTR_EL2_AFSR0_EL1 0x1U
#define AARCH64_HFGRTR_EL2_AFSR1_EL1 0x2U
#define AARCH64_HFGRTR_EL2_AIDR_EL1 0x4U
#define AARCH64_HFGRTR_EL2_AMAIR_EL1 0x8U
#define AARCH64_HFGRTR_EL2_APDAKEY 0x10U
#define AARCH64_HFGRTR_EL2_APDBKEY 0x20U
#define AARCH64_HFGRTR_EL2_APGAKEY 0x40U
#define AARCH64_HFGRTR_EL2_APIAKEY 0x80U
#define AARCH64_HFGRTR_EL2_APIBKEY 0x100U
#define AARCH64_HFGRTR_EL2_CCSIDR_EL1 0x200U
#define AARCH64_HFGRTR_EL2_CLIDR_EL1 0x400U
#define AARCH64_HFGRTR_EL2_CONTEXTIDR_EL1 0x800U
#define AARCH64_HFGRTR_EL2_CPACR_EL1 0x1000U
#define AARCH64_HFGRTR_EL2_CSSELR_EL1 0x2000U
#define AARCH64_HFGRTR_EL2_CTR_EL0 0x4000U
#define AARCH64_HFGRTR_EL2_DCZID_EL0 0x8000U
#define AARCH64_HFGRTR_EL2_ESR_EL1 0x10000U
#define AARCH64_HFGRTR_EL2_FAR_EL1 0x20000U
#define AARCH64_HFGRTR_EL2_ISR_EL1 0x40000U
#define AARCH64_HFGRTR_EL2_LORC_EL1 0x80000U
#define AARCH64_HFGRTR_EL2_LOREA_EL1 0x100000U
#define AARCH64_HFGRTR_EL2_LORID_EL1 0x200000U
#define AARCH64_HFGRTR_EL2_LORN_EL1 0x400000U
#define AARCH64_HFGRTR_EL2_LORSA_EL1 0x800000U
#define AARCH64_HFGRTR_EL2_MAIR_EL1 0x1000000U
#define AARCH64_HFGRTR_EL2_MIDR_EL1 0x2000000U
#define AARCH64_HFGRTR_EL2_MPIDR_EL1 0x4000000U
#define AARCH64_HFGRTR_EL2_PAR_EL1 0x8000000U
#define AARCH64_HFGRTR_EL2_REVIDR_EL1 0x10000000U
#define AARCH64_HFGRTR_EL2_SCTLR_EL1 0x20000000U
#define AARCH64_HFGRTR_EL2_SCXTNUM_EL1 0x40000000U
#define AARCH64_HFGRTR_EL2_SCXTNUM_EL0 0x80000000U
#define AARCH64_HFGRTR_EL2_TCR_EL1 0x100000000ULL
#define AARCH64_HFGRTR_EL2_TPIDR_EL1 0x200000000ULL
#define AARCH64_HFGRTR_EL2_TPIDRRO_EL0 0x400000000ULL
#define AARCH64_HFGRTR_EL2_TPIDR_EL0 0x800000000ULL
#define AARCH64_HFGRTR_EL2_TTBR0_EL1 0x1000000000ULL
#define AARCH64_HFGRTR_EL2_TTBR1_EL1 0x2000000000ULL
#define AARCH64_HFGRTR_EL2_VBAR_EL1 0x4000000000ULL
#define AARCH64_HFGRTR_EL2_ICC_IGRPENN_EL1 0x8000000000ULL
#define AARCH64_HFGRTR_EL2_ERRIDR_EL1 0x10000000000ULL
#define AARCH64_HFGRTR_EL2_ERRSELR_EL1 0x20000000000ULL
#define AARCH64_HFGRTR_EL2_ERXFR_EL1 0x40000000000ULL
#define AARCH64_HFGRTR_EL2_ERXCTLR_EL1 0x80000000000ULL
#define AARCH64_HFGRTR_EL2_ERXSTATUS_EL1 0x100000000000ULL
#define AARCH64_HFGRTR_EL2_ERXMISCN_EL1 0x200000000000ULL
#define AARCH64_HFGRTR_EL2_ERXPFGF_EL1 0x400000000000ULL
#define AARCH64_HFGRTR_EL2_ERXPFGCTL_EL1 0x800000000000ULL
#define AARCH64_HFGRTR_EL2_ERXPFGCDN_EL1 0x1000000000000ULL
#define AARCH64_HFGRTR_EL2_ERXADDR_EL1 0x2000000000000ULL
2470
/* Reads HFGRTR_EL2; requires execution at EL2 or higher. */
static inline uint64_t _AArch64_Read_hfgrtr_el2( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, HFGRTR_EL2" : "=&r" ( value ) : : "memory"
  );

  return value;
}
2481
/* Writes HFGRTR_EL2; requires execution at EL2 or higher. */
static inline void _AArch64_Write_hfgrtr_el2( uint64_t value )
{
  __asm__ volatile (
    "msr HFGRTR_EL2, %0" : : "r" ( value ) : "memory"
  );
}
2488
/*
 * HFGWTR_EL2, Hypervisor Fine-Grained Write Trap Register.
 * One trap-enable bit per trapped EL1/EL0 system-register write.
 * Bits defined only in HFGRTR_EL2 (read-only registers such as
 * MIDR_EL1) and reserved positions have no macro here.
 */
#define AARCH64_HFGWTR_EL2_AFSR0_EL1 0x1U
#define AARCH64_HFGWTR_EL2_AFSR1_EL1 0x2U
#define AARCH64_HFGWTR_EL2_AMAIR_EL1 0x8U
#define AARCH64_HFGWTR_EL2_APDAKEY 0x10U
#define AARCH64_HFGWTR_EL2_APDBKEY 0x20U
#define AARCH64_HFGWTR_EL2_APGAKEY 0x40U
#define AARCH64_HFGWTR_EL2_APIAKEY 0x80U
#define AARCH64_HFGWTR_EL2_APIBKEY 0x100U
#define AARCH64_HFGWTR_EL2_CONTEXTIDR_EL1 0x800U
#define AARCH64_HFGWTR_EL2_CPACR_EL1 0x1000U
#define AARCH64_HFGWTR_EL2_CSSELR_EL1 0x2000U
#define AARCH64_HFGWTR_EL2_ESR_EL1 0x10000U
#define AARCH64_HFGWTR_EL2_FAR_EL1 0x20000U
#define AARCH64_HFGWTR_EL2_LORC_EL1 0x80000U
#define AARCH64_HFGWTR_EL2_LOREA_EL1 0x100000U
#define AARCH64_HFGWTR_EL2_LORN_EL1 0x400000U
#define AARCH64_HFGWTR_EL2_LORSA_EL1 0x800000U
#define AARCH64_HFGWTR_EL2_MAIR_EL1 0x1000000U
#define AARCH64_HFGWTR_EL2_PAR_EL1 0x8000000U
#define AARCH64_HFGWTR_EL2_SCTLR_EL1 0x20000000U
#define AARCH64_HFGWTR_EL2_SCXTNUM_EL1 0x40000000U
#define AARCH64_HFGWTR_EL2_SCXTNUM_EL0 0x80000000U
#define AARCH64_HFGWTR_EL2_TCR_EL1 0x100000000ULL
#define AARCH64_HFGWTR_EL2_TPIDR_EL1 0x200000000ULL
#define AARCH64_HFGWTR_EL2_TPIDRRO_EL0 0x400000000ULL
#define AARCH64_HFGWTR_EL2_TPIDR_EL0 0x800000000ULL
#define AARCH64_HFGWTR_EL2_TTBR0_EL1 0x1000000000ULL
#define AARCH64_HFGWTR_EL2_TTBR1_EL1 0x2000000000ULL
#define AARCH64_HFGWTR_EL2_VBAR_EL1 0x4000000000ULL
#define AARCH64_HFGWTR_EL2_ICC_IGRPENN_EL1 0x8000000000ULL
#define AARCH64_HFGWTR_EL2_ERRSELR_EL1 0x20000000000ULL
#define AARCH64_HFGWTR_EL2_ERXCTLR_EL1 0x80000000000ULL
#define AARCH64_HFGWTR_EL2_ERXSTATUS_EL1 0x100000000000ULL
#define AARCH64_HFGWTR_EL2_ERXMISCN_EL1 0x200000000000ULL
#define AARCH64_HFGWTR_EL2_ERXPFGCTL_EL1 0x800000000000ULL
#define AARCH64_HFGWTR_EL2_ERXPFGCDN_EL1 0x1000000000000ULL
#define AARCH64_HFGWTR_EL2_ERXADDR_EL1 0x2000000000000ULL
2564
/* Reads HFGWTR_EL2; requires execution at EL2 or higher. */
static inline uint64_t _AArch64_Read_hfgwtr_el2( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, HFGWTR_EL2" : "=&r" ( value ) : : "memory"
  );

  return value;
}
2575
/* Writes HFGWTR_EL2; requires execution at EL2 or higher. */
static inline void _AArch64_Write_hfgwtr_el2( uint64_t value )
{
  __asm__ volatile (
    "msr HFGWTR_EL2, %0" : : "r" ( value ) : "memory"
  );
}
2582
/* HPFAR_EL2, Hypervisor IPA Fault Address Register */

/* FIPA[47:12]: bits 47:12 of the faulting intermediate physical address */
#define AARCH64_HPFAR_EL2_FIPA_47_12( _val ) ( ( _val ) << 4 )
#define AARCH64_HPFAR_EL2_FIPA_47_12_SHIFT 4
#define AARCH64_HPFAR_EL2_FIPA_47_12_MASK 0xfffffffff0ULL
#define AARCH64_HPFAR_EL2_FIPA_47_12_GET( _reg ) \
  ( ( ( _reg ) >> 4 ) & 0xfffffffffULL )

/* FIPA[51:48]: upper IPA bits (FEAT_LPA) */
#define AARCH64_HPFAR_EL2_FIPA_51_48( _val ) ( ( _val ) << 40 )
#define AARCH64_HPFAR_EL2_FIPA_51_48_SHIFT 40
#define AARCH64_HPFAR_EL2_FIPA_51_48_MASK 0xf0000000000ULL
#define AARCH64_HPFAR_EL2_FIPA_51_48_GET( _reg ) \
  ( ( ( _reg ) >> 40 ) & 0xfULL )

/* NS: faulting IPA space was Non-secure */
#define AARCH64_HPFAR_EL2_NS 0x8000000000000000ULL
2598
/* Reads HPFAR_EL2; requires execution at EL2 or higher. */
static inline uint64_t _AArch64_Read_hpfar_el2( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, HPFAR_EL2" : "=&r" ( value ) : : "memory"
  );

  return value;
}
2609
/* Writes HPFAR_EL2; requires execution at EL2 or higher. */
static inline void _AArch64_Write_hpfar_el2( uint64_t value )
{
  __asm__ volatile (
    "msr HPFAR_EL2, %0" : : "r" ( value ) : "memory"
  );
}
2616
/* HSTR_EL2, Hypervisor System Trap Register */

/* Reads HSTR_EL2; requires execution at EL2 or higher. */
static inline uint64_t _AArch64_Read_hstr_el2( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, HSTR_EL2" : "=&r" ( value ) : : "memory"
  );

  return value;
}
2629
/* Writes HSTR_EL2; requires execution at EL2 or higher. */
static inline void _AArch64_Write_hstr_el2( uint64_t value )
{
  __asm__ volatile (
    "msr HSTR_EL2, %0" : : "r" ( value ) : "memory"
  );
}
2636
/* ID_AA64AFR0_EL1, AArch64 Auxiliary Feature Register 0 */

/* Reads the IMPLEMENTATION DEFINED feature register ID_AA64AFR0_EL1
   (read-only, hence no write accessor). */
static inline uint64_t _AArch64_Read_id_aa64afr0_el1( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, ID_AA64AFR0_EL1" : "=&r" ( value ) : : "memory"
  );

  return value;
}
2649
/* ID_AA64AFR1_EL1, AArch64 Auxiliary Feature Register 1 */

/* Reads the IMPLEMENTATION DEFINED feature register ID_AA64AFR1_EL1
   (read-only, hence no write accessor). */
static inline uint64_t _AArch64_Read_id_aa64afr1_el1( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, ID_AA64AFR1_EL1" : "=&r" ( value ) : : "memory"
  );

  return value;
}
2662
/*
 * ID_AA64DFR0_EL1, AArch64 Debug Feature Register 0.
 * Each field gets a pack macro, a _SHIFT, a _MASK, and a _GET extractor;
 * all fields are 4 bits wide.
 */
#define AARCH64_ID_AA64DFR0_EL1_DEBUGVER( _val ) ( ( _val ) << 0 )
#define AARCH64_ID_AA64DFR0_EL1_DEBUGVER_SHIFT 0
#define AARCH64_ID_AA64DFR0_EL1_DEBUGVER_MASK 0xfU
#define AARCH64_ID_AA64DFR0_EL1_DEBUGVER_GET( _reg ) \
  ( ( ( _reg ) >> 0 ) & 0xfU )

#define AARCH64_ID_AA64DFR0_EL1_TRACEVER( _val ) ( ( _val ) << 4 )
#define AARCH64_ID_AA64DFR0_EL1_TRACEVER_SHIFT 4
#define AARCH64_ID_AA64DFR0_EL1_TRACEVER_MASK 0xf0U
#define AARCH64_ID_AA64DFR0_EL1_TRACEVER_GET( _reg ) \
  ( ( ( _reg ) >> 4 ) & 0xfU )

#define AARCH64_ID_AA64DFR0_EL1_PMUVER( _val ) ( ( _val ) << 8 )
#define AARCH64_ID_AA64DFR0_EL1_PMUVER_SHIFT 8
#define AARCH64_ID_AA64DFR0_EL1_PMUVER_MASK 0xf00U
#define AARCH64_ID_AA64DFR0_EL1_PMUVER_GET( _reg ) \
  ( ( ( _reg ) >> 8 ) & 0xfU )

#define AARCH64_ID_AA64DFR0_EL1_BRPS( _val ) ( ( _val ) << 12 )
#define AARCH64_ID_AA64DFR0_EL1_BRPS_SHIFT 12
#define AARCH64_ID_AA64DFR0_EL1_BRPS_MASK 0xf000U
#define AARCH64_ID_AA64DFR0_EL1_BRPS_GET( _reg ) \
  ( ( ( _reg ) >> 12 ) & 0xfU )

#define AARCH64_ID_AA64DFR0_EL1_WRPS( _val ) ( ( _val ) << 20 )
#define AARCH64_ID_AA64DFR0_EL1_WRPS_SHIFT 20
#define AARCH64_ID_AA64DFR0_EL1_WRPS_MASK 0xf00000U
#define AARCH64_ID_AA64DFR0_EL1_WRPS_GET( _reg ) \
  ( ( ( _reg ) >> 20 ) & 0xfU )

#define AARCH64_ID_AA64DFR0_EL1_CTX_CMPS( _val ) ( ( _val ) << 28 )
#define AARCH64_ID_AA64DFR0_EL1_CTX_CMPS_SHIFT 28
#define AARCH64_ID_AA64DFR0_EL1_CTX_CMPS_MASK 0xf0000000U
#define AARCH64_ID_AA64DFR0_EL1_CTX_CMPS_GET( _reg ) \
  ( ( ( _reg ) >> 28 ) & 0xfU )

#define AARCH64_ID_AA64DFR0_EL1_PMSVER( _val ) ( ( _val ) << 32 )
#define AARCH64_ID_AA64DFR0_EL1_PMSVER_SHIFT 32
#define AARCH64_ID_AA64DFR0_EL1_PMSVER_MASK 0xf00000000ULL
#define AARCH64_ID_AA64DFR0_EL1_PMSVER_GET( _reg ) \
  ( ( ( _reg ) >> 32 ) & 0xfULL )

#define AARCH64_ID_AA64DFR0_EL1_DOUBLELOCK( _val ) ( ( _val ) << 36 )
#define AARCH64_ID_AA64DFR0_EL1_DOUBLELOCK_SHIFT 36
#define AARCH64_ID_AA64DFR0_EL1_DOUBLELOCK_MASK 0xf000000000ULL
#define AARCH64_ID_AA64DFR0_EL1_DOUBLELOCK_GET( _reg ) \
  ( ( ( _reg ) >> 36 ) & 0xfULL )

#define AARCH64_ID_AA64DFR0_EL1_TRACEFILT( _val ) ( ( _val ) << 40 )
#define AARCH64_ID_AA64DFR0_EL1_TRACEFILT_SHIFT 40
#define AARCH64_ID_AA64DFR0_EL1_TRACEFILT_MASK 0xf0000000000ULL
#define AARCH64_ID_AA64DFR0_EL1_TRACEFILT_GET( _reg ) \
  ( ( ( _reg ) >> 40 ) & 0xfULL )

#define AARCH64_ID_AA64DFR0_EL1_MTPMU( _val ) ( ( _val ) << 48 )
#define AARCH64_ID_AA64DFR0_EL1_MTPMU_SHIFT 48
#define AARCH64_ID_AA64DFR0_EL1_MTPMU_MASK 0xf000000000000ULL
#define AARCH64_ID_AA64DFR0_EL1_MTPMU_GET( _reg ) \
  ( ( ( _reg ) >> 48 ) & 0xfULL )
2724
/* Reads the read-only feature register ID_AA64DFR0_EL1. */
static inline uint64_t _AArch64_Read_id_aa64dfr0_el1( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, ID_AA64DFR0_EL1" : "=&r" ( value ) : : "memory"
  );

  return value;
}
2735
/* ID_AA64DFR1_EL1, AArch64 Debug Feature Register 1 */

/* Reads the read-only feature register ID_AA64DFR1_EL1. */
static inline uint64_t _AArch64_Read_id_aa64dfr1_el1( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, ID_AA64DFR1_EL1" : "=&r" ( value ) : : "memory"
  );

  return value;
}
2748
/*
 * ID_AA64ISAR0_EL1, AArch64 Instruction Set Attribute Register 0.
 * One 4-bit field per instruction-set feature; pack/_SHIFT/_MASK/_GET
 * macros for each.  Bits 3:0 are reserved and have no macros.
 */
#define AARCH64_ID_AA64ISAR0_EL1_AES( _val ) ( ( _val ) << 4 )
#define AARCH64_ID_AA64ISAR0_EL1_AES_SHIFT 4
#define AARCH64_ID_AA64ISAR0_EL1_AES_MASK 0xf0U
#define AARCH64_ID_AA64ISAR0_EL1_AES_GET( _reg ) \
  ( ( ( _reg ) >> 4 ) & 0xfU )

#define AARCH64_ID_AA64ISAR0_EL1_SHA1( _val ) ( ( _val ) << 8 )
#define AARCH64_ID_AA64ISAR0_EL1_SHA1_SHIFT 8
#define AARCH64_ID_AA64ISAR0_EL1_SHA1_MASK 0xf00U
#define AARCH64_ID_AA64ISAR0_EL1_SHA1_GET( _reg ) \
  ( ( ( _reg ) >> 8 ) & 0xfU )

#define AARCH64_ID_AA64ISAR0_EL1_SHA2( _val ) ( ( _val ) << 12 )
#define AARCH64_ID_AA64ISAR0_EL1_SHA2_SHIFT 12
#define AARCH64_ID_AA64ISAR0_EL1_SHA2_MASK 0xf000U
#define AARCH64_ID_AA64ISAR0_EL1_SHA2_GET( _reg ) \
  ( ( ( _reg ) >> 12 ) & 0xfU )

#define AARCH64_ID_AA64ISAR0_EL1_CRC32( _val ) ( ( _val ) << 16 )
#define AARCH64_ID_AA64ISAR0_EL1_CRC32_SHIFT 16
#define AARCH64_ID_AA64ISAR0_EL1_CRC32_MASK 0xf0000U
#define AARCH64_ID_AA64ISAR0_EL1_CRC32_GET( _reg ) \
  ( ( ( _reg ) >> 16 ) & 0xfU )

#define AARCH64_ID_AA64ISAR0_EL1_ATOMIC( _val ) ( ( _val ) << 20 )
#define AARCH64_ID_AA64ISAR0_EL1_ATOMIC_SHIFT 20
#define AARCH64_ID_AA64ISAR0_EL1_ATOMIC_MASK 0xf00000U
#define AARCH64_ID_AA64ISAR0_EL1_ATOMIC_GET( _reg ) \
  ( ( ( _reg ) >> 20 ) & 0xfU )

#define AARCH64_ID_AA64ISAR0_EL1_RDM( _val ) ( ( _val ) << 28 )
#define AARCH64_ID_AA64ISAR0_EL1_RDM_SHIFT 28
#define AARCH64_ID_AA64ISAR0_EL1_RDM_MASK 0xf0000000U
#define AARCH64_ID_AA64ISAR0_EL1_RDM_GET( _reg ) \
  ( ( ( _reg ) >> 28 ) & 0xfU )

#define AARCH64_ID_AA64ISAR0_EL1_SHA3( _val ) ( ( _val ) << 32 )
#define AARCH64_ID_AA64ISAR0_EL1_SHA3_SHIFT 32
#define AARCH64_ID_AA64ISAR0_EL1_SHA3_MASK 0xf00000000ULL
#define AARCH64_ID_AA64ISAR0_EL1_SHA3_GET( _reg ) \
  ( ( ( _reg ) >> 32 ) & 0xfULL )

#define AARCH64_ID_AA64ISAR0_EL1_SM3( _val ) ( ( _val ) << 36 )
#define AARCH64_ID_AA64ISAR0_EL1_SM3_SHIFT 36
#define AARCH64_ID_AA64ISAR0_EL1_SM3_MASK 0xf000000000ULL
#define AARCH64_ID_AA64ISAR0_EL1_SM3_GET( _reg ) \
  ( ( ( _reg ) >> 36 ) & 0xfULL )

#define AARCH64_ID_AA64ISAR0_EL1_SM4( _val ) ( ( _val ) << 40 )
#define AARCH64_ID_AA64ISAR0_EL1_SM4_SHIFT 40
#define AARCH64_ID_AA64ISAR0_EL1_SM4_MASK 0xf0000000000ULL
#define AARCH64_ID_AA64ISAR0_EL1_SM4_GET( _reg ) \
  ( ( ( _reg ) >> 40 ) & 0xfULL )

#define AARCH64_ID_AA64ISAR0_EL1_DP( _val ) ( ( _val ) << 44 )
#define AARCH64_ID_AA64ISAR0_EL1_DP_SHIFT 44
#define AARCH64_ID_AA64ISAR0_EL1_DP_MASK 0xf00000000000ULL
#define AARCH64_ID_AA64ISAR0_EL1_DP_GET( _reg ) \
  ( ( ( _reg ) >> 44 ) & 0xfULL )

#define AARCH64_ID_AA64ISAR0_EL1_FHM( _val ) ( ( _val ) << 48 )
#define AARCH64_ID_AA64ISAR0_EL1_FHM_SHIFT 48
#define AARCH64_ID_AA64ISAR0_EL1_FHM_MASK 0xf000000000000ULL
#define AARCH64_ID_AA64ISAR0_EL1_FHM_GET( _reg ) \
  ( ( ( _reg ) >> 48 ) & 0xfULL )

#define AARCH64_ID_AA64ISAR0_EL1_TS( _val ) ( ( _val ) << 52 )
#define AARCH64_ID_AA64ISAR0_EL1_TS_SHIFT 52
#define AARCH64_ID_AA64ISAR0_EL1_TS_MASK 0xf0000000000000ULL
#define AARCH64_ID_AA64ISAR0_EL1_TS_GET( _reg ) \
  ( ( ( _reg ) >> 52 ) & 0xfULL )

#define AARCH64_ID_AA64ISAR0_EL1_TLB( _val ) ( ( _val ) << 56 )
#define AARCH64_ID_AA64ISAR0_EL1_TLB_SHIFT 56
#define AARCH64_ID_AA64ISAR0_EL1_TLB_MASK 0xf00000000000000ULL
#define AARCH64_ID_AA64ISAR0_EL1_TLB_GET( _reg ) \
  ( ( ( _reg ) >> 56 ) & 0xfULL )

#define AARCH64_ID_AA64ISAR0_EL1_RNDR( _val ) ( ( _val ) << 60 )
#define AARCH64_ID_AA64ISAR0_EL1_RNDR_SHIFT 60
#define AARCH64_ID_AA64ISAR0_EL1_RNDR_MASK 0xf000000000000000ULL
#define AARCH64_ID_AA64ISAR0_EL1_RNDR_GET( _reg ) \
  ( ( ( _reg ) >> 60 ) & 0xfULL )
2834
/* Reads the read-only feature register ID_AA64ISAR0_EL1. */
static inline uint64_t _AArch64_Read_id_aa64isar0_el1( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, ID_AA64ISAR0_EL1" : "=&r" ( value ) : : "memory"
  );

  return value;
}
2845
/*
 * ID_AA64ISAR1_EL1, AArch64 Instruction Set Attribute Register 1.
 * One 4-bit field per instruction-set feature; pack/_SHIFT/_MASK/_GET
 * macros for each.
 */
#define AARCH64_ID_AA64ISAR1_EL1_DPB( _val ) ( ( _val ) << 0 )
#define AARCH64_ID_AA64ISAR1_EL1_DPB_SHIFT 0
#define AARCH64_ID_AA64ISAR1_EL1_DPB_MASK 0xfU
#define AARCH64_ID_AA64ISAR1_EL1_DPB_GET( _reg ) \
  ( ( ( _reg ) >> 0 ) & 0xfU )

#define AARCH64_ID_AA64ISAR1_EL1_APA( _val ) ( ( _val ) << 4 )
#define AARCH64_ID_AA64ISAR1_EL1_APA_SHIFT 4
#define AARCH64_ID_AA64ISAR1_EL1_APA_MASK 0xf0U
#define AARCH64_ID_AA64ISAR1_EL1_APA_GET( _reg ) \
  ( ( ( _reg ) >> 4 ) & 0xfU )

#define AARCH64_ID_AA64ISAR1_EL1_API( _val ) ( ( _val ) << 8 )
#define AARCH64_ID_AA64ISAR1_EL1_API_SHIFT 8
#define AARCH64_ID_AA64ISAR1_EL1_API_MASK 0xf00U
#define AARCH64_ID_AA64ISAR1_EL1_API_GET( _reg ) \
  ( ( ( _reg ) >> 8 ) & 0xfU )

#define AARCH64_ID_AA64ISAR1_EL1_JSCVT( _val ) ( ( _val ) << 12 )
#define AARCH64_ID_AA64ISAR1_EL1_JSCVT_SHIFT 12
#define AARCH64_ID_AA64ISAR1_EL1_JSCVT_MASK 0xf000U
#define AARCH64_ID_AA64ISAR1_EL1_JSCVT_GET( _reg ) \
  ( ( ( _reg ) >> 12 ) & 0xfU )

#define AARCH64_ID_AA64ISAR1_EL1_FCMA( _val ) ( ( _val ) << 16 )
#define AARCH64_ID_AA64ISAR1_EL1_FCMA_SHIFT 16
#define AARCH64_ID_AA64ISAR1_EL1_FCMA_MASK 0xf0000U
#define AARCH64_ID_AA64ISAR1_EL1_FCMA_GET( _reg ) \
  ( ( ( _reg ) >> 16 ) & 0xfU )

#define AARCH64_ID_AA64ISAR1_EL1_LRCPC( _val ) ( ( _val ) << 20 )
#define AARCH64_ID_AA64ISAR1_EL1_LRCPC_SHIFT 20
#define AARCH64_ID_AA64ISAR1_EL1_LRCPC_MASK 0xf00000U
#define AARCH64_ID_AA64ISAR1_EL1_LRCPC_GET( _reg ) \
  ( ( ( _reg ) >> 20 ) & 0xfU )

#define AARCH64_ID_AA64ISAR1_EL1_GPA( _val ) ( ( _val ) << 24 )
#define AARCH64_ID_AA64ISAR1_EL1_GPA_SHIFT 24
#define AARCH64_ID_AA64ISAR1_EL1_GPA_MASK 0xf000000U
#define AARCH64_ID_AA64ISAR1_EL1_GPA_GET( _reg ) \
  ( ( ( _reg ) >> 24 ) & 0xfU )

#define AARCH64_ID_AA64ISAR1_EL1_GPI( _val ) ( ( _val ) << 28 )
#define AARCH64_ID_AA64ISAR1_EL1_GPI_SHIFT 28
#define AARCH64_ID_AA64ISAR1_EL1_GPI_MASK 0xf0000000U
#define AARCH64_ID_AA64ISAR1_EL1_GPI_GET( _reg ) \
  ( ( ( _reg ) >> 28 ) & 0xfU )

#define AARCH64_ID_AA64ISAR1_EL1_FRINTTS( _val ) ( ( _val ) << 32 )
#define AARCH64_ID_AA64ISAR1_EL1_FRINTTS_SHIFT 32
#define AARCH64_ID_AA64ISAR1_EL1_FRINTTS_MASK 0xf00000000ULL
#define AARCH64_ID_AA64ISAR1_EL1_FRINTTS_GET( _reg ) \
  ( ( ( _reg ) >> 32 ) & 0xfULL )

#define AARCH64_ID_AA64ISAR1_EL1_SB( _val ) ( ( _val ) << 36 )
#define AARCH64_ID_AA64ISAR1_EL1_SB_SHIFT 36
#define AARCH64_ID_AA64ISAR1_EL1_SB_MASK 0xf000000000ULL
#define AARCH64_ID_AA64ISAR1_EL1_SB_GET( _reg ) \
  ( ( ( _reg ) >> 36 ) & 0xfULL )

#define AARCH64_ID_AA64ISAR1_EL1_SPECRES( _val ) ( ( _val ) << 40 )
#define AARCH64_ID_AA64ISAR1_EL1_SPECRES_SHIFT 40
#define AARCH64_ID_AA64ISAR1_EL1_SPECRES_MASK 0xf0000000000ULL
#define AARCH64_ID_AA64ISAR1_EL1_SPECRES_GET( _reg ) \
  ( ( ( _reg ) >> 40 ) & 0xfULL )

#define AARCH64_ID_AA64ISAR1_EL1_BF16( _val ) ( ( _val ) << 44 )
#define AARCH64_ID_AA64ISAR1_EL1_BF16_SHIFT 44
#define AARCH64_ID_AA64ISAR1_EL1_BF16_MASK 0xf00000000000ULL
#define AARCH64_ID_AA64ISAR1_EL1_BF16_GET( _reg ) \
  ( ( ( _reg ) >> 44 ) & 0xfULL )

#define AARCH64_ID_AA64ISAR1_EL1_DGH( _val ) ( ( _val ) << 48 )
#define AARCH64_ID_AA64ISAR1_EL1_DGH_SHIFT 48
#define AARCH64_ID_AA64ISAR1_EL1_DGH_MASK 0xf000000000000ULL
#define AARCH64_ID_AA64ISAR1_EL1_DGH_GET( _reg ) \
  ( ( ( _reg ) >> 48 ) & 0xfULL )

#define AARCH64_ID_AA64ISAR1_EL1_I8MM( _val ) ( ( _val ) << 52 )
#define AARCH64_ID_AA64ISAR1_EL1_I8MM_SHIFT 52
#define AARCH64_ID_AA64ISAR1_EL1_I8MM_MASK 0xf0000000000000ULL
#define AARCH64_ID_AA64ISAR1_EL1_I8MM_GET( _reg ) \
  ( ( ( _reg ) >> 52 ) & 0xfULL )
2931
/* Reads the read-only feature register ID_AA64ISAR1_EL1. */
static inline uint64_t _AArch64_Read_id_aa64isar1_el1( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, ID_AA64ISAR1_EL1" : "=&r" ( value ) : : "memory"
  );

  return value;
}
2942
/*
 * ID_AA64MMFR0_EL1, AArch64 Memory Model Feature Register 0.
 * One 4-bit field per memory-model feature; pack/_SHIFT/_MASK/_GET
 * macros for each.  Bits 55:48 are reserved and have no macros.
 */
#define AARCH64_ID_AA64MMFR0_EL1_PARANGE( _val ) ( ( _val ) << 0 )
#define AARCH64_ID_AA64MMFR0_EL1_PARANGE_SHIFT 0
#define AARCH64_ID_AA64MMFR0_EL1_PARANGE_MASK 0xfU
#define AARCH64_ID_AA64MMFR0_EL1_PARANGE_GET( _reg ) \
  ( ( ( _reg ) >> 0 ) & 0xfU )

#define AARCH64_ID_AA64MMFR0_EL1_ASIDBITS( _val ) ( ( _val ) << 4 )
#define AARCH64_ID_AA64MMFR0_EL1_ASIDBITS_SHIFT 4
#define AARCH64_ID_AA64MMFR0_EL1_ASIDBITS_MASK 0xf0U
#define AARCH64_ID_AA64MMFR0_EL1_ASIDBITS_GET( _reg ) \
  ( ( ( _reg ) >> 4 ) & 0xfU )

#define AARCH64_ID_AA64MMFR0_EL1_BIGEND( _val ) ( ( _val ) << 8 )
#define AARCH64_ID_AA64MMFR0_EL1_BIGEND_SHIFT 8
#define AARCH64_ID_AA64MMFR0_EL1_BIGEND_MASK 0xf00U
#define AARCH64_ID_AA64MMFR0_EL1_BIGEND_GET( _reg ) \
  ( ( ( _reg ) >> 8 ) & 0xfU )

#define AARCH64_ID_AA64MMFR0_EL1_SNSMEM( _val ) ( ( _val ) << 12 )
#define AARCH64_ID_AA64MMFR0_EL1_SNSMEM_SHIFT 12
#define AARCH64_ID_AA64MMFR0_EL1_SNSMEM_MASK 0xf000U
#define AARCH64_ID_AA64MMFR0_EL1_SNSMEM_GET( _reg ) \
  ( ( ( _reg ) >> 12 ) & 0xfU )

#define AARCH64_ID_AA64MMFR0_EL1_BIGENDEL0( _val ) ( ( _val ) << 16 )
#define AARCH64_ID_AA64MMFR0_EL1_BIGENDEL0_SHIFT 16
#define AARCH64_ID_AA64MMFR0_EL1_BIGENDEL0_MASK 0xf0000U
#define AARCH64_ID_AA64MMFR0_EL1_BIGENDEL0_GET( _reg ) \
  ( ( ( _reg ) >> 16 ) & 0xfU )

#define AARCH64_ID_AA64MMFR0_EL1_TGRAN16( _val ) ( ( _val ) << 20 )
#define AARCH64_ID_AA64MMFR0_EL1_TGRAN16_SHIFT 20
#define AARCH64_ID_AA64MMFR0_EL1_TGRAN16_MASK 0xf00000U
#define AARCH64_ID_AA64MMFR0_EL1_TGRAN16_GET( _reg ) \
  ( ( ( _reg ) >> 20 ) & 0xfU )

#define AARCH64_ID_AA64MMFR0_EL1_TGRAN64( _val ) ( ( _val ) << 24 )
#define AARCH64_ID_AA64MMFR0_EL1_TGRAN64_SHIFT 24
#define AARCH64_ID_AA64MMFR0_EL1_TGRAN64_MASK 0xf000000U
#define AARCH64_ID_AA64MMFR0_EL1_TGRAN64_GET( _reg ) \
  ( ( ( _reg ) >> 24 ) & 0xfU )

#define AARCH64_ID_AA64MMFR0_EL1_TGRAN4( _val ) ( ( _val ) << 28 )
#define AARCH64_ID_AA64MMFR0_EL1_TGRAN4_SHIFT 28
#define AARCH64_ID_AA64MMFR0_EL1_TGRAN4_MASK 0xf0000000U
#define AARCH64_ID_AA64MMFR0_EL1_TGRAN4_GET( _reg ) \
  ( ( ( _reg ) >> 28 ) & 0xfU )

#define AARCH64_ID_AA64MMFR0_EL1_TGRAN16_2( _val ) ( ( _val ) << 32 )
#define AARCH64_ID_AA64MMFR0_EL1_TGRAN16_2_SHIFT 32
#define AARCH64_ID_AA64MMFR0_EL1_TGRAN16_2_MASK 0xf00000000ULL
#define AARCH64_ID_AA64MMFR0_EL1_TGRAN16_2_GET( _reg ) \
  ( ( ( _reg ) >> 32 ) & 0xfULL )

#define AARCH64_ID_AA64MMFR0_EL1_TGRAN64_2( _val ) ( ( _val ) << 36 )
#define AARCH64_ID_AA64MMFR0_EL1_TGRAN64_2_SHIFT 36
#define AARCH64_ID_AA64MMFR0_EL1_TGRAN64_2_MASK 0xf000000000ULL
#define AARCH64_ID_AA64MMFR0_EL1_TGRAN64_2_GET( _reg ) \
  ( ( ( _reg ) >> 36 ) & 0xfULL )

#define AARCH64_ID_AA64MMFR0_EL1_TGRAN4_2( _val ) ( ( _val ) << 40 )
#define AARCH64_ID_AA64MMFR0_EL1_TGRAN4_2_SHIFT 40
#define AARCH64_ID_AA64MMFR0_EL1_TGRAN4_2_MASK 0xf0000000000ULL
#define AARCH64_ID_AA64MMFR0_EL1_TGRAN4_2_GET( _reg ) \
  ( ( ( _reg ) >> 40 ) & 0xfULL )

#define AARCH64_ID_AA64MMFR0_EL1_EXS( _val ) ( ( _val ) << 44 )
#define AARCH64_ID_AA64MMFR0_EL1_EXS_SHIFT 44
#define AARCH64_ID_AA64MMFR0_EL1_EXS_MASK 0xf00000000000ULL
#define AARCH64_ID_AA64MMFR0_EL1_EXS_GET( _reg ) \
  ( ( ( _reg ) >> 44 ) & 0xfULL )

#define AARCH64_ID_AA64MMFR0_EL1_FGT( _val ) ( ( _val ) << 56 )
#define AARCH64_ID_AA64MMFR0_EL1_FGT_SHIFT 56
#define AARCH64_ID_AA64MMFR0_EL1_FGT_MASK 0xf00000000000000ULL
#define AARCH64_ID_AA64MMFR0_EL1_FGT_GET( _reg ) \
  ( ( ( _reg ) >> 56 ) & 0xfULL )

#define AARCH64_ID_AA64MMFR0_EL1_ECV( _val ) ( ( _val ) << 60 )
#define AARCH64_ID_AA64MMFR0_EL1_ECV_SHIFT 60
#define AARCH64_ID_AA64MMFR0_EL1_ECV_MASK 0xf000000000000000ULL
#define AARCH64_ID_AA64MMFR0_EL1_ECV_GET( _reg ) \
  ( ( ( _reg ) >> 60 ) & 0xfULL )
3028
/* Reads the read-only feature register ID_AA64MMFR0_EL1. */
static inline uint64_t _AArch64_Read_id_aa64mmfr0_el1( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, ID_AA64MMFR0_EL1" : "=&r" ( value ) : : "memory"
  );

  return value;
}
3039
/*
 * ID_AA64MMFR1_EL1, AArch64 Memory Model Feature Register 1.
 * One 4-bit field per memory-model feature; pack/_SHIFT/_MASK/_GET
 * macros for each.
 */
#define AARCH64_ID_AA64MMFR1_EL1_HAFDBS( _val ) ( ( _val ) << 0 )
#define AARCH64_ID_AA64MMFR1_EL1_HAFDBS_SHIFT 0
#define AARCH64_ID_AA64MMFR1_EL1_HAFDBS_MASK 0xfU
#define AARCH64_ID_AA64MMFR1_EL1_HAFDBS_GET( _reg ) \
  ( ( ( _reg ) >> 0 ) & 0xfU )

#define AARCH64_ID_AA64MMFR1_EL1_VMIDBITS( _val ) ( ( _val ) << 4 )
#define AARCH64_ID_AA64MMFR1_EL1_VMIDBITS_SHIFT 4
#define AARCH64_ID_AA64MMFR1_EL1_VMIDBITS_MASK 0xf0U
#define AARCH64_ID_AA64MMFR1_EL1_VMIDBITS_GET( _reg ) \
  ( ( ( _reg ) >> 4 ) & 0xfU )

#define AARCH64_ID_AA64MMFR1_EL1_VH( _val ) ( ( _val ) << 8 )
#define AARCH64_ID_AA64MMFR1_EL1_VH_SHIFT 8
#define AARCH64_ID_AA64MMFR1_EL1_VH_MASK 0xf00U
#define AARCH64_ID_AA64MMFR1_EL1_VH_GET( _reg ) \
  ( ( ( _reg ) >> 8 ) & 0xfU )

#define AARCH64_ID_AA64MMFR1_EL1_HPDS( _val ) ( ( _val ) << 12 )
#define AARCH64_ID_AA64MMFR1_EL1_HPDS_SHIFT 12
#define AARCH64_ID_AA64MMFR1_EL1_HPDS_MASK 0xf000U
#define AARCH64_ID_AA64MMFR1_EL1_HPDS_GET( _reg ) \
  ( ( ( _reg ) >> 12 ) & 0xfU )

#define AARCH64_ID_AA64MMFR1_EL1_LO( _val ) ( ( _val ) << 16 )
#define AARCH64_ID_AA64MMFR1_EL1_LO_SHIFT 16
#define AARCH64_ID_AA64MMFR1_EL1_LO_MASK 0xf0000U
#define AARCH64_ID_AA64MMFR1_EL1_LO_GET( _reg ) \
  ( ( ( _reg ) >> 16 ) & 0xfU )

#define AARCH64_ID_AA64MMFR1_EL1_PAN( _val ) ( ( _val ) << 20 )
#define AARCH64_ID_AA64MMFR1_EL1_PAN_SHIFT 20
#define AARCH64_ID_AA64MMFR1_EL1_PAN_MASK 0xf00000U
#define AARCH64_ID_AA64MMFR1_EL1_PAN_GET( _reg ) \
  ( ( ( _reg ) >> 20 ) & 0xfU )

#define AARCH64_ID_AA64MMFR1_EL1_SPECSEI( _val ) ( ( _val ) << 24 )
#define AARCH64_ID_AA64MMFR1_EL1_SPECSEI_SHIFT 24
#define AARCH64_ID_AA64MMFR1_EL1_SPECSEI_MASK 0xf000000U
#define AARCH64_ID_AA64MMFR1_EL1_SPECSEI_GET( _reg ) \
  ( ( ( _reg ) >> 24 ) & 0xfU )

#define AARCH64_ID_AA64MMFR1_EL1_XNX( _val ) ( ( _val ) << 28 )
#define AARCH64_ID_AA64MMFR1_EL1_XNX_SHIFT 28
#define AARCH64_ID_AA64MMFR1_EL1_XNX_MASK 0xf0000000U
#define AARCH64_ID_AA64MMFR1_EL1_XNX_GET( _reg ) \
  ( ( ( _reg ) >> 28 ) & 0xfU )

#define AARCH64_ID_AA64MMFR1_EL1_TWED( _val ) ( ( _val ) << 32 )
#define AARCH64_ID_AA64MMFR1_EL1_TWED_SHIFT 32
#define AARCH64_ID_AA64MMFR1_EL1_TWED_MASK 0xf00000000ULL
#define AARCH64_ID_AA64MMFR1_EL1_TWED_GET( _reg ) \
  ( ( ( _reg ) >> 32 ) & 0xfULL )

#define AARCH64_ID_AA64MMFR1_EL1_ETS( _val ) ( ( _val ) << 36 )
#define AARCH64_ID_AA64MMFR1_EL1_ETS_SHIFT 36
#define AARCH64_ID_AA64MMFR1_EL1_ETS_MASK 0xf000000000ULL
#define AARCH64_ID_AA64MMFR1_EL1_ETS_GET( _reg ) \
  ( ( ( _reg ) >> 36 ) & 0xfULL )
3101
/* Reads the read-only feature register ID_AA64MMFR1_EL1. */
static inline uint64_t _AArch64_Read_id_aa64mmfr1_el1( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, ID_AA64MMFR1_EL1" : "=&r" ( value ) : : "memory"
  );

  return value;
}
3112
/* ID_AA64MMFR2_EL1, AArch64 Memory Model Feature Register 2 */

/*
 * Each field is 4 bits wide.  The field macros place a field value at its
 * bit position; the argument is cast to uint64_t so that shifts by 32 or
 * more are well defined even when a 32-bit value is passed.  The _GET
 * macros extract the field from a value read from the register.
 */
#define AARCH64_ID_AA64MMFR2_EL1_CNP( _val ) ( ( uint64_t ) ( _val ) << 0 )
#define AARCH64_ID_AA64MMFR2_EL1_CNP_SHIFT 0
#define AARCH64_ID_AA64MMFR2_EL1_CNP_MASK 0xfU
#define AARCH64_ID_AA64MMFR2_EL1_CNP_GET( _reg ) \
  ( ( ( _reg ) >> 0 ) & 0xfU )

#define AARCH64_ID_AA64MMFR2_EL1_UAO( _val ) ( ( uint64_t ) ( _val ) << 4 )
#define AARCH64_ID_AA64MMFR2_EL1_UAO_SHIFT 4
#define AARCH64_ID_AA64MMFR2_EL1_UAO_MASK 0xf0U
#define AARCH64_ID_AA64MMFR2_EL1_UAO_GET( _reg ) \
  ( ( ( _reg ) >> 4 ) & 0xfU )

#define AARCH64_ID_AA64MMFR2_EL1_LSM( _val ) ( ( uint64_t ) ( _val ) << 8 )
#define AARCH64_ID_AA64MMFR2_EL1_LSM_SHIFT 8
#define AARCH64_ID_AA64MMFR2_EL1_LSM_MASK 0xf00U
#define AARCH64_ID_AA64MMFR2_EL1_LSM_GET( _reg ) \
  ( ( ( _reg ) >> 8 ) & 0xfU )

#define AARCH64_ID_AA64MMFR2_EL1_IESB( _val ) ( ( uint64_t ) ( _val ) << 12 )
#define AARCH64_ID_AA64MMFR2_EL1_IESB_SHIFT 12
#define AARCH64_ID_AA64MMFR2_EL1_IESB_MASK 0xf000U
#define AARCH64_ID_AA64MMFR2_EL1_IESB_GET( _reg ) \
  ( ( ( _reg ) >> 12 ) & 0xfU )

#define AARCH64_ID_AA64MMFR2_EL1_VARANGE( _val ) ( ( uint64_t ) ( _val ) << 16 )
#define AARCH64_ID_AA64MMFR2_EL1_VARANGE_SHIFT 16
#define AARCH64_ID_AA64MMFR2_EL1_VARANGE_MASK 0xf0000U
#define AARCH64_ID_AA64MMFR2_EL1_VARANGE_GET( _reg ) \
  ( ( ( _reg ) >> 16 ) & 0xfU )

#define AARCH64_ID_AA64MMFR2_EL1_CCIDX( _val ) ( ( uint64_t ) ( _val ) << 20 )
#define AARCH64_ID_AA64MMFR2_EL1_CCIDX_SHIFT 20
#define AARCH64_ID_AA64MMFR2_EL1_CCIDX_MASK 0xf00000U
#define AARCH64_ID_AA64MMFR2_EL1_CCIDX_GET( _reg ) \
  ( ( ( _reg ) >> 20 ) & 0xfU )

#define AARCH64_ID_AA64MMFR2_EL1_NV( _val ) ( ( uint64_t ) ( _val ) << 24 )
#define AARCH64_ID_AA64MMFR2_EL1_NV_SHIFT 24
#define AARCH64_ID_AA64MMFR2_EL1_NV_MASK 0xf000000U
#define AARCH64_ID_AA64MMFR2_EL1_NV_GET( _reg ) \
  ( ( ( _reg ) >> 24 ) & 0xfU )

#define AARCH64_ID_AA64MMFR2_EL1_ST( _val ) ( ( uint64_t ) ( _val ) << 28 )
#define AARCH64_ID_AA64MMFR2_EL1_ST_SHIFT 28
#define AARCH64_ID_AA64MMFR2_EL1_ST_MASK 0xf0000000U
#define AARCH64_ID_AA64MMFR2_EL1_ST_GET( _reg ) \
  ( ( ( _reg ) >> 28 ) & 0xfU )

#define AARCH64_ID_AA64MMFR2_EL1_AT( _val ) ( ( uint64_t ) ( _val ) << 32 )
#define AARCH64_ID_AA64MMFR2_EL1_AT_SHIFT 32
#define AARCH64_ID_AA64MMFR2_EL1_AT_MASK 0xf00000000ULL
#define AARCH64_ID_AA64MMFR2_EL1_AT_GET( _reg ) \
  ( ( ( _reg ) >> 32 ) & 0xfULL )

#define AARCH64_ID_AA64MMFR2_EL1_IDS( _val ) ( ( uint64_t ) ( _val ) << 36 )
#define AARCH64_ID_AA64MMFR2_EL1_IDS_SHIFT 36
#define AARCH64_ID_AA64MMFR2_EL1_IDS_MASK 0xf000000000ULL
#define AARCH64_ID_AA64MMFR2_EL1_IDS_GET( _reg ) \
  ( ( ( _reg ) >> 36 ) & 0xfULL )

#define AARCH64_ID_AA64MMFR2_EL1_FWB( _val ) ( ( uint64_t ) ( _val ) << 40 )
#define AARCH64_ID_AA64MMFR2_EL1_FWB_SHIFT 40
#define AARCH64_ID_AA64MMFR2_EL1_FWB_MASK 0xf0000000000ULL
#define AARCH64_ID_AA64MMFR2_EL1_FWB_GET( _reg ) \
  ( ( ( _reg ) >> 40 ) & 0xfULL )

#define AARCH64_ID_AA64MMFR2_EL1_TTL( _val ) ( ( uint64_t ) ( _val ) << 48 )
#define AARCH64_ID_AA64MMFR2_EL1_TTL_SHIFT 48
#define AARCH64_ID_AA64MMFR2_EL1_TTL_MASK 0xf000000000000ULL
#define AARCH64_ID_AA64MMFR2_EL1_TTL_GET( _reg ) \
  ( ( ( _reg ) >> 48 ) & 0xfULL )

#define AARCH64_ID_AA64MMFR2_EL1_BBM( _val ) ( ( uint64_t ) ( _val ) << 52 )
#define AARCH64_ID_AA64MMFR2_EL1_BBM_SHIFT 52
#define AARCH64_ID_AA64MMFR2_EL1_BBM_MASK 0xf0000000000000ULL
#define AARCH64_ID_AA64MMFR2_EL1_BBM_GET( _reg ) \
  ( ( ( _reg ) >> 52 ) & 0xfULL )

#define AARCH64_ID_AA64MMFR2_EL1_EVT( _val ) ( ( uint64_t ) ( _val ) << 56 )
#define AARCH64_ID_AA64MMFR2_EL1_EVT_SHIFT 56
#define AARCH64_ID_AA64MMFR2_EL1_EVT_MASK 0xf00000000000000ULL
#define AARCH64_ID_AA64MMFR2_EL1_EVT_GET( _reg ) \
  ( ( ( _reg ) >> 56 ) & 0xfULL )

#define AARCH64_ID_AA64MMFR2_EL1_E0PD( _val ) ( ( uint64_t ) ( _val ) << 60 )
#define AARCH64_ID_AA64MMFR2_EL1_E0PD_SHIFT 60
#define AARCH64_ID_AA64MMFR2_EL1_E0PD_MASK 0xf000000000000000ULL
#define AARCH64_ID_AA64MMFR2_EL1_E0PD_GET( _reg ) \
  ( ( ( _reg ) >> 60 ) & 0xfULL )
3204
/* Returns the value of ID_AA64MMFR2_EL1 read with the MRS instruction. */
static inline uint64_t _AArch64_Read_id_aa64mmfr2_el1( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, ID_AA64MMFR2_EL1" : "=&r" ( value ) : : "memory"
  );

  return value;
}
3215
/* ID_AA64PFR0_EL1, AArch64 Processor Feature Register 0 */

/*
 * Each field is 4 bits wide.  The field macros place a field value at its
 * bit position; the argument is cast to uint64_t so that shifts by 32 or
 * more are well defined even when a 32-bit value is passed.  The _GET
 * macros extract the field from a value read from the register.
 */
#define AARCH64_ID_AA64PFR0_EL1_EL0( _val ) ( ( uint64_t ) ( _val ) << 0 )
#define AARCH64_ID_AA64PFR0_EL1_EL0_SHIFT 0
#define AARCH64_ID_AA64PFR0_EL1_EL0_MASK 0xfU
#define AARCH64_ID_AA64PFR0_EL1_EL0_GET( _reg ) \
  ( ( ( _reg ) >> 0 ) & 0xfU )

#define AARCH64_ID_AA64PFR0_EL1_EL1( _val ) ( ( uint64_t ) ( _val ) << 4 )
#define AARCH64_ID_AA64PFR0_EL1_EL1_SHIFT 4
#define AARCH64_ID_AA64PFR0_EL1_EL1_MASK 0xf0U
#define AARCH64_ID_AA64PFR0_EL1_EL1_GET( _reg ) \
  ( ( ( _reg ) >> 4 ) & 0xfU )

#define AARCH64_ID_AA64PFR0_EL1_EL2( _val ) ( ( uint64_t ) ( _val ) << 8 )
#define AARCH64_ID_AA64PFR0_EL1_EL2_SHIFT 8
#define AARCH64_ID_AA64PFR0_EL1_EL2_MASK 0xf00U
#define AARCH64_ID_AA64PFR0_EL1_EL2_GET( _reg ) \
  ( ( ( _reg ) >> 8 ) & 0xfU )

#define AARCH64_ID_AA64PFR0_EL1_EL3( _val ) ( ( uint64_t ) ( _val ) << 12 )
#define AARCH64_ID_AA64PFR0_EL1_EL3_SHIFT 12
#define AARCH64_ID_AA64PFR0_EL1_EL3_MASK 0xf000U
#define AARCH64_ID_AA64PFR0_EL1_EL3_GET( _reg ) \
  ( ( ( _reg ) >> 12 ) & 0xfU )

#define AARCH64_ID_AA64PFR0_EL1_FP( _val ) ( ( uint64_t ) ( _val ) << 16 )
#define AARCH64_ID_AA64PFR0_EL1_FP_SHIFT 16
#define AARCH64_ID_AA64PFR0_EL1_FP_MASK 0xf0000U
#define AARCH64_ID_AA64PFR0_EL1_FP_GET( _reg ) \
  ( ( ( _reg ) >> 16 ) & 0xfU )

#define AARCH64_ID_AA64PFR0_EL1_ADVSIMD( _val ) ( ( uint64_t ) ( _val ) << 20 )
#define AARCH64_ID_AA64PFR0_EL1_ADVSIMD_SHIFT 20
#define AARCH64_ID_AA64PFR0_EL1_ADVSIMD_MASK 0xf00000U
#define AARCH64_ID_AA64PFR0_EL1_ADVSIMD_GET( _reg ) \
  ( ( ( _reg ) >> 20 ) & 0xfU )

#define AARCH64_ID_AA64PFR0_EL1_GIC( _val ) ( ( uint64_t ) ( _val ) << 24 )
#define AARCH64_ID_AA64PFR0_EL1_GIC_SHIFT 24
#define AARCH64_ID_AA64PFR0_EL1_GIC_MASK 0xf000000U
#define AARCH64_ID_AA64PFR0_EL1_GIC_GET( _reg ) \
  ( ( ( _reg ) >> 24 ) & 0xfU )

#define AARCH64_ID_AA64PFR0_EL1_RAS( _val ) ( ( uint64_t ) ( _val ) << 28 )
#define AARCH64_ID_AA64PFR0_EL1_RAS_SHIFT 28
#define AARCH64_ID_AA64PFR0_EL1_RAS_MASK 0xf0000000U
#define AARCH64_ID_AA64PFR0_EL1_RAS_GET( _reg ) \
  ( ( ( _reg ) >> 28 ) & 0xfU )

#define AARCH64_ID_AA64PFR0_EL1_SVE( _val ) ( ( uint64_t ) ( _val ) << 32 )
#define AARCH64_ID_AA64PFR0_EL1_SVE_SHIFT 32
#define AARCH64_ID_AA64PFR0_EL1_SVE_MASK 0xf00000000ULL
#define AARCH64_ID_AA64PFR0_EL1_SVE_GET( _reg ) \
  ( ( ( _reg ) >> 32 ) & 0xfULL )

#define AARCH64_ID_AA64PFR0_EL1_SEL2( _val ) ( ( uint64_t ) ( _val ) << 36 )
#define AARCH64_ID_AA64PFR0_EL1_SEL2_SHIFT 36
#define AARCH64_ID_AA64PFR0_EL1_SEL2_MASK 0xf000000000ULL
#define AARCH64_ID_AA64PFR0_EL1_SEL2_GET( _reg ) \
  ( ( ( _reg ) >> 36 ) & 0xfULL )

#define AARCH64_ID_AA64PFR0_EL1_MPAM( _val ) ( ( uint64_t ) ( _val ) << 40 )
#define AARCH64_ID_AA64PFR0_EL1_MPAM_SHIFT 40
#define AARCH64_ID_AA64PFR0_EL1_MPAM_MASK 0xf0000000000ULL
#define AARCH64_ID_AA64PFR0_EL1_MPAM_GET( _reg ) \
  ( ( ( _reg ) >> 40 ) & 0xfULL )

#define AARCH64_ID_AA64PFR0_EL1_AMU( _val ) ( ( uint64_t ) ( _val ) << 44 )
#define AARCH64_ID_AA64PFR0_EL1_AMU_SHIFT 44
#define AARCH64_ID_AA64PFR0_EL1_AMU_MASK 0xf00000000000ULL
#define AARCH64_ID_AA64PFR0_EL1_AMU_GET( _reg ) \
  ( ( ( _reg ) >> 44 ) & 0xfULL )

#define AARCH64_ID_AA64PFR0_EL1_DIT( _val ) ( ( uint64_t ) ( _val ) << 48 )
#define AARCH64_ID_AA64PFR0_EL1_DIT_SHIFT 48
#define AARCH64_ID_AA64PFR0_EL1_DIT_MASK 0xf000000000000ULL
#define AARCH64_ID_AA64PFR0_EL1_DIT_GET( _reg ) \
  ( ( ( _reg ) >> 48 ) & 0xfULL )

#define AARCH64_ID_AA64PFR0_EL1_CSV2( _val ) ( ( uint64_t ) ( _val ) << 56 )
#define AARCH64_ID_AA64PFR0_EL1_CSV2_SHIFT 56
#define AARCH64_ID_AA64PFR0_EL1_CSV2_MASK 0xf00000000000000ULL
#define AARCH64_ID_AA64PFR0_EL1_CSV2_GET( _reg ) \
  ( ( ( _reg ) >> 56 ) & 0xfULL )

#define AARCH64_ID_AA64PFR0_EL1_CSV3( _val ) ( ( uint64_t ) ( _val ) << 60 )
#define AARCH64_ID_AA64PFR0_EL1_CSV3_SHIFT 60
#define AARCH64_ID_AA64PFR0_EL1_CSV3_MASK 0xf000000000000000ULL
#define AARCH64_ID_AA64PFR0_EL1_CSV3_GET( _reg ) \
  ( ( ( _reg ) >> 60 ) & 0xfULL )
3307
/* Returns the value of ID_AA64PFR0_EL1 read with the MRS instruction. */
static inline uint64_t _AArch64_Read_id_aa64pfr0_el1( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, ID_AA64PFR0_EL1" : "=&r" ( value ) : : "memory"
  );

  return value;
}
3318
/* ID_AA64PFR1_EL1, AArch64 Processor Feature Register 1 */

/*
 * Each field is 4 bits wide.  The field macros place a field value at its
 * bit position (the argument is cast to uint64_t so the shift is well
 * defined for any integer argument); the _GET macros extract the field
 * from a value read from the register.
 */
#define AARCH64_ID_AA64PFR1_EL1_BT( _val ) ( ( uint64_t ) ( _val ) << 0 )
#define AARCH64_ID_AA64PFR1_EL1_BT_SHIFT 0
#define AARCH64_ID_AA64PFR1_EL1_BT_MASK 0xfU
#define AARCH64_ID_AA64PFR1_EL1_BT_GET( _reg ) \
  ( ( ( _reg ) >> 0 ) & 0xfU )

#define AARCH64_ID_AA64PFR1_EL1_SSBS( _val ) ( ( uint64_t ) ( _val ) << 4 )
#define AARCH64_ID_AA64PFR1_EL1_SSBS_SHIFT 4
#define AARCH64_ID_AA64PFR1_EL1_SSBS_MASK 0xf0U
#define AARCH64_ID_AA64PFR1_EL1_SSBS_GET( _reg ) \
  ( ( ( _reg ) >> 4 ) & 0xfU )

#define AARCH64_ID_AA64PFR1_EL1_MTE( _val ) ( ( uint64_t ) ( _val ) << 8 )
#define AARCH64_ID_AA64PFR1_EL1_MTE_SHIFT 8
#define AARCH64_ID_AA64PFR1_EL1_MTE_MASK 0xf00U
#define AARCH64_ID_AA64PFR1_EL1_MTE_GET( _reg ) \
  ( ( ( _reg ) >> 8 ) & 0xfU )

#define AARCH64_ID_AA64PFR1_EL1_RAS_FRAC( _val ) ( ( uint64_t ) ( _val ) << 12 )
#define AARCH64_ID_AA64PFR1_EL1_RAS_FRAC_SHIFT 12
#define AARCH64_ID_AA64PFR1_EL1_RAS_FRAC_MASK 0xf000U
#define AARCH64_ID_AA64PFR1_EL1_RAS_FRAC_GET( _reg ) \
  ( ( ( _reg ) >> 12 ) & 0xfU )

#define AARCH64_ID_AA64PFR1_EL1_MPAM_FRAC( _val ) ( ( uint64_t ) ( _val ) << 16 )
#define AARCH64_ID_AA64PFR1_EL1_MPAM_FRAC_SHIFT 16
#define AARCH64_ID_AA64PFR1_EL1_MPAM_FRAC_MASK 0xf0000U
#define AARCH64_ID_AA64PFR1_EL1_MPAM_FRAC_GET( _reg ) \
  ( ( ( _reg ) >> 16 ) & 0xfU )
3350
/* Returns the value of ID_AA64PFR1_EL1 read with the MRS instruction. */
static inline uint64_t _AArch64_Read_id_aa64pfr1_el1( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, ID_AA64PFR1_EL1" : "=&r" ( value ) : : "memory"
  );

  return value;
}
3361
/* ID_AFR0_EL1, AArch64 Auxiliary Feature Register 0 */

/* Returns the value of ID_AFR0_EL1 read with the MRS instruction. */
static inline uint64_t _AArch64_Read_id_afr0_el1( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, ID_AFR0_EL1" : "=&r" ( value ) : : "memory"
  );

  return value;
}
3374
/* ID_DFR0_EL1, AArch64 Debug Feature Register 0 */

/*
 * Each field is 4 bits wide.  The field macros place a field value at its
 * bit position (the argument is cast to uint64_t so the shift is well
 * defined for any integer argument); the _GET macros extract the field
 * from a value read from the register.
 */
#define AARCH64_ID_DFR0_EL1_COPDBG( _val ) ( ( uint64_t ) ( _val ) << 0 )
#define AARCH64_ID_DFR0_EL1_COPDBG_SHIFT 0
#define AARCH64_ID_DFR0_EL1_COPDBG_MASK 0xfU
#define AARCH64_ID_DFR0_EL1_COPDBG_GET( _reg ) \
  ( ( ( _reg ) >> 0 ) & 0xfU )

#define AARCH64_ID_DFR0_EL1_COPSDBG( _val ) ( ( uint64_t ) ( _val ) << 4 )
#define AARCH64_ID_DFR0_EL1_COPSDBG_SHIFT 4
#define AARCH64_ID_DFR0_EL1_COPSDBG_MASK 0xf0U
#define AARCH64_ID_DFR0_EL1_COPSDBG_GET( _reg ) \
  ( ( ( _reg ) >> 4 ) & 0xfU )

#define AARCH64_ID_DFR0_EL1_MMAPDBG( _val ) ( ( uint64_t ) ( _val ) << 8 )
#define AARCH64_ID_DFR0_EL1_MMAPDBG_SHIFT 8
#define AARCH64_ID_DFR0_EL1_MMAPDBG_MASK 0xf00U
#define AARCH64_ID_DFR0_EL1_MMAPDBG_GET( _reg ) \
  ( ( ( _reg ) >> 8 ) & 0xfU )

#define AARCH64_ID_DFR0_EL1_COPTRC( _val ) ( ( uint64_t ) ( _val ) << 12 )
#define AARCH64_ID_DFR0_EL1_COPTRC_SHIFT 12
#define AARCH64_ID_DFR0_EL1_COPTRC_MASK 0xf000U
#define AARCH64_ID_DFR0_EL1_COPTRC_GET( _reg ) \
  ( ( ( _reg ) >> 12 ) & 0xfU )

#define AARCH64_ID_DFR0_EL1_MMAPTRC( _val ) ( ( uint64_t ) ( _val ) << 16 )
#define AARCH64_ID_DFR0_EL1_MMAPTRC_SHIFT 16
#define AARCH64_ID_DFR0_EL1_MMAPTRC_MASK 0xf0000U
#define AARCH64_ID_DFR0_EL1_MMAPTRC_GET( _reg ) \
  ( ( ( _reg ) >> 16 ) & 0xfU )

#define AARCH64_ID_DFR0_EL1_MPROFDBG( _val ) ( ( uint64_t ) ( _val ) << 20 )
#define AARCH64_ID_DFR0_EL1_MPROFDBG_SHIFT 20
#define AARCH64_ID_DFR0_EL1_MPROFDBG_MASK 0xf00000U
#define AARCH64_ID_DFR0_EL1_MPROFDBG_GET( _reg ) \
  ( ( ( _reg ) >> 20 ) & 0xfU )

#define AARCH64_ID_DFR0_EL1_PERFMON( _val ) ( ( uint64_t ) ( _val ) << 24 )
#define AARCH64_ID_DFR0_EL1_PERFMON_SHIFT 24
#define AARCH64_ID_DFR0_EL1_PERFMON_MASK 0xf000000U
#define AARCH64_ID_DFR0_EL1_PERFMON_GET( _reg ) \
  ( ( ( _reg ) >> 24 ) & 0xfU )

#define AARCH64_ID_DFR0_EL1_TRACEFILT( _val ) ( ( uint64_t ) ( _val ) << 28 )
#define AARCH64_ID_DFR0_EL1_TRACEFILT_SHIFT 28
#define AARCH64_ID_DFR0_EL1_TRACEFILT_MASK 0xf0000000U
#define AARCH64_ID_DFR0_EL1_TRACEFILT_GET( _reg ) \
  ( ( ( _reg ) >> 28 ) & 0xfU )
3424
/* Returns the value of ID_DFR0_EL1 read with the MRS instruction. */
static inline uint64_t _AArch64_Read_id_dfr0_el1( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, ID_DFR0_EL1" : "=&r" ( value ) : : "memory"
  );

  return value;
}
3435
/* ID_DFR1_EL1, Debug Feature Register 1 */

/*
 * MTPMU occupies bits [3:0].  The field macro casts its argument to
 * uint64_t for consistency with the other field helpers; the _GET macro
 * extracts the field from a value read from the register.
 */
#define AARCH64_ID_DFR1_EL1_MTPMU( _val ) ( ( uint64_t ) ( _val ) << 0 )
#define AARCH64_ID_DFR1_EL1_MTPMU_SHIFT 0
#define AARCH64_ID_DFR1_EL1_MTPMU_MASK 0xfU
#define AARCH64_ID_DFR1_EL1_MTPMU_GET( _reg ) \
  ( ( ( _reg ) >> 0 ) & 0xfU )
3443
/* Returns the value of ID_DFR1_EL1 read with the MRS instruction. */
static inline uint64_t _AArch64_Read_id_dfr1_el1( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, ID_DFR1_EL1" : "=&r" ( value ) : : "memory"
  );

  return value;
}
3454
/* ID_ISAR0_EL1, AArch64 Instruction Set Attribute Register 0 */

/*
 * Each field is 4 bits wide.  The field macros place a field value at its
 * bit position (the argument is cast to uint64_t so the shift is well
 * defined for any integer argument); the _GET macros extract the field
 * from a value read from the register.
 */
#define AARCH64_ID_ISAR0_EL1_SWAP( _val ) ( ( uint64_t ) ( _val ) << 0 )
#define AARCH64_ID_ISAR0_EL1_SWAP_SHIFT 0
#define AARCH64_ID_ISAR0_EL1_SWAP_MASK 0xfU
#define AARCH64_ID_ISAR0_EL1_SWAP_GET( _reg ) \
  ( ( ( _reg ) >> 0 ) & 0xfU )

#define AARCH64_ID_ISAR0_EL1_BITCOUNT( _val ) ( ( uint64_t ) ( _val ) << 4 )
#define AARCH64_ID_ISAR0_EL1_BITCOUNT_SHIFT 4
#define AARCH64_ID_ISAR0_EL1_BITCOUNT_MASK 0xf0U
#define AARCH64_ID_ISAR0_EL1_BITCOUNT_GET( _reg ) \
  ( ( ( _reg ) >> 4 ) & 0xfU )

#define AARCH64_ID_ISAR0_EL1_BITFIELD( _val ) ( ( uint64_t ) ( _val ) << 8 )
#define AARCH64_ID_ISAR0_EL1_BITFIELD_SHIFT 8
#define AARCH64_ID_ISAR0_EL1_BITFIELD_MASK 0xf00U
#define AARCH64_ID_ISAR0_EL1_BITFIELD_GET( _reg ) \
  ( ( ( _reg ) >> 8 ) & 0xfU )

#define AARCH64_ID_ISAR0_EL1_CMPBRANCH( _val ) ( ( uint64_t ) ( _val ) << 12 )
#define AARCH64_ID_ISAR0_EL1_CMPBRANCH_SHIFT 12
#define AARCH64_ID_ISAR0_EL1_CMPBRANCH_MASK 0xf000U
#define AARCH64_ID_ISAR0_EL1_CMPBRANCH_GET( _reg ) \
  ( ( ( _reg ) >> 12 ) & 0xfU )

#define AARCH64_ID_ISAR0_EL1_COPROC( _val ) ( ( uint64_t ) ( _val ) << 16 )
#define AARCH64_ID_ISAR0_EL1_COPROC_SHIFT 16
#define AARCH64_ID_ISAR0_EL1_COPROC_MASK 0xf0000U
#define AARCH64_ID_ISAR0_EL1_COPROC_GET( _reg ) \
  ( ( ( _reg ) >> 16 ) & 0xfU )

#define AARCH64_ID_ISAR0_EL1_DEBUG( _val ) ( ( uint64_t ) ( _val ) << 20 )
#define AARCH64_ID_ISAR0_EL1_DEBUG_SHIFT 20
#define AARCH64_ID_ISAR0_EL1_DEBUG_MASK 0xf00000U
#define AARCH64_ID_ISAR0_EL1_DEBUG_GET( _reg ) \
  ( ( ( _reg ) >> 20 ) & 0xfU )

#define AARCH64_ID_ISAR0_EL1_DIVIDE( _val ) ( ( uint64_t ) ( _val ) << 24 )
#define AARCH64_ID_ISAR0_EL1_DIVIDE_SHIFT 24
#define AARCH64_ID_ISAR0_EL1_DIVIDE_MASK 0xf000000U
#define AARCH64_ID_ISAR0_EL1_DIVIDE_GET( _reg ) \
  ( ( ( _reg ) >> 24 ) & 0xfU )
3498
/* Returns the value of ID_ISAR0_EL1 read with the MRS instruction. */
static inline uint64_t _AArch64_Read_id_isar0_el1( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, ID_ISAR0_EL1" : "=&r" ( value ) : : "memory"
  );

  return value;
}
3509
/* ID_ISAR1_EL1, AArch64 Instruction Set Attribute Register 1 */

/*
 * Each field is 4 bits wide.  The field macros place a field value at its
 * bit position (the argument is cast to uint64_t so the shift is well
 * defined for any integer argument); the _GET macros extract the field
 * from a value read from the register.
 */
#define AARCH64_ID_ISAR1_EL1_ENDIAN( _val ) ( ( uint64_t ) ( _val ) << 0 )
#define AARCH64_ID_ISAR1_EL1_ENDIAN_SHIFT 0
#define AARCH64_ID_ISAR1_EL1_ENDIAN_MASK 0xfU
#define AARCH64_ID_ISAR1_EL1_ENDIAN_GET( _reg ) \
  ( ( ( _reg ) >> 0 ) & 0xfU )

#define AARCH64_ID_ISAR1_EL1_EXCEPT( _val ) ( ( uint64_t ) ( _val ) << 4 )
#define AARCH64_ID_ISAR1_EL1_EXCEPT_SHIFT 4
#define AARCH64_ID_ISAR1_EL1_EXCEPT_MASK 0xf0U
#define AARCH64_ID_ISAR1_EL1_EXCEPT_GET( _reg ) \
  ( ( ( _reg ) >> 4 ) & 0xfU )

#define AARCH64_ID_ISAR1_EL1_EXCEPT_AR( _val ) ( ( uint64_t ) ( _val ) << 8 )
#define AARCH64_ID_ISAR1_EL1_EXCEPT_AR_SHIFT 8
#define AARCH64_ID_ISAR1_EL1_EXCEPT_AR_MASK 0xf00U
#define AARCH64_ID_ISAR1_EL1_EXCEPT_AR_GET( _reg ) \
  ( ( ( _reg ) >> 8 ) & 0xfU )

#define AARCH64_ID_ISAR1_EL1_EXTEND( _val ) ( ( uint64_t ) ( _val ) << 12 )
#define AARCH64_ID_ISAR1_EL1_EXTEND_SHIFT 12
#define AARCH64_ID_ISAR1_EL1_EXTEND_MASK 0xf000U
#define AARCH64_ID_ISAR1_EL1_EXTEND_GET( _reg ) \
  ( ( ( _reg ) >> 12 ) & 0xfU )

#define AARCH64_ID_ISAR1_EL1_IFTHEN( _val ) ( ( uint64_t ) ( _val ) << 16 )
#define AARCH64_ID_ISAR1_EL1_IFTHEN_SHIFT 16
#define AARCH64_ID_ISAR1_EL1_IFTHEN_MASK 0xf0000U
#define AARCH64_ID_ISAR1_EL1_IFTHEN_GET( _reg ) \
  ( ( ( _reg ) >> 16 ) & 0xfU )

#define AARCH64_ID_ISAR1_EL1_IMMEDIATE( _val ) ( ( uint64_t ) ( _val ) << 20 )
#define AARCH64_ID_ISAR1_EL1_IMMEDIATE_SHIFT 20
#define AARCH64_ID_ISAR1_EL1_IMMEDIATE_MASK 0xf00000U
#define AARCH64_ID_ISAR1_EL1_IMMEDIATE_GET( _reg ) \
  ( ( ( _reg ) >> 20 ) & 0xfU )

#define AARCH64_ID_ISAR1_EL1_INTERWORK( _val ) ( ( uint64_t ) ( _val ) << 24 )
#define AARCH64_ID_ISAR1_EL1_INTERWORK_SHIFT 24
#define AARCH64_ID_ISAR1_EL1_INTERWORK_MASK 0xf000000U
#define AARCH64_ID_ISAR1_EL1_INTERWORK_GET( _reg ) \
  ( ( ( _reg ) >> 24 ) & 0xfU )

#define AARCH64_ID_ISAR1_EL1_JAZELLE( _val ) ( ( uint64_t ) ( _val ) << 28 )
#define AARCH64_ID_ISAR1_EL1_JAZELLE_SHIFT 28
#define AARCH64_ID_ISAR1_EL1_JAZELLE_MASK 0xf0000000U
#define AARCH64_ID_ISAR1_EL1_JAZELLE_GET( _reg ) \
  ( ( ( _reg ) >> 28 ) & 0xfU )
3559
/* Returns the value of ID_ISAR1_EL1 read with the MRS instruction. */
static inline uint64_t _AArch64_Read_id_isar1_el1( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, ID_ISAR1_EL1" : "=&r" ( value ) : : "memory"
  );

  return value;
}
3570
/* ID_ISAR2_EL1, AArch64 Instruction Set Attribute Register 2 */

/*
 * Each field is 4 bits wide.  The field macros place a field value at its
 * bit position (the argument is cast to uint64_t so the shift is well
 * defined for any integer argument); the _GET macros extract the field
 * from a value read from the register.
 */
#define AARCH64_ID_ISAR2_EL1_LOADSTORE( _val ) ( ( uint64_t ) ( _val ) << 0 )
#define AARCH64_ID_ISAR2_EL1_LOADSTORE_SHIFT 0
#define AARCH64_ID_ISAR2_EL1_LOADSTORE_MASK 0xfU
#define AARCH64_ID_ISAR2_EL1_LOADSTORE_GET( _reg ) \
  ( ( ( _reg ) >> 0 ) & 0xfU )

#define AARCH64_ID_ISAR2_EL1_MEMHINT( _val ) ( ( uint64_t ) ( _val ) << 4 )
#define AARCH64_ID_ISAR2_EL1_MEMHINT_SHIFT 4
#define AARCH64_ID_ISAR2_EL1_MEMHINT_MASK 0xf0U
#define AARCH64_ID_ISAR2_EL1_MEMHINT_GET( _reg ) \
  ( ( ( _reg ) >> 4 ) & 0xfU )

#define AARCH64_ID_ISAR2_EL1_MULTIACCESSINT( _val ) ( ( uint64_t ) ( _val ) << 8 )
#define AARCH64_ID_ISAR2_EL1_MULTIACCESSINT_SHIFT 8
#define AARCH64_ID_ISAR2_EL1_MULTIACCESSINT_MASK 0xf00U
#define AARCH64_ID_ISAR2_EL1_MULTIACCESSINT_GET( _reg ) \
  ( ( ( _reg ) >> 8 ) & 0xfU )

#define AARCH64_ID_ISAR2_EL1_MULT( _val ) ( ( uint64_t ) ( _val ) << 12 )
#define AARCH64_ID_ISAR2_EL1_MULT_SHIFT 12
#define AARCH64_ID_ISAR2_EL1_MULT_MASK 0xf000U
#define AARCH64_ID_ISAR2_EL1_MULT_GET( _reg ) \
  ( ( ( _reg ) >> 12 ) & 0xfU )

#define AARCH64_ID_ISAR2_EL1_MULTS( _val ) ( ( uint64_t ) ( _val ) << 16 )
#define AARCH64_ID_ISAR2_EL1_MULTS_SHIFT 16
#define AARCH64_ID_ISAR2_EL1_MULTS_MASK 0xf0000U
#define AARCH64_ID_ISAR2_EL1_MULTS_GET( _reg ) \
  ( ( ( _reg ) >> 16 ) & 0xfU )

#define AARCH64_ID_ISAR2_EL1_MULTU( _val ) ( ( uint64_t ) ( _val ) << 20 )
#define AARCH64_ID_ISAR2_EL1_MULTU_SHIFT 20
#define AARCH64_ID_ISAR2_EL1_MULTU_MASK 0xf00000U
#define AARCH64_ID_ISAR2_EL1_MULTU_GET( _reg ) \
  ( ( ( _reg ) >> 20 ) & 0xfU )

#define AARCH64_ID_ISAR2_EL1_PSR_AR( _val ) ( ( uint64_t ) ( _val ) << 24 )
#define AARCH64_ID_ISAR2_EL1_PSR_AR_SHIFT 24
#define AARCH64_ID_ISAR2_EL1_PSR_AR_MASK 0xf000000U
#define AARCH64_ID_ISAR2_EL1_PSR_AR_GET( _reg ) \
  ( ( ( _reg ) >> 24 ) & 0xfU )

#define AARCH64_ID_ISAR2_EL1_REVERSAL( _val ) ( ( uint64_t ) ( _val ) << 28 )
#define AARCH64_ID_ISAR2_EL1_REVERSAL_SHIFT 28
#define AARCH64_ID_ISAR2_EL1_REVERSAL_MASK 0xf0000000U
#define AARCH64_ID_ISAR2_EL1_REVERSAL_GET( _reg ) \
  ( ( ( _reg ) >> 28 ) & 0xfU )
3620
/* Returns the value of ID_ISAR2_EL1 read with the MRS instruction. */
static inline uint64_t _AArch64_Read_id_isar2_el1( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, ID_ISAR2_EL1" : "=&r" ( value ) : : "memory"
  );

  return value;
}
3631
/* ID_ISAR3_EL1, AArch64 Instruction Set Attribute Register 3 */

/*
 * Each field is 4 bits wide.  The field macros place a field value at its
 * bit position (the argument is cast to uint64_t so the shift is well
 * defined for any integer argument); the _GET macros extract the field
 * from a value read from the register.
 */
#define AARCH64_ID_ISAR3_EL1_SATURATE( _val ) ( ( uint64_t ) ( _val ) << 0 )
#define AARCH64_ID_ISAR3_EL1_SATURATE_SHIFT 0
#define AARCH64_ID_ISAR3_EL1_SATURATE_MASK 0xfU
#define AARCH64_ID_ISAR3_EL1_SATURATE_GET( _reg ) \
  ( ( ( _reg ) >> 0 ) & 0xfU )

#define AARCH64_ID_ISAR3_EL1_SIMD( _val ) ( ( uint64_t ) ( _val ) << 4 )
#define AARCH64_ID_ISAR3_EL1_SIMD_SHIFT 4
#define AARCH64_ID_ISAR3_EL1_SIMD_MASK 0xf0U
#define AARCH64_ID_ISAR3_EL1_SIMD_GET( _reg ) \
  ( ( ( _reg ) >> 4 ) & 0xfU )

#define AARCH64_ID_ISAR3_EL1_SVC( _val ) ( ( uint64_t ) ( _val ) << 8 )
#define AARCH64_ID_ISAR3_EL1_SVC_SHIFT 8
#define AARCH64_ID_ISAR3_EL1_SVC_MASK 0xf00U
#define AARCH64_ID_ISAR3_EL1_SVC_GET( _reg ) \
  ( ( ( _reg ) >> 8 ) & 0xfU )

#define AARCH64_ID_ISAR3_EL1_SYNCHPRIM( _val ) ( ( uint64_t ) ( _val ) << 12 )
#define AARCH64_ID_ISAR3_EL1_SYNCHPRIM_SHIFT 12
#define AARCH64_ID_ISAR3_EL1_SYNCHPRIM_MASK 0xf000U
#define AARCH64_ID_ISAR3_EL1_SYNCHPRIM_GET( _reg ) \
  ( ( ( _reg ) >> 12 ) & 0xfU )

#define AARCH64_ID_ISAR3_EL1_TABBRANCH( _val ) ( ( uint64_t ) ( _val ) << 16 )
#define AARCH64_ID_ISAR3_EL1_TABBRANCH_SHIFT 16
#define AARCH64_ID_ISAR3_EL1_TABBRANCH_MASK 0xf0000U
#define AARCH64_ID_ISAR3_EL1_TABBRANCH_GET( _reg ) \
  ( ( ( _reg ) >> 16 ) & 0xfU )

#define AARCH64_ID_ISAR3_EL1_T32COPY( _val ) ( ( uint64_t ) ( _val ) << 20 )
#define AARCH64_ID_ISAR3_EL1_T32COPY_SHIFT 20
#define AARCH64_ID_ISAR3_EL1_T32COPY_MASK 0xf00000U
#define AARCH64_ID_ISAR3_EL1_T32COPY_GET( _reg ) \
  ( ( ( _reg ) >> 20 ) & 0xfU )

#define AARCH64_ID_ISAR3_EL1_TRUENOP( _val ) ( ( uint64_t ) ( _val ) << 24 )
#define AARCH64_ID_ISAR3_EL1_TRUENOP_SHIFT 24
#define AARCH64_ID_ISAR3_EL1_TRUENOP_MASK 0xf000000U
#define AARCH64_ID_ISAR3_EL1_TRUENOP_GET( _reg ) \
  ( ( ( _reg ) >> 24 ) & 0xfU )

#define AARCH64_ID_ISAR3_EL1_T32EE( _val ) ( ( uint64_t ) ( _val ) << 28 )
#define AARCH64_ID_ISAR3_EL1_T32EE_SHIFT 28
#define AARCH64_ID_ISAR3_EL1_T32EE_MASK 0xf0000000U
#define AARCH64_ID_ISAR3_EL1_T32EE_GET( _reg ) \
  ( ( ( _reg ) >> 28 ) & 0xfU )
3681
/* Returns the value of ID_ISAR3_EL1 read with the MRS instruction. */
static inline uint64_t _AArch64_Read_id_isar3_el1( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, ID_ISAR3_EL1" : "=&r" ( value ) : : "memory"
  );

  return value;
}
3692
/* ID_ISAR4_EL1, AArch64 Instruction Set Attribute Register 4 */

/*
 * Each field is 4 bits wide.  The field macros place a field value at its
 * bit position (the argument is cast to uint64_t so the shift is well
 * defined for any integer argument); the _GET macros extract the field
 * from a value read from the register.
 */
#define AARCH64_ID_ISAR4_EL1_UNPRIV( _val ) ( ( uint64_t ) ( _val ) << 0 )
#define AARCH64_ID_ISAR4_EL1_UNPRIV_SHIFT 0
#define AARCH64_ID_ISAR4_EL1_UNPRIV_MASK 0xfU
#define AARCH64_ID_ISAR4_EL1_UNPRIV_GET( _reg ) \
  ( ( ( _reg ) >> 0 ) & 0xfU )

#define AARCH64_ID_ISAR4_EL1_WITHSHIFTS( _val ) ( ( uint64_t ) ( _val ) << 4 )
#define AARCH64_ID_ISAR4_EL1_WITHSHIFTS_SHIFT 4
#define AARCH64_ID_ISAR4_EL1_WITHSHIFTS_MASK 0xf0U
#define AARCH64_ID_ISAR4_EL1_WITHSHIFTS_GET( _reg ) \
  ( ( ( _reg ) >> 4 ) & 0xfU )

#define AARCH64_ID_ISAR4_EL1_WRITEBACK( _val ) ( ( uint64_t ) ( _val ) << 8 )
#define AARCH64_ID_ISAR4_EL1_WRITEBACK_SHIFT 8
#define AARCH64_ID_ISAR4_EL1_WRITEBACK_MASK 0xf00U
#define AARCH64_ID_ISAR4_EL1_WRITEBACK_GET( _reg ) \
  ( ( ( _reg ) >> 8 ) & 0xfU )

#define AARCH64_ID_ISAR4_EL1_SMC( _val ) ( ( uint64_t ) ( _val ) << 12 )
#define AARCH64_ID_ISAR4_EL1_SMC_SHIFT 12
#define AARCH64_ID_ISAR4_EL1_SMC_MASK 0xf000U
#define AARCH64_ID_ISAR4_EL1_SMC_GET( _reg ) \
  ( ( ( _reg ) >> 12 ) & 0xfU )

#define AARCH64_ID_ISAR4_EL1_BARRIER( _val ) ( ( uint64_t ) ( _val ) << 16 )
#define AARCH64_ID_ISAR4_EL1_BARRIER_SHIFT 16
#define AARCH64_ID_ISAR4_EL1_BARRIER_MASK 0xf0000U
#define AARCH64_ID_ISAR4_EL1_BARRIER_GET( _reg ) \
  ( ( ( _reg ) >> 16 ) & 0xfU )

#define AARCH64_ID_ISAR4_EL1_SYNCHPRIM_FRAC( _val ) ( ( uint64_t ) ( _val ) << 20 )
#define AARCH64_ID_ISAR4_EL1_SYNCHPRIM_FRAC_SHIFT 20
#define AARCH64_ID_ISAR4_EL1_SYNCHPRIM_FRAC_MASK 0xf00000U
#define AARCH64_ID_ISAR4_EL1_SYNCHPRIM_FRAC_GET( _reg ) \
  ( ( ( _reg ) >> 20 ) & 0xfU )

#define AARCH64_ID_ISAR4_EL1_PSR_M( _val ) ( ( uint64_t ) ( _val ) << 24 )
#define AARCH64_ID_ISAR4_EL1_PSR_M_SHIFT 24
#define AARCH64_ID_ISAR4_EL1_PSR_M_MASK 0xf000000U
#define AARCH64_ID_ISAR4_EL1_PSR_M_GET( _reg ) \
  ( ( ( _reg ) >> 24 ) & 0xfU )

#define AARCH64_ID_ISAR4_EL1_SWP_FRAC( _val ) ( ( uint64_t ) ( _val ) << 28 )
#define AARCH64_ID_ISAR4_EL1_SWP_FRAC_SHIFT 28
#define AARCH64_ID_ISAR4_EL1_SWP_FRAC_MASK 0xf0000000U
#define AARCH64_ID_ISAR4_EL1_SWP_FRAC_GET( _reg ) \
  ( ( ( _reg ) >> 28 ) & 0xfU )
3742
/* Returns the value of ID_ISAR4_EL1 read with the MRS instruction. */
static inline uint64_t _AArch64_Read_id_isar4_el1( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, ID_ISAR4_EL1" : "=&r" ( value ) : : "memory"
  );

  return value;
}
3753
/* ID_ISAR5_EL1, AArch64 Instruction Set Attribute Register 5 */

/*
 * Each field is 4 bits wide (bits [23:20] are not allocated here).  The
 * field macros place a field value at its bit position (the argument is
 * cast to uint64_t so the shift is well defined for any integer argument);
 * the _GET macros extract the field from a value read from the register.
 */
#define AARCH64_ID_ISAR5_EL1_SEVL( _val ) ( ( uint64_t ) ( _val ) << 0 )
#define AARCH64_ID_ISAR5_EL1_SEVL_SHIFT 0
#define AARCH64_ID_ISAR5_EL1_SEVL_MASK 0xfU
#define AARCH64_ID_ISAR5_EL1_SEVL_GET( _reg ) \
  ( ( ( _reg ) >> 0 ) & 0xfU )

#define AARCH64_ID_ISAR5_EL1_AES( _val ) ( ( uint64_t ) ( _val ) << 4 )
#define AARCH64_ID_ISAR5_EL1_AES_SHIFT 4
#define AARCH64_ID_ISAR5_EL1_AES_MASK 0xf0U
#define AARCH64_ID_ISAR5_EL1_AES_GET( _reg ) \
  ( ( ( _reg ) >> 4 ) & 0xfU )

#define AARCH64_ID_ISAR5_EL1_SHA1( _val ) ( ( uint64_t ) ( _val ) << 8 )
#define AARCH64_ID_ISAR5_EL1_SHA1_SHIFT 8
#define AARCH64_ID_ISAR5_EL1_SHA1_MASK 0xf00U
#define AARCH64_ID_ISAR5_EL1_SHA1_GET( _reg ) \
  ( ( ( _reg ) >> 8 ) & 0xfU )

#define AARCH64_ID_ISAR5_EL1_SHA2( _val ) ( ( uint64_t ) ( _val ) << 12 )
#define AARCH64_ID_ISAR5_EL1_SHA2_SHIFT 12
#define AARCH64_ID_ISAR5_EL1_SHA2_MASK 0xf000U
#define AARCH64_ID_ISAR5_EL1_SHA2_GET( _reg ) \
  ( ( ( _reg ) >> 12 ) & 0xfU )

#define AARCH64_ID_ISAR5_EL1_CRC32( _val ) ( ( uint64_t ) ( _val ) << 16 )
#define AARCH64_ID_ISAR5_EL1_CRC32_SHIFT 16
#define AARCH64_ID_ISAR5_EL1_CRC32_MASK 0xf0000U
#define AARCH64_ID_ISAR5_EL1_CRC32_GET( _reg ) \
  ( ( ( _reg ) >> 16 ) & 0xfU )

#define AARCH64_ID_ISAR5_EL1_RDM( _val ) ( ( uint64_t ) ( _val ) << 24 )
#define AARCH64_ID_ISAR5_EL1_RDM_SHIFT 24
#define AARCH64_ID_ISAR5_EL1_RDM_MASK 0xf000000U
#define AARCH64_ID_ISAR5_EL1_RDM_GET( _reg ) \
  ( ( ( _reg ) >> 24 ) & 0xfU )

#define AARCH64_ID_ISAR5_EL1_VCMA( _val ) ( ( uint64_t ) ( _val ) << 28 )
#define AARCH64_ID_ISAR5_EL1_VCMA_SHIFT 28
#define AARCH64_ID_ISAR5_EL1_VCMA_MASK 0xf0000000U
#define AARCH64_ID_ISAR5_EL1_VCMA_GET( _reg ) \
  ( ( ( _reg ) >> 28 ) & 0xfU )
3797
/* Returns the value of ID_ISAR5_EL1 read with the MRS instruction. */
static inline uint64_t _AArch64_Read_id_isar5_el1( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, ID_ISAR5_EL1" : "=&r" ( value ) : : "memory"
  );

  return value;
}
3808
/* ID_ISAR6_EL1, AArch64 Instruction Set Attribute Register 6 */

/*
 * Each field is 4 bits wide.  The field macros place a field value at its
 * bit position (the argument is cast to uint64_t so the shift is well
 * defined for any integer argument); the _GET macros extract the field
 * from a value read from the register.
 */
#define AARCH64_ID_ISAR6_EL1_JSCVT( _val ) ( ( uint64_t ) ( _val ) << 0 )
#define AARCH64_ID_ISAR6_EL1_JSCVT_SHIFT 0
#define AARCH64_ID_ISAR6_EL1_JSCVT_MASK 0xfU
#define AARCH64_ID_ISAR6_EL1_JSCVT_GET( _reg ) \
  ( ( ( _reg ) >> 0 ) & 0xfU )

#define AARCH64_ID_ISAR6_EL1_DP( _val ) ( ( uint64_t ) ( _val ) << 4 )
#define AARCH64_ID_ISAR6_EL1_DP_SHIFT 4
#define AARCH64_ID_ISAR6_EL1_DP_MASK 0xf0U
#define AARCH64_ID_ISAR6_EL1_DP_GET( _reg ) \
  ( ( ( _reg ) >> 4 ) & 0xfU )

#define AARCH64_ID_ISAR6_EL1_FHM( _val ) ( ( uint64_t ) ( _val ) << 8 )
#define AARCH64_ID_ISAR6_EL1_FHM_SHIFT 8
#define AARCH64_ID_ISAR6_EL1_FHM_MASK 0xf00U
#define AARCH64_ID_ISAR6_EL1_FHM_GET( _reg ) \
  ( ( ( _reg ) >> 8 ) & 0xfU )

#define AARCH64_ID_ISAR6_EL1_SB( _val ) ( ( uint64_t ) ( _val ) << 12 )
#define AARCH64_ID_ISAR6_EL1_SB_SHIFT 12
#define AARCH64_ID_ISAR6_EL1_SB_MASK 0xf000U
#define AARCH64_ID_ISAR6_EL1_SB_GET( _reg ) \
  ( ( ( _reg ) >> 12 ) & 0xfU )

#define AARCH64_ID_ISAR6_EL1_SPECRES( _val ) ( ( uint64_t ) ( _val ) << 16 )
#define AARCH64_ID_ISAR6_EL1_SPECRES_SHIFT 16
#define AARCH64_ID_ISAR6_EL1_SPECRES_MASK 0xf0000U
#define AARCH64_ID_ISAR6_EL1_SPECRES_GET( _reg ) \
  ( ( ( _reg ) >> 16 ) & 0xfU )

#define AARCH64_ID_ISAR6_EL1_BF16( _val ) ( ( uint64_t ) ( _val ) << 20 )
#define AARCH64_ID_ISAR6_EL1_BF16_SHIFT 20
#define AARCH64_ID_ISAR6_EL1_BF16_MASK 0xf00000U
#define AARCH64_ID_ISAR6_EL1_BF16_GET( _reg ) \
  ( ( ( _reg ) >> 20 ) & 0xfU )

#define AARCH64_ID_ISAR6_EL1_I8MM( _val ) ( ( uint64_t ) ( _val ) << 24 )
#define AARCH64_ID_ISAR6_EL1_I8MM_SHIFT 24
#define AARCH64_ID_ISAR6_EL1_I8MM_MASK 0xf000000U
#define AARCH64_ID_ISAR6_EL1_I8MM_GET( _reg ) \
  ( ( ( _reg ) >> 24 ) & 0xfU )
3852
/* Returns the value of ID_ISAR6_EL1 read with the MRS instruction. */
static inline uint64_t _AArch64_Read_id_isar6_el1( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, ID_ISAR6_EL1" : "=&r" ( value ) : : "memory"
  );

  return value;
}
3863
/* ID_MMFR0_EL1, AArch64 Memory Model Feature Register 0 */

/*
 * Each field is 4 bits wide.  The field macros place a field value at its
 * bit position (the argument is cast to uint64_t so the shift is well
 * defined for any integer argument); the _GET macros extract the field
 * from a value read from the register.
 */
#define AARCH64_ID_MMFR0_EL1_VMSA( _val ) ( ( uint64_t ) ( _val ) << 0 )
#define AARCH64_ID_MMFR0_EL1_VMSA_SHIFT 0
#define AARCH64_ID_MMFR0_EL1_VMSA_MASK 0xfU
#define AARCH64_ID_MMFR0_EL1_VMSA_GET( _reg ) \
  ( ( ( _reg ) >> 0 ) & 0xfU )

#define AARCH64_ID_MMFR0_EL1_PMSA( _val ) ( ( uint64_t ) ( _val ) << 4 )
#define AARCH64_ID_MMFR0_EL1_PMSA_SHIFT 4
#define AARCH64_ID_MMFR0_EL1_PMSA_MASK 0xf0U
#define AARCH64_ID_MMFR0_EL1_PMSA_GET( _reg ) \
  ( ( ( _reg ) >> 4 ) & 0xfU )

#define AARCH64_ID_MMFR0_EL1_OUTERSHR( _val ) ( ( uint64_t ) ( _val ) << 8 )
#define AARCH64_ID_MMFR0_EL1_OUTERSHR_SHIFT 8
#define AARCH64_ID_MMFR0_EL1_OUTERSHR_MASK 0xf00U
#define AARCH64_ID_MMFR0_EL1_OUTERSHR_GET( _reg ) \
  ( ( ( _reg ) >> 8 ) & 0xfU )

#define AARCH64_ID_MMFR0_EL1_SHARELVL( _val ) ( ( uint64_t ) ( _val ) << 12 )
#define AARCH64_ID_MMFR0_EL1_SHARELVL_SHIFT 12
#define AARCH64_ID_MMFR0_EL1_SHARELVL_MASK 0xf000U
#define AARCH64_ID_MMFR0_EL1_SHARELVL_GET( _reg ) \
  ( ( ( _reg ) >> 12 ) & 0xfU )

#define AARCH64_ID_MMFR0_EL1_TCM( _val ) ( ( uint64_t ) ( _val ) << 16 )
#define AARCH64_ID_MMFR0_EL1_TCM_SHIFT 16
#define AARCH64_ID_MMFR0_EL1_TCM_MASK 0xf0000U
#define AARCH64_ID_MMFR0_EL1_TCM_GET( _reg ) \
  ( ( ( _reg ) >> 16 ) & 0xfU )

#define AARCH64_ID_MMFR0_EL1_AUXREG( _val ) ( ( uint64_t ) ( _val ) << 20 )
#define AARCH64_ID_MMFR0_EL1_AUXREG_SHIFT 20
#define AARCH64_ID_MMFR0_EL1_AUXREG_MASK 0xf00000U
#define AARCH64_ID_MMFR0_EL1_AUXREG_GET( _reg ) \
  ( ( ( _reg ) >> 20 ) & 0xfU )

#define AARCH64_ID_MMFR0_EL1_FCSE( _val ) ( ( uint64_t ) ( _val ) << 24 )
#define AARCH64_ID_MMFR0_EL1_FCSE_SHIFT 24
#define AARCH64_ID_MMFR0_EL1_FCSE_MASK 0xf000000U
#define AARCH64_ID_MMFR0_EL1_FCSE_GET( _reg ) \
  ( ( ( _reg ) >> 24 ) & 0xfU )

#define AARCH64_ID_MMFR0_EL1_INNERSHR( _val ) ( ( uint64_t ) ( _val ) << 28 )
#define AARCH64_ID_MMFR0_EL1_INNERSHR_SHIFT 28
#define AARCH64_ID_MMFR0_EL1_INNERSHR_MASK 0xf0000000U
#define AARCH64_ID_MMFR0_EL1_INNERSHR_GET( _reg ) \
  ( ( ( _reg ) >> 28 ) & 0xfU )
3913
/* Returns the value of ID_MMFR0_EL1 read with the MRS instruction. */
static inline uint64_t _AArch64_Read_id_mmfr0_el1( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, ID_MMFR0_EL1" : "=&r" ( value ) : : "memory"
  );

  return value;
}
3924
/* ID_MMFR1_EL1, AArch64 Memory Model Feature Register 1 */

/*
 * Each field is 4 bits wide.  The field macros place a field value at its
 * bit position (the argument is cast to uint64_t so the shift is well
 * defined for any integer argument); the _GET macros extract the field
 * from a value read from the register.
 */
#define AARCH64_ID_MMFR1_EL1_L1HVDVA( _val ) ( ( uint64_t ) ( _val ) << 0 )
#define AARCH64_ID_MMFR1_EL1_L1HVDVA_SHIFT 0
#define AARCH64_ID_MMFR1_EL1_L1HVDVA_MASK 0xfU
#define AARCH64_ID_MMFR1_EL1_L1HVDVA_GET( _reg ) \
  ( ( ( _reg ) >> 0 ) & 0xfU )

#define AARCH64_ID_MMFR1_EL1_L1UNIVA( _val ) ( ( uint64_t ) ( _val ) << 4 )
#define AARCH64_ID_MMFR1_EL1_L1UNIVA_SHIFT 4
#define AARCH64_ID_MMFR1_EL1_L1UNIVA_MASK 0xf0U
#define AARCH64_ID_MMFR1_EL1_L1UNIVA_GET( _reg ) \
  ( ( ( _reg ) >> 4 ) & 0xfU )

#define AARCH64_ID_MMFR1_EL1_L1HVDSW( _val ) ( ( uint64_t ) ( _val ) << 8 )
#define AARCH64_ID_MMFR1_EL1_L1HVDSW_SHIFT 8
#define AARCH64_ID_MMFR1_EL1_L1HVDSW_MASK 0xf00U
#define AARCH64_ID_MMFR1_EL1_L1HVDSW_GET( _reg ) \
  ( ( ( _reg ) >> 8 ) & 0xfU )

#define AARCH64_ID_MMFR1_EL1_L1UNISW( _val ) ( ( uint64_t ) ( _val ) << 12 )
#define AARCH64_ID_MMFR1_EL1_L1UNISW_SHIFT 12
#define AARCH64_ID_MMFR1_EL1_L1UNISW_MASK 0xf000U
#define AARCH64_ID_MMFR1_EL1_L1UNISW_GET( _reg ) \
  ( ( ( _reg ) >> 12 ) & 0xfU )

#define AARCH64_ID_MMFR1_EL1_L1HVD( _val ) ( ( uint64_t ) ( _val ) << 16 )
#define AARCH64_ID_MMFR1_EL1_L1HVD_SHIFT 16
#define AARCH64_ID_MMFR1_EL1_L1HVD_MASK 0xf0000U
#define AARCH64_ID_MMFR1_EL1_L1HVD_GET( _reg ) \
  ( ( ( _reg ) >> 16 ) & 0xfU )

#define AARCH64_ID_MMFR1_EL1_L1UNI( _val ) ( ( uint64_t ) ( _val ) << 20 )
#define AARCH64_ID_MMFR1_EL1_L1UNI_SHIFT 20
#define AARCH64_ID_MMFR1_EL1_L1UNI_MASK 0xf00000U
#define AARCH64_ID_MMFR1_EL1_L1UNI_GET( _reg ) \
  ( ( ( _reg ) >> 20 ) & 0xfU )

#define AARCH64_ID_MMFR1_EL1_L1TSTCLN( _val ) ( ( uint64_t ) ( _val ) << 24 )
#define AARCH64_ID_MMFR1_EL1_L1TSTCLN_SHIFT 24
#define AARCH64_ID_MMFR1_EL1_L1TSTCLN_MASK 0xf000000U
#define AARCH64_ID_MMFR1_EL1_L1TSTCLN_GET( _reg ) \
  ( ( ( _reg ) >> 24 ) & 0xfU )

#define AARCH64_ID_MMFR1_EL1_BPRED( _val ) ( ( uint64_t ) ( _val ) << 28 )
#define AARCH64_ID_MMFR1_EL1_BPRED_SHIFT 28
#define AARCH64_ID_MMFR1_EL1_BPRED_MASK 0xf0000000U
#define AARCH64_ID_MMFR1_EL1_BPRED_GET( _reg ) \
  ( ( ( _reg ) >> 28 ) & 0xfU )
3974
3975static inline uint64_t _AArch64_Read_id_mmfr1_el1( void )
3976{
3977 uint64_t value;
3978
3979 __asm__ volatile (
3980 "mrs %0, ID_MMFR1_EL1" : "=&r" ( value ) : : "memory"
3981 );
3982
3983 return value;
3984}
3985
/* ID_MMFR2_EL1, AArch64 Memory Model Feature Register 2 */

/* Per field: FIELD( _val ) shifts a value into field position,
   FIELD_SHIFT/FIELD_MASK locate the bits, and FIELD_GET( _reg )
   extracts the field from a register value. */

#define AARCH64_ID_MMFR2_EL1_L1HVDFG( _val ) ( ( _val ) << 0 )
#define AARCH64_ID_MMFR2_EL1_L1HVDFG_SHIFT 0
#define AARCH64_ID_MMFR2_EL1_L1HVDFG_MASK 0xfU
#define AARCH64_ID_MMFR2_EL1_L1HVDFG_GET( _reg ) \
  ( ( ( _reg ) >> 0 ) & 0xfU )

#define AARCH64_ID_MMFR2_EL1_L1HVDBG( _val ) ( ( _val ) << 4 )
#define AARCH64_ID_MMFR2_EL1_L1HVDBG_SHIFT 4
#define AARCH64_ID_MMFR2_EL1_L1HVDBG_MASK 0xf0U
#define AARCH64_ID_MMFR2_EL1_L1HVDBG_GET( _reg ) \
  ( ( ( _reg ) >> 4 ) & 0xfU )

#define AARCH64_ID_MMFR2_EL1_L1HVDRNG( _val ) ( ( _val ) << 8 )
#define AARCH64_ID_MMFR2_EL1_L1HVDRNG_SHIFT 8
#define AARCH64_ID_MMFR2_EL1_L1HVDRNG_MASK 0xf00U
#define AARCH64_ID_MMFR2_EL1_L1HVDRNG_GET( _reg ) \
  ( ( ( _reg ) >> 8 ) & 0xfU )

#define AARCH64_ID_MMFR2_EL1_HVDTLB( _val ) ( ( _val ) << 12 )
#define AARCH64_ID_MMFR2_EL1_HVDTLB_SHIFT 12
#define AARCH64_ID_MMFR2_EL1_HVDTLB_MASK 0xf000U
#define AARCH64_ID_MMFR2_EL1_HVDTLB_GET( _reg ) \
  ( ( ( _reg ) >> 12 ) & 0xfU )

#define AARCH64_ID_MMFR2_EL1_UNITLB( _val ) ( ( _val ) << 16 )
#define AARCH64_ID_MMFR2_EL1_UNITLB_SHIFT 16
#define AARCH64_ID_MMFR2_EL1_UNITLB_MASK 0xf0000U
#define AARCH64_ID_MMFR2_EL1_UNITLB_GET( _reg ) \
  ( ( ( _reg ) >> 16 ) & 0xfU )

#define AARCH64_ID_MMFR2_EL1_MEMBARR( _val ) ( ( _val ) << 20 )
#define AARCH64_ID_MMFR2_EL1_MEMBARR_SHIFT 20
#define AARCH64_ID_MMFR2_EL1_MEMBARR_MASK 0xf00000U
#define AARCH64_ID_MMFR2_EL1_MEMBARR_GET( _reg ) \
  ( ( ( _reg ) >> 20 ) & 0xfU )

#define AARCH64_ID_MMFR2_EL1_WFISTALL( _val ) ( ( _val ) << 24 )
#define AARCH64_ID_MMFR2_EL1_WFISTALL_SHIFT 24
#define AARCH64_ID_MMFR2_EL1_WFISTALL_MASK 0xf000000U
#define AARCH64_ID_MMFR2_EL1_WFISTALL_GET( _reg ) \
  ( ( ( _reg ) >> 24 ) & 0xfU )

#define AARCH64_ID_MMFR2_EL1_HWACCFLG( _val ) ( ( _val ) << 28 )
#define AARCH64_ID_MMFR2_EL1_HWACCFLG_SHIFT 28
#define AARCH64_ID_MMFR2_EL1_HWACCFLG_MASK 0xf0000000U
#define AARCH64_ID_MMFR2_EL1_HWACCFLG_GET( _reg ) \
  ( ( ( _reg ) >> 28 ) & 0xfU )

/* Returns the value of ID_MMFR2_EL1 (read-only ID register). */
static inline uint64_t _AArch64_Read_id_mmfr2_el1( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, ID_MMFR2_EL1" : "=&r" ( value ) : : "memory"
  );

  return value;
}
4046
/* ID_MMFR3_EL1, AArch64 Memory Model Feature Register 3 */

/* Per field: FIELD( _val ) shifts a value into field position,
   FIELD_SHIFT/FIELD_MASK locate the bits, and FIELD_GET( _reg )
   extracts the field from a register value. */

#define AARCH64_ID_MMFR3_EL1_CMAINTVA( _val ) ( ( _val ) << 0 )
#define AARCH64_ID_MMFR3_EL1_CMAINTVA_SHIFT 0
#define AARCH64_ID_MMFR3_EL1_CMAINTVA_MASK 0xfU
#define AARCH64_ID_MMFR3_EL1_CMAINTVA_GET( _reg ) \
  ( ( ( _reg ) >> 0 ) & 0xfU )

#define AARCH64_ID_MMFR3_EL1_CMAINTSW( _val ) ( ( _val ) << 4 )
#define AARCH64_ID_MMFR3_EL1_CMAINTSW_SHIFT 4
#define AARCH64_ID_MMFR3_EL1_CMAINTSW_MASK 0xf0U
#define AARCH64_ID_MMFR3_EL1_CMAINTSW_GET( _reg ) \
  ( ( ( _reg ) >> 4 ) & 0xfU )

#define AARCH64_ID_MMFR3_EL1_BPMAINT( _val ) ( ( _val ) << 8 )
#define AARCH64_ID_MMFR3_EL1_BPMAINT_SHIFT 8
#define AARCH64_ID_MMFR3_EL1_BPMAINT_MASK 0xf00U
#define AARCH64_ID_MMFR3_EL1_BPMAINT_GET( _reg ) \
  ( ( ( _reg ) >> 8 ) & 0xfU )

#define AARCH64_ID_MMFR3_EL1_MAINTBCST( _val ) ( ( _val ) << 12 )
#define AARCH64_ID_MMFR3_EL1_MAINTBCST_SHIFT 12
#define AARCH64_ID_MMFR3_EL1_MAINTBCST_MASK 0xf000U
#define AARCH64_ID_MMFR3_EL1_MAINTBCST_GET( _reg ) \
  ( ( ( _reg ) >> 12 ) & 0xfU )

#define AARCH64_ID_MMFR3_EL1_PAN( _val ) ( ( _val ) << 16 )
#define AARCH64_ID_MMFR3_EL1_PAN_SHIFT 16
#define AARCH64_ID_MMFR3_EL1_PAN_MASK 0xf0000U
#define AARCH64_ID_MMFR3_EL1_PAN_GET( _reg ) \
  ( ( ( _reg ) >> 16 ) & 0xfU )

#define AARCH64_ID_MMFR3_EL1_COHWALK( _val ) ( ( _val ) << 20 )
#define AARCH64_ID_MMFR3_EL1_COHWALK_SHIFT 20
#define AARCH64_ID_MMFR3_EL1_COHWALK_MASK 0xf00000U
#define AARCH64_ID_MMFR3_EL1_COHWALK_GET( _reg ) \
  ( ( ( _reg ) >> 20 ) & 0xfU )

#define AARCH64_ID_MMFR3_EL1_CMEMSZ( _val ) ( ( _val ) << 24 )
#define AARCH64_ID_MMFR3_EL1_CMEMSZ_SHIFT 24
#define AARCH64_ID_MMFR3_EL1_CMEMSZ_MASK 0xf000000U
#define AARCH64_ID_MMFR3_EL1_CMEMSZ_GET( _reg ) \
  ( ( ( _reg ) >> 24 ) & 0xfU )

#define AARCH64_ID_MMFR3_EL1_SUPERSEC( _val ) ( ( _val ) << 28 )
#define AARCH64_ID_MMFR3_EL1_SUPERSEC_SHIFT 28
#define AARCH64_ID_MMFR3_EL1_SUPERSEC_MASK 0xf0000000U
#define AARCH64_ID_MMFR3_EL1_SUPERSEC_GET( _reg ) \
  ( ( ( _reg ) >> 28 ) & 0xfU )

/* Returns the value of ID_MMFR3_EL1 (read-only ID register). */
static inline uint64_t _AArch64_Read_id_mmfr3_el1( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, ID_MMFR3_EL1" : "=&r" ( value ) : : "memory"
  );

  return value;
}
4107
/* ID_MMFR4_EL1, AArch64 Memory Model Feature Register 4 */

/* Per field: FIELD( _val ) shifts a value into field position,
   FIELD_SHIFT/FIELD_MASK locate the bits, and FIELD_GET( _reg )
   extracts the field from a register value. */

#define AARCH64_ID_MMFR4_EL1_SPECSEI( _val ) ( ( _val ) << 0 )
#define AARCH64_ID_MMFR4_EL1_SPECSEI_SHIFT 0
#define AARCH64_ID_MMFR4_EL1_SPECSEI_MASK 0xfU
#define AARCH64_ID_MMFR4_EL1_SPECSEI_GET( _reg ) \
  ( ( ( _reg ) >> 0 ) & 0xfU )

#define AARCH64_ID_MMFR4_EL1_AC2( _val ) ( ( _val ) << 4 )
#define AARCH64_ID_MMFR4_EL1_AC2_SHIFT 4
#define AARCH64_ID_MMFR4_EL1_AC2_MASK 0xf0U
#define AARCH64_ID_MMFR4_EL1_AC2_GET( _reg ) \
  ( ( ( _reg ) >> 4 ) & 0xfU )

#define AARCH64_ID_MMFR4_EL1_XNX( _val ) ( ( _val ) << 8 )
#define AARCH64_ID_MMFR4_EL1_XNX_SHIFT 8
#define AARCH64_ID_MMFR4_EL1_XNX_MASK 0xf00U
#define AARCH64_ID_MMFR4_EL1_XNX_GET( _reg ) \
  ( ( ( _reg ) >> 8 ) & 0xfU )

#define AARCH64_ID_MMFR4_EL1_CNP( _val ) ( ( _val ) << 12 )
#define AARCH64_ID_MMFR4_EL1_CNP_SHIFT 12
#define AARCH64_ID_MMFR4_EL1_CNP_MASK 0xf000U
#define AARCH64_ID_MMFR4_EL1_CNP_GET( _reg ) \
  ( ( ( _reg ) >> 12 ) & 0xfU )

#define AARCH64_ID_MMFR4_EL1_HPDS( _val ) ( ( _val ) << 16 )
#define AARCH64_ID_MMFR4_EL1_HPDS_SHIFT 16
#define AARCH64_ID_MMFR4_EL1_HPDS_MASK 0xf0000U
#define AARCH64_ID_MMFR4_EL1_HPDS_GET( _reg ) \
  ( ( ( _reg ) >> 16 ) & 0xfU )

#define AARCH64_ID_MMFR4_EL1_LSM( _val ) ( ( _val ) << 20 )
#define AARCH64_ID_MMFR4_EL1_LSM_SHIFT 20
#define AARCH64_ID_MMFR4_EL1_LSM_MASK 0xf00000U
#define AARCH64_ID_MMFR4_EL1_LSM_GET( _reg ) \
  ( ( ( _reg ) >> 20 ) & 0xfU )

#define AARCH64_ID_MMFR4_EL1_CCIDX( _val ) ( ( _val ) << 24 )
#define AARCH64_ID_MMFR4_EL1_CCIDX_SHIFT 24
#define AARCH64_ID_MMFR4_EL1_CCIDX_MASK 0xf000000U
#define AARCH64_ID_MMFR4_EL1_CCIDX_GET( _reg ) \
  ( ( ( _reg ) >> 24 ) & 0xfU )

#define AARCH64_ID_MMFR4_EL1_EVT( _val ) ( ( _val ) << 28 )
#define AARCH64_ID_MMFR4_EL1_EVT_SHIFT 28
#define AARCH64_ID_MMFR4_EL1_EVT_MASK 0xf0000000U
#define AARCH64_ID_MMFR4_EL1_EVT_GET( _reg ) \
  ( ( ( _reg ) >> 28 ) & 0xfU )

/* Returns the value of ID_MMFR4_EL1 (read-only ID register). */
static inline uint64_t _AArch64_Read_id_mmfr4_el1( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, ID_MMFR4_EL1" : "=&r" ( value ) : : "memory"
  );

  return value;
}
4168
/* ID_MMFR5_EL1, AArch64 Memory Model Feature Register 5 */

/* ETS field accessors: ETS( _val ) places a value, _SHIFT/_MASK locate
   bits [3:0], and _GET extracts the field from a register value. */
#define AARCH64_ID_MMFR5_EL1_ETS( _val ) ( ( _val ) << 0 )
#define AARCH64_ID_MMFR5_EL1_ETS_SHIFT 0
#define AARCH64_ID_MMFR5_EL1_ETS_MASK 0xfU
#define AARCH64_ID_MMFR5_EL1_ETS_GET( _reg ) \
  ( ( ( _reg ) >> 0 ) & 0xfU )

/* Returns the value of ID_MMFR5_EL1 (read-only ID register). */
static inline uint64_t _AArch64_Read_id_mmfr5_el1( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, ID_MMFR5_EL1" : "=&r" ( value ) : : "memory"
  );

  return value;
}
4187
/* ID_PFR0_EL1, AArch64 Processor Feature Register 0 */

/* Per field: FIELD( _val ) shifts a value into field position,
   FIELD_SHIFT/FIELD_MASK locate the bits, and FIELD_GET( _reg )
   extracts the field from a register value. */

#define AARCH64_ID_PFR0_EL1_STATE0( _val ) ( ( _val ) << 0 )
#define AARCH64_ID_PFR0_EL1_STATE0_SHIFT 0
#define AARCH64_ID_PFR0_EL1_STATE0_MASK 0xfU
#define AARCH64_ID_PFR0_EL1_STATE0_GET( _reg ) \
  ( ( ( _reg ) >> 0 ) & 0xfU )

#define AARCH64_ID_PFR0_EL1_STATE1( _val ) ( ( _val ) << 4 )
#define AARCH64_ID_PFR0_EL1_STATE1_SHIFT 4
#define AARCH64_ID_PFR0_EL1_STATE1_MASK 0xf0U
#define AARCH64_ID_PFR0_EL1_STATE1_GET( _reg ) \
  ( ( ( _reg ) >> 4 ) & 0xfU )

#define AARCH64_ID_PFR0_EL1_STATE2( _val ) ( ( _val ) << 8 )
#define AARCH64_ID_PFR0_EL1_STATE2_SHIFT 8
#define AARCH64_ID_PFR0_EL1_STATE2_MASK 0xf00U
#define AARCH64_ID_PFR0_EL1_STATE2_GET( _reg ) \
  ( ( ( _reg ) >> 8 ) & 0xfU )

#define AARCH64_ID_PFR0_EL1_STATE3( _val ) ( ( _val ) << 12 )
#define AARCH64_ID_PFR0_EL1_STATE3_SHIFT 12
#define AARCH64_ID_PFR0_EL1_STATE3_MASK 0xf000U
#define AARCH64_ID_PFR0_EL1_STATE3_GET( _reg ) \
  ( ( ( _reg ) >> 12 ) & 0xfU )

#define AARCH64_ID_PFR0_EL1_CSV2( _val ) ( ( _val ) << 16 )
#define AARCH64_ID_PFR0_EL1_CSV2_SHIFT 16
#define AARCH64_ID_PFR0_EL1_CSV2_MASK 0xf0000U
#define AARCH64_ID_PFR0_EL1_CSV2_GET( _reg ) \
  ( ( ( _reg ) >> 16 ) & 0xfU )

#define AARCH64_ID_PFR0_EL1_AMU( _val ) ( ( _val ) << 20 )
#define AARCH64_ID_PFR0_EL1_AMU_SHIFT 20
#define AARCH64_ID_PFR0_EL1_AMU_MASK 0xf00000U
#define AARCH64_ID_PFR0_EL1_AMU_GET( _reg ) \
  ( ( ( _reg ) >> 20 ) & 0xfU )

#define AARCH64_ID_PFR0_EL1_DIT( _val ) ( ( _val ) << 24 )
#define AARCH64_ID_PFR0_EL1_DIT_SHIFT 24
#define AARCH64_ID_PFR0_EL1_DIT_MASK 0xf000000U
#define AARCH64_ID_PFR0_EL1_DIT_GET( _reg ) \
  ( ( ( _reg ) >> 24 ) & 0xfU )

#define AARCH64_ID_PFR0_EL1_RAS( _val ) ( ( _val ) << 28 )
#define AARCH64_ID_PFR0_EL1_RAS_SHIFT 28
#define AARCH64_ID_PFR0_EL1_RAS_MASK 0xf0000000U
#define AARCH64_ID_PFR0_EL1_RAS_GET( _reg ) \
  ( ( ( _reg ) >> 28 ) & 0xfU )

/* Returns the value of ID_PFR0_EL1 (read-only ID register). */
static inline uint64_t _AArch64_Read_id_pfr0_el1( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, ID_PFR0_EL1" : "=&r" ( value ) : : "memory"
  );

  return value;
}
4248
/* ID_PFR1_EL1, AArch64 Processor Feature Register 1 */

/* Per field: FIELD( _val ) shifts a value into field position,
   FIELD_SHIFT/FIELD_MASK locate the bits, and FIELD_GET( _reg )
   extracts the field from a register value. */

#define AARCH64_ID_PFR1_EL1_PROGMOD( _val ) ( ( _val ) << 0 )
#define AARCH64_ID_PFR1_EL1_PROGMOD_SHIFT 0
#define AARCH64_ID_PFR1_EL1_PROGMOD_MASK 0xfU
#define AARCH64_ID_PFR1_EL1_PROGMOD_GET( _reg ) \
  ( ( ( _reg ) >> 0 ) & 0xfU )

#define AARCH64_ID_PFR1_EL1_SECURITY( _val ) ( ( _val ) << 4 )
#define AARCH64_ID_PFR1_EL1_SECURITY_SHIFT 4
#define AARCH64_ID_PFR1_EL1_SECURITY_MASK 0xf0U
#define AARCH64_ID_PFR1_EL1_SECURITY_GET( _reg ) \
  ( ( ( _reg ) >> 4 ) & 0xfU )

#define AARCH64_ID_PFR1_EL1_MPROGMOD( _val ) ( ( _val ) << 8 )
#define AARCH64_ID_PFR1_EL1_MPROGMOD_SHIFT 8
#define AARCH64_ID_PFR1_EL1_MPROGMOD_MASK 0xf00U
#define AARCH64_ID_PFR1_EL1_MPROGMOD_GET( _reg ) \
  ( ( ( _reg ) >> 8 ) & 0xfU )

#define AARCH64_ID_PFR1_EL1_VIRTUALIZATION( _val ) ( ( _val ) << 12 )
#define AARCH64_ID_PFR1_EL1_VIRTUALIZATION_SHIFT 12
#define AARCH64_ID_PFR1_EL1_VIRTUALIZATION_MASK 0xf000U
#define AARCH64_ID_PFR1_EL1_VIRTUALIZATION_GET( _reg ) \
  ( ( ( _reg ) >> 12 ) & 0xfU )

#define AARCH64_ID_PFR1_EL1_GENTIMER( _val ) ( ( _val ) << 16 )
#define AARCH64_ID_PFR1_EL1_GENTIMER_SHIFT 16
#define AARCH64_ID_PFR1_EL1_GENTIMER_MASK 0xf0000U
#define AARCH64_ID_PFR1_EL1_GENTIMER_GET( _reg ) \
  ( ( ( _reg ) >> 16 ) & 0xfU )

#define AARCH64_ID_PFR1_EL1_SEC_FRAC( _val ) ( ( _val ) << 20 )
#define AARCH64_ID_PFR1_EL1_SEC_FRAC_SHIFT 20
#define AARCH64_ID_PFR1_EL1_SEC_FRAC_MASK 0xf00000U
#define AARCH64_ID_PFR1_EL1_SEC_FRAC_GET( _reg ) \
  ( ( ( _reg ) >> 20 ) & 0xfU )

#define AARCH64_ID_PFR1_EL1_VIRT_FRAC( _val ) ( ( _val ) << 24 )
#define AARCH64_ID_PFR1_EL1_VIRT_FRAC_SHIFT 24
#define AARCH64_ID_PFR1_EL1_VIRT_FRAC_MASK 0xf000000U
#define AARCH64_ID_PFR1_EL1_VIRT_FRAC_GET( _reg ) \
  ( ( ( _reg ) >> 24 ) & 0xfU )

#define AARCH64_ID_PFR1_EL1_GIC( _val ) ( ( _val ) << 28 )
#define AARCH64_ID_PFR1_EL1_GIC_SHIFT 28
#define AARCH64_ID_PFR1_EL1_GIC_MASK 0xf0000000U
#define AARCH64_ID_PFR1_EL1_GIC_GET( _reg ) \
  ( ( ( _reg ) >> 28 ) & 0xfU )

/* Returns the value of ID_PFR1_EL1 (read-only ID register). */
static inline uint64_t _AArch64_Read_id_pfr1_el1( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, ID_PFR1_EL1" : "=&r" ( value ) : : "memory"
  );

  return value;
}
4309
/* ID_PFR2_EL1, AArch64 Processor Feature Register 2 */

/* Per field: FIELD( _val ) shifts a value into field position,
   FIELD_SHIFT/FIELD_MASK locate the bits, and FIELD_GET( _reg )
   extracts the field from a register value. */

#define AARCH64_ID_PFR2_EL1_CSV3( _val ) ( ( _val ) << 0 )
#define AARCH64_ID_PFR2_EL1_CSV3_SHIFT 0
#define AARCH64_ID_PFR2_EL1_CSV3_MASK 0xfU
#define AARCH64_ID_PFR2_EL1_CSV3_GET( _reg ) \
  ( ( ( _reg ) >> 0 ) & 0xfU )

#define AARCH64_ID_PFR2_EL1_SSBS( _val ) ( ( _val ) << 4 )
#define AARCH64_ID_PFR2_EL1_SSBS_SHIFT 4
#define AARCH64_ID_PFR2_EL1_SSBS_MASK 0xf0U
#define AARCH64_ID_PFR2_EL1_SSBS_GET( _reg ) \
  ( ( ( _reg ) >> 4 ) & 0xfU )

#define AARCH64_ID_PFR2_EL1_RAS_FRAC( _val ) ( ( _val ) << 8 )
#define AARCH64_ID_PFR2_EL1_RAS_FRAC_SHIFT 8
#define AARCH64_ID_PFR2_EL1_RAS_FRAC_MASK 0xf00U
#define AARCH64_ID_PFR2_EL1_RAS_FRAC_GET( _reg ) \
  ( ( ( _reg ) >> 8 ) & 0xfU )

/* Returns the value of ID_PFR2_EL1 (read-only ID register). */
static inline uint64_t _AArch64_Read_id_pfr2_el1( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, ID_PFR2_EL1" : "=&r" ( value ) : : "memory"
  );

  return value;
}
4340
/* IFSR32_EL2, Instruction Fault Status Register (EL2) */

/* NOTE(review): FS_3_0 and STATUS deliberately overlap at bit 0 — the
   fault-status encoding used depends on the translation-table format
   (FS with the short-descriptor format, STATUS with LPAE); presumably
   callers pick the accessor matching the active format. */

#define AARCH64_IFSR32_EL2_FS_3_0( _val ) ( ( _val ) << 0 )
#define AARCH64_IFSR32_EL2_FS_3_0_SHIFT 0
#define AARCH64_IFSR32_EL2_FS_3_0_MASK 0xfU
#define AARCH64_IFSR32_EL2_FS_3_0_GET( _reg ) \
  ( ( ( _reg ) >> 0 ) & 0xfU )

#define AARCH64_IFSR32_EL2_STATUS( _val ) ( ( _val ) << 0 )
#define AARCH64_IFSR32_EL2_STATUS_SHIFT 0
#define AARCH64_IFSR32_EL2_STATUS_MASK 0x3fU
#define AARCH64_IFSR32_EL2_STATUS_GET( _reg ) \
  ( ( ( _reg ) >> 0 ) & 0x3fU )

/* Single-bit flags. */
#define AARCH64_IFSR32_EL2_LPAE 0x200U

#define AARCH64_IFSR32_EL2_FS_4 0x400U

#define AARCH64_IFSR32_EL2_EXT 0x1000U

#define AARCH64_IFSR32_EL2_FNV 0x10000U

/* Returns the current value of IFSR32_EL2. */
static inline uint64_t _AArch64_Read_ifsr32_el2( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, IFSR32_EL2" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* Writes IFSR32_EL2. */
static inline void _AArch64_Write_ifsr32_el2( uint64_t value )
{
  __asm__ volatile (
    "msr IFSR32_EL2, %0" : : "r" ( value ) : "memory"
  );
}
4380
/* ISR_EL1, Interrupt Status Register */

/* Pending FIQ flag (bit 6). */
#define AARCH64_ISR_EL1_F 0x40U

/* Pending IRQ flag (bit 7). */
#define AARCH64_ISR_EL1_I 0x80U

/* Pending SError/abort flag (bit 8). */
#define AARCH64_ISR_EL1_A 0x100U

/* Returns the value of ISR_EL1 (read-only status register). */
static inline uint64_t _AArch64_Read_isr_el1( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, ISR_EL1" : "=&r" ( value ) : : "memory"
  );

  return value;
}
4399
/* LORC_EL1, LORegion Control (EL1) */

/* Enable bit (bit 0). */
#define AARCH64_LORC_EL1_EN 0x1U

/* DS field, bits [9:2]: DS( _val ) places a value, _SHIFT/_MASK locate
   the bits, _GET extracts the 8-bit field from a register value. */
#define AARCH64_LORC_EL1_DS( _val ) ( ( _val ) << 2 )
#define AARCH64_LORC_EL1_DS_SHIFT 2
#define AARCH64_LORC_EL1_DS_MASK 0x3fcU
#define AARCH64_LORC_EL1_DS_GET( _reg ) \
  ( ( ( _reg ) >> 2 ) & 0xffU )

/* Returns the current value of LORC_EL1. */
static inline uint64_t _AArch64_Read_lorc_el1( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, LORC_EL1" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* Writes LORC_EL1. */
static inline void _AArch64_Write_lorc_el1( uint64_t value )
{
  __asm__ volatile (
    "msr LORC_EL1, %0" : : "r" ( value ) : "memory"
  );
}
4427
/* LOREA_EL1, LORegion End Address (EL1) */

/*
 * Field accessors for the end-address fields.  The value-placement macros
 * cast to uint64_t before shifting: without the cast, shifting a 32-bit
 * argument by 48 is undefined behavior (shift count >= type width), and
 * ( 0xffffffffU << 16 ) would silently discard the upper 16 bits.
 */
#define AARCH64_LOREA_EL1_EA_47_16( _val ) ( ( ( uint64_t ) ( _val ) ) << 16 )
#define AARCH64_LOREA_EL1_EA_47_16_SHIFT 16
#define AARCH64_LOREA_EL1_EA_47_16_MASK 0xffffffff0000ULL
#define AARCH64_LOREA_EL1_EA_47_16_GET( _reg ) \
  ( ( ( _reg ) >> 16 ) & 0xffffffffULL )

#define AARCH64_LOREA_EL1_EA_51_48( _val ) ( ( ( uint64_t ) ( _val ) ) << 48 )
#define AARCH64_LOREA_EL1_EA_51_48_SHIFT 48
#define AARCH64_LOREA_EL1_EA_51_48_MASK 0xf000000000000ULL
#define AARCH64_LOREA_EL1_EA_51_48_GET( _reg ) \
  ( ( ( _reg ) >> 48 ) & 0xfULL )

/* Returns the current value of LOREA_EL1. */
static inline uint64_t _AArch64_Read_lorea_el1( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, LOREA_EL1" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* Writes LOREA_EL1. */
static inline void _AArch64_Write_lorea_el1( uint64_t value )
{
  __asm__ volatile (
    "msr LOREA_EL1, %0" : : "r" ( value ) : "memory"
  );
}
4459
/* LORID_EL1, LORegionID (EL1) */

/* Per field: FIELD( _val ) shifts a value into field position,
   FIELD_SHIFT/FIELD_MASK locate the bits, and FIELD_GET( _reg )
   extracts the field from a register value. */

#define AARCH64_LORID_EL1_LR( _val ) ( ( _val ) << 0 )
#define AARCH64_LORID_EL1_LR_SHIFT 0
#define AARCH64_LORID_EL1_LR_MASK 0xffU
#define AARCH64_LORID_EL1_LR_GET( _reg ) \
  ( ( ( _reg ) >> 0 ) & 0xffU )

#define AARCH64_LORID_EL1_LD( _val ) ( ( _val ) << 16 )
#define AARCH64_LORID_EL1_LD_SHIFT 16
#define AARCH64_LORID_EL1_LD_MASK 0xff0000U
#define AARCH64_LORID_EL1_LD_GET( _reg ) \
  ( ( ( _reg ) >> 16 ) & 0xffU )

/* Returns the value of LORID_EL1 (no write accessor is provided). */
static inline uint64_t _AArch64_Read_lorid_el1( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, LORID_EL1" : "=&r" ( value ) : : "memory"
  );

  return value;
}
4484
/* LORN_EL1, LORegion Number (EL1) */

/* NUM field, bits [7:0]: NUM( _val ) places a value, _SHIFT/_MASK locate
   the bits, _GET extracts the field from a register value. */
#define AARCH64_LORN_EL1_NUM( _val ) ( ( _val ) << 0 )
#define AARCH64_LORN_EL1_NUM_SHIFT 0
#define AARCH64_LORN_EL1_NUM_MASK 0xffU
#define AARCH64_LORN_EL1_NUM_GET( _reg ) \
  ( ( ( _reg ) >> 0 ) & 0xffU )

/* Returns the current value of LORN_EL1. */
static inline uint64_t _AArch64_Read_lorn_el1( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, LORN_EL1" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* Writes LORN_EL1. */
static inline void _AArch64_Write_lorn_el1( uint64_t value )
{
  __asm__ volatile (
    "msr LORN_EL1, %0" : : "r" ( value ) : "memory"
  );
}
4510
/* LORSA_EL1, LORegion Start Address (EL1) */

/* Valid bit (bit 0). */
#define AARCH64_LORSA_EL1_VALID 0x1U

/*
 * Field accessors for the start-address fields.  The value-placement
 * macros cast to uint64_t before shifting: without the cast, shifting a
 * 32-bit argument by 48 is undefined behavior (shift count >= type
 * width), and ( 0xffffffffU << 16 ) would silently discard high bits.
 */
#define AARCH64_LORSA_EL1_SA_47_16( _val ) ( ( ( uint64_t ) ( _val ) ) << 16 )
#define AARCH64_LORSA_EL1_SA_47_16_SHIFT 16
#define AARCH64_LORSA_EL1_SA_47_16_MASK 0xffffffff0000ULL
#define AARCH64_LORSA_EL1_SA_47_16_GET( _reg ) \
  ( ( ( _reg ) >> 16 ) & 0xffffffffULL )

#define AARCH64_LORSA_EL1_SA_51_48( _val ) ( ( ( uint64_t ) ( _val ) ) << 48 )
#define AARCH64_LORSA_EL1_SA_51_48_SHIFT 48
#define AARCH64_LORSA_EL1_SA_51_48_MASK 0xf000000000000ULL
#define AARCH64_LORSA_EL1_SA_51_48_GET( _reg ) \
  ( ( ( _reg ) >> 48 ) & 0xfULL )

/* Returns the current value of LORSA_EL1. */
static inline uint64_t _AArch64_Read_lorsa_el1( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, LORSA_EL1" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* Writes LORSA_EL1. */
static inline void _AArch64_Write_lorsa_el1( uint64_t value )
{
  __asm__ volatile (
    "msr LORSA_EL1, %0" : : "r" ( value ) : "memory"
  );
}
4544
/* MAIR_EL1, Memory Attribute Indirection Register (EL1) */

/*
 * AARCH64_MAIR_EL1_ATTRn( _val ) places an 8-bit memory attribute value
 * into attribute slot n.  All eight macros cast to uint64_t before
 * shifting: without the cast, ATTR4..ATTR7 invoke undefined behavior for
 * a 32-bit argument (shift count >= type width) and would lose the value
 * entirely.
 */
#define AARCH64_MAIR_EL1_ATTR0( _val ) ( ( ( uint64_t ) ( _val ) ) << 0 )
#define AARCH64_MAIR_EL1_ATTR1( _val ) ( ( ( uint64_t ) ( _val ) ) << 8 )
#define AARCH64_MAIR_EL1_ATTR2( _val ) ( ( ( uint64_t ) ( _val ) ) << 16 )
#define AARCH64_MAIR_EL1_ATTR3( _val ) ( ( ( uint64_t ) ( _val ) ) << 24 )
#define AARCH64_MAIR_EL1_ATTR4( _val ) ( ( ( uint64_t ) ( _val ) ) << 32 )
#define AARCH64_MAIR_EL1_ATTR5( _val ) ( ( ( uint64_t ) ( _val ) ) << 40 )
#define AARCH64_MAIR_EL1_ATTR6( _val ) ( ( ( uint64_t ) ( _val ) ) << 48 )
#define AARCH64_MAIR_EL1_ATTR7( _val ) ( ( ( uint64_t ) ( _val ) ) << 56 )

/* Returns the current value of MAIR_EL1. */
static inline uint64_t _AArch64_Read_mair_el1( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, MAIR_EL1" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* Writes MAIR_EL1. */
static inline void _AArch64_Write_mair_el1( uint64_t value )
{
  __asm__ volatile (
    "msr MAIR_EL1, %0" : : "r" ( value ) : "memory"
  );
}
4573
/* MAIR_EL2, Memory Attribute Indirection Register (EL2) */

/* Returns the current contents of MAIR_EL2. */
static inline uint64_t _AArch64_Read_mair_el2( void )
{
  uint64_t reg;

  __asm__ volatile ( "mrs %0, MAIR_EL2" : "=&r" ( reg ) : : "memory" );

  return reg;
}

/* Replaces the contents of MAIR_EL2. */
static inline void _AArch64_Write_mair_el2( uint64_t value )
{
  __asm__ volatile ( "msr MAIR_EL2, %0" : : "r" ( value ) : "memory" );
}
4593
/* MAIR_EL3, Memory Attribute Indirection Register (EL3) */

/* Returns the current contents of MAIR_EL3. */
static inline uint64_t _AArch64_Read_mair_el3( void )
{
  uint64_t reg;

  __asm__ volatile ( "mrs %0, MAIR_EL3" : "=&r" ( reg ) : : "memory" );

  return reg;
}

/* Replaces the contents of MAIR_EL3. */
static inline void _AArch64_Write_mair_el3( uint64_t value )
{
  __asm__ volatile ( "msr MAIR_EL3, %0" : : "r" ( value ) : "memory" );
}
4613
/* MIDR_EL1, Main ID Register */

/* Per field: FIELD( _val ) shifts a value into field position,
   FIELD_SHIFT/FIELD_MASK locate the bits, and FIELD_GET( _reg )
   extracts the field from a register value. */

#define AARCH64_MIDR_EL1_REVISION( _val ) ( ( _val ) << 0 )
#define AARCH64_MIDR_EL1_REVISION_SHIFT 0
#define AARCH64_MIDR_EL1_REVISION_MASK 0xfU
#define AARCH64_MIDR_EL1_REVISION_GET( _reg ) \
  ( ( ( _reg ) >> 0 ) & 0xfU )

#define AARCH64_MIDR_EL1_PARTNUM( _val ) ( ( _val ) << 4 )
#define AARCH64_MIDR_EL1_PARTNUM_SHIFT 4
#define AARCH64_MIDR_EL1_PARTNUM_MASK 0xfff0U
#define AARCH64_MIDR_EL1_PARTNUM_GET( _reg ) \
  ( ( ( _reg ) >> 4 ) & 0xfffU )

#define AARCH64_MIDR_EL1_ARCHITECTURE( _val ) ( ( _val ) << 16 )
#define AARCH64_MIDR_EL1_ARCHITECTURE_SHIFT 16
#define AARCH64_MIDR_EL1_ARCHITECTURE_MASK 0xf0000U
#define AARCH64_MIDR_EL1_ARCHITECTURE_GET( _reg ) \
  ( ( ( _reg ) >> 16 ) & 0xfU )

#define AARCH64_MIDR_EL1_VARIANT( _val ) ( ( _val ) << 20 )
#define AARCH64_MIDR_EL1_VARIANT_SHIFT 20
#define AARCH64_MIDR_EL1_VARIANT_MASK 0xf00000U
#define AARCH64_MIDR_EL1_VARIANT_GET( _reg ) \
  ( ( ( _reg ) >> 20 ) & 0xfU )

#define AARCH64_MIDR_EL1_IMPLEMENTER( _val ) ( ( _val ) << 24 )
#define AARCH64_MIDR_EL1_IMPLEMENTER_SHIFT 24
#define AARCH64_MIDR_EL1_IMPLEMENTER_MASK 0xff000000U
#define AARCH64_MIDR_EL1_IMPLEMENTER_GET( _reg ) \
  ( ( ( _reg ) >> 24 ) & 0xffU )

/* Returns the value of MIDR_EL1 (read-only ID register). */
static inline uint64_t _AArch64_Read_midr_el1( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, MIDR_EL1" : "=&r" ( value ) : : "memory"
  );

  return value;
}
4656
/* MPIDR_EL1, Multiprocessor Affinity Register */

/* Per field: FIELD( _val ) shifts a value into field position,
   FIELD_SHIFT/FIELD_MASK locate the bits, and FIELD_GET( _reg )
   extracts the field from a register value. */

#define AARCH64_MPIDR_EL1_AFF0( _val ) ( ( _val ) << 0 )
#define AARCH64_MPIDR_EL1_AFF0_SHIFT 0
#define AARCH64_MPIDR_EL1_AFF0_MASK 0xffU
#define AARCH64_MPIDR_EL1_AFF0_GET( _reg ) \
  ( ( ( _reg ) >> 0 ) & 0xffU )

#define AARCH64_MPIDR_EL1_AFF1( _val ) ( ( _val ) << 8 )
#define AARCH64_MPIDR_EL1_AFF1_SHIFT 8
#define AARCH64_MPIDR_EL1_AFF1_MASK 0xff00U
#define AARCH64_MPIDR_EL1_AFF1_GET( _reg ) \
  ( ( ( _reg ) >> 8 ) & 0xffU )

#define AARCH64_MPIDR_EL1_AFF2( _val ) ( ( _val ) << 16 )
#define AARCH64_MPIDR_EL1_AFF2_SHIFT 16
#define AARCH64_MPIDR_EL1_AFF2_MASK 0xff0000U
#define AARCH64_MPIDR_EL1_AFF2_GET( _reg ) \
  ( ( ( _reg ) >> 16 ) & 0xffU )

#define AARCH64_MPIDR_EL1_MT 0x1000000U

#define AARCH64_MPIDR_EL1_U 0x40000000U

/*
 * AFF3 lives above bit 31, so the value-placement macro casts to uint64_t
 * before shifting: without the cast, shifting a 32-bit argument by 32 is
 * undefined behavior (shift count >= type width).
 */
#define AARCH64_MPIDR_EL1_AFF3( _val ) ( ( ( uint64_t ) ( _val ) ) << 32 )
#define AARCH64_MPIDR_EL1_AFF3_SHIFT 32
#define AARCH64_MPIDR_EL1_AFF3_MASK 0xff00000000ULL
#define AARCH64_MPIDR_EL1_AFF3_GET( _reg ) \
  ( ( ( _reg ) >> 32 ) & 0xffULL )

/* Returns the value of MPIDR_EL1 (read-only ID register). */
static inline uint64_t _AArch64_Read_mpidr_el1( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, MPIDR_EL1" : "=&r" ( value ) : : "memory"
  );

  return value;
}
4697
/* MVFR0_EL1, AArch64 Media and VFP Feature Register 0 */

/* Per field: FIELD( _val ) shifts a value into field position,
   FIELD_SHIFT/FIELD_MASK locate the bits, and FIELD_GET( _reg )
   extracts the field from a register value. */

#define AARCH64_MVFR0_EL1_SIMDREG( _val ) ( ( _val ) << 0 )
#define AARCH64_MVFR0_EL1_SIMDREG_SHIFT 0
#define AARCH64_MVFR0_EL1_SIMDREG_MASK 0xfU
#define AARCH64_MVFR0_EL1_SIMDREG_GET( _reg ) \
  ( ( ( _reg ) >> 0 ) & 0xfU )

#define AARCH64_MVFR0_EL1_FPSP( _val ) ( ( _val ) << 4 )
#define AARCH64_MVFR0_EL1_FPSP_SHIFT 4
#define AARCH64_MVFR0_EL1_FPSP_MASK 0xf0U
#define AARCH64_MVFR0_EL1_FPSP_GET( _reg ) \
  ( ( ( _reg ) >> 4 ) & 0xfU )

#define AARCH64_MVFR0_EL1_FPDP( _val ) ( ( _val ) << 8 )
#define AARCH64_MVFR0_EL1_FPDP_SHIFT 8
#define AARCH64_MVFR0_EL1_FPDP_MASK 0xf00U
#define AARCH64_MVFR0_EL1_FPDP_GET( _reg ) \
  ( ( ( _reg ) >> 8 ) & 0xfU )

#define AARCH64_MVFR0_EL1_FPTRAP( _val ) ( ( _val ) << 12 )
#define AARCH64_MVFR0_EL1_FPTRAP_SHIFT 12
#define AARCH64_MVFR0_EL1_FPTRAP_MASK 0xf000U
#define AARCH64_MVFR0_EL1_FPTRAP_GET( _reg ) \
  ( ( ( _reg ) >> 12 ) & 0xfU )

#define AARCH64_MVFR0_EL1_FPDIVIDE( _val ) ( ( _val ) << 16 )
#define AARCH64_MVFR0_EL1_FPDIVIDE_SHIFT 16
#define AARCH64_MVFR0_EL1_FPDIVIDE_MASK 0xf0000U
#define AARCH64_MVFR0_EL1_FPDIVIDE_GET( _reg ) \
  ( ( ( _reg ) >> 16 ) & 0xfU )

#define AARCH64_MVFR0_EL1_FPSQRT( _val ) ( ( _val ) << 20 )
#define AARCH64_MVFR0_EL1_FPSQRT_SHIFT 20
#define AARCH64_MVFR0_EL1_FPSQRT_MASK 0xf00000U
#define AARCH64_MVFR0_EL1_FPSQRT_GET( _reg ) \
  ( ( ( _reg ) >> 20 ) & 0xfU )

#define AARCH64_MVFR0_EL1_FPSHVEC( _val ) ( ( _val ) << 24 )
#define AARCH64_MVFR0_EL1_FPSHVEC_SHIFT 24
#define AARCH64_MVFR0_EL1_FPSHVEC_MASK 0xf000000U
#define AARCH64_MVFR0_EL1_FPSHVEC_GET( _reg ) \
  ( ( ( _reg ) >> 24 ) & 0xfU )

#define AARCH64_MVFR0_EL1_FPROUND( _val ) ( ( _val ) << 28 )
#define AARCH64_MVFR0_EL1_FPROUND_SHIFT 28
#define AARCH64_MVFR0_EL1_FPROUND_MASK 0xf0000000U
#define AARCH64_MVFR0_EL1_FPROUND_GET( _reg ) \
  ( ( ( _reg ) >> 28 ) & 0xfU )

/* Returns the value of MVFR0_EL1 (read-only ID register). */
static inline uint64_t _AArch64_Read_mvfr0_el1( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, MVFR0_EL1" : "=&r" ( value ) : : "memory"
  );

  return value;
}
4758
/* MVFR1_EL1, AArch64 Media and VFP Feature Register 1 */

/* Per field: FIELD( _val ) shifts a value into field position,
   FIELD_SHIFT/FIELD_MASK locate the bits, and FIELD_GET( _reg )
   extracts the field from a register value. */

#define AARCH64_MVFR1_EL1_FPFTZ( _val ) ( ( _val ) << 0 )
#define AARCH64_MVFR1_EL1_FPFTZ_SHIFT 0
#define AARCH64_MVFR1_EL1_FPFTZ_MASK 0xfU
#define AARCH64_MVFR1_EL1_FPFTZ_GET( _reg ) \
  ( ( ( _reg ) >> 0 ) & 0xfU )

#define AARCH64_MVFR1_EL1_FPDNAN( _val ) ( ( _val ) << 4 )
#define AARCH64_MVFR1_EL1_FPDNAN_SHIFT 4
#define AARCH64_MVFR1_EL1_FPDNAN_MASK 0xf0U
#define AARCH64_MVFR1_EL1_FPDNAN_GET( _reg ) \
  ( ( ( _reg ) >> 4 ) & 0xfU )

#define AARCH64_MVFR1_EL1_SIMDLS( _val ) ( ( _val ) << 8 )
#define AARCH64_MVFR1_EL1_SIMDLS_SHIFT 8
#define AARCH64_MVFR1_EL1_SIMDLS_MASK 0xf00U
#define AARCH64_MVFR1_EL1_SIMDLS_GET( _reg ) \
  ( ( ( _reg ) >> 8 ) & 0xfU )

#define AARCH64_MVFR1_EL1_SIMDINT( _val ) ( ( _val ) << 12 )
#define AARCH64_MVFR1_EL1_SIMDINT_SHIFT 12
#define AARCH64_MVFR1_EL1_SIMDINT_MASK 0xf000U
#define AARCH64_MVFR1_EL1_SIMDINT_GET( _reg ) \
  ( ( ( _reg ) >> 12 ) & 0xfU )

#define AARCH64_MVFR1_EL1_SIMDSP( _val ) ( ( _val ) << 16 )
#define AARCH64_MVFR1_EL1_SIMDSP_SHIFT 16
#define AARCH64_MVFR1_EL1_SIMDSP_MASK 0xf0000U
#define AARCH64_MVFR1_EL1_SIMDSP_GET( _reg ) \
  ( ( ( _reg ) >> 16 ) & 0xfU )

#define AARCH64_MVFR1_EL1_SIMDHP( _val ) ( ( _val ) << 20 )
#define AARCH64_MVFR1_EL1_SIMDHP_SHIFT 20
#define AARCH64_MVFR1_EL1_SIMDHP_MASK 0xf00000U
#define AARCH64_MVFR1_EL1_SIMDHP_GET( _reg ) \
  ( ( ( _reg ) >> 20 ) & 0xfU )

#define AARCH64_MVFR1_EL1_FPHP( _val ) ( ( _val ) << 24 )
#define AARCH64_MVFR1_EL1_FPHP_SHIFT 24
#define AARCH64_MVFR1_EL1_FPHP_MASK 0xf000000U
#define AARCH64_MVFR1_EL1_FPHP_GET( _reg ) \
  ( ( ( _reg ) >> 24 ) & 0xfU )

#define AARCH64_MVFR1_EL1_SIMDFMAC( _val ) ( ( _val ) << 28 )
#define AARCH64_MVFR1_EL1_SIMDFMAC_SHIFT 28
#define AARCH64_MVFR1_EL1_SIMDFMAC_MASK 0xf0000000U
#define AARCH64_MVFR1_EL1_SIMDFMAC_GET( _reg ) \
  ( ( ( _reg ) >> 28 ) & 0xfU )

/* Returns the value of MVFR1_EL1 (read-only ID register). */
static inline uint64_t _AArch64_Read_mvfr1_el1( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, MVFR1_EL1" : "=&r" ( value ) : : "memory"
  );

  return value;
}
4819
/* MVFR2_EL1, AArch64 Media and VFP Feature Register 2 */

/* Per field: FIELD( _val ) shifts a value into field position,
   FIELD_SHIFT/FIELD_MASK locate the bits, and FIELD_GET( _reg )
   extracts the field from a register value. */

#define AARCH64_MVFR2_EL1_SIMDMISC( _val ) ( ( _val ) << 0 )
#define AARCH64_MVFR2_EL1_SIMDMISC_SHIFT 0
#define AARCH64_MVFR2_EL1_SIMDMISC_MASK 0xfU
#define AARCH64_MVFR2_EL1_SIMDMISC_GET( _reg ) \
  ( ( ( _reg ) >> 0 ) & 0xfU )

#define AARCH64_MVFR2_EL1_FPMISC( _val ) ( ( _val ) << 4 )
#define AARCH64_MVFR2_EL1_FPMISC_SHIFT 4
#define AARCH64_MVFR2_EL1_FPMISC_MASK 0xf0U
#define AARCH64_MVFR2_EL1_FPMISC_GET( _reg ) \
  ( ( ( _reg ) >> 4 ) & 0xfU )

/* Returns the value of MVFR2_EL1 (read-only ID register). */
static inline uint64_t _AArch64_Read_mvfr2_el1( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, MVFR2_EL1" : "=&r" ( value ) : : "memory"
  );

  return value;
}
4844
/* PAR_EL1, Physical Address Register */

/* Fault flag (bit 0); the meaning of several other fields depends on it. */
#define AARCH64_PAR_EL1_F 0x1U

#define AARCH64_PAR_EL1_FST( _val ) ( ( _val ) << 1 )
#define AARCH64_PAR_EL1_FST_SHIFT 1
#define AARCH64_PAR_EL1_FST_MASK 0x7eU
#define AARCH64_PAR_EL1_FST_GET( _reg ) \
  ( ( ( _reg ) >> 1 ) & 0x3fU )

#define AARCH64_PAR_EL1_SH( _val ) ( ( _val ) << 7 )
#define AARCH64_PAR_EL1_SH_SHIFT 7
#define AARCH64_PAR_EL1_SH_MASK 0x180U
#define AARCH64_PAR_EL1_SH_GET( _reg ) \
  ( ( ( _reg ) >> 7 ) & 0x3U )

#define AARCH64_PAR_EL1_PTW 0x100U

/* NOTE(review): NS and S intentionally share bit 9 — which name applies
   depends on whether the translation succeeded (F flag). */
#define AARCH64_PAR_EL1_NS 0x200U

#define AARCH64_PAR_EL1_S 0x200U

/*
 * Physical-address and attribute fields.  The value-placement macros cast
 * to uint64_t before shifting: without the cast, shifting a 32-bit
 * argument by 48 or 56 is undefined behavior (shift count >= type width),
 * and a wide PA value held in 32 bits would be truncated by << 12.
 */
#define AARCH64_PAR_EL1_PA_47_12( _val ) ( ( ( uint64_t ) ( _val ) ) << 12 )
#define AARCH64_PAR_EL1_PA_47_12_SHIFT 12
#define AARCH64_PAR_EL1_PA_47_12_MASK 0xfffffffff000ULL
#define AARCH64_PAR_EL1_PA_47_12_GET( _reg ) \
  ( ( ( _reg ) >> 12 ) & 0xfffffffffULL )

#define AARCH64_PAR_EL1_PA_51_48( _val ) ( ( ( uint64_t ) ( _val ) ) << 48 )
#define AARCH64_PAR_EL1_PA_51_48_SHIFT 48
#define AARCH64_PAR_EL1_PA_51_48_MASK 0xf000000000000ULL
#define AARCH64_PAR_EL1_PA_51_48_GET( _reg ) \
  ( ( ( _reg ) >> 48 ) & 0xfULL )

#define AARCH64_PAR_EL1_ATTR( _val ) ( ( ( uint64_t ) ( _val ) ) << 56 )
#define AARCH64_PAR_EL1_ATTR_SHIFT 56
#define AARCH64_PAR_EL1_ATTR_MASK 0xff00000000000000ULL
#define AARCH64_PAR_EL1_ATTR_GET( _reg ) \
  ( ( ( _reg ) >> 56 ) & 0xffULL )

/* Returns the current value of PAR_EL1. */
static inline uint64_t _AArch64_Read_par_el1( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, PAR_EL1" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* Writes PAR_EL1. */
static inline void _AArch64_Write_par_el1( uint64_t value )
{
  __asm__ volatile (
    "msr PAR_EL1, %0" : : "r" ( value ) : "memory"
  );
}
4902
/* REVIDR_EL1, Revision ID Register */

/* Read the implementation-defined REVIDR_EL1 register via MRS (read-only). */
static inline uint64_t _AArch64_Read_revidr_el1( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, REVIDR_EL1" : "=&r" ( value ) : : "memory"
  );

  return value;
}
4915
/* RGSR_EL1, Random Allocation Tag Seed Register. */

/* TAG, bits [3:0]: allocation tag generated by the most recent IRG */
#define AARCH64_RGSR_EL1_TAG( _val ) ( ( _val ) << 0 )
#define AARCH64_RGSR_EL1_TAG_SHIFT 0
#define AARCH64_RGSR_EL1_TAG_MASK 0xfU
#define AARCH64_RGSR_EL1_TAG_GET( _reg ) \
  ( ( ( _reg ) >> 0 ) & 0xfU )

/* SEED, bits [23:8]: seed for the random tag generator */
#define AARCH64_RGSR_EL1_SEED( _val ) ( ( _val ) << 8 )
#define AARCH64_RGSR_EL1_SEED_SHIFT 8
#define AARCH64_RGSR_EL1_SEED_MASK 0xffff00U
#define AARCH64_RGSR_EL1_SEED_GET( _reg ) \
  ( ( ( _reg ) >> 8 ) & 0xffffU )

/* Read the RGSR_EL1 register via MRS. */
static inline uint64_t _AArch64_Read_rgsr_el1( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, RGSR_EL1" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* Write the RGSR_EL1 register via MSR. */
static inline void _AArch64_Write_rgsr_el1( uint64_t value )
{
  __asm__ volatile (
    "msr RGSR_EL1, %0" : : "r" ( value ) : "memory"
  );
}
4947
/* RMR_EL1, Reset Management Register (EL1) */

/* AA64, bit [0]: warm reset executes in AArch64 state when set */
#define AARCH64_RMR_EL1_AA64 0x1U

/* RR, bit [1]: request a warm reset */
#define AARCH64_RMR_EL1_RR 0x2U

/* Read the RMR_EL1 register via MRS. */
static inline uint64_t _AArch64_Read_rmr_el1( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, RMR_EL1" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* Write the RMR_EL1 register via MSR. */
static inline void _AArch64_Write_rmr_el1( uint64_t value )
{
  __asm__ volatile (
    "msr RMR_EL1, %0" : : "r" ( value ) : "memory"
  );
}

/* RMR_EL2, Reset Management Register (EL2) */

#define AARCH64_RMR_EL2_AA64 0x1U

#define AARCH64_RMR_EL2_RR 0x2U

/* Read the RMR_EL2 register via MRS. */
static inline uint64_t _AArch64_Read_rmr_el2( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, RMR_EL2" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* Write the RMR_EL2 register via MSR. */
static inline void _AArch64_Write_rmr_el2( uint64_t value )
{
  __asm__ volatile (
    "msr RMR_EL2, %0" : : "r" ( value ) : "memory"
  );
}

/* RMR_EL3, Reset Management Register (EL3) */

#define AARCH64_RMR_EL3_AA64 0x1U

#define AARCH64_RMR_EL3_RR 0x2U

/* Read the RMR_EL3 register via MRS. */
static inline uint64_t _AArch64_Read_rmr_el3( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, RMR_EL3" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* Write the RMR_EL3 register via MSR. */
static inline void _AArch64_Write_rmr_el3( uint64_t value )
{
  __asm__ volatile (
    "msr RMR_EL3, %0" : : "r" ( value ) : "memory"
  );
}
5019
/* RNDR, Random Number */

/* Read the RNDR register via MRS (returns a random number, FEAT_RNG). */
static inline uint64_t _AArch64_Read_rndr( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, RNDR" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* RNDRRS, Reseeded Random Number */

/* Read the RNDRRS register via MRS (reseeds before returning, FEAT_RNG). */
static inline uint64_t _AArch64_Read_rndrrs( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, RNDRRS" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* RVBAR_EL1, Reset Vector Base Address Register (if EL2 and EL3 not implemented) */

/* Read the RVBAR_EL1 register via MRS (read-only). */
static inline uint64_t _AArch64_Read_rvbar_el1( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, RVBAR_EL1" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* RVBAR_EL2, Reset Vector Base Address Register (if EL3 not implemented) */

/* Read the RVBAR_EL2 register via MRS (read-only). */
static inline uint64_t _AArch64_Read_rvbar_el2( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, RVBAR_EL2" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* RVBAR_EL3, Reset Vector Base Address Register (if EL3 implemented) */

/* Read the RVBAR_EL3 register via MRS (read-only). */
static inline uint64_t _AArch64_Read_rvbar_el3( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, RVBAR_EL3" : "=&r" ( value ) : : "memory"
  );

  return value;
}
5084
/* SCR_EL3, Secure Configuration Register */

#define AARCH64_SCR_EL3_NS 0x1U

#define AARCH64_SCR_EL3_IRQ 0x2U

#define AARCH64_SCR_EL3_FIQ 0x4U

#define AARCH64_SCR_EL3_EA 0x8U

#define AARCH64_SCR_EL3_SMD 0x80U

#define AARCH64_SCR_EL3_HCE 0x100U

#define AARCH64_SCR_EL3_SIF 0x200U

#define AARCH64_SCR_EL3_RW 0x400U

#define AARCH64_SCR_EL3_ST 0x800U

#define AARCH64_SCR_EL3_TWI 0x1000U

#define AARCH64_SCR_EL3_TWE 0x2000U

#define AARCH64_SCR_EL3_TLOR 0x4000U

#define AARCH64_SCR_EL3_TERR 0x8000U

#define AARCH64_SCR_EL3_APK 0x10000U

#define AARCH64_SCR_EL3_API 0x20000U

#define AARCH64_SCR_EL3_EEL2 0x40000U

#define AARCH64_SCR_EL3_EASE 0x80000U

#define AARCH64_SCR_EL3_NMEA 0x100000U

#define AARCH64_SCR_EL3_FIEN 0x200000U

#define AARCH64_SCR_EL3_ENSCXT 0x2000000U

#define AARCH64_SCR_EL3_ATA 0x4000000U

#define AARCH64_SCR_EL3_FGTEN 0x8000000U

#define AARCH64_SCR_EL3_ECVEN 0x10000000U

#define AARCH64_SCR_EL3_TWEDEN 0x20000000U

/*
 * TWEDEL, bits [33:30]: WFE trap delay.  The value is cast to uint64_t so
 * that shifting into bits above 31 is well-defined for int arguments.
 */
#define AARCH64_SCR_EL3_TWEDEL( _val ) ( ( uint64_t ) ( _val ) << 30 )
#define AARCH64_SCR_EL3_TWEDEL_SHIFT 30
#define AARCH64_SCR_EL3_TWEDEL_MASK 0x3c0000000ULL
#define AARCH64_SCR_EL3_TWEDEL_GET( _reg ) \
  ( ( ( _reg ) >> 30 ) & 0xfULL )

#define AARCH64_SCR_EL3_AMVOFFEN 0x800000000ULL

/* Read the SCR_EL3 register via MRS. */
static inline uint64_t _AArch64_Read_scr_el3( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, SCR_EL3" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* Write the SCR_EL3 register via MSR. */
static inline void _AArch64_Write_scr_el3( uint64_t value )
{
  __asm__ volatile (
    "msr SCR_EL3, %0" : : "r" ( value ) : "memory"
  );
}
5160
/* SCTLR_EL1, System Control Register (EL1) */

#define AARCH64_SCTLR_EL1_M 0x1U

#define AARCH64_SCTLR_EL1_A 0x2U

#define AARCH64_SCTLR_EL1_C 0x4U

#define AARCH64_SCTLR_EL1_SA 0x8U

#define AARCH64_SCTLR_EL1_SA0 0x10U

#define AARCH64_SCTLR_EL1_CP15BEN 0x20U

#define AARCH64_SCTLR_EL1_NAA 0x40U

#define AARCH64_SCTLR_EL1_ITD 0x80U

#define AARCH64_SCTLR_EL1_SED 0x100U

#define AARCH64_SCTLR_EL1_UMA 0x200U

#define AARCH64_SCTLR_EL1_ENRCTX 0x400U

#define AARCH64_SCTLR_EL1_EOS 0x800U

#define AARCH64_SCTLR_EL1_I 0x1000U

#define AARCH64_SCTLR_EL1_ENDB 0x2000U

#define AARCH64_SCTLR_EL1_DZE 0x4000U

#define AARCH64_SCTLR_EL1_UCT 0x8000U

#define AARCH64_SCTLR_EL1_NTWI 0x10000U

#define AARCH64_SCTLR_EL1_NTWE 0x40000U

#define AARCH64_SCTLR_EL1_WXN 0x80000U

#define AARCH64_SCTLR_EL1_TSCXT 0x100000U

#define AARCH64_SCTLR_EL1_IESB 0x200000U

#define AARCH64_SCTLR_EL1_EIS 0x400000U

#define AARCH64_SCTLR_EL1_SPAN 0x800000U

#define AARCH64_SCTLR_EL1_E0E 0x1000000U

#define AARCH64_SCTLR_EL1_EE 0x2000000U

#define AARCH64_SCTLR_EL1_UCI 0x4000000U

#define AARCH64_SCTLR_EL1_ENDA 0x8000000U

#define AARCH64_SCTLR_EL1_NTLSMD 0x10000000U

#define AARCH64_SCTLR_EL1_LSMAOE 0x20000000U

#define AARCH64_SCTLR_EL1_ENIB 0x40000000U

#define AARCH64_SCTLR_EL1_ENIA 0x80000000U

#define AARCH64_SCTLR_EL1_BT0 0x800000000ULL

#define AARCH64_SCTLR_EL1_BT1 0x1000000000ULL

#define AARCH64_SCTLR_EL1_ITFSB 0x2000000000ULL

/*
 * TCF0, bits [39:38]: EL0 tag check fault handling.  The uint64_t cast keeps
 * the shift well-defined for int arguments.
 */
#define AARCH64_SCTLR_EL1_TCF0( _val ) ( ( uint64_t ) ( _val ) << 38 )
#define AARCH64_SCTLR_EL1_TCF0_SHIFT 38
#define AARCH64_SCTLR_EL1_TCF0_MASK 0xc000000000ULL
#define AARCH64_SCTLR_EL1_TCF0_GET( _reg ) \
  ( ( ( _reg ) >> 38 ) & 0x3ULL )

/* TCF, bits [41:40]: EL1 tag check fault handling */
#define AARCH64_SCTLR_EL1_TCF( _val ) ( ( uint64_t ) ( _val ) << 40 )
#define AARCH64_SCTLR_EL1_TCF_SHIFT 40
#define AARCH64_SCTLR_EL1_TCF_MASK 0x30000000000ULL
#define AARCH64_SCTLR_EL1_TCF_GET( _reg ) \
  ( ( ( _reg ) >> 40 ) & 0x3ULL )

#define AARCH64_SCTLR_EL1_ATA0 0x40000000000ULL

#define AARCH64_SCTLR_EL1_ATA 0x80000000000ULL

#define AARCH64_SCTLR_EL1_DSSBS 0x100000000000ULL

#define AARCH64_SCTLR_EL1_TWEDEN 0x200000000000ULL

/* TWEDEL, bits [49:46]: WFE trap delay */
#define AARCH64_SCTLR_EL1_TWEDEL( _val ) ( ( uint64_t ) ( _val ) << 46 )
#define AARCH64_SCTLR_EL1_TWEDEL_SHIFT 46
#define AARCH64_SCTLR_EL1_TWEDEL_MASK 0x3c00000000000ULL
#define AARCH64_SCTLR_EL1_TWEDEL_GET( _reg ) \
  ( ( ( _reg ) >> 46 ) & 0xfULL )

/* Read the SCTLR_EL1 register via MRS. */
static inline uint64_t _AArch64_Read_sctlr_el1( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, SCTLR_EL1" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* Write the SCTLR_EL1 register via MSR. */
static inline void _AArch64_Write_sctlr_el1( uint64_t value )
{
  __asm__ volatile (
    "msr SCTLR_EL1, %0" : : "r" ( value ) : "memory"
  );
}
5274
/* SCTLR_EL2, System Control Register (EL2) */

#define AARCH64_SCTLR_EL2_M 0x1U

#define AARCH64_SCTLR_EL2_A 0x2U

#define AARCH64_SCTLR_EL2_C 0x4U

#define AARCH64_SCTLR_EL2_SA 0x8U

#define AARCH64_SCTLR_EL2_SA0 0x10U

#define AARCH64_SCTLR_EL2_CP15BEN 0x20U

#define AARCH64_SCTLR_EL2_NAA 0x40U

#define AARCH64_SCTLR_EL2_ITD 0x80U

#define AARCH64_SCTLR_EL2_SED 0x100U

#define AARCH64_SCTLR_EL2_ENRCTX 0x400U

#define AARCH64_SCTLR_EL2_EOS 0x800U

#define AARCH64_SCTLR_EL2_I 0x1000U

#define AARCH64_SCTLR_EL2_ENDB 0x2000U

#define AARCH64_SCTLR_EL2_DZE 0x4000U

#define AARCH64_SCTLR_EL2_UCT 0x8000U

#define AARCH64_SCTLR_EL2_NTWI 0x10000U

#define AARCH64_SCTLR_EL2_NTWE 0x40000U

#define AARCH64_SCTLR_EL2_WXN 0x80000U

#define AARCH64_SCTLR_EL2_TSCXT 0x100000U

#define AARCH64_SCTLR_EL2_IESB 0x200000U

#define AARCH64_SCTLR_EL2_EIS 0x400000U

#define AARCH64_SCTLR_EL2_SPAN 0x800000U

#define AARCH64_SCTLR_EL2_E0E 0x1000000U

#define AARCH64_SCTLR_EL2_EE 0x2000000U

#define AARCH64_SCTLR_EL2_UCI 0x4000000U

#define AARCH64_SCTLR_EL2_ENDA 0x8000000U

#define AARCH64_SCTLR_EL2_NTLSMD 0x10000000U

#define AARCH64_SCTLR_EL2_LSMAOE 0x20000000U

#define AARCH64_SCTLR_EL2_ENIB 0x40000000U

#define AARCH64_SCTLR_EL2_ENIA 0x80000000U

#define AARCH64_SCTLR_EL2_BT0 0x800000000ULL

/* BT and BT1 name the same bit [36]; the field name differs by HCR_EL2.E2H. */
#define AARCH64_SCTLR_EL2_BT 0x1000000000ULL

#define AARCH64_SCTLR_EL2_BT1 0x1000000000ULL

#define AARCH64_SCTLR_EL2_ITFSB 0x2000000000ULL

/*
 * TCF0, bits [39:38]: tag check fault handling.  The uint64_t cast keeps the
 * shift well-defined for int arguments.
 */
#define AARCH64_SCTLR_EL2_TCF0( _val ) ( ( uint64_t ) ( _val ) << 38 )
#define AARCH64_SCTLR_EL2_TCF0_SHIFT 38
#define AARCH64_SCTLR_EL2_TCF0_MASK 0xc000000000ULL
#define AARCH64_SCTLR_EL2_TCF0_GET( _reg ) \
  ( ( ( _reg ) >> 38 ) & 0x3ULL )

/* TCF, bits [41:40]: tag check fault handling */
#define AARCH64_SCTLR_EL2_TCF( _val ) ( ( uint64_t ) ( _val ) << 40 )
#define AARCH64_SCTLR_EL2_TCF_SHIFT 40
#define AARCH64_SCTLR_EL2_TCF_MASK 0x30000000000ULL
#define AARCH64_SCTLR_EL2_TCF_GET( _reg ) \
  ( ( ( _reg ) >> 40 ) & 0x3ULL )

#define AARCH64_SCTLR_EL2_ATA0 0x40000000000ULL

#define AARCH64_SCTLR_EL2_ATA 0x80000000000ULL

#define AARCH64_SCTLR_EL2_DSSBS 0x100000000000ULL

#define AARCH64_SCTLR_EL2_TWEDEN 0x200000000000ULL

/* TWEDEL, bits [49:46]: WFE trap delay */
#define AARCH64_SCTLR_EL2_TWEDEL( _val ) ( ( uint64_t ) ( _val ) << 46 )
#define AARCH64_SCTLR_EL2_TWEDEL_SHIFT 46
#define AARCH64_SCTLR_EL2_TWEDEL_MASK 0x3c00000000000ULL
#define AARCH64_SCTLR_EL2_TWEDEL_GET( _reg ) \
  ( ( ( _reg ) >> 46 ) & 0xfULL )

/* Read the SCTLR_EL2 register via MRS. */
static inline uint64_t _AArch64_Read_sctlr_el2( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, SCTLR_EL2" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* Write the SCTLR_EL2 register via MSR. */
static inline void _AArch64_Write_sctlr_el2( uint64_t value )
{
  __asm__ volatile (
    "msr SCTLR_EL2, %0" : : "r" ( value ) : "memory"
  );
}
5388
/* SCTLR_EL3, System Control Register (EL3) */

#define AARCH64_SCTLR_EL3_M 0x1U

#define AARCH64_SCTLR_EL3_A 0x2U

#define AARCH64_SCTLR_EL3_C 0x4U

#define AARCH64_SCTLR_EL3_SA 0x8U

#define AARCH64_SCTLR_EL3_NAA 0x40U

#define AARCH64_SCTLR_EL3_EOS 0x800U

#define AARCH64_SCTLR_EL3_I 0x1000U

#define AARCH64_SCTLR_EL3_ENDB 0x2000U

#define AARCH64_SCTLR_EL3_WXN 0x80000U

#define AARCH64_SCTLR_EL3_IESB 0x200000U

#define AARCH64_SCTLR_EL3_EIS 0x400000U

#define AARCH64_SCTLR_EL3_EE 0x2000000U

#define AARCH64_SCTLR_EL3_ENDA 0x8000000U

#define AARCH64_SCTLR_EL3_ENIB 0x40000000U

#define AARCH64_SCTLR_EL3_ENIA 0x80000000U

#define AARCH64_SCTLR_EL3_BT 0x1000000000ULL

#define AARCH64_SCTLR_EL3_ITFSB 0x2000000000ULL

/*
 * TCF, bits [41:40]: tag check fault handling.  The uint64_t cast keeps the
 * shift well-defined for int arguments.
 */
#define AARCH64_SCTLR_EL3_TCF( _val ) ( ( uint64_t ) ( _val ) << 40 )
#define AARCH64_SCTLR_EL3_TCF_SHIFT 40
#define AARCH64_SCTLR_EL3_TCF_MASK 0x30000000000ULL
#define AARCH64_SCTLR_EL3_TCF_GET( _reg ) \
  ( ( ( _reg ) >> 40 ) & 0x3ULL )

#define AARCH64_SCTLR_EL3_ATA 0x80000000000ULL

#define AARCH64_SCTLR_EL3_DSSBS 0x100000000000ULL

/* Read the SCTLR_EL3 register via MRS. */
static inline uint64_t _AArch64_Read_sctlr_el3( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, SCTLR_EL3" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* Write the SCTLR_EL3 register via MSR. */
static inline void _AArch64_Write_sctlr_el3( uint64_t value )
{
  __asm__ volatile (
    "msr SCTLR_EL3, %0" : : "r" ( value ) : "memory"
  );
}
5452
/* SCXTNUM_EL0, EL0 Read/Write Software Context Number */

/* Read the SCXTNUM_EL0 register via MRS. */
static inline uint64_t _AArch64_Read_scxtnum_el0( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, SCXTNUM_EL0" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* Write the SCXTNUM_EL0 register via MSR. */
static inline void _AArch64_Write_scxtnum_el0( uint64_t value )
{
  __asm__ volatile (
    "msr SCXTNUM_EL0, %0" : : "r" ( value ) : "memory"
  );
}

/* SCXTNUM_EL1, EL1 Read/Write Software Context Number */

/* Read the SCXTNUM_EL1 register via MRS. */
static inline uint64_t _AArch64_Read_scxtnum_el1( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, SCXTNUM_EL1" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* Write the SCXTNUM_EL1 register via MSR. */
static inline void _AArch64_Write_scxtnum_el1( uint64_t value )
{
  __asm__ volatile (
    "msr SCXTNUM_EL1, %0" : : "r" ( value ) : "memory"
  );
}

/* SCXTNUM_EL2, EL2 Read/Write Software Context Number */

/* Read the SCXTNUM_EL2 register via MRS. */
static inline uint64_t _AArch64_Read_scxtnum_el2( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, SCXTNUM_EL2" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* Write the SCXTNUM_EL2 register via MSR. */
static inline void _AArch64_Write_scxtnum_el2( uint64_t value )
{
  __asm__ volatile (
    "msr SCXTNUM_EL2, %0" : : "r" ( value ) : "memory"
  );
}

/* SCXTNUM_EL3, EL3 Read/Write Software Context Number */

/* Read the SCXTNUM_EL3 register via MRS. */
static inline uint64_t _AArch64_Read_scxtnum_el3( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, SCXTNUM_EL3" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* Write the SCXTNUM_EL3 register via MSR. */
static inline void _AArch64_Write_scxtnum_el3( uint64_t value )
{
  __asm__ volatile (
    "msr SCXTNUM_EL3, %0" : : "r" ( value ) : "memory"
  );
}
5532
/* TCR_EL1, Translation Control Register (EL1) */

/* T0SZ, bits [5:0]: size offset of the TTBR0_EL1 region */
#define AARCH64_TCR_EL1_T0SZ( _val ) ( ( _val ) << 0 )
#define AARCH64_TCR_EL1_T0SZ_SHIFT 0
#define AARCH64_TCR_EL1_T0SZ_MASK 0x3fU
#define AARCH64_TCR_EL1_T0SZ_GET( _reg ) \
  ( ( ( _reg ) >> 0 ) & 0x3fU )

#define AARCH64_TCR_EL1_EPD0 0x80U

/* IRGN0, bits [9:8]: inner cacheability for TTBR0 walks */
#define AARCH64_TCR_EL1_IRGN0( _val ) ( ( _val ) << 8 )
#define AARCH64_TCR_EL1_IRGN0_SHIFT 8
#define AARCH64_TCR_EL1_IRGN0_MASK 0x300U
#define AARCH64_TCR_EL1_IRGN0_GET( _reg ) \
  ( ( ( _reg ) >> 8 ) & 0x3U )

/* ORGN0, bits [11:10]: outer cacheability for TTBR0 walks */
#define AARCH64_TCR_EL1_ORGN0( _val ) ( ( _val ) << 10 )
#define AARCH64_TCR_EL1_ORGN0_SHIFT 10
#define AARCH64_TCR_EL1_ORGN0_MASK 0xc00U
#define AARCH64_TCR_EL1_ORGN0_GET( _reg ) \
  ( ( ( _reg ) >> 10 ) & 0x3U )

/* SH0, bits [13:12]: shareability for TTBR0 walks */
#define AARCH64_TCR_EL1_SH0( _val ) ( ( _val ) << 12 )
#define AARCH64_TCR_EL1_SH0_SHIFT 12
#define AARCH64_TCR_EL1_SH0_MASK 0x3000U
#define AARCH64_TCR_EL1_SH0_GET( _reg ) \
  ( ( ( _reg ) >> 12 ) & 0x3U )

/* TG0, bits [15:14]: TTBR0 granule size */
#define AARCH64_TCR_EL1_TG0( _val ) ( ( _val ) << 14 )
#define AARCH64_TCR_EL1_TG0_SHIFT 14
#define AARCH64_TCR_EL1_TG0_MASK 0xc000U
#define AARCH64_TCR_EL1_TG0_GET( _reg ) \
  ( ( ( _reg ) >> 14 ) & 0x3U )

/* T1SZ, bits [21:16]: size offset of the TTBR1_EL1 region */
#define AARCH64_TCR_EL1_T1SZ( _val ) ( ( _val ) << 16 )
#define AARCH64_TCR_EL1_T1SZ_SHIFT 16
#define AARCH64_TCR_EL1_T1SZ_MASK 0x3f0000U
#define AARCH64_TCR_EL1_T1SZ_GET( _reg ) \
  ( ( ( _reg ) >> 16 ) & 0x3fU )

#define AARCH64_TCR_EL1_A1 0x400000U

#define AARCH64_TCR_EL1_EPD1 0x800000U

/* IRGN1, bits [25:24]: inner cacheability for TTBR1 walks */
#define AARCH64_TCR_EL1_IRGN1( _val ) ( ( _val ) << 24 )
#define AARCH64_TCR_EL1_IRGN1_SHIFT 24
#define AARCH64_TCR_EL1_IRGN1_MASK 0x3000000U
#define AARCH64_TCR_EL1_IRGN1_GET( _reg ) \
  ( ( ( _reg ) >> 24 ) & 0x3U )

/* ORGN1, bits [27:26]: outer cacheability for TTBR1 walks */
#define AARCH64_TCR_EL1_ORGN1( _val ) ( ( _val ) << 26 )
#define AARCH64_TCR_EL1_ORGN1_SHIFT 26
#define AARCH64_TCR_EL1_ORGN1_MASK 0xc000000U
#define AARCH64_TCR_EL1_ORGN1_GET( _reg ) \
  ( ( ( _reg ) >> 26 ) & 0x3U )

/* SH1, bits [29:28]: shareability for TTBR1 walks */
#define AARCH64_TCR_EL1_SH1( _val ) ( ( _val ) << 28 )
#define AARCH64_TCR_EL1_SH1_SHIFT 28
#define AARCH64_TCR_EL1_SH1_MASK 0x30000000U
#define AARCH64_TCR_EL1_SH1_GET( _reg ) \
  ( ( ( _reg ) >> 28 ) & 0x3U )

/*
 * TG1, bits [31:30]: TTBR1 granule size.  The uint64_t cast keeps the shift
 * well-defined when an int argument would overflow into the sign bit.
 */
#define AARCH64_TCR_EL1_TG1( _val ) ( ( uint64_t ) ( _val ) << 30 )
#define AARCH64_TCR_EL1_TG1_SHIFT 30
#define AARCH64_TCR_EL1_TG1_MASK 0xc0000000U
#define AARCH64_TCR_EL1_TG1_GET( _reg ) \
  ( ( ( _reg ) >> 30 ) & 0x3U )

/* IPS, bits [34:32]: intermediate physical address size */
#define AARCH64_TCR_EL1_IPS( _val ) ( ( uint64_t ) ( _val ) << 32 )
#define AARCH64_TCR_EL1_IPS_SHIFT 32
#define AARCH64_TCR_EL1_IPS_MASK 0x700000000ULL
#define AARCH64_TCR_EL1_IPS_GET( _reg ) \
  ( ( ( _reg ) >> 32 ) & 0x7ULL )

#define AARCH64_TCR_EL1_AS 0x1000000000ULL

#define AARCH64_TCR_EL1_TBI0 0x2000000000ULL

#define AARCH64_TCR_EL1_TBI1 0x4000000000ULL

#define AARCH64_TCR_EL1_HA 0x8000000000ULL

#define AARCH64_TCR_EL1_HD 0x10000000000ULL

#define AARCH64_TCR_EL1_HPD0 0x20000000000ULL

#define AARCH64_TCR_EL1_HPD1 0x40000000000ULL

#define AARCH64_TCR_EL1_HWU059 0x80000000000ULL

#define AARCH64_TCR_EL1_HWU060 0x100000000000ULL

#define AARCH64_TCR_EL1_HWU061 0x200000000000ULL

#define AARCH64_TCR_EL1_HWU062 0x400000000000ULL

#define AARCH64_TCR_EL1_HWU159 0x800000000000ULL

#define AARCH64_TCR_EL1_HWU160 0x1000000000000ULL

#define AARCH64_TCR_EL1_HWU161 0x2000000000000ULL

#define AARCH64_TCR_EL1_HWU162 0x4000000000000ULL

#define AARCH64_TCR_EL1_TBID0 0x8000000000000ULL

#define AARCH64_TCR_EL1_TBID1 0x10000000000000ULL

#define AARCH64_TCR_EL1_NFD0 0x20000000000000ULL

#define AARCH64_TCR_EL1_NFD1 0x40000000000000ULL

#define AARCH64_TCR_EL1_E0PD0 0x80000000000000ULL

#define AARCH64_TCR_EL1_E0PD1 0x100000000000000ULL

#define AARCH64_TCR_EL1_TCMA0 0x200000000000000ULL

#define AARCH64_TCR_EL1_TCMA1 0x400000000000000ULL

/* Read the TCR_EL1 register via MRS. */
static inline uint64_t _AArch64_Read_tcr_el1( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, TCR_EL1" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* Write the TCR_EL1 register via MSR. */
static inline void _AArch64_Write_tcr_el1( uint64_t value )
{
  __asm__ volatile (
    "msr TCR_EL1, %0" : : "r" ( value ) : "memory"
  );
}
5670
/* TCR_EL2, Translation Control Register (EL2) */

/* T0SZ, bits [5:0]: size offset of the TTBR0_EL2 region */
#define AARCH64_TCR_EL2_T0SZ( _val ) ( ( _val ) << 0 )
#define AARCH64_TCR_EL2_T0SZ_SHIFT 0
#define AARCH64_TCR_EL2_T0SZ_MASK 0x3fU
#define AARCH64_TCR_EL2_T0SZ_GET( _reg ) \
  ( ( ( _reg ) >> 0 ) & 0x3fU )

#define AARCH64_TCR_EL2_EPD0 0x80U

#define AARCH64_TCR_EL2_IRGN0( _val ) ( ( _val ) << 8 )
#define AARCH64_TCR_EL2_IRGN0_SHIFT 8
#define AARCH64_TCR_EL2_IRGN0_MASK 0x300U
#define AARCH64_TCR_EL2_IRGN0_GET( _reg ) \
  ( ( ( _reg ) >> 8 ) & 0x3U )

#define AARCH64_TCR_EL2_ORGN0( _val ) ( ( _val ) << 10 )
#define AARCH64_TCR_EL2_ORGN0_SHIFT 10
#define AARCH64_TCR_EL2_ORGN0_MASK 0xc00U
#define AARCH64_TCR_EL2_ORGN0_GET( _reg ) \
  ( ( ( _reg ) >> 10 ) & 0x3U )

#define AARCH64_TCR_EL2_SH0( _val ) ( ( _val ) << 12 )
#define AARCH64_TCR_EL2_SH0_SHIFT 12
#define AARCH64_TCR_EL2_SH0_MASK 0x3000U
#define AARCH64_TCR_EL2_SH0_GET( _reg ) \
  ( ( ( _reg ) >> 12 ) & 0x3U )

#define AARCH64_TCR_EL2_TG0( _val ) ( ( _val ) << 14 )
#define AARCH64_TCR_EL2_TG0_SHIFT 14
#define AARCH64_TCR_EL2_TG0_MASK 0xc000U
#define AARCH64_TCR_EL2_TG0_GET( _reg ) \
  ( ( ( _reg ) >> 14 ) & 0x3U )

/*
 * PS and T1SZ overlap at bits [18:16] / [21:16]: the register layout depends
 * on HCR_EL2.E2H (PS applies when E2H == 0, T1SZ when E2H == 1).
 */
#define AARCH64_TCR_EL2_PS( _val ) ( ( _val ) << 16 )
#define AARCH64_TCR_EL2_PS_SHIFT 16
#define AARCH64_TCR_EL2_PS_MASK 0x70000U
#define AARCH64_TCR_EL2_PS_GET( _reg ) \
  ( ( ( _reg ) >> 16 ) & 0x7U )

#define AARCH64_TCR_EL2_T1SZ( _val ) ( ( _val ) << 16 )
#define AARCH64_TCR_EL2_T1SZ_SHIFT 16
#define AARCH64_TCR_EL2_T1SZ_MASK 0x3f0000U
#define AARCH64_TCR_EL2_T1SZ_GET( _reg ) \
  ( ( ( _reg ) >> 16 ) & 0x3fU )

#define AARCH64_TCR_EL2_TBI 0x100000U

/* HA_0/HD_0 are the E2H == 0 positions; HA_1/HD_1 the E2H == 1 positions. */
#define AARCH64_TCR_EL2_HA_0 0x200000U

#define AARCH64_TCR_EL2_A1 0x400000U

#define AARCH64_TCR_EL2_HD_0 0x400000U

#define AARCH64_TCR_EL2_EPD1 0x800000U

#define AARCH64_TCR_EL2_HPD 0x1000000U

#define AARCH64_TCR_EL2_IRGN1( _val ) ( ( _val ) << 24 )
#define AARCH64_TCR_EL2_IRGN1_SHIFT 24
#define AARCH64_TCR_EL2_IRGN1_MASK 0x3000000U
#define AARCH64_TCR_EL2_IRGN1_GET( _reg ) \
  ( ( ( _reg ) >> 24 ) & 0x3U )

#define AARCH64_TCR_EL2_HWU59 0x2000000U

#define AARCH64_TCR_EL2_HWU60 0x4000000U

#define AARCH64_TCR_EL2_ORGN1( _val ) ( ( _val ) << 26 )
#define AARCH64_TCR_EL2_ORGN1_SHIFT 26
#define AARCH64_TCR_EL2_ORGN1_MASK 0xc000000U
#define AARCH64_TCR_EL2_ORGN1_GET( _reg ) \
  ( ( ( _reg ) >> 26 ) & 0x3U )

#define AARCH64_TCR_EL2_HWU61 0x8000000U

#define AARCH64_TCR_EL2_HWU62 0x10000000U

#define AARCH64_TCR_EL2_SH1( _val ) ( ( _val ) << 28 )
#define AARCH64_TCR_EL2_SH1_SHIFT 28
#define AARCH64_TCR_EL2_SH1_MASK 0x30000000U
#define AARCH64_TCR_EL2_SH1_GET( _reg ) \
  ( ( ( _reg ) >> 28 ) & 0x3U )

#define AARCH64_TCR_EL2_TBID 0x20000000U

#define AARCH64_TCR_EL2_TCMA 0x40000000U

/*
 * TG1, bits [31:30]: TTBR1 granule size.  The uint64_t cast keeps the shift
 * well-defined when an int argument would overflow into the sign bit.
 */
#define AARCH64_TCR_EL2_TG1( _val ) ( ( uint64_t ) ( _val ) << 30 )
#define AARCH64_TCR_EL2_TG1_SHIFT 30
#define AARCH64_TCR_EL2_TG1_MASK 0xc0000000U
#define AARCH64_TCR_EL2_TG1_GET( _reg ) \
  ( ( ( _reg ) >> 30 ) & 0x3U )

/* IPS, bits [34:32]: intermediate physical address size */
#define AARCH64_TCR_EL2_IPS( _val ) ( ( uint64_t ) ( _val ) << 32 )
#define AARCH64_TCR_EL2_IPS_SHIFT 32
#define AARCH64_TCR_EL2_IPS_MASK 0x700000000ULL
#define AARCH64_TCR_EL2_IPS_GET( _reg ) \
  ( ( ( _reg ) >> 32 ) & 0x7ULL )

#define AARCH64_TCR_EL2_AS 0x1000000000ULL

#define AARCH64_TCR_EL2_TBI0 0x2000000000ULL

#define AARCH64_TCR_EL2_TBI1 0x4000000000ULL

#define AARCH64_TCR_EL2_HA_1 0x8000000000ULL

#define AARCH64_TCR_EL2_HD_1 0x10000000000ULL

#define AARCH64_TCR_EL2_HPD0 0x20000000000ULL

#define AARCH64_TCR_EL2_HPD1 0x40000000000ULL

#define AARCH64_TCR_EL2_HWU059 0x80000000000ULL

#define AARCH64_TCR_EL2_HWU060 0x100000000000ULL

#define AARCH64_TCR_EL2_HWU061 0x200000000000ULL

#define AARCH64_TCR_EL2_HWU062 0x400000000000ULL

#define AARCH64_TCR_EL2_HWU159 0x800000000000ULL

#define AARCH64_TCR_EL2_HWU160 0x1000000000000ULL

#define AARCH64_TCR_EL2_HWU161 0x2000000000000ULL

#define AARCH64_TCR_EL2_HWU162 0x4000000000000ULL

#define AARCH64_TCR_EL2_TBID0 0x8000000000000ULL

#define AARCH64_TCR_EL2_TBID1 0x10000000000000ULL

#define AARCH64_TCR_EL2_NFD0 0x20000000000000ULL

#define AARCH64_TCR_EL2_NFD1 0x40000000000000ULL

#define AARCH64_TCR_EL2_E0PD0 0x80000000000000ULL

#define AARCH64_TCR_EL2_E0PD1 0x100000000000000ULL

#define AARCH64_TCR_EL2_TCMA0 0x200000000000000ULL

#define AARCH64_TCR_EL2_TCMA1 0x400000000000000ULL

/* Read the TCR_EL2 register via MRS. */
static inline uint64_t _AArch64_Read_tcr_el2( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, TCR_EL2" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* Write the TCR_EL2 register via MSR. */
static inline void _AArch64_Write_tcr_el2( uint64_t value )
{
  __asm__ volatile (
    "msr TCR_EL2, %0" : : "r" ( value ) : "memory"
  );
}
5834
/* TCR_EL3, Translation Control Register (EL3) */

/* T0SZ, bits [5:0]: size offset of the TTBR0_EL3 region */
#define AARCH64_TCR_EL3_T0SZ( _val ) ( ( _val ) << 0 )
#define AARCH64_TCR_EL3_T0SZ_SHIFT 0
#define AARCH64_TCR_EL3_T0SZ_MASK 0x3fU
#define AARCH64_TCR_EL3_T0SZ_GET( _reg ) \
  ( ( ( _reg ) >> 0 ) & 0x3fU )

/* IRGN0, bits [9:8]: inner cacheability for table walks */
#define AARCH64_TCR_EL3_IRGN0( _val ) ( ( _val ) << 8 )
#define AARCH64_TCR_EL3_IRGN0_SHIFT 8
#define AARCH64_TCR_EL3_IRGN0_MASK 0x300U
#define AARCH64_TCR_EL3_IRGN0_GET( _reg ) \
  ( ( ( _reg ) >> 8 ) & 0x3U )

/* ORGN0, bits [11:10]: outer cacheability for table walks */
#define AARCH64_TCR_EL3_ORGN0( _val ) ( ( _val ) << 10 )
#define AARCH64_TCR_EL3_ORGN0_SHIFT 10
#define AARCH64_TCR_EL3_ORGN0_MASK 0xc00U
#define AARCH64_TCR_EL3_ORGN0_GET( _reg ) \
  ( ( ( _reg ) >> 10 ) & 0x3U )

/* SH0, bits [13:12]: shareability for table walks */
#define AARCH64_TCR_EL3_SH0( _val ) ( ( _val ) << 12 )
#define AARCH64_TCR_EL3_SH0_SHIFT 12
#define AARCH64_TCR_EL3_SH0_MASK 0x3000U
#define AARCH64_TCR_EL3_SH0_GET( _reg ) \
  ( ( ( _reg ) >> 12 ) & 0x3U )

/* TG0, bits [15:14]: granule size */
#define AARCH64_TCR_EL3_TG0( _val ) ( ( _val ) << 14 )
#define AARCH64_TCR_EL3_TG0_SHIFT 14
#define AARCH64_TCR_EL3_TG0_MASK 0xc000U
#define AARCH64_TCR_EL3_TG0_GET( _reg ) \
  ( ( ( _reg ) >> 14 ) & 0x3U )

/* PS, bits [18:16]: physical address size */
#define AARCH64_TCR_EL3_PS( _val ) ( ( _val ) << 16 )
#define AARCH64_TCR_EL3_PS_SHIFT 16
#define AARCH64_TCR_EL3_PS_MASK 0x70000U
#define AARCH64_TCR_EL3_PS_GET( _reg ) \
  ( ( ( _reg ) >> 16 ) & 0x7U )

#define AARCH64_TCR_EL3_TBI 0x100000U

#define AARCH64_TCR_EL3_HA 0x200000U

#define AARCH64_TCR_EL3_HD 0x400000U

#define AARCH64_TCR_EL3_HPD 0x1000000U

#define AARCH64_TCR_EL3_HWU59 0x2000000U

#define AARCH64_TCR_EL3_HWU60 0x4000000U

#define AARCH64_TCR_EL3_HWU61 0x8000000U

#define AARCH64_TCR_EL3_HWU62 0x10000000U

#define AARCH64_TCR_EL3_TBID 0x20000000U

#define AARCH64_TCR_EL3_TCMA 0x40000000U

/* Read the TCR_EL3 register via MRS. */
static inline uint64_t _AArch64_Read_tcr_el3( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, TCR_EL3" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* Write the TCR_EL3 register via MSR. */
static inline void _AArch64_Write_tcr_el3( uint64_t value )
{
  __asm__ volatile (
    "msr TCR_EL3, %0" : : "r" ( value ) : "memory"
  );
}
5910
/* TFSRE0_EL1, Tag Fault Status Register (EL0). */

/* TF0/TF1: a tag check fault occurred in the lower/upper address half */
#define AARCH64_TFSRE0_EL1_TF0 0x1U

#define AARCH64_TFSRE0_EL1_TF1 0x2U

/* Read the TFSRE0_EL1 register via MRS. */
static inline uint64_t _AArch64_Read_tfsre0_el1( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, TFSRE0_EL1" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* Write the TFSRE0_EL1 register via MSR. */
static inline void _AArch64_Write_tfsre0_el1( uint64_t value )
{
  __asm__ volatile (
    "msr TFSRE0_EL1, %0" : : "r" ( value ) : "memory"
  );
}

/* TFSR_EL1, Tag Fault Status Register (EL1) */

#define AARCH64_TFSR_EL1_TF0 0x1U

#define AARCH64_TFSR_EL1_TF1 0x2U

/* Read the TFSR_EL1 register via MRS. */
static inline uint64_t _AArch64_Read_tfsr_el1( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, TFSR_EL1" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* Write the TFSR_EL1 register via MSR. */
static inline void _AArch64_Write_tfsr_el1( uint64_t value )
{
  __asm__ volatile (
    "msr TFSR_EL1, %0" : : "r" ( value ) : "memory"
  );
}

/* TFSR_EL2, Tag Fault Status Register (EL2) */

#define AARCH64_TFSR_EL2_TF0 0x1U

#define AARCH64_TFSR_EL2_TF1 0x2U

/* Read the TFSR_EL2 register via MRS. */
static inline uint64_t _AArch64_Read_tfsr_el2( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, TFSR_EL2" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* Write the TFSR_EL2 register via MSR. */
static inline void _AArch64_Write_tfsr_el2( uint64_t value )
{
  __asm__ volatile (
    "msr TFSR_EL2, %0" : : "r" ( value ) : "memory"
  );
}

/* TFSR_EL3, Tag Fault Status Register (EL3) */

/* EL3 has a single address half, so only TF0 is defined. */
#define AARCH64_TFSR_EL3_TF0 0x1U

/* Read the TFSR_EL3 register via MRS. */
static inline uint64_t _AArch64_Read_tfsr_el3( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, TFSR_EL3" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* Write the TFSR_EL3 register via MSR. */
static inline void _AArch64_Write_tfsr_el3( uint64_t value )
{
  __asm__ volatile (
    "msr TFSR_EL3, %0" : : "r" ( value ) : "memory"
  );
}
6004
/* TPIDR_EL0, EL0 Read/Write Software Thread ID Register */

/* Read the TPIDR_EL0 register via MRS. */
static inline uint64_t _AArch64_Read_tpidr_el0( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, TPIDR_EL0" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* Write the TPIDR_EL0 register via MSR. */
static inline void _AArch64_Write_tpidr_el0( uint64_t value )
{
  __asm__ volatile (
    "msr TPIDR_EL0, %0" : : "r" ( value ) : "memory"
  );
}

/* TPIDR_EL1, EL1 Software Thread ID Register */

/* Read the TPIDR_EL1 register via MRS. */
static inline uint64_t _AArch64_Read_tpidr_el1( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, TPIDR_EL1" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* Write the TPIDR_EL1 register via MSR. */
static inline void _AArch64_Write_tpidr_el1( uint64_t value )
{
  __asm__ volatile (
    "msr TPIDR_EL1, %0" : : "r" ( value ) : "memory"
  );
}

/* TPIDR_EL2, EL2 Software Thread ID Register */

/* Read the TPIDR_EL2 register via MRS. */
static inline uint64_t _AArch64_Read_tpidr_el2( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, TPIDR_EL2" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* Write the TPIDR_EL2 register via MSR. */
static inline void _AArch64_Write_tpidr_el2( uint64_t value )
{
  __asm__ volatile (
    "msr TPIDR_EL2, %0" : : "r" ( value ) : "memory"
  );
}

/* TPIDR_EL3, EL3 Software Thread ID Register */

/* Read the TPIDR_EL3 register via MRS. */
static inline uint64_t _AArch64_Read_tpidr_el3( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, TPIDR_EL3" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* Write the TPIDR_EL3 register via MSR. */
static inline void _AArch64_Write_tpidr_el3( uint64_t value )
{
  __asm__ volatile (
    "msr TPIDR_EL3, %0" : : "r" ( value ) : "memory"
  );
}

/* TPIDRRO_EL0, EL0 Read-Only Software Thread ID Register */

/* Read the TPIDRRO_EL0 register via MRS (read-only from EL0, R/W above). */
static inline uint64_t _AArch64_Read_tpidrro_el0( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, TPIDRRO_EL0" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* Write the TPIDRRO_EL0 register via MSR. */
static inline void _AArch64_Write_tpidrro_el0( uint64_t value )
{
  __asm__ volatile (
    "msr TPIDRRO_EL0, %0" : : "r" ( value ) : "memory"
  );
}
6104
/* TTBR0_EL1, Translation Table Base Register 0 (EL1) */

/* Bit-field encode/extract macros for TTBR0_EL1. */
#define AARCH64_TTBR0_EL1_CNP 0x1U

#define AARCH64_TTBR0_EL1_BADDR( _val ) ( ( _val ) << 1 )
#define AARCH64_TTBR0_EL1_BADDR_SHIFT 1
#define AARCH64_TTBR0_EL1_BADDR_MASK 0xfffffffffffeULL
#define AARCH64_TTBR0_EL1_BADDR_GET( _reg ) \
  ( ( ( _reg ) >> 1 ) & 0x7fffffffffffULL )

#define AARCH64_TTBR0_EL1_ASID( _val ) ( ( _val ) << 48 )
#define AARCH64_TTBR0_EL1_ASID_SHIFT 48
#define AARCH64_TTBR0_EL1_ASID_MASK 0xffff000000000000ULL
#define AARCH64_TTBR0_EL1_ASID_GET( _reg ) \
  ( ( ( _reg ) >> 48 ) & 0xffffULL )

/* Reads TTBR0_EL1 via MRS. */
static inline uint64_t _AArch64_Read_ttbr0_el1( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, TTBR0_EL1" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* Writes TTBR0_EL1 via MSR. */
static inline void _AArch64_Write_ttbr0_el1( uint64_t value )
{
  __asm__ volatile (
    "msr TTBR0_EL1, %0" : : "r" ( value ) : "memory"
  );
}
6138
/* TTBR0_EL2, Translation Table Base Register 0 (EL2) */

/* Bit-field encode/extract macros for TTBR0_EL2. */
#define AARCH64_TTBR0_EL2_CNP 0x1U

#define AARCH64_TTBR0_EL2_BADDR( _val ) ( ( _val ) << 1 )
#define AARCH64_TTBR0_EL2_BADDR_SHIFT 1
#define AARCH64_TTBR0_EL2_BADDR_MASK 0xfffffffffffeULL
#define AARCH64_TTBR0_EL2_BADDR_GET( _reg ) \
  ( ( ( _reg ) >> 1 ) & 0x7fffffffffffULL )

#define AARCH64_TTBR0_EL2_ASID( _val ) ( ( _val ) << 48 )
#define AARCH64_TTBR0_EL2_ASID_SHIFT 48
#define AARCH64_TTBR0_EL2_ASID_MASK 0xffff000000000000ULL
#define AARCH64_TTBR0_EL2_ASID_GET( _reg ) \
  ( ( ( _reg ) >> 48 ) & 0xffffULL )

/* Reads TTBR0_EL2 via MRS. */
static inline uint64_t _AArch64_Read_ttbr0_el2( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, TTBR0_EL2" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* Writes TTBR0_EL2 via MSR. */
static inline void _AArch64_Write_ttbr0_el2( uint64_t value )
{
  __asm__ volatile (
    "msr TTBR0_EL2, %0" : : "r" ( value ) : "memory"
  );
}
6172
/* TTBR0_EL3, Translation Table Base Register 0 (EL3) */

/* Bit-field encode/extract macros for TTBR0_EL3 (no ASID field at EL3). */
#define AARCH64_TTBR0_EL3_CNP 0x1U

#define AARCH64_TTBR0_EL3_BADDR( _val ) ( ( _val ) << 1 )
#define AARCH64_TTBR0_EL3_BADDR_SHIFT 1
#define AARCH64_TTBR0_EL3_BADDR_MASK 0xfffffffffffeULL
#define AARCH64_TTBR0_EL3_BADDR_GET( _reg ) \
  ( ( ( _reg ) >> 1 ) & 0x7fffffffffffULL )

/* Reads TTBR0_EL3 via MRS. */
static inline uint64_t _AArch64_Read_ttbr0_el3( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, TTBR0_EL3" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* Writes TTBR0_EL3 via MSR. */
static inline void _AArch64_Write_ttbr0_el3( uint64_t value )
{
  __asm__ volatile (
    "msr TTBR0_EL3, %0" : : "r" ( value ) : "memory"
  );
}
6200
/* TTBR1_EL1, Translation Table Base Register 1 (EL1) */

/* Bit-field encode/extract macros for TTBR1_EL1. */
#define AARCH64_TTBR1_EL1_CNP 0x1U

#define AARCH64_TTBR1_EL1_BADDR( _val ) ( ( _val ) << 1 )
#define AARCH64_TTBR1_EL1_BADDR_SHIFT 1
#define AARCH64_TTBR1_EL1_BADDR_MASK 0xfffffffffffeULL
#define AARCH64_TTBR1_EL1_BADDR_GET( _reg ) \
  ( ( ( _reg ) >> 1 ) & 0x7fffffffffffULL )

#define AARCH64_TTBR1_EL1_ASID( _val ) ( ( _val ) << 48 )
#define AARCH64_TTBR1_EL1_ASID_SHIFT 48
#define AARCH64_TTBR1_EL1_ASID_MASK 0xffff000000000000ULL
#define AARCH64_TTBR1_EL1_ASID_GET( _reg ) \
  ( ( ( _reg ) >> 48 ) & 0xffffULL )

/* Reads TTBR1_EL1 via MRS. */
static inline uint64_t _AArch64_Read_ttbr1_el1( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, TTBR1_EL1" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* Writes TTBR1_EL1 via MSR. */
static inline void _AArch64_Write_ttbr1_el1( uint64_t value )
{
  __asm__ volatile (
    "msr TTBR1_EL1, %0" : : "r" ( value ) : "memory"
  );
}
6234
/* TTBR1_EL2, Translation Table Base Register 1 (EL2) */

/* Bit-field encode/extract macros for TTBR1_EL2. */
#define AARCH64_TTBR1_EL2_CNP 0x1U

#define AARCH64_TTBR1_EL2_BADDR( _val ) ( ( _val ) << 1 )
#define AARCH64_TTBR1_EL2_BADDR_SHIFT 1
#define AARCH64_TTBR1_EL2_BADDR_MASK 0xfffffffffffeULL
#define AARCH64_TTBR1_EL2_BADDR_GET( _reg ) \
  ( ( ( _reg ) >> 1 ) & 0x7fffffffffffULL )

#define AARCH64_TTBR1_EL2_ASID( _val ) ( ( _val ) << 48 )
#define AARCH64_TTBR1_EL2_ASID_SHIFT 48
#define AARCH64_TTBR1_EL2_ASID_MASK 0xffff000000000000ULL
#define AARCH64_TTBR1_EL2_ASID_GET( _reg ) \
  ( ( ( _reg ) >> 48 ) & 0xffffULL )

/* Reads TTBR1_EL2 via MRS. */
static inline uint64_t _AArch64_Read_ttbr1_el2( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, TTBR1_EL2" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* Writes TTBR1_EL2 via MSR. */
static inline void _AArch64_Write_ttbr1_el2( uint64_t value )
{
  __asm__ volatile (
    "msr TTBR1_EL2, %0" : : "r" ( value ) : "memory"
  );
}
6268
/* VBAR_EL1, Vector Base Address Register (EL1) */

/* Reads VBAR_EL1 via MRS. */
static inline uint64_t _AArch64_Read_vbar_el1( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, VBAR_EL1" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* Writes VBAR_EL1 via MSR. */
static inline void _AArch64_Write_vbar_el1( uint64_t value )
{
  __asm__ volatile (
    "msr VBAR_EL1, %0" : : "r" ( value ) : "memory"
  );
}
6288
/* VBAR_EL2, Vector Base Address Register (EL2) */

/* Reads VBAR_EL2 via MRS. */
static inline uint64_t _AArch64_Read_vbar_el2( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, VBAR_EL2" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* Writes VBAR_EL2 via MSR. */
static inline void _AArch64_Write_vbar_el2( uint64_t value )
{
  __asm__ volatile (
    "msr VBAR_EL2, %0" : : "r" ( value ) : "memory"
  );
}
6308
/* VBAR_EL3, Vector Base Address Register (EL3) */

/* Reads VBAR_EL3 via MRS. */
static inline uint64_t _AArch64_Read_vbar_el3( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, VBAR_EL3" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* Writes VBAR_EL3 via MSR. */
static inline void _AArch64_Write_vbar_el3( uint64_t value )
{
  __asm__ volatile (
    "msr VBAR_EL3, %0" : : "r" ( value ) : "memory"
  );
}
6328
/* VMPIDR_EL2, Virtualization Multiprocessor ID Register */

/* Affinity level and flag bit-field macros for VMPIDR_EL2. */
#define AARCH64_VMPIDR_EL2_AFF0( _val ) ( ( _val ) << 0 )
#define AARCH64_VMPIDR_EL2_AFF0_SHIFT 0
#define AARCH64_VMPIDR_EL2_AFF0_MASK 0xffU
#define AARCH64_VMPIDR_EL2_AFF0_GET( _reg ) \
  ( ( ( _reg ) >> 0 ) & 0xffU )

#define AARCH64_VMPIDR_EL2_AFF1( _val ) ( ( _val ) << 8 )
#define AARCH64_VMPIDR_EL2_AFF1_SHIFT 8
#define AARCH64_VMPIDR_EL2_AFF1_MASK 0xff00U
#define AARCH64_VMPIDR_EL2_AFF1_GET( _reg ) \
  ( ( ( _reg ) >> 8 ) & 0xffU )

#define AARCH64_VMPIDR_EL2_AFF2( _val ) ( ( _val ) << 16 )
#define AARCH64_VMPIDR_EL2_AFF2_SHIFT 16
#define AARCH64_VMPIDR_EL2_AFF2_MASK 0xff0000U
#define AARCH64_VMPIDR_EL2_AFF2_GET( _reg ) \
  ( ( ( _reg ) >> 16 ) & 0xffU )

#define AARCH64_VMPIDR_EL2_MT 0x1000000U

#define AARCH64_VMPIDR_EL2_U 0x40000000U

#define AARCH64_VMPIDR_EL2_AFF3( _val ) ( ( _val ) << 32 )
#define AARCH64_VMPIDR_EL2_AFF3_SHIFT 32
#define AARCH64_VMPIDR_EL2_AFF3_MASK 0xff00000000ULL
#define AARCH64_VMPIDR_EL2_AFF3_GET( _reg ) \
  ( ( ( _reg ) >> 32 ) & 0xffULL )

/* Reads VMPIDR_EL2 via MRS. */
static inline uint64_t _AArch64_Read_vmpidr_el2( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, VMPIDR_EL2" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* Writes VMPIDR_EL2 via MSR. */
static inline void _AArch64_Write_vmpidr_el2( uint64_t value )
{
  __asm__ volatile (
    "msr VMPIDR_EL2, %0" : : "r" ( value ) : "memory"
  );
}
6376
/* VNCR_EL2, Virtual Nested Control Register */

/* Bit-field encode/extract macros for VNCR_EL2. */
#define AARCH64_VNCR_EL2_BADDR( _val ) ( ( _val ) << 12 )
#define AARCH64_VNCR_EL2_BADDR_SHIFT 12
#define AARCH64_VNCR_EL2_BADDR_MASK 0x1ffffffffff000ULL
#define AARCH64_VNCR_EL2_BADDR_GET( _reg ) \
  ( ( ( _reg ) >> 12 ) & 0x1ffffffffffULL )

#define AARCH64_VNCR_EL2_RESS( _val ) ( ( _val ) << 53 )
#define AARCH64_VNCR_EL2_RESS_SHIFT 53
#define AARCH64_VNCR_EL2_RESS_MASK 0xffe0000000000000ULL
#define AARCH64_VNCR_EL2_RESS_GET( _reg ) \
  ( ( ( _reg ) >> 53 ) & 0x7ffULL )

/* Reads VNCR_EL2 via MRS. */
static inline uint64_t _AArch64_Read_vncr_el2( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, VNCR_EL2" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* Writes VNCR_EL2 via MSR. */
static inline void _AArch64_Write_vncr_el2( uint64_t value )
{
  __asm__ volatile (
    "msr VNCR_EL2, %0" : : "r" ( value ) : "memory"
  );
}
6408
/* VPIDR_EL2, Virtualization Processor ID Register */

/* Bit-field encode/extract macros for VPIDR_EL2. */
#define AARCH64_VPIDR_EL2_REVISION( _val ) ( ( _val ) << 0 )
#define AARCH64_VPIDR_EL2_REVISION_SHIFT 0
#define AARCH64_VPIDR_EL2_REVISION_MASK 0xfU
#define AARCH64_VPIDR_EL2_REVISION_GET( _reg ) \
  ( ( ( _reg ) >> 0 ) & 0xfU )

#define AARCH64_VPIDR_EL2_PARTNUM( _val ) ( ( _val ) << 4 )
#define AARCH64_VPIDR_EL2_PARTNUM_SHIFT 4
#define AARCH64_VPIDR_EL2_PARTNUM_MASK 0xfff0U
#define AARCH64_VPIDR_EL2_PARTNUM_GET( _reg ) \
  ( ( ( _reg ) >> 4 ) & 0xfffU )

#define AARCH64_VPIDR_EL2_ARCHITECTURE( _val ) ( ( _val ) << 16 )
#define AARCH64_VPIDR_EL2_ARCHITECTURE_SHIFT 16
#define AARCH64_VPIDR_EL2_ARCHITECTURE_MASK 0xf0000U
#define AARCH64_VPIDR_EL2_ARCHITECTURE_GET( _reg ) \
  ( ( ( _reg ) >> 16 ) & 0xfU )

#define AARCH64_VPIDR_EL2_VARIANT( _val ) ( ( _val ) << 20 )
#define AARCH64_VPIDR_EL2_VARIANT_SHIFT 20
#define AARCH64_VPIDR_EL2_VARIANT_MASK 0xf00000U
#define AARCH64_VPIDR_EL2_VARIANT_GET( _reg ) \
  ( ( ( _reg ) >> 20 ) & 0xfU )

#define AARCH64_VPIDR_EL2_IMPLEMENTER( _val ) ( ( _val ) << 24 )
#define AARCH64_VPIDR_EL2_IMPLEMENTER_SHIFT 24
#define AARCH64_VPIDR_EL2_IMPLEMENTER_MASK 0xff000000U
#define AARCH64_VPIDR_EL2_IMPLEMENTER_GET( _reg ) \
  ( ( ( _reg ) >> 24 ) & 0xffU )

/* Reads VPIDR_EL2 via MRS. */
static inline uint64_t _AArch64_Read_vpidr_el2( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, VPIDR_EL2" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* Writes VPIDR_EL2 via MSR. */
static inline void _AArch64_Write_vpidr_el2( uint64_t value )
{
  __asm__ volatile (
    "msr VPIDR_EL2, %0" : : "r" ( value ) : "memory"
  );
}
6458
/* VSTCR_EL2, Virtualization Secure Translation Control Register */

/* Bit-field encode/extract macros for VSTCR_EL2. */
#define AARCH64_VSTCR_EL2_T0SZ( _val ) ( ( _val ) << 0 )
#define AARCH64_VSTCR_EL2_T0SZ_SHIFT 0
#define AARCH64_VSTCR_EL2_T0SZ_MASK 0x3fU
#define AARCH64_VSTCR_EL2_T0SZ_GET( _reg ) \
  ( ( ( _reg ) >> 0 ) & 0x3fU )

#define AARCH64_VSTCR_EL2_SL0( _val ) ( ( _val ) << 6 )
#define AARCH64_VSTCR_EL2_SL0_SHIFT 6
#define AARCH64_VSTCR_EL2_SL0_MASK 0xc0U
#define AARCH64_VSTCR_EL2_SL0_GET( _reg ) \
  ( ( ( _reg ) >> 6 ) & 0x3U )

#define AARCH64_VSTCR_EL2_TG0( _val ) ( ( _val ) << 14 )
#define AARCH64_VSTCR_EL2_TG0_SHIFT 14
#define AARCH64_VSTCR_EL2_TG0_MASK 0xc000U
#define AARCH64_VSTCR_EL2_TG0_GET( _reg ) \
  ( ( ( _reg ) >> 14 ) & 0x3U )

#define AARCH64_VSTCR_EL2_SW 0x20000000U

#define AARCH64_VSTCR_EL2_SA 0x40000000U

/* Reads VSTCR_EL2 via MRS. */
static inline uint64_t _AArch64_Read_vstcr_el2( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, VSTCR_EL2" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* Writes VSTCR_EL2 via MSR. */
static inline void _AArch64_Write_vstcr_el2( uint64_t value )
{
  __asm__ volatile (
    "msr VSTCR_EL2, %0" : : "r" ( value ) : "memory"
  );
}
6500
/* VSTTBR_EL2, Virtualization Secure Translation Table Base Register */

/* Bit-field encode/extract macros for VSTTBR_EL2. */
#define AARCH64_VSTTBR_EL2_CNP 0x1U

#define AARCH64_VSTTBR_EL2_BADDR( _val ) ( ( _val ) << 1 )
#define AARCH64_VSTTBR_EL2_BADDR_SHIFT 1
#define AARCH64_VSTTBR_EL2_BADDR_MASK 0xfffffffffffeULL
#define AARCH64_VSTTBR_EL2_BADDR_GET( _reg ) \
  ( ( ( _reg ) >> 1 ) & 0x7fffffffffffULL )

/* Reads VSTTBR_EL2 via MRS. */
static inline uint64_t _AArch64_Read_vsttbr_el2( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, VSTTBR_EL2" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* Writes VSTTBR_EL2 via MSR. */
static inline void _AArch64_Write_vsttbr_el2( uint64_t value )
{
  __asm__ volatile (
    "msr VSTTBR_EL2, %0" : : "r" ( value ) : "memory"
  );
}
6528
/* VTCR_EL2, Virtualization Translation Control Register */

/* Bit-field encode/extract macros for VTCR_EL2. */
#define AARCH64_VTCR_EL2_T0SZ( _val ) ( ( _val ) << 0 )
#define AARCH64_VTCR_EL2_T0SZ_SHIFT 0
#define AARCH64_VTCR_EL2_T0SZ_MASK 0x3fU
#define AARCH64_VTCR_EL2_T0SZ_GET( _reg ) \
  ( ( ( _reg ) >> 0 ) & 0x3fU )

#define AARCH64_VTCR_EL2_SL0( _val ) ( ( _val ) << 6 )
#define AARCH64_VTCR_EL2_SL0_SHIFT 6
#define AARCH64_VTCR_EL2_SL0_MASK 0xc0U
#define AARCH64_VTCR_EL2_SL0_GET( _reg ) \
  ( ( ( _reg ) >> 6 ) & 0x3U )

#define AARCH64_VTCR_EL2_IRGN0( _val ) ( ( _val ) << 8 )
#define AARCH64_VTCR_EL2_IRGN0_SHIFT 8
#define AARCH64_VTCR_EL2_IRGN0_MASK 0x300U
#define AARCH64_VTCR_EL2_IRGN0_GET( _reg ) \
  ( ( ( _reg ) >> 8 ) & 0x3U )

#define AARCH64_VTCR_EL2_ORGN0( _val ) ( ( _val ) << 10 )
#define AARCH64_VTCR_EL2_ORGN0_SHIFT 10
#define AARCH64_VTCR_EL2_ORGN0_MASK 0xc00U
#define AARCH64_VTCR_EL2_ORGN0_GET( _reg ) \
  ( ( ( _reg ) >> 10 ) & 0x3U )

#define AARCH64_VTCR_EL2_SH0( _val ) ( ( _val ) << 12 )
#define AARCH64_VTCR_EL2_SH0_SHIFT 12
#define AARCH64_VTCR_EL2_SH0_MASK 0x3000U
#define AARCH64_VTCR_EL2_SH0_GET( _reg ) \
  ( ( ( _reg ) >> 12 ) & 0x3U )

#define AARCH64_VTCR_EL2_TG0( _val ) ( ( _val ) << 14 )
#define AARCH64_VTCR_EL2_TG0_SHIFT 14
#define AARCH64_VTCR_EL2_TG0_MASK 0xc000U
#define AARCH64_VTCR_EL2_TG0_GET( _reg ) \
  ( ( ( _reg ) >> 14 ) & 0x3U )

#define AARCH64_VTCR_EL2_PS( _val ) ( ( _val ) << 16 )
#define AARCH64_VTCR_EL2_PS_SHIFT 16
#define AARCH64_VTCR_EL2_PS_MASK 0x70000U
#define AARCH64_VTCR_EL2_PS_GET( _reg ) \
  ( ( ( _reg ) >> 16 ) & 0x7U )

#define AARCH64_VTCR_EL2_VS 0x80000U

#define AARCH64_VTCR_EL2_HA 0x200000U

#define AARCH64_VTCR_EL2_HD 0x400000U

#define AARCH64_VTCR_EL2_HWU59 0x2000000U

#define AARCH64_VTCR_EL2_HWU60 0x4000000U

#define AARCH64_VTCR_EL2_HWU61 0x8000000U

#define AARCH64_VTCR_EL2_HWU62 0x10000000U

#define AARCH64_VTCR_EL2_NSW 0x20000000U

#define AARCH64_VTCR_EL2_NSA 0x40000000U

/* Reads VTCR_EL2 via MRS. */
static inline uint64_t _AArch64_Read_vtcr_el2( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, VTCR_EL2" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* Writes VTCR_EL2 via MSR. */
static inline void _AArch64_Write_vtcr_el2( uint64_t value )
{
  __asm__ volatile (
    "msr VTCR_EL2, %0" : : "r" ( value ) : "memory"
  );
}
6608
/* VTTBR_EL2, Virtualization Translation Table Base Register */

/* Bit-field encode/extract macros for VTTBR_EL2. */
#define AARCH64_VTTBR_EL2_CNP 0x1U

#define AARCH64_VTTBR_EL2_BADDR( _val ) ( ( _val ) << 1 )
#define AARCH64_VTTBR_EL2_BADDR_SHIFT 1
#define AARCH64_VTTBR_EL2_BADDR_MASK 0xfffffffffffeULL
#define AARCH64_VTTBR_EL2_BADDR_GET( _reg ) \
  ( ( ( _reg ) >> 1 ) & 0x7fffffffffffULL )

#define AARCH64_VTTBR_EL2_VMID_7_0( _val ) ( ( _val ) << 48 )
#define AARCH64_VTTBR_EL2_VMID_7_0_SHIFT 48
#define AARCH64_VTTBR_EL2_VMID_7_0_MASK 0xff000000000000ULL
#define AARCH64_VTTBR_EL2_VMID_7_0_GET( _reg ) \
  ( ( ( _reg ) >> 48 ) & 0xffULL )

#define AARCH64_VTTBR_EL2_VMID_15_8( _val ) ( ( _val ) << 56 )
#define AARCH64_VTTBR_EL2_VMID_15_8_SHIFT 56
#define AARCH64_VTTBR_EL2_VMID_15_8_MASK 0xff00000000000000ULL
#define AARCH64_VTTBR_EL2_VMID_15_8_GET( _reg ) \
  ( ( ( _reg ) >> 56 ) & 0xffULL )

/* Reads VTTBR_EL2 via MRS. */
static inline uint64_t _AArch64_Read_vttbr_el2( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, VTTBR_EL2" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* Writes VTTBR_EL2 via MSR. */
static inline void _AArch64_Write_vttbr_el2( uint64_t value )
{
  __asm__ volatile (
    "msr VTTBR_EL2, %0" : : "r" ( value ) : "memory"
  );
}
6648
/* DBGAUTHSTATUS_EL1, Debug Authentication Status Register */

/* Bit-field encode/extract macros for DBGAUTHSTATUS_EL1. */
#define AARCH64_DBGAUTHSTATUS_EL1_NSID( _val ) ( ( _val ) << 0 )
#define AARCH64_DBGAUTHSTATUS_EL1_NSID_SHIFT 0
#define AARCH64_DBGAUTHSTATUS_EL1_NSID_MASK 0x3U
#define AARCH64_DBGAUTHSTATUS_EL1_NSID_GET( _reg ) \
  ( ( ( _reg ) >> 0 ) & 0x3U )

#define AARCH64_DBGAUTHSTATUS_EL1_NSNID( _val ) ( ( _val ) << 2 )
#define AARCH64_DBGAUTHSTATUS_EL1_NSNID_SHIFT 2
#define AARCH64_DBGAUTHSTATUS_EL1_NSNID_MASK 0xcU
#define AARCH64_DBGAUTHSTATUS_EL1_NSNID_GET( _reg ) \
  ( ( ( _reg ) >> 2 ) & 0x3U )

#define AARCH64_DBGAUTHSTATUS_EL1_SID( _val ) ( ( _val ) << 4 )
#define AARCH64_DBGAUTHSTATUS_EL1_SID_SHIFT 4
#define AARCH64_DBGAUTHSTATUS_EL1_SID_MASK 0x30U
#define AARCH64_DBGAUTHSTATUS_EL1_SID_GET( _reg ) \
  ( ( ( _reg ) >> 4 ) & 0x3U )

#define AARCH64_DBGAUTHSTATUS_EL1_SNID( _val ) ( ( _val ) << 6 )
#define AARCH64_DBGAUTHSTATUS_EL1_SNID_SHIFT 6
#define AARCH64_DBGAUTHSTATUS_EL1_SNID_MASK 0xc0U
#define AARCH64_DBGAUTHSTATUS_EL1_SNID_GET( _reg ) \
  ( ( ( _reg ) >> 6 ) & 0x3U )

/* Reads DBGAUTHSTATUS_EL1 via MRS (no write accessor is provided). */
static inline uint64_t _AArch64_Read_dbgauthstatus_el1( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, DBGAUTHSTATUS_EL1" : "=&r" ( value ) : : "memory"
  );

  return value;
}
6685
/* DBGBCR_N_EL1, Debug Breakpoint Control Registers, n = 0 - 15 */

/*
 * The bit-field macros below apply to all DBGBCR<n>_EL1 registers; the
 * MRS/MSR accessors that follow are generated per register instance.
 */
#define AARCH64_DBGBCR_N_EL1_E 0x1U

#define AARCH64_DBGBCR_N_EL1_PMC( _val ) ( ( _val ) << 1 )
#define AARCH64_DBGBCR_N_EL1_PMC_SHIFT 1
#define AARCH64_DBGBCR_N_EL1_PMC_MASK 0x6U
#define AARCH64_DBGBCR_N_EL1_PMC_GET( _reg ) \
  ( ( ( _reg ) >> 1 ) & 0x3U )

#define AARCH64_DBGBCR_N_EL1_BAS( _val ) ( ( _val ) << 5 )
#define AARCH64_DBGBCR_N_EL1_BAS_SHIFT 5
#define AARCH64_DBGBCR_N_EL1_BAS_MASK 0x1e0U
#define AARCH64_DBGBCR_N_EL1_BAS_GET( _reg ) \
  ( ( ( _reg ) >> 5 ) & 0xfU )

#define AARCH64_DBGBCR_N_EL1_HMC 0x2000U

#define AARCH64_DBGBCR_N_EL1_SSC( _val ) ( ( _val ) << 14 )
#define AARCH64_DBGBCR_N_EL1_SSC_SHIFT 14
#define AARCH64_DBGBCR_N_EL1_SSC_MASK 0xc000U
#define AARCH64_DBGBCR_N_EL1_SSC_GET( _reg ) \
  ( ( ( _reg ) >> 14 ) & 0x3U )

#define AARCH64_DBGBCR_N_EL1_LBN( _val ) ( ( _val ) << 16 )
#define AARCH64_DBGBCR_N_EL1_LBN_SHIFT 16
#define AARCH64_DBGBCR_N_EL1_LBN_MASK 0xf0000U
#define AARCH64_DBGBCR_N_EL1_LBN_GET( _reg ) \
  ( ( ( _reg ) >> 16 ) & 0xfU )

#define AARCH64_DBGBCR_N_EL1_BT( _val ) ( ( _val ) << 20 )
#define AARCH64_DBGBCR_N_EL1_BT_SHIFT 20
#define AARCH64_DBGBCR_N_EL1_BT_MASK 0xf00000U
#define AARCH64_DBGBCR_N_EL1_BT_GET( _reg ) \
  ( ( ( _reg ) >> 20 ) & 0xfU )

static inline uint64_t _AArch64_Read_dbgbcr0_el1( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, DBGBCR0_EL1" : "=&r" ( value ) : : "memory"
  );

  return value;
}

static inline void _AArch64_Write_dbgbcr0_el1( uint64_t value )
{
  __asm__ volatile (
    "msr DBGBCR0_EL1, %0" : : "r" ( value ) : "memory"
  );
}

static inline uint64_t _AArch64_Read_dbgbcr1_el1( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, DBGBCR1_EL1" : "=&r" ( value ) : : "memory"
  );

  return value;
}

static inline void _AArch64_Write_dbgbcr1_el1( uint64_t value )
{
  __asm__ volatile (
    "msr DBGBCR1_EL1, %0" : : "r" ( value ) : "memory"
  );
}

static inline uint64_t _AArch64_Read_dbgbcr2_el1( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, DBGBCR2_EL1" : "=&r" ( value ) : : "memory"
  );

  return value;
}

static inline void _AArch64_Write_dbgbcr2_el1( uint64_t value )
{
  __asm__ volatile (
    "msr DBGBCR2_EL1, %0" : : "r" ( value ) : "memory"
  );
}

static inline uint64_t _AArch64_Read_dbgbcr3_el1( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, DBGBCR3_EL1" : "=&r" ( value ) : : "memory"
  );

  return value;
}

static inline void _AArch64_Write_dbgbcr3_el1( uint64_t value )
{
  __asm__ volatile (
    "msr DBGBCR3_EL1, %0" : : "r" ( value ) : "memory"
  );
}

static inline uint64_t _AArch64_Read_dbgbcr4_el1( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, DBGBCR4_EL1" : "=&r" ( value ) : : "memory"
  );

  return value;
}

static inline void _AArch64_Write_dbgbcr4_el1( uint64_t value )
{
  __asm__ volatile (
    "msr DBGBCR4_EL1, %0" : : "r" ( value ) : "memory"
  );
}

static inline uint64_t _AArch64_Read_dbgbcr5_el1( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, DBGBCR5_EL1" : "=&r" ( value ) : : "memory"
  );

  return value;
}

static inline void _AArch64_Write_dbgbcr5_el1( uint64_t value )
{
  __asm__ volatile (
    "msr DBGBCR5_EL1, %0" : : "r" ( value ) : "memory"
  );
}

static inline uint64_t _AArch64_Read_dbgbcr6_el1( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, DBGBCR6_EL1" : "=&r" ( value ) : : "memory"
  );

  return value;
}

static inline void _AArch64_Write_dbgbcr6_el1( uint64_t value )
{
  __asm__ volatile (
    "msr DBGBCR6_EL1, %0" : : "r" ( value ) : "memory"
  );
}

static inline uint64_t _AArch64_Read_dbgbcr7_el1( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, DBGBCR7_EL1" : "=&r" ( value ) : : "memory"
  );

  return value;
}

static inline void _AArch64_Write_dbgbcr7_el1( uint64_t value )
{
  __asm__ volatile (
    "msr DBGBCR7_EL1, %0" : : "r" ( value ) : "memory"
  );
}

static inline uint64_t _AArch64_Read_dbgbcr8_el1( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, DBGBCR8_EL1" : "=&r" ( value ) : : "memory"
  );

  return value;
}

static inline void _AArch64_Write_dbgbcr8_el1( uint64_t value )
{
  __asm__ volatile (
    "msr DBGBCR8_EL1, %0" : : "r" ( value ) : "memory"
  );
}

static inline uint64_t _AArch64_Read_dbgbcr9_el1( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, DBGBCR9_EL1" : "=&r" ( value ) : : "memory"
  );

  return value;
}

static inline void _AArch64_Write_dbgbcr9_el1( uint64_t value )
{
  __asm__ volatile (
    "msr DBGBCR9_EL1, %0" : : "r" ( value ) : "memory"
  );
}

static inline uint64_t _AArch64_Read_dbgbcr10_el1( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, DBGBCR10_EL1" : "=&r" ( value ) : : "memory"
  );

  return value;
}

static inline void _AArch64_Write_dbgbcr10_el1( uint64_t value )
{
  __asm__ volatile (
    "msr DBGBCR10_EL1, %0" : : "r" ( value ) : "memory"
  );
}

static inline uint64_t _AArch64_Read_dbgbcr11_el1( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, DBGBCR11_EL1" : "=&r" ( value ) : : "memory"
  );

  return value;
}

static inline void _AArch64_Write_dbgbcr11_el1( uint64_t value )
{
  __asm__ volatile (
    "msr DBGBCR11_EL1, %0" : : "r" ( value ) : "memory"
  );
}

static inline uint64_t _AArch64_Read_dbgbcr12_el1( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, DBGBCR12_EL1" : "=&r" ( value ) : : "memory"
  );

  return value;
}

static inline void _AArch64_Write_dbgbcr12_el1( uint64_t value )
{
  __asm__ volatile (
    "msr DBGBCR12_EL1, %0" : : "r" ( value ) : "memory"
  );
}

static inline uint64_t _AArch64_Read_dbgbcr13_el1( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, DBGBCR13_EL1" : "=&r" ( value ) : : "memory"
  );

  return value;
}

static inline void _AArch64_Write_dbgbcr13_el1( uint64_t value )
{
  __asm__ volatile (
    "msr DBGBCR13_EL1, %0" : : "r" ( value ) : "memory"
  );
}

static inline uint64_t _AArch64_Read_dbgbcr14_el1( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, DBGBCR14_EL1" : "=&r" ( value ) : : "memory"
  );

  return value;
}

static inline void _AArch64_Write_dbgbcr14_el1( uint64_t value )
{
  __asm__ volatile (
    "msr DBGBCR14_EL1, %0" : : "r" ( value ) : "memory"
  );
}

static inline uint64_t _AArch64_Read_dbgbcr15_el1( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, DBGBCR15_EL1" : "=&r" ( value ) : : "memory"
  );

  return value;
}

static inline void _AArch64_Write_dbgbcr15_el1( uint64_t value )
{
  __asm__ volatile (
    "msr DBGBCR15_EL1, %0" : : "r" ( value ) : "memory"
  );
}
7009
/* DBGBVR_N_EL1, Debug Breakpoint Value Registers, n = 0 - 15 */

/*
 * The bit-field macros below apply to all DBGBVR<n>_EL1 registers; the
 * MRS/MSR accessors that follow are generated per register instance.
 */
#define AARCH64_DBGBVR_N_EL1_CONTEXTID( _val ) ( ( _val ) << 0 )
#define AARCH64_DBGBVR_N_EL1_CONTEXTID_SHIFT 0
#define AARCH64_DBGBVR_N_EL1_CONTEXTID_MASK 0xffffffffU
#define AARCH64_DBGBVR_N_EL1_CONTEXTID_GET( _reg ) \
  ( ( ( _reg ) >> 0 ) & 0xffffffffU )

#define AARCH64_DBGBVR_N_EL1_VA_48_2( _val ) ( ( _val ) << 2 )
#define AARCH64_DBGBVR_N_EL1_VA_48_2_SHIFT 2
#define AARCH64_DBGBVR_N_EL1_VA_48_2_MASK 0x1fffffffffffcULL
#define AARCH64_DBGBVR_N_EL1_VA_48_2_GET( _reg ) \
  ( ( ( _reg ) >> 2 ) & 0x7fffffffffffULL )

#define AARCH64_DBGBVR_N_EL1_VMID_7_0( _val ) ( ( _val ) << 32 )
#define AARCH64_DBGBVR_N_EL1_VMID_7_0_SHIFT 32
#define AARCH64_DBGBVR_N_EL1_VMID_7_0_MASK 0xff00000000ULL
#define AARCH64_DBGBVR_N_EL1_VMID_7_0_GET( _reg ) \
  ( ( ( _reg ) >> 32 ) & 0xffULL )

#define AARCH64_DBGBVR_N_EL1_CONTEXTID2( _val ) ( ( _val ) << 32 )
#define AARCH64_DBGBVR_N_EL1_CONTEXTID2_SHIFT 32
#define AARCH64_DBGBVR_N_EL1_CONTEXTID2_MASK 0xffffffff00000000ULL
#define AARCH64_DBGBVR_N_EL1_CONTEXTID2_GET( _reg ) \
  ( ( ( _reg ) >> 32 ) & 0xffffffffULL )

#define AARCH64_DBGBVR_N_EL1_VMID_15_8( _val ) ( ( _val ) << 40 )
#define AARCH64_DBGBVR_N_EL1_VMID_15_8_SHIFT 40
#define AARCH64_DBGBVR_N_EL1_VMID_15_8_MASK 0xff0000000000ULL
#define AARCH64_DBGBVR_N_EL1_VMID_15_8_GET( _reg ) \
  ( ( ( _reg ) >> 40 ) & 0xffULL )

#define AARCH64_DBGBVR_N_EL1_VA_52_49( _val ) ( ( _val ) << 49 )
#define AARCH64_DBGBVR_N_EL1_VA_52_49_SHIFT 49
#define AARCH64_DBGBVR_N_EL1_VA_52_49_MASK 0x1e000000000000ULL
#define AARCH64_DBGBVR_N_EL1_VA_52_49_GET( _reg ) \
  ( ( ( _reg ) >> 49 ) & 0xfULL )

#define AARCH64_DBGBVR_N_EL1_RESS_14_4( _val ) ( ( _val ) << 53 )
#define AARCH64_DBGBVR_N_EL1_RESS_14_4_SHIFT 53
#define AARCH64_DBGBVR_N_EL1_RESS_14_4_MASK 0xffe0000000000000ULL
#define AARCH64_DBGBVR_N_EL1_RESS_14_4_GET( _reg ) \
  ( ( ( _reg ) >> 53 ) & 0x7ffULL )

static inline uint64_t _AArch64_Read_dbgbvr0_el1( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, DBGBVR0_EL1" : "=&r" ( value ) : : "memory"
  );

  return value;
}

static inline void _AArch64_Write_dbgbvr0_el1( uint64_t value )
{
  __asm__ volatile (
    "msr DBGBVR0_EL1, %0" : : "r" ( value ) : "memory"
  );
}

static inline uint64_t _AArch64_Read_dbgbvr1_el1( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, DBGBVR1_EL1" : "=&r" ( value ) : : "memory"
  );

  return value;
}

static inline void _AArch64_Write_dbgbvr1_el1( uint64_t value )
{
  __asm__ volatile (
    "msr DBGBVR1_EL1, %0" : : "r" ( value ) : "memory"
  );
}

static inline uint64_t _AArch64_Read_dbgbvr2_el1( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, DBGBVR2_EL1" : "=&r" ( value ) : : "memory"
  );

  return value;
}

static inline void _AArch64_Write_dbgbvr2_el1( uint64_t value )
{
  __asm__ volatile (
    "msr DBGBVR2_EL1, %0" : : "r" ( value ) : "memory"
  );
}

static inline uint64_t _AArch64_Read_dbgbvr3_el1( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, DBGBVR3_EL1" : "=&r" ( value ) : : "memory"
  );

  return value;
}

static inline void _AArch64_Write_dbgbvr3_el1( uint64_t value )
{
  __asm__ volatile (
    "msr DBGBVR3_EL1, %0" : : "r" ( value ) : "memory"
  );
}

static inline uint64_t _AArch64_Read_dbgbvr4_el1( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, DBGBVR4_EL1" : "=&r" ( value ) : : "memory"
  );

  return value;
}

static inline void _AArch64_Write_dbgbvr4_el1( uint64_t value )
{
  __asm__ volatile (
    "msr DBGBVR4_EL1, %0" : : "r" ( value ) : "memory"
  );
}

static inline uint64_t _AArch64_Read_dbgbvr5_el1( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, DBGBVR5_EL1" : "=&r" ( value ) : : "memory"
  );

  return value;
}

static inline void _AArch64_Write_dbgbvr5_el1( uint64_t value )
{
  __asm__ volatile (
    "msr DBGBVR5_EL1, %0" : : "r" ( value ) : "memory"
  );
}

static inline uint64_t _AArch64_Read_dbgbvr6_el1( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, DBGBVR6_EL1" : "=&r" ( value ) : : "memory"
  );

  return value;
}

static inline void _AArch64_Write_dbgbvr6_el1( uint64_t value )
{
  __asm__ volatile (
    "msr DBGBVR6_EL1, %0" : : "r" ( value ) : "memory"
  );
}

static inline uint64_t _AArch64_Read_dbgbvr7_el1( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, DBGBVR7_EL1" : "=&r" ( value ) : : "memory"
  );

  return value;
}

static inline void _AArch64_Write_dbgbvr7_el1( uint64_t value )
{
  __asm__ volatile (
    "msr DBGBVR7_EL1, %0" : : "r" ( value ) : "memory"
  );
}

static inline uint64_t _AArch64_Read_dbgbvr8_el1( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, DBGBVR8_EL1" : "=&r" ( value ) : : "memory"
  );

  return value;
}

static inline void _AArch64_Write_dbgbvr8_el1( uint64_t value )
{
  __asm__ volatile (
    "msr DBGBVR8_EL1, %0" : : "r" ( value ) : "memory"
  );
}

static inline uint64_t _AArch64_Read_dbgbvr9_el1( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, DBGBVR9_EL1" : "=&r" ( value ) : : "memory"
  );

  return value;
}

static inline void _AArch64_Write_dbgbvr9_el1( uint64_t value )
{
  __asm__ volatile (
    "msr DBGBVR9_EL1, %0" : : "r" ( value ) : "memory"
  );
}

static inline uint64_t _AArch64_Read_dbgbvr10_el1( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, DBGBVR10_EL1" : "=&r" ( value ) : : "memory"
  );

  return value;
}

static inline void _AArch64_Write_dbgbvr10_el1( uint64_t value )
{
  __asm__ volatile (
    "msr DBGBVR10_EL1, %0" : : "r" ( value ) : "memory"
  );
}

static inline uint64_t _AArch64_Read_dbgbvr11_el1( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, DBGBVR11_EL1" : "=&r" ( value ) : : "memory"
  );

  return value;
}

static inline void _AArch64_Write_dbgbvr11_el1( uint64_t value )
{
  __asm__ volatile (
    "msr DBGBVR11_EL1, %0" : : "r" ( value ) : "memory"
  );
}

static inline uint64_t _AArch64_Read_dbgbvr12_el1( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, DBGBVR12_EL1" : "=&r" ( value ) : : "memory"
  );

  return value;
}

static inline void _AArch64_Write_dbgbvr12_el1( uint64_t value )
{
  __asm__ volatile (
    "msr DBGBVR12_EL1, %0" : : "r" ( value ) : "memory"
  );
}

static inline uint64_t _AArch64_Read_dbgbvr13_el1( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, DBGBVR13_EL1" : "=&r" ( value ) : : "memory"
  );

  return value;
}

static inline void _AArch64_Write_dbgbvr13_el1( uint64_t value )
{
  __asm__ volatile (
    "msr DBGBVR13_EL1, %0" : : "r" ( value ) : "memory"
  );
}

static inline uint64_t _AArch64_Read_dbgbvr14_el1( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, DBGBVR14_EL1" : "=&r" ( value ) : : "memory"
  );

  return value;
}

static inline void _AArch64_Write_dbgbvr14_el1( uint64_t value )
{
  __asm__ volatile (
    "msr DBGBVR14_EL1, %0" : : "r" ( value ) : "memory"
  );
}
7323
7324static inline uint64_t _AArch64_Read_dbgbvr15_el1( void )
7325{
7326 uint64_t value;
7327
7328 __asm__ volatile (
7329 "mrs %0, DBGBVR15_EL1" : "=&r" ( value ) : : "memory"
7330 );
7331
7332 return value;
7333}
7334
7335static inline void _AArch64_Write_dbgbvr15_el1( uint64_t value )
7336{
7337 __asm__ volatile (
7338 "msr DBGBVR15_EL1, %0" : : "r" ( value ) : "memory"
7339 );
7340}
7341
/* DBGCLAIMCLR_EL1, Debug CLAIM Tag Clear Register */

/* CLAIM occupies bits [7:0]; set bits written here clear the CLAIM tags. */
#define AARCH64_DBGCLAIMCLR_EL1_CLAIM( _val ) ( ( _val ) << 0 )
#define AARCH64_DBGCLAIMCLR_EL1_CLAIM_SHIFT 0
#define AARCH64_DBGCLAIMCLR_EL1_CLAIM_MASK 0xffU
#define AARCH64_DBGCLAIMCLR_EL1_CLAIM_GET( _reg ) \
  ( ( ( _reg ) >> 0 ) & 0xffU )

/* Returns the raw DBGCLAIMCLR_EL1 content (current CLAIM tag state). */
static inline uint64_t _AArch64_Read_dbgclaimclr_el1( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, DBGCLAIMCLR_EL1" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* Writes the raw value to DBGCLAIMCLR_EL1 (clears the selected tags). */
static inline void _AArch64_Write_dbgclaimclr_el1( uint64_t value )
{
  __asm__ volatile (
    "msr DBGCLAIMCLR_EL1, %0" : : "r" ( value ) : "memory"
  );
}
7367
/* DBGCLAIMSET_EL1, Debug CLAIM Tag Set Register */

/* CLAIM occupies bits [7:0]; set bits written here set the CLAIM tags. */
#define AARCH64_DBGCLAIMSET_EL1_CLAIM( _val ) ( ( _val ) << 0 )
#define AARCH64_DBGCLAIMSET_EL1_CLAIM_SHIFT 0
#define AARCH64_DBGCLAIMSET_EL1_CLAIM_MASK 0xffU
#define AARCH64_DBGCLAIMSET_EL1_CLAIM_GET( _reg ) \
  ( ( ( _reg ) >> 0 ) & 0xffU )

/* Returns the raw DBGCLAIMSET_EL1 content. */
static inline uint64_t _AArch64_Read_dbgclaimset_el1( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, DBGCLAIMSET_EL1" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* Writes the raw value to DBGCLAIMSET_EL1 (sets the selected tags). */
static inline void _AArch64_Write_dbgclaimset_el1( uint64_t value )
{
  __asm__ volatile (
    "msr DBGCLAIMSET_EL1, %0" : : "r" ( value ) : "memory"
  );
}
7393
/* DBGDTR_EL0, Debug Data Transfer Register, half-duplex */

#define AARCH64_DBGDTR_EL0_LOWWORD( _val ) ( ( _val ) << 0 )
#define AARCH64_DBGDTR_EL0_LOWWORD_SHIFT 0
#define AARCH64_DBGDTR_EL0_LOWWORD_MASK 0xffffffffU
#define AARCH64_DBGDTR_EL0_LOWWORD_GET( _reg ) \
  ( ( ( _reg ) >> 0 ) & 0xffffffffU )

#define AARCH64_DBGDTR_EL0_HIGHWORD( _val ) ( ( _val ) << 32 )
#define AARCH64_DBGDTR_EL0_HIGHWORD_SHIFT 32
#define AARCH64_DBGDTR_EL0_HIGHWORD_MASK 0xffffffff00000000ULL
#define AARCH64_DBGDTR_EL0_HIGHWORD_GET( _reg ) \
  ( ( ( _reg ) >> 32 ) & 0xffffffffULL )

/* Returns the raw DBGDTR_EL0 content (both 32-bit transfer words). */
static inline uint64_t _AArch64_Read_dbgdtr_el0( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, DBGDTR_EL0" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* Writes the raw value to DBGDTR_EL0. */
static inline void _AArch64_Write_dbgdtr_el0( uint64_t value )
{
  __asm__ volatile (
    "msr DBGDTR_EL0, %0" : : "r" ( value ) : "memory"
  );
}
7425
/* DBGDTRRX_EL0, Debug Data Transfer Register, Receive */

/*
 * Returns the raw DBGDTRRX_EL0 content.  The receive channel is
 * read-only from the PE side, so no write accessor exists.
 */
static inline uint64_t _AArch64_Read_dbgdtrrx_el0( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, DBGDTRRX_EL0" : "=&r" ( value ) : : "memory"
  );

  return value;
}
7438
/* DBGDTRTX_EL0, Debug Data Transfer Register, Transmit */

/*
 * Writes the raw value to DBGDTRTX_EL0.  The transmit channel is
 * write-only from the PE side, so no read accessor exists.
 */
static inline void _AArch64_Write_dbgdtrtx_el0( uint64_t value )
{
  __asm__ volatile (
    "msr DBGDTRTX_EL0, %0" : : "r" ( value ) : "memory"
  );
}
7447
/* DBGPRCR_EL1, Debug Power Control Register */

#define AARCH64_DBGPRCR_EL1_CORENPDRQ 0x1U

/* Returns the raw DBGPRCR_EL1 content. */
static inline uint64_t _AArch64_Read_dbgprcr_el1( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, DBGPRCR_EL1" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* Writes the raw value to DBGPRCR_EL1. */
static inline void _AArch64_Write_dbgprcr_el1( uint64_t value )
{
  __asm__ volatile (
    "msr DBGPRCR_EL1, %0" : : "r" ( value ) : "memory"
  );
}
7469
/* DBGVCR32_EL2, Debug Vector Catch Register */

/*
 * Several field names below share a bit position (e.g. U/SU, S/SS).
 * NOTE(review): presumably these are alternate names for the same bit in
 * different Security-state layouts of the AArch32 view — confirm against
 * the Arm ARM DBGVCR32_EL2 description.
 */
#define AARCH64_DBGVCR32_EL2_SU 0x2U

#define AARCH64_DBGVCR32_EL2_U 0x2U

#define AARCH64_DBGVCR32_EL2_S 0x4U

#define AARCH64_DBGVCR32_EL2_SS 0x4U

#define AARCH64_DBGVCR32_EL2_P 0x8U

#define AARCH64_DBGVCR32_EL2_SP 0x8U

#define AARCH64_DBGVCR32_EL2_D 0x10U

#define AARCH64_DBGVCR32_EL2_SD 0x10U

#define AARCH64_DBGVCR32_EL2_I 0x40U

#define AARCH64_DBGVCR32_EL2_SI 0x40U

#define AARCH64_DBGVCR32_EL2_F 0x80U

#define AARCH64_DBGVCR32_EL2_SF 0x80U

#define AARCH64_DBGVCR32_EL2_NSU 0x2000000U

#define AARCH64_DBGVCR32_EL2_NSS 0x4000000U

#define AARCH64_DBGVCR32_EL2_NSP 0x8000000U

#define AARCH64_DBGVCR32_EL2_NSD 0x10000000U

#define AARCH64_DBGVCR32_EL2_NSI 0x40000000U

#define AARCH64_DBGVCR32_EL2_NSF 0x80000000U

/* Returns the raw DBGVCR32_EL2 content. */
static inline uint64_t _AArch64_Read_dbgvcr32_el2( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, DBGVCR32_EL2" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* Writes the raw value to DBGVCR32_EL2. */
static inline void _AArch64_Write_dbgvcr32_el2( uint64_t value )
{
  __asm__ volatile (
    "msr DBGVCR32_EL2, %0" : : "r" ( value ) : "memory"
  );
}
7525
/* DBGWCR_N_EL1, Debug Watchpoint Control Registers, n = 0 - 15 */

#define AARCH64_DBGWCR_N_EL1_E 0x1U

#define AARCH64_DBGWCR_N_EL1_PAC( _val ) ( ( _val ) << 1 )
#define AARCH64_DBGWCR_N_EL1_PAC_SHIFT 1
#define AARCH64_DBGWCR_N_EL1_PAC_MASK 0x6U
#define AARCH64_DBGWCR_N_EL1_PAC_GET( _reg ) \
  ( ( ( _reg ) >> 1 ) & 0x3U )

#define AARCH64_DBGWCR_N_EL1_LSC( _val ) ( ( _val ) << 3 )
#define AARCH64_DBGWCR_N_EL1_LSC_SHIFT 3
#define AARCH64_DBGWCR_N_EL1_LSC_MASK 0x18U
#define AARCH64_DBGWCR_N_EL1_LSC_GET( _reg ) \
  ( ( ( _reg ) >> 3 ) & 0x3U )

#define AARCH64_DBGWCR_N_EL1_BAS( _val ) ( ( _val ) << 5 )
#define AARCH64_DBGWCR_N_EL1_BAS_SHIFT 5
#define AARCH64_DBGWCR_N_EL1_BAS_MASK 0x1fe0U
#define AARCH64_DBGWCR_N_EL1_BAS_GET( _reg ) \
  ( ( ( _reg ) >> 5 ) & 0xffU )

#define AARCH64_DBGWCR_N_EL1_HMC 0x2000U

#define AARCH64_DBGWCR_N_EL1_SSC( _val ) ( ( _val ) << 14 )
#define AARCH64_DBGWCR_N_EL1_SSC_SHIFT 14
#define AARCH64_DBGWCR_N_EL1_SSC_MASK 0xc000U
#define AARCH64_DBGWCR_N_EL1_SSC_GET( _reg ) \
  ( ( ( _reg ) >> 14 ) & 0x3U )

#define AARCH64_DBGWCR_N_EL1_LBN( _val ) ( ( _val ) << 16 )
#define AARCH64_DBGWCR_N_EL1_LBN_SHIFT 16
#define AARCH64_DBGWCR_N_EL1_LBN_MASK 0xf0000U
#define AARCH64_DBGWCR_N_EL1_LBN_GET( _reg ) \
  ( ( ( _reg ) >> 16 ) & 0xfU )

#define AARCH64_DBGWCR_N_EL1_WT 0x100000U

#define AARCH64_DBGWCR_N_EL1_MASK( _val ) ( ( _val ) << 24 )
#define AARCH64_DBGWCR_N_EL1_MASK_SHIFT 24
#define AARCH64_DBGWCR_N_EL1_MASK_MASK 0x1f000000U
#define AARCH64_DBGWCR_N_EL1_MASK_GET( _reg ) \
  ( ( ( _reg ) >> 24 ) & 0x1fU )

/*
 * Raw MRS/MSR accessors for DBGWCR0_EL1 to DBGWCR15_EL1.  System register
 * names cannot be parameterized in the instruction encoding, so one
 * accessor pair is emitted per register.
 */

static inline uint64_t _AArch64_Read_dbgwcr0_el1( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, DBGWCR0_EL1" : "=&r" ( value ) : : "memory"
  );

  return value;
}

static inline void _AArch64_Write_dbgwcr0_el1( uint64_t value )
{
  __asm__ volatile (
    "msr DBGWCR0_EL1, %0" : : "r" ( value ) : "memory"
  );
}

static inline uint64_t _AArch64_Read_dbgwcr1_el1( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, DBGWCR1_EL1" : "=&r" ( value ) : : "memory"
  );

  return value;
}

static inline void _AArch64_Write_dbgwcr1_el1( uint64_t value )
{
  __asm__ volatile (
    "msr DBGWCR1_EL1, %0" : : "r" ( value ) : "memory"
  );
}

static inline uint64_t _AArch64_Read_dbgwcr2_el1( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, DBGWCR2_EL1" : "=&r" ( value ) : : "memory"
  );

  return value;
}

static inline void _AArch64_Write_dbgwcr2_el1( uint64_t value )
{
  __asm__ volatile (
    "msr DBGWCR2_EL1, %0" : : "r" ( value ) : "memory"
  );
}

static inline uint64_t _AArch64_Read_dbgwcr3_el1( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, DBGWCR3_EL1" : "=&r" ( value ) : : "memory"
  );

  return value;
}

static inline void _AArch64_Write_dbgwcr3_el1( uint64_t value )
{
  __asm__ volatile (
    "msr DBGWCR3_EL1, %0" : : "r" ( value ) : "memory"
  );
}

static inline uint64_t _AArch64_Read_dbgwcr4_el1( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, DBGWCR4_EL1" : "=&r" ( value ) : : "memory"
  );

  return value;
}

static inline void _AArch64_Write_dbgwcr4_el1( uint64_t value )
{
  __asm__ volatile (
    "msr DBGWCR4_EL1, %0" : : "r" ( value ) : "memory"
  );
}

static inline uint64_t _AArch64_Read_dbgwcr5_el1( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, DBGWCR5_EL1" : "=&r" ( value ) : : "memory"
  );

  return value;
}

static inline void _AArch64_Write_dbgwcr5_el1( uint64_t value )
{
  __asm__ volatile (
    "msr DBGWCR5_EL1, %0" : : "r" ( value ) : "memory"
  );
}

static inline uint64_t _AArch64_Read_dbgwcr6_el1( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, DBGWCR6_EL1" : "=&r" ( value ) : : "memory"
  );

  return value;
}

static inline void _AArch64_Write_dbgwcr6_el1( uint64_t value )
{
  __asm__ volatile (
    "msr DBGWCR6_EL1, %0" : : "r" ( value ) : "memory"
  );
}

static inline uint64_t _AArch64_Read_dbgwcr7_el1( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, DBGWCR7_EL1" : "=&r" ( value ) : : "memory"
  );

  return value;
}

static inline void _AArch64_Write_dbgwcr7_el1( uint64_t value )
{
  __asm__ volatile (
    "msr DBGWCR7_EL1, %0" : : "r" ( value ) : "memory"
  );
}

static inline uint64_t _AArch64_Read_dbgwcr8_el1( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, DBGWCR8_EL1" : "=&r" ( value ) : : "memory"
  );

  return value;
}

static inline void _AArch64_Write_dbgwcr8_el1( uint64_t value )
{
  __asm__ volatile (
    "msr DBGWCR8_EL1, %0" : : "r" ( value ) : "memory"
  );
}

static inline uint64_t _AArch64_Read_dbgwcr9_el1( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, DBGWCR9_EL1" : "=&r" ( value ) : : "memory"
  );

  return value;
}

static inline void _AArch64_Write_dbgwcr9_el1( uint64_t value )
{
  __asm__ volatile (
    "msr DBGWCR9_EL1, %0" : : "r" ( value ) : "memory"
  );
}

static inline uint64_t _AArch64_Read_dbgwcr10_el1( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, DBGWCR10_EL1" : "=&r" ( value ) : : "memory"
  );

  return value;
}

static inline void _AArch64_Write_dbgwcr10_el1( uint64_t value )
{
  __asm__ volatile (
    "msr DBGWCR10_EL1, %0" : : "r" ( value ) : "memory"
  );
}

static inline uint64_t _AArch64_Read_dbgwcr11_el1( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, DBGWCR11_EL1" : "=&r" ( value ) : : "memory"
  );

  return value;
}

static inline void _AArch64_Write_dbgwcr11_el1( uint64_t value )
{
  __asm__ volatile (
    "msr DBGWCR11_EL1, %0" : : "r" ( value ) : "memory"
  );
}

static inline uint64_t _AArch64_Read_dbgwcr12_el1( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, DBGWCR12_EL1" : "=&r" ( value ) : : "memory"
  );

  return value;
}

static inline void _AArch64_Write_dbgwcr12_el1( uint64_t value )
{
  __asm__ volatile (
    "msr DBGWCR12_EL1, %0" : : "r" ( value ) : "memory"
  );
}

static inline uint64_t _AArch64_Read_dbgwcr13_el1( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, DBGWCR13_EL1" : "=&r" ( value ) : : "memory"
  );

  return value;
}

static inline void _AArch64_Write_dbgwcr13_el1( uint64_t value )
{
  __asm__ volatile (
    "msr DBGWCR13_EL1, %0" : : "r" ( value ) : "memory"
  );
}

static inline uint64_t _AArch64_Read_dbgwcr14_el1( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, DBGWCR14_EL1" : "=&r" ( value ) : : "memory"
  );

  return value;
}

static inline void _AArch64_Write_dbgwcr14_el1( uint64_t value )
{
  __asm__ volatile (
    "msr DBGWCR14_EL1, %0" : : "r" ( value ) : "memory"
  );
}

static inline uint64_t _AArch64_Read_dbgwcr15_el1( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, DBGWCR15_EL1" : "=&r" ( value ) : : "memory"
  );

  return value;
}

static inline void _AArch64_Write_dbgwcr15_el1( uint64_t value )
{
  __asm__ volatile (
    "msr DBGWCR15_EL1, %0" : : "r" ( value ) : "memory"
  );
}
7857
/* DBGWVR_N_EL1, Debug Watchpoint Value Registers, n = 0 - 15 */

#define AARCH64_DBGWVR_N_EL1_VA_48_2( _val ) ( ( _val ) << 2 )
#define AARCH64_DBGWVR_N_EL1_VA_48_2_SHIFT 2
#define AARCH64_DBGWVR_N_EL1_VA_48_2_MASK 0x1fffffffffffcULL
#define AARCH64_DBGWVR_N_EL1_VA_48_2_GET( _reg ) \
  ( ( ( _reg ) >> 2 ) & 0x7fffffffffffULL )

#define AARCH64_DBGWVR_N_EL1_VA_52_49( _val ) ( ( _val ) << 49 )
#define AARCH64_DBGWVR_N_EL1_VA_52_49_SHIFT 49
#define AARCH64_DBGWVR_N_EL1_VA_52_49_MASK 0x1e000000000000ULL
#define AARCH64_DBGWVR_N_EL1_VA_52_49_GET( _reg ) \
  ( ( ( _reg ) >> 49 ) & 0xfULL )

#define AARCH64_DBGWVR_N_EL1_RESS_14_4( _val ) ( ( _val ) << 53 )
#define AARCH64_DBGWVR_N_EL1_RESS_14_4_SHIFT 53
#define AARCH64_DBGWVR_N_EL1_RESS_14_4_MASK 0xffe0000000000000ULL
#define AARCH64_DBGWVR_N_EL1_RESS_14_4_GET( _reg ) \
  ( ( ( _reg ) >> 53 ) & 0x7ffULL )

/*
 * Raw MRS/MSR accessors for DBGWVR0_EL1 to DBGWVR15_EL1.  System register
 * names cannot be parameterized in the instruction encoding, so one
 * accessor pair is emitted per register.
 */

static inline uint64_t _AArch64_Read_dbgwvr0_el1( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, DBGWVR0_EL1" : "=&r" ( value ) : : "memory"
  );

  return value;
}

static inline void _AArch64_Write_dbgwvr0_el1( uint64_t value )
{
  __asm__ volatile (
    "msr DBGWVR0_EL1, %0" : : "r" ( value ) : "memory"
  );
}

static inline uint64_t _AArch64_Read_dbgwvr1_el1( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, DBGWVR1_EL1" : "=&r" ( value ) : : "memory"
  );

  return value;
}

static inline void _AArch64_Write_dbgwvr1_el1( uint64_t value )
{
  __asm__ volatile (
    "msr DBGWVR1_EL1, %0" : : "r" ( value ) : "memory"
  );
}

static inline uint64_t _AArch64_Read_dbgwvr2_el1( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, DBGWVR2_EL1" : "=&r" ( value ) : : "memory"
  );

  return value;
}

static inline void _AArch64_Write_dbgwvr2_el1( uint64_t value )
{
  __asm__ volatile (
    "msr DBGWVR2_EL1, %0" : : "r" ( value ) : "memory"
  );
}

static inline uint64_t _AArch64_Read_dbgwvr3_el1( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, DBGWVR3_EL1" : "=&r" ( value ) : : "memory"
  );

  return value;
}

static inline void _AArch64_Write_dbgwvr3_el1( uint64_t value )
{
  __asm__ volatile (
    "msr DBGWVR3_EL1, %0" : : "r" ( value ) : "memory"
  );
}

static inline uint64_t _AArch64_Read_dbgwvr4_el1( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, DBGWVR4_EL1" : "=&r" ( value ) : : "memory"
  );

  return value;
}

static inline void _AArch64_Write_dbgwvr4_el1( uint64_t value )
{
  __asm__ volatile (
    "msr DBGWVR4_EL1, %0" : : "r" ( value ) : "memory"
  );
}

static inline uint64_t _AArch64_Read_dbgwvr5_el1( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, DBGWVR5_EL1" : "=&r" ( value ) : : "memory"
  );

  return value;
}

static inline void _AArch64_Write_dbgwvr5_el1( uint64_t value )
{
  __asm__ volatile (
    "msr DBGWVR5_EL1, %0" : : "r" ( value ) : "memory"
  );
}

static inline uint64_t _AArch64_Read_dbgwvr6_el1( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, DBGWVR6_EL1" : "=&r" ( value ) : : "memory"
  );

  return value;
}

static inline void _AArch64_Write_dbgwvr6_el1( uint64_t value )
{
  __asm__ volatile (
    "msr DBGWVR6_EL1, %0" : : "r" ( value ) : "memory"
  );
}

static inline uint64_t _AArch64_Read_dbgwvr7_el1( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, DBGWVR7_EL1" : "=&r" ( value ) : : "memory"
  );

  return value;
}

static inline void _AArch64_Write_dbgwvr7_el1( uint64_t value )
{
  __asm__ volatile (
    "msr DBGWVR7_EL1, %0" : : "r" ( value ) : "memory"
  );
}

static inline uint64_t _AArch64_Read_dbgwvr8_el1( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, DBGWVR8_EL1" : "=&r" ( value ) : : "memory"
  );

  return value;
}

static inline void _AArch64_Write_dbgwvr8_el1( uint64_t value )
{
  __asm__ volatile (
    "msr DBGWVR8_EL1, %0" : : "r" ( value ) : "memory"
  );
}

static inline uint64_t _AArch64_Read_dbgwvr9_el1( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, DBGWVR9_EL1" : "=&r" ( value ) : : "memory"
  );

  return value;
}

static inline void _AArch64_Write_dbgwvr9_el1( uint64_t value )
{
  __asm__ volatile (
    "msr DBGWVR9_EL1, %0" : : "r" ( value ) : "memory"
  );
}

static inline uint64_t _AArch64_Read_dbgwvr10_el1( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, DBGWVR10_EL1" : "=&r" ( value ) : : "memory"
  );

  return value;
}

static inline void _AArch64_Write_dbgwvr10_el1( uint64_t value )
{
  __asm__ volatile (
    "msr DBGWVR10_EL1, %0" : : "r" ( value ) : "memory"
  );
}

static inline uint64_t _AArch64_Read_dbgwvr11_el1( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, DBGWVR11_EL1" : "=&r" ( value ) : : "memory"
  );

  return value;
}

static inline void _AArch64_Write_dbgwvr11_el1( uint64_t value )
{
  __asm__ volatile (
    "msr DBGWVR11_EL1, %0" : : "r" ( value ) : "memory"
  );
}

static inline uint64_t _AArch64_Read_dbgwvr12_el1( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, DBGWVR12_EL1" : "=&r" ( value ) : : "memory"
  );

  return value;
}

static inline void _AArch64_Write_dbgwvr12_el1( uint64_t value )
{
  __asm__ volatile (
    "msr DBGWVR12_EL1, %0" : : "r" ( value ) : "memory"
  );
}

static inline uint64_t _AArch64_Read_dbgwvr13_el1( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, DBGWVR13_EL1" : "=&r" ( value ) : : "memory"
  );

  return value;
}

static inline void _AArch64_Write_dbgwvr13_el1( uint64_t value )
{
  __asm__ volatile (
    "msr DBGWVR13_EL1, %0" : : "r" ( value ) : "memory"
  );
}

static inline uint64_t _AArch64_Read_dbgwvr14_el1( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, DBGWVR14_EL1" : "=&r" ( value ) : : "memory"
  );

  return value;
}

static inline void _AArch64_Write_dbgwvr14_el1( uint64_t value )
{
  __asm__ volatile (
    "msr DBGWVR14_EL1, %0" : : "r" ( value ) : "memory"
  );
}

static inline uint64_t _AArch64_Read_dbgwvr15_el1( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, DBGWVR15_EL1" : "=&r" ( value ) : : "memory"
  );

  return value;
}

static inline void _AArch64_Write_dbgwvr15_el1( uint64_t value )
{
  __asm__ volatile (
    "msr DBGWVR15_EL1, %0" : : "r" ( value ) : "memory"
  );
}
8165
/* DLR_EL0, Debug Link Register */

/* Returns the raw DLR_EL0 content. */
static inline uint64_t _AArch64_Read_dlr_el0( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, DLR_EL0" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* Writes the raw value to DLR_EL0. */
static inline void _AArch64_Write_dlr_el0( uint64_t value )
{
  __asm__ volatile (
    "msr DLR_EL0, %0" : : "r" ( value ) : "memory"
  );
}
8185
/* DSPSR_EL0, Debug Saved Program Status Register */

/*
 * NOTE(review): several names below share one bit position (D/E at bit 9,
 * SSBS_1/UAO at bit 23, BTYPE/IT_7_2 overlapping at bit 10) — presumably
 * because the layout differs between the AArch64 and AArch32 views of the
 * saved program status; confirm against the Arm ARM DSPSR_EL0 description.
 */
#define AARCH64_DSPSR_EL0_M_3_0( _val ) ( ( _val ) << 0 )
#define AARCH64_DSPSR_EL0_M_3_0_SHIFT 0
#define AARCH64_DSPSR_EL0_M_3_0_MASK 0xfU
#define AARCH64_DSPSR_EL0_M_3_0_GET( _reg ) \
  ( ( ( _reg ) >> 0 ) & 0xfU )

#define AARCH64_DSPSR_EL0_M_4 0x10U

#define AARCH64_DSPSR_EL0_T 0x20U

#define AARCH64_DSPSR_EL0_F 0x40U

#define AARCH64_DSPSR_EL0_I 0x80U

#define AARCH64_DSPSR_EL0_A 0x100U

#define AARCH64_DSPSR_EL0_D 0x200U

#define AARCH64_DSPSR_EL0_E 0x200U

#define AARCH64_DSPSR_EL0_BTYPE( _val ) ( ( _val ) << 10 )
#define AARCH64_DSPSR_EL0_BTYPE_SHIFT 10
#define AARCH64_DSPSR_EL0_BTYPE_MASK 0xc00U
#define AARCH64_DSPSR_EL0_BTYPE_GET( _reg ) \
  ( ( ( _reg ) >> 10 ) & 0x3U )

#define AARCH64_DSPSR_EL0_IT_7_2( _val ) ( ( _val ) << 10 )
#define AARCH64_DSPSR_EL0_IT_7_2_SHIFT 10
#define AARCH64_DSPSR_EL0_IT_7_2_MASK 0xfc00U
#define AARCH64_DSPSR_EL0_IT_7_2_GET( _reg ) \
  ( ( ( _reg ) >> 10 ) & 0x3fU )

#define AARCH64_DSPSR_EL0_SSBS_0 0x1000U

#define AARCH64_DSPSR_EL0_GE( _val ) ( ( _val ) << 16 )
#define AARCH64_DSPSR_EL0_GE_SHIFT 16
#define AARCH64_DSPSR_EL0_GE_MASK 0xf0000U
#define AARCH64_DSPSR_EL0_GE_GET( _reg ) \
  ( ( ( _reg ) >> 16 ) & 0xfU )

#define AARCH64_DSPSR_EL0_IL 0x100000U

#define AARCH64_DSPSR_EL0_SS 0x200000U

#define AARCH64_DSPSR_EL0_PAN 0x400000U

#define AARCH64_DSPSR_EL0_SSBS_1 0x800000U

#define AARCH64_DSPSR_EL0_UAO 0x800000U

#define AARCH64_DSPSR_EL0_DIT 0x1000000U

#define AARCH64_DSPSR_EL0_TCO 0x2000000U

#define AARCH64_DSPSR_EL0_IT_1_0( _val ) ( ( _val ) << 25 )
#define AARCH64_DSPSR_EL0_IT_1_0_SHIFT 25
#define AARCH64_DSPSR_EL0_IT_1_0_MASK 0x6000000U
#define AARCH64_DSPSR_EL0_IT_1_0_GET( _reg ) \
  ( ( ( _reg ) >> 25 ) & 0x3U )

#define AARCH64_DSPSR_EL0_Q 0x8000000U

#define AARCH64_DSPSR_EL0_V 0x10000000U

#define AARCH64_DSPSR_EL0_C 0x20000000U

#define AARCH64_DSPSR_EL0_Z 0x40000000U

#define AARCH64_DSPSR_EL0_N 0x80000000U

/* Returns the raw DSPSR_EL0 content. */
static inline uint64_t _AArch64_Read_dspsr_el0( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, DSPSR_EL0" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* Writes the raw value to DSPSR_EL0. */
static inline void _AArch64_Write_dspsr_el0( uint64_t value )
{
  __asm__ volatile (
    "msr DSPSR_EL0, %0" : : "r" ( value ) : "memory"
  );
}
8275
/* MDCCINT_EL1, Monitor DCC Interrupt Enable Register */

#define AARCH64_MDCCINT_EL1_TX 0x20000000U

#define AARCH64_MDCCINT_EL1_RX 0x40000000U

/* Returns the raw MDCCINT_EL1 content. */
static inline uint64_t _AArch64_Read_mdccint_el1( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, MDCCINT_EL1" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* Writes the raw value to MDCCINT_EL1. */
static inline void _AArch64_Write_mdccint_el1( uint64_t value )
{
  __asm__ volatile (
    "msr MDCCINT_EL1, %0" : : "r" ( value ) : "memory"
  );
}
8299
/* MDCCSR_EL0, Monitor DCC Status Register */

#define AARCH64_MDCCSR_EL0_TXFULL 0x20000000U

#define AARCH64_MDCCSR_EL0_RXFULL 0x40000000U

/*
 * Returns the raw MDCCSR_EL0 content.  The status register is read-only,
 * so no write accessor exists.
 */
static inline uint64_t _AArch64_Read_mdccsr_el0( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, MDCCSR_EL0" : "=&r" ( value ) : : "memory"
  );

  return value;
}
8316
/* MDCR_EL2, Monitor Debug Configuration Register (EL2) */

#define AARCH64_MDCR_EL2_HPMN( _val ) ( ( _val ) << 0 )
#define AARCH64_MDCR_EL2_HPMN_SHIFT 0
#define AARCH64_MDCR_EL2_HPMN_MASK 0x1fU
#define AARCH64_MDCR_EL2_HPMN_GET( _reg ) \
  ( ( ( _reg ) >> 0 ) & 0x1fU )

#define AARCH64_MDCR_EL2_TPMCR 0x20U

#define AARCH64_MDCR_EL2_TPM 0x40U

#define AARCH64_MDCR_EL2_HPME 0x80U

#define AARCH64_MDCR_EL2_TDE 0x100U

#define AARCH64_MDCR_EL2_TDA 0x200U

#define AARCH64_MDCR_EL2_TDOSA 0x400U

#define AARCH64_MDCR_EL2_TDRA 0x800U

#define AARCH64_MDCR_EL2_E2PB( _val ) ( ( _val ) << 12 )
#define AARCH64_MDCR_EL2_E2PB_SHIFT 12
#define AARCH64_MDCR_EL2_E2PB_MASK 0x3000U
#define AARCH64_MDCR_EL2_E2PB_GET( _reg ) \
  ( ( ( _reg ) >> 12 ) & 0x3U )

#define AARCH64_MDCR_EL2_TPMS 0x4000U

#define AARCH64_MDCR_EL2_HPMD 0x20000U

#define AARCH64_MDCR_EL2_TTRF 0x80000U

#define AARCH64_MDCR_EL2_HCCD 0x800000U

#define AARCH64_MDCR_EL2_HLP 0x4000000U

#define AARCH64_MDCR_EL2_TDCC 0x8000000U

#define AARCH64_MDCR_EL2_MTPME 0x10000000U

/* Returns the raw MDCR_EL2 content. */
static inline uint64_t _AArch64_Read_mdcr_el2( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, MDCR_EL2" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* Writes the raw value to MDCR_EL2. */
static inline void _AArch64_Write_mdcr_el2( uint64_t value )
{
  __asm__ volatile (
    "msr MDCR_EL2, %0" : : "r" ( value ) : "memory"
  );
}
8376
/* MDCR_EL3, Monitor Debug Configuration Register (EL3) */

#define AARCH64_MDCR_EL3_TPM 0x40U

#define AARCH64_MDCR_EL3_TDA 0x200U

#define AARCH64_MDCR_EL3_TDOSA 0x400U

#define AARCH64_MDCR_EL3_NSPB( _val ) ( ( _val ) << 12 )
#define AARCH64_MDCR_EL3_NSPB_SHIFT 12
#define AARCH64_MDCR_EL3_NSPB_MASK 0x3000U
#define AARCH64_MDCR_EL3_NSPB_GET( _reg ) \
  ( ( ( _reg ) >> 12 ) & 0x3U )

#define AARCH64_MDCR_EL3_SPD32( _val ) ( ( _val ) << 14 )
#define AARCH64_MDCR_EL3_SPD32_SHIFT 14
#define AARCH64_MDCR_EL3_SPD32_MASK 0xc000U
#define AARCH64_MDCR_EL3_SPD32_GET( _reg ) \
  ( ( ( _reg ) >> 14 ) & 0x3U )

#define AARCH64_MDCR_EL3_SDD 0x10000U

#define AARCH64_MDCR_EL3_SPME 0x20000U

#define AARCH64_MDCR_EL3_STE 0x40000U

#define AARCH64_MDCR_EL3_TTRF 0x80000U

#define AARCH64_MDCR_EL3_EDAD 0x100000U

#define AARCH64_MDCR_EL3_EPMAD 0x200000U

#define AARCH64_MDCR_EL3_SCCD 0x800000U

#define AARCH64_MDCR_EL3_TDCC 0x8000000U

#define AARCH64_MDCR_EL3_MTPME 0x10000000U

/* Returns the raw MDCR_EL3 content. */
static inline uint64_t _AArch64_Read_mdcr_el3( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, MDCR_EL3" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* Writes the raw value to MDCR_EL3. */
static inline void _AArch64_Write_mdcr_el3( uint64_t value )
{
  __asm__ volatile (
    "msr MDCR_EL3, %0" : : "r" ( value ) : "memory"
  );
}
8432
/* MDRAR_EL1, Monitor Debug ROM Address Register */

#define AARCH64_MDRAR_EL1_VALID( _val ) ( ( _val ) << 0 )
#define AARCH64_MDRAR_EL1_VALID_SHIFT 0
#define AARCH64_MDRAR_EL1_VALID_MASK 0x3U
#define AARCH64_MDRAR_EL1_VALID_GET( _reg ) \
  ( ( ( _reg ) >> 0 ) & 0x3U )

#define AARCH64_MDRAR_EL1_ROMADDR_47_12( _val ) ( ( _val ) << 12 )
#define AARCH64_MDRAR_EL1_ROMADDR_47_12_SHIFT 12
#define AARCH64_MDRAR_EL1_ROMADDR_47_12_MASK 0xfffffffff000ULL
#define AARCH64_MDRAR_EL1_ROMADDR_47_12_GET( _reg ) \
  ( ( ( _reg ) >> 12 ) & 0xfffffffffULL )

#define AARCH64_MDRAR_EL1_ROMADDR_51_48( _val ) ( ( _val ) << 48 )
#define AARCH64_MDRAR_EL1_ROMADDR_51_48_SHIFT 48
#define AARCH64_MDRAR_EL1_ROMADDR_51_48_MASK 0xf000000000000ULL
#define AARCH64_MDRAR_EL1_ROMADDR_51_48_GET( _reg ) \
  ( ( ( _reg ) >> 48 ) & 0xfULL )

/*
 * Returns the raw MDRAR_EL1 content.  The register is read-only, so no
 * write accessor exists.
 */
static inline uint64_t _AArch64_Read_mdrar_el1( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, MDRAR_EL1" : "=&r" ( value ) : : "memory"
  );

  return value;
}
8463
/* MDSCR_EL1, Monitor Debug System Control Register */

#define AARCH64_MDSCR_EL1_SS 0x1U

#define AARCH64_MDSCR_EL1_ERR 0x40U

#define AARCH64_MDSCR_EL1_TDCC 0x1000U

#define AARCH64_MDSCR_EL1_KDE 0x2000U

#define AARCH64_MDSCR_EL1_HDE 0x4000U

#define AARCH64_MDSCR_EL1_MDE 0x8000U

#define AARCH64_MDSCR_EL1_SC2 0x80000U

#define AARCH64_MDSCR_EL1_TDA 0x200000U

#define AARCH64_MDSCR_EL1_INTDIS( _val ) ( ( _val ) << 22 )
#define AARCH64_MDSCR_EL1_INTDIS_SHIFT 22
#define AARCH64_MDSCR_EL1_INTDIS_MASK 0xc00000U
#define AARCH64_MDSCR_EL1_INTDIS_GET( _reg ) \
  ( ( ( _reg ) >> 22 ) & 0x3U )

#define AARCH64_MDSCR_EL1_TXU 0x4000000U

#define AARCH64_MDSCR_EL1_RXO 0x8000000U

#define AARCH64_MDSCR_EL1_TXFULL 0x20000000U

#define AARCH64_MDSCR_EL1_RXFULL 0x40000000U

#define AARCH64_MDSCR_EL1_TFO 0x80000000U

/* Returns the raw MDSCR_EL1 content. */
static inline uint64_t _AArch64_Read_mdscr_el1( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, MDSCR_EL1" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* Writes the raw value to MDSCR_EL1. */
static inline void _AArch64_Write_mdscr_el1( uint64_t value )
{
  __asm__ volatile (
    "msr MDSCR_EL1, %0" : : "r" ( value ) : "memory"
  );
}
8515
/* OSDLR_EL1, OS Double Lock Register */

#define AARCH64_OSDLR_EL1_DLK 0x1U

/* Returns the raw OSDLR_EL1 content. */
static inline uint64_t _AArch64_Read_osdlr_el1( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, OSDLR_EL1" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* Writes the raw value to OSDLR_EL1. */
static inline void _AArch64_Write_osdlr_el1( uint64_t value )
{
  __asm__ volatile (
    "msr OSDLR_EL1, %0" : : "r" ( value ) : "memory"
  );
}
8537
8538/* OSDTRRX_EL1, OS Lock Data Transfer Register, Receive */
8539
8540static inline uint64_t _AArch64_Read_osdtrrx_el1( void )
8541{
8542 uint64_t value;
8543
8544 __asm__ volatile (
8545 "mrs %0, OSDTRRX_EL1" : "=&r" ( value ) : : "memory"
8546 );
8547
8548 return value;
8549}
8550
8551static inline void _AArch64_Write_osdtrrx_el1( uint64_t value )
8552{
8553 __asm__ volatile (
8554 "msr OSDTRRX_EL1, %0" : : "r" ( value ) : "memory"
8555 );
8556}
8557
8558/* OSDTRTX_EL1, OS Lock Data Transfer Register, Transmit */
8559
8560static inline uint64_t _AArch64_Read_osdtrtx_el1( void )
8561{
8562 uint64_t value;
8563
8564 __asm__ volatile (
8565 "mrs %0, OSDTRTX_EL1" : "=&r" ( value ) : : "memory"
8566 );
8567
8568 return value;
8569}
8570
8571static inline void _AArch64_Write_osdtrtx_el1( uint64_t value )
8572{
8573 __asm__ volatile (
8574 "msr OSDTRTX_EL1, %0" : : "r" ( value ) : "memory"
8575 );
8576}
8577
/* OSECCR_EL1, OS Lock Exception Catch Control Register */

/* EDECCR: 32-bit field in bits [31:0] */
#define AARCH64_OSECCR_EL1_EDECCR( _val ) ( ( _val ) << 0 )
#define AARCH64_OSECCR_EL1_EDECCR_SHIFT 0
#define AARCH64_OSECCR_EL1_EDECCR_MASK 0xffffffffU
#define AARCH64_OSECCR_EL1_EDECCR_GET( _reg ) \
  ( ( ( _reg ) >> 0 ) & 0xffffffffU )

/* Reads OSECCR_EL1 via MRS. */
static inline uint64_t _AArch64_Read_oseccr_el1( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, OSECCR_EL1" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* Writes OSECCR_EL1 via MSR. */
static inline void _AArch64_Write_oseccr_el1( uint64_t value )
{
  __asm__ volatile (
    "msr OSECCR_EL1, %0" : : "r" ( value ) : "memory"
  );
}

/* OSLAR_EL1, OS Lock Access Register */

#define AARCH64_OSLAR_EL1_OSLK 0x1U

/*
 * Writes OSLAR_EL1 via MSR.  Only a write accessor is provided for this
 * register; lock status is read back through OSLSR_EL1 below.
 */
static inline void _AArch64_Write_oslar_el1( uint64_t value )
{
  __asm__ volatile (
    "msr OSLAR_EL1, %0" : : "r" ( value ) : "memory"
  );
}

/* OSLSR_EL1, OS Lock Status Register */

#define AARCH64_OSLSR_EL1_OSLM_0 0x1U

#define AARCH64_OSLSR_EL1_OSLK 0x2U

#define AARCH64_OSLSR_EL1_NTT 0x4U

#define AARCH64_OSLSR_EL1_OSLM_1 0x8U

/* Reads OSLSR_EL1 via MRS (no write accessor is generated). */
static inline uint64_t _AArch64_Read_oslsr_el1( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, OSLSR_EL1" : "=&r" ( value ) : : "memory"
  );

  return value;
}
8635
/* SDER32_EL2, AArch64 Secure Debug Enable Register */

#define AARCH64_SDER32_EL2_SUIDEN 0x1U

#define AARCH64_SDER32_EL2_SUNIDEN 0x2U

/* Reads SDER32_EL2 via MRS. */
static inline uint64_t _AArch64_Read_sder32_el2( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, SDER32_EL2" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* Writes SDER32_EL2 via MSR. */
static inline void _AArch64_Write_sder32_el2( uint64_t value )
{
  __asm__ volatile (
    "msr SDER32_EL2, %0" : : "r" ( value ) : "memory"
  );
}

/* SDER32_EL3, AArch64 Secure Debug Enable Register */

/* Same bit layout as the EL2 view above */
#define AARCH64_SDER32_EL3_SUIDEN 0x1U

#define AARCH64_SDER32_EL3_SUNIDEN 0x2U

/* Reads SDER32_EL3 via MRS. */
static inline uint64_t _AArch64_Read_sder32_el3( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, SDER32_EL3" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* Writes SDER32_EL3 via MSR. */
static inline void _AArch64_Write_sder32_el3( uint64_t value )
{
  __asm__ volatile (
    "msr SDER32_EL3, %0" : : "r" ( value ) : "memory"
  );
}
8683
/* TRFCR_EL1, Trace Filter Control Register (EL1) */

#define AARCH64_TRFCR_EL1_E0TRE 0x1U

#define AARCH64_TRFCR_EL1_E1TRE 0x2U

/* TS: two-bit timestamp control field in bits [6:5] */
#define AARCH64_TRFCR_EL1_TS( _val ) ( ( _val ) << 5 )
#define AARCH64_TRFCR_EL1_TS_SHIFT 5
#define AARCH64_TRFCR_EL1_TS_MASK 0x60U
#define AARCH64_TRFCR_EL1_TS_GET( _reg ) \
  ( ( ( _reg ) >> 5 ) & 0x3U )

/* Reads TRFCR_EL1 via MRS. */
static inline uint64_t _AArch64_Read_trfcr_el1( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, TRFCR_EL1" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* Writes TRFCR_EL1 via MSR. */
static inline void _AArch64_Write_trfcr_el1( uint64_t value )
{
  __asm__ volatile (
    "msr TRFCR_EL1, %0" : : "r" ( value ) : "memory"
  );
}

/* TRFCR_EL2, Trace Filter Control Register (EL2) */

#define AARCH64_TRFCR_EL2_E0HTRE 0x1U

#define AARCH64_TRFCR_EL2_E2TRE 0x2U

#define AARCH64_TRFCR_EL2_CX 0x8U

/* TS: two-bit timestamp control field in bits [6:5] */
#define AARCH64_TRFCR_EL2_TS( _val ) ( ( _val ) << 5 )
#define AARCH64_TRFCR_EL2_TS_SHIFT 5
#define AARCH64_TRFCR_EL2_TS_MASK 0x60U
#define AARCH64_TRFCR_EL2_TS_GET( _reg ) \
  ( ( ( _reg ) >> 5 ) & 0x3U )

/* Reads TRFCR_EL2 via MRS. */
static inline uint64_t _AArch64_Read_trfcr_el2( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, TRFCR_EL2" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* Writes TRFCR_EL2 via MSR. */
static inline void _AArch64_Write_trfcr_el2( uint64_t value )
{
  __asm__ volatile (
    "msr TRFCR_EL2, %0" : : "r" ( value ) : "memory"
  );
}
8745
/* PMCCFILTR_EL0, Performance Monitors Cycle Count Filter Register */

#define AARCH64_PMCCFILTR_EL0_SH 0x1000000U

#define AARCH64_PMCCFILTR_EL0_M 0x4000000U

#define AARCH64_PMCCFILTR_EL0_NSH 0x8000000U

#define AARCH64_PMCCFILTR_EL0_NSU 0x10000000U

#define AARCH64_PMCCFILTR_EL0_NSK 0x20000000U

#define AARCH64_PMCCFILTR_EL0_U 0x40000000U

#define AARCH64_PMCCFILTR_EL0_P 0x80000000U

/* Reads PMCCFILTR_EL0 via MRS. */
static inline uint64_t _AArch64_Read_pmccfiltr_el0( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, PMCCFILTR_EL0" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* Writes PMCCFILTR_EL0 via MSR. */
static inline void _AArch64_Write_pmccfiltr_el0( uint64_t value )
{
  __asm__ volatile (
    "msr PMCCFILTR_EL0, %0" : : "r" ( value ) : "memory"
  );
}

/* PMCCNTR_EL0, Performance Monitors Cycle Count Register */

/* Reads the cycle counter PMCCNTR_EL0 via MRS. */
static inline uint64_t _AArch64_Read_pmccntr_el0( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, PMCCNTR_EL0" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* Writes the cycle counter PMCCNTR_EL0 via MSR. */
static inline void _AArch64_Write_pmccntr_el0( uint64_t value )
{
  __asm__ volatile (
    "msr PMCCNTR_EL0, %0" : : "r" ( value ) : "memory"
  );
}

/* PMCEID0_EL0, Performance Monitors Common Event Identification Register 0 */

/* Reads PMCEID0_EL0 via MRS (ID register: no write accessor generated). */
static inline uint64_t _AArch64_Read_pmceid0_el0( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, PMCEID0_EL0" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* PMCEID1_EL0, Performance Monitors Common Event Identification Register 1 */

/* Reads PMCEID1_EL0 via MRS (ID register: no write accessor generated). */
static inline uint64_t _AArch64_Read_pmceid1_el0( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, PMCEID1_EL0" : "=&r" ( value ) : : "memory"
  );

  return value;
}
8825
/* PMCNTENCLR_EL0, Performance Monitors Count Enable Clear Register */

/* C: cycle counter enable bit (bit 31) */
#define AARCH64_PMCNTENCLR_EL0_C 0x80000000U

/* Reads PMCNTENCLR_EL0 via MRS. */
static inline uint64_t _AArch64_Read_pmcntenclr_el0( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, PMCNTENCLR_EL0" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* Writes PMCNTENCLR_EL0 via MSR. */
static inline void _AArch64_Write_pmcntenclr_el0( uint64_t value )
{
  __asm__ volatile (
    "msr PMCNTENCLR_EL0, %0" : : "r" ( value ) : "memory"
  );
}

/* PMCNTENSET_EL0, Performance Monitors Count Enable Set Register */

/* C: cycle counter enable bit (bit 31) */
#define AARCH64_PMCNTENSET_EL0_C 0x80000000U

/* Reads PMCNTENSET_EL0 via MRS. */
static inline uint64_t _AArch64_Read_pmcntenset_el0( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, PMCNTENSET_EL0" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* Writes PMCNTENSET_EL0 via MSR. */
static inline void _AArch64_Write_pmcntenset_el0( uint64_t value )
{
  __asm__ volatile (
    "msr PMCNTENSET_EL0, %0" : : "r" ( value ) : "memory"
  );
}
8869
/* PMCR_EL0, Performance Monitors Control Register */

#define AARCH64_PMCR_EL0_E 0x1U

#define AARCH64_PMCR_EL0_P 0x2U

#define AARCH64_PMCR_EL0_C 0x4U

#define AARCH64_PMCR_EL0_D 0x8U

#define AARCH64_PMCR_EL0_X 0x10U

#define AARCH64_PMCR_EL0_DP 0x20U

#define AARCH64_PMCR_EL0_LC 0x40U

#define AARCH64_PMCR_EL0_LP 0x80U

/* N: five-bit counter-count field in bits [15:11] */
#define AARCH64_PMCR_EL0_N( _val ) ( ( _val ) << 11 )
#define AARCH64_PMCR_EL0_N_SHIFT 11
#define AARCH64_PMCR_EL0_N_MASK 0xf800U
#define AARCH64_PMCR_EL0_N_GET( _reg ) \
  ( ( ( _reg ) >> 11 ) & 0x1fU )

/* IDCODE: eight-bit field in bits [23:16] */
#define AARCH64_PMCR_EL0_IDCODE( _val ) ( ( _val ) << 16 )
#define AARCH64_PMCR_EL0_IDCODE_SHIFT 16
#define AARCH64_PMCR_EL0_IDCODE_MASK 0xff0000U
#define AARCH64_PMCR_EL0_IDCODE_GET( _reg ) \
  ( ( ( _reg ) >> 16 ) & 0xffU )

/* IMP: eight-bit implementer field in bits [31:24] */
#define AARCH64_PMCR_EL0_IMP( _val ) ( ( _val ) << 24 )
#define AARCH64_PMCR_EL0_IMP_SHIFT 24
#define AARCH64_PMCR_EL0_IMP_MASK 0xff000000U
#define AARCH64_PMCR_EL0_IMP_GET( _reg ) \
  ( ( ( _reg ) >> 24 ) & 0xffU )

/* Reads PMCR_EL0 via MRS. */
static inline uint64_t _AArch64_Read_pmcr_el0( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, PMCR_EL0" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* Writes PMCR_EL0 via MSR. */
static inline void _AArch64_Write_pmcr_el0( uint64_t value )
{
  __asm__ volatile (
    "msr PMCR_EL0, %0" : : "r" ( value ) : "memory"
  );
}
8923
/* PMEVCNTR_N_EL0, Performance Monitors Event Count Registers, n = 0 - 30 */

/*
 * NOTE(review): the literal name PMEVCNTR_N_EL0 appears in the asm string;
 * presumably a concrete index is substituted for "N" before use — confirm
 * against the generator and its callers.
 */
static inline uint64_t _AArch64_Read_pmevcntr_n_el0( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, PMEVCNTR_N_EL0" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* Writes PMEVCNTR_N_EL0 via MSR (see NOTE above about the "N" placeholder). */
static inline void _AArch64_Write_pmevcntr_n_el0( uint64_t value )
{
  __asm__ volatile (
    "msr PMEVCNTR_N_EL0, %0" : : "r" ( value ) : "memory"
  );
}

/* PMEVTYPER_N_EL0, Performance Monitors Event Type Registers, n = 0 - 30 */

/* evtCount[9:0]: low part of the event number, bits [9:0] */
#define AARCH64_PMEVTYPER_N_EL0_EVTCOUNT_9_0( _val ) ( ( _val ) << 0 )
#define AARCH64_PMEVTYPER_N_EL0_EVTCOUNT_9_0_SHIFT 0
#define AARCH64_PMEVTYPER_N_EL0_EVTCOUNT_9_0_MASK 0x3ffU
#define AARCH64_PMEVTYPER_N_EL0_EVTCOUNT_9_0_GET( _reg ) \
  ( ( ( _reg ) >> 0 ) & 0x3ffU )

/* evtCount[15:10]: high part of the event number, bits [15:10] */
#define AARCH64_PMEVTYPER_N_EL0_EVTCOUNT_15_10( _val ) ( ( _val ) << 10 )
#define AARCH64_PMEVTYPER_N_EL0_EVTCOUNT_15_10_SHIFT 10
#define AARCH64_PMEVTYPER_N_EL0_EVTCOUNT_15_10_MASK 0xfc00U
#define AARCH64_PMEVTYPER_N_EL0_EVTCOUNT_15_10_GET( _reg ) \
  ( ( ( _reg ) >> 10 ) & 0x3fU )

#define AARCH64_PMEVTYPER_N_EL0_SH 0x1000000U

#define AARCH64_PMEVTYPER_N_EL0_MT 0x2000000U

#define AARCH64_PMEVTYPER_N_EL0_M 0x4000000U

#define AARCH64_PMEVTYPER_N_EL0_NSH 0x8000000U

#define AARCH64_PMEVTYPER_N_EL0_NSU 0x10000000U

#define AARCH64_PMEVTYPER_N_EL0_NSK 0x20000000U

#define AARCH64_PMEVTYPER_N_EL0_U 0x40000000U

#define AARCH64_PMEVTYPER_N_EL0_P 0x80000000U

/* Reads PMEVTYPER_N_EL0 via MRS (see NOTE above about the "N" placeholder). */
static inline uint64_t _AArch64_Read_pmevtyper_n_el0( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, PMEVTYPER_N_EL0" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* Writes PMEVTYPER_N_EL0 via MSR (see NOTE above about the "N" placeholder). */
static inline void _AArch64_Write_pmevtyper_n_el0( uint64_t value )
{
  __asm__ volatile (
    "msr PMEVTYPER_N_EL0, %0" : : "r" ( value ) : "memory"
  );
}
8991
/* PMINTENCLR_EL1, Performance Monitors Interrupt Enable Clear Register */

/* C: cycle counter overflow interrupt bit (bit 31) */
#define AARCH64_PMINTENCLR_EL1_C 0x80000000U

/* Reads PMINTENCLR_EL1 via MRS. */
static inline uint64_t _AArch64_Read_pmintenclr_el1( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, PMINTENCLR_EL1" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* Writes PMINTENCLR_EL1 via MSR. */
static inline void _AArch64_Write_pmintenclr_el1( uint64_t value )
{
  __asm__ volatile (
    "msr PMINTENCLR_EL1, %0" : : "r" ( value ) : "memory"
  );
}

/* PMINTENSET_EL1, Performance Monitors Interrupt Enable Set Register */

/* C: cycle counter overflow interrupt bit (bit 31) */
#define AARCH64_PMINTENSET_EL1_C 0x80000000U

/* Reads PMINTENSET_EL1 via MRS. */
static inline uint64_t _AArch64_Read_pmintenset_el1( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, PMINTENSET_EL1" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* Writes PMINTENSET_EL1 via MSR. */
static inline void _AArch64_Write_pmintenset_el1( uint64_t value )
{
  __asm__ volatile (
    "msr PMINTENSET_EL1, %0" : : "r" ( value ) : "memory"
  );
}
9035
/* PMMIR_EL1, Performance Monitors Machine Identification Register */

/* SLOTS: eight-bit field in bits [7:0] */
#define AARCH64_PMMIR_EL1_SLOTS( _val ) ( ( _val ) << 0 )
#define AARCH64_PMMIR_EL1_SLOTS_SHIFT 0
#define AARCH64_PMMIR_EL1_SLOTS_MASK 0xffU
#define AARCH64_PMMIR_EL1_SLOTS_GET( _reg ) \
  ( ( ( _reg ) >> 0 ) & 0xffU )

/* Reads PMMIR_EL1 via MRS (ID register: no write accessor generated). */
static inline uint64_t _AArch64_Read_pmmir_el1( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, PMMIR_EL1" : "=&r" ( value ) : : "memory"
  );

  return value;
}
9054
/* PMOVSCLR_EL0, Performance Monitors Overflow Flag Status Clear Register */

/* C: cycle counter overflow flag (bit 31) */
#define AARCH64_PMOVSCLR_EL0_C 0x80000000U

/* Reads PMOVSCLR_EL0 via MRS. */
static inline uint64_t _AArch64_Read_pmovsclr_el0( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, PMOVSCLR_EL0" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* Writes PMOVSCLR_EL0 via MSR. */
static inline void _AArch64_Write_pmovsclr_el0( uint64_t value )
{
  __asm__ volatile (
    "msr PMOVSCLR_EL0, %0" : : "r" ( value ) : "memory"
  );
}

/* PMOVSSET_EL0, Performance Monitors Overflow Flag Status Set Register */

/* C: cycle counter overflow flag (bit 31) */
#define AARCH64_PMOVSSET_EL0_C 0x80000000U

/* Reads PMOVSSET_EL0 via MRS. */
static inline uint64_t _AArch64_Read_pmovsset_el0( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, PMOVSSET_EL0" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* Writes PMOVSSET_EL0 via MSR. */
static inline void _AArch64_Write_pmovsset_el0( uint64_t value )
{
  __asm__ volatile (
    "msr PMOVSSET_EL0, %0" : : "r" ( value ) : "memory"
  );
}
9098
/* PMSELR_EL0, Performance Monitors Event Counter Selection Register */

/* SEL: five-bit counter selector in bits [4:0] */
#define AARCH64_PMSELR_EL0_SEL( _val ) ( ( _val ) << 0 )
#define AARCH64_PMSELR_EL0_SEL_SHIFT 0
#define AARCH64_PMSELR_EL0_SEL_MASK 0x1fU
#define AARCH64_PMSELR_EL0_SEL_GET( _reg ) \
  ( ( ( _reg ) >> 0 ) & 0x1fU )

/* Reads PMSELR_EL0 via MRS. */
static inline uint64_t _AArch64_Read_pmselr_el0( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, PMSELR_EL0" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* Writes PMSELR_EL0 via MSR. */
static inline void _AArch64_Write_pmselr_el0( uint64_t value )
{
  __asm__ volatile (
    "msr PMSELR_EL0, %0" : : "r" ( value ) : "memory"
  );
}

/* PMSWINC_EL0, Performance Monitors Software Increment Register */

/* Writes PMSWINC_EL0 via MSR (write-only: no read accessor generated). */
static inline void _AArch64_Write_pmswinc_el0( uint64_t value )
{
  __asm__ volatile (
    "msr PMSWINC_EL0, %0" : : "r" ( value ) : "memory"
  );
}
9133
/* PMUSERENR_EL0, Performance Monitors User Enable Register */

#define AARCH64_PMUSERENR_EL0_EN 0x1U

#define AARCH64_PMUSERENR_EL0_SW 0x2U

#define AARCH64_PMUSERENR_EL0_CR 0x4U

#define AARCH64_PMUSERENR_EL0_ER 0x8U

/* Reads PMUSERENR_EL0 via MRS. */
static inline uint64_t _AArch64_Read_pmuserenr_el0( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, PMUSERENR_EL0" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* Writes PMUSERENR_EL0 via MSR. */
static inline void _AArch64_Write_pmuserenr_el0( uint64_t value )
{
  __asm__ volatile (
    "msr PMUSERENR_EL0, %0" : : "r" ( value ) : "memory"
  );
}
9161
/* PMXEVCNTR_EL0, Performance Monitors Selected Event Count Register */

/* PMEVCNTR<n>: 32-bit counter value in bits [31:0] */
#define AARCH64_PMXEVCNTR_EL0_PMEVCNTR_N( _val ) ( ( _val ) << 0 )
#define AARCH64_PMXEVCNTR_EL0_PMEVCNTR_N_SHIFT 0
#define AARCH64_PMXEVCNTR_EL0_PMEVCNTR_N_MASK 0xffffffffU
#define AARCH64_PMXEVCNTR_EL0_PMEVCNTR_N_GET( _reg ) \
  ( ( ( _reg ) >> 0 ) & 0xffffffffU )

/* Reads PMXEVCNTR_EL0 (counter selected by PMSELR_EL0) via MRS. */
static inline uint64_t _AArch64_Read_pmxevcntr_el0( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, PMXEVCNTR_EL0" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* Writes PMXEVCNTR_EL0 via MSR. */
static inline void _AArch64_Write_pmxevcntr_el0( uint64_t value )
{
  __asm__ volatile (
    "msr PMXEVCNTR_EL0, %0" : : "r" ( value ) : "memory"
  );
}

/* PMXEVTYPER_EL0, Performance Monitors Selected Event Type Register */

/* Reads PMXEVTYPER_EL0 (type of the counter selected by PMSELR_EL0). */
static inline uint64_t _AArch64_Read_pmxevtyper_el0( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, PMXEVTYPER_EL0" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* Writes PMXEVTYPER_EL0 via MSR. */
static inline void _AArch64_Write_pmxevtyper_el0( uint64_t value )
{
  __asm__ volatile (
    "msr PMXEVTYPER_EL0, %0" : : "r" ( value ) : "memory"
  );
}
9207
/* AMCFGR_EL0, Activity Monitors Configuration Register */

/* N: eight-bit field in bits [7:0] */
#define AARCH64_AMCFGR_EL0_N( _val ) ( ( _val ) << 0 )
#define AARCH64_AMCFGR_EL0_N_SHIFT 0
#define AARCH64_AMCFGR_EL0_N_MASK 0xffU
#define AARCH64_AMCFGR_EL0_N_GET( _reg ) \
  ( ( ( _reg ) >> 0 ) & 0xffU )

/* SIZE: six-bit field in bits [13:8] */
#define AARCH64_AMCFGR_EL0_SIZE( _val ) ( ( _val ) << 8 )
#define AARCH64_AMCFGR_EL0_SIZE_SHIFT 8
#define AARCH64_AMCFGR_EL0_SIZE_MASK 0x3f00U
#define AARCH64_AMCFGR_EL0_SIZE_GET( _reg ) \
  ( ( ( _reg ) >> 8 ) & 0x3fU )

#define AARCH64_AMCFGR_EL0_HDBG 0x1000000U

/* NCG: four-bit field in bits [31:28] */
#define AARCH64_AMCFGR_EL0_NCG( _val ) ( ( _val ) << 28 )
#define AARCH64_AMCFGR_EL0_NCG_SHIFT 28
#define AARCH64_AMCFGR_EL0_NCG_MASK 0xf0000000U
#define AARCH64_AMCFGR_EL0_NCG_GET( _reg ) \
  ( ( ( _reg ) >> 28 ) & 0xfU )

/* Reads AMCFGR_EL0 via MRS (ID register: no write accessor generated). */
static inline uint64_t _AArch64_Read_amcfgr_el0( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, AMCFGR_EL0" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* AMCG1IDR_EL0, Activity Monitors Counter Group 1 Identification Register */

/* Reads AMCG1IDR_EL0 via MRS (ID register: no write accessor generated). */
static inline uint64_t _AArch64_Read_amcg1idr_el0( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, AMCG1IDR_EL0" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* AMCGCR_EL0, Activity Monitors Counter Group Configuration Register */

/* CG0NC: eight-bit field in bits [7:0] */
#define AARCH64_AMCGCR_EL0_CG0NC( _val ) ( ( _val ) << 0 )
#define AARCH64_AMCGCR_EL0_CG0NC_SHIFT 0
#define AARCH64_AMCGCR_EL0_CG0NC_MASK 0xffU
#define AARCH64_AMCGCR_EL0_CG0NC_GET( _reg ) \
  ( ( ( _reg ) >> 0 ) & 0xffU )

/* CG1NC: eight-bit field in bits [15:8] */
#define AARCH64_AMCGCR_EL0_CG1NC( _val ) ( ( _val ) << 8 )
#define AARCH64_AMCGCR_EL0_CG1NC_SHIFT 8
#define AARCH64_AMCGCR_EL0_CG1NC_MASK 0xff00U
#define AARCH64_AMCGCR_EL0_CG1NC_GET( _reg ) \
  ( ( ( _reg ) >> 8 ) & 0xffU )

/* Reads AMCGCR_EL0 via MRS (ID register: no write accessor generated). */
static inline uint64_t _AArch64_Read_amcgcr_el0( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, AMCGCR_EL0" : "=&r" ( value ) : : "memory"
  );

  return value;
}
9278
/* AMCNTENCLR0_EL0, Activity Monitors Count Enable Clear Register 0 */

/* Reads AMCNTENCLR0_EL0 via MRS. */
static inline uint64_t _AArch64_Read_amcntenclr0_el0( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, AMCNTENCLR0_EL0" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* Writes AMCNTENCLR0_EL0 via MSR. */
static inline void _AArch64_Write_amcntenclr0_el0( uint64_t value )
{
  __asm__ volatile (
    "msr AMCNTENCLR0_EL0, %0" : : "r" ( value ) : "memory"
  );
}

/* AMCNTENCLR1_EL0, Activity Monitors Count Enable Clear Register 1 */

/* Reads AMCNTENCLR1_EL0 via MRS. */
static inline uint64_t _AArch64_Read_amcntenclr1_el0( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, AMCNTENCLR1_EL0" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* Writes AMCNTENCLR1_EL0 via MSR. */
static inline void _AArch64_Write_amcntenclr1_el0( uint64_t value )
{
  __asm__ volatile (
    "msr AMCNTENCLR1_EL0, %0" : : "r" ( value ) : "memory"
  );
}

/* AMCNTENSET0_EL0, Activity Monitors Count Enable Set Register 0 */

/* Reads AMCNTENSET0_EL0 via MRS. */
static inline uint64_t _AArch64_Read_amcntenset0_el0( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, AMCNTENSET0_EL0" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* Writes AMCNTENSET0_EL0 via MSR. */
static inline void _AArch64_Write_amcntenset0_el0( uint64_t value )
{
  __asm__ volatile (
    "msr AMCNTENSET0_EL0, %0" : : "r" ( value ) : "memory"
  );
}

/* AMCNTENSET1_EL0, Activity Monitors Count Enable Set Register 1 */

/* Reads AMCNTENSET1_EL0 via MRS. */
static inline uint64_t _AArch64_Read_amcntenset1_el0( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, AMCNTENSET1_EL0" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* Writes AMCNTENSET1_EL0 via MSR. */
static inline void _AArch64_Write_amcntenset1_el0( uint64_t value )
{
  __asm__ volatile (
    "msr AMCNTENSET1_EL0, %0" : : "r" ( value ) : "memory"
  );
}
9358
/* AMCR_EL0, Activity Monitors Control Register */

#define AARCH64_AMCR_EL0_HDBG 0x400U

#define AARCH64_AMCR_EL0_CG1RZ 0x20000U

/* Reads AMCR_EL0 via MRS. */
static inline uint64_t _AArch64_Read_amcr_el0( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, AMCR_EL0" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* Writes AMCR_EL0 via MSR. */
static inline void _AArch64_Write_amcr_el0( uint64_t value )
{
  __asm__ volatile (
    "msr AMCR_EL0, %0" : : "r" ( value ) : "memory"
  );
}

/* AMEVCNTR0_N_EL0, Activity Monitors Event Counter Registers 0, n = 0 - 15 */

/*
 * NOTE(review): as with PMEVCNTR_N_EL0, the "N" in the asm string is a
 * placeholder for a concrete counter index — confirm against the generator.
 */
static inline uint64_t _AArch64_Read_amevcntr0_n_el0( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, AMEVCNTR0_N_EL0" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* Writes AMEVCNTR0_N_EL0 via MSR. */
static inline void _AArch64_Write_amevcntr0_n_el0( uint64_t value )
{
  __asm__ volatile (
    "msr AMEVCNTR0_N_EL0, %0" : : "r" ( value ) : "memory"
  );
}

/* AMEVCNTR1_N_EL0, Activity Monitors Event Counter Registers 1, n = 0 - 15 */

/* Reads AMEVCNTR1_N_EL0 via MRS. */
static inline uint64_t _AArch64_Read_amevcntr1_n_el0( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, AMEVCNTR1_N_EL0" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* Writes AMEVCNTR1_N_EL0 via MSR. */
static inline void _AArch64_Write_amevcntr1_n_el0( uint64_t value )
{
  __asm__ volatile (
    "msr AMEVCNTR1_N_EL0, %0" : : "r" ( value ) : "memory"
  );
}
9422
/* AMEVCNTVOFF0_N_EL2, Activity Monitors Event Counter Virtual Offset Registers 0, n = 0 - */

/* Reads AMEVCNTVOFF0_N_EL2 via MRS ("N" is an index placeholder). */
static inline uint64_t _AArch64_Read_amevcntvoff0_n_el2( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, AMEVCNTVOFF0_N_EL2" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* Writes AMEVCNTVOFF0_N_EL2 via MSR. */
static inline void _AArch64_Write_amevcntvoff0_n_el2( uint64_t value )
{
  __asm__ volatile (
    "msr AMEVCNTVOFF0_N_EL2, %0" : : "r" ( value ) : "memory"
  );
}

/* AMEVCNTVOFF1_N_EL2, Activity Monitors Event Counter Virtual Offset Registers 1, n = 0 - */

/* Reads AMEVCNTVOFF1_N_EL2 via MRS ("N" is an index placeholder). */
static inline uint64_t _AArch64_Read_amevcntvoff1_n_el2( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, AMEVCNTVOFF1_N_EL2" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* Writes AMEVCNTVOFF1_N_EL2 via MSR. */
static inline void _AArch64_Write_amevcntvoff1_n_el2( uint64_t value )
{
  __asm__ volatile (
    "msr AMEVCNTVOFF1_N_EL2, %0" : : "r" ( value ) : "memory"
  );
}
9462
/* AMEVTYPER0_N_EL0, Activity Monitors Event Type Registers 0, n = 0 - 15 */

/* evtCount: sixteen-bit event number in bits [15:0] */
#define AARCH64_AMEVTYPER0_N_EL0_EVTCOUNT( _val ) ( ( _val ) << 0 )
#define AARCH64_AMEVTYPER0_N_EL0_EVTCOUNT_SHIFT 0
#define AARCH64_AMEVTYPER0_N_EL0_EVTCOUNT_MASK 0xffffU
#define AARCH64_AMEVTYPER0_N_EL0_EVTCOUNT_GET( _reg ) \
  ( ( ( _reg ) >> 0 ) & 0xffffU )

/* Reads AMEVTYPER0_N_EL0 via MRS (no write accessor is generated). */
static inline uint64_t _AArch64_Read_amevtyper0_n_el0( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, AMEVTYPER0_N_EL0" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* AMEVTYPER1_N_EL0, Activity Monitors Event Type Registers 1, n = 0 - 15 */

/* evtCount: sixteen-bit event number in bits [15:0] */
#define AARCH64_AMEVTYPER1_N_EL0_EVTCOUNT( _val ) ( ( _val ) << 0 )
#define AARCH64_AMEVTYPER1_N_EL0_EVTCOUNT_SHIFT 0
#define AARCH64_AMEVTYPER1_N_EL0_EVTCOUNT_MASK 0xffffU
#define AARCH64_AMEVTYPER1_N_EL0_EVTCOUNT_GET( _reg ) \
  ( ( ( _reg ) >> 0 ) & 0xffffU )

/* Reads AMEVTYPER1_N_EL0 via MRS. */
static inline uint64_t _AArch64_Read_amevtyper1_n_el0( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, AMEVTYPER1_N_EL0" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* Writes AMEVTYPER1_N_EL0 via MSR. */
static inline void _AArch64_Write_amevtyper1_n_el0( uint64_t value )
{
  __asm__ volatile (
    "msr AMEVTYPER1_N_EL0, %0" : : "r" ( value ) : "memory"
  );
}

/* AMUSERENR_EL0, Activity Monitors User Enable Register */

#define AARCH64_AMUSERENR_EL0_EN 0x1U

/* Reads AMUSERENR_EL0 via MRS. */
static inline uint64_t _AArch64_Read_amuserenr_el0( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, AMUSERENR_EL0" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* Writes AMUSERENR_EL0 via MSR. */
static inline void _AArch64_Write_amuserenr_el0( uint64_t value )
{
  __asm__ volatile (
    "msr AMUSERENR_EL0, %0" : : "r" ( value ) : "memory"
  );
}
9529
/* PMBIDR_EL1, Profiling Buffer ID Register */

/* Align: four-bit field in bits [3:0] */
#define AARCH64_PMBIDR_EL1_ALIGN( _val ) ( ( _val ) << 0 )
#define AARCH64_PMBIDR_EL1_ALIGN_SHIFT 0
#define AARCH64_PMBIDR_EL1_ALIGN_MASK 0xfU
#define AARCH64_PMBIDR_EL1_ALIGN_GET( _reg ) \
  ( ( ( _reg ) >> 0 ) & 0xfU )

#define AARCH64_PMBIDR_EL1_P 0x10U

#define AARCH64_PMBIDR_EL1_F 0x20U

/* Reads PMBIDR_EL1 via MRS (ID register: no write accessor generated). */
static inline uint64_t _AArch64_Read_pmbidr_el1( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, PMBIDR_EL1" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* PMBLIMITR_EL1, Profiling Buffer Limit Address Register */

#define AARCH64_PMBLIMITR_EL1_E 0x1U

/* FM: two-bit fill mode field in bits [2:1] */
#define AARCH64_PMBLIMITR_EL1_FM( _val ) ( ( _val ) << 1 )
#define AARCH64_PMBLIMITR_EL1_FM_SHIFT 1
#define AARCH64_PMBLIMITR_EL1_FM_MASK 0x6U
#define AARCH64_PMBLIMITR_EL1_FM_GET( _reg ) \
  ( ( ( _reg ) >> 1 ) & 0x3U )

/* LIMIT: page-aligned limit address in bits [63:12] */
#define AARCH64_PMBLIMITR_EL1_LIMIT( _val ) ( ( _val ) << 12 )
#define AARCH64_PMBLIMITR_EL1_LIMIT_SHIFT 12
#define AARCH64_PMBLIMITR_EL1_LIMIT_MASK 0xfffffffffffff000ULL
#define AARCH64_PMBLIMITR_EL1_LIMIT_GET( _reg ) \
  ( ( ( _reg ) >> 12 ) & 0xfffffffffffffULL )

/* Reads PMBLIMITR_EL1 via MRS. */
static inline uint64_t _AArch64_Read_pmblimitr_el1( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, PMBLIMITR_EL1" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* Writes PMBLIMITR_EL1 via MSR. */
static inline void _AArch64_Write_pmblimitr_el1( uint64_t value )
{
  __asm__ volatile (
    "msr PMBLIMITR_EL1, %0" : : "r" ( value ) : "memory"
  );
}
9586
/* PMBPTR_EL1, Profiling Buffer Write Pointer Register */

/* Reads PMBPTR_EL1 via MRS. */
static inline uint64_t _AArch64_Read_pmbptr_el1( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, PMBPTR_EL1" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* Writes PMBPTR_EL1 via MSR. */
static inline void _AArch64_Write_pmbptr_el1( uint64_t value )
{
  __asm__ volatile (
    "msr PMBPTR_EL1, %0" : : "r" ( value ) : "memory"
  );
}

/* PMBSR_EL1, Profiling Buffer Status/syndrome Register */

/*
 * BSC, FSC and MSS all start at bit 0 on purpose: they are overlapping
 * views of the same syndrome field, selected by the EC value below.
 */
#define AARCH64_PMBSR_EL1_BSC( _val ) ( ( _val ) << 0 )
#define AARCH64_PMBSR_EL1_BSC_SHIFT 0
#define AARCH64_PMBSR_EL1_BSC_MASK 0x3fU
#define AARCH64_PMBSR_EL1_BSC_GET( _reg ) \
  ( ( ( _reg ) >> 0 ) & 0x3fU )

#define AARCH64_PMBSR_EL1_FSC( _val ) ( ( _val ) << 0 )
#define AARCH64_PMBSR_EL1_FSC_SHIFT 0
#define AARCH64_PMBSR_EL1_FSC_MASK 0x3fU
#define AARCH64_PMBSR_EL1_FSC_GET( _reg ) \
  ( ( ( _reg ) >> 0 ) & 0x3fU )

#define AARCH64_PMBSR_EL1_MSS( _val ) ( ( _val ) << 0 )
#define AARCH64_PMBSR_EL1_MSS_SHIFT 0
#define AARCH64_PMBSR_EL1_MSS_MASK 0xffffU
#define AARCH64_PMBSR_EL1_MSS_GET( _reg ) \
  ( ( ( _reg ) >> 0 ) & 0xffffU )

#define AARCH64_PMBSR_EL1_COLL 0x10000U

#define AARCH64_PMBSR_EL1_S 0x20000U

#define AARCH64_PMBSR_EL1_EA 0x40000U

#define AARCH64_PMBSR_EL1_DL 0x80000U

/* EC: six-bit event class field in bits [31:26] */
#define AARCH64_PMBSR_EL1_EC( _val ) ( ( _val ) << 26 )
#define AARCH64_PMBSR_EL1_EC_SHIFT 26
#define AARCH64_PMBSR_EL1_EC_MASK 0xfc000000U
#define AARCH64_PMBSR_EL1_EC_GET( _reg ) \
  ( ( ( _reg ) >> 26 ) & 0x3fU )

/* Reads PMBSR_EL1 via MRS. */
static inline uint64_t _AArch64_Read_pmbsr_el1( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, PMBSR_EL1" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* Writes PMBSR_EL1 via MSR. */
static inline void _AArch64_Write_pmbsr_el1( uint64_t value )
{
  __asm__ volatile (
    "msr PMBSR_EL1, %0" : : "r" ( value ) : "memory"
  );
}
9658
/* PMSCR_EL1, Statistical Profiling Control Register (EL1) */

#define AARCH64_PMSCR_EL1_E0SPE 0x1U

#define AARCH64_PMSCR_EL1_E1SPE 0x2U

#define AARCH64_PMSCR_EL1_CX 0x8U

#define AARCH64_PMSCR_EL1_PA 0x10U

#define AARCH64_PMSCR_EL1_TS 0x20U

/* PCT: two-bit timestamp-source field in bits [7:6] */
#define AARCH64_PMSCR_EL1_PCT( _val ) ( ( _val ) << 6 )
#define AARCH64_PMSCR_EL1_PCT_SHIFT 6
#define AARCH64_PMSCR_EL1_PCT_MASK 0xc0U
#define AARCH64_PMSCR_EL1_PCT_GET( _reg ) \
  ( ( ( _reg ) >> 6 ) & 0x3U )

/* Reads PMSCR_EL1 via MRS. */
static inline uint64_t _AArch64_Read_pmscr_el1( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, PMSCR_EL1" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* Writes PMSCR_EL1 via MSR. */
static inline void _AArch64_Write_pmscr_el1( uint64_t value )
{
  __asm__ volatile (
    "msr PMSCR_EL1, %0" : : "r" ( value ) : "memory"
  );
}

/* PMSCR_EL2, Statistical Profiling Control Register (EL2) */

#define AARCH64_PMSCR_EL2_E0HSPE 0x1U

#define AARCH64_PMSCR_EL2_E2SPE 0x2U

#define AARCH64_PMSCR_EL2_CX 0x8U

#define AARCH64_PMSCR_EL2_PA 0x10U

#define AARCH64_PMSCR_EL2_TS 0x20U

/* PCT: two-bit timestamp-source field in bits [7:6] */
#define AARCH64_PMSCR_EL2_PCT( _val ) ( ( _val ) << 6 )
#define AARCH64_PMSCR_EL2_PCT_SHIFT 6
#define AARCH64_PMSCR_EL2_PCT_MASK 0xc0U
#define AARCH64_PMSCR_EL2_PCT_GET( _reg ) \
  ( ( ( _reg ) >> 6 ) & 0x3U )

/* Reads PMSCR_EL2 via MRS. */
static inline uint64_t _AArch64_Read_pmscr_el2( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, PMSCR_EL2" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* Writes PMSCR_EL2 via MSR. */
static inline void _AArch64_Write_pmscr_el2( uint64_t value )
{
  __asm__ volatile (
    "msr PMSCR_EL2, %0" : : "r" ( value ) : "memory"
  );
}
9730
/* PMSEVFR_EL1, Sampling Event Filter Register */

/*
 * E[n] bits: one filter bit per event number n.  The sequence is sparse
 * because only the architected event numbers have filter bits.
 */
#define AARCH64_PMSEVFR_EL1_E_1 0x2U

#define AARCH64_PMSEVFR_EL1_E_3 0x8U

#define AARCH64_PMSEVFR_EL1_E_5 0x20U

#define AARCH64_PMSEVFR_EL1_E_7 0x80U

#define AARCH64_PMSEVFR_EL1_E_11 0x800U

#define AARCH64_PMSEVFR_EL1_E_12 0x1000U

#define AARCH64_PMSEVFR_EL1_E_13 0x2000U

#define AARCH64_PMSEVFR_EL1_E_14 0x4000U

#define AARCH64_PMSEVFR_EL1_E_15 0x8000U

#define AARCH64_PMSEVFR_EL1_E_17 0x20000U

#define AARCH64_PMSEVFR_EL1_E_18 0x40000U

#define AARCH64_PMSEVFR_EL1_E_24 0x1000000U

#define AARCH64_PMSEVFR_EL1_E_25 0x2000000U

#define AARCH64_PMSEVFR_EL1_E_26 0x4000000U

#define AARCH64_PMSEVFR_EL1_E_27 0x8000000U

#define AARCH64_PMSEVFR_EL1_E_28 0x10000000U

#define AARCH64_PMSEVFR_EL1_E_29 0x20000000U

#define AARCH64_PMSEVFR_EL1_E_30 0x40000000U

#define AARCH64_PMSEVFR_EL1_E_31 0x80000000U

#define AARCH64_PMSEVFR_EL1_E_48 0x1000000000000ULL

#define AARCH64_PMSEVFR_EL1_E_49 0x2000000000000ULL

#define AARCH64_PMSEVFR_EL1_E_50 0x4000000000000ULL

#define AARCH64_PMSEVFR_EL1_E_51 0x8000000000000ULL

#define AARCH64_PMSEVFR_EL1_E_52 0x10000000000000ULL

#define AARCH64_PMSEVFR_EL1_E_53 0x20000000000000ULL

#define AARCH64_PMSEVFR_EL1_E_54 0x40000000000000ULL

#define AARCH64_PMSEVFR_EL1_E_55 0x80000000000000ULL

#define AARCH64_PMSEVFR_EL1_E_56 0x100000000000000ULL

#define AARCH64_PMSEVFR_EL1_E_57 0x200000000000000ULL

#define AARCH64_PMSEVFR_EL1_E_58 0x400000000000000ULL

#define AARCH64_PMSEVFR_EL1_E_59 0x800000000000000ULL

#define AARCH64_PMSEVFR_EL1_E_60 0x1000000000000000ULL

#define AARCH64_PMSEVFR_EL1_E_61 0x2000000000000000ULL

#define AARCH64_PMSEVFR_EL1_E_62 0x4000000000000000ULL

#define AARCH64_PMSEVFR_EL1_E_63 0x8000000000000000ULL

/* Reads PMSEVFR_EL1 via MRS. */
static inline uint64_t _AArch64_Read_pmsevfr_el1( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, PMSEVFR_EL1" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* Writes PMSEVFR_EL1 via MSR. */
static inline void _AArch64_Write_pmsevfr_el1( uint64_t value )
{
  __asm__ volatile (
    "msr PMSEVFR_EL1, %0" : : "r" ( value ) : "memory"
  );
}
9820
/* PMSFCR_EL1, Sampling Filter Control Register */

#define AARCH64_PMSFCR_EL1_FE 0x1U

#define AARCH64_PMSFCR_EL1_FT 0x2U

#define AARCH64_PMSFCR_EL1_FL 0x4U

#define AARCH64_PMSFCR_EL1_B 0x10000U

#define AARCH64_PMSFCR_EL1_LD 0x20000U

#define AARCH64_PMSFCR_EL1_ST 0x40000U

/* Reads the PMSFCR_EL1 system register (MRS). */
static inline uint64_t _AArch64_Read_pmsfcr_el1( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, PMSFCR_EL1" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* Writes the PMSFCR_EL1 system register (MSR). */
static inline void _AArch64_Write_pmsfcr_el1( uint64_t value )
{
  __asm__ volatile (
    "msr PMSFCR_EL1, %0" : : "r" ( value ) : "memory"
  );
}

/* PMSICR_EL1, Sampling Interval Counter Register */

/* COUNT field occupies bits [31:0]. */
#define AARCH64_PMSICR_EL1_COUNT( _val ) ( ( _val ) << 0 )
#define AARCH64_PMSICR_EL1_COUNT_SHIFT 0
#define AARCH64_PMSICR_EL1_COUNT_MASK 0xffffffffU
#define AARCH64_PMSICR_EL1_COUNT_GET( _reg ) \
  ( ( ( _reg ) >> 0 ) & 0xffffffffU )

/* ECOUNT field occupies bits [63:56]. */
#define AARCH64_PMSICR_EL1_ECOUNT( _val ) ( ( _val ) << 56 )
#define AARCH64_PMSICR_EL1_ECOUNT_SHIFT 56
#define AARCH64_PMSICR_EL1_ECOUNT_MASK 0xff00000000000000ULL
#define AARCH64_PMSICR_EL1_ECOUNT_GET( _reg ) \
  ( ( ( _reg ) >> 56 ) & 0xffULL )

/* Reads the PMSICR_EL1 system register (MRS). */
static inline uint64_t _AArch64_Read_pmsicr_el1( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, PMSICR_EL1" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* Writes the PMSICR_EL1 system register (MSR). */
static inline void _AArch64_Write_pmsicr_el1( uint64_t value )
{
  __asm__ volatile (
    "msr PMSICR_EL1, %0" : : "r" ( value ) : "memory"
  );
}
9884
/* PMSIDR_EL1, Sampling Profiling ID Register */

#define AARCH64_PMSIDR_EL1_FE 0x1U

#define AARCH64_PMSIDR_EL1_FT 0x2U

#define AARCH64_PMSIDR_EL1_FL 0x4U

#define AARCH64_PMSIDR_EL1_ARCHINST 0x8U

#define AARCH64_PMSIDR_EL1_LDS 0x10U

#define AARCH64_PMSIDR_EL1_ERND 0x20U

/* INTERVAL field occupies bits [11:8]. */
#define AARCH64_PMSIDR_EL1_INTERVAL( _val ) ( ( _val ) << 8 )
#define AARCH64_PMSIDR_EL1_INTERVAL_SHIFT 8
#define AARCH64_PMSIDR_EL1_INTERVAL_MASK 0xf00U
#define AARCH64_PMSIDR_EL1_INTERVAL_GET( _reg ) \
  ( ( ( _reg ) >> 8 ) & 0xfU )

/* MAXSIZE field occupies bits [15:12]. */
#define AARCH64_PMSIDR_EL1_MAXSIZE( _val ) ( ( _val ) << 12 )
#define AARCH64_PMSIDR_EL1_MAXSIZE_SHIFT 12
#define AARCH64_PMSIDR_EL1_MAXSIZE_MASK 0xf000U
#define AARCH64_PMSIDR_EL1_MAXSIZE_GET( _reg ) \
  ( ( ( _reg ) >> 12 ) & 0xfU )

/* COUNTSIZE field occupies bits [19:16]. */
#define AARCH64_PMSIDR_EL1_COUNTSIZE( _val ) ( ( _val ) << 16 )
#define AARCH64_PMSIDR_EL1_COUNTSIZE_SHIFT 16
#define AARCH64_PMSIDR_EL1_COUNTSIZE_MASK 0xf0000U
#define AARCH64_PMSIDR_EL1_COUNTSIZE_GET( _reg ) \
  ( ( ( _reg ) >> 16 ) & 0xfU )

/* Reads the PMSIDR_EL1 system register (MRS); no write accessor is
   provided for this ID register. */
static inline uint64_t _AArch64_Read_pmsidr_el1( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, PMSIDR_EL1" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* PMSIRR_EL1, Sampling Interval Reload Register */

#define AARCH64_PMSIRR_EL1_RND 0x1U

/* INTERVAL field occupies bits [31:8]. */
#define AARCH64_PMSIRR_EL1_INTERVAL( _val ) ( ( _val ) << 8 )
#define AARCH64_PMSIRR_EL1_INTERVAL_SHIFT 8
#define AARCH64_PMSIRR_EL1_INTERVAL_MASK 0xffffff00U
#define AARCH64_PMSIRR_EL1_INTERVAL_GET( _reg ) \
  ( ( ( _reg ) >> 8 ) & 0xffffffU )

/* Reads the PMSIRR_EL1 system register (MRS). */
static inline uint64_t _AArch64_Read_pmsirr_el1( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, PMSIRR_EL1" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* Writes the PMSIRR_EL1 system register (MSR). */
static inline void _AArch64_Write_pmsirr_el1( uint64_t value )
{
  __asm__ volatile (
    "msr PMSIRR_EL1, %0" : : "r" ( value ) : "memory"
  );
}

/* PMSLATFR_EL1, Sampling Latency Filter Register */

/* MINLAT field occupies bits [11:0]. */
#define AARCH64_PMSLATFR_EL1_MINLAT( _val ) ( ( _val ) << 0 )
#define AARCH64_PMSLATFR_EL1_MINLAT_SHIFT 0
#define AARCH64_PMSLATFR_EL1_MINLAT_MASK 0xfffU
#define AARCH64_PMSLATFR_EL1_MINLAT_GET( _reg ) \
  ( ( ( _reg ) >> 0 ) & 0xfffU )

/* Reads the PMSLATFR_EL1 system register (MRS). */
static inline uint64_t _AArch64_Read_pmslatfr_el1( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, PMSLATFR_EL1" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* Writes the PMSLATFR_EL1 system register (MSR). */
static inline void _AArch64_Write_pmslatfr_el1( uint64_t value )
{
  __asm__ volatile (
    "msr PMSLATFR_EL1, %0" : : "r" ( value ) : "memory"
  );
}
9981
/* DISR_EL1, Deferred Interrupt Status Register */

/* DFSC field occupies bits [5:0].  Note that DFSC and ISS overlap;
   they are alternative decodings of the low syndrome bits
   (NOTE(review): which applies depends on DISR.IDS — confirm against
   the Arm ARM). */
#define AARCH64_DISR_EL1_DFSC( _val ) ( ( _val ) << 0 )
#define AARCH64_DISR_EL1_DFSC_SHIFT 0
#define AARCH64_DISR_EL1_DFSC_MASK 0x3fU
#define AARCH64_DISR_EL1_DFSC_GET( _reg ) \
  ( ( ( _reg ) >> 0 ) & 0x3fU )

/* ISS field occupies bits [23:0]. */
#define AARCH64_DISR_EL1_ISS( _val ) ( ( _val ) << 0 )
#define AARCH64_DISR_EL1_ISS_SHIFT 0
#define AARCH64_DISR_EL1_ISS_MASK 0xffffffU
#define AARCH64_DISR_EL1_ISS_GET( _reg ) \
  ( ( ( _reg ) >> 0 ) & 0xffffffU )

#define AARCH64_DISR_EL1_EA 0x200U

/* AET field occupies bits [12:10]. */
#define AARCH64_DISR_EL1_AET( _val ) ( ( _val ) << 10 )
#define AARCH64_DISR_EL1_AET_SHIFT 10
#define AARCH64_DISR_EL1_AET_MASK 0x1c00U
#define AARCH64_DISR_EL1_AET_GET( _reg ) \
  ( ( ( _reg ) >> 10 ) & 0x7U )

#define AARCH64_DISR_EL1_IDS 0x1000000U

#define AARCH64_DISR_EL1_A 0x80000000U

/* Reads the DISR_EL1 system register (MRS). */
static inline uint64_t _AArch64_Read_disr_el1( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, DISR_EL1" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* Writes the DISR_EL1 system register (MSR). */
static inline void _AArch64_Write_disr_el1( uint64_t value )
{
  __asm__ volatile (
    "msr DISR_EL1, %0" : : "r" ( value ) : "memory"
  );
}

/* ERRIDR_EL1, Error Record ID Register */

/* NUM field occupies bits [15:0]. */
#define AARCH64_ERRIDR_EL1_NUM( _val ) ( ( _val ) << 0 )
#define AARCH64_ERRIDR_EL1_NUM_SHIFT 0
#define AARCH64_ERRIDR_EL1_NUM_MASK 0xffffU
#define AARCH64_ERRIDR_EL1_NUM_GET( _reg ) \
  ( ( ( _reg ) >> 0 ) & 0xffffU )

/* Reads the ERRIDR_EL1 system register (MRS); no write accessor is
   provided for this ID register. */
static inline uint64_t _AArch64_Read_erridr_el1( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, ERRIDR_EL1" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* ERRSELR_EL1, Error Record Select Register */

/* SEL field occupies bits [15:0]. */
#define AARCH64_ERRSELR_EL1_SEL( _val ) ( ( _val ) << 0 )
#define AARCH64_ERRSELR_EL1_SEL_SHIFT 0
#define AARCH64_ERRSELR_EL1_SEL_MASK 0xffffU
#define AARCH64_ERRSELR_EL1_SEL_GET( _reg ) \
  ( ( ( _reg ) >> 0 ) & 0xffffU )

/* Reads the ERRSELR_EL1 system register (MRS). */
static inline uint64_t _AArch64_Read_errselr_el1( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, ERRSELR_EL1" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* Writes the ERRSELR_EL1 system register (MSR). */
static inline void _AArch64_Write_errselr_el1( uint64_t value )
{
  __asm__ volatile (
    "msr ERRSELR_EL1, %0" : : "r" ( value ) : "memory"
  );
}
10070
/* ERXADDR_EL1, Selected Error Record Address Register */

/* The ERX* accessors below operate on the error record currently
   selected via ERRSELR_EL1. */

/* Reads the ERXADDR_EL1 system register (MRS). */
static inline uint64_t _AArch64_Read_erxaddr_el1( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, ERXADDR_EL1" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* Writes the ERXADDR_EL1 system register (MSR). */
static inline void _AArch64_Write_erxaddr_el1( uint64_t value )
{
  __asm__ volatile (
    "msr ERXADDR_EL1, %0" : : "r" ( value ) : "memory"
  );
}

/* ERXCTLR_EL1, Selected Error Record Control Register */

/* Reads the ERXCTLR_EL1 system register (MRS). */
static inline uint64_t _AArch64_Read_erxctlr_el1( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, ERXCTLR_EL1" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* Writes the ERXCTLR_EL1 system register (MSR). */
static inline void _AArch64_Write_erxctlr_el1( uint64_t value )
{
  __asm__ volatile (
    "msr ERXCTLR_EL1, %0" : : "r" ( value ) : "memory"
  );
}

/* ERXFR_EL1, Selected Error Record Feature Register */

/* Reads the ERXFR_EL1 system register (MRS); no write accessor is
   provided for this feature register. */
static inline uint64_t _AArch64_Read_erxfr_el1( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, ERXFR_EL1" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* ERXMISC0_EL1, Selected Error Record Miscellaneous Register 0 */

/* Reads the ERXMISC0_EL1 system register (MRS). */
static inline uint64_t _AArch64_Read_erxmisc0_el1( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, ERXMISC0_EL1" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* Writes the ERXMISC0_EL1 system register (MSR). */
static inline void _AArch64_Write_erxmisc0_el1( uint64_t value )
{
  __asm__ volatile (
    "msr ERXMISC0_EL1, %0" : : "r" ( value ) : "memory"
  );
}

/* ERXMISC1_EL1, Selected Error Record Miscellaneous Register 1 */

/* Reads the ERXMISC1_EL1 system register (MRS). */
static inline uint64_t _AArch64_Read_erxmisc1_el1( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, ERXMISC1_EL1" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* Writes the ERXMISC1_EL1 system register (MSR). */
static inline void _AArch64_Write_erxmisc1_el1( uint64_t value )
{
  __asm__ volatile (
    "msr ERXMISC1_EL1, %0" : : "r" ( value ) : "memory"
  );
}

/* ERXMISC2_EL1, Selected Error Record Miscellaneous Register 2 */

/* Reads the ERXMISC2_EL1 system register (MRS). */
static inline uint64_t _AArch64_Read_erxmisc2_el1( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, ERXMISC2_EL1" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* Writes the ERXMISC2_EL1 system register (MSR). */
static inline void _AArch64_Write_erxmisc2_el1( uint64_t value )
{
  __asm__ volatile (
    "msr ERXMISC2_EL1, %0" : : "r" ( value ) : "memory"
  );
}

/* ERXMISC3_EL1, Selected Error Record Miscellaneous Register 3 */

/* Reads the ERXMISC3_EL1 system register (MRS). */
static inline uint64_t _AArch64_Read_erxmisc3_el1( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, ERXMISC3_EL1" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* Writes the ERXMISC3_EL1 system register (MSR). */
static inline void _AArch64_Write_erxmisc3_el1( uint64_t value )
{
  __asm__ volatile (
    "msr ERXMISC3_EL1, %0" : : "r" ( value ) : "memory"
  );
}

/* ERXPFGCDN_EL1, Selected Pseudo-fault Generation Countdown Register */

/* Reads the ERXPFGCDN_EL1 system register (MRS). */
static inline uint64_t _AArch64_Read_erxpfgcdn_el1( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, ERXPFGCDN_EL1" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* Writes the ERXPFGCDN_EL1 system register (MSR). */
static inline void _AArch64_Write_erxpfgcdn_el1( uint64_t value )
{
  __asm__ volatile (
    "msr ERXPFGCDN_EL1, %0" : : "r" ( value ) : "memory"
  );
}

/* ERXPFGCTL_EL1, Selected Pseudo-fault Generation Control Register */

/* Reads the ERXPFGCTL_EL1 system register (MRS). */
static inline uint64_t _AArch64_Read_erxpfgctl_el1( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, ERXPFGCTL_EL1" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* Writes the ERXPFGCTL_EL1 system register (MSR). */
static inline void _AArch64_Write_erxpfgctl_el1( uint64_t value )
{
  __asm__ volatile (
    "msr ERXPFGCTL_EL1, %0" : : "r" ( value ) : "memory"
  );
}

/* ERXPFGF_EL1, Selected Pseudo-fault Generation Feature Register */

/* Reads the ERXPFGF_EL1 system register (MRS); no write accessor is
   provided for this feature register. */
static inline uint64_t _AArch64_Read_erxpfgf_el1( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, ERXPFGF_EL1" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* ERXSTATUS_EL1, Selected Error Record Primary Status Register */

/* Reads the ERXSTATUS_EL1 system register (MRS). */
static inline uint64_t _AArch64_Read_erxstatus_el1( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, ERXSTATUS_EL1" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* Writes the ERXSTATUS_EL1 system register (MSR). */
static inline void _AArch64_Write_erxstatus_el1( uint64_t value )
{
  __asm__ volatile (
    "msr ERXSTATUS_EL1, %0" : : "r" ( value ) : "memory"
  );
}
10276
/* VDISR_EL2, Virtual Deferred Interrupt Status Register */

/* FS_3_0, STATUS and ISS overlap at bit 0; they are alternative
   decodings of the low syndrome bits (NOTE(review): selection depends
   on the reported syndrome format — confirm against the Arm ARM). */

/* FS_3_0 field occupies bits [3:0]. */
#define AARCH64_VDISR_EL2_FS_3_0( _val ) ( ( _val ) << 0 )
#define AARCH64_VDISR_EL2_FS_3_0_SHIFT 0
#define AARCH64_VDISR_EL2_FS_3_0_MASK 0xfU
#define AARCH64_VDISR_EL2_FS_3_0_GET( _reg ) \
  ( ( ( _reg ) >> 0 ) & 0xfU )

/* STATUS field occupies bits [5:0]. */
#define AARCH64_VDISR_EL2_STATUS( _val ) ( ( _val ) << 0 )
#define AARCH64_VDISR_EL2_STATUS_SHIFT 0
#define AARCH64_VDISR_EL2_STATUS_MASK 0x3fU
#define AARCH64_VDISR_EL2_STATUS_GET( _reg ) \
  ( ( ( _reg ) >> 0 ) & 0x3fU )

/* ISS field occupies bits [23:0]. */
#define AARCH64_VDISR_EL2_ISS( _val ) ( ( _val ) << 0 )
#define AARCH64_VDISR_EL2_ISS_SHIFT 0
#define AARCH64_VDISR_EL2_ISS_MASK 0xffffffU
#define AARCH64_VDISR_EL2_ISS_GET( _reg ) \
  ( ( ( _reg ) >> 0 ) & 0xffffffU )

#define AARCH64_VDISR_EL2_LPAE 0x200U

#define AARCH64_VDISR_EL2_FS_4 0x400U

#define AARCH64_VDISR_EL2_EXT 0x1000U

/* AET field occupies bits [15:14]. */
#define AARCH64_VDISR_EL2_AET( _val ) ( ( _val ) << 14 )
#define AARCH64_VDISR_EL2_AET_SHIFT 14
#define AARCH64_VDISR_EL2_AET_MASK 0xc000U
#define AARCH64_VDISR_EL2_AET_GET( _reg ) \
  ( ( ( _reg ) >> 14 ) & 0x3U )

#define AARCH64_VDISR_EL2_IDS 0x1000000U

#define AARCH64_VDISR_EL2_A 0x80000000U

/* Reads the VDISR_EL2 system register (MRS). */
static inline uint64_t _AArch64_Read_vdisr_el2( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, VDISR_EL2" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* Writes the VDISR_EL2 system register (MSR). */
static inline void _AArch64_Write_vdisr_el2( uint64_t value )
{
  __asm__ volatile (
    "msr VDISR_EL2, %0" : : "r" ( value ) : "memory"
  );
}

/* VSESR_EL2, Virtual SError Exception Syndrome Register */

/* ISS field occupies bits [23:0]. */
#define AARCH64_VSESR_EL2_ISS( _val ) ( ( _val ) << 0 )
#define AARCH64_VSESR_EL2_ISS_SHIFT 0
#define AARCH64_VSESR_EL2_ISS_MASK 0xffffffU
#define AARCH64_VSESR_EL2_ISS_GET( _reg ) \
  ( ( ( _reg ) >> 0 ) & 0xffffffU )

#define AARCH64_VSESR_EL2_EXT 0x1000U

/* AET field occupies bits [15:14]. */
#define AARCH64_VSESR_EL2_AET( _val ) ( ( _val ) << 14 )
#define AARCH64_VSESR_EL2_AET_SHIFT 14
#define AARCH64_VSESR_EL2_AET_MASK 0xc000U
#define AARCH64_VSESR_EL2_AET_GET( _reg ) \
  ( ( ( _reg ) >> 14 ) & 0x3U )

#define AARCH64_VSESR_EL2_IDS 0x1000000U

/* Reads the VSESR_EL2 system register (MRS). */
static inline uint64_t _AArch64_Read_vsesr_el2( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, VSESR_EL2" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* Writes the VSESR_EL2 system register (MSR). */
static inline void _AArch64_Write_vsesr_el2( uint64_t value )
{
  __asm__ volatile (
    "msr VSESR_EL2, %0" : : "r" ( value ) : "memory"
  );
}
10366
/* CNTFRQ_EL0, Counter-timer Frequency Register */

/* Reads the CNTFRQ_EL0 system register (MRS). */
static inline uint64_t _AArch64_Read_cntfrq_el0( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, CNTFRQ_EL0" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* Writes the CNTFRQ_EL0 system register (MSR). */
static inline void _AArch64_Write_cntfrq_el0( uint64_t value )
{
  __asm__ volatile (
    "msr CNTFRQ_EL0, %0" : : "r" ( value ) : "memory"
  );
}

/* CNTHCTL_EL2, Counter-timer Hypervisor Control Register */

/* Two bit positions are defined for EL1PCTEN (bits 0 and 10), and
   EL1PCEN shares bit 1 with EL0VCTEN; NOTE(review): which layout
   applies depends on the configuration (e.g. HCR_EL2.E2H) — confirm
   against the Arm ARM before using these pairs. */

#define AARCH64_CNTHCTL_EL2_EL0PCTEN 0x1U

#define AARCH64_CNTHCTL_EL2_EL1PCTEN_0 0x1U

#define AARCH64_CNTHCTL_EL2_EL0VCTEN 0x2U

#define AARCH64_CNTHCTL_EL2_EL1PCEN 0x2U

#define AARCH64_CNTHCTL_EL2_EVNTEN 0x4U

#define AARCH64_CNTHCTL_EL2_EVNTDIR 0x8U

/* EVNTI field occupies bits [7:4]. */
#define AARCH64_CNTHCTL_EL2_EVNTI( _val ) ( ( _val ) << 4 )
#define AARCH64_CNTHCTL_EL2_EVNTI_SHIFT 4
#define AARCH64_CNTHCTL_EL2_EVNTI_MASK 0xf0U
#define AARCH64_CNTHCTL_EL2_EVNTI_GET( _reg ) \
  ( ( ( _reg ) >> 4 ) & 0xfU )

#define AARCH64_CNTHCTL_EL2_EL0VTEN 0x100U

#define AARCH64_CNTHCTL_EL2_EL0PTEN 0x200U

#define AARCH64_CNTHCTL_EL2_EL1PCTEN_1 0x400U

#define AARCH64_CNTHCTL_EL2_EL1PTEN 0x800U

#define AARCH64_CNTHCTL_EL2_ECV 0x1000U

#define AARCH64_CNTHCTL_EL2_EL1TVT 0x2000U

#define AARCH64_CNTHCTL_EL2_EL1TVCT 0x4000U

#define AARCH64_CNTHCTL_EL2_EL1NVPCT 0x8000U

#define AARCH64_CNTHCTL_EL2_EL1NVVCT 0x10000U

#define AARCH64_CNTHCTL_EL2_EVNTIS 0x20000U

/* Reads the CNTHCTL_EL2 system register (MRS). */
static inline uint64_t _AArch64_Read_cnthctl_el2( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, CNTHCTL_EL2" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* Writes the CNTHCTL_EL2 system register (MSR). */
static inline void _AArch64_Write_cnthctl_el2( uint64_t value )
{
  __asm__ volatile (
    "msr CNTHCTL_EL2, %0" : : "r" ( value ) : "memory"
  );
}
10444
/* CNTHP_CTL_EL2, Counter-timer Hypervisor Physical Timer Control Register */

#define AARCH64_CNTHP_CTL_EL2_ENABLE 0x1U

#define AARCH64_CNTHP_CTL_EL2_IMASK 0x2U

#define AARCH64_CNTHP_CTL_EL2_ISTATUS 0x4U

/* Reads the CNTHP_CTL_EL2 system register (MRS). */
static inline uint64_t _AArch64_Read_cnthp_ctl_el2( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, CNTHP_CTL_EL2" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* Writes the CNTHP_CTL_EL2 system register (MSR). */
static inline void _AArch64_Write_cnthp_ctl_el2( uint64_t value )
{
  __asm__ volatile (
    "msr CNTHP_CTL_EL2, %0" : : "r" ( value ) : "memory"
  );
}

/* CNTHP_CVAL_EL2, Counter-timer Physical Timer CompareValue Register (EL2) */

/* Reads the CNTHP_CVAL_EL2 system register (MRS). */
static inline uint64_t _AArch64_Read_cnthp_cval_el2( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, CNTHP_CVAL_EL2" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* Writes the CNTHP_CVAL_EL2 system register (MSR). */
static inline void _AArch64_Write_cnthp_cval_el2( uint64_t value )
{
  __asm__ volatile (
    "msr CNTHP_CVAL_EL2, %0" : : "r" ( value ) : "memory"
  );
}

/* CNTHP_TVAL_EL2, Counter-timer Physical Timer TimerValue Register (EL2) */

/* TIMERVALUE field occupies bits [31:0]. */
#define AARCH64_CNTHP_TVAL_EL2_TIMERVALUE( _val ) ( ( _val ) << 0 )
#define AARCH64_CNTHP_TVAL_EL2_TIMERVALUE_SHIFT 0
#define AARCH64_CNTHP_TVAL_EL2_TIMERVALUE_MASK 0xffffffffU
#define AARCH64_CNTHP_TVAL_EL2_TIMERVALUE_GET( _reg ) \
  ( ( ( _reg ) >> 0 ) & 0xffffffffU )

/* Reads the CNTHP_TVAL_EL2 system register (MRS). */
static inline uint64_t _AArch64_Read_cnthp_tval_el2( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, CNTHP_TVAL_EL2" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* Writes the CNTHP_TVAL_EL2 system register (MSR). */
static inline void _AArch64_Write_cnthp_tval_el2( uint64_t value )
{
  __asm__ volatile (
    "msr CNTHP_TVAL_EL2, %0" : : "r" ( value ) : "memory"
  );
}

/* CNTHPS_CTL_EL2, Counter-timer Secure Physical Timer Control Register (EL2) */

#define AARCH64_CNTHPS_CTL_EL2_ENABLE 0x1U

#define AARCH64_CNTHPS_CTL_EL2_IMASK 0x2U

#define AARCH64_CNTHPS_CTL_EL2_ISTATUS 0x4U

/* Reads the CNTHPS_CTL_EL2 system register (MRS). */
static inline uint64_t _AArch64_Read_cnthps_ctl_el2( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, CNTHPS_CTL_EL2" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* Writes the CNTHPS_CTL_EL2 system register (MSR). */
static inline void _AArch64_Write_cnthps_ctl_el2( uint64_t value )
{
  __asm__ volatile (
    "msr CNTHPS_CTL_EL2, %0" : : "r" ( value ) : "memory"
  );
}

/* CNTHPS_CVAL_EL2, Counter-timer Secure Physical Timer CompareValue Register (EL2) */

/* Reads the CNTHPS_CVAL_EL2 system register (MRS). */
static inline uint64_t _AArch64_Read_cnthps_cval_el2( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, CNTHPS_CVAL_EL2" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* Writes the CNTHPS_CVAL_EL2 system register (MSR). */
static inline void _AArch64_Write_cnthps_cval_el2( uint64_t value )
{
  __asm__ volatile (
    "msr CNTHPS_CVAL_EL2, %0" : : "r" ( value ) : "memory"
  );
}

/* CNTHPS_TVAL_EL2, Counter-timer Secure Physical Timer TimerValue Register (EL2) */

/* TIMERVALUE field occupies bits [31:0]. */
#define AARCH64_CNTHPS_TVAL_EL2_TIMERVALUE( _val ) ( ( _val ) << 0 )
#define AARCH64_CNTHPS_TVAL_EL2_TIMERVALUE_SHIFT 0
#define AARCH64_CNTHPS_TVAL_EL2_TIMERVALUE_MASK 0xffffffffU
#define AARCH64_CNTHPS_TVAL_EL2_TIMERVALUE_GET( _reg ) \
  ( ( ( _reg ) >> 0 ) & 0xffffffffU )

/* Reads the CNTHPS_TVAL_EL2 system register (MRS). */
static inline uint64_t _AArch64_Read_cnthps_tval_el2( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, CNTHPS_TVAL_EL2" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* Writes the CNTHPS_TVAL_EL2 system register (MSR). */
static inline void _AArch64_Write_cnthps_tval_el2( uint64_t value )
{
  __asm__ volatile (
    "msr CNTHPS_TVAL_EL2, %0" : : "r" ( value ) : "memory"
  );
}
10588
/* CNTHV_CTL_EL2, Counter-timer Virtual Timer Control Register (EL2) */

#define AARCH64_CNTHV_CTL_EL2_ENABLE 0x1U

#define AARCH64_CNTHV_CTL_EL2_IMASK 0x2U

#define AARCH64_CNTHV_CTL_EL2_ISTATUS 0x4U

/* Reads the CNTHV_CTL_EL2 system register (MRS). */
static inline uint64_t _AArch64_Read_cnthv_ctl_el2( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, CNTHV_CTL_EL2" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* Writes the CNTHV_CTL_EL2 system register (MSR). */
static inline void _AArch64_Write_cnthv_ctl_el2( uint64_t value )
{
  __asm__ volatile (
    "msr CNTHV_CTL_EL2, %0" : : "r" ( value ) : "memory"
  );
}

/* CNTHV_CVAL_EL2, Counter-timer Virtual Timer CompareValue Register (EL2) */

/* Reads the CNTHV_CVAL_EL2 system register (MRS). */
static inline uint64_t _AArch64_Read_cnthv_cval_el2( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, CNTHV_CVAL_EL2" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* Writes the CNTHV_CVAL_EL2 system register (MSR). */
static inline void _AArch64_Write_cnthv_cval_el2( uint64_t value )
{
  __asm__ volatile (
    "msr CNTHV_CVAL_EL2, %0" : : "r" ( value ) : "memory"
  );
}

/* CNTHV_TVAL_EL2, Counter-timer Virtual Timer TimerValue Register (EL2) */

/* TIMERVALUE field occupies bits [31:0]. */
#define AARCH64_CNTHV_TVAL_EL2_TIMERVALUE( _val ) ( ( _val ) << 0 )
#define AARCH64_CNTHV_TVAL_EL2_TIMERVALUE_SHIFT 0
#define AARCH64_CNTHV_TVAL_EL2_TIMERVALUE_MASK 0xffffffffU
#define AARCH64_CNTHV_TVAL_EL2_TIMERVALUE_GET( _reg ) \
  ( ( ( _reg ) >> 0 ) & 0xffffffffU )

/* Reads the CNTHV_TVAL_EL2 system register (MRS). */
static inline uint64_t _AArch64_Read_cnthv_tval_el2( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, CNTHV_TVAL_EL2" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* Writes the CNTHV_TVAL_EL2 system register (MSR). */
static inline void _AArch64_Write_cnthv_tval_el2( uint64_t value )
{
  __asm__ volatile (
    "msr CNTHV_TVAL_EL2, %0" : : "r" ( value ) : "memory"
  );
}

/* CNTHVS_CTL_EL2, Counter-timer Secure Virtual Timer Control Register (EL2) */

#define AARCH64_CNTHVS_CTL_EL2_ENABLE 0x1U

#define AARCH64_CNTHVS_CTL_EL2_IMASK 0x2U

#define AARCH64_CNTHVS_CTL_EL2_ISTATUS 0x4U

/* Reads the CNTHVS_CTL_EL2 system register (MRS). */
static inline uint64_t _AArch64_Read_cnthvs_ctl_el2( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, CNTHVS_CTL_EL2" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* Writes the CNTHVS_CTL_EL2 system register (MSR). */
static inline void _AArch64_Write_cnthvs_ctl_el2( uint64_t value )
{
  __asm__ volatile (
    "msr CNTHVS_CTL_EL2, %0" : : "r" ( value ) : "memory"
  );
}

/* CNTHVS_CVAL_EL2, Counter-timer Secure Virtual Timer CompareValue Register (EL2) */

/* Reads the CNTHVS_CVAL_EL2 system register (MRS). */
static inline uint64_t _AArch64_Read_cnthvs_cval_el2( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, CNTHVS_CVAL_EL2" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* Writes the CNTHVS_CVAL_EL2 system register (MSR). */
static inline void _AArch64_Write_cnthvs_cval_el2( uint64_t value )
{
  __asm__ volatile (
    "msr CNTHVS_CVAL_EL2, %0" : : "r" ( value ) : "memory"
  );
}

/* CNTHVS_TVAL_EL2, Counter-timer Secure Virtual Timer TimerValue Register (EL2) */

/* TIMERVALUE field occupies bits [31:0]. */
#define AARCH64_CNTHVS_TVAL_EL2_TIMERVALUE( _val ) ( ( _val ) << 0 )
#define AARCH64_CNTHVS_TVAL_EL2_TIMERVALUE_SHIFT 0
#define AARCH64_CNTHVS_TVAL_EL2_TIMERVALUE_MASK 0xffffffffU
#define AARCH64_CNTHVS_TVAL_EL2_TIMERVALUE_GET( _reg ) \
  ( ( ( _reg ) >> 0 ) & 0xffffffffU )

/* Reads the CNTHVS_TVAL_EL2 system register (MRS). */
static inline uint64_t _AArch64_Read_cnthvs_tval_el2( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, CNTHVS_TVAL_EL2" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* Writes the CNTHVS_TVAL_EL2 system register (MSR). */
static inline void _AArch64_Write_cnthvs_tval_el2( uint64_t value )
{
  __asm__ volatile (
    "msr CNTHVS_TVAL_EL2, %0" : : "r" ( value ) : "memory"
  );
}
10732
/* CNTKCTL_EL1, Counter-timer Kernel Control Register */

#define AARCH64_CNTKCTL_EL1_EL0PCTEN 0x1U

#define AARCH64_CNTKCTL_EL1_EL0VCTEN 0x2U

#define AARCH64_CNTKCTL_EL1_EVNTEN 0x4U

#define AARCH64_CNTKCTL_EL1_EVNTDIR 0x8U

/* EVNTI field occupies bits [7:4]. */
#define AARCH64_CNTKCTL_EL1_EVNTI( _val ) ( ( _val ) << 4 )
#define AARCH64_CNTKCTL_EL1_EVNTI_SHIFT 4
#define AARCH64_CNTKCTL_EL1_EVNTI_MASK 0xf0U
#define AARCH64_CNTKCTL_EL1_EVNTI_GET( _reg ) \
  ( ( ( _reg ) >> 4 ) & 0xfU )

#define AARCH64_CNTKCTL_EL1_EL0VTEN 0x100U

#define AARCH64_CNTKCTL_EL1_EL0PTEN 0x200U

#define AARCH64_CNTKCTL_EL1_EVNTIS 0x20000U

/* Reads the CNTKCTL_EL1 system register (MRS). */
static inline uint64_t _AArch64_Read_cntkctl_el1( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, CNTKCTL_EL1" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* Writes the CNTKCTL_EL1 system register (MSR). */
static inline void _AArch64_Write_cntkctl_el1( uint64_t value )
{
  __asm__ volatile (
    "msr CNTKCTL_EL1, %0" : : "r" ( value ) : "memory"
  );
}

/* CNTP_CTL_EL0, Counter-timer Physical Timer Control Register */

#define AARCH64_CNTP_CTL_EL0_ENABLE 0x1U

#define AARCH64_CNTP_CTL_EL0_IMASK 0x2U

#define AARCH64_CNTP_CTL_EL0_ISTATUS 0x4U

/* Reads the CNTP_CTL_EL0 system register (MRS). */
static inline uint64_t _AArch64_Read_cntp_ctl_el0( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, CNTP_CTL_EL0" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* Writes the CNTP_CTL_EL0 system register (MSR). */
static inline void _AArch64_Write_cntp_ctl_el0( uint64_t value )
{
  __asm__ volatile (
    "msr CNTP_CTL_EL0, %0" : : "r" ( value ) : "memory"
  );
}

/* CNTP_CVAL_EL0, Counter-timer Physical Timer CompareValue Register */

/* Reads the CNTP_CVAL_EL0 system register (MRS). */
static inline uint64_t _AArch64_Read_cntp_cval_el0( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, CNTP_CVAL_EL0" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* Writes the CNTP_CVAL_EL0 system register (MSR). */
static inline void _AArch64_Write_cntp_cval_el0( uint64_t value )
{
  __asm__ volatile (
    "msr CNTP_CVAL_EL0, %0" : : "r" ( value ) : "memory"
  );
}

/* CNTP_TVAL_EL0, Counter-timer Physical Timer TimerValue Register */

/* TIMERVALUE field occupies bits [31:0]. */
#define AARCH64_CNTP_TVAL_EL0_TIMERVALUE( _val ) ( ( _val ) << 0 )
#define AARCH64_CNTP_TVAL_EL0_TIMERVALUE_SHIFT 0
#define AARCH64_CNTP_TVAL_EL0_TIMERVALUE_MASK 0xffffffffU
#define AARCH64_CNTP_TVAL_EL0_TIMERVALUE_GET( _reg ) \
  ( ( ( _reg ) >> 0 ) & 0xffffffffU )

/* Reads the CNTP_TVAL_EL0 system register (MRS). */
static inline uint64_t _AArch64_Read_cntp_tval_el0( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, CNTP_TVAL_EL0" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* Writes the CNTP_TVAL_EL0 system register (MSR). */
static inline void _AArch64_Write_cntp_tval_el0( uint64_t value )
{
  __asm__ volatile (
    "msr CNTP_TVAL_EL0, %0" : : "r" ( value ) : "memory"
  );
}
10844
/* CNTPCTSS_EL0, Counter-timer Self-Synchronized Physical Count Register */

/* Reads the CNTPCTSS_EL0 system register (MRS); the count registers
   are read-only, so no write accessor is provided. */
static inline uint64_t _AArch64_Read_cntpctss_el0( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, CNTPCTSS_EL0" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* CNTPCT_EL0, Counter-timer Physical Count Register */

/* Reads the CNTPCT_EL0 system register (MRS). */
static inline uint64_t _AArch64_Read_cntpct_el0( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, CNTPCT_EL0" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* CNTPS_CTL_EL1, Counter-timer Physical Secure Timer Control Register */

#define AARCH64_CNTPS_CTL_EL1_ENABLE 0x1U

#define AARCH64_CNTPS_CTL_EL1_IMASK 0x2U

#define AARCH64_CNTPS_CTL_EL1_ISTATUS 0x4U

/* Reads the CNTPS_CTL_EL1 system register (MRS). */
static inline uint64_t _AArch64_Read_cntps_ctl_el1( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, CNTPS_CTL_EL1" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* Writes the CNTPS_CTL_EL1 system register (MSR). */
static inline void _AArch64_Write_cntps_ctl_el1( uint64_t value )
{
  __asm__ volatile (
    "msr CNTPS_CTL_EL1, %0" : : "r" ( value ) : "memory"
  );
}

/* CNTPOFF_EL2, Counter-timer Physical Offset Register */

/* Reads the CNTPOFF_EL2 system register (MRS). */
static inline uint64_t _AArch64_Read_cntpoff_el2( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, CNTPOFF_EL2" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* Writes the CNTPOFF_EL2 system register (MSR). */
static inline void _AArch64_Write_cntpoff_el2( uint64_t value )
{
  __asm__ volatile (
    "msr CNTPOFF_EL2, %0" : : "r" ( value ) : "memory"
  );
}

/* CNTPS_CVAL_EL1, Counter-timer Physical Secure Timer CompareValue Register */

/* Reads the CNTPS_CVAL_EL1 system register (MRS). */
static inline uint64_t _AArch64_Read_cntps_cval_el1( void )
{
  uint64_t value;

  __asm__ volatile (
    "mrs %0, CNTPS_CVAL_EL1" : "=&r" ( value ) : : "memory"
  );

  return value;
}

/* Writes the CNTPS_CVAL_EL1 system register (MSR). */
static inline void _AArch64_Write_cntps_cval_el1( uint64_t value )
{
  __asm__ volatile (
    "msr CNTPS_CVAL_EL1, %0" : : "r" ( value ) : "memory"
  );
}
10936
/* CNTPS_TVAL_EL1, Counter-timer Physical Secure Timer TimerValue Register */

/* TimerValue field, bits [31:0] of the register. */
#define AARCH64_CNTPS_TVAL_EL1_TIMERVALUE( _val ) ( ( _val ) << 0 )
#define AARCH64_CNTPS_TVAL_EL1_TIMERVALUE_SHIFT 0
#define AARCH64_CNTPS_TVAL_EL1_TIMERVALUE_MASK 0xffffffffU
#define AARCH64_CNTPS_TVAL_EL1_TIMERVALUE_GET( _reg ) \
  ( ( ( _reg ) >> 0 ) & 0xffffffffU )
10944
/**
 * @brief Returns the current value of the CNTPS_TVAL_EL1 system register.
 */
static inline uint64_t _AArch64_Read_cntps_tval_el1( void )
{
  uint64_t val;

  /* The timer value counts down in hardware, so the asm is volatile with a
     "memory" clobber to force a fresh read each time. */
  __asm__ volatile ( "mrs %0, CNTPS_TVAL_EL1" : "=&r" ( val ) : : "memory" );

  return val;
}
10955
/**
 * @brief Writes @a value to the CNTPS_TVAL_EL1 system register.
 *
 * @param value the new timer value (see
 *   AARCH64_CNTPS_TVAL_EL1_TIMERVALUE()).
 */
static inline void _AArch64_Write_cntps_tval_el1( uint64_t value )
{
  __asm__ volatile (
    "msr CNTPS_TVAL_EL1, %0" : : "r" ( value ) : "memory"
  );
}
10962
/* CNTV_CTL_EL0, Counter-timer Virtual Timer Control Register */

/* Bit 0: timer enable. */
#define AARCH64_CNTV_CTL_EL0_ENABLE 0x1U

/* Bit 1: timer interrupt mask. */
#define AARCH64_CNTV_CTL_EL0_IMASK 0x2U

/* Bit 2: timer interrupt status (field semantics per the Arm ARM). */
#define AARCH64_CNTV_CTL_EL0_ISTATUS 0x4U
10970
/**
 * @brief Returns the current value of the CNTV_CTL_EL0 system register.
 */
static inline uint64_t _AArch64_Read_cntv_ctl_el0( void )
{
  uint64_t val;

  /* Volatile read with "memory" clobber: the register state may be changed
     by hardware or other writers at any time. */
  __asm__ volatile ( "mrs %0, CNTV_CTL_EL0" : "=&r" ( val ) : : "memory" );

  return val;
}
10981
/**
 * @brief Writes @a value to the CNTV_CTL_EL0 system register.
 *
 * @param value the new register value (see the
 *   AARCH64_CNTV_CTL_EL0_* bit definitions above).
 */
static inline void _AArch64_Write_cntv_ctl_el0( uint64_t value )
{
  __asm__ volatile (
    "msr CNTV_CTL_EL0, %0" : : "r" ( value ) : "memory"
  );
}
10988
/* CNTV_CVAL_EL0, Counter-timer Virtual Timer CompareValue Register */

/**
 * @brief Returns the current value of the CNTV_CVAL_EL0 system register.
 */
static inline uint64_t _AArch64_Read_cntv_cval_el0( void )
{
  uint64_t val;

  /* Volatile read with "memory" clobber so the access is neither cached
     nor reordered by the compiler. */
  __asm__ volatile ( "mrs %0, CNTV_CVAL_EL0" : "=&r" ( val ) : : "memory" );

  return val;
}
11001
/**
 * @brief Writes @a value to the CNTV_CVAL_EL0 system register.
 *
 * @param value the new compare value.
 */
static inline void _AArch64_Write_cntv_cval_el0( uint64_t value )
{
  __asm__ volatile (
    "msr CNTV_CVAL_EL0, %0" : : "r" ( value ) : "memory"
  );
}
11008
/* CNTV_TVAL_EL0, Counter-timer Virtual Timer TimerValue Register */

/* TimerValue field, bits [31:0] of the register. */
#define AARCH64_CNTV_TVAL_EL0_TIMERVALUE( _val ) ( ( _val ) << 0 )
#define AARCH64_CNTV_TVAL_EL0_TIMERVALUE_SHIFT 0
#define AARCH64_CNTV_TVAL_EL0_TIMERVALUE_MASK 0xffffffffU
#define AARCH64_CNTV_TVAL_EL0_TIMERVALUE_GET( _reg ) \
  ( ( ( _reg ) >> 0 ) & 0xffffffffU )
11016
/**
 * @brief Returns the current value of the CNTV_TVAL_EL0 system register.
 */
static inline uint64_t _AArch64_Read_cntv_tval_el0( void )
{
  uint64_t val;

  /* The timer value counts down in hardware, so the asm is volatile with a
     "memory" clobber to force a fresh read each time. */
  __asm__ volatile ( "mrs %0, CNTV_TVAL_EL0" : "=&r" ( val ) : : "memory" );

  return val;
}
11027
/**
 * @brief Writes @a value to the CNTV_TVAL_EL0 system register.
 *
 * @param value the new timer value (see
 *   AARCH64_CNTV_TVAL_EL0_TIMERVALUE()).
 */
static inline void _AArch64_Write_cntv_tval_el0( uint64_t value )
{
  __asm__ volatile (
    "msr CNTV_TVAL_EL0, %0" : : "r" ( value ) : "memory"
  );
}
11034
/* CNTVCTSS_EL0, Counter-timer Self-Synchronized Virtual Count Register */

/**
 * @brief Returns the current value of the CNTVCTSS_EL0 system register.
 */
static inline uint64_t _AArch64_Read_cntvctss_el0( void )
{
  uint64_t val;

  /* The counter advances between reads, so the asm is volatile with a
     "memory" clobber to keep the compiler from caching or reordering it. */
  __asm__ volatile ( "mrs %0, CNTVCTSS_EL0" : "=&r" ( val ) : : "memory" );

  return val;
}
11047
/* CNTVCT_EL0, Counter-timer Virtual Count Register */

/**
 * @brief Returns the current value of the CNTVCT_EL0 system register.
 */
static inline uint64_t _AArch64_Read_cntvct_el0( void )
{
  uint64_t val;

  /* The counter advances between reads, so the asm is volatile with a
     "memory" clobber to keep the compiler from caching or reordering it. */
  __asm__ volatile ( "mrs %0, CNTVCT_EL0" : "=&r" ( val ) : : "memory" );

  return val;
}
11060
11061#ifdef __cplusplus
11062}
11063#endif
11064
11065#endif /* _RTEMS_SCORE_AARCH64_SYSTEM_REGISTERS_H */