/*
 * SPARC CPU definitions for the RTEMS SuperCore (score).
 *
 * NOTE(review): this is an excerpt of a larger header — several
 * preprocessor lines (#else/#endif branches between the paired
 * definitions below) are elided from this chunk.
 */
#ifndef _RTEMS_SCORE_SPARC_H
#define _RTEMS_SCORE_SPARC_H

/* This port reports no hardware bit-scan (find-first-set) capability. */
#define SPARC_HAS_BITSCAN 0

/* Number of implemented register windows; 8 is the common SPARC value. */
#define SPARC_NUMBER_OF_REGISTER_WINDOWS 8

/* FPU availability is keyed off the compiler's -msoft-float (_SOFT_FLOAT). */
#if defined(_SOFT_FLOAT)
#define SPARC_HAS_FPU 0
/* NOTE(review): the #else separating these two definitions is elided. */
#define SPARC_HAS_FPU 1

/* NOTE(review): the conditionals selecting one of these names are elided. */
#define CPU_MODEL_NAME "w/FPU"
#define CPU_MODEL_NAME "w/soft-float"

#define CPU_NAME "SPARC"
/*
 * PSTATE (SPARC V9 processor state register) field masks.  Each *_MASK
 * equals 1 << the matching *_BIT_POSITION defined below.
 */
#define SPARC_PSTATE_AG_MASK 0x00000001   /* alternate globals        */
#define SPARC_PSTATE_IE_MASK 0x00000002   /* interrupt enable         */
#define SPARC_PSTATE_PRIV_MASK 0x00000004 /* privileged mode          */
#define SPARC_PSTATE_AM_MASK 0x00000008   /* address mask (32-bit)    */
#define SPARC_PSTATE_PEF_MASK 0x00000010  /* enable floating-point    */
/* NOTE(review): SPARC V9 defines PSTATE.MM as a two-bit field (bits 6:5);
 * this mask covers only bit 6 — confirm that is the intended usage. */
#define SPARC_PSTATE_MM_MASK 0x00000040
#define SPARC_PSTATE_TLE_MASK 0x00000100  /* trap little-endian       */
#define SPARC_PSTATE_CLE_MASK 0x00000200  /* current little-endian    */

/* Bit positions corresponding to the masks above (0-based shift counts). */
#define SPARC_PSTATE_AG_BIT_POSITION 0
#define SPARC_PSTATE_IE_BIT_POSITION 1
#define SPARC_PSTATE_PRIV_BIT_POSITION 2
#define SPARC_PSTATE_AM_BIT_POSITION 3
#define SPARC_PSTATE_PEF_BIT_POSITION 4
#define SPARC_PSTATE_MM_BIT_POSITION 6
#define SPARC_PSTATE_TLE_BIT_POSITION 8
#define SPARC_PSTATE_CLE_BIT_POSITION 9
/*
 * FPRS (floating-point registers state) fields.  Per the SPARC V9
 * architecture, FPRS.FEF (enable floating-point) is bit 2, so the mask is
 * 1 << 2.  The previous mask (0x0100) contradicted the declared bit
 * position and, when OR-ed into %fprs (see sparc64_enable_FPU), would
 * never actually set FEF.
 */
#define SPARC_FPRS_FEF_MASK 0x00000004
#define SPARC_FPRS_FEF_BIT_POSITION 2
/* IE bit within the PSTATE image saved in TSTATE: TSTATE holds PSTATE
 * shifted left by 8, so PSTATE.IE (bit 1) appears at bit 9 = 0x200.
 * NOTE(review): consistent with SPARC_PSTATE_IE_MASK above — confirm
 * against the V9 TSTATE layout. */
#define SPARC_TSTATE_IE_MASK 0x00000200
/*
 * SOFTINT soft-interrupt register fields (SPARC V9 / UltraSPARC).
 * TM (%tick compare, TICK_INT) is bit 0 and SM (%stick compare, STICK_INT)
 * is bit 16, matching the masks below.  The *_BIT_POSITION values were
 * previously 1 and 17 — off by one relative to the masks and to the
 * 0-based convention used by the PSTATE definitions above.
 */
#define SPARC_SOFTINT_TM_MASK 0x00000001
#define SPARC_SOFTINT_SM_MASK 0x00010000
#define SPARC_SOFTINT_TM_BIT_POSITION 0
#define SPARC_SOFTINT_SM_BIT_POSITION 16
/* SPARC V9 ABI stack bias: %sp and %fp point 2047 bytes below the actual
 * stack frame; add STACK_BIAS to recover the true frame address. */
#define STACK_BIAS (2047)
/*
 * Assembly-source macro (raw mnemonics, not C — usable only from .S
 * files): enable the FPU for the executing context using scratch register
 * rtmp1.  Reads %pstate, sets PSTATE.PEF, writes it back, then prepares
 * FPRS.FEF.
 * NOTE(review): interior and trailing lines of this macro (the rd/wr of
 * %fprs between the wrpr and the final or) are elided from this chunk.
 */
#define sparc64_enable_FPU(rtmp1) \
	rdpr %pstate, rtmp1; \
	or rtmp1, SPARC_PSTATE_PEF_MASK, rtmp1; \
	wrpr %g0, rtmp1, %pstate; \
	or rtmp1, SPARC_FPRS_FEF_MASK, rtmp1; \
/* NOTE(review): interior line of a multi-line macro (likely a nop()
 * helper) whose #define and closing lines are elided from this chunk. */
  __asm__ volatile ( "nop" ); \
/*
 * Read the privileged %pstate register into _pstate.
 *
 * _pstate is a pure output ("=r"); the original additionally tied a "0"
 * input to it, which forced the compiler to treat the (typically
 * uninitialized) destination as read before the rdpr.
 */
#define sparc64_get_pstate( _pstate ) \
  do { \
    __asm__ volatile( "rdpr %%pstate, %0" : "=r" (_pstate) ); \
  } while ( 0 )
/*
 * Write _pstate to the privileged %pstate register.
 *
 * The value is only consumed by the wrpr, so it is a plain "r" input; the
 * original declared it as an output ("=r" with a tied "0" input), falsely
 * telling the compiler the macro modifies _pstate.
 */
#define sparc64_set_pstate( _pstate ) \
  do { \
    __asm__ volatile( "wrpr %%g0, %0, %%pstate" : : "r" (_pstate) ); \
  } while ( 0 )
/*
 * Read the privileged %pil (processor interrupt level) register into _pil.
 *
 * _pil is a pure output; the original's tied "0" input needlessly read the
 * uninitialized destination first.
 */
#define sparc64_get_pil( _pil ) \
  do { \
    __asm__ volatile( "rdpr %%pil, %0" : "=r" (_pil) ); \
  } while ( 0 )
/*
 * Write _pil to the privileged %pil register (masks interrupts at or below
 * that level).
 *
 * _pil is only read by the wrpr, so it is a plain input operand; the
 * original's "=r" output constraint was incorrect.
 */
#define sparc64_set_pil( _pil ) \
  do { \
    __asm__ volatile( "wrpr %%g0, %0, %%pil" : : "r" (_pil) ); \
  } while ( 0 )
/*
 * Read the privileged %tba (trap base address) register into _tba.
 *
 * _tba is a pure output; the original's tied "0" input is dropped.
 */
#define sparc64_get_tba( _tba ) \
  do { \
    __asm__ volatile( "rdpr %%tba, %0" : "=r" (_tba) ); \
  } while ( 0 )
/*
 * Write _tba to the privileged %tba (trap base address) register.
 *
 * _tba is only read by the instruction — plain "r" input instead of the
 * original's incorrect "=r" output constraint.
 */
#define sparc64_set_tba( _tba ) \
  do { \
    __asm__ volatile( "wrpr %%g0, %0, %%tba" : : "r" (_tba) ); \
  } while ( 0 )
/*
 * Read the privileged %tl (trap level) register into _tl.
 *
 * _tl is a pure output; the original's tied "0" input is dropped.
 */
#define sparc64_get_tl( _tl ) \
  do { \
    __asm__ volatile( "rdpr %%tl, %0" : "=r" (_tl) ); \
  } while ( 0 )
/*
 * Write _tl to the privileged %tl (trap level) register.
 *
 * _tl is only read by the instruction — plain "r" input instead of the
 * original's incorrect "=r" output constraint.
 */
#define sparc64_set_tl( _tl ) \
  do { \
    __asm__ volatile( "wrpr %%g0, %0, %%tl" : : "r" (_tl) ); \
  } while ( 0 )
/*
 * Read the %stick (system tick) counter into _stick.
 *
 * _stick is a pure output; the original's tied "0" input forced a read of
 * the uninitialized destination.
 */
#define sparc64_read_stick( _stick ) \
  do { \
    __asm__ volatile( "rd %%stick, %0" : "=r" (_stick) ); \
  } while ( 0 )
/*
 * Write _stick_cmpr to the %stick_cmpr (system tick compare) register.
 *
 * The value is only read by the wr instruction — plain "r" input instead
 * of the original's incorrect "=r" output constraint.
 */
#define sparc64_write_stick_cmpr( _stick_cmpr ) \
  do { \
    __asm__ volatile( "wr %%g0, %0, %%stick_cmpr" : : "r" (_stick_cmpr) ); \
  } while ( 0 )
/*
 * Read the %tick counter into _tick.
 *
 * _tick is a pure output; the original's tied "0" input is dropped.
 */
#define sparc64_read_tick( _tick ) \
  do { \
    __asm__ volatile( "rd %%tick, %0" : "=r" (_tick) ); \
  } while ( 0 )
/*
 * Write _tick_cmpr to the %tick_cmpr (tick compare) register.
 *
 * The value is only read by the wr instruction — plain "r" input instead
 * of the original's incorrect "=r" output constraint.
 */
#define sparc64_write_tick_cmpr( _tick_cmpr ) \
  do { \
    __asm__ volatile( "wr %%g0, %0, %%tick_cmpr" : : "r" (_tick_cmpr) ); \
  } while ( 0 )
/*
 * Clear the SOFTINT bits selected by _bit_mask by writing the
 * %clear_softint pseudo-register (e.g. SPARC_SOFTINT_TM_MASK to clear a
 * pending tick-compare interrupt).
 *
 * _bit_mask is only read by the instruction — plain "r" input instead of
 * the original's incorrect "=r" output constraint.
 */
#define sparc64_clear_interrupt_bits( _bit_mask ) \
  do { \
    __asm__ volatile( "wr %%g0, %0, %%clear_softint" : : "r" (_bit_mask) ); \
  } while ( 0 )
/*
 * Read the %y register into _y.
 *
 * _y is a pure output; the original's tied "0" input forced a read of the
 * uninitialized destination.
 */
#define sparc_get_y( _y ) \
  do { \
    __asm__ volatile( "rd %%y, %0" : "=r" (_y) ); \
  } while ( 0 )
/*
 * Write _y to the %y register.
 *
 * The wr instruction takes two source operands (rs1 xor rs2); the original
 * relied on the assembler's two-operand shorthand "wr %0, %%y", spelled
 * out here as "wr %0, %%g0, %%y" (x xor 0 == x).  _y is only read, so it
 * is a plain "r" input instead of the original's incorrect "=r" output.
 */
#define sparc_set_y( _y ) \
  do { \
    __asm__ volatile( "wr %0, %%g0, %%y" : : "r" (_y) ); \
  } while ( 0 )
/*
 * Disable all maskable interrupts and return the previous interrupt level,
 * suitable for passing back to sparc_enable_interrupts() (see
 * sparc_flash_interrupts() below).  Implemented elsewhere.
 * NOTE(review): the exact encoding of the returned level is not visible in
 * this chunk.
 */
uint32_t sparc_disable_interrupts(
void);

/* Restore the interrupt level previously returned by
 * sparc_disable_interrupts(). */
void sparc_enable_interrupts(uint32_t);
/*
 * Momentarily open the interrupt window: re-enable interrupts at _level,
 * then immediately disable them again, letting any pending interrupt be
 * serviced.
 * NOTE(review): the macro's enclosing block and the declaration of
 * _ignored are on lines elided from this chunk.
 */
#define sparc_flash_interrupts( _level ) \
    sparc_enable_interrupts( (_level) ); \
    _ignored = sparc_disable_interrupts(); \
/*
 * Store the current interrupt level into _level; on SPARC V9 this is the
 * %pil register (delegates to sparc64_get_pil()).
 * NOTE(review): the closing lines of this macro are elided from this
 * chunk.
 */
#define sparc64_get_interrupt_level( _level ) \
  sparc64_get_pil( _level ); \