/*
 * Include guard for the SPARC/SPARC64 CPU definitions header.
 * NOTE(review): the guard identifier begins with underscore + uppercase,
 * which is reserved for the implementation (C11 7.1.3); kept as-is since
 * other files test this exact name.
 */
23#ifndef _RTEMS_SCORE_SPARC_H
24#define _RTEMS_SCORE_SPARC_H
/* Non-zero when the CPU model provides a bit-scan instruction; this model
 * does not, so bit scanning is done in software. */
59#define SPARC_HAS_BITSCAN 0
/* Number of SPARC register windows implemented by this CPU model. */
71#define SPARC_NUMBER_OF_REGISTER_WINDOWS 8
/*
 * Hardware-FPU availability is derived from the compiler multilib:
 * _SOFT_FLOAT is predefined when compiling for software floating point.
 * NOTE(review): the matching #else/#endif directives are not visible in
 * this chunk — the two SPARC_HAS_FPU definitions below are the two arms
 * of that conditional, not a redefinition.
 */
79#if defined(_SOFT_FLOAT)
80#define SPARC_HAS_FPU 0
82#define SPARC_HAS_FPU 1
/* Human-readable model suffix, selected on SPARC_HAS_FPU; the surrounding
 * conditional directives are outside this view. */
86#define CPU_MODEL_NAME "w/FPU"
88#define CPU_MODEL_NAME "w/soft-float"
/* Architecture name reported by the port. */
95#define CPU_NAME "SPARC"
/*
 * SPARC V9 PSTATE register fields: one single-bit mask per field plus the
 * corresponding bit position.  For every PSTATE pair below,
 * mask == (1 << position), matching the V9 field layout (AG=0, IE=1,
 * PRIV=2, AM=3, PEF=4, MM low bit=6, TLE=8, CLE=9).
 */
118#define SPARC_PSTATE_AG_MASK 0x00000001   /* alternate globals active */
119#define SPARC_PSTATE_IE_MASK 0x00000002   /* interrupt enable */
120#define SPARC_PSTATE_PRIV_MASK 0x00000004 /* privileged mode */
121#define SPARC_PSTATE_AM_MASK 0x00000008   /* 32-bit address mask */
122#define SPARC_PSTATE_PEF_MASK 0x00000010  /* FPU enable (PSTATE side) */
123#define SPARC_PSTATE_MM_MASK 0x00000040   /* low bit of 2-bit memory model */
124#define SPARC_PSTATE_TLE_MASK 0x00000100  /* trap little-endian */
125#define SPARC_PSTATE_CLE_MASK 0x00000200  /* current little-endian */
127#define SPARC_PSTATE_AG_BIT_POSITION 0
128#define SPARC_PSTATE_IE_BIT_POSITION 1
129#define SPARC_PSTATE_PRIV_BIT_POSITION 2
130#define SPARC_PSTATE_AM_BIT_POSITION 3
131#define SPARC_PSTATE_PEF_BIT_POSITION 4
132#define SPARC_PSTATE_MM_BIT_POSITION 6
133#define SPARC_PSTATE_TLE_BIT_POSITION 8
134#define SPARC_PSTATE_CLE_BIT_POSITION 9
/*
 * FPRS.FEF: FPU enable flag in the FPRS ancillary state register.
 * NOTE(review): mask 0x0100 is bit 8, but the bit position below says 2
 * (1 << 2 == 0x4, which is FEF's position in the V9 FPRS layout) — the
 * two values disagree; confirm which one the assembly users rely on.
 */
136#define SPARC_FPRS_FEF_MASK 0x0100
137#define SPARC_FPRS_FEF_BIT_POSITION 2
/* TSTATE holds a saved PSTATE image shifted up; this selects its IE bit. */
139#define SPARC_TSTATE_IE_MASK 0x00000200
/*
 * SOFTINT register: TM = timer (TICK) interrupt, SM = system tick (STICK)
 * interrupt.  NOTE(review): here too mask and position disagree —
 * TM mask 0x1 is bit 0 but position says 1, SM mask 0x10000 is bit 16 but
 * position says 17.  Verify against the consumers before relying on the
 * *_BIT_POSITION values.
 */
141#define SPARC_SOFTINT_TM_MASK 0x00000001
142#define SPARC_SOFTINT_SM_MASK 0x00010000
143#define SPARC_SOFTINT_TM_BIT_POSITION 1
144#define SPARC_SOFTINT_SM_BIT_POSITION 17
/* SPARC V9 64-bit ABI stack bias: %sp/%fp point 2047 bytes below the
 * actual stack area. */
146#define STACK_BIAS (2047)
/*
 * sparc64_enable_FPU(rtmp1): assembly-source macro that sets PSTATE.PEF,
 * then (on lines not visible in this chunk, presumably via a read of
 * %fprs) ORs in SPARC_FPRS_FEF_MASK so floating-point instructions can
 * execute.  rtmp1 is a caller-supplied scratch register.
 * NOTE(review): the body is truncated in this view — the %fprs read/write
 * and the macro terminator are missing, and the trailing
 * '__asm__ volatile ( "nop" )' line belongs to a FOLLOWING macro whose
 * #define line is also outside this view.  Do not edit without the full file.
 */
154#define sparc64_enable_FPU(rtmp1) \
155 rdpr %pstate, rtmp1; \
156 or rtmp1, SPARC_PSTATE_PEF_MASK, rtmp1; \
157 wrpr %g0, rtmp1, %pstate; \
159 or rtmp1, SPARC_FPRS_FEF_MASK, rtmp1; \
173 __asm__ volatile ( "nop" ); \
/* sparc64_get_pstate(_pstate): read the privileged PSTATE register into  */ \
/* the lvalue _pstate.  The "0"-tied dummy input is this file's standing  */ \
/* constraint idiom for rd/wr asm; the do/while wrappers around each of   */ \
/* these macros sit on lines not visible in this chunk.                   */ \
180#define sparc64_get_pstate( _pstate ) \
183 __asm__ volatile( "rdpr %%pstate, %0" : "=r" (_pstate) : "0" (_pstate) ); \
/* sparc64_set_pstate(_pstate): write _pstate to PSTATE with wrpr; the    */ \
/* '__asm__ volatile(' opener is on a line outside this view.             */ \
186#define sparc64_set_pstate( _pstate ) \
189 "wrpr %%g0, %0, %%pstate " : "=r" ((_pstate)) : "0" ((_pstate)) ); \
/* sparc64_get_pil(_pil): read the Processor Interrupt Level register.    */ \
196#define sparc64_get_pil( _pil ) \
199 __asm__ volatile( "rdpr %%pil, %0" : "=r" (_pil) : "0" (_pil) ); \
/* sparc64_set_pil(_pil): set PIL; levels at or below _pil are masked.    */ \
202#define sparc64_set_pil( _pil ) \
204 __asm__ volatile ( "wrpr %%g0, %0, %%pil " : "=r" ((_pil)) : "0" ((_pil)) ); \
/* sparc64_get_tba(_tba): read the Trap Base Address register.            */ \
212#define sparc64_get_tba( _tba ) \
215 __asm__ volatile( "rdpr %%tba, %0" : "=r" (_tba) : "0" (_tba) ); \
/* sparc64_set_tba(_tba): install a new trap table base address.          */ \
218#define sparc64_set_tba( _tba ) \
220 __asm__ volatile( "wrpr %%g0, %0, %%tba" : "=r" (_tba) : "0" (_tba) ); \
/* sparc64_get_tl(_tl): read the current Trap Level.                      */ \
227#define sparc64_get_tl( _tl ) \
230 __asm__ volatile( "rdpr %%tl, %0" : "=r" (_tl) : "0" (_tl) ); \
/* sparc64_set_tl(_tl): write the Trap Level register.                    */ \
233#define sparc64_set_tl( _tl ) \
235 __asm__ volatile( "wrpr %%g0, %0, %%tl" : "=r" (_tl) : "0" (_tl) ); \
/* sparc64_read_stick(_stick): read the system tick (%stick) counter.     */ \
247#define sparc64_read_stick( _stick ) \
250 __asm__ volatile( "rd %%stick, %0" : "=r" (_stick) : "0" (_stick) ); \
/* sparc64_write_stick_cmpr(_stick_cmpr): arm the STICK compare register  */ \
/* (interrupt fires when %stick reaches this value).  NOTE(review): the   */ \
/* "=r" output constraint on a pure write follows this file's idiom but   */ \
/* the operand is only read — kept byte-identical.                        */ \
261#define sparc64_write_stick_cmpr( _stick_cmpr ) \
263 __asm__ volatile( "wr %%g0, %0, %%stick_cmpr" : "=r" (_stick_cmpr) \
264 : "0" (_stick_cmpr) ); \
/* sparc64_read_tick(_tick): read the per-CPU %tick cycle counter.        */ \
270#define sparc64_read_tick( _tick ) \
273 __asm__ volatile( "rd %%tick, %0" : "=r" (_tick) : "0" (_tick) ); \
/* sparc64_write_tick_cmpr(_tick_cmpr): arm the TICK compare register.    */ \
279#define sparc64_write_tick_cmpr( _tick_cmpr ) \
281 __asm__ volatile( "wr %%g0, %0, %%tick_cmpr" : "=r" (_tick_cmpr) \
282 : "0" (_tick_cmpr) ); \
/* sparc64_clear_interrupt_bits(_bit_mask): write _bit_mask to the        */ \
/* CLEAR_SOFTINT ancillary register to acknowledge pending soft           */ \
/* interrupts (e.g. SPARC_SOFTINT_TM_MASK / SPARC_SOFTINT_SM_MASK).       */ \
290#define sparc64_clear_interrupt_bits( _bit_mask ) \
292 __asm__ volatile( "wr %%g0, %0, %%clear_softint" : "=r" (_bit_mask) \
293 : "0" (_bit_mask)); \
/* sparc_get_y(_y): read the Y register (multiply/divide extension).      */ \
302#define sparc_get_y( _y ) \
304 __asm__ volatile( "rd %%y, %0" : "=r" (_y) : "0" (_y) ); \
/* sparc_set_y(_y): write _y to the Y register.  NOTE(review): the        */ \
/* two-operand "wr %0, %%y" relies on the assembler's synthetic form      */ \
/* (equivalent to "wr %0, %g0, %y") — confirm the toolchain accepts it.   */ \
307#define sparc_set_y( _y ) \
309 __asm__ volatile( "wr %0, %%y" : "=r" (_y) : "0" (_y) ); \
/*
 * Disable maskable interrupts and return the previous interrupt state so
 * it can later be restored.  Implemented outside this header (assembly or
 * a sibling source file).
 */
318uint32_t sparc_disable_interrupts(
void);
/* Restore the interrupt state previously returned by
 * sparc_disable_interrupts(). */
319void sparc_enable_interrupts(uint32_t);
/*
 * sparc_flash_interrupts(_level): briefly open an interrupt window —
 * restore the saved _level so pending interrupts can be serviced, then
 * disable again.  The '_ignored' local and the do/while wrapper are
 * declared on lines outside this view.
 */
321#define sparc_flash_interrupts( _level ) \
325 sparc_enable_interrupts( (_level) ); \
326 _ignored = sparc_disable_interrupts(); \
/* sparc64_get_interrupt_level(_level): the current interrupt level is    */ \
/* simply the PIL register value.                                         */ \
330#define sparc64_get_interrupt_level( _level ) \
333 sparc64_get_pil( _level ); \