#if (defined(__mc68020__) && !defined(__mcpu32__))
# define M68K_INSTRUCTION_CACHE_ALIGNMENT 16
#elif defined(__mc68030__)
# define M68K_INSTRUCTION_CACHE_ALIGNMENT 16
# define M68K_DATA_CACHE_ALIGNMENT 16
#elif ( defined(__mc68040__) || defined (__mc68060__) )
# define M68K_INSTRUCTION_CACHE_ALIGNMENT 16
# define M68K_DATA_CACHE_ALIGNMENT 16
#elif ( defined(__mcf5200__) )
# define M68K_INSTRUCTION_CACHE_ALIGNMENT 16
# if ( defined(__mcf528x__) )
#  define M68K_DATA_CACHE_ALIGNMENT 16
# endif
#elif ( defined(__mcf5300__) )
# define M68K_INSTRUCTION_CACHE_ALIGNMENT 16
# define M68K_DATA_CACHE_ALIGNMENT 16
#elif defined(__mcfv4e__)
# define M68K_INSTRUCTION_CACHE_ALIGNMENT 16
# define M68K_DATA_CACHE_ALIGNMENT 16
#endif
#if defined(M68K_DATA_CACHE_ALIGNMENT)
#define CPU_DATA_CACHE_ALIGNMENT M68K_DATA_CACHE_ALIGNMENT
#endif
#if defined(M68K_INSTRUCTION_CACHE_ALIGNMENT)
#define CPU_INSTRUCTION_CACHE_ALIGNMENT M68K_INSTRUCTION_CACHE_ALIGNMENT
#endif
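
/*
 *  All mc680x0 family members share a common cache control register (CACR).
 *  The helper macros below read the CACR with movec, apply a bit mask, and
 *  write the modified value back (a read-modify-write of the CACR).
 */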
/* Clear the CACR bits that are zero in mask. */
#define _CPU_CACR_AND(mask) \
  { \
  register unsigned long _value = mask; \
  register unsigned long _ctl = 0; \
  __asm__ volatile ( "movec %%cacr, %0; /* read the cacr */ \
                      andl %2, %0;      /* and with _val */ \
                      movec %1, %%cacr" /* write the cacr */ \
    : "=d" (_ctl) : "0" (_ctl), "d" (_value) : "%%cc" ); \
  }
/* Set the CACR bits that are one in mask. */
#define _CPU_CACR_OR(mask) \
  { \
  register unsigned long _value = mask; \
  register unsigned long _ctl = 0; \
  __asm__ volatile ( "movec %%cacr, %0; /* read the cacr */ \
                      orl %2, %0;       /* or with _val */ \
                      movec %1, %%cacr" /* write the cacr */ \
    : "=d" (_ctl) : "0" (_ctl), "d" (_value) : "%%cc" ); \
  }
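
/*
 * CACHE MANAGER: The routines below provide the CPU-specific implementation
 * behind the RTEMS cache manager directives.
 */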
#if ( (defined(__mc68020__) && !defined(__mcpu32__)) || defined(__mc68030__) )

#if defined(__mc68030__)
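
/* Only the mc68030 has an on-chip data cache; single-line invalidation goes
 * through the cache address register (CAAR). */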
void _CPU_cache_invalidate_1_data_line (
  const void * d_addr )
{
  void * p_address = (void *) _CPU_virtual_to_physical( d_addr );
  __asm__ volatile ( "movec %0, %%caar" :: "a" (p_address) ); /* point CAAR at the line */
  _CPU_CACR_OR( 0x00000400 );  /* CED: clear entry in data cache */
}

void _CPU_cache_invalidate_entire_data ( void ) { _CPU_CACR_OR( 0x00000800 ); }  /* CD: clear data cache */
void _CPU_cache_freeze_data ( void )            { _CPU_CACR_OR( 0x00000200 ); }  /* FD: freeze data cache */
void _CPU_cache_unfreeze_data ( void )          { _CPU_CACR_AND( 0xFFFFFDFF ); } /* clear FD */
void _CPU_cache_enable_data ( void )            { _CPU_CACR_OR( 0x00000100 ); }  /* ED: enable data cache */
void _CPU_cache_disable_data ( void )           { _CPU_CACR_AND( 0xFFFFFEFF ); } /* clear ED */
#endif
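
/* Both the mc68020 and mc68030 have an on-chip instruction cache. */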
void _CPU_cache_invalidate_1_instruction_line (
  const void * d_addr )
{
  void * p_address = (void *) _CPU_virtual_to_physical( d_addr );
  __asm__ volatile ( "movec %0, %%caar" :: "a" (p_address) ); /* point CAAR at the line */
  _CPU_CACR_OR( 0x00000004 );  /* CEI: clear entry in instruction cache */
}

void _CPU_cache_invalidate_entire_instruction ( void ) { _CPU_CACR_OR( 0x00000008 ); }  /* CI: clear instruction cache */
void _CPU_cache_freeze_instruction ( void )            { _CPU_CACR_OR( 0x00000002 ); }  /* FI: freeze instruction cache */
void _CPU_cache_unfreeze_instruction ( void )          { _CPU_CACR_AND( 0xFFFFFFFD ); } /* clear FI */
void _CPU_cache_enable_instruction ( void )            { _CPU_CACR_OR( 0x00000001 ); }  /* EI: enable instruction cache */
void _CPU_cache_disable_instruction ( void )           { _CPU_CACR_AND( 0xFFFFFFFE ); } /* clear EI */

#elif ( defined(__mc68040__) || defined (__mc68060__) )
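
/*
 * The mc68040 and mc68060 maintain their caches with the cpushl/cpusha and
 * cinvl/cinva instructions; the CACR carries only the enable bits
 * (EDC for the data cache, EIC for the instruction cache).
 */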
void _CPU_cache_flush_1_data_line (
  const void * d_addr )
{
  void * p_address = (void *) _CPU_virtual_to_physical( d_addr );
  __asm__ volatile ( "cpushl %%dc,(%0)" :: "a" (p_address) ); /* push one data cache line */
}

void _CPU_cache_invalidate_1_data_line (
  const void * d_addr )
{
  void * p_address = (void *) _CPU_virtual_to_physical( d_addr );
  __asm__ volatile ( "cinvl %%dc,(%0)" :: "a" (p_address) ); /* invalidate one data cache line */
}

void _CPU_cache_flush_entire_data ( void )      { __asm__ volatile ( "cpusha %%dc" :: ); } /* push entire data cache */
void _CPU_cache_invalidate_entire_data ( void ) { __asm__ volatile ( "cinva %%dc" :: ); }  /* invalidate entire data cache */
void _CPU_cache_enable_data ( void )            { _CPU_CACR_OR( 0x80000000 ); }            /* EDC: enable data cache */
void _CPU_cache_disable_data ( void )           { _CPU_CACR_AND( 0x7FFFFFFF ); }           /* clear EDC */

void _CPU_cache_invalidate_1_instruction_line (
  const void * i_addr )
{
  void * p_address = (void *) _CPU_virtual_to_physical( i_addr );
  __asm__ volatile ( "cinvl %%ic,(%0)" :: "a" (p_address) ); /* invalidate one instruction cache line */
}

void _CPU_cache_invalidate_entire_instruction ( void ) { __asm__ volatile ( "cinva %%ic" :: ); } /* invalidate entire instruction cache */
void _CPU_cache_enable_instruction ( void )            { _CPU_CACR_OR( 0x00008000 ); }           /* EIC: enable instruction cache */
void _CPU_cache_disable_instruction ( void )           { _CPU_CACR_AND( 0xFFFF7FFF ); }          /* clear EIC */
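
/*
 *  These routines are not called directly by applications; they are reached
 *  through the RTEMS cache manager directives.  A typical sequence from BSP
 *  or application code might look like the following (illustrative only;
 *  buffer and buffer_size are placeholder names):
 *
 *      rtems_cache_enable_instruction();
 *      rtems_cache_enable_data();
 *      rtems_cache_flush_multiple_data_lines( buffer, buffer_size );
 */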