/**
 * @file
 *
 * @brief Atmel AVR: Set up Basic CPU Dependency Settings Based on
 * Compiler Settings
 *
 * This include file also defines ANSI concatenation macros, common
 * symbols, and otherwise undefined registers.
 */

#include <rtems/score/cpuopts.h>

/*
 *  Recent versions of GNU cpp define variables which indicate the need
 *  for underscores and register prefixes.  If not using GNU cpp, or if
 *  it lacks this support, define these as appropriate.
 */

#ifndef __USER_LABEL_PREFIX__
#define __USER_LABEL_PREFIX__ _
#endif

#ifndef __REGISTER_PREFIX__
#define __REGISTER_PREFIX__
#endif

/* Use the right prefix for global labels and for registers.  */

#define SYM(x) CONCAT1 (__USER_LABEL_PREFIX__, x)
#define REG(x) CONCAT1 (__REGISTER_PREFIX__, x)

/* Macros to mark the beginning of code and data sections.  */

#define BEGIN_CODE_DCL .text
#define BEGIN_DATA_DCL .data
#define BEGIN_CODE     .text

/*
 *  The following must be tailored for the particular flavor of the C
 *  compiler, which may require leading underscores on symbols.
 */

#define PUBLIC(sym) .globl SYM (sym)
#define EXTERN(sym) .globl SYM (sym)

#ifndef __USER_LABEL_PREFIX__
#define __USER_LABEL_PREFIX__ _
#endif

#ifndef __REGISTER_PREFIX__
#define __REGISTER_PREFIX__
#endif

/* ANSI concatenation macros.  */

#define CONCAT1(a, b) CONCAT2(a, b)
#define CONCAT2(a, b) a ## b

#define _U(x) CONCAT1(__USER_LABEL_PREFIX__, x)
#define _R(x) CONCAT1(__REGISTER_PREFIX__, x)

/* Common symbols: registers fixed by the GCC AVR ABI.  */

#define __tmp_reg__  r0
#define __zero_reg__ r1

#define PROLOGUE_SAVES(offset)    XJMP (__prologue_saves__ + 2 * (offset))
#define EPILOGUE_RESTORES(offset) XJMP (__epilogue_restores__ + 2 * (offset))

#if FLASHEND > 0x10000  /* ELPM is needed to reach above 64 KiB */
#define BIG_CODE 1
#else
#define BIG_CODE 0
#endif

#ifndef __AVR_HAVE_MOVW__
#  if defined(__AVR_ENHANCED__) && __AVR_ENHANCED__
#    define __AVR_HAVE_MOVW__ 1
#  endif
#endif

#ifndef __AVR_HAVE_LPMX__
#  if defined(__AVR_ENHANCED__) && __AVR_ENHANCED__
#    define __AVR_HAVE_LPMX__ 1
#  endif
#endif

#ifndef __AVR_HAVE_MUL__
#  if defined(__AVR_ENHANCED__) && __AVR_ENHANCED__
#    define __AVR_HAVE_MUL__ 1
#  endif
#endif
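/*
 *  Usage sketch (hypothetical symbol name, not part of the original
 *  file): SYM() and PUBLIC() keep assembly sources portable across
 *  toolchains that do or do not prepend underscores to C symbols.
 */
#if 0
        BEGIN_CODE
        PUBLIC(my_handler)
SYM(my_handler):
        ret
#endif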
/*
 *  Smart version of movw: uses the "movw" instruction when the MCU
 *  supports it and both register numbers are even, falls back to one
 *  or two "mov"s otherwise, and copies overlapping pairs in the safe
 *  order.  May expand to 0, 1, or 2 instructions.
 */

.macro X_movw dst src
        .L_movw_dst = -1
        .L_movw_src = -1
        .L_movw_n = 0
        .irp reg, r0, r1, r2, r3, r4, r5, r6, r7, r8, r9,  \
                  r10,r11,r12,r13,r14,r15,r16,r17,r18,r19, \
                  r20,r21,r22,r23,r24,r25,r26,r27,r28,r29, \
                  r30,r31
        .ifc \reg,\dst
        .L_movw_dst = .L_movw_n
        .endif
        .ifc \reg,\src
        .L_movw_src = .L_movw_n
        .endif
        .L_movw_n = .L_movw_n + 1
        .endr
        /* Accept upper-case register names as well.  */
        .L_movw_n = 0
        .irp reg, R0, R1, R2, R3, R4, R5, R6, R7, R8, R9,  \
                  R10,R11,R12,R13,R14,R15,R16,R17,R18,R19, \
                  R20,R21,R22,R23,R24,R25,R26,R27,R28,R29, \
                  R30,R31
        .ifc \reg,\dst
        .L_movw_dst = .L_movw_n
        .endif
        .ifc \reg,\src
        .L_movw_src = .L_movw_n
        .endif
        .L_movw_n = .L_movw_n + 1
        .endr
        /* Fall back to treating \dst as a plain register number.  */
        .if .L_movw_dst < 0
        .L_movw_n = 0
        .rept 32
        .if \dst == .L_movw_n
        .L_movw_dst = .L_movw_n
        .endif
        .L_movw_n = .L_movw_n + 1
        .endr
        .endif
        .if .L_movw_src < 0
        .L_movw_n = 0
        .rept 32
        .if \src == .L_movw_n
        .L_movw_src = .L_movw_n
        .endif
        .L_movw_n = .L_movw_n + 1
        .endr
        .endif
        .if (.L_movw_dst < 0) || (.L_movw_src < 0)
        .err ; Invalid 'X_movw' arg.
        .endif
        .if ((.L_movw_src) - (.L_movw_dst))           /* src and dst differ */
        .if (((.L_movw_src) | (.L_movw_dst)) & 0x01)  /* an odd register is involved */
        .if (((.L_movw_src) - (.L_movw_dst)) & 0x80)  /* src < dst: copy high byte first */
        mov (.L_movw_dst)+1, (.L_movw_src)+1
        mov (.L_movw_dst), (.L_movw_src)
        .else                                         /* src > dst: copy low byte first */
        mov (.L_movw_dst), (.L_movw_src)
        mov (.L_movw_dst)+1, (.L_movw_src)+1
        .endif
        .else                                         /* both pairs even-aligned */
#if defined(__AVR_HAVE_MOVW__) && __AVR_HAVE_MOVW__
        movw (.L_movw_dst), (.L_movw_src)
#else
        mov (.L_movw_dst), (.L_movw_src)
        mov (.L_movw_dst)+1, (.L_movw_src)+1
#endif
        .endif
        .endif
.endm
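/*
 *  Usage sketch (illustrative operands, not part of the original file):
 */
#if 0
        X_movw r24, r22    ; both even: a single "movw" where available
        X_movw r25, r24    ; overlapping pair: two "mov"s, high byte first
        X_movw r24, r24    ; identical pair: expands to no instructions
#endif

/*
 *  X_lpm: load a byte from program memory at Z (optionally with
 *  post-increment) into \dst, using the "lpm Rd, Z[+]" form where the
 *  MCU supports it and plain "lpm" plus "mov" otherwise.  Defaults are
 *  dst=r0, src=Z.
 */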
.macro X_lpm dst=r0, src=Z
        /* dst evaluation */
        .L_lpm_dst = -1
        .L_lpm_n = 0
        .irp reg, r0, r1, r2, r3, r4, r5, r6, r7, r8, r9,  \
                  r10,r11,r12,r13,r14,r15,r16,r17,r18,r19, \
                  r20,r21,r22,r23,r24,r25,r26,r27,r28,r29, \
                  r30,r31
        .ifc \reg,\dst
        .L_lpm_dst = .L_lpm_n
        .endif
        .L_lpm_n = .L_lpm_n + 1
        .endr
        .L_lpm_n = 0
        .irp reg, R0, R1, R2, R3, R4, R5, R6, R7, R8, R9,  \
                  R10,R11,R12,R13,R14,R15,R16,R17,R18,R19, \
                  R20,R21,R22,R23,R24,R25,R26,R27,R28,R29, \
                  R30,R31
        .ifc \reg,\dst
        .L_lpm_dst = .L_lpm_n
        .endif
        .L_lpm_n = .L_lpm_n + 1
        .endr
        .if .L_lpm_dst < 0
        .L_lpm_n = 0
        .rept 32
        .if \dst == .L_lpm_n
        .L_lpm_dst = .L_lpm_n
        .endif
        .L_lpm_n = .L_lpm_n + 1
        .endr
        .endif
        .if (.L_lpm_dst < 0)
        .err ; Invalid dst arg of 'X_lpm' macro.
        .endif
        /* src evaluation: Z and z select a plain load, Z+ and z+ post-increment */
        .L_lpm_src = -1
        .L_lpm_n = 0
        .irp reg, Z,z,Z+,z+
        .ifc \reg,\src
        .L_lpm_src = .L_lpm_n
        .endif
        .L_lpm_n = .L_lpm_n + 1
        .endr
        .if (.L_lpm_src < 0)
        .err ; Invalid src arg of 'X_lpm' macro.
        .endif
        /* instruction selection */
        .if .L_lpm_src < 2          /* src is Z or z: no post-increment */
        .if .L_lpm_dst == 0
        lpm
        .else
#if defined(__AVR_HAVE_LPMX__) && __AVR_HAVE_LPMX__
        lpm .L_lpm_dst, Z
#else
        lpm
        mov .L_lpm_dst, r0
#endif
        .endif
        .else                       /* src is Z+ or z+ */
        .if (.L_lpm_dst >= 30)
        .err ; Registers 30 and 31 are inhibited as 'X_lpm *,Z+' dst.
        .endif
#if defined(__AVR_HAVE_LPMX__) && __AVR_HAVE_LPMX__
        lpm .L_lpm_dst, Z+
#else
        lpm
        mov .L_lpm_dst, r0
        adiw r30, 1
#endif
        .endif
.endm
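/*
 *  Usage sketch (illustrative operands, not part of the original file):
 */
#if 0
        X_lpm r24, Z       ; r24 = flash byte at Z
        X_lpm r18, Z+      ; r18 = flash byte at Z, Z incremented afterwards
        X_lpm              ; defaults: r0 = flash byte at Z
#endif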
/*
 *  LPM_R0_ZPLUS_INIT is used before a loop to initialize RAMPZ from
 *  \hhi, and LPM_R0_ZPLUS_NEXT loads a byte from program memory at
 *  [RAMPZ:]Z into r0 and increments [RAMPZ:]Z.  \hhi is a register
 *  holding bits 23..16 of the address; bits 15..0 are in Z (r31:r30).
 */

.macro LPM_R0_ZPLUS_INIT hhi
#if __AVR_ENHANCED__
  #if BIG_CODE
        out AVR_RAMPZ_ADDR, \hhi
  #endif
#endif
.endm
.macro LPM_R0_ZPLUS_NEXT hhi
#if __AVR_ENHANCED__
  #if BIG_CODE
        elpm r0, Z+
  #else
        lpm r0, Z+
  #endif
#else
  #if BIG_CODE
        out AVR_RAMPZ_ADDR, \hhi
        elpm
        adiw r30, 1
        adc \hhi, __zero_reg__      /* propagate the carry into bits 23..16 */
  #else
        lpm
        adiw r30, 1
  #endif
#endif
.endm
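/*
 *  Usage sketch (hypothetical registers: r18 = address bits 23..16,
 *  r19 = byte count; not part of the original file): copy a block
 *  from program memory to RAM at X.
 */
#if 0
        LPM_R0_ZPLUS_INIT r18
1:      LPM_R0_ZPLUS_NEXT r18       ; r0 = next flash byte, [RAMPZ:]Z advanced
        st   X+, r0                 ; store to RAM and advance X
        dec  r19
        brne 1b
#endif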