3 @@ -10779,7 +10779,7 @@ validate_arglist (const_tree callexpr, .
7 - code = va_arg (ap, enum tree_code);
8 + code = va_arg (ap, int);
14 @@ -3496,7 +3496,7 @@ emit_library_call_value_1 (int retval, r
15 for (; count < nargs; count++)
17 rtx val = va_arg (p, rtx);
18 - enum machine_mode mode = va_arg (p, enum machine_mode);
19 + enum machine_mode mode = va_arg (p, int);
21 /* We cannot convert the arg value to the mode the library wants here;
22 must do it earlier where we know the signedness of the arg. */
24 +++ b/gcc/config/avr32/avr32.c
27 + Target hooks and helper functions for AVR32.
28 + Copyright 2003-2006 Atmel Corporation.
30 + Written by Ronny Pedersen, Atmel Norway, <rpedersen@atmel.com>
31 + Initial porting by Anders Ødland.
33 + This file is part of GCC.
35 + This program is free software; you can redistribute it and/or modify
36 + it under the terms of the GNU General Public License as published by
37 + the Free Software Foundation; either version 2 of the License, or
38 + (at your option) any later version.
40 + This program is distributed in the hope that it will be useful,
41 + but WITHOUT ANY WARRANTY; without even the implied warranty of
42 + MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
43 + GNU General Public License for more details.
45 + You should have received a copy of the GNU General Public License
46 + along with this program; if not, write to the Free Software
47 + Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. */
51 +#include "coretypes.h"
57 +#include "hard-reg-set.h"
59 +#include "insn-config.h"
60 +#include "conditions.h"
62 +#include "insn-attr.h"
65 +#include "function.h"
72 +#include "c-pragma.h"
73 +#include "integrate.h"
75 +#include "langhooks.h"
80 +#include "target-def.h"
84 +/* Forward definitions of types. */
85 +typedef struct minipool_node Mnode;
86 +typedef struct minipool_fixup Mfix;
88 +/* Obstack for minipool constant handling. */
89 +static struct obstack minipool_obstack;
90 +static char *minipool_startobj;
91 +static rtx minipool_vector_label;
93 +/* True if we are currently building a constant table. */
94 +int making_const_table;
96 +/* Some forward function declarations */
97 +static unsigned long avr32_isr_value (tree);
98 +static unsigned long avr32_compute_func_type (void);
99 +static tree avr32_handle_isr_attribute (tree *, tree, tree, int, bool *);
100 +static tree avr32_handle_acall_attribute (tree *, tree, tree, int, bool *);
101 +static tree avr32_handle_fndecl_attribute (tree * node, tree name, tree args,
102 + int flags, bool * no_add_attrs);
103 +static void avr32_reorg (void);
104 +bool avr32_return_in_msb (tree type);
105 +bool avr32_vector_mode_supported (enum machine_mode mode);
106 +static void avr32_init_libfuncs (void);
110 +avr32_add_gc_roots (void)
112 + gcc_obstack_init (&minipool_obstack);
113 + minipool_startobj = (char *) obstack_alloc (&minipool_obstack, 0);
117 +/* List of all known AVR32 parts */
118 +static const struct part_type_s avr32_part_types[] = {
119 + /* name, part_type, architecture type, macro */
120 + {"none", PART_TYPE_AVR32_NONE, ARCH_TYPE_AVR32_AP, "__AVR32__"},
121 + {"ap7000", PART_TYPE_AVR32_AP7000, ARCH_TYPE_AVR32_AP, "__AVR32_AP7000__"},
122 + {"ap7001", PART_TYPE_AVR32_AP7001, ARCH_TYPE_AVR32_AP, "__AVR32_AP7001__"},
123 + {"ap7002", PART_TYPE_AVR32_AP7002, ARCH_TYPE_AVR32_AP, "__AVR32_AP7002__"},
124 + {"ap7200", PART_TYPE_AVR32_AP7200, ARCH_TYPE_AVR32_AP, "__AVR32_AP7200__"},
125 + {"uc3a0128", PART_TYPE_AVR32_UC3A0128, ARCH_TYPE_AVR32_UCR2, "__AVR32_UC3A0128__"},
126 + {"uc3a0256", PART_TYPE_AVR32_UC3A0256, ARCH_TYPE_AVR32_UCR2, "__AVR32_UC3A0256__"},
127 + {"uc3a0512", PART_TYPE_AVR32_UC3A0512, ARCH_TYPE_AVR32_UCR2, "__AVR32_UC3A0512__"},
128 + {"uc3a0512es", PART_TYPE_AVR32_UC3A0512ES, ARCH_TYPE_AVR32_UCR1, "__AVR32_UC3A0512ES__"},
129 + {"uc3a1128", PART_TYPE_AVR32_UC3A1128, ARCH_TYPE_AVR32_UCR2, "__AVR32_UC3A1128__"},
130 + {"uc3a1256", PART_TYPE_AVR32_UC3A1256, ARCH_TYPE_AVR32_UCR2, "__AVR32_UC3A1256__"},
131 + {"uc3a1512", PART_TYPE_AVR32_UC3A1512, ARCH_TYPE_AVR32_UCR2, "__AVR32_UC3A1512__"},
132 + {"uc3a1512es", PART_TYPE_AVR32_UC3A1512ES, ARCH_TYPE_AVR32_UCR1, "__AVR32_UC3A1512ES__"},
133 + {"uc3a3revd", PART_TYPE_AVR32_UC3A3REVD, ARCH_TYPE_AVR32_UCR2NOMUL, "__AVR32_UC3A3256S__"},
134 + {"uc3a364", PART_TYPE_AVR32_UC3A364, ARCH_TYPE_AVR32_UCR2, "__AVR32_UC3A364__"},
135 + {"uc3a364s", PART_TYPE_AVR32_UC3A364S, ARCH_TYPE_AVR32_UCR2, "__AVR32_UC3A364S__"},
136 + {"uc3a3128", PART_TYPE_AVR32_UC3A3128, ARCH_TYPE_AVR32_UCR2, "__AVR32_UC3A3128__"},
137 + {"uc3a3128s", PART_TYPE_AVR32_UC3A3128S, ARCH_TYPE_AVR32_UCR2, "__AVR32_UC3A3128S__"},
138 + {"uc3a3256", PART_TYPE_AVR32_UC3A3256, ARCH_TYPE_AVR32_UCR2, "__AVR32_UC3A3256__"},
139 + {"uc3a3256s", PART_TYPE_AVR32_UC3A3256S, ARCH_TYPE_AVR32_UCR2, "__AVR32_UC3A3256S__"},
140 + {"uc3b064", PART_TYPE_AVR32_UC3B064, ARCH_TYPE_AVR32_UCR1, "__AVR32_UC3B064__"},
141 + {"uc3b0128", PART_TYPE_AVR32_UC3B0128, ARCH_TYPE_AVR32_UCR1, "__AVR32_UC3B0128__"},
142 + {"uc3b0256", PART_TYPE_AVR32_UC3B0256, ARCH_TYPE_AVR32_UCR1, "__AVR32_UC3B0256__"},
143 + {"uc3b0256es", PART_TYPE_AVR32_UC3B0256ES, ARCH_TYPE_AVR32_UCR1, "__AVR32_UC3B0256ES__"},
144 + {"uc3b0512revc", PART_TYPE_AVR32_UC3B0512REVC, ARCH_TYPE_AVR32_UCR2, "__AVR32_UC3B0512REVC__"},
145 + {"uc3b164", PART_TYPE_AVR32_UC3B164, ARCH_TYPE_AVR32_UCR1, "__AVR32_UC3B164__"},
146 + {"uc3b1128", PART_TYPE_AVR32_UC3B1128, ARCH_TYPE_AVR32_UCR1, "__AVR32_UC3B1128__"},
147 + {"uc3b1256", PART_TYPE_AVR32_UC3B1256, ARCH_TYPE_AVR32_UCR1, "__AVR32_UC3B1256__"},
148 + {"uc3b1256es", PART_TYPE_AVR32_UC3B1256ES, ARCH_TYPE_AVR32_UCR1, "__AVR32_UC3B1256ES__"},
149 + {"uc3b1512revc", PART_TYPE_AVR32_UC3B1512REVC, ARCH_TYPE_AVR32_UCR2, "__AVR32_UC3B1512REVC__"},
150 + {"uc3c0512c", PART_TYPE_AVR32_UC3C0512C, ARCH_TYPE_AVR32_UCR3, "__AVR32_UC3C0512C__"},
151 + {"uc3c0256c", PART_TYPE_AVR32_UC3C0256C, ARCH_TYPE_AVR32_UCR3, "__AVR32_UC3C0256C__"},
152 + {"uc3c0128c", PART_TYPE_AVR32_UC3C0128C, ARCH_TYPE_AVR32_UCR3, "__AVR32_UC3C0128C__"},
153 + {"uc3c064c", PART_TYPE_AVR32_UC3C064C, ARCH_TYPE_AVR32_UCR3, "__AVR32_UC3C064C__"},
154 + {"uc3c1512c", PART_TYPE_AVR32_UC3C1512C, ARCH_TYPE_AVR32_UCR3, "__AVR32_UC3C1512C__"},
155 + {"uc3c1256c", PART_TYPE_AVR32_UC3C1256C, ARCH_TYPE_AVR32_UCR3, "__AVR32_UC3C1256C__"},
156 + {"uc3c1128c", PART_TYPE_AVR32_UC3C1128C, ARCH_TYPE_AVR32_UCR3, "__AVR32_UC3C1128C__"},
157 + {"uc3c164c", PART_TYPE_AVR32_UC3C164C, ARCH_TYPE_AVR32_UCR3, "__AVR32_UC3C164C__"},
158 + {"uc3c2512c", PART_TYPE_AVR32_UC3C2512C, ARCH_TYPE_AVR32_UCR3, "__AVR32_UC3C2512C__"},
159 + {"uc3c2256c", PART_TYPE_AVR32_UC3C2256C, ARCH_TYPE_AVR32_UCR3, "__AVR32_UC3C2256C__"},
160 + {"uc3c2128c", PART_TYPE_AVR32_UC3C2128C, ARCH_TYPE_AVR32_UCR3, "__AVR32_UC3C2128C__"},
161 + {"uc3c264c", PART_TYPE_AVR32_UC3C264C, ARCH_TYPE_AVR32_UCR3, "__AVR32_UC3C264C__"},
162 + {"uc3l064", PART_TYPE_AVR32_UC3L064, ARCH_TYPE_AVR32_UCR3, "__AVR32_UC3L064__"},
163 + {"uc3l032", PART_TYPE_AVR32_UC3L032, ARCH_TYPE_AVR32_UCR3, "__AVR32_UC3L032__"},
164 + {"uc3l016", PART_TYPE_AVR32_UC3L016, ARCH_TYPE_AVR32_UCR3, "__AVR32_UC3L016__"},
168 +/* List of all known AVR32 architectures */
169 +static const struct arch_type_s avr32_arch_types[] = {
170 + /* name, architecture type, microarchitecture type, feature flags, macro */
171 + {"ap", ARCH_TYPE_AVR32_AP, UARCH_TYPE_AVR32B,
172 + (FLAG_AVR32_HAS_DSP
173 + | FLAG_AVR32_HAS_SIMD
174 + | FLAG_AVR32_HAS_UNALIGNED_WORD
175 + | FLAG_AVR32_HAS_BRANCH_PRED | FLAG_AVR32_HAS_RETURN_STACK
176 + | FLAG_AVR32_HAS_CACHES),
178 + {"ucr1", ARCH_TYPE_AVR32_UCR1, UARCH_TYPE_AVR32A,
179 + (FLAG_AVR32_HAS_DSP | FLAG_AVR32_HAS_RMW),
181 + {"ucr2", ARCH_TYPE_AVR32_UCR2, UARCH_TYPE_AVR32A,
182 + (FLAG_AVR32_HAS_DSP | FLAG_AVR32_HAS_RMW
183 + | FLAG_AVR32_HAS_V2_INSNS),
185 + {"ucr2nomul", ARCH_TYPE_AVR32_UCR2NOMUL, UARCH_TYPE_AVR32A,
186 + (FLAG_AVR32_HAS_DSP | FLAG_AVR32_HAS_RMW
187 + | FLAG_AVR32_HAS_V2_INSNS | FLAG_AVR32_HAS_NO_MUL_INSNS),
189 + {"ucr3", ARCH_TYPE_AVR32_UCR3, UARCH_TYPE_AVR32A,
190 + (FLAG_AVR32_HAS_DSP | FLAG_AVR32_HAS_RMW
191 + | FLAG_AVR32_HAS_V2_INSNS),
193 + {NULL, 0, 0, 0, NULL}
196 +/* Default arch name */
197 +const char *avr32_arch_name = "none";
198 +const char *avr32_part_name = "none";
200 +const struct part_type_s *avr32_part;
201 +const struct arch_type_s *avr32_arch;
204 +/* Set default target_flags. */
205 +#undef TARGET_DEFAULT_TARGET_FLAGS
206 +#define TARGET_DEFAULT_TARGET_FLAGS \
207 + (MASK_HAS_ASM_ADDR_PSEUDOS | MASK_MD_REORG_OPTIMIZATION | MASK_COND_EXEC_BEFORE_RELOAD)
210 +avr32_optimization_options (int level,
212 + if (AVR32_ALWAYS_PIC)
215 + /* Enable section anchors if optimization is enabled. */
216 + if (level > 0 || size)
217 + flag_section_anchors = 1;
220 +/* Override command line options */
222 +avr32_override_options (void)
224 + const struct part_type_s *part;
225 + const struct arch_type_s *arch;
227 + /* Add backward compatibility */
228 + if (strcmp ("uc", avr32_arch_name)== 0)
230 + fprintf (stderr, "Warning: Deprecated arch `%s' specified. "
231 + "Please use '-march=ucr1' instead. "
232 + "Converting to arch 'ucr1'\n",
234 + avr32_arch_name="ucr1";
237 + /* Check if arch type is set. */
238 + for (arch = avr32_arch_types; arch->name; arch++)
240 + if (strcmp (arch->name, avr32_arch_name) == 0)
245 + if (!arch->name && strcmp("none", avr32_arch_name) != 0)
247 + fprintf (stderr, "Unknown arch `%s' specified\n"
248 + "Known arch names:\n"
249 + "\tuc (deprecated)\n",
251 + for (arch = avr32_arch_types; arch->name; arch++)
252 + fprintf (stderr, "\t%s\n", arch->name);
253 + avr32_arch = &avr32_arch_types[ARCH_TYPE_AVR32_AP];
256 + /* Check if part type is set. */
257 + for (part = avr32_part_types; part->name; part++)
258 + if (strcmp (part->name, avr32_part_name) == 0)
264 + fprintf (stderr, "Unknown part `%s' specified\nKnown part names:\n",
266 + for (part = avr32_part_types; part->name; part++)
268 + if (strcmp("none", part->name) != 0)
269 + fprintf (stderr, "\t%s\n", part->name);
271 + /* Set default to NONE*/
272 + avr32_part = &avr32_part_types[PART_TYPE_AVR32_NONE];
275 + /* NB! option -march= overrides option -mpart
276 + * if both are used at the same time */
278 + avr32_arch = &avr32_arch_types[avr32_part->arch_type];
280 + /* If optimization level is two or greater, then align start of loops to a
281 + word boundary since this will allow folding the first insn of the loop.
282 + Do this only for targets supporting branch prediction. */
283 + if (optimize >= 2 && TARGET_BRANCH_PRED)
287 + /* Enable fast-float library if unsafe math optimizations
289 + if (flag_unsafe_math_optimizations)
290 + target_flags |= MASK_FAST_FLOAT;
292 + /* Check if we should set avr32_imm_in_const_pool
293 + based on if caches are present or not. */
294 + if ( avr32_imm_in_const_pool == -1 )
296 + if ( TARGET_CACHES )
297 + avr32_imm_in_const_pool = 1;
299 + avr32_imm_in_const_pool = 0;
305 + avr32_add_gc_roots ();
310 +If defined, a function that outputs the assembler code for entry to a
311 +function. The prologue is responsible for setting up the stack frame,
312 +initializing the frame pointer register, saving registers that must be
313 +saved, and allocating size additional bytes of storage for the
314 +local variables. size is an integer. file is a stdio
315 +stream to which the assembler code should be output.
317 +The label for the beginning of the function need not be output by this
318 +macro. That has already been done when the macro is run.
320 +To determine which registers to save, the macro can refer to the array
321 +regs_ever_live: element r is nonzero if hard register
322 +r is used anywhere within the function. This implies the function
323 +prologue should save register r, provided it is not one of the
324 +call-used registers. (TARGET_ASM_FUNCTION_EPILOGUE must likewise use
327 +On machines that have ``register windows'', the function entry code does
328 +not save on the stack the registers that are in the windows, even if
329 +they are supposed to be preserved by function calls; instead it takes
330 +appropriate steps to ``push'' the register stack, if any non-call-used
331 +registers are used in the function.
333 +On machines where functions may or may not have frame-pointers, the
334 +function entry code must vary accordingly; it must set up the frame
335 +pointer if one is wanted, and not otherwise. To determine whether a
336 +frame pointer is wanted, the macro can refer to the variable
337 +frame_pointer_needed. The variable's value will be 1 at run
338 +time in a function that needs a frame pointer. (see Elimination).
340 +The function entry code is responsible for allocating any stack space
341 +required for the function. This stack space consists of the regions
342 +listed below. In most cases, these regions are allocated in the
343 +order listed, with the last listed region closest to the top of the
344 +stack (the lowest address if STACK_GROWS_DOWNWARD is defined, and
345 +the highest address if it is not defined). You can use a different order
346 +for a machine if doing so is more convenient or required for
347 +compatibility reasons. Except in cases where required by standard
348 +or by a debugger, there is no reason why the stack layout used by GCC
349 +need agree with that used by other compilers for a machine.
352 +#undef TARGET_ASM_FUNCTION_PROLOGUE
353 +#define TARGET_ASM_FUNCTION_PROLOGUE avr32_target_asm_function_prologue
356 +#undef TARGET_DEFAULT_SHORT_ENUMS
357 +#define TARGET_DEFAULT_SHORT_ENUMS hook_bool_void_false
359 +#undef TARGET_PROMOTE_FUNCTION_ARGS
360 +#define TARGET_PROMOTE_FUNCTION_ARGS hook_bool_tree_true
362 +#undef TARGET_PROMOTE_FUNCTION_RETURN
363 +#define TARGET_PROMOTE_FUNCTION_RETURN hook_bool_tree_true
365 +#undef TARGET_PROMOTE_PROTOTYPES
366 +#define TARGET_PROMOTE_PROTOTYPES hook_bool_tree_true
368 +#undef TARGET_MUST_PASS_IN_STACK
369 +#define TARGET_MUST_PASS_IN_STACK avr32_must_pass_in_stack
371 +#undef TARGET_PASS_BY_REFERENCE
372 +#define TARGET_PASS_BY_REFERENCE avr32_pass_by_reference
374 +#undef TARGET_STRICT_ARGUMENT_NAMING
375 +#define TARGET_STRICT_ARGUMENT_NAMING avr32_strict_argument_naming
377 +#undef TARGET_VECTOR_MODE_SUPPORTED_P
378 +#define TARGET_VECTOR_MODE_SUPPORTED_P avr32_vector_mode_supported
380 +#undef TARGET_RETURN_IN_MEMORY
381 +#define TARGET_RETURN_IN_MEMORY avr32_return_in_memory
383 +#undef TARGET_RETURN_IN_MSB
384 +#define TARGET_RETURN_IN_MSB avr32_return_in_msb
386 +#undef TARGET_ARG_PARTIAL_BYTES
387 +#define TARGET_ARG_PARTIAL_BYTES avr32_arg_partial_bytes
389 +#undef TARGET_STRIP_NAME_ENCODING
390 +#define TARGET_STRIP_NAME_ENCODING avr32_strip_name_encoding
392 +#define streq(string1, string2) (strcmp (string1, string2) == 0)
394 +#undef TARGET_NARROW_VOLATILE_BITFIELD
395 +#define TARGET_NARROW_VOLATILE_BITFIELD hook_bool_void_false
397 +#undef TARGET_ATTRIBUTE_TABLE
398 +#define TARGET_ATTRIBUTE_TABLE avr32_attribute_table
400 +#undef TARGET_COMP_TYPE_ATTRIBUTES
401 +#define TARGET_COMP_TYPE_ATTRIBUTES avr32_comp_type_attributes
404 +#undef TARGET_RTX_COSTS
405 +#define TARGET_RTX_COSTS avr32_rtx_costs
407 +#undef TARGET_CANNOT_FORCE_CONST_MEM
408 +#define TARGET_CANNOT_FORCE_CONST_MEM avr32_cannot_force_const_mem
410 +#undef TARGET_ASM_INTEGER
411 +#define TARGET_ASM_INTEGER avr32_assemble_integer
413 +#undef TARGET_FUNCTION_VALUE
414 +#define TARGET_FUNCTION_VALUE avr32_function_value
416 +#undef TARGET_MIN_ANCHOR_OFFSET
417 +#define TARGET_MIN_ANCHOR_OFFSET (0)
419 +#undef TARGET_MAX_ANCHOR_OFFSET
420 +#define TARGET_MAX_ANCHOR_OFFSET ((1 << 15) - 1)
424 + * Switches to the appropriate section for output of constant pool
425 + * entry x in mode. You can assume that x is some kind of constant in
426 + * RTL. The argument mode is redundant except in the case of a
427 + * const_int rtx. Select the section by calling readonly_data_ section
428 + * or one of the alternatives for other sections. align is the
429 + * constant alignment in bits.
431 + * The default version of this function takes care of putting symbolic
432 + * constants in flag_ pic mode in data_section and everything else in
433 + * readonly_data_section.
435 +//#undef TARGET_ASM_SELECT_RTX_SECTION
436 +//#define TARGET_ASM_SELECT_RTX_SECTION avr32_select_rtx_section
440 + * If non-null, this hook performs a target-specific pass over the
441 + * instruction stream. The compiler will run it at all optimization
442 + * levels, just before the point at which it normally does
443 + * delayed-branch scheduling.
445 + * The exact purpose of the hook varies from target to target. Some
446 + * use it to do transformations that are necessary for correctness,
447 + * such as laying out in-function constant pools or avoiding hardware
448 + * hazards. Others use it as an opportunity to do some
449 + * machine-dependent optimizations.
451 + * You need not implement the hook if it has nothing to do. The
452 + * default definition is null.
454 +#undef TARGET_MACHINE_DEPENDENT_REORG
455 +#define TARGET_MACHINE_DEPENDENT_REORG avr32_reorg
457 +/* Target hook for assembling integer objects.
458 + Need to handle integer vectors */
460 +avr32_assemble_integer (rtx x, unsigned int size, int aligned_p)
462 + if (avr32_vector_mode_supported (GET_MODE (x)))
466 + if (GET_CODE (x) != CONST_VECTOR)
469 + units = CONST_VECTOR_NUNITS (x);
471 + switch (GET_MODE (x))
483 + for (i = 0; i < units; i++)
487 + elt = CONST_VECTOR_ELT (x, i);
488 + assemble_integer (elt, size, i == 0 ? 32 : size * BITS_PER_UNIT, 1);
494 + return default_assemble_integer (x, size, aligned_p);
498 + * This target hook describes the relative costs of RTL expressions.
500 + * The cost may depend on the precise form of the expression, which is
501 + * available for examination in x, and the rtx code of the expression
502 + * in which it is contained, found in outer_code. code is the
503 + * expression code--redundant, since it can be obtained with GET_CODE
506 + * In implementing this hook, you can use the construct COSTS_N_INSNS
507 + * (n) to specify a cost equal to n fast instructions.
509 + * On entry to the hook, *total contains a default estimate for the
510 + * cost of the expression. The hook should modify this value as
511 + * necessary. Traditionally, the default costs are COSTS_N_INSNS (5)
512 + * for multiplications, COSTS_N_INSNS (7) for division and modulus
513 + * operations, and COSTS_N_INSNS (1) for all other operations.
515 + * When optimizing for code size, i.e. when optimize_size is non-zero,
516 + * this target hook should be used to estimate the relative size cost
517 + * of an expression, again relative to COSTS_N_INSNS.
519 + * The hook returns true when all subexpressions of x have been
520 + * processed, and false when rtx_cost should recurse.
523 +/* Worker routine for avr32_rtx_costs. */
525 +avr32_rtx_costs_1 (rtx x, enum rtx_code code ATTRIBUTE_UNUSED,
526 + enum rtx_code outer ATTRIBUTE_UNUSED)
528 + enum machine_mode mode = GET_MODE (x);
530 + switch (GET_CODE (x))
533 + /* Using pre decrement / post increment memory operations on the
534 + avr32_uc architecture means that two writebacks must be performed
535 + and hence two cycles are needed. */
537 + && GET_MODE_SIZE (mode) <= 2 * UNITS_PER_WORD
539 + && (GET_CODE (XEXP (x, 0)) == PRE_DEC
540 + || GET_CODE (XEXP (x, 0)) == POST_INC))
541 + return COSTS_N_INSNS (5);
543 + /* Memory costs quite a lot for the first word, but subsequent words
544 + load at the equivalent of a single insn each. */
545 + if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
546 + return COSTS_N_INSNS (3 + (GET_MODE_SIZE (mode) / UNITS_PER_WORD));
548 + return COSTS_N_INSNS (4);
551 + /* These are valid for the pseudo insns: lda.w and call which operates
552 + on direct addresses. We assume that the cost of a lda.w is the same
553 + as the cost of a ld.w insn. */
554 + return (outer == SET) ? COSTS_N_INSNS (4) : COSTS_N_INSNS (1);
559 + return optimize_size ? COSTS_N_INSNS (1) : COSTS_N_INSNS (16);
563 + if (mode == TImode)
564 + return COSTS_N_INSNS (100);
566 + if (mode == DImode)
567 + return COSTS_N_INSNS (10);
568 + return COSTS_N_INSNS (4);
573 + if (mode == TImode)
574 + return COSTS_N_INSNS (10);
576 + if (mode == DImode)
577 + return COSTS_N_INSNS (4);
578 + return COSTS_N_INSNS (1);
584 + if (GET_MODE_CLASS (mode) == MODE_FLOAT)
585 + return COSTS_N_INSNS (100);
587 + if (mode == TImode)
588 + return COSTS_N_INSNS (50);
590 + if (mode == DImode)
591 + return COSTS_N_INSNS (2);
592 + return COSTS_N_INSNS (1);
596 + if (GET_MODE_CLASS (mode) == MODE_FLOAT)
597 + return COSTS_N_INSNS (300);
599 + if (mode == TImode)
600 + return COSTS_N_INSNS (16);
602 + if (mode == DImode)
603 + return COSTS_N_INSNS (4);
605 + if (mode == HImode)
606 + return COSTS_N_INSNS (2);
608 + return COSTS_N_INSNS (3);
611 + if (GET_CODE (XEXP (x, 1)) == PC || GET_CODE (XEXP (x, 2)) == PC)
612 + return COSTS_N_INSNS (4);
613 + return COSTS_N_INSNS (1);
616 + /* Sign/Zero extensions of registers cost quite much since these
617 + instructions only take one register operand which means that gcc
618 + often must insert some move instructions */
619 + if (mode == QImode || mode == HImode)
620 + return (COSTS_N_INSNS (GET_CODE (XEXP (x, 0)) == MEM ? 0 : 1));
621 + return COSTS_N_INSNS (4);
623 + /* divmod operations */
624 + if (XINT (x, 1) == UNSPEC_UDIVMODSI4_INTERNAL
625 + || XINT (x, 1) == UNSPEC_DIVMODSI4_INTERNAL)
627 + return optimize_size ? COSTS_N_INSNS (1) : COSTS_N_INSNS (16);
631 + return COSTS_N_INSNS (1);
636 +avr32_rtx_costs (rtx x, int code, int outer_code, int *total)
638 + *total = avr32_rtx_costs_1 (x, code, outer_code);
644 +avr32_cannot_force_const_mem (rtx x ATTRIBUTE_UNUSED)
646 + /* Do not want symbols in the constant pool when compiling pic or if using
647 + address pseudo instructions. */
648 + return ((flag_pic || TARGET_HAS_ASM_ADDR_PSEUDOS)
649 + && avr32_find_symbol (x) != NULL_RTX);
653 +/* Table of machine attributes. */
654 +const struct attribute_spec avr32_attribute_table[] = {
655 + /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
656 + /* Interrupt Service Routines have special prologue and epilogue
658 + {"isr", 0, 1, false, false, false, avr32_handle_isr_attribute},
659 + {"interrupt", 0, 1, false, false, false, avr32_handle_isr_attribute},
660 + {"acall", 0, 1, false, true, true, avr32_handle_acall_attribute},
661 + {"naked", 0, 0, true, false, false, avr32_handle_fndecl_attribute},
662 + {NULL, 0, 0, false, false, false, NULL}
668 + const char *const arg;
669 + const unsigned long return_value;
673 +static const isr_attribute_arg isr_attribute_args[] = {
674 + {"FULL", AVR32_FT_ISR_FULL},
675 + {"full", AVR32_FT_ISR_FULL},
676 + {"HALF", AVR32_FT_ISR_HALF},
677 + {"half", AVR32_FT_ISR_HALF},
678 + {"NONE", AVR32_FT_ISR_NONE},
679 + {"none", AVR32_FT_ISR_NONE},
680 + {"UNDEF", AVR32_FT_ISR_NONE},
681 + {"undef", AVR32_FT_ISR_NONE},
682 + {"SWI", AVR32_FT_ISR_NONE},
683 + {"swi", AVR32_FT_ISR_NONE},
684 + {NULL, AVR32_FT_ISR_NONE}
687 +/* Returns the (interrupt) function type of the current
688 + function, or AVR32_FT_UNKNOWN if the type cannot be determined. */
690 +static unsigned long
691 +avr32_isr_value (tree argument)
693 + const isr_attribute_arg *ptr;
696 + /* No argument - default to ISR_NONE. */
697 + if (argument == NULL_TREE)
698 + return AVR32_FT_ISR_NONE;
700 + /* Get the value of the argument. */
701 + if (TREE_VALUE (argument) == NULL_TREE
702 + || TREE_CODE (TREE_VALUE (argument)) != STRING_CST)
703 + return AVR32_FT_UNKNOWN;
705 + arg = TREE_STRING_POINTER (TREE_VALUE (argument));
707 + /* Check it against the list of known arguments. */
708 + for (ptr = isr_attribute_args; ptr->arg != NULL; ptr++)
709 + if (streq (arg, ptr->arg))
710 + return ptr->return_value;
712 + /* An unrecognized interrupt type. */
713 + return AVR32_FT_UNKNOWN;
719 +These hooks specify assembly directives for creating certain kinds
720 +of integer object. The TARGET_ASM_BYTE_OP directive creates a
721 +byte-sized object, the TARGET_ASM_ALIGNED_HI_OP one creates an
722 +aligned two-byte object, and so on. Any of the hooks may be
723 +NULL, indicating that no suitable directive is available.
725 +The compiler will print these strings at the start of a new line,
726 +followed immediately by the object's initial value. In most cases,
727 +the string should contain a tab, a pseudo-op, and then another tab.
729 +#undef TARGET_ASM_BYTE_OP
730 +#define TARGET_ASM_BYTE_OP "\t.byte\t"
731 +#undef TARGET_ASM_ALIGNED_HI_OP
732 +#define TARGET_ASM_ALIGNED_HI_OP "\t.align 1\n\t.short\t"
733 +#undef TARGET_ASM_ALIGNED_SI_OP
734 +#define TARGET_ASM_ALIGNED_SI_OP "\t.align 2\n\t.int\t"
735 +#undef TARGET_ASM_ALIGNED_DI_OP
736 +#define TARGET_ASM_ALIGNED_DI_OP NULL
737 +#undef TARGET_ASM_ALIGNED_TI_OP
738 +#define TARGET_ASM_ALIGNED_TI_OP NULL
739 +#undef TARGET_ASM_UNALIGNED_HI_OP
740 +#define TARGET_ASM_UNALIGNED_HI_OP "\t.short\t"
741 +#undef TARGET_ASM_UNALIGNED_SI_OP
742 +#define TARGET_ASM_UNALIGNED_SI_OP "\t.int\t"
743 +#undef TARGET_ASM_UNALIGNED_DI_OP
744 +#define TARGET_ASM_UNALIGNED_DI_OP NULL
745 +#undef TARGET_ASM_UNALIGNED_TI_OP
746 +#define TARGET_ASM_UNALIGNED_TI_OP NULL
748 +#undef TARGET_ASM_OUTPUT_MI_THUNK
749 +#define TARGET_ASM_OUTPUT_MI_THUNK avr32_output_mi_thunk
751 +#undef TARGET_ASM_CAN_OUTPUT_MI_THUNK
752 +#define TARGET_ASM_CAN_OUTPUT_MI_THUNK hook_bool_const_tree_hwi_hwi_const_tree_true
755 +avr32_output_mi_thunk (FILE * file,
756 + tree thunk ATTRIBUTE_UNUSED,
757 + HOST_WIDE_INT delta,
758 + HOST_WIDE_INT vcall_offset, tree function)
760 + int mi_delta = delta;
762 + (aggregate_value_p (TREE_TYPE (TREE_TYPE (function)), function) ?
763 + INTERNAL_REGNUM (11) : INTERNAL_REGNUM (12));
766 + if (!avr32_const_ok_for_constraint_p (mi_delta, 'I', "Is21")
769 + fputs ("\tpushm\tlr\n", file);
775 + if (avr32_const_ok_for_constraint_p (mi_delta, 'I', "Is21"))
777 + fprintf (file, "\tsub\t%s, %d\n", reg_names[this_regno], -mi_delta);
781 + /* Immediate is larger than k21 we must make us a temp register by
782 + pushing a register to the stack. */
783 + fprintf (file, "\tmov\tlr, lo(%d)\n", mi_delta);
784 + fprintf (file, "\torh\tlr, hi(%d)\n", mi_delta);
785 + fprintf (file, "\tadd\t%s, lr\n", reg_names[this_regno]);
790 + if (vcall_offset != 0)
792 + fprintf (file, "\tld.w\tlr, %s[0]\n", reg_names[this_regno]);
793 + fprintf (file, "\tld.w\tlr, lr[%i]\n", (int) vcall_offset);
794 + fprintf (file, "\tadd\t%s, lr\n", reg_names[this_regno]);
798 + if (!avr32_const_ok_for_constraint_p (mi_delta, 'I', "Is21")
801 + fputs ("\tpopm\tlr\n", file);
804 + /* Jump to the function. We assume that we can use an rjmp since the
805 + function to jump to is local and probably not too far away from
806 + the thunk. If this assumption proves to be wrong we could implement
807 + this jump by calculating the offset between the jump source and destination
808 + and put this in the constant pool and then perform an add to pc.
809 + This would also be legitimate PIC code. But for now we hope that an rjmp
810 + will be sufficient...
812 + fputs ("\trjmp\t", file);
813 + assemble_name (file, XSTR (XEXP (DECL_RTL (function), 0), 0));
814 + fputc ('\n', file);
818 +/* Implements target hook vector_mode_supported. */
820 +avr32_vector_mode_supported (enum machine_mode mode)
822 + if ((mode == V2HImode) || (mode == V4QImode))
829 +#undef TARGET_INIT_LIBFUNCS
830 +#define TARGET_INIT_LIBFUNCS avr32_init_libfuncs
832 +#undef TARGET_INIT_BUILTINS
833 +#define TARGET_INIT_BUILTINS avr32_init_builtins
835 +#undef TARGET_EXPAND_BUILTIN
836 +#define TARGET_EXPAND_BUILTIN avr32_expand_builtin
838 +tree int_ftype_int, int_ftype_void, short_ftype_short, void_ftype_int_int,
839 + void_ftype_ptr_int;
840 +tree void_ftype_int, void_ftype_void, int_ftype_ptr_int;
841 +tree short_ftype_short, int_ftype_int_short, int_ftype_short_short,
842 + short_ftype_short_short;
843 +tree int_ftype_int_int, longlong_ftype_int_short, longlong_ftype_short_short;
844 +tree void_ftype_int_int_int_int_int, void_ftype_int_int_int;
845 +tree longlong_ftype_int_int, void_ftype_int_int_longlong;
846 +tree int_ftype_int_int_int, longlong_ftype_longlong_int_short;
847 +tree longlong_ftype_longlong_short_short, int_ftype_int_short_short;
849 +#define def_builtin(NAME, TYPE, CODE) \
850 + add_builtin_function ((NAME), (TYPE), (CODE), \
851 + BUILT_IN_MD, NULL, NULL_TREE)
853 +#define def_mbuiltin(MASK, NAME, TYPE, CODE) \
857 + add_builtin_function ((NAME), (TYPE), (CODE), \
858 + BUILT_IN_MD, NULL, NULL_TREE); \
862 +struct builtin_description
864 + const unsigned int mask;
865 + const enum insn_code icode;
866 + const char *const name;
868 + const enum rtx_code comparison;
869 + const unsigned int flag;
873 +static const struct builtin_description bdesc_2arg[] = {
874 +#define DSP_BUILTIN(code, builtin, ftype) \
875 + { 1, CODE_FOR_##code, "__builtin_" #code , \
876 + AVR32_BUILTIN_##builtin, 0, 0, ftype }
878 + DSP_BUILTIN (mulsathh_h, MULSATHH_H, &short_ftype_short_short),
879 + DSP_BUILTIN (mulsathh_w, MULSATHH_W, &int_ftype_short_short),
880 + DSP_BUILTIN (mulsatrndhh_h, MULSATRNDHH_H, &short_ftype_short_short),
881 + DSP_BUILTIN (mulsatrndwh_w, MULSATRNDWH_W, &int_ftype_int_short),
882 + DSP_BUILTIN (mulsatwh_w, MULSATWH_W, &int_ftype_int_short),
883 + DSP_BUILTIN (satadd_h, SATADD_H, &short_ftype_short_short),
884 + DSP_BUILTIN (satsub_h, SATSUB_H, &short_ftype_short_short),
885 + DSP_BUILTIN (satadd_w, SATADD_W, &int_ftype_int_int),
886 + DSP_BUILTIN (satsub_w, SATSUB_W, &int_ftype_int_int),
887 + DSP_BUILTIN (mulwh_d, MULWH_D, &longlong_ftype_int_short),
888 + DSP_BUILTIN (mulnwh_d, MULNWH_D, &longlong_ftype_int_short)
893 +avr32_init_builtins (void)
896 + const struct builtin_description *d;
897 + tree endlink = void_list_node;
898 + tree int_endlink = tree_cons (NULL_TREE, integer_type_node, endlink);
899 + tree longlong_endlink =
900 + tree_cons (NULL_TREE, long_long_integer_type_node, endlink);
901 + tree short_endlink =
902 + tree_cons (NULL_TREE, short_integer_type_node, endlink);
903 + tree void_endlink = tree_cons (NULL_TREE, void_type_node, endlink);
905 + /* int func (int) */
906 + int_ftype_int = build_function_type (integer_type_node, int_endlink);
908 + /* short func (short) */
910 + = build_function_type (short_integer_type_node, short_endlink);
912 + /* short func (short, short) */
913 + short_ftype_short_short
914 + = build_function_type (short_integer_type_node,
915 + tree_cons (NULL_TREE, short_integer_type_node,
918 + /* long long func (long long, short, short) */
919 + longlong_ftype_longlong_short_short
920 + = build_function_type (long_long_integer_type_node,
921 + tree_cons (NULL_TREE, long_long_integer_type_node,
922 + tree_cons (NULL_TREE,
923 + short_integer_type_node,
926 + /* long long func (short, short) */
927 + longlong_ftype_short_short
928 + = build_function_type (long_long_integer_type_node,
929 + tree_cons (NULL_TREE, short_integer_type_node,
932 + /* int func (int, int) */
934 + = build_function_type (integer_type_node,
935 + tree_cons (NULL_TREE, integer_type_node,
938 + /* long long func (int, int) */
939 + longlong_ftype_int_int
940 + = build_function_type (long_long_integer_type_node,
941 + tree_cons (NULL_TREE, integer_type_node,
944 + /* long long int func (long long, int, short) */
945 + longlong_ftype_longlong_int_short
946 + = build_function_type (long_long_integer_type_node,
947 + tree_cons (NULL_TREE, long_long_integer_type_node,
948 + tree_cons (NULL_TREE, integer_type_node,
951 + /* long long int func (int, short) */
952 + longlong_ftype_int_short
953 + = build_function_type (long_long_integer_type_node,
954 + tree_cons (NULL_TREE, integer_type_node,
957 + /* int func (int, short, short) */
958 + int_ftype_int_short_short
959 + = build_function_type (integer_type_node,
960 + tree_cons (NULL_TREE, integer_type_node,
961 + tree_cons (NULL_TREE,
962 + short_integer_type_node,
965 + /* int func (short, short) */
966 + int_ftype_short_short
967 + = build_function_type (integer_type_node,
968 + tree_cons (NULL_TREE, short_integer_type_node,
971 + /* int func (int, short) */
972 + int_ftype_int_short
973 + = build_function_type (integer_type_node,
974 + tree_cons (NULL_TREE, integer_type_node,
977 + /* void func (int, int) */
979 + = build_function_type (void_type_node,
980 + tree_cons (NULL_TREE, integer_type_node,
983 + /* void func (int, int, int) */
984 + void_ftype_int_int_int
985 + = build_function_type (void_type_node,
986 + tree_cons (NULL_TREE, integer_type_node,
987 + tree_cons (NULL_TREE, integer_type_node,
990 + /* void func (int, int, long long) */
991 + void_ftype_int_int_longlong
992 + = build_function_type (void_type_node,
993 + tree_cons (NULL_TREE, integer_type_node,
994 + tree_cons (NULL_TREE, integer_type_node,
995 + longlong_endlink)));
997 + /* void func (int, int, int, int, int) */
998 + void_ftype_int_int_int_int_int
999 + = build_function_type (void_type_node,
1000 + tree_cons (NULL_TREE, integer_type_node,
1001 + tree_cons (NULL_TREE, integer_type_node,
1002 + tree_cons (NULL_TREE,
1003 + integer_type_node,
1006 + integer_type_node,
1009 + /* void func (void *, int) */
1010 + void_ftype_ptr_int
1011 + = build_function_type (void_type_node,
1012 + tree_cons (NULL_TREE, ptr_type_node, int_endlink));
1014 + /* void func (int) */
1015 + void_ftype_int = build_function_type (void_type_node, int_endlink);
1017 + /* void func (void) */
1018 + void_ftype_void = build_function_type (void_type_node, void_endlink);
1020 + /* int func (void) */
1021 + int_ftype_void = build_function_type (integer_type_node, void_endlink);
1023 + /* int func (void *, int) */
1025 + = build_function_type (integer_type_node,
1026 + tree_cons (NULL_TREE, ptr_type_node, int_endlink));
1028 + /* int func (int, int, int) */
1029 + int_ftype_int_int_int
1030 + = build_function_type (integer_type_node,
1031 + tree_cons (NULL_TREE, integer_type_node,
1032 + tree_cons (NULL_TREE, integer_type_node,
1035 + /* Initialize avr32 builtins. */
1036 + def_builtin ("__builtin_mfsr", int_ftype_int, AVR32_BUILTIN_MFSR);
1037 + def_builtin ("__builtin_mtsr", void_ftype_int_int, AVR32_BUILTIN_MTSR);
1038 + def_builtin ("__builtin_mfdr", int_ftype_int, AVR32_BUILTIN_MFDR);
1039 + def_builtin ("__builtin_mtdr", void_ftype_int_int, AVR32_BUILTIN_MTDR);
1040 + def_builtin ("__builtin_cache", void_ftype_ptr_int, AVR32_BUILTIN_CACHE);
1041 + def_builtin ("__builtin_sync", void_ftype_int, AVR32_BUILTIN_SYNC);
1042 + def_builtin ("__builtin_ssrf", void_ftype_int, AVR32_BUILTIN_SSRF);
1043 + def_builtin ("__builtin_csrf", void_ftype_int, AVR32_BUILTIN_CSRF);
1044 + def_builtin ("__builtin_tlbr", void_ftype_void, AVR32_BUILTIN_TLBR);
1045 + def_builtin ("__builtin_tlbs", void_ftype_void, AVR32_BUILTIN_TLBS);
1046 + def_builtin ("__builtin_tlbw", void_ftype_void, AVR32_BUILTIN_TLBW);
1047 + def_builtin ("__builtin_breakpoint", void_ftype_void,
1048 + AVR32_BUILTIN_BREAKPOINT);
1049 + def_builtin ("__builtin_xchg", int_ftype_ptr_int, AVR32_BUILTIN_XCHG);
1050 + def_builtin ("__builtin_ldxi", int_ftype_ptr_int, AVR32_BUILTIN_LDXI);
1051 + def_builtin ("__builtin_bswap_16", short_ftype_short,
1052 + AVR32_BUILTIN_BSWAP16);
1053 + def_builtin ("__builtin_bswap_32", int_ftype_int, AVR32_BUILTIN_BSWAP32);
1054 + def_builtin ("__builtin_cop", void_ftype_int_int_int_int_int,
1055 + AVR32_BUILTIN_COP);
1056 + def_builtin ("__builtin_mvcr_w", int_ftype_int_int, AVR32_BUILTIN_MVCR_W);
1057 + def_builtin ("__builtin_mvrc_w", void_ftype_int_int_int,
1058 + AVR32_BUILTIN_MVRC_W);
1059 + def_builtin ("__builtin_mvcr_d", longlong_ftype_int_int,
1060 + AVR32_BUILTIN_MVCR_D);
1061 + def_builtin ("__builtin_mvrc_d", void_ftype_int_int_longlong,
1062 + AVR32_BUILTIN_MVRC_D);
1063 + def_builtin ("__builtin_sats", int_ftype_int_int_int, AVR32_BUILTIN_SATS);
1064 + def_builtin ("__builtin_satu", int_ftype_int_int_int, AVR32_BUILTIN_SATU);
1065 + def_builtin ("__builtin_satrnds", int_ftype_int_int_int,
1066 + AVR32_BUILTIN_SATRNDS);
1067 + def_builtin ("__builtin_satrndu", int_ftype_int_int_int,
1068 + AVR32_BUILTIN_SATRNDU);
1069 + def_builtin ("__builtin_musfr", void_ftype_int, AVR32_BUILTIN_MUSFR);
1070 + def_builtin ("__builtin_mustr", int_ftype_void, AVR32_BUILTIN_MUSTR);
1071 + def_builtin ("__builtin_macsathh_w", int_ftype_int_short_short,
1072 + AVR32_BUILTIN_MACSATHH_W);
1073 + def_builtin ("__builtin_macwh_d", longlong_ftype_longlong_int_short,
1074 + AVR32_BUILTIN_MACWH_D);
1075 + def_builtin ("__builtin_machh_d", longlong_ftype_longlong_short_short,
1076 + AVR32_BUILTIN_MACHH_D);
1078 + /* Add all builtins that are more or less simple operations on two
1080 + for (i = 0, d = bdesc_2arg; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
1082 + /* Use one of the operands; the target can have a different mode for
1083 + mask-generating compares. */
1088 + def_mbuiltin (d->mask, d->name, *(d->ftype), d->code);
1093 +/* Subroutine of avr32_expand_builtin to take care of binop insns. */
1096 +avr32_expand_binop_builtin (enum insn_code icode, tree exp, rtx target)
1099 + tree arg0 = CALL_EXPR_ARG (exp,0);
1100 + tree arg1 = CALL_EXPR_ARG (exp,1);
1101 + rtx op0 = expand_normal (arg0);
1102 + rtx op1 = expand_normal (arg1);
1103 + enum machine_mode tmode = insn_data[icode].operand[0].mode;
1104 + enum machine_mode mode0 = insn_data[icode].operand[1].mode;
1105 + enum machine_mode mode1 = insn_data[icode].operand[2].mode;
1108 + || GET_MODE (target) != tmode
1109 + || !(*insn_data[icode].operand[0].predicate) (target, tmode))
1110 + target = gen_reg_rtx (tmode);
1112 + /* In case the insn wants input operands in modes different from the
1114 + if (!(*insn_data[icode].operand[1].predicate) (op0, mode0))
1116 + /* If op0 is already a reg we must cast it to the correct mode. */
1118 + op0 = convert_to_mode (mode0, op0, 1);
1120 + op0 = copy_to_mode_reg (mode0, op0);
1122 + if (!(*insn_data[icode].operand[2].predicate) (op1, mode1))
1124 + /* If op1 is already a reg we must cast it to the correct mode. */
1126 + op1 = convert_to_mode (mode1, op1, 1);
1128 + op1 = copy_to_mode_reg (mode1, op1);
1130 + pat = GEN_FCN (icode) (target, op0, op1);
1137 +/* Expand an expression EXP that calls a built-in function,
1138 + with result going to TARGET if that's convenient
1139 + (and in mode MODE if that's convenient).
1140 + SUBTARGET may be used as the target for computing one of EXP's operands.
1141 + IGNORE is nonzero if the value is to be ignored. */
1144 +avr32_expand_builtin (tree exp,
1146 + rtx subtarget ATTRIBUTE_UNUSED,
1147 + enum machine_mode mode ATTRIBUTE_UNUSED,
1148 + int ignore ATTRIBUTE_UNUSED)
1150 + const struct builtin_description *d;
1152 + enum insn_code icode = 0;
1153 + tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
1154 + tree arg0, arg1, arg2;
1155 + rtx op0, op1, op2, pat;
1156 + enum machine_mode tmode, mode0, mode1;
1157 + enum machine_mode arg0_mode;
1158 + int fcode = DECL_FUNCTION_CODE (fndecl);
1165 + case AVR32_BUILTIN_SATS:
1166 + case AVR32_BUILTIN_SATU:
1167 + case AVR32_BUILTIN_SATRNDS:
1168 + case AVR32_BUILTIN_SATRNDU:
1170 + const char *fname;
1174 + case AVR32_BUILTIN_SATS:
1175 + icode = CODE_FOR_sats;
1178 + case AVR32_BUILTIN_SATU:
1179 + icode = CODE_FOR_satu;
1182 + case AVR32_BUILTIN_SATRNDS:
1183 + icode = CODE_FOR_satrnds;
1184 + fname = "satrnds";
1186 + case AVR32_BUILTIN_SATRNDU:
1187 + icode = CODE_FOR_satrndu;
1188 + fname = "satrndu";
1192 + arg0 = CALL_EXPR_ARG (exp,0);
1193 + arg1 = CALL_EXPR_ARG (exp,1);
1194 + arg2 = CALL_EXPR_ARG (exp,2);
1195 + op0 = expand_normal (arg0);
1196 + op1 = expand_normal (arg1);
1197 + op2 = expand_normal (arg2);
1199 + tmode = insn_data[icode].operand[0].mode;
1203 + || GET_MODE (target) != tmode
1204 + || !(*insn_data[icode].operand[0].predicate) (target, tmode))
1205 + target = gen_reg_rtx (tmode);
1208 + if (!(*insn_data[icode].operand[0].predicate) (op0, GET_MODE (op0)))
1210 + op0 = copy_to_mode_reg (insn_data[icode].operand[0].mode, op0);
1213 + if (!(*insn_data[icode].operand[1].predicate) (op1, SImode))
1215 + error ("Parameter 2 to __builtin_%s should be a constant number.",
1220 + if (!(*insn_data[icode].operand[1].predicate) (op2, SImode))
1222 + error ("Parameter 3 to __builtin_%s should be a constant number.",
1227 + emit_move_insn (target, op0);
1228 + pat = GEN_FCN (icode) (target, op1, op2);
1235 + case AVR32_BUILTIN_MUSTR:
1236 + icode = CODE_FOR_mustr;
1237 + tmode = insn_data[icode].operand[0].mode;
1240 + || GET_MODE (target) != tmode
1241 + || !(*insn_data[icode].operand[0].predicate) (target, tmode))
1242 + target = gen_reg_rtx (tmode);
1243 + pat = GEN_FCN (icode) (target);
1249 + case AVR32_BUILTIN_MFSR:
1250 + icode = CODE_FOR_mfsr;
1251 + arg0 = CALL_EXPR_ARG (exp,0);
1252 + op0 = expand_normal (arg0);
1253 + tmode = insn_data[icode].operand[0].mode;
1254 + mode0 = insn_data[icode].operand[1].mode;
1256 + if (!(*insn_data[icode].operand[1].predicate) (op0, mode0))
1258 + error ("Parameter 1 to __builtin_mfsr must be a constant number");
1262 + || GET_MODE (target) != tmode
1263 + || !(*insn_data[icode].operand[0].predicate) (target, tmode))
1264 + target = gen_reg_rtx (tmode);
1265 + pat = GEN_FCN (icode) (target, op0);
1270 + case AVR32_BUILTIN_MTSR:
1271 + icode = CODE_FOR_mtsr;
1272 + arg0 = CALL_EXPR_ARG (exp,0);
1273 + arg1 = CALL_EXPR_ARG (exp,1);
1274 + op0 = expand_normal (arg0);
1275 + op1 = expand_normal (arg1);
1276 + mode0 = insn_data[icode].operand[0].mode;
1277 + mode1 = insn_data[icode].operand[1].mode;
1279 + if (!(*insn_data[icode].operand[0].predicate) (op0, mode0))
1281 + error ("Parameter 1 to __builtin_mtsr must be a constant number");
1282 + return gen_reg_rtx (mode0);
1284 + if (!(*insn_data[icode].operand[1].predicate) (op1, mode1))
1285 + op1 = copy_to_mode_reg (mode1, op1);
1286 + pat = GEN_FCN (icode) (op0, op1);
1291 + case AVR32_BUILTIN_MFDR:
1292 + icode = CODE_FOR_mfdr;
1293 + arg0 = CALL_EXPR_ARG (exp,0);
1294 + op0 = expand_normal (arg0);
1295 + tmode = insn_data[icode].operand[0].mode;
1296 + mode0 = insn_data[icode].operand[1].mode;
1298 + if (!(*insn_data[icode].operand[1].predicate) (op0, mode0))
1300 + error ("Parameter 1 to __builtin_mfdr must be a constant number");
1304 + || GET_MODE (target) != tmode
1305 + || !(*insn_data[icode].operand[0].predicate) (target, tmode))
1306 + target = gen_reg_rtx (tmode);
1307 + pat = GEN_FCN (icode) (target, op0);
1312 + case AVR32_BUILTIN_MTDR:
1313 + icode = CODE_FOR_mtdr;
1314 + arg0 = CALL_EXPR_ARG (exp,0);
1315 + arg1 = CALL_EXPR_ARG (exp,1);
1316 + op0 = expand_normal (arg0);
1317 + op1 = expand_normal (arg1);
1318 + mode0 = insn_data[icode].operand[0].mode;
1319 + mode1 = insn_data[icode].operand[1].mode;
1321 + if (!(*insn_data[icode].operand[0].predicate) (op0, mode0))
1323 + error ("Parameter 1 to __builtin_mtdr must be a constant number");
1324 + return gen_reg_rtx (mode0);
1326 + if (!(*insn_data[icode].operand[1].predicate) (op1, mode1))
1327 + op1 = copy_to_mode_reg (mode1, op1);
1328 + pat = GEN_FCN (icode) (op0, op1);
1333 + case AVR32_BUILTIN_CACHE:
1334 + icode = CODE_FOR_cache;
1335 + arg0 = CALL_EXPR_ARG (exp,0);
1336 + arg1 = CALL_EXPR_ARG (exp,1);
1337 + op0 = expand_normal (arg0);
1338 + op1 = expand_normal (arg1);
1339 + mode0 = insn_data[icode].operand[0].mode;
1340 + mode1 = insn_data[icode].operand[1].mode;
1342 + if (!(*insn_data[icode].operand[1].predicate) (op1, mode1))
1344 + error ("Parameter 2 to __builtin_cache must be a constant number");
1345 + return gen_reg_rtx (mode1);
1348 + if (!(*insn_data[icode].operand[0].predicate) (op0, mode0))
1349 + op0 = copy_to_mode_reg (mode0, op0);
1351 + pat = GEN_FCN (icode) (op0, op1);
1356 + case AVR32_BUILTIN_SYNC:
1357 + case AVR32_BUILTIN_MUSFR:
1358 + case AVR32_BUILTIN_SSRF:
1359 + case AVR32_BUILTIN_CSRF:
1361 + const char *fname;
1365 + case AVR32_BUILTIN_SYNC:
1366 + icode = CODE_FOR_sync;
1369 + case AVR32_BUILTIN_MUSFR:
1370 + icode = CODE_FOR_musfr;
1373 + case AVR32_BUILTIN_SSRF:
1374 + icode = CODE_FOR_ssrf;
1377 + case AVR32_BUILTIN_CSRF:
1378 + icode = CODE_FOR_csrf;
1383 + arg0 = CALL_EXPR_ARG (exp,0);
1384 + op0 = expand_normal (arg0);
1385 + mode0 = insn_data[icode].operand[0].mode;
1387 + if (!(*insn_data[icode].operand[0].predicate) (op0, mode0))
1389 + if (icode == CODE_FOR_musfr)
1390 + op0 = copy_to_mode_reg (mode0, op0);
1393 + error ("Parameter to __builtin_%s is illegal.", fname);
1394 + return gen_reg_rtx (mode0);
1397 + pat = GEN_FCN (icode) (op0);
1403 + case AVR32_BUILTIN_TLBR:
1404 + icode = CODE_FOR_tlbr;
1405 + pat = GEN_FCN (icode) (NULL_RTX);
1410 + case AVR32_BUILTIN_TLBS:
1411 + icode = CODE_FOR_tlbs;
1412 + pat = GEN_FCN (icode) (NULL_RTX);
1417 + case AVR32_BUILTIN_TLBW:
1418 + icode = CODE_FOR_tlbw;
1419 + pat = GEN_FCN (icode) (NULL_RTX);
1424 + case AVR32_BUILTIN_BREAKPOINT:
1425 + icode = CODE_FOR_breakpoint;
1426 + pat = GEN_FCN (icode) (NULL_RTX);
1431 + case AVR32_BUILTIN_XCHG:
1432 + icode = CODE_FOR_sync_lock_test_and_setsi;
1433 + arg0 = CALL_EXPR_ARG (exp,0);
1434 + arg1 = CALL_EXPR_ARG (exp,1);
1435 + op0 = expand_normal (arg0);
1436 + op1 = expand_normal (arg1);
1437 + tmode = insn_data[icode].operand[0].mode;
1438 + mode0 = insn_data[icode].operand[1].mode;
1439 + mode1 = insn_data[icode].operand[2].mode;
1441 + if (!(*insn_data[icode].operand[2].predicate) (op1, mode1))
1443 + op1 = copy_to_mode_reg (mode1, op1);
1446 + op0 = force_reg (GET_MODE (op0), op0);
1447 + op0 = gen_rtx_MEM (GET_MODE (op0), op0);
1448 + if (!(*insn_data[icode].operand[1].predicate) (op0, mode0))
1451 + ("Parameter 1 to __builtin_xchg must be a pointer to an integer.");
1455 + || GET_MODE (target) != tmode
1456 + || !(*insn_data[icode].operand[0].predicate) (target, tmode))
1457 + target = gen_reg_rtx (tmode);
1458 + pat = GEN_FCN (icode) (target, op0, op1);
1463 + case AVR32_BUILTIN_LDXI:
1464 + icode = CODE_FOR_ldxi;
1465 + arg0 = CALL_EXPR_ARG (exp,0);
1466 + arg1 = CALL_EXPR_ARG (exp,1);
1467 + arg2 = CALL_EXPR_ARG (exp,2);
1468 + op0 = expand_normal (arg0);
1469 + op1 = expand_normal (arg1);
1470 + op2 = expand_normal (arg2);
1471 + tmode = insn_data[icode].operand[0].mode;
1472 + mode0 = insn_data[icode].operand[1].mode;
1473 + mode1 = insn_data[icode].operand[2].mode;
1475 + if (!(*insn_data[icode].operand[1].predicate) (op0, mode0))
1477 + op0 = copy_to_mode_reg (mode0, op0);
1480 + if (!(*insn_data[icode].operand[2].predicate) (op1, mode1))
1482 + op1 = copy_to_mode_reg (mode1, op1);
1485 + if (!(*insn_data[icode].operand[3].predicate) (op2, SImode))
1488 + ("Parameter 3 to __builtin_ldxi must be a valid extract shift operand: (0|8|16|24)");
1489 + return gen_reg_rtx (mode0);
1493 + || GET_MODE (target) != tmode
1494 + || !(*insn_data[icode].operand[0].predicate) (target, tmode))
1495 + target = gen_reg_rtx (tmode);
1496 + pat = GEN_FCN (icode) (target, op0, op1, op2);
1501 + case AVR32_BUILTIN_BSWAP16:
1503 + icode = CODE_FOR_bswap_16;
1504 + arg0 = CALL_EXPR_ARG (exp,0);
1505 + arg0_mode = TYPE_MODE (TREE_TYPE (arg0));
1506 + mode0 = insn_data[icode].operand[1].mode;
1507 + if (arg0_mode != mode0)
1508 + arg0 = build1 (NOP_EXPR,
1509 + (*lang_hooks.types.type_for_mode) (mode0, 0), arg0);
1511 + op0 = expand_expr (arg0, NULL_RTX, HImode, 0);
1512 + tmode = insn_data[icode].operand[0].mode;
1515 + if (!(*insn_data[icode].operand[1].predicate) (op0, mode0))
1517 + if ( CONST_INT_P (op0) )
1519 + HOST_WIDE_INT val = ( ((INTVAL (op0)&0x00ff) << 8) |
1520 + ((INTVAL (op0)&0xff00) >> 8) );
1521 + /* Sign extend 16-bit value to host wide int */
1522 + val <<= (HOST_BITS_PER_WIDE_INT - 16);
1523 + val >>= (HOST_BITS_PER_WIDE_INT - 16);
1524 + op0 = GEN_INT(val);
1526 + || GET_MODE (target) != tmode
1527 + || !(*insn_data[icode].operand[0].predicate) (target, tmode))
1528 + target = gen_reg_rtx (tmode);
1529 + emit_move_insn(target, op0);
1533 + op0 = copy_to_mode_reg (mode0, op0);
1537 + || GET_MODE (target) != tmode
1538 + || !(*insn_data[icode].operand[0].predicate) (target, tmode))
1540 + target = gen_reg_rtx (tmode);
1544 + pat = GEN_FCN (icode) (target, op0);
1551 + case AVR32_BUILTIN_BSWAP32:
1553 + icode = CODE_FOR_bswap_32;
1554 + arg0 = CALL_EXPR_ARG (exp,0);
1555 + op0 = expand_normal (arg0);
1556 + tmode = insn_data[icode].operand[0].mode;
1557 + mode0 = insn_data[icode].operand[1].mode;
1559 + if (!(*insn_data[icode].operand[1].predicate) (op0, mode0))
1561 + if ( CONST_INT_P (op0) )
1563 + HOST_WIDE_INT val = ( ((INTVAL (op0)&0x000000ff) << 24) |
1564 + ((INTVAL (op0)&0x0000ff00) << 8) |
1565 + ((INTVAL (op0)&0x00ff0000) >> 8) |
1566 + ((INTVAL (op0)&0xff000000) >> 24) );
1567 + /* Sign extend 32-bit value to host wide int */
1568 + val <<= (HOST_BITS_PER_WIDE_INT - 32);
1569 + val >>= (HOST_BITS_PER_WIDE_INT - 32);
1570 + op0 = GEN_INT(val);
1572 + || GET_MODE (target) != tmode
1573 + || !(*insn_data[icode].operand[0].predicate) (target, tmode))
1574 + target = gen_reg_rtx (tmode);
1575 + emit_move_insn(target, op0);
1579 + op0 = copy_to_mode_reg (mode0, op0);
1583 + || GET_MODE (target) != tmode
1584 + || !(*insn_data[icode].operand[0].predicate) (target, tmode))
1585 + target = gen_reg_rtx (tmode);
1588 + pat = GEN_FCN (icode) (target, op0);
1595 + case AVR32_BUILTIN_MVCR_W:
1596 + case AVR32_BUILTIN_MVCR_D:
1598 + arg0 = CALL_EXPR_ARG (exp,0);
1599 + arg1 = CALL_EXPR_ARG (exp,1);
1600 + op0 = expand_normal (arg0);
1601 + op1 = expand_normal (arg1);
1603 + if (fcode == AVR32_BUILTIN_MVCR_W)
1604 + icode = CODE_FOR_mvcrsi;
1606 + icode = CODE_FOR_mvcrdi;
1608 + tmode = insn_data[icode].operand[0].mode;
1611 + || GET_MODE (target) != tmode
1612 + || !(*insn_data[icode].operand[0].predicate) (target, tmode))
1613 + target = gen_reg_rtx (tmode);
1615 + if (!(*insn_data[icode].operand[1].predicate) (op0, SImode))
1618 + ("Parameter 1 to __builtin_cop is not a valid coprocessor number.");
1619 + error ("Number should be between 0 and 7.");
1623 + if (!(*insn_data[icode].operand[2].predicate) (op1, SImode))
1626 + ("Parameter 2 to __builtin_cop is not a valid coprocessor register number.");
1627 + error ("Number should be between 0 and 15.");
1631 + pat = GEN_FCN (icode) (target, op0, op1);
1638 + case AVR32_BUILTIN_MACSATHH_W:
1639 + case AVR32_BUILTIN_MACWH_D:
1640 + case AVR32_BUILTIN_MACHH_D:
1642 + arg0 = CALL_EXPR_ARG (exp,0);
1643 + arg1 = CALL_EXPR_ARG (exp,1);
1644 + arg2 = CALL_EXPR_ARG (exp,2);
1645 + op0 = expand_normal (arg0);
1646 + op1 = expand_normal (arg1);
1647 + op2 = expand_normal (arg2);
1649 + icode = ((fcode == AVR32_BUILTIN_MACSATHH_W) ? CODE_FOR_macsathh_w :
1650 + (fcode == AVR32_BUILTIN_MACWH_D) ? CODE_FOR_macwh_d :
1651 + CODE_FOR_machh_d);
1653 + tmode = insn_data[icode].operand[0].mode;
1654 + mode0 = insn_data[icode].operand[1].mode;
1655 + mode1 = insn_data[icode].operand[2].mode;
1659 + || GET_MODE (target) != tmode
1660 + || !(*insn_data[icode].operand[0].predicate) (target, tmode))
1661 + target = gen_reg_rtx (tmode);
1663 + if (!(*insn_data[icode].operand[0].predicate) (op0, tmode))
1665 + /* If op0 is already a reg we must cast it to the correct mode. */
1667 + op0 = convert_to_mode (tmode, op0, 1);
1669 + op0 = copy_to_mode_reg (tmode, op0);
1672 + if (!(*insn_data[icode].operand[1].predicate) (op1, mode0))
1674 + /* If op1 is already a reg we must cast it to the correct mode. */
1676 + op1 = convert_to_mode (mode0, op1, 1);
1678 + op1 = copy_to_mode_reg (mode0, op1);
1681 + if (!(*insn_data[icode].operand[2].predicate) (op2, mode1))
1683 +	      /* If op2 is already a reg we must cast it to the correct mode. */
1685 + op2 = convert_to_mode (mode1, op2, 1);
1687 + op2 = copy_to_mode_reg (mode1, op2);
1690 + emit_move_insn (target, op0);
1692 + pat = GEN_FCN (icode) (target, op1, op2);
1698 + case AVR32_BUILTIN_MVRC_W:
1699 + case AVR32_BUILTIN_MVRC_D:
1701 + arg0 = CALL_EXPR_ARG (exp,0);
1702 + arg1 = CALL_EXPR_ARG (exp,1);
1703 + arg2 = CALL_EXPR_ARG (exp,2);
1704 + op0 = expand_normal (arg0);
1705 + op1 = expand_normal (arg1);
1706 + op2 = expand_normal (arg2);
1708 + if (fcode == AVR32_BUILTIN_MVRC_W)
1709 + icode = CODE_FOR_mvrcsi;
1711 + icode = CODE_FOR_mvrcdi;
1713 + if (!(*insn_data[icode].operand[0].predicate) (op0, SImode))
1715 + error ("Parameter 1 is not a valid coprocessor number.");
1716 + error ("Number should be between 0 and 7.");
1720 + if (!(*insn_data[icode].operand[1].predicate) (op1, SImode))
1722 + error ("Parameter 2 is not a valid coprocessor register number.");
1723 + error ("Number should be between 0 and 15.");
1727 + if (GET_CODE (op2) == CONST_INT
1728 + || GET_CODE (op2) == CONST
1729 + || GET_CODE (op2) == SYMBOL_REF || GET_CODE (op2) == LABEL_REF)
1731 + op2 = force_const_mem (insn_data[icode].operand[2].mode, op2);
1734 + if (!(*insn_data[icode].operand[2].predicate) (op2, GET_MODE (op2)))
1735 + op2 = copy_to_mode_reg (insn_data[icode].operand[2].mode, op2);
1738 + pat = GEN_FCN (icode) (op0, op1, op2);
1745 + case AVR32_BUILTIN_COP:
1749 + icode = CODE_FOR_cop;
1750 + arg0 = CALL_EXPR_ARG (exp,0);
1751 + arg1 = CALL_EXPR_ARG (exp,1);
1752 + arg2 = CALL_EXPR_ARG (exp,2);
1753 + arg3 = CALL_EXPR_ARG (exp,3);
1754 + arg4 = CALL_EXPR_ARG (exp,4);
1755 + op0 = expand_normal (arg0);
1756 + op1 = expand_normal (arg1);
1757 + op2 = expand_normal (arg2);
1758 + op3 = expand_normal (arg3);
1759 + op4 = expand_normal (arg4);
1761 + if (!(*insn_data[icode].operand[0].predicate) (op0, SImode))
1764 + ("Parameter 1 to __builtin_cop is not a valid coprocessor number.");
1765 + error ("Number should be between 0 and 7.");
1769 + if (!(*insn_data[icode].operand[1].predicate) (op1, SImode))
1772 + ("Parameter 2 to __builtin_cop is not a valid coprocessor register number.");
1773 + error ("Number should be between 0 and 15.");
1777 + if (!(*insn_data[icode].operand[2].predicate) (op2, SImode))
1780 + ("Parameter 3 to __builtin_cop is not a valid coprocessor register number.");
1781 + error ("Number should be between 0 and 15.");
1785 + if (!(*insn_data[icode].operand[3].predicate) (op3, SImode))
1788 + ("Parameter 4 to __builtin_cop is not a valid coprocessor register number.");
1789 + error ("Number should be between 0 and 15.");
1793 + if (!(*insn_data[icode].operand[4].predicate) (op4, SImode))
1796 + ("Parameter 5 to __builtin_cop is not a valid coprocessor operation.");
1797 + error ("Number should be between 0 and 127.");
1801 + pat = GEN_FCN (icode) (op0, op1, op2, op3, op4);
1811 + for (i = 0, d = bdesc_2arg; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
1812 + if (d->code == fcode)
1813 + return avr32_expand_binop_builtin (d->icode, exp, target);
1816 + /* @@@ Should really do something sensible here. */
1821 +/* Handle an "interrupt" or "isr" attribute;
1822 + arguments as in struct attribute_spec.handler. */
1825 +avr32_handle_isr_attribute (tree * node, tree name, tree args,
1826 + int flags, bool * no_add_attrs)
1828 + if (DECL_P (*node))
1830 + if (TREE_CODE (*node) != FUNCTION_DECL)
1832 + warning (OPT_Wattributes,"`%s' attribute only applies to functions",
1833 + IDENTIFIER_POINTER (name));
1834 + *no_add_attrs = true;
1836 + /* FIXME: the argument if any is checked for type attributes; should it
1837 + be checked for decl ones? */
1841 + if (TREE_CODE (*node) == FUNCTION_TYPE
1842 + || TREE_CODE (*node) == METHOD_TYPE)
1844 + if (avr32_isr_value (args) == AVR32_FT_UNKNOWN)
1846 + warning (OPT_Wattributes,"`%s' attribute ignored", IDENTIFIER_POINTER (name));
1847 + *no_add_attrs = true;
1850 + else if (TREE_CODE (*node) == POINTER_TYPE
1851 + && (TREE_CODE (TREE_TYPE (*node)) == FUNCTION_TYPE
1852 + || TREE_CODE (TREE_TYPE (*node)) == METHOD_TYPE)
1853 + && avr32_isr_value (args) != AVR32_FT_UNKNOWN)
1855 + *node = build_variant_type_copy (*node);
1856 + TREE_TYPE (*node) = build_type_attribute_variant
1857 + (TREE_TYPE (*node),
1858 + tree_cons (name, args, TYPE_ATTRIBUTES (TREE_TYPE (*node))));
1859 + *no_add_attrs = true;
1863 + /* Possibly pass this attribute on from the type to a decl. */
1864 + if (flags & ((int) ATTR_FLAG_DECL_NEXT
1865 + | (int) ATTR_FLAG_FUNCTION_NEXT
1866 + | (int) ATTR_FLAG_ARRAY_NEXT))
1868 + *no_add_attrs = true;
1869 + return tree_cons (name, args, NULL_TREE);
1873 + warning (OPT_Wattributes,"`%s' attribute ignored", IDENTIFIER_POINTER (name));
1881 +/* Handle an attribute requiring a FUNCTION_DECL;
1882 + arguments as in struct attribute_spec.handler. */
1884 +avr32_handle_fndecl_attribute (tree * node, tree name,
1885 + tree args ATTRIBUTE_UNUSED,
1886 + int flags ATTRIBUTE_UNUSED,
1887 + bool * no_add_attrs)
1889 + if (TREE_CODE (*node) != FUNCTION_DECL)
1891 + warning (OPT_Wattributes,"%qs attribute only applies to functions",
1892 + IDENTIFIER_POINTER (name));
1893 + *no_add_attrs = true;
1900 +/* Handle an acall attribute;
1901 + arguments as in struct attribute_spec.handler. */
1904 +avr32_handle_acall_attribute (tree * node, tree name,
1905 + tree args ATTRIBUTE_UNUSED,
1906 + int flags ATTRIBUTE_UNUSED, bool * no_add_attrs)
1908 + if (TREE_CODE (*node) == FUNCTION_TYPE || TREE_CODE (*node) == METHOD_TYPE)
1910 + warning (OPT_Wattributes,"`%s' attribute not yet supported...",
1911 + IDENTIFIER_POINTER (name));
1912 + *no_add_attrs = true;
1916 + warning (OPT_Wattributes,"`%s' attribute only applies to functions",
1917 + IDENTIFIER_POINTER (name));
1918 + *no_add_attrs = true;
1923 +/* Return 0 if the attributes for two types are incompatible, 1 if they
1924 + are compatible, and 2 if they are nearly compatible (which causes a
1925 + warning to be generated). */
1928 +avr32_comp_type_attributes (tree type1, tree type2)
1930 + int acall1, acall2, isr1, isr2, naked1, naked2;
1932 + /* Check for mismatch of non-default calling convention. */
1933 + if (TREE_CODE (type1) != FUNCTION_TYPE)
1936 + /* Check for mismatched call attributes. */
1937 + acall1 = lookup_attribute ("acall", TYPE_ATTRIBUTES (type1)) != NULL;
1938 + acall2 = lookup_attribute ("acall", TYPE_ATTRIBUTES (type2)) != NULL;
1939 + naked1 = lookup_attribute ("naked", TYPE_ATTRIBUTES (type1)) != NULL;
1940 + naked2 = lookup_attribute ("naked", TYPE_ATTRIBUTES (type2)) != NULL;
1941 + isr1 = lookup_attribute ("isr", TYPE_ATTRIBUTES (type1)) != NULL;
1943 + isr1 = lookup_attribute ("interrupt", TYPE_ATTRIBUTES (type1)) != NULL;
1945 + isr2 = lookup_attribute ("isr", TYPE_ATTRIBUTES (type2)) != NULL;
1947 + isr2 = lookup_attribute ("interrupt", TYPE_ATTRIBUTES (type2)) != NULL;
1949 + if ((acall1 && isr2)
1950 + || (acall2 && isr1) || (naked1 && isr2) || (naked2 && isr1))
1957 +/* Computes the type of the current function. */
1959 +static unsigned long
1960 +avr32_compute_func_type (void)
1962 + unsigned long type = AVR32_FT_UNKNOWN;
1966 + if (TREE_CODE (current_function_decl) != FUNCTION_DECL)
1969 + /* Decide if the current function is volatile. Such functions never
1970 + return, and many memory cycles can be saved by not storing register
1971 + values that will never be needed again. This optimization was added to
1972 + speed up context switching in a kernel application. */
1974 + && TREE_NOTHROW (current_function_decl)
1975 + && TREE_THIS_VOLATILE (current_function_decl))
1976 + type |= AVR32_FT_VOLATILE;
1978 + if (cfun->static_chain_decl != NULL)
1979 + type |= AVR32_FT_NESTED;
1981 + attr = DECL_ATTRIBUTES (current_function_decl);
1983 + a = lookup_attribute ("isr", attr);
1984 + if (a == NULL_TREE)
1985 + a = lookup_attribute ("interrupt", attr);
1987 + if (a == NULL_TREE)
1988 + type |= AVR32_FT_NORMAL;
1990 + type |= avr32_isr_value (TREE_VALUE (a));
1993 + a = lookup_attribute ("acall", attr);
1994 + if (a != NULL_TREE)
1995 + type |= AVR32_FT_ACALL;
1997 + a = lookup_attribute ("naked", attr);
1998 + if (a != NULL_TREE)
1999 + type |= AVR32_FT_NAKED;
2004 +/* Returns the type of the current function. */
2006 +static unsigned long
2007 +avr32_current_func_type (void)
2009 + if (AVR32_FUNC_TYPE (cfun->machine->func_type) == AVR32_FT_UNKNOWN)
2010 + cfun->machine->func_type = avr32_compute_func_type ();
2012 + return cfun->machine->func_type;
2016 + This target hook should return true if we should not pass type solely
2017 + in registers. The file expr.h defines a definition that is usually appropriate,
2018 + refer to expr.h for additional documentation.
2021 +avr32_must_pass_in_stack (enum machine_mode mode ATTRIBUTE_UNUSED, tree type)
2023 + if (type && AGGREGATE_TYPE_P (type)
2024 + /* If the alignment is less than the size then pass in the struct on
2026 + && ((unsigned int) TYPE_ALIGN_UNIT (type) <
2027 + (unsigned int) int_size_in_bytes (type))
2028 + /* If we support unaligned word accesses then structs of size 4 and 8
2029 + can have any alignment and still be passed in registers. */
2030 + && !(TARGET_UNALIGNED_WORD
2031 + && (int_size_in_bytes (type) == 4
2032 + || int_size_in_bytes (type) == 8))
2033 + /* Double word structs need only a word alignment. */
2034 + && !(int_size_in_bytes (type) == 8 && TYPE_ALIGN_UNIT (type) >= 4))
2037 + if (type && AGGREGATE_TYPE_P (type)
2038 + /* Structs of size 3,5,6,7 are always passed in registers. */
2039 + && (int_size_in_bytes (type) == 3
2040 + || int_size_in_bytes (type) == 5
2041 + || int_size_in_bytes (type) == 6 || int_size_in_bytes (type) == 7))
2045 + return (type && TREE_ADDRESSABLE (type));
2050 +avr32_strict_argument_naming (CUMULATIVE_ARGS * ca ATTRIBUTE_UNUSED)
2056 + This target hook should return true if an argument at the position indicated
2057 + by cum should be passed by reference. This predicate is queried after target
2058 + independent reasons for being passed by reference, such as TREE_ADDRESSABLE (type).
2060 + If the hook returns true, a copy of that argument is made in memory and a
2061 + pointer to the argument is passed instead of the argument itself. The pointer
2062 + is passed in whatever way is appropriate for passing a pointer to that type.
2065 +avr32_pass_by_reference (CUMULATIVE_ARGS * cum ATTRIBUTE_UNUSED,
2066 + enum machine_mode mode ATTRIBUTE_UNUSED,
2067 + tree type, bool named ATTRIBUTE_UNUSED)
2069 + return (type && (TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST));
2073 +avr32_arg_partial_bytes (CUMULATIVE_ARGS * pcum ATTRIBUTE_UNUSED,
2074 + enum machine_mode mode ATTRIBUTE_UNUSED,
2075 + tree type ATTRIBUTE_UNUSED,
2076 + bool named ATTRIBUTE_UNUSED)
2082 +struct gcc_target targetm = TARGET_INITIALIZER;
2085 + Table used to convert from register number in the assembler instructions and
2086 + the register numbers used in gcc.
2088 +const int avr32_function_arg_reglist[] = {
2089 + INTERNAL_REGNUM (12),
2090 + INTERNAL_REGNUM (11),
2091 + INTERNAL_REGNUM (10),
2092 + INTERNAL_REGNUM (9),
2093 + INTERNAL_REGNUM (8)
2096 +rtx avr32_compare_op0 = NULL_RTX;
2097 +rtx avr32_compare_op1 = NULL_RTX;
2098 +rtx avr32_compare_operator = NULL_RTX;
2099 +rtx avr32_acc_cache = NULL_RTX;
2102 + Returns nonzero if it is allowed to store a value of mode mode in hard
2103 + register number regno.
2106 +avr32_hard_regno_mode_ok (int regnr, enum machine_mode mode)
2108 + /* We allow only float modes in the fp-registers */
2109 + if (regnr >= FIRST_FP_REGNUM
2110 + && regnr <= LAST_FP_REGNUM && GET_MODE_CLASS (mode) != MODE_FLOAT)
2117 + case DImode: /* long long */
2118 + case DFmode: /* double */
2119 + case SCmode: /* __complex__ float */
2120 + case CSImode: /* __complex__ int */
2122 + { /* long long int not supported in r12, sp, lr
2128 + if (regnr % 2) /* long long int has to be refered in even
2134 + case CDImode: /* __complex__ long long */
2135 + case DCmode: /* __complex__ double */
2136 + case TImode: /* 16 bytes */
2139 + else if (regnr % 2)
2150 +avr32_rnd_operands (rtx add, rtx shift)
2152 + if (GET_CODE (shift) == CONST_INT &&
2153 + GET_CODE (add) == CONST_INT && INTVAL (shift) > 0)
2155 + if ((1 << (INTVAL (shift) - 1)) == INTVAL (add))
2165 +avr32_const_ok_for_constraint_p (HOST_WIDE_INT value, char c, const char *str)
2172 + HOST_WIDE_INT min_value = 0, max_value = 0;
2176 + size_str[0] = str[2];
2177 + size_str[1] = str[3];
2178 + size_str[2] = '\0';
2179 + const_size = atoi (size_str);
2181 + if (toupper (str[1]) == 'U')
2184 + max_value = (1 << const_size) - 1;
2186 + else if (toupper (str[1]) == 'S')
2188 + min_value = -(1 << (const_size - 1));
2189 + max_value = (1 << (const_size - 1)) - 1;
2197 + if (value >= min_value && value <= max_value)
2204 + return avr32_mask_upper_bits_operand (GEN_INT (value), VOIDmode);
2206 + return avr32_hi16_immediate_operand (GEN_INT (value), VOIDmode);
2213 +/*Compute mask of which floating-point registers needs saving upon
2214 + entry to this function*/
2215 +static unsigned long
2216 +avr32_compute_save_fp_reg_mask (void)
2218 + unsigned long func_type = avr32_current_func_type ();
2219 + unsigned int save_reg_mask = 0;
2221 + unsigned int max_reg = 7;
2222 + int save_all_call_used_regs = FALSE;
2224 + /* This only applies for hardware floating-point implementation. */
2225 + if (!TARGET_HARD_FLOAT)
2228 + if (IS_INTERRUPT (func_type))
2231 + /* Interrupt functions must not corrupt any registers, even call
2232 + clobbered ones. If this is a leaf function we can just examine the
2233 + registers used by the RTL, but otherwise we have to assume that
2234 + whatever function is called might clobber anything, and so we have
2235 + to save all the call-clobbered registers as well. */
2237 + save_all_call_used_regs = !current_function_is_leaf;
 2240 +      /* All registers that are used must be saved */
2241 + for (reg = 0; reg <= max_reg; reg++)
2242 + if (df_regs_ever_live_p (INTERNAL_FP_REGNUM (reg))
2243 + || (save_all_call_used_regs
2244 + && call_used_regs[INTERNAL_FP_REGNUM (reg)]))
2245 + save_reg_mask |= (1 << reg);
2247 + return save_reg_mask;
2250 +/*Compute mask of registers which needs saving upon function entry */
2251 +static unsigned long
2252 +avr32_compute_save_reg_mask (int push)
2254 + unsigned long func_type;
2255 + unsigned int save_reg_mask = 0;
2258 + func_type = avr32_current_func_type ();
2260 + if (IS_INTERRUPT (func_type))
2262 + unsigned int max_reg = 12;
2265 + /* Get the banking scheme for the interrupt */
2266 + switch (func_type)
2268 + case AVR32_FT_ISR_FULL:
2271 + case AVR32_FT_ISR_HALF:
2274 + case AVR32_FT_ISR_NONE:
2279 + /* Interrupt functions must not corrupt any registers, even call
2280 + clobbered ones. If this is a leaf function we can just examine the
2281 + registers used by the RTL, but otherwise we have to assume that
2282 + whatever function is called might clobber anything, and so we have
2283 + to save all the call-clobbered registers as well. */
2285 + /* Need not push the registers r8-r12 for AVR32A architectures, as this
 2286 +         is automatically done in hardware. We also do not have any shadow
2288 + if (TARGET_UARCH_AVR32A)
2291 + func_type = AVR32_FT_ISR_NONE;
 2294 +      /* All registers which are used and are not shadowed must be saved */
2295 + for (reg = 0; reg <= max_reg; reg++)
2296 + if (df_regs_ever_live_p (INTERNAL_REGNUM (reg))
2297 + || (!current_function_is_leaf
2298 + && call_used_regs[INTERNAL_REGNUM (reg)]))
2299 + save_reg_mask |= (1 << reg);
2302 + if ((df_regs_ever_live_p (LR_REGNUM)
2303 + || !current_function_is_leaf || frame_pointer_needed)
2304 + /* Only non-shadowed register models */
2305 + && (func_type == AVR32_FT_ISR_NONE))
2306 + save_reg_mask |= (1 << ASM_REGNUM (LR_REGNUM));
2308 + /* Make sure that the GOT register is pushed. */
2309 + if (max_reg >= ASM_REGNUM (PIC_OFFSET_TABLE_REGNUM)
2310 + && current_function_uses_pic_offset_table)
2311 + save_reg_mask |= (1 << ASM_REGNUM (PIC_OFFSET_TABLE_REGNUM));
2316 + int use_pushm = optimize_size;
2318 + /* In the normal case we only need to save those registers which are
2319 + call saved and which are used by this function. */
2320 + for (reg = 0; reg <= 7; reg++)
2321 + if (df_regs_ever_live_p (INTERNAL_REGNUM (reg))
2322 + && !call_used_regs[INTERNAL_REGNUM (reg)])
2323 + save_reg_mask |= (1 << reg);
2325 + /* Make sure that the GOT register is pushed. */
2326 + if (current_function_uses_pic_offset_table)
2327 + save_reg_mask |= (1 << ASM_REGNUM (PIC_OFFSET_TABLE_REGNUM));
2330 + /* If we optimize for size and do not have anonymous arguments: use
2331 + popm/pushm always */
2334 + if ((save_reg_mask & (1 << 0))
2335 + || (save_reg_mask & (1 << 1))
2336 + || (save_reg_mask & (1 << 2)) || (save_reg_mask & (1 << 3)))
2337 + save_reg_mask |= 0xf;
2339 + if ((save_reg_mask & (1 << 4))
2340 + || (save_reg_mask & (1 << 5))
2341 + || (save_reg_mask & (1 << 6)) || (save_reg_mask & (1 << 7)))
2342 + save_reg_mask |= 0xf0;
2344 + if ((save_reg_mask & (1 << 8)) || (save_reg_mask & (1 << 9)))
2345 + save_reg_mask |= 0x300;
2350 + if ((df_regs_ever_live_p (LR_REGNUM)
2351 + || !current_function_is_leaf
2354 + && !current_function_calls_eh_return) || frame_pointer_needed))
2357 + /* Never pop LR into PC for functions which
2358 + calls __builtin_eh_return, since we need to
2359 + fix the SP after the restoring of the registers
2360 + and before returning. */
2361 + || current_function_calls_eh_return)
2364 + save_reg_mask |= (1 << ASM_REGNUM (LR_REGNUM));
2369 + save_reg_mask |= (1 << ASM_REGNUM (PC_REGNUM));
2375 + /* Save registers so the exception handler can modify them. */
2376 + if (current_function_calls_eh_return)
2382 + reg = EH_RETURN_DATA_REGNO (i);
2383 + if (reg == INVALID_REGNUM)
2385 + save_reg_mask |= 1 << ASM_REGNUM (reg);
2389 + return save_reg_mask;
2392 +/*Compute total size in bytes of all saved registers */
2394 +avr32_get_reg_mask_size (int reg_mask)
2399 + for (reg = 0; reg <= 15; reg++)
2400 + if (reg_mask & (1 << reg))
2406 +/*Get a register from one of the registers which are saved onto the stack
2407 + upon function entry */
2410 +avr32_get_saved_reg (int save_reg_mask)
2414 + /* Find the first register which is saved in the saved_reg_mask */
2415 + for (reg = 0; reg <= 15; reg++)
2416 + if (save_reg_mask & (1 << reg))
2422 +/* Return 1 if it is possible to return using a single instruction. */
2424 +avr32_use_return_insn (int iscond)
2426 + unsigned int func_type = avr32_current_func_type ();
2427 + unsigned long saved_int_regs;
2428 + unsigned long saved_fp_regs;
2430 + /* Never use a return instruction before reload has run. */
2431 + if (!reload_completed)
2434 + /* Must adjust the stack for vararg functions. */
2435 + if (current_function_args_info.uses_anonymous_args)
 2438 +  /* If there is a stack adjustment. */
2439 + if (get_frame_size ())
2442 + saved_int_regs = avr32_compute_save_reg_mask (TRUE);
2443 + saved_fp_regs = avr32_compute_save_fp_reg_mask ();
2445 + /* Functions which have saved fp-regs on the stack can not be performed in
2446 + one instruction */
2447 + if (saved_fp_regs)
2450 + /* Conditional returns can not be performed in one instruction if we need
2451 + to restore registers from the stack */
2452 + if (iscond && saved_int_regs)
2455 + /* Conditional return can not be used for interrupt handlers. */
2456 + if (iscond && IS_INTERRUPT (func_type))
2459 + /* For interrupt handlers which needs to pop registers */
2460 + if (saved_int_regs && IS_INTERRUPT (func_type))
2464 + /* If there are saved registers but the LR isn't saved, then we need two
2465 + instructions for the return. */
2466 + if (saved_int_regs && !(saved_int_regs & (1 << ASM_REGNUM (LR_REGNUM))))
2474 +/*Generate some function prologue info in the assembly file*/
2477 +avr32_target_asm_function_prologue (FILE * f, HOST_WIDE_INT frame_size)
2479 + if (IS_NAKED (avr32_current_func_type ()))
2481 + "\t# Function is naked: Prologue and epilogue provided by programmer\n");
2483 + if (IS_INTERRUPT (avr32_current_func_type ()))
2485 + switch (avr32_current_func_type ())
2487 + case AVR32_FT_ISR_FULL:
2489 + "\t# Interrupt Function: Fully shadowed register file\n");
2491 + case AVR32_FT_ISR_HALF:
2493 + "\t# Interrupt Function: Half shadowed register file\n");
2496 + case AVR32_FT_ISR_NONE:
2497 + fprintf (f, "\t# Interrupt Function: No shadowed register file\n");
2503 + fprintf (f, "\t# args = %i, frame = %li, pretend = %i\n",
2504 + current_function_args_size, frame_size,
2505 + current_function_pretend_args_size);
2507 + fprintf (f, "\t# frame_needed = %i, leaf_function = %i\n",
2508 + frame_pointer_needed, current_function_is_leaf);
2510 + fprintf (f, "\t# uses_anonymous_args = %i\n",
2511 + current_function_args_info.uses_anonymous_args);
2512 + if (current_function_calls_eh_return)
2513 + fprintf (f, "\t# Calls __builtin_eh_return.\n");
2518 +/* Generate and emit an insn that we will recognize as a pushm or stm.
2519 + Unfortunately, since this insn does not reflect very well the actual
2520 + semantics of the operation, we need to annotate the insn for the benefit
2521 + of DWARF2 frame unwind information. */
2523 +int avr32_convert_to_reglist16 (int reglist8_vect);
2526 +emit_multi_reg_push (int reglist, int usePUSHM)
2538 + insn = emit_insn (gen_pushm (gen_rtx_CONST_INT (SImode, reglist)));
2539 + reglist = avr32_convert_to_reglist16 (reglist);
2543 + insn = emit_insn (gen_stm (stack_pointer_rtx,
2544 + gen_rtx_CONST_INT (SImode, reglist),
2545 + gen_rtx_CONST_INT (SImode, 1)));
2548 + nr_regs = avr32_get_reg_mask_size (reglist) / 4;
2549 + dwarf = gen_rtx_SEQUENCE (VOIDmode, rtvec_alloc (nr_regs + 1));
2551 + for (i = 15; i >= 0; i--)
2553 + if (reglist & (1 << i))
2555 + reg = gen_rtx_REG (SImode, INTERNAL_REGNUM (i));
2556 + tmp = gen_rtx_SET (VOIDmode,
2557 + gen_rtx_MEM (SImode,
2558 + plus_constant (stack_pointer_rtx,
2559 + 4 * index)), reg);
2560 + RTX_FRAME_RELATED_P (tmp) = 1;
2561 + XVECEXP (dwarf, 0, 1 + index++) = tmp;
2565 + tmp = gen_rtx_SET (SImode,
2566 + stack_pointer_rtx,
2567 + gen_rtx_PLUS (SImode,
2568 + stack_pointer_rtx,
2569 + GEN_INT (-4 * nr_regs)));
2570 + RTX_FRAME_RELATED_P (tmp) = 1;
2571 + XVECEXP (dwarf, 0, 0) = tmp;
2572 + REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR, dwarf,
2573 + REG_NOTES (insn));
2579 +emit_multi_fp_reg_push (int reglist)
2589 + insn = emit_insn (gen_stm_fp (stack_pointer_rtx,
2590 + gen_rtx_CONST_INT (SImode, reglist),
2591 + gen_rtx_CONST_INT (SImode, 1)));
2593 + nr_regs = avr32_get_reg_mask_size (reglist) / 4;
2594 + dwarf = gen_rtx_SEQUENCE (VOIDmode, rtvec_alloc (nr_regs + 1));
2596 + for (i = 15; i >= 0; i--)
2598 + if (reglist & (1 << i))
2600 + reg = gen_rtx_REG (SImode, INTERNAL_FP_REGNUM (i));
2601 + tmp = gen_rtx_SET (VOIDmode,
2602 + gen_rtx_MEM (SImode,
2603 + plus_constant (stack_pointer_rtx,
2604 + 4 * index)), reg);
2605 + RTX_FRAME_RELATED_P (tmp) = 1;
2606 + XVECEXP (dwarf, 0, 1 + index++) = tmp;
2610 + tmp = gen_rtx_SET (SImode,
2611 + stack_pointer_rtx,
2612 + gen_rtx_PLUS (SImode,
2613 + stack_pointer_rtx,
2614 + GEN_INT (-4 * nr_regs)));
2615 + RTX_FRAME_RELATED_P (tmp) = 1;
2616 + XVECEXP (dwarf, 0, 0) = tmp;
2617 + REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR, dwarf,
2618 + REG_NOTES (insn));
2623 +avr32_gen_load_multiple (rtx * regs, int count, rtx from,
2624 + int write_back, int in_struct_p, int scalar_p)
2631 + gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (count + (write_back ? 1 : 0)));
2635 + XVECEXP (result, 0, 0)
2636 + = gen_rtx_SET (GET_MODE (from), from,
2637 + plus_constant (from, count * 4));
2643 + for (j = 0; i < count; i++, j++)
2646 + rtx mem = gen_rtx_MEM (SImode, plus_constant (from, j * 4));
2647 + MEM_IN_STRUCT_P (mem) = in_struct_p;
2648 + MEM_SCALAR_P (mem) = scalar_p;
2649 + unspec = gen_rtx_UNSPEC (VOIDmode, gen_rtvec (1, mem), UNSPEC_LDM);
2650 + XVECEXP (result, 0, i) = gen_rtx_SET (VOIDmode, regs[j], unspec);
2658 +avr32_gen_store_multiple (rtx * regs, int count, rtx to,
2659 + int in_struct_p, int scalar_p)
2664 + result = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (count));
2666 + for (j = 0; i < count; i++, j++)
2668 + rtx mem = gen_rtx_MEM (SImode, plus_constant (to, j * 4));
2669 + MEM_IN_STRUCT_P (mem) = in_struct_p;
2670 + MEM_SCALAR_P (mem) = scalar_p;
2671 + XVECEXP (result, 0, i)
2672 + = gen_rtx_SET (VOIDmode, mem,
2673 + gen_rtx_UNSPEC (VOIDmode,
2674 + gen_rtvec (1, regs[j]),
2675 + UNSPEC_STORE_MULTIPLE));
2682 +/* Move a block of memory if it is word aligned or we support unaligned
2683 + word memory accesses. The size must be maximum 64 bytes. */
2686 +avr32_gen_movmemsi (rtx * operands)
2688 + HOST_WIDE_INT bytes_to_go;
2690 + rtx st_src, st_dst;
2691 + int src_offset = 0, dst_offset = 0;
2693 + int dst_in_struct_p, src_in_struct_p;
2694 + int dst_scalar_p, src_scalar_p;
2697 + if (GET_CODE (operands[2]) != CONST_INT
2698 + || GET_CODE (operands[3]) != CONST_INT
2699 + || INTVAL (operands[2]) > 64
2700 + || ((INTVAL (operands[3]) & 3) && !TARGET_UNALIGNED_WORD))
2703 + unaligned = (INTVAL (operands[3]) & 3) != 0;
2707 + st_dst = XEXP (operands[0], 0);
2708 + st_src = XEXP (operands[1], 0);
2710 + dst_in_struct_p = MEM_IN_STRUCT_P (operands[0]);
2711 + dst_scalar_p = MEM_SCALAR_P (operands[0]);
2712 + src_in_struct_p = MEM_IN_STRUCT_P (operands[1]);
2713 + src_scalar_p = MEM_SCALAR_P (operands[1]);
2715 + dst = copy_to_mode_reg (SImode, st_dst);
2716 + src = copy_to_mode_reg (SImode, st_src);
2718 + bytes_to_go = INTVAL (operands[2]);
2720 + while (bytes_to_go)
2722 + enum machine_mode move_mode;
2723 + /* (Seems to be a problem with reloads for the movti pattern so this is
2724 + disabled until that problem is resolved)
2725 + UPDATE: Problem seems to be solved now.... */
2726 + if (bytes_to_go >= GET_MODE_SIZE (TImode) && !unaligned
2727 + /* Do not emit ldm/stm for UC3 as ld.d/st.d is more optimal. */
2728 + && !TARGET_ARCH_UC)
2729 + move_mode = TImode;
2730 + else if ((bytes_to_go >= GET_MODE_SIZE (DImode)) && !unaligned)
2731 + move_mode = DImode;
2732 + else if (bytes_to_go >= GET_MODE_SIZE (SImode))
2733 + move_mode = SImode;
2735 + move_mode = QImode;
2739 + rtx dst_mem = gen_rtx_MEM (move_mode,
2740 + gen_rtx_PLUS (SImode, dst,
2741 + GEN_INT (dst_offset)));
2742 + dst_offset += GET_MODE_SIZE (move_mode);
2743 + if ( 0 /* This causes an error in GCC. Think there is
2744 + something wrong in the gcse pass which causes REQ_EQUIV notes
2745 + to be wrong so disabling it for now. */
2746 + && move_mode == TImode
2747 + && INTVAL (operands[2]) > GET_MODE_SIZE (TImode) )
2749 + src_mem = gen_rtx_MEM (move_mode,
2750 + gen_rtx_POST_INC (SImode, src));
2754 + src_mem = gen_rtx_MEM (move_mode,
2755 + gen_rtx_PLUS (SImode, src,
2756 + GEN_INT (src_offset)));
2757 + src_offset += GET_MODE_SIZE (move_mode);
2760 + bytes_to_go -= GET_MODE_SIZE (move_mode);
2762 + MEM_IN_STRUCT_P (dst_mem) = dst_in_struct_p;
2763 + MEM_SCALAR_P (dst_mem) = dst_scalar_p;
2765 + MEM_IN_STRUCT_P (src_mem) = src_in_struct_p;
2766 + MEM_SCALAR_P (src_mem) = src_scalar_p;
2767 + emit_move_insn (dst_mem, src_mem);
2777 +/*Expand the prologue instruction*/
2779 +avr32_expand_prologue (void)
2782 + unsigned long saved_reg_mask, saved_fp_reg_mask;
 2785 +  /* Naked functions do not have a prologue */
2786 + if (IS_NAKED (avr32_current_func_type ()))
2789 + saved_reg_mask = avr32_compute_save_reg_mask (TRUE);
2791 + if (saved_reg_mask)
2793 + /* Must push used registers */
2795 + /* Should we use POPM or LDM? */
2796 + int usePUSHM = TRUE;
2798 + if (((saved_reg_mask & (1 << 0)) ||
2799 + (saved_reg_mask & (1 << 1)) ||
2800 + (saved_reg_mask & (1 << 2)) || (saved_reg_mask & (1 << 3))))
2802 + /* One of R0-R3 should at least be pushed */
2803 + if (((saved_reg_mask & (1 << 0)) &&
2804 + (saved_reg_mask & (1 << 1)) &&
2805 + (saved_reg_mask & (1 << 2)) && (saved_reg_mask & (1 << 3))))
2807 + /* All should be pushed */
2816 + if (((saved_reg_mask & (1 << 4)) ||
2817 + (saved_reg_mask & (1 << 5)) ||
2818 + (saved_reg_mask & (1 << 6)) || (saved_reg_mask & (1 << 7))))
2820 + /* One of R4-R7 should at least be pushed */
2821 + if (((saved_reg_mask & (1 << 4)) &&
2822 + (saved_reg_mask & (1 << 5)) &&
2823 + (saved_reg_mask & (1 << 6)) && (saved_reg_mask & (1 << 7))))
2826 + /* All should be pushed */
2835 + if (((saved_reg_mask & (1 << 8)) || (saved_reg_mask & (1 << 9))))
2837 + /* One of R8-R9 should at least be pushed */
2838 + if (((saved_reg_mask & (1 << 8)) && (saved_reg_mask & (1 << 9))))
2841 + /* All should be pushed */
2850 + if (saved_reg_mask & (1 << 10))
2853 + if (saved_reg_mask & (1 << 11))
2856 + if (saved_reg_mask & (1 << 12))
2859 + if (saved_reg_mask & (1 << ASM_REGNUM (LR_REGNUM)))
2867 + insn = emit_multi_reg_push (reglist8, TRUE);
2871 + insn = emit_multi_reg_push (saved_reg_mask, FALSE);
2873 + RTX_FRAME_RELATED_P (insn) = 1;
2875 + /* Prevent this instruction from being scheduled after any other
2877 + emit_insn (gen_blockage ());
2880 + saved_fp_reg_mask = avr32_compute_save_fp_reg_mask ();
2881 + if (saved_fp_reg_mask)
2883 + insn = emit_multi_fp_reg_push (saved_fp_reg_mask);
2884 + RTX_FRAME_RELATED_P (insn) = 1;
2886 + /* Prevent this instruction from being scheduled after any other
2888 + emit_insn (gen_blockage ());
2891 + /* Set frame pointer */
2892 + if (frame_pointer_needed)
2894 + insn = emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
2895 + RTX_FRAME_RELATED_P (insn) = 1;
2898 + if (get_frame_size () > 0)
2900 + if (avr32_const_ok_for_constraint_p (get_frame_size (), 'K', "Ks21"))
2902 + insn = emit_insn (gen_rtx_SET (SImode,
2903 + stack_pointer_rtx,
2904 + gen_rtx_PLUS (SImode,
2905 + stack_pointer_rtx,
2910 + RTX_FRAME_RELATED_P (insn) = 1;
2914 + /* Immediate is larger than k21 We must either check if we can use
 2915 +	     one of the pushed registers as temporary storage or we must
2916 + make us a temp register by pushing a register to the stack. */
2917 + rtx temp_reg, const_pool_entry, insn;
2918 + if (saved_reg_mask)
2921 + gen_rtx_REG (SImode,
2922 + INTERNAL_REGNUM (avr32_get_saved_reg
2923 + (saved_reg_mask)));
2927 + temp_reg = gen_rtx_REG (SImode, INTERNAL_REGNUM (7));
2928 + emit_move_insn (gen_rtx_MEM
2930 + gen_rtx_PRE_DEC (SImode, stack_pointer_rtx)),
2934 + const_pool_entry =
2935 + force_const_mem (SImode,
2936 + gen_rtx_CONST_INT (SImode, get_frame_size ()));
2937 + emit_move_insn (temp_reg, const_pool_entry);
2939 + insn = emit_insn (gen_rtx_SET (SImode,
2940 + stack_pointer_rtx,
2941 + gen_rtx_MINUS (SImode,
2942 + stack_pointer_rtx,
2945 + dwarf = gen_rtx_SET (VOIDmode, stack_pointer_rtx,
2946 + gen_rtx_PLUS (SImode, stack_pointer_rtx,
2947 + GEN_INT (-get_frame_size ())));
2948 + REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
2949 + dwarf, REG_NOTES (insn));
2950 + RTX_FRAME_RELATED_P (insn) = 1;
2952 + if (!saved_reg_mask)
2955 + emit_move_insn (temp_reg,
2956 + gen_rtx_MEM (SImode,
2957 + gen_rtx_POST_INC (SImode,
2963 + /* Mark the temp register as dead */
2964 + REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_DEAD, temp_reg,
2965 + REG_NOTES (insn));
 2970 +      /* Prevent the stack adjustment from being scheduled after any
2971 + instructions using the frame pointer. */
2972 + emit_insn (gen_blockage ());
2978 + avr32_load_pic_register ();
2980 + /* gcc does not know that load or call instructions might use the pic
2981 + register so it might schedule these instructions before the loading
2982 + of the pic register. To avoid this emit a barrier for now. TODO!
2983 + Find out a better way to let gcc know which instructions might use
2984 + the pic register. */
2985 + emit_insn (gen_blockage ());
2991 +avr32_set_return_address (rtx source, rtx scratch)
2994 + unsigned long saved_regs;
2996 + saved_regs = avr32_compute_save_reg_mask (TRUE);
2998 + if (!(saved_regs & (1 << ASM_REGNUM (LR_REGNUM))))
2999 + emit_move_insn (gen_rtx_REG (Pmode, LR_REGNUM), source);
3002 + if (frame_pointer_needed)
3003 + addr = gen_rtx_REG (Pmode, FRAME_POINTER_REGNUM);
3005 + if (avr32_const_ok_for_constraint_p (get_frame_size (), 'K', "Ks16"))
3007 + addr = plus_constant (stack_pointer_rtx, get_frame_size ());
3011 + emit_insn (gen_movsi (scratch, GEN_INT (get_frame_size ())));
3014 + emit_move_insn (gen_rtx_MEM (Pmode, addr), source);
3020 +/* Return the length of INSN. LENGTH is the initial length computed by
3021 + attributes in the machine-description file. */
3024 +avr32_adjust_insn_length (rtx insn ATTRIBUTE_UNUSED,
3025 + int length ATTRIBUTE_UNUSED)
3031 +avr32_output_return_instruction (int single_ret_inst ATTRIBUTE_UNUSED,
3032 + int iscond ATTRIBUTE_UNUSED,
3033 + rtx cond ATTRIBUTE_UNUSED, rtx r12_imm)
3036 + unsigned long saved_reg_mask, saved_fp_reg_mask;
3037 + int insert_ret = TRUE;
3039 + int stack_adjustment = get_frame_size ();
3040 + unsigned int func_type = avr32_current_func_type ();
3041 + FILE *f = asm_out_file;
 3043 +  /* Naked functions do not have an epilogue */
3044 + if (IS_NAKED (func_type))
3047 + saved_fp_reg_mask = avr32_compute_save_fp_reg_mask ();
3049 + saved_reg_mask = avr32_compute_save_reg_mask (FALSE);
3051 + /* Reset frame pointer */
3052 + if (stack_adjustment > 0)
3054 + if (avr32_const_ok_for_constraint_p (stack_adjustment, 'I', "Is21"))
3056 + fprintf (f, "\tsub\tsp, %i # Reset Frame Pointer\n",
3057 + -stack_adjustment);
3061 + /* TODO! Is it safe to use r8 as scratch?? */
3062 + fprintf (f, "\tmov\tr8, lo(%i) # Reset Frame Pointer\n",
3063 + -stack_adjustment);
3064 + fprintf (f, "\torh\tr8, hi(%i) # Reset Frame Pointer\n",
3065 + -stack_adjustment);
3066 + fprintf (f, "\tadd\tsp, r8 # Reset Frame Pointer\n");
3070 + if (saved_fp_reg_mask)
3072 + char reglist[64]; /* 64 bytes should be enough... */
3073 + avr32_make_fp_reglist_w (saved_fp_reg_mask, (char *) reglist);
3074 + fprintf (f, "\tldcm.w\tcp0, sp++, %s\n", reglist);
3075 + if (saved_fp_reg_mask & ~0xff)
3077 + saved_fp_reg_mask &= ~0xff;
3078 + avr32_make_fp_reglist_d (saved_fp_reg_mask, (char *) reglist);
3079 + fprintf (f, "\tldcm.d\tcp0, sp++, %s\n", reglist);
3083 + if (saved_reg_mask)
3085 + /* Must pop used registers */
3087 + /* Should we use POPM or LDM? */
3088 + int usePOPM = TRUE;
3089 + if (((saved_reg_mask & (1 << 0)) ||
3090 + (saved_reg_mask & (1 << 1)) ||
3091 + (saved_reg_mask & (1 << 2)) || (saved_reg_mask & (1 << 3))))
3093 + /* One of R0-R3 should at least be popped */
3094 + if (((saved_reg_mask & (1 << 0)) &&
3095 + (saved_reg_mask & (1 << 1)) &&
3096 + (saved_reg_mask & (1 << 2)) && (saved_reg_mask & (1 << 3))))
3098 + /* All should be popped */
3107 + if (((saved_reg_mask & (1 << 4)) ||
3108 + (saved_reg_mask & (1 << 5)) ||
3109 + (saved_reg_mask & (1 << 6)) || (saved_reg_mask & (1 << 7))))
3111 + /* One of R0-R3 should at least be popped */
3112 + if (((saved_reg_mask & (1 << 4)) &&
3113 + (saved_reg_mask & (1 << 5)) &&
3114 + (saved_reg_mask & (1 << 6)) && (saved_reg_mask & (1 << 7))))
3117 + /* All should be popped */
3126 + if (((saved_reg_mask & (1 << 8)) || (saved_reg_mask & (1 << 9))))
3128 + /* One of R8-R9 should at least be pushed */
3129 + if (((saved_reg_mask & (1 << 8)) && (saved_reg_mask & (1 << 9))))
3132 + /* All should be pushed */
3141 + if (saved_reg_mask & (1 << 10))
3144 + if (saved_reg_mask & (1 << 11))
3147 + if (saved_reg_mask & (1 << 12))
3150 + if (saved_reg_mask & (1 << ASM_REGNUM (LR_REGNUM)))
3154 + if (saved_reg_mask & (1 << ASM_REGNUM (PC_REGNUM)))
3155 + /* Pop LR into PC. */
3160 + char reglist[64]; /* 64 bytes should be enough... */
3161 + avr32_make_reglist8 (reglist8, (char *) reglist);
3163 + if (reglist8 & 0x80)
3164 + /* This instruction is also a return */
3165 + insert_ret = FALSE;
3167 + if (r12_imm && !insert_ret)
3168 + fprintf (f, "\tpopm\t%s, r12=%li\n", reglist, INTVAL (r12_imm));
3170 + fprintf (f, "\tpopm\t%s\n", reglist);
3175 + char reglist[64]; /* 64 bytes should be enough... */
3176 + avr32_make_reglist16 (saved_reg_mask, (char *) reglist);
3177 + if (saved_reg_mask & (1 << ASM_REGNUM (PC_REGNUM)))
3178 + /* This instruction is also a return */
3179 + insert_ret = FALSE;
3181 + if (r12_imm && !insert_ret)
3182 + fprintf (f, "\tldm\tsp++, %s, r12=%li\n", reglist,
3183 + INTVAL (r12_imm));
3185 + fprintf (f, "\tldm\tsp++, %s\n", reglist);
3191 + /* Stack adjustment for exception handler. */
3192 + if (current_function_calls_eh_return)
3193 + fprintf (f, "\tadd\tsp, r%d\n", ASM_REGNUM (EH_RETURN_STACKADJ_REGNO));
3196 + if (IS_INTERRUPT (func_type))
3198 + fprintf (f, "\trete\n");
3200 + else if (insert_ret)
3203 + fprintf (f, "\tretal\t%li\n", INTVAL (r12_imm));
3205 + fprintf (f, "\tretal\tr12\n");
3209 +/* Function for converting a fp-register mask to a
3210 + reglistCPD8 register list string. */
3212 +avr32_make_fp_reglist_d (int reglist_mask, char *reglist_string)
3216 + /* Make sure reglist_string is empty */
3217 + reglist_string[0] = '\0';
3219 + for (i = 0; i < NUM_FP_REGS; i += 2)
3221 + if (reglist_mask & (1 << i))
3223 + strlen (reglist_string) ?
3224 + sprintf (reglist_string, "%s, %s-%s", reglist_string,
3225 + reg_names[INTERNAL_FP_REGNUM (i)],
3226 + reg_names[INTERNAL_FP_REGNUM (i + 1)]) :
3227 + sprintf (reglist_string, "%s-%s",
3228 + reg_names[INTERNAL_FP_REGNUM (i)],
3229 + reg_names[INTERNAL_FP_REGNUM (i + 1)]);
3234 +/* Function for converting a fp-register mask to a
3235 + reglistCP8 register list string. */
3237 +avr32_make_fp_reglist_w (int reglist_mask, char *reglist_string)
3241 + /* Make sure reglist_string is empty */
3242 + reglist_string[0] = '\0';
3244 + for (i = 0; i < NUM_FP_REGS; ++i)
3246 + if (reglist_mask & (1 << i))
3248 + strlen (reglist_string) ?
3249 + sprintf (reglist_string, "%s, %s", reglist_string,
3250 + reg_names[INTERNAL_FP_REGNUM (i)]) :
3251 + sprintf (reglist_string, "%s", reg_names[INTERNAL_FP_REGNUM (i)]);
3257 +avr32_make_reglist16 (int reglist16_vect, char *reglist16_string)
3261 + /* Make sure reglist16_string is empty */
3262 + reglist16_string[0] = '\0';
3264 + for (i = 0; i < 16; ++i)
3266 + if (reglist16_vect & (1 << i))
3268 + strlen (reglist16_string) ?
3269 + sprintf (reglist16_string, "%s, %s", reglist16_string,
3270 + reg_names[INTERNAL_REGNUM (i)]) :
3271 + sprintf (reglist16_string, "%s", reg_names[INTERNAL_REGNUM (i)]);
3277 +avr32_convert_to_reglist16 (int reglist8_vect)
3279 + int reglist16_vect = 0;
3280 + if (reglist8_vect & 0x1)
3281 + reglist16_vect |= 0xF;
3282 + if (reglist8_vect & 0x2)
3283 + reglist16_vect |= 0xF0;
3284 + if (reglist8_vect & 0x4)
3285 + reglist16_vect |= 0x300;
3286 + if (reglist8_vect & 0x8)
3287 + reglist16_vect |= 0x400;
3288 + if (reglist8_vect & 0x10)
3289 + reglist16_vect |= 0x800;
3290 + if (reglist8_vect & 0x20)
3291 + reglist16_vect |= 0x1000;
3292 + if (reglist8_vect & 0x40)
3293 + reglist16_vect |= 0x4000;
3294 + if (reglist8_vect & 0x80)
3295 + reglist16_vect |= 0x8000;
3297 + return reglist16_vect;
3301 +avr32_make_reglist8 (int reglist8_vect, char *reglist8_string)
3303 + /* Make sure reglist8_string is empty */
3304 + reglist8_string[0] = '\0';
3306 + if (reglist8_vect & 0x1)
3307 + sprintf (reglist8_string, "r0-r3");
3308 + if (reglist8_vect & 0x2)
3309 + strlen (reglist8_string) ? sprintf (reglist8_string, "%s, r4-r7",
3310 + reglist8_string) :
3311 + sprintf (reglist8_string, "r4-r7");
3312 + if (reglist8_vect & 0x4)
3313 + strlen (reglist8_string) ? sprintf (reglist8_string, "%s, r8-r9",
3314 + reglist8_string) :
3315 + sprintf (reglist8_string, "r8-r9");
3316 + if (reglist8_vect & 0x8)
3317 + strlen (reglist8_string) ? sprintf (reglist8_string, "%s, r10",
3318 + reglist8_string) :
3319 + sprintf (reglist8_string, "r10");
3320 + if (reglist8_vect & 0x10)
3321 + strlen (reglist8_string) ? sprintf (reglist8_string, "%s, r11",
3322 + reglist8_string) :
3323 + sprintf (reglist8_string, "r11");
3324 + if (reglist8_vect & 0x20)
3325 + strlen (reglist8_string) ? sprintf (reglist8_string, "%s, r12",
3326 + reglist8_string) :
3327 + sprintf (reglist8_string, "r12");
3328 + if (reglist8_vect & 0x40)
3329 + strlen (reglist8_string) ? sprintf (reglist8_string, "%s, lr",
3330 + reglist8_string) :
3331 + sprintf (reglist8_string, "lr");
3332 + if (reglist8_vect & 0x80)
3333 + strlen (reglist8_string) ? sprintf (reglist8_string, "%s, pc",
3334 + reglist8_string) :
3335 + sprintf (reglist8_string, "pc");
3339 +avr32_eh_return_data_regno (int n)
3341 + if (n >= 0 && n <= 3)
3344 + return INVALID_REGNUM;
3347 +/* Compute the distance from register FROM to register TO.
3348 + These can be the arg pointer, the frame pointer or
3349 + the stack pointer.
3350 + Typical stack layout looks like this:
3352 + old stack pointer -> | |
3355 + | | saved arguments for
3356 + | | vararg functions
3357 + arg_pointer -> | | /
3367 + stack ptr --> | | /
 3375 +  For a given function some or all of these stack components
3376 + may not be needed, giving rise to the possibility of
3377 + eliminating some of the registers.
3379 + The values returned by this function must reflect the behaviour
3380 + of avr32_expand_prologue() and avr32_compute_save_reg_mask().
3382 + The sign of the number returned reflects the direction of stack
3383 + growth, so the values are positive for all eliminations except
3384 + from the soft frame pointer to the hard frame pointer. */
3388 +avr32_initial_elimination_offset (int from, int to)
3391 + int call_saved_regs = 0;
3392 + unsigned long saved_reg_mask, saved_fp_reg_mask;
3393 + unsigned int local_vars = get_frame_size ();
3395 + saved_reg_mask = avr32_compute_save_reg_mask (TRUE);
3396 + saved_fp_reg_mask = avr32_compute_save_fp_reg_mask ();
3398 + for (i = 0; i < 16; ++i)
3400 + if (saved_reg_mask & (1 << i))
3401 + call_saved_regs += 4;
3404 + for (i = 0; i < NUM_FP_REGS; ++i)
3406 + if (saved_fp_reg_mask & (1 << i))
3407 + call_saved_regs += 4;
3412 + case ARG_POINTER_REGNUM:
3415 + case STACK_POINTER_REGNUM:
3416 + return call_saved_regs + local_vars;
3417 + case FRAME_POINTER_REGNUM:
3418 + return call_saved_regs;
3422 + case FRAME_POINTER_REGNUM:
3425 + case STACK_POINTER_REGNUM:
3426 + return local_vars;
3437 + Returns a rtx used when passing the next argument to a function.
 3438 +  avr32_init_cumulative_args() and avr32_function_arg_advance() set which
3442 +avr32_function_arg (CUMULATIVE_ARGS * cum, enum machine_mode mode,
3443 + tree type, int named)
3447 + HOST_WIDE_INT arg_size, arg_rsize;
3450 + arg_size = int_size_in_bytes (type);
3454 + arg_size = GET_MODE_SIZE (mode);
3456 + arg_rsize = PUSH_ROUNDING (arg_size);
3459 + The last time this macro is called, it is called with mode == VOIDmode,
3460 + and its result is passed to the call or call_value pattern as operands 2
3461 + and 3 respectively. */
3462 + if (mode == VOIDmode)
3464 + return gen_rtx_CONST_INT (SImode, 22); /* ToDo: fixme. */
3467 + if ((*targetm.calls.must_pass_in_stack) (mode, type) || !named)
3472 + if (arg_rsize == 8)
3474 + /* use r11:r10 or r9:r8. */
3475 + if (!(GET_USED_INDEX (cum, 1) || GET_USED_INDEX (cum, 2)))
3477 + else if (!(GET_USED_INDEX (cum, 3) || GET_USED_INDEX (cum, 4)))
3482 + else if (arg_rsize == 4)
3483 + { /* Use first available register */
3485 + while (index <= LAST_CUM_REG_INDEX && GET_USED_INDEX (cum, index))
3487 + if (index > LAST_CUM_REG_INDEX)
3491 + SET_REG_INDEX (cum, index);
3493 + if (GET_REG_INDEX (cum) >= 0)
3494 + return gen_rtx_REG (mode,
3495 + avr32_function_arg_reglist[GET_REG_INDEX (cum)]);
3501 + Set the register used for passing the first argument to a function.
3504 +avr32_init_cumulative_args (CUMULATIVE_ARGS * cum,
3505 + tree fntype ATTRIBUTE_UNUSED,
3506 + rtx libname ATTRIBUTE_UNUSED,
3507 + tree fndecl ATTRIBUTE_UNUSED)
3509 + /* Set all registers as unused. */
3510 + SET_INDEXES_UNUSED (cum);
3512 + /* Reset uses_anonymous_args */
3513 + cum->uses_anonymous_args = 0;
3515 + /* Reset size of stack pushed arguments */
3516 + cum->stack_pushed_args_size = 0;
3520 + Set register used for passing the next argument to a function. Only the
3521 + Scratch Registers are used.
3526 + 13 r13 _SP_________
3527 + FIRST_CUM_REG 12 r12 _||_
3529 + 11 r10 _||_ Scratch Registers
3531 + LAST_SCRATCH_REG 9 r8 _\/_________
3543 +avr32_function_arg_advance (CUMULATIVE_ARGS * cum, enum machine_mode mode,
3544 + tree type, int named ATTRIBUTE_UNUSED)
3546 + HOST_WIDE_INT arg_size, arg_rsize;
3550 + arg_size = int_size_in_bytes (type);
3554 + arg_size = GET_MODE_SIZE (mode);
3556 + arg_rsize = PUSH_ROUNDING (arg_size);
 3558 +  /* If the argument had to be passed on the stack, no register is used. */
3559 + if ((*targetm.calls.must_pass_in_stack) (mode, type))
3561 + cum->stack_pushed_args_size += PUSH_ROUNDING (int_size_in_bytes (type));
3565 + /* Mark the used registers as "used". */
3566 + if (GET_REG_INDEX (cum) >= 0)
3568 + SET_USED_INDEX (cum, GET_REG_INDEX (cum));
3569 + if (arg_rsize == 8)
3571 + SET_USED_INDEX (cum, (GET_REG_INDEX (cum) + 1));
3576 + /* Had to use stack */
3577 + cum->stack_pushed_args_size += arg_rsize;
3582 +   Defines which direction to go to find the next register to use if the
3583 +   argument is larger than one register or for arguments shorter than an
3584 + int which is not promoted, such as the last part of structures with
3585 + size not a multiple of 4. */
3587 +avr32_function_arg_padding (enum machine_mode mode ATTRIBUTE_UNUSED,
3590 + /* Pad upward for all aggregates except byte and halfword sized aggregates
3591 + which can be passed in registers. */
3593 + && AGGREGATE_TYPE_P (type)
3594 + && (int_size_in_bytes (type) != 1)
3595 + && !((int_size_in_bytes (type) == 2)
3596 + && TYPE_ALIGN_UNIT (type) >= 2)
3597 + && (int_size_in_bytes (type) & 0x3))
3606 + Return a rtx used for the return value from a function call.
3609 +avr32_function_value (tree type, tree func, bool outgoing ATTRIBUTE_UNUSED)
3611 + if (avr32_return_in_memory (type, func))
3614 + if (int_size_in_bytes (type) <= 4)
3616 + enum machine_mode mode = TYPE_MODE (type);
3617 + int unsignedp = 0;
3618 + PROMOTE_FUNCTION_MODE (mode, unsignedp, type);
3619 + return gen_rtx_REG (mode, RET_REGISTER);
3621 + else if (int_size_in_bytes (type) <= 8)
3622 + return gen_rtx_REG (TYPE_MODE (type), INTERNAL_REGNUM (11));
3628 + Return a rtx used for the return value from a library function call.
3631 +avr32_libcall_value (enum machine_mode mode)
3634 + if (GET_MODE_SIZE (mode) <= 4)
3635 + return gen_rtx_REG (mode, RET_REGISTER);
3636 + else if (GET_MODE_SIZE (mode) <= 8)
3637 + return gen_rtx_REG (mode, INTERNAL_REGNUM (11));
3642 +/* Return TRUE if X references a SYMBOL_REF. */
3644 +symbol_mentioned_p (rtx x)
3649 + if (GET_CODE (x) == SYMBOL_REF)
3652 + fmt = GET_RTX_FORMAT (GET_CODE (x));
3654 + for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
3656 + if (fmt[i] == 'E')
3660 + for (j = XVECLEN (x, i) - 1; j >= 0; j--)
3661 + if (symbol_mentioned_p (XVECEXP (x, i, j)))
3664 + else if (fmt[i] == 'e' && symbol_mentioned_p (XEXP (x, i)))
3671 +/* Return TRUE if X references a LABEL_REF. */
3673 +label_mentioned_p (rtx x)
3678 + if (GET_CODE (x) == LABEL_REF)
3681 + fmt = GET_RTX_FORMAT (GET_CODE (x));
3682 + for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
3684 + if (fmt[i] == 'E')
3688 + for (j = XVECLEN (x, i) - 1; j >= 0; j--)
3689 + if (label_mentioned_p (XVECEXP (x, i, j)))
3692 + else if (fmt[i] == 'e' && label_mentioned_p (XEXP (x, i)))
3699 +/* Return TRUE if X contains a MEM expression. */
3701 +mem_mentioned_p (rtx x)
3709 + fmt = GET_RTX_FORMAT (GET_CODE (x));
3710 + for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
3712 + if (fmt[i] == 'E')
3716 + for (j = XVECLEN (x, i) - 1; j >= 0; j--)
3717 + if (mem_mentioned_p (XVECEXP (x, i, j)))
3720 + else if (fmt[i] == 'e' && mem_mentioned_p (XEXP (x, i)))
3728 +avr32_legitimate_pic_operand_p (rtx x)
3731 + /* We can't have const, this must be broken down to a symbol. */
3732 + if (GET_CODE (x) == CONST)
3735 + /* Can't access symbols or labels via the constant pool either */
3736 + if ((GET_CODE (x) == SYMBOL_REF
3737 + && CONSTANT_POOL_ADDRESS_P (x)
3738 + && (symbol_mentioned_p (get_pool_constant (x))
3739 + || label_mentioned_p (get_pool_constant (x)))))