1 Index: gcc-4.2.3/configure.in
2 ===================================================================
3 --- gcc-4.2.3.orig/configure.in 2007-09-15 02:42:24.000000000 +0200
4 +++ gcc-4.2.3/configure.in 2008-05-21 13:45:54.101287819 +0200
7 noconfigdirs="$noconfigdirs ld target-libgloss ${libgcj}"
10 + noconfigdirs="$noconfigdirs target-libiberty target-libmudflap target-libffi ${libgcj}"
13 noconfigdirs="$noconfigdirs target-libiberty target-libstdc++-v3 ${libgcj}"
15 Index: gcc-4.2.3/gcc/builtins.c
16 ===================================================================
17 --- gcc-4.2.3.orig/gcc/builtins.c 2008-01-23 11:38:21.000000000 +0100
18 +++ gcc-4.2.3/gcc/builtins.c 2008-05-21 13:45:54.109288559 +0200
23 - code = va_arg (ap, enum tree_code);
24 + code = va_arg (ap, int);
28 Index: gcc-4.2.3/gcc/calls.c
29 ===================================================================
30 --- gcc-4.2.3.orig/gcc/calls.c 2007-09-01 17:28:30.000000000 +0200
31 +++ gcc-4.2.3/gcc/calls.c 2008-05-21 13:45:54.117288181 +0200
33 for (; count < nargs; count++)
35 rtx val = va_arg (p, rtx);
36 - enum machine_mode mode = va_arg (p, enum machine_mode);
37 + enum machine_mode mode = va_arg (p, int);
39 /* We cannot convert the arg value to the mode the library wants here;
40 must do it earlier where we know the signedness of the arg. */
41 Index: gcc-4.2.3/gcc/config/avr32/avr32.c
42 ===================================================================
43 --- /dev/null 1970-01-01 00:00:00.000000000 +0000
44 +++ gcc-4.2.3/gcc/config/avr32/avr32.c 2008-05-21 13:45:54.145288116 +0200
47 + Target hooks and helper functions for AVR32.
48 + Copyright 2003-2006 Atmel Corporation.
50 + Written by Ronny Pedersen, Atmel Norway, <rpedersen@atmel.com>
51 +   Initial porting by Anders Ådland.
53 + This file is part of GCC.
55 + This program is free software; you can redistribute it and/or modify
56 + it under the terms of the GNU General Public License as published by
57 + the Free Software Foundation; either version 2 of the License, or
58 + (at your option) any later version.
60 + This program is distributed in the hope that it will be useful,
61 + but WITHOUT ANY WARRANTY; without even the implied warranty of
62 + MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
63 + GNU General Public License for more details.
65 + You should have received a copy of the GNU General Public License
66 + along with this program; if not, write to the Free Software
67 + Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. */
71 +#include "coretypes.h"
77 +#include "hard-reg-set.h"
79 +#include "insn-config.h"
80 +#include "conditions.h"
82 +#include "insn-attr.h"
85 +#include "function.h"
92 +#include "c-pragma.h"
93 +#include "integrate.h"
95 +#include "langhooks.h"
98 +#include "target-def.h"
102 +/* Forward definitions of types. */
103 +typedef struct minipool_node Mnode;
104 +typedef struct minipool_fixup Mfix;
106 +/* Obstack for minipool constant handling. */
107 +static struct obstack minipool_obstack;
108 +static char *minipool_startobj;
109 +static rtx minipool_vector_label;
111 +/* True if we are currently building a constant table. */
112 +int making_const_table;
114 +/* Some forward function declarations */
115 +static unsigned long avr32_isr_value (tree);
116 +static unsigned long avr32_compute_func_type (void);
117 +static tree avr32_handle_isr_attribute (tree *, tree, tree, int, bool *);
118 +static tree avr32_handle_acall_attribute (tree *, tree, tree, int, bool *);
119 +static tree avr32_handle_fndecl_attribute (tree * node, tree name, tree args,
120 + int flags, bool * no_add_attrs);
121 +static void avr32_reorg (void);
122 +bool avr32_return_in_msb (tree type);
123 +bool avr32_vector_mode_supported (enum machine_mode mode);
124 +static void avr32_init_libfuncs (void);
128 +avr32_add_gc_roots (void)
130 + gcc_obstack_init (&minipool_obstack);
131 + minipool_startobj = (char *) obstack_alloc (&minipool_obstack, 0);
135 +/* List of all known AVR32 parts */
136 +static const struct part_type_s avr32_part_types[] = {
137 + /* name, part_type, architecture type, macro */
138 + {"none", PART_TYPE_AVR32_NONE, ARCH_TYPE_AVR32_AP, "__AVR32__"},
139 + {"ap7000", PART_TYPE_AVR32_AP7000, ARCH_TYPE_AVR32_AP, "__AVR32_AP7000__"},
140 + {"ap7010", PART_TYPE_AVR32_AP7010, ARCH_TYPE_AVR32_AP, "__AVR32_AP7010__"},
141 + {"ap7020", PART_TYPE_AVR32_AP7020, ARCH_TYPE_AVR32_AP, "__AVR32_AP7020__"},
142 + {"uc3a0256", PART_TYPE_AVR32_UC3A0256, ARCH_TYPE_AVR32_UC,
143 + "__AVR32_UC3A0256__"},
144 + {"uc3a0512", PART_TYPE_AVR32_UC3A0512, ARCH_TYPE_AVR32_UC,
145 + "__AVR32_UC3A0512__"},
146 + {"uc3a1128", PART_TYPE_AVR32_UC3A1128, ARCH_TYPE_AVR32_UC,
147 + "__AVR32_UC3A1128__"},
148 + {"uc3a1256", PART_TYPE_AVR32_UC3A1256, ARCH_TYPE_AVR32_UC,
149 + "__AVR32_UC3A1256__"},
150 + {"uc3a1512", PART_TYPE_AVR32_UC3A1512, ARCH_TYPE_AVR32_UC,
151 + "__AVR32_UC3A1512__"},
152 + {"uc3b064", PART_TYPE_AVR32_UC3B064, ARCH_TYPE_AVR32_UC,
153 + "__AVR32_UC3B064__"},
154 + {"uc3b0128", PART_TYPE_AVR32_UC3B0128, ARCH_TYPE_AVR32_UC,
155 + "__AVR32_UC3B0128__"},
156 + {"uc3b0256", PART_TYPE_AVR32_UC3B0256, ARCH_TYPE_AVR32_UC,
157 + "__AVR32_UC3B0256__"},
158 + {"uc3b164", PART_TYPE_AVR32_UC3B164, ARCH_TYPE_AVR32_UC,
159 + "__AVR32_UC3B164__"},
160 + {"uc3b1128", PART_TYPE_AVR32_UC3B1128, ARCH_TYPE_AVR32_UC,
161 + "__AVR32_UC3B1128__"},
162 + {"uc3b1256", PART_TYPE_AVR32_UC3B1256, ARCH_TYPE_AVR32_UC,
163 + "__AVR32_UC3B1256__"},
167 +/* List of all known AVR32 architectures */
168 +static const struct arch_type_s avr32_arch_types[] = {
169 + /* name, architecture type, microarchitecture type, feature flags, macro */
170 + {"ap", ARCH_TYPE_AVR32_AP, UARCH_TYPE_AVR32B,
171 + (FLAG_AVR32_HAS_DSP
172 + | FLAG_AVR32_HAS_SIMD
173 + | FLAG_AVR32_HAS_UNALIGNED_WORD
174 + | FLAG_AVR32_HAS_CACHES
175 + | FLAG_AVR32_HAS_BRANCH_PRED
176 + | FLAG_AVR32_HAS_RETURN_STACK),
178 + {"uc", ARCH_TYPE_AVR32_UC, UARCH_TYPE_AVR32A,
179 + (FLAG_AVR32_HAS_DSP | FLAG_AVR32_HAS_RMW),
181 + {NULL, 0, 0, 0, NULL}
184 +/* Default arch name */
185 +const char *avr32_arch_name = "ap";
186 +const char *avr32_part_name = "none";
188 +const struct part_type_s *avr32_part;
189 +const struct arch_type_s *avr32_arch;
191 +/* Set default target_flags. */
192 +#undef TARGET_DEFAULT_TARGET_FLAGS
193 +#define TARGET_DEFAULT_TARGET_FLAGS \
194 + (MASK_HAS_ASM_ADDR_PSEUDOS | MASK_MD_REORG_OPTIMIZATION)
197 +avr32_optimization_options (int level,
199 + if (AVR32_ALWAYS_PIC)
202 + /* Enable section anchors if optimization is enabled. */
203 + if (level > 0 || size)
204 + flag_section_anchors = 1;
207 +/* Override command line options */
209 +avr32_override_options (void)
211 + const struct part_type_s *part;
212 + const struct arch_type_s *arch;
214 + /* Check if part type is set. */
215 + for (part = avr32_part_types; part->name; part++)
216 + if (strcmp (part->name, avr32_part_name) == 0)
223 + fprintf (stderr, "Unknown part `%s' specified\nKnown part names:\n",
225 + for (part = avr32_part_types; part->name; part++)
226 + fprintf (stderr, "\t%s\n", part->name);
227 + avr32_part = &avr32_part_types[PART_TYPE_AVR32_NONE];
230 + avr32_arch = &avr32_arch_types[avr32_part->arch_type];
232 + /* If part was set to "none" then check if arch was set. */
233 + if (strcmp (avr32_part->name, "none") == 0)
235 + /* Check if arch type is set. */
236 + for (arch = avr32_arch_types; arch->name; arch++)
237 + if (strcmp (arch->name, avr32_arch_name) == 0)
244 + fprintf (stderr, "Unknown arch `%s' specified\nKnown arch names:\n",
246 + for (arch = avr32_arch_types; arch->name; arch++)
247 + fprintf (stderr, "\t%s\n", arch->name);
248 + avr32_arch = &avr32_arch_types[ARCH_TYPE_AVR32_AP];
252 + /* If optimization level is two or greater, then align start of loops to a
253 + word boundary since this will allow folding the first insn of the loop.
254 + Do this only for targets supporting branch prediction. */
255 + if (optimize >= 2 && TARGET_BRANCH_PRED)
259 + /* Enable section anchors if optimization is enabled. */
260 + if (optimize > 0 || optimize_size)
261 + flag_section_anchors = 1;
263 + /* Enable fast-float library if unsafe math optimizations
265 + if (flag_unsafe_math_optimizations)
266 + target_flags |= MASK_FAST_FLOAT;
268 + /* Check if we should set avr32_imm_in_const_pool
269 +     based on whether caches are present. */
270 + if ( avr32_imm_in_const_pool == -1 )
272 + if ( TARGET_CACHES )
273 + avr32_imm_in_const_pool = 1;
275 + avr32_imm_in_const_pool = 0;
278 + avr32_add_gc_roots ();
283 +If defined, a function that outputs the assembler code for entry to a
284 +function. The prologue is responsible for setting up the stack frame,
285 +initializing the frame pointer register, saving registers that must be
286 +saved, and allocating size additional bytes of storage for the
287 +local variables. size is an integer. file is a stdio
288 +stream to which the assembler code should be output.
290 +The label for the beginning of the function need not be output by this
291 +macro. That has already been done when the macro is run.
293 +To determine which registers to save, the macro can refer to the array
294 +regs_ever_live: element r is nonzero if hard register
295 +r is used anywhere within the function. This implies the function
296 +prologue should save register r, provided it is not one of the
297 +call-used registers. (TARGET_ASM_FUNCTION_EPILOGUE must likewise use
300 +On machines that have ``register windows'', the function entry code does
301 +not save on the stack the registers that are in the windows, even if
302 +they are supposed to be preserved by function calls; instead it takes
303 +appropriate steps to ``push'' the register stack, if any non-call-used
304 +registers are used in the function.
306 +On machines where functions may or may not have frame-pointers, the
307 +function entry code must vary accordingly; it must set up the frame
308 +pointer if one is wanted, and not otherwise. To determine whether a
309 +frame pointer is wanted, the macro can refer to the variable
310 +frame_pointer_needed. The variable's value will be 1 at run
311 +time in a function that needs a frame pointer. (see Elimination).
313 +The function entry code is responsible for allocating any stack space
314 +required for the function. This stack space consists of the regions
315 +listed below. In most cases, these regions are allocated in the
316 +order listed, with the last listed region closest to the top of the
317 +stack (the lowest address if STACK_GROWS_DOWNWARD is defined, and
318 +the highest address if it is not defined). You can use a different order
319 +for a machine if doing so is more convenient or required for
320 +compatibility reasons. Except in cases where required by standard
321 +or by a debugger, there is no reason why the stack layout used by GCC
322 +need agree with that used by other compilers for a machine.
325 +#undef TARGET_ASM_FUNCTION_PROLOGUE
326 +#define TARGET_ASM_FUNCTION_PROLOGUE avr32_target_asm_function_prologue
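For orientation, TARGET_ASM_FUNCTION_PROLOGUE in GCC 4.2 has the signature
void (*) (FILE *, HOST_WIDE_INT). A minimal sketch of such a hook, using the
function-type helpers defined further down in this file (the real
avr32_target_asm_function_prologue body is elided from this excerpt, and the
assembly below is illustrative only, not the actual AVR32 prologue):

static void
sketch_function_prologue (FILE *file, HOST_WIDE_INT size)
{
  /* Naked functions get no prologue at all.  */
  if (avr32_current_func_type () & AVR32_FT_NAKED)
    return;

  /* Save the link register, then reserve SIZE bytes of locals.  */
  fputs ("\tpushm\tlr\n", file);
  if (size > 0)
    fprintf (file, "\tsub\tsp, %d\n", (int) size);
}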
329 +#undef TARGET_DEFAULT_SHORT_ENUMS
330 +#define TARGET_DEFAULT_SHORT_ENUMS hook_bool_void_false
332 +#undef TARGET_PROMOTE_FUNCTION_ARGS
333 +#define TARGET_PROMOTE_FUNCTION_ARGS hook_bool_tree_true
335 +#undef TARGET_PROMOTE_FUNCTION_RETURN
336 +#define TARGET_PROMOTE_FUNCTION_RETURN hook_bool_tree_true
338 +#undef TARGET_PROMOTE_PROTOTYPES
339 +#define TARGET_PROMOTE_PROTOTYPES hook_bool_tree_true
341 +#undef TARGET_MUST_PASS_IN_STACK
342 +#define TARGET_MUST_PASS_IN_STACK avr32_must_pass_in_stack
344 +#undef TARGET_PASS_BY_REFERENCE
345 +#define TARGET_PASS_BY_REFERENCE avr32_pass_by_reference
347 +#undef TARGET_STRICT_ARGUMENT_NAMING
348 +#define TARGET_STRICT_ARGUMENT_NAMING avr32_strict_argument_naming
350 +#undef TARGET_VECTOR_MODE_SUPPORTED_P
351 +#define TARGET_VECTOR_MODE_SUPPORTED_P avr32_vector_mode_supported
353 +#undef TARGET_RETURN_IN_MEMORY
354 +#define TARGET_RETURN_IN_MEMORY avr32_return_in_memory
356 +#undef TARGET_RETURN_IN_MSB
357 +#define TARGET_RETURN_IN_MSB avr32_return_in_msb
359 +#undef TARGET_ARG_PARTIAL_BYTES
360 +#define TARGET_ARG_PARTIAL_BYTES avr32_arg_partial_bytes
362 +#undef TARGET_STRIP_NAME_ENCODING
363 +#define TARGET_STRIP_NAME_ENCODING avr32_strip_name_encoding
365 +#define streq(string1, string2) (strcmp (string1, string2) == 0)
367 +#undef TARGET_NARROW_VOLATILE_BITFIELD
368 +#define TARGET_NARROW_VOLATILE_BITFIELD hook_bool_void_false
370 +#undef TARGET_ATTRIBUTE_TABLE
371 +#define TARGET_ATTRIBUTE_TABLE avr32_attribute_table
373 +#undef TARGET_COMP_TYPE_ATTRIBUTES
374 +#define TARGET_COMP_TYPE_ATTRIBUTES avr32_comp_type_attributes
377 +#undef TARGET_RTX_COSTS
378 +#define TARGET_RTX_COSTS avr32_rtx_costs
380 +#undef TARGET_CANNOT_FORCE_CONST_MEM
381 +#define TARGET_CANNOT_FORCE_CONST_MEM avr32_cannot_force_const_mem
383 +#undef TARGET_ASM_INTEGER
384 +#define TARGET_ASM_INTEGER avr32_assemble_integer
386 +#undef TARGET_FUNCTION_VALUE
387 +#define TARGET_FUNCTION_VALUE avr32_function_value
389 +#undef TARGET_MIN_ANCHOR_OFFSET
390 +#define TARGET_MIN_ANCHOR_OFFSET (0)
392 +#undef TARGET_MAX_ANCHOR_OFFSET
393 +#define TARGET_MAX_ANCHOR_OFFSET ((1 << 15) - 1)
397 + * Switches to the appropriate section for output of constant pool
398 + * entry x in mode mode. You can assume that x is some kind of constant in
399 + * RTL. The argument mode is redundant except in the case of a
400 + * const_int rtx. Select the section by calling readonly_data_section
401 + * or one of the alternatives for other sections. align is the
402 + * constant alignment in bits.
404 + * The default version of this function takes care of putting symbolic
405 + * constants in flag_pic mode in data_section and everything else in
406 + * readonly_data_section.
408 +//#undef TARGET_ASM_SELECT_RTX_SECTION
409 +//#define TARGET_ASM_SELECT_RTX_SECTION avr32_select_rtx_section
413 + * If non-null, this hook performs a target-specific pass over the
414 + * instruction stream. The compiler will run it at all optimization
415 + * levels, just before the point at which it normally does
416 + * delayed-branch scheduling.
418 + * The exact purpose of the hook varies from target to target. Some
419 + * use it to do transformations that are necessary for correctness,
420 + * such as laying out in-function constant pools or avoiding hardware
421 + * hazards. Others use it as an opportunity to do some
422 + * machine-dependent optimizations.
424 + * You need not implement the hook if it has nothing to do. The
425 + * default definition is null.
427 +#undef TARGET_MACHINE_DEPENDENT_REORG
428 +#define TARGET_MACHINE_DEPENDENT_REORG avr32_reorg
430 +/* Target hook for assembling integer objects.
431 + Need to handle integer vectors */
433 +avr32_assemble_integer (rtx x, unsigned int size, int aligned_p)
435 + if (avr32_vector_mode_supported (GET_MODE (x)))
439 + if (GET_CODE (x) != CONST_VECTOR)
442 + units = CONST_VECTOR_NUNITS (x);
444 + switch (GET_MODE (x))
456 + for (i = 0; i < units; i++)
460 + elt = CONST_VECTOR_ELT (x, i);
461 + assemble_integer (elt, size, i == 0 ? 32 : size * BITS_PER_UNIT, 1);
467 + return default_assemble_integer (x, size, aligned_p);
471 + * This target hook describes the relative costs of RTL expressions.
473 + * The cost may depend on the precise form of the expression, which is
474 + * available for examination in x, and the rtx code of the expression
475 + * in which it is contained, found in outer_code. code is the
476 + * expression code--redundant, since it can be obtained with GET_CODE
479 + * In implementing this hook, you can use the construct COSTS_N_INSNS
480 + * (n) to specify a cost equal to n fast instructions.
482 + * On entry to the hook, *total contains a default estimate for the
483 + * cost of the expression. The hook should modify this value as
484 + * necessary. Traditionally, the default costs are COSTS_N_INSNS (5)
485 + * for multiplications, COSTS_N_INSNS (7) for division and modulus
486 + * operations, and COSTS_N_INSNS (1) for all other operations.
488 + * When optimizing for code size, i.e. when optimize_size is non-zero,
489 + * this target hook should be used to estimate the relative size cost
490 + * of an expression, again relative to COSTS_N_INSNS.
492 + * The hook returns true when all subexpressions of x have been
493 + * processed, and false when rtx_cost should recurse.
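For scale: COSTS_N_INSNS comes from GCC's rtl.h, where it is defined as

  #define COSTS_N_INSNS(N) ((N) * 4)

so a return value of COSTS_N_INSNS (3) in the worker below means roughly
three fast instructions.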
496 +/* Worker routine for avr32_rtx_costs. */
498 +avr32_rtx_costs_1 (rtx x, enum rtx_code code ATTRIBUTE_UNUSED,
499 + enum rtx_code outer ATTRIBUTE_UNUSED)
501 + enum machine_mode mode = GET_MODE (x);
503 + switch (GET_CODE (x))
506 + /* Using pre decrement / post increment memory operations on the
507 + avr32_uc architecture means that two writebacks must be performed
508 + and hence two cycles are needed. */
510 + && GET_MODE_SIZE (mode) <= 2 * UNITS_PER_WORD
511 + && avr32_arch->arch_type == ARCH_TYPE_AVR32_UC
512 + && (GET_CODE (XEXP (x, 0)) == PRE_DEC
513 + || GET_CODE (XEXP (x, 0)) == POST_INC))
514 + return COSTS_N_INSNS (5);
516 + /* Memory costs quite a lot for the first word, but subsequent words
517 + load at the equivalent of a single insn each. */
518 + if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
519 + return COSTS_N_INSNS (3 + (GET_MODE_SIZE (mode) / UNITS_PER_WORD));
521 + return COSTS_N_INSNS (4);
524 +      /* These are valid for the pseudo insns: lda.w and call, which operate
525 +         on direct addresses. We assume that the cost of an lda.w is the same
526 +         as the cost of a ld.w insn. */
527 + return (outer == SET) ? COSTS_N_INSNS (4) : COSTS_N_INSNS (1);
532 + return optimize_size ? COSTS_N_INSNS (1) : COSTS_N_INSNS (16);
536 + if (mode == TImode)
537 + return COSTS_N_INSNS (100);
539 + if (mode == DImode)
540 + return COSTS_N_INSNS (10);
541 + return COSTS_N_INSNS (4);
546 + if (mode == TImode)
547 + return COSTS_N_INSNS (10);
549 + if (mode == DImode)
550 + return COSTS_N_INSNS (4);
551 + return COSTS_N_INSNS (1);
557 + if (GET_MODE_CLASS (mode) == MODE_FLOAT)
558 + return COSTS_N_INSNS (100);
560 + if (mode == TImode)
561 + return COSTS_N_INSNS (50);
563 + if (mode == DImode)
564 + return COSTS_N_INSNS (2);
565 + return COSTS_N_INSNS (1);
569 + if (GET_MODE_CLASS (mode) == MODE_FLOAT)
570 + return COSTS_N_INSNS (300);
572 + if (mode == TImode)
573 + return COSTS_N_INSNS (16);
575 + if (mode == DImode)
576 + return COSTS_N_INSNS (4);
578 + if (mode == HImode)
579 + return COSTS_N_INSNS (2);
581 + return COSTS_N_INSNS (3);
584 + if (GET_CODE (XEXP (x, 1)) == PC || GET_CODE (XEXP (x, 2)) == PC)
585 + return COSTS_N_INSNS (4);
586 + return COSTS_N_INSNS (1);
589 +      /* Sign/Zero extensions of registers cost quite a lot since these
590 +         instructions only take one register operand, which means that gcc
591 +         often must insert some move instructions. */
592 + if (mode == QImode || mode == HImode)
593 + return (COSTS_N_INSNS (GET_CODE (XEXP (x, 0)) == MEM ? 0 : 1));
594 + return COSTS_N_INSNS (4);
596 + /* divmod operations */
597 + if (XINT (x, 1) == UNSPEC_UDIVMODSI4_INTERNAL
598 + || XINT (x, 1) == UNSPEC_DIVMODSI4_INTERNAL)
600 + return optimize_size ? COSTS_N_INSNS (1) : COSTS_N_INSNS (16);
604 + return COSTS_N_INSNS (1);
609 +avr32_rtx_costs (rtx x, int code, int outer_code, int *total)
611 + *total = avr32_rtx_costs_1 (x, code, outer_code);
617 +avr32_cannot_force_const_mem (rtx x ATTRIBUTE_UNUSED)
619 + /* Do not want symbols in the constant pool when compiling pic or if using
620 + address pseudo instructions. */
621 + return ((flag_pic || TARGET_HAS_ASM_ADDR_PSEUDOS)
622 + && avr32_find_symbol (x) != NULL_RTX);
626 +/* Table of machine attributes. */
627 +const struct attribute_spec avr32_attribute_table[] = {
628 + /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
629 + /* Interrupt Service Routines have special prologue and epilogue
631 + {"isr", 0, 1, false, false, false, avr32_handle_isr_attribute},
632 + {"interrupt", 0, 1, false, false, false, avr32_handle_isr_attribute},
633 + {"acall", 0, 1, false, true, true, avr32_handle_acall_attribute},
634 + {"naked", 0, 0, true, false, false, avr32_handle_fndecl_attribute},
635 + {NULL, 0, 0, false, false, false, NULL}
641 + const char *const arg;
642 + const unsigned long return_value;
646 +static const isr_attribute_arg isr_attribute_args[] = {
647 + {"FULL", AVR32_FT_ISR_FULL},
648 + {"full", AVR32_FT_ISR_FULL},
649 + {"HALF", AVR32_FT_ISR_HALF},
650 + {"half", AVR32_FT_ISR_HALF},
651 + {"NONE", AVR32_FT_ISR_NONE},
652 + {"none", AVR32_FT_ISR_NONE},
653 + {"UNDEF", AVR32_FT_ISR_NONE},
654 + {"undef", AVR32_FT_ISR_NONE},
655 + {"SWI", AVR32_FT_ISR_NONE},
656 + {"swi", AVR32_FT_ISR_NONE},
657 + {NULL, AVR32_FT_ISR_NONE}
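A usage sketch for the attribute table and argument strings above (the
function names are hypothetical):

  void __attribute__ ((interrupt ("FULL"))) timer_irq (void);
  void __attribute__ ((isr ("half"))) dma_irq (void);
  void __attribute__ ((naked)) context_switch (void);

The optional string must match one of the isr_attribute_args entries; with no
argument the type defaults to AVR32_FT_ISR_NONE, and an unrecognized string
yields AVR32_FT_UNKNOWN, which makes the handler ignore the attribute with a
warning.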
660 +/* Returns the (interrupt) function type of the current
661 + function, or AVR32_FT_UNKNOWN if the type cannot be determined. */
663 +static unsigned long
664 +avr32_isr_value (tree argument)
666 + const isr_attribute_arg *ptr;
669 + /* No argument - default to ISR_NONE. */
670 + if (argument == NULL_TREE)
671 + return AVR32_FT_ISR_NONE;
673 + /* Get the value of the argument. */
674 + if (TREE_VALUE (argument) == NULL_TREE
675 + || TREE_CODE (TREE_VALUE (argument)) != STRING_CST)
676 + return AVR32_FT_UNKNOWN;
678 + arg = TREE_STRING_POINTER (TREE_VALUE (argument));
680 + /* Check it against the list of known arguments. */
681 + for (ptr = isr_attribute_args; ptr->arg != NULL; ptr++)
682 + if (streq (arg, ptr->arg))
683 + return ptr->return_value;
685 + /* An unrecognized interrupt type. */
686 + return AVR32_FT_UNKNOWN;
692 +These hooks specify assembly directives for creating certain kinds
693 +of integer object. The TARGET_ASM_BYTE_OP directive creates a
694 +byte-sized object, the TARGET_ASM_ALIGNED_HI_OP one creates an
695 +aligned two-byte object, and so on. Any of the hooks may be
696 +NULL, indicating that no suitable directive is available.
698 +The compiler will print these strings at the start of a new line,
699 +followed immediately by the object's initial value. In most cases,
700 +the string should contain a tab, a pseudo-op, and then another tab.
702 +#undef TARGET_ASM_BYTE_OP
703 +#define TARGET_ASM_BYTE_OP "\t.byte\t"
704 +#undef TARGET_ASM_ALIGNED_HI_OP
705 +#define TARGET_ASM_ALIGNED_HI_OP "\t.align 1\n\t.short\t"
706 +#undef TARGET_ASM_ALIGNED_SI_OP
707 +#define TARGET_ASM_ALIGNED_SI_OP "\t.align 2\n\t.int\t"
708 +#undef TARGET_ASM_ALIGNED_DI_OP
709 +#define TARGET_ASM_ALIGNED_DI_OP NULL
710 +#undef TARGET_ASM_ALIGNED_TI_OP
711 +#define TARGET_ASM_ALIGNED_TI_OP NULL
712 +#undef TARGET_ASM_UNALIGNED_HI_OP
713 +#define TARGET_ASM_UNALIGNED_HI_OP "\t.short\t"
714 +#undef TARGET_ASM_UNALIGNED_SI_OP
715 +#define TARGET_ASM_UNALIGNED_SI_OP "\t.int\t"
716 +#undef TARGET_ASM_UNALIGNED_DI_OP
717 +#define TARGET_ASM_UNALIGNED_DI_OP NULL
718 +#undef TARGET_ASM_UNALIGNED_TI_OP
719 +#define TARGET_ASM_UNALIGNED_TI_OP NULL
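As an illustration of the operators above, assembling an aligned SImode
constant, e.g.

  assemble_integer (GEN_INT (42), 4, 32, 1);

emits

	.align 2
	.int	42

while the NULL DImode/TImode entries make the generic code fall back to
splitting such objects into word-sized pieces.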
721 +#undef TARGET_ASM_OUTPUT_MI_THUNK
722 +#define TARGET_ASM_OUTPUT_MI_THUNK avr32_output_mi_thunk
724 +#undef TARGET_ASM_CAN_OUTPUT_MI_THUNK
725 +#define TARGET_ASM_CAN_OUTPUT_MI_THUNK hook_bool_tree_hwi_hwi_tree_true
728 +avr32_output_mi_thunk (FILE * file,
729 + tree thunk ATTRIBUTE_UNUSED,
730 + HOST_WIDE_INT delta,
731 + HOST_WIDE_INT vcall_offset, tree function)
733 + int mi_delta = delta;
735 + (aggregate_value_p (TREE_TYPE (TREE_TYPE (function)), function) ?
736 + INTERNAL_REGNUM (11) : INTERNAL_REGNUM (12));
739 + if (!avr32_const_ok_for_constraint_p (mi_delta, 'I', "Is21")
743 + fputs ("\tpushm\tlr\n", file);
749 + if (avr32_const_ok_for_constraint_p (mi_delta, 'I', "Is21"))
751 + fprintf (file, "\tsub\t%s, -0x%x\n", reg_names[this_regno],
756 +      /* The immediate is larger than k21, so we must make a temporary register by
757 + pushing a register to the stack. */
758 + fprintf (file, "\tmov\tlr, lo(%x)\n", mi_delta);
759 + fprintf (file, "\torh\tlr, hi(%x)\n", mi_delta);
760 + fprintf (file, "\tadd\t%s, lr\n", reg_names[this_regno]);
765 + if (vcall_offset != 0)
767 + fprintf (file, "\tld.w\tlr, %s[0]\n", reg_names[this_regno]);
768 + fprintf (file, "\tld.w\tlr, lr[%i]\n", (int) vcall_offset);
769 + fprintf (file, "\tadd\t%s, lr\n", reg_names[this_regno]);
773 + if ( (!avr32_const_ok_for_constraint_p (mi_delta, 'I', "Is21")
777 + fputs ("\tpopm\tlr\n", file);
782 + /* Load the got into lr and then load the pointer
783 + to the function from the got and put it on the stack.
784 + We can then call the function and restore lr by issuing
785 + a doubleword load from the stack. We do not use a popm/ldm
786 +         since it would be treated as a return and might require a flush
787 +         of the return stack, if one is present. */
788 + rtx label = gen_label_rtx ();
789 + /* Load the got. */
790 + fputs ("\tlddpc\tlr, 0f\n", file);
791 + (*targetm.asm_out.internal_label) (file, "L",
792 + CODE_LABEL_NUMBER (label));
793 + fputs ("\trsub\tlr, pc\n", file);
794 + /* Load the function pointer. */
795 + fputs ("\tld.w\tlr, lr[", file);
796 + assemble_name (file, XSTR (XEXP (DECL_RTL (function), 0), 0));
797 + fputs ("@got]\n", file);
798 + /* Push the function pointer on the stack.*/
799 + fputs ("\tpushm\tlr\n", file);
800 + /* Restore the old lr value and load the function pointer into
802 + fputs ("\tld.d\tlr,sp++\n", file);
803 + fprintf (file, "\t.align 2\n");
804 + fprintf (file, "0:\t.long\t.L%d - _GLOBAL_OFFSET_TABLE_\n", CODE_LABEL_NUMBER (label));
808 + fprintf (file, "\tlddpc\tpc, 0f\n");
809 + fprintf (file, "\t.align 2\n");
810 + fputs ("0:\t.long\t", file);
811 + assemble_name (file, XSTR (XEXP (DECL_RTL (function), 0), 0));
812 + fputc ('\n', file);
816 +/* Implements target hook vector_mode_supported. */
818 +avr32_vector_mode_supported (enum machine_mode mode)
820 + if ((mode == V2HImode) || (mode == V4QImode))
827 +#undef TARGET_INIT_LIBFUNCS
828 +#define TARGET_INIT_LIBFUNCS avr32_init_libfuncs
830 +#undef TARGET_INIT_BUILTINS
831 +#define TARGET_INIT_BUILTINS avr32_init_builtins
833 +#undef TARGET_EXPAND_BUILTIN
834 +#define TARGET_EXPAND_BUILTIN avr32_expand_builtin
836 +tree int_ftype_int, int_ftype_void, short_ftype_short, void_ftype_int_int,
838 +tree void_ftype_int, void_ftype_void, int_ftype_ptr_int;
839 +tree short_ftype_short, int_ftype_int_short, int_ftype_short_short,
840 +short_ftype_short_short;
841 +tree int_ftype_int_int, longlong_ftype_int_short, longlong_ftype_short_short;
842 +tree void_ftype_int_int_int_int_int, void_ftype_int_int_int;
843 +tree longlong_ftype_int_int, void_ftype_int_int_longlong;
844 +tree int_ftype_int_int_int, longlong_ftype_longlong_int_short;
845 +tree longlong_ftype_longlong_short_short, int_ftype_int_short_short;
847 +#define def_builtin(NAME, TYPE, CODE) \
848 + lang_hooks.builtin_function ((NAME), (TYPE), (CODE), \
849 + BUILT_IN_MD, NULL, NULL_TREE)
851 +#define def_mbuiltin(MASK, NAME, TYPE, CODE) \
855 + lang_hooks.builtin_function ((NAME), (TYPE), (CODE), \
856 + BUILT_IN_MD, NULL, NULL_TREE); \
860 +struct builtin_description
862 + const unsigned int mask;
863 + const enum insn_code icode;
864 + const char *const name;
866 + const enum rtx_code comparison;
867 + const unsigned int flag;
871 +static const struct builtin_description bdesc_2arg[] = {
872 +#define DSP_BUILTIN(code, builtin, ftype) \
873 + { 1, CODE_FOR_##code, "__builtin_" #code , \
874 + AVR32_BUILTIN_##builtin, 0, 0, ftype }
876 + DSP_BUILTIN (mulsathh_h, MULSATHH_H, &short_ftype_short_short),
877 + DSP_BUILTIN (mulsathh_w, MULSATHH_W, &int_ftype_short_short),
878 + DSP_BUILTIN (mulsatrndhh_h, MULSATRNDHH_H, &short_ftype_short_short),
879 + DSP_BUILTIN (mulsatrndwh_w, MULSATRNDWH_W, &int_ftype_int_short),
880 + DSP_BUILTIN (mulsatwh_w, MULSATWH_W, &int_ftype_int_short),
881 + DSP_BUILTIN (satadd_h, SATADD_H, &short_ftype_short_short),
882 + DSP_BUILTIN (satsub_h, SATSUB_H, &short_ftype_short_short),
883 + DSP_BUILTIN (satadd_w, SATADD_W, &int_ftype_int_int),
884 + DSP_BUILTIN (satsub_w, SATSUB_W, &int_ftype_int_int),
885 + DSP_BUILTIN (mulwh_d, MULWH_D, &longlong_ftype_int_short),
886 + DSP_BUILTIN (mulnwh_d, MULNWH_D, &longlong_ftype_int_short)
891 +avr32_init_builtins (void)
894 + const struct builtin_description *d;
895 + tree endlink = void_list_node;
896 + tree int_endlink = tree_cons (NULL_TREE, integer_type_node, endlink);
897 + tree longlong_endlink =
898 + tree_cons (NULL_TREE, long_long_integer_type_node, endlink);
899 + tree short_endlink =
900 + tree_cons (NULL_TREE, short_integer_type_node, endlink);
901 + tree void_endlink = tree_cons (NULL_TREE, void_type_node, endlink);
903 + /* int func (int) */
904 + int_ftype_int = build_function_type (integer_type_node, int_endlink);
906 + /* short func (short) */
908 + = build_function_type (short_integer_type_node, short_endlink);
910 + /* short func (short, short) */
911 + short_ftype_short_short
912 + = build_function_type (short_integer_type_node,
913 + tree_cons (NULL_TREE, short_integer_type_node,
916 + /* long long func (long long, short, short) */
917 + longlong_ftype_longlong_short_short
918 + = build_function_type (long_long_integer_type_node,
919 + tree_cons (NULL_TREE, long_long_integer_type_node,
920 + tree_cons (NULL_TREE,
921 + short_integer_type_node,
924 + /* long long func (short, short) */
925 + longlong_ftype_short_short
926 + = build_function_type (long_long_integer_type_node,
927 + tree_cons (NULL_TREE, short_integer_type_node,
930 + /* int func (int, int) */
932 + = build_function_type (integer_type_node,
933 + tree_cons (NULL_TREE, integer_type_node,
936 + /* long long func (int, int) */
937 + longlong_ftype_int_int
938 + = build_function_type (long_long_integer_type_node,
939 + tree_cons (NULL_TREE, integer_type_node,
942 + /* long long int func (long long, int, short) */
943 + longlong_ftype_longlong_int_short
944 + = build_function_type (long_long_integer_type_node,
945 + tree_cons (NULL_TREE, long_long_integer_type_node,
946 + tree_cons (NULL_TREE, integer_type_node,
949 + /* long long int func (int, short) */
950 + longlong_ftype_int_short
951 + = build_function_type (long_long_integer_type_node,
952 + tree_cons (NULL_TREE, integer_type_node,
955 + /* int func (int, short, short) */
956 + int_ftype_int_short_short
957 + = build_function_type (integer_type_node,
958 + tree_cons (NULL_TREE, integer_type_node,
959 + tree_cons (NULL_TREE,
960 + short_integer_type_node,
963 + /* int func (short, short) */
964 + int_ftype_short_short
965 + = build_function_type (integer_type_node,
966 + tree_cons (NULL_TREE, short_integer_type_node,
969 + /* int func (int, short) */
970 + int_ftype_int_short
971 + = build_function_type (integer_type_node,
972 + tree_cons (NULL_TREE, integer_type_node,
975 + /* void func (int, int) */
977 + = build_function_type (void_type_node,
978 + tree_cons (NULL_TREE, integer_type_node,
981 + /* void func (int, int, int) */
982 + void_ftype_int_int_int
983 + = build_function_type (void_type_node,
984 + tree_cons (NULL_TREE, integer_type_node,
985 + tree_cons (NULL_TREE, integer_type_node,
988 + /* void func (int, int, long long) */
989 + void_ftype_int_int_longlong
990 + = build_function_type (void_type_node,
991 + tree_cons (NULL_TREE, integer_type_node,
992 + tree_cons (NULL_TREE, integer_type_node,
993 + longlong_endlink)));
995 + /* void func (int, int, int, int, int) */
996 + void_ftype_int_int_int_int_int
997 + = build_function_type (void_type_node,
998 + tree_cons (NULL_TREE, integer_type_node,
999 + tree_cons (NULL_TREE, integer_type_node,
1000 + tree_cons (NULL_TREE,
1001 + integer_type_node,
1004 + integer_type_node,
1007 + /* void func (void *, int) */
1008 + void_ftype_ptr_int
1009 + = build_function_type (void_type_node,
1010 + tree_cons (NULL_TREE, ptr_type_node, int_endlink));
1012 + /* void func (int) */
1013 + void_ftype_int = build_function_type (void_type_node, int_endlink);
1015 + /* void func (void) */
1016 + void_ftype_void = build_function_type (void_type_node, void_endlink);
1018 + /* int func (void) */
1019 + int_ftype_void = build_function_type (integer_type_node, void_endlink);
1021 + /* int func (void *, int) */
1023 + = build_function_type (integer_type_node,
1024 + tree_cons (NULL_TREE, ptr_type_node, int_endlink));
1026 + /* int func (int, int, int) */
1027 + int_ftype_int_int_int
1028 + = build_function_type (integer_type_node,
1029 + tree_cons (NULL_TREE, integer_type_node,
1030 + tree_cons (NULL_TREE, integer_type_node,
1033 + /* Initialize avr32 builtins. */
1034 + def_builtin ("__builtin_mfsr", int_ftype_int, AVR32_BUILTIN_MFSR);
1035 + def_builtin ("__builtin_mtsr", void_ftype_int_int, AVR32_BUILTIN_MTSR);
1036 + def_builtin ("__builtin_mfdr", int_ftype_int, AVR32_BUILTIN_MFDR);
1037 + def_builtin ("__builtin_mtdr", void_ftype_int_int, AVR32_BUILTIN_MTDR);
1038 + def_builtin ("__builtin_cache", void_ftype_ptr_int, AVR32_BUILTIN_CACHE);
1039 + def_builtin ("__builtin_sync", void_ftype_int, AVR32_BUILTIN_SYNC);
1040 + def_builtin ("__builtin_tlbr", void_ftype_void, AVR32_BUILTIN_TLBR);
1041 + def_builtin ("__builtin_tlbs", void_ftype_void, AVR32_BUILTIN_TLBS);
1042 + def_builtin ("__builtin_tlbw", void_ftype_void, AVR32_BUILTIN_TLBW);
1043 + def_builtin ("__builtin_breakpoint", void_ftype_void,
1044 + AVR32_BUILTIN_BREAKPOINT);
1045 + def_builtin ("__builtin_xchg", int_ftype_ptr_int, AVR32_BUILTIN_XCHG);
1046 + def_builtin ("__builtin_ldxi", int_ftype_ptr_int, AVR32_BUILTIN_LDXI);
1047 + def_builtin ("__builtin_bswap_16", short_ftype_short,
1048 + AVR32_BUILTIN_BSWAP16);
1049 + def_builtin ("__builtin_bswap_32", int_ftype_int, AVR32_BUILTIN_BSWAP32);
1050 + def_builtin ("__builtin_cop", void_ftype_int_int_int_int_int,
1051 + AVR32_BUILTIN_COP);
1052 + def_builtin ("__builtin_mvcr_w", int_ftype_int_int, AVR32_BUILTIN_MVCR_W);
1053 + def_builtin ("__builtin_mvrc_w", void_ftype_int_int_int,
1054 + AVR32_BUILTIN_MVRC_W);
1055 + def_builtin ("__builtin_mvcr_d", longlong_ftype_int_int,
1056 + AVR32_BUILTIN_MVCR_D);
1057 + def_builtin ("__builtin_mvrc_d", void_ftype_int_int_longlong,
1058 + AVR32_BUILTIN_MVRC_D);
1059 + def_builtin ("__builtin_sats", int_ftype_int_int_int, AVR32_BUILTIN_SATS);
1060 + def_builtin ("__builtin_satu", int_ftype_int_int_int, AVR32_BUILTIN_SATU);
1061 + def_builtin ("__builtin_satrnds", int_ftype_int_int_int,
1062 + AVR32_BUILTIN_SATRNDS);
1063 + def_builtin ("__builtin_satrndu", int_ftype_int_int_int,
1064 + AVR32_BUILTIN_SATRNDU);
1065 + def_builtin ("__builtin_musfr", void_ftype_int, AVR32_BUILTIN_MUSFR);
1066 + def_builtin ("__builtin_mustr", int_ftype_void, AVR32_BUILTIN_MUSTR);
1067 + def_builtin ("__builtin_macsathh_w", int_ftype_int_short_short,
1068 + AVR32_BUILTIN_MACSATHH_W);
1069 + def_builtin ("__builtin_macwh_d", longlong_ftype_longlong_int_short,
1070 + AVR32_BUILTIN_MACWH_D);
1071 + def_builtin ("__builtin_machh_d", longlong_ftype_longlong_short_short,
1072 + AVR32_BUILTIN_MACHH_D);
1074 + /* Add all builtins that are more or less simple operations on two
1076 + for (i = 0, d = bdesc_2arg; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
1078 + /* Use one of the operands; the target can have a different mode for
1079 + mask-generating compares. */
1084 + def_mbuiltin (d->mask, d->name, *(d->ftype), d->code);
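A usage sketch for some of the builtins registered above (the values are
illustrative):

  int a = 0x7fffffff, b = 1;
  int status = __builtin_mfsr (0);          /* read system register 0 */
  __builtin_mtsr (0, status | 0x10000);     /* write it back */
  int sum = __builtin_satadd_w (a, b);      /* saturating word add (DSP) */

The first operand of __builtin_mfsr/__builtin_mtsr must be a constant
(enforced in avr32_expand_builtin below), and each two-operand DSP builtin
takes its name from the DSP_BUILTIN table ("__builtin_" #code).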
1089 +/* Subroutine of avr32_expand_builtin to take care of binop insns. */
1092 +avr32_expand_binop_builtin (enum insn_code icode, tree arglist, rtx target)
1095 + tree arg0 = TREE_VALUE (arglist);
1096 + tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
1097 + rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
1098 + rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
1099 + enum machine_mode tmode = insn_data[icode].operand[0].mode;
1100 + enum machine_mode mode0 = insn_data[icode].operand[1].mode;
1101 + enum machine_mode mode1 = insn_data[icode].operand[2].mode;
1104 + || GET_MODE (target) != tmode
1105 + || !(*insn_data[icode].operand[0].predicate) (target, tmode))
1106 + target = gen_reg_rtx (tmode);
1108 + /* In case the insn wants input operands in modes different from the
1110 + if (!(*insn_data[icode].operand[1].predicate) (op0, mode0))
1112 + /* If op0 is already a reg we must cast it to the correct mode. */
1114 + op0 = convert_to_mode (mode0, op0, 1);
1116 + op0 = copy_to_mode_reg (mode0, op0);
1118 + if (!(*insn_data[icode].operand[2].predicate) (op1, mode1))
1120 + /* If op1 is already a reg we must cast it to the correct mode. */
1122 + op1 = convert_to_mode (mode1, op1, 1);
1124 + op1 = copy_to_mode_reg (mode1, op1);
1126 + pat = GEN_FCN (icode) (target, op0, op1);
1133 +/* Expand an expression EXP that calls a built-in function,
1134 + with result going to TARGET if that's convenient
1135 + (and in mode MODE if that's convenient).
1136 + SUBTARGET may be used as the target for computing one of EXP's operands.
1137 + IGNORE is nonzero if the value is to be ignored. */
1140 +avr32_expand_builtin (tree exp,
1142 + rtx subtarget ATTRIBUTE_UNUSED,
1143 + enum machine_mode mode ATTRIBUTE_UNUSED,
1144 + int ignore ATTRIBUTE_UNUSED)
1146 + const struct builtin_description *d;
1148 + enum insn_code icode;
1149 + tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
1150 + tree arglist = TREE_OPERAND (exp, 1);
1151 + tree arg0, arg1, arg2;
1152 + rtx op0, op1, op2, pat;
1153 + enum machine_mode tmode, mode0, mode1;
1154 + enum machine_mode arg0_mode;
1155 + int fcode = DECL_FUNCTION_CODE (fndecl);
1162 + case AVR32_BUILTIN_SATS:
1163 + case AVR32_BUILTIN_SATU:
1164 + case AVR32_BUILTIN_SATRNDS:
1165 + case AVR32_BUILTIN_SATRNDU:
1167 + const char *fname;
1171 + case AVR32_BUILTIN_SATS:
1172 + icode = CODE_FOR_sats;
1175 + case AVR32_BUILTIN_SATU:
1176 + icode = CODE_FOR_satu;
1179 + case AVR32_BUILTIN_SATRNDS:
1180 + icode = CODE_FOR_satrnds;
1181 + fname = "satrnds";
1183 + case AVR32_BUILTIN_SATRNDU:
1184 + icode = CODE_FOR_satrndu;
1185 + fname = "satrndu";
1189 + arg0 = TREE_VALUE (arglist);
1190 + arg1 = TREE_VALUE (TREE_CHAIN (arglist));
1191 + arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
1192 + op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
1193 + op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
1194 + op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
1196 + tmode = insn_data[icode].operand[0].mode;
1200 + || GET_MODE (target) != tmode
1201 + || !(*insn_data[icode].operand[0].predicate) (target, tmode))
1202 + target = gen_reg_rtx (tmode);
1205 + if (!(*insn_data[icode].operand[0].predicate) (op0, GET_MODE (op0)))
1207 + op0 = copy_to_mode_reg (insn_data[icode].operand[0].mode, op0);
1210 + if (!(*insn_data[icode].operand[1].predicate) (op1, SImode))
1212 + error ("Parameter 2 to __builtin_%s should be a constant number.",
1217 + if (!(*insn_data[icode].operand[1].predicate) (op2, SImode))
1219 + error ("Parameter 3 to __builtin_%s should be a constant number.",
1224 + emit_move_insn (target, op0);
1225 + pat = GEN_FCN (icode) (target, op1, op2);
1232 + case AVR32_BUILTIN_MUSTR:
1233 + icode = CODE_FOR_mustr;
1234 + tmode = insn_data[icode].operand[0].mode;
1237 + || GET_MODE (target) != tmode
1238 + || !(*insn_data[icode].operand[0].predicate) (target, tmode))
1239 + target = gen_reg_rtx (tmode);
1240 + pat = GEN_FCN (icode) (target);
1246 + case AVR32_BUILTIN_MFSR:
1247 + icode = CODE_FOR_mfsr;
1248 + arg0 = TREE_VALUE (arglist);
1249 + op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
1250 + tmode = insn_data[icode].operand[0].mode;
1251 + mode0 = insn_data[icode].operand[1].mode;
1253 + if (!(*insn_data[icode].operand[1].predicate) (op0, mode0))
1255 + error ("Parameter 1 to __builtin_mfsr must be a constant number");
1259 + || GET_MODE (target) != tmode
1260 + || !(*insn_data[icode].operand[0].predicate) (target, tmode))
1261 + target = gen_reg_rtx (tmode);
1262 + pat = GEN_FCN (icode) (target, op0);
1267 + case AVR32_BUILTIN_MTSR:
1268 + icode = CODE_FOR_mtsr;
1269 + arg0 = TREE_VALUE (arglist);
1270 + arg1 = TREE_VALUE (TREE_CHAIN (arglist));
1271 + op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
1272 + op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
1273 + mode0 = insn_data[icode].operand[0].mode;
1274 + mode1 = insn_data[icode].operand[1].mode;
1276 + if (!(*insn_data[icode].operand[0].predicate) (op0, mode0))
1278 + error ("Parameter 1 to __builtin_mtsr must be a constant number");
1279 + return gen_reg_rtx (mode0);
1281 + if (!(*insn_data[icode].operand[1].predicate) (op1, mode1))
1282 + op1 = copy_to_mode_reg (mode1, op1);
1283 + pat = GEN_FCN (icode) (op0, op1);
1288 + case AVR32_BUILTIN_MFDR:
1289 + icode = CODE_FOR_mfdr;
1290 + arg0 = TREE_VALUE (arglist);
1291 + op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
1292 + tmode = insn_data[icode].operand[0].mode;
1293 + mode0 = insn_data[icode].operand[1].mode;
1295 + if (!(*insn_data[icode].operand[1].predicate) (op0, mode0))
1297 + error ("Parameter 1 to __builtin_mfdr must be a constant number");
1301 + || GET_MODE (target) != tmode
1302 + || !(*insn_data[icode].operand[0].predicate) (target, tmode))
1303 + target = gen_reg_rtx (tmode);
1304 + pat = GEN_FCN (icode) (target, op0);
1309 + case AVR32_BUILTIN_MTDR:
1310 + icode = CODE_FOR_mtdr;
1311 + arg0 = TREE_VALUE (arglist);
1312 + arg1 = TREE_VALUE (TREE_CHAIN (arglist));
1313 + op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
1314 + op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
1315 + mode0 = insn_data[icode].operand[0].mode;
1316 + mode1 = insn_data[icode].operand[1].mode;
1318 + if (!(*insn_data[icode].operand[0].predicate) (op0, mode0))
1320 + error ("Parameter 1 to __builtin_mtdr must be a constant number");
1321 + return gen_reg_rtx (mode0);
1323 + if (!(*insn_data[icode].operand[1].predicate) (op1, mode1))
1324 + op1 = copy_to_mode_reg (mode1, op1);
1325 + pat = GEN_FCN (icode) (op0, op1);
1330 + case AVR32_BUILTIN_CACHE:
1331 + icode = CODE_FOR_cache;
1332 + arg0 = TREE_VALUE (arglist);
1333 + arg1 = TREE_VALUE (TREE_CHAIN (arglist));
1334 + op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
1335 + op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
1336 + mode0 = insn_data[icode].operand[0].mode;
1337 + mode1 = insn_data[icode].operand[1].mode;
1339 + if (!(*insn_data[icode].operand[1].predicate) (op1, mode1))
1341 + error ("Parameter 2 to __builtin_cache must be a constant number");
1342 + return gen_reg_rtx (mode1);
1345 + if (!(*insn_data[icode].operand[0].predicate) (op0, mode0))
1346 + op0 = copy_to_mode_reg (mode0, op0);
1348 + pat = GEN_FCN (icode) (op0, op1);
1353 + case AVR32_BUILTIN_SYNC:
1354 + case AVR32_BUILTIN_MUSFR:
1356 + const char *fname;
1360 + case AVR32_BUILTIN_SYNC:
1361 + icode = CODE_FOR_sync;
1364 + case AVR32_BUILTIN_MUSFR:
1365 + icode = CODE_FOR_musfr;
1370 + arg0 = TREE_VALUE (arglist);
1371 + op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
1372 + mode0 = insn_data[icode].operand[0].mode;
1374 + if (!(*insn_data[icode].operand[0].predicate) (op0, mode0))
1376 + if (icode == CODE_FOR_musfr)
1377 + op0 = copy_to_mode_reg (mode0, op0);
1380 + error ("Parameter to __builtin_%s is illegal.", fname);
1381 + return gen_reg_rtx (mode0);
1384 + pat = GEN_FCN (icode) (op0);
1390 + case AVR32_BUILTIN_TLBR:
1391 + icode = CODE_FOR_tlbr;
1392 + pat = GEN_FCN (icode) (NULL_RTX);
1397 + case AVR32_BUILTIN_TLBS:
1398 + icode = CODE_FOR_tlbs;
1399 + pat = GEN_FCN (icode) (NULL_RTX);
1404 + case AVR32_BUILTIN_TLBW:
1405 + icode = CODE_FOR_tlbw;
1406 + pat = GEN_FCN (icode) (NULL_RTX);
1411 + case AVR32_BUILTIN_BREAKPOINT:
1412 + icode = CODE_FOR_breakpoint;
1413 + pat = GEN_FCN (icode) (NULL_RTX);
1418 + case AVR32_BUILTIN_XCHG:
1419 + icode = CODE_FOR_sync_lock_test_and_setsi;
1420 + arg0 = TREE_VALUE (arglist);
1421 + arg1 = TREE_VALUE (TREE_CHAIN (arglist));
1422 + op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
1423 + op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
1424 + tmode = insn_data[icode].operand[0].mode;
1425 + mode0 = insn_data[icode].operand[1].mode;
1426 + mode1 = insn_data[icode].operand[2].mode;
1428 + if (!(*insn_data[icode].operand[2].predicate) (op1, mode1))
1430 + op1 = copy_to_mode_reg (mode1, op1);
1433 + op0 = gen_rtx_MEM (SImode, op0);
1434 + if (!(*insn_data[icode].operand[1].predicate) (op0, mode0))
1437 + ("Parameter 1 to __builtin_xchg must be a pointer to an integer.");
1441 + || GET_MODE (target) != tmode
1442 + || !(*insn_data[icode].operand[0].predicate) (target, tmode))
1443 + target = gen_reg_rtx (tmode);
1444 + pat = GEN_FCN (icode) (target, op0, op1);
1449 + case AVR32_BUILTIN_LDXI:
1450 + icode = CODE_FOR_ldxi;
1451 + arg0 = TREE_VALUE (arglist);
1452 + arg1 = TREE_VALUE (TREE_CHAIN (arglist));
1453 + arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
1454 + op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
1455 + op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
1456 + op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
1457 + tmode = insn_data[icode].operand[0].mode;
1458 + mode0 = insn_data[icode].operand[1].mode;
1459 + mode1 = insn_data[icode].operand[2].mode;
1461 + if (!(*insn_data[icode].operand[1].predicate) (op0, mode0))
1463 + op0 = copy_to_mode_reg (mode0, op0);
1466 + if (!(*insn_data[icode].operand[2].predicate) (op1, mode1))
1468 + op1 = copy_to_mode_reg (mode1, op1);
1471 + if (!(*insn_data[icode].operand[3].predicate) (op2, SImode))
1474 + ("Parameter 3 to __builtin_ldxi must be a valid extract shift operand: (0|8|16|24)");
1475 + return gen_reg_rtx (mode0);
1479 + || GET_MODE (target) != tmode
1480 + || !(*insn_data[icode].operand[0].predicate) (target, tmode))
1481 + target = gen_reg_rtx (tmode);
1482 + pat = GEN_FCN (icode) (target, op0, op1, op2);
1487 + case AVR32_BUILTIN_BSWAP16:
1489 + icode = CODE_FOR_bswap_16;
1490 + arg0 = TREE_VALUE (arglist);
1491 + arg0_mode = TYPE_MODE (TREE_TYPE (arg0));
1492 + mode0 = insn_data[icode].operand[1].mode;
1493 + if (arg0_mode != mode0)
1494 + arg0 = build1 (NOP_EXPR,
1495 + (*lang_hooks.types.type_for_mode) (mode0, 0), arg0);
1497 + op0 = expand_expr (arg0, NULL_RTX, HImode, 0);
1498 + tmode = insn_data[icode].operand[0].mode;
1501 + if (!(*insn_data[icode].operand[1].predicate) (op0, mode0))
1503 + op0 = copy_to_mode_reg (mode0, op0);
1507 + || GET_MODE (target) != tmode
1508 + || !(*insn_data[icode].operand[0].predicate) (target, tmode))
1510 + target = gen_reg_rtx (tmode);
1514 + pat = GEN_FCN (icode) (target, op0);
1521 + case AVR32_BUILTIN_BSWAP32:
1523 + icode = CODE_FOR_bswap_32;
1524 + arg0 = TREE_VALUE (arglist);
1525 + op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
1526 + tmode = insn_data[icode].operand[0].mode;
1527 + mode0 = insn_data[icode].operand[1].mode;
1529 + if (!(*insn_data[icode].operand[1].predicate) (op0, mode0))
1531 + op0 = copy_to_mode_reg (mode0, op0);
1535 + || GET_MODE (target) != tmode
1536 + || !(*insn_data[icode].operand[0].predicate) (target, tmode))
1537 + target = gen_reg_rtx (tmode);
1540 + pat = GEN_FCN (icode) (target, op0);
1547 + case AVR32_BUILTIN_MVCR_W:
1548 + case AVR32_BUILTIN_MVCR_D:
1550 + arg0 = TREE_VALUE (arglist);
1551 + arg1 = TREE_VALUE (TREE_CHAIN (arglist));
1552 + op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
1553 + op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
1555 + if (fcode == AVR32_BUILTIN_MVCR_W)
1556 + icode = CODE_FOR_mvcrsi;
1558 + icode = CODE_FOR_mvcrdi;
1560 + tmode = insn_data[icode].operand[0].mode;
1563 + || GET_MODE (target) != tmode
1564 + || !(*insn_data[icode].operand[0].predicate) (target, tmode))
1565 + target = gen_reg_rtx (tmode);
1567 + if (!(*insn_data[icode].operand[1].predicate) (op0, SImode))
1570 + ("Parameter 1 to __builtin_cop is not a valid coprocessor number.");
1571 + error ("Number should be between 0 and 7.");
1575 + if (!(*insn_data[icode].operand[2].predicate) (op1, SImode))
1578 + ("Parameter 2 to __builtin_cop is not a valid coprocessor register number.");
1579 + error ("Number should be between 0 and 15.");
1583 + pat = GEN_FCN (icode) (target, op0, op1);
1590 + case AVR32_BUILTIN_MACSATHH_W:
1591 + case AVR32_BUILTIN_MACWH_D:
1592 + case AVR32_BUILTIN_MACHH_D:
1594 + arg0 = TREE_VALUE (arglist);
1595 + arg1 = TREE_VALUE (TREE_CHAIN (arglist));
1596 + arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
1597 + op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
1598 + op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
1599 + op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
1601 + icode = ((fcode == AVR32_BUILTIN_MACSATHH_W) ? CODE_FOR_macsathh_w :
1602 + (fcode == AVR32_BUILTIN_MACWH_D) ? CODE_FOR_macwh_d :
1603 + CODE_FOR_machh_d);
1605 + tmode = insn_data[icode].operand[0].mode;
1606 + mode0 = insn_data[icode].operand[1].mode;
1607 + mode1 = insn_data[icode].operand[2].mode;
1611 + || GET_MODE (target) != tmode
1612 + || !(*insn_data[icode].operand[0].predicate) (target, tmode))
1613 + target = gen_reg_rtx (tmode);
1615 + if (!(*insn_data[icode].operand[0].predicate) (op0, tmode))
1617 + /* If op0 is already a reg we must cast it to the correct mode. */
1619 + op0 = convert_to_mode (tmode, op0, 1);
1621 + op0 = copy_to_mode_reg (tmode, op0);
1624 + if (!(*insn_data[icode].operand[1].predicate) (op1, mode0))
1626 + /* If op1 is already a reg we must cast it to the correct mode. */
1628 + op1 = convert_to_mode (mode0, op1, 1);
1630 + op1 = copy_to_mode_reg (mode0, op1);
1633 + if (!(*insn_data[icode].operand[2].predicate) (op2, mode1))
1635 +	  /* If op2 is already a reg we must cast it to the correct mode. */
1637 + op2 = convert_to_mode (mode1, op2, 1);
1639 + op2 = copy_to_mode_reg (mode1, op2);
1642 + emit_move_insn (target, op0);
1644 + pat = GEN_FCN (icode) (target, op1, op2);
1650 + case AVR32_BUILTIN_MVRC_W:
1651 + case AVR32_BUILTIN_MVRC_D:
1653 + arg0 = TREE_VALUE (arglist);
1654 + arg1 = TREE_VALUE (TREE_CHAIN (arglist));
1655 + arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
1656 + op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
1657 + op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
1658 + op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
1660 + if (fcode == AVR32_BUILTIN_MVRC_W)
1661 + icode = CODE_FOR_mvrcsi;
1663 + icode = CODE_FOR_mvrcdi;
1665 + if (!(*insn_data[icode].operand[0].predicate) (op0, SImode))
1667 + error ("Parameter 1 is not a valid coprocessor number.");
1668 + error ("Number should be between 0 and 7.");
1672 + if (!(*insn_data[icode].operand[1].predicate) (op1, SImode))
1674 + error ("Parameter 2 is not a valid coprocessor register number.");
1675 + error ("Number should be between 0 and 15.");
1679 + if (GET_CODE (op2) == CONST_INT
1680 + || GET_CODE (op2) == CONST
1681 + || GET_CODE (op2) == SYMBOL_REF || GET_CODE (op2) == LABEL_REF)
1683 + op2 = force_const_mem (insn_data[icode].operand[2].mode, op2);
1686 + if (!(*insn_data[icode].operand[2].predicate) (op2, GET_MODE (op2)))
1687 + op2 = copy_to_mode_reg (insn_data[icode].operand[2].mode, op2);
1690 + pat = GEN_FCN (icode) (op0, op1, op2);
1697 + case AVR32_BUILTIN_COP:
1701 + icode = CODE_FOR_cop;
1702 + arg0 = TREE_VALUE (arglist);
1703 + arg1 = TREE_VALUE (TREE_CHAIN (arglist));
1704 + arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
1705 + arg3 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arglist))));
1707 + TREE_VALUE (TREE_CHAIN
1708 + (TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arglist)))));
1709 + op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
1710 + op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
1711 + op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
1712 + op3 = expand_expr (arg3, NULL_RTX, VOIDmode, 0);
1713 + op4 = expand_expr (arg4, NULL_RTX, VOIDmode, 0);
1715 + if (!(*insn_data[icode].operand[0].predicate) (op0, SImode))
1718 + ("Parameter 1 to __builtin_cop is not a valid coprocessor number.");
1719 + error ("Number should be between 0 and 7.");
1723 + if (!(*insn_data[icode].operand[1].predicate) (op1, SImode))
1726 + ("Parameter 2 to __builtin_cop is not a valid coprocessor register number.");
1727 + error ("Number should be between 0 and 15.");
1731 + if (!(*insn_data[icode].operand[2].predicate) (op2, SImode))
1734 + ("Parameter 3 to __builtin_cop is not a valid coprocessor register number.");
1735 + error ("Number should be between 0 and 15.");
1739 + if (!(*insn_data[icode].operand[3].predicate) (op3, SImode))
1742 + ("Parameter 4 to __builtin_cop is not a valid coprocessor register number.");
1743 + error ("Number should be between 0 and 15.");
1747 + if (!(*insn_data[icode].operand[4].predicate) (op4, SImode))
1750 + ("Parameter 5 to __builtin_cop is not a valid coprocessor operation.");
1751 + error ("Number should be between 0 and 127.");
1755 + pat = GEN_FCN (icode) (op0, op1, op2, op3, op4);
1764 + for (i = 0, d = bdesc_2arg; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
1765 + if (d->code == fcode)
1766 + return avr32_expand_binop_builtin (d->icode, arglist, target);
1769 + /* @@@ Should really do something sensible here. */
1774 +/* Handle an "interrupt" or "isr" attribute;
1775 + arguments as in struct attribute_spec.handler. */
1778 +avr32_handle_isr_attribute (tree * node, tree name, tree args,
1779 + int flags, bool * no_add_attrs)
1781 + if (DECL_P (*node))
1783 + if (TREE_CODE (*node) != FUNCTION_DECL)
1785 + warning ("`%s' attribute only applies to functions",
1786 + IDENTIFIER_POINTER (name));
1787 + *no_add_attrs = true;
1789 + /* FIXME: the argument if any is checked for type attributes; should it
1790 + be checked for decl ones? */
1794 + if (TREE_CODE (*node) == FUNCTION_TYPE
1795 + || TREE_CODE (*node) == METHOD_TYPE)
1797 + if (avr32_isr_value (args) == AVR32_FT_UNKNOWN)
1799 + warning ("`%s' attribute ignored", IDENTIFIER_POINTER (name));
1800 + *no_add_attrs = true;
1803 + else if (TREE_CODE (*node) == POINTER_TYPE
1804 + && (TREE_CODE (TREE_TYPE (*node)) == FUNCTION_TYPE
1805 + || TREE_CODE (TREE_TYPE (*node)) == METHOD_TYPE)
1806 + && avr32_isr_value (args) != AVR32_FT_UNKNOWN)
1808 + *node = build_variant_type_copy (*node);
1809 + TREE_TYPE (*node) = build_type_attribute_variant
1810 + (TREE_TYPE (*node),
1811 + tree_cons (name, args, TYPE_ATTRIBUTES (TREE_TYPE (*node))));
1812 + *no_add_attrs = true;
1816 + /* Possibly pass this attribute on from the type to a decl. */
1817 + if (flags & ((int) ATTR_FLAG_DECL_NEXT
1818 + | (int) ATTR_FLAG_FUNCTION_NEXT
1819 + | (int) ATTR_FLAG_ARRAY_NEXT))
1821 + *no_add_attrs = true;
1822 + return tree_cons (name, args, NULL_TREE);
1826 + warning ("`%s' attribute ignored", IDENTIFIER_POINTER (name));
1834 +/* Handle an attribute requiring a FUNCTION_DECL;
1835 + arguments as in struct attribute_spec.handler. */
1837 +avr32_handle_fndecl_attribute (tree * node, tree name,
1838 + tree args ATTRIBUTE_UNUSED,
1839 + int flags ATTRIBUTE_UNUSED,
1840 + bool * no_add_attrs)
1842 + if (TREE_CODE (*node) != FUNCTION_DECL)
1844 + warning ("%qs attribute only applies to functions",
1845 + IDENTIFIER_POINTER (name));
1846 + *no_add_attrs = true;
1853 +/* Handle an acall attribute;
1854 + arguments as in struct attribute_spec.handler. */
1857 +avr32_handle_acall_attribute (tree * node, tree name,
1858 + tree args ATTRIBUTE_UNUSED,
1859 + int flags ATTRIBUTE_UNUSED, bool * no_add_attrs)
1861 + if (TREE_CODE (*node) == FUNCTION_TYPE || TREE_CODE (*node) == METHOD_TYPE)
1863 + warning ("`%s' attribute not yet supported...",
1864 + IDENTIFIER_POINTER (name));
1865 + *no_add_attrs = true;
1869 + warning ("`%s' attribute only applies to functions",
1870 + IDENTIFIER_POINTER (name));
1871 + *no_add_attrs = true;
1876 +/* Return 0 if the attributes for two types are incompatible, 1 if they
1877 + are compatible, and 2 if they are nearly compatible (which causes a
1878 + warning to be generated). */
1881 +avr32_comp_type_attributes (tree type1, tree type2)
1883 + int acall1, acall2, isr1, isr2, naked1, naked2;
1885 + /* Check for mismatch of non-default calling convention. */
1886 + if (TREE_CODE (type1) != FUNCTION_TYPE)
1889 + /* Check for mismatched call attributes. */
1890 + acall1 = lookup_attribute ("acall", TYPE_ATTRIBUTES (type1)) != NULL;
1891 + acall2 = lookup_attribute ("acall", TYPE_ATTRIBUTES (type2)) != NULL;
1892 + naked1 = lookup_attribute ("naked", TYPE_ATTRIBUTES (type1)) != NULL;
1893 + naked2 = lookup_attribute ("naked", TYPE_ATTRIBUTES (type2)) != NULL;
1894 + isr1 = lookup_attribute ("isr", TYPE_ATTRIBUTES (type1)) != NULL;
1896 + isr1 = lookup_attribute ("interrupt", TYPE_ATTRIBUTES (type1)) != NULL;
1898 + isr2 = lookup_attribute ("isr", TYPE_ATTRIBUTES (type2)) != NULL;
1900 + isr2 = lookup_attribute ("interrupt", TYPE_ATTRIBUTES (type2)) != NULL;
1902 + if ((acall1 && isr2)
1903 + || (acall2 && isr1) || (naked1 && isr2) || (naked2 && isr1))
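A sketch of what this check rejects (the typedef form is one common spelling
of a function-type attribute in GCC's extension syntax):

  typedef void isr_fn (void) __attribute__ ((isr));
  void __attribute__ ((acall)) f (void);
  isr_fn *p = f;   /* acall vs. isr: the hook returns 0, types incompatible */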
1910 +/* Computes the type of the current function. */
1912 +static unsigned long
1913 +avr32_compute_func_type (void)
1915 + unsigned long type = AVR32_FT_UNKNOWN;
1919 + if (TREE_CODE (current_function_decl) != FUNCTION_DECL)
1922 + /* Decide if the current function is volatile. Such functions never
1923 + return, and many memory cycles can be saved by not storing register
1924 + values that will never be needed again. This optimization was added to
1925 + speed up context switching in a kernel application. */
1927 + && TREE_NOTHROW (current_function_decl)
1928 + && TREE_THIS_VOLATILE (current_function_decl))
1929 + type |= AVR32_FT_VOLATILE;
1931 + if (cfun->static_chain_decl != NULL)
1932 + type |= AVR32_FT_NESTED;
1934 + attr = DECL_ATTRIBUTES (current_function_decl);
1936 + a = lookup_attribute ("isr", attr);
1937 + if (a == NULL_TREE)
1938 + a = lookup_attribute ("interrupt", attr);
1940 + if (a == NULL_TREE)
1941 + type |= AVR32_FT_NORMAL;
1943 + type |= avr32_isr_value (TREE_VALUE (a));
1946 + a = lookup_attribute ("acall", attr);
1947 + if (a != NULL_TREE)
1948 + type |= AVR32_FT_ACALL;
1950 + a = lookup_attribute ("naked", attr);
1951 + if (a != NULL_TREE)
1952 + type |= AVR32_FT_NAKED;
1957 +/* Returns the type of the current function. */
1959 +static unsigned long
1960 +avr32_current_func_type (void)
1962 + if (AVR32_FUNC_TYPE (cfun->machine->func_type) == AVR32_FT_UNKNOWN)
1963 + cfun->machine->func_type = avr32_compute_func_type ();
1965 + return cfun->machine->func_type;
1969 + This target hook should return true if we should not pass type solely
1970 + in registers. The file expr.h provides a default definition that is
1971 + usually appropriate; refer to expr.h for additional documentation.
1974 +avr32_must_pass_in_stack (enum machine_mode mode ATTRIBUTE_UNUSED, tree type)
1976 + if (type && AGGREGATE_TYPE_P (type)
1977 + /* If the alignment is less than the size then pass in the struct on
1979 + && ((unsigned int) TYPE_ALIGN_UNIT (type) <
1980 + (unsigned int) int_size_in_bytes (type))
1981 + /* If we support unaligned word accesses then structs of size 4 and 8
1982 + can have any alignment and still be passed in registers. */
1983 + && !(TARGET_UNALIGNED_WORD
1984 + && (int_size_in_bytes (type) == 4
1985 + || int_size_in_bytes (type) == 8))
1986 + /* Double word structs need only a word alignment. */
1987 + && !(int_size_in_bytes (type) == 8 && TYPE_ALIGN_UNIT (type) >= 4))
1990 + if (type && AGGREGATE_TYPE_P (type)
1991 + /* Structs of size 3,5,6,7 are always passed in registers. */
1992 + && (int_size_in_bytes (type) == 3
1993 + || int_size_in_bytes (type) == 5
1994 + || int_size_in_bytes (type) == 6 || int_size_in_bytes (type) == 7))
1998 + return (type && TREE_ADDRESSABLE (type));
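The chain of tests above is easier to follow in isolation. Below is a hedged sketch with plain byte sizes in place of the tree queries, for an aggregate type; unaligned_word_ok stands in for TARGET_UNALIGNED_WORD, and the checks are applied in the same order as in the hook:

/* Sketch of avr32_must_pass_in_stack for an aggregate of the given
   size and alignment (both in bytes).  Returns 1 for "pass on the
   stack", 0 for "may go in registers".  */
static int
sketch_must_pass_in_stack (int size, int align,
                           int unaligned_word_ok, int addressable)
{
  /* Under-aligned aggregates go on the stack, unless unaligned word
     access covers 4/8-byte objects, or the 8-byte object is at least
     word aligned.  */
  if (align < size
      && !(unaligned_word_ok && (size == 4 || size == 8))
      && !(size == 8 && align >= 4))
    return 1;
  /* Aggregates of size 3, 5, 6 or 7 stay in registers.  */
  if (size == 3 || size == 5 || size == 6 || size == 7)
    return 0;
  return addressable;
}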
2003 +avr32_strict_argument_naming (CUMULATIVE_ARGS * ca ATTRIBUTE_UNUSED)
2009 + This target hook should return true if an argument at the position indicated
2010 + by cum should be passed by reference. This predicate is queried after target
2011 + independent reasons for being passed by reference, such as TREE_ADDRESSABLE (type).
2013 + If the hook returns true, a copy of that argument is made in memory and a
2014 + pointer to the argument is passed instead of the argument itself. The pointer
2015 + is passed in whatever way is appropriate for passing a pointer to that type.
2018 +avr32_pass_by_reference (CUMULATIVE_ARGS * cum ATTRIBUTE_UNUSED,
2019 + enum machine_mode mode ATTRIBUTE_UNUSED,
2020 + tree type, bool named ATTRIBUTE_UNUSED)
2022 + return (type && (TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST));
2026 +avr32_arg_partial_bytes (CUMULATIVE_ARGS * pcum ATTRIBUTE_UNUSED,
2027 + enum machine_mode mode ATTRIBUTE_UNUSED,
2028 + tree type ATTRIBUTE_UNUSED,
2029 + bool named ATTRIBUTE_UNUSED)
2035 +struct gcc_target targetm = TARGET_INITIALIZER;
2038 + Table used to convert between register numbers in the assembler
2039 + instructions and the register numbers used in gcc.
2041 +const int avr32_function_arg_reglist[] = {
2042 + INTERNAL_REGNUM (12),
2043 + INTERNAL_REGNUM (11),
2044 + INTERNAL_REGNUM (10),
2045 + INTERNAL_REGNUM (9),
2046 + INTERNAL_REGNUM (8)
2049 +rtx avr32_compare_op0 = NULL_RTX;
2050 +rtx avr32_compare_op1 = NULL_RTX;
2051 +rtx avr32_compare_operator = NULL_RTX;
2052 +rtx avr32_acc_cache = NULL_RTX;
2055 + Returns nonzero if it is allowed to store a value of mode mode in hard
2056 + register number regno.
2059 +avr32_hard_regno_mode_ok (int regnr, enum machine_mode mode)
2061 + /* We allow only float modes in the fp-registers */
2062 + if (regnr >= FIRST_FP_REGNUM
2063 + && regnr <= LAST_FP_REGNUM && GET_MODE_CLASS (mode) != MODE_FLOAT)
2070 + case DImode: /* long long */
2071 + case DFmode: /* double */
2072 + case SCmode: /* __complex__ float */
2073 + case CSImode: /* __complex__ int */
2075 + { /* long long int not supported in r12, sp, lr
2081 + if (regnr % 2) /* long long int has to be referred to in even
2087 + case CDImode: /* __complex__ long long */
2088 + case DCmode: /* __complex__ double */
2089 + case TImode: /* 16 bytes */
2092 + else if (regnr % 2)
2103 +avr32_rnd_operands (rtx add, rtx shift)
2105 + if (GET_CODE (shift) == CONST_INT &&
2106 + GET_CODE (add) == CONST_INT && INTVAL (shift) > 0)
2108 + if ((1 << (INTVAL (shift) - 1)) == INTVAL (add))
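The test above recognizes the usual rounding idiom: adding half of the divisor before shifting right, i.e. the added constant must equal 1 << (shift - 1). A tiny hedged sketch of the same condition:

/* Sketch: (x + add) >> shift rounds rather than truncates exactly
   when add == 1 << (shift - 1), e.g. (x + 4) >> 3 rounds x/8.  */
static int
sketch_rnd_operands (long add, long shift)
{
  return shift > 0 && add == (1L << (shift - 1));
}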
2118 +avr32_const_ok_for_constraint_p (HOST_WIDE_INT value, char c, const char *str)
2125 + HOST_WIDE_INT min_value = 0, max_value = 0;
2129 + size_str[0] = str[2];
2130 + size_str[1] = str[3];
2131 + size_str[2] = '\0';
2132 + const_size = atoi (size_str);
2134 + if (toupper (str[1]) == 'U')
2137 + max_value = (1 << const_size) - 1;
2139 + else if (toupper (str[1]) == 'S')
2141 + min_value = -(1 << (const_size - 1));
2142 + max_value = (1 << (const_size - 1)) - 1;
2150 + if (value >= min_value && value <= max_value)
2157 + return avr32_mask_upper_bits_operand (GEN_INT (value), VOIDmode);
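The constraint string is parsed as a signedness letter plus a decimal bit count, so "Ks21" accepts -(1<<20) .. (1<<20)-1 and "Ku14" accepts 0 .. (1<<14)-1. A self-contained sketch of the same range computation (the function name is illustrative):

#include <ctype.h>
#include <stdlib.h>

/* Sketch: does VALUE fit the constraint STR, e.g. "Ks21" or "Ku14"?  */
static int
sketch_const_ok (long value, const char *str)
{
  int bits = atoi (str + 2);            /* decimal size after 'Ks'/'Ku' */
  long min = 0, max = 0;

  if (toupper ((unsigned char) str[1]) == 'U')
    max = (1L << bits) - 1;             /* unsigned: 0 .. 2^bits - 1    */
  else
    {
      min = -(1L << (bits - 1));        /* signed: -2^(bits-1) ..       */
      max = (1L << (bits - 1)) - 1;     /*          2^(bits-1) - 1      */
    }
  return value >= min && value <= max;
}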
2164 +/* Compute mask of which floating-point registers need saving upon
2165 + entry to this function. */
2166 +static unsigned long
2167 +avr32_compute_save_fp_reg_mask (void)
2169 + unsigned long func_type = avr32_current_func_type ();
2170 + unsigned int save_reg_mask = 0;
2172 + unsigned int max_reg = 7;
2173 + int save_all_call_used_regs = FALSE;
2175 + /* This only applies to the hardware floating-point implementation. */
2176 + if (!TARGET_HARD_FLOAT)
2179 + if (IS_INTERRUPT (func_type))
2182 + /* Interrupt functions must not corrupt any registers, even call
2183 + clobbered ones. If this is a leaf function we can just examine the
2184 + registers used by the RTL, but otherwise we have to assume that
2185 + whatever function is called might clobber anything, and so we have
2186 + to save all the call-clobbered registers as well. */
2188 + save_all_call_used_regs = !current_function_is_leaf;
2191 + /* All registers used must be saved. */
2192 + for (reg = 0; reg <= max_reg; reg++)
2193 + if (regs_ever_live[INTERNAL_FP_REGNUM (reg)]
2194 + || (save_all_call_used_regs
2195 + && call_used_regs[INTERNAL_FP_REGNUM (reg)]))
2196 + save_reg_mask |= (1 << reg);
2198 + return save_reg_mask;
2201 +/* Compute mask of registers which need saving upon function entry. */
2202 +static unsigned long
2203 +avr32_compute_save_reg_mask (int push)
2205 + unsigned long func_type;
2206 + unsigned int save_reg_mask = 0;
2209 + func_type = avr32_current_func_type ();
2211 + if (IS_INTERRUPT (func_type))
2213 + unsigned int max_reg = 12;
2216 + /* Get the banking scheme for the interrupt */
2217 + switch (func_type)
2219 + case AVR32_FT_ISR_FULL:
2222 + case AVR32_FT_ISR_HALF:
2225 + case AVR32_FT_ISR_NONE:
2230 + /* Interrupt functions must not corrupt any registers, even call
2231 + clobbered ones. If this is a leaf function we can just examine the
2232 + registers used by the RTL, but otherwise we have to assume that
2233 + whatever function is called might clobber anything, and so we have
2234 + to save all the call-clobbered registers as well. */
2236 + /* Need not push the registers r8-r12 for AVR32A architectures, as this
2237 + is automatically done in hardware. We also do not have any shadow
2239 + if (avr32_arch->uarch_type == UARCH_TYPE_AVR32A)
2242 + func_type = AVR32_FT_ISR_NONE;
2245 + /* All registers which are used and are not shadowed must be saved. */
2246 + for (reg = 0; reg <= max_reg; reg++)
2247 + if (regs_ever_live[INTERNAL_REGNUM (reg)]
2248 + || (!current_function_is_leaf
2249 + && call_used_regs[INTERNAL_REGNUM (reg)]))
2250 + save_reg_mask |= (1 << reg);
2253 + if ((regs_ever_live[LR_REGNUM]
2254 + || !current_function_is_leaf || frame_pointer_needed)
2255 + /* Only non-shadowed register models */
2256 + && (func_type == AVR32_FT_ISR_NONE))
2257 + save_reg_mask |= (1 << ASM_REGNUM (LR_REGNUM));
2259 + /* Make sure that the GOT register is pushed. */
2260 + if (max_reg >= ASM_REGNUM (PIC_OFFSET_TABLE_REGNUM)
2261 + && current_function_uses_pic_offset_table)
2262 + save_reg_mask |= (1 << ASM_REGNUM (PIC_OFFSET_TABLE_REGNUM));
2267 + int use_pushm = optimize_size;
2269 + /* In the normal case we only need to save those registers which are
2270 + call saved and which are used by this function. */
2271 + for (reg = 0; reg <= 7; reg++)
2272 + if (regs_ever_live[INTERNAL_REGNUM (reg)]
2273 + && !call_used_regs[INTERNAL_REGNUM (reg)])
2274 + save_reg_mask |= (1 << reg);
2276 + /* Make sure that the GOT register is pushed. */
2277 + if (current_function_uses_pic_offset_table)
2278 + save_reg_mask |= (1 << ASM_REGNUM (PIC_OFFSET_TABLE_REGNUM));
2281 + /* If we optimize for size and do not have anonymous arguments: always
2282 + use pushm/popm. */
2285 + if ((save_reg_mask & (1 << 0))
2286 + || (save_reg_mask & (1 << 1))
2287 + || (save_reg_mask & (1 << 2)) || (save_reg_mask & (1 << 3)))
2288 + save_reg_mask |= 0xf;
2290 + if ((save_reg_mask & (1 << 4))
2291 + || (save_reg_mask & (1 << 5))
2292 + || (save_reg_mask & (1 << 6)) || (save_reg_mask & (1 << 7)))
2293 + save_reg_mask |= 0xf0;
2295 + if ((save_reg_mask & (1 << 8)) || (save_reg_mask & (1 << 9)))
2296 + save_reg_mask |= 0x300;
2301 + if ((regs_ever_live[LR_REGNUM]
2302 + || !current_function_is_leaf
2305 + && !current_function_calls_eh_return) || frame_pointer_needed))
2308 + /* Never pop LR into PC for functions which
2309 + call __builtin_eh_return, since we need to
2310 + fix the SP after restoring the registers
2311 + and before returning. */
2312 + || current_function_calls_eh_return)
2315 + save_reg_mask |= (1 << ASM_REGNUM (LR_REGNUM));
2320 + save_reg_mask |= (1 << ASM_REGNUM (PC_REGNUM));
2326 + /* Save registers so the exception handler can modify them. */
2327 + if (current_function_calls_eh_return)
2333 + reg = EH_RETURN_DATA_REGNO (i);
2334 + if (reg == INVALID_REGNUM)
2336 + save_reg_mask |= 1 << ASM_REGNUM (reg);
2340 + return save_reg_mask;
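The three mask widenings above encode the pushm operand format: pushm addresses whole register groups (r0-r3, r4-r7, r8-r9), so saving any register of a group means saving the entire group. A hedged sketch of just that widening step:

/* Sketch: widen a register mask to whole pushm groups.  */
static unsigned long
sketch_widen_for_pushm (unsigned long mask)
{
  if (mask & 0x00f)
    mask |= 0x00f;   /* any of r0-r3 -> all of r0-r3 */
  if (mask & 0x0f0)
    mask |= 0x0f0;   /* any of r4-r7 -> all of r4-r7 */
  if (mask & 0x300)
    mask |= 0x300;   /* r8 or r9     -> r8-r9        */
  return mask;
}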
2343 +/* Compute total size in bytes of all saved registers. */
2345 +avr32_get_reg_mask_size (int reg_mask)
2350 + for (reg = 0; reg <= 15; reg++)
2351 + if (reg_mask & (1 << reg))
2357 +/* Get a register from one of the registers which are saved onto the stack
2358 + upon function entry. */
2361 +avr32_get_saved_reg (int save_reg_mask)
2365 + /* Find the first register which is saved in save_reg_mask */
2366 + for (reg = 0; reg <= 15; reg++)
2367 + if (save_reg_mask & (1 << reg))
2373 +/* Return 1 if it is possible to return using a single instruction. */
2375 +avr32_use_return_insn (int iscond)
2377 + unsigned int func_type = avr32_current_func_type ();
2378 + unsigned long saved_int_regs;
2379 + unsigned long saved_fp_regs;
2381 + /* Never use a return instruction before reload has run. */
2382 + if (!reload_completed)
2385 + /* Must adjust the stack for vararg functions. */
2386 + if (current_function_args_info.uses_anonymous_args)
2389 + /* If there is a stack adjustment. */
2390 + if (get_frame_size ())
2393 + saved_int_regs = avr32_compute_save_reg_mask (TRUE);
2394 + saved_fp_regs = avr32_compute_save_fp_reg_mask ();
2396 + /* Functions which have saved fp-regs on the stack cannot return in
2397 + one instruction. */
2398 + if (saved_fp_regs)
2401 + /* Conditional returns cannot be performed in one instruction if we need
2402 + to restore registers from the stack. */
2403 + if (iscond && saved_int_regs)
2406 + /* Conditional return cannot be used for interrupt handlers. */
2407 + if (iscond && IS_INTERRUPT (func_type))
2410 + /* For interrupt handlers which need to pop registers */
2411 + if (saved_int_regs && IS_INTERRUPT (func_type))
2415 + /* If there are saved registers but the LR isn't saved, then we need two
2416 + instructions for the return. */
2417 + if (saved_int_regs && !(saved_int_regs & (1 << ASM_REGNUM (LR_REGNUM))))
2425 +/* Generate some function prologue info in the assembly file. */
2428 +avr32_target_asm_function_prologue (FILE * f, HOST_WIDE_INT frame_size)
2430 + if (IS_NAKED (avr32_current_func_type ()))
2432 + "\t# Function is naked: Prologue and epilogue provided by programmer\n");
2434 + if (IS_INTERRUPT (avr32_current_func_type ()))
2436 + switch (avr32_current_func_type ())
2438 + case AVR32_FT_ISR_FULL:
2440 + "\t# Interrupt Function: Fully shadowed register file\n");
2442 + case AVR32_FT_ISR_HALF:
2444 + "\t# Interrupt Function: Half shadowed register file\n");
2447 + case AVR32_FT_ISR_NONE:
2448 + fprintf (f, "\t# Interrupt Function: No shadowed register file\n");
2454 + fprintf (f, "\t# args = %i, frame = %li, pretend = %i\n",
2455 + current_function_args_size, frame_size,
2456 + current_function_pretend_args_size);
2458 + fprintf (f, "\t# frame_needed = %i, leaf_function = %i\n",
2459 + frame_pointer_needed, current_function_is_leaf);
2461 + fprintf (f, "\t# uses_anonymous_args = %i\n",
2462 + current_function_args_info.uses_anonymous_args);
2463 + if (current_function_calls_eh_return)
2464 + fprintf (f, "\t# Calls __builtin_eh_return.\n");
2469 +/* Generate and emit an insn that we will recognize as a pushm or stm.
2470 + Unfortunately, since this insn does not reflect very well the actual
2471 + semantics of the operation, we need to annotate the insn for the benefit
2472 + of DWARF2 frame unwind information. */
2474 +int avr32_convert_to_reglist16 (int reglist8_vect);
2477 +emit_multi_reg_push (int reglist, int usePUSHM)
2489 + insn = emit_insn (gen_pushm (gen_rtx_CONST_INT (SImode, reglist)));
2490 + reglist = avr32_convert_to_reglist16 (reglist);
2494 + insn = emit_insn (gen_stm (stack_pointer_rtx,
2495 + gen_rtx_CONST_INT (SImode, reglist),
2496 + gen_rtx_CONST_INT (SImode, 1)));
2499 + nr_regs = avr32_get_reg_mask_size (reglist) / 4;
2500 + dwarf = gen_rtx_SEQUENCE (VOIDmode, rtvec_alloc (nr_regs + 1));
2502 + for (i = 15; i >= 0; i--)
2504 + if (reglist & (1 << i))
2506 + reg = gen_rtx_REG (SImode, INTERNAL_REGNUM (i));
2507 + tmp = gen_rtx_SET (VOIDmode,
2508 + gen_rtx_MEM (SImode,
2509 + plus_constant (stack_pointer_rtx,
2510 + 4 * index)), reg);
2511 + RTX_FRAME_RELATED_P (tmp) = 1;
2512 + XVECEXP (dwarf, 0, 1 + index++) = tmp;
2516 + tmp = gen_rtx_SET (SImode,
2517 + stack_pointer_rtx,
2518 + gen_rtx_PLUS (SImode,
2519 + stack_pointer_rtx,
2520 + GEN_INT (-4 * nr_regs)));
2521 + RTX_FRAME_RELATED_P (tmp) = 1;
2522 + XVECEXP (dwarf, 0, 0) = tmp;
2523 + REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR, dwarf,
2524 + REG_NOTES (insn));
2530 +emit_multi_fp_reg_push (int reglist)
2540 + insn = emit_insn (gen_stm_fp (stack_pointer_rtx,
2541 + gen_rtx_CONST_INT (SImode, reglist),
2542 + gen_rtx_CONST_INT (SImode, 1)));
2544 + nr_regs = avr32_get_reg_mask_size (reglist) / 4;
2545 + dwarf = gen_rtx_SEQUENCE (VOIDmode, rtvec_alloc (nr_regs + 1));
2547 + for (i = 15; i >= 0; i--)
2549 + if (reglist & (1 << i))
2551 + reg = gen_rtx_REG (SImode, INTERNAL_FP_REGNUM (i));
2552 + tmp = gen_rtx_SET (VOIDmode,
2553 + gen_rtx_MEM (SImode,
2554 + plus_constant (stack_pointer_rtx,
2555 + 4 * index)), reg);
2556 + RTX_FRAME_RELATED_P (tmp) = 1;
2557 + XVECEXP (dwarf, 0, 1 + index++) = tmp;
2561 + tmp = gen_rtx_SET (SImode,
2562 + stack_pointer_rtx,
2563 + gen_rtx_PLUS (SImode,
2564 + stack_pointer_rtx,
2565 + GEN_INT (-4 * nr_regs)));
2566 + RTX_FRAME_RELATED_P (tmp) = 1;
2567 + XVECEXP (dwarf, 0, 0) = tmp;
2568 + REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR, dwarf,
2569 + REG_NOTES (insn));
2574 +avr32_gen_load_multiple (rtx * regs, int count, rtx from,
2575 + int write_back, int in_struct_p, int scalar_p)
2582 + gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (count + (write_back ? 1 : 0)));
2586 + XVECEXP (result, 0, 0)
2587 + = gen_rtx_SET (GET_MODE (from), from,
2588 + plus_constant (from, count * 4));
2594 + for (j = 0; i < count; i++, j++)
2597 + rtx mem = gen_rtx_MEM (SImode, plus_constant (from, j * 4));
2598 + MEM_IN_STRUCT_P (mem) = in_struct_p;
2599 + MEM_SCALAR_P (mem) = scalar_p;
2600 + unspec = gen_rtx_UNSPEC (VOIDmode, gen_rtvec (1, mem), UNSPEC_LDM);
2601 + XVECEXP (result, 0, i) = gen_rtx_SET (VOIDmode, regs[j], unspec);
2609 +avr32_gen_store_multiple (rtx * regs, int count, rtx to,
2610 + int in_struct_p, int scalar_p)
2615 + result = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (count));
2617 + for (j = 0; i < count; i++, j++)
2619 + rtx mem = gen_rtx_MEM (SImode, plus_constant (to, j * 4));
2620 + MEM_IN_STRUCT_P (mem) = in_struct_p;
2621 + MEM_SCALAR_P (mem) = scalar_p;
2622 + XVECEXP (result, 0, i)
2623 + = gen_rtx_SET (VOIDmode, mem,
2624 + gen_rtx_UNSPEC (VOIDmode,
2625 + gen_rtvec (1, regs[j]),
2626 + UNSPEC_STORE_MULTIPLE));
2633 +/* Move a block of memory if it is word aligned or we support unaligned
2634 + word memory accesses. The size must be at most 64 bytes. */
2637 +avr32_gen_movmemsi (rtx * operands)
2639 + HOST_WIDE_INT bytes_to_go;
2641 + rtx st_src, st_dst;
2642 + int ptr_offset = 0;
2644 + int dst_in_struct_p, src_in_struct_p;
2645 + int dst_scalar_p, src_scalar_p;
2648 + if (GET_CODE (operands[2]) != CONST_INT
2649 + || GET_CODE (operands[3]) != CONST_INT
2650 + || INTVAL (operands[2]) > 64
2651 + || ((INTVAL (operands[3]) & 3) && !TARGET_UNALIGNED_WORD))
2654 + unaligned = (INTVAL (operands[3]) & 3) != 0;
2658 + st_dst = XEXP (operands[0], 0);
2659 + st_src = XEXP (operands[1], 0);
2661 + dst_in_struct_p = MEM_IN_STRUCT_P (operands[0]);
2662 + dst_scalar_p = MEM_SCALAR_P (operands[0]);
2663 + src_in_struct_p = MEM_IN_STRUCT_P (operands[1]);
2664 + src_scalar_p = MEM_SCALAR_P (operands[1]);
2666 + dst = copy_to_mode_reg (SImode, st_dst);
2667 + src = copy_to_mode_reg (SImode, st_src);
2669 + bytes_to_go = INTVAL (operands[2]);
2671 + while (bytes_to_go)
2673 + enum machine_mode move_mode;
2674 + /* (Seems to be a problem with reloads for the movti pattern so this is
2675 + disabled until that problem is resolved)
2676 + UPDATE: Problem seems to be solved now.... */
2677 + if (bytes_to_go >= GET_MODE_SIZE (TImode) && !unaligned
2678 + /* Do not emit ldm/stm for UC3, as ld.d/st.d performs better. */
2679 + && avr32_arch->arch_type != ARCH_TYPE_AVR32_UC)
2680 + move_mode = TImode;
2681 + else if ((bytes_to_go >= GET_MODE_SIZE (DImode)) && !unaligned)
2682 + move_mode = DImode;
2683 + else if (bytes_to_go >= GET_MODE_SIZE (SImode))
2684 + move_mode = SImode;
2686 + move_mode = QImode;
2689 + rtx dst_mem = gen_rtx_MEM (move_mode,
2690 + gen_rtx_PLUS (SImode, dst,
2691 + GEN_INT (ptr_offset)));
2692 + rtx src_mem = gen_rtx_MEM (move_mode,
2693 + gen_rtx_PLUS (SImode, src,
2694 + GEN_INT (ptr_offset)));
2695 + ptr_offset += GET_MODE_SIZE (move_mode);
2696 + bytes_to_go -= GET_MODE_SIZE (move_mode);
2698 + MEM_IN_STRUCT_P (dst_mem) = dst_in_struct_p;
2699 + MEM_SCALAR_P (dst_mem) = dst_scalar_p;
2701 + MEM_IN_STRUCT_P (src_mem) = src_in_struct_p;
2702 + MEM_SCALAR_P (src_mem) = src_scalar_p;
2703 + emit_move_insn (dst_mem, src_mem);
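The mode ladder above always picks the widest chunk that still fits, falling back to byte moves for unaligned tails. A hedged sketch of the size selection, where unaligned and allow_ti stand in for the alignment test and the UC3 uarch check:

/* Sketch: bytes moved per iteration of the copy loop above.  */
static int
sketch_chunk_size (long bytes_to_go, int unaligned, int allow_ti)
{
  if (bytes_to_go >= 16 && !unaligned && allow_ti)
    return 16;                  /* TImode */
  if (bytes_to_go >= 8 && !unaligned)
    return 8;                   /* DImode */
  if (bytes_to_go >= 4)
    return 4;                   /* SImode, also for unaligned words */
  return 1;                     /* QImode */
}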
2713 +/* Expand the function prologue. */
2715 +avr32_expand_prologue (void)
2718 + unsigned long saved_reg_mask, saved_fp_reg_mask;
2721 + /* Naked functions do not have a prologue. */
2722 + if (IS_NAKED (avr32_current_func_type ()))
2725 + saved_reg_mask = avr32_compute_save_reg_mask (TRUE);
2727 + if (saved_reg_mask)
2729 + /* Must push used registers */
2731 + /* Should we use PUSHM or STM? */
2732 + int usePUSHM = TRUE;
2734 + if (((saved_reg_mask & (1 << 0)) ||
2735 + (saved_reg_mask & (1 << 1)) ||
2736 + (saved_reg_mask & (1 << 2)) || (saved_reg_mask & (1 << 3))))
2738 + /* At least one of R0-R3 must be pushed */
2739 + if (((saved_reg_mask & (1 << 0)) &&
2740 + (saved_reg_mask & (1 << 1)) &&
2741 + (saved_reg_mask & (1 << 2)) && (saved_reg_mask & (1 << 3))))
2743 + /* All should be pushed */
2752 + if (((saved_reg_mask & (1 << 4)) ||
2753 + (saved_reg_mask & (1 << 5)) ||
2754 + (saved_reg_mask & (1 << 6)) || (saved_reg_mask & (1 << 7))))
2756 + /* At least one of R4-R7 must be pushed */
2757 + if (((saved_reg_mask & (1 << 4)) &&
2758 + (saved_reg_mask & (1 << 5)) &&
2759 + (saved_reg_mask & (1 << 6)) && (saved_reg_mask & (1 << 7))))
2762 + /* All should be pushed */
2771 + if (((saved_reg_mask & (1 << 8)) || (saved_reg_mask & (1 << 9))))
2773 + /* At least one of R8-R9 must be pushed */
2774 + if (((saved_reg_mask & (1 << 8)) && (saved_reg_mask & (1 << 9))))
2777 + /* All should be pushed */
2786 + if (saved_reg_mask & (1 << 10))
2789 + if (saved_reg_mask & (1 << 11))
2792 + if (saved_reg_mask & (1 << 12))
2795 + if (saved_reg_mask & (1 << ASM_REGNUM (LR_REGNUM)))
2803 + insn = emit_multi_reg_push (reglist8, TRUE);
2807 + insn = emit_multi_reg_push (saved_reg_mask, FALSE);
2809 + RTX_FRAME_RELATED_P (insn) = 1;
2811 + /* Prevent this instruction from being scheduled after any other
2813 + emit_insn (gen_blockage ());
2816 + saved_fp_reg_mask = avr32_compute_save_fp_reg_mask ();
2817 + if (saved_fp_reg_mask)
2819 + insn = emit_multi_fp_reg_push (saved_fp_reg_mask);
2820 + RTX_FRAME_RELATED_P (insn) = 1;
2822 + /* Prevent this instruction from being scheduled after any other
2824 + emit_insn (gen_blockage ());
2827 + /* Set frame pointer */
2828 + if (frame_pointer_needed)
2830 + insn = emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
2831 + RTX_FRAME_RELATED_P (insn) = 1;
2834 + if (get_frame_size () > 0)
2836 + if (avr32_const_ok_for_constraint_p (get_frame_size (), 'K', "Ks21"))
2838 + insn = emit_insn (gen_rtx_SET (SImode,
2839 + stack_pointer_rtx,
2840 + gen_rtx_PLUS (SImode,
2841 + stack_pointer_rtx,
2846 + RTX_FRAME_RELATED_P (insn) = 1;
2850 + /* Immediate is larger than k21. We must either use one of the
2851 + pushed registers as temporary storage, or free up a temp register
2852 + by pushing one to the stack. */
2853 + rtx temp_reg, const_pool_entry, insn;
2854 + if (saved_reg_mask)
2857 + gen_rtx_REG (SImode,
2858 + INTERNAL_REGNUM (avr32_get_saved_reg
2859 + (saved_reg_mask)));
2863 + temp_reg = gen_rtx_REG (SImode, INTERNAL_REGNUM (7));
2864 + emit_move_insn (gen_rtx_MEM
2866 + gen_rtx_PRE_DEC (SImode, stack_pointer_rtx)),
2870 + const_pool_entry =
2871 + force_const_mem (SImode,
2872 + gen_rtx_CONST_INT (SImode, get_frame_size ()));
2873 + emit_move_insn (temp_reg, const_pool_entry);
2875 + insn = emit_insn (gen_rtx_SET (SImode,
2876 + stack_pointer_rtx,
2877 + gen_rtx_MINUS (SImode,
2878 + stack_pointer_rtx,
2881 + dwarf = gen_rtx_SET (VOIDmode, stack_pointer_rtx,
2882 + gen_rtx_PLUS (SImode, stack_pointer_rtx,
2883 + GEN_INT (-get_frame_size ())));
2884 + REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
2885 + dwarf, REG_NOTES (insn));
2886 + RTX_FRAME_RELATED_P (insn) = 1;
2888 + if (!saved_reg_mask)
2891 + emit_move_insn (temp_reg,
2892 + gen_rtx_MEM (SImode,
2893 + gen_rtx_POST_INC (SImode,
2899 + /* Mark the temp register as dead */
2900 + REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_DEAD, temp_reg,
2901 + REG_NOTES (insn));
2906 + /* Prevent the stack adjustment from being scheduled after any
2907 + instructions using the frame pointer. */
2908 + emit_insn (gen_blockage ());
2914 + avr32_load_pic_register ();
2916 + /* gcc does not know that load or call instructions might use the pic
2917 + register so it might schedule these instructions before the loading
2918 + of the pic register. To avoid this emit a barrier for now. TODO!
2919 + Find out a better way to let gcc know which instructions might use
2920 + the pic register. */
2921 + emit_insn (gen_blockage ());
2927 +avr32_set_return_address (rtx source, rtx scratch)
2930 + unsigned long saved_regs;
2932 + saved_regs = avr32_compute_save_reg_mask (TRUE);
2934 + if (!(saved_regs & (1 << ASM_REGNUM (LR_REGNUM))))
2935 + emit_move_insn (gen_rtx_REG (Pmode, LR_REGNUM), source);
2938 + if (frame_pointer_needed)
2939 + addr = gen_rtx_REG (Pmode, FRAME_POINTER_REGNUM);
2941 + if (avr32_const_ok_for_constraint_p (get_frame_size (), 'K', "Ks16"))
2943 + addr = plus_constant (stack_pointer_rtx, get_frame_size ());
2947 + emit_insn (gen_movsi (scratch, GEN_INT (get_frame_size ())));
2950 + emit_move_insn (gen_rtx_MEM (Pmode, addr), source);
2956 +/* Return the length of INSN. LENGTH is the initial length computed by
2957 + attributes in the machine-description file. */
2960 +avr32_adjust_insn_length (rtx insn ATTRIBUTE_UNUSED,
2961 + int length ATTRIBUTE_UNUSED)
2967 +avr32_output_return_instruction (int single_ret_inst ATTRIBUTE_UNUSED,
2968 + int iscond ATTRIBUTE_UNUSED,
2969 + rtx cond ATTRIBUTE_UNUSED, rtx r12_imm)
2972 + unsigned long saved_reg_mask, saved_fp_reg_mask;
2973 + int insert_ret = TRUE;
2975 + int stack_adjustment = get_frame_size ();
2976 + unsigned int func_type = avr32_current_func_type ();
2977 + FILE *f = asm_out_file;
2979 + /* Naked functions do not have an epilogue. */
2980 + if (IS_NAKED (func_type))
2983 + saved_fp_reg_mask = avr32_compute_save_fp_reg_mask ();
2985 + saved_reg_mask = avr32_compute_save_reg_mask (FALSE);
2987 + /* Reset frame pointer */
2988 + if (stack_adjustment > 0)
2990 + if (avr32_const_ok_for_constraint_p (stack_adjustment, 'I', "Is21"))
2992 + fprintf (f, "\tsub\tsp, %i # Reset Frame Pointer\n",
2993 + -stack_adjustment);
2997 + /* TODO! Is it safe to use r8 as scratch?? */
2998 + fprintf (f, "\tmov\tr8, lo(%i) # Reset Frame Pointer\n",
2999 + -stack_adjustment);
3000 + fprintf (f, "\torh\tr8, hi(%i) # Reset Frame Pointer\n",
3001 + -stack_adjustment);
3002 + fprintf (f, "\tadd\tsp, r8 # Reset Frame Pointer\n");
3006 + if (saved_fp_reg_mask)
3008 + char reglist[64]; /* 64 bytes should be enough... */
3009 + avr32_make_fp_reglist_w (saved_fp_reg_mask, (char *) reglist);
3010 + fprintf (f, "\tldcm.w\tcp0, sp++, %s\n", reglist);
3011 + if (saved_fp_reg_mask & ~0xff)
3013 + saved_fp_reg_mask &= ~0xff;
3014 + avr32_make_fp_reglist_d (saved_fp_reg_mask, (char *) reglist);
3015 + fprintf (f, "\tldcm.d\tcp0, sp++, %s\n", reglist);
3019 + if (saved_reg_mask)
3021 + /* Must pop used registers */
3023 + /* Should we use POPM or LDM? */
3024 + int usePOPM = TRUE;
3025 + if (((saved_reg_mask & (1 << 0)) ||
3026 + (saved_reg_mask & (1 << 1)) ||
3027 + (saved_reg_mask & (1 << 2)) || (saved_reg_mask & (1 << 3))))
3029 + /* At least one of R0-R3 must be popped */
3030 + if (((saved_reg_mask & (1 << 0)) &&
3031 + (saved_reg_mask & (1 << 1)) &&
3032 + (saved_reg_mask & (1 << 2)) && (saved_reg_mask & (1 << 3))))
3034 + /* All should be popped */
3043 + if (((saved_reg_mask & (1 << 4)) ||
3044 + (saved_reg_mask & (1 << 5)) ||
3045 + (saved_reg_mask & (1 << 6)) || (saved_reg_mask & (1 << 7))))
3047 + /* At least one of R4-R7 must be popped */
3048 + if (((saved_reg_mask & (1 << 4)) &&
3049 + (saved_reg_mask & (1 << 5)) &&
3050 + (saved_reg_mask & (1 << 6)) && (saved_reg_mask & (1 << 7))))
3053 + /* All should be popped */
3062 + if (((saved_reg_mask & (1 << 8)) || (saved_reg_mask & (1 << 9))))
3064 + /* At least one of R8-R9 must be popped */
3065 + if (((saved_reg_mask & (1 << 8)) && (saved_reg_mask & (1 << 9))))
3068 + /* All should be popped */
3077 + if (saved_reg_mask & (1 << 10))
3080 + if (saved_reg_mask & (1 << 11))
3083 + if (saved_reg_mask & (1 << 12))
3086 + if (saved_reg_mask & (1 << ASM_REGNUM (LR_REGNUM)))
3090 + if (saved_reg_mask & (1 << ASM_REGNUM (PC_REGNUM)))
3091 + /* Pop LR into PC. */
3096 + char reglist[64]; /* 64 bytes should be enough... */
3097 + avr32_make_reglist8 (reglist8, (char *) reglist);
3099 + if (reglist8 & 0x80)
3100 + /* This instruction is also a return */
3101 + insert_ret = FALSE;
3103 + if (r12_imm && !insert_ret)
3104 + fprintf (f, "\tpopm\t%s, r12=%li\n", reglist, INTVAL (r12_imm));
3106 + fprintf (f, "\tpopm\t%s\n", reglist);
3111 + char reglist[64]; /* 64 bytes should be enough... */
3112 + avr32_make_reglist16 (saved_reg_mask, (char *) reglist);
3113 + if (saved_reg_mask & (1 << ASM_REGNUM (PC_REGNUM)))
3114 + /* This instruction is also a return */
3115 + insert_ret = FALSE;
3117 + if (r12_imm && !insert_ret)
3118 + fprintf (f, "\tldm\tsp++, %s, r12=%li\n", reglist,
3119 + INTVAL (r12_imm));
3121 + fprintf (f, "\tldm\tsp++, %s\n", reglist);
3127 + /* Stack adjustment for exception handler. */
3128 + if (current_function_calls_eh_return)
3129 + fprintf (f, "\tadd\tsp, r%d\n", ASM_REGNUM (EH_RETURN_STACKADJ_REGNO));
3132 + if (IS_INTERRUPT (func_type))
3134 + fprintf (f, "\trete\n");
3136 + else if (insert_ret)
3139 + fprintf (f, "\tretal\t%li\n", INTVAL (r12_imm));
3141 + fprintf (f, "\tretal\tr12\n");
3145 +/* Function for converting a fp-register mask to a
3146 + reglistCPD8 register list string. */
3148 +avr32_make_fp_reglist_d (int reglist_mask, char *reglist_string)
3152 + /* Make sure reglist_string is empty */
3153 + reglist_string[0] = '\0';
3155 + for (i = 0; i < NUM_FP_REGS; i += 2)
3157 + if (reglist_mask & (1 << i))
3159 + /* Append at the current end of the string; passing the buffer as
3160 + its own source argument to sprintf is undefined behaviour. */
3161 + sprintf (reglist_string + strlen (reglist_string),
3162 + strlen (reglist_string) ? ", %s-%s" : "%s-%s",
3163 + reg_names[INTERNAL_FP_REGNUM (i)],
3164 + reg_names[INTERNAL_FP_REGNUM (i + 1)]);
3170 +/* Function for converting a fp-register mask to a
3171 + reglistCP8 register list string. */
3173 +avr32_make_fp_reglist_w (int reglist_mask, char *reglist_string)
3177 + /* Make sure reglist_string is empty */
3178 + reglist_string[0] = '\0';
3180 + for (i = 0; i < NUM_FP_REGS; ++i)
3182 + if (reglist_mask & (1 << i))
3184 + /* Append at the current end; sprintf from the buffer into itself
3185 + is undefined behaviour. */
3186 + sprintf (reglist_string + strlen (reglist_string),
3187 + strlen (reglist_string) ? ", %s" : "%s",
3188 + reg_names[INTERNAL_FP_REGNUM (i)]);
3193 +avr32_make_reglist16 (int reglist16_vect, char *reglist16_string)
3197 + /* Make sure reglist16_string is empty */
3198 + reglist16_string[0] = '\0';
3200 + for (i = 0; i < 16; ++i)
3202 + if (reglist16_vect & (1 << i))
3204 + /* Append at the current end; sprintf from the buffer into itself
3205 + is undefined behaviour. */
3206 + sprintf (reglist16_string + strlen (reglist16_string),
3207 + strlen (reglist16_string) ? ", %s" : "%s",
3208 + reg_names[INTERNAL_REGNUM (i)]);
3213 +avr32_convert_to_reglist16 (int reglist8_vect)
3215 + int reglist16_vect = 0;
3216 + if (reglist8_vect & 0x1)
3217 + reglist16_vect |= 0xF;
3218 + if (reglist8_vect & 0x2)
3219 + reglist16_vect |= 0xF0;
3220 + if (reglist8_vect & 0x4)
3221 + reglist16_vect |= 0x300;
3222 + if (reglist8_vect & 0x8)
3223 + reglist16_vect |= 0x400;
3224 + if (reglist8_vect & 0x10)
3225 + reglist16_vect |= 0x800;
3226 + if (reglist8_vect & 0x20)
3227 + reglist16_vect |= 0x1000;
3228 + if (reglist8_vect & 0x40)
3229 + reglist16_vect |= 0x4000;
3230 + if (reglist8_vect & 0x80)
3231 + reglist16_vect |= 0x8000;
3233 + return reglist16_vect;
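The conversion gives one reglist16 bit per register for each reglist8 group bit (note that bit 13, sp, is never produced). A worked standalone example: 0x81, i.e. pc plus the r0-r3 group, expands to 0x800F.

#include <stdio.h>

int
main (void)
{
  int reglist8 = 0x81;                        /* pc and r0-r3 */
  int reglist16 = 0;

  if (reglist8 & 0x01) reglist16 |= 0x000F;   /* r0-r3 */
  if (reglist8 & 0x02) reglist16 |= 0x00F0;   /* r4-r7 */
  if (reglist8 & 0x04) reglist16 |= 0x0300;   /* r8-r9 */
  if (reglist8 & 0x08) reglist16 |= 0x0400;   /* r10   */
  if (reglist8 & 0x10) reglist16 |= 0x0800;   /* r11   */
  if (reglist8 & 0x20) reglist16 |= 0x1000;   /* r12   */
  if (reglist8 & 0x40) reglist16 |= 0x4000;   /* lr    */
  if (reglist8 & 0x80) reglist16 |= 0x8000;   /* pc    */

  printf ("0x%04X\n", reglist16);             /* prints 0x800F */
  return 0;
}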
3237 +avr32_make_reglist8 (int reglist8_vect, char *reglist8_string)
3239 + /* Make sure reglist8_string is empty */
3240 + reglist8_string[0] = '\0';
3242 + /* Append each group with strcat; the original sprintf calls used
3243 + the buffer as their own source, which is undefined behaviour. */
3244 + if (reglist8_vect & 0x1)
3245 + strcat (reglist8_string, "r0-r3");
3246 + if (reglist8_vect & 0x2)
3247 + strcat (reglist8_string, reglist8_string[0] ? ", r4-r7" : "r4-r7");
3248 + if (reglist8_vect & 0x4)
3249 + strcat (reglist8_string, reglist8_string[0] ? ", r8-r9" : "r8-r9");
3250 + if (reglist8_vect & 0x8)
3251 + strcat (reglist8_string, reglist8_string[0] ? ", r10" : "r10");
3252 + if (reglist8_vect & 0x10)
3253 + strcat (reglist8_string, reglist8_string[0] ? ", r11" : "r11");
3254 + if (reglist8_vect & 0x20)
3255 + strcat (reglist8_string, reglist8_string[0] ? ", r12" : "r12");
3256 + if (reglist8_vect & 0x40)
3257 + strcat (reglist8_string, reglist8_string[0] ? ", lr" : "lr");
3258 + if (reglist8_vect & 0x80)
3259 + strcat (reglist8_string, reglist8_string[0] ? ", pc" : "pc");
3275 +avr32_eh_return_data_regno (int n)
3277 + if (n >= 0 && n <= 3)
3280 + return INVALID_REGNUM;
3283 +/* Compute the distance from register FROM to register TO.
3284 + These can be the arg pointer, the frame pointer or
3285 + the stack pointer.
3286 + Typical stack layout looks like this:
3288 + old stack pointer -> | |
3291 + | | saved arguments for
3292 + | | vararg functions
3293 + arg_pointer -> | | /
3303 + stack ptr --> | | /
3311 + For a given function some or all of these stack components
3312 + may not be needed, giving rise to the possibility of
3313 + eliminating some of the registers.
3315 + The values returned by this function must reflect the behaviour
3316 + of avr32_expand_prologue() and avr32_compute_save_reg_mask().
3318 + The sign of the number returned reflects the direction of stack
3319 + growth, so the values are positive for all eliminations except
3320 + from the soft frame pointer to the hard frame pointer. */
3324 +avr32_initial_elimination_offset (int from, int to)
3327 + int call_saved_regs = 0;
3328 + unsigned long saved_reg_mask, saved_fp_reg_mask;
3329 + unsigned int local_vars = get_frame_size ();
3331 + saved_reg_mask = avr32_compute_save_reg_mask (TRUE);
3332 + saved_fp_reg_mask = avr32_compute_save_fp_reg_mask ();
3334 + for (i = 0; i < 16; ++i)
3336 + if (saved_reg_mask & (1 << i))
3337 + call_saved_regs += 4;
3340 + for (i = 0; i < NUM_FP_REGS; ++i)
3342 + if (saved_fp_reg_mask & (1 << i))
3343 + call_saved_regs += 4;
3348 + case ARG_POINTER_REGNUM:
3351 + case STACK_POINTER_REGNUM:
3352 + return call_saved_regs + local_vars;
3353 + case FRAME_POINTER_REGNUM:
3354 + return call_saved_regs;
3358 + case FRAME_POINTER_REGNUM:
3361 + case STACK_POINTER_REGNUM:
3362 + return local_vars;
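A worked example of the offsets, with hypothetical numbers: assume the prologue saved four registers (16 bytes) and get_frame_size () is 12.

#include <stdio.h>

int
main (void)
{
  int call_saved_regs = 16;   /* four saved registers, 4 bytes each */
  int local_vars = 12;        /* get_frame_size () */

  /* ARG_POINTER -> STACK_POINTER spans saved regs plus locals. */
  printf ("ARG -> SP: %d\n", call_saved_regs + local_vars);   /* 28 */
  /* ARG_POINTER -> FRAME_POINTER spans only the saved regs.   */
  printf ("ARG -> FP: %d\n", call_saved_regs);                /* 16 */
  /* FRAME_POINTER -> STACK_POINTER spans only the locals.     */
  printf ("FP  -> SP: %d\n", local_vars);                     /* 12 */
  return 0;
}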
3373 + Returns an rtx used when passing the next argument to a function.
3374 + avr32_init_cumulative_args() and avr32_function_arg_advance() set which
3378 +avr32_function_arg (CUMULATIVE_ARGS * cum, enum machine_mode mode,
3379 + tree type, int named)
3383 + HOST_WIDE_INT arg_size, arg_rsize;
3386 + arg_size = int_size_in_bytes (type);
3390 + arg_size = GET_MODE_SIZE (mode);
3392 + arg_rsize = PUSH_ROUNDING (arg_size);
3395 + The last time this macro is called, it is called with mode == VOIDmode,
3396 + and its result is passed to the call or call_value pattern as operands 2
3397 + and 3 respectively. */
3398 + if (mode == VOIDmode)
3400 + return gen_rtx_CONST_INT (SImode, 22); /* ToDo: fixme. */
3403 + if ((*targetm.calls.must_pass_in_stack) (mode, type) || !named)
3408 + if (arg_rsize == 8)
3410 + /* use r11:r10 or r9:r8. */
3411 + if (!(GET_USED_INDEX (cum, 1) || GET_USED_INDEX (cum, 2)))
3413 + else if (!(GET_USED_INDEX (cum, 3) || GET_USED_INDEX (cum, 4)))
3418 + else if (arg_rsize == 4)
3419 + { /* Use first available register */
3421 + while (index <= LAST_CUM_REG_INDEX && GET_USED_INDEX (cum, index))
3423 + if (index > LAST_CUM_REG_INDEX)
3427 + SET_REG_INDEX (cum, index);
3429 + if (GET_REG_INDEX (cum) >= 0)
3430 + return gen_rtx_REG (mode,
3431 + avr32_function_arg_reglist[GET_REG_INDEX (cum)]);
3437 + Set the register used for passing the first argument to a function.
3440 +avr32_init_cumulative_args (CUMULATIVE_ARGS * cum, tree fntype,
3441 + rtx libname ATTRIBUTE_UNUSED,
3442 + tree fndecl ATTRIBUTE_UNUSED)
3444 + /* Set all registers as unused. */
3445 + SET_INDEXES_UNUSED (cum);
3447 + /* Reset uses_anonymous_args */
3448 + cum->uses_anonymous_args = 0;
3450 + /* Reset size of stack pushed arguments */
3451 + cum->stack_pushed_args_size = 0;
3455 + Set register used for passing the next argument to a function. Only the
3456 + Scratch Registers are used.
3461 + 13 r13 _SP_________
3462 + FIRST_CUM_REG 12 r12 _||_
3464 + 11 r10 _||_ Scratch Registers
3466 + LAST_SCRATCH_REG 9 r8 _\/_________
3478 +avr32_function_arg_advance (CUMULATIVE_ARGS * cum, enum machine_mode mode,
3479 + tree type, int named ATTRIBUTE_UNUSED)
3481 + HOST_WIDE_INT arg_size, arg_rsize;
3485 + arg_size = int_size_in_bytes (type);
3489 + arg_size = GET_MODE_SIZE (mode);
3491 + arg_rsize = PUSH_ROUNDING (arg_size);
3493 + /* If the argument has to be passed on the stack, no register is used. */
3494 + if ((*targetm.calls.must_pass_in_stack) (mode, type))
3496 + cum->stack_pushed_args_size += PUSH_ROUNDING (int_size_in_bytes (type));
3500 + /* Mark the used registers as "used". */
3501 + if (GET_REG_INDEX (cum) >= 0)
3503 + SET_USED_INDEX (cum, GET_REG_INDEX (cum));
3504 + if (arg_rsize == 8)
3506 + SET_USED_INDEX (cum, (GET_REG_INDEX (cum) + 1));
3511 + /* Had to use stack */
3512 + cum->stack_pushed_args_size += arg_rsize;
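Together, avr32_function_arg and avr32_function_arg_advance implement a small allocator over r12..r8: a doubleword takes the pair r11:r10 or r9:r8, a word takes the first free register, and anything else spills to the stack. A hedged standalone sketch, where index 0..4 maps to r12..r8 and -1 means stack:

/* Sketch: pick a register index for an argument of rounded size
   RSIZE given the used[] flags, mirroring the logic above.  */
static int
sketch_alloc_arg (int used[5], int rsize)
{
  int i;

  if (rsize == 8)
    {
      if (!used[1] && !used[2])
        return 1;               /* r11:r10 */
      if (!used[3] && !used[4])
        return 3;               /* r9:r8   */
      return -1;                /* stack   */
    }
  if (rsize == 4)
    for (i = 0; i <= 4; i++)
      if (!used[i])
        return i;               /* first free of r12..r8 */
  return -1;                    /* stack */
}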
3517 + Defines which direction to go to find the next register to use if the
3518 + argument is larger than one register, or for arguments shorter than an
3519 + int which are not promoted, such as the last part of structures with
3520 + size not a multiple of 4.
3522 +avr32_function_arg_padding (enum machine_mode mode ATTRIBUTE_UNUSED,
3525 + /* Pad upward for all aggregates except byte and halfword sized aggregates
3526 + which can be passed in registers. */
3528 + && AGGREGATE_TYPE_P (type)
3529 + && (int_size_in_bytes (type) != 1)
3530 + && !((int_size_in_bytes (type) == 2)
3531 + && TYPE_ALIGN_UNIT (type) >= 2)
3532 + && (int_size_in_bytes (type) & 0x3))
3541 + Return a rtx used for the return value from a function call.
3544 +avr32_function_value (tree type, tree func, bool outgoing ATTRIBUTE_UNUSED)
3546 + if (avr32_return_in_memory (type, func))
3549 + if (int_size_in_bytes (type) <= 4)
3550 + if (avr32_return_in_msb (type))
3551 + /* Aggregates of size less than a word which align the data in the
3552 + MSB must use SImode for r12. */
3553 + return gen_rtx_REG (SImode, RET_REGISTER);
3555 + return gen_rtx_REG (TYPE_MODE (type), RET_REGISTER);
3556 + else if (int_size_in_bytes (type) <= 8)
3557 + return gen_rtx_REG (TYPE_MODE (type), INTERNAL_REGNUM (11));
3563 + Return a rtx used for the return value from a library function call.
3566 +avr32_libcall_value (enum machine_mode mode)
3569 + if (GET_MODE_SIZE (mode) <= 4)
3570 + return gen_rtx_REG (mode, RET_REGISTER);
3571 + else if (GET_MODE_SIZE (mode) <= 8)
3572 + return gen_rtx_REG (mode, INTERNAL_REGNUM (11));
3577 +/* Return TRUE if X references a SYMBOL_REF. */
3579 +symbol_mentioned_p (rtx x)
3584 + if (GET_CODE (x) == SYMBOL_REF)
3587 + fmt = GET_RTX_FORMAT (GET_CODE (x));
3589 + for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
3591 + if (fmt[i] == 'E')
3595 + for (j = XVECLEN (x, i) - 1; j >= 0; j--)
3596 + if (symbol_mentioned_p (XVECEXP (x, i, j)))
3599 + else if (fmt[i] == 'e' && symbol_mentioned_p (XEXP (x, i)))
3606 +/* Return TRUE if X references a LABEL_REF. */
3608 +label_mentioned_p (rtx x)
3613 + if (GET_CODE (x) == LABEL_REF)
3616 + fmt = GET_RTX_FORMAT (GET_CODE (x));
3617 + for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
3619 + if (fmt[i] == 'E')
3623 + for (j = XVECLEN (x, i) - 1; j >= 0; j--)
3624 + if (label_mentioned_p (XVECEXP (x, i, j)))
3627 + else if (fmt[i] == 'e' && label_mentioned_p (XEXP (x, i)))
3636 +avr32_legitimate_pic_operand_p (rtx x)
3639 + /* We can't have const, this must be broken down to a symbol. */
3640 + if (GET_CODE (x) == CONST)
3643 + /* Can't access symbols or labels via the constant pool either */
3644 + if ((GET_CODE (x) == SYMBOL_REF
3645 + && CONSTANT_POOL_ADDRESS_P (x)
3646 + && (symbol_mentioned_p (get_pool_constant (x))
3647 + || label_mentioned_p (get_pool_constant (x)))))
3655 +legitimize_pic_address (rtx orig, enum machine_mode mode ATTRIBUTE_UNUSED,
3659 + if (GET_CODE (orig) == SYMBOL_REF || GET_CODE (orig) == LABEL_REF)
3665 + if (no_new_pseudos)
3668 + reg = gen_reg_rtx (Pmode);
3673 + emit_move_insn (reg, orig);
3675 + /* Only set current function as using pic offset table if flag_pic is
3676 + set. This is because this function is also used if
3677 + TARGET_HAS_ASM_ADDR_PSEUDOS is set. */
3679 + current_function_uses_pic_offset_table = 1;
3681 + /* Put a REG_EQUAL note on this insn, so that it can be optimized by
3685 + else if (GET_CODE (orig) == CONST)
3690 + && GET_CODE (XEXP (orig, 0)) == PLUS
3691 + && XEXP (XEXP (orig, 0), 0) == pic_offset_table_rtx)
3696 + if (no_new_pseudos)
3699 + reg = gen_reg_rtx (Pmode);
3702 + if (GET_CODE (XEXP (orig, 0)) == PLUS)
3705 + legitimize_pic_address (XEXP (XEXP (orig, 0), 0), Pmode, reg);
3707 + legitimize_pic_address (XEXP (XEXP (orig, 0), 1), Pmode,
3708 + base == reg ? 0 : reg);
3713 + if (GET_CODE (offset) == CONST_INT)
3715 + /* The base register doesn't really matter, we only want to test
3716 + the index for the appropriate mode. */
3717 + if (!avr32_const_ok_for_constraint_p (INTVAL (offset), 'I', "Is21"))
3719 + if (!no_new_pseudos)
3720 + offset = force_reg (Pmode, offset);
3725 + if (GET_CODE (offset) == CONST_INT)
3726 + return plus_constant (base, INTVAL (offset));
3729 + return gen_rtx_PLUS (Pmode, base, offset);
3735 +/* Generate code to load the PIC register. */
3737 +avr32_load_pic_register (void)
3740 + rtx global_offset_table;
3742 + if ((current_function_uses_pic_offset_table == 0) || TARGET_NO_INIT_GOT)
3748 + l1 = gen_label_rtx ();
3750 + global_offset_table = gen_rtx_SYMBOL_REF (Pmode, "_GLOBAL_OFFSET_TABLE_");
3752 + gen_rtx_CONST (Pmode,
3753 + gen_rtx_MINUS (SImode, gen_rtx_LABEL_REF (Pmode, l1),
3754 + global_offset_table));
3755 + emit_insn (gen_pic_load_addr
3756 + (pic_offset_table_rtx, force_const_mem (SImode, pic_tmp)));
3757 + emit_insn (gen_pic_compute_got_from_pc (pic_offset_table_rtx, l1));
3759 + /* Need to emit this whether or not we obey regdecls, since setjmp/longjmp
3760 + can cause life info to screw up. */
3761 + emit_insn (gen_rtx_USE (VOIDmode, pic_offset_table_rtx));
3766 +/* This hook should return true if values of type type are returned at the most
3767 + significant end of a register (in other words, if they are padded at the
3768 + least significant end). You can assume that type is returned in a register;
3769 + the caller is required to check this. Note that the register provided by
3770 + FUNCTION_VALUE must be able to hold the complete return value. For example,
3771 + if a 1-, 2- or 3-byte structure is returned at the most significant end of a
3772 + 4-byte register, FUNCTION_VALUE should provide an SImode rtx. */
3774 +avr32_return_in_msb (tree type ATTRIBUTE_UNUSED)
3776 + /* if ( AGGREGATE_TYPE_P (type) ) if ((int_size_in_bytes(type) == 1) ||
3777 + ((int_size_in_bytes(type) == 2) && TYPE_ALIGN_UNIT(type) >= 2)) return
3778 + false; else return true; */
3785 + Returns one if a certain function value is going to be returned in memory
3786 + and zero if it is going to be returned in a register.
3788 + BLKmode and all other modes that are larger than 64 bits are returned in
3792 +avr32_return_in_memory (tree type, tree fntype ATTRIBUTE_UNUSED)
3794 + if (TYPE_MODE (type) == VOIDmode)
3797 + if (int_size_in_bytes (type) > (2 * UNITS_PER_WORD)
3798 + || int_size_in_bytes (type) == -1)
3803 + /* If we have an aggregate then use the same mechanism as when checking if
3804 + it should be passed on the stack. */
3806 + && AGGREGATE_TYPE_P (type)
3807 + && (*targetm.calls.must_pass_in_stack) (TYPE_MODE (type), type))
3814 +/* Output the constant part of the trampoline.
3815 + lddpc r0, pc[0x8:e] ; load static chain register
3816 + lddpc pc, pc[0x8:e] ; jump to subroutine
3817 + .long 0 ; Address to static chain,
3818 + ; filled in by avr32_initialize_trampoline()
3819 + .long 0 ; Address to subroutine,
3820 + ; filled in by avr32_initialize_trampoline()
3823 +avr32_trampoline_template (FILE * file)
3825 + fprintf (file, "\tlddpc r0, pc[8]\n");
3826 + fprintf (file, "\tlddpc pc, pc[8]\n");
3827 + /* make room for the address of the static chain. */
3828 + fprintf (file, "\t.long\t0\n");
3829 + /* make room for the address to the subroutine. */
3830 + fprintf (file, "\t.long\t0\n");
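The emitted image is four words: two pc-relative loads followed by the two data slots that avr32_initialize_trampoline () fills in below. A hedged sketch of that layout as a struct (the struct itself is illustrative and not part of the port; TRAMPOLINE_SIZE is defined elsewhere):

/* Sketch of the trampoline image laid out above.  */
struct sketch_trampoline
{
  unsigned int lddpc_r0;      /* lddpc r0, pc[8]: load static chain */
  unsigned int lddpc_pc;      /* lddpc pc, pc[8]: jump to target    */
  unsigned int static_chain;  /* filled in at run time              */
  unsigned int target_fn;     /* filled in at run time              */
};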
3835 + Initialize the variable parts of a trampoline.
3838 +avr32_initialize_trampoline (rtx addr, rtx fnaddr, rtx static_chain)
3840 + /* Store the address to the static chain. */
3841 + emit_move_insn (gen_rtx_MEM
3842 + (SImode, plus_constant (addr, TRAMPOLINE_SIZE - 4)),
3845 + /* Store the address to the function. */
3846 + emit_move_insn (gen_rtx_MEM (SImode, plus_constant (addr, TRAMPOLINE_SIZE)),
3849 + emit_insn (gen_cache (gen_rtx_REG (SImode, 13),
3850 + gen_rtx_CONST_INT (SImode,
3851 + AVR32_CACHE_INVALIDATE_ICACHE)));
3854 +/* Return nonzero if X is valid as an addressing register. */
3856 +avr32_address_register_rtx_p (rtx x, int strict_p)
3860 + if (!register_operand(x, GET_MODE(x)))
3863 + /* If strict we require the register to be a hard register. */
3868 + regno = REGNO (x);
3871 + return REGNO_OK_FOR_BASE_P (regno);
3873 + return (regno <= LAST_REGNUM || regno >= FIRST_PSEUDO_REGISTER);
3876 +/* Return nonzero if INDEX is valid for an address index operand. */
3878 +avr32_legitimate_index_p (enum machine_mode mode, rtx index, int strict_p)
3880 + enum rtx_code code = GET_CODE (index);
3882 + if (GET_MODE_SIZE (mode) > 8)
3885 + /* Standard coprocessor addressing modes. */
3886 + if (code == CONST_INT)
3888 + if (TARGET_HARD_FLOAT && GET_MODE_CLASS (mode) == MODE_FLOAT)
3889 + /* Coprocessor mem insns have a smaller reach than ordinary mem insns */
3890 + return CONST_OK_FOR_CONSTRAINT_P (INTVAL (index), 'K', "Ku14");
3892 + return CONST_OK_FOR_CONSTRAINT_P (INTVAL (index), 'K', "Ks16");
3895 + if (avr32_address_register_rtx_p (index, strict_p))
3900 + rtx xiop0 = XEXP (index, 0);
3901 + rtx xiop1 = XEXP (index, 1);
3902 + return ((avr32_address_register_rtx_p (xiop0, strict_p)
3903 + && power_of_two_operand (xiop1, SImode)
3904 + && (INTVAL (xiop1) <= 8))
3905 + || (avr32_address_register_rtx_p (xiop1, strict_p)
3906 + && power_of_two_operand (xiop0, SImode)
3907 + && (INTVAL (xiop0) <= 8)));
3909 + else if (code == ASHIFT)
3911 + rtx op = XEXP (index, 1);
3913 + return (avr32_address_register_rtx_p (XEXP (index, 0), strict_p)
3914 + && GET_CODE (op) == CONST_INT
3915 + && INTVAL (op) > 0 && INTVAL (op) <= 3);
3922 + Used in the GO_IF_LEGITIMATE_ADDRESS macro. Returns a nonzero value if
3923 + the RTX x is a legitimate memory address.
3925 + Returns NO_REGS if the address is not legitimate, GENERAL_REGS or ALL_REGS
3929 +/* Forward declaration*/
3930 +int is_minipool_label (rtx label);
3933 +avr32_legitimate_address (enum machine_mode mode, rtx x, int strict)
3936 + switch (GET_CODE (x))
3939 + return avr32_address_register_rtx_p (x, strict);
3942 + rtx label = avr32_find_symbol (x);
3945 + ((CONSTANT_POOL_ADDRESS_P (label)
3947 + && (symbol_mentioned_p (get_pool_constant (label))
3948 + || label_mentioned_p (get_pool_constant (label)))))
3949 + /* TODO! Can this ever happen??? */
3950 + || ((GET_CODE (label) == LABEL_REF)
3951 + && GET_CODE (XEXP (label, 0)) == CODE_LABEL
3952 + && is_minipool_label (XEXP (label, 0)))))
3959 + if (GET_CODE (XEXP (x, 0)) == CODE_LABEL
3960 + && is_minipool_label (XEXP (x, 0)))
3967 + if (CONSTANT_POOL_ADDRESS_P (x)
3969 + && (symbol_mentioned_p (get_pool_constant (x))
3970 + || label_mentioned_p (get_pool_constant (x)))))
3973 + A symbol_ref is only legal if it is a function. If all of them are
3974 + legal, a pseudo reg that is a constant will be replaced by a
3975 + symbol_ref and generate illegal code. SYMBOL_REF_FLAG is set by
3976 + ENCODE_SECTION_INFO. */
3977 + else if (SYMBOL_REF_RCALL_FUNCTION_P (x))
3981 + case PRE_DEC: /* (pre_dec (...)) */
3982 + case POST_INC: /* (post_inc (...)) */
3983 + return avr32_address_register_rtx_p (XEXP (x, 0), strict);
3984 + case PLUS: /* (plus (...) (...)) */
3986 + rtx xop0 = XEXP (x, 0);
3987 + rtx xop1 = XEXP (x, 1);
3989 + return ((avr32_address_register_rtx_p (xop0, strict)
3990 + && avr32_legitimate_index_p (mode, xop1, strict))
3991 + || (avr32_address_register_rtx_p (xop1, strict)
3992 + && avr32_legitimate_index_p (mode, xop0, strict)));
4003 +avr32_const_double_immediate (rtx value)
4005 + HOST_WIDE_INT hi, lo;
4007 + if (GET_CODE (value) != CONST_DOUBLE)
4010 + if (SCALAR_FLOAT_MODE_P (GET_MODE (value)))
4012 + HOST_WIDE_INT target_float[2];
4014 + real_to_target (target_float, CONST_DOUBLE_REAL_VALUE (value),
4015 + GET_MODE (value));
4016 + lo = target_float[0];
4017 + hi = target_float[1];
4021 + hi = CONST_DOUBLE_HIGH (value);
4022 + lo = CONST_DOUBLE_LOW (value);
4025 + if (avr32_const_ok_for_constraint_p (lo, 'K', "Ks21")
4026 + && (GET_MODE (value) == SFmode
4027 + || avr32_const_ok_for_constraint_p (hi, 'K', "Ks21")))
4037 +avr32_legitimate_constant_p (rtx x)
4039 + switch (GET_CODE (x))
4042 + /* Check if we should put large immediates into the constant pool
4043 + or load them directly with mov/orh. */
4044 + if (!avr32_imm_in_const_pool)
4047 + return avr32_const_ok_for_constraint_p (INTVAL (x), 'K', "Ks21");
4048 + case CONST_DOUBLE:
4049 + /* Check if we should put large immediates into the constant pool
4050 + or load them directly with mov/orh. */
4051 + if (!avr32_imm_in_const_pool)
4054 + if (GET_MODE (x) == SFmode
4055 + || GET_MODE (x) == DFmode || GET_MODE (x) == DImode)
4056 + return avr32_const_double_immediate (x);
4060 + return flag_pic || TARGET_HAS_ASM_ADDR_PSEUDOS;
4062 + return flag_pic || TARGET_HAS_ASM_ADDR_PSEUDOS;
4065 + case CONST_VECTOR:
4068 + printf ("%s():\n", __FUNCTION__);
4075 +/* Strip any special encoding from labels */
4077 +avr32_strip_name_encoding (const char *name)
4079 + const char *stripped = name;
4083 + switch (stripped[0])
4086 + stripped = strchr (name + 1, '#') + 1;
4089 + stripped = &stripped[1];
4099 +/* Do anything needed before RTL is emitted for each function. */
4100 +static struct machine_function *
4101 +avr32_init_machine_status (void)
4103 + struct machine_function *machine;
4105 + (machine_function *) ggc_alloc_cleared (sizeof (machine_function));
4107 +#if AVR32_FT_UNKNOWN != 0
4108 + machine->func_type = AVR32_FT_UNKNOWN;
4111 + machine->minipool_label_head = 0;
4112 + machine->minipool_label_tail = 0;
4117 +avr32_init_expanders (void)
4119 + /* Arrange to initialize and mark the machine per-function status. */
4120 + init_machine_status = avr32_init_machine_status;
4124 +/* Return an RTX indicating where the return address to the
4125 + calling function can be found. */
4128 +avr32_return_addr (int count, rtx frame ATTRIBUTE_UNUSED)
4133 + return get_hard_reg_initial_val (Pmode, LR_REGNUM);
4138 +avr32_encode_section_info (tree decl, rtx rtl, int first)
4141 + if (first && DECL_P (decl))
4143 + /* Set SYMBOL_REF_FLAG for local functions */
4144 + if (!TREE_PUBLIC (decl) && TREE_CODE (decl) == FUNCTION_DECL)
4146 + if ((*targetm.binds_local_p) (decl))
4148 + SYMBOL_REF_FLAG (XEXP (rtl, 0)) = 1;
4156 +avr32_asm_output_ascii (FILE * stream, char *ptr, int len)
4159 + char *new_ptr = xmalloc (4 * len + 1); /* worst case is 4 output chars per input byte, plus NUL */
4160 + if (new_ptr == NULL)
4161 + internal_error ("Out of memory.");
4163 + for (i = 0; i < len; i++)
4165 + if (ptr[i] == '\n')
4167 + new_ptr[i_new++] = '\\';
4168 + new_ptr[i_new++] = '0';
4169 + new_ptr[i_new++] = '1';
4170 + new_ptr[i_new++] = '2';
4172 + else if (ptr[i] == '\"')
4174 + new_ptr[i_new++] = '\\';
4175 + new_ptr[i_new++] = '\"';
4177 + else if (ptr[i] == '\\')
4179 + new_ptr[i_new++] = '\\';
4180 + new_ptr[i_new++] = '\\';
4182 + else if (ptr[i] == '\0' && i + 1 < len)
4184 + new_ptr[i_new++] = '\\';
4185 + new_ptr[i_new++] = '0';
4189 + new_ptr[i_new++] = ptr[i];
4193 + /* Terminate new_ptr. */
4194 + new_ptr[i_new] = '\0';
4195 + fprintf (stream, "\t.ascii\t\"%s\"\n", new_ptr);
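The loop above rewrites bytes the assembler would mis-parse inside a quoted .ascii string: newline becomes the octal escape \012, quote and backslash are backslash-escaped, and an embedded NUL becomes \0. A small standalone demo of the same escaping (the input is chosen for illustration):

#include <stdio.h>

int
main (void)
{
  const char in[] = { 'a', '\n', 'b' };
  int i;

  printf ("\t.ascii\t\"");
  for (i = 0; i < 3; i++)
    {
      if (in[i] == '\n')
        printf ("\\012");                 /* octal escape for newline */
      else if (in[i] == '"' || in[i] == '\\')
        printf ("\\%c", in[i]);           /* escape quote/backslash   */
      else
        putchar (in[i]);
    }
  printf ("\"\n");                        /* prints: .ascii "a\012b"  */
  return 0;
}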
4201 +avr32_asm_output_label (FILE * stream, const char *name)
4203 + name = avr32_strip_name_encoding (name);
4205 + /* Print the label. */
4206 + assemble_name (stream, name);
4207 + fprintf (stream, ":\n");
4213 +avr32_asm_weaken_label (FILE * stream, const char *name)
4215 + fprintf (stream, "\t.weak ");
4216 + assemble_name (stream, name);
4217 + fprintf (stream, "\n");
4221 + Checks if a labelref is equal to a reserved word in the assembler. If it is,
4222 + insert a '_' before the label name.
4225 +avr32_asm_output_labelref (FILE * stream, const char *name)