gcc: refresh patches for gcc 4.1.2
[openwrt/svn-archive/archive.git] / toolchain / gcc / patches / 4.1.2 / 500-avr32.patch
1 --- a/config.sub
2 +++ b/config.sub
3 @@ -239,7 +239,7 @@ case $basic_machine in
4 | alpha | alphaev[4-8] | alphaev56 | alphaev6[78] | alphapca5[67] \
5 | alpha64 | alpha64ev[4-8] | alpha64ev56 | alpha64ev6[78] | alpha64pca5[67] \
6 | am33_2.0 \
7 - | arc | arm | arm[bl]e | arme[lb] | armv[2345] | armv[345][lb] | avr \
8 + | arc | arm | arm[bl]e | arme[lb] | armv[2345] | armv[345][lb] | avr | avr32 \
9 | bfin \
10 | c4x | clipper \
11 | d10v | d30v | dlx | dsp16xx \
12 @@ -316,7 +316,7 @@ case $basic_machine in
13 | alpha64-* | alpha64ev[4-8]-* | alpha64ev56-* | alpha64ev6[78]-* \
14 | alphapca5[67]-* | alpha64pca5[67]-* | arc-* \
15 | arm-* | armbe-* | armle-* | armeb-* | armv*-* \
16 - | avr-* \
17 + | avr-* | avr32-* \
18 | bfin-* | bs2000-* \
19 | c[123]* | c30-* | [cjt]90-* | c4x-* | c54x-* | c55x-* | c6x-* \
20 | clipper-* | craynv-* | cydra-* \
21 --- a/configure.in
22 +++ b/configure.in
23 @@ -497,6 +497,9 @@ case "${target}" in
24 arm-*-riscix*)
25 noconfigdirs="$noconfigdirs ld target-libgloss ${libgcj}"
26 ;;
27 + avr32-*-*)
28 + noconfigdirs="$noconfigdirs target-libiberty target-libmudflap target-libffi ${libgcj}"
29 + ;;
30 avr-*-*)
31 noconfigdirs="$noconfigdirs target-libiberty target-libstdc++-v3 ${libgcj}"
32 ;;
33 --- a/gcc/builtins.c
34 +++ b/gcc/builtins.c
35 @@ -9228,7 +9228,7 @@ validate_arglist (tree arglist, ...)
36
37 do
38 {
39 - code = va_arg (ap, enum tree_code);
40 + code = va_arg (ap, int);
41 switch (code)
42 {
43 case 0:
44 --- a/gcc/calls.c
45 +++ b/gcc/calls.c
46 @@ -3434,7 +3434,7 @@ emit_library_call_value_1 (int retval, r
47 for (; count < nargs; count++)
48 {
49 rtx val = va_arg (p, rtx);
50 - enum machine_mode mode = va_arg (p, enum machine_mode);
51 + enum machine_mode mode = va_arg (p, int);
52
53 /* We cannot convert the arg value to the mode the library wants here;
54 must do it earlier where we know the signedness of the arg. */
55 --- /dev/null
56 +++ b/gcc/config/avr32/avr32.c
57 @@ -0,0 +1,7273 @@
58 +/*
59 + Target hooks and helper functions for AVR32.
60 + Copyright 2003-2006 Atmel Corporation.
61 +
62 + Written by Ronny Pedersen, Atmel Norway, <rpedersen@atmel.com>
63 +   Initial porting by Anders Ødland.
64 +
65 + This file is part of GCC.
66 +
67 + This program is free software; you can redistribute it and/or modify
68 + it under the terms of the GNU General Public License as published by
69 + the Free Software Foundation; either version 2 of the License, or
70 + (at your option) any later version.
71 +
72 + This program is distributed in the hope that it will be useful,
73 + but WITHOUT ANY WARRANTY; without even the implied warranty of
74 + MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
75 + GNU General Public License for more details.
76 +
77 + You should have received a copy of the GNU General Public License
78 + along with this program; if not, write to the Free Software
79 + Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. */
80 +
81 +#include "config.h"
82 +#include "system.h"
83 +#include "coretypes.h"
84 +#include "tm.h"
85 +#include "rtl.h"
86 +#include "tree.h"
87 +#include "obstack.h"
88 +#include "regs.h"
89 +#include "hard-reg-set.h"
90 +#include "real.h"
91 +#include "insn-config.h"
92 +#include "conditions.h"
93 +#include "output.h"
94 +#include "insn-attr.h"
95 +#include "flags.h"
96 +#include "reload.h"
97 +#include "function.h"
98 +#include "expr.h"
99 +#include "optabs.h"
100 +#include "toplev.h"
101 +#include "recog.h"
102 +#include "ggc.h"
103 +#include "except.h"
104 +#include "c-pragma.h"
105 +#include "integrate.h"
106 +#include "tm_p.h"
107 +#include "langhooks.h"
108 +
109 +#include "target.h"
110 +#include "target-def.h"
111 +
112 +#include <ctype.h>
113 +
114 +/* Forward definitions of types. */
115 +typedef struct minipool_node Mnode;
116 +typedef struct minipool_fixup Mfix;
117 +
118 +/* Obstack for minipool constant handling. */
119 +static struct obstack minipool_obstack;
120 +static char *minipool_startobj;
121 +static rtx minipool_vector_label;
122 +
123 +/* True if we are currently building a constant table. */
124 +int making_const_table;
125 +
126 +/* Some forward function declarations */
127 +static unsigned long avr32_isr_value (tree);
128 +static unsigned long avr32_compute_func_type (void);
129 +static tree avr32_handle_isr_attribute (tree *, tree, tree, int, bool *);
130 +static tree avr32_handle_acall_attribute (tree *, tree, tree, int, bool *);
131 +static tree avr32_handle_fndecl_attribute (tree * node, tree name, tree args,
132 + int flags, bool * no_add_attrs);
133 +static void avr32_reorg (void);
134 +bool avr32_return_in_msb (tree type);
135 +bool avr32_vector_mode_supported (enum machine_mode mode);
136 +static void avr32_init_libfuncs (void);
137 +void avr32_load_pic_register (void);
138 +
139 +
140 +static void
141 +avr32_add_gc_roots (void)
142 +{
143 + gcc_obstack_init (&minipool_obstack);
144 + minipool_startobj = (char *) obstack_alloc (&minipool_obstack, 0);
145 +}
146 +
147 +
148 +/* List of all known AVR32 parts */
149 +static const struct part_type_s avr32_part_types[] = {
150 + /* name, part_type, architecture type, macro */
151 + {"none", PART_TYPE_AVR32_NONE, ARCH_TYPE_AVR32_AP, "__AVR32__"},
152 + {"ap7000", PART_TYPE_AVR32_AP7000, ARCH_TYPE_AVR32_AP, "__AVR32_AP7000__"},
153 + {"ap7010", PART_TYPE_AVR32_AP7010, ARCH_TYPE_AVR32_AP, "__AVR32_AP7010__"},
154 + {"ap7020", PART_TYPE_AVR32_AP7020, ARCH_TYPE_AVR32_AP, "__AVR32_AP7020__"},
155 + {"uc3a0256", PART_TYPE_AVR32_UC3A0256, ARCH_TYPE_AVR32_UC, "__AVR32_UC3A0256__"},
156 + {"uc3a0512", PART_TYPE_AVR32_UC3A0512, ARCH_TYPE_AVR32_UC, "__AVR32_UC3A0512__"},
157 + {"uc3a1128", PART_TYPE_AVR32_UC3A1128, ARCH_TYPE_AVR32_UC, "__AVR32_UC3A1128__"},
158 + {"uc3a1256", PART_TYPE_AVR32_UC3A1256, ARCH_TYPE_AVR32_UC, "__AVR32_UC3A1256__"},
159 + {"uc3a1512", PART_TYPE_AVR32_UC3A1512, ARCH_TYPE_AVR32_UC, "__AVR32_UC3A1512__"},
160 + {NULL, 0, 0, NULL}
161 +};
162 +
163 +/* List of all known AVR32 architectures */
164 +static const struct arch_type_s avr32_arch_types[] = {
165 + /* name, architecture type, microarchitecture type, feature flags, macro */
166 + {"ap", ARCH_TYPE_AVR32_AP, UARCH_TYPE_AVR32B, FLAG_AVR32_HAS_DSP |
167 + FLAG_AVR32_HAS_SIMD | FLAG_AVR32_HAS_UNALIGNED_WORD |
168 + FLAG_AVR32_HAS_BRANCH_PRED, "__AVR32_AP__"},
169 + {"uc", ARCH_TYPE_AVR32_UC, UARCH_TYPE_AVR32A,
170 + FLAG_AVR32_HAS_DSP | FLAG_AVR32_HAS_RMW, "__AVR32_UC__"},
171 + {NULL, 0, 0, 0, NULL}
172 +};
173 +
174 +/* Default arch name */
175 +const char *avr32_arch_name = "ap";
176 +const char *avr32_part_name = "none";
177 +
178 +const struct part_type_s *avr32_part;
179 +const struct arch_type_s *avr32_arch;
180 +
181 +
182 +/* Override command line options */
183 +void
184 +avr32_override_options (void)
185 +{
186 + const struct part_type_s *part;
187 + const struct arch_type_s *arch;
188 +
189 + /* Check if part type is set. */
190 + for (part = avr32_part_types; part->name; part++)
191 + if (strcmp (part->name, avr32_part_name) == 0)
192 + break;
193 +
194 + avr32_part = part;
195 +
196 + if (!part->name)
197 + {
198 + fprintf (stderr, "Unknown part `%s' specified\nKnown part names:\n",
199 + avr32_part_name);
200 + for (part = avr32_part_types; part->name; part++)
201 + fprintf (stderr, "\t%s\n", part->name);
202 + avr32_part = &avr32_part_types[PART_TYPE_AVR32_NONE];
203 + }
204 +
205 + avr32_arch = &avr32_arch_types[avr32_part->arch_type];
206 +
207 + /* If part was set to "none" then check if arch was set. */
208 + if (strcmp (avr32_part->name, "none") == 0)
209 + {
210 + /* Check if arch type is set. */
211 + for (arch = avr32_arch_types; arch->name; arch++)
212 + if (strcmp (arch->name, avr32_arch_name) == 0)
213 + break;
214 +
215 + avr32_arch = arch;
216 +
217 + if (!arch->name)
218 + {
219 + fprintf (stderr, "Unknown arch `%s' specified\nKnown arch names:\n",
220 + avr32_arch_name);
221 + for (arch = avr32_arch_types; arch->name; arch++)
222 + fprintf (stderr, "\t%s\n", arch->name);
223 + avr32_arch = &avr32_arch_types[ARCH_TYPE_AVR32_AP];
224 + }
225 + }
226 +
227 + /* If optimization level is two or greater, then align start of loops to a
228 + word boundary since this will allow folding the first insn of the loop.
229 + Do this only for targets supporting branch prediction. */
230 + if (optimize >= 2 && TARGET_BRANCH_PRED)
231 + align_loops = 2;
232 +
233 + if (AVR32_ALWAYS_PIC)
234 + flag_pic = 1;
235 +
236 + if (TARGET_NO_PIC)
237 + flag_pic = 0;
238 +
239 + avr32_add_gc_roots ();
240 +}
241 +
242 +
243 +/*
244 +If defined, a function that outputs the assembler code for entry to a
245 +function. The prologue is responsible for setting up the stack frame,
246 +initializing the frame pointer register, saving registers that must be
247 +saved, and allocating size additional bytes of storage for the
248 +local variables. size is an integer. file is a stdio
249 +stream to which the assembler code should be output.
250 +
251 +The label for the beginning of the function need not be output by this
252 +macro. That has already been done when the macro is run.
253 +
254 +To determine which registers to save, the macro can refer to the array
255 +regs_ever_live: element r is nonzero if hard register
256 +r is used anywhere within the function. This implies the function
257 +prologue should save register r, provided it is not one of the
258 +call-used registers. (TARGET_ASM_FUNCTION_EPILOGUE must likewise use
259 +regs_ever_live.)
260 +
261 +On machines that have ``register windows'', the function entry code does
262 +not save on the stack the registers that are in the windows, even if
263 +they are supposed to be preserved by function calls; instead it takes
264 +appropriate steps to ``push'' the register stack, if any non-call-used
265 +registers are used in the function.
266 +
267 +On machines where functions may or may not have frame-pointers, the
268 +function entry code must vary accordingly; it must set up the frame
269 +pointer if one is wanted, and not otherwise. To determine whether a
270 +frame pointer is wanted, the macro can refer to the variable
271 +frame_pointer_needed. The variable's value will be 1 at run
272 +time in a function that needs a frame pointer. (see Elimination).
273 +
274 +The function entry code is responsible for allocating any stack space
275 +required for the function. This stack space consists of the regions
276 +listed below. In most cases, these regions are allocated in the
277 +order listed, with the last listed region closest to the top of the
278 +stack (the lowest address if STACK_GROWS_DOWNWARD is defined, and
279 +the highest address if it is not defined). You can use a different order
280 +for a machine if doing so is more convenient or required for
281 +compatibility reasons. Except in cases where required by standard
282 +or by a debugger, there is no reason why the stack layout used by GCC
283 +need agree with that used by other compilers for a machine.
284 +*/
285 +
286 +#undef TARGET_ASM_FUNCTION_PROLOGUE
287 +#define TARGET_ASM_FUNCTION_PROLOGUE avr32_target_asm_function_prologue
288 +
289 +
290 +#undef TARGET_DEFAULT_SHORT_ENUMS
291 +#define TARGET_DEFAULT_SHORT_ENUMS hook_bool_void_false
292 +
293 +#undef TARGET_PROMOTE_FUNCTION_ARGS
294 +#define TARGET_PROMOTE_FUNCTION_ARGS hook_bool_tree_true
295 +
296 +#undef TARGET_PROMOTE_FUNCTION_RETURN
297 +#define TARGET_PROMOTE_FUNCTION_RETURN hook_bool_tree_true
298 +
299 +#undef TARGET_PROMOTE_PROTOTYPES
300 +#define TARGET_PROMOTE_PROTOTYPES hook_bool_tree_true
301 +
302 +#undef TARGET_MUST_PASS_IN_STACK
303 +#define TARGET_MUST_PASS_IN_STACK avr32_must_pass_in_stack
304 +
305 +#undef TARGET_PASS_BY_REFERENCE
306 +#define TARGET_PASS_BY_REFERENCE avr32_pass_by_reference
307 +
308 +#undef TARGET_STRICT_ARGUMENT_NAMING
309 +#define TARGET_STRICT_ARGUMENT_NAMING avr32_strict_argument_naming
310 +
311 +#undef TARGET_VECTOR_MODE_SUPPORTED_P
312 +#define TARGET_VECTOR_MODE_SUPPORTED_P avr32_vector_mode_supported
313 +
314 +#undef TARGET_RETURN_IN_MEMORY
315 +#define TARGET_RETURN_IN_MEMORY avr32_return_in_memory
316 +
317 +#undef TARGET_RETURN_IN_MSB
318 +#define TARGET_RETURN_IN_MSB avr32_return_in_msb
319 +
320 +#undef TARGET_ARG_PARTIAL_BYTES
321 +#define TARGET_ARG_PARTIAL_BYTES avr32_arg_partial_bytes
322 +
323 +#undef TARGET_STRIP_NAME_ENCODING
324 +#define TARGET_STRIP_NAME_ENCODING avr32_strip_name_encoding
325 +
326 +#define streq(string1, string2) (strcmp (string1, string2) == 0)
327 +
328 +#undef TARGET_ATTRIBUTE_TABLE
329 +#define TARGET_ATTRIBUTE_TABLE avr32_attribute_table
330 +
331 +#undef TARGET_COMP_TYPE_ATTRIBUTES
332 +#define TARGET_COMP_TYPE_ATTRIBUTES avr32_comp_type_attributes
333 +
334 +
335 +#undef TARGET_RTX_COSTS
336 +#define TARGET_RTX_COSTS avr32_rtx_costs
337 +
338 +#undef TARGET_CANNOT_FORCE_CONST_MEM
339 +#define TARGET_CANNOT_FORCE_CONST_MEM avr32_cannot_force_const_mem
340 +
341 +#undef TARGET_ASM_INTEGER
342 +#define TARGET_ASM_INTEGER avr32_assemble_integer
343 +
344 +/*
345 + * Switches to the appropriate section for output of constant pool
346 + * entry x in mode. You can assume that x is some kind of constant in
347 + * RTL. The argument mode is redundant except in the case of a
348 + * const_int rtx. Select the section by calling readonly_data_section
349 + * or one of the alternatives for other sections. align is the
350 + * constant alignment in bits.
351 + *
352 + * The default version of this function takes care of putting symbolic
353 + * constants in flag_pic mode in data_section and everything else in
354 + * readonly_data_section.
355 + */
356 +#undef TARGET_ASM_SELECT_RTX_SECTION
357 +#define TARGET_ASM_SELECT_RTX_SECTION avr32_select_rtx_section
358 +
359 +
360 +/*
361 + * If non-null, this hook performs a target-specific pass over the
362 + * instruction stream. The compiler will run it at all optimization
363 + * levels, just before the point at which it normally does
364 + * delayed-branch scheduling.
365 + *
366 + * The exact purpose of the hook varies from target to target. Some
367 + * use it to do transformations that are necessary for correctness,
368 + * such as laying out in-function constant pools or avoiding hardware
369 + * hazards. Others use it as an opportunity to do some
370 + * machine-dependent optimizations.
371 + *
372 + * You need not implement the hook if it has nothing to do. The
373 + * default definition is null.
374 + */
375 +#undef TARGET_MACHINE_DEPENDENT_REORG
376 +#define TARGET_MACHINE_DEPENDENT_REORG avr32_reorg
377 +
378 +/* Target hook for assembling integer objects.
379 + Need to handle integer vectors */
380 +static bool
381 +avr32_assemble_integer (rtx x, unsigned int size, int aligned_p)
382 +{
383 + if (avr32_vector_mode_supported (GET_MODE (x)))
384 + {
385 + int i, units;
386 +
387 + if (GET_CODE (x) != CONST_VECTOR)
388 + abort ();
389 +
390 + units = CONST_VECTOR_NUNITS (x);
391 +
392 + switch (GET_MODE (x))
393 + {
394 + case V2HImode:
395 + size = 2;
396 + break;
397 + case V4QImode:
398 + size = 1;
399 + break;
400 + default:
401 + abort ();
402 + }
403 +
404 + for (i = 0; i < units; i++)
405 + {
406 + rtx elt;
407 +
408 + elt = CONST_VECTOR_ELT (x, i);
409 + assemble_integer (elt, size, i == 0 ? 32 : size * BITS_PER_UNIT, 1);
410 + }
411 +
412 + return true;
413 + }
414 +
415 + return default_assemble_integer (x, size, aligned_p);
416 +}
417 +
418 +/*
419 + * This target hook describes the relative costs of RTL expressions.
420 + *
421 + * The cost may depend on the precise form of the expression, which is
422 + * available for examination in x, and the rtx code of the expression
423 + * in which it is contained, found in outer_code. code is the
424 + * expression code--redundant, since it can be obtained with GET_CODE
425 + * (x).
426 + *
427 + * In implementing this hook, you can use the construct COSTS_N_INSNS
428 + * (n) to specify a cost equal to n fast instructions.
429 + *
430 + * On entry to the hook, *total contains a default estimate for the
431 + * cost of the expression. The hook should modify this value as
432 + * necessary. Traditionally, the default costs are COSTS_N_INSNS (5)
433 + * for multiplications, COSTS_N_INSNS (7) for division and modulus
434 + * operations, and COSTS_N_INSNS (1) for all other operations.
435 + *
436 + * When optimizing for code size, i.e. when optimize_size is non-zero,
437 + * this target hook should be used to estimate the relative size cost
438 + * of an expression, again relative to COSTS_N_INSNS.
439 + *
440 + * The hook returns true when all subexpressions of x have been
441 + * processed, and false when rtx_cost should recurse.
442 + */
443 +
444 +/* Worker routine for avr32_rtx_costs. */
445 +static inline int
446 +avr32_rtx_costs_1 (rtx x, enum rtx_code code ATTRIBUTE_UNUSED,
447 + enum rtx_code outer ATTRIBUTE_UNUSED)
448 +{
449 + enum machine_mode mode = GET_MODE (x);
450 +
451 + switch (GET_CODE (x))
452 + {
453 + case MEM:
454 + /* Using pre decrement / post increment memory operations on the
455 + avr32_uc architecture means that two writebacks must be performed
456 + and hence two cycles are needed. */
457 + if (!optimize_size
458 + && GET_MODE_SIZE (mode) <= 2 * UNITS_PER_WORD
459 + && avr32_arch->arch_type == ARCH_TYPE_AVR32_UC
460 + && (GET_CODE (XEXP (x, 0)) == PRE_DEC
461 + || GET_CODE (XEXP (x, 0)) == POST_INC))
462 + return COSTS_N_INSNS (4);
463 +
464 + /* Memory costs quite a lot for the first word, but subsequent words
465 + load at the equivalent of a single insn each. */
466 + if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
467 + return COSTS_N_INSNS (2 + (GET_MODE_SIZE (mode) / UNITS_PER_WORD));
468 +
469 + return COSTS_N_INSNS (3);
470 + case SYMBOL_REF:
471 + case CONST:
472 + /* These are valid for the pseudo insns: lda.w and call which operates
473 + on direct addresses. We assume that the cost of a lda.w is the same
474 + as the cost of a ld.w insn. */
475 + return (outer == SET) ? COSTS_N_INSNS (3) : COSTS_N_INSNS (1);
476 + case DIV:
477 + case MOD:
478 + case UDIV:
479 + case UMOD:
480 + return optimize_size ? COSTS_N_INSNS (1) : COSTS_N_INSNS (16);
481 +
482 + case ROTATE:
483 + case ROTATERT:
484 + if (mode == TImode)
485 + return COSTS_N_INSNS (100);
486 +
487 + if (mode == DImode)
488 + return COSTS_N_INSNS (10);
489 + return COSTS_N_INSNS (4);
490 + case ASHIFT:
491 + case LSHIFTRT:
492 + case ASHIFTRT:
493 + case NOT:
494 + if (mode == TImode)
495 + return COSTS_N_INSNS (10);
496 +
497 + if (mode == DImode)
498 + return COSTS_N_INSNS (4);
499 + return COSTS_N_INSNS (1);
500 + case PLUS:
501 + case MINUS:
502 + case NEG:
503 + case COMPARE:
504 + case ABS:
505 + if (GET_MODE_CLASS (mode) == MODE_FLOAT)
506 + return COSTS_N_INSNS (100);
507 +
508 + if (mode == TImode)
509 + return COSTS_N_INSNS (50);
510 +
511 + if (mode == DImode)
512 + return COSTS_N_INSNS (2);
513 + return COSTS_N_INSNS (1);
514 +
515 + case MULT:
516 + {
517 + if (GET_MODE_CLASS (mode) == MODE_FLOAT)
518 + return COSTS_N_INSNS (300);
519 +
520 + if (mode == TImode)
521 + return COSTS_N_INSNS (16);
522 +
523 + if (mode == DImode)
524 + return COSTS_N_INSNS (4);
525 +
526 + if (mode == HImode)
527 + return COSTS_N_INSNS (2);
528 +
529 + return COSTS_N_INSNS (3);
530 + }
531 + case IF_THEN_ELSE:
532 + if (GET_CODE (XEXP (x, 1)) == PC || GET_CODE (XEXP (x, 2)) == PC)
533 + return COSTS_N_INSNS (4);
534 + return COSTS_N_INSNS (1);
535 + case SIGN_EXTEND:
536 + case ZERO_EXTEND:
537 +      /* Sign/Zero extensions of registers cost quite a lot since these
538 +         instructions only take one register operand which means that gcc
539 +         often must insert some move instructions */
540 + if (mode == QImode || mode == HImode)
541 + return (COSTS_N_INSNS (GET_CODE (XEXP (x, 0)) == MEM ? 0 : 1));
542 + return COSTS_N_INSNS (4);
543 + case UNSPEC:
544 + /* divmod operations */
545 + if (XINT (x, 1) == UNSPEC_UDIVMODSI4_INTERNAL
546 + || XINT (x, 1) == UNSPEC_DIVMODSI4_INTERNAL)
547 + {
548 + return optimize_size ? COSTS_N_INSNS (1) : COSTS_N_INSNS (16);
549 + }
550 + /* Fallthrough */
551 + default:
552 + return COSTS_N_INSNS (1);
553 + }
554 +}
555 +
556 +static bool
557 +avr32_rtx_costs (rtx x, int code, int outer_code, int *total)
558 +{
559 + *total = avr32_rtx_costs_1 (x, code, outer_code);
560 + return true;
561 +}
562 +
563 +
564 +bool
565 +avr32_cannot_force_const_mem (rtx x ATTRIBUTE_UNUSED)
566 +{
567 + /* Do not want symbols in the constant pool when compiling pic or if using
568 + address pseudo instructions. */
569 + return ((flag_pic || TARGET_HAS_ASM_ADDR_PSEUDOS)
570 + && avr32_find_symbol (x) != NULL_RTX);
571 +}
572 +
573 +
574 +/* Table of machine attributes. */
575 +const struct attribute_spec avr32_attribute_table[] = {
576 + /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
577 + /* Interrupt Service Routines have special prologue and epilogue
578 + requirements. */
579 + {"isr", 0, 1, false, false, false, avr32_handle_isr_attribute},
580 + {"interrupt", 0, 1, false, false, false, avr32_handle_isr_attribute},
581 + {"acall", 0, 1, false, true, true, avr32_handle_acall_attribute},
582 + {"naked", 0, 0, true, false, false, avr32_handle_fndecl_attribute},
583 + {NULL, 0, 0, false, false, false, NULL}
584 +};
585 +
586 +
587 +typedef struct
588 +{
589 + const char *const arg;
590 + const unsigned long return_value;
591 +}
592 +isr_attribute_arg;
593 +
594 +static const isr_attribute_arg isr_attribute_args[] = {
595 + {"FULL", AVR32_FT_ISR_FULL},
596 + {"full", AVR32_FT_ISR_FULL},
597 + {"HALF", AVR32_FT_ISR_HALF},
598 + {"half", AVR32_FT_ISR_HALF},
599 + {"NONE", AVR32_FT_ISR_NONE},
600 + {"none", AVR32_FT_ISR_NONE},
601 + {"UNDEF", AVR32_FT_ISR_NONE},
602 + {"undef", AVR32_FT_ISR_NONE},
603 + {"SWI", AVR32_FT_ISR_NONE},
604 + {"swi", AVR32_FT_ISR_NONE},
605 + {NULL, AVR32_FT_ISR_NONE}
606 +};
607 +
608 +/* Returns the (interrupt) function type of the current
609 + function, or AVR32_FT_UNKNOWN if the type cannot be determined. */
610 +
611 +static unsigned long
612 +avr32_isr_value (tree argument)
613 +{
614 + const isr_attribute_arg *ptr;
615 + const char *arg;
616 +
617 + /* No argument - default to ISR_NONE. */
618 + if (argument == NULL_TREE)
619 + return AVR32_FT_ISR_NONE;
620 +
621 + /* Get the value of the argument. */
622 + if (TREE_VALUE (argument) == NULL_TREE
623 + || TREE_CODE (TREE_VALUE (argument)) != STRING_CST)
624 + return AVR32_FT_UNKNOWN;
625 +
626 + arg = TREE_STRING_POINTER (TREE_VALUE (argument));
627 +
628 + /* Check it against the list of known arguments. */
629 + for (ptr = isr_attribute_args; ptr->arg != NULL; ptr++)
630 + if (streq (arg, ptr->arg))
631 + return ptr->return_value;
632 +
633 + /* An unrecognized interrupt type. */
634 + return AVR32_FT_UNKNOWN;
635 +}
636 +
637 +
638 +
639 +/*
640 +These hooks specify assembly directives for creating certain kinds
641 +of integer object. The TARGET_ASM_BYTE_OP directive creates a
642 +byte-sized object, the TARGET_ASM_ALIGNED_HI_OP one creates an
643 +aligned two-byte object, and so on. Any of the hooks may be
644 +NULL, indicating that no suitable directive is available.
645 +
646 +The compiler will print these strings at the start of a new line,
647 +followed immediately by the object's initial value. In most cases,
648 +the string should contain a tab, a pseudo-op, and then another tab.
649 +*/
650 +#undef TARGET_ASM_BYTE_OP
651 +#define TARGET_ASM_BYTE_OP "\t.byte\t"
652 +#undef TARGET_ASM_ALIGNED_HI_OP
653 +#define TARGET_ASM_ALIGNED_HI_OP "\t.align 1\n\t.short\t"
654 +#undef TARGET_ASM_ALIGNED_SI_OP
655 +#define TARGET_ASM_ALIGNED_SI_OP "\t.align 2\n\t.int\t"
656 +#undef TARGET_ASM_ALIGNED_DI_OP
657 +#define TARGET_ASM_ALIGNED_DI_OP NULL
658 +#undef TARGET_ASM_ALIGNED_TI_OP
659 +#define TARGET_ASM_ALIGNED_TI_OP NULL
660 +#undef TARGET_ASM_UNALIGNED_HI_OP
661 +#define TARGET_ASM_UNALIGNED_HI_OP "\t.short\t"
662 +#undef TARGET_ASM_UNALIGNED_SI_OP
663 +#define TARGET_ASM_UNALIGNED_SI_OP "\t.int\t"
664 +#undef TARGET_ASM_UNALIGNED_DI_OP
665 +#define TARGET_ASM_UNALIGNED_DI_OP NULL
666 +#undef TARGET_ASM_UNALIGNED_TI_OP
667 +#define TARGET_ASM_UNALIGNED_TI_OP NULL
668 +
669 +#undef TARGET_SCHED_USE_DFA_PIPELINE_INTERFACE
670 +#define TARGET_SCHED_USE_DFA_PIPELINE_INTERFACE avr32_sched_use_dfa_pipeline_interface
671 +
672 +#undef TARGET_ASM_OUTPUT_MI_THUNK
673 +#define TARGET_ASM_OUTPUT_MI_THUNK avr32_output_mi_thunk
674 +
675 +
676 +static void
677 +avr32_output_mi_thunk (FILE * file,
678 + tree thunk ATTRIBUTE_UNUSED,
679 + HOST_WIDE_INT delta,
680 + HOST_WIDE_INT vcall_offset, tree function)
681 +{
682 + int mi_delta = delta;
683 + int this_regno =
684 + (avr32_return_in_memory (DECL_RESULT (function), TREE_TYPE (function)) ?
685 + INTERNAL_REGNUM (11) : INTERNAL_REGNUM (12));
686 +
687 +
688 + if (!avr32_const_ok_for_constraint_p (mi_delta, 'I', "Is21")
689 + || vcall_offset)
690 + {
691 + fprintf (file, "\tpushm\tr10\n");
692 + }
693 +
694 +
695 + if (mi_delta != 0)
696 + {
697 + if (avr32_const_ok_for_constraint_p (mi_delta, 'I', "Is21"))
698 + {
699 + fprintf (file, "\tsub\t%s, -0x%x\n", reg_names[this_regno],
700 + mi_delta);
701 + }
702 + else
703 + {
704 +	  /* Immediate is larger than k21, so we must make a temp register by
705 +	     pushing a register to the stack. */
706 + fprintf (file, "\tmov\tr10, lo(%x)\n", mi_delta);
707 + fprintf (file, "\torh\tr10, hi(%x)\n", mi_delta);
708 + fprintf (file, "\tadd\t%s, r10\n", reg_names[this_regno]);
709 + }
710 + }
711 +
712 +
713 + if (vcall_offset != 0)
714 + {
715 + fprintf (file, "\tld.w\tr10, %s[0]\n", reg_names[this_regno]);
716 + fprintf (file, "\tld.w\tr10, r10[%i]\n", (int) vcall_offset);
717 + fprintf (file, "\tadd\t%s, r10\n", reg_names[this_regno]);
718 + }
719 +
720 +
721 + if (!avr32_const_ok_for_constraint_p (mi_delta, 'I', "Is21")
722 + || vcall_offset)
723 + {
724 + fprintf (file, "\tpopm\tr10\n");
725 + }
726 +
727 + if (flag_pic)
728 + {
729 + /* Don't know how we should do this!!! For now we'll just use an
730 + extended branch instruction and hope that the function will be
731 + reached. */
732 + fprintf (file, "\tbral\t");
733 + assemble_name (file, XSTR (XEXP (DECL_RTL (function), 0), 0));
734 + fputc ('\n', file);
735 + }
736 + else
737 + {
738 + fprintf (file, "\tlddpc\tpc, 0f\n");
739 + fprintf (file, "\t.align 2\n");
740 + fputs ("0:\t.long\t", file);
741 + assemble_name (file, XSTR (XEXP (DECL_RTL (function), 0), 0));
742 + fputc ('\n', file);
743 + }
744 +}
745 +
746 +/* Implements target hook vector_mode_supported. */
747 +bool
748 +avr32_vector_mode_supported (enum machine_mode mode)
749 +{
750 + if ((mode == V2HImode) || (mode == V4QImode))
751 + return true;
752 +
753 + return false;
754 +}
755 +
756 +
757 +#undef TARGET_INIT_LIBFUNCS
758 +#define TARGET_INIT_LIBFUNCS avr32_init_libfuncs
759 +
760 +#undef TARGET_INIT_BUILTINS
761 +#define TARGET_INIT_BUILTINS avr32_init_builtins
762 +
763 +#undef TARGET_EXPAND_BUILTIN
764 +#define TARGET_EXPAND_BUILTIN avr32_expand_builtin
765 +
766 +tree int_ftype_int, int_ftype_void, short_ftype_short, void_ftype_int_int,
767 + void_ftype_ptr_int;
768 +tree void_ftype_int, void_ftype_void, int_ftype_ptr_int;
769 +tree short_ftype_short, int_ftype_int_short, int_ftype_short_short,
770 + short_ftype_short_short;
771 +tree int_ftype_int_int, longlong_ftype_int_short, longlong_ftype_short_short;
772 +tree void_ftype_int_int_int_int_int, void_ftype_int_int_int;
773 +tree longlong_ftype_int_int, void_ftype_int_int_longlong;
774 +tree int_ftype_int_int_int, longlong_ftype_longlong_int_short;
775 +tree longlong_ftype_longlong_short_short, int_ftype_int_short_short;
776 +
777 +#define def_builtin(NAME, TYPE, CODE) \
778 + lang_hooks.builtin_function ((NAME), (TYPE), (CODE), \
779 + BUILT_IN_MD, NULL, NULL_TREE)
780 +
781 +#define def_mbuiltin(MASK, NAME, TYPE, CODE) \
782 + do \
783 + { \
784 + if ((MASK)) \
785 + lang_hooks.builtin_function ((NAME), (TYPE), (CODE), \
786 + BUILT_IN_MD, NULL, NULL_TREE); \
787 + } \
788 + while (0)
789 +
790 +struct builtin_description
791 +{
792 + const unsigned int mask;
793 + const enum insn_code icode;
794 + const char *const name;
795 + const int code;
796 + const enum rtx_code comparison;
797 + const unsigned int flag;
798 + const tree *ftype;
799 +};
800 +
801 +static const struct builtin_description bdesc_2arg[] = {
802 +#define DSP_BUILTIN(code, builtin, ftype) \
803 + { 1, CODE_FOR_##code, "__builtin_" #code , \
804 + AVR32_BUILTIN_##builtin, 0, 0, ftype }
805 +
806 + DSP_BUILTIN (mulsathh_h, MULSATHH_H, &short_ftype_short_short),
807 + DSP_BUILTIN (mulsathh_w, MULSATHH_W, &int_ftype_short_short),
808 + DSP_BUILTIN (mulsatrndhh_h, MULSATRNDHH_H, &short_ftype_short_short),
809 + DSP_BUILTIN (mulsatrndwh_w, MULSATRNDWH_W, &int_ftype_int_short),
810 + DSP_BUILTIN (mulsatwh_w, MULSATWH_W, &int_ftype_int_short),
811 + DSP_BUILTIN (satadd_h, SATADD_H, &short_ftype_short_short),
812 + DSP_BUILTIN (satsub_h, SATSUB_H, &short_ftype_short_short),
813 + DSP_BUILTIN (satadd_w, SATADD_W, &int_ftype_int_int),
814 + DSP_BUILTIN (satsub_w, SATSUB_W, &int_ftype_int_int),
815 + DSP_BUILTIN (mulwh_d, MULWH_D, &longlong_ftype_int_short),
816 + DSP_BUILTIN (mulnwh_d, MULNWH_D, &longlong_ftype_int_short)
817 +};
818 +
819 +
820 +void
821 +avr32_init_builtins (void)
822 +{
823 + unsigned int i;
824 + const struct builtin_description *d;
825 + tree endlink = void_list_node;
826 + tree int_endlink = tree_cons (NULL_TREE, integer_type_node, endlink);
827 + tree longlong_endlink =
828 + tree_cons (NULL_TREE, long_long_integer_type_node, endlink);
829 + tree short_endlink =
830 + tree_cons (NULL_TREE, short_integer_type_node, endlink);
831 + tree void_endlink = tree_cons (NULL_TREE, void_type_node, endlink);
832 +
833 + /* int func (int) */
834 + int_ftype_int = build_function_type (integer_type_node, int_endlink);
835 +
836 + /* short func (short) */
837 + short_ftype_short
838 + = build_function_type (short_integer_type_node, short_endlink);
839 +
840 + /* short func (short, short) */
841 + short_ftype_short_short
842 + = build_function_type (short_integer_type_node,
843 + tree_cons (NULL_TREE, short_integer_type_node,
844 + short_endlink));
845 +
846 + /* long long func (long long, short, short) */
847 + longlong_ftype_longlong_short_short
848 + = build_function_type (long_long_integer_type_node,
849 + tree_cons (NULL_TREE, long_long_integer_type_node,
850 + tree_cons (NULL_TREE,
851 + short_integer_type_node,
852 + short_endlink)));
853 +
854 + /* long long func (short, short) */
855 + longlong_ftype_short_short
856 + = build_function_type (long_long_integer_type_node,
857 + tree_cons (NULL_TREE, short_integer_type_node,
858 + short_endlink));
859 +
860 + /* int func (int, int) */
861 + int_ftype_int_int
862 + = build_function_type (integer_type_node,
863 + tree_cons (NULL_TREE, integer_type_node,
864 + int_endlink));
865 +
866 + /* long long func (int, int) */
867 + longlong_ftype_int_int
868 + = build_function_type (long_long_integer_type_node,
869 + tree_cons (NULL_TREE, integer_type_node,
870 + int_endlink));
871 +
872 + /* long long int func (long long, int, short) */
873 + longlong_ftype_longlong_int_short
874 + = build_function_type (long_long_integer_type_node,
875 + tree_cons (NULL_TREE, long_long_integer_type_node,
876 + tree_cons (NULL_TREE, integer_type_node,
877 + short_endlink)));
878 +
879 + /* long long int func (int, short) */
880 + longlong_ftype_int_short
881 + = build_function_type (long_long_integer_type_node,
882 + tree_cons (NULL_TREE, integer_type_node,
883 + short_endlink));
884 +
885 + /* int func (int, short, short) */
886 + int_ftype_int_short_short
887 + = build_function_type (integer_type_node,
888 + tree_cons (NULL_TREE, integer_type_node,
889 + tree_cons (NULL_TREE,
890 + short_integer_type_node,
891 + short_endlink)));
892 +
893 + /* int func (short, short) */
894 + int_ftype_short_short
895 + = build_function_type (integer_type_node,
896 + tree_cons (NULL_TREE, short_integer_type_node,
897 + short_endlink));
898 +
899 + /* int func (int, short) */
900 + int_ftype_int_short
901 + = build_function_type (integer_type_node,
902 + tree_cons (NULL_TREE, integer_type_node,
903 + short_endlink));
904 +
905 + /* void func (int, int) */
906 + void_ftype_int_int
907 + = build_function_type (void_type_node,
908 + tree_cons (NULL_TREE, integer_type_node,
909 + int_endlink));
910 +
911 + /* void func (int, int, int) */
912 + void_ftype_int_int_int
913 + = build_function_type (void_type_node,
914 + tree_cons (NULL_TREE, integer_type_node,
915 + tree_cons (NULL_TREE, integer_type_node,
916 + int_endlink)));
917 +
918 + /* void func (int, int, long long) */
919 + void_ftype_int_int_longlong
920 + = build_function_type (void_type_node,
921 + tree_cons (NULL_TREE, integer_type_node,
922 + tree_cons (NULL_TREE, integer_type_node,
923 + longlong_endlink)));
924 +
925 + /* void func (int, int, int, int, int) */
926 + void_ftype_int_int_int_int_int
927 + = build_function_type (void_type_node,
928 + tree_cons (NULL_TREE, integer_type_node,
929 + tree_cons (NULL_TREE, integer_type_node,
930 + tree_cons (NULL_TREE,
931 + integer_type_node,
932 + tree_cons
933 + (NULL_TREE,
934 + integer_type_node,
935 + int_endlink)))));
936 +
937 + /* void func (void *, int) */
938 + void_ftype_ptr_int
939 + = build_function_type (void_type_node,
940 + tree_cons (NULL_TREE, ptr_type_node, int_endlink));
941 +
942 + /* void func (int) */
943 + void_ftype_int = build_function_type (void_type_node, int_endlink);
944 +
945 + /* void func (void) */
946 + void_ftype_void = build_function_type (void_type_node, void_endlink);
947 +
948 + /* int func (void) */
949 + int_ftype_void = build_function_type (integer_type_node, void_endlink);
950 +
951 + /* int func (void *, int) */
952 + int_ftype_ptr_int
953 + = build_function_type (integer_type_node,
954 + tree_cons (NULL_TREE, ptr_type_node, int_endlink));
955 +
956 + /* int func (int, int, int) */
957 + int_ftype_int_int_int
958 + = build_function_type (integer_type_node,
959 + tree_cons (NULL_TREE, integer_type_node,
960 + tree_cons (NULL_TREE, integer_type_node,
961 + int_endlink)));
962 +
963 + /* Initialize avr32 builtins. */
964 + def_builtin ("__builtin_mfsr", int_ftype_int, AVR32_BUILTIN_MFSR);
965 + def_builtin ("__builtin_mtsr", void_ftype_int_int, AVR32_BUILTIN_MTSR);
966 + def_builtin ("__builtin_mfdr", int_ftype_int, AVR32_BUILTIN_MFDR);
967 + def_builtin ("__builtin_mtdr", void_ftype_int_int, AVR32_BUILTIN_MTDR);
968 + def_builtin ("__builtin_cache", void_ftype_ptr_int, AVR32_BUILTIN_CACHE);
969 + def_builtin ("__builtin_sync", void_ftype_int, AVR32_BUILTIN_SYNC);
970 + def_builtin ("__builtin_tlbr", void_ftype_void, AVR32_BUILTIN_TLBR);
971 + def_builtin ("__builtin_tlbs", void_ftype_void, AVR32_BUILTIN_TLBS);
972 + def_builtin ("__builtin_tlbw", void_ftype_void, AVR32_BUILTIN_TLBW);
973 + def_builtin ("__builtin_breakpoint", void_ftype_void,
974 + AVR32_BUILTIN_BREAKPOINT);
975 + def_builtin ("__builtin_xchg", int_ftype_ptr_int, AVR32_BUILTIN_XCHG);
976 + def_builtin ("__builtin_ldxi", int_ftype_ptr_int, AVR32_BUILTIN_LDXI);
977 + def_builtin ("__builtin_bswap_16", short_ftype_short,
978 + AVR32_BUILTIN_BSWAP16);
979 + def_builtin ("__builtin_bswap_32", int_ftype_int, AVR32_BUILTIN_BSWAP32);
980 + def_builtin ("__builtin_cop", void_ftype_int_int_int_int_int,
981 + AVR32_BUILTIN_COP);
982 + def_builtin ("__builtin_mvcr_w", int_ftype_int_int, AVR32_BUILTIN_MVCR_W);
983 + def_builtin ("__builtin_mvrc_w", void_ftype_int_int_int,
984 + AVR32_BUILTIN_MVRC_W);
985 + def_builtin ("__builtin_mvcr_d", longlong_ftype_int_int,
986 + AVR32_BUILTIN_MVCR_D);
987 + def_builtin ("__builtin_mvrc_d", void_ftype_int_int_longlong,
988 + AVR32_BUILTIN_MVRC_D);
989 + def_builtin ("__builtin_sats", int_ftype_int_int_int, AVR32_BUILTIN_SATS);
990 + def_builtin ("__builtin_satu", int_ftype_int_int_int, AVR32_BUILTIN_SATU);
991 + def_builtin ("__builtin_satrnds", int_ftype_int_int_int,
992 + AVR32_BUILTIN_SATRNDS);
993 + def_builtin ("__builtin_satrndu", int_ftype_int_int_int,
994 + AVR32_BUILTIN_SATRNDU);
995 + def_builtin ("__builtin_musfr", void_ftype_int, AVR32_BUILTIN_MUSFR);
996 + def_builtin ("__builtin_mustr", int_ftype_void, AVR32_BUILTIN_MUSTR);
997 + def_builtin ("__builtin_macsathh_w", int_ftype_int_short_short,
998 + AVR32_BUILTIN_MACSATHH_W);
999 + def_builtin ("__builtin_macwh_d", longlong_ftype_longlong_int_short,
1000 + AVR32_BUILTIN_MACWH_D);
1001 + def_builtin ("__builtin_machh_d", longlong_ftype_longlong_short_short,
1002 + AVR32_BUILTIN_MACHH_D);
1003 +
1004 + /* Add all builtins that are more or less simple operations on two
1005 + operands. */
1006 + for (i = 0, d = bdesc_2arg; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
1007 + {
1008 + /* Use one of the operands; the target can have a different mode for
1009 + mask-generating compares. */
1010 +
1011 + if (d->name == 0)
1012 + continue;
1013 +
1014 + def_mbuiltin (d->mask, d->name, *(d->ftype), d->code);
1015 + }
1016 +}
1017 +
1018 +
1019 +/* Subroutine of avr32_expand_builtin to take care of binop insns. */
1020 +
1021 +static rtx
1022 +avr32_expand_binop_builtin (enum insn_code icode, tree arglist, rtx target)
1023 +{
1024 + rtx pat;
1025 + tree arg0 = TREE_VALUE (arglist);
1026 + tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
1027 + rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
1028 + rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
1029 + enum machine_mode tmode = insn_data[icode].operand[0].mode;
1030 + enum machine_mode mode0 = insn_data[icode].operand[1].mode;
1031 + enum machine_mode mode1 = insn_data[icode].operand[2].mode;
1032 +
1033 + if (!target
1034 + || GET_MODE (target) != tmode
1035 + || !(*insn_data[icode].operand[0].predicate) (target, tmode))
1036 + target = gen_reg_rtx (tmode);
1037 +
1038 + /* In case the insn wants input operands in modes different from the
1039 + result, abort. */
1040 + if (!(*insn_data[icode].operand[1].predicate) (op0, mode0))
1041 + {
1042 + /* If op0 is already a reg we must cast it to the correct mode. */
1043 + if (REG_P (op0))
1044 + op0 = convert_to_mode (mode0, op0, 1);
1045 + else
1046 + op0 = copy_to_mode_reg (mode0, op0);
1047 + }
1048 + if (!(*insn_data[icode].operand[2].predicate) (op1, mode1))
1049 + {
1050 + /* If op1 is already a reg we must cast it to the correct mode. */
1051 + if (REG_P (op1))
1052 + op1 = convert_to_mode (mode1, op1, 1);
1053 + else
1054 + op1 = copy_to_mode_reg (mode1, op1);
1055 + }
1056 + pat = GEN_FCN (icode) (target, op0, op1);
1057 + if (!pat)
1058 + return 0;
1059 + emit_insn (pat);
1060 + return target;
1061 +}
1062 +
1063 +/* Expand an expression EXP that calls a built-in function,
1064 + with result going to TARGET if that's convenient
1065 + (and in mode MODE if that's convenient).
1066 + SUBTARGET may be used as the target for computing one of EXP's operands.
1067 + IGNORE is nonzero if the value is to be ignored. */
1068 +
1069 +rtx
1070 +avr32_expand_builtin (tree exp,
1071 + rtx target,
1072 + rtx subtarget ATTRIBUTE_UNUSED,
1073 + enum machine_mode mode ATTRIBUTE_UNUSED,
1074 + int ignore ATTRIBUTE_UNUSED)
1075 +{
1076 + const struct builtin_description *d;
1077 + unsigned int i;
1078 + enum insn_code icode;
1079 + tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
1080 + tree arglist = TREE_OPERAND (exp, 1);
1081 + tree arg0, arg1, arg2;
1082 + rtx op0, op1, op2, pat;
1083 + enum machine_mode tmode, mode0, mode1;
1084 + enum machine_mode arg0_mode;
1085 + int fcode = DECL_FUNCTION_CODE (fndecl);
1086 +
1087 + switch (fcode)
1088 + {
1089 + default:
1090 + break;
1091 +
1092 + case AVR32_BUILTIN_SATS:
1093 + case AVR32_BUILTIN_SATU:
1094 + case AVR32_BUILTIN_SATRNDS:
1095 + case AVR32_BUILTIN_SATRNDU:
1096 + {
1097 + const char *fname;
1098 + switch (fcode)
1099 + {
1100 + default:
1101 + case AVR32_BUILTIN_SATS:
1102 + icode = CODE_FOR_sats;
1103 + fname = "sats";
1104 + break;
1105 + case AVR32_BUILTIN_SATU:
1106 + icode = CODE_FOR_satu;
1107 + fname = "satu";
1108 + break;
1109 + case AVR32_BUILTIN_SATRNDS:
1110 + icode = CODE_FOR_satrnds;
1111 + fname = "satrnds";
1112 + break;
1113 + case AVR32_BUILTIN_SATRNDU:
1114 + icode = CODE_FOR_satrndu;
1115 + fname = "satrndu";
1116 + break;
1117 + }
1118 +
1119 + arg0 = TREE_VALUE (arglist);
1120 + arg1 = TREE_VALUE (TREE_CHAIN (arglist));
1121 + arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
1122 + op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
1123 + op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
1124 + op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
1125 +
1126 + tmode = insn_data[icode].operand[0].mode;
1127 +
1128 +
1129 + if (target == 0
1130 + || GET_MODE (target) != tmode
1131 + || !(*insn_data[icode].operand[0].predicate) (target, tmode))
1132 + target = gen_reg_rtx (tmode);
1133 +
1134 +
1135 + if (!(*insn_data[icode].operand[0].predicate) (op0, GET_MODE (op0)))
1136 + {
1137 + op0 = copy_to_mode_reg (insn_data[icode].operand[0].mode, op0);
1138 + }
1139 +
1140 + if (!(*insn_data[icode].operand[1].predicate) (op1, SImode))
1141 + {
1142 + error ("Parameter 2 to __builtin_%s should be a constant number.",
1143 + fname);
1144 + return NULL_RTX;
1145 + }
1146 +
1147 + if (!(*insn_data[icode].operand[1].predicate) (op2, SImode))
1148 + {
1149 + error ("Parameter 3 to __builtin_%s should be a constant number.",
1150 + fname);
1151 + return NULL_RTX;
1152 + }
1153 +
1154 + emit_move_insn (target, op0);
1155 + pat = GEN_FCN (icode) (target, op1, op2);
1156 + if (!pat)
1157 + return 0;
1158 + emit_insn (pat);
1159 +
1160 + return target;
1161 + }
1162 + case AVR32_BUILTIN_MUSTR:
1163 + icode = CODE_FOR_mustr;
1164 + tmode = insn_data[icode].operand[0].mode;
1165 +
1166 + if (target == 0
1167 + || GET_MODE (target) != tmode
1168 + || !(*insn_data[icode].operand[0].predicate) (target, tmode))
1169 + target = gen_reg_rtx (tmode);
1170 + pat = GEN_FCN (icode) (target);
1171 + if (!pat)
1172 + return 0;
1173 + emit_insn (pat);
1174 + return target;
1175 +
1176 + case AVR32_BUILTIN_MFSR:
1177 + icode = CODE_FOR_mfsr;
1178 + arg0 = TREE_VALUE (arglist);
1179 + op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
1180 + tmode = insn_data[icode].operand[0].mode;
1181 + mode0 = insn_data[icode].operand[1].mode;
1182 +
1183 + if (!(*insn_data[icode].operand[1].predicate) (op0, mode0))
1184 + {
1185 + error ("Parameter 1 to __builtin_mfsr must be a constant number");
1186 + }
1187 +
1188 + if (target == 0
1189 + || GET_MODE (target) != tmode
1190 + || !(*insn_data[icode].operand[0].predicate) (target, tmode))
1191 + target = gen_reg_rtx (tmode);
1192 + pat = GEN_FCN (icode) (target, op0);
1193 + if (!pat)
1194 + return 0;
1195 + emit_insn (pat);
1196 + return target;
1197 + case AVR32_BUILTIN_MTSR:
1198 + icode = CODE_FOR_mtsr;
1199 + arg0 = TREE_VALUE (arglist);
1200 + arg1 = TREE_VALUE (TREE_CHAIN (arglist));
1201 + op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
1202 + op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
1203 + mode0 = insn_data[icode].operand[0].mode;
1204 + mode1 = insn_data[icode].operand[1].mode;
1205 +
1206 + if (!(*insn_data[icode].operand[0].predicate) (op0, mode0))
1207 + {
1208 + error ("Parameter 1 to __builtin_mtsr must be a constant number");
1209 + return gen_reg_rtx (mode0);
1210 + }
1211 + if (!(*insn_data[icode].operand[1].predicate) (op1, mode1))
1212 + op1 = copy_to_mode_reg (mode1, op1);
1213 + pat = GEN_FCN (icode) (op0, op1);
1214 + if (!pat)
1215 + return 0;
1216 + emit_insn (pat);
1217 + return NULL_RTX;
1218 + case AVR32_BUILTIN_MFDR:
1219 + icode = CODE_FOR_mfdr;
1220 + arg0 = TREE_VALUE (arglist);
1221 + op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
1222 + tmode = insn_data[icode].operand[0].mode;
1223 + mode0 = insn_data[icode].operand[1].mode;
1224 +
1225 + if (!(*insn_data[icode].operand[1].predicate) (op0, mode0))
1226 + {
1227 + error ("Parameter 1 to __builtin_mfdr must be a constant number");
1228 + }
1229 +
1230 + if (target == 0
1231 + || GET_MODE (target) != tmode
1232 + || !(*insn_data[icode].operand[0].predicate) (target, tmode))
1233 + target = gen_reg_rtx (tmode);
1234 + pat = GEN_FCN (icode) (target, op0);
1235 + if (!pat)
1236 + return 0;
1237 + emit_insn (pat);
1238 + return target;
1239 + case AVR32_BUILTIN_MTDR:
1240 + icode = CODE_FOR_mtdr;
1241 + arg0 = TREE_VALUE (arglist);
1242 + arg1 = TREE_VALUE (TREE_CHAIN (arglist));
1243 + op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
1244 + op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
1245 + mode0 = insn_data[icode].operand[0].mode;
1246 + mode1 = insn_data[icode].operand[1].mode;
1247 +
1248 + if (!(*insn_data[icode].operand[0].predicate) (op0, mode0))
1249 + {
1250 + error ("Parameter 1 to __builtin_mtdr must be a constant number");
1251 + return gen_reg_rtx (mode0);
1252 + }
1253 + if (!(*insn_data[icode].operand[1].predicate) (op1, mode1))
1254 + op1 = copy_to_mode_reg (mode1, op1);
1255 + pat = GEN_FCN (icode) (op0, op1);
1256 + if (!pat)
1257 + return 0;
1258 + emit_insn (pat);
1259 + return NULL_RTX;
1260 + case AVR32_BUILTIN_CACHE:
1261 + icode = CODE_FOR_cache;
1262 + arg0 = TREE_VALUE (arglist);
1263 + arg1 = TREE_VALUE (TREE_CHAIN (arglist));
1264 + op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
1265 + op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
1266 + mode0 = insn_data[icode].operand[0].mode;
1267 + mode1 = insn_data[icode].operand[1].mode;
1268 +
1269 + if (!(*insn_data[icode].operand[1].predicate) (op1, mode1))
1270 + {
1271 + error ("Parameter 2 to __builtin_cache must be a constant number");
1272 + return gen_reg_rtx (mode1);
1273 + }
1274 +
1275 + if (!(*insn_data[icode].operand[0].predicate) (op0, mode0))
1276 + op0 = copy_to_mode_reg (mode0, op0);
1277 +
1278 + pat = GEN_FCN (icode) (op0, op1);
1279 + if (!pat)
1280 + return 0;
1281 + emit_insn (pat);
1282 + return NULL_RTX;
1283 + case AVR32_BUILTIN_SYNC:
1284 + case AVR32_BUILTIN_MUSFR:
1285 + {
1286 + const char *fname;
1287 + switch (fcode)
1288 + {
1289 + default:
1290 + case AVR32_BUILTIN_SYNC:
1291 + icode = CODE_FOR_sync;
1292 + fname = "sync";
1293 + break;
1294 + case AVR32_BUILTIN_MUSFR:
1295 + icode = CODE_FOR_musfr;
1296 + fname = "musfr";
1297 + break;
1298 + }
1299 +
1300 + arg0 = TREE_VALUE (arglist);
1301 + op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
1302 + mode0 = insn_data[icode].operand[0].mode;
1303 +
1304 + if (!(*insn_data[icode].operand[0].predicate) (op0, mode0))
1305 + {
1306 + if (icode == CODE_FOR_musfr)
1307 + op0 = copy_to_mode_reg (mode0, op0);
1308 + else
1309 + {
1310 + error ("Parameter to __builtin_%s is illegal.", fname);
1311 + return gen_reg_rtx (mode0);
1312 + }
1313 + }
1314 + pat = GEN_FCN (icode) (op0);
1315 + if (!pat)
1316 + return 0;
1317 + emit_insn (pat);
1318 + return NULL_RTX;
1319 + }
1320 + case AVR32_BUILTIN_TLBR:
1321 + icode = CODE_FOR_tlbr;
1322 + pat = GEN_FCN (icode) (NULL_RTX);
1323 + if (!pat)
1324 + return 0;
1325 + emit_insn (pat);
1326 + return NULL_RTX;
1327 + case AVR32_BUILTIN_TLBS:
1328 + icode = CODE_FOR_tlbs;
1329 + pat = GEN_FCN (icode) (NULL_RTX);
1330 + if (!pat)
1331 + return 0;
1332 + emit_insn (pat);
1333 + return NULL_RTX;
1334 + case AVR32_BUILTIN_TLBW:
1335 + icode = CODE_FOR_tlbw;
1336 + pat = GEN_FCN (icode) (NULL_RTX);
1337 + if (!pat)
1338 + return 0;
1339 + emit_insn (pat);
1340 + return NULL_RTX;
1341 + case AVR32_BUILTIN_BREAKPOINT:
1342 + icode = CODE_FOR_breakpoint;
1343 + pat = GEN_FCN (icode) (NULL_RTX);
1344 + if (!pat)
1345 + return 0;
1346 + emit_insn (pat);
1347 + return NULL_RTX;
1348 + case AVR32_BUILTIN_XCHG:
1349 + icode = CODE_FOR_xchg;
1350 + arg0 = TREE_VALUE (arglist);
1351 + arg1 = TREE_VALUE (TREE_CHAIN (arglist));
1352 + op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
1353 + op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
1354 + tmode = insn_data[icode].operand[0].mode;
1355 + mode0 = insn_data[icode].operand[1].mode;
1356 + mode1 = insn_data[icode].operand[3].mode;
1357 +
1358 + if (!(*insn_data[icode].operand[3].predicate) (op1, mode1))
1359 + {
1360 + op1 = copy_to_mode_reg (mode1, op1);
1361 + }
1362 +
1363 + if (!(*insn_data[icode].operand[2].predicate) (op0, mode0))
1364 + {
1365 + op0 = copy_to_mode_reg (mode0, op0);
1366 + }
1367 +
1368 + if (target == 0
1369 + || GET_MODE (target) != tmode
1370 + || !(*insn_data[icode].operand[0].predicate) (target, tmode))
1371 + target = gen_reg_rtx (tmode);
1372 + pat = GEN_FCN (icode) (target, op0, op0, op1);
1373 + if (!pat)
1374 + return 0;
1375 + emit_insn (pat);
1376 + return target;
1377 + case AVR32_BUILTIN_LDXI:
1378 + icode = CODE_FOR_ldxi;
1379 + arg0 = TREE_VALUE (arglist);
1380 + arg1 = TREE_VALUE (TREE_CHAIN (arglist));
1381 + arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
1382 + op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
1383 + op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
1384 + op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
1385 + tmode = insn_data[icode].operand[0].mode;
1386 + mode0 = insn_data[icode].operand[1].mode;
1387 + mode1 = insn_data[icode].operand[2].mode;
1388 +
1389 + if (!(*insn_data[icode].operand[1].predicate) (op0, mode0))
1390 + {
1391 + op0 = copy_to_mode_reg (mode0, op0);
1392 + }
1393 +
1394 + if (!(*insn_data[icode].operand[2].predicate) (op1, mode1))
1395 + {
1396 + op1 = copy_to_mode_reg (mode1, op1);
1397 + }
1398 +
1399 + if (!(*insn_data[icode].operand[3].predicate) (op2, SImode))
1400 + {
1401 + error
1402 + ("Parameter 3 to __builtin_ldxi must be a valid extract shift operand: (0|8|16|24)");
1403 + return gen_reg_rtx (mode0);
1404 + }
1405 +
1406 + if (target == 0
1407 + || GET_MODE (target) != tmode
1408 + || !(*insn_data[icode].operand[0].predicate) (target, tmode))
1409 + target = gen_reg_rtx (tmode);
1410 + pat = GEN_FCN (icode) (target, op0, op1, op2);
1411 + if (!pat)
1412 + return 0;
1413 + emit_insn (pat);
1414 + return target;
1415 + case AVR32_BUILTIN_BSWAP16:
1416 + {
1417 + icode = CODE_FOR_bswap_16;
1418 + arg0 = TREE_VALUE (arglist);
1419 + arg0_mode = TYPE_MODE (TREE_TYPE (arg0));
1420 + mode0 = insn_data[icode].operand[1].mode;
1421 + if (arg0_mode != mode0)
1422 + arg0 = build1 (NOP_EXPR,
1423 + (*lang_hooks.types.type_for_mode) (mode0, 0), arg0);
1424 +
1425 + op0 = expand_expr (arg0, NULL_RTX, HImode, 0);
1426 + tmode = insn_data[icode].operand[0].mode;
1427 +
1428 +
1429 + if (!(*insn_data[icode].operand[1].predicate) (op0, mode0))
1430 + {
1431 + op0 = copy_to_mode_reg (mode0, op0);
1432 + }
1433 +
1434 + if (target == 0
1435 + || GET_MODE (target) != tmode
1436 + || !(*insn_data[icode].operand[0].predicate) (target, tmode))
1437 + {
1438 + target = gen_reg_rtx (tmode);
1439 + }
1440 +
1441 +
1442 + pat = GEN_FCN (icode) (target, op0);
1443 + if (!pat)
1444 + return 0;
1445 + emit_insn (pat);
1446 +
1447 + return target;
1448 + }
1449 + case AVR32_BUILTIN_BSWAP32:
1450 + {
1451 + icode = CODE_FOR_bswap_32;
1452 + arg0 = TREE_VALUE (arglist);
1453 + op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
1454 + tmode = insn_data[icode].operand[0].mode;
1455 + mode0 = insn_data[icode].operand[1].mode;
1456 +
1457 + if (!(*insn_data[icode].operand[1].predicate) (op0, mode0))
1458 + {
1459 + op0 = copy_to_mode_reg (mode0, op0);
1460 + }
1461 +
1462 + if (target == 0
1463 + || GET_MODE (target) != tmode
1464 + || !(*insn_data[icode].operand[0].predicate) (target, tmode))
1465 + target = gen_reg_rtx (tmode);
1466 +
1467 +
1468 + pat = GEN_FCN (icode) (target, op0);
1469 + if (!pat)
1470 + return 0;
1471 + emit_insn (pat);
1472 +
1473 + return target;
1474 + }
1475 + case AVR32_BUILTIN_MVCR_W:
1476 + case AVR32_BUILTIN_MVCR_D:
1477 + {
1478 + arg0 = TREE_VALUE (arglist);
1479 + arg1 = TREE_VALUE (TREE_CHAIN (arglist));
1480 + op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
1481 + op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
1482 +
1483 + if (fcode == AVR32_BUILTIN_MVCR_W)
1484 + icode = CODE_FOR_mvcrsi;
1485 + else
1486 + icode = CODE_FOR_mvcrdi;
1487 +
1488 + tmode = insn_data[icode].operand[0].mode;
1489 +
1490 + if (target == 0
1491 + || GET_MODE (target) != tmode
1492 + || !(*insn_data[icode].operand[0].predicate) (target, tmode))
1493 + target = gen_reg_rtx (tmode);
1494 +
1495 + if (!(*insn_data[icode].operand[1].predicate) (op0, SImode))
1496 + {
1497 + error
1498 + ("Parameter 1 to __builtin_cop is not a valid coprocessor number.");
1499 + error ("Number should be between 0 and 7.");
1500 + return NULL_RTX;
1501 + }
1502 +
1503 + if (!(*insn_data[icode].operand[2].predicate) (op1, SImode))
1504 + {
1505 + error
1506 + ("Parameter 2 to __builtin_cop is not a valid coprocessor register number.");
1507 + error ("Number should be between 0 and 15.");
1508 + return NULL_RTX;
1509 + }
1510 +
1511 + pat = GEN_FCN (icode) (target, op0, op1);
1512 + if (!pat)
1513 + return 0;
1514 + emit_insn (pat);
1515 +
1516 + return target;
1517 + }
1518 + case AVR32_BUILTIN_MACSATHH_W:
1519 + case AVR32_BUILTIN_MACWH_D:
1520 + case AVR32_BUILTIN_MACHH_D:
1521 + {
1522 + arg0 = TREE_VALUE (arglist);
1523 + arg1 = TREE_VALUE (TREE_CHAIN (arglist));
1524 + arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
1525 + op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
1526 + op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
1527 + op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
1528 +
1529 + icode = ((fcode == AVR32_BUILTIN_MACSATHH_W) ? CODE_FOR_macsathh_w :
1530 + (fcode == AVR32_BUILTIN_MACWH_D) ? CODE_FOR_macwh_d :
1531 + CODE_FOR_machh_d);
1532 +
1533 + tmode = insn_data[icode].operand[0].mode;
1534 + mode0 = insn_data[icode].operand[1].mode;
1535 + mode1 = insn_data[icode].operand[2].mode;
1536 +
1537 +
1538 + if (!target
1539 + || GET_MODE (target) != tmode
1540 + || !(*insn_data[icode].operand[0].predicate) (target, tmode))
1541 + target = gen_reg_rtx (tmode);
1542 +
1543 + if (!(*insn_data[icode].operand[0].predicate) (op0, tmode))
1544 + {
1545 + /* If op0 is already a reg we must cast it to the correct mode. */
1546 + if (REG_P (op0))
1547 + op0 = convert_to_mode (tmode, op0, 1);
1548 + else
1549 + op0 = copy_to_mode_reg (tmode, op0);
1550 + }
1551 +
1552 + if (!(*insn_data[icode].operand[1].predicate) (op1, mode0))
1553 + {
1554 + /* If op1 is already a reg we must cast it to the correct mode. */
1555 + if (REG_P (op1))
1556 + op1 = convert_to_mode (mode0, op1, 1);
1557 + else
1558 + op1 = copy_to_mode_reg (mode0, op1);
1559 + }
1560 +
1561 + if (!(*insn_data[icode].operand[2].predicate) (op2, mode1))
1562 + {
1563 + /* If op1 is already a reg we must cast it to the correct mode. */
1564 + if (REG_P (op2))
1565 + op2 = convert_to_mode (mode1, op2, 1);
1566 + else
1567 + op2 = copy_to_mode_reg (mode1, op2);
1568 + }
1569 +
1570 + emit_move_insn (target, op0);
1571 +
1572 + pat = GEN_FCN (icode) (target, op1, op2);
1573 + if (!pat)
1574 + return 0;
1575 + emit_insn (pat);
1576 + return target;
1577 + }
1578 + case AVR32_BUILTIN_MVRC_W:
1579 + case AVR32_BUILTIN_MVRC_D:
1580 + {
1581 + arg0 = TREE_VALUE (arglist);
1582 + arg1 = TREE_VALUE (TREE_CHAIN (arglist));
1583 + arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
1584 + op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
1585 + op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
1586 + op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
1587 +
1588 + if (fcode == AVR32_BUILTIN_MVRC_W)
1589 + icode = CODE_FOR_mvrcsi;
1590 + else
1591 + icode = CODE_FOR_mvrcdi;
1592 +
1593 + if (!(*insn_data[icode].operand[0].predicate) (op0, SImode))
1594 + {
1595 + error ("Parameter 1 is not a valid coprocessor number.");
1596 + error ("Number should be between 0 and 7.");
1597 + return NULL_RTX;
1598 + }
1599 +
1600 + if (!(*insn_data[icode].operand[1].predicate) (op1, SImode))
1601 + {
1602 + error ("Parameter 2 is not a valid coprocessor register number.");
1603 + error ("Number should be between 0 and 15.");
1604 + return NULL_RTX;
1605 + }
1606 +
1607 + if (GET_CODE (op2) == CONST_INT
1608 + || GET_CODE (op2) == CONST
1609 + || GET_CODE (op2) == SYMBOL_REF || GET_CODE (op2) == LABEL_REF)
1610 + {
1611 + op2 = force_const_mem (insn_data[icode].operand[2].mode, op2);
1612 + }
1613 +
1614 + if (!(*insn_data[icode].operand[2].predicate) (op2, GET_MODE (op2)))
1615 + op2 = copy_to_mode_reg (insn_data[icode].operand[2].mode, op2);
1616 +
1617 +
1618 + pat = GEN_FCN (icode) (op0, op1, op2);
1619 + if (!pat)
1620 + return 0;
1621 + emit_insn (pat);
1622 +
1623 + return NULL_RTX;
1624 + }
1625 + case AVR32_BUILTIN_COP:
1626 + {
1627 + rtx op3, op4;
1628 + tree arg3, arg4;
1629 + icode = CODE_FOR_cop;
1630 + arg0 = TREE_VALUE (arglist);
1631 + arg1 = TREE_VALUE (TREE_CHAIN (arglist));
1632 + arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
1633 + arg3 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arglist))));
1634 + arg4 =
1635 + TREE_VALUE (TREE_CHAIN
1636 + (TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arglist)))));
1637 + op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
1638 + op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
1639 + op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
1640 + op3 = expand_expr (arg3, NULL_RTX, VOIDmode, 0);
1641 + op4 = expand_expr (arg4, NULL_RTX, VOIDmode, 0);
1642 +
1643 + if (!(*insn_data[icode].operand[0].predicate) (op0, SImode))
1644 + {
1645 + error
1646 + ("Parameter 1 to __builtin_cop is not a valid coprocessor number.");
1647 + error ("Number should be between 0 and 7.");
1648 + return NULL_RTX;
1649 + }
1650 +
1651 + if (!(*insn_data[icode].operand[1].predicate) (op1, SImode))
1652 + {
1653 + error
1654 + ("Parameter 2 to __builtin_cop is not a valid coprocessor register number.");
1655 + error ("Number should be between 0 and 15.");
1656 + return NULL_RTX;
1657 + }
1658 +
1659 + if (!(*insn_data[icode].operand[2].predicate) (op2, SImode))
1660 + {
1661 + error
1662 + ("Parameter 3 to __builtin_cop is not a valid coprocessor register number.");
1663 + error ("Number should be between 0 and 15.");
1664 + return NULL_RTX;
1665 + }
1666 +
1667 + if (!(*insn_data[icode].operand[3].predicate) (op3, SImode))
1668 + {
1669 + error
1670 + ("Parameter 4 to __builtin_cop is not a valid coprocessor register number.");
1671 + error ("Number should be between 0 and 15.");
1672 + return NULL_RTX;
1673 + }
1674 +
1675 + if (!(*insn_data[icode].operand[4].predicate) (op4, SImode))
1676 + {
1677 + error
1678 + ("Parameter 5 to __builtin_cop is not a valid coprocessor operation.");
1679 + error ("Number should be between 0 and 127.");
1680 + return NULL_RTX;
1681 + }
1682 +
1683 + pat = GEN_FCN (icode) (op0, op1, op2, op3, op4);
1684 + if (!pat)
1685 + return 0;
1686 + emit_insn (pat);
1687 +
1688 + return target;
1689 + }
1690 + }
1691 +
1692 + for (i = 0, d = bdesc_2arg; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
1693 + if (d->code == fcode)
1694 + return avr32_expand_binop_builtin (d->icode, arglist, target);
1695 +
1696 +
1697 + /* @@@ Should really do something sensible here. */
1698 + return NULL_RTX;
1699 +}
1700 +
1701 +
1702 +/* Handle an "interrupt" or "isr" attribute;
1703 + arguments as in struct attribute_spec.handler. */
1704 +
1705 +static tree
1706 +avr32_handle_isr_attribute (tree * node, tree name, tree args,
1707 + int flags, bool * no_add_attrs)
1708 +{
1709 + if (DECL_P (*node))
1710 + {
1711 + if (TREE_CODE (*node) != FUNCTION_DECL)
1712 + {
1713 + warning ("`%s' attribute only applies to functions",
1714 + IDENTIFIER_POINTER (name));
1715 + *no_add_attrs = true;
1716 + }
1717 + /* FIXME: the argument if any is checked for type attributes; should it
1718 + be checked for decl ones? */
1719 + }
1720 + else
1721 + {
1722 + if (TREE_CODE (*node) == FUNCTION_TYPE
1723 + || TREE_CODE (*node) == METHOD_TYPE)
1724 + {
1725 + if (avr32_isr_value (args) == AVR32_FT_UNKNOWN)
1726 + {
1727 + warning ("`%s' attribute ignored", IDENTIFIER_POINTER (name));
1728 + *no_add_attrs = true;
1729 + }
1730 + }
1731 + else if (TREE_CODE (*node) == POINTER_TYPE
1732 + && (TREE_CODE (TREE_TYPE (*node)) == FUNCTION_TYPE
1733 + || TREE_CODE (TREE_TYPE (*node)) == METHOD_TYPE)
1734 + && avr32_isr_value (args) != AVR32_FT_UNKNOWN)
1735 + {
1736 + *node = build_variant_type_copy (*node);
1737 + TREE_TYPE (*node) = build_type_attribute_variant
1738 + (TREE_TYPE (*node),
1739 + tree_cons (name, args, TYPE_ATTRIBUTES (TREE_TYPE (*node))));
1740 + *no_add_attrs = true;
1741 + }
1742 + else
1743 + {
1744 + /* Possibly pass this attribute on from the type to a decl. */
1745 + if (flags & ((int) ATTR_FLAG_DECL_NEXT
1746 + | (int) ATTR_FLAG_FUNCTION_NEXT
1747 + | (int) ATTR_FLAG_ARRAY_NEXT))
1748 + {
1749 + *no_add_attrs = true;
1750 + return tree_cons (name, args, NULL_TREE);
1751 + }
1752 + else
1753 + {
1754 + warning ("`%s' attribute ignored", IDENTIFIER_POINTER (name));
1755 + }
1756 + }
1757 + }
1758 +
1759 + return NULL_TREE;
1760 +}
1761 +
1762 +/* Handle an attribute requiring a FUNCTION_DECL;
1763 + arguments as in struct attribute_spec.handler. */
1764 +static tree
1765 +avr32_handle_fndecl_attribute (tree * node, tree name,
1766 + tree args ATTRIBUTE_UNUSED,
1767 + int flags ATTRIBUTE_UNUSED,
1768 + bool * no_add_attrs)
1769 +{
1770 + if (TREE_CODE (*node) != FUNCTION_DECL)
1771 + {
1772 + warning ("%qs attribute only applies to functions",
1773 + IDENTIFIER_POINTER (name));
1774 + *no_add_attrs = true;
1775 + }
1776 +
1777 + return NULL_TREE;
1778 +}
1779 +
1780 +
1781 +/* Handle an acall attribute;
1782 + arguments as in struct attribute_spec.handler. */
1783 +
1784 +static tree
1785 +avr32_handle_acall_attribute (tree * node, tree name,
1786 + tree args ATTRIBUTE_UNUSED,
1787 + int flags ATTRIBUTE_UNUSED, bool * no_add_attrs)
1788 +{
1789 + if (TREE_CODE (*node) == FUNCTION_TYPE || TREE_CODE (*node) == METHOD_TYPE)
1790 + {
1791 + warning ("`%s' attribute not yet supported...",
1792 + IDENTIFIER_POINTER (name));
1793 + *no_add_attrs = true;
1794 + return NULL_TREE;
1795 + }
1796 +
1797 + warning ("`%s' attribute only applies to functions",
1798 + IDENTIFIER_POINTER (name));
1799 + *no_add_attrs = true;
1800 + return NULL_TREE;
1801 +}
1802 +
1803 +
1804 +/* Return 0 if the attributes for two types are incompatible, 1 if they
1805 + are compatible, and 2 if they are nearly compatible (which causes a
1806 + warning to be generated). */
1807 +
1808 +static int
1809 +avr32_comp_type_attributes (tree type1, tree type2)
1810 +{
1811 + int acall1, acall2, isr1, isr2, naked1, naked2;
1812 +
1813 + /* Check for mismatch of non-default calling convention. */
1814 + if (TREE_CODE (type1) != FUNCTION_TYPE)
1815 + return 1;
1816 +
1817 + /* Check for mismatched call attributes. */
1818 + acall1 = lookup_attribute ("acall", TYPE_ATTRIBUTES (type1)) != NULL;
1819 + acall2 = lookup_attribute ("acall", TYPE_ATTRIBUTES (type2)) != NULL;
1820 + naked1 = lookup_attribute ("naked", TYPE_ATTRIBUTES (type1)) != NULL;
1821 + naked2 = lookup_attribute ("naked", TYPE_ATTRIBUTES (type2)) != NULL;
1822 + isr1 = lookup_attribute ("isr", TYPE_ATTRIBUTES (type1)) != NULL;
1823 + if (!isr1)
1824 + isr1 = lookup_attribute ("interrupt", TYPE_ATTRIBUTES (type1)) != NULL;
1825 +
1826 + isr2 = lookup_attribute ("isr", TYPE_ATTRIBUTES (type2)) != NULL;
1827 + if (!isr2)
1828 + isr2 = lookup_attribute ("interrupt", TYPE_ATTRIBUTES (type2)) != NULL;
1829 +
1830 + if ((acall1 && isr2)
1831 + || (acall2 && isr1) || (naked1 && isr2) || (naked2 && isr1))
1832 + return 0;
1833 +
1834 + return 1;
1835 +}
1836 +
1837 +
1838 +/* Computes the type of the current function. */
1839 +
1840 +static unsigned long
1841 +avr32_compute_func_type (void)
1842 +{
1843 + unsigned long type = AVR32_FT_UNKNOWN;
1844 + tree a;
1845 + tree attr;
1846 +
1847 + if (TREE_CODE (current_function_decl) != FUNCTION_DECL)
1848 + abort ();
1849 +
1850 + /* Decide if the current function is volatile. Such functions never
1851 + return, and many memory cycles can be saved by not storing register
1852 + values that will never be needed again. This optimization was added to
1853 + speed up context switching in a kernel application. */
1854 + if (optimize > 0
1855 + && TREE_NOTHROW (current_function_decl)
1856 + && TREE_THIS_VOLATILE (current_function_decl))
1857 + type |= AVR32_FT_VOLATILE;
1858 +
1859 + if (cfun->static_chain_decl != NULL)
1860 + type |= AVR32_FT_NESTED;
1861 +
1862 + attr = DECL_ATTRIBUTES (current_function_decl);
1863 +
1864 + a = lookup_attribute ("isr", attr);
1865 + if (a == NULL_TREE)
1866 + a = lookup_attribute ("interrupt", attr);
1867 +
1868 + if (a == NULL_TREE)
1869 + type |= AVR32_FT_NORMAL;
1870 + else
1871 + type |= avr32_isr_value (TREE_VALUE (a));
1872 +
1873 +
1874 + a = lookup_attribute ("acall", attr);
1875 + if (a != NULL_TREE)
1876 + type |= AVR32_FT_ACALL;
1877 +
1878 + a = lookup_attribute ("naked", attr);
1879 + if (a != NULL_TREE)
1880 + type |= AVR32_FT_NAKED;
1881 +
1882 + return type;
1883 +}
1884 +
1885 +/* Returns the type of the current function. */
1886 +
1887 +static unsigned long
1888 +avr32_current_func_type (void)
1889 +{
1890 + if (AVR32_FUNC_TYPE (cfun->machine->func_type) == AVR32_FT_UNKNOWN)
1891 + cfun->machine->func_type = avr32_compute_func_type ();
1892 +
1893 + return cfun->machine->func_type;
1894 +}
1895 +
1896 +/*
1897 + This target hook should return true if we should not pass type solely
1898 + in registers. The file expr.h defines a definition that is usually appropriate,
1899 + refer to expr.h for additional documentation.
1900 +*/
1901 +bool
1902 +avr32_must_pass_in_stack (enum machine_mode mode ATTRIBUTE_UNUSED, tree type)
1903 +{
1904 + if (type && AGGREGATE_TYPE_P (type)
1905 + /* If the alignment is less than the size then pass in the struct on
1906 + the stack. */
1907 + && ((unsigned int) TYPE_ALIGN_UNIT (type) <
1908 + (unsigned int) int_size_in_bytes (type))
1909 + /* If we support unaligned word accesses then structs of size 4 and 8
1910 + can have any alignment and still be passed in registers. */
1911 + && !(TARGET_UNALIGNED_WORD
1912 + && (int_size_in_bytes (type) == 4
1913 + || int_size_in_bytes (type) == 8))
1914 + /* Double word structs need only a word alignment. */
1915 + && !(int_size_in_bytes (type) == 8 && TYPE_ALIGN_UNIT (type) >= 4))
1916 + return true;
1917 +
1918 + if (type && AGGREGATE_TYPE_P (type)
1919 + /* Structs of size 3,5,6,7 are always passed in registers. */
1920 + && (int_size_in_bytes (type) == 3
1921 + || int_size_in_bytes (type) == 5
1922 + || int_size_in_bytes (type) == 6 || int_size_in_bytes (type) == 7))
1923 + return true;
1924 +
1925 +
1926 + return (type && TREE_ADDRESSABLE (type));
1927 +}
1928 +
1929 +
1930 +bool
1931 +avr32_strict_argument_naming (CUMULATIVE_ARGS * ca ATTRIBUTE_UNUSED)
1932 +{
1933 + return true;
1934 +}
1935 +
1936 +/*
1937 + This target hook should return true if an argument at the position indicated
1938 + by cum should be passed by reference. This predicate is queried after target
1939 + independent reasons for being passed by reference, such as TREE_ADDRESSABLE (type).
1940 +
1941 + If the hook returns true, a copy of that argument is made in memory and a
1942 + pointer to the argument is passed instead of the argument itself. The pointer
1943 + is passed in whatever way is appropriate for passing a pointer to that type.
1944 +*/
1945 +bool
1946 +avr32_pass_by_reference (CUMULATIVE_ARGS * cum ATTRIBUTE_UNUSED,
1947 + enum machine_mode mode ATTRIBUTE_UNUSED,
1948 + tree type, bool named ATTRIBUTE_UNUSED)
1949 +{
1950 + return (type && (TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST));
1951 +}
1952 +
1953 +static int
1954 +avr32_arg_partial_bytes (CUMULATIVE_ARGS * pcum ATTRIBUTE_UNUSED,
1955 + enum machine_mode mode ATTRIBUTE_UNUSED,
1956 + tree type ATTRIBUTE_UNUSED,
1957 + bool named ATTRIBUTE_UNUSED)
1958 +{
1959 + return 0;
1960 +}
1961 +
1962 +
1963 +struct gcc_target targetm = TARGET_INITIALIZER;
1964 +
1965 +/*
1966 + Table used to convert from register number in the assembler instructions and
1967 + the register numbers used in gcc.
1968 +*/
1969 +const int avr32_function_arg_reglist[] =
1970 +{
1971 + INTERNAL_REGNUM (12),
1972 + INTERNAL_REGNUM (11),
1973 + INTERNAL_REGNUM (10),
1974 + INTERNAL_REGNUM (9),
1975 + INTERNAL_REGNUM (8)
1976 +};
1977 +
1978 +rtx avr32_compare_op0 = NULL_RTX;
1979 +rtx avr32_compare_op1 = NULL_RTX;
1980 +rtx avr32_compare_operator = NULL_RTX;
1981 +rtx avr32_acc_cache = NULL_RTX;
1982 +
1983 +/*
1984 + Returns nonzero if it is allowed to store a value of mode mode in hard
1985 + register number regno.
1986 +*/
1987 +int
1988 +avr32_hard_regno_mode_ok (int regnr, enum machine_mode mode)
1989 +{
1990 + /* We allow only float modes in the fp-registers */
1991 + if (regnr >= FIRST_FP_REGNUM
1992 + && regnr <= LAST_FP_REGNUM && GET_MODE_CLASS (mode) != MODE_FLOAT)
1993 + {
1994 + return 0;
1995 + }
1996 +
1997 + switch (mode)
1998 + {
1999 + case DImode: /* long long */
2000 + case DFmode: /* double */
2001 + case SCmode: /* __complex__ float */
2002 + case CSImode: /* __complex__ int */
2003 + if (regnr < 4)
2004 + { /* long long int not supported in r12, sp, lr
2005 + or pc. */
2006 + return 0;
2007 + }
2008 + else
2009 + {
2010 +	  if (regnr % 2)	/* long long int has to be referred in even
2011 + registers. */
2012 + return 0;
2013 + else
2014 + return 1;
2015 + }
2016 + case CDImode: /* __complex__ long long */
2017 + case DCmode: /* __complex__ double */
2018 + case TImode: /* 16 bytes */
2019 + if (regnr < 7)
2020 + return 0;
2021 + else if (regnr % 2)
2022 + return 0;
2023 + else
2024 + return 1;
2025 + default:
2026 + return 1;
2027 + }
2028 +}
2029 +
2030 +
2031 +int
2032 +avr32_rnd_operands (rtx add, rtx shift)
2033 +{
2034 + if (GET_CODE (shift) == CONST_INT &&
2035 + GET_CODE (add) == CONST_INT && INTVAL (shift) > 0)
2036 + {
2037 + if ((1 << (INTVAL (shift) - 1)) == INTVAL (add))
2038 + return TRUE;
2039 + }
2040 +
2041 + return FALSE;
2042 +}
2043 +
2044 +
2045 +
2046 +int
2047 +avr32_const_ok_for_constraint_p (HOST_WIDE_INT value, char c, const char *str)
2048 +{
2049 + switch (c)
2050 + {
2051 + case 'K':
2052 + case 'I':
2053 + {
2054 + HOST_WIDE_INT min_value = 0, max_value = 0;
2055 + char size_str[3];
2056 + int const_size;
2057 +
2058 + size_str[0] = str[2];
2059 + size_str[1] = str[3];
2060 + size_str[2] = '\0';
2061 + const_size = atoi (size_str);
2062 +
2063 + if (toupper (str[1]) == 'U')
2064 + {
2065 + min_value = 0;
2066 + max_value = (1 << const_size) - 1;
2067 + }
2068 + else if (toupper (str[1]) == 'S')
2069 + {
2070 + min_value = -(1 << (const_size - 1));
2071 + max_value = (1 << (const_size - 1)) - 1;
2072 + }
2073 +
2074 + if (c == 'I')
2075 + {
2076 + value = -value;
2077 + }
2078 +
2079 + if (value >= min_value && value <= max_value)
2080 + {
2081 + return 1;
2082 + }
2083 + break;
2084 + }
2085 + case 'M':
2086 + return avr32_mask_upper_bits_operand (GEN_INT (value), VOIDmode);
2087 + }
2088 +
2089 + return 0;
2090 +}
2091 +
2092 +
2093 +/*Compute mask of which floating-point registers need saving upon
2094 + entry to this function*/
2095 +static unsigned long
2096 +avr32_compute_save_fp_reg_mask (void)
2097 +{
2098 + unsigned long func_type = avr32_current_func_type ();
2099 + unsigned int save_reg_mask = 0;
2100 + unsigned int reg;
2101 + unsigned int max_reg = 7;
2102 + int save_all_call_used_regs = FALSE;
2103 +
2104 + /* This only applies for hardware floating-point implementation. */
2105 + if (!TARGET_HARD_FLOAT)
2106 + return 0;
2107 +
2108 + if (IS_INTERRUPT (func_type))
2109 + {
2110 +
2111 + /* Interrupt functions must not corrupt any registers, even call
2112 + clobbered ones. If this is a leaf function we can just examine the
2113 + registers used by the RTL, but otherwise we have to assume that
2114 + whatever function is called might clobber anything, and so we have
2115 + to save all the call-clobbered registers as well. */
2116 + max_reg = 13;
2117 + save_all_call_used_regs = !current_function_is_leaf;
2118 + }
2119 +
2120 +  /* All registers used must be saved */
2121 + for (reg = 0; reg <= max_reg; reg++)
2122 + if (regs_ever_live[INTERNAL_FP_REGNUM (reg)]
2123 + || (save_all_call_used_regs
2124 + && call_used_regs[INTERNAL_FP_REGNUM (reg)]))
2125 + save_reg_mask |= (1 << reg);
2126 +
2127 + return save_reg_mask;
2128 +}
2129 +
2130 +/*Compute mask of registers which needs saving upon function entry */
2131 +static unsigned long
2132 +avr32_compute_save_reg_mask (int push)
2133 +{
2134 + unsigned long func_type;
2135 + unsigned int save_reg_mask = 0;
2136 + unsigned int reg;
2137 +
2138 + func_type = avr32_current_func_type ();
2139 +
2140 + if (IS_INTERRUPT (func_type))
2141 + {
2142 + unsigned int max_reg = 12;
2143 +
2144 +
2145 + /* Get the banking scheme for the interrupt */
2146 + switch (func_type)
2147 + {
2148 + case AVR32_FT_ISR_FULL:
2149 + max_reg = 0;
2150 + break;
2151 + case AVR32_FT_ISR_HALF:
2152 + max_reg = 7;
2153 + break;
2154 + case AVR32_FT_ISR_NONE:
2155 + max_reg = 12;
2156 + break;
2157 + }
2158 +
2159 + /* Interrupt functions must not corrupt any registers, even call
2160 + clobbered ones. If this is a leaf function we can just examine the
2161 + registers used by the RTL, but otherwise we have to assume that
2162 + whatever function is called might clobber anything, and so we have
2163 + to save all the call-clobbered registers as well. */
2164 +
2165 + /* Need not push the registers r8-r12 for AVR32A architectures, as this
2166 +	 is automatically done in hardware. We also do not have any shadow
2167 + registers. */
2168 + if (avr32_arch->uarch_type == UARCH_TYPE_AVR32A)
2169 + {
2170 + max_reg = 7;
2171 + func_type = AVR32_FT_ISR_NONE;
2172 + }
2173 +
2174 +      /* All registers which are used and are not shadowed must be saved */
2175 + for (reg = 0; reg <= max_reg; reg++)
2176 + if (regs_ever_live[INTERNAL_REGNUM (reg)]
2177 + || (!current_function_is_leaf
2178 + && call_used_regs[INTERNAL_REGNUM (reg)]))
2179 + save_reg_mask |= (1 << reg);
2180 +
2181 + /* Check LR */
2182 + if ((regs_ever_live[LR_REGNUM] || !current_function_is_leaf || frame_pointer_needed) && (func_type == AVR32_FT_ISR_NONE) /* Only
2183 + non-shadowed
2184 + register
2185 + models
2186 + */ )
2187 + save_reg_mask |= (1 << ASM_REGNUM (LR_REGNUM));
2188 +
2189 + /* Make sure that the GOT register is pushed. */
2190 + if (max_reg >= ASM_REGNUM (PIC_OFFSET_TABLE_REGNUM)
2191 + && current_function_uses_pic_offset_table)
2192 + save_reg_mask |= (1 << ASM_REGNUM (PIC_OFFSET_TABLE_REGNUM));
2193 +
2194 + }
2195 + else
2196 + {
2197 + int use_pushm = optimize_size;
2198 +
2199 + /* In the normal case we only need to save those registers which are
2200 + call saved and which are used by this function. */
2201 + for (reg = 0; reg <= 7; reg++)
2202 + if (regs_ever_live[INTERNAL_REGNUM (reg)]
2203 + && !call_used_regs[INTERNAL_REGNUM (reg)])
2204 + save_reg_mask |= (1 << reg);
2205 +
2206 + /* Make sure that the GOT register is pushed. */
2207 + if (current_function_uses_pic_offset_table)
2208 + save_reg_mask |= (1 << ASM_REGNUM (PIC_OFFSET_TABLE_REGNUM));
2209 +
2210 +
2211 + /* If we optimize for size and do not have anonymous arguments: use
2212 + popm/pushm always */
2213 + if (use_pushm)
2214 + {
2215 + if ((save_reg_mask & (1 << 0))
2216 + || (save_reg_mask & (1 << 1))
2217 + || (save_reg_mask & (1 << 2)) || (save_reg_mask & (1 << 3)))
2218 + save_reg_mask |= 0xf;
2219 +
2220 + if ((save_reg_mask & (1 << 4))
2221 + || (save_reg_mask & (1 << 5))
2222 + || (save_reg_mask & (1 << 6)) || (save_reg_mask & (1 << 7)))
2223 + save_reg_mask |= 0xf0;
2224 +
2225 + if ((save_reg_mask & (1 << 8)) || (save_reg_mask & (1 << 9)))
2226 + save_reg_mask |= 0x300;
2227 + }
2228 +
2229 +
2230 + /* Check LR */
2231 + if ((regs_ever_live[LR_REGNUM] || !current_function_is_leaf ||
2232 + (optimize_size && save_reg_mask) || frame_pointer_needed))
2233 + {
2234 + if (push)
2235 + {
2236 + /* Push/Pop LR */
2237 + save_reg_mask |= (1 << ASM_REGNUM (LR_REGNUM));
2238 + }
2239 + else
2240 + {
2241 + /* Pop PC */
2242 + save_reg_mask |= (1 << ASM_REGNUM (PC_REGNUM));
2243 + }
2244 + }
2245 + }
2246 +
2247 + return save_reg_mask;
2248 +}
2249 +
2250 +/*Compute total size in bytes of all saved registers */
2251 +static int
2252 +avr32_get_reg_mask_size (int reg_mask)
2253 +{
2254 + int reg, size;
2255 + size = 0;
2256 +
2257 + for (reg = 0; reg <= 15; reg++)
2258 + if (reg_mask & (1 << reg))
2259 + size += 4;
2260 +
2261 + return size;
2262 +}
2263 +
2264 +/*Get a register from one of the registers which are saved onto the stack
2265 + upon function entry */
2266 +
2267 +static int
2268 +avr32_get_saved_reg (int save_reg_mask)
2269 +{
2270 + unsigned int reg;
2271 +
2272 + /* Find the first register which is saved in the saved_reg_mask */
2273 + for (reg = 0; reg <= 15; reg++)
2274 + if (save_reg_mask & (1 << reg))
2275 + return reg;
2276 +
2277 + return -1;
2278 +}
2279 +
2280 +/* Return 1 if it is possible to return using a single instruction. */
2281 +int
2282 +avr32_use_return_insn (int iscond)
2283 +{
2284 + unsigned int func_type = avr32_current_func_type ();
2285 + unsigned long saved_int_regs;
2286 + unsigned long saved_fp_regs;
2287 +
2288 + /* Never use a return instruction before reload has run. */
2289 + if (!reload_completed)
2290 + return 0;
2291 +
2292 + /* Must adjust the stack for vararg functions. */
2293 + if (current_function_args_info.uses_anonymous_args)
2294 + return 0;
2295 +
2296 +  /* If there is a stack adjustment. */
2297 + if (get_frame_size ())
2298 + return 0;
2299 +
2300 + saved_int_regs = avr32_compute_save_reg_mask (TRUE);
2301 + saved_fp_regs = avr32_compute_save_fp_reg_mask ();
2302 +
2303 + /* Functions which have saved fp-regs on the stack can not be performed in
2304 + one instruction */
2305 + if (saved_fp_regs)
2306 + return 0;
2307 +
2308 + /* Conditional returns can not be performed in one instruction if we need
2309 + to restore registers from the stack */
2310 + if (iscond && saved_int_regs)
2311 + return 0;
2312 +
2313 + /* Conditional return can not be used for interrupt handlers. */
2314 + if (iscond && IS_INTERRUPT (func_type))
2315 + return 0;
2316 +
2317 +  /* For interrupt handlers which need to pop registers */
2318 + if (saved_int_regs && IS_INTERRUPT (func_type))
2319 + return 0;
2320 +
2321 +
2322 + /* If there are saved registers but the LR isn't saved, then we need two
2323 + instructions for the return. */
2324 + if (saved_int_regs && !(saved_int_regs & (1 << ASM_REGNUM (LR_REGNUM))))
2325 + return 0;
2326 +
2327 +
2328 + return 1;
2329 +}
2330 +
2331 +
2332 +/*Generate some function prologue info in the assembly file*/
2333 +
2334 +void
2335 +avr32_target_asm_function_prologue (FILE * f, HOST_WIDE_INT frame_size)
2336 +{
2337 + if (IS_NAKED (avr32_current_func_type ()))
2338 + fprintf (f,
2339 + "\t# Function is naked: Prologue and epilogue provided by programmer\n");
2340 +
2341 + if (IS_INTERRUPT (avr32_current_func_type ()))
2342 + {
2343 + switch (avr32_current_func_type ())
2344 + {
2345 + case AVR32_FT_ISR_FULL:
2346 + fprintf (f,
2347 + "\t# Interrupt Function: Fully shadowed register file\n");
2348 + break;
2349 + case AVR32_FT_ISR_HALF:
2350 + fprintf (f,
2351 + "\t# Interrupt Function: Half shadowed register file\n");
2352 + break;
2353 + default:
2354 + case AVR32_FT_ISR_NONE:
2355 + fprintf (f, "\t# Interrupt Function: No shadowed register file\n");
2356 + break;
2357 + }
2358 + }
2359 +
2360 +
2361 + fprintf (f, "\t# args = %i, frame = %li, pretend = %i\n",
2362 + current_function_args_size, frame_size,
2363 + current_function_pretend_args_size);
2364 +
2365 + fprintf (f, "\t# frame_needed = %i, leaf_function = %i\n",
2366 + frame_pointer_needed, current_function_is_leaf);
2367 +
2368 + fprintf (f, "\t# uses_anonymous_args = %i\n",
2369 + current_function_args_info.uses_anonymous_args);
2370 +}
2371 +
2372 +
2373 +/* Generate and emit an insn that we will recognize as a pushm or stm.
2374 + Unfortunately, since this insn does not reflect very well the actual
2375 + semantics of the operation, we need to annotate the insn for the benefit
2376 + of DWARF2 frame unwind information. */
2377 +
2378 +int avr32_convert_to_reglist16 (int reglist8_vect);
2379 +
2380 +static rtx
2381 +emit_multi_reg_push (int reglist, int usePUSHM)
2382 +{
2383 + rtx insn;
2384 + rtx dwarf;
2385 + rtx tmp;
2386 + rtx reg;
2387 + int i;
2388 + int nr_regs;
2389 + int index = 0;
2390 +
2391 + if (usePUSHM)
2392 + {
2393 + insn = emit_insn (gen_pushm (gen_rtx_CONST_INT (SImode, reglist)));
2394 + reglist = avr32_convert_to_reglist16 (reglist);
2395 + }
2396 + else
2397 + {
2398 + insn = emit_insn (gen_stm (stack_pointer_rtx,
2399 + gen_rtx_CONST_INT (SImode, reglist),
2400 + gen_rtx_CONST_INT (SImode, 1)));
2401 + }
2402 +
2403 + nr_regs = avr32_get_reg_mask_size (reglist) / 4;
2404 + dwarf = gen_rtx_SEQUENCE (VOIDmode, rtvec_alloc (nr_regs + 1));
2405 +
2406 + for (i = 15; i >= 0; i--)
2407 + {
2408 + if (reglist & (1 << i))
2409 + {
2410 + reg = gen_rtx_REG (SImode, INTERNAL_REGNUM (i));
2411 + tmp = gen_rtx_SET (VOIDmode,
2412 + gen_rtx_MEM (SImode,
2413 + plus_constant (stack_pointer_rtx,
2414 + 4 * index)), reg);
2415 + RTX_FRAME_RELATED_P (tmp) = 1;
2416 + XVECEXP (dwarf, 0, 1 + index++) = tmp;
2417 + }
2418 + }
2419 +
2420 + tmp = gen_rtx_SET (SImode,
2421 + stack_pointer_rtx,
2422 + gen_rtx_PLUS (SImode,
2423 + stack_pointer_rtx,
2424 + GEN_INT (-4 * nr_regs)));
2425 + RTX_FRAME_RELATED_P (tmp) = 1;
2426 + XVECEXP (dwarf, 0, 0) = tmp;
2427 + REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR, dwarf,
2428 + REG_NOTES (insn));
2429 + return insn;
2430 +}
2431 +
2432 +
2433 +static rtx
2434 +emit_multi_fp_reg_push (int reglist)
2435 +{
2436 + rtx insn;
2437 + rtx dwarf;
2438 + rtx tmp;
2439 + rtx reg;
2440 + int i;
2441 + int nr_regs;
2442 + int index = 0;
2443 +
2444 + insn = emit_insn (gen_stm_fp (stack_pointer_rtx,
2445 + gen_rtx_CONST_INT (SImode, reglist),
2446 + gen_rtx_CONST_INT (SImode, 1)));
2447 +
2448 + nr_regs = avr32_get_reg_mask_size (reglist) / 4;
2449 + dwarf = gen_rtx_SEQUENCE (VOIDmode, rtvec_alloc (nr_regs + 1));
2450 +
2451 + for (i = 15; i >= 0; i--)
2452 + {
2453 + if (reglist & (1 << i))
2454 + {
2455 + reg = gen_rtx_REG (SImode, INTERNAL_FP_REGNUM (i));
2456 + tmp = gen_rtx_SET (VOIDmode,
2457 + gen_rtx_MEM (SImode,
2458 + plus_constant (stack_pointer_rtx,
2459 + 4 * index)), reg);
2460 + RTX_FRAME_RELATED_P (tmp) = 1;
2461 + XVECEXP (dwarf, 0, 1 + index++) = tmp;
2462 + }
2463 + }
2464 +
2465 + tmp = gen_rtx_SET (SImode,
2466 + stack_pointer_rtx,
2467 + gen_rtx_PLUS (SImode,
2468 + stack_pointer_rtx,
2469 + GEN_INT (-4 * nr_regs)));
2470 + RTX_FRAME_RELATED_P (tmp) = 1;
2471 + XVECEXP (dwarf, 0, 0) = tmp;
2472 + REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR, dwarf,
2473 + REG_NOTES (insn));
2474 + return insn;
2475 +}
2476 +
2477 +rtx
2478 +avr32_gen_load_multiple (rtx * regs, int count, rtx from,
2479 + int write_back, int in_struct_p, int scalar_p)
2480 +{
2481 +
2482 + rtx result;
2483 + int i = 0, j;
2484 +
2485 + result =
2486 + gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (count + (write_back ? 1 : 0)));
2487 +
2488 + if (write_back)
2489 + {
2490 + XVECEXP (result, 0, 0)
2491 + = gen_rtx_SET (GET_MODE (from), from,
2492 + plus_constant (from, count * 4));
2493 + i = 1;
2494 + count++;
2495 + }
2496 +
2497 +
2498 + for (j = 0; i < count; i++, j++)
2499 + {
2500 + rtx unspec;
2501 + rtx mem = gen_rtx_MEM (SImode, plus_constant (from, j * 4));
2502 + MEM_IN_STRUCT_P (mem) = in_struct_p;
2503 + MEM_SCALAR_P (mem) = scalar_p;
2504 + unspec = gen_rtx_UNSPEC (VOIDmode, gen_rtvec (1, mem), UNSPEC_LDM);
2505 + XVECEXP (result, 0, i) = gen_rtx_SET (VOIDmode, regs[j], unspec);
2506 + }
2507 +
2508 + return result;
2509 +}
2510 +
2511 +
2512 +rtx
2513 +avr32_gen_store_multiple (rtx * regs, int count, rtx to,
2514 + int in_struct_p, int scalar_p)
2515 +{
2516 + rtx result;
2517 + int i = 0, j;
2518 +
2519 + result = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (count));
2520 +
2521 + for (j = 0; i < count; i++, j++)
2522 + {
2523 + rtx mem = gen_rtx_MEM (SImode, plus_constant (to, j * 4));
2524 + MEM_IN_STRUCT_P (mem) = in_struct_p;
2525 + MEM_SCALAR_P (mem) = scalar_p;
2526 + XVECEXP (result, 0, i)
2527 + = gen_rtx_SET (VOIDmode, mem,
2528 + gen_rtx_UNSPEC (VOIDmode,
2529 + gen_rtvec (1, regs[j]),
2530 + UNSPEC_STORE_MULTIPLE));
2531 + }
2532 +
2533 + return result;
2534 +}
2535 +
2536 +
2537 +/* Move a block of memory if it is word aligned or we support unaligned
2538 + word memory accesses. The size must be maximum 64 bytes. */
2539 +
2540 +int
2541 +avr32_gen_movmemsi (rtx * operands)
2542 +{
2543 + HOST_WIDE_INT bytes_to_go;
2544 + rtx src, dst;
2545 + rtx st_src, st_dst;
2546 + int ptr_offset = 0;
2547 + int block_size;
2548 + int dst_in_struct_p, src_in_struct_p;
2549 + int dst_scalar_p, src_scalar_p;
2550 + int unaligned;
2551 +
2552 + if (GET_CODE (operands[2]) != CONST_INT
2553 + || GET_CODE (operands[3]) != CONST_INT
2554 + || INTVAL (operands[2]) > 64
2555 + || ((INTVAL (operands[3]) & 3) && !TARGET_UNALIGNED_WORD))
2556 + return 0;
2557 +
2558 + unaligned = (INTVAL (operands[3]) & 3) != 0;
2559 +
2560 + block_size = 4;
2561 +
2562 + st_dst = XEXP (operands[0], 0);
2563 + st_src = XEXP (operands[1], 0);
2564 +
2565 + dst_in_struct_p = MEM_IN_STRUCT_P (operands[0]);
2566 + dst_scalar_p = MEM_SCALAR_P (operands[0]);
2567 + src_in_struct_p = MEM_IN_STRUCT_P (operands[1]);
2568 + src_scalar_p = MEM_SCALAR_P (operands[1]);
2569 +
2570 + dst = copy_to_mode_reg (SImode, st_dst);
2571 + src = copy_to_mode_reg (SImode, st_src);
2572 +
2573 + bytes_to_go = INTVAL (operands[2]);
2574 +
2575 + while (bytes_to_go)
2576 + {
2577 + enum machine_mode move_mode;
2578 + /* Seems to be a problem with reloads for the movti pattern so this is
2579 + disabled until that problem is resolved */
2580 +
2581 + /* if ( bytes_to_go >= GET_MODE_SIZE(TImode) ) move_mode = TImode; else
2582 + */
2583 + if ((bytes_to_go >= GET_MODE_SIZE (DImode)) && !unaligned)
2584 + move_mode = DImode;
2585 + else if (bytes_to_go >= GET_MODE_SIZE (SImode))
2586 + move_mode = SImode;
2587 + else
2588 + move_mode = QImode;
2589 +
2590 + {
2591 + rtx dst_mem = gen_rtx_MEM (move_mode,
2592 + gen_rtx_PLUS (SImode, dst,
2593 + GEN_INT (ptr_offset)));
2594 + rtx src_mem = gen_rtx_MEM (move_mode,
2595 + gen_rtx_PLUS (SImode, src,
2596 + GEN_INT (ptr_offset)));
2597 + ptr_offset += GET_MODE_SIZE (move_mode);
2598 + bytes_to_go -= GET_MODE_SIZE (move_mode);
2599 +
2600 + MEM_IN_STRUCT_P (dst_mem) = dst_in_struct_p;
2601 + MEM_SCALAR_P (dst_mem) = dst_scalar_p;
2602 +
2603 + MEM_IN_STRUCT_P (src_mem) = src_in_struct_p;
2604 + MEM_SCALAR_P (src_mem) = src_scalar_p;
2605 + emit_move_insn (dst_mem, src_mem);
2606 +
2607 + }
2608 + }
2609 +
2610 + return 1;
2611 +}
2612 +
2613 +
2614 +
2615 +/*Expand the prologue instruction*/
2616 +void
2617 +avr32_expand_prologue (void)
2618 +{
2619 + rtx insn, dwarf;
2620 + unsigned long saved_reg_mask, saved_fp_reg_mask;
2621 + int reglist8 = 0;
2622 +
2623 +  /* Naked functions do not have a prologue */
2624 + if (IS_NAKED (avr32_current_func_type ()))
2625 + return;
2626 +
2627 + saved_reg_mask = avr32_compute_save_reg_mask (TRUE);
2628 +
2629 + if (saved_reg_mask)
2630 + {
2631 + /* Must push used registers */
2632 +
2633 +      /* Should we use PUSHM or STM? */
2634 + int usePUSHM = TRUE;
2635 + reglist8 = 0;
2636 + if (((saved_reg_mask & (1 << 0)) ||
2637 + (saved_reg_mask & (1 << 1)) ||
2638 + (saved_reg_mask & (1 << 2)) || (saved_reg_mask & (1 << 3))))
2639 + {
2640 + /* One of R0-R3 should at least be pushed */
2641 + if (((saved_reg_mask & (1 << 0)) &&
2642 + (saved_reg_mask & (1 << 1)) &&
2643 + (saved_reg_mask & (1 << 2)) && (saved_reg_mask & (1 << 3))))
2644 + {
2645 + /* All should be pushed */
2646 + reglist8 |= 0x01;
2647 + }
2648 + else
2649 + {
2650 + usePUSHM = FALSE;
2651 + }
2652 + }
2653 +
2654 + if (((saved_reg_mask & (1 << 4)) ||
2655 + (saved_reg_mask & (1 << 5)) ||
2656 + (saved_reg_mask & (1 << 6)) || (saved_reg_mask & (1 << 7))))
2657 + {
2658 + /* One of R4-R7 should at least be pushed */
2659 + if (((saved_reg_mask & (1 << 4)) &&
2660 + (saved_reg_mask & (1 << 5)) &&
2661 + (saved_reg_mask & (1 << 6)) && (saved_reg_mask & (1 << 7))))
2662 + {
2663 + if (usePUSHM)
2664 + /* All should be pushed */
2665 + reglist8 |= 0x02;
2666 + }
2667 + else
2668 + {
2669 + usePUSHM = FALSE;
2670 + }
2671 + }
2672 +
2673 + if (((saved_reg_mask & (1 << 8)) || (saved_reg_mask & (1 << 9))))
2674 + {
2675 + /* One of R8-R9 should at least be pushed */
2676 + if (((saved_reg_mask & (1 << 8)) && (saved_reg_mask & (1 << 9))))
2677 + {
2678 + if (usePUSHM)
2679 + /* All should be pushed */
2680 + reglist8 |= 0x04;
2681 + }
2682 + else
2683 + {
2684 + usePUSHM = FALSE;
2685 + }
2686 + }
2687 +
2688 + if (saved_reg_mask & (1 << 10))
2689 + reglist8 |= 0x08;
2690 +
2691 + if (saved_reg_mask & (1 << 11))
2692 + reglist8 |= 0x10;
2693 +
2694 + if (saved_reg_mask & (1 << 12))
2695 + reglist8 |= 0x20;
2696 +
2697 + if (saved_reg_mask & (1 << ASM_REGNUM (LR_REGNUM)))
2698 + {
2699 + /* Push LR */
2700 + reglist8 |= 0x40;
2701 + }
2702 +
2703 + if (usePUSHM)
2704 + {
2705 + insn = emit_multi_reg_push (reglist8, TRUE);
2706 + }
2707 + else
2708 + {
2709 + insn = emit_multi_reg_push (saved_reg_mask, FALSE);
2710 + }
2711 + RTX_FRAME_RELATED_P (insn) = 1;
2712 +
2713 + /* Prevent this instruction from being scheduled after any other
2714 + instructions. */
2715 + emit_insn (gen_blockage ());
2716 + }
2717 +
2718 + saved_fp_reg_mask = avr32_compute_save_fp_reg_mask ();
2719 + if (saved_fp_reg_mask)
2720 + {
2721 + insn = emit_multi_fp_reg_push (saved_fp_reg_mask);
2722 + RTX_FRAME_RELATED_P (insn) = 1;
2723 +
2724 + /* Prevent this instruction from being scheduled after any other
2725 + instructions. */
2726 + emit_insn (gen_blockage ());
2727 + }
2728 +
2729 + /* Set frame pointer */
2730 + if (frame_pointer_needed)
2731 + {
2732 + insn = emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
2733 + RTX_FRAME_RELATED_P (insn) = 1;
2734 + }
2735 +
2736 + if (get_frame_size () > 0)
2737 + {
2738 + if (avr32_const_ok_for_constraint_p (get_frame_size (), 'K', "Ks21"))
2739 + {
2740 + insn = emit_insn (gen_rtx_SET (SImode,
2741 + stack_pointer_rtx,
2742 + gen_rtx_PLUS (SImode,
2743 + stack_pointer_rtx,
2744 + gen_rtx_CONST_INT
2745 + (SImode,
2746 + -get_frame_size
2747 + ()))));
2748 + RTX_FRAME_RELATED_P (insn) = 1;
2749 + }
2750 + else
2751 + {
2752 + /* Immediate is larger than k21 We must either check if we can use
2753 +	     one of the pushed registers as temporary storage or we must
2754 + make us a temp register by pushing a register to the stack. */
2755 + rtx temp_reg, const_pool_entry, insn;
2756 + if (saved_reg_mask)
2757 + {
2758 + temp_reg =
2759 + gen_rtx_REG (SImode,
2760 + INTERNAL_REGNUM (avr32_get_saved_reg
2761 + (saved_reg_mask)));
2762 + }
2763 + else
2764 + {
2765 + temp_reg = gen_rtx_REG (SImode, INTERNAL_REGNUM (7));
2766 + emit_move_insn (gen_rtx_MEM
2767 + (SImode,
2768 + gen_rtx_PRE_DEC (SImode, stack_pointer_rtx)),
2769 + temp_reg);
2770 + }
2771 +
2772 + const_pool_entry =
2773 + force_const_mem (SImode,
2774 + gen_rtx_CONST_INT (SImode, get_frame_size ()));
2775 + emit_move_insn (temp_reg, const_pool_entry);
2776 +
2777 + insn = emit_insn (gen_rtx_SET (SImode,
2778 + stack_pointer_rtx,
2779 + gen_rtx_MINUS (SImode,
2780 + stack_pointer_rtx,
2781 + temp_reg)));
2782 +
2783 + dwarf = gen_rtx_SET (VOIDmode, stack_pointer_rtx,
2784 + gen_rtx_PLUS (SImode, stack_pointer_rtx,
2785 + GEN_INT (-get_frame_size ())));
2786 + REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
2787 + dwarf, REG_NOTES (insn));
2788 + RTX_FRAME_RELATED_P (insn) = 1;
2789 +
2790 + if (!saved_reg_mask)
2791 + {
2792 + insn =
2793 + emit_move_insn (temp_reg,
2794 + gen_rtx_MEM (SImode,
2795 + gen_rtx_POST_INC (SImode,
2796 + gen_rtx_REG
2797 + (SImode,
2798 + 13))));
2799 + }
2800 +
2801 + /* Mark the temp register as dead */
2802 + REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_DEAD, temp_reg,
2803 + REG_NOTES (insn));
2804 +
2805 +
2806 + }
2807 +
2808 +      /* Prevent the stack adjustment from being scheduled after any
2809 + instructions using the frame pointer. */
2810 + emit_insn (gen_blockage ());
2811 + }
2812 +
2813 + /* Load GOT */
2814 + if (flag_pic)
2815 + {
2816 + avr32_load_pic_register ();
2817 +
2818 + /* gcc does not know that load or call instructions might use the pic
2819 + register so it might schedule these instructions before the loading
2820 + of the pic register. To avoid this emit a barrier for now. TODO!
2821 + Find out a better way to let gcc know which instructions might use
2822 + the pic register. */
2823 + emit_insn (gen_blockage ());
2824 + }
2825 + return;
2826 +}
2827 +
2828 +void
2829 +avr32_set_return_address (rtx source)
2830 +{
2831 + rtx addr;
2832 + unsigned long saved_regs;
2833 +
2834 + saved_regs = avr32_compute_save_reg_mask (TRUE);
2835 +
2836 + if (!(saved_regs & (1 << ASM_REGNUM (LR_REGNUM))))
2837 + emit_move_insn (gen_rtx_REG (Pmode, LR_REGNUM), source);
2838 + else
2839 + {
2840 + if (frame_pointer_needed)
2841 + addr = gen_rtx_REG (Pmode, FRAME_POINTER_REGNUM);
2842 + else
2843 + /* FIXME: Need to use scratch register if frame is large */
2844 + addr = plus_constant (stack_pointer_rtx, get_frame_size ());
2845 +
2846 + emit_move_insn (gen_rtx_MEM (Pmode, addr), source);
2847 + }
2848 +}
2849 +
2850 +
2851 +
2852 +/* Return the length of INSN. LENGTH is the initial length computed by
2853 + attributes in the machine-description file. */
2854 +
2855 +int
2856 +avr32_adjust_insn_length (rtx insn ATTRIBUTE_UNUSED,
2857 + int length ATTRIBUTE_UNUSED)
2858 +{
2859 + return length;
2860 +}
2861 +
2862 +void
2863 +avr32_output_return_instruction (int single_ret_inst ATTRIBUTE_UNUSED,
2864 + int iscond ATTRIBUTE_UNUSED,
2865 + rtx cond ATTRIBUTE_UNUSED, rtx r12_imm)
2866 +{
2867 +
2868 + unsigned long saved_reg_mask, saved_fp_reg_mask;
2869 + int insert_ret = TRUE;
2870 + int reglist8 = 0;
2871 + int stack_adjustment = get_frame_size ();
2872 + unsigned int func_type = avr32_current_func_type ();
2873 + FILE *f = asm_out_file;
2874 +
2875 +  /* Naked functions do not have an epilogue */
2876 + if (IS_NAKED (func_type))
2877 + return;
2878 +
2879 + saved_fp_reg_mask = avr32_compute_save_fp_reg_mask ();
2880 +
2881 + saved_reg_mask = avr32_compute_save_reg_mask (FALSE);
2882 +
2883 + /* Reset frame pointer */
2884 + if (stack_adjustment > 0)
2885 + {
2886 + if (avr32_const_ok_for_constraint_p (stack_adjustment, 'I', "Is21"))
2887 + {
2888 + fprintf (f, "\tsub sp, %i # Reset Frame Pointer\n",
2889 + -stack_adjustment);
2890 + }
2891 + else
2892 + {
2893 + /* TODO! Is it safe to use r8 as scratch?? */
2894 + fprintf (f, "\tmov r8, lo(%i) # Reset Frame Pointer\n",
2895 + -stack_adjustment);
2896 + fprintf (f, "\torh r8, hi(%i) # Reset Frame Pointer\n",
2897 + -stack_adjustment);
2898 + fprintf (f, "\tadd sp,r8 # Reset Frame Pointer\n");
2899 + }
2900 + }
2901 +
2902 + if (saved_fp_reg_mask)
2903 + {
2904 + char reglist[64]; /* 64 bytes should be enough... */
2905 + avr32_make_fp_reglist_w (saved_fp_reg_mask, (char *) reglist);
2906 + fprintf (f, "\tldcm.w\tcp0, sp++, %s\n", reglist);
2907 + if (saved_fp_reg_mask & ~0xff)
2908 + {
2909 + saved_fp_reg_mask &= ~0xff;
2910 + avr32_make_fp_reglist_d (saved_fp_reg_mask, (char *) reglist);
2911 + fprintf (f, "\tldcm.d\tcp0, sp++, %s\n", reglist);
2912 + }
2913 + }
2914 +
2915 + if (saved_reg_mask)
2916 + {
2917 + /* Must pop used registers */
2918 +
2919 + /* Should we use POPM or LDM? */
2920 + int usePOPM = TRUE;
2921 + if (((saved_reg_mask & (1 << 0)) ||
2922 + (saved_reg_mask & (1 << 1)) ||
2923 + (saved_reg_mask & (1 << 2)) || (saved_reg_mask & (1 << 3))))
2924 + {
2925 + /* One of R0-R3 should at least be popped */
2926 + if (((saved_reg_mask & (1 << 0)) &&
2927 + (saved_reg_mask & (1 << 1)) &&
2928 + (saved_reg_mask & (1 << 2)) && (saved_reg_mask & (1 << 3))))
2929 + {
2930 + /* All should be popped */
2931 + reglist8 |= 0x01;
2932 + }
2933 + else
2934 + {
2935 + usePOPM = FALSE;
2936 + }
2937 + }
2938 +
2939 + if (((saved_reg_mask & (1 << 4)) ||
2940 + (saved_reg_mask & (1 << 5)) ||
2941 + (saved_reg_mask & (1 << 6)) || (saved_reg_mask & (1 << 7))))
2942 + {
 2943 +	  /* One of R4-R7 should at least be popped */
2944 + if (((saved_reg_mask & (1 << 4)) &&
2945 + (saved_reg_mask & (1 << 5)) &&
2946 + (saved_reg_mask & (1 << 6)) && (saved_reg_mask & (1 << 7))))
2947 + {
2948 + if (usePOPM)
2949 + /* All should be popped */
2950 + reglist8 |= 0x02;
2951 + }
2952 + else
2953 + {
2954 + usePOPM = FALSE;
2955 + }
2956 + }
2957 +
2958 + if (((saved_reg_mask & (1 << 8)) || (saved_reg_mask & (1 << 9))))
2959 + {
 2960 +	  /* One of R8-R9 should at least be popped */
2961 + if (((saved_reg_mask & (1 << 8)) && (saved_reg_mask & (1 << 9))))
2962 + {
2963 + if (usePOPM)
2964 + /* All should be pushed */
2965 + reglist8 |= 0x04;
2966 + }
2967 + else
2968 + {
2969 + usePOPM = FALSE;
2970 + }
2971 + }
2972 +
2973 + if (saved_reg_mask & (1 << 10))
2974 + reglist8 |= 0x08;
2975 +
2976 + if (saved_reg_mask & (1 << 11))
2977 + reglist8 |= 0x10;
2978 +
2979 + if (saved_reg_mask & (1 << 12))
2980 + reglist8 |= 0x20;
2981 +
2982 + if (saved_reg_mask & (1 << ASM_REGNUM (LR_REGNUM)))
2983 + /* Pop LR */
2984 + reglist8 |= 0x40;
2985 +
2986 + if (saved_reg_mask & (1 << ASM_REGNUM (PC_REGNUM)))
2987 + /* Pop LR into PC. */
2988 + reglist8 |= 0x80;
2989 +
2990 + if (usePOPM)
2991 + {
2992 + char reglist[64]; /* 64 bytes should be enough... */
2993 + avr32_make_reglist8 (reglist8, (char *) reglist);
2994 +
2995 + if (reglist8 & 0x80)
2996 + /* This instruction is also a return */
2997 + insert_ret = FALSE;
2998 +
2999 + if (r12_imm && !insert_ret)
3000 + fprintf (f, "\tpopm %s, r12=%li\n", reglist, INTVAL (r12_imm));
3001 + else
3002 + fprintf (f, "\tpopm %s\n", reglist);
3003 +
3004 + }
3005 + else
3006 + {
3007 + char reglist[64]; /* 64 bytes should be enough... */
3008 + avr32_make_reglist16 (saved_reg_mask, (char *) reglist);
3009 + if (saved_reg_mask & (1 << ASM_REGNUM (PC_REGNUM)))
3010 + /* This instruction is also a return */
3011 + insert_ret = FALSE;
3012 +
3013 + if (r12_imm && !insert_ret)
3014 + fprintf (f, "\tldm sp++, %s, r12=%li\n", reglist,
3015 + INTVAL (r12_imm));
3016 + else
3017 + fprintf (f, "\tldm sp++, %s\n", reglist);
3018 +
3019 + }
3020 +
3021 + }
3022 +
3023 + if (IS_INTERRUPT (func_type))
3024 + {
3025 + fprintf (f, "\trete\n");
3026 + }
3027 + else if (insert_ret)
3028 + {
3029 + if (r12_imm)
3030 + fprintf (f, "\tretal %li\n", INTVAL (r12_imm));
3031 + else
3032 + fprintf (f, "\tretal r12\n");
3033 + }
3034 +}
3035 +
3036 +/* Function for converting a fp-register mask to a
3037 + reglistCPD8 register list string. */
3038 +void
3039 +avr32_make_fp_reglist_d (int reglist_mask, char *reglist_string)
3040 +{
3041 + int i;
3042 +
3043 + /* Make sure reglist_string is empty */
3044 + reglist_string[0] = '\0';
3045 +
3046 + for (i = 0; i < NUM_FP_REGS; i += 2)
3047 + {
3048 + if (reglist_mask & (1 << i))
3049 + {
3050 + strlen (reglist_string) ?
3051 + sprintf (reglist_string, "%s, %s-%s", reglist_string,
3052 + reg_names[INTERNAL_FP_REGNUM (i)],
3053 + reg_names[INTERNAL_FP_REGNUM (i + 1)]) :
3054 + sprintf (reglist_string, "%s-%s",
3055 + reg_names[INTERNAL_FP_REGNUM (i)],
3056 + reg_names[INTERNAL_FP_REGNUM (i + 1)]);
3057 + }
3058 + }
3059 +}
3060 +
3061 +/* Function for converting a fp-register mask to a
3062 + reglistCP8 register list string. */
3063 +void
3064 +avr32_make_fp_reglist_w (int reglist_mask, char *reglist_string)
3065 +{
3066 + int i;
3067 +
3068 + /* Make sure reglist_string is empty */
3069 + reglist_string[0] = '\0';
3070 +
3071 + for (i = 0; i < NUM_FP_REGS; ++i)
3072 + {
3073 + if (reglist_mask & (1 << i))
3074 + {
3075 + strlen (reglist_string) ?
3076 + sprintf (reglist_string, "%s, %s", reglist_string,
3077 + reg_names[INTERNAL_FP_REGNUM (i)]) :
3078 + sprintf (reglist_string, "%s", reg_names[INTERNAL_FP_REGNUM (i)]);
3079 + }
3080 + }
3081 +}
3082 +
3083 +void
3084 +avr32_make_reglist16 (int reglist16_vect, char *reglist16_string)
3085 +{
3086 + int i;
3087 +
3088 + /* Make sure reglist16_string is empty */
3089 + reglist16_string[0] = '\0';
3090 +
3091 + for (i = 0; i < 16; ++i)
3092 + {
3093 + if (reglist16_vect & (1 << i))
3094 + {
3095 + strlen (reglist16_string) ?
3096 + sprintf (reglist16_string, "%s, %s", reglist16_string,
3097 + reg_names[INTERNAL_REGNUM (i)]) :
3098 + sprintf (reglist16_string, "%s", reg_names[INTERNAL_REGNUM (i)]);
3099 + }
3100 + }
3101 +}
3102 +
3103 +int
3104 +avr32_convert_to_reglist16 (int reglist8_vect)
3105 +{
3106 + int reglist16_vect = 0;
3107 + if (reglist8_vect & 0x1)
3108 + reglist16_vect |= 0xF;
3109 + if (reglist8_vect & 0x2)
3110 + reglist16_vect |= 0xF0;
3111 + if (reglist8_vect & 0x4)
3112 + reglist16_vect |= 0x300;
3113 + if (reglist8_vect & 0x8)
3114 + reglist16_vect |= 0x400;
3115 + if (reglist8_vect & 0x10)
3116 + reglist16_vect |= 0x800;
3117 + if (reglist8_vect & 0x20)
3118 + reglist16_vect |= 0x1000;
3119 + if (reglist8_vect & 0x40)
3120 + reglist16_vect |= 0x4000;
3121 + if (reglist8_vect & 0x80)
3122 + reglist16_vect |= 0x8000;
3123 +
3124 + return reglist16_vect;
3125 +}
3126 +
3127 +void
3128 +avr32_make_reglist8 (int reglist8_vect, char *reglist8_string)
3129 +{
3130 + /* Make sure reglist8_string is empty */
3131 + reglist8_string[0] = '\0';
3132 +
3133 + if (reglist8_vect & 0x1)
3134 + sprintf (reglist8_string, "r0-r3");
3135 + if (reglist8_vect & 0x2)
3136 + strlen (reglist8_string) ? sprintf (reglist8_string, "%s, r4-r7",
3137 + reglist8_string) :
3138 + sprintf (reglist8_string, "r4-r7");
3139 + if (reglist8_vect & 0x4)
3140 + strlen (reglist8_string) ? sprintf (reglist8_string, "%s, r8-r9",
3141 + reglist8_string) :
3142 + sprintf (reglist8_string, "r8-r9");
3143 + if (reglist8_vect & 0x8)
3144 + strlen (reglist8_string) ? sprintf (reglist8_string, "%s, r10",
3145 + reglist8_string) :
3146 + sprintf (reglist8_string, "r10");
3147 + if (reglist8_vect & 0x10)
3148 + strlen (reglist8_string) ? sprintf (reglist8_string, "%s, r11",
3149 + reglist8_string) :
3150 + sprintf (reglist8_string, "r11");
3151 + if (reglist8_vect & 0x20)
3152 + strlen (reglist8_string) ? sprintf (reglist8_string, "%s, r12",
3153 + reglist8_string) :
3154 + sprintf (reglist8_string, "r12");
3155 + if (reglist8_vect & 0x40)
3156 + strlen (reglist8_string) ? sprintf (reglist8_string, "%s, lr",
3157 + reglist8_string) :
3158 + sprintf (reglist8_string, "lr");
3159 + if (reglist8_vect & 0x80)
3160 + strlen (reglist8_string) ? sprintf (reglist8_string, "%s, pc",
3161 + reglist8_string) :
3162 + sprintf (reglist8_string, "pc");
3163 +}
3164 +
3165 +int
3166 +avr32_eh_return_data_regno (int n)
3167 +{
3168 + if (n >= 0 && n <= 3)
3169 + return 8 + n;
3170 + else
3171 + return INVALID_REGNUM;
3172 +}
3173 +
3174 +/* Compute the distance from register FROM to register TO.
3175 + These can be the arg pointer, the frame pointer or
3176 + the stack pointer.
3177 + Typical stack layout looks like this:
3178 +
3179 + old stack pointer -> | |
3180 + ----
3181 + | | \
3182 + | | saved arguments for
3183 + | | vararg functions
3184 + arg_pointer -> | | /
3185 + --
3186 + | | \
3187 + | | call saved
3188 + | | registers
3189 + | | /
3190 + frame ptr -> --
3191 + | | \
3192 + | | local
3193 + | | variables
3194 + stack ptr --> | | /
3195 + --
3196 + | | \
3197 + | | outgoing
3198 + | | arguments
3199 + | | /
3200 + --
3201 +
 3202 +  For a given function some or all of these stack components
3203 + may not be needed, giving rise to the possibility of
3204 + eliminating some of the registers.
3205 +
3206 + The values returned by this function must reflect the behaviour
3207 + of avr32_expand_prologue() and avr32_compute_save_reg_mask().
3208 +
3209 + The sign of the number returned reflects the direction of stack
3210 + growth, so the values are positive for all eliminations except
3211 + from the soft frame pointer to the hard frame pointer. */
3212 +
3213 +
3214 +int
3215 +avr32_initial_elimination_offset (int from, int to)
3216 +{
3217 + int i;
3218 + int call_saved_regs = 0;
3219 + unsigned long saved_reg_mask, saved_fp_reg_mask;
3220 + unsigned int local_vars = get_frame_size ();
3221 +
3222 + saved_reg_mask = avr32_compute_save_reg_mask (TRUE);
3223 + saved_fp_reg_mask = avr32_compute_save_fp_reg_mask ();
3224 +
3225 + for (i = 0; i < 16; ++i)
3226 + {
3227 + if (saved_reg_mask & (1 << i))
3228 + call_saved_regs += 4;
3229 + }
3230 +
3231 + for (i = 0; i < NUM_FP_REGS; ++i)
3232 + {
3233 + if (saved_fp_reg_mask & (1 << i))
3234 + call_saved_regs += 4;
3235 + }
3236 +
3237 + switch (from)
3238 + {
3239 + case ARG_POINTER_REGNUM:
3240 + switch (to)
3241 + {
3242 + case STACK_POINTER_REGNUM:
3243 + return call_saved_regs + local_vars;
3244 + case FRAME_POINTER_REGNUM:
3245 + return call_saved_regs;
3246 + default:
3247 + abort ();
3248 + }
3249 + case FRAME_POINTER_REGNUM:
3250 + switch (to)
3251 + {
3252 + case STACK_POINTER_REGNUM:
3253 + return local_vars;
3254 + default:
3255 + abort ();
3256 + }
3257 + default:
3258 + abort ();
3259 + }
3260 +}
3261 +
3262 +
3263 +/*
3264 + Returns a rtx used when passing the next argument to a function.
 3265 +  avr32_init_cumulative_args() and avr32_function_arg_advance() set which
3266 + register to use.
3267 +*/
3268 +rtx
3269 +avr32_function_arg (CUMULATIVE_ARGS * cum, enum machine_mode mode,
3270 + tree type, int named)
3271 +{
3272 + int index = -1;
3273 +
3274 + HOST_WIDE_INT arg_size, arg_rsize;
3275 + if (type)
3276 + {
3277 + arg_size = int_size_in_bytes (type);
3278 + }
3279 + else
3280 + {
3281 + arg_size = GET_MODE_SIZE (mode);
3282 + }
3283 + arg_rsize = PUSH_ROUNDING (arg_size);
3284 +
3285 + /*
3286 + The last time this macro is called, it is called with mode == VOIDmode,
3287 + and its result is passed to the call or call_value pattern as operands 2
3288 + and 3 respectively. */
3289 + if (mode == VOIDmode)
3290 + {
3291 + return gen_rtx_CONST_INT (SImode, 22); /* ToDo: fixme. */
3292 + }
3293 +
3294 + if ((*targetm.calls.must_pass_in_stack) (mode, type) || !named)
3295 + {
3296 + return NULL_RTX;
3297 + }
3298 +
3299 + if (arg_rsize == 8)
3300 + {
3301 + /* use r11:r10 or r9:r8. */
3302 + if (!(GET_USED_INDEX (cum, 1) || GET_USED_INDEX (cum, 2)))
3303 + index = 1;
3304 + else if (!(GET_USED_INDEX (cum, 3) || GET_USED_INDEX (cum, 4)))
3305 + index = 3;
3306 + else
3307 + index = -1;
3308 + }
3309 + else if (arg_rsize == 4)
3310 + { /* Use first available register */
3311 + index = 0;
3312 + while (index <= LAST_CUM_REG_INDEX && GET_USED_INDEX (cum, index))
3313 + index++;
3314 + if (index > LAST_CUM_REG_INDEX)
3315 + index = -1;
3316 + }
3317 +
3318 + SET_REG_INDEX (cum, index);
3319 +
3320 + if (GET_REG_INDEX (cum) >= 0)
3321 + return gen_rtx_REG (mode,
3322 + avr32_function_arg_reglist[GET_REG_INDEX (cum)]);
3323 +
3324 + return NULL_RTX;
3325 +}
3326 +
3327 +/*
3328 + Set the register used for passing the first argument to a function.
3329 +*/
3330 +void
3331 +avr32_init_cumulative_args (CUMULATIVE_ARGS * cum, tree fntype,
3332 + rtx libname ATTRIBUTE_UNUSED,
3333 + tree fndecl ATTRIBUTE_UNUSED)
3334 +{
3335 + /* Set all registers as unused. */
3336 + SET_INDEXES_UNUSED (cum);
3337 +
3338 + /* Reset uses_anonymous_args */
3339 + cum->uses_anonymous_args = 0;
3340 +
3341 + /* Reset size of stack pushed arguments */
3342 + cum->stack_pushed_args_size = 0;
3343 +
3344 + /* If the function is returning a value passed in memory r12 is used as a
3345 + Return Value Pointer. */
3346 +
3347 + if (fntype != 0 && avr32_return_in_memory (TREE_TYPE (fntype), fntype))
3348 + {
3349 + SET_REG_INDEX (cum, 0);
3350 + SET_USED_INDEX (cum, GET_REG_INDEX (cum));
3351 + }
3352 +}
3353 +
3354 +/*
3355 + Set register used for passing the next argument to a function. Only the
3356 + Scratch Registers are used.
3357 +
3358 + number name
3359 + 15 r15 PC
3360 + 14 r14 LR
3361 + 13 r13 _SP_________
3362 + FIRST_CUM_REG 12 r12 _||_
3363 + 10 r11 ||
3364 + 11 r10 _||_ Scratch Registers
3365 + 8 r9 ||
3366 + LAST_SCRATCH_REG 9 r8 _\/_________
3367 + 6 r7 /\
3368 + 7 r6 ||
3369 + 4 r5 ||
3370 + 5 r4 ||
3371 + 2 r3 ||
3372 + 3 r2 ||
3373 + 0 r1 ||
3374 + 1 r0 _||_________
3375 +
3376 +*/
3377 +void
3378 +avr32_function_arg_advance (CUMULATIVE_ARGS * cum, enum machine_mode mode,
3379 + tree type, int named ATTRIBUTE_UNUSED)
3380 +{
3381 + HOST_WIDE_INT arg_size, arg_rsize;
3382 +
3383 + if (type)
3384 + {
3385 + arg_size = int_size_in_bytes (type);
3386 + }
3387 + else
3388 + {
3389 + arg_size = GET_MODE_SIZE (mode);
3390 + }
3391 + arg_rsize = PUSH_ROUNDING (arg_size);
3392 +
 3393 +  /* If the argument has to be passed on the stack, no register is used. */
3394 + if ((*targetm.calls.must_pass_in_stack) (mode, type))
3395 + {
3396 + cum->stack_pushed_args_size += PUSH_ROUNDING (int_size_in_bytes (type));
3397 + return;
3398 + }
3399 +
3400 + /* Mark the used registers as "used". */
3401 + if (GET_REG_INDEX (cum) >= 0)
3402 + {
3403 + SET_USED_INDEX (cum, GET_REG_INDEX (cum));
3404 + if (arg_rsize == 8)
3405 + {
3406 + SET_USED_INDEX (cum, (GET_REG_INDEX (cum) + 1));
3407 + }
3408 + }
3409 + else
3410 + {
3411 + /* Had to use stack */
3412 + cum->stack_pushed_args_size += arg_rsize;
3413 + }
3414 +}
3415 +
3416 +/*
 3417 +  Defines which direction to go to find the next register to use if the
 3418 +  argument is larger than one register or for arguments shorter than an
3419 + int which is not promoted, such as the last part of structures with
3420 + size not a multiple of 4. */
3421 +enum direction
3422 +avr32_function_arg_padding (enum machine_mode mode ATTRIBUTE_UNUSED,
3423 + tree type)
3424 +{
3425 + /* Pad upward for all aggregates except byte and halfword sized aggregates
3426 + which can be passed in registers. */
3427 + if (type
3428 + && AGGREGATE_TYPE_P (type)
3429 + && (int_size_in_bytes (type) != 1)
3430 + && !((int_size_in_bytes (type) == 2)
3431 + && TYPE_ALIGN_UNIT (type) >= 2)
3432 + && (int_size_in_bytes (type) & 0x3))
3433 + {
3434 + return upward;
3435 + }
3436 +
3437 + return downward;
3438 +}
3439 +
3440 +/*
3441 + Return a rtx used for the return value from a function call.
3442 +*/
3443 +rtx
3444 +avr32_function_value (tree type, tree func)
3445 +{
3446 + if (avr32_return_in_memory (type, func))
3447 + return NULL_RTX;
3448 +
3449 + if (int_size_in_bytes (type) <= 4)
3450 + if (avr32_return_in_msb (type))
3451 + /* Aggregates of size less than a word which does align the data in the
3452 + MSB must use SImode for r12. */
3453 + return gen_rtx_REG (SImode, RET_REGISTER);
3454 + else
3455 + return gen_rtx_REG (TYPE_MODE (type), RET_REGISTER);
3456 + else if (int_size_in_bytes (type) <= 8)
3457 + return gen_rtx_REG (TYPE_MODE (type), INTERNAL_REGNUM (11));
3458 +
3459 + return NULL_RTX;
3460 +}
3461 +
3462 +/*
3463 + Return a rtx used for the return value from a library function call.
3464 +*/
3465 +rtx
3466 +avr32_libcall_value (enum machine_mode mode)
3467 +{
3468 +
3469 + if (GET_MODE_SIZE (mode) <= 4)
3470 + return gen_rtx_REG (mode, RET_REGISTER);
3471 + else if (GET_MODE_SIZE (mode) <= 8)
3472 + return gen_rtx_REG (mode, INTERNAL_REGNUM (11));
3473 + else
3474 + return NULL_RTX;
3475 +}
3476 +
3477 +/* Return TRUE if X references a SYMBOL_REF. */
3478 +int
3479 +symbol_mentioned_p (rtx x)
3480 +{
3481 + const char *fmt;
3482 + int i;
3483 +
3484 + if (GET_CODE (x) == SYMBOL_REF)
3485 + return 1;
3486 +
3487 + fmt = GET_RTX_FORMAT (GET_CODE (x));
3488 +
3489 + for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
3490 + {
3491 + if (fmt[i] == 'E')
3492 + {
3493 + int j;
3494 +
3495 + for (j = XVECLEN (x, i) - 1; j >= 0; j--)
3496 + if (symbol_mentioned_p (XVECEXP (x, i, j)))
3497 + return 1;
3498 + }
3499 + else if (fmt[i] == 'e' && symbol_mentioned_p (XEXP (x, i)))
3500 + return 1;
3501 + }
3502 +
3503 + return 0;
3504 +}
3505 +
3506 +/* Return TRUE if X references a LABEL_REF. */
3507 +int
3508 +label_mentioned_p (rtx x)
3509 +{
3510 + const char *fmt;
3511 + int i;
3512 +
3513 + if (GET_CODE (x) == LABEL_REF)
3514 + return 1;
3515 +
3516 + fmt = GET_RTX_FORMAT (GET_CODE (x));
3517 + for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
3518 + {
3519 + if (fmt[i] == 'E')
3520 + {
3521 + int j;
3522 +
3523 + for (j = XVECLEN (x, i) - 1; j >= 0; j--)
3524 + if (label_mentioned_p (XVECEXP (x, i, j)))
3525 + return 1;
3526 + }
3527 + else if (fmt[i] == 'e' && label_mentioned_p (XEXP (x, i)))
3528 + return 1;
3529 + }
3530 +
3531 + return 0;
3532 +}
3533 +
3534 +
3535 +int
3536 +avr32_legitimate_pic_operand_p (rtx x)
3537 +{
3538 +
3539 + /* We can't have const, this must be broken down to a symbol. */
3540 + if (GET_CODE (x) == CONST)
3541 + return FALSE;
3542 +
3543 + /* Can't access symbols or labels via the constant pool either */
3544 + if ((GET_CODE (x) == SYMBOL_REF
3545 + && CONSTANT_POOL_ADDRESS_P (x)
3546 + && (symbol_mentioned_p (get_pool_constant (x))
3547 + || label_mentioned_p (get_pool_constant (x)))))
3548 + return FALSE;
3549 +
3550 + return TRUE;
3551 +}
3552 +
3553 +
3554 +rtx
3555 +legitimize_pic_address (rtx orig, enum machine_mode mode ATTRIBUTE_UNUSED,
3556 + rtx reg)
3557 +{
3558 +
3559 + if (GET_CODE (orig) == SYMBOL_REF || GET_CODE (orig) == LABEL_REF)
3560 + {
3561 + int subregs = 0;
3562 +
3563 + if (reg == 0)
3564 + {
3565 + if (no_new_pseudos)
3566 + abort ();
3567 + else
3568 + reg = gen_reg_rtx (Pmode);
3569 +
3570 + subregs = 1;
3571 + }
3572 +
3573 + emit_move_insn (reg, orig);
3574 +
3575 + /* Only set current function as using pic offset table if flag_pic is
3576 + set. This is because this function is also used if
3577 + TARGET_HAS_ASM_ADDR_PSEUDOS is set. */
3578 + if (flag_pic)
3579 + current_function_uses_pic_offset_table = 1;
3580 +
3581 + /* Put a REG_EQUAL note on this insn, so that it can be optimized by
3582 + loop. */
3583 + return reg;
3584 + }
3585 + else if (GET_CODE (orig) == CONST)
3586 + {
3587 + rtx base, offset;
3588 +
3589 + if (flag_pic
3590 + && GET_CODE (XEXP (orig, 0)) == PLUS
3591 + && XEXP (XEXP (orig, 0), 0) == pic_offset_table_rtx)
3592 + return orig;
3593 +
3594 + if (reg == 0)
3595 + {
3596 + if (no_new_pseudos)
3597 + abort ();
3598 + else
3599 + reg = gen_reg_rtx (Pmode);
3600 + }
3601 +
3602 + if (GET_CODE (XEXP (orig, 0)) == PLUS)
3603 + {
3604 + base =
3605 + legitimize_pic_address (XEXP (XEXP (orig, 0), 0), Pmode, reg);
3606 + offset =
3607 + legitimize_pic_address (XEXP (XEXP (orig, 0), 1), Pmode,
3608 + base == reg ? 0 : reg);
3609 + }
3610 + else
3611 + abort ();
3612 +
3613 + if (GET_CODE (offset) == CONST_INT)
3614 + {
3615 + /* The base register doesn't really matter, we only want to test
3616 + the index for the appropriate mode. */
3617 + if (!avr32_const_ok_for_constraint_p (INTVAL (offset), 'I', "Is21"))
3618 + {
3619 + if (!no_new_pseudos)
3620 + offset = force_reg (Pmode, offset);
3621 + else
3622 + abort ();
3623 + }
3624 +
3625 + if (GET_CODE (offset) == CONST_INT)
3626 + return plus_constant (base, INTVAL (offset));
3627 + }
3628 +
3629 + return gen_rtx_PLUS (Pmode, base, offset);
3630 + }
3631 +
3632 + return orig;
3633 +}
3634 +
3635 +/* Generate code to load the PIC register. */
3636 +void
3637 +avr32_load_pic_register (void)
3638 +{
3639 + rtx l1, pic_tmp;
3640 + rtx global_offset_table;
3641 +
3642 + if ((current_function_uses_pic_offset_table == 0) || TARGET_NO_INIT_GOT)
3643 + return;
3644 +
3645 + if (!flag_pic)
3646 + abort ();
3647 +
3648 + l1 = gen_label_rtx ();
3649 +
3650 + global_offset_table = gen_rtx_SYMBOL_REF (Pmode, "_GLOBAL_OFFSET_TABLE_");
3651 + pic_tmp =
3652 + gen_rtx_CONST (Pmode,
3653 + gen_rtx_MINUS (SImode, gen_rtx_LABEL_REF (Pmode, l1),
3654 + global_offset_table));
3655 + emit_insn (gen_pic_load_addr
3656 + (pic_offset_table_rtx, force_const_mem (SImode, pic_tmp)));
3657 + emit_insn (gen_pic_compute_got_from_pc (pic_offset_table_rtx, l1));
3658 +
3659 + /* Need to emit this whether or not we obey regdecls, since setjmp/longjmp
3660 + can cause life info to screw up. */
3661 + emit_insn (gen_rtx_USE (VOIDmode, pic_offset_table_rtx));
3662 +}
3663 +
3664 +
3665 +
3666 +/* This hook should return true if values of type type are returned at the most
3667 + significant end of a register (in other words, if they are padded at the
3668 + least significant end). You can assume that type is returned in a register;
3669 + the caller is required to check this. Note that the register provided by
3670 + FUNCTION_VALUE must be able to hold the complete return value. For example,
3671 + if a 1-, 2- or 3-byte structure is returned at the most significant end of a
3672 + 4-byte register, FUNCTION_VALUE should provide an SImode rtx. */
3673 +bool
3674 +avr32_return_in_msb (tree type ATTRIBUTE_UNUSED)
3675 +{
3676 + /* if ( AGGREGATE_TYPE_P (type) ) if ((int_size_in_bytes(type) == 1) ||
3677 + ((int_size_in_bytes(type) == 2) && TYPE_ALIGN_UNIT(type) >= 2)) return
3678 + false; else return true; */
3679 +
3680 + return false;
3681 +}
3682 +
3683 +
3684 +/*
3685 + Returns one if a certain function value is going to be returned in memory
3686 + and zero if it is going to be returned in a register.
3687 +
3688 + BLKmode and all other modes that is larger than 64 bits are returned in
3689 + memory.
3690 +*/
3691 +bool
3692 +avr32_return_in_memory (tree type, tree fntype ATTRIBUTE_UNUSED)
3693 +{
3694 + if (TYPE_MODE (type) == VOIDmode)
3695 + return false;
3696 +
3697 + if (int_size_in_bytes (type) > (2 * UNITS_PER_WORD)
3698 + || int_size_in_bytes (type) == -1)
3699 + {
3700 + return true;
3701 + }
3702 +