11e24eef2265871e97c7f9f2c1885317ad31c10d
[openwrt/svn-archive/archive.git] / toolchain / gcc / patches / 4.1.2 / 500-avr32.patch
1 diff -Nur gcc-4.1.2/config.sub gcc-4.1.2-owrt/config.sub
2 --- gcc-4.1.2/config.sub 2005-12-16 13:57:40.000000000 +0100
3 +++ gcc-4.1.2-owrt/config.sub 2007-05-24 12:03:28.000000000 +0200
4 @@ -239,7 +239,7 @@
5 | alpha | alphaev[4-8] | alphaev56 | alphaev6[78] | alphapca5[67] \
6 | alpha64 | alpha64ev[4-8] | alpha64ev56 | alpha64ev6[78] | alpha64pca5[67] \
7 | am33_2.0 \
8 - | arc | arm | arm[bl]e | arme[lb] | armv[2345] | armv[345][lb] | avr \
9 + | arc | arm | arm[bl]e | arme[lb] | armv[2345] | armv[345][lb] | avr | avr32 \
10 | bfin \
11 | c4x | clipper \
12 | d10v | d30v | dlx | dsp16xx \
13 @@ -316,7 +316,7 @@
14 | alpha64-* | alpha64ev[4-8]-* | alpha64ev56-* | alpha64ev6[78]-* \
15 | alphapca5[67]-* | alpha64pca5[67]-* | arc-* \
16 | arm-* | armbe-* | armle-* | armeb-* | armv*-* \
17 - | avr-* \
18 + | avr-* | avr32-* \
19 | bfin-* | bs2000-* \
20 | c[123]* | c30-* | [cjt]90-* | c4x-* | c54x-* | c55x-* | c6x-* \
21 | clipper-* | craynv-* | cydra-* \
22 diff -Nur gcc-4.1.2/configure.in gcc-4.1.2-owrt/configure.in
23 --- gcc-4.1.2/configure.in 2006-11-21 18:48:36.000000000 +0100
24 +++ gcc-4.1.2-owrt/configure.in 2007-05-24 12:03:28.000000000 +0200
25 @@ -497,6 +497,9 @@
26 arm-*-riscix*)
27 noconfigdirs="$noconfigdirs ld target-libgloss ${libgcj}"
28 ;;
29 + avr32-*-*)
30 + noconfigdirs="$noconfigdirs target-libiberty target-libmudflap target-libffi ${libgcj}"
31 + ;;
32 avr-*-*)
33 noconfigdirs="$noconfigdirs target-libiberty target-libstdc++-v3 ${libgcj}"
34 ;;
35 diff -Nur gcc-4.1.2/gcc/builtins.c gcc-4.1.2-owrt/gcc/builtins.c
36 --- gcc-4.1.2/gcc/builtins.c 2006-10-06 19:06:52.000000000 +0200
37 +++ gcc-4.1.2-owrt/gcc/builtins.c 2007-05-24 12:03:28.000000000 +0200
38 @@ -9228,7 +9228,7 @@
39
40 do
41 {
42 - code = va_arg (ap, enum tree_code);
43 + code = va_arg (ap, int);
44 switch (code)
45 {
46 case 0:
47 diff -Nur gcc-4.1.2/gcc/calls.c gcc-4.1.2-owrt/gcc/calls.c
48 --- gcc-4.1.2/gcc/calls.c 2007-01-29 18:08:31.000000000 +0100
49 +++ gcc-4.1.2-owrt/gcc/calls.c 2007-05-24 12:03:28.000000000 +0200
50 @@ -3434,7 +3434,7 @@
51 for (; count < nargs; count++)
52 {
53 rtx val = va_arg (p, rtx);
54 - enum machine_mode mode = va_arg (p, enum machine_mode);
55 + enum machine_mode mode = va_arg (p, int);
56
57 /* We cannot convert the arg value to the mode the library wants here;
58 must do it earlier where we know the signedness of the arg. */
59 diff -Nur gcc-4.1.2/gcc/config/avr32/avr32.c gcc-4.1.2-owrt/gcc/config/avr32/avr32.c
60 --- gcc-4.1.2/gcc/config/avr32/avr32.c 1970-01-01 01:00:00.000000000 +0100
61 +++ gcc-4.1.2-owrt/gcc/config/avr32/avr32.c 2007-05-24 12:03:28.000000000 +0200
62 @@ -0,0 +1,7273 @@
63 +/*
64 + Target hooks and helper functions for AVR32.
65 + Copyright 2003-2006 Atmel Corporation.
66 +
67 + Written by Ronny Pedersen, Atmel Norway, <rpedersen@atmel.com>
68 + Initial porting by Anders Ådland.
69 +
70 + This file is part of GCC.
71 +
72 + This program is free software; you can redistribute it and/or modify
73 + it under the terms of the GNU General Public License as published by
74 + the Free Software Foundation; either version 2 of the License, or
75 + (at your option) any later version.
76 +
77 + This program is distributed in the hope that it will be useful,
78 + but WITHOUT ANY WARRANTY; without even the implied warranty of
79 + MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
80 + GNU General Public License for more details.
81 +
82 + You should have received a copy of the GNU General Public License
83 + along with this program; if not, write to the Free Software
84 + Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. */
85 +
86 +#include "config.h"
87 +#include "system.h"
88 +#include "coretypes.h"
89 +#include "tm.h"
90 +#include "rtl.h"
91 +#include "tree.h"
92 +#include "obstack.h"
93 +#include "regs.h"
94 +#include "hard-reg-set.h"
95 +#include "real.h"
96 +#include "insn-config.h"
97 +#include "conditions.h"
98 +#include "output.h"
99 +#include "insn-attr.h"
100 +#include "flags.h"
101 +#include "reload.h"
102 +#include "function.h"
103 +#include "expr.h"
104 +#include "optabs.h"
105 +#include "toplev.h"
106 +#include "recog.h"
107 +#include "ggc.h"
108 +#include "except.h"
109 +#include "c-pragma.h"
110 +#include "integrate.h"
111 +#include "tm_p.h"
112 +#include "langhooks.h"
113 +
114 +#include "target.h"
115 +#include "target-def.h"
116 +
117 +#include <ctype.h>
118 +
119 +/* Forward definitions of types. */
120 +typedef struct minipool_node Mnode;
121 +typedef struct minipool_fixup Mfix;
122 +
123 +/* Obstack for minipool constant handling. */
124 +static struct obstack minipool_obstack;
125 +static char *minipool_startobj;
126 +static rtx minipool_vector_label;
127 +
128 +/* True if we are currently building a constant table. */
129 +int making_const_table;
130 +
131 +/* Some forward function declarations */
132 +static unsigned long avr32_isr_value (tree);
133 +static unsigned long avr32_compute_func_type (void);
134 +static tree avr32_handle_isr_attribute (tree *, tree, tree, int, bool *);
135 +static tree avr32_handle_acall_attribute (tree *, tree, tree, int, bool *);
136 +static tree avr32_handle_fndecl_attribute (tree * node, tree name, tree args,
137 + int flags, bool * no_add_attrs);
138 +static void avr32_reorg (void);
139 +bool avr32_return_in_msb (tree type);
140 +bool avr32_vector_mode_supported (enum machine_mode mode);
141 +static void avr32_init_libfuncs (void);
142 +void avr32_load_pic_register (void);
143 +
144 +
145 +static void
146 +avr32_add_gc_roots (void)
147 +{
148 + gcc_obstack_init (&minipool_obstack);
149 + minipool_startobj = (char *) obstack_alloc (&minipool_obstack, 0);
150 +}
151 +
152 +
153 +/* List of all known AVR32 parts */
154 +static const struct part_type_s avr32_part_types[] = {
155 + /* name, part_type, architecture type, macro */
156 + {"none", PART_TYPE_AVR32_NONE, ARCH_TYPE_AVR32_AP, "__AVR32__"},
157 + {"ap7000", PART_TYPE_AVR32_AP7000, ARCH_TYPE_AVR32_AP, "__AVR32_AP7000__"},
158 + {"ap7010", PART_TYPE_AVR32_AP7010, ARCH_TYPE_AVR32_AP, "__AVR32_AP7010__"},
159 + {"ap7020", PART_TYPE_AVR32_AP7020, ARCH_TYPE_AVR32_AP, "__AVR32_AP7020__"},
160 + {"uc3a0256", PART_TYPE_AVR32_UC3A0256, ARCH_TYPE_AVR32_UC, "__AVR32_UC3A0256__"},
161 + {"uc3a0512", PART_TYPE_AVR32_UC3A0512, ARCH_TYPE_AVR32_UC, "__AVR32_UC3A0512__"},
162 + {"uc3a1128", PART_TYPE_AVR32_UC3A1128, ARCH_TYPE_AVR32_UC, "__AVR32_UC3A1128__"},
163 + {"uc3a1256", PART_TYPE_AVR32_UC3A1256, ARCH_TYPE_AVR32_UC, "__AVR32_UC3A1256__"},
164 + {"uc3a1512", PART_TYPE_AVR32_UC3A1512, ARCH_TYPE_AVR32_UC, "__AVR32_UC3A1512__"},
165 + {NULL, 0, 0, NULL}
166 +};
167 +
168 +/* List of all known AVR32 architectures */
169 +static const struct arch_type_s avr32_arch_types[] = {
170 + /* name, architecture type, microarchitecture type, feature flags, macro */
171 + {"ap", ARCH_TYPE_AVR32_AP, UARCH_TYPE_AVR32B, FLAG_AVR32_HAS_DSP |
172 + FLAG_AVR32_HAS_SIMD | FLAG_AVR32_HAS_UNALIGNED_WORD |
173 + FLAG_AVR32_HAS_BRANCH_PRED, "__AVR32_AP__"},
174 + {"uc", ARCH_TYPE_AVR32_UC, UARCH_TYPE_AVR32A,
175 + FLAG_AVR32_HAS_DSP | FLAG_AVR32_HAS_RMW, "__AVR32_UC__"},
176 + {NULL, 0, 0, 0, NULL}
177 +};
178 +
179 +/* Default arch name */
180 +const char *avr32_arch_name = "ap";
181 +const char *avr32_part_name = "none";
182 +
183 +const struct part_type_s *avr32_part;
184 +const struct arch_type_s *avr32_arch;
185 +
186 +
187 +/* Override command line options */
188 +void
189 +avr32_override_options (void)
190 +{
191 + const struct part_type_s *part;
192 + const struct arch_type_s *arch;
193 +
194 + /* Check if part type is set. */
195 + for (part = avr32_part_types; part->name; part++)
196 + if (strcmp (part->name, avr32_part_name) == 0)
197 + break;
198 +
199 + avr32_part = part;
200 +
201 + if (!part->name)
202 + {
203 + fprintf (stderr, "Unknown part `%s' specified\nKnown part names:\n",
204 + avr32_part_name);
205 + for (part = avr32_part_types; part->name; part++)
206 + fprintf (stderr, "\t%s\n", part->name);
207 + avr32_part = &avr32_part_types[PART_TYPE_AVR32_NONE];
208 + }
209 +
210 + avr32_arch = &avr32_arch_types[avr32_part->arch_type];
211 +
212 + /* If part was set to "none" then check if arch was set. */
213 + if (strcmp (avr32_part->name, "none") == 0)
214 + {
215 + /* Check if arch type is set. */
216 + for (arch = avr32_arch_types; arch->name; arch++)
217 + if (strcmp (arch->name, avr32_arch_name) == 0)
218 + break;
219 +
220 + avr32_arch = arch;
221 +
222 + if (!arch->name)
223 + {
224 + fprintf (stderr, "Unknown arch `%s' specified\nKnown arch names:\n",
225 + avr32_arch_name);
226 + for (arch = avr32_arch_types; arch->name; arch++)
227 + fprintf (stderr, "\t%s\n", arch->name);
228 + avr32_arch = &avr32_arch_types[ARCH_TYPE_AVR32_AP];
229 + }
230 + }
231 +
232 + /* If optimization level is two or greater, then align start of loops to a
233 + word boundary since this will allow folding the first insn of the loop.
234 + Do this only for targets supporting branch prediction. */
235 + if (optimize >= 2 && TARGET_BRANCH_PRED)
236 + align_loops = 2;
237 +
238 + if (AVR32_ALWAYS_PIC)
239 + flag_pic = 1;
240 +
241 + if (TARGET_NO_PIC)
242 + flag_pic = 0;
243 +
244 + avr32_add_gc_roots ();
245 +}
246 +
247 +
248 +/*
249 +If defined, a function that outputs the assembler code for entry to a
250 +function. The prologue is responsible for setting up the stack frame,
251 +initializing the frame pointer register, saving registers that must be
252 +saved, and allocating size additional bytes of storage for the
253 +local variables. size is an integer. file is a stdio
254 +stream to which the assembler code should be output.
255 +
256 +The label for the beginning of the function need not be output by this
257 +macro. That has already been done when the macro is run.
258 +
259 +To determine which registers to save, the macro can refer to the array
260 +regs_ever_live: element r is nonzero if hard register
261 +r is used anywhere within the function. This implies the function
262 +prologue should save register r, provided it is not one of the
263 +call-used registers. (TARGET_ASM_FUNCTION_EPILOGUE must likewise use
264 +regs_ever_live.)
265 +
266 +On machines that have ``register windows'', the function entry code does
267 +not save on the stack the registers that are in the windows, even if
268 +they are supposed to be preserved by function calls; instead it takes
269 +appropriate steps to ``push'' the register stack, if any non-call-used
270 +registers are used in the function.
271 +
272 +On machines where functions may or may not have frame-pointers, the
273 +function entry code must vary accordingly; it must set up the frame
274 +pointer if one is wanted, and not otherwise. To determine whether a
275 +frame pointer is wanted, the macro can refer to the variable
276 +frame_pointer_needed. The variable's value will be 1 at run
277 +time in a function that needs a frame pointer. (see Elimination).
278 +
279 +The function entry code is responsible for allocating any stack space
280 +required for the function. This stack space consists of the regions
281 +listed below. In most cases, these regions are allocated in the
282 +order listed, with the last listed region closest to the top of the
283 +stack (the lowest address if STACK_GROWS_DOWNWARD is defined, and
284 +the highest address if it is not defined). You can use a different order
285 +for a machine if doing so is more convenient or required for
286 +compatibility reasons. Except in cases where required by standard
287 +or by a debugger, there is no reason why the stack layout used by GCC
288 +need agree with that used by other compilers for a machine.
289 +*/
290 +
291 +#undef TARGET_ASM_FUNCTION_PROLOGUE
292 +#define TARGET_ASM_FUNCTION_PROLOGUE avr32_target_asm_function_prologue
293 +
294 +
295 +#undef TARGET_DEFAULT_SHORT_ENUMS
296 +#define TARGET_DEFAULT_SHORT_ENUMS hook_bool_void_false
297 +
298 +#undef TARGET_PROMOTE_FUNCTION_ARGS
299 +#define TARGET_PROMOTE_FUNCTION_ARGS hook_bool_tree_true
300 +
301 +#undef TARGET_PROMOTE_FUNCTION_RETURN
302 +#define TARGET_PROMOTE_FUNCTION_RETURN hook_bool_tree_true
303 +
304 +#undef TARGET_PROMOTE_PROTOTYPES
305 +#define TARGET_PROMOTE_PROTOTYPES hook_bool_tree_true
306 +
307 +#undef TARGET_MUST_PASS_IN_STACK
308 +#define TARGET_MUST_PASS_IN_STACK avr32_must_pass_in_stack
309 +
310 +#undef TARGET_PASS_BY_REFERENCE
311 +#define TARGET_PASS_BY_REFERENCE avr32_pass_by_reference
312 +
313 +#undef TARGET_STRICT_ARGUMENT_NAMING
314 +#define TARGET_STRICT_ARGUMENT_NAMING avr32_strict_argument_naming
315 +
316 +#undef TARGET_VECTOR_MODE_SUPPORTED_P
317 +#define TARGET_VECTOR_MODE_SUPPORTED_P avr32_vector_mode_supported
318 +
319 +#undef TARGET_RETURN_IN_MEMORY
320 +#define TARGET_RETURN_IN_MEMORY avr32_return_in_memory
321 +
322 +#undef TARGET_RETURN_IN_MSB
323 +#define TARGET_RETURN_IN_MSB avr32_return_in_msb
324 +
325 +#undef TARGET_ARG_PARTIAL_BYTES
326 +#define TARGET_ARG_PARTIAL_BYTES avr32_arg_partial_bytes
327 +
328 +#undef TARGET_STRIP_NAME_ENCODING
329 +#define TARGET_STRIP_NAME_ENCODING avr32_strip_name_encoding
330 +
331 +#define streq(string1, string2) (strcmp (string1, string2) == 0)
332 +
333 +#undef TARGET_ATTRIBUTE_TABLE
334 +#define TARGET_ATTRIBUTE_TABLE avr32_attribute_table
335 +
336 +#undef TARGET_COMP_TYPE_ATTRIBUTES
337 +#define TARGET_COMP_TYPE_ATTRIBUTES avr32_comp_type_attributes
338 +
339 +
340 +#undef TARGET_RTX_COSTS
341 +#define TARGET_RTX_COSTS avr32_rtx_costs
342 +
343 +#undef TARGET_CANNOT_FORCE_CONST_MEM
344 +#define TARGET_CANNOT_FORCE_CONST_MEM avr32_cannot_force_const_mem
345 +
346 +#undef TARGET_ASM_INTEGER
347 +#define TARGET_ASM_INTEGER avr32_assemble_integer
348 +
349 +/*
350 + * Switches to the appropriate section for output of constant pool
351 + * entry x in mode. You can assume that x is some kind of constant in
352 + * RTL. The argument mode is redundant except in the case of a
353 + * const_int rtx. Select the section by calling readonly_data_section
354 + * or one of the alternatives for other sections. align is the
355 + * constant alignment in bits.
356 + *
357 + * The default version of this function takes care of putting symbolic
358 + * constants in flag_pic mode in data_section and everything else in
359 + * readonly_data_section.
360 + */
361 +#undef TARGET_ASM_SELECT_RTX_SECTION
362 +#define TARGET_ASM_SELECT_RTX_SECTION avr32_select_rtx_section
363 +
364 +
365 +/*
366 + * If non-null, this hook performs a target-specific pass over the
367 + * instruction stream. The compiler will run it at all optimization
368 + * levels, just before the point at which it normally does
369 + * delayed-branch scheduling.
370 + *
371 + * The exact purpose of the hook varies from target to target. Some
372 + * use it to do transformations that are necessary for correctness,
373 + * such as laying out in-function constant pools or avoiding hardware
374 + * hazards. Others use it as an opportunity to do some
375 + * machine-dependent optimizations.
376 + *
377 + * You need not implement the hook if it has nothing to do. The
378 + * default definition is null.
379 + */
380 +#undef TARGET_MACHINE_DEPENDENT_REORG
381 +#define TARGET_MACHINE_DEPENDENT_REORG avr32_reorg
382 +
383 +/* Target hook for assembling integer objects.
384 + Need to handle integer vectors */
385 +static bool
386 +avr32_assemble_integer (rtx x, unsigned int size, int aligned_p)
387 +{
388 + if (avr32_vector_mode_supported (GET_MODE (x)))
389 + {
390 + int i, units;
391 +
392 + if (GET_CODE (x) != CONST_VECTOR)
393 + abort ();
394 +
395 + units = CONST_VECTOR_NUNITS (x);
396 +
397 + switch (GET_MODE (x))
398 + {
399 + case V2HImode:
400 + size = 2;
401 + break;
402 + case V4QImode:
403 + size = 1;
404 + break;
405 + default:
406 + abort ();
407 + }
408 +
409 + for (i = 0; i < units; i++)
410 + {
411 + rtx elt;
412 +
413 + elt = CONST_VECTOR_ELT (x, i);
414 + assemble_integer (elt, size, i == 0 ? 32 : size * BITS_PER_UNIT, 1);
415 + }
416 +
417 + return true;
418 + }
419 +
420 + return default_assemble_integer (x, size, aligned_p);
421 +}
422 +
423 +/*
424 + * This target hook describes the relative costs of RTL expressions.
425 + *
426 + * The cost may depend on the precise form of the expression, which is
427 + * available for examination in x, and the rtx code of the expression
428 + * in which it is contained, found in outer_code. code is the
429 + * expression code--redundant, since it can be obtained with GET_CODE
430 + * (x).
431 + *
432 + * In implementing this hook, you can use the construct COSTS_N_INSNS
433 + * (n) to specify a cost equal to n fast instructions.
434 + *
435 + * On entry to the hook, *total contains a default estimate for the
436 + * cost of the expression. The hook should modify this value as
437 + * necessary. Traditionally, the default costs are COSTS_N_INSNS (5)
438 + * for multiplications, COSTS_N_INSNS (7) for division and modulus
439 + * operations, and COSTS_N_INSNS (1) for all other operations.
440 + *
441 + * When optimizing for code size, i.e. when optimize_size is non-zero,
442 + * this target hook should be used to estimate the relative size cost
443 + * of an expression, again relative to COSTS_N_INSNS.
444 + *
445 + * The hook returns true when all subexpressions of x have been
446 + * processed, and false when rtx_cost should recurse.
447 + */
448 +
449 +/* Worker routine for avr32_rtx_costs. */
450 +static inline int
451 +avr32_rtx_costs_1 (rtx x, enum rtx_code code ATTRIBUTE_UNUSED,
452 + enum rtx_code outer ATTRIBUTE_UNUSED)
453 +{
454 + enum machine_mode mode = GET_MODE (x);
455 +
456 + switch (GET_CODE (x))
457 + {
458 + case MEM:
459 + /* Using pre decrement / post increment memory operations on the
460 + avr32_uc architecture means that two writebacks must be performed
461 + and hence two cycles are needed. */
462 + if (!optimize_size
463 + && GET_MODE_SIZE (mode) <= 2 * UNITS_PER_WORD
464 + && avr32_arch->arch_type == ARCH_TYPE_AVR32_UC
465 + && (GET_CODE (XEXP (x, 0)) == PRE_DEC
466 + || GET_CODE (XEXP (x, 0)) == POST_INC))
467 + return COSTS_N_INSNS (4);
468 +
469 + /* Memory costs quite a lot for the first word, but subsequent words
470 + load at the equivalent of a single insn each. */
471 + if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
472 + return COSTS_N_INSNS (2 + (GET_MODE_SIZE (mode) / UNITS_PER_WORD));
473 +
474 + return COSTS_N_INSNS (3);
475 + case SYMBOL_REF:
476 + case CONST:
477 + /* These are valid for the pseudo insns: lda.w and call which operates
478 + on direct addresses. We assume that the cost of a lda.w is the same
479 + as the cost of a ld.w insn. */
480 + return (outer == SET) ? COSTS_N_INSNS (3) : COSTS_N_INSNS (1);
481 + case DIV:
482 + case MOD:
483 + case UDIV:
484 + case UMOD:
485 + return optimize_size ? COSTS_N_INSNS (1) : COSTS_N_INSNS (16);
486 +
487 + case ROTATE:
488 + case ROTATERT:
489 + if (mode == TImode)
490 + return COSTS_N_INSNS (100);
491 +
492 + if (mode == DImode)
493 + return COSTS_N_INSNS (10);
494 + return COSTS_N_INSNS (4);
495 + case ASHIFT:
496 + case LSHIFTRT:
497 + case ASHIFTRT:
498 + case NOT:
499 + if (mode == TImode)
500 + return COSTS_N_INSNS (10);
501 +
502 + if (mode == DImode)
503 + return COSTS_N_INSNS (4);
504 + return COSTS_N_INSNS (1);
505 + case PLUS:
506 + case MINUS:
507 + case NEG:
508 + case COMPARE:
509 + case ABS:
510 + if (GET_MODE_CLASS (mode) == MODE_FLOAT)
511 + return COSTS_N_INSNS (100);
512 +
513 + if (mode == TImode)
514 + return COSTS_N_INSNS (50);
515 +
516 + if (mode == DImode)
517 + return COSTS_N_INSNS (2);
518 + return COSTS_N_INSNS (1);
519 +
520 + case MULT:
521 + {
522 + if (GET_MODE_CLASS (mode) == MODE_FLOAT)
523 + return COSTS_N_INSNS (300);
524 +
525 + if (mode == TImode)
526 + return COSTS_N_INSNS (16);
527 +
528 + if (mode == DImode)
529 + return COSTS_N_INSNS (4);
530 +
531 + if (mode == HImode)
532 + return COSTS_N_INSNS (2);
533 +
534 + return COSTS_N_INSNS (3);
535 + }
536 + case IF_THEN_ELSE:
537 + if (GET_CODE (XEXP (x, 1)) == PC || GET_CODE (XEXP (x, 2)) == PC)
538 + return COSTS_N_INSNS (4);
539 + return COSTS_N_INSNS (1);
540 + case SIGN_EXTEND:
541 + case ZERO_EXTEND:
542 + /* Sign/Zero extensions of registers cost quite much since these
543 + instructions only take one register operand which means that gcc
544 + often must insert some move instructions */
545 + if (mode == QImode || mode == HImode)
546 + return (COSTS_N_INSNS (GET_CODE (XEXP (x, 0)) == MEM ? 0 : 1));
547 + return COSTS_N_INSNS (4);
548 + case UNSPEC:
549 + /* divmod operations */
550 + if (XINT (x, 1) == UNSPEC_UDIVMODSI4_INTERNAL
551 + || XINT (x, 1) == UNSPEC_DIVMODSI4_INTERNAL)
552 + {
553 + return optimize_size ? COSTS_N_INSNS (1) : COSTS_N_INSNS (16);
554 + }
555 + /* Fallthrough */
556 + default:
557 + return COSTS_N_INSNS (1);
558 + }
559 +}
560 +
561 +static bool
562 +avr32_rtx_costs (rtx x, int code, int outer_code, int *total)
563 +{
564 + *total = avr32_rtx_costs_1 (x, code, outer_code);
565 + return true;
566 +}
567 +
568 +
569 +bool
570 +avr32_cannot_force_const_mem (rtx x ATTRIBUTE_UNUSED)
571 +{
572 + /* Do not want symbols in the constant pool when compiling pic or if using
573 + address pseudo instructions. */
574 + return ((flag_pic || TARGET_HAS_ASM_ADDR_PSEUDOS)
575 + && avr32_find_symbol (x) != NULL_RTX);
576 +}
577 +
578 +
579 +/* Table of machine attributes. */
580 +const struct attribute_spec avr32_attribute_table[] = {
581 + /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
582 + /* Interrupt Service Routines have special prologue and epilogue
583 + requirements. */
584 + {"isr", 0, 1, false, false, false, avr32_handle_isr_attribute},
585 + {"interrupt", 0, 1, false, false, false, avr32_handle_isr_attribute},
586 + {"acall", 0, 1, false, true, true, avr32_handle_acall_attribute},
587 + {"naked", 0, 0, true, false, false, avr32_handle_fndecl_attribute},
588 + {NULL, 0, 0, false, false, false, NULL}
589 +};
590 +
591 +
592 +typedef struct
593 +{
594 + const char *const arg;
595 + const unsigned long return_value;
596 +}
597 +isr_attribute_arg;
598 +
599 +static const isr_attribute_arg isr_attribute_args[] = {
600 + {"FULL", AVR32_FT_ISR_FULL},
601 + {"full", AVR32_FT_ISR_FULL},
602 + {"HALF", AVR32_FT_ISR_HALF},
603 + {"half", AVR32_FT_ISR_HALF},
604 + {"NONE", AVR32_FT_ISR_NONE},
605 + {"none", AVR32_FT_ISR_NONE},
606 + {"UNDEF", AVR32_FT_ISR_NONE},
607 + {"undef", AVR32_FT_ISR_NONE},
608 + {"SWI", AVR32_FT_ISR_NONE},
609 + {"swi", AVR32_FT_ISR_NONE},
610 + {NULL, AVR32_FT_ISR_NONE}
611 +};
612 +
613 +/* Returns the (interrupt) function type of the current
614 + function, or AVR32_FT_UNKNOWN if the type cannot be determined. */
615 +
616 +static unsigned long
617 +avr32_isr_value (tree argument)
618 +{
619 + const isr_attribute_arg *ptr;
620 + const char *arg;
621 +
622 + /* No argument - default to ISR_NONE. */
623 + if (argument == NULL_TREE)
624 + return AVR32_FT_ISR_NONE;
625 +
626 + /* Get the value of the argument. */
627 + if (TREE_VALUE (argument) == NULL_TREE
628 + || TREE_CODE (TREE_VALUE (argument)) != STRING_CST)
629 + return AVR32_FT_UNKNOWN;
630 +
631 + arg = TREE_STRING_POINTER (TREE_VALUE (argument));
632 +
633 + /* Check it against the list of known arguments. */
634 + for (ptr = isr_attribute_args; ptr->arg != NULL; ptr++)
635 + if (streq (arg, ptr->arg))
636 + return ptr->return_value;
637 +
638 + /* An unrecognized interrupt type. */
639 + return AVR32_FT_UNKNOWN;
640 +}
641 +
642 +
643 +
644 +/*
645 +These hooks specify assembly directives for creating certain kinds
646 +of integer object. The TARGET_ASM_BYTE_OP directive creates a
647 +byte-sized object, the TARGET_ASM_ALIGNED_HI_OP one creates an
648 +aligned two-byte object, and so on. Any of the hooks may be
649 +NULL, indicating that no suitable directive is available.
650 +
651 +The compiler will print these strings at the start of a new line,
652 +followed immediately by the object's initial value. In most cases,
653 +the string should contain a tab, a pseudo-op, and then another tab.
654 +*/
655 +#undef TARGET_ASM_BYTE_OP
656 +#define TARGET_ASM_BYTE_OP "\t.byte\t"
657 +#undef TARGET_ASM_ALIGNED_HI_OP
658 +#define TARGET_ASM_ALIGNED_HI_OP "\t.align 1\n\t.short\t"
659 +#undef TARGET_ASM_ALIGNED_SI_OP
660 +#define TARGET_ASM_ALIGNED_SI_OP "\t.align 2\n\t.int\t"
661 +#undef TARGET_ASM_ALIGNED_DI_OP
662 +#define TARGET_ASM_ALIGNED_DI_OP NULL
663 +#undef TARGET_ASM_ALIGNED_TI_OP
664 +#define TARGET_ASM_ALIGNED_TI_OP NULL
665 +#undef TARGET_ASM_UNALIGNED_HI_OP
666 +#define TARGET_ASM_UNALIGNED_HI_OP "\t.short\t"
667 +#undef TARGET_ASM_UNALIGNED_SI_OP
668 +#define TARGET_ASM_UNALIGNED_SI_OP "\t.int\t"
669 +#undef TARGET_ASM_UNALIGNED_DI_OP
670 +#define TARGET_ASM_UNALIGNED_DI_OP NULL
671 +#undef TARGET_ASM_UNALIGNED_TI_OP
672 +#define TARGET_ASM_UNALIGNED_TI_OP NULL
673 +
674 +#undef TARGET_SCHED_USE_DFA_PIPELINE_INTERFACE
675 +#define TARGET_SCHED_USE_DFA_PIPELINE_INTERFACE avr32_sched_use_dfa_pipeline_interface
676 +
677 +#undef TARGET_ASM_OUTPUT_MI_THUNK
678 +#define TARGET_ASM_OUTPUT_MI_THUNK avr32_output_mi_thunk
679 +
680 +
681 +static void
682 +avr32_output_mi_thunk (FILE * file,
683 + tree thunk ATTRIBUTE_UNUSED,
684 + HOST_WIDE_INT delta,
685 + HOST_WIDE_INT vcall_offset, tree function)
686 +{
687 + int mi_delta = delta;
688 + int this_regno =
689 + (avr32_return_in_memory (DECL_RESULT (function), TREE_TYPE (function)) ?
690 + INTERNAL_REGNUM (11) : INTERNAL_REGNUM (12));
691 +
692 +
693 + if (!avr32_const_ok_for_constraint_p (mi_delta, 'I', "Is21")
694 + || vcall_offset)
695 + {
696 + fprintf (file, "\tpushm\tr10\n");
697 + }
698 +
699 +
700 + if (mi_delta != 0)
701 + {
702 + if (avr32_const_ok_for_constraint_p (mi_delta, 'I', "Is21"))
703 + {
704 + fprintf (file, "\tsub\t%s, -0x%x\n", reg_names[this_regno],
705 + mi_delta);
706 + }
707 + else
708 + {
709 + /* Immediate is larger than k21 we must make us a temp register by
710 + pushing a register to the stack. */
711 + fprintf (file, "\tmov\tr10, lo(%x)\n", mi_delta);
712 + fprintf (file, "\torh\tr10, hi(%x)\n", mi_delta);
713 + fprintf (file, "\tadd\t%s, r10\n", reg_names[this_regno]);
714 + }
715 + }
716 +
717 +
718 + if (vcall_offset != 0)
719 + {
720 + fprintf (file, "\tld.w\tr10, %s[0]\n", reg_names[this_regno]);
721 + fprintf (file, "\tld.w\tr10, r10[%i]\n", (int) vcall_offset);
722 + fprintf (file, "\tadd\t%s, r10\n", reg_names[this_regno]);
723 + }
724 +
725 +
726 + if (!avr32_const_ok_for_constraint_p (mi_delta, 'I', "Is21")
727 + || vcall_offset)
728 + {
729 + fprintf (file, "\tpopm\tr10\n");
730 + }
731 +
732 + if (flag_pic)
733 + {
734 + /* Don't know how we should do this!!! For now we'll just use an
735 + extended branch instruction and hope that the function will be
736 + reached. */
737 + fprintf (file, "\tbral\t");
738 + assemble_name (file, XSTR (XEXP (DECL_RTL (function), 0), 0));
739 + fputc ('\n', file);
740 + }
741 + else
742 + {
743 + fprintf (file, "\tlddpc\tpc, 0f\n");
744 + fprintf (file, "\t.align 2\n");
745 + fputs ("0:\t.long\t", file);
746 + assemble_name (file, XSTR (XEXP (DECL_RTL (function), 0), 0));
747 + fputc ('\n', file);
748 + }
749 +}
750 +
751 +/* Implements target hook vector_mode_supported. */
752 +bool
753 +avr32_vector_mode_supported (enum machine_mode mode)
754 +{
755 + if ((mode == V2HImode) || (mode == V4QImode))
756 + return true;
757 +
758 + return false;
759 +}
760 +
761 +
762 +#undef TARGET_INIT_LIBFUNCS
763 +#define TARGET_INIT_LIBFUNCS avr32_init_libfuncs
764 +
765 +#undef TARGET_INIT_BUILTINS
766 +#define TARGET_INIT_BUILTINS avr32_init_builtins
767 +
768 +#undef TARGET_EXPAND_BUILTIN
769 +#define TARGET_EXPAND_BUILTIN avr32_expand_builtin
770 +
771 +tree int_ftype_int, int_ftype_void, short_ftype_short, void_ftype_int_int,
772 + void_ftype_ptr_int;
773 +tree void_ftype_int, void_ftype_void, int_ftype_ptr_int;
774 +tree short_ftype_short, int_ftype_int_short, int_ftype_short_short,
775 + short_ftype_short_short;
776 +tree int_ftype_int_int, longlong_ftype_int_short, longlong_ftype_short_short;
777 +tree void_ftype_int_int_int_int_int, void_ftype_int_int_int;
778 +tree longlong_ftype_int_int, void_ftype_int_int_longlong;
779 +tree int_ftype_int_int_int, longlong_ftype_longlong_int_short;
780 +tree longlong_ftype_longlong_short_short, int_ftype_int_short_short;
781 +
782 +#define def_builtin(NAME, TYPE, CODE) \
783 + lang_hooks.builtin_function ((NAME), (TYPE), (CODE), \
784 + BUILT_IN_MD, NULL, NULL_TREE)
785 +
786 +#define def_mbuiltin(MASK, NAME, TYPE, CODE) \
787 + do \
788 + { \
789 + if ((MASK)) \
790 + lang_hooks.builtin_function ((NAME), (TYPE), (CODE), \
791 + BUILT_IN_MD, NULL, NULL_TREE); \
792 + } \
793 + while (0)
794 +
795 +struct builtin_description
796 +{
797 + const unsigned int mask;
798 + const enum insn_code icode;
799 + const char *const name;
800 + const int code;
801 + const enum rtx_code comparison;
802 + const unsigned int flag;
803 + const tree *ftype;
804 +};
805 +
806 +static const struct builtin_description bdesc_2arg[] = {
807 +#define DSP_BUILTIN(code, builtin, ftype) \
808 + { 1, CODE_FOR_##code, "__builtin_" #code , \
809 + AVR32_BUILTIN_##builtin, 0, 0, ftype }
810 +
811 + DSP_BUILTIN (mulsathh_h, MULSATHH_H, &short_ftype_short_short),
812 + DSP_BUILTIN (mulsathh_w, MULSATHH_W, &int_ftype_short_short),
813 + DSP_BUILTIN (mulsatrndhh_h, MULSATRNDHH_H, &short_ftype_short_short),
814 + DSP_BUILTIN (mulsatrndwh_w, MULSATRNDWH_W, &int_ftype_int_short),
815 + DSP_BUILTIN (mulsatwh_w, MULSATWH_W, &int_ftype_int_short),
816 + DSP_BUILTIN (satadd_h, SATADD_H, &short_ftype_short_short),
817 + DSP_BUILTIN (satsub_h, SATSUB_H, &short_ftype_short_short),
818 + DSP_BUILTIN (satadd_w, SATADD_W, &int_ftype_int_int),
819 + DSP_BUILTIN (satsub_w, SATSUB_W, &int_ftype_int_int),
820 + DSP_BUILTIN (mulwh_d, MULWH_D, &longlong_ftype_int_short),
821 + DSP_BUILTIN (mulnwh_d, MULNWH_D, &longlong_ftype_int_short)
822 +};
823 +
824 +
825 +void
826 +avr32_init_builtins (void)
827 +{
828 + unsigned int i;
829 + const struct builtin_description *d;
830 + tree endlink = void_list_node;
831 + tree int_endlink = tree_cons (NULL_TREE, integer_type_node, endlink);
832 + tree longlong_endlink =
833 + tree_cons (NULL_TREE, long_long_integer_type_node, endlink);
834 + tree short_endlink =
835 + tree_cons (NULL_TREE, short_integer_type_node, endlink);
836 + tree void_endlink = tree_cons (NULL_TREE, void_type_node, endlink);
837 +
838 + /* int func (int) */
839 + int_ftype_int = build_function_type (integer_type_node, int_endlink);
840 +
841 + /* short func (short) */
842 + short_ftype_short
843 + = build_function_type (short_integer_type_node, short_endlink);
844 +
845 + /* short func (short, short) */
846 + short_ftype_short_short
847 + = build_function_type (short_integer_type_node,
848 + tree_cons (NULL_TREE, short_integer_type_node,
849 + short_endlink));
850 +
851 + /* long long func (long long, short, short) */
852 + longlong_ftype_longlong_short_short
853 + = build_function_type (long_long_integer_type_node,
854 + tree_cons (NULL_TREE, long_long_integer_type_node,
855 + tree_cons (NULL_TREE,
856 + short_integer_type_node,
857 + short_endlink)));
858 +
859 + /* long long func (short, short) */
860 + longlong_ftype_short_short
861 + = build_function_type (long_long_integer_type_node,
862 + tree_cons (NULL_TREE, short_integer_type_node,
863 + short_endlink));
864 +
865 + /* int func (int, int) */
866 + int_ftype_int_int
867 + = build_function_type (integer_type_node,
868 + tree_cons (NULL_TREE, integer_type_node,
869 + int_endlink));
870 +
871 + /* long long func (int, int) */
872 + longlong_ftype_int_int
873 + = build_function_type (long_long_integer_type_node,
874 + tree_cons (NULL_TREE, integer_type_node,
875 + int_endlink));
876 +
877 + /* long long int func (long long, int, short) */
878 + longlong_ftype_longlong_int_short
879 + = build_function_type (long_long_integer_type_node,
880 + tree_cons (NULL_TREE, long_long_integer_type_node,
881 + tree_cons (NULL_TREE, integer_type_node,
882 + short_endlink)));
883 +
884 + /* long long int func (int, short) */
885 + longlong_ftype_int_short
886 + = build_function_type (long_long_integer_type_node,
887 + tree_cons (NULL_TREE, integer_type_node,
888 + short_endlink));
889 +
890 + /* int func (int, short, short) */
891 + int_ftype_int_short_short
892 + = build_function_type (integer_type_node,
893 + tree_cons (NULL_TREE, integer_type_node,
894 + tree_cons (NULL_TREE,
895 + short_integer_type_node,
896 + short_endlink)));
897 +
898 + /* int func (short, short) */
899 + int_ftype_short_short
900 + = build_function_type (integer_type_node,
901 + tree_cons (NULL_TREE, short_integer_type_node,
902 + short_endlink));
903 +
904 + /* int func (int, short) */
905 + int_ftype_int_short
906 + = build_function_type (integer_type_node,
907 + tree_cons (NULL_TREE, integer_type_node,
908 + short_endlink));
909 +
910 + /* void func (int, int) */
911 + void_ftype_int_int
912 + = build_function_type (void_type_node,
913 + tree_cons (NULL_TREE, integer_type_node,
914 + int_endlink));
915 +
916 + /* void func (int, int, int) */
917 + void_ftype_int_int_int
918 + = build_function_type (void_type_node,
919 + tree_cons (NULL_TREE, integer_type_node,
920 + tree_cons (NULL_TREE, integer_type_node,
921 + int_endlink)));
922 +
923 + /* void func (int, int, long long) */
924 + void_ftype_int_int_longlong
925 + = build_function_type (void_type_node,
926 + tree_cons (NULL_TREE, integer_type_node,
927 + tree_cons (NULL_TREE, integer_type_node,
928 + longlong_endlink)));
929 +
930 + /* void func (int, int, int, int, int) */
931 + void_ftype_int_int_int_int_int
932 + = build_function_type (void_type_node,
933 + tree_cons (NULL_TREE, integer_type_node,
934 + tree_cons (NULL_TREE, integer_type_node,
935 + tree_cons (NULL_TREE,
936 + integer_type_node,
937 + tree_cons
938 + (NULL_TREE,
939 + integer_type_node,
940 + int_endlink)))));
941 +
942 + /* void func (void *, int) */
943 + void_ftype_ptr_int
944 + = build_function_type (void_type_node,
945 + tree_cons (NULL_TREE, ptr_type_node, int_endlink));
946 +
947 + /* void func (int) */
948 + void_ftype_int = build_function_type (void_type_node, int_endlink);
949 +
950 + /* void func (void) */
951 + void_ftype_void = build_function_type (void_type_node, void_endlink);
952 +
953 + /* int func (void) */
954 + int_ftype_void = build_function_type (integer_type_node, void_endlink);
955 +
956 + /* int func (void *, int) */
957 + int_ftype_ptr_int
958 + = build_function_type (integer_type_node,
959 + tree_cons (NULL_TREE, ptr_type_node, int_endlink));
960 +
961 + /* int func (int, int, int) */
962 + int_ftype_int_int_int
963 + = build_function_type (integer_type_node,
964 + tree_cons (NULL_TREE, integer_type_node,
965 + tree_cons (NULL_TREE, integer_type_node,
966 + int_endlink)));
967 +
968 + /* Initialize avr32 builtins. */
969 + def_builtin ("__builtin_mfsr", int_ftype_int, AVR32_BUILTIN_MFSR);
970 + def_builtin ("__builtin_mtsr", void_ftype_int_int, AVR32_BUILTIN_MTSR);
971 + def_builtin ("__builtin_mfdr", int_ftype_int, AVR32_BUILTIN_MFDR);
972 + def_builtin ("__builtin_mtdr", void_ftype_int_int, AVR32_BUILTIN_MTDR);
973 + def_builtin ("__builtin_cache", void_ftype_ptr_int, AVR32_BUILTIN_CACHE);
974 + def_builtin ("__builtin_sync", void_ftype_int, AVR32_BUILTIN_SYNC);
975 + def_builtin ("__builtin_tlbr", void_ftype_void, AVR32_BUILTIN_TLBR);
976 + def_builtin ("__builtin_tlbs", void_ftype_void, AVR32_BUILTIN_TLBS);
977 + def_builtin ("__builtin_tlbw", void_ftype_void, AVR32_BUILTIN_TLBW);
978 + def_builtin ("__builtin_breakpoint", void_ftype_void,
979 + AVR32_BUILTIN_BREAKPOINT);
980 + def_builtin ("__builtin_xchg", int_ftype_ptr_int, AVR32_BUILTIN_XCHG);
981 + def_builtin ("__builtin_ldxi", int_ftype_ptr_int, AVR32_BUILTIN_LDXI);
982 + def_builtin ("__builtin_bswap_16", short_ftype_short,
983 + AVR32_BUILTIN_BSWAP16);
984 + def_builtin ("__builtin_bswap_32", int_ftype_int, AVR32_BUILTIN_BSWAP32);
985 + def_builtin ("__builtin_cop", void_ftype_int_int_int_int_int,
986 + AVR32_BUILTIN_COP);
987 + def_builtin ("__builtin_mvcr_w", int_ftype_int_int, AVR32_BUILTIN_MVCR_W);
988 + def_builtin ("__builtin_mvrc_w", void_ftype_int_int_int,
989 + AVR32_BUILTIN_MVRC_W);
990 + def_builtin ("__builtin_mvcr_d", longlong_ftype_int_int,
991 + AVR32_BUILTIN_MVCR_D);
992 + def_builtin ("__builtin_mvrc_d", void_ftype_int_int_longlong,
993 + AVR32_BUILTIN_MVRC_D);
994 + def_builtin ("__builtin_sats", int_ftype_int_int_int, AVR32_BUILTIN_SATS);
995 + def_builtin ("__builtin_satu", int_ftype_int_int_int, AVR32_BUILTIN_SATU);
996 + def_builtin ("__builtin_satrnds", int_ftype_int_int_int,
997 + AVR32_BUILTIN_SATRNDS);
998 + def_builtin ("__builtin_satrndu", int_ftype_int_int_int,
999 + AVR32_BUILTIN_SATRNDU);
1000 + def_builtin ("__builtin_musfr", void_ftype_int, AVR32_BUILTIN_MUSFR);
1001 + def_builtin ("__builtin_mustr", int_ftype_void, AVR32_BUILTIN_MUSTR);
1002 + def_builtin ("__builtin_macsathh_w", int_ftype_int_short_short,
1003 + AVR32_BUILTIN_MACSATHH_W);
1004 + def_builtin ("__builtin_macwh_d", longlong_ftype_longlong_int_short,
1005 + AVR32_BUILTIN_MACWH_D);
1006 + def_builtin ("__builtin_machh_d", longlong_ftype_longlong_short_short,
1007 + AVR32_BUILTIN_MACHH_D);
1008 +
1009 + /* Add all builtins that are more or less simple operations on two
1010 + operands. */
1011 + for (i = 0, d = bdesc_2arg; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
1012 + {
1013 + /* Use one of the operands; the target can have a different mode for
1014 + mask-generating compares. */
1015 +
1016 + if (d->name == 0)
1017 + continue;
1018 +
1019 + def_mbuiltin (d->mask, d->name, *(d->ftype), d->code);
1020 + }
1021 +}
1022 +
1023 +
1024 +/* Subroutine of avr32_expand_builtin to take care of binop insns. */
1025 +
1026 +static rtx
1027 +avr32_expand_binop_builtin (enum insn_code icode, tree arglist, rtx target)
1028 +{
1029 + rtx pat;
1030 + tree arg0 = TREE_VALUE (arglist);
1031 + tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
1032 + rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
1033 + rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
1034 + enum machine_mode tmode = insn_data[icode].operand[0].mode;
1035 + enum machine_mode mode0 = insn_data[icode].operand[1].mode;
1036 + enum machine_mode mode1 = insn_data[icode].operand[2].mode;
1037 +
1038 + if (!target
1039 + || GET_MODE (target) != tmode
1040 + || !(*insn_data[icode].operand[0].predicate) (target, tmode))
1041 + target = gen_reg_rtx (tmode);
1042 +
1043 + /* In case the insn wants input operands in modes different from the
1044 + result, abort. */
1045 + if (!(*insn_data[icode].operand[1].predicate) (op0, mode0))
1046 + {
1047 + /* If op0 is already a reg we must cast it to the correct mode. */
1048 + if (REG_P (op0))
1049 + op0 = convert_to_mode (mode0, op0, 1);
1050 + else
1051 + op0 = copy_to_mode_reg (mode0, op0);
1052 + }
1053 + if (!(*insn_data[icode].operand[2].predicate) (op1, mode1))
1054 + {
1055 + /* If op1 is already a reg we must cast it to the correct mode. */
1056 + if (REG_P (op1))
1057 + op1 = convert_to_mode (mode1, op1, 1);
1058 + else
1059 + op1 = copy_to_mode_reg (mode1, op1);
1060 + }
1061 + pat = GEN_FCN (icode) (target, op0, op1);
1062 + if (!pat)
1063 + return 0;
1064 + emit_insn (pat);
1065 + return target;
1066 +}
1067 +
1068 +/* Expand an expression EXP that calls a built-in function,
1069 + with result going to TARGET if that's convenient
1070 + (and in mode MODE if that's convenient).
1071 + SUBTARGET may be used as the target for computing one of EXP's operands.
1072 + IGNORE is nonzero if the value is to be ignored. */
1073 +
1074 +rtx
1075 +avr32_expand_builtin (tree exp,
1076 + rtx target,
1077 + rtx subtarget ATTRIBUTE_UNUSED,
1078 + enum machine_mode mode ATTRIBUTE_UNUSED,
1079 + int ignore ATTRIBUTE_UNUSED)
1080 +{
1081 + const struct builtin_description *d;
1082 + unsigned int i;
1083 + enum insn_code icode;
1084 + tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
1085 + tree arglist = TREE_OPERAND (exp, 1);
1086 + tree arg0, arg1, arg2;
1087 + rtx op0, op1, op2, pat;
1088 + enum machine_mode tmode, mode0, mode1;
1089 + enum machine_mode arg0_mode;
1090 + int fcode = DECL_FUNCTION_CODE (fndecl);
1091 +
1092 + switch (fcode)
1093 + {
1094 + default:
1095 + break;
1096 +
1097 + case AVR32_BUILTIN_SATS:
1098 + case AVR32_BUILTIN_SATU:
1099 + case AVR32_BUILTIN_SATRNDS:
1100 + case AVR32_BUILTIN_SATRNDU:
1101 + {
1102 + const char *fname;
1103 + switch (fcode)
1104 + {
1105 + default:
1106 + case AVR32_BUILTIN_SATS:
1107 + icode = CODE_FOR_sats;
1108 + fname = "sats";
1109 + break;
1110 + case AVR32_BUILTIN_SATU:
1111 + icode = CODE_FOR_satu;
1112 + fname = "satu";
1113 + break;
1114 + case AVR32_BUILTIN_SATRNDS:
1115 + icode = CODE_FOR_satrnds;
1116 + fname = "satrnds";
1117 + break;
1118 + case AVR32_BUILTIN_SATRNDU:
1119 + icode = CODE_FOR_satrndu;
1120 + fname = "satrndu";
1121 + break;
1122 + }
1123 +
1124 + arg0 = TREE_VALUE (arglist);
1125 + arg1 = TREE_VALUE (TREE_CHAIN (arglist));
1126 + arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
1127 + op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
1128 + op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
1129 + op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
1130 +
1131 + tmode = insn_data[icode].operand[0].mode;
1132 +
1133 +
1134 + if (target == 0
1135 + || GET_MODE (target) != tmode
1136 + || !(*insn_data[icode].operand[0].predicate) (target, tmode))
1137 + target = gen_reg_rtx (tmode);
1138 +
1139 +
1140 + if (!(*insn_data[icode].operand[0].predicate) (op0, GET_MODE (op0)))
1141 + {
1142 + op0 = copy_to_mode_reg (insn_data[icode].operand[0].mode, op0);
1143 + }
1144 +
1145 + if (!(*insn_data[icode].operand[1].predicate) (op1, SImode))
1146 + {
1147 + error ("Parameter 2 to __builtin_%s should be a constant number.",
1148 + fname);
1149 + return NULL_RTX;
1150 + }
1151 +
1152 + if (!(*insn_data[icode].operand[1].predicate) (op2, SImode))
1153 + {
1154 + error ("Parameter 3 to __builtin_%s should be a constant number.",
1155 + fname);
1156 + return NULL_RTX;
1157 + }
1158 +
1159 + emit_move_insn (target, op0);
1160 + pat = GEN_FCN (icode) (target, op1, op2);
1161 + if (!pat)
1162 + return 0;
1163 + emit_insn (pat);
1164 +
1165 + return target;
1166 + }
1167 + case AVR32_BUILTIN_MUSTR:
1168 + icode = CODE_FOR_mustr;
1169 + tmode = insn_data[icode].operand[0].mode;
1170 +
1171 + if (target == 0
1172 + || GET_MODE (target) != tmode
1173 + || !(*insn_data[icode].operand[0].predicate) (target, tmode))
1174 + target = gen_reg_rtx (tmode);
1175 + pat = GEN_FCN (icode) (target);
1176 + if (!pat)
1177 + return 0;
1178 + emit_insn (pat);
1179 + return target;
1180 +
1181 + case AVR32_BUILTIN_MFSR:
1182 + icode = CODE_FOR_mfsr;
1183 + arg0 = TREE_VALUE (arglist);
1184 + op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
1185 + tmode = insn_data[icode].operand[0].mode;
1186 + mode0 = insn_data[icode].operand[1].mode;
1187 +
1188 + if (!(*insn_data[icode].operand[1].predicate) (op0, mode0))
1189 + {
1190 + error ("Parameter 1 to __builtin_mfsr must be a constant number");
1191 + }
1192 +
1193 + if (target == 0
1194 + || GET_MODE (target) != tmode
1195 + || !(*insn_data[icode].operand[0].predicate) (target, tmode))
1196 + target = gen_reg_rtx (tmode);
1197 + pat = GEN_FCN (icode) (target, op0);
1198 + if (!pat)
1199 + return 0;
1200 + emit_insn (pat);
1201 + return target;
1202 + case AVR32_BUILTIN_MTSR:
1203 + icode = CODE_FOR_mtsr;
1204 + arg0 = TREE_VALUE (arglist);
1205 + arg1 = TREE_VALUE (TREE_CHAIN (arglist));
1206 + op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
1207 + op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
1208 + mode0 = insn_data[icode].operand[0].mode;
1209 + mode1 = insn_data[icode].operand[1].mode;
1210 +
1211 + if (!(*insn_data[icode].operand[0].predicate) (op0, mode0))
1212 + {
1213 + error ("Parameter 1 to __builtin_mtsr must be a constant number");
1214 + return gen_reg_rtx (mode0);
1215 + }
1216 + if (!(*insn_data[icode].operand[1].predicate) (op1, mode1))
1217 + op1 = copy_to_mode_reg (mode1, op1);
1218 + pat = GEN_FCN (icode) (op0, op1);
1219 + if (!pat)
1220 + return 0;
1221 + emit_insn (pat);
1222 + return NULL_RTX;
1223 + case AVR32_BUILTIN_MFDR:
1224 + icode = CODE_FOR_mfdr;
1225 + arg0 = TREE_VALUE (arglist);
1226 + op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
1227 + tmode = insn_data[icode].operand[0].mode;
1228 + mode0 = insn_data[icode].operand[1].mode;
1229 +
1230 + if (!(*insn_data[icode].operand[1].predicate) (op0, mode0))
1231 + {
1232 + error ("Parameter 1 to __builtin_mfdr must be a constant number");
1233 + }
1234 +
1235 + if (target == 0
1236 + || GET_MODE (target) != tmode
1237 + || !(*insn_data[icode].operand[0].predicate) (target, tmode))
1238 + target = gen_reg_rtx (tmode);
1239 + pat = GEN_FCN (icode) (target, op0);
1240 + if (!pat)
1241 + return 0;
1242 + emit_insn (pat);
1243 + return target;
1244 + case AVR32_BUILTIN_MTDR:
1245 + icode = CODE_FOR_mtdr;
1246 + arg0 = TREE_VALUE (arglist);
1247 + arg1 = TREE_VALUE (TREE_CHAIN (arglist));
1248 + op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
1249 + op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
1250 + mode0 = insn_data[icode].operand[0].mode;
1251 + mode1 = insn_data[icode].operand[1].mode;
1252 +
1253 + if (!(*insn_data[icode].operand[0].predicate) (op0, mode0))
1254 + {
1255 + error ("Parameter 1 to __builtin_mtdr must be a constant number");
1256 + return gen_reg_rtx (mode0);
1257 + }
1258 + if (!(*insn_data[icode].operand[1].predicate) (op1, mode1))
1259 + op1 = copy_to_mode_reg (mode1, op1);
1260 + pat = GEN_FCN (icode) (op0, op1);
1261 + if (!pat)
1262 + return 0;
1263 + emit_insn (pat);
1264 + return NULL_RTX;
1265 + case AVR32_BUILTIN_CACHE:
1266 + icode = CODE_FOR_cache;
1267 + arg0 = TREE_VALUE (arglist);
1268 + arg1 = TREE_VALUE (TREE_CHAIN (arglist));
1269 + op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
1270 + op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
1271 + mode0 = insn_data[icode].operand[0].mode;
1272 + mode1 = insn_data[icode].operand[1].mode;
1273 +
1274 + if (!(*insn_data[icode].operand[1].predicate) (op1, mode1))
1275 + {
1276 + error ("Parameter 2 to __builtin_cache must be a constant number");
1277 + return gen_reg_rtx (mode1);
1278 + }
1279 +
1280 + if (!(*insn_data[icode].operand[0].predicate) (op0, mode0))
1281 + op0 = copy_to_mode_reg (mode0, op0);
1282 +
1283 + pat = GEN_FCN (icode) (op0, op1);
1284 + if (!pat)
1285 + return 0;
1286 + emit_insn (pat);
1287 + return NULL_RTX;
1288 + case AVR32_BUILTIN_SYNC:
1289 + case AVR32_BUILTIN_MUSFR:
1290 + {
1291 + const char *fname;
1292 + switch (fcode)
1293 + {
1294 + default:
1295 + case AVR32_BUILTIN_SYNC:
1296 + icode = CODE_FOR_sync;
1297 + fname = "sync";
1298 + break;
1299 + case AVR32_BUILTIN_MUSFR:
1300 + icode = CODE_FOR_musfr;
1301 + fname = "musfr";
1302 + break;
1303 + }
1304 +
1305 + arg0 = TREE_VALUE (arglist);
1306 + op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
1307 + mode0 = insn_data[icode].operand[0].mode;
1308 +
1309 + if (!(*insn_data[icode].operand[0].predicate) (op0, mode0))
1310 + {
1311 + if (icode == CODE_FOR_musfr)
1312 + op0 = copy_to_mode_reg (mode0, op0);
1313 + else
1314 + {
1315 + error ("Parameter to __builtin_%s is illegal.", fname);
1316 + return gen_reg_rtx (mode0);
1317 + }
1318 + }
1319 + pat = GEN_FCN (icode) (op0);
1320 + if (!pat)
1321 + return 0;
1322 + emit_insn (pat);
1323 + return NULL_RTX;
1324 + }
1325 + case AVR32_BUILTIN_TLBR:
1326 + icode = CODE_FOR_tlbr;
1327 + pat = GEN_FCN (icode) (NULL_RTX);
1328 + if (!pat)
1329 + return 0;
1330 + emit_insn (pat);
1331 + return NULL_RTX;
1332 + case AVR32_BUILTIN_TLBS:
1333 + icode = CODE_FOR_tlbs;
1334 + pat = GEN_FCN (icode) (NULL_RTX);
1335 + if (!pat)
1336 + return 0;
1337 + emit_insn (pat);
1338 + return NULL_RTX;
1339 + case AVR32_BUILTIN_TLBW:
1340 + icode = CODE_FOR_tlbw;
1341 + pat = GEN_FCN (icode) (NULL_RTX);
1342 + if (!pat)
1343 + return 0;
1344 + emit_insn (pat);
1345 + return NULL_RTX;
1346 + case AVR32_BUILTIN_BREAKPOINT:
1347 + icode = CODE_FOR_breakpoint;
1348 + pat = GEN_FCN (icode) (NULL_RTX);
1349 + if (!pat)
1350 + return 0;
1351 + emit_insn (pat);
1352 + return NULL_RTX;
1353 + case AVR32_BUILTIN_XCHG:
1354 + icode = CODE_FOR_xchg;
1355 + arg0 = TREE_VALUE (arglist);
1356 + arg1 = TREE_VALUE (TREE_CHAIN (arglist));
1357 + op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
1358 + op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
1359 + tmode = insn_data[icode].operand[0].mode;
1360 + mode0 = insn_data[icode].operand[1].mode;
1361 + mode1 = insn_data[icode].operand[3].mode;
1362 +
1363 + if (!(*insn_data[icode].operand[3].predicate) (op1, mode1))
1364 + {
1365 + op1 = copy_to_mode_reg (mode1, op1);
1366 + }
1367 +
1368 + if (!(*insn_data[icode].operand[2].predicate) (op0, mode0))
1369 + {
1370 + op0 = copy_to_mode_reg (mode0, op0);
1371 + }
1372 +
1373 + if (target == 0
1374 + || GET_MODE (target) != tmode
1375 + || !(*insn_data[icode].operand[0].predicate) (target, tmode))
1376 + target = gen_reg_rtx (tmode);
1377 + pat = GEN_FCN (icode) (target, op0, op0, op1);
1378 + if (!pat)
1379 + return 0;
1380 + emit_insn (pat);
1381 + return target;
1382 + case AVR32_BUILTIN_LDXI:
1383 + icode = CODE_FOR_ldxi;
1384 + arg0 = TREE_VALUE (arglist);
1385 + arg1 = TREE_VALUE (TREE_CHAIN (arglist));
1386 + arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
1387 + op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
1388 + op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
1389 + op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
1390 + tmode = insn_data[icode].operand[0].mode;
1391 + mode0 = insn_data[icode].operand[1].mode;
1392 + mode1 = insn_data[icode].operand[2].mode;
1393 +
1394 + if (!(*insn_data[icode].operand[1].predicate) (op0, mode0))
1395 + {
1396 + op0 = copy_to_mode_reg (mode0, op0);
1397 + }
1398 +
1399 + if (!(*insn_data[icode].operand[2].predicate) (op1, mode1))
1400 + {
1401 + op1 = copy_to_mode_reg (mode1, op1);
1402 + }
1403 +
1404 + if (!(*insn_data[icode].operand[3].predicate) (op2, SImode))
1405 + {
1406 + error
1407 + ("Parameter 3 to __builtin_ldxi must be a valid extract shift operand: (0|8|16|24)");
1408 + return gen_reg_rtx (mode0);
1409 + }
1410 +
1411 + if (target == 0
1412 + || GET_MODE (target) != tmode
1413 + || !(*insn_data[icode].operand[0].predicate) (target, tmode))
1414 + target = gen_reg_rtx (tmode);
1415 + pat = GEN_FCN (icode) (target, op0, op1, op2);
1416 + if (!pat)
1417 + return 0;
1418 + emit_insn (pat);
1419 + return target;
1420 + case AVR32_BUILTIN_BSWAP16:
1421 + {
1422 + icode = CODE_FOR_bswap_16;
1423 + arg0 = TREE_VALUE (arglist);
1424 + arg0_mode = TYPE_MODE (TREE_TYPE (arg0));
1425 + mode0 = insn_data[icode].operand[1].mode;
1426 + if (arg0_mode != mode0)
1427 + arg0 = build1 (NOP_EXPR,
1428 + (*lang_hooks.types.type_for_mode) (mode0, 0), arg0);
1429 +
1430 + op0 = expand_expr (arg0, NULL_RTX, HImode, 0);
1431 + tmode = insn_data[icode].operand[0].mode;
1432 +
1433 +
1434 + if (!(*insn_data[icode].operand[1].predicate) (op0, mode0))
1435 + {
1436 + op0 = copy_to_mode_reg (mode0, op0);
1437 + }
1438 +
1439 + if (target == 0
1440 + || GET_MODE (target) != tmode
1441 + || !(*insn_data[icode].operand[0].predicate) (target, tmode))
1442 + {
1443 + target = gen_reg_rtx (tmode);
1444 + }
1445 +
1446 +
1447 + pat = GEN_FCN (icode) (target, op0);
1448 + if (!pat)
1449 + return 0;
1450 + emit_insn (pat);
1451 +
1452 + return target;
1453 + }
1454 + case AVR32_BUILTIN_BSWAP32:
1455 + {
1456 + icode = CODE_FOR_bswap_32;
1457 + arg0 = TREE_VALUE (arglist);
1458 + op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
1459 + tmode = insn_data[icode].operand[0].mode;
1460 + mode0 = insn_data[icode].operand[1].mode;
1461 +
1462 + if (!(*insn_data[icode].operand[1].predicate) (op0, mode0))
1463 + {
1464 + op0 = copy_to_mode_reg (mode0, op0);
1465 + }
1466 +
1467 + if (target == 0
1468 + || GET_MODE (target) != tmode
1469 + || !(*insn_data[icode].operand[0].predicate) (target, tmode))
1470 + target = gen_reg_rtx (tmode);
1471 +
1472 +
1473 + pat = GEN_FCN (icode) (target, op0);
1474 + if (!pat)
1475 + return 0;
1476 + emit_insn (pat);
1477 +
1478 + return target;
1479 + }
1480 + case AVR32_BUILTIN_MVCR_W:
1481 + case AVR32_BUILTIN_MVCR_D:
1482 + {
1483 + arg0 = TREE_VALUE (arglist);
1484 + arg1 = TREE_VALUE (TREE_CHAIN (arglist));
1485 + op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
1486 + op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
1487 +
1488 + if (fcode == AVR32_BUILTIN_MVCR_W)
1489 + icode = CODE_FOR_mvcrsi;
1490 + else
1491 + icode = CODE_FOR_mvcrdi;
1492 +
1493 + tmode = insn_data[icode].operand[0].mode;
1494 +
1495 + if (target == 0
1496 + || GET_MODE (target) != tmode
1497 + || !(*insn_data[icode].operand[0].predicate) (target, tmode))
1498 + target = gen_reg_rtx (tmode);
1499 +
1500 + if (!(*insn_data[icode].operand[1].predicate) (op0, SImode))
1501 + {
1502 + error
1503 + ("Parameter 1 to __builtin_mvcr_w/__builtin_mvcr_d is not a valid coprocessor number.");
1504 + error ("Number should be between 0 and 7.");
1505 + return NULL_RTX;
1506 + }
1507 +
1508 + if (!(*insn_data[icode].operand[2].predicate) (op1, SImode))
1509 + {
1510 + error
1511 + ("Parameter 2 to __builtin_mvcr_w/__builtin_mvcr_d is not a valid coprocessor register number.");
1512 + error ("Number should be between 0 and 15.");
1513 + return NULL_RTX;
1514 + }
1515 +
1516 + pat = GEN_FCN (icode) (target, op0, op1);
1517 + if (!pat)
1518 + return 0;
1519 + emit_insn (pat);
1520 +
1521 + return target;
1522 + }
1523 + case AVR32_BUILTIN_MACSATHH_W:
1524 + case AVR32_BUILTIN_MACWH_D:
1525 + case AVR32_BUILTIN_MACHH_D:
1526 + {
1527 + arg0 = TREE_VALUE (arglist);
1528 + arg1 = TREE_VALUE (TREE_CHAIN (arglist));
1529 + arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
1530 + op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
1531 + op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
1532 + op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
1533 +
1534 + icode = ((fcode == AVR32_BUILTIN_MACSATHH_W) ? CODE_FOR_macsathh_w :
1535 + (fcode == AVR32_BUILTIN_MACWH_D) ? CODE_FOR_macwh_d :
1536 + CODE_FOR_machh_d);
1537 +
1538 + tmode = insn_data[icode].operand[0].mode;
1539 + mode0 = insn_data[icode].operand[1].mode;
1540 + mode1 = insn_data[icode].operand[2].mode;
1541 +
1542 +
1543 + if (!target
1544 + || GET_MODE (target) != tmode
1545 + || !(*insn_data[icode].operand[0].predicate) (target, tmode))
1546 + target = gen_reg_rtx (tmode);
1547 +
1548 + if (!(*insn_data[icode].operand[0].predicate) (op0, tmode))
1549 + {
1550 + /* If op0 is already a reg we must cast it to the correct mode. */
1551 + if (REG_P (op0))
1552 + op0 = convert_to_mode (tmode, op0, 1);
1553 + else
1554 + op0 = copy_to_mode_reg (tmode, op0);
1555 + }
1556 +
1557 + if (!(*insn_data[icode].operand[1].predicate) (op1, mode0))
1558 + {
1559 + /* If op1 is already a reg we must cast it to the correct mode. */
1560 + if (REG_P (op1))
1561 + op1 = convert_to_mode (mode0, op1, 1);
1562 + else
1563 + op1 = copy_to_mode_reg (mode0, op1);
1564 + }
1565 +
1566 + if (!(*insn_data[icode].operand[2].predicate) (op2, mode1))
1567 + {
1568 + /* If op2 is already a reg we must cast it to the correct mode. */
1569 + if (REG_P (op2))
1570 + op2 = convert_to_mode (mode1, op2, 1);
1571 + else
1572 + op2 = copy_to_mode_reg (mode1, op2);
1573 + }
1574 +
1575 + emit_move_insn (target, op0);
1576 +
1577 + pat = GEN_FCN (icode) (target, op1, op2);
1578 + if (!pat)
1579 + return 0;
1580 + emit_insn (pat);
1581 + return target;
1582 + }
1583 + case AVR32_BUILTIN_MVRC_W:
1584 + case AVR32_BUILTIN_MVRC_D:
1585 + {
1586 + arg0 = TREE_VALUE (arglist);
1587 + arg1 = TREE_VALUE (TREE_CHAIN (arglist));
1588 + arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
1589 + op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
1590 + op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
1591 + op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
1592 +
1593 + if (fcode == AVR32_BUILTIN_MVRC_W)
1594 + icode = CODE_FOR_mvrcsi;
1595 + else
1596 + icode = CODE_FOR_mvrcdi;
1597 +
1598 + if (!(*insn_data[icode].operand[0].predicate) (op0, SImode))
1599 + {
1600 + error ("Parameter 1 is not a valid coprocessor number.");
1601 + error ("Number should be between 0 and 7.");
1602 + return NULL_RTX;
1603 + }
1604 +
1605 + if (!(*insn_data[icode].operand[1].predicate) (op1, SImode))
1606 + {
1607 + error ("Parameter 2 is not a valid coprocessor register number.");
1608 + error ("Number should be between 0 and 15.");
1609 + return NULL_RTX;
1610 + }
1611 +
1612 + if (GET_CODE (op2) == CONST_INT
1613 + || GET_CODE (op2) == CONST
1614 + || GET_CODE (op2) == SYMBOL_REF || GET_CODE (op2) == LABEL_REF)
1615 + {
1616 + op2 = force_const_mem (insn_data[icode].operand[2].mode, op2);
1617 + }
1618 +
1619 + if (!(*insn_data[icode].operand[2].predicate) (op2, GET_MODE (op2)))
1620 + op2 = copy_to_mode_reg (insn_data[icode].operand[2].mode, op2);
1621 +
1622 +
1623 + pat = GEN_FCN (icode) (op0, op1, op2);
1624 + if (!pat)
1625 + return 0;
1626 + emit_insn (pat);
1627 +
1628 + return NULL_RTX;
1629 + }
1630 + case AVR32_BUILTIN_COP:
1631 + {
1632 + rtx op3, op4;
1633 + tree arg3, arg4;
1634 + icode = CODE_FOR_cop;
1635 + arg0 = TREE_VALUE (arglist);
1636 + arg1 = TREE_VALUE (TREE_CHAIN (arglist));
1637 + arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
1638 + arg3 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arglist))));
1639 + arg4 =
1640 + TREE_VALUE (TREE_CHAIN
1641 + (TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arglist)))));
1642 + op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
1643 + op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
1644 + op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
1645 + op3 = expand_expr (arg3, NULL_RTX, VOIDmode, 0);
1646 + op4 = expand_expr (arg4, NULL_RTX, VOIDmode, 0);
1647 +
1648 + if (!(*insn_data[icode].operand[0].predicate) (op0, SImode))
1649 + {
1650 + error
1651 + ("Parameter 1 to __builtin_cop is not a valid coprocessor number.");
1652 + error ("Number should be between 0 and 7.");
1653 + return NULL_RTX;
1654 + }
1655 +
1656 + if (!(*insn_data[icode].operand[1].predicate) (op1, SImode))
1657 + {
1658 + error
1659 + ("Parameter 2 to __builtin_cop is not a valid coprocessor register number.");
1660 + error ("Number should be between 0 and 15.");
1661 + return NULL_RTX;
1662 + }
1663 +
1664 + if (!(*insn_data[icode].operand[2].predicate) (op2, SImode))
1665 + {
1666 + error
1667 + ("Parameter 3 to __builtin_cop is not a valid coprocessor register number.");
1668 + error ("Number should be between 0 and 15.");
1669 + return NULL_RTX;
1670 + }
1671 +
1672 + if (!(*insn_data[icode].operand[3].predicate) (op3, SImode))
1673 + {
1674 + error
1675 + ("Parameter 4 to __builtin_cop is not a valid coprocessor register number.");
1676 + error ("Number should be between 0 and 15.");
1677 + return NULL_RTX;
1678 + }
1679 +
1680 + if (!(*insn_data[icode].operand[4].predicate) (op4, SImode))
1681 + {
1682 + error
1683 + ("Parameter 5 to __builtin_cop is not a valid coprocessor operation.");
1684 + error ("Number should be between 0 and 127.");
1685 + return NULL_RTX;
1686 + }
1687 +
1688 + pat = GEN_FCN (icode) (op0, op1, op2, op3, op4);
1689 + if (!pat)
1690 + return 0;
1691 + emit_insn (pat);
1692 +
1693 + return target;
1694 + }
1695 + }
1696 +
1697 + for (i = 0, d = bdesc_2arg; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
1698 + if (d->code == fcode)
1699 + return avr32_expand_binop_builtin (d->icode, arglist, target);
1700 +
1701 +
1702 + /* @@@ Should really do something sensible here. */
1703 + return NULL_RTX;
1704 +}
1705 +
1706 +
1707 +/* Handle an "interrupt" or "isr" attribute;
1708 + arguments as in struct attribute_spec.handler. */
1709 +
1710 +static tree
1711 +avr32_handle_isr_attribute (tree * node, tree name, tree args,
1712 + int flags, bool * no_add_attrs)
1713 +{
1714 + if (DECL_P (*node))
1715 + {
1716 + if (TREE_CODE (*node) != FUNCTION_DECL)
1717 + {
1718 + warning ("`%s' attribute only applies to functions",
1719 + IDENTIFIER_POINTER (name));
1720 + *no_add_attrs = true;
1721 + }
1722 + /* FIXME: the argument if any is checked for type attributes; should it
1723 + be checked for decl ones? */
1724 + }
1725 + else
1726 + {
1727 + if (TREE_CODE (*node) == FUNCTION_TYPE
1728 + || TREE_CODE (*node) == METHOD_TYPE)
1729 + {
1730 + if (avr32_isr_value (args) == AVR32_FT_UNKNOWN)
1731 + {
1732 + warning ("`%s' attribute ignored", IDENTIFIER_POINTER (name));
1733 + *no_add_attrs = true;
1734 + }
1735 + }
1736 + else if (TREE_CODE (*node) == POINTER_TYPE
1737 + && (TREE_CODE (TREE_TYPE (*node)) == FUNCTION_TYPE
1738 + || TREE_CODE (TREE_TYPE (*node)) == METHOD_TYPE)
1739 + && avr32_isr_value (args) != AVR32_FT_UNKNOWN)
1740 + {
1741 + *node = build_variant_type_copy (*node);
1742 + TREE_TYPE (*node) = build_type_attribute_variant
1743 + (TREE_TYPE (*node),
1744 + tree_cons (name, args, TYPE_ATTRIBUTES (TREE_TYPE (*node))));
1745 + *no_add_attrs = true;
1746 + }
1747 + else
1748 + {
1749 + /* Possibly pass this attribute on from the type to a decl. */
1750 + if (flags & ((int) ATTR_FLAG_DECL_NEXT
1751 + | (int) ATTR_FLAG_FUNCTION_NEXT
1752 + | (int) ATTR_FLAG_ARRAY_NEXT))
1753 + {
1754 + *no_add_attrs = true;
1755 + return tree_cons (name, args, NULL_TREE);
1756 + }
1757 + else
1758 + {
1759 + warning ("`%s' attribute ignored", IDENTIFIER_POINTER (name));
1760 + }
1761 + }
1762 + }
1763 +
1764 + return NULL_TREE;
1765 +}
1766 +
1767 +/* Handle an attribute requiring a FUNCTION_DECL;
1768 + arguments as in struct attribute_spec.handler. */
1769 +static tree
1770 +avr32_handle_fndecl_attribute (tree * node, tree name,
1771 + tree args ATTRIBUTE_UNUSED,
1772 + int flags ATTRIBUTE_UNUSED,
1773 + bool * no_add_attrs)
1774 +{
1775 + if (TREE_CODE (*node) != FUNCTION_DECL)
1776 + {
1777 + warning ("%qs attribute only applies to functions",
1778 + IDENTIFIER_POINTER (name));
1779 + *no_add_attrs = true;
1780 + }
1781 +
1782 + return NULL_TREE;
1783 +}
1784 +
1785 +
1786 +/* Handle an acall attribute;
1787 + arguments as in struct attribute_spec.handler. */
1788 +
1789 +static tree
1790 +avr32_handle_acall_attribute (tree * node, tree name,
1791 + tree args ATTRIBUTE_UNUSED,
1792 + int flags ATTRIBUTE_UNUSED, bool * no_add_attrs)
1793 +{
1794 + if (TREE_CODE (*node) == FUNCTION_TYPE || TREE_CODE (*node) == METHOD_TYPE)
1795 + {
1796 + warning ("`%s' attribute not yet supported...",
1797 + IDENTIFIER_POINTER (name));
1798 + *no_add_attrs = true;
1799 + return NULL_TREE;
1800 + }
1801 +
1802 + warning ("`%s' attribute only applies to functions",
1803 + IDENTIFIER_POINTER (name));
1804 + *no_add_attrs = true;
1805 + return NULL_TREE;
1806 +}
1807 +
1808 +
1809 +/* Return 0 if the attributes for two types are incompatible, 1 if they
1810 + are compatible, and 2 if they are nearly compatible (which causes a
1811 + warning to be generated). */
1812 +
1813 +static int
1814 +avr32_comp_type_attributes (tree type1, tree type2)
1815 +{
1816 + int acall1, acall2, isr1, isr2, naked1, naked2;
1817 +
1818 + /* Check for mismatch of non-default calling convention. */
1819 + if (TREE_CODE (type1) != FUNCTION_TYPE)
1820 + return 1;
1821 +
1822 + /* Check for mismatched call attributes. */
1823 + acall1 = lookup_attribute ("acall", TYPE_ATTRIBUTES (type1)) != NULL;
1824 + acall2 = lookup_attribute ("acall", TYPE_ATTRIBUTES (type2)) != NULL;
1825 + naked1 = lookup_attribute ("naked", TYPE_ATTRIBUTES (type1)) != NULL;
1826 + naked2 = lookup_attribute ("naked", TYPE_ATTRIBUTES (type2)) != NULL;
1827 + isr1 = lookup_attribute ("isr", TYPE_ATTRIBUTES (type1)) != NULL;
1828 + if (!isr1)
1829 + isr1 = lookup_attribute ("interrupt", TYPE_ATTRIBUTES (type1)) != NULL;
1830 +
1831 + isr2 = lookup_attribute ("isr", TYPE_ATTRIBUTES (type2)) != NULL;
1832 + if (!isr2)
1833 + isr2 = lookup_attribute ("interrupt", TYPE_ATTRIBUTES (type2)) != NULL;
1834 +
1835 + if ((acall1 && isr2)
1836 + || (acall2 && isr1) || (naked1 && isr2) || (naked2 && isr1))
1837 + return 0;
1838 +
1839 + return 1;
1840 +}
1841 +
1842 +
1843 +/* Computes the type of the current function. */
1844 +
1845 +static unsigned long
1846 +avr32_compute_func_type (void)
1847 +{
1848 + unsigned long type = AVR32_FT_UNKNOWN;
1849 + tree a;
1850 + tree attr;
1851 +
1852 + if (TREE_CODE (current_function_decl) != FUNCTION_DECL)
1853 + abort ();
1854 +
1855 + /* Decide if the current function is volatile. Such functions never
1856 + return, and many memory cycles can be saved by not storing register
1857 + values that will never be needed again. This optimization was added to
1858 + speed up context switching in a kernel application. */
1859 + if (optimize > 0
1860 + && TREE_NOTHROW (current_function_decl)
1861 + && TREE_THIS_VOLATILE (current_function_decl))
1862 + type |= AVR32_FT_VOLATILE;
1863 +
1864 + if (cfun->static_chain_decl != NULL)
1865 + type |= AVR32_FT_NESTED;
1866 +
1867 + attr = DECL_ATTRIBUTES (current_function_decl);
1868 +
1869 + a = lookup_attribute ("isr", attr);
1870 + if (a == NULL_TREE)
1871 + a = lookup_attribute ("interrupt", attr);
1872 +
1873 + if (a == NULL_TREE)
1874 + type |= AVR32_FT_NORMAL;
1875 + else
1876 + type |= avr32_isr_value (TREE_VALUE (a));
1877 +
1878 +
1879 + a = lookup_attribute ("acall", attr);
1880 + if (a != NULL_TREE)
1881 + type |= AVR32_FT_ACALL;
1882 +
1883 + a = lookup_attribute ("naked", attr);
1884 + if (a != NULL_TREE)
1885 + type |= AVR32_FT_NAKED;
1886 +
1887 + return type;
1888 +}
1889 +
1890 +/* Returns the type of the current function. */
1891 +
1892 +static unsigned long
1893 +avr32_current_func_type (void)
1894 +{
1895 + if (AVR32_FUNC_TYPE (cfun->machine->func_type) == AVR32_FT_UNKNOWN)
1896 + cfun->machine->func_type = avr32_compute_func_type ();
1897 +
1898 + return cfun->machine->func_type;
1899 +}
1900 +
1901 +/*
1902 + This target hook should return true if we should not pass type solely
1903 + in registers. The file expr.h defines a definition that is usually appropriate,
1904 + refer to expr.h for additional documentation.
1905 +*/
1906 +bool
1907 +avr32_must_pass_in_stack (enum machine_mode mode ATTRIBUTE_UNUSED, tree type)
1908 +{
1909 + if (type && AGGREGATE_TYPE_P (type)
1910 + /* If the alignment is less than the size then pass in the struct on
1911 + the stack. */
1912 + && ((unsigned int) TYPE_ALIGN_UNIT (type) <
1913 + (unsigned int) int_size_in_bytes (type))
1914 + /* If we support unaligned word accesses then structs of size 4 and 8
1915 + can have any alignment and still be passed in registers. */
1916 + && !(TARGET_UNALIGNED_WORD
1917 + && (int_size_in_bytes (type) == 4
1918 + || int_size_in_bytes (type) == 8))
1919 + /* Double word structs need only a word alignment. */
1920 + && !(int_size_in_bytes (type) == 8 && TYPE_ALIGN_UNIT (type) >= 4))
1921 + return true;
1922 +
1923 + if (type && AGGREGATE_TYPE_P (type)
1924 + /* Structs of size 3,5,6,7 are always passed in registers. */
1925 + && (int_size_in_bytes (type) == 3
1926 + || int_size_in_bytes (type) == 5
1927 + || int_size_in_bytes (type) == 6 || int_size_in_bytes (type) == 7))
1928 + return true;
1929 +
1930 +
1931 + return (type && TREE_ADDRESSABLE (type));
1932 +}
1933 +
1934 +
1935 +bool
1936 +avr32_strict_argument_naming (CUMULATIVE_ARGS * ca ATTRIBUTE_UNUSED)
1937 +{
1938 + return true;
1939 +}
1940 +
1941 +/*
1942 + This target hook should return true if an argument at the position indicated
1943 + by cum should be passed by reference. This predicate is queried after target
1944 + independent reasons for being passed by reference, such as TREE_ADDRESSABLE (type).
1945 +
1946 + If the hook returns true, a copy of that argument is made in memory and a
1947 + pointer to the argument is passed instead of the argument itself. The pointer
1948 + is passed in whatever way is appropriate for passing a pointer to that type.
1949 +*/
1950 +bool
1951 +avr32_pass_by_reference (CUMULATIVE_ARGS * cum ATTRIBUTE_UNUSED,
1952 + enum machine_mode mode ATTRIBUTE_UNUSED,
1953 + tree type, bool named ATTRIBUTE_UNUSED)
1954 +{
1955 + return (type && (TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST));
1956 +}
1957 +
1958 +static int
1959 +avr32_arg_partial_bytes (CUMULATIVE_ARGS * pcum ATTRIBUTE_UNUSED,
1960 + enum machine_mode mode ATTRIBUTE_UNUSED,
1961 + tree type ATTRIBUTE_UNUSED,
1962 + bool named ATTRIBUTE_UNUSED)
1963 +{
1964 + return 0;
1965 +}
1966 +
1967 +
1968 +struct gcc_target targetm = TARGET_INITIALIZER;
1969 +
1970 +/*
1971 + Table used to convert from register number in the assembler instructions and
1972 + the register numbers used in gcc.
1973 +*/
1974 +const int avr32_function_arg_reglist[] =
1975 +{
1976 + INTERNAL_REGNUM (12),
1977 + INTERNAL_REGNUM (11),
1978 + INTERNAL_REGNUM (10),
1979 + INTERNAL_REGNUM (9),
1980 + INTERNAL_REGNUM (8)
1981 +};
1982 +
1983 +rtx avr32_compare_op0 = NULL_RTX;
1984 +rtx avr32_compare_op1 = NULL_RTX;
1985 +rtx avr32_compare_operator = NULL_RTX;
1986 +rtx avr32_acc_cache = NULL_RTX;
1987 +
1988 +/*
1989 + Returns nonzero if it is allowed to store a value of mode mode in hard
1990 + register number regno.
1991 +*/
1992 +int
1993 +avr32_hard_regno_mode_ok (int regnr, enum machine_mode mode)
1994 +{
1995 + /* We allow only float modes in the fp-registers */
1996 + if (regnr >= FIRST_FP_REGNUM
1997 + && regnr <= LAST_FP_REGNUM && GET_MODE_CLASS (mode) != MODE_FLOAT)
1998 + {
1999 + return 0;
2000 + }
2001 +
2002 + switch (mode)
2003 + {
2004 + case DImode: /* long long */
2005 + case DFmode: /* double */
2006 + case SCmode: /* __complex__ float */
2007 + case CSImode: /* __complex__ int */
2008 + if (regnr < 4)
2009 + { /* long long int not supported in r12, sp, lr
2010 + or pc. */
2011 + return 0;
2012 + }
2013 + else
2014 + {
2015 + if (regnr % 2) /* long long int has to be referred to in even
2016 + registers. */
2017 + return 0;
2018 + else
2019 + return 1;
2020 + }
2021 + case CDImode: /* __complex__ long long */
2022 + case DCmode: /* __complex__ double */
2023 + case TImode: /* 16 bytes */
2024 + if (regnr < 7)
2025 + return 0;
2026 + else if (regnr % 2)
2027 + return 0;
2028 + else
2029 + return 1;
2030 + default:
2031 + return 1;
2032 + }
2033 +}
2034 +
2035 +
2036 +int
2037 +avr32_rnd_operands (rtx add, rtx shift)
2038 +{
2039 + if (GET_CODE (shift) == CONST_INT &&
2040 + GET_CODE (add) == CONST_INT && INTVAL (shift) > 0)
2041 + {
2042 + if ((1 << (INTVAL (shift) - 1)) == INTVAL (add))
2043 + return TRUE;
2044 + }
2045 +
2046 + return FALSE;
2047 +}
2048 +
2049 +
2050 +
2051 +int
2052 +avr32_const_ok_for_constraint_p (HOST_WIDE_INT value, char c, const char *str)
2053 +{
2054 + switch (c)
2055 + {
2056 + case 'K':
2057 + case 'I':
2058 + {
2059 + HOST_WIDE_INT min_value = 0, max_value = 0;
2060 + char size_str[3];
2061 + int const_size;
2062 +
2063 + size_str[0] = str[2];
2064 + size_str[1] = str[3];
2065 + size_str[2] = '\0';
2066 + const_size = atoi (size_str);
2067 +
2068 + if (toupper (str[1]) == 'U')
2069 + {
2070 + min_value = 0;
2071 + max_value = (1 << const_size) - 1;
2072 + }
2073 + else if (toupper (str[1]) == 'S')
2074 + {
2075 + min_value = -(1 << (const_size - 1));
2076 + max_value = (1 << (const_size - 1)) - 1;
2077 + }
2078 +
2079 + if (c == 'I')
2080 + {
2081 + value = -value;
2082 + }
2083 +
2084 + if (value >= min_value && value <= max_value)
2085 + {
2086 + return 1;
2087 + }
2088 + break;
2089 + }
2090 + case 'M':
2091 + return avr32_mask_upper_bits_operand (GEN_INT (value), VOIDmode);
2092 + }
2093 +
2094 + return 0;
2095 +}
2096 +
2097 +
2098 +/*Compute mask of which floating-point registers need saving upon
2099 + entry to this function*/
2100 +static unsigned long
2101 +avr32_compute_save_fp_reg_mask (void)
2102 +{
2103 + unsigned long func_type = avr32_current_func_type ();
2104 + unsigned int save_reg_mask = 0;
2105 + unsigned int reg;
2106 + unsigned int max_reg = 7;
2107 + int save_all_call_used_regs = FALSE;
2108 +
2109 + /* This only applies for hardware floating-point implementation. */
2110 + if (!TARGET_HARD_FLOAT)
2111 + return 0;
2112 +
2113 + if (IS_INTERRUPT (func_type))
2114 + {
2115 +
2116 + /* Interrupt functions must not corrupt any registers, even call
2117 + clobbered ones. If this is a leaf function we can just examine the
2118 + registers used by the RTL, but otherwise we have to assume that
2119 + whatever function is called might clobber anything, and so we have
2120 + to save all the call-clobbered registers as well. */
2121 + max_reg = 13;
2122 + save_all_call_used_regs = !current_function_is_leaf;
2123 + }
2124 +
2125 + /* All registers used must be saved */
2126 + for (reg = 0; reg <= max_reg; reg++)
2127 + if (regs_ever_live[INTERNAL_FP_REGNUM (reg)]
2128 + || (save_all_call_used_regs
2129 + && call_used_regs[INTERNAL_FP_REGNUM (reg)]))
2130 + save_reg_mask |= (1 << reg);
2131 +
2132 + return save_reg_mask;
2133 +}
2134 +
2135 +/*Compute mask of registers which need saving upon function entry */
2136 +static unsigned long
2137 +avr32_compute_save_reg_mask (int push)
2138 +{
2139 + unsigned long func_type;
2140 + unsigned int save_reg_mask = 0;
2141 + unsigned int reg;
2142 +
2143 + func_type = avr32_current_func_type ();
2144 +
2145 + if (IS_INTERRUPT (func_type))
2146 + {
2147 + unsigned int max_reg = 12;
2148 +
2149 +
2150 + /* Get the banking scheme for the interrupt */
2151 + switch (func_type)
2152 + {
2153 + case AVR32_FT_ISR_FULL:
2154 + max_reg = 0;
2155 + break;
2156 + case AVR32_FT_ISR_HALF:
2157 + max_reg = 7;
2158 + break;
2159 + case AVR32_FT_ISR_NONE:
2160 + max_reg = 12;
2161 + break;
2162 + }
2163 +
2164 + /* Interrupt functions must not corrupt any registers, even call
2165 + clobbered ones. If this is a leaf function we can just examine the
2166 + registers used by the RTL, but otherwise we have to assume that
2167 + whatever function is called might clobber anything, and so we have
2168 + to save all the call-clobbered registers as well. */
2169 +
2170 + /* Need not push the registers r8-r12 for AVR32A architectures, as this
2171 + is automatically done in hardware. We also do not have any shadow
2172 + registers. */
2173 + if (avr32_arch->uarch_type == UARCH_TYPE_AVR32A)
2174 + {
2175 + max_reg = 7;
2176 + func_type = AVR32_FT_ISR_NONE;
2177 + }
2178 +
2179 + /* All registers which are used and are not shadowed must be saved */
2180 + for (reg = 0; reg <= max_reg; reg++)
2181 + if (regs_ever_live[INTERNAL_REGNUM (reg)]
2182 + || (!current_function_is_leaf
2183 + && call_used_regs[INTERNAL_REGNUM (reg)]))
2184 + save_reg_mask |= (1 << reg);
2185 +
2186 + /* Check LR */
2187 + if ((regs_ever_live[LR_REGNUM] || !current_function_is_leaf || frame_pointer_needed) && (func_type == AVR32_FT_ISR_NONE) /* Only
2188 + non-shadowed
2189 + register
2190 + models
2191 + */ )
2192 + save_reg_mask |= (1 << ASM_REGNUM (LR_REGNUM));
2193 +
2194 + /* Make sure that the GOT register is pushed. */
2195 + if (max_reg >= ASM_REGNUM (PIC_OFFSET_TABLE_REGNUM)
2196 + && current_function_uses_pic_offset_table)
2197 + save_reg_mask |= (1 << ASM_REGNUM (PIC_OFFSET_TABLE_REGNUM));
2198 +
2199 + }
2200 + else
2201 + {
2202 + int use_pushm = optimize_size;
2203 +
2204 + /* In the normal case we only need to save those registers which are
2205 + call saved and which are used by this function. */
2206 + for (reg = 0; reg <= 7; reg++)
2207 + if (regs_ever_live[INTERNAL_REGNUM (reg)]
2208 + && !call_used_regs[INTERNAL_REGNUM (reg)])
2209 + save_reg_mask |= (1 << reg);
2210 +
2211 + /* Make sure that the GOT register is pushed. */
2212 + if (current_function_uses_pic_offset_table)
2213 + save_reg_mask |= (1 << ASM_REGNUM (PIC_OFFSET_TABLE_REGNUM));
2214 +
2215 +
2216 + /* If we optimize for size and do not have anonymous arguments: use
2217 + popm/pushm always */
2218 + if (use_pushm)
2219 + {
2220 + if ((save_reg_mask & (1 << 0))
2221 + || (save_reg_mask & (1 << 1))
2222 + || (save_reg_mask & (1 << 2)) || (save_reg_mask & (1 << 3)))
2223 + save_reg_mask |= 0xf;
2224 +
2225 + if ((save_reg_mask & (1 << 4))
2226 + || (save_reg_mask & (1 << 5))
2227 + || (save_reg_mask & (1 << 6)) || (save_reg_mask & (1 << 7)))
2228 + save_reg_mask |= 0xf0;
2229 +
2230 + if ((save_reg_mask & (1 << 8)) || (save_reg_mask & (1 << 9)))
2231 + save_reg_mask |= 0x300;
2232 + }
2233 +
2234 +
2235 + /* Check LR */
2236 + if ((regs_ever_live[LR_REGNUM] || !current_function_is_leaf ||
2237 + (optimize_size && save_reg_mask) || frame_pointer_needed))
2238 + {
2239 + if (push)
2240 + {
2241 + /* Push/Pop LR */
2242 + save_reg_mask |= (1 << ASM_REGNUM (LR_REGNUM));
2243 + }
2244 + else
2245 + {
2246 + /* Pop PC */
2247 + save_reg_mask |= (1 << ASM_REGNUM (PC_REGNUM));
2248 + }
2249 + }
2250 + }
2251 +
2252 + return save_reg_mask;
2253 +}
2254 +
2255 +/*Compute total size in bytes of all saved registers */
2256 +static int
2257 +avr32_get_reg_mask_size (int reg_mask)
2258 +{
2259 + int reg, size;
2260 + size = 0;
2261 +
2262 + for (reg = 0; reg <= 15; reg++)
2263 + if (reg_mask & (1 << reg))
2264 + size += 4;
2265 +
2266 + return size;
2267 +}
2268 +
2269 +/*Get a register from one of the registers which are saved onto the stack
2270 + upon function entry */
2271 +
2272 +static int
2273 +avr32_get_saved_reg (int save_reg_mask)
2274 +{
2275 + unsigned int reg;
2276 +
2277 + /* Find the first register which is saved in the saved_reg_mask */
2278 + for (reg = 0; reg <= 15; reg++)
2279 + if (save_reg_mask & (1 << reg))
2280 + return reg;
2281 +
2282 + return -1;
2283 +}
2284 +
2285 +/* Return 1 if it is possible to return using a single instruction. */
2286 +int
2287 +avr32_use_return_insn (int iscond)
2288 +{
2289 + unsigned int func_type = avr32_current_func_type ();
2290 + unsigned long saved_int_regs;
2291 + unsigned long saved_fp_regs;
2292 +
2293 + /* Never use a return instruction before reload has run. */
2294 + if (!reload_completed)
2295 + return 0;
2296 +
2297 + /* Must adjust the stack for vararg functions. */
2298 + if (current_function_args_info.uses_anonymous_args)
2299 + return 0;
2300 +
2301 + /* If there is a stack adjustment. */
2302 + if (get_frame_size ())
2303 + return 0;
2304 +
2305 + saved_int_regs = avr32_compute_save_reg_mask (TRUE);
2306 + saved_fp_regs = avr32_compute_save_fp_reg_mask ();
2307 +
2308 + /* Functions which have saved fp-regs on the stack can not be performed in
2309 + one instruction */
2310 + if (saved_fp_regs)
2311 + return 0;
2312 +
2313 + /* Conditional returns can not be performed in one instruction if we need
2314 + to restore registers from the stack */
2315 + if (iscond && saved_int_regs)
2316 + return 0;
2317 +
2318 + /* Conditional return can not be used for interrupt handlers. */
2319 + if (iscond && IS_INTERRUPT (func_type))
2320 + return 0;
2321 +
2322 + /* For interrupt handlers which needs to pop registers */
2323 + if (saved_int_regs && IS_INTERRUPT (func_type))
2324 + return 0;
2325 +
2326 +
2327 + /* If there are saved registers but the LR isn't saved, then we need two
2328 + instructions for the return. */
2329 + if (saved_int_regs && !(saved_int_regs & (1 << ASM_REGNUM (LR_REGNUM))))
2330 + return 0;
2331 +
2332 +
2333 + return 1;
2334 +}
2335 +
2336 +
2337 +/*Generate some function prologue info in the assembly file*/
2338 +
2339 +void
2340 +avr32_target_asm_function_prologue (FILE * f, HOST_WIDE_INT frame_size)
2341 +{
2342 + if (IS_NAKED (avr32_current_func_type ()))
2343 + fprintf (f,
2344 + "\t# Function is naked: Prologue and epilogue provided by programmer\n");
2345 +
2346 + if (IS_INTERRUPT (avr32_current_func_type ()))
2347 + {
2348 + switch (avr32_current_func_type ())
2349 + {
2350 + case AVR32_FT_ISR_FULL:
2351 + fprintf (f,
2352 + "\t# Interrupt Function: Fully shadowed register file\n");
2353 + break;
2354 + case AVR32_FT_ISR_HALF:
2355 + fprintf (f,
2356 + "\t# Interrupt Function: Half shadowed register file\n");
2357 + break;
2358 + default:
2359 + case AVR32_FT_ISR_NONE:
2360 + fprintf (f, "\t# Interrupt Function: No shadowed register file\n");
2361 + break;
2362 + }
2363 + }
2364 +
2365 +
2366 + fprintf (f, "\t# args = %i, frame = %li, pretend = %i\n",
2367 + current_function_args_size, frame_size,
2368 + current_function_pretend_args_size);
2369 +
2370 + fprintf (f, "\t# frame_needed = %i, leaf_function = %i\n",
2371 + frame_pointer_needed, current_function_is_leaf);
2372 +
2373 + fprintf (f, "\t# uses_anonymous_args = %i\n",
2374 + current_function_args_info.uses_anonymous_args);
2375 +}
2376 +
2377 +
2378 +/* Generate and emit an insn that we will recognize as a pushm or stm.
2379 + Unfortunately, since this insn does not reflect very well the actual
2380 + semantics of the operation, we need to annotate the insn for the benefit
2381 + of DWARF2 frame unwind information. */
2382 +
2383 +int avr32_convert_to_reglist16 (int reglist8_vect);
2384 +
2385 +static rtx
2386 +emit_multi_reg_push (int reglist, int usePUSHM)
2387 +{
2388 + rtx insn;
2389 + rtx dwarf;
2390 + rtx tmp;
2391 + rtx reg;
2392 + int i;
2393 + int nr_regs;
2394 + int index = 0;
2395 +
2396 + if (usePUSHM)
2397 + {
2398 + insn = emit_insn (gen_pushm (gen_rtx_CONST_INT (SImode, reglist)));
2399 + reglist = avr32_convert_to_reglist16 (reglist);
2400 + }
2401 + else
2402 + {
2403 + insn = emit_insn (gen_stm (stack_pointer_rtx,
2404 + gen_rtx_CONST_INT (SImode, reglist),
2405 + gen_rtx_CONST_INT (SImode, 1)));
2406 + }
2407 +
2408 + nr_regs = avr32_get_reg_mask_size (reglist) / 4;
2409 + dwarf = gen_rtx_SEQUENCE (VOIDmode, rtvec_alloc (nr_regs + 1));
2410 +
2411 + for (i = 15; i >= 0; i--)
2412 + {
2413 + if (reglist & (1 << i))
2414 + {
2415 + reg = gen_rtx_REG (SImode, INTERNAL_REGNUM (i));
2416 + tmp = gen_rtx_SET (VOIDmode,
2417 + gen_rtx_MEM (SImode,
2418 + plus_constant (stack_pointer_rtx,
2419 + 4 * index)), reg);
2420 + RTX_FRAME_RELATED_P (tmp) = 1;
2421 + XVECEXP (dwarf, 0, 1 + index++) = tmp;
2422 + }
2423 + }
2424 +
2425 + tmp = gen_rtx_SET (SImode,
2426 + stack_pointer_rtx,
2427 + gen_rtx_PLUS (SImode,
2428 + stack_pointer_rtx,
2429 + GEN_INT (-4 * nr_regs)));
2430 + RTX_FRAME_RELATED_P (tmp) = 1;
2431 + XVECEXP (dwarf, 0, 0) = tmp;
2432 + REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR, dwarf,
2433 + REG_NOTES (insn));
2434 + return insn;
2435 +}
2436 +
2437 +
2438 +static rtx
2439 +emit_multi_fp_reg_push (int reglist)
2440 +{
2441 + rtx insn;
2442 + rtx dwarf;
2443 + rtx tmp;
2444 + rtx reg;
2445 + int i;
2446 + int nr_regs;
2447 + int index = 0;
2448 +
2449 + insn = emit_insn (gen_stm_fp (stack_pointer_rtx,
2450 + gen_rtx_CONST_INT (SImode, reglist),
2451 + gen_rtx_CONST_INT (SImode, 1)));
2452 +
2453 + nr_regs = avr32_get_reg_mask_size (reglist) / 4;
2454 + dwarf = gen_rtx_SEQUENCE (VOIDmode, rtvec_alloc (nr_regs + 1));
2455 +
2456 + for (i = 15; i >= 0; i--)
2457 + {
2458 + if (reglist & (1 << i))
2459 + {
2460 + reg = gen_rtx_REG (SImode, INTERNAL_FP_REGNUM (i));
2461 + tmp = gen_rtx_SET (VOIDmode,
2462 + gen_rtx_MEM (SImode,
2463 + plus_constant (stack_pointer_rtx,
2464 + 4 * index)), reg);
2465 + RTX_FRAME_RELATED_P (tmp) = 1;
2466 + XVECEXP (dwarf, 0, 1 + index++) = tmp;
2467 + }
2468 + }
2469 +
2470 + tmp = gen_rtx_SET (SImode,
2471 + stack_pointer_rtx,
2472 + gen_rtx_PLUS (SImode,
2473 + stack_pointer_rtx,
2474 + GEN_INT (-4 * nr_regs)));
2475 + RTX_FRAME_RELATED_P (tmp) = 1;
2476 + XVECEXP (dwarf, 0, 0) = tmp;
2477 + REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR, dwarf,
2478 + REG_NOTES (insn));
2479 + return insn;
2480 +}
2481 +
2482 +rtx
2483 +avr32_gen_load_multiple (rtx * regs, int count, rtx from,
2484 + int write_back, int in_struct_p, int scalar_p)
2485 +{
2486 +
2487 + rtx result;
2488 + int i = 0, j;
2489 +
2490 + result =
2491 + gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (count + (write_back ? 1 : 0)));
2492 +
2493 + if (write_back)
2494 + {
2495 + XVECEXP (result, 0, 0)
2496 + = gen_rtx_SET (GET_MODE (from), from,
2497 + plus_constant (from, count * 4));
2498 + i = 1;
2499 + count++;
2500 + }
2501 +
2502 +
2503 + for (j = 0; i < count; i++, j++)
2504 + {
2505 + rtx unspec;
2506 + rtx mem = gen_rtx_MEM (SImode, plus_constant (from, j * 4));
2507 + MEM_IN_STRUCT_P (mem) = in_struct_p;
2508 + MEM_SCALAR_P (mem) = scalar_p;
2509 + unspec = gen_rtx_UNSPEC (VOIDmode, gen_rtvec (1, mem), UNSPEC_LDM);
2510 + XVECEXP (result, 0, i) = gen_rtx_SET (VOIDmode, regs[j], unspec);
2511 + }
2512 +
2513 + return result;
2514 +}
2515 +
2516 +
2517 +rtx
2518 +avr32_gen_store_multiple (rtx * regs, int count, rtx to,
2519 + int in_struct_p, int scalar_p)
2520 +{
2521 + rtx result;
2522 + int i = 0, j;
2523 +
2524 + result = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (count));
2525 +
2526 + for (j = 0; i < count; i++, j++)
2527 + {
2528 + rtx mem = gen_rtx_MEM (SImode, plus_constant (to, j * 4));
2529 + MEM_IN_STRUCT_P (mem) = in_struct_p;
2530 + MEM_SCALAR_P (mem) = scalar_p;
2531 + XVECEXP (result, 0, i)
2532 + = gen_rtx_SET (VOIDmode, mem,
2533 + gen_rtx_UNSPEC (VOIDmode,
2534 + gen_rtvec (1, regs[j]),
2535 + UNSPEC_STORE_MULTIPLE));
2536 + }
2537 +
2538 + return result;
2539 +}
2540 +
2541 +
2542 +/* Move a block of memory if it is word aligned or we support unaligned
2543 + word memory accesses. The size must be maximum 64 bytes. */
2544 +
2545 +int
2546 +avr32_gen_movmemsi (rtx * operands)
2547 +{
2548 + HOST_WIDE_INT bytes_to_go;
2549 + rtx src, dst;
2550 + rtx st_src, st_dst;
2551 + int ptr_offset = 0;
2552 + int block_size;
2553 + int dst_in_struct_p, src_in_struct_p;
2554 + int dst_scalar_p, src_scalar_p;
2555 + int unaligned;
2556 +
2557 + if (GET_CODE (operands[2]) != CONST_INT
2558 + || GET_CODE (operands[3]) != CONST_INT
2559 + || INTVAL (operands[2]) > 64
2560 + || ((INTVAL (operands[3]) & 3) && !TARGET_UNALIGNED_WORD))
2561 + return 0;
2562 +
2563 + unaligned = (INTVAL (operands[3]) & 3) != 0;
2564 +
2565 + block_size = 4;
2566 +
2567 + st_dst = XEXP (operands[0], 0);
2568 + st_src = XEXP (operands[1], 0);
2569 +
2570 + dst_in_struct_p = MEM_IN_STRUCT_P (operands[0]);
2571 + dst_scalar_p = MEM_SCALAR_P (operands[0]);
2572 + src_in_struct_p = MEM_IN_STRUCT_P (operands[1]);
2573 + src_scalar_p = MEM_SCALAR_P (operands[1]);
2574 +
2575 + dst = copy_to_mode_reg (SImode, st_dst);
2576 + src = copy_to_mode_reg (SImode, st_src);
2577 +
2578 + bytes_to_go = INTVAL (operands[2]);
2579 +
2580 + while (bytes_to_go)
2581 + {
2582 + enum machine_mode move_mode;
2583 + /* Seems to be a problem with reloads for the movti pattern so this is
2584 + disabled until that problem is resolved */
2585 +
2586 + /* if ( bytes_to_go >= GET_MODE_SIZE(TImode) ) move_mode = TImode; else
2587 + */
2588 + if ((bytes_to_go >= GET_MODE_SIZE (DImode)) && !unaligned)
2589 + move_mode = DImode;
2590 + else if (bytes_to_go >= GET_MODE_SIZE (SImode))
2591 + move_mode = SImode;
2592 + else
2593 + move_mode = QImode;
2594 +
2595 + {
2596 + rtx dst_mem = gen_rtx_MEM (move_mode,
2597 + gen_rtx_PLUS (SImode, dst,
2598 + GEN_INT (ptr_offset)));
2599 + rtx src_mem = gen_rtx_MEM (move_mode,
2600 + gen_rtx_PLUS (SImode, src,
2601 + GEN_INT (ptr_offset)));
2602 + ptr_offset += GET_MODE_SIZE (move_mode);
2603 + bytes_to_go -= GET_MODE_SIZE (move_mode);
2604 +
2605 + MEM_IN_STRUCT_P (dst_mem) = dst_in_struct_p;
2606 + MEM_SCALAR_P (dst_mem) = dst_scalar_p;
2607 +
2608 + MEM_IN_STRUCT_P (src_mem) = src_in_struct_p;
2609 + MEM_SCALAR_P (src_mem) = src_scalar_p;
2610 + emit_move_insn (dst_mem, src_mem);
2611 +
2612 + }
2613 + }
2614 +
2615 + return 1;
2616 +}
2617 +
2618 +
2619 +
2620 +/*Expand the prologue instruction*/
2621 +void
2622 +avr32_expand_prologue (void)
2623 +{
2624 + rtx insn, dwarf;
2625 + unsigned long saved_reg_mask, saved_fp_reg_mask;
2626 + int reglist8 = 0;
2627 +
2628 + /* Naked functions do not have a prologue */
2629 + if (IS_NAKED (avr32_current_func_type ()))
2630 + return;
2631 +
2632 + saved_reg_mask = avr32_compute_save_reg_mask (TRUE);
2633 +
2634 + if (saved_reg_mask)
2635 + {
2636 + /* Must push used registers */
2637 +
2638 + /* Should we use POPM or LDM? */
2639 + int usePUSHM = TRUE;
2640 + reglist8 = 0;
2641 + if (((saved_reg_mask & (1 << 0)) ||
2642 + (saved_reg_mask & (1 << 1)) ||
2643 + (saved_reg_mask & (1 << 2)) || (saved_reg_mask & (1 << 3))))
2644 + {
2645 + /* One of R0-R3 should at least be pushed */
2646 + if (((saved_reg_mask & (1 << 0)) &&
2647 + (saved_reg_mask & (1 << 1)) &&
2648 + (saved_reg_mask & (1 << 2)) && (saved_reg_mask & (1 << 3))))
2649 + {
2650 + /* All should be pushed */
2651 + reglist8 |= 0x01;
2652 + }
2653 + else
2654 + {
2655 + usePUSHM = FALSE;
2656 + }
2657 + }
2658 +
2659 + if (((saved_reg_mask & (1 << 4)) ||
2660 + (saved_reg_mask & (1 << 5)) ||
2661 + (saved_reg_mask & (1 << 6)) || (saved_reg_mask & (1 << 7))))
2662 + {
2663 + /* One of R4-R7 should at least be pushed */
2664 + if (((saved_reg_mask & (1 << 4)) &&
2665 + (saved_reg_mask & (1 << 5)) &&
2666 + (saved_reg_mask & (1 << 6)) && (saved_reg_mask & (1 << 7))))
2667 + {
2668 + if (usePUSHM)
2669 + /* All should be pushed */
2670 + reglist8 |= 0x02;
2671 + }
2672 + else
2673 + {
2674 + usePUSHM = FALSE;
2675 + }
2676 + }
2677 +
2678 + if (((saved_reg_mask & (1 << 8)) || (saved_reg_mask & (1 << 9))))
2679 + {
2680 + /* One of R8-R9 should at least be pushed */
2681 + if (((saved_reg_mask & (1 << 8)) && (saved_reg_mask & (1 << 9))))
2682 + {
2683 + if (usePUSHM)
2684 + /* All should be pushed */
2685 + reglist8 |= 0x04;
2686 + }
2687 + else
2688 + {
2689 + usePUSHM = FALSE;
2690 + }
2691 + }
2692 +
2693 + if (saved_reg_mask & (1 << 10))
2694 + reglist8 |= 0x08;
2695 +
2696 + if (saved_reg_mask & (1 << 11))
2697 + reglist8 |= 0x10;
2698 +
2699 + if (saved_reg_mask & (1 << 12))
2700 + reglist8 |= 0x20;
2701 +
2702 + if (saved_reg_mask & (1 << ASM_REGNUM (LR_REGNUM)))
2703 + {
2704 + /* Push LR */
2705 + reglist8 |= 0x40;
2706 + }
2707 +
2708 + if (usePUSHM)
2709 + {
2710 + insn = emit_multi_reg_push (reglist8, TRUE);
2711 + }
2712 + else
2713 + {
2714 + insn = emit_multi_reg_push (saved_reg_mask, FALSE);
2715 + }
2716 + RTX_FRAME_RELATED_P (insn) = 1;
2717 +
2718 + /* Prevent this instruction from being scheduled after any other
2719 + instructions. */
2720 + emit_insn (gen_blockage ());
2721 + }
2722 +
2723 + saved_fp_reg_mask = avr32_compute_save_fp_reg_mask ();
2724 + if (saved_fp_reg_mask)
2725 + {
2726 + insn = emit_multi_fp_reg_push (saved_fp_reg_mask);
2727 + RTX_FRAME_RELATED_P (insn) = 1;
2728 +
2729 + /* Prevent this instruction from being scheduled after any other
2730 + instructions. */
2731 + emit_insn (gen_blockage ());
2732 + }
2733 +
2734 + /* Set frame pointer */
2735 + if (frame_pointer_needed)
2736 + {
2737 + insn = emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
2738 + RTX_FRAME_RELATED_P (insn) = 1;
2739 + }
2740 +
2741 + if (get_frame_size () > 0)
2742 + {
2743 + if (avr32_const_ok_for_constraint_p (get_frame_size (), 'K', "Ks21"))
2744 + {
2745 + insn = emit_insn (gen_rtx_SET (SImode,
2746 + stack_pointer_rtx,
2747 + gen_rtx_PLUS (SImode,
2748 + stack_pointer_rtx,
2749 + gen_rtx_CONST_INT
2750 + (SImode,
2751 + -get_frame_size
2752 + ()))));
2753 + RTX_FRAME_RELATED_P (insn) = 1;
2754 + }
2755 + else
2756 + {
2757 + /* Immediate is larger than k21 We must either check if we can use
2758 + one of the pushed registers as temporary storage or we must
2759 + make us a temp register by pushing a register to the stack. */
2760 + rtx temp_reg, const_pool_entry, insn;
2761 + if (saved_reg_mask)
2762 + {
2763 + temp_reg =
2764 + gen_rtx_REG (SImode,
2765 + INTERNAL_REGNUM (avr32_get_saved_reg
2766 + (saved_reg_mask)));
2767 + }
2768 + else
2769 + {
2770 + temp_reg = gen_rtx_REG (SImode, INTERNAL_REGNUM (7));
2771 + emit_move_insn (gen_rtx_MEM
2772 + (SImode,
2773 + gen_rtx_PRE_DEC (SImode, stack_pointer_rtx)),
2774 + temp_reg);
2775 + }
2776 +
2777 + const_pool_entry =
2778 + force_const_mem (SImode,
2779 + gen_rtx_CONST_INT (SImode, get_frame_size ()));
2780 + emit_move_insn (temp_reg, const_pool_entry);
2781 +
2782 + insn = emit_insn (gen_rtx_SET (SImode,
2783 + stack_pointer_rtx,
2784 + gen_rtx_MINUS (SImode,
2785 + stack_pointer_rtx,
2786 + temp_reg)));
2787 +
2788 + dwarf = gen_rtx_SET (VOIDmode, stack_pointer_rtx,
2789 + gen_rtx_PLUS (SImode, stack_pointer_rtx,
2790 + GEN_INT (-get_frame_size ())));
2791 + REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
2792 + dwarf, REG_NOTES (insn));
2793 + RTX_FRAME_RELATED_P (insn) = 1;
2794 +
2795 + if (!saved_reg_mask)
2796 + {
2797 + insn =
2798 + emit_move_insn (temp_reg,
2799 + gen_rtx_MEM (SImode,
2800 + gen_rtx_POST_INC (SImode,
2801 + gen_rtx_REG
2802 + (SImode,
2803 + 13))));
2804 + }
2805 +
2806 + /* Mark the temp register as dead */
2807 + REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_DEAD, temp_reg,
2808 + REG_NOTES (insn));
2809 +
2810 +
2811 + }
2812 +
2813 + /* Prevent the stack adjustment from being scheduled after any
2814 + instructions using the frame pointer. */
2815 + emit_insn (gen_blockage ());
2816 + }
2817 +
2818 + /* Load GOT */
2819 + if (flag_pic)
2820 + {
2821 + avr32_load_pic_register ();
2822 +
2823 + /* gcc does not know that load or call instructions might use the pic
2824 + register so it might schedule these instructions before the loading
2825 + of the pic register. To avoid this emit a barrier for now. TODO!
2826 + Find out a better way to let gcc know which instructions might use
2827 + the pic register. */
2828 + emit_insn (gen_blockage ());
2829 + }
2830 + return;
2831 +}
2832 +
2833 +void
2834 +avr32_set_return_address (rtx source)
2835 +{
2836 + rtx addr;
2837 + unsigned long saved_regs;
2838 +
2839 + saved_regs = avr32_compute_save_reg_mask (TRUE);
2840 +
2841 + if (!(saved_regs & (1 << ASM_REGNUM (LR_REGNUM))))
2842 + emit_move_insn (gen_rtx_REG (Pmode, LR_REGNUM), source);
2843 + else
2844 + {
2845 + if (frame_pointer_needed)
2846 + addr = gen_rtx_REG (Pmode, FRAME_POINTER_REGNUM);
2847 + else
2848 + /* FIXME: Need to use scratch register if frame is large */
2849 + addr = plus_constant (stack_pointer_rtx, get_frame_size ());
2850 +
2851 + emit_move_insn (gen_rtx_MEM (Pmode, addr), source);
2852 + }
2853 +}
2854 +
2855 +
2856 +
2857 +/* Return the length of INSN. LENGTH is the initial length computed by
2858 + attributes in the machine-description file. */
2859 +
2860 +int
2861 +avr32_adjust_insn_length (rtx insn ATTRIBUTE_UNUSED,
2862 + int length ATTRIBUTE_UNUSED)
2863 +{
2864 + return length;
2865 +}
2866 +
2867 +void
2868 +avr32_output_return_instruction (int single_ret_inst ATTRIBUTE_UNUSED,
2869 + int iscond ATTRIBUTE_UNUSED,
2870 + rtx cond ATTRIBUTE_UNUSED, rtx r12_imm)
2871 +{
2872 +
2873 + unsigned long saved_reg_mask, saved_fp_reg_mask;
2874 + int insert_ret = TRUE;
2875 + int reglist8 = 0;
2876 + int stack_adjustment = get_frame_size ();
2877 + unsigned int func_type = avr32_current_func_type ();
2878 + FILE *f = asm_out_file;
2879 +
2880 + /* Naked functions do not have an epilogue */
2881 + if (IS_NAKED (func_type))
2882 + return;
2883 +
2884 + saved_fp_reg_mask = avr32_compute_save_fp_reg_mask ();
2885 +
2886 + saved_reg_mask = avr32_compute_save_reg_mask (FALSE);
2887 +
2888 + /* Reset frame pointer */
2889 + if (stack_adjustment > 0)
2890 + {
2891 + if (avr32_const_ok_for_constraint_p (stack_adjustment, 'I', "Is21"))
2892 + {
2893 + fprintf (f, "\tsub sp, %i # Reset Frame Pointer\n",
2894 + -stack_adjustment);
2895 + }
2896 + else
2897 + {
2898 + /* TODO! Is it safe to use r8 as scratch?? */
2899 + fprintf (f, "\tmov r8, lo(%i) # Reset Frame Pointer\n",
2900 + -stack_adjustment);
2901 + fprintf (f, "\torh r8, hi(%i) # Reset Frame Pointer\n",
2902 + -stack_adjustment);
2903 + fprintf (f, "\tadd sp,r8 # Reset Frame Pointer\n");
2904 + }
2905 + }
2906 +
2907 + if (saved_fp_reg_mask)
2908 + {
2909 + char reglist[64]; /* 64 bytes should be enough... */
2910 + avr32_make_fp_reglist_w (saved_fp_reg_mask, (char *) reglist);
2911 + fprintf (f, "\tldcm.w\tcp0, sp++, %s\n", reglist);
2912 + if (saved_fp_reg_mask & ~0xff)
2913 + {
2914 + saved_fp_reg_mask &= ~0xff;
2915 + avr32_make_fp_reglist_d (saved_fp_reg_mask, (char *) reglist);
2916 + fprintf (f, "\tldcm.d\tcp0, sp++, %s\n", reglist);
2917 + }
2918 + }
2919 +
2920 + if (saved_reg_mask)
2921 + {
2922 + /* Must pop used registers */
2923 +
2924 + /* Should we use POPM or LDM? */
2925 + int usePOPM = TRUE;
2926 + if (((saved_reg_mask & (1 << 0)) ||
2927 + (saved_reg_mask & (1 << 1)) ||
2928 + (saved_reg_mask & (1 << 2)) || (saved_reg_mask & (1 << 3))))
2929 + {
2930 + /* One of R0-R3 should at least be popped */
2931 + if (((saved_reg_mask & (1 << 0)) &&
2932 + (saved_reg_mask & (1 << 1)) &&
2933 + (saved_reg_mask & (1 << 2)) && (saved_reg_mask & (1 << 3))))
2934 + {
2935 + /* All should be popped */
2936 + reglist8 |= 0x01;
2937 + }
2938 + else
2939 + {
2940 + usePOPM = FALSE;
2941 + }
2942 + }
2943 +
2944 + if (((saved_reg_mask & (1 << 4)) ||
2945 + (saved_reg_mask & (1 << 5)) ||
2946 + (saved_reg_mask & (1 << 6)) || (saved_reg_mask & (1 << 7))))
2947 + {
2948 +	  /* One of R4-R7 should at least be popped */
2949 + if (((saved_reg_mask & (1 << 4)) &&
2950 + (saved_reg_mask & (1 << 5)) &&
2951 + (saved_reg_mask & (1 << 6)) && (saved_reg_mask & (1 << 7))))
2952 + {
2953 + if (usePOPM)
2954 + /* All should be popped */
2955 + reglist8 |= 0x02;
2956 + }
2957 + else
2958 + {
2959 + usePOPM = FALSE;
2960 + }
2961 + }
2962 +
2963 + if (((saved_reg_mask & (1 << 8)) || (saved_reg_mask & (1 << 9))))
2964 + {
2965 +	  /* One of R8-R9 should at least be popped */
2966 + if (((saved_reg_mask & (1 << 8)) && (saved_reg_mask & (1 << 9))))
2967 + {
2968 + if (usePOPM)
2969 +	      /* All should be popped */
2970 + reglist8 |= 0x04;
2971 + }
2972 + else
2973 + {
2974 + usePOPM = FALSE;
2975 + }
2976 + }
2977 +
2978 + if (saved_reg_mask & (1 << 10))
2979 + reglist8 |= 0x08;
2980 +
2981 + if (saved_reg_mask & (1 << 11))
2982 + reglist8 |= 0x10;
2983 +
2984 + if (saved_reg_mask & (1 << 12))
2985 + reglist8 |= 0x20;
2986 +
2987 + if (saved_reg_mask & (1 << ASM_REGNUM (LR_REGNUM)))
2988 + /* Pop LR */
2989 + reglist8 |= 0x40;
2990 +
2991 + if (saved_reg_mask & (1 << ASM_REGNUM (PC_REGNUM)))
2992 + /* Pop LR into PC. */
2993 + reglist8 |= 0x80;
2994 +
2995 + if (usePOPM)
2996 + {
2997 + char reglist[64]; /* 64 bytes should be enough... */
2998 + avr32_make_reglist8 (reglist8, (char *) reglist);
2999 +
3000 + if (reglist8 & 0x80)
3001 + /* This instruction is also a return */
3002 + insert_ret = FALSE;
3003 +
3004 + if (r12_imm && !insert_ret)
3005 + fprintf (f, "\tpopm %s, r12=%li\n", reglist, INTVAL (r12_imm));
3006 + else
3007 + fprintf (f, "\tpopm %s\n", reglist);
3008 +
3009 + }
3010 + else
3011 + {
3012 + char reglist[64]; /* 64 bytes should be enough... */
3013 + avr32_make_reglist16 (saved_reg_mask, (char *) reglist);
3014 + if (saved_reg_mask & (1 << ASM_REGNUM (PC_REGNUM)))
3015 + /* This instruction is also a return */
3016 + insert_ret = FALSE;
3017 +
3018 + if (r12_imm && !insert_ret)
3019 + fprintf (f, "\tldm sp++, %s, r12=%li\n", reglist,
3020 + INTVAL (r12_imm));
3021 + else
3022 + fprintf (f, "\tldm sp++, %s\n", reglist);
3023 +
3024 + }
3025 +
3026 + }
3027 +
3028 + if (IS_INTERRUPT (func_type))
3029 + {
3030 + fprintf (f, "\trete\n");
3031 + }
3032 + else if (insert_ret)
3033 + {
3034 + if (r12_imm)
3035 + fprintf (f, "\tretal %li\n", INTVAL (r12_imm));
3036 + else
3037 + fprintf (f, "\tretal r12\n");
3038 + }
3039 +}
3040 +
3041 +/* Function for converting a fp-register mask to a
3042 + reglistCPD8 register list string. */
3043 +void
3044 +avr32_make_fp_reglist_d (int reglist_mask, char *reglist_string)
3045 +{
3046 + int i;
3047 +
3048 + /* Make sure reglist_string is empty */
3049 + reglist_string[0] = '\0';
3050 +
3051 + for (i = 0; i < NUM_FP_REGS; i += 2)
3052 + {
3053 + if (reglist_mask & (1 << i))
3054 + {
3055 + strlen (reglist_string) ?
3056 + sprintf (reglist_string, "%s, %s-%s", reglist_string,
3057 + reg_names[INTERNAL_FP_REGNUM (i)],
3058 + reg_names[INTERNAL_FP_REGNUM (i + 1)]) :
3059 + sprintf (reglist_string, "%s-%s",
3060 + reg_names[INTERNAL_FP_REGNUM (i)],
3061 + reg_names[INTERNAL_FP_REGNUM (i + 1)]);
3062 + }
3063 + }
3064 +}
3065 +
3066 +/* Function for converting a fp-register mask to a
3067 + reglistCP8 register list string. */
3068 +void
3069 +avr32_make_fp_reglist_w (int reglist_mask, char *reglist_string)
3070 +{
3071 + int i;
3072 +
3073 + /* Make sure reglist_string is empty */
3074 + reglist_string[0] = '\0';
3075 +
3076 + for (i = 0; i < NUM_FP_REGS; ++i)
3077 + {
3078 + if (reglist_mask & (1 << i))
3079 + {
3080 + strlen (reglist_string) ?
3081 + sprintf (reglist_string, "%s, %s", reglist_string,
3082 + reg_names[INTERNAL_FP_REGNUM (i)]) :
3083 + sprintf (reglist_string, "%s", reg_names[INTERNAL_FP_REGNUM (i)]);
3084 + }
3085 + }
3086 +}
3087 +
3088 +void
3089 +avr32_make_reglist16 (int reglist16_vect, char *reglist16_string)
3090 +{
3091 + int i;
3092 +
3093 + /* Make sure reglist16_string is empty */
3094 + reglist16_string[0] = '\0';
3095 +
3096 + for (i = 0; i < 16; ++i)
3097 + {
3098 + if (reglist16_vect & (1 << i))
3099 + {
3100 + strlen (reglist16_string) ?
3101 + sprintf (reglist16_string, "%s, %s", reglist16_string,
3102 + reg_names[INTERNAL_REGNUM (i)]) :
3103 + sprintf (reglist16_string, "%s", reg_names[INTERNAL_REGNUM (i)]);
3104 + }
3105 + }
3106 +}
3107 +
3108 +int
3109 +avr32_convert_to_reglist16 (int reglist8_vect)
3110 +{
3111 + int reglist16_vect = 0;
3112 + if (reglist8_vect & 0x1)
3113 + reglist16_vect |= 0xF;
3114 + if (reglist8_vect & 0x2)
3115 + reglist16_vect |= 0xF0;
3116 + if (reglist8_vect & 0x4)
3117 + reglist16_vect |= 0x300;
3118 + if (reglist8_vect & 0x8)
3119 + reglist16_vect |= 0x400;
3120 + if (reglist8_vect & 0x10)
3121 + reglist16_vect |= 0x800;
3122 + if (reglist8_vect & 0x20)
3123 + reglist16_vect |= 0x1000;
3124 + if (reglist8_vect & 0x40)
3125 + reglist16_vect |= 0x4000;
3126 + if (reglist8_vect & 0x80)
3127 + reglist16_vect |= 0x8000;
3128 +
3129 + return reglist16_vect;
3130 +}
3131 +
3132 +void
3133 +avr32_make_reglist8 (int reglist8_vect, char *reglist8_string)
3134 +{
3135 + /* Make sure reglist8_string is empty */
3136 + reglist8_string[0] = '\0';
3137 +
3138 + if (reglist8_vect & 0x1)
3139 + sprintf (reglist8_string, "r0-r3");
3140 + if (reglist8_vect & 0x2)
3141 + strlen (reglist8_string) ? sprintf (reglist8_string, "%s, r4-r7",
3142 + reglist8_string) :
3143 + sprintf (reglist8_string, "r4-r7");
3144 + if (reglist8_vect & 0x4)
3145 + strlen (reglist8_string) ? sprintf (reglist8_string, "%s, r8-r9",
3146 + reglist8_string) :
3147 + sprintf (reglist8_string, "r8-r9");
3148 + if (reglist8_vect & 0x8)
3149 + strlen (reglist8_string) ? sprintf (reglist8_string, "%s, r10",
3150 + reglist8_string) :
3151 + sprintf (reglist8_string, "r10");
3152 + if (reglist8_vect & 0x10)
3153 + strlen (reglist8_string) ? sprintf (reglist8_string, "%s, r11",
3154 + reglist8_string) :
3155 + sprintf (reglist8_string, "r11");
3156 + if (reglist8_vect & 0x20)
3157 + strlen (reglist8_string) ? sprintf (reglist8_string, "%s, r12",
3158 + reglist8_string) :
3159 + sprintf (reglist8_string, "r12");
3160 + if (reglist8_vect & 0x40)
3161 + strlen (reglist8_string) ? sprintf (reglist8_string, "%s, lr",
3162 + reglist8_string) :
3163 + sprintf (reglist8_string, "lr");
3164 + if (reglist8_vect & 0x80)
3165 + strlen (reglist8_string) ? sprintf (reglist8_string, "%s, pc",
3166 + reglist8_string) :
3167 + sprintf (reglist8_string, "pc");
3168 +}
3169 +
3170 +int
3171 +avr32_eh_return_data_regno (int n)
3172 +{
3173 + if (n >= 0 && n <= 3)
3174 + return 8 + n;
3175 + else
3176 + return INVALID_REGNUM;
3177 +}
3178 +
3179 +/* Compute the distance from register FROM to register TO.
3180 + These can be the arg pointer, the frame pointer or
3181 + the stack pointer.
3182 + Typical stack layout looks like this:
3183 +
3184 + old stack pointer -> | |
3185 + ----
3186 + | | \
3187 + | | saved arguments for
3188 + | | vararg functions
3189 + arg_pointer -> | | /
3190 + --
3191 + | | \
3192 + | | call saved
3193 + | | registers
3194 + | | /
3195 + frame ptr -> --
3196 + | | \
3197 + | | local
3198 + | | variables
3199 + stack ptr --> | | /
3200 + --
3201 + | | \
3202 + | | outgoing
3203 + | | arguments
3204 + | | /
3205 + --
3206 +
3207 +  For a given function some or all of these stack components
3208 + may not be needed, giving rise to the possibility of
3209 + eliminating some of the registers.
3210 +
3211 + The values returned by this function must reflect the behaviour
3212 + of avr32_expand_prologue() and avr32_compute_save_reg_mask().
3213 +
3214 + The sign of the number returned reflects the direction of stack
3215 + growth, so the values are positive for all eliminations except
3216 + from the soft frame pointer to the hard frame pointer. */
3217 +
3218 +
3219 +int
3220 +avr32_initial_elimination_offset (int from, int to)
3221 +{
3222 + int i;
3223 + int call_saved_regs = 0;
3224 + unsigned long saved_reg_mask, saved_fp_reg_mask;
3225 + unsigned int local_vars = get_frame_size ();
3226 +
3227 + saved_reg_mask = avr32_compute_save_reg_mask (TRUE);
3228 + saved_fp_reg_mask = avr32_compute_save_fp_reg_mask ();
3229 +
3230 + for (i = 0; i < 16; ++i)
3231 + {
3232 + if (saved_reg_mask & (1 << i))
3233 + call_saved_regs += 4;
3234 + }
3235 +
3236 + for (i = 0; i < NUM_FP_REGS; ++i)
3237 + {
3238 + if (saved_fp_reg_mask & (1 << i))
3239 + call_saved_regs += 4;
3240 + }
3241 +
3242 + switch (from)
3243 + {
3244 + case ARG_POINTER_REGNUM:
3245 + switch (to)
3246 + {
3247 + case STACK_POINTER_REGNUM:
3248 + return call_saved_regs + local_vars;
3249 + case FRAME_POINTER_REGNUM:
3250 + return call_saved_regs;
3251 + default:
3252 + abort ();
3253 + }
3254 + case FRAME_POINTER_REGNUM:
3255 + switch (to)
3256 + {
3257 + case STACK_POINTER_REGNUM:
3258 + return local_vars;
3259 + default:
3260 + abort ();
3261 + }
3262 + default:
3263 + abort ();
3264 + }
3265 +}
3266 +
3267 +
3268 +/*
3269 + Returns a rtx used when passing the next argument to a function.
3270 +  avr32_init_cumulative_args() and avr32_function_arg_advance() set which
3271 + register to use.
3272 +*/
3273 +rtx
3274 +avr32_function_arg (CUMULATIVE_ARGS * cum, enum machine_mode mode,
3275 + tree type, int named)
3276 +{
3277 + int index = -1;
3278 +
3279 + HOST_WIDE_INT arg_size, arg_rsize;
3280 + if (type)
3281 + {
3282 + arg_size = int_size_in_bytes (type);
3283 + }
3284 + else
3285 + {
3286 + arg_size = GET_MODE_SIZE (mode);
3287 + }
3288 + arg_rsize = PUSH_ROUNDING (arg_size);
3289 +
3290 + /*
3291 + The last time this macro is called, it is called with mode == VOIDmode,
3292 + and its result is passed to the call or call_value pattern as operands 2
3293 + and 3 respectively. */
3294 + if (mode == VOIDmode)
3295 + {
3296 + return gen_rtx_CONST_INT (SImode, 22); /* ToDo: fixme. */
3297 + }
3298 +
3299 + if ((*targetm.calls.must_pass_in_stack) (mode, type) || !named)
3300 + {
3301 + return NULL_RTX;
3302 + }
3303 +
3304 + if (arg_rsize == 8)
3305 + {
3306 + /* use r11:r10 or r9:r8. */
3307 + if (!(GET_USED_INDEX (cum, 1) || GET_USED_INDEX (cum, 2)))
3308 + index = 1;
3309 + else if (!(GET_USED_INDEX (cum, 3) || GET_USED_INDEX (cum, 4)))
3310 + index = 3;
3311 + else
3312 + index = -1;
3313 + }
3314 + else if (arg_rsize == 4)
3315 + { /* Use first available register */
3316 + index = 0;
3317 + while (index <= LAST_CUM_REG_INDEX && GET_USED_INDEX (cum, index))
3318 + index++;
3319 + if (index > LAST_CUM_REG_INDEX)
3320 + index = -1;
3321 + }
3322 +
3323 + SET_REG_INDEX (cum, index);
3324 +
3325 + if (GET_REG_INDEX (cum) >= 0)
3326 + return gen_rtx_REG (mode,
3327 + avr32_function_arg_reglist[GET_REG_INDEX (cum)]);
3328 +
3329 + return NULL_RTX;
3330 +}
3331 +
3332 +/*
3333 + Set the register used for passing the first argument to a function.
3334 +*/
3335 +void
3336 +avr32_init_cumulative_args (CUMULATIVE_ARGS * cum, tree fntype,
3337 + rtx libname ATTRIBUTE_UNUSED,
3338 + tree fndecl ATTRIBUTE_UNUSED)
3339 +{
3340 + /* Set all registers as unused. */
3341 + SET_INDEXES_UNUSED (cum);
3342 +
3343 + /* Reset uses_anonymous_args */
3344 + cum->uses_anonymous_args = 0;
3345 +
3346 + /* Reset size of stack pushed arguments */
3347 + cum->stack_pushed_args_size = 0;
3348 +
3349 + /* If the function is returning a value passed in memory r12 is used as a
3350 + Return Value Pointer. */
3351 +
3352 + if (fntype != 0 && avr32_return_in_memory (TREE_TYPE (fntype), fntype))
3353 + {
3354 + SET_REG_INDEX (cum, 0);
3355 + SET_USED_INDEX (cum, GET_REG_INDEX (cum));
3356 + }
3357 +}
3358 +
3359 +/*
3360 + Set register used for passing the next argument to a function. Only the
3361 + Scratch Registers are used.
3362 +
3363 + number name
3364 + 15 r15 PC
3365 + 14 r14 LR
3366 + 13 r13 _SP_________
3367 + FIRST_CUM_REG 12 r12 _||_
3368 + 10 r11 ||
3369 + 11 r10 _||_ Scratch Registers
3370 + 8 r9 ||
3371 + LAST_SCRATCH_REG 9 r8 _\/_________
3372 + 6 r7 /\
3373 + 7 r6 ||
3374 + 4 r5 ||
3375 + 5 r4 ||
3376 + 2 r3 ||
3377 + 3 r2 ||
3378 + 0 r1 ||
3379 + 1 r0 _||_________
3380 +
3381 +*/
3382 +void
3383 +avr32_function_arg_advance (CUMULATIVE_ARGS * cum, enum machine_mode mode,
3384 + tree type, int named ATTRIBUTE_UNUSED)
3385 +{
3386 + HOST_WIDE_INT arg_size, arg_rsize;
3387 +
3388 + if (type)
3389 + {
3390 + arg_size = int_size_in_bytes (type);
3391 + }
3392 + else
3393 + {
3394 + arg_size = GET_MODE_SIZE (mode);
3395 + }
3396 + arg_rsize = PUSH_ROUNDING (arg_size);
3397 +
3398 + /* It the argument had to be passed in stack, no register is used. */
3399 + if ((*targetm.calls.must_pass_in_stack) (mode, type))
3400 + {
3401 + cum->stack_pushed_args_size += PUSH_ROUNDING (int_size_in_bytes (type));
3402 + return;
3403 + }
3404 +
3405 + /* Mark the used registers as "used". */
3406 + if (GET_REG_INDEX (cum) >= 0)
3407 + {
3408 + SET_USED_INDEX (cum, GET_REG_INDEX (cum));
3409 + if (arg_rsize == 8)
3410 + {
3411 + SET_USED_INDEX (cum, (GET_REG_INDEX (cum) + 1));
3412 + }
3413 + }
3414 + else
3415 + {
3416 + /* Had to use stack */
3417 + cum->stack_pushed_args_size += arg_rsize;
3418 + }
3419 +}
3420 +
3421 +/*
3422 +  Defines which direction to go to find the next register to use if the
3423 +  argument is larger than one register or for arguments shorter than an
3424 + int which is not promoted, such as the last part of structures with
3425 + size not a multiple of 4. */
3426 +enum direction
3427 +avr32_function_arg_padding (enum machine_mode mode ATTRIBUTE_UNUSED,
3428 + tree type)
3429 +{
3430 + /* Pad upward for all aggregates except byte and halfword sized aggregates
3431 + which can be passed in registers. */
3432 + if (type
3433 + && AGGREGATE_TYPE_P (type)
3434 + && (int_size_in_bytes (type) != 1)
3435 + && !((int_size_in_bytes (type) == 2)
3436 + && TYPE_ALIGN_UNIT (type) >= 2)
3437 + && (int_size_in_bytes (type) & 0x3))
3438 + {
3439 + return upward;
3440 + }
3441 +
3442 + return downward;
3443 +}
3444 +
3445 +/*
3446 + Return a rtx used for the return value from a function call.
3447 +*/
3448 +rtx
3449 +avr32_function_value (tree type, tree func)
3450 +{
3451 + if (avr32_return_in_memory (type, func))
3452 + return NULL_RTX;
3453 +
3454 + if (int_size_in_bytes (type) <= 4)
3455 + if (avr32_return_in_msb (type))
3456 + /* Aggregates of size less than a word which does align the data in the
3457 + MSB must use SImode for r12. */
3458 + return gen_rtx_REG (SImode, RET_REGISTER);
3459 + else
3460 + return gen_rtx_REG (TYPE_MODE (type), RET_REGISTER);
3461 + else if (int_size_in_bytes (type) <= 8)
3462 + return gen_rtx_REG (TYPE_MODE (type), INTERNAL_REGNUM (11));
3463 +
3464 + return NULL_RTX;
3465 +}
3466 +
3467 +/*
3468 + Return a rtx used for the return value from a library function call.
3469 +*/
3470 +rtx
3471 +avr32_libcall_value (enum machine_mode mode)
3472 +{
3473 +
3474 + if (GET_MODE_SIZE (mode) <= 4)
3475 + return gen_rtx_REG (mode, RET_REGISTER);
3476 + else if (GET_MODE_SIZE (mode) <= 8)
3477 + return gen_rtx_REG (mode, INTERNAL_REGNUM (11));
3478 + else
3479 + return NULL_RTX;
3480 +}
3481 +
3482 +/* Return TRUE if X references a SYMBOL_REF. */
3483 +int
3484 +symbol_mentioned_p (rtx x)
3485 +{
3486 + const char *fmt;
3487 + int i;
3488 +
3489 + if (GET_CODE (x) == SYMBOL_REF)
3490 + return 1;
3491 +
3492 + fmt = GET_RTX_FORMAT (GET_CODE (x));
3493 +
3494 + for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
3495 + {
3496 + if (fmt[i] == 'E')
3497 + {
3498 + int j;
3499 +
3500 + for (j = XVECLEN (x, i) - 1; j >= 0; j--)
3501 + if (symbol_mentioned_p (XVECEXP (x, i, j)))
3502 + return 1;
3503 + }
3504 + else if (fmt[i] == 'e' && symbol_mentioned_p (XEXP (x, i)))
3505 + return 1;
3506 + }
3507 +
3508 + return 0;
3509 +}
3510 +
3511 +/* Return TRUE if X references a LABEL_REF. */
3512 +int
3513 +label_mentioned_p (rtx x)
3514 +{
3515 + const char *fmt;
3516 + int i;
3517 +
3518 + if (GET_CODE (x) == LABEL_REF)
3519 + return 1;
3520 +
3521 + fmt = GET_RTX_FORMAT (GET_CODE (x));
3522 + for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
3523 + {
3524 + if (fmt[i] == 'E')
3525 + {
3526 + int j;
3527 +
3528 + for (j = XVECLEN (x, i) - 1; j >= 0; j--)
3529 + if (label_mentioned_p (XVECEXP (x, i, j)))
3530 + return 1;
3531 + }
3532 + else if (fmt[i] == 'e' && label_mentioned_p (XEXP (x, i)))
3533 + return 1;
3534 + }
3535 +
3536 + return 0;
3537 +}
3538 +
3539 +
3540 +int
3541 +avr32_legitimate_pic_operand_p (rtx x)
3542 +{
3543 +
3544 + /* We can't have const, this must be broken down to a symbol. */
3545 + if (GET_CODE (x) == CONST)
3546 + return FALSE;
3547 +
3548 + /* Can't access symbols or labels via the constant pool either */
3549 + if ((GET_CODE (x) == SYMBOL_REF
3550 + && CONSTANT_POOL_ADDRESS_P (x)
3551 + && (symbol_mentioned_p (get_pool_constant (x))
3552 + || label_mentioned_p (get_pool_constant (x)))))
3553 + return FALSE;
3554 +
3555 + return TRUE;
3556 +}
3557 +
3558 +
3559 +rtx
3560 +legitimize_pic_address (rtx orig, enum machine_mode mode ATTRIBUTE_UNUSED,
3561 + rtx reg)
3562 +{
3563 +
3564 + if (GET_CODE (orig) == SYMBOL_REF || GET_CODE (orig) == LABEL_REF)
3565 + {
3566 + int subregs = 0;
3567 +
3568 + if (reg == 0)
3569 + {
3570 + if (no_new_pseudos)
3571 + abort ();
3572 + else
3573 + reg = gen_reg_rtx (Pmode);
3574 +
3575 + subregs = 1;
3576 + }
3577 +
3578 + emit_move_insn (reg, orig);
3579 +
3580 + /* Only set current function as using pic offset table if flag_pic is
3581 + set. This is because this function is also used if
3582 + TARGET_HAS_ASM_ADDR_PSEUDOS is set. */
3583 + if (flag_pic)
3584 + current_function_uses_pic_offset_table = 1;
3585 +
3586 + /* Put a REG_EQUAL note on this insn, so that it can be optimized by
3587 + loop. */
3588 + return reg;
3589 + }
3590 + else if (GET_CODE (orig) == CONST)
3591 + {
3592 + rtx base, offset;
3593 +
3594 + if (flag_pic
3595 + && GET_CODE (XEXP (orig, 0)) == PLUS
3596 + && XEXP (XEXP (orig, 0), 0) == pic_offset_table_rtx)
3597 + return orig;
3598 +
3599 + if (reg == 0)
3600 + {
3601 + if (no_new_pseudos)
3602 + abort ();
3603 + else
3604 + reg = gen_reg_rtx (Pmode);
3605 + }
3606 +
3607 + if (GET_CODE (XEXP (orig, 0)) == PLUS)
3608 + {
3609 + base =
3610 + legitimize_pic_address (XEXP (XEXP (orig, 0), 0), Pmode, reg);
3611 + offset =
3612 + legitimize_pic_address (XEXP (XEXP (orig, 0), 1), Pmode,
3613 + base == reg ? 0 : reg);
3614 + }
3615 + else
3616 + abort ();
3617 +
3618 + if (GET_CODE (offset) == CONST_INT)
3619 + {
3620 + /* The base register doesn't really matter, we only want to test
3621 + the index for the appropriate mode. */
3622 + if (!avr32_const_ok_for_constraint_p (INTVAL (offset), 'I', "Is21"))
3623 + {
3624 + if (!no_new_pseudos)
3625 + offset = force_reg (Pmode, offset);
3626 + else
3627 + abort ();
3628 + }
3629 +
3630 + if (GET_CODE (offset) == CONST_INT)
3631 + return plus_constant (base, INTVAL (offset));
3632 + }
3633 +
3634 + return gen_rtx_PLUS (Pmode, base, offset);
3635 + }
3636 +
3637 + return orig;
3638 +}
3639 +
3640 +/* Generate code to load the PIC register. */
3641 +void
3642 +avr32_load_pic_register (void)
3643 +{
3644 + rtx l1, pic_tmp;
3645 + rtx global_offset_table;
3646 +
3647 + if ((current_function_uses_pic_offset_table == 0) || TARGET_NO_INIT_GOT)
3648 + return;
3649 +
3650 + if (!flag_pic)
3651 + abort ();
3652 +
3653 + l1 = gen_label_rtx ();
3654 +
3655 + global_offset_table = gen_rtx_SYMBOL_REF (Pmode, "_GLOBAL_OFFSET_TABLE_");
3656 + pic_tmp =
3657 + gen_rtx_CONST (Pmode,
3658 + gen_rtx_MINUS (SImode, gen_rtx_LABEL_REF (Pmode, l1),
3659 + global_offset_table));
3660 + emit_insn (gen_pic_load_addr
3661 + (pic_offset_table_rtx, force_const_mem (SImode, pic_tmp)));
3662 + emit_insn (gen_pic_compute_got_from_pc (pic_offset_table_rtx, l1));
3663 +
3664 + /* Need to emit this whether or not we obey regdecls, since setjmp/longjmp
3665 + can cause life info to screw up. */
3666 + emit_insn (gen_rtx_USE (VOIDmode, pic_offset_table_rtx));
3667 +}
3668 +
3669 +
3670 +
3671 +/* This hook should return true if values of type type are returned at the most
3672 + significant end of a register (in other words, if they are padded at the
3673 + least significant end). You can assume that type is returned in a register;
3674 + the caller is required to check this. Note that the register provided by
3675 + FUNCTION_VALUE must be able to hold the complete return value. For example,
3676 + if a 1-, 2- or 3-byte structure is returned at the most significant end of a
3677 + 4-byte register, FUNCTION_VALUE should provide an SImode rtx. */
3678 +bool
3679 +avr32_return_in_msb (tree type ATTRIBUTE_UNUSED)
3680 +{
3681 + /* if ( AGGREGATE_TYPE_P (type) ) if ((int_size_in_bytes(type) == 1) ||
3682 + ((int_size_in_bytes(type) == 2) && TYPE_ALIGN_UNIT(type) >= 2)) return
3683 + false; else return true; */
3684 +
3685 + return false;
3686 +}
3687 +
3688 +
3689 +/*
3690 + Returns one if a certain function value is going to be returned in memory
3691 + and zero if it is going to be returned in a register.
3692 +
3693 + BLKmode and all other modes that is larger than 64 bits are returned in
3694 + memory.
3695 +*/
3696 +bool
3697 +avr32_return_in_memory (tree type, tree fntype ATTRIBUTE_UNUSED)
3698 +{
3699 + if (TYPE_MODE (type) == VOIDmode)
3700 + return false;
3701 +
3702 + if (int_size_in_bytes (type) > (2 * UNITS_PER_WORD)
3703 + || int_size_in_bytes (type) == -1)
3704 + {
3705 + return true;
3706 + }
3707 +
3708 + /* If we have an aggregate then use the same mechanism as when checking if
3709 + it should be passed on the stack. */
3710 + if (type
3711 + && AGGREGATE_TYPE_P (type)
3712 + && (*targetm.calls.must_pass_in_stack) (TYPE_MODE (type), type))
3713 + return true;
3714 +
3715 + return false;
3716 +}
3717 +
3718 +
3719 +/* Output the constant part of the trampoline.
3720 + lddpc r0, pc[0x8:e] ; load static chain register
3721 + lddpc pc, pc[0x8:e] ; jump to subrutine
3722 + .long 0 ; Address to static chain,
3723 + ; filled in by avr32_initialize_trampoline()
3724 +   .long 0             ; Address to subroutine,
3725 +                       ; filled in by avr32_initialize_trampoline()
3726 +*/
3727 +void
3728 +avr32_trampoline_template (FILE * file)
3729 +{
3730 + fprintf (file, "\tlddpc r0, pc[8]\n");
3731 + fprintf (file, "\tlddpc pc, pc[8]\n");
3732 + /* make room for the address of the static chain. */
3733 + fprintf (file, "\t.long\t0\n");
3734 +  /* make room for the address to the subroutine. */
3735 + fprintf (file, "\t.long\t0\n");
3736 +}
3737 +
3738 +
3739 +/*
3740 + Initialize the variable parts of a trampoline.
3741 +*/
3742 +void
3743 +avr32_initialize_trampoline (rtx addr, rtx fnaddr, rtx static_chain)
3744 +{
3745 + /* Store the address to the static chain. */
3746 + emit_move_insn (gen_rtx_MEM
3747 + (SImode, plus_constant (addr, TRAMPOLINE_SIZE - 4)),
3748 + static_chain);
3749 +
3750 + /* Store the address to the function. */
3751 + emit_move_insn (gen_rtx_MEM (SImode, plus_constant (addr, TRAMPOLINE_SIZE)),
3752 + fnaddr);
3753 +
3754 + emit_insn (gen_cache (gen_rtx_REG (SImode, 13),
3755 + gen_rtx_CONST_INT (SImode,
3756 + AVR32_CACHE_INVALIDATE_ICACHE)));
3757 +}
3758 +
3759 +/* Return nonzero if X is valid as an addressing register. */
3760 +int
3761 +avr32_address_register_rtx_p (rtx x, int strict_p)
3762 +{
3763 + int regno;
3764 +
3765 + if (GET_CODE (x) != REG)
3766 + return 0;
3767 +
3768 + regno = REGNO (x);
3769 +
3770 + if (strict_p)
3771 + return REGNO_OK_FOR_BASE_P (regno);
3772 +
3773 + return (regno <= LAST_REGNUM || regno >= FIRST_PSEUDO_REGISTER);
3774 +}
3775 +
3776 +/* Return nonzero if INDEX is valid for an address index operand. */
3777 +int
3778 +avr32_legitimate_index_p (enum machine_mode mode, rtx index, int strict_p)
3779 +{
3780 + enum rtx_code code = GET_CODE (index);
3781 +
3782 + if (mode == TImode)
3783 + return 0;
3784 +
3785 + /* Standard coprocessor addressing modes. */
3786 + if (code == CONST_INT)
3787 + {
3788 + if (TARGET_HARD_FLOAT && GET_MODE_CLASS (mode) == MODE_FLOAT)
3789 + /* Coprocessor mem insns has a smaller reach than ordinary mem insns */
3790 + return CONST_OK_FOR_CONSTRAINT_P (INTVAL (index), 'K', "Ku14");
3791 + else
3792 + return CONST_OK_FOR_CONSTRAINT_P (INTVAL (index), 'K', "Ks16");
3793 + }
3794 +
3795 + if (avr32_address_register_rtx_p (index, strict_p))
3796 + return 1;
3797 +
3798 + if (code == MULT)
3799 + {
3800 + rtx xiop0 = XEXP (index, 0);
3801 + rtx xiop1 = XEXP (index, 1);
3802 + return ((avr32_address_register_rtx_p (xiop0, strict_p)
3803 + && power_of_two_operand (xiop1, SImode)
3804 + && (INTVAL (xiop1) <= 8))
3805 + || (avr32_address_register_rtx_p (xiop1, strict_p)
3806 + && power_of_two_operand (xiop0, SImode)
3807 + && (INTVAL (xiop0) <= 8)));
3808 + }
3809 + else if (code == ASHIFT)
3810 + {
3811 + rtx op = XEXP (index, 1);
3812 +
3813 + return (avr32_address_register_rtx_p (XEXP (index, 0), strict_p)
3814 + && GET_CODE (op) == CONST_INT
3815 + && INTVAL (op) > 0 && INTVAL (op) <= 3);
3816 + }
3817 +
3818 + return 0;
3819 +}
3820 +
3821 +/*
3822 + Used in the GO_IF_LEGITIMATE_ADDRESS macro. Returns a nonzero value if
3823 + the RTX x is a legitimate memory address.
3824 +
3825 +  Returns NO_REGS if the address is not legitimate, GENERAL_REGS or ALL_REGS
3826 + if it is.
3827 +*/
3828 +
3829 +/* Forward declaration*/
3830 +int is_minipool_label (rtx label);
3831 +
3832 +int
3833 +avr32_legitimate_address (enum machine_mode mode ATTRIBUTE_UNUSED,
3834 + rtx x, int strict)
3835 +{
3836 +
3837 + switch (GET_CODE (x))
3838 + {
3839 + case REG:
3840 + return avr32_address_register_rtx_p (x, strict);
3841 + case CONST:
3842 + {
3843 + rtx label = avr32_find_symbol (x);
3844 + if (label
3845 + &&
3846 + ( (CONSTANT_POOL_ADDRESS_P (label)
3847 + && !(flag_pic
3848 + && (symbol_mentioned_p (get_pool_constant (label))
3849 + || label_mentioned_p (get_pool_constant(label)))))
3850 + /* TODO! Can this ever happen??? */
3851 + || ((GET_CODE (label) == LABEL_REF)
3852 + && GET_CODE (XEXP (label, 0)) == CODE_LABEL
3853 + && is_minipool_label (XEXP (label, 0)))))
3854 + {
3855 + return TRUE;
3856 + }
3857 + }
3858 + break;
3859 + case LABEL_REF:
3860 + if (GET_CODE (XEXP (x, 0)) == CODE_LABEL
3861 + && is_minipool_label (XEXP (x, 0)))
3862 + {
3863 + return TRUE;
3864 + }
3865 + break;
3866 + case SYMBOL_REF:
3867 + {
3868 + if (CONSTANT_POOL_ADDRESS_P (x)
3869 + && !(flag_pic
3870 + && (symbol_mentioned_p (get_pool_constant (x))
3871 + || label_mentioned_p (get_pool_constant (x)))))
3872 + return TRUE;
3873 + /*
3874 + A symbol_ref is only legal if it is a function. If all of them are
3875 + legal, a pseudo reg that is a constant will be replaced by a
3876 +     symbol_ref and make illegal code. SYMBOL_REF_FLAG is set by
3877 + ENCODE_SECTION_INFO. */
3878 + else if (SYMBOL_REF_RCALL_FUNCTION_P (x))
3879 + return TRUE;
3880 + break;
3881 + }
3882 + case PRE_DEC: /* (pre_dec (...)) */
3883 + case POST_INC: /* (post_inc (...)) */
3884 + return avr32_address_register_rtx_p (XEXP (x, 0), strict);
3885 + case PLUS: /* (plus (...) (...)) */
3886 + {
3887 + rtx xop0 = XEXP (x, 0);
3888 + rtx xop1 = XEXP (x, 1);
3889 +
3890 + return ((avr32_address_register_rtx_p (xop0, strict)
3891 + && avr32_legitimate_index_p (mode, xop1, strict))
3892 + || (avr32_address_register_rtx_p (xop1, strict)
3893 + && avr32_legitimate_index_p (mode, xop0, strict)));
3894 + }
3895 + default:
3896 + break;
3897 + }
3898 +
3899 + return FALSE;
3900 +}
3901 +
3902 +
3903 +int
3904 +avr32_const_double_immediate (rtx value)
3905 +{
3906 + HOST_WIDE_INT hi, lo;
3907 +
3908 + if (GET_CODE (value) != CONST_DOUBLE)
3909 + return FALSE;
3910 +
3911 + if (GET_MODE (value) == DImode)
3912 + {
3913 + hi = CONST_DOUBLE_HIGH (value);
3914 + lo = CONST_DOUBLE_LOW (value);
3915 + }
3916 + else
3917 + {
3918 + HOST_WIDE_INT target_float[2];
3919 + hi = lo = 0;
3920 + real_to_target (target_float, CONST_DOUBLE_REAL_VALUE (value),
3921 + GET_MODE (value));
3922 + lo = target_float[0];
3923 + hi = target_float[1];
3924 + }
3925 + if (avr32_const_ok_for_constraint_p (lo, 'K', "Ks21")
3926 + && ((GET_MODE (value) == SFmode)
3927 + || avr32_const_ok_for_constraint_p (hi, 'K', "Ks21")))
3928 + {
3929 + return TRUE;
3930 + }
3931 +
3932 + return FALSE;
3933 +}
3934 +
3935 +
3936 +int
3937 +avr32_legitimate_constant_p (rtx x)
3938 +{
3939 + switch (GET_CODE (x))
3940 + {
3941 + case CONST_INT:
3942 + return avr32_const_ok_for_constraint_p (INTVAL (x), 'K', "Ks21");
3943 + case CONST_DOUBLE:
3944 + if (GET_MODE (x) == SFmode
3945 + || GET_MODE (x) == DFmode || GET_MODE (x) == DImode)
3946 + return avr32_const_double_immediate (x);
3947 + else
3948 + return 0;
3949 + case LABEL_REF:
3950 + return flag_pic || TARGET_HAS_ASM_ADDR_PSEUDOS;
3951 + case SYMBOL_REF:
3952 + return flag_pic || TARGET_HAS_ASM_ADDR_PSEUDOS;
3953 + case CONST:
3954 + /* We must handle this one in the movsi expansion in order for gcc not
3955 + to put it in the constant pool. */
3956 + return 0 /* flag_pic || TARGET_HAS_ASM_ADDR_PSEUDOS */ ;
3957 + case HIGH:
3958 + case CONST_VECTOR:
3959 + return 0;
3960 + default:
3961 + printf ("%s():\n", __FUNCTION__);
3962 + debug_rtx (x);
3963 + return 1;
3964 + }
3965 +}
3966 +
3967 +
3968 +/* Strip any special encoding from labels */
3969 +const char *
3970 +avr32_strip_name_encoding (const char *name)
3971 +{
3972 + const char *stripped = name;
3973 +
3974 + while (1)
3975 + {
3976 + switch (stripped[0])
3977 + {
3978 + case '#':
3979 + stripped = strchr (name + 1, '#') + 1;
3980 + break;
3981 + case '*':
3982 + stripped = &stripped[1];
3983 + break;
3984 + default:
3985 + return stripped;
3986 + }
3987 + }
3988 +}
3989 +
3990 +
3991 +
3992 +/* Do anything needed before RTL is emitted for each function. */
3993 +static struct machine_function *
3994 +avr32_init_machine_status (void)
3995 +{
3996 + struct machine_function *machine;
3997 + machine =
3998 + (machine_function *) ggc_alloc_cleared (sizeof (machine_function));
3999 +
4000 +#if AVR32_FT_UNKNOWN != 0
4001 + machine->func_type = AVR32_FT_UNKNOWN;
4002 +#endif
4003 +
4004 + machine->minipool_label_head = 0;
4005 + machine->minipool_label_tail = 0;
4006 + return machine;
4007 +}
4008 +
4009 +void
4010 +avr32_init_expanders (void)
4011 +{
4012 + /* Arrange to initialize and mark the machine per-function status. */
4013 + init_machine_status = avr32_init_machine_status;
4014 +}
4015 +
4016 +
4017 +/* Return an RTX indicating where the return address to the
4018 + calling function can be found. */
4019 +
4020 +rtx
4021 +avr32_return_addr (int count, rtx frame ATTRIBUTE_UNUSED)
4022 +{
4023 + if (count != 0)
4024 + return NULL_RTX;
4025 +
4026 + return get_hard_reg_initial_val (Pmode, LR_REGNUM);
4027 +}
4028 +
4029 +
4030 +void
4031 +avr32_encode_section_info (tree decl, rtx rtl, int first)
4032 +{
4033 +
4034 + if (first && DECL_P (decl))
4035 + {
4036 + /* Set SYMBOL_REG_FLAG for local functions */
4037 + if (!TREE_PUBLIC (decl) && TREE_CODE (decl) == FUNCTION_DECL)
4038 + {
4039 + if ((*targetm.binds_local_p) (decl))
4040 + {
4041 + SYMBOL_REF_FLAG (XEXP (rtl, 0)) = 1;
4042 + }
4043 + }
4044 + }
4045 +}
4046 +
4047 +
4048 +void
4049 +avr32_asm_output_ascii (FILE * stream, char *ptr, int len)
4050 +{
4051 + int i, i_new = 0;
4052 + char *new_ptr = xmalloc (4 * len);
4053 + if (new_ptr == NULL)
4054 + internal_error ("Out of memory.");
4055 +
4056 + for (i = 0; i < len; i++)
4057 + {
4058 + if (ptr[i] == '\n')
4059 + {
4060 + new_ptr[i_new++] = '\\';
4061 + new_ptr[i_new++] = '0';
4062 + new_ptr[i_new++] = '1';
4063 + new_ptr[i_new++] = '2';
4064 + }
4065 + else if (ptr[i] == '\"')
4066 + {
4067 + new_ptr[i_new++] = '\\';
4068 + new_ptr[i_new++] = '\"';
4069 + }
4070 + else if (ptr[i] == '\\')
4071 + {
4072 + new_ptr[i_new++] = '\\';
4073 + new_ptr[i_new++] = '\\';
4074 + }
4075 + else if (ptr[i] == '\0' && i + 1 < len)
4076 + {
4077 + new_ptr[i_new++] = '\\';
4078 + new_ptr[i_new++] = '0';
4079 + }
4080 + else
4081 + {
4082 + new_ptr[i_new++] = ptr[i];
4083 + }
4084 + }
4085 +
4086 + /* Terminate new_ptr. */
4087 + new_ptr[i_new] = '\0';
4088 + fprintf (stream, "\t.ascii\t\"%s\"\n", new_ptr);
4089 + free (new_ptr);
4090 +}
4091 +
4092 +
4093 +void
4094 +avr32_asm_output_label (FILE * stream, const char *name)
4095 +{
4096 + name = avr32_strip_name_encoding (name);
4097 +
4098 + /* Print the label. */
4099 + assemble_name (stream, name);
4100 + fprintf (stream, ":\n");
4101 +}
4102 +
4103 +
4104 +
4105 +void
4106 +avr32_asm_weaken_label (FILE * stream, const char *name)
4107 +{
4108 + fprintf (stream, "\t.weak ");
4109 + assemble_name (stream, name);
4110 + fprintf (stream, "\n");
4111 +}
4112 +
4113 +/*
4114 +  Strip any name-encoding prefixes ('#...#' sections and a leading '*')
4115 +  from a label reference and output it: a '*' means the name is emitted
4116 +  verbatim, otherwise the user label prefix (%U) is prepended.
4117 +*/
4117 +void
4118 +avr32_asm_output_labelref (FILE * stream, const char *name)
4119 +{
4120 + int verbatim = FALSE;
4121 + const char *stripped = name;
4122 + int strip_finished = FALSE;
4123 +
4124 + while (!strip_finished)
4125 + {
4126 + switch (stripped[0])
4127 + {
4128 + case '#':
4129 + stripped = strchr (name + 1, '#') + 1;
4130 + break;
4131 + case '*':
4132 + stripped = &stripped[1];
4133 + verbatim = TRUE;
4134 + break;
4135 + default:
4136 + strip_finished = TRUE;
4137 + break;
4138 + }
4139 + }
4140 +
4141 + if (verbatim)
4142 + fputs (stripped, stream);
4143 + else
4144 + asm_fprintf (stream, "%U%s", stripped);
4145 +}
4146 +
4147 +
4148 +
4149 +/*
4150 + Check if the comparison in compare_exp is redundant
4151 + for the condition given in next_cond given that the
4152 + needed flags are already set by an earlier instruction.
4153 + Uses cc_prev_status to check this.
4154 +
4155 + Returns NULL_RTX if the compare is not redundant
4156 + or the new condition to use in the conditional
4157 + instruction if the compare is redundant.
4158 +*/
4159 +static rtx
4160 +is_compare_redundant (rtx compare_exp, rtx next_cond)
4161 +{
4162 + int z_flag_valid = FALSE;
4163 + int n_flag_valid = FALSE;
4164 + rtx new_cond;
4165 +
4166 + if (GET_CODE (compare_exp) != COMPARE)
4167 + return NULL_RTX;
4168 +
4169 +
4170 + if (GET_MODE (compare_exp) != SImode)
4171 + return NULL_RTX;
4172 +
4173 + if (rtx_equal_p (cc_prev_status.mdep.value, compare_exp))
4174 + {
4175 + /* cc0 already contains the correct comparison -> delete cmp insn */
4176 + return next_cond;
4177 + }
4178 +
4179 + switch (cc_prev_status.mdep.flags)
4180 + {
4181 + case CC_SET_VNCZ:
4182 + case CC_SET_NCZ:
4183 + n_flag_valid = TRUE;
4184 + case CC_SET_CZ:
4185 + case CC_SET_Z:
4186 + z_flag_valid = TRUE;
4187 + }
4188 +
4189 + if (cc_prev_status.mdep.value
4190 + && REG_P (XEXP (compare_exp, 0))
4191 + && REGNO (XEXP (compare_exp, 0)) == REGNO (cc_prev_status.mdep.value)
4192 + && GET_CODE (XEXP (compare_exp, 1)) == CONST_INT
4193 + && next_cond != NULL_RTX)
4194 + {
4195 + if (INTVAL (XEXP (compare_exp, 1)) == 0
4196 + && z_flag_valid
4197 + && (GET_CODE (next_cond) == EQ || GET_CODE (next_cond) == NE))
4198 +	/* We can skip the comparison since the Z flag already reflects ops[0] */
4199 + return next_cond;
4200 + else if (n_flag_valid
4201 + && ((INTVAL (XEXP (compare_exp, 1)) == 0
4202 + && (GET_CODE (next_cond) == GE
4203 + || GET_CODE (next_cond) == LT))
4204 + || (INTVAL (XEXP (compare_exp, 1)) == -1
4205 + && (GET_CODE (next_cond) == GT
4206 + || GET_CODE (next_cond) == LE))))
4207 + {
4208 +	  /* We can skip the comparison since the N flag already reflects ops[0],
4209 + which means that we can use the mi/pl conditions to check if
4210 + ops[0] is GE or LT 0. */
4211 + if ((GET_CODE (next_cond) == GE) || (GET_CODE (next_cond) == GT))
4212 + new_cond =
4213 + gen_rtx_UNSPEC (CCmode, gen_rtvec (2, cc0_rtx, const0_rtx),
4214 + UNSPEC_COND_PL);
4215 + else
4216 + new_cond =
4217 + gen_rtx_UNSPEC (CCmode, gen_rtvec (2, cc0_rtx, const0_rtx),
4218 + UNSPEC_COND_MI);
4219 + return new_cond;
4220 + }
4221 + }
4222 + return NULL_RTX;
4223 +}
4224 +
4225 +/* Updates cc_status. */
4226 +void
4227 +avr32_notice_update_cc (rtx exp, rtx insn)
4228 +{
4229 + switch (get_attr_cc (insn))
4230 + {
4231 + case CC_CALL_SET:
4232 + CC_STATUS_INIT;
4233 + FPCC_STATUS_INIT;
4234 + /* Check if the function call returns a value in r12 */
4235 + if (REG_P (recog_data.operand[0])
4236 + && REGNO (recog_data.operand[0]) == RETVAL_REGNUM)
4237 + {
4238 + cc_status.flags = 0;
4239 + cc_status.mdep.value =
4240 + gen_rtx_COMPARE (SImode, recog_data.operand[0], const0_rtx);
4241 + cc_status.mdep.flags = CC_SET_VNCZ;
4242 +
4243 + }
4244 + break;
4245 + case CC_COMPARE:
4246 +      /* Only update cc_status when the compare is not redundant, i.e.
4247 +         when it will not be optimized away */
4248 + if (is_compare_redundant (SET_SRC (exp), get_next_insn_cond (insn))
4249 + == NULL_RTX)
4250 + {
4251 +
4252 + /* Reset the nonstandard flag */
4253 + CC_STATUS_INIT;
4254 + cc_status.flags = 0;
4255 + cc_status.mdep.value = SET_SRC (exp);
4256 + cc_status.mdep.flags = CC_SET_VNCZ;
4257 + }
4258 + break;
4259 + case CC_FPCOMPARE:
4260 + /* Check that floating-point compare will not be optimized away if so
4261 + nothing should be done */
4262 + if (!rtx_equal_p (cc_prev_status.mdep.fpvalue, SET_SRC (exp)))
4263 + {
4264 + /* cc0 already contains the correct comparison -> delete cmp insn */
4265 + /* Reset the nonstandard flag */
4266 + cc_status.mdep.fpvalue = SET_SRC (exp);
4267 + cc_status.mdep.fpflags = CC_SET_CZ;
4268 + }
4269 + break;
4270 + case CC_FROM_FPCC:
4271 + /* Flags are updated with flags from Floating-point coprocessor, set
4272 + CC_NOT_SIGNED flag since the flags are set so that unsigned
4273 + condidion codes can be used directly. */
4274 + CC_STATUS_INIT;
4275 + cc_status.flags = CC_NOT_SIGNED;
4276 + cc_status.mdep.value = cc_status.mdep.fpvalue;
4277 + cc_status.mdep.flags = cc_status.mdep.fpflags;
4278 + break;
4279 + case CC_BLD:
4280 + /* Bit load is kind of like an inverted testsi, because the Z flag is
4281 + inverted */
4282 + CC_STATUS_INIT;
4283 + cc_status.flags = CC_INVERTED;
4284 + cc_status.mdep.value = SET_SRC (exp);
4285 + cc_status.mdep.flags = CC_SET_Z;
4286 + break;
4287 + case CC_NONE:
4288 + /* Insn does not affect CC at all. Check if the instruction updates
4289 + some of the register currently reflected in cc0 */
4290 +
4291 + if ((GET_CODE (exp) == SET)
4292 + && (cc_status.value1 || cc_status.value2 || cc_status.mdep.value)
4293 + && (reg_mentioned_p (SET_DEST (exp), cc_status.value1)
4294 + || reg_mentioned_p (SET_DEST (exp), cc_status.value2)
4295 + || reg_mentioned_p (SET_DEST (exp), cc_status.mdep.value)))
4296 + {
4297 + CC_STATUS_INIT;
4298 + }
4299 +
4300 + /* If this is a parallel we must step through each of the parallel
4301 + expressions */
4302 + if (GET_CODE (exp) == PARALLEL)
4303 + {
4304 + int i;
4305 + for (i = 0; i < XVECLEN (exp, 0); ++i)
4306 + {
4307 + rtx vec_exp = XVECEXP (exp, 0, i);
4308 + if ((GET_CODE (vec_exp) == SET)
4309 + && (cc_status.value1 || cc_status.value2
4310 + || cc_status.mdep.value)
4311 + && (reg_mentioned_p (SET_DEST (vec_exp), cc_status.value1)
4312 + || reg_mentioned_p (SET_DEST (vec_exp),
4313 + cc_status.value2)
4314 + || reg_mentioned_p (SET_DEST (vec_exp),
4315 + cc_status.mdep.value)))
4316 + {
4317 + CC_STATUS_INIT;
4318 + }
4319 + }
4320 + }
4321 +
4322 +      /* Check if we have memory operations with post_inc or pre_dec on the
4323 + register currently reflected in cc0 */
4324 + if (GET_CODE (exp) == SET
4325 + && GET_CODE (SET_SRC (exp)) == MEM
4326 + && (GET_CODE (XEXP (SET_SRC (exp), 0)) == POST_INC
4327 + || GET_CODE (XEXP (SET_SRC (exp), 0)) == PRE_DEC)
4328 + &&
4329 + (reg_mentioned_p
4330 + (XEXP (XEXP (SET_SRC (exp), 0), 0), cc_status.value1)
4331 + || reg_mentioned_p (XEXP (XEXP (SET_SRC (exp), 0), 0),
4332 + cc_status.value2)
4333 + || reg_mentioned_p (XEXP (XEXP (SET_SRC (exp), 0), 0),
4334 + cc_status.mdep.value)))
4335 + CC_STATUS_INIT;
4336 +
4337 + if (GET_CODE (exp) == SET
4338 + && GET_CODE (SET_DEST (exp)) == MEM
4339 + && (GET_CODE (XEXP (SET_DEST (exp), 0)) == POST_INC
4340 + || GET_CODE (XEXP (SET_DEST (exp), 0)) == PRE_DEC)
4341 + &&
4342 + (reg_mentioned_p
4343 + (XEXP (XEXP (SET_DEST (exp), 0), 0), cc_status.value1)
4344 + || reg_mentioned_p (XEXP (XEXP (SET_DEST (exp), 0), 0),
4345 + cc_status.value2)
4346 + || reg_mentioned_p (XEXP (XEXP (SET_DEST (exp), 0), 0),
4347 + cc_status.mdep.value)))
4348 + CC_STATUS_INIT;
4349 + break;
4350 +
4351 + case CC_SET_VNCZ:
4352 + CC_STATUS_INIT;
4353 + cc_status.mdep.value = recog_data.operand[0];
4354 + cc_status.mdep.flags = CC_SET_VNCZ;
4355 + break;
4356 +
4357 + case CC_SET_NCZ:
4358 + CC_STATUS_INIT;
4359 + cc_status.mdep.value = recog_data.operand[0];
4360 + cc_status.mdep.flags = CC_SET_NCZ;
4361 + break;
4362 +
4363 + case CC_SET_CZ:
4364 + CC_STATUS_INIT;
4365 + cc_status.mdep.value = recog_data.operand[0];
4366 + cc_status.mdep.flags = CC_SET_CZ;
4367 + break;
4368 +
4369 + case CC_SET_Z:
4370 + CC_STATUS_INIT;
4371 + cc_status.mdep.value = recog_data.operand[0];
4372 + cc_status.mdep.flags = CC_SET_Z;
4373 + break;
4374 +
4375 + case CC_CLOBBER:
4376 + CC_STATUS_INIT;
4377 + break;
4378 +
4379 + default:
4380 + CC_STATUS_INIT;
4381 + }
4382 +}
4383 +
4384 +
4385 +/*
4386 + Outputs to stdio stream stream the assembler syntax for an instruction
4387 + operand x. x is an RTL expression.
4388 +*/
4389 +void
4390 +avr32_print_operand (FILE * stream, rtx x, int code)
4391 +{
4392 + int error = 0;
4393 +
4394 + switch (GET_CODE (x))
4395 + {
4396 + case UNSPEC:
4397 + switch (XINT (x, 1))
4398 + {
4399 + case UNSPEC_COND_PL: