gcc 4.2.4 support
[openwrt/svn-archive/archive.git] / toolchain / gcc / patches / 4.2.4 / 900-avr32_support.patch
1 Index: gcc-4.2.3/configure.in
2 ===================================================================
3 --- gcc-4.2.3.orig/configure.in 2007-09-15 02:42:24.000000000 +0200
4 +++ gcc-4.2.3/configure.in 2008-05-21 13:45:54.101287819 +0200
5 @@ -503,6 +503,9 @@
6 arm-*-riscix*)
7 noconfigdirs="$noconfigdirs ld target-libgloss ${libgcj}"
8 ;;
9 + avr32-*-*)
10 + noconfigdirs="$noconfigdirs target-libiberty target-libmudflap target-libffi ${libgcj}"
11 + ;;
12 avr-*-*)
13 noconfigdirs="$noconfigdirs target-libiberty target-libstdc++-v3 ${libgcj}"
14 ;;
15 Index: gcc-4.2.3/gcc/builtins.c
16 ===================================================================
17 --- gcc-4.2.3.orig/gcc/builtins.c 2008-01-23 11:38:21.000000000 +0100
18 +++ gcc-4.2.3/gcc/builtins.c 2008-05-21 13:45:54.109288559 +0200
19 @@ -9223,7 +9223,7 @@
20
21 do
22 {
23 - code = va_arg (ap, enum tree_code);
24 + code = va_arg (ap, int);
25 switch (code)
26 {
27 case 0:
28 Index: gcc-4.2.3/gcc/calls.c
29 ===================================================================
30 --- gcc-4.2.3.orig/gcc/calls.c 2007-09-01 17:28:30.000000000 +0200
31 +++ gcc-4.2.3/gcc/calls.c 2008-05-21 13:45:54.117288181 +0200
32 @@ -3447,7 +3447,7 @@
33 for (; count < nargs; count++)
34 {
35 rtx val = va_arg (p, rtx);
36 - enum machine_mode mode = va_arg (p, enum machine_mode);
37 + enum machine_mode mode = va_arg (p, int);
38
39 /* We cannot convert the arg value to the mode the library wants here;
40 must do it earlier where we know the signedness of the arg. */
41 Index: gcc-4.2.3/gcc/config/avr32/avr32.c
42 ===================================================================
43 --- /dev/null 1970-01-01 00:00:00.000000000 +0000
44 +++ gcc-4.2.3/gcc/config/avr32/avr32.c 2008-05-21 13:45:54.145288116 +0200
45 @@ -0,0 +1,7060 @@
46 +/*
47 + Target hooks and helper functions for AVR32.
48 + Copyright 2003-2006 Atmel Corporation.
49 +
50 + Written by Ronny Pedersen, Atmel Norway, <rpedersen@atmel.com>
51 +   Initial porting by Anders Ødland.
52 +
53 + This file is part of GCC.
54 +
55 + This program is free software; you can redistribute it and/or modify
56 + it under the terms of the GNU General Public License as published by
57 + the Free Software Foundation; either version 2 of the License, or
58 + (at your option) any later version.
59 +
60 + This program is distributed in the hope that it will be useful,
61 + but WITHOUT ANY WARRANTY; without even the implied warranty of
62 + MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
63 + GNU General Public License for more details.
64 +
65 + You should have received a copy of the GNU General Public License
66 + along with this program; if not, write to the Free Software
67 + Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. */
68 +
69 +#include "config.h"
70 +#include "system.h"
71 +#include "coretypes.h"
72 +#include "tm.h"
73 +#include "rtl.h"
74 +#include "tree.h"
75 +#include "obstack.h"
76 +#include "regs.h"
77 +#include "hard-reg-set.h"
78 +#include "real.h"
79 +#include "insn-config.h"
80 +#include "conditions.h"
81 +#include "output.h"
82 +#include "insn-attr.h"
83 +#include "flags.h"
84 +#include "reload.h"
85 +#include "function.h"
86 +#include "expr.h"
87 +#include "optabs.h"
88 +#include "toplev.h"
89 +#include "recog.h"
90 +#include "ggc.h"
91 +#include "except.h"
92 +#include "c-pragma.h"
93 +#include "integrate.h"
94 +#include "tm_p.h"
95 +#include "langhooks.h"
96 +
97 +#include "target.h"
98 +#include "target-def.h"
99 +
100 +#include <ctype.h>
101 +
102 +/* Forward definitions of types. */
103 +typedef struct minipool_node Mnode;
104 +typedef struct minipool_fixup Mfix;
105 +
106 +/* Obstack for minipool constant handling. */
107 +static struct obstack minipool_obstack;
108 +static char *minipool_startobj;
109 +static rtx minipool_vector_label;
110 +
111 +/* True if we are currently building a constant table. */
112 +int making_const_table;
113 +
114 +/* Some forward function declarations */
115 +static unsigned long avr32_isr_value (tree);
116 +static unsigned long avr32_compute_func_type (void);
117 +static tree avr32_handle_isr_attribute (tree *, tree, tree, int, bool *);
118 +static tree avr32_handle_acall_attribute (tree *, tree, tree, int, bool *);
119 +static tree avr32_handle_fndecl_attribute (tree * node, tree name, tree args,
120 + int flags, bool * no_add_attrs);
121 +static void avr32_reorg (void);
122 +bool avr32_return_in_msb (tree type);
123 +bool avr32_vector_mode_supported (enum machine_mode mode);
124 +static void avr32_init_libfuncs (void);
125 +
126 +
127 +static void
128 +avr32_add_gc_roots (void)
129 + {
130 + gcc_obstack_init (&minipool_obstack);
131 + minipool_startobj = (char *) obstack_alloc (&minipool_obstack, 0);
132 + }
133 +
134 +
135 +/* List of all known AVR32 parts */
136 +static const struct part_type_s avr32_part_types[] = {
137 + /* name, part_type, architecture type, macro */
138 + {"none", PART_TYPE_AVR32_NONE, ARCH_TYPE_AVR32_AP, "__AVR32__"},
139 + {"ap7000", PART_TYPE_AVR32_AP7000, ARCH_TYPE_AVR32_AP, "__AVR32_AP7000__"},
140 + {"ap7010", PART_TYPE_AVR32_AP7010, ARCH_TYPE_AVR32_AP, "__AVR32_AP7010__"},
141 + {"ap7020", PART_TYPE_AVR32_AP7020, ARCH_TYPE_AVR32_AP, "__AVR32_AP7020__"},
142 + {"uc3a0256", PART_TYPE_AVR32_UC3A0256, ARCH_TYPE_AVR32_UC,
143 + "__AVR32_UC3A0256__"},
144 + {"uc3a0512", PART_TYPE_AVR32_UC3A0512, ARCH_TYPE_AVR32_UC,
145 + "__AVR32_UC3A0512__"},
146 + {"uc3a1128", PART_TYPE_AVR32_UC3A1128, ARCH_TYPE_AVR32_UC,
147 + "__AVR32_UC3A1128__"},
148 + {"uc3a1256", PART_TYPE_AVR32_UC3A1256, ARCH_TYPE_AVR32_UC,
149 + "__AVR32_UC3A1256__"},
150 + {"uc3a1512", PART_TYPE_AVR32_UC3A1512, ARCH_TYPE_AVR32_UC,
151 + "__AVR32_UC3A1512__"},
152 + {"uc3b064", PART_TYPE_AVR32_UC3B064, ARCH_TYPE_AVR32_UC,
153 + "__AVR32_UC3B064__"},
154 + {"uc3b0128", PART_TYPE_AVR32_UC3B0128, ARCH_TYPE_AVR32_UC,
155 + "__AVR32_UC3B0128__"},
156 + {"uc3b0256", PART_TYPE_AVR32_UC3B0256, ARCH_TYPE_AVR32_UC,
157 + "__AVR32_UC3B0256__"},
158 + {"uc3b164", PART_TYPE_AVR32_UC3B164, ARCH_TYPE_AVR32_UC,
159 + "__AVR32_UC3B164__"},
160 + {"uc3b1128", PART_TYPE_AVR32_UC3B1128, ARCH_TYPE_AVR32_UC,
161 + "__AVR32_UC3B1128__"},
162 + {"uc3b1256", PART_TYPE_AVR32_UC3B1256, ARCH_TYPE_AVR32_UC,
163 + "__AVR32_UC3B1256__"},
164 + {NULL, 0, 0, NULL}
165 +};
166 +
167 +/* List of all known AVR32 architectures */
168 +static const struct arch_type_s avr32_arch_types[] = {
169 + /* name, architecture type, microarchitecture type, feature flags, macro */
170 + {"ap", ARCH_TYPE_AVR32_AP, UARCH_TYPE_AVR32B,
171 + (FLAG_AVR32_HAS_DSP
172 + | FLAG_AVR32_HAS_SIMD
173 + | FLAG_AVR32_HAS_UNALIGNED_WORD
174 + | FLAG_AVR32_HAS_CACHES
175 + | FLAG_AVR32_HAS_BRANCH_PRED
176 + | FLAG_AVR32_HAS_RETURN_STACK),
177 + "__AVR32_AP__"},
178 + {"uc", ARCH_TYPE_AVR32_UC, UARCH_TYPE_AVR32A,
179 + (FLAG_AVR32_HAS_DSP | FLAG_AVR32_HAS_RMW),
180 + "__AVR32_UC__"},
181 + {NULL, 0, 0, 0, NULL}
182 +};
183 +
184 +/* Default arch name */
185 +const char *avr32_arch_name = "ap";
186 +const char *avr32_part_name = "none";
187 +
188 +const struct part_type_s *avr32_part;
189 +const struct arch_type_s *avr32_arch;
190 +
191 +/* Set default target_flags. */
192 +#undef TARGET_DEFAULT_TARGET_FLAGS
193 +#define TARGET_DEFAULT_TARGET_FLAGS \
194 + (MASK_HAS_ASM_ADDR_PSEUDOS | MASK_MD_REORG_OPTIMIZATION)
195 +
196 +void
197 +avr32_optimization_options (int level,
198 + int size){
199 + if (AVR32_ALWAYS_PIC)
200 + flag_pic = 1;
201 +
202 + /* Enable section anchors if optimization is enabled. */
203 + if (level > 0 || size)
204 + flag_section_anchors = 1;
205 +}
206 +
207 +/* Override command line options */
208 +void
209 +avr32_override_options (void)
210 + {
211 + const struct part_type_s *part;
212 + const struct arch_type_s *arch;
213 +
214 + /* Check if part type is set. */
215 + for (part = avr32_part_types; part->name; part++)
216 + if (strcmp (part->name, avr32_part_name) == 0)
217 + break;
218 +
219 + avr32_part = part;
220 +
221 + if (!part->name)
222 + {
223 + fprintf (stderr, "Unknown part `%s' specified\nKnown part names:\n",
224 + avr32_part_name);
225 + for (part = avr32_part_types; part->name; part++)
226 + fprintf (stderr, "\t%s\n", part->name);
227 + avr32_part = &avr32_part_types[PART_TYPE_AVR32_NONE];
228 + }
229 +
230 + avr32_arch = &avr32_arch_types[avr32_part->arch_type];
231 +
232 + /* If part was set to "none" then check if arch was set. */
233 + if (strcmp (avr32_part->name, "none") == 0)
234 + {
235 + /* Check if arch type is set. */
236 + for (arch = avr32_arch_types; arch->name; arch++)
237 + if (strcmp (arch->name, avr32_arch_name) == 0)
238 + break;
239 +
240 + avr32_arch = arch;
241 +
242 + if (!arch->name)
243 + {
244 + fprintf (stderr, "Unknown arch `%s' specified\nKnown arch names:\n",
245 + avr32_arch_name);
246 + for (arch = avr32_arch_types; arch->name; arch++)
247 + fprintf (stderr, "\t%s\n", arch->name);
248 + avr32_arch = &avr32_arch_types[ARCH_TYPE_AVR32_AP];
249 + }
250 + }
251 +
252 + /* If optimization level is two or greater, then align start of loops to a
253 + word boundary since this will allow folding the first insn of the loop.
254 + Do this only for targets supporting branch prediction. */
255 + if (optimize >= 2 && TARGET_BRANCH_PRED)
256 + align_loops = 2;
257 +
258 +
259 + /* Enable section anchors if optimization is enabled. */
260 + if (optimize > 0 || optimize_size)
261 + flag_section_anchors = 1;
262 +
263 + /* Enable fast-float library if unsafe math optimizations
264 + are used. */
265 + if (flag_unsafe_math_optimizations)
266 + target_flags |= MASK_FAST_FLOAT;
267 +
268 + /* Check if we should set avr32_imm_in_const_pool
269 + based on if caches are present or not. */
270 + if ( avr32_imm_in_const_pool == -1 )
271 + {
272 + if ( TARGET_CACHES )
273 + avr32_imm_in_const_pool = 1;
274 + else
275 + avr32_imm_in_const_pool = 0;
276 + }
277 +
278 + avr32_add_gc_roots ();
279 + }
280 +
281 +
282 +/*
283 +If defined, a function that outputs the assembler code for entry to a
284 +function. The prologue is responsible for setting up the stack frame,
285 +initializing the frame pointer register, saving registers that must be
286 +saved, and allocating size additional bytes of storage for the
287 +local variables. size is an integer. file is a stdio
288 +stream to which the assembler code should be output.
289 +
290 +The label for the beginning of the function need not be output by this
291 +macro. That has already been done when the macro is run.
292 +
293 +To determine which registers to save, the macro can refer to the array
294 +regs_ever_live: element r is nonzero if hard register
295 +r is used anywhere within the function. This implies the function
296 +prologue should save register r, provided it is not one of the
297 +call-used registers. (TARGET_ASM_FUNCTION_EPILOGUE must likewise use
298 +regs_ever_live.)
299 +
300 +On machines that have ``register windows'', the function entry code does
301 +not save on the stack the registers that are in the windows, even if
302 +they are supposed to be preserved by function calls; instead it takes
303 +appropriate steps to ``push'' the register stack, if any non-call-used
304 +registers are used in the function.
305 +
306 +On machines where functions may or may not have frame-pointers, the
307 +function entry code must vary accordingly; it must set up the frame
308 +pointer if one is wanted, and not otherwise. To determine whether a
309 +frame pointer is wanted, the macro can refer to the variable
310 +frame_pointer_needed. The variable's value will be 1 at run
311 +time in a function that needs a frame pointer. (see Elimination).
312 +
313 +The function entry code is responsible for allocating any stack space
314 +required for the function. This stack space consists of the regions
315 +listed below. In most cases, these regions are allocated in the
316 +order listed, with the last listed region closest to the top of the
317 +stack (the lowest address if STACK_GROWS_DOWNWARD is defined, and
318 +the highest address if it is not defined). You can use a different order
319 +for a machine if doing so is more convenient or required for
320 +compatibility reasons. Except in cases where required by standard
321 +or by a debugger, there is no reason why the stack layout used by GCC
322 +need agree with that used by other compilers for a machine.
323 + */
324 +
325 +#undef TARGET_ASM_FUNCTION_PROLOGUE
326 +#define TARGET_ASM_FUNCTION_PROLOGUE avr32_target_asm_function_prologue
327 +
328 +
329 +#undef TARGET_DEFAULT_SHORT_ENUMS
330 +#define TARGET_DEFAULT_SHORT_ENUMS hook_bool_void_false
331 +
332 +#undef TARGET_PROMOTE_FUNCTION_ARGS
333 +#define TARGET_PROMOTE_FUNCTION_ARGS hook_bool_tree_true
334 +
335 +#undef TARGET_PROMOTE_FUNCTION_RETURN
336 +#define TARGET_PROMOTE_FUNCTION_RETURN hook_bool_tree_true
337 +
338 +#undef TARGET_PROMOTE_PROTOTYPES
339 +#define TARGET_PROMOTE_PROTOTYPES hook_bool_tree_true
340 +
341 +#undef TARGET_MUST_PASS_IN_STACK
342 +#define TARGET_MUST_PASS_IN_STACK avr32_must_pass_in_stack
343 +
344 +#undef TARGET_PASS_BY_REFERENCE
345 +#define TARGET_PASS_BY_REFERENCE avr32_pass_by_reference
346 +
347 +#undef TARGET_STRICT_ARGUMENT_NAMING
348 +#define TARGET_STRICT_ARGUMENT_NAMING avr32_strict_argument_naming
349 +
350 +#undef TARGET_VECTOR_MODE_SUPPORTED_P
351 +#define TARGET_VECTOR_MODE_SUPPORTED_P avr32_vector_mode_supported
352 +
353 +#undef TARGET_RETURN_IN_MEMORY
354 +#define TARGET_RETURN_IN_MEMORY avr32_return_in_memory
355 +
356 +#undef TARGET_RETURN_IN_MSB
357 +#define TARGET_RETURN_IN_MSB avr32_return_in_msb
358 +
359 +#undef TARGET_ARG_PARTIAL_BYTES
360 +#define TARGET_ARG_PARTIAL_BYTES avr32_arg_partial_bytes
361 +
362 +#undef TARGET_STRIP_NAME_ENCODING
363 +#define TARGET_STRIP_NAME_ENCODING avr32_strip_name_encoding
364 +
365 +#define streq(string1, string2) (strcmp (string1, string2) == 0)
366 +
367 +#undef TARGET_NARROW_VOLATILE_BITFIELD
368 +#define TARGET_NARROW_VOLATILE_BITFIELD hook_bool_void_false
369 +
370 +#undef TARGET_ATTRIBUTE_TABLE
371 +#define TARGET_ATTRIBUTE_TABLE avr32_attribute_table
372 +
373 +#undef TARGET_COMP_TYPE_ATTRIBUTES
374 +#define TARGET_COMP_TYPE_ATTRIBUTES avr32_comp_type_attributes
375 +
376 +
377 +#undef TARGET_RTX_COSTS
378 +#define TARGET_RTX_COSTS avr32_rtx_costs
379 +
380 +#undef TARGET_CANNOT_FORCE_CONST_MEM
381 +#define TARGET_CANNOT_FORCE_CONST_MEM avr32_cannot_force_const_mem
382 +
383 +#undef TARGET_ASM_INTEGER
384 +#define TARGET_ASM_INTEGER avr32_assemble_integer
385 +
386 +#undef TARGET_FUNCTION_VALUE
387 +#define TARGET_FUNCTION_VALUE avr32_function_value
388 +
389 +#undef TARGET_MIN_ANCHOR_OFFSET
390 +#define TARGET_MIN_ANCHOR_OFFSET (0)
391 +
392 +#undef TARGET_MAX_ANCHOR_OFFSET
393 +#define TARGET_MAX_ANCHOR_OFFSET ((1 << 15) - 1)
394 +
395 +
396 +/*
397 + * Switches to the appropriate section for output of constant pool
398 + * entry x in mode. You can assume that x is some kind of constant in
399 + * RTL. The argument mode is redundant except in the case of a
400 + * const_int rtx. Select the section by calling readonly_data_section
401 + * or one of the alternatives for other sections. align is the
402 + * constant alignment in bits.
403 + *
404 + * The default version of this function takes care of putting symbolic
405 + * constants in flag_pic mode in data_section and everything else in
406 + * readonly_data_section.
407 + */
408 +//#undef TARGET_ASM_SELECT_RTX_SECTION
409 +//#define TARGET_ASM_SELECT_RTX_SECTION avr32_select_rtx_section
410 +
411 +
412 +/*
413 + * If non-null, this hook performs a target-specific pass over the
414 + * instruction stream. The compiler will run it at all optimization
415 + * levels, just before the point at which it normally does
416 + * delayed-branch scheduling.
417 + *
418 + * The exact purpose of the hook varies from target to target. Some
419 + * use it to do transformations that are necessary for correctness,
420 + * such as laying out in-function constant pools or avoiding hardware
421 + * hazards. Others use it as an opportunity to do some
422 + * machine-dependent optimizations.
423 + *
424 + * You need not implement the hook if it has nothing to do. The
425 + * default definition is null.
426 + */
427 +#undef TARGET_MACHINE_DEPENDENT_REORG
428 +#define TARGET_MACHINE_DEPENDENT_REORG avr32_reorg
429 +
430 +/* Target hook for assembling integer objects.
431 + Need to handle integer vectors */
432 +static bool
433 +avr32_assemble_integer (rtx x, unsigned int size, int aligned_p)
434 + {
435 + if (avr32_vector_mode_supported (GET_MODE (x)))
436 + {
437 + int i, units;
438 +
439 + if (GET_CODE (x) != CONST_VECTOR)
440 + abort ();
441 +
442 + units = CONST_VECTOR_NUNITS (x);
443 +
444 + switch (GET_MODE (x))
445 + {
446 + case V2HImode:
447 + size = 2;
448 + break;
449 + case V4QImode:
450 + size = 1;
451 + break;
452 + default:
453 + abort ();
454 + }
455 +
456 + for (i = 0; i < units; i++)
457 + {
458 + rtx elt;
459 +
460 + elt = CONST_VECTOR_ELT (x, i);
461 + assemble_integer (elt, size, i == 0 ? 32 : size * BITS_PER_UNIT, 1);
462 + }
463 +
464 + return true;
465 + }
466 +
467 + return default_assemble_integer (x, size, aligned_p);
468 + }
469 +
470 +/*
471 + * This target hook describes the relative costs of RTL expressions.
472 + *
473 + * The cost may depend on the precise form of the expression, which is
474 + * available for examination in x, and the rtx code of the expression
475 + * in which it is contained, found in outer_code. code is the
476 + * expression code--redundant, since it can be obtained with GET_CODE
477 + * (x).
478 + *
479 + * In implementing this hook, you can use the construct COSTS_N_INSNS
480 + * (n) to specify a cost equal to n fast instructions.
481 + *
482 + * On entry to the hook, *total contains a default estimate for the
483 + * cost of the expression. The hook should modify this value as
484 + * necessary. Traditionally, the default costs are COSTS_N_INSNS (5)
485 + * for multiplications, COSTS_N_INSNS (7) for division and modulus
486 + * operations, and COSTS_N_INSNS (1) for all other operations.
487 + *
488 + * When optimizing for code size, i.e. when optimize_size is non-zero,
489 + * this target hook should be used to estimate the relative size cost
490 + * of an expression, again relative to COSTS_N_INSNS.
491 + *
492 + * The hook returns true when all subexpressions of x have been
493 + * processed, and false when rtx_cost should recurse.
494 + */
495 +
496 +/* Worker routine for avr32_rtx_costs. */
497 +static inline int
498 +avr32_rtx_costs_1 (rtx x, enum rtx_code code ATTRIBUTE_UNUSED,
499 + enum rtx_code outer ATTRIBUTE_UNUSED)
500 + {
501 + enum machine_mode mode = GET_MODE (x);
502 +
503 + switch (GET_CODE (x))
504 + {
505 + case MEM:
506 + /* Using pre decrement / post increment memory operations on the
507 + avr32_uc architecture means that two writebacks must be performed
508 + and hence two cycles are needed. */
509 + if (!optimize_size
510 + && GET_MODE_SIZE (mode) <= 2 * UNITS_PER_WORD
511 + && avr32_arch->arch_type == ARCH_TYPE_AVR32_UC
512 + && (GET_CODE (XEXP (x, 0)) == PRE_DEC
513 + || GET_CODE (XEXP (x, 0)) == POST_INC))
514 + return COSTS_N_INSNS (5);
515 +
516 + /* Memory costs quite a lot for the first word, but subsequent words
517 + load at the equivalent of a single insn each. */
518 + if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
519 + return COSTS_N_INSNS (3 + (GET_MODE_SIZE (mode) / UNITS_PER_WORD));
520 +
521 + return COSTS_N_INSNS (4);
522 + case SYMBOL_REF:
523 + case CONST:
524 + /* These are valid for the pseudo insns: lda.w and call which operates
525 + on direct addresses. We assume that the cost of a lda.w is the same
526 + as the cost of a ld.w insn. */
527 + return (outer == SET) ? COSTS_N_INSNS (4) : COSTS_N_INSNS (1);
528 + case DIV:
529 + case MOD:
530 + case UDIV:
531 + case UMOD:
532 + return optimize_size ? COSTS_N_INSNS (1) : COSTS_N_INSNS (16);
533 +
534 + case ROTATE:
535 + case ROTATERT:
536 + if (mode == TImode)
537 + return COSTS_N_INSNS (100);
538 +
539 + if (mode == DImode)
540 + return COSTS_N_INSNS (10);
541 + return COSTS_N_INSNS (4);
542 + case ASHIFT:
543 + case LSHIFTRT:
544 + case ASHIFTRT:
545 + case NOT:
546 + if (mode == TImode)
547 + return COSTS_N_INSNS (10);
548 +
549 + if (mode == DImode)
550 + return COSTS_N_INSNS (4);
551 + return COSTS_N_INSNS (1);
552 + case PLUS:
553 + case MINUS:
554 + case NEG:
555 + case COMPARE:
556 + case ABS:
557 + if (GET_MODE_CLASS (mode) == MODE_FLOAT)
558 + return COSTS_N_INSNS (100);
559 +
560 + if (mode == TImode)
561 + return COSTS_N_INSNS (50);
562 +
563 + if (mode == DImode)
564 + return COSTS_N_INSNS (2);
565 + return COSTS_N_INSNS (1);
566 +
567 + case MULT:
568 + {
569 + if (GET_MODE_CLASS (mode) == MODE_FLOAT)
570 + return COSTS_N_INSNS (300);
571 +
572 + if (mode == TImode)
573 + return COSTS_N_INSNS (16);
574 +
575 + if (mode == DImode)
576 + return COSTS_N_INSNS (4);
577 +
578 + if (mode == HImode)
579 + return COSTS_N_INSNS (2);
580 +
581 + return COSTS_N_INSNS (3);
582 + }
583 + case IF_THEN_ELSE:
584 + if (GET_CODE (XEXP (x, 1)) == PC || GET_CODE (XEXP (x, 2)) == PC)
585 + return COSTS_N_INSNS (4);
586 + return COSTS_N_INSNS (1);
587 + case SIGN_EXTEND:
588 + case ZERO_EXTEND:
589 + /* Sign/Zero extensions of registers cost quite much since these
590 +       instructions only take one register operand which means that gcc
591 +       often must insert some move instructions
592 + if (mode == QImode || mode == HImode)
593 + return (COSTS_N_INSNS (GET_CODE (XEXP (x, 0)) == MEM ? 0 : 1));
594 + return COSTS_N_INSNS (4);
595 + case UNSPEC:
596 + /* divmod operations */
597 + if (XINT (x, 1) == UNSPEC_UDIVMODSI4_INTERNAL
598 + || XINT (x, 1) == UNSPEC_DIVMODSI4_INTERNAL)
599 + {
600 + return optimize_size ? COSTS_N_INSNS (1) : COSTS_N_INSNS (16);
601 + }
602 + /* Fallthrough */
603 + default:
604 + return COSTS_N_INSNS (1);
605 + }
606 + }
607 +
608 +static bool
609 +avr32_rtx_costs (rtx x, int code, int outer_code, int *total)
610 + {
611 + *total = avr32_rtx_costs_1 (x, code, outer_code);
612 + return true;
613 + }
614 +
615 +
616 +bool
617 +avr32_cannot_force_const_mem (rtx x ATTRIBUTE_UNUSED)
618 + {
619 + /* Do not want symbols in the constant pool when compiling pic or if using
620 + address pseudo instructions. */
621 + return ((flag_pic || TARGET_HAS_ASM_ADDR_PSEUDOS)
622 + && avr32_find_symbol (x) != NULL_RTX);
623 + }
624 +
625 +
626 +/* Table of machine attributes. */
627 +const struct attribute_spec avr32_attribute_table[] = {
628 + /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
629 + /* Interrupt Service Routines have special prologue and epilogue
630 + requirements. */
631 + {"isr", 0, 1, false, false, false, avr32_handle_isr_attribute},
632 + {"interrupt", 0, 1, false, false, false, avr32_handle_isr_attribute},
633 + {"acall", 0, 1, false, true, true, avr32_handle_acall_attribute},
634 + {"naked", 0, 0, true, false, false, avr32_handle_fndecl_attribute},
635 + {NULL, 0, 0, false, false, false, NULL}
636 +};
637 +
638 +
639 +typedef struct
640 +{
641 + const char *const arg;
642 + const unsigned long return_value;
643 +}
644 +isr_attribute_arg;
645 +
646 +static const isr_attribute_arg isr_attribute_args[] = {
647 + {"FULL", AVR32_FT_ISR_FULL},
648 + {"full", AVR32_FT_ISR_FULL},
649 + {"HALF", AVR32_FT_ISR_HALF},
650 + {"half", AVR32_FT_ISR_HALF},
651 + {"NONE", AVR32_FT_ISR_NONE},
652 + {"none", AVR32_FT_ISR_NONE},
653 + {"UNDEF", AVR32_FT_ISR_NONE},
654 + {"undef", AVR32_FT_ISR_NONE},
655 + {"SWI", AVR32_FT_ISR_NONE},
656 + {"swi", AVR32_FT_ISR_NONE},
657 + {NULL, AVR32_FT_ISR_NONE}
658 +};
659 +
660 +/* Returns the (interrupt) function type of the current
661 + function, or AVR32_FT_UNKNOWN if the type cannot be determined. */
662 +
663 +static unsigned long
664 +avr32_isr_value (tree argument)
665 + {
666 + const isr_attribute_arg *ptr;
667 + const char *arg;
668 +
669 + /* No argument - default to ISR_NONE. */
670 + if (argument == NULL_TREE)
671 + return AVR32_FT_ISR_NONE;
672 +
673 + /* Get the value of the argument. */
674 + if (TREE_VALUE (argument) == NULL_TREE
675 + || TREE_CODE (TREE_VALUE (argument)) != STRING_CST)
676 + return AVR32_FT_UNKNOWN;
677 +
678 + arg = TREE_STRING_POINTER (TREE_VALUE (argument));
679 +
680 + /* Check it against the list of known arguments. */
681 + for (ptr = isr_attribute_args; ptr->arg != NULL; ptr++)
682 + if (streq (arg, ptr->arg))
683 + return ptr->return_value;
684 +
685 + /* An unrecognized interrupt type. */
686 + return AVR32_FT_UNKNOWN;
687 + }
688 +
689 +
690 +
691 +/*
692 +These hooks specify assembly directives for creating certain kinds
693 +of integer object. The TARGET_ASM_BYTE_OP directive creates a
694 +byte-sized object, the TARGET_ASM_ALIGNED_HI_OP one creates an
695 +aligned two-byte object, and so on. Any of the hooks may be
696 +NULL, indicating that no suitable directive is available.
697 +
698 +The compiler will print these strings at the start of a new line,
699 +followed immediately by the object's initial value. In most cases,
700 +the string should contain a tab, a pseudo-op, and then another tab.
701 + */
702 +#undef TARGET_ASM_BYTE_OP
703 +#define TARGET_ASM_BYTE_OP "\t.byte\t"
704 +#undef TARGET_ASM_ALIGNED_HI_OP
705 +#define TARGET_ASM_ALIGNED_HI_OP "\t.align 1\n\t.short\t"
706 +#undef TARGET_ASM_ALIGNED_SI_OP
707 +#define TARGET_ASM_ALIGNED_SI_OP "\t.align 2\n\t.int\t"
708 +#undef TARGET_ASM_ALIGNED_DI_OP
709 +#define TARGET_ASM_ALIGNED_DI_OP NULL
710 +#undef TARGET_ASM_ALIGNED_TI_OP
711 +#define TARGET_ASM_ALIGNED_TI_OP NULL
712 +#undef TARGET_ASM_UNALIGNED_HI_OP
713 +#define TARGET_ASM_UNALIGNED_HI_OP "\t.short\t"
714 +#undef TARGET_ASM_UNALIGNED_SI_OP
715 +#define TARGET_ASM_UNALIGNED_SI_OP "\t.int\t"
716 +#undef TARGET_ASM_UNALIGNED_DI_OP
717 +#define TARGET_ASM_UNALIGNED_DI_OP NULL
718 +#undef TARGET_ASM_UNALIGNED_TI_OP
719 +#define TARGET_ASM_UNALIGNED_TI_OP NULL
720 +
721 +#undef TARGET_ASM_OUTPUT_MI_THUNK
722 +#define TARGET_ASM_OUTPUT_MI_THUNK avr32_output_mi_thunk
723 +
724 +#undef TARGET_ASM_CAN_OUTPUT_MI_THUNK
725 +#define TARGET_ASM_CAN_OUTPUT_MI_THUNK hook_bool_tree_hwi_hwi_tree_true
726 +
727 +static void
728 +avr32_output_mi_thunk (FILE * file,
729 + tree thunk ATTRIBUTE_UNUSED,
730 + HOST_WIDE_INT delta,
731 + HOST_WIDE_INT vcall_offset, tree function)
732 + {
733 + int mi_delta = delta;
734 + int this_regno =
735 + (aggregate_value_p (TREE_TYPE (TREE_TYPE (function)), function) ?
736 + INTERNAL_REGNUM (11) : INTERNAL_REGNUM (12));
737 +
738 +
739 + if (!avr32_const_ok_for_constraint_p (mi_delta, 'I', "Is21")
740 + || vcall_offset
741 + || flag_pic)
742 + {
743 + fputs ("\tpushm\tlr\n", file);
744 + }
745 +
746 +
747 + if (mi_delta != 0)
748 + {
749 + if (avr32_const_ok_for_constraint_p (mi_delta, 'I', "Is21"))
750 + {
751 + fprintf (file, "\tsub\t%s, -0x%x\n", reg_names[this_regno],
752 + mi_delta);
753 + }
754 + else
755 + {
756 + /* Immediate is larger than k21 we must make us a temp register by
757 + pushing a register to the stack. */
758 + fprintf (file, "\tmov\tlr, lo(%x)\n", mi_delta);
759 + fprintf (file, "\torh\tlr, hi(%x)\n", mi_delta);
760 + fprintf (file, "\tadd\t%s, lr\n", reg_names[this_regno]);
761 + }
762 + }
763 +
764 +
765 + if (vcall_offset != 0)
766 + {
767 + fprintf (file, "\tld.w\tlr, %s[0]\n", reg_names[this_regno]);
768 + fprintf (file, "\tld.w\tlr, lr[%i]\n", (int) vcall_offset);
769 + fprintf (file, "\tadd\t%s, lr\n", reg_names[this_regno]);
770 + }
771 +
772 +
773 + if ( (!avr32_const_ok_for_constraint_p (mi_delta, 'I', "Is21")
774 + || vcall_offset)
775 + && !flag_pic )
776 + {
777 + fputs ("\tpopm\tlr\n", file);
778 + }
779 +
780 + if (flag_pic)
781 + {
782 + /* Load the got into lr and then load the pointer
783 + to the function from the got and put it on the stack.
784 + We can then call the function and restore lr by issuing
785 + a doubleword load from the stack. We do not use a popm/ldm
786 + since it will be treated as a return and might need a flushing
787 + of the return-stack if available. */
788 + rtx label = gen_label_rtx ();
789 + /* Load the got. */
790 + fputs ("\tlddpc\tlr, 0f\n", file);
791 + (*targetm.asm_out.internal_label) (file, "L",
792 + CODE_LABEL_NUMBER (label));
793 + fputs ("\trsub\tlr, pc\n", file);
794 + /* Load the function pointer. */
795 + fputs ("\tld.w\tlr, lr[", file);
796 + assemble_name (file, XSTR (XEXP (DECL_RTL (function), 0), 0));
797 + fputs ("@got]\n", file);
798 + /* Push the function pointer on the stack.*/
799 + fputs ("\tpushm\tlr\n", file);
800 + /* Restore the old lr value and load the function pointer into
801 + pc. */
802 + fputs ("\tld.d\tlr,sp++\n", file);
803 + fprintf (file, "\t.align 2\n");
804 + fprintf (file, "0:\t.long\t.L%d - _GLOBAL_OFFSET_TABLE_\n", CODE_LABEL_NUMBER (label));
805 + }
806 + else
807 + {
808 + fprintf (file, "\tlddpc\tpc, 0f\n");
809 + fprintf (file, "\t.align 2\n");
810 + fputs ("0:\t.long\t", file);
811 + assemble_name (file, XSTR (XEXP (DECL_RTL (function), 0), 0));
812 + fputc ('\n', file);
813 + }
814 + }
815 +
816 +/* Implements target hook vector_mode_supported. */
817 +bool
818 +avr32_vector_mode_supported (enum machine_mode mode)
819 + {
820 + if ((mode == V2HImode) || (mode == V4QImode))
821 + return true;
822 +
823 + return false;
824 + }
825 +
826 +
827 +#undef TARGET_INIT_LIBFUNCS
828 +#define TARGET_INIT_LIBFUNCS avr32_init_libfuncs
829 +
830 +#undef TARGET_INIT_BUILTINS
831 +#define TARGET_INIT_BUILTINS avr32_init_builtins
832 +
833 +#undef TARGET_EXPAND_BUILTIN
834 +#define TARGET_EXPAND_BUILTIN avr32_expand_builtin
835 +
836 +tree int_ftype_int, int_ftype_void, short_ftype_short, void_ftype_int_int,
837 +void_ftype_ptr_int;
838 +tree void_ftype_int, void_ftype_void, int_ftype_ptr_int;
839 +tree short_ftype_short, int_ftype_int_short, int_ftype_short_short,
840 +short_ftype_short_short;
841 +tree int_ftype_int_int, longlong_ftype_int_short, longlong_ftype_short_short;
842 +tree void_ftype_int_int_int_int_int, void_ftype_int_int_int;
843 +tree longlong_ftype_int_int, void_ftype_int_int_longlong;
844 +tree int_ftype_int_int_int, longlong_ftype_longlong_int_short;
845 +tree longlong_ftype_longlong_short_short, int_ftype_int_short_short;
846 +
847 +#define def_builtin(NAME, TYPE, CODE) \
848 + lang_hooks.builtin_function ((NAME), (TYPE), (CODE), \
849 + BUILT_IN_MD, NULL, NULL_TREE)
850 +
851 +#define def_mbuiltin(MASK, NAME, TYPE, CODE) \
852 + do \
853 + { \
854 + if ((MASK)) \
855 + lang_hooks.builtin_function ((NAME), (TYPE), (CODE), \
856 + BUILT_IN_MD, NULL, NULL_TREE); \
857 + } \
858 + while (0)
859 +
860 +struct builtin_description
861 +{
862 + const unsigned int mask;
863 + const enum insn_code icode;
864 + const char *const name;
865 + const int code;
866 + const enum rtx_code comparison;
867 + const unsigned int flag;
868 + const tree *ftype;
869 +};
870 +
871 +static const struct builtin_description bdesc_2arg[] = {
872 +#define DSP_BUILTIN(code, builtin, ftype) \
873 + { 1, CODE_FOR_##code, "__builtin_" #code , \
874 + AVR32_BUILTIN_##builtin, 0, 0, ftype }
875 +
876 + DSP_BUILTIN (mulsathh_h, MULSATHH_H, &short_ftype_short_short),
877 + DSP_BUILTIN (mulsathh_w, MULSATHH_W, &int_ftype_short_short),
878 + DSP_BUILTIN (mulsatrndhh_h, MULSATRNDHH_H, &short_ftype_short_short),
879 + DSP_BUILTIN (mulsatrndwh_w, MULSATRNDWH_W, &int_ftype_int_short),
880 + DSP_BUILTIN (mulsatwh_w, MULSATWH_W, &int_ftype_int_short),
881 + DSP_BUILTIN (satadd_h, SATADD_H, &short_ftype_short_short),
882 + DSP_BUILTIN (satsub_h, SATSUB_H, &short_ftype_short_short),
883 + DSP_BUILTIN (satadd_w, SATADD_W, &int_ftype_int_int),
884 + DSP_BUILTIN (satsub_w, SATSUB_W, &int_ftype_int_int),
885 + DSP_BUILTIN (mulwh_d, MULWH_D, &longlong_ftype_int_short),
886 + DSP_BUILTIN (mulnwh_d, MULNWH_D, &longlong_ftype_int_short)
887 +};
888 +
889 +
890 +void
891 +avr32_init_builtins (void)
892 + {
893 + unsigned int i;
894 + const struct builtin_description *d;
895 + tree endlink = void_list_node;
896 + tree int_endlink = tree_cons (NULL_TREE, integer_type_node, endlink);
897 + tree longlong_endlink =
898 + tree_cons (NULL_TREE, long_long_integer_type_node, endlink);
899 + tree short_endlink =
900 + tree_cons (NULL_TREE, short_integer_type_node, endlink);
901 + tree void_endlink = tree_cons (NULL_TREE, void_type_node, endlink);
902 +
903 + /* int func (int) */
904 + int_ftype_int = build_function_type (integer_type_node, int_endlink);
905 +
906 + /* short func (short) */
907 + short_ftype_short
908 + = build_function_type (short_integer_type_node, short_endlink);
909 +
910 + /* short func (short, short) */
911 + short_ftype_short_short
912 + = build_function_type (short_integer_type_node,
913 + tree_cons (NULL_TREE, short_integer_type_node,
914 + short_endlink));
915 +
916 + /* long long func (long long, short, short) */
917 + longlong_ftype_longlong_short_short
918 + = build_function_type (long_long_integer_type_node,
919 + tree_cons (NULL_TREE, long_long_integer_type_node,
920 + tree_cons (NULL_TREE,
921 + short_integer_type_node,
922 + short_endlink)));
923 +
924 + /* long long func (short, short) */
925 + longlong_ftype_short_short
926 + = build_function_type (long_long_integer_type_node,
927 + tree_cons (NULL_TREE, short_integer_type_node,
928 + short_endlink));
929 +
930 + /* int func (int, int) */
931 + int_ftype_int_int
932 + = build_function_type (integer_type_node,
933 + tree_cons (NULL_TREE, integer_type_node,
934 + int_endlink));
935 +
936 + /* long long func (int, int) */
937 + longlong_ftype_int_int
938 + = build_function_type (long_long_integer_type_node,
939 + tree_cons (NULL_TREE, integer_type_node,
940 + int_endlink));
941 +
942 + /* long long int func (long long, int, short) */
943 + longlong_ftype_longlong_int_short
944 + = build_function_type (long_long_integer_type_node,
945 + tree_cons (NULL_TREE, long_long_integer_type_node,
946 + tree_cons (NULL_TREE, integer_type_node,
947 + short_endlink)));
948 +
949 + /* long long int func (int, short) */
950 + longlong_ftype_int_short
951 + = build_function_type (long_long_integer_type_node,
952 + tree_cons (NULL_TREE, integer_type_node,
953 + short_endlink));
954 +
955 + /* int func (int, short, short) */
956 + int_ftype_int_short_short
957 + = build_function_type (integer_type_node,
958 + tree_cons (NULL_TREE, integer_type_node,
959 + tree_cons (NULL_TREE,
960 + short_integer_type_node,
961 + short_endlink)));
962 +
963 + /* int func (short, short) */
964 + int_ftype_short_short
965 + = build_function_type (integer_type_node,
966 + tree_cons (NULL_TREE, short_integer_type_node,
967 + short_endlink));
968 +
969 + /* int func (int, short) */
970 + int_ftype_int_short
971 + = build_function_type (integer_type_node,
972 + tree_cons (NULL_TREE, integer_type_node,
973 + short_endlink));
974 +
975 + /* void func (int, int) */
976 + void_ftype_int_int
977 + = build_function_type (void_type_node,
978 + tree_cons (NULL_TREE, integer_type_node,
979 + int_endlink));
980 +
981 + /* void func (int, int, int) */
982 + void_ftype_int_int_int
983 + = build_function_type (void_type_node,
984 + tree_cons (NULL_TREE, integer_type_node,
985 + tree_cons (NULL_TREE, integer_type_node,
986 + int_endlink)));
987 +
988 + /* void func (int, int, long long) */
989 + void_ftype_int_int_longlong
990 + = build_function_type (void_type_node,
991 + tree_cons (NULL_TREE, integer_type_node,
992 + tree_cons (NULL_TREE, integer_type_node,
993 + longlong_endlink)));
994 +
995 + /* void func (int, int, int, int, int) */
996 + void_ftype_int_int_int_int_int
997 + = build_function_type (void_type_node,
998 + tree_cons (NULL_TREE, integer_type_node,
999 + tree_cons (NULL_TREE, integer_type_node,
1000 + tree_cons (NULL_TREE,
1001 + integer_type_node,
1002 + tree_cons
1003 + (NULL_TREE,
1004 + integer_type_node,
1005 + int_endlink)))));
1006 +
1007 + /* void func (void *, int) */
1008 + void_ftype_ptr_int
1009 + = build_function_type (void_type_node,
1010 + tree_cons (NULL_TREE, ptr_type_node, int_endlink));
1011 +
1012 + /* void func (int) */
1013 + void_ftype_int = build_function_type (void_type_node, int_endlink);
1014 +
1015 + /* void func (void) */
1016 + void_ftype_void = build_function_type (void_type_node, void_endlink);
1017 +
1018 + /* int func (void) */
1019 + int_ftype_void = build_function_type (integer_type_node, void_endlink);
1020 +
1021 + /* int func (void *, int) */
1022 + int_ftype_ptr_int
1023 + = build_function_type (integer_type_node,
1024 + tree_cons (NULL_TREE, ptr_type_node, int_endlink));
1025 +
1026 + /* int func (int, int, int) */
1027 + int_ftype_int_int_int
1028 + = build_function_type (integer_type_node,
1029 + tree_cons (NULL_TREE, integer_type_node,
1030 + tree_cons (NULL_TREE, integer_type_node,
1031 + int_endlink)));
1032 +
1033 + /* Initialize avr32 builtins. */
1034 + def_builtin ("__builtin_mfsr", int_ftype_int, AVR32_BUILTIN_MFSR);
1035 + def_builtin ("__builtin_mtsr", void_ftype_int_int, AVR32_BUILTIN_MTSR);
1036 + def_builtin ("__builtin_mfdr", int_ftype_int, AVR32_BUILTIN_MFDR);
1037 + def_builtin ("__builtin_mtdr", void_ftype_int_int, AVR32_BUILTIN_MTDR);
1038 + def_builtin ("__builtin_cache", void_ftype_ptr_int, AVR32_BUILTIN_CACHE);
1039 + def_builtin ("__builtin_sync", void_ftype_int, AVR32_BUILTIN_SYNC);
1040 + def_builtin ("__builtin_tlbr", void_ftype_void, AVR32_BUILTIN_TLBR);
1041 + def_builtin ("__builtin_tlbs", void_ftype_void, AVR32_BUILTIN_TLBS);
1042 + def_builtin ("__builtin_tlbw", void_ftype_void, AVR32_BUILTIN_TLBW);
1043 + def_builtin ("__builtin_breakpoint", void_ftype_void,
1044 + AVR32_BUILTIN_BREAKPOINT);
1045 + def_builtin ("__builtin_xchg", int_ftype_ptr_int, AVR32_BUILTIN_XCHG);
1046 + def_builtin ("__builtin_ldxi", int_ftype_ptr_int, AVR32_BUILTIN_LDXI);
1047 + def_builtin ("__builtin_bswap_16", short_ftype_short,
1048 + AVR32_BUILTIN_BSWAP16);
1049 + def_builtin ("__builtin_bswap_32", int_ftype_int, AVR32_BUILTIN_BSWAP32);
1050 + def_builtin ("__builtin_cop", void_ftype_int_int_int_int_int,
1051 + AVR32_BUILTIN_COP);
1052 + def_builtin ("__builtin_mvcr_w", int_ftype_int_int, AVR32_BUILTIN_MVCR_W);
1053 + def_builtin ("__builtin_mvrc_w", void_ftype_int_int_int,
1054 + AVR32_BUILTIN_MVRC_W);
1055 + def_builtin ("__builtin_mvcr_d", longlong_ftype_int_int,
1056 + AVR32_BUILTIN_MVCR_D);
1057 + def_builtin ("__builtin_mvrc_d", void_ftype_int_int_longlong,
1058 + AVR32_BUILTIN_MVRC_D);
1059 + def_builtin ("__builtin_sats", int_ftype_int_int_int, AVR32_BUILTIN_SATS);
1060 + def_builtin ("__builtin_satu", int_ftype_int_int_int, AVR32_BUILTIN_SATU);
1061 + def_builtin ("__builtin_satrnds", int_ftype_int_int_int,
1062 + AVR32_BUILTIN_SATRNDS);
1063 + def_builtin ("__builtin_satrndu", int_ftype_int_int_int,
1064 + AVR32_BUILTIN_SATRNDU);
1065 + def_builtin ("__builtin_musfr", void_ftype_int, AVR32_BUILTIN_MUSFR);
1066 + def_builtin ("__builtin_mustr", int_ftype_void, AVR32_BUILTIN_MUSTR);
1067 + def_builtin ("__builtin_macsathh_w", int_ftype_int_short_short,
1068 + AVR32_BUILTIN_MACSATHH_W);
1069 + def_builtin ("__builtin_macwh_d", longlong_ftype_longlong_int_short,
1070 + AVR32_BUILTIN_MACWH_D);
1071 + def_builtin ("__builtin_machh_d", longlong_ftype_longlong_short_short,
1072 + AVR32_BUILTIN_MACHH_D);
1073 +
1074 + /* Add all builtins that are more or less simple operations on two
1075 + operands. */
1076 + for (i = 0, d = bdesc_2arg; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
1077 + {
1078 + /* Use one of the operands; the target can have a different mode for
1079 + mask-generating compares. */
1080 +
1081 + if (d->name == 0)
1082 + continue;
1083 +
1084 + def_mbuiltin (d->mask, d->name, *(d->ftype), d->code);
1085 + }
1086 + }
1087 +
1088 +
1089 +/* Subroutine of avr32_expand_builtin to take care of binop insns. */
1090 +
1091 +static rtx
1092 +avr32_expand_binop_builtin (enum insn_code icode, tree arglist, rtx target)
1093 + {
1094 + rtx pat;
1095 + tree arg0 = TREE_VALUE (arglist);
1096 + tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
1097 + rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
1098 + rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
1099 + enum machine_mode tmode = insn_data[icode].operand[0].mode;
1100 + enum machine_mode mode0 = insn_data[icode].operand[1].mode;
1101 + enum machine_mode mode1 = insn_data[icode].operand[2].mode;
1102 +
1103 + if (!target
1104 + || GET_MODE (target) != tmode
1105 + || !(*insn_data[icode].operand[0].predicate) (target, tmode))
1106 + target = gen_reg_rtx (tmode);
1107 +
1108 +  /* If the insn wants input operands in modes different from the
1109 +     result, convert or copy them into the expected modes below.  */
1110 + if (!(*insn_data[icode].operand[1].predicate) (op0, mode0))
1111 + {
1112 + /* If op0 is already a reg we must cast it to the correct mode. */
1113 + if (REG_P (op0))
1114 + op0 = convert_to_mode (mode0, op0, 1);
1115 + else
1116 + op0 = copy_to_mode_reg (mode0, op0);
1117 + }
1118 + if (!(*insn_data[icode].operand[2].predicate) (op1, mode1))
1119 + {
1120 + /* If op1 is already a reg we must cast it to the correct mode. */
1121 + if (REG_P (op1))
1122 + op1 = convert_to_mode (mode1, op1, 1);
1123 + else
1124 + op1 = copy_to_mode_reg (mode1, op1);
1125 + }
1126 + pat = GEN_FCN (icode) (target, op0, op1);
1127 + if (!pat)
1128 + return 0;
1129 + emit_insn (pat);
1130 + return target;
1131 + }
1132 +
1133 +/* Expand an expression EXP that calls a built-in function,
1134 + with result going to TARGET if that's convenient
1135 + (and in mode MODE if that's convenient).
1136 + SUBTARGET may be used as the target for computing one of EXP's operands.
1137 + IGNORE is nonzero if the value is to be ignored. */
1138 +
1139 +rtx
1140 +avr32_expand_builtin (tree exp,
1141 + rtx target,
1142 + rtx subtarget ATTRIBUTE_UNUSED,
1143 + enum machine_mode mode ATTRIBUTE_UNUSED,
1144 + int ignore ATTRIBUTE_UNUSED)
1145 + {
1146 + const struct builtin_description *d;
1147 + unsigned int i;
1148 + enum insn_code icode;
1149 + tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
1150 + tree arglist = TREE_OPERAND (exp, 1);
1151 + tree arg0, arg1, arg2;
1152 + rtx op0, op1, op2, pat;
1153 + enum machine_mode tmode, mode0, mode1;
1154 + enum machine_mode arg0_mode;
1155 + int fcode = DECL_FUNCTION_CODE (fndecl);
1156 +
1157 + switch (fcode)
1158 + {
1159 + default:
1160 + break;
1161 +
1162 + case AVR32_BUILTIN_SATS:
1163 + case AVR32_BUILTIN_SATU:
1164 + case AVR32_BUILTIN_SATRNDS:
1165 + case AVR32_BUILTIN_SATRNDU:
1166 + {
1167 + const char *fname;
1168 + switch (fcode)
1169 + {
1170 + default:
1171 + case AVR32_BUILTIN_SATS:
1172 + icode = CODE_FOR_sats;
1173 + fname = "sats";
1174 + break;
1175 + case AVR32_BUILTIN_SATU:
1176 + icode = CODE_FOR_satu;
1177 + fname = "satu";
1178 + break;
1179 + case AVR32_BUILTIN_SATRNDS:
1180 + icode = CODE_FOR_satrnds;
1181 + fname = "satrnds";
1182 + break;
1183 + case AVR32_BUILTIN_SATRNDU:
1184 + icode = CODE_FOR_satrndu;
1185 + fname = "satrndu";
1186 + break;
1187 + }
1188 +
1189 + arg0 = TREE_VALUE (arglist);
1190 + arg1 = TREE_VALUE (TREE_CHAIN (arglist));
1191 + arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
1192 + op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
1193 + op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
1194 + op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
1195 +
1196 + tmode = insn_data[icode].operand[0].mode;
1197 +
1198 +
1199 + if (target == 0
1200 + || GET_MODE (target) != tmode
1201 + || !(*insn_data[icode].operand[0].predicate) (target, tmode))
1202 + target = gen_reg_rtx (tmode);
1203 +
1204 +
1205 + if (!(*insn_data[icode].operand[0].predicate) (op0, GET_MODE (op0)))
1206 + {
1207 + op0 = copy_to_mode_reg (insn_data[icode].operand[0].mode, op0);
1208 + }
1209 +
1210 + if (!(*insn_data[icode].operand[1].predicate) (op1, SImode))
1211 + {
1212 + error ("Parameter 2 to __builtin_%s should be a constant number.",
1213 + fname);
1214 + return NULL_RTX;
1215 + }
1216 +
1217 + if (!(*insn_data[icode].operand[1].predicate) (op2, SImode))
1218 + {
1219 + error ("Parameter 3 to __builtin_%s should be a constant number.",
1220 + fname);
1221 + return NULL_RTX;
1222 + }
1223 +
1224 + emit_move_insn (target, op0);
1225 + pat = GEN_FCN (icode) (target, op1, op2);
1226 + if (!pat)
1227 + return 0;
1228 + emit_insn (pat);
1229 +
1230 + return target;
1231 + }
1232 + case AVR32_BUILTIN_MUSTR:
1233 + icode = CODE_FOR_mustr;
1234 + tmode = insn_data[icode].operand[0].mode;
1235 +
1236 + if (target == 0
1237 + || GET_MODE (target) != tmode
1238 + || !(*insn_data[icode].operand[0].predicate) (target, tmode))
1239 + target = gen_reg_rtx (tmode);
1240 + pat = GEN_FCN (icode) (target);
1241 + if (!pat)
1242 + return 0;
1243 + emit_insn (pat);
1244 + return target;
1245 +
1246 + case AVR32_BUILTIN_MFSR:
1247 + icode = CODE_FOR_mfsr;
1248 + arg0 = TREE_VALUE (arglist);
1249 + op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
1250 + tmode = insn_data[icode].operand[0].mode;
1251 + mode0 = insn_data[icode].operand[1].mode;
1252 +
1253 + if (!(*insn_data[icode].operand[1].predicate) (op0, mode0))
1254 + {
1255 + error ("Parameter 1 to __builtin_mfsr must be a constant number");
1256 + }
1257 +
1258 + if (target == 0
1259 + || GET_MODE (target) != tmode
1260 + || !(*insn_data[icode].operand[0].predicate) (target, tmode))
1261 + target = gen_reg_rtx (tmode);
1262 + pat = GEN_FCN (icode) (target, op0);
1263 + if (!pat)
1264 + return 0;
1265 + emit_insn (pat);
1266 + return target;
1267 + case AVR32_BUILTIN_MTSR:
1268 + icode = CODE_FOR_mtsr;
1269 + arg0 = TREE_VALUE (arglist);
1270 + arg1 = TREE_VALUE (TREE_CHAIN (arglist));
1271 + op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
1272 + op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
1273 + mode0 = insn_data[icode].operand[0].mode;
1274 + mode1 = insn_data[icode].operand[1].mode;
1275 +
1276 + if (!(*insn_data[icode].operand[0].predicate) (op0, mode0))
1277 + {
1278 + error ("Parameter 1 to __builtin_mtsr must be a constant number");
1279 + return gen_reg_rtx (mode0);
1280 + }
1281 + if (!(*insn_data[icode].operand[1].predicate) (op1, mode1))
1282 + op1 = copy_to_mode_reg (mode1, op1);
1283 + pat = GEN_FCN (icode) (op0, op1);
1284 + if (!pat)
1285 + return 0;
1286 + emit_insn (pat);
1287 + return NULL_RTX;
1288 + case AVR32_BUILTIN_MFDR:
1289 + icode = CODE_FOR_mfdr;
1290 + arg0 = TREE_VALUE (arglist);
1291 + op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
1292 + tmode = insn_data[icode].operand[0].mode;
1293 + mode0 = insn_data[icode].operand[1].mode;
1294 +
1295 + if (!(*insn_data[icode].operand[1].predicate) (op0, mode0))
1296 + {
1297 + error ("Parameter 1 to __builtin_mfdr must be a constant number");
1298 + }
1299 +
1300 + if (target == 0
1301 + || GET_MODE (target) != tmode
1302 + || !(*insn_data[icode].operand[0].predicate) (target, tmode))
1303 + target = gen_reg_rtx (tmode);
1304 + pat = GEN_FCN (icode) (target, op0);
1305 + if (!pat)
1306 + return 0;
1307 + emit_insn (pat);
1308 + return target;
1309 + case AVR32_BUILTIN_MTDR:
1310 + icode = CODE_FOR_mtdr;
1311 + arg0 = TREE_VALUE (arglist);
1312 + arg1 = TREE_VALUE (TREE_CHAIN (arglist));
1313 + op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
1314 + op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
1315 + mode0 = insn_data[icode].operand[0].mode;
1316 + mode1 = insn_data[icode].operand[1].mode;
1317 +
1318 + if (!(*insn_data[icode].operand[0].predicate) (op0, mode0))
1319 + {
1320 + error ("Parameter 1 to __builtin_mtdr must be a constant number");
1321 + return gen_reg_rtx (mode0);
1322 + }
1323 + if (!(*insn_data[icode].operand[1].predicate) (op1, mode1))
1324 + op1 = copy_to_mode_reg (mode1, op1);
1325 + pat = GEN_FCN (icode) (op0, op1);
1326 + if (!pat)
1327 + return 0;
1328 + emit_insn (pat);
1329 + return NULL_RTX;
1330 + case AVR32_BUILTIN_CACHE:
1331 + icode = CODE_FOR_cache;
1332 + arg0 = TREE_VALUE (arglist);
1333 + arg1 = TREE_VALUE (TREE_CHAIN (arglist));
1334 + op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
1335 + op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
1336 + mode0 = insn_data[icode].operand[0].mode;
1337 + mode1 = insn_data[icode].operand[1].mode;
1338 +
1339 + if (!(*insn_data[icode].operand[1].predicate) (op1, mode1))
1340 + {
1341 + error ("Parameter 2 to __builtin_cache must be a constant number");
1342 + return gen_reg_rtx (mode1);
1343 + }
1344 +
1345 + if (!(*insn_data[icode].operand[0].predicate) (op0, mode0))
1346 + op0 = copy_to_mode_reg (mode0, op0);
1347 +
1348 + pat = GEN_FCN (icode) (op0, op1);
1349 + if (!pat)
1350 + return 0;
1351 + emit_insn (pat);
1352 + return NULL_RTX;
1353 + case AVR32_BUILTIN_SYNC:
1354 + case AVR32_BUILTIN_MUSFR:
1355 + {
1356 + const char *fname;
1357 + switch (fcode)
1358 + {
1359 + default:
1360 + case AVR32_BUILTIN_SYNC:
1361 + icode = CODE_FOR_sync;
1362 + fname = "sync";
1363 + break;
1364 + case AVR32_BUILTIN_MUSFR:
1365 + icode = CODE_FOR_musfr;
1366 + fname = "musfr";
1367 + break;
1368 + }
1369 +
1370 + arg0 = TREE_VALUE (arglist);
1371 + op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
1372 + mode0 = insn_data[icode].operand[0].mode;
1373 +
1374 + if (!(*insn_data[icode].operand[0].predicate) (op0, mode0))
1375 + {
1376 + if (icode == CODE_FOR_musfr)
1377 + op0 = copy_to_mode_reg (mode0, op0);
1378 + else
1379 + {
1380 + error ("Parameter to __builtin_%s is illegal.", fname);
1381 + return gen_reg_rtx (mode0);
1382 + }
1383 + }
1384 + pat = GEN_FCN (icode) (op0);
1385 + if (!pat)
1386 + return 0;
1387 + emit_insn (pat);
1388 + return NULL_RTX;
1389 + }
1390 + case AVR32_BUILTIN_TLBR:
1391 + icode = CODE_FOR_tlbr;
1392 + pat = GEN_FCN (icode) (NULL_RTX);
1393 + if (!pat)
1394 + return 0;
1395 + emit_insn (pat);
1396 + return NULL_RTX;
1397 + case AVR32_BUILTIN_TLBS:
1398 + icode = CODE_FOR_tlbs;
1399 + pat = GEN_FCN (icode) (NULL_RTX);
1400 + if (!pat)
1401 + return 0;
1402 + emit_insn (pat);
1403 + return NULL_RTX;
1404 + case AVR32_BUILTIN_TLBW:
1405 + icode = CODE_FOR_tlbw;
1406 + pat = GEN_FCN (icode) (NULL_RTX);
1407 + if (!pat)
1408 + return 0;
1409 + emit_insn (pat);
1410 + return NULL_RTX;
1411 + case AVR32_BUILTIN_BREAKPOINT:
1412 + icode = CODE_FOR_breakpoint;
1413 + pat = GEN_FCN (icode) (NULL_RTX);
1414 + if (!pat)
1415 + return 0;
1416 + emit_insn (pat);
1417 + return NULL_RTX;
1418 + case AVR32_BUILTIN_XCHG:
1419 + icode = CODE_FOR_sync_lock_test_and_setsi;
1420 + arg0 = TREE_VALUE (arglist);
1421 + arg1 = TREE_VALUE (TREE_CHAIN (arglist));
1422 + op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
1423 + op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
1424 + tmode = insn_data[icode].operand[0].mode;
1425 + mode0 = insn_data[icode].operand[1].mode;
1426 + mode1 = insn_data[icode].operand[2].mode;
1427 +
1428 + if (!(*insn_data[icode].operand[2].predicate) (op1, mode1))
1429 + {
1430 + op1 = copy_to_mode_reg (mode1, op1);
1431 + }
1432 +
1433 + op0 = gen_rtx_MEM (SImode, op0);
1434 + if (!(*insn_data[icode].operand[1].predicate) (op0, mode0))
1435 + {
1436 + error
1437 + ("Parameter 1 to __builtin_xchg must be a pointer to an integer.");
1438 + }
1439 +
1440 + if (target == 0
1441 + || GET_MODE (target) != tmode
1442 + || !(*insn_data[icode].operand[0].predicate) (target, tmode))
1443 + target = gen_reg_rtx (tmode);
1444 + pat = GEN_FCN (icode) (target, op0, op1);
1445 + if (!pat)
1446 + return 0;
1447 + emit_insn (pat);
1448 + return target;
1449 + case AVR32_BUILTIN_LDXI:
1450 + icode = CODE_FOR_ldxi;
1451 + arg0 = TREE_VALUE (arglist);
1452 + arg1 = TREE_VALUE (TREE_CHAIN (arglist));
1453 + arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
1454 + op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
1455 + op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
1456 + op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
1457 + tmode = insn_data[icode].operand[0].mode;
1458 + mode0 = insn_data[icode].operand[1].mode;
1459 + mode1 = insn_data[icode].operand[2].mode;
1460 +
1461 + if (!(*insn_data[icode].operand[1].predicate) (op0, mode0))
1462 + {
1463 + op0 = copy_to_mode_reg (mode0, op0);
1464 + }
1465 +
1466 + if (!(*insn_data[icode].operand[2].predicate) (op1, mode1))
1467 + {
1468 + op1 = copy_to_mode_reg (mode1, op1);
1469 + }
1470 +
1471 + if (!(*insn_data[icode].operand[3].predicate) (op2, SImode))
1472 + {
1473 + error
1474 + ("Parameter 3 to __builtin_ldxi must be a valid extract shift operand: (0|8|16|24)");
1475 + return gen_reg_rtx (mode0);
1476 + }
1477 +
1478 + if (target == 0
1479 + || GET_MODE (target) != tmode
1480 + || !(*insn_data[icode].operand[0].predicate) (target, tmode))
1481 + target = gen_reg_rtx (tmode);
1482 + pat = GEN_FCN (icode) (target, op0, op1, op2);
1483 + if (!pat)
1484 + return 0;
1485 + emit_insn (pat);
1486 + return target;
1487 + case AVR32_BUILTIN_BSWAP16:
1488 + {
1489 + icode = CODE_FOR_bswap_16;
1490 + arg0 = TREE_VALUE (arglist);
1491 + arg0_mode = TYPE_MODE (TREE_TYPE (arg0));
1492 + mode0 = insn_data[icode].operand[1].mode;
1493 + if (arg0_mode != mode0)
1494 + arg0 = build1 (NOP_EXPR,
1495 + (*lang_hooks.types.type_for_mode) (mode0, 0), arg0);
1496 +
1497 + op0 = expand_expr (arg0, NULL_RTX, HImode, 0);
1498 + tmode = insn_data[icode].operand[0].mode;
1499 +
1500 +
1501 + if (!(*insn_data[icode].operand[1].predicate) (op0, mode0))
1502 + {
1503 + op0 = copy_to_mode_reg (mode0, op0);
1504 + }
1505 +
1506 + if (target == 0
1507 + || GET_MODE (target) != tmode
1508 + || !(*insn_data[icode].operand[0].predicate) (target, tmode))
1509 + {
1510 + target = gen_reg_rtx (tmode);
1511 + }
1512 +
1513 +
1514 + pat = GEN_FCN (icode) (target, op0);
1515 + if (!pat)
1516 + return 0;
1517 + emit_insn (pat);
1518 +
1519 + return target;
1520 + }
1521 + case AVR32_BUILTIN_BSWAP32:
1522 + {
1523 + icode = CODE_FOR_bswap_32;
1524 + arg0 = TREE_VALUE (arglist);
1525 + op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
1526 + tmode = insn_data[icode].operand[0].mode;
1527 + mode0 = insn_data[icode].operand[1].mode;
1528 +
1529 + if (!(*insn_data[icode].operand[1].predicate) (op0, mode0))
1530 + {
1531 + op0 = copy_to_mode_reg (mode0, op0);
1532 + }
1533 +
1534 + if (target == 0
1535 + || GET_MODE (target) != tmode
1536 + || !(*insn_data[icode].operand[0].predicate) (target, tmode))
1537 + target = gen_reg_rtx (tmode);
1538 +
1539 +
1540 + pat = GEN_FCN (icode) (target, op0);
1541 + if (!pat)
1542 + return 0;
1543 + emit_insn (pat);
1544 +
1545 + return target;
1546 + }
1547 + case AVR32_BUILTIN_MVCR_W:
1548 + case AVR32_BUILTIN_MVCR_D:
1549 + {
1550 + arg0 = TREE_VALUE (arglist);
1551 + arg1 = TREE_VALUE (TREE_CHAIN (arglist));
1552 + op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
1553 + op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
1554 +
1555 + if (fcode == AVR32_BUILTIN_MVCR_W)
1556 + icode = CODE_FOR_mvcrsi;
1557 + else
1558 + icode = CODE_FOR_mvcrdi;
1559 +
1560 + tmode = insn_data[icode].operand[0].mode;
1561 +
1562 + if (target == 0
1563 + || GET_MODE (target) != tmode
1564 + || !(*insn_data[icode].operand[0].predicate) (target, tmode))
1565 + target = gen_reg_rtx (tmode);
1566 +
1567 + if (!(*insn_data[icode].operand[1].predicate) (op0, SImode))
1568 + {
1569 + error
1570 + ("Parameter 1 to __builtin_cop is not a valid coprocessor number.");
1571 + error ("Number should be between 0 and 7.");
1572 + return NULL_RTX;
1573 + }
1574 +
1575 + if (!(*insn_data[icode].operand[2].predicate) (op1, SImode))
1576 + {
1577 + error
1578 + ("Parameter 2 to __builtin_cop is not a valid coprocessor register number.");
1579 + error ("Number should be between 0 and 15.");
1580 + return NULL_RTX;
1581 + }
1582 +
1583 + pat = GEN_FCN (icode) (target, op0, op1);
1584 + if (!pat)
1585 + return 0;
1586 + emit_insn (pat);
1587 +
1588 + return target;
1589 + }
1590 + case AVR32_BUILTIN_MACSATHH_W:
1591 + case AVR32_BUILTIN_MACWH_D:
1592 + case AVR32_BUILTIN_MACHH_D:
1593 + {
1594 + arg0 = TREE_VALUE (arglist);
1595 + arg1 = TREE_VALUE (TREE_CHAIN (arglist));
1596 + arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
1597 + op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
1598 + op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
1599 + op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
1600 +
1601 + icode = ((fcode == AVR32_BUILTIN_MACSATHH_W) ? CODE_FOR_macsathh_w :
1602 + (fcode == AVR32_BUILTIN_MACWH_D) ? CODE_FOR_macwh_d :
1603 + CODE_FOR_machh_d);
1604 +
1605 + tmode = insn_data[icode].operand[0].mode;
1606 + mode0 = insn_data[icode].operand[1].mode;
1607 + mode1 = insn_data[icode].operand[2].mode;
1608 +
1609 +
1610 + if (!target
1611 + || GET_MODE (target) != tmode
1612 + || !(*insn_data[icode].operand[0].predicate) (target, tmode))
1613 + target = gen_reg_rtx (tmode);
1614 +
1615 + if (!(*insn_data[icode].operand[0].predicate) (op0, tmode))
1616 + {
1617 + /* If op0 is already a reg we must cast it to the correct mode. */
1618 + if (REG_P (op0))
1619 + op0 = convert_to_mode (tmode, op0, 1);
1620 + else
1621 + op0 = copy_to_mode_reg (tmode, op0);
1622 + }
1623 +
1624 + if (!(*insn_data[icode].operand[1].predicate) (op1, mode0))
1625 + {
1626 + /* If op1 is already a reg we must cast it to the correct mode. */
1627 + if (REG_P (op1))
1628 + op1 = convert_to_mode (mode0, op1, 1);
1629 + else
1630 + op1 = copy_to_mode_reg (mode0, op1);
1631 + }
1632 +
1633 + if (!(*insn_data[icode].operand[2].predicate) (op2, mode1))
1634 + {
1635 +	      /* If op2 is already a reg we must cast it to the correct mode. */
1636 + if (REG_P (op2))
1637 + op2 = convert_to_mode (mode1, op2, 1);
1638 + else
1639 + op2 = copy_to_mode_reg (mode1, op2);
1640 + }
1641 +
1642 + emit_move_insn (target, op0);
1643 +
1644 + pat = GEN_FCN (icode) (target, op1, op2);
1645 + if (!pat)
1646 + return 0;
1647 + emit_insn (pat);
1648 + return target;
1649 + }
1650 + case AVR32_BUILTIN_MVRC_W:
1651 + case AVR32_BUILTIN_MVRC_D:
1652 + {
1653 + arg0 = TREE_VALUE (arglist);
1654 + arg1 = TREE_VALUE (TREE_CHAIN (arglist));
1655 + arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
1656 + op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
1657 + op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
1658 + op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
1659 +
1660 + if (fcode == AVR32_BUILTIN_MVRC_W)
1661 + icode = CODE_FOR_mvrcsi;
1662 + else
1663 + icode = CODE_FOR_mvrcdi;
1664 +
1665 + if (!(*insn_data[icode].operand[0].predicate) (op0, SImode))
1666 + {
1667 + error ("Parameter 1 is not a valid coprocessor number.");
1668 + error ("Number should be between 0 and 7.");
1669 + return NULL_RTX;
1670 + }
1671 +
1672 + if (!(*insn_data[icode].operand[1].predicate) (op1, SImode))
1673 + {
1674 + error ("Parameter 2 is not a valid coprocessor register number.");
1675 + error ("Number should be between 0 and 15.");
1676 + return NULL_RTX;
1677 + }
1678 +
1679 + if (GET_CODE (op2) == CONST_INT
1680 + || GET_CODE (op2) == CONST
1681 + || GET_CODE (op2) == SYMBOL_REF || GET_CODE (op2) == LABEL_REF)
1682 + {
1683 + op2 = force_const_mem (insn_data[icode].operand[2].mode, op2);
1684 + }
1685 +
1686 + if (!(*insn_data[icode].operand[2].predicate) (op2, GET_MODE (op2)))
1687 + op2 = copy_to_mode_reg (insn_data[icode].operand[2].mode, op2);
1688 +
1689 +
1690 + pat = GEN_FCN (icode) (op0, op1, op2);
1691 + if (!pat)
1692 + return 0;
1693 + emit_insn (pat);
1694 +
1695 + return NULL_RTX;
1696 + }
1697 + case AVR32_BUILTIN_COP:
1698 + {
1699 + rtx op3, op4;
1700 + tree arg3, arg4;
1701 + icode = CODE_FOR_cop;
1702 + arg0 = TREE_VALUE (arglist);
1703 + arg1 = TREE_VALUE (TREE_CHAIN (arglist));
1704 + arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
1705 + arg3 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arglist))));
1706 + arg4 =
1707 + TREE_VALUE (TREE_CHAIN
1708 + (TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arglist)))));
1709 + op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
1710 + op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
1711 + op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
1712 + op3 = expand_expr (arg3, NULL_RTX, VOIDmode, 0);
1713 + op4 = expand_expr (arg4, NULL_RTX, VOIDmode, 0);
1714 +
1715 + if (!(*insn_data[icode].operand[0].predicate) (op0, SImode))
1716 + {
1717 + error
1718 + ("Parameter 1 to __builtin_cop is not a valid coprocessor number.");
1719 + error ("Number should be between 0 and 7.");
1720 + return NULL_RTX;
1721 + }
1722 +
1723 + if (!(*insn_data[icode].operand[1].predicate) (op1, SImode))
1724 + {
1725 + error
1726 + ("Parameter 2 to __builtin_cop is not a valid coprocessor register number.");
1727 + error ("Number should be between 0 and 15.");
1728 + return NULL_RTX;
1729 + }
1730 +
1731 + if (!(*insn_data[icode].operand[2].predicate) (op2, SImode))
1732 + {
1733 + error
1734 + ("Parameter 3 to __builtin_cop is not a valid coprocessor register number.");
1735 + error ("Number should be between 0 and 15.");
1736 + return NULL_RTX;
1737 + }
1738 +
1739 + if (!(*insn_data[icode].operand[3].predicate) (op3, SImode))
1740 + {
1741 + error
1742 + ("Parameter 4 to __builtin_cop is not a valid coprocessor register number.");
1743 + error ("Number should be between 0 and 15.");
1744 + return NULL_RTX;
1745 + }
1746 +
1747 + if (!(*insn_data[icode].operand[4].predicate) (op4, SImode))
1748 + {
1749 + error
1750 + ("Parameter 5 to __builtin_cop is not a valid coprocessor operation.");
1751 + error ("Number should be between 0 and 127.");
1752 + return NULL_RTX;
1753 + }
1754 +
1755 + pat = GEN_FCN (icode) (op0, op1, op2, op3, op4);
1756 + if (!pat)
1757 + return 0;
1758 + emit_insn (pat);
1759 +
1760 + return target;
1761 + }
1762 + }
1763 +
1764 + for (i = 0, d = bdesc_2arg; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
1765 + if (d->code == fcode)
1766 + return avr32_expand_binop_builtin (d->icode, arglist, target);
1767 +
1768 +
1769 + /* @@@ Should really do something sensible here. */
1770 + return NULL_RTX;
1771 + }
1772 +
1773 +
1774 +/* Handle an "interrupt" or "isr" attribute;
1775 + arguments as in struct attribute_spec.handler. */
1776 +
1777 +static tree
1778 +avr32_handle_isr_attribute (tree * node, tree name, tree args,
1779 + int flags, bool * no_add_attrs)
1780 + {
1781 + if (DECL_P (*node))
1782 + {
1783 + if (TREE_CODE (*node) != FUNCTION_DECL)
1784 + {
1785 + warning ("`%s' attribute only applies to functions",
1786 + IDENTIFIER_POINTER (name));
1787 + *no_add_attrs = true;
1788 + }
1789 + /* FIXME: the argument if any is checked for type attributes; should it
1790 + be checked for decl ones? */
1791 + }
1792 + else
1793 + {
1794 + if (TREE_CODE (*node) == FUNCTION_TYPE
1795 + || TREE_CODE (*node) == METHOD_TYPE)
1796 + {
1797 + if (avr32_isr_value (args) == AVR32_FT_UNKNOWN)
1798 + {
1799 + warning ("`%s' attribute ignored", IDENTIFIER_POINTER (name));
1800 + *no_add_attrs = true;
1801 + }
1802 + }
1803 + else if (TREE_CODE (*node) == POINTER_TYPE
1804 + && (TREE_CODE (TREE_TYPE (*node)) == FUNCTION_TYPE
1805 + || TREE_CODE (TREE_TYPE (*node)) == METHOD_TYPE)
1806 + && avr32_isr_value (args) != AVR32_FT_UNKNOWN)
1807 + {
1808 + *node = build_variant_type_copy (*node);
1809 + TREE_TYPE (*node) = build_type_attribute_variant
1810 + (TREE_TYPE (*node),
1811 + tree_cons (name, args, TYPE_ATTRIBUTES (TREE_TYPE (*node))));
1812 + *no_add_attrs = true;
1813 + }
1814 + else
1815 + {
1816 + /* Possibly pass this attribute on from the type to a decl. */
1817 + if (flags & ((int) ATTR_FLAG_DECL_NEXT
1818 + | (int) ATTR_FLAG_FUNCTION_NEXT
1819 + | (int) ATTR_FLAG_ARRAY_NEXT))
1820 + {
1821 + *no_add_attrs = true;
1822 + return tree_cons (name, args, NULL_TREE);
1823 + }
1824 + else
1825 + {
1826 + warning ("`%s' attribute ignored", IDENTIFIER_POINTER (name));
1827 + }
1828 + }
1829 + }
1830 +
1831 + return NULL_TREE;
1832 + }
1833 +
1834 +/* Handle an attribute requiring a FUNCTION_DECL;
1835 + arguments as in struct attribute_spec.handler. */
1836 +static tree
1837 +avr32_handle_fndecl_attribute (tree * node, tree name,
1838 + tree args ATTRIBUTE_UNUSED,
1839 + int flags ATTRIBUTE_UNUSED,
1840 + bool * no_add_attrs)
1841 + {
1842 + if (TREE_CODE (*node) != FUNCTION_DECL)
1843 + {
1844 + warning ("%qs attribute only applies to functions",
1845 + IDENTIFIER_POINTER (name));
1846 + *no_add_attrs = true;
1847 + }
1848 +
1849 + return NULL_TREE;
1850 + }
1851 +
1852 +
1853 +/* Handle an acall attribute;
1854 + arguments as in struct attribute_spec.handler. */
1855 +
1856 +static tree
1857 +avr32_handle_acall_attribute (tree * node, tree name,
1858 + tree args ATTRIBUTE_UNUSED,
1859 + int flags ATTRIBUTE_UNUSED, bool * no_add_attrs)
1860 + {
1861 + if (TREE_CODE (*node) == FUNCTION_TYPE || TREE_CODE (*node) == METHOD_TYPE)
1862 + {
1863 + warning ("`%s' attribute not yet supported...",
1864 + IDENTIFIER_POINTER (name));
1865 + *no_add_attrs = true;
1866 + return NULL_TREE;
1867 + }
1868 +
1869 + warning ("`%s' attribute only applies to functions",
1870 + IDENTIFIER_POINTER (name));
1871 + *no_add_attrs = true;
1872 + return NULL_TREE;
1873 + }
1874 +
1875 +
1876 +/* Return 0 if the attributes for two types are incompatible, 1 if they
1877 + are compatible, and 2 if they are nearly compatible (which causes a
1878 + warning to be generated). */
1879 +
1880 +static int
1881 +avr32_comp_type_attributes (tree type1, tree type2)
1882 + {
1883 + int acall1, acall2, isr1, isr2, naked1, naked2;
1884 +
1885 + /* Check for mismatch of non-default calling convention. */
1886 + if (TREE_CODE (type1) != FUNCTION_TYPE)
1887 + return 1;
1888 +
1889 + /* Check for mismatched call attributes. */
1890 + acall1 = lookup_attribute ("acall", TYPE_ATTRIBUTES (type1)) != NULL;
1891 + acall2 = lookup_attribute ("acall", TYPE_ATTRIBUTES (type2)) != NULL;
1892 + naked1 = lookup_attribute ("naked", TYPE_ATTRIBUTES (type1)) != NULL;
1893 + naked2 = lookup_attribute ("naked", TYPE_ATTRIBUTES (type2)) != NULL;
1894 + isr1 = lookup_attribute ("isr", TYPE_ATTRIBUTES (type1)) != NULL;
1895 + if (!isr1)
1896 + isr1 = lookup_attribute ("interrupt", TYPE_ATTRIBUTES (type1)) != NULL;
1897 +
1898 + isr2 = lookup_attribute ("isr", TYPE_ATTRIBUTES (type2)) != NULL;
1899 + if (!isr2)
1900 + isr2 = lookup_attribute ("interrupt", TYPE_ATTRIBUTES (type2)) != NULL;
1901 +
1902 + if ((acall1 && isr2)
1903 + || (acall2 && isr1) || (naked1 && isr2) || (naked2 && isr1))
1904 + return 0;
1905 +
1906 + return 1;
1907 + }
1908 +
1909 +
1910 +/* Computes the type of the current function. */
1911 +
1912 +static unsigned long
1913 +avr32_compute_func_type (void)
1914 + {
1915 + unsigned long type = AVR32_FT_UNKNOWN;
1916 + tree a;
1917 + tree attr;
1918 +
1919 + if (TREE_CODE (current_function_decl) != FUNCTION_DECL)
1920 + abort ();
1921 +
1922 + /* Decide if the current function is volatile. Such functions never
1923 + return, and many memory cycles can be saved by not storing register
1924 + values that will never be needed again. This optimization was added to
1925 + speed up context switching in a kernel application. */
1926 + if (optimize > 0
1927 + && TREE_NOTHROW (current_function_decl)
1928 + && TREE_THIS_VOLATILE (current_function_decl))
1929 + type |= AVR32_FT_VOLATILE;
1930 +
1931 + if (cfun->static_chain_decl != NULL)
1932 + type |= AVR32_FT_NESTED;
1933 +
1934 + attr = DECL_ATTRIBUTES (current_function_decl);
1935 +
1936 + a = lookup_attribute ("isr", attr);
1937 + if (a == NULL_TREE)
1938 + a = lookup_attribute ("interrupt", attr);
1939 +
1940 + if (a == NULL_TREE)
1941 + type |= AVR32_FT_NORMAL;
1942 + else
1943 + type |= avr32_isr_value (TREE_VALUE (a));
1944 +
1945 +
1946 + a = lookup_attribute ("acall", attr);
1947 + if (a != NULL_TREE)
1948 + type |= AVR32_FT_ACALL;
1949 +
1950 + a = lookup_attribute ("naked", attr);
1951 + if (a != NULL_TREE)
1952 + type |= AVR32_FT_NAKED;
1953 +
1954 + return type;
1955 + }
1956 +
1957 +/* Returns the type of the current function. */
1958 +
1959 +static unsigned long
1960 +avr32_current_func_type (void)
1961 + {
1962 + if (AVR32_FUNC_TYPE (cfun->machine->func_type) == AVR32_FT_UNKNOWN)
1963 + cfun->machine->func_type = avr32_compute_func_type ();
1964 +
1965 + return cfun->machine->func_type;
1966 + }
1967 +
1968 +/*
1969 + This target hook should return true if we should not pass type solely
1970 + in registers. The file expr.h defines a definition that is usually appropriate,
1971 + refer to expr.h for additional documentation.
1972 + */
1973 +bool
1974 +avr32_must_pass_in_stack (enum machine_mode mode ATTRIBUTE_UNUSED, tree type)
1975 + {
1976 + if (type && AGGREGATE_TYPE_P (type)
1977 + /* If the alignment is less than the size then pass in the struct on
1978 + the stack. */
1979 + && ((unsigned int) TYPE_ALIGN_UNIT (type) <
1980 + (unsigned int) int_size_in_bytes (type))
1981 + /* If we support unaligned word accesses then structs of size 4 and 8
1982 + can have any alignment and still be passed in registers. */
1983 + && !(TARGET_UNALIGNED_WORD
1984 + && (int_size_in_bytes (type) == 4
1985 + || int_size_in_bytes (type) == 8))
1986 + /* Double word structs need only a word alignment. */
1987 + && !(int_size_in_bytes (type) == 8 && TYPE_ALIGN_UNIT (type) >= 4))
1988 + return true;
1989 +
1990 + if (type && AGGREGATE_TYPE_P (type)
1991 + /* Structs of size 3,5,6,7 are always passed in registers. */
1992 + && (int_size_in_bytes (type) == 3
1993 + || int_size_in_bytes (type) == 5
1994 + || int_size_in_bytes (type) == 6 || int_size_in_bytes (type) == 7))
1995 + return true;
1996 +
1997 +
1998 + return (type && TREE_ADDRESSABLE (type));
1999 + }
2000 +
2001 +
2002 +bool
2003 +avr32_strict_argument_naming (CUMULATIVE_ARGS * ca ATTRIBUTE_UNUSED)
2004 + {
2005 + return true;
2006 + }
2007 +
2008 +/*
2009 + This target hook should return true if an argument at the position indicated
2010 + by cum should be passed by reference. This predicate is queried after target
2011 + independent reasons for being passed by reference, such as TREE_ADDRESSABLE (type).
2012 +
2013 + If the hook returns true, a copy of that argument is made in memory and a
2014 + pointer to the argument is passed instead of the argument itself. The pointer
2015 + is passed in whatever way is appropriate for passing a pointer to that type.
2016 + */
2017 +bool
2018 +avr32_pass_by_reference (CUMULATIVE_ARGS * cum ATTRIBUTE_UNUSED,
2019 + enum machine_mode mode ATTRIBUTE_UNUSED,
2020 + tree type, bool named ATTRIBUTE_UNUSED)
2021 + {
2022 + return (type && (TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST));
2023 + }
2024 +
2025 +static int
2026 +avr32_arg_partial_bytes (CUMULATIVE_ARGS * pcum ATTRIBUTE_UNUSED,
2027 + enum machine_mode mode ATTRIBUTE_UNUSED,
2028 + tree type ATTRIBUTE_UNUSED,
2029 + bool named ATTRIBUTE_UNUSED)
2030 + {
2031 + return 0;
2032 + }
2033 +
2034 +
2035 +struct gcc_target targetm = TARGET_INITIALIZER;
2036 +
2037 +/*
2038 + Table used to convert from register number in the assembler instructions and
2039 + the register numbers used in gcc.
2040 + */
2041 +const int avr32_function_arg_reglist[] = {
2042 + INTERNAL_REGNUM (12),
2043 + INTERNAL_REGNUM (11),
2044 + INTERNAL_REGNUM (10),
2045 + INTERNAL_REGNUM (9),
2046 + INTERNAL_REGNUM (8)
2047 +};
2048 +
2049 +rtx avr32_compare_op0 = NULL_RTX;
2050 +rtx avr32_compare_op1 = NULL_RTX;
2051 +rtx avr32_compare_operator = NULL_RTX;
2052 +rtx avr32_acc_cache = NULL_RTX;
2053 +
2054 +/*
2055 + Returns nonzero if it is allowed to store a value of mode mode in hard
2056 + register number regno.
2057 + */
2058 +int
2059 +avr32_hard_regno_mode_ok (int regnr, enum machine_mode mode)
2060 + {
2061 + /* We allow only float modes in the fp-registers */
2062 + if (regnr >= FIRST_FP_REGNUM
2063 + && regnr <= LAST_FP_REGNUM && GET_MODE_CLASS (mode) != MODE_FLOAT)
2064 + {
2065 + return 0;
2066 + }
2067 +
2068 + switch (mode)
2069 + {
2070 + case DImode: /* long long */
2071 + case DFmode: /* double */
2072 + case SCmode: /* __complex__ float */
2073 + case CSImode: /* __complex__ int */
2074 + if (regnr < 4)
2075 + { /* long long int not supported in r12, sp, lr
2076 + or pc. */
2077 + return 0;
2078 + }
2079 + else
2080 + {
2081 + if (regnr % 2) /* long long int has to be referred in even
2082 + registers. */
2083 + return 0;
2084 + else
2085 + return 1;
2086 + }
2087 + case CDImode: /* __complex__ long long */
2088 + case DCmode: /* __complex__ double */
2089 + case TImode: /* 16 bytes */
2090 + if (regnr < 7)
2091 + return 0;
2092 + else if (regnr % 2)
2093 + return 0;
2094 + else
2095 + return 1;
2096 + default:
2097 + return 1;
2098 + }
2099 + }
2100 +
2101 +
2102 +int
2103 +avr32_rnd_operands (rtx add, rtx shift)
2104 + {
2105 + if (GET_CODE (shift) == CONST_INT &&
2106 + GET_CODE (add) == CONST_INT && INTVAL (shift) > 0)
2107 + {
2108 + if ((1 << (INTVAL (shift) - 1)) == INTVAL (add))
2109 + return TRUE;
2110 + }
2111 +
2112 + return FALSE;
2113 + }
2114 +
2115 +
2116 +
2117 +int
2118 +avr32_const_ok_for_constraint_p (HOST_WIDE_INT value, char c, const char *str)
2119 + {
2120 + switch (c)
2121 + {
2122 + case 'K':
2123 + case 'I':
2124 + {
2125 + HOST_WIDE_INT min_value = 0, max_value = 0;
2126 + char size_str[3];
2127 + int const_size;
2128 +
2129 + size_str[0] = str[2];
2130 + size_str[1] = str[3];
2131 + size_str[2] = '\0';
2132 + const_size = atoi (size_str);
2133 +
2134 + if (toupper (str[1]) == 'U')
2135 + {
2136 + min_value = 0;
2137 + max_value = (1 << const_size) - 1;
2138 + }
2139 + else if (toupper (str[1]) == 'S')
2140 + {
2141 + min_value = -(1 << (const_size - 1));
2142 + max_value = (1 << (const_size - 1)) - 1;
2143 + }
2144 +
2145 + if (c == 'I')
2146 + {
2147 + value = -value;
2148 + }
2149 +
2150 + if (value >= min_value && value <= max_value)
2151 + {
2152 + return 1;
2153 + }
2154 + break;
2155 + }
2156 + case 'M':
2157 + return avr32_mask_upper_bits_operand (GEN_INT (value), VOIDmode);
2158 + }
2159 +
2160 + return 0;
2161 + }
2162 +
2163 +
2164 +/*Compute mask of which floating-point registers need saving upon
2165 + entry to this function*/
2166 +static unsigned long
2167 +avr32_compute_save_fp_reg_mask (void)
2168 + {
2169 + unsigned long func_type = avr32_current_func_type ();
2170 + unsigned int save_reg_mask = 0;
2171 + unsigned int reg;
2172 + unsigned int max_reg = 7;
2173 + int save_all_call_used_regs = FALSE;
2174 +
2175 + /* This only applies for hardware floating-point implementation. */
2176 + if (!TARGET_HARD_FLOAT)
2177 + return 0;
2178 +
2179 + if (IS_INTERRUPT (func_type))
2180 + {
2181 +
2182 + /* Interrupt functions must not corrupt any registers, even call
2183 + clobbered ones. If this is a leaf function we can just examine the
2184 + registers used by the RTL, but otherwise we have to assume that
2185 + whatever function is called might clobber anything, and so we have
2186 + to save all the call-clobbered registers as well. */
2187 + max_reg = 13;
2188 + save_all_call_used_regs = !current_function_is_leaf;
2189 + }
2190 +
2191 + /* All used registers must be saved */
2192 + for (reg = 0; reg <= max_reg; reg++)
2193 + if (regs_ever_live[INTERNAL_FP_REGNUM (reg)]
2194 + || (save_all_call_used_regs
2195 + && call_used_regs[INTERNAL_FP_REGNUM (reg)]))
2196 + save_reg_mask |= (1 << reg);
2197 +
2198 + return save_reg_mask;
2199 + }
2200 +
2201 +/*Compute mask of registers which need saving upon function entry */
2202 +static unsigned long
2203 +avr32_compute_save_reg_mask (int push)
2204 + {
2205 + unsigned long func_type;
2206 + unsigned int save_reg_mask = 0;
2207 + unsigned int reg;
2208 +
2209 + func_type = avr32_current_func_type ();
2210 +
2211 + if (IS_INTERRUPT (func_type))
2212 + {
2213 + unsigned int max_reg = 12;
2214 +
2215 +
2216 + /* Get the banking scheme for the interrupt */
2217 + switch (func_type)
2218 + {
2219 + case AVR32_FT_ISR_FULL:
2220 + max_reg = 0;
2221 + break;
2222 + case AVR32_FT_ISR_HALF:
2223 + max_reg = 7;
2224 + break;
2225 + case AVR32_FT_ISR_NONE:
2226 + max_reg = 12;
2227 + break;
2228 + }
2229 +
2230 + /* Interrupt functions must not corrupt any registers, even call
2231 + clobbered ones. If this is a leaf function we can just examine the
2232 + registers used by the RTL, but otherwise we have to assume that
2233 + whatever function is called might clobber anything, and so we have
2234 + to save all the call-clobbered registers as well. */
2235 +
2236 + /* Need not push the registers r8-r12 for AVR32A architectures, as this
2237 + is automatically done in hardware. We also do not have any shadow
2238 + registers. */
2239 + if (avr32_arch->uarch_type == UARCH_TYPE_AVR32A)
2240 + {
2241 + max_reg = 7;
2242 + func_type = AVR32_FT_ISR_NONE;
2243 + }
2244 +
2245 + /* All registers which are used and are not shadowed must be saved */
2246 + for (reg = 0; reg <= max_reg; reg++)
2247 + if (regs_ever_live[INTERNAL_REGNUM (reg)]
2248 + || (!current_function_is_leaf
2249 + && call_used_regs[INTERNAL_REGNUM (reg)]))
2250 + save_reg_mask |= (1 << reg);
2251 +
2252 + /* Check LR */
2253 + if ((regs_ever_live[LR_REGNUM]
2254 + || !current_function_is_leaf || frame_pointer_needed)
2255 + /* Only non-shadowed register models */
2256 + && (func_type == AVR32_FT_ISR_NONE))
2257 + save_reg_mask |= (1 << ASM_REGNUM (LR_REGNUM));
2258 +
2259 + /* Make sure that the GOT register is pushed. */
2260 + if (max_reg >= ASM_REGNUM (PIC_OFFSET_TABLE_REGNUM)
2261 + && current_function_uses_pic_offset_table)
2262 + save_reg_mask |= (1 << ASM_REGNUM (PIC_OFFSET_TABLE_REGNUM));
2263 +
2264 + }
2265 + else
2266 + {
2267 + int use_pushm = optimize_size;
2268 +
2269 + /* In the normal case we only need to save those registers which are
2270 + call saved and which are used by this function. */
2271 + for (reg = 0; reg <= 7; reg++)
2272 + if (regs_ever_live[INTERNAL_REGNUM (reg)]
2273 + && !call_used_regs[INTERNAL_REGNUM (reg)])
2274 + save_reg_mask |= (1 << reg);
2275 +
2276 + /* Make sure that the GOT register is pushed. */
2277 + if (current_function_uses_pic_offset_table)
2278 + save_reg_mask |= (1 << ASM_REGNUM (PIC_OFFSET_TABLE_REGNUM));
2279 +
2280 +
2281 + /* If we optimize for size and do not have anonymous arguments: use
2282 + popm/pushm always */
2283 + if (use_pushm)
2284 + {
2285 + if ((save_reg_mask & (1 << 0))
2286 + || (save_reg_mask & (1 << 1))
2287 + || (save_reg_mask & (1 << 2)) || (save_reg_mask & (1 << 3)))
2288 + save_reg_mask |= 0xf;
2289 +
2290 + if ((save_reg_mask & (1 << 4))
2291 + || (save_reg_mask & (1 << 5))
2292 + || (save_reg_mask & (1 << 6)) || (save_reg_mask & (1 << 7)))
2293 + save_reg_mask |= 0xf0;
2294 +
2295 + if ((save_reg_mask & (1 << 8)) || (save_reg_mask & (1 << 9)))
2296 + save_reg_mask |= 0x300;
2297 + }
2298 +
2299 +
2300 + /* Check LR */
2301 + if ((regs_ever_live[LR_REGNUM]
2302 + || !current_function_is_leaf
2303 + || (optimize_size
2304 + && save_reg_mask
2305 + && !current_function_calls_eh_return) || frame_pointer_needed))
2306 + {
2307 + if (push
2308 + /* Never pop LR into PC for functions which
2309 + calls __builtin_eh_return, since we need to
2310 + fix the SP after the restoring of the registers
2311 + and before returning. */
2312 + || current_function_calls_eh_return)
2313 + {
2314 + /* Push/Pop LR */
2315 + save_reg_mask |= (1 << ASM_REGNUM (LR_REGNUM));
2316 + }
2317 + else
2318 + {
2319 + /* Pop PC */
2320 + save_reg_mask |= (1 << ASM_REGNUM (PC_REGNUM));
2321 + }
2322 + }
2323 + }
2324 +
2325 +
2326 + /* Save registers so the exception handler can modify them. */
2327 + if (current_function_calls_eh_return)
2328 + {
2329 + unsigned int i;
2330 +
2331 + for (i = 0;; i++)
2332 + {
2333 + reg = EH_RETURN_DATA_REGNO (i);
2334 + if (reg == INVALID_REGNUM)
2335 + break;
2336 + save_reg_mask |= 1 << ASM_REGNUM (reg);
2337 + }
2338 + }
2339 +
2340 + return save_reg_mask;
2341 + }
2342 +
2343 +/*Compute total size in bytes of all saved registers */
2344 +static int
2345 +avr32_get_reg_mask_size (int reg_mask)
2346 + {
2347 + int reg, size;
2348 + size = 0;
2349 +
2350 + for (reg = 0; reg <= 15; reg++)
2351 + if (reg_mask & (1 << reg))
2352 + size += 4;
2353 +
2354 + return size;
2355 + }
2356 +
2357 +/*Get a register from one of the registers which are saved onto the stack
2358 + upon function entry */
2359 +
2360 +static int
2361 +avr32_get_saved_reg (int save_reg_mask)
2362 + {
2363 + unsigned int reg;
2364 +
2365 + /* Find the first register which is saved in the saved_reg_mask */
2366 + for (reg = 0; reg <= 15; reg++)
2367 + if (save_reg_mask & (1 << reg))
2368 + return reg;
2369 +
2370 + return -1;
2371 + }
2372 +
2373 +/* Return 1 if it is possible to return using a single instruction. */
2374 +int
2375 +avr32_use_return_insn (int iscond)
2376 + {
2377 + unsigned int func_type = avr32_current_func_type ();
2378 + unsigned long saved_int_regs;
2379 + unsigned long saved_fp_regs;
2380 +
2381 + /* Never use a return instruction before reload has run. */
2382 + if (!reload_completed)
2383 + return 0;
2384 +
2385 + /* Must adjust the stack for vararg functions. */
2386 + if (current_function_args_info.uses_anonymous_args)
2387 + return 0;
2388 +
2389 + /* If there is a stack adjustment. */
2390 + if (get_frame_size ())
2391 + return 0;
2392 +
2393 + saved_int_regs = avr32_compute_save_reg_mask (TRUE);
2394 + saved_fp_regs = avr32_compute_save_fp_reg_mask ();
2395 +
2396 + /* Functions which have saved fp-regs on the stack can not be performed in
2397 + one instruction */
2398 + if (saved_fp_regs)
2399 + return 0;
2400 +
2401 + /* Conditional returns can not be performed in one instruction if we need
2402 + to restore registers from the stack */
2403 + if (iscond && saved_int_regs)
2404 + return 0;
2405 +
2406 + /* Conditional return can not be used for interrupt handlers. */
2407 + if (iscond && IS_INTERRUPT (func_type))
2408 + return 0;
2409 +
2410 + /* For interrupt handlers which needs to pop registers */
2411 + if (saved_int_regs && IS_INTERRUPT (func_type))
2412 + return 0;
2413 +
2414 +
2415 + /* If there are saved registers but the LR isn't saved, then we need two
2416 + instructions for the return. */
2417 + if (saved_int_regs && !(saved_int_regs & (1 << ASM_REGNUM (LR_REGNUM))))
2418 + return 0;
2419 +
2420 +
2421 + return 1;
2422 + }
2423 +
2424 +
2425 +/*Generate some function prologue info in the assembly file*/
2426 +
2427 +void
2428 +avr32_target_asm_function_prologue (FILE * f, HOST_WIDE_INT frame_size)
2429 + {
2430 + if (IS_NAKED (avr32_current_func_type ()))
2431 + fprintf (f,
2432 + "\t# Function is naked: Prologue and epilogue provided by programmer\n");
2433 +
2434 + if (IS_INTERRUPT (avr32_current_func_type ()))
2435 + {
2436 + switch (avr32_current_func_type ())
2437 + {
2438 + case AVR32_FT_ISR_FULL:
2439 + fprintf (f,
2440 + "\t# Interrupt Function: Fully shadowed register file\n");
2441 + break;
2442 + case AVR32_FT_ISR_HALF:
2443 + fprintf (f,
2444 + "\t# Interrupt Function: Half shadowed register file\n");
2445 + break;
2446 + default:
2447 + case AVR32_FT_ISR_NONE:
2448 + fprintf (f, "\t# Interrupt Function: No shadowed register file\n");
2449 + break;
2450 + }
2451 + }
2452 +
2453 +
2454 + fprintf (f, "\t# args = %i, frame = %li, pretend = %i\n",
2455 + current_function_args_size, frame_size,
2456 + current_function_pretend_args_size);
2457 +
2458 + fprintf (f, "\t# frame_needed = %i, leaf_function = %i\n",
2459 + frame_pointer_needed, current_function_is_leaf);
2460 +
2461 + fprintf (f, "\t# uses_anonymous_args = %i\n",
2462 + current_function_args_info.uses_anonymous_args);
2463 + if (current_function_calls_eh_return)
2464 + fprintf (f, "\t# Calls __builtin_eh_return.\n");
2465 +
2466 + }
2467 +
2468 +
2469 +/* Generate and emit an insn that we will recognize as a pushm or stm.
2470 + Unfortunately, since this insn does not reflect very well the actual
2471 + semantics of the operation, we need to annotate the insn for the benefit
2472 + of DWARF2 frame unwind information. */
2473 +
2474 +int avr32_convert_to_reglist16 (int reglist8_vect);
2475 +
2476 +static rtx
2477 +emit_multi_reg_push (int reglist, int usePUSHM)
2478 + {
2479 + rtx insn;
2480 + rtx dwarf;
2481 + rtx tmp;
2482 + rtx reg;
2483 + int i;
2484 + int nr_regs;
2485 + int index = 0;
2486 +
2487 + if (usePUSHM)
2488 + {
2489 + insn = emit_insn (gen_pushm (gen_rtx_CONST_INT (SImode, reglist)));
2490 + reglist = avr32_convert_to_reglist16 (reglist);
2491 + }
2492 + else
2493 + {
2494 + insn = emit_insn (gen_stm (stack_pointer_rtx,
2495 + gen_rtx_CONST_INT (SImode, reglist),
2496 + gen_rtx_CONST_INT (SImode, 1)));
2497 + }
2498 +
2499 + nr_regs = avr32_get_reg_mask_size (reglist) / 4;
2500 + dwarf = gen_rtx_SEQUENCE (VOIDmode, rtvec_alloc (nr_regs + 1));
2501 +
2502 + for (i = 15; i >= 0; i--)
2503 + {
2504 + if (reglist & (1 << i))
2505 + {
2506 + reg = gen_rtx_REG (SImode, INTERNAL_REGNUM (i));
2507 + tmp = gen_rtx_SET (VOIDmode,
2508 + gen_rtx_MEM (SImode,
2509 + plus_constant (stack_pointer_rtx,
2510 + 4 * index)), reg);
2511 + RTX_FRAME_RELATED_P (tmp) = 1;
2512 + XVECEXP (dwarf, 0, 1 + index++) = tmp;
2513 + }
2514 + }
2515 +
2516 + tmp = gen_rtx_SET (SImode,
2517 + stack_pointer_rtx,
2518 + gen_rtx_PLUS (SImode,
2519 + stack_pointer_rtx,
2520 + GEN_INT (-4 * nr_regs)));
2521 + RTX_FRAME_RELATED_P (tmp) = 1;
2522 + XVECEXP (dwarf, 0, 0) = tmp;
2523 + REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR, dwarf,
2524 + REG_NOTES (insn));
2525 + return insn;
2526 + }
2527 +
2528 +
2529 +static rtx
2530 +emit_multi_fp_reg_push (int reglist)
2531 + {
2532 + rtx insn;
2533 + rtx dwarf;
2534 + rtx tmp;
2535 + rtx reg;
2536 + int i;
2537 + int nr_regs;
2538 + int index = 0;
2539 +
2540 + insn = emit_insn (gen_stm_fp (stack_pointer_rtx,
2541 + gen_rtx_CONST_INT (SImode, reglist),
2542 + gen_rtx_CONST_INT (SImode, 1)));
2543 +
2544 + nr_regs = avr32_get_reg_mask_size (reglist) / 4;
2545 + dwarf = gen_rtx_SEQUENCE (VOIDmode, rtvec_alloc (nr_regs + 1));
2546 +
2547 + for (i = 15; i >= 0; i--)
2548 + {
2549 + if (reglist & (1 << i))
2550 + {
2551 + reg = gen_rtx_REG (SImode, INTERNAL_FP_REGNUM (i));
2552 + tmp = gen_rtx_SET (VOIDmode,
2553 + gen_rtx_MEM (SImode,
2554 + plus_constant (stack_pointer_rtx,
2555 + 4 * index)), reg);
2556 + RTX_FRAME_RELATED_P (tmp) = 1;
2557 + XVECEXP (dwarf, 0, 1 + index++) = tmp;
2558 + }
2559 + }
2560 +
2561 + tmp = gen_rtx_SET (SImode,
2562 + stack_pointer_rtx,
2563 + gen_rtx_PLUS (SImode,
2564 + stack_pointer_rtx,
2565 + GEN_INT (-4 * nr_regs)));
2566 + RTX_FRAME_RELATED_P (tmp) = 1;
2567 + XVECEXP (dwarf, 0, 0) = tmp;
2568 + REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR, dwarf,
2569 + REG_NOTES (insn));
2570 + return insn;
2571 + }
2572 +
2573 +rtx
2574 +avr32_gen_load_multiple (rtx * regs, int count, rtx from,
2575 + int write_back, int in_struct_p, int scalar_p)
2576 + {
2577 +
2578 + rtx result;
2579 + int i = 0, j;
2580 +
2581 + result =
2582 + gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (count + (write_back ? 1 : 0)));
2583 +
2584 + if (write_back)
2585 + {
2586 + XVECEXP (result, 0, 0)
2587 + = gen_rtx_SET (GET_MODE (from), from,
2588 + plus_constant (from, count * 4));
2589 + i = 1;
2590 + count++;
2591 + }
2592 +
2593 +
2594 + for (j = 0; i < count; i++, j++)
2595 + {
2596 + rtx unspec;
2597 + rtx mem = gen_rtx_MEM (SImode, plus_constant (from, j * 4));
2598 + MEM_IN_STRUCT_P (mem) = in_struct_p;
2599 + MEM_SCALAR_P (mem) = scalar_p;
2600 + unspec = gen_rtx_UNSPEC (VOIDmode, gen_rtvec (1, mem), UNSPEC_LDM);
2601 + XVECEXP (result, 0, i) = gen_rtx_SET (VOIDmode, regs[j], unspec);
2602 + }
2603 +
2604 + return result;
2605 + }
2606 +
2607 +
2608 +rtx
2609 +avr32_gen_store_multiple (rtx * regs, int count, rtx to,
2610 + int in_struct_p, int scalar_p)
2611 + {
2612 + rtx result;
2613 + int i = 0, j;
2614 +
2615 + result = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (count));
2616 +
2617 + for (j = 0; i < count; i++, j++)
2618 + {
2619 + rtx mem = gen_rtx_MEM (SImode, plus_constant (to, j * 4));
2620 + MEM_IN_STRUCT_P (mem) = in_struct_p;
2621 + MEM_SCALAR_P (mem) = scalar_p;
2622 + XVECEXP (result, 0, i)
2623 + = gen_rtx_SET (VOIDmode, mem,
2624 + gen_rtx_UNSPEC (VOIDmode,
2625 + gen_rtvec (1, regs[j]),
2626 + UNSPEC_STORE_MULTIPLE));
2627 + }
2628 +
2629 + return result;
2630 + }
2631 +
2632 +
2633 +/* Move a block of memory if it is word aligned or we support unaligned
2634 + word memory accesses. The size must be maximum 64 bytes. */
2635 +
2636 +int
2637 +avr32_gen_movmemsi (rtx * operands)
2638 + {
2639 + HOST_WIDE_INT bytes_to_go;
2640 + rtx src, dst;
2641 + rtx st_src, st_dst;
2642 + int ptr_offset = 0;
2643 + int block_size;
2644 + int dst_in_struct_p, src_in_struct_p;
2645 + int dst_scalar_p, src_scalar_p;
2646 + int unaligned;
2647 +
2648 + if (GET_CODE (operands[2]) != CONST_INT
2649 + || GET_CODE (operands[3]) != CONST_INT
2650 + || INTVAL (operands[2]) > 64
2651 + || ((INTVAL (operands[3]) & 3) && !TARGET_UNALIGNED_WORD))
2652 + return 0;
2653 +
2654 + unaligned = (INTVAL (operands[3]) & 3) != 0;
2655 +
2656 + block_size = 4;
2657 +
2658 + st_dst = XEXP (operands[0], 0);
2659 + st_src = XEXP (operands[1], 0);
2660 +
2661 + dst_in_struct_p = MEM_IN_STRUCT_P (operands[0]);
2662 + dst_scalar_p = MEM_SCALAR_P (operands[0]);
2663 + src_in_struct_p = MEM_IN_STRUCT_P (operands[1]);
2664 + src_scalar_p = MEM_SCALAR_P (operands[1]);
2665 +
2666 + dst = copy_to_mode_reg (SImode, st_dst);
2667 + src = copy_to_mode_reg (SImode, st_src);
2668 +
2669 + bytes_to_go = INTVAL (operands[2]);
2670 +
2671 + while (bytes_to_go)
2672 + {
2673 + enum machine_mode move_mode;
2674 + /* (Seems to be a problem with reloads for the movti pattern so this is
2675 + disabled until that problem is resolved)
2676 + UPDATE: Problem seems to be solved now.... */
2677 + if (bytes_to_go >= GET_MODE_SIZE (TImode) && !unaligned
2678 + /* Do not emit ldm/stm for UC3 as ld.d/st.d is more optimal. */
2679 + && avr32_arch->arch_type != ARCH_TYPE_AVR32_UC)
2680 + move_mode = TImode;
2681 + else if ((bytes_to_go >= GET_MODE_SIZE (DImode)) && !unaligned)
2682 + move_mode = DImode;
2683 + else if (bytes_to_go >= GET_MODE_SIZE (SImode))
2684 + move_mode = SImode;
2685 + else
2686 + move_mode = QImode;
2687 +
2688 + {
2689 + rtx dst_mem = gen_rtx_MEM (move_mode,
2690 + gen_rtx_PLUS (SImode, dst,
2691 + GEN_INT (ptr_offset)));
2692 + rtx src_mem = gen_rtx_MEM (move_mode,
2693 + gen_rtx_PLUS (SImode, src,
2694 + GEN_INT (ptr_offset)));
2695 + ptr_offset += GET_MODE_SIZE (move_mode);
2696 + bytes_to_go -= GET_MODE_SIZE (move_mode);
2697 +
2698 + MEM_IN_STRUCT_P (dst_mem) = dst_in_struct_p;
2699 + MEM_SCALAR_P (dst_mem) = dst_scalar_p;
2700 +
2701 + MEM_IN_STRUCT_P (src_mem) = src_in_struct_p;
2702 + MEM_SCALAR_P (src_mem) = src_scalar_p;
2703 + emit_move_insn (dst_mem, src_mem);
2704 +
2705 + }
2706 + }
2707 +
2708 + return 1;
2709 + }
2710 +
2711 +
2712 +
2713 +/*Expand the prologue instruction*/
2714 +void
2715 +avr32_expand_prologue (void)
2716 + {
2717 + rtx insn, dwarf;
2718 + unsigned long saved_reg_mask, saved_fp_reg_mask;
2719 + int reglist8 = 0;
2720 +
2721 + /* Naked functions do not have a prologue */
2722 + if (IS_NAKED (avr32_current_func_type ()))
2723 + return;
2724 +
2725 + saved_reg_mask = avr32_compute_save_reg_mask (TRUE);
2726 +
2727 + if (saved_reg_mask)
2728 + {
2729 + /* Must push used registers */
2730 +
2731 + /* Should we use PUSHM or STM? */
2732 + int usePUSHM = TRUE;
2733 + reglist8 = 0;
2734 + if (((saved_reg_mask & (1 << 0)) ||
2735 + (saved_reg_mask & (1 << 1)) ||
2736 + (saved_reg_mask & (1 << 2)) || (saved_reg_mask & (1 << 3))))
2737 + {
2738 + /* One of R0-R3 should at least be pushed */
2739 + if (((saved_reg_mask & (1 << 0)) &&
2740 + (saved_reg_mask & (1 << 1)) &&
2741 + (saved_reg_mask & (1 << 2)) && (saved_reg_mask & (1 << 3))))
2742 + {
2743 + /* All should be pushed */
2744 + reglist8 |= 0x01;
2745 + }
2746 + else
2747 + {
2748 + usePUSHM = FALSE;
2749 + }
2750 + }
2751 +
2752 + if (((saved_reg_mask & (1 << 4)) ||
2753 + (saved_reg_mask & (1 << 5)) ||
2754 + (saved_reg_mask & (1 << 6)) || (saved_reg_mask & (1 << 7))))
2755 + {
2756 + /* One of R4-R7 should at least be pushed */
2757 + if (((saved_reg_mask & (1 << 4)) &&
2758 + (saved_reg_mask & (1 << 5)) &&
2759 + (saved_reg_mask & (1 << 6)) && (saved_reg_mask & (1 << 7))))
2760 + {
2761 + if (usePUSHM)
2762 + /* All should be pushed */
2763 + reglist8 |= 0x02;
2764 + }
2765 + else
2766 + {
2767 + usePUSHM = FALSE;
2768 + }
2769 + }
2770 +
2771 + if (((saved_reg_mask & (1 << 8)) || (saved_reg_mask & (1 << 9))))
2772 + {
2773 + /* One of R8-R9 should at least be pushed */
2774 + if (((saved_reg_mask & (1 << 8)) && (saved_reg_mask & (1 << 9))))
2775 + {
2776 + if (usePUSHM)
2777 + /* All should be pushed */
2778 + reglist8 |= 0x04;
2779 + }
2780 + else
2781 + {
2782 + usePUSHM = FALSE;
2783 + }
2784 + }
2785 +
2786 + if (saved_reg_mask & (1 << 10))
2787 + reglist8 |= 0x08;
2788 +
2789 + if (saved_reg_mask & (1 << 11))
2790 + reglist8 |= 0x10;
2791 +
2792 + if (saved_reg_mask & (1 << 12))
2793 + reglist8 |= 0x20;
2794 +
2795 + if (saved_reg_mask & (1 << ASM_REGNUM (LR_REGNUM)))
2796 + {
2797 + /* Push LR */
2798 + reglist8 |= 0x40;
2799 + }
2800 +
2801 + if (usePUSHM)
2802 + {
2803 + insn = emit_multi_reg_push (reglist8, TRUE);
2804 + }
2805 + else
2806 + {
2807 + insn = emit_multi_reg_push (saved_reg_mask, FALSE);
2808 + }
2809 + RTX_FRAME_RELATED_P (insn) = 1;
2810 +
2811 + /* Prevent this instruction from being scheduled after any other
2812 + instructions. */
2813 + emit_insn (gen_blockage ());
2814 + }
2815 +
2816 + saved_fp_reg_mask = avr32_compute_save_fp_reg_mask ();
2817 + if (saved_fp_reg_mask)
2818 + {
2819 + insn = emit_multi_fp_reg_push (saved_fp_reg_mask);
2820 + RTX_FRAME_RELATED_P (insn) = 1;
2821 +
2822 + /* Prevent this instruction from being scheduled after any other
2823 + instructions. */
2824 + emit_insn (gen_blockage ());
2825 + }
2826 +
2827 + /* Set frame pointer */
2828 + if (frame_pointer_needed)
2829 + {
2830 + insn = emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
2831 + RTX_FRAME_RELATED_P (insn) = 1;
2832 + }
2833 +
2834 + if (get_frame_size () > 0)
2835 + {
2836 + if (avr32_const_ok_for_constraint_p (get_frame_size (), 'K', "Ks21"))
2837 + {
2838 + insn = emit_insn (gen_rtx_SET (SImode,
2839 + stack_pointer_rtx,
2840 + gen_rtx_PLUS (SImode,
2841 + stack_pointer_rtx,
2842 + gen_rtx_CONST_INT
2843 + (SImode,
2844 + -get_frame_size
2845 + ()))));
2846 + RTX_FRAME_RELATED_P (insn) = 1;
2847 + }
2848 + else
2849 + {
2850 + /* Immediate is larger than k21. We must either check if we can use
2851 + one of the pushed registers as temporary storage or we must
2852 + make us a temp register by pushing a register to the stack. */
2853 + rtx temp_reg, const_pool_entry, insn;
2854 + if (saved_reg_mask)
2855 + {
2856 + temp_reg =
2857 + gen_rtx_REG (SImode,
2858 + INTERNAL_REGNUM (avr32_get_saved_reg
2859 + (saved_reg_mask)));
2860 + }
2861 + else
2862 + {
2863 + temp_reg = gen_rtx_REG (SImode, INTERNAL_REGNUM (7));
2864 + emit_move_insn (gen_rtx_MEM
2865 + (SImode,
2866 + gen_rtx_PRE_DEC (SImode, stack_pointer_rtx)),
2867 + temp_reg);
2868 + }
2869 +
2870 + const_pool_entry =
2871 + force_const_mem (SImode,
2872 + gen_rtx_CONST_INT (SImode, get_frame_size ()));
2873 + emit_move_insn (temp_reg, const_pool_entry);
2874 +
2875 + insn = emit_insn (gen_rtx_SET (SImode,
2876 + stack_pointer_rtx,
2877 + gen_rtx_MINUS (SImode,
2878 + stack_pointer_rtx,
2879 + temp_reg)));
2880 +
2881 + dwarf = gen_rtx_SET (VOIDmode, stack_pointer_rtx,
2882 + gen_rtx_PLUS (SImode, stack_pointer_rtx,
2883 + GEN_INT (-get_frame_size ())));
2884 + REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
2885 + dwarf, REG_NOTES (insn));
2886 + RTX_FRAME_RELATED_P (insn) = 1;
2887 +
2888 + if (!saved_reg_mask)
2889 + {
2890 + insn =
2891 + emit_move_insn (temp_reg,
2892 + gen_rtx_MEM (SImode,
2893 + gen_rtx_POST_INC (SImode,
2894 + gen_rtx_REG
2895 + (SImode,
2896 + 13))));
2897 + }
2898 +
2899 + /* Mark the temp register as dead */
2900 + REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_DEAD, temp_reg,
2901 + REG_NOTES (insn));
2902 +
2903 +
2904 + }
2905 +
2906 + /* Prevent the stack adjustment from being scheduled after any
2907 + instructions using the frame pointer. */
2908 + emit_insn (gen_blockage ());
2909 + }
2910 +
2911 + /* Load GOT */
2912 + if (flag_pic)
2913 + {
2914 + avr32_load_pic_register ();
2915 +
2916 + /* gcc does not know that load or call instructions might use the pic
2917 + register so it might schedule these instructions before the loading
2918 + of the pic register. To avoid this emit a barrier for now. TODO!
2919 + Find out a better way to let gcc know which instructions might use
2920 + the pic register. */
2921 + emit_insn (gen_blockage ());
2922 + }
2923 + return;
2924 + }
2925 +
2926 +void
2927 +avr32_set_return_address (rtx source, rtx scratch)
2928 + {
2929 + rtx addr;
2930 + unsigned long saved_regs;
2931 +
2932 + saved_regs = avr32_compute_save_reg_mask (TRUE);
2933 +
2934 + if (!(saved_regs & (1 << ASM_REGNUM (LR_REGNUM))))
2935 + emit_move_insn (gen_rtx_REG (Pmode, LR_REGNUM), source);
2936 + else
2937 + {
2938 + if (frame_pointer_needed)
2939 + addr = gen_rtx_REG (Pmode, FRAME_POINTER_REGNUM);
2940 + else
2941 + if (avr32_const_ok_for_constraint_p (get_frame_size (), 'K', "Ks16"))
2942 + {
2943 + addr = plus_constant (stack_pointer_rtx, get_frame_size ());
2944 + }
2945 + else
2946 + {
2947 + emit_insn (gen_movsi (scratch, GEN_INT (get_frame_size ())));
2948 + addr = scratch;
2949 + }
2950 + emit_move_insn (gen_rtx_MEM (Pmode, addr), source);
2951 + }
2952 + }
2953 +
2954 +
2955 +
2956 +/* Return the length of INSN. LENGTH is the initial length computed by
2957 + attributes in the machine-description file. */
2958 +
2959 +int
2960 +avr32_adjust_insn_length (rtx insn ATTRIBUTE_UNUSED,
2961 + int length ATTRIBUTE_UNUSED)
2962 + {
2963 + return length;
2964 + }
2965 +
2966 +void
2967 +avr32_output_return_instruction (int single_ret_inst ATTRIBUTE_UNUSED,
2968 + int iscond ATTRIBUTE_UNUSED,
2969 + rtx cond ATTRIBUTE_UNUSED, rtx r12_imm)
2970 + {
2971 +
2972 + unsigned long saved_reg_mask, saved_fp_reg_mask;
2973 + int insert_ret = TRUE;
2974 + int reglist8 = 0;
2975 + int stack_adjustment = get_frame_size ();
2976 + unsigned int func_type = avr32_current_func_type ();
2977 + FILE *f = asm_out_file;
2978 +
2979 + /* Naked functions do not have an epilogue */
2980 + if (IS_NAKED (func_type))
2981 + return;
2982 +
2983 + saved_fp_reg_mask = avr32_compute_save_fp_reg_mask ();
2984 +
2985 + saved_reg_mask = avr32_compute_save_reg_mask (FALSE);
2986 +
2987 + /* Reset frame pointer */
2988 + if (stack_adjustment > 0)
2989 + {
2990 + if (avr32_const_ok_for_constraint_p (stack_adjustment, 'I', "Is21"))
2991 + {
2992 + fprintf (f, "\tsub\tsp, %i # Reset Frame Pointer\n",
2993 + -stack_adjustment);
2994 + }
2995 + else
2996 + {
2997 + /* TODO! Is it safe to use r8 as scratch?? */
2998 + fprintf (f, "\tmov\tr8, lo(%i) # Reset Frame Pointer\n",
2999 + -stack_adjustment);
3000 + fprintf (f, "\torh\tr8, hi(%i) # Reset Frame Pointer\n",
3001 + -stack_adjustment);
3002 + fprintf (f, "\tadd\tsp, r8 # Reset Frame Pointer\n");
3003 + }
3004 + }
3005 +
3006 + if (saved_fp_reg_mask)
3007 + {
3008 + char reglist[64]; /* 64 bytes should be enough... */
3009 + avr32_make_fp_reglist_w (saved_fp_reg_mask, (char *) reglist);
3010 + fprintf (f, "\tldcm.w\tcp0, sp++, %s\n", reglist);
3011 + if (saved_fp_reg_mask & ~0xff)
3012 + {
3013 + saved_fp_reg_mask &= ~0xff;
3014 + avr32_make_fp_reglist_d (saved_fp_reg_mask, (char *) reglist);
3015 + fprintf (f, "\tldcm.d\tcp0, sp++, %s\n", reglist);
3016 + }
3017 + }
3018 +
3019 + if (saved_reg_mask)
3020 + {
3021 + /* Must pop used registers */
3022 +
3023 + /* Should we use POPM or LDM? */
3024 + int usePOPM = TRUE;
3025 + if (((saved_reg_mask & (1 << 0)) ||
3026 + (saved_reg_mask & (1 << 1)) ||
3027 + (saved_reg_mask & (1 << 2)) || (saved_reg_mask & (1 << 3))))
3028 + {
3029 + /* One of R0-R3 should at least be popped */
3030 + if (((saved_reg_mask & (1 << 0)) &&
3031 + (saved_reg_mask & (1 << 1)) &&
3032 + (saved_reg_mask & (1 << 2)) && (saved_reg_mask & (1 << 3))))
3033 + {
3034 + /* All should be popped */
3035 + reglist8 |= 0x01;
3036 + }
3037 + else
3038 + {
3039 + usePOPM = FALSE;
3040 + }
3041 + }
3042 +
3043 + if (((saved_reg_mask & (1 << 4)) ||
3044 + (saved_reg_mask & (1 << 5)) ||
3045 + (saved_reg_mask & (1 << 6)) || (saved_reg_mask & (1 << 7))))
3046 + {
3047 + /* One of R4-R7 should at least be popped */
3048 + if (((saved_reg_mask & (1 << 4)) &&
3049 + (saved_reg_mask & (1 << 5)) &&
3050 + (saved_reg_mask & (1 << 6)) && (saved_reg_mask & (1 << 7))))
3051 + {
3052 + if (usePOPM)
3053 + /* All should be popped */
3054 + reglist8 |= 0x02;
3055 + }
3056 + else
3057 + {
3058 + usePOPM = FALSE;
3059 + }
3060 + }
3061 +
3062 + if (((saved_reg_mask & (1 << 8)) || (saved_reg_mask & (1 << 9))))
3063 + {
3064 + /* One of R8-R9 should at least be popped */
3065 + if (((saved_reg_mask & (1 << 8)) && (saved_reg_mask & (1 << 9))))
3066 + {
3067 + if (usePOPM)
3068 + /* All should be popped */
3069 + reglist8 |= 0x04;
3070 + }
3071 + else
3072 + {
3073 + usePOPM = FALSE;
3074 + }
3075 + }
3076 +
3077 + if (saved_reg_mask & (1 << 10))
3078 + reglist8 |= 0x08;
3079 +
3080 + if (saved_reg_mask & (1 << 11))
3081 + reglist8 |= 0x10;
3082 +
3083 + if (saved_reg_mask & (1 << 12))
3084 + reglist8 |= 0x20;
3085 +
3086 + if (saved_reg_mask & (1 << ASM_REGNUM (LR_REGNUM)))
3087 + /* Pop LR */
3088 + reglist8 |= 0x40;
3089 +
3090 + if (saved_reg_mask & (1 << ASM_REGNUM (PC_REGNUM)))
3091 + /* Pop LR into PC. */
3092 + reglist8 |= 0x80;
3093 +
3094 + if (usePOPM)
3095 + {
3096 + char reglist[64]; /* 64 bytes should be enough... */
3097 + avr32_make_reglist8 (reglist8, (char *) reglist);
3098 +
3099 + if (reglist8 & 0x80)
3100 + /* This instruction is also a return */
3101 + insert_ret = FALSE;
3102 +
3103 + if (r12_imm && !insert_ret)
3104 + fprintf (f, "\tpopm\t%s, r12=%li\n", reglist, INTVAL (r12_imm));
3105 + else
3106 + fprintf (f, "\tpopm\t%s\n", reglist);
3107 +
3108 + }
3109 + else
3110 + {
3111 + char reglist[64]; /* 64 bytes should be enough... */
3112 + avr32_make_reglist16 (saved_reg_mask, (char *) reglist);
3113 + if (saved_reg_mask & (1 << ASM_REGNUM (PC_REGNUM)))
3114 + /* This instruction is also a return */
3115 + insert_ret = FALSE;
3116 +
3117 + if (r12_imm && !insert_ret)
3118 + fprintf (f, "\tldm\tsp++, %s, r12=%li\n", reglist,
3119 + INTVAL (r12_imm));
3120 + else
3121 + fprintf (f, "\tldm\tsp++, %s\n", reglist);
3122 +
3123 + }
3124 +
3125 + }
3126 +
3127 + /* Stack adjustment for exception handler. */
3128 + if (current_function_calls_eh_return)
3129 + fprintf (f, "\tadd\tsp, r%d\n", ASM_REGNUM (EH_RETURN_STACKADJ_REGNO));
3130 +
3131 +
3132 + if (IS_INTERRUPT (func_type))
3133 + {
3134 + fprintf (f, "\trete\n");
3135 + }
3136 + else if (insert_ret)
3137 + {
3138 + if (r12_imm)
3139 + fprintf (f, "\tretal\t%li\n", INTVAL (r12_imm));
3140 + else
3141 + fprintf (f, "\tretal\tr12\n");
3142 + }
3143 + }
3144 +
3145 +/* Function for converting a fp-register mask to a
3146 + reglistCPD8 register list string. */
3147 +void
3148 +avr32_make_fp_reglist_d (int reglist_mask, char *reglist_string)
3149 + {
3150 + int i;
3151 +
3152 + /* Make sure reglist_string is empty */
3153 + reglist_string[0] = '\0';
3154 +
3155 + for (i = 0; i < NUM_FP_REGS; i += 2)
3156 + {
3157 + if (reglist_mask & (1 << i))
3158 + {
3159 + strlen (reglist_string) ?
3160 + sprintf (reglist_string, "%s, %s-%s", reglist_string,
3161 + reg_names[INTERNAL_FP_REGNUM (i)],
3162 + reg_names[INTERNAL_FP_REGNUM (i + 1)]) :
3163 + sprintf (reglist_string, "%s-%s",
3164 + reg_names[INTERNAL_FP_REGNUM (i)],
3165 + reg_names[INTERNAL_FP_REGNUM (i + 1)]);
3166 + }
3167 + }
3168 + }
3169 +
3170 +/* Function for converting a fp-register mask to a
3171 + reglistCP8 register list string. */
3172 +void
3173 +avr32_make_fp_reglist_w (int reglist_mask, char *reglist_string)
3174 + {
3175 + int i;
3176 +
3177 + /* Make sure reglist_string is empty */
3178 + reglist_string[0] = '\0';
3179 +
3180 + for (i = 0; i < NUM_FP_REGS; ++i)
3181 + {
3182 + if (reglist_mask & (1 << i))
3183 + {
3184 + strlen (reglist_string) ?
3185 + sprintf (reglist_string, "%s, %s", reglist_string,
3186 + reg_names[INTERNAL_FP_REGNUM (i)]) :
3187 + sprintf (reglist_string, "%s", reg_names[INTERNAL_FP_REGNUM (i)]);
3188 + }
3189 + }
3190 + }
3191 +
3192 +void
3193 +avr32_make_reglist16 (int reglist16_vect, char *reglist16_string)
3194 + {
3195 + int i;
3196 +
3197 + /* Make sure reglist16_string is empty */
3198 + reglist16_string[0] = '\0';
3199 +
3200 + for (i = 0; i < 16; ++i)
3201 + {
3202 + if (reglist16_vect & (1 << i))
3203 + {
3204 + strlen (reglist16_string) ?
3205 + sprintf (reglist16_string, "%s, %s", reglist16_string,
3206 + reg_names[INTERNAL_REGNUM (i)]) :
3207 + sprintf (reglist16_string, "%s", reg_names[INTERNAL_REGNUM (i)]);
3208 + }
3209 + }
3210 + }
3211 +
3212 +int
3213 +avr32_convert_to_reglist16 (int reglist8_vect)
3214 + {
3215 + int reglist16_vect = 0;
3216 + if (reglist8_vect & 0x1)
3217 + reglist16_vect |= 0xF;
3218 + if (reglist8_vect & 0x2)
3219 + reglist16_vect |= 0xF0;
3220 + if (reglist8_vect & 0x4)
3221 + reglist16_vect |= 0x300;
3222 + if (reglist8_vect & 0x8)
3223 + reglist16_vect |= 0x400;
3224 + if (reglist8_vect & 0x10)
3225 + reglist16_vect |= 0x800;
3226 + if (reglist8_vect & 0x20)
3227 + reglist16_vect |= 0x1000;
3228 + if (reglist8_vect & 0x40)
3229 + reglist16_vect |= 0x4000;
3230 + if (reglist8_vect & 0x80)
3231 + reglist16_vect |= 0x8000;
3232 +
3233 + return reglist16_vect;
3234 + }
3235 +
3236 +void
3237 +avr32_make_reglist8 (int reglist8_vect, char *reglist8_string)
3238 + {
3239 + /* Make sure reglist8_string is empty */
3240 + reglist8_string[0] = '\0';
3241 +
3242 + if (reglist8_vect & 0x1)
3243 + sprintf (reglist8_string, "r0-r3");
3244 + if (reglist8_vect & 0x2)
3245 + strlen (reglist8_string) ? sprintf (reglist8_string, "%s, r4-r7",
3246 + reglist8_string) :
3247 + sprintf (reglist8_string, "r4-r7");
3248 + if (reglist8_vect & 0x4)
3249 + strlen (reglist8_string) ? sprintf (reglist8_string, "%s, r8-r9",
3250 + reglist8_string) :
3251 + sprintf (reglist8_string, "r8-r9");
3252 + if (reglist8_vect & 0x8)
3253 + strlen (reglist8_string) ? sprintf (reglist8_string, "%s, r10",
3254 + reglist8_string) :
3255 + sprintf (reglist8_string, "r10");
3256 + if (reglist8_vect & 0x10)
3257 + strlen (reglist8_string) ? sprintf (reglist8_string, "%s, r11",
3258 + reglist8_string) :
3259 + sprintf (reglist8_string, "r11");
3260 + if (reglist8_vect & 0x20)
3261 + strlen (reglist8_string) ? sprintf (reglist8_string, "%s, r12",
3262 + reglist8_string) :
3263 + sprintf (reglist8_string, "r12");
3264 + if (reglist8_vect & 0x40)
3265 + strlen (reglist8_string) ? sprintf (reglist8_string, "%s, lr",
3266 + reglist8_string) :
3267 + sprintf (reglist8_string, "lr");
3268 + if (reglist8_vect & 0x80)
3269 + strlen (reglist8_string) ? sprintf (reglist8_string, "%s, pc",
3270 + reglist8_string) :
3271 + sprintf (reglist8_string, "pc");
3272 + }
3273 +
3274 +int
3275 +avr32_eh_return_data_regno (int n)
3276 + {
3277 + if (n >= 0 && n <= 3)
3278 + return 8 + n;
3279 + else
3280 + return INVALID_REGNUM;
3281 + }
3282 +
3283 +/* Compute the distance from register FROM to register TO.
3284 + These can be the arg pointer, the frame pointer or
3285 + the stack pointer.
3286 + Typical stack layout looks like this:
3287 +
3288 + old stack pointer -> | |
3289 + ----
3290 + | | \
3291 + | | saved arguments for
3292 + | | vararg functions
3293 + arg_pointer -> | | /
3294 + --
3295 + | | \
3296 + | | call saved
3297 + | | registers
3298 + | | /
3299 + frame ptr -> --
3300 + | | \
3301 + | | local
3302 + | | variables
3303 + stack ptr --> | | /
3304 + --
3305 + | | \
3306 + | | outgoing
3307 + | | arguments
3308 + | | /
3309 + --
3310 +
3311 + For a given function some or all of these stack components
3312 + may not be needed, giving rise to the possibility of
3313 + eliminating some of the registers.
3314 +
3315 + The values returned by this function must reflect the behaviour
3316 + of avr32_expand_prologue() and avr32_compute_save_reg_mask().
3317 +
3318 + The sign of the number returned reflects the direction of stack
3319 + growth, so the values are positive for all eliminations except
3320 + from the soft frame pointer to the hard frame pointer. */
3321 +
3322 +
3323 +int
3324 +avr32_initial_elimination_offset (int from, int to)
3325 + {
3326 + int i;
3327 + int call_saved_regs = 0;
3328 + unsigned long saved_reg_mask, saved_fp_reg_mask;
3329 + unsigned int local_vars = get_frame_size ();
3330 +
3331 + saved_reg_mask = avr32_compute_save_reg_mask (TRUE);
3332 + saved_fp_reg_mask = avr32_compute_save_fp_reg_mask ();
3333 +
3334 + for (i = 0; i < 16; ++i)
3335 + {
3336 + if (saved_reg_mask & (1 << i))
3337 + call_saved_regs += 4;
3338 + }
3339 +
3340 + for (i = 0; i < NUM_FP_REGS; ++i)
3341 + {
3342 + if (saved_fp_reg_mask & (1 << i))
3343 + call_saved_regs += 4;
3344 + }
3345 +
3346 + switch (from)
3347 + {
3348 + case ARG_POINTER_REGNUM:
3349 + switch (to)
3350 + {
3351 + case STACK_POINTER_REGNUM:
3352 + return call_saved_regs + local_vars;
3353 + case FRAME_POINTER_REGNUM:
3354 + return call_saved_regs;
3355 + default:
3356 + abort ();
3357 + }
3358 + case FRAME_POINTER_REGNUM:
3359 + switch (to)
3360 + {
3361 + case STACK_POINTER_REGNUM:
3362 + return local_vars;
3363 + default:
3364 + abort ();
3365 + }
3366 + default:
3367 + abort ();
3368 + }
3369 + }
3370 +
3371 +
3372 +/*
3373 + Returns a rtx used when passing the next argument to a function.
3374 + avr32_init_cumulative_args() and avr32_function_arg_advance() set which
3375 + register to use.
3376 + */
3377 +rtx
3378 +avr32_function_arg (CUMULATIVE_ARGS * cum, enum machine_mode mode,
3379 + tree type, int named)
3380 + {
3381 + int index = -1;
3382 +
3383 + HOST_WIDE_INT arg_size, arg_rsize;
3384 + if (type)
3385 + {
3386 + arg_size = int_size_in_bytes (type);
3387 + }
3388 + else
3389 + {
3390 + arg_size = GET_MODE_SIZE (mode);
3391 + }
3392 + arg_rsize = PUSH_ROUNDING (arg_size);
3393 +
3394 + /*
3395 + The last time this macro is called, it is called with mode == VOIDmode,
3396 + and its result is passed to the call or call_value pattern as operands 2
3397 + and 3 respectively. */
3398 + if (mode == VOIDmode)
3399 + {
3400 + return gen_rtx_CONST_INT (SImode, 22); /* ToDo: fixme. */
3401 + }
3402 +
3403 + if ((*targetm.calls.must_pass_in_stack) (mode, type) || !named)
3404 + {
3405 + return NULL_RTX;
3406 + }
3407 +
3408 + if (arg_rsize == 8)
3409 + {
3410 + /* use r11:r10 or r9:r8. */
3411 + if (!(GET_USED_INDEX (cum, 1) || GET_USED_INDEX (cum, 2)))
3412 + index = 1;
3413 + else if (!(GET_USED_INDEX (cum, 3) || GET_USED_INDEX (cum, 4)))
3414 + index = 3;
3415 + else
3416 + index = -1;
3417 + }
3418 + else if (arg_rsize == 4)
3419 + { /* Use first available register */
3420 + index = 0;
3421 + while (index <= LAST_CUM_REG_INDEX && GET_USED_INDEX (cum, index))
3422 + index++;
3423 + if (index > LAST_CUM_REG_INDEX)
3424 + index = -1;
3425 + }
3426 +
3427 + SET_REG_INDEX (cum, index);
3428 +
3429 + if (GET_REG_INDEX (cum) >= 0)
3430 + return gen_rtx_REG (mode,
3431 + avr32_function_arg_reglist[GET_REG_INDEX (cum)]);
3432 +
3433 + return NULL_RTX;
3434 + }
3435 +
3436 +/*
3437 + Set the register used for passing the first argument to a function.
3438 + */
3439 +void
3440 +avr32_init_cumulative_args (CUMULATIVE_ARGS * cum, tree fntype,
3441 + rtx libname ATTRIBUTE_UNUSED,
3442 + tree fndecl ATTRIBUTE_UNUSED)
3443 + {
3444 + /* Set all registers as unused. */
3445 + SET_INDEXES_UNUSED (cum);
3446 +
3447 + /* Reset uses_anonymous_args */
3448 + cum->uses_anonymous_args = 0;
3449 +
3450 + /* Reset size of stack pushed arguments */
3451 + cum->stack_pushed_args_size = 0;
3452 + }
3453 +
3454 +/*
3455 + Set register used for passing the next argument to a function. Only the
3456 + Scratch Registers are used.
3457 +
3458 + number name
3459 + 15 r15 PC
3460 + 14 r14 LR
3461 + 13 r13 _SP_________
3462 + FIRST_CUM_REG 12 r12 _||_
3463 + 10 r11 ||
3464 + 11 r10 _||_ Scratch Registers
3465 + 8 r9 ||
3466 + LAST_SCRATCH_REG 9 r8 _\/_________
3467 + 6 r7 /\
3468 + 7 r6 ||
3469 + 4 r5 ||
3470 + 5 r4 ||
3471 + 2 r3 ||
3472 + 3 r2 ||
3473 + 0 r1 ||
3474 + 1 r0 _||_________
3475 +
3476 + */
3477 +void
3478 +avr32_function_arg_advance (CUMULATIVE_ARGS * cum, enum machine_mode mode,
3479 + tree type, int named ATTRIBUTE_UNUSED)
3480 + {
3481 + HOST_WIDE_INT arg_size, arg_rsize;
3482 +
3483 + if (type)
3484 + {
3485 + arg_size = int_size_in_bytes (type);
3486 + }
3487 + else
3488 + {
3489 + arg_size = GET_MODE_SIZE (mode);
3490 + }
3491 + arg_rsize = PUSH_ROUNDING (arg_size);
3492 +
3493 + /* If the argument has to be passed on the stack, no register is used. */
3494 + if ((*targetm.calls.must_pass_in_stack) (mode, type))
3495 + {
3496 + cum->stack_pushed_args_size += PUSH_ROUNDING (int_size_in_bytes (type));
3497 + return;
3498 + }
3499 +
3500 + /* Mark the used registers as "used". */
3501 + if (GET_REG_INDEX (cum) >= 0)
3502 + {
3503 + SET_USED_INDEX (cum, GET_REG_INDEX (cum));
3504 + if (arg_rsize == 8)
3505 + {
3506 + SET_USED_INDEX (cum, (GET_REG_INDEX (cum) + 1));
3507 + }
3508 + }
3509 + else
3510 + {
3511 + /* Had to use stack */
3512 + cum->stack_pushed_args_size += arg_rsize;
3513 + }
3514 + }
3515 +
3516 +/*
3517 + Defines which direction to go to find the next register to use if the
3518 + argument is larger than one register or for arguments shorter than an
3519 + int which is not promoted, such as the last part of structures with
3520 + size not a multiple of 4. */
3521 +enum direction
3522 +avr32_function_arg_padding (enum machine_mode mode ATTRIBUTE_UNUSED,
3523 + tree type)
3524 + {
3525 + /* Pad upward for all aggregates except byte and halfword sized aggregates
3526 + which can be passed in registers. */
3527 + if (type
3528 + && AGGREGATE_TYPE_P (type)
3529 + && (int_size_in_bytes (type) != 1)
3530 + && !((int_size_in_bytes (type) == 2)
3531 + && TYPE_ALIGN_UNIT (type) >= 2)
3532 + && (int_size_in_bytes (type) & 0x3))
3533 + {
3534 + return upward;
3535 + }
3536 +
3537 + return downward;
3538 + }
3539 +
3540 +/*
3541 + Return a rtx used for the return value from a function call.
3542 + */
3543 +rtx
3544 +avr32_function_value (tree type, tree func, bool outgoing ATTRIBUTE_UNUSED)
3545 + {
3546 + if (avr32_return_in_memory (type, func))
3547 + return NULL_RTX;
3548 +
3549 + if (int_size_in_bytes (type) <= 4)
3550 + if (avr32_return_in_msb (type))
3551 + /* Aggregates of size less than a word which does align the data in the
3552 + MSB must use SImode for r12. */
3553 + return gen_rtx_REG (SImode, RET_REGISTER);
3554 + else
3555 + return gen_rtx_REG (TYPE_MODE (type), RET_REGISTER);
3556 + else if (int_size_in_bytes (type) <= 8)
3557 + return gen_rtx_REG (TYPE_MODE (type), INTERNAL_REGNUM (11));
3558 +
3559 + return NULL_RTX;
3560 + }
3561 +
3562 +/*
3563 + Return a rtx used for the return value from a library function call.
3564 + */
3565 +rtx
3566 +avr32_libcall_value (enum machine_mode mode)
3567 + {
3568 +
3569 + if (GET_MODE_SIZE (mode) <= 4)
3570 + return gen_rtx_REG (mode, RET_REGISTER);
3571 + else if (GET_MODE_SIZE (mode) <= 8)
3572 + return gen_rtx_REG (mode, INTERNAL_REGNUM (11));
3573 + else
3574 + return NULL_RTX;
3575 + }
3576 +
3577 +/* Return TRUE if X references a SYMBOL_REF. */
3578 +int
3579 +symbol_mentioned_p (rtx x)
3580 + {
3581 + const char *fmt;
3582 + int i;
3583 +
3584 + if (GET_CODE (x) == SYMBOL_REF)
3585 + return 1;
3586 +
3587 + fmt = GET_RTX_FORMAT (GET_CODE (x));
3588 +
3589 + for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
3590 + {
3591 + if (fmt[i] == 'E')
3592 + {
3593 + int j;
3594 +
3595 + for (j = XVECLEN (x, i) - 1; j >= 0; j--)
3596 + if (symbol_mentioned_p (XVECEXP (x, i, j)))
3597 + return 1;
3598 + }
3599 + else if (fmt[i] == 'e' && symbol_mentioned_p (XEXP (x, i)))
3600 + return 1;
3601 + }
3602 +
3603 + return 0;
3604 + }
3605 +
3606 +/* Return TRUE if X references a LABEL_REF. */
3607 +int
3608 +label_mentioned_p (rtx x)
3609 + {
3610 + const char *fmt;
3611 + int i;
3612 +
3613 + if (GET_CODE (x) == LABEL_REF)
3614 + return 1;
3615 +
3616 + fmt = GET_RTX_FORMAT (GET_CODE (x));
3617 + for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
3618 + {
3619 + if (fmt[i] == 'E')
3620 + {
3621 + int j;
3622 +
3623 + for (j = XVECLEN (x, i) - 1; j >= 0; j--)
3624 + if (label_mentioned_p (XVECEXP (x, i, j)))
3625 + return 1;
3626 + }
3627 + else if (fmt[i] == 'e' && label_mentioned_p (XEXP (x, i)))
3628 + return 1;
3629 + }
3630 +
3631 + return 0;
3632 + }
3633 +
3634 +
3635 +int
3636 +avr32_legitimate_pic_operand_p (rtx x)
3637 + {
3638 +
3639 + /* We can't have const, this must be broken down to a symbol. */
3640 + if (GET_CODE (x) == CONST)
3641 + return FALSE;
3642 +
3643 + /* Can't access symbols or labels via the constant pool either */
3644 + if ((GET_CODE (x) == SYMBOL_REF
3645 + && CONSTANT_POOL_ADDRESS_P (x)
3646 + && (symbol_mentioned_p (get_pool_constant (x))
3647 + || label_mentioned_p (get_pool_constant (x)))))
3648 + return FALSE;
3649 +
3650 + return TRUE;
3651 + }
3652 +
3653 +
3654 +rtx
3655 +legitimize_pic_address (rtx orig, enum machine_mode mode ATTRIBUTE_UNUSED,
3656 + rtx reg)
3657 + {
3658 +
3659 + if (GET_CODE (orig) == SYMBOL_REF || GET_CODE (orig) == LABEL_REF)
3660 + {
3661 + int subregs = 0;
3662 +
3663 + if (reg == 0)
3664 + {
3665 + if (no_new_pseudos)
3666 + abort ();
3667 + else
3668 + reg = gen_reg_rtx (Pmode);
3669 +
3670 + subregs = 1;
3671 + }
3672 +
3673 + emit_move_insn (reg, orig);
3674 +
3675 + /* Only set current function as using pic offset table if flag_pic is
3676 + set. This is because this function is also used if
3677 + TARGET_HAS_ASM_ADDR_PSEUDOS is set. */
3678 + if (flag_pic)
3679 + current_function_uses_pic_offset_table = 1;
3680 +
3681 + /* Put a REG_EQUAL note on this insn, so that it can be optimized by
3682 + loop. */
3683 + return reg;
3684 + }
3685 + else if (GET_CODE (orig) == CONST)
3686 + {
3687 + rtx base, offset;
3688 +
3689 + if (flag_pic
3690 + && GET_CODE (XEXP (orig, 0)) == PLUS
3691 + && XEXP (XEXP (orig, 0), 0) == pic_offset_table_rtx)
3692 + return orig;
3693 +
3694 + if (reg == 0)
3695 + {
3696 + if (no_new_pseudos)
3697 + abort ();
3698 + else
3699 + reg = gen_reg_rtx (Pmode);
3700 + }
3701 +
3702 + if (GET_CODE (XEXP (orig, 0)) == PLUS)
3703 + {
3704 + base =
3705 + legitimize_pic_address (XEXP (XEXP (orig, 0), 0), Pmode, reg);
3706 + offset =
3707 + legitimize_pic_address (XEXP (XEXP (orig, 0), 1), Pmode,
3708 + base == reg ? 0 : reg);
3709 + }
3710 + else
3711 + abort ();
3712 +
3713 + if (GET_CODE (offset) == CONST_INT)
3714 + {
3715 + /* The base register doesn't really matter, we only want to test
3716 + the index for the appropriate mode. */
3717 + if (!avr32_const_ok_for_constraint_p (INTVAL (offset), 'I', "Is21"))
3718 + {
3719 + if (!no_new_pseudos)
3720 + offset = force_reg (Pmode, offset);
3721 + else
3722 + abort ();
3723 + }
3724 +
3725 + if (GET_CODE (offset) == CONST_INT)
3726 + return plus_constant (base, INTVAL (offset));
3727 + }
3728 +
3729 + return gen_rtx_PLUS (Pmode, base, offset);
3730 + }
3731 +
3732 + return orig;
3733 + }
3734 +
3735 +/* Generate code to load the PIC register. */
3736 +void
3737 +avr32_load_pic_register (void)
3738 + {
3739 + rtx l1, pic_tmp;
3740 + rtx global_offset_table;
3741 +
3742 + if ((current_function_uses_pic_offset_table == 0) || TARGET_NO_INIT_GOT)
3743 + return;
3744 +
3745 + if (!flag_pic)
3746 + abort ();
3747 +
3748 + l1 = gen_label_rtx ();
3749 +
3750 + global_offset_table = gen_rtx_SYMBOL_REF (Pmode, "_GLOBAL_OFFSET_TABLE_");
3751 + pic_tmp =
3752 + gen_rtx_CONST (Pmode,
3753 + gen_rtx_MINUS (SImode, gen_rtx_LABEL_REF (Pmode, l1),
3754 + global_offset_table));
3755 + emit_insn (gen_pic_load_addr
3756 + (pic_offset_table_rtx, force_const_mem (SImode, pic_tmp)));
3757 + emit_insn (gen_pic_compute_got_from_pc (pic_offset_table_rtx, l1));
3758 +
3759 + /* Need to emit this whether or not we obey regdecls, since setjmp/longjmp
3760 + can cause life info to screw up. */
3761 + emit_insn (gen_rtx_USE (VOIDmode, pic_offset_table_rtx));
3762 + }
3763 +
3764 +
3765 +
3766 +/* This hook should return true if values of type type are returned at the most
3767 + significant end of a register (in other words, if they are padded at the
3768 + least significant end). You can assume that type is returned in a register;
3769 + the caller is required to check this. Note that the register provided by
3770 + FUNCTION_VALUE must be able to hold the complete return value. For example,
3771 + if a 1-, 2- or 3-byte structure is returned at the most significant end of a
3772 + 4-byte register, FUNCTION_VALUE should provide an SImode rtx. */
3773 +bool
3774 +avr32_return_in_msb (tree type ATTRIBUTE_UNUSED)
3775 + {
3776 + /* if ( AGGREGATE_TYPE_P (type) ) if ((int_size_in_bytes(type) == 1) ||
3777 + ((int_size_in_bytes(type) == 2) && TYPE_ALIGN_UNIT(type) >= 2)) return
3778 + false; else return true; */
3779 +
3780 + return false;
3781 + }
3782 +
3783 +
3784 +/*
3785 + Returns one if a certain function value is going to be returned in memory
3786 + and zero if it is going to be returned in a register.
3787 +
3788 + BLKmode and all other modes that is larger than 64 bits are returned in
3789 + memory.
3790 + */
3791 +bool
3792 +avr32_return_in_memory (tree type, tree fntype ATTRIBUTE_UNUSED)
3793 + {
3794 + if (TYPE_MODE (type) == VOIDmode)
3795 + return false;
3796 +
3797 + if (int_size_in_bytes (type) > (2 * UNITS_PER_WORD)
3798 + || int_size_in_bytes (type) == -1)
3799 + {
3800 + return true;
3801 + }
3802 +
3803 + /* If we have an aggregate then use the same mechanism as when checking if
3804 + it should be passed on the stack. */
3805 + if (type
3806 + && AGGREGATE_TYPE_P (type)
3807 + && (*targetm.calls.must_pass_in_stack) (TYPE_MODE (type), type))
3808 + return true;
3809 +
3810 + return false;
3811 + }
3812 +
3813 +
3814 +/* Output the constant part of the trampoline.
3815 + lddpc r0, pc[0x8:e] ; load static chain register
3816 + lddpc pc, pc[0x8:e] ; jump to subroutine
3817 + .long 0 ; Address to static chain,
3818 + ; filled in by avr32_initialize_trampoline()
3819 + .long 0 ; Address to subroutine,
3820 + ; filled in by avr32_initialize_trampoline()
3821 + */
3822 +void
3823 +avr32_trampoline_template (FILE * file)
3824 + {
3825 + fprintf (file, "\tlddpc r0, pc[8]\n");
3826 + fprintf (file, "\tlddpc pc, pc[8]\n");
3827 + /* make room for the address of the static chain. */
3828 + fprintf (file, "\t.long\t0\n");
3829 + /* make room for the address to the subroutine. */
3830 + fprintf (file, "\t.long\t0\n");
3831 + }
3832 +
3833 +
3834 +/*
3835 + Initialize the variable parts of a trampoline.
3836 + */
3837 +void
3838 +avr32_initialize_trampoline (rtx addr, rtx fnaddr, rtx static_chain)
3839 + {
3840 + /* Store the address to the static chain. */
3841 + emit_move_insn (gen_rtx_MEM
3842 + (SImode, plus_constant (addr, TRAMPOLINE_SIZE - 4)),
3843 + static_chain);
3844 +
3845 + /* Store the address to the function. */
3846 + emit_move_insn (gen_rtx_MEM (SImode, plus_constant (addr, TRAMPOLINE_SIZE)),
3847 + fnaddr);
3848 +
3849 + emit_insn (gen_cache (gen_rtx_REG (SImode, 13),
3850 + gen_rtx_CONST_INT (SImode,
3851 + AVR32_CACHE_INVALIDATE_ICACHE)));
3852 + }
3853 +
3854 +/* Return nonzero if X is valid as an addressing register. */
3855 +int
3856 +avr32_address_register_rtx_p (rtx x, int strict_p)
3857 + {
3858 + int regno;
3859 +
3860 + if (!register_operand(x, GET_MODE(x)))
3861 + return 0;
3862 +
3863 + /* If strict we require the register to be a hard register. */
3864 + if (strict_p
3865 + && !REG_P(x))
3866 + return 0;
3867 +
3868 + regno = REGNO (x);
3869 +
3870 + if (strict_p)
3871 + return REGNO_OK_FOR_BASE_P (regno);
3872 +
3873 + return (regno <= LAST_REGNUM || regno >= FIRST_PSEUDO_REGISTER);
3874 + }
3875 +
3876 +/* Return nonzero if INDEX is valid for an address index operand. */
3877 +int
3878 +avr32_legitimate_index_p (enum machine_mode mode, rtx index, int strict_p)
3879 + {
3880 + enum rtx_code code = GET_CODE (index);
3881 +
3882 + if (GET_MODE_SIZE (mode) > 8)
3883 + return 0;
3884 +
3885 + /* Standard coprocessor addressing modes. */
3886 + if (code == CONST_INT)
3887 + {
3888 + if (TARGET_HARD_FLOAT && GET_MODE_CLASS (mode) == MODE_FLOAT)
3889 + /* Coprocessor mem insns has a smaller reach than ordinary mem insns */
3890 + return CONST_OK_FOR_CONSTRAINT_P (INTVAL (index), 'K', "Ku14");
3891 + else
3892 + return CONST_OK_FOR_CONSTRAINT_P (INTVAL (index), 'K', "Ks16");
3893 + }
3894 +
3895 + if (avr32_address_register_rtx_p (index, strict_p))
3896 + return 1;
3897 +
3898 + if (code == MULT)
3899 + {
3900 + rtx xiop0 = XEXP (index, 0);
3901 + rtx xiop1 = XEXP (index, 1);
3902 + return ((avr32_address_register_rtx_p (xiop0, strict_p)
3903 + && power_of_two_operand (xiop1, SImode)
3904 + && (INTVAL (xiop1) <= 8))
3905 + || (avr32_address_register_rtx_p (xiop1, strict_p)
3906 + && power_of_two_operand (xiop0, SImode)
3907 + && (INTVAL (xiop0) <= 8)));
3908 + }
3909 + else if (code == ASHIFT)
3910 + {
3911 + rtx op = XEXP (index, 1);
3912 +
3913 + return (avr32_address_register_rtx_p (XEXP (index, 0), strict_p)
3914 + && GET_CODE (op) == CONST_INT
3915 + && INTVAL (op) > 0 && INTVAL (op) <= 3);
3916 + }
3917 +
3918 + return 0;
3919 + }
3920 +
3921 +/*
3922 + Used in the GO_IF_LEGITIMATE_ADDRESS macro. Returns a nonzero value if
3923 + the RTX x is a legitimate memory address.
3924 +
3925 + Returns NO_REGS if the address is not legitimate, GENERAL_REGS or ALL_REGS
3926 + if it is.
3927 + */
3928 +
3929 +/* Forward declaration*/
3930 +int is_minipool_label (rtx label);
3931 +
3932 +int
3933 +avr32_legitimate_address (enum machine_mode mode, rtx x, int strict)
3934 + {
3935 +
3936 + switch (GET_CODE (x))
3937 + {
3938 + case REG:
3939 + return avr32_address_register_rtx_p (x, strict);
3940 + case CONST:
3941 + {
3942 + rtx label = avr32_find_symbol (x);
3943 + if (label
3944 + &&
3945 + ((CONSTANT_POOL_ADDRESS_P (label)
3946 + && !(flag_pic
3947 + && (symbol_mentioned_p (get_pool_constant (label))
3948 + || label_mentioned_p (get_pool_constant (label)))))
3949 + /* TODO! Can this ever happen??? */
3950 + || ((GET_CODE (label) == LABEL_REF)
3951 + && GET_CODE (XEXP (label, 0)) == CODE_LABEL
3952 + && is_minipool_label (XEXP (label, 0)))))
3953 + {
3954 + return TRUE;
3955 + }
3956 + }
3957 + break;
3958 + case LABEL_REF:
3959 + if (GET_CODE (XEXP (x, 0)) == CODE_LABEL
3960 + && is_minipool_label (XEXP (x, 0)))
3961 + {
3962 + return TRUE;
3963 + }
3964 + break;
3965 + case SYMBOL_REF:
3966 + {
3967 + if (CONSTANT_POOL_ADDRESS_P (x)
3968 + && !(flag_pic
3969 + && (symbol_mentioned_p (get_pool_constant (x))
3970 + || label_mentioned_p (get_pool_constant (x)))))
3971 + return TRUE;
3972 + /*
3973 + A symbol_ref is only legal if it is a function. If all of them are
3974 + legal, a pseudo reg that is a constant will be replaced by a
3975 + symbol_ref and make illegal code. SYMBOL_REF_FLAG is set by
3976 + ENCODE_SECTION_INFO. */
3977 + else if (SYMBOL_REF_RCALL_FUNCTION_P (x))
3978 + return TRUE;
3979 + break;
3980 + }
3981 + case PRE_DEC: /* (pre_dec (...)) */
3982 + case POST_INC: /* (post_inc (...)) */
3983 + return avr32_address_register_rtx_p (XEXP (x, 0), strict);
3984 + case PLUS: /* (plus (...) (...)) */
3985 + {
3986 + rtx xop0 = XEXP (x, 0);
3987 + rtx xop1 = XEXP (x, 1);
3988 +
3989 + return ((avr32_address_register_rtx_p (xop0, strict)
3990 + && avr32_legitimate_index_p (mode, xop1, strict))
3991 + || (avr32_address_register_rtx_p (xop1, strict)
3992 + && avr32_legitimate_index_p (mode, xop0, strict)));
3993 + }
3994 + default:
3995 + break;
3996 + }
3997 +
3998 + return FALSE;
3999 + }
4000 +
4001 +
4002 +int
4003 +avr32_const_double_immediate (rtx value)
4004 + {
4005 + HOST_WIDE_INT hi, lo;
4006 +
4007 + if (GET_CODE (value) != CONST_DOUBLE)
4008 + return FALSE;
4009 +
4010 + if (SCALAR_FLOAT_MODE_P (GET_MODE (value)))
4011 + {
4012 + HOST_WIDE_INT target_float[2];
4013 + hi = lo = 0;
4014 + real_to_target (target_float, CONST_DOUBLE_REAL_VALUE (value),
4015 + GET_MODE (value));
4016 + lo = target_float[0];
4017 + hi = target_float[1];
4018 + }
4019 + else
4020 + {
4021 + hi = CONST_DOUBLE_HIGH (value);
4022 + lo = CONST_DOUBLE_LOW (value);
4023 + }
4024 +
4025 + if (avr32_const_ok_for_constraint_p (lo, 'K', "Ks21")
4026 + && (GET_MODE (value) == SFmode
4027 + || avr32_const_ok_for_constraint_p (hi, 'K', "Ks21")))
4028 + {
4029 + return TRUE;
4030 + }
4031 +
4032 + return FALSE;
4033 + }
4034 +
4035 +
4036 +int
4037 +avr32_legitimate_constant_p (rtx x)
4038 + {
4039 + switch (GET_CODE (x))
4040 + {
4041 + case CONST_INT:
4044 4042 + /* Check if we should put large immediates into the constant pool
4045 4043 + or load them directly with mov/orh. */
4044 + if (!avr32_imm_in_const_pool)
4045 + return 1;
4046 +
4047 + return avr32_const_ok_for_constraint_p (INTVAL (x), 'K', "Ks21");
4048 + case CONST_DOUBLE:
4051 4049 + /* Check if we should put large immediates into the constant pool
4052 4050 + or load them directly with mov/orh. */
4051 + if (!avr32_imm_in_const_pool)
4052 + return 1;
4053 +
4054 + if (GET_MODE (x) == SFmode
4055 + || GET_MODE (x) == DFmode || GET_MODE (x) == DImode)
4056 + return avr32_const_double_immediate (x);
4057 + else
4058 + return 0;
4059 + case LABEL_REF:
4060 + return flag_pic || TARGET_HAS_ASM_ADDR_PSEUDOS;
4061 + case SYMBOL_REF:
4062 + return flag_pic || TARGET_HAS_ASM_ADDR_PSEUDOS;
4063 + case CONST:
4064 + case HIGH:
4065 + case CONST_VECTOR:
4066 + return 0;
4067 + default:
4068 + printf ("%s():\n", __FUNCTION__);
4069 + debug_rtx (x);
4070 + return 1;
4071 + }
4072 + }
4073 +
4074 +
4075 +/* Strip any special encoding from labels */
4076 +const char *
4077 +avr32_strip_name_encoding (const char *name)
4078 +{
4079 + const char *stripped = name;
4080 +
4081 + while (1)
4082 + {
4083 + switch (stripped[0])
4084 + {
4085 + case '#':
4086 + stripped = strchr (name + 1, '#') + 1;
4087 + break;
4088 + case '*':
4089 + stripped = &stripped[1];
4090 + break;
4091 + default:
4092 + return stripped;
4093 + }
4094 + }
4095 +}
4096 +
4097 +
4098 +
4099 +/* Do anything needed before RTL is emitted for each function. */
4100 +static struct machine_function *
4101 +avr32_init_machine_status (void)
4102 +{
4103 + struct machine_function *machine;
4104 + machine =
4105 + (machine_function *) ggc_alloc_cleared (sizeof (machine_function));
4106 +
4107 +#if AVR32_FT_UNKNOWN != 0
4108 + machine->func_type = AVR32_FT_UNKNOWN;
4109 +#endif
4110 +
4111 + machine->minipool_label_head = 0;
4112 + machine->minipool_label_tail = 0;
4113 + return machine;
4114 +}
4115 +
4116 +void
4117 +avr32_init_expanders (void)
4118 + {
4119 + /* Arrange to initialize and mark the machine per-function status. */
4120 + init_machine_status = avr32_init_machine_status;
4121 + }
4122 +
4123 +
4124 +/* Return an RTX indicating where the return address to the
4125 + calling function can be found. */
4126 +
4127 +rtx
4128 +avr32_return_addr (int count, rtx frame ATTRIBUTE_UNUSED)
4129 + {
4130 + if (count != 0)
4131 + return NULL_RTX;
4132 +
4133 + return get_hard_reg_initial_val (Pmode, LR_REGNUM);
4134 + }
4135 +
4136 +
4137 +void
4138 +avr32_encode_section_info (tree decl, rtx rtl, int first)
4139 + {
4140 +
4141 + if (first && DECL_P (decl))
4142 + {
4143 + /* Set SYMBOL_REG_FLAG for local functions */
4144 + if (!TREE_PUBLIC (decl) && TREE_CODE (decl) == FUNCTION_DECL)
4145 + {
4146 + if ((*targetm.binds_local_p) (decl))
4147 + {
4148 + SYMBOL_REF_FLAG (XEXP (rtl, 0)) = 1;
4149 + }
4150 + }
4151 + }
4152 + }
4153 +
4154 +
4155 +void
4156 +avr32_asm_output_ascii (FILE * stream, char *ptr, int len)
4157 + {
4158 + int i, i_new = 0;
4159 + char *new_ptr = xmalloc (4 * len);
4160 + if (new_ptr == NULL)
4161 + internal_error ("Out of memory.");
4162 +
4163 + for (i = 0; i < len; i++)
4164 + {
4165 + if (ptr[i] == '\n')
4166 + {
4167 + new_ptr[i_new++] = '\\';
4168 + new_ptr[i_new++] = '0';
4169 + new_ptr[i_new++] = '1';
4170 + new_ptr[i_new++] = '2';
4171 + }
4172 + else if (ptr[i] == '\"')
4173 + {
4174 + new_ptr[i_new++] = '\\';
4175 + new_ptr[i_new++] = '\"';
4176 + }
4177 + else if (ptr[i] == '\\')
4178 + {
4179 + new_ptr[i_new++] = '\\';
4180 + new_ptr[i_new++] = '\\';
4181 + }
4182 + else if (ptr[i] == '\0' && i + 1 < len)
4183 + {
4184 + new_ptr[i_new++] = '\\';
4185 + new_ptr[i_new++] = '0';
4186 + }
4187 + else
4188 + {
4189 + new_ptr[i_new++] = ptr[i];
4190 + }
4191 + }
4192 +
4193 + /* Terminate new_ptr. */
4194 + new_ptr[i_new] = '\0';
4195 + fprintf (stream, "\t.ascii\t\"%s\"\n", new_ptr);
4196 + free (new_ptr);
4197 + }
4198 +
4199 +
4200 +void
4201 +avr32_asm_output_label (FILE * stream, const char *name)
4202 + {
4203 + name = avr32_strip_name_encoding (name);
4204 +
4205 + /* Print the label. */
4206 + assemble_name (stream, name);
4207 + fprintf (stream, ":\n");
4208 + }
4209 +
4210 +
4211 +
4212 +void
4213 +avr32_asm_weaken_label (FILE * stream, const char *name)
4214 + {
4215 + fprintf (stream, "\t.weak ");
4216 + assemble_name (stream, name);
4217 + fprintf (stream, "\n");
4218 + }
4219 +
4220 +/*
4221 + Checks if a labelref is equal to a reserved word in the assembler. If it is,
4222 + insert a '_' before the label name.
4223 + */
4224 +void
4225 +avr32_asm_output_labelref (FILE * stream, const char *name)
4226 + {