OpenCores
URL https://opencores.org/ocsvn/openrisc/openrisc/trunk

Subversion Repositories openrisc

[/] [openrisc/] [trunk/] [gnu-dev/] [or1k-gcc/] [gcc/] [config/] [avr/] [avr.c] - Blame information for rev 709

Details | Compare with Previous | View Log

Line No. Rev Author Line
1 709 jeremybenn
/* Subroutines for insn-output.c for ATMEL AVR micro controllers
2
   Copyright (C) 1998, 1999, 2000, 2001, 2002, 2004, 2005, 2006, 2007, 2008,
3
   2009, 2010, 2011 Free Software Foundation, Inc.
4
   Contributed by Denis Chertykov (chertykov@gmail.com)
5
 
6
   This file is part of GCC.
7
 
8
   GCC is free software; you can redistribute it and/or modify
9
   it under the terms of the GNU General Public License as published by
10
   the Free Software Foundation; either version 3, or (at your option)
11
   any later version.
12
 
13
   GCC is distributed in the hope that it will be useful,
14
   but WITHOUT ANY WARRANTY; without even the implied warranty of
15
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
16
   GNU General Public License for more details.
17
 
18
   You should have received a copy of the GNU General Public License
19
   along with GCC; see the file COPYING3.  If not see
20
   <http://www.gnu.org/licenses/>.  */
21
 
22
#include "config.h"
23
#include "system.h"
24
#include "coretypes.h"
25
#include "tm.h"
26
#include "rtl.h"
27
#include "regs.h"
28
#include "hard-reg-set.h"
29
#include "insn-config.h"
30
#include "conditions.h"
31
#include "insn-attr.h"
32
#include "insn-codes.h"
33
#include "flags.h"
34
#include "reload.h"
35
#include "tree.h"
36
#include "output.h"
37
#include "expr.h"
38
#include "c-family/c-common.h"
39
#include "diagnostic-core.h"
40
#include "obstack.h"
41
#include "function.h"
42
#include "recog.h"
43
#include "optabs.h"
44
#include "ggc.h"
45
#include "langhooks.h"
46
#include "tm_p.h"
47
#include "target.h"
48
#include "target-def.h"
49
#include "params.h"
50
#include "df.h"
51
 
52
/* Maximal allowed offset for an address in the LD command */
53
#define MAX_LD_OFFSET(MODE) (64 - (signed)GET_MODE_SIZE (MODE))
54
 
55
/* Return true if STR starts with PREFIX and false, otherwise.  */
56
#define STR_PREFIX_P(STR,PREFIX) (0 == strncmp (STR, PREFIX, strlen (PREFIX)))
57
 
58
/* The 4 bits starting at SECTION_MACH_DEP are reserved to store the
   address space where data is to be located.
   As the only non-generic address spaces are all located in Flash,
   this can be used to test if data shall go into some .progmem* section.
   This must be the rightmost field of machine dependent section flags.  */
#define AVR_SECTION_PROGMEM (0xf * SECTION_MACH_DEP)

/* Similar 4-bit region for SYMBOL_REF_FLAGS.  */
#define AVR_SYMBOL_FLAG_PROGMEM (0xf * SYMBOL_FLAG_MACH_DEP)

/* Similar 4-bit region in SYMBOL_REF_FLAGS:
   Set address-space AS in SYMBOL_REF_FLAGS of SYM.
   Bug fix: the body previously referenced the lowercase identifier `sym'
   instead of the macro parameter SYM, so the macro only expanded correctly
   when the actual argument happened to be named `sym'.  AS is parenthesized
   against operator-precedence surprises in the multiplication.  */
#define AVR_SYMBOL_SET_ADDR_SPACE(SYM,AS)                       \
  do {                                                          \
    SYMBOL_REF_FLAGS (SYM) &= ~AVR_SYMBOL_FLAG_PROGMEM;         \
    SYMBOL_REF_FLAGS (SYM) |= (AS) * SYMBOL_FLAG_MACH_DEP;      \
  } while (0)

/* Read address-space from SYMBOL_REF_FLAGS of SYM.
   Same fix as above: use the parameter SYM, not a hard-coded `sym'.  */
#define AVR_SYMBOL_GET_ADDR_SPACE(SYM)                          \
  ((SYMBOL_REF_FLAGS (SYM) & AVR_SYMBOL_FLAG_PROGMEM)           \
   / SYMBOL_FLAG_MACH_DEP)
80
 
81
/* Known address spaces.  The order must be the same as in the respective
   enum from avr.h (or designated initializers must be used).

   NOTE(review): the avr_addrspace_t layout is declared in avr.h, which is
   not visible here; the columns appear to be
   { enum value, located-in-flash flag, pointer size in bytes,
     address-space keyword, 64 KiB flash segment number } — confirm.  */
const avr_addrspace_t avr_addrspace[] =
{
    { ADDR_SPACE_RAM,  0, 2, ""     ,   0 },
    { ADDR_SPACE_FLASH,  1, 2, "__flash",   0 },
    { ADDR_SPACE_FLASH1, 1, 2, "__flash1",  1 },
    { ADDR_SPACE_FLASH2, 1, 2, "__flash2",  2 },
    { ADDR_SPACE_FLASH3, 1, 2, "__flash3",  3 },
    { ADDR_SPACE_FLASH4, 1, 2, "__flash4",  4 },
    { ADDR_SPACE_FLASH5, 1, 2, "__flash5",  5 },
    { ADDR_SPACE_MEMX, 1, 3, "__memx",  0 },
    /* Sentinel terminating the table.  */
    { 0              , 0, 0, NULL,      0 }
};
95
 
96
/* Map 64-k Flash segment to section prefix.
   Index N yields the section-name prefix for data placed in the N-th
   64 KiB flash segment (segment 0 uses the classic ".progmem.data").  */
static const char* const progmem_section_prefix[6] =
  {
    ".progmem.data",
    ".progmem1.data",
    ".progmem2.data",
    ".progmem3.data",
    ".progmem4.data",
    ".progmem5.data"
  };
106
 
107
/* Holding RAM addresses of some SFRs used by the compiler and that
   are unique over all devices in an architecture like 'avr4'.
   All fields are RAM (data-space) addresses, i.e. I/O address plus the
   architecture's sfr_offset; they are filled in by avr_option_override.  */

typedef struct
{
  /* SREG: The processor status */
  int sreg;

  /* RAMPX, RAMPY, RAMPD and CCP of XMEGA */
  int ccp;
  int rampd;
  int rampx;
  int rampy;

  /* RAMPZ: The high byte of 24-bit address used with ELPM */
  int rampz;

  /* SP: The stack pointer and its low and high byte */
  int sp_l;
  int sp_h;
} avr_addr_t;

/* Singleton instance, initialized once per compilation in
   avr_option_override.  */
static avr_addr_t avr_addr;
130
 
131
 
132
/* Prototypes for local helper functions.  */

/* Assembly output helpers for QI/HI/SI moves between registers and
   memory (r_mr = register from memory, mr_r = memory from register).  */
static const char* out_movqi_r_mr (rtx, rtx[], int*);
static const char* out_movhi_r_mr (rtx, rtx[], int*);
static const char* out_movsi_r_mr (rtx, rtx[], int*);
static const char* out_movqi_mr_r (rtx, rtx[], int*);
static const char* out_movhi_mr_r (rtx, rtx[], int*);
static const char* out_movsi_mr_r (rtx, rtx[], int*);

/* Function-attribute queries (naked/interrupt/signal/OS_task/OS_main).  */
static int avr_naked_function_p (tree);
static int interrupt_function_p (tree);
static int signal_function_p (tree);
static int avr_OS_task_function_p (tree);
static int avr_OS_main_function_p (tree);
static int avr_regs_to_save (HARD_REG_SET *);
/* NOTE(review): declared `static' here, but the definition below omits
   the storage class.  Valid C (internal linkage is inherited from this
   first declaration), but the definition should match for clarity.  */
static int get_sequence_length (rtx insns);
static int sequent_regs_live (void);
static const char *ptrreg_to_str (int);
static const char *cond_string (enum rtx_code);
static int avr_num_arg_regs (enum machine_mode, const_tree);
static int avr_operand_rtx_cost (rtx, enum machine_mode, enum rtx_code,
                                 int, bool);
static void output_reload_in_const (rtx*, rtx, int*, bool);
static struct machine_function * avr_init_machine_status (void);


/* Prototypes for hook implementors if needed before their implementation.  */

static bool avr_rtx_costs (rtx, int, int, int, int *, bool);


/* Allocate registers from r25 to r8 for parameters for function calls.
   26 is one past the highest argument register r25; allocation proceeds
   downward from there.  */
#define FIRST_CUM_REG 26
165
 
166
/* Implicit target register of LPM instruction (R0) */
/* The GTY(()) markers register these rtx roots with the garbage
   collector; each extern declaration is paired with its definition.  */
extern GTY(()) rtx lpm_reg_rtx;
rtx lpm_reg_rtx;

/* (Implicit) address register of LPM instruction (R31:R30 = Z) */
extern GTY(()) rtx lpm_addr_reg_rtx;
rtx lpm_addr_reg_rtx;

/* Temporary register RTX (reg:QI TMP_REGNO) */
extern GTY(()) rtx tmp_reg_rtx;
rtx tmp_reg_rtx;

/* Zeroed register RTX (reg:QI ZERO_REGNO) */
extern GTY(()) rtx zero_reg_rtx;
rtx zero_reg_rtx;

/* RTXs for all general purpose registers as QImode */
extern GTY(()) rtx all_regs_rtx[32];
rtx all_regs_rtx[32];

/* SREG, the processor status */
extern GTY(()) rtx sreg_rtx;
rtx sreg_rtx;

/* RAMP* special function registers */
extern GTY(()) rtx rampd_rtx;
extern GTY(()) rtx rampx_rtx;
extern GTY(()) rtx rampy_rtx;
extern GTY(()) rtx rampz_rtx;
rtx rampd_rtx;
rtx rampx_rtx;
rtx rampy_rtx;
rtx rampz_rtx;

/* RTX containing the strings "" and "e", respectively */
static GTY(()) rtx xstring_empty;
static GTY(()) rtx xstring_e;

/* Preprocessor macros to define depending on MCU type.  */
const char *avr_extra_arch_macro;

/* Current architecture.  */
const struct base_arch_s *avr_current_arch;

/* Current device.  */
const struct mcu_type_s *avr_current_device;

/* Section to put switch tables in.  */
static GTY(()) section *progmem_swtable_section;

/* Unnamed sections associated to __attribute__((progmem)) aka. PROGMEM
   or to address space __flash*.  */
static GTY(()) section *progmem_section[6];

/* Condition for insns/expanders from avr-dimode.md.  */
bool avr_have_dimode = true;

/* To track if code will use .bss and/or .data.  */
bool avr_need_clear_bss_p = false;
bool avr_need_copy_data_p = false;
226
 
227
 
228
 
229
/* Count the number of 1-bits in VAL.  */

static inline int
avr_popcount (unsigned int val)
{
  int n_ones = 0;

  /* Kernighan's method: each iteration clears the lowest set bit,
     so the loop runs once per set bit.  */
  for (; val != 0; val &= val - 1)
    n_ones++;

  return n_ones;
}
244
 
245
 
246
/* Constraint helper function.  XVAL is a CONST_INT or a CONST_DOUBLE.
   Return true if the least significant N_BYTES bytes of XVAL all have a
   popcount in POP_MASK and false, otherwise.  POP_MASK represents a subset
   of integers which contains an integer N iff bit N of POP_MASK is set.  */

bool
avr_popcount_each_byte (rtx xval, int n_bytes, int pop_mask)
{
  int i;

  enum machine_mode mode = GET_MODE (xval);

  /* CONST_INTs carry VOIDmode; treat them as SImode so that byte
     subregs can be extracted below.  */
  if (VOIDmode == mode)
    mode = SImode;

  for (i = 0; i < n_bytes; i++)
    {
      /* Extract byte I of the constant and mask it down to 8 bits.  */
      rtx xval8 = simplify_gen_subreg (QImode, xval, mode, i);
      unsigned int val8 = UINTVAL (xval8) & GET_MODE_MASK (QImode);

      /* Reject as soon as one byte's popcount is outside POP_MASK.  */
      if (0 == (pop_mask & (1 << avr_popcount (val8))))
        return false;
    }

  return true;
}
272
 
273
/* Option-override hook: tweak generic flags for AVR and cache the
   per-device architecture data and SFR addresses used throughout this
   file.  (Presumably installed as TARGET_OPTION_OVERRIDE — the hook
   table is outside this chunk; confirm.)  */

static void
avr_option_override (void)
{
  /* On AVR, address 0 (r0 / I/O space) is a valid address.  */
  flag_delete_null_pointer_checks = 0;

  /* caller-save.c looks for call-clobbered hard registers that are assigned
     to pseudos that cross calls and tries to save-restore them around calls
     in order to reduce the number of stack slots needed.

     This might lead to situations where reload is no more able to cope
     with the challenge of AVR's very few address registers and fails to
     perform the requested spills.  */

  if (avr_strict_X)
    flag_caller_saves = 0;

  /* Unwind tables currently require a frame pointer for correctness,
     see toplev.c:process_options().  */

  if ((flag_unwind_tables
       || flag_non_call_exceptions
       || flag_asynchronous_unwind_tables)
      && !ACCUMULATE_OUTGOING_ARGS)
    {
      flag_omit_frame_pointer = 0;
    }

  avr_current_device = &avr_mcu_types[avr_mcu_index];
  avr_current_arch = &avr_arch_types[avr_current_device->arch];
  avr_extra_arch_macro = avr_current_device->macro;

  /* RAM addresses of some SFRs common to all Devices in respective Arch.
     Each is the architecture-independent I/O address plus sfr_offset.  */

  /* SREG: Status Register containing flags like I (global IRQ) */
  avr_addr.sreg = 0x3F + avr_current_arch->sfr_offset;

  /* RAMPZ: Address' high part when loading via ELPM */
  avr_addr.rampz = 0x3B + avr_current_arch->sfr_offset;

  avr_addr.rampy = 0x3A + avr_current_arch->sfr_offset;
  avr_addr.rampx = 0x39 + avr_current_arch->sfr_offset;
  avr_addr.rampd = 0x38 + avr_current_arch->sfr_offset;
  avr_addr.ccp = 0x34 + avr_current_arch->sfr_offset;

  /* SP: Stack Pointer (SP_H:SP_L) */
  avr_addr.sp_l = 0x3D + avr_current_arch->sfr_offset;
  avr_addr.sp_h = avr_addr.sp_l + 1;

  init_machine_status = avr_init_machine_status;

  avr_log_set_avr_log();
}
325
 
326
/* Function to set up the backend function structure.
   Installed as init_machine_status in avr_option_override; returns a
   zero-initialized, GC-allocated machine_function.  */

static struct machine_function *
avr_init_machine_status (void)
{
  return ggc_alloc_cleared_machine_function ();
}
333
 
334
 
335
/* Implement `INIT_EXPANDERS'.  */
/* The function works like a singleton: it (re)creates the shared rtx
   objects for all 32 GPRs, the fixed helper registers, and the SFR
   memory references whose addresses were computed in
   avr_option_override.  */

void
avr_init_expanders (void)
{
  int regno;

  /* One QImode REG rtx per general purpose register r0..r31.  */
  for (regno = 0; regno < 32; regno ++)
    all_regs_rtx[regno] = gen_rtx_REG (QImode, regno);

  lpm_reg_rtx  = all_regs_rtx[LPM_REGNO];
  tmp_reg_rtx  = all_regs_rtx[TMP_REGNO];
  zero_reg_rtx = all_regs_rtx[ZERO_REGNO];

  lpm_addr_reg_rtx = gen_rtx_REG (HImode, REG_Z);

  /* MEM rtxes addressing the SFRs by their RAM addresses.  */
  sreg_rtx = gen_rtx_MEM (QImode, GEN_INT (avr_addr.sreg));
  rampd_rtx = gen_rtx_MEM (QImode, GEN_INT (avr_addr.rampd));
  rampx_rtx = gen_rtx_MEM (QImode, GEN_INT (avr_addr.rampx));
  rampy_rtx = gen_rtx_MEM (QImode, GEN_INT (avr_addr.rampy));
  rampz_rtx = gen_rtx_MEM (QImode, GEN_INT (avr_addr.rampz));

  xstring_empty = gen_rtx_CONST_STRING (VOIDmode, "");
  xstring_e = gen_rtx_CONST_STRING (VOIDmode, "e");
}
361
 
362
 
363
/* Return register class for register R.
   The table below has one entry per hard register 0..33 (32 GPRs plus
   SPL/SPH), which is why the bound check is `r <= 33'; anything outside
   that range falls back to ALL_REGS.  */

enum reg_class
avr_regno_reg_class (int r)
{
  static const enum reg_class reg_class_tab[] =
    {
      /* r0 */
      R0_REG,
      /* r1 - r15 */
      NO_LD_REGS, NO_LD_REGS, NO_LD_REGS,
      NO_LD_REGS, NO_LD_REGS, NO_LD_REGS, NO_LD_REGS,
      NO_LD_REGS, NO_LD_REGS, NO_LD_REGS, NO_LD_REGS,
      NO_LD_REGS, NO_LD_REGS, NO_LD_REGS, NO_LD_REGS,
      /* r16 - r23 */
      SIMPLE_LD_REGS, SIMPLE_LD_REGS, SIMPLE_LD_REGS, SIMPLE_LD_REGS,
      SIMPLE_LD_REGS, SIMPLE_LD_REGS, SIMPLE_LD_REGS, SIMPLE_LD_REGS,
      /* r24, r25 */
      ADDW_REGS, ADDW_REGS,
      /* X: r26, 27 */
      POINTER_X_REGS, POINTER_X_REGS,
      /* Y: r28, r29 */
      POINTER_Y_REGS, POINTER_Y_REGS,
      /* Z: r30, r31 */
      POINTER_Z_REGS, POINTER_Z_REGS,
      /* SP: SPL, SPH */
      STACK_REG, STACK_REG
    };

  if (r <= 33)
    return reg_class_tab[r];

  return ALL_REGS;
}
396
 
397
 
398
static bool
399
avr_scalar_mode_supported_p (enum machine_mode mode)
400
{
401
  if (PSImode == mode)
402
    return true;
403
 
404
  return default_scalar_mode_supported_p (mode);
405
}
406
 
407
 
408
/* Return TRUE if DECL is a VAR_DECL located in Flash and FALSE, otherwise.  */
409
 
410
static bool
411
avr_decl_flash_p (tree decl)
412
{
413
  if (TREE_CODE (decl) != VAR_DECL
414
      || TREE_TYPE (decl) == error_mark_node)
415
    {
416
      return false;
417
    }
418
 
419
  return !ADDR_SPACE_GENERIC_P (TYPE_ADDR_SPACE (TREE_TYPE (decl)));
420
}
421
 
422
 
423
/* Return TRUE if DECL is a VAR_DECL located in the 24-bit Flash
424
   address space and FALSE, otherwise.  */
425
 
426
static bool
427
avr_decl_memx_p (tree decl)
428
{
429
  if (TREE_CODE (decl) != VAR_DECL
430
      || TREE_TYPE (decl) == error_mark_node)
431
    {
432
      return false;
433
    }
434
 
435
  return (ADDR_SPACE_MEMX == TYPE_ADDR_SPACE (TREE_TYPE (decl)));
436
}
437
 
438
 
439
/* Return TRUE if X is a MEM rtx located in Flash and FALSE, otherwise.  */
440
 
441
bool
442
avr_mem_flash_p (rtx x)
443
{
444
  return (MEM_P (x)
445
          && !ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (x)));
446
}
447
 
448
 
449
/* Return TRUE if X is a MEM rtx located in the 24-bit Flash
450
   address space and FALSE, otherwise.  */
451
 
452
bool
453
avr_mem_memx_p (rtx x)
454
{
455
  return (MEM_P (x)
456
          && ADDR_SPACE_MEMX == MEM_ADDR_SPACE (x));
457
}
458
 
459
 
460
/* A helper for the subsequent function attribute used to dig for
   attribute 'name' in a FUNCTION_DECL or FUNCTION_TYPE.
   Returns nonzero iff the attribute NAME is present either on the
   declaration itself or on its function type.  */

static inline int
avr_lookup_function_attribute1 (const_tree func, const char *name)
{
  if (FUNCTION_DECL == TREE_CODE (func))
    {
      /* Attribute attached directly to the declaration.  */
      if (NULL_TREE != lookup_attribute (name, DECL_ATTRIBUTES (func)))
        {
          return true;
        }

      /* Fall through to the function's type.  */
      func = TREE_TYPE (func);
    }

  gcc_assert (TREE_CODE (func) == FUNCTION_TYPE
              || TREE_CODE (func) == METHOD_TYPE);

  return NULL_TREE != lookup_attribute (name, TYPE_ATTRIBUTES (func));
}
481
 
482
/* Return nonzero if FUNC is a naked function.
   The following five predicates are thin wrappers around
   avr_lookup_function_attribute1 for the attributes this backend
   recognizes.  */

static int
avr_naked_function_p (tree func)
{
  return avr_lookup_function_attribute1 (func, "naked");
}

/* Return nonzero if FUNC is an interrupt function as specified
   by the "interrupt" attribute.  */

static int
interrupt_function_p (tree func)
{
  return avr_lookup_function_attribute1 (func, "interrupt");
}

/* Return nonzero if FUNC is a signal function as specified
   by the "signal" attribute.  */

static int
signal_function_p (tree func)
{
  return avr_lookup_function_attribute1 (func, "signal");
}

/* Return nonzero if FUNC is an OS_task function.  */

static int
avr_OS_task_function_p (tree func)
{
  return avr_lookup_function_attribute1 (func, "OS_task");
}

/* Return nonzero if FUNC is an OS_main function.  */

static int
avr_OS_main_function_p (tree func)
{
  return avr_lookup_function_attribute1 (func, "OS_main");
}
523
 
524
 
525
/* Implement `ACCUMULATE_OUTGOING_ARGS'.  */
526
 
527
int
528
avr_accumulate_outgoing_args (void)
529
{
530
  if (!cfun)
531
    return TARGET_ACCUMULATE_OUTGOING_ARGS;
532
 
533
  /* FIXME: For setjmp and in avr_builtin_setjmp_frame_value we don't know
534
        what offset is correct.  In some cases it is relative to
535
        virtual_outgoing_args_rtx and in others it is relative to
536
        virtual_stack_vars_rtx.  For example code see
537
            gcc.c-torture/execute/built-in-setjmp.c
538
            gcc.c-torture/execute/builtins/sprintf-chk.c   */
539
 
540
  return (TARGET_ACCUMULATE_OUTGOING_ARGS
541
          && !(cfun->calls_setjmp
542
               || cfun->has_nonlocal_label));
543
}
544
 
545
 
546
/* Report contribution of accumulated outgoing arguments to stack size.  */
547
 
548
static inline int
549
avr_outgoing_args_size (void)
550
{
551
  return ACCUMULATE_OUTGOING_ARGS ? crtl->outgoing_args_size : 0;
552
}
553
 
554
 
555
/* Implement `STARTING_FRAME_OFFSET'.  */
/* This is the offset from the frame pointer register to the first stack slot
   that contains a variable living in the frame.  The extra 1 accounts for
   the byte the frame pointer itself points at.  */

int
avr_starting_frame_offset (void)
{
  return 1 + avr_outgoing_args_size ();
}
564
 
565
 
566
/* Return the number of hard registers to push/pop in the prologue/epilogue
   of the current function, and optionally store these registers in SET.
   SET may be NULL when only the count is needed.  */

static int
avr_regs_to_save (HARD_REG_SET *set)
{
  int reg, count;
  /* ISRs must additionally preserve call-used registers they touch.  */
  int int_or_sig_p = (interrupt_function_p (current_function_decl)
                      || signal_function_p (current_function_decl));

  if (set)
    CLEAR_HARD_REG_SET (*set);
  count = 0;

  /* No need to save any registers if the function never returns or
     has the "OS_task" or "OS_main" attribute.  */
  if (TREE_THIS_VOLATILE (current_function_decl)
      || cfun->machine->is_OS_task
      || cfun->machine->is_OS_main)
    return 0;

  for (reg = 0; reg < 32; reg++)
    {
      /* Do not push/pop __tmp_reg__, __zero_reg__, as well as
         any global register variables.  */
      if (fixed_regs[reg])
        continue;

      if ((int_or_sig_p && !current_function_is_leaf && call_used_regs[reg])
          || (df_regs_ever_live_p (reg)
              && (int_or_sig_p || !call_used_regs[reg])
              /* Don't record frame pointer registers here.  They are treated
                 individually in prologue.  */
              && !(frame_pointer_needed
                   && (reg == REG_Y || reg == (REG_Y+1)))))
        {
          if (set)
            SET_HARD_REG_BIT (*set, reg);
          count++;
        }
    }
  return count;
}
609
 
610
/* Return true if register FROM can be eliminated via register TO.  */
611
 
612
static bool
613
avr_can_eliminate (const int from, const int to)
614
{
615
  return ((from == ARG_POINTER_REGNUM && to == FRAME_POINTER_REGNUM)
616
          || (frame_pointer_needed && to == FRAME_POINTER_REGNUM)
617
          || ((from == FRAME_POINTER_REGNUM
618
               || from == FRAME_POINTER_REGNUM + 1)
619
              && !frame_pointer_needed));
620
}
621
 
622
/* Compute offset between arg_pointer and frame_pointer.
   The offset covers the frame itself, accumulated outgoing args, the
   saved return address (2 or 3 bytes of PC), the saved frame pointer
   (when needed) and all pushed registers.  */

int
avr_initial_elimination_offset (int from, int to)
{
  if (from == FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
    return 0;
  else
    {
      /* 2 bytes for the saved frame pointer, if it was pushed.  */
      int offset = frame_pointer_needed ? 2 : 0;
      /* Return-address size on the stack: 3 bytes with EIJMP/EICALL.  */
      int avr_pc_size = AVR_HAVE_EIJMP_EICALL ? 3 : 2;

      offset += avr_regs_to_save (NULL);
      return (get_frame_size () + avr_outgoing_args_size()
              + avr_pc_size + 1 + offset);
    }
}
639
 
640
/* Actual start of frame is virtual_stack_vars_rtx this is offset from
   frame pointer by +STARTING_FRAME_OFFSET.
   Using saved frame = virtual_stack_vars_rtx - STARTING_FRAME_OFFSET
   avoids creating add/sub of offset in nonlocal goto and setjmp.  */

static rtx
avr_builtin_setjmp_frame_value (void)
{
  return gen_rtx_MINUS (Pmode, virtual_stack_vars_rtx,
                        gen_int_mode (STARTING_FRAME_OFFSET, Pmode));
}
651
 
652
/* Return contents of MEM at frame pointer + stack size + 1 (+2 if 3 byte PC).
   This is return address of function.
   COUNT must be 0 (only the current frame is supported); TEM is the
   frame pointer rtx to offset from.  */
rtx
avr_return_addr_rtx (int count, rtx tem)
{
  rtx r;

  /* Can only return this function's return address. Others not supported.  */
  if (count)
     return NULL;

  if (AVR_3_BYTE_PC)
    {
      /* Only the low 2 bytes of a 3-byte PC can be recovered here.  */
      r = gen_rtx_SYMBOL_REF (Pmode, ".L__stack_usage+2");
      warning (0, "'builtin_return_address' contains only 2 bytes of address");
    }
  else
    r = gen_rtx_SYMBOL_REF (Pmode, ".L__stack_usage+1");

  r = gen_rtx_PLUS (Pmode, tem, r);
  r = gen_frame_mem (Pmode, memory_address (Pmode, r));
  /* The return address is stored big-endian on the stack; the byte swap
     is expressed as a rotate by 8.  */
  r = gen_rtx_ROTATE (HImode, r, GEN_INT (8));
  return  r;
}
676
 
677
/* Return 1 if the function epilogue is just a single "ret".  */
678
 
679
int
680
avr_simple_epilogue (void)
681
{
682
  return (! frame_pointer_needed
683
          && get_frame_size () == 0
684
          && avr_outgoing_args_size() == 0
685
          && avr_regs_to_save (NULL) == 0
686
          && ! interrupt_function_p (current_function_decl)
687
          && ! signal_function_p (current_function_decl)
688
          && ! avr_naked_function_p (current_function_decl)
689
          && ! TREE_THIS_VOLATILE (current_function_decl));
690
}
691
 
692
/* This function checks sequence of live registers.
   Counts the live call-saved registers among r0..r17 plus the Y pair
   (or unconditionally counts Y when a frame pointer is needed).
   Returns that count only if the live registers form one unbroken run
   ending at the Y pair (cur_seq == live_seq), otherwise 0 — the layout
   required by the __prologue_saves__ helper.  */

static int
sequent_regs_live (void)
{
  int reg;
  int live_seq=0;   /* total live registers counted  */
  int cur_seq=0;    /* length of the current unbroken run  */

  for (reg = 0; reg < 18; ++reg)
    {
      if (fixed_regs[reg])
        {
          /* Don't recognize sequences that contain global register
             variables.  */

          if (live_seq != 0)
            return 0;
          else
            continue;
        }

      if (!call_used_regs[reg])
        {
          if (df_regs_ever_live_p (reg))
            {
              ++live_seq;
              ++cur_seq;
            }
          else
            /* A dead register breaks the current run.  */
            cur_seq = 0;
        }
    }

  if (!frame_pointer_needed)
    {
      if (df_regs_ever_live_p (REG_Y))
        {
          ++live_seq;
          ++cur_seq;
        }
      else
        cur_seq = 0;

      if (df_regs_ever_live_p (REG_Y+1))
        {
          ++live_seq;
          ++cur_seq;
        }
      else
        cur_seq = 0;
    }
  else
    {
      /* Frame pointer needed: Y (r28/r29) is saved unconditionally.  */
      cur_seq += 2;
      live_seq += 2;
    }
  return (cur_seq == live_seq) ? live_seq : 0;
}
751
 
752
/* Obtain the length sequence of insns.  */
753
 
754
int
755
get_sequence_length (rtx insns)
756
{
757
  rtx insn;
758
  int length;
759
 
760
  for (insn = insns, length = 0; insn; insn = NEXT_INSN (insn))
761
    length += get_attr_length (insn);
762
 
763
  return length;
764
}
765
 
766
/*  Implement INCOMING_RETURN_ADDR_RTX.  */

rtx
avr_incoming_return_addr_rtx (void)
{
  /* The return address is at the top of the stack.  Note that the push
     was via post-decrement, which means the actual address is off by one.  */
  return gen_frame_mem (HImode, plus_constant (stack_pointer_rtx, 1));
}
775
 
776
/*  Helper for expand_prologue.  Emit a push of a byte register.
    REGNO is the hard register to push; FRAME_RELATED_P marks the insn
    for dwarf2 CFI generation.  Also bumps the per-function
    stack_usage accounting by one byte.  */

static void
emit_push_byte (unsigned regno, bool frame_related_p)
{
  rtx mem, reg, insn;

  /* PUSH on AVR is a store through the post-decremented stack pointer.  */
  mem = gen_rtx_POST_DEC (HImode, stack_pointer_rtx);
  mem = gen_frame_mem (QImode, mem);
  reg = gen_rtx_REG (QImode, regno);

  insn = emit_insn (gen_rtx_SET (VOIDmode, mem, reg));
  if (frame_related_p)
    RTX_FRAME_RELATED_P (insn) = 1;

  cfun->machine->stack_usage++;
}
793
 
794
 
795
/*  Helper for expand_prologue.  Emit a push of a SFR via tmp_reg.
    SFR is a MEM representing the memory location of the SFR.
    If CLR_P then clear the SFR after the push using zero_reg.  */

static void
emit_push_sfr (rtx sfr, bool frame_related_p, bool clr_p)
{
  rtx insn;

  gcc_assert (MEM_P (sfr));

  /* IN __tmp_reg__, IO(SFR) */
  insn = emit_move_insn (tmp_reg_rtx, sfr);
  if (frame_related_p)
    RTX_FRAME_RELATED_P (insn) = 1;

  /* PUSH __tmp_reg__ */
  emit_push_byte (TMP_REGNO, frame_related_p);

  if (clr_p)
    {
      /* OUT IO(SFR), __zero_reg__ */
      insn = emit_move_insn (sfr, const0_rtx);
      if (frame_related_p)
        RTX_FRAME_RELATED_P (insn) = 1;
    }
}
822
 
823
/*  Helper for expand_prologue.  Save the call-saved registers in SET and
    set up a stack frame of SIZE bytes.  Either uses the __prologue_saves__
    library sequence (-mcall-prologues) when profitable, or emits explicit
    pushes plus a stack/frame-pointer adjustment, picking whichever of two
    equivalent adjustment sequences is shorter.  */

static void
avr_prologue_setup_frame (HOST_WIDE_INT size, HARD_REG_SET set)
{
  rtx insn;
  bool isr_p = cfun->machine->is_interrupt || cfun->machine->is_signal;
  int live_seq = sequent_regs_live ();

  /* The library prologue can only be used for ordinary functions that
     save a contiguous run of registers.  */
  bool minimize = (TARGET_CALL_PROLOGUES
                   && live_seq
                   && !isr_p
                   && !cfun->machine->is_OS_task
                   && !cfun->machine->is_OS_main);

  if (minimize
      && (frame_pointer_needed
          || avr_outgoing_args_size() > 8
          || (AVR_2_BYTE_PC && live_seq > 6)
          || live_seq > 7))
    {
      rtx pattern;
      int first_reg, reg, offset;

      /* __prologue_saves__ expects the frame size in X (r26/r27).  */
      emit_move_insn (gen_rtx_REG (HImode, REG_X),
                      gen_int_mode (size, HImode));

      pattern = gen_call_prologue_saves (gen_int_mode (live_seq, HImode),
                                         gen_int_mode (live_seq+size, HImode));
      insn = emit_insn (pattern);
      RTX_FRAME_RELATED_P (insn) = 1;

      /* Describe the effect of the unspec_volatile call to prologue_saves.
         Note that this formulation assumes that add_reg_note pushes the
         notes to the front.  Thus we build them in the reverse order of
         how we want dwarf2out to process them.  */

      /* The function does always set frame_pointer_rtx, but whether that
         is going to be permanent in the function is frame_pointer_needed.  */

      add_reg_note (insn, REG_CFA_ADJUST_CFA,
                    gen_rtx_SET (VOIDmode, (frame_pointer_needed
                                            ? frame_pointer_rtx
                                            : stack_pointer_rtx),
                                 plus_constant (stack_pointer_rtx,
                                                -(size + live_seq))));

      /* Note that live_seq always contains r28+r29, but the other
         registers to be saved are all below 18.  */

      first_reg = 18 - (live_seq - 2);

      for (reg = 29, offset = -live_seq + 1;
           reg >= first_reg;
           reg = (reg == 28 ? 17 : reg - 1), ++offset)
        {
          rtx m, r;

          m = gen_rtx_MEM (QImode, plus_constant (stack_pointer_rtx, offset));
          r = gen_rtx_REG (QImode, reg);
          add_reg_note (insn, REG_CFA_OFFSET, gen_rtx_SET (VOIDmode, m, r));
        }

      cfun->machine->stack_usage += size + live_seq;
    }
  else /* !minimize */
    {
      int reg;

      /* Push each register in SET individually.  */
      for (reg = 0; reg < 32; ++reg)
        if (TEST_HARD_REG_BIT (set, reg))
          emit_push_byte (reg, true);

      if (frame_pointer_needed
          && (!(cfun->machine->is_OS_task || cfun->machine->is_OS_main)))
        {
          /* Push frame pointer.  Always be consistent about the
             ordering of pushes -- epilogue_restores expects the
             register pair to be pushed low byte first.  */

          emit_push_byte (REG_Y, true);
          emit_push_byte (REG_Y + 1, true);
        }

      if (frame_pointer_needed
          && size == 0)
        {
          insn = emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
          RTX_FRAME_RELATED_P (insn) = 1;
        }

      if (size != 0)
        {
          /*  Creating a frame can be done by direct manipulation of the
              stack or via the frame pointer. These two methods are:
                  fp =  sp
                  fp -= size
                  sp =  fp
              or
                  sp -= size
                  fp =  sp    (*)
              the optimum method depends on function type, stack and
              frame size.  To avoid a complex logic, both methods are
              tested and shortest is selected.

              There is also the case where SIZE != 0 and no frame pointer is
              needed; this can occur if ACCUMULATE_OUTGOING_ARGS is on.
              In that case, insn (*) is not needed.
              We use the X register as scratch. This is safe because X
              is call-clobbered.
                 In an interrupt routine, the case of SIZE != 0 together with
              !frame_pointer_needed can only occur if the function is not a
              leaf function and thus X has already been saved.  */

          int irq_state = -1;
          rtx fp_plus_insns, fp, my_fp;

          gcc_assert (frame_pointer_needed
                      || !isr_p
                      || !current_function_is_leaf);

          fp = my_fp = (frame_pointer_needed
                        ? frame_pointer_rtx
                        : gen_rtx_REG (Pmode, REG_X));

          if (AVR_HAVE_8BIT_SP)
            {
              /* The high byte (r29) does not change:
                 Prefer SUBI (1 cycle) over SBIW (2 cycles, same size).  */

              my_fp = all_regs_rtx[FRAME_POINTER_REGNUM];
            }

          /************  Method 1: Adjust frame pointer  ************/

          start_sequence ();

          /* Normally, the dwarf2out frame-related-expr interpreter does
             not expect to have the CFA change once the frame pointer is
             set up.  Thus, we avoid marking the move insn below and
             instead indicate that the entire operation is complete after
             the frame pointer subtraction is done.  */

          insn = emit_move_insn (fp, stack_pointer_rtx);
          if (frame_pointer_needed)
            {
              RTX_FRAME_RELATED_P (insn) = 1;
              add_reg_note (insn, REG_CFA_ADJUST_CFA,
                            gen_rtx_SET (VOIDmode, fp, stack_pointer_rtx));
            }

          insn = emit_move_insn (my_fp, plus_constant (my_fp, -size));
          if (frame_pointer_needed)
            {
              RTX_FRAME_RELATED_P (insn) = 1;
              add_reg_note (insn, REG_CFA_ADJUST_CFA,
                            gen_rtx_SET (VOIDmode, fp,
                                         plus_constant (fp, -size)));
            }

          /* Copy to stack pointer.  Note that since we've already
             changed the CFA to the frame pointer this operation
             need not be annotated if frame pointer is needed.
             Always move through unspec, see PR50063.
             For meaning of irq_state see movhi_sp_r insn.  */

          if (cfun->machine->is_interrupt)
            irq_state = 1;

          if (TARGET_NO_INTERRUPTS
              || cfun->machine->is_signal
              || cfun->machine->is_OS_main)
            irq_state = 0;

          if (AVR_HAVE_8BIT_SP)
            irq_state = 2;

          insn = emit_insn (gen_movhi_sp_r (stack_pointer_rtx,
                                            fp, GEN_INT (irq_state)));
          if (!frame_pointer_needed)
            {
              RTX_FRAME_RELATED_P (insn) = 1;
              add_reg_note (insn, REG_CFA_ADJUST_CFA,
                            gen_rtx_SET (VOIDmode, stack_pointer_rtx,
                                         plus_constant (stack_pointer_rtx,
                                                        -size)));
            }

          fp_plus_insns = get_insns ();
          end_sequence ();

          /************  Method 2: Adjust Stack pointer  ************/

          /* Stack adjustment by means of RCALL . and/or PUSH __TMP_REG__
             can only handle specific offsets.  */

          if (avr_sp_immediate_operand (gen_int_mode (-size, HImode), HImode))
            {
              rtx sp_plus_insns;

              start_sequence ();

              insn = emit_move_insn (stack_pointer_rtx,
                                     plus_constant (stack_pointer_rtx, -size));
              RTX_FRAME_RELATED_P (insn) = 1;
              add_reg_note (insn, REG_CFA_ADJUST_CFA,
                            gen_rtx_SET (VOIDmode, stack_pointer_rtx,
                                         plus_constant (stack_pointer_rtx,
                                                        -size)));
              if (frame_pointer_needed)
                {
                  insn = emit_move_insn (fp, stack_pointer_rtx);
                  RTX_FRAME_RELATED_P (insn) = 1;
                }

              sp_plus_insns = get_insns ();
              end_sequence ();

              /************ Use shortest method  ************/

              emit_insn (get_sequence_length (sp_plus_insns)
                         < get_sequence_length (fp_plus_insns)
                         ? sp_plus_insns
                         : fp_plus_insns);
            }
          else
            {
              emit_insn (fp_plus_insns);
            }

          cfun->machine->stack_usage += size;
        } /* !minimize && size != 0 */
    } /* !minimize */
}
1055
 
1056
 
1057
/*  Output function prologue.  Initializes cfun->machine from the current
    function's attributes, then (unless the function is naked) saves
    registers and sets up the frame.  For ISRs, additionally saves and
    initializes __zero_reg__, __tmp_reg__, SREG, and any RAMP registers
    the body may clobber.  */

void
expand_prologue (void)
{
  HARD_REG_SET set;
  HOST_WIDE_INT size;

  size = get_frame_size() + avr_outgoing_args_size();

  /* Init cfun->machine.  */
  cfun->machine->is_naked = avr_naked_function_p (current_function_decl);
  cfun->machine->is_interrupt = interrupt_function_p (current_function_decl);
  cfun->machine->is_signal = signal_function_p (current_function_decl);
  cfun->machine->is_OS_task = avr_OS_task_function_p (current_function_decl);
  cfun->machine->is_OS_main = avr_OS_main_function_p (current_function_decl);
  cfun->machine->stack_usage = 0;

  /* Prologue: naked.  */
  if (cfun->machine->is_naked)
    {
      return;
    }

  avr_regs_to_save (&set);

  if (cfun->machine->is_interrupt || cfun->machine->is_signal)
    {
      /* Enable interrupts.  */
      if (cfun->machine->is_interrupt)
        emit_insn (gen_enable_interrupt ());

      /* Push zero reg.  */
      emit_push_byte (ZERO_REGNO, true);

      /* Push tmp reg.  */
      emit_push_byte (TMP_REGNO, true);

      /* Push SREG.  */
      /* ??? There's no dwarf2 column reserved for SREG.  */
      emit_push_sfr (sreg_rtx, false, false /* clr */);

      /* Clear zero reg.  */
      emit_move_insn (zero_reg_rtx, const0_rtx);

      /* Prevent any attempt to delete the setting of ZERO_REG!  */
      emit_use (zero_reg_rtx);

      /* Push and clear RAMPD/X/Y/Z if present and low-part register is used.
         ??? There are no dwarf2 columns reserved for RAMPD/X/Y/Z.  */

      if (AVR_HAVE_RAMPD)
        emit_push_sfr (rampd_rtx, false /* frame-related */, true /* clr */);

      if (AVR_HAVE_RAMPX
          && TEST_HARD_REG_BIT (set, REG_X)
          && TEST_HARD_REG_BIT (set, REG_X + 1))
        {
          emit_push_sfr (rampx_rtx, false /* frame-related */, true /* clr */);
        }

      if (AVR_HAVE_RAMPY
          && (frame_pointer_needed
              || (TEST_HARD_REG_BIT (set, REG_Y)
                  && TEST_HARD_REG_BIT (set, REG_Y + 1))))
        {
          emit_push_sfr (rampy_rtx, false /* frame-related */, true /* clr */);
        }

      if (AVR_HAVE_RAMPZ
          && TEST_HARD_REG_BIT (set, REG_Z)
          && TEST_HARD_REG_BIT (set, REG_Z + 1))
        {
          emit_push_sfr (rampz_rtx, false /* frame-related */, true /* clr */);
        }
    }  /* is_interrupt is_signal */

  avr_prologue_setup_frame (size, set);

  if (flag_stack_usage_info)
    current_function_static_stack_size = cfun->machine->stack_usage;
}
1139
 
1140
/* Output summary at end of function prologue.  */
1141
 
1142
static void
1143
avr_asm_function_end_prologue (FILE *file)
1144
{
1145
  if (cfun->machine->is_naked)
1146
    {
1147
      fputs ("/* prologue: naked */\n", file);
1148
    }
1149
  else
1150
    {
1151
      if (cfun->machine->is_interrupt)
1152
        {
1153
          fputs ("/* prologue: Interrupt */\n", file);
1154
        }
1155
      else if (cfun->machine->is_signal)
1156
        {
1157
          fputs ("/* prologue: Signal */\n", file);
1158
        }
1159
      else
1160
        fputs ("/* prologue: function */\n", file);
1161
    }
1162
 
1163
  if (ACCUMULATE_OUTGOING_ARGS)
1164
    fprintf (file, "/* outgoing args size = %d */\n",
1165
             avr_outgoing_args_size());
1166
 
1167
  fprintf (file, "/* frame size = " HOST_WIDE_INT_PRINT_DEC " */\n",
1168
                 get_frame_size());
1169
  fprintf (file, "/* stack size = %d */\n",
1170
                 cfun->machine->stack_usage);
1171
  /* Create symbol stack offset here so all functions have it. Add 1 to stack
1172
     usage for offset so that SP + .L__stack_offset = return address.  */
1173
  fprintf (file, ".L__stack_usage = %d\n", cfun->machine->stack_usage);
1174
}
1175
 
1176
 
1177
/* Implement EPILOGUE_USES.  */
1178
 
1179
int
1180
avr_epilogue_uses (int regno ATTRIBUTE_UNUSED)
1181
{
1182
  if (reload_completed
1183
      && cfun->machine
1184
      && (cfun->machine->is_interrupt || cfun->machine->is_signal))
1185
    return 1;
1186
  return 0;
1187
}
1188
 
1189
/*  Helper for expand_epilogue.  Emit a pop of a byte register.  */
1190
 
1191
static void
1192
emit_pop_byte (unsigned regno)
1193
{
1194
  rtx mem, reg;
1195
 
1196
  mem = gen_rtx_PRE_INC (HImode, stack_pointer_rtx);
1197
  mem = gen_frame_mem (QImode, mem);
1198
  reg = gen_rtx_REG (QImode, regno);
1199
 
1200
  emit_insn (gen_rtx_SET (VOIDmode, reg, mem));
1201
}
1202
 
1203
/*  Output RTL epilogue.  Mirror image of expand_prologue: tears down the
    frame (via __epilogue_restores__ when -mcall-prologues applies, or by
    the shorter of two stack-adjustment sequences), restores the frame
    pointer and saved registers, and for ISRs restores RAMP*/SREG and the
    fixed registers.  If SIBCALL_P, the final RET is omitted.  */

void
expand_epilogue (bool sibcall_p)
{
  int reg;
  int live_seq;
  HARD_REG_SET set;
  int minimize;
  HOST_WIDE_INT size;
  bool isr_p = cfun->machine->is_interrupt || cfun->machine->is_signal;

  size = get_frame_size() + avr_outgoing_args_size();

  /* epilogue: naked  */
  if (cfun->machine->is_naked)
    {
      gcc_assert (!sibcall_p);

      emit_jump_insn (gen_return ());
      return;
    }

  avr_regs_to_save (&set);
  live_seq = sequent_regs_live ();

  /* Must match the "minimize" condition used by the prologue.  */
  minimize = (TARGET_CALL_PROLOGUES
              && live_seq
              && !isr_p
              && !cfun->machine->is_OS_task
              && !cfun->machine->is_OS_main);

  if (minimize
      && (live_seq > 4
          || frame_pointer_needed
          || size))
    {
      /*  Get rid of frame.  */

      if (!frame_pointer_needed)
        {
          emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
        }

      if (size)
        {
          emit_move_insn (frame_pointer_rtx,
                          plus_constant (frame_pointer_rtx, size));
        }

      emit_insn (gen_epilogue_restores (gen_int_mode (live_seq, HImode)));
      return;
    }

  if (size)
    {
      /* Try two methods to adjust stack and select shortest.  */

      int irq_state = -1;
      rtx fp, my_fp;
      rtx fp_plus_insns;

      gcc_assert (frame_pointer_needed
                  || !isr_p
                  || !current_function_is_leaf);

      fp = my_fp = (frame_pointer_needed
                    ? frame_pointer_rtx
                    : gen_rtx_REG (Pmode, REG_X));

      if (AVR_HAVE_8BIT_SP)
        {
          /* The high byte (r29) does not change:
             Prefer SUBI (1 cycle) over SBIW (2 cycles).  */

          my_fp = all_regs_rtx[FRAME_POINTER_REGNUM];
        }

      /********** Method 1: Adjust fp register  **********/

      start_sequence ();

      if (!frame_pointer_needed)
        emit_move_insn (fp, stack_pointer_rtx);

      emit_move_insn (my_fp, plus_constant (my_fp, size));

      /* Copy to stack pointer.  */

      if (TARGET_NO_INTERRUPTS)
        irq_state = 0;

      if (AVR_HAVE_8BIT_SP)
        irq_state = 2;

      emit_insn (gen_movhi_sp_r (stack_pointer_rtx, fp,
                                 GEN_INT (irq_state)));

      fp_plus_insns = get_insns ();
      end_sequence ();

      /********** Method 2: Adjust Stack pointer  **********/

      if (avr_sp_immediate_operand (gen_int_mode (size, HImode), HImode))
        {
          rtx sp_plus_insns;

          start_sequence ();

          emit_move_insn (stack_pointer_rtx,
                          plus_constant (stack_pointer_rtx, size));

          sp_plus_insns = get_insns ();
          end_sequence ();

          /************ Use shortest method  ************/

          emit_insn (get_sequence_length (sp_plus_insns)
                     < get_sequence_length (fp_plus_insns)
                     ? sp_plus_insns
                     : fp_plus_insns);
        }
      else
        emit_insn (fp_plus_insns);
    } /* size != 0 */

  if (frame_pointer_needed
      && !(cfun->machine->is_OS_task || cfun->machine->is_OS_main))
    {
      /* Restore previous frame_pointer.  See expand_prologue for
         rationale for not using pophi.  */

      emit_pop_byte (REG_Y + 1);
      emit_pop_byte (REG_Y);
    }

  /* Restore used registers.  */

  for (reg = 31; reg >= 0; --reg)
    if (TEST_HARD_REG_BIT (set, reg))
      emit_pop_byte (reg);

  if (isr_p)
    {
      /* Restore RAMPZ/Y/X/D using tmp_reg as scratch.
         The conditions to restore them must be the same as in prologue.  */

      if (AVR_HAVE_RAMPX
          && TEST_HARD_REG_BIT (set, REG_X)
          && TEST_HARD_REG_BIT (set, REG_X + 1))
        {
          emit_pop_byte (TMP_REGNO);
          emit_move_insn (rampx_rtx, tmp_reg_rtx);
        }

      if (AVR_HAVE_RAMPY
          && (frame_pointer_needed
              || (TEST_HARD_REG_BIT (set, REG_Y)
                  && TEST_HARD_REG_BIT (set, REG_Y + 1))))
        {
          emit_pop_byte (TMP_REGNO);
          emit_move_insn (rampy_rtx, tmp_reg_rtx);
        }

      if (AVR_HAVE_RAMPZ
          && TEST_HARD_REG_BIT (set, REG_Z)
          && TEST_HARD_REG_BIT (set, REG_Z + 1))
        {
          emit_pop_byte (TMP_REGNO);
          emit_move_insn (rampz_rtx, tmp_reg_rtx);
        }

      if (AVR_HAVE_RAMPD)
        {
          emit_pop_byte (TMP_REGNO);
          emit_move_insn (rampd_rtx, tmp_reg_rtx);
        }

      /* Restore SREG using tmp_reg as scratch.  */

      emit_pop_byte (TMP_REGNO);
      emit_move_insn (sreg_rtx, tmp_reg_rtx);

      /* Restore tmp REG.  */
      emit_pop_byte (TMP_REGNO);

      /* Restore zero REG.  */
      emit_pop_byte (ZERO_REGNO);
    }

  if (!sibcall_p)
    emit_jump_insn (gen_return ());
}
1396
 
1397
/* Output summary messages at beginning of function epilogue.  */

static void
avr_asm_function_begin_epilogue (FILE *file)
{
  fputs ("/* epilogue start */\n", file);
}
1404
 
1405
 
1406
/* Implement TARGET_CANNOT_MODITY_JUMPS_P */
1407
 
1408
static bool
1409
avr_cannot_modify_jumps_p (void)
1410
{
1411
 
1412
  /* Naked Functions must not have any instructions after
1413
     their epilogue, see PR42240 */
1414
 
1415
  if (reload_completed
1416
      && cfun->machine
1417
      && cfun->machine->is_naked)
1418
    {
1419
      return true;
1420
    }
1421
 
1422
  return false;
1423
}
1424
 
1425
 
1426
/* Helper function for `avr_legitimate_address_p'.
   Return true if REG is a register usable as a base address in address
   space AS under outer code OUTER_CODE.  When !STRICT, any pseudo
   register is also accepted (it may yet be reloaded into a hard base
   register).  */

static inline bool
avr_reg_ok_for_addr_p (rtx reg, addr_space_t as,
                       RTX_CODE outer_code, bool strict)
{
  return (REG_P (reg)
          && (avr_regno_mode_code_ok_for_base_p (REGNO (reg), QImode,
                                                 as, outer_code, UNKNOWN)
              || (!strict
                  && REGNO (reg) >= FIRST_PSEUDO_REGISTER)));
}
1438
 
1439
 
1440
/* Return nonzero if X (an RTX) is a legitimate memory address on the target
   machine for a memory operand of mode MODE.  Accepts constant addresses,
   plain base registers, post-increment/pre-decrement of a base register,
   and base+constant-offset within the LD/ST displacement range.  */

static bool
avr_legitimate_address_p (enum machine_mode mode, rtx x, bool strict)
{
  bool ok = CONSTANT_ADDRESS_P (x);

  switch (GET_CODE (x))
    {
    case REG:
      ok = avr_reg_ok_for_addr_p (x, ADDR_SPACE_GENERIC,
                                  MEM, strict);

      /* X (r26/r27) has no displacement addressing, so it cannot serve
         as base for a DImode (8-byte) access after reload.  */
      if (strict
          && DImode == mode
          && REG_X == REGNO (x))
        {
          ok = false;
        }
      break;

    case POST_INC:
    case PRE_DEC:
      ok = avr_reg_ok_for_addr_p (XEXP (x, 0), ADDR_SPACE_GENERIC,
                                  GET_CODE (x), strict);
      break;

    case PLUS:
      {
        rtx reg = XEXP (x, 0);
        rtx op1 = XEXP (x, 1);

        if (REG_P (reg)
            && CONST_INT_P (op1)
            && INTVAL (op1) >= 0)
          {
            /* Offset must fit the LDD/STD displacement range for MODE.  */
            bool fit = IN_RANGE (INTVAL (op1), 0, MAX_LD_OFFSET (mode));

            if (fit)
              {
                ok = (! strict
                      || avr_reg_ok_for_addr_p (reg, ADDR_SPACE_GENERIC,
                                                PLUS, strict));

                if (reg == frame_pointer_rtx
                    || reg == arg_pointer_rtx)
                  {
                    ok = true;
                  }
              }
            else if (frame_pointer_needed
                     && reg == frame_pointer_rtx)
              {
                /* Out-of-range frame accesses are fixed up later.  */
                ok = true;
              }
          }
      }
      break;

    default:
      break;
    }

  if (avr_log.legitimate_address_p)
    {
      avr_edump ("\n%?: ret=%d, mode=%m strict=%d "
                 "reload_completed=%d reload_in_progress=%d %s:",
                 ok, mode, strict, reload_completed, reload_in_progress,
                 reg_renumber ? "(reg_renumber)" : "");

      if (GET_CODE (x) == PLUS
          && REG_P (XEXP (x, 0))
          && CONST_INT_P (XEXP (x, 1))
          && IN_RANGE (INTVAL (XEXP (x, 1)), 0, MAX_LD_OFFSET (mode))
          && reg_renumber)
        {
          avr_edump ("(r%d ---> r%d)", REGNO (XEXP (x, 0)),
                     true_regnum (XEXP (x, 0)));
        }

      avr_edump ("\n%r\n", x);
    }

  return ok;
}
1526
 
1527
 
1528
/* Former implementation of TARGET_LEGITIMIZE_ADDRESS,
   now only a helper for avr_addr_space_legitimize_address.  */
/* Attempts to replace X with a valid
   memory address for an operand of mode MODE.  Forces reg+reg sums and
   reg+big-offset sums into a fresh register; everything else is
   returned unchanged.  */

static rtx
avr_legitimize_address (rtx x, rtx oldx, enum machine_mode mode)
{
  bool big_offset_p = false;

  x = oldx;

  if (GET_CODE (oldx) == PLUS
      && REG_P (XEXP (oldx, 0)))
    {
      if (REG_P (XEXP (oldx, 1)))
        x = force_reg (GET_MODE (oldx), oldx);
      else if (CONST_INT_P (XEXP (oldx, 1)))
        {
          int offs = INTVAL (XEXP (oldx, 1));
          /* Frame-pointer offsets are left alone; others are forced into
             a register once they exceed the LD displacement range.  */
          if (frame_pointer_rtx != XEXP (oldx, 0)
              && offs > MAX_LD_OFFSET (mode))
            {
              big_offset_p = true;
              x = force_reg (GET_MODE (oldx), oldx);
            }
        }
    }

  if (avr_log.legitimize_address)
    {
      avr_edump ("\n%?: mode=%m\n %r\n", mode, oldx);

      if (x != oldx)
        avr_edump (" %s --> %r\n", big_offset_p ? "(big offset)" : "", x);
    }

  return x;
}
1567
 
1568
 
1569
/* Implement `LEGITIMIZE_RELOAD_ADDRESS'.  */
1570
/* This will allow register R26/27 to be used where it is no worse than normal
1571
   base pointers R28/29 or R30/31.  For example, if base offset is greater
1572
   than 63 bytes or for R++ or --R addressing.  */
1573
 
1574
rtx
1575
avr_legitimize_reload_address (rtx *px, enum machine_mode mode,
1576
                               int opnum, int type, int addr_type,
1577
                               int ind_levels ATTRIBUTE_UNUSED,
1578
                               rtx (*mk_memloc)(rtx,int))
1579
{
1580
  rtx x = *px;
1581
 
1582
  if (avr_log.legitimize_reload_address)
1583
    avr_edump ("\n%?:%m %r\n", mode, x);
1584
 
1585
  if (1 && (GET_CODE (x) == POST_INC
1586
            || GET_CODE (x) == PRE_DEC))
1587
    {
1588
      push_reload (XEXP (x, 0), XEXP (x, 0), &XEXP (x, 0), &XEXP (x, 0),
1589
                   POINTER_REGS, GET_MODE (x), GET_MODE (x), 0, 0,
1590
                   opnum, RELOAD_OTHER);
1591
 
1592
      if (avr_log.legitimize_reload_address)
1593
        avr_edump (" RCLASS.1 = %R\n IN = %r\n OUT = %r\n",
1594
                   POINTER_REGS, XEXP (x, 0), XEXP (x, 0));
1595
 
1596
      return x;
1597
    }
1598
 
1599
  if (GET_CODE (x) == PLUS
1600
      && REG_P (XEXP (x, 0))
1601
      && 0 == reg_equiv_constant (REGNO (XEXP (x, 0)))
1602
      && CONST_INT_P (XEXP (x, 1))
1603
      && INTVAL (XEXP (x, 1)) >= 1)
1604
    {
1605
      bool fit = INTVAL (XEXP (x, 1)) <= MAX_LD_OFFSET (mode);
1606
 
1607
      if (fit)
1608
        {
1609
          if (reg_equiv_address (REGNO (XEXP (x, 0))) != 0)
1610
            {
1611
              int regno = REGNO (XEXP (x, 0));
1612
              rtx mem = mk_memloc (x, regno);
1613
 
1614
              push_reload (XEXP (mem, 0), NULL_RTX, &XEXP (mem, 0), NULL,
1615
                           POINTER_REGS, Pmode, VOIDmode, 0, 0,
1616
                           1, addr_type);
1617
 
1618
              if (avr_log.legitimize_reload_address)
1619
                avr_edump (" RCLASS.2 = %R\n IN = %r\n OUT = %r\n",
1620
                           POINTER_REGS, XEXP (mem, 0), NULL_RTX);
1621
 
1622
              push_reload (mem, NULL_RTX, &XEXP (x, 0), NULL,
1623
                           BASE_POINTER_REGS, GET_MODE (x), VOIDmode, 0, 0,
1624
                           opnum, type);
1625
 
1626
              if (avr_log.legitimize_reload_address)
1627
                avr_edump (" RCLASS.2 = %R\n IN = %r\n OUT = %r\n",
1628
                           BASE_POINTER_REGS, mem, NULL_RTX);
1629
 
1630
              return x;
1631
            }
1632
        }
1633
      else if (! (frame_pointer_needed
1634
                  && XEXP (x, 0) == frame_pointer_rtx))
1635
        {
1636
          push_reload (x, NULL_RTX, px, NULL,
1637
                       POINTER_REGS, GET_MODE (x), VOIDmode, 0, 0,
1638
                       opnum, type);
1639
 
1640
          if (avr_log.legitimize_reload_address)
1641
            avr_edump (" RCLASS.3 = %R\n IN = %r\n OUT = %r\n",
1642
                       POINTER_REGS, x, NULL_RTX);
1643
 
1644
          return x;
1645
        }
1646
    }
1647
 
1648
  return NULL_RTX;
1649
}
1650
 
1651
 
1652
/* Helper function to print assembler resp. track instruction
1653
   sequence lengths.  Always return "".
1654
 
1655
   If PLEN == NULL:
1656
       Output assembler code from template TPL with operands supplied
1657
       by OPERANDS.  This is just forwarding to output_asm_insn.
1658
 
1659
   If PLEN != NULL:
1660
       If N_WORDS >= 0  Add N_WORDS to *PLEN.
1661
       If N_WORDS < 0   Set *PLEN to -N_WORDS.
1662
       Don't output anything.
1663
*/
1664
 
1665
static const char*
1666
avr_asm_len (const char* tpl, rtx* operands, int* plen, int n_words)
1667
{
1668
  if (NULL == plen)
1669
    {
1670
      output_asm_insn (tpl, operands);
1671
    }
1672
  else
1673
    {
1674
      if (n_words < 0)
1675
        *plen = -n_words;
1676
      else
1677
        *plen += n_words;
1678
    }
1679
 
1680
  return "";
1681
}
1682
 
1683
 
1684
/* Return a pointer register name as a string.  */
1685
 
1686
static const char *
1687
ptrreg_to_str (int regno)
1688
{
1689
  switch (regno)
1690
    {
1691
    case REG_X: return "X";
1692
    case REG_Y: return "Y";
1693
    case REG_Z: return "Z";
1694
    default:
1695
      output_operand_lossage ("address operand requires constraint for"
1696
                              " X, Y, or Z register");
1697
    }
1698
  return NULL;
1699
}
1700
 
1701
/* Return the condition name as a string.
   Used in conditional jump constructing.
   For GE/LT the result depends on whether the previous comparison left
   the overflow flag usable: if not (CC_OVERFLOW_UNUSABLE), branch on the
   sign flag instead ("pl"/"mi").  */

static const char *
cond_string (enum rtx_code code)
{
  switch (code)
    {
    case NE:
      return "ne";
    case EQ:
      return "eq";
    case GE:
      if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
        return "pl";
      else
        return "ge";
    case LT:
      if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
        return "mi";
      else
        return "lt";
    case GEU:
      return "sh";
    case LTU:
      return "lo";
    default:
      gcc_unreachable ();
    }

  /* Not reached; silences compilers that do not know gcc_unreachable.  */
  return "";
}
1733
 
1734
 
1735
/* Implement `TARGET_PRINT_OPERAND_ADDRESS'.  */
1736
/* Output ADDR to FILE as address.  */
1737
 
1738
static void
1739
avr_print_operand_address (FILE *file, rtx addr)
1740
{
1741
  switch (GET_CODE (addr))
1742
    {
1743
    case REG:
1744
      fprintf (file, ptrreg_to_str (REGNO (addr)));
1745
      break;
1746
 
1747
    case PRE_DEC:
1748
      fprintf (file, "-%s", ptrreg_to_str (REGNO (XEXP (addr, 0))));
1749
      break;
1750
 
1751
    case POST_INC:
1752
      fprintf (file, "%s+", ptrreg_to_str (REGNO (XEXP (addr, 0))));
1753
      break;
1754
 
1755
    default:
1756
      if (CONSTANT_ADDRESS_P (addr)
1757
          && text_segment_operand (addr, VOIDmode))
1758
        {
1759
          rtx x = addr;
1760
          if (GET_CODE (x) == CONST)
1761
            x = XEXP (x, 0);
1762
          if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x,1)) == CONST_INT)
1763
            {
1764
              /* Assembler gs() will implant word address. Make offset
1765
                 a byte offset inside gs() for assembler. This is
1766
                 needed because the more logical (constant+gs(sym)) is not
1767
                 accepted by gas. For 128K and lower devices this is ok.
1768
                 For large devices it will create a Trampoline to offset
1769
                 from symbol which may not be what the user really wanted.  */
1770
              fprintf (file, "gs(");
1771
              output_addr_const (file, XEXP (x,0));
1772
              fprintf (file, "+" HOST_WIDE_INT_PRINT_DEC ")",
1773
                       2 * INTVAL (XEXP (x, 1)));
1774
              if (AVR_3_BYTE_PC)
1775
                if (warning (0, "pointer offset from symbol maybe incorrect"))
1776
                  {
1777
                    output_addr_const (stderr, addr);
1778
                    fprintf(stderr,"\n");
1779
                  }
1780
            }
1781
          else
1782
            {
1783
              fprintf (file, "gs(");
1784
              output_addr_const (file, addr);
1785
              fprintf (file, ")");
1786
            }
1787
        }
1788
      else
1789
        output_addr_const (file, addr);
1790
    }
1791
}
1792
 
1793
 
1794
/* Implement `TARGET_PRINT_OPERAND_PUNCT_VALID_P'.  */
1795
 
1796
static bool
avr_print_operand_punct_valid_p (unsigned char code)
{
  /* Only the two punctuation codes used by the assembler templates
     ('~' for rjmp/rcall vs. jmp/call, '!' for eijmp/eicall) are valid.  */

  switch (code)
    {
    case '~':
    case '!':
      return true;

    default:
      return false;
    }
}
1801
 
1802
 
1803
/* Implement `TARGET_PRINT_OPERAND'.  */
1804
/* Output X as assembler operand to file FILE.
1805
   For a description of supported %-codes, see top of avr.md.  */
1806
 
1807
static void
avr_print_operand (FILE *file, rtx x, int code)
{
  /* Codes 'A'..'D' select byte 0..3 of a multi-byte operand:
     used below as an offset into reg_names / added to a constant.  */
  int abcd = 0;

  if (code >= 'A' && code <= 'D')
    abcd = code - 'A';

  if (code == '~')
    {
      /* Devices without JMP/CALL use the relative forms RJMP/RCALL.  */
      if (!AVR_HAVE_JMP_CALL)
        fputc ('r', file);
    }
  else if (code == '!')
    {
      /* Devices with EIJMP/EICALL get the 'e' prefix.  */
      if (AVR_HAVE_EIJMP_EICALL)
        fputc ('e', file);
    }
  else if (code == 't'
           || code == 'T')
    {
      /* %T/%t print a bit address in two steps: the first call (%T with a
         REG) latches the register and its width in the static variables
         below; the second call (a CONST_INT bit position) prints the
         covering byte register and, for %T, the bit number within it.  */
      static int t_regno = -1;
      static int t_nbits = -1;

      if (REG_P (x) && t_regno < 0 && code == 'T')
        {
          t_regno = REGNO (x);
          t_nbits = GET_MODE_BITSIZE (GET_MODE (x));
        }
      else if (CONST_INT_P (x) && t_regno >= 0
               && IN_RANGE (INTVAL (x), 0, t_nbits - 1))
        {
          int bpos = INTVAL (x);

          fprintf (file, "%s", reg_names[t_regno + bpos / 8]);
          if (code == 'T')
            fprintf (file, ",%d", bpos % 8);

          /* Reset the latch so the next %T sequence starts fresh.  */
          t_regno = -1;
        }
      else
        fatal_insn ("operands to %T/%t must be reg + const_int:", x);
    }
  else if (REG_P (x))
    {
      if (x == zero_reg_rtx)
        fprintf (file, "__zero_reg__");
      else
        fprintf (file, reg_names[true_regnum (x) + abcd]);
    }
  else if (CONST_INT_P (x))
    {
      HOST_WIDE_INT ival = INTVAL (x);

      if ('i' != code)
        fprintf (file, HOST_WIDE_INT_PRINT_DEC, ival + abcd);
      else if (low_io_address_operand (x, VOIDmode)
               || high_io_address_operand (x, VOIDmode))
        {
          /* %i on a known I/O address: print the symbolic SFR name when
             the address matches one of the special registers, otherwise
             the raw address minus the memory-mapped SFR offset.  */
          if (AVR_HAVE_RAMPZ && ival == avr_addr.rampz)
            fprintf (file, "__RAMPZ__");
          else if (AVR_HAVE_RAMPY && ival == avr_addr.rampy)
            fprintf (file, "__RAMPY__");
          else if (AVR_HAVE_RAMPX && ival == avr_addr.rampx)
            fprintf (file, "__RAMPX__");
          else if (AVR_HAVE_RAMPD && ival == avr_addr.rampd)
            fprintf (file, "__RAMPD__");
          else if (AVR_XMEGA && ival == avr_addr.ccp)
            fprintf (file, "__CCP__");
          else if (ival == avr_addr.sreg)   fprintf (file, "__SREG__");
          else if (ival == avr_addr.sp_l)   fprintf (file, "__SP_L__");
          else if (ival == avr_addr.sp_h)   fprintf (file, "__SP_H__");
          else
            {
              fprintf (file, HOST_WIDE_INT_PRINT_HEX,
                       ival - avr_current_arch->sfr_offset);
            }
        }
      else
        fatal_insn ("bad address, not an I/O address:", x);
    }
  else if (MEM_P (x))
    {
      rtx addr = XEXP (x, 0);

      if (code == 'm')
        {
          if (!CONSTANT_P (addr))
            fatal_insn ("bad address, not a constant:", addr);
          /* Assembler template with m-code is data - not progmem section */
          if (text_segment_operand (addr, VOIDmode))
            if (warning (0, "accessing data memory with"
                         " program memory address"))
              {
                output_addr_const (stderr, addr);
                fprintf(stderr,"\n");
              }
          output_addr_const (file, addr);
        }
      else if (code == 'i')
        {
          /* Recurse: print the MEM's address as an I/O address.  */
          avr_print_operand (file, addr, 'i');
        }
      else if (code == 'o')
        {
          /* %o: print only the displacement of a (reg+disp) address.  */
          if (GET_CODE (addr) != PLUS)
            fatal_insn ("bad address, not (reg+disp):", addr);

          avr_print_operand (file, XEXP (addr, 1), 0);
        }
      else if (code == 'p' || code == 'r')
        {
          if (GET_CODE (addr) != POST_INC && GET_CODE (addr) != PRE_DEC)
            fatal_insn ("bad address, not post_inc or pre_dec:", addr);

          if (code == 'p')
            avr_print_operand_address (file, XEXP (addr, 0));  /* X, Y, Z */
          else
            avr_print_operand (file, XEXP (addr, 0), 0);  /* r26, r28, r30 */
        }
      else if (GET_CODE (addr) == PLUS)
        {
          avr_print_operand_address (file, XEXP (addr,0));
          /* X (r26/r27) has no displacement addressing mode.  */
          if (REGNO (XEXP (addr, 0)) == REG_X)
            fatal_insn ("internal compiler error.  Bad address:"
                        ,addr);
          fputc ('+', file);
          avr_print_operand (file, XEXP (addr,1), code);
        }
      else
        avr_print_operand_address (file, addr);
    }
  else if (code == 'i')
    {
      fatal_insn ("bad address, not an I/O address:", x);
    }
  else if (code == 'x')
    {
      /* Constant progmem address - like used in jmp or call */
      if (0 == text_segment_operand (x, VOIDmode))
        if (warning (0, "accessing program memory"
                     " with data memory address"))
          {
            output_addr_const (stderr, x);
            fprintf(stderr,"\n");
          }
      /* Use normal symbol for direct address no linker trampoline needed */
      output_addr_const (file, x);
    }
  else if (GET_CODE (x) == CONST_DOUBLE)
    {
      /* Only single-precision float constants are expected here.  */
      long val;
      REAL_VALUE_TYPE rv;
      if (GET_MODE (x) != SFmode)
        fatal_insn ("internal compiler error.  Unknown mode:", x);
      REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
      REAL_VALUE_TO_TARGET_SINGLE (rv, val);
      fprintf (file, "0x%lx", val);
    }
  else if (GET_CODE (x) == CONST_STRING)
    fputs (XSTR (x, 0), file);
  else if (code == 'j')
    /* %j: branch mnemonic for the comparison code of X.  */
    fputs (cond_string (GET_CODE (x)), file);
  else if (code == 'k')
    /* %k: branch mnemonic for the reversed comparison code of X.  */
    fputs (cond_string (reverse_condition (GET_CODE (x))), file);
  else
    avr_print_operand_address (file, x);
}
1975
 
1976
/* Update the condition code in the INSN.  */
1977
 
1978
void
notice_update_cc (rtx body ATTRIBUTE_UNUSED, rtx insn)
{
  rtx set;
  /* Start from the insn's static "cc" attribute; the first switch below
     resolves the context-dependent values to one of the standard CC_*.  */
  enum attr_cc cc = get_attr_cc (insn);

  switch (cc)
    {
    default:
      break;

    case CC_OUT_PLUS:
    case CC_OUT_PLUS_NOCLOBBER:
    case CC_LDI:
      {
        rtx *op = recog_data.operand;
        int len_dummy, icc;

        /* Extract insn's operands.  */
        extract_constrain_insn_cached (insn);

        switch (cc)
          {
          default:
            gcc_unreachable();

          case CC_OUT_PLUS:
            /* Re-run the output function just to learn the resulting CC.  */
            avr_out_plus (op, &len_dummy, &icc);
            cc = (enum attr_cc) icc;
            break;

          case CC_OUT_PLUS_NOCLOBBER:
            avr_out_plus_noclobber (op, &len_dummy, &icc);
            cc = (enum attr_cc) icc;
            break;

          case CC_LDI:

            cc = (op[1] == CONST0_RTX (GET_MODE (op[0]))
                  && reg_overlap_mentioned_p (op[0], zero_reg_rtx))
              /* Loading zero-reg with 0 uses CLI and thus clobbers cc0.  */
              ? CC_CLOBBER
              /* Any other "r,rL" combination does not alter cc0.  */
              : CC_NONE;

            break;
          } /* inner switch */

        break;
      }
    } /* outer switch */

  /* Now CC holds a standard value; update the global cc_status.  */

  switch (cc)
    {
    default:
      /* Special values like CC_OUT_PLUS from above have been
         mapped to "standard" CC_* values so we never come here.  */

      gcc_unreachable();
      break;

    case CC_NONE:
      /* Insn does not affect CC at all.  */
      break;

    case CC_SET_N:
      CC_STATUS_INIT;
      break;

    case CC_SET_ZN:
      set = single_set (insn);
      CC_STATUS_INIT;
      if (set)
        {
          cc_status.flags |= CC_NO_OVERFLOW;
          cc_status.value1 = SET_DEST (set);
        }
      break;

    case CC_SET_CZN:
      /* Insn sets the Z,N,C flags of CC to recog_operand[0].
         The V flag may or may not be known but that's ok because
         alter_cond will change tests to use EQ/NE.  */
      set = single_set (insn);
      CC_STATUS_INIT;
      if (set)
        {
          cc_status.value1 = SET_DEST (set);
          cc_status.flags |= CC_OVERFLOW_UNUSABLE;
        }
      break;

    case CC_COMPARE:
      set = single_set (insn);
      CC_STATUS_INIT;
      if (set)
        cc_status.value1 = SET_SRC (set);
      break;

    case CC_CLOBBER:
      /* Insn doesn't leave CC in a usable state.  */
      CC_STATUS_INIT;
      break;
    }
}
2083
 
2084
/* Choose mode for jump insn:
2085
   1 - relative jump in range -63 <= x <= 62 ;
2086
   2 - relative jump in range -2046 <= x <= 2045 ;
2087
   3 - absolute jump (only for ATmega[16]03).  */
2088
 
2089
int
2090
avr_jump_mode (rtx x, rtx insn)
2091
{
2092
  int dest_addr = INSN_ADDRESSES (INSN_UID (GET_CODE (x) == LABEL_REF
2093
                                            ? XEXP (x, 0) : x));
2094
  int cur_addr = INSN_ADDRESSES (INSN_UID (insn));
2095
  int jump_distance = cur_addr - dest_addr;
2096
 
2097
  if (-63 <= jump_distance && jump_distance <= 62)
2098
    return 1;
2099
  else if (-2046 <= jump_distance && jump_distance <= 2045)
2100
    return 2;
2101
  else if (AVR_HAVE_JMP_CALL)
2102
    return 3;
2103
 
2104
  return 2;
2105
}
2106
 
2107
/* return an AVR condition jump commands.
2108
   X is a comparison RTX.
2109
   LEN is a number returned by avr_jump_mode function.
2110
   if REVERSE nonzero then condition code in X must be reversed.  */
2111
 
2112
const char *
ret_cond_branch (rtx x, int len, int reverse)
{
  RTX_CODE cond = reverse ? reverse_condition (GET_CODE (x)) : GET_CODE (x);

  /* GT/GTU/LE/LEU have no single AVR branch insn; they are synthesized
     from BREQ plus a signed/unsigned relational branch.  The choice of
     BRPL/BRMI vs. BRGE/BRLT depends on whether the V flag is usable.
     LEN selects short branch (1), RJMP (2) or JMP (3) reach.  */

  switch (cond)
    {
    case GT:
      if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
        return (len == 1 ? ("breq .+2" CR_TAB
                            "brpl %0") :
                len == 2 ? ("breq .+4" CR_TAB
                            "brmi .+2" CR_TAB
                            "rjmp %0") :
                ("breq .+6" CR_TAB
                 "brmi .+4" CR_TAB
                 "jmp %0"));

      else
        return (len == 1 ? ("breq .+2" CR_TAB
                            "brge %0") :
                len == 2 ? ("breq .+4" CR_TAB
                            "brlt .+2" CR_TAB
                            "rjmp %0") :
                ("breq .+6" CR_TAB
                 "brlt .+4" CR_TAB
                 "jmp %0"));
    case GTU:
      return (len == 1 ? ("breq .+2" CR_TAB
                          "brsh %0") :
              len == 2 ? ("breq .+4" CR_TAB
                          "brlo .+2" CR_TAB
                          "rjmp %0") :
              ("breq .+6" CR_TAB
               "brlo .+4" CR_TAB
               "jmp %0"));
    case LE:
      if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
        return (len == 1 ? ("breq %0" CR_TAB
                            "brmi %0") :
                len == 2 ? ("breq .+2" CR_TAB
                            "brpl .+2" CR_TAB
                            "rjmp %0") :
                ("breq .+2" CR_TAB
                 "brpl .+4" CR_TAB
                 "jmp %0"));
      else
        return (len == 1 ? ("breq %0" CR_TAB
                            "brlt %0") :
                len == 2 ? ("breq .+2" CR_TAB
                            "brge .+2" CR_TAB
                            "rjmp %0") :
                ("breq .+2" CR_TAB
                 "brge .+4" CR_TAB
                 "jmp %0"));
    case LEU:
      return (len == 1 ? ("breq %0" CR_TAB
                          "brlo %0") :
              len == 2 ? ("breq .+2" CR_TAB
                          "brsh .+2" CR_TAB
                          "rjmp %0") :
              ("breq .+2" CR_TAB
               "brsh .+4" CR_TAB
               "jmp %0"));
    default:
      /* All remaining conditions map directly onto a single branch insn,
         emitted via the %j (direct) / %k (reversed) operand codes.  */
      if (reverse)
        {
          switch (len)
            {
            case 1:
              return "br%k1 %0";
            case 2:
              return ("br%j1 .+2" CR_TAB
                      "rjmp %0");
            default:
              return ("br%j1 .+4" CR_TAB
                      "jmp %0");
            }
        }
      else
        {
          switch (len)
            {
            case 1:
              return "br%j1 %0";
            case 2:
              return ("br%k1 .+2" CR_TAB
                      "rjmp %0");
            default:
              return ("br%k1 .+4" CR_TAB
                      "jmp %0");
            }
        }
    }
  return "";
}
2208
 
2209
/* Output insn cost for next insn.  */
2210
 
2211
void
2212
final_prescan_insn (rtx insn, rtx *operand ATTRIBUTE_UNUSED,
2213
                    int num_operands ATTRIBUTE_UNUSED)
2214
{
2215
  if (avr_log.rtx_costs)
2216
    {
2217
      rtx set = single_set (insn);
2218
 
2219
      if (set)
2220
        fprintf (asm_out_file, "/* DEBUG: cost = %d.  */\n",
2221
                 set_src_cost (SET_SRC (set), optimize_insn_for_speed_p ()));
2222
      else
2223
        fprintf (asm_out_file, "/* DEBUG: pattern-cost = %d.  */\n",
2224
                 rtx_cost (PATTERN (insn), INSN, 0,
2225
                           optimize_insn_for_speed_p()));
2226
    }
2227
}
2228
 
2229
/* Return 0 if undefined, 1 if always true or always false.  */
2230
 
2231
int
2232
avr_simplify_comparison_p (enum machine_mode mode, RTX_CODE op, rtx x)
2233
{
2234
  unsigned int max = (mode == QImode ? 0xff :
2235
                      mode == HImode ? 0xffff :
2236
                      mode == PSImode ? 0xffffff :
2237
                      mode == SImode ? 0xffffffff : 0);
2238
  if (max && op && GET_CODE (x) == CONST_INT)
2239
    {
2240
      if (unsigned_condition (op) != op)
2241
        max >>= 1;
2242
 
2243
      if (max != (INTVAL (x) & max)
2244
          && INTVAL (x) != 0xff)
2245
        return 1;
2246
    }
2247
  return 0;
2248
}
2249
 
2250
 
2251
/* Returns nonzero if REGNO is the number of a hard
2252
   register in which function arguments are sometimes passed.  */
2253
 
2254
int
function_arg_regno_p (int r)
{
  /* The AVR ABI passes arguments in r8..r25.  */
  return 8 <= r && r <= 25;
}
2259
 
2260
/* Initializing the variable cum for the state at the beginning
2261
   of the argument list.  */
2262
 
2263
void
init_cumulative_args (CUMULATIVE_ARGS *cum, tree fntype, rtx libname,
                      tree fndecl ATTRIBUTE_UNUSED)
{
  /* 18 GPRs (down from FIRST_CUM_REG) are available for arguments.  */
  cum->nregs = 18;
  cum->regno = FIRST_CUM_REG;
  /* Variadic functions (but not libcalls) take all args on the stack.  */
  if (!libname && stdarg_p (fntype))
    cum->nregs = 0;

  /* Assume the callee may be tail called */

  cfun->machine->sibcall_fails = 0;
}
2276
 
2277
/* Returns the number of registers to allocate for a function argument.  */
2278
 
2279
static int
2280
avr_num_arg_regs (enum machine_mode mode, const_tree type)
2281
{
2282
  int size;
2283
 
2284
  if (mode == BLKmode)
2285
    size = int_size_in_bytes (type);
2286
  else
2287
    size = GET_MODE_SIZE (mode);
2288
 
2289
  /* Align all function arguments to start in even-numbered registers.
2290
     Odd-sized arguments leave holes above them.  */
2291
 
2292
  return (size + 1) & ~1;
2293
}
2294
 
2295
/* Controls whether a function argument is passed
2296
   in a register, and which register.  */
2297
 
2298
static rtx
2299
avr_function_arg (cumulative_args_t cum_v, enum machine_mode mode,
2300
                  const_tree type, bool named ATTRIBUTE_UNUSED)
2301
{
2302
  CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
2303
  int bytes = avr_num_arg_regs (mode, type);
2304
 
2305
  if (cum->nregs && bytes <= cum->nregs)
2306
    return gen_rtx_REG (mode, cum->regno - bytes);
2307
 
2308
  return NULL_RTX;
2309
}
2310
 
2311
/* Update the summarizer variable CUM to advance past an argument
2312
   in the argument list.  */
2313
 
2314
static void
avr_function_arg_advance (cumulative_args_t cum_v, enum machine_mode mode,
                          const_tree type, bool named ATTRIBUTE_UNUSED)
{
  CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
  int bytes = avr_num_arg_regs (mode, type);

  /* Registers are allocated downwards.  */
  cum->nregs -= bytes;
  cum->regno -= bytes;

  /* A parameter is being passed in a call-saved register. As the original
     contents of these regs has to be restored before leaving the function,
     a function must not pass arguments in call-saved regs in order to get
     tail-called. */

  if (cum->regno >= 8
      && cum->nregs >= 0
      && !call_used_regs[cum->regno])
    {
      /* FIXME: We ship info on failing tail-call in struct machine_function.
         This uses internals of calls.c:expand_call() and the way args_so_far
         is used. targetm.function_ok_for_sibcall() needs to be extended to
         pass &args_so_far, too. At present, CUMULATIVE_ARGS is target
         dependent so that such an extension is not wanted. */

      cfun->machine->sibcall_fails = 1;
    }

  /* Test if all registers needed by the ABI are actually available.  If the
     user has fixed a GPR needed to pass an argument, an (implicit) function
     call will clobber that fixed register.  See PR45099 for an example.  */

  if (cum->regno >= 8
      && cum->nregs >= 0)
    {
      int regno;

      for (regno = cum->regno; regno < cum->regno + bytes; regno++)
        if (fixed_regs[regno])
          warning (0, "fixed register %s used to pass parameter to function",
                   reg_names[regno]);
    }

  /* Once registers are exhausted, all remaining args go on the stack.  */
  if (cum->nregs <= 0)
    {
      cum->nregs = 0;
      cum->regno = FIRST_CUM_REG;
    }
}
2363
 
2364
/* Implement `TARGET_FUNCTION_OK_FOR_SIBCALL' */
2365
/* Decide whether we can make a sibling call to a function.  DECL is the
2366
   declaration of the function being targeted by the call and EXP is the
2367
   CALL_EXPR representing the call. */
2368
 
2369
static bool
avr_function_ok_for_sibcall (tree decl_callee, tree exp_callee)
{
  tree fntype_callee;

  /* Tail-calling must fail if callee-saved regs are used to pass
     function args.  We must not tail-call when `epilogue_restores'
     is used.  Unfortunately, we cannot tell at this point if that
     actually will happen or not, and we cannot step back from
     tail-calling. Thus, we inhibit tail-calling with -mcall-prologues. */

  if (cfun->machine->sibcall_fails
      || TARGET_CALL_PROLOGUES)
    {
      return false;
    }

  fntype_callee = TREE_TYPE (CALL_EXPR_FN (exp_callee));

  if (decl_callee)
    {
      decl_callee = TREE_TYPE (decl_callee);
    }
  else
    {
      /* Indirect call: strip pointer/reference types until we reach
         the function (or method) type itself.  */
      decl_callee = fntype_callee;

      while (FUNCTION_TYPE != TREE_CODE (decl_callee)
             && METHOD_TYPE != TREE_CODE (decl_callee))
        {
          decl_callee = TREE_TYPE (decl_callee);
        }
    }

  /* Ensure that caller and callee have compatible epilogues */

  if (interrupt_function_p (current_function_decl)
      || signal_function_p (current_function_decl)
      || avr_naked_function_p (decl_callee)
      || avr_naked_function_p (current_function_decl)
      /* FIXME: For OS_task and OS_main, we are over-conservative.
         This is due to missing documentation of these attributes
         and what they actually should do and should not do. */
      || (avr_OS_task_function_p (decl_callee)
          != avr_OS_task_function_p (current_function_decl))
      || (avr_OS_main_function_p (decl_callee)
          != avr_OS_main_function_p (current_function_decl)))
    {
      return false;
    }

  return true;
}
2422
 
2423
/***********************************************************************
2424
  Functions for outputting various mov's for a various modes
2425
************************************************************************/
2426
 
2427
/* Return true if a value of mode MODE is read from flash by
2428
   __load_* function from libgcc.  */
2429
 
2430
bool
2431
avr_load_libgcc_p (rtx op)
2432
{
2433
  enum machine_mode mode = GET_MODE (op);
2434
  int n_bytes = GET_MODE_SIZE (mode);
2435
 
2436
  return (n_bytes > 2
2437
          && !AVR_HAVE_LPMX
2438
          && avr_mem_flash_p (op));
2439
}
2440
 
2441
/* Return true if a value of mode MODE is read by __xload_* function.  */
2442
 
2443
bool
2444
avr_xload_libgcc_p (enum machine_mode mode)
2445
{
2446
  int n_bytes = GET_MODE_SIZE (mode);
2447
 
2448
  return (n_bytes > 1
2449
          || avr_current_device->n_flash > 1);
2450
}
2451
 
2452
 
2453
/* Find an unused d-register to be used as scratch in INSN.
2454
   EXCLUDE is either NULL_RTX or some register. In the case where EXCLUDE
2455
   is a register, skip all possible return values that overlap EXCLUDE.
2456
   The policy for the returned register is similar to that of
2457
   `reg_unused_after', i.e. the returned register may overlap the SET_DEST
2458
   of INSN.
2459
 
2460
   Return a QImode d-register or NULL_RTX if nothing found.  */
2461
 
2462
static rtx
avr_find_unused_d_reg (rtx insn, rtx exclude)
{
  int regno;
  bool isr_p = (interrupt_function_p (current_function_decl)
                || signal_function_p (current_function_decl));

  /* Scan the d-registers r16..r31 for a usable scratch.  */
  for (regno = 16; regno < 32; regno++)
    {
      rtx reg = all_regs_rtx[regno];

      /* Skip registers overlapping EXCLUDE and fixed registers.  */
      if ((exclude
           && reg_overlap_mentioned_p (exclude, reg))
          || fixed_regs[regno])
        {
          continue;
        }

      /* Try non-live register */

      if (!df_regs_ever_live_p (regno)
          && (TREE_THIS_VOLATILE (current_function_decl)
              || cfun->machine->is_OS_task
              || cfun->machine->is_OS_main
              || (!isr_p && call_used_regs[regno])))
        {
          return reg;
        }

      /* Any live register can be used if it is unused after.
         Prologue/epilogue will care for it as needed.  */

      if (df_regs_ever_live_p (regno)
          && reg_unused_after (insn, reg))
        {
          return reg;
        }
    }

  /* No suitable d-register found.  */
  return NULL_RTX;
}
2503
 
2504
 
2505
/* Helper function for the next function in the case where only restricted
2506
   version of LPM instruction is available.  */
2507
 
2508
static const char*
avr_out_lpm_no_lpmx (rtx insn, rtx *xop, int *plen)
{
  rtx dest = xop[0];
  rtx addr = xop[1];
  int n_bytes = GET_MODE_SIZE (GET_MODE (dest));
  int regno_dest;

  regno_dest = REGNO (dest);

  /* The implicit target register of LPM.  */
  xop[3] = lpm_reg_rtx;

  switch (GET_CODE (addr))
    {
    default:
      gcc_unreachable();

    case REG:

      gcc_assert (REG_Z == REGNO (addr));

      switch (n_bytes)
        {
        default:
          gcc_unreachable();

        case 1:
          /* Plain LPM targets r0; copy to DEST if it is elsewhere.  */
          avr_asm_len ("%4lpm", xop, plen, 1);

          if (regno_dest != LPM_REGNO)
            avr_asm_len ("mov %0,%3", xop, plen, 1);

          return "";

        case 2:
          /* DEST overlaps Z: save the low byte on the stack while the
             second LPM still needs Z intact.  */
          if (REGNO (dest) == REG_Z)
            return avr_asm_len ("%4lpm"      CR_TAB
                                "push %3"    CR_TAB
                                "adiw %2,1"  CR_TAB
                                "%4lpm"      CR_TAB
                                "mov %B0,%3" CR_TAB
                                "pop %A0", xop, plen, 6);

          avr_asm_len ("%4lpm"      CR_TAB
                       "mov %A0,%3" CR_TAB
                       "adiw %2,1"  CR_TAB
                       "%4lpm"      CR_TAB
                       "mov %B0,%3", xop, plen, 5);

          /* Restore Z if a later insn still needs the address.  */
          if (!reg_unused_after (insn, addr))
            avr_asm_len ("sbiw %2,1", xop, plen, 1);

          break; /* 2 */
        }

      break; /* REG */

    case POST_INC:

      gcc_assert (REG_Z == REGNO (XEXP (addr, 0))
                  && n_bytes <= 4);

      if (regno_dest == LPM_REGNO)
        avr_asm_len ("%4lpm"      CR_TAB
                     "adiw %2,1", xop, plen, 2);
      else
        avr_asm_len ("%4lpm"      CR_TAB
                     "mov %A0,%3" CR_TAB
                     "adiw %2,1", xop, plen, 3);

      if (n_bytes >= 2)
        avr_asm_len ("%4lpm"      CR_TAB
                     "mov %B0,%3" CR_TAB
                     "adiw %2,1", xop, plen, 3);

      if (n_bytes >= 3)
        avr_asm_len ("%4lpm"      CR_TAB
                     "mov %C0,%3" CR_TAB
                     "adiw %2,1", xop, plen, 3);

      if (n_bytes >= 4)
        avr_asm_len ("%4lpm"      CR_TAB
                     "mov %D0,%3" CR_TAB
                     "adiw %2,1", xop, plen, 3);

      break; /* POST_INC */

    } /* switch CODE (addr) */

  return "";
}
2600
 
2601
 
2602
/* If PLEN == NULL: Output instructions to load a value from a memory location
2603
   OP[1] in AS1 to register OP[0].
2604
   If PLEN != 0 set *PLEN to the length in words of the instruction sequence.
2605
   Return "".  */
2606
 
2607
static const char*
avr_out_lpm (rtx insn, rtx *op, int *plen)
{
  /* Operand layout used by the templates below:
     %0 = destination reg, %1 = source address, %2 = Z (lpm address reg),
     %3 = scratch d-reg (set below when needed), %4 = "" or "e" prefix
     (LPM vs. ELPM), %5 = tmp reg (r0).  */
  rtx xop[6];
  rtx dest = op[0];
  rtx src = SET_SRC (single_set (insn));
  rtx addr;
  int n_bytes = GET_MODE_SIZE (GET_MODE (dest));
  int regno_dest;
  int segment;
  RTX_CODE code;
  addr_space_t as = MEM_ADDR_SPACE (src);

  if (plen)
    *plen = 0;

  if (MEM_P (dest))
    {
      warning (0, "writing to address space %qs not supported",
               avr_addrspace[MEM_ADDR_SPACE (dest)].name);

      return "";
    }

  addr = XEXP (src, 0);
  code = GET_CODE (addr);

  gcc_assert (REG_P (dest));
  gcc_assert (REG == code || POST_INC == code);

  xop[0] = dest;
  xop[1] = addr;
  xop[2] = lpm_addr_reg_rtx;
  xop[4] = xstring_empty;
  xop[5] = tmp_reg_rtx;

  regno_dest = REGNO (dest);

  segment = avr_addrspace[as].segment;

  /* Set RAMPZ as needed.  */

  if (segment)
    {
      xop[4] = GEN_INT (segment);

      /* Prefer an unused d-reg for LDI; otherwise synthesize the segment
         value without one.  */
      if (xop[3] = avr_find_unused_d_reg (insn, lpm_addr_reg_rtx),
          xop[3])
        {
          avr_asm_len ("ldi %3,%4" CR_TAB
                       "out __RAMPZ__,%3", xop, plen, 2);
        }
      else if (segment == 1)
        {
          /* Segment 1 can be built in the tmp reg via CLR/INC.  */
          avr_asm_len ("clr %5" CR_TAB
                       "inc %5" CR_TAB
                       "out __RAMPZ__,%5", xop, plen, 3);
        }
      else
        {
          /* Borrow Z's low byte for the LDI and restore it afterwards.  */
          avr_asm_len ("mov %5,%2"         CR_TAB
                       "ldi %2,%4"         CR_TAB
                       "out __RAMPZ__,%2"  CR_TAB
                       "mov %2,%5", xop, plen, 4);
        }

      /* From here on use ELPM ("e" prefix in %4).  */
      xop[4] = xstring_e;

      if (!AVR_HAVE_ELPMX)
        return avr_out_lpm_no_lpmx (insn, xop, plen);
    }
  else if (!AVR_HAVE_LPMX)
    {
      return avr_out_lpm_no_lpmx (insn, xop, plen);
    }

  /* We have [E]LPMX: Output reading from Flash the comfortable way.  */

  switch (GET_CODE (addr))
    {
    default:
      gcc_unreachable();

    case REG:

      gcc_assert (REG_Z == REGNO (addr));

      switch (n_bytes)
        {
        default:
          gcc_unreachable();

        case 1:
          return avr_asm_len ("%4lpm %0,%a2", xop, plen, 1);

        case 2:
          /* DEST overlaps Z: buffer the low byte in the tmp reg.  */
          if (REGNO (dest) == REG_Z)
            return avr_asm_len ("%4lpm %5,%a2+" CR_TAB
                                "%4lpm %B0,%a2" CR_TAB
                                "mov %A0,%5", xop, plen, 3);
          else
            {
              avr_asm_len ("%4lpm %A0,%a2+" CR_TAB
                           "%4lpm %B0,%a2", xop, plen, 2);

              /* Restore Z if a later insn still needs the address.  */
              if (!reg_unused_after (insn, addr))
                avr_asm_len ("sbiw %2,1", xop, plen, 1);
            }

          break; /* 2 */

        case 3:

          avr_asm_len ("%4lpm %A0,%a2+" CR_TAB
                       "%4lpm %B0,%a2+" CR_TAB
                       "%4lpm %C0,%a2", xop, plen, 3);

          if (!reg_unused_after (insn, addr))
            avr_asm_len ("sbiw %2,2", xop, plen, 1);

          break; /* 3 */

        case 4:

          avr_asm_len ("%4lpm %A0,%a2+" CR_TAB
                       "%4lpm %B0,%a2+", xop, plen, 2);

          /* Upper word of DEST overlaps Z: buffer byte C in tmp reg.  */
          if (REGNO (dest) == REG_Z - 2)
            return avr_asm_len ("%4lpm %5,%a2+" CR_TAB
                                "%4lpm %C0,%a2"          CR_TAB
                                "mov %D0,%5", xop, plen, 3);
          else
            {
              avr_asm_len ("%4lpm %C0,%a2+" CR_TAB
                           "%4lpm %D0,%a2", xop, plen, 2);

              if (!reg_unused_after (insn, addr))
                avr_asm_len ("sbiw %2,3", xop, plen, 1);
            }

          break; /* 4 */
        } /* n_bytes */

      break; /* REG */

    case POST_INC:

      gcc_assert (REG_Z == REGNO (XEXP (addr, 0))
                  && n_bytes <= 4);

      avr_asm_len                    ("%4lpm %A0,%a2+", xop, plen, 1);
      if (n_bytes >= 2)  avr_asm_len ("%4lpm %B0,%a2+", xop, plen, 1);
      if (n_bytes >= 3)  avr_asm_len ("%4lpm %C0,%a2+", xop, plen, 1);
      if (n_bytes >= 4)  avr_asm_len ("%4lpm %D0,%a2+", xop, plen, 1);

      break; /* POST_INC */

    } /* switch CODE (addr) */

  return "";
}
2768
 
2769
 
2770
/* Worker function for xload_8 insn.

   OP[0] is the QImode destination register, OP[1] holds the high byte
   of the source address.  Emit code that reads one byte from either
   RAM or program memory: an "ld" through Z runs first, then "sbrs %1,7"
   skips the following "lpm" when bit 7 of %1 is set, so the RAM result
   survives in that case and the flash result otherwise.  PLEN works as
   everywhere in this file: if non-NULL, only the word count is
   accumulated and no assembler text is printed.  */

const char*
avr_out_xload (rtx insn ATTRIBUTE_UNUSED, rtx *op, int *plen)
{
  rtx xop[4];

  xop[0] = op[0];
  xop[1] = op[1];
  /* %2 is the Z register, used as address for both "ld" and "lpm".  */
  xop[2] = lpm_addr_reg_rtx;
  /* %3 is the load target: devices without LPMx can only "lpm" into R0,
     so use the fixed lpm register there and "mov" afterwards.  */
  xop[3] = AVR_HAVE_LPMX ? op[0] : lpm_reg_rtx;

  if (plen)
    *plen = 0;

  /* Tentative RAM load; the "sbrs" decides whether the "lpm" below
     overwrites it.  */
  avr_asm_len ("ld %3,%a2" CR_TAB
               "sbrs %1,7", xop, plen, 2);

  avr_asm_len (AVR_HAVE_LPMX ? "lpm %3,%a2" : "lpm", xop, plen, 1);

  /* If the load had to go through R0, forward it to the destination.  */
  if (REGNO (xop[0]) != REGNO (xop[3]))
    avr_asm_len ("mov %0,%3", xop, plen, 1);

  return "";
}
2795
 
2796
 
2797
/* Output the assembler code for a QImode (1-byte) move.

   INSN is the move insn, OPERANDS[0] / OPERANDS[1] are destination and
   source.  L, if non-NULL, receives the length in words instead of
   assembler text being emitted.  Dispatches to the specialized workers
   for flash reads and memory loads/stores.  */

const char *
output_movqi (rtx insn, rtx operands[], int *l)
{
  int dummy;
  rtx dest = operands[0];
  rtx src = operands[1];
  int *real_l = l;

  /* Accesses to program memory cannot use plain ld/st; hand them to
     the LPM worker.  */
  if (avr_mem_flash_p (src)
      || avr_mem_flash_p (dest))
    {
      return avr_out_lpm (insn, operands, real_l);
    }

  if (!l)
    l = &dummy;

  *l = 1;

  if (register_operand (dest, QImode))
    {
      if (register_operand (src, QImode)) /* mov r,r */
        {
          /* The stack pointer lives in I/O space, so moves to/from it
             use "out" / "in" rather than "mov".  */
          if (test_hard_reg_class (STACK_REG, dest))
            return "out %0,%1";
          else if (test_hard_reg_class (STACK_REG, src))
            return "in %0,%1";

          return "mov %0,%1";
        }
      else if (CONSTANT_P (src))
        {
          output_reload_in_const (operands, NULL_RTX, real_l, false);
          return "";
        }
      else if (GET_CODE (src) == MEM)
        return out_movqi_r_mr (insn, operands, real_l); /* mov r,m */
    }
  else if (GET_CODE (dest) == MEM)
    {
      rtx xop[2];

      /* Storing zero goes through the fixed zero register instead of
         needing a reload of the constant.  */
      xop[0] = dest;
      xop[1] = src == const0_rtx ? zero_reg_rtx : src;

      return out_movqi_mr_r (insn, xop, real_l);
    }
  return "";
}
2846
 
2847
 
2848
/* Output the assembler code for a HImode (2-byte) move.

   INSN is the move insn, XOP[0] / XOP[1] are destination and source.
   PLEN, if non-NULL, accumulates the length in words instead of
   printing assembler text.  Handles the stack-pointer special cases
   and dispatches memory moves to the HImode workers.  */

const char *
output_movhi (rtx insn, rtx xop[], int *plen)
{
  rtx dest = xop[0];
  rtx src = xop[1];

  gcc_assert (GET_MODE_SIZE (GET_MODE (dest)) == 2);

  /* Program-memory accesses are handled by the LPM worker.  */
  if (avr_mem_flash_p (src)
      || avr_mem_flash_p (dest))
    {
      return avr_out_lpm (insn, xop, plen);
    }

  if (REG_P (dest))
    {
      if (REG_P (src)) /* mov r,r */
        {
          if (test_hard_reg_class (STACK_REG, dest))
            {
              /* Writing SP.  With an 8-bit stack pointer only SPL
                 exists.  */
              if (AVR_HAVE_8BIT_SP)
                return avr_asm_len ("out __SP_L__,%A1", xop, plen, -1);

              /* On XMEGA the low byte is written first and no
                 interrupt guard is emitted.  */
              if (AVR_XMEGA)
                return avr_asm_len ("out __SP_L__,%A1" CR_TAB
                                    "out __SP_H__,%B1", xop, plen, -2);

              /* Use simple load of SP if no interrupts are  used.  */

              return TARGET_NO_INTERRUPTS
                ? avr_asm_len ("out __SP_H__,%B1" CR_TAB
                               "out __SP_L__,%A1", xop, plen, -2)

                /* Otherwise clear the I flag while SP_H is written;
                   SREG (and thus I) is restored before the SP_L
                   write.  */
                : avr_asm_len ("in __tmp_reg__,__SREG__"  CR_TAB
                               "cli"                      CR_TAB
                               "out __SP_H__,%B1"         CR_TAB
                               "out __SREG__,__tmp_reg__" CR_TAB
                               "out __SP_L__,%A1", xop, plen, -5);
            }
          else if (test_hard_reg_class (STACK_REG, src))
            {
              /* Reading SP; with an 8-bit SP the high byte is forced
                 to zero.  */
              return AVR_HAVE_8BIT_SP
                ? avr_asm_len ("in %A0,__SP_L__" CR_TAB
                               "clr %B0", xop, plen, -2)

                : avr_asm_len ("in %A0,__SP_L__" CR_TAB
                               "in %B0,__SP_H__", xop, plen, -2);
            }

          return AVR_HAVE_MOVW
            ? avr_asm_len ("movw %0,%1", xop, plen, -1)

            : avr_asm_len ("mov %A0,%A1" CR_TAB
                           "mov %B0,%B1", xop, plen, -2);
        } /* REG_P (src) */
      else if (CONSTANT_P (src))
        {
          return output_reload_inhi (xop, NULL, plen);
        }
      else if (MEM_P (src))
        {
          return out_movhi_r_mr (insn, xop, plen); /* mov r,m */
        }
    }
  else if (MEM_P (dest))
    {
      /* This local array intentionally shadows the XOP parameter: a
         zero store is rewritten to use the fixed zero register.  */
      rtx xop[2];

      xop[0] = dest;
      xop[1] = src == const0_rtx ? zero_reg_rtx : src;

      return out_movhi_mr_r (insn, xop, plen);
    }

  fatal_insn ("invalid insn:", insn);

  return "";
}
2926
 
2927
/* Output code to load the QImode value at memory location OP[1] into
   register OP[0].  INSN is the move insn; PLEN, if non-NULL, receives
   the length in words instead of text being printed.  */

static const char*
out_movqi_r_mr (rtx insn, rtx op[], int *plen)
{
  rtx dest = op[0];
  rtx src = op[1];
  rtx x = XEXP (src, 0);

  if (CONSTANT_ADDRESS_P (x))
    {
      /* Prefer the 1-word "in" over the 2-word "lds" when the address
         maps into I/O space and we are optimizing.  */
      return optimize > 0 && io_address_operand (x, QImode)
        ? avr_asm_len ("in %0,%i1", op, plen, -1)
        : avr_asm_len ("lds %0,%m1", op, plen, -2);
    }
  else if (GET_CODE (x) == PLUS
           && REG_P (XEXP (x, 0))
           && CONST_INT_P (XEXP (x, 1)))
    {
      /* memory access by reg+disp */

      int disp = INTVAL (XEXP (x, 1));

      /* QImode has size 1, so this fires when the displacement is past
         the 0..63 range of "ldd"; only the Y register supports the
         adjust/load/adjust-back fake-up below.  */
      if (disp - GET_MODE_SIZE (GET_MODE (src)) >= 63)
        {
          if (REGNO (XEXP (x, 0)) != REG_Y)
            fatal_insn ("incorrect insn:",insn);

          if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
            return avr_asm_len ("adiw r28,%o1-63" CR_TAB
                                "ldd %0,Y+63"     CR_TAB
                                "sbiw r28,%o1-63", op, plen, -3);

          /* Offset too large even for adiw: use a full 16-bit
             add/subtract of Y around the load.  */
          return avr_asm_len ("subi r28,lo8(-%o1)" CR_TAB
                              "sbci r29,hi8(-%o1)" CR_TAB
                              "ld %0,Y"            CR_TAB
                              "subi r28,lo8(%o1)"  CR_TAB
                              "sbci r29,hi8(%o1)", op, plen, -5);
        }
      else if (REGNO (XEXP (x, 0)) == REG_X)
        {
          /* This is a paranoid case LEGITIMIZE_RELOAD_ADDRESS must exclude
             it but I have this situation with extremal optimizing options.  */

          /* X has no displacement addressing: adjust X, load, and
             restore X afterwards unless it is dead or overlaps the
             destination.  */
          avr_asm_len ("adiw r26,%o1" CR_TAB
                       "ld %0,X", op, plen, -2);

          if (!reg_overlap_mentioned_p (dest, XEXP (x,0))
              && !reg_unused_after (insn, XEXP (x,0)))
            {
              avr_asm_len ("sbiw r26,%o1", op, plen, 1);
            }

          return "";
        }

      return avr_asm_len ("ldd %0,%1", op, plen, -1);
    }

  /* Plain register-indirect (or pre-dec / post-inc) addressing.  */
  return avr_asm_len ("ld %0,%1", op, plen, -1);
}
2986
 
2987
/* Output code to load the HImode value at memory location OP[1] into
   register pair OP[0].  INSN is the move insn; PLEN, if non-NULL,
   receives the length in words instead of text being printed.  */

static const char*
out_movhi_r_mr (rtx insn, rtx op[], int *plen)
{
  rtx dest = op[0];
  rtx src = op[1];
  rtx base = XEXP (src, 0);
  int reg_dest = true_regnum (dest);
  int reg_base = true_regnum (base);
  /* "volatile" forces reading low byte first, even if less efficient,
     for correct operation with 16-bit I/O registers.  */
  int mem_volatile_p = MEM_VOLATILE_P (src);

  if (reg_base > 0)
    {
      /* Destination overlaps the base pointer: buffer the low byte in
         the temp register so the second load does not clobber it.  */
      if (reg_dest == reg_base)         /* R = (R) */
        return avr_asm_len ("ld __tmp_reg__,%1+" CR_TAB
                            "ld %B0,%1"          CR_TAB
                            "mov %A0,__tmp_reg__", op, plen, -3);

      if (reg_base != REG_X)
        return avr_asm_len ("ld %A0,%1" CR_TAB
                            "ldd %B0,%1+1", op, plen, -2);

      /* X has no "ldd": post-increment through both bytes, then
         restore X if it is still live.  */
      avr_asm_len ("ld %A0,X+" CR_TAB
                   "ld %B0,X", op, plen, -2);

      if (!reg_unused_after (insn, base))
        avr_asm_len ("sbiw r26,1", op, plen, 1);

      return "";
    }
  else if (GET_CODE (base) == PLUS) /* (R + i) */
    {
      int disp = INTVAL (XEXP (base, 1));
      /* Shadows the outer reg_base on purpose: here we care about the
         register inside the PLUS.  */
      int reg_base = true_regnum (XEXP (base, 0));

      if (disp > MAX_LD_OFFSET (GET_MODE (src)))
        {
          if (REGNO (XEXP (base, 0)) != REG_Y)
            fatal_insn ("incorrect insn:",insn);

          /* Adjust Y so the displacement fits "ldd", or fall back to a
             full 16-bit add/subtract around the loads.  */
          return disp <= 63 + MAX_LD_OFFSET (GET_MODE (src))
            ? avr_asm_len ("adiw r28,%o1-62" CR_TAB
                           "ldd %A0,Y+62"    CR_TAB
                           "ldd %B0,Y+63"    CR_TAB
                           "sbiw r28,%o1-62", op, plen, -4)

            : avr_asm_len ("subi r28,lo8(-%o1)" CR_TAB
                           "sbci r29,hi8(-%o1)" CR_TAB
                           "ld %A0,Y"           CR_TAB
                           "ldd %B0,Y+1"        CR_TAB
                           "subi r28,lo8(%o1)"  CR_TAB
                           "sbci r29,hi8(%o1)", op, plen, -6);
        }

      /* This is a paranoid case. LEGITIMIZE_RELOAD_ADDRESS must exclude
         it but I have this situation with extremal
         optimization options.  */

      if (reg_base == REG_X)
        return reg_base == reg_dest
          ? avr_asm_len ("adiw r26,%o1"      CR_TAB
                         "ld __tmp_reg__,X+" CR_TAB
                         "ld %B0,X"          CR_TAB
                         "mov %A0,__tmp_reg__", op, plen, -4)

          : avr_asm_len ("adiw r26,%o1" CR_TAB
                         "ld %A0,X+"    CR_TAB
                         "ld %B0,X"     CR_TAB
                         "sbiw r26,%o1+1", op, plen, -4);

      return reg_base == reg_dest
        ? avr_asm_len ("ldd __tmp_reg__,%A1" CR_TAB
                       "ldd %B0,%B1"         CR_TAB
                       "mov %A0,__tmp_reg__", op, plen, -3)

        : avr_asm_len ("ldd %A0,%A1" CR_TAB
                       "ldd %B0,%B1", op, plen, -2);
    }
  else if (GET_CODE (base) == PRE_DEC) /* (--R) */
    {
      if (reg_overlap_mentioned_p (dest, XEXP (base, 0)))
        fatal_insn ("incorrect insn:", insn);

      /* Non-volatile: read high byte first so each "ld %0,-R" lands on
         the right address.  */
      if (!mem_volatile_p)
        return avr_asm_len ("ld %B0,%1" CR_TAB
                            "ld %A0,%1", op, plen, -2);

      /* Volatile: step the pointer back first, then read low byte
         before high byte.  */
      return REGNO (XEXP (base, 0)) == REG_X
        ? avr_asm_len ("sbiw r26,2"  CR_TAB
                       "ld %A0,X+"   CR_TAB
                       "ld %B0,X"    CR_TAB
                       "sbiw r26,1", op, plen, -4)

        : avr_asm_len ("sbiw %r1,2"  CR_TAB
                       "ld %A0,%p1"  CR_TAB
                       "ldd %B0,%p1+1", op, plen, -3);
    }
  else if (GET_CODE (base) == POST_INC) /* (R++) */
    {
      if (reg_overlap_mentioned_p (dest, XEXP (base, 0)))
        fatal_insn ("incorrect insn:", insn);

      return avr_asm_len ("ld %A0,%1"  CR_TAB
                          "ld %B0,%1", op, plen, -2);
    }
  else if (CONSTANT_ADDRESS_P (base))
    {
      /* I/O addresses take the shorter "in"; otherwise "lds".  */
      return optimize > 0 && io_address_operand (base, HImode)
        ? avr_asm_len ("in %A0,%i1" CR_TAB
                       "in %B0,%i1+1", op, plen, -2)

        : avr_asm_len ("lds %A0,%m1" CR_TAB
                       "lds %B0,%m1+1", op, plen, -4);
    }

  fatal_insn ("unknown move insn:",insn);
  return "";
}
3106
 
3107
/* Output code to load the SImode (4-byte) value at memory location
   OP[1] into register quad OP[0].  INSN is the move insn; L, if
   non-NULL, receives the length in words.  Unlike the newer workers in
   this file, length and template are produced separately here (the
   "*l=N, (...)" comma expressions).  */

static const char*
out_movsi_r_mr (rtx insn, rtx op[], int *l)
{
  rtx dest = op[0];
  rtx src = op[1];
  rtx base = XEXP (src, 0);
  int reg_dest = true_regnum (dest);
  int reg_base = true_regnum (base);
  int tmp;

  if (!l)
    l = &tmp;

  if (reg_base > 0)
    {
      if (reg_base == REG_X)        /* (R26) */
        {
          if (reg_dest == REG_X)
            /* "ld r26,-X" is undefined */
            /* Destination is r26..r29: read top-down, keeping the byte
               that would clobber X in the temp register until last.  */
            return *l=7, ("adiw r26,3"        CR_TAB
                          "ld r29,X"          CR_TAB
                          "ld r28,-X"         CR_TAB
                          "ld __tmp_reg__,-X" CR_TAB
                          "sbiw r26,1"        CR_TAB
                          "ld r26,X"          CR_TAB
                          "mov r27,__tmp_reg__");
          else if (reg_dest == REG_X - 2)
            /* Destination r24..r27 overlaps X in its top half; buffer
               byte C so the final load of %D0 (= r27) works.  */
            return *l=5, ("ld %A0,X+"          CR_TAB
                          "ld %B0,X+"          CR_TAB
                          "ld __tmp_reg__,X+"  CR_TAB
                          "ld %D0,X"           CR_TAB
                          "mov %C0,__tmp_reg__");
          else if (reg_unused_after (insn, base))
            return  *l=4, ("ld %A0,X+"  CR_TAB
                           "ld %B0,X+" CR_TAB
                           "ld %C0,X+" CR_TAB
                           "ld %D0,X");
          else
            return  *l=5, ("ld %A0,X+"  CR_TAB
                           "ld %B0,X+" CR_TAB
                           "ld %C0,X+" CR_TAB
                           "ld %D0,X"  CR_TAB
                           "sbiw r26,3");
        }
      else
        {
          /* Overlap cases: order the loads (and use the temp register)
             so the base pointer is consumed before being clobbered.  */
          if (reg_dest == reg_base)
            return *l=5, ("ldd %D0,%1+3" CR_TAB
                          "ldd %C0,%1+2" CR_TAB
                          "ldd __tmp_reg__,%1+1"  CR_TAB
                          "ld %A0,%1"  CR_TAB
                          "mov %B0,__tmp_reg__");
          else if (reg_base == reg_dest + 2)
            return *l=5, ("ld %A0,%1"             CR_TAB
                          "ldd %B0,%1+1"          CR_TAB
                          "ldd __tmp_reg__,%1+2"  CR_TAB
                          "ldd %D0,%1+3"          CR_TAB
                          "mov %C0,__tmp_reg__");
          else
            return *l=4, ("ld %A0,%1"    CR_TAB
                          "ldd %B0,%1+1" CR_TAB
                          "ldd %C0,%1+2" CR_TAB
                          "ldd %D0,%1+3");
        }
    }
  else if (GET_CODE (base) == PLUS) /* (R + i) */
    {
      int disp = INTVAL (XEXP (base, 1));

      if (disp > MAX_LD_OFFSET (GET_MODE (src)))
        {
          if (REGNO (XEXP (base, 0)) != REG_Y)
            fatal_insn ("incorrect insn:",insn);

          /* Adjust Y so the four displacements fit "ldd", or fall back
             to a full 16-bit add/subtract around the loads.  */
          if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
            return *l = 6, ("adiw r28,%o1-60" CR_TAB
                            "ldd %A0,Y+60"    CR_TAB
                            "ldd %B0,Y+61"    CR_TAB
                            "ldd %C0,Y+62"    CR_TAB
                            "ldd %D0,Y+63"    CR_TAB
                            "sbiw r28,%o1-60");

          return *l = 8, ("subi r28,lo8(-%o1)" CR_TAB
                          "sbci r29,hi8(-%o1)" CR_TAB
                          "ld %A0,Y"           CR_TAB
                          "ldd %B0,Y+1"        CR_TAB
                          "ldd %C0,Y+2"        CR_TAB
                          "ldd %D0,Y+3"        CR_TAB
                          "subi r28,lo8(%o1)"  CR_TAB
                          "sbci r29,hi8(%o1)");
        }

      reg_base = true_regnum (XEXP (base, 0));
      if (reg_base == REG_X)
        {
          /* R = (X + d) */
          if (reg_dest == REG_X)
            {
              *l = 7;
              /* "ld r26,-X" is undefined */
              return ("adiw r26,%o1+3"    CR_TAB
                      "ld r29,X"          CR_TAB
                      "ld r28,-X"         CR_TAB
                      "ld __tmp_reg__,-X" CR_TAB
                      "sbiw r26,1"        CR_TAB
                      "ld r26,X"          CR_TAB
                      "mov r27,__tmp_reg__");
            }
          *l = 6;
          if (reg_dest == REG_X - 2)
            /* Destination is r24..r27 (X is its top half): load the
               low words directly, buffer byte C, finish with r27.  */
            return ("adiw r26,%o1"      CR_TAB
                    "ld r24,X+"         CR_TAB
                    "ld r25,X+"         CR_TAB
                    "ld __tmp_reg__,X+" CR_TAB
                    "ld r27,X"          CR_TAB
                    "mov r26,__tmp_reg__");

          return ("adiw r26,%o1" CR_TAB
                  "ld %A0,X+"    CR_TAB
                  "ld %B0,X+"    CR_TAB
                  "ld %C0,X+"    CR_TAB
                  "ld %D0,X"     CR_TAB
                  "sbiw r26,%o1+3");
        }
      if (reg_dest == reg_base)
        return *l=5, ("ldd %D0,%D1"          CR_TAB
                      "ldd %C0,%C1"          CR_TAB
                      "ldd __tmp_reg__,%B1"  CR_TAB
                      "ldd %A0,%A1"          CR_TAB
                      "mov %B0,__tmp_reg__");
      else if (reg_dest == reg_base - 2)
        return *l=5, ("ldd %A0,%A1"          CR_TAB
                      "ldd %B0,%B1"          CR_TAB
                      "ldd __tmp_reg__,%C1"  CR_TAB
                      "ldd %D0,%D1"          CR_TAB
                      "mov %C0,__tmp_reg__");
      return *l=4, ("ldd %A0,%A1" CR_TAB
                    "ldd %B0,%B1" CR_TAB
                    "ldd %C0,%C1" CR_TAB
                    "ldd %D0,%D1");
    }
  else if (GET_CODE (base) == PRE_DEC) /* (--R) */
    return *l=4, ("ld %D0,%1" CR_TAB
                  "ld %C0,%1" CR_TAB
                  "ld %B0,%1" CR_TAB
                  "ld %A0,%1");
  else if (GET_CODE (base) == POST_INC) /* (R++) */
    return *l=4, ("ld %A0,%1" CR_TAB
                  "ld %B0,%1" CR_TAB
                  "ld %C0,%1" CR_TAB
                  "ld %D0,%1");
  else if (CONSTANT_ADDRESS_P (base))
    return *l=8, ("lds %A0,%m1"   CR_TAB
                  "lds %B0,%m1+1" CR_TAB
                  "lds %C0,%m1+2" CR_TAB
                  "lds %D0,%m1+3");

  fatal_insn ("unknown move insn:",insn);
  return "";
}
3267
 
3268
/* Output code to store the SImode (4-byte) value in register quad
   OP[1] to memory location OP[0].  INSN is the move insn; L, if
   non-NULL, receives the length in words via the "*l=N, (...)"
   comma-expression idiom used throughout this function.  */

static const char*
out_movsi_mr_r (rtx insn, rtx op[], int *l)
{
  rtx dest = op[0];
  rtx src = op[1];
  rtx base = XEXP (dest, 0);
  int reg_base = true_regnum (base);
  int reg_src = true_regnum (src);
  int tmp;

  if (!l)
    l = &tmp;

  if (CONSTANT_ADDRESS_P (base))
    return *l=8,("sts %m0,%A1" CR_TAB
                 "sts %m0+1,%B1" CR_TAB
                 "sts %m0+2,%C1" CR_TAB
                 "sts %m0+3,%D1");
  if (reg_base > 0)                 /* (r) */
    {
      if (reg_base == REG_X)                /* (R26) */
        {
          if (reg_src == REG_X)
            {
              /* "st X+,r26" is undefined */
              /* Source is r26..r29, i.e. the pointer itself: park r27
                 in the temp register and store r26 before stepping X.  */
              if (reg_unused_after (insn, base))
                return *l=6, ("mov __tmp_reg__,r27" CR_TAB
                              "st X,r26"            CR_TAB
                              "adiw r26,1"          CR_TAB
                              "st X+,__tmp_reg__"   CR_TAB
                              "st X+,r28"           CR_TAB
                              "st X,r29");
              else
                return *l=7, ("mov __tmp_reg__,r27" CR_TAB
                              "st X,r26"            CR_TAB
                              "adiw r26,1"          CR_TAB
                              "st X+,__tmp_reg__"   CR_TAB
                              "st X+,r28"           CR_TAB
                              "st X,r29"            CR_TAB
                              "sbiw r26,3");
            }
          else if (reg_base == reg_src + 2)
            {
              /* Source's top half overlaps X: save bytes C and D in
                 the zero and temp registers before they are clobbered;
                 "clr __zero_reg__" re-establishes the zero register.  */
              if (reg_unused_after (insn, base))
                return *l=7, ("mov __zero_reg__,%C1" CR_TAB
                              "mov __tmp_reg__,%D1"  CR_TAB
                              "st %0+,%A1"           CR_TAB
                              "st %0+,%B1"           CR_TAB
                              "st %0+,__zero_reg__"  CR_TAB
                              "st %0,__tmp_reg__"    CR_TAB
                              "clr __zero_reg__");
              else
                return *l=8, ("mov __zero_reg__,%C1" CR_TAB
                              "mov __tmp_reg__,%D1"  CR_TAB
                              "st %0+,%A1"           CR_TAB
                              "st %0+,%B1"           CR_TAB
                              "st %0+,__zero_reg__"  CR_TAB
                              "st %0,__tmp_reg__"    CR_TAB
                              "clr __zero_reg__"     CR_TAB
                              "sbiw r26,3");
            }
          return *l=5, ("st %0+,%A1" CR_TAB
                        "st %0+,%B1" CR_TAB
                        "st %0+,%C1" CR_TAB
                        "st %0,%D1"  CR_TAB
                        "sbiw r26,3");
        }
      else
        return *l=4, ("st %0,%A1"    CR_TAB
                      "std %0+1,%B1" CR_TAB
                      "std %0+2,%C1" CR_TAB
                      "std %0+3,%D1");
    }
  else if (GET_CODE (base) == PLUS) /* (R + i) */
    {
      int disp = INTVAL (XEXP (base, 1));
      reg_base = REGNO (XEXP (base, 0));
      if (disp > MAX_LD_OFFSET (GET_MODE (dest)))
        {
          if (reg_base != REG_Y)
            fatal_insn ("incorrect insn:",insn);

          /* Adjust Y so the displacements fit "std", or fall back to a
             full 16-bit add/subtract around the stores.  */
          if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
            return *l = 6, ("adiw r28,%o0-60" CR_TAB
                            "std Y+60,%A1"    CR_TAB
                            "std Y+61,%B1"    CR_TAB
                            "std Y+62,%C1"    CR_TAB
                            "std Y+63,%D1"    CR_TAB
                            "sbiw r28,%o0-60");

          return *l = 8, ("subi r28,lo8(-%o0)" CR_TAB
                          "sbci r29,hi8(-%o0)" CR_TAB
                          "st Y,%A1"           CR_TAB
                          "std Y+1,%B1"        CR_TAB
                          "std Y+2,%C1"        CR_TAB
                          "std Y+3,%D1"        CR_TAB
                          "subi r28,lo8(%o0)"  CR_TAB
                          "sbci r29,hi8(%o0)");
        }
      if (reg_base == REG_X)
        {
          /* (X + d) = R */
          if (reg_src == REG_X)
            {
              /* Source is the X pair itself: copy it to the temp and
                 zero registers before X is advanced.  */
              *l = 9;
              return ("mov __tmp_reg__,r26"  CR_TAB
                      "mov __zero_reg__,r27" CR_TAB
                      "adiw r26,%o0"         CR_TAB
                      "st X+,__tmp_reg__"    CR_TAB
                      "st X+,__zero_reg__"   CR_TAB
                      "st X+,r28"            CR_TAB
                      "st X,r29"             CR_TAB
                      "clr __zero_reg__"     CR_TAB
                      "sbiw r26,%o0+3");
            }
          else if (reg_src == REG_X - 2)
            {
              /* Source r24..r27 overlaps X in its top half.  */
              *l = 9;
              return ("mov __tmp_reg__,r26"  CR_TAB
                      "mov __zero_reg__,r27" CR_TAB
                      "adiw r26,%o0"         CR_TAB
                      "st X+,r24"            CR_TAB
                      "st X+,r25"            CR_TAB
                      "st X+,__tmp_reg__"    CR_TAB
                      "st X,__zero_reg__"    CR_TAB
                      "clr __zero_reg__"     CR_TAB
                      "sbiw r26,%o0+3");
            }
          *l = 6;
          return ("adiw r26,%o0" CR_TAB
                  "st X+,%A1"    CR_TAB
                  "st X+,%B1"    CR_TAB
                  "st X+,%C1"    CR_TAB
                  "st X,%D1"     CR_TAB
                  "sbiw r26,%o0+3");
        }
      return *l=4, ("std %A0,%A1" CR_TAB
                    "std %B0,%B1" CR_TAB
                    "std %C0,%C1" CR_TAB
                    "std %D0,%D1");
    }
  else if (GET_CODE (base) == PRE_DEC) /* (--R) */
    return *l=4, ("st %0,%D1" CR_TAB
                  "st %0,%C1" CR_TAB
                  "st %0,%B1" CR_TAB
                  "st %0,%A1");
  else if (GET_CODE (base) == POST_INC) /* (R++) */
    return *l=4, ("st %0,%A1" CR_TAB
                  "st %0,%B1" CR_TAB
                  "st %0,%C1" CR_TAB
                  "st %0,%D1");
  fatal_insn ("unknown move insn:",insn);
  return "";
}
3422
 
3423
/* Output the assembler code for a 4-byte (SImode / SFmode) move.

   INSN is the move insn, OPERANDS[0] / OPERANDS[1] destination and
   source.  L, if non-NULL, receives the length in words instead of
   assembler text being printed.  */

const char *
output_movsisf (rtx insn, rtx operands[], int *l)
{
  int dummy;
  rtx dest = operands[0];
  rtx src = operands[1];
  int *real_l = l;

  /* Program-memory accesses are handled by the LPM worker.  */
  if (avr_mem_flash_p (src)
      || avr_mem_flash_p (dest))
    {
      return avr_out_lpm (insn, operands, real_l);
    }

  if (!l)
    l = &dummy;

  if (register_operand (dest, VOIDmode))
    {
      if (register_operand (src, VOIDmode)) /* mov r,r */
        {
          /* Copy in an order that never overwrites a source byte
             before it has been read: high-to-low when moving to
             higher-numbered registers, low-to-high otherwise.  */
          if (true_regnum (dest) > true_regnum (src))
            {
              if (AVR_HAVE_MOVW)
                {
                  *l = 2;
                  return ("movw %C0,%C1" CR_TAB
                          "movw %A0,%A1");
                }
              *l = 4;
              return ("mov %D0,%D1" CR_TAB
                      "mov %C0,%C1" CR_TAB
                      "mov %B0,%B1" CR_TAB
                      "mov %A0,%A1");
            }
          else
            {
              if (AVR_HAVE_MOVW)
                {
                  *l = 2;
                  return ("movw %A0,%A1" CR_TAB
                          "movw %C0,%C1");
                }
              *l = 4;
              return ("mov %A0,%A1" CR_TAB
                      "mov %B0,%B1" CR_TAB
                      "mov %C0,%C1" CR_TAB
                      "mov %D0,%D1");
            }
        }
      else if (CONSTANT_P (src))
        {
          return output_reload_insisf (operands, NULL_RTX, real_l);
        }
      else if (GET_CODE (src) == MEM)
        return out_movsi_r_mr (insn, operands, real_l); /* mov r,m */
    }
  else if (GET_CODE (dest) == MEM)
    {
      const char *templ;

      /* Store of zero goes through the fixed zero register; the
         original operand is restored below.  */
      if (src == CONST0_RTX (GET_MODE (dest)))
          operands[1] = zero_reg_rtx;

      templ = out_movsi_mr_r (insn, operands, real_l);

      /* When computing a real template (not just a length), emit it
         here since we return "" after restoring operands[1].  */
      if (!real_l)
        output_asm_insn (templ, operands);

      operands[1] = src;
      return "";
    }
  fatal_insn ("invalid insn:", insn);
  return "";
}
3498
 
3499
 
3500
/* Handle loads of 24-bit types from memory to register.  */

/* OP[0] is the PSImode destination (3 consecutive registers), OP[1]
   the memory source; INSN is the move insn.  PLEN, if non-NULL,
   accumulates the length in words instead of printing text.  */

static const char*
avr_out_load_psi (rtx insn, rtx *op, int *plen)
{
  rtx dest = op[0];
  rtx src = op[1];
  rtx base = XEXP (src, 0);
  int reg_dest = true_regnum (dest);
  int reg_base = true_regnum (base);

  if (reg_base > 0)
    {
      if (reg_base == REG_X)        /* (R26) */
        {
          if (reg_dest == REG_X)
            /* "ld r26,-X" is undefined */
            /* Destination is r26..r28: read top-down, keeping the
               middle byte in the temp register until X is consumed.  */
            return avr_asm_len ("adiw r26,2"        CR_TAB
                                "ld r28,X"          CR_TAB
                                "ld __tmp_reg__,-X" CR_TAB
                                "sbiw r26,1"        CR_TAB
                                "ld r26,X"          CR_TAB
                                "mov r27,__tmp_reg__", op, plen, -6);
          else
            {
              avr_asm_len ("ld %A0,X+" CR_TAB
                           "ld %B0,X+" CR_TAB
                           "ld %C0,X", op, plen, -3);

              /* Restore X unless it is dead or was overwritten by the
                 load itself (dest == r24..r26).  */
              if (reg_dest != REG_X - 2
                  && !reg_unused_after (insn, base))
                {
                  avr_asm_len ("sbiw r26,2", op, plen, 1);
                }

              return "";
            }
        }
      else /* reg_base != REG_X */
        {
          /* Overlapping dest: buffer the middle byte so the final
             low-byte load does not clobber the pointer first.  */
          if (reg_dest == reg_base)
            return avr_asm_len ("ldd %C0,%1+2"          CR_TAB
                                "ldd __tmp_reg__,%1+1"  CR_TAB
                                "ld  %A0,%1"            CR_TAB
                                "mov %B0,__tmp_reg__", op, plen, -4);
          else
            return avr_asm_len ("ld  %A0,%1"    CR_TAB
                                "ldd %B0,%1+1"  CR_TAB
                                "ldd %C0,%1+2", op, plen, -3);
        }
    }
  else if (GET_CODE (base) == PLUS) /* (R + i) */
    {
      int disp = INTVAL (XEXP (base, 1));

      if (disp > MAX_LD_OFFSET (GET_MODE (src)))
        {
          if (REGNO (XEXP (base, 0)) != REG_Y)
            fatal_insn ("incorrect insn:",insn);

          /* Adjust Y so the displacements fit "ldd", or fall back to a
             full 16-bit add/subtract around the loads.  */
          if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
            return avr_asm_len ("adiw r28,%o1-61" CR_TAB
                                "ldd %A0,Y+61"    CR_TAB
                                "ldd %B0,Y+62"    CR_TAB
                                "ldd %C0,Y+63"    CR_TAB
                                "sbiw r28,%o1-61", op, plen, -5);

          return avr_asm_len ("subi r28,lo8(-%o1)" CR_TAB
                              "sbci r29,hi8(-%o1)" CR_TAB
                              "ld  %A0,Y"           CR_TAB
                              "ldd %B0,Y+1"        CR_TAB
                              "ldd %C0,Y+2"        CR_TAB
                              "subi r28,lo8(%o1)"  CR_TAB
                              "sbci r29,hi8(%o1)", op, plen, -7);
        }

      reg_base = true_regnum (XEXP (base, 0));
      if (reg_base == REG_X)
        {
          /* R = (X + d) */
          if (reg_dest == REG_X)
            {
              /* "ld r26,-X" is undefined */
              return avr_asm_len ("adiw r26,%o1+2"     CR_TAB
                                  "ld  r28,X"          CR_TAB
                                  "ld  __tmp_reg__,-X" CR_TAB
                                  "sbiw r26,1"         CR_TAB
                                  "ld  r26,X"          CR_TAB
                                  "mov r27,__tmp_reg__", op, plen, -6);
            }

            /* NOTE(review): the hard-coded r24/r25/r26 targets equal
               %A0..%C0 only when the destination is r24 (REG_X - 2);
               for any other destination these loads would go to the
               wrong registers, and the "sbiw r26" below would clobber
               the just-loaded top byte.  Confirm against the insn's
               operand constraints that only REG_X / REG_X - 2 can
               reach this path.  */
            avr_asm_len ("adiw r26,%o1"      CR_TAB
                         "ld r24,X+"         CR_TAB
                         "ld r25,X+"         CR_TAB
                         "ld r26,X", op, plen, -4);

            if (reg_dest != REG_X - 2)
              avr_asm_len ("sbiw r26,%o1+2", op, plen, 1);

            return "";
        }

      if (reg_dest == reg_base)
        return avr_asm_len ("ldd %C0,%C1" CR_TAB
                            "ldd __tmp_reg__,%B1"  CR_TAB
                            "ldd %A0,%A1" CR_TAB
                            "mov %B0,__tmp_reg__", op, plen, -4);

        return avr_asm_len ("ldd %A0,%A1" CR_TAB
                            "ldd %B0,%B1" CR_TAB
                            "ldd %C0,%C1", op, plen, -3);
    }
  else if (GET_CODE (base) == PRE_DEC) /* (--R) */
    return avr_asm_len ("ld %C0,%1" CR_TAB
                        "ld %B0,%1" CR_TAB
                        "ld %A0,%1", op, plen, -3);
  else if (GET_CODE (base) == POST_INC) /* (R++) */
    return avr_asm_len ("ld %A0,%1" CR_TAB
                        "ld %B0,%1" CR_TAB
                        "ld %C0,%1", op, plen, -3);

  else if (CONSTANT_ADDRESS_P (base))
    return avr_asm_len ("lds %A0,%m1" CR_TAB
                        "lds %B0,%m1+1" CR_TAB
                        "lds %C0,%m1+2", op, plen , -6);

  fatal_insn ("unknown move insn:",insn);
  return "";
}
3629
 
3630
/* Handle store of 24-bit type from register or zero to memory.  */
3631
 
3632
/* Worker for avr_out_movpsi: emit assembler to store a 24-bit (PSImode)
   register SRC = OP[1] to memory DEST = OP[0].  INSN is the insn being
   output; PLEN, if non-NULL, receives the instruction count instead of
   emitting code (see avr_asm_len).  Returns "".  */

static const char*
avr_out_store_psi (rtx insn, rtx *op, int *plen)
{
  rtx dest = op[0];
  rtx src = op[1];
  rtx base = XEXP (dest, 0);
  int reg_base = true_regnum (base);

  /* Absolute address: three direct stores.  */
  if (CONSTANT_ADDRESS_P (base))
    return avr_asm_len ("sts %m0,%A1"   CR_TAB
                        "sts %m0+1,%B1" CR_TAB
                        "sts %m0+2,%C1", op, plen, -6);

  if (reg_base > 0)                 /* (r) */
    {
      if (reg_base == REG_X)        /* (R26) */
        {
          /* X cannot overlap SRC: "st X+,r26" etc. would be undefined.  */
          gcc_assert (!reg_overlap_mentioned_p (base, src));

          avr_asm_len ("st %0+,%A1"  CR_TAB
                       "st %0+,%B1" CR_TAB
                       "st %0,%C1", op, plen, -3);

          /* Restore X if it is still live after this insn.  */
          if (!reg_unused_after (insn, base))
            avr_asm_len ("sbiw r26,2", op, plen, 1);

          return "";
        }
      else
        /* Y or Z base: displacement addressing keeps the base intact.  */
        return avr_asm_len ("st %0,%A1"    CR_TAB
                            "std %0+1,%B1" CR_TAB
                            "std %0+2,%C1", op, plen, -3);
    }
  else if (GET_CODE (base) == PLUS) /* (R + i) */
    {
      int disp = INTVAL (XEXP (base, 1));
      reg_base = REGNO (XEXP (base, 0));

      if (disp > MAX_LD_OFFSET (GET_MODE (dest)))
        {
          /* Out-of-range displacements are only supported off Y.  */
          if (reg_base != REG_Y)
            fatal_insn ("incorrect insn:",insn);

          /* Slightly over range: temporarily bump Y with ADIW and use
             the top of the 0..63 STD displacement window.  */
          if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
            return avr_asm_len ("adiw r28,%o0-61" CR_TAB
                                "std Y+61,%A1"    CR_TAB
                                "std Y+62,%B1"    CR_TAB
                                "std Y+63,%C1"    CR_TAB
                                "sbiw r28,%o0-60", op, plen, -5);

          /* Far away: add the full 16-bit offset to Y and undo it.  */
          return avr_asm_len ("subi r28,lo8(-%o0)" CR_TAB
                              "sbci r29,hi8(-%o0)" CR_TAB
                              "st Y,%A1"           CR_TAB
                              "std Y+1,%B1"        CR_TAB
                              "std Y+2,%C1"        CR_TAB
                              "subi r28,lo8(%o0)"  CR_TAB
                              "sbci r29,hi8(%o0)", op, plen, -7);
        }
      if (reg_base == REG_X)
        {
          /* (X + d) = R */
          gcc_assert (!reg_overlap_mentioned_p (XEXP (base, 0), src));

          /* X has no displacement mode: advance X, store, then restore
             X below if it is still needed.  */
          avr_asm_len ("adiw r26,%o0" CR_TAB
                       "st X+,%A1"    CR_TAB
                       "st X+,%B1"    CR_TAB
                       "st X,%C1", op, plen, -4);

          if (!reg_unused_after (insn, XEXP (base, 0)))
            avr_asm_len ("sbiw r26,%o0+2", op, plen, 1);

          return "";
        }

      /* In-range displacement off Y or Z.  */
      return avr_asm_len ("std %A0,%A1" CR_TAB
                          "std %B0,%B1" CR_TAB
                          "std %C0,%C1", op, plen, -3);
    }
  else if (GET_CODE (base) == PRE_DEC) /* (--R) */
    /* Pre-decrement walks downwards, so store high byte first.  */
    return avr_asm_len ("st %0,%C1" CR_TAB
                        "st %0,%B1" CR_TAB
                        "st %0,%A1", op, plen, -3);
  else if (GET_CODE (base) == POST_INC) /* (R++) */
    return avr_asm_len ("st %0,%A1" CR_TAB
                        "st %0,%B1" CR_TAB
                        "st %0,%C1", op, plen, -3);

  fatal_insn ("unknown move insn:",insn);
  return "";
}
3722
 
3723
 
3724
/* Move around 24-bit stuff.  */
3725
 
3726
const char *
3727
avr_out_movpsi (rtx insn, rtx *op, int *plen)
3728
{
3729
  rtx dest = op[0];
3730
  rtx src = op[1];
3731
 
3732
  if (avr_mem_flash_p (src)
3733
      || avr_mem_flash_p (dest))
3734
    {
3735
      return avr_out_lpm (insn, op, plen);
3736
    }
3737
 
3738
  if (register_operand (dest, VOIDmode))
3739
    {
3740
      if (register_operand (src, VOIDmode)) /* mov r,r */
3741
        {
3742
          if (true_regnum (dest) > true_regnum (src))
3743
            {
3744
              avr_asm_len ("mov %C0,%C1", op, plen, -1);
3745
 
3746
              if (AVR_HAVE_MOVW)
3747
                return avr_asm_len ("movw %A0,%A1", op, plen, 1);
3748
              else
3749
                return avr_asm_len ("mov %B0,%B1"  CR_TAB
3750
                                    "mov %A0,%A1", op, plen, 2);
3751
            }
3752
          else
3753
            {
3754
              if (AVR_HAVE_MOVW)
3755
                avr_asm_len ("movw %A0,%A1", op, plen, -1);
3756
              else
3757
                avr_asm_len ("mov %A0,%A1"  CR_TAB
3758
                             "mov %B0,%B1", op, plen, -2);
3759
 
3760
              return avr_asm_len ("mov %C0,%C1", op, plen, 1);
3761
            }
3762
        }
3763
      else if (CONSTANT_P (src))
3764
        {
3765
          return avr_out_reload_inpsi (op, NULL_RTX, plen);
3766
        }
3767
      else if (MEM_P (src))
3768
        return avr_out_load_psi (insn, op, plen); /* mov r,m */
3769
    }
3770
  else if (MEM_P (dest))
3771
    {
3772
      rtx xop[2];
3773
 
3774
      xop[0] = dest;
3775
      xop[1] = src == CONST0_RTX (GET_MODE (dest)) ? zero_reg_rtx : src;
3776
 
3777
      return avr_out_store_psi (insn, xop, plen);
3778
    }
3779
 
3780
  fatal_insn ("invalid insn:", insn);
3781
  return "";
3782
}
3783
 
3784
 
3785
/* Emit assembler for an 8-bit store OP[0] (mem) = OP[1] (reg) for INSN.
   PLEN, if non-NULL, receives the sequence length instead of output.
   Returns "".  */

static const char*
out_movqi_mr_r (rtx insn, rtx op[], int *plen)
{
  rtx dest = op[0];
  rtx src = op[1];
  rtx x = XEXP (dest, 0);

  if (CONSTANT_ADDRESS_P (x))
    {
      /* I/O addresses can use the shorter/faster OUT when optimizing.  */
      return optimize > 0 && io_address_operand (x, QImode)
        ? avr_asm_len ("out %i0,%1", op, plen, -1)
        : avr_asm_len ("sts %m0,%1", op, plen, -2);
    }
  else if (GET_CODE (x) == PLUS
           && REG_P (XEXP (x, 0))
           && CONST_INT_P (XEXP (x, 1)))
    {
      /* memory access by reg+disp */

      int disp = INTVAL (XEXP (x, 1));

      if (disp - GET_MODE_SIZE (GET_MODE (dest)) >= 63)
        {
          /* Displacement beyond the STD range is only handled off Y.  */
          if (REGNO (XEXP (x, 0)) != REG_Y)
            fatal_insn ("incorrect insn:",insn);

          /* Slightly over: bump Y with ADIW and use STD's max offset.  */
          if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
            return avr_asm_len ("adiw r28,%o0-63" CR_TAB
                                "std Y+63,%1"     CR_TAB
                                "sbiw r28,%o0-63", op, plen, -3);

          /* Far away: add the full 16-bit offset to Y and undo it.  */
          return avr_asm_len ("subi r28,lo8(-%o0)" CR_TAB
                              "sbci r29,hi8(-%o0)" CR_TAB
                              "st Y,%1"            CR_TAB
                              "subi r28,lo8(%o0)"  CR_TAB
                              "sbci r29,hi8(%o0)", op, plen, -5);
        }
      else if (REGNO (XEXP (x,0)) == REG_X)
        {
          /* X has no displacement mode; adjust X around the store.  If
             SRC overlaps X, save it in __tmp_reg__ before clobbering X.  */
          if (reg_overlap_mentioned_p (src, XEXP (x, 0)))
            {
              avr_asm_len ("mov __tmp_reg__,%1" CR_TAB
                           "adiw r26,%o0"       CR_TAB
                           "st X,__tmp_reg__", op, plen, -3);
            }
          else
            {
              avr_asm_len ("adiw r26,%o0" CR_TAB
                           "st X,%1", op, plen, -2);
            }

          /* Restore X only if it is still live.  */
          if (!reg_unused_after (insn, XEXP (x,0)))
            avr_asm_len ("sbiw r26,%o0", op, plen, 1);

          return "";
        }

      /* In-range displacement off Y or Z.  */
      return avr_asm_len ("std %0,%1", op, plen, -1);
    }

  /* Plain register-indirect store.  */
  return avr_asm_len ("st %0,%1", op, plen, -1);
}
3847
 
3848
 
3849
/* Helper for the next function for XMEGA.  It does the same
3850
   but with low byte first.  */
3851
 
3852
static const char*
avr_out_movhi_mr_r_xmega (rtx insn, rtx op[], int *plen)
{
  rtx dest = op[0];
  rtx src = op[1];
  rtx base = XEXP (dest, 0);
  int reg_base = true_regnum (base);
  int reg_src = true_regnum (src);

  /* "volatile" forces writing low byte first, even if less efficient,
     for correct operation with 16-bit I/O registers like SP.  */
  int mem_volatile_p = MEM_VOLATILE_P (dest);

  /* Absolute address: OUT for I/O space when optimizing, STS otherwise.  */
  if (CONSTANT_ADDRESS_P (base))
    return optimize > 0 && io_address_operand (base, HImode)
      ? avr_asm_len ("out %i0,%A1" CR_TAB
                     "out %i0+1,%B1", op, plen, -2)

      : avr_asm_len ("sts %m0,%A1" CR_TAB
                     "sts %m0+1,%B1", op, plen, -4);

  if (reg_base > 0)
    {
      /* Y or Z base: STD keeps the base register unchanged.  */
      if (reg_base != REG_X)
        return avr_asm_len ("st %0,%A1" CR_TAB
                            "std %0+1,%B1", op, plen, -2);

      if (reg_src == REG_X)
        /* "st X+,r26" and "st -X,r26" are undefined.  */
        avr_asm_len ("mov __tmp_reg__,r27" CR_TAB
                     "st X,r26"            CR_TAB
                     "adiw r26,1"          CR_TAB
                     "st X,__tmp_reg__", op, plen, -4);
      else
        avr_asm_len ("st X+,%A1" CR_TAB
                     "st X,%B1", op, plen, -2);

      /* Both paths above leave X one past its original value; restore
         it only if X is still live after this insn.  */
      return reg_unused_after (insn, base)
        ? ""
        : avr_asm_len ("sbiw r26,1", op, plen, 1);
    }
  else if (GET_CODE (base) == PLUS)
    {
      int disp = INTVAL (XEXP (base, 1));
      reg_base = REGNO (XEXP (base, 0));
      if (disp > MAX_LD_OFFSET (GET_MODE (dest)))
        {
          /* Out-of-range displacements only off Y.  */
          if (reg_base != REG_Y)
            fatal_insn ("incorrect insn:",insn);

          /* Slightly over: bump Y with ADIW; far away: add/subtract the
             full 16-bit offset.  Low byte is stored first in both.  */
          return disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest))
            ? avr_asm_len ("adiw r28,%o0-62" CR_TAB
                           "std Y+62,%A1"    CR_TAB
                           "std Y+63,%B1"    CR_TAB
                           "sbiw r28,%o0-62", op, plen, -4)

            : avr_asm_len ("subi r28,lo8(-%o0)" CR_TAB
                           "sbci r29,hi8(-%o0)" CR_TAB
                           "st Y,%A1"           CR_TAB
                           "std Y+1,%B1"        CR_TAB
                           "subi r28,lo8(%o0)"  CR_TAB
                           "sbci r29,hi8(%o0)", op, plen, -6);
        }

      if (reg_base != REG_X)
        return avr_asm_len ("std %A0,%A1" CR_TAB
                            "std %B0,%B1", op, plen, -2);
      /* (X + d) = R
         When SRC is X itself, stash r26/r27 in __tmp_reg__/__zero_reg__
         first ("st X+,r26" is undefined), then clear __zero_reg__ again.  */
      return reg_src == REG_X
        ? avr_asm_len ("mov __tmp_reg__,r26"  CR_TAB
                       "mov __zero_reg__,r27" CR_TAB
                       "adiw r26,%o0"         CR_TAB
                       "st X+,__tmp_reg__"    CR_TAB
                       "st X,__zero_reg__"    CR_TAB
                       "clr __zero_reg__"     CR_TAB
                       "sbiw r26,%o0+1", op, plen, -7)

        : avr_asm_len ("adiw r26,%o0" CR_TAB
                       "st X+,%A1"    CR_TAB
                       "st X,%B1"     CR_TAB
                       "sbiw r26,%o0+1", op, plen, -4);
    }
  else if (GET_CODE (base) == PRE_DEC) /* (--R) */
    {
      if (!mem_volatile_p)
        return avr_asm_len ("st %0,%B1" CR_TAB
                            "st %0,%A1", op, plen, -2);

      /* Volatile: rewrite the pre-decrement so the low byte is still
         written first.  */
      return REGNO (XEXP (base, 0)) == REG_X
        ? avr_asm_len ("sbiw r26,2"  CR_TAB
                       "st X+,%A1"   CR_TAB
                       "st X,%B1"    CR_TAB
                       "sbiw r26,1", op, plen, -4)

        : avr_asm_len ("sbiw %r0,2"  CR_TAB
                       "st %p0,%A1"  CR_TAB
                       "std %p0+1,%B1", op, plen, -3);
    }
  else if (GET_CODE (base) == POST_INC) /* (R++) */
    {
      /* Post-increment already stores low byte first.  */
      return avr_asm_len ("st %0,%A1"  CR_TAB
                          "st %0,%B1", op, plen, -2);

    }
  fatal_insn ("unknown move insn:",insn);
  return "";
}
3959
 
3960
 
3961
/* Emit assembler for a 16-bit store OP[0] (mem) = OP[1] (reg) for INSN.
   PLEN, if non-NULL, receives the sequence length instead of output.
   Returns "".  */

static const char*
out_movhi_mr_r (rtx insn, rtx op[], int *plen)
{
  rtx dest = op[0];
  rtx src = op[1];
  rtx base = XEXP (dest, 0);
  int reg_base = true_regnum (base);
  int reg_src = true_regnum (src);
  int mem_volatile_p;

  /* "volatile" forces writing high-byte first (no-xmega) resp.
     low-byte first (xmega) even if less efficient, for correct
     operation with 16-bit I/O registers like SP.  */

  if (AVR_XMEGA)
    return avr_out_movhi_mr_r_xmega (insn, op, plen);

  mem_volatile_p = MEM_VOLATILE_P (dest);

  /* Absolute address: high byte first (non-xmega convention).  */
  if (CONSTANT_ADDRESS_P (base))
    return optimize > 0 && io_address_operand (base, HImode)
      ? avr_asm_len ("out %i0+1,%B1" CR_TAB
                     "out %i0,%A1", op, plen, -2)

      : avr_asm_len ("sts %m0+1,%B1" CR_TAB
                     "sts %m0,%A1", op, plen, -4);

  if (reg_base > 0)
    {
      /* Y or Z base: STD keeps the base register unchanged.  */
      if (reg_base != REG_X)
        return avr_asm_len ("std %0+1,%B1" CR_TAB
                            "st %0,%A1", op, plen, -2);

      if (reg_src == REG_X)
        /* "st X+,r26" and "st -X,r26" are undefined.  */
        return !mem_volatile_p && reg_unused_after (insn, src)
          ? avr_asm_len ("mov __tmp_reg__,r27" CR_TAB
                         "st X,r26"            CR_TAB
                         "adiw r26,1"          CR_TAB
                         "st X,__tmp_reg__", op, plen, -4)

          : avr_asm_len ("mov __tmp_reg__,r27" CR_TAB
                         "adiw r26,1"          CR_TAB
                         "st X,__tmp_reg__"    CR_TAB
                         "sbiw r26,1"          CR_TAB
                         "st X,r26", op, plen, -5);

      /* The cheap post-increment form changes byte order and clobbers X,
         so it is only usable when neither volatility nor liveness of X
         forbids it.  */
      return !mem_volatile_p && reg_unused_after (insn, base)
        ? avr_asm_len ("st X+,%A1" CR_TAB
                       "st X,%B1", op, plen, -2)
        : avr_asm_len ("adiw r26,1" CR_TAB
                       "st X,%B1"   CR_TAB
                       "st -X,%A1", op, plen, -3);
    }
  else if (GET_CODE (base) == PLUS)
    {
      int disp = INTVAL (XEXP (base, 1));
      reg_base = REGNO (XEXP (base, 0));
      if (disp > MAX_LD_OFFSET (GET_MODE (dest)))
        {
          /* Out-of-range displacements only off Y.  */
          if (reg_base != REG_Y)
            fatal_insn ("incorrect insn:",insn);

          /* Slightly over: bump Y with ADIW; far away: add/subtract the
             full 16-bit offset.  High byte is stored first.  */
          return disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest))
            ? avr_asm_len ("adiw r28,%o0-62" CR_TAB
                           "std Y+63,%B1"    CR_TAB
                           "std Y+62,%A1"    CR_TAB
                           "sbiw r28,%o0-62", op, plen, -4)

            : avr_asm_len ("subi r28,lo8(-%o0)" CR_TAB
                           "sbci r29,hi8(-%o0)" CR_TAB
                           "std Y+1,%B1"        CR_TAB
                           "st Y,%A1"           CR_TAB
                           "subi r28,lo8(%o0)"  CR_TAB
                           "sbci r29,hi8(%o0)", op, plen, -6);
        }

      if (reg_base != REG_X)
        return avr_asm_len ("std %B0,%B1" CR_TAB
                            "std %A0,%A1", op, plen, -2);
      /* (X + d) = R
         When SRC is X itself, stash r26/r27 in __tmp_reg__/__zero_reg__
         first ("st -X,r26" is undefined), then clear __zero_reg__.  */
      return reg_src == REG_X
        ? avr_asm_len ("mov __tmp_reg__,r26"  CR_TAB
                       "mov __zero_reg__,r27" CR_TAB
                       "adiw r26,%o0+1"       CR_TAB
                       "st X,__zero_reg__"    CR_TAB
                       "st -X,__tmp_reg__"    CR_TAB
                       "clr __zero_reg__"     CR_TAB
                       "sbiw r26,%o0", op, plen, -7)

        : avr_asm_len ("adiw r26,%o0+1" CR_TAB
                       "st X,%B1"       CR_TAB
                       "st -X,%A1"      CR_TAB
                       "sbiw r26,%o0", op, plen, -4);
    }
  else if (GET_CODE (base) == PRE_DEC) /* (--R) */
    {
      /* Pre-decrement naturally stores the high byte first.  */
      return avr_asm_len ("st %0,%B1" CR_TAB
                          "st %0,%A1", op, plen, -2);
    }
  else if (GET_CODE (base) == POST_INC) /* (R++) */
    {
      if (!mem_volatile_p)
        return avr_asm_len ("st %0,%A1"  CR_TAB
                            "st %0,%B1", op, plen, -2);

      /* Volatile: rewrite so the high byte is written first while the
         base still ends up incremented by 2.  */
      return REGNO (XEXP (base, 0)) == REG_X
        ? avr_asm_len ("adiw r26,1"  CR_TAB
                       "st X,%B1"    CR_TAB
                       "st -X,%A1"   CR_TAB
                       "adiw r26,2", op, plen, -4)

        : avr_asm_len ("std %p0+1,%B1" CR_TAB
                       "st %p0,%A1"    CR_TAB
                       "adiw %r0,2", op, plen, -3);
    }
  fatal_insn ("unknown move insn:",insn);
  return "";
}
4080
 
4081
/* Return 1 if frame pointer for current function required.  */
4082
 
4083
static bool
4084
avr_frame_pointer_required_p (void)
4085
{
4086
  return (cfun->calls_alloca
4087
          || cfun->calls_setjmp
4088
          || cfun->has_nonlocal_label
4089
          || crtl->args.info.nregs == 0
4090
          || get_frame_size () > 0);
4091
}
4092
 
4093
/* Returns the condition of compare insn INSN, or UNKNOWN.  */
4094
 
4095
static RTX_CODE
4096
compare_condition (rtx insn)
4097
{
4098
  rtx next = next_real_insn (insn);
4099
 
4100
  if (next && JUMP_P (next))
4101
    {
4102
      rtx pat = PATTERN (next);
4103
      rtx src = SET_SRC (pat);
4104
 
4105
      if (IF_THEN_ELSE == GET_CODE (src))
4106
        return GET_CODE (XEXP (src, 0));
4107
    }
4108
 
4109
  return UNKNOWN;
4110
}
4111
 
4112
 
4113
/* Returns true iff INSN is a tst insn that only tests the sign.  */
4114
 
4115
static bool
4116
compare_sign_p (rtx insn)
4117
{
4118
  RTX_CODE cond = compare_condition (insn);
4119
  return (cond == GE || cond == LT);
4120
}
4121
 
4122
 
4123
/* Returns true iff the next insn is a JUMP_INSN with a condition
4124
   that needs to be swapped (GT, GTU, LE, LEU).  */
4125
 
4126
static bool
4127
compare_diff_p (rtx insn)
4128
{
4129
  RTX_CODE cond = compare_condition (insn);
4130
  return (cond == GT || cond == GTU || cond == LE || cond == LEU) ? cond : 0;
4131
}
4132
 
4133
/* Returns true iff INSN is a compare insn with the EQ or NE condition.  */
4134
 
4135
static bool
4136
compare_eq_p (rtx insn)
4137
{
4138
  RTX_CODE cond = compare_condition (insn);
4139
  return (cond == EQ || cond == NE);
4140
}
4141
 
4142
 
4143
/* Output compare instruction
4144
 
4145
      compare (XOP[0], XOP[1])
4146
 
4147
   for an HI/SI register XOP[0] and an integer XOP[1].  Return "".
4148
   XOP[2] is an 8-bit scratch register as needed.
4149
 
4150
   PLEN == NULL:  Output instructions.
4151
   PLEN != NULL:  Set *PLEN to the length (in words) of the sequence.
4152
                  Don't output anything.  */
4153
 
4154
const char*
avr_out_compare (rtx insn, rtx *xop, int *plen)
{
  /* Register to compare and value to compare against. */
  rtx xreg = xop[0];
  rtx xval = xop[1];

  /* MODE of the comparison.  */
  enum machine_mode mode = GET_MODE (xreg);

  /* Number of bytes to operate on.  */
  int i, n_bytes = GET_MODE_SIZE (mode);

  /* Value (0..0xff) held in clobber register xop[2] or -1 if unknown.  */
  int clobber_val = -1;

  gcc_assert (REG_P (xreg));
  gcc_assert ((CONST_INT_P (xval) && n_bytes <= 4)
              || (const_double_operand (xval, VOIDmode) && n_bytes == 8));

  if (plen)
    *plen = 0;

  /* Comparisons == +/-1 and != +/-1 can be done similar to comparing
     against 0 by ORing the bytes.  This is one instruction shorter.
     Notice that DImode comparisons are always against reg:DI 18
     and therefore don't use this.  */

  if (!test_hard_reg_class (LD_REGS, xreg)
      && compare_eq_p (insn)
      && reg_unused_after (insn, xreg))
    {
      if (xval == const1_rtx)
        {
          /* x == 1  <=>  (x - 1) == 0: decrement, then OR all bytes.  */
          avr_asm_len ("dec %A0" CR_TAB
                       "or %A0,%B0", xop, plen, 2);

          if (n_bytes >= 3)
            avr_asm_len ("or %A0,%C0", xop, plen, 1);

          if (n_bytes >= 4)
            avr_asm_len ("or %A0,%D0", xop, plen, 1);

          return "";
        }
      else if (xval == constm1_rtx)
        {
          /* x == -1  <=>  ~(B & C & D & A) == 0: AND all bytes, COM.  */
          if (n_bytes >= 4)
            avr_asm_len ("and %A0,%D0", xop, plen, 1);

          if (n_bytes >= 3)
            avr_asm_len ("and %A0,%C0", xop, plen, 1);

          return avr_asm_len ("and %A0,%B0" CR_TAB
                              "com %A0", xop, plen, 2);
        }
    }

  for (i = 0; i < n_bytes; i++)
    {
      /* We compare byte-wise.  */
      rtx reg8 = simplify_gen_subreg (QImode, xreg, mode, i);
      rtx xval8 = simplify_gen_subreg (QImode, xval, mode, i);

      /* 8-bit value to compare with this byte.  */
      unsigned int val8 = UINTVAL (xval8) & GET_MODE_MASK (QImode);

      /* Registers R16..R31 can operate with immediate.  */
      bool ld_reg_p = test_hard_reg_class (LD_REGS, reg8);

      xop[0] = reg8;
      xop[1] = gen_int_mode (val8, QImode);

      /* Word registers >= R24 can use SBIW/ADIW with 0..63.  */

      if (i == 0
          && test_hard_reg_class (ADDW_REGS, reg8))
        {
          int val16 = trunc_int_for_mode (INTVAL (xval), HImode);

          /* SBIW actually subtracts, i.e. clobbers the register pair,
             so it is only allowed when the pair dies here or when the
             subtrahend is zero.  */
          if (IN_RANGE (val16, 0, 63)
              && (val8 == 0
                  || reg_unused_after (insn, xreg)))
            {
              avr_asm_len ("sbiw %0,%1", xop, plen, 1);
              /* SBIW handled two bytes at once; skip the next one.  */
              i++;
              continue;
            }

          if (n_bytes == 2
              && IN_RANGE (val16, -63, -1)
              && compare_eq_p (insn)
              && reg_unused_after (insn, xreg))
            {
              /* x == -d  <=>  (x + d) == 0 for equality tests.  */
              return avr_asm_len ("adiw %0,%n1", xop, plen, 1);
            }
        }

      /* Comparing against 0 is easy.  */

      if (val8 == 0)
        {
          avr_asm_len (i == 0
                       ? "cp %0,__zero_reg__"
                       : "cpc %0,__zero_reg__", xop, plen, 1);
          continue;
        }

      /* Upper registers can compare and subtract-with-carry immediates.
         Notice that compare instructions do the same as respective subtract
         instruction; the only difference is that comparisons don't write
         the result back to the target register.  */

      if (ld_reg_p)
        {
          if (i == 0)
            {
              avr_asm_len ("cpi %0,%1", xop, plen, 1);
              continue;
            }
          else if (reg_unused_after (insn, xreg))
            {
              /* SBCI clobbers the byte, so only if the register dies.  */
              avr_asm_len ("sbci %0,%1", xop, plen, 1);
              continue;
            }
        }

      /* Must load the value into the scratch register.  */

      gcc_assert (REG_P (xop[2]));

      /* Skip the LDI if the scratch already holds this byte value.  */
      if (clobber_val != (int) val8)
        avr_asm_len ("ldi %2,%1", xop, plen, 1);
      clobber_val = (int) val8;

      avr_asm_len (i == 0
                   ? "cp %0,%2"
                   : "cpc %0,%2", xop, plen, 1);
    }

  return "";
}
4296
 
4297
 
4298
/* Prepare operands of compare_const_di2 to be used with avr_out_compare.  */
4299
 
4300
const char*
4301
avr_out_compare64 (rtx insn, rtx *op, int *plen)
4302
{
4303
  rtx xop[3];
4304
 
4305
  xop[0] = gen_rtx_REG (DImode, 18);
4306
  xop[1] = op[0];
4307
  xop[2] = op[1];
4308
 
4309
  return avr_out_compare (insn, xop, plen);
4310
}
4311
 
4312
/* Output test instruction for HImode.  */
4313
 
4314
const char*
4315
avr_out_tsthi (rtx insn, rtx *op, int *plen)
4316
{
4317
  if (compare_sign_p (insn))
4318
    {
4319
      avr_asm_len ("tst %B0", op, plen, -1);
4320
    }
4321
  else if (reg_unused_after (insn, op[0])
4322
           && compare_eq_p (insn))
4323
    {
4324
      /* Faster than sbiw if we can clobber the operand.  */
4325
      avr_asm_len ("or %A0,%B0", op, plen, -1);
4326
    }
4327
  else
4328
    {
4329
      avr_out_compare (insn, op, plen);
4330
    }
4331
 
4332
  return "";
4333
}
4334
 
4335
 
4336
/* Output test instruction for PSImode.  */
4337
 
4338
const char*
4339
avr_out_tstpsi (rtx insn, rtx *op, int *plen)
4340
{
4341
  if (compare_sign_p (insn))
4342
    {
4343
      avr_asm_len ("tst %C0", op, plen, -1);
4344
    }
4345
  else if (reg_unused_after (insn, op[0])
4346
           && compare_eq_p (insn))
4347
    {
4348
      /* Faster than sbiw if we can clobber the operand.  */
4349
      avr_asm_len ("or %A0,%B0" CR_TAB
4350
                   "or %A0,%C0", op, plen, -2);
4351
    }
4352
  else
4353
    {
4354
      avr_out_compare (insn, op, plen);
4355
    }
4356
 
4357
  return "";
4358
}
4359
 
4360
 
4361
/* Output test instruction for SImode.  */
4362
 
4363
const char*
4364
avr_out_tstsi (rtx insn, rtx *op, int *plen)
4365
{
4366
  if (compare_sign_p (insn))
4367
    {
4368
      avr_asm_len ("tst %D0", op, plen, -1);
4369
    }
4370
  else if (reg_unused_after (insn, op[0])
4371
           && compare_eq_p (insn))
4372
    {
4373
      /* Faster than sbiw if we can clobber the operand.  */
4374
      avr_asm_len ("or %A0,%B0" CR_TAB
4375
                   "or %A0,%C0" CR_TAB
4376
                   "or %A0,%D0", op, plen, -3);
4377
    }
4378
  else
4379
    {
4380
      avr_out_compare (insn, op, plen);
4381
    }
4382
 
4383
  return "";
4384
}
4385
 
4386
 
4387
/* Generate asm equivalent for various shifts.  This only handles cases
4388
   that are not already carefully hand-optimized in ?sh??i3_out.
4389
 
4390
   OPERANDS[0] resp. %0 in TEMPL is the operand to be shifted.
4391
   OPERANDS[2] is the shift count as CONST_INT, MEM or REG.
4392
   OPERANDS[3] is a QImode scratch register from LD regs if
4393
               available and SCRATCH, otherwise (no scratch available)
4394
 
4395
   TEMPL is an assembler template that shifts by one position.
4396
   T_LEN is the length of this template.  */
4397
 
4398
void
out_shift_with_cnt (const char *templ, rtx insn, rtx operands[],
                    int *plen, int t_len)
{
  /* second_label: the loop counter may start at 0, so jump into the
     loop at its exit test first.  */
  bool second_label = true;
  /* saved_in_tmp: an LD reg was commandeered as counter and its old
     value parked in __tmp_reg__; restore it at the end.  */
  bool saved_in_tmp = false;
  /* use_zero_reg: __zero_reg__ serves as loop counter (one-bit trick).  */
  bool use_zero_reg = false;
  rtx op[5];

  op[0] = operands[0];
  op[1] = operands[1];
  op[2] = operands[2];
  op[3] = operands[3];

  if (plen)
    *plen = 0;

  if (CONST_INT_P (operands[2]))
    {
      /* A real scratch is only present when the insn is a PARALLEL
         with a register in operands[3].  */
      bool scratch = (GET_CODE (PATTERN (insn)) == PARALLEL
                      && REG_P (operands[3]));
      int count = INTVAL (operands[2]);
      int max_len = 10;  /* If larger than this, always use a loop.  */

      if (count <= 0)
          return;

      if (count < 8 && !scratch)
        use_zero_reg = true;

      /* When optimizing for size, loop as soon as the loop overhead
         (counter setup + back branch) would be cheaper.  */
      if (optimize_size)
        max_len = t_len + (scratch ? 3 : (use_zero_reg ? 4 : 5));

      if (t_len * count <= max_len)
        {
          /* Output shifts inline with no loop - faster.  */

          while (count-- > 0)
            avr_asm_len (templ, op, plen, t_len);

          return;
        }

      if (scratch)
        {
          avr_asm_len ("ldi %3,%2", op, plen, 1);
        }
      else if (use_zero_reg)
        {
          /* Hack to save one word: use __zero_reg__ as loop counter.
             Set one bit, then shift in a loop until it is 0 again.  */

          op[3] = zero_reg_rtx;

          avr_asm_len ("set" CR_TAB
                       "bld %3,%2-1", op, plen, 2);
        }
      else
        {
          /* No scratch register available, use one from LD_REGS (saved in
             __tmp_reg__) that doesn't overlap with registers to shift.  */

          op[3] = all_regs_rtx[((REGNO (op[0]) - 1) & 15) + 16];
          op[4] = tmp_reg_rtx;
          saved_in_tmp = true;

          avr_asm_len ("mov %4,%3" CR_TAB
                       "ldi %3,%2", op, plen, 2);
        }

      /* Constant count > 0 never needs the entry jump.  */
      second_label = false;
    }
  else if (MEM_P (op[2]))
    {
      /* Shift count in memory: load it into __tmp_reg__ first.  */
      rtx op_mov[2];

      op_mov[0] = op[3] = tmp_reg_rtx;
      op_mov[1] = op[2];

      out_movqi_r_mr (insn, op_mov, plen);
    }
  else if (register_operand (op[2], QImode))
    {
      op[3] = op[2];

      /* Copy the count if it must survive or overlaps the shiftee.  */
      if (!reg_unused_after (insn, op[2])
          || reg_overlap_mentioned_p (op[0], op[2]))
        {
          op[3] = tmp_reg_rtx;
          avr_asm_len ("mov %3,%2", op, plen, 1);
        }
    }
  else
    fatal_insn ("bad shift insn:", insn);

  if (second_label)
      avr_asm_len ("rjmp 2f", op, plen, 1);

  avr_asm_len ("1:", op, plen, 0);
  avr_asm_len (templ, op, plen, t_len);

  if (second_label)
    avr_asm_len ("2:", op, plen, 0);

  /* __zero_reg__ counter: shift its single bit out (LSR) and loop while
     positive; normal counter: decrement and loop while nonzero.  */
  avr_asm_len (use_zero_reg ? "lsr %3" : "dec %3", op, plen, 1);
  avr_asm_len (second_label ? "brpl 1b" : "brne 1b", op, plen, 1);

  if (saved_in_tmp)
    avr_asm_len ("mov %3,%4", op, plen, 1);
}
4508
 
4509
 
4510
/* 8bit shift left ((char)x << i)   */
4511
 
4512
const char *
ashlqi3_out (rtx insn, rtx operands[], int *len)
{
  if (GET_CODE (operands[2]) == CONST_INT)
    {
      int k;

      /* Callers may pass LEN == NULL; use a dummy so the cases below
         can set *len unconditionally.  */
      if (!len)
        len = &k;

      switch (INTVAL (operands[2]))
        {
        default:
          if (INTVAL (operands[2]) < 8)
            break;

          /* Shift count >= 8: result is zero.  */
          *len = 1;
          return "clr %0";

        case 1:
          *len = 1;
          return "lsl %0";

        case 2:
          *len = 2;
          return ("lsl %0" CR_TAB
                  "lsl %0");

        case 3:
          *len = 3;
          return ("lsl %0" CR_TAB
                  "lsl %0" CR_TAB
                  "lsl %0");

        case 4:
          /* LD regs can take ANDI: swap nibbles, mask low one away.  */
          if (test_hard_reg_class (LD_REGS, operands[0]))
            {
              *len = 2;
              return ("swap %0" CR_TAB
                      "andi %0,0xf0");
            }
          *len = 4;
          return ("lsl %0" CR_TAB
                  "lsl %0" CR_TAB
                  "lsl %0" CR_TAB
                  "lsl %0");

        case 5:
          if (test_hard_reg_class (LD_REGS, operands[0]))
            {
              *len = 3;
              return ("swap %0" CR_TAB
                      "lsl %0"  CR_TAB
                      "andi %0,0xe0");
            }
          *len = 5;
          return ("lsl %0" CR_TAB
                  "lsl %0" CR_TAB
                  "lsl %0" CR_TAB
                  "lsl %0" CR_TAB
                  "lsl %0");

        case 6:
          if (test_hard_reg_class (LD_REGS, operands[0]))
            {
              *len = 4;
              return ("swap %0" CR_TAB
                      "lsl %0"  CR_TAB
                      "lsl %0"  CR_TAB
                      "andi %0,0xc0");
            }
          *len = 6;
          return ("lsl %0" CR_TAB
                  "lsl %0" CR_TAB
                  "lsl %0" CR_TAB
                  "lsl %0" CR_TAB
                  "lsl %0" CR_TAB
                  "lsl %0");

        case 7:
          /* x << 7: rotate bit 0 into carry, clear, rotate carry into
             bit 7.  */
          *len = 3;
          return ("ror %0" CR_TAB
                  "clr %0" CR_TAB
                  "ror %0");
        }
    }
  else if (CONSTANT_P (operands[2]))
    fatal_insn ("internal compiler error.  Incorrect shift:", insn);

  /* Variable count (or small constant falling through): emit a loop.  */
  out_shift_with_cnt ("lsl %0",
                      insn, operands, len, 1);
  return "";
}
4605
 
4606
 
4607
/* 16bit shift left ((short)x << i)   */

/* Output the asm template for a 16-bit left shift of operands[1] by
   operands[2] into operands[0].  For constant shift counts a hand-tuned
   sequence is selected, taking into account whether the destination is in
   LD_REGS (so ANDI/LDI are usable), whether the insn PARALLEL carries a
   scratch register (%3), whether the device has MUL, and optimize_size.
   When LEN is non-NULL, *LEN is set to the instruction count of the
   returned template.  Counts with no special sequence fall through to the
   generic loop emitted by out_shift_with_cnt.  */

const char *
ashlhi3_out (rtx insn, rtx operands[], int *len)
{
  if (GET_CODE (operands[2]) == CONST_INT)
    {
      /* A PARALLEL pattern means a scratch register is available as %3.  */
      int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
      int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
      int k;
      int *t = len;

      if (!len)
        len = &k;

      switch (INTVAL (operands[2]))
        {
        default:
          if (INTVAL (operands[2]) < 16)
            break;

          /* Shift count >= 16: result is zero.  */
          *len = 2;
          return ("clr %B0" CR_TAB
                  "clr %A0");

        case 4:
          if (optimize_size && scratch)
            break;  /* 5 */
          if (ldi_ok)
            {
              /* Nibble-swap both bytes, then merge the cross-byte nibble
                 with the classic eor/andi/eor exchange.  */
              *len = 6;
              return ("swap %A0"      CR_TAB
                      "swap %B0"      CR_TAB
                      "andi %B0,0xf0" CR_TAB
                      "eor %B0,%A0"   CR_TAB
                      "andi %A0,0xf0" CR_TAB
                      "eor %B0,%A0");
            }
          if (scratch)
            {
              /* Same idea, but the 0xf0 mask lives in the scratch reg.  */
              *len = 7;
              return ("swap %A0"    CR_TAB
                      "swap %B0"    CR_TAB
                      "ldi %3,0xf0" CR_TAB
                      "and %B0,%3"  CR_TAB
                      "eor %B0,%A0" CR_TAB
                      "and %A0,%3"  CR_TAB
                      "eor %B0,%A0");
            }
          break;  /* optimize_size ? 6 : 8 */

        case 5:
          if (optimize_size)
            break;  /* scratch ? 5 : 6 */
          if (ldi_ok)
            {
              /* Shift by 1, then reuse the 4-bit swap sequence.  */
              *len = 8;
              return ("lsl %A0"       CR_TAB
                      "rol %B0"       CR_TAB
                      "swap %A0"      CR_TAB
                      "swap %B0"      CR_TAB
                      "andi %B0,0xf0" CR_TAB
                      "eor %B0,%A0"   CR_TAB
                      "andi %A0,0xf0" CR_TAB
                      "eor %B0,%A0");
            }
          if (scratch)
            {
              *len = 9;
              return ("lsl %A0"     CR_TAB
                      "rol %B0"     CR_TAB
                      "swap %A0"    CR_TAB
                      "swap %B0"    CR_TAB
                      "ldi %3,0xf0" CR_TAB
                      "and %B0,%3"  CR_TAB
                      "eor %B0,%A0" CR_TAB
                      "and %A0,%3"  CR_TAB
                      "eor %B0,%A0");
            }
          break;  /* 10 */

        case 6:
          if (optimize_size)
            break;  /* scratch ? 5 : 6 */
          /* Implement << 6 as >> 2 into __tmp_reg__ followed by a
             byte move (x << 6 == (x >> 2) << 8 combined with tmp).  */
          *len = 9;
          return ("clr __tmp_reg__" CR_TAB
                  "lsr %B0"         CR_TAB
                  "ror %A0"         CR_TAB
                  "ror __tmp_reg__" CR_TAB
                  "lsr %B0"         CR_TAB
                  "ror %A0"         CR_TAB
                  "ror __tmp_reg__" CR_TAB
                  "mov %B0,%A0"     CR_TAB
                  "mov %A0,__tmp_reg__");

        case 7:
          /* << 7: shift right once, then move bytes up through carry.  */
          *len = 5;
          return ("lsr %B0"     CR_TAB
                  "mov %B0,%A0" CR_TAB
                  "clr %A0"     CR_TAB
                  "ror %B0"     CR_TAB
                  "ror %A0");

        case 8:
          /* Whole-byte shift: high byte := low byte of source.  */
          return *len = 2, ("mov %B0,%A1" CR_TAB
                            "clr %A0");

        case 9:
          *len = 3;
          return ("mov %B0,%A0" CR_TAB
                  "clr %A0"     CR_TAB
                  "lsl %B0");

        case 10:
          *len = 4;
          return ("mov %B0,%A0" CR_TAB
                  "clr %A0"     CR_TAB
                  "lsl %B0"     CR_TAB
                  "lsl %B0");

        case 11:
          *len = 5;
          return ("mov %B0,%A0" CR_TAB
                  "clr %A0"     CR_TAB
                  "lsl %B0"     CR_TAB
                  "lsl %B0"     CR_TAB
                  "lsl %B0");

        case 12:
          if (ldi_ok)
            {
              /* Byte move plus nibble swap of the high byte.  */
              *len = 4;
              return ("mov %B0,%A0" CR_TAB
                      "clr %A0"     CR_TAB
                      "swap %B0"    CR_TAB
                      "andi %B0,0xf0");
            }
          if (scratch)
            {
              *len = 5;
              return ("mov %B0,%A0" CR_TAB
                      "clr %A0"     CR_TAB
                      "swap %B0"    CR_TAB
                      "ldi %3,0xf0" CR_TAB
                      "and %B0,%3");
            }
          *len = 6;
          return ("mov %B0,%A0" CR_TAB
                  "clr %A0"     CR_TAB
                  "lsl %B0"     CR_TAB
                  "lsl %B0"     CR_TAB
                  "lsl %B0"     CR_TAB
                  "lsl %B0");

        case 13:
          if (ldi_ok)
            {
              *len = 5;
              return ("mov %B0,%A0" CR_TAB
                      "clr %A0"     CR_TAB
                      "swap %B0"    CR_TAB
                      "lsl %B0"     CR_TAB
                      "andi %B0,0xe0");
            }
          if (AVR_HAVE_MUL && scratch)
            {
              /* Multiply by 0x20 == << 5, result's low byte in r0 becomes
                 the high byte; r1 (__zero_reg__) must be re-cleared.  */
              *len = 5;
              return ("ldi %3,0x20" CR_TAB
                      "mul %A0,%3"  CR_TAB
                      "mov %B0,r0"  CR_TAB
                      "clr %A0"     CR_TAB
                      "clr __zero_reg__");
            }
          if (optimize_size && scratch)
            break;  /* 5 */
          if (scratch)
            {
              *len = 6;
              return ("mov %B0,%A0" CR_TAB
                      "clr %A0"     CR_TAB
                      "swap %B0"    CR_TAB
                      "lsl %B0"     CR_TAB
                      "ldi %3,0xe0" CR_TAB
                      "and %B0,%3");
            }
          if (AVR_HAVE_MUL)
            {
              /* Build the 0x20 multiplier in r1 via set/bld, then MUL.  */
              *len = 6;
              return ("set"            CR_TAB
                      "bld r1,5"   CR_TAB
                      "mul %A0,r1" CR_TAB
                      "mov %B0,r0" CR_TAB
                      "clr %A0"    CR_TAB
                      "clr __zero_reg__");
            }
          *len = 7;
          return ("mov %B0,%A0" CR_TAB
                  "clr %A0"     CR_TAB
                  "lsl %B0"     CR_TAB
                  "lsl %B0"     CR_TAB
                  "lsl %B0"     CR_TAB
                  "lsl %B0"     CR_TAB
                  "lsl %B0");

        case 14:
          if (AVR_HAVE_MUL && ldi_ok)
            {
              /* Multiply by 0x40 == << 6.  */
              *len = 5;
              return ("ldi %B0,0x40" CR_TAB
                      "mul %A0,%B0"  CR_TAB
                      "mov %B0,r0"   CR_TAB
                      "clr %A0"      CR_TAB
                      "clr __zero_reg__");
            }
          if (AVR_HAVE_MUL && scratch)
            {
              *len = 5;
              return ("ldi %3,0x40" CR_TAB
                      "mul %A0,%3"  CR_TAB
                      "mov %B0,r0"  CR_TAB
                      "clr %A0"     CR_TAB
                      "clr __zero_reg__");
            }
          if (optimize_size && ldi_ok)
            {
              /* Small loop: shift the high byte 6 times.  */
              *len = 5;
              return ("mov %B0,%A0" CR_TAB
                      "ldi %A0,6" "\n1:\t"
                      "lsl %B0"     CR_TAB
                      "dec %A0"     CR_TAB
                      "brne 1b");
            }
          if (optimize_size && scratch)
            break;  /* 5 */
          /* << 14 implemented as >> 2 into the high byte.  */
          *len = 6;
          return ("clr %B0" CR_TAB
                  "lsr %A0" CR_TAB
                  "ror %B0" CR_TAB
                  "lsr %A0" CR_TAB
                  "ror %B0" CR_TAB
                  "clr %A0");

        case 15:
          /* Only bit 0 survives, moved into bit 15.  */
          *len = 4;
          return ("clr %B0" CR_TAB
                  "lsr %A0" CR_TAB
                  "ror %B0" CR_TAB
                  "clr %A0");
        }
      len = t;
    }
  out_shift_with_cnt ("lsl %A0" CR_TAB
                      "rol %B0", insn, operands, len, 2);
  return "";
}
4862
 
4863
 
4864
/* 24-bit shift left */
4865
 
4866
const char*
4867
avr_out_ashlpsi3 (rtx insn, rtx *op, int *plen)
4868
{
4869
  if (plen)
4870
    *plen = 0;
4871
 
4872
  if (CONST_INT_P (op[2]))
4873
    {
4874
      switch (INTVAL (op[2]))
4875
        {
4876
        default:
4877
          if (INTVAL (op[2]) < 24)
4878
            break;
4879
 
4880
          return avr_asm_len ("clr %A0" CR_TAB
4881
                              "clr %B0" CR_TAB
4882
                              "clr %C0", op, plen, 3);
4883
 
4884
        case 8:
4885
          {
4886
            int reg0 = REGNO (op[0]);
4887
            int reg1 = REGNO (op[1]);
4888
 
4889
            if (reg0 >= reg1)
4890
              return avr_asm_len ("mov %C0,%B1"  CR_TAB
4891
                                  "mov %B0,%A1"  CR_TAB
4892
                                  "clr %A0", op, plen, 3);
4893
            else
4894
              return avr_asm_len ("clr %A0"      CR_TAB
4895
                                  "mov %B0,%A1"  CR_TAB
4896
                                  "mov %C0,%B1", op, plen, 3);
4897
          }
4898
 
4899
        case 16:
4900
          {
4901
            int reg0 = REGNO (op[0]);
4902
            int reg1 = REGNO (op[1]);
4903
 
4904
            if (reg0 + 2 != reg1)
4905
              avr_asm_len ("mov %C0,%A0", op, plen, 1);
4906
 
4907
            return avr_asm_len ("clr %B0"  CR_TAB
4908
                                "clr %A0", op, plen, 2);
4909
          }
4910
 
4911
        case 23:
4912
          return avr_asm_len ("clr %C0" CR_TAB
4913
                              "lsr %A0" CR_TAB
4914
                              "ror %C0" CR_TAB
4915
                              "clr %B0" CR_TAB
4916
                              "clr %A0", op, plen, 5);
4917
        }
4918
    }
4919
 
4920
  out_shift_with_cnt ("lsl %A0" CR_TAB
4921
                      "rol %B0" CR_TAB
4922
                      "rol %C0", insn, op, plen, 3);
4923
  return "";
4924
}
4925
 
4926
 
4927
/* 32bit shift left ((long)x << i)   */

/* Output the asm template for a 32-bit left shift of operands[1] by
   operands[2] into operands[0].  Whole-byte constant counts (8, 16, 24),
   31 and >= 32 get dedicated sequences, using MOVW where available and
   choosing a byte-copy direction that is safe for overlapping register
   pairs.  When LEN is non-NULL, *LEN receives the instruction count of
   the returned template.  Other counts use out_shift_with_cnt.  */

const char *
ashlsi3_out (rtx insn, rtx operands[], int *len)
{
  if (GET_CODE (operands[2]) == CONST_INT)
    {
      int k;
      int *t = len;

      if (!len)
        len = &k;

      switch (INTVAL (operands[2]))
        {
        default:
          if (INTVAL (operands[2]) < 32)
            break;

          /* Count >= 32: result is zero.  */
          if (AVR_HAVE_MOVW)
            return *len = 3, ("clr %D0" CR_TAB
                              "clr %C0" CR_TAB
                              "movw %A0,%C0");
          *len = 4;
          return ("clr %D0" CR_TAB
                  "clr %C0" CR_TAB
                  "clr %B0" CR_TAB
                  "clr %A0");

        case 8:
          {
            int reg0 = true_regnum (operands[0]);
            int reg1 = true_regnum (operands[1]);
            *len = 4;
            /* Copy top-down or bottom-up depending on overlap, so no
               source byte is clobbered before it is read.  */
            if (reg0 >= reg1)
              return ("mov %D0,%C1"  CR_TAB
                      "mov %C0,%B1"  CR_TAB
                      "mov %B0,%A1"  CR_TAB
                      "clr %A0");
            else
              return ("clr %A0"      CR_TAB
                      "mov %B0,%A1"  CR_TAB
                      "mov %C0,%B1"  CR_TAB
                      "mov %D0,%C1");
          }

        case 16:
          {
            int reg0 = true_regnum (operands[0]);
            int reg1 = true_regnum (operands[1]);
            /* If the destination's upper word already aliases the source's
               lower word, only the clears are needed.  */
            if (reg0 + 2 == reg1)
              return *len = 2, ("clr %B0"      CR_TAB
                                "clr %A0");
            if (AVR_HAVE_MOVW)
              return *len = 3, ("movw %C0,%A1" CR_TAB
                                "clr %B0"      CR_TAB
                                "clr %A0");
            else
              return *len = 4, ("mov %C0,%A1"  CR_TAB
                                "mov %D0,%B1"  CR_TAB
                                "clr %B0"      CR_TAB
                                "clr %A0");
          }

        case 24:
          /* Low byte of source becomes the top byte.  */
          *len = 4;
          return ("mov %D0,%A1"  CR_TAB
                  "clr %C0"      CR_TAB
                  "clr %B0"      CR_TAB
                  "clr %A0");

        case 31:
          /* Only bit 0 survives, moved into bit 31 via carry.  */
          *len = 6;
          return ("clr %D0" CR_TAB
                  "lsr %A0" CR_TAB
                  "ror %D0" CR_TAB
                  "clr %C0" CR_TAB
                  "clr %B0" CR_TAB
                  "clr %A0");
        }
      len = t;
    }
  out_shift_with_cnt ("lsl %A0" CR_TAB
                      "rol %B0" CR_TAB
                      "rol %C0" CR_TAB
                      "rol %D0", insn, operands, len, 4);
  return "";
}
5015
 
5016
/* 8bit arithmetic shift right  ((signed char)x >> i) */
5017
 
5018
const char *
5019
ashrqi3_out (rtx insn, rtx operands[], int *len)
5020
{
5021
  if (GET_CODE (operands[2]) == CONST_INT)
5022
    {
5023
      int k;
5024
 
5025
      if (!len)
5026
        len = &k;
5027
 
5028
      switch (INTVAL (operands[2]))
5029
        {
5030
        case 1:
5031
          *len = 1;
5032
          return "asr %0";
5033
 
5034
        case 2:
5035
          *len = 2;
5036
          return ("asr %0" CR_TAB
5037
                  "asr %0");
5038
 
5039
        case 3:
5040
          *len = 3;
5041
          return ("asr %0" CR_TAB
5042
                  "asr %0" CR_TAB
5043
                  "asr %0");
5044
 
5045
        case 4:
5046
          *len = 4;
5047
          return ("asr %0" CR_TAB
5048
                  "asr %0" CR_TAB
5049
                  "asr %0" CR_TAB
5050
                  "asr %0");
5051
 
5052
        case 5:
5053
          *len = 5;
5054
          return ("asr %0" CR_TAB
5055
                  "asr %0" CR_TAB
5056
                  "asr %0" CR_TAB
5057
                  "asr %0" CR_TAB
5058
                  "asr %0");
5059
 
5060
        case 6:
5061
          *len = 4;
5062
          return ("bst %0,6"  CR_TAB
5063
                  "lsl %0"    CR_TAB
5064
                  "sbc %0,%0" CR_TAB
5065
                  "bld %0,0");
5066
 
5067
        default:
5068
          if (INTVAL (operands[2]) < 8)
5069
            break;
5070
 
5071
          /* fall through */
5072
 
5073
        case 7:
5074
          *len = 2;
5075
          return ("lsl %0" CR_TAB
5076
                  "sbc %0,%0");
5077
        }
5078
    }
5079
  else if (CONSTANT_P (operands[2]))
5080
    fatal_insn ("internal compiler error.  Incorrect shift:", insn);
5081
 
5082
  out_shift_with_cnt ("asr %0",
5083
                      insn, operands, len, 1);
5084
  return "";
5085
}
5086
 
5087
 
5088
/* 16bit arithmetic shift right  ((signed short)x >> i) */

/* Output the asm template for a 16-bit arithmetic right shift of
   operands[1] by operands[2] into operands[0].  Constant counts 6..15 get
   dedicated sequences; sign extension is done with the lsl/sbc idiom
   (sbc of a register with itself after shifting the sign into carry).
   MUL-capable devices can use muls with a power-of-two factor instead.
   When LEN is non-NULL, *LEN receives the instruction count of the
   returned template.  Other counts use out_shift_with_cnt.  */

const char *
ashrhi3_out (rtx insn, rtx operands[], int *len)
{
  if (GET_CODE (operands[2]) == CONST_INT)
    {
      /* A PARALLEL pattern means a scratch register is available as %3.  */
      int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
      int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
      int k;
      int *t = len;

      if (!len)
        len = &k;

      switch (INTVAL (operands[2]))
        {
        case 4:
        case 5:
          /* XXX try to optimize this too? */
          break;

        case 6:
          if (optimize_size)
            break;  /* scratch ? 5 : 6 */
          /* >> 6 done as << 2 of the three-byte value tmp:A:B.  */
          *len = 8;
          return ("mov __tmp_reg__,%A0" CR_TAB
                  "mov %A0,%B0"         CR_TAB
                  "lsl __tmp_reg__"     CR_TAB
                  "rol %A0"             CR_TAB
                  "sbc %B0,%B0"         CR_TAB
                  "lsl __tmp_reg__"     CR_TAB
                  "rol %A0"             CR_TAB
                  "rol %B0");

        case 7:
          /* >> 7 done as one left shift plus byte move through carry.  */
          *len = 4;
          return ("lsl %A0"     CR_TAB
                  "mov %A0,%B0" CR_TAB
                  "rol %A0"     CR_TAB
                  "sbc %B0,%B0");

        case 8:
          {
            int reg0 = true_regnum (operands[0]);
            int reg1 = true_regnum (operands[1]);

            /* Byte move plus sign extension of the high byte.  */
            if (reg0 == reg1)
              return *len = 3, ("mov %A0,%B0" CR_TAB
                                "lsl %B0"     CR_TAB
                                "sbc %B0,%B0");
            else
              return *len = 4, ("mov %A0,%B1" CR_TAB
                                "clr %B0"     CR_TAB
                                "sbrc %A0,7"  CR_TAB
                                "dec %B0");
          }

        case 9:
          *len = 4;
          return ("mov %A0,%B0" CR_TAB
                  "lsl %B0"      CR_TAB
                  "sbc %B0,%B0" CR_TAB
                  "asr %A0");

        case 10:
          *len = 5;
          return ("mov %A0,%B0" CR_TAB
                  "lsl %B0"     CR_TAB
                  "sbc %B0,%B0" CR_TAB
                  "asr %A0"     CR_TAB
                  "asr %A0");

        case 11:
          if (AVR_HAVE_MUL && ldi_ok)
            {
              /* Signed multiply by 0x20; high byte lands in r1, which
                 doubles as __zero_reg__ and must be re-cleared.  */
              *len = 5;
              return ("ldi %A0,0x20" CR_TAB
                      "muls %B0,%A0" CR_TAB
                      "mov %A0,r1"   CR_TAB
                      "sbc %B0,%B0"  CR_TAB
                      "clr __zero_reg__");
            }
          if (optimize_size && scratch)
            break;  /* 5 */
          *len = 6;
          return ("mov %A0,%B0" CR_TAB
                  "lsl %B0"     CR_TAB
                  "sbc %B0,%B0" CR_TAB
                  "asr %A0"     CR_TAB
                  "asr %A0"     CR_TAB
                  "asr %A0");

        case 12:
          if (AVR_HAVE_MUL && ldi_ok)
            {
              *len = 5;
              return ("ldi %A0,0x10" CR_TAB
                      "muls %B0,%A0" CR_TAB
                      "mov %A0,r1"   CR_TAB
                      "sbc %B0,%B0"  CR_TAB
                      "clr __zero_reg__");
            }
          if (optimize_size && scratch)
            break;  /* 5 */
          *len = 7;
          return ("mov %A0,%B0" CR_TAB
                  "lsl %B0"     CR_TAB
                  "sbc %B0,%B0" CR_TAB
                  "asr %A0"     CR_TAB
                  "asr %A0"     CR_TAB
                  "asr %A0"     CR_TAB
                  "asr %A0");

        case 13:
          if (AVR_HAVE_MUL && ldi_ok)
            {
              *len = 5;
              return ("ldi %A0,0x08" CR_TAB
                      "muls %B0,%A0" CR_TAB
                      "mov %A0,r1"   CR_TAB
                      "sbc %B0,%B0"  CR_TAB
                      "clr __zero_reg__");
            }
          if (optimize_size)
            break;  /* scratch ? 5 : 7 */
          *len = 8;
          return ("mov %A0,%B0" CR_TAB
                  "lsl %B0"     CR_TAB
                  "sbc %B0,%B0" CR_TAB
                  "asr %A0"     CR_TAB
                  "asr %A0"     CR_TAB
                  "asr %A0"     CR_TAB
                  "asr %A0"     CR_TAB
                  "asr %A0");

        case 14:
          /* >> 14 done as << 2 with sign fill.  */
          *len = 5;
          return ("lsl %B0"     CR_TAB
                  "sbc %A0,%A0" CR_TAB
                  "lsl %B0"     CR_TAB
                  "mov %B0,%A0" CR_TAB
                  "rol %A0");

        default:
          if (INTVAL (operands[2]) < 16)
            break;

          /* fall through */

        case 15:
          /* Count >= 15: only the replicated sign bit remains.  */
          return *len = 3, ("lsl %B0"     CR_TAB
                            "sbc %A0,%A0" CR_TAB
                            "mov %B0,%A0");
        }
      len = t;
    }
  out_shift_with_cnt ("asr %B0" CR_TAB
                      "ror %A0", insn, operands, len, 2);
  return "";
}
5249
 
5250
 
5251
/* 24-bit arithmetic shift right */

/* Output the asm template for a 24-bit (PSImode) arithmetic right shift of
   OP[1] by OP[2] into OP[0].  Constant counts 8, 16 and >= 23 get
   dedicated sequences whose byte-copy order depends on register overlap;
   all other counts go through out_shift_with_cnt.  When PLEN is non-NULL
   it is reset and accumulates the instruction count via avr_asm_len.  */

const char*
avr_out_ashrpsi3 (rtx insn, rtx *op, int *plen)
{
  int dest = REGNO (op[0]);
  int src = REGNO (op[1]);

  if (CONST_INT_P (op[2]))
    {
      if (plen)
        *plen = 0;

      switch (INTVAL (op[2]))
        {
        case 8:
          /* Move bytes down by one and sign-extend the top byte; copy
             direction chosen so overlapping registers are safe.  */
          if (dest <= src)
            return avr_asm_len ("mov %A0,%B1" CR_TAB
                                "mov %B0,%C1" CR_TAB
                                "clr %C0"     CR_TAB
                                "sbrc %B0,7"  CR_TAB
                                "dec %C0", op, plen, 5);
          else
            return avr_asm_len ("clr %C0"     CR_TAB
                                "sbrc %C1,7"  CR_TAB
                                "dec %C0"     CR_TAB
                                "mov %B0,%C1" CR_TAB
                                "mov %A0,%B1", op, plen, 5);

        case 16:
          /* Copy the source's top byte down unless it already aliases
             the destination's low byte, then sign-extend.  */
          if (dest != src + 2)
            avr_asm_len ("mov %A0,%C1", op, plen, 1);

          return avr_asm_len ("clr %B0"     CR_TAB
                              "sbrc %A0,7"  CR_TAB
                              "com %B0"     CR_TAB
                              "mov %C0,%B0", op, plen, 4);

        default:
          if (INTVAL (op[2]) < 24)
            break;

          /* fall through */

        case 23:
          /* Count >= 23: only the replicated sign bit remains.  */
          return avr_asm_len ("lsl %C0"     CR_TAB
                              "sbc %A0,%A0" CR_TAB
                              "mov %B0,%A0" CR_TAB
                              "mov %C0,%A0", op, plen, 4);
        } /* switch */
    }

  out_shift_with_cnt ("asr %C0" CR_TAB
                      "ror %B0" CR_TAB
                      "ror %A0", insn, op, plen, 3);
  return "";
}
5308
 
5309
 
5310
/* 32bit arithmetic shift right  ((signed long)x >> i) */

/* Output the asm template for a 32-bit arithmetic right shift of
   operands[1] by operands[2] into operands[0].  Whole-byte constant
   counts (8, 16, 24) and >= 31 get dedicated byte-move sequences with
   sign extension; MOVW is used where available.  When LEN is non-NULL,
   *LEN receives the instruction count.  Other counts fall through to the
   generic loop in out_shift_with_cnt.  */

const char *
ashrsi3_out (rtx insn, rtx operands[], int *len)
{
  if (GET_CODE (operands[2]) == CONST_INT)
    {
      int k;
      int *t = len;

      if (!len)
        len = &k;

      switch (INTVAL (operands[2]))
        {
        case 8:
          {
            int reg0 = true_regnum (operands[0]);
            int reg1 = true_regnum (operands[1]);
            *len=6;
            /* Copy direction chosen so overlapping registers are safe.  */
            if (reg0 <= reg1)
              return ("mov %A0,%B1" CR_TAB
                      "mov %B0,%C1" CR_TAB
                      "mov %C0,%D1" CR_TAB
                      "clr %D0"     CR_TAB
                      "sbrc %C0,7"  CR_TAB
                      "dec %D0");
            else
              return ("clr %D0"     CR_TAB
                      "sbrc %D1,7"  CR_TAB
                      "dec %D0"     CR_TAB
                      "mov %C0,%D1" CR_TAB
                      "mov %B0,%C1" CR_TAB
                      "mov %A0,%B1");
          }

        case 16:
          {
            int reg0 = true_regnum (operands[0]);
            int reg1 = true_regnum (operands[1]);

            /* If the destination's low word already aliases the source's
               high word, only the sign extension is needed.  */
            if (reg0 == reg1 + 2)
              return *len = 4, ("clr %D0"     CR_TAB
                                "sbrc %B0,7"  CR_TAB
                                "com %D0"     CR_TAB
                                "mov %C0,%D0");
            if (AVR_HAVE_MOVW)
              return *len = 5, ("movw %A0,%C1" CR_TAB
                                "clr %D0"      CR_TAB
                                "sbrc %B0,7"   CR_TAB
                                "com %D0"      CR_TAB
                                "mov %C0,%D0");
            else
              return *len = 6, ("mov %B0,%D1" CR_TAB
                                "mov %A0,%C1" CR_TAB
                                "clr %D0"     CR_TAB
                                "sbrc %B0,7"  CR_TAB
                                "com %D0"     CR_TAB
                                "mov %C0,%D0");
          }

        case 24:
          /* Top byte moves to the bottom; sign fills the rest.  */
          return *len = 6, ("mov %A0,%D1" CR_TAB
                            "clr %D0"     CR_TAB
                            "sbrc %A0,7"  CR_TAB
                            "com %D0"     CR_TAB
                            "mov %B0,%D0" CR_TAB
                            "mov %C0,%D0");

        default:
          if (INTVAL (operands[2]) < 32)
            break;

          /* fall through */

        case 31:
          /* Count >= 31: only the replicated sign bit remains.  */
          if (AVR_HAVE_MOVW)
            return *len = 4, ("lsl %D0"     CR_TAB
                              "sbc %A0,%A0" CR_TAB
                              "mov %B0,%A0" CR_TAB
                              "movw %C0,%A0");
          else
            return *len = 5, ("lsl %D0"     CR_TAB
                              "sbc %A0,%A0" CR_TAB
                              "mov %B0,%A0" CR_TAB
                              "mov %C0,%A0" CR_TAB
                              "mov %D0,%A0");
        }
      len = t;
    }
  out_shift_with_cnt ("asr %D0" CR_TAB
                      "ror %C0" CR_TAB
                      "ror %B0" CR_TAB
                      "ror %A0", insn, operands, len, 4);
  return "";
}
5406
 
5407
/* 8bit logic shift right ((unsigned char)x >> i) */
5408
 
5409
const char *
5410
lshrqi3_out (rtx insn, rtx operands[], int *len)
5411
{
5412
  if (GET_CODE (operands[2]) == CONST_INT)
5413
    {
5414
      int k;
5415
 
5416
      if (!len)
5417
        len = &k;
5418
 
5419
      switch (INTVAL (operands[2]))
5420
        {
5421
        default:
5422
          if (INTVAL (operands[2]) < 8)
5423
            break;
5424
 
5425
          *len = 1;
5426
          return "clr %0";
5427
 
5428
        case 1:
5429
          *len = 1;
5430
          return "lsr %0";
5431
 
5432
        case 2:
5433
          *len = 2;
5434
          return ("lsr %0" CR_TAB
5435
                  "lsr %0");
5436
        case 3:
5437
          *len = 3;
5438
          return ("lsr %0" CR_TAB
5439
                  "lsr %0" CR_TAB
5440
                  "lsr %0");
5441
 
5442
        case 4:
5443
          if (test_hard_reg_class (LD_REGS, operands[0]))
5444
            {
5445
              *len=2;
5446
              return ("swap %0" CR_TAB
5447
                      "andi %0,0x0f");
5448
            }
5449
          *len = 4;
5450
          return ("lsr %0" CR_TAB
5451
                  "lsr %0" CR_TAB
5452
                  "lsr %0" CR_TAB
5453
                  "lsr %0");
5454
 
5455
        case 5:
5456
          if (test_hard_reg_class (LD_REGS, operands[0]))
5457
            {
5458
              *len = 3;
5459
              return ("swap %0" CR_TAB
5460
                      "lsr %0"  CR_TAB
5461
                      "andi %0,0x7");
5462
            }
5463
          *len = 5;
5464
          return ("lsr %0" CR_TAB
5465
                  "lsr %0" CR_TAB
5466
                  "lsr %0" CR_TAB
5467
                  "lsr %0" CR_TAB
5468
                  "lsr %0");
5469
 
5470
        case 6:
5471
          if (test_hard_reg_class (LD_REGS, operands[0]))
5472
            {
5473
              *len = 4;
5474
              return ("swap %0" CR_TAB
5475
                      "lsr %0"  CR_TAB
5476
                      "lsr %0"  CR_TAB
5477
                      "andi %0,0x3");
5478
            }
5479
          *len = 6;
5480
          return ("lsr %0" CR_TAB
5481
                  "lsr %0" CR_TAB
5482
                  "lsr %0" CR_TAB
5483
                  "lsr %0" CR_TAB
5484
                  "lsr %0" CR_TAB
5485
                  "lsr %0");
5486
 
5487
        case 7:
5488
          *len = 3;
5489
          return ("rol %0" CR_TAB
5490
                  "clr %0" CR_TAB
5491
                  "rol %0");
5492
        }
5493
    }
5494
  else if (CONSTANT_P (operands[2]))
5495
    fatal_insn ("internal compiler error.  Incorrect shift:", insn);
5496
 
5497
  out_shift_with_cnt ("lsr %0",
5498
                      insn, operands, len, 1);
5499
  return "";
5500
}
5501
 
5502
/* 16bit logic shift right ((unsigned short)x >> i) */
5503
 
5504
const char *
lshrhi3_out (rtx insn, rtx operands[], int *len)
{
  /* Output the 16-bit logical right shift  operands[0] >>= operands[2].
     If LEN is non-NULL, only compute the sequence length (in words) into
     *LEN instead of printing; otherwise print and return the template.  */

  if (GET_CODE (operands[2]) == CONST_INT)
    {
      /* A PARALLEL pattern means the insn carries a scratch register
         operand %3 that we may clobber.  */
      int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
      /* LD_REGS (R16..R31) accept immediate operands (ANDI, LDI).  */
      int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
      int k;
      int *t = len;

      if (!len)
        len = &k;

      switch (INTVAL (operands[2]))
        {
        default:
          if (INTVAL (operands[2]) < 16)
            break;

          /* Shift count >= 16: the result is zero.  */
          *len = 2;
          return ("clr %B0" CR_TAB
                  "clr %A0");

        case 4:
          if (optimize_size && scratch)
            break;  /* 5 */
          /* SWAP exchanges nibbles; combine the two bytes with masked EORs.  */
          if (ldi_ok)
            {
              *len = 6;
              return ("swap %B0"      CR_TAB
                      "swap %A0"      CR_TAB
                      "andi %A0,0x0f" CR_TAB
                      "eor %A0,%B0"   CR_TAB
                      "andi %B0,0x0f" CR_TAB
                      "eor %A0,%B0");
            }
          if (scratch)
            {
              *len = 7;
              return ("swap %B0"    CR_TAB
                      "swap %A0"    CR_TAB
                      "ldi %3,0x0f" CR_TAB
                      "and %A0,%3"      CR_TAB
                      "eor %A0,%B0" CR_TAB
                      "and %B0,%3"      CR_TAB
                      "eor %A0,%B0");
            }
          break;  /* optimize_size ? 6 : 8 */

        case 5:
          if (optimize_size)
            break;  /* scratch ? 5 : 6 */
          /* One plain shift, then the 4-bit swap trick from case 4.  */
          if (ldi_ok)
            {
              *len = 8;
              return ("lsr %B0"       CR_TAB
                      "ror %A0"       CR_TAB
                      "swap %B0"      CR_TAB
                      "swap %A0"      CR_TAB
                      "andi %A0,0x0f" CR_TAB
                      "eor %A0,%B0"   CR_TAB
                      "andi %B0,0x0f" CR_TAB
                      "eor %A0,%B0");
            }
          if (scratch)
            {
              *len = 9;
              return ("lsr %B0"     CR_TAB
                      "ror %A0"     CR_TAB
                      "swap %B0"    CR_TAB
                      "swap %A0"    CR_TAB
                      "ldi %3,0x0f" CR_TAB
                      "and %A0,%3"      CR_TAB
                      "eor %A0,%B0" CR_TAB
                      "and %B0,%3"      CR_TAB
                      "eor %A0,%B0");
            }
          break;  /* 10 */

        case 6:
          if (optimize_size)
            break;  /* scratch ? 5 : 6 */
          /* Shift LEFT twice through a temporary high byte, then move
             bytes down: (x << 2) >> 8 == x >> 6.  */
          *len = 9;
          return ("clr __tmp_reg__" CR_TAB
                  "lsl %A0"         CR_TAB
                  "rol %B0"         CR_TAB
                  "rol __tmp_reg__" CR_TAB
                  "lsl %A0"         CR_TAB
                  "rol %B0"         CR_TAB
                  "rol __tmp_reg__" CR_TAB
                  "mov %A0,%B0"     CR_TAB
                  "mov %B0,__tmp_reg__");

        case 7:
          /* Shift left once and pick up the carry: x >> 7.  */
          *len = 5;
          return ("lsl %A0"     CR_TAB
                  "mov %A0,%B0" CR_TAB
                  "rol %A0"     CR_TAB
                  "sbc %B0,%B0" CR_TAB
                  "neg %B0");

        case 8:
          /* Byte move: high byte becomes low byte.  */
          return *len = 2, ("mov %A0,%B1" CR_TAB
                            "clr %B0");

        case 9:
          *len = 3;
          return ("mov %A0,%B0" CR_TAB
                  "clr %B0"     CR_TAB
                  "lsr %A0");

        case 10:
          *len = 4;
          return ("mov %A0,%B0" CR_TAB
                  "clr %B0"     CR_TAB
                  "lsr %A0"     CR_TAB
                  "lsr %A0");

        case 11:
          *len = 5;
          return ("mov %A0,%B0" CR_TAB
                  "clr %B0"     CR_TAB
                  "lsr %A0"     CR_TAB
                  "lsr %A0"     CR_TAB
                  "lsr %A0");

        case 12:
          /* Byte move plus the nibble-swap trick for the remaining 4.  */
          if (ldi_ok)
            {
              *len = 4;
              return ("mov %A0,%B0" CR_TAB
                      "clr %B0"     CR_TAB
                      "swap %A0"    CR_TAB
                      "andi %A0,0x0f");
            }
          if (scratch)
            {
              *len = 5;
              return ("mov %A0,%B0" CR_TAB
                      "clr %B0"     CR_TAB
                      "swap %A0"    CR_TAB
                      "ldi %3,0x0f" CR_TAB
                      "and %A0,%3");
            }
          *len = 6;
          return ("mov %A0,%B0" CR_TAB
                  "clr %B0"     CR_TAB
                  "lsr %A0"     CR_TAB
                  "lsr %A0"     CR_TAB
                  "lsr %A0"     CR_TAB
                  "lsr %A0");

        case 13:
          if (ldi_ok)
            {
              *len = 5;
              return ("mov %A0,%B0" CR_TAB
                      "clr %B0"     CR_TAB
                      "swap %A0"    CR_TAB
                      "lsr %A0"     CR_TAB
                      "andi %A0,0x07");
            }
          /* MUL by 8 == shift left by 3; the product high byte (r1)
             then holds x >> 13's value.  */
          if (AVR_HAVE_MUL && scratch)
            {
              *len = 5;
              return ("ldi %3,0x08" CR_TAB
                      "mul %B0,%3"  CR_TAB
                      "mov %A0,r1"  CR_TAB
                      "clr %B0"     CR_TAB
                      "clr __zero_reg__");
            }
          if (optimize_size && scratch)
            break;  /* 5 */
          if (scratch)
            {
              *len = 6;
              return ("mov %A0,%B0" CR_TAB
                      "clr %B0"     CR_TAB
                      "swap %A0"    CR_TAB
                      "lsr %A0"     CR_TAB
                      "ldi %3,0x07" CR_TAB
                      "and %A0,%3");
            }
          if (AVR_HAVE_MUL)
            {
              /* Build the constant 8 in r1 via SET/BLD, since no
                 scratch register is available for LDI.  */
              *len = 6;
              return ("set"            CR_TAB
                      "bld r1,3"   CR_TAB
                      "mul %B0,r1" CR_TAB
                      "mov %A0,r1" CR_TAB
                      "clr %B0"    CR_TAB
                      "clr __zero_reg__");
            }
          *len = 7;
          return ("mov %A0,%B0" CR_TAB
                  "clr %B0"     CR_TAB
                  "lsr %A0"     CR_TAB
                  "lsr %A0"     CR_TAB
                  "lsr %A0"     CR_TAB
                  "lsr %A0"     CR_TAB
                  "lsr %A0");

        case 14:
          /* MUL by 4 == shift left by 2; r1 holds x >> 14.  */
          if (AVR_HAVE_MUL && ldi_ok)
            {
              *len = 5;
              return ("ldi %A0,0x04" CR_TAB
                      "mul %B0,%A0"  CR_TAB
                      "mov %A0,r1"   CR_TAB
                      "clr %B0"      CR_TAB
                      "clr __zero_reg__");
            }
          if (AVR_HAVE_MUL && scratch)
            {
              *len = 5;
              return ("ldi %3,0x04" CR_TAB
                      "mul %B0,%3"  CR_TAB
                      "mov %A0,r1"  CR_TAB
                      "clr %B0"     CR_TAB
                      "clr __zero_reg__");
            }
          if (optimize_size && ldi_ok)
            {
              /* Small loop: 6 single-bit shifts, reusing %B0 as counter.  */
              *len = 5;
              return ("mov %A0,%B0" CR_TAB
                      "ldi %B0,6" "\n1:\t"
                      "lsr %A0"     CR_TAB
                      "dec %B0"     CR_TAB
                      "brne 1b");
            }
          if (optimize_size && scratch)
            break;  /* 5 */
          /* Shift LEFT twice into the cleared low byte: x >> 14.  */
          *len = 6;
          return ("clr %A0" CR_TAB
                  "lsl %B0" CR_TAB
                  "rol %A0" CR_TAB
                  "lsl %B0" CR_TAB
                  "rol %A0" CR_TAB
                  "clr %B0");

        case 15:
          /* Only the sign bit survives: move bit 15 into bit 0.  */
          *len = 4;
          return ("clr %A0" CR_TAB
                  "lsl %B0" CR_TAB
                  "rol %A0" CR_TAB
                  "clr %B0");
        }
      /* No special-case template applied; restore caller's LEN pointer
         and fall through to the generic loop below.  */
      len = t;
    }
  out_shift_with_cnt ("lsr %B0" CR_TAB
                      "ror %A0", insn, operands, len, 2);
  return "";
}
5757
 
5758
 
5759
/* 24-bit logic shift right */
5760
 
5761
const char*
avr_out_lshrpsi3 (rtx insn, rtx *op, int *plen)
{
  /* Output a 24-bit logical right shift  op[0] = op[1] >> op[2].
     If PLEN is non-NULL, accumulate the sequence length into *PLEN
     instead of printing.  */

  int dest = REGNO (op[0]);
  int src = REGNO (op[1]);

  if (CONST_INT_P (op[2]))
    {
      if (plen)
        *plen = 0;

      switch (INTVAL (op[2]))
        {
        case 8:
          /* Whole-byte shift: move bytes down by one.  The copy order
             depends on how source and destination registers overlap.  */
          if (dest <= src)
            return avr_asm_len ("mov %A0,%B1" CR_TAB
                                "mov %B0,%C1" CR_TAB
                                "clr %C0", op, plen, 3);
          else
            return avr_asm_len ("clr %C0"     CR_TAB
                                "mov %B0,%C1" CR_TAB
                                "mov %A0,%B1", op, plen, 3);

        case 16:
          /* Move the high byte down by two; skip the MOV when source
             and destination already line up.  */
          if (dest != src + 2)
            avr_asm_len ("mov %A0,%C1", op, plen, 1);

          return avr_asm_len ("clr %B0"  CR_TAB
                              "clr %C0", op, plen, 2);

        default:
          if (INTVAL (op[2]) < 24)
            break;

          /* fall through */

        case 23:
          /* Only the top bit survives: extract bit 23 into bit 0.
             Counts >= 24 share this template (result would be 0, and
             SBRC on the cleared %C0 would never skip -- NOTE(review):
             counts >= 24 reuse the 23-template; confirm intent).  */
          return avr_asm_len ("clr %A0"    CR_TAB
                              "sbrc %C0,7" CR_TAB
                              "inc %A0"    CR_TAB
                              "clr %B0"    CR_TAB
                              "clr %C0", op, plen, 5);
        } /* switch */
    }

  /* Variable count or small constant: generic shift loop.  */
  out_shift_with_cnt ("lsr %C0" CR_TAB
                      "ror %B0" CR_TAB
                      "ror %A0", insn, op, plen, 3);
  return "";
}
5811
 
5812
 
5813
/* 32bit logic shift right ((unsigned int)x >> i) */
5814
 
5815
const char *
lshrsi3_out (rtx insn, rtx operands[], int *len)
{
  /* Output the 32-bit logical right shift  operands[0] >>= operands[2].
     If LEN is non-NULL, only compute the sequence length (in words)
     into *LEN; otherwise print the instructions.  */

  if (GET_CODE (operands[2]) == CONST_INT)
    {
      int k;
      int *t = len;

      if (!len)
        len = &k;

      switch (INTVAL (operands[2]))
        {
        default:
          if (INTVAL (operands[2]) < 32)
            break;

          /* Shift count >= 32: clear all four bytes.  */
          if (AVR_HAVE_MOVW)
            return *len = 3, ("clr %D0" CR_TAB
                              "clr %C0" CR_TAB
                              "movw %A0,%C0");
          *len = 4;
          return ("clr %D0" CR_TAB
                  "clr %C0" CR_TAB
                  "clr %B0" CR_TAB
                  "clr %A0");

        case 8:
          {
            /* Whole-byte move down by one; copy order depends on the
               register overlap direction.  */
            int reg0 = true_regnum (operands[0]);
            int reg1 = true_regnum (operands[1]);
            *len = 4;
            if (reg0 <= reg1)
              return ("mov %A0,%B1" CR_TAB
                      "mov %B0,%C1" CR_TAB
                      "mov %C0,%D1" CR_TAB
                      "clr %D0");
            else
              return ("clr %D0"     CR_TAB
                      "mov %C0,%D1" CR_TAB
                      "mov %B0,%C1" CR_TAB
                      "mov %A0,%B1");
          }

        case 16:
          {
            /* Move the high word into the low word.  */
            int reg0 = true_regnum (operands[0]);
            int reg1 = true_regnum (operands[1]);

            if (reg0 == reg1 + 2)
              /* Low word of dest already holds high word of src.  */
              return *len = 2, ("clr %C0"     CR_TAB
                                "clr %D0");
            if (AVR_HAVE_MOVW)
              return *len = 3, ("movw %A0,%C1" CR_TAB
                                "clr %C0"      CR_TAB
                                "clr %D0");
            else
              return *len = 4, ("mov %B0,%D1" CR_TAB
                                "mov %A0,%C1" CR_TAB
                                "clr %C0"     CR_TAB
                                "clr %D0");
          }

        case 24:
          /* Only the top byte survives.  */
          return *len = 4, ("mov %A0,%D1" CR_TAB
                            "clr %B0"     CR_TAB
                            "clr %C0"     CR_TAB
                            "clr %D0");

        case 31:
          /* Only the sign bit survives: extract bit 31 into bit 0.  */
          *len = 6;
          return ("clr %A0"    CR_TAB
                  "sbrc %D0,7" CR_TAB
                  "inc %A0"    CR_TAB
                  "clr %B0"    CR_TAB
                  "clr %C0"    CR_TAB
                  "clr %D0");
        }
      /* No special case matched; restore caller's LEN pointer and use
         the generic loop below.  */
      len = t;
    }
  out_shift_with_cnt ("lsr %D0" CR_TAB
                      "ror %C0" CR_TAB
                      "ror %B0" CR_TAB
                      "ror %A0", insn, operands, len, 4);
  return "";
}
5901
 
5902
 
5903
/* Output addition of register XOP[0] and compile time constant XOP[2]:
5904
 
5905
      XOP[0] = XOP[0] + XOP[2]
5906
 
5907
   and return "".  If PLEN == NULL, print assembler instructions to perform the
5908
   addition; otherwise, set *PLEN to the length of the instruction sequence (in
5909
   words) printed with PLEN == NULL.  XOP[3] is an 8-bit scratch register.
5910
   CODE == PLUS:  perform addition by using ADD instructions.
5911
   CODE == MINUS: perform addition by using SUB instructions.
5912
   Set *PCC to effect on cc0 according to respective CC_* insn attribute.  */
5913
 
5914
static void
avr_out_plus_1 (rtx *xop, int *plen, enum rtx_code code, int *pcc)
{
  /* MODE of the operation.  */
  enum machine_mode mode = GET_MODE (xop[0]);

  /* Number of bytes to operate on.  */
  int i, n_bytes = GET_MODE_SIZE (mode);

  /* Value (0..0xff) held in clobber register op[3] or -1 if unknown.
     Tracking it lets us skip reloading the same immediate with LDI.  */
  int clobber_val = -1;

  /* op[0]: 8-bit destination register
     op[1]: 8-bit const int
     op[2]: 8-bit scratch register */
  rtx op[3];

  /* Started the operation?  Before starting the operation we may skip
     adding 0.  This is no more true after the operation started because
     carry must be taken into account.  */
  bool started = false;

  /* Value to add.  There are two ways to add VAL: R += VAL and R -= -VAL.  */
  rtx xval = xop[2];

  /* Except in the case of ADIW with 16-bit register (see below)
     addition does not set cc0 in a usable way.  */

  *pcc = (MINUS == code) ? CC_SET_CZN : CC_CLOBBER;

  /* For MINUS, emit subtraction of the negated constant.  */
  if (MINUS == code)
    xval = simplify_unary_operation (NEG, mode, xval, mode);

  op[2] = xop[3];

  if (plen)
    *plen = 0;

  for (i = 0; i < n_bytes; i++)
    {
      /* We operate byte-wise on the destination.  */
      rtx reg8 = simplify_gen_subreg (QImode, xop[0], mode, i);
      rtx xval8 = simplify_gen_subreg (QImode, xval, mode, i);

      /* 8-bit value to operate with this byte. */
      unsigned int val8 = UINTVAL (xval8) & GET_MODE_MASK (QImode);

      /* Registers R16..R31 can operate with immediate.  */
      bool ld_reg_p = test_hard_reg_class (LD_REGS, reg8);

      op[0] = reg8;
      op[1] = gen_int_mode (val8, QImode);

      /* To get usable cc0 no low-bytes must have been skipped.  */

      if (i && !started)
        *pcc = CC_CLOBBER;

      /* Try a word-wise ADIW/SBIW on an even-aligned pair of bytes
         before falling back to byte operations.  */
      if (!started
          && i % 2 == 0
          && i + 2 <= n_bytes
          && test_hard_reg_class (ADDW_REGS, reg8))
        {
          rtx xval16 = simplify_gen_subreg (HImode, xval, mode, i);
          unsigned int val16 = UINTVAL (xval16) & GET_MODE_MASK (HImode);

          /* Registers R24, X, Y, Z can use ADIW/SBIW with constants < 64
             i.e. operate word-wise.  */

          if (val16 < 64)
            {
              if (val16 != 0)
                {
                  started = true;
                  avr_asm_len (code == PLUS ? "adiw %0,%1" : "sbiw %0,%1",
                               op, plen, 1);

                  if (n_bytes == 2 && PLUS == code)
                      *pcc = CC_SET_ZN;
                }

              /* The word instruction consumed two bytes; skip the
                 second one.  */
              i++;
              continue;
            }
        }

      if (val8 == 0)
        {
          /* Adding 0 still needs the carry once the operation has
             started; otherwise the byte can be skipped entirely.  */
          if (started)
            avr_asm_len (code == PLUS
                         ? "adc %0,__zero_reg__" : "sbc %0,__zero_reg__",
                         op, plen, 1);
          continue;
        }
      else if ((val8 == 1 || val8 == 0xff)
               && !started
               && i == n_bytes - 1)
        {
          /* A lone +/-1 in the most significant byte can be done with
             a single INC or DEC.  */
          avr_asm_len ((code == PLUS) ^ (val8 == 1) ? "dec %0" : "inc %0",
                       op, plen, 1);
          break;
        }

      switch (code)
        {
        case PLUS:

          /* There is no ADD-with-immediate; the constant must go
             through the scratch register.  */
          gcc_assert (plen != NULL || REG_P (op[2]));

          if (clobber_val != (int) val8)
            avr_asm_len ("ldi %2,%1", op, plen, 1);
          clobber_val = (int) val8;

          avr_asm_len (started ? "adc %0,%2" : "add %0,%2", op, plen, 1);

          break; /* PLUS */

        case MINUS:

          if (ld_reg_p)
            avr_asm_len (started ? "sbci %0,%1" : "subi %0,%1", op, plen, 1);
          else
            {
              gcc_assert (plen != NULL || REG_P (op[2]));

              if (clobber_val != (int) val8)
                avr_asm_len ("ldi %2,%1", op, plen, 1);
              clobber_val = (int) val8;

              avr_asm_len (started ? "sbc %0,%2" : "sub %0,%2", op, plen, 1);
            }

          break; /* MINUS */

        default:
          /* Unknown code */
          gcc_unreachable();
        }

      started = true;

    } /* for all sub-bytes */

  /* No output doesn't change cc0.  */

  if (plen && *plen == 0)
    *pcc = CC_NONE;
}
6062
 
6063
 
6064
/* Output addition of register XOP[0] and compile time constant XOP[2]:
6065
 
6066
      XOP[0] = XOP[0] + XOP[2]
6067
 
6068
   and return "".  If PLEN == NULL, print assembler instructions to perform the
6069
   addition; otherwise, set *PLEN to the length of the instruction sequence (in
6070
   words) printed with PLEN == NULL.
6071
   If PCC != 0 then set *PCC to the instruction sequence's effect on the
6072
   condition code (with respect to XOP[0]).  */
6073
 
6074
const char*
6075
avr_out_plus (rtx *xop, int *plen, int *pcc)
6076
{
6077
  int len_plus, len_minus;
6078
  int cc_plus, cc_minus, cc_dummy;
6079
 
6080
  if (!pcc)
6081
    pcc = &cc_dummy;
6082
 
6083
  /* Work out if  XOP[0] += XOP[2]  is better or  XOP[0] -= -XOP[2].  */
6084
 
6085
  avr_out_plus_1 (xop, &len_plus, PLUS, &cc_plus);
6086
  avr_out_plus_1 (xop, &len_minus, MINUS, &cc_minus);
6087
 
6088
  /* Prefer MINUS over PLUS if size is equal because it sets cc0.  */
6089
 
6090
  if (plen)
6091
    {
6092
      *plen = (len_minus <= len_plus) ? len_minus : len_plus;
6093
      *pcc  = (len_minus <= len_plus) ? cc_minus : cc_plus;
6094
    }
6095
  else if (len_minus <= len_plus)
6096
    avr_out_plus_1 (xop, NULL, MINUS, pcc);
6097
  else
6098
    avr_out_plus_1 (xop, NULL, PLUS, pcc);
6099
 
6100
  return "";
6101
}
6102
 
6103
 
6104
/* Same as above but XOP has just 3 entries.
6105
   Supply a dummy 4th operand.  */
6106
 
6107
const char*
6108
avr_out_plus_noclobber (rtx *xop, int *plen, int *pcc)
6109
{
6110
  rtx op[4];
6111
 
6112
  op[0] = xop[0];
6113
  op[1] = xop[1];
6114
  op[2] = xop[2];
6115
  op[3] = NULL_RTX;
6116
 
6117
  return avr_out_plus (op, plen, pcc);
6118
}
6119
 
6120
 
6121
/* Prepare operands of adddi3_const_insn to be used with avr_out_plus_1.  */
6122
 
6123
const char*
6124
avr_out_plus64 (rtx addend, int *plen)
6125
{
6126
  int cc_dummy;
6127
  rtx op[4];
6128
 
6129
  op[0] = gen_rtx_REG (DImode, 18);
6130
  op[1] = op[0];
6131
  op[2] = addend;
6132
  op[3] = NULL_RTX;
6133
 
6134
  avr_out_plus_1 (op, plen, MINUS, &cc_dummy);
6135
 
6136
  return "";
6137
}
6138
 
6139
/* Output bit operation (IOR, AND, XOR) with register XOP[0] and compile
6140
   time constant XOP[2]:
6141
 
6142
      XOP[0] = XOP[0] <op> XOP[2]
6143
 
6144
   and return "".  If PLEN == NULL, print assembler instructions to perform the
6145
   operation; otherwise, set *PLEN to the length of the instruction sequence
6146
   (in words) printed with PLEN == NULL.  XOP[3] is either an 8-bit clobber
6147
   register or SCRATCH if no clobber register is needed for the operation.  */
6148
 
6149
const char*
avr_out_bitop (rtx insn, rtx *xop, int *plen)
{
  /* CODE and MODE of the operation.  */
  enum rtx_code code = GET_CODE (SET_SRC (single_set (insn)));
  enum machine_mode mode = GET_MODE (xop[0]);

  /* Number of bytes to operate on.  */
  int i, n_bytes = GET_MODE_SIZE (mode);

  /* Value of T-flag (0 or 1) or -1 if unknown.  Tracking it lets us
     skip redundant SET/CLT instructions across bytes.  */
  int set_t = -1;

  /* Value (0..0xff) held in clobber register op[3] or -1 if unknown.
     Tracking it lets us skip reloading the same immediate with LDI.  */
  int clobber_val = -1;

  /* op[0]: 8-bit destination register
     op[1]: 8-bit const int
     op[2]: 8-bit clobber register or SCRATCH
     op[3]: 8-bit register containing 0xff or NULL_RTX  */
  rtx op[4];

  op[2] = xop[3];
  op[3] = NULL_RTX;

  if (plen)
    *plen = 0;

  for (i = 0; i < n_bytes; i++)
    {
      /* We operate byte-wise on the destination.  */
      rtx reg8 = simplify_gen_subreg (QImode, xop[0], mode, i);
      rtx xval8 = simplify_gen_subreg (QImode, xop[2], mode, i);

      /* 8-bit value to operate with this byte. */
      unsigned int val8 = UINTVAL (xval8) & GET_MODE_MASK (QImode);

      /* Number of bits set in the current byte of the constant.  */
      int pop8 = avr_popcount (val8);

      /* Registers R16..R31 can operate with immediate.  */
      bool ld_reg_p = test_hard_reg_class (LD_REGS, reg8);

      op[0] = reg8;
      op[1] = GEN_INT (val8);

      switch (code)
        {
        case IOR:

          if (0 == pop8)
            /* OR with 0 is a no-op for this byte.  */
            continue;
          else if (ld_reg_p)
            avr_asm_len ("ori %0,%1", op, plen, 1);
          else if (1 == pop8)
            {
              /* Single bit: copy the T flag into that bit position.  */
              if (set_t != 1)
                avr_asm_len ("set", op, plen, 1);
              set_t = 1;

              op[1] = GEN_INT (exact_log2 (val8));
              avr_asm_len ("bld %0,%1", op, plen, 1);
            }
          else if (8 == pop8)
            {
              /* All bits: set the byte to 0xff, reusing a previously
                 built 0xff register (op[3]) when available.  */
              if (op[3] != NULL_RTX)
                avr_asm_len ("mov %0,%3", op, plen, 1);
              else
                avr_asm_len ("clr %0" CR_TAB
                             "dec %0", op, plen, 2);

              op[3] = op[0];
            }
          else
            {
              /* General case: load the mask into the clobber register.  */
              if (clobber_val != (int) val8)
                avr_asm_len ("ldi %2,%1", op, plen, 1);
              clobber_val = (int) val8;

              avr_asm_len ("or %0,%2", op, plen, 1);
            }

          continue; /* IOR */

        case AND:

          if (8 == pop8)
            /* AND with 0xff is a no-op for this byte.  */
            continue;
          else if (0 == pop8)
            avr_asm_len ("clr %0", op, plen, 1);
          else if (ld_reg_p)
            avr_asm_len ("andi %0,%1", op, plen, 1);
          else if (7 == pop8)
            {
              /* Single zero bit: clear it by loading T = 0 into it.  */
              if (set_t != 0)
                avr_asm_len ("clt", op, plen, 1);
              set_t = 0;

              op[1] = GEN_INT (exact_log2 (GET_MODE_MASK (QImode) & ~val8));
              avr_asm_len ("bld %0,%1", op, plen, 1);
            }
          else
            {
              if (clobber_val != (int) val8)
                avr_asm_len ("ldi %2,%1", op, plen, 1);
              clobber_val = (int) val8;

              avr_asm_len ("and %0,%2", op, plen, 1);
            }

          continue; /* AND */

        case XOR:

          if (0 == pop8)
            /* XOR with 0 is a no-op for this byte.  */
            continue;
          else if (8 == pop8)
            /* XOR with 0xff is a one's complement.  */
            avr_asm_len ("com %0", op, plen, 1);
          else if (ld_reg_p && val8 == (1 << 7))
            /* Flipping just the top bit equals subtracting 0x80.  */
            avr_asm_len ("subi %0,%1", op, plen, 1);
          else
            {
              if (clobber_val != (int) val8)
                avr_asm_len ("ldi %2,%1", op, plen, 1);
              clobber_val = (int) val8;

              avr_asm_len ("eor %0,%2", op, plen, 1);
            }

          continue; /* XOR */

        default:
          /* Unknown rtx_code */
          gcc_unreachable();
        }
    } /* for all sub-bytes */

  return "";
}
6288
 
6289
 
6290
/* PLEN == NULL: Output code to add CONST_INT OP[0] to SP.
6291
   PLEN != NULL: Set *PLEN to the length of that sequence.
6292
   Return "".  */
6293
 
6294
const char*
6295
avr_out_addto_sp (rtx *op, int *plen)
6296
{
6297
  int pc_len = AVR_2_BYTE_PC ? 2 : 3;
6298
  int addend = INTVAL (op[0]);
6299
 
6300
  if (plen)
6301
    *plen = 0;
6302
 
6303
  if (addend < 0)
6304
    {
6305
      if (flag_verbose_asm || flag_print_asm_name)
6306
        avr_asm_len (ASM_COMMENT_START "SP -= %n0", op, plen, 0);
6307
 
6308
      while (addend <= -pc_len)
6309
        {
6310
          addend += pc_len;
6311
          avr_asm_len ("rcall .", op, plen, 1);
6312
        }
6313
 
6314
      while (addend++ < 0)
6315
        avr_asm_len ("push __zero_reg__", op, plen, 1);
6316
    }
6317
  else if (addend > 0)
6318
    {
6319
      if (flag_verbose_asm || flag_print_asm_name)
6320
        avr_asm_len (ASM_COMMENT_START "SP += %0", op, plen, 0);
6321
 
6322
      while (addend-- > 0)
6323
        avr_asm_len ("pop __tmp_reg__", op, plen, 1);
6324
    }
6325
 
6326
  return "";
6327
}
6328
 
6329
 
6330
/* Create RTL split patterns for byte sized rotate expressions.  This
6331
  produces a series of move instructions and considers overlap situations.
6332
  Overlapping non-HImode operands need a scratch register.  */
6333
 
6334
bool
6335
avr_rotate_bytes (rtx operands[])
6336
{
6337
    int i, j;
6338
    enum machine_mode mode = GET_MODE (operands[0]);
6339
    bool overlapped = reg_overlap_mentioned_p (operands[0], operands[1]);
6340
    bool same_reg = rtx_equal_p (operands[0], operands[1]);
6341
    int num = INTVAL (operands[2]);
6342
    rtx scratch = operands[3];
6343
    /* Work out if byte or word move is needed.  Odd byte rotates need QImode.
6344
       Word move if no scratch is needed, otherwise use size of scratch.  */
6345
    enum machine_mode move_mode = QImode;
6346
    int move_size, offset, size;
6347
 
6348
    if (num & 0xf)
6349
      move_mode = QImode;
6350
    else if ((mode == SImode && !same_reg) || !overlapped)
6351
      move_mode = HImode;
6352
    else
6353
      move_mode = GET_MODE (scratch);
6354
 
6355
    /* Force DI rotate to use QI moves since other DI moves are currently split
6356
       into QI moves so forward propagation works better.  */
6357
    if (mode == DImode)
6358
      move_mode = QImode;
6359
    /* Make scratch smaller if needed.  */
6360
    if (SCRATCH != GET_CODE (scratch)
6361
        && HImode == GET_MODE (scratch)
6362
        && QImode == move_mode)
6363
      scratch = simplify_gen_subreg (move_mode, scratch, HImode, 0);
6364
 
6365
    move_size = GET_MODE_SIZE (move_mode);
6366
    /* Number of bytes/words to rotate.  */
6367
    offset = (num  >> 3) / move_size;
6368
    /* Number of moves needed.  */
6369
    size = GET_MODE_SIZE (mode) / move_size;
6370
    /* Himode byte swap is special case to avoid a scratch register.  */
6371
    if (mode == HImode && same_reg)
6372
      {
6373
        /* HImode byte swap, using xor.  This is as quick as using scratch.  */
6374
        rtx src, dst;
6375
        src = simplify_gen_subreg (move_mode, operands[1], mode, 0);
6376
        dst = simplify_gen_subreg (move_mode, operands[0], mode, 1);
6377
        if (!rtx_equal_p (dst, src))
6378
          {
6379
             emit_move_insn (dst, gen_rtx_XOR (QImode, dst, src));
6380
             emit_move_insn (src, gen_rtx_XOR (QImode, src, dst));
6381
             emit_move_insn (dst, gen_rtx_XOR (QImode, dst, src));
6382
          }
6383
      }
6384
    else
6385
      {
6386
#define MAX_SIZE 8 /* GET_MODE_SIZE (DImode) / GET_MODE_SIZE (QImode)  */
6387
        /* Create linked list of moves to determine move order.  */
6388
        struct {
6389
          rtx src, dst;
6390
          int links;
6391
        } move[MAX_SIZE + 8];
6392
        int blocked, moves;
6393
 
6394
        gcc_assert (size <= MAX_SIZE);
6395
        /* Generate list of subreg moves.  */
6396
        for (i = 0; i < size; i++)
6397
          {
6398
            int from = i;
6399
            int to = (from + offset) % size;
6400
            move[i].src = simplify_gen_subreg (move_mode, operands[1],
6401
                                                mode, from * move_size);
6402
            move[i].dst = simplify_gen_subreg (move_mode, operands[0],
6403
                                                mode, to   * move_size);
6404
            move[i].links = -1;
6405
           }
6406
        /* Mark dependence where a dst of one move is the src of another move.
6407
           The first move is a conflict as it must wait until second is
6408
           performed.  We ignore moves to self - we catch this later.  */
6409
        if (overlapped)
6410
          for (i = 0; i < size; i++)
6411
            if (reg_overlap_mentioned_p (move[i].dst, operands[1]))
6412
              for (j = 0; j < size; j++)
6413
                if (j != i && rtx_equal_p (move[j].src, move[i].dst))
6414
                  {
6415
                    /* The dst of move i is the src of move j.  */
6416
                    move[i].links = j;
6417
                    break;
6418
                  }
6419
 
6420
        blocked = -1;
6421
        moves = 0;
6422
        /* Go through move list and perform non-conflicting moves.  As each
6423
           non-overlapping move is made, it may remove other conflicts
6424
           so the process is repeated until no conflicts remain.  */
6425
        do
6426
          {
6427
            blocked = -1;
6428
            moves = 0;
6429
            /* Emit move where dst is not also a src or we have used that
6430
               src already.  */
6431
            for (i = 0; i < size; i++)
6432
              if (move[i].src != NULL_RTX)
6433
                {
6434
                  if (move[i].links == -1
6435
                      || move[move[i].links].src == NULL_RTX)
6436
                    {
6437
                      moves++;
6438
                      /* Ignore NOP moves to self.  */
6439
                      if (!rtx_equal_p (move[i].dst, move[i].src))
6440
                        emit_move_insn (move[i].dst, move[i].src);
6441
 
6442
                      /* Remove  conflict from list.  */
6443
                      move[i].src = NULL_RTX;
6444
                    }
6445
                  else
6446
                    blocked = i;
6447
                }
6448
 
6449
            /* Check for deadlock. This is when no moves occurred and we have
6450
               at least one blocked move.  */
6451
            if (moves == 0 && blocked != -1)
6452
              {
6453
                /* Need to use scratch register to break deadlock.
6454
                   Add move to put dst of blocked move into scratch.
6455
                   When this move occurs, it will break chain deadlock.
6456
                   The scratch register is substituted for real move.  */
6457
 
6458
                gcc_assert (SCRATCH != GET_CODE (scratch));
6459
 
6460
                move[size].src = move[blocked].dst;
6461
                move[size].dst =  scratch;
6462
                /* Scratch move is never blocked.  */
6463
                move[size].links = -1;
6464
                /* Make sure we have valid link.  */
6465
                gcc_assert (move[blocked].links != -1);
6466
                /* Replace src of  blocking move with scratch reg.  */
6467
                move[move[blocked].links].src = scratch;
6468
                /* Make dependent on scratch move occuring.  */
6469
                move[blocked].links = size;
6470
                size=size+1;
6471
              }
6472
          }
6473
        while (blocked != -1);
6474
      }
6475
    return true;
6476
}
6477
 
6478
/* Modifies the length assigned to instruction INSN
6479
   LEN is the initially computed length of the insn.  */
6480
 
6481
int
6482
adjust_insn_length (rtx insn, int len)
6483
{
6484
  rtx *op = recog_data.operand;
6485
  enum attr_adjust_len adjust_len;
6486
 
6487
  /* Some complex insns don't need length adjustment and therefore
6488
     the length need not/must not be adjusted for these insns.
6489
     It is easier to state this in an insn attribute "adjust_len" than
6490
     to clutter up code here...  */
6491
 
6492
  if (-1 == recog_memoized (insn))
6493
    {
6494
      return len;
6495
    }
6496
 
6497
  /* Read from insn attribute "adjust_len" if/how length is to be adjusted.  */
6498
 
6499
  adjust_len = get_attr_adjust_len (insn);
6500
 
6501
  if (adjust_len == ADJUST_LEN_NO)
6502
    {
6503
      /* Nothing to adjust: The length from attribute "length" is fine.
6504
         This is the default.  */
6505
 
6506
      return len;
6507
    }
6508
 
6509
  /* Extract insn's operands.  */
6510
 
6511
  extract_constrain_insn_cached (insn);
6512
 
6513
  /* Dispatch to right function.  */
6514
 
6515
  switch (adjust_len)
6516
    {
6517
    case ADJUST_LEN_RELOAD_IN16: output_reload_inhi (op, op[2], &len); break;
6518
    case ADJUST_LEN_RELOAD_IN24: avr_out_reload_inpsi (op, op[2], &len); break;
6519
    case ADJUST_LEN_RELOAD_IN32: output_reload_insisf (op, op[2], &len); break;
6520
 
6521
    case ADJUST_LEN_OUT_BITOP: avr_out_bitop (insn, op, &len); break;
6522
 
6523
    case ADJUST_LEN_OUT_PLUS: avr_out_plus (op, &len, NULL); break;
6524
    case ADJUST_LEN_PLUS64: avr_out_plus64 (op[0], &len); break;
6525
    case ADJUST_LEN_OUT_PLUS_NOCLOBBER:
6526
      avr_out_plus_noclobber (op, &len, NULL); break;
6527
 
6528
    case ADJUST_LEN_ADDTO_SP: avr_out_addto_sp (op, &len); break;
6529
 
6530
    case ADJUST_LEN_MOV8:  output_movqi (insn, op, &len); break;
6531
    case ADJUST_LEN_MOV16: output_movhi (insn, op, &len); break;
6532
    case ADJUST_LEN_MOV24: avr_out_movpsi (insn, op, &len); break;
6533
    case ADJUST_LEN_MOV32: output_movsisf (insn, op, &len); break;
6534
    case ADJUST_LEN_MOVMEM: avr_out_movmem (insn, op, &len); break;
6535
    case ADJUST_LEN_XLOAD: avr_out_xload (insn, op, &len); break;
6536
 
6537
    case ADJUST_LEN_TSTHI: avr_out_tsthi (insn, op, &len); break;
6538
    case ADJUST_LEN_TSTPSI: avr_out_tstpsi (insn, op, &len); break;
6539
    case ADJUST_LEN_TSTSI: avr_out_tstsi (insn, op, &len); break;
6540
    case ADJUST_LEN_COMPARE: avr_out_compare (insn, op, &len); break;
6541
    case ADJUST_LEN_COMPARE64: avr_out_compare64 (insn, op, &len); break;
6542
 
6543
    case ADJUST_LEN_LSHRQI: lshrqi3_out (insn, op, &len); break;
6544
    case ADJUST_LEN_LSHRHI: lshrhi3_out (insn, op, &len); break;
6545
    case ADJUST_LEN_LSHRSI: lshrsi3_out (insn, op, &len); break;
6546
 
6547
    case ADJUST_LEN_ASHRQI: ashrqi3_out (insn, op, &len); break;
6548
    case ADJUST_LEN_ASHRHI: ashrhi3_out (insn, op, &len); break;
6549
    case ADJUST_LEN_ASHRSI: ashrsi3_out (insn, op, &len); break;
6550
 
6551
    case ADJUST_LEN_ASHLQI: ashlqi3_out (insn, op, &len); break;
6552
    case ADJUST_LEN_ASHLHI: ashlhi3_out (insn, op, &len); break;
6553
    case ADJUST_LEN_ASHLSI: ashlsi3_out (insn, op, &len); break;
6554
 
6555
    case ADJUST_LEN_ASHLPSI: avr_out_ashlpsi3 (insn, op, &len); break;
6556
    case ADJUST_LEN_ASHRPSI: avr_out_ashrpsi3 (insn, op, &len); break;
6557
    case ADJUST_LEN_LSHRPSI: avr_out_lshrpsi3 (insn, op, &len); break;
6558
 
6559
    case ADJUST_LEN_CALL: len = AVR_HAVE_JMP_CALL ? 2 : 1; break;
6560
 
6561
    case ADJUST_LEN_INSERT_BITS: avr_out_insert_bits (op, &len); break;
6562
 
6563
    default:
6564
      gcc_unreachable();
6565
    }
6566
 
6567
  return len;
6568
}
6569
 
6570
/* Return nonzero if register REG dead after INSN.  */
6571
 
6572
int
6573
reg_unused_after (rtx insn, rtx reg)
6574
{
6575
  return (dead_or_set_p (insn, reg)
6576
          || (REG_P(reg) && _reg_unused_after (insn, reg)));
6577
}
6578
 
6579
/* Return nonzero if REG is not used after INSN.
   We assume REG is a reload reg, and therefore does
   not live past labels.  It may live past calls or jumps though.

   Works by scanning the insn stream forward from INSN; a use of REG as
   a source (or in a store address) means "still live" (return 0), while
   a full non-memory set of REG means "dead" (return 1).  */

int
_reg_unused_after (rtx insn, rtx reg)
{
  enum rtx_code code;
  rtx set;

  /* If the reg is set by this instruction, then it is safe for our
     case.  Disregard the case where this is a store to memory, since
     we are checking a register used in the store address.  */
  set = single_set (insn);
  if (set && GET_CODE (SET_DEST (set)) != MEM
      && reg_overlap_mentioned_p (reg, SET_DEST (set)))
    return 1;

  /* Walk the following insns until the question is decided.  */
  while ((insn = NEXT_INSN (insn)))
    {
      /* Shadows the function-scope SET; only used below in this loop.  */
      rtx set;
      code = GET_CODE (insn);

#if 0
      /* If this is a label that existed before reload, then the register
         if dead here.  However, if this is a label added by reorg, then
         the register may still be live here.  We can't tell the difference,
         so we just ignore labels completely.  */
      if (code == CODE_LABEL)
        return 1;
      /* else */
#endif

      /* Skip notes, barriers, labels etc.  */
      if (!INSN_P (insn))
        continue;

      /* Control may leave via the jump; conservatively assume live.  */
      if (code == JUMP_INSN)
        return 0;

      /* If this is a sequence, we must handle them all at once.
         We could have for instance a call that sets the target register,
         and an insn in a delay slot that uses the register.  In this case,
         we must return 0.  */
      else if (code == INSN && GET_CODE (PATTERN (insn)) == SEQUENCE)
        {
          int i;
          int retval = 0;

          for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
            {
              rtx this_insn = XVECEXP (PATTERN (insn), 0, i);
              rtx set = single_set (this_insn);

              /* Remember the kind of the branch/call carrying the
                 delay slots so we can decide after the loop.  */
              if (GET_CODE (this_insn) == CALL_INSN)
                code = CALL_INSN;
              else if (GET_CODE (this_insn) == JUMP_INSN)
                {
                  if (INSN_ANNULLED_BRANCH_P (this_insn))
                    return 0;
                  code = JUMP_INSN;
                }

              /* Any read of REG inside the sequence keeps it live.  */
              if (set && reg_overlap_mentioned_p (reg, SET_SRC (set)))
                return 0;
              if (set && reg_overlap_mentioned_p (reg, SET_DEST (set)))
                {
                  /* Non-memory set of REG kills it; a store means REG
                     was used in the address, hence still live.  */
                  if (GET_CODE (SET_DEST (set)) != MEM)
                    retval = 1;
                  else
                    return 0;
                }
              /* No single set: be conservative on any mention of REG.  */
              if (set == 0
                  && reg_overlap_mentioned_p (reg, PATTERN (this_insn)))
                return 0;
            }
          if (retval == 1)
            return 1;
          else if (code == JUMP_INSN)
            return 0;
        }

      if (code == CALL_INSN)
        {
          rtx tem;
          /* REG passed as an argument to the call keeps it live.  */
          for (tem = CALL_INSN_FUNCTION_USAGE (insn); tem; tem = XEXP (tem, 1))
            if (GET_CODE (XEXP (tem, 0)) == USE
                && REG_P (XEXP (XEXP (tem, 0), 0))
                && reg_overlap_mentioned_p (reg, XEXP (XEXP (tem, 0), 0)))
              return 0;
          /* Call-clobbered registers die at the call site.  */
          if (call_used_regs[REGNO (reg)])
            return 1;
        }

      set = single_set (insn);

      /* Same source/dest tests as above, for a plain insn.  */
      if (set && reg_overlap_mentioned_p (reg, SET_SRC (set)))
        return 0;
      if (set && reg_overlap_mentioned_p (reg, SET_DEST (set)))
        return GET_CODE (SET_DEST (set)) != MEM;
      if (set == 0 && reg_overlap_mentioned_p (reg, PATTERN (insn)))
        return 0;
    }
  /* Fell off the end of the insn stream without finding a use.  */
  return 1;
}
6683
 
6684
 
6685
/* Return RTX that represents the lower 16 bits of a constant address.
6686
   Unfortunately, simplify_gen_subreg does not handle this case.  */
6687
 
6688
static rtx
6689
avr_const_address_lo16 (rtx x)
6690
{
6691
  rtx lo16;
6692
 
6693
  switch (GET_CODE (x))
6694
    {
6695
    default:
6696
      break;
6697
 
6698
    case CONST:
6699
      if (PLUS == GET_CODE (XEXP (x, 0))
6700
          && SYMBOL_REF == GET_CODE (XEXP (XEXP (x, 0), 0))
6701
          && CONST_INT_P (XEXP (XEXP (x, 0), 1)))
6702
        {
6703
          HOST_WIDE_INT offset = INTVAL (XEXP (XEXP (x, 0), 1));
6704
          const char *name = XSTR (XEXP (XEXP (x, 0), 0), 0);
6705
 
6706
          lo16 = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (name));
6707
          lo16 = gen_rtx_CONST (Pmode, plus_constant (lo16, offset));
6708
 
6709
          return lo16;
6710
        }
6711
 
6712
      break;
6713
 
6714
    case SYMBOL_REF:
6715
      {
6716
        const char *name = XSTR (x, 0);
6717
 
6718
        return gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (name));
6719
      }
6720
    }
6721
 
6722
  avr_edump ("\n%?: %r\n", x);
6723
  gcc_unreachable();
6724
}
6725
 
6726
 
6727
/* Target hook for assembling integer objects.  The AVR version needs
6728
   special handling for references to certain labels.  */
6729
 
6730
static bool
6731
avr_assemble_integer (rtx x, unsigned int size, int aligned_p)
6732
{
6733
  if (size == POINTER_SIZE / BITS_PER_UNIT && aligned_p
6734
      && text_segment_operand (x, VOIDmode) )
6735
    {
6736
      fputs ("\t.word\tgs(", asm_out_file);
6737
      output_addr_const (asm_out_file, x);
6738
      fputs (")\n", asm_out_file);
6739
 
6740
      return true;
6741
    }
6742
  else if (GET_MODE (x) == PSImode)
6743
    {
6744
      default_assemble_integer (avr_const_address_lo16 (x),
6745
                                GET_MODE_SIZE (HImode), aligned_p);
6746
 
6747
      fputs ("\t.warning\t\"assembling 24-bit address needs binutils"
6748
             " extension for hh8(", asm_out_file);
6749
      output_addr_const (asm_out_file, x);
6750
      fputs (")\"\n", asm_out_file);
6751
 
6752
      fputs ("\t.byte\t0\t" ASM_COMMENT_START " hh8(", asm_out_file);
6753
      output_addr_const (asm_out_file, x);
6754
      fputs (")\n", asm_out_file);
6755
 
6756
      return true;
6757
    }
6758
 
6759
  return default_assemble_integer (x, size, aligned_p);
6760
}
6761
 
6762
 
6763
/* Worker function for ASM_DECLARE_FUNCTION_NAME.  */
6764
 
6765
void
6766
avr_asm_declare_function_name (FILE *file, const char *name, tree decl)
6767
{
6768
 
6769
  /* If the function has the 'signal' or 'interrupt' attribute, test to
6770
     make sure that the name of the function is "__vector_NN" so as to
6771
     catch when the user misspells the interrupt vector name.  */
6772
 
6773
  if (cfun->machine->is_interrupt)
6774
    {
6775
      if (!STR_PREFIX_P (name, "__vector"))
6776
        {
6777
          warning_at (DECL_SOURCE_LOCATION (decl), 0,
6778
                      "%qs appears to be a misspelled interrupt handler",
6779
                      name);
6780
        }
6781
    }
6782
  else if (cfun->machine->is_signal)
6783
    {
6784
      if (!STR_PREFIX_P (name, "__vector"))
6785
        {
6786
           warning_at (DECL_SOURCE_LOCATION (decl), 0,
6787
                       "%qs appears to be a misspelled signal handler",
6788
                       name);
6789
        }
6790
    }
6791
 
6792
  ASM_OUTPUT_TYPE_DIRECTIVE (file, name, "function");
6793
  ASM_OUTPUT_LABEL (file, name);
6794
}
6795
 
6796
 
6797
/* Return value is nonzero if pseudos that have been
6798
   assigned to registers of class CLASS would likely be spilled
6799
   because registers of CLASS are needed for spill registers.  */
6800
 
6801
static bool
6802
avr_class_likely_spilled_p (reg_class_t c)
6803
{
6804
  return (c != ALL_REGS && c != ADDW_REGS);
6805
}
6806
 
6807
/* Valid attributes:
6808
   progmem - put data to program memory;
6809
   signal - make a function to be hardware interrupt. After function
6810
   prologue interrupts are disabled;
6811
   interrupt - make a function to be hardware interrupt. After function
6812
   prologue interrupts are enabled;
6813
   naked     - don't generate function prologue/epilogue and `ret' command.
6814
 
6815
   Only `progmem' attribute valid for type.  */
6816
 
6817
/* Handle a "progmem" attribute; arguments as in
6818
   struct attribute_spec.handler.  */
6819
static tree
6820
avr_handle_progmem_attribute (tree *node, tree name,
6821
                              tree args ATTRIBUTE_UNUSED,
6822
                              int flags ATTRIBUTE_UNUSED,
6823
                              bool *no_add_attrs)
6824
{
6825
  if (DECL_P (*node))
6826
    {
6827
      if (TREE_CODE (*node) == TYPE_DECL)
6828
        {
6829
          /* This is really a decl attribute, not a type attribute,
6830
             but try to handle it for GCC 3.0 backwards compatibility.  */
6831
 
6832
          tree type = TREE_TYPE (*node);
6833
          tree attr = tree_cons (name, args, TYPE_ATTRIBUTES (type));
6834
          tree newtype = build_type_attribute_variant (type, attr);
6835
 
6836
          TYPE_MAIN_VARIANT (newtype) = TYPE_MAIN_VARIANT (type);
6837
          TREE_TYPE (*node) = newtype;
6838
          *no_add_attrs = true;
6839
        }
6840
      else if (TREE_STATIC (*node) || DECL_EXTERNAL (*node))
6841
        {
6842
          *no_add_attrs = false;
6843
        }
6844
      else
6845
        {
6846
          warning (OPT_Wattributes, "%qE attribute ignored",
6847
                   name);
6848
          *no_add_attrs = true;
6849
        }
6850
    }
6851
 
6852
  return NULL_TREE;
6853
}
6854
 
6855
/* Handle an attribute requiring a FUNCTION_DECL; arguments as in
6856
   struct attribute_spec.handler.  */
6857
 
6858
static tree
6859
avr_handle_fndecl_attribute (tree *node, tree name,
6860
                             tree args ATTRIBUTE_UNUSED,
6861
                             int flags ATTRIBUTE_UNUSED,
6862
                             bool *no_add_attrs)
6863
{
6864
  if (TREE_CODE (*node) != FUNCTION_DECL)
6865
    {
6866
      warning (OPT_Wattributes, "%qE attribute only applies to functions",
6867
               name);
6868
      *no_add_attrs = true;
6869
    }
6870
 
6871
  return NULL_TREE;
6872
}
6873
 
6874
static tree
6875
avr_handle_fntype_attribute (tree *node, tree name,
6876
                             tree args ATTRIBUTE_UNUSED,
6877
                             int flags ATTRIBUTE_UNUSED,
6878
                             bool *no_add_attrs)
6879
{
6880
  if (TREE_CODE (*node) != FUNCTION_TYPE)
6881
    {
6882
      warning (OPT_Wattributes, "%qE attribute only applies to functions",
6883
               name);
6884
      *no_add_attrs = true;
6885
    }
6886
 
6887
  return NULL_TREE;
6888
}
6889
 
6890
 
6891
/* AVR attributes.  Terminated by an all-NULL entry.  */
static const struct attribute_spec
avr_attribute_table[] =
{
  /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler,
       affects_type_identity } */
  /* Put data into program memory (flash).  */
  { "progmem",   0, 0, false, false, false,  avr_handle_progmem_attribute,
    false },
  /* ISR entered with interrupts disabled.  */
  { "signal",    0, 0, true,  false, false,  avr_handle_fndecl_attribute,
    false },
  /* ISR that re-enables interrupts in its prologue.  */
  { "interrupt", 0, 0, true,  false, false,  avr_handle_fndecl_attribute,
    false },
  /* No prologue/epilogue/ret is generated.  */
  { "naked",     0, 0, false, true,  true,   avr_handle_fntype_attribute,
    false },
  { "OS_task",   0, 0, false, true,  true,   avr_handle_fntype_attribute,
    false },
  { "OS_main",   0, 0, false, true,  true,   avr_handle_fntype_attribute,
    false },
  { NULL,        0, 0, false, false, false, NULL, false }
};
6911
 
6912
 
6913
/* Look if DECL shall be placed in program memory space by
6914
   means of attribute `progmem' or some address-space qualifier.
6915
   Return non-zero if DECL is data that must end up in Flash and
6916
   zero if the data lives in RAM (.bss, .data, .rodata, ...).
6917
 
6918
   Return 2   if DECL is located in 24-bit flash address-space
6919
   Return 1   if DECL is located in 16-bit flash address-space
6920
   Return -1  if attribute `progmem' occurs in DECL or ATTRIBUTES
6921
   Return 0   otherwise  */
6922
 
6923
int
6924
avr_progmem_p (tree decl, tree attributes)
6925
{
6926
  tree a;
6927
 
6928
  if (TREE_CODE (decl) != VAR_DECL)
6929
    return 0;
6930
 
6931
  if (avr_decl_memx_p (decl))
6932
    return 2;
6933
 
6934
  if (avr_decl_flash_p (decl))
6935
    return 1;
6936
 
6937
  if (NULL_TREE
6938
      != lookup_attribute ("progmem", attributes))
6939
    return -1;
6940
 
6941
  a = decl;
6942
 
6943
  do
6944
    a = TREE_TYPE(a);
6945
  while (TREE_CODE (a) == ARRAY_TYPE);
6946
 
6947
  if (a == error_mark_node)
6948
    return 0;
6949
 
6950
  if (NULL_TREE != lookup_attribute ("progmem", TYPE_ATTRIBUTES (a)))
6951
    return -1;
6952
 
6953
  return 0;
6954
}
6955
 
6956
 
6957
/* Scan type TYP for pointer references to address space ASn.
6958
   Return ADDR_SPACE_GENERIC (i.e. 0) if all pointers targeting
6959
   the AS are also declared to be CONST.
6960
   Otherwise, return the respective addres space, i.e. a value != 0.  */
6961
 
6962
static addr_space_t
6963
avr_nonconst_pointer_addrspace (tree typ)
6964
{
6965
  while (ARRAY_TYPE == TREE_CODE (typ))
6966
    typ = TREE_TYPE (typ);
6967
 
6968
  if (POINTER_TYPE_P (typ))
6969
    {
6970
      addr_space_t as;
6971
      tree target = TREE_TYPE (typ);
6972
 
6973
      /* Pointer to function: Test the function's return type.  */
6974
 
6975
      if (FUNCTION_TYPE == TREE_CODE (target))
6976
        return avr_nonconst_pointer_addrspace (TREE_TYPE (target));
6977
 
6978
      /* "Ordinary" pointers... */
6979
 
6980
      while (TREE_CODE (target) == ARRAY_TYPE)
6981
        target = TREE_TYPE (target);
6982
 
6983
      /* Pointers to non-generic address space must be const.
6984
         Refuse address spaces outside the device's flash.  */
6985
 
6986
      as = TYPE_ADDR_SPACE (target);
6987
 
6988
      if (!ADDR_SPACE_GENERIC_P (as)
6989
          && (!TYPE_READONLY (target)
6990
              || avr_addrspace[as].segment >= avr_current_device->n_flash))
6991
        {
6992
          return as;
6993
        }
6994
 
6995
      /* Scan pointer's target type.  */
6996
 
6997
      return avr_nonconst_pointer_addrspace (target);
6998
    }
6999
 
7000
  return ADDR_SPACE_GENERIC;
7001
}
7002
 
7003
 
7004
/* Sanity check NODE so that all pointers targeting non-generic addres spaces
7005
   go along with CONST qualifier.  Writing to these address spaces should
7006
   be detected and complained about as early as possible.  */
7007
 
7008
static bool
7009
avr_pgm_check_var_decl (tree node)
7010
{
7011
  const char *reason = NULL;
7012
 
7013
  addr_space_t as = ADDR_SPACE_GENERIC;
7014
 
7015
  gcc_assert (as == 0);
7016
 
7017
  if (avr_log.progmem)
7018
    avr_edump ("%?: %t\n", node);
7019
 
7020
  switch (TREE_CODE (node))
7021
    {
7022
    default:
7023
      break;
7024
 
7025
    case VAR_DECL:
7026
      if (as = avr_nonconst_pointer_addrspace (TREE_TYPE (node)), as)
7027
        reason = "variable";
7028
      break;
7029
 
7030
    case PARM_DECL:
7031
      if (as = avr_nonconst_pointer_addrspace (TREE_TYPE (node)), as)
7032
        reason = "function parameter";
7033
      break;
7034
 
7035
    case FIELD_DECL:
7036
      if (as = avr_nonconst_pointer_addrspace (TREE_TYPE (node)), as)
7037
        reason = "structure field";
7038
      break;
7039
 
7040
    case FUNCTION_DECL:
7041
      if (as = avr_nonconst_pointer_addrspace (TREE_TYPE (TREE_TYPE (node))),
7042
          as)
7043
        reason = "return type of function";
7044
      break;
7045
 
7046
    case POINTER_TYPE:
7047
      if (as = avr_nonconst_pointer_addrspace (node), as)
7048
        reason = "pointer";
7049
      break;
7050
    }
7051
 
7052
  if (reason)
7053
    {
7054
      avr_edump ("%?: %s, %d, %d\n",
7055
                 avr_addrspace[as].name,
7056
                 avr_addrspace[as].segment, avr_current_device->n_flash);
7057
      if (avr_addrspace[as].segment >= avr_current_device->n_flash)
7058
        {
7059
          if (TYPE_P (node))
7060
            error ("%qT uses address space %qs beyond flash of %qs",
7061
                   node, avr_addrspace[as].name, avr_current_device->name);
7062
          else
7063
            error ("%s %q+D uses address space %qs beyond flash of %qs",
7064
                   reason, node, avr_addrspace[as].name,
7065
                   avr_current_device->name);
7066
        }
7067
      else
7068
        {
7069
          if (TYPE_P (node))
7070
            error ("pointer targeting address space %qs must be const in %qT",
7071
                   avr_addrspace[as].name, node);
7072
          else
7073
            error ("pointer targeting address space %qs must be const"
7074
                   " in %s %q+D",
7075
                   avr_addrspace[as].name, reason, node);
7076
        }
7077
    }
7078
 
7079
  return reason == NULL;
7080
}
7081
 
7082
 
7083
/* Add the section attribute if the variable is in progmem.  */
7084
 
7085
static void
7086
avr_insert_attributes (tree node, tree *attributes)
7087
{
7088
  avr_pgm_check_var_decl (node);
7089
 
7090
  if (TREE_CODE (node) == VAR_DECL
7091
      && (TREE_STATIC (node) || DECL_EXTERNAL (node))
7092
      && avr_progmem_p (node, *attributes))
7093
    {
7094
      addr_space_t as;
7095
      tree node0 = node;
7096
 
7097
      /* For C++, we have to peel arrays in order to get correct
7098
         determination of readonlyness.  */
7099
 
7100
      do
7101
        node0 = TREE_TYPE (node0);
7102
      while (TREE_CODE (node0) == ARRAY_TYPE);
7103
 
7104
      if (error_mark_node == node0)
7105
        return;
7106
 
7107
      as = TYPE_ADDR_SPACE (TREE_TYPE (node));
7108
 
7109
      if (avr_addrspace[as].segment >= avr_current_device->n_flash)
7110
        {
7111
          error ("variable %q+D located in address space %qs"
7112
                 " beyond flash of %qs",
7113
                 node, avr_addrspace[as].name, avr_current_device->name);
7114
        }
7115
 
7116
      if (!TYPE_READONLY (node0)
7117
          && !TREE_READONLY (node))
7118
        {
7119
          const char *reason = "__attribute__((progmem))";
7120
 
7121
          if (!ADDR_SPACE_GENERIC_P (as))
7122
            reason = avr_addrspace[as].name;
7123
 
7124
          if (avr_log.progmem)
7125
            avr_edump ("\n%?: %t\n%t\n", node, node0);
7126
 
7127
          error ("variable %q+D must be const in order to be put into"
7128
                 " read-only section by means of %qs", node, reason);
7129
        }
7130
    }
7131
}
7132
 
7133
 
7134
/* Implement `ASM_OUTPUT_ALIGNED_DECL_LOCAL'.  */
7135
/* Implement `ASM_OUTPUT_ALIGNED_DECL_COMMON'.  */
7136
/* Track need of __do_clear_bss.  */
7137
 
7138
void
7139
avr_asm_output_aligned_decl_common (FILE * stream,
7140
                                    const_tree decl ATTRIBUTE_UNUSED,
7141
                                    const char *name,
7142
                                    unsigned HOST_WIDE_INT size,
7143
                                    unsigned int align, bool local_p)
7144
{
7145
  /* __gnu_lto_v1 etc. are just markers for the linker injected by toplev.c.
7146
     There is no need to trigger __do_clear_bss code for them.  */
7147
 
7148
  if (!STR_PREFIX_P (name, "__gnu_lto"))
7149
    avr_need_clear_bss_p = true;
7150
 
7151
  if (local_p)
7152
    ASM_OUTPUT_ALIGNED_LOCAL (stream, name, size, align);
7153
  else
7154
    ASM_OUTPUT_ALIGNED_COMMON (stream, name, size, align);
7155
}
7156
 
7157
 
7158
/* Unnamed section callback for data_section
   to track need of __do_copy_data.
   DATA is the section's asm op string, passed through unchanged.  */

static void
avr_output_data_section_asm_op (const void *data)
{
  /* Something was emitted into .data (or .rodata, which shares this
     callback), so startup code __do_copy_data is required.  */
  avr_need_copy_data_p = true;

  /* Dispatch to default.  */
  output_section_asm_op (data);
}
7169
 
7170
 
7171
/* Unnamed section callback for bss_section
   to track need of __do_clear_bss.
   DATA is the section's asm op string, passed through unchanged.  */

static void
avr_output_bss_section_asm_op (const void *data)
{
  /* Something was emitted into .bss, so startup code __do_clear_bss
     is required.  */
  avr_need_clear_bss_p = true;

  /* Dispatch to default.  */
  output_section_asm_op (data);
}
7182
 
7183
 
7184
/* Unnamed section callback for progmem*.data sections.
   DATA is the section name (see avr_asm_init_sections, which passes
   progmem_section_prefix[n] here).  */

static void
avr_output_progmem_section_asm_op (const void *data)
{
  fprintf (asm_out_file, "\t.section\t%s,\"a\",@progbits\n",
           (const char*) data);
}
7192
 
7193
 
7194
/* Implement `TARGET_ASM_INIT_SECTIONS'.  */
7195
 
7196
static void
7197
avr_asm_init_sections (void)
7198
{
7199
  unsigned int n;
7200
 
7201
  /* Set up a section for jump tables.  Alignment is handled by
7202
     ASM_OUTPUT_BEFORE_CASE_LABEL.  */
7203
 
7204
  if (AVR_HAVE_JMP_CALL)
7205
    {
7206
      progmem_swtable_section
7207
        = get_unnamed_section (0, output_section_asm_op,
7208
                               "\t.section\t.progmem.gcc_sw_table"
7209
                               ",\"a\",@progbits");
7210
    }
7211
  else
7212
    {
7213
      progmem_swtable_section
7214
        = get_unnamed_section (SECTION_CODE, output_section_asm_op,
7215
                               "\t.section\t.progmem.gcc_sw_table"
7216
                               ",\"ax\",@progbits");
7217
    }
7218
 
7219
  for (n = 0; n < sizeof (progmem_section) / sizeof (*progmem_section); n++)
7220
    {
7221
      progmem_section[n]
7222
        = get_unnamed_section (0, avr_output_progmem_section_asm_op,
7223
                               progmem_section_prefix[n]);
7224
    }
7225
 
7226
  /* Override section callbacks to keep track of `avr_need_clear_bss_p'
7227
     resp. `avr_need_copy_data_p'.  */
7228
 
7229
  readonly_data_section->unnamed.callback = avr_output_data_section_asm_op;
7230
  data_section->unnamed.callback = avr_output_data_section_asm_op;
7231
  bss_section->unnamed.callback = avr_output_bss_section_asm_op;
7232
}
7233
 
7234
 
7235
/* Implement `TARGET_ASM_FUNCTION_RODATA_SECTION'.
   Return the section to use for DECL's jump tables.  */

static section*
avr_asm_function_rodata_section (tree decl)
{
  /* If a function is unused and optimized out by -ffunction-sections
     and --gc-sections, ensure that the same will happen for its jump
     tables by putting them into individual sections.  */

  unsigned int flags;
  section * frodata;

  /* Get the frodata section from the default function in varasm.c
     but treat function-associated data-like jump tables as code
     rather than as user defined data.  AVR has no constant pools.  */
  {
    /* Temporarily let -ffunction-sections stand in for -fdata-sections
       so the default hook produces a per-function section name.  */
    int fdata = flag_data_sections;

    flag_data_sections = flag_function_sections;
    frodata = default_function_rodata_section (decl);
    flag_data_sections = fdata;
    flags = frodata->common.flags;
  }

  if (frodata != readonly_data_section
      && flags & SECTION_NAMED)
    {
      /* Adjust section flags and replace section name prefix.  */

      unsigned int i;

      /* Pairs of (old-prefix, new-prefix); hence the loop steps by 2.  */
      static const char* const prefix[] =
        {
          ".rodata",          ".progmem.gcc_sw_table",
          ".gnu.linkonce.r.", ".gnu.linkonce.t."
        };

      for (i = 0; i < sizeof (prefix) / sizeof (*prefix); i += 2)
        {
          const char * old_prefix = prefix[i];
          const char * new_prefix = prefix[i+1];
          const char * name = frodata->named.name;

          if (STR_PREFIX_P (name, old_prefix))
            {
              /* Swap in the new prefix, keep the per-function suffix.  */
              const char *rname = ACONCAT ((new_prefix,
                                            name + strlen (old_prefix), NULL));
              /* Jump tables are code when there is no JMP/CALL (they are
                 read via LPM out of the code section).  */
              flags &= ~SECTION_CODE;
              flags |= AVR_HAVE_JMP_CALL ? 0 : SECTION_CODE;

              return get_section (rname, flags, frodata->named.decl);
            }
        }
    }

  /* Fallback: the shared jump-table section.  */
  return progmem_swtable_section;
}
7292
 
7293
 
7294
/* Implement `TARGET_ASM_NAMED_SECTION'.  */
7295
/* Track need of __do_clear_bss, __do_copy_data for named sections.  */
7296
 
7297
static void
7298
avr_asm_named_section (const char *name, unsigned int flags, tree decl)
7299
{
7300
  if (flags & AVR_SECTION_PROGMEM)
7301
    {
7302
      addr_space_t as = (flags & AVR_SECTION_PROGMEM) / SECTION_MACH_DEP;
7303
      int segment = avr_addrspace[as].segment;
7304
      const char *old_prefix = ".rodata";
7305
      const char *new_prefix = progmem_section_prefix[segment];
7306
 
7307
      if (STR_PREFIX_P (name, old_prefix))
7308
        {
7309
          const char *sname = ACONCAT ((new_prefix,
7310
                                        name + strlen (old_prefix), NULL));
7311
          default_elf_asm_named_section (sname, flags, decl);
7312
          return;
7313
        }
7314
 
7315
      default_elf_asm_named_section (new_prefix, flags, decl);
7316
      return;
7317
    }
7318
 
7319
  if (!avr_need_copy_data_p)
7320
    avr_need_copy_data_p = (STR_PREFIX_P (name, ".data")
7321
                            || STR_PREFIX_P (name, ".rodata")
7322
                            || STR_PREFIX_P (name, ".gnu.linkonce.d"));
7323
 
7324
  if (!avr_need_clear_bss_p)
7325
    avr_need_clear_bss_p = STR_PREFIX_P (name, ".bss");
7326
 
7327
  default_elf_asm_named_section (name, flags, decl);
7328
}
7329
 
7330
/* Compute the section flags to use for a section named NAME that will
   hold DECL.  RELOC is passed through to the default hook.  Starts from
   the default ELF flags and adjusts them for AVR specifics:
   ".noinit" sections and variables in program memory (progmem).  */

static unsigned int
avr_section_type_flags (tree decl, const char *name, int reloc)
{
  unsigned int flags = default_section_type_flags (decl, name, reloc);

  if (STR_PREFIX_P (name, ".noinit"))
    {
      /* ".noinit" may only hold uninitialized variables; mark the
         section as BSS-like (@nobits) so no image space is used.  */
      if (decl && TREE_CODE (decl) == VAR_DECL
          && DECL_INITIAL (decl) == NULL_TREE)
        flags |= SECTION_BSS;  /* @nobits */
      else
        warning (0, "only uninitialized variables can be placed in the "
                 ".noinit section");
    }

  if (decl && DECL_P (decl)
      && avr_progmem_p (decl, DECL_ATTRIBUTES (decl)))
    {
      addr_space_t as = TYPE_ADDR_SPACE (TREE_TYPE (decl));

      /* Attribute progmem puts data in generic address space.
         Set section flags as if it was in __flash to get the right
         section prefix in the remainder.  */

      if (ADDR_SPACE_GENERIC_P (as))
        as = ADDR_SPACE_FLASH;

      /* Encode the address space in the machine-dependent flag bits,
         and make sure flash data is neither writable nor BSS.  */
      flags |= as * SECTION_MACH_DEP;
      flags &= ~SECTION_WRITE;
      flags &= ~SECTION_BSS;
    }

  return flags;
}
7364
 
7365
 
7366
/* Implement `TARGET_ENCODE_SECTION_INFO'.  */
/* Warn about uninitialized progmem variables and record the address
   space of DECL in its SYMBOL_REF.  NEW_DECL_P is non-zero the first
   time this is called for DECL.  */

static void
avr_encode_section_info (tree decl, rtx rtl, int new_decl_p)
{
  /* In avr_handle_progmem_attribute, DECL_INITIAL is not yet
     readily available, see PR34734.  So we postpone the warning
     about uninitialized data in program memory section until here.  */

  if (new_decl_p
      && decl && DECL_P (decl)
      && NULL_TREE == DECL_INITIAL (decl)
      && !DECL_EXTERNAL (decl)
      && avr_progmem_p (decl, DECL_ATTRIBUTES (decl)))
    {
      warning (OPT_Wuninitialized,
               "uninitialized variable %q+D put into "
               "program memory area", decl);
    }

  default_encode_section_info (decl, rtl, new_decl_p);

  /* For data (non-function) decls whose RTL is a MEM of a SYMBOL_REF,
     stash the decl's address space in the symbol so later address
     computations know where the object lives.  */

  if (decl && DECL_P (decl)
      && TREE_CODE (decl) != FUNCTION_DECL
      && MEM_P (rtl)
      && SYMBOL_REF == GET_CODE (XEXP (rtl, 0)))
   {
      rtx sym = XEXP (rtl, 0);
      addr_space_t as = TYPE_ADDR_SPACE (TREE_TYPE (decl));

      /* PSTR strings are in generic space but located in flash:
         patch address space.  (avr_progmem_p returning -1 flags this
         case.)  */

      if (-1 == avr_progmem_p (decl, DECL_ATTRIBUTES (decl)))
        as = ADDR_SPACE_FLASH;

      AVR_SYMBOL_SET_ADDR_SPACE (sym, as);
    }
}
7405
 
7406
 
7407
/* Implement `TARGET_ASM_SELECT_SECTION' */
/* Pick the output section for DECL.  Starts from the default ELF
   choice; for progmem variables the ".rodata" section (or name prefix)
   is remapped to the flash segment the decl's address space selects.  */

static section *
avr_asm_select_section (tree decl, int reloc, unsigned HOST_WIDE_INT align)
{
  section * sect = default_elf_select_section (decl, reloc, align);

  if (decl && DECL_P (decl)
      && avr_progmem_p (decl, DECL_ATTRIBUTES (decl)))
    {
      addr_space_t as = TYPE_ADDR_SPACE (TREE_TYPE (decl));
      int segment = avr_addrspace[as].segment;

      if (sect->common.flags & SECTION_NAMED)
        {
          /* Named section:  replace the ".rodata" prefix by the
             segment's section prefix, keeping any suffix.  */
          const char * name = sect->named.name;
          const char * old_prefix = ".rodata";
          const char * new_prefix = progmem_section_prefix[segment];

          if (STR_PREFIX_P (name, old_prefix))
            {
              const char *sname = ACONCAT ((new_prefix,
                                            name + strlen (old_prefix), NULL));
              return get_section (sname, sect->common.flags, sect->named.decl);
            }
        }

      /* Unnamed (or unrecognized) section:  use the segment's default
         progmem section.  */
      return progmem_section[segment];
    }

  return sect;
}
7439
 
7440
/* Implement `TARGET_ASM_FILE_START'.  */
/* Outputs some text at the start of each assembler file.  */

static void
avr_file_start (void)
{
  /* Difference between an SFR's RAM address and its I/O address;
     subtracted below so the emitted symbols are usable with IN/OUT.  */
  int sfr_offset = avr_current_arch->sfr_offset;

  if (avr_current_arch->asm_only)
    error ("MCU %qs supported for assembler only", avr_current_device->name);

  default_file_start ();

  /* Print I/O addresses of some SFRs used with IN and OUT.  */

  if (!AVR_HAVE_8BIT_SP)
    fprintf (asm_out_file, "__SP_H__ = 0x%02x\n", avr_addr.sp_h - sfr_offset);

  fprintf (asm_out_file, "__SP_L__ = 0x%02x\n", avr_addr.sp_l - sfr_offset);
  fprintf (asm_out_file, "__SREG__ = 0x%02x\n", avr_addr.sreg - sfr_offset);
  /* RAMP* registers and CCP only exist on some devices/architectures,
     hence the feature tests.  */
  if (AVR_HAVE_RAMPZ)
    fprintf (asm_out_file, "__RAMPZ__ = 0x%02x\n", avr_addr.rampz - sfr_offset);
  if (AVR_HAVE_RAMPY)
    fprintf (asm_out_file, "__RAMPY__ = 0x%02x\n", avr_addr.rampy - sfr_offset);
  if (AVR_HAVE_RAMPX)
    fprintf (asm_out_file, "__RAMPX__ = 0x%02x\n", avr_addr.rampx - sfr_offset);
  if (AVR_HAVE_RAMPD)
    fprintf (asm_out_file, "__RAMPD__ = 0x%02x\n", avr_addr.rampd - sfr_offset);
  if (AVR_XMEGA)
    fprintf (asm_out_file, "__CCP__ = 0x%02x\n", avr_addr.ccp - sfr_offset);
  /* Register numbers of the fixed scratch and zero registers.  */
  fprintf (asm_out_file, "__tmp_reg__ = %d\n", TMP_REGNO);
  fprintf (asm_out_file, "__zero_reg__ = %d\n", ZERO_REGNO);
}
7473
 
7474
 
7475
/* Implement `TARGET_ASM_FILE_END'.  */
/* Outputs to the stdio stream FILE some
   appropriate text to go at the end of an assembler file.  */

static void
avr_file_end (void)
{
  /* Output these only if there is anything in the
     .data* / .rodata* / .gnu.linkonce.* resp. .bss*
     input section(s) - some code size can be saved by not
     linking in the initialization code from libgcc if resp.
     sections are empty.  The flags are set in avr_asm_named_section
     and friends while sections are emitted.  */

  if (avr_need_copy_data_p)
    fputs (".global __do_copy_data\n", asm_out_file);

  if (avr_need_clear_bss_p)
    fputs (".global __do_clear_bss\n", asm_out_file);
}
7494
 
7495
/* Choose the order in which to allocate hard registers for
7496
   pseudo-registers local to a basic block.
7497
 
7498
   Store the desired register order in the array `reg_alloc_order'.
7499
   Element 0 should be the register to allocate first; element 1, the
7500
   next register; and so on.  */
7501
 
7502
void
7503
order_regs_for_local_alloc (void)
7504
{
7505
  unsigned int i;
7506
  static const int order_0[] = {
7507
    24,25,
7508
    18,19,
7509
    20,21,
7510
    22,23,
7511
    30,31,
7512
    26,27,
7513
    28,29,
7514
    17,16,15,14,13,12,11,10,9,8,7,6,5,4,3,2,
7515
    0,1,
7516
    32,33,34,35
7517
  };
7518
  static const int order_1[] = {
7519
    18,19,
7520
    20,21,
7521
    22,23,
7522
    24,25,
7523
    30,31,
7524
    26,27,
7525
    28,29,
7526
    17,16,15,14,13,12,11,10,9,8,7,6,5,4,3,2,
7527
    0,1,
7528
    32,33,34,35
7529
  };
7530
  static const int order_2[] = {
7531
    25,24,
7532
    23,22,
7533
    21,20,
7534
    19,18,
7535
    30,31,
7536
    26,27,
7537
    28,29,
7538
    17,16,
7539
    15,14,13,12,11,10,9,8,7,6,5,4,3,2,
7540
    1,0,
7541
    32,33,34,35
7542
  };
7543
 
7544
  const int *order = (TARGET_ORDER_1 ? order_1 :
7545
                      TARGET_ORDER_2 ? order_2 :
7546
                      order_0);
7547
  for (i=0; i < ARRAY_SIZE (order_0); ++i)
7548
      reg_alloc_order[i] = order[i];
7549
}
7550
 
7551
 
7552
/* Implement `TARGET_REGISTER_MOVE_COST' */
7553
 
7554
static int
7555
avr_register_move_cost (enum machine_mode mode ATTRIBUTE_UNUSED,
7556
                        reg_class_t from, reg_class_t to)
7557
{
7558
  return (from == STACK_REG ? 6
7559
          : to == STACK_REG ? 12
7560
          : 2);
7561
}
7562
 
7563
 
7564
/* Implement `TARGET_MEMORY_MOVE_COST' */
7565
 
7566
static int
7567
avr_memory_move_cost (enum machine_mode mode,
7568
                      reg_class_t rclass ATTRIBUTE_UNUSED,
7569
                      bool in ATTRIBUTE_UNUSED)
7570
{
7571
  return (mode == QImode ? 2
7572
          : mode == HImode ? 4
7573
          : mode == SImode ? 8
7574
          : mode == SFmode ? 8
7575
          : 16);
7576
}
7577
 
7578
 
7579
/* Mutually recursive subroutine of avr_rtx_cost for calculating the
7580
   cost of an RTX operand given its context.  X is the rtx of the
7581
   operand, MODE is its mode, and OUTER is the rtx_code of this
7582
   operand's parent operator.  */
7583
 
7584
static int
7585
avr_operand_rtx_cost (rtx x, enum machine_mode mode, enum rtx_code outer,
7586
                      int opno, bool speed)
7587
{
7588
  enum rtx_code code = GET_CODE (x);
7589
  int total;
7590
 
7591
  switch (code)
7592
    {
7593
    case REG:
7594
    case SUBREG:
7595
      return 0;
7596
 
7597
    case CONST_INT:
7598
    case CONST_DOUBLE:
7599
      return COSTS_N_INSNS (GET_MODE_SIZE (mode));
7600
 
7601
    default:
7602
      break;
7603
    }
7604
 
7605
  total = 0;
7606
  avr_rtx_costs (x, code, outer, opno, &total, speed);
7607
  return total;
7608
}
7609
 
7610
/* Worker function for AVR backend's rtx_cost function.
7611
   X is rtx expression whose cost is to be calculated.
7612
   Return true if the complete cost has been computed.
7613
   Return false if subexpressions should be scanned.
7614
   In either case, *TOTAL contains the cost result.  */
7615
 
7616
static bool
7617
avr_rtx_costs_1 (rtx x, int codearg, int outer_code ATTRIBUTE_UNUSED,
7618
                 int opno ATTRIBUTE_UNUSED, int *total, bool speed)
7619
{
7620
  enum rtx_code code = (enum rtx_code) codearg;
7621
  enum machine_mode mode = GET_MODE (x);
7622
  HOST_WIDE_INT val;
7623
 
7624
  switch (code)
7625
    {
7626
    case CONST_INT:
7627
    case CONST_DOUBLE:
7628
    case SYMBOL_REF:
7629
    case CONST:
7630
    case LABEL_REF:
7631
      /* Immediate constants are as cheap as registers.  */
7632
      *total = 0;
7633
      return true;
7634
 
7635
    case MEM:
7636
      *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
7637
      return true;
7638
 
7639
    case NEG:
7640
      switch (mode)
7641
        {
7642
        case QImode:
7643
        case SFmode:
7644
          *total = COSTS_N_INSNS (1);
7645
          break;
7646
 
7647
        case HImode:
7648
        case PSImode:
7649
        case SImode:
7650
          *total = COSTS_N_INSNS (2 * GET_MODE_SIZE (mode) - 1);
7651
          break;
7652
 
7653
        default:
7654
          return false;
7655
        }
7656
      *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
7657
      return true;
7658
 
7659
    case ABS:
7660
      switch (mode)
7661
        {
7662
        case QImode:
7663
        case SFmode:
7664
          *total = COSTS_N_INSNS (1);
7665
          break;
7666
 
7667
        default:
7668
          return false;
7669
        }
7670
      *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
7671
      return true;
7672
 
7673
    case NOT:
7674
      *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
7675
      *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
7676
      return true;
7677
 
7678
    case ZERO_EXTEND:
7679
      *total = COSTS_N_INSNS (GET_MODE_SIZE (mode)
7680
                              - GET_MODE_SIZE (GET_MODE (XEXP (x, 0))));
7681
      *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
7682
      return true;
7683
 
7684
    case SIGN_EXTEND:
7685
      *total = COSTS_N_INSNS (GET_MODE_SIZE (mode) + 2
7686
                              - GET_MODE_SIZE (GET_MODE (XEXP (x, 0))));
7687
      *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
7688
      return true;
7689
 
7690
    case PLUS:
7691
      switch (mode)
7692
        {
7693
        case QImode:
7694
          if (AVR_HAVE_MUL
7695
              && MULT == GET_CODE (XEXP (x, 0))
7696
              && register_operand (XEXP (x, 1), QImode))
7697
            {
7698
              /* multiply-add */
7699
              *total = COSTS_N_INSNS (speed ? 4 : 3);
7700
              /* multiply-add with constant: will be split and load constant. */
7701
              if (CONST_INT_P (XEXP (XEXP (x, 0), 1)))
7702
                *total = COSTS_N_INSNS (1) + *total;
7703
              return true;
7704
            }
7705
          *total = COSTS_N_INSNS (1);
7706
          if (GET_CODE (XEXP (x, 1)) != CONST_INT)
7707
            *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
7708
          break;
7709
 
7710
        case HImode:
7711
          if (AVR_HAVE_MUL
7712
              && (MULT == GET_CODE (XEXP (x, 0))
7713
                  || ASHIFT == GET_CODE (XEXP (x, 0)))
7714
              && register_operand (XEXP (x, 1), HImode)
7715
              && (ZERO_EXTEND == GET_CODE (XEXP (XEXP (x, 0), 0))
7716
                  || SIGN_EXTEND == GET_CODE (XEXP (XEXP (x, 0), 0))))
7717
            {
7718
              /* multiply-add */
7719
              *total = COSTS_N_INSNS (speed ? 5 : 4);
7720
              /* multiply-add with constant: will be split and load constant. */
7721
              if (CONST_INT_P (XEXP (XEXP (x, 0), 1)))
7722
                *total = COSTS_N_INSNS (1) + *total;
7723
              return true;
7724
            }
7725
          if (GET_CODE (XEXP (x, 1)) != CONST_INT)
7726
            {
7727
              *total = COSTS_N_INSNS (2);
7728
              *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
7729
                                              speed);
7730
            }
7731
          else if (INTVAL (XEXP (x, 1)) >= -63 && INTVAL (XEXP (x, 1)) <= 63)
7732
            *total = COSTS_N_INSNS (1);
7733
          else
7734
            *total = COSTS_N_INSNS (2);
7735
          break;
7736
 
7737
        case PSImode:
7738
          if (!CONST_INT_P (XEXP (x, 1)))
7739
            {
7740
              *total = COSTS_N_INSNS (3);
7741
              *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
7742
                                              speed);
7743
            }
7744
          else if (INTVAL (XEXP (x, 1)) >= -63 && INTVAL (XEXP (x, 1)) <= 63)
7745
            *total = COSTS_N_INSNS (2);
7746
          else
7747
            *total = COSTS_N_INSNS (3);
7748
          break;
7749
 
7750
        case SImode:
7751
          if (GET_CODE (XEXP (x, 1)) != CONST_INT)
7752
            {
7753
              *total = COSTS_N_INSNS (4);
7754
              *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
7755
                                              speed);
7756
            }
7757
          else if (INTVAL (XEXP (x, 1)) >= -63 && INTVAL (XEXP (x, 1)) <= 63)
7758
            *total = COSTS_N_INSNS (1);
7759
          else
7760
            *total = COSTS_N_INSNS (4);
7761
          break;
7762
 
7763
        default:
7764
          return false;
7765
        }
7766
      *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
7767
      return true;
7768
 
7769
    case MINUS:
7770
      if (AVR_HAVE_MUL
7771
          && QImode == mode
7772
          && register_operand (XEXP (x, 0), QImode)
7773
          && MULT == GET_CODE (XEXP (x, 1)))
7774
        {
7775
          /* multiply-sub */
7776
          *total = COSTS_N_INSNS (speed ? 4 : 3);
7777
          /* multiply-sub with constant: will be split and load constant. */
7778
          if (CONST_INT_P (XEXP (XEXP (x, 1), 1)))
7779
            *total = COSTS_N_INSNS (1) + *total;
7780
          return true;
7781
        }
7782
      if (AVR_HAVE_MUL
7783
          && HImode == mode
7784
          && register_operand (XEXP (x, 0), HImode)
7785
          && (MULT == GET_CODE (XEXP (x, 1))
7786
              || ASHIFT == GET_CODE (XEXP (x, 1)))
7787
          && (ZERO_EXTEND == GET_CODE (XEXP (XEXP (x, 1), 0))
7788
              || SIGN_EXTEND == GET_CODE (XEXP (XEXP (x, 1), 0))))
7789
        {
7790
          /* multiply-sub */
7791
          *total = COSTS_N_INSNS (speed ? 5 : 4);
7792
          /* multiply-sub with constant: will be split and load constant. */
7793
          if (CONST_INT_P (XEXP (XEXP (x, 1), 1)))
7794
            *total = COSTS_N_INSNS (1) + *total;
7795
          return true;
7796
        }
7797
      /* FALLTHRU */
7798
    case AND:
7799
    case IOR:
7800
      *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
7801
      *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
7802
      if (GET_CODE (XEXP (x, 1)) != CONST_INT)
7803
        *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
7804
      return true;
7805
 
7806
    case XOR:
7807
      *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
7808
      *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
7809
      *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
7810
      return true;
7811
 
7812
    case MULT:
7813
      switch (mode)
7814
        {
7815
        case QImode:
7816
          if (AVR_HAVE_MUL)
7817
            *total = COSTS_N_INSNS (!speed ? 3 : 4);
7818
          else if (!speed)
7819
            *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
7820
          else
7821
            return false;
7822
          break;
7823
 
7824
        case HImode:
7825
          if (AVR_HAVE_MUL)
7826
            {
7827
              rtx op0 = XEXP (x, 0);
7828
              rtx op1 = XEXP (x, 1);
7829
              enum rtx_code code0 = GET_CODE (op0);
7830
              enum rtx_code code1 = GET_CODE (op1);
7831
              bool ex0 = SIGN_EXTEND == code0 || ZERO_EXTEND == code0;
7832
              bool ex1 = SIGN_EXTEND == code1 || ZERO_EXTEND == code1;
7833
 
7834
              if (ex0
7835
                  && (u8_operand (op1, HImode)
7836
                      || s8_operand (op1, HImode)))
7837
                {
7838
                  *total = COSTS_N_INSNS (!speed ? 4 : 6);
7839
                  return true;
7840
                }
7841
              if (ex0
7842
                  && register_operand (op1, HImode))
7843
                {
7844
                  *total = COSTS_N_INSNS (!speed ? 5 : 8);
7845
                  return true;
7846
                }
7847
              else if (ex0 || ex1)
7848
                {
7849
                  *total = COSTS_N_INSNS (!speed ? 3 : 5);
7850
                  return true;
7851
                }
7852
              else if (register_operand (op0, HImode)
7853
                       && (u8_operand (op1, HImode)
7854
                           || s8_operand (op1, HImode)))
7855
                {
7856
                  *total = COSTS_N_INSNS (!speed ? 6 : 9);
7857
                  return true;
7858
                }
7859
              else
7860
                *total = COSTS_N_INSNS (!speed ? 7 : 10);
7861
            }
7862
          else if (!speed)
7863
            *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
7864
          else
7865
            return false;
7866
          break;
7867
 
7868
        case PSImode:
7869
          if (!speed)
7870
            *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
7871
          else
7872
            *total = 10;
7873
          break;
7874
 
7875
        case SImode:
7876
          if (AVR_HAVE_MUL)
7877
            {
7878
              if (!speed)
7879
                {
7880
                  /* Add some additional costs besides CALL like moves etc.  */
7881
 
7882
                  *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 5 : 4);
7883
                }
7884
              else
7885
                {
7886
                  /* Just a rough estimate.  Even with -O2 we don't want bulky
7887
                     code expanded inline.  */
7888
 
7889
                  *total = COSTS_N_INSNS (25);
7890
                }
7891
            }
7892
          else
7893
            {
7894
              if (speed)
7895
                *total = COSTS_N_INSNS (300);
7896
              else
7897
                /* Add some additional costs besides CALL like moves etc.  */
7898
                *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 5 : 4);
7899
            }
7900
 
7901
          return true;
7902
 
7903
        default:
7904
          return false;
7905
        }
7906
      *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
7907
      *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
7908
      return true;
7909
 
7910
    case DIV:
7911
    case MOD:
7912
    case UDIV:
7913
    case UMOD:
7914
      if (!speed)
7915
        *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
7916
      else
7917
        *total = COSTS_N_INSNS (15 * GET_MODE_SIZE (mode));
7918
      *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
7919
      /* For div/mod with const-int divisor we have at least the cost of
7920
         loading the divisor. */
7921
      if (CONST_INT_P (XEXP (x, 1)))
7922
        *total += COSTS_N_INSNS (GET_MODE_SIZE (mode));
7923
      /* Add some overall penaly for clobbering and moving around registers */
7924
      *total += COSTS_N_INSNS (2);
7925
      return true;
7926
 
7927
    case ROTATE:
7928
      switch (mode)
7929
        {
7930
        case QImode:
7931
          if (CONST_INT_P (XEXP (x, 1)) && INTVAL (XEXP (x, 1)) == 4)
7932
            *total = COSTS_N_INSNS (1);
7933
 
7934
          break;
7935
 
7936
        case HImode:
7937
          if (CONST_INT_P (XEXP (x, 1)) && INTVAL (XEXP (x, 1)) == 8)
7938
            *total = COSTS_N_INSNS (3);
7939
 
7940
          break;
7941
 
7942
        case SImode:
7943
          if (CONST_INT_P (XEXP (x, 1)))
7944
            switch (INTVAL (XEXP (x, 1)))
7945
              {
7946
              case 8:
7947
              case 24:
7948
                *total = COSTS_N_INSNS (5);
7949
                break;
7950
              case 16:
7951
                *total = COSTS_N_INSNS (AVR_HAVE_MOVW ? 4 : 6);
7952
                break;
7953
              }
7954
          break;
7955
 
7956
        default:
7957
          return false;
7958
        }
7959
      *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
7960
      return true;
7961
 
7962
    case ASHIFT:
7963
      switch (mode)
7964
        {
7965
        case QImode:
7966
          if (GET_CODE (XEXP (x, 1)) != CONST_INT)
7967
            {
7968
              *total = COSTS_N_INSNS (!speed ? 4 : 17);
7969
              *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
7970
                                              speed);
7971
            }
7972
          else
7973
            {
7974
              val = INTVAL (XEXP (x, 1));
7975
              if (val == 7)
7976
                *total = COSTS_N_INSNS (3);
7977
              else if (val >= 0 && val <= 7)
7978
                *total = COSTS_N_INSNS (val);
7979
              else
7980
                *total = COSTS_N_INSNS (1);
7981
            }
7982
          break;
7983
 
7984
        case HImode:
7985
          if (AVR_HAVE_MUL)
7986
            {
7987
              if (const_2_to_7_operand (XEXP (x, 1), HImode)
7988
                  && (SIGN_EXTEND == GET_CODE (XEXP (x, 0))
7989
                      || ZERO_EXTEND == GET_CODE (XEXP (x, 0))))
7990
                {
7991
                  *total = COSTS_N_INSNS (!speed ? 4 : 6);
7992
                  return true;
7993
                }
7994
            }
7995
 
7996
          if (const1_rtx == (XEXP (x, 1))
7997
              && SIGN_EXTEND == GET_CODE (XEXP (x, 0)))
7998
            {
7999
              *total = COSTS_N_INSNS (2);
8000
              return true;
8001
            }
8002
 
8003
          if (GET_CODE (XEXP (x, 1)) != CONST_INT)
8004
            {
8005
              *total = COSTS_N_INSNS (!speed ? 5 : 41);
8006
              *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
8007
                                              speed);
8008
            }
8009
          else
8010
            switch (INTVAL (XEXP (x, 1)))
8011
              {
8012
              case 0:
8013
                *total = 0;
8014
                break;
8015
              case 1:
8016
              case 8:
8017
                *total = COSTS_N_INSNS (2);
8018
                break;
8019
              case 9:
8020
                *total = COSTS_N_INSNS (3);
8021
                break;
8022
              case 2:
8023
              case 3:
8024
              case 10:
8025
              case 15:
8026
                *total = COSTS_N_INSNS (4);
8027
                break;
8028
              case 7:
8029
              case 11:
8030
              case 12:
8031
                *total = COSTS_N_INSNS (5);
8032
                break;
8033
              case 4:
8034
                *total = COSTS_N_INSNS (!speed ? 5 : 8);
8035
                break;
8036
              case 6:
8037
                *total = COSTS_N_INSNS (!speed ? 5 : 9);
8038
                break;
8039
              case 5:
8040
                *total = COSTS_N_INSNS (!speed ? 5 : 10);
8041
                break;
8042
              default:
8043
                *total = COSTS_N_INSNS (!speed ? 5 : 41);
8044
                *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
8045
                                                speed);
8046
              }
8047
          break;
8048
 
8049
        case PSImode:
8050
          if (!CONST_INT_P (XEXP (x, 1)))
8051
            {
8052
              *total = COSTS_N_INSNS (!speed ? 6 : 73);
8053
            }
8054
          else
8055
            switch (INTVAL (XEXP (x, 1)))
8056
              {
8057
              case 0:
8058
                *total = 0;
8059
                break;
8060
              case 1:
8061
              case 8:
8062
              case 16:
8063
                *total = COSTS_N_INSNS (3);
8064
                break;
8065
              case 23:
8066
                *total = COSTS_N_INSNS (5);
8067
                break;
8068
              default:
8069
                *total = COSTS_N_INSNS (!speed ? 5 : 3 * INTVAL (XEXP (x, 1)));
8070
                break;
8071
              }
8072
          break;
8073
 
8074
        case SImode:
8075
          if (GET_CODE (XEXP (x, 1)) != CONST_INT)
8076
            {
8077
              *total = COSTS_N_INSNS (!speed ? 7 : 113);
8078
              *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
8079
                                              speed);
8080
            }
8081
          else
8082
            switch (INTVAL (XEXP (x, 1)))
8083
              {
8084
              case 0:
8085
                *total = 0;
8086
                break;
8087
              case 24:
8088
                *total = COSTS_N_INSNS (3);
8089
                break;
8090
              case 1:
8091
              case 8:
8092
              case 16:
8093
                *total = COSTS_N_INSNS (4);
8094
                break;
8095
              case 31:
8096
                *total = COSTS_N_INSNS (6);
8097
                break;
8098
              case 2:
8099
                *total = COSTS_N_INSNS (!speed ? 7 : 8);
8100
                break;
8101
              default:
8102
                *total = COSTS_N_INSNS (!speed ? 7 : 113);
8103
                *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
8104
                                                speed);
8105
              }
8106
          break;
8107
 
8108
        default:
8109
          return false;
8110
        }
8111
      *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
8112
      return true;
8113
 
8114
    case ASHIFTRT:
8115
      switch (mode)
8116
        {
8117
        case QImode:
8118
          if (GET_CODE (XEXP (x, 1)) != CONST_INT)
8119
            {
8120
              *total = COSTS_N_INSNS (!speed ? 4 : 17);
8121
              *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
8122
                                              speed);
8123
            }
8124
          else
8125
            {
8126
              val = INTVAL (XEXP (x, 1));
8127
              if (val == 6)
8128
                *total = COSTS_N_INSNS (4);
8129
              else if (val == 7)
8130
                *total = COSTS_N_INSNS (2);
8131
              else if (val >= 0 && val <= 7)
8132
                *total = COSTS_N_INSNS (val);
8133
              else
8134
                *total = COSTS_N_INSNS (1);
8135
            }
8136
          break;
8137
 
8138
        case HImode:
8139
          if (GET_CODE (XEXP (x, 1)) != CONST_INT)
8140
            {
8141
              *total = COSTS_N_INSNS (!speed ? 5 : 41);
8142
              *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
8143
                                              speed);
8144
            }
8145
          else
8146
            switch (INTVAL (XEXP (x, 1)))
8147
              {
8148
              case 0:
8149
                *total = 0;
8150
                break;
8151
              case 1:
8152
                *total = COSTS_N_INSNS (2);
8153
                break;
8154
              case 15:
8155
                *total = COSTS_N_INSNS (3);
8156
                break;
8157
              case 2:
8158
              case 7:
8159
              case 8:
8160
              case 9:
8161
                *total = COSTS_N_INSNS (4);
8162
                break;
8163
              case 10:
8164
              case 14:
8165
                *total = COSTS_N_INSNS (5);
8166
                break;
8167
              case 11:
8168
                *total = COSTS_N_INSNS (!speed ? 5 : 6);
8169
                break;
8170
              case 12:
8171
                *total = COSTS_N_INSNS (!speed ? 5 : 7);
8172
                break;
8173
              case 6:
8174
              case 13:
8175
                *total = COSTS_N_INSNS (!speed ? 5 : 8);
8176
                break;
8177
              default:
8178
                *total = COSTS_N_INSNS (!speed ? 5 : 41);
8179
                *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
8180
                                                speed);
8181
              }
8182
          break;
8183
 
8184
        case PSImode:
8185
          if (!CONST_INT_P (XEXP (x, 1)))
8186
            {
8187
              *total = COSTS_N_INSNS (!speed ? 6 : 73);
8188
            }
8189
          else
8190
            switch (INTVAL (XEXP (x, 1)))
8191
              {
8192
              case 0:
8193
                *total = 0;
8194
                break;
8195
              case 1:
8196
                *total = COSTS_N_INSNS (3);
8197
                break;
8198
              case 16:
8199
              case 8:
8200
                *total = COSTS_N_INSNS (5);
8201
                break;
8202
              case 23:
8203
                *total = COSTS_N_INSNS (4);
8204
                break;
8205
              default:
8206
                *total = COSTS_N_INSNS (!speed ? 5 : 3 * INTVAL (XEXP (x, 1)));
8207
                break;
8208
              }
8209
          break;
8210
 
8211
        case SImode:
8212
          if (GET_CODE (XEXP (x, 1)) != CONST_INT)
8213
            {
8214
              *total = COSTS_N_INSNS (!speed ? 7 : 113);
8215
              *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
8216
                                              speed);
8217
            }
8218
          else
8219
            switch (INTVAL (XEXP (x, 1)))
8220
              {
8221
              case 0:
8222
                *total = 0;
8223
                break;
8224
              case 1:
8225
                *total = COSTS_N_INSNS (4);
8226
                break;
8227
              case 8:
8228
              case 16:
8229
              case 24:
8230
                *total = COSTS_N_INSNS (6);
8231
                break;
8232
              case 2:
8233
                *total = COSTS_N_INSNS (!speed ? 7 : 8);
8234
                break;
8235
              case 31:
8236
                *total = COSTS_N_INSNS (AVR_HAVE_MOVW ? 4 : 5);
8237
                break;
8238
              default:
8239
                *total = COSTS_N_INSNS (!speed ? 7 : 113);
8240
                *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
8241
                                                speed);
8242
              }
8243
          break;
8244
 
8245
        default:
8246
          return false;
8247
        }
8248
      *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
8249
      return true;
8250
 
8251
    case LSHIFTRT:
8252
      switch (mode)
8253
        {
8254
        case QImode:
8255
          if (GET_CODE (XEXP (x, 1)) != CONST_INT)
8256
            {
8257
              *total = COSTS_N_INSNS (!speed ? 4 : 17);
8258
              *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
8259
                                              speed);
8260
            }
8261
          else
8262
            {
8263
              val = INTVAL (XEXP (x, 1));
8264
              if (val == 7)
8265
                *total = COSTS_N_INSNS (3);
8266
              else if (val >= 0 && val <= 7)
8267
                *total = COSTS_N_INSNS (val);
8268
              else
8269
                *total = COSTS_N_INSNS (1);
8270
            }
8271
          break;
8272
 
8273
        case HImode:
8274
          if (GET_CODE (XEXP (x, 1)) != CONST_INT)
8275
            {
8276
              *total = COSTS_N_INSNS (!speed ? 5 : 41);
8277
              *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
8278
                                              speed);
8279
            }
8280
          else
8281
            switch (INTVAL (XEXP (x, 1)))
8282
              {
8283
              case 0:
8284
                *total = 0;
8285
                break;
8286
              case 1:
8287
              case 8:
8288
                *total = COSTS_N_INSNS (2);
8289
                break;
8290
              case 9:
8291
                *total = COSTS_N_INSNS (3);
8292
                break;
8293
              case 2:
8294
              case 10:
8295
              case 15:
8296
                *total = COSTS_N_INSNS (4);
8297
                break;
8298
              case 7:
8299
              case 11:
8300
                *total = COSTS_N_INSNS (5);
8301
                break;
8302
              case 3:
8303
              case 12:
8304
              case 13:
8305
              case 14:
8306
                *total = COSTS_N_INSNS (!speed ? 5 : 6);
8307
                break;
8308
              case 4:
8309
                *total = COSTS_N_INSNS (!speed ? 5 : 7);
8310
                break;
8311
              case 5:
8312
              case 6:
8313
                *total = COSTS_N_INSNS (!speed ? 5 : 9);
8314
                break;
8315
              default:
8316
                *total = COSTS_N_INSNS (!speed ? 5 : 41);
8317
                *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
8318
                                                speed);
8319
              }
8320
          break;
8321
 
8322
        case PSImode:
8323
          if (!CONST_INT_P (XEXP (x, 1)))
8324
            {
8325
              *total = COSTS_N_INSNS (!speed ? 6 : 73);
8326
            }
8327
          else
8328
            switch (INTVAL (XEXP (x, 1)))
8329
              {
8330
              case 0:
8331
                *total = 0;
8332
                break;
8333
              case 1:
8334
              case 8:
8335
              case 16:
8336
                *total = COSTS_N_INSNS (3);
8337
                break;
8338
              case 23:
8339
                *total = COSTS_N_INSNS (5);
8340
                break;
8341
              default:
8342
                *total = COSTS_N_INSNS (!speed ? 5 : 3 * INTVAL (XEXP (x, 1)));
8343
                break;
8344
              }
8345
          break;
8346
 
8347
        case SImode:
8348
          if (GET_CODE (XEXP (x, 1)) != CONST_INT)
8349
            {
8350
              *total = COSTS_N_INSNS (!speed ? 7 : 113);
8351
              *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
8352
                                              speed);
8353
            }
8354
          else
8355
            switch (INTVAL (XEXP (x, 1)))
8356
              {
8357
              case 0:
8358
                *total = 0;
8359
                break;
8360
              case 1:
8361
                *total = COSTS_N_INSNS (4);
8362
                break;
8363
              case 2:
8364
                *total = COSTS_N_INSNS (!speed ? 7 : 8);
8365
                break;
8366
              case 8:
8367
              case 16:
8368
              case 24:
8369
                *total = COSTS_N_INSNS (4);
8370
                break;
8371
              case 31:
8372
                *total = COSTS_N_INSNS (6);
8373
                break;
8374
              default:
8375
                *total = COSTS_N_INSNS (!speed ? 7 : 113);
8376
                *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
8377
                                                speed);
8378
              }
8379
          break;
8380
 
8381
        default:
8382
          return false;
8383
        }
8384
      *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
8385
      return true;
8386
 
8387
    case COMPARE:
8388
      switch (GET_MODE (XEXP (x, 0)))
8389
        {
8390
        case QImode:
8391
          *total = COSTS_N_INSNS (1);
8392
          if (GET_CODE (XEXP (x, 1)) != CONST_INT)
8393
            *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
8394
          break;
8395
 
8396
        case HImode:
8397
          *total = COSTS_N_INSNS (2);
8398
          if (GET_CODE (XEXP (x, 1)) != CONST_INT)
8399
            *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
8400
          else if (INTVAL (XEXP (x, 1)) != 0)
8401
            *total += COSTS_N_INSNS (1);
8402
          break;
8403
 
8404
        case PSImode:
8405
          *total = COSTS_N_INSNS (3);
8406
          if (CONST_INT_P (XEXP (x, 1)) && INTVAL (XEXP (x, 1)) != 0)
8407
            *total += COSTS_N_INSNS (2);
8408
          break;
8409
 
8410
        case SImode:
8411
          *total = COSTS_N_INSNS (4);
8412
          if (GET_CODE (XEXP (x, 1)) != CONST_INT)
8413
            *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
8414
          else if (INTVAL (XEXP (x, 1)) != 0)
8415
            *total += COSTS_N_INSNS (3);
8416
          break;
8417
 
8418
        default:
8419
          return false;
8420
        }
8421
      *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
8422
      return true;
8423
 
8424
    case TRUNCATE:
8425
      if (AVR_HAVE_MUL
8426
          && LSHIFTRT == GET_CODE (XEXP (x, 0))
8427
          && MULT == GET_CODE (XEXP (XEXP (x, 0), 0))
8428
          && CONST_INT_P (XEXP (XEXP (x, 0), 1)))
8429
        {
8430
          if (QImode == mode || HImode == mode)
8431
            {
8432
              *total = COSTS_N_INSNS (2);
8433
              return true;
8434
            }
8435
        }
8436
      break;
8437
 
8438
    default:
8439
      break;
8440
    }
8441
  return false;
8442
}
8443
 
8444
 
8445
/* Implement `TARGET_RTX_COSTS'.  */
8446
 
8447
static bool
8448
avr_rtx_costs (rtx x, int codearg, int outer_code,
8449
               int opno, int *total, bool speed)
8450
{
8451
  bool done = avr_rtx_costs_1 (x, codearg, outer_code,
8452
                               opno, total, speed);
8453
 
8454
  if (avr_log.rtx_costs)
8455
    {
8456
      avr_edump ("\n%?=%b (%s) total=%d, outer=%C:\n%r\n",
8457
                 done, speed ? "speed" : "size", *total, outer_code, x);
8458
    }
8459
 
8460
  return done;
8461
}
8462
 
8463
 
8464
/* Implement `TARGET_ADDRESS_COST'.  */
8465
 
8466
static int
8467
avr_address_cost (rtx x, bool speed ATTRIBUTE_UNUSED)
8468
{
8469
  int cost = 4;
8470
 
8471
  if (GET_CODE (x) == PLUS
8472
      && CONST_INT_P (XEXP (x, 1))
8473
      && (REG_P (XEXP (x, 0))
8474
          || GET_CODE (XEXP (x, 0)) == SUBREG))
8475
    {
8476
      if (INTVAL (XEXP (x, 1)) >= 61)
8477
        cost = 18;
8478
    }
8479
  else if (CONSTANT_ADDRESS_P (x))
8480
    {
8481
      if (optimize > 0
8482
          && io_address_operand (x, QImode))
8483
        cost = 2;
8484
    }
8485
 
8486
  if (avr_log.address_cost)
8487
    avr_edump ("\n%?: %d = %r\n", cost, x);
8488
 
8489
  return cost;
8490
}
8491
 
8492
/* Test for extra memory constraint 'Q'.
   It's a memory address based on Y or Z pointer with valid displacement.  */

int
extra_constraint_Q (rtx x)
{
  int ok = 0;

  /* X is a MEM; accept a (plus (reg) (const_int)) address whose
     displacement does not exceed the LD/ST offset range for the
     access mode.  Note that the lower bound is not checked here.  */

  if (GET_CODE (XEXP (x,0)) == PLUS
      && REG_P (XEXP (XEXP (x,0), 0))
      && GET_CODE (XEXP (XEXP (x,0), 1)) == CONST_INT
      && (INTVAL (XEXP (XEXP (x,0), 1))
          <= MAX_LD_OFFSET (GET_MODE (x))))
    {
      rtx xx = XEXP (XEXP (x,0), 0);
      int regno = REGNO (xx);

      /* Base must be a pseudo (later allocated), Y or Z, or one of the
         soft pointers that eliminate to Y/Z.  */
      ok = (/* allocate pseudos */
            regno >= FIRST_PSEUDO_REGISTER
            /* strictly check */
            || regno == REG_Z || regno == REG_Y
            /* XXX frame & arg pointer checks */
            || xx == frame_pointer_rtx
            || xx == arg_pointer_rtx);

      if (avr_log.constraints)
        avr_edump ("\n%?=%d reload_completed=%d reload_in_progress=%d\n %r\n",
                   ok, reload_completed, reload_in_progress, x);
    }

  return ok;
}
8524
 
8525
/* Convert condition code CONDITION to the valid AVR condition code.  */
8526
 
8527
RTX_CODE
8528
avr_normalize_condition (RTX_CODE condition)
8529
{
8530
  switch (condition)
8531
    {
8532
    case GT:
8533
      return GE;
8534
    case GTU:
8535
      return GEU;
8536
    case LE:
8537
      return LT;
8538
    case LEU:
8539
      return LTU;
8540
    default:
8541
      gcc_unreachable ();
8542
    }
8543
}
8544
 
8545
/* Helper function for `avr_reorg'.  */

/* If INSN is a non-jump insn whose single_set is a comparison setting
   cc0, return that SET pattern; otherwise return NULL_RTX.  DImode
   comparisons are explicitly excluded.  */

static rtx
avr_compare_pattern (rtx insn)
{
  rtx pattern = single_set (insn);

  if (pattern
      && NONJUMP_INSN_P (insn)
      && SET_DEST (pattern) == cc0_rtx
      && GET_CODE (SET_SRC (pattern)) == COMPARE
      && DImode != GET_MODE (XEXP (SET_SRC (pattern), 0))
      && DImode != GET_MODE (XEXP (SET_SRC (pattern), 1)))
    {
      return pattern;
    }

  return NULL_RTX;
}
8564
 
8565
/* Helper function for `avr_reorg'.  */

/* Expansion of switch/case decision trees leads to code like

       cc0 = compare (Reg, Num)
       if (cc0 == 0)
         goto L1

       cc0 = compare (Reg, Num)
       if (cc0 > 0)
         goto L2

   The second comparison is superfluous and can be deleted.
   The second jump condition can be transformed from a
   "difficult" one to a "simple" one because "cc0 > 0" and
   "cc0 >= 0" will have the same effect here.

   This function relies on the way switch/case is being expaned
   as binary decision tree.  For example code see PR 49903.

   Return TRUE if optimization performed.
   Return FALSE if nothing changed.

   INSN1 is a comparison, i.e. avr_compare_pattern != 0.

   We don't want to do this in text peephole because it is
   tedious to work out jump offsets there and the second comparison
   might have been transormed by `avr_reorg'.

   RTL peephole won't do because peephole2 does not scan across
   basic blocks.  */

static bool
avr_reorg_remove_redundant_compare (rtx insn1)
{
  rtx comp1, ifelse1, xcond1, branch1;
  rtx comp2, ifelse2, xcond2, branch2, insn2;
  enum rtx_code code;
  rtx jump, target, cond;

  /* Look out for:  compare1 - branch1 - compare2 - branch2  */

  branch1 = next_nonnote_nondebug_insn (insn1);
  if (!branch1 || !JUMP_P (branch1))
    return false;

  insn2 = next_nonnote_nondebug_insn (branch1);
  if (!insn2 || !avr_compare_pattern (insn2))
    return false;

  branch2 = next_nonnote_nondebug_insn (insn2);
  if (!branch2 || !JUMP_P (branch2))
    return false;

  comp1 = avr_compare_pattern (insn1);
  comp2 = avr_compare_pattern (insn2);
  xcond1 = single_set (branch1);
  xcond2 = single_set (branch2);

  /* Both comparisons must be identical and both branches must be
     plain (set (pc) (if_then_else ...)) jumps.  */

  if (!comp1 || !comp2
      || !rtx_equal_p (comp1, comp2)
      || !xcond1 || SET_DEST (xcond1) != pc_rtx
      || !xcond2 || SET_DEST (xcond2) != pc_rtx
      || IF_THEN_ELSE != GET_CODE (SET_SRC (xcond1))
      || IF_THEN_ELSE != GET_CODE (SET_SRC (xcond2)))
    {
      return false;
    }

  comp1 = SET_SRC (comp1);
  ifelse1 = SET_SRC (xcond1);
  ifelse2 = SET_SRC (xcond2);

  /* comp<n> is COMPARE now and ifelse<n> is IF_THEN_ELSE.  */

  /* The first branch must test cc0 for EQ against a register/const
     compare; both branches must fall through in their else-arm and
     jump to a plain label in their then-arm.  */

  if (EQ != GET_CODE (XEXP (ifelse1, 0))
      || !REG_P (XEXP (comp1, 0))
      || !CONST_INT_P (XEXP (comp1, 1))
      || XEXP (ifelse1, 2) != pc_rtx
      || XEXP (ifelse2, 2) != pc_rtx
      || LABEL_REF != GET_CODE (XEXP (ifelse1, 1))
      || LABEL_REF != GET_CODE (XEXP (ifelse2, 1))
      || !COMPARISON_P (XEXP (ifelse2, 0))
      || cc0_rtx != XEXP (XEXP (ifelse1, 0), 0)
      || cc0_rtx != XEXP (XEXP (ifelse2, 0), 0)
      || const0_rtx != XEXP (XEXP (ifelse1, 0), 1)
      || const0_rtx != XEXP (XEXP (ifelse2, 0), 1))
    {
      return false;
    }

  /* We filtered the insn sequence to look like

        (set (cc0)
             (compare (reg:M N)
                      (const_int VAL)))
        (set (pc)
             (if_then_else (eq (cc0)
                               (const_int 0))
                           (label_ref L1)
                           (pc)))

        (set (cc0)
             (compare (reg:M N)
                      (const_int VAL)))
        (set (pc)
             (if_then_else (CODE (cc0)
                                 (const_int 0))
                           (label_ref L2)
                           (pc)))
  */

  code = GET_CODE (XEXP (ifelse2, 0));

  /* Map GT/GTU to GE/GEU which is easier for AVR.
     The first two instructions compare/branch on EQ
     so we may replace the difficult

        if (x == VAL)   goto L1;
        if (x > VAL)    goto L2;

     with easy

         if (x == VAL)   goto L1;
         if (x >= VAL)   goto L2;

     Similarly, replace LE/LEU by LT/LTU.  */

  switch (code)
    {
    case EQ:
    case LT:  case LTU:
    case GE:  case GEU:
      break;

    case LE:  case LEU:
    case GT:  case GTU:
      code = avr_normalize_condition (code);
      break;

    default:
      return false;
    }

  /* Wrap the branches into UNSPECs so they won't be changed or
     optimized in the remainder.  */

  target = XEXP (XEXP (ifelse1, 1), 0);
  cond = XEXP (ifelse1, 0);
  jump = emit_jump_insn_after (gen_branch_unspec (target, cond), insn1);

  JUMP_LABEL (jump) = JUMP_LABEL (branch1);

  target = XEXP (XEXP (ifelse2, 1), 0);
  cond = gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx);
  jump = emit_jump_insn_after (gen_branch_unspec (target, cond), insn2);

  JUMP_LABEL (jump) = JUMP_LABEL (branch2);

  /* The comparisons in insn1 and insn2 are exactly the same;
     insn2 is superfluous so delete it.  */

  delete_insn (insn2);
  delete_insn (branch1);
  delete_insn (branch2);

  return true;
}
8733
 
8734
 
8735
/* Implement `TARGET_MACHINE_DEPENDENT_REORG'.  */
8736
/* Optimize conditional jumps.  */
8737
 
8738
static void
8739
avr_reorg (void)
8740
{
8741
  rtx insn = get_insns();
8742
 
8743
  for (insn = next_real_insn (insn); insn; insn = next_real_insn (insn))
8744
    {
8745
      rtx pattern = avr_compare_pattern (insn);
8746
 
8747
      if (!pattern)
8748
        continue;
8749
 
8750
      if (optimize
8751
          && avr_reorg_remove_redundant_compare (insn))
8752
        {
8753
          continue;
8754
        }
8755
 
8756
      if (compare_diff_p (insn))
8757
        {
8758
          /* Now we work under compare insn with difficult branch.  */
8759
 
8760
          rtx next = next_real_insn (insn);
8761
          rtx pat = PATTERN (next);
8762
 
8763
          pattern = SET_SRC (pattern);
8764
 
8765
          if (true_regnum (XEXP (pattern, 0)) >= 0
8766
              && true_regnum (XEXP (pattern, 1)) >= 0)
8767
            {
8768
              rtx x = XEXP (pattern, 0);
8769
              rtx src = SET_SRC (pat);
8770
              rtx t = XEXP (src,0);
8771
              PUT_CODE (t, swap_condition (GET_CODE (t)));
8772
              XEXP (pattern, 0) = XEXP (pattern, 1);
8773
              XEXP (pattern, 1) = x;
8774
              INSN_CODE (next) = -1;
8775
            }
8776
          else if (true_regnum (XEXP (pattern, 0)) >= 0
8777
                   && XEXP (pattern, 1) == const0_rtx)
8778
            {
8779
              /* This is a tst insn, we can reverse it.  */
8780
              rtx src = SET_SRC (pat);
8781
              rtx t = XEXP (src,0);
8782
 
8783
              PUT_CODE (t, swap_condition (GET_CODE (t)));
8784
              XEXP (pattern, 1) = XEXP (pattern, 0);
8785
              XEXP (pattern, 0) = const0_rtx;
8786
              INSN_CODE (next) = -1;
8787
              INSN_CODE (insn) = -1;
8788
            }
8789
          else if (true_regnum (XEXP (pattern, 0)) >= 0
8790
                   && CONST_INT_P (XEXP (pattern, 1)))
8791
            {
8792
              rtx x = XEXP (pattern, 1);
8793
              rtx src = SET_SRC (pat);
8794
              rtx t = XEXP (src,0);
8795
              enum machine_mode mode = GET_MODE (XEXP (pattern, 0));
8796
 
8797
              if (avr_simplify_comparison_p (mode, GET_CODE (t), x))
8798
                {
8799
                  XEXP (pattern, 1) = gen_int_mode (INTVAL (x) + 1, mode);
8800
                  PUT_CODE (t, avr_normalize_condition (GET_CODE (t)));
8801
                  INSN_CODE (next) = -1;
8802
                  INSN_CODE (insn) = -1;
8803
                }
8804
            }
8805
        }
8806
    }
8807
}
8808
 
8809
/* Returns register number for function return value.*/

static inline unsigned int
avr_ret_register (void)
{
  /* R24: return values end here; wider values extend downwards
     (see avr_libcall_value, which computes the start register).  */
  return 24;
}
8816
 
8817
/* Worker function for TARGET_FUNCTION_VALUE_REGNO_P.  */

static bool
avr_function_value_regno_p (const unsigned int regno)
{
  /* Only R24, the canonical return register, qualifies.  */
  return (regno == avr_ret_register ());
}
8824
 
8825
/* Create an RTX representing the place where a
8826
   library function returns a value of mode MODE.  */
8827
 
8828
static rtx
8829
avr_libcall_value (enum machine_mode mode,
8830
                   const_rtx func ATTRIBUTE_UNUSED)
8831
{
8832
  int offs = GET_MODE_SIZE (mode);
8833
 
8834
  if (offs <= 4)
8835
    offs = (offs + 1) & ~1;
8836
 
8837
  return gen_rtx_REG (mode, avr_ret_register () + 2 - offs);
8838
}
8839
 
8840
/* Create an RTX representing the place where a
   function returns a value of data type VALTYPE.  */

static rtx
avr_function_value (const_tree type,
                    const_tree fn_decl_or_type ATTRIBUTE_UNUSED,
                    bool outgoing ATTRIBUTE_UNUSED)
{
  unsigned int offs;

  /* Non-BLKmode values are placed exactly like libcall values.  */
  if (TYPE_MODE (type) != BLKmode)
    return avr_libcall_value (TYPE_MODE (type), NULL_RTX);

  /* BLKmode aggregate: round the size up to 2, 4 or 8 bytes so the
     value starts in an even register.  */
  offs = int_size_in_bytes (type);
  if (offs < 2)
    offs = 2;
  if (offs > 2 && offs < GET_MODE_SIZE (SImode))
    offs = GET_MODE_SIZE (SImode);
  else if (offs > GET_MODE_SIZE (SImode) && offs < GET_MODE_SIZE (DImode))
    offs = GET_MODE_SIZE (DImode);

  /* Start register is OFFS bytes below R26, same layout as above.  */
  return gen_rtx_REG (BLKmode, avr_ret_register () + 2 - offs);
}
8863
 
8864
int
8865
test_hard_reg_class (enum reg_class rclass, rtx x)
8866
{
8867
  int regno = true_regnum (x);
8868
  if (regno < 0)
8869
    return 0;
8870
 
8871
  if (TEST_HARD_REG_CLASS (rclass, regno))
8872
    return 1;
8873
 
8874
  return 0;
8875
}
8876
 
8877
 
8878
/* Helper for jump_over_one_insn_p:  Test if INSN is a 2-word instruction
   and thus is suitable to be skipped by CPSE, SBRC, etc.  */

static bool
avr_2word_insn_p (rtx insn)
{
  /* If the device has the skip erratum flagged, never allow skipping
     a 2-word insn; also bail out unless INSN is exactly 2 words.  */
  if (avr_current_device->errata_skip
      || !insn
      || 2 != get_attr_length (insn))
    {
      return false;
    }

  /* Only specific insn patterns are known to be 2-word encodings.  */
  switch (INSN_CODE (insn))
    {
    default:
      return false;

    case CODE_FOR_movqi_insn:
      {
        rtx set  = single_set (insn);
        rtx src  = SET_SRC (set);
        rtx dest = SET_DEST (set);

        /* Factor out LDS and STS from movqi_insn.  */

        if (MEM_P (dest)
            && (REG_P (src) || src == const0_rtx))
          {
            /* STS: store to a constant address.  */
            return CONSTANT_ADDRESS_P (XEXP (dest, 0));
          }
        else if (REG_P (dest)
                 && MEM_P (src))
          {
            /* LDS: load from a constant address.  */
            return CONSTANT_ADDRESS_P (XEXP (src, 0));
          }

        return false;
      }

    case CODE_FOR_call_insn:
    case CODE_FOR_call_value_insn:
      /* A call insn of length 2 words qualifies.  */
      return true;
    }
}
8923
 
8924
 
8925
/* Return nonzero if jump INSN with target DEST jumps over exactly one
   insn and hence is a candidate for a skip instruction:  either the
   skipped distance is one word, or it is two words and the skipped
   insn is a 2-word insn that is safe to skip (see avr_2word_insn_p).  */

int
jump_over_one_insn_p (rtx insn, rtx dest)
{
  /* DEST may be a LABEL_REF wrapper or the label insn itself.  */
  int uid = INSN_UID (GET_CODE (dest) == LABEL_REF
                      ? XEXP (dest, 0)
                      : dest);
  int jump_addr = INSN_ADDRESSES (INSN_UID (insn));
  int dest_addr = INSN_ADDRESSES (uid);
  /* Words between the end of the jump insn and the target label.  */
  int jump_offset = dest_addr - jump_addr - get_attr_length (insn);

  return (jump_offset == 1
          || (jump_offset == 2
              && avr_2word_insn_p (next_active_insn (insn))));
}
8939
 
8940
/* Returns 1 if a value of mode MODE can be stored starting with hard
8941
   register number REGNO.  On the enhanced core, anything larger than
8942
   1 byte must start in even numbered register for "movw" to work
8943
   (this way we don't have to check for odd registers everywhere).  */
8944
 
8945
int
8946
avr_hard_regno_mode_ok (int regno, enum machine_mode mode)
8947
{
8948
  /* NOTE: 8-bit values must not be disallowed for R28 or R29.
8949
        Disallowing QI et al. in these regs might lead to code like
8950
            (set (subreg:QI (reg:HI 28) n) ...)
8951
        which will result in wrong code because reload does not
8952
        handle SUBREGs of hard regsisters like this.
8953
        This could be fixed in reload.  However, it appears
8954
        that fixing reload is not wanted by reload people.  */
8955
 
8956
  /* Any GENERAL_REGS register can hold 8-bit values.  */
8957
 
8958
  if (GET_MODE_SIZE (mode) == 1)
8959
    return 1;
8960
 
8961
  /* FIXME: Ideally, the following test is not needed.
8962
        However, it turned out that it can reduce the number
8963
        of spill fails.  AVR and it's poor endowment with
8964
        address registers is extreme stress test for reload.  */
8965
 
8966
  if (GET_MODE_SIZE (mode) >= 4
8967
      && regno >= REG_X)
8968
    return 0;
8969
 
8970
  /* All modes larger than 8 bits should start in an even register.  */
8971
 
8972
  return !(regno & 1);
8973
}
8974
 
8975
 
8976
/* Implement `MODE_CODE_BASE_REG_CLASS'.  */
8977
 
8978
reg_class_t
8979
avr_mode_code_base_reg_class (enum machine_mode mode ATTRIBUTE_UNUSED,
8980
                              addr_space_t as, RTX_CODE outer_code,
8981
                              RTX_CODE index_code ATTRIBUTE_UNUSED)
8982
{
8983
  if (!ADDR_SPACE_GENERIC_P (as))
8984
    {
8985
      return POINTER_Z_REGS;
8986
    }
8987
 
8988
  if (!avr_strict_X)
8989
    return reload_completed ? BASE_POINTER_REGS : POINTER_REGS;
8990
 
8991
  return PLUS == outer_code ? BASE_POINTER_REGS : POINTER_REGS;
8992
}
8993
 
8994
 
8995
/* Implement `REGNO_MODE_CODE_OK_FOR_BASE_P'.  */

bool
avr_regno_mode_code_ok_for_base_p (int regno,
                                   enum machine_mode mode ATTRIBUTE_UNUSED,
                                   addr_space_t as ATTRIBUTE_UNUSED,
                                   RTX_CODE outer_code,
                                   RTX_CODE index_code ATTRIBUTE_UNUSED)
{
  bool ok = false;

  /* Non-generic address spaces: only Z (or a pseudo that got
     allocated to Z) is an acceptable base register.  */

  if (!ADDR_SPACE_GENERIC_P (as))
    {
      if (regno < FIRST_PSEUDO_REGISTER
          && regno == REG_Z)
        {
          return true;
        }

      /* Translate a pseudo to its assigned hard register, if any.  */
      if (reg_renumber)
        {
          regno = reg_renumber[regno];

          if (regno == REG_Z)
            {
              return true;
            }
        }

      return false;
    }

  /* Generic address space: X, Y, Z and the arg pointer are valid
     bases, either directly or via renumbering of a pseudo.  */

  if (regno < FIRST_PSEUDO_REGISTER
      && (regno == REG_X
          || regno == REG_Y
          || regno == REG_Z
          || regno == ARG_POINTER_REGNUM))
    {
      ok = true;
    }
  else if (reg_renumber)
    {
      regno = reg_renumber[regno];

      if (regno == REG_X
          || regno == REG_Y
          || regno == REG_Z
          || regno == ARG_POINTER_REGNUM)
        {
          ok = true;
        }
    }

  /* With -mstrict-X, X is not allowed as base of a reg+offset
     address (PLUS as outer code).  */

  if (avr_strict_X
      && PLUS == outer_code
      && regno == REG_X)
    {
      ok = false;
    }

  return ok;
}
9057
 
9058
 
9059
/* A helper for `output_reload_insisf' and `output_reload_inhi'.  */
/* Set 32-bit register OP[0] to compile-time constant OP[1].
   CLOBBER_REG is a QI clobber register or NULL_RTX.
   LEN == NULL: output instructions.
   LEN != NULL: set *LEN to the length of the instruction sequence
                (in words) printed with LEN = NULL.
   If CLEAR_P is true, OP[0] had been cleard to Zero already.
   If CLEAR_P is false, nothing is known about OP[0].

   The effect on cc0 is as follows:

   Load 0 to any register except ZERO_REG : NONE
   Load ld register with any value        : NONE
   Anything else:                         : CLOBBER  */

static void
output_reload_in_const (rtx *op, rtx clobber_reg, int *len, bool clear_p)
{
  rtx src = op[1];
  rtx dest = op[0];
  rtx xval, xdest[4];
  int ival[4];
  /* Value currently held in CLOBBER_REG; start with an impossible
     byte value so the first comparison never matches.  */
  int clobber_val = 1234;
  bool cooked_clobber_p = false;
  /* Whether the T flag has already been SET for bld-based loads.  */
  bool set_p = false;
  enum machine_mode mode = GET_MODE (dest);
  int n, n_bytes = GET_MODE_SIZE (mode);

  gcc_assert (REG_P (dest)
              && CONSTANT_P (src));

  if (len)
    *len = 0;

  /* (REG:SI 14) is special: It's neither in LD_REGS nor in NO_LD_REGS
     but has some subregs that are in LD_REGS.  Use the MSB (REG:QI 17).  */

  if (REGNO (dest) < 16
      && REGNO (dest) + GET_MODE_SIZE (mode) > 16)
    {
      clobber_reg = all_regs_rtx[REGNO (dest) + n_bytes - 1];
    }

  /* We might need a clobber reg but don't have one.  Look at the value to
     be loaded more closely.  A clobber is only needed if it is a symbol
     or contains a byte that is neither 0, -1 or a power of 2.  */

  if (NULL_RTX == clobber_reg
      && !test_hard_reg_class (LD_REGS, dest)
      && (! (CONST_INT_P (src) || CONST_DOUBLE_P (src))
          || !avr_popcount_each_byte (src, n_bytes,
                                      (1 << 0) | (1 << 1) | (1 << 8))))
    {
      /* We have no clobber register but need one.  Cook one up.
         That's cheaper than loading from constant pool.  */

      cooked_clobber_p = true;
      clobber_reg = all_regs_rtx[REG_Z + 1];
      avr_asm_len ("mov __tmp_reg__,%0", &clobber_reg, len, 1);
    }

  /* Now start filling DEST from LSB to MSB.  */

  for (n = 0; n < n_bytes; n++)
    {
      int ldreg_p;
      bool done_byte = false;
      int j;
      rtx xop[3];

      /* Crop the n-th destination byte.  */

      xdest[n] = simplify_gen_subreg (QImode, dest, mode, n);
      ldreg_p = test_hard_reg_class (LD_REGS, xdest[n]);

      /* Symbolic constants: emit lo8/hi8/hlo8/hhi8 relocations,
         either directly via LDI (ld reg) or through the clobber.  */

      if (!CONST_INT_P (src)
          && !CONST_DOUBLE_P (src))
        {
          static const char* const asm_code[][2] =
            {
              { "ldi %2,lo8(%1)"  CR_TAB "mov %0,%2",    "ldi %0,lo8(%1)"  },
              { "ldi %2,hi8(%1)"  CR_TAB "mov %0,%2",    "ldi %0,hi8(%1)"  },
              { "ldi %2,hlo8(%1)" CR_TAB "mov %0,%2",    "ldi %0,hlo8(%1)" },
              { "ldi %2,hhi8(%1)" CR_TAB "mov %0,%2",    "ldi %0,hhi8(%1)" }
            };

          xop[0] = xdest[n];
          xop[1] = src;
          xop[2] = clobber_reg;

          avr_asm_len (asm_code[n][ldreg_p], xop, len, ldreg_p ? 1 : 2);

          continue;
        }

      /* Crop the n-th source byte.  */

      xval = simplify_gen_subreg (QImode, src, mode, n);
      ival[n] = INTVAL (xval);

      /* Look if we can reuse the low word by means of MOVW.  */

      if (n == 2
          && n_bytes >= 4
          && AVR_HAVE_MOVW)
        {
          rtx lo16 = simplify_gen_subreg (HImode, src, mode, 0);
          rtx hi16 = simplify_gen_subreg (HImode, src, mode, 2);

          if (INTVAL (lo16) == INTVAL (hi16))
            {
              /* If both words are zero and DEST is pre-cleared the
                 MOVW can be omitted altogether.  */
              if (0 != INTVAL (lo16)
                  || !clear_p)
                {
                  avr_asm_len ("movw %C0,%A0", &op[0], len, 1);
                }

              break;
            }
        }

      /* Don't use CLR so that cc0 is set as expected.  */

      if (ival[n] == 0)
        {
          if (!clear_p)
            avr_asm_len (ldreg_p ? "ldi %0,0"
                         : ZERO_REGNO == REGNO (xdest[n]) ? "clr %0"
                         : "mov %0,__zero_reg__",
                         &xdest[n], len, 1);
          continue;
        }

      /* The wanted value happens to sit in the clobber reg already
         and this byte IS the clobber reg: nothing to do.  */

      if (clobber_val == ival[n]
          && REGNO (clobber_reg) == REGNO (xdest[n]))
        {
          continue;
        }

      /* LD_REGS can use LDI to move a constant value */

      if (ldreg_p)
        {
          xop[0] = xdest[n];
          xop[1] = xval;
          avr_asm_len ("ldi %0,lo8(%1)", xop, len, 1);
          continue;
        }

      /* Try to reuse value already loaded in some lower byte. */

      for (j = 0; j < n; j++)
        if (ival[j] == ival[n])
          {
            xop[0] = xdest[n];
            xop[1] = xdest[j];

            avr_asm_len ("mov %0,%1", xop, len, 1);
            done_byte = true;
            break;
          }

      if (done_byte)
        continue;

      /* Need no clobber reg for -1: Use CLR/DEC */

      if (-1 == ival[n])
        {
          if (!clear_p)
            avr_asm_len ("clr %0", &xdest[n], len, 1);

          avr_asm_len ("dec %0", &xdest[n], len, 1);
          continue;
        }
      else if (1 == ival[n])
        {
          if (!clear_p)
            avr_asm_len ("clr %0", &xdest[n], len, 1);

          avr_asm_len ("inc %0", &xdest[n], len, 1);
          continue;
        }

      /* Use T flag or INC to manage powers of 2 if we have
         no clobber reg.  */

      if (NULL_RTX == clobber_reg
          && single_one_operand (xval, QImode))
        {
          xop[0] = xdest[n];
          xop[1] = GEN_INT (exact_log2 (ival[n] & GET_MODE_MASK (QImode)));

          gcc_assert (constm1_rtx != xop[1]);

          /* SET the T flag once; each power-of-2 byte then only
             needs CLR + BLD.  */

          if (!set_p)
            {
              set_p = true;
              avr_asm_len ("set", xop, len, 1);
            }

          if (!clear_p)
            avr_asm_len ("clr %0", xop, len, 1);

          avr_asm_len ("bld %0,%1", xop, len, 1);
          continue;
        }

      /* We actually need the LD_REGS clobber reg.  */

      gcc_assert (NULL_RTX != clobber_reg);

      xop[0] = xdest[n];
      xop[1] = xval;
      xop[2] = clobber_reg;
      clobber_val = ival[n];

      avr_asm_len ("ldi %2,lo8(%1)" CR_TAB
                   "mov %0,%2", xop, len, 2);
    }

  /* If we cooked up a clobber reg above, restore it.  */

  if (cooked_clobber_p)
    {
      avr_asm_len ("mov %0,__tmp_reg__", &clobber_reg, len, 1);
    }
}
9287
 
9288
 
9289
/* Reload the constant OP[1] into the HI register OP[0].
9290
   CLOBBER_REG is a QI clobber reg needed to move vast majority of consts
9291
   into a NO_LD_REGS register.  If CLOBBER_REG is NULL_RTX we either don't
9292
   need a clobber reg or have to cook one up.
9293
 
9294
   PLEN == NULL: Output instructions.
9295
   PLEN != NULL: Output nothing.  Set *PLEN to number of words occupied
9296
                 by the insns printed.
9297
 
9298
   Return "".  */
9299
 
9300
const char*
9301
output_reload_inhi (rtx *op, rtx clobber_reg, int *plen)
9302
{
9303
  output_reload_in_const (op, clobber_reg, plen, false);
9304
  return "";
9305
}
9306
 
9307
 
9308
/* Reload a SI or SF compile time constant OP[1] into the register OP[0].
   CLOBBER_REG is a QI clobber reg needed to move vast majority of consts
   into a NO_LD_REGS register.  If CLOBBER_REG is NULL_RTX we either don't
   need a clobber reg or have to cook one up.

   LEN == NULL: Output instructions.

   LEN != NULL: Output nothing.  Set *LEN to number of words occupied
                by the insns printed.

   Return "".  */

const char *
output_reload_insisf (rtx *op, rtx clobber_reg, int *len)
{
  /* Only consider the pre-clear trick when MOVW is available, the
     destination is not itself an LD_REGS register (LDI would be cheap
     then anyway) and the value is a compile-time constant.  */

  if (AVR_HAVE_MOVW
      && !test_hard_reg_class (LD_REGS, op[0])
      && (CONST_INT_P (op[1])
          || CONST_DOUBLE_P (op[1])))
    {
      int len_clr, len_noclr;

      /* In some cases it is better to clear the destination beforehand, e.g.

             CLR R2   CLR R3   MOVW R4,R2   INC R2

         is shorter than

             CLR R2   INC R2   CLR  R3      CLR R4   CLR R5

         We find it too tedious to work that out in the print function.
         Instead, we call the print function twice to get the lengths of
         both methods and use the shortest one.  */

      /* Probe both variants in length-only mode (no code is printed).  */

      output_reload_in_const (op, clobber_reg, &len_clr, true);
      output_reload_in_const (op, clobber_reg, &len_noclr, false);

      if (len_noclr - len_clr == 4)
        {
          /* Default needs 4 CLR instructions: clear register beforehand.
             Two MOVs plus one MOVW (3 words) do the same job.  */

          avr_asm_len ("mov %A0,__zero_reg__" CR_TAB
                       "mov %B0,__zero_reg__" CR_TAB
                       "movw %C0,%A0", &op[0], len, 3);

          output_reload_in_const (op, clobber_reg, len, true);

          /* Account for the 3 clearing words emitted above.  */

          if (len)
            *len += 3;

          return "";
        }
    }

  /* Default: destination not pre-cleared.  */

  output_reload_in_const (op, clobber_reg, len, false);
  return "";
}
9367
 
9368
const char *
9369
avr_out_reload_inpsi (rtx *op, rtx clobber_reg, int *len)
9370
{
9371
  output_reload_in_const (op, clobber_reg, len, false);
9372
  return "";
9373
}
9374
 
9375
 
9376
void
9377
avr_output_addr_vec_elt (FILE *stream, int value)
9378
{
9379
  if (AVR_HAVE_JMP_CALL)
9380
    fprintf (stream, "\t.word gs(.L%d)\n", value);
9381
  else
9382
    fprintf (stream, "\trjmp .L%d\n", value);
9383
}
9384
 
9385
/* Returns true if SCRATCH are safe to be allocated as a scratch
9386
   registers (for a define_peephole2) in the current function.  */
9387
 
9388
static bool
9389
avr_hard_regno_scratch_ok (unsigned int regno)
9390
{
9391
  /* Interrupt functions can only use registers that have already been saved
9392
     by the prologue, even if they would normally be call-clobbered.  */
9393
 
9394
  if ((cfun->machine->is_interrupt || cfun->machine->is_signal)
9395
      && !df_regs_ever_live_p (regno))
9396
    return false;
9397
 
9398
  /* Don't allow hard registers that might be part of the frame pointer.
9399
     Some places in the compiler just test for [HARD_]FRAME_POINTER_REGNUM
9400
     and don't care for a frame pointer that spans more than one register.  */
9401
 
9402
  if ((!reload_completed || frame_pointer_needed)
9403
      && (regno == REG_Y || regno == REG_Y + 1))
9404
    {
9405
      return false;
9406
    }
9407
 
9408
  return true;
9409
}
9410
 
9411
/* Return nonzero if register OLD_REG can be renamed to register NEW_REG.  */
9412
 
9413
int
9414
avr_hard_regno_rename_ok (unsigned int old_reg,
9415
                          unsigned int new_reg)
9416
{
9417
  /* Interrupt functions can only use registers that have already been
9418
     saved by the prologue, even if they would normally be
9419
     call-clobbered.  */
9420
 
9421
  if ((cfun->machine->is_interrupt || cfun->machine->is_signal)
9422
      && !df_regs_ever_live_p (new_reg))
9423
    return 0;
9424
 
9425
  /* Don't allow hard registers that might be part of the frame pointer.
9426
     Some places in the compiler just test for [HARD_]FRAME_POINTER_REGNUM
9427
     and don't care for a frame pointer that spans more than one register.  */
9428
 
9429
  if ((!reload_completed || frame_pointer_needed)
9430
      && (old_reg == REG_Y || old_reg == REG_Y + 1
9431
          || new_reg == REG_Y || new_reg == REG_Y + 1))
9432
    {
9433
      return 0;
9434
    }
9435
 
9436
  return 1;
9437
}
9438
 
9439
/* Output a branch that tests a single bit of a register (QI, HI, SI or DImode)
   or memory location in the I/O space (QImode only).

   Operand 0: comparison operator (must be EQ or NE, compare bit to zero).
   Operand 1: register operand to test, or CONST_INT memory address.
   Operand 2: bit number.
   Operand 3: label to jump to if the test is true.  */

const char *
avr_out_sbxx_branch (rtx insn, rtx operands[])
{
  enum rtx_code comp = GET_CODE (operands[0]);

  /* Length >= 4 means RJMP cannot reach the label: use skip + JMP.  */
  bool long_jump = get_attr_length (insn) >= 4;

  /* Reversed sense: skip over the code that follows instead of jumping
     to the label.  Also used when the label is just one insn away.  */
  bool reverse = long_jump || jump_over_one_insn_p (insn, operands[3]);

  /* Sign-bit tests arrive as GE/LT; on a single bit they are EQ/NE.  */

  if (comp == GE)
    comp = EQ;
  else if (comp == LT)
    comp = NE;

  if (reverse)
    comp = reverse_condition (comp);

  switch (GET_CODE (operands[1]))
    {
    default:
      gcc_unreachable();

    case CONST_INT:

      /* Bit in I/O space.  SBIS/SBIC work only on the low I/O range;
         otherwise read the port into the scratch reg and use SBRS/SBRC.  */

      if (low_io_address_operand (operands[1], QImode))
        {
          if (comp == EQ)
            output_asm_insn ("sbis %i1,%2", operands);
          else
            output_asm_insn ("sbic %i1,%2", operands);
        }
      else
        {
          output_asm_insn ("in __tmp_reg__,%i1", operands);
          if (comp == EQ)
            output_asm_insn ("sbrs __tmp_reg__,%2", operands);
          else
            output_asm_insn ("sbrc __tmp_reg__,%2", operands);
        }

      break; /* CONST_INT */

    case REG:

      /* Bit in a (possibly multi-byte) register; the %T print modifiers
         select the right byte and bit, cf. avr_move_bits' use of %T.  */

      if (comp == EQ)
        output_asm_insn ("sbrs %T1%T2", operands);
      else
        output_asm_insn ("sbrc %T1%T2", operands);

      break; /* REG */
    }        /* switch */

  /* The skip instruction emitted above conditionally skips the branch
     we return here.  */

  if (long_jump)
    return ("rjmp .+4" CR_TAB
            "jmp %x3");

  if (!reverse)
    return "rjmp %x3";

  return "";
}
9506
 
9507
/* Worker function for TARGET_ASM_CONSTRUCTOR.  */
9508
 
9509
static void
9510
avr_asm_out_ctor (rtx symbol, int priority)
9511
{
9512
  fputs ("\t.global __do_global_ctors\n", asm_out_file);
9513
  default_ctor_section_asm_out_constructor (symbol, priority);
9514
}
9515
 
9516
/* Worker function for TARGET_ASM_DESTRUCTOR.  */
9517
 
9518
static void
9519
avr_asm_out_dtor (rtx symbol, int priority)
9520
{
9521
  fputs ("\t.global __do_global_dtors\n", asm_out_file);
9522
  default_dtor_section_asm_out_destructor (symbol, priority);
9523
}
9524
 
9525
/* Worker function for TARGET_RETURN_IN_MEMORY.  */
9526
 
9527
static bool
9528
avr_return_in_memory (const_tree type, const_tree fntype ATTRIBUTE_UNUSED)
9529
{
9530
  if (TYPE_MODE (type) == BLKmode)
9531
    {
9532
      HOST_WIDE_INT size = int_size_in_bytes (type);
9533
      return (size == -1 || size > 8);
9534
    }
9535
  else
9536
    return false;
9537
}
9538
 
9539
/* Worker function for CASE_VALUES_THRESHOLD.  */
9540
 
9541
static unsigned int
9542
avr_case_values_threshold (void)
9543
{
9544
  return (!AVR_HAVE_JMP_CALL || TARGET_CALL_PROLOGUES) ? 8 : 17;
9545
}
9546
 
9547
 
9548
/* Implement `TARGET_ADDR_SPACE_ADDRESS_MODE'.  */
9549
 
9550
static enum machine_mode
9551
avr_addr_space_address_mode (addr_space_t as)
9552
{
9553
  return avr_addrspace[as].pointer_size == 3 ? PSImode : HImode;
9554
}
9555
 
9556
 
9557
/* Implement `TARGET_ADDR_SPACE_POINTER_MODE'.  */
9558
 
9559
static enum machine_mode
9560
avr_addr_space_pointer_mode (addr_space_t as)
9561
{
9562
  return avr_addr_space_address_mode (as);
9563
}
9564
 
9565
 
9566
/* Helper for following function.  */
9567
 
9568
static bool
9569
avr_reg_ok_for_pgm_addr (rtx reg, bool strict)
9570
{
9571
  gcc_assert (REG_P (reg));
9572
 
9573
  if (strict)
9574
    {
9575
      return REGNO (reg) == REG_Z;
9576
    }
9577
 
9578
  /* Avoid combine to propagate hard regs.  */
9579
 
9580
  if (can_create_pseudo_p()
9581
      && REGNO (reg) < REG_Z)
9582
    {
9583
      return false;
9584
    }
9585
 
9586
  return true;
9587
}
9588
 
9589
 
9590
/* Implement `TARGET_ADDR_SPACE_LEGITIMATE_ADDRESS_P'.
   Return true if X is a valid address of mode MODE in address space AS.
   STRICT requests hard-register checking.  */

static bool
avr_addr_space_legitimate_address_p (enum machine_mode mode, rtx x,
                                     bool strict, addr_space_t as)
{
  bool ok = false;

  switch (as)
    {
    default:
      gcc_unreachable();

    case ADDR_SPACE_GENERIC:
      /* Ordinary RAM addresses use the generic legitimacy rules.  */
      return avr_legitimate_address_p (mode, x, strict);

    case ADDR_SPACE_FLASH:
    case ADDR_SPACE_FLASH1:
    case ADDR_SPACE_FLASH2:
    case ADDR_SPACE_FLASH3:
    case ADDR_SPACE_FLASH4:
    case ADDR_SPACE_FLASH5:

      /* Flash is read via (E)LPM: only a plain register or a register
         with post-increment qualifies, cf. avr_reg_ok_for_pgm_addr.  */

      switch (GET_CODE (x))
        {
        case REG:
          ok = avr_reg_ok_for_pgm_addr (x, strict);
          break;

        case POST_INC:
          ok = avr_reg_ok_for_pgm_addr (XEXP (x, 0), strict);
          break;

        default:
          break;
        }

      break; /* FLASH */

    case ADDR_SPACE_MEMX:
      /* A bare (24-bit) register is only fine before reload.  */
      if (REG_P (x))
        ok = (!strict
              && can_create_pseudo_p());

      /* (lo_sum hi lo): low part must end up in Z.  */
      if (LO_SUM == GET_CODE (x))
        {
          rtx hi = XEXP (x, 0);
          rtx lo = XEXP (x, 1);

          ok = (REG_P (hi)
                && (!strict || REGNO (hi) < FIRST_PSEUDO_REGISTER)
                && REG_P (lo)
                && REGNO (lo) == REG_Z);
        }

      break; /* MEMX */
    }

  /* Optional dump for -mlog=legitimate_address_p debugging.  */

  if (avr_log.legitimate_address_p)
    {
      avr_edump ("\n%?: ret=%b, mode=%m strict=%d "
                 "reload_completed=%d reload_in_progress=%d %s:",
                 ok, mode, strict, reload_completed, reload_in_progress,
                 reg_renumber ? "(reg_renumber)" : "");

      if (GET_CODE (x) == PLUS
          && REG_P (XEXP (x, 0))
          && CONST_INT_P (XEXP (x, 1))
          && IN_RANGE (INTVAL (XEXP (x, 1)), 0, MAX_LD_OFFSET (mode))
          && reg_renumber)
        {
          avr_edump ("(r%d ---> r%d)", REGNO (XEXP (x, 0)),
                     true_regnum (XEXP (x, 0)));
        }

      avr_edump ("\n%r\n", x);
    }

  return ok;
}
9670
 
9671
 
9672
/* Implement `TARGET_ADDR_SPACE_LEGITIMIZE_ADDRESS'.  */
9673
 
9674
static rtx
9675
avr_addr_space_legitimize_address (rtx x, rtx old_x,
9676
                                   enum machine_mode mode, addr_space_t as)
9677
{
9678
  if (ADDR_SPACE_GENERIC_P (as))
9679
    return avr_legitimize_address (x, old_x, mode);
9680
 
9681
  if (avr_log.legitimize_address)
9682
    {
9683
      avr_edump ("\n%?: mode=%m\n %r\n", mode, old_x);
9684
    }
9685
 
9686
  return old_x;
9687
}
9688
 
9689
 
9690
/* Implement `TARGET_ADDR_SPACE_CONVERT'.
   Convert pointer SRC from the address space of TYPE_FROM to that of
   TYPE_TO and return the (possibly new) rtx holding the result.  */

static rtx
avr_addr_space_convert (rtx src, tree type_from, tree type_to)
{
  addr_space_t as_from = TYPE_ADDR_SPACE (TREE_TYPE (type_from));
  addr_space_t as_to = TYPE_ADDR_SPACE (TREE_TYPE (type_to));

  if (avr_log.progmem)
    avr_edump ("\n%!: op = %r\nfrom = %t\nto = %t\n",
               src, type_from, type_to);

  /* Up-casting from 16-bit to 24-bit pointer.  */

  if (as_from != ADDR_SPACE_MEMX
      && as_to == ADDR_SPACE_MEMX)
    {
      int msb;
      rtx sym = src;
      rtx reg = gen_reg_rtx (PSImode);

      /* Strip CONST/PLUS wrappers to get at a possible SYMBOL_REF.  */

      while (CONST == GET_CODE (sym) || PLUS == GET_CODE (sym))
        sym = XEXP (sym, 0);

      /* Look at symbol flags:  avr_encode_section_info set the flags
         also if attribute progmem was seen so that we get the right
         promotion for, e.g. PSTR-like strings that reside in generic space
         but are located in flash.  In that case we patch the incoming
         address space.  */

      if (SYMBOL_REF == GET_CODE (sym)
          && ADDR_SPACE_FLASH == AVR_SYMBOL_GET_ADDR_SPACE (sym))
        {
          as_from = ADDR_SPACE_FLASH;
        }

      /* Linearize memory: RAM has bit 23 set.  */

      msb = ADDR_SPACE_GENERIC_P (as_from)
        ? 0x80
        : avr_addrspace[as_from].segment;

      src = force_reg (Pmode, src);

      /* Widen to PSImode, filling the high byte with MSB (or zero).  */

      emit_insn (msb == 0
                 ? gen_zero_extendhipsi2 (reg, src)
                 : gen_n_extendhipsi2 (reg, gen_int_mode (msb, QImode), src));

      return reg;
    }

  /* Down-casting from 24-bit to 16-bit throws away the high byte.  */

  if (as_from == ADDR_SPACE_MEMX
      && as_to != ADDR_SPACE_MEMX)
    {
      rtx new_src = gen_reg_rtx (Pmode);

      src = force_reg (PSImode, src);

      emit_move_insn (new_src,
                      simplify_gen_subreg (Pmode, src, PSImode, 0));
      return new_src;
    }

  /* Same pointer width on both sides: nothing to do.  */

  return src;
}
9757
 
9758
 
9759
/* Implement `TARGET_ADDR_SPACE_SUBSET_P'.  */
9760
 
9761
static bool
9762
avr_addr_space_subset_p (addr_space_t subset ATTRIBUTE_UNUSED,
9763
                         addr_space_t superset ATTRIBUTE_UNUSED)
9764
{
9765
  /* Allow any kind of pointer mess.  */
9766
 
9767
  return true;
9768
}
9769
 
9770
 
9771
/* Worker function for movmemhi expander.
   XOP[0]  Destination as MEM:BLK
   XOP[1]  Source      "     "
   XOP[2]  # Bytes to copy

   Return TRUE  if the expansion is accomplished.
   Return FALSE if the operand combination is not supported.  */

bool
avr_emit_movmemhi (rtx *xop)
{
  HOST_WIDE_INT count;
  enum machine_mode loop_mode;
  addr_space_t as = MEM_ADDR_SPACE (xop[1]);
  rtx loop_reg, addr1, a_src, a_dest, insn, xas;
  rtx a_hi8 = NULL_RTX;

  /* Writing to flash is not supported.  */

  if (avr_mem_flash_p (xop[0]))
    return false;

  /* Only constant, positive byte counts are handled here.  */

  if (!CONST_INT_P (xop[2]))
    return false;

  count = INTVAL (xop[2]);
  if (count <= 0)
    return false;

  a_src  = XEXP (xop[1], 0);
  a_dest = XEXP (xop[0], 0);

  if (PSImode == GET_MODE (a_src))
    {
      /* 24-bit source address: split into 16-bit low part and the
         segment byte; the loop counter is pinned to R24.  */

      gcc_assert (as == ADDR_SPACE_MEMX);

      loop_mode = (count < 0x100) ? QImode : HImode;
      loop_reg = gen_rtx_REG (loop_mode, 24);
      emit_move_insn (loop_reg, gen_int_mode (count, loop_mode));

      addr1 = simplify_gen_subreg (HImode, a_src, PSImode, 0);
      a_hi8 = simplify_gen_subreg (QImode, a_src, PSImode, 2);
    }
  else
    {
      int segment = avr_addrspace[as].segment;

      /* Multi-segment flash: load RAMPZ with the segment number.  */

      if (segment
          && avr_current_device->n_flash > 1)
        {
          a_hi8 = GEN_INT (segment);
          emit_move_insn (rampz_rtx, a_hi8 = copy_to_mode_reg (QImode, a_hi8));
        }
      else if (!ADDR_SPACE_GENERIC_P (as))
        {
          /* Single-segment device: any flash space degrades to plain
             ADDR_SPACE_FLASH.  */

          as = ADDR_SPACE_FLASH;
        }

      addr1 = a_src;

      loop_mode = (count <= 0x100) ? QImode : HImode;
      loop_reg = copy_to_mode_reg (loop_mode, gen_int_mode (count, loop_mode));
    }

  xas = GEN_INT (as);

  /* FIXME: Register allocator might come up with spill fails if it is left
        on its own.  Thus, we allocate the pointer registers by hand:
        Z = source address
        X = destination address  */

  emit_move_insn (lpm_addr_reg_rtx, addr1);
  emit_move_insn (gen_rtx_REG (HImode, REG_X), a_dest);

  /* FIXME: Register allocator does a bad job and might spill address
        register(s) inside the loop leading to additional move instruction
        to/from stack which could clobber tmp_reg.  Thus, do *not* emit
        load and store as separate insns.  Instead, we perform the copy
        by means of one monolithic insn.  */

  gcc_assert (TMP_REGNO == LPM_REGNO);

  if (as != ADDR_SPACE_MEMX)
    {
      /* Load instruction ([E]LPM or LD) is known at compile time:
         Do the copy-loop inline.  */

      rtx (*fun) (rtx, rtx, rtx)
        = QImode == loop_mode ? gen_movmem_qi : gen_movmem_hi;

      insn = fun (xas, loop_reg, loop_reg);
    }
  else
    {
      /* 24-bit source: segment byte goes to R23, the helper needs the
         RAMPZ address.  */

      rtx (*fun) (rtx, rtx)
        = QImode == loop_mode ? gen_movmemx_qi : gen_movmemx_hi;

      emit_move_insn (gen_rtx_REG (QImode, 23), a_hi8);

      insn = fun (xas, GEN_INT (avr_addr.rampz));
    }

  set_mem_addr_space (SET_SRC (XVECEXP (insn, 0, 0)), as);
  emit_insn (insn);

  return true;
}
9876
 
9877
 
9878
/* Print assembler for movmem_qi, movmem_hi insns...
9879
       $0     : Address Space
9880
       $1, $2 : Loop register
9881
       Z      : Source address
9882
       X      : Destination address
9883
*/
9884
 
9885
const char*
9886
avr_out_movmem (rtx insn ATTRIBUTE_UNUSED, rtx *op, int *plen)
9887
{
9888
  addr_space_t as = (addr_space_t) INTVAL (op[0]);
9889
  enum machine_mode loop_mode = GET_MODE (op[1]);
9890
  bool sbiw_p = test_hard_reg_class (ADDW_REGS, op[1]);
9891
  rtx xop[3];
9892
 
9893
  if (plen)
9894
    *plen = 0;
9895
 
9896
  xop[0] = op[0];
9897
  xop[1] = op[1];
9898
  xop[2] = tmp_reg_rtx;
9899
 
9900
  /* Loop label */
9901
 
9902
  avr_asm_len ("0:", xop, plen, 0);
9903
 
9904
  /* Load with post-increment */
9905
 
9906
  switch (as)
9907
    {
9908
    default:
9909
      gcc_unreachable();
9910
 
9911
    case ADDR_SPACE_GENERIC:
9912
 
9913
      avr_asm_len ("ld %2,Z+", xop, plen, 1);
9914
      break;
9915
 
9916
    case ADDR_SPACE_FLASH:
9917
 
9918
      if (AVR_HAVE_LPMX)
9919
        avr_asm_len ("lpm %2,%Z+", xop, plen, 1);
9920
      else
9921
        avr_asm_len ("lpm" CR_TAB
9922
                     "adiw r30,1", xop, plen, 2);
9923
      break;
9924
 
9925
    case ADDR_SPACE_FLASH1:
9926
    case ADDR_SPACE_FLASH2:
9927
    case ADDR_SPACE_FLASH3:
9928
    case ADDR_SPACE_FLASH4:
9929
    case ADDR_SPACE_FLASH5:
9930
 
9931
      if (AVR_HAVE_ELPMX)
9932
        avr_asm_len ("elpm %2,Z+", xop, plen, 1);
9933
      else
9934
        avr_asm_len ("elpm" CR_TAB
9935
                     "adiw r30,1", xop, plen, 2);
9936
      break;
9937
    }
9938
 
9939
  /* Store with post-increment */
9940
 
9941
  avr_asm_len ("st X+,%2", xop, plen, 1);
9942
 
9943
  /* Decrement loop-counter and set Z-flag */
9944
 
9945
  if (QImode == loop_mode)
9946
    {
9947
      avr_asm_len ("dec %1", xop, plen, 1);
9948
    }
9949
  else if (sbiw_p)
9950
    {
9951
      avr_asm_len ("sbiw %1,1", xop, plen, 1);
9952
    }
9953
  else
9954
    {
9955
      avr_asm_len ("subi %A1,1" CR_TAB
9956
                   "sbci %B1,0", xop, plen, 2);
9957
    }
9958
 
9959
  /* Loop until zero */
9960
 
9961
  return avr_asm_len ("brne 0b", xop, plen, 1);
9962
}
9963
 
9964
 
9965
 
9966
/* Helper for __builtin_avr_delay_cycles: emit code that burns exactly
   OPERANDS0 CPU cycles.  Works greedily from the biggest delay loop
   down to single NOPs; each delay_cycles_N loop costs a fixed overhead
   plus a per-iteration count, which the formulas below invert.  */

static void
avr_expand_delay_cycles (rtx operands0)
{
  unsigned HOST_WIDE_INT cycles = UINTVAL (operands0);
  unsigned HOST_WIDE_INT cycles_used;
  unsigned HOST_WIDE_INT loop_count;

  /* 6 cycles per iteration + 9 cycles overhead (SImode counter).  */

  if (IN_RANGE (cycles, 83886082, 0xFFFFFFFF))
    {
      loop_count = ((cycles - 9) / 6) + 1;
      cycles_used = ((loop_count - 1) * 6) + 9;
      emit_insn (gen_delay_cycles_4 (gen_int_mode (loop_count, SImode)));
      cycles -= cycles_used;
    }

  /* 5 cycles per iteration + 7 cycles overhead, count capped at 2^24-1.  */

  if (IN_RANGE (cycles, 262145, 83886081))
    {
      loop_count = ((cycles - 7) / 5) + 1;
      if (loop_count > 0xFFFFFF)
        loop_count = 0xFFFFFF;
      cycles_used = ((loop_count - 1) * 5) + 7;
      emit_insn (gen_delay_cycles_3 (gen_int_mode (loop_count, SImode)));
      cycles -= cycles_used;
    }

  /* 4 cycles per iteration + 5 cycles overhead (HImode counter).  */

  if (IN_RANGE (cycles, 768, 262144))
    {
      loop_count = ((cycles - 5) / 4) + 1;
      if (loop_count > 0xFFFF)
        loop_count = 0xFFFF;
      cycles_used = ((loop_count - 1) * 4) + 5;
      emit_insn (gen_delay_cycles_2 (gen_int_mode (loop_count, HImode)));
      cycles -= cycles_used;
    }

  /* 3 cycles per iteration (QImode counter).  */

  if (IN_RANGE (cycles, 6, 767))
    {
      loop_count = cycles / 3;
      if (loop_count > 255)
        loop_count = 255;
      cycles_used = loop_count * 3;
      emit_insn (gen_delay_cycles_1 (gen_int_mode (loop_count, QImode)));
      cycles -= cycles_used;
    }

  /* Pad the remainder (< 6 cycles) with 2-cycle and 1-cycle NOP insns.  */

  while (cycles >= 2)
    {
      emit_insn (gen_nopv (GEN_INT(2)));
      cycles -= 2;
    }

  if (cycles == 1)
    {
      emit_insn (gen_nopv (GEN_INT(1)));
      cycles--;
    }
}
10025
 
10026
 
10027
/* Return VAL * BASE + DIGIT.  BASE = 0 is shortcut for BASE = 2^{32}   */
10028
 
10029
static double_int
10030
avr_double_int_push_digit (double_int val, int base,
10031
                           unsigned HOST_WIDE_INT digit)
10032
{
10033
  val = 0 == base
10034
    ? double_int_lshift (val, 32, 64, false)
10035
    : double_int_mul (val, uhwi_to_double_int (base));
10036
 
10037
  return double_int_add (val, uhwi_to_double_int (digit));
10038
}
10039
 
10040
 
10041
/* Compute the image of x under f, i.e. perform   x --> f(x)    */
10042
 
10043
static int
10044
avr_map (double_int f, int x)
10045
{
10046
  return 0xf & double_int_to_uhwi (double_int_rshift (f, 4*x, 64, false));
10047
}
10048
 
10049
 
10050
/* Return some metrics of map A.
   Metric selectors for avr_map_metric below; a "map" is a 32-bit value
   of 8 nibbles, each mapping one bit position, cf. avr_map above.  */

enum
  {
    /* Number of fixed points in { 0 ... 7 } */
    MAP_FIXED_0_7,

    /* Size of preimage of non-fixed points in { 0 ... 7 } */
    MAP_NONFIXED_0_7,

    /* Mask representing the fixed points in { 0 ... 7 } */
    MAP_MASK_FIXED_0_7,

    /* Size of the preimage of { 0 ... 7 } */
    MAP_PREIMAGE_0_7,

    /* Mask that represents the preimage of { f } */
    MAP_MASK_PREIMAGE_F
  };
10069
 
10070
static unsigned
10071
avr_map_metric (double_int a, int mode)
10072
{
10073
  unsigned i, metric = 0;
10074
 
10075
  for (i = 0; i < 8; i++)
10076
    {
10077
      unsigned ai = avr_map (a, i);
10078
 
10079
      if (mode == MAP_FIXED_0_7)
10080
        metric += ai == i;
10081
      else if (mode == MAP_NONFIXED_0_7)
10082
        metric += ai < 8 && ai != i;
10083
      else if (mode == MAP_MASK_FIXED_0_7)
10084
        metric |= ((unsigned) (ai == i)) << i;
10085
      else if (mode == MAP_PREIMAGE_0_7)
10086
        metric += ai < 8;
10087
      else if (mode == MAP_MASK_PREIMAGE_F)
10088
        metric |= ((unsigned) (ai == 0xf)) << i;
10089
      else
10090
        gcc_unreachable();
10091
    }
10092
 
10093
  return metric;
10094
}
10095
 
10096
 
10097
/* Return true if IVAL has a 0xf in its hexadecimal representation
10098
   and false, otherwise.  Only nibbles 0..7 are taken into account.
10099
   Used as constraint helper for C0f and Cxf.  */
10100
 
10101
bool
10102
avr_has_nibble_0xf (rtx ival)
10103
{
10104
  return 0 != avr_map_metric (rtx_to_double_int (ival), MAP_MASK_PREIMAGE_F);
10105
}
10106
 
10107
 
10108
/* We have a set of bits that are mapped by a function F.
   Try to decompose F by means of a second function G so that

      F = F o G^-1 o G

   and

      cost (F o G^-1) + cost (G)  <  cost (F)

   Example:  Suppose builtin insert_bits supplies us with the map
   F = 0x3210ffff.  Instead of doing 4 bit insertions to get the high
   nibble of the result, we can just as well rotate the bits before inserting
   them and use the map 0x7654ffff which is cheaper than the original map.
   For this example G = G^-1 = 0x32107654 and F o G^-1 = 0x7654ffff.  */

typedef struct
{
  /* tree code of binary function G */
  enum tree_code code;

  /* The constant second argument of G */
  int arg;

  /* G^-1, the inverse of G (*, arg) */
  unsigned ginv;

  /* The cost of applying G (*, arg) */
  int cost;

  /* The composition F o G^-1 (*, arg) for some function F */
  double_int map;

  /* For debug purpose only */
  const char *str;
} avr_map_op_t;
10143
 
10144
/* Catalogue of candidate operations G (rotations and shifts) together
   with their inverses G^-1 and costs.  The MAP field is computed at run
   time by avr_map_decompose and is zero-initialized here.  */

static const avr_map_op_t avr_map_op[] =
  {
    { LROTATE_EXPR, 0, 0x76543210, 0, { 0, 0 }, "id" },
    { LROTATE_EXPR, 1, 0x07654321, 2, { 0, 0 }, "<<<" },
    { LROTATE_EXPR, 2, 0x10765432, 4, { 0, 0 }, "<<<" },
    { LROTATE_EXPR, 3, 0x21076543, 4, { 0, 0 }, "<<<" },
    { LROTATE_EXPR, 4, 0x32107654, 1, { 0, 0 }, "<<<" },
    { LROTATE_EXPR, 5, 0x43210765, 3, { 0, 0 }, "<<<" },
    { LROTATE_EXPR, 6, 0x54321076, 5, { 0, 0 }, "<<<" },
    { LROTATE_EXPR, 7, 0x65432107, 3, { 0, 0 }, "<<<" },
    { RSHIFT_EXPR, 1, 0x6543210c, 1, { 0, 0 }, ">>" },
    { RSHIFT_EXPR, 1, 0x7543210c, 1, { 0, 0 }, ">>" },
    { RSHIFT_EXPR, 2, 0x543210cc, 2, { 0, 0 }, ">>" },
    { RSHIFT_EXPR, 2, 0x643210cc, 2, { 0, 0 }, ">>" },
    { RSHIFT_EXPR, 2, 0x743210cc, 2, { 0, 0 }, ">>" },
    { LSHIFT_EXPR, 1, 0xc7654321, 1, { 0, 0 }, "<<" },
    { LSHIFT_EXPR, 2, 0xcc765432, 2, { 0, 0 }, "<<" }
  };
10162
 
10163
 
10164
/* Try to decompose F as F = (F o G^-1) o G as described above.
   VAL_CONST_P is true if the value operand of the insertion is a
   compile-time constant.
   The result is a struct representing F o G^-1 and G.
   If result.cost < 0 then such a decomposition does not exist.  */

static avr_map_op_t
avr_map_decompose (double_int f, const avr_map_op_t *g, bool val_const_p)
{
  int i;
  /* Does F reference operand 3 at all, i.e. has it nibbles of 0xf?  */
  bool val_used_p = 0 != avr_map_metric (f, MAP_MASK_PREIMAGE_F);
  avr_map_op_t f_ginv = *g;
  double_int ginv = uhwi_to_double_int (g->ginv);

  /* Pessimistic default: no decomposition found.  */
  f_ginv.cost = -1;

  /* Step 1:  Computing F o G^-1  */

  for (i = 7; i >= 0; i--)
    {
      int x = avr_map (f, i);

      if (x <= 7)
        {
          x = avr_map (ginv, x);

          /* The bit is no element of the image of G: no avail (cost = -1)  */

          if (x > 7)
            return f_ginv;
        }

      f_ginv.map = avr_double_int_push_digit (f_ginv.map, 16, x);
    }

  /* Step 2:  Compute the cost of the operations.
     The overall cost of doing an operation prior to the insertion is
      the cost of the insertion plus the cost of the operation.  */

  /* Step 2a:  Compute cost of F o G^-1  */

  if (0 == avr_map_metric (f_ginv.map, MAP_NONFIXED_0_7))
    {
      /* The mapping consists only of fixed points and can be folded
         to AND/OR logic in the remainder.  Reasonable cost is 3. */

      f_ginv.cost = 2 + (val_used_p && !val_const_p);
    }
  else
    {
      rtx xop[4];

      /* Get the cost of the insn by calling the output worker with some
         fake values.  Mimic effect of reloading xop[3]: Unused operands
         are mapped to 0 and used operands are reloaded to xop[0].  */

      xop[0] = all_regs_rtx[24];
      xop[1] = gen_int_mode (double_int_to_uhwi (f_ginv.map), SImode);
      xop[2] = all_regs_rtx[25];
      xop[3] = val_used_p ? xop[0] : const0_rtx;

      avr_out_insert_bits (xop, &f_ginv.cost);

      /* A used constant operand costs one extra word to load.  */

      f_ginv.cost += val_const_p && val_used_p ? 1 : 0;
    }

  /* Step 2b:  Add cost of G  */

  f_ginv.cost += g->cost;

  if (avr_log.builtin)
    avr_edump (" %s%d=%d", g->str, g->arg, f_ginv.cost);

  return f_ginv;
}
10237
 
10238
 
10239
/* Insert bits from XOP[1] into XOP[0] according to MAP.
   XOP[0] and XOP[1] don't overlap.
   If FIXP_P = true:  Move all bits according to MAP using BLD/BST sequences.
   If FIXP_P = false: Just move the bit if its position in the destination
   is different to its source position.

   XOP[3] is used as scratch operand slot for the bit numbers.
   PLEN: cumulative length counter for avr_asm_len, or NULL to print.  */

static void
avr_move_bits (rtx *xop, double_int map, bool fixp_p, int *plen)
{
  int bit_dest, b;

  /* T-flag contains this bit of the source, i.e. of XOP[1]  */
  int t_bit_src = -1;

  /* We order the operations according to the requested source bit b.
     This way a source bit already held in T can serve several
     destination bits without being re-loaded.  */

  for (b = 0; b < 8; b++)
    for (bit_dest = 0; bit_dest < 8; bit_dest++)
      {
        int bit_src = avr_map (map, bit_dest);

        if (b != bit_src
            || bit_src >= 8
            /* Same position: No need to copy as requested by FIXP_P.  */
            || (bit_dest == bit_src && !fixp_p))
          continue;

        if (t_bit_src != bit_src)
          {
            /* Source bit is not yet in T: Store it to T.  */

            t_bit_src = bit_src;

            xop[3] = GEN_INT (bit_src);
            avr_asm_len ("bst %T1%T3", xop, plen, 1);
          }

        /* Load destination bit with T.  */

        xop[3] = GEN_INT (bit_dest);
        avr_asm_len ("bld %T0%T3", xop, plen, 1);
      }
}
10282
 
10283
 
10284
/* PLEN == 0: Print assembler code for `insert_bits'.
   PLEN != 0: Compute code length in bytes.

   OP[0]:  Result
   OP[1]:  The mapping composed of nibbles. If nibble no. N is
           0:   Bit N of result is copied from bit OP[2].0
           ...  ...
           7:   Bit N of result is copied from bit OP[2].7
           0xf: Bit N of result is copied from bit OP[3].N
   OP[2]:  Bits to be inserted
   OP[3]:  Target value  */

const char*
avr_out_insert_bits (rtx *op, int *plen)
{
  double_int map = rtx_to_double_int (op[1]);
  unsigned mask_fixed;
  bool fixp_p = true;
  rtx xop[4];

  /* Reorder operands for avr_move_bits: XOP[0] = result,
     XOP[1] = bits to insert, XOP[2] = target value.  */
  xop[0] = op[0];
  xop[1] = op[2];
  xop[2] = op[3];

  gcc_assert (REG_P (xop[2]) || CONST_INT_P (xop[2]));

  if (plen)
    *plen = 0;
  else if (flag_print_asm_name)
    fprintf (asm_out_file,
             ASM_COMMENT_START "map = 0x%08" HOST_LONG_FORMAT "x\n",
             double_int_to_uhwi (map) & GET_MODE_MASK (SImode));

  /* If MAP has fixed points it might be better to initialize the result
     with the bits to be inserted instead of moving all bits by hand.  */

  mask_fixed = avr_map_metric (map, MAP_MASK_FIXED_0_7);

  if (REGNO (xop[0]) == REGNO (xop[1]))
    {
      /* Avoid early-clobber conflicts */

      avr_asm_len ("mov __tmp_reg__,%1", xop, plen, 1);
      xop[1] = tmp_reg_rtx;
      fixp_p = false;
    }

  if (avr_map_metric (map, MAP_MASK_PREIMAGE_F))
    {
      /* XOP[2] is used and reloaded to XOP[0] already */

      int n_fix = 0, n_nofix = 0;

      gcc_assert (REG_P (xop[2]));

      /* Get the code size of the bit insertions; once with all bits
         moved and once with fixed points omitted.  */

      avr_move_bits (xop, map, true, &n_fix);
      avr_move_bits (xop, map, false, &n_nofix);

      /* Fixed points are cheaper via EOR/ANDI/EOR masking when that
         saves more than the 3 instructions the masking costs.  */
      if (fixp_p && n_fix - n_nofix > 3)
        {
          xop[3] = gen_int_mode (~mask_fixed, QImode);

          avr_asm_len ("eor %0,%1"   CR_TAB
                       "andi %0,%3"  CR_TAB
                       "eor %0,%1", xop, plen, 3);
          fixp_p = false;
        }
    }
  else
    {
      /* XOP[2] is unused */

      if (fixp_p && mask_fixed)
        {
          avr_asm_len ("mov %0,%1", xop, plen, 1);
          fixp_p = false;
        }
    }

  /* Move/insert remaining bits.  */

  avr_move_bits (xop, map, fixp_p, plen);

  return "";
}
10372
 
10373
 
10374
/* IDs for all the AVR builtins.  */

enum avr_builtin_id
  {
    /* One enumerator per DEF_BUILTIN entry in builtins.def.  */
#define DEF_BUILTIN(NAME, N_ARGS, ID, TYPE, CODE) ID,
#include "builtins.def"
#undef DEF_BUILTIN

    /* Number of builtins; must remain the last enumerator.  */
    AVR_BUILTIN_COUNT
  };
10385
 
10386
static void
10387
avr_init_builtin_int24 (void)
10388
{
10389
  tree int24_type  = make_signed_type (GET_MODE_BITSIZE (PSImode));
10390
  tree uint24_type = make_unsigned_type (GET_MODE_BITSIZE (PSImode));
10391
 
10392
  (*lang_hooks.types.register_builtin_type) (int24_type, "__int24");
10393
  (*lang_hooks.types.register_builtin_type) (uint24_type, "__uint24");
10394
}
10395
 
10396
/* Implement `TARGET_INIT_BUILTINS' */
/* Set up all builtin functions for this target.  */

static void
avr_init_builtins (void)
{
  /* Function type nodes below are referenced by name from the TYPE
     argument of the DEF_BUILTIN entries in builtins.def; do not
     remove any of them even if they look unused here.  */

  tree void_ftype_void
    = build_function_type_list (void_type_node, NULL_TREE);
  tree uchar_ftype_uchar
    = build_function_type_list (unsigned_char_type_node,
                                unsigned_char_type_node,
                                NULL_TREE);
  tree uint_ftype_uchar_uchar
    = build_function_type_list (unsigned_type_node,
                                unsigned_char_type_node,
                                unsigned_char_type_node,
                                NULL_TREE);
  tree int_ftype_char_char
    = build_function_type_list (integer_type_node,
                                char_type_node,
                                char_type_node,
                                NULL_TREE);
  tree int_ftype_char_uchar
    = build_function_type_list (integer_type_node,
                                char_type_node,
                                unsigned_char_type_node,
                                NULL_TREE);
  tree void_ftype_ulong
    = build_function_type_list (void_type_node,
                                long_unsigned_type_node,
                                NULL_TREE);

  tree uchar_ftype_ulong_uchar_uchar
    = build_function_type_list (unsigned_char_type_node,
                                long_unsigned_type_node,
                                unsigned_char_type_node,
                                unsigned_char_type_node,
                                NULL_TREE);

  /* Register each builtin declared in builtins.def.  */
#define DEF_BUILTIN(NAME, N_ARGS, ID, TYPE, CODE)                       \
  add_builtin_function (NAME, TYPE, ID, BUILT_IN_MD, NULL, NULL_TREE);
#include "builtins.def"
#undef DEF_BUILTIN

  avr_init_builtin_int24 ();
}
10442
 
10443
 
10444
/* Descriptor of one AVR builtin, driving the table-based expansion
   in avr_expand_builtin.  */

struct avr_builtin_description
{
  enum insn_code icode;    /* Insn used to expand the builtin.  */
  const char *name;        /* User-visible name, NULL for sentinel.  */
  enum avr_builtin_id id;  /* Function code of the builtin.  */
  int n_args;              /* Number of call arguments (0..3).  */
};
10451
 
10452
/* Table of all AVR builtins, generated from builtins.def and
   terminated by a sentinel entry whose NAME is NULL.  */

static const struct avr_builtin_description
avr_bdesc[] =
  {

#define DEF_BUILTIN(NAME, N_ARGS, ID, TYPE, ICODE)      \
    { ICODE, NAME, ID, N_ARGS },
#include "builtins.def"
#undef DEF_BUILTIN

    /* Sentinel: NAME == NULL stops table walks.  */
    { CODE_FOR_nothing, NULL, 0, -1 }
  };
10463
 
10464
 
10465
/* Subroutine of avr_expand_builtin to take care of unop insns.  */
10466
 
10467
static rtx
10468
avr_expand_unop_builtin (enum insn_code icode, tree exp,
10469
                         rtx target)
10470
{
10471
  rtx pat;
10472
  tree arg0 = CALL_EXPR_ARG (exp, 0);
10473
  rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, EXPAND_NORMAL);
10474
  enum machine_mode op0mode = GET_MODE (op0);
10475
  enum machine_mode tmode = insn_data[icode].operand[0].mode;
10476
  enum machine_mode mode0 = insn_data[icode].operand[1].mode;
10477
 
10478
  if (! target
10479
      || GET_MODE (target) != tmode
10480
      || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
10481
    {
10482
      target = gen_reg_rtx (tmode);
10483
    }
10484
 
10485
  if (op0mode == SImode && mode0 == HImode)
10486
    {
10487
      op0mode = HImode;
10488
      op0 = gen_lowpart (HImode, op0);
10489
    }
10490
 
10491
  gcc_assert (op0mode == mode0 || op0mode == VOIDmode);
10492
 
10493
  if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
10494
    op0 = copy_to_mode_reg (mode0, op0);
10495
 
10496
  pat = GEN_FCN (icode) (target, op0);
10497
  if (! pat)
10498
    return 0;
10499
 
10500
  emit_insn (pat);
10501
 
10502
  return target;
10503
}
10504
 
10505
 
10506
/* Subroutine of avr_expand_builtin to take care of binop insns.  */
10507
 
10508
static rtx
10509
avr_expand_binop_builtin (enum insn_code icode, tree exp, rtx target)
10510
{
10511
  rtx pat;
10512
  tree arg0 = CALL_EXPR_ARG (exp, 0);
10513
  tree arg1 = CALL_EXPR_ARG (exp, 1);
10514
  rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, EXPAND_NORMAL);
10515
  rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, EXPAND_NORMAL);
10516
  enum machine_mode op0mode = GET_MODE (op0);
10517
  enum machine_mode op1mode = GET_MODE (op1);
10518
  enum machine_mode tmode = insn_data[icode].operand[0].mode;
10519
  enum machine_mode mode0 = insn_data[icode].operand[1].mode;
10520
  enum machine_mode mode1 = insn_data[icode].operand[2].mode;
10521
 
10522
  if (! target
10523
      || GET_MODE (target) != tmode
10524
      || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
10525
    {
10526
      target = gen_reg_rtx (tmode);
10527
    }
10528
 
10529
  if ((op0mode == SImode || op0mode == VOIDmode) && mode0 == HImode)
10530
    {
10531
      op0mode = HImode;
10532
      op0 = gen_lowpart (HImode, op0);
10533
    }
10534
 
10535
  if ((op1mode == SImode || op1mode == VOIDmode) && mode1 == HImode)
10536
    {
10537
      op1mode = HImode;
10538
      op1 = gen_lowpart (HImode, op1);
10539
    }
10540
 
10541
  /* In case the insn wants input operands in modes different from
10542
     the result, abort.  */
10543
 
10544
  gcc_assert ((op0mode == mode0 || op0mode == VOIDmode)
10545
              && (op1mode == mode1 || op1mode == VOIDmode));
10546
 
10547
  if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
10548
    op0 = copy_to_mode_reg (mode0, op0);
10549
 
10550
  if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
10551
    op1 = copy_to_mode_reg (mode1, op1);
10552
 
10553
  pat = GEN_FCN (icode) (target, op0, op1);
10554
 
10555
  if (! pat)
10556
    return 0;
10557
 
10558
  emit_insn (pat);
10559
  return target;
10560
}
10561
 
10562
/* Subroutine of avr_expand_builtin to take care of 3-operand insns.  */
10563
 
10564
static rtx
10565
avr_expand_triop_builtin (enum insn_code icode, tree exp, rtx target)
10566
{
10567
  rtx pat;
10568
  tree arg0 = CALL_EXPR_ARG (exp, 0);
10569
  tree arg1 = CALL_EXPR_ARG (exp, 1);
10570
  tree arg2 = CALL_EXPR_ARG (exp, 2);
10571
  rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, EXPAND_NORMAL);
10572
  rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, EXPAND_NORMAL);
10573
  rtx op2 = expand_expr (arg2, NULL_RTX, VOIDmode, EXPAND_NORMAL);
10574
  enum machine_mode op0mode = GET_MODE (op0);
10575
  enum machine_mode op1mode = GET_MODE (op1);
10576
  enum machine_mode op2mode = GET_MODE (op2);
10577
  enum machine_mode tmode = insn_data[icode].operand[0].mode;
10578
  enum machine_mode mode0 = insn_data[icode].operand[1].mode;
10579
  enum machine_mode mode1 = insn_data[icode].operand[2].mode;
10580
  enum machine_mode mode2 = insn_data[icode].operand[3].mode;
10581
 
10582
  if (! target
10583
      || GET_MODE (target) != tmode
10584
      || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
10585
    {
10586
      target = gen_reg_rtx (tmode);
10587
    }
10588
 
10589
  if ((op0mode == SImode || op0mode == VOIDmode) && mode0 == HImode)
10590
    {
10591
      op0mode = HImode;
10592
      op0 = gen_lowpart (HImode, op0);
10593
    }
10594
 
10595
  if ((op1mode == SImode || op1mode == VOIDmode) && mode1 == HImode)
10596
    {
10597
      op1mode = HImode;
10598
      op1 = gen_lowpart (HImode, op1);
10599
    }
10600
 
10601
  if ((op2mode == SImode || op2mode == VOIDmode) && mode2 == HImode)
10602
    {
10603
      op2mode = HImode;
10604
      op2 = gen_lowpart (HImode, op2);
10605
    }
10606
 
10607
  /* In case the insn wants input operands in modes different from
10608
     the result, abort.  */
10609
 
10610
  gcc_assert ((op0mode == mode0 || op0mode == VOIDmode)
10611
              && (op1mode == mode1 || op1mode == VOIDmode)
10612
              && (op2mode == mode2 || op2mode == VOIDmode));
10613
 
10614
  if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
10615
    op0 = copy_to_mode_reg (mode0, op0);
10616
 
10617
  if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
10618
    op1 = copy_to_mode_reg (mode1, op1);
10619
 
10620
  if (! (*insn_data[icode].operand[3].predicate) (op2, mode2))
10621
    op2 = copy_to_mode_reg (mode2, op2);
10622
 
10623
  pat = GEN_FCN (icode) (target, op0, op1, op2);
10624
 
10625
  if (! pat)
10626
    return 0;
10627
 
10628
  emit_insn (pat);
10629
  return target;
10630
}
10631
 
10632
 
10633
/* Expand an expression EXP that calls a built-in function,
   with result going to TARGET if that's convenient
   (and in mode MODE if that's convenient).
   SUBTARGET may be used as the target for computing one of EXP's operands.
   IGNORE is nonzero if the value is to be ignored.  */

static rtx
avr_expand_builtin (tree exp, rtx target,
                    rtx subtarget ATTRIBUTE_UNUSED,
                    enum machine_mode mode ATTRIBUTE_UNUSED,
                    int ignore ATTRIBUTE_UNUSED)
{
  size_t i;
  tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
  const char* bname = IDENTIFIER_POINTER (DECL_NAME (fndecl));
  unsigned int id = DECL_FUNCTION_CODE (fndecl);
  tree arg0;
  rtx op0;

  /* Builtins that need special treatment before the generic
     table-driven expansion below.  */

  switch (id)
    {
    case AVR_BUILTIN_NOP:
      emit_insn (gen_nopv (GEN_INT(1)));
      return 0;

    case AVR_BUILTIN_DELAY_CYCLES:
      {
        arg0 = CALL_EXPR_ARG (exp, 0);
        op0 = expand_expr (arg0, NULL_RTX, VOIDmode, EXPAND_NORMAL);

        if (!CONST_INT_P (op0))
          error ("%s expects a compile time integer constant", bname);
        else
          avr_expand_delay_cycles (op0);

        return 0;
      }

    case AVR_BUILTIN_INSERT_BITS:
      {
        /* Only diagnose a non-constant map here; with a constant map
           control deliberately falls through to the table-driven
           expansion below.  */

        arg0 = CALL_EXPR_ARG (exp, 0);
        op0 = expand_expr (arg0, NULL_RTX, VOIDmode, EXPAND_NORMAL);

        if (!CONST_INT_P (op0))
          {
            error ("%s expects a compile time long integer constant"
                   " as first argument", bname);
            return target;
          }
      }
    }

  /* Generic expansion according to the argument count recorded in
     avr_bdesc; the table ends with a NAME == NULL sentinel.  */

  for (i = 0; avr_bdesc[i].name; i++)
    {
      const struct avr_builtin_description *d = &avr_bdesc[i];

      if (d->id == id)
        switch (d->n_args)
          {
          case 0:
            emit_insn ((GEN_FCN (d->icode)) (target));
            return 0;

          case 1:
            return avr_expand_unop_builtin (d->icode, exp, target);

          case 2:
            return avr_expand_binop_builtin (d->icode, exp, target);

          case 3:
            return avr_expand_triop_builtin (d->icode, exp, target);

          default:
            gcc_unreachable();
        }
    }

  /* Every registered builtin must be in the table.  */
  gcc_unreachable ();
}
10712
 
10713
 
10714
/* Implement `TARGET_FOLD_BUILTIN'.
   Try to fold calls to AVR builtins at the tree level.  Returns the
   folded tree, or NULL_TREE if no folding was possible.  */

static tree
avr_fold_builtin (tree fndecl, int n_args ATTRIBUTE_UNUSED, tree *arg,
                  bool ignore ATTRIBUTE_UNUSED)
{
  unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
  tree val_type = TREE_TYPE (TREE_TYPE (fndecl));

  if (!optimize)
    return NULL_TREE;

  switch (fcode)
    {
    default:
      break;

    case AVR_BUILTIN_SWAP:
      {
        /* Nibble swap is a rotate-left by 4.  */
        return fold_build2 (LROTATE_EXPR, val_type, arg[0],
                            build_int_cst (val_type, 4));
      }

    case AVR_BUILTIN_INSERT_BITS:
      {
        tree tbits = arg[1];
        tree tval = arg[2];
        tree tmap;
        tree map_type = TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (fndecl)));
        double_int map;
        bool changed = false;
        unsigned i;
        avr_map_op_t best_g;

        if (TREE_CODE (arg[0]) != INTEGER_CST)
          {
            /* No constant as first argument: Don't fold this and run into
               error in avr_expand_builtin.  */

            break;
          }

        map = tree_to_double_int (arg[0]);
        tmap = double_int_to_tree (map_type, map);

        if (TREE_CODE (tval) != INTEGER_CST
            && 0 == avr_map_metric (map, MAP_MASK_PREIMAGE_F))
          {
            /* There are no F in the map, i.e. 3rd operand is unused.
               Replace that argument with some constant to render
               respective input unused.  */

            tval = build_int_cst (val_type, 0);
            changed = true;
          }

        if (TREE_CODE (tbits) != INTEGER_CST
            && 0 == avr_map_metric (map, MAP_PREIMAGE_0_7))
          {
            /* Similar for the bits to be inserted. If they are unused,
               we can just as well pass 0.  */

            tbits = build_int_cst (val_type, 0);
          }

        if (TREE_CODE (tbits) == INTEGER_CST)
          {
            /* Inserting bits known at compile time is easy and can be
               performed by AND and OR with appropriate masks.  */

            int bits = TREE_INT_CST_LOW (tbits);
            int mask_ior = 0, mask_and = 0xff;

            for (i = 0; i < 8; i++)
              {
                int mi = avr_map (map, i);

                if (mi < 8)
                  {
                    if (bits & (1 << mi))     mask_ior |=  (1 << i);
                    else                      mask_and &= ~(1 << i);
                  }
              }

            tval = fold_build2 (BIT_IOR_EXPR, val_type, tval,
                                build_int_cst (val_type, mask_ior));
            return fold_build2 (BIT_AND_EXPR, val_type, tval,
                                build_int_cst (val_type, mask_and));
          }

        if (changed)
          return build_call_expr (fndecl, 3, tmap, tbits, tval);

        /* If bits don't change their position we can use vanilla logic
           to merge the two arguments.  */

        if (0 == avr_map_metric (map, MAP_NONFIXED_0_7))
          {
            int mask_f = avr_map_metric (map, MAP_MASK_PREIMAGE_F);
            tree tres, tmask = build_int_cst (val_type, mask_f ^ 0xff);

            /* (tbits ^ tval) & mask ^ tval selects tbits where the
               mask is 0 and tval where it is 1.  */
            tres = fold_build2 (BIT_XOR_EXPR, val_type, tbits, tval);
            tres = fold_build2 (BIT_AND_EXPR, val_type, tres, tmask);
            return fold_build2 (BIT_XOR_EXPR, val_type, tres, tval);
          }

        /* Try decomposing MAP to reduce overall cost.  */

        if (avr_log.builtin)
          avr_edump ("\n%?: %X\n%?: ROL cost: ", map);

        best_g = avr_map_op[0];
        best_g.cost = 1000;

        /* Search all known decomposition operations for the cheapest
           one applicable to MAP.  */

        for (i = 0; i < sizeof (avr_map_op) / sizeof (*avr_map_op); i++)
          {
            avr_map_op_t g
              = avr_map_decompose (map, avr_map_op + i,
                                   TREE_CODE (tval) == INTEGER_CST);

            if (g.cost >= 0 && g.cost < best_g.cost)
              best_g = g;
          }

        if (avr_log.builtin)
          avr_edump ("\n");

        if (best_g.arg == 0)
          /* No optimization found */
          break;

        /* Apply operation G to the 2nd argument.  */

        if (avr_log.builtin)
          avr_edump ("%?: using OP(%s%d, %X) cost %d\n",
                     best_g.str, best_g.arg, best_g.map, best_g.cost);

        /* Do right-shifts arithmetically: They copy the MSB instead of
           shifting in a non-usable value (0) as with logic right-shift.  */

        tbits = fold_convert (signed_char_type_node, tbits);
        tbits = fold_build2 (best_g.code, signed_char_type_node, tbits,
                             build_int_cst (val_type, best_g.arg));
        tbits = fold_convert (val_type, tbits);

        /* Use map o G^-1 instead of original map to undo the effect of G.  */

        tmap = double_int_to_tree (map_type, best_g.map);

        return build_call_expr (fndecl, 3, tmap, tbits, tval);
      } /* AVR_BUILTIN_INSERT_BITS */
    }

  return NULL_TREE;
}
10869
 
10870
 
10871
 
10872
/* Initialize the GCC target structure.  */

/* Assembler output directives and file hooks.  */

#undef  TARGET_ASM_ALIGNED_HI_OP
#define TARGET_ASM_ALIGNED_HI_OP "\t.word\t"
#undef  TARGET_ASM_ALIGNED_SI_OP
#define TARGET_ASM_ALIGNED_SI_OP "\t.long\t"
#undef  TARGET_ASM_UNALIGNED_HI_OP
#define TARGET_ASM_UNALIGNED_HI_OP "\t.word\t"
#undef  TARGET_ASM_UNALIGNED_SI_OP
#define TARGET_ASM_UNALIGNED_SI_OP "\t.long\t"
#undef  TARGET_ASM_INTEGER
#define TARGET_ASM_INTEGER avr_assemble_integer
#undef  TARGET_ASM_FILE_START
#define TARGET_ASM_FILE_START avr_file_start
#undef  TARGET_ASM_FILE_END
#define TARGET_ASM_FILE_END avr_file_end

#undef  TARGET_ASM_FUNCTION_END_PROLOGUE
#define TARGET_ASM_FUNCTION_END_PROLOGUE avr_asm_function_end_prologue
#undef  TARGET_ASM_FUNCTION_BEGIN_EPILOGUE
#define TARGET_ASM_FUNCTION_BEGIN_EPILOGUE avr_asm_function_begin_epilogue

/* Function values and calling convention.  */

#undef  TARGET_FUNCTION_VALUE
#define TARGET_FUNCTION_VALUE avr_function_value
#undef  TARGET_LIBCALL_VALUE
#define TARGET_LIBCALL_VALUE avr_libcall_value
#undef  TARGET_FUNCTION_VALUE_REGNO_P
#define TARGET_FUNCTION_VALUE_REGNO_P avr_function_value_regno_p

#undef  TARGET_ATTRIBUTE_TABLE
#define TARGET_ATTRIBUTE_TABLE avr_attribute_table
#undef  TARGET_INSERT_ATTRIBUTES
#define TARGET_INSERT_ATTRIBUTES avr_insert_attributes
#undef  TARGET_SECTION_TYPE_FLAGS
#define TARGET_SECTION_TYPE_FLAGS avr_section_type_flags

/* Section handling.  */

#undef  TARGET_ASM_NAMED_SECTION
#define TARGET_ASM_NAMED_SECTION avr_asm_named_section
#undef  TARGET_ASM_INIT_SECTIONS
#define TARGET_ASM_INIT_SECTIONS avr_asm_init_sections
#undef  TARGET_ENCODE_SECTION_INFO
#define TARGET_ENCODE_SECTION_INFO avr_encode_section_info
#undef  TARGET_ASM_SELECT_SECTION
#define TARGET_ASM_SELECT_SECTION avr_asm_select_section

/* Costs and machine-dependent passes.  */

#undef  TARGET_REGISTER_MOVE_COST
#define TARGET_REGISTER_MOVE_COST avr_register_move_cost
#undef  TARGET_MEMORY_MOVE_COST
#define TARGET_MEMORY_MOVE_COST avr_memory_move_cost
#undef  TARGET_RTX_COSTS
#define TARGET_RTX_COSTS avr_rtx_costs
#undef  TARGET_ADDRESS_COST
#define TARGET_ADDRESS_COST avr_address_cost
#undef  TARGET_MACHINE_DEPENDENT_REORG
#define TARGET_MACHINE_DEPENDENT_REORG avr_reorg
#undef  TARGET_FUNCTION_ARG
#define TARGET_FUNCTION_ARG avr_function_arg
#undef  TARGET_FUNCTION_ARG_ADVANCE
#define TARGET_FUNCTION_ARG_ADVANCE avr_function_arg_advance

#undef  TARGET_RETURN_IN_MEMORY
#define TARGET_RETURN_IN_MEMORY avr_return_in_memory

#undef  TARGET_STRICT_ARGUMENT_NAMING
#define TARGET_STRICT_ARGUMENT_NAMING hook_bool_CUMULATIVE_ARGS_true

#undef  TARGET_BUILTIN_SETJMP_FRAME_VALUE
#define TARGET_BUILTIN_SETJMP_FRAME_VALUE avr_builtin_setjmp_frame_value

#undef  TARGET_HARD_REGNO_SCRATCH_OK
#define TARGET_HARD_REGNO_SCRATCH_OK avr_hard_regno_scratch_ok
#undef  TARGET_CASE_VALUES_THRESHOLD
#define TARGET_CASE_VALUES_THRESHOLD avr_case_values_threshold

#undef  TARGET_FRAME_POINTER_REQUIRED
#define TARGET_FRAME_POINTER_REQUIRED avr_frame_pointer_required_p
#undef  TARGET_CAN_ELIMINATE
#define TARGET_CAN_ELIMINATE avr_can_eliminate

#undef  TARGET_CLASS_LIKELY_SPILLED_P
#define TARGET_CLASS_LIKELY_SPILLED_P avr_class_likely_spilled_p

#undef  TARGET_OPTION_OVERRIDE
#define TARGET_OPTION_OVERRIDE avr_option_override

#undef  TARGET_CANNOT_MODIFY_JUMPS_P
#define TARGET_CANNOT_MODIFY_JUMPS_P avr_cannot_modify_jumps_p

#undef  TARGET_FUNCTION_OK_FOR_SIBCALL
#define TARGET_FUNCTION_OK_FOR_SIBCALL avr_function_ok_for_sibcall

/* Builtins (see avr_init_builtins etc. above).  */

#undef  TARGET_INIT_BUILTINS
#define TARGET_INIT_BUILTINS avr_init_builtins

#undef  TARGET_EXPAND_BUILTIN
#define TARGET_EXPAND_BUILTIN avr_expand_builtin

#undef  TARGET_FOLD_BUILTIN
#define TARGET_FOLD_BUILTIN avr_fold_builtin

#undef  TARGET_ASM_FUNCTION_RODATA_SECTION
#define TARGET_ASM_FUNCTION_RODATA_SECTION avr_asm_function_rodata_section

#undef  TARGET_SCALAR_MODE_SUPPORTED_P
#define TARGET_SCALAR_MODE_SUPPORTED_P avr_scalar_mode_supported_p

/* Named address spaces (__flash etc.).  */

#undef  TARGET_ADDR_SPACE_SUBSET_P
#define TARGET_ADDR_SPACE_SUBSET_P avr_addr_space_subset_p

#undef  TARGET_ADDR_SPACE_CONVERT
#define TARGET_ADDR_SPACE_CONVERT avr_addr_space_convert

#undef  TARGET_ADDR_SPACE_ADDRESS_MODE
#define TARGET_ADDR_SPACE_ADDRESS_MODE avr_addr_space_address_mode

#undef  TARGET_ADDR_SPACE_POINTER_MODE
#define TARGET_ADDR_SPACE_POINTER_MODE avr_addr_space_pointer_mode

#undef  TARGET_ADDR_SPACE_LEGITIMATE_ADDRESS_P
#define TARGET_ADDR_SPACE_LEGITIMATE_ADDRESS_P  \
  avr_addr_space_legitimate_address_p

#undef  TARGET_ADDR_SPACE_LEGITIMIZE_ADDRESS
#define TARGET_ADDR_SPACE_LEGITIMIZE_ADDRESS avr_addr_space_legitimize_address

/* Operand printing.  */

#undef  TARGET_PRINT_OPERAND
#define TARGET_PRINT_OPERAND avr_print_operand
#undef  TARGET_PRINT_OPERAND_ADDRESS
#define TARGET_PRINT_OPERAND_ADDRESS avr_print_operand_address
#undef  TARGET_PRINT_OPERAND_PUNCT_VALID_P
#define TARGET_PRINT_OPERAND_PUNCT_VALID_P avr_print_operand_punct_valid_p

struct gcc_target targetm = TARGET_INITIALIZER;


#include "gt-avr.h"

powered by: WebSVN 2.1.0

© copyright 1999-2024 OpenCores.org, equivalent to Oliscience, all rights reserved. OpenCores®, registered trademark.