URL
https://opencores.org/ocsvn/openrisc_me/openrisc_me/trunk
Subversion Repositories openrisc_me
Compare Revisions
- This comparison shows the changes necessary to convert path
/openrisc/trunk/gnu-src/gcc-4.2.2/gcc/config/avr
- from Rev 38 to Rev 154
- ↔ Reverse comparison
Rev 38 → Rev 154
/rtems.h
0,0 → 1,28
/* Definitions for rtems targeting a AVR using ELF. |
Copyright (C) 2004, 2007 Free Software Foundation, Inc. |
Contributed by Ralf Corsepius (ralf.corsepius@rtems.org). |
|
This file is part of GCC. |
|
GCC is free software; you can redistribute it and/or modify |
it under the terms of the GNU General Public License as published by |
the Free Software Foundation; either version 3, or (at your option) |
any later version. |
|
GCC is distributed in the hope that it will be useful, |
but WITHOUT ANY WARRANTY; without even the implied warranty of |
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
GNU General Public License for more details. |
|
You should have received a copy of the GNU General Public License |
along with GCC; see the file COPYING3. If not see |
<http://www.gnu.org/licenses/>. */ |
|
/* Specify predefined symbols in preprocessor. */ |
|
#define TARGET_OS_CPP_BUILTINS() \ |
do { \ |
builtin_define ("__rtems__"); \ |
builtin_define ("__USE_INIT_FINI__"); \ |
builtin_assert ("system=rtems"); \ |
} while (0) |
/predicates.md
0,0 → 1,76
;; Predicate definitions for ATMEL AVR micro controllers. |
;; Copyright (C) 2006, 2007 Free Software Foundation, Inc. |
;; |
;; This file is part of GCC. |
;; |
;; GCC is free software; you can redistribute it and/or modify |
;; it under the terms of the GNU General Public License as published by |
;; the Free Software Foundation; either version 3, or (at your option) |
;; any later version. |
;; |
;; GCC is distributed in the hope that it will be useful, |
;; but WITHOUT ANY WARRANTY; without even the implied warranty of |
;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
;; GNU General Public License for more details. |
;; |
;; You should have received a copy of the GNU General Public License |
;; along with GCC; see the file COPYING3. If not see |
;; <http://www.gnu.org/licenses/>. |
|
;; Registers from r0 to r15. |
(define_predicate "l_register_operand" |
(and (match_code "reg") |
(match_test "REGNO (op) <= 15"))) |
|
;; Registers from r16 to r31. |
(define_predicate "d_register_operand" |
(and (match_code "reg") |
(match_test "REGNO (op) >= 16 && REGNO (op) <= 31"))) |
|
;; SP register. |
(define_predicate "stack_register_operand" |
(and (match_code "reg") |
(match_test "REGNO (op) == REG_SP"))) |
|
;; Return true if OP is a valid address for lower half of I/O space. |
(define_predicate "low_io_address_operand" |
(and (match_code "const_int") |
(match_test "IN_RANGE((INTVAL (op)), 0x20, 0x3F)"))) |
|
;; Return true if OP is a valid address for high half of I/O space. |
(define_predicate "high_io_address_operand" |
(and (match_code "const_int") |
(match_test "IN_RANGE((INTVAL (op)), 0x40, 0x5F)"))) |
|
;; Returns 1 if OP is a SYMBOL_REF. |
(define_predicate "symbol_ref_operand" |
(match_code "symbol_ref")) |
|
;; Return true if OP is a constant that contains only one 1 in its |
;; binary representation. |
(define_predicate "single_one_operand" |
(and (match_code "const_int") |
(match_test "exact_log2(INTVAL (op) & GET_MODE_MASK (mode)) >= 0"))) |
|
;; Return true if OP is a constant that contains only one 0 in its |
;; binary representation. |
(define_predicate "single_zero_operand" |
(and (match_code "const_int") |
(match_test "exact_log2(~INTVAL (op) & GET_MODE_MASK (mode)) >= 0"))) |
|
;; True for EQ & NE |
(define_predicate "eqne_operator" |
(match_code "eq,ne")) |
|
;; True for GE & LT |
(define_predicate "gelt_operator" |
(match_code "ge,lt")) |
|
;; True for GT, GTU, LE & LEU |
(define_predicate "difficult_comparison_operator" |
(match_code "gt,gtu,le,leu")) |
|
;; False for GT, GTU, LE & LEU |
(define_predicate "simple_comparison_operator" |
(and (match_operand 0 "comparison_operator") |
(not (match_code "gt,gtu,le,leu")))) |
predicates.md
Property changes :
Added: svn:executable
## -0,0 +1 ##
+*
\ No newline at end of property
Index: avr-protos.h
===================================================================
--- avr-protos.h (nonexistent)
+++ avr-protos.h (revision 154)
@@ -0,0 +1,134 @@
+/* Prototypes for exported functions defined in avr.c
+
+ Copyright (C) 2000, 2001, 2002, 2003, 2004, 2006, 2007
+ Free Software Foundation, Inc.
+ Contributed by Denis Chertykov (denisc@overta.ru)
+
+ This file is part of GCC.
+
+ GCC is free software; you can redistribute it and/or modify
+ it under the terms of the GNU General Public License as published by
+ the Free Software Foundation; either version 3, or (at your option)
+ any later version.
+
+ GCC is distributed in the hope that it will be useful,
+ but WITHOUT ANY WARRANTY; without even the implied warranty of
+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ GNU General Public License for more details.
+
+ You should have received a copy of the GNU General Public License
+ along with GCC; see the file COPYING3. If not see
+ <http://www.gnu.org/licenses/>. */
+
+
+extern int function_arg_regno_p (int r);
+extern void avr_init_once (void);
+extern void avr_override_options (void);
+extern void avr_optimization_options (int level, int size);
+extern char *avr_change_section (char *sect_name);
+extern int avr_ret_register (void);
+extern enum reg_class class_likely_spilled_p (int c);
+extern enum reg_class avr_regno_reg_class (int r);
+extern enum reg_class avr_reg_class_from_letter (int c);
+extern int frame_pointer_required_p (void);
+extern void asm_globalize_label (FILE *file, const char *name);
+extern void order_regs_for_local_alloc (void);
+extern int initial_elimination_offset (int from, int to);
+extern int avr_simple_epilogue (void);
+extern int mask_one_bit_p (HOST_WIDE_INT mask);
+extern void gas_output_limited_string (FILE *file, const char *str);
+extern void gas_output_ascii (FILE *file, const char *str, size_t length);
+
+#ifdef TREE_CODE
+extern void asm_output_external (FILE *file, tree decl, char *name);
+extern int avr_progmem_p (tree decl, tree attributes);
+
+#ifdef RTX_CODE /* inside TREE_CODE */
+extern rtx avr_function_value (tree type, tree func);
+extern void init_cumulative_args (CUMULATIVE_ARGS *cum, tree fntype,
+ rtx libname, tree fndecl);
+extern rtx function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode,
+ tree type, int named);
+#endif /* RTX_CODE inside TREE_CODE */
+
+#ifdef HAVE_MACHINE_MODES /* inside TREE_CODE */
+extern void function_arg_advance (CUMULATIVE_ARGS *cum,
+ enum machine_mode mode, tree type,
+ int named);
+#endif /* HAVE_MACHINE_MODES inside TREE_CODE*/
+#endif /* TREE_CODE */
+
+#ifdef RTX_CODE
+extern void asm_output_external_libcall (FILE *file, rtx symref);
+extern int legitimate_address_p (enum machine_mode mode, rtx x, int strict);
+extern int compare_diff_p (rtx insn);
+extern const char *output_movqi (rtx insn, rtx operands[], int *l);
+extern const char *output_movhi (rtx insn, rtx operands[], int *l);
+extern const char *out_movqi_r_mr (rtx insn, rtx op[], int *l);
+extern const char *out_movqi_mr_r (rtx insn, rtx op[], int *l);
+extern const char *out_movhi_r_mr (rtx insn, rtx op[], int *l);
+extern const char *out_movhi_mr_r (rtx insn, rtx op[], int *l);
+extern const char *out_movsi_r_mr (rtx insn, rtx op[], int *l);
+extern const char *out_movsi_mr_r (rtx insn, rtx op[], int *l);
+extern const char *output_movsisf (rtx insn, rtx operands[], int *l);
+extern const char *out_tstsi (rtx insn, int *l);
+extern const char *out_tsthi (rtx insn, int *l);
+extern const char *ret_cond_branch (rtx x, int len, int reverse);
+
+extern const char *ashlqi3_out (rtx insn, rtx operands[], int *len);
+extern const char *ashlhi3_out (rtx insn, rtx operands[], int *len);
+extern const char *ashlsi3_out (rtx insn, rtx operands[], int *len);
+
+extern const char *ashrqi3_out (rtx insn, rtx operands[], int *len);
+extern const char *ashrhi3_out (rtx insn, rtx operands[], int *len);
+extern const char *ashrsi3_out (rtx insn, rtx operands[], int *len);
+
+extern const char *lshrqi3_out (rtx insn, rtx operands[], int *len);
+extern const char *lshrhi3_out (rtx insn, rtx operands[], int *len);
+extern const char *lshrsi3_out (rtx insn, rtx operands[], int *len);
+
+extern void avr_output_bld (rtx operands[], int bit_nr);
+extern void avr_output_addr_vec_elt (FILE *stream, int value);
+extern const char *avr_out_sbxx_branch (rtx insn, rtx operands[]);
+
+extern enum reg_class preferred_reload_class (rtx x, enum reg_class class);
+extern int extra_constraint_Q (rtx x);
+extern rtx legitimize_address (rtx x, rtx oldx, enum machine_mode mode);
+extern int adjust_insn_length (rtx insn, int len);
+extern rtx avr_libcall_value (enum machine_mode mode);
+extern const char *output_reload_inhi (rtx insn, rtx *operands, int *len);
+extern const char *output_reload_insisf (rtx insn, rtx *operands, int *len);
+extern enum reg_class secondary_input_reload_class (enum reg_class,
+ enum machine_mode,
+ rtx);
+extern void notice_update_cc (rtx body, rtx insn);
+extern void print_operand (FILE *file, rtx x, int code);
+extern void print_operand_address (FILE *file, rtx addr);
+extern int reg_unused_after (rtx insn, rtx reg);
+extern int _reg_unused_after (rtx insn, rtx reg);
+extern int avr_jump_mode (rtx x, rtx insn);
+extern int byte_immediate_operand (rtx op, enum machine_mode mode);
+extern int test_hard_reg_class (enum reg_class class, rtx x);
+extern int jump_over_one_insn_p (rtx insn, rtx dest);
+
+extern int avr_hard_regno_mode_ok (int regno, enum machine_mode mode);
+extern int call_insn_operand (rtx op, enum machine_mode mode);
+extern void final_prescan_insn (rtx insn, rtx *operand, int num_operands);
+extern int avr_simplify_comparison_p (enum machine_mode mode,
+ RTX_CODE operator, rtx x);
+extern RTX_CODE avr_normalize_condition (RTX_CODE condition);
+extern int compare_eq_p (rtx insn);
+extern void out_shift_with_cnt (const char *template, rtx insn,
+ rtx operands[], int *len, int t_len);
+extern int avr_io_address_p (rtx x, int size);
+extern int const_int_pow2_p (rtx x);
+extern int avr_peep2_scratch_safe (rtx reg_rtx);
+#endif /* RTX_CODE */
+
+#ifdef HAVE_MACHINE_MODES
+extern int class_max_nregs (enum reg_class class, enum machine_mode mode);
+#endif /* HAVE_MACHINE_MODES */
+
+#ifdef REAL_VALUE_TYPE
+extern void asm_output_float (FILE *file, REAL_VALUE_TYPE n);
+#endif
Index: libgcc.S
===================================================================
--- libgcc.S (nonexistent)
+++ libgcc.S (revision 154)
@@ -0,0 +1,778 @@
+/* -*- Mode: Asm -*- */
+/* Copyright (C) 1998, 1999, 2000 Free Software Foundation, Inc.
+ Contributed by Denis Chertykov <denisc@overta.ru>
+
+This file is free software; you can redistribute it and/or modify it
+under the terms of the GNU General Public License as published by the
+Free Software Foundation; either version 2, or (at your option) any
+later version.
+
+In addition to the permissions in the GNU General Public License, the
+Free Software Foundation gives you unlimited permission to link the
+compiled version of this file into combinations with other programs,
+and to distribute those combinations without any restriction coming
+from the use of this file. (The General Public License restrictions
+do apply in other respects; for example, they cover modification of
+the file, and distribution when not linked into a combine
+executable.)
+
+This file is distributed in the hope that it will be useful, but
+WITHOUT ANY WARRANTY; without even the implied warranty of
+MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+General Public License for more details.
+
+You should have received a copy of the GNU General Public License
+along with this program; see the file COPYING. If not, write to
+the Free Software Foundation, 51 Franklin Street, Fifth Floor,
+Boston, MA 02110-1301, USA. */
+
+#define __zero_reg__ r1
+#define __tmp_reg__ r0
+#define __SREG__ 0x3f
+#define __SP_H__ 0x3e
+#define __SP_L__ 0x3d
+
+/* Most of the functions here are called directly from avr.md
+ patterns, instead of using the standard libcall mechanisms.
+ This can make better code because GCC knows exactly which
+ of the call-used registers (not all of them) are clobbered. */
+
+ .section .text.libgcc, "ax", @progbits
+
+ .macro mov_l r_dest, r_src
+#if defined (__AVR_HAVE_MOVW__)
+ movw \r_dest, \r_src
+#else
+ mov \r_dest, \r_src
+#endif
+ .endm
+
+ .macro mov_h r_dest, r_src
+#if defined (__AVR_HAVE_MOVW__)
+ ; empty
+#else
+ mov \r_dest, \r_src
+#endif
+ .endm
+
+/* Note: mulqi3, mulhi3 are open-coded on the enhanced core. */
+#if !defined (__AVR_ENHANCED__)
+/*******************************************************
+ Multiplication 8 x 8
+*******************************************************/
+#if defined (L_mulqi3)
+
+#define r_arg2 r22 /* multiplicand */
+#define r_arg1 r24 /* multiplier */
+#define r_res __tmp_reg__ /* result */
+
+ .global __mulqi3
+ .func __mulqi3
+__mulqi3:
+ clr r_res ; clear result
+__mulqi3_loop:
+ sbrc r_arg1,0
+ add r_res,r_arg2
+ add r_arg2,r_arg2 ; shift multiplicand
+ breq __mulqi3_exit ; while multiplicand != 0
+ lsr r_arg1 ;
+ brne __mulqi3_loop ; exit if multiplier = 0
+__mulqi3_exit:
+ mov r_arg1,r_res ; result to return register
+ ret
+
+#undef r_arg2
+#undef r_arg1
+#undef r_res
+
+.endfunc
+#endif /* defined (L_mulqi3) */
+
+#if defined (L_mulqihi3)
+ .global __mulqihi3
+ .func __mulqihi3
+__mulqihi3:
+ clr r25
+ sbrc r24, 7
+ dec r25
+ clr r23
+ sbrc r22, 7
+ dec r22
+ rjmp __mulhi3
+ .endfunc
+#endif /* defined (L_mulqihi3) */
+
+#if defined (L_umulqihi3)
+ .global __umulqihi3
+ .func __umulqihi3
+__umulqihi3:
+ clr r25
+ clr r23
+ rjmp __mulhi3
+ .endfunc
+#endif /* defined (L_umulqihi3) */
+
+/*******************************************************
+ Multiplication 16 x 16
+*******************************************************/
+#if defined (L_mulhi3)
+#define r_arg1L r24 /* multiplier Low */
+#define r_arg1H r25 /* multiplier High */
+#define r_arg2L r22 /* multiplicand Low */
+#define r_arg2H r23 /* multiplicand High */
+#define r_resL __tmp_reg__ /* result Low */
+#define r_resH r21 /* result High */
+
+ .global __mulhi3
+ .func __mulhi3
+__mulhi3:
+ clr r_resH ; clear result
+ clr r_resL ; clear result
+__mulhi3_loop:
+ sbrs r_arg1L,0
+ rjmp __mulhi3_skip1
+ add r_resL,r_arg2L ; result + multiplicand
+ adc r_resH,r_arg2H
+__mulhi3_skip1:
+ add r_arg2L,r_arg2L ; shift multiplicand
+ adc r_arg2H,r_arg2H
+
+ cp r_arg2L,__zero_reg__
+ cpc r_arg2H,__zero_reg__
+ breq __mulhi3_exit ; while multiplicand != 0
+
+ lsr r_arg1H ; gets LSB of multiplier
+ ror r_arg1L
+ sbiw r_arg1L,0
+ brne __mulhi3_loop ; exit if multiplier = 0
+__mulhi3_exit:
+ mov r_arg1H,r_resH ; result to return register
+ mov r_arg1L,r_resL
+ ret
+
+#undef r_arg1L
+#undef r_arg1H
+#undef r_arg2L
+#undef r_arg2H
+#undef r_resL
+#undef r_resH
+
+.endfunc
+#endif /* defined (L_mulhi3) */
+#endif /* !defined (__AVR_ENHANCED__) */
+
+#if defined (L_mulhisi3)
+ .global __mulhisi3
+ .func __mulhisi3
+__mulhisi3:
+ mov_l r18, r24
+ mov_h r19, r25
+ clr r24
+ sbrc r23, 7
+ dec r24
+ mov r25, r24
+ clr r20
+ sbrc r19, 7
+ dec r20
+ mov r21, r20
+ rjmp __mulsi3
+ .endfunc
+#endif /* defined (L_mulhisi3) */
+
+#if defined (L_umulhisi3)
+ .global __umulhisi3
+ .func __umulhisi3
+__umulhisi3:
+ mov_l r18, r24
+ mov_h r19, r25
+ clr r24
+ clr r25
+ clr r20
+ clr r21
+ rjmp __mulsi3
+ .endfunc
+#endif /* defined (L_umulhisi3) */
+
+#if defined (L_mulsi3)
+/*******************************************************
+ Multiplication 32 x 32
+*******************************************************/
+#define r_arg1L r22 /* multiplier Low */
+#define r_arg1H r23
+#define r_arg1HL r24
+#define r_arg1HH r25 /* multiplier High */
+
+
+#define r_arg2L r18 /* multiplicand Low */
+#define r_arg2H r19
+#define r_arg2HL r20
+#define r_arg2HH r21 /* multiplicand High */
+
+#define r_resL r26 /* result Low */
+#define r_resH r27
+#define r_resHL r30
+#define r_resHH r31 /* result High */
+
+
+ .global __mulsi3
+ .func __mulsi3
+__mulsi3:
+#if defined (__AVR_ENHANCED__)
+ mul r_arg1L, r_arg2L
+ movw r_resL, r0
+ mul r_arg1H, r_arg2H
+ movw r_resHL, r0
+ mul r_arg1HL, r_arg2L
+ add r_resHL, r0
+ adc r_resHH, r1
+ mul r_arg1L, r_arg2HL
+ add r_resHL, r0
+ adc r_resHH, r1
+ mul r_arg1HH, r_arg2L
+ add r_resHH, r0
+ mul r_arg1HL, r_arg2H
+ add r_resHH, r0
+ mul r_arg1H, r_arg2HL
+ add r_resHH, r0
+ mul r_arg1L, r_arg2HH
+ add r_resHH, r0
+ clr r_arg1HH ; use instead of __zero_reg__ to add carry
+ mul r_arg1H, r_arg2L
+ add r_resH, r0
+ adc r_resHL, r1
+ adc r_resHH, r_arg1HH ; add carry
+ mul r_arg1L, r_arg2H
+ add r_resH, r0
+ adc r_resHL, r1
+ adc r_resHH, r_arg1HH ; add carry
+ movw r_arg1L, r_resL
+ movw r_arg1HL, r_resHL
+ clr r1 ; __zero_reg__ clobbered by "mul"
+ ret
+#else
+ clr r_resHH ; clear result
+ clr r_resHL ; clear result
+ clr r_resH ; clear result
+ clr r_resL ; clear result
+__mulsi3_loop:
+ sbrs r_arg1L,0
+ rjmp __mulsi3_skip1
+ add r_resL,r_arg2L ; result + multiplicand
+ adc r_resH,r_arg2H
+ adc r_resHL,r_arg2HL
+ adc r_resHH,r_arg2HH
+__mulsi3_skip1:
+ add r_arg2L,r_arg2L ; shift multiplicand
+ adc r_arg2H,r_arg2H
+ adc r_arg2HL,r_arg2HL
+ adc r_arg2HH,r_arg2HH
+
+ lsr r_arg1HH ; gets LSB of multiplier
+ ror r_arg1HL
+ ror r_arg1H
+ ror r_arg1L
+ brne __mulsi3_loop
+ sbiw r_arg1HL,0
+ cpc r_arg1H,r_arg1L
+ brne __mulsi3_loop ; exit if multiplier = 0
+__mulsi3_exit:
+ mov_h r_arg1HH,r_resHH ; result to return register
+ mov_l r_arg1HL,r_resHL
+ mov_h r_arg1H,r_resH
+ mov_l r_arg1L,r_resL
+ ret
+#endif /* !defined (__AVR_ENHANCED__) */
+#undef r_arg1L
+#undef r_arg1H
+#undef r_arg1HL
+#undef r_arg1HH
+
+
+#undef r_arg2L
+#undef r_arg2H
+#undef r_arg2HL
+#undef r_arg2HH
+
+#undef r_resL
+#undef r_resH
+#undef r_resHL
+#undef r_resHH
+
+.endfunc
+#endif /* defined (L_mulsi3) */
+
+/*******************************************************
+ Division 8 / 8 => (result + remainder)
+*******************************************************/
+#define r_rem r25 /* remainder */
+#define r_arg1 r24 /* dividend, quotient */
+#define r_arg2 r22 /* divisor */
+#define r_cnt r23 /* loop count */
+
+#if defined (L_udivmodqi4)
+ .global __udivmodqi4
+ .func __udivmodqi4
+__udivmodqi4:
+ sub r_rem,r_rem ; clear remainder and carry
+ ldi r_cnt,9 ; init loop counter
+ rjmp __udivmodqi4_ep ; jump to entry point
+__udivmodqi4_loop:
+ rol r_rem ; shift dividend into remainder
+ cp r_rem,r_arg2 ; compare remainder & divisor
+ brcs __udivmodqi4_ep ; remainder <= divisor
+ sub r_rem,r_arg2 ; restore remainder
+__udivmodqi4_ep:
+ rol r_arg1 ; shift dividend (with CARRY)
+ dec r_cnt ; decrement loop counter
+ brne __udivmodqi4_loop
+ com r_arg1 ; complement result
+ ; because C flag was complemented in loop
+ ret
+ .endfunc
+#endif /* defined (L_udivmodqi4) */
+
+#if defined (L_divmodqi4)
+ .global __divmodqi4
+ .func __divmodqi4
+__divmodqi4:
+ bst r_arg1,7 ; store sign of dividend
+ mov __tmp_reg__,r_arg1
+ eor __tmp_reg__,r_arg2; r0.7 is sign of result
+ sbrc r_arg1,7
+ neg r_arg1 ; dividend negative : negate
+ sbrc r_arg2,7
+ neg r_arg2 ; divisor negative : negate
+ rcall __udivmodqi4 ; do the unsigned div/mod
+ brtc __divmodqi4_1
+ neg r_rem ; correct remainder sign
+__divmodqi4_1:
+ sbrc __tmp_reg__,7
+ neg r_arg1 ; correct result sign
+__divmodqi4_exit:
+ ret
+ .endfunc
+#endif /* defined (L_divmodqi4) */
+
+#undef r_rem
+#undef r_arg1
+#undef r_arg2
+#undef r_cnt
+
+
+/*******************************************************
+ Division 16 / 16 => (result + remainder)
+*******************************************************/
+#define r_remL r26 /* remainder Low */
+#define r_remH r27 /* remainder High */
+
+/* return: remainder */
+#define r_arg1L r24 /* dividend Low */
+#define r_arg1H r25 /* dividend High */
+
+/* return: quotient */
+#define r_arg2L r22 /* divisor Low */
+#define r_arg2H r23 /* divisor High */
+
+#define r_cnt r21 /* loop count */
+
+#if defined (L_udivmodhi4)
+ .global __udivmodhi4
+ .func __udivmodhi4
+__udivmodhi4:
+ sub r_remL,r_remL
+ sub r_remH,r_remH ; clear remainder and carry
+ ldi r_cnt,17 ; init loop counter
+ rjmp __udivmodhi4_ep ; jump to entry point
+__udivmodhi4_loop:
+ rol r_remL ; shift dividend into remainder
+ rol r_remH
+ cp r_remL,r_arg2L ; compare remainder & divisor
+ cpc r_remH,r_arg2H
+ brcs __udivmodhi4_ep ; remainder < divisor
+ sub r_remL,r_arg2L ; restore remainder
+ sbc r_remH,r_arg2H
+__udivmodhi4_ep:
+ rol r_arg1L ; shift dividend (with CARRY)
+ rol r_arg1H
+ dec r_cnt ; decrement loop counter
+ brne __udivmodhi4_loop
+ com r_arg1L
+ com r_arg1H
+; div/mod results to return registers, as for the div() function
+ mov_l r_arg2L, r_arg1L ; quotient
+ mov_h r_arg2H, r_arg1H
+ mov_l r_arg1L, r_remL ; remainder
+ mov_h r_arg1H, r_remH
+ ret
+ .endfunc
+#endif /* defined (L_udivmodhi4) */
+
+#if defined (L_divmodhi4)
+ .global __divmodhi4
+ .func __divmodhi4
+__divmodhi4:
+ .global _div
+_div:
+ bst r_arg1H,7 ; store sign of dividend
+ mov __tmp_reg__,r_arg1H
+ eor __tmp_reg__,r_arg2H ; r0.7 is sign of result
+ rcall __divmodhi4_neg1 ; dividend negative : negate
+ sbrc r_arg2H,7
+ rcall __divmodhi4_neg2 ; divisor negative : negate
+ rcall __udivmodhi4 ; do the unsigned div/mod
+ rcall __divmodhi4_neg1 ; correct remainder sign
+ tst __tmp_reg__
+ brpl __divmodhi4_exit
+__divmodhi4_neg2:
+ com r_arg2H
+ neg r_arg2L ; correct divisor/result sign
+ sbci r_arg2H,0xff
+__divmodhi4_exit:
+ ret
+__divmodhi4_neg1:
+ brtc __divmodhi4_exit
+ com r_arg1H
+ neg r_arg1L ; correct dividend/remainder sign
+ sbci r_arg1H,0xff
+ ret
+ .endfunc
+#endif /* defined (L_divmodhi4) */
+
+#undef r_remH
+#undef r_remL
+
+#undef r_arg1H
+#undef r_arg1L
+
+#undef r_arg2H
+#undef r_arg2L
+
+#undef r_cnt
+
+/*******************************************************
+ Division 32 / 32 => (result + remainder)
+*******************************************************/
+#define r_remHH r31 /* remainder High */
+#define r_remHL r30
+#define r_remH r27
+#define r_remL r26 /* remainder Low */
+
+/* return: remainder */
+#define r_arg1HH r25 /* dividend High */
+#define r_arg1HL r24
+#define r_arg1H r23
+#define r_arg1L r22 /* dividend Low */
+
+/* return: quotient */
+#define r_arg2HH r21 /* divisor High */
+#define r_arg2HL r20
+#define r_arg2H r19
+#define r_arg2L r18 /* divisor Low */
+
+#define r_cnt __zero_reg__ /* loop count (0 after the loop!) */
+
+#if defined (L_udivmodsi4)
+ .global __udivmodsi4
+ .func __udivmodsi4
+__udivmodsi4:
+ ldi r_remL, 33 ; init loop counter
+ mov r_cnt, r_remL
+ sub r_remL,r_remL
+ sub r_remH,r_remH ; clear remainder and carry
+ mov_l r_remHL, r_remL
+ mov_h r_remHH, r_remH
+ rjmp __udivmodsi4_ep ; jump to entry point
+__udivmodsi4_loop:
+ rol r_remL ; shift dividend into remainder
+ rol r_remH
+ rol r_remHL
+ rol r_remHH
+ cp r_remL,r_arg2L ; compare remainder & divisor
+ cpc r_remH,r_arg2H
+ cpc r_remHL,r_arg2HL
+ cpc r_remHH,r_arg2HH
+ brcs __udivmodsi4_ep ; remainder <= divisor
+ sub r_remL,r_arg2L ; restore remainder
+ sbc r_remH,r_arg2H
+ sbc r_remHL,r_arg2HL
+ sbc r_remHH,r_arg2HH
+__udivmodsi4_ep:
+ rol r_arg1L ; shift dividend (with CARRY)
+ rol r_arg1H
+ rol r_arg1HL
+ rol r_arg1HH
+ dec r_cnt ; decrement loop counter
+ brne __udivmodsi4_loop
+ ; __zero_reg__ now restored (r_cnt == 0)
+ com r_arg1L
+ com r_arg1H
+ com r_arg1HL
+ com r_arg1HH
+; div/mod results to return registers, as for the ldiv() function
+ mov_l r_arg2L, r_arg1L ; quotient
+ mov_h r_arg2H, r_arg1H
+ mov_l r_arg2HL, r_arg1HL
+ mov_h r_arg2HH, r_arg1HH
+ mov_l r_arg1L, r_remL ; remainder
+ mov_h r_arg1H, r_remH
+ mov_l r_arg1HL, r_remHL
+ mov_h r_arg1HH, r_remHH
+ ret
+ .endfunc
+#endif /* defined (L_udivmodsi4) */
+
+#if defined (L_divmodsi4)
+ .global __divmodsi4
+ .func __divmodsi4
+__divmodsi4:
+ bst r_arg1HH,7 ; store sign of dividend
+ mov __tmp_reg__,r_arg1HH
+ eor __tmp_reg__,r_arg2HH ; r0.7 is sign of result
+ rcall __divmodsi4_neg1 ; dividend negative : negate
+ sbrc r_arg2HH,7
+ rcall __divmodsi4_neg2 ; divisor negative : negate
+ rcall __udivmodsi4 ; do the unsigned div/mod
+ rcall __divmodsi4_neg1 ; correct remainder sign
+ rol __tmp_reg__
+ brcc __divmodsi4_exit
+__divmodsi4_neg2:
+ com r_arg2HH
+ com r_arg2HL
+ com r_arg2H
+ neg r_arg2L ; correct divisor/quotient sign
+ sbci r_arg2H,0xff
+ sbci r_arg2HL,0xff
+ sbci r_arg2HH,0xff
+__divmodsi4_exit:
+ ret
+__divmodsi4_neg1:
+ brtc __divmodsi4_exit
+ com r_arg1HH
+ com r_arg1HL
+ com r_arg1H
+ neg r_arg1L ; correct dividend/remainder sign
+ sbci r_arg1H, 0xff
+ sbci r_arg1HL,0xff
+ sbci r_arg1HH,0xff
+ ret
+ .endfunc
+#endif /* defined (L_divmodsi4) */
+
+/**********************************
+ * This is a prologue subroutine
+ **********************************/
+#if defined (L_prologue)
+
+ .global __prologue_saves__
+ .func __prologue_saves__
+__prologue_saves__:
+ push r2
+ push r3
+ push r4
+ push r5
+ push r6
+ push r7
+ push r8
+ push r9
+ push r10
+ push r11
+ push r12
+ push r13
+ push r14
+ push r15
+ push r16
+ push r17
+ push r28
+ push r29
+ in r28,__SP_L__
+ in r29,__SP_H__
+ sub r28,r26
+ sbc r29,r27
+ in __tmp_reg__,__SREG__
+ cli
+ out __SP_H__,r29
+ out __SREG__,__tmp_reg__
+ out __SP_L__,r28
+ ijmp
+.endfunc
+#endif /* defined (L_prologue) */
+
+/*
+ * This is an epilogue subroutine
+ */
+#if defined (L_epilogue)
+
+ .global __epilogue_restores__
+ .func __epilogue_restores__
+__epilogue_restores__:
+ ldd r2,Y+18
+ ldd r3,Y+17
+ ldd r4,Y+16
+ ldd r5,Y+15
+ ldd r6,Y+14
+ ldd r7,Y+13
+ ldd r8,Y+12
+ ldd r9,Y+11
+ ldd r10,Y+10
+ ldd r11,Y+9
+ ldd r12,Y+8
+ ldd r13,Y+7
+ ldd r14,Y+6
+ ldd r15,Y+5
+ ldd r16,Y+4
+ ldd r17,Y+3
+ ldd r26,Y+2
+ ldd r27,Y+1
+ add r28,r30
+ adc r29,__zero_reg__
+ in __tmp_reg__,__SREG__
+ cli
+ out __SP_H__,r29
+ out __SREG__,__tmp_reg__
+ out __SP_L__,r28
+ mov_l r28, r26
+ mov_h r29, r27
+ ret
+.endfunc
+#endif /* defined (L_epilogue) */
+
+#ifdef L_exit
+ .section .fini9,"ax",@progbits
+ .global _exit
+ .func _exit
+_exit:
+ .weak exit
+exit:
+
+ /* Code from .fini8 ... .fini1 sections inserted by ld script. */
+
+ .section .fini0,"ax",@progbits
+__stop_program:
+ rjmp __stop_program
+ .endfunc
+#endif /* defined (L_exit) */
+
+#ifdef L_cleanup
+ .weak _cleanup
+ .func _cleanup
+_cleanup:
+ ret
+.endfunc
+#endif /* defined (L_cleanup) */
+
+#ifdef L_tablejump
+ .global __tablejump2__
+ .func __tablejump2__
+__tablejump2__:
+ lsl r30
+ rol r31
+ .global __tablejump__
+__tablejump__:
+#if defined (__AVR_ENHANCED__)
+ lpm __tmp_reg__, Z+
+ lpm r31, Z
+ mov r30, __tmp_reg__
+ ijmp
+#else
+ lpm
+ adiw r30, 1
+ push r0
+ lpm
+ push r0
+ ret
+#endif
+ .endfunc
+#endif /* defined (L_tablejump) */
+
+/* __do_copy_data is only necessary if there is anything in .data section.
+ Does not use RAMPZ - crt*.o provides a replacement for >64K devices. */
+
+#ifdef L_copy_data
+ .section .init4,"ax",@progbits
+ .global __do_copy_data
+__do_copy_data:
+ ldi r17, hi8(__data_end)
+ ldi r26, lo8(__data_start)
+ ldi r27, hi8(__data_start)
+ ldi r30, lo8(__data_load_start)
+ ldi r31, hi8(__data_load_start)
+ rjmp .do_copy_data_start
+.do_copy_data_loop:
+#if defined (__AVR_HAVE_LPMX__)
+ lpm r0, Z+
+#else
+ lpm
+ adiw r30, 1
+#endif
+ st X+, r0
+.do_copy_data_start:
+ cpi r26, lo8(__data_end)
+ cpc r27, r17
+ brne .do_copy_data_loop
+#endif /* L_copy_data */
+
+/* __do_clear_bss is only necessary if there is anything in .bss section. */
+
+#ifdef L_clear_bss
+ .section .init4,"ax",@progbits
+ .global __do_clear_bss
+__do_clear_bss:
+ ldi r17, hi8(__bss_end)
+ ldi r26, lo8(__bss_start)
+ ldi r27, hi8(__bss_start)
+ rjmp .do_clear_bss_start
+.do_clear_bss_loop:
+ st X+, __zero_reg__
+.do_clear_bss_start:
+ cpi r26, lo8(__bss_end)
+ cpc r27, r17
+ brne .do_clear_bss_loop
+#endif /* L_clear_bss */
+
+/* __do_global_ctors and __do_global_dtors are only necessary
+ if there are any constructors/destructors. */
+
+#if defined (__AVR_MEGA__)
+#define XCALL call
+#else
+#define XCALL rcall
+#endif
+
+#ifdef L_ctors
+ .section .init6,"ax",@progbits
+ .global __do_global_ctors
+__do_global_ctors:
+ ldi r17, hi8(__ctors_start)
+ ldi r28, lo8(__ctors_end)
+ ldi r29, hi8(__ctors_end)
+ rjmp .do_global_ctors_start
+.do_global_ctors_loop:
+ sbiw r28, 2
+ mov_h r31, r29
+ mov_l r30, r28
+ XCALL __tablejump__
+.do_global_ctors_start:
+ cpi r28, lo8(__ctors_start)
+ cpc r29, r17
+ brne .do_global_ctors_loop
+#endif /* L_ctors */
+
+#ifdef L_dtors
+ .section .fini6,"ax",@progbits
+ .global __do_global_dtors
+__do_global_dtors:
+ ldi r17, hi8(__dtors_end)
+ ldi r28, lo8(__dtors_start)
+ ldi r29, hi8(__dtors_start)
+ rjmp .do_global_dtors_start
+.do_global_dtors_loop:
+ mov_h r31, r29
+ mov_l r30, r28
+ XCALL __tablejump__
+ adiw r28, 2
+.do_global_dtors_start:
+ cpi r28, lo8(__dtors_end)
+ cpc r29, r17
+ brne .do_global_dtors_loop
+#endif /* L_dtors */
+
Index: constraints.md
===================================================================
--- constraints.md (nonexistent)
+++ constraints.md (revision 154)
@@ -0,0 +1,104 @@
+;; Constraint definitions for ATMEL AVR micro controllers.
+;; Copyright (C) 2006, 2007 Free Software Foundation, Inc.
+;;
+;; This file is part of GCC.
+;;
+;; GCC is free software; you can redistribute it and/or modify
+;; it under the terms of the GNU General Public License as published by
+;; the Free Software Foundation; either version 3, or (at your option)
+;; any later version.
+;;
+;; GCC is distributed in the hope that it will be useful,
+;; but WITHOUT ANY WARRANTY; without even the implied warranty of
+;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+;; GNU General Public License for more details.
+;;
+;; You should have received a copy of the GNU General Public License
+;; along with GCC; see the file COPYING3. If not see
+;; <http://www.gnu.org/licenses/>.
+
+;; Register constraints
+
+(define_register_constraint "t" "R0_REG"
+ "Temporary register r0")
+
+(define_register_constraint "b" "BASE_POINTER_REGS"
+ "Base pointer registers (r28--r31)")
+
+(define_register_constraint "e" "POINTER_REGS"
+ "Pointer registers (r26--r31)")
+
+(define_register_constraint "w" "ADDW_REGS"
+ "Registers from r24 to r31. These registers
+ can be used in @samp{adiw} command.")
+
+(define_register_constraint "d" "LD_REGS"
+ "Registers from r16 to r31.")
+
+(define_register_constraint "l" "NO_LD_REGS"
+ "Registers from r0 to r15.")
+
+(define_register_constraint "a" "SIMPLE_LD_REGS"
+ "Registers from r16 to r23.")
+
+(define_register_constraint "x" "POINTER_X_REGS"
+ "Register pair X (r27:r26).")
+
+(define_register_constraint "y" "POINTER_Y_REGS"
+ "Register pair Y (r29:r28).")
+
+(define_register_constraint "z" "POINTER_Z_REGS"
+ "Register pair Z (r31:r30).")
+
+(define_register_constraint "q" "STACK_REG"
+ "Stack pointer register (SPH:SPL).")
+
+(define_constraint "I"
+ "Integer constant in the range 0 @dots{} 63."
+ (and (match_code "const_int")
+ (match_test "ival >= 0 && ival <= 63")))
+
+(define_constraint "J"
+ "Integer constant in the range -63 @dots{} 0."
+ (and (match_code "const_int")
+ (match_test "ival <= 0 && ival >= -63")))
+
+(define_constraint "K"
+ "Integer constant 2."
+ (and (match_code "const_int")
+ (match_test "ival == 2")))
+
+(define_constraint "L"
+ "Zero."
+ (and (match_code "const_int")
+ (match_test "ival == 0")))
+
+(define_constraint "M"
+ "Integer constant in the range 0 @dots{} 0xff."
+ (and (match_code "const_int")
+ (match_test "ival >= 0 && ival <= 0xff")))
+
+(define_constraint "N"
+ "Constant integer @minus{}1."
+ (and (match_code "const_int")
+ (match_test "ival == -1")))
+
+(define_constraint "O"
+ "Constant integer 8, 16, or 24."
+ (and (match_code "const_int")
+ (match_test "ival == 8 || ival == 16 || ival == 24")))
+
+(define_constraint "P"
+ "Constant integer 1."
+ (and (match_code "const_int")
+ (match_test "ival == 1")))
+
+(define_constraint "G"
+ "Constant float 0."
+ (and (match_code "const_double")
+ (match_test "op == CONST0_RTX (SFmode)")))
+
+(define_memory_constraint "Q"
+ "A memory address based on X or Y pointer with displacement."
+ (and (match_code "mem")
+ (match_test "extra_constraint_Q (op)")))
Index: avr.md
===================================================================
--- avr.md (nonexistent)
+++ avr.md (revision 154)
@@ -0,0 +1,2528 @@
+;; -*- Mode: Scheme -*-
+;; Machine description for GNU compiler,
+;; for ATMEL AVR micro controllers.
+;; Copyright (C) 1998, 1999, 2000, 2001, 2002, 2004, 2005, 2006, 2007
+;; Free Software Foundation, Inc.
+;; Contributed by Denis Chertykov (denisc@overta.ru)
+
+;; This file is part of GCC.
+
+;; GCC is free software; you can redistribute it and/or modify
+;; it under the terms of the GNU General Public License as published by
+;; the Free Software Foundation; either version 3, or (at your option)
+;; any later version.
+
+;; GCC is distributed in the hope that it will be useful,
+;; but WITHOUT ANY WARRANTY; without even the implied warranty of
+;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+;; GNU General Public License for more details.
+
+;; You should have received a copy of the GNU General Public License
+;; along with GCC; see the file COPYING3. If not see
+;; <http://www.gnu.org/licenses/>.
+
+;; Special characters after '%':
+;; A No effect (add 0).
+;; B Add 1 to REG number, MEM address or CONST_INT.
+;; C Add 2.
+;; D Add 3.
+;; j Branch condition.
+;; k Reverse branch condition.
+;; o Displacement for (mem (plus (reg) (const_int))) operands.
+;; p POST_INC or PRE_DEC address as a pointer (X, Y, Z)
+;; r POST_INC or PRE_DEC address as a register (r26, r28, r30)
+;; ~ Output 'r' if not AVR_MEGA.
+
+;; UNSPEC usage:
+;; 0 Length of a string, see "strlenhi".
+;; 1 Jump by register pair Z or by table addressed by Z, see "casesi".
+
+;; Symbolic names for hard register numbers and unspec codes used below.
+;; Register 32 is the stack pointer (SPH:SPL); see constraint "q".
+(define_constants
+  [(REG_X 26)			; pointer register pair X (r27:r26)
+   (REG_Y 28)			; pointer register pair Y (r29:r28)
+   (REG_Z 30)			; pointer register pair Z (r31:r30)
+   (REG_W 24)
+   (REG_SP 32)			; stack pointer (SPH:SPL)
+   (TMP_REGNO 0)	; temporary register r0
+   (ZERO_REGNO 1)	; zero register r1
+   (UNSPEC_STRLEN 0)
+   (UNSPEC_INDEX_JMP 1)])
+
+(include "predicates.md")
+(include "constraints.md")
+
+;; Condition code settings.
+;; Describes what each insn does to the condition-code register,
+;; default "none" (CC unchanged).
+(define_attr "cc" "none,set_czn,set_zn,set_n,compare,clobber"
+  (const_string "none"))
+
+;; Insn classification used by the "length" attribute below;
+;; default is "arith".
+(define_attr "type" "branch,branch1,arith,xcall"
+  (const_string "arith"))
+
+;; Whether the target MCU has the MOVW instruction (AVR_HAVE_MOVW).
+(define_attr "mcu_have_movw" "yes,no"
+  (const (if_then_else (symbol_ref "AVR_HAVE_MOVW")
+		       (const_string "yes")
+		       (const_string "no"))))
+
+;; Whether the target is a "mega" device (AVR_MEGA), i.e. has
+;; CALL/JMP; affects the length of "xcall"-type insns.
+(define_attr "mcu_mega" "yes,no"
+  (const (if_then_else (symbol_ref "AVR_MEGA")
+		       (const_string "yes")
+		       (const_string "no"))))
+
+
+;; The size of instructions in bytes.
+;; XXX may depend from "cc"
+
+;; The size of instructions in bytes.
+;; NOTE: "length" here is counted in AVR words (2-byte units), as the
+;; per-insn (set_attr "length" N) values below show.  For "branch" and
+;; "branch1" types the length depends on the distance to the branch
+;; target: a short form within the small window, a longer sequence
+;; within the medium window, and the longest form otherwise.
+;; "branch1" windows are one word tighter -- presumably because the
+;; emitted sequence contains an extra instruction before the branch;
+;; TODO(review): confirm against the branch output routines in avr.c.
+(define_attr "length" ""
+  (cond [(eq_attr "type" "branch")
+	 (if_then_else (and (ge (minus (pc) (match_dup 0))
+				(const_int -63))
+			    (le (minus (pc) (match_dup 0))
+				(const_int 62)))
+		       (const_int 1)
+		       (if_then_else (and (ge (minus (pc) (match_dup 0))
+					      (const_int -2045))
+					  (le (minus (pc) (match_dup 0))
+					      (const_int 2045)))
+				     (const_int 2)
+				     (const_int 3)))
+	 (eq_attr "type" "branch1")
+	 (if_then_else (and (ge (minus (pc) (match_dup 0))
+				(const_int -62))
+			    (le (minus (pc) (match_dup 0))
+				(const_int 61)))
+		       (const_int 2)
+		       (if_then_else (and (ge (minus (pc) (match_dup 0))
+					      (const_int -2044))
+					  (le (minus (pc) (match_dup 0))
+					      (const_int 2043)))
+				     (const_int 3)
+				     (const_int 4)))
+	 (eq_attr "type" "xcall")
+	 (if_then_else (eq_attr "mcu_mega" "no")
+		       (const_int 1)
+		       (const_int 2))]
+	(const_int 2)))
+
+;; Increment the stack pointer (reg 32) by 1..5 by popping into
+;; __tmp_reg__ and discarding the value.  One "pop" per byte.
+(define_insn "*pop1"
+  [(set (reg:HI 32) (plus:HI (reg:HI 32) (const_int 1)))]
+  ""
+  "pop __tmp_reg__"
+  [(set_attr "length" "1")])
+
+(define_insn "*pop2"
+  [(set (reg:HI 32) (plus:HI (reg:HI 32) (const_int 2)))]
+  ""
+  "pop __tmp_reg__
+	pop __tmp_reg__"
+  [(set_attr "length" "2")])
+
+(define_insn "*pop3"
+  [(set (reg:HI 32) (plus:HI (reg:HI 32) (const_int 3)))]
+  ""
+  "pop __tmp_reg__
+	pop __tmp_reg__
+	pop __tmp_reg__"
+  [(set_attr "length" "3")])
+
+(define_insn "*pop4"
+  [(set (reg:HI 32) (plus:HI (reg:HI 32) (const_int 4)))]
+  ""
+  "pop __tmp_reg__
+	pop __tmp_reg__
+	pop __tmp_reg__
+	pop __tmp_reg__"
+  [(set_attr "length" "4")])
+
+(define_insn "*pop5"
+  [(set (reg:HI 32) (plus:HI (reg:HI 32) (const_int 5)))]
+  ""
+  "pop __tmp_reg__
+	pop __tmp_reg__
+	pop __tmp_reg__
+	pop __tmp_reg__
+	pop __tmp_reg__"
+  [(set_attr "length" "5")])
+
+;; Push patterns.  The source is either a register or the constant 0
+;; (constraint "L"), in which case __zero_reg__ is pushed instead.
+;; Multi-byte values are pushed high byte first (%D..%A), matching the
+;; post-decrement stack.
+(define_insn "*pushqi"
+  [(set (mem:QI (post_dec (reg:HI 32)))
+        (match_operand:QI 0 "nonmemory_operand" "r,L"))]
+  "(operands[0] == const0_rtx || register_operand (operands[0], QImode))"
+  "@
+	push %0
+	push __zero_reg__"
+  [(set_attr "length" "1,1")])
+
+
+(define_insn "*pushhi"
+  [(set (mem:HI (post_dec (reg:HI 32)))
+        (match_operand:HI 0 "nonmemory_operand" "r,L"))]
+  "(operands[0] == const0_rtx || register_operand (operands[0], HImode))"
+  "@
+	push %B0\;push %A0
+	push __zero_reg__\;push __zero_reg__"
+  [(set_attr "length" "2,2")])
+
+(define_insn "*pushsi"
+  [(set (mem:SI (post_dec (reg:HI 32)))
+        (match_operand:SI 0 "nonmemory_operand" "r,L"))]
+  "(operands[0] == const0_rtx || register_operand (operands[0], SImode))"
+  "@
+	push %D0\;push %C0\;push %B0\;push %A0
+	push __zero_reg__\;push __zero_reg__\;push __zero_reg__\;push __zero_reg__"
+  [(set_attr "length" "4,4")])
+
+(define_insn "*pushsf"
+  [(set (mem:SF (post_dec (reg:HI 32)))
+        (match_operand:SF 0 "register_operand" "r"))]
+  ""
+  "push %D0
+	push %C0
+	push %B0
+	push %A0"
+  [(set_attr "length" "4")])
+
+;;========================================================================
+;; move byte
+;; The last alternative (any immediate constant to any register) is
+;; very expensive. It should be optimized by peephole2 if a scratch
+;; register is available, but then that register could just as well be
+;; allocated for the variable we are loading. But, most of NO_LD_REGS
+;; are call-saved registers, and most of LD_REGS are call-used registers,
+;; so this may still be a win for registers live across function calls.
+
+;; QImode move expander.  Forces the source into a register unless the
+;; destination is a register or the source is the constant 0 (which the
+;; *movqi insn can store directly via __zero_reg__).
+(define_expand "movqi"
+  [(set (match_operand:QI 0 "nonimmediate_operand" "")
+	(match_operand:QI 1 "general_operand" ""))]
+  ""
+  "/* One of the ops has to be in a register.  */
+   if (!register_operand(operand0, QImode)
+       && ! (register_operand(operand1, QImode) || const0_rtx == operand1))
+       operands[1] = copy_to_mode_reg(QImode, operand1);
+  ")
+
+;; QImode move insn; assembly is produced by output_movqi in avr.c.
+;; Alternatives cover reg-reg, immediate-to-d-reg, memory in both
+;; directions, I/O register ("q"), and immediate to any register
+;; (the expensive last alternative, optimized by the peephole2 below).
+(define_insn "*movqi"
+  [(set (match_operand:QI 0 "nonimmediate_operand" "=r,d,Qm,r,q,r,*r")
+	(match_operand:QI 1 "general_operand"       "r,i,rL,Qm,r,q,i"))]
+  "(register_operand (operands[0],QImode)
+    || register_operand (operands[1], QImode) || const0_rtx == operands[1])"
+  "* return output_movqi (insn, operands, NULL);"
+  [(set_attr "length" "1,1,5,5,1,1,4")
+   (set_attr "cc" "none,none,clobber,clobber,none,none,clobber")])
+
+;; This is used in peephole2 to optimize loading immediate constants
+;; if a scratch register from LD_REGS happens to be available.
+
+;; Load an immediate into a NO_LD_REGS register via an LD_REGS scratch:
+;; ldi into the scratch, then mov into the destination.
+(define_insn "*reload_inqi"
+  [(set (match_operand:QI 0 "register_operand" "=l")
+	(match_operand:QI 1 "immediate_operand" "i"))
+   (clobber (match_operand:QI 2 "register_operand" "=&d"))]
+  "reload_completed"
+  "ldi %2,lo8(%1)
+	mov %0,%2"
+  [(set_attr "length" "2")
+   (set_attr "cc" "none")])
+
+;; If an LD_REGS scratch is free, rewrite "load immediate into l-reg"
+;; to the *reload_inqi pattern above.  0, 1 and -1 are excluded since
+;; they can be produced without an ldi (clr/inc/dec).
+(define_peephole2
+  [(match_scratch:QI 2 "d")
+   (set (match_operand:QI 0 "l_register_operand" "")
+	(match_operand:QI 1 "immediate_operand" ""))]
+  "(operands[1] != const0_rtx
+    && operands[1] != const1_rtx
+    && operands[1] != constm1_rtx)"
+  [(parallel [(set (match_dup 0) (match_dup 1))
+	      (clobber (match_dup 2))])]
+  "if (!avr_peep2_scratch_safe (operands[2]))
+     FAIL;")
+
+;;============================================================================
+;; move word (16 bit)
+
+;; HImode move expander; same register-forcing policy as movqi.
+(define_expand "movhi"
+  [(set (match_operand:HI 0 "nonimmediate_operand" "")
+        (match_operand:HI 1 "general_operand"       ""))]
+  ""
+  "
+{
+   /* One of the ops has to be in a register.  */
+  if (!register_operand(operand0, HImode)
+      && !(register_operand(operand1, HImode) || const0_rtx == operands[1]))
+    {
+      operands[1] = copy_to_mode_reg(HImode, operand1);
+    }
+}")
+
+
+;; HImode analogue of the QImode immediate-load peephole: use a free
+;; LD_REGS scratch to load an immediate into an l-register pair.
+(define_peephole2
+  [(match_scratch:QI 2 "d")
+   (set (match_operand:HI 0 "l_register_operand" "")
+        (match_operand:HI 1 "immediate_operand" ""))]
+  "(operands[1] != const0_rtx 
+    && operands[1] != constm1_rtx)"
+  [(parallel [(set (match_dup 0) (match_dup 1))
+	      (clobber (match_dup 2))])]
+  "if (!avr_peep2_scratch_safe (operands[2]))
+     FAIL;")
+
+;; '*' because it is not used in rtl generation, only in above peephole
+(define_insn "*reload_inhi"
+  [(set (match_operand:HI 0 "register_operand" "=r")
+        (match_operand:HI 1 "immediate_operand" "i"))
+   (clobber (match_operand:QI 2 "register_operand" "=&d"))]
+  "reload_completed"
+  "* return output_reload_inhi (insn, operands, NULL);"
+  [(set_attr "length" "4")
+   (set_attr "cc" "none")])
+
+;; HImode move insn; assembly produced by output_movhi in avr.c.
+(define_insn "*movhi"
+  [(set (match_operand:HI 0 "nonimmediate_operand" "=r,r,m,d,*r,q,r")
+        (match_operand:HI 1 "general_operand"       "r,m,rL,i,i,r,q"))]
+  "(register_operand (operands[0],HImode)
+    || register_operand (operands[1],HImode) || const0_rtx == operands[1])"
+  "* return output_movhi (insn, operands, NULL);"
+  [(set_attr "length" "2,6,7,2,6,5,2")
+   (set_attr "cc" "none,clobber,clobber,none,clobber,none,none")])
+
+;;==========================================================================
+;; move double word (32 bit)
+
+;; SImode move expander; same register-forcing policy as movqi/movhi.
+(define_expand "movsi"
+  [(set (match_operand:SI 0 "nonimmediate_operand" "")
+        (match_operand:SI 1 "general_operand"  ""))]
+  ""
+  "
+{
+  /* One of the ops has to be in a register.  */
+  if (!register_operand (operand0, SImode)
+      && !(register_operand (operand1, SImode) || const0_rtx == operand1))
+    {
+      operands[1] = copy_to_mode_reg (SImode, operand1);
+    }
+}")
+
+
+
+;; SImode analogue of the immediate-load peephole.
+(define_peephole2
+  [(match_scratch:QI 2 "d")
+   (set (match_operand:SI 0 "l_register_operand" "")
+        (match_operand:SI 1 "immediate_operand" ""))]
+  "(operands[1] != const0_rtx
+    && operands[1] != constm1_rtx)"
+  [(parallel [(set (match_dup 0) (match_dup 1))
+	      (clobber (match_dup 2))])]
+  "if (!avr_peep2_scratch_safe (operands[2]))
+     FAIL;")
+
+;; '*' because it is not used in rtl generation.
+(define_insn "*reload_insi"
+  [(set (match_operand:SI 0 "register_operand" "=r")
+        (match_operand:SI 1 "immediate_operand" "i"))
+   (clobber (match_operand:QI 2 "register_operand" "=&d"))]
+  "reload_completed"
+  "* return output_reload_insisf (insn, operands, NULL);"
+  [(set_attr "length" "8")
+   (set_attr "cc" "none")])
+
+
+;; SImode move insn; assembly produced by output_movsisf in avr.c
+;; (shared with *movsf below).
+(define_insn "*movsi"
+  [(set (match_operand:SI 0 "nonimmediate_operand" "=r,r,r,Qm,!d,r")
+        (match_operand:SI 1 "general_operand"       "r,L,Qm,rL,i,i"))]
+  "(register_operand (operands[0],SImode)
+    || register_operand (operands[1],SImode) || const0_rtx == operands[1])"
+  "* return output_movsisf (insn, operands, NULL);"
+  [(set_attr "length" "4,4,8,9,4,10")
+   (set_attr "cc" "none,set_zn,clobber,clobber,none,clobber")])
+
+;; fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff
+;; move floating point numbers (32 bit)
+
+;; SFmode move expander.  Unlike the integer expanders, there is no
+;; const0 exception here: any non-register source is forced into a
+;; register when the destination is not a register.
+(define_expand "movsf"
+  [(set (match_operand:SF 0 "nonimmediate_operand" "")
+        (match_operand:SF 1 "general_operand"  ""))]
+  ""
+  "
+{
+  /* One of the ops has to be in a register.  */
+  if (!register_operand (operand1, SFmode)
+      && !register_operand (operand0, SFmode))
+    {
+      operands[1] = copy_to_mode_reg (SFmode, operand1);
+    }
+}")
+
+;; SFmode move insn; shares output_movsisf with *movsi.  "G" matches
+;; float 0.0, "F" any float constant.
+(define_insn "*movsf"
+  [(set (match_operand:SF 0 "nonimmediate_operand" "=r,r,r,Qm,!d,r")
+        (match_operand:SF 1 "general_operand"       "r,G,Qm,r,F,F"))]
+  "register_operand (operands[0], SFmode)
+   || register_operand (operands[1], SFmode)"
+  "* return output_movsisf (insn, operands, NULL);"
+  [(set_attr "length" "4,4,8,9,4,10")
+   (set_attr "cc" "none,set_zn,clobber,clobber,none,clobber")])
+
+;;=========================================================================
+;; move string (like memcpy)
+;; implement as RTL loop
+
+;; Expand memcpy-style block moves as an explicit RTL byte-copy loop.
+;; Operand 2 is the byte count (must be a known constant > 0, else
+;; FAIL falls back to the library), operand 3 the alignment (unused).
+(define_expand "movmemhi"
+  [(parallel [(set (match_operand:BLK 0 "memory_operand" "")
+          (match_operand:BLK 1 "memory_operand" ""))
+         (use (match_operand:HI 2 "const_int_operand" ""))
+         (use (match_operand:HI 3 "const_int_operand" ""))])]
+  ""
+  "{
+  int prob;
+  HOST_WIDE_INT count;
+  enum machine_mode mode;
+  rtx label = gen_label_rtx ();
+  rtx loop_reg;
+  rtx jump;
+
+  /* Copy pointers into new pseudos - they will be changed.  */
+  rtx addr0 = copy_to_mode_reg (Pmode, XEXP (operands[0], 0));
+  rtx addr1 = copy_to_mode_reg (Pmode, XEXP (operands[1], 0));
+
+  /* Create rtx for tmp register - we use this as scratch.  */
+  rtx tmp_reg_rtx = gen_rtx_REG (QImode, TMP_REGNO);
+
+  if (GET_CODE (operands[2]) != CONST_INT)
+    FAIL;
+
+  count = INTVAL (operands[2]);
+  if (count <= 0)
+    FAIL;
+
+  /* Work out branch probability for latter use.  */
+  prob = REG_BR_PROB_BASE - REG_BR_PROB_BASE / count;
+
+  /* See if the constant fits in 8 bits.  */
+  mode = (count < 0x100) ? QImode : HImode;
+  /* Create loop counter register.  */
+  loop_reg = copy_to_mode_reg (mode, gen_int_mode (count, mode));
+
+  /* Now create RTL code for move loop.  */
+  /* Label at top of loop.  */
+  emit_label (label);
+
+  /* Move one byte into scratch and inc pointer.  */
+  emit_move_insn (tmp_reg_rtx, gen_rtx_MEM (QImode, addr1));
+  emit_move_insn (addr1, gen_rtx_PLUS (Pmode, addr1, const1_rtx));
+
+  /* Move to mem and inc pointer.  */
+  emit_move_insn (gen_rtx_MEM (QImode, addr0), tmp_reg_rtx);
+  emit_move_insn (addr0, gen_rtx_PLUS (Pmode, addr0, const1_rtx));
+
+  /* Decrement count.  */
+  emit_move_insn (loop_reg, gen_rtx_PLUS (mode, loop_reg, constm1_rtx));
+
+  /* Compare with zero and jump if not equal.  */
+  emit_cmp_and_jump_insns (loop_reg, const0_rtx, NE, NULL_RTX, mode, 1,
+                           label);
+  /* Set jump probability based on loop count.  */
+  jump = get_last_insn ();
+  REG_NOTES (jump) = gen_rtx_EXPR_LIST (REG_BR_PROB,
+		                        GEN_INT (prob),
+		                        REG_NOTES (jump));
+  DONE;
+}")
+
+;; =%2 =%2 =%2 =%2 =%2 =%2 =%2 =%2 =%2 =%2 =%2 =%2 =%2 =%2 =%2 =%2 =%2 =%2 =%2 =%2 =%2
+;; memset (%0, %2, %1)
+
+;; Expand memset (%0, %2, %1).  Only zero-fill with a constant length
+;; is handled inline (matched by *clrmemqi / *clrmemhi below); any
+;; other value or a non-constant length FAILs to the library routine.
+(define_expand "setmemhi"
+  [(parallel [(set (match_operand:BLK 0 "memory_operand" "")
+		   (match_operand 2 "const_int_operand" ""))
+	      (use (match_operand:HI 1 "const_int_operand" ""))
+	      (use (match_operand:HI 3 "const_int_operand" "n"))
+	      (clobber (match_scratch:HI 4 ""))
+	      (clobber (match_dup 5))])]
+  ""
+  "{
+  rtx addr0;
+  int cnt8;
+  enum machine_mode mode;
+
+  /* If value to set is not zero, use the library routine.  */
+  if (operands[2] != const0_rtx)
+    FAIL;
+
+  if (GET_CODE (operands[1]) != CONST_INT)
+    FAIL;
+
+  /* Use a QImode counter when the length fits in one byte.  */
+  cnt8 = byte_immediate_operand (operands[1], GET_MODE (operands[1]));
+  mode = cnt8 ? QImode : HImode;
+  operands[5] = gen_rtx_SCRATCH (mode);
+  operands[1] = copy_to_mode_reg (mode,
+                                  gen_int_mode (INTVAL (operands[1]), mode));
+  addr0 = copy_to_mode_reg (Pmode, XEXP (operands[0], 0));
+  operands[0] = gen_rtx_MEM (BLKmode, addr0);
+}")
+
+;; Zero-fill loop with a QImode counter: store __zero_reg__ with
+;; post-increment, decrement the counter, branch back while non-zero.
+(define_insn "*clrmemqi"
+  [(set (mem:BLK (match_operand:HI 0 "register_operand" "e"))
+	(const_int 0))
+   (use (match_operand:QI 1 "register_operand" "r"))
+   (use (match_operand:QI 2 "const_int_operand" "n"))
+   (clobber (match_scratch:HI 3 "=0"))
+   (clobber (match_scratch:QI 4 "=&1"))]
+  ""
+  "st %a0+,__zero_reg__
+        dec %1
+	brne .-6"
+  [(set_attr "length" "3")
+   (set_attr "cc" "clobber")])
+
+;; Zero-fill loop with a HImode counter.  Uses sbiw when the counter
+;; is in an adiw-capable register ("w"), subi/sbci otherwise ("d").
+(define_insn "*clrmemhi"
+  [(set (mem:BLK (match_operand:HI 0 "register_operand" "e,e"))
+	(const_int 0))
+   (use (match_operand:HI 1 "register_operand" "!w,d"))
+   (use (match_operand:HI 2 "const_int_operand" "n,n"))
+   (clobber (match_scratch:HI 3 "=0,0"))
+   (clobber (match_scratch:HI 4 "=&1,&1"))]
+  ""
+  "*{
+     if (which_alternative==0)
+       return (AS2 (st,%a0+,__zero_reg__) CR_TAB
+	       AS2 (sbiw,%A1,1) CR_TAB
+	       AS1 (brne,.-6));
+     else
+       return (AS2 (st,%a0+,__zero_reg__) CR_TAB
+	       AS2 (subi,%A1,1) CR_TAB
+	       AS2 (sbci,%B1,0) CR_TAB
+	       AS1 (brne,.-8));
+}"
+  [(set_attr "length" "3,4")
+   (set_attr "cc" "clobber,clobber")])
+
+;; Expand strlen: run the *strlenhi scan (which leaves the pointer one
+;; past the terminating NUL), subtract 1, then subtract the start
+;; address to get the length.  Only a zero search-char (operand 2 == 0)
+;; is supported; anything else FAILs to the library.
+(define_expand "strlenhi"
+    [(set (match_dup 4)
+	  (unspec:HI [(match_operand:BLK 1 "memory_operand" "")
+		      (match_operand:QI 2 "const_int_operand" "")
+		      (match_operand:HI 3 "immediate_operand" "")]
+		     UNSPEC_STRLEN))
+     (set (match_dup 4) (plus:HI (match_dup 4)
+				 (const_int -1)))
+     (set (match_operand:HI 0 "register_operand" "")
+	  (minus:HI (match_dup 4)
+		    (match_dup 5)))]
+   ""
+   "{
+  rtx addr;
+  if (! (GET_CODE (operands[2]) == CONST_INT && INTVAL (operands[2]) == 0))
+    FAIL;
+  addr = copy_to_mode_reg (Pmode, XEXP (operands[1],0));
+  operands[1] = gen_rtx_MEM (BLKmode, addr); 
+  operands[5] = addr;
+  operands[4] = gen_reg_rtx (HImode);
+}")
+
+;; Scan loop: load with post-increment, test for zero, branch back.
+(define_insn "*strlenhi"
+  [(set (match_operand:HI 0 "register_operand" "=e")
+	(unspec:HI [(mem:BLK (match_operand:HI 1 "register_operand" "%0"))
+		    (const_int 0)
+		    (match_operand:HI 2 "immediate_operand" "i")]
+		   UNSPEC_STRLEN))]
+  ""
+  "ld __tmp_reg__,%a0+
+	tst __tmp_reg__
+	brne .-6"
+  [(set_attr "length" "3")
+   (set_attr "cc" "clobber")])
+
+;+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
+; add bytes
+
+;; QImode add.  Alternatives: reg+reg (add); immediate on an LD reg
+;; (subi of the negated constant); +1 (inc); -1 (dec).
+(define_insn "addqi3"
+  [(set (match_operand:QI 0 "register_operand" "=r,d,r,r")
+	(plus:QI (match_operand:QI 1 "register_operand" "%0,0,0,0")
+		 (match_operand:QI 2 "nonmemory_operand" "r,i,P,N")))]
+  ""
+  "@
+	add %0,%2
+	subi %0,lo8(-(%2))
+	inc %0
+	dec %0"
+  [(set_attr "length" "1,1,1,1")
+   (set_attr "cc" "set_czn,set_czn,set_zn,set_zn")])
+
+
+;; HImode add expander.  Round-trips constant addends through a
+;; `short' so the CONST_INT is canonicalized to a sign-extended
+;; 16-bit value before matching *addhi3.
+(define_expand "addhi3"
+  [(set (match_operand:HI 0 "register_operand" "")
+	(plus:HI (match_operand:HI 1 "register_operand" "")
+		 (match_operand:HI 2 "nonmemory_operand" "")))]
+  ""
+  "
+{
+  if (GET_CODE (operands[2]) == CONST_INT)
+    {
+      short tmp = INTVAL (operands[2]);
+      operands[2] = GEN_INT(tmp);
+    }
+}")
+
+
+;; reg:HI += zero-extended QI (QI on the left of the plus).
+(define_insn "*addhi3_zero_extend"
+  [(set (match_operand:HI 0 "register_operand" "=r")
+	(plus:HI (zero_extend:HI
+		  (match_operand:QI 1 "register_operand" "r"))
+		 (match_operand:HI 2 "register_operand" "0")))]
+  ""
+  "add %A0,%1
+	adc %B0,__zero_reg__"
+  [(set_attr "length" "2")
+   (set_attr "cc" "set_n")])
+
+;; Same as above with the zero_extend on the right of the plus.
+(define_insn "*addhi3_zero_extend1"
+  [(set (match_operand:HI 0 "register_operand" "=r")
+	(plus:HI (match_operand:HI 1 "register_operand" "%0")
+		 (zero_extend:HI
+		  (match_operand:QI 2 "register_operand" "r"))))]
+  ""
+  "add %A0,%2
+	adc %B0,__zero_reg__"
+  [(set_attr "length" "2")
+   (set_attr "cc" "set_n")])
+
+;; zero_extend(QI) + zero_extend(QI): byte add, then materialize the
+;; carry into the high byte (mov does not change flags on AVR).
+(define_insn "*addhi3_zero_extend2"
+  [(set (match_operand:HI 0 "register_operand" "=r")
+	(plus:HI
+	 (zero_extend:HI (match_operand:QI 1 "register_operand" "%0"))
+	 (zero_extend:HI (match_operand:QI 2 "register_operand" "r"))))]
+  ""
+  "add %0,%2
+	mov %B0,__zero_reg__
+	adc %B0,__zero_reg__"
+  [(set_attr "length" "3")
+   (set_attr "cc" "set_n")])
+
+;; HImode add insn.  Alternatives: reg+reg; adiw/sbiw for small
+;; constants (I: 0..63, J: -63..0) on adiw-capable regs; subi/sbci of
+;; the negated constant on LD regs; +1/-1 via carry tricks (sec/adc,
+;; sec/sbc) for any register.
+(define_insn "*addhi3"
+  [(set (match_operand:HI 0 "register_operand" "=r,!w,!w,d,r,r")
+	(plus:HI
+	 (match_operand:HI 1 "register_operand" "%0,0,0,0,0,0")
+	 (match_operand:HI 2 "nonmemory_operand" "r,I,J,i,P,N")))]
+  ""
+  "@
+	add %A0,%A2\;adc %B0,%B2
+	adiw %A0,%2
+	sbiw %A0,%n2
+	subi %A0,lo8(-(%2))\;sbci %B0,hi8(-(%2))
+	sec\;adc %A0,__zero_reg__\;adc %B0,__zero_reg__
+	sec\;sbc %A0,__zero_reg__\;sbc %B0,__zero_reg__"
+  [(set_attr "length" "2,1,1,2,3,3")
+   (set_attr "cc" "set_n,set_czn,set_czn,set_czn,set_n,set_n")])
+
+;; SImode add, same alternative scheme as *addhi3 extended to 4 bytes.
+(define_insn "addsi3"
+  [(set (match_operand:SI 0 "register_operand" "=r,!w,!w,d,r,r")
+	  (plus:SI
+	   (match_operand:SI 1 "register_operand" "%0,0,0,0,0,0")
+	   (match_operand:SI 2 "nonmemory_operand" "r,I,J,i,P,N")))]
+  ""
+  "@
+	add %A0,%A2\;adc %B0,%B2\;adc %C0,%C2\;adc %D0,%D2
+	adiw %0,%2\;adc %C0,__zero_reg__\;adc %D0,__zero_reg__
+	sbiw %0,%n2\;sbc %C0,__zero_reg__\;sbc %D0,__zero_reg__
+	subi %0,lo8(-(%2))\;sbci %B0,hi8(-(%2))\;sbci %C0,hlo8(-(%2))\;sbci %D0,hhi8(-(%2))
+	sec\;adc %A0,__zero_reg__\;adc %B0,__zero_reg__\;adc %C0,__zero_reg__\;adc %D0,__zero_reg__
+	sec\;sbc %A0,__zero_reg__\;sbc %B0,__zero_reg__\;sbc %C0,__zero_reg__\;sbc %D0,__zero_reg__"
+  [(set_attr "length" "4,3,3,4,5,5")
+   (set_attr "cc" "set_n,set_n,set_czn,set_czn,set_n,set_n")])
+
+;; reg:SI += zero-extended QI: byte add, carry rippled with adc.
+(define_insn "*addsi3_zero_extend"
+  [(set (match_operand:SI 0 "register_operand" "=r")
+	(plus:SI (zero_extend:SI
+		  (match_operand:QI 1 "register_operand" "r"))
+		 (match_operand:SI 2 "register_operand" "0")))]
+  ""
+  "add %A0,%1
+	adc %B0,__zero_reg__
+	adc %C0,__zero_reg__
+	adc %D0,__zero_reg__"
+  [(set_attr "length" "4")
+   (set_attr "cc" "set_n")])
+
+;-----------------------------------------------------------------------------
+; sub bytes
+;; QImode subtract: reg-reg (sub) or immediate on an LD reg (subi).
+(define_insn "subqi3"
+  [(set (match_operand:QI 0 "register_operand" "=r,d")
+        (minus:QI (match_operand:QI 1 "register_operand" "0,0")
+                  (match_operand:QI 2 "nonmemory_operand" "r,i")))]
+  ""
+  "@
+	sub %0,%2
+	subi %0,lo8(%2)"
+  [(set_attr "length" "1,1")
+   (set_attr "cc" "set_czn,set_czn")])
+
+;; HImode subtract: sub/sbc for registers, subi/sbci for immediates.
+(define_insn "subhi3"
+  [(set (match_operand:HI 0 "register_operand" "=r,d")
+        (minus:HI (match_operand:HI 1 "register_operand" "0,0")
+		  (match_operand:HI 2 "nonmemory_operand" "r,i")))]
+  ""
+  "@
+	sub %A0,%A2\;sbc %B0,%B2
+	subi %A0,lo8(%2)\;sbci %B0,hi8(%2)"
+  [(set_attr "length" "2,2")
+   (set_attr "cc" "set_czn,set_czn")])
+
+;; reg:HI -= zero-extended QI.
+(define_insn "*subhi3_zero_extend1"
+  [(set (match_operand:HI 0 "register_operand" "=r")
+	(minus:HI (match_operand:HI 1 "register_operand" "0")
+		  (zero_extend:HI
+		   (match_operand:QI 2 "register_operand" "r"))))]
+  ""
+  "sub %A0,%2
+	sbc %B0,__zero_reg__"
+  [(set_attr "length" "2")
+   (set_attr "cc" "set_n")])
+
+;; SImode subtract.
+(define_insn "subsi3"
+  [(set (match_operand:SI 0 "register_operand" "=r,d")
+        (minus:SI (match_operand:SI 1 "register_operand" "0,0")
+		  (match_operand:SI 2 "nonmemory_operand" "r,i")))]
+  ""
+  "@
+	sub %0,%2\;sbc %B0,%B2\;sbc %C0,%C2\;sbc %D0,%D2
+	subi %A0,lo8(%2)\;sbci %B0,hi8(%2)\;sbci %C0,hlo8(%2)\;sbci %D0,hhi8(%2)"
+  [(set_attr "length" "4,4")
+   (set_attr "cc" "set_czn,set_czn")])
+
+;; reg:SI -= zero-extended QI.
+(define_insn "*subsi3_zero_extend"
+  [(set (match_operand:SI 0 "register_operand" "=r")
+	(minus:SI (match_operand:SI 1 "register_operand" "0")
+		  (zero_extend:SI
+		   (match_operand:QI 2 "register_operand" "r"))))]
+  ""
+  "sub %A0,%2
+	sbc %B0,__zero_reg__
+	sbc %C0,__zero_reg__
+	sbc %D0,__zero_reg__"
+  [(set_attr "length" "4")
+   (set_attr "cc" "set_n")])
+
+;******************************************************************************
+; mul
+
+;; QImode multiply.  Enhanced-core devices (AVR_ENHANCED) have a
+;; hardware MUL; otherwise expand to a libgcc call (__mulqi3).
+(define_expand "mulqi3"
+  [(set (match_operand:QI 0 "register_operand" "")
+	(mult:QI (match_operand:QI 1 "register_operand" "")
+		 (match_operand:QI 2 "register_operand" "")))]
+  ""
+  "{
+  if (!AVR_ENHANCED)
+    {
+      emit_insn (gen_mulqi3_call (operands[0], operands[1], operands[2]));
+      DONE;
+    }
+}")
+
+;; Hardware multiply; result lands in r1:r0, so r1 (__zero_reg__)
+;; must be cleared afterwards.
+(define_insn "*mulqi3_enh"
+  [(set (match_operand:QI 0 "register_operand" "=r")
+	(mult:QI (match_operand:QI 1 "register_operand" "r")
+		 (match_operand:QI 2 "register_operand" "r")))]
+  "AVR_ENHANCED"
+  "mul %1,%2
+	mov %0,r0
+	clr r1"
+  [(set_attr "length" "3")
+   (set_attr "cc" "clobber")])
+
+;; Library-call sequence: marshal args into r24/r22, call, move result.
+(define_expand "mulqi3_call"
+  [(set (reg:QI 24) (match_operand:QI 1 "register_operand" ""))
+   (set (reg:QI 22) (match_operand:QI 2 "register_operand" ""))
+   (parallel [(set (reg:QI 24) (mult:QI (reg:QI 24) (reg:QI 22)))
+	      (clobber (reg:QI 22))])
+   (set (match_operand:QI 0 "register_operand" "") (reg:QI 24))]
+  ""
+  "")
+
+(define_insn "*mulqi3_call"
+  [(set (reg:QI 24) (mult:QI (reg:QI 24) (reg:QI 22)))
+   (clobber (reg:QI 22))]
+  "!AVR_ENHANCED"
+  "%~call __mulqi3"
+  [(set_attr "type" "xcall")
+   (set_attr "cc" "clobber")])
+
+;; Signed QI x QI -> HI widening multiply (muls needs d-registers).
+(define_insn "mulqihi3"
+  [(set (match_operand:HI 0 "register_operand" "=r")
+	(mult:HI (sign_extend:HI (match_operand:QI 1 "register_operand" "d"))
+		 (sign_extend:HI (match_operand:QI 2 "register_operand" "d"))))]
+  "AVR_ENHANCED"
+  "muls %1,%2
+	movw %0,r0
+	clr r1"
+  [(set_attr "length" "3")
+   (set_attr "cc" "clobber")])
+
+;; Unsigned QI x QI -> HI widening multiply.
+(define_insn "umulqihi3"
+  [(set (match_operand:HI 0 "register_operand" "=r")
+	(mult:HI (zero_extend:HI (match_operand:QI 1 "register_operand" "r"))
+		 (zero_extend:HI (match_operand:QI 2 "register_operand" "r"))))]
+  "AVR_ENHANCED"
+  "mul %1,%2
+	movw %0,r0
+	clr r1"
+  [(set_attr "length" "3")
+   (set_attr "cc" "clobber")])
+
+;; HImode multiply: hardware sequence on enhanced cores, library call
+;; (__mulhi3) otherwise.
+(define_expand "mulhi3"
+  [(set (match_operand:HI 0 "register_operand" "")
+	(mult:HI (match_operand:HI 1 "register_operand" "")
+		 (match_operand:HI 2 "register_operand" "")))]
+  ""
+  "
+{
+  if (!AVR_ENHANCED)
+    {
+      emit_insn (gen_mulhi3_call (operands[0], operands[1], operands[2]));
+      DONE;
+    }
+}")
+
+;; 16x16 -> low 16 bits via three 8x8 partial products; output is
+;; earlyclobber ("=&r") because it is written before all inputs are
+;; consumed.  r1 cleared at the end as usual.
+(define_insn "*mulhi3_enh"
+  [(set (match_operand:HI 0 "register_operand" "=&r")
+	(mult:HI (match_operand:HI 1 "register_operand" "r")
+		 (match_operand:HI 2 "register_operand" "r")))]
+  "AVR_ENHANCED"
+  "mul %A1,%A2
+	movw %0,r0
+	mul %A1,%B2
+	add %B0,r0
+	mul %B1,%A2
+	add %B0,r0
+	clr r1"
+  [(set_attr "length" "7")
+   (set_attr "cc" "clobber")])
+
+;; Library-call sequence for HImode multiply (args in r24/r22).
+(define_expand "mulhi3_call"
+  [(set (reg:HI 24) (match_operand:HI 1 "register_operand" ""))
+   (set (reg:HI 22) (match_operand:HI 2 "register_operand" ""))
+   (parallel [(set (reg:HI 24) (mult:HI (reg:HI 24) (reg:HI 22)))
+	      (clobber (reg:HI 22))
+	      (clobber (reg:QI 21))])
+   (set (match_operand:HI 0 "register_operand" "") (reg:HI 24))]
+  ""
+  "")
+
+(define_insn "*mulhi3_call"
+  [(set (reg:HI 24) (mult:HI (reg:HI 24) (reg:HI 22)))
+   (clobber (reg:HI 22))
+   (clobber (reg:QI 21))]
+  "!AVR_ENHANCED"
+  "%~call __mulhi3"
+  [(set_attr "type" "xcall")
+   (set_attr "cc" "clobber")])
+
+;; Operand 2 (reg:SI 18) not clobbered on the enhanced core.
+;; All call-used registers clobbered otherwise - normal library call.
+(define_expand "mulsi3"
+  [(set (reg:SI 22) (match_operand:SI 1 "register_operand" ""))
+   (set (reg:SI 18) (match_operand:SI 2 "register_operand" ""))
+   (parallel [(set (reg:SI 22) (mult:SI (reg:SI 22) (reg:SI 18)))
+	      (clobber (reg:HI 26))
+	      (clobber (reg:HI 30))])
+   (set (match_operand:SI 0 "register_operand" "") (reg:SI 22))]
+  "AVR_ENHANCED"
+  "")
+
+(define_insn "*mulsi3_call"
+  [(set (reg:SI 22) (mult:SI (reg:SI 22) (reg:SI 18)))
+   (clobber (reg:HI 26))
+   (clobber (reg:HI 30))]
+  "AVR_ENHANCED"
+  "%~call __mulsi3"
+  [(set_attr "type" "xcall")
+   (set_attr "cc" "clobber")])
+
+; / % / % / % / % / % / % / % / % / % / % / % / % / % / % / % / % / % / % / %
+; divmod
+
+;; Generate libgcc.S calls ourselves, because:
+;; - we know exactly which registers are clobbered (for QI and HI
+;; modes, some of the call-used registers are preserved)
+;; - we get both the quotient and the remainder at no extra cost
+
+;; QImode signed divmod: dividend in r24, divisor in r22; quotient
+;; returned in r24, remainder in r25.
+(define_expand "divmodqi4"
+  [(set (reg:QI 24) (match_operand:QI 1 "register_operand" ""))
+   (set (reg:QI 22) (match_operand:QI 2 "register_operand" ""))
+   (parallel [(set (reg:QI 24) (div:QI (reg:QI 24) (reg:QI 22)))
+	      (set (reg:QI 25) (mod:QI (reg:QI 24) (reg:QI 22)))
+	      (clobber (reg:QI 22))
+	      (clobber (reg:QI 23))])
+   (set (match_operand:QI 0 "register_operand" "") (reg:QI 24))
+   (set (match_operand:QI 3 "register_operand" "") (reg:QI 25))]
+  ""
+  "")
+
+(define_insn "*divmodqi4_call"
+  [(set (reg:QI 24) (div:QI (reg:QI 24) (reg:QI 22)))
+   (set (reg:QI 25) (mod:QI (reg:QI 24) (reg:QI 22)))
+   (clobber (reg:QI 22))
+   (clobber (reg:QI 23))]
+  ""
+  "%~call __divmodqi4"
+  [(set_attr "type" "xcall")
+   (set_attr "cc" "clobber")])
+
+;; QImode unsigned divmod; same register convention as divmodqi4.
+(define_expand "udivmodqi4"
+ [(set (reg:QI 24) (match_operand:QI 1 "register_operand" ""))
+  (set (reg:QI 22) (match_operand:QI 2 "register_operand" ""))
+  (parallel [(set (reg:QI 24) (udiv:QI (reg:QI 24) (reg:QI 22)))
+	     (set (reg:QI 25) (umod:QI (reg:QI 24) (reg:QI 22)))
+	     (clobber (reg:QI 23))])
+  (set (match_operand:QI 0 "register_operand" "") (reg:QI 24))
+  (set (match_operand:QI 3 "register_operand" "") (reg:QI 25))]
+  ""
+  "")
+
+(define_insn "*udivmodqi4_call"
+  [(set (reg:QI 24) (udiv:QI (reg:QI 24) (reg:QI 22)))
+   (set (reg:QI 25) (umod:QI (reg:QI 24) (reg:QI 22)))
+   (clobber (reg:QI 23))]
+  ""
+  "%~call __udivmodqi4"
+  [(set_attr "type" "xcall")
+   (set_attr "cc" "clobber")])
+
+;; HImode signed divmod: dividend r25:r24, divisor r23:r22; quotient
+;; returned in r23:r22, remainder in r25:r24.
+(define_expand "divmodhi4"
+  [(set (reg:HI 24) (match_operand:HI 1 "register_operand" ""))
+   (set (reg:HI 22) (match_operand:HI 2 "register_operand" ""))
+   (parallel [(set (reg:HI 22) (div:HI (reg:HI 24) (reg:HI 22)))
+	      (set (reg:HI 24) (mod:HI (reg:HI 24) (reg:HI 22)))
+	      (clobber (reg:HI 26))
+	      (clobber (reg:QI 21))])
+   (set (match_operand:HI 0 "register_operand" "") (reg:HI 22))
+   (set (match_operand:HI 3 "register_operand" "") (reg:HI 24))]
+  ""
+  "")
+
+(define_insn "*divmodhi4_call"
+  [(set (reg:HI 22) (div:HI (reg:HI 24) (reg:HI 22)))
+   (set (reg:HI 24) (mod:HI (reg:HI 24) (reg:HI 22)))
+   (clobber (reg:HI 26))
+   (clobber (reg:QI 21))]
+  ""
+  "%~call __divmodhi4"
+  [(set_attr "type" "xcall")
+   (set_attr "cc" "clobber")])
+
+;; HImode unsigned divmod; same register convention as divmodhi4.
+(define_expand "udivmodhi4"
+  [(set (reg:HI 24) (match_operand:HI 1 "register_operand" ""))
+   (set (reg:HI 22) (match_operand:HI 2 "register_operand" ""))
+   (parallel [(set (reg:HI 22) (udiv:HI (reg:HI 24) (reg:HI 22)))
+	      (set (reg:HI 24) (umod:HI (reg:HI 24) (reg:HI 22)))
+	      (clobber (reg:HI 26))
+	      (clobber (reg:QI 21))])
+   (set (match_operand:HI 0 "register_operand" "") (reg:HI 22))
+   (set (match_operand:HI 3 "register_operand" "") (reg:HI 24))]
+  ""
+  "")
+
+(define_insn "*udivmodhi4_call"
+  [(set (reg:HI 22) (udiv:HI (reg:HI 24) (reg:HI 22)))
+   (set (reg:HI 24) (umod:HI (reg:HI 24) (reg:HI 22)))
+   (clobber (reg:HI 26))
+   (clobber (reg:QI 21))]
+  ""
+  "%~call __udivmodhi4"
+  [(set_attr "type" "xcall")
+   (set_attr "cc" "clobber")])
+
+;; SImode signed divmod: dividend r25..r22, divisor r21..r18; quotient
+;; returned in r21..r18, remainder in r25..r22.
+(define_expand "divmodsi4"
+  [(set (reg:SI 22) (match_operand:SI 1 "register_operand" ""))
+   (set (reg:SI 18) (match_operand:SI 2 "register_operand" ""))
+   (parallel [(set (reg:SI 18) (div:SI (reg:SI 22) (reg:SI 18)))
+	      (set (reg:SI 22) (mod:SI (reg:SI 22) (reg:SI 18)))
+	      (clobber (reg:HI 26))
+	      (clobber (reg:HI 30))])
+   (set (match_operand:SI 0 "register_operand" "") (reg:SI 18))
+   (set (match_operand:SI 3 "register_operand" "") (reg:SI 22))]
+  ""
+  "")
+
+(define_insn "*divmodsi4_call"
+  [(set (reg:SI 18) (div:SI (reg:SI 22) (reg:SI 18)))
+   (set (reg:SI 22) (mod:SI (reg:SI 22) (reg:SI 18)))
+   (clobber (reg:HI 26))
+   (clobber (reg:HI 30))]
+  ""
+  "%~call __divmodsi4"
+  [(set_attr "type" "xcall")
+   (set_attr "cc" "clobber")])
+
+;; SImode unsigned divmod; same register convention as divmodsi4.
+(define_expand "udivmodsi4"
+  [(set (reg:SI 22) (match_operand:SI 1 "register_operand" ""))
+   (set (reg:SI 18) (match_operand:SI 2 "register_operand" ""))
+   (parallel [(set (reg:SI 18) (udiv:SI (reg:SI 22) (reg:SI 18)))
+	      (set (reg:SI 22) (umod:SI (reg:SI 22) (reg:SI 18)))
+	      (clobber (reg:HI 26))
+	      (clobber (reg:HI 30))])
+   (set (match_operand:SI 0 "register_operand" "") (reg:SI 18))
+   (set (match_operand:SI 3 "register_operand" "") (reg:SI 22))]
+  ""
+  "")
+
+(define_insn "*udivmodsi4_call"
+  [(set (reg:SI 18) (udiv:SI (reg:SI 22) (reg:SI 18)))
+   (set (reg:SI 22) (umod:SI (reg:SI 22) (reg:SI 18)))
+   (clobber (reg:HI 26))
+   (clobber (reg:HI 30))]
+  ""
+  "%~call __udivmodsi4"
+  [(set_attr "type" "xcall")
+   (set_attr "cc" "clobber")])
+
+;&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&
+; and
+
+;; QImode AND: reg-reg (and) or immediate on an LD reg (andi).
+(define_insn "andqi3"
+  [(set (match_operand:QI 0 "register_operand" "=r,d")
+        (and:QI (match_operand:QI 1 "register_operand" "%0,0")
+                (match_operand:QI 2 "nonmemory_operand" "r,i")))]
+  ""
+  "@
+	and %0,%2
+	andi %0,lo8(%2)"
+  [(set_attr "length" "1,1")
+   (set_attr "cc" "set_zn,set_zn")])
+
+;; HImode AND.  For constant masks, bytes that are 0xff are skipped
+;; (no andi emitted for them).  The third alternative handles an
+;; 8-bit mask ("M") on a non-LD reg via an LD_REGS scratch, clearing
+;; the high byte outright.
+(define_insn "andhi3"
+  [(set (match_operand:HI 0 "register_operand" "=r,d,r")
+	  (and:HI (match_operand:HI 1 "register_operand" "%0,0,0")
+		  (match_operand:HI 2 "nonmemory_operand" "r,i,M")))
+   (clobber (match_scratch:QI 3 "=X,X,&d"))]
+  ""
+  "*{
+  if (which_alternative==0)
+    return (AS2 (and,%A0,%A2) CR_TAB
+	    AS2 (and,%B0,%B2));
+  else if (which_alternative==1)
+    {
+      if (GET_CODE (operands[2]) == CONST_INT)
+        {
+	  int mask = INTVAL (operands[2]);
+	  if ((mask & 0xff) != 0xff)
+	    output_asm_insn (AS2 (andi,%A0,lo8(%2)), operands);
+	  if ((mask & 0xff00) != 0xff00)
+	    output_asm_insn (AS2 (andi,%B0,hi8(%2)), operands);
+	  return \"\";
+        }
+      return (AS2 (andi,%A0,lo8(%2)) CR_TAB
+	      AS2 (andi,%B0,hi8(%2)));
+    }
+  return (AS2 (ldi,%3,lo8(%2)) CR_TAB
+	  AS2 (and,%A0,%3) CR_TAB
+	  AS1 (clr,%B0));
+}"
+  [(set_attr "length" "2,2,3")
+   (set_attr "cc" "set_n,clobber,set_n")])
+
+;; SImode AND; all-ones bytes of a constant mask are skipped as in
+;; andhi3.  The \"bug\" return is an unreachable safety net.
+(define_insn "andsi3"
+  [(set (match_operand:SI 0 "register_operand" "=r,d")
+	(and:SI (match_operand:SI 1 "register_operand" "%0,0")
+		(match_operand:SI 2 "nonmemory_operand" "r,i")))]
+  ""
+  "*{
+  if (which_alternative==0)
+    return (AS2 (and, %0,%2)   CR_TAB
+	    AS2 (and, %B0,%B2) CR_TAB
+	    AS2 (and, %C0,%C2) CR_TAB
+	    AS2 (and, %D0,%D2));
+  else if (which_alternative==1)
+    {
+      if (GET_CODE (operands[2]) == CONST_INT)
+        {
+	  HOST_WIDE_INT mask = INTVAL (operands[2]);
+	  if ((mask & 0xff) != 0xff)
+	    output_asm_insn (AS2 (andi,%A0,lo8(%2)), operands);
+	  if ((mask & 0xff00) != 0xff00)
+	    output_asm_insn (AS2 (andi,%B0,hi8(%2)), operands);
+	  if ((mask & 0xff0000L) != 0xff0000L)
+	    output_asm_insn (AS2 (andi,%C0,hlo8(%2)), operands);
+	  if ((mask & 0xff000000L) != 0xff000000L)
+	    output_asm_insn (AS2 (andi,%D0,hhi8(%2)), operands);
+	  return \"\";
+        }
+      return (AS2 (andi, %A0,lo8(%2))  CR_TAB
+	      AS2 (andi, %B0,hi8(%2)) CR_TAB
+	      AS2 (andi, %C0,hlo8(%2)) CR_TAB
+	      AS2 (andi, %D0,hhi8(%2)));
+    }
+  return \"bug\";
+}"
+  [(set_attr "length" "4,4")
+   (set_attr "cc" "set_n,set_n")])
+
+;;|||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||
+;; ior
+
+;; Bitwise OR, QImode: reg-reg OR, or ORI immediate (d-registers).
+(define_insn "iorqi3"
+  [(set (match_operand:QI 0 "register_operand" "=r,d")
+        (ior:QI (match_operand:QI 1 "register_operand" "%0,0")
+                (match_operand:QI 2 "nonmemory_operand" "r,i")))]
+  ""
+  "@
+	or %0,%2
+	ori %0,lo8(%2)"
+  [(set_attr "length" "1,1")
+   (set_attr "cc" "set_zn,set_zn")])
+
+;; Bitwise OR, HImode.  For a CONST_INT each ORI is skipped when the
+;; corresponding mask byte is zero (ORing with 0 is a no-op).
+(define_insn "iorhi3"
+  [(set (match_operand:HI 0 "register_operand" "=r,d")
+        (ior:HI (match_operand:HI 1 "register_operand" "%0,0")
+                (match_operand:HI 2 "nonmemory_operand" "r,i")))]
+  ""
+  "*{
+  if (which_alternative==0)
+    return (AS2 (or,%A0,%A2) CR_TAB
+	    AS2 (or,%B0,%B2));
+  if (GET_CODE (operands[2]) == CONST_INT)
+    {
+      int mask = INTVAL (operands[2]);
+      if (mask & 0xff)
+	output_asm_insn (AS2 (ori,%A0,lo8(%2)), operands);
+      if (mask & 0xff00)
+	output_asm_insn (AS2 (ori,%B0,hi8(%2)), operands);
+      return \"\";
+    }
+   return (AS2 (ori,%0,lo8(%2)) CR_TAB
+	   AS2 (ori,%B0,hi8(%2)));
+}"
+  [(set_attr "length" "2,2")
+   (set_attr "cc" "set_n,clobber")])
+
+;; HImode OR into a non-d register: route the immediate through a
+;; d-register scratch so LDI can be used.
+(define_insn "*iorhi3_clobber"
+  [(set (match_operand:HI 0 "register_operand" "=r,r")
+        (ior:HI (match_operand:HI 1 "register_operand" "%0,0")
+                (match_operand:HI 2 "immediate_operand" "M,i")))
+   (clobber (match_scratch:QI 3 "=&d,&d"))]
+  ""
+  "@
+	ldi %3,lo8(%2)\;or %A0,%3
+	ldi %3,lo8(%2)\;or %A0,%3\;ldi %3,hi8(%2)\;or %B0,%3"
+  [(set_attr "length" "2,4")
+   (set_attr "cc" "clobber,set_n")])
+
+;; Bitwise OR, SImode.  Same byte-skipping optimization as iorhi3,
+;; extended to all four bytes.
+(define_insn "iorsi3"
+  [(set (match_operand:SI 0 "register_operand" "=r,d")
+        (ior:SI (match_operand:SI 1 "register_operand" "%0,0")
+                (match_operand:SI 2 "nonmemory_operand" "r,i")))]
+  ""
+  "*{
+  if (which_alternative==0)
+    return (AS2 (or, %0,%2) CR_TAB
+	    AS2 (or, %B0,%B2) CR_TAB
+	    AS2 (or, %C0,%C2) CR_TAB
+	    AS2 (or, %D0,%D2));
+  if (GET_CODE (operands[2]) == CONST_INT)
+    {
+      HOST_WIDE_INT mask = INTVAL (operands[2]);
+      if (mask & 0xff)
+	output_asm_insn (AS2 (ori,%A0,lo8(%2)), operands);
+      if (mask & 0xff00)
+	output_asm_insn (AS2 (ori,%B0,hi8(%2)), operands);
+      if (mask & 0xff0000L)
+	output_asm_insn (AS2 (ori,%C0,hlo8(%2)), operands);
+      if (mask & 0xff000000L)
+	output_asm_insn (AS2 (ori,%D0,hhi8(%2)), operands);
+      return \"\";
+    }
+  return (AS2 (ori, %A0,lo8(%2)) CR_TAB
+	  AS2 (ori, %B0,hi8(%2)) CR_TAB
+	  AS2 (ori, %C0,hlo8(%2)) CR_TAB
+	  AS2 (ori, %D0,hhi8(%2)));
+}"
+  [(set_attr "length" "4,4")
+   (set_attr "cc" "set_n,clobber")])
+
+;; SImode OR into a non-d register via a d-register scratch.
+(define_insn "*iorsi3_clobber"
+  [(set (match_operand:SI 0 "register_operand" "=r,r")
+        (ior:SI (match_operand:SI 1 "register_operand" "%0,0")
+                (match_operand:SI 2 "immediate_operand" "M,i")))
+   (clobber (match_scratch:QI 3 "=&d,&d"))]
+  ""
+  "@
+	ldi %3,lo8(%2)\;or %A0,%3
+	ldi %3,lo8(%2)\;or %A0,%3\;ldi %3,hi8(%2)\;or %B0,%3\;ldi %3,hlo8(%2)\;or %C0,%3\;ldi %3,hhi8(%2)\;or %D0,%3"
+  [(set_attr "length" "2,8")
+   (set_attr "cc" "clobber,set_n")])
+
+;;^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+;; xor
+
+;; Bitwise XOR, QI/HI/SImode.  AVR has no XOR-immediate instruction,
+;; so all three patterns are register-register only (EOR per byte).
+(define_insn "xorqi3"
+  [(set (match_operand:QI 0 "register_operand" "=r")
+        (xor:QI (match_operand:QI 1 "register_operand" "%0")
+                (match_operand:QI 2 "register_operand" "r")))]
+  ""
+  "eor %0,%2"
+  [(set_attr "length" "1")
+   (set_attr "cc" "set_zn")])
+
+(define_insn "xorhi3"
+  [(set (match_operand:HI 0 "register_operand" "=r")
+        (xor:HI (match_operand:HI 1 "register_operand" "%0")
+                (match_operand:HI 2 "register_operand" "r")))]
+  ""
+  "eor %0,%2
+	eor %B0,%B2"
+  [(set_attr "length" "2")
+   (set_attr "cc" "set_n")])
+
+(define_insn "xorsi3"
+  [(set (match_operand:SI 0 "register_operand" "=r")
+        (xor:SI (match_operand:SI 1 "register_operand" "%0")
+                (match_operand:SI 2 "register_operand" "r")))]
+  ""
+  "eor %0,%2
+	eor %B0,%B2
+	eor %C0,%C2
+	eor %D0,%D2"
+  [(set_attr "length" "4")
+   (set_attr "cc" "set_n")])
+
+;;<< << << << << << << << << << << << << << << << << << << << << << << << << <<
+;; arithmetic shift left
+
+;; Arithmetic shift left.  The actual code sequences are produced by the
+;; ashl*3_out C helpers in avr.c; the constraint alternatives distinguish
+;; variable counts (r), zero (L), small constants (P/K/O) and general
+;; constants (n), with per-alternative length and cc effects.
+(define_insn "ashlqi3"
+  [(set (match_operand:QI 0 "register_operand" "=r,r,r,r,!d,r,r")
+        (ashift:QI (match_operand:QI 1 "register_operand" "0,0,0,0,0,0,0")
+                   (match_operand:QI 2 "general_operand" "r,L,P,K,n,n,Qm")))]
+  ""
+  "* return ashlqi3_out (insn, operands, NULL);"
+  [(set_attr "length" "5,0,1,2,4,6,9")
+   (set_attr "cc" "clobber,none,set_czn,set_czn,set_czn,set_czn,clobber")])
+
+(define_insn "ashlhi3"
+  [(set (match_operand:HI 0 "register_operand" "=r,r,r,r,r,r,r")
+        (ashift:HI (match_operand:HI 1 "register_operand" "0,0,0,r,0,0,0")
+                   (match_operand:QI 2 "general_operand" "r,L,P,O,K,n,Qm")))]
+  ""
+  "* return ashlhi3_out (insn, operands, NULL);"
+  [(set_attr "length" "6,0,2,2,4,10,10")
+   (set_attr "cc" "clobber,none,set_n,clobber,set_n,clobber,clobber")])
+
+(define_insn "ashlsi3"
+  [(set (match_operand:SI 0 "register_operand" "=r,r,r,r,r,r,r")
+        (ashift:SI (match_operand:SI 1 "register_operand" "0,0,0,r,0,0,0")
+                   (match_operand:QI 2 "general_operand" "r,L,P,O,K,n,Qm")))]
+  ""
+  "* return ashlsi3_out (insn, operands, NULL);"
+  [(set_attr "length" "8,0,4,4,8,10,12")
+   (set_attr "cc" "clobber,none,set_n,clobber,set_n,clobber,clobber")])
+
+;; Optimize if a scratch register from LD_REGS happens to be available.
+
+;; Rewrite a constant HI shift to the *ashlhi3_const form with an extra
+;; QI scratch clobber; avr_peep2_scratch_safe guards the transformation.
+(define_peephole2
+  [(match_scratch:QI 3 "d")
+   (set (match_operand:HI 0 "register_operand" "")
+        (ashift:HI (match_operand:HI 1 "register_operand" "")
+                   (match_operand:QI 2 "const_int_operand" "")))]
+  ""
+  [(parallel [(set (match_dup 0) (ashift:HI (match_dup 1) (match_dup 2)))
+	      (clobber (match_dup 3))])]
+  "if (!avr_peep2_scratch_safe (operands[3]))
+     FAIL;")
+
+(define_insn "*ashlhi3_const"
+  [(set (match_operand:HI 0 "register_operand" "=r,r,r,r,r")
+        (ashift:HI (match_operand:HI 1 "register_operand" "0,0,r,0,0")
+                   (match_operand:QI 2 "const_int_operand" "L,P,O,K,n")))
+   (clobber (match_scratch:QI 3 "=X,X,X,X,&d"))]
+  "reload_completed"
+  "* return ashlhi3_out (insn, operands, NULL);"
+  [(set_attr "length" "0,2,2,4,10")
+   (set_attr "cc" "none,set_n,clobber,set_n,clobber")])
+
+;; Same scratch-register rewrite for constant SI shifts.
+(define_peephole2
+  [(match_scratch:QI 3 "d")
+   (set (match_operand:SI 0 "register_operand" "")
+        (ashift:SI (match_operand:SI 1 "register_operand" "")
+                   (match_operand:QI 2 "const_int_operand" "")))]
+  ""
+  [(parallel [(set (match_dup 0) (ashift:SI (match_dup 1) (match_dup 2)))
+	      (clobber (match_dup 3))])]
+  "if (!avr_peep2_scratch_safe (operands[3]))
+     FAIL;")
+
+(define_insn "*ashlsi3_const"
+  [(set (match_operand:SI 0 "register_operand" "=r,r,r,r")
+        (ashift:SI (match_operand:SI 1 "register_operand" "0,0,r,0")
+                   (match_operand:QI 2 "const_int_operand" "L,P,O,n")))
+   (clobber (match_scratch:QI 3 "=X,X,X,&d"))]
+  "reload_completed"
+  "* return ashlsi3_out (insn, operands, NULL);"
+  [(set_attr "length" "0,4,4,10")
+   (set_attr "cc" "none,set_n,clobber,clobber")])
+
+;; >> >> >> >> >> >> >> >> >> >> >> >> >> >> >> >> >> >> >> >> >> >> >> >> >>
+;; arithmetic shift right
+
+;; Arithmetic (sign-preserving) shift right; code emitted by the
+;; ashr*3_out helpers in avr.c.  Structure mirrors the ashl patterns.
+(define_insn "ashrqi3"
+  [(set (match_operand:QI 0 "register_operand" "=r,r,r,r,r,r")
+        (ashiftrt:QI (match_operand:QI 1 "register_operand" "0,0,0,0,0,0")
+                     (match_operand:QI 2 "general_operand" "r,L,P,K,n,Qm")))]
+  ""
+  "* return ashrqi3_out (insn, operands, NULL);"
+  [(set_attr "length" "5,0,1,2,5,9")
+   (set_attr "cc" "clobber,none,clobber,clobber,clobber,clobber")])
+
+(define_insn "ashrhi3"
+  [(set (match_operand:HI 0 "register_operand" "=r,r,r,r,r,r,r")
+        (ashiftrt:HI (match_operand:HI 1 "register_operand" "0,0,0,r,0,0,0")
+                     (match_operand:QI 2 "general_operand" "r,L,P,O,K,n,Qm")))]
+  ""
+  "* return ashrhi3_out (insn, operands, NULL);"
+  [(set_attr "length" "6,0,2,4,4,10,10")
+   (set_attr "cc" "clobber,none,clobber,set_n,clobber,clobber,clobber")])
+
+(define_insn "ashrsi3"
+  [(set (match_operand:SI 0 "register_operand" "=r,r,r,r,r,r,r")
+        (ashiftrt:SI (match_operand:SI 1 "register_operand" "0,0,0,r,0,0,0")
+                     (match_operand:QI 2 "general_operand" "r,L,P,O,K,n,Qm")))]
+  ""
+  "* return ashrsi3_out (insn, operands, NULL);"
+  [(set_attr "length" "8,0,4,6,8,10,12")
+   (set_attr "cc" "clobber,none,clobber,set_n,clobber,clobber,clobber")])
+
+;; Optimize if a scratch register from LD_REGS happens to be available.
+
+;; Rewrite constant HI/SI right shifts to the *_const variants below,
+;; handing them a d-register scratch (guarded by avr_peep2_scratch_safe).
+(define_peephole2
+  [(match_scratch:QI 3 "d")
+   (set (match_operand:HI 0 "register_operand" "")
+	(ashiftrt:HI (match_operand:HI 1 "register_operand" "")
+		     (match_operand:QI 2 "const_int_operand" "")))]
+  ""
+  [(parallel [(set (match_dup 0) (ashiftrt:HI (match_dup 1) (match_dup 2)))
+	      (clobber (match_dup 3))])]
+  "if (!avr_peep2_scratch_safe (operands[3]))
+     FAIL;")
+
+(define_insn "*ashrhi3_const"
+  [(set (match_operand:HI 0 "register_operand" "=r,r,r,r,r")
+        (ashiftrt:HI (match_operand:HI 1 "register_operand" "0,0,r,0,0")
+                     (match_operand:QI 2 "const_int_operand" "L,P,O,K,n")))
+   (clobber (match_scratch:QI 3 "=X,X,X,X,&d"))]
+  "reload_completed"
+  "* return ashrhi3_out (insn, operands, NULL);"
+  [(set_attr "length" "0,2,4,4,10")
+   (set_attr "cc" "none,clobber,set_n,clobber,clobber")])
+
+(define_peephole2
+  [(match_scratch:QI 3 "d")
+   (set (match_operand:SI 0 "register_operand" "")
+	(ashiftrt:SI (match_operand:SI 1 "register_operand" "")
+		     (match_operand:QI 2 "const_int_operand" "")))]
+  ""
+  [(parallel [(set (match_dup 0) (ashiftrt:SI (match_dup 1) (match_dup 2)))
+	      (clobber (match_dup 3))])]
+  "if (!avr_peep2_scratch_safe (operands[3]))
+     FAIL;")
+
+(define_insn "*ashrsi3_const"
+  [(set (match_operand:SI 0 "register_operand" "=r,r,r,r")
+        (ashiftrt:SI (match_operand:SI 1 "register_operand" "0,0,r,0")
+                     (match_operand:QI 2 "const_int_operand" "L,P,O,n")))
+   (clobber (match_scratch:QI 3 "=X,X,X,&d"))]
+  "reload_completed"
+  "* return ashrsi3_out (insn, operands, NULL);"
+  [(set_attr "length" "0,4,4,10")
+   (set_attr "cc" "none,clobber,set_n,clobber")])
+
+;; >> >> >> >> >> >> >> >> >> >> >> >> >> >> >> >> >> >> >> >> >> >> >> >> >>
+;; logical shift right
+
+;; Logical (zero-filling) shift right; code emitted by the lshr*3_out
+;; helpers in avr.c.  Structure mirrors the ashl/ashr patterns above.
+(define_insn "lshrqi3"
+  [(set (match_operand:QI 0 "register_operand" "=r,r,r,r,!d,r,r")
+        (lshiftrt:QI (match_operand:QI 1 "register_operand" "0,0,0,0,0,0,0")
+                     (match_operand:QI 2 "general_operand" "r,L,P,K,n,n,Qm")))]
+  ""
+  "* return lshrqi3_out (insn, operands, NULL);"
+  [(set_attr "length" "5,0,1,2,4,6,9")
+   (set_attr "cc" "clobber,none,set_czn,set_czn,set_czn,set_czn,clobber")])
+
+(define_insn "lshrhi3"
+  [(set (match_operand:HI 0 "register_operand" "=r,r,r,r,r,r,r")
+        (lshiftrt:HI (match_operand:HI 1 "register_operand" "0,0,0,r,0,0,0")
+                     (match_operand:QI 2 "general_operand" "r,L,P,O,K,n,Qm")))]
+  ""
+  "* return lshrhi3_out (insn, operands, NULL);"
+  [(set_attr "length" "6,0,2,2,4,10,10")
+   (set_attr "cc" "clobber,none,clobber,clobber,clobber,clobber,clobber")])
+
+(define_insn "lshrsi3"
+  [(set (match_operand:SI 0 "register_operand" "=r,r,r,r,r,r,r")
+        (lshiftrt:SI (match_operand:SI 1 "register_operand" "0,0,0,r,0,0,0")
+                     (match_operand:QI 2 "general_operand" "r,L,P,O,K,n,Qm")))]
+  ""
+  "* return lshrsi3_out (insn, operands, NULL);"
+  [(set_attr "length" "8,0,4,4,8,10,12")
+   (set_attr "cc" "clobber,none,clobber,clobber,clobber,clobber,clobber")])
+
+;; Optimize if a scratch register from LD_REGS happens to be available.
+
+;; Constant-shift rewrite to the *_const variants, as for ashl/ashr.
+(define_peephole2
+  [(match_scratch:QI 3 "d")
+   (set (match_operand:HI 0 "register_operand" "")
+	(lshiftrt:HI (match_operand:HI 1 "register_operand" "")
+		     (match_operand:QI 2 "const_int_operand" "")))]
+  ""
+  [(parallel [(set (match_dup 0) (lshiftrt:HI (match_dup 1) (match_dup 2)))
+	      (clobber (match_dup 3))])]
+  "if (!avr_peep2_scratch_safe (operands[3]))
+     FAIL;")
+
+(define_insn "*lshrhi3_const"
+  [(set (match_operand:HI 0 "register_operand" "=r,r,r,r,r")
+        (lshiftrt:HI (match_operand:HI 1 "register_operand" "0,0,r,0,0")
+                     (match_operand:QI 2 "const_int_operand" "L,P,O,K,n")))
+   (clobber (match_scratch:QI 3 "=X,X,X,X,&d"))]
+  "reload_completed"
+  "* return lshrhi3_out (insn, operands, NULL);"
+  [(set_attr "length" "0,2,2,4,10")
+   (set_attr "cc" "none,clobber,clobber,clobber,clobber")])
+
+(define_peephole2
+  [(match_scratch:QI 3 "d")
+   (set (match_operand:SI 0 "register_operand" "")
+	(lshiftrt:SI (match_operand:SI 1 "register_operand" "")
+		     (match_operand:QI 2 "const_int_operand" "")))]
+  ""
+  [(parallel [(set (match_dup 0) (lshiftrt:SI (match_dup 1) (match_dup 2)))
+	      (clobber (match_dup 3))])]
+  "if (!avr_peep2_scratch_safe (operands[3]))
+     FAIL;")
+
+(define_insn "*lshrsi3_const"
+  [(set (match_operand:SI 0 "register_operand" "=r,r,r,r")
+        (lshiftrt:SI (match_operand:SI 1 "register_operand" "0,0,r,0")
+                     (match_operand:QI 2 "const_int_operand" "L,P,O,n")))
+   (clobber (match_scratch:QI 3 "=X,X,X,&d"))]
+  "reload_completed"
+  "* return lshrsi3_out (insn, operands, NULL);"
+  [(set_attr "length" "0,4,4,10")
+   (set_attr "cc" "none,clobber,clobber,clobber")])
+
+;; abs(x) abs(x) abs(x) abs(x) abs(x) abs(x) abs(x) abs(x) abs(x) abs(x) abs(x)
+;; abs
+
+;; abs(QI): negate only when bit 7 (the sign bit) is set.
+(define_insn "absqi2"
+  [(set (match_operand:QI 0 "register_operand" "=r")
+        (abs:QI (match_operand:QI 1 "register_operand" "0")))]
+  ""
+  "sbrc %0,7
+	neg %0"
+  [(set_attr "length" "2")
+   (set_attr "cc" "clobber")])
+
+
+;; abs(SF): clear the IEEE sign bit (bit 7 of the high byte %D0),
+;; either with ANDI (d-register) or via the T flag (CLT/BLD).
+(define_insn "abssf2"
+  [(set (match_operand:SF 0 "register_operand" "=d,r")
+        (abs:SF (match_operand:SF 1 "register_operand" "0,0")))]
+  ""
+  "@
+	andi %D0,0x7f
+	clt\;bld %D0,7"
+  [(set_attr "length" "1,2")
+   (set_attr "cc" "set_n,clobber")])
+
+;; 0 - x 0 - x 0 - x 0 - x 0 - x 0 - x 0 - x 0 - x 0 - x 0 - x 0 - x
+;; neg
+
+;; Two's-complement negation.
+(define_insn "negqi2"
+  [(set (match_operand:QI 0 "register_operand" "=r")
+        (neg:QI (match_operand:QI 1 "register_operand" "0")))]
+  ""
+  "neg %0"
+  [(set_attr "length" "1")
+   (set_attr "cc" "set_zn")])
+
+;; neg:HI.  Alternative 2 computes 0 - operand into a distinct (&r)
+;; destination; the first two negate in place.
+(define_insn "neghi2"
+  [(set (match_operand:HI 0 "register_operand" "=!d,r,&r")
+	(neg:HI (match_operand:HI 1 "register_operand" "0,0,r")))]
+  ""
+  "@
+	com %B0\;neg %A0\;sbci %B0,lo8(-1)
+	com %B0\;neg %A0\;sbc %B0,__zero_reg__\;inc %B0
+	clr %A0\;clr %B0\;sub %A0,%A1\;sbc %B0,%B1"
+  [(set_attr "length" "3,4,4")
+   (set_attr "cc" "set_czn,set_n,set_czn")])
+
+;; neg:SI.  Alternative 2 uses MOVW to clear the upper word when the
+;; MCU has it (mcu_have_movw), hence the conditional length attribute.
+(define_insn "negsi2"
+  [(set (match_operand:SI 0 "register_operand" "=!d,r,&r")
+	(neg:SI (match_operand:SI 1 "register_operand" "0,0,r")))]
+  ""
+  "@
+	com %D0\;com %C0\;com %B0\;neg %A0\;sbci %B0,lo8(-1)\;sbci %C0,lo8(-1)\;sbci %D0,lo8(-1)
+	com %D0\;com %C0\;com %B0\;com %A0\;adc %A0,__zero_reg__\;adc %B0,__zero_reg__\;adc %C0,__zero_reg__\;adc %D0,__zero_reg__
+	clr %A0\;clr %B0\;{clr %C0\;clr %D0|movw %C0,%A0}\;sub %A0,%A1\;sbc %B0,%B1\;sbc %C0,%C1\;sbc %D0,%D1"
+  [(set_attr_alternative "length"
+			 [(const_int 7)
+			  (const_int 8)
+			  (if_then_else (eq_attr "mcu_have_movw" "yes")
+					(const_int 7)
+					(const_int 8))])
+   (set_attr "cc" "set_czn,set_n,set_czn")])
+
+;; neg:SF: flip the IEEE sign bit (bit 7 of the high byte %D0).
+(define_insn "negsf2"
+  [(set (match_operand:SF 0 "register_operand" "=d,r")
+	(neg:SF (match_operand:SF 1 "register_operand" "0,0")))]
+  ""
+  "@
+	subi %D0,0x80
+	bst %D0,7\;com %D0\;bld %D0,7\;com %D0"
+  [(set_attr "length" "1,4")
+   (set_attr "cc" "set_n,set_n")])
+
+;; !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
+;; not
+
+;; One's complement (bitwise NOT): COM each byte in place.
+(define_insn "one_cmplqi2"
+  [(set (match_operand:QI 0 "register_operand" "=r")
+        (not:QI (match_operand:QI 1 "register_operand" "0")))]
+  ""
+  "com %0"
+  [(set_attr "length" "1")
+   (set_attr "cc" "set_czn")])
+
+(define_insn "one_cmplhi2"
+  [(set (match_operand:HI 0 "register_operand" "=r")
+        (not:HI (match_operand:HI 1 "register_operand" "0")))]
+  ""
+  "com %0
+	com %B0"
+  [(set_attr "length" "2")
+   (set_attr "cc" "set_n")])
+
+(define_insn "one_cmplsi2"
+  [(set (match_operand:SI 0 "register_operand" "=r")
+        (not:SI (match_operand:SI 1 "register_operand" "0")))]
+  ""
+  "com %0
+	com %B0
+	com %C0
+	com %D0"
+  [(set_attr "length" "4")
+   (set_attr "cc" "set_n")])
+
+;; xx<---x xx<---x xx<---x xx<---x xx<---x xx<---x xx<---x xx<---x xx<---x
+;; sign extend
+
+;; Sign extension: replicate the source's bit 7 into the new high
+;; byte(s) via CLR + SBRC + COM.  The second alternative copies the
+;; source into place first when it is a different register.
+(define_insn "extendqihi2"
+  [(set (match_operand:HI 0 "register_operand" "=r,r")
+        (sign_extend:HI (match_operand:QI 1 "register_operand" "0,*r")))]
+  ""
+  "@
+	clr %B0\;sbrc %0,7\;com %B0
+	mov %A0,%A1\;clr %B0\;sbrc %A0,7\;com %B0"
+  [(set_attr "length" "3,4")
+   (set_attr "cc" "set_n,set_n")])
+
+(define_insn "extendqisi2"
+  [(set (match_operand:SI 0 "register_operand" "=r,r")
+        (sign_extend:SI (match_operand:QI 1 "register_operand" "0,*r")))]
+  ""
+  "@
+	clr %B0\;sbrc %A0,7\;com %B0\;mov %C0,%B0\;mov %D0,%B0
+	mov %A0,%A1\;clr %B0\;sbrc %A0,7\;com %B0\;mov %C0,%B0\;mov %D0,%B0"
+  [(set_attr "length" "5,6")
+   (set_attr "cc" "set_n,set_n")])
+
+;; HI->SI sign extension; uses MOVW for the initial copy when available.
+(define_insn "extendhisi2"
+  [(set (match_operand:SI 0 "register_operand"               "=r,&r")
+        (sign_extend:SI (match_operand:HI 1 "register_operand" "0,*r")))]
+  ""
+  "@
+	clr %C0\;sbrc %B0,7\;com %C0\;mov %D0,%C0
+	{mov %A0,%A1\;mov %B0,%B1|movw %A0,%A1}\;clr %C0\;sbrc %B0,7\;com %C0\;mov %D0,%C0"
+  [(set_attr_alternative "length"
+			 [(const_int 4)
+			  (if_then_else (eq_attr "mcu_have_movw" "yes")
+					(const_int 5)
+					(const_int 6))])
+   (set_attr "cc" "set_n,set_n")])
+
+;; xx<---x xx<---x xx<---x xx<---x xx<---x xx<---x xx<---x xx<---x xx<---x
+;; zero extend
+
+;; Zero extension: CLR the new high byte(s); copy the source first when
+;; it is a different register (MOVW for the HI->SI copy when available).
+(define_insn "zero_extendqihi2"
+  [(set (match_operand:HI 0 "register_operand" "=r,r")
+        (zero_extend:HI (match_operand:QI 1 "register_operand" "0,*r")))]
+  ""
+  "@
+	clr %B0
+	mov %A0,%A1\;clr %B0"
+  [(set_attr "length" "1,2")
+   (set_attr "cc" "set_n,set_n")])
+
+(define_insn "zero_extendqisi2"
+  [(set (match_operand:SI 0 "register_operand" "=r,r")
+        (zero_extend:SI (match_operand:QI 1 "register_operand" "0,*r")))]
+  ""
+  "@
+	clr %B0\;clr %C0\;clr %D0
+	mov %A0,%A1\;clr %B0\;clr %C0\;clr %D0"
+  [(set_attr "length" "3,4")
+   (set_attr "cc" "set_n,set_n")])
+
+(define_insn "zero_extendhisi2"
+  [(set (match_operand:SI 0 "register_operand"               "=r,&r")
+        (zero_extend:SI (match_operand:HI 1 "register_operand" "0,*r")))]
+  ""
+  "@
+	clr %C0\;clr %D0
+	{mov %A0,%A1\;mov %B0,%B1|movw %A0,%A1}\;clr %C0\;clr %D0"
+  [(set_attr_alternative "length"
+			 [(const_int 2)
+			  (if_then_else (eq_attr "mcu_have_movw" "yes")
+					(const_int 3)
+					(const_int 4))])
+   (set_attr "cc" "set_n,set_n")])
+
+;;<=><=><=><=><=><=><=><=><=><=><=><=><=><=><=><=><=><=><=><=><=><=><=><=><=>
+;; compare
+
+;; Compare-with-zero (test) patterns setting cc0.  The *negated_tst*
+;; variants recognize a test of (neg X) and compare __zero_reg__
+;; against X instead, avoiding the explicit negation.
+(define_insn "tstqi"
+  [(set (cc0)
+        (match_operand:QI 0 "register_operand" "r"))]
+  ""
+  "tst %0"
+  [(set_attr "cc" "compare")
+   (set_attr "length" "1")])
+
+(define_insn "*negated_tstqi"
+  [(set (cc0)
+        (neg:QI (match_operand:QI 0 "register_operand" "r")))]
+  ""
+  "cp __zero_reg__,%0"
+  [(set_attr "cc" "compare")
+   (set_attr "length" "1")])
+
+;; HI test; out_tsthi picks a shorter sequence for w-class registers.
+(define_insn "tsthi"
+  [(set (cc0)
+        (match_operand:HI 0 "register_operand" "!w,r"))]
+  ""
+  "* return out_tsthi (insn,NULL);"
+[(set_attr "cc" "compare,compare")
+ (set_attr "length" "1,2")])
+
+(define_insn "*negated_tsthi"
+  [(set (cc0)
+        (neg:HI (match_operand:HI 0 "register_operand" "r")))]
+  ""
+  "cp __zero_reg__,%A0
+	cpc __zero_reg__,%B0"
+[(set_attr "cc" "compare")
+ (set_attr "length" "2")])
+
+(define_insn "tstsi"
+  [(set (cc0)
+        (match_operand:SI 0 "register_operand" "r"))]
+  ""
+  "* return out_tstsi (insn,NULL);"
+  [(set_attr "cc" "compare")
+   (set_attr "length" "4")])
+
+(define_insn "*negated_tstsi"
+  [(set (cc0)
+        (neg:SI (match_operand:SI 0 "register_operand" "r")))]
+  ""
+  "cp __zero_reg__,%A0
+	cpc __zero_reg__,%B0
+	cpc __zero_reg__,%C0
+	cpc __zero_reg__,%D0"
+  [(set_attr "cc" "compare")
+   (set_attr "length" "4")])
+
+
+;; QImode compare: CP for reg-reg, CPI for immediate (d-registers).
+(define_insn "cmpqi"
+  [(set (cc0)
+        (compare (match_operand:QI 0 "register_operand"  "r,d")
+		 (match_operand:QI 1 "nonmemory_operand" "r,i")))]
+  ""
+  "@
+	cp %0,%1
+	cpi %0,lo8(%1)"
+  [(set_attr "cc" "compare,compare")
+   (set_attr "length" "1,1")])
+
+;; Compare a sign-extended QI against a constant that fits in a signed
+;; byte (range enforced by the insn condition): a single CPI suffices.
+(define_insn "*cmpqi_sign_extend"
+  [(set (cc0)
+        (compare (sign_extend:HI
+		  (match_operand:QI 0 "register_operand"  "d"))
+		 (match_operand:HI 1 "const_int_operand" "n")))]
+  "INTVAL (operands[1]) >= -128 && INTVAL (operands[1]) <= 127"
+  "cpi %0,lo8(%1)"
+  [(set_attr "cc" "compare")
+   (set_attr "length" "1")])
+
+;; HImode compare.  Alternatives: 0 reg-reg; 1 small constant (SBIW when
+;; the register dies and is in ADDW_REGS, else CPI/CPC); 2 general
+;; constant on a d-register (SUBI/SBCI when the register dies, else via
+;; the scratch); 3/4 constant on a plain register, routed through the
+;; d-register scratch with LDI.
+(define_insn "cmphi"
+  [(set (cc0)
+	(compare (match_operand:HI 0 "register_operand"  "r,d,d,r,r")
+		 (match_operand:HI 1 "nonmemory_operand" "r,M,i,M,i")))
+   (clobber (match_scratch:QI 2 "=X,X,&d,&d,&d"))]
+  ""
+  "*{
+  switch (which_alternative)
+    {
+    case 0:
+      return (AS2 (cp,%A0,%A1) CR_TAB
+              AS2 (cpc,%B0,%B1));
+    case 1:
+      if (reg_unused_after (insn, operands[0])
+          && INTVAL (operands[1]) >= 0 && INTVAL (operands[1]) <= 63
+          && test_hard_reg_class (ADDW_REGS, operands[0]))
+        return AS2 (sbiw,%0,%1);
+       else
+        return (AS2 (cpi,%0,%1) CR_TAB
+                AS2 (cpc,%B0,__zero_reg__));
+    case 2:
+      if (reg_unused_after (insn, operands[0]))
+        return (AS2 (subi,%0,lo8(%1))  CR_TAB
+                AS2 (sbci,%B0,hi8(%1)));
+      else
+        return (AS2 (ldi, %2,hi8(%1))  CR_TAB
+	        AS2 (cpi, %A0,lo8(%1)) CR_TAB
+	        AS2 (cpc, %B0,%2));
+   case 3:
+      return (AS2 (ldi, %2,lo8(%1))  CR_TAB
+	      AS2 (cp, %A0,%2)       CR_TAB
+	      AS2 (cpc, %B0,__zero_reg__));
+
+   case 4:
+      return (AS2 (ldi, %2,lo8(%1))  CR_TAB
+              AS2 (cp, %A0,%2)       CR_TAB
+              AS2 (ldi, %2,hi8(%1)) CR_TAB
+	      AS2 (cpc, %B0,%2));
+    }
+  return \"bug\";
+}"
+  [(set_attr "cc" "compare,compare,compare,compare,compare")
+   (set_attr "length" "2,2,3,3,4")])
+
+
+;; SImode compare; same alternative scheme as cmphi extended to four
+;; bytes (CP/CPC chain, SBIW+CPC, SUBI/SBCI when dead, or LDI+CPC via
+;; the d-register scratch).
+(define_insn "cmpsi"
+  [(set (cc0)
+	(compare (match_operand:SI 0 "register_operand"  "r,d,d,r,r")
+		 (match_operand:SI 1 "nonmemory_operand" "r,M,i,M,i")))
+   (clobber (match_scratch:QI 2 "=X,X,&d,&d,&d"))]
+  ""
+  "*{
+  switch (which_alternative)
+    {
+    case 0:
+      return (AS2 (cp,%A0,%A1) CR_TAB
+              AS2 (cpc,%B0,%B1) CR_TAB
+	      AS2 (cpc,%C0,%C1) CR_TAB
+	      AS2 (cpc,%D0,%D1));
+    case 1:
+      if (reg_unused_after (insn, operands[0])
+          && INTVAL (operands[1]) >= 0 && INTVAL (operands[1]) <= 63
+          && test_hard_reg_class (ADDW_REGS, operands[0]))
+        return (AS2 (sbiw,%0,%1) CR_TAB
+                AS2 (cpc,%C0,__zero_reg__) CR_TAB
+                AS2 (cpc,%D0,__zero_reg__));
+      else
+        return (AS2 (cpi,%A0,lo8(%1))  CR_TAB
+                AS2 (cpc,%B0,__zero_reg__) CR_TAB
+                AS2 (cpc,%C0,__zero_reg__) CR_TAB
+                AS2 (cpc,%D0,__zero_reg__));
+    case 2:
+      if (reg_unused_after (insn, operands[0]))
+        return (AS2 (subi,%A0,lo8(%1))  CR_TAB
+                AS2 (sbci,%B0,hi8(%1))  CR_TAB
+                AS2 (sbci,%C0,hlo8(%1)) CR_TAB
+                AS2 (sbci,%D0,hhi8(%1)));
+      else
+        return (AS2 (cpi, %A0,lo8(%1))   CR_TAB
+	        AS2 (ldi, %2,hi8(%1))  CR_TAB
+	        AS2 (cpc, %B0,%2)       CR_TAB
+	        AS2 (ldi, %2,hlo8(%1)) CR_TAB
+	        AS2 (cpc, %C0,%2)       CR_TAB
+	        AS2 (ldi, %2,hhi8(%1)) CR_TAB
+	        AS2 (cpc, %D0,%2));
+    case 3:
+        return (AS2 (ldi,%2,lo8(%1))        CR_TAB
+                AS2 (cp,%A0,%2)             CR_TAB
+                AS2 (cpc,%B0,__zero_reg__)  CR_TAB
+                AS2 (cpc,%C0,__zero_reg__)  CR_TAB
+                AS2 (cpc,%D0,__zero_reg__));
+    case 4:
+      return (AS2 (ldi, %2,lo8(%1))   CR_TAB
+              AS2 (cp, %A0,%2)        CR_TAB
+	      AS2 (ldi, %2,hi8(%1))  CR_TAB
+	      AS2 (cpc, %B0,%2)       CR_TAB
+	      AS2 (ldi, %2,hlo8(%1)) CR_TAB
+	      AS2 (cpc, %C0,%2)       CR_TAB
+	      AS2 (ldi, %2,hhi8(%1)) CR_TAB
+	      AS2 (cpc, %D0,%2));
+    }
+  return \"bug\";
+}"
+  [(set_attr "cc" "compare,compare,compare,compare,compare")
+   (set_attr "length" "4,4,7,5,8")])
+
+;; ----------------------------------------------------------------------
+;; JUMP INSTRUCTIONS
+;; ----------------------------------------------------------------------
+;; Conditional jump instructions
+
+;; Standard conditional-branch expanders on cc0.  Each simply emits the
+;; if_then_else RTL; the branch/difficult_branch insns below match it.
+(define_expand "beq"
+  [(set (pc)
+        (if_then_else (eq (cc0) (const_int 0))
+                      (label_ref (match_operand 0 "" ""))
+                      (pc)))]
+  ""
+  "")
+
+(define_expand "bne"
+  [(set (pc)
+        (if_then_else (ne (cc0) (const_int 0))
+                      (label_ref (match_operand 0 "" ""))
+                      (pc)))]
+  ""
+  "")
+
+(define_expand "bge"
+  [(set (pc)
+        (if_then_else (ge (cc0) (const_int 0))
+                      (label_ref (match_operand 0 "" ""))
+                      (pc)))]
+  ""
+  "")
+
+(define_expand "bgeu"
+  [(set (pc)
+        (if_then_else (geu (cc0) (const_int 0))
+                      (label_ref (match_operand 0 "" ""))
+                      (pc)))]
+  ""
+  "")
+
+(define_expand "blt"
+  [(set (pc)
+        (if_then_else (lt (cc0) (const_int 0))
+                      (label_ref (match_operand 0 "" ""))
+                      (pc)))]
+  ""
+  "")
+
+(define_expand "bltu"
+  [(set (pc)
+        (if_then_else (ltu (cc0) (const_int 0))
+                      (label_ref (match_operand 0 "" ""))
+                      (pc)))]
+  ""
+  "")
+
+
+
+/****************************************************************
+ The AVR does not have the following conditional jumps: LE, LEU,
+ GT, GTU.  Convert them all to proper jumps.
+*****************************************************************/
+
+;; LE/LEU/GT/GTU expanders: matched by the difficult_branch insn, which
+;; synthesizes these conditions from the branches the AVR does have.
+(define_expand "ble"
+  [(set (pc)
+        (if_then_else (le (cc0) (const_int 0))
+                      (label_ref (match_operand 0 "" ""))
+                      (pc)))]
+  ""
+  "")
+
+(define_expand "bleu"
+  [(set (pc)
+        (if_then_else (leu (cc0) (const_int 0))
+                      (label_ref (match_operand 0 "" ""))
+                      (pc)))]
+  ""
+  "")
+
+(define_expand "bgt"
+  [(set (pc)
+        (if_then_else (gt (cc0) (const_int 0))
+                      (label_ref (match_operand 0 "" ""))
+                      (pc)))]
+  ""
+  "")
+
+(define_expand "bgtu"
+  [(set (pc)
+        (if_then_else (gtu (cc0) (const_int 0))
+                      (label_ref (match_operand 0 "" ""))
+                      (pc)))]
+  ""
+  "")
+
+;; Test a single bit in a QI/HI/SImode register.
+;; Output comes from avr_out_sbxx_branch; the length attribute picks 2
+;; words when the target label is within rjmp/branch range (+-2046) or
+;; the MCU lacks JMP (mcu_mega \"no\"), else 4.
+(define_insn "*sbrx_branch"
+  [(set (pc)
+        (if_then_else
+	 (match_operator 0 "eqne_operator"
+			 [(zero_extract
+			   (match_operand:QI 1 "register_operand" "r")
+			   (const_int 1)
+			   (match_operand 2 "const_int_operand" "n"))
+			  (const_int 0)])
+	 (label_ref (match_operand 3 "" ""))
+	 (pc)))]
+  ""
+  "* return avr_out_sbxx_branch (insn, operands);"
+  [(set (attr "length")
+	(if_then_else (and (ge (minus (pc) (match_dup 3)) (const_int -2046))
+			   (le (minus (pc) (match_dup 3)) (const_int 2046)))
+		      (const_int 2)
+		      (if_then_else (eq_attr "mcu_mega" "no")
+				    (const_int 2)
+				    (const_int 4))))
+   (set_attr "cc" "clobber")])
+
+;; Same bit-test branch, matched as (and:HI reg single-bit-mask) == 0.
+(define_insn "*sbrx_and_branchhi"
+  [(set (pc)
+        (if_then_else
+	 (match_operator 0 "eqne_operator"
+			 [(and:HI
+			   (match_operand:HI 1 "register_operand" "r")
+			   (match_operand:HI 2 "single_one_operand" "n"))
+			  (const_int 0)])
+	 (label_ref (match_operand 3 "" ""))
+	 (pc)))]
+  ""
+  "* return avr_out_sbxx_branch (insn, operands);"
+  [(set (attr "length")
+	(if_then_else (and (ge (minus (pc) (match_dup 3)) (const_int -2046))
+			   (le (minus (pc) (match_dup 3)) (const_int 2046)))
+		      (const_int 2)
+		      (if_then_else (eq_attr "mcu_mega" "no")
+				    (const_int 2)
+				    (const_int 4))))
+   (set_attr "cc" "clobber")])
+
+;; SImode variant of the single-bit AND branch.
+(define_insn "*sbrx_and_branchsi"
+  [(set (pc)
+        (if_then_else
+	 (match_operator 0 "eqne_operator"
+			 [(and:SI
+			   (match_operand:SI 1 "register_operand" "r")
+			   (match_operand:SI 2 "single_one_operand" "n"))
+			  (const_int 0)])
+	 (label_ref (match_operand 3 "" ""))
+	 (pc)))]
+  ""
+  "* return avr_out_sbxx_branch (insn, operands);"
+  [(set (attr "length")
+	(if_then_else (and (ge (minus (pc) (match_dup 3)) (const_int -2046))
+			   (le (minus (pc) (match_dup 3)) (const_int 2046)))
+		      (const_int 2)
+		      (if_then_else (eq_attr "mcu_mega" "no")
+				    (const_int 2)
+				    (const_int 4))))
+   (set_attr "cc" "clobber")])
+
+;; Convert sign tests to bit 7/15/31 tests that match the above insns.
+;; Each peephole pairs a cc0 test with a ge/lt branch and rewrites it as
+;; a single sign-bit test: ge -> branch if the sign bit is clear (eq 0),
+;; lt -> branch if it is set (ne 0).
+(define_peephole2
+  [(set (cc0) (match_operand:QI 0 "register_operand" ""))
+   (set (pc) (if_then_else (ge (cc0) (const_int 0))
+			   (label_ref (match_operand 1 "" ""))
+			   (pc)))]
+  ""
+  [(set (pc) (if_then_else (eq (zero_extract (match_dup 0)
+					     (const_int 1)
+					     (const_int 7))
+			       (const_int 0))
+			   (label_ref (match_dup 1))
+			   (pc)))]
+  "")
+
+(define_peephole2
+  [(set (cc0) (match_operand:QI 0 "register_operand" ""))
+   (set (pc) (if_then_else (lt (cc0) (const_int 0))
+			   (label_ref (match_operand 1 "" ""))
+			   (pc)))]
+  ""
+  [(set (pc) (if_then_else (ne (zero_extract (match_dup 0)
+					     (const_int 1)
+					     (const_int 7))
+			       (const_int 0))
+			   (label_ref (match_dup 1))
+			   (pc)))]
+  "")
+
+;; HI: test bit 15 via an AND with 0x8000 (written as -32768).
+(define_peephole2
+  [(set (cc0) (match_operand:HI 0 "register_operand" ""))
+   (set (pc) (if_then_else (ge (cc0) (const_int 0))
+			   (label_ref (match_operand 1 "" ""))
+			   (pc)))]
+  ""
+  [(set (pc) (if_then_else (eq (and:HI (match_dup 0) (const_int -32768))
+			       (const_int 0))
+			   (label_ref (match_dup 1))
+			   (pc)))]
+  "")
+
+(define_peephole2
+  [(set (cc0) (match_operand:HI 0 "register_operand" ""))
+   (set (pc) (if_then_else (lt (cc0) (const_int 0))
+			   (label_ref (match_operand 1 "" ""))
+			   (pc)))]
+  ""
+  [(set (pc) (if_then_else (ne (and:HI (match_dup 0) (const_int -32768))
+			       (const_int 0))
+			   (label_ref (match_dup 1))
+			   (pc)))]
+  "")
+
+;; SI: test bit 31; the mask 0x80000000 is built as -2147483647 - 1 to
+;; avoid overflowing a 32-bit signed literal.
+(define_peephole2
+  [(set (cc0) (match_operand:SI 0 "register_operand" ""))
+   (set (pc) (if_then_else (ge (cc0) (const_int 0))
+			   (label_ref (match_operand 1 "" ""))
+			   (pc)))]
+  ""
+  [(set (pc) (if_then_else (eq (and:SI (match_dup 0) (match_dup 2))
+			       (const_int 0))
+			   (label_ref (match_dup 1))
+			   (pc)))]
+  "operands[2] = GEN_INT (-2147483647 - 1);")
+
+(define_peephole2
+  [(set (cc0) (match_operand:SI 0 "register_operand" ""))
+   (set (pc) (if_then_else (lt (cc0) (const_int 0))
+			   (label_ref (match_operand 1 "" ""))
+			   (pc)))]
+  ""
+  [(set (pc) (if_then_else (ne (and:SI (match_dup 0) (match_dup 2))
+			       (const_int 0))
+			   (label_ref (match_dup 1))
+			   (pc)))]
+  "operands[2] = GEN_INT (-2147483647 - 1);")
+
+;; ************************************************************************
+;; Implementation of conditional jumps here.
+;; Compare with 0 (test) jumps
+;; ************************************************************************
+
+;; Conditional branch on cc0; ret_cond_branch emits the code, and
+;; avr_jump_mode decides between short and long branch forms.
+(define_insn "branch"
+  [(set (pc)
+        (if_then_else (match_operator 1 "simple_comparison_operator"
+                        [(cc0)
+                         (const_int 0)])
+                      (label_ref (match_operand 0 "" ""))
+                      (pc)))]
+  ""
+  "*
+   return ret_cond_branch (operands[1], avr_jump_mode (operands[0],insn), 0);"
+  [(set_attr "type" "branch")
+   (set_attr "cc" "clobber")])
+
+;; Branch on a condition (LE/LEU/GT/GTU) the AVR cannot test directly;
+;; ret_cond_branch synthesizes it.
+(define_insn "difficult_branch"
+  [(set (pc)
+        (if_then_else (match_operator 1 "difficult_comparison_operator"
+                        [(cc0)
+                         (const_int 0)])
+                      (label_ref (match_operand 0 "" ""))
+                      (pc)))]
+  ""
+  "*
+   return ret_cond_branch (operands[1], avr_jump_mode (operands[0],insn), 0);"
+  [(set_attr "type" "branch1")
+   (set_attr "cc" "clobber")])
+
+;; revers branch
+
+;; Reversed forms: fall through on the condition, branch otherwise
+;; (final argument 1 to ret_cond_branch selects the reversal).
+(define_insn "rvbranch"
+  [(set (pc)
+        (if_then_else (match_operator 1 "simple_comparison_operator"
+		                      [(cc0)
+                                       (const_int 0)])
+                      (pc)
+                      (label_ref (match_operand 0 "" ""))))]
+  ""
+  "*
+   return ret_cond_branch (operands[1], avr_jump_mode (operands[0], insn), 1);"
+  [(set_attr "type" "branch1")
+   (set_attr "cc" "clobber")])
+
+(define_insn "difficult_rvbranch"
+  [(set (pc)
+        (if_then_else (match_operator 1 "difficult_comparison_operator"
+		                      [(cc0)
+                                       (const_int 0)])
+                      (pc)
+                      (label_ref (match_operand 0 "" ""))))]
+  ""
+  "*
+   return ret_cond_branch (operands[1], avr_jump_mode (operands[0], insn), 1);"
+  [(set_attr "type" "branch")
+   (set_attr "cc" "clobber")])
+
+;; **************************************************************************
+;; Unconditional and other jump instructions.
+
+;; Unconditional jump: RJMP when the label is within +-2047 words (or
+;; the device has no JMP), otherwise the 2-word JMP on mega devices.
+(define_insn "jump"
+  [(set (pc)
+        (label_ref (match_operand 0 "" "")))]
+  ""
+  "*{
+  if (AVR_MEGA && get_attr_length (insn) != 1)
+    return AS1 (jmp,%0);
+  return AS1 (rjmp,%0);
+}"
+  [(set (attr "length")
+	(if_then_else (and (ge (minus (pc) (match_dup 0)) (const_int -2047))
+			   (le (minus (pc) (match_dup 0)) (const_int 2047)))
+		      (const_int 1)
+		      (const_int 2)))
+   (set_attr "cc" "none")])
+
+;; call
+
+;; Call expanders.  The size operand (1 resp. 2) is ignored on the AVR;
+;; the call_insn / call_value_insn patterns below do the real work.
+(define_expand "call"
+  [(call (match_operand:HI 0 "call_insn_operand" "")
+         (match_operand:HI 1 "general_operand" ""))]
+  ;; Operand 1 not used on the AVR.
+  ""
+  "")
+
+;; call value
+
+(define_expand "call_value"
+  [(set (match_operand 0 "register_operand" "")
+        (call (match_operand:HI 1 "call_insn_operand" "")
+              (match_operand:HI 2 "general_operand" "")))]
+  ;; Operand 2 not used on the AVR.
+  ""
+  "")
+
+;; Call through a register or constant address.  Alternatives:
+;; 0 address already in Z -> ICALL; 1 any register -> copy to Z (MOVW
+;; when available) + ICALL; 2 symbol -> CALL/RCALL (%~ picks it);
+;; 3 numeric address -> LDI into Z + ICALL.
+(define_insn "call_insn"
+  [(call (mem:HI (match_operand:HI 0 "nonmemory_operand" "!z,*r,s,n"))
+         (match_operand:HI 1 "general_operand" "X,X,X,X"))]
+;; No need to save the Z register (r30/r31): it is call-used on the AVR.
+  ;; Operand 1 not used on the AVR.
+  "(register_operand (operands[0], HImode) || CONSTANT_P (operands[0]))"
+  "*{
+  if (which_alternative==0)
+     return \"icall\";
+  else if (which_alternative==1)
+    {
+      if (AVR_HAVE_MOVW)
+        return (AS2 (movw, r30, %0) CR_TAB
+                \"icall\");
+      else
+        return (AS2 (mov, r30, %A0) CR_TAB
+                AS2 (mov, r31, %B0) CR_TAB
+		\"icall\");
+    }
+  else if (which_alternative==2)
+    return AS1(%~call,%c0);
+  return (AS2 (ldi,r30,lo8(%0)) CR_TAB
+          AS2 (ldi,r31,hi8(%0)) CR_TAB
+          \"icall\");
+}"
+  [(set_attr "cc" "clobber,clobber,clobber,clobber")
+   (set_attr_alternative "length"
+			 [(const_int 1)
+			  (if_then_else (eq_attr "mcu_have_movw" "yes")
+					(const_int 2)
+					(const_int 3))
+			  (if_then_else (eq_attr "mcu_mega" "yes")
+					(const_int 2)
+					(const_int 1))
+			  (const_int 3)])])
+
+;; Value-returning variant of call_insn; same four alternatives, with
+;; the callee in operand 1 and the result register in operand 0.
+;; NOTE(review): the insn condition tests operands[0] (the result
+;; register) rather than the callee operands[1]; this looks copied from
+;; call_insn and is trivially true given the "register_operand"
+;; predicate on operand 0 - confirm before changing.
+(define_insn "call_value_insn"
+  [(set (match_operand 0 "register_operand" "=r,r,r,r")
+        (call (mem:HI (match_operand:HI 1 "nonmemory_operand" "!z,*r,s,n"))
+;; Z (r30/r31) need not be saved around the call - it is a call-used pair.
+              (match_operand:HI 2 "general_operand" "X,X,X,X")))]
+  ;; Operand 2 (size of arguments) is not used on the AVR.
+  "(register_operand (operands[0], VOIDmode) || CONSTANT_P (operands[0]))"
+  "*{
+  if (which_alternative==0)
+    return \"icall\";
+  else if (which_alternative==1)
+    {
+      if (AVR_HAVE_MOVW)
+        return (AS2 (movw, r30, %1) CR_TAB
+                \"icall\");
+      else
+        return (AS2 (mov, r30, %A1) CR_TAB
+                AS2 (mov, r31, %B1) CR_TAB
+                \"icall\");
+    }
+  else if (which_alternative==2)
+    return AS1(%~call,%c1);
+  return (AS2 (ldi, r30, lo8(%1)) CR_TAB
+          AS2 (ldi, r31, hi8(%1)) CR_TAB
+          \"icall\");
+}"
+  [(set_attr "cc" "clobber,clobber,clobber,clobber")
+   (set_attr_alternative "length"
+                         [(const_int 1)
+                          (if_then_else (eq_attr "mcu_have_movw" "yes")
+                                        (const_int 2)
+                                        (const_int 3))
+                          (if_then_else (eq_attr "mcu_mega" "yes")
+                                        (const_int 2)
+                                        (const_int 1))
+                          (const_int 3)])])
+
+;; Bare "ret" epilogue - only valid after reload, when
+;; avr_simple_epilogue () has proved nothing needs restoring.
+(define_insn "return"
+  [(return)]
+  "reload_completed && avr_simple_epilogue ()"
+  "ret"
+  [(set_attr "cc" "none")
+   (set_attr "length" "1")])
+
+;; One-word no-operation instruction.
+(define_insn "nop"
+  [(const_int 0)]
+  ""
+  "nop"
+  [(set_attr "cc" "none")
+   (set_attr "length" "1")])
+
+; Indirect jump: "ijmp" when the target is already in Z; otherwise
+; push the address bytes and "ret" to it (any register pair).
+(define_insn "indirect_jump"
+  [(set (pc) (match_operand:HI 0 "register_operand" "!z,*r"))]
+  ""
+  "@
+	ijmp
+	push %A0\;push %B0\;ret"
+  [(set_attr "length" "1,3")
+   (set_attr "cc" "none,none")])
+
+;; table jump
+
+;; Table made from "rjmp" instructions for <=8K devices: jumping into
+;; the table executes the selected "rjmp".  Operand 0 is the entry
+;; address (clobbered); operand 1 is the table label (kept live).
+(define_insn "*tablejump_rjmp"
+  [(set (pc) (unspec:HI [(match_operand:HI 0 "register_operand" "!z,*r")]
+                        UNSPEC_INDEX_JMP))
+   (use (label_ref (match_operand 1 "" "")))
+   (clobber (match_dup 0))]
+  "!AVR_MEGA"
+  "@
+	ijmp
+	push %A0\;push %B0\;ret"
+  [(set_attr "length" "1,3")
+   (set_attr "cc" "none,none")])
+
+;; Not a prologue, but similar idea - move the common piece of code
+;; (the table-entry fetch and dispatch) into libgcc's __tablejump2__.
+(define_insn "*tablejump_lib"
+  [(set (pc) (unspec:HI [(match_operand:HI 0 "register_operand" "z")]
+                        UNSPEC_INDEX_JMP))
+   (use (label_ref (match_operand 1 "" "")))
+   (clobber (match_dup 0))]
+  "AVR_MEGA && TARGET_CALL_PROLOGUES"
+  "jmp __tablejump2__"
+  [(set_attr "length" "2")
+   (set_attr "cc" "clobber")])
+
+;; Enhanced-core dispatch: double the word index into a byte offset,
+;; fetch the 2-byte entry with post-increment "lpm Z+", and "ijmp".
+(define_insn "*tablejump_enh"
+  [(set (pc) (unspec:HI [(match_operand:HI 0 "register_operand" "z")]
+                        UNSPEC_INDEX_JMP))
+   (use (label_ref (match_operand 1 "" "")))
+   (clobber (match_dup 0))]
+  "AVR_MEGA && AVR_ENHANCED"
+  "lsl r30
+	rol r31
+	lpm __tmp_reg__,Z+
+	lpm r31,Z
+	mov r30,__tmp_reg__
+	ijmp"
+  [(set_attr "length" "6")
+   (set_attr "cc" "clobber")])
+
+;; Plain-core dispatch: fetch both entry bytes via r0 ("lpm" without
+;; operands), push them, and "ret" to the pushed address.
+(define_insn "*tablejump"
+  [(set (pc) (unspec:HI [(match_operand:HI 0 "register_operand" "z")]
+                        UNSPEC_INDEX_JMP))
+   (use (label_ref (match_operand 1 "" "")))
+   (clobber (match_dup 0))]
+  "AVR_MEGA"
+  "lsl r30
+	rol r31
+	lpm
+	inc r30
+	push r0
+	lpm
+	push r0
+	ret"
+  [(set_attr "length" "8")
+   (set_attr "cc" "clobber")])
+
+;; Switch dispatch.  Operands: 0 = SImode index (low half used),
+;; 1 = lower bound, 2 = range (upper - lower), 3 = table label,
+;; 4 = default label; operand 6 is a fresh HI scratch.  Subtract the
+;; lower bound, branch to the default on out-of-range (unsigned
+;; compare), add the table base, then index-jump into the table.
+(define_expand "casesi"
+  [(set (match_dup 6)
+        (minus:HI (subreg:HI (match_operand:SI 0 "register_operand" "") 0)
+                  (match_operand:HI 1 "register_operand" "")))
+   (parallel [(set (cc0)
+                   (compare (match_dup 6)
+                            (match_operand:HI 2 "register_operand" "")))
+              (clobber (match_scratch:QI 9 ""))])
+
+   (set (pc)
+        (if_then_else (gtu (cc0)
+                           (const_int 0))
+                      (label_ref (match_operand 4 "" ""))
+                      (pc)))
+
+   (set (match_dup 6)
+        (plus:HI (match_dup 6) (label_ref (match_operand:HI 3 "" ""))))
+
+   (parallel [(set (pc) (unspec:HI [(match_dup 6)] UNSPEC_INDEX_JMP))
+              (use (label_ref (match_dup 3)))
+              (clobber (match_dup 6))])]
+  ""
+  "
+{
+  operands[6] = gen_reg_rtx (HImode);
+}")
+
+
+;; ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+;; This instruction sets Z flag
+
+;; Set the Z flag directly, i.e. make cc0 read as "compared equal to 0".
+(define_insn "sez"
+  [(set (cc0) (const_int 0))]
+  ""
+  "sez"
+  [(set_attr "length" "1")
+   (set_attr "cc" "compare")])
+
+;; Clear/set/test a single bit in I/O address space.
+
+;; AND with a single-zero mask -> "cbi".  The I/O address is the memory
+;; address minus the 0x20 register-file offset; operand 2 is computed
+;; as the bit number of the cleared bit.
+(define_insn "*cbi"
+  [(set (mem:QI (match_operand 0 "low_io_address_operand" "n"))
+        (and:QI (mem:QI (match_dup 0))
+                (match_operand:QI 1 "single_zero_operand" "n")))]
+  "(optimize > 0)"
+{
+  operands[2] = GEN_INT (exact_log2 (~INTVAL (operands[1]) & 0xff));
+  return AS2 (cbi,%0-0x20,%2);
+}
+  [(set_attr "length" "1")
+   (set_attr "cc" "none")])
+
+;; OR with a single-one mask -> "sbi"; operand 2 is the bit number of
+;; the set bit, the I/O address is %0 minus the 0x20 offset.
+(define_insn "*sbi"
+  [(set (mem:QI (match_operand 0 "low_io_address_operand" "n"))
+        (ior:QI (mem:QI (match_dup 0))
+                (match_operand:QI 1 "single_one_operand" "n")))]
+  "(optimize > 0)"
+{
+  operands[2] = GEN_INT (exact_log2 (INTVAL (operands[1]) & 0xff));
+  return AS2 (sbi,%0-0x20,%2);
+}
+  [(set_attr "length" "1")
+   (set_attr "cc" "none")])
+
+;; Lower half of the I/O space - use sbic/sbis directly.
+;; Branch on a single I/O bit; output is produced by
+;; avr_out_sbxx_branch.  Length grows when the label is out of the
+;; relative branch range (and again on MEGA, where "jmp" is 2 words).
+(define_insn "*sbix_branch"
+  [(set (pc)
+        (if_then_else
+         (match_operator 0 "eqne_operator"
+                         [(zero_extract
+                           (mem:QI (match_operand 1 "low_io_address_operand" "n"))
+                           (const_int 1)
+                           (match_operand 2 "const_int_operand" "n"))
+                          (const_int 0)])
+         (label_ref (match_operand 3 "" ""))
+         (pc)))]
+  "(optimize > 0)"
+  "* return avr_out_sbxx_branch (insn, operands);"
+  [(set (attr "length")
+        (if_then_else (and (ge (minus (pc) (match_dup 3)) (const_int -2046))
+                           (le (minus (pc) (match_dup 3)) (const_int 2046)))
+                      (const_int 2)
+                      (if_then_else (eq_attr "mcu_mega" "no")
+                                    (const_int 2)
+                                    (const_int 4))))
+   (set_attr "cc" "clobber")])
+
+;; Tests of bit 7 are pessimized to sign tests, so we need this too...
+;; Rewrite ge/lt-against-zero as a test of bit 7 and reuse the same
+;; output routine (operand 3 becomes the label, operand 2 the bit).
+(define_insn "*sbix_branch_bit7"
+  [(set (pc)
+        (if_then_else
+         (match_operator 0 "gelt_operator"
+                         [(mem:QI (match_operand 1 "low_io_address_operand" "n"))
+                          (const_int 0)])
+         (label_ref (match_operand 2 "" ""))
+         (pc)))]
+  "(optimize > 0)"
+{
+  operands[3] = operands[2];
+  operands[2] = GEN_INT (7);
+  return avr_out_sbxx_branch (insn, operands);
+}
+  [(set (attr "length")
+        (if_then_else (and (ge (minus (pc) (match_dup 2)) (const_int -2046))
+                           (le (minus (pc) (match_dup 2)) (const_int 2046)))
+                      (const_int 2)
+                      (if_then_else (eq_attr "mcu_mega" "no")
+                                    (const_int 2)
+                                    (const_int 4))))
+   (set_attr "cc" "clobber")])
+
+;; Upper half of the I/O space - read port to __tmp_reg__ and use sbrc/sbrs.
+;; One word longer than the low-I/O variants because of the extra load.
+(define_insn "*sbix_branch_tmp"
+  [(set (pc)
+        (if_then_else
+         (match_operator 0 "eqne_operator"
+                         [(zero_extract
+                           (mem:QI (match_operand 1 "high_io_address_operand" "n"))
+                           (const_int 1)
+                           (match_operand 2 "const_int_operand" "n"))
+                          (const_int 0)])
+         (label_ref (match_operand 3 "" ""))
+         (pc)))]
+  "(optimize > 0)"
+  "* return avr_out_sbxx_branch (insn, operands);"
+  [(set (attr "length")
+        (if_then_else (and (ge (minus (pc) (match_dup 3)) (const_int -2046))
+                           (le (minus (pc) (match_dup 3)) (const_int 2045)))
+                      (const_int 3)
+                      (if_then_else (eq_attr "mcu_mega" "no")
+                                    (const_int 3)
+                                    (const_int 5))))
+   (set_attr "cc" "clobber")])
+
+;; Bit-7 (sign-test) variant for the upper I/O half.
+(define_insn "*sbix_branch_tmp_bit7"
+  [(set (pc)
+        (if_then_else
+         (match_operator 0 "gelt_operator"
+                         [(mem:QI (match_operand 1 "high_io_address_operand" "n"))
+                          (const_int 0)])
+         (label_ref (match_operand 2 "" ""))
+         (pc)))]
+  "(optimize > 0)"
+{
+  operands[3] = operands[2];
+  operands[2] = GEN_INT (7);
+  return avr_out_sbxx_branch (insn, operands);
+}
+  [(set (attr "length")
+        (if_then_else (and (ge (minus (pc) (match_dup 2)) (const_int -2046))
+                           (le (minus (pc) (match_dup 2)) (const_int 2045)))
+                      (const_int 3)
+                      (if_then_else (eq_attr "mcu_mega" "no")
+                                    (const_int 3)
+                                    (const_int 5))))
+   (set_attr "cc" "clobber")])
+
+;; ************************* Peepholes ********************************
+
+;; Decrement-and-branch, SImode: fuse "x -= 1; compare x with -1;
+;; branch if not equal" into subi/sbc + brcc (the subtract's carry
+;; already tells us whether we wrapped past zero).
+(define_peephole
+  [(set (match_operand:SI 0 "d_register_operand" "")
+        (plus:SI (match_dup 0)
+                 (const_int -1)))
+   (parallel
+    [(set (cc0)
+          (compare (match_dup 0)
+                   (const_int -1)))
+     (clobber (match_operand:QI 1 "d_register_operand" ""))])
+   (set (pc)
+        (if_then_else (ne (cc0) (const_int 0))
+                      (label_ref (match_operand 2 "" ""))
+                      (pc)))]
+  ""
+  "*
+{
+  CC_STATUS_INIT;
+  if (test_hard_reg_class (ADDW_REGS, operands[0]))
+    output_asm_insn (AS2 (sbiw,%0,1) CR_TAB
+                     AS2 (sbc,%C0,__zero_reg__) CR_TAB
+                     AS2 (sbc,%D0,__zero_reg__) \"\\n\", operands);
+  else
+    output_asm_insn (AS2 (subi,%A0,1) CR_TAB
+                     AS2 (sbc,%B0,__zero_reg__) CR_TAB
+                     AS2 (sbc,%C0,__zero_reg__) CR_TAB
+                     AS2 (sbc,%D0,__zero_reg__) \"\\n\", operands);
+  /* Pick the branch form by distance: brcc, brcs+rjmp, or brcs+jmp.  */
+  switch (avr_jump_mode (operands[2],insn))
+    {
+    case 1:
+      return AS1 (brcc,%2);
+    case 2:
+      return (AS1 (brcs,.+2) CR_TAB
+              AS1 (rjmp,%2));
+    }
+  return (AS1 (brcs,.+4) CR_TAB
+          AS1 (jmp,%2));
+}")
+
+;; Decrement-and-branch, HImode (compare against 65535, i.e. -1).
+(define_peephole
+  [(set (match_operand:HI 0 "d_register_operand" "")
+        (plus:HI (match_dup 0)
+                 (const_int -1)))
+   (parallel
+    [(set (cc0)
+          (compare (match_dup 0)
+                   (const_int 65535)))
+     (clobber (match_operand:QI 1 "d_register_operand" ""))])
+   (set (pc)
+        (if_then_else (ne (cc0) (const_int 0))
+                      (label_ref (match_operand 2 "" ""))
+                      (pc)))]
+  ""
+  "*
+{
+  CC_STATUS_INIT;
+  if (test_hard_reg_class (ADDW_REGS, operands[0]))
+    output_asm_insn (AS2 (sbiw,%0,1), operands);
+  else
+    output_asm_insn (AS2 (subi,%A0,1) CR_TAB
+                     AS2 (sbc,%B0,__zero_reg__) \"\\n\", operands);
+  switch (avr_jump_mode (operands[2],insn))
+    {
+    case 1:
+      return AS1 (brcc,%2);
+    case 2:
+      return (AS1 (brcs,.+2) CR_TAB
+              AS1 (rjmp,%2));
+    }
+  return (AS1 (brcs,.+4) CR_TAB
+          AS1 (jmp,%2));
+}")
+
+;; Decrement-and-branch, QImode; records the result in cc_status so a
+;; following compare against the same register can be elided.
+(define_peephole
+  [(set (match_operand:QI 0 "d_register_operand" "")
+        (plus:QI (match_dup 0)
+                 (const_int -1)))
+   (set (cc0)
+        (compare (match_dup 0)
+                 (const_int -1)))
+   (set (pc)
+        (if_then_else (ne (cc0) (const_int 0))
+                      (label_ref (match_operand 1 "" ""))
+                      (pc)))]
+  ""
+  "*
+{
+  CC_STATUS_INIT;
+  cc_status.value1 = operands[0];
+  cc_status.flags |= CC_OVERFLOW_UNUSABLE;
+  output_asm_insn (AS2 (subi,%A0,1), operands);
+  switch (avr_jump_mode (operands[1],insn))
+    {
+    case 1:
+      return AS1 (brcc,%1);
+    case 2:
+      return (AS1 (brcs,.+2) CR_TAB
+              AS1 (rjmp,%1));
+    }
+  return (AS1 (brcs,.+4) CR_TAB
+          AS1 (jmp,%1));
+}")
+
+;; Test-against-zero followed by a branch over exactly one insn:
+;; replace with "cpse" (compare, skip if equal).
+(define_peephole
+  [(set (cc0) (match_operand:QI 0 "register_operand" ""))
+   (set (pc)
+        (if_then_else (eq (cc0) (const_int 0))
+                      (label_ref (match_operand 1 "" ""))
+                      (pc)))]
+  "jump_over_one_insn_p (insn, operands[1])"
+  "cpse %0,__zero_reg__")
+
+;; Same, for a register-register compare.
+(define_peephole
+  [(set (cc0)
+        (compare (match_operand:QI 0 "register_operand" "")
+                 (match_operand:QI 1 "register_operand" "")))
+   (set (pc)
+        (if_then_else (eq (cc0) (const_int 0))
+                      (label_ref (match_operand 2 "" ""))
+                      (pc)))]
+  "jump_over_one_insn_p (insn, operands[2])"
+  "cpse %0,%1")
Index: t-avr
===================================================================
--- t-avr (nonexistent)
+++ t-avr (revision 154)
@@ -0,0 +1,119 @@
+LIB1ASMSRC = avr/libgcc.S
+LIB1ASMFUNCS = \
+ _mulqi3 \
+ _mulhi3 \
+ _mulsi3 \
+ _udivmodqi4 \
+ _divmodqi4 \
+ _udivmodhi4 \
+ _divmodhi4 \
+ _udivmodsi4 \
+ _divmodsi4 \
+ _prologue \
+ _epilogue \
+ _exit \
+ _cleanup \
+ _tablejump \
+ _copy_data \
+ _clear_bss \
+ _ctors \
+ _dtors
+
+# We do not have the DF type.
+# Most of the C functions in libgcc2 use almost all registers,
+# so use -mcall-prologues for smaller code size.
+TARGET_LIBGCC2_CFLAGS = -DDF=SF -Dinhibit_libc -mcall-prologues -Os
+
+fp-bit.c: $(srcdir)/config/fp-bit.c $(srcdir)/config/avr/t-avr
+ echo '#define FLOAT' > fp-bit.c
+ echo '#define FLOAT_ONLY' >> fp-bit.c
+ echo '#define CMPtype QItype' >> fp-bit.c
+ echo '#define DF SF' >> fp-bit.c
+ echo '#define DI SI' >> fp-bit.c
+ echo '#define FLOAT_BIT_ORDER_MISMATCH' >> fp-bit.c
+ echo '#define SMALL_MACHINE' >> fp-bit.c
+ echo 'typedef int QItype __attribute__ ((mode (QI)));' >> fp-bit.c
+ cat $(srcdir)/config/fp-bit.c >> fp-bit.c
+
+FPBIT = fp-bit.c
+
+MULTILIB_OPTIONS = mmcu=avr2/mmcu=avr25/mmcu=avr3/mmcu=avr4/mmcu=avr5
+MULTILIB_DIRNAMES = avr2 avr25 avr3 avr4 avr5
+
+# The many avr2 matches are not listed here - this is the default.
+MULTILIB_MATCHES = \
+ mmcu?avr25=mmcu?attiny13 \
+ mmcu?avr25=mmcu?attiny2313 \
+ mmcu?avr25=mmcu?attiny24 \
+ mmcu?avr25=mmcu?attiny44 \
+ mmcu?avr25=mmcu?attiny84 \
+ mmcu?avr25=mmcu?attiny25 \
+ mmcu?avr25=mmcu?attiny45 \
+ mmcu?avr25=mmcu?attiny85 \
+ mmcu?avr25=mmcu?attiny261 \
+ mmcu?avr25=mmcu?attiny461 \
+ mmcu?avr25=mmcu?attiny861 \
+ mmcu?avr25=mmcu?at86rf401 \
+ mmcu?avr3=mmcu?atmega103 \
+ mmcu?avr3=mmcu?atmega603 \
+ mmcu?avr3=mmcu?at43usb320 \
+ mmcu?avr3=mmcu?at43usb355 \
+ mmcu?avr3=mmcu?at76c711 \
+ mmcu?avr4=mmcu?atmega48 \
+ mmcu?avr4=mmcu?atmega8 \
+ mmcu?avr4=mmcu?atmega8515 \
+ mmcu?avr4=mmcu?atmega8535 \
+ mmcu?avr4=mmcu?atmega88 \
+ mmcu?avr4=mmcu?atmega8hva \
+ mmcu?avr4=mmcu?at90pwm1 \
+ mmcu?avr4=mmcu?at90pwm2 \
+ mmcu?avr4=mmcu?at90pwm3 \
+ mmcu?avr5=mmcu?atmega16 \
+ mmcu?avr5=mmcu?atmega161 \
+ mmcu?avr5=mmcu?atmega162 \
+ mmcu?avr5=mmcu?atmega163 \
+ mmcu?avr5=mmcu?atmega164p \
+ mmcu?avr5=mmcu?atmega165 \
+ mmcu?avr5=mmcu?atmega165p \
+ mmcu?avr5=mmcu?atmega168 \
+ mmcu?avr5=mmcu?atmega169 \
+ mmcu?avr5=mmcu?atmega169p \
+ mmcu?avr5=mmcu?atmega32 \
+ mmcu?avr5=mmcu?atmega323 \
+ mmcu?avr5=mmcu?atmega324p \
+ mmcu?avr5=mmcu?atmega325 \
+ mmcu?avr5=mmcu?atmega325p \
+ mmcu?avr5=mmcu?atmega3250 \
+ mmcu?avr5=mmcu?atmega3250p \
+ mmcu?avr5=mmcu?atmega329 \
+ mmcu?avr5=mmcu?atmega329p \
+ mmcu?avr5=mmcu?atmega3290 \
+ mmcu?avr5=mmcu?atmega3290p \
+ mmcu?avr5=mmcu?atmega406 \
+ mmcu?avr5=mmcu?atmega64 \
+ mmcu?avr5=mmcu?atmega640 \
+ mmcu?avr5=mmcu?atmega644 \
+ mmcu?avr5=mmcu?atmega644p \
+ mmcu?avr5=mmcu?atmega645 \
+ mmcu?avr5=mmcu?atmega6450 \
+ mmcu?avr5=mmcu?atmega649 \
+ mmcu?avr5=mmcu?atmega6490 \
+ mmcu?avr5=mmcu?atmega128 \
+ mmcu?avr5=mmcu?atmega1280 \
+ mmcu?avr5=mmcu?atmega1281 \
+ mmcu?avr5=mmcu?atmega16hva \
+ mmcu?avr5=mmcu?at90can32 \
+ mmcu?avr5=mmcu?at90can64 \
+ mmcu?avr5=mmcu?at90can128 \
+ mmcu?avr5=mmcu?at90usb82 \
+ mmcu?avr5=mmcu?at90usb162 \
+ mmcu?avr5=mmcu?at90usb646 \
+ mmcu?avr5=mmcu?at90usb647 \
+ mmcu?avr5=mmcu?at90usb1286 \
+ mmcu?avr5=mmcu?at90usb1287 \
+ mmcu?avr5=mmcu?at94k
+
+MULTILIB_EXCEPTIONS =
+
+LIBGCC = stmp-multilib
+INSTALL_LIBGCC = install-multilib
Index: avr.opt
===================================================================
--- avr.opt (nonexistent)
+++ avr.opt (revision 154)
@@ -0,0 +1,64 @@
+; Options for the ATMEL AVR port of the compiler.
+
+; Copyright (C) 2005, 2007 Free Software Foundation, Inc.
+;
+; This file is part of GCC.
+;
+; GCC is free software; you can redistribute it and/or modify it under
+; the terms of the GNU General Public License as published by the Free
+; Software Foundation; either version 3, or (at your option) any later
+; version.
+;
+; GCC is distributed in the hope that it will be useful, but WITHOUT ANY
+; WARRANTY; without even the implied warranty of MERCHANTABILITY or
+; FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
+; for more details.
+;
+; You should have received a copy of the GNU General Public License
+; along with GCC; see the file COPYING3. If not see
+; <http://www.gnu.org/licenses/>.
+
+mcall-prologues
+Target Report Mask(CALL_PROLOGUES)
+Use subroutines for function prologues and epilogues
+
+mmcu=
+Target RejectNegative Joined Var(avr_mcu_name) Init("avr2")
+-mmcu=MCU Select the target MCU
+
+mdeb
+Target Report Undocumented Mask(ALL_DEBUG)
+
+minit-stack=
+Target RejectNegative Joined Var(avr_init_stack) Init("__stack")
+-minit-stack=STACK Use STACK as the initial value of the stack pointer
+
+mint8
+Target Report Mask(INT8)
+Use an 8-bit 'int' type
+
+mno-interrupts
+Target Report RejectNegative Mask(NO_INTERRUPTS)
+Change the stack pointer without disabling interrupts
+
+mno-tablejump
+Target Report RejectNegative Mask(NO_TABLEJUMP)
+Do not generate tablejump insns
+
+morder1
+Target Report Undocumented Mask(ORDER_1)
+
+morder2
+Target Report Undocumented Mask(ORDER_2)
+
+mshort-calls
+Target Report Mask(SHORT_CALLS)
+Use rjmp/rcall (limited range) on >8K devices
+
+msize
+Target Report Mask(INSN_SIZE_DUMP)
+Output instruction sizes to the asm file
+
+mtiny-stack
+Target Report Mask(TINY_STACK)
+Change only the low 8 bits of the stack pointer
Index: avr.c
===================================================================
--- avr.c (nonexistent)
+++ avr.c (revision 154)
@@ -0,0 +1,6050 @@
+/* Subroutines for insn-output.c for ATMEL AVR micro controllers
+ Copyright (C) 1998, 1999, 2000, 2001, 2002, 2004, 2005, 2006, 2007
+ Free Software Foundation, Inc.
+ Contributed by Denis Chertykov (denisc@overta.ru)
+
+ This file is part of GCC.
+
+ GCC is free software; you can redistribute it and/or modify
+ it under the terms of the GNU General Public License as published by
+ the Free Software Foundation; either version 3, or (at your option)
+ any later version.
+
+ GCC is distributed in the hope that it will be useful,
+ but WITHOUT ANY WARRANTY; without even the implied warranty of
+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ GNU General Public License for more details.
+
+ You should have received a copy of the GNU General Public License
+ along with GCC; see the file COPYING3. If not see
+   <http://www.gnu.org/licenses/>.  */
+
+#include "config.h"
+#include "system.h"
+#include "coretypes.h"
+#include "tm.h"
+#include "rtl.h"
+#include "regs.h"
+#include "hard-reg-set.h"
+#include "real.h"
+#include "insn-config.h"
+#include "conditions.h"
+#include "insn-attr.h"
+#include "flags.h"
+#include "reload.h"
+#include "tree.h"
+#include "output.h"
+#include "expr.h"
+#include "toplev.h"
+#include "obstack.h"
+#include "function.h"
+#include "recog.h"
+#include "ggc.h"
+#include "tm_p.h"
+#include "target.h"
+#include "target-def.h"
+
+/* Maximal allowed offset for an address in the LD command */
+#define MAX_LD_OFFSET(MODE) (64 - (signed)GET_MODE_SIZE (MODE))
+
+static int avr_naked_function_p (tree);
+static int interrupt_function_p (tree);
+static int signal_function_p (tree);
+static int avr_regs_to_save (HARD_REG_SET *);
+static int sequent_regs_live (void);
+static const char *ptrreg_to_str (int);
+static const char *cond_string (enum rtx_code);
+static int avr_num_arg_regs (enum machine_mode, tree);
+static int out_adj_frame_ptr (FILE *, int);
+static int out_set_stack_ptr (FILE *, int, int);
+static RTX_CODE compare_condition (rtx insn);
+static int compare_sign_p (rtx insn);
+static tree avr_handle_progmem_attribute (tree *, tree, tree, int, bool *);
+static tree avr_handle_fndecl_attribute (tree *, tree, tree, int, bool *);
+static tree avr_handle_fntype_attribute (tree *, tree, tree, int, bool *);
+const struct attribute_spec avr_attribute_table[];
+static bool avr_assemble_integer (rtx, unsigned int, int);
+static void avr_file_start (void);
+static void avr_file_end (void);
+static void avr_output_function_prologue (FILE *, HOST_WIDE_INT);
+static void avr_output_function_epilogue (FILE *, HOST_WIDE_INT);
+static void avr_insert_attributes (tree, tree *);
+static void avr_asm_init_sections (void);
+static unsigned int avr_section_type_flags (tree, const char *, int);
+
+static void avr_reorg (void);
+static void avr_asm_out_ctor (rtx, int);
+static void avr_asm_out_dtor (rtx, int);
+static int avr_operand_rtx_cost (rtx, enum machine_mode, enum rtx_code);
+static bool avr_rtx_costs (rtx, int, int, int *);
+static int avr_address_cost (rtx);
+static bool avr_return_in_memory (tree, tree);
+
+/* Allocate registers from r25 to r8 for parameters for function calls. */
+#define FIRST_CUM_REG 26
+
+/* Temporary register RTX (gen_rtx_REG (QImode, TMP_REGNO)) */
+static GTY(()) rtx tmp_reg_rtx;
+
+/* Zeroed register RTX (gen_rtx_REG (QImode, ZERO_REGNO)) */
+static GTY(()) rtx zero_reg_rtx;
+
+/* AVR register names {"r0", "r1", ..., "r31"} */
+static const char *const avr_regnames[] = REGISTER_NAMES;
+
+/* This holds the last insn address. */
+static int last_insn_address = 0;
+
+/* Commands count in the compiled file */
+static int commands_in_file;
+
+/* Commands in the functions prologues in the compiled file */
+static int commands_in_prologues;
+
+/* Commands in the functions epilogues in the compiled file */
+static int commands_in_epilogues;
+
+/* Prologue/Epilogue size in words */
+static int prologue_size;
+static int epilogue_size;
+
+/* Size of all jump tables in the current function, in words. */
+static int jump_tables_size;
+
+/* Preprocessor macros to define depending on MCU type. */
+const char *avr_base_arch_macro;
+const char *avr_extra_arch_macro;
+
+section *progmem_section;
+
+/* More than 8K of program memory: use "call" and "jmp". */
+int avr_mega_p = 0;
+
+/* Enhanced core: use "movw", "mul", ... */
+int avr_enhanced_p = 0;
+
+/* Assembler only. */
+int avr_asm_only_p = 0;
+
+/* Core have 'MOVW' and 'LPM Rx,Z' instructions. */
+int avr_have_movw_lpmx_p = 0;
+
+struct base_arch_s {
+ int asm_only;
+ int enhanced;
+ int mega;
+ int have_movw_lpmx;
+ const char *const macro;
+};
+
+static const struct base_arch_s avr_arch_types[] = {
+ { 1, 0, 0, 0, NULL }, /* unknown device specified */
+ { 1, 0, 0, 0, "__AVR_ARCH__=1" },
+ { 0, 0, 0, 0, "__AVR_ARCH__=2" },
+ { 0, 0, 0, 1, "__AVR_ARCH__=25"},
+ { 0, 0, 1, 0, "__AVR_ARCH__=3" },
+ { 0, 1, 0, 1, "__AVR_ARCH__=4" },
+ { 0, 1, 1, 1, "__AVR_ARCH__=5" }
+};
+
+/* These names are used as the index into the avr_arch_types[] table
+ above. */
+
+enum avr_arch
+{
+ ARCH_UNKNOWN,
+ ARCH_AVR1,
+ ARCH_AVR2,
+ ARCH_AVR25,
+ ARCH_AVR3,
+ ARCH_AVR4,
+ ARCH_AVR5
+};
+
+struct mcu_type_s {
+ const char *const name;
+ int arch; /* index in avr_arch_types[] */
+ /* Must lie outside user's namespace. NULL == no macro. */
+ const char *const macro;
+};
+
+/* List of all known AVR MCU types - if updated, it has to be kept
+ in sync in several places (FIXME: is there a better way?):
+ - here
+ - avr.h (CPP_SPEC, LINK_SPEC, CRT_BINUTILS_SPECS)
+ - t-avr (MULTILIB_MATCHES)
+ - gas/config/tc-avr.c
+ - avr-libc */
+
+static const struct mcu_type_s avr_mcu_types[] = {
+ /* Classic, <= 8K. */
+ { "avr2", ARCH_AVR2, NULL },
+ { "at90s2313", ARCH_AVR2, "__AVR_AT90S2313__" },
+ { "at90s2323", ARCH_AVR2, "__AVR_AT90S2323__" },
+ { "at90s2333", ARCH_AVR2, "__AVR_AT90S2333__" },
+ { "at90s2343", ARCH_AVR2, "__AVR_AT90S2343__" },
+ { "attiny22", ARCH_AVR2, "__AVR_ATtiny22__" },
+ { "attiny26", ARCH_AVR2, "__AVR_ATtiny26__" },
+ { "at90s4414", ARCH_AVR2, "__AVR_AT90S4414__" },
+ { "at90s4433", ARCH_AVR2, "__AVR_AT90S4433__" },
+ { "at90s4434", ARCH_AVR2, "__AVR_AT90S4434__" },
+ { "at90s8515", ARCH_AVR2, "__AVR_AT90S8515__" },
+ { "at90c8534", ARCH_AVR2, "__AVR_AT90C8534__" },
+ { "at90s8535", ARCH_AVR2, "__AVR_AT90S8535__" },
+ /* Classic + MOVW, <= 8K. */
+ { "avr25", ARCH_AVR25, NULL },
+ { "attiny13", ARCH_AVR25, "__AVR_ATtiny13__" },
+ { "attiny2313", ARCH_AVR25, "__AVR_ATtiny2313__" },
+ { "attiny24", ARCH_AVR25, "__AVR_ATtiny24__" },
+ { "attiny44", ARCH_AVR25, "__AVR_ATtiny44__" },
+ { "attiny84", ARCH_AVR25, "__AVR_ATtiny84__" },
+ { "attiny25", ARCH_AVR25, "__AVR_ATtiny25__" },
+ { "attiny45", ARCH_AVR25, "__AVR_ATtiny45__" },
+ { "attiny85", ARCH_AVR25, "__AVR_ATtiny85__" },
+ { "attiny261", ARCH_AVR25, "__AVR_ATtiny261__" },
+ { "attiny461", ARCH_AVR25, "__AVR_ATtiny461__" },
+ { "attiny861", ARCH_AVR25, "__AVR_ATtiny861__" },
+ { "at86rf401", ARCH_AVR25, "__AVR_AT86RF401__" },
+ /* Classic, > 8K. */
+ { "avr3", ARCH_AVR3, NULL },
+ { "atmega103", ARCH_AVR3, "__AVR_ATmega103__" },
+ { "atmega603", ARCH_AVR3, "__AVR_ATmega603__" },
+ { "at43usb320", ARCH_AVR3, "__AVR_AT43USB320__" },
+ { "at43usb355", ARCH_AVR3, "__AVR_AT43USB355__" },
+ { "at76c711", ARCH_AVR3, "__AVR_AT76C711__" },
+ /* Enhanced, <= 8K. */
+ { "avr4", ARCH_AVR4, NULL },
+ { "atmega8", ARCH_AVR4, "__AVR_ATmega8__" },
+ { "atmega48", ARCH_AVR4, "__AVR_ATmega48__" },
+ { "atmega88", ARCH_AVR4, "__AVR_ATmega88__" },
+ { "atmega8515", ARCH_AVR4, "__AVR_ATmega8515__" },
+ { "atmega8535", ARCH_AVR4, "__AVR_ATmega8535__" },
+ { "atmega8hva", ARCH_AVR4, "__AVR_ATmega8HVA__" },
+ { "at90pwm1", ARCH_AVR4, "__AVR_AT90PWM1__" },
+ { "at90pwm2", ARCH_AVR4, "__AVR_AT90PWM2__" },
+ { "at90pwm3", ARCH_AVR4, "__AVR_AT90PWM3__" },
+ /* Enhanced, > 8K. */
+ { "avr5", ARCH_AVR5, NULL },
+ { "atmega16", ARCH_AVR5, "__AVR_ATmega16__" },
+ { "atmega161", ARCH_AVR5, "__AVR_ATmega161__" },
+ { "atmega162", ARCH_AVR5, "__AVR_ATmega162__" },
+ { "atmega163", ARCH_AVR5, "__AVR_ATmega163__" },
+ { "atmega164p", ARCH_AVR5, "__AVR_ATmega164P__" },
+ { "atmega165", ARCH_AVR5, "__AVR_ATmega165__" },
+ { "atmega165p", ARCH_AVR5, "__AVR_ATmega165P__" },
+ { "atmega168", ARCH_AVR5, "__AVR_ATmega168__" },
+ { "atmega169", ARCH_AVR5, "__AVR_ATmega169__" },
+ { "atmega169p", ARCH_AVR5, "__AVR_ATmega169P__" },
+ { "atmega32", ARCH_AVR5, "__AVR_ATmega32__" },
+ { "atmega323", ARCH_AVR5, "__AVR_ATmega323__" },
+ { "atmega324p", ARCH_AVR5, "__AVR_ATmega324P__" },
+ { "atmega325", ARCH_AVR5, "__AVR_ATmega325__" },
+ { "atmega325p", ARCH_AVR5, "__AVR_ATmega325P__" },
+ { "atmega3250", ARCH_AVR5, "__AVR_ATmega3250__" },
+ { "atmega3250p", ARCH_AVR5, "__AVR_ATmega3250P__" },
+ { "atmega329", ARCH_AVR5, "__AVR_ATmega329__" },
+ { "atmega329p", ARCH_AVR5, "__AVR_ATmega329P__" },
+ { "atmega3290", ARCH_AVR5, "__AVR_ATmega3290__" },
+ { "atmega3290p", ARCH_AVR5, "__AVR_ATmega3290P__" },
+ { "atmega406", ARCH_AVR5, "__AVR_ATmega406__" },
+ { "atmega64", ARCH_AVR5, "__AVR_ATmega64__" },
+ { "atmega640", ARCH_AVR5, "__AVR_ATmega640__" },
+ { "atmega644", ARCH_AVR5, "__AVR_ATmega644__" },
+ { "atmega644p", ARCH_AVR5, "__AVR_ATmega644P__" },
+ { "atmega645", ARCH_AVR5, "__AVR_ATmega645__" },
+ { "atmega6450", ARCH_AVR5, "__AVR_ATmega6450__" },
+ { "atmega649", ARCH_AVR5, "__AVR_ATmega649__" },
+ { "atmega6490", ARCH_AVR5, "__AVR_ATmega6490__" },
+ { "atmega128", ARCH_AVR5, "__AVR_ATmega128__" },
+ { "atmega1280", ARCH_AVR5, "__AVR_ATmega1280__" },
+ { "atmega1281", ARCH_AVR5, "__AVR_ATmega1281__" },
+ { "atmega16hva", ARCH_AVR5, "__AVR_ATmega16HVA__" },
+ { "at90can32", ARCH_AVR5, "__AVR_AT90CAN32__" },
+ { "at90can64", ARCH_AVR5, "__AVR_AT90CAN64__" },
+ { "at90can128", ARCH_AVR5, "__AVR_AT90CAN128__" },
+ { "at90usb82", ARCH_AVR5, "__AVR_AT90USB82__" },
+ { "at90usb162", ARCH_AVR5, "__AVR_AT90USB162__" },
+ { "at90usb646", ARCH_AVR5, "__AVR_AT90USB646__" },
+ { "at90usb647", ARCH_AVR5, "__AVR_AT90USB647__" },
+ { "at90usb1286", ARCH_AVR5, "__AVR_AT90USB1286__" },
+ { "at90usb1287", ARCH_AVR5, "__AVR_AT90USB1287__" },
+ { "at94k", ARCH_AVR5, "__AVR_AT94K__" },
+ /* Assembler only. */
+ { "avr1", ARCH_AVR1, NULL },
+ { "at90s1200", ARCH_AVR1, "__AVR_AT90S1200__" },
+ { "attiny11", ARCH_AVR1, "__AVR_ATtiny11__" },
+ { "attiny12", ARCH_AVR1, "__AVR_ATtiny12__" },
+ { "attiny15", ARCH_AVR1, "__AVR_ATtiny15__" },
+ { "attiny28", ARCH_AVR1, "__AVR_ATtiny28__" },
+ { NULL, ARCH_UNKNOWN, NULL }
+};
+
+int avr_case_values_threshold = 30000;
+
+/* Initialize the GCC target structure. */
+#undef TARGET_ASM_ALIGNED_HI_OP
+#define TARGET_ASM_ALIGNED_HI_OP "\t.word\t"
+#undef TARGET_ASM_ALIGNED_SI_OP
+#define TARGET_ASM_ALIGNED_SI_OP "\t.long\t"
+#undef TARGET_ASM_UNALIGNED_HI_OP
+#define TARGET_ASM_UNALIGNED_HI_OP "\t.word\t"
+#undef TARGET_ASM_UNALIGNED_SI_OP
+#define TARGET_ASM_UNALIGNED_SI_OP "\t.long\t"
+#undef TARGET_ASM_INTEGER
+#define TARGET_ASM_INTEGER avr_assemble_integer
+#undef TARGET_ASM_FILE_START
+#define TARGET_ASM_FILE_START avr_file_start
+#undef TARGET_ASM_FILE_START_FILE_DIRECTIVE
+#define TARGET_ASM_FILE_START_FILE_DIRECTIVE true
+#undef TARGET_ASM_FILE_END
+#define TARGET_ASM_FILE_END avr_file_end
+
+#undef TARGET_ASM_FUNCTION_PROLOGUE
+#define TARGET_ASM_FUNCTION_PROLOGUE avr_output_function_prologue
+#undef TARGET_ASM_FUNCTION_EPILOGUE
+#define TARGET_ASM_FUNCTION_EPILOGUE avr_output_function_epilogue
+#undef TARGET_ATTRIBUTE_TABLE
+#define TARGET_ATTRIBUTE_TABLE avr_attribute_table
+#undef TARGET_ASM_FUNCTION_RODATA_SECTION
+#define TARGET_ASM_FUNCTION_RODATA_SECTION default_no_function_rodata_section
+#undef TARGET_INSERT_ATTRIBUTES
+#define TARGET_INSERT_ATTRIBUTES avr_insert_attributes
+#undef TARGET_SECTION_TYPE_FLAGS
+#define TARGET_SECTION_TYPE_FLAGS avr_section_type_flags
+#undef TARGET_RTX_COSTS
+#define TARGET_RTX_COSTS avr_rtx_costs
+#undef TARGET_ADDRESS_COST
+#define TARGET_ADDRESS_COST avr_address_cost
+#undef TARGET_MACHINE_DEPENDENT_REORG
+#define TARGET_MACHINE_DEPENDENT_REORG avr_reorg
+
+#undef TARGET_RETURN_IN_MEMORY
+#define TARGET_RETURN_IN_MEMORY avr_return_in_memory
+
+#undef TARGET_STRICT_ARGUMENT_NAMING
+#define TARGET_STRICT_ARGUMENT_NAMING hook_bool_CUMULATIVE_ARGS_true
+
+struct gcc_target targetm = TARGET_INITIALIZER;
+
+/* Implement OVERRIDE_OPTIONS.  Look up the MCU selected by -mmcu=
+   (avr_mcu_name, default "avr2") in avr_mcu_types[], copy the matching
+   architecture flags into the avr_*_p globals, and create the
+   tmp/zero register RTXes.  An unknown MCU name is reported to stderr
+   with the list of known names, and compilation continues using the
+   ARCH_UNKNOWN table entry (the loop leaves T at the NULL sentinel).  */
+void
+avr_override_options (void)
+{
+  const struct mcu_type_s *t;
+  const struct base_arch_s *base;
+
+  /* NOTE(review): presumably disabled because address 0 is a valid
+     (I/O) address on the AVR - confirm.  */
+  flag_delete_null_pointer_checks = 0;
+
+  for (t = avr_mcu_types; t->name; t++)
+    if (strcmp (t->name, avr_mcu_name) == 0)
+      break;
+
+  if (!t->name)
+    {
+      fprintf (stderr, "unknown MCU '%s' specified\nKnown MCU names:\n",
+	       avr_mcu_name);
+      for (t = avr_mcu_types; t->name; t++)
+	fprintf (stderr," %s\n", t->name);
+    }
+
+  base = &avr_arch_types[t->arch];
+  avr_asm_only_p = base->asm_only;
+  avr_enhanced_p = base->enhanced;
+  avr_mega_p = base->mega;
+  avr_have_movw_lpmx_p = base->have_movw_lpmx;
+  avr_base_arch_macro = base->macro;
+  avr_extra_arch_macro = t->macro;
+
+  /* Make switch statements use jump tables more eagerly when they are
+     cheap (rjmp tables or the libgcc helper).  */
+  if (optimize && !TARGET_NO_TABLEJUMP)
+    avr_case_values_threshold = (!AVR_MEGA || TARGET_CALL_PROLOGUES) ? 8 : 17;
+
+  tmp_reg_rtx = gen_rtx_REG (QImode, TMP_REGNO);
+  zero_reg_rtx = gen_rtx_REG (QImode, ZERO_REGNO);
+}
+
+/* Map a hard register number (0..33) to its register class;
+   indexed by avr_regno_reg_class below.  */
+
+static const int reg_class_tab[]={
+  GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,
+  GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,
+  GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,
+  GENERAL_REGS, /* r0 - r15 */
+  LD_REGS,LD_REGS,LD_REGS,LD_REGS,LD_REGS,LD_REGS,LD_REGS,
+  LD_REGS,                      /* r16 - 23 */
+  ADDW_REGS,ADDW_REGS,          /* r24,r25 */
+  POINTER_X_REGS,POINTER_X_REGS, /* r26,27 */
+  POINTER_Y_REGS,POINTER_Y_REGS, /* r28,r29 */
+  POINTER_Z_REGS,POINTER_Z_REGS, /* r30,r31 */
+  STACK_REG,STACK_REG           /* SPL,SPH */
+};
+
+/* Return the register class for hard register R: table lookup for
+   r0..SPH (0..33), ALL_REGS for anything beyond.  */
+
+enum reg_class
+avr_regno_reg_class (int r)
+{
+  if (r <= 33)
+    return reg_class_tab[r];
+  return ALL_REGS;
+}
+
+/* Return nonzero if FUNC is a naked function (carries the "naked"
+   attribute).  Note: unlike interrupt_function_p/signal_function_p,
+   the attribute is looked up on the function's TYPE, and a
+   non-FUNCTION_DECL argument is rejected by assertion.  */
+
+static int
+avr_naked_function_p (tree func)
+{
+  tree a;
+
+  gcc_assert (TREE_CODE (func) == FUNCTION_DECL);
+
+  a = lookup_attribute ("naked", TYPE_ATTRIBUTES (TREE_TYPE (func)));
+  return a != NULL_TREE;
+}
+
+/* Return nonzero if FUNC is an interrupt function as specified
+   by the "interrupt" attribute (looked up on the declaration).
+   Anything that is not a FUNCTION_DECL simply returns 0.  */
+
+static int
+interrupt_function_p (tree func)
+{
+  tree a;
+
+  if (TREE_CODE (func) != FUNCTION_DECL)
+    return 0;
+
+  a = lookup_attribute ("interrupt", DECL_ATTRIBUTES (func));
+  return a != NULL_TREE;
+}
+
+/* Return nonzero if FUNC is a signal function as specified
+   by the "signal" attribute (looked up on the declaration).
+   Anything that is not a FUNCTION_DECL simply returns 0.  */
+
+static int
+signal_function_p (tree func)
+{
+  tree a;
+
+  if (TREE_CODE (func) != FUNCTION_DECL)
+    return 0;
+
+  a = lookup_attribute ("signal", DECL_ATTRIBUTES (func));
+  return a != NULL_TREE;
+}
+
+/* Return the number of hard registers to push/pop in the prologue/epilogue
+   of the current function, and optionally store these registers in SET
+   (SET may be NULL when only the count is wanted).  */
+
+static int
+avr_regs_to_save (HARD_REG_SET *set)
+{
+  int reg, count;
+  int int_or_sig_p = (interrupt_function_p (current_function_decl)
+		      || signal_function_p (current_function_decl));
+  int leaf_func_p = leaf_function_p ();
+
+  if (set)
+    CLEAR_HARD_REG_SET (*set);
+  count = 0;
+
+  /* No need to save any registers if the function never returns. */
+  if (TREE_THIS_VOLATILE (current_function_decl))
+    return 0;
+
+  for (reg = 0; reg < 32; reg++)
+    {
+      /* Do not push/pop __tmp_reg__, __zero_reg__, as well as
+	 any global register variables. */
+      if (fixed_regs[reg])
+	continue;
+
+      /* Non-leaf interrupt/signal handlers save every call-used
+	 register; otherwise save live call-saved registers (or any
+	 live register in a handler), except the Y pair when it is
+	 the frame pointer - that is handled separately.  */
+      if ((int_or_sig_p && !leaf_func_p && call_used_regs[reg])
+	  || (regs_ever_live[reg]
+	      && (int_or_sig_p || !call_used_regs[reg])
+	      && !(frame_pointer_needed
+		   && (reg == REG_Y || reg == (REG_Y+1)))))
+	{
+	  if (set)
+	    SET_HARD_REG_BIT (*set, reg);
+	  count++;
+	}
+    }
+  return count;
+}
+
+/* Compute offset between arg_pointer and frame_pointer. */
+
+int
+initial_elimination_offset (int from, int to)
+{
+ if (from == FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
+ return 0;
+ else
+ {
+ int offset = frame_pointer_needed ? 2 : 0;
+
+ offset += avr_regs_to_save (NULL);
+ return get_frame_size () + 2 + 1 + offset;
+ }
+}
+
+/* Return 1 if the function epilogue is just a single "ret". */
+
+int
+avr_simple_epilogue (void)
+{
+ return (! frame_pointer_needed
+ && get_frame_size () == 0
+ && avr_regs_to_save (NULL) == 0
+ && ! interrupt_function_p (current_function_decl)
+ && ! signal_function_p (current_function_decl)
+ && ! avr_naked_function_p (current_function_decl)
+ && ! MAIN_NAME_P (DECL_NAME (current_function_decl))
+ && ! TREE_THIS_VOLATILE (current_function_decl));
+}
+
/* This function checks sequence of live registers.
   It returns the number of saved call-saved registers when they form one
   contiguous run ending at the top (the layout expected by the
   __prologue_saves__ / __epilogue_restores__ library stubs), and 0 when
   the live registers are not contiguous.  */

static int
sequent_regs_live (void)
{
  int reg;
  int live_seq=0;   /* total live call-saved regs (incl. Y pair)  */
  int cur_seq=0;    /* length of the current contiguous run  */

  for (reg = 0; reg < 18; ++reg)
    {
      if (!call_used_regs[reg])
        {
          if (regs_ever_live[reg])
            {
              ++live_seq;
              ++cur_seq;
            }
          else
            cur_seq = 0;        /* a dead reg breaks the run  */
        }
    }

  if (!frame_pointer_needed)
    {
      /* Without a frame pointer, r28/r29 only extend the run if
         they are actually live themselves.  */
      if (regs_ever_live[REG_Y])
        {
          ++live_seq;
          ++cur_seq;
        }
      else
        cur_seq = 0;

      if (regs_ever_live[REG_Y+1])
        {
          ++live_seq;
          ++cur_seq;
        }
      else
        cur_seq = 0;
    }
  else
    {
      /* The frame pointer (r28/r29) is always saved and always tops
         the run.  */
      cur_seq += 2;
      live_seq += 2;
    }
  /* Contiguous iff the final run covers every live register.  */
  return (cur_seq == live_seq) ? live_seq : 0;
}
+
+
/* Output to FILE the asm instructions to adjust the frame pointer by
   ADJ (r29:r28 -= ADJ;) which can be positive (prologue) or negative
   (epilogue).  Returns the number of instructions generated.
   Picks the cheapest encoding: 8-bit subi under -mtiny-stack,
   subi/sbci for large adjustments, adiw/sbiw for small ones.  */

static int
out_adj_frame_ptr (FILE *file, int adj)
{
  int size = 0;

  if (adj)
    {
      if (TARGET_TINY_STACK)
        {
          /* adiw/sbiw below only take 0..63; with a tiny (8-bit) stack
             a larger change silently wraps, so warn.  */
          if (adj < -63 || adj > 63)
            warning (0, "large frame pointer change (%d) with -mtiny-stack", adj);

          /* The high byte (r29) doesn't change - prefer "subi" (1 cycle)
             over "sbiw" (2 cycles, same size).  */

          fprintf (file, (AS2 (subi, r28, %d) CR_TAB), adj);
          size++;
        }
      else if (adj < -63 || adj > 63)
        {
          /* Out of adiw/sbiw range: full 16-bit subtract.  */
          fprintf (file, (AS2 (subi, r28, lo8(%d)) CR_TAB
                          AS2 (sbci, r29, hi8(%d)) CR_TAB),
                   adj, adj);
          size += 2;
        }
      else if (adj < 0)
        {
          /* Negative adjustment: a single 16-bit add.  */
          fprintf (file, (AS2 (adiw, r28, %d) CR_TAB), -adj);
          size++;
        }
      else
        {
          fprintf (file, (AS2 (sbiw, r28, %d) CR_TAB), adj);
          size++;
        }
    }
  return size;
}
+
+
/* Output to FILE the asm instructions to copy r29:r28 to SPH:SPL,
   handling various cases of interrupt enable flag state BEFORE and AFTER
   (0=disabled, 1=enabled, -1=unknown/unchanged) and target_flags.
   Returns the number of instructions generated.
   The SPH/SPL pair cannot be written atomically, so interrupts may have
   to be disabled around the two writes.  */

static int
out_set_stack_ptr (FILE *file, int before, int after)
{
  int do_sph, do_cli, do_save, do_sei, lock_sph, size;

  /* The logic here is so that -mno-interrupts actually means
     "it is safe to write SPH in one instruction, then SPL in the
     next instruction, without disabling interrupts first".
     The after != -1 case (interrupt/signal) is not affected.  */

  do_sph = !TARGET_TINY_STACK;                  /* SPH exists at all?  */
  lock_sph = do_sph && !TARGET_NO_INTERRUPTS;   /* must guard the pair  */
  do_cli = (before != 0 && (after == 0 || lock_sph));
  do_save = (do_cli && before == -1 && after == -1);  /* preserve old I flag  */
  do_sei = ((do_cli || before != 1) && after == 1);
  size = 1;                                     /* the final SPL write  */

  if (do_save)
    {
      fprintf (file, AS2 (in, __tmp_reg__, __SREG__) CR_TAB);
      size++;
    }

  if (do_cli)
    {
      fprintf (file, "cli" CR_TAB);
      size++;
    }

  /* Do SPH first - maybe this will disable interrupts for one instruction
     someday (a suggestion has been sent to avr@atmel.com for consideration
     in future devices - that would make -mno-interrupts always safe).  */
  if (do_sph)
    {
      fprintf (file, AS2 (out, __SP_H__, r29) CR_TAB);
      size++;
    }

  /* Set/restore the I flag now - interrupts will be really enabled only
     after the next instruction.  This is not clearly documented, but
     believed to be true for all AVR devices.  */
  if (do_save)
    {
      fprintf (file, AS2 (out, __SREG__, __tmp_reg__) CR_TAB);
      size++;
    }
  else if (do_sei)
    {
      fprintf (file, "sei" CR_TAB);
      size++;
    }

  fprintf (file, AS2 (out, __SP_L__, r28) "\n");

  return size;
}
+
+
/* Output function prologue.  This port emits the prologue as literal
   assembly text rather than RTL.  Four strategies, in priority order:
   nothing (naked attribute), absolute stack-pointer setup (main),
   a jump into the __prologue_saves__ library stub (-mcall-prologues
   with a contiguous save sequence), or a plain sequence of pushes.
   Instruction counts are accumulated in prologue_size for the size
   statistics printed by the epilogue.  */

static void
avr_output_function_prologue (FILE *file, HOST_WIDE_INT size)
{
  int reg;
  int interrupt_func_p;
  int signal_func_p;
  int main_p;
  int live_seq;
  int minimize;

  /* Reset the per-function size bookkeeping.  */
  last_insn_address = 0;
  jump_tables_size = 0;
  prologue_size = 0;
  fprintf (file, "/* prologue: frame size=" HOST_WIDE_INT_PRINT_DEC " */\n",
           size);

  if (avr_naked_function_p (current_function_decl))
    {
      /* "naked": the user supplies the whole prologue.  */
      fputs ("/* prologue: naked */\n", file);
      goto out;
    }

  interrupt_func_p = interrupt_function_p (current_function_decl);
  signal_func_p = signal_function_p (current_function_decl);
  main_p = MAIN_NAME_P (DECL_NAME (current_function_decl));
  live_seq = sequent_regs_live ();
  /* The library-stub path needs a contiguous save run and is never
     used for interrupt/signal handlers.  */
  minimize = (TARGET_CALL_PROLOGUES
              && !interrupt_func_p && !signal_func_p && live_seq);

  if (interrupt_func_p)
    {
      /* "interrupt" handlers run with interrupts re-enabled.  */
      fprintf (file,"\tsei\n");
      ++prologue_size;
    }
  if (interrupt_func_p || signal_func_p)
    {
      /* Save the registers the compiler uses implicitly, plus SREG,
         and re-establish the __zero_reg__ invariant.  */
      fprintf (file, "\t"
               AS1 (push,__zero_reg__) CR_TAB
               AS1 (push,__tmp_reg__) CR_TAB
               AS2 (in,__tmp_reg__,__SREG__) CR_TAB
               AS1 (push,__tmp_reg__) CR_TAB
               AS1 (clr,__zero_reg__) "\n");
      prologue_size += 5;
    }
  if (main_p)
    {
      /* main: initialize SP to the top of the stack minus the frame,
         instead of pushing a frame onto the caller's stack.  */
      fprintf (file, ("\t"
                      AS1 (ldi,r28) ",lo8(%s - " HOST_WIDE_INT_PRINT_DEC ")" CR_TAB
                      AS1 (ldi,r29) ",hi8(%s - " HOST_WIDE_INT_PRINT_DEC ")" CR_TAB
                      AS2 (out,__SP_H__,r29) CR_TAB
                      AS2 (out,__SP_L__,r28) "\n"),
               avr_init_stack, size, avr_init_stack, size);

      prologue_size += 4;
    }
  else if (minimize && (frame_pointer_needed || live_seq > 6))
    {
      /* -mcall-prologues: load frame size into X, return label into Z,
         then jump into the save stub at the offset that pushes exactly
         the live run.  */
      fprintf (file, ("\t"
                      AS1 (ldi, r26) ",lo8(" HOST_WIDE_INT_PRINT_DEC ")" CR_TAB
                      AS1 (ldi, r27) ",hi8(" HOST_WIDE_INT_PRINT_DEC ")" CR_TAB), size, size);

      fputs ((AS2 (ldi,r30,pm_lo8(1f)) CR_TAB
              AS2 (ldi,r31,pm_hi8(1f)) CR_TAB), file);

      prologue_size += 4;

      if (AVR_MEGA)
        {
          fprintf (file, AS1 (jmp,__prologue_saves__+%d) "\n",
                   (18 - live_seq) * 2);
          prologue_size += 2;
        }
      else
        {
          fprintf (file, AS1 (rjmp,__prologue_saves__+%d) "\n",
                   (18 - live_seq) * 2);
          ++prologue_size;
        }
      /* The stub returns here via Z.  */
      fputs ("1:\n", file);
    }
  else
    {
      /* Generic path: push each register avr_regs_to_save selected.  */
      HARD_REG_SET set;

      prologue_size += avr_regs_to_save (&set);
      for (reg = 0; reg < 32; ++reg)
        {
          if (TEST_HARD_REG_BIT (set, reg))
            {
              fprintf (file, "\t" AS1 (push,%s) "\n", avr_regnames[reg]);
            }
        }
      if (frame_pointer_needed)
        {
          /* Save the old frame pointer and point Y at the new frame.  */
          fprintf (file, "\t"
                   AS1 (push,r28) CR_TAB
                   AS1 (push,r29) CR_TAB
                   AS2 (in,r28,__SP_L__) CR_TAB
                   AS2 (in,r29,__SP_H__) "\n");
          prologue_size += 4;
          if (size)
            {
              fputs ("\t", file);
              prologue_size += out_adj_frame_ptr (file, size);

              /* Write Y back to SP with the interrupt-flag handling
                 appropriate for the function kind.  */
              if (interrupt_func_p)
                {
                  prologue_size += out_set_stack_ptr (file, 1, 1);
                }
              else if (signal_func_p)
                {
                  prologue_size += out_set_stack_ptr (file, 0, 0);
                }
              else
                {
                  prologue_size += out_set_stack_ptr (file, -1, -1);
                }
            }
        }
    }

 out:
  fprintf (file, "/* prologue end (size=%d) */\n", prologue_size);
}
+
/* Output function epilogue.  Mirrors avr_output_function_prologue:
   nothing for naked/noreturn functions, a jump to exit for main, the
   __epilogue_restores__ library stub under -mcall-prologues, or pops
   in reverse order followed by ret/reti.  Also computes the function
   body size from insn addresses for the statistics comment.  */

static void
avr_output_function_epilogue (FILE *file, HOST_WIDE_INT size)
{
  int reg;
  int interrupt_func_p;
  int signal_func_p;
  int main_p;
  int function_size;
  int live_seq;
  int minimize;
  rtx last = get_last_nonnote_insn ();

  /* Body size = span of insn addresses + length of the last insn
     + any jump tables emitted.  */
  function_size = jump_tables_size;
  if (last)
    {
      rtx first = get_first_nonnote_insn ();
      function_size += (INSN_ADDRESSES (INSN_UID (last)) -
                        INSN_ADDRESSES (INSN_UID (first)));
      function_size += get_attr_length (last);
    }

  fprintf (file, "/* epilogue: frame size=" HOST_WIDE_INT_PRINT_DEC " */\n", size);
  epilogue_size = 0;

  if (avr_naked_function_p (current_function_decl))
    {
      fputs ("/* epilogue: naked */\n", file);
      goto out;
    }

  if (last && GET_CODE (last) == BARRIER)
    {
      /* Control cannot fall through to here: no epilogue needed.  */
      fputs ("/* epilogue: noreturn */\n", file);
      goto out;
    }

  interrupt_func_p = interrupt_function_p (current_function_decl);
  signal_func_p = signal_function_p (current_function_decl);
  main_p = MAIN_NAME_P (DECL_NAME (current_function_decl));
  live_seq = sequent_regs_live ();
  minimize = (TARGET_CALL_PROLOGUES
              && !interrupt_func_p && !signal_func_p && live_seq);

  if (main_p)
    {
      /* Return value from main() is already in the correct registers
         (r25:r24) as the exit() argument.  */
      if (AVR_MEGA)
        {
          fputs ("\t" AS1 (jmp,exit) "\n", file);
          epilogue_size += 2;
        }
      else
        {
          fputs ("\t" AS1 (rjmp,exit) "\n", file);
          ++epilogue_size;
        }
    }
  else if (minimize && (frame_pointer_needed || live_seq > 4))
    {
      /* -mcall-prologues: r30 tells the stub how many regs to restore;
         the stub also performs the final ret.  */
      fprintf (file, ("\t" AS2 (ldi, r30, %d) CR_TAB), live_seq);
      ++epilogue_size;
      if (frame_pointer_needed)
        {
          epilogue_size += out_adj_frame_ptr (file, -size);
        }
      else
        {
          fprintf (file, (AS2 (in , r28, __SP_L__) CR_TAB
                          AS2 (in , r29, __SP_H__) CR_TAB));
          epilogue_size += 2;
        }

      if (AVR_MEGA)
        {
          fprintf (file, AS1 (jmp,__epilogue_restores__+%d) "\n",
                   (18 - live_seq) * 2);
          epilogue_size += 2;
        }
      else
        {
          fprintf (file, AS1 (rjmp,__epilogue_restores__+%d) "\n",
                   (18 - live_seq) * 2);
          ++epilogue_size;
        }
    }
  else
    {
      /* Generic path: undo the prologue in reverse order.  */
      HARD_REG_SET set;

      if (frame_pointer_needed)
        {
          if (size)
            {
              fputs ("\t", file);
              epilogue_size += out_adj_frame_ptr (file, -size);

              if (interrupt_func_p || signal_func_p)
                {
                  /* Interrupts are known disabled inside the handler.  */
                  epilogue_size += out_set_stack_ptr (file, -1, 0);
                }
              else
                {
                  epilogue_size += out_set_stack_ptr (file, -1, -1);
                }
            }
          fprintf (file, "\t"
                   AS1 (pop,r29) CR_TAB
                   AS1 (pop,r28) "\n");
          epilogue_size += 2;
        }

      epilogue_size += avr_regs_to_save (&set);
      for (reg = 31; reg >= 0; --reg)
        {
          if (TEST_HARD_REG_BIT (set, reg))
            {
              fprintf (file, "\t" AS1 (pop,%s) "\n", avr_regnames[reg]);
            }
        }

      if (interrupt_func_p || signal_func_p)
        {
          /* Restore SREG, __tmp_reg__, __zero_reg__ pushed by the
             handler prologue.  */
          fprintf (file, "\t"
                   AS1 (pop,__tmp_reg__) CR_TAB
                   AS2 (out,__SREG__,__tmp_reg__) CR_TAB
                   AS1 (pop,__tmp_reg__) CR_TAB
                   AS1 (pop,__zero_reg__) "\n");
          epilogue_size += 4;
          fprintf (file, "\treti\n");
        }
      else
        fprintf (file, "\tret\n");
      /* Counts the reti/ret emitted just above.  */
      ++epilogue_size;
    }

 out:
  fprintf (file, "/* epilogue end (size=%d) */\n", epilogue_size);
  fprintf (file, "/* function %s size %d (%d) */\n", current_function_name (),
           prologue_size + function_size + epilogue_size, function_size);
  commands_in_file += prologue_size + function_size + epilogue_size;
  commands_in_prologues += prologue_size;
  commands_in_epilogues += epilogue_size;
}
+
+
/* Return nonzero if X (an RTX) is a legitimate memory address on the target
   machine for a memory operand of mode MODE.  The nonzero value returned
   is actually the reg_class that can hold the address' base (POINTER_REGS,
   BASE_POINTER_REGS, POINTER_Y_REGS or ALL_REGS); 0 means not valid.  */

int
legitimate_address_p (enum machine_mode mode, rtx x, int strict)
{
  enum reg_class r = NO_REGS;

  if (TARGET_ALL_DEBUG)
    {
      fprintf (stderr, "mode: (%s) %s %s %s %s:",
               GET_MODE_NAME(mode),
               strict ? "(strict)": "",
               reload_completed ? "(reload_completed)": "",
               reload_in_progress ? "(reload_in_progress)": "",
               reg_renumber ? "(reg_renumber)" : "");
      if (GET_CODE (x) == PLUS
          && REG_P (XEXP (x, 0))
          && GET_CODE (XEXP (x, 1)) == CONST_INT
          && INTVAL (XEXP (x, 1)) >= 0
          && INTVAL (XEXP (x, 1)) <= MAX_LD_OFFSET (mode)
          && reg_renumber
          )
        fprintf (stderr, "(r%d ---> r%d)", REGNO (XEXP (x, 0)),
                 true_regnum (XEXP (x, 0)));
      debug_rtx (x);
    }
  /* Plain register: any pointer register will do.  */
  if (REG_P (x) && (strict ? REG_OK_FOR_BASE_STRICT_P (x)
                    : REG_OK_FOR_BASE_NOSTRICT_P (x)))
    r = POINTER_REGS;
  /* Absolute address (lds/sts).  */
  else if (CONSTANT_ADDRESS_P (x))
    r = ALL_REGS;
  /* reg + non-negative displacement.  */
  else if (GET_CODE (x) == PLUS
           && REG_P (XEXP (x, 0))
           && GET_CODE (XEXP (x, 1)) == CONST_INT
           && INTVAL (XEXP (x, 1)) >= 0)
    {
      int fit = INTVAL (XEXP (x, 1)) <= MAX_LD_OFFSET (mode);
      if (fit)
        {
          /* In-range displacement needs Y or Z (ldd only works on those).  */
          if (! strict
              || REGNO (XEXP (x,0)) == REG_Y
              || REGNO (XEXP (x,0)) == REG_Z)
            r = BASE_POINTER_REGS;
          if (XEXP (x,0) == frame_pointer_rtx
              || XEXP (x,0) == arg_pointer_rtx)
            r = BASE_POINTER_REGS;
        }
      else if (frame_pointer_needed && XEXP (x,0) == frame_pointer_rtx)
        /* Oversized frame offsets are handled with Y adjustments.  */
        r = POINTER_Y_REGS;
    }
  /* Pre-decrement / post-increment addressing.  */
  else if ((GET_CODE (x) == PRE_DEC || GET_CODE (x) == POST_INC)
           && REG_P (XEXP (x, 0))
           && (strict ? REG_OK_FOR_BASE_STRICT_P (XEXP (x, 0))
               : REG_OK_FOR_BASE_NOSTRICT_P (XEXP (x, 0))))
    {
      r = POINTER_REGS;
    }
  if (TARGET_ALL_DEBUG)
    {
      /* NOTE(review): prints the class as a digit character - only
         readable while reg_class values stay below 10.  */
      fprintf (stderr, " ret = %c\n", r + '0');
    }
  return r == NO_REGS ? 0 : (int)r;
}
+
+/* Attempts to replace X with a valid
+ memory address for an operand of mode MODE */
+
+rtx
+legitimize_address (rtx x, rtx oldx, enum machine_mode mode)
+{
+ x = oldx;
+ if (TARGET_ALL_DEBUG)
+ {
+ fprintf (stderr, "legitimize_address mode: %s", GET_MODE_NAME(mode));
+ debug_rtx (oldx);
+ }
+
+ if (GET_CODE (oldx) == PLUS
+ && REG_P (XEXP (oldx,0)))
+ {
+ if (REG_P (XEXP (oldx,1)))
+ x = force_reg (GET_MODE (oldx), oldx);
+ else if (GET_CODE (XEXP (oldx, 1)) == CONST_INT)
+ {
+ int offs = INTVAL (XEXP (oldx,1));
+ if (frame_pointer_rtx != XEXP (oldx,0))
+ if (offs > MAX_LD_OFFSET (mode))
+ {
+ if (TARGET_ALL_DEBUG)
+ fprintf (stderr, "force_reg (big offset)\n");
+ x = force_reg (GET_MODE (oldx), oldx);
+ }
+ }
+ }
+ return x;
+}
+
+
+/* Return a pointer register name as a string. */
+
+static const char *
+ptrreg_to_str (int regno)
+{
+ switch (regno)
+ {
+ case REG_X: return "X";
+ case REG_Y: return "Y";
+ case REG_Z: return "Z";
+ default:
+ output_operand_lossage ("address operand requires constraint for X, Y, or Z register");
+ }
+ return NULL;
+}
+
+/* Return the condition name as a string.
+ Used in conditional jump constructing */
+
+static const char *
+cond_string (enum rtx_code code)
+{
+ switch (code)
+ {
+ case NE:
+ return "ne";
+ case EQ:
+ return "eq";
+ case GE:
+ if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
+ return "pl";
+ else
+ return "ge";
+ case LT:
+ if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
+ return "mi";
+ else
+ return "lt";
+ case GEU:
+ return "sh";
+ case LTU:
+ return "lo";
+ default:
+ gcc_unreachable ();
+ }
+}
+
+/* Output ADDR to FILE as address. */
+
+void
+print_operand_address (FILE *file, rtx addr)
+{
+ switch (GET_CODE (addr))
+ {
+ case REG:
+ fprintf (file, ptrreg_to_str (REGNO (addr)));
+ break;
+
+ case PRE_DEC:
+ fprintf (file, "-%s", ptrreg_to_str (REGNO (XEXP (addr, 0))));
+ break;
+
+ case POST_INC:
+ fprintf (file, "%s+", ptrreg_to_str (REGNO (XEXP (addr, 0))));
+ break;
+
+ default:
+ if (CONSTANT_ADDRESS_P (addr)
+ && ((GET_CODE (addr) == SYMBOL_REF && SYMBOL_REF_FUNCTION_P (addr))
+ || GET_CODE (addr) == LABEL_REF))
+ {
+ fprintf (file, "pm(");
+ output_addr_const (file,addr);
+ fprintf (file ,")");
+ }
+ else
+ output_addr_const (file, addr);
+ }
+}
+
+
+/* Output X as assembler operand to file FILE. */
+
+void
+print_operand (FILE *file, rtx x, int code)
+{
+ int abcd = 0;
+
+ if (code >= 'A' && code <= 'D')
+ abcd = code - 'A';
+
+ if (code == '~')
+ {
+ if (!AVR_MEGA)
+ fputc ('r', file);
+ }
+ else if (REG_P (x))
+ {
+ if (x == zero_reg_rtx)
+ fprintf (file, "__zero_reg__");
+ else
+ fprintf (file, reg_names[true_regnum (x) + abcd]);
+ }
+ else if (GET_CODE (x) == CONST_INT)
+ fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (x) + abcd);
+ else if (GET_CODE (x) == MEM)
+ {
+ rtx addr = XEXP (x,0);
+
+ if (CONSTANT_P (addr) && abcd)
+ {
+ fputc ('(', file);
+ output_address (addr);
+ fprintf (file, ")+%d", abcd);
+ }
+ else if (code == 'o')
+ {
+ if (GET_CODE (addr) != PLUS)
+ fatal_insn ("bad address, not (reg+disp):", addr);
+
+ print_operand (file, XEXP (addr, 1), 0);
+ }
+ else if (code == 'p' || code == 'r')
+ {
+ if (GET_CODE (addr) != POST_INC && GET_CODE (addr) != PRE_DEC)
+ fatal_insn ("bad address, not post_inc or pre_dec:", addr);
+
+ if (code == 'p')
+ print_operand_address (file, XEXP (addr, 0)); /* X, Y, Z */
+ else
+ print_operand (file, XEXP (addr, 0), 0); /* r26, r28, r30 */
+ }
+ else if (GET_CODE (addr) == PLUS)
+ {
+ print_operand_address (file, XEXP (addr,0));
+ if (REGNO (XEXP (addr, 0)) == REG_X)
+ fatal_insn ("internal compiler error. Bad address:"
+ ,addr);
+ fputc ('+', file);
+ print_operand (file, XEXP (addr,1), code);
+ }
+ else
+ print_operand_address (file, addr);
+ }
+ else if (GET_CODE (x) == CONST_DOUBLE)
+ {
+ long val;
+ REAL_VALUE_TYPE rv;
+ if (GET_MODE (x) != SFmode)
+ fatal_insn ("internal compiler error. Unknown mode:", x);
+ REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
+ REAL_VALUE_TO_TARGET_SINGLE (rv, val);
+ fprintf (file, "0x%lx", val);
+ }
+ else if (code == 'j')
+ fputs (cond_string (GET_CODE (x)), file);
+ else if (code == 'k')
+ fputs (cond_string (reverse_condition (GET_CODE (x))), file);
+ else
+ print_operand_address (file, x);
+}
+
+/* Recognize operand OP of mode MODE used in call instructions. */
+
+int
+call_insn_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
+{
+ if (GET_CODE (op) == MEM)
+ {
+ rtx inside = XEXP (op, 0);
+ if (register_operand (inside, Pmode))
+ return 1;
+ if (CONSTANT_ADDRESS_P (inside))
+ return 1;
+ }
+ return 0;
+}
+
/* Update the condition code in the INSN.  Tracks, via the cc attribute
   of the just-emitted insn, which flags remain valid so that a
   following compare can sometimes be omitted.  */

void
notice_update_cc (rtx body ATTRIBUTE_UNUSED, rtx insn)
{
  rtx set;

  switch (get_attr_cc (insn))
    {
    case CC_NONE:
      /* Insn does not affect CC at all.  */
      break;

    case CC_SET_N:
      CC_STATUS_INIT;
      break;

    case CC_SET_ZN:
      /* Z and N reflect the destination; V is known clear.  */
      set = single_set (insn);
      CC_STATUS_INIT;
      if (set)
        {
          cc_status.flags |= CC_NO_OVERFLOW;
          cc_status.value1 = SET_DEST (set);
        }
      break;

    case CC_SET_CZN:
      /* Insn sets the Z,N,C flags of CC to recog_operand[0].
         The V flag may or may not be known but that's ok because
         alter_cond will change tests to use EQ/NE.  */
      set = single_set (insn);
      CC_STATUS_INIT;
      if (set)
        {
          cc_status.value1 = SET_DEST (set);
          cc_status.flags |= CC_OVERFLOW_UNUSABLE;
        }
      break;

    case CC_COMPARE:
      set = single_set (insn);
      CC_STATUS_INIT;
      if (set)
        cc_status.value1 = SET_SRC (set);
      break;

    case CC_CLOBBER:
      /* Insn doesn't leave CC in a usable state.  */
      CC_STATUS_INIT;

      /* Correct CC for the ashrqi3 with the shift count as CONST_INT != 6 */
      set = single_set (insn);
      if (set)
        {
          rtx src = SET_SRC (set);

          if (GET_CODE (src) == ASHIFTRT
              && GET_MODE (src) == QImode)
            {
              rtx x = XEXP (src, 1);

              /* For these shift counts the expansion does leave
                 usable Z/N flags despite the CLOBBER attribute.  */
              if (GET_CODE (x) == CONST_INT
                  && INTVAL (x) > 0
                  && INTVAL (x) != 6)
                {
                  cc_status.value1 = SET_DEST (set);
                  cc_status.flags |= CC_OVERFLOW_UNUSABLE;
                }
            }
        }
      break;
    }
}
+
+/* Return maximum number of consecutive registers of
+ class CLASS needed to hold a value of mode MODE. */
+
+int
+class_max_nregs (enum reg_class class ATTRIBUTE_UNUSED,enum machine_mode mode)
+{
+ return ((GET_MODE_SIZE (mode) + UNITS_PER_WORD - 1) / UNITS_PER_WORD);
+}
+
+/* Choose mode for jump insn:
+ 1 - relative jump in range -63 <= x <= 62 ;
+ 2 - relative jump in range -2046 <= x <= 2045 ;
+ 3 - absolute jump (only for ATmega[16]03). */
+
+int
+avr_jump_mode (rtx x, rtx insn)
+{
+ int dest_addr = INSN_ADDRESSES (INSN_UID (GET_MODE (x) == LABEL_REF
+ ? XEXP (x, 0) : x));
+ int cur_addr = INSN_ADDRESSES (INSN_UID (insn));
+ int jump_distance = cur_addr - dest_addr;
+
+ if (-63 <= jump_distance && jump_distance <= 62)
+ return 1;
+ else if (-2046 <= jump_distance && jump_distance <= 2045)
+ return 2;
+ else if (AVR_MEGA)
+ return 3;
+
+ return 2;
+}
+
/* Return an AVR conditional-jump instruction sequence.
   X is a comparison RTX.
   LEN is a number returned by avr_jump_mode function (1 = short branch,
   2 = branch around an rjmp, 3 = branch around a jmp).
   If REVERSE is nonzero then the condition code in X must be reversed.
   GT/GTU/LE/LEU have no single branch instruction, so they are built
   from breq plus a signed/unsigned branch; when the V flag is unusable
   the signed forms fall back to plain sign tests (brpl/brmi).  */

const char *
ret_cond_branch (rtx x, int len, int reverse)
{
  RTX_CODE cond = reverse ? reverse_condition (GET_CODE (x)) : GET_CODE (x);

  switch (cond)
    {
    case GT:
      if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
        return (len == 1 ? (AS1 (breq,.+2) CR_TAB
                            AS1 (brpl,%0)) :
                len == 2 ? (AS1 (breq,.+4) CR_TAB
                            AS1 (brmi,.+2) CR_TAB
                            AS1 (rjmp,%0)) :
                (AS1 (breq,.+6) CR_TAB
                 AS1 (brmi,.+4) CR_TAB
                 AS1 (jmp,%0)));

      else
        return (len == 1 ? (AS1 (breq,.+2) CR_TAB
                            AS1 (brge,%0)) :
                len == 2 ? (AS1 (breq,.+4) CR_TAB
                            AS1 (brlt,.+2) CR_TAB
                            AS1 (rjmp,%0)) :
                (AS1 (breq,.+6) CR_TAB
                 AS1 (brlt,.+4) CR_TAB
                 AS1 (jmp,%0)));
    case GTU:
      return (len == 1 ? (AS1 (breq,.+2) CR_TAB
                          AS1 (brsh,%0)) :
              len == 2 ? (AS1 (breq,.+4) CR_TAB
                          AS1 (brlo,.+2) CR_TAB
                          AS1 (rjmp,%0)) :
              (AS1 (breq,.+6) CR_TAB
               AS1 (brlo,.+4) CR_TAB
               AS1 (jmp,%0)));
    case LE:
      /* LE branches to %0 on equality too, hence breq targets %0 in the
         short form but skips in the long forms.  */
      if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
        return (len == 1 ? (AS1 (breq,%0) CR_TAB
                            AS1 (brmi,%0)) :
                len == 2 ? (AS1 (breq,.+2) CR_TAB
                            AS1 (brpl,.+2) CR_TAB
                            AS1 (rjmp,%0)) :
                (AS1 (breq,.+2) CR_TAB
                 AS1 (brpl,.+4) CR_TAB
                 AS1 (jmp,%0)));
      else
        return (len == 1 ? (AS1 (breq,%0) CR_TAB
                            AS1 (brlt,%0)) :
                len == 2 ? (AS1 (breq,.+2) CR_TAB
                            AS1 (brge,.+2) CR_TAB
                            AS1 (rjmp,%0)) :
                (AS1 (breq,.+2) CR_TAB
                 AS1 (brge,.+4) CR_TAB
                 AS1 (jmp,%0)));
    case LEU:
      return (len == 1 ? (AS1 (breq,%0) CR_TAB
                          AS1 (brlo,%0)) :
              len == 2 ? (AS1 (breq,.+2) CR_TAB
                          AS1 (brsh,.+2) CR_TAB
                          AS1 (rjmp,%0)) :
              (AS1 (breq,.+2) CR_TAB
               AS1 (brsh,.+4) CR_TAB
               AS1 (jmp,%0)));
    default:
      /* Conditions with a direct branch insn: emit br%j1 (or the
         reversed br%k1 around an rjmp/jmp for longer distances).  */
      if (reverse)
        {
          switch (len)
            {
            case 1:
              return AS1 (br%k1,%0);
            case 2:
              return (AS1 (br%j1,.+2) CR_TAB
                      AS1 (rjmp,%0));
            default:
              return (AS1 (br%j1,.+4) CR_TAB
                      AS1 (jmp,%0));
            }
        }
      else
        {
          switch (len)
            {
            case 1:
              return AS1 (br%j1,%0);
            case 2:
              return (AS1 (br%k1,.+2) CR_TAB
                      AS1 (rjmp,%0));
            default:
              return (AS1 (br%k1,.+4) CR_TAB
                      AS1 (jmp,%0));
            }
        }
    }
  return "";
}
+
+/* Predicate function for immediate operand which fits to byte (8bit) */
+
+int
+byte_immediate_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
+{
+ return (GET_CODE (op) == CONST_INT
+ && INTVAL (op) <= 0xff && INTVAL (op) >= 0);
+}
+
/* Output all insn addresses and their sizes into the assembly language
   output file.  This is helpful for debugging whether the length attributes
   in the md file are correct.
   Output insn cost for next insn.  */

void
final_prescan_insn (rtx insn, rtx *operand ATTRIBUTE_UNUSED,
                    int num_operands ATTRIBUTE_UNUSED)
{
  int uid = INSN_UID (insn);

  if (TARGET_INSN_SIZE_DUMP || TARGET_ALL_DEBUG)
    {
      /* address, delta from previous insn, and rtx cost.  */
      fprintf (asm_out_file, "/*DEBUG: 0x%x\t\t%d\t%d */\n",
               INSN_ADDRESSES (uid),
               INSN_ADDRESSES (uid) - last_insn_address,
               rtx_cost (PATTERN (insn), INSN));
    }
  /* Remembered even when not dumping, so the delta above is right
     once dumping is switched on.  */
  last_insn_address = INSN_ADDRESSES (uid);
}
+
/* Return 0 if undefined, 1 if always true or always false.
   Decides whether comparing a MODE-sized value against CONST_INT X via
   OPERATOR can be folded: X outside the representable range of MODE
   (halved for signed comparisons) makes the comparison constant.  */

int
avr_simplify_comparison_p (enum machine_mode mode, RTX_CODE operator, rtx x)
{
  unsigned int max = (mode == QImode ? 0xff :
                      mode == HImode ? 0xffff :
                      mode == SImode ? 0xffffffff : 0);
  if (max && operator && GET_CODE (x) == CONST_INT)
    {
      /* Signed comparison: only half the range is representable.  */
      if (unsigned_condition (operator) != operator)
        max >>= 1;

      /* NOTE(review): the extra "!= 0xff" exclusion looks like a
         special case for QImode boundaries - confirm against callers
         before touching it.  */
      if (max != (INTVAL (x) & max)
          && INTVAL (x) != 0xff)
        return 1;
    }
  return 0;
}
+
+
/* Returns nonzero if register number R is one of the hard registers in
   which function arguments are sometimes passed (r8..r25 on AVR).  */

int
function_arg_regno_p(int r)
{
  if (r < 8)
    return 0;
  return r <= 25;
}
+
/* Initializing the variable cum for the state at the beginning
   of the argument list: 18 argument registers starting at
   FIRST_CUM_REG, allocated downwards.  Variadic functions pass
   everything on the stack (nregs = 0); libcalls (LIBNAME set) always
   use registers.  */

void
init_cumulative_args (CUMULATIVE_ARGS *cum, tree fntype, rtx libname,
                      tree fndecl ATTRIBUTE_UNUSED)
{
  cum->nregs = 18;
  cum->regno = FIRST_CUM_REG;
  if (!libname && fntype)
    {
      /* A prototype whose last listed type is not void marks a
         variadic function.  */
      int stdarg = (TYPE_ARG_TYPES (fntype) != 0
                    && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
                        != void_type_node));
      if (stdarg)
        cum->nregs = 0;
    }
}
+
+/* Returns the number of registers to allocate for a function argument. */
+
+static int
+avr_num_arg_regs (enum machine_mode mode, tree type)
+{
+ int size;
+
+ if (mode == BLKmode)
+ size = int_size_in_bytes (type);
+ else
+ size = GET_MODE_SIZE (mode);
+
+ /* Align all function arguments to start in even-numbered registers.
+ Odd-sized arguments leave holes above them. */
+
+ return (size + 1) & ~1;
+}
+
+/* Controls whether a function argument is passed
+ in a register, and which register. */
+
+rtx
+function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode, tree type,
+ int named ATTRIBUTE_UNUSED)
+{
+ int bytes = avr_num_arg_regs (mode, type);
+
+ if (cum->nregs && bytes <= cum->nregs)
+ return gen_rtx_REG (mode, cum->regno - bytes);
+
+ return NULL_RTX;
+}
+
+/* Update the summarizer variable CUM to advance past an argument
+ in the argument list. */
+
+void
+function_arg_advance (CUMULATIVE_ARGS *cum, enum machine_mode mode, tree type,
+ int named ATTRIBUTE_UNUSED)
+{
+ int bytes = avr_num_arg_regs (mode, type);
+
+ cum->nregs -= bytes;
+ cum->regno -= bytes;
+
+ if (cum->nregs <= 0)
+ {
+ cum->nregs = 0;
+ cum->regno = FIRST_CUM_REG;
+ }
+}
+
/***********************************************************************
 Functions for outputting various mov's for a various modes
************************************************************************/

/* Output the asm template for a QImode move INSN with OPERANDS
   (dest, src).  Returns the template string (or "", when the insn was
   already printed via output_asm_insn).  If L is non-NULL only the
   instruction count is stored there and nothing is printed.  */

const char *
output_movqi (rtx insn, rtx operands[], int *l)
{
  int dummy;
  rtx dest = operands[0];
  rtx src = operands[1];
  int *real_l = l;      /* NULL means "really emit", non-NULL "just count"  */

  if (!l)
    l = &dummy;

  *l = 1;

  if (register_operand (dest, QImode))
    {
      if (register_operand (src, QImode)) /* mov r,r */
        {
          /* SP is an I/O register, not a GPR: use in/out.  */
          if (test_hard_reg_class (STACK_REG, dest))
            return AS2 (out,%0,%1);
          else if (test_hard_reg_class (STACK_REG, src))
            return AS2 (in,%0,%1);

          return AS2 (mov,%0,%1);
        }
      else if (CONSTANT_P (src))
        {
          if (test_hard_reg_class (LD_REGS, dest)) /* ldi d,i */
            return AS2 (ldi,%0,lo8(%1));

          /* ldi only works on r16..r31; synthesize constants for the
             lower registers.  */
          if (GET_CODE (src) == CONST_INT)
            {
              if (src == const0_rtx) /* mov r,L */
                return AS1 (clr,%0);
              else if (src == const1_rtx)
                {
                  *l = 2;
                  return (AS1 (clr,%0) CR_TAB
                          AS1 (inc,%0));
                }
              else if (src == constm1_rtx)
                {
                  /* Immediate constants -1 to any register */
                  *l = 2;
                  return (AS1 (clr,%0) CR_TAB
                          AS1 (dec,%0));
                }
              else
                {
                  int bit_nr = exact_log2 (INTVAL (src));

                  if (bit_nr >= 0)
                    {
                      /* Power of two: clr, set T, then bld.  */
                      *l = 3;
                      if (!real_l)
                        output_asm_insn ((AS1 (clr,%0) CR_TAB
                                          "set"), operands);
                      if (!real_l)
                        avr_output_bld (operands, bit_nr);

                      return "";
                    }
                }
            }

          /* Last resort, larger than loading from memory: bounce the
             constant through r31 (saving/restoring it via tmp).  */
          *l = 4;
          return (AS2 (mov,__tmp_reg__,r31) CR_TAB
                  AS2 (ldi,r31,lo8(%1)) CR_TAB
                  AS2 (mov,%0,r31) CR_TAB
                  AS2 (mov,r31,__tmp_reg__));
        }
      else if (GET_CODE (src) == MEM)
        return out_movqi_r_mr (insn, operands, real_l); /* mov r,m */
    }
  else if (GET_CODE (dest) == MEM)
    {
      const char *template;

      /* Storing zero: use __zero_reg__ instead of materializing 0.  */
      if (src == const0_rtx)
        operands[1] = zero_reg_rtx;

      template = out_movqi_mr_r (insn, operands, real_l);

      if (!real_l)
        output_asm_insn (template, operands);

      operands[1] = src;
    }
  return "";
}
+
+
+const char *
+output_movhi (rtx insn, rtx operands[], int *l)
+{
+ int dummy;
+ rtx dest = operands[0];
+ rtx src = operands[1];
+ int *real_l = l;
+
+ if (!l)
+ l = &dummy;
+
+ if (register_operand (dest, HImode))
+ {
+ if (register_operand (src, HImode)) /* mov r,r */
+ {
+ if (test_hard_reg_class (STACK_REG, dest))
+ {
+ if (TARGET_TINY_STACK)
+ {
+ *l = 1;
+ return AS2 (out,__SP_L__,%A1);
+ }
+ else if (TARGET_NO_INTERRUPTS)
+ {
+ *l = 2;
+ return (AS2 (out,__SP_H__,%B1) CR_TAB
+ AS2 (out,__SP_L__,%A1));
+ }
+
+ *l = 5;
+ return (AS2 (in,__tmp_reg__,__SREG__) CR_TAB
+ "cli" CR_TAB
+ AS2 (out,__SP_H__,%B1) CR_TAB
+ AS2 (out,__SREG__,__tmp_reg__) CR_TAB
+ AS2 (out,__SP_L__,%A1));
+ }
+ else if (test_hard_reg_class (STACK_REG, src))
+ {
+ *l = 2;
+ return (AS2 (in,%A0,__SP_L__) CR_TAB
+ AS2 (in,%B0,__SP_H__));
+ }
+
+ if (AVR_HAVE_MOVW)
+ {
+ *l = 1;
+ return (AS2 (movw,%0,%1));
+ }
+
+ if (true_regnum (dest) > true_regnum (src))
+ {
+ *l = 2;
+ return (AS2 (mov,%B0,%B1) CR_TAB
+ AS2 (mov,%A0,%A1));
+ }
+ else
+ {
+ *l = 2;
+ return (AS2 (mov,%A0,%A1) CR_TAB
+ AS2 (mov,%B0,%B1));
+ }
+ }
+ else if (CONSTANT_P (src))
+ {
+ if (test_hard_reg_class (LD_REGS, dest)) /* ldi d,i */
+ {
+ *l = 2;
+ return (AS2 (ldi,%A0,lo8(%1)) CR_TAB
+ AS2 (ldi,%B0,hi8(%1)));
+ }
+
+ if (GET_CODE (src) == CONST_INT)
+ {
+ if (src == const0_rtx) /* mov r,L */
+ {
+ *l = 2;
+ return (AS1 (clr,%A0) CR_TAB
+ AS1 (clr,%B0));
+ }
+ else if (src == const1_rtx)
+ {
+ *l = 3;
+ return (AS1 (clr,%A0) CR_TAB
+ AS1 (clr,%B0) CR_TAB
+ AS1 (inc,%A0));
+ }
+ else if (src == constm1_rtx)
+ {
+ /* Immediate constants -1 to any register */
+ *l = 3;
+ return (AS1 (clr,%0) CR_TAB
+ AS1 (dec,%A0) CR_TAB
+ AS2 (mov,%B0,%A0));
+ }
+ else
+ {
+ int bit_nr = exact_log2 (INTVAL (src));
+
+ if (bit_nr >= 0)
+ {
+ *l = 4;
+ if (!real_l)
+ output_asm_insn ((AS1 (clr,%A0) CR_TAB
+ AS1 (clr,%B0) CR_TAB
+ "set"), operands);
+ if (!real_l)
+ avr_output_bld (operands, bit_nr);
+
+ return "";
+ }
+ }
+
+ if ((INTVAL (src) & 0xff) == 0)
+ {
+ *l = 5;
+ return (AS2 (mov,__tmp_reg__,r31) CR_TAB
+ AS1 (clr,%A0) CR_TAB
+ AS2 (ldi,r31,hi8(%1)) CR_TAB
+ AS2 (mov,%B0,r31) CR_TAB
+ AS2 (mov,r31,__tmp_reg__));
+ }
+ else if ((INTVAL (src) & 0xff00) == 0)
+ {
+ *l = 5;
+ return (AS2 (mov,__tmp_reg__,r31) CR_TAB
+ AS2 (ldi,r31,lo8(%1)) CR_TAB
+ AS2 (mov,%A0,r31) CR_TAB
+ AS1 (clr,%B0) CR_TAB
+ AS2 (mov,r31,__tmp_reg__));
+ }
+ }
+
+ /* Last resort, equal to loading from memory. */
+ *l = 6;
+ return (AS2 (mov,__tmp_reg__,r31) CR_TAB
+ AS2 (ldi,r31,lo8(%1)) CR_TAB
+ AS2 (mov,%A0,r31) CR_TAB
+ AS2 (ldi,r31,hi8(%1)) CR_TAB
+ AS2 (mov,%B0,r31) CR_TAB
+ AS2 (mov,r31,__tmp_reg__));
+ }
+ else if (GET_CODE (src) == MEM)
+ return out_movhi_r_mr (insn, operands, real_l); /* mov r,m */
+ }
+ else if (GET_CODE (dest) == MEM)
+ {
+ const char *template;
+
+ if (src == const0_rtx)
+ operands[1] = zero_reg_rtx;
+
+ template = out_movhi_mr_r (insn, operands, real_l);
+
+ if (!real_l)
+ output_asm_insn (template, operands);
+
+ operands[1] = src;
+ return "";
+ }
+ fatal_insn ("invalid insn:", insn);
+ return "";
+}
+
/* Output the asm for a QImode load register <- memory, for INSN with
   operands OP (dest reg, src mem).  If L is non-NULL only the length
   is stored.  Handles absolute (in/lds), reg+disp (ldd, with Y
   adjustment when the displacement is out of ldd range) and plain
   indirect (ld) addressing.  */

const char *
out_movqi_r_mr (rtx insn, rtx op[], int *l)
{
  rtx dest = op[0];
  rtx src = op[1];
  rtx x = XEXP (src, 0);
  int dummy;

  if (!l)
    l = &dummy;

  if (CONSTANT_ADDRESS_P (x))
    {
      /* Addresses in I/O space use the shorter "in".  */
      if (avr_io_address_p (x, 1))
        {
          *l = 1;
          return AS2 (in,%0,%1-0x20);
        }
      *l = 2;
      return AS2 (lds,%0,%1);
    }
  /* memory access by reg+disp */
  else if (GET_CODE (x) == PLUS
           && REG_P (XEXP (x,0))
           && GET_CODE (XEXP (x,1)) == CONST_INT)
    {
      if ((INTVAL (XEXP (x,1)) - GET_MODE_SIZE (GET_MODE (src))) >= 63)
        {
          /* Displacement beyond ldd range: only Y may appear here;
             temporarily move Y, load, and move it back.  */
          int disp = INTVAL (XEXP (x,1));
          if (REGNO (XEXP (x,0)) != REG_Y)
            fatal_insn ("incorrect insn:",insn);

          if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
            return *l = 3, (AS2 (adiw,r28,%o1-63) CR_TAB
                            AS2 (ldd,%0,Y+63)     CR_TAB
                            AS2 (sbiw,r28,%o1-63));

          return *l = 5, (AS2 (subi,r28,lo8(-%o1)) CR_TAB
                          AS2 (sbci,r29,hi8(-%o1)) CR_TAB
                          AS2 (ld,%0,Y)            CR_TAB
                          AS2 (subi,r28,lo8(%o1))  CR_TAB
                          AS2 (sbci,r29,hi8(%o1)));
        }
      else if (REGNO (XEXP (x,0)) == REG_X)
        {
          /* This is a paranoid case LEGITIMIZE_RELOAD_ADDRESS must exclude
             it but I have this situation with extremal optimizing options.
             X has no displacement mode, so adjust X around the ld; skip
             the restore when X dies or is overwritten by the load.  */
          if (reg_overlap_mentioned_p (dest, XEXP (x,0))
              || reg_unused_after (insn, XEXP (x,0)))
            return *l = 2, (AS2 (adiw,r26,%o1) CR_TAB
                            AS2 (ld,%0,X));

          return *l = 3, (AS2 (adiw,r26,%o1) CR_TAB
                          AS2 (ld,%0,X)      CR_TAB
                          AS2 (sbiw,r26,%o1));
        }
      *l = 1;
      return AS2 (ldd,%0,%1);
    }
  *l = 1;
  return AS2 (ld,%0,%1);
}
+
/* Output the assembler template for a HImode load: register OP[0] from
   memory OP[1].  INSN is the move insn itself.  If L is non-null the
   instruction length (in words) is stored through it; the returned
   string is the asm template using %-operand substitutions.  */

const char *
out_movhi_r_mr (rtx insn, rtx op[], int *l)
{
  rtx dest = op[0];
  rtx src = op[1];
  rtx base = XEXP (src, 0);
  int reg_dest = true_regnum (dest);
  int reg_base = true_regnum (base);
  /* "volatile" forces reading low byte first, even if less efficient,
     for correct operation with 16-bit I/O registers.  */
  int mem_volatile_p = MEM_VOLATILE_P (src);
  int tmp;

  /* Caller may pass L == NULL when it only wants the template.  */
  if (!l)
    l = &tmp;

  if (reg_base > 0)
    {
      if (reg_dest == reg_base)         /* R = (R) */
        {
          /* Destination overlaps the pointer: stage the low byte in
             __tmp_reg__ so the pointer high byte is not clobbered early.  */
          *l = 3;
          return (AS2 (ld,__tmp_reg__,%1+) CR_TAB
                  AS2 (ld,%B0,%1) CR_TAB
                  AS2 (mov,%A0,__tmp_reg__));
        }
      else if (reg_base == REG_X)        /* (R26) */
        {
          if (reg_unused_after (insn, base))
            {
              *l = 2;
              return (AS2 (ld,%A0,X+) CR_TAB
                      AS2 (ld,%B0,X));
            }
          /* X must be restored after the post-increment.  */
          *l = 3;
          return (AS2 (ld,%A0,X+) CR_TAB
                  AS2 (ld,%B0,X) CR_TAB
                  AS2 (sbiw,r26,1));
        }
      else                               /* (R) */
        {
          *l = 2;
          return (AS2 (ld,%A0,%1) CR_TAB
                  AS2 (ldd,%B0,%1+1));
        }
    }
  else if (GET_CODE (base) == PLUS)      /* (R + i) */
    {
      int disp = INTVAL (XEXP (base, 1));
      int reg_base = true_regnum (XEXP (base, 0));

      if (disp > MAX_LD_OFFSET (GET_MODE (src)))
        {
          /* Displacement too large for ldd; only Y is expected here.  */
          if (REGNO (XEXP (base, 0)) != REG_Y)
            fatal_insn ("incorrect insn:",insn);

          if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
            return *l = 4, (AS2 (adiw,r28,%o1-62) CR_TAB
                            AS2 (ldd,%A0,Y+62) CR_TAB
                            AS2 (ldd,%B0,Y+63) CR_TAB
                            AS2 (sbiw,r28,%o1-62));

          /* Adjust Y by the full displacement and restore afterwards.  */
          return *l = 6, (AS2 (subi,r28,lo8(-%o1)) CR_TAB
                          AS2 (sbci,r29,hi8(-%o1)) CR_TAB
                          AS2 (ld,%A0,Y) CR_TAB
                          AS2 (ldd,%B0,Y+1) CR_TAB
                          AS2 (subi,r28,lo8(%o1)) CR_TAB
                          AS2 (sbci,r29,hi8(%o1)));
        }
      if (reg_base == REG_X)
        {
          /* This is a paranoid case.  LEGITIMIZE_RELOAD_ADDRESS must exclude
             it but I have this situation with extremal
             optimization options.  */

          *l = 4;
          if (reg_base == reg_dest)
            return (AS2 (adiw,r26,%o1) CR_TAB
                    AS2 (ld,__tmp_reg__,X+) CR_TAB
                    AS2 (ld,%B0,X) CR_TAB
                    AS2 (mov,%A0,__tmp_reg__));

          return (AS2 (adiw,r26,%o1) CR_TAB
                  AS2 (ld,%A0,X+) CR_TAB
                  AS2 (ld,%B0,X) CR_TAB
                  AS2 (sbiw,r26,%o1+1));
        }

      if (reg_base == reg_dest)
        {
          *l = 3;
          return (AS2 (ldd,__tmp_reg__,%A1) CR_TAB
                  AS2 (ldd,%B0,%B1) CR_TAB
                  AS2 (mov,%A0,__tmp_reg__));
        }

      *l = 2;
      return (AS2 (ldd,%A0,%A1) CR_TAB
              AS2 (ldd,%B0,%B1));
    }
  else if (GET_CODE (base) == PRE_DEC)   /* (--R) */
    {
      if (reg_overlap_mentioned_p (dest, XEXP (base, 0)))
        fatal_insn ("incorrect insn:", insn);

      if (mem_volatile_p)
        {
          /* Volatile: low byte must still be read first.  */
          if (REGNO (XEXP (base, 0)) == REG_X)
            {
              *l = 4;
              return (AS2 (sbiw,r26,2) CR_TAB
                      AS2 (ld,%A0,X+) CR_TAB
                      AS2 (ld,%B0,X) CR_TAB
                      AS2 (sbiw,r26,1));
            }
          else
            {
              *l = 3;
              return (AS2 (sbiw,%r1,2) CR_TAB
                      AS2 (ld,%A0,%p1) CR_TAB
                      AS2 (ldd,%B0,%p1+1));
            }
        }

      *l = 2;
      return (AS2 (ld,%B0,%1) CR_TAB
              AS2 (ld,%A0,%1));
    }
  else if (GET_CODE (base) == POST_INC)  /* (R++) */
    {
      if (reg_overlap_mentioned_p (dest, XEXP (base, 0)))
        fatal_insn ("incorrect insn:", insn);

      *l = 2;
      return (AS2 (ld,%A0,%1) CR_TAB
              AS2 (ld,%B0,%1));
    }
  else if (CONSTANT_ADDRESS_P (base))
    {
      /* I/O addresses use the shorter "in"; lds is 2 words each.  */
      if (avr_io_address_p (base, 2))
        {
          *l = 2;
          return (AS2 (in,%A0,%A1-0x20) CR_TAB
                  AS2 (in,%B0,%B1-0x20));
        }
      *l = 4;
      return (AS2 (lds,%A0,%A1) CR_TAB
              AS2 (lds,%B0,%B1));
    }

  fatal_insn ("unknown move insn:",insn);
  return "";
}
+
/* Output the assembler template for an SImode load: register OP[0] from
   memory OP[1].  INSN is the move insn.  If L is non-null the length in
   words is stored through it.  Special care is needed when the 4-byte
   destination overlaps the pointer register pair.  */

const char *
out_movsi_r_mr (rtx insn, rtx op[], int *l)
{
  rtx dest = op[0];
  rtx src = op[1];
  rtx base = XEXP (src, 0);
  int reg_dest = true_regnum (dest);
  int reg_base = true_regnum (base);
  int tmp;

  if (!l)
    l = &tmp;

  if (reg_base > 0)
    {
      if (reg_base == REG_X)             /* (R26) */
        {
          if (reg_dest == REG_X)
            /* "ld r26,-X" is undefined */
            return *l=7, (AS2 (adiw,r26,3) CR_TAB
                          AS2 (ld,r29,X) CR_TAB
                          AS2 (ld,r28,-X) CR_TAB
                          AS2 (ld,__tmp_reg__,-X) CR_TAB
                          AS2 (sbiw,r26,1) CR_TAB
                          AS2 (ld,r26,X) CR_TAB
                          AS2 (mov,r27,__tmp_reg__));
          else if (reg_dest == REG_X - 2)
            /* Destination's upper pair is X itself; stage byte 2.  */
            return *l=5, (AS2 (ld,%A0,X+) CR_TAB
                          AS2 (ld,%B0,X+) CR_TAB
                          AS2 (ld,__tmp_reg__,X+) CR_TAB
                          AS2 (ld,%D0,X) CR_TAB
                          AS2 (mov,%C0,__tmp_reg__));
          else if (reg_unused_after (insn, base))
            return *l=4, (AS2 (ld,%A0,X+) CR_TAB
                          AS2 (ld,%B0,X+) CR_TAB
                          AS2 (ld,%C0,X+) CR_TAB
                          AS2 (ld,%D0,X));
          else
            return *l=5, (AS2 (ld,%A0,X+) CR_TAB
                          AS2 (ld,%B0,X+) CR_TAB
                          AS2 (ld,%C0,X+) CR_TAB
                          AS2 (ld,%D0,X) CR_TAB
                          AS2 (sbiw,r26,3));
        }
      else
        {
          if (reg_dest == reg_base)
            /* Read top-down, staging byte 1, so the pointer (== low
               pair of dest) is overwritten last.  */
            return *l=5, (AS2 (ldd,%D0,%1+3) CR_TAB
                          AS2 (ldd,%C0,%1+2) CR_TAB
                          AS2 (ldd,__tmp_reg__,%1+1) CR_TAB
                          AS2 (ld,%A0,%1) CR_TAB
                          AS2 (mov,%B0,__tmp_reg__));
          else if (reg_base == reg_dest + 2)
            return *l=5, (AS2 (ld ,%A0,%1) CR_TAB
                          AS2 (ldd,%B0,%1+1) CR_TAB
                          AS2 (ldd,__tmp_reg__,%1+2) CR_TAB
                          AS2 (ldd,%D0,%1+3) CR_TAB
                          AS2 (mov,%C0,__tmp_reg__));
          else
            return *l=4, (AS2 (ld ,%A0,%1) CR_TAB
                          AS2 (ldd,%B0,%1+1) CR_TAB
                          AS2 (ldd,%C0,%1+2) CR_TAB
                          AS2 (ldd,%D0,%1+3));
        }
    }
  else if (GET_CODE (base) == PLUS)      /* (R + i) */
    {
      int disp = INTVAL (XEXP (base, 1));

      if (disp > MAX_LD_OFFSET (GET_MODE (src)))
        {
          if (REGNO (XEXP (base, 0)) != REG_Y)
            fatal_insn ("incorrect insn:",insn);

          if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
            return *l = 6, (AS2 (adiw,r28,%o1-60) CR_TAB
                            AS2 (ldd,%A0,Y+60) CR_TAB
                            AS2 (ldd,%B0,Y+61) CR_TAB
                            AS2 (ldd,%C0,Y+62) CR_TAB
                            AS2 (ldd,%D0,Y+63) CR_TAB
                            AS2 (sbiw,r28,%o1-60));

          return *l = 8, (AS2 (subi,r28,lo8(-%o1)) CR_TAB
                          AS2 (sbci,r29,hi8(-%o1)) CR_TAB
                          AS2 (ld,%A0,Y) CR_TAB
                          AS2 (ldd,%B0,Y+1) CR_TAB
                          AS2 (ldd,%C0,Y+2) CR_TAB
                          AS2 (ldd,%D0,Y+3) CR_TAB
                          AS2 (subi,r28,lo8(%o1)) CR_TAB
                          AS2 (sbci,r29,hi8(%o1)));
        }

      reg_base = true_regnum (XEXP (base, 0));
      if (reg_base == REG_X)
        {
          /* R = (X + d) */
          if (reg_dest == REG_X)
            {
              *l = 7;
              /* "ld r26,-X" is undefined */
              return (AS2 (adiw,r26,%o1+3) CR_TAB
                      AS2 (ld,r29,X) CR_TAB
                      AS2 (ld,r28,-X) CR_TAB
                      AS2 (ld,__tmp_reg__,-X) CR_TAB
                      AS2 (sbiw,r26,1) CR_TAB
                      AS2 (ld,r26,X) CR_TAB
                      AS2 (mov,r27,__tmp_reg__));
            }
          *l = 6;
          if (reg_dest == REG_X - 2)
            return (AS2 (adiw,r26,%o1) CR_TAB
                    AS2 (ld,r24,X+) CR_TAB
                    AS2 (ld,r25,X+) CR_TAB
                    AS2 (ld,__tmp_reg__,X+) CR_TAB
                    AS2 (ld,r27,X) CR_TAB
                    AS2 (mov,r26,__tmp_reg__));

          return (AS2 (adiw,r26,%o1) CR_TAB
                  AS2 (ld,%A0,X+) CR_TAB
                  AS2 (ld,%B0,X+) CR_TAB
                  AS2 (ld,%C0,X+) CR_TAB
                  AS2 (ld,%D0,X) CR_TAB
                  AS2 (sbiw,r26,%o1+3));
        }
      if (reg_dest == reg_base)
        return *l=5, (AS2 (ldd,%D0,%D1) CR_TAB
                      AS2 (ldd,%C0,%C1) CR_TAB
                      AS2 (ldd,__tmp_reg__,%B1) CR_TAB
                      AS2 (ldd,%A0,%A1) CR_TAB
                      AS2 (mov,%B0,__tmp_reg__));
      else if (reg_dest == reg_base - 2)
        return *l=5, (AS2 (ldd,%A0,%A1) CR_TAB
                      AS2 (ldd,%B0,%B1) CR_TAB
                      AS2 (ldd,__tmp_reg__,%C1) CR_TAB
                      AS2 (ldd,%D0,%D1) CR_TAB
                      AS2 (mov,%C0,__tmp_reg__));
      return *l=4, (AS2 (ldd,%A0,%A1) CR_TAB
                    AS2 (ldd,%B0,%B1) CR_TAB
                    AS2 (ldd,%C0,%C1) CR_TAB
                    AS2 (ldd,%D0,%D1));
    }
  else if (GET_CODE (base) == PRE_DEC)   /* (--R) */
    return *l=4, (AS2 (ld,%D0,%1) CR_TAB
                  AS2 (ld,%C0,%1) CR_TAB
                  AS2 (ld,%B0,%1) CR_TAB
                  AS2 (ld,%A0,%1));
  else if (GET_CODE (base) == POST_INC)  /* (R++) */
    return *l=4, (AS2 (ld,%A0,%1) CR_TAB
                  AS2 (ld,%B0,%1) CR_TAB
                  AS2 (ld,%C0,%1) CR_TAB
                  AS2 (ld,%D0,%1));
  else if (CONSTANT_ADDRESS_P (base))
    /* Four lds insns, 2 words each.  */
    return *l=8, (AS2 (lds,%A0,%A1) CR_TAB
                  AS2 (lds,%B0,%B1) CR_TAB
                  AS2 (lds,%C0,%C1) CR_TAB
                  AS2 (lds,%D0,%D1));

  fatal_insn ("unknown move insn:",insn);
  return "";
}
+
/* Output the assembler template for an SImode store: memory OP[0] from
   register OP[1].  INSN is the move insn; if L is non-null the length
   in words is stored through it.  __zero_reg__ is borrowed as a second
   scratch in the overlap cases and cleared again afterwards.  */

const char *
out_movsi_mr_r (rtx insn, rtx op[], int *l)
{
  rtx dest = op[0];
  rtx src = op[1];
  rtx base = XEXP (dest, 0);
  int reg_base = true_regnum (base);
  int reg_src = true_regnum (src);
  int tmp;

  if (!l)
    l = &tmp;

  if (CONSTANT_ADDRESS_P (base))
    return *l=8,(AS2 (sts,%A0,%A1) CR_TAB
                 AS2 (sts,%B0,%B1) CR_TAB
                 AS2 (sts,%C0,%C1) CR_TAB
                 AS2 (sts,%D0,%D1));
  if (reg_base > 0)                      /* (r) */
    {
      if (reg_base == REG_X)             /* (R26) */
        {
          if (reg_src == REG_X)
            {
              /* "st X+,r26" is undefined */
              if (reg_unused_after (insn, base))
                return *l=6, (AS2 (mov,__tmp_reg__,r27) CR_TAB
                              AS2 (st,X,r26) CR_TAB
                              AS2 (adiw,r26,1) CR_TAB
                              AS2 (st,X+,__tmp_reg__) CR_TAB
                              AS2 (st,X+,r28) CR_TAB
                              AS2 (st,X,r29));
              else
                return *l=7, (AS2 (mov,__tmp_reg__,r27) CR_TAB
                              AS2 (st,X,r26) CR_TAB
                              AS2 (adiw,r26,1) CR_TAB
                              AS2 (st,X+,__tmp_reg__) CR_TAB
                              AS2 (st,X+,r28) CR_TAB
                              AS2 (st,X,r29) CR_TAB
                              AS2 (sbiw,r26,3));
            }
          else if (reg_base == reg_src + 2)
            {
              /* Source's upper pair is X itself; save it first.  */
              if (reg_unused_after (insn, base))
                return *l=7, (AS2 (mov,__zero_reg__,%C1) CR_TAB
                              AS2 (mov,__tmp_reg__,%D1) CR_TAB
                              AS2 (st,%0+,%A1) CR_TAB
                              AS2 (st,%0+,%B1) CR_TAB
                              AS2 (st,%0+,__zero_reg__) CR_TAB
                              AS2 (st,%0,__tmp_reg__) CR_TAB
                              AS1 (clr,__zero_reg__));
              else
                return *l=8, (AS2 (mov,__zero_reg__,%C1) CR_TAB
                              AS2 (mov,__tmp_reg__,%D1) CR_TAB
                              AS2 (st,%0+,%A1) CR_TAB
                              AS2 (st,%0+,%B1) CR_TAB
                              AS2 (st,%0+,__zero_reg__) CR_TAB
                              AS2 (st,%0,__tmp_reg__) CR_TAB
                              AS1 (clr,__zero_reg__) CR_TAB
                              AS2 (sbiw,r26,3));
            }
          return *l=5, (AS2 (st,%0+,%A1) CR_TAB
                        AS2 (st,%0+,%B1) CR_TAB
                        AS2 (st,%0+,%C1) CR_TAB
                        AS2 (st,%0,%D1) CR_TAB
                        AS2 (sbiw,r26,3));
        }
      else
        return *l=4, (AS2 (st,%0,%A1)    CR_TAB
                      AS2 (std,%0+1,%B1) CR_TAB
                      AS2 (std,%0+2,%C1) CR_TAB
                      AS2 (std,%0+3,%D1));
    }
  else if (GET_CODE (base) == PLUS)      /* (R + i) */
    {
      int disp = INTVAL (XEXP (base, 1));
      reg_base = REGNO (XEXP (base, 0));
      if (disp > MAX_LD_OFFSET (GET_MODE (dest)))
        {
          if (reg_base != REG_Y)
            fatal_insn ("incorrect insn:",insn);

          if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
            return *l = 6, (AS2 (adiw,r28,%o0-60) CR_TAB
                            AS2 (std,Y+60,%A1) CR_TAB
                            AS2 (std,Y+61,%B1) CR_TAB
                            AS2 (std,Y+62,%C1) CR_TAB
                            AS2 (std,Y+63,%D1) CR_TAB
                            AS2 (sbiw,r28,%o0-60));

          return *l = 8, (AS2 (subi,r28,lo8(-%o0)) CR_TAB
                          AS2 (sbci,r29,hi8(-%o0)) CR_TAB
                          AS2 (st,Y,%A1) CR_TAB
                          AS2 (std,Y+1,%B1) CR_TAB
                          AS2 (std,Y+2,%C1) CR_TAB
                          AS2 (std,Y+3,%D1) CR_TAB
                          AS2 (subi,r28,lo8(%o0)) CR_TAB
                          AS2 (sbci,r29,hi8(%o0)));
        }
      if (reg_base == REG_X)
        {
          /* (X + d) = R */
          if (reg_src == REG_X)
            {
              *l = 9;
              return (AS2 (mov,__tmp_reg__,r26) CR_TAB
                      AS2 (mov,__zero_reg__,r27) CR_TAB
                      AS2 (adiw,r26,%o0) CR_TAB
                      AS2 (st,X+,__tmp_reg__) CR_TAB
                      AS2 (st,X+,__zero_reg__) CR_TAB
                      AS2 (st,X+,r28) CR_TAB
                      AS2 (st,X,r29) CR_TAB
                      AS1 (clr,__zero_reg__) CR_TAB
                      AS2 (sbiw,r26,%o0+3));
            }
          else if (reg_src == REG_X - 2)
            {
              *l = 9;
              return (AS2 (mov,__tmp_reg__,r26) CR_TAB
                      AS2 (mov,__zero_reg__,r27) CR_TAB
                      AS2 (adiw,r26,%o0) CR_TAB
                      AS2 (st,X+,r24) CR_TAB
                      AS2 (st,X+,r25) CR_TAB
                      AS2 (st,X+,__tmp_reg__) CR_TAB
                      AS2 (st,X,__zero_reg__) CR_TAB
                      AS1 (clr,__zero_reg__) CR_TAB
                      AS2 (sbiw,r26,%o0+3));
            }
          *l = 6;
          return (AS2 (adiw,r26,%o0) CR_TAB
                  AS2 (st,X+,%A1) CR_TAB
                  AS2 (st,X+,%B1) CR_TAB
                  AS2 (st,X+,%C1) CR_TAB
                  AS2 (st,X,%D1) CR_TAB
                  AS2 (sbiw,r26,%o0+3));
        }
      return *l=4, (AS2 (std,%A0,%A1) CR_TAB
                    AS2 (std,%B0,%B1) CR_TAB
                    AS2 (std,%C0,%C1) CR_TAB
                    AS2 (std,%D0,%D1));
    }
  else if (GET_CODE (base) == PRE_DEC)   /* (--R) */
    return *l=4, (AS2 (st,%0,%D1) CR_TAB
                  AS2 (st,%0,%C1) CR_TAB
                  AS2 (st,%0,%B1) CR_TAB
                  AS2 (st,%0,%A1));
  else if (GET_CODE (base) == POST_INC)  /* (R++) */
    return *l=4, (AS2 (st,%0,%A1) CR_TAB
                  AS2 (st,%0,%B1) CR_TAB
                  AS2 (st,%0,%C1) CR_TAB
                  AS2 (st,%0,%D1));
  fatal_insn ("unknown move insn:",insn);
  return "";
}
+
/* Output assembler code for an SImode/SFmode move OPERANDS[0] :=
   OPERANDS[1].  Dispatches on the operand kinds: reg-reg copy,
   constant load, or memory load/store (delegated to out_movsi_r_mr /
   out_movsi_mr_r).  If L is non-null, only compute the length; when
   L is null the helpers may emit code directly via output_asm_insn.  */

const char *
output_movsisf(rtx insn, rtx operands[], int *l)
{
  int dummy;
  rtx dest = operands[0];
  rtx src = operands[1];
  int *real_l = l;

  if (!l)
    l = &dummy;

  if (register_operand (dest, VOIDmode))
    {
      if (register_operand (src, VOIDmode)) /* mov r,r */
        {
          /* Copy order depends on register numbers so an overlapping
             source is never clobbered before it is read.  */
          if (true_regnum (dest) > true_regnum (src))
            {
              if (AVR_HAVE_MOVW)
                {
                  *l = 2;
                  return (AS2 (movw,%C0,%C1) CR_TAB
                          AS2 (movw,%A0,%A1));
                }
              *l = 4;
              return (AS2 (mov,%D0,%D1) CR_TAB
                      AS2 (mov,%C0,%C1) CR_TAB
                      AS2 (mov,%B0,%B1) CR_TAB
                      AS2 (mov,%A0,%A1));
            }
          else
            {
              if (AVR_HAVE_MOVW)
                {
                  *l = 2;
                  return (AS2 (movw,%A0,%A1) CR_TAB
                          AS2 (movw,%C0,%C1));
                }
              *l = 4;
              return (AS2 (mov,%A0,%A1) CR_TAB
                      AS2 (mov,%B0,%B1) CR_TAB
                      AS2 (mov,%C0,%C1) CR_TAB
                      AS2 (mov,%D0,%D1));
            }
        }
      else if (CONSTANT_P (src))
        {
          if (test_hard_reg_class (LD_REGS, dest)) /* ldi d,i */
            {
              *l = 4;
              return (AS2 (ldi,%A0,lo8(%1))  CR_TAB
                      AS2 (ldi,%B0,hi8(%1))  CR_TAB
                      AS2 (ldi,%C0,hlo8(%1)) CR_TAB
                      AS2 (ldi,%D0,hhi8(%1)));
            }

          if (GET_CODE (src) == CONST_INT)
            {
              /* Template that clears all four destination bytes.  */
              const char *const clr_op0 =
                AVR_HAVE_MOVW ? (AS1 (clr,%A0) CR_TAB
                                 AS1 (clr,%B0) CR_TAB
                                 AS2 (movw,%C0,%A0))
                              : (AS1 (clr,%A0) CR_TAB
                                 AS1 (clr,%B0) CR_TAB
                                 AS1 (clr,%C0) CR_TAB
                                 AS1 (clr,%D0));

              if (src == const0_rtx) /* mov r,L */
                {
                  *l = AVR_HAVE_MOVW ? 3 : 4;
                  return clr_op0;
                }
              else if (src == const1_rtx)
                {
                  /* Clear then increment; clr part is emitted here
                     when we are generating code (real_l == NULL).  */
                  if (!real_l)
                    output_asm_insn (clr_op0, operands);
                  *l = AVR_HAVE_MOVW ? 4 : 5;
                  return AS1 (inc,%A0);
                }
              else if (src == constm1_rtx)
                {
                  /* Immediate constants -1 to any register */
                  if (AVR_HAVE_MOVW)
                    {
                      *l = 4;
                      return (AS1 (clr,%A0)     CR_TAB
                              AS1 (dec,%A0)     CR_TAB
                              AS2 (mov,%B0,%A0) CR_TAB
                              AS2 (movw,%C0,%A0));
                    }
                  *l = 5;
                  return (AS1 (clr,%A0)     CR_TAB
                          AS1 (dec,%A0)     CR_TAB
                          AS2 (mov,%B0,%A0) CR_TAB
                          AS2 (mov,%C0,%A0) CR_TAB
                          AS2 (mov,%D0,%A0));
                }
              else
                {
                  int bit_nr = exact_log2 (INTVAL (src));

                  if (bit_nr >= 0)
                    {
                      /* Power of two: clear, then set the single bit
                         via SET + BLD.  */
                      *l = AVR_HAVE_MOVW ? 5 : 6;
                      if (!real_l)
                        {
                          output_asm_insn (clr_op0, operands);
                          output_asm_insn ("set", operands);
                        }
                      if (!real_l)
                        avr_output_bld (operands, bit_nr);

                      return "";
                    }
                }
            }

          /* Last resort, better than loading from memory.  */
          *l = 10;
          return (AS2 (mov,__tmp_reg__,r31) CR_TAB
                  AS2 (ldi,r31,lo8(%1))     CR_TAB
                  AS2 (mov,%A0,r31)         CR_TAB
                  AS2 (ldi,r31,hi8(%1))     CR_TAB
                  AS2 (mov,%B0,r31)         CR_TAB
                  AS2 (ldi,r31,hlo8(%1))    CR_TAB
                  AS2 (mov,%C0,r31)         CR_TAB
                  AS2 (ldi,r31,hhi8(%1))    CR_TAB
                  AS2 (mov,%D0,r31)         CR_TAB
                  AS2 (mov,r31,__tmp_reg__));
        }
      else if (GET_CODE (src) == MEM)
        return out_movsi_r_mr (insn, operands, real_l); /* mov r,m */
    }
  else if (GET_CODE (dest) == MEM)
    {
      const char *template;

      /* Store of zero uses __zero_reg__ as the source.  */
      if (src == const0_rtx)
        operands[1] = zero_reg_rtx;

      template = out_movsi_mr_r (insn, operands, real_l);

      if (!real_l)
        output_asm_insn (template, operands);

      operands[1] = src;
      return "";
    }
  fatal_insn ("invalid insn:", insn);
  return "";
}
+
/* Output the assembler template for a QImode store: memory OP[0] from
   register OP[1].  INSN is the move insn; if L is non-null the length
   in words is stored through it.  */

const char *
out_movqi_mr_r (rtx insn, rtx op[], int *l)
{
  rtx dest = op[0];
  rtx src = op[1];
  rtx x = XEXP (dest, 0);
  int dummy;

  if (!l)
    l = &dummy;

  if (CONSTANT_ADDRESS_P (x))
    {
      /* I/O space can use the 1-word "out"; otherwise sts (2 words).  */
      if (avr_io_address_p (x, 1))
        {
          *l = 1;
          return AS2 (out,%0-0x20,%1);
        }
      *l = 2;
      return AS2 (sts,%0,%1);
    }
  /* memory access by reg+disp */
  else if (GET_CODE (x) == PLUS
           && REG_P (XEXP (x,0))
           && GET_CODE (XEXP (x,1)) == CONST_INT)
    {
      /* Displacement beyond std's reach; only Y is expected here.  */
      if ((INTVAL (XEXP (x,1)) - GET_MODE_SIZE (GET_MODE (dest))) >= 63)
        {
          int disp = INTVAL (XEXP (x,1));
          if (REGNO (XEXP (x,0)) != REG_Y)
            fatal_insn ("incorrect insn:",insn);

          if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
            return *l = 3, (AS2 (adiw,r28,%o0-63) CR_TAB
                            AS2 (std,Y+63,%1) CR_TAB
                            AS2 (sbiw,r28,%o0-63));

          return *l = 5, (AS2 (subi,r28,lo8(-%o0)) CR_TAB
                          AS2 (sbci,r29,hi8(-%o0)) CR_TAB
                          AS2 (st,Y,%1) CR_TAB
                          AS2 (subi,r28,lo8(%o0)) CR_TAB
                          AS2 (sbci,r29,hi8(%o0)));
        }
      else if (REGNO (XEXP (x,0)) == REG_X)
        {
          /* X has no displacement form; adjust X around the store.  If
             the source is part of X, save it in __tmp_reg__ first.  */
          if (reg_overlap_mentioned_p (src, XEXP (x, 0)))
            {
              if (reg_unused_after (insn, XEXP (x,0)))
                return *l = 3, (AS2 (mov,__tmp_reg__,%1) CR_TAB
                                AS2 (adiw,r26,%o0) CR_TAB
                                AS2 (st,X,__tmp_reg__));

              return *l = 4, (AS2 (mov,__tmp_reg__,%1) CR_TAB
                              AS2 (adiw,r26,%o0) CR_TAB
                              AS2 (st,X,__tmp_reg__) CR_TAB
                              AS2 (sbiw,r26,%o0));
            }
          else
            {
              if (reg_unused_after (insn, XEXP (x,0)))
                return *l = 2, (AS2 (adiw,r26,%o0) CR_TAB
                                AS2 (st,X,%1));

              return *l = 3, (AS2 (adiw,r26,%o0) CR_TAB
                              AS2 (st,X,%1) CR_TAB
                              AS2 (sbiw,r26,%o0));
            }
        }
      *l = 1;
      return AS2 (std,%0,%1);
    }
  *l = 1;
  return AS2 (st,%0,%1);
}
+
/* Output the assembler template for a HImode store: memory OP[0] from
   register OP[1].  INSN is the move insn; if L is non-null the length
   in words is stored through it.  */

const char *
out_movhi_mr_r (rtx insn, rtx op[], int *l)
{
  rtx dest = op[0];
  rtx src = op[1];
  rtx base = XEXP (dest, 0);
  int reg_base = true_regnum (base);
  int reg_src = true_regnum (src);
  /* "volatile" forces writing high byte first, even if less efficient,
     for correct operation with 16-bit I/O registers.  */
  int mem_volatile_p = MEM_VOLATILE_P (dest);
  int tmp;

  if (!l)
    l = &tmp;
  if (CONSTANT_ADDRESS_P (base))
    {
      if (avr_io_address_p (base, 2))
        {
          /* High byte first (see volatile note above).  */
          *l = 2;
          return (AS2 (out,%B0-0x20,%B1) CR_TAB
                  AS2 (out,%A0-0x20,%A1));
        }
      return *l = 4, (AS2 (sts,%B0,%B1) CR_TAB
                      AS2 (sts,%A0,%A1));
    }
  if (reg_base > 0)
    {
      if (reg_base == REG_X)
        {
          if (reg_src == REG_X)
            {
              /* "st X+,r26" and "st -X,r26" are undefined.  */
              if (!mem_volatile_p && reg_unused_after (insn, src))
                return *l=4, (AS2 (mov,__tmp_reg__,r27) CR_TAB
                              AS2 (st,X,r26) CR_TAB
                              AS2 (adiw,r26,1) CR_TAB
                              AS2 (st,X,__tmp_reg__));
              else
                return *l=5, (AS2 (mov,__tmp_reg__,r27) CR_TAB
                              AS2 (adiw,r26,1) CR_TAB
                              AS2 (st,X,__tmp_reg__) CR_TAB
                              AS2 (sbiw,r26,1) CR_TAB
                              AS2 (st,X,r26));
            }
          else
            {
              if (!mem_volatile_p && reg_unused_after (insn, base))
                return *l=2, (AS2 (st,X+,%A1) CR_TAB
                              AS2 (st,X,%B1));
              else
                return *l=3, (AS2 (adiw,r26,1) CR_TAB
                              AS2 (st,X,%B1) CR_TAB
                              AS2 (st,-X,%A1));
            }
        }
      else
        return *l=2, (AS2 (std,%0+1,%B1) CR_TAB
                      AS2 (st,%0,%A1));
    }
  else if (GET_CODE (base) == PLUS)
    {
      int disp = INTVAL (XEXP (base, 1));
      reg_base = REGNO (XEXP (base, 0));
      if (disp > MAX_LD_OFFSET (GET_MODE (dest)))
        {
          if (reg_base != REG_Y)
            fatal_insn ("incorrect insn:",insn);

          if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
            return *l = 4, (AS2 (adiw,r28,%o0-62) CR_TAB
                            AS2 (std,Y+63,%B1) CR_TAB
                            AS2 (std,Y+62,%A1) CR_TAB
                            AS2 (sbiw,r28,%o0-62));

          return *l = 6, (AS2 (subi,r28,lo8(-%o0)) CR_TAB
                          AS2 (sbci,r29,hi8(-%o0)) CR_TAB
                          AS2 (std,Y+1,%B1) CR_TAB
                          AS2 (st,Y,%A1) CR_TAB
                          AS2 (subi,r28,lo8(%o0)) CR_TAB
                          AS2 (sbci,r29,hi8(%o0)));
        }
      if (reg_base == REG_X)
        {
          /* (X + d) = R */
          if (reg_src == REG_X)
            {
              *l = 7;
              return (AS2 (mov,__tmp_reg__,r26) CR_TAB
                      AS2 (mov,__zero_reg__,r27) CR_TAB
                      AS2 (adiw,r26,%o0+1) CR_TAB
                      AS2 (st,X,__zero_reg__) CR_TAB
                      AS2 (st,-X,__tmp_reg__) CR_TAB
                      AS1 (clr,__zero_reg__) CR_TAB
                      AS2 (sbiw,r26,%o0));
            }
          *l = 4;
          return (AS2 (adiw,r26,%o0+1) CR_TAB
                  AS2 (st,X,%B1) CR_TAB
                  AS2 (st,-X,%A1) CR_TAB
                  AS2 (sbiw,r26,%o0));
        }
      return *l=2, (AS2 (std,%B0,%B1) CR_TAB
                    AS2 (std,%A0,%A1));
    }
  else if (GET_CODE (base) == PRE_DEC)   /* (--R) */
    return *l=2, (AS2 (st,%0,%B1) CR_TAB
                  AS2 (st,%0,%A1));
  else if (GET_CODE (base) == POST_INC)  /* (R++) */
    {
      if (mem_volatile_p)
        {
          /* Volatile: still write the high byte first.  */
          if (REGNO (XEXP (base, 0)) == REG_X)
            {
              *l = 4;
              return (AS2 (adiw,r26,1) CR_TAB
                      AS2 (st,X,%B1) CR_TAB
                      AS2 (st,-X,%A1) CR_TAB
                      AS2 (adiw,r26,2));
            }
          else
            {
              *l = 3;
              return (AS2 (std,%p0+1,%B1) CR_TAB
                      AS2 (st,%p0,%A1) CR_TAB
                      AS2 (adiw,%r0,2));
            }
        }

      *l = 2;
      return (AS2 (st,%0,%A1) CR_TAB
              AS2 (st,%0,%B1));
    }
  fatal_insn ("unknown move insn:",insn);
  return "";
}
+
+/* Return 1 if frame pointer for current function required. */
+
+int
+frame_pointer_required_p (void)
+{
+ return (current_function_calls_alloca
+ || current_function_args_info.nregs == 0
+ || get_frame_size () > 0);
+}
+
+/* Returns the condition of compare insn INSN, or UNKNOWN. */
+
static RTX_CODE
compare_condition (rtx insn)
{
  /* Look at the insn that follows INSN; if it is a jump, extract the
     comparison code from its pattern.  */
  rtx next = next_real_insn (insn);
  RTX_CODE cond = UNKNOWN;
  if (next && GET_CODE (next) == JUMP_INSN)
    {
      /* NOTE(review): assumes the jump pattern is a SET whose source is
         an if_then_else with the comparison as operand 0 — i.e. a
         conditional branch; an unconditional jump here would be
         misparsed.  Presumably guaranteed by the insn patterns that
         call this — verify against the callers.  */
      rtx pat = PATTERN (next);
      rtx src = SET_SRC (pat);
      rtx t = XEXP (src, 0);
      cond = GET_CODE (t);
    }
  return cond;
}
+
+/* Returns nonzero if INSN is a tst insn that only tests the sign. */
+
+static int
+compare_sign_p (rtx insn)
+{
+ RTX_CODE cond = compare_condition (insn);
+ return (cond == GE || cond == LT);
+}
+
+/* Returns nonzero if the next insn is a JUMP_INSN with a condition
+ that needs to be swapped (GT, GTU, LE, LEU). */
+
+int
+compare_diff_p (rtx insn)
+{
+ RTX_CODE cond = compare_condition (insn);
+ return (cond == GT || cond == GTU || cond == LE || cond == LEU) ? cond : 0;
+}
+
+/* Returns nonzero if INSN is a compare insn with the EQ or NE condition. */
+
+int
+compare_eq_p (rtx insn)
+{
+ RTX_CODE cond = compare_condition (insn);
+ return (cond == EQ || cond == NE);
+}
+
+
+/* Output test instruction for HImode. */
+
const char *
out_tsthi (rtx insn, int *l)
{
  /* Output a HImode test-against-zero.  Pick the cheapest sequence the
     following conditional jump allows.  */
  if (compare_sign_p (insn))
    {
      /* Only the sign is needed: test the high byte.  */
      if (l) *l = 1;
      return AS1 (tst,%B0);
    }
  if (reg_unused_after (insn, SET_SRC (PATTERN (insn)))
      && compare_eq_p (insn))
    {
      /* Faster than sbiw if we can clobber the operand.  */
      if (l) *l = 1;
      return AS2 (or,%A0,%B0);
    }
  if (test_hard_reg_class (ADDW_REGS, SET_SRC (PATTERN (insn))))
    {
      /* sbiw only works on r24..r30 register pairs.  */
      if (l) *l = 1;
      return AS2 (sbiw,%0,0);
    }
  /* General fallback: compare both bytes against __zero_reg__.  */
  if (l) *l = 2;
  return (AS2 (cp,%A0,__zero_reg__) CR_TAB
          AS2 (cpc,%B0,__zero_reg__));
}
+
+
+/* Output test instruction for SImode. */
+
const char *
out_tstsi (rtx insn, int *l)
{
  /* Output an SImode test-against-zero, choosing by the condition of
     the following jump and the register class of the operand.  */
  if (compare_sign_p (insn))
    {
      /* Only the sign is needed: test the top byte.  */
      if (l) *l = 1;
      return AS1 (tst,%D0);
    }
  if (test_hard_reg_class (ADDW_REGS, SET_SRC (PATTERN (insn))))
    {
      /* Low word via sbiw, then propagate with cpc.  */
      if (l) *l = 3;
      return (AS2 (sbiw,%A0,0) CR_TAB
              AS2 (cpc,%C0,__zero_reg__) CR_TAB
              AS2 (cpc,%D0,__zero_reg__));
    }
  if (l) *l = 4;
  return (AS2 (cp,%A0,__zero_reg__) CR_TAB
          AS2 (cpc,%B0,__zero_reg__) CR_TAB
          AS2 (cpc,%C0,__zero_reg__) CR_TAB
          AS2 (cpc,%D0,__zero_reg__));
}
+
+
+/* Generate asm equivalent for various shifts.
+ Shift count is a CONST_INT, MEM or REG.
+ This only handles cases that are not already
+ carefully hand-optimized in ?sh??i3_out. */
+
/* Emit (or measure, when LEN != NULL) a shift whose single-step body is
   TEMPLATE (T_LEN words).  OPERANDS[2] is the shift count: a CONST_INT
   is expanded inline or as a counted loop; a MEM is first loaded into
   __tmp_reg__; a REG is used (or copied) as loop counter.  OPERANDS[3]
   is an optional scratch register when the insn pattern is a PARALLEL.  */

void
out_shift_with_cnt (const char *template, rtx insn, rtx operands[],
                    int *len, int t_len)
{
  rtx op[10];
  char str[500];
  int second_label = 1;      /* loop is entered via "rjmp 2f" pre-test */
  int saved_in_tmp = 0;      /* counter register saved in __tmp_reg__ */
  int use_zero_reg = 0;      /* __zero_reg__ doubles as loop counter */

  op[0] = operands[0];
  op[1] = operands[1];
  op[2] = operands[2];
  op[3] = operands[3];
  str[0] = 0;

  if (len)
    *len = 1;

  if (GET_CODE (operands[2]) == CONST_INT)
    {
      int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
      int count = INTVAL (operands[2]);
      int max_len = 10;  /* If larger than this, always use a loop.  */

      if (count <= 0)
        {
          /* Shift by zero (or negative): nothing to do.  */
          if (len)
            *len = 0;
          return;
        }

      if (count < 8 && !scratch)
        use_zero_reg = 1;

      if (optimize_size)
        max_len = t_len + (scratch ? 3 : (use_zero_reg ? 4 : 5));

      if (t_len * count <= max_len)
        {
          /* Output shifts inline with no loop - faster.  */
          if (len)
            *len = t_len * count;
          else
            {
              while (count-- > 0)
                output_asm_insn (template, op);
            }

          return;
        }

      if (scratch)
        {
          if (!len)
            strcat (str, AS2 (ldi,%3,%2));
        }
      else if (use_zero_reg)
        {
          /* Hack to save one word: use __zero_reg__ as loop counter.
             Set one bit, then shift in a loop until it is 0 again.  */

          op[3] = zero_reg_rtx;
          if (len)
            *len = 2;
          else
            strcat (str, ("set" CR_TAB
                          AS2 (bld,%3,%2-1)));
        }
      else
        {
          /* No scratch register available, use one from LD_REGS (saved in
             __tmp_reg__) that doesn't overlap with registers to shift.  */

          op[3] = gen_rtx_REG (QImode,
                               ((true_regnum (operands[0]) - 1) & 15) + 16);
          op[4] = tmp_reg_rtx;
          saved_in_tmp = 1;

          if (len)
            *len = 3;  /* Includes "mov %3,%4" after the loop.  */
          else
            strcat (str, (AS2 (mov,%4,%3) CR_TAB
                          AS2 (ldi,%3,%2)));
        }

      /* Known positive count: no pre-test jump needed.  */
      second_label = 0;
    }
  else if (GET_CODE (operands[2]) == MEM)
    {
      rtx op_mov[10];

      /* Load the count from memory into __tmp_reg__.  */
      op[3] = op_mov[0] = tmp_reg_rtx;
      op_mov[1] = op[2];

      if (len)
        out_movqi_r_mr (insn, op_mov, len);
      else
        output_asm_insn (out_movqi_r_mr (insn, op_mov, NULL), op_mov);
    }
  else if (register_operand (operands[2], QImode))
    {
      if (reg_unused_after (insn, operands[2]))
        op[3] = op[2];
      else
        {
          /* Count register is live after the insn: count down a copy.  */
          op[3] = tmp_reg_rtx;
          if (!len)
            strcat (str, (AS2 (mov,%3,%2) CR_TAB));
        }
    }
  else
    fatal_insn ("bad shift insn:", insn);

  if (second_label)
    {
      if (len)
        ++*len;
      else
        strcat (str, AS1 (rjmp,2f));
    }

  if (len)
    *len += t_len + 2;  /* template + dec + brXX */
  else
    {
      /* Assemble: label 1, shift body, (label 2,) decrement/lsr the
         counter, branch back while not done, optional counter restore.  */
      strcat (str, "\n1:\t");
      strcat (str, template);
      strcat (str, second_label ? "\n2:\t" : "\n\t");
      strcat (str, use_zero_reg ? AS1 (lsr,%3) : AS1 (dec,%3));
      strcat (str, CR_TAB);
      strcat (str, second_label ? AS1 (brpl,1b) : AS1 (brne,1b));
      if (saved_in_tmp)
        strcat (str, (CR_TAB AS2 (mov,%3,%4)));
      output_asm_insn (str, op);
    }
}
+
+
+/* 8bit shift left ((char)x << i) */
+
/* 8-bit shift left ((char)x << i).  OPERANDS[0] is destination/source,
   OPERANDS[2] the count.  Constant counts get hand-optimized sequences;
   everything else falls through to out_shift_with_cnt.  LEN, if
   non-null, receives the length in words.  */

const char *
ashlqi3_out (rtx insn, rtx operands[], int *len)
{
  if (GET_CODE (operands[2]) == CONST_INT)
    {
      int k;

      if (!len)
        len = &k;

      switch (INTVAL (operands[2]))
        {
        default:
          if (INTVAL (operands[2]) < 8)
            break;

          /* Shift of 8 or more: result is zero.  */
          *len = 1;
          return AS1 (clr,%0);

        case 1:
          *len = 1;
          return AS1 (lsl,%0);

        case 2:
          *len = 2;
          return (AS1 (lsl,%0) CR_TAB
                  AS1 (lsl,%0));

        case 3:
          *len = 3;
          return (AS1 (lsl,%0) CR_TAB
                  AS1 (lsl,%0) CR_TAB
                  AS1 (lsl,%0));

        case 4:
          /* swap + andi needs an upper (ldi-capable) register.  */
          if (test_hard_reg_class (LD_REGS, operands[0]))
            {
              *len = 2;
              return (AS1 (swap,%0) CR_TAB
                      AS2 (andi,%0,0xf0));
            }
          *len = 4;
          return (AS1 (lsl,%0) CR_TAB
                  AS1 (lsl,%0) CR_TAB
                  AS1 (lsl,%0) CR_TAB
                  AS1 (lsl,%0));

        case 5:
          if (test_hard_reg_class (LD_REGS, operands[0]))
            {
              *len = 3;
              return (AS1 (swap,%0) CR_TAB
                      AS1 (lsl,%0)  CR_TAB
                      AS2 (andi,%0,0xe0));
            }
          *len = 5;
          return (AS1 (lsl,%0) CR_TAB
                  AS1 (lsl,%0) CR_TAB
                  AS1 (lsl,%0) CR_TAB
                  AS1 (lsl,%0) CR_TAB
                  AS1 (lsl,%0));

        case 6:
          if (test_hard_reg_class (LD_REGS, operands[0]))
            {
              *len = 4;
              return (AS1 (swap,%0) CR_TAB
                      AS1 (lsl,%0)  CR_TAB
                      AS1 (lsl,%0)  CR_TAB
                      AS2 (andi,%0,0xc0));
            }
          *len = 6;
          return (AS1 (lsl,%0) CR_TAB
                  AS1 (lsl,%0) CR_TAB
                  AS1 (lsl,%0) CR_TAB
                  AS1 (lsl,%0) CR_TAB
                  AS1 (lsl,%0) CR_TAB
                  AS1 (lsl,%0));

        case 7:
          /* Rotate bit 0 into carry, clear, rotate carry into bit 7.  */
          *len = 3;
          return (AS1 (ror,%0) CR_TAB
                  AS1 (clr,%0) CR_TAB
                  AS1 (ror,%0));
        }
    }
  else if (CONSTANT_P (operands[2]))
    fatal_insn ("internal compiler error.  Incorrect shift:", insn);

  out_shift_with_cnt (AS1 (lsl,%0),
                      insn, operands, len, 1);
  return "";
}
+
+
+/* 16bit shift left ((short)x << i) */
+
/* 16-bit shift left ((short)x << i).  Constant counts are special-cased
   per count value; SCRATCH notes whether the pattern provides a spare
   upper register (%3), LDI_OK whether the destination itself is
   ldi-capable.  Falls through to out_shift_with_cnt otherwise.  */

const char *
ashlhi3_out (rtx insn, rtx operands[], int *len)
{
  if (GET_CODE (operands[2]) == CONST_INT)
    {
      int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
      int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
      int k;
      int *t = len;     /* remember whether caller wanted a length */

      if (!len)
        len = &k;

      switch (INTVAL (operands[2]))
        {
        default:
          if (INTVAL (operands[2]) < 16)
            break;

          /* Shift of 16 or more: result is zero.  */
          *len = 2;
          return (AS1 (clr,%B0) CR_TAB
                  AS1 (clr,%A0));

        case 4:
          if (optimize_size && scratch)
            break;  /* 5 */
          if (ldi_ok)
            {
              *len = 6;
              return (AS1 (swap,%A0)      CR_TAB
                      AS1 (swap,%B0)      CR_TAB
                      AS2 (andi,%B0,0xf0) CR_TAB
                      AS2 (eor,%B0,%A0)   CR_TAB
                      AS2 (andi,%A0,0xf0) CR_TAB
                      AS2 (eor,%B0,%A0));
            }
          if (scratch)
            {
              *len = 7;
              return (AS1 (swap,%A0)    CR_TAB
                      AS1 (swap,%B0)    CR_TAB
                      AS2 (ldi,%3,0xf0) CR_TAB
                      AS2 (and,%B0,%3)  CR_TAB
                      AS2 (eor,%B0,%A0) CR_TAB
                      AS2 (and,%A0,%3)  CR_TAB
                      AS2 (eor,%B0,%A0));
            }
          break;  /* optimize_size ? 6 : 8 */

        case 5:
          if (optimize_size)
            break;  /* scratch ? 5 : 6 */
          if (ldi_ok)
            {
              *len = 8;
              return (AS1 (lsl,%A0)       CR_TAB
                      AS1 (rol,%B0)       CR_TAB
                      AS1 (swap,%A0)      CR_TAB
                      AS1 (swap,%B0)      CR_TAB
                      AS2 (andi,%B0,0xf0) CR_TAB
                      AS2 (eor,%B0,%A0)   CR_TAB
                      AS2 (andi,%A0,0xf0) CR_TAB
                      AS2 (eor,%B0,%A0));
            }
          if (scratch)
            {
              *len = 9;
              return (AS1 (lsl,%A0)     CR_TAB
                      AS1 (rol,%B0)     CR_TAB
                      AS1 (swap,%A0)    CR_TAB
                      AS1 (swap,%B0)    CR_TAB
                      AS2 (ldi,%3,0xf0) CR_TAB
                      AS2 (and,%B0,%3)  CR_TAB
                      AS2 (eor,%B0,%A0) CR_TAB
                      AS2 (and,%A0,%3)  CR_TAB
                      AS2 (eor,%B0,%A0));
            }
          break;  /* 10 */

        case 6:
          if (optimize_size)
            break;  /* scratch ? 5 : 6 */
          /* Implemented as a right shift by 2 with byte move.  */
          *len = 9;
          return (AS1 (clr,__tmp_reg__) CR_TAB
                  AS1 (lsr,%B0)         CR_TAB
                  AS1 (ror,%A0)         CR_TAB
                  AS1 (ror,__tmp_reg__) CR_TAB
                  AS1 (lsr,%B0)         CR_TAB
                  AS1 (ror,%A0)         CR_TAB
                  AS1 (ror,__tmp_reg__) CR_TAB
                  AS2 (mov,%B0,%A0)     CR_TAB
                  AS2 (mov,%A0,__tmp_reg__));

        case 7:
          *len = 5;
          return (AS1 (lsr,%B0)     CR_TAB
                  AS2 (mov,%B0,%A0) CR_TAB
                  AS1 (clr,%A0)     CR_TAB
                  AS1 (ror,%B0)     CR_TAB
                  AS1 (ror,%A0));

        case 8:
          /* Byte move; collapses to one insn if regs already line up.  */
          if (true_regnum (operands[0]) + 1 == true_regnum (operands[1]))
            return *len = 1, AS1 (clr,%A0);
          else
            return *len = 2, (AS2 (mov,%B0,%A1) CR_TAB
                              AS1 (clr,%A0));

        case 9:
          *len = 3;
          return (AS2 (mov,%B0,%A0) CR_TAB
                  AS1 (clr,%A0)     CR_TAB
                  AS1 (lsl,%B0));

        case 10:
          *len = 4;
          return (AS2 (mov,%B0,%A0) CR_TAB
                  AS1 (clr,%A0)     CR_TAB
                  AS1 (lsl,%B0)     CR_TAB
                  AS1 (lsl,%B0));

        case 11:
          *len = 5;
          return (AS2 (mov,%B0,%A0) CR_TAB
                  AS1 (clr,%A0)     CR_TAB
                  AS1 (lsl,%B0)     CR_TAB
                  AS1 (lsl,%B0)     CR_TAB
                  AS1 (lsl,%B0));

        case 12:
          if (ldi_ok)
            {
              *len = 4;
              return (AS2 (mov,%B0,%A0) CR_TAB
                      AS1 (clr,%A0)     CR_TAB
                      AS1 (swap,%B0)    CR_TAB
                      AS2 (andi,%B0,0xf0));
            }
          if (scratch)
            {
              *len = 5;
              return (AS2 (mov,%B0,%A0) CR_TAB
                      AS1 (clr,%A0)     CR_TAB
                      AS1 (swap,%B0)    CR_TAB
                      AS2 (ldi,%3,0xf0) CR_TAB
                      AS2 (and,%B0,%3));
            }
          *len = 6;
          return (AS2 (mov,%B0,%A0) CR_TAB
                  AS1 (clr,%A0)     CR_TAB
                  AS1 (lsl,%B0)     CR_TAB
                  AS1 (lsl,%B0)     CR_TAB
                  AS1 (lsl,%B0)     CR_TAB
                  AS1 (lsl,%B0));

        case 13:
          if (ldi_ok)
            {
              *len = 5;
              return (AS2 (mov,%B0,%A0) CR_TAB
                      AS1 (clr,%A0)     CR_TAB
                      AS1 (swap,%B0)    CR_TAB
                      AS1 (lsl,%B0)     CR_TAB
                      AS2 (andi,%B0,0xe0));
            }
          if (AVR_ENHANCED && scratch)
            {
              /* Multiply by 0x20 via hardware mul; clears r1, so
                 restore __zero_reg__ afterwards.  */
              *len = 5;
              return (AS2 (ldi,%3,0x20) CR_TAB
                      AS2 (mul,%A0,%3)  CR_TAB
                      AS2 (mov,%B0,r0)  CR_TAB
                      AS1 (clr,%A0)     CR_TAB
                      AS1 (clr,__zero_reg__));
            }
          if (optimize_size && scratch)
            break;  /* 5 */
          if (scratch)
            {
              *len = 6;
              return (AS2 (mov,%B0,%A0) CR_TAB
                      AS1 (clr,%A0)     CR_TAB
                      AS1 (swap,%B0)    CR_TAB
                      AS1 (lsl,%B0)     CR_TAB
                      AS2 (ldi,%3,0xe0) CR_TAB
                      AS2 (and,%B0,%3));
            }
          if (AVR_ENHANCED)
            {
              /* Build the 0x20 multiplier in r1 with set/bld.  */
              *len = 6;
              return ("set"            CR_TAB
                      AS2 (bld,r1,5)   CR_TAB
                      AS2 (mul,%A0,r1) CR_TAB
                      AS2 (mov,%B0,r0) CR_TAB
                      AS1 (clr,%A0)    CR_TAB
                      AS1 (clr,__zero_reg__));
            }
          *len = 7;
          return (AS2 (mov,%B0,%A0) CR_TAB
                  AS1 (clr,%A0)     CR_TAB
                  AS1 (lsl,%B0)     CR_TAB
                  AS1 (lsl,%B0)     CR_TAB
                  AS1 (lsl,%B0)     CR_TAB
                  AS1 (lsl,%B0)     CR_TAB
                  AS1 (lsl,%B0));

        case 14:
          if (AVR_ENHANCED && ldi_ok)
            {
              *len = 5;
              return (AS2 (ldi,%B0,0x40) CR_TAB
                      AS2 (mul,%A0,%B0)  CR_TAB
                      AS2 (mov,%B0,r0)   CR_TAB
                      AS1 (clr,%A0)      CR_TAB
                      AS1 (clr,__zero_reg__));
            }
          if (AVR_ENHANCED && scratch)
            {
              *len = 5;
              return (AS2 (ldi,%3,0x40) CR_TAB
                      AS2 (mul,%A0,%3)  CR_TAB
                      AS2 (mov,%B0,r0)  CR_TAB
                      AS1 (clr,%A0)     CR_TAB
                      AS1 (clr,__zero_reg__));
            }
          if (optimize_size && ldi_ok)
            {
              /* Compact counted loop reusing %A0 as the counter.  */
              *len = 5;
              return (AS2 (mov,%B0,%A0) CR_TAB
                      AS2 (ldi,%A0,6) "\n1:\t"
                      AS1 (lsl,%B0)     CR_TAB
                      AS1 (dec,%A0)     CR_TAB
                      AS1 (brne,1b));
            }
          if (optimize_size && scratch)
            break;  /* 5 */
          /* Left shift by 14 == right shift by 2 into the high byte.  */
          *len = 6;
          return (AS1 (clr,%B0) CR_TAB
                  AS1 (lsr,%A0) CR_TAB
                  AS1 (ror,%B0) CR_TAB
                  AS1 (lsr,%A0) CR_TAB
                  AS1 (ror,%B0) CR_TAB
                  AS1 (clr,%A0));

        case 15:
          *len = 4;
          return (AS1 (clr,%B0) CR_TAB
                  AS1 (lsr,%A0) CR_TAB
                  AS1 (ror,%B0) CR_TAB
                  AS1 (clr,%A0));
        }
      len = t;   /* restore: NULL again if caller gave no length slot */
    }
  out_shift_with_cnt ((AS1 (lsl,%A0) CR_TAB
                       AS1 (rol,%B0)),
                      insn, operands, len, 2);
  return "";
}
+
+
+/* 32bit shift left ((long)x << i) */
+
/* 32-bit shift left ((long)x << i).  Whole-byte counts (8/16/24) become
   byte moves; other constant counts and variable counts fall through to
   out_shift_with_cnt.  */

const char *
ashlsi3_out (rtx insn, rtx operands[], int *len)
{
  if (GET_CODE (operands[2]) == CONST_INT)
    {
      int k;
      int *t = len;     /* remember whether caller wanted a length */

      if (!len)
        len = &k;

      switch (INTVAL (operands[2]))
        {
        default:
          if (INTVAL (operands[2]) < 32)
            break;

          /* Shift of 32 or more: result is zero.  */
          if (AVR_HAVE_MOVW)
            return *len = 3, (AS1 (clr,%D0) CR_TAB
                              AS1 (clr,%C0) CR_TAB
                              AS2 (movw,%A0,%C0));
          *len = 4;
          return (AS1 (clr,%D0) CR_TAB
                  AS1 (clr,%C0) CR_TAB
                  AS1 (clr,%B0) CR_TAB
                  AS1 (clr,%A0));

        case 8:
          {
            /* One-byte move; direction chosen so overlapping source
               bytes are read before being overwritten.  */
            int reg0 = true_regnum (operands[0]);
            int reg1 = true_regnum (operands[1]);
            *len = 4;
            if (reg0 >= reg1)
              return (AS2 (mov,%D0,%C1) CR_TAB
                      AS2 (mov,%C0,%B1) CR_TAB
                      AS2 (mov,%B0,%A1) CR_TAB
                      AS1 (clr,%A0));
            else if (reg0 + 1 == reg1)
              {
                *len = 1;
                return AS1 (clr,%A0);
              }
            else
              return (AS1 (clr,%A0)     CR_TAB
                      AS2 (mov,%B0,%A1) CR_TAB
                      AS2 (mov,%C0,%B1) CR_TAB
                      AS2 (mov,%D0,%C1));
          }

        case 16:
          {
            int reg0 = true_regnum (operands[0]);
            int reg1 = true_regnum (operands[1]);
            *len = 4;
            if (AVR_HAVE_MOVW && (reg0 + 2 != reg1))
              {
                *len = 3;
                return (AS2 (movw,%C0,%A1) CR_TAB
                        AS1 (clr,%B0)      CR_TAB
                        AS1 (clr,%A0));
              }
            if (reg0 + 1 >= reg1)
              return (AS2 (mov,%D0,%B1) CR_TAB
                      AS2 (mov,%C0,%A1) CR_TAB
                      AS1 (clr,%B0)     CR_TAB
                      AS1 (clr,%A0));
            if (reg0 + 2 == reg1)
              {
                *len = 2;
                return (AS1 (clr,%B0) CR_TAB
                        AS1 (clr,%A0));
              }
            else
              return (AS2 (mov,%C0,%A1) CR_TAB
                      AS2 (mov,%D0,%B1) CR_TAB
                      AS1 (clr,%B0)     CR_TAB
                      AS1 (clr,%A0));
          }

        case 24:
          *len = 4;
          if (true_regnum (operands[0]) + 3 != true_regnum (operands[1]))
            return (AS2 (mov,%D0,%A1) CR_TAB
                    AS1 (clr,%C0)     CR_TAB
                    AS1 (clr,%B0)     CR_TAB
                    AS1 (clr,%A0));
          else
            {
              *len = 3;
              return (AS1 (clr,%C0) CR_TAB
                      AS1 (clr,%B0) CR_TAB
                      AS1 (clr,%A0));
            }

        case 31:
          /* Move bit 0 of the source into bit 31; rest is zero.  */
          *len = 6;
          return (AS1 (clr,%D0) CR_TAB
                  AS1 (lsr,%A0) CR_TAB
                  AS1 (ror,%D0) CR_TAB
                  AS1 (clr,%C0) CR_TAB
                  AS1 (clr,%B0) CR_TAB
                  AS1 (clr,%A0));
        }
      len = t;   /* restore: NULL again if caller gave no length slot */
    }
  out_shift_with_cnt ((AS1 (lsl,%A0) CR_TAB
                       AS1 (rol,%B0) CR_TAB
                       AS1 (rol,%C0) CR_TAB
                       AS1 (rol,%D0)),
                      insn, operands, len, 4);
  return "";
}
+
+/* 8bit arithmetic shift right ((signed char)x >> i) */
+
+/* Output assembler for an 8-bit arithmetic right shift.  OPERANDS[0]
+   is the (possibly in-place) register, OPERANDS[2] the shift count.
+   If LEN is non-null it receives the instruction count instead.  */
+
+const char *
+ashrqi3_out (rtx insn, rtx operands[], int *len)
+{
+  if (GET_CODE (operands[2]) == CONST_INT)
+    {
+      int k;
+
+      if (!len)
+	len = &k;
+
+      switch (INTVAL (operands[2]))
+	{
+	case 1:
+	  *len = 1;
+	  return AS1 (asr,%0);
+
+	case 2:
+	  *len = 2;
+	  return (AS1 (asr,%0) CR_TAB
+		  AS1 (asr,%0));
+
+	case 3:
+	  *len = 3;
+	  return (AS1 (asr,%0) CR_TAB
+		  AS1 (asr,%0) CR_TAB
+		  AS1 (asr,%0));
+
+	case 4:
+	  *len = 4;
+	  return (AS1 (asr,%0) CR_TAB
+		  AS1 (asr,%0) CR_TAB
+		  AS1 (asr,%0) CR_TAB
+		  AS1 (asr,%0));
+
+	case 5:
+	  *len = 5;
+	  return (AS1 (asr,%0) CR_TAB
+		  AS1 (asr,%0) CR_TAB
+		  AS1 (asr,%0) CR_TAB
+		  AS1 (asr,%0) CR_TAB
+		  AS1 (asr,%0));
+
+	/* Save bit 6 in T, replicate the sign via lsl+sbc (result is
+	   0x00 or 0xff), then restore the saved bit as bit 0.  */
+	case 6:
+	  *len = 4;
+	  return (AS2 (bst,%0,6) CR_TAB
+		  AS1 (lsl,%0) CR_TAB
+		  AS2 (sbc,%0,%0) CR_TAB
+		  AS2 (bld,%0,0));
+
+	default:
+	  if (INTVAL (operands[2]) < 8)
+	    break;
+
+	  /* fall through */
+
+	/* Counts >= 7 leave only copies of the sign bit: shift the
+	   sign into carry, then sbc gives 0x00 or 0xff.  */
+	case 7:
+	  *len = 2;
+	  return (AS1 (lsl,%0) CR_TAB
+		  AS2 (sbc,%0,%0));
+	}
+    }
+  else if (CONSTANT_P (operands[2]))
+    /* A symbolic constant shift count can never be supported.  */
+    fatal_insn ("internal compiler error.  Incorrect shift:", insn);
+
+  out_shift_with_cnt (AS1 (asr,%0),
+		      insn, operands, len, 1);
+  return "";
+}
+
+
+/* 16bit arithmetic shift right ((signed short)x >> i) */
+
+/* Output assembler for a 16-bit arithmetic right shift; same calling
+   convention as ashrqi3_out.  Byte %A0 is the low byte, %B0 the high
+   byte of operand 0.  */
+
+const char *
+ashrhi3_out (rtx insn, rtx operands[], int *len)
+{
+  if (GET_CODE (operands[2]) == CONST_INT)
+    {
+      /* A PARALLEL pattern means a scratch register %3 is available.  */
+      int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
+      /* Immediate instructions (ldi) need the dest in r16..r31.  */
+      int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
+      int k;
+      int *t = len;
+
+      if (!len)
+	len = &k;
+
+      switch (INTVAL (operands[2]))
+	{
+	case 4:
+	case 5:
+	  /* XXX try to optimize this too? */
+	  break;
+
+	case 6:
+	  if (optimize_size)
+	    break; /* scratch ? 5 : 6 */
+	  *len = 8;
+	  return (AS2 (mov,__tmp_reg__,%A0) CR_TAB
+		  AS2 (mov,%A0,%B0) CR_TAB
+		  AS1 (lsl,__tmp_reg__) CR_TAB
+		  AS1 (rol,%A0) CR_TAB
+		  AS2 (sbc,%B0,%B0) CR_TAB
+		  AS1 (lsl,__tmp_reg__) CR_TAB
+		  AS1 (rol,%A0) CR_TAB
+		  AS1 (rol,%B0));
+
+	case 7:
+	  *len = 4;
+	  return (AS1 (lsl,%A0) CR_TAB
+		  AS2 (mov,%A0,%B0) CR_TAB
+		  AS1 (rol,%A0) CR_TAB
+		  AS2 (sbc,%B0,%B0));
+
+	/* Byte shift: copy the high byte down, then sign-extend.  */
+	case 8:
+	  {
+	    int reg0 = true_regnum (operands[0]);
+	    int reg1 = true_regnum (operands[1]);
+
+	    if (reg0 == reg1)
+	      return *len = 3, (AS2 (mov,%A0,%B0) CR_TAB
+				AS1 (lsl,%B0) CR_TAB
+				AS2 (sbc,%B0,%B0));
+	    else if (reg0 == reg1 + 1)
+	      return *len = 3, (AS1 (clr,%B0) CR_TAB
+				AS2 (sbrc,%A0,7) CR_TAB
+				AS1 (dec,%B0));
+
+	    return *len = 4, (AS2 (mov,%A0,%B1) CR_TAB
+			      AS1 (clr,%B0) CR_TAB
+			      AS2 (sbrc,%A0,7) CR_TAB
+			      AS1 (dec,%B0));
+	  }
+
+	case 9:
+	  *len = 4;
+	  return (AS2 (mov,%A0,%B0) CR_TAB
+		  AS1 (lsl,%B0) CR_TAB
+		  AS2 (sbc,%B0,%B0) CR_TAB
+		  AS1 (asr,%A0));
+
+	case 10:
+	  *len = 5;
+	  return (AS2 (mov,%A0,%B0) CR_TAB
+		  AS1 (lsl,%B0) CR_TAB
+		  AS2 (sbc,%B0,%B0) CR_TAB
+		  AS1 (asr,%A0) CR_TAB
+		  AS1 (asr,%A0));
+
+	/* Cases 11..13: on enhanced cores with an ldi-capable dest,
+	   use a signed multiply by a power of two instead of shifting;
+	   muls clobbers r0/r1, so __zero_reg__ must be re-cleared.  */
+	case 11:
+	  if (AVR_ENHANCED && ldi_ok)
+	    {
+	      *len = 5;
+	      return (AS2 (ldi,%A0,0x20) CR_TAB
+		      AS2 (muls,%B0,%A0) CR_TAB
+		      AS2 (mov,%A0,r1) CR_TAB
+		      AS2 (sbc,%B0,%B0) CR_TAB
+		      AS1 (clr,__zero_reg__));
+	    }
+	  if (optimize_size && scratch)
+	    break;  /* 5 */
+	  *len = 6;
+	  return (AS2 (mov,%A0,%B0) CR_TAB
+		  AS1 (lsl,%B0) CR_TAB
+		  AS2 (sbc,%B0,%B0) CR_TAB
+		  AS1 (asr,%A0) CR_TAB
+		  AS1 (asr,%A0) CR_TAB
+		  AS1 (asr,%A0));
+
+	case 12:
+	  if (AVR_ENHANCED && ldi_ok)
+	    {
+	      *len = 5;
+	      return (AS2 (ldi,%A0,0x10) CR_TAB
+		      AS2 (muls,%B0,%A0) CR_TAB
+		      AS2 (mov,%A0,r1) CR_TAB
+		      AS2 (sbc,%B0,%B0) CR_TAB
+		      AS1 (clr,__zero_reg__));
+	    }
+	  if (optimize_size && scratch)
+	    break;  /* 5 */
+	  *len = 7;
+	  return (AS2 (mov,%A0,%B0) CR_TAB
+		  AS1 (lsl,%B0) CR_TAB
+		  AS2 (sbc,%B0,%B0) CR_TAB
+		  AS1 (asr,%A0) CR_TAB
+		  AS1 (asr,%A0) CR_TAB
+		  AS1 (asr,%A0) CR_TAB
+		  AS1 (asr,%A0));
+
+	case 13:
+	  if (AVR_ENHANCED && ldi_ok)
+	    {
+	      *len = 5;
+	      return (AS2 (ldi,%A0,0x08) CR_TAB
+		      AS2 (muls,%B0,%A0) CR_TAB
+		      AS2 (mov,%A0,r1) CR_TAB
+		      AS2 (sbc,%B0,%B0) CR_TAB
+		      AS1 (clr,__zero_reg__));
+	    }
+	  if (optimize_size)
+	    break;  /* scratch ? 5 : 7 */
+	  *len = 8;
+	  return (AS2 (mov,%A0,%B0) CR_TAB
+		  AS1 (lsl,%B0) CR_TAB
+		  AS2 (sbc,%B0,%B0) CR_TAB
+		  AS1 (asr,%A0) CR_TAB
+		  AS1 (asr,%A0) CR_TAB
+		  AS1 (asr,%A0) CR_TAB
+		  AS1 (asr,%A0) CR_TAB
+		  AS1 (asr,%A0));
+
+	case 14:
+	  *len = 5;
+	  return (AS1 (lsl,%B0) CR_TAB
+		  AS2 (sbc,%A0,%A0) CR_TAB
+		  AS1 (lsl,%B0) CR_TAB
+		  AS2 (mov,%B0,%A0) CR_TAB
+		  AS1 (rol,%A0));
+
+	default:
+	  if (INTVAL (operands[2]) < 16)
+	    break;
+
+	  /* fall through */
+
+	/* Counts >= 15: result is 16 copies of the sign bit.  */
+	case 15:
+	  return *len = 3, (AS1 (lsl,%B0) CR_TAB
+			    AS2 (sbc,%A0,%A0) CR_TAB
+			    AS2 (mov,%B0,%A0));
+	}
+      len = t;
+    }
+  out_shift_with_cnt ((AS1 (asr,%B0) CR_TAB
+		       AS1 (ror,%A0)),
+		      insn, operands, len, 2);
+  return "";
+}
+
+
+/* 32bit arithmetic shift right ((signed long)x >> i) */
+
+/* Output assembler for a 32-bit arithmetic right shift; same calling
+   convention as ashrqi3_out.  Bytes %A0..%D0 run low to high.  */
+
+const char *
+ashrsi3_out (rtx insn, rtx operands[], int *len)
+{
+  if (GET_CODE (operands[2]) == CONST_INT)
+    {
+      int k;
+      int *t = len;
+
+      if (!len)
+	len = &k;
+
+      switch (INTVAL (operands[2]))
+	{
+	/* Whole-byte shifts: move bytes down (order depends on the
+	   register overlap) and sign-extend into the freed bytes.  */
+	case 8:
+	  {
+	    int reg0 = true_regnum (operands[0]);
+	    int reg1 = true_regnum (operands[1]);
+	    *len=6;
+	    if (reg0 <= reg1)
+	      return (AS2 (mov,%A0,%B1) CR_TAB
+		      AS2 (mov,%B0,%C1) CR_TAB
+		      AS2 (mov,%C0,%D1) CR_TAB
+		      AS1 (clr,%D0) CR_TAB
+		      AS2 (sbrc,%C0,7) CR_TAB
+		      AS1 (dec,%D0));
+	    else if (reg0 == reg1 + 1)
+	      {
+		*len = 3;
+		return (AS1 (clr,%D0) CR_TAB
+			AS2 (sbrc,%C0,7) CR_TAB
+			AS1 (dec,%D0));
+	      }
+	    else
+	      return (AS1 (clr,%D0) CR_TAB
+		      AS2 (sbrc,%D1,7) CR_TAB
+		      AS1 (dec,%D0) CR_TAB
+		      AS2 (mov,%C0,%D1) CR_TAB
+		      AS2 (mov,%B0,%C1) CR_TAB
+		      AS2 (mov,%A0,%B1));
+	  }
+
+	case 16:
+	  {
+	    int reg0 = true_regnum (operands[0]);
+	    int reg1 = true_regnum (operands[1]);
+	    *len=6;
+	    if (AVR_HAVE_MOVW && (reg0 != reg1 + 2))
+	      {
+		*len = 5;
+		return (AS2 (movw,%A0,%C1) CR_TAB
+			AS1 (clr,%D0) CR_TAB
+			AS2 (sbrc,%B0,7) CR_TAB
+			AS1 (com,%D0) CR_TAB
+			AS2 (mov,%C0,%D0));
+	      }
+	    if (reg0 <= reg1 + 1)
+	      return (AS2 (mov,%A0,%C1) CR_TAB
+		      AS2 (mov,%B0,%D1) CR_TAB
+		      AS1 (clr,%D0) CR_TAB
+		      AS2 (sbrc,%B0,7) CR_TAB
+		      AS1 (com,%D0) CR_TAB
+		      AS2 (mov,%C0,%D0));
+	    else if (reg0 == reg1 + 2)
+	      return *len = 4, (AS1 (clr,%D0) CR_TAB
+				AS2 (sbrc,%B0,7) CR_TAB
+				AS1 (com,%D0) CR_TAB
+				AS2 (mov,%C0,%D0));
+	    else
+	      return (AS2 (mov,%B0,%D1) CR_TAB
+		      AS2 (mov,%A0,%C1) CR_TAB
+		      AS1 (clr,%D0) CR_TAB
+		      AS2 (sbrc,%B0,7) CR_TAB
+		      AS1 (com,%D0) CR_TAB
+		      AS2 (mov,%C0,%D0));
+	  }
+
+	case 24:
+	  if (true_regnum (operands[0]) != true_regnum (operands[1]) + 3)
+	    return *len = 6, (AS2 (mov,%A0,%D1) CR_TAB
+			      AS1 (clr,%D0) CR_TAB
+			      AS2 (sbrc,%A0,7) CR_TAB
+			      AS1 (com,%D0) CR_TAB
+			      AS2 (mov,%B0,%D0) CR_TAB
+			      AS2 (mov,%C0,%D0));
+	  else
+	    return *len = 5, (AS1 (clr,%D0) CR_TAB
+			      AS2 (sbrc,%A0,7) CR_TAB
+			      AS1 (com,%D0) CR_TAB
+			      AS2 (mov,%B0,%D0) CR_TAB
+			      AS2 (mov,%C0,%D0));
+
+	default:
+	  if (INTVAL (operands[2]) < 32)
+	    break;
+
+	  /* fall through */
+
+	/* Counts >= 31: result is 32 copies of the sign bit.  */
+	case 31:
+	  if (AVR_HAVE_MOVW)
+	    return *len = 4, (AS1 (lsl,%D0) CR_TAB
+			      AS2 (sbc,%A0,%A0) CR_TAB
+			      AS2 (mov,%B0,%A0) CR_TAB
+			      AS2 (movw,%C0,%A0));
+	  else
+	    return *len = 5, (AS1 (lsl,%D0) CR_TAB
+			      AS2 (sbc,%A0,%A0) CR_TAB
+			      AS2 (mov,%B0,%A0) CR_TAB
+			      AS2 (mov,%C0,%A0) CR_TAB
+			      AS2 (mov,%D0,%A0));
+	}
+      len = t;
+    }
+  out_shift_with_cnt ((AS1 (asr,%D0) CR_TAB
+		       AS1 (ror,%C0) CR_TAB
+		       AS1 (ror,%B0) CR_TAB
+		       AS1 (ror,%A0)),
+		      insn, operands, len, 4);
+  return "";
+}
+
+/* 8bit logic shift right ((unsigned char)x >> i) */
+
+/* Output assembler for an 8-bit logical right shift; same calling
+   convention as ashrqi3_out.  */
+
+const char *
+lshrqi3_out (rtx insn, rtx operands[], int *len)
+{
+  if (GET_CODE (operands[2]) == CONST_INT)
+    {
+      int k;
+
+      if (!len)
+	len = &k;
+
+      switch (INTVAL (operands[2]))
+	{
+	default:
+	  if (INTVAL (operands[2]) < 8)
+	    break;
+
+	  /* Counts >= 8 shift everything out.  */
+	  *len = 1;
+	  return AS1 (clr,%0);
+
+	case 1:
+	  *len = 1;
+	  return AS1 (lsr,%0);
+
+	case 2:
+	  *len = 2;
+	  return (AS1 (lsr,%0) CR_TAB
+		  AS1 (lsr,%0));
+	case 3:
+	  *len = 3;
+	  return (AS1 (lsr,%0) CR_TAB
+		  AS1 (lsr,%0) CR_TAB
+		  AS1 (lsr,%0));
+
+	/* Cases 4..6: if the register can take an immediate (andi),
+	   swap the nibbles and mask instead of shifting bit by bit.  */
+	case 4:
+	  if (test_hard_reg_class (LD_REGS, operands[0]))
+	    {
+	      *len=2;
+	      return (AS1 (swap,%0) CR_TAB
+		      AS2 (andi,%0,0x0f));
+	    }
+	  *len = 4;
+	  return (AS1 (lsr,%0) CR_TAB
+		  AS1 (lsr,%0) CR_TAB
+		  AS1 (lsr,%0) CR_TAB
+		  AS1 (lsr,%0));
+
+	case 5:
+	  if (test_hard_reg_class (LD_REGS, operands[0]))
+	    {
+	      *len = 3;
+	      return (AS1 (swap,%0) CR_TAB
+		      AS1 (lsr,%0) CR_TAB
+		      AS2 (andi,%0,0x7));
+	    }
+	  *len = 5;
+	  return (AS1 (lsr,%0) CR_TAB
+		  AS1 (lsr,%0) CR_TAB
+		  AS1 (lsr,%0) CR_TAB
+		  AS1 (lsr,%0) CR_TAB
+		  AS1 (lsr,%0));
+
+	case 6:
+	  if (test_hard_reg_class (LD_REGS, operands[0]))
+	    {
+	      *len = 4;
+	      return (AS1 (swap,%0) CR_TAB
+		      AS1 (lsr,%0) CR_TAB
+		      AS1 (lsr,%0) CR_TAB
+		      AS2 (andi,%0,0x3));
+	    }
+	  *len = 6;
+	  return (AS1 (lsr,%0) CR_TAB
+		  AS1 (lsr,%0) CR_TAB
+		  AS1 (lsr,%0) CR_TAB
+		  AS1 (lsr,%0) CR_TAB
+		  AS1 (lsr,%0) CR_TAB
+		  AS1 (lsr,%0));
+
+	/* Only bit 7 survives: rotate it into carry, clear the
+	   register, then rotate the carry back in as bit 0.  */
+	case 7:
+	  *len = 3;
+	  return (AS1 (rol,%0) CR_TAB
+		  AS1 (clr,%0) CR_TAB
+		  AS1 (rol,%0));
+	}
+    }
+  else if (CONSTANT_P (operands[2]))
+    fatal_insn ("internal compiler error.  Incorrect shift:", insn);
+
+  out_shift_with_cnt (AS1 (lsr,%0),
+		      insn, operands, len, 1);
+  return "";
+}
+
+/* 16bit logic shift right ((unsigned short)x >> i) */
+
+/* Output assembler for a 16-bit logical right shift; same calling
+   convention as ashrqi3_out.  */
+
+const char *
+lshrhi3_out (rtx insn, rtx operands[], int *len)
+{
+  if (GET_CODE (operands[2]) == CONST_INT)
+    {
+      /* A PARALLEL pattern means a scratch register %3 is available.  */
+      int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
+      /* Immediate instructions (ldi/andi) need the dest in r16..r31.  */
+      int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
+      int k;
+      int *t = len;
+
+      if (!len)
+	len = &k;
+
+      switch (INTVAL (operands[2]))
+	{
+	default:
+	  if (INTVAL (operands[2]) < 16)
+	    break;
+
+	  /* Counts >= 16 shift everything out.  */
+	  *len = 2;
+	  return (AS1 (clr,%B0) CR_TAB
+		  AS1 (clr,%A0));
+
+	/* Nibble-swap both bytes, then exchange the crossing nibbles
+	   via the xor trick (a^=b; b&=m; a^=b).  */
+	case 4:
+	  if (optimize_size && scratch)
+	    break;  /* 5 */
+	  if (ldi_ok)
+	    {
+	      *len = 6;
+	      return (AS1 (swap,%B0) CR_TAB
+		      AS1 (swap,%A0) CR_TAB
+		      AS2 (andi,%A0,0x0f) CR_TAB
+		      AS2 (eor,%A0,%B0) CR_TAB
+		      AS2 (andi,%B0,0x0f) CR_TAB
+		      AS2 (eor,%A0,%B0));
+	    }
+	  if (scratch)
+	    {
+	      *len = 7;
+	      return (AS1 (swap,%B0) CR_TAB
+		      AS1 (swap,%A0) CR_TAB
+		      AS2 (ldi,%3,0x0f) CR_TAB
+		      AS2 (and,%A0,%3) CR_TAB
+		      AS2 (eor,%A0,%B0) CR_TAB
+		      AS2 (and,%B0,%3) CR_TAB
+		      AS2 (eor,%A0,%B0));
+	    }
+	  break;  /* optimize_size ? 6 : 8 */
+
+	case 5:
+	  if (optimize_size)
+	    break;  /* scratch ? 5 : 6 */
+	  if (ldi_ok)
+	    {
+	      *len = 8;
+	      return (AS1 (lsr,%B0) CR_TAB
+		      AS1 (ror,%A0) CR_TAB
+		      AS1 (swap,%B0) CR_TAB
+		      AS1 (swap,%A0) CR_TAB
+		      AS2 (andi,%A0,0x0f) CR_TAB
+		      AS2 (eor,%A0,%B0) CR_TAB
+		      AS2 (andi,%B0,0x0f) CR_TAB
+		      AS2 (eor,%A0,%B0));
+	    }
+	  if (scratch)
+	    {
+	      *len = 9;
+	      return (AS1 (lsr,%B0) CR_TAB
+		      AS1 (ror,%A0) CR_TAB
+		      AS1 (swap,%B0) CR_TAB
+		      AS1 (swap,%A0) CR_TAB
+		      AS2 (ldi,%3,0x0f) CR_TAB
+		      AS2 (and,%A0,%3) CR_TAB
+		      AS2 (eor,%A0,%B0) CR_TAB
+		      AS2 (and,%B0,%3) CR_TAB
+		      AS2 (eor,%A0,%B0));
+	    }
+	  break;  /* 10 */
+
+	/* Shift LEFT by 2 into __tmp_reg__, then move bytes down -
+	   equivalent to a right shift by 6.  */
+	case 6:
+	  if (optimize_size)
+	    break;  /* scratch ? 5 : 6 */
+	  *len = 9;
+	  return (AS1 (clr,__tmp_reg__) CR_TAB
+		  AS1 (lsl,%A0) CR_TAB
+		  AS1 (rol,%B0) CR_TAB
+		  AS1 (rol,__tmp_reg__) CR_TAB
+		  AS1 (lsl,%A0) CR_TAB
+		  AS1 (rol,%B0) CR_TAB
+		  AS1 (rol,__tmp_reg__) CR_TAB
+		  AS2 (mov,%A0,%B0) CR_TAB
+		  AS2 (mov,%B0,__tmp_reg__));
+
+	case 7:
+	  *len = 5;
+	  return (AS1 (lsl,%A0) CR_TAB
+		  AS2 (mov,%A0,%B0) CR_TAB
+		  AS1 (rol,%A0) CR_TAB
+		  AS2 (sbc,%B0,%B0) CR_TAB
+		  AS1 (neg,%B0));
+
+	case 8:
+	  if (true_regnum (operands[0]) != true_regnum (operands[1]) + 1)
+	    return *len = 2, (AS2 (mov,%A0,%B1) CR_TAB
+			      AS1 (clr,%B0));
+	  else
+	    return *len = 1, AS1 (clr,%B0);
+
+	case 9:
+	  *len = 3;
+	  return (AS2 (mov,%A0,%B0) CR_TAB
+		  AS1 (clr,%B0) CR_TAB
+		  AS1 (lsr,%A0));
+
+	case 10:
+	  *len = 4;
+	  return (AS2 (mov,%A0,%B0) CR_TAB
+		  AS1 (clr,%B0) CR_TAB
+		  AS1 (lsr,%A0) CR_TAB
+		  AS1 (lsr,%A0));
+
+	case 11:
+	  *len = 5;
+	  return (AS2 (mov,%A0,%B0) CR_TAB
+		  AS1 (clr,%B0) CR_TAB
+		  AS1 (lsr,%A0) CR_TAB
+		  AS1 (lsr,%A0) CR_TAB
+		  AS1 (lsr,%A0));
+
+	case 12:
+	  if (ldi_ok)
+	    {
+	      *len = 4;
+	      return (AS2 (mov,%A0,%B0) CR_TAB
+		      AS1 (clr,%B0) CR_TAB
+		      AS1 (swap,%A0) CR_TAB
+		      AS2 (andi,%A0,0x0f));
+	    }
+	  if (scratch)
+	    {
+	      *len = 5;
+	      return (AS2 (mov,%A0,%B0) CR_TAB
+		      AS1 (clr,%B0) CR_TAB
+		      AS1 (swap,%A0) CR_TAB
+		      AS2 (ldi,%3,0x0f) CR_TAB
+		      AS2 (and,%A0,%3));
+	    }
+	  *len = 6;
+	  return (AS2 (mov,%A0,%B0) CR_TAB
+		  AS1 (clr,%B0) CR_TAB
+		  AS1 (lsr,%A0) CR_TAB
+		  AS1 (lsr,%A0) CR_TAB
+		  AS1 (lsr,%A0) CR_TAB
+		  AS1 (lsr,%A0));
+
+	/* Cases 13/14: on enhanced cores use an unsigned multiply by a
+	   power of two (mul clobbers r0/r1, so __zero_reg__ must be
+	   re-cleared afterwards).  */
+	case 13:
+	  if (ldi_ok)
+	    {
+	      *len = 5;
+	      return (AS2 (mov,%A0,%B0) CR_TAB
+		      AS1 (clr,%B0) CR_TAB
+		      AS1 (swap,%A0) CR_TAB
+		      AS1 (lsr,%A0) CR_TAB
+		      AS2 (andi,%A0,0x07));
+	    }
+	  if (AVR_ENHANCED && scratch)
+	    {
+	      *len = 5;
+	      return (AS2 (ldi,%3,0x08) CR_TAB
+		      AS2 (mul,%B0,%3) CR_TAB
+		      AS2 (mov,%A0,r1) CR_TAB
+		      AS1 (clr,%B0) CR_TAB
+		      AS1 (clr,__zero_reg__));
+	    }
+	  if (optimize_size && scratch)
+	    break;  /* 5 */
+	  if (scratch)
+	    {
+	      *len = 6;
+	      return (AS2 (mov,%A0,%B0) CR_TAB
+		      AS1 (clr,%B0) CR_TAB
+		      AS1 (swap,%A0) CR_TAB
+		      AS1 (lsr,%A0) CR_TAB
+		      AS2 (ldi,%3,0x07) CR_TAB
+		      AS2 (and,%A0,%3));
+	    }
+	  if (AVR_ENHANCED)
+	    {
+	      /* Build the constant 8 in r1 via the T flag (set/bld),
+		 since no immediate-capable register is available.  */
+	      *len = 6;
+	      return ("set" CR_TAB
+		      AS2 (bld,r1,3) CR_TAB
+		      AS2 (mul,%B0,r1) CR_TAB
+		      AS2 (mov,%A0,r1) CR_TAB
+		      AS1 (clr,%B0) CR_TAB
+		      AS1 (clr,__zero_reg__));
+	    }
+	  *len = 7;
+	  return (AS2 (mov,%A0,%B0) CR_TAB
+		  AS1 (clr,%B0) CR_TAB
+		  AS1 (lsr,%A0) CR_TAB
+		  AS1 (lsr,%A0) CR_TAB
+		  AS1 (lsr,%A0) CR_TAB
+		  AS1 (lsr,%A0) CR_TAB
+		  AS1 (lsr,%A0));
+
+	case 14:
+	  if (AVR_ENHANCED && ldi_ok)
+	    {
+	      *len = 5;
+	      return (AS2 (ldi,%A0,0x04) CR_TAB
+		      AS2 (mul,%B0,%A0) CR_TAB
+		      AS2 (mov,%A0,r1) CR_TAB
+		      AS1 (clr,%B0) CR_TAB
+		      AS1 (clr,__zero_reg__));
+	    }
+	  if (AVR_ENHANCED && scratch)
+	    {
+	      *len = 5;
+	      return (AS2 (ldi,%3,0x04) CR_TAB
+		      AS2 (mul,%B0,%3) CR_TAB
+		      AS2 (mov,%A0,r1) CR_TAB
+		      AS1 (clr,%B0) CR_TAB
+		      AS1 (clr,__zero_reg__));
+	    }
+	  if (optimize_size && ldi_ok)
+	    {
+	      /* Small loop: %B0 is free to serve as the counter.  */
+	      *len = 5;
+	      return (AS2 (mov,%A0,%B0) CR_TAB
+		      AS2 (ldi,%B0,6) "\n1:\t"
+		      AS1 (lsr,%A0) CR_TAB
+		      AS1 (dec,%B0) CR_TAB
+		      AS1 (brne,1b));
+	    }
+	  if (optimize_size && scratch)
+	    break;  /* 5 */
+	  *len = 6;
+	  return (AS1 (clr,%A0) CR_TAB
+		  AS1 (lsl,%B0) CR_TAB
+		  AS1 (rol,%A0) CR_TAB
+		  AS1 (lsl,%B0) CR_TAB
+		  AS1 (rol,%A0) CR_TAB
+		  AS1 (clr,%B0));
+
+	case 15:
+	  *len = 4;
+	  return (AS1 (clr,%A0) CR_TAB
+		  AS1 (lsl,%B0) CR_TAB
+		  AS1 (rol,%A0) CR_TAB
+		  AS1 (clr,%B0));
+	}
+      len = t;
+    }
+  out_shift_with_cnt ((AS1 (lsr,%B0) CR_TAB
+		       AS1 (ror,%A0)),
+		      insn, operands, len, 2);
+  return "";
+}
+
+/* 32bit logic shift right ((unsigned long)x >> i) */
+
+/* Output assembler for a 32-bit logical right shift; same calling
+   convention as ashrqi3_out.  */
+
+const char *
+lshrsi3_out (rtx insn, rtx operands[], int *len)
+{
+  if (GET_CODE (operands[2]) == CONST_INT)
+    {
+      int k;
+      int *t = len;
+
+      if (!len)
+	len = &k;
+
+      switch (INTVAL (operands[2]))
+	{
+	default:
+	  if (INTVAL (operands[2]) < 32)
+	    break;
+
+	  /* Counts >= 32 shift everything out.  */
+	  if (AVR_HAVE_MOVW)
+	    return *len = 3, (AS1 (clr,%D0) CR_TAB
+			      AS1 (clr,%C0) CR_TAB
+			      AS2 (movw,%A0,%C0));
+	  *len = 4;
+	  return (AS1 (clr,%D0) CR_TAB
+		  AS1 (clr,%C0) CR_TAB
+		  AS1 (clr,%B0) CR_TAB
+		  AS1 (clr,%A0));
+
+	/* Whole-byte shifts: move bytes down, order depending on the
+	   register overlap, and zero the freed high bytes.  */
+	case 8:
+	  {
+	    int reg0 = true_regnum (operands[0]);
+	    int reg1 = true_regnum (operands[1]);
+	    *len = 4;
+	    if (reg0 <= reg1)
+	      return (AS2 (mov,%A0,%B1) CR_TAB
+		      AS2 (mov,%B0,%C1) CR_TAB
+		      AS2 (mov,%C0,%D1) CR_TAB
+		      AS1 (clr,%D0));
+	    else if (reg0 == reg1 + 1)
+	      return *len = 1, AS1 (clr,%D0);
+	    else
+	      return (AS1 (clr,%D0) CR_TAB
+		      AS2 (mov,%C0,%D1) CR_TAB
+		      AS2 (mov,%B0,%C1) CR_TAB
+		      AS2 (mov,%A0,%B1));
+	  }
+
+	case 16:
+	  {
+	    int reg0 = true_regnum (operands[0]);
+	    int reg1 = true_regnum (operands[1]);
+	    *len = 4;
+	    if (AVR_HAVE_MOVW && (reg0 != reg1 + 2))
+	      {
+		*len = 3;
+		return (AS2 (movw,%A0,%C1) CR_TAB
+			AS1 (clr,%C0) CR_TAB
+			AS1 (clr,%D0));
+	      }
+	    if (reg0 <= reg1 + 1)
+	      return (AS2 (mov,%A0,%C1) CR_TAB
+		      AS2 (mov,%B0,%D1) CR_TAB
+		      AS1 (clr,%C0) CR_TAB
+		      AS1 (clr,%D0));
+	    else if (reg0 == reg1 + 2)
+	      return *len = 2, (AS1 (clr,%C0) CR_TAB
+				AS1 (clr,%D0));
+	    else
+	      return (AS2 (mov,%B0,%D1) CR_TAB
+		      AS2 (mov,%A0,%C1) CR_TAB
+		      AS1 (clr,%C0) CR_TAB
+		      AS1 (clr,%D0));
+	  }
+
+	case 24:
+	  if (true_regnum (operands[0]) != true_regnum (operands[1]) + 3)
+	    return *len = 4, (AS2 (mov,%A0,%D1) CR_TAB
+			      AS1 (clr,%B0) CR_TAB
+			      AS1 (clr,%C0) CR_TAB
+			      AS1 (clr,%D0));
+	  else
+	    return *len = 3, (AS1 (clr,%B0) CR_TAB
+			      AS1 (clr,%C0) CR_TAB
+			      AS1 (clr,%D0));
+
+	/* Only the sign bit survives: test it and set bit 0.  */
+	case 31:
+	  *len = 6;
+	  return (AS1 (clr,%A0) CR_TAB
+		  AS2 (sbrc,%D0,7) CR_TAB
+		  AS1 (inc,%A0) CR_TAB
+		  AS1 (clr,%B0) CR_TAB
+		  AS1 (clr,%C0) CR_TAB
+		  AS1 (clr,%D0));
+	}
+      len = t;
+    }
+  out_shift_with_cnt ((AS1 (lsr,%D0) CR_TAB
+		       AS1 (ror,%C0) CR_TAB
+		       AS1 (ror,%B0) CR_TAB
+		       AS1 (ror,%A0)),
+		      insn, operands, len, 4);
+  return "";
+}
+
+/* Return a corrected length for instruction INSN.  LEN is the length
+   computed from the insn attributes; it is recomputed here for moves,
+   tests, masked and/ior operations, reloaded constants and shifts by
+   calling the corresponding output functions in length-only mode
+   (non-null LEN pointer).  Presumably invoked via the target's
+   ADJUST_INSN_LENGTH macro - confirm in avr.h.  */
+
+int
+adjust_insn_length (rtx insn, int len)
+{
+  rtx patt = PATTERN (insn);
+  rtx set;
+
+  if (GET_CODE (patt) == SET)
+    {
+      rtx op[10];
+      op[1] = SET_SRC (patt);
+      op[0] = SET_DEST (patt);
+      if (general_operand (op[1], VOIDmode)
+	  && general_operand (op[0], VOIDmode))
+	{
+	  /* Plain move: ask the move output routines for the length.  */
+	  switch (GET_MODE (op[0]))
+	    {
+	    case QImode:
+	      output_movqi (insn, op, &len);
+	      break;
+	    case HImode:
+	      output_movhi (insn, op, &len);
+	      break;
+	    case SImode:
+	    case SFmode:
+	      output_movsisf (insn, op, &len);
+	      break;
+	    default:
+	      break;
+	    }
+	}
+      else if (op[0] == cc0_rtx && REG_P (op[1]))
+	{
+	  /* Compare against zero (test insn).  */
+	  switch (GET_MODE (op[1]))
+	    {
+	    case HImode: out_tsthi (insn,&len); break;
+	    case SImode: out_tstsi (insn,&len); break;
+	    default: break;
+	    }
+	}
+      else if (GET_CODE (op[1]) == AND)
+	{
+	  if (GET_CODE (XEXP (op[1],1)) == CONST_INT)
+	    {
+	      HOST_WIDE_INT mask = INTVAL (XEXP (op[1],1));
+	      /* One instruction per byte that is actually masked
+		 (an all-ones mask byte needs no code).  */
+	      if (GET_MODE (op[1]) == SImode)
+		len = (((mask & 0xff) != 0xff)
+		       + ((mask & 0xff00) != 0xff00)
+		       + ((mask & 0xff0000L) != 0xff0000L)
+		       + ((mask & 0xff000000L) != 0xff000000L));
+	      else if (GET_MODE (op[1]) == HImode)
+		len = (((mask & 0xff) != 0xff)
+		       + ((mask & 0xff00) != 0xff00));
+	    }
+	}
+      else if (GET_CODE (op[1]) == IOR)
+	{
+	  if (GET_CODE (XEXP (op[1],1)) == CONST_INT)
+	    {
+	      HOST_WIDE_INT mask = INTVAL (XEXP (op[1],1));
+	      /* One instruction per nonzero mask byte.  */
+	      if (GET_MODE (op[1]) == SImode)
+		len = (((mask & 0xff) != 0)
+		       + ((mask & 0xff00) != 0)
+		       + ((mask & 0xff0000L) != 0)
+		       + ((mask & 0xff000000L) != 0));
+	      else if (GET_MODE (op[1]) == HImode)
+		len = (((mask & 0xff) != 0)
+		       + ((mask & 0xff00) != 0));
+	    }
+	}
+    }
+  set = single_set (insn);
+  if (set)
+    {
+      rtx op[10];
+
+      op[1] = SET_SRC (set);
+      op[0] = SET_DEST (set);
+
+      if (GET_CODE (patt) == PARALLEL
+	  && general_operand (op[1], VOIDmode)
+	  && general_operand (op[0], VOIDmode))
+	{
+	  /* Reload of a constant with a clobbered scratch (op[2]).  */
+	  if (XVECLEN (patt, 0) == 2)
+	    op[2] = XVECEXP (patt, 0, 1);
+
+	  switch (GET_MODE (op[0]))
+	    {
+	    case QImode:
+	      len = 2;
+	      break;
+	    case HImode:
+	      output_reload_inhi (insn, op, &len);
+	      break;
+	    case SImode:
+	    case SFmode:
+	      output_reload_insisf (insn, op, &len);
+	      break;
+	    default:
+	      break;
+	    }
+	}
+      else if (GET_CODE (op[1]) == ASHIFT
+	       || GET_CODE (op[1]) == ASHIFTRT
+	       || GET_CODE (op[1]) == LSHIFTRT)
+	{
+	  /* Shift: dispatch on direction/signedness and mode.  */
+	  rtx ops[10];
+	  ops[0] = op[0];
+	  ops[1] = XEXP (op[1],0);
+	  ops[2] = XEXP (op[1],1);
+	  switch (GET_CODE (op[1]))
+	    {
+	    case ASHIFT:
+	      switch (GET_MODE (op[0]))
+		{
+		case QImode: ashlqi3_out (insn,ops,&len); break;
+		case HImode: ashlhi3_out (insn,ops,&len); break;
+		case SImode: ashlsi3_out (insn,ops,&len); break;
+		default: break;
+		}
+	      break;
+	    case ASHIFTRT:
+	      switch (GET_MODE (op[0]))
+		{
+		case QImode: ashrqi3_out (insn,ops,&len); break;
+		case HImode: ashrhi3_out (insn,ops,&len); break;
+		case SImode: ashrsi3_out (insn,ops,&len); break;
+		default: break;
+		}
+	      break;
+	    case LSHIFTRT:
+	      switch (GET_MODE (op[0]))
+		{
+		case QImode: lshrqi3_out (insn,ops,&len); break;
+		case HImode: lshrhi3_out (insn,ops,&len); break;
+		case SImode: lshrsi3_out (insn,ops,&len); break;
+		default: break;
+		}
+	      break;
+	    default:
+	      break;
+	    }
+	}
+    }
+  return len;
+}
+
+/* Return nonzero if register REG dead after INSN.  Either INSN itself
+   kills or sets REG, or the forward scan in _reg_unused_after finds
+   no later use (non-REG operands only get the dead_or_set_p test).  */
+
+int
+reg_unused_after (rtx insn, rtx reg)
+{
+  return (dead_or_set_p (insn, reg)
+	  || (REG_P(reg) && _reg_unused_after (insn, reg)));
+}
+
+/* Return nonzero if REG is not used after INSN.
+   We assume REG is a reload reg, and therefore does
+   not live past labels.  It may live past calls or jumps though.
+   The scan walks forward from INSN and stops conservatively (returns
+   0) at any jump or at an insn that reads REG.  */
+
+int
+_reg_unused_after (rtx insn, rtx reg)
+{
+  enum rtx_code code;
+  rtx set;
+
+  /* If the reg is set by this instruction, then it is safe for our
+     case.  Disregard the case where this is a store to memory, since
+     we are checking a register used in the store address.  */
+  set = single_set (insn);
+  if (set && GET_CODE (SET_DEST (set)) != MEM
+      && reg_overlap_mentioned_p (reg, SET_DEST (set)))
+    return 1;
+
+  while ((insn = NEXT_INSN (insn)))
+    {
+      rtx set;
+      code = GET_CODE (insn);
+
+#if 0
+      /* If this is a label that existed before reload, then the register
+	 if dead here.  However, if this is a label added by reorg, then
+	 the register may still be live here.  We can't tell the difference,
+	 so we just ignore labels completely.  */
+      if (code == CODE_LABEL)
+	return 1;
+      /* else */
+#endif
+
+      if (!INSN_P (insn))
+	continue;
+
+      if (code == JUMP_INSN)
+	return 0;
+
+      /* If this is a sequence, we must handle them all at once.
+	 We could have for instance a call that sets the target register,
+	 and an insn in a delay slot that uses the register.  In this case,
+	 we must return 0.  */
+      else if (code == INSN && GET_CODE (PATTERN (insn)) == SEQUENCE)
+	{
+	  int i;
+	  int retval = 0;
+
+	  for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
+	    {
+	      rtx this_insn = XVECEXP (PATTERN (insn), 0, i);
+	      rtx set = single_set (this_insn);
+
+	      if (GET_CODE (this_insn) == CALL_INSN)
+		code = CALL_INSN;
+	      else if (GET_CODE (this_insn) == JUMP_INSN)
+		{
+		  if (INSN_ANNULLED_BRANCH_P (this_insn))
+		    return 0;
+		  code = JUMP_INSN;
+		}
+
+	      if (set && reg_overlap_mentioned_p (reg, SET_SRC (set)))
+		return 0;
+	      if (set && reg_overlap_mentioned_p (reg, SET_DEST (set)))
+		{
+		  /* A full register overwrite kills REG; a store only
+		     uses it in the address.  */
+		  if (GET_CODE (SET_DEST (set)) != MEM)
+		    retval = 1;
+		  else
+		    return 0;
+		}
+	      if (set == 0
+		  && reg_overlap_mentioned_p (reg, PATTERN (this_insn)))
+		return 0;
+	    }
+	  if (retval == 1)
+	    return 1;
+	  else if (code == JUMP_INSN)
+	    return 0;
+	}
+
+      if (code == CALL_INSN)
+	{
+	  /* A call that uses REG as argument keeps it live; otherwise a
+	     call-clobbered REG dies at the call.  */
+	  rtx tem;
+	  for (tem = CALL_INSN_FUNCTION_USAGE (insn); tem; tem = XEXP (tem, 1))
+	    if (GET_CODE (XEXP (tem, 0)) == USE
+		&& REG_P (XEXP (XEXP (tem, 0), 0))
+		&& reg_overlap_mentioned_p (reg, XEXP (XEXP (tem, 0), 0)))
+	      return 0;
+	  if (call_used_regs[REGNO (reg)])
+	    return 1;
+	}
+
+      set = single_set (insn);
+
+      if (set && reg_overlap_mentioned_p (reg, SET_SRC (set)))
+	return 0;
+      if (set && reg_overlap_mentioned_p (reg, SET_DEST (set)))
+	return GET_CODE (SET_DEST (set)) != MEM;
+      if (set == 0 && reg_overlap_mentioned_p (reg, PATTERN (insn)))
+	return 0;
+    }
+  return 1;
+}
+
+/* Target hook for assembling integer objects.  The AVR version needs
+   special handling for references to certain labels.  Pointer-sized
+   references to code (function symbols and label refs) are emitted
+   through the pm() operator so the linker produces word (program
+   memory) addresses; everything else goes to the default hook.  */
+
+static bool
+avr_assemble_integer (rtx x, unsigned int size, int aligned_p)
+{
+  if (size == POINTER_SIZE / BITS_PER_UNIT && aligned_p
+      && ((GET_CODE (x) == SYMBOL_REF && SYMBOL_REF_FUNCTION_P (x))
+	  || GET_CODE (x) == LABEL_REF))
+    {
+      fputs ("\t.word\tpm(", asm_out_file);
+      output_addr_const (asm_out_file, x);
+      fputs (")\n", asm_out_file);
+      return true;
+    }
+  return default_assemble_integer (x, size, aligned_p);
+}
+
+/* The routine used to output NUL terminated strings.  We use a special
+   version of this for most svr4 targets because doing so makes the
+   generated assembly code more compact (and thus faster to assemble)
+   as well as more readable, especially for targets like the i386
+   (where the only alternative is to output character sequences as
+   comma separated lists of numbers).  */
+
+/* Emit STR to FILE as a single STRING_ASM_OP directive, escaping each
+   byte according to the ESCAPES table: 0 = literal, 1 = octal escape,
+   anything else = the backslash-escape character to print.  */
+
+void
+gas_output_limited_string(FILE *file, const char *str)
+{
+  const unsigned char *_limited_str = (unsigned char *) str;
+  unsigned ch;
+  fprintf (file, "%s\"", STRING_ASM_OP);
+  for (; (ch = *_limited_str); _limited_str++)
+    {
+      int escape;
+      switch (escape = ESCAPES[ch])
+	{
+	case 0:
+	  putc (ch, file);
+	  break;
+	case 1:
+	  fprintf (file, "\\%03o", ch);
+	  break;
+	default:
+	  putc ('\\', file);
+	  putc (escape, file);
+	  break;
+	}
+    }
+  fprintf (file, "\"\n");
+}
+
+/* The routine used to output sequences of byte values.  We use a special
+   version of this for most svr4 targets because doing so makes the
+   generated assembly code more compact (and thus faster to assemble)
+   as well as more readable.  Note that if we find subparts of the
+   character sequence which end with NUL (and which are shorter than
+   STRING_LIMIT) we output those using ASM_OUTPUT_LIMITED_STRING.  */
+
+void
+gas_output_ascii(FILE *file, const char *str, size_t length)
+{
+  const unsigned char *_ascii_bytes = (const unsigned char *) str;
+  const unsigned char *limit = _ascii_bytes + length;
+  /* Number of output characters in the currently open .ascii line.  */
+  unsigned bytes_in_chunk = 0;
+  for (; _ascii_bytes < limit; _ascii_bytes++)
+    {
+      const unsigned char *p;
+      /* Keep assembler lines short: close the chunk at 60 chars.  */
+      if (bytes_in_chunk >= 60)
+	{
+	  fprintf (file, "\"\n");
+	  bytes_in_chunk = 0;
+	}
+      /* Find the next NUL; short NUL-terminated runs are emitted as
+	 whole strings via gas_output_limited_string.  */
+      for (p = _ascii_bytes; p < limit && *p != '\0'; p++)
+	continue;
+      if (p < limit && (p - _ascii_bytes) <= (signed)STRING_LIMIT)
+	{
+	  if (bytes_in_chunk > 0)
+	    {
+	      fprintf (file, "\"\n");
+	      bytes_in_chunk = 0;
+	    }
+	  gas_output_limited_string (file, (char*)_ascii_bytes);
+	  /* The loop increment then skips the NUL itself.  */
+	  _ascii_bytes = p;
+	}
+      else
+	{
+	  int escape;
+	  unsigned ch;
+	  if (bytes_in_chunk == 0)
+	    fprintf (file, "\t.ascii\t\"");
+	  switch (escape = ESCAPES[ch = *_ascii_bytes])
+	    {
+	    case 0:
+	      putc (ch, file);
+	      bytes_in_chunk++;
+	      break;
+	    case 1:
+	      /* Octal escape: four output characters.  */
+	      fprintf (file, "\\%03o", ch);
+	      bytes_in_chunk += 4;
+	      break;
+	    default:
+	      putc ('\\', file);
+	      putc (escape, file);
+	      bytes_in_chunk += 2;
+	      break;
+	    }
+	}
+    }
+  if (bytes_in_chunk > 0)
+    fprintf (file, "\"\n");
+}
+
+/* Return value is nonzero if pseudos that have been
+   assigned to registers of class CLASS would likely be spilled
+   because registers of CLASS are needed for spill registers.
+   NOTE(review): the value is boolean, yet the declared return type is
+   enum reg_class - works because callers use it in a truth context,
+   but looks like a historical wart; confirm against the
+   CLASS_LIKELY_SPILLED_P macro in avr.h before changing.  */
+
+enum reg_class
+class_likely_spilled_p (int c)
+{
+  return (c != ALL_REGS && c != ADDW_REGS);
+}
+
+/* Valid attributes:
+   progmem - put data to program memory;
+   signal - make a function to be hardware interrupt.  After function
+   prologue interrupts are disabled;
+   interrupt - make a function to be hardware interrupt.  After function
+   prologue interrupts are enabled;
+   naked - don't generate function prologue/epilogue and `ret' command.
+
+   Only `progmem' attribute valid for type.  */
+
+/* Machine attribute table; the all-NULL entry terminates it.  */
+const struct attribute_spec avr_attribute_table[] =
+{
+  /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
+  { "progmem",   0, 0, false, false, false,  avr_handle_progmem_attribute },
+  { "signal",    0, 0, true,  false, false,  avr_handle_fndecl_attribute },
+  { "interrupt", 0, 0, true,  false, false,  avr_handle_fndecl_attribute },
+  { "naked",     0, 0, false, true,  true,   avr_handle_fntype_attribute },
+  { NULL,        0, 0, false, false, false, NULL }
+};
+
+/* Handle a "progmem" attribute; arguments as in
+   struct attribute_spec.handler.  Valid on static/external variable
+   declarations (which must be initialized); sets *NO_ADD_ATTRS when
+   the attribute should not be attached after all.  */
+static tree
+avr_handle_progmem_attribute (tree *node, tree name,
+			      tree args ATTRIBUTE_UNUSED,
+			      int flags ATTRIBUTE_UNUSED,
+			      bool *no_add_attrs)
+{
+  if (DECL_P (*node))
+    {
+      if (TREE_CODE (*node) == TYPE_DECL)
+	{
+	  /* This is really a decl attribute, not a type attribute,
+	     but try to handle it for GCC 3.0 backwards compatibility.  */
+
+	  tree type = TREE_TYPE (*node);
+	  tree attr = tree_cons (name, args, TYPE_ATTRIBUTES (type));
+	  tree newtype = build_type_attribute_variant (type, attr);
+
+	  TYPE_MAIN_VARIANT (newtype) = TYPE_MAIN_VARIANT (type);
+	  TREE_TYPE (*node) = newtype;
+	  *no_add_attrs = true;
+	}
+      else if (TREE_STATIC (*node) || DECL_EXTERNAL (*node))
+	{
+	  /* Uninitialized data would end up in .bss, which is RAM,
+	     defeating the point of progmem.  */
+	  if (DECL_INITIAL (*node) == NULL_TREE && !DECL_EXTERNAL (*node))
+	    {
+	      warning (0, "only initialized variables can be placed into "
+		       "program memory area");
+	      *no_add_attrs = true;
+	    }
+	}
+      else
+	{
+	  warning (OPT_Wattributes, "%qs attribute ignored",
+		   IDENTIFIER_POINTER (name));
+	  *no_add_attrs = true;
+	}
+    }
+
+  return NULL_TREE;
+}
+
+/* Handle an attribute requiring a FUNCTION_DECL; arguments as in
+   struct attribute_spec.handler.  Used for "signal" and "interrupt";
+   also warns when the handler's name is not "__vector*".  */
+
+static tree
+avr_handle_fndecl_attribute (tree *node, tree name,
+			     tree args ATTRIBUTE_UNUSED,
+			     int flags ATTRIBUTE_UNUSED,
+			     bool *no_add_attrs)
+{
+  if (TREE_CODE (*node) != FUNCTION_DECL)
+    {
+      warning (OPT_Wattributes, "%qs attribute only applies to functions",
+	       IDENTIFIER_POINTER (name));
+      *no_add_attrs = true;
+    }
+  else
+    {
+      const char *func_name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (*node));
+      const char *attr = IDENTIFIER_POINTER (name);
+
+      /* If the function has the 'signal' or 'interrupt' attribute, test to
+	 make sure that the name of the function is "__vector_NN" so as to
+	 catch when the user misspells the interrupt vector name.
+	 NOTE(review): strncmp with the attribute's own length makes these
+	 prefix matches, not exact matches - confirm that is intended.  */
+
+      if (strncmp (attr, "interrupt", strlen ("interrupt")) == 0)
+	{
+	  if (strncmp (func_name, "__vector", strlen ("__vector")) != 0)
+	    {
+	      warning (0, "%qs appears to be a misspelled interrupt handler",
+		       func_name);
+	    }
+	}
+      else if (strncmp (attr, "signal", strlen ("signal")) == 0)
+	{
+	  if (strncmp (func_name, "__vector", strlen ("__vector")) != 0)
+	    {
+	      warning (0, "%qs appears to be a misspelled signal handler",
+		       func_name);
+	    }
+	}
+    }
+
+  return NULL_TREE;
+}
+
+/* Handle an attribute requiring a FUNCTION_TYPE (i.e. "naked");
+   arguments as in struct attribute_spec.handler.  Only validates the
+   attribute target; sets *NO_ADD_ATTRS on misuse.  */
+static tree
+avr_handle_fntype_attribute (tree *node, tree name,
+			     tree args ATTRIBUTE_UNUSED,
+			     int flags ATTRIBUTE_UNUSED,
+			     bool *no_add_attrs)
+{
+  if (TREE_CODE (*node) != FUNCTION_TYPE)
+    {
+      warning (OPT_Wattributes, "%qs attribute only applies to functions",
+	       IDENTIFIER_POINTER (name));
+      *no_add_attrs = true;
+    }
+
+  return NULL_TREE;
+}
+
+/* Look for attribute `progmem' in DECL
+   if found return 1, otherwise 0.
+   The attribute is searched both in the given ATTRIBUTES list and on
+   the declaration's (element) type, stripping array dimensions.  */
+
+int
+avr_progmem_p (tree decl, tree attributes)
+{
+  tree a;
+
+  if (TREE_CODE (decl) != VAR_DECL)
+    return 0;
+
+  if (NULL_TREE
+      != lookup_attribute ("progmem", attributes))
+    return 1;
+
+  /* Strip array types to reach the element type, so an array of
+     progmem-typed elements is recognized too.  */
+  a=decl;
+  do
+    a = TREE_TYPE(a);
+  while (TREE_CODE (a) == ARRAY_TYPE);
+
+  if (a == error_mark_node)
+    return 0;
+
+  if (NULL_TREE != lookup_attribute ("progmem", TYPE_ATTRIBUTES (a)))
+    return 1;
+
+  return 0;
+}
+
+/* Add the section attribute if the variable is in progmem.
+   Forces such variables into the ".progmem.data" section and marks
+   them read-only.  */
+
+static void
+avr_insert_attributes (tree node, tree *attributes)
+{
+  if (TREE_CODE (node) == VAR_DECL
+      && (TREE_STATIC (node) || DECL_EXTERNAL (node))
+      && avr_progmem_p (node, *attributes))
+    {
+      static const char dsec[] = ".progmem.data";
+      *attributes = tree_cons (get_identifier ("section"),
+		build_tree_list (NULL, build_string (strlen (dsec), dsec)),
+		*attributes);
+
+      /* ??? This seems sketchy.  Why can't the user declare the
+	 thing const in the first place?  */
+      TREE_READONLY (node) = 1;
+    }
+}
+
+/* A get_unnamed_section callback for switching to progmem_section.
+   On non-MEGA parts the section is marked executable ("ax") -
+   presumably so its contents stay within the code address range
+   reachable by lpm; confirm against the section handling in avr.h.  */
+
+static void
+avr_output_progmem_section_asm_op (const void *arg ATTRIBUTE_UNUSED)
+{
+  fprintf (asm_out_file,
+	   "\t.section .progmem.gcc_sw_table, \"%s\", @progbits\n",
+	   AVR_MEGA ? "a" : "ax");
+  /* Should already be aligned, this is just to be safe if it isn't.  */
+  fprintf (asm_out_file, "\t.p2align 1\n");
+}
+
+/* Implement TARGET_ASM_INIT_SECTIONS.  Creates the progmem section
+   and redirects read-only data into the ordinary data section (the
+   AVR cannot read flash through normal loads).  */
+
+static void
+avr_asm_init_sections (void)
+{
+  progmem_section = get_unnamed_section (AVR_MEGA ? 0 : SECTION_CODE,
+					 avr_output_progmem_section_asm_op,
+					 NULL);
+  readonly_data_section = data_section;
+}
+
+/* Implement TARGET_SECTION_TYPE_FLAGS.  In addition to the default
+   flags, sections whose name starts with ".noinit" are treated as
+   BSS-style (@nobits) and restricted to uninitialized variables.  */
+static unsigned int
+avr_section_type_flags (tree decl, const char *name, int reloc)
+{
+  unsigned int flags = default_section_type_flags (decl, name, reloc);
+
+  if (strncmp (name, ".noinit", 7) == 0)
+    {
+      if (decl && TREE_CODE (decl) == VAR_DECL
+	  && DECL_INITIAL (decl) == NULL_TREE)
+	flags |= SECTION_BSS;  /* @nobits */
+      else
+	warning (0, "only uninitialized variables can be placed in the "
+		 ".noinit section");
+    }
+
+  return flags;
+}
+
+/* Outputs some appropriate text to go at the start of an assembler
+   file: well-known I/O register aliases, the tmp/zero register
+   assignments, and the startup-code hooks.  Also resets the per-file
+   instruction counters reported by avr_file_end.  */
+
+static void
+avr_file_start (void)
+{
+  if (avr_asm_only_p)
+    error ("MCU %qs supported for assembler only", avr_mcu_name);
+
+  default_file_start ();
+
+/*  fprintf (asm_out_file, "\t.arch %s\n", avr_mcu_name);*/
+  fputs ("__SREG__ = 0x3f\n"
+	 "__SP_H__ = 0x3e\n"
+	 "__SP_L__ = 0x3d\n", asm_out_file);
+
+  fputs ("__tmp_reg__ = 0\n"
+	 "__zero_reg__ = 1\n", asm_out_file);
+
+  /* FIXME: output these only if there is anything in the .data / .bss
+     sections - some code size could be saved by not linking in the
+     initialization code from libgcc if one or both sections are empty.  */
+  fputs ("\t.global __do_copy_data\n", asm_out_file);
+  fputs ("\t.global __do_clear_bss\n", asm_out_file);
+
+  commands_in_file = 0;
+  commands_in_prologues = 0;
+  commands_in_epilogues = 0;
+}
+
+/* Implement TARGET_ASM_FILE_END: emit a trailing assembler comment
+ summarizing the instruction counts collected for this file. */
+
+static void
+avr_file_end (void)
+{
+ fputs ("/* File ", asm_out_file);
+ output_quoted_string (asm_out_file, main_input_filename);
+ /* Total instruction count (decimal, then the same value in hex),
+    the share outside prologues/epilogues, then the prologue and
+    epilogue counts. */
+ fprintf (asm_out_file,
+ ": code %4d = 0x%04x (%4d), prologues %3d, epilogues %3d */\n",
+ commands_in_file,
+ commands_in_file,
+ commands_in_file - commands_in_prologues - commands_in_epilogues,
+ commands_in_prologues, commands_in_epilogues);
+}
+
+/* Fill reg_alloc_order with the order in which hard registers should
+   be handed out to pseudo-registers local to a basic block: element 0
+   is tried first.  Three orderings are available, selected by the
+   -morder1 / -morder2 options (default otherwise).  */
+
+void
+order_regs_for_local_alloc (void)
+{
+  /* Default: prefer the r24/r25 return-value pair first.  */
+  static const int order_0[] = {
+    24, 25, 18, 19, 20, 21, 22, 23, 30, 31, 26, 27, 28, 29,
+    17, 16, 15, 14, 13, 12, 11, 10, 9, 8, 7, 6, 5, 4, 3, 2,
+    0, 1,
+    32, 33, 34, 35
+  };
+  /* -morder1: start from the first argument registers instead.  */
+  static const int order_1[] = {
+    18, 19, 20, 21, 22, 23, 24, 25, 30, 31, 26, 27, 28, 29,
+    17, 16, 15, 14, 13, 12, 11, 10, 9, 8, 7, 6, 5, 4, 3, 2,
+    0, 1,
+    32, 33, 34, 35
+  };
+  /* -morder2: like the default but scanning downwards.  */
+  static const int order_2[] = {
+    25, 24, 23, 22, 21, 20, 19, 18, 30, 31, 26, 27, 28, 29,
+    17, 16, 15, 14, 13, 12, 11, 10, 9, 8, 7, 6, 5, 4, 3, 2,
+    1, 0,
+    32, 33, 34, 35
+  };
+
+  const int *chosen;
+  unsigned int k;
+
+  if (TARGET_ORDER_1)
+    chosen = order_1;
+  else if (TARGET_ORDER_2)
+    chosen = order_2;
+  else
+    chosen = order_0;
+
+  for (k = 0; k < ARRAY_SIZE (order_0); ++k)
+    reg_alloc_order[k] = chosen[k];
+}
+
+
+/* Mutually recursive subroutine of avr_rtx_costs for calculating the
+ cost of an RTX operand given its context. X is the rtx of the
+ operand, MODE is its mode, and OUTER is the rtx_code of this
+ operand's parent operator. */
+
+static int
+avr_operand_rtx_cost (rtx x, enum machine_mode mode, enum rtx_code outer)
+{
+ enum rtx_code code = GET_CODE (x);
+ int total;
+
+ switch (code)
+ {
+ case REG:
+ case SUBREG:
+ /* A register operand is free. */
+ return 0;
+
+ case CONST_INT:
+ case CONST_DOUBLE:
+ /* Loading a constant costs roughly one instruction per byte. */
+ return COSTS_N_INSNS (GET_MODE_SIZE (mode));
+
+ default:
+ break;
+ }
+
+ /* Anything else: recurse into avr_rtx_costs for the full cost of
+    the subexpression. */
+ total = 0;
+ avr_rtx_costs (x, code, outer, &total);
+ return total;
+}
+
+/* The AVR backend's rtx_cost function. X is rtx expression whose cost
+ is to be calculated. Return true if the complete cost has been
+ computed, and false if subexpressions should be scanned. In either
+ case, *TOTAL contains the cost result.
+
+ Costs are expressed in instruction counts (COSTS_N_INSNS); the
+ per-mode constants below mirror the lengths of the instruction
+ sequences the output templates emit for each operation. */
+
+static bool
+avr_rtx_costs (rtx x, int code, int outer_code ATTRIBUTE_UNUSED, int *total)
+{
+ enum machine_mode mode = GET_MODE (x);
+ HOST_WIDE_INT val;
+
+ switch (code)
+ {
+ case CONST_INT:
+ case CONST_DOUBLE:
+ /* Immediate constants are as cheap as registers. */
+ *total = 0;
+ return true;
+
+ case MEM:
+ case CONST:
+ case LABEL_REF:
+ case SYMBOL_REF:
+ /* Memory and address references: one instruction per byte moved. */
+ *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
+ return true;
+
+ case NEG:
+ switch (mode)
+ {
+ case QImode:
+ case SFmode:
+ *total = COSTS_N_INSNS (1);
+ break;
+
+ case HImode:
+ *total = COSTS_N_INSNS (3);
+ break;
+
+ case SImode:
+ *total = COSTS_N_INSNS (7);
+ break;
+
+ default:
+ return false;
+ }
+ *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code);
+ return true;
+
+ case ABS:
+ /* Only cheap for single-byte values and the SF sign bit. */
+ switch (mode)
+ {
+ case QImode:
+ case SFmode:
+ *total = COSTS_N_INSNS (1);
+ break;
+
+ default:
+ return false;
+ }
+ *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code);
+ return true;
+
+ case NOT:
+ /* One com per byte. */
+ *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
+ *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code);
+ return true;
+
+ case ZERO_EXTEND:
+ /* One clear per newly introduced byte. */
+ *total = COSTS_N_INSNS (GET_MODE_SIZE (mode)
+ - GET_MODE_SIZE (GET_MODE (XEXP (x, 0))));
+ *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code);
+ return true;
+
+ case SIGN_EXTEND:
+ /* Sign replication needs two extra instructions on top of the
+    per-byte fill. */
+ *total = COSTS_N_INSNS (GET_MODE_SIZE (mode) + 2
+ - GET_MODE_SIZE (GET_MODE (XEXP (x, 0))));
+ *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code);
+ return true;
+
+ case PLUS:
+ /* Small immediates can use adiw/sbiw (cheap); anything else is
+    add/adc per byte. */
+ switch (mode)
+ {
+ case QImode:
+ *total = COSTS_N_INSNS (1);
+ if (GET_CODE (XEXP (x, 1)) != CONST_INT)
+ *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
+ break;
+
+ case HImode:
+ if (GET_CODE (XEXP (x, 1)) != CONST_INT)
+ {
+ *total = COSTS_N_INSNS (2);
+ *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
+ }
+ else if (INTVAL (XEXP (x, 1)) >= -63 && INTVAL (XEXP (x, 1)) <= 63)
+ *total = COSTS_N_INSNS (1);
+ else
+ *total = COSTS_N_INSNS (2);
+ break;
+
+ case SImode:
+ if (GET_CODE (XEXP (x, 1)) != CONST_INT)
+ {
+ *total = COSTS_N_INSNS (4);
+ *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
+ }
+ else if (INTVAL (XEXP (x, 1)) >= -63 && INTVAL (XEXP (x, 1)) <= 63)
+ *total = COSTS_N_INSNS (1);
+ else
+ *total = COSTS_N_INSNS (4);
+ break;
+
+ default:
+ return false;
+ }
+ *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code);
+ return true;
+
+ case MINUS:
+ case AND:
+ case IOR:
+ /* One instruction per byte. */
+ *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
+ *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code);
+ if (GET_CODE (XEXP (x, 1)) != CONST_INT)
+ *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
+ return true;
+
+ case XOR:
+ /* No immediate form exists, so the constant operand is costed too. */
+ *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
+ *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code);
+ *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
+ return true;
+
+ case MULT:
+ /* Enhanced cores have a hardware multiplier (__AVR_HAVE_MUL__);
+    otherwise multiplication is a library call, which we only cost
+    (as the call itself) when optimizing for size. */
+ switch (mode)
+ {
+ case QImode:
+ if (AVR_ENHANCED)
+ *total = COSTS_N_INSNS (optimize_size ? 3 : 4);
+ else if (optimize_size)
+ *total = COSTS_N_INSNS (AVR_MEGA ? 2 : 1);
+ else
+ return false;
+ break;
+
+ case HImode:
+ if (AVR_ENHANCED)
+ *total = COSTS_N_INSNS (optimize_size ? 7 : 10);
+ else if (optimize_size)
+ *total = COSTS_N_INSNS (AVR_MEGA ? 2 : 1);
+ else
+ return false;
+ break;
+
+ default:
+ return false;
+ }
+ *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code);
+ *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
+ return true;
+
+ case DIV:
+ case MOD:
+ case UDIV:
+ case UMOD:
+ /* Division is always a library call; when optimizing for size
+    just count the call instruction (call on MEGA, rcall otherwise). */
+ if (optimize_size)
+ *total = COSTS_N_INSNS (AVR_MEGA ? 2 : 1);
+ else
+ return false;
+ *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code);
+ *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
+ return true;
+
+ case ASHIFT:
+ /* Constant shift counts map to hand-tuned sequences (see the
+    ashl* output templates); the large default constants penalize
+    variable shift counts, which expand to a loop. */
+ switch (mode)
+ {
+ case QImode:
+ if (GET_CODE (XEXP (x, 1)) != CONST_INT)
+ {
+ *total = COSTS_N_INSNS (optimize_size ? 4 : 17);
+ *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
+ }
+ else
+ {
+ val = INTVAL (XEXP (x, 1));
+ if (val == 7)
+ *total = COSTS_N_INSNS (3);
+ else if (val >= 0 && val <= 7)
+ *total = COSTS_N_INSNS (val);
+ else
+ *total = COSTS_N_INSNS (1);
+ }
+ break;
+
+ case HImode:
+ if (GET_CODE (XEXP (x, 1)) != CONST_INT)
+ {
+ *total = COSTS_N_INSNS (optimize_size ? 5 : 41);
+ *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
+ }
+ else
+ switch (INTVAL (XEXP (x, 1)))
+ {
+ case 0:
+ *total = 0;
+ break;
+ case 1:
+ case 8:
+ *total = COSTS_N_INSNS (2);
+ break;
+ case 9:
+ *total = COSTS_N_INSNS (3);
+ break;
+ case 2:
+ case 3:
+ case 10:
+ case 15:
+ *total = COSTS_N_INSNS (4);
+ break;
+ case 7:
+ case 11:
+ case 12:
+ *total = COSTS_N_INSNS (5);
+ break;
+ case 4:
+ *total = COSTS_N_INSNS (optimize_size ? 5 : 8);
+ break;
+ case 6:
+ *total = COSTS_N_INSNS (optimize_size ? 5 : 9);
+ break;
+ case 5:
+ *total = COSTS_N_INSNS (optimize_size ? 5 : 10);
+ break;
+ default:
+ *total = COSTS_N_INSNS (optimize_size ? 5 : 41);
+ *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
+ }
+ break;
+
+ case SImode:
+ if (GET_CODE (XEXP (x, 1)) != CONST_INT)
+ {
+ *total = COSTS_N_INSNS (optimize_size ? 7 : 113);
+ *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
+ }
+ else
+ switch (INTVAL (XEXP (x, 1)))
+ {
+ case 0:
+ *total = 0;
+ break;
+ case 24:
+ *total = COSTS_N_INSNS (3);
+ break;
+ case 1:
+ case 8:
+ case 16:
+ *total = COSTS_N_INSNS (4);
+ break;
+ case 31:
+ *total = COSTS_N_INSNS (6);
+ break;
+ case 2:
+ *total = COSTS_N_INSNS (optimize_size ? 7 : 8);
+ break;
+ default:
+ *total = COSTS_N_INSNS (optimize_size ? 7 : 113);
+ *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
+ }
+ break;
+
+ default:
+ return false;
+ }
+ *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code);
+ return true;
+
+ case ASHIFTRT:
+ /* Arithmetic right shift: same table-driven scheme as ASHIFT. */
+ switch (mode)
+ {
+ case QImode:
+ if (GET_CODE (XEXP (x, 1)) != CONST_INT)
+ {
+ *total = COSTS_N_INSNS (optimize_size ? 4 : 17);
+ *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
+ }
+ else
+ {
+ val = INTVAL (XEXP (x, 1));
+ if (val == 6)
+ *total = COSTS_N_INSNS (4);
+ else if (val == 7)
+ *total = COSTS_N_INSNS (2);
+ else if (val >= 0 && val <= 7)
+ *total = COSTS_N_INSNS (val);
+ else
+ *total = COSTS_N_INSNS (1);
+ }
+ break;
+
+ case HImode:
+ if (GET_CODE (XEXP (x, 1)) != CONST_INT)
+ {
+ *total = COSTS_N_INSNS (optimize_size ? 5 : 41);
+ *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
+ }
+ else
+ switch (INTVAL (XEXP (x, 1)))
+ {
+ case 0:
+ *total = 0;
+ break;
+ case 1:
+ *total = COSTS_N_INSNS (2);
+ break;
+ case 15:
+ *total = COSTS_N_INSNS (3);
+ break;
+ case 2:
+ case 7:
+ case 8:
+ case 9:
+ *total = COSTS_N_INSNS (4);
+ break;
+ case 10:
+ case 14:
+ *total = COSTS_N_INSNS (5);
+ break;
+ case 11:
+ *total = COSTS_N_INSNS (optimize_size ? 5 : 6);
+ break;
+ case 12:
+ *total = COSTS_N_INSNS (optimize_size ? 5 : 7);
+ break;
+ case 6:
+ case 13:
+ *total = COSTS_N_INSNS (optimize_size ? 5 : 8);
+ break;
+ default:
+ *total = COSTS_N_INSNS (optimize_size ? 5 : 41);
+ *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
+ }
+ break;
+
+ case SImode:
+ if (GET_CODE (XEXP (x, 1)) != CONST_INT)
+ {
+ *total = COSTS_N_INSNS (optimize_size ? 7 : 113);
+ *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
+ }
+ else
+ switch (INTVAL (XEXP (x, 1)))
+ {
+ case 0:
+ *total = 0;
+ break;
+ case 1:
+ *total = COSTS_N_INSNS (4);
+ break;
+ case 8:
+ case 16:
+ case 24:
+ *total = COSTS_N_INSNS (6);
+ break;
+ case 2:
+ *total = COSTS_N_INSNS (optimize_size ? 7 : 8);
+ break;
+ case 31:
+ *total = COSTS_N_INSNS (AVR_HAVE_MOVW ? 4 : 5);
+ break;
+ default:
+ *total = COSTS_N_INSNS (optimize_size ? 7 : 113);
+ *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
+ }
+ break;
+
+ default:
+ return false;
+ }
+ *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code);
+ return true;
+
+ case LSHIFTRT:
+ /* Logical right shift: same table-driven scheme as ASHIFT. */
+ switch (mode)
+ {
+ case QImode:
+ if (GET_CODE (XEXP (x, 1)) != CONST_INT)
+ {
+ *total = COSTS_N_INSNS (optimize_size ? 4 : 17);
+ *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
+ }
+ else
+ {
+ val = INTVAL (XEXP (x, 1));
+ if (val == 7)
+ *total = COSTS_N_INSNS (3);
+ else if (val >= 0 && val <= 7)
+ *total = COSTS_N_INSNS (val);
+ else
+ *total = COSTS_N_INSNS (1);
+ }
+ break;
+
+ case HImode:
+ if (GET_CODE (XEXP (x, 1)) != CONST_INT)
+ {
+ *total = COSTS_N_INSNS (optimize_size ? 5 : 41);
+ *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
+ }
+ else
+ switch (INTVAL (XEXP (x, 1)))
+ {
+ case 0:
+ *total = 0;
+ break;
+ case 1:
+ case 8:
+ *total = COSTS_N_INSNS (2);
+ break;
+ case 9:
+ *total = COSTS_N_INSNS (3);
+ break;
+ case 2:
+ case 10:
+ case 15:
+ *total = COSTS_N_INSNS (4);
+ break;
+ case 7:
+ case 11:
+ *total = COSTS_N_INSNS (5);
+ break;
+ case 3:
+ case 12:
+ case 13:
+ case 14:
+ *total = COSTS_N_INSNS (optimize_size ? 5 : 6);
+ break;
+ case 4:
+ *total = COSTS_N_INSNS (optimize_size ? 5 : 7);
+ break;
+ case 5:
+ case 6:
+ *total = COSTS_N_INSNS (optimize_size ? 5 : 9);
+ break;
+ default:
+ *total = COSTS_N_INSNS (optimize_size ? 5 : 41);
+ *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
+ }
+ break;
+
+ case SImode:
+ if (GET_CODE (XEXP (x, 1)) != CONST_INT)
+ {
+ *total = COSTS_N_INSNS (optimize_size ? 7 : 113);
+ *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
+ }
+ else
+ switch (INTVAL (XEXP (x, 1)))
+ {
+ case 0:
+ *total = 0;
+ break;
+ case 1:
+ *total = COSTS_N_INSNS (4);
+ break;
+ case 2:
+ *total = COSTS_N_INSNS (optimize_size ? 7 : 8);
+ break;
+ case 8:
+ case 16:
+ case 24:
+ *total = COSTS_N_INSNS (4);
+ break;
+ case 31:
+ *total = COSTS_N_INSNS (6);
+ break;
+ default:
+ *total = COSTS_N_INSNS (optimize_size ? 7 : 113);
+ *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
+ }
+ break;
+
+ default:
+ return false;
+ }
+ *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code);
+ return true;
+
+ case COMPARE:
+ /* cp/cpc per byte; comparing against a nonzero constant needs an
+    extra instruction per additional byte. */
+ switch (GET_MODE (XEXP (x, 0)))
+ {
+ case QImode:
+ *total = COSTS_N_INSNS (1);
+ if (GET_CODE (XEXP (x, 1)) != CONST_INT)
+ *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
+ break;
+
+ case HImode:
+ *total = COSTS_N_INSNS (2);
+ if (GET_CODE (XEXP (x, 1)) != CONST_INT)
+ *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
+ else if (INTVAL (XEXP (x, 1)) != 0)
+ *total += COSTS_N_INSNS (1);
+ break;
+
+ case SImode:
+ *total = COSTS_N_INSNS (4);
+ if (GET_CODE (XEXP (x, 1)) != CONST_INT)
+ *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
+ else if (INTVAL (XEXP (x, 1)) != 0)
+ *total += COSTS_N_INSNS (3);
+ break;
+
+ default:
+ return false;
+ }
+ *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code);
+ return true;
+
+ default:
+ break;
+ }
+ return false;
+}
+
+/* Implement TARGET_ADDRESS_COST: relative cost of a memory address.  */
+
+static int
+avr_address_cost (rtx x)
+{
+  /* Reg+offset addresses with a displacement of 61 or more are
+     expensive -- presumably beyond the displacement the addressing
+     mode can encode, forcing pointer adjustment code (see also
+     MAX_LD_OFFSET).  */
+  if (GET_CODE (x) == PLUS
+      && GET_CODE (XEXP (x, 1)) == CONST_INT
+      && (REG_P (XEXP (x, 0)) || GET_CODE (XEXP (x, 0)) == SUBREG)
+      && INTVAL (XEXP (x, 1)) >= 61)
+    return 18;
+
+  /* A constant address naming an I/O register is cheap (in/out).  */
+  if (CONSTANT_ADDRESS_P (x) && avr_io_address_p (x, 1))
+    return 2;
+
+  return 4;
+}
+
+/* Test for extra memory constraint 'Q': a MEM whose address is based
+ on the Y or Z pointer register with a displacement small enough for
+ the ldd/std addressing mode. X is the MEM rtx; its address is
+ expected to be a PLUS of a base register and a CONST_INT. */
+
+int
+extra_constraint_Q (rtx x)
+{
+ if (GET_CODE (XEXP (x,0)) == PLUS
+ && REG_P (XEXP (XEXP (x,0), 0))
+ && GET_CODE (XEXP (XEXP (x,0), 1)) == CONST_INT
+ && (INTVAL (XEXP (XEXP (x,0), 1))
+ <= MAX_LD_OFFSET (GET_MODE (x))))
+ {
+ rtx xx = XEXP (XEXP (x,0), 0);
+ int regno = REGNO (xx);
+ if (TARGET_ALL_DEBUG)
+ {
+ fprintf (stderr, ("extra_constraint:\n"
+ "reload_completed: %d\n"
+ "reload_in_progress: %d\n"),
+ reload_completed, reload_in_progress);
+ debug_rtx (x);
+ }
+ /* Before reload, accept any pseudo as a potential Y/Z base. */
+ if (regno >= FIRST_PSEUDO_REGISTER)
+ return 1; /* allocate pseudos */
+ else if (regno == REG_Z || regno == REG_Y)
+ return 1; /* strictly check */
+ else if (xx == frame_pointer_rtx
+ || xx == arg_pointer_rtx)
+ return 1; /* XXX frame & arg pointer checks */
+ }
+ return 0;
+}
+
+/* Map a strict comparison code to the equivalent non-strict AVR
+   condition code (the caller adjusts the constant operand to match):
+   GT -> GE, GTU -> GEU, LE -> LT, LEU -> LTU.  Any other code is a
+   caller bug.  */
+
+RTX_CODE
+avr_normalize_condition (RTX_CODE condition)
+{
+  if (condition == GT)
+    return GE;
+  if (condition == GTU)
+    return GEU;
+  if (condition == LE)
+    return LT;
+  if (condition == LEU)
+    return LTU;
+
+  gcc_unreachable ();
+}
+
+/* Implement TARGET_MACHINE_DEPENDENT_REORG: optimize conditional
+ jumps by canonicalizing cc0 compare insns. Two rewrites are done:
+ swapping the operands of a reg-reg compare (adjusting the following
+ branch condition to match), and turning a strict compare against a
+ constant into a non-strict one (via avr_simplify_comparison_p /
+ avr_normalize_condition). */
+
+static void
+avr_reorg (void)
+{
+ rtx insn, pattern;
+
+ for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
+ {
+ if (! (GET_CODE (insn) == INSN
+ || GET_CODE (insn) == CALL_INSN
+ || GET_CODE (insn) == JUMP_INSN)
+ || !single_set (insn))
+ continue;
+
+ pattern = PATTERN (insn);
+
+ if (GET_CODE (pattern) == PARALLEL)
+ pattern = XVECEXP (pattern, 0, 0);
+ if (GET_CODE (pattern) == SET
+ && SET_DEST (pattern) == cc0_rtx
+ && compare_diff_p (insn))
+ {
+ if (GET_CODE (SET_SRC (pattern)) == COMPARE)
+ {
+ /* Now we work under compare insn. */
+
+ pattern = SET_SRC (pattern);
+ if (true_regnum (XEXP (pattern,0)) >= 0
+ && true_regnum (XEXP (pattern,1)) >= 0 )
+ {
+ /* Both operands are registers: swap them and reverse the
+    condition of the branch that consumes cc0.  NOTE(review):
+    assumes next_real_insn is the cc0 user and is a
+    conditional branch (a single_set whose source is an
+    if_then_else) -- TODO confirm cc0 pairing guarantees. */
+ rtx x = XEXP (pattern,0);
+ rtx next = next_real_insn (insn);
+ rtx pat = PATTERN (next);
+ rtx src = SET_SRC (pat);
+ rtx t = XEXP (src,0);
+ PUT_CODE (t, swap_condition (GET_CODE (t)));
+ XEXP (pattern,0) = XEXP (pattern,1);
+ XEXP (pattern,1) = x;
+ INSN_CODE (next) = -1;
+ }
+ else if (true_regnum (XEXP (pattern,0)) >= 0
+ && GET_CODE (XEXP (pattern,1)) == CONST_INT)
+ {
+ /* Register compared against a constant: if bumping the
+    constant by one keeps it representable, replace the
+    strict condition by its non-strict equivalent. */
+ rtx x = XEXP (pattern,1);
+ rtx next = next_real_insn (insn);
+ rtx pat = PATTERN (next);
+ rtx src = SET_SRC (pat);
+ rtx t = XEXP (src,0);
+ enum machine_mode mode = GET_MODE (XEXP (pattern, 0));
+
+ if (avr_simplify_comparison_p (mode, GET_CODE (t), x))
+ {
+ XEXP (pattern, 1) = gen_int_mode (INTVAL (x) + 1, mode);
+ PUT_CODE (t, avr_normalize_condition (GET_CODE (t)));
+ INSN_CODE (next) = -1;
+ INSN_CODE (insn) = -1;
+ }
+ }
+ }
+ else if (true_regnum (SET_SRC (pattern)) >= 0)
+ {
+ /* This is a tst insn; rewrite it as a compare of the negated
+    value and reverse the following branch condition. */
+ rtx next = next_real_insn (insn);
+ rtx pat = PATTERN (next);
+ rtx src = SET_SRC (pat);
+ rtx t = XEXP (src,0);
+
+ PUT_CODE (t, swap_condition (GET_CODE (t)));
+ SET_SRC (pattern) = gen_rtx_NEG (GET_MODE (SET_SRC (pattern)),
+ SET_SRC (pattern));
+ INSN_CODE (next) = -1;
+ INSN_CODE (insn) = -1;
+ }
+ }
+ }
+}
+
+/* Return the base hard register number used for function return
+   values (r24; see avr_libcall_value for how wider values are
+   placed relative to it).  */
+
+int
+avr_ret_register (void)
+{
+  enum { AVR_RETURN_REGNO = 24 };
+
+  return AVR_RETURN_REGNO;
+}
+
+/* Create an RTX representing the place where a library function
+ returns a value of mode MODE: the value ends at r25 (RET_REGISTER+1),
+ so an N-byte value (minimum 2) starts at register 26-N. */
+
+rtx
+avr_libcall_value (enum machine_mode mode)
+{
+ int offs = GET_MODE_SIZE (mode);
+ /* Even single-byte values occupy the full r24:r25 pair slot. */
+ if (offs < 2)
+ offs = 2;
+ return gen_rtx_REG (mode, RET_REGISTER + 2 - offs);
+}
+
+/* Create an RTX representing the place where a
+ function returns a value of data type VALTYPE. Non-BLKmode values
+ are handled exactly like library call values; BLKmode aggregates
+ have their size rounded up to 2, 4 or 8 bytes before the register
+ is chosen. */
+
+rtx
+avr_function_value (tree type, tree func ATTRIBUTE_UNUSED)
+{
+ unsigned int offs;
+
+ if (TYPE_MODE (type) != BLKmode)
+ return avr_libcall_value (TYPE_MODE (type));
+
+ /* Round the aggregate size up to the next register-pair-friendly
+    size: 2, 4 or 8 bytes. */
+ offs = int_size_in_bytes (type);
+ if (offs < 2)
+ offs = 2;
+ if (offs > 2 && offs < GET_MODE_SIZE (SImode))
+ offs = GET_MODE_SIZE (SImode);
+ else if (offs > GET_MODE_SIZE (SImode) && offs < GET_MODE_SIZE (DImode))
+ offs = GET_MODE_SIZE (DImode);
+
+ return gen_rtx_REG (BLKmode, RET_REGISTER + 2 - offs);
+}
+
+/* If MASK, viewed as a 32-bit value, has exactly one bit set, return
+   that bit's number plus one (1 for bit 0, ..., 32 for bit 31);
+   otherwise return 0.  Bits above the low 32 are ignored, matching
+   the 32-iteration scan this replaces.  */
+
+int
+mask_one_bit_p (HOST_WIDE_INT mask)
+{
+  unsigned HOST_WIDE_INT low
+    = (unsigned HOST_WIDE_INT) mask & 0xffffffffUL;
+  int pos;
+
+  /* Zero, or more than one bit set, in the low 32 bits?  */
+  if (low == 0 || (low & (low - 1)) != 0)
+    return 0;
+
+  /* Locate the single set bit; bit 0 yields 1, bit 31 yields 32.  */
+  for (pos = 1; (low >>= 1) != 0; pos++)
+    ;
+  return pos;
+}
+
+
+/* Places additional restrictions on the register class to
+ use when it is necessary to copy value X into a register
+ in class CLASS. The AVR port imposes none: CLASS is returned
+ unchanged. */
+
+enum reg_class
+preferred_reload_class (rtx x ATTRIBUTE_UNUSED, enum reg_class class)
+{
+ return class;
+}
+
+/* Return 1 if X resolves to a hard register belonging to register
+   class CLASS, 0 otherwise (including when X has no hard register
+   at all).  */
+
+int
+test_hard_reg_class (enum reg_class class, rtx x)
+{
+  int regno = true_regnum (x);
+
+  return (regno >= 0 && TEST_HARD_REG_CLASS (class, regno)) ? 1 : 0;
+}
+
+
+/* Return nonzero if the jump INSN to DEST skips exactly one
+ instruction, i.e. the target address immediately follows the byte
+ after the jump (used to pick skip instructions over branches).
+ DEST may be a LABEL_REF or the label itself. Relies on
+ INSN_ADDRESSES being valid, so only meaningful late in compilation. */
+
+int
+jump_over_one_insn_p (rtx insn, rtx dest)
+{
+ int uid = INSN_UID (GET_CODE (dest) == LABEL_REF
+ ? XEXP (dest, 0)
+ : dest);
+ int jump_addr = INSN_ADDRESSES (INSN_UID (insn));
+ int dest_addr = INSN_ADDRESSES (uid);
+ return dest_addr - jump_addr == get_attr_length (insn) + 1;
+}
+
+/* Returns 1 if a value of mode MODE can be stored starting with hard
+ register number REGNO. On the enhanced core, anything larger than
+ 1 byte must start in even numbered register for "movw" to work
+ (this way we don't have to check for odd registers everywhere). */
+
+int
+avr_hard_regno_mode_ok (int regno, enum machine_mode mode)
+{
+ /* The only thing that can go into registers r28:r29 is a Pmode. */
+ if (regno == REG_Y && mode == Pmode)
+ return 1;
+
+ /* Otherwise disallow all regno/mode combinations that span r28:r29.
+    Note this also rejects single-byte values placed in r28 or r29,
+    reserving the pair for the frame pointer. */
+ if (regno <= (REG_Y + 1) && (regno + GET_MODE_SIZE (mode)) >= (REG_Y + 1))
+ return 0;
+
+ if (mode == QImode)
+ return 1;
+
+ /* Modes larger than QImode occupy consecutive registers. */
+ if (regno + GET_MODE_SIZE (mode) > FIRST_PSEUDO_REGISTER)
+ return 0;
+
+ /* All modes larger than QImode should start in an even register. */
+ return !(regno & 1);
+}
+
+/* Return nonzero if X is a CONST_INT naming an I/O register address
+   such that a SIZE-byte (1 or 2) access stays inside the I/O window
+   0x20..0x5f.  Used for the lds/sts -> in/out optimization, hence
+   only when optimizing.  Callers add 0x20 to SIZE to restrict the
+   check to the lower half of I/O space (for cbi/sbi/sbic/sbis).  */
+
+int
+avr_io_address_p (rtx x, int size)
+{
+  if (optimize <= 0 || GET_CODE (x) != CONST_INT)
+    return 0;
+
+  return INTVAL (x) >= 0x20 && INTVAL (x) <= 0x60 - size;
+}
+
+/* If X is a CONST_INT and X or -X is a power of 2, return the bit
+   number plus one; otherwise (including for non-CONST_INT rtx)
+   return 0.  */
+
+int
+const_int_pow2_p (rtx x)
+{
+  HOST_WIDE_INT d, abs_d;
+
+  if (GET_CODE (x) != CONST_INT)
+    return 0;
+
+  d = INTVAL (x);
+  abs_d = d < 0 ? -d : d;
+  /* exact_log2 yields -1 for zero and non-powers-of-2, mapping to 0.  */
+  return exact_log2 (abs_d) + 1;
+}
+
+/* Output the asm to load a 16-bit immediate (operand 1) into the HI
+ register pair operand 0, using operand 2 as a scratch register
+ (needed because ldi only works on r16-r31). If LEN is non-null,
+ store the instruction count there. Special-cases a zero low or
+ high byte (use __zero_reg__) and equal bytes (one ldi suffices). */
+
+const char *
+output_reload_inhi (rtx insn ATTRIBUTE_UNUSED, rtx *operands, int *len)
+{
+ int tmp;
+ if (!len)
+ len = &tmp;
+
+ if (GET_CODE (operands[1]) == CONST_INT)
+ {
+ int val = INTVAL (operands[1]);
+ if ((val & 0xff) == 0)
+ {
+ /* Low byte is zero: copy __zero_reg__ instead of loading it. */
+ *len = 3;
+ return (AS2 (mov,%A0,__zero_reg__) CR_TAB
+ AS2 (ldi,%2,hi8(%1)) CR_TAB
+ AS2 (mov,%B0,%2));
+ }
+ else if ((val & 0xff00) == 0)
+ {
+ /* High byte is zero. */
+ *len = 3;
+ return (AS2 (ldi,%2,lo8(%1)) CR_TAB
+ AS2 (mov,%A0,%2) CR_TAB
+ AS2 (mov,%B0,__zero_reg__));
+ }
+ else if ((val & 0xff) == ((val & 0xff00) >> 8))
+ {
+ /* Both bytes identical: one ldi feeds both movs. */
+ *len = 3;
+ return (AS2 (ldi,%2,lo8(%1)) CR_TAB
+ AS2 (mov,%A0,%2) CR_TAB
+ AS2 (mov,%B0,%2));
+ }
+ }
+ /* General case: load each byte through the scratch register. */
+ *len = 4;
+ return (AS2 (ldi,%2,lo8(%1)) CR_TAB
+ AS2 (mov,%A0,%2) CR_TAB
+ AS2 (ldi,%2,hi8(%1)) CR_TAB
+ AS2 (mov,%B0,%2));
+}
+
+
+/* Output the asm to load a 32-bit immediate (operand 1, CONST_INT or
+ other constant) into the SI/SF register quad operand 0, using
+ operand 2 as a scratch register. Zero bytes of a CONST_INT are
+ copied from __zero_reg__ instead of being loaded. When LEN is
+ non-null only the instruction count is computed (no asm emitted),
+ matching the two-pass length/output protocol used elsewhere. */
+
+const char *
+output_reload_insisf (rtx insn ATTRIBUTE_UNUSED, rtx *operands, int *len)
+{
+ rtx src = operands[1];
+ int cnst = (GET_CODE (src) == CONST_INT);
+
+ if (len)
+ {
+ /* Length-only pass: 4 movs plus one ldi per nonzero byte;
+    8 instructions for non-CONST_INT constants. */
+ if (cnst)
+ *len = 4 + ((INTVAL (src) & 0xff) != 0)
+ + ((INTVAL (src) & 0xff00) != 0)
+ + ((INTVAL (src) & 0xff0000) != 0)
+ + ((INTVAL (src) & 0xff000000) != 0);
+ else
+ *len = 8;
+
+ return "";
+ }
+
+ if (cnst && ((INTVAL (src) & 0xff) == 0))
+ output_asm_insn (AS2 (mov, %A0, __zero_reg__), operands);
+ else
+ {
+ output_asm_insn (AS2 (ldi, %2, lo8(%1)), operands);
+ output_asm_insn (AS2 (mov, %A0, %2), operands);
+ }
+ if (cnst && ((INTVAL (src) & 0xff00) == 0))
+ output_asm_insn (AS2 (mov, %B0, __zero_reg__), operands);
+ else
+ {
+ output_asm_insn (AS2 (ldi, %2, hi8(%1)), operands);
+ output_asm_insn (AS2 (mov, %B0, %2), operands);
+ }
+ if (cnst && ((INTVAL (src) & 0xff0000) == 0))
+ output_asm_insn (AS2 (mov, %C0, __zero_reg__), operands);
+ else
+ {
+ output_asm_insn (AS2 (ldi, %2, hlo8(%1)), operands);
+ output_asm_insn (AS2 (mov, %C0, %2), operands);
+ }
+ if (cnst && ((INTVAL (src) & 0xff000000) == 0))
+ output_asm_insn (AS2 (mov, %D0, __zero_reg__), operands);
+ else
+ {
+ output_asm_insn (AS2 (ldi, %2, hhi8(%1)), operands);
+ output_asm_insn (AS2 (mov, %D0, %2), operands);
+ }
+ return "";
+}
+
+/* Emit a "bld %<byte>0,<bit>" instruction for bit BIT_NR (0..31) of
+ multi-byte operand 0: byte letter A-D from the high bits, bit digit
+ 0-7 from the low bits. Patches a static template buffer in place,
+ so this is not reentrant (fine for the single-threaded compiler). */
+
+void
+avr_output_bld (rtx operands[], int bit_nr)
+{
+ static char s[] = "bld %A0,0";
+
+ s[5] = 'A' + (bit_nr >> 3);
+ s[8] = '0' + (bit_nr & 7);
+ output_asm_insn (s, operands);
+}
+
+/* Output one jump-table element for label number VALUE to STREAM,
+ switching to the progmem section first. MEGA devices store a
+ program-memory word address; smaller devices store an rjmp
+ instruction (the table is then executed, not read). */
+
+void
+avr_output_addr_vec_elt (FILE *stream, int value)
+{
+ switch_to_section (progmem_section);
+ if (AVR_MEGA)
+ fprintf (stream, "\t.word pm(.L%d)\n", value);
+ else
+ fprintf (stream, "\trjmp .L%d\n", value);
+
+ jump_tables_size++;
+}
+
+/* Returns 1 if SCRATCH is safe to allocate as a scratch register
+ (for a define_peephole2) in the current function. In a leaf
+ interrupt/signal handler only registers already saved by the
+ prologue (i.e. live somewhere in the function) may be clobbered,
+ so every register covered by SCRATCH must be in regs_ever_live. */
+
+int
+avr_peep2_scratch_safe (rtx scratch)
+{
+ if ((interrupt_function_p (current_function_decl)
+ || signal_function_p (current_function_decl))
+ && leaf_function_p ())
+ {
+ int first_reg = true_regnum (scratch);
+ int last_reg = first_reg + GET_MODE_SIZE (GET_MODE (scratch)) - 1;
+ int reg;
+
+ for (reg = first_reg; reg <= last_reg; reg++)
+ {
+ if (!regs_ever_live[reg])
+ return 0;
+ }
+ }
+ return 1;
+}
+
+/* Output a branch that tests a single bit of a register (QI, HI or SImode)
+ or memory location in the I/O space (QImode only).
+
+ Operand 0: comparison operator (must be EQ or NE, compare bit to zero).
+ Operand 1: register operand to test, or CONST_INT memory address.
+ Operand 2: bit number (for QImode operand) or mask (HImode, SImode).
+ Operand 3: label to jump to if the test is true. */
+
+const char *
+avr_out_sbxx_branch (rtx insn, rtx operands[])
+{
+ enum rtx_code comp = GET_CODE (operands[0]);
+ int long_jump = (get_attr_length (insn) >= 4);
+ int reverse = long_jump || jump_over_one_insn_p (insn, operands[3]);
+
+ /* GE/LT on the sign bit degenerate to EQ/NE against zero. */
+ if (comp == GE)
+ comp = EQ;
+ else if (comp == LT)
+ comp = NE;
+
+ /* When the target is out of skip range (or just one insn away) emit
+    the inverted skip around a separate jump instead. */
+ if (reverse)
+ comp = reverse_condition (comp);
+
+ if (GET_CODE (operands[1]) == CONST_INT)
+ {
+ /* Bit test on an I/O address. The 0x20 offset converts the
+    memory-mapped address back to an I/O port number. */
+ if (INTVAL (operands[1]) < 0x40)
+ {
+ /* Low half of I/O space: sbis/sbic can test it directly. */
+ if (comp == EQ)
+ output_asm_insn (AS2 (sbis,%1-0x20,%2), operands);
+ else
+ output_asm_insn (AS2 (sbic,%1-0x20,%2), operands);
+ }
+ else
+ {
+ /* Upper half: read into __tmp_reg__ and use sbrs/sbrc. */
+ output_asm_insn (AS2 (in,__tmp_reg__,%1-0x20), operands);
+ if (comp == EQ)
+ output_asm_insn (AS2 (sbrs,__tmp_reg__,%2), operands);
+ else
+ output_asm_insn (AS2 (sbrc,__tmp_reg__,%2), operands);
+ }
+ }
+ else /* GET_CODE (operands[1]) == REG */
+ {
+ if (GET_MODE (operands[1]) == QImode)
+ {
+ if (comp == EQ)
+ output_asm_insn (AS2 (sbrs,%1,%2), operands);
+ else
+ output_asm_insn (AS2 (sbrc,%1,%2), operands);
+ }
+ else /* HImode or SImode */
+ {
+ /* Operand 2 is a single-bit mask: derive the byte (A-D) and
+    bit digit, then patch the sbrc/sbrs template in place. */
+ static char buf[] = "sbrc %A1,0";
+ int bit_nr = exact_log2 (INTVAL (operands[2])
+ & GET_MODE_MASK (GET_MODE (operands[1])));
+
+ buf[3] = (comp == EQ) ? 's' : 'c';
+ buf[6] = 'A' + (bit_nr >> 3);
+ buf[9] = '0' + (bit_nr & 7);
+ output_asm_insn (buf, operands);
+ }
+ }
+
+ if (long_jump)
+ return (AS1 (rjmp,.+4) CR_TAB
+ AS1 (jmp,%3));
+ if (!reverse)
+ return AS1 (rjmp,%3);
+ return "";
+}
+
+/* Worker function for TARGET_ASM_CONSTRUCTOR: pull in libgcc's
+   constructor-walking code, then emit the ctor entry as usual.  */
+
+static void
+avr_asm_out_ctor (rtx symbol, int priority)
+{
+  fprintf (asm_out_file, "\t.global __do_global_ctors\n");
+  default_ctor_section_asm_out_constructor (symbol, priority);
+}
+
+/* Worker function for TARGET_ASM_DESTRUCTOR: pull in libgcc's
+   destructor-walking code, then emit the dtor entry as usual.  */
+
+static void
+avr_asm_out_dtor (rtx symbol, int priority)
+{
+  fprintf (asm_out_file, "\t.global __do_global_dtors\n");
+  default_dtor_section_asm_out_destructor (symbol, priority);
+}
+
+/* Worker function for TARGET_RETURN_IN_MEMORY.  Only BLKmode
+   aggregates are ever returned in memory, and only when larger than
+   8 bytes or of non-constant size.  */
+
+static bool
+avr_return_in_memory (tree type, tree fntype ATTRIBUTE_UNUSED)
+{
+  HOST_WIDE_INT size;
+
+  if (TYPE_MODE (type) != BLKmode)
+    return false;
+
+  size = int_size_in_bytes (type);
+  return size == -1 || size > 8;
+}
+
+#include "gt-avr.h"
Index: t-rtems
===================================================================
--- t-rtems (nonexistent)
+++ t-rtems (revision 154)
@@ -0,0 +1,3 @@
+# Multilibs for avr RTEMS targets.
+
+# ATM, this is just a stub
Index: avr.h
===================================================================
--- avr.h (nonexistent)
+++ avr.h (revision 154)
@@ -0,0 +1,936 @@
+/* Definitions of target machine for GNU compiler,
+ for ATMEL AVR at90s8515, ATmega103/103L, ATmega603/603L microcontrollers.
+ Copyright (C) 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007
+ Free Software Foundation, Inc.
+ Contributed by Denis Chertykov (denisc@overta.ru)
+
+This file is part of GCC.
+
+GCC is free software; you can redistribute it and/or modify
+it under the terms of the GNU General Public License as published by
+the Free Software Foundation; either version 3, or (at your option)
+any later version.
+
+GCC is distributed in the hope that it will be useful,
+but WITHOUT ANY WARRANTY; without even the implied warranty of
+MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+GNU General Public License for more details.
+
+You should have received a copy of the GNU General Public License
+along with GCC; see the file COPYING3. If not see
+<http://www.gnu.org/licenses/>.  */
+
+/* Names to predefine in the preprocessor for this target machine. */
+
+#define TARGET_CPU_CPP_BUILTINS() \
+ do \
+ { \
+ builtin_define_std ("AVR"); \
+ if (avr_base_arch_macro) \
+ builtin_define (avr_base_arch_macro); \
+ if (avr_extra_arch_macro) \
+ builtin_define (avr_extra_arch_macro); \
+ if (avr_have_movw_lpmx_p) \
+ builtin_define ("__AVR_HAVE_MOVW__"); \
+ if (avr_have_movw_lpmx_p) \
+ builtin_define ("__AVR_HAVE_LPMX__"); \
+ if (avr_asm_only_p) \
+ builtin_define ("__AVR_ASM_ONLY__"); \
+ if (avr_enhanced_p) \
+ builtin_define ("__AVR_ENHANCED__"); \
+ if (avr_enhanced_p) \
+ builtin_define ("__AVR_HAVE_MUL__"); \
+ if (avr_mega_p) \
+ builtin_define ("__AVR_MEGA__"); \
+ if (TARGET_NO_INTERRUPTS) \
+ builtin_define ("__NO_INTERRUPTS__"); \
+ } \
+ while (0)
+
+extern const char *avr_base_arch_macro;
+extern const char *avr_extra_arch_macro;
+extern int avr_mega_p;
+extern int avr_enhanced_p;
+extern int avr_asm_only_p;
+extern int avr_have_movw_lpmx_p;
+#ifndef IN_LIBGCC2
+extern GTY(()) section *progmem_section;
+#endif
+
+/* Convenience predicates for the selected AVR core.  AVR_MEGA can be
+   suppressed with -mshort-calls (TARGET_SHORT_CALLS); AVR_ENHANCED
+   corresponds to cores that also get __AVR_HAVE_MUL__ defined in
+   TARGET_CPU_CPP_BUILTINS above.  */
+#define AVR_MEGA (avr_mega_p && !TARGET_SHORT_CALLS)
+#define AVR_ENHANCED (avr_enhanced_p)
+#define AVR_HAVE_MOVW (avr_have_movw_lpmx_p)
+
+#define TARGET_VERSION fprintf (stderr, " (GNU assembler syntax)");
+
+#define OVERRIDE_OPTIONS avr_override_options ()
+
+#define CAN_DEBUG_WITHOUT_FP
+
+#define BITS_BIG_ENDIAN 0
+#define BYTES_BIG_ENDIAN 0
+#define WORDS_BIG_ENDIAN 0
+
+#ifdef IN_LIBGCC2
+/* This is to get correct SI and DI modes in libgcc2.c (32 and 64 bits). */
+#define UNITS_PER_WORD 4
+#else
+/* Width of a word, in units (bytes). */
+#define UNITS_PER_WORD 1
+#endif
+
+#define POINTER_SIZE 16
+
+
+/* Maximum size of a reasonable data type:
+   DImode or DFmode ... */
+#define MAX_FIXED_MODE_SIZE 32
+
+#define PARM_BOUNDARY 8
+
+#define FUNCTION_BOUNDARY 8
+
+#define EMPTY_FIELD_BOUNDARY 8
+
+/* No data type wants to be aligned rounder than this. */
+#define BIGGEST_ALIGNMENT 8
+
+#define TARGET_VTABLE_ENTRY_ALIGN 8
+
+#define STRICT_ALIGNMENT 0
+
+#define INT_TYPE_SIZE (TARGET_INT8 ? 8 : 16)
+#define SHORT_TYPE_SIZE (INT_TYPE_SIZE == 8 ? INT_TYPE_SIZE : 16)
+#define LONG_TYPE_SIZE (INT_TYPE_SIZE == 8 ? 16 : 32)
+#define LONG_LONG_TYPE_SIZE (INT_TYPE_SIZE == 8 ? 32 : 64)
+#define FLOAT_TYPE_SIZE 32
+#define DOUBLE_TYPE_SIZE 32
+#define LONG_DOUBLE_TYPE_SIZE 32
+
+#define DEFAULT_SIGNED_CHAR 1
+
+#define SIZE_TYPE (INT_TYPE_SIZE == 8 ? "long unsigned int" : "unsigned int")
+#define PTRDIFF_TYPE (INT_TYPE_SIZE == 8 ? "long int" :"int")
+
+#define WCHAR_TYPE_SIZE 16
+
+#define FIRST_PSEUDO_REGISTER 36
+
+#define FIXED_REGISTERS {\
+ 1,1,/* r0 r1 */\
+ 0,0,/* r2 r3 */\
+ 0,0,/* r4 r5 */\
+ 0,0,/* r6 r7 */\
+ 0,0,/* r8 r9 */\
+ 0,0,/* r10 r11 */\
+ 0,0,/* r12 r13 */\
+ 0,0,/* r14 r15 */\
+ 0,0,/* r16 r17 */\
+ 0,0,/* r18 r19 */\
+ 0,0,/* r20 r21 */\
+ 0,0,/* r22 r23 */\
+ 0,0,/* r24 r25 */\
+ 0,0,/* r26 r27 */\
+ 0,0,/* r28 r29 */\
+ 0,0,/* r30 r31 */\
+ 1,1,/* STACK */\
+ 1,1 /* arg pointer */ }
+
+#define CALL_USED_REGISTERS { \
+ 1,1,/* r0 r1 */ \
+ 0,0,/* r2 r3 */ \
+ 0,0,/* r4 r5 */ \
+ 0,0,/* r6 r7 */ \
+ 0,0,/* r8 r9 */ \
+ 0,0,/* r10 r11 */ \
+ 0,0,/* r12 r13 */ \
+ 0,0,/* r14 r15 */ \
+ 0,0,/* r16 r17 */ \
+ 1,1,/* r18 r19 */ \
+ 1,1,/* r20 r21 */ \
+ 1,1,/* r22 r23 */ \
+ 1,1,/* r24 r25 */ \
+ 1,1,/* r26 r27 */ \
+ 0,0,/* r28 r29 */ \
+ 1,1,/* r30 r31 */ \
+ 1,1,/* STACK */ \
+ 1,1 /* arg pointer */ }
+
+#define REG_ALLOC_ORDER { \
+ 24,25, \
+ 18,19, \
+ 20,21, \
+ 22,23, \
+ 30,31, \
+ 26,27, \
+ 28,29, \
+ 17,16,15,14,13,12,11,10,9,8,7,6,5,4,3,2, \
+ 0,1, \
+ 32,33,34,35 \
+ }
+
+#define ORDER_REGS_FOR_LOCAL_ALLOC order_regs_for_local_alloc ()
+
+
+#define HARD_REGNO_NREGS(REGNO, MODE) ((GET_MODE_SIZE (MODE) + UNITS_PER_WORD - 1) / UNITS_PER_WORD)
+
+#define HARD_REGNO_MODE_OK(REGNO, MODE) avr_hard_regno_mode_ok(REGNO, MODE)
+
+#define MODES_TIEABLE_P(MODE1, MODE2) 1
+
+enum reg_class {
+ NO_REGS,
+ R0_REG, /* r0 */
+ POINTER_X_REGS, /* r26 - r27 */
+ POINTER_Y_REGS, /* r28 - r29 */
+ POINTER_Z_REGS, /* r30 - r31 */
+ STACK_REG, /* STACK */
+ BASE_POINTER_REGS, /* r28 - r31 */
+ POINTER_REGS, /* r26 - r31 */
+ ADDW_REGS, /* r24 - r31 */
+ SIMPLE_LD_REGS, /* r16 - r23 */
+ LD_REGS, /* r16 - r31 */
+ NO_LD_REGS, /* r0 - r15 */
+ GENERAL_REGS, /* r0 - r31 */
+ ALL_REGS, LIM_REG_CLASSES
+};
+
+
+#define N_REG_CLASSES (int)LIM_REG_CLASSES
+
+#define REG_CLASS_NAMES { \
+ "NO_REGS", \
+ "R0_REG", /* r0 */ \
+ "POINTER_X_REGS", /* r26 - r27 */ \
+ "POINTER_Y_REGS", /* r28 - r29 */ \
+ "POINTER_Z_REGS", /* r30 - r31 */ \
+ "STACK_REG", /* STACK */ \
+ "BASE_POINTER_REGS", /* r28 - r31 */ \
+ "POINTER_REGS", /* r26 - r31 */ \
+ "ADDW_REGS", /* r24 - r31 */ \
+ "SIMPLE_LD_REGS", /* r16 - r23 */ \
+ "LD_REGS", /* r16 - r31 */ \
+ "NO_LD_REGS", /* r0 - r15 */ \
+ "GENERAL_REGS", /* r0 - r31 */ \
+ "ALL_REGS" }
+
+#define REG_CLASS_CONTENTS { \
+ {0x00000000,0x00000000}, /* NO_REGS */ \
+ {0x00000001,0x00000000}, /* R0_REG */ \
+ {3 << REG_X,0x00000000}, /* POINTER_X_REGS, r26 - r27 */ \
+ {3 << REG_Y,0x00000000}, /* POINTER_Y_REGS, r28 - r29 */ \
+ {3 << REG_Z,0x00000000}, /* POINTER_Z_REGS, r30 - r31 */ \
+ {0x00000000,0x00000003}, /* STACK_REG, STACK */ \
+ {(3 << REG_Y) | (3 << REG_Z), \
+ 0x00000000}, /* BASE_POINTER_REGS, r28 - r31 */ \
+ {(3 << REG_X) | (3 << REG_Y) | (3 << REG_Z), \
+ 0x00000000}, /* POINTER_REGS, r26 - r31 */ \
+ {(3 << REG_X) | (3 << REG_Y) | (3 << REG_Z) | (3 << REG_W), \
+ 0x00000000}, /* ADDW_REGS, r24 - r31 */ \
+ {0x00ff0000,0x00000000}, /* SIMPLE_LD_REGS r16 - r23 */ \
+ {(3 << REG_X)|(3 << REG_Y)|(3 << REG_Z)|(3 << REG_W)|(0xff << 16), \
+ 0x00000000}, /* LD_REGS, r16 - r31 */ \
+ {0x0000ffff,0x00000000}, /* NO_LD_REGS r0 - r15 */ \
+ {0xffffffff,0x00000000}, /* GENERAL_REGS, r0 - r31 */ \
+ {0xffffffff,0x00000003} /* ALL_REGS */ \
+}
+
+#define REGNO_REG_CLASS(R) avr_regno_reg_class(R)
+
+#define BASE_REG_CLASS (reload_completed ? BASE_POINTER_REGS : POINTER_REGS)
+
+#define INDEX_REG_CLASS NO_REGS
+
+#define REGNO_OK_FOR_BASE_P(r) (((r) < FIRST_PSEUDO_REGISTER \
+ && ((r) == REG_X \
+ || (r) == REG_Y \
+ || (r) == REG_Z \
+ || (r) == ARG_POINTER_REGNUM)) \
+ || (reg_renumber \
+ && (reg_renumber[r] == REG_X \
+ || reg_renumber[r] == REG_Y \
+ || reg_renumber[r] == REG_Z \
+ || (reg_renumber[r] \
+ == ARG_POINTER_REGNUM))))
+
+#define REGNO_OK_FOR_INDEX_P(NUM) 0
+
+#define PREFERRED_RELOAD_CLASS(X, CLASS) preferred_reload_class(X,CLASS)
+
+#define SMALL_REGISTER_CLASSES 1
+
+#define CLASS_LIKELY_SPILLED_P(c) class_likely_spilled_p(c)
+
+#define CLASS_MAX_NREGS(CLASS, MODE) class_max_nregs (CLASS, MODE)
+
+#define STACK_PUSH_CODE POST_DEC
+
+#define STACK_GROWS_DOWNWARD
+
+#define STARTING_FRAME_OFFSET 1
+
+#define STACK_POINTER_OFFSET 1
+
+#define FIRST_PARM_OFFSET(FUNDECL) 0
+
+#define STACK_BOUNDARY 8
+
+#define STACK_POINTER_REGNUM 32
+
+#define FRAME_POINTER_REGNUM REG_Y
+
+#define ARG_POINTER_REGNUM 34
+
+#define STATIC_CHAIN_REGNUM 2
+
+#define FRAME_POINTER_REQUIRED frame_pointer_required_p()
+
+/* Offset from the frame pointer register value to the top of the stack. */
+#define FRAME_POINTER_CFA_OFFSET(FNDECL) 0
+
+#define ELIMINABLE_REGS { \
+ {ARG_POINTER_REGNUM, FRAME_POINTER_REGNUM}, \
+ {FRAME_POINTER_REGNUM, STACK_POINTER_REGNUM} \
+ ,{FRAME_POINTER_REGNUM+1,STACK_POINTER_REGNUM+1}}
+
+#define CAN_ELIMINATE(FROM, TO) (((FROM) == ARG_POINTER_REGNUM \
+ && (TO) == FRAME_POINTER_REGNUM) \
+ || (((FROM) == FRAME_POINTER_REGNUM \
+ || (FROM) == FRAME_POINTER_REGNUM+1) \
+ && ! FRAME_POINTER_REQUIRED \
+ ))
+
+#define INITIAL_ELIMINATION_OFFSET(FROM, TO, OFFSET) \
+ OFFSET = initial_elimination_offset (FROM, TO)
+
+#define RETURN_ADDR_RTX(count, x) \
+ gen_rtx_MEM (Pmode, memory_address (Pmode, plus_constant (tem, 1)))
+
+#define PUSH_ROUNDING(NPUSHED) (NPUSHED)
+
+#define RETURN_POPS_ARGS(FUNDECL, FUNTYPE, STACK_SIZE) 0
+
+#define FUNCTION_ARG(CUM, MODE, TYPE, NAMED) (function_arg (&(CUM), MODE, TYPE, NAMED))
+
+typedef struct avr_args {
+ int nregs; /* # registers available for passing */
+ int regno; /* next available register number */
+} CUMULATIVE_ARGS;
+
+#define INIT_CUMULATIVE_ARGS(CUM, FNTYPE, LIBNAME, FNDECL, N_NAMED_ARGS) \
+ init_cumulative_args (&(CUM), FNTYPE, LIBNAME, FNDECL)
+
+#define FUNCTION_ARG_ADVANCE(CUM, MODE, TYPE, NAMED) \
+ (function_arg_advance (&CUM, MODE, TYPE, NAMED))
+
+#define FUNCTION_ARG_REGNO_P(r) function_arg_regno_p(r)
+
+extern int avr_reg_order[];
+
+#define RET_REGISTER avr_ret_register ()
+
+#define FUNCTION_VALUE(VALTYPE, FUNC) avr_function_value (VALTYPE, FUNC)
+
+#define LIBCALL_VALUE(MODE) avr_libcall_value (MODE)
+
+#define FUNCTION_VALUE_REGNO_P(N) ((int) (N) == RET_REGISTER)
+
+#define DEFAULT_PCC_STRUCT_RETURN 0
+
+#define EPILOGUE_USES(REGNO) 0
+
+#define HAVE_POST_INCREMENT 1
+#define HAVE_PRE_DECREMENT 1
+
+#define CONSTANT_ADDRESS_P(X) CONSTANT_P (X)
+
+#define MAX_REGS_PER_ADDRESS 1
+
+#ifdef REG_OK_STRICT
+# define GO_IF_LEGITIMATE_ADDRESS(mode, operand, ADDR) \
+{ \
+ if (legitimate_address_p (mode, operand, 1)) \
+ goto ADDR; \
+}
+# else
+# define GO_IF_LEGITIMATE_ADDRESS(mode, operand, ADDR) \
+{ \
+ if (legitimate_address_p (mode, operand, 0)) \
+ goto ADDR; \
+}
+#endif
+
+#define REG_OK_FOR_BASE_NOSTRICT_P(X) \
+ (REGNO (X) >= FIRST_PSEUDO_REGISTER || REG_OK_FOR_BASE_STRICT_P(X))
+
+#define REG_OK_FOR_BASE_STRICT_P(X) REGNO_OK_FOR_BASE_P (REGNO (X))
+
+#ifdef REG_OK_STRICT
+# define REG_OK_FOR_BASE_P(X) REG_OK_FOR_BASE_STRICT_P (X)
+#else
+# define REG_OK_FOR_BASE_P(X) REG_OK_FOR_BASE_NOSTRICT_P (X)
+#endif
+
+#define REG_OK_FOR_INDEX_P(X) 0
+
+#define LEGITIMIZE_ADDRESS(X, OLDX, MODE, WIN) \
+{ \
+ (X) = legitimize_address (X, OLDX, MODE); \
+ if (memory_address_p (MODE, X)) \
+ goto WIN; \
+}
+
+#define XEXP_(X,Y) (X)
+#define LEGITIMIZE_RELOAD_ADDRESS(X, MODE, OPNUM, TYPE, IND_LEVELS, WIN) \
+do { \
+ if (1&&(GET_CODE (X) == POST_INC || GET_CODE (X) == PRE_DEC)) \
+ { \
+ push_reload (XEXP (X,0), XEXP (X,0), &XEXP (X,0), &XEXP (X,0), \
+ POINTER_REGS, GET_MODE (X),GET_MODE (X) , 0, 0, \
+ OPNUM, RELOAD_OTHER); \
+ goto WIN; \
+ } \
+ if (GET_CODE (X) == PLUS \
+ && REG_P (XEXP (X, 0)) \
+ && GET_CODE (XEXP (X, 1)) == CONST_INT \
+ && INTVAL (XEXP (X, 1)) >= 1) \
+ { \
+ int fit = INTVAL (XEXP (X, 1)) <= (64 - GET_MODE_SIZE (MODE)); \
+ if (fit) \
+ { \
+ if (reg_equiv_address[REGNO (XEXP (X, 0))] != 0) \
+ { \
+ int regno = REGNO (XEXP (X, 0)); \
+ rtx mem = make_memloc (X, regno); \
+ push_reload (XEXP (mem,0), NULL, &XEXP (mem,0), NULL, \
+ POINTER_REGS, Pmode, VOIDmode, 0, 0, \
+ 1, ADDR_TYPE (TYPE)); \
+ push_reload (mem, NULL_RTX, &XEXP (X, 0), NULL, \
+ BASE_POINTER_REGS, GET_MODE (X), VOIDmode, 0, 0, \
+ OPNUM, TYPE); \
+ goto WIN; \
+ } \
+ push_reload (XEXP (X, 0), NULL_RTX, &XEXP (X, 0), NULL, \
+ BASE_POINTER_REGS, GET_MODE (X), VOIDmode, 0, 0, \
+ OPNUM, TYPE); \
+ goto WIN; \
+ } \
+ else if (! (frame_pointer_needed && XEXP (X,0) == frame_pointer_rtx)) \
+ { \
+ push_reload (X, NULL_RTX, &X, NULL, \
+ POINTER_REGS, GET_MODE (X), VOIDmode, 0, 0, \
+ OPNUM, TYPE); \
+ goto WIN; \
+ } \
+ } \
+} while(0)
+
+#define GO_IF_MODE_DEPENDENT_ADDRESS(ADDR,LABEL) \
+ if (GET_CODE (ADDR) == POST_INC || GET_CODE (ADDR) == PRE_DEC) \
+ goto LABEL
+
+#define LEGITIMATE_CONSTANT_P(X) 1
+
+#define REGISTER_MOVE_COST(MODE, FROM, TO) ((FROM) == STACK_REG ? 6 \
+ : (TO) == STACK_REG ? 12 \
+ : 2)
+
+#define MEMORY_MOVE_COST(MODE,CLASS,IN) ((MODE)==QImode ? 2 : \
+ (MODE)==HImode ? 4 : \
+ (MODE)==SImode ? 8 : \
+ (MODE)==SFmode ? 8 : 16)
+
+#define BRANCH_COST 0
+
+#define SLOW_BYTE_ACCESS 0
+
+#define NO_FUNCTION_CSE
+
+#define TEXT_SECTION_ASM_OP "\t.text"
+
+#define DATA_SECTION_ASM_OP "\t.data"
+
+#define BSS_SECTION_ASM_OP "\t.section .bss"
+
+/* Define the pseudo-ops used to switch to the .ctors and .dtors sections.
+ There are no shared libraries on this target, and these sections are
+ placed in the read-only program memory, so they are not writable. */
+
+#undef CTORS_SECTION_ASM_OP
+#define CTORS_SECTION_ASM_OP "\t.section .ctors,\"a\",@progbits"
+
+#undef DTORS_SECTION_ASM_OP
+#define DTORS_SECTION_ASM_OP "\t.section .dtors,\"a\",@progbits"
+
+#define TARGET_ASM_CONSTRUCTOR avr_asm_out_ctor
+
+#define TARGET_ASM_DESTRUCTOR avr_asm_out_dtor
+
+#define JUMP_TABLES_IN_TEXT_SECTION 0
+
+#define ASM_COMMENT_START " ; "
+
+#define ASM_APP_ON "/* #APP */\n"
+
+#define ASM_APP_OFF "/* #NOAPP */\n"
+
+/* Switch into a generic section. */
+#define TARGET_ASM_NAMED_SECTION default_elf_asm_named_section
+#define TARGET_ASM_INIT_SECTIONS avr_asm_init_sections
+
+#define ASM_OUTPUT_ASCII(FILE, P, SIZE) gas_output_ascii (FILE,P,SIZE)
+
+#define IS_ASM_LOGICAL_LINE_SEPARATOR(C) ((C) == '\n' \
+ || ((C) == '$'))
+
+#define ASM_OUTPUT_COMMON(STREAM, NAME, SIZE, ROUNDED) \
+do { \
+ fputs ("\t.comm ", (STREAM)); \
+ assemble_name ((STREAM), (NAME)); \
+ fprintf ((STREAM), ",%lu,1\n", (unsigned long)(SIZE)); \
+} while (0)
+
+#define ASM_OUTPUT_BSS(FILE, DECL, NAME, SIZE, ROUNDED) \
+ asm_output_bss ((FILE), (DECL), (NAME), (SIZE), (ROUNDED))
+
+#define ASM_OUTPUT_LOCAL(STREAM, NAME, SIZE, ROUNDED) \
+do { \
+ fputs ("\t.lcomm ", (STREAM)); \
+ assemble_name ((STREAM), (NAME)); \
+ fprintf ((STREAM), ",%d\n", (int)(SIZE)); \
+} while (0)
+
+#undef TYPE_ASM_OP
+#undef SIZE_ASM_OP
+#undef WEAK_ASM_OP
+#define TYPE_ASM_OP "\t.type\t"
+#define SIZE_ASM_OP "\t.size\t"
+#define WEAK_ASM_OP "\t.weak\t"
+/* Define the strings used for the special svr4 .type and .size directives.
+ These strings generally do not vary from one system running svr4 to
+   another, but if a given system (e.g. m88k running svr4) needs to use
+ different pseudo-op names for these, they may be overridden in the
+ file which includes this one. */
+
+
+#undef TYPE_OPERAND_FMT
+#define TYPE_OPERAND_FMT "@%s"
+/* The following macro defines the format used to output the second
+ operand of the .type assembler directive. Different svr4 assemblers
+ expect various different forms for this operand. The one given here
+ is just a default. You may need to override it in your machine-
+ specific tm.h file (depending upon the particulars of your assembler). */
+
+#define ASM_DECLARE_FUNCTION_NAME(FILE, NAME, DECL) \
+do { \
+ ASM_OUTPUT_TYPE_DIRECTIVE (FILE, NAME, "function"); \
+ ASM_OUTPUT_LABEL (FILE, NAME); \
+} while (0)
+
+#define ASM_DECLARE_FUNCTION_SIZE(FILE, FNAME, DECL) \
+ do { \
+ if (!flag_inhibit_size_directive) \
+ ASM_OUTPUT_MEASURED_SIZE (FILE, FNAME); \
+ } while (0)
+
+#define ASM_DECLARE_OBJECT_NAME(FILE, NAME, DECL) \
+do { \
+ ASM_OUTPUT_TYPE_DIRECTIVE (FILE, NAME, "object"); \
+ size_directive_output = 0; \
+ if (!flag_inhibit_size_directive && DECL_SIZE (DECL)) \
+ { \
+ size_directive_output = 1; \
+ ASM_OUTPUT_SIZE_DIRECTIVE (FILE, NAME, \
+ int_size_in_bytes (TREE_TYPE (DECL))); \
+ } \
+ ASM_OUTPUT_LABEL(FILE, NAME); \
+} while (0)
+
+#undef ASM_FINISH_DECLARE_OBJECT
+#define ASM_FINISH_DECLARE_OBJECT(FILE, DECL, TOP_LEVEL, AT_END) \
+do { \
+ const char *name = XSTR (XEXP (DECL_RTL (DECL), 0), 0); \
+ HOST_WIDE_INT size; \
+ if (!flag_inhibit_size_directive && DECL_SIZE (DECL) \
+ && ! AT_END && TOP_LEVEL \
+ && DECL_INITIAL (DECL) == error_mark_node \
+ && !size_directive_output) \
+ { \
+ size_directive_output = 1; \
+ size = int_size_in_bytes (TREE_TYPE (DECL)); \
+ ASM_OUTPUT_SIZE_DIRECTIVE (FILE, name, size); \
+ } \
+ } while (0)
+
+
+#define ESCAPES \
+"\1\1\1\1\1\1\1\1btn\1fr\1\1\1\1\1\1\1\1\1\1\1\1\1\1\1\1\1\1\
+\0\0\"\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\
+\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\\\0\0\0\
+\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\1\
+\1\1\1\1\1\1\1\1\1\1\1\1\1\1\1\1\1\1\1\1\1\1\1\1\1\1\1\1\1\1\1\1\
+\1\1\1\1\1\1\1\1\1\1\1\1\1\1\1\1\1\1\1\1\1\1\1\1\1\1\1\1\1\1\1\1\
+\1\1\1\1\1\1\1\1\1\1\1\1\1\1\1\1\1\1\1\1\1\1\1\1\1\1\1\1\1\1\1\1\
+\1\1\1\1\1\1\1\1\1\1\1\1\1\1\1\1\1\1\1\1\1\1\1\1\1\1\1\1\1\1\1\1"
+/* A table of byte codes used by the ASM_OUTPUT_ASCII and
+   ASM_OUTPUT_LIMITED_STRING macros.  Each byte in the table
+ corresponds to a particular byte value [0..255]. For any
+ given byte value, if the value in the corresponding table
+ position is zero, the given character can be output directly.
+ If the table value is 1, the byte must be output as a \ooo
+   octal escape.  If the table's value is anything else, then the
+ byte value should be output as a \ followed by the value
+ in the table. Note that we can use standard UN*X escape
+ sequences for many control characters, but we don't use
+ \a to represent BEL because some svr4 assemblers (e.g. on
+ the i386) don't know about that. Also, we don't use \v
+ since some versions of gas, such as 2.2 did not accept it. */
+
+#define STRING_LIMIT ((unsigned) 64)
+#define STRING_ASM_OP "\t.string\t"
+/* Some svr4 assemblers have a limit on the number of characters which
+ can appear in the operand of a .string directive. If your assembler
+ has such a limitation, you should define STRING_LIMIT to reflect that
+ limit. Note that at least some svr4 assemblers have a limit on the
+ actual number of bytes in the double-quoted string, and that they
+ count each character in an escape sequence as one byte. Thus, an
+ escape sequence like \377 would count as four bytes.
+
+ If your target assembler doesn't support the .string directive, you
+ should define this to zero. */
+
+/* Globalizing directive for a label. */
+#define GLOBAL_ASM_OP ".global\t"
+
+#define SET_ASM_OP "\t.set\t"
+
+#define ASM_WEAKEN_LABEL(FILE, NAME) \
+ do \
+ { \
+ fputs ("\t.weak\t", (FILE)); \
+ assemble_name ((FILE), (NAME)); \
+ fputc ('\n', (FILE)); \
+ } \
+ while (0)
+
+#define SUPPORTS_WEAK 1
+
+#define ASM_GENERATE_INTERNAL_LABEL(STRING, PREFIX, NUM) \
+sprintf (STRING, "*.%s%lu", PREFIX, (unsigned long)(NUM))
+
+#define HAS_INIT_SECTION 1
+
+#define REGISTER_NAMES { \
+ "r0","r1","r2","r3","r4","r5","r6","r7", \
+ "r8","r9","r10","r11","r12","r13","r14","r15", \
+ "r16","r17","r18","r19","r20","r21","r22","r23", \
+ "r24","r25","r26","r27","r28","r29","r30","r31", \
+ "__SPL__","__SPH__","argL","argH"}
+
+#define FINAL_PRESCAN_INSN(insn, operand, nop) final_prescan_insn (insn, operand,nop)
+
+#define PRINT_OPERAND(STREAM, X, CODE) print_operand (STREAM, X, CODE)
+
+#define PRINT_OPERAND_PUNCT_VALID_P(CODE) ((CODE) == '~')
+
+#define PRINT_OPERAND_ADDRESS(STREAM, X) print_operand_address(STREAM, X)
+
+#define USER_LABEL_PREFIX ""
+
+#define ASSEMBLER_DIALECT AVR_HAVE_MOVW
+
+#define ASM_OUTPUT_REG_PUSH(STREAM, REGNO) \
+{ \
+ gcc_assert (REGNO < 32); \
+ fprintf (STREAM, "\tpush\tr%d", REGNO); \
+}
+
+#define ASM_OUTPUT_REG_POP(STREAM, REGNO) \
+{ \
+ gcc_assert (REGNO < 32); \
+ fprintf (STREAM, "\tpop\tr%d", REGNO); \
+}
+
+#define ASM_OUTPUT_ADDR_VEC_ELT(STREAM, VALUE) \
+ avr_output_addr_vec_elt(STREAM, VALUE)
+
+#define ASM_OUTPUT_CASE_LABEL(STREAM, PREFIX, NUM, TABLE) \
+ (switch_to_section (progmem_section), \
+ (*targetm.asm_out.internal_label) (STREAM, PREFIX, NUM))
+
+#define ASM_OUTPUT_SKIP(STREAM, N) \
+fprintf (STREAM, "\t.skip %lu,0\n", (unsigned long)(N))
+
+#define ASM_OUTPUT_ALIGN(STREAM, POWER) \
+ do { \
+ if ((POWER) > 1) \
+ fprintf (STREAM, "\t.p2align\t%d\n", POWER); \
+ } while (0)
+
+#define CASE_VECTOR_MODE HImode
+
+extern int avr_case_values_threshold;
+
+#define CASE_VALUES_THRESHOLD avr_case_values_threshold
+
+#undef WORD_REGISTER_OPERATIONS
+
+#define MOVE_MAX 4
+
+#define TRULY_NOOP_TRUNCATION(OUTPREC, INPREC) 1
+
+#define Pmode HImode
+
+#define FUNCTION_MODE HImode
+
+#define DOLLARS_IN_IDENTIFIERS 0
+
+#define NO_DOLLAR_IN_LABEL 1
+
+#define TRAMPOLINE_TEMPLATE(FILE) \
+ internal_error ("trampolines not supported")
+
+#define TRAMPOLINE_SIZE 4
+
+#define INITIALIZE_TRAMPOLINE(TRAMP, FNADDR, CXT) \
+{ \
+ emit_move_insn (gen_rtx_MEM (HImode, plus_constant ((TRAMP), 2)), CXT); \
+ emit_move_insn (gen_rtx_MEM (HImode, plus_constant ((TRAMP), 6)), FNADDR); \
+}
+/* Store in cc_status the expressions
+ that the condition codes will describe
+ after execution of an instruction whose pattern is EXP.
+ Do not alter them if the instruction would not alter the cc's. */
+
+#define NOTICE_UPDATE_CC(EXP, INSN) notice_update_cc(EXP, INSN)
+
+/* The add insns don't set overflow in a usable way. */
+#define CC_OVERFLOW_UNUSABLE 01000
+/* The mov,and,or,xor insns don't set carry. That's ok though as the
+ Z bit is all we need when doing unsigned comparisons on the result of
+ these insns (since they're always with 0). However, conditions.h has
+ CC_NO_OVERFLOW defined for this purpose. Rename it to something more
+ understandable. */
+#define CC_NO_CARRY CC_NO_OVERFLOW
+
+
+/* Output assembler code to FILE to increment profiler label # LABELNO
+ for profiling a function entry. */
+
+#define FUNCTION_PROFILER(FILE, LABELNO) \
+ fprintf (FILE, "/* profiler %d */", (LABELNO))
+
+#define ADJUST_INSN_LENGTH(INSN, LENGTH) (LENGTH =\
+ adjust_insn_length (INSN, LENGTH))
+
+#define CPP_SPEC "%{posix:-D_POSIX_SOURCE}"
+
+#define CC1_SPEC "%{profile:-p}"
+
+#define CC1PLUS_SPEC "%{!frtti:-fno-rtti} \
+ %{!fenforce-eh-specs:-fno-enforce-eh-specs} \
+ %{!fexceptions:-fno-exceptions}"
+/* A C string constant that tells the GCC driver program options to
+   pass to `cc1plus'.  */
+
+#define ASM_SPEC "%{mmcu=avr25:-mmcu=avr2;\
+mmcu=*:-mmcu=%*}"
+
+#define LINK_SPEC " %{!mmcu*:-m avr2}\
+%{mmcu=at90s1200|\
+ mmcu=attiny11|\
+ mmcu=attiny12|\
+ mmcu=attiny15|\
+ mmcu=attiny28:-m avr1}\
+%{mmcu=attiny22|\
+ mmcu=attiny26|\
+ mmcu=at90s2*|\
+ mmcu=at90s4*|\
+ mmcu=at90s8*|\
+ mmcu=at90c8*|\
+ mmcu=at86rf401|\
+ mmcu=attiny13|\
+ mmcu=attiny2313|\
+ mmcu=attiny24|\
+ mmcu=attiny25|\
+ mmcu=attiny261|\
+ mmcu=attiny4*|\
+ mmcu=attiny8*:-m avr2}\
+%{mmcu=atmega103|\
+ mmcu=atmega603|\
+ mmcu=at43*|\
+ mmcu=at76*:-m avr3}\
+%{mmcu=atmega8*|\
+ mmcu=atmega48|\
+ mmcu=at90pwm*:-m avr4}\
+%{mmcu=atmega16*|\
+ mmcu=atmega32*|\
+ mmcu=atmega406|\
+ mmcu=atmega64*|\
+ mmcu=atmega128*|\
+ mmcu=at90can*|\
+ mmcu=at90usb*|\
+ mmcu=at94k:-m avr5}\
+%{mmcu=atmega324*|\
+ mmcu=atmega325*|\
+ mmcu=atmega329*|\
+ mmcu=atmega406|\
+ mmcu=atmega48|\
+ mmcu=atmega88|\
+ mmcu=atmega64|\
+ mmcu=atmega644*|\
+ mmcu=atmega645*|\
+ mmcu=atmega649*|\
+ mmcu=atmega128|\
+ mmcu=atmega162|\
+ mmcu=atmega164*|\
+ mmcu=atmega165*|\
+ mmcu=atmega168|\
+ mmcu=atmega169*|\
+ mmcu=atmega8hva|\
+ mmcu=atmega16hva|\
+ mmcu=at90can*|\
+ mmcu=at90pwm*|\
+ mmcu=at90usb*: -Tdata 0x800100}\
+%{mmcu=atmega640|\
+ mmcu=atmega1280|\
+ mmcu=atmega1281: -Tdata 0x800200} "
+
+#define LIB_SPEC \
+ "%{!mmcu=at90s1*:%{!mmcu=attiny11:%{!mmcu=attiny12:%{!mmcu=attiny15:%{!mmcu=attiny28: -lc }}}}}"
+
+#define LIBSTDCXX "-lgcc"
+/* No libstdc++ for now. Empty string doesn't work. */
+
+#define LIBGCC_SPEC \
+ "%{!mmcu=at90s1*:%{!mmcu=attiny11:%{!mmcu=attiny12:%{!mmcu=attiny15:%{!mmcu=attiny28: -lgcc }}}}}"
+
+#define STARTFILE_SPEC "%(crt_binutils)"
+
+#define ENDFILE_SPEC ""
+
+#define CRT_BINUTILS_SPECS "\
+%{mmcu=at90s1200|mmcu=avr1:crts1200.o%s} \
+%{mmcu=attiny11:crttn11.o%s} \
+%{mmcu=attiny12:crttn12.o%s} \
+%{mmcu=attiny15:crttn15.o%s} \
+%{mmcu=attiny28:crttn28.o%s} \
+%{!mmcu*|mmcu=at90s8515|mmcu=avr2:crts8515.o%s} \
+%{mmcu=at90s2313:crts2313.o%s} \
+%{mmcu=at90s2323:crts2323.o%s} \
+%{mmcu=at90s2333:crts2333.o%s} \
+%{mmcu=at90s2343:crts2343.o%s} \
+%{mmcu=attiny22:crttn22.o%s} \
+%{mmcu=attiny26:crttn26.o%s} \
+%{mmcu=at90s4433:crts4433.o%s} \
+%{mmcu=at90s4414:crts4414.o%s} \
+%{mmcu=at90s4434:crts4434.o%s} \
+%{mmcu=at90c8534:crtc8534.o%s} \
+%{mmcu=at90s8535:crts8535.o%s} \
+%{mmcu=at86rf401:crt86401.o%s} \
+%{mmcu=attiny13:crttn13.o%s} \
+%{mmcu=attiny2313|mmcu=avr25:crttn2313.o%s} \
+%{mmcu=attiny24:crttn24.o%s} \
+%{mmcu=attiny44:crttn44.o%s} \
+%{mmcu=attiny84:crttn84.o%s} \
+%{mmcu=attiny25:crttn25.o%s} \
+%{mmcu=attiny45:crttn45.o%s} \
+%{mmcu=attiny85:crttn85.o%s} \
+%{mmcu=attiny261:crttn261.o%s} \
+%{mmcu=attiny461:crttn461.o%s} \
+%{mmcu=attiny861:crttn861.o%s} \
+%{mmcu=atmega103|mmcu=avr3:crtm103.o%s} \
+%{mmcu=atmega603:crtm603.o%s} \
+%{mmcu=at43usb320:crt43320.o%s} \
+%{mmcu=at43usb355:crt43355.o%s} \
+%{mmcu=at76c711:crt76711.o%s} \
+%{mmcu=atmega8|mmcu=avr4:crtm8.o%s} \
+%{mmcu=atmega48:crtm48.o%s} \
+%{mmcu=atmega88:crtm88.o%s} \
+%{mmcu=atmega8515:crtm8515.o%s} \
+%{mmcu=atmega8535:crtm8535.o%s} \
+%{mmcu=at90pwm1:crt90pwm1.o%s} \
+%{mmcu=at90pwm2:crt90pwm2.o%s} \
+%{mmcu=at90pwm3:crt90pwm3.o%s} \
+%{mmcu=atmega16:crtm16.o%s} \
+%{mmcu=atmega161|mmcu=avr5:crtm161.o%s} \
+%{mmcu=atmega162:crtm162.o%s} \
+%{mmcu=atmega163:crtm163.o%s} \
+%{mmcu=atmega164p:crtm164p.o%s} \
+%{mmcu=atmega165:crtm165.o%s} \
+%{mmcu=atmega165p:crtm165p.o%s} \
+%{mmcu=atmega168:crtm168.o%s} \
+%{mmcu=atmega169:crtm169.o%s} \
+%{mmcu=atmega169p:crtm169p.o%s} \
+%{mmcu=atmega32:crtm32.o%s} \
+%{mmcu=atmega323:crtm323.o%s} \
+%{mmcu=atmega324p:crtm324p.o%s} \
+%{mmcu=atmega325:crtm325.o%s} \
+%{mmcu=atmega325p:crtm325p.o%s} \
+%{mmcu=atmega3250:crtm3250.o%s} \
+%{mmcu=atmega3250p:crtm3250p.o%s} \
+%{mmcu=atmega329:crtm329.o%s} \
+%{mmcu=atmega329p:crtm329p.o%s} \
+%{mmcu=atmega3290:crtm3290.o%s} \
+%{mmcu=atmega3290p:crtm3290p.o%s} \
+%{mmcu=atmega406:crtm406.o%s} \
+%{mmcu=atmega64:crtm64.o%s} \
+%{mmcu=atmega640:crtm640.o%s} \
+%{mmcu=atmega644:crtm644.o%s} \
+%{mmcu=atmega644p:crtm644p.o%s} \
+%{mmcu=atmega645:crtm645.o%s} \
+%{mmcu=atmega6450:crtm6450.o%s} \
+%{mmcu=atmega649:crtm649.o%s} \
+%{mmcu=atmega6490:crtm6490.o%s} \
+%{mmcu=atmega128:crtm128.o%s} \
+%{mmcu=atmega1280:crtm1280.o%s} \
+%{mmcu=atmega1281:crtm1281.o%s} \
+%{mmcu=atmega8hva:crtm8hva.o%s} \
+%{mmcu=atmega16hva:crtm16hva.o%s} \
+%{mmcu=at90can32:crtcan32.o%s} \
+%{mmcu=at90can64:crtcan64.o%s} \
+%{mmcu=at90can128:crtcan128.o%s} \
+%{mmcu=at90usb82:crtusb82.o%s} \
+%{mmcu=at90usb162:crtusb162.o%s} \
+%{mmcu=at90usb646:crtusb646.o%s} \
+%{mmcu=at90usb647:crtusb647.o%s} \
+%{mmcu=at90usb1286:crtusb1286.o%s} \
+%{mmcu=at90usb1287:crtusb1287.o%s} \
+%{mmcu=at94k:crtat94k.o%s}"
+
+#define EXTRA_SPECS {"crt_binutils", CRT_BINUTILS_SPECS},
+
+/* This is the default without any -mmcu=* option (AT90S*). */
+#define MULTILIB_DEFAULTS { "mmcu=avr2" }
+
+/* This is undefined macro for collect2 disabling */
+#define LINKER_NAME "ld"
+
+#define TEST_HARD_REG_CLASS(CLASS, REGNO) \
+ TEST_HARD_REG_BIT (reg_class_contents[ (int) (CLASS)], REGNO)
+
+/* Note that the other files fail to use these
+ in some of the places where they should. */
+
+#if defined(__STDC__) || defined(ALMOST_STDC)
+#define AS2(a,b,c) #a " " #b "," #c
+#define AS2C(b,c) " " #b "," #c
+#define AS3(a,b,c,d) #a " " #b "," #c "," #d
+#define AS1(a,b) #a " " #b
+#else
+#define AS1(a,b) "a b"
+#define AS2(a,b,c) "a b,c"
+#define AS2C(b,c) " b,c"
+#define AS3(a,b,c,d) "a b,c,d"
+#endif
+#define OUT_AS1(a,b) output_asm_insn (AS1(a,b), operands)
+#define OUT_AS2(a,b,c) output_asm_insn (AS2(a,b,c), operands)
+#define CR_TAB "\n\t"
+
+#define PREFERRED_DEBUGGING_TYPE DBX_DEBUG
+
+#define DWARF2_DEBUGGING_INFO 1
+
+#define DWARF2_ADDR_SIZE 4
+
+#define OBJECT_FORMAT_ELF