OpenCores
URL https://opencores.org/ocsvn/scarts/scarts/trunk

Subversion Repositories scarts

[/] [scarts/] [trunk/] [toolchain/] [scarts-gcc/] [gcc-4.1.1/] [gcc/] [ifcvt.c] - Blame information for rev 20

Go to most recent revision | Details | Compare with Previous | View Log

Line No. Rev Author Line
1 12 jlechner
/* If-conversion support.
2
   Copyright (C) 2000, 2001, 2002, 2003, 2004, 2005
3
   Free Software Foundation, Inc.
4
 
5
   This file is part of GCC.
6
 
7
   GCC is free software; you can redistribute it and/or modify it
8
   under the terms of the GNU General Public License as published by
9
   the Free Software Foundation; either version 2, or (at your option)
10
   any later version.
11
 
12
   GCC is distributed in the hope that it will be useful, but WITHOUT
13
   ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
14
   or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public
15
   License for more details.
16
 
17
   You should have received a copy of the GNU General Public License
18
   along with GCC; see the file COPYING.  If not, write to the Free
19
   Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
20
   02110-1301, USA.  */
21
 
22
#include "config.h"
23
#include "system.h"
24
#include "coretypes.h"
25
#include "tm.h"
26
 
27
#include "rtl.h"
28
#include "regs.h"
29
#include "function.h"
30
#include "flags.h"
31
#include "insn-config.h"
32
#include "recog.h"
33
#include "except.h"
34
#include "hard-reg-set.h"
35
#include "basic-block.h"
36
#include "expr.h"
37
#include "real.h"
38
#include "output.h"
39
#include "optabs.h"
40
#include "toplev.h"
41
#include "tm_p.h"
42
#include "cfgloop.h"
43
#include "target.h"
44
#include "timevar.h"
45
#include "tree-pass.h"
46
 
47
 
48
/* Provide zero defaults for the optional machine-description patterns
   queried below, so that the code can test HAVE_* unconditionally.  */
#ifndef HAVE_conditional_execution
#define HAVE_conditional_execution 0
#endif
#ifndef HAVE_conditional_move
#define HAVE_conditional_move 0
#endif
#ifndef HAVE_incscc
#define HAVE_incscc 0
#endif
#ifndef HAVE_decscc
#define HAVE_decscc 0
#endif
#ifndef HAVE_trap
#define HAVE_trap 0
#endif
#ifndef HAVE_conditional_trap
#define HAVE_conditional_trap 0
#endif

/* Default budget of insns to convert per arm; targets may override.  */
#ifndef MAX_CONDITIONAL_EXECUTE
#define MAX_CONDITIONAL_EXECUTE   (BRANCH_COST + 1)
#endif

/* Typed null for basic_block pointers.  */
#define NULL_BLOCK      ((basic_block) NULL)
72
 
73
/* # of IF-THEN or IF-THEN-ELSE blocks we looked at.  */
static int num_possible_if_blocks;

/* # of IF-THEN or IF-THEN-ELSE blocks that were converted to conditional
   execution.  */
static int num_updated_if_blocks;

/* # of changes made which require life information to be updated.  */
static int num_true_changes;

/* Whether conditional execution changes were made.  */
static int cond_exec_changed_p;

/* True if life data ok at present.  */
static bool life_data_ok;
88
 
89
/* Forward references.  */
90
static int count_bb_insns (basic_block);
91
static bool cheap_bb_rtx_cost_p (basic_block, int);
92
static rtx first_active_insn (basic_block);
93
static rtx last_active_insn (basic_block, int);
94
static basic_block block_fallthru (basic_block);
95
static int cond_exec_process_insns (ce_if_block_t *, rtx, rtx, rtx, rtx, int);
96
static rtx cond_exec_get_condition (rtx);
97
static int cond_exec_process_if_block (ce_if_block_t *, int);
98
static rtx noce_get_condition (rtx, rtx *);
99
static int noce_operand_ok (rtx);
100
static int noce_process_if_block (ce_if_block_t *);
101
static int process_if_block (ce_if_block_t *);
102
static void merge_if_block (ce_if_block_t *);
103
static int find_cond_trap (basic_block, edge, edge);
104
static basic_block find_if_header (basic_block, int);
105
static int block_jumps_and_fallthru_p (basic_block, basic_block);
106
static int find_if_block (ce_if_block_t *);
107
static int find_if_case_1 (basic_block, edge, edge);
108
static int find_if_case_2 (basic_block, edge, edge);
109
static int find_memory (rtx *, void *);
110
static int dead_or_predicable (basic_block, basic_block, basic_block,
111
                               basic_block, int);
112
static void noce_emit_move_insn (rtx, rtx);
113
static rtx block_has_only_trap (basic_block);
114
 
115
/* Count the number of non-jump active insns in BB.  */
116
 
117
static int
118
count_bb_insns (basic_block bb)
119
{
120
  int count = 0;
121
  rtx insn = BB_HEAD (bb);
122
 
123
  while (1)
124
    {
125
      if (CALL_P (insn) || NONJUMP_INSN_P (insn))
126
        count++;
127
 
128
      if (insn == BB_END (bb))
129
        break;
130
      insn = NEXT_INSN (insn);
131
    }
132
 
133
  return count;
134
}
135
 
136
/* Determine whether the total insn_rtx_cost on non-jump insns in
137
   basic block BB is less than MAX_COST.  This function returns
138
   false if the cost of any instruction could not be estimated.  */
139
 
140
static bool
141
cheap_bb_rtx_cost_p (basic_block bb, int max_cost)
142
{
143
  int count = 0;
144
  rtx insn = BB_HEAD (bb);
145
 
146
  while (1)
147
    {
148
      if (NONJUMP_INSN_P (insn))
149
        {
150
          int cost = insn_rtx_cost (PATTERN (insn));
151
          if (cost == 0)
152
            return false;
153
 
154
          /* If this instruction is the load or set of a "stack" register,
155
             such as a floating point register on x87, then the cost of
156
             speculatively executing this instruction needs to include
157
             the additional cost of popping this register off of the
158
             register stack.  */
159
#ifdef STACK_REGS
160
          {
161
            rtx set = single_set (insn);
162
            if (set && STACK_REG_P (SET_DEST (set)))
163
              cost += COSTS_N_INSNS (1);
164
          }
165
#endif
166
 
167
          count += cost;
168
          if (count >= max_cost)
169
            return false;
170
        }
171
      else if (CALL_P (insn))
172
        return false;
173
 
174
      if (insn == BB_END (bb))
175
        break;
176
      insn = NEXT_INSN (insn);
177
    }
178
 
179
  return true;
180
}
181
 
182
/* Return the first non-jump active insn in the basic block.  */
183
 
184
static rtx
185
first_active_insn (basic_block bb)
186
{
187
  rtx insn = BB_HEAD (bb);
188
 
189
  if (LABEL_P (insn))
190
    {
191
      if (insn == BB_END (bb))
192
        return NULL_RTX;
193
      insn = NEXT_INSN (insn);
194
    }
195
 
196
  while (NOTE_P (insn))
197
    {
198
      if (insn == BB_END (bb))
199
        return NULL_RTX;
200
      insn = NEXT_INSN (insn);
201
    }
202
 
203
  if (JUMP_P (insn))
204
    return NULL_RTX;
205
 
206
  return insn;
207
}
208
 
209
/* Return the last non-jump active (non-jump) insn in the basic block.  */
210
 
211
static rtx
212
last_active_insn (basic_block bb, int skip_use_p)
213
{
214
  rtx insn = BB_END (bb);
215
  rtx head = BB_HEAD (bb);
216
 
217
  while (NOTE_P (insn)
218
         || JUMP_P (insn)
219
         || (skip_use_p
220
             && NONJUMP_INSN_P (insn)
221
             && GET_CODE (PATTERN (insn)) == USE))
222
    {
223
      if (insn == head)
224
        return NULL_RTX;
225
      insn = PREV_INSN (insn);
226
    }
227
 
228
  if (LABEL_P (insn))
229
    return NULL_RTX;
230
 
231
  return insn;
232
}
233
 
234
/* Return the basic block reached by falling though the basic block BB.  */
235
 
236
static basic_block
237
block_fallthru (basic_block bb)
238
{
239
  edge e;
240
  edge_iterator ei;
241
 
242
  FOR_EACH_EDGE (e, ei, bb->succs)
243
    if (e->flags & EDGE_FALLTHRU)
244
      break;
245
 
246
  return (e) ? e->dest : NULL_BLOCK;
247
}
248
 
249
/* Go through a bunch of insns, converting them to conditional
   execution format if possible.  Return TRUE if all of the non-note
   insns were processed.  Note that conversions are only queued with
   validate_change; the caller commits them with apply_change_group.  */

static int
cond_exec_process_insns (ce_if_block_t *ce_info ATTRIBUTE_UNUSED,
                         /* if block information */rtx start,
                         /* first insn to look at */rtx end,
                         /* last insn to look at */rtx test,
                         /* conditional execution test */rtx prob_val,
                         /* probability of branch taken. */int mod_ok)
{
  int must_be_last = FALSE;
  rtx insn;
  rtx xtest;
  rtx pattern;

  if (!start || !end)
    return FALSE;

  for (insn = start; ; insn = NEXT_INSN (insn))
    {
      if (NOTE_P (insn))
        goto insn_done;

      gcc_assert(NONJUMP_INSN_P (insn) || CALL_P (insn));

      /* Remove USE insns that get in the way.  */
      if (reload_completed && GET_CODE (PATTERN (insn)) == USE)
        {
          /* ??? Ug.  Actually unlinking the thing is problematic,
             given what we'd have to coordinate with our callers.  */
          SET_INSN_DELETED (insn);
          goto insn_done;
        }

      /* Last insn wasn't last?  An insn that modifies the test must be
         the final one processed; seeing anything after it fails.  */
      if (must_be_last)
        return FALSE;

      if (modified_in_p (test, insn))
        {
          if (!mod_ok)
            return FALSE;
          must_be_last = TRUE;
        }

      /* Now build the conditional form of the instruction.  */
      pattern = PATTERN (insn);
      xtest = copy_rtx (test);

      /* If this is already a COND_EXEC, rewrite the test to be an AND of the
         two conditions.  */
      if (GET_CODE (pattern) == COND_EXEC)
        {
          if (GET_MODE (xtest) != GET_MODE (COND_EXEC_TEST (pattern)))
            return FALSE;

          xtest = gen_rtx_AND (GET_MODE (xtest), xtest,
                               COND_EXEC_TEST (pattern));
          pattern = COND_EXEC_CODE (pattern);
        }

      pattern = gen_rtx_COND_EXEC (VOIDmode, xtest, pattern);

      /* If the machine needs to modify the insn being conditionally executed,
         say for example to force a constant integer operand into a temp
         register, do so here.  */
#ifdef IFCVT_MODIFY_INSN
      IFCVT_MODIFY_INSN (ce_info, pattern, insn);
      if (! pattern)
        return FALSE;
#endif

      validate_change (insn, &PATTERN (insn), pattern, 1);

      /* Attach the taken-branch probability note to conditionalized
         calls, when one is available.  */
      if (CALL_P (insn) && prob_val)
        validate_change (insn, &REG_NOTES (insn),
                         alloc_EXPR_LIST (REG_BR_PROB, prob_val,
                                          REG_NOTES (insn)), 1);

    insn_done:
      if (insn == end)
        break;
    }

  return TRUE;
}
337
 
338
/* Return the condition for a jump.  Do not do any special processing.  */
339
 
340
static rtx
341
cond_exec_get_condition (rtx jump)
342
{
343
  rtx test_if, cond;
344
 
345
  if (any_condjump_p (jump))
346
    test_if = SET_SRC (pc_set (jump));
347
  else
348
    return NULL_RTX;
349
  cond = XEXP (test_if, 0);
350
 
351
  /* If this branches to JUMP_LABEL when the condition is false,
352
     reverse the condition.  */
353
  if (GET_CODE (XEXP (test_if, 2)) == LABEL_REF
354
      && XEXP (XEXP (test_if, 2), 0) == JUMP_LABEL (jump))
355
    {
356
      enum rtx_code rev = reversed_comparison_code (cond, jump);
357
      if (rev == UNKNOWN)
358
        return NULL_RTX;
359
 
360
      cond = gen_rtx_fmt_ee (rev, GET_MODE (cond), XEXP (cond, 0),
361
                             XEXP (cond, 1));
362
    }
363
 
364
  return cond;
365
}
366
 
367
/* Given a simple IF-THEN or IF-THEN-ELSE block, attempt to convert it
   to conditional execution.  Return TRUE if we were successful at
   converting the block.  If DO_MULTIPLE_P, also handle chained &&/||
   test blocks; otherwise fall back to the last && test block alone.  */

static int
cond_exec_process_if_block (ce_if_block_t * ce_info,
                            /* if block information */int do_multiple_p)
{
  basic_block test_bb = ce_info->test_bb;       /* last test block */
  basic_block then_bb = ce_info->then_bb;       /* THEN */
  basic_block else_bb = ce_info->else_bb;       /* ELSE or NULL */
  rtx test_expr;                /* expression in IF_THEN_ELSE that is tested */
  rtx then_start;               /* first insn in THEN block */
  rtx then_end;                 /* last insn + 1 in THEN block */
  rtx else_start = NULL_RTX;    /* first insn in ELSE block or NULL */
  rtx else_end = NULL_RTX;      /* last insn + 1 in ELSE block */
  int max;                      /* max # of insns to convert.  */
  int then_mod_ok;              /* whether conditional mods are ok in THEN */
  rtx true_expr;                /* test for else block insns */
  rtx false_expr;               /* test for then block insns */
  rtx true_prob_val;            /* probability of else block */
  rtx false_prob_val;           /* probability of then block */
  int n_insns;
  enum rtx_code false_code;

  /* If test is comprised of && or || elements, and we've failed at handling
     all of them together, just use the last test if it is the special case of
     && elements without an ELSE block.  */
  if (!do_multiple_p && ce_info->num_multiple_test_blocks)
    {
      if (else_bb || ! ce_info->and_and_p)
        return FALSE;

      ce_info->test_bb = test_bb = ce_info->last_test_bb;
      ce_info->num_multiple_test_blocks = 0;
      ce_info->num_and_and_blocks = 0;
      ce_info->num_or_or_blocks = 0;
    }

  /* Find the conditional jump to the ELSE or JOIN part, and isolate
     the test.  */
  test_expr = cond_exec_get_condition (BB_END (test_bb));
  if (! test_expr)
    return FALSE;

  /* If the conditional jump is more than just a conditional jump,
     then we can not do conditional execution conversion on this block.  */
  if (! onlyjump_p (BB_END (test_bb)))
    return FALSE;

  /* Collect the bounds of where we're to search, skipping any labels, jumps
     and notes at the beginning and end of the block.  Then count the total
     number of insns and see if it is small enough to convert.  */
  then_start = first_active_insn (then_bb);
  then_end = last_active_insn (then_bb, TRUE);
  n_insns = ce_info->num_then_insns = count_bb_insns (then_bb);
  max = MAX_CONDITIONAL_EXECUTE;

  if (else_bb)
    {
      /* With both arms present, allow twice the budget.  */
      max *= 2;
      else_start = first_active_insn (else_bb);
      else_end = last_active_insn (else_bb, TRUE);
      n_insns += ce_info->num_else_insns = count_bb_insns (else_bb);
    }

  if (n_insns > max)
    return FALSE;

  /* Map test_expr/test_jump into the appropriate MD tests to use on
     the conditionally executed code.  */

  true_expr = test_expr;

  false_code = reversed_comparison_code (true_expr, BB_END (test_bb));
  if (false_code != UNKNOWN)
    false_expr = gen_rtx_fmt_ee (false_code, GET_MODE (true_expr),
                                 XEXP (true_expr, 0), XEXP (true_expr, 1));
  else
    false_expr = NULL_RTX;

#ifdef IFCVT_MODIFY_TESTS
  /* If the machine description needs to modify the tests, such as setting a
     conditional execution register from a comparison, it can do so here.  */
  IFCVT_MODIFY_TESTS (ce_info, true_expr, false_expr);

  /* See if the conversion failed.  */
  if (!true_expr || !false_expr)
    goto fail;
#endif

  true_prob_val = find_reg_note (BB_END (test_bb), REG_BR_PROB, NULL_RTX);
  if (true_prob_val)
    {
      true_prob_val = XEXP (true_prob_val, 0);
      false_prob_val = GEN_INT (REG_BR_PROB_BASE - INTVAL (true_prob_val));
    }
  else
    false_prob_val = NULL_RTX;

  /* If we have && or || tests, do them here.  These tests are in the adjacent
     blocks after the first block containing the test.  */
  if (ce_info->num_multiple_test_blocks > 0)
    {
      basic_block bb = test_bb;
      basic_block last_test_bb = ce_info->last_test_bb;

      if (! false_expr)
        goto fail;

      do
        {
          rtx start, end;
          rtx t, f;
          enum rtx_code f_code;

          bb = block_fallthru (bb);
          start = first_active_insn (bb);
          end = last_active_insn (bb, TRUE);
          if (start
              && ! cond_exec_process_insns (ce_info, start, end, false_expr,
                                            false_prob_val, FALSE))
            goto fail;

          /* If the conditional jump is more than just a conditional jump, then
             we can not do conditional execution conversion on this block.  */
          if (! onlyjump_p (BB_END (bb)))
            goto fail;

          /* Find the conditional jump and isolate the test.  */
          t = cond_exec_get_condition (BB_END (bb));
          if (! t)
            goto fail;

          f_code = reversed_comparison_code (t, BB_END (bb));
          if (f_code == UNKNOWN)
            goto fail;

          f = gen_rtx_fmt_ee (f_code, GET_MODE (t), XEXP (t, 0), XEXP (t, 1));
          if (ce_info->and_and_p)
            {
              /* Accumulate the chained tests: AND for the true test,
                 IOR (De Morgan) for the false test.  */
              t = gen_rtx_AND (GET_MODE (t), true_expr, t);
              f = gen_rtx_IOR (GET_MODE (t), false_expr, f);
            }
          else
            {
              t = gen_rtx_IOR (GET_MODE (t), true_expr, t);
              f = gen_rtx_AND (GET_MODE (t), false_expr, f);
            }

          /* If the machine description needs to modify the tests, such as
             setting a conditional execution register from a comparison, it can
             do so here.  */
#ifdef IFCVT_MODIFY_MULTIPLE_TESTS
          IFCVT_MODIFY_MULTIPLE_TESTS (ce_info, bb, t, f);

          /* See if the conversion failed.  */
          if (!t || !f)
            goto fail;
#endif

          true_expr = t;
          false_expr = f;
        }
      while (bb != last_test_bb);
    }

  /* For IF-THEN-ELSE blocks, we don't allow modifications of the test
     on the THEN block.  */
  then_mod_ok = (else_bb == NULL_BLOCK);

  /* Go through the THEN and ELSE blocks converting the insns if possible
     to conditional execution.  */

  if (then_end
      && (! false_expr
          || ! cond_exec_process_insns (ce_info, then_start, then_end,
                                        false_expr, false_prob_val,
                                        then_mod_ok)))
    goto fail;

  if (else_bb && else_end
      && ! cond_exec_process_insns (ce_info, else_start, else_end,
                                    true_expr, true_prob_val, TRUE))
    goto fail;

  /* If we cannot apply the changes, fail.  Do not go through the normal fail
     processing, since apply_change_group will call cancel_changes.  */
  if (! apply_change_group ())
    {
#ifdef IFCVT_MODIFY_CANCEL
      /* Cancel any machine dependent changes.  */
      IFCVT_MODIFY_CANCEL (ce_info);
#endif
      return FALSE;
    }

#ifdef IFCVT_MODIFY_FINAL
  /* Do any machine dependent final modifications.  */
  IFCVT_MODIFY_FINAL (ce_info);
#endif

  /* Conversion succeeded.  */
  if (dump_file)
    fprintf (dump_file, "%d insn%s converted to conditional execution.\n",
             n_insns, (n_insns == 1) ? " was" : "s were");

  /* Merge the blocks!  */
  merge_if_block (ce_info);
  cond_exec_changed_p = TRUE;
  return TRUE;

 fail:
#ifdef IFCVT_MODIFY_CANCEL
  /* Cancel any machine dependent changes.  */
  IFCVT_MODIFY_CANCEL (ce_info);
#endif

  cancel_changes (0);
  return FALSE;
}
588
 
589
/* Used by noce_process_if_block to communicate with its subroutines.

   The subroutines know that A and B may be evaluated freely.  They
   know that X is a register.  They should insert new instructions
   before cond_earliest.  */

struct noce_if_info
{
  /* The block containing the test that ends in the conditional jump.  */
  basic_block test_bb;
  /* NOTE(review): presumably the insns from which A and B were taken;
     confirm against noce_process_if_block (not visible here).  */
  rtx insn_a, insn_b;
  /* X is the destination register; A and B are the candidate source
     values for the two arms.  */
  rtx x, a, b;
  /* The conditional jump, the condition it tests, and the earliest
     insn involved in computing that condition.  */
  rtx jump, cond, cond_earliest;
  /* True if "b" was originally evaluated unconditionally.  */
  bool b_unconditional;
};
604
 
605
static rtx noce_emit_store_flag (struct noce_if_info *, rtx, int, int);
606
static int noce_try_move (struct noce_if_info *);
607
static int noce_try_store_flag (struct noce_if_info *);
608
static int noce_try_addcc (struct noce_if_info *);
609
static int noce_try_store_flag_constants (struct noce_if_info *);
610
static int noce_try_store_flag_mask (struct noce_if_info *);
611
static rtx noce_emit_cmove (struct noce_if_info *, rtx, enum rtx_code, rtx,
612
                            rtx, rtx, rtx);
613
static int noce_try_cmove (struct noce_if_info *);
614
static int noce_try_cmove_arith (struct noce_if_info *);
615
static rtx noce_get_alt_condition (struct noce_if_info *, rtx, rtx *);
616
static int noce_try_minmax (struct noce_if_info *);
617
static int noce_try_abs (struct noce_if_info *);
618
static int noce_try_sign_mask (struct noce_if_info *);
619
 
620
/* Helper function for noce_try_store_flag*.  Emit insns storing the
   result of the condition (reversed if REVERSEP is nonzero) into X,
   normalized per NORMALIZE as for emit_store_flag.  Return the rtx
   holding the result, or NULL_RTX on failure.  */

static rtx
noce_emit_store_flag (struct noce_if_info *if_info, rtx x, int reversep,
                      int normalize)
{
  rtx cond = if_info->cond;
  int cond_complex;
  enum rtx_code code;

  /* The condition is "complex" when either comparison operand is not
     a general operand.  */
  cond_complex = (! general_operand (XEXP (cond, 0), VOIDmode)
                  || ! general_operand (XEXP (cond, 1), VOIDmode));

  /* If earliest == jump, or when the condition is complex, try to
     build the store_flag insn directly.  */

  if (cond_complex)
    cond = XEXP (SET_SRC (pc_set (if_info->jump)), 0);

  if (reversep)
    code = reversed_comparison_code (cond, if_info->jump);
  else
    code = GET_CODE (cond);

  if ((if_info->cond_earliest == if_info->jump || cond_complex)
      && (normalize == 0 || STORE_FLAG_VALUE == normalize))
    {
      rtx tmp;

      tmp = gen_rtx_fmt_ee (code, GET_MODE (x), XEXP (cond, 0),
                            XEXP (cond, 1));
      tmp = gen_rtx_SET (VOIDmode, x, tmp);

      start_sequence ();
      tmp = emit_insn (tmp);

      /* Keep the sequence only if the target recognizes the insn.  */
      if (recog_memoized (tmp) >= 0)
        {
          tmp = get_insns ();
          end_sequence ();
          emit_insn (tmp);

          if_info->cond_earliest = if_info->jump;

          return x;
        }

      end_sequence ();
    }

  /* Don't even try if the comparison operands or the mode of X are weird.  */
  if (cond_complex || !SCALAR_INT_MODE_P (GET_MODE (x)))
    return NULL_RTX;

  return emit_store_flag (x, code, XEXP (cond, 0),
                          XEXP (cond, 1), VOIDmode,
                          (code == LTU || code == LEU
                           || code == GEU || code == GTU), normalize);
}
679
 
680
/* Emit instruction to move an rtx, possibly into STRICT_LOW_PART.
   X is the destination/target and Y is the value to copy.  */

static void
noce_emit_move_insn (rtx x, rtx y)
{
  enum machine_mode outmode;
  rtx outer, inner;
  int bitpos;

  if (GET_CODE (x) != STRICT_LOW_PART)
    {
      rtx seq, insn, target;
      optab ot;

      start_sequence ();
      /* Check that the SET_SRC is reasonable before calling emit_move_insn,
         otherwise construct a suitable SET pattern ourselves.  */
      insn = (OBJECT_P (y) || CONSTANT_P (y) || GET_CODE (y) == SUBREG)
             ? emit_move_insn (x, y)
             : emit_insn (gen_rtx_SET (VOIDmode, x, y));
      seq = get_insns ();
      end_sequence();

      /* If the move was not recognized, try harder: handle bit-field
         destinations directly and expand unary/binary sources through
         the optab machinery.  */
      if (recog_memoized (insn) <= 0)
        {
          if (GET_CODE (x) == ZERO_EXTRACT)
            {
              rtx op = XEXP (x, 0);
              unsigned HOST_WIDE_INT size = INTVAL (XEXP (x, 1));
              unsigned HOST_WIDE_INT start = INTVAL (XEXP (x, 2));

              /* store_bit_field expects START to be relative to
                 BYTES_BIG_ENDIAN and adjusts this value for machines with
                 BITS_BIG_ENDIAN != BYTES_BIG_ENDIAN.  In order to be able to
                 invoke store_bit_field again it is necessary to have the START
                 value from the first call.  */
              if (BITS_BIG_ENDIAN != BYTES_BIG_ENDIAN)
                {
                  if (MEM_P (op))
                    start = BITS_PER_UNIT - start - size;
                  else
                    {
                      gcc_assert (REG_P (op));
                      start = BITS_PER_WORD - start - size;
                    }
                }

              gcc_assert (start < (MEM_P (op) ? BITS_PER_UNIT : BITS_PER_WORD));
              store_bit_field (op, size, start, GET_MODE (x), y);
              return;
            }

          switch (GET_RTX_CLASS (GET_CODE (y)))
            {
            case RTX_UNARY:
              ot = code_to_optab[GET_CODE (y)];
              if (ot)
                {
                  start_sequence ();
                  target = expand_unop (GET_MODE (y), ot, XEXP (y, 0), x, 0);
                  if (target != NULL_RTX)
                    {
                      if (target != x)
                        emit_move_insn (x, target);
                      /* Replace the unrecognized sequence with the
                         expanded one.  */
                      seq = get_insns ();
                    }
                  end_sequence ();
                }
              break;

            case RTX_BIN_ARITH:
            case RTX_COMM_ARITH:
              ot = code_to_optab[GET_CODE (y)];
              if (ot)
                {
                  start_sequence ();
                  target = expand_binop (GET_MODE (y), ot,
                                         XEXP (y, 0), XEXP (y, 1),
                                         x, 0, OPTAB_DIRECT);
                  if (target != NULL_RTX)
                    {
                      if (target != x)
                          emit_move_insn (x, target);
                      seq = get_insns ();
                    }
                  end_sequence ();
                }
              break;

            default:
              break;
            }
        }

      emit_insn (seq);
      return;
    }

  /* STRICT_LOW_PART destination: store Y into the low part of the
     inner register via store_bit_field.  */
  outer = XEXP (x, 0);
  inner = XEXP (outer, 0);
  outmode = GET_MODE (outer);
  bitpos = SUBREG_BYTE (outer) * BITS_PER_UNIT;
  store_bit_field (inner, GET_MODE_BITSIZE (outmode), bitpos, outmode, y);
}
785
 
786
/* Return sequence of instructions generated by if conversion.  This
787
   function calls end_sequence() to end the current stream, ensures
788
   that are instructions are unshared, recognizable non-jump insns.
789
   On failure, this function returns a NULL_RTX.  */
790
 
791
static rtx
792
end_ifcvt_sequence (struct noce_if_info *if_info)
793
{
794
  rtx insn;
795
  rtx seq = get_insns ();
796
 
797
  set_used_flags (if_info->x);
798
  set_used_flags (if_info->cond);
799
  unshare_all_rtl_in_chain (seq);
800
  end_sequence ();
801
 
802
  /* Make sure that all of the instructions emitted are recognizable,
803
     and that we haven't introduced a new jump instruction.
804
     As an exercise for the reader, build a general mechanism that
805
     allows proper placement of required clobbers.  */
806
  for (insn = seq; insn; insn = NEXT_INSN (insn))
807
    if (JUMP_P (insn)
808
        || recog_memoized (insn) == -1)
809
      return NULL_RTX;
810
 
811
  return seq;
812
}
813
 
814
/* Convert "if (a != b) x = a; else x = b" into "x = a" and
815
   "if (a == b) x = a; else x = b" into "x = b".  */
816
 
817
static int
818
noce_try_move (struct noce_if_info *if_info)
819
{
820
  rtx cond = if_info->cond;
821
  enum rtx_code code = GET_CODE (cond);
822
  rtx y, seq;
823
 
824
  if (code != NE && code != EQ)
825
    return FALSE;
826
 
827
  /* This optimization isn't valid if either A or B could be a NaN
828
     or a signed zero.  */
829
  if (HONOR_NANS (GET_MODE (if_info->x))
830
      || HONOR_SIGNED_ZEROS (GET_MODE (if_info->x)))
831
    return FALSE;
832
 
833
  /* Check whether the operands of the comparison are A and in
834
     either order.  */
835
  if ((rtx_equal_p (if_info->a, XEXP (cond, 0))
836
       && rtx_equal_p (if_info->b, XEXP (cond, 1)))
837
      || (rtx_equal_p (if_info->a, XEXP (cond, 1))
838
          && rtx_equal_p (if_info->b, XEXP (cond, 0))))
839
    {
840
      y = (code == EQ) ? if_info->a : if_info->b;
841
 
842
      /* Avoid generating the move if the source is the destination.  */
843
      if (! rtx_equal_p (if_info->x, y))
844
        {
845
          start_sequence ();
846
          noce_emit_move_insn (if_info->x, y);
847
          seq = end_ifcvt_sequence (if_info);
848
          if (!seq)
849
            return FALSE;
850
 
851
          emit_insn_before_setloc (seq, if_info->jump,
852
                                   INSN_LOCATOR (if_info->insn_a));
853
        }
854
      return TRUE;
855
    }
856
  return FALSE;
857
}
858
 
859
/* Convert "if (test) x = 1; else x = 0".
860
 
861
   Only try 0 and STORE_FLAG_VALUE here.  Other combinations will be
862
   tried in noce_try_store_flag_constants after noce_try_cmove has had
863
   a go at the conversion.  */
864
 
865
static int
866
noce_try_store_flag (struct noce_if_info *if_info)
867
{
868
  int reversep;
869
  rtx target, seq;
870
 
871
  if (GET_CODE (if_info->b) == CONST_INT
872
      && INTVAL (if_info->b) == STORE_FLAG_VALUE
873
      && if_info->a == const0_rtx)
874
    reversep = 0;
875
  else if (if_info->b == const0_rtx
876
           && GET_CODE (if_info->a) == CONST_INT
877
           && INTVAL (if_info->a) == STORE_FLAG_VALUE
878
           && (reversed_comparison_code (if_info->cond, if_info->jump)
879
               != UNKNOWN))
880
    reversep = 1;
881
  else
882
    return FALSE;
883
 
884
  start_sequence ();
885
 
886
  target = noce_emit_store_flag (if_info, if_info->x, reversep, 0);
887
  if (target)
888
    {
889
      if (target != if_info->x)
890
        noce_emit_move_insn (if_info->x, target);
891
 
892
      seq = end_ifcvt_sequence (if_info);
893
      if (! seq)
894
        return FALSE;
895
 
896
      emit_insn_before_setloc (seq, if_info->jump,
897
                               INSN_LOCATOR (if_info->insn_a));
898
      return TRUE;
899
    }
900
  else
901
    {
902
      end_sequence ();
903
      return FALSE;
904
    }
905
}
906
 
907
/* Convert "if (test) x = a; else x = b", for A and B constant.

   Emits a store-flag of the (possibly reversed) condition and then
   massages it into A/B with an add, shift, ior or and+add, whichever
   fits the constants.  Returns TRUE on success, FALSE otherwise.  */

static int
noce_try_store_flag_constants (struct noce_if_info *if_info)
{
  rtx target, seq;
  int reversep;
  HOST_WIDE_INT itrue, ifalse, diff, tmp;
  int normalize, can_reverse;
  enum machine_mode mode;

  if (! no_new_pseudos
      && GET_CODE (if_info->a) == CONST_INT
      && GET_CODE (if_info->b) == CONST_INT)
    {
      mode = GET_MODE (if_info->x);
      ifalse = INTVAL (if_info->a);
      itrue = INTVAL (if_info->b);

      /* Make sure we can represent the difference between the two values.  */
      if ((itrue - ifalse > 0)
          != ((ifalse < 0) != (itrue < 0) ? ifalse < 0 : ifalse < itrue))
        return FALSE;

      diff = trunc_int_for_mode (itrue - ifalse, mode);

      can_reverse = (reversed_comparison_code (if_info->cond, if_info->jump)
                     != UNKNOWN);

      /* Pick the store-flag normalization (0/1/-1) and whether to
         reverse the condition, based on which constant pair we have
         and how expensive branches are on this target.  */
      reversep = 0;
      if (diff == STORE_FLAG_VALUE || diff == -STORE_FLAG_VALUE)
        normalize = 0;
      else if (ifalse == 0 && exact_log2 (itrue) >= 0
               && (STORE_FLAG_VALUE == 1
                   || BRANCH_COST >= 2))
        normalize = 1;
      else if (itrue == 0 && exact_log2 (ifalse) >= 0 && can_reverse
               && (STORE_FLAG_VALUE == 1 || BRANCH_COST >= 2))
        normalize = 1, reversep = 1;
      else if (itrue == -1
               && (STORE_FLAG_VALUE == -1
                   || BRANCH_COST >= 2))
        normalize = -1;
      else if (ifalse == -1 && can_reverse
               && (STORE_FLAG_VALUE == -1 || BRANCH_COST >= 2))
        normalize = -1, reversep = 1;
      else if ((BRANCH_COST >= 2 && STORE_FLAG_VALUE == -1)
               || BRANCH_COST >= 3)
        normalize = -1;
      else
        return FALSE;

      /* Reversing the condition swaps the roles of the two constants.  */
      if (reversep)
        {
          tmp = itrue; itrue = ifalse; ifalse = tmp;
          diff = trunc_int_for_mode (-diff, mode);
        }

      start_sequence ();
      target = noce_emit_store_flag (if_info, if_info->x, reversep, normalize);
      if (! target)
        {
          end_sequence ();
          return FALSE;
        }

      /* if (test) x = 3; else x = 4;
         =>   x = 3 + (test == 0);  */
      if (diff == STORE_FLAG_VALUE || diff == -STORE_FLAG_VALUE)
        {
          target = expand_simple_binop (mode,
                                        (diff == STORE_FLAG_VALUE
                                         ? PLUS : MINUS),
                                        GEN_INT (ifalse), target, if_info->x, 0,
                                        OPTAB_WIDEN);
        }

      /* if (test) x = 8; else x = 0;
         =>   x = (test != 0) << 3;  */
      else if (ifalse == 0 && (tmp = exact_log2 (itrue)) >= 0)
        {
          target = expand_simple_binop (mode, ASHIFT,
                                        target, GEN_INT (tmp), if_info->x, 0,
                                        OPTAB_WIDEN);
        }

      /* if (test) x = -1; else x = b;
         =>   x = -(test != 0) | b;  */
      else if (itrue == -1)
        {
          target = expand_simple_binop (mode, IOR,
                                        target, GEN_INT (ifalse), if_info->x, 0,
                                        OPTAB_WIDEN);
        }

      /* if (test) x = a; else x = b;
         =>   x = (-(test != 0) & (b - a)) + a;  */
      else
        {
          target = expand_simple_binop (mode, AND,
                                        target, GEN_INT (diff), if_info->x, 0,
                                        OPTAB_WIDEN);
          if (target)
            target = expand_simple_binop (mode, PLUS,
                                          target, GEN_INT (ifalse),
                                          if_info->x, 0, OPTAB_WIDEN);
        }

      if (! target)
        {
          end_sequence ();
          return FALSE;
        }

      if (target != if_info->x)
        noce_emit_move_insn (if_info->x, target);

      seq = end_ifcvt_sequence (if_info);
      if (!seq)
        return FALSE;

      emit_insn_before_setloc (seq, if_info->jump,
                               INSN_LOCATOR (if_info->insn_a));
      return TRUE;
    }

  return FALSE;
}
1035
 
1036
/* Convert "if (test) foo++" into "foo += (test != 0)", and
   similarly for "foo--".

   Requires B to be the base of the PLUS in A (i.e. "x = x + c" form)
   and a reversible condition.  Tries the target's conditional-add
   pattern first, then falls back to store-flag plus add/sub.
   Returns TRUE on success, FALSE otherwise.  */

static int
noce_try_addcc (struct noce_if_info *if_info)
{
  rtx target, seq;
  int subtract, normalize;

  if (! no_new_pseudos
      && GET_CODE (if_info->a) == PLUS
      && rtx_equal_p (XEXP (if_info->a, 0), if_info->b)
      && (reversed_comparison_code (if_info->cond, if_info->jump)
          != UNKNOWN))
    {
      rtx cond = if_info->cond;
      /* The add happens on the fall-through (branch-not-taken) path,
         so use the reversed condition code.  */
      enum rtx_code code = reversed_comparison_code (cond, if_info->jump);

      /* First try to use addcc pattern.  */
      if (general_operand (XEXP (cond, 0), VOIDmode)
          && general_operand (XEXP (cond, 1), VOIDmode))
        {
          start_sequence ();
          target = emit_conditional_add (if_info->x, code,
                                         XEXP (cond, 0),
                                         XEXP (cond, 1),
                                         VOIDmode,
                                         if_info->b,
                                         XEXP (if_info->a, 1),
                                         GET_MODE (if_info->x),
                                         (code == LTU || code == GEU
                                          || code == LEU || code == GTU));
          if (target)
            {
              if (target != if_info->x)
                noce_emit_move_insn (if_info->x, target);

              seq = end_ifcvt_sequence (if_info);
              if (!seq)
                return FALSE;

              emit_insn_before_setloc (seq, if_info->jump,
                                       INSN_LOCATOR (if_info->insn_a));
              return TRUE;
            }
          end_sequence ();
        }

      /* If that fails, construct conditional increment or decrement using
         setcc.  */
      if (BRANCH_COST >= 2
          && (XEXP (if_info->a, 1) == const1_rtx
              || XEXP (if_info->a, 1) == constm1_rtx))
        {
          start_sequence ();
          /* Decide whether the store-flag value can be added/subtracted
             directly, or must be normalized to the +/-1 increment.  */
          if (STORE_FLAG_VALUE == INTVAL (XEXP (if_info->a, 1)))
            subtract = 0, normalize = 0;
          else if (-STORE_FLAG_VALUE == INTVAL (XEXP (if_info->a, 1)))
            subtract = 1, normalize = 0;
          else
            subtract = 0, normalize = INTVAL (XEXP (if_info->a, 1));


          target = noce_emit_store_flag (if_info,
                                         gen_reg_rtx (GET_MODE (if_info->x)),
                                         1, normalize);

          if (target)
            target = expand_simple_binop (GET_MODE (if_info->x),
                                          subtract ? MINUS : PLUS,
                                          if_info->b, target, if_info->x,
                                          0, OPTAB_WIDEN);
          if (target)
            {
              if (target != if_info->x)
                noce_emit_move_insn (if_info->x, target);

              seq = end_ifcvt_sequence (if_info);
              if (!seq)
                return FALSE;

              emit_insn_before_setloc (seq, if_info->jump,
                                       INSN_LOCATOR (if_info->insn_a));
              return TRUE;
            }
          end_sequence ();
        }
    }

  return FALSE;
}
1127
 
1128
/* Convert "if (test) x = 0;" to "x &= -(test == 0);"

   Also handles the mirrored "if (test) ; else x = 0;" form when the
   condition is reversible.  Returns TRUE on success.  */

static int
noce_try_store_flag_mask (struct noce_if_info *if_info)
{
  rtx target, seq;
  int reversep;

  reversep = 0;
  /* NOTE: the "reversep =" assignment inside the condition is
     deliberate — it records whether the second (reversed) form
     matched, and its value is used below only if that arm was taken.  */
  if (! no_new_pseudos
      && (BRANCH_COST >= 2
          || STORE_FLAG_VALUE == -1)
      && ((if_info->a == const0_rtx
           && rtx_equal_p (if_info->b, if_info->x))
          || ((reversep = (reversed_comparison_code (if_info->cond,
                                                     if_info->jump)
                           != UNKNOWN))
              && if_info->b == const0_rtx
              && rtx_equal_p (if_info->a, if_info->x))))
    {
      start_sequence ();
      /* Normalization -1 yields an all-ones/all-zeros mask.  */
      target = noce_emit_store_flag (if_info,
                                     gen_reg_rtx (GET_MODE (if_info->x)),
                                     reversep, -1);
      if (target)
        target = expand_simple_binop (GET_MODE (if_info->x), AND,
                                      if_info->x,
                                      target, if_info->x, 0,
                                      OPTAB_WIDEN);

      if (target)
        {
          if (target != if_info->x)
            noce_emit_move_insn (if_info->x, target);

          seq = end_ifcvt_sequence (if_info);
          if (!seq)
            return FALSE;

          emit_insn_before_setloc (seq, if_info->jump,
                                   INSN_LOCATOR (if_info->insn_a));
          return TRUE;
        }

      end_sequence ();
    }

  return FALSE;
}
1177
 
1178
/* Helper function for noce_try_cmove and noce_try_cmove_arith.

   Emit a conditional move of VTRUE/VFALSE into X under the comparison
   CODE of CMP_A against CMP_B.  Returns the rtx holding the result
   (usually X), or NULL_RTX if no conditional move could be emitted.  */

static rtx
noce_emit_cmove (struct noce_if_info *if_info, rtx x, enum rtx_code code,
                 rtx cmp_a, rtx cmp_b, rtx vfalse, rtx vtrue)
{
  /* If earliest == jump, try to build the cmove insn directly.
     This is helpful when combine has created some complex condition
     (like for alpha's cmovlbs) that we can't hope to regenerate
     through the normal interface.  */

  if (if_info->cond_earliest == if_info->jump)
    {
      rtx tmp;

      /* Hand-build (set x (if_then_else (code cmp_a cmp_b) vtrue vfalse))
         and keep it only if the target recognizes the pattern.  */
      tmp = gen_rtx_fmt_ee (code, GET_MODE (if_info->cond), cmp_a, cmp_b);
      tmp = gen_rtx_IF_THEN_ELSE (GET_MODE (x), tmp, vtrue, vfalse);
      tmp = gen_rtx_SET (VOIDmode, x, tmp);

      start_sequence ();
      tmp = emit_insn (tmp);

      if (recog_memoized (tmp) >= 0)
        {
          tmp = get_insns ();
          end_sequence ();
          /* Re-emit into the enclosing (caller's) sequence.  */
          emit_insn (tmp);

          return x;
        }

      end_sequence ();
    }

  /* Don't even try if the comparison operands are weird.  */
  if (! general_operand (cmp_a, GET_MODE (cmp_a))
      || ! general_operand (cmp_b, GET_MODE (cmp_b)))
    return NULL_RTX;

#if HAVE_conditional_move
  return emit_conditional_move (x, code, cmp_a, cmp_b, VOIDmode,
                                vtrue, vfalse, GET_MODE (x),
                                (code == LTU || code == GEU
                                 || code == LEU || code == GTU));
#else
  /* We'll never get here, as noce_process_if_block doesn't call the
     functions involved.  Ifdef code, however, should be discouraged
     because it leads to typos in the code not selected.  However,
     emit_conditional_move won't exist either.  */
  return NULL_RTX;
#endif
}
1230
 
1231
/* Try only simple constants and registers here.  More complex cases
   are handled in noce_try_cmove_arith after noce_try_store_flag_arith
   has had a go at it.

   Converts "if (test) x = a; else x = b" into a single conditional
   move when both A and B are constants or registers.  Returns TRUE
   on success.  */

static int
noce_try_cmove (struct noce_if_info *if_info)
{
  enum rtx_code code;
  rtx target, seq;

  if ((CONSTANT_P (if_info->a) || register_operand (if_info->a, VOIDmode))
      && (CONSTANT_P (if_info->b) || register_operand (if_info->b, VOIDmode)))
    {
      start_sequence ();

      code = GET_CODE (if_info->cond);
      target = noce_emit_cmove (if_info, if_info->x, code,
                                XEXP (if_info->cond, 0),
                                XEXP (if_info->cond, 1),
                                if_info->a, if_info->b);

      if (target)
        {
          if (target != if_info->x)
            noce_emit_move_insn (if_info->x, target);

          seq = end_ifcvt_sequence (if_info);
          if (!seq)
            return FALSE;

          emit_insn_before_setloc (seq, if_info->jump,
                                   INSN_LOCATOR (if_info->insn_a));
          return TRUE;
        }
      else
        {
          end_sequence ();
          return FALSE;
        }
    }

  return FALSE;
}
1274
 
1275
/* Try more complex cases involving conditional_move.

   Handles "if (test) x = a; else x = b" where A and/or B are arbitrary
   (side-effect-free) expressions: both arms are computed into registers
   unconditionally, then a conditional move selects the result.  Also
   handles the two-memory-source case by cmoving the addresses and
   loading once.  Returns TRUE on success.  */

static int
noce_try_cmove_arith (struct noce_if_info *if_info)
{
  rtx a = if_info->a;
  rtx b = if_info->b;
  rtx x = if_info->x;
  rtx orig_a, orig_b;
  rtx insn_a, insn_b;
  rtx tmp, target;
  int is_mem = 0;
  int insn_cost;
  enum rtx_code code;

  /* A conditional move from two memory sources is equivalent to a
     conditional on their addresses followed by a load.  Don't do this
     early because it'll screw alias analysis.  Note that we've
     already checked for no side effects.  */
  if (! no_new_pseudos && cse_not_expected
      && MEM_P (a) && MEM_P (b)
      && BRANCH_COST >= 5)
    {
      /* From here on A and B are the addresses; X holds the selected
         address in Pmode and the actual load is emitted at the end.  */
      a = XEXP (a, 0);
      b = XEXP (b, 0);
      x = gen_reg_rtx (Pmode);
      is_mem = 1;
    }

  /* ??? We could handle this if we knew that a load from A or B could
     not fault.  This is also true if we've already loaded
     from the address along the path from ENTRY.  */
  else if (may_trap_p (a) || may_trap_p (b))
    return FALSE;

  /* if (test) x = a + b; else x = c - d;
     => y = a + b;
        x = c - d;
        if (test)
          x = y;
  */

  code = GET_CODE (if_info->cond);
  insn_a = if_info->insn_a;
  insn_b = if_info->insn_b;

  /* Total insn_rtx_cost should be smaller than branch cost.  Exit
     if insn_rtx_cost can't be estimated.  */
  if (insn_a)
    {
      insn_cost = insn_rtx_cost (PATTERN (insn_a));
      if (insn_cost == 0 || insn_cost > COSTS_N_INSNS (BRANCH_COST))
        return FALSE;
    }
  else
    {
      insn_cost = 0;
    }

  if (insn_b) {
    insn_cost += insn_rtx_cost (PATTERN (insn_b));
    if (insn_cost == 0 || insn_cost > COSTS_N_INSNS (BRANCH_COST))
      return FALSE;
  }

  /* Possibly rearrange operands to make things come out more natural.  */
  if (reversed_comparison_code (if_info->cond, if_info->jump) != UNKNOWN)
    {
      int reversep = 0;
      if (rtx_equal_p (b, x))
        reversep = 1;
      else if (general_operand (b, GET_MODE (b)))
        reversep = 1;

      /* Swap the arms (and their defining insns) along with reversing
         the condition, so the "simple" operand ends up as B.  */
      if (reversep)
        {
          code = reversed_comparison_code (if_info->cond, if_info->jump);
          tmp = a, a = b, b = tmp;
          tmp = insn_a, insn_a = insn_b, insn_b = tmp;
        }
    }

  start_sequence ();

  orig_a = a;
  orig_b = b;

  /* If either operand is complex, load it into a register first.
     The best way to do this is to copy the original insn.  In this
     way we preserve any clobbers etc that the insn may have had.
     This is of course not possible in the IS_MEM case.  */
  if (! general_operand (a, GET_MODE (a)))
    {
      rtx set;

      if (no_new_pseudos)
        goto end_seq_and_fail;

      if (is_mem)
        {
          tmp = gen_reg_rtx (GET_MODE (a));
          tmp = emit_insn (gen_rtx_SET (VOIDmode, tmp, a));
        }
      else if (! insn_a)
        goto end_seq_and_fail;
      else
        {
          /* Re-target a copy of the original insn at a fresh pseudo.  */
          a = gen_reg_rtx (GET_MODE (a));
          tmp = copy_rtx (insn_a);
          set = single_set (tmp);
          SET_DEST (set) = a;
          tmp = emit_insn (PATTERN (tmp));
        }
      if (recog_memoized (tmp) < 0)
        goto end_seq_and_fail;
    }
  if (! general_operand (b, GET_MODE (b)))
    {
      rtx set, last;

      if (no_new_pseudos)
        goto end_seq_and_fail;

      if (is_mem)
        {
          tmp = gen_reg_rtx (GET_MODE (b));
          tmp = gen_rtx_SET (VOIDmode, tmp, b);
        }
      else if (! insn_b)
        goto end_seq_and_fail;
      else
        {
          b = gen_reg_rtx (GET_MODE (b));
          tmp = copy_rtx (insn_b);
          set = single_set (tmp);
          SET_DEST (set) = b;
          tmp = PATTERN (tmp);
        }

      /* If insn to set up A clobbers any registers B depends on, try to
         swap insn that sets up A with the one that sets up B.  If even
         that doesn't help, punt.  */
      last = get_last_insn ();
      if (last && modified_in_p (orig_b, last))
        {
          tmp = emit_insn_before (tmp, get_insns ());
          if (modified_in_p (orig_a, tmp))
            goto end_seq_and_fail;
        }
      else
        tmp = emit_insn (tmp);

      if (recog_memoized (tmp) < 0)
        goto end_seq_and_fail;
    }

  target = noce_emit_cmove (if_info, x, code, XEXP (if_info->cond, 0),
                            XEXP (if_info->cond, 1), a, b);

  if (! target)
    goto end_seq_and_fail;

  /* If we're handling a memory for above, emit the load now.  */
  if (is_mem)
    {
      tmp = gen_rtx_MEM (GET_MODE (if_info->x), target);

      /* Copy over flags as appropriate.  */
      if (MEM_VOLATILE_P (if_info->a) || MEM_VOLATILE_P (if_info->b))
        MEM_VOLATILE_P (tmp) = 1;
      if (MEM_IN_STRUCT_P (if_info->a) && MEM_IN_STRUCT_P (if_info->b))
        MEM_IN_STRUCT_P (tmp) = 1;
      if (MEM_SCALAR_P (if_info->a) && MEM_SCALAR_P (if_info->b))
        MEM_SCALAR_P (tmp) = 1;
      if (MEM_ALIAS_SET (if_info->a) == MEM_ALIAS_SET (if_info->b))
        set_mem_alias_set (tmp, MEM_ALIAS_SET (if_info->a));
      set_mem_align (tmp,
                     MIN (MEM_ALIGN (if_info->a), MEM_ALIGN (if_info->b)));

      noce_emit_move_insn (if_info->x, tmp);
    }
  else if (target != x)
    noce_emit_move_insn (x, target);

  tmp = end_ifcvt_sequence (if_info);
  if (!tmp)
    return FALSE;

  emit_insn_before_setloc (tmp, if_info->jump, INSN_LOCATOR (if_info->insn_a));
  return TRUE;

 end_seq_and_fail:
  end_sequence ();
  return FALSE;
}
1470
 
1471
/* For most cases, the simplified condition we found is the best
   choice, but this is not the case for the min/max/abs transforms.
   For these we wish to know that it is A or B in the condition.

   Return a condition mentioning TARGET (or NULL if none can be
   found), setting *EARLIEST to the earliest insn used in building
   it.  For a CONST_INT target, tries first to substitute a constant
   loaded just before the comparison, then to nudge the comparison
   code (e.g. "< 4" -> "<= 3") so the wanted constant appears.  */

static rtx
noce_get_alt_condition (struct noce_if_info *if_info, rtx target,
                        rtx *earliest)
{
  rtx cond, set, insn;
  int reverse;

  /* If target is already mentioned in the known condition, return it.  */
  if (reg_mentioned_p (target, if_info->cond))
    {
      *earliest = if_info->cond_earliest;
      return if_info->cond;
    }

  set = pc_set (if_info->jump);
  cond = XEXP (SET_SRC (set), 0);
  /* The branch is "reversed" when its taken arm falls through, i.e.
     the label is in the else position of the IF_THEN_ELSE.  */
  reverse
    = GET_CODE (XEXP (SET_SRC (set), 2)) == LABEL_REF
      && XEXP (XEXP (SET_SRC (set), 2), 0) == JUMP_LABEL (if_info->jump);

  /* If we're looking for a constant, try to make the conditional
     have that constant in it.  There are two reasons why it may
     not have the constant we want:

     1. GCC may have needed to put the constant in a register, because
        the target can't compare directly against that constant.  For
        this case, we look for a SET immediately before the comparison
        that puts a constant in that register.

     2. GCC may have canonicalized the conditional, for example
        replacing "if x < 4" with "if x <= 3".  We can undo that (or
        make equivalent types of changes) to get the constants we need
        if they're off by one in the right direction.  */

  if (GET_CODE (target) == CONST_INT)
    {
      enum rtx_code code = GET_CODE (if_info->cond);
      rtx op_a = XEXP (if_info->cond, 0);
      rtx op_b = XEXP (if_info->cond, 1);
      rtx prev_insn;

      /* First, look to see if we put a constant in a register.  */
      prev_insn = prev_nonnote_insn (if_info->cond_earliest);
      if (prev_insn
          && INSN_P (prev_insn)
          && GET_CODE (PATTERN (prev_insn)) == SET)
        {
          rtx src = find_reg_equal_equiv_note (prev_insn);
          if (!src)
            src = SET_SRC (PATTERN (prev_insn));
          if (GET_CODE (src) == CONST_INT)
            {
              if (rtx_equal_p (op_a, SET_DEST (PATTERN (prev_insn))))
                op_a = src;
              else if (rtx_equal_p (op_b, SET_DEST (PATTERN (prev_insn))))
                op_b = src;

              /* Keep the constant on the right-hand side.  */
              if (GET_CODE (op_a) == CONST_INT)
                {
                  rtx tmp = op_a;
                  op_a = op_b;
                  op_b = tmp;
                  code = swap_condition (code);
                }
            }
        }

      /* Now, look to see if we can get the right constant by
         adjusting the conditional.  */
      if (GET_CODE (op_b) == CONST_INT)
        {
          HOST_WIDE_INT desired_val = INTVAL (target);
          HOST_WIDE_INT actual_val = INTVAL (op_b);

          switch (code)
            {
            case LT:
              if (actual_val == desired_val + 1)
                {
                  code = LE;
                  op_b = GEN_INT (desired_val);
                }
              break;
            case LE:
              if (actual_val == desired_val - 1)
                {
                  code = LT;
                  op_b = GEN_INT (desired_val);
                }
              break;
            case GT:
              if (actual_val == desired_val - 1)
                {
                  code = GE;
                  op_b = GEN_INT (desired_val);
                }
              break;
            case GE:
              if (actual_val == desired_val + 1)
                {
                  code = GT;
                  op_b = GEN_INT (desired_val);
                }
              break;
            default:
              break;
            }
        }

      /* If we made any changes, generate a new conditional that is
         equivalent to what we started with, but has the right
         constants in it.  */
      if (code != GET_CODE (if_info->cond)
          || op_a != XEXP (if_info->cond, 0)
          || op_b != XEXP (if_info->cond, 1))
        {
          cond = gen_rtx_fmt_ee (code, GET_MODE (cond), op_a, op_b);
          *earliest = if_info->cond_earliest;
          return cond;
        }
    }

  cond = canonicalize_condition (if_info->jump, cond, reverse,
                                 earliest, target, false, true);
  if (! cond || ! reg_mentioned_p (target, cond))
    return NULL;

  /* We almost certainly searched back to a different place.
     Need to re-verify correct lifetimes.  */

  /* X may not be mentioned in the range (cond_earliest, jump].  */
  for (insn = if_info->jump; insn != *earliest; insn = PREV_INSN (insn))
    if (INSN_P (insn) && reg_overlap_mentioned_p (if_info->x, PATTERN (insn)))
      return NULL;

  /* A and B may not be modified in the range [cond_earliest, jump).  */
  for (insn = *earliest; insn != if_info->jump; insn = NEXT_INSN (insn))
    if (INSN_P (insn)
        && (modified_in_p (if_info->a, insn)
            || modified_in_p (if_info->b, insn)))
      return NULL;

  return cond;
}
1619
 
1620
/* Convert "if (a < b) x = a; else x = b;" to "x = min(a, b);", etc.

   Uses noce_get_alt_condition to recover a comparison directly
   between A and B, then maps its code to SMIN/SMAX/UMIN/UMAX.
   Returns TRUE and updates if_info->cond/cond_earliest on success.  */

static int
noce_try_minmax (struct noce_if_info *if_info)
{
  rtx cond, earliest, target, seq;
  enum rtx_code code, op;
  int unsignedp;

  /* ??? Can't guarantee that expand_binop won't create pseudos.  */
  if (no_new_pseudos)
    return FALSE;

  /* ??? Reject modes with NaNs or signed zeros since we don't know how
     they will be resolved with an SMIN/SMAX.  It wouldn't be too hard
     to get the target to tell us...  */
  if (HONOR_SIGNED_ZEROS (GET_MODE (if_info->x))
      || HONOR_NANS (GET_MODE (if_info->x)))
    return FALSE;

  cond = noce_get_alt_condition (if_info, if_info->a, &earliest);
  if (!cond)
    return FALSE;

  /* Verify the condition is of the form we expect, and canonicalize
     the comparison code.  */
  code = GET_CODE (cond);
  if (rtx_equal_p (XEXP (cond, 0), if_info->a))
    {
      if (! rtx_equal_p (XEXP (cond, 1), if_info->b))
        return FALSE;
    }
  else if (rtx_equal_p (XEXP (cond, 1), if_info->a))
    {
      if (! rtx_equal_p (XEXP (cond, 0), if_info->b))
        return FALSE;
      /* A is the second operand; swap so the code reads "A code B".  */
      code = swap_condition (code);
    }
  else
    return FALSE;

  /* Determine what sort of operation this is.  Note that the code is for
     a taken branch, so the code->operation mapping appears backwards.  */
  switch (code)
    {
    case LT:
    case LE:
    case UNLT:
    case UNLE:
      op = SMAX;
      unsignedp = 0;
      break;
    case GT:
    case GE:
    case UNGT:
    case UNGE:
      op = SMIN;
      unsignedp = 0;
      break;
    case LTU:
    case LEU:
      op = UMAX;
      unsignedp = 1;
      break;
    case GTU:
    case GEU:
      op = UMIN;
      unsignedp = 1;
      break;
    default:
      return FALSE;
    }

  start_sequence ();

  target = expand_simple_binop (GET_MODE (if_info->x), op,
                                if_info->a, if_info->b,
                                if_info->x, unsignedp, OPTAB_WIDEN);
  if (! target)
    {
      end_sequence ();
      return FALSE;
    }
  if (target != if_info->x)
    noce_emit_move_insn (if_info->x, target);

  seq = end_ifcvt_sequence (if_info);
  if (!seq)
    return FALSE;

  emit_insn_before_setloc (seq, if_info->jump, INSN_LOCATOR (if_info->insn_a));
  /* Record the alternate condition so later passes see what we used.  */
  if_info->cond = cond;
  if_info->cond_earliest = earliest;

  return TRUE;
}
1716
 
1717
/* Convert "if (a < 0) x = -a; else x = a;" to "x = abs(a);", etc.

   Recognizes the NEG-around-a-branch shape, verifies the comparison
   is against zero (looking one insn back for a register-loaded
   constant if necessary), and expands ABS, negating afterwards for
   the NABS variant.  Returns TRUE on success.  */

static int
noce_try_abs (struct noce_if_info *if_info)
{
  rtx cond, earliest, target, seq, a, b, c;
  int negate;

  /* ??? Can't guarantee that expand_binop won't create pseudos.  */
  if (no_new_pseudos)
    return FALSE;

  /* Recognize A and B as constituting an ABS or NABS.  The canonical
     form is a branch around the negation, taken when the object is the
     first operand of a comparison against 0 that evaluates to true.  */
  a = if_info->a;
  b = if_info->b;
  if (GET_CODE (a) == NEG && rtx_equal_p (XEXP (a, 0), b))
    negate = 0;
  else if (GET_CODE (b) == NEG && rtx_equal_p (XEXP (b, 0), a))
    {
      /* Arms are swapped relative to canonical; the result is negated.  */
      c = a; a = b; b = c;
      negate = 1;
    }
  else
    return FALSE;

  cond = noce_get_alt_condition (if_info, b, &earliest);
  if (!cond)
    return FALSE;

  /* Verify the condition is of the form we expect.  */
  if (rtx_equal_p (XEXP (cond, 0), b))
    c = XEXP (cond, 1);
  else if (rtx_equal_p (XEXP (cond, 1), b))
    {
      c = XEXP (cond, 0);
      negate = !negate;
    }
  else
    return FALSE;

  /* Verify that C is zero.  Search one step backward for a
     REG_EQUAL note or a simple source if necessary.  */
  if (REG_P (c))
    {
      rtx set, insn = prev_nonnote_insn (earliest);
      if (insn
          && (set = single_set (insn))
          && rtx_equal_p (SET_DEST (set), c))
        {
          rtx note = find_reg_equal_equiv_note (insn);
          if (note)
            c = XEXP (note, 0);
          else
            c = SET_SRC (set);
        }
      else
        return FALSE;
    }
  /* The constant may live in the constant pool; fetch its value.  */
  if (MEM_P (c)
      && GET_CODE (XEXP (c, 0)) == SYMBOL_REF
      && CONSTANT_POOL_ADDRESS_P (XEXP (c, 0)))
    c = get_pool_constant (XEXP (c, 0));

  /* Work around funny ideas get_condition has wrt canonicalization.
     Note that these rtx constants are known to be CONST_INT, and
     therefore imply integer comparisons.  */
  if (c == constm1_rtx && GET_CODE (cond) == GT)
    ;
  else if (c == const1_rtx && GET_CODE (cond) == LT)
    ;
  else if (c != CONST0_RTX (GET_MODE (b)))
    return FALSE;

  /* Determine what sort of operation this is.  */
  switch (GET_CODE (cond))
    {
    case LT:
    case LE:
    case UNLT:
    case UNLE:
      negate = !negate;
      break;
    case GT:
    case GE:
    case UNGT:
    case UNGE:
      break;
    default:
      return FALSE;
    }

  start_sequence ();

  target = expand_abs_nojump (GET_MODE (if_info->x), b, if_info->x, 1);

  /* ??? It's a quandary whether cmove would be better here, especially
     for integers.  Perhaps combine will clean things up.  */
  if (target && negate)
    target = expand_simple_unop (GET_MODE (target), NEG, target, if_info->x, 0);

  if (! target)
    {
      end_sequence ();
      return FALSE;
    }

  if (target != if_info->x)
    noce_emit_move_insn (if_info->x, target);

  seq = end_ifcvt_sequence (if_info);
  if (!seq)
    return FALSE;

  emit_insn_before_setloc (seq, if_info->jump, INSN_LOCATOR (if_info->insn_a));
  /* Record the alternate condition so later passes see what we used.  */
  if_info->cond = cond;
  if_info->cond_earliest = earliest;

  return TRUE;
}
1838
 
1839
/* Convert "if (m < 0) x = b; else x = 0;" to "x = (m >> C) & b;".

   IF_INFO describes the candidate if-then-else; on success the
   replacement sequence is emitted before the jump and TRUE is
   returned, otherwise FALSE and nothing is emitted.  */

static int
noce_try_sign_mask (struct noce_if_info *if_info)
{
  rtx cond, t, m, c, seq;
  enum machine_mode mode;
  enum rtx_code code;

  /* The transformation needs a fresh pseudo for the store-flag result.  */
  if (no_new_pseudos)
    return FALSE;

  cond = if_info->cond;
  code = GET_CODE (cond);
  m = XEXP (cond, 0);
  c = XEXP (cond, 1);

  /* One arm must be the constant zero; T becomes the other arm, and the
     condition must test the sign of M (m < 0, or the equivalent forms
     m <= -1 / m >= 0 / m > -1 for the mirrored arm).  */
  t = NULL_RTX;
  if (if_info->a == const0_rtx)
    {
      if ((code == LT && c == const0_rtx)
          || (code == LE && c == constm1_rtx))
        t = if_info->b;
    }
  else if (if_info->b == const0_rtx)
    {
      if ((code == GE && c == const0_rtx)
          || (code == GT && c == constm1_rtx))
        t = if_info->a;
    }

  if (! t || side_effects_p (t))
    return FALSE;

  /* We currently don't handle different modes.  */
  mode = GET_MODE (t);
  if (GET_MODE (m) != mode)
    return FALSE;

  /* This is only profitable if T is cheap, or T is unconditionally
     executed/evaluated in the original insn sequence.  */
  if (rtx_cost (t, SET) >= COSTS_N_INSNS (2)
      && (!if_info->b_unconditional
          || t != if_info->b))
    return FALSE;

  start_sequence ();
  /* Use emit_store_flag to generate "m < 0 ? -1 : 0" instead of expanding
     "(signed) m >> 31" directly.  This benefits targets with specialized
     insns to obtain the signmask, but still uses ashr_optab otherwise.  */
  m = emit_store_flag (gen_reg_rtx (mode), LT, m, const0_rtx, mode, 0, -1);
  t = m ? expand_binop (mode, and_optab, m, t, NULL_RTX, 0, OPTAB_DIRECT)
        : NULL_RTX;

  /* Either expansion may fail; discard the pending sequence if so.  */
  if (!t)
    {
      end_sequence ();
      return FALSE;
    }

  noce_emit_move_insn (if_info->x, t);

  seq = end_ifcvt_sequence (if_info);
  if (!seq)
    return FALSE;

  emit_insn_before_setloc (seq, if_info->jump, INSN_LOCATOR (if_info->insn_a));
  return TRUE;
}
1908
 
1909
 
1910
/* Optimize away "if (x & C) x |= C" and similar bit manipulation
   transformations.

   Handles conditions of the form (zero_extract x 1 BITNUM) {==,!=} 0
   guarding an IOR, XOR or AND of X with the single-bit constant C.
   The conditional store either simplifies to an unconditional bit
   operation or is proven to be a no-op.  Returns TRUE on success
   (including the no-op case, where nothing needs to be emitted).  */

static int
noce_try_bitop (struct noce_if_info *if_info)
{
  rtx cond, x, a, result, seq;
  enum machine_mode mode;
  enum rtx_code code;
  int bitnum;

  x = if_info->x;
  cond = if_info->cond;
  code = GET_CODE (cond);

  /* Check for no else condition.  */
  if (! rtx_equal_p (x, if_info->b))
    return FALSE;

  /* Check for a suitable condition.  */
  if (code != NE && code != EQ)
    return FALSE;
  if (XEXP (cond, 1) != const0_rtx)
    return FALSE;
  cond = XEXP (cond, 0);

  /* ??? We could also handle AND here.  */
  if (GET_CODE (cond) == ZERO_EXTRACT)
    {
      /* Require a single-bit extract at a constant position of X itself.  */
      if (XEXP (cond, 1) != const1_rtx
          || GET_CODE (XEXP (cond, 2)) != CONST_INT
          || ! rtx_equal_p (x, XEXP (cond, 0)))
        return FALSE;
      bitnum = INTVAL (XEXP (cond, 2));
      mode = GET_MODE (x);
      /* The shifts below would be undefined for wider bit positions.  */
      if (bitnum >= HOST_BITS_PER_WIDE_INT)
        return FALSE;
    }
  else
    return FALSE;

  a = if_info->a;
  if (GET_CODE (a) == IOR || GET_CODE (a) == XOR)
    {
      /* Check for "if (X & C) x = x op C".  */
      if (! rtx_equal_p (x, XEXP (a, 0))
          || GET_CODE (XEXP (a, 1)) != CONST_INT
          || (INTVAL (XEXP (a, 1)) & GET_MODE_MASK (mode))
             != (unsigned HOST_WIDE_INT) 1 << bitnum)
        return FALSE;

      /* if ((x & C) == 0) x |= C; is transformed to x |= C.   */
      /* if ((x & C) != 0) x |= C; is transformed to nothing.  */
      if (GET_CODE (a) == IOR)
        result = (code == NE) ? a : NULL_RTX;
      else if (code == NE)
        {
          /* if ((x & C) == 0) x ^= C; is transformed to x |= C.   */
          result = gen_int_mode ((HOST_WIDE_INT) 1 << bitnum, mode);
          result = simplify_gen_binary (IOR, mode, x, result);
        }
      else
        {
          /* if ((x & C) != 0) x ^= C; is transformed to x &= ~C.  */
          result = gen_int_mode (~((HOST_WIDE_INT) 1 << bitnum), mode);
          result = simplify_gen_binary (AND, mode, x, result);
        }
    }
  else if (GET_CODE (a) == AND)
    {
      /* Check for "if (X & C) x &= ~C".  */
      if (! rtx_equal_p (x, XEXP (a, 0))
          || GET_CODE (XEXP (a, 1)) != CONST_INT
          || (INTVAL (XEXP (a, 1)) & GET_MODE_MASK (mode))
             != (~((HOST_WIDE_INT) 1 << bitnum) & GET_MODE_MASK (mode)))
        return FALSE;

      /* if ((x & C) == 0) x &= ~C; is transformed to nothing.  */
      /* if ((x & C) != 0) x &= ~C; is transformed to x &= ~C.  */
      result = (code == EQ) ? a : NULL_RTX;
    }
  else
    return FALSE;

  /* RESULT == NULL_RTX means the store was a no-op: the whole
     if-then collapses and we still report success.  */
  if (result)
    {
      start_sequence ();
      noce_emit_move_insn (x, result);
      seq = end_ifcvt_sequence (if_info);
      if (!seq)
        return FALSE;

      emit_insn_before_setloc (seq, if_info->jump,
                               INSN_LOCATOR (if_info->insn_a));
    }
  return TRUE;
}
2007
 
2008
 
2009
/* Similar to get_condition, only the resulting condition must be
   valid at JUMP, instead of at EARLIEST.

   JUMP must be a conditional jump; *EARLIEST receives the insn at
   which the returned condition is known to hold (JUMP itself in the
   fast path below).  Returns NULL_RTX if no usable condition can be
   extracted.  */

static rtx
noce_get_condition (rtx jump, rtx *earliest)
{
  rtx cond, set, tmp;
  bool reverse;

  if (! any_condjump_p (jump))
    return NULL_RTX;

  set = pc_set (jump);

  /* If this branches to JUMP_LABEL when the condition is false,
     reverse the condition.  */
  reverse = (GET_CODE (XEXP (SET_SRC (set), 2)) == LABEL_REF
             && XEXP (XEXP (SET_SRC (set), 2), 0) == JUMP_LABEL (jump));

  /* If the condition variable is a register and is MODE_INT, accept it.  */

  cond = XEXP (SET_SRC (set), 0);
  tmp = XEXP (cond, 0);
  if (REG_P (tmp) && GET_MODE_CLASS (GET_MODE (tmp)) == MODE_INT)
    {
      *earliest = jump;

      /* Build the reversed comparison in place rather than calling
         out to canonicalize_condition.  */
      if (reverse)
        cond = gen_rtx_fmt_ee (reverse_condition (GET_CODE (cond)),
                               GET_MODE (cond), tmp, XEXP (cond, 1));
      return cond;
    }

  /* Otherwise, fall back on canonicalize_condition to do the dirty
     work of manipulating MODE_CC values and COMPARE rtx codes.  */
  return canonicalize_condition (jump, cond, reverse, earliest,
                                 NULL_RTX, false, true);
}
2047
 
2048
/* Return true if OP is ok for if-then-else processing.  */
2049
 
2050
static int
2051
noce_operand_ok (rtx op)
2052
{
2053
  /* We special-case memories, so handle any of them with
2054
     no address side effects.  */
2055
  if (MEM_P (op))
2056
    return ! side_effects_p (XEXP (op, 0));
2057
 
2058
  if (side_effects_p (op))
2059
    return FALSE;
2060
 
2061
  return ! may_trap_p (op);
2062
}
2063
 
2064
/* Return true if a write into MEM may trap or fault.

   Beyond the generic may_trap_or_fault_p test, this walks MEM's
   address looking for provably read-only storage (constant pool,
   label, read-only-section symbol), since storing there would fault
   even though a read would not.  */

static bool
noce_mem_write_may_trap_or_fault_p (rtx mem)
{
  rtx addr;

  if (MEM_READONLY_P (mem))
    return true;

  if (may_trap_or_fault_p (mem))
    return true;

  addr = XEXP (mem, 0);

  /* Call target hook to avoid the effects of -fpic etc....  */
  addr = targetm.delegitimize_address (addr);

  /* Strip wrappers and offsets until we reach the base of the address;
     decide based on what that base is.  */
  while (addr)
    switch (GET_CODE (addr))
      {
      case CONST:
      case PRE_DEC:
      case PRE_INC:
      case POST_DEC:
      case POST_INC:
      case POST_MODIFY:
        addr = XEXP (addr, 0);
        break;
      case LO_SUM:
      case PRE_MODIFY:
        addr = XEXP (addr, 1);
        break;
      case PLUS:
        /* Only a constant offset keeps the base identifiable; a
           variable index could point anywhere, so give up (treated
           as "not provably read-only").  */
        if (GET_CODE (XEXP (addr, 1)) == CONST_INT)
          addr = XEXP (addr, 0);
        else
          return false;
        break;
      case LABEL_REF:
        /* Code is never writable.  */
        return true;
      case SYMBOL_REF:
        if (SYMBOL_REF_DECL (addr)
            && decl_readonly_section (SYMBOL_REF_DECL (addr), 0))
          return true;
        return false;
      default:
        return false;
      }

  return false;
}
2116
 
2117
/* Given a simple IF-THEN or IF-THEN-ELSE block, attempt to convert it
2118
   without using conditional execution.  Return TRUE if we were
2119
   successful at converting the block.  */
2120
 
2121
static int
2122
noce_process_if_block (struct ce_if_block * ce_info)
2123
{
2124
  basic_block test_bb = ce_info->test_bb;       /* test block */
2125
  basic_block then_bb = ce_info->then_bb;       /* THEN */
2126
  basic_block else_bb = ce_info->else_bb;       /* ELSE or NULL */
2127
  struct noce_if_info if_info;
2128
  rtx insn_a, insn_b;
2129
  rtx set_a, set_b;
2130
  rtx orig_x, x, a, b;
2131
  rtx jump, cond;
2132
 
2133
  /* We're looking for patterns of the form
2134
 
2135
     (1) if (...) x = a; else x = b;
2136
     (2) x = b; if (...) x = a;
2137
     (3) if (...) x = a;   // as if with an initial x = x.
2138
 
2139
     The later patterns require jumps to be more expensive.
2140
 
2141
     ??? For future expansion, look for multiple X in such patterns.  */
2142
 
2143
  /* If test is comprised of && or || elements, don't handle it unless it is
2144
     the special case of && elements without an ELSE block.  */
2145
  if (ce_info->num_multiple_test_blocks)
2146
    {
2147
      if (else_bb || ! ce_info->and_and_p)
2148
        return FALSE;
2149
 
2150
      ce_info->test_bb = test_bb = ce_info->last_test_bb;
2151
      ce_info->num_multiple_test_blocks = 0;
2152
      ce_info->num_and_and_blocks = 0;
2153
      ce_info->num_or_or_blocks = 0;
2154
    }
2155
 
2156
  /* If this is not a standard conditional jump, we can't parse it.  */
2157
  jump = BB_END (test_bb);
2158
  cond = noce_get_condition (jump, &if_info.cond_earliest);
2159
  if (! cond)
2160
    return FALSE;
2161
 
2162
  /* If the conditional jump is more than just a conditional
2163
     jump, then we can not do if-conversion on this block.  */
2164
  if (! onlyjump_p (jump))
2165
    return FALSE;
2166
 
2167
  /* We must be comparing objects whose modes imply the size.  */
2168
  if (GET_MODE (XEXP (cond, 0)) == BLKmode)
2169
    return FALSE;
2170
 
2171
  /* Look for one of the potential sets.  */
2172
  insn_a = first_active_insn (then_bb);
2173
  if (! insn_a
2174
      || insn_a != last_active_insn (then_bb, FALSE)
2175
      || (set_a = single_set (insn_a)) == NULL_RTX)
2176
    return FALSE;
2177
 
2178
  x = SET_DEST (set_a);
2179
  a = SET_SRC (set_a);
2180
 
2181
  /* Look for the other potential set.  Make sure we've got equivalent
2182
     destinations.  */
2183
  /* ??? This is overconservative.  Storing to two different mems is
2184
     as easy as conditionally computing the address.  Storing to a
2185
     single mem merely requires a scratch memory to use as one of the
2186
     destination addresses; often the memory immediately below the
2187
     stack pointer is available for this.  */
2188
  set_b = NULL_RTX;
2189
  if (else_bb)
2190
    {
2191
      insn_b = first_active_insn (else_bb);
2192
      if (! insn_b
2193
          || insn_b != last_active_insn (else_bb, FALSE)
2194
          || (set_b = single_set (insn_b)) == NULL_RTX
2195
          || ! rtx_equal_p (x, SET_DEST (set_b)))
2196
        return FALSE;
2197
    }
2198
  else
2199
    {
2200
      insn_b = prev_nonnote_insn (if_info.cond_earliest);
2201
      /* We're going to be moving the evaluation of B down from above
2202
         COND_EARLIEST to JUMP.  Make sure the relevant data is still
2203
         intact.  */
2204
      if (! insn_b
2205
          || !NONJUMP_INSN_P (insn_b)
2206
          || (set_b = single_set (insn_b)) == NULL_RTX
2207
          || ! rtx_equal_p (x, SET_DEST (set_b))
2208
          || reg_overlap_mentioned_p (x, SET_SRC (set_b))
2209
          || modified_between_p (SET_SRC (set_b),
2210
                                 PREV_INSN (if_info.cond_earliest), jump)
2211
          /* Likewise with X.  In particular this can happen when
2212
             noce_get_condition looks farther back in the instruction
2213
             stream than one might expect.  */
2214
          || reg_overlap_mentioned_p (x, cond)
2215
          || reg_overlap_mentioned_p (x, a)
2216
          || modified_between_p (x, PREV_INSN (if_info.cond_earliest), jump))
2217
        insn_b = set_b = NULL_RTX;
2218
    }
2219
 
2220
  /* If x has side effects then only the if-then-else form is safe to
2221
     convert.  But even in that case we would need to restore any notes
2222
     (such as REG_INC) at then end.  That can be tricky if
2223
     noce_emit_move_insn expands to more than one insn, so disable the
2224
     optimization entirely for now if there are side effects.  */
2225
  if (side_effects_p (x))
2226
    return FALSE;
2227
 
2228
  b = (set_b ? SET_SRC (set_b) : x);
2229
 
2230
  /* Only operate on register destinations, and even then avoid extending
2231
     the lifetime of hard registers on small register class machines.  */
2232
  orig_x = x;
2233
  if (!REG_P (x)
2234
      || (SMALL_REGISTER_CLASSES
2235
          && REGNO (x) < FIRST_PSEUDO_REGISTER))
2236
    {
2237
      if (no_new_pseudos || GET_MODE (x) == BLKmode)
2238
        return FALSE;
2239
 
2240
      if (GET_MODE (x) == ZERO_EXTRACT
2241
          && (GET_CODE (XEXP (x, 1)) != CONST_INT
2242
              || GET_CODE (XEXP (x, 2)) != CONST_INT))
2243
        return FALSE;
2244
 
2245
      x = gen_reg_rtx (GET_MODE (GET_CODE (x) == STRICT_LOW_PART
2246
                                 ? XEXP (x, 0) : x));
2247
    }
2248
 
2249
  /* Don't operate on sources that may trap or are volatile.  */
2250
  if (! noce_operand_ok (a) || ! noce_operand_ok (b))
2251
    return FALSE;
2252
 
2253
  /* Set up the info block for our subroutines.  */
2254
  if_info.test_bb = test_bb;
2255
  if_info.cond = cond;
2256
  if_info.jump = jump;
2257
  if_info.insn_a = insn_a;
2258
  if_info.insn_b = insn_b;
2259
  if_info.x = x;
2260
  if_info.a = a;
2261
  if_info.b = b;
2262
  if_info.b_unconditional = else_bb == 0;
2263
 
2264
  /* Try optimizations in some approximation of a useful order.  */
2265
  /* ??? Should first look to see if X is live incoming at all.  If it
2266
     isn't, we don't need anything but an unconditional set.  */
2267
 
2268
  /* Look and see if A and B are really the same.  Avoid creating silly
2269
     cmove constructs that no one will fix up later.  */
2270
  if (rtx_equal_p (a, b))
2271
    {
2272
      /* If we have an INSN_B, we don't have to create any new rtl.  Just
2273
         move the instruction that we already have.  If we don't have an
2274
         INSN_B, that means that A == X, and we've got a noop move.  In
2275
         that case don't do anything and let the code below delete INSN_A.  */
2276
      if (insn_b && else_bb)
2277
        {
2278
          rtx note;
2279
 
2280
          if (else_bb && insn_b == BB_END (else_bb))
2281
            BB_END (else_bb) = PREV_INSN (insn_b);
2282
          reorder_insns (insn_b, insn_b, PREV_INSN (jump));
2283
 
2284
          /* If there was a REG_EQUAL note, delete it since it may have been
2285
             true due to this insn being after a jump.  */
2286
          if ((note = find_reg_note (insn_b, REG_EQUAL, NULL_RTX)) != 0)
2287
            remove_note (insn_b, note);
2288
 
2289
          insn_b = NULL_RTX;
2290
        }
2291
      /* If we have "x = b; if (...) x = a;", and x has side-effects, then
2292
         x must be executed twice.  */
2293
      else if (insn_b && side_effects_p (orig_x))
2294
        return FALSE;
2295
 
2296
      x = orig_x;
2297
      goto success;
2298
    }
2299
 
2300
  /* Disallow the "if (...) x = a;" form (with an implicit "else x = x;")
2301
     for optimizations if writing to x may trap or fault, i.e. it's a memory
2302
     other than a static var or a stack slot, is misaligned on strict
2303
     aligned machines or is read-only.
2304
     If x is a read-only memory, then the program is valid only if we
2305
     avoid the store into it.  If there are stores on both the THEN and
2306
     ELSE arms, then we can go ahead with the conversion; either the
2307
     program is broken, or the condition is always false such that the
2308
     other memory is selected.  */
2309
  if (!set_b && MEM_P (orig_x) && noce_mem_write_may_trap_or_fault_p (orig_x))
2310
    return FALSE;
2311
 
2312
  if (noce_try_move (&if_info))
2313
    goto success;
2314
  if (noce_try_store_flag (&if_info))
2315
    goto success;
2316
  if (noce_try_bitop (&if_info))
2317
    goto success;
2318
  if (noce_try_minmax (&if_info))
2319
    goto success;
2320
  if (noce_try_abs (&if_info))
2321
    goto success;
2322
  if (HAVE_conditional_move
2323
      && noce_try_cmove (&if_info))
2324
    goto success;
2325
  if (! HAVE_conditional_execution)
2326
    {
2327
      if (noce_try_store_flag_constants (&if_info))
2328
        goto success;
2329
      if (noce_try_addcc (&if_info))
2330
        goto success;
2331
      if (noce_try_store_flag_mask (&if_info))
2332
        goto success;
2333
      if (HAVE_conditional_move
2334
          && noce_try_cmove_arith (&if_info))
2335
        goto success;
2336
      if (noce_try_sign_mask (&if_info))
2337
        goto success;
2338
    }
2339
 
2340
  return FALSE;
2341
 
2342
 success:
2343
  /* The original sets may now be killed.  */
2344
  delete_insn (insn_a);
2345
 
2346
  /* Several special cases here: First, we may have reused insn_b above,
2347
     in which case insn_b is now NULL.  Second, we want to delete insn_b
2348
     if it came from the ELSE block, because follows the now correct
2349
     write that appears in the TEST block.  However, if we got insn_b from
2350
     the TEST block, it may in fact be loading data needed for the comparison.
2351
     We'll let life_analysis remove the insn if it's really dead.  */
2352
  if (insn_b && else_bb)
2353
    delete_insn (insn_b);
2354
 
2355
  /* The new insns will have been inserted immediately before the jump.  We
2356
     should be able to remove the jump with impunity, but the condition itself
2357
     may have been modified by gcse to be shared across basic blocks.  */
2358
  delete_insn (jump);
2359
 
2360
  /* If we used a temporary, fix it up now.  */
2361
  if (orig_x != x)
2362
    {
2363
      start_sequence ();
2364
      noce_emit_move_insn (orig_x, x);
2365
      insn_b = get_insns ();
2366
      set_used_flags (orig_x);
2367
      unshare_all_rtl_in_chain (insn_b);
2368
      end_sequence ();
2369
 
2370
      emit_insn_after_setloc (insn_b, BB_END (test_bb), INSN_LOCATOR (insn_a));
2371
    }
2372
 
2373
  /* Merge the blocks!  */
2374
  merge_if_block (ce_info);
2375
 
2376
  return TRUE;
2377
}
2378
 
2379
/* Attempt to convert an IF-THEN or IF-THEN-ELSE block into
2380
   straight line code.  Return true if successful.  */
2381
 
2382
static int
2383
process_if_block (struct ce_if_block * ce_info)
2384
{
2385
  if (! reload_completed
2386
      && noce_process_if_block (ce_info))
2387
    return TRUE;
2388
 
2389
  if (HAVE_conditional_execution && reload_completed)
2390
    {
2391
      /* If we have && and || tests, try to first handle combining the && and
2392
         || tests into the conditional code, and if that fails, go back and
2393
         handle it without the && and ||, which at present handles the && case
2394
         if there was no ELSE block.  */
2395
      if (cond_exec_process_if_block (ce_info, TRUE))
2396
        return TRUE;
2397
 
2398
      if (ce_info->num_multiple_test_blocks)
2399
        {
2400
          cancel_changes (0);
2401
 
2402
          if (cond_exec_process_if_block (ce_info, FALSE))
2403
            return TRUE;
2404
        }
2405
    }
2406
 
2407
  return FALSE;
2408
}
2409
 
2410
/* Merge the blocks and mark for local life update.

   CE_INFO describes a successfully converted if-then-else diamond;
   all of its blocks (any extra &&/|| test blocks, THEN, ELSE, and
   possibly the join block) are folded into the test block, and the
   global statistics counters are updated.  */

static void
merge_if_block (struct ce_if_block * ce_info)
{
  basic_block test_bb = ce_info->test_bb;       /* last test block */
  basic_block then_bb = ce_info->then_bb;       /* THEN */
  basic_block else_bb = ce_info->else_bb;       /* ELSE or NULL */
  basic_block join_bb = ce_info->join_bb;       /* join block */
  basic_block combo_bb;

  /* All block merging is done into the lower block numbers.  */

  combo_bb = test_bb;

  /* Merge any basic blocks to handle && and || subtests.  Each of
     the blocks are on the fallthru path from the predecessor block.  */
  if (ce_info->num_multiple_test_blocks > 0)
    {
      basic_block bb = test_bb;
      basic_block last_test_bb = ce_info->last_test_bb;
      basic_block fallthru = block_fallthru (bb);

      /* Walk the fallthru chain, folding each subtest block into the
         combined block until the last test block has been absorbed.  */
      do
        {
          bb = fallthru;
          fallthru = block_fallthru (bb);
          merge_blocks (combo_bb, bb);
          num_true_changes++;
        }
      while (bb != last_test_bb);
    }

  /* Merge TEST block into THEN block.  Normally the THEN block won't have a
     label, but it might if there were || tests.  That label's count should be
     zero, and it normally should be removed.  */

  if (then_bb)
    {
      if (combo_bb->il.rtl->global_live_at_end)
        COPY_REG_SET (combo_bb->il.rtl->global_live_at_end,
                      then_bb->il.rtl->global_live_at_end);
      merge_blocks (combo_bb, then_bb);
      num_true_changes++;
    }

  /* The ELSE block, if it existed, had a label.  That label count
     will almost always be zero, but odd things can happen when labels
     get their addresses taken.  */
  if (else_bb)
    {
      merge_blocks (combo_bb, else_bb);
      num_true_changes++;
    }

  /* If there was no join block reported, that means it was not adjacent
     to the others, and so we cannot merge them.  */

  if (! join_bb)
    {
      rtx last = BB_END (combo_bb);

      /* The outgoing edge for the current COMBO block should already
         be correct.  Verify this.  */
      if (EDGE_COUNT (combo_bb->succs) == 0)
        gcc_assert (find_reg_note (last, REG_NORETURN, NULL)
                    || (NONJUMP_INSN_P (last)
                        && GET_CODE (PATTERN (last)) == TRAP_IF
                        && (TRAP_CONDITION (PATTERN (last))
                            == const_true_rtx)));

      else
      /* There should still be something at the end of the THEN or ELSE
         blocks taking us to our final destination.  */
        gcc_assert (JUMP_P (last)
                    || (EDGE_SUCC (combo_bb, 0)->dest == EXIT_BLOCK_PTR
                        && CALL_P (last)
                        && SIBLING_CALL_P (last))
                    || ((EDGE_SUCC (combo_bb, 0)->flags & EDGE_EH)
                        && can_throw_internal (last)));
    }

  /* The JOIN block may have had quite a number of other predecessors too.
     Since we've already merged the TEST, THEN and ELSE blocks, we should
     have only one remaining edge from our if-then-else diamond.  If there
     is more than one remaining edge, it must come from elsewhere.  There
     may be zero incoming edges if the THEN block didn't actually join
     back up (as with a call to a non-return function).  */
  else if (EDGE_COUNT (join_bb->preds) < 2
           && join_bb != EXIT_BLOCK_PTR)
    {
      /* We can merge the JOIN.  */
      if (combo_bb->il.rtl->global_live_at_end)
        COPY_REG_SET (combo_bb->il.rtl->global_live_at_end,
                      join_bb->il.rtl->global_live_at_end);

      merge_blocks (combo_bb, join_bb);
      num_true_changes++;
    }
  else
    {
      /* We cannot merge the JOIN.  */

      /* The outgoing edge for the current COMBO block should already
         be correct.  Verify this.  */
      gcc_assert (single_succ_p (combo_bb)
                  && single_succ (combo_bb) == join_bb);

      /* Remove the jump and cruft from the end of the COMBO block.  */
      if (join_bb != EXIT_BLOCK_PTR)
        tidy_fallthru_edge (single_succ_edge (combo_bb));
    }

  num_updated_if_blocks++;
}
2525
 
2526
/* Find a block ending in a simple IF condition and try to transform it
   in some way.  When converting a multi-block condition, put the new code
   in the first such block and delete the rest.  Return a pointer to this
   first block if some transformation was done.  Return NULL otherwise.

   TEST_BB is the candidate block; PASS is the current if-conversion
   pass number, used only for dump output and recorded in the
   ce_if_block.  */

static basic_block
find_if_header (basic_block test_bb, int pass)
{
  ce_if_block_t ce_info;
  edge then_edge;
  edge else_edge;

  /* The kind of block we're looking for has exactly two successors.  */
  if (EDGE_COUNT (test_bb->succs) != 2)
    return NULL;

  then_edge = EDGE_SUCC (test_bb, 0);
  else_edge = EDGE_SUCC (test_bb, 1);

  /* Neither edge should be abnormal.  */
  if ((then_edge->flags & EDGE_COMPLEX)
      || (else_edge->flags & EDGE_COMPLEX))
    return NULL;

  /* Nor exit the loop.  */
  if ((then_edge->flags & EDGE_LOOP_EXIT)
      || (else_edge->flags & EDGE_LOOP_EXIT))
    return NULL;

  /* The THEN edge is canonically the one that falls through.  */
  if (then_edge->flags & EDGE_FALLTHRU)
    ;
  else if (else_edge->flags & EDGE_FALLTHRU)
    {
      /* Swap so THEN_EDGE is the fallthrough edge.  */
      edge e = else_edge;
      else_edge = then_edge;
      then_edge = e;
    }
  else
    /* Otherwise this must be a multiway branch of some sort.  */
    return NULL;

  memset (&ce_info, '\0', sizeof (ce_info));
  ce_info.test_bb = test_bb;
  ce_info.then_bb = then_edge->dest;
  ce_info.else_bb = else_edge->dest;
  ce_info.pass = pass;

#ifdef IFCVT_INIT_EXTRA_FIELDS
  IFCVT_INIT_EXTRA_FIELDS (&ce_info);
#endif

  /* Try each recognizer in turn, from the most general shape to the
     more specialized ones.  */
  if (find_if_block (&ce_info))
    goto success;

  if (HAVE_trap && HAVE_conditional_trap
      && find_cond_trap (test_bb, then_edge, else_edge))
    goto success;

  if (dom_computed[CDI_POST_DOMINATORS] >= DOM_NO_FAST_QUERY
      && (! HAVE_conditional_execution || reload_completed))
    {
      if (find_if_case_1 (test_bb, then_edge, else_edge))
        goto success;
      if (find_if_case_2 (test_bb, then_edge, else_edge))
        goto success;
    }

  return NULL;

 success:
  if (dump_file)
    fprintf (dump_file, "Conversion succeeded on pass %d.\n", pass);
  return ce_info.test_bb;
}
2601
 
2602
/* Return true if a block has two edges, one of which falls through to the next
   block, and the other jumps to a specific block, so that we can tell if the
   block is part of an && test or an || test.  Returns either -1 or the number
   of non-note, non-jump, non-USE/CLOBBER insns in the block.

   CUR_BB is the block under examination; TARGET_BB is the block the
   non-fallthru edge must reach.  -1 means "not a usable &&/|| block".  */

static int
block_jumps_and_fallthru_p (basic_block cur_bb, basic_block target_bb)
{
  edge cur_edge;
  int fallthru_p = FALSE;
  int jump_p = FALSE;
  rtx insn;
  rtx end;
  int n_insns = 0;
  edge_iterator ei;

  if (!cur_bb || !target_bb)
    return -1;

  /* If no edges, obviously it doesn't jump or fallthru.  */
  /* NOTE(review): this returns FALSE (0), which callers comparing
     ">= 0" would treat as a valid zero-insn block rather than a
     rejection -- presumably unreachable in practice since CUR_BB
     is a fallthru predecessor; verify against callers.  */
  if (EDGE_COUNT (cur_bb->succs) == 0)
    return FALSE;

  FOR_EACH_EDGE (cur_edge, ei, cur_bb->succs)
    {
      if (cur_edge->flags & EDGE_COMPLEX)
        /* Anything complex isn't what we want.  */
        return -1;

      else if (cur_edge->flags & EDGE_FALLTHRU)
        fallthru_p = TRUE;

      else if (cur_edge->dest == target_bb)
        jump_p = TRUE;

      else
        return -1;
    }

  /* Both a fallthru edge and a jump to TARGET_BB must be present
     (bitwise & is safe here: both flags are exactly 0 or 1).  */
  if ((jump_p & fallthru_p) == 0)
    return -1;

  /* Don't allow calls in the block, since this is used to group && and ||
     together for conditional execution support.  ??? we should support
     conditional execution support across calls for IA-64 some day, but
     for now it makes the code simpler.  */
  end = BB_END (cur_bb);
  insn = BB_HEAD (cur_bb);

  while (insn != NULL_RTX)
    {
      if (CALL_P (insn))
        return -1;

      /* Count only real insns: skip notes, jumps, and USE/CLOBBER
         markers, which generate no conditionalizable code.  */
      if (INSN_P (insn)
          && !JUMP_P (insn)
          && GET_CODE (PATTERN (insn)) != USE
          && GET_CODE (PATTERN (insn)) != CLOBBER)
        n_insns++;

      if (insn == end)
        break;

      insn = NEXT_INSN (insn);
    }

  return n_insns;
}
2670
 
2671
/* Determine if a given basic block heads a simple IF-THEN or IF-THEN-ELSE
   block.  If so, we'll try to convert the insns to not require the branch.
   Return TRUE if we were successful at converting the block.

   CE_INFO describes the candidate: on entry its test_bb/then_bb/else_bb
   fields are filled in by the caller; on success this function also sets
   else_bb (possibly NULL for IF-THEN), join_bb, and the multiple-test
   bookkeeping before handing off to process_if_block.  */

static int
find_if_block (struct ce_if_block * ce_info)
{
  basic_block test_bb = ce_info->test_bb;
  basic_block then_bb = ce_info->then_bb;
  basic_block else_bb = ce_info->else_bb;
  basic_block join_bb = NULL_BLOCK;
  edge cur_edge;
  basic_block next;
  edge_iterator ei;

  ce_info->last_test_bb = test_bb;

  /* Discover if any fall through predecessors of the current test basic block
     were && tests (which jump to the else block) or || tests (which jump to
     the then block).  */
  if (HAVE_conditional_execution && reload_completed
      && single_pred_p (test_bb)
      && single_pred_edge (test_bb)->flags == EDGE_FALLTHRU)
    {
      basic_block bb = single_pred (test_bb);
      basic_block target_bb;
      int max_insns = MAX_CONDITIONAL_EXECUTE;
      int n_insns;

      /* Determine if the preceding block is an && or || block.  */
      if ((n_insns = block_jumps_and_fallthru_p (bb, else_bb)) >= 0)
        {
          ce_info->and_and_p = TRUE;
          target_bb = else_bb;
        }
      else if ((n_insns = block_jumps_and_fallthru_p (bb, then_bb)) >= 0)
        {
          ce_info->and_and_p = FALSE;
          target_bb = then_bb;
        }
      else
        target_bb = NULL_BLOCK;

      if (target_bb && n_insns <= max_insns)
        {
          int total_insns = 0;
          int blocks = 0;

          ce_info->last_test_bb = test_bb;

          /* Found at least one && or || block, look for more.  Walk up
             the chain of single fallthru predecessors as long as each
             one also jumps to TARGET_BB and the total insn budget
             holds.  */
          do
            {
              ce_info->test_bb = test_bb = bb;
              total_insns += n_insns;
              blocks++;

              if (!single_pred_p (bb))
                break;

              bb = single_pred (bb);
              n_insns = block_jumps_and_fallthru_p (bb, target_bb);
            }
          while (n_insns >= 0 && (total_insns + n_insns) <= max_insns);

          ce_info->num_multiple_test_blocks = blocks;
          ce_info->num_multiple_test_insns = total_insns;

          if (ce_info->and_and_p)
            ce_info->num_and_and_blocks = blocks;
          else
            ce_info->num_or_or_blocks = blocks;
        }
    }

  /* The THEN block of an IF-THEN combo must have exactly one predecessor,
     other than any || blocks which jump to the THEN block.  */
  if ((EDGE_COUNT (then_bb->preds) - ce_info->num_or_or_blocks) != 1)
    return FALSE;

  /* The edges of the THEN and ELSE blocks cannot have complex edges.  */
  FOR_EACH_EDGE (cur_edge, ei, then_bb->preds)
    {
      if (cur_edge->flags & EDGE_COMPLEX)
        return FALSE;
    }

  FOR_EACH_EDGE (cur_edge, ei, else_bb->preds)
    {
      if (cur_edge->flags & EDGE_COMPLEX)
        return FALSE;
    }

  /* The THEN block of an IF-THEN combo must have zero or one successors.  */
  if (EDGE_COUNT (then_bb->succs) > 0
      && (!single_succ_p (then_bb)
          || (single_succ_edge (then_bb)->flags & EDGE_COMPLEX)
          || (flow2_completed && tablejump_p (BB_END (then_bb), NULL, NULL))))
    return FALSE;

  /* If the THEN block has no successors, conditional execution can still
     make a conditional call.  Don't do this unless the ELSE block has
     only one incoming edge -- the CFG manipulation is too ugly otherwise.
     Check for the last insn of the THEN block being an indirect jump, which
     is listed as not having any successors, but confuses the rest of the CE
     code processing.  ??? we should fix this in the future.  */
  if (EDGE_COUNT (then_bb->succs) == 0)
    {
      if (single_pred_p (else_bb))
        {
          rtx last_insn = BB_END (then_bb);

          /* Skip trailing notes to find the real last insn.  */
          while (last_insn
                 && NOTE_P (last_insn)
                 && last_insn != BB_HEAD (then_bb))
            last_insn = PREV_INSN (last_insn);

          if (last_insn
              && JUMP_P (last_insn)
              && ! simplejump_p (last_insn))
            return FALSE;

          join_bb = else_bb;
          else_bb = NULL_BLOCK;
        }
      else
        return FALSE;
    }

  /* If the THEN block's successor is the other edge out of the TEST block,
     then we have an IF-THEN combo without an ELSE.  */
  else if (single_succ (then_bb) == else_bb)
    {
      join_bb = else_bb;
      else_bb = NULL_BLOCK;
    }

  /* If the THEN and ELSE block meet in a subsequent block, and the ELSE
     has exactly one predecessor and one successor, and the outgoing edge
     is not complex, then we have an IF-THEN-ELSE combo.  */
  else if (single_succ_p (else_bb)
           && single_succ (then_bb) == single_succ (else_bb)
           && single_pred_p (else_bb)
           && ! (single_succ_edge (else_bb)->flags & EDGE_COMPLEX)
           && ! (flow2_completed && tablejump_p (BB_END (else_bb), NULL, NULL)))
    join_bb = single_succ (else_bb);

  /* Otherwise it is not an IF-THEN or IF-THEN-ELSE combination.  */
  else
    return FALSE;

  num_possible_if_blocks++;

  /* Describe the candidate in the dump file for -fdump-rtl-ce*.  */
  if (dump_file)
    {
      fprintf (dump_file,
               "\nIF-THEN%s block found, pass %d, start block %d "
               "[insn %d], then %d [%d]",
               (else_bb) ? "-ELSE" : "",
               ce_info->pass,
               test_bb->index,
               BB_HEAD (test_bb) ? (int)INSN_UID (BB_HEAD (test_bb)) : -1,
               then_bb->index,
               BB_HEAD (then_bb) ? (int)INSN_UID (BB_HEAD (then_bb)) : -1);

      if (else_bb)
        fprintf (dump_file, ", else %d [%d]",
                 else_bb->index,
                 BB_HEAD (else_bb) ? (int)INSN_UID (BB_HEAD (else_bb)) : -1);

      fprintf (dump_file, ", join %d [%d]",
               join_bb->index,
               BB_HEAD (join_bb) ? (int)INSN_UID (BB_HEAD (join_bb)) : -1);

      if (ce_info->num_multiple_test_blocks > 0)
        fprintf (dump_file, ", %d %s block%s last test %d [%d]",
                 ce_info->num_multiple_test_blocks,
                 (ce_info->and_and_p) ? "&&" : "||",
                 (ce_info->num_multiple_test_blocks == 1) ? "" : "s",
                 ce_info->last_test_bb->index,
                 ((BB_HEAD (ce_info->last_test_bb))
                  ? (int)INSN_UID (BB_HEAD (ce_info->last_test_bb))
                  : -1));

      fputc ('\n', dump_file);
    }

  /* Make sure IF, THEN, and ELSE, blocks are adjacent.  Actually, we get the
     first condition for free, since we've already asserted that there's a
     fallthru edge from IF to THEN.  Likewise for the && and || blocks, since
     we checked the FALLTHRU flag, those are already adjacent to the last IF
     block.  */
  /* ??? As an enhancement, move the ELSE block.  Have to deal with
     BLOCK notes, if by no other means than backing out the merge if they
     exist.  Sticky enough I don't want to think about it now.  */
  next = then_bb;
  if (else_bb && (next = next->next_bb) != else_bb)
    return FALSE;
  if ((next = next->next_bb) != join_bb && join_bb != EXIT_BLOCK_PTR)
    {
      if (else_bb)
        join_bb = NULL;
      else
        return FALSE;
    }

  /* Do the real work.  */
  ce_info->else_bb = else_bb;
  ce_info->join_bb = join_bb;

  return process_if_block (ce_info);
}
2883
 
2884
/* Convert a branch over a trap, or a branch
   to a trap, into a conditional trap.

   TEST_BB ends in a conditional jump whose two outgoing edges are
   THEN_EDGE and ELSE_EDGE.  Exactly one destination must consist of a
   single unconditional trap insn.  On success the trap becomes a
   conditional trap emitted in TEST_BB, the trap block is removed, and
   TRUE is returned; otherwise nothing is changed and FALSE is
   returned.  */

static int
find_cond_trap (basic_block test_bb, edge then_edge, edge else_edge)
{
  basic_block then_bb = then_edge->dest;
  basic_block else_bb = else_edge->dest;
  basic_block other_bb, trap_bb;
  rtx trap, jump, cond, cond_earliest, seq;
  enum rtx_code code;

  /* Locate the block with the trap instruction.  */
  /* ??? While we look for no successors, we really ought to allow
     EH successors.  Need to fix merge_if_block for that to work.  */
  if ((trap = block_has_only_trap (then_bb)) != NULL)
    trap_bb = then_bb, other_bb = else_bb;
  else if ((trap = block_has_only_trap (else_bb)) != NULL)
    trap_bb = else_bb, other_bb = then_bb;
  else
    return FALSE;

  if (dump_file)
    {
      fprintf (dump_file, "\nTRAP-IF block found, start %d, trap %d\n",
               test_bb->index, trap_bb->index);
    }

  /* If this is not a standard conditional jump, we can't parse it.  */
  jump = BB_END (test_bb);
  cond = noce_get_condition (jump, &cond_earliest);
  if (! cond)
    return FALSE;

  /* If the conditional jump is more than just a conditional jump, then
     we can not do if-conversion on this block.  */
  if (! onlyjump_p (jump))
    return FALSE;

  /* We must be comparing objects whose modes imply the size.  */
  if (GET_MODE (XEXP (cond, 0)) == BLKmode)
    return FALSE;

  /* Reverse the comparison code, if necessary.  The trap must fire on
     the branch-taken path when the trap block is the THEN (branch)
     destination, i.e. when the original condition holds; COND as
     extracted describes the jump, so invert it in that case.  */
  code = GET_CODE (cond);
  if (then_bb == trap_bb)
    {
      code = reversed_comparison_code (cond, jump);
      if (code == UNKNOWN)
        return FALSE;
    }

  /* Attempt to generate the conditional trap.  */
  seq = gen_cond_trap (code, XEXP (cond, 0),
                       XEXP (cond, 1),
                       TRAP_CODE (PATTERN (trap)));
  if (seq == NULL)
    return FALSE;

  /* Past this point the transformation is committed.  */
  num_true_changes++;

  /* Emit the new insns before cond_earliest.  */
  emit_insn_before_setloc (seq, cond_earliest, INSN_LOCATOR (trap));

  /* Delete the trap block if possible.  */
  remove_edge (trap_bb == then_bb ? then_edge : else_edge);
  if (EDGE_COUNT (trap_bb->preds) == 0)
    delete_basic_block (trap_bb);

  /* If the non-trap block and the test are now adjacent, merge them.
     Otherwise we must insert a direct branch.  */
  if (test_bb->next_bb == other_bb)
    {
      struct ce_if_block new_ce_info;
      delete_insn (jump);
      memset (&new_ce_info, '\0', sizeof (new_ce_info));
      new_ce_info.test_bb = test_bb;
      new_ce_info.then_bb = NULL;
      new_ce_info.else_bb = NULL;
      new_ce_info.join_bb = other_bb;
      merge_if_block (&new_ce_info);
    }
  else
    {
      rtx lab, newjump;

      /* Replace the conditional jump with an unconditional one to the
         same label, keeping LABEL_NUSES consistent.  */
      lab = JUMP_LABEL (jump);
      newjump = emit_jump_insn_after (gen_jump (lab), jump);
      LABEL_NUSES (lab) += 1;
      JUMP_LABEL (newjump) = lab;
      emit_barrier_after (newjump);

      delete_insn (jump);
    }

  return TRUE;
}
2981
 
2982
/* Subroutine of find_cond_trap: if BB contains only a trap insn,
2983
   return it.  */
2984
 
2985
static rtx
2986
block_has_only_trap (basic_block bb)
2987
{
2988
  rtx trap;
2989
 
2990
  /* We're not the exit block.  */
2991
  if (bb == EXIT_BLOCK_PTR)
2992
    return NULL_RTX;
2993
 
2994
  /* The block must have no successors.  */
2995
  if (EDGE_COUNT (bb->succs) > 0)
2996
    return NULL_RTX;
2997
 
2998
  /* The only instruction in the THEN block must be the trap.  */
2999
  trap = first_active_insn (bb);
3000
  if (! (trap == BB_END (bb)
3001
         && GET_CODE (PATTERN (trap)) == TRAP_IF
3002
         && TRAP_CONDITION (PATTERN (trap)) == const_true_rtx))
3003
    return NULL_RTX;
3004
 
3005
  return trap;
3006
}
3007
 
3008
/* Look for IF-THEN-ELSE cases in which one of THEN or ELSE is
   transformable, but not necessarily the other.  There need be no
   JOIN block.

   Return TRUE if we were successful at converting the block.

   Cases we'd like to look at:

   (1)
        if (test) goto over; // x not live
        x = a;
        goto label;
        over:

   becomes

        x = a;
        if (! test) goto label;

   (2)
        if (test) goto E; // x not live
        x = big();
        goto L;
        E:
        x = b;
        goto M;

   becomes

        x = b;
        if (test) goto M;
        x = big();
        goto L;

   (3) // This one's really only interesting for targets that can do
       // multiway branching, e.g. IA-64 BBB bundles.  For other targets
       // it results in multiple branches on a cache line, which often
       // does not sit well with predictors.

        if (test1) goto E; // predicted not taken
        x = a;
        if (test2) goto F;
        ...
        E:
        x = b;
        J:

   becomes

        x = a;
        if (test1) goto E;
        if (test2) goto F;

   Notes:

   (A) Don't do (2) if the branch is predicted against the block we're
   eliminating.  Do it anyway if we can eliminate a branch; this requires
   that the sole successor of the eliminated block postdominate the other
   side of the if.

   (B) With CE, on (3) we can steal from both sides of the if, creating

        if (test1) x = a;
        if (!test1) x = b;
        if (test1) goto J;
        if (test2) goto F;
        ...
        J:

   Again, this is most useful if J postdominates.

   (C) CE substitutes for helpful life information.

   (D) These heuristics need a lot of work.  */
3082
 
3083
/* Tests for case 1 above.

   TEST_BB ends in a conditional branch with edges THEN_EDGE and
   ELSE_EDGE.  Try to move the THEN block's insns above the branch
   (dead_or_predicable proves that is safe) and delete THEN, making
   TEST_BB branch straight to ELSE.  Return TRUE on success.  */

static int
find_if_case_1 (basic_block test_bb, edge then_edge, edge else_edge)
{
  basic_block then_bb = then_edge->dest;
  basic_block else_bb = else_edge->dest, new_bb;
  int then_bb_index;

  /* If we are partitioning hot/cold basic blocks, we don't want to
     mess up unconditional or indirect jumps that cross between hot
     and cold sections.

     Basic block partitioning may result in some jumps that appear to
     be optimizable (or blocks that appear to be mergeable), but which really
     must be left untouched (they are required to make it safely across
     partition boundaries).  See  the comments at the top of
     bb-reorder.c:partition_hot_cold_basic_blocks for complete details.  */

  if ((BB_END (then_bb)
       && find_reg_note (BB_END (then_bb), REG_CROSSING_JUMP, NULL_RTX))
      || (BB_END (test_bb)
          && find_reg_note (BB_END (test_bb), REG_CROSSING_JUMP, NULL_RTX))
      || (BB_END (else_bb)
          && find_reg_note (BB_END (else_bb), REG_CROSSING_JUMP,
                            NULL_RTX)))
    return FALSE;

  /* THEN has one successor.  */
  if (!single_succ_p (then_bb))
    return FALSE;

  /* THEN does not fall through, but is not strange either.  */
  if (single_succ_edge (then_bb)->flags & (EDGE_COMPLEX | EDGE_FALLTHRU))
    return FALSE;

  /* THEN has one predecessor.  */
  if (!single_pred_p (then_bb))
    return FALSE;

  /* THEN must do something.  */
  if (forwarder_block_p (then_bb))
    return FALSE;

  num_possible_if_blocks++;
  if (dump_file)
    fprintf (dump_file,
             "\nIF-CASE-1 found, start %d, then %d\n",
             test_bb->index, then_bb->index);

  /* THEN is small.  */
  if (! cheap_bb_rtx_cost_p (then_bb, COSTS_N_INSNS (BRANCH_COST)))
    return FALSE;

  /* Registers set are dead, or are predicable.  */
  if (! dead_or_predicable (test_bb, then_bb, else_bb,
                            single_succ (then_bb), 1))
    return FALSE;

  /* Conversion went ok, including moving the insns and fixing up the
     jump.  Adjust the CFG to match.  TEST_BB now also executes THEN's
     insns, so its live-at-end set must cover both paths.  */

  bitmap_ior (test_bb->il.rtl->global_live_at_end,
              else_bb->il.rtl->global_live_at_start,
              then_bb->il.rtl->global_live_at_end);


  /* We can avoid creating a new basic block if then_bb is immediately
     followed by else_bb, i.e. deleting then_bb allows test_bb to fall
     thru to else_bb.  */

  if (then_bb->next_bb == else_bb
      && then_bb->prev_bb == test_bb
      && else_bb != EXIT_BLOCK_PTR)
    {
      redirect_edge_succ (FALLTHRU_EDGE (test_bb), else_bb);
      new_bb = 0;
    }
  else
    new_bb = redirect_edge_and_branch_force (FALLTHRU_EDGE (test_bb),
                                             else_bb);

  then_bb_index = then_bb->index;
  delete_basic_block (then_bb);

  /* Make rest of code believe that the newly created block is the THEN_BB
     block we removed.  */
  if (new_bb)
    {
      new_bb->index = then_bb_index;
      BASIC_BLOCK (then_bb_index) = new_bb;
      /* Since the fallthru edge was redirected from test_bb to new_bb,
         we need to ensure that new_bb is in the same partition as
         test bb (you can not fall through across section boundaries).  */
      BB_COPY_PARTITION (new_bb, test_bb);
    }
  /* We've possibly created jump to next insn, cleanup_cfg will solve that
     later.  */

  num_true_changes++;
  num_updated_if_blocks++;

  return TRUE;
}
3187
 
3188
/* Test for case 2 above.

   TEST_BB ends in a conditional branch with edges THEN_EDGE and
   ELSE_EDGE.  Try to move the ELSE block's insns above the branch and
   delete ELSE, redirecting the branch to ELSE's successor.  Only done
   when the branch is predicted taken or SUCC(ELSE) postdominates THEN
   (note (A) above).  Return TRUE on success.  */

static int
find_if_case_2 (basic_block test_bb, edge then_edge, edge else_edge)
{
  basic_block then_bb = then_edge->dest;
  basic_block else_bb = else_edge->dest;
  edge else_succ;
  rtx note;

  /* If we are partitioning hot/cold basic blocks, we don't want to
     mess up unconditional or indirect jumps that cross between hot
     and cold sections.

     Basic block partitioning may result in some jumps that appear to
     be optimizable (or blocks that appear to be mergeable), but which really
     must be left untouched (they are required to make it safely across
     partition boundaries).  See  the comments at the top of
     bb-reorder.c:partition_hot_cold_basic_blocks for complete details.  */

  if ((BB_END (then_bb)
       && find_reg_note (BB_END (then_bb), REG_CROSSING_JUMP, NULL_RTX))
      || (BB_END (test_bb)
          && find_reg_note (BB_END (test_bb), REG_CROSSING_JUMP, NULL_RTX))
      || (BB_END (else_bb)
          && find_reg_note (BB_END (else_bb), REG_CROSSING_JUMP,
                            NULL_RTX)))
    return FALSE;

  /* ELSE has one successor.  */
  if (!single_succ_p (else_bb))
    return FALSE;
  else
    else_succ = single_succ_edge (else_bb);

  /* ELSE outgoing edge is not complex.  */
  if (else_succ->flags & EDGE_COMPLEX)
    return FALSE;

  /* ELSE has one predecessor.  */
  if (!single_pred_p (else_bb))
    return FALSE;

  /* THEN is not EXIT.  (A negative index denotes the entry/exit
     blocks.)  */
  if (then_bb->index < 0)
    return FALSE;

  /* ELSE is predicted or SUCC(ELSE) postdominates THEN.  */
  note = find_reg_note (BB_END (test_bb), REG_BR_PROB, NULL_RTX);
  if (note && INTVAL (XEXP (note, 0)) >= REG_BR_PROB_BASE / 2)
    ;
  else if (else_succ->dest->index < 0
           || dominated_by_p (CDI_POST_DOMINATORS, then_bb,
                              else_succ->dest))
    ;
  else
    return FALSE;

  num_possible_if_blocks++;
  if (dump_file)
    fprintf (dump_file,
             "\nIF-CASE-2 found, start %d, else %d\n",
             test_bb->index, else_bb->index);

  /* ELSE is small.  */
  if (! cheap_bb_rtx_cost_p (else_bb, COSTS_N_INSNS (BRANCH_COST)))
    return FALSE;

  /* Registers set are dead, or are predicable.  */
  if (! dead_or_predicable (test_bb, else_bb, then_bb, else_succ->dest, 0))
    return FALSE;

  /* Conversion went ok, including moving the insns and fixing up the
     jump.  Adjust the CFG to match.  TEST_BB now also executes ELSE's
     insns, so its live-at-end set must cover both paths.  */

  bitmap_ior (test_bb->il.rtl->global_live_at_end,
              then_bb->il.rtl->global_live_at_start,
              else_bb->il.rtl->global_live_at_end);

  delete_basic_block (else_bb);

  num_true_changes++;
  num_updated_if_blocks++;

  /* ??? We may now fallthru from one of THEN's successors into a join
     block.  Rerun cleanup_cfg?  Examine things manually?  Wait?  */

  return TRUE;
}
3277
 
3278
/* A subroutine of dead_or_predicable called through for_each_rtx.
3279
   Return 1 if a memory is found.  */
3280
 
3281
static int
3282
find_memory (rtx *px, void *data ATTRIBUTE_UNUSED)
3283
{
3284
  return MEM_P (*px);
3285
}
3286
 
3287
/* Used by the code above to perform the actual rtl transformations.
   Return TRUE if successful.

   TEST_BB is the block containing the conditional branch.  MERGE_BB
   is the block containing the code to manipulate.  NEW_DEST is the
   label TEST_BB should be branching to after the conversion.
   REVERSEP is true if the sense of the branch should be reversed.

   OTHER_BB is the branch's other destination; when it equals NEW_DEST
   the jump becomes redundant and is deleted instead of redirected.
   With conditional execution, MERGE_BB's insns are predicated; without
   it they must be provably free of calls, traps, memory references,
   and register conflicts (the "dead" case) before being hoisted above
   the branch.  */

static int
dead_or_predicable (basic_block test_bb, basic_block merge_bb,
                    basic_block other_bb, basic_block new_dest, int reversep)
{
  rtx head, end, jump, earliest = NULL_RTX, old_dest, new_label = NULL_RTX;

  jump = BB_END (test_bb);

  /* Find the extent of the real code in the merge block.  */
  head = BB_HEAD (merge_bb);
  end = BB_END (merge_bb);

  /* Skip a leading label and note; if nothing remains, or the block
     ends in a jump with no body before it, there is no code to move.  */
  if (LABEL_P (head))
    head = NEXT_INSN (head);
  if (NOTE_P (head))
    {
      if (head == end)
        {
          head = end = NULL_RTX;
          goto no_body;
        }
      head = NEXT_INSN (head);
    }

  if (JUMP_P (end))
    {
      if (head == end)
        {
          head = end = NULL_RTX;
          goto no_body;
        }
      end = PREV_INSN (end);
    }

  /* Disable handling dead code by conditional execution if the machine needs
     to do anything funny with the tests, etc.  */
#ifndef IFCVT_MODIFY_TESTS
  if (HAVE_conditional_execution)
    {
      /* In the conditional execution case, we have things easy.  We know
         the condition is reversible.  We don't have to check life info
         because we're going to conditionally execute the code anyway.
         All that's left is making sure the insns involved can actually
         be predicated.  */

      rtx cond, prob_val;

      cond = cond_exec_get_condition (jump);
      if (! cond)
        return FALSE;

      prob_val = find_reg_note (jump, REG_BR_PROB, NULL_RTX);
      if (prob_val)
        prob_val = XEXP (prob_val, 0);

      if (reversep)
        {
          enum rtx_code rev = reversed_comparison_code (cond, jump);
          if (rev == UNKNOWN)
            return FALSE;
          cond = gen_rtx_fmt_ee (rev, GET_MODE (cond), XEXP (cond, 0),
                                 XEXP (cond, 1));
          /* Reversing the branch also reverses its probability.  */
          if (prob_val)
            prob_val = GEN_INT (REG_BR_PROB_BASE - INTVAL (prob_val));
        }

      if (! cond_exec_process_insns ((ce_if_block_t *)0, head, end, cond,
                                     prob_val, 0))
        goto cancel;

      earliest = jump;
    }
  else
#endif
    {
      /* In the non-conditional execution case, we have to verify that there
         are no trapping operations, no calls, no references to memory, and
         that any registers modified are dead at the branch site.  */

      rtx insn, cond, prev;
      regset merge_set, tmp, test_live, test_set;
      struct propagate_block_info *pbi;
      unsigned i, fail = 0;
      bitmap_iterator bi;

      /* Check for no calls or trapping operations.  */
      for (insn = head; ; insn = NEXT_INSN (insn))
        {
          if (CALL_P (insn))
            return FALSE;
          if (INSN_P (insn))
            {
              if (may_trap_p (PATTERN (insn)))
                return FALSE;

              /* ??? Even non-trapping memories such as stack frame
                 references must be avoided.  For stores, we collect
                 no lifetime info; for reads, we'd have to assert
                 true_dependence false against every store in the
                 TEST range.  */
              if (for_each_rtx (&PATTERN (insn), find_memory, NULL))
                return FALSE;
            }
          if (insn == end)
            break;
        }

      if (! any_condjump_p (jump))
        return FALSE;

      /* Find the extent of the conditional.  */
      cond = noce_get_condition (jump, &earliest);
      if (! cond)
        return FALSE;

      /* Collect:
           MERGE_SET = set of registers set in MERGE_BB
           TEST_LIVE = set of registers live at EARLIEST
           TEST_SET  = set of registers set between EARLIEST and the
                       end of the block.  */

      tmp = ALLOC_REG_SET (&reg_obstack);
      merge_set = ALLOC_REG_SET (&reg_obstack);
      test_live = ALLOC_REG_SET (&reg_obstack);
      test_set = ALLOC_REG_SET (&reg_obstack);

      /* ??? bb->local_set is only valid during calculate_global_regs_live,
         so we must recompute usage for MERGE_BB.  Not so bad, I suppose,
         since we've already asserted that MERGE_BB is small.  */
      /* If we allocated new pseudos (e.g. in the conditional move
         expander called from noce_emit_cmove), we must resize the
         array first.  */
      if (max_regno < max_reg_num ())
        {
          max_regno = max_reg_num ();
          allocate_reg_info (max_regno, FALSE, FALSE);
        }
      propagate_block (merge_bb, tmp, merge_set, merge_set, 0);

      /* For small register class machines, don't lengthen lifetimes of
         hard registers before reload.  */
      if (SMALL_REGISTER_CLASSES && ! reload_completed)
        {
          EXECUTE_IF_SET_IN_BITMAP (merge_set, 0, i, bi)
            {
              if (i < FIRST_PSEUDO_REGISTER
                  && ! fixed_regs[i]
                  && ! global_regs[i])
                fail = 1;
            }
        }

      /* For TEST, we're interested in a range of insns, not a whole block.
         Moreover, we're interested in the insns live from OTHER_BB.  */

      COPY_REG_SET (test_live, other_bb->il.rtl->global_live_at_start);
      pbi = init_propagate_block_info (test_bb, test_live, test_set, test_set,
                                       0);

      /* Walk backwards from the jump to EARLIEST, accumulating liveness
         over exactly the insns the merged code will be hoisted above.  */
      for (insn = jump; ; insn = prev)
        {
          prev = propagate_one_insn (pbi, insn);
          if (insn == earliest)
            break;
        }

      free_propagate_block_info (pbi);

      /* We can perform the transformation if
           MERGE_SET & (TEST_SET | TEST_LIVE)
         and
           TEST_SET & merge_bb->il.rtl->global_live_at_start
         are empty.  */

      if (bitmap_intersect_p (test_set, merge_set)
          || bitmap_intersect_p (test_live, merge_set)
          || bitmap_intersect_p (test_set,
                                 merge_bb->il.rtl->global_live_at_start))
        fail = 1;

      FREE_REG_SET (tmp);
      FREE_REG_SET (merge_set);
      FREE_REG_SET (test_live);
      FREE_REG_SET (test_set);

      if (fail)
        return FALSE;
    }

 no_body:
  /* We don't want to use normal invert_jump or redirect_jump because
     we don't want to delete_insn called.  Also, we want to do our own
     change group management.  */

  old_dest = JUMP_LABEL (jump);
  if (other_bb != new_dest)
    {
      new_label = block_label (new_dest);
      if (reversep
          ? ! invert_jump_1 (jump, new_label)
          : ! redirect_jump_1 (jump, new_label))
        goto cancel;
    }

  if (! apply_change_group ())
    return FALSE;

  if (other_bb != new_dest)
    {
      redirect_jump_2 (jump, old_dest, new_label, -1, reversep);

      redirect_edge_succ (BRANCH_EDGE (test_bb), new_dest);
      if (reversep)
        {
          /* The branch was inverted, so swap the edge counts and
             probabilities and refresh the REG_BR_PROB note.  */
          gcov_type count, probability;
          count = BRANCH_EDGE (test_bb)->count;
          BRANCH_EDGE (test_bb)->count = FALLTHRU_EDGE (test_bb)->count;
          FALLTHRU_EDGE (test_bb)->count = count;
          probability = BRANCH_EDGE (test_bb)->probability;
          BRANCH_EDGE (test_bb)->probability
            = FALLTHRU_EDGE (test_bb)->probability;
          FALLTHRU_EDGE (test_bb)->probability = probability;
          update_br_prob_note (test_bb);
        }
    }

  /* Move the insns out of MERGE_BB to before the branch.  */
  if (head != NULL)
    {
      rtx insn;

      if (end == BB_END (merge_bb))
        BB_END (merge_bb) = PREV_INSN (head);

      if (squeeze_notes (&head, &end))
        return TRUE;

      /* PR 21767: When moving insns above a conditional branch, REG_EQUAL
         notes might become invalid.  */
      insn = head;
      do
        {
          rtx note, set;

          if (! INSN_P (insn))
            continue;
          note = find_reg_note (insn, REG_EQUAL, NULL_RTX);
          if (! note)
            continue;
          set = single_set (insn);
          if (!set || !function_invariant_p (SET_SRC (set)))
            remove_note (insn, note);
        } while (insn != end && (insn = NEXT_INSN (insn)));

      reorder_insns (head, end, PREV_INSN (earliest));
    }

  /* Remove the jump and edge if we can.  */
  if (other_bb == new_dest)
    {
      delete_insn (jump);
      remove_edge (BRANCH_EDGE (test_bb));
      /* ??? Can't merge blocks here, as then_bb is still in use.
         At minimum, the merge will get done just before bb-reorder.  */
    }

  return TRUE;

 cancel:
  /* Undo any pending jump redirection/inversion registered with the
     change group.  */
  cancel_changes (0);
  return FALSE;
}
3567
 
3568
/* Main entry point for all if-conversion.  X_LIFE_DATA_OK is nonzero
   when global life information is valid and may be updated; it is
   latched into the file-scope flag life_data_ok.  Resets the statistics
   counters, repeatedly scans every basic block for convertible IF
   headers until no more changes are made, then rebuilds life info and
   dumps statistics as needed.  */

void
if_convert (int x_life_data_ok)
{
  basic_block bb;
  int pass;

  /* Reset per-invocation statistics and record whether life data may
     be consulted/updated during this run.  */
  num_possible_if_blocks = 0;
  num_updated_if_blocks = 0;
  num_true_changes = 0;
  life_data_ok = (x_life_data_ok != 0);

  /* Mark loop exit edges so conversion heuristics can see loop
     structure; skipped when the target forbids jump modification or
     when hot/cold partitioning constraints apply.  */
  if ((! targetm.cannot_modify_jumps_p ())
      && (!flag_reorder_blocks_and_partition || !no_new_pseudos
          || !targetm.have_named_sections))
    {
      struct loops loops;

      flow_loops_find (&loops);
      mark_loop_exit_edges (&loops);
      flow_loops_free (&loops);
      free_dominance_info (CDI_DOMINATORS);
    }

  /* Compute postdominators if we think we'll use them.  */
  if (HAVE_conditional_execution || life_data_ok)
    calculate_dominance_info (CDI_POST_DOMINATORS);

  if (life_data_ok)
    clear_bb_flags ();

  /* Go through each of the basic blocks looking for things to convert.  If we
     have conditional execution, we make multiple passes to allow us to handle
     IF-THEN{-ELSE} blocks within other IF-THEN{-ELSE} blocks.  */
  pass = 0;
  do
    {
      cond_exec_changed_p = FALSE;
      pass++;

#ifdef IFCVT_MULTIPLE_DUMPS
      if (dump_file && pass > 1)
        fprintf (dump_file, "\n\n========== Pass %d ==========\n", pass);
#endif

      FOR_EACH_BB (bb)
        {
          basic_block new_bb;
          /* find_if_header may merge blocks; keep following the
             replacement block it returns until no conversion applies.  */
          while ((new_bb = find_if_header (bb, pass)))
            bb = new_bb;
        }

#ifdef IFCVT_MULTIPLE_DUMPS
      if (dump_file && cond_exec_changed_p)
        print_rtl_with_bb (dump_file, get_insns ());
#endif
    }
  while (cond_exec_changed_p);

#ifdef IFCVT_MULTIPLE_DUMPS
  if (dump_file)
    fprintf (dump_file, "\n\n========== no more changes\n");
#endif

  free_dominance_info (CDI_POST_DOMINATORS);

  if (dump_file)
    fflush (dump_file);

  clear_aux_for_blocks ();

  /* Rebuild life info for basic blocks that require it.  */
  if (num_true_changes && life_data_ok)
    {
      /* If we allocated new pseudos, we must resize the array for sched1.  */
      if (max_regno < max_reg_num ())
        {
          max_regno = max_reg_num ();
          allocate_reg_info (max_regno, FALSE, FALSE);
        }
      update_life_info_in_dirty_blocks (UPDATE_LIFE_GLOBAL_RM_NOTES,
                                        PROP_DEATH_NOTES | PROP_SCAN_DEAD_CODE
                                        | PROP_KILL_DEAD_CODE);
    }

  /* Write the final stats.  */
  if (dump_file && num_possible_if_blocks > 0)
    {
      fprintf (dump_file,
               "\n%d possible IF blocks searched.\n",
               num_possible_if_blocks);
      fprintf (dump_file,
               "%d IF blocks converted.\n",
               num_updated_if_blocks);
      fprintf (dump_file,
               "%d true changes made.\n\n\n",
               num_true_changes);
    }

#ifdef ENABLE_CHECKING
  verify_flow_info ();
#endif
}
3672
 
3673
static bool
3674
gate_handle_if_conversion (void)
3675
{
3676
  return (optimize > 0);
3677
}
3678
 
3679
/* If-conversion and CFG cleanup.  Execute function for the "ce1" pass:
   when -fif-conversion is enabled, clean up the CFG, refresh register
   scan data, and run if-conversion without life-data updates
   (if_convert (0)); unconditionally finish with another CFG cleanup
   and register rescan accounted to the jump timevar.  */
static void
rest_of_handle_if_conversion (void)
{
  if (flag_if_conversion)
    {
      if (dump_file)
        dump_flow_info (dump_file);
      /* reg_scan must follow cleanup_cfg so register usage reflects the
         simplified CFG before conversion runs.  */
      cleanup_cfg (CLEANUP_EXPENSIVE);
      reg_scan (get_insns (), max_reg_num ());
      if_convert (0);
    }

  timevar_push (TV_JUMP);
  cleanup_cfg (CLEANUP_EXPENSIVE);
  reg_scan (get_insns (), max_reg_num ());
  timevar_pop (TV_JUMP);
}
3697
 
3698
/* Pass descriptor for the first RTL if-conversion pass ("ce1"), run
   early on RTL; timed under TV_IFCVT, dumps with letter 'C'.  */
struct tree_opt_pass pass_rtl_ifcvt =
{
  "ce1",                                /* name */
  gate_handle_if_conversion,            /* gate */
  rest_of_handle_if_conversion,         /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_IFCVT,                             /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func,                       /* todo_flags_finish */
  'C'                                   /* letter */
};
3714
 
3715
static bool
3716
gate_handle_if_after_combine (void)
3717
{
3718
  return (optimize > 0 && flag_if_conversion);
3719
}
3720
 
3721
 
3722
/* Rerun if-conversion, as combine may have simplified things enough
   to now meet sequence length restrictions.  */
static void
rest_of_handle_if_after_combine (void)
{
  /* Temporarily permit creation of new pseudo registers for the
     duration of this pass.
     NOTE(review): the if_convert (1) call below is commented out,
     leaving this pass a no-op apart from toggling no_new_pseudos —
     presumably a deliberate change in this port (upstream GCC 4.1
     calls if_convert (1) here); confirm intent before re-enabling.  */
  no_new_pseudos = 0;
/*   if_convert (1); */
  no_new_pseudos = 1;
}
3731
 
3732
/* Pass descriptor for the post-combine if-conversion pass ("ce2");
   timed under TV_IFCVT, dumps with letter 'C', collects garbage when
   finished.  */
struct tree_opt_pass pass_if_after_combine =
{
  "ce2",                                /* name */
  gate_handle_if_after_combine,         /* gate */
  rest_of_handle_if_after_combine,      /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_IFCVT,                             /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func |
  TODO_ggc_collect,                     /* todo_flags_finish */
  'C'                                   /* letter */
};
3749
 
3750
 
3751
static bool
3752
gate_handle_if_after_reload (void)
3753
{
3754
  return (optimize > 0);
3755
}
3756
 
3757
static void
3758
rest_of_handle_if_after_reload (void)
3759
{
3760
  /* Last attempt to optimize CFG, as scheduling, peepholing and insn
3761
     splitting possibly introduced more crossjumping opportunities.  */
3762
  cleanup_cfg (CLEANUP_EXPENSIVE
3763
               | CLEANUP_UPDATE_LIFE
3764
               | (flag_crossjumping ? CLEANUP_CROSSJUMP : 0));
3765
  if (flag_if_conversion2)
3766
    if_convert (1);
3767
}
3768
 
3769
 
3770
/* Pass descriptor for the post-reload if-conversion pass ("ce3");
   timed under TV_IFCVT2, dumps with letter 'E', collects garbage when
   finished.  */
struct tree_opt_pass pass_if_after_reload =
{
  "ce3",                                /* name */
  gate_handle_if_after_reload,          /* gate */
  rest_of_handle_if_after_reload,       /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_IFCVT2,                            /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func |
  TODO_ggc_collect,                     /* todo_flags_finish */
  'E'                                   /* letter */
};
3787
 
3788
 

powered by: WebSVN 2.1.0

© copyright 1999-2024 OpenCores.org, equivalent to Oliscience, all rights reserved. OpenCores®, registered trademark.