OpenCores
URL https://opencores.org/ocsvn/openrisc/openrisc/trunk

Subversion Repositories openrisc

[/] [openrisc/] [tags/] [gnu-src/] [gcc-4.5.1/] [gcc-4.5.1-or32-1.0rc1/] [gcc/] [ifcvt.c] - Blame information for rev 513

Go to most recent revision | Details | Compare with Previous | View Log

Line No. Rev Author Line
1 280 jeremybenn
/* If-conversion support.
2
   Copyright (C) 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2010
3
   Free Software Foundation, Inc.
4
 
5
   This file is part of GCC.
6
 
7
   GCC is free software; you can redistribute it and/or modify it
8
   under the terms of the GNU General Public License as published by
9
   the Free Software Foundation; either version 3, or (at your option)
10
   any later version.
11
 
12
   GCC is distributed in the hope that it will be useful, but WITHOUT
13
   ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
14
   or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public
15
   License for more details.
16
 
17
   You should have received a copy of the GNU General Public License
18
   along with GCC; see the file COPYING3.  If not see
19
   <http://www.gnu.org/licenses/>.  */
20
 
21
#include "config.h"
22
#include "system.h"
23
#include "coretypes.h"
24
#include "tm.h"
25
 
26
#include "rtl.h"
27
#include "regs.h"
28
#include "function.h"
29
#include "flags.h"
30
#include "insn-config.h"
31
#include "recog.h"
32
#include "except.h"
33
#include "hard-reg-set.h"
34
#include "basic-block.h"
35
#include "expr.h"
36
#include "real.h"
37
#include "output.h"
38
#include "optabs.h"
39
#include "toplev.h"
40
#include "tm_p.h"
41
#include "cfgloop.h"
42
#include "target.h"
43
#include "timevar.h"
44
#include "tree-pass.h"
45
#include "df.h"
46
#include "vec.h"
47
#include "vecprim.h"
48
#include "dbgcnt.h"
49
 
50
/* Provide zero fallbacks so the optional named patterns can be tested
   with a plain `if' instead of #ifdef.  */
#ifndef HAVE_conditional_move
#define HAVE_conditional_move 0
#endif
#ifndef HAVE_incscc
#define HAVE_incscc 0
#endif
#ifndef HAVE_decscc
#define HAVE_decscc 0
#endif
#ifndef HAVE_trap
#define HAVE_trap 0
#endif

/* Default budget of insns we are willing to conditionalize: one more
   than the cost of the branch being removed.  (The budget is doubled
   later when an ELSE block is also present.)  */
#ifndef MAX_CONDITIONAL_EXECUTE
#define MAX_CONDITIONAL_EXECUTE \
  (BRANCH_COST (optimize_function_for_speed_p (cfun), false) \
   + 1)
#endif

/* Nonzero to produce dump output for each if-conversion sub-pass.  */
#define IFCVT_MULTIPLE_DUMPS 1

#define NULL_BLOCK      ((basic_block) NULL)

/* # of IF-THEN or IF-THEN-ELSE blocks we looked at  */
static int num_possible_if_blocks;

/* # of IF-THEN or IF-THEN-ELSE blocks were converted to conditional
   execution.  */
static int num_updated_if_blocks;

/* # of changes made.  */
static int num_true_changes;

/* Whether conditional execution changes were made.  */
static int cond_exec_changed_p;

/* Forward references.  */
static int count_bb_insns (const_basic_block);
static bool cheap_bb_rtx_cost_p (const_basic_block, int);
static rtx first_active_insn (basic_block);
static rtx last_active_insn (basic_block, int);
static basic_block block_fallthru (basic_block);
static int cond_exec_process_insns (ce_if_block_t *, rtx, rtx, rtx, rtx, int);
static rtx cond_exec_get_condition (rtx);
static rtx noce_get_condition (rtx, rtx *, bool);
static int noce_operand_ok (const_rtx);
static void merge_if_block (ce_if_block_t *);
static int find_cond_trap (basic_block, edge, edge);
static basic_block find_if_header (basic_block, int);
static int block_jumps_and_fallthru_p (basic_block, basic_block);
static int noce_find_if_block (basic_block, edge, edge, int);
static int cond_exec_find_if_block (ce_if_block_t *);
static int find_if_case_1 (basic_block, edge, edge);
static int find_if_case_2 (basic_block, edge, edge);
static int find_memory (rtx *, void *);
static int dead_or_predicable (basic_block, basic_block, basic_block,
                               basic_block, int);
static void noce_emit_move_insn (rtx, rtx);
static rtx block_has_only_trap (basic_block);
109
 
110
/* Count the number of non-jump active insns in BB.  */
111
 
112
static int
113
count_bb_insns (const_basic_block bb)
114
{
115
  int count = 0;
116
  rtx insn = BB_HEAD (bb);
117
 
118
  while (1)
119
    {
120
      if (CALL_P (insn) || NONJUMP_INSN_P (insn))
121
        count++;
122
 
123
      if (insn == BB_END (bb))
124
        break;
125
      insn = NEXT_INSN (insn);
126
    }
127
 
128
  return count;
129
}
130
 
131
/* Determine whether the total insn_rtx_cost on non-jump insns in
132
   basic block BB is less than MAX_COST.  This function returns
133
   false if the cost of any instruction could not be estimated.  */
134
 
135
static bool
136
cheap_bb_rtx_cost_p (const_basic_block bb, int max_cost)
137
{
138
  int count = 0;
139
  rtx insn = BB_HEAD (bb);
140
  bool speed = optimize_bb_for_speed_p (bb);
141
 
142
  while (1)
143
    {
144
      if (NONJUMP_INSN_P (insn))
145
        {
146
          int cost = insn_rtx_cost (PATTERN (insn), speed);
147
          if (cost == 0)
148
            return false;
149
 
150
          /* If this instruction is the load or set of a "stack" register,
151
             such as a floating point register on x87, then the cost of
152
             speculatively executing this insn may need to include
153
             the additional cost of popping its result off of the
154
             register stack.  Unfortunately, correctly recognizing and
155
             accounting for this additional overhead is tricky, so for
156
             now we simply prohibit such speculative execution.  */
157
#ifdef STACK_REGS
158
          {
159
            rtx set = single_set (insn);
160
            if (set && STACK_REG_P (SET_DEST (set)))
161
              return false;
162
          }
163
#endif
164
 
165
          count += cost;
166
          if (count >= max_cost)
167
            return false;
168
        }
169
      else if (CALL_P (insn))
170
        return false;
171
 
172
      if (insn == BB_END (bb))
173
        break;
174
      insn = NEXT_INSN (insn);
175
    }
176
 
177
  return true;
178
}
179
 
180
/* Return the first non-jump active insn in the basic block.  */
181
 
182
static rtx
183
first_active_insn (basic_block bb)
184
{
185
  rtx insn = BB_HEAD (bb);
186
 
187
  if (LABEL_P (insn))
188
    {
189
      if (insn == BB_END (bb))
190
        return NULL_RTX;
191
      insn = NEXT_INSN (insn);
192
    }
193
 
194
  while (NOTE_P (insn) || DEBUG_INSN_P (insn))
195
    {
196
      if (insn == BB_END (bb))
197
        return NULL_RTX;
198
      insn = NEXT_INSN (insn);
199
    }
200
 
201
  if (JUMP_P (insn))
202
    return NULL_RTX;
203
 
204
  return insn;
205
}
206
 
207
/* Return the last non-jump active (non-jump) insn in the basic block.  */
208
 
209
static rtx
210
last_active_insn (basic_block bb, int skip_use_p)
211
{
212
  rtx insn = BB_END (bb);
213
  rtx head = BB_HEAD (bb);
214
 
215
  while (NOTE_P (insn)
216
         || JUMP_P (insn)
217
         || DEBUG_INSN_P (insn)
218
         || (skip_use_p
219
             && NONJUMP_INSN_P (insn)
220
             && GET_CODE (PATTERN (insn)) == USE))
221
    {
222
      if (insn == head)
223
        return NULL_RTX;
224
      insn = PREV_INSN (insn);
225
    }
226
 
227
  if (LABEL_P (insn))
228
    return NULL_RTX;
229
 
230
  return insn;
231
}
232
 
233
/* Return the basic block reached by falling though the basic block BB.  */
234
 
235
static basic_block
236
block_fallthru (basic_block bb)
237
{
238
  edge e;
239
  edge_iterator ei;
240
 
241
  FOR_EACH_EDGE (e, ei, bb->succs)
242
    if (e->flags & EDGE_FALLTHRU)
243
      break;
244
 
245
  return (e) ? e->dest : NULL_BLOCK;
246
}
247
 
248
/* Go through a bunch of insns, converting them to conditional
   execution format if possible.  Return TRUE if all of the non-note
   insns were processed.

   All pattern changes are queued via validate_change with IN_GROUP
   set; the caller is responsible for applying or cancelling the
   change group.

   CE_INFO is the if-block information (used only by the machine's
   IFCVT_MODIFY_INSN hook).  START and END bound the insns to convert;
   both must be non-NULL.  TEST is the conditional execution test RTX,
   and PROB_VAL the branch-taken probability attached to converted
   calls.  MOD_OK says whether an insn that modifies TEST is tolerated
   (only as the final insn of the run).  */

static int
cond_exec_process_insns (ce_if_block_t *ce_info ATTRIBUTE_UNUSED,
                         /* if block information */rtx start,
                         /* first insn to look at */rtx end,
                         /* last insn to look at */rtx test,
                         /* conditional execution test */rtx prob_val,
                         /* probability of branch taken. */int mod_ok)
{
  int must_be_last = FALSE;
  rtx insn;
  rtx xtest;
  rtx pattern;

  if (!start || !end)
    return FALSE;

  for (insn = start; ; insn = NEXT_INSN (insn))
    {
      /* Notes and debug insns need no conversion.  */
      if (NOTE_P (insn) || DEBUG_INSN_P (insn))
        goto insn_done;

      gcc_assert(NONJUMP_INSN_P (insn) || CALL_P (insn));

      /* Remove USE insns that get in the way.  */
      if (reload_completed && GET_CODE (PATTERN (insn)) == USE)
        {
          /* ??? Ug.  Actually unlinking the thing is problematic,
             given what we'd have to coordinate with our callers.  */
          SET_INSN_DELETED (insn);
          goto insn_done;
        }

      /* Last insn wasn't last?  An insn that modifies TEST must be
         the final insn converted; anything after it is a failure.  */
      if (must_be_last)
        return FALSE;

      if (modified_in_p (test, insn))
        {
          if (!mod_ok)
            return FALSE;
          must_be_last = TRUE;
        }

      /* Now build the conditional form of the instruction.  */
      pattern = PATTERN (insn);
      xtest = copy_rtx (test);

      /* If this is already a COND_EXEC, rewrite the test to be an AND of the
         two conditions.  */
      if (GET_CODE (pattern) == COND_EXEC)
        {
          if (GET_MODE (xtest) != GET_MODE (COND_EXEC_TEST (pattern)))
            return FALSE;

          xtest = gen_rtx_AND (GET_MODE (xtest), xtest,
                               COND_EXEC_TEST (pattern));
          pattern = COND_EXEC_CODE (pattern);
        }

      pattern = gen_rtx_COND_EXEC (VOIDmode, xtest, pattern);

      /* If the machine needs to modify the insn being conditionally executed,
         say for example to force a constant integer operand into a temp
         register, do so here.  */
#ifdef IFCVT_MODIFY_INSN
      IFCVT_MODIFY_INSN (ce_info, pattern, insn);
      if (! pattern)
        return FALSE;
#endif

      /* Queue the change; applied/cancelled later as a group.  */
      validate_change (insn, &PATTERN (insn), pattern, 1);

      /* Converted calls keep a REG_BR_PROB note carrying PROB_VAL.  */
      if (CALL_P (insn) && prob_val)
        validate_change (insn, &REG_NOTES (insn),
                         alloc_EXPR_LIST (REG_BR_PROB, prob_val,
                                          REG_NOTES (insn)), 1);

    insn_done:
      if (insn == end)
        break;
    }

  return TRUE;
}
336
 
337
/* Return the condition for a jump.  Do not do any special processing.  */
338
 
339
static rtx
340
cond_exec_get_condition (rtx jump)
341
{
342
  rtx test_if, cond;
343
 
344
  if (any_condjump_p (jump))
345
    test_if = SET_SRC (pc_set (jump));
346
  else
347
    return NULL_RTX;
348
  cond = XEXP (test_if, 0);
349
 
350
  /* If this branches to JUMP_LABEL when the condition is false,
351
     reverse the condition.  */
352
  if (GET_CODE (XEXP (test_if, 2)) == LABEL_REF
353
      && XEXP (XEXP (test_if, 2), 0) == JUMP_LABEL (jump))
354
    {
355
      enum rtx_code rev = reversed_comparison_code (cond, jump);
356
      if (rev == UNKNOWN)
357
        return NULL_RTX;
358
 
359
      cond = gen_rtx_fmt_ee (rev, GET_MODE (cond), XEXP (cond, 0),
360
                             XEXP (cond, 1));
361
    }
362
 
363
  return cond;
364
}
365
 
366
/* Given a simple IF-THEN or IF-THEN-ELSE block, attempt to convert it
   to conditional execution.  Return TRUE if we were successful at
   converting the block.

   DO_MULTIPLE_P selects whether a chain of &&/|| test blocks is
   handled as a whole; when it is false and such a chain exists, only
   the special case of && tests without an ELSE block is retried using
   the last test block alone.  On success the involved blocks are
   merged via merge_if_block and cond_exec_changed_p is set.  */

static int
cond_exec_process_if_block (ce_if_block_t * ce_info,
                            /* if block information */int do_multiple_p)
{
  basic_block test_bb = ce_info->test_bb;       /* last test block */
  basic_block then_bb = ce_info->then_bb;       /* THEN */
  basic_block else_bb = ce_info->else_bb;       /* ELSE or NULL */
  rtx test_expr;                /* expression in IF_THEN_ELSE that is tested */
  rtx then_start;               /* first insn in THEN block */
  rtx then_end;                 /* last insn + 1 in THEN block */
  rtx else_start = NULL_RTX;    /* first insn in ELSE block or NULL */
  rtx else_end = NULL_RTX;      /* last insn + 1 in ELSE block */
  int max;                      /* max # of insns to convert.  */
  int then_mod_ok;              /* whether conditional mods are ok in THEN */
  rtx true_expr;                /* test for else block insns */
  rtx false_expr;               /* test for then block insns */
  rtx true_prob_val;            /* probability of else block */
  rtx false_prob_val;           /* probability of then block */
  int n_insns;
  enum rtx_code false_code;

  /* If test is comprised of && or || elements, and we've failed at handling
     all of them together, just use the last test if it is the special case of
     && elements without an ELSE block.  */
  if (!do_multiple_p && ce_info->num_multiple_test_blocks)
    {
      if (else_bb || ! ce_info->and_and_p)
        return FALSE;

      ce_info->test_bb = test_bb = ce_info->last_test_bb;
      ce_info->num_multiple_test_blocks = 0;
      ce_info->num_and_and_blocks = 0;
      ce_info->num_or_or_blocks = 0;
    }

  /* Find the conditional jump to the ELSE or JOIN part, and isolate
     the test.  */
  test_expr = cond_exec_get_condition (BB_END (test_bb));
  if (! test_expr)
    return FALSE;

  /* If the conditional jump is more than just a conditional jump,
     then we can not do conditional execution conversion on this block.  */
  if (! onlyjump_p (BB_END (test_bb)))
    return FALSE;

  /* Collect the bounds of where we're to search, skipping any labels, jumps
     and notes at the beginning and end of the block.  Then count the total
     number of insns and see if it is small enough to convert.  */
  then_start = first_active_insn (then_bb);
  then_end = last_active_insn (then_bb, TRUE);
  n_insns = ce_info->num_then_insns = count_bb_insns (then_bb);
  max = MAX_CONDITIONAL_EXECUTE;

  if (else_bb)
    {
      /* With an ELSE block the insn budget is doubled.  */
      max *= 2;
      else_start = first_active_insn (else_bb);
      else_end = last_active_insn (else_bb, TRUE);
      n_insns += ce_info->num_else_insns = count_bb_insns (else_bb);
    }

  if (n_insns > max)
    return FALSE;

  /* Map test_expr/test_jump into the appropriate MD tests to use on
     the conditionally executed code.  */

  true_expr = test_expr;

  false_code = reversed_comparison_code (true_expr, BB_END (test_bb));
  if (false_code != UNKNOWN)
    false_expr = gen_rtx_fmt_ee (false_code, GET_MODE (true_expr),
                                 XEXP (true_expr, 0), XEXP (true_expr, 1));
  else
    false_expr = NULL_RTX;

#ifdef IFCVT_MODIFY_TESTS
  /* If the machine description needs to modify the tests, such as setting a
     conditional execution register from a comparison, it can do so here.  */
  IFCVT_MODIFY_TESTS (ce_info, true_expr, false_expr);

  /* See if the conversion failed.  */
  if (!true_expr || !false_expr)
    goto fail;
#endif

  /* Derive branch probabilities for the converted insns from the
     jump's REG_BR_PROB note, if any.  */
  true_prob_val = find_reg_note (BB_END (test_bb), REG_BR_PROB, NULL_RTX);
  if (true_prob_val)
    {
      true_prob_val = XEXP (true_prob_val, 0);
      false_prob_val = GEN_INT (REG_BR_PROB_BASE - INTVAL (true_prob_val));
    }
  else
    false_prob_val = NULL_RTX;

  /* If we have && or || tests, do them here.  These tests are in the adjacent
     blocks after the first block containing the test.  */
  if (ce_info->num_multiple_test_blocks > 0)
    {
      basic_block bb = test_bb;
      basic_block last_test_bb = ce_info->last_test_bb;

      if (! false_expr)
        goto fail;

      do
        {
          rtx start, end;
          rtx t, f;
          enum rtx_code f_code;

          bb = block_fallthru (bb);
          start = first_active_insn (bb);
          end = last_active_insn (bb, TRUE);
          if (start
              && ! cond_exec_process_insns (ce_info, start, end, false_expr,
                                            false_prob_val, FALSE))
            goto fail;

          /* If the conditional jump is more than just a conditional jump, then
             we can not do conditional execution conversion on this block.  */
          if (! onlyjump_p (BB_END (bb)))
            goto fail;

          /* Find the conditional jump and isolate the test.  */
          t = cond_exec_get_condition (BB_END (bb));
          if (! t)
            goto fail;

          f_code = reversed_comparison_code (t, BB_END (bb));
          if (f_code == UNKNOWN)
            goto fail;

          /* Accumulate the combined condition: AND of the trues and
             IOR of the falses for &&, and vice versa for ||.  */
          f = gen_rtx_fmt_ee (f_code, GET_MODE (t), XEXP (t, 0), XEXP (t, 1));
          if (ce_info->and_and_p)
            {
              t = gen_rtx_AND (GET_MODE (t), true_expr, t);
              f = gen_rtx_IOR (GET_MODE (t), false_expr, f);
            }
          else
            {
              t = gen_rtx_IOR (GET_MODE (t), true_expr, t);
              f = gen_rtx_AND (GET_MODE (t), false_expr, f);
            }

          /* If the machine description needs to modify the tests, such as
             setting a conditional execution register from a comparison, it can
             do so here.  */
#ifdef IFCVT_MODIFY_MULTIPLE_TESTS
          IFCVT_MODIFY_MULTIPLE_TESTS (ce_info, bb, t, f);

          /* See if the conversion failed.  */
          if (!t || !f)
            goto fail;
#endif

          true_expr = t;
          false_expr = f;
        }
      while (bb != last_test_bb);
    }

  /* For IF-THEN-ELSE blocks, we don't allow modifications of the test
     on the THEN block.  */
  then_mod_ok = (else_bb == NULL_BLOCK);

  /* Go through the THEN and ELSE blocks converting the insns if possible
     to conditional execution.  */

  if (then_end
      && (! false_expr
          || ! cond_exec_process_insns (ce_info, then_start, then_end,
                                        false_expr, false_prob_val,
                                        then_mod_ok)))
    goto fail;

  if (else_bb && else_end
      && ! cond_exec_process_insns (ce_info, else_start, else_end,
                                    true_expr, true_prob_val, TRUE))
    goto fail;

  /* If we cannot apply the changes, fail.  Do not go through the normal fail
     processing, since apply_change_group will call cancel_changes.  */
  if (! apply_change_group ())
    {
#ifdef IFCVT_MODIFY_CANCEL
      /* Cancel any machine dependent changes.  */
      IFCVT_MODIFY_CANCEL (ce_info);
#endif
      return FALSE;
    }

#ifdef IFCVT_MODIFY_FINAL
  /* Do any machine dependent final modifications.  */
  IFCVT_MODIFY_FINAL (ce_info);
#endif

  /* Conversion succeeded.  */
  if (dump_file)
    fprintf (dump_file, "%d insn%s converted to conditional execution.\n",
             n_insns, (n_insns == 1) ? " was" : "s were");

  /* Merge the blocks!  */
  merge_if_block (ce_info);
  cond_exec_changed_p = TRUE;
  return TRUE;

 fail:
#ifdef IFCVT_MODIFY_CANCEL
  /* Cancel any machine dependent changes.  */
  IFCVT_MODIFY_CANCEL (ce_info);
#endif

  cancel_changes (0);
  return FALSE;
}
587
 
588
/* Used by noce_process_if_block to communicate with its subroutines.

   The subroutines know that A and B may be evaluated freely.  They
   know that X is a register.  They should insert new instructions
   before cond_earliest.  */

struct noce_if_info
{
  /* The basic blocks that make up the IF-THEN-{ELSE-,}JOIN block.  */
  basic_block test_bb, then_bb, else_bb, join_bb;

  /* The jump that ends TEST_BB.  */
  rtx jump;

  /* The jump condition.  */
  rtx cond;

  /* New insns should be inserted before this one.  */
  rtx cond_earliest;

  /* Insns in the THEN and ELSE block.  There is always just this
     one insn in those blocks.  The insns are single_set insns.
     If there was no ELSE block, INSN_B is the last insn before
     COND_EARLIEST, or NULL_RTX.  In the former case, the insn
     operands are still valid, as if INSN_B was moved down below
     the jump.  */
  rtx insn_a, insn_b;

  /* The SET_SRC of INSN_A and INSN_B.  */
  rtx a, b;

  /* The SET_DEST of INSN_A.  */
  rtx x;

  /* True if this if block is not canonical.  In the canonical form of
     if blocks, the THEN_BB is the block reached via the fallthru edge
     from TEST_BB.  For the noce transformations, we allow the symmetric
     form as well.  */
  bool then_else_reversed;

  /* Estimated cost of the particular branch instruction.  */
  int branch_cost;
};
631
 
632
/* Forward declarations for the noce_* ("no conditional execution")
   transformations defined below.  */
static rtx noce_emit_store_flag (struct noce_if_info *, rtx, int, int);
static int noce_try_move (struct noce_if_info *);
static int noce_try_store_flag (struct noce_if_info *);
static int noce_try_addcc (struct noce_if_info *);
static int noce_try_store_flag_constants (struct noce_if_info *);
static int noce_try_store_flag_mask (struct noce_if_info *);
static rtx noce_emit_cmove (struct noce_if_info *, rtx, enum rtx_code, rtx,
                            rtx, rtx, rtx);
static int noce_try_cmove (struct noce_if_info *);
static int noce_try_cmove_arith (struct noce_if_info *);
static rtx noce_get_alt_condition (struct noce_if_info *, rtx, rtx *);
static int noce_try_minmax (struct noce_if_info *);
static int noce_try_abs (struct noce_if_info *);
static int noce_try_sign_mask (struct noce_if_info *);
646
 
647
/* Helper function for noce_try_store_flag*.

   Emit code to store the result of comparing IF_INFO's condition into
   X.  REVERSEP inverts the comparison; NORMALIZE is the desired
   nonzero result value (0 meaning "any STORE_FLAG_VALUE").  Returns
   the register holding the result, or NULL_RTX on failure.  */

static rtx
noce_emit_store_flag (struct noce_if_info *if_info, rtx x, int reversep,
                      int normalize)
{
  rtx cond = if_info->cond;
  int cond_complex;
  enum rtx_code code;

  /* The condition is "complex" when either comparison operand is not
     a general operand (e.g. it lives in a flags register).  */
  cond_complex = (! general_operand (XEXP (cond, 0), VOIDmode)
                  || ! general_operand (XEXP (cond, 1), VOIDmode));

  /* If earliest == jump, or when the condition is complex, try to
     build the store_flag insn directly.  */

  if (cond_complex)
    {
      /* Re-extract the condition directly from the jump; the sense of
         the comparison may need flipping depending on which arm the
         jump targets and on the block orientation.  */
      rtx set = pc_set (if_info->jump);
      cond = XEXP (SET_SRC (set), 0);
      if (GET_CODE (XEXP (SET_SRC (set), 2)) == LABEL_REF
          && XEXP (XEXP (SET_SRC (set), 2), 0) == JUMP_LABEL (if_info->jump))
        reversep = !reversep;
      if (if_info->then_else_reversed)
        reversep = !reversep;
    }

  if (reversep)
    code = reversed_comparison_code (cond, if_info->jump);
  else
    code = GET_CODE (cond);

  if ((if_info->cond_earliest == if_info->jump || cond_complex)
      && (normalize == 0 || STORE_FLAG_VALUE == normalize))
    {
      rtx tmp;

      /* Try emitting (set X (code op0 op1)) directly and keep it only
         if the target recognizes the pattern.  */
      tmp = gen_rtx_fmt_ee (code, GET_MODE (x), XEXP (cond, 0),
                            XEXP (cond, 1));
      tmp = gen_rtx_SET (VOIDmode, x, tmp);

      start_sequence ();
      tmp = emit_insn (tmp);

      if (recog_memoized (tmp) >= 0)
        {
          tmp = get_insns ();
          end_sequence ();
          emit_insn (tmp);

          if_info->cond_earliest = if_info->jump;

          return x;
        }

      end_sequence ();
    }

  /* Don't even try if the comparison operands or the mode of X are weird.  */
  if (cond_complex || !SCALAR_INT_MODE_P (GET_MODE (x)))
    return NULL_RTX;

  return emit_store_flag (x, code, XEXP (cond, 0),
                          XEXP (cond, 1), VOIDmode,
                          (code == LTU || code == LEU
                           || code == GEU || code == GTU), normalize);
}
714
 
715
/* Emit instruction to move an rtx, possibly into STRICT_LOW_PART.
   X is the destination/target and Y is the value to copy.

   If the straightforward move is not recognized, fall back to a
   store_bit_field for ZERO_EXTRACT destinations, or to expanding Y's
   unary/binary operation via the optab machinery.  */

static void
noce_emit_move_insn (rtx x, rtx y)
{
  enum machine_mode outmode;
  rtx outer, inner;
  int bitpos;

  if (GET_CODE (x) != STRICT_LOW_PART)
    {
      rtx seq, insn, target;
      optab ot;

      start_sequence ();
      /* Check that the SET_SRC is reasonable before calling emit_move_insn,
         otherwise construct a suitable SET pattern ourselves.  */
      insn = (OBJECT_P (y) || CONSTANT_P (y) || GET_CODE (y) == SUBREG)
             ? emit_move_insn (x, y)
             : emit_insn (gen_rtx_SET (VOIDmode, x, y));
      seq = get_insns ();
      end_sequence ();

      /* If the move insn was not recognized, try harder.  */
      if (recog_memoized (insn) <= 0)
        {
          if (GET_CODE (x) == ZERO_EXTRACT)
            {
              rtx op = XEXP (x, 0);
              unsigned HOST_WIDE_INT size = INTVAL (XEXP (x, 1));
              unsigned HOST_WIDE_INT start = INTVAL (XEXP (x, 2));

              /* store_bit_field expects START to be relative to
                 BYTES_BIG_ENDIAN and adjusts this value for machines with
                 BITS_BIG_ENDIAN != BYTES_BIG_ENDIAN.  In order to be able to
                 invoke store_bit_field again it is necessary to have the START
                 value from the first call.  */
              if (BITS_BIG_ENDIAN != BYTES_BIG_ENDIAN)
                {
                  if (MEM_P (op))
                    start = BITS_PER_UNIT - start - size;
                  else
                    {
                      gcc_assert (REG_P (op));
                      start = BITS_PER_WORD - start - size;
                    }
                }

              gcc_assert (start < (MEM_P (op) ? BITS_PER_UNIT : BITS_PER_WORD));
              store_bit_field (op, size, start, GET_MODE (x), y);
              return;
            }

          /* Otherwise, try expanding Y's operation through the optab
             table, targeting X; on success SEQ is replaced by the
             expanded sequence.  */
          switch (GET_RTX_CLASS (GET_CODE (y)))
            {
            case RTX_UNARY:
              ot = code_to_optab[GET_CODE (y)];
              if (ot)
                {
                  start_sequence ();
                  target = expand_unop (GET_MODE (y), ot, XEXP (y, 0), x, 0);
                  if (target != NULL_RTX)
                    {
                      if (target != x)
                        emit_move_insn (x, target);
                      seq = get_insns ();
                    }
                  end_sequence ();
                }
              break;

            case RTX_BIN_ARITH:
            case RTX_COMM_ARITH:
              ot = code_to_optab[GET_CODE (y)];
              if (ot)
                {
                  start_sequence ();
                  target = expand_binop (GET_MODE (y), ot,
                                         XEXP (y, 0), XEXP (y, 1),
                                         x, 0, OPTAB_DIRECT);
                  if (target != NULL_RTX)
                    {
                      if (target != x)
                          emit_move_insn (x, target);
                      seq = get_insns ();
                    }
                  end_sequence ();
                }
              break;

            default:
              break;
            }
        }

      emit_insn (seq);
      return;
    }

  /* STRICT_LOW_PART destination: write only the low-part bits via
     store_bit_field.  */
  outer = XEXP (x, 0);
  inner = XEXP (outer, 0);
  outmode = GET_MODE (outer);
  bitpos = SUBREG_BYTE (outer) * BITS_PER_UNIT;
  store_bit_field (inner, GET_MODE_BITSIZE (outmode), bitpos, outmode, y);
}
820
 
821
/* Return sequence of instructions generated by if conversion.  This
822
   function calls end_sequence() to end the current stream, ensures
823
   that are instructions are unshared, recognizable non-jump insns.
824
   On failure, this function returns a NULL_RTX.  */
825
 
826
static rtx
827
end_ifcvt_sequence (struct noce_if_info *if_info)
828
{
829
  rtx insn;
830
  rtx seq = get_insns ();
831
 
832
  set_used_flags (if_info->x);
833
  set_used_flags (if_info->cond);
834
  unshare_all_rtl_in_chain (seq);
835
  end_sequence ();
836
 
837
  /* Make sure that all of the instructions emitted are recognizable,
838
     and that we haven't introduced a new jump instruction.
839
     As an exercise for the reader, build a general mechanism that
840
     allows proper placement of required clobbers.  */
841
  for (insn = seq; insn; insn = NEXT_INSN (insn))
842
    if (JUMP_P (insn)
843
        || recog_memoized (insn) == -1)
844
      return NULL_RTX;
845
 
846
  return seq;
847
}
848
 
849
/* Convert "if (a != b) x = a; else x = b" into "x = a" and
850
   "if (a == b) x = a; else x = b" into "x = b".  */
851
 
852
static int
853
noce_try_move (struct noce_if_info *if_info)
854
{
855
  rtx cond = if_info->cond;
856
  enum rtx_code code = GET_CODE (cond);
857
  rtx y, seq;
858
 
859
  if (code != NE && code != EQ)
860
    return FALSE;
861
 
862
  /* This optimization isn't valid if either A or B could be a NaN
863
     or a signed zero.  */
864
  if (HONOR_NANS (GET_MODE (if_info->x))
865
      || HONOR_SIGNED_ZEROS (GET_MODE (if_info->x)))
866
    return FALSE;
867
 
868
  /* Check whether the operands of the comparison are A and in
869
     either order.  */
870
  if ((rtx_equal_p (if_info->a, XEXP (cond, 0))
871
       && rtx_equal_p (if_info->b, XEXP (cond, 1)))
872
      || (rtx_equal_p (if_info->a, XEXP (cond, 1))
873
          && rtx_equal_p (if_info->b, XEXP (cond, 0))))
874
    {
875
      y = (code == EQ) ? if_info->a : if_info->b;
876
 
877
      /* Avoid generating the move if the source is the destination.  */
878
      if (! rtx_equal_p (if_info->x, y))
879
        {
880
          start_sequence ();
881
          noce_emit_move_insn (if_info->x, y);
882
          seq = end_ifcvt_sequence (if_info);
883
          if (!seq)
884
            return FALSE;
885
 
886
          emit_insn_before_setloc (seq, if_info->jump,
887
                                   INSN_LOCATOR (if_info->insn_a));
888
        }
889
      return TRUE;
890
    }
891
  return FALSE;
892
}
893
 
894
/* Convert "if (test) x = 1; else x = 0".
895
 
896
   Only try 0 and STORE_FLAG_VALUE here.  Other combinations will be
897
   tried in noce_try_store_flag_constants after noce_try_cmove has had
898
   a go at the conversion.  */
899
 
900
static int
901
noce_try_store_flag (struct noce_if_info *if_info)
902
{
903
  int reversep;
904
  rtx target, seq;
905
 
906
  if (CONST_INT_P (if_info->b)
907
      && INTVAL (if_info->b) == STORE_FLAG_VALUE
908
      && if_info->a == const0_rtx)
909
    reversep = 0;
910
  else if (if_info->b == const0_rtx
911
           && CONST_INT_P (if_info->a)
912
           && INTVAL (if_info->a) == STORE_FLAG_VALUE
913
           && (reversed_comparison_code (if_info->cond, if_info->jump)
914
               != UNKNOWN))
915
    reversep = 1;
916
  else
917
    return FALSE;
918
 
919
  start_sequence ();
920
 
921
  target = noce_emit_store_flag (if_info, if_info->x, reversep, 0);
922
  if (target)
923
    {
924
      if (target != if_info->x)
925
        noce_emit_move_insn (if_info->x, target);
926
 
927
      seq = end_ifcvt_sequence (if_info);
928
      if (! seq)
929
        return FALSE;
930
 
931
      emit_insn_before_setloc (seq, if_info->jump,
932
                               INSN_LOCATOR (if_info->insn_a));
933
      return TRUE;
934
    }
935
  else
936
    {
937
      end_sequence ();
938
      return FALSE;
939
    }
940
}
941
 
942
/* Convert "if (test) x = a; else x = b", for A and B constant.  */
943
 
944
static int
945
noce_try_store_flag_constants (struct noce_if_info *if_info)
946
{
947
  rtx target, seq;
948
  int reversep;
949
  HOST_WIDE_INT itrue, ifalse, diff, tmp;
950
  int normalize, can_reverse;
951
  enum machine_mode mode;
952
 
953
  if (CONST_INT_P (if_info->a)
954
      && CONST_INT_P (if_info->b))
955
    {
956
      mode = GET_MODE (if_info->x);
957
      ifalse = INTVAL (if_info->a);
958
      itrue = INTVAL (if_info->b);
959
 
960
      /* Make sure we can represent the difference between the two values.  */
961
      if ((itrue - ifalse > 0)
962
          != ((ifalse < 0) != (itrue < 0) ? ifalse < 0 : ifalse < itrue))
963
        return FALSE;
964
 
965
      diff = trunc_int_for_mode (itrue - ifalse, mode);
966
 
967
      can_reverse = (reversed_comparison_code (if_info->cond, if_info->jump)
968
                     != UNKNOWN);
969
 
970
      reversep = 0;
971
      if (diff == STORE_FLAG_VALUE || diff == -STORE_FLAG_VALUE)
972
        normalize = 0;
973
      else if (ifalse == 0 && exact_log2 (itrue) >= 0
974
               && (STORE_FLAG_VALUE == 1
975
                   || if_info->branch_cost >= 2))
976
        normalize = 1;
977
      else if (itrue == 0 && exact_log2 (ifalse) >= 0 && can_reverse
978
               && (STORE_FLAG_VALUE == 1 || if_info->branch_cost >= 2))
979
        normalize = 1, reversep = 1;
980
      else if (itrue == -1
981
               && (STORE_FLAG_VALUE == -1
982
                   || if_info->branch_cost >= 2))
983
        normalize = -1;
984
      else if (ifalse == -1 && can_reverse
985
               && (STORE_FLAG_VALUE == -1 || if_info->branch_cost >= 2))
986
        normalize = -1, reversep = 1;
987
      else if ((if_info->branch_cost >= 2 && STORE_FLAG_VALUE == -1)
988
               || if_info->branch_cost >= 3)
989
        normalize = -1;
990
      else
991
        return FALSE;
992
 
993
      if (reversep)
994
        {
995
          tmp = itrue; itrue = ifalse; ifalse = tmp;
996
          diff = trunc_int_for_mode (-diff, mode);
997
        }
998
 
999
      start_sequence ();
1000
      target = noce_emit_store_flag (if_info, if_info->x, reversep, normalize);
1001
      if (! target)
1002
        {
1003
          end_sequence ();
1004
          return FALSE;
1005
        }
1006
 
1007
      /* if (test) x = 3; else x = 4;
1008
         =>   x = 3 + (test == 0);  */
1009
      if (diff == STORE_FLAG_VALUE || diff == -STORE_FLAG_VALUE)
1010
        {
1011
          target = expand_simple_binop (mode,
1012
                                        (diff == STORE_FLAG_VALUE
1013
                                         ? PLUS : MINUS),
1014
                                        GEN_INT (ifalse), target, if_info->x, 0,
1015
                                        OPTAB_WIDEN);
1016
        }
1017
 
1018
      /* if (test) x = 8; else x = 0;
1019
         =>   x = (test != 0) << 3;  */
1020
      else if (ifalse == 0 && (tmp = exact_log2 (itrue)) >= 0)
1021
        {
1022
          target = expand_simple_binop (mode, ASHIFT,
1023
                                        target, GEN_INT (tmp), if_info->x, 0,
1024
                                        OPTAB_WIDEN);
1025
        }
1026
 
1027
      /* if (test) x = -1; else x = b;
1028
         =>   x = -(test != 0) | b;  */
1029
      else if (itrue == -1)
1030
        {
1031
          target = expand_simple_binop (mode, IOR,
1032
                                        target, GEN_INT (ifalse), if_info->x, 0,
1033
                                        OPTAB_WIDEN);
1034
        }
1035
 
1036
      /* if (test) x = a; else x = b;
1037
         =>   x = (-(test != 0) & (b - a)) + a;  */
1038
      else
1039
        {
1040
          target = expand_simple_binop (mode, AND,
1041
                                        target, GEN_INT (diff), if_info->x, 0,
1042
                                        OPTAB_WIDEN);
1043
          if (target)
1044
            target = expand_simple_binop (mode, PLUS,
1045
                                          target, GEN_INT (ifalse),
1046
                                          if_info->x, 0, OPTAB_WIDEN);
1047
        }
1048
 
1049
      if (! target)
1050
        {
1051
          end_sequence ();
1052
          return FALSE;
1053
        }
1054
 
1055
      if (target != if_info->x)
1056
        noce_emit_move_insn (if_info->x, target);
1057
 
1058
      seq = end_ifcvt_sequence (if_info);
1059
      if (!seq)
1060
        return FALSE;
1061
 
1062
      emit_insn_before_setloc (seq, if_info->jump,
1063
                               INSN_LOCATOR (if_info->insn_a));
1064
      return TRUE;
1065
    }
1066
 
1067
  return FALSE;
1068
}
1069
 
1070
/* Convert "if (test) foo++" into "foo += (test != 0)", and
1071
   similarly for "foo--".  */
1072
 
1073
static int
1074
noce_try_addcc (struct noce_if_info *if_info)
1075
{
1076
  rtx target, seq;
1077
  int subtract, normalize;
1078
 
1079
  if (GET_CODE (if_info->a) == PLUS
1080
      && rtx_equal_p (XEXP (if_info->a, 0), if_info->b)
1081
      && (reversed_comparison_code (if_info->cond, if_info->jump)
1082
          != UNKNOWN))
1083
    {
1084
      rtx cond = if_info->cond;
1085
      enum rtx_code code = reversed_comparison_code (cond, if_info->jump);
1086
 
1087
      /* First try to use addcc pattern.  */
1088
      if (general_operand (XEXP (cond, 0), VOIDmode)
1089
          && general_operand (XEXP (cond, 1), VOIDmode))
1090
        {
1091
          start_sequence ();
1092
          target = emit_conditional_add (if_info->x, code,
1093
                                         XEXP (cond, 0),
1094
                                         XEXP (cond, 1),
1095
                                         VOIDmode,
1096
                                         if_info->b,
1097
                                         XEXP (if_info->a, 1),
1098
                                         GET_MODE (if_info->x),
1099
                                         (code == LTU || code == GEU
1100
                                          || code == LEU || code == GTU));
1101
          if (target)
1102
            {
1103
              if (target != if_info->x)
1104
                noce_emit_move_insn (if_info->x, target);
1105
 
1106
              seq = end_ifcvt_sequence (if_info);
1107
              if (!seq)
1108
                return FALSE;
1109
 
1110
              emit_insn_before_setloc (seq, if_info->jump,
1111
                                       INSN_LOCATOR (if_info->insn_a));
1112
              return TRUE;
1113
            }
1114
          end_sequence ();
1115
        }
1116
 
1117
      /* If that fails, construct conditional increment or decrement using
1118
         setcc.  */
1119
      if (if_info->branch_cost >= 2
1120
          && (XEXP (if_info->a, 1) == const1_rtx
1121
              || XEXP (if_info->a, 1) == constm1_rtx))
1122
        {
1123
          start_sequence ();
1124
          if (STORE_FLAG_VALUE == INTVAL (XEXP (if_info->a, 1)))
1125
            subtract = 0, normalize = 0;
1126
          else if (-STORE_FLAG_VALUE == INTVAL (XEXP (if_info->a, 1)))
1127
            subtract = 1, normalize = 0;
1128
          else
1129
            subtract = 0, normalize = INTVAL (XEXP (if_info->a, 1));
1130
 
1131
 
1132
          target = noce_emit_store_flag (if_info,
1133
                                         gen_reg_rtx (GET_MODE (if_info->x)),
1134
                                         1, normalize);
1135
 
1136
          if (target)
1137
            target = expand_simple_binop (GET_MODE (if_info->x),
1138
                                          subtract ? MINUS : PLUS,
1139
                                          if_info->b, target, if_info->x,
1140
                                          0, OPTAB_WIDEN);
1141
          if (target)
1142
            {
1143
              if (target != if_info->x)
1144
                noce_emit_move_insn (if_info->x, target);
1145
 
1146
              seq = end_ifcvt_sequence (if_info);
1147
              if (!seq)
1148
                return FALSE;
1149
 
1150
              emit_insn_before_setloc (seq, if_info->jump,
1151
                                       INSN_LOCATOR (if_info->insn_a));
1152
              return TRUE;
1153
            }
1154
          end_sequence ();
1155
        }
1156
    }
1157
 
1158
  return FALSE;
1159
}
1160
 
1161
/* Convert "if (test) x = 0;" to "x &= -(test == 0);"  */
1162
 
1163
static int
1164
noce_try_store_flag_mask (struct noce_if_info *if_info)
1165
{
1166
  rtx target, seq;
1167
  int reversep;
1168
 
1169
  reversep = 0;
1170
  if ((if_info->branch_cost >= 2
1171
       || STORE_FLAG_VALUE == -1)
1172
      && ((if_info->a == const0_rtx
1173
           && rtx_equal_p (if_info->b, if_info->x))
1174
          || ((reversep = (reversed_comparison_code (if_info->cond,
1175
                                                     if_info->jump)
1176
                           != UNKNOWN))
1177
              && if_info->b == const0_rtx
1178
              && rtx_equal_p (if_info->a, if_info->x))))
1179
    {
1180
      start_sequence ();
1181
      target = noce_emit_store_flag (if_info,
1182
                                     gen_reg_rtx (GET_MODE (if_info->x)),
1183
                                     reversep, -1);
1184
      if (target)
1185
        target = expand_simple_binop (GET_MODE (if_info->x), AND,
1186
                                      if_info->x,
1187
                                      target, if_info->x, 0,
1188
                                      OPTAB_WIDEN);
1189
 
1190
      if (target)
1191
        {
1192
          if (target != if_info->x)
1193
            noce_emit_move_insn (if_info->x, target);
1194
 
1195
          seq = end_ifcvt_sequence (if_info);
1196
          if (!seq)
1197
            return FALSE;
1198
 
1199
          emit_insn_before_setloc (seq, if_info->jump,
1200
                                   INSN_LOCATOR (if_info->insn_a));
1201
          return TRUE;
1202
        }
1203
 
1204
      end_sequence ();
1205
    }
1206
 
1207
  return FALSE;
1208
}
1209
 
1210
/* Helper function for noce_try_cmove and noce_try_cmove_arith.
   Emit a conditional move of VTRUE/VFALSE into X, selected by the
   comparison CODE of CMP_A against CMP_B.  Returns the rtx holding
   the result, or NULL_RTX on failure.  */

static rtx
noce_emit_cmove (struct noce_if_info *if_info, rtx x, enum rtx_code code,
                 rtx cmp_a, rtx cmp_b, rtx vfalse, rtx vtrue)
{
  /* If earliest == jump, try to build the cmove insn directly.
     This is helpful when combine has created some complex condition
     (like for alpha's cmovlbs) that we can't hope to regenerate
     through the normal interface.  */

  if (if_info->cond_earliest == if_info->jump)
    {
      rtx tmp;

      /* Hand-build (set x (if_then_else (code cmp_a cmp_b) vtrue vfalse))
         and test whether the backend recognizes it as-is.  */
      tmp = gen_rtx_fmt_ee (code, GET_MODE (if_info->cond), cmp_a, cmp_b);
      tmp = gen_rtx_IF_THEN_ELSE (GET_MODE (x), tmp, vtrue, vfalse);
      tmp = gen_rtx_SET (VOIDmode, x, tmp);

      start_sequence ();
      tmp = emit_insn (tmp);

      if (recog_memoized (tmp) >= 0)
        {
          /* Recognized: re-emit the probe sequence into the enclosing
             sequence and report success.  */
          tmp = get_insns ();
          end_sequence ();
          emit_insn (tmp);

          return x;
        }

      end_sequence ();
    }

  /* Don't even try if the comparison operands are weird.  */
  if (! general_operand (cmp_a, GET_MODE (cmp_a))
      || ! general_operand (cmp_b, GET_MODE (cmp_b)))
    return NULL_RTX;

#if HAVE_conditional_move
  /* The last argument flags unsigned comparisons for the expander.  */
  return emit_conditional_move (x, code, cmp_a, cmp_b, VOIDmode,
                                vtrue, vfalse, GET_MODE (x),
                                (code == LTU || code == GEU
                                 || code == LEU || code == GTU));
#else
  /* We'll never get here, as noce_process_if_block doesn't call the
     functions involved.  Ifdef code, however, should be discouraged
     because it leads to typos in the code not selected.  However,
     emit_conditional_move won't exist either.  */
  return NULL_RTX;
#endif
}
1262
 
1263
/* Try only simple constants and registers here.  More complex cases
1264
   are handled in noce_try_cmove_arith after noce_try_store_flag_arith
1265
   has had a go at it.  */
1266
 
1267
static int
1268
noce_try_cmove (struct noce_if_info *if_info)
1269
{
1270
  enum rtx_code code;
1271
  rtx target, seq;
1272
 
1273
  if ((CONSTANT_P (if_info->a) || register_operand (if_info->a, VOIDmode))
1274
      && (CONSTANT_P (if_info->b) || register_operand (if_info->b, VOIDmode)))
1275
    {
1276
      start_sequence ();
1277
 
1278
      code = GET_CODE (if_info->cond);
1279
      target = noce_emit_cmove (if_info, if_info->x, code,
1280
                                XEXP (if_info->cond, 0),
1281
                                XEXP (if_info->cond, 1),
1282
                                if_info->a, if_info->b);
1283
 
1284
      if (target)
1285
        {
1286
          if (target != if_info->x)
1287
            noce_emit_move_insn (if_info->x, target);
1288
 
1289
          seq = end_ifcvt_sequence (if_info);
1290
          if (!seq)
1291
            return FALSE;
1292
 
1293
          emit_insn_before_setloc (seq, if_info->jump,
1294
                                   INSN_LOCATOR (if_info->insn_a));
1295
          return TRUE;
1296
        }
1297
      else
1298
        {
1299
          end_sequence ();
1300
          return FALSE;
1301
        }
1302
    }
1303
 
1304
  return FALSE;
1305
}
1306
 
1307
/* Try more complex cases involving conditional_move: compute both
   arms unconditionally into registers, then select with a cmove.
   Handles the cse-time special case of two memory sources by
   conditionally selecting the address and doing a single load.
   Returns TRUE on success.  */

static int
noce_try_cmove_arith (struct noce_if_info *if_info)
{
  rtx a = if_info->a;
  rtx b = if_info->b;
  rtx x = if_info->x;
  rtx orig_a, orig_b;
  rtx insn_a, insn_b;
  rtx tmp, target;
  int is_mem = 0;
  int insn_cost;
  enum rtx_code code;

  /* A conditional move from two memory sources is equivalent to a
     conditional on their addresses followed by a load.  Don't do this
     early because it'll screw alias analysis.  Note that we've
     already checked for no side effects.  */
  /* ??? FIXME: Magic number 5.  */
  if (cse_not_expected
      && MEM_P (a) && MEM_P (b)
      && MEM_ADDR_SPACE (a) == MEM_ADDR_SPACE (b)
      && if_info->branch_cost >= 5)
    {
      enum machine_mode address_mode
        = targetm.addr_space.address_mode (MEM_ADDR_SPACE (a));

      /* From here on A, B and X are the *addresses*, not the values.  */
      a = XEXP (a, 0);
      b = XEXP (b, 0);
      x = gen_reg_rtx (address_mode);
      is_mem = 1;
    }

  /* ??? We could handle this if we knew that a load from A or B could
     not fault.  This is also true if we've already loaded
     from the address along the path from ENTRY.  */
  else if (may_trap_p (a) || may_trap_p (b))
    return FALSE;

  /* if (test) x = a + b; else x = c - d;
     => y = a + b;
        x = c - d;
        if (test)
          x = y;
  */

  code = GET_CODE (if_info->cond);
  insn_a = if_info->insn_a;
  insn_b = if_info->insn_b;

  /* Total insn_rtx_cost should be smaller than branch cost.  Exit
     if insn_rtx_cost can't be estimated.  */
  if (insn_a)
    {
      insn_cost = insn_rtx_cost (PATTERN (insn_a),
                                 optimize_bb_for_speed_p (BLOCK_FOR_INSN (insn_a)));
      if (insn_cost == 0 || insn_cost > COSTS_N_INSNS (if_info->branch_cost))
        return FALSE;
    }
  else
    insn_cost = 0;

  if (insn_b)
    {
      insn_cost += insn_rtx_cost (PATTERN (insn_b),
                                 optimize_bb_for_speed_p (BLOCK_FOR_INSN (insn_b)));
      if (insn_cost == 0 || insn_cost > COSTS_N_INSNS (if_info->branch_cost))
        return FALSE;
    }

  /* Possibly rearrange operands to make things come out more natural.  */
  if (reversed_comparison_code (if_info->cond, if_info->jump) != UNKNOWN)
    {
      int reversep = 0;
      if (rtx_equal_p (b, x))
        reversep = 1;
      else if (general_operand (b, GET_MODE (b)))
        reversep = 1;

      if (reversep)
        {
          /* Swap the arms and their defining insns, and reverse the
             condition to match.  */
          code = reversed_comparison_code (if_info->cond, if_info->jump);
          tmp = a, a = b, b = tmp;
          tmp = insn_a, insn_a = insn_b, insn_b = tmp;
        }
    }

  start_sequence ();

  orig_a = a;
  orig_b = b;

  /* If either operand is complex, load it into a register first.
     The best way to do this is to copy the original insn.  In this
     way we preserve any clobbers etc that the insn may have had.
     This is of course not possible in the IS_MEM case.  */
  if (! general_operand (a, GET_MODE (a)))
    {
      rtx set;

      if (is_mem)
        {
          tmp = gen_reg_rtx (GET_MODE (a));
          tmp = emit_insn (gen_rtx_SET (VOIDmode, tmp, a));
        }
      else if (! insn_a)
        goto end_seq_and_fail;
      else
        {
          /* Copy the whole original insn with its destination redirected
             to a fresh pseudo, preserving any clobbers.  */
          a = gen_reg_rtx (GET_MODE (a));
          tmp = copy_rtx (insn_a);
          set = single_set (tmp);
          SET_DEST (set) = a;
          tmp = emit_insn (PATTERN (tmp));
        }
      if (recog_memoized (tmp) < 0)
        goto end_seq_and_fail;
    }
  if (! general_operand (b, GET_MODE (b)))
    {
      rtx set, last;

      /* Unlike the A case above, the SET for B is deliberately built but
         not emitted yet, so it can still be placed *before* A's setup
         insn below if necessary.  */
      if (is_mem)
        {
          tmp = gen_reg_rtx (GET_MODE (b));
          tmp = gen_rtx_SET (VOIDmode, tmp, b);
        }
      else if (! insn_b)
        goto end_seq_and_fail;
      else
        {
          b = gen_reg_rtx (GET_MODE (b));
          tmp = copy_rtx (insn_b);
          set = single_set (tmp);
          SET_DEST (set) = b;
          tmp = PATTERN (tmp);
        }

      /* If insn to set up A clobbers any registers B depends on, try to
         swap insn that sets up A with the one that sets up B.  If even
         that doesn't help, punt.  */
      last = get_last_insn ();
      if (last && modified_in_p (orig_b, last))
        {
          tmp = emit_insn_before (tmp, get_insns ());
          if (modified_in_p (orig_a, tmp))
            goto end_seq_and_fail;
        }
      else
        tmp = emit_insn (tmp);

      if (recog_memoized (tmp) < 0)
        goto end_seq_and_fail;
    }

  target = noce_emit_cmove (if_info, x, code, XEXP (if_info->cond, 0),
                            XEXP (if_info->cond, 1), a, b);

  if (! target)
    goto end_seq_and_fail;

  /* If we're handling a memory for above, emit the load now.  */
  if (is_mem)
    {
      /* TARGET holds the selected address; build the MEM for the load.  */
      tmp = gen_rtx_MEM (GET_MODE (if_info->x), target);

      /* Copy over flags as appropriate.  */
      if (MEM_VOLATILE_P (if_info->a) || MEM_VOLATILE_P (if_info->b))
        MEM_VOLATILE_P (tmp) = 1;
      if (MEM_IN_STRUCT_P (if_info->a) && MEM_IN_STRUCT_P (if_info->b))
        MEM_IN_STRUCT_P (tmp) = 1;
      if (MEM_SCALAR_P (if_info->a) && MEM_SCALAR_P (if_info->b))
        MEM_SCALAR_P (tmp) = 1;
      if (MEM_ALIAS_SET (if_info->a) == MEM_ALIAS_SET (if_info->b))
        set_mem_alias_set (tmp, MEM_ALIAS_SET (if_info->a));
      set_mem_align (tmp,
                     MIN (MEM_ALIGN (if_info->a), MEM_ALIGN (if_info->b)));

      gcc_assert (MEM_ADDR_SPACE (if_info->a) == MEM_ADDR_SPACE (if_info->b));
      set_mem_addr_space (tmp, MEM_ADDR_SPACE (if_info->a));

      noce_emit_move_insn (if_info->x, tmp);
    }
  else if (target != x)
    noce_emit_move_insn (x, target);

  /* end_ifcvt_sequence closes the sequence itself on failure.  */
  tmp = end_ifcvt_sequence (if_info);
  if (!tmp)
    return FALSE;

  emit_insn_before_setloc (tmp, if_info->jump, INSN_LOCATOR (if_info->insn_a));
  return TRUE;

 end_seq_and_fail:
  end_sequence ();
  return FALSE;
}
1505
 
1506
/* For most cases, the simplified condition we found is the best
   choice, but this is not the case for the min/max/abs transforms.
   For these we wish to know that it is A or B in the condition.

   TARGET is the operand (if_info->a or ->b, possibly a constant) that
   must appear in the returned condition.  On success, *EARLIEST is set
   to the earliest insn the returned condition depends on; returns NULL
   if no suitable condition mentioning TARGET can be constructed.  */

static rtx
noce_get_alt_condition (struct noce_if_info *if_info, rtx target,
                        rtx *earliest)
{
  rtx cond, set, insn;
  int reverse;

  /* If target is already mentioned in the known condition, return it.  */
  if (reg_mentioned_p (target, if_info->cond))
    {
      *earliest = if_info->cond_earliest;
      return if_info->cond;
    }

  set = pc_set (if_info->jump);
  cond = XEXP (SET_SRC (set), 0);
  /* The branch is "reversed" when its taken edge is the fall-through
     label (ELSE arm of the IF_THEN_ELSE).  */
  reverse
    = GET_CODE (XEXP (SET_SRC (set), 2)) == LABEL_REF
      && XEXP (XEXP (SET_SRC (set), 2), 0) == JUMP_LABEL (if_info->jump);
  if (if_info->then_else_reversed)
    reverse = !reverse;

  /* If we're looking for a constant, try to make the conditional
     have that constant in it.  There are two reasons why it may
     not have the constant we want:

     1. GCC may have needed to put the constant in a register, because
        the target can't compare directly against that constant.  For
        this case, we look for a SET immediately before the comparison
        that puts a constant in that register.

     2. GCC may have canonicalized the conditional, for example
        replacing "if x < 4" with "if x <= 3".  We can undo that (or
        make equivalent types of changes) to get the constants we need
        if they're off by one in the right direction.  */

  if (CONST_INT_P (target))
    {
      enum rtx_code code = GET_CODE (if_info->cond);
      rtx op_a = XEXP (if_info->cond, 0);
      rtx op_b = XEXP (if_info->cond, 1);
      rtx prev_insn;

      /* First, look to see if we put a constant in a register.  */
      prev_insn = prev_nonnote_insn (if_info->cond_earliest);
      if (prev_insn
          && BLOCK_FOR_INSN (prev_insn)
             == BLOCK_FOR_INSN (if_info->cond_earliest)
          && INSN_P (prev_insn)
          && GET_CODE (PATTERN (prev_insn)) == SET)
        {
          rtx src = find_reg_equal_equiv_note (prev_insn);
          if (!src)
            src = SET_SRC (PATTERN (prev_insn));
          if (CONST_INT_P (src))
            {
              /* Substitute the constant for whichever comparison
                 operand that SET defines.  */
              if (rtx_equal_p (op_a, SET_DEST (PATTERN (prev_insn))))
                op_a = src;
              else if (rtx_equal_p (op_b, SET_DEST (PATTERN (prev_insn))))
                op_b = src;

              /* Keep the constant on the right-hand side.  */
              if (CONST_INT_P (op_a))
                {
                  rtx tmp = op_a;
                  op_a = op_b;
                  op_b = tmp;
                  code = swap_condition (code);
                }
            }
        }

      /* Now, look to see if we can get the right constant by
         adjusting the conditional.  Each case undoes an off-by-one
         canonicalization, e.g. LT 4 <-> LE 3.  */
      if (CONST_INT_P (op_b))
        {
          HOST_WIDE_INT desired_val = INTVAL (target);
          HOST_WIDE_INT actual_val = INTVAL (op_b);

          switch (code)
            {
            case LT:
              if (actual_val == desired_val + 1)
                {
                  code = LE;
                  op_b = GEN_INT (desired_val);
                }
              break;
            case LE:
              if (actual_val == desired_val - 1)
                {
                  code = LT;
                  op_b = GEN_INT (desired_val);
                }
              break;
            case GT:
              if (actual_val == desired_val - 1)
                {
                  code = GE;
                  op_b = GEN_INT (desired_val);
                }
              break;
            case GE:
              if (actual_val == desired_val + 1)
                {
                  code = GT;
                  op_b = GEN_INT (desired_val);
                }
              break;
            default:
              break;
            }
        }

      /* If we made any changes, generate a new conditional that is
         equivalent to what we started with, but has the right
         constants in it.  */
      if (code != GET_CODE (if_info->cond)
          || op_a != XEXP (if_info->cond, 0)
          || op_b != XEXP (if_info->cond, 1))
        {
          cond = gen_rtx_fmt_ee (code, GET_MODE (cond), op_a, op_b);
          *earliest = if_info->cond_earliest;
          return cond;
        }
    }

  cond = canonicalize_condition (if_info->jump, cond, reverse,
                                 earliest, target, false, true);
  if (! cond || ! reg_mentioned_p (target, cond))
    return NULL;

  /* We almost certainly searched back to a different place.
     Need to re-verify correct lifetimes.  */

  /* X may not be mentioned in the range (cond_earliest, jump].  */
  for (insn = if_info->jump; insn != *earliest; insn = PREV_INSN (insn))
    if (INSN_P (insn) && reg_overlap_mentioned_p (if_info->x, PATTERN (insn)))
      return NULL;

  /* A and B may not be modified in the range [cond_earliest, jump).  */
  for (insn = *earliest; insn != if_info->jump; insn = NEXT_INSN (insn))
    if (INSN_P (insn)
        && (modified_in_p (if_info->a, insn)
            || modified_in_p (if_info->b, insn)))
      return NULL;

  return cond;
}
1658
 
1659
/* Convert "if (a < b) x = a; else x = b;" to "x = min(a, b);", etc.  */
1660
 
1661
static int
1662
noce_try_minmax (struct noce_if_info *if_info)
1663
{
1664
  rtx cond, earliest, target, seq;
1665
  enum rtx_code code, op;
1666
  int unsignedp;
1667
 
1668
  /* ??? Reject modes with NaNs or signed zeros since we don't know how
1669
     they will be resolved with an SMIN/SMAX.  It wouldn't be too hard
1670
     to get the target to tell us...  */
1671
  if (HONOR_SIGNED_ZEROS (GET_MODE (if_info->x))
1672
      || HONOR_NANS (GET_MODE (if_info->x)))
1673
    return FALSE;
1674
 
1675
  cond = noce_get_alt_condition (if_info, if_info->a, &earliest);
1676
  if (!cond)
1677
    return FALSE;
1678
 
1679
  /* Verify the condition is of the form we expect, and canonicalize
1680
     the comparison code.  */
1681
  code = GET_CODE (cond);
1682
  if (rtx_equal_p (XEXP (cond, 0), if_info->a))
1683
    {
1684
      if (! rtx_equal_p (XEXP (cond, 1), if_info->b))
1685
        return FALSE;
1686
    }
1687
  else if (rtx_equal_p (XEXP (cond, 1), if_info->a))
1688
    {
1689
      if (! rtx_equal_p (XEXP (cond, 0), if_info->b))
1690
        return FALSE;
1691
      code = swap_condition (code);
1692
    }
1693
  else
1694
    return FALSE;
1695
 
1696
  /* Determine what sort of operation this is.  Note that the code is for
1697
     a taken branch, so the code->operation mapping appears backwards.  */
1698
  switch (code)
1699
    {
1700
    case LT:
1701
    case LE:
1702
    case UNLT:
1703
    case UNLE:
1704
      op = SMAX;
1705
      unsignedp = 0;
1706
      break;
1707
    case GT:
1708
    case GE:
1709
    case UNGT:
1710
    case UNGE:
1711
      op = SMIN;
1712
      unsignedp = 0;
1713
      break;
1714
    case LTU:
1715
    case LEU:
1716
      op = UMAX;
1717
      unsignedp = 1;
1718
      break;
1719
    case GTU:
1720
    case GEU:
1721
      op = UMIN;
1722
      unsignedp = 1;
1723
      break;
1724
    default:
1725
      return FALSE;
1726
    }
1727
 
1728
  start_sequence ();
1729
 
1730
  target = expand_simple_binop (GET_MODE (if_info->x), op,
1731
                                if_info->a, if_info->b,
1732
                                if_info->x, unsignedp, OPTAB_WIDEN);
1733
  if (! target)
1734
    {
1735
      end_sequence ();
1736
      return FALSE;
1737
    }
1738
  if (target != if_info->x)
1739
    noce_emit_move_insn (if_info->x, target);
1740
 
1741
  seq = end_ifcvt_sequence (if_info);
1742
  if (!seq)
1743
    return FALSE;
1744
 
1745
  emit_insn_before_setloc (seq, if_info->jump, INSN_LOCATOR (if_info->insn_a));
1746
  if_info->cond = cond;
1747
  if_info->cond_earliest = earliest;
1748
 
1749
  return TRUE;
1750
}
1751
 
1752
/* Convert "if (a < 0) x = -a; else x = a;" to "x = abs(a);",
1753
   "if (a < 0) x = ~a; else x = a;" to "x = one_cmpl_abs(a);",
1754
   etc.  */
1755
 
1756
static int
noce_try_abs (struct noce_if_info *if_info)
{
  rtx cond, earliest, target, seq, a, b, c;
  /* Nonzero when the result must be inverted at the end (NABS rather
     than ABS, or the one's-complement analogue).  */
  int negate;
  /* True when the pattern uses NOT rather than NEG, i.e. we are
     matching "one_cmpl_abs" instead of "abs".  */
  bool one_cmpl = false;

  /* Reject modes with signed zeros.  */
  if (HONOR_SIGNED_ZEROS (GET_MODE (if_info->x)))
    return FALSE;

  /* Recognize A and B as constituting an ABS or NABS.  The canonical
     form is a branch around the negation, taken when the object is the
     first operand of a comparison against 0 that evaluates to true.  */
  a = if_info->a;
  b = if_info->b;
  if (GET_CODE (a) == NEG && rtx_equal_p (XEXP (a, 0), b))
    negate = 0;
  else if (GET_CODE (b) == NEG && rtx_equal_p (XEXP (b, 0), a))
    {
      /* Swap so that B is always the plain operand and A the negation.  */
      c = a; a = b; b = c;
      negate = 1;
    }
  else if (GET_CODE (a) == NOT && rtx_equal_p (XEXP (a, 0), b))
    {
      negate = 0;
      one_cmpl = true;
    }
  else if (GET_CODE (b) == NOT && rtx_equal_p (XEXP (b, 0), a))
    {
      c = a; a = b; b = c;
      negate = 1;
      one_cmpl = true;
    }
  else
    return FALSE;

  cond = noce_get_alt_condition (if_info, b, &earliest);
  if (!cond)
    return FALSE;

  /* Verify the condition is of the form we expect.  */
  if (rtx_equal_p (XEXP (cond, 0), b))
    c = XEXP (cond, 1);
  else if (rtx_equal_p (XEXP (cond, 1), b))
    {
      /* B is the second operand, so the sense of the comparison (and
	 therefore of the negation) is flipped.  */
      c = XEXP (cond, 0);
      negate = !negate;
    }
  else
    return FALSE;

  /* Verify that C is zero.  Search one step backward for a
     REG_EQUAL note or a simple source if necessary.  */
  if (REG_P (c))
    {
      rtx set, insn = prev_nonnote_insn (earliest);
      if (insn
          && BLOCK_FOR_INSN (insn) == BLOCK_FOR_INSN (earliest)
          && (set = single_set (insn))
          && rtx_equal_p (SET_DEST (set), c))
        {
          rtx note = find_reg_equal_equiv_note (insn);
          if (note)
            c = XEXP (note, 0);
          else
            c = SET_SRC (set);
        }
      else
        return FALSE;
    }
  /* A constant-pool load may hide the zero; look through it.  */
  if (MEM_P (c)
      && GET_CODE (XEXP (c, 0)) == SYMBOL_REF
      && CONSTANT_POOL_ADDRESS_P (XEXP (c, 0)))
    c = get_pool_constant (XEXP (c, 0));

  /* Work around funny ideas get_condition has wrt canonicalization.
     Note that these rtx constants are known to be CONST_INT, and
     therefore imply integer comparisons.  */
  if (c == constm1_rtx && GET_CODE (cond) == GT)
    ;
  else if (c == const1_rtx && GET_CODE (cond) == LT)
    ;
  else if (c != CONST0_RTX (GET_MODE (b)))
    return FALSE;

  /* Determine what sort of operation this is.  */
  switch (GET_CODE (cond))
    {
    case LT:
    case LE:
    case UNLT:
    case UNLE:
      negate = !negate;
      break;
    case GT:
    case GE:
    case UNGT:
    case UNGE:
      break;
    default:
      return FALSE;
    }

  start_sequence ();
  if (one_cmpl)
    target = expand_one_cmpl_abs_nojump (GET_MODE (if_info->x), b,
                                         if_info->x);
  else
    target = expand_abs_nojump (GET_MODE (if_info->x), b, if_info->x, 1);

  /* ??? It's a quandary whether cmove would be better here, especially
     for integers.  Perhaps combine will clean things up.  */
  if (target && negate)
    {
      if (one_cmpl)
        target = expand_simple_unop (GET_MODE (target), NOT, target,
                                     if_info->x, 0);
      else
        target = expand_simple_unop (GET_MODE (target), NEG, target,
                                     if_info->x, 0);
    }

  if (! target)
    {
      end_sequence ();
      return FALSE;
    }

  if (target != if_info->x)
    noce_emit_move_insn (if_info->x, target);

  seq = end_ifcvt_sequence (if_info);
  if (!seq)
    return FALSE;

  emit_insn_before_setloc (seq, if_info->jump, INSN_LOCATOR (if_info->insn_a));
  /* Record the condition actually used so the caller can clean up the
     test block correctly.  */
  if_info->cond = cond;
  if_info->cond_earliest = earliest;

  return TRUE;
}
1898
 
1899
/* Convert "if (m < 0) x = b; else x = 0;" to "x = (m >> C) & b;".  */
1900
 
1901
static int
noce_try_sign_mask (struct noce_if_info *if_info)
{
  rtx cond, t, m, c, seq;
  enum machine_mode mode;
  enum rtx_code code;
  bool t_unconditional;

  cond = if_info->cond;
  code = GET_CODE (cond);
  m = XEXP (cond, 0);
  c = XEXP (cond, 1);

  /* Identify which arm is the zero and which is the value selected by
     the sign of M; T becomes the non-zero arm, or stays NULL if the
     pattern does not match "x = (m < 0) ? t : 0".  */
  t = NULL_RTX;
  if (if_info->a == const0_rtx)
    {
      if ((code == LT && c == const0_rtx)
          || (code == LE && c == constm1_rtx))
        t = if_info->b;
    }
  else if (if_info->b == const0_rtx)
    {
      if ((code == GE && c == const0_rtx)
          || (code == GT && c == constm1_rtx))
        t = if_info->a;
    }

  if (! t || side_effects_p (t))
    return FALSE;

  /* We currently don't handle different modes.  */
  mode = GET_MODE (t);
  if (GET_MODE (m) != mode)
    return FALSE;

  /* This is only profitable if T is unconditionally executed/evaluated in the
     original insn sequence or T is cheap.  The former happens if B is the
     non-zero (T) value and if INSN_B was taken from TEST_BB, or there was no
     INSN_B which can happen for e.g. conditional stores to memory.  For the
     cost computation use the block TEST_BB where the evaluation will end up
     after the transformation.  */
  t_unconditional =
    (t == if_info->b
     && (if_info->insn_b == NULL_RTX
         || BLOCK_FOR_INSN (if_info->insn_b) == if_info->test_bb));
  if (!(t_unconditional
        || (rtx_cost (t, SET, optimize_bb_for_speed_p (if_info->test_bb))
            < COSTS_N_INSNS (2))))
    return FALSE;

  start_sequence ();
  /* Use emit_store_flag to generate "m < 0 ? -1 : 0" instead of expanding
     "(signed) m >> 31" directly.  This benefits targets with specialized
     insns to obtain the signmask, but still uses ashr_optab otherwise.  */
  m = emit_store_flag (gen_reg_rtx (mode), LT, m, const0_rtx, mode, 0, -1);
  t = m ? expand_binop (mode, and_optab, m, t, NULL_RTX, 0, OPTAB_DIRECT)
        : NULL_RTX;

  /* Either step of the expansion may fail; discard the partial
     sequence in that case.  */
  if (!t)
    {
      end_sequence ();
      return FALSE;
    }

  noce_emit_move_insn (if_info->x, t);

  seq = end_ifcvt_sequence (if_info);
  if (!seq)
    return FALSE;

  emit_insn_before_setloc (seq, if_info->jump, INSN_LOCATOR (if_info->insn_a));
  return TRUE;
}
1974
 
1975
 
1976
/* Optimize away "if (x & C) x |= C" and similar bit manipulation
1977
   transformations.  */
1978
 
1979
static int
noce_try_bitop (struct noce_if_info *if_info)
{
  rtx cond, x, a, result, seq;
  enum machine_mode mode;
  enum rtx_code code;
  int bitnum;

  x = if_info->x;
  cond = if_info->cond;
  code = GET_CODE (cond);

  /* Check for no else condition.  */
  if (! rtx_equal_p (x, if_info->b))
    return FALSE;

  /* Check for a suitable condition.  */
  if (code != NE && code != EQ)
    return FALSE;
  if (XEXP (cond, 1) != const0_rtx)
    return FALSE;
  cond = XEXP (cond, 0);

  /* ??? We could also handle AND here.  */
  if (GET_CODE (cond) == ZERO_EXTRACT)
    {
      /* Require a single-bit extract of X itself at a constant
	 position.  */
      if (XEXP (cond, 1) != const1_rtx
          || !CONST_INT_P (XEXP (cond, 2))
          || ! rtx_equal_p (x, XEXP (cond, 0)))
        return FALSE;
      bitnum = INTVAL (XEXP (cond, 2));
      mode = GET_MODE (x);
      /* Normalize BITNUM so it always counts from the LSB.  */
      if (BITS_BIG_ENDIAN)
        bitnum = GET_MODE_BITSIZE (mode) - 1 - bitnum;
      if (bitnum < 0 || bitnum >= HOST_BITS_PER_WIDE_INT)
        return FALSE;
    }
  else
    return FALSE;

  a = if_info->a;
  if (GET_CODE (a) == IOR || GET_CODE (a) == XOR)
    {
      /* Check for "if (X & C) x = x op C".  */
      if (! rtx_equal_p (x, XEXP (a, 0))
          || !CONST_INT_P (XEXP (a, 1))
          || (INTVAL (XEXP (a, 1)) & GET_MODE_MASK (mode))
             != (unsigned HOST_WIDE_INT) 1 << bitnum)
        return FALSE;

      /* if ((x & C) == 0) x |= C; is transformed to x |= C.   */
      /* if ((x & C) != 0) x |= C; is transformed to nothing.  */
      if (GET_CODE (a) == IOR)
        result = (code == NE) ? a : NULL_RTX;
      else if (code == NE)
        {
          /* if ((x & C) == 0) x ^= C; is transformed to x |= C.   */
          result = gen_int_mode ((HOST_WIDE_INT) 1 << bitnum, mode);
          result = simplify_gen_binary (IOR, mode, x, result);
        }
      else
        {
          /* if ((x & C) != 0) x ^= C; is transformed to x &= ~C.  */
          result = gen_int_mode (~((HOST_WIDE_INT) 1 << bitnum), mode);
          result = simplify_gen_binary (AND, mode, x, result);
        }
    }
  else if (GET_CODE (a) == AND)
    {
      /* Check for "if (X & C) x &= ~C".  */
      if (! rtx_equal_p (x, XEXP (a, 0))
          || !CONST_INT_P (XEXP (a, 1))
          || (INTVAL (XEXP (a, 1)) & GET_MODE_MASK (mode))
             != (~((HOST_WIDE_INT) 1 << bitnum) & GET_MODE_MASK (mode)))
        return FALSE;

      /* if ((x & C) == 0) x &= ~C; is transformed to nothing.  */
      /* if ((x & C) != 0) x &= ~C; is transformed to x &= ~C.  */
      result = (code == EQ) ? a : NULL_RTX;
    }
  else
    return FALSE;

  /* A NULL RESULT means the store is redundant and the whole construct
     collapses to nothing; still return TRUE so the blocks are removed.  */
  if (result)
    {
      start_sequence ();
      noce_emit_move_insn (x, result);
      seq = end_ifcvt_sequence (if_info);
      if (!seq)
        return FALSE;

      emit_insn_before_setloc (seq, if_info->jump,
                               INSN_LOCATOR (if_info->insn_a));
    }
  return TRUE;
}
2075
 
2076
 
2077
/* Similar to get_condition, only the resulting condition must be
2078
   valid at JUMP, instead of at EARLIEST.
2079
 
2080
   If THEN_ELSE_REVERSED is true, the fallthrough does not go to the
2081
   THEN block of the caller, and we have to reverse the condition.  */
2082
 
2083
static rtx
noce_get_condition (rtx jump, rtx *earliest, bool then_else_reversed)
{
  rtx cond, set, tmp;
  bool reverse;

  /* Only simple conditional jumps are handled.  */
  if (! any_condjump_p (jump))
    return NULL_RTX;

  set = pc_set (jump);

  /* If this branches to JUMP_LABEL when the condition is false,
     reverse the condition.  */
  reverse = (GET_CODE (XEXP (SET_SRC (set), 2)) == LABEL_REF
             && XEXP (XEXP (SET_SRC (set), 2), 0) == JUMP_LABEL (jump));

  /* We may have to reverse because the caller's if block is not canonical,
     i.e. the THEN block isn't the fallthrough block for the TEST block
     (see find_if_header).  */
  if (then_else_reversed)
    reverse = !reverse;

  /* If the condition variable is a register and is MODE_INT, accept it.  */

  cond = XEXP (SET_SRC (set), 0);
  tmp = XEXP (cond, 0);
  if (REG_P (tmp) && GET_MODE_CLASS (GET_MODE (tmp)) == MODE_INT)
    {
      /* The condition is valid right at the jump itself in this case.  */
      *earliest = jump;

      if (reverse)
        cond = gen_rtx_fmt_ee (reverse_condition (GET_CODE (cond)),
                               GET_MODE (cond), tmp, XEXP (cond, 1));
      return cond;
    }

  /* Otherwise, fall back on canonicalize_condition to do the dirty
     work of manipulating MODE_CC values and COMPARE rtx codes.  */
  return canonicalize_condition (jump, cond, reverse, earliest,
                                 NULL_RTX, false, true);
}
2124
 
2125
/* Return true if OP is ok for if-then-else processing.  */
2126
 
2127
static int
2128
noce_operand_ok (const_rtx op)
2129
{
2130
  /* We special-case memories, so handle any of them with
2131
     no address side effects.  */
2132
  if (MEM_P (op))
2133
    return ! side_effects_p (XEXP (op, 0));
2134
 
2135
  if (side_effects_p (op))
2136
    return FALSE;
2137
 
2138
  return ! may_trap_p (op);
2139
}
2140
 
2141
/* Return true if a write into MEM may trap or fault.  */
2142
 
2143
static bool
2144
noce_mem_write_may_trap_or_fault_p (const_rtx mem)
2145
{
2146
  rtx addr;
2147
 
2148
  if (MEM_READONLY_P (mem))
2149
    return true;
2150
 
2151
  if (may_trap_or_fault_p (mem))
2152
    return true;
2153
 
2154
  addr = XEXP (mem, 0);
2155
 
2156
  /* Call target hook to avoid the effects of -fpic etc....  */
2157
  addr = targetm.delegitimize_address (addr);
2158
 
2159
  while (addr)
2160
    switch (GET_CODE (addr))
2161
      {
2162
      case CONST:
2163
      case PRE_DEC:
2164
      case PRE_INC:
2165
      case POST_DEC:
2166
      case POST_INC:
2167
      case POST_MODIFY:
2168
        addr = XEXP (addr, 0);
2169
        break;
2170
      case LO_SUM:
2171
      case PRE_MODIFY:
2172
        addr = XEXP (addr, 1);
2173
        break;
2174
      case PLUS:
2175
        if (CONST_INT_P (XEXP (addr, 1)))
2176
          addr = XEXP (addr, 0);
2177
        else
2178
          return false;
2179
        break;
2180
      case LABEL_REF:
2181
        return true;
2182
      case SYMBOL_REF:
2183
        if (SYMBOL_REF_DECL (addr)
2184
            && decl_readonly_section (SYMBOL_REF_DECL (addr), 0))
2185
          return true;
2186
        return false;
2187
      default:
2188
        return false;
2189
      }
2190
 
2191
  return false;
2192
}
2193
 
2194
/* Return whether we can use store speculation for MEM.  TOP_BB is the
2195
   basic block above the conditional block where we are considering
2196
   doing the speculative store.  We look for whether MEM is set
2197
   unconditionally later in the function.  */
2198
 
2199
static bool
noce_can_store_speculate_p (basic_block top_bb, const_rtx mem)
{
  basic_block dominator;

  /* Walk up the post-dominator tree from TOP_BB: every block visited is
     guaranteed to execute whenever TOP_BB does.  */
  for (dominator = get_immediate_dominator (CDI_POST_DOMINATORS, top_bb);
       dominator != NULL;
       dominator = get_immediate_dominator (CDI_POST_DOMINATORS, dominator))
    {
      rtx insn;

      FOR_BB_INSNS (dominator, insn)
        {
          /* If we see something that might be a memory barrier, we
             have to stop looking.  Even if the MEM is set later in
             the function, we still don't want to set it
             unconditionally before the barrier.  */
          if (INSN_P (insn)
              && (volatile_insn_p (PATTERN (insn))
                  || (CALL_P (insn) && (!RTL_CONST_CALL_P (insn)))))
            return false;

          /* An unconditional later store to MEM makes speculation safe.  */
          if (memory_modified_in_insn_p (mem, insn))
            return true;
          /* If MEM's address is clobbered first, the later store would
             refer to a different location — give up.  */
          if (modified_in_p (XEXP (mem, 0), insn))
            return false;

        }
    }

  return false;
}
2231
 
2232
/* Given a simple IF-THEN-JOIN or IF-THEN-ELSE-JOIN block, attempt to convert
2233
   it without using conditional execution.  Return TRUE if we were successful
2234
   at converting the block.  */
2235
 
2236
static int
noce_process_if_block (struct noce_if_info *if_info)
{
  basic_block test_bb = if_info->test_bb;       /* test block */
  basic_block then_bb = if_info->then_bb;       /* THEN */
  basic_block else_bb = if_info->else_bb;       /* ELSE or NULL */
  basic_block join_bb = if_info->join_bb;       /* JOIN */
  rtx jump = if_info->jump;
  rtx cond = if_info->cond;
  rtx insn_a, insn_b;
  rtx set_a, set_b;
  rtx orig_x, x, a, b;

  /* We're looking for patterns of the form

     (1) if (...) x = a; else x = b;
     (2) x = b; if (...) x = a;
     (3) if (...) x = a;   // as if with an initial x = x.

     The later patterns require jumps to be more expensive.

     ??? For future expansion, look for multiple X in such patterns.  */

  /* Look for one of the potential sets.  */
  insn_a = first_active_insn (then_bb);
  if (! insn_a
      || insn_a != last_active_insn (then_bb, FALSE)
      || (set_a = single_set (insn_a)) == NULL_RTX)
    return FALSE;

  x = SET_DEST (set_a);
  a = SET_SRC (set_a);

  /* Look for the other potential set.  Make sure we've got equivalent
     destinations.  */
  /* ??? This is overconservative.  Storing to two different mems is
     as easy as conditionally computing the address.  Storing to a
     single mem merely requires a scratch memory to use as one of the
     destination addresses; often the memory immediately below the
     stack pointer is available for this.  */
  set_b = NULL_RTX;
  if (else_bb)
    {
      /* Pattern (1): the ELSE block must also be a single set of X.  */
      insn_b = first_active_insn (else_bb);
      if (! insn_b
          || insn_b != last_active_insn (else_bb, FALSE)
          || (set_b = single_set (insn_b)) == NULL_RTX
          || ! rtx_equal_p (x, SET_DEST (set_b)))
        return FALSE;
    }
  else
    {
      /* Pattern (2): look just above the condition for "x = b".  */
      insn_b = prev_nonnote_insn (if_info->cond_earliest);
      while (insn_b && DEBUG_INSN_P (insn_b))
        insn_b = prev_nonnote_insn (insn_b);
      /* We're going to be moving the evaluation of B down from above
         COND_EARLIEST to JUMP.  Make sure the relevant data is still
         intact.  */
      if (! insn_b
          || BLOCK_FOR_INSN (insn_b) != BLOCK_FOR_INSN (if_info->cond_earliest)
          || !NONJUMP_INSN_P (insn_b)
          || (set_b = single_set (insn_b)) == NULL_RTX
          || ! rtx_equal_p (x, SET_DEST (set_b))
          || ! noce_operand_ok (SET_SRC (set_b))
          || reg_overlap_mentioned_p (x, SET_SRC (set_b))
          || modified_between_p (SET_SRC (set_b), insn_b, jump)
          /* Likewise with X.  In particular this can happen when
             noce_get_condition looks farther back in the instruction
             stream than one might expect.  */
          || reg_overlap_mentioned_p (x, cond)
          || reg_overlap_mentioned_p (x, a)
          || modified_between_p (x, insn_b, jump))
        insn_b = set_b = NULL_RTX;
    }

  /* If x has side effects then only the if-then-else form is safe to
     convert.  But even in that case we would need to restore any notes
     (such as REG_INC) at then end.  That can be tricky if
     noce_emit_move_insn expands to more than one insn, so disable the
     optimization entirely for now if there are side effects.  */
  if (side_effects_p (x))
    return FALSE;

  /* With no ELSE value, "x" itself serves as B (pattern (3)).  */
  b = (set_b ? SET_SRC (set_b) : x);

  /* Only operate on register destinations, and even then avoid extending
     the lifetime of hard registers on small register class machines.  */
  orig_x = x;
  if (!REG_P (x)
      || (SMALL_REGISTER_CLASSES
          && REGNO (x) < FIRST_PSEUDO_REGISTER))
    {
      if (GET_MODE (x) == BLKmode)
        return FALSE;

      if (GET_CODE (x) == ZERO_EXTRACT
          && (!CONST_INT_P (XEXP (x, 1))
              || !CONST_INT_P (XEXP (x, 2))))
        return FALSE;

      /* Compute into a fresh pseudo; the final move back to ORIG_X is
	 emitted at "success" below.  */
      x = gen_reg_rtx (GET_MODE (GET_CODE (x) == STRICT_LOW_PART
                                 ? XEXP (x, 0) : x));
    }

  /* Don't operate on sources that may trap or are volatile.  */
  if (! noce_operand_ok (a) || ! noce_operand_ok (b))
    return FALSE;

 retry:
  /* Set up the info block for our subroutines.  */
  if_info->insn_a = insn_a;
  if_info->insn_b = insn_b;
  if_info->x = x;
  if_info->a = a;
  if_info->b = b;

  /* Try optimizations in some approximation of a useful order.  */
  /* ??? Should first look to see if X is live incoming at all.  If it
     isn't, we don't need anything but an unconditional set.  */

  /* Look and see if A and B are really the same.  Avoid creating silly
     cmove constructs that no one will fix up later.  */
  if (rtx_equal_p (a, b))
    {
      /* If we have an INSN_B, we don't have to create any new rtl.  Just
         move the instruction that we already have.  If we don't have an
         INSN_B, that means that A == X, and we've got a noop move.  In
         that case don't do anything and let the code below delete INSN_A.  */
      if (insn_b && else_bb)
        {
          rtx note;

          if (else_bb && insn_b == BB_END (else_bb))
            BB_END (else_bb) = PREV_INSN (insn_b);
          reorder_insns (insn_b, insn_b, PREV_INSN (jump));

          /* If there was a REG_EQUAL note, delete it since it may have been
             true due to this insn being after a jump.  */
          if ((note = find_reg_note (insn_b, REG_EQUAL, NULL_RTX)) != 0)
            remove_note (insn_b, note);

          insn_b = NULL_RTX;
        }
      /* If we have "x = b; if (...) x = a;", and x has side-effects, then
         x must be executed twice.  */
      else if (insn_b && side_effects_p (orig_x))
        return FALSE;

      x = orig_x;
      goto success;
    }

  if (!set_b && MEM_P (orig_x))
    {
      /* Disallow the "if (...) x = a;" form (implicit "else x = x;")
         for optimizations if writing to x may trap or fault,
         i.e. it's a memory other than a static var or a stack slot,
         is misaligned on strict aligned machines or is read-only.  If
         x is a read-only memory, then the program is valid only if we
         avoid the store into it.  If there are stores on both the
         THEN and ELSE arms, then we can go ahead with the conversion;
         either the program is broken, or the condition is always
         false such that the other memory is selected.  */
      if (noce_mem_write_may_trap_or_fault_p (orig_x))
        return FALSE;

      /* Avoid store speculation: given "if (...) x = a" where x is a
         MEM, we only want to do the store if x is always set
         somewhere in the function.  This avoids cases like
           if (pthread_mutex_trylock(mutex))
             ++global_variable;
         where we only want global_variable to be changed if the mutex
         is held.  FIXME: This should ideally be expressed directly in
         RTL somehow.  */
      if (!noce_can_store_speculate_p (test_bb, orig_x))
        return FALSE;
    }

  if (noce_try_move (if_info))
    goto success;
  if (noce_try_store_flag (if_info))
    goto success;
  if (noce_try_bitop (if_info))
    goto success;
  if (noce_try_minmax (if_info))
    goto success;
  if (noce_try_abs (if_info))
    goto success;
  if (HAVE_conditional_move
      && noce_try_cmove (if_info))
    goto success;
  if (! targetm.have_conditional_execution ())
    {
      if (noce_try_store_flag_constants (if_info))
        goto success;
      if (noce_try_addcc (if_info))
        goto success;
      if (noce_try_store_flag_mask (if_info))
        goto success;
      if (HAVE_conditional_move
          && noce_try_cmove_arith (if_info))
        goto success;
      if (noce_try_sign_mask (if_info))
        goto success;
    }

  /* If we found "x = b" above but nothing matched, retry once treating
     the construct as pattern (3), i.e. with B being X itself.  */
  if (!else_bb && set_b)
    {
      insn_b = set_b = NULL_RTX;
      b = orig_x;
      goto retry;
    }

  return FALSE;

 success:

  /* If we used a temporary, fix it up now.  */
  if (orig_x != x)
    {
      rtx seq;

      start_sequence ();
      noce_emit_move_insn (orig_x, x);
      seq = get_insns ();
      set_used_flags (orig_x);
      unshare_all_rtl_in_chain (seq);
      end_sequence ();

      emit_insn_before_setloc (seq, BB_END (test_bb), INSN_LOCATOR (insn_a));
    }

  /* The original THEN and ELSE blocks may now be removed.  The test block
     must now jump to the join block.  If the test block and the join block
     can be merged, do so.  */
  if (else_bb)
    {
      delete_basic_block (else_bb);
      num_true_changes++;
    }
  else
    remove_edge (find_edge (test_bb, join_bb));

  remove_edge (find_edge (then_bb, join_bb));
  redirect_edge_and_branch_force (single_succ_edge (test_bb), join_bb);
  delete_basic_block (then_bb);
  num_true_changes++;

  if (can_merge_blocks_p (test_bb, join_bb))
    {
      merge_blocks (test_bb, join_bb);
      num_true_changes++;
    }

  num_updated_if_blocks++;
  return TRUE;
}
2493
 
2494
/* Check whether a block is suitable for conditional move conversion.
2495
   Every insn must be a simple set of a register to a constant or a
2496
   register.  For each assignment, store the value in the array VALS,
2497
   indexed by register number, then store the register number in
2498
   REGS.  COND is the condition we will test.  */
2499
 
2500
static int
check_cond_move_block (basic_block bb, rtx *vals, VEC (int, heap) **regs, rtx cond)
{
  rtx insn;

  /* We can only handle simple jumps at the end of the basic block.
     It is almost impossible to update the CFG otherwise.  */
  insn = BB_END (bb);
  if (JUMP_P (insn) && !onlyjump_p (insn))
    return FALSE;

  FOR_BB_INSNS (bb, insn)
    {
      rtx set, dest, src;

      if (!NONDEBUG_INSN_P (insn) || JUMP_P (insn))
        continue;
      /* Every real insn must be a single set.  */
      set = single_set (insn);
      if (!set)
        return FALSE;

      dest = SET_DEST (set);
      src = SET_SRC (set);
      if (!REG_P (dest)
          || (SMALL_REGISTER_CLASSES && HARD_REGISTER_P (dest)))
        return FALSE;

      /* The source must be a constant or a plain register.  */
      if (!CONSTANT_P (src) && !register_operand (src, VOIDmode))
        return FALSE;

      if (side_effects_p (src) || side_effects_p (dest))
        return FALSE;

      if (may_trap_p (src) || may_trap_p (dest))
        return FALSE;

      /* Don't try to handle this if the source register was
         modified earlier in the block.  */
      if ((REG_P (src)
           && vals[REGNO (src)] != NULL)
          || (GET_CODE (src) == SUBREG && REG_P (SUBREG_REG (src))
              && vals[REGNO (SUBREG_REG (src))] != NULL))
        return FALSE;

      /* Don't try to handle this if the destination register was
         modified earlier in the block.  */
      if (vals[REGNO (dest)] != NULL)
        return FALSE;

      /* Don't try to handle this if the condition uses the
         destination register.  */
      if (reg_overlap_mentioned_p (dest, cond))
        return FALSE;

      /* Don't try to handle this if the source register is modified
         later in the block.  */
      if (!CONSTANT_P (src)
          && modified_between_p (src, insn, NEXT_INSN (BB_END (bb))))
        return FALSE;

      /* The insn passed all checks: record its value and register.  */
      vals[REGNO (dest)] = src;

      VEC_safe_push (int, heap, *regs, REGNO (dest));
    }

  return TRUE;
}
2567
 
2568
/* Given a basic block BB suitable for conditional move conversion,
2569
   a condition COND, and arrays THEN_VALS and ELSE_VALS containing the
2570
   register values depending on COND, emit the insns in the block as
2571
   conditional moves.  If ELSE_BLOCK is true, THEN_BB was already
2572
   processed.  The caller has started a sequence for the conversion.
2573
   Return true if successful, false if something goes wrong.  */
2574
 
2575
static bool
cond_move_convert_if_block (struct noce_if_info *if_infop,
                            basic_block bb, rtx cond,
                            rtx *then_vals, rtx *else_vals,
                            bool else_block_p)
{
  enum rtx_code code;
  rtx insn, cond_arg0, cond_arg1;

  code = GET_CODE (cond);
  cond_arg0 = XEXP (cond, 0);
  cond_arg1 = XEXP (cond, 1);

  FOR_BB_INSNS (bb, insn)
    {
      rtx set, target, dest, t, e;
      unsigned int regno;

      /* ??? Maybe emit conditional debug insn?  */
      if (!NONDEBUG_INSN_P (insn) || JUMP_P (insn))
        continue;
      /* check_cond_move_block already guaranteed single sets of
	 registers, hence the assert rather than a bail-out.  */
      set = single_set (insn);
      gcc_assert (set && REG_P (SET_DEST (set)));

      dest = SET_DEST (set);
      regno = REGNO (dest);

      /* T and E are the values DEST takes on the THEN and ELSE paths;
	 either may be NULL if that block does not set DEST.  */
      t = then_vals[regno];
      e = else_vals[regno];

      if (else_block_p)
        {
          /* If this register was set in the then block, we already
             handled this case there.  */
          if (t)
            continue;
          /* Set only in the ELSE block: the THEN value is DEST itself.  */
          t = dest;
          gcc_assert (e);
        }
      else
        {
          gcc_assert (t);
          /* Set only in the THEN block: the ELSE value is DEST itself.  */
          if (!e)
            e = dest;
        }

      target = noce_emit_cmove (if_infop, dest, code, cond_arg0, cond_arg1,
                                t, e);
      if (!target)
        return false;

      if (target != dest)
        noce_emit_move_insn (dest, target);
    }

  return true;
}
2632
 
2633
/* Given a simple IF-THEN-JOIN or IF-THEN-ELSE-JOIN block, attempt to convert
   it using only conditional moves.  Return TRUE if we were successful at
   converting the block.  */

static int
cond_move_process_if_block (struct noce_if_info *if_info)
{
  basic_block test_bb = if_info->test_bb;
  basic_block then_bb = if_info->then_bb;
  basic_block else_bb = if_info->else_bb;
  basic_block join_bb = if_info->join_bb;
  rtx jump = if_info->jump;
  rtx cond = if_info->cond;
  rtx seq, loc_insn;
  int max_reg, size, c, reg;
  rtx *then_vals;
  rtx *else_vals;
  VEC (int, heap) *then_regs = NULL;
  VEC (int, heap) *else_regs = NULL;
  unsigned int i;

  /* Build a mapping for each block to the value used for each
     register.  The arrays are indexed by register number; a null
     entry means that block does not assign the register.  */
  max_reg = max_reg_num ();
  size = (max_reg + 1) * sizeof (rtx);
  then_vals = (rtx *) alloca (size);
  else_vals = (rtx *) alloca (size);
  memset (then_vals, 0, size);
  memset (else_vals, 0, size);

  /* Make sure the blocks are suitable.  check_cond_move_block also
     fills in the value maps and records which registers each block
     sets.  */
  if (!check_cond_move_block (then_bb, then_vals, &then_regs, cond)
      || (else_bb && !check_cond_move_block (else_bb, else_vals, &else_regs, cond)))
    {
      VEC_free (int, heap, then_regs);
      VEC_free (int, heap, else_regs);
      return FALSE;
    }

  /* Make sure the blocks can be used together.  If the same register
     is set in both blocks, and is not set to a constant in both
     cases, then both blocks must set it to the same register.  We
     have already verified that if it is set to a register, that the
     source register does not change after the assignment.  Also count
     the number of registers set in only one of the blocks.  */
  c = 0;
  for (i = 0; VEC_iterate (int, then_regs, i, reg); i++)
    {
      if (!then_vals[reg] && !else_vals[reg])
        continue;

      if (!else_vals[reg])
        ++c;
      else
        {
          if (!CONSTANT_P (then_vals[reg])
              && !CONSTANT_P (else_vals[reg])
              && !rtx_equal_p (then_vals[reg], else_vals[reg]))
            {
              VEC_free (int, heap, then_regs);
              VEC_free (int, heap, else_regs);
              return FALSE;
            }
        }
    }

  /* Finish off c for MAX_CONDITIONAL_EXECUTE.  */
  for (i = 0; VEC_iterate (int, else_regs, i, reg); ++i)
    if (!then_vals[reg])
      ++c;

  /* Make sure it is reasonable to convert this block.  What matters
     is the number of assignments currently made in only one of the
     branches, since if we convert we are going to always execute
     them.  */
  if (c > MAX_CONDITIONAL_EXECUTE)
    {
      VEC_free (int, heap, then_regs);
      VEC_free (int, heap, else_regs);
      return FALSE;
    }

  /* Try to emit the conditional moves.  First do the then block,
     then do anything left in the else blocks.  */
  start_sequence ();
  if (!cond_move_convert_if_block (if_info, then_bb, cond,
                                   then_vals, else_vals, false)
      || (else_bb
          && !cond_move_convert_if_block (if_info, else_bb, cond,
                                          then_vals, else_vals, true)))
    {
      end_sequence ();
      VEC_free (int, heap, then_regs);
      VEC_free (int, heap, else_regs);
      return FALSE;
    }
  seq = end_ifcvt_sequence (if_info);
  if (!seq)
    {
      VEC_free (int, heap, then_regs);
      VEC_free (int, heap, else_regs);
      return FALSE;
    }

  /* Pick an insn location from one of the converted blocks so the new
     sequence gets sensible debug line information.  */
  loc_insn = first_active_insn (then_bb);
  if (!loc_insn)
    {
      loc_insn = first_active_insn (else_bb);
      gcc_assert (loc_insn);
    }
  emit_insn_before_setloc (seq, jump, INSN_LOCATOR (loc_insn));

  /* Rewire the CFG: the THEN (and ELSE) blocks are now dead, and
     TEST falls straight through to JOIN.  */
  if (else_bb)
    {
      delete_basic_block (else_bb);
      num_true_changes++;
    }
  else
    remove_edge (find_edge (test_bb, join_bb));

  remove_edge (find_edge (then_bb, join_bb));
  redirect_edge_and_branch_force (single_succ_edge (test_bb), join_bb);
  delete_basic_block (then_bb);
  num_true_changes++;

  if (can_merge_blocks_p (test_bb, join_bb))
    {
      merge_blocks (test_bb, join_bb);
      num_true_changes++;
    }

  num_updated_if_blocks++;

  VEC_free (int, heap, then_regs);
  VEC_free (int, heap, else_regs);
  return TRUE;
}
2770
 
2771
 
2772
/* Determine if a given basic block heads a simple IF-THEN-JOIN or an
   IF-THEN-ELSE-JOIN block.

   If so, we'll try to convert the insns to not require the branch,
   using only transformations that do not require conditional execution.

   TEST_BB ends in the conditional jump; THEN_EDGE and ELSE_EDGE are its
   two outgoing edges.  PASS is used only for dump output.

   Return TRUE if we were successful at converting the block.  */

static int
noce_find_if_block (basic_block test_bb,
                    edge then_edge, edge else_edge,
                    int pass)
{
  basic_block then_bb, else_bb, join_bb;
  bool then_else_reversed = false;
  rtx jump, cond;
  rtx cond_earliest;
  struct noce_if_info if_info;

  /* We only ever should get here before reload.  */
  gcc_assert (!reload_completed);

  /* Recognize an IF-THEN-ELSE-JOIN block.  */
  if (single_pred_p (then_edge->dest)
      && single_succ_p (then_edge->dest)
      && single_pred_p (else_edge->dest)
      && single_succ_p (else_edge->dest)
      && single_succ (then_edge->dest) == single_succ (else_edge->dest))
    {
      then_bb = then_edge->dest;
      else_bb = else_edge->dest;
      join_bb = single_succ (then_bb);
    }
  /* Recognize an IF-THEN-JOIN block.  */
  else if (single_pred_p (then_edge->dest)
           && single_succ_p (then_edge->dest)
           && single_succ (then_edge->dest) == else_edge->dest)
    {
      then_bb = then_edge->dest;
      else_bb = NULL_BLOCK;
      join_bb = else_edge->dest;
    }
  /* Recognize an IF-ELSE-JOIN block.  We can have those because the order
     of basic blocks in cfglayout mode does not matter, so the fallthrough
     edge can go to any basic block (and not just to bb->next_bb, like in
     cfgrtl mode).  */
  else if (single_pred_p (else_edge->dest)
           && single_succ_p (else_edge->dest)
           && single_succ (else_edge->dest) == then_edge->dest)
    {
      /* The noce transformations do not apply to IF-ELSE-JOIN blocks.
         To make this work, we have to invert the THEN and ELSE blocks
         and reverse the jump condition.  */
      then_bb = else_edge->dest;
      else_bb = NULL_BLOCK;
      join_bb = single_succ (then_bb);
      then_else_reversed = true;
    }
  else
    /* Not a form we can handle.  */
    return FALSE;

  /* The edges of the THEN and ELSE blocks cannot have complex edges.  */
  if (single_succ_edge (then_bb)->flags & EDGE_COMPLEX)
    return FALSE;
  if (else_bb
      && single_succ_edge (else_bb)->flags & EDGE_COMPLEX)
    return FALSE;

  num_possible_if_blocks++;

  if (dump_file)
    {
      fprintf (dump_file,
               "\nIF-THEN%s-JOIN block found, pass %d, test %d, then %d",
               (else_bb) ? "-ELSE" : "",
               pass, test_bb->index, then_bb->index);

      if (else_bb)
        fprintf (dump_file, ", else %d", else_bb->index);

      fprintf (dump_file, ", join %d\n", join_bb->index);
    }

  /* If the conditional jump is more than just a conditional
     jump, then we can not do if-conversion on this block.  */
  jump = BB_END (test_bb);
  if (! onlyjump_p (jump))
    return FALSE;

  /* If this is not a standard conditional jump, we can't parse it.  */
  cond = noce_get_condition (jump,
                             &cond_earliest,
                             then_else_reversed);
  if (!cond)
    return FALSE;

  /* We must be comparing objects whose modes imply the size.  */
  if (GET_MODE (XEXP (cond, 0)) == BLKmode)
    return FALSE;

  /* Initialize an IF_INFO struct to pass around.  */
  memset (&if_info, 0, sizeof if_info);
  if_info.test_bb = test_bb;
  if_info.then_bb = then_bb;
  if_info.else_bb = else_bb;
  if_info.join_bb = join_bb;
  if_info.cond = cond;
  if_info.cond_earliest = cond_earliest;
  if_info.jump = jump;
  if_info.then_else_reversed = then_else_reversed;
  if_info.branch_cost = BRANCH_COST (optimize_bb_for_speed_p (test_bb),
                                     predictable_edge_p (then_edge));

  /* Do the real work.  Try the straight-line transformations first,
     then fall back to conditional moves if the target has them.  */

  if (noce_process_if_block (&if_info))
    return TRUE;

  if (HAVE_conditional_move
      && cond_move_process_if_block (&if_info))
    return TRUE;

  return FALSE;
}
2897
 
2898
 
2899
/* Merge the blocks recorded in CE_INFO (any && / || subtest blocks, the
   TEST, THEN, ELSE and, when possible, the JOIN block) into a single
   combined block, and mark the result dirty for dataflow update.  */

static void
merge_if_block (struct ce_if_block * ce_info)
{
  basic_block test_bb = ce_info->test_bb;       /* last test block */
  basic_block then_bb = ce_info->then_bb;       /* THEN */
  basic_block else_bb = ce_info->else_bb;       /* ELSE or NULL */
  basic_block join_bb = ce_info->join_bb;       /* join block */
  basic_block combo_bb;

  /* All block merging is done into the lower block numbers.  */

  combo_bb = test_bb;
  df_set_bb_dirty (test_bb);

  /* Merge any basic blocks to handle && and || subtests.  Each of
     the blocks are on the fallthru path from the predecessor block.  */
  if (ce_info->num_multiple_test_blocks > 0)
    {
      basic_block bb = test_bb;
      basic_block last_test_bb = ce_info->last_test_bb;
      basic_block fallthru = block_fallthru (bb);

      do
        {
          bb = fallthru;
          fallthru = block_fallthru (bb);
          merge_blocks (combo_bb, bb);
          num_true_changes++;
        }
      while (bb != last_test_bb);
    }

  /* Merge TEST block into THEN block.  Normally the THEN block won't have a
     label, but it might if there were || tests.  That label's count should be
     zero, and it normally should be removed.  */

  if (then_bb)
    {
      merge_blocks (combo_bb, then_bb);
      num_true_changes++;
    }

  /* The ELSE block, if it existed, had a label.  That label count
     will almost always be zero, but odd things can happen when labels
     get their addresses taken.  */
  if (else_bb)
    {
      merge_blocks (combo_bb, else_bb);
      num_true_changes++;
    }

  /* If there was no join block reported, that means it was not adjacent
     to the others, and so we cannot merge them.  */

  if (! join_bb)
    {
      rtx last = BB_END (combo_bb);

      /* The outgoing edge for the current COMBO block should already
         be correct.  Verify this.  */
      if (EDGE_COUNT (combo_bb->succs) == 0)
        gcc_assert (find_reg_note (last, REG_NORETURN, NULL)
                    || (NONJUMP_INSN_P (last)
                        && GET_CODE (PATTERN (last)) == TRAP_IF
                        && (TRAP_CONDITION (PATTERN (last))
                            == const_true_rtx)));

      else
      /* There should still be something at the end of the THEN or ELSE
         blocks taking us to our final destination.  */
        gcc_assert (JUMP_P (last)
                    || (EDGE_SUCC (combo_bb, 0)->dest == EXIT_BLOCK_PTR
                        && CALL_P (last)
                        && SIBLING_CALL_P (last))
                    || ((EDGE_SUCC (combo_bb, 0)->flags & EDGE_EH)
                        && can_throw_internal (last)));
    }

  /* The JOIN block may have had quite a number of other predecessors too.
     Since we've already merged the TEST, THEN and ELSE blocks, we should
     have only one remaining edge from our if-then-else diamond.  If there
     is more than one remaining edge, it must come from elsewhere.  There
     may be zero incoming edges if the THEN block didn't actually join
     back up (as with a call to a non-return function).  */
  else if (EDGE_COUNT (join_bb->preds) < 2
           && join_bb != EXIT_BLOCK_PTR)
    {
      /* We can merge the JOIN cleanly and update the dataflow try
         again on this pass.  */
      merge_blocks (combo_bb, join_bb);
      num_true_changes++;
    }
  else
    {
      /* We cannot merge the JOIN.  */

      /* The outgoing edge for the current COMBO block should already
         be correct.  Verify this.  */
      gcc_assert (single_succ_p (combo_bb)
                  && single_succ (combo_bb) == join_bb);

      /* Remove the jump and cruft from the end of the COMBO block.  */
      if (join_bb != EXIT_BLOCK_PTR)
        tidy_fallthru_edge (single_succ_edge (combo_bb));
    }

  num_updated_if_blocks++;
}
3009
 
3010
/* Find a block ending in a simple IF condition and try to transform it
   in some way.  When converting a multi-block condition, put the new code
   in the first such block and delete the rest.  Return a pointer to this
   first block if some transformation was done.  Return NULL otherwise.  */

static basic_block
find_if_header (basic_block test_bb, int pass)
{
  ce_if_block_t ce_info;
  edge then_edge;
  edge else_edge;

  /* The kind of block we're looking for has exactly two successors.  */
  if (EDGE_COUNT (test_bb->succs) != 2)
    return NULL;

  then_edge = EDGE_SUCC (test_bb, 0);
  else_edge = EDGE_SUCC (test_bb, 1);

  /* Don't look at blocks whose dataflow info is stale from an earlier
     transformation on this pass.  */
  if (df_get_bb_dirty (then_edge->dest))
    return NULL;
  if (df_get_bb_dirty (else_edge->dest))
    return NULL;

  /* Neither edge should be abnormal.  */
  if ((then_edge->flags & EDGE_COMPLEX)
      || (else_edge->flags & EDGE_COMPLEX))
    return NULL;

  /* Nor exit the loop.  */
  if ((then_edge->flags & EDGE_LOOP_EXIT)
      || (else_edge->flags & EDGE_LOOP_EXIT))
    return NULL;

  /* The THEN edge is canonically the one that falls through.  */
  if (then_edge->flags & EDGE_FALLTHRU)
    ;
  else if (else_edge->flags & EDGE_FALLTHRU)
    {
      edge e = else_edge;
      else_edge = then_edge;
      then_edge = e;
    }
  else
    /* Otherwise this must be a multiway branch of some sort.  */
    return NULL;

  memset (&ce_info, '\0', sizeof (ce_info));
  ce_info.test_bb = test_bb;
  ce_info.then_bb = then_edge->dest;
  ce_info.else_bb = else_edge->dest;
  ce_info.pass = pass;

#ifdef IFCVT_INIT_EXTRA_FIELDS
  IFCVT_INIT_EXTRA_FIELDS (&ce_info);
#endif

  /* Try each transformation strategy in turn: straight-line conversion
     (before reload), conditional execution (after reload), conditional
     traps, and finally the half-diamond cases.  */
  if (! reload_completed
      && noce_find_if_block (test_bb, then_edge, else_edge, pass))
    goto success;

  if (targetm.have_conditional_execution () && reload_completed
      && cond_exec_find_if_block (&ce_info))
    goto success;

  if (HAVE_trap
      && optab_handler (ctrap_optab, word_mode)->insn_code != CODE_FOR_nothing
      && find_cond_trap (test_bb, then_edge, else_edge))
    goto success;

  if (dom_info_state (CDI_POST_DOMINATORS) >= DOM_NO_FAST_QUERY
      && (! targetm.have_conditional_execution () || reload_completed))
    {
      if (find_if_case_1 (test_bb, then_edge, else_edge))
        goto success;
      if (find_if_case_2 (test_bb, then_edge, else_edge))
        goto success;
    }

  return NULL;

 success:
  if (dump_file)
    fprintf (dump_file, "Conversion succeeded on pass %d.\n", pass);
  /* Set this so we continue looking.  */
  cond_exec_changed_p = TRUE;
  return ce_info.test_bb;
}
3098
 
3099
/* Return true if a block has two edges, one of which falls through to the next
3100
   block, and the other jumps to a specific block, so that we can tell if the
3101
   block is part of an && test or an || test.  Returns either -1 or the number
3102
   of non-note, non-jump, non-USE/CLOBBER insns in the block.  */
3103
 
3104
static int
3105
block_jumps_and_fallthru_p (basic_block cur_bb, basic_block target_bb)
3106
{
3107
  edge cur_edge;
3108
  int fallthru_p = FALSE;
3109
  int jump_p = FALSE;
3110
  rtx insn;
3111
  rtx end;
3112
  int n_insns = 0;
3113
  edge_iterator ei;
3114
 
3115
  if (!cur_bb || !target_bb)
3116
    return -1;
3117
 
3118
  /* If no edges, obviously it doesn't jump or fallthru.  */
3119
  if (EDGE_COUNT (cur_bb->succs) == 0)
3120
    return FALSE;
3121
 
3122
  FOR_EACH_EDGE (cur_edge, ei, cur_bb->succs)
3123
    {
3124
      if (cur_edge->flags & EDGE_COMPLEX)
3125
        /* Anything complex isn't what we want.  */
3126
        return -1;
3127
 
3128
      else if (cur_edge->flags & EDGE_FALLTHRU)
3129
        fallthru_p = TRUE;
3130
 
3131
      else if (cur_edge->dest == target_bb)
3132
        jump_p = TRUE;
3133
 
3134
      else
3135
        return -1;
3136
    }
3137
 
3138
  if ((jump_p & fallthru_p) == 0)
3139
    return -1;
3140
 
3141
  /* Don't allow calls in the block, since this is used to group && and ||
3142
     together for conditional execution support.  ??? we should support
3143
     conditional execution support across calls for IA-64 some day, but
3144
     for now it makes the code simpler.  */
3145
  end = BB_END (cur_bb);
3146
  insn = BB_HEAD (cur_bb);
3147
 
3148
  while (insn != NULL_RTX)
3149
    {
3150
      if (CALL_P (insn))
3151
        return -1;
3152
 
3153
      if (INSN_P (insn)
3154
          && !JUMP_P (insn)
3155
          && !DEBUG_INSN_P (insn)
3156
          && GET_CODE (PATTERN (insn)) != USE
3157
          && GET_CODE (PATTERN (insn)) != CLOBBER)
3158
        n_insns++;
3159
 
3160
      if (insn == end)
3161
        break;
3162
 
3163
      insn = NEXT_INSN (insn);
3164
    }
3165
 
3166
  return n_insns;
3167
}
3168
 
3169
/* Determine if a given basic block heads a simple IF-THEN or IF-THEN-ELSE
   block.  If so, we'll try to convert the insns to not require the branch.
   Also discovers chains of && / || subtest blocks feeding the test.
   Return TRUE if we were successful at converting the block.  */

static int
cond_exec_find_if_block (struct ce_if_block * ce_info)
{
  basic_block test_bb = ce_info->test_bb;
  basic_block then_bb = ce_info->then_bb;
  basic_block else_bb = ce_info->else_bb;
  basic_block join_bb = NULL_BLOCK;
  edge cur_edge;
  basic_block next;
  edge_iterator ei;

  ce_info->last_test_bb = test_bb;

  /* We only ever should get here after reload,
     and only if we have conditional execution.  */
  gcc_assert (targetm.have_conditional_execution () && reload_completed);

  /* Discover if any fall through predecessors of the current test basic block
     were && tests (which jump to the else block) or || tests (which jump to
     the then block).  */
  if (single_pred_p (test_bb)
      && single_pred_edge (test_bb)->flags == EDGE_FALLTHRU)
    {
      basic_block bb = single_pred (test_bb);
      basic_block target_bb;
      int max_insns = MAX_CONDITIONAL_EXECUTE;
      int n_insns;

      /* Determine if the preceding block is an && or || block.  */
      if ((n_insns = block_jumps_and_fallthru_p (bb, else_bb)) >= 0)
        {
          ce_info->and_and_p = TRUE;
          target_bb = else_bb;
        }
      else if ((n_insns = block_jumps_and_fallthru_p (bb, then_bb)) >= 0)
        {
          ce_info->and_and_p = FALSE;
          target_bb = then_bb;
        }
      else
        target_bb = NULL_BLOCK;

      if (target_bb && n_insns <= max_insns)
        {
          int total_insns = 0;
          int blocks = 0;

          ce_info->last_test_bb = test_bb;

          /* Found at least one && or || block, look for more.  Walk up
             the fallthru predecessor chain as long as each block fits
             the same shape and the total insn budget is not exceeded.  */
          do
            {
              ce_info->test_bb = test_bb = bb;
              total_insns += n_insns;
              blocks++;

              if (!single_pred_p (bb))
                break;

              bb = single_pred (bb);
              n_insns = block_jumps_and_fallthru_p (bb, target_bb);
            }
          while (n_insns >= 0 && (total_insns + n_insns) <= max_insns);

          ce_info->num_multiple_test_blocks = blocks;
          ce_info->num_multiple_test_insns = total_insns;

          if (ce_info->and_and_p)
            ce_info->num_and_and_blocks = blocks;
          else
            ce_info->num_or_or_blocks = blocks;
        }
    }

  /* The THEN block of an IF-THEN combo must have exactly one predecessor,
     other than any || blocks which jump to the THEN block.  */
  if ((EDGE_COUNT (then_bb->preds) - ce_info->num_or_or_blocks) != 1)
    return FALSE;

  /* The edges of the THEN and ELSE blocks cannot have complex edges.  */
  FOR_EACH_EDGE (cur_edge, ei, then_bb->preds)
    {
      if (cur_edge->flags & EDGE_COMPLEX)
        return FALSE;
    }

  FOR_EACH_EDGE (cur_edge, ei, else_bb->preds)
    {
      if (cur_edge->flags & EDGE_COMPLEX)
        return FALSE;
    }

  /* The THEN block of an IF-THEN combo must have zero or one successors.  */
  if (EDGE_COUNT (then_bb->succs) > 0
      && (!single_succ_p (then_bb)
          || (single_succ_edge (then_bb)->flags & EDGE_COMPLEX)
          || (epilogue_completed && tablejump_p (BB_END (then_bb), NULL, NULL))))
    return FALSE;

  /* If the THEN block has no successors, conditional execution can still
     make a conditional call.  Don't do this unless the ELSE block has
     only one incoming edge -- the CFG manipulation is too ugly otherwise.
     Check for the last insn of the THEN block being an indirect jump, which
     is listed as not having any successors, but confuses the rest of the CE
     code processing.  ??? we should fix this in the future.  */
  if (EDGE_COUNT (then_bb->succs) == 0)
    {
      if (single_pred_p (else_bb))
        {
          rtx last_insn = BB_END (then_bb);

          /* Skip trailing notes to find the real last insn.  */
          while (last_insn
                 && NOTE_P (last_insn)
                 && last_insn != BB_HEAD (then_bb))
            last_insn = PREV_INSN (last_insn);

          if (last_insn
              && JUMP_P (last_insn)
              && ! simplejump_p (last_insn))
            return FALSE;

          join_bb = else_bb;
          else_bb = NULL_BLOCK;
        }
      else
        return FALSE;
    }

  /* If the THEN block's successor is the other edge out of the TEST block,
     then we have an IF-THEN combo without an ELSE.  */
  else if (single_succ (then_bb) == else_bb)
    {
      join_bb = else_bb;
      else_bb = NULL_BLOCK;
    }

  /* If the THEN and ELSE block meet in a subsequent block, and the ELSE
     has exactly one predecessor and one successor, and the outgoing edge
     is not complex, then we have an IF-THEN-ELSE combo.  */
  else if (single_succ_p (else_bb)
           && single_succ (then_bb) == single_succ (else_bb)
           && single_pred_p (else_bb)
           && ! (single_succ_edge (else_bb)->flags & EDGE_COMPLEX)
           && ! (epilogue_completed && tablejump_p (BB_END (else_bb), NULL, NULL)))
    join_bb = single_succ (else_bb);

  /* Otherwise it is not an IF-THEN or IF-THEN-ELSE combination.  */
  else
    return FALSE;

  num_possible_if_blocks++;

  if (dump_file)
    {
      fprintf (dump_file,
               "\nIF-THEN%s block found, pass %d, start block %d "
               "[insn %d], then %d [%d]",
               (else_bb) ? "-ELSE" : "",
               ce_info->pass,
               test_bb->index,
               BB_HEAD (test_bb) ? (int)INSN_UID (BB_HEAD (test_bb)) : -1,
               then_bb->index,
               BB_HEAD (then_bb) ? (int)INSN_UID (BB_HEAD (then_bb)) : -1);

      if (else_bb)
        fprintf (dump_file, ", else %d [%d]",
                 else_bb->index,
                 BB_HEAD (else_bb) ? (int)INSN_UID (BB_HEAD (else_bb)) : -1);

      fprintf (dump_file, ", join %d [%d]",
               join_bb->index,
               BB_HEAD (join_bb) ? (int)INSN_UID (BB_HEAD (join_bb)) : -1);

      if (ce_info->num_multiple_test_blocks > 0)
        fprintf (dump_file, ", %d %s block%s last test %d [%d]",
                 ce_info->num_multiple_test_blocks,
                 (ce_info->and_and_p) ? "&&" : "||",
                 (ce_info->num_multiple_test_blocks == 1) ? "" : "s",
                 ce_info->last_test_bb->index,
                 ((BB_HEAD (ce_info->last_test_bb))
                  ? (int)INSN_UID (BB_HEAD (ce_info->last_test_bb))
                  : -1));

      fputc ('\n', dump_file);
    }

  /* Make sure IF, THEN, and ELSE, blocks are adjacent.  Actually, we get the
     first condition for free, since we've already asserted that there's a
     fallthru edge from IF to THEN.  Likewise for the && and || blocks, since
     we checked the FALLTHRU flag, those are already adjacent to the last IF
     block.  */
  /* ??? As an enhancement, move the ELSE block.  Have to deal with
     BLOCK notes, if by no other means than backing out the merge if they
     exist.  Sticky enough I don't want to think about it now.  */
  next = then_bb;
  if (else_bb && (next = next->next_bb) != else_bb)
    return FALSE;
  if ((next = next->next_bb) != join_bb && join_bb != EXIT_BLOCK_PTR)
    {
      if (else_bb)
        join_bb = NULL;
      else
        return FALSE;
    }

  /* Do the real work.  */

  ce_info->else_bb = else_bb;
  ce_info->join_bb = join_bb;

  /* If we have && and || tests, try to first handle combining the && and ||
     tests into the conditional code, and if that fails, go back and handle
     it without the && and ||, which at present handles the && case if there
     was no ELSE block.  */
  if (cond_exec_process_if_block (ce_info, TRUE))
    return TRUE;

  if (ce_info->num_multiple_test_blocks)
    {
      cancel_changes (0);

      if (cond_exec_process_if_block (ce_info, FALSE))
        return TRUE;
    }

  return FALSE;
}
3400
 
3401
/* Convert a branch over a trap, or a branch
   to a trap, into a conditional trap.  TEST_BB ends in the conditional
   jump; THEN_EDGE/ELSE_EDGE are its successors, one of which must lead
   to a block containing only an unconditional trap.  Return TRUE if the
   conversion was performed.  */

static int
find_cond_trap (basic_block test_bb, edge then_edge, edge else_edge)
{
  basic_block then_bb = then_edge->dest;
  basic_block else_bb = else_edge->dest;
  basic_block other_bb, trap_bb;
  rtx trap, jump, cond, cond_earliest, seq;
  enum rtx_code code;

  /* Locate the block with the trap instruction.  */
  /* ??? While we look for no successors, we really ought to allow
     EH successors.  Need to fix merge_if_block for that to work.  */
  if ((trap = block_has_only_trap (then_bb)) != NULL)
    trap_bb = then_bb, other_bb = else_bb;
  else if ((trap = block_has_only_trap (else_bb)) != NULL)
    trap_bb = else_bb, other_bb = then_bb;
  else
    return FALSE;

  if (dump_file)
    {
      fprintf (dump_file, "\nTRAP-IF block found, start %d, trap %d\n",
               test_bb->index, trap_bb->index);
    }

  /* If this is not a standard conditional jump, we can't parse it.  */
  jump = BB_END (test_bb);
  cond = noce_get_condition (jump, &cond_earliest, false);
  if (! cond)
    return FALSE;

  /* If the conditional jump is more than just a conditional jump, then
     we can not do if-conversion on this block.  */
  if (! onlyjump_p (jump))
    return FALSE;

  /* We must be comparing objects whose modes imply the size.  */
  if (GET_MODE (XEXP (cond, 0)) == BLKmode)
    return FALSE;

  /* Reverse the comparison code, if necessary.  The condition guards
     the jump to the *other* block, so when the trap is in the THEN
     block we need the inverse.  */
  code = GET_CODE (cond);
  if (then_bb == trap_bb)
    {
      code = reversed_comparison_code (cond, jump);
      if (code == UNKNOWN)
        return FALSE;
    }

  /* Attempt to generate the conditional trap.  */
  seq = gen_cond_trap (code, copy_rtx (XEXP (cond, 0)),
                       copy_rtx (XEXP (cond, 1)),
                       TRAP_CODE (PATTERN (trap)));
  if (seq == NULL)
    return FALSE;

  /* Emit the new insns before cond_earliest.  */
  emit_insn_before_setloc (seq, cond_earliest, INSN_LOCATOR (trap));

  /* Delete the trap block if possible.  */
  remove_edge (trap_bb == then_bb ? then_edge : else_edge);
  df_set_bb_dirty (test_bb);
  df_set_bb_dirty (then_bb);
  df_set_bb_dirty (else_bb);

  if (EDGE_COUNT (trap_bb->preds) == 0)
    {
      delete_basic_block (trap_bb);
      num_true_changes++;
    }

  /* Wire together the blocks again.  */
  if (current_ir_type () == IR_RTL_CFGLAYOUT)
    single_succ_edge (test_bb)->flags |= EDGE_FALLTHRU;
  else
    {
      rtx lab, newjump;

      /* In cfgrtl mode we must materialize an unconditional jump to the
         surviving block before deleting the conditional jump.  */
      lab = JUMP_LABEL (jump);
      newjump = emit_jump_insn_after (gen_jump (lab), jump);
      LABEL_NUSES (lab) += 1;
      JUMP_LABEL (newjump) = lab;
      emit_barrier_after (newjump);
    }
  delete_insn (jump);

  if (can_merge_blocks_p (test_bb, other_bb))
    {
      merge_blocks (test_bb, other_bb);
      num_true_changes++;
    }

  num_updated_if_blocks++;
  return TRUE;
}
3499
 
3500
/* Subroutine of find_cond_trap: if BB contains only a trap insn,
3501
   return it.  */
3502
 
3503
static rtx
3504
block_has_only_trap (basic_block bb)
3505
{
3506
  rtx trap;
3507
 
3508
  /* We're not the exit block.  */
3509
  if (bb == EXIT_BLOCK_PTR)
3510
    return NULL_RTX;
3511
 
3512
  /* The block must have no successors.  */
3513
  if (EDGE_COUNT (bb->succs) > 0)
3514
    return NULL_RTX;
3515
 
3516
  /* The only instruction in the THEN block must be the trap.  */
3517
  trap = first_active_insn (bb);
3518
  if (! (trap == BB_END (bb)
3519
         && GET_CODE (PATTERN (trap)) == TRAP_IF
3520
         && TRAP_CONDITION (PATTERN (trap)) == const_true_rtx))
3521
    return NULL_RTX;
3522
 
3523
  return trap;
3524
}
3525
 
3526
/* Look for IF-THEN-ELSE cases in which one of THEN or ELSE is
3527
   transformable, but not necessarily the other.  There need be no
3528
   JOIN block.
3529
 
3530
   Return TRUE if we were successful at converting the block.
3531
 
3532
   Cases we'd like to look at:
3533
 
3534
   (1)
3535
        if (test) goto over; // x not live
3536
        x = a;
3537
        goto label;
3538
        over:
3539
 
3540
   becomes
3541
 
3542
        x = a;
3543
        if (! test) goto label;
3544
 
3545
   (2)
3546
        if (test) goto E; // x not live
3547
        x = big();
3548
        goto L;
3549
        E:
3550
        x = b;
3551
        goto M;
3552
 
3553
   becomes
3554
 
3555
        x = b;
3556
        if (test) goto M;
3557
        x = big();
3558
        goto L;
3559
 
3560
   (3) // This one's really only interesting for targets that can do
3561
       // multiway branching, e.g. IA-64 BBB bundles.  For other targets
3562
       // it results in multiple branches on a cache line, which often
3563
       // does not sit well with predictors.
3564
 
3565
        if (test1) goto E; // predicted not taken
3566
        x = a;
3567
        if (test2) goto F;
3568
        ...
3569
        E:
3570
        x = b;
3571
        J:
3572
 
3573
   becomes
3574
 
3575
        x = a;
3576
        if (test1) goto E;
3577
        if (test2) goto F;
3578
 
3579
   Notes:
3580
 
3581
   (A) Don't do (2) if the branch is predicted against the block we're
3582
   eliminating.  Do it anyway if we can eliminate a branch; this requires
3583
   that the sole successor of the eliminated block postdominate the other
3584
   side of the if.
3585
 
3586
   (B) With CE, on (3) we can steal from both sides of the if, creating
3587
 
3588
        if (test1) x = a;
3589
        if (!test1) x = b;
3590
        if (test1) goto J;
3591
        if (test2) goto F;
3592
        ...
3593
        J:
3594
 
3595
   Again, this is most useful if J postdominates.
3596
 
3597
   (C) CE substitutes for helpful life information.
3598
 
3599
   (D) These heuristics need a lot of work.  */
3600
 
3601
/* Tests for case 1 above: an if-then block whose THEN arm ends in an
   unconditional jump.  TEST_BB ends with the conditional branch;
   THEN_EDGE and ELSE_EDGE are its two outgoing edges.  On success the
   THEN block's insns have been hoisted above the (now reversed)
   branch, THEN_BB has been deleted, and TRUE is returned.  */

static int
find_if_case_1 (basic_block test_bb, edge then_edge, edge else_edge)
{
  basic_block then_bb = then_edge->dest;
  basic_block else_bb = else_edge->dest;
  basic_block new_bb;
  int then_bb_index;

  /* If we are partitioning hot/cold basic blocks, we don't want to
     mess up unconditional or indirect jumps that cross between hot
     and cold sections.

     Basic block partitioning may result in some jumps that appear to
     be optimizable (or blocks that appear to be mergeable), but which really
     must be left untouched (they are required to make it safely across
     partition boundaries).  See  the comments at the top of
     bb-reorder.c:partition_hot_cold_basic_blocks for complete details.  */

  if ((BB_END (then_bb)
       && find_reg_note (BB_END (then_bb), REG_CROSSING_JUMP, NULL_RTX))
      || (BB_END (test_bb)
          && find_reg_note (BB_END (test_bb), REG_CROSSING_JUMP, NULL_RTX))
      || (BB_END (else_bb)
          && find_reg_note (BB_END (else_bb), REG_CROSSING_JUMP,
                            NULL_RTX)))
    return FALSE;

  /* THEN has one successor.  */
  if (!single_succ_p (then_bb))
    return FALSE;

  /* THEN does not fall through, but is not strange either.  */
  if (single_succ_edge (then_bb)->flags & (EDGE_COMPLEX | EDGE_FALLTHRU))
    return FALSE;

  /* THEN has one predecessor.  */
  if (!single_pred_p (then_bb))
    return FALSE;

  /* THEN must do something.  */
  if (forwarder_block_p (then_bb))
    return FALSE;

  num_possible_if_blocks++;
  if (dump_file)
    fprintf (dump_file,
             "\nIF-CASE-1 found, start %d, then %d\n",
             test_bb->index, then_bb->index);

  /* THEN is small.  */
  if (! cheap_bb_rtx_cost_p (then_bb,
        COSTS_N_INSNS (BRANCH_COST (optimize_bb_for_speed_p (then_edge->src),
                                    predictable_edge_p (then_edge)))))
    return FALSE;

  /* Registers set are dead, or are predicable.  Final argument 1
     requests that the sense of the branch be reversed.  */
  if (! dead_or_predicable (test_bb, then_bb, else_bb,
                            single_succ (then_bb), 1))
    return FALSE;

  /* Conversion went ok, including moving the insns and fixing up the
     jump.  Adjust the CFG to match.  */

  /* We can avoid creating a new basic block if then_bb is immediately
     followed by else_bb, i.e. deleting then_bb allows test_bb to fall
     thru to else_bb.  */

  if (then_bb->next_bb == else_bb
      && then_bb->prev_bb == test_bb
      && else_bb != EXIT_BLOCK_PTR)
    {
      redirect_edge_succ (FALLTHRU_EDGE (test_bb), else_bb);
      new_bb = 0;
    }
  else
    new_bb = redirect_edge_and_branch_force (FALLTHRU_EDGE (test_bb),
                                             else_bb);

  df_set_bb_dirty (test_bb);
  df_set_bb_dirty (else_bb);

  then_bb_index = then_bb->index;
  delete_basic_block (then_bb);

  /* Make rest of code believe that the newly created block is the THEN_BB
     block we removed.  */
  if (new_bb)
    {
      df_bb_replace (then_bb_index, new_bb);
      /* Since the fallthru edge was redirected from test_bb to new_bb,
         we need to ensure that new_bb is in the same partition as
         test bb (you can not fall through across section boundaries).  */
      BB_COPY_PARTITION (new_bb, test_bb);
    }

  num_true_changes++;
  num_updated_if_blocks++;

  return TRUE;
}
3703
 
3704
/* Test for case 2 above: an if-then-else where the ELSE arm can be
   hoisted above the branch.  TEST_BB ends with the conditional branch;
   THEN_EDGE and ELSE_EDGE are its two outgoing edges.  On success the
   ELSE block's insns have been moved above the branch (sense
   unchanged), ELSE_BB has been deleted, and TRUE is returned.  */

static int
find_if_case_2 (basic_block test_bb, edge then_edge, edge else_edge)
{
  basic_block then_bb = then_edge->dest;
  basic_block else_bb = else_edge->dest;
  edge else_succ;
  rtx note;

  /* If we are partitioning hot/cold basic blocks, we don't want to
     mess up unconditional or indirect jumps that cross between hot
     and cold sections.

     Basic block partitioning may result in some jumps that appear to
     be optimizable (or blocks that appear to be mergeable), but which really
     must be left untouched (they are required to make it safely across
     partition boundaries).  See  the comments at the top of
     bb-reorder.c:partition_hot_cold_basic_blocks for complete details.  */

  if ((BB_END (then_bb)
       && find_reg_note (BB_END (then_bb), REG_CROSSING_JUMP, NULL_RTX))
      || (BB_END (test_bb)
          && find_reg_note (BB_END (test_bb), REG_CROSSING_JUMP, NULL_RTX))
      || (BB_END (else_bb)
          && find_reg_note (BB_END (else_bb), REG_CROSSING_JUMP,
                            NULL_RTX)))
    return FALSE;

  /* ELSE has one successor.  */
  if (!single_succ_p (else_bb))
    return FALSE;
  else
    else_succ = single_succ_edge (else_bb);

  /* ELSE outgoing edge is not complex.  */
  if (else_succ->flags & EDGE_COMPLEX)
    return FALSE;

  /* ELSE has one predecessor.  */
  if (!single_pred_p (else_bb))
    return FALSE;

  /* THEN is not EXIT.  */
  if (then_bb->index < NUM_FIXED_BLOCKS)
    return FALSE;

  /* ELSE is predicted or SUCC(ELSE) postdominates THEN.
     (See note (A) in the commentary above: don't do this transform
     when the branch is predicted against ELSE, unless ELSE's successor
     postdominates the other side so a branch is eliminated anyway.)  */
  note = find_reg_note (BB_END (test_bb), REG_BR_PROB, NULL_RTX);
  if (note && INTVAL (XEXP (note, 0)) >= REG_BR_PROB_BASE / 2)
    ;
  else if (else_succ->dest->index < NUM_FIXED_BLOCKS
           || dominated_by_p (CDI_POST_DOMINATORS, then_bb,
                              else_succ->dest))
    ;
  else
    return FALSE;

  num_possible_if_blocks++;
  if (dump_file)
    fprintf (dump_file,
             "\nIF-CASE-2 found, start %d, else %d\n",
             test_bb->index, else_bb->index);

  /* ELSE is small.  */
  if (! cheap_bb_rtx_cost_p (else_bb,
        COSTS_N_INSNS (BRANCH_COST (optimize_bb_for_speed_p (else_edge->src),
                                    predictable_edge_p (else_edge)))))
    return FALSE;

  /* Registers set are dead, or are predicable.  Final argument 0: the
     branch keeps its sense.  */
  if (! dead_or_predicable (test_bb, else_bb, then_bb, else_succ->dest, 0))
    return FALSE;

  /* Conversion went ok, including moving the insns and fixing up the
     jump.  Adjust the CFG to match.  */

  df_set_bb_dirty (test_bb);
  df_set_bb_dirty (then_bb);
  delete_basic_block (else_bb);

  num_true_changes++;
  num_updated_if_blocks++;

  /* ??? We may now fallthru from one of THEN's successors into a join
     block.  Rerun cleanup_cfg?  Examine things manually?  Wait?  */

  return TRUE;
}
3793
 
3794
/* A subroutine of dead_or_predicable called through for_each_rtx.
3795
   Return 1 if a memory is found.  */
3796
 
3797
static int
3798
find_memory (rtx *px, void *data ATTRIBUTE_UNUSED)
3799
{
3800
  return MEM_P (*px);
3801
}
3802
 
3803
/* Used by the code above to perform the actual rtl transformations.
   Return TRUE if successful.

   TEST_BB is the block containing the conditional branch.  MERGE_BB
   is the block containing the code to manipulate.  NEW_DEST is the
   label TEST_BB should be branching to after the conversion.
   REVERSEP is true if the sense of the branch should be reversed.

   OTHER_BB is the branch destination that is not being merged; its
   live-in set bounds the registers the merged code may clobber in the
   non-conditional-execution check, and when OTHER_BB == NEW_DEST the
   branch is deleted outright instead of being redirected.  */

static int
dead_or_predicable (basic_block test_bb, basic_block merge_bb,
                    basic_block other_bb, basic_block new_dest, int reversep)
{
  rtx head, end, jump, earliest = NULL_RTX, old_dest, new_label = NULL_RTX;
  /* Number of pending changes.  */
  int n_validated_changes = 0;

  jump = BB_END (test_bb);

  /* Find the extent of the real code in the merge block.  */
  head = BB_HEAD (merge_bb);
  end = BB_END (merge_bb);

  while (DEBUG_INSN_P (end) && end != head)
    end = PREV_INSN (end);

  /* If merge_bb ends with a tablejump, predicating/moving insn's
     into test_bb and then deleting merge_bb will result in the jumptable
     that follows merge_bb being removed along with merge_bb and then we
     get an unresolved reference to the jumptable.  */
  if (tablejump_p (end, NULL, NULL))
    return FALSE;

  /* Trim a leading label/note, a trailing jump, and debug insns so
     that [HEAD, END] delimits only the movable body.  If nothing
     remains, skip straight to the jump rewriting with an empty body.  */
  if (LABEL_P (head))
    head = NEXT_INSN (head);
  while (DEBUG_INSN_P (head) && head != end)
    head = NEXT_INSN (head);
  if (NOTE_P (head))
    {
      if (head == end)
        {
          head = end = NULL_RTX;
          goto no_body;
        }
      head = NEXT_INSN (head);
      while (DEBUG_INSN_P (head) && head != end)
        head = NEXT_INSN (head);
    }

  if (JUMP_P (end))
    {
      if (head == end)
        {
          head = end = NULL_RTX;
          goto no_body;
        }
      end = PREV_INSN (end);
      while (DEBUG_INSN_P (end) && end != head)
        end = PREV_INSN (end);
    }

  /* Disable handling dead code by conditional execution if the machine needs
     to do anything funny with the tests, etc.  */
#ifndef IFCVT_MODIFY_TESTS
  if (targetm.have_conditional_execution ())
    {
      /* In the conditional execution case, we have things easy.  We know
         the condition is reversible.  We don't have to check life info
         because we're going to conditionally execute the code anyway.
         All that's left is making sure the insns involved can actually
         be predicated.  */

      rtx cond, prob_val;

      cond = cond_exec_get_condition (jump);
      if (! cond)
        return FALSE;

      prob_val = find_reg_note (jump, REG_BR_PROB, NULL_RTX);
      if (prob_val)
        prob_val = XEXP (prob_val, 0);

      if (reversep)
        {
          enum rtx_code rev = reversed_comparison_code (cond, jump);
          if (rev == UNKNOWN)
            return FALSE;
          cond = gen_rtx_fmt_ee (rev, GET_MODE (cond), XEXP (cond, 0),
                                 XEXP (cond, 1));
          if (prob_val)
            prob_val = GEN_INT (REG_BR_PROB_BASE - INTVAL (prob_val));
        }

      /* Predicate the insns tentatively; keep the pending change count
         on success so the group is confirmed together with the jump
         rewrite below, otherwise drop the pending changes.  */
      if (cond_exec_process_insns (NULL, head, end, cond, prob_val, 0)
          && verify_changes (0))
        n_validated_changes = num_validated_changes ();
      else
        cancel_changes (0);

      earliest = jump;
    }
#endif
  /* Try the NCE path if the CE path did not result in any changes.  */
  if (n_validated_changes == 0)
    {
      /* In the non-conditional execution case, we have to verify that there
         are no trapping operations, no calls, no references to memory, and
         that any registers modified are dead at the branch site.  */

      rtx insn, cond, prev;
      bitmap merge_set, test_live, test_set;
      unsigned i, fail = 0;
      bitmap_iterator bi;

      /* Check for no calls or trapping operations.  */
      for (insn = head; ; insn = NEXT_INSN (insn))
        {
          if (CALL_P (insn))
            return FALSE;
          if (NONDEBUG_INSN_P (insn))
            {
              if (may_trap_p (PATTERN (insn)))
                return FALSE;

              /* ??? Even non-trapping memories such as stack frame
                 references must be avoided.  For stores, we collect
                 no lifetime info; for reads, we'd have to assert
                 true_dependence false against every store in the
                 TEST range.  */
              if (for_each_rtx (&PATTERN (insn), find_memory, NULL))
                return FALSE;
            }
          if (insn == end)
            break;
        }

      if (! any_condjump_p (jump))
        return FALSE;

      /* Find the extent of the conditional.  */
      cond = noce_get_condition (jump, &earliest, false);
      if (! cond)
        return FALSE;

      /* Collect:
           MERGE_SET = set of registers set in MERGE_BB
           TEST_LIVE = set of registers live at EARLIEST
           TEST_SET  = set of registers set between EARLIEST and the
                       end of the block.  */

      merge_set = BITMAP_ALLOC (&reg_obstack);
      test_live = BITMAP_ALLOC (&reg_obstack);
      test_set = BITMAP_ALLOC (&reg_obstack);

      /* ??? bb->local_set is only valid during calculate_global_regs_live,
         so we must recompute usage for MERGE_BB.  Not so bad, I suppose,
         since we've already asserted that MERGE_BB is small.  */
      /* If we allocated new pseudos (e.g. in the conditional move
         expander called from noce_emit_cmove), we must resize the
         array first.  */
      if (max_regno < max_reg_num ())
        max_regno = max_reg_num ();

      FOR_BB_INSNS (merge_bb, insn)
        {
          if (NONDEBUG_INSN_P (insn))
            {
              unsigned int uid = INSN_UID (insn);
              df_ref *def_rec;
              for (def_rec = DF_INSN_UID_DEFS (uid); *def_rec; def_rec++)
                {
                  df_ref def = *def_rec;
                  bitmap_set_bit (merge_set, DF_REF_REGNO (def));
                }
            }
        }

      /* For small register class machines, don't lengthen lifetimes of
         hard registers before reload.  */
      if (SMALL_REGISTER_CLASSES && ! reload_completed)
        {
          EXECUTE_IF_SET_IN_BITMAP (merge_set, 0, i, bi)
            {
              if (i < FIRST_PSEUDO_REGISTER
                  && ! fixed_regs[i]
                  && ! global_regs[i])
                fail = 1;
            }
        }

      /* For TEST, we're interested in a range of insns, not a whole block.
         Moreover, we're interested in the insns live from OTHER_BB.  */

      /* The loop below takes the set of live registers
         after JUMP, and calculates the live set before EARLIEST. */
      bitmap_copy (test_live, df_get_live_in (other_bb));
      df_simulate_initialize_backwards (test_bb, test_live);
      for (insn = jump; ; insn = prev)
        {
          if (INSN_P (insn))
            {
              df_simulate_find_defs (insn, test_set);
              df_simulate_one_insn_backwards (test_bb, insn, test_live);
            }
          prev = PREV_INSN (insn);
          if (insn == earliest)
            break;
        }

      /* We can perform the transformation if
           MERGE_SET & (TEST_SET | TEST_LIVE)
         and
           TEST_SET & DF_LIVE_IN (merge_bb)
         are empty.  */

      if (bitmap_intersect_p (test_set, merge_set)
          || bitmap_intersect_p (test_live, merge_set)
          || bitmap_intersect_p (test_set, df_get_live_in (merge_bb)))
        fail = 1;

      BITMAP_FREE (merge_set);
      BITMAP_FREE (test_live);
      BITMAP_FREE (test_set);

      if (fail)
        return FALSE;
    }

 no_body:
  /* We don't want to use normal invert_jump or redirect_jump because
     we don't want to delete_insn called.  Also, we want to do our own
     change group management.  */

  old_dest = JUMP_LABEL (jump);
  if (other_bb != new_dest)
    {
      new_label = block_label (new_dest);
      if (reversep
          ? ! invert_jump_1 (jump, new_label)
          : ! redirect_jump_1 (jump, new_label))
        goto cancel;
    }

  /* Confirm the whole pending change group (any CE predication above
     plus the jump rewrite) atomically.  */
  if (verify_changes (n_validated_changes))
    confirm_change_group ();
  else
    goto cancel;

  if (other_bb != new_dest)
    {
      redirect_jump_2 (jump, old_dest, new_label, 0, reversep);

      redirect_edge_succ (BRANCH_EDGE (test_bb), new_dest);
      if (reversep)
        {
          /* The branch was inverted, so swap the counts and
             probabilities of the two outgoing edges.  */
          gcov_type count, probability;
          count = BRANCH_EDGE (test_bb)->count;
          BRANCH_EDGE (test_bb)->count = FALLTHRU_EDGE (test_bb)->count;
          FALLTHRU_EDGE (test_bb)->count = count;
          probability = BRANCH_EDGE (test_bb)->probability;
          BRANCH_EDGE (test_bb)->probability
            = FALLTHRU_EDGE (test_bb)->probability;
          FALLTHRU_EDGE (test_bb)->probability = probability;
          update_br_prob_note (test_bb);
        }
    }

  /* Move the insns out of MERGE_BB to before the branch.  */
  if (head != NULL)
    {
      rtx insn;

      if (end == BB_END (merge_bb))
        BB_END (merge_bb) = PREV_INSN (head);

      /* PR 21767: When moving insns above a conditional branch, REG_EQUAL
         notes might become invalid.  */
      insn = head;
      do
        {
          rtx note, set;

          if (! INSN_P (insn))
            continue;
          note = find_reg_note (insn, REG_EQUAL, NULL_RTX);
          if (! note)
            continue;
          set = single_set (insn);
          if (!set || !function_invariant_p (SET_SRC (set))
              || !function_invariant_p (XEXP (note, 0)))
            remove_note (insn, note);
        } while (insn != end && (insn = NEXT_INSN (insn)));

      reorder_insns (head, end, PREV_INSN (earliest));
    }

  /* Remove the jump and edge if we can.  */
  if (other_bb == new_dest)
    {
      delete_insn (jump);
      remove_edge (BRANCH_EDGE (test_bb));
      /* ??? Can't merge blocks here, as then_bb is still in use.
         At minimum, the merge will get done just before bb-reorder.  */
    }

  return TRUE;

 cancel:
  cancel_changes (0);
  return FALSE;
}
4113
 
4114
/* Main entry point for all if-conversion.  Repeatedly scans every
   basic block for convertible if-headers until a pass makes no more
   changes, then reports statistics to the dump file.  */

static void
if_convert (void)
{
  basic_block bb;
  int pass;

  /* At -O1 the DF live problem is not otherwise active; add it for the
     duration of this pass (removed again at the end).  */
  if (optimize == 1)
    {
      df_live_add_problem ();
      df_live_set_all_dirty ();
    }

  num_possible_if_blocks = 0;
  num_updated_if_blocks = 0;
  num_true_changes = 0;

  /* Mark loop exit edges while loop structure is available, then
     discard it; only the edge marks are needed below.  */
  loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
  mark_loop_exit_edges ();
  loop_optimizer_finalize ();
  free_dominance_info (CDI_DOMINATORS);

  /* Compute postdominators.  */
  calculate_dominance_info (CDI_POST_DOMINATORS);

  df_set_flags (DF_LR_RUN_DCE);

  /* Go through each of the basic blocks looking for things to convert.  If we
     have conditional execution, we make multiple passes to allow us to handle
     IF-THEN{-ELSE} blocks within other IF-THEN{-ELSE} blocks.  */
  pass = 0;
  do
    {
      df_analyze ();
      /* Only need to do dce on the first pass.  */
      df_clear_flags (DF_LR_RUN_DCE);
      cond_exec_changed_p = FALSE;
      pass++;

#ifdef IFCVT_MULTIPLE_DUMPS
      if (dump_file && pass > 1)
        fprintf (dump_file, "\n\n========== Pass %d ==========\n", pass);
#endif

      FOR_EACH_BB (bb)
        {
          basic_block new_bb;
          /* Keep converting from the same header while it stays clean;
             find_if_header returns the block to continue from.  */
          while (!df_get_bb_dirty (bb)
                 && (new_bb = find_if_header (bb, pass)) != NULL)
            bb = new_bb;
        }

#ifdef IFCVT_MULTIPLE_DUMPS
      if (dump_file && cond_exec_changed_p)
        {
          if (dump_flags & TDF_SLIM)
            print_rtl_slim_with_bb (dump_file, get_insns (), dump_flags);
          else
            print_rtl_with_bb (dump_file, get_insns ());
        }
#endif
    }
  while (cond_exec_changed_p);

#ifdef IFCVT_MULTIPLE_DUMPS
  if (dump_file)
    fprintf (dump_file, "\n\n========== no more changes\n");
#endif

  free_dominance_info (CDI_POST_DOMINATORS);

  if (dump_file)
    fflush (dump_file);

  clear_aux_for_blocks ();

  /* If we allocated new pseudos, we must resize the array for sched1.  */
  if (max_regno < max_reg_num ())
    max_regno = max_reg_num ();

  /* Write the final stats.  */
  if (dump_file && num_possible_if_blocks > 0)
    {
      fprintf (dump_file,
               "\n%d possible IF blocks searched.\n",
               num_possible_if_blocks);
      fprintf (dump_file,
               "%d IF blocks converted.\n",
               num_updated_if_blocks);
      fprintf (dump_file,
               "%d true changes made.\n\n\n",
               num_true_changes);
    }

  if (optimize == 1)
    df_remove_problem (df_live);

#ifdef ENABLE_CHECKING
  verify_flow_info ();
#endif
}
4216
 
4217
static bool
4218
gate_handle_if_conversion (void)
4219
{
4220
  return (optimize > 0)
4221
    && dbg_cnt (if_conversion);
4222
}
4223
 
4224
/* If-conversion and CFG cleanup.  Execute hook for the "ce1" pass;
   always returns 0.  */
static unsigned int
rest_of_handle_if_conversion (void)
{
  if (flag_if_conversion)
    {
      /* Record the pre-conversion flow info in the dump.  */
      if (dump_file)
        dump_flow_info (dump_file, dump_flags);
      /* Expensive cleanup first, then convert.  */
      cleanup_cfg (CLEANUP_EXPENSIVE);
      if_convert ();
    }

  /* A cheap cleanup runs even when if-conversion itself is disabled.  */
  cleanup_cfg (0);
  return 0;
}
4239
 
4240
/* Pass descriptor for the first RTL if-conversion pass ("ce1").  */
struct rtl_opt_pass pass_rtl_ifcvt =
{
 {
  RTL_PASS,
  "ce1",                                /* name */
  gate_handle_if_conversion,            /* gate */
  rest_of_handle_if_conversion,         /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_IFCVT,                             /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_df_finish | TODO_verify_rtl_sharing |
  TODO_dump_func                        /* todo_flags_finish */
 }
};
4259
 
4260
static bool
4261
gate_handle_if_after_combine (void)
4262
{
4263
  return optimize > 0 && flag_if_conversion
4264
    && dbg_cnt (if_after_combine);
4265
}
4266
 
4267
 
4268
/* Rerun if-conversion, as combine may have simplified things enough
   to now meet sequence length restrictions.  Execute hook for the
   "ce2" pass; always returns 0.  */
static unsigned int
rest_of_handle_if_after_combine (void)
{
  if_convert ();
  return 0;
}
4276
 
4277
/* Pass descriptor for the if-conversion rerun after combine ("ce2").  */
struct rtl_opt_pass pass_if_after_combine =
{
 {
  RTL_PASS,
  "ce2",                                /* name */
  gate_handle_if_after_combine,         /* gate */
  rest_of_handle_if_after_combine,      /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_IFCVT,                             /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_df_finish | TODO_verify_rtl_sharing |
  TODO_dump_func |
  TODO_ggc_collect                      /* todo_flags_finish */
 }
};
4297
 
4298
 
4299
static bool
4300
gate_handle_if_after_reload (void)
4301
{
4302
  return optimize > 0 && flag_if_conversion2
4303
    && dbg_cnt (if_after_reload);
4304
}
4305
 
4306
/* Rerun if-conversion after reload.  Execute hook for the "ce3" pass;
   always returns 0.  */
static unsigned int
rest_of_handle_if_after_reload (void)
{
  if_convert ();
  return 0;
}
4312
 
4313
 
4314
/* Pass descriptor for the if-conversion pass run after reload ("ce3").  */
struct rtl_opt_pass pass_if_after_reload =
{
 {
  RTL_PASS,
  "ce3",                                /* name */
  gate_handle_if_after_reload,          /* gate */
  rest_of_handle_if_after_reload,       /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_IFCVT2,                            /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_df_finish | TODO_verify_rtl_sharing |
  TODO_dump_func |
  TODO_ggc_collect                      /* todo_flags_finish */
 }
};

powered by: WebSVN 2.1.0

© copyright 1999-2024 OpenCores.org, equivalent to Oliscience, all rights reserved. OpenCores®, registered trademark.