OpenCores
URL https://opencores.org/ocsvn/openrisc_2011-10-31/openrisc_2011-10-31/trunk

Subversion Repositories openrisc_2011-10-31

[/] [openrisc/] [trunk/] [gnu-src/] [gcc-4.5.1/] [gcc/] [ifcvt.c] - Blame information for rev 533

Go to most recent revision | Details | Compare with Previous | View Log

Line No. Rev Author Line
1 280 jeremybenn
/* If-conversion support.
2
   Copyright (C) 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2010
3
   Free Software Foundation, Inc.
4
 
5
   This file is part of GCC.
6
 
7
   GCC is free software; you can redistribute it and/or modify it
8
   under the terms of the GNU General Public License as published by
9
   the Free Software Foundation; either version 3, or (at your option)
10
   any later version.
11
 
12
   GCC is distributed in the hope that it will be useful, but WITHOUT
13
   ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
14
   or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public
15
   License for more details.
16
 
17
   You should have received a copy of the GNU General Public License
18
   along with GCC; see the file COPYING3.  If not see
19
   <http://www.gnu.org/licenses/>.  */
20
 
21
#include "config.h"
22
#include "system.h"
23
#include "coretypes.h"
24
#include "tm.h"
25
 
26
#include "rtl.h"
27
#include "regs.h"
28
#include "function.h"
29
#include "flags.h"
30
#include "insn-config.h"
31
#include "recog.h"
32
#include "except.h"
33
#include "hard-reg-set.h"
34
#include "basic-block.h"
35
#include "expr.h"
36
#include "real.h"
37
#include "output.h"
38
#include "optabs.h"
39
#include "toplev.h"
40
#include "tm_p.h"
41
#include "cfgloop.h"
42
#include "target.h"
43
#include "timevar.h"
44
#include "tree-pass.h"
45
#include "df.h"
46
#include "vec.h"
47
#include "vecprim.h"
48
#include "dbgcnt.h"
49
 
50
#ifndef HAVE_conditional_move
51
#define HAVE_conditional_move 0
52
#endif
53
#ifndef HAVE_incscc
54
#define HAVE_incscc 0
55
#endif
56
#ifndef HAVE_decscc
57
#define HAVE_decscc 0
58
#endif
59
#ifndef HAVE_trap
60
#define HAVE_trap 0
61
#endif
62
 
63
#ifndef MAX_CONDITIONAL_EXECUTE
64
#define MAX_CONDITIONAL_EXECUTE \
65
  (BRANCH_COST (optimize_function_for_speed_p (cfun), false) \
66
   + 1)
67
#endif
68
 
69
#define IFCVT_MULTIPLE_DUMPS 1
70
 
71
#define NULL_BLOCK      ((basic_block) NULL)
72
 
73
/* # of IF-THEN or IF-THEN-ELSE blocks we looked at.  */
static int num_possible_if_blocks;

/* # of IF-THEN or IF-THEN-ELSE blocks that were converted to conditional
   execution.  */
static int num_updated_if_blocks;

/* # of changes made (counts individual insn transformations).  */
static int num_true_changes;

/* Nonzero if any conditional-execution changes were made on the last
   pass; used to decide whether another iteration is worthwhile.  */
static int cond_exec_changed_p;
85
 
86
/* Forward references.  */
87
static int count_bb_insns (const_basic_block);
88
static bool cheap_bb_rtx_cost_p (const_basic_block, int);
89
static rtx first_active_insn (basic_block);
90
static rtx last_active_insn (basic_block, int);
91
static basic_block block_fallthru (basic_block);
92
static int cond_exec_process_insns (ce_if_block_t *, rtx, rtx, rtx, rtx, int);
93
static rtx cond_exec_get_condition (rtx);
94
static rtx noce_get_condition (rtx, rtx *, bool);
95
static int noce_operand_ok (const_rtx);
96
static void merge_if_block (ce_if_block_t *);
97
static int find_cond_trap (basic_block, edge, edge);
98
static basic_block find_if_header (basic_block, int);
99
static int block_jumps_and_fallthru_p (basic_block, basic_block);
100
static int noce_find_if_block (basic_block, edge, edge, int);
101
static int cond_exec_find_if_block (ce_if_block_t *);
102
static int find_if_case_1 (basic_block, edge, edge);
103
static int find_if_case_2 (basic_block, edge, edge);
104
static int find_memory (rtx *, void *);
105
static int dead_or_predicable (basic_block, basic_block, basic_block,
106
                               basic_block, int);
107
static void noce_emit_move_insn (rtx, rtx);
108
static rtx block_has_only_trap (basic_block);
109
 
110
/* Count the number of non-jump active insns in BB.  */
111
 
112
static int
113
count_bb_insns (const_basic_block bb)
114
{
115
  int count = 0;
116
  rtx insn = BB_HEAD (bb);
117
 
118
  while (1)
119
    {
120
      if (CALL_P (insn) || NONJUMP_INSN_P (insn))
121
        count++;
122
 
123
      if (insn == BB_END (bb))
124
        break;
125
      insn = NEXT_INSN (insn);
126
    }
127
 
128
  return count;
129
}
130
 
131
/* Determine whether the total insn_rtx_cost on non-jump insns in
   basic block BB is less than MAX_COST.  This function returns
   false if the cost of any instruction could not be estimated.

   BB is walked inclusively from BB_HEAD to BB_END; any CALL_P insn
   makes the block "not cheap" immediately.  */

static bool
cheap_bb_rtx_cost_p (const_basic_block bb, int max_cost)
{
  int count = 0;
  rtx insn = BB_HEAD (bb);
  bool speed = optimize_bb_for_speed_p (bb);

  while (1)
    {
      if (NONJUMP_INSN_P (insn))
        {
          /* insn_rtx_cost returns 0 when it cannot estimate the cost,
             in which case we must conservatively give up.  */
          int cost = insn_rtx_cost (PATTERN (insn), speed);
          if (cost == 0)
            return false;

          /* If this instruction is the load or set of a "stack" register,
             such as a floating point register on x87, then the cost of
             speculatively executing this insn may need to include
             the additional cost of popping its result off of the
             register stack.  Unfortunately, correctly recognizing and
             accounting for this additional overhead is tricky, so for
             now we simply prohibit such speculative execution.  */
#ifdef STACK_REGS
          {
            rtx set = single_set (insn);
            if (set && STACK_REG_P (SET_DEST (set)))
              return false;
          }
#endif

          count += cost;
          if (count >= max_cost)
            return false;
        }
      else if (CALL_P (insn))
        /* Calls are never cheap enough to speculate.  */
        return false;

      if (insn == BB_END (bb))
        break;
      insn = NEXT_INSN (insn);
    }

  return true;
}
179
 
180
/* Return the first non-jump active insn in the basic block.  */
181
 
182
static rtx
183
first_active_insn (basic_block bb)
184
{
185
  rtx insn = BB_HEAD (bb);
186
 
187
  if (LABEL_P (insn))
188
    {
189
      if (insn == BB_END (bb))
190
        return NULL_RTX;
191
      insn = NEXT_INSN (insn);
192
    }
193
 
194
  while (NOTE_P (insn) || DEBUG_INSN_P (insn))
195
    {
196
      if (insn == BB_END (bb))
197
        return NULL_RTX;
198
      insn = NEXT_INSN (insn);
199
    }
200
 
201
  if (JUMP_P (insn))
202
    return NULL_RTX;
203
 
204
  return insn;
205
}
206
 
207
/* Return the last non-jump active insn in basic block BB, or NULL_RTX
   if there is none.  If SKIP_USE_P is nonzero, USE insns are skipped
   over as well (useful after reload, where trailing USEs are common).  */

static rtx
last_active_insn (basic_block bb, int skip_use_p)
{
  rtx insn = BB_END (bb);
  rtx head = BB_HEAD (bb);

  /* Walk backwards past notes, jumps, debug insns, and (optionally)
     USE insns.  */
  while (NOTE_P (insn)
         || JUMP_P (insn)
         || DEBUG_INSN_P (insn)
         || (skip_use_p
             && NONJUMP_INSN_P (insn)
             && GET_CODE (PATTERN (insn)) == USE))
    {
      if (insn == head)
        return NULL_RTX;
      insn = PREV_INSN (insn);
    }

  /* Reaching the block's label means there were no active insns.  */
  if (LABEL_P (insn))
    return NULL_RTX;

  return insn;
}
232
 
233
/* Return the basic block reached by falling though the basic block BB.  */
234
 
235
static basic_block
236
block_fallthru (basic_block bb)
237
{
238
  edge e;
239
  edge_iterator ei;
240
 
241
  FOR_EACH_EDGE (e, ei, bb->succs)
242
    if (e->flags & EDGE_FALLTHRU)
243
      break;
244
 
245
  return (e) ? e->dest : NULL_BLOCK;
246
}
247
 
248
/* Go through a bunch of insns, converting them to conditional
   execution format if possible.  Return TRUE if all of the non-note
   insns were processed.

   CE_INFO    - if-block information (used by IFCVT_MODIFY_INSN only).
   START, END - inclusive range of insns to convert; both must be non-null.
   TEST       - the condition under which the insns should execute.
   PROB_VAL   - branch probability REG_BR_PROB value, attached to calls.
   MOD_OK     - nonzero if it is acceptable for an insn (other than the
                last one) to modify a register used by TEST.

   Changes are queued through validate_change; the caller is expected to
   commit them with apply_change_group or discard them with
   cancel_changes.  */

static int
cond_exec_process_insns (ce_if_block_t *ce_info ATTRIBUTE_UNUSED,
                         /* if block information */rtx start,
                         /* first insn to look at */rtx end,
                         /* last insn to look at */rtx test,
                         /* conditional execution test */rtx prob_val,
                         /* probability of branch taken. */int mod_ok)
{
  int must_be_last = FALSE;
  rtx insn;
  rtx xtest;
  rtx pattern;

  if (!start || !end)
    return FALSE;

  for (insn = start; ; insn = NEXT_INSN (insn))
    {
      /* Notes and debug insns need no conversion.  */
      if (NOTE_P (insn) || DEBUG_INSN_P (insn))
        goto insn_done;

      gcc_assert(NONJUMP_INSN_P (insn) || CALL_P (insn));

      /* Remove USE insns that get in the way.  */
      if (reload_completed && GET_CODE (PATTERN (insn)) == USE)
        {
          /* ??? Ug.  Actually unlinking the thing is problematic,
             given what we'd have to coordinate with our callers.  */
          SET_INSN_DELETED (insn);
          goto insn_done;
        }

      /* Last insn wasn't last?  An earlier insn clobbered TEST, so no
         further insns may follow it.  */
      if (must_be_last)
        return FALSE;

      if (modified_in_p (test, insn))
        {
          if (!mod_ok)
            return FALSE;
          must_be_last = TRUE;
        }

      /* Now build the conditional form of the instruction.  */
      pattern = PATTERN (insn);
      xtest = copy_rtx (test);

      /* If this is already a COND_EXEC, rewrite the test to be an AND of the
         two conditions.  */
      if (GET_CODE (pattern) == COND_EXEC)
        {
          if (GET_MODE (xtest) != GET_MODE (COND_EXEC_TEST (pattern)))
            return FALSE;

          xtest = gen_rtx_AND (GET_MODE (xtest), xtest,
                               COND_EXEC_TEST (pattern));
          pattern = COND_EXEC_CODE (pattern);
        }

      pattern = gen_rtx_COND_EXEC (VOIDmode, xtest, pattern);

      /* If the machine needs to modify the insn being conditionally executed,
         say for example to force a constant integer operand into a temp
         register, do so here.  */
#ifdef IFCVT_MODIFY_INSN
      IFCVT_MODIFY_INSN (ce_info, pattern, insn);
      if (! pattern)
        return FALSE;
#endif

      validate_change (insn, &PATTERN (insn), pattern, 1);

      /* Propagate the branch probability onto calls that are now
         conditionally executed.  */
      if (CALL_P (insn) && prob_val)
        validate_change (insn, &REG_NOTES (insn),
                         alloc_EXPR_LIST (REG_BR_PROB, prob_val,
                                          REG_NOTES (insn)), 1);

    insn_done:
      if (insn == end)
        break;
    }

  return TRUE;
}
336
 
337
/* Return the condition for a jump.  Do not do any special processing.  */
338
 
339
static rtx
340
cond_exec_get_condition (rtx jump)
341
{
342
  rtx test_if, cond;
343
 
344
  if (any_condjump_p (jump))
345
    test_if = SET_SRC (pc_set (jump));
346
  else
347
    return NULL_RTX;
348
  cond = XEXP (test_if, 0);
349
 
350
  /* If this branches to JUMP_LABEL when the condition is false,
351
     reverse the condition.  */
352
  if (GET_CODE (XEXP (test_if, 2)) == LABEL_REF
353
      && XEXP (XEXP (test_if, 2), 0) == JUMP_LABEL (jump))
354
    {
355
      enum rtx_code rev = reversed_comparison_code (cond, jump);
356
      if (rev == UNKNOWN)
357
        return NULL_RTX;
358
 
359
      cond = gen_rtx_fmt_ee (rev, GET_MODE (cond), XEXP (cond, 0),
360
                             XEXP (cond, 1));
361
    }
362
 
363
  return cond;
364
}
365
 
366
/* Given a simple IF-THEN or IF-THEN-ELSE block, attempt to convert it
   to conditional execution.  Return TRUE if we were successful at
   converting the block.

   CE_INFO describes the blocks involved; DO_MULTIPLE_P is nonzero when
   chained && / || test blocks should be converted as a group.  On
   success the blocks are merged and cond_exec_changed_p is set.  */

static int
cond_exec_process_if_block (ce_if_block_t * ce_info,
                            /* if block information */int do_multiple_p)
{
  basic_block test_bb = ce_info->test_bb;       /* last test block */
  basic_block then_bb = ce_info->then_bb;       /* THEN */
  basic_block else_bb = ce_info->else_bb;       /* ELSE or NULL */
  rtx test_expr;                /* expression in IF_THEN_ELSE that is tested */
  rtx then_start;               /* first insn in THEN block */
  rtx then_end;                 /* last insn in THEN block */
  rtx else_start = NULL_RTX;    /* first insn in ELSE block or NULL */
  rtx else_end = NULL_RTX;      /* last insn in ELSE block */
  int max;                      /* max # of insns to convert.  */
  int then_mod_ok;              /* whether conditional mods are ok in THEN */
  rtx true_expr;                /* test for else block insns */
  rtx false_expr;               /* test for then block insns */
  rtx true_prob_val;            /* probability of else block */
  rtx false_prob_val;           /* probability of then block */
  int n_insns;
  enum rtx_code false_code;

  /* If test is comprised of && or || elements, and we've failed at handling
     all of them together, just use the last test if it is the special case of
     && elements without an ELSE block.  */
  if (!do_multiple_p && ce_info->num_multiple_test_blocks)
    {
      if (else_bb || ! ce_info->and_and_p)
        return FALSE;

      ce_info->test_bb = test_bb = ce_info->last_test_bb;
      ce_info->num_multiple_test_blocks = 0;
      ce_info->num_and_and_blocks = 0;
      ce_info->num_or_or_blocks = 0;
    }

  /* Find the conditional jump to the ELSE or JOIN part, and isolate
     the test.  */
  test_expr = cond_exec_get_condition (BB_END (test_bb));
  if (! test_expr)
    return FALSE;

  /* If the conditional jump is more than just a conditional jump,
     then we can not do conditional execution conversion on this block.  */
  if (! onlyjump_p (BB_END (test_bb)))
    return FALSE;

  /* Collect the bounds of where we're to search, skipping any labels, jumps
     and notes at the beginning and end of the block.  Then count the total
     number of insns and see if it is small enough to convert.  */
  then_start = first_active_insn (then_bb);
  then_end = last_active_insn (then_bb, TRUE);
  n_insns = ce_info->num_then_insns = count_bb_insns (then_bb);
  max = MAX_CONDITIONAL_EXECUTE;

  if (else_bb)
    {
      /* Allow twice the budget when both arms are converted.  */
      max *= 2;
      else_start = first_active_insn (else_bb);
      else_end = last_active_insn (else_bb, TRUE);
      n_insns += ce_info->num_else_insns = count_bb_insns (else_bb);
    }

  if (n_insns > max)
    return FALSE;

  /* Map test_expr/test_jump into the appropriate MD tests to use on
     the conditionally executed code.  */

  true_expr = test_expr;

  false_code = reversed_comparison_code (true_expr, BB_END (test_bb));
  if (false_code != UNKNOWN)
    false_expr = gen_rtx_fmt_ee (false_code, GET_MODE (true_expr),
                                 XEXP (true_expr, 0), XEXP (true_expr, 1));
  else
    false_expr = NULL_RTX;

#ifdef IFCVT_MODIFY_TESTS
  /* If the machine description needs to modify the tests, such as setting a
     conditional execution register from a comparison, it can do so here.  */
  IFCVT_MODIFY_TESTS (ce_info, true_expr, false_expr);

  /* See if the conversion failed.  */
  if (!true_expr || !false_expr)
    goto fail;
#endif

  /* Derive the THEN-arm probability from the branch probability note,
     if there is one.  */
  true_prob_val = find_reg_note (BB_END (test_bb), REG_BR_PROB, NULL_RTX);
  if (true_prob_val)
    {
      true_prob_val = XEXP (true_prob_val, 0);
      false_prob_val = GEN_INT (REG_BR_PROB_BASE - INTVAL (true_prob_val));
    }
  else
    false_prob_val = NULL_RTX;

  /* If we have && or || tests, do them here.  These tests are in the adjacent
     blocks after the first block containing the test.  */
  if (ce_info->num_multiple_test_blocks > 0)
    {
      basic_block bb = test_bb;
      basic_block last_test_bb = ce_info->last_test_bb;

      if (! false_expr)
        goto fail;

      do
        {
          rtx start, end;
          rtx t, f;
          enum rtx_code f_code;

          bb = block_fallthru (bb);
          start = first_active_insn (bb);
          end = last_active_insn (bb, TRUE);
          if (start
              && ! cond_exec_process_insns (ce_info, start, end, false_expr,
                                            false_prob_val, FALSE))
            goto fail;

          /* If the conditional jump is more than just a conditional jump, then
             we can not do conditional execution conversion on this block.  */
          if (! onlyjump_p (BB_END (bb)))
            goto fail;

          /* Find the conditional jump and isolate the test.  */
          t = cond_exec_get_condition (BB_END (bb));
          if (! t)
            goto fail;

          f_code = reversed_comparison_code (t, BB_END (bb));
          if (f_code == UNKNOWN)
            goto fail;

          /* Combine this block's condition with the accumulated test:
             for && chains the true test is an AND and the false test an
             IOR (De Morgan); for || chains the reverse.  */
          f = gen_rtx_fmt_ee (f_code, GET_MODE (t), XEXP (t, 0), XEXP (t, 1));
          if (ce_info->and_and_p)
            {
              t = gen_rtx_AND (GET_MODE (t), true_expr, t);
              f = gen_rtx_IOR (GET_MODE (t), false_expr, f);
            }
          else
            {
              t = gen_rtx_IOR (GET_MODE (t), true_expr, t);
              f = gen_rtx_AND (GET_MODE (t), false_expr, f);
            }

          /* If the machine description needs to modify the tests, such as
             setting a conditional execution register from a comparison, it can
             do so here.  */
#ifdef IFCVT_MODIFY_MULTIPLE_TESTS
          IFCVT_MODIFY_MULTIPLE_TESTS (ce_info, bb, t, f);

          /* See if the conversion failed.  */
          if (!t || !f)
            goto fail;
#endif

          true_expr = t;
          false_expr = f;
        }
      while (bb != last_test_bb);
    }

  /* For IF-THEN-ELSE blocks, we don't allow modifications of the test
     on the THEN block.  */
  then_mod_ok = (else_bb == NULL_BLOCK);

  /* Go through the THEN and ELSE blocks converting the insns if possible
     to conditional execution.  */

  if (then_end
      && (! false_expr
          || ! cond_exec_process_insns (ce_info, then_start, then_end,
                                        false_expr, false_prob_val,
                                        then_mod_ok)))
    goto fail;

  if (else_bb && else_end
      && ! cond_exec_process_insns (ce_info, else_start, else_end,
                                    true_expr, true_prob_val, TRUE))
    goto fail;

  /* If we cannot apply the changes, fail.  Do not go through the normal fail
     processing, since apply_change_group will call cancel_changes.  */
  if (! apply_change_group ())
    {
#ifdef IFCVT_MODIFY_CANCEL
      /* Cancel any machine dependent changes.  */
      IFCVT_MODIFY_CANCEL (ce_info);
#endif
      return FALSE;
    }

#ifdef IFCVT_MODIFY_FINAL
  /* Do any machine dependent final modifications.  */
  IFCVT_MODIFY_FINAL (ce_info);
#endif

  /* Conversion succeeded.  */
  if (dump_file)
    fprintf (dump_file, "%d insn%s converted to conditional execution.\n",
             n_insns, (n_insns == 1) ? " was" : "s were");

  /* Merge the blocks!  */
  merge_if_block (ce_info);
  cond_exec_changed_p = TRUE;
  return TRUE;

 fail:
#ifdef IFCVT_MODIFY_CANCEL
  /* Cancel any machine dependent changes.  */
  IFCVT_MODIFY_CANCEL (ce_info);
#endif

  cancel_changes (0);
  return FALSE;
}
587
 
588
/* Used by noce_process_if_block to communicate with its subroutines.

   The subroutines know that A and B may be evaluated freely.  They
   know that X is a register.  They should insert new instructions
   before cond_earliest.  */

struct noce_if_info
{
  /* The basic blocks that make up the IF-THEN-{ELSE-,}JOIN block.  */
  basic_block test_bb, then_bb, else_bb, join_bb;

  /* The jump that ends TEST_BB.  */
  rtx jump;

  /* The jump condition.  */
  rtx cond;

  /* New insns should be inserted before this one.  */
  rtx cond_earliest;

  /* Insns in the THEN and ELSE block.  There is always just one insn
     in each of those blocks.  The insns are single_set insns.
     If there was no ELSE block, INSN_B is the last insn before
     COND_EARLIEST, or NULL_RTX.  In the former case, the insn
     operands are still valid, as if INSN_B was moved down below
     the jump.  */
  rtx insn_a, insn_b;

  /* The SET_SRC of INSN_A and INSN_B.  */
  rtx a, b;

  /* The SET_DEST of INSN_A.  */
  rtx x;

  /* True if this if block is not canonical.  In the canonical form of
     if blocks, the THEN_BB is the block reached via the fallthru edge
     from TEST_BB.  For the noce transformations, we allow the symmetric
     form as well.  */
  bool then_else_reversed;

  /* Estimated cost of the particular branch instruction.  */
  int branch_cost;
};
631
 
632
static rtx noce_emit_store_flag (struct noce_if_info *, rtx, int, int);
633
static int noce_try_move (struct noce_if_info *);
634
static int noce_try_store_flag (struct noce_if_info *);
635
static int noce_try_addcc (struct noce_if_info *);
636
static int noce_try_store_flag_constants (struct noce_if_info *);
637
static int noce_try_store_flag_mask (struct noce_if_info *);
638
static rtx noce_emit_cmove (struct noce_if_info *, rtx, enum rtx_code, rtx,
639
                            rtx, rtx, rtx);
640
static int noce_try_cmove (struct noce_if_info *);
641
static int noce_try_cmove_arith (struct noce_if_info *);
642
static rtx noce_get_alt_condition (struct noce_if_info *, rtx, rtx *);
643
static int noce_try_minmax (struct noce_if_info *);
644
static int noce_try_abs (struct noce_if_info *);
645
static int noce_try_sign_mask (struct noce_if_info *);
646
 
647
/* Helper function for noce_try_store_flag*.

   Emit code to store the value of IF_INFO's jump condition into X.
   REVERSEP nonzero means store the reversed condition; NORMALIZE is
   the desired store-flag value (0 means "don't care", otherwise the
   "true" result must equal NORMALIZE).  Returns the rtx holding the
   result, or NULL_RTX on failure.  Emits into the currently open
   sequence.  */

static rtx
noce_emit_store_flag (struct noce_if_info *if_info, rtx x, int reversep,
                      int normalize)
{
  rtx cond = if_info->cond;
  int cond_complex;
  enum rtx_code code;

  /* "Complex" means one of the comparison operands is not a
     general_operand (e.g. already a cc0/flags-style comparison).  */
  cond_complex = (! general_operand (XEXP (cond, 0), VOIDmode)
                  || ! general_operand (XEXP (cond, 1), VOIDmode));

  /* If earliest == jump, or when the condition is complex, try to
     build the store_flag insn directly.  */

  if (cond_complex)
    {
      rtx set = pc_set (if_info->jump);
      cond = XEXP (SET_SRC (set), 0);
      /* A jump to JUMP_LABEL on the false arm means the written
         condition is already reversed relative to the THEN block.  */
      if (GET_CODE (XEXP (SET_SRC (set), 2)) == LABEL_REF
          && XEXP (XEXP (SET_SRC (set), 2), 0) == JUMP_LABEL (if_info->jump))
        reversep = !reversep;
      if (if_info->then_else_reversed)
        reversep = !reversep;
    }

  if (reversep)
    code = reversed_comparison_code (cond, if_info->jump);
  else
    code = GET_CODE (cond);

  if ((if_info->cond_earliest == if_info->jump || cond_complex)
      && (normalize == 0 || STORE_FLAG_VALUE == normalize))
    {
      rtx tmp;

      tmp = gen_rtx_fmt_ee (code, GET_MODE (x), XEXP (cond, 0),
                            XEXP (cond, 1));
      tmp = gen_rtx_SET (VOIDmode, x, tmp);

      start_sequence ();
      tmp = emit_insn (tmp);

      /* Keep the direct store-flag insn only if the target recognizes
         it; otherwise throw the trial sequence away.  */
      if (recog_memoized (tmp) >= 0)
        {
          tmp = get_insns ();
          end_sequence ();
          emit_insn (tmp);

          if_info->cond_earliest = if_info->jump;

          return x;
        }

      end_sequence ();
    }

  /* Don't even try if the comparison operands or the mode of X are weird.  */
  if (cond_complex || !SCALAR_INT_MODE_P (GET_MODE (x)))
    return NULL_RTX;

  return emit_store_flag (x, code, XEXP (cond, 0),
                          XEXP (cond, 1), VOIDmode,
                          (code == LTU || code == LEU
                           || code == GEU || code == GTU), normalize);
}
714
 
715
/* Emit instruction to move an rtx, possibly into STRICT_LOW_PART.
   X is the destination/target and Y is the value to copy.  Emits into
   the currently open sequence.  Falls back to store_bit_field for
   ZERO_EXTRACT and STRICT_LOW_PART destinations, and to explicit optab
   expansion when the plain SET is not recognizable.  */

static void
noce_emit_move_insn (rtx x, rtx y)
{
  enum machine_mode outmode;
  rtx outer, inner;
  int bitpos;

  if (GET_CODE (x) != STRICT_LOW_PART)
    {
      rtx seq, insn, target;
      optab ot;

      start_sequence ();
      /* Check that the SET_SRC is reasonable before calling emit_move_insn,
         otherwise construct a suitable SET pattern ourselves.  */
      insn = (OBJECT_P (y) || CONSTANT_P (y) || GET_CODE (y) == SUBREG)
             ? emit_move_insn (x, y)
             : emit_insn (gen_rtx_SET (VOIDmode, x, y));
      seq = get_insns ();
      end_sequence ();

      /* If the target did not recognize the move, try harder.  */
      if (recog_memoized (insn) <= 0)
        {
          if (GET_CODE (x) == ZERO_EXTRACT)
            {
              rtx op = XEXP (x, 0);
              unsigned HOST_WIDE_INT size = INTVAL (XEXP (x, 1));
              unsigned HOST_WIDE_INT start = INTVAL (XEXP (x, 2));

              /* store_bit_field expects START to be relative to
                 BYTES_BIG_ENDIAN and adjusts this value for machines with
                 BITS_BIG_ENDIAN != BYTES_BIG_ENDIAN.  In order to be able to
                 invoke store_bit_field again it is necessary to have the START
                 value from the first call.  */
              if (BITS_BIG_ENDIAN != BYTES_BIG_ENDIAN)
                {
                  if (MEM_P (op))
                    start = BITS_PER_UNIT - start - size;
                  else
                    {
                      gcc_assert (REG_P (op));
                      start = BITS_PER_WORD - start - size;
                    }
                }

              gcc_assert (start < (MEM_P (op) ? BITS_PER_UNIT : BITS_PER_WORD));
              store_bit_field (op, size, start, GET_MODE (x), y);
              return;
            }

          /* Expand Y's operation directly into X via the optab tables,
             replacing SEQ when the expansion succeeds.  */
          switch (GET_RTX_CLASS (GET_CODE (y)))
            {
            case RTX_UNARY:
              ot = code_to_optab[GET_CODE (y)];
              if (ot)
                {
                  start_sequence ();
                  target = expand_unop (GET_MODE (y), ot, XEXP (y, 0), x, 0);
                  if (target != NULL_RTX)
                    {
                      if (target != x)
                        emit_move_insn (x, target);
                      seq = get_insns ();
                    }
                  end_sequence ();
                }
              break;

            case RTX_BIN_ARITH:
            case RTX_COMM_ARITH:
              ot = code_to_optab[GET_CODE (y)];
              if (ot)
                {
                  start_sequence ();
                  target = expand_binop (GET_MODE (y), ot,
                                         XEXP (y, 0), XEXP (y, 1),
                                         x, 0, OPTAB_DIRECT);
                  if (target != NULL_RTX)
                    {
                      if (target != x)
                          emit_move_insn (x, target);
                      seq = get_insns ();
                    }
                  end_sequence ();
                }
              break;

            default:
              break;
            }
        }

      emit_insn (seq);
      return;
    }

  /* STRICT_LOW_PART destination: write only the low part of the
     underlying register via store_bit_field.  */
  outer = XEXP (x, 0);
  inner = XEXP (outer, 0);
  outmode = GET_MODE (outer);
  bitpos = SUBREG_BYTE (outer) * BITS_PER_UNIT;
  store_bit_field (inner, GET_MODE_BITSIZE (outmode), bitpos, outmode, y);
}
820
 
821
/* Return sequence of instructions generated by if conversion.  This
822
   function calls end_sequence() to end the current stream, ensures
823
   that are instructions are unshared, recognizable non-jump insns.
824
   On failure, this function returns a NULL_RTX.  */
825
 
826
static rtx
827
end_ifcvt_sequence (struct noce_if_info *if_info)
828
{
829
  rtx insn;
830
  rtx seq = get_insns ();
831
 
832
  set_used_flags (if_info->x);
833
  set_used_flags (if_info->cond);
834
  unshare_all_rtl_in_chain (seq);
835
  end_sequence ();
836
 
837
  /* Make sure that all of the instructions emitted are recognizable,
838
     and that we haven't introduced a new jump instruction.
839
     As an exercise for the reader, build a general mechanism that
840
     allows proper placement of required clobbers.  */
841
  for (insn = seq; insn; insn = NEXT_INSN (insn))
842
    if (JUMP_P (insn)
843
        || recog_memoized (insn) == -1)
844
      return NULL_RTX;
845
 
846
  return seq;
847
}
848
 
849
/* Convert "if (a != b) x = a; else x = b" into "x = a" and
850
   "if (a == b) x = a; else x = b" into "x = b".  */
851
 
852
static int
853
noce_try_move (struct noce_if_info *if_info)
854
{
855
  rtx cond = if_info->cond;
856
  enum rtx_code code = GET_CODE (cond);
857
  rtx y, seq;
858
 
859
  if (code != NE && code != EQ)
860
    return FALSE;
861
 
862
  /* This optimization isn't valid if either A or B could be a NaN
863
     or a signed zero.  */
864
  if (HONOR_NANS (GET_MODE (if_info->x))
865
      || HONOR_SIGNED_ZEROS (GET_MODE (if_info->x)))
866
    return FALSE;
867
 
868
  /* Check whether the operands of the comparison are A and in
869
     either order.  */
870
  if ((rtx_equal_p (if_info->a, XEXP (cond, 0))
871
       && rtx_equal_p (if_info->b, XEXP (cond, 1)))
872
      || (rtx_equal_p (if_info->a, XEXP (cond, 1))
873
          && rtx_equal_p (if_info->b, XEXP (cond, 0))))
874
    {
875
      y = (code == EQ) ? if_info->a : if_info->b;
876
 
877
      /* Avoid generating the move if the source is the destination.  */
878
      if (! rtx_equal_p (if_info->x, y))
879
        {
880
          start_sequence ();
881
          noce_emit_move_insn (if_info->x, y);
882
          seq = end_ifcvt_sequence (if_info);
883
          if (!seq)
884
            return FALSE;
885
 
886
          emit_insn_before_setloc (seq, if_info->jump,
887
                                   INSN_LOCATOR (if_info->insn_a));
888
        }
889
      return TRUE;
890
    }
891
  return FALSE;
892
}
893
 
894
/* Convert "if (test) x = 1; else x = 0".
895
 
896
   Only try 0 and STORE_FLAG_VALUE here.  Other combinations will be
897
   tried in noce_try_store_flag_constants after noce_try_cmove has had
898
   a go at the conversion.  */
899
 
900
static int
901
noce_try_store_flag (struct noce_if_info *if_info)
902
{
903
  int reversep;
904
  rtx target, seq;
905
 
906
  if (CONST_INT_P (if_info->b)
907
      && INTVAL (if_info->b) == STORE_FLAG_VALUE
908
      && if_info->a == const0_rtx)
909
    reversep = 0;
910
  else if (if_info->b == const0_rtx
911
           && CONST_INT_P (if_info->a)
912
           && INTVAL (if_info->a) == STORE_FLAG_VALUE
913
           && (reversed_comparison_code (if_info->cond, if_info->jump)
914
               != UNKNOWN))
915
    reversep = 1;
916
  else
917
    return FALSE;
918
 
919
  start_sequence ();
920
 
921
  target = noce_emit_store_flag (if_info, if_info->x, reversep, 0);
922
  if (target)
923
    {
924
      if (target != if_info->x)
925
        noce_emit_move_insn (if_info->x, target);
926
 
927
      seq = end_ifcvt_sequence (if_info);
928
      if (! seq)
929
        return FALSE;
930
 
931
      emit_insn_before_setloc (seq, if_info->jump,
932
                               INSN_LOCATOR (if_info->insn_a));
933
      return TRUE;
934
    }
935
  else
936
    {
937
      end_sequence ();
938
      return FALSE;
939
    }
940
}
941
 
942
/* Convert "if (test) x = a; else x = b", for A and B constant.
   The conversion is built on top of a store-flag (setcc) value T:
   depending on A and B we emit one of
     x = A + T, x = T << log2 (B), x = T | A, or x = (T & (B - A)) + A,
   where T is (test != 0) normalized to 1, -1 or STORE_FLAG_VALUE.
   Returns TRUE if the replacement sequence was emitted.  */

static int
noce_try_store_flag_constants (struct noce_if_info *if_info)
{
  rtx target, seq;
  int reversep;
  HOST_WIDE_INT itrue, ifalse, diff, tmp;
  /* NORMALIZE is passed to noce_emit_store_flag: 0 keeps the raw
     STORE_FLAG_VALUE, 1 forces 0/1, -1 forces 0/-1.  */
  int normalize, can_reverse;
  enum machine_mode mode;

  if (CONST_INT_P (if_info->a)
      && CONST_INT_P (if_info->b))
    {
      mode = GET_MODE (if_info->x);
      ifalse = INTVAL (if_info->a);
      itrue = INTVAL (if_info->b);

      /* Make sure we can represent the difference between the two values.
         (itrue - ifalse must not overflow HOST_WIDE_INT; the right-hand
         side recomputes the sign the subtraction ought to have.)  */
      if ((itrue - ifalse > 0)
          != ((ifalse < 0) != (itrue < 0) ? ifalse < 0 : ifalse < itrue))
        return FALSE;

      diff = trunc_int_for_mode (itrue - ifalse, mode);

      /* Whether we may swap the roles of A and B by inverting the test.  */
      can_reverse = (reversed_comparison_code (if_info->cond, if_info->jump)
                     != UNKNOWN);

      /* Pick the cheapest normalization for the shapes we can handle,
         gated on the target's branch cost.  */
      reversep = 0;
      if (diff == STORE_FLAG_VALUE || diff == -STORE_FLAG_VALUE)
        normalize = 0;
      else if (ifalse == 0 && exact_log2 (itrue) >= 0
               && (STORE_FLAG_VALUE == 1
                   || if_info->branch_cost >= 2))
        normalize = 1;
      else if (itrue == 0 && exact_log2 (ifalse) >= 0 && can_reverse
               && (STORE_FLAG_VALUE == 1 || if_info->branch_cost >= 2))
        normalize = 1, reversep = 1;
      else if (itrue == -1
               && (STORE_FLAG_VALUE == -1
                   || if_info->branch_cost >= 2))
        normalize = -1;
      else if (ifalse == -1 && can_reverse
               && (STORE_FLAG_VALUE == -1 || if_info->branch_cost >= 2))
        normalize = -1, reversep = 1;
      else if ((if_info->branch_cost >= 2 && STORE_FLAG_VALUE == -1)
               || if_info->branch_cost >= 3)
        normalize = -1;
      else
        return FALSE;

      /* When reversing, swap the arm values and negate their difference
         so the arithmetic below still applies.  */
      if (reversep)
        {
          tmp = itrue; itrue = ifalse; ifalse = tmp;
          diff = trunc_int_for_mode (-diff, mode);
        }

      start_sequence ();
      target = noce_emit_store_flag (if_info, if_info->x, reversep, normalize);
      if (! target)
        {
          end_sequence ();
          return FALSE;
        }

      /* if (test) x = 3; else x = 4;
         =>   x = 3 + (test == 0);  */
      if (diff == STORE_FLAG_VALUE || diff == -STORE_FLAG_VALUE)
        {
          target = expand_simple_binop (mode,
                                        (diff == STORE_FLAG_VALUE
                                         ? PLUS : MINUS),
                                        GEN_INT (ifalse), target, if_info->x, 0,
                                        OPTAB_WIDEN);
        }

      /* if (test) x = 8; else x = 0;
         =>   x = (test != 0) << 3;  */
      else if (ifalse == 0 && (tmp = exact_log2 (itrue)) >= 0)
        {
          target = expand_simple_binop (mode, ASHIFT,
                                        target, GEN_INT (tmp), if_info->x, 0,
                                        OPTAB_WIDEN);
        }

      /* if (test) x = -1; else x = b;
         =>   x = -(test != 0) | b;  */
      else if (itrue == -1)
        {
          target = expand_simple_binop (mode, IOR,
                                        target, GEN_INT (ifalse), if_info->x, 0,
                                        OPTAB_WIDEN);
        }

      /* if (test) x = a; else x = b;
         =>   x = (-(test != 0) & (b - a)) + a;  */
      else
        {
          target = expand_simple_binop (mode, AND,
                                        target, GEN_INT (diff), if_info->x, 0,
                                        OPTAB_WIDEN);
          if (target)
            target = expand_simple_binop (mode, PLUS,
                                          target, GEN_INT (ifalse),
                                          if_info->x, 0, OPTAB_WIDEN);
        }

      if (! target)
        {
          end_sequence ();
          return FALSE;
        }

      if (target != if_info->x)
        noce_emit_move_insn (if_info->x, target);

      seq = end_ifcvt_sequence (if_info);
      if (!seq)
        return FALSE;

      emit_insn_before_setloc (seq, if_info->jump,
                               INSN_LOCATOR (if_info->insn_a));
      return TRUE;
    }

  return FALSE;
}
1069
 
1070
/* Convert "if (test) foo++" into "foo += (test != 0)", and
   similarly for "foo--".  Tries the target's conditional-add
   (addcc) pattern first; failing that, materializes the flag with
   setcc and adds/subtracts it.  Returns TRUE on success.  */

static int
noce_try_addcc (struct noce_if_info *if_info)
{
  rtx target, seq;
  int subtract, normalize;

  /* A must be "B + something" and the branch condition reversible,
     since the increment happens on the *taken* path.  */
  if (GET_CODE (if_info->a) == PLUS
      && rtx_equal_p (XEXP (if_info->a, 0), if_info->b)
      && (reversed_comparison_code (if_info->cond, if_info->jump)
          != UNKNOWN))
    {
      rtx cond = if_info->cond;
      enum rtx_code code = reversed_comparison_code (cond, if_info->jump);

      /* First try to use addcc pattern.  */
      if (general_operand (XEXP (cond, 0), VOIDmode)
          && general_operand (XEXP (cond, 1), VOIDmode))
        {
          start_sequence ();
          /* Last argument flags an unsigned comparison for the target.  */
          target = emit_conditional_add (if_info->x, code,
                                         XEXP (cond, 0),
                                         XEXP (cond, 1),
                                         VOIDmode,
                                         if_info->b,
                                         XEXP (if_info->a, 1),
                                         GET_MODE (if_info->x),
                                         (code == LTU || code == GEU
                                          || code == LEU || code == GTU));
          if (target)
            {
              if (target != if_info->x)
                noce_emit_move_insn (if_info->x, target);

              seq = end_ifcvt_sequence (if_info);
              if (!seq)
                return FALSE;

              emit_insn_before_setloc (seq, if_info->jump,
                                       INSN_LOCATOR (if_info->insn_a));
              return TRUE;
            }
          end_sequence ();
        }

      /* If that fails, construct conditional increment or decrement using
         setcc.  */
      if (if_info->branch_cost >= 2
          && (XEXP (if_info->a, 1) == const1_rtx
              || XEXP (if_info->a, 1) == constm1_rtx))
        {
          start_sequence ();
          /* Choose how to combine the flag with B: add the raw flag,
             subtract it, or normalize it to the exact +-1 addend.  */
          if (STORE_FLAG_VALUE == INTVAL (XEXP (if_info->a, 1)))
            subtract = 0, normalize = 0;
          else if (-STORE_FLAG_VALUE == INTVAL (XEXP (if_info->a, 1)))
            subtract = 1, normalize = 0;
          else
            subtract = 0, normalize = INTVAL (XEXP (if_info->a, 1));


          target = noce_emit_store_flag (if_info,
                                         gen_reg_rtx (GET_MODE (if_info->x)),
                                         1, normalize);

          if (target)
            target = expand_simple_binop (GET_MODE (if_info->x),
                                          subtract ? MINUS : PLUS,
                                          if_info->b, target, if_info->x,
                                          0, OPTAB_WIDEN);
          if (target)
            {
              if (target != if_info->x)
                noce_emit_move_insn (if_info->x, target);

              seq = end_ifcvt_sequence (if_info);
              if (!seq)
                return FALSE;

              emit_insn_before_setloc (seq, if_info->jump,
                                       INSN_LOCATOR (if_info->insn_a));
              return TRUE;
            }
          end_sequence ();
        }
    }

  return FALSE;
}
1160
 
1161
/* Convert "if (test) x = 0;" to "x &= -(test == 0);"  */
1162
 
1163
static int
1164
noce_try_store_flag_mask (struct noce_if_info *if_info)
1165
{
1166
  rtx target, seq;
1167
  int reversep;
1168
 
1169
  reversep = 0;
1170
  if ((if_info->branch_cost >= 2
1171
       || STORE_FLAG_VALUE == -1)
1172
      && ((if_info->a == const0_rtx
1173
           && rtx_equal_p (if_info->b, if_info->x))
1174
          || ((reversep = (reversed_comparison_code (if_info->cond,
1175
                                                     if_info->jump)
1176
                           != UNKNOWN))
1177
              && if_info->b == const0_rtx
1178
              && rtx_equal_p (if_info->a, if_info->x))))
1179
    {
1180
      start_sequence ();
1181
      target = noce_emit_store_flag (if_info,
1182
                                     gen_reg_rtx (GET_MODE (if_info->x)),
1183
                                     reversep, -1);
1184
      if (target)
1185
        target = expand_simple_binop (GET_MODE (if_info->x), AND,
1186
                                      if_info->x,
1187
                                      target, if_info->x, 0,
1188
                                      OPTAB_WIDEN);
1189
 
1190
      if (target)
1191
        {
1192
          if (target != if_info->x)
1193
            noce_emit_move_insn (if_info->x, target);
1194
 
1195
          seq = end_ifcvt_sequence (if_info);
1196
          if (!seq)
1197
            return FALSE;
1198
 
1199
          emit_insn_before_setloc (seq, if_info->jump,
1200
                                   INSN_LOCATOR (if_info->insn_a));
1201
          return TRUE;
1202
        }
1203
 
1204
      end_sequence ();
1205
    }
1206
 
1207
  return FALSE;
1208
}
1209
 
1210
/* Helper function for noce_try_cmove and noce_try_cmove_arith.
   Emit a conditional move of VTRUE/VFALSE into X, selected by the
   comparison (CODE CMP_A CMP_B).  Returns the result rtx, or
   NULL_RTX if no conditional-move sequence could be generated.  */

static rtx
noce_emit_cmove (struct noce_if_info *if_info, rtx x, enum rtx_code code,
                 rtx cmp_a, rtx cmp_b, rtx vfalse, rtx vtrue)
{
  /* If earliest == jump, try to build the cmove insn directly.
     This is helpful when combine has created some complex condition
     (like for alpha's cmovlbs) that we can't hope to regenerate
     through the normal interface.  */

  if (if_info->cond_earliest == if_info->jump)
    {
      rtx tmp;

      /* Hand-build (set x (if_then_else (code cmp_a cmp_b) vtrue vfalse))
         and see whether the target recognizes it as-is.  */
      tmp = gen_rtx_fmt_ee (code, GET_MODE (if_info->cond), cmp_a, cmp_b);
      tmp = gen_rtx_IF_THEN_ELSE (GET_MODE (x), tmp, vtrue, vfalse);
      tmp = gen_rtx_SET (VOIDmode, x, tmp);

      start_sequence ();
      tmp = emit_insn (tmp);

      if (recog_memoized (tmp) >= 0)
        {
          /* Recognized: splice the probe sequence into the enclosing one.  */
          tmp = get_insns ();
          end_sequence ();
          emit_insn (tmp);

          return x;
        }

      end_sequence ();
    }

  /* Don't even try if the comparison operands are weird.  */
  if (! general_operand (cmp_a, GET_MODE (cmp_a))
      || ! general_operand (cmp_b, GET_MODE (cmp_b)))
    return NULL_RTX;

#if HAVE_conditional_move
  /* Last argument flags an unsigned comparison for the target.  */
  return emit_conditional_move (x, code, cmp_a, cmp_b, VOIDmode,
                                vtrue, vfalse, GET_MODE (x),
                                (code == LTU || code == GEU
                                 || code == LEU || code == GTU));
#else
  /* We'll never get here, as noce_process_if_block doesn't call the
     functions involved.  Ifdef code, however, should be discouraged
     because it leads to typos in the code not selected.  However,
     emit_conditional_move won't exist either.  */
  return NULL_RTX;
#endif
}
1262
 
1263
/* Try only simple constants and registers here.  More complex cases
1264
   are handled in noce_try_cmove_arith after noce_try_store_flag_arith
1265
   has had a go at it.  */
1266
 
1267
static int
1268
noce_try_cmove (struct noce_if_info *if_info)
1269
{
1270
  enum rtx_code code;
1271
  rtx target, seq;
1272
 
1273
  if ((CONSTANT_P (if_info->a) || register_operand (if_info->a, VOIDmode))
1274
      && (CONSTANT_P (if_info->b) || register_operand (if_info->b, VOIDmode)))
1275
    {
1276
      start_sequence ();
1277
 
1278
      code = GET_CODE (if_info->cond);
1279
      target = noce_emit_cmove (if_info, if_info->x, code,
1280
                                XEXP (if_info->cond, 0),
1281
                                XEXP (if_info->cond, 1),
1282
                                if_info->a, if_info->b);
1283
 
1284
      if (target)
1285
        {
1286
          if (target != if_info->x)
1287
            noce_emit_move_insn (if_info->x, target);
1288
 
1289
          seq = end_ifcvt_sequence (if_info);
1290
          if (!seq)
1291
            return FALSE;
1292
 
1293
          emit_insn_before_setloc (seq, if_info->jump,
1294
                                   INSN_LOCATOR (if_info->insn_a));
1295
          return TRUE;
1296
        }
1297
      else
1298
        {
1299
          end_sequence ();
1300
          return FALSE;
1301
        }
1302
    }
1303
 
1304
  return FALSE;
1305
}
1306
 
1307
/* Try more complex cases involving conditional_move: both arms may be
   arbitrary (side-effect-free) expressions, which are computed
   unconditionally into temporaries before the conditional move.
   Returns TRUE on success.  */

static int
noce_try_cmove_arith (struct noce_if_info *if_info)
{
  rtx a = if_info->a;
  rtx b = if_info->b;
  rtx x = if_info->x;
  rtx orig_a, orig_b;
  rtx insn_a, insn_b;
  rtx tmp, target;
  int is_mem = 0;
  int insn_cost;
  enum rtx_code code;

  /* A conditional move from two memory sources is equivalent to a
     conditional on their addresses followed by a load.  Don't do this
     early because it'll screw alias analysis.  Note that we've
     already checked for no side effects.  */
  /* ??? FIXME: Magic number 5.  */
  if (cse_not_expected
      && MEM_P (a) && MEM_P (b)
      && MEM_ADDR_SPACE (a) == MEM_ADDR_SPACE (b)
      && if_info->branch_cost >= 5)
    {
      enum machine_mode address_mode
        = targetm.addr_space.address_mode (MEM_ADDR_SPACE (a));

      /* From here on, A and B are the *addresses*; the final load
         through the selected address is emitted after the cmove.  */
      a = XEXP (a, 0);
      b = XEXP (b, 0);
      x = gen_reg_rtx (address_mode);
      is_mem = 1;
    }

  /* ??? We could handle this if we knew that a load from A or B could
     not fault.  This is also true if we've already loaded
     from the address along the path from ENTRY.  */
  else if (may_trap_p (a) || may_trap_p (b))
    return FALSE;

  /* if (test) x = a + b; else x = c - d;
     => y = a + b;
        x = c - d;
        if (test)
          x = y;
  */

  code = GET_CODE (if_info->cond);
  insn_a = if_info->insn_a;
  insn_b = if_info->insn_b;

  /* Total insn_rtx_cost should be smaller than branch cost.  Exit
     if insn_rtx_cost can't be estimated.  */
  if (insn_a)
    {
      insn_cost = insn_rtx_cost (PATTERN (insn_a),
                                 optimize_bb_for_speed_p (BLOCK_FOR_INSN (insn_a)));
      if (insn_cost == 0 || insn_cost > COSTS_N_INSNS (if_info->branch_cost))
        return FALSE;
    }
  else
    insn_cost = 0;

  if (insn_b)
    {
      insn_cost += insn_rtx_cost (PATTERN (insn_b),
                                 optimize_bb_for_speed_p (BLOCK_FOR_INSN (insn_b)));
      if (insn_cost == 0 || insn_cost > COSTS_N_INSNS (if_info->branch_cost))
        return FALSE;
    }

  /* Possibly rearrange operands to make things come out more natural.  */
  if (reversed_comparison_code (if_info->cond, if_info->jump) != UNKNOWN)
    {
      int reversep = 0;
      /* Prefer B on the false arm when B is X itself or is "simple"
         (a general operand); reversing the test makes that cheaper.  */
      if (rtx_equal_p (b, x))
        reversep = 1;
      else if (general_operand (b, GET_MODE (b)))
        reversep = 1;

      if (reversep)
        {
          code = reversed_comparison_code (if_info->cond, if_info->jump);
          tmp = a, a = b, b = tmp;
          tmp = insn_a, insn_a = insn_b, insn_b = tmp;
        }
    }

  start_sequence ();

  orig_a = a;
  orig_b = b;

  /* If either operand is complex, load it into a register first.
     The best way to do this is to copy the original insn.  In this
     way we preserve any clobbers etc that the insn may have had.
     This is of course not possible in the IS_MEM case.  */
  if (! general_operand (a, GET_MODE (a)))
    {
      rtx set;

      if (is_mem)
        {
          tmp = gen_reg_rtx (GET_MODE (a));
          tmp = emit_insn (gen_rtx_SET (VOIDmode, tmp, a));
        }
      else if (! insn_a)
        goto end_seq_and_fail;
      else
        {
          /* Copy INSN_A and retarget its destination to a fresh
             pseudo so clobbers etc. are preserved.  */
          a = gen_reg_rtx (GET_MODE (a));
          tmp = copy_rtx (insn_a);
          set = single_set (tmp);
          SET_DEST (set) = a;
          tmp = emit_insn (PATTERN (tmp));
        }
      if (recog_memoized (tmp) < 0)
        goto end_seq_and_fail;
    }
  if (! general_operand (b, GET_MODE (b)))
    {
      rtx set, last;

      if (is_mem)
        {
          tmp = gen_reg_rtx (GET_MODE (b));
          tmp = gen_rtx_SET (VOIDmode, tmp, b);
        }
      else if (! insn_b)
        goto end_seq_and_fail;
      else
        {
          b = gen_reg_rtx (GET_MODE (b));
          tmp = copy_rtx (insn_b);
          set = single_set (tmp);
          SET_DEST (set) = b;
          tmp = PATTERN (tmp);
        }

      /* If insn to set up A clobbers any registers B depends on, try to
         swap insn that sets up A with the one that sets up B.  If even
         that doesn't help, punt.  */
      last = get_last_insn ();
      if (last && modified_in_p (orig_b, last))
        {
          tmp = emit_insn_before (tmp, get_insns ());
          if (modified_in_p (orig_a, tmp))
            goto end_seq_and_fail;
        }
      else
        tmp = emit_insn (tmp);

      if (recog_memoized (tmp) < 0)
        goto end_seq_and_fail;
    }

  target = noce_emit_cmove (if_info, x, code, XEXP (if_info->cond, 0),
                            XEXP (if_info->cond, 1), a, b);

  if (! target)
    goto end_seq_and_fail;

  /* If we're handling a memory for above, emit the load now.  */
  if (is_mem)
    {
      /* TARGET holds the selected address; build the load and copy
         the conservative intersection of both source MEMs' flags.  */
      tmp = gen_rtx_MEM (GET_MODE (if_info->x), target);

      /* Copy over flags as appropriate.  */
      if (MEM_VOLATILE_P (if_info->a) || MEM_VOLATILE_P (if_info->b))
        MEM_VOLATILE_P (tmp) = 1;
      if (MEM_IN_STRUCT_P (if_info->a) && MEM_IN_STRUCT_P (if_info->b))
        MEM_IN_STRUCT_P (tmp) = 1;
      if (MEM_SCALAR_P (if_info->a) && MEM_SCALAR_P (if_info->b))
        MEM_SCALAR_P (tmp) = 1;
      if (MEM_ALIAS_SET (if_info->a) == MEM_ALIAS_SET (if_info->b))
        set_mem_alias_set (tmp, MEM_ALIAS_SET (if_info->a));
      set_mem_align (tmp,
                     MIN (MEM_ALIGN (if_info->a), MEM_ALIGN (if_info->b)));

      gcc_assert (MEM_ADDR_SPACE (if_info->a) == MEM_ADDR_SPACE (if_info->b));
      set_mem_addr_space (tmp, MEM_ADDR_SPACE (if_info->a));

      noce_emit_move_insn (if_info->x, tmp);
    }
  else if (target != x)
    noce_emit_move_insn (x, target);

  tmp = end_ifcvt_sequence (if_info);
  if (!tmp)
    return FALSE;

  emit_insn_before_setloc (tmp, if_info->jump, INSN_LOCATOR (if_info->insn_a));
  return TRUE;

 end_seq_and_fail:
  end_sequence ();
  return FALSE;
}
1505
 
1506
/* For most cases, the simplified condition we found is the best
   choice, but this is not the case for the min/max/abs transforms.
   For these we wish to know that it is A or B in the condition.

   Return a condition rtx mentioning TARGET, and set *EARLIEST to the
   insn where the comparison begins; or NULL if no suitable condition
   can be derived.  */

static rtx
noce_get_alt_condition (struct noce_if_info *if_info, rtx target,
                        rtx *earliest)
{
  rtx cond, set, insn;
  int reverse;

  /* If target is already mentioned in the known condition, return it.  */
  if (reg_mentioned_p (target, if_info->cond))
    {
      *earliest = if_info->cond_earliest;
      return if_info->cond;
    }

  set = pc_set (if_info->jump);
  cond = XEXP (SET_SRC (set), 0);
  /* The branch sense is reversed when the jump falls through to the
     label, i.e. the label is on the "else" edge.  */
  reverse
    = GET_CODE (XEXP (SET_SRC (set), 2)) == LABEL_REF
      && XEXP (XEXP (SET_SRC (set), 2), 0) == JUMP_LABEL (if_info->jump);
  if (if_info->then_else_reversed)
    reverse = !reverse;

  /* If we're looking for a constant, try to make the conditional
     have that constant in it.  There are two reasons why it may
     not have the constant we want:

     1. GCC may have needed to put the constant in a register, because
        the target can't compare directly against that constant.  For
        this case, we look for a SET immediately before the comparison
        that puts a constant in that register.

     2. GCC may have canonicalized the conditional, for example
        replacing "if x < 4" with "if x <= 3".  We can undo that (or
        make equivalent types of changes) to get the constants we need
        if they're off by one in the right direction.  */

  if (CONST_INT_P (target))
    {
      enum rtx_code code = GET_CODE (if_info->cond);
      rtx op_a = XEXP (if_info->cond, 0);
      rtx op_b = XEXP (if_info->cond, 1);
      rtx prev_insn;

      /* First, look to see if we put a constant in a register.  */
      prev_insn = prev_nonnote_insn (if_info->cond_earliest);
      if (prev_insn
          && BLOCK_FOR_INSN (prev_insn)
             == BLOCK_FOR_INSN (if_info->cond_earliest)
          && INSN_P (prev_insn)
          && GET_CODE (PATTERN (prev_insn)) == SET)
        {
          rtx src = find_reg_equal_equiv_note (prev_insn);
          if (!src)
            src = SET_SRC (PATTERN (prev_insn));
          if (CONST_INT_P (src))
            {
              if (rtx_equal_p (op_a, SET_DEST (PATTERN (prev_insn))))
                op_a = src;
              else if (rtx_equal_p (op_b, SET_DEST (PATTERN (prev_insn))))
                op_b = src;

              /* Canonicalize: keep the constant on the right.  */
              if (CONST_INT_P (op_a))
                {
                  rtx tmp = op_a;
                  op_a = op_b;
                  op_b = tmp;
                  code = swap_condition (code);
                }
            }
        }

      /* Now, look to see if we can get the right constant by
         adjusting the conditional.  (Undo off-by-one canonicalization:
         x < C+1 <=> x <= C, etc.)  */
      if (CONST_INT_P (op_b))
        {
          HOST_WIDE_INT desired_val = INTVAL (target);
          HOST_WIDE_INT actual_val = INTVAL (op_b);

          switch (code)
            {
            case LT:
              if (actual_val == desired_val + 1)
                {
                  code = LE;
                  op_b = GEN_INT (desired_val);
                }
              break;
            case LE:
              if (actual_val == desired_val - 1)
                {
                  code = LT;
                  op_b = GEN_INT (desired_val);
                }
              break;
            case GT:
              if (actual_val == desired_val - 1)
                {
                  code = GE;
                  op_b = GEN_INT (desired_val);
                }
              break;
            case GE:
              if (actual_val == desired_val + 1)
                {
                  code = GT;
                  op_b = GEN_INT (desired_val);
                }
              break;
            default:
              break;
            }
        }

      /* If we made any changes, generate a new conditional that is
         equivalent to what we started with, but has the right
         constants in it.  */
      if (code != GET_CODE (if_info->cond)
          || op_a != XEXP (if_info->cond, 0)
          || op_b != XEXP (if_info->cond, 1))
        {
          cond = gen_rtx_fmt_ee (code, GET_MODE (cond), op_a, op_b);
          *earliest = if_info->cond_earliest;
          return cond;
        }
    }

  cond = canonicalize_condition (if_info->jump, cond, reverse,
                                 earliest, target, false, true);
  if (! cond || ! reg_mentioned_p (target, cond))
    return NULL;

  /* We almost certainly searched back to a different place.
     Need to re-verify correct lifetimes.  */

  /* X may not be mentioned in the range (cond_earliest, jump].  */
  for (insn = if_info->jump; insn != *earliest; insn = PREV_INSN (insn))
    if (INSN_P (insn) && reg_overlap_mentioned_p (if_info->x, PATTERN (insn)))
      return NULL;

  /* A and B may not be modified in the range [cond_earliest, jump).  */
  for (insn = *earliest; insn != if_info->jump; insn = NEXT_INSN (insn))
    if (INSN_P (insn)
        && (modified_in_p (if_info->a, insn)
            || modified_in_p (if_info->b, insn)))
      return NULL;

  return cond;
}
1658
 
1659
/* Convert "if (a < b) x = a; else x = b;" to "x = min(a, b);", etc.
   Uses noce_get_alt_condition to recover a comparison that mentions
   A directly, then emits the corresponding SMIN/SMAX/UMIN/UMAX.
   Returns TRUE on success.  */

static int
noce_try_minmax (struct noce_if_info *if_info)
{
  rtx cond, earliest, target, seq;
  enum rtx_code code, op;
  int unsignedp;

  /* ??? Reject modes with NaNs or signed zeros since we don't know how
     they will be resolved with an SMIN/SMAX.  It wouldn't be too hard
     to get the target to tell us...  */
  if (HONOR_SIGNED_ZEROS (GET_MODE (if_info->x))
      || HONOR_NANS (GET_MODE (if_info->x)))
    return FALSE;

  cond = noce_get_alt_condition (if_info, if_info->a, &earliest);
  if (!cond)
    return FALSE;

  /* Verify the condition is of the form we expect, and canonicalize
     the comparison code.  (After swapping, A is always the first
     comparison operand.)  */
  code = GET_CODE (cond);
  if (rtx_equal_p (XEXP (cond, 0), if_info->a))
    {
      if (! rtx_equal_p (XEXP (cond, 1), if_info->b))
        return FALSE;
    }
  else if (rtx_equal_p (XEXP (cond, 1), if_info->a))
    {
      if (! rtx_equal_p (XEXP (cond, 0), if_info->b))
        return FALSE;
      code = swap_condition (code);
    }
  else
    return FALSE;

  /* Determine what sort of operation this is.  Note that the code is for
     a taken branch, so the code->operation mapping appears backwards.  */
  switch (code)
    {
    case LT:
    case LE:
    case UNLT:
    case UNLE:
      op = SMAX;
      unsignedp = 0;
      break;
    case GT:
    case GE:
    case UNGT:
    case UNGE:
      op = SMIN;
      unsignedp = 0;
      break;
    case LTU:
    case LEU:
      op = UMAX;
      unsignedp = 1;
      break;
    case GTU:
    case GEU:
      op = UMIN;
      unsignedp = 1;
      break;
    default:
      return FALSE;
    }

  start_sequence ();

  target = expand_simple_binop (GET_MODE (if_info->x), op,
                                if_info->a, if_info->b,
                                if_info->x, unsignedp, OPTAB_WIDEN);
  if (! target)
    {
      end_sequence ();
      return FALSE;
    }
  if (target != if_info->x)
    noce_emit_move_insn (if_info->x, target);

  seq = end_ifcvt_sequence (if_info);
  if (!seq)
    return FALSE;

  emit_insn_before_setloc (seq, if_info->jump, INSN_LOCATOR (if_info->insn_a));
  /* Record the (possibly adjusted) condition so later passes over this
     if-block reason from the same comparison we just used.  */
  if_info->cond = cond;
  if_info->cond_earliest = earliest;

  return TRUE;
}
1751
 
1752
/* Convert "if (a < 0) x = -a; else x = a;" to "x = abs(a);",
1753
   "if (a < 0) x = ~a; else x = a;" to "x = one_cmpl_abs(a);",
1754
   etc.  */
1755
 
1756
static int
1757
noce_try_abs (struct noce_if_info *if_info)
1758
{
1759
  rtx cond, earliest, target, seq, a, b, c;
1760
  int negate;
1761
  bool one_cmpl = false;
1762
 
1763
  /* Reject modes with signed zeros.  */
1764
  if (HONOR_SIGNED_ZEROS (GET_MODE (if_info->x)))
1765
    return FALSE;
1766
 
1767
  /* Recognize A and B as constituting an ABS or NABS.  The canonical
1768
     form is a branch around the negation, taken when the object is the
1769
     first operand of a comparison against 0 that evaluates to true.  */
1770
  a = if_info->a;
1771
  b = if_info->b;
1772
  if (GET_CODE (a) == NEG && rtx_equal_p (XEXP (a, 0), b))
1773
    negate = 0;
1774
  else if (GET_CODE (b) == NEG && rtx_equal_p (XEXP (b, 0), a))
1775
    {
1776
      c = a; a = b; b = c;
1777
      negate = 1;
1778
    }
1779
  else if (GET_CODE (a) == NOT && rtx_equal_p (XEXP (a, 0), b))
1780
    {
1781
      negate = 0;
1782
      one_cmpl = true;
1783
    }
1784
  else if (GET_CODE (b) == NOT && rtx_equal_p (XEXP (b, 0), a))
1785
    {
1786
      c = a; a = b; b = c;
1787
      negate = 1;
1788
      one_cmpl = true;
1789
    }
1790
  else
1791
    return FALSE;
1792
 
1793
  cond = noce_get_alt_condition (if_info, b, &earliest);
1794
  if (!cond)
1795
    return FALSE;
1796
 
1797
  /* Verify the condition is of the form we expect.  */
1798
  if (rtx_equal_p (XEXP (cond, 0), b))
1799
    c = XEXP (cond, 1);
1800
  else if (rtx_equal_p (XEXP (cond, 1), b))
1801
    {
1802
      c = XEXP (cond, 0);
1803
      negate = !negate;
1804
    }
1805
  else
1806
    return FALSE;
1807
 
1808
  /* Verify that C is zero.  Search one step backward for a
1809
     REG_EQUAL note or a simple source if necessary.  */
1810
  if (REG_P (c))
1811
    {
1812
      rtx set, insn = prev_nonnote_insn (earliest);
1813
      if (insn
1814
          && BLOCK_FOR_INSN (insn) == BLOCK_FOR_INSN (earliest)
1815
          && (set = single_set (insn))
1816
          && rtx_equal_p (SET_DEST (set), c))
1817
        {
1818
          rtx note = find_reg_equal_equiv_note (insn);
1819
          if (note)
1820
            c = XEXP (note, 0);
1821
          else
1822
            c = SET_SRC (set);
1823
        }
1824
      else
1825
        return FALSE;
1826
    }
1827
  if (MEM_P (c)
1828
      && GET_CODE (XEXP (c, 0)) == SYMBOL_REF
1829
      && CONSTANT_POOL_ADDRESS_P (XEXP (c, 0)))
1830
    c = get_pool_constant (XEXP (c, 0));
1831
 
1832
  /* Work around funny ideas get_condition has wrt canonicalization.
1833
     Note that these rtx constants are known to be CONST_INT, and
1834
     therefore imply integer comparisons.  */
1835
  if (c == constm1_rtx && GET_CODE (cond) == GT)
1836
    ;
1837
  else if (c == const1_rtx && GET_CODE (cond) == LT)
1838
    ;
1839
  else if (c != CONST0_RTX (GET_MODE (b)))
1840
    return FALSE;
1841
 
1842
  /* Determine what sort of operation this is.  */
1843
  switch (GET_CODE (cond))
1844
    {
1845
    case LT:
1846
    case LE:
1847
    case UNLT:
1848
    case UNLE:
1849
      negate = !negate;
1850
      break;
1851
    case GT:
1852
    case GE:
1853
    case UNGT:
1854
    case UNGE:
1855
      break;
1856
    default:
1857
      return FALSE;
1858
    }
1859
 
1860
  start_sequence ();
1861
  if (one_cmpl)
1862
    target = expand_one_cmpl_abs_nojump (GET_MODE (if_info->x), b,
1863
                                         if_info->x);
1864
  else
1865
    target = expand_abs_nojump (GET_MODE (if_info->x), b, if_info->x, 1);
1866
 
1867
  /* ??? It's a quandary whether cmove would be better here, especially
1868
     for integers.  Perhaps combine will clean things up.  */
1869
  if (target && negate)
1870
    {
1871
      if (one_cmpl)
1872
        target = expand_simple_unop (GET_MODE (target), NOT, target,
1873
                                     if_info->x, 0);
1874
      else
1875
        target = expand_simple_unop (GET_MODE (target), NEG, target,
1876
                                     if_info->x, 0);
1877
    }
1878
 
1879
  if (! target)
1880
    {
1881
      end_sequence ();
1882
      return FALSE;
1883
    }
1884
 
1885
  if (target != if_info->x)
1886
    noce_emit_move_insn (if_info->x, target);
1887
 
1888
  seq = end_ifcvt_sequence (if_info);
1889
  if (!seq)
1890
    return FALSE;
1891
 
1892
  emit_insn_before_setloc (seq, if_info->jump, INSN_LOCATOR (if_info->insn_a));
1893
  if_info->cond = cond;
1894
  if_info->cond_earliest = earliest;
1895
 
1896
  return TRUE;
1897
}
1898
 
1899
/* Convert "if (m < 0) x = b; else x = 0;" to "x = (m >> C) & b;".  */
1900
 
1901
static int
1902
noce_try_sign_mask (struct noce_if_info *if_info)
1903
{
1904
  rtx cond, t, m, c, seq;
1905
  enum machine_mode mode;
1906
  enum rtx_code code;
1907
  bool t_unconditional;
1908
 
1909
  cond = if_info->cond;
1910
  code = GET_CODE (cond);
1911
  m = XEXP (cond, 0);
1912
  c = XEXP (cond, 1);
1913
 
1914
  t = NULL_RTX;
1915
  if (if_info->a == const0_rtx)
1916
    {
1917
      if ((code == LT && c == const0_rtx)
1918
          || (code == LE && c == constm1_rtx))
1919
        t = if_info->b;
1920
    }
1921
  else if (if_info->b == const0_rtx)
1922
    {
1923
      if ((code == GE && c == const0_rtx)
1924
          || (code == GT && c == constm1_rtx))
1925
        t = if_info->a;
1926
    }
1927
 
1928
  if (! t || side_effects_p (t))
1929
    return FALSE;
1930
 
1931
  /* We currently don't handle different modes.  */
1932
  mode = GET_MODE (t);
1933
  if (GET_MODE (m) != mode)
1934
    return FALSE;
1935
 
1936
  /* This is only profitable if T is unconditionally executed/evaluated in the
1937
     original insn sequence or T is cheap.  The former happens if B is the
1938
     non-zero (T) value and if INSN_B was taken from TEST_BB, or there was no
1939
     INSN_B which can happen for e.g. conditional stores to memory.  For the
1940
     cost computation use the block TEST_BB where the evaluation will end up
1941
     after the transformation.  */
1942
  t_unconditional =
1943
    (t == if_info->b
1944
     && (if_info->insn_b == NULL_RTX
1945
         || BLOCK_FOR_INSN (if_info->insn_b) == if_info->test_bb));
1946
  if (!(t_unconditional
1947
        || (rtx_cost (t, SET, optimize_bb_for_speed_p (if_info->test_bb))
1948
            < COSTS_N_INSNS (2))))
1949
    return FALSE;
1950
 
1951
  start_sequence ();
1952
  /* Use emit_store_flag to generate "m < 0 ? -1 : 0" instead of expanding
1953
     "(signed) m >> 31" directly.  This benefits targets with specialized
1954
     insns to obtain the signmask, but still uses ashr_optab otherwise.  */
1955
  m = emit_store_flag (gen_reg_rtx (mode), LT, m, const0_rtx, mode, 0, -1);
1956
  t = m ? expand_binop (mode, and_optab, m, t, NULL_RTX, 0, OPTAB_DIRECT)
1957
        : NULL_RTX;
1958
 
1959
  if (!t)
1960
    {
1961
      end_sequence ();
1962
      return FALSE;
1963
    }
1964
 
1965
  noce_emit_move_insn (if_info->x, t);
1966
 
1967
  seq = end_ifcvt_sequence (if_info);
1968
  if (!seq)
1969
    return FALSE;
1970
 
1971
  emit_insn_before_setloc (seq, if_info->jump, INSN_LOCATOR (if_info->insn_a));
1972
  return TRUE;
1973
}
1974
 
1975
 
1976
/* Optimize away "if (x & C) x |= C" and similar bit manipulation
   transformations.  Returns TRUE when the if-block was either rewritten
   as a single unconditional bit operation or proved to be a no-op;
   FALSE if the pattern does not match.  */

static int
noce_try_bitop (struct noce_if_info *if_info)
{
  rtx cond, x, a, result, seq;
  enum machine_mode mode;
  enum rtx_code code;
  int bitnum;               /* Bit position tested by the condition.  */

  x = if_info->x;
  cond = if_info->cond;
  code = GET_CODE (cond);

  /* Check for no else condition.  B must be X itself (implicit else).  */
  if (! rtx_equal_p (x, if_info->b))
    return FALSE;

  /* Check for a suitable condition: a single-bit test against zero.  */
  if (code != NE && code != EQ)
    return FALSE;
  if (XEXP (cond, 1) != const0_rtx)
    return FALSE;
  cond = XEXP (cond, 0);

  /* ??? We could also handle AND here.  */
  if (GET_CODE (cond) == ZERO_EXTRACT)
    {
      if (XEXP (cond, 1) != const1_rtx
          || !CONST_INT_P (XEXP (cond, 2))
          || ! rtx_equal_p (x, XEXP (cond, 0)))
        return FALSE;
      bitnum = INTVAL (XEXP (cond, 2));
      mode = GET_MODE (x);
      /* ZERO_EXTRACT positions count from the MSB on big-endian bit
         numbering; normalize to an LSB-relative shift count.  */
      if (BITS_BIG_ENDIAN)
        bitnum = GET_MODE_BITSIZE (mode) - 1 - bitnum;
      if (bitnum < 0 || bitnum >= HOST_BITS_PER_WIDE_INT)
        return FALSE;
    }
  else
    return FALSE;

  a = if_info->a;
  if (GET_CODE (a) == IOR || GET_CODE (a) == XOR)
    {
      /* Check for "if (X & C) x = x op C".  The operation's constant
         must be exactly the single bit being tested.  */
      if (! rtx_equal_p (x, XEXP (a, 0))
          || !CONST_INT_P (XEXP (a, 1))
          || (INTVAL (XEXP (a, 1)) & GET_MODE_MASK (mode))
             != (unsigned HOST_WIDE_INT) 1 << bitnum)
        return FALSE;

      /* if ((x & C) == 0) x |= C; is transformed to x |= C.   */
      /* if ((x & C) != 0) x |= C; is transformed to nothing.  */
      if (GET_CODE (a) == IOR)
        result = (code == NE) ? a : NULL_RTX;
      else if (code == NE)
        {
          /* if ((x & C) == 0) x ^= C; is transformed to x |= C.   */
          result = gen_int_mode ((HOST_WIDE_INT) 1 << bitnum, mode);
          result = simplify_gen_binary (IOR, mode, x, result);
        }
      else
        {
          /* if ((x & C) != 0) x ^= C; is transformed to x &= ~C.  */
          result = gen_int_mode (~((HOST_WIDE_INT) 1 << bitnum), mode);
          result = simplify_gen_binary (AND, mode, x, result);
        }
    }
  else if (GET_CODE (a) == AND)
    {
      /* Check for "if (X & C) x &= ~C".  */
      if (! rtx_equal_p (x, XEXP (a, 0))
          || !CONST_INT_P (XEXP (a, 1))
          || (INTVAL (XEXP (a, 1)) & GET_MODE_MASK (mode))
             != (~((HOST_WIDE_INT) 1 << bitnum) & GET_MODE_MASK (mode)))
        return FALSE;

      /* if ((x & C) == 0) x &= ~C; is transformed to nothing.  */
      /* if ((x & C) != 0) x &= ~C; is transformed to x &= ~C.  */
      result = (code == EQ) ? a : NULL_RTX;
    }
  else
    return FALSE;

  /* RESULT == NULL_RTX means the conditional store is a no-op: succeed
     without emitting anything, so the if-block is simply deleted.  */
  if (result)
    {
      start_sequence ();
      noce_emit_move_insn (x, result);
      seq = end_ifcvt_sequence (if_info);
      if (!seq)
        return FALSE;

      emit_insn_before_setloc (seq, if_info->jump,
                               INSN_LOCATOR (if_info->insn_a));
    }
  return TRUE;
}
2075
 
2076
 
2077
/* Similar to get_condition, only the resulting condition must be
   valid at JUMP, instead of at EARLIEST.

   If THEN_ELSE_REVERSED is true, the fallthrough does not go to the
   THEN block of the caller, and we have to reverse the condition.

   Returns the condition rtx, storing the insn at which it becomes valid
   in *EARLIEST, or NULL_RTX if JUMP is not a simple conditional jump or
   no usable condition can be derived.  */

static rtx
noce_get_condition (rtx jump, rtx *earliest, bool then_else_reversed)
{
  rtx cond, set, tmp;
  bool reverse;

  if (! any_condjump_p (jump))
    return NULL_RTX;

  set = pc_set (jump);

  /* If this branches to JUMP_LABEL when the condition is false,
     reverse the condition.  */
  reverse = (GET_CODE (XEXP (SET_SRC (set), 2)) == LABEL_REF
             && XEXP (XEXP (SET_SRC (set), 2), 0) == JUMP_LABEL (jump));

  /* We may have to reverse because the caller's if block is not canonical,
     i.e. the THEN block isn't the fallthrough block for the TEST block
     (see find_if_header).  */
  if (then_else_reversed)
    reverse = !reverse;

  /* If the condition variable is a register and is MODE_INT, accept it.
     In that case the condition is already valid at the jump itself.  */

  cond = XEXP (SET_SRC (set), 0);
  tmp = XEXP (cond, 0);
  if (REG_P (tmp) && GET_MODE_CLASS (GET_MODE (tmp)) == MODE_INT)
    {
      *earliest = jump;

      if (reverse)
        cond = gen_rtx_fmt_ee (reverse_condition (GET_CODE (cond)),
                               GET_MODE (cond), tmp, XEXP (cond, 1));
      return cond;
    }

  /* Otherwise, fall back on canonicalize_condition to do the dirty
     work of manipulating MODE_CC values and COMPARE rtx codes.  */
  return canonicalize_condition (jump, cond, reverse, earliest,
                                 NULL_RTX, false, true);
}
2124
 
2125
/* Return true if OP is ok for if-then-else processing.  */
2126
 
2127
static int
2128
noce_operand_ok (const_rtx op)
2129
{
2130
  /* We special-case memories, so handle any of them with
2131
     no address side effects.  */
2132
  if (MEM_P (op))
2133
    return ! side_effects_p (XEXP (op, 0));
2134
 
2135
  if (side_effects_p (op))
2136
    return FALSE;
2137
 
2138
  return ! may_trap_p (op);
2139
}
2140
 
2141
/* Return true if a write into MEM may trap or fault.  Used to reject
   speculative stores through pointers that may reference read-only or
   otherwise unwritable memory.  */

static bool
noce_mem_write_may_trap_or_fault_p (const_rtx mem)
{
  rtx addr;

  if (MEM_READONLY_P (mem))
    return true;

  if (may_trap_or_fault_p (mem))
    return true;

  addr = XEXP (mem, 0);

  /* Call target hook to avoid the effects of -fpic etc....  */
  addr = targetm.delegitimize_address (addr);

  /* Strip address wrappers until we reach a SYMBOL_REF or LABEL_REF
     (decidable) or something opaque (assume writable, return false).  */
  while (addr)
    switch (GET_CODE (addr))
      {
      case CONST:
      case PRE_DEC:
      case PRE_INC:
      case POST_DEC:
      case POST_INC:
      case POST_MODIFY:
        addr = XEXP (addr, 0);
        break;
      case LO_SUM:
      case PRE_MODIFY:
        /* The interesting operand is the second one here.  */
        addr = XEXP (addr, 1);
        break;
      case PLUS:
        /* Only look through constant offsets; a variable index could
           point anywhere, so give up (treat as non-faulting).  */
        if (CONST_INT_P (XEXP (addr, 1)))
          addr = XEXP (addr, 0);
        else
          return false;
        break;
      case LABEL_REF:
        /* Code addresses are never writable.  */
        return true;
      case SYMBOL_REF:
        if (SYMBOL_REF_DECL (addr)
            && decl_readonly_section (SYMBOL_REF_DECL (addr), 0))
          return true;
        return false;
      default:
        return false;
      }

  return false;
}
2193
 
2194
/* Return whether we can use store speculation for MEM.  TOP_BB is the
   basic block above the conditional block where we are considering
   doing the speculative store.  We look for whether MEM is set
   unconditionally later in the function.  */

static bool
noce_can_store_speculate_p (basic_block top_bb, const_rtx mem)
{
  basic_block dominator;

  /* Walk up the post-dominator chain: every block visited is executed
     on all paths leaving TOP_BB.  */
  for (dominator = get_immediate_dominator (CDI_POST_DOMINATORS, top_bb);
       dominator != NULL;
       dominator = get_immediate_dominator (CDI_POST_DOMINATORS, dominator))
    {
      rtx insn;

      FOR_BB_INSNS (dominator, insn)
        {
          /* If we see something that might be a memory barrier, we
             have to stop looking.  Even if the MEM is set later in
             the function, we still don't want to set it
             unconditionally before the barrier.  */
          if (INSN_P (insn)
              && (volatile_insn_p (PATTERN (insn))
                  || (CALL_P (insn) && (!RTL_CONST_CALL_P (insn)))))
            return false;

          /* An unconditional later store to MEM makes speculation safe.  */
          if (memory_modified_in_insn_p (mem, insn))
            return true;
          /* If the address itself changes, the later store is to a
             different location; give up.  */
          if (modified_in_p (XEXP (mem, 0), insn))
            return false;

        }
    }

  return false;
}
2231
 
2232
/* Given a simple IF-THEN-JOIN or IF-THEN-ELSE-JOIN block, attempt to convert
   it without using conditional execution.  Return TRUE if we were successful
   at converting the block.  On success the THEN (and ELSE) blocks are
   deleted and the CFG is updated; on failure nothing is changed.  */

static int
noce_process_if_block (struct noce_if_info *if_info)
{
  basic_block test_bb = if_info->test_bb;       /* test block */
  basic_block then_bb = if_info->then_bb;       /* THEN */
  basic_block else_bb = if_info->else_bb;       /* ELSE or NULL */
  basic_block join_bb = if_info->join_bb;       /* JOIN */
  rtx jump = if_info->jump;
  rtx cond = if_info->cond;
  rtx insn_a, insn_b;
  rtx set_a, set_b;
  rtx orig_x, x, a, b;

  /* We're looking for patterns of the form

     (1) if (...) x = a; else x = b;
     (2) x = b; if (...) x = a;
     (3) if (...) x = a;   // as if with an initial x = x.

     The later patterns require jumps to be more expensive.

     ??? For future expansion, look for multiple X in such patterns.  */

  /* Look for one of the potential sets.  The THEN block must consist of
     exactly one active insn which is a single set.  */
  insn_a = first_active_insn (then_bb);
  if (! insn_a
      || insn_a != last_active_insn (then_bb, FALSE)
      || (set_a = single_set (insn_a)) == NULL_RTX)
    return FALSE;

  x = SET_DEST (set_a);
  a = SET_SRC (set_a);

  /* Look for the other potential set.  Make sure we've got equivalent
     destinations.  */
  /* ??? This is overconservative.  Storing to two different mems is
     as easy as conditionally computing the address.  Storing to a
     single mem merely requires a scratch memory to use as one of the
     destination addresses; often the memory immediately below the
     stack pointer is available for this.  */
  set_b = NULL_RTX;
  if (else_bb)
    {
      insn_b = first_active_insn (else_bb);
      if (! insn_b
          || insn_b != last_active_insn (else_bb, FALSE)
          || (set_b = single_set (insn_b)) == NULL_RTX
          || ! rtx_equal_p (x, SET_DEST (set_b)))
        return FALSE;
    }
  else
    {
      /* No ELSE block: try pattern (2), where B was set just before the
         conditional jump.  */
      insn_b = prev_nonnote_nondebug_insn (if_info->cond_earliest);
      /* We're going to be moving the evaluation of B down from above
         COND_EARLIEST to JUMP.  Make sure the relevant data is still
         intact.  */
      if (! insn_b
          || BLOCK_FOR_INSN (insn_b) != BLOCK_FOR_INSN (if_info->cond_earliest)
          || !NONJUMP_INSN_P (insn_b)
          || (set_b = single_set (insn_b)) == NULL_RTX
          || ! rtx_equal_p (x, SET_DEST (set_b))
          || ! noce_operand_ok (SET_SRC (set_b))
          || reg_overlap_mentioned_p (x, SET_SRC (set_b))
          || modified_between_p (SET_SRC (set_b), insn_b, jump)
          /* Likewise with X.  In particular this can happen when
             noce_get_condition looks farther back in the instruction
             stream than one might expect.  */
          || reg_overlap_mentioned_p (x, cond)
          || reg_overlap_mentioned_p (x, a)
          || modified_between_p (x, insn_b, jump))
        insn_b = set_b = NULL_RTX;
    }

  /* If x has side effects then only the if-then-else form is safe to
     convert.  But even in that case we would need to restore any notes
     (such as REG_INC) at then end.  That can be tricky if
     noce_emit_move_insn expands to more than one insn, so disable the
     optimization entirely for now if there are side effects.  */
  if (side_effects_p (x))
    return FALSE;

  /* With no explicit ELSE value, the else arm keeps X unchanged.  */
  b = (set_b ? SET_SRC (set_b) : x);

  /* Only operate on register destinations, and even then avoid extending
     the lifetime of hard registers on small register class machines.  */
  orig_x = x;
  if (!REG_P (x)
      || (SMALL_REGISTER_CLASSES
          && REGNO (x) < FIRST_PSEUDO_REGISTER))
    {
      if (GET_MODE (x) == BLKmode)
        return FALSE;

      if (GET_CODE (x) == ZERO_EXTRACT
          && (!CONST_INT_P (XEXP (x, 1))
              || !CONST_INT_P (XEXP (x, 2))))
        return FALSE;

      /* Compute into a fresh pseudo; the result is copied back to the
         real destination at the success label below.  */
      x = gen_reg_rtx (GET_MODE (GET_CODE (x) == STRICT_LOW_PART
                                 ? XEXP (x, 0) : x));
    }

  /* Don't operate on sources that may trap or are volatile.  */
  if (! noce_operand_ok (a) || ! noce_operand_ok (b))
    return FALSE;

 retry:
  /* Set up the info block for our subroutines.  */
  if_info->insn_a = insn_a;
  if_info->insn_b = insn_b;
  if_info->x = x;
  if_info->a = a;
  if_info->b = b;

  /* Try optimizations in some approximation of a useful order.  */
  /* ??? Should first look to see if X is live incoming at all.  If it
     isn't, we don't need anything but an unconditional set.  */

  /* Look and see if A and B are really the same.  Avoid creating silly
     cmove constructs that no one will fix up later.  */
  if (rtx_equal_p (a, b))
    {
      /* If we have an INSN_B, we don't have to create any new rtl.  Just
         move the instruction that we already have.  If we don't have an
         INSN_B, that means that A == X, and we've got a noop move.  In
         that case don't do anything and let the code below delete INSN_A.  */
      if (insn_b && else_bb)
        {
          rtx note;

          if (else_bb && insn_b == BB_END (else_bb))
            BB_END (else_bb) = PREV_INSN (insn_b);
          reorder_insns (insn_b, insn_b, PREV_INSN (jump));

          /* If there was a REG_EQUAL note, delete it since it may have been
             true due to this insn being after a jump.  */
          if ((note = find_reg_note (insn_b, REG_EQUAL, NULL_RTX)) != 0)
            remove_note (insn_b, note);

          insn_b = NULL_RTX;
        }
      /* If we have "x = b; if (...) x = a;", and x has side-effects, then
         x must be executed twice.  */
      else if (insn_b && side_effects_p (orig_x))
        return FALSE;

      x = orig_x;
      goto success;
    }

  if (!set_b && MEM_P (orig_x))
    {
      /* Disallow the "if (...) x = a;" form (implicit "else x = x;")
         for optimizations if writing to x may trap or fault,
         i.e. it's a memory other than a static var or a stack slot,
         is misaligned on strict aligned machines or is read-only.  If
         x is a read-only memory, then the program is valid only if we
         avoid the store into it.  If there are stores on both the
         THEN and ELSE arms, then we can go ahead with the conversion;
         either the program is broken, or the condition is always
         false such that the other memory is selected.  */
      if (noce_mem_write_may_trap_or_fault_p (orig_x))
        return FALSE;

      /* Avoid store speculation: given "if (...) x = a" where x is a
         MEM, we only want to do the store if x is always set
         somewhere in the function.  This avoids cases like
           if (pthread_mutex_trylock(mutex))
             ++global_variable;
         where we only want global_variable to be changed if the mutex
         is held.  FIXME: This should ideally be expressed directly in
         RTL somehow.  */
      if (!noce_can_store_speculate_p (test_bb, orig_x))
        return FALSE;
    }

  /* The transformations below are ordered roughly cheapest-first.  */
  if (noce_try_move (if_info))
    goto success;
  if (noce_try_store_flag (if_info))
    goto success;
  if (noce_try_bitop (if_info))
    goto success;
  if (noce_try_minmax (if_info))
    goto success;
  if (noce_try_abs (if_info))
    goto success;
  if (HAVE_conditional_move
      && noce_try_cmove (if_info))
    goto success;
  if (! targetm.have_conditional_execution ())
    {
      if (noce_try_store_flag_constants (if_info))
        goto success;
      if (noce_try_addcc (if_info))
        goto success;
      if (noce_try_store_flag_mask (if_info))
        goto success;
      if (HAVE_conditional_move
          && noce_try_cmove_arith (if_info))
        goto success;
      if (noce_try_sign_mask (if_info))
        goto success;
    }

  /* If the pattern (2) match failed to convert, retry as pattern (3)
     with the implicit "else x = x".  */
  if (!else_bb && set_b)
    {
      insn_b = set_b = NULL_RTX;
      b = orig_x;
      goto retry;
    }

  return FALSE;

 success:

  /* If we used a temporary, fix it up now.  */
  if (orig_x != x)
    {
      rtx seq;

      start_sequence ();
      noce_emit_move_insn (orig_x, x);
      seq = get_insns ();
      set_used_flags (orig_x);
      unshare_all_rtl_in_chain (seq);
      end_sequence ();

      emit_insn_before_setloc (seq, BB_END (test_bb), INSN_LOCATOR (insn_a));
    }

  /* The original THEN and ELSE blocks may now be removed.  The test block
     must now jump to the join block.  If the test block and the join block
     can be merged, do so.  */
  if (else_bb)
    {
      delete_basic_block (else_bb);
      num_true_changes++;
    }
  else
    remove_edge (find_edge (test_bb, join_bb));

  remove_edge (find_edge (then_bb, join_bb));
  redirect_edge_and_branch_force (single_succ_edge (test_bb), join_bb);
  delete_basic_block (then_bb);
  num_true_changes++;

  if (can_merge_blocks_p (test_bb, join_bb))
    {
      merge_blocks (test_bb, join_bb);
      num_true_changes++;
    }

  num_updated_if_blocks++;
  return TRUE;
}
2491
 
2492
/* Check whether a block is suitable for conditional move conversion.
   Every insn must be a simple set of a register to a constant or a
   register.  For each assignment, store the value in the array VALS,
   indexed by register number, then store the register number in
   REGS.  COND is the condition we will test.  Returns TRUE iff every
   insn in BB passes; VALS/REGS may be partially filled on failure.  */

static int
check_cond_move_block (basic_block bb, rtx *vals, VEC (int, heap) **regs, rtx cond)
{
  rtx insn;

   /* We can only handle simple jumps at the end of the basic block.
      It is almost impossible to update the CFG otherwise.  */
  insn = BB_END (bb);
  if (JUMP_P (insn) && !onlyjump_p (insn))
    return FALSE;

  FOR_BB_INSNS (bb, insn)
    {
      rtx set, dest, src;

      /* Skip notes, debug insns and the terminating jump.  */
      if (!NONDEBUG_INSN_P (insn) || JUMP_P (insn))
        continue;
      set = single_set (insn);
      if (!set)
        return FALSE;

      dest = SET_DEST (set);
      src = SET_SRC (set);
      if (!REG_P (dest)
          || (SMALL_REGISTER_CLASSES && HARD_REGISTER_P (dest)))
        return FALSE;

      if (!CONSTANT_P (src) && !register_operand (src, VOIDmode))
        return FALSE;

      /* The moves become conditional, so neither operand may have
         side effects or be able to trap.  */
      if (side_effects_p (src) || side_effects_p (dest))
        return FALSE;

      if (may_trap_p (src) || may_trap_p (dest))
        return FALSE;

      /* Don't try to handle this if the source register was
         modified earlier in the block.  */
      if ((REG_P (src)
           && vals[REGNO (src)] != NULL)
          || (GET_CODE (src) == SUBREG && REG_P (SUBREG_REG (src))
              && vals[REGNO (SUBREG_REG (src))] != NULL))
        return FALSE;

      /* Don't try to handle this if the destination register was
         modified earlier in the block.  */
      if (vals[REGNO (dest)] != NULL)
        return FALSE;

      /* Don't try to handle this if the condition uses the
         destination register.  */
      if (reg_overlap_mentioned_p (dest, cond))
        return FALSE;

      /* Don't try to handle this if the source register is modified
         later in the block.  */
      if (!CONSTANT_P (src)
          && modified_between_p (src, insn, NEXT_INSN (BB_END (bb))))
        return FALSE;

      /* Record the value this block assigns to DEST.  */
      vals[REGNO (dest)] = src;

      VEC_safe_push (int, heap, *regs, REGNO (dest));
    }

  return TRUE;
}
2565
 
2566
/* Given a basic block BB suitable for conditional move conversion,
   a condition COND, and arrays THEN_VALS and ELSE_VALS containing the
   register values depending on COND, emit the insns in the block as
   conditional moves.  If ELSE_BLOCK is true, THEN_BB was already
   processed.  The caller has started a sequence for the conversion.
   Return true if successful, false if something goes wrong.  */

static bool
cond_move_convert_if_block (struct noce_if_info *if_infop,
                            basic_block bb, rtx cond,
                            rtx *then_vals, rtx *else_vals,
                            bool else_block_p)
{
  enum rtx_code code;
  rtx insn, cond_arg0, cond_arg1;

  code = GET_CODE (cond);
  cond_arg0 = XEXP (cond, 0);
  cond_arg1 = XEXP (cond, 1);

  FOR_BB_INSNS (bb, insn)
    {
      rtx set, target, dest, t, e;
      unsigned int regno;

      /* ??? Maybe emit conditional debug insn?  */
      if (!NONDEBUG_INSN_P (insn) || JUMP_P (insn))
        continue;
      set = single_set (insn);
      /* check_cond_move_block already validated the block's insns.  */
      gcc_assert (set && REG_P (SET_DEST (set)));

      dest = SET_DEST (set);
      regno = REGNO (dest);

      /* T/E are the values DEST takes on the then/else paths; a NULL
         entry means the corresponding block does not set DEST.  */
      t = then_vals[regno];
      e = else_vals[regno];

      if (else_block_p)
        {
          /* If this register was set in the then block, we already
             handled this case there.  */
          if (t)
            continue;
          /* Set only in the ELSE block: the THEN path keeps DEST.  */
          t = dest;
          gcc_assert (e);
        }
      else
        {
          gcc_assert (t);
          /* Set only in the THEN block: the ELSE path keeps DEST.  */
          if (!e)
            e = dest;
        }

      target = noce_emit_cmove (if_infop, dest, code, cond_arg0, cond_arg1,
                                t, e);
      if (!target)
        return false;

      if (target != dest)
        noce_emit_move_insn (dest, target);
    }

  return true;
}
2630
 
2631
/* Given a simple IF-THEN-JOIN or IF-THEN-ELSE-JOIN block, attempt to convert
2632
   it using only conditional moves.  Return TRUE if we were successful at
2633
   converting the block.  */
2634
 
2635
static int
2636
cond_move_process_if_block (struct noce_if_info *if_info)
2637
{
2638
  basic_block test_bb = if_info->test_bb;
2639
  basic_block then_bb = if_info->then_bb;
2640
  basic_block else_bb = if_info->else_bb;
2641
  basic_block join_bb = if_info->join_bb;
2642
  rtx jump = if_info->jump;
2643
  rtx cond = if_info->cond;
2644
  rtx seq, loc_insn;
2645
  int max_reg, size, c, reg;
2646
  rtx *then_vals;
2647
  rtx *else_vals;
2648
  VEC (int, heap) *then_regs = NULL;
2649
  VEC (int, heap) *else_regs = NULL;
2650
  unsigned int i;
2651
 
2652
  /* Build a mapping for each block to the value used for each
2653
     register.  */
2654
  max_reg = max_reg_num ();
2655
  size = (max_reg + 1) * sizeof (rtx);
2656
  then_vals = (rtx *) alloca (size);
2657
  else_vals = (rtx *) alloca (size);
2658
  memset (then_vals, 0, size);
2659
  memset (else_vals, 0, size);
2660
 
2661
  /* Make sure the blocks are suitable.  */
2662
  if (!check_cond_move_block (then_bb, then_vals, &then_regs, cond)
2663
      || (else_bb && !check_cond_move_block (else_bb, else_vals, &else_regs, cond)))
2664
    {
2665
      VEC_free (int, heap, then_regs);
2666
      VEC_free (int, heap, else_regs);
2667
      return FALSE;
2668
    }
2669
 
2670
  /* Make sure the blocks can be used together.  If the same register
2671
     is set in both blocks, and is not set to a constant in both
2672
     cases, then both blocks must set it to the same register.  We
2673
     have already verified that if it is set to a register, that the
2674
     source register does not change after the assignment.  Also count
2675
     the number of registers set in only one of the blocks.  */
2676
  c = 0;
2677
  for (i = 0; VEC_iterate (int, then_regs, i, reg); i++)
2678
    {
2679
      if (!then_vals[reg] && !else_vals[reg])
2680
        continue;
2681
 
2682
      if (!else_vals[reg])
2683
        ++c;
2684
      else
2685
        {
2686
          if (!CONSTANT_P (then_vals[reg])
2687
              && !CONSTANT_P (else_vals[reg])
2688
              && !rtx_equal_p (then_vals[reg], else_vals[reg]))
2689
            {
2690
              VEC_free (int, heap, then_regs);
2691
              VEC_free (int, heap, else_regs);
2692
              return FALSE;
2693
            }
2694
        }
2695
    }
2696
 
2697
  /* Finish off c for MAX_CONDITIONAL_EXECUTE.  */
2698
  for (i = 0; VEC_iterate (int, else_regs, i, reg); ++i)
2699
    if (!then_vals[reg])
2700
      ++c;
2701
 
2702
  /* Make sure it is reasonable to convert this block.  What matters
2703
     is the number of assignments currently made in only one of the
2704
     branches, since if we convert we are going to always execute
2705
     them.  */
2706
  if (c > MAX_CONDITIONAL_EXECUTE)
2707
    {
2708
      VEC_free (int, heap, then_regs);
2709
      VEC_free (int, heap, else_regs);
2710
      return FALSE;
2711
    }
2712
 
2713
  /* Try to emit the conditional moves.  First do the then block,
2714
     then do anything left in the else blocks.  */
2715
  start_sequence ();
2716
  if (!cond_move_convert_if_block (if_info, then_bb, cond,
2717
                                   then_vals, else_vals, false)
2718
      || (else_bb
2719
          && !cond_move_convert_if_block (if_info, else_bb, cond,
2720
                                          then_vals, else_vals, true)))
2721
    {
2722
      end_sequence ();
2723
      VEC_free (int, heap, then_regs);
2724
      VEC_free (int, heap, else_regs);
2725
      return FALSE;
2726
    }
2727
  seq = end_ifcvt_sequence (if_info);
2728
  if (!seq)
2729
    {
2730
      VEC_free (int, heap, then_regs);
2731
      VEC_free (int, heap, else_regs);
2732
      return FALSE;
2733
    }
2734
 
2735
  loc_insn = first_active_insn (then_bb);
2736
  if (!loc_insn)
2737
    {
2738
      loc_insn = first_active_insn (else_bb);
2739
      gcc_assert (loc_insn);
2740
    }
2741
  emit_insn_before_setloc (seq, jump, INSN_LOCATOR (loc_insn));
2742
 
2743
  if (else_bb)
2744
    {
2745
      delete_basic_block (else_bb);
2746
      num_true_changes++;
2747
    }
2748
  else
2749
    remove_edge (find_edge (test_bb, join_bb));
2750
 
2751
  remove_edge (find_edge (then_bb, join_bb));
2752
  redirect_edge_and_branch_force (single_succ_edge (test_bb), join_bb);
2753
  delete_basic_block (then_bb);
2754
  num_true_changes++;
2755
 
2756
  if (can_merge_blocks_p (test_bb, join_bb))
2757
    {
2758
      merge_blocks (test_bb, join_bb);
2759
      num_true_changes++;
2760
    }
2761
 
2762
  num_updated_if_blocks++;
2763
 
2764
  VEC_free (int, heap, then_regs);
2765
  VEC_free (int, heap, else_regs);
2766
  return TRUE;
2767
}
2768
 
2769
 
2770
/* Determine if a given basic block heads a simple IF-THEN-JOIN or an
2771
   IF-THEN-ELSE-JOIN block.
2772
 
2773
   If so, we'll try to convert the insns to not require the branch,
2774
   using only transformations that do not require conditional execution.
2775
 
2776
   Return TRUE if we were successful at converting the block.  */
2777
 
2778
static int
noce_find_if_block (basic_block test_bb,
                    edge then_edge, edge else_edge,
                    int pass)
{
  basic_block then_bb, else_bb, join_bb;
  bool then_else_reversed = false;
  rtx jump, cond;
  rtx cond_earliest;
  struct noce_if_info if_info;

  /* We only ever should get here before reload.  */
  gcc_assert (!reload_completed);

  /* Recognize an IF-THEN-ELSE-JOIN block: both successors of TEST_BB
     are single-entry, single-exit blocks that meet in a common JOIN.  */
  if (single_pred_p (then_edge->dest)
      && single_succ_p (then_edge->dest)
      && single_pred_p (else_edge->dest)
      && single_succ_p (else_edge->dest)
      && single_succ (then_edge->dest) == single_succ (else_edge->dest))
    {
      then_bb = then_edge->dest;
      else_bb = else_edge->dest;
      join_bb = single_succ (then_bb);
    }
  /* Recognize an IF-THEN-JOIN block: the THEN block falls into the
     ELSE edge's destination, which therefore acts as the JOIN.  */
  else if (single_pred_p (then_edge->dest)
           && single_succ_p (then_edge->dest)
           && single_succ (then_edge->dest) == else_edge->dest)
    {
      then_bb = then_edge->dest;
      else_bb = NULL_BLOCK;
      join_bb = else_edge->dest;
    }
  /* Recognize an IF-ELSE-JOIN block.  We can have those because the order
     of basic blocks in cfglayout mode does not matter, so the fallthrough
     edge can go to any basic block (and not just to bb->next_bb, like in
     cfgrtl mode).  */
  else if (single_pred_p (else_edge->dest)
           && single_succ_p (else_edge->dest)
           && single_succ (else_edge->dest) == then_edge->dest)
    {
      /* The noce transformations do not apply to IF-ELSE-JOIN blocks.
         To make this work, we have to invert the THEN and ELSE blocks
         and reverse the jump condition.  */
      then_bb = else_edge->dest;
      else_bb = NULL_BLOCK;
      join_bb = single_succ (then_bb);
      then_else_reversed = true;
    }
  else
    /* Not a form we can handle.  */
    return FALSE;

  /* The edges of the THEN and ELSE blocks cannot have complex edges.  */
  if (single_succ_edge (then_bb)->flags & EDGE_COMPLEX)
    return FALSE;
  if (else_bb
      && single_succ_edge (else_bb)->flags & EDGE_COMPLEX)
    return FALSE;

  num_possible_if_blocks++;

  if (dump_file)
    {
      fprintf (dump_file,
               "\nIF-THEN%s-JOIN block found, pass %d, test %d, then %d",
               (else_bb) ? "-ELSE" : "",
               pass, test_bb->index, then_bb->index);

      if (else_bb)
        fprintf (dump_file, ", else %d", else_bb->index);

      fprintf (dump_file, ", join %d\n", join_bb->index);
    }

  /* If the conditional jump is more than just a conditional
     jump, then we can not do if-conversion on this block.  */
  jump = BB_END (test_bb);
  if (! onlyjump_p (jump))
    return FALSE;

  /* If this is not a standard conditional jump, we can't parse it.
     THEN_ELSE_REVERSED asks for the reversed condition, matching the
     block inversion done for the IF-ELSE-JOIN shape above.  */
  cond = noce_get_condition (jump,
                             &cond_earliest,
                             then_else_reversed);
  if (!cond)
    return FALSE;

  /* We must be comparing objects whose modes imply the size.  */
  if (GET_MODE (XEXP (cond, 0)) == BLKmode)
    return FALSE;

  /* Initialize an IF_INFO struct to pass around.  */
  memset (&if_info, 0, sizeof if_info);
  if_info.test_bb = test_bb;
  if_info.then_bb = then_bb;
  if_info.else_bb = else_bb;
  if_info.join_bb = join_bb;
  if_info.cond = cond;
  if_info.cond_earliest = cond_earliest;
  if_info.jump = jump;
  if_info.then_else_reversed = then_else_reversed;
  /* Branch cost heuristic input, scaled by whether this block is
     optimized for speed and whether the branch is predictable.  */
  if_info.branch_cost = BRANCH_COST (optimize_bb_for_speed_p (test_bb),
                                     predictable_edge_p (then_edge));

  /* Do the real work: first the straight-line (non-cmove)
     transformations, then the conditional-move based ones.  */

  if (noce_process_if_block (&if_info))
    return TRUE;

  if (HAVE_conditional_move
      && cond_move_process_if_block (&if_info))
    return TRUE;

  return FALSE;
}
2895
 
2896
 
2897
/* Merge the blocks of a converted IF-THEN(-ELSE)(-JOIN) region into one
   combined block and mark it for dataflow update.  */

static void
merge_if_block (struct ce_if_block * ce_info)
{
  basic_block test_bb = ce_info->test_bb;       /* last test block */
  basic_block then_bb = ce_info->then_bb;       /* THEN */
  basic_block else_bb = ce_info->else_bb;       /* ELSE or NULL */
  basic_block join_bb = ce_info->join_bb;       /* join block */
  basic_block combo_bb;

  /* All block merging is done into the lower block numbers.  */

  combo_bb = test_bb;
  df_set_bb_dirty (test_bb);

  /* Merge any basic blocks to handle && and || subtests.  Each of
     the blocks are on the fallthru path from the predecessor block.  */
  if (ce_info->num_multiple_test_blocks > 0)
    {
      basic_block bb = test_bb;
      basic_block last_test_bb = ce_info->last_test_bb;
      basic_block fallthru = block_fallthru (bb);

      /* Walk the fallthru chain, folding each subtest block into the
         combined block, until the last test block is absorbed.  */
      do
        {
          bb = fallthru;
          fallthru = block_fallthru (bb);
          merge_blocks (combo_bb, bb);
          num_true_changes++;
        }
      while (bb != last_test_bb);
    }

  /* Merge TEST block into THEN block.  Normally the THEN block won't have a
     label, but it might if there were || tests.  That label's count should be
     zero, and it normally should be removed.  */

  if (then_bb)
    {
      merge_blocks (combo_bb, then_bb);
      num_true_changes++;
    }

  /* The ELSE block, if it existed, had a label.  That label count
     will almost always be zero, but odd things can happen when labels
     get their addresses taken.  */
  if (else_bb)
    {
      merge_blocks (combo_bb, else_bb);
      num_true_changes++;
    }

  /* If there was no join block reported, that means it was not adjacent
     to the others, and so we cannot merge them.  */

  if (! join_bb)
    {
      rtx last = BB_END (combo_bb);

      /* The outgoing edge for the current COMBO block should already
         be correct.  Verify this.  */
      if (EDGE_COUNT (combo_bb->succs) == 0)
        /* No successors: the block must end in a noreturn call or an
           unconditional trap.  */
        gcc_assert (find_reg_note (last, REG_NORETURN, NULL)
                    || (NONJUMP_INSN_P (last)
                        && GET_CODE (PATTERN (last)) == TRAP_IF
                        && (TRAP_CONDITION (PATTERN (last))
                            == const_true_rtx)));

      else
      /* There should still be something at the end of the THEN or ELSE
         blocks taking us to our final destination.  */
        gcc_assert (JUMP_P (last)
                    || (EDGE_SUCC (combo_bb, 0)->dest == EXIT_BLOCK_PTR
                        && CALL_P (last)
                        && SIBLING_CALL_P (last))
                    || ((EDGE_SUCC (combo_bb, 0)->flags & EDGE_EH)
                        && can_throw_internal (last)));
    }

  /* The JOIN block may have had quite a number of other predecessors too.
     Since we've already merged the TEST, THEN and ELSE blocks, we should
     have only one remaining edge from our if-then-else diamond.  If there
     is more than one remaining edge, it must come from elsewhere.  There
     may be zero incoming edges if the THEN block didn't actually join
     back up (as with a call to a non-return function).  */
  else if (EDGE_COUNT (join_bb->preds) < 2
           && join_bb != EXIT_BLOCK_PTR)
    {
      /* We can merge the JOIN cleanly and update the dataflow try
         again on this pass.*/
      merge_blocks (combo_bb, join_bb);
      num_true_changes++;
    }
  else
    {
      /* We cannot merge the JOIN.  */

      /* The outgoing edge for the current COMBO block should already
         be correct.  Verify this.  */
      gcc_assert (single_succ_p (combo_bb)
                  && single_succ (combo_bb) == join_bb);

      /* Remove the jump and cruft from the end of the COMBO block.  */
      if (join_bb != EXIT_BLOCK_PTR)
        tidy_fallthru_edge (single_succ_edge (combo_bb));
    }

  num_updated_if_blocks++;
}
3007
 
3008
/* Find a block ending in a simple IF condition and try to transform it
3009
   in some way.  When converting a multi-block condition, put the new code
3010
   in the first such block and delete the rest.  Return a pointer to this
3011
   first block if some transformation was done.  Return NULL otherwise.  */
3012
 
3013
static basic_block
3014
find_if_header (basic_block test_bb, int pass)
3015
{
3016
  ce_if_block_t ce_info;
3017
  edge then_edge;
3018
  edge else_edge;
3019
 
3020
  /* The kind of block we're looking for has exactly two successors.  */
3021
  if (EDGE_COUNT (test_bb->succs) != 2)
3022
    return NULL;
3023
 
3024
  then_edge = EDGE_SUCC (test_bb, 0);
3025
  else_edge = EDGE_SUCC (test_bb, 1);
3026
 
3027
  if (df_get_bb_dirty (then_edge->dest))
3028
    return NULL;
3029
  if (df_get_bb_dirty (else_edge->dest))
3030
    return NULL;
3031
 
3032
  /* Neither edge should be abnormal.  */
3033
  if ((then_edge->flags & EDGE_COMPLEX)
3034
      || (else_edge->flags & EDGE_COMPLEX))
3035
    return NULL;
3036
 
3037
  /* Nor exit the loop.  */
3038
  if ((then_edge->flags & EDGE_LOOP_EXIT)
3039
      || (else_edge->flags & EDGE_LOOP_EXIT))
3040
    return NULL;
3041
 
3042
  /* The THEN edge is canonically the one that falls through.  */
3043
  if (then_edge->flags & EDGE_FALLTHRU)
3044
    ;
3045
  else if (else_edge->flags & EDGE_FALLTHRU)
3046
    {
3047
      edge e = else_edge;
3048
      else_edge = then_edge;
3049
      then_edge = e;
3050
    }
3051
  else
3052
    /* Otherwise this must be a multiway branch of some sort.  */
3053
    return NULL;
3054
 
3055
  memset (&ce_info, '\0', sizeof (ce_info));
3056
  ce_info.test_bb = test_bb;
3057
  ce_info.then_bb = then_edge->dest;
3058
  ce_info.else_bb = else_edge->dest;
3059
  ce_info.pass = pass;
3060
 
3061
#ifdef IFCVT_INIT_EXTRA_FIELDS
3062
  IFCVT_INIT_EXTRA_FIELDS (&ce_info);
3063
#endif
3064
 
3065
  if (! reload_completed
3066
      && noce_find_if_block (test_bb, then_edge, else_edge, pass))
3067
    goto success;
3068
 
3069
  if (targetm.have_conditional_execution () && reload_completed
3070
      && cond_exec_find_if_block (&ce_info))
3071
    goto success;
3072
 
3073
  if (HAVE_trap
3074
      && optab_handler (ctrap_optab, word_mode)->insn_code != CODE_FOR_nothing
3075
      && find_cond_trap (test_bb, then_edge, else_edge))
3076
    goto success;
3077
 
3078
  if (dom_info_state (CDI_POST_DOMINATORS) >= DOM_NO_FAST_QUERY
3079
      && (! targetm.have_conditional_execution () || reload_completed))
3080
    {
3081
      if (find_if_case_1 (test_bb, then_edge, else_edge))
3082
        goto success;
3083
      if (find_if_case_2 (test_bb, then_edge, else_edge))
3084
        goto success;
3085
    }
3086
 
3087
  return NULL;
3088
 
3089
 success:
3090
  if (dump_file)
3091
    fprintf (dump_file, "Conversion succeeded on pass %d.\n", pass);
3092
  /* Set this so we continue looking.  */
3093
  cond_exec_changed_p = TRUE;
3094
  return ce_info.test_bb;
3095
}
3096
 
3097
/* Return true if a block has two edges, one of which falls through to the next
3098
   block, and the other jumps to a specific block, so that we can tell if the
3099
   block is part of an && test or an || test.  Returns either -1 or the number
3100
   of non-note, non-jump, non-USE/CLOBBER insns in the block.  */
3101
 
3102
static int
3103
block_jumps_and_fallthru_p (basic_block cur_bb, basic_block target_bb)
3104
{
3105
  edge cur_edge;
3106
  int fallthru_p = FALSE;
3107
  int jump_p = FALSE;
3108
  rtx insn;
3109
  rtx end;
3110
  int n_insns = 0;
3111
  edge_iterator ei;
3112
 
3113
  if (!cur_bb || !target_bb)
3114
    return -1;
3115
 
3116
  /* If no edges, obviously it doesn't jump or fallthru.  */
3117
  if (EDGE_COUNT (cur_bb->succs) == 0)
3118
    return FALSE;
3119
 
3120
  FOR_EACH_EDGE (cur_edge, ei, cur_bb->succs)
3121
    {
3122
      if (cur_edge->flags & EDGE_COMPLEX)
3123
        /* Anything complex isn't what we want.  */
3124
        return -1;
3125
 
3126
      else if (cur_edge->flags & EDGE_FALLTHRU)
3127
        fallthru_p = TRUE;
3128
 
3129
      else if (cur_edge->dest == target_bb)
3130
        jump_p = TRUE;
3131
 
3132
      else
3133
        return -1;
3134
    }
3135
 
3136
  if ((jump_p & fallthru_p) == 0)
3137
    return -1;
3138
 
3139
  /* Don't allow calls in the block, since this is used to group && and ||
3140
     together for conditional execution support.  ??? we should support
3141
     conditional execution support across calls for IA-64 some day, but
3142
     for now it makes the code simpler.  */
3143
  end = BB_END (cur_bb);
3144
  insn = BB_HEAD (cur_bb);
3145
 
3146
  while (insn != NULL_RTX)
3147
    {
3148
      if (CALL_P (insn))
3149
        return -1;
3150
 
3151
      if (INSN_P (insn)
3152
          && !JUMP_P (insn)
3153
          && !DEBUG_INSN_P (insn)
3154
          && GET_CODE (PATTERN (insn)) != USE
3155
          && GET_CODE (PATTERN (insn)) != CLOBBER)
3156
        n_insns++;
3157
 
3158
      if (insn == end)
3159
        break;
3160
 
3161
      insn = NEXT_INSN (insn);
3162
    }
3163
 
3164
  return n_insns;
3165
}
3166
 
3167
/* Determine if a given basic block heads a simple IF-THEN or IF-THEN-ELSE
   block.  If so, we'll try to convert the insns to not require the branch.
   Return TRUE if we were successful at converting the block.  */

static int
cond_exec_find_if_block (struct ce_if_block * ce_info)
{
  basic_block test_bb = ce_info->test_bb;
  basic_block then_bb = ce_info->then_bb;
  basic_block else_bb = ce_info->else_bb;
  basic_block join_bb = NULL_BLOCK;
  edge cur_edge;
  basic_block next;
  edge_iterator ei;

  ce_info->last_test_bb = test_bb;

  /* We only ever should get here after reload,
     and only if we have conditional execution.  */
  gcc_assert (targetm.have_conditional_execution () && reload_completed);

  /* Discover if any fall through predecessors of the current test basic block
     were && tests (which jump to the else block) or || tests (which jump to
     the then block).  */
  if (single_pred_p (test_bb)
      && single_pred_edge (test_bb)->flags == EDGE_FALLTHRU)
    {
      basic_block bb = single_pred (test_bb);
      basic_block target_bb;
      int max_insns = MAX_CONDITIONAL_EXECUTE;
      int n_insns;

      /* Determine if the preceding block is an && or || block.  */
      if ((n_insns = block_jumps_and_fallthru_p (bb, else_bb)) >= 0)
        {
          ce_info->and_and_p = TRUE;
          target_bb = else_bb;
        }
      else if ((n_insns = block_jumps_and_fallthru_p (bb, then_bb)) >= 0)
        {
          ce_info->and_and_p = FALSE;
          target_bb = then_bb;
        }
      else
        target_bb = NULL_BLOCK;

      if (target_bb && n_insns <= max_insns)
        {
          int total_insns = 0;
          int blocks = 0;

          ce_info->last_test_bb = test_bb;

          /* Found at least one && or || block, look for more.  Walk up
             the chain of fallthru predecessors, accumulating subtest
             blocks while the total insn count stays within budget.  */
          do
            {
              ce_info->test_bb = test_bb = bb;
              total_insns += n_insns;
              blocks++;

              if (!single_pred_p (bb))
                break;

              bb = single_pred (bb);
              n_insns = block_jumps_and_fallthru_p (bb, target_bb);
            }
          while (n_insns >= 0 && (total_insns + n_insns) <= max_insns);

          ce_info->num_multiple_test_blocks = blocks;
          ce_info->num_multiple_test_insns = total_insns;

          if (ce_info->and_and_p)
            ce_info->num_and_and_blocks = blocks;
          else
            ce_info->num_or_or_blocks = blocks;
        }
    }

  /* The THEN block of an IF-THEN combo must have exactly one predecessor,
     other than any || blocks which jump to the THEN block.  */
  if ((EDGE_COUNT (then_bb->preds) - ce_info->num_or_or_blocks) != 1)
    return FALSE;

  /* The edges of the THEN and ELSE blocks cannot have complex edges.  */
  FOR_EACH_EDGE (cur_edge, ei, then_bb->preds)
    {
      if (cur_edge->flags & EDGE_COMPLEX)
        return FALSE;
    }

  FOR_EACH_EDGE (cur_edge, ei, else_bb->preds)
    {
      if (cur_edge->flags & EDGE_COMPLEX)
        return FALSE;
    }

  /* The THEN block of an IF-THEN combo must have zero or one successors.  */
  if (EDGE_COUNT (then_bb->succs) > 0
      && (!single_succ_p (then_bb)
          || (single_succ_edge (then_bb)->flags & EDGE_COMPLEX)
          || (epilogue_completed && tablejump_p (BB_END (then_bb), NULL, NULL))))
    return FALSE;

  /* If the THEN block has no successors, conditional execution can still
     make a conditional call.  Don't do this unless the ELSE block has
     only one incoming edge -- the CFG manipulation is too ugly otherwise.
     Check for the last insn of the THEN block being an indirect jump, which
     is listed as not having any successors, but confuses the rest of the CE
     code processing.  ??? we should fix this in the future.  */
  if (EDGE_COUNT (then_bb->succs) == 0)
    {
      if (single_pred_p (else_bb))
        {
          rtx last_insn = BB_END (then_bb);

          /* Skip trailing notes to find the real last insn.  */
          while (last_insn
                 && NOTE_P (last_insn)
                 && last_insn != BB_HEAD (then_bb))
            last_insn = PREV_INSN (last_insn);

          if (last_insn
              && JUMP_P (last_insn)
              && ! simplejump_p (last_insn))
            return FALSE;

          join_bb = else_bb;
          else_bb = NULL_BLOCK;
        }
      else
        return FALSE;
    }

  /* If the THEN block's successor is the other edge out of the TEST block,
     then we have an IF-THEN combo without an ELSE.  */
  else if (single_succ (then_bb) == else_bb)
    {
      join_bb = else_bb;
      else_bb = NULL_BLOCK;
    }

  /* If the THEN and ELSE block meet in a subsequent block, and the ELSE
     has exactly one predecessor and one successor, and the outgoing edge
     is not complex, then we have an IF-THEN-ELSE combo.  */
  else if (single_succ_p (else_bb)
           && single_succ (then_bb) == single_succ (else_bb)
           && single_pred_p (else_bb)
           && ! (single_succ_edge (else_bb)->flags & EDGE_COMPLEX)
           && ! (epilogue_completed && tablejump_p (BB_END (else_bb), NULL, NULL)))
    join_bb = single_succ (else_bb);

  /* Otherwise it is not an IF-THEN or IF-THEN-ELSE combination.  */
  else
    return FALSE;

  num_possible_if_blocks++;

  if (dump_file)
    {
      fprintf (dump_file,
               "\nIF-THEN%s block found, pass %d, start block %d "
               "[insn %d], then %d [%d]",
               (else_bb) ? "-ELSE" : "",
               ce_info->pass,
               test_bb->index,
               BB_HEAD (test_bb) ? (int)INSN_UID (BB_HEAD (test_bb)) : -1,
               then_bb->index,
               BB_HEAD (then_bb) ? (int)INSN_UID (BB_HEAD (then_bb)) : -1);

      if (else_bb)
        fprintf (dump_file, ", else %d [%d]",
                 else_bb->index,
                 BB_HEAD (else_bb) ? (int)INSN_UID (BB_HEAD (else_bb)) : -1);

      fprintf (dump_file, ", join %d [%d]",
               join_bb->index,
               BB_HEAD (join_bb) ? (int)INSN_UID (BB_HEAD (join_bb)) : -1);

      if (ce_info->num_multiple_test_blocks > 0)
        fprintf (dump_file, ", %d %s block%s last test %d [%d]",
                 ce_info->num_multiple_test_blocks,
                 (ce_info->and_and_p) ? "&&" : "||",
                 (ce_info->num_multiple_test_blocks == 1) ? "" : "s",
                 ce_info->last_test_bb->index,
                 ((BB_HEAD (ce_info->last_test_bb))
                  ? (int)INSN_UID (BB_HEAD (ce_info->last_test_bb))
                  : -1));

      fputc ('\n', dump_file);
    }

  /* Make sure IF, THEN, and ELSE, blocks are adjacent.  Actually, we get the
     first condition for free, since we've already asserted that there's a
     fallthru edge from IF to THEN.  Likewise for the && and || blocks, since
     we checked the FALLTHRU flag, those are already adjacent to the last IF
     block.  */
  /* ??? As an enhancement, move the ELSE block.  Have to deal with
     BLOCK notes, if by no other means than backing out the merge if they
     exist.  Sticky enough I don't want to think about it now.  */
  next = then_bb;
  if (else_bb && (next = next->next_bb) != else_bb)
    return FALSE;
  if ((next = next->next_bb) != join_bb && join_bb != EXIT_BLOCK_PTR)
    {
      if (else_bb)
        join_bb = NULL;
      else
        return FALSE;
    }

  /* Do the real work.  */

  ce_info->else_bb = else_bb;
  ce_info->join_bb = join_bb;

  /* If we have && and || tests, try to first handle combining the && and ||
     tests into the conditional code, and if that fails, go back and handle
     it without the && and ||, which at present handles the && case if there
     was no ELSE block.  */
  if (cond_exec_process_if_block (ce_info, TRUE))
    return TRUE;

  if (ce_info->num_multiple_test_blocks)
    {
      cancel_changes (0);

      if (cond_exec_process_if_block (ce_info, FALSE))
        return TRUE;
    }

  return FALSE;
}
3398
 
3399
/* Convert a branch over a trap, or a branch
   to a trap, into a conditional trap.  Return TRUE on success.  */

static int
find_cond_trap (basic_block test_bb, edge then_edge, edge else_edge)
{
  basic_block then_bb = then_edge->dest;
  basic_block else_bb = else_edge->dest;
  basic_block other_bb, trap_bb;
  rtx trap, jump, cond, cond_earliest, seq;
  enum rtx_code code;

  /* Locate the block with the trap instruction.  */
  /* ??? While we look for no successors, we really ought to allow
     EH successors.  Need to fix merge_if_block for that to work.  */
  if ((trap = block_has_only_trap (then_bb)) != NULL)
    trap_bb = then_bb, other_bb = else_bb;
  else if ((trap = block_has_only_trap (else_bb)) != NULL)
    trap_bb = else_bb, other_bb = then_bb;
  else
    return FALSE;

  if (dump_file)
    {
      fprintf (dump_file, "\nTRAP-IF block found, start %d, trap %d\n",
               test_bb->index, trap_bb->index);
    }

  /* If this is not a standard conditional jump, we can't parse it.  */
  jump = BB_END (test_bb);
  cond = noce_get_condition (jump, &cond_earliest, false);
  if (! cond)
    return FALSE;

  /* If the conditional jump is more than just a conditional jump, then
     we can not do if-conversion on this block.  */
  if (! onlyjump_p (jump))
    return FALSE;

  /* We must be comparing objects whose modes imply the size.  */
  if (GET_MODE (XEXP (cond, 0)) == BLKmode)
    return FALSE;

  /* Reverse the comparison code, if necessary.  NOTE(review): when the
     trap is on the THEN arm the trap must fire under the reversed
     condition of the jump — confirm against noce_get_condition's
     contract for which arm the returned condition selects.  */
  code = GET_CODE (cond);
  if (then_bb == trap_bb)
    {
      code = reversed_comparison_code (cond, jump);
      if (code == UNKNOWN)
        return FALSE;
    }

  /* Attempt to generate the conditional trap.  */
  seq = gen_cond_trap (code, copy_rtx (XEXP (cond, 0)),
                       copy_rtx (XEXP (cond, 1)),
                       TRAP_CODE (PATTERN (trap)));
  if (seq == NULL)
    return FALSE;

  /* Emit the new insns before cond_earliest.  */
  emit_insn_before_setloc (seq, cond_earliest, INSN_LOCATOR (trap));

  /* Delete the trap block if possible.  */
  remove_edge (trap_bb == then_bb ? then_edge : else_edge);
  df_set_bb_dirty (test_bb);
  df_set_bb_dirty (then_bb);
  df_set_bb_dirty (else_bb);

  if (EDGE_COUNT (trap_bb->preds) == 0)
    {
      delete_basic_block (trap_bb);
      num_true_changes++;
    }

  /* Wire together the blocks again.  In cfglayout mode the surviving
     edge simply becomes a fallthru; in cfgrtl mode an explicit
     unconditional jump to the old label is emitted before the old
     conditional jump is deleted.  */
  if (current_ir_type () == IR_RTL_CFGLAYOUT)
    single_succ_edge (test_bb)->flags |= EDGE_FALLTHRU;
  else
    {
      rtx lab, newjump;

      lab = JUMP_LABEL (jump);
      newjump = emit_jump_insn_after (gen_jump (lab), jump);
      LABEL_NUSES (lab) += 1;
      JUMP_LABEL (newjump) = lab;
      emit_barrier_after (newjump);
    }
  delete_insn (jump);

  if (can_merge_blocks_p (test_bb, other_bb))
    {
      merge_blocks (test_bb, other_bb);
      num_true_changes++;
    }

  num_updated_if_blocks++;
  return TRUE;
}
3497
 
3498
/* Subroutine of find_cond_trap: if BB contains only a trap insn,
3499
   return it.  */
3500
 
3501
static rtx
3502
block_has_only_trap (basic_block bb)
3503
{
3504
  rtx trap;
3505
 
3506
  /* We're not the exit block.  */
3507
  if (bb == EXIT_BLOCK_PTR)
3508
    return NULL_RTX;
3509
 
3510
  /* The block must have no successors.  */
3511
  if (EDGE_COUNT (bb->succs) > 0)
3512
    return NULL_RTX;
3513
 
3514
  /* The only instruction in the THEN block must be the trap.  */
3515
  trap = first_active_insn (bb);
3516
  if (! (trap == BB_END (bb)
3517
         && GET_CODE (PATTERN (trap)) == TRAP_IF
3518
         && TRAP_CONDITION (PATTERN (trap)) == const_true_rtx))
3519
    return NULL_RTX;
3520
 
3521
  return trap;
3522
}
3523
 
3524
/* Look for IF-THEN-ELSE cases in which one of THEN or ELSE is
   transformable, but not necessarily the other.  There need be no
   JOIN block.

   Return TRUE if we were successful at converting the block.

   Cases we'd like to look at:

   (1)
        if (test) goto over; // x not live
        x = a;
        goto label;
        over:

   becomes

        x = a;
        if (! test) goto label;

   (2)
        if (test) goto E; // x not live
        x = big();
        goto L;
        E:
        x = b;
        goto M;

   becomes

        x = b;
        if (test) goto M;
        x = big();
        goto L;

   (3) // This one's really only interesting for targets that can do
       // multiway branching, e.g. IA-64 BBB bundles.  For other targets
       // it results in multiple branches on a cache line, which often
       // does not sit well with predictors.

        if (test1) goto E; // predicted not taken
        x = a;
        if (test2) goto F;
        ...
        E:
        x = b;
        J:

   becomes

        x = a;
        if (test1) goto E;
        if (test2) goto F;

   Notes:

   (A) Don't do (2) if the branch is predicted against the block we're
   eliminating.  Do it anyway if we can eliminate a branch; this requires
   that the sole successor of the eliminated block postdominate the other
   side of the if.

   (B) With CE, on (3) we can steal from both sides of the if, creating

        if (test1) x = a;
        if (!test1) x = b;
        if (test1) goto J;
        if (test2) goto F;
        ...
        J:

   Again, this is most useful if J postdominates.

   (C) CE substitutes for helpful life information.

   (D) These heuristics need a lot of work.  */
3598
 
3599
/* Tests for case 1 above: convert

        if (test) goto over;  // x not live
        x = a;
        goto label;
        over:

   into "x = a; if (! test) goto label;".  TEST_BB is the block ending
   in the conditional branch; THEN_EDGE and ELSE_EDGE are its two
   outgoing edges.  Return TRUE if the conversion was performed.  */

static int
find_if_case_1 (basic_block test_bb, edge then_edge, edge else_edge)
{
  basic_block then_bb = then_edge->dest;
  basic_block else_bb = else_edge->dest;
  basic_block new_bb;
  int then_bb_index;

  /* If we are partitioning hot/cold basic blocks, we don't want to
     mess up unconditional or indirect jumps that cross between hot
     and cold sections.

     Basic block partitioning may result in some jumps that appear to
     be optimizable (or blocks that appear to be mergeable), but which really
     must be left untouched (they are required to make it safely across
     partition boundaries).  See  the comments at the top of
     bb-reorder.c:partition_hot_cold_basic_blocks for complete details.  */

  if ((BB_END (then_bb)
       && find_reg_note (BB_END (then_bb), REG_CROSSING_JUMP, NULL_RTX))
      || (BB_END (test_bb)
          && find_reg_note (BB_END (test_bb), REG_CROSSING_JUMP, NULL_RTX))
      || (BB_END (else_bb)
          && find_reg_note (BB_END (else_bb), REG_CROSSING_JUMP,
                            NULL_RTX)))
    return FALSE;

  /* THEN has one successor.  */
  if (!single_succ_p (then_bb))
    return FALSE;

  /* THEN does not fall through, but is not strange either.  */
  if (single_succ_edge (then_bb)->flags & (EDGE_COMPLEX | EDGE_FALLTHRU))
    return FALSE;

  /* THEN has one predecessor.  */
  if (!single_pred_p (then_bb))
    return FALSE;

  /* THEN must do something, i.e. it is not a pure forwarder block.  */
  if (forwarder_block_p (then_bb))
    return FALSE;

  num_possible_if_blocks++;
  if (dump_file)
    fprintf (dump_file,
             "\nIF-CASE-1 found, start %d, then %d\n",
             test_bb->index, then_bb->index);

  /* THEN is small: its insn cost must not exceed the cost of the branch
     we would be removing.  */
  if (! cheap_bb_rtx_cost_p (then_bb,
        COSTS_N_INSNS (BRANCH_COST (optimize_bb_for_speed_p (then_edge->src),
                                    predictable_edge_p (then_edge)))))
    return FALSE;

  /* Registers set are dead, or are predicable.  */
  if (! dead_or_predicable (test_bb, then_bb, else_bb,
                            single_succ (then_bb), 1))
    return FALSE;

  /* Conversion went ok, including moving the insns and fixing up the
     jump.  Adjust the CFG to match.  */

  /* We can avoid creating a new basic block if then_bb is immediately
     followed by else_bb, i.e. deleting then_bb allows test_bb to fall
     thru to else_bb.  */

  if (then_bb->next_bb == else_bb
      && then_bb->prev_bb == test_bb
      && else_bb != EXIT_BLOCK_PTR)
    {
      redirect_edge_succ (FALLTHRU_EDGE (test_bb), else_bb);
      new_bb = 0;
    }
  else
    new_bb = redirect_edge_and_branch_force (FALLTHRU_EDGE (test_bb),
                                             else_bb);

  df_set_bb_dirty (test_bb);
  df_set_bb_dirty (else_bb);

  then_bb_index = then_bb->index;
  delete_basic_block (then_bb);

  /* Make rest of code believe that the newly created block is the THEN_BB
     block we removed.  */
  if (new_bb)
    {
      df_bb_replace (then_bb_index, new_bb);
      /* Since the fallthru edge was redirected from test_bb to new_bb,
         we need to ensure that new_bb is in the same partition as
         test bb (you can not fall through across section boundaries).  */
      BB_COPY_PARTITION (new_bb, test_bb);
    }

  num_true_changes++;
  num_updated_if_blocks++;

  return TRUE;
}
3701
 
3702
/* Test for case 2 above: convert

        if (test) goto E;  // x not live
        x = big();
        goto L;
        E:
        x = b;
        goto M;

   into "x = b; if (test) goto M; x = big(); goto L;".  TEST_BB ends in
   the conditional branch; THEN_EDGE and ELSE_EDGE are its outgoing
   edges.  Return TRUE if the conversion was performed.  */

static int
find_if_case_2 (basic_block test_bb, edge then_edge, edge else_edge)
{
  basic_block then_bb = then_edge->dest;
  basic_block else_bb = else_edge->dest;
  edge else_succ;
  rtx note;

  /* If we are partitioning hot/cold basic blocks, we don't want to
     mess up unconditional or indirect jumps that cross between hot
     and cold sections.

     Basic block partitioning may result in some jumps that appear to
     be optimizable (or blocks that appear to be mergeable), but which really
     must be left untouched (they are required to make it safely across
     partition boundaries).  See  the comments at the top of
     bb-reorder.c:partition_hot_cold_basic_blocks for complete details.  */

  if ((BB_END (then_bb)
       && find_reg_note (BB_END (then_bb), REG_CROSSING_JUMP, NULL_RTX))
      || (BB_END (test_bb)
          && find_reg_note (BB_END (test_bb), REG_CROSSING_JUMP, NULL_RTX))
      || (BB_END (else_bb)
          && find_reg_note (BB_END (else_bb), REG_CROSSING_JUMP,
                            NULL_RTX)))
    return FALSE;

  /* ELSE has one successor.  */
  if (!single_succ_p (else_bb))
    return FALSE;
  else
    else_succ = single_succ_edge (else_bb);

  /* ELSE outgoing edge is not complex.  */
  if (else_succ->flags & EDGE_COMPLEX)
    return FALSE;

  /* ELSE has one predecessor.  */
  if (!single_pred_p (else_bb))
    return FALSE;

  /* THEN is not EXIT (or the entry block).  */
  if (then_bb->index < NUM_FIXED_BLOCKS)
    return FALSE;

  /* See note (A) above: only do the transformation when ELSE is
     predicted taken, or SUCC(ELSE) postdominates THEN.  */
  note = find_reg_note (BB_END (test_bb), REG_BR_PROB, NULL_RTX);
  if (note && INTVAL (XEXP (note, 0)) >= REG_BR_PROB_BASE / 2)
    ;
  else if (else_succ->dest->index < NUM_FIXED_BLOCKS
           || dominated_by_p (CDI_POST_DOMINATORS, then_bb,
                              else_succ->dest))
    ;
  else
    return FALSE;

  num_possible_if_blocks++;
  if (dump_file)
    fprintf (dump_file,
             "\nIF-CASE-2 found, start %d, else %d\n",
             test_bb->index, else_bb->index);

  /* ELSE is small: its insn cost must not exceed the cost of the branch
     we would be removing.  */
  if (! cheap_bb_rtx_cost_p (else_bb,
        COSTS_N_INSNS (BRANCH_COST (optimize_bb_for_speed_p (else_edge->src),
                                    predictable_edge_p (else_edge)))))
    return FALSE;

  /* Registers set are dead, or are predicable.  */
  if (! dead_or_predicable (test_bb, else_bb, then_bb, else_succ->dest, 0))
    return FALSE;

  /* Conversion went ok, including moving the insns and fixing up the
     jump.  Adjust the CFG to match.  */

  df_set_bb_dirty (test_bb);
  df_set_bb_dirty (then_bb);
  delete_basic_block (else_bb);

  num_true_changes++;
  num_updated_if_blocks++;

  /* ??? We may now fallthru from one of THEN's successors into a join
     block.  Rerun cleanup_cfg?  Examine things manually?  Wait?  */

  return TRUE;
}
3791
 
3792
/* A subroutine of dead_or_predicable called through for_each_rtx.
3793
   Return 1 if a memory is found.  */
3794
 
3795
static int
3796
find_memory (rtx *px, void *data ATTRIBUTE_UNUSED)
3797
{
3798
  return MEM_P (*px);
3799
}
3800
 
3801
/* Used by the code above to perform the actual rtl transformations.
   Return TRUE if successful.

   TEST_BB is the block containing the conditional branch.  MERGE_BB
   is the block containing the code to manipulate.  OTHER_BB is the
   other successor of TEST_BB (its live-in set constrains which
   registers MERGE_BB may clobber).  NEW_DEST is the label TEST_BB
   should be branching to after the conversion.  REVERSEP is true if
   the sense of the branch should be reversed.  */

static int
dead_or_predicable (basic_block test_bb, basic_block merge_bb,
                    basic_block other_bb, basic_block new_dest, int reversep)
{
  rtx head, end, jump, earliest = NULL_RTX, old_dest, new_label = NULL_RTX;
  /* Number of pending changes.  */
  int n_validated_changes = 0;

  jump = BB_END (test_bb);

  /* Find the extent of the real code in the merge block.  */
  head = BB_HEAD (merge_bb);
  end = BB_END (merge_bb);

  /* Skip trailing debug insns; they must not anchor the extent.  */
  while (DEBUG_INSN_P (end) && end != head)
    end = PREV_INSN (end);

  /* If merge_bb ends with a tablejump, predicating/moving insn's
     into test_bb and then deleting merge_bb will result in the jumptable
     that follows merge_bb being removed along with merge_bb and then we
     get an unresolved reference to the jumptable.  */
  if (tablejump_p (end, NULL, NULL))
    return FALSE;

  /* Skip any leading label, notes, and debug insns.  */
  if (LABEL_P (head))
    head = NEXT_INSN (head);
  while (DEBUG_INSN_P (head) && head != end)
    head = NEXT_INSN (head);
  if (NOTE_P (head))
    {
      if (head == end)
        {
          /* Nothing but a note: the merge block has no real body.  */
          head = end = NULL_RTX;
          goto no_body;
        }
      head = NEXT_INSN (head);
      while (DEBUG_INSN_P (head) && head != end)
        head = NEXT_INSN (head);
    }

  /* Exclude a trailing jump (and the debug insns before it) from the
     range to be moved/predicated.  */
  if (JUMP_P (end))
    {
      if (head == end)
        {
          head = end = NULL_RTX;
          goto no_body;
        }
      end = PREV_INSN (end);
      while (DEBUG_INSN_P (end) && end != head)
        end = PREV_INSN (end);
    }

  /* Disable handling dead code by conditional execution if the machine needs
     to do anything funny with the tests, etc.  */
#ifndef IFCVT_MODIFY_TESTS
  if (targetm.have_conditional_execution ())
    {
      /* In the conditional execution case, we have things easy.  We know
         the condition is reversible.  We don't have to check life info
         because we're going to conditionally execute the code anyway.
         All that's left is making sure the insns involved can actually
         be predicated.  */

      rtx cond, prob_val;

      cond = cond_exec_get_condition (jump);
      if (! cond)
        return FALSE;

      prob_val = find_reg_note (jump, REG_BR_PROB, NULL_RTX);
      if (prob_val)
        prob_val = XEXP (prob_val, 0);

      if (reversep)
        {
          enum rtx_code rev = reversed_comparison_code (cond, jump);
          if (rev == UNKNOWN)
            return FALSE;
          cond = gen_rtx_fmt_ee (rev, GET_MODE (cond), XEXP (cond, 0),
                                 XEXP (cond, 1));
          /* Reversing the branch also inverts its probability.  */
          if (prob_val)
            prob_val = GEN_INT (REG_BR_PROB_BASE - INTVAL (prob_val));
        }

      if (cond_exec_process_insns (NULL, head, end, cond, prob_val, 0)
          && verify_changes (0))
        n_validated_changes = num_validated_changes ();
      else
        cancel_changes (0);

      earliest = jump;
    }
#endif
  /* Try the NCE path if the CE path did not result in any changes.  */
  if (n_validated_changes == 0)
    {
      /* In the non-conditional execution case, we have to verify that there
         are no trapping operations, no calls, no references to memory, and
         that any registers modified are dead at the branch site.  */

      rtx insn, cond, prev;
      bitmap merge_set, test_live, test_set;
      unsigned i, fail = 0;
      bitmap_iterator bi;

      /* Check for no calls or trapping operations.  */
      for (insn = head; ; insn = NEXT_INSN (insn))
        {
          if (CALL_P (insn))
            return FALSE;
          if (NONDEBUG_INSN_P (insn))
            {
              if (may_trap_p (PATTERN (insn)))
                return FALSE;

              /* ??? Even non-trapping memories such as stack frame
                 references must be avoided.  For stores, we collect
                 no lifetime info; for reads, we'd have to assert
                 true_dependence false against every store in the
                 TEST range.  */
              if (for_each_rtx (&PATTERN (insn), find_memory, NULL))
                return FALSE;
            }
          if (insn == end)
            break;
        }

      if (! any_condjump_p (jump))
        return FALSE;

      /* Find the extent of the conditional.  */
      cond = noce_get_condition (jump, &earliest, false);
      if (! cond)
        return FALSE;

      /* Collect:
           MERGE_SET = set of registers set in MERGE_BB
           TEST_LIVE = set of registers live at EARLIEST
           TEST_SET  = set of registers set between EARLIEST and the
                       end of the block.  */

      merge_set = BITMAP_ALLOC (&reg_obstack);
      test_live = BITMAP_ALLOC (&reg_obstack);
      test_set = BITMAP_ALLOC (&reg_obstack);

      /* ??? bb->local_set is only valid during calculate_global_regs_live,
         so we must recompute usage for MERGE_BB.  Not so bad, I suppose,
         since we've already asserted that MERGE_BB is small.  */
      /* If we allocated new pseudos (e.g. in the conditional move
         expander called from noce_emit_cmove), we must resize the
         array first.  */
      if (max_regno < max_reg_num ())
        max_regno = max_reg_num ();

      FOR_BB_INSNS (merge_bb, insn)
        {
          if (NONDEBUG_INSN_P (insn))
            {
              unsigned int uid = INSN_UID (insn);
              df_ref *def_rec;
              for (def_rec = DF_INSN_UID_DEFS (uid); *def_rec; def_rec++)
                {
                  df_ref def = *def_rec;
                  bitmap_set_bit (merge_set, DF_REF_REGNO (def));
                }
            }
        }

      /* For small register class machines, don't lengthen lifetimes of
         hard registers before reload.  */
      if (SMALL_REGISTER_CLASSES && ! reload_completed)
        {
          EXECUTE_IF_SET_IN_BITMAP (merge_set, 0, i, bi)
            {
              if (i < FIRST_PSEUDO_REGISTER
                  && ! fixed_regs[i]
                  && ! global_regs[i])
                fail = 1;
            }
        }

      /* For TEST, we're interested in a range of insns, not a whole block.
         Moreover, we're interested in the insns live from OTHER_BB.  */

      /* The loop below takes the set of live registers
         after JUMP, and calculates the live set before EARLIEST. */
      bitmap_copy (test_live, df_get_live_in (other_bb));
      df_simulate_initialize_backwards (test_bb, test_live);
      for (insn = jump; ; insn = prev)
        {
          if (INSN_P (insn))
            {
              df_simulate_find_defs (insn, test_set);
              df_simulate_one_insn_backwards (test_bb, insn, test_live);
            }
          prev = PREV_INSN (insn);
          if (insn == earliest)
            break;
        }

      /* We can perform the transformation if
           MERGE_SET & (TEST_SET | TEST_LIVE)
         and
           TEST_SET & DF_LIVE_IN (merge_bb)
         are empty.  */

      if (bitmap_intersect_p (test_set, merge_set)
          || bitmap_intersect_p (test_live, merge_set)
          || bitmap_intersect_p (test_set, df_get_live_in (merge_bb)))
        fail = 1;

      BITMAP_FREE (merge_set);
      BITMAP_FREE (test_live);
      BITMAP_FREE (test_set);

      if (fail)
        return FALSE;
    }

 no_body:
  /* We don't want to use normal invert_jump or redirect_jump because
     we don't want to delete_insn called.  Also, we want to do our own
     change group management.  */

  old_dest = JUMP_LABEL (jump);
  if (other_bb != new_dest)
    {
      new_label = block_label (new_dest);
      if (reversep
          ? ! invert_jump_1 (jump, new_label)
          : ! redirect_jump_1 (jump, new_label))
        goto cancel;
    }

  /* Commit all pending changes (including any queued by the CE path
     above) as one group; cancel everything on failure.  */
  if (verify_changes (n_validated_changes))
    confirm_change_group ();
  else
    goto cancel;

  if (other_bb != new_dest)
    {
      redirect_jump_2 (jump, old_dest, new_label, 0, reversep);

      redirect_edge_succ (BRANCH_EDGE (test_bb), new_dest);
      if (reversep)
        {
          /* Swap the counts and probabilities of the branch and
             fallthru edges to match the inverted jump.  */
          gcov_type count, probability;
          count = BRANCH_EDGE (test_bb)->count;
          BRANCH_EDGE (test_bb)->count = FALLTHRU_EDGE (test_bb)->count;
          FALLTHRU_EDGE (test_bb)->count = count;
          probability = BRANCH_EDGE (test_bb)->probability;
          BRANCH_EDGE (test_bb)->probability
            = FALLTHRU_EDGE (test_bb)->probability;
          FALLTHRU_EDGE (test_bb)->probability = probability;
          update_br_prob_note (test_bb);
        }
    }

  /* Move the insns out of MERGE_BB to before the branch.  */
  if (head != NULL)
    {
      rtx insn;

      if (end == BB_END (merge_bb))
        BB_END (merge_bb) = PREV_INSN (head);

      /* PR 21767: When moving insns above a conditional branch, REG_EQUAL
         notes might become invalid.  */
      insn = head;
      do
        {
          rtx note, set;

          if (! INSN_P (insn))
            continue;
          note = find_reg_note (insn, REG_EQUAL, NULL_RTX);
          if (! note)
            continue;
          set = single_set (insn);
          if (!set || !function_invariant_p (SET_SRC (set))
              || !function_invariant_p (XEXP (note, 0)))
            remove_note (insn, note);
        } while (insn != end && (insn = NEXT_INSN (insn)));

      reorder_insns (head, end, PREV_INSN (earliest));
    }

  /* Remove the jump and edge if we can.  */
  if (other_bb == new_dest)
    {
      delete_insn (jump);
      remove_edge (BRANCH_EDGE (test_bb));
      /* ??? Can't merge blocks here, as then_bb is still in use.
         At minimum, the merge will get done just before bb-reorder.  */
    }

  return TRUE;

 cancel:
  cancel_changes (0);
  return FALSE;
}
4111
 
4112
/* Main entry point for all if-conversion.  Runs the converters over
   every basic block, repeating while the conditional-execution
   converters keep making changes, then emits statistics to the dump
   file.  */

static void
if_convert (void)
{
  basic_block bb;
  int pass;

  /* At -O1 the live problem is added here and removed again at the
     end of this function (see the matching df_remove_problem below).  */
  if (optimize == 1)
    {
      df_live_add_problem ();
      df_live_set_all_dirty ();
    }

  num_possible_if_blocks = 0;
  num_updated_if_blocks = 0;
  num_true_changes = 0;

  /* Mark loop exit edges so the converters can avoid disturbing them;
     the loop structures themselves are released immediately.  */
  loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
  mark_loop_exit_edges ();
  loop_optimizer_finalize ();
  free_dominance_info (CDI_DOMINATORS);

  /* Compute postdominators.  */
  calculate_dominance_info (CDI_POST_DOMINATORS);

  df_set_flags (DF_LR_RUN_DCE);

  /* Go through each of the basic blocks looking for things to convert.  If we
     have conditional execution, we make multiple passes to allow us to handle
     IF-THEN{-ELSE} blocks within other IF-THEN{-ELSE} blocks.  */
  pass = 0;
  do
    {
      df_analyze ();
      /* Only need to do dce on the first pass.  */
      df_clear_flags (DF_LR_RUN_DCE);
      cond_exec_changed_p = FALSE;
      pass++;

#ifdef IFCVT_MULTIPLE_DUMPS
      if (dump_file && pass > 1)
        fprintf (dump_file, "\n\n========== Pass %d ==========\n", pass);
#endif

      FOR_EACH_BB (bb)
        {
          basic_block new_bb;
          /* Keep converting from the same header while it stays clean;
             find_if_header returns the block to continue from.  */
          while (!df_get_bb_dirty (bb)
                 && (new_bb = find_if_header (bb, pass)) != NULL)
            bb = new_bb;
        }

#ifdef IFCVT_MULTIPLE_DUMPS
      if (dump_file && cond_exec_changed_p)
        {
          if (dump_flags & TDF_SLIM)
            print_rtl_slim_with_bb (dump_file, get_insns (), dump_flags);
          else
            print_rtl_with_bb (dump_file, get_insns ());
        }
#endif
    }
  while (cond_exec_changed_p);

#ifdef IFCVT_MULTIPLE_DUMPS
  if (dump_file)
    fprintf (dump_file, "\n\n========== no more changes\n");
#endif

  free_dominance_info (CDI_POST_DOMINATORS);

  if (dump_file)
    fflush (dump_file);

  clear_aux_for_blocks ();

  /* If we allocated new pseudos, we must resize the array for sched1.  */
  if (max_regno < max_reg_num ())
    max_regno = max_reg_num ();

  /* Write the final stats.  */
  if (dump_file && num_possible_if_blocks > 0)
    {
      fprintf (dump_file,
               "\n%d possible IF blocks searched.\n",
               num_possible_if_blocks);
      fprintf (dump_file,
               "%d IF blocks converted.\n",
               num_updated_if_blocks);
      fprintf (dump_file,
               "%d true changes made.\n\n\n",
               num_true_changes);
    }

  if (optimize == 1)
    df_remove_problem (df_live);

#ifdef ENABLE_CHECKING
  verify_flow_info ();
#endif
}
4214
 
4215
static bool
4216
gate_handle_if_conversion (void)
4217
{
4218
  return (optimize > 0)
4219
    && dbg_cnt (if_conversion);
4220
}
4221
 
4222
/* If-conversion and CFG cleanup.  Execute callback for the "ce1"
   pass; always returns 0 (no additional TODO flags).  */
static unsigned int
rest_of_handle_if_conversion (void)
{
  if (flag_if_conversion)
    {
      if (dump_file)
        dump_flow_info (dump_file, dump_flags);
      /* Clean up the CFG aggressively before converting.  */
      cleanup_cfg (CLEANUP_EXPENSIVE);
      if_convert ();
    }

  /* A final, cheap cleanup runs even when if-conversion is disabled.  */
  cleanup_cfg (0);
  return 0;
}
4237
 
4238
/* Pass descriptor for the first if-conversion pass ("ce1").  */
struct rtl_opt_pass pass_rtl_ifcvt =
{
 {
  RTL_PASS,
  "ce1",                                /* name */
  gate_handle_if_conversion,            /* gate */
  rest_of_handle_if_conversion,         /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_IFCVT,                             /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_df_finish | TODO_verify_rtl_sharing |
  TODO_dump_func                        /* todo_flags_finish */
 }
};
4257
 
4258
static bool
4259
gate_handle_if_after_combine (void)
4260
{
4261
  return optimize > 0 && flag_if_conversion
4262
    && dbg_cnt (if_after_combine);
4263
}
4264
 
4265
 
4266
/* Rerun if-conversion, as combine may have simplified things enough
   to now meet sequence length restrictions.  Execute callback for the
   "ce2" pass; always returns 0.  */
static unsigned int
rest_of_handle_if_after_combine (void)
{
  if_convert ();
  return 0;
}
4274
 
4275
/* Pass descriptor for the post-combine if-conversion pass ("ce2").  */
struct rtl_opt_pass pass_if_after_combine =
{
 {
  RTL_PASS,
  "ce2",                                /* name */
  gate_handle_if_after_combine,         /* gate */
  rest_of_handle_if_after_combine,      /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_IFCVT,                             /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_df_finish | TODO_verify_rtl_sharing |
  TODO_dump_func |
  TODO_ggc_collect                      /* todo_flags_finish */
 }
};
4295
 
4296
 
4297
static bool
4298
gate_handle_if_after_reload (void)
4299
{
4300
  return optimize > 0 && flag_if_conversion2
4301
    && dbg_cnt (if_after_reload);
4302
}
4303
 
4304
/* Execute callback for the "ce3" pass: rerun if-conversion after
   register allocation and reload.  Always returns 0.  */
static unsigned int
rest_of_handle_if_after_reload (void)
{
  if_convert ();
  return 0;
}
4310
 
4311
 
4312
/* Pass descriptor for the post-reload if-conversion pass ("ce3").  */
struct rtl_opt_pass pass_if_after_reload =
{
 {
  RTL_PASS,
  "ce3",                                /* name */
  gate_handle_if_after_reload,          /* gate */
  rest_of_handle_if_after_reload,       /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_IFCVT2,                            /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_df_finish | TODO_verify_rtl_sharing |
  TODO_dump_func |
  TODO_ggc_collect                      /* todo_flags_finish */
 }
};

powered by: WebSVN 2.1.0

© copyright 1999-2024 OpenCores.org, equivalent to Oliscience, all rights reserved. OpenCores®, registered trademark.