Diffstat (limited to 'gcc/cfgbuild.c')
-rw-r--r--  gcc/cfgbuild.c  791
1 file changed, 791 insertions(+), 0 deletions(-)
diff --git a/gcc/cfgbuild.c b/gcc/cfgbuild.c
new file mode 100644
index 00000000000..2f62b067ff5
--- /dev/null
+++ b/gcc/cfgbuild.c
@@ -0,0 +1,791 @@
+/* Control flow graph building code for GNU compiler.
+ Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
+ 1999, 2000, 2001 Free Software Foundation, Inc.
+
+This file is part of GCC.
+
+GCC is free software; you can redistribute it and/or modify it under
+the terms of the GNU General Public License as published by the Free
+Software Foundation; either version 2, or (at your option) any later
+version.
+
+GCC is distributed in the hope that it will be useful, but WITHOUT ANY
+WARRANTY; without even the implied warranty of MERCHANTABILITY or
+FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
+for more details.
+
+You should have received a copy of the GNU General Public License
+along with GCC; see the file COPYING. If not, write to the Free
+Software Foundation, 59 Temple Place - Suite 330, Boston, MA
+02111-1307, USA. */
+
+/* find_basic_blocks divides the current function's rtl into basic
+ blocks and constructs the CFG. The blocks are recorded in the
+ basic_block_info array; the CFG exists in the edge structures
+ referenced by the blocks.
+
+ find_basic_blocks also finds any unreachable loops and deletes them.
+
+ Available functionality:
+ - CFG construction
+ find_basic_blocks
+ - Local CFG construction
+ find_sub_basic_blocks
+ */
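+
+/* A minimal usage sketch, kept under #if 0 since it is illustrative
+ only; get_insns, max_reg_num and rtl_dump_file are assumed to be
+ available from elsewhere in the compiler. */
+#if 0
+static void
+rebuild_cfg_sketch ()
+{
+ /* Divide the current insn chain into blocks and wire up all edges. */
+ find_basic_blocks (get_insns (), max_reg_num (), rtl_dump_file);
+
+ /* After emitting new control flow inside an existing block, repair
+ just that block instead of rebuilding the whole CFG. */
+ find_sub_basic_blocks (BASIC_BLOCK (0));
+}
+#endif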
+
+#include "config.h"
+#include "system.h"
+#include "tree.h"
+#include "rtl.h"
+#include "hard-reg-set.h"
+#include "basic-block.h"
+#include "regs.h"
+#include "flags.h"
+#include "output.h"
+#include "function.h"
+#include "except.h"
+#include "toplev.h"
+#include "timevar.h"
+
+#include "obstack.h"
+static int count_basic_blocks PARAMS ((rtx));
+static void find_basic_blocks_1 PARAMS ((rtx));
+static rtx find_label_refs PARAMS ((rtx, rtx));
+static void make_edges PARAMS ((rtx, int, int, int));
+static void make_label_edge PARAMS ((sbitmap *, basic_block,
+ rtx, int));
+static void make_eh_edge PARAMS ((sbitmap *, basic_block, rtx));
+
+/* Count the basic blocks of the function. */
+
+static int
+count_basic_blocks (f)
+ rtx f;
+{
+ register rtx insn;
+ register RTX_CODE prev_code;
+ register int count = 0;
+ int saw_abnormal_edge = 0;
+
+ prev_code = JUMP_INSN;
+ for (insn = f; insn; insn = NEXT_INSN (insn))
+ {
+ enum rtx_code code = GET_CODE (insn);
+
+ if (code == CODE_LABEL
+ || (GET_RTX_CLASS (code) == 'i'
+ && (prev_code == JUMP_INSN
+ || prev_code == BARRIER
+ || saw_abnormal_edge)))
+ {
+ saw_abnormal_edge = 0;
+ count++;
+ }
+
+ /* Record whether this insn created an edge. */
+ if (code == CALL_INSN)
+ {
+ rtx note;
+
+ /* If there is a nonlocal goto label and the specified
+ region number isn't negative, we have an edge. */
+ if (nonlocal_goto_handler_labels
+ && ((note = find_reg_note (insn, REG_EH_REGION, NULL_RTX)) == 0
+ || INTVAL (XEXP (note, 0)) >= 0))
+ saw_abnormal_edge = 1;
+
+ else if (can_throw_internal (insn))
+ saw_abnormal_edge = 1;
+ }
+ else if (flag_non_call_exceptions
+ && code == INSN
+ && can_throw_internal (insn))
+ saw_abnormal_edge = 1;
+
+ if (code != NOTE)
+ prev_code = code;
+ }
+
+ /* The rest of the compiler works a bit more smoothly when we don't
+ have to check for the edge case of do-nothing functions with no
+ basic blocks. */
+ if (count == 0)
+ {
+ emit_insn (gen_rtx_USE (VOIDmode, const0_rtx));
+ count = 1;
+ }
+
+ return count;
+}
+
+/* Scan a list of insns for labels referred to other than by jumps.
+ This is used to scan the alternatives of a call placeholder. */
+static rtx
+find_label_refs (f, lvl)
+ rtx f;
+ rtx lvl;
+{
+ rtx insn;
+
+ for (insn = f; insn; insn = NEXT_INSN (insn))
+ if (INSN_P (insn) && GET_CODE (insn) != JUMP_INSN)
+ {
+ rtx note;
+
+ /* Make a list of all labels referred to other than by jumps
+ (which just don't have the REG_LABEL notes).
+
+ Make a special exception for labels followed by an ADDR*VEC,
+ as this would be a part of the tablejump setup code.
+
+ Make a special exception to registers loaded with label
+ values just before jump insns that use them. */
+
+ for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
+ if (REG_NOTE_KIND (note) == REG_LABEL)
+ {
+ rtx lab = XEXP (note, 0), next;
+
+ if ((next = next_nonnote_insn (lab)) != NULL
+ && GET_CODE (next) == JUMP_INSN
+ && (GET_CODE (PATTERN (next)) == ADDR_VEC
+ || GET_CODE (PATTERN (next)) == ADDR_DIFF_VEC))
+ ;
+ else if (GET_CODE (lab) == NOTE)
+ ;
+ else if (GET_CODE (NEXT_INSN (insn)) == JUMP_INSN
+ && find_reg_note (NEXT_INSN (insn), REG_LABEL, lab))
+ ;
+ else
+ lvl = alloc_EXPR_LIST (0, XEXP (note, 0), lvl);
+ }
+ }
+
+ return lvl;
+}
+
+/* Create an edge from basic block SRC to the block containing LABEL.
+ FLAGS carries auxiliary information about the edge that is
+ accumulated between calls. */
+
+static void
+make_label_edge (edge_cache, src, label, flags)
+ sbitmap *edge_cache;
+ basic_block src;
+ rtx label;
+ int flags;
+{
+ if (GET_CODE (label) != CODE_LABEL)
+ abort ();
+
+ /* If the label was never emitted, this insn is junk, but avoid a
+ crash trying to refer to BLOCK_FOR_INSN (label). This can happen
+ as a result of a syntax error, in which case a diagnostic has
+ already been printed. */
+
+ if (INSN_UID (label) == 0)
+ return;
+
+ make_edge (edge_cache, src, BLOCK_FOR_INSN (label), flags);
+}
+
+/* Create the exception-handling edges generated by INSN in basic
+ block SRC. */
+
+static void
+make_eh_edge (edge_cache, src, insn)
+ sbitmap *edge_cache;
+ basic_block src;
+ rtx insn;
+{
+ int is_call = (GET_CODE (insn) == CALL_INSN ? EDGE_ABNORMAL_CALL : 0);
+ rtx handlers, i;
+
+ handlers = reachable_handlers (insn);
+
+ for (i = handlers; i; i = XEXP (i, 1))
+ make_label_edge (edge_cache, src, XEXP (i, 0),
+ EDGE_ABNORMAL | EDGE_EH | is_call);
+
+ free_INSN_LIST_list (&handlers);
+}
+
+/* Identify the edges between basic blocks MIN to MAX.
+
+ LABEL_VALUE_LIST is a list of labels whose addresses are taken other
+ than by jumps; blocks that are otherwise unreachable may be reached
+ through a computed jump to one of them.
+
+ If UPDATE_P is nonzero, seed the edge cache with the edges that
+ already exist, since we are adding to a partially built CFG. */
+
+static void
+make_edges (label_value_list, min, max, update_p)
+ rtx label_value_list;
+ int min, max, update_p;
+{
+ int i;
+ sbitmap *edge_cache = NULL;
+
+ /* Assume no computed jump; revise as we create edges. */
+ current_function_has_computed_jump = 0;
+
+ /* Heavy use of computed goto in machine-generated code can lead to
+ nearly fully-connected CFGs. In that case we spend a significant
+ amount of time searching the edge lists for duplicates. */
+ if (forced_labels || label_value_list)
+ {
+ edge_cache = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
+ sbitmap_vector_zero (edge_cache, n_basic_blocks);
+
+ if (update_p)
+ for (i = min; i <= max; ++i)
+ {
+ edge e;
+ for (e = BASIC_BLOCK (i)->succ; e ; e = e->succ_next)
+ if (e->dest != EXIT_BLOCK_PTR)
+ SET_BIT (edge_cache[i], e->dest->index);
+ }
+ }
+
+ /* By nature of the way these get numbered, block 0 is always the entry. */
+ make_edge (edge_cache, ENTRY_BLOCK_PTR, BASIC_BLOCK (0), EDGE_FALLTHRU);
+
+ for (i = min; i <= max; ++i)
+ {
+ basic_block bb = BASIC_BLOCK (i);
+ rtx insn, x;
+ enum rtx_code code;
+ int force_fallthru = 0;
+
+ if (GET_CODE (bb->head) == CODE_LABEL
+ && LABEL_ALTERNATE_NAME (bb->head))
+ make_edge (NULL, ENTRY_BLOCK_PTR, bb, 0);
+
+ /* Examine the last instruction of the block, and discover the
+ ways we can leave the block. */
+
+ insn = bb->end;
+ code = GET_CODE (insn);
+
+ /* A branch. */
+ if (code == JUMP_INSN)
+ {
+ rtx tmp;
+
+ /* Recognize exception handling placeholders. */
+ if (GET_CODE (PATTERN (insn)) == RESX)
+ make_eh_edge (edge_cache, bb, insn);
+
+ /* Recognize a non-local goto as a branch outside the
+ current function. */
+ else if (find_reg_note (insn, REG_NON_LOCAL_GOTO, NULL_RTX))
+ ;
+
+ /* ??? Recognize a tablejump and do the right thing. */
+ else if ((tmp = JUMP_LABEL (insn)) != NULL_RTX
+ && (tmp = NEXT_INSN (tmp)) != NULL_RTX
+ && GET_CODE (tmp) == JUMP_INSN
+ && (GET_CODE (PATTERN (tmp)) == ADDR_VEC
+ || GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC))
+ {
+ rtvec vec;
+ int j;
+
+ if (GET_CODE (PATTERN (tmp)) == ADDR_VEC)
+ vec = XVEC (PATTERN (tmp), 0);
+ else
+ vec = XVEC (PATTERN (tmp), 1);
+
+ for (j = GET_NUM_ELEM (vec) - 1; j >= 0; --j)
+ make_label_edge (edge_cache, bb,
+ XEXP (RTVEC_ELT (vec, j), 0), 0);
+
+ /* Some targets (eg, ARM) emit a conditional jump that also
+ contains the out-of-range target. Scan for these and
+ add an edge if necessary. */
+ if ((tmp = single_set (insn)) != NULL
+ && SET_DEST (tmp) == pc_rtx
+ && GET_CODE (SET_SRC (tmp)) == IF_THEN_ELSE
+ && GET_CODE (XEXP (SET_SRC (tmp), 2)) == LABEL_REF)
+ make_label_edge (edge_cache, bb,
+ XEXP (XEXP (SET_SRC (tmp), 2), 0), 0);
+
+#ifdef CASE_DROPS_THROUGH
+ /* Silly VAXen. The ADDR_VEC is going to be in the way of
+ us naturally detecting fallthru into the next block. */
+ force_fallthru = 1;
+#endif
+ }
+
+ /* If this is a computed jump, then mark it as reaching
+ everything on the label_value_list and forced_labels list. */
+ else if (computed_jump_p (insn))
+ {
+ current_function_has_computed_jump = 1;
+
+ for (x = label_value_list; x; x = XEXP (x, 1))
+ make_label_edge (edge_cache, bb, XEXP (x, 0), EDGE_ABNORMAL);
+
+ for (x = forced_labels; x; x = XEXP (x, 1))
+ make_label_edge (edge_cache, bb, XEXP (x, 0), EDGE_ABNORMAL);
+ }
+
+ /* A return insn creates an exit out of the function. */
+ else if (returnjump_p (insn))
+ make_edge (edge_cache, bb, EXIT_BLOCK_PTR, 0);
+
+ /* Otherwise, we have a plain conditional or unconditional jump. */
+ else
+ {
+ if (! JUMP_LABEL (insn))
+ abort ();
+ make_label_edge (edge_cache, bb, JUMP_LABEL (insn), 0);
+ }
+ }
+
+ /* If this is a sibling call insn, then this is in effect a
+ combined call and return, and so we need an edge to the
+ exit block. No need to worry about EH edges, since we
+ wouldn't have created the sibling call in the first place. */
+
+ if (code == CALL_INSN && SIBLING_CALL_P (insn))
+ make_edge (edge_cache, bb, EXIT_BLOCK_PTR,
+ EDGE_ABNORMAL | EDGE_ABNORMAL_CALL);
+
+ /* If this is a CALL_INSN, then mark it as reaching the active EH
+ handler for this CALL_INSN. If we're handling non-call
+ exceptions then any insn can reach any of the active handlers.
+
+ Also mark the CALL_INSN as reaching any nonlocal goto handler. */
+
+ else if (code == CALL_INSN || flag_non_call_exceptions)
+ {
+ /* Add any appropriate EH edges. */
+ make_eh_edge (edge_cache, bb, insn);
+
+ if (code == CALL_INSN && nonlocal_goto_handler_labels)
+ {
+ /* ??? This could be made smarter: in some cases it's possible
+ to tell that certain calls will not do a nonlocal goto.
+
+ For example, if the nested functions that do the nonlocal
+ gotos do not have their addresses taken, then only calls to
+ those functions or to other nested functions that use them
+ could possibly do nonlocal gotos. */
+ /* We do know that a REG_EH_REGION note with a value less
+ than 0 is guaranteed not to perform a non-local goto. */
+ rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
+ if (!note || INTVAL (XEXP (note, 0)) >= 0)
+ for (x = nonlocal_goto_handler_labels; x; x = XEXP (x, 1))
+ make_label_edge (edge_cache, bb, XEXP (x, 0),
+ EDGE_ABNORMAL | EDGE_ABNORMAL_CALL);
+ }
+ }
+
+ /* Find out if we can drop through to the next block. */
+ insn = next_nonnote_insn (insn);
+ if (!insn || (i + 1 == n_basic_blocks && force_fallthru))
+ make_edge (edge_cache, bb, EXIT_BLOCK_PTR, EDGE_FALLTHRU);
+ else if (i + 1 < n_basic_blocks)
+ {
+ rtx tmp = BLOCK_HEAD (i + 1);
+ if (GET_CODE (tmp) == NOTE)
+ tmp = next_nonnote_insn (tmp);
+ if (force_fallthru || insn == tmp)
+ make_edge (edge_cache, bb, BASIC_BLOCK (i + 1), EDGE_FALLTHRU);
+ }
+ }
+
+ if (edge_cache)
+ sbitmap_vector_free (edge_cache);
+}
+
+/* Find all basic blocks of the function whose first insn is F.
+
+ Store in label_value_list a list of labels whose addresses are taken
+ other than by jumps; make_edges uses it for computed gotos. Likewise
+ record any tail recursion labels in tail_recursion_label_list. */
+
+static void
+find_basic_blocks_1 (f)
+ rtx f;
+{
+ register rtx insn, next;
+ int i = 0;
+ rtx bb_note = NULL_RTX;
+ rtx lvl = NULL_RTX;
+ rtx trll = NULL_RTX;
+ rtx head = NULL_RTX;
+ rtx end = NULL_RTX;
+
+ /* We process the instructions in a slightly different way than we did
+ previously. This is so that we see a NOTE_BASIC_BLOCK after we have
+ closed out the previous block, so that it gets attached at the proper
+ place. Since this form should be equivalent to the previous,
+ count_basic_blocks continues to use the old form as a check. */
+
+ for (insn = f; insn; insn = next)
+ {
+ enum rtx_code code = GET_CODE (insn);
+
+ next = NEXT_INSN (insn);
+
+ switch (code)
+ {
+ case NOTE:
+ {
+ int kind = NOTE_LINE_NUMBER (insn);
+
+ /* Look for basic block notes with which to keep the
+ basic_block_info pointers stable. Unthread the note now;
+ we'll put it back at the right place in create_basic_block.
+ Or not at all if we've already found a note in this block. */
+ if (kind == NOTE_INSN_BASIC_BLOCK)
+ {
+ if (bb_note == NULL_RTX)
+ bb_note = insn;
+ else
+ next = flow_delete_insn (insn);
+ }
+ break;
+ }
+
+ case CODE_LABEL:
+ /* A basic block starts at a label. If we've closed one off due
+ to a barrier or some such, no need to do it again. */
+ if (head != NULL_RTX)
+ {
+ create_basic_block (i++, head, end, bb_note);
+ bb_note = NULL_RTX;
+ }
+
+ head = end = insn;
+ break;
+
+ case JUMP_INSN:
+ /* A basic block ends at a jump. */
+ if (head == NULL_RTX)
+ head = insn;
+ else
+ {
+ /* ??? Make a special check for table jumps. The way this
+ happens is truly and amazingly gross. We are about to
+ create a basic block that contains just a code label and
+ an addr*vec jump insn. Worse, an addr_diff_vec creates
+ its own natural loop.
+
+ Prevent this bit of brain damage, pasting things together
+ correctly in make_edges.
+
+ The correct solution involves emitting the table directly
+ on the tablejump instruction as a note, or JUMP_LABEL. */
+
+ if (GET_CODE (PATTERN (insn)) == ADDR_VEC
+ || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
+ {
+ head = end = NULL_RTX;
+ n_basic_blocks--;
+ break;
+ }
+ }
+ end = insn;
+ goto new_bb_inclusive;
+
+ case BARRIER:
+ /* A basic block ends at a barrier. It may be that an unconditional
+ jump already closed the basic block -- no need to do it again. */
+ if (head == NULL_RTX)
+ break;
+ goto new_bb_exclusive;
+
+ case CALL_INSN:
+ {
+ /* Record whether this call created an edge. */
+ rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
+ int region = (note ? INTVAL (XEXP (note, 0)) : 0);
+
+ if (GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
+ {
+ /* Scan each of the alternatives for label refs. */
+ lvl = find_label_refs (XEXP (PATTERN (insn), 0), lvl);
+ lvl = find_label_refs (XEXP (PATTERN (insn), 1), lvl);
+ lvl = find_label_refs (XEXP (PATTERN (insn), 2), lvl);
+ /* Record its tail recursion label, if any. */
+ if (XEXP (PATTERN (insn), 3) != NULL_RTX)
+ trll = alloc_EXPR_LIST (0, XEXP (PATTERN (insn), 3), trll);
+ }
+
+ /* A basic block ends at a call that can either throw or
+ do a non-local goto. */
+ if ((nonlocal_goto_handler_labels && region >= 0)
+ || can_throw_internal (insn))
+ {
+ new_bb_inclusive:
+ if (head == NULL_RTX)
+ head = insn;
+ end = insn;
+
+ new_bb_exclusive:
+ create_basic_block (i++, head, end, bb_note);
+ head = end = NULL_RTX;
+ bb_note = NULL_RTX;
+ break;
+ }
+ }
+ /* Fall through. */
+
+ case INSN:
+ /* Non-call exceptions generate new blocks just like calls. */
+ if (flag_non_call_exceptions && can_throw_internal (insn))
+ goto new_bb_inclusive;
+
+ if (head == NULL_RTX)
+ head = insn;
+ end = insn;
+ break;
+
+ default:
+ abort ();
+ }
+
+ if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN)
+ {
+ rtx note;
+
+ /* Make a list of all labels referred to other than by jumps.
+
+ Make a special exception for labels followed by an ADDR*VEC,
+ as this would be a part of the tablejump setup code.
+
+ Make a special exception to registers loaded with label
+ values just before jump insns that use them. */
+
+ for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
+ if (REG_NOTE_KIND (note) == REG_LABEL)
+ {
+ rtx lab = XEXP (note, 0), next;
+
+ if ((next = next_nonnote_insn (lab)) != NULL
+ && GET_CODE (next) == JUMP_INSN
+ && (GET_CODE (PATTERN (next)) == ADDR_VEC
+ || GET_CODE (PATTERN (next)) == ADDR_DIFF_VEC))
+ ;
+ else if (GET_CODE (lab) == NOTE)
+ ;
+ else if (GET_CODE (NEXT_INSN (insn)) == JUMP_INSN
+ && find_reg_note (NEXT_INSN (insn), REG_LABEL, lab))
+ ;
+ else
+ lvl = alloc_EXPR_LIST (0, XEXP (note, 0), lvl);
+ }
+ }
+ }
+
+ if (head != NULL_RTX)
+ create_basic_block (i++, head, end, bb_note);
+ else if (bb_note)
+ flow_delete_insn (bb_note);
+
+ if (i != n_basic_blocks)
+ abort ();
+
+ label_value_list = lvl;
+ tail_recursion_label_list = trll;
+}
+
+
+/* Find basic blocks of the current function.
+ F is the first insn of the function and NREGS the number of register
+ numbers in use. */
+
+void
+find_basic_blocks (f, nregs, file)
+ rtx f;
+ int nregs ATTRIBUTE_UNUSED;
+ FILE *file ATTRIBUTE_UNUSED;
+{
+ int max_uid;
+ timevar_push (TV_CFG);
+
+ /* Flush out existing data. */
+ if (basic_block_info != NULL)
+ {
+ int i;
+
+ clear_edges ();
+
+ /* Clear bb->aux on all extant basic blocks. We'll use this as a
+ tag for reuse during create_basic_block, just in case some pass
+ copies around basic block notes improperly. */
+ for (i = 0; i < n_basic_blocks; ++i)
+ BASIC_BLOCK (i)->aux = NULL;
+
+ VARRAY_FREE (basic_block_info);
+ }
+
+ n_basic_blocks = count_basic_blocks (f);
+
+ /* Size the basic block table. The actual structures will be allocated
+ by find_basic_blocks_1, since we want to keep the structure pointers
+ stable across calls to find_basic_blocks. */
+ /* ??? This whole issue would be much simpler if we called find_basic_blocks
+ exactly once, and thereafter we don't have a single long chain of
+ instructions at all until close to the end of compilation when we
+ actually lay them out. */
+
+ VARRAY_BB_INIT (basic_block_info, n_basic_blocks, "basic_block_info");
+
+ find_basic_blocks_1 (f);
+
+ /* Record the block to which an insn belongs. */
+ /* ??? This should be done another way, by which (perhaps) a label is
+ tagged directly with the basic block that it starts. It is used for
+ more than that currently, but IMO that is the only valid use. */
+
+ max_uid = get_max_uid ();
+#ifdef AUTO_INC_DEC
+ /* Leave space for the insns that life_analysis makes in some cases
+ for auto-inc. These cases are rare, so we don't need too much space. */
+ max_uid += max_uid / 10;
+#endif
+
+ compute_bb_for_insn (max_uid);
+
+ /* Discover the edges of our cfg. */
+ make_edges (label_value_list, 0, n_basic_blocks - 1, 0);
+
+ /* Do very simple cleanup now, for the benefit of code that runs between
+ here and cleanup_cfg, e.g. thread_prologue_and_epilogue_insns. */
+ tidy_fallthru_edges ();
+
+ mark_critical_edges ();
+
+#ifdef ENABLE_CHECKING
+ verify_flow_info ();
+#endif
+ timevar_pop (TV_CFG);
+}
+
+/* Assume that someone has emitted code with control flow insns within
+ basic block BB. Update the CFG data structures accordingly. */
+void
+find_sub_basic_blocks (bb)
+ basic_block bb;
+{
+ rtx insn = bb->head;
+ rtx end = bb->end;
+ rtx jump_insn = NULL_RTX;
+ edge falltru = 0;
+ basic_block first_bb = bb;
+ int i;
+
+ if (insn == bb->end)
+ return;
+
+ if (GET_CODE (insn) == CODE_LABEL)
+ insn = NEXT_INSN (insn);
+
+ /* Scan insn chain and try to find new basic block boundaries. */
+ while (1)
+ {
+ enum rtx_code code = GET_CODE (insn);
+ switch (code)
+ {
+ case BARRIER:
+ if (!jump_insn)
+ abort ();
+ break;
+ /* On code label, split current basic block. */
+ case CODE_LABEL:
+ falltru = split_block (bb, PREV_INSN (insn));
+ if (jump_insn)
+ bb->end = jump_insn;
+ bb = falltru->dest;
+ remove_edge (falltru);
+ jump_insn = 0;
+ if (LABEL_ALTERNATE_NAME (insn))
+ make_edge (NULL, ENTRY_BLOCK_PTR, bb, 0);
+ break;
+ case INSN:
+ case JUMP_INSN:
+ /* If we've previously seen a JUMP_INSN in this block, it should
+ have ended the block; split here and move the old block's end
+ back to that jump. */
+ if (jump_insn)
+ {
+ falltru = split_block (bb, PREV_INSN (insn));
+ bb->end = jump_insn;
+ bb = falltru->dest;
+ remove_edge (falltru);
+ jump_insn = 0;
+ }
+ /* Jump insns need some special care. */
+ if (GET_CODE (insn) == JUMP_INSN)
+ {
+ if (GET_CODE (PATTERN (insn)) == ADDR_VEC
+ || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
+ abort();
+ jump_insn = insn;
+ }
+ break;
+ default:
+ break;
+ }
+ if (insn == end)
+ break;
+ insn = NEXT_INSN (insn);
+ }
+
+ /* If the expander replaced a normal insn with a sequence terminating
+ in a return and a barrier, or with some other sequence that does not
+ behave like an ordinary jump, take care to move the basic block
+ boundary. */
+ if (jump_insn && GET_CODE (bb->end) != JUMP_INSN)
+ bb->end = jump_insn;
+
+ /* We may have replaced a conditional jump with a conditional jump
+ followed by cleanup code at the fallthru edge, so some outgoing
+ edges may now be dead. */
+ purge_dead_edges (bb);
+
+ /* Now re-scan and wire in all edges. This expects simple
+ (conditional) jumps at the end of each new basic block. */
+ make_edges (NULL, first_bb->index, bb->index, 1);
+
+ /* Update branch probabilities. We expect the newly created blocks
+ to end in (un)conditional jumps and to have only forward edges. */
+ for (i = first_bb->index; i <= bb->index; i++)
+ {
+ edge e,f;
+ basic_block b = BASIC_BLOCK (i);
+ if (b != first_bb)
+ {
+ b->count = 0;
+ b->frequency = 0;
+ for (e = b->pred; e; e=e->pred_next)
+ {
+ b->count += e->count;
+ b->frequency += EDGE_FREQUENCY (e);
+ }
+ }
+ if (b->succ && b->succ->succ_next && !b->succ->succ_next->succ_next)
+ {
+ rtx note = find_reg_note (b->end, REG_BR_PROB, NULL);
+ int probability;
+
+ if (!note)
+ continue;
+ probability = INTVAL (XEXP (note, 0));
+ e = BRANCH_EDGE (b);
+ e->probability = probability;
+ e->count = ((b->count * probability + REG_BR_PROB_BASE / 2)
+ / REG_BR_PROB_BASE);
+ f = FALLTHRU_EDGE (b);
+ f->probability = REG_BR_PROB_BASE - probability;
+ f->count = b->count - e->count;
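+ /* For example, with the usual REG_BR_PROB_BASE of 10000, a
+ REG_BR_PROB note of 2500 sends a quarter of B's count down the
+ branch edge, rounded to the nearest execution, and the remainder
+ down the fallthru edge. */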
+ }
+ if (b->succ && !b->succ->succ_next)
+ {
+ e = b->succ;
+ e->probability = REG_BR_PROB_BASE;
+ e->count = b->count;
+ }
+ }
+}