summaryrefslogtreecommitdiffstats
diff options
context:
space:
mode:
authorrth <rth@138bc75d-0d04-0410-961f-82ee72b054a4>1999-02-02 21:22:52 +0000
committerrth <rth@138bc75d-0d04-0410-961f-82ee72b054a4>1999-02-02 21:22:52 +0000
commit29bd1808340d56f994e393a956ebd4266565b242 (patch)
treebf741c1416516d6d4b65f9e54691a31b98962066
parent31a0482ddd058487fa2d22824997c09c00742d98 (diff)
downloadppe42-gcc-29bd1808340d56f994e393a956ebd4266565b242.tar.gz
ppe42-gcc-29bd1808340d56f994e393a956ebd4266565b242.zip
Bob Manson <manson@charmed.cygnus.com>
* resource.c, resource.h: New files. * haifa-sched.c (regno_use_in): Moved to rtlanal.c. (split_block_insns): Moved to recog.c. (update_flow_info): Make public. * reorg.c: Moved the functions dealing with computing resource usage to resource.c. * sched.c (regno_use_in): Moved to rtlanal.c. (update_flow_info): Make public. (schedule_insns): Use split_block_insns. * recog.c (split_block_insns): New function. git-svn-id: svn+ssh://gcc.gnu.org/svn/gcc/trunk@24982 138bc75d-0d04-0410-961f-82ee72b054a4
-rw-r--r--gcc/ChangeLog17
-rw-r--r--gcc/haifa-sched.c113
-rw-r--r--gcc/recog.c81
-rw-r--r--gcc/reorg.c1162
-rw-r--r--gcc/resource.c1239
-rw-r--r--gcc/resource.h46
-rw-r--r--gcc/sched.c110
7 files changed, 1403 insertions, 1365 deletions
diff --git a/gcc/ChangeLog b/gcc/ChangeLog
index 9c35572ad7a..c95413e879f 100644
--- a/gcc/ChangeLog
+++ b/gcc/ChangeLog
@@ -1,3 +1,20 @@
+Fri Jan 29 21:00:56 1999 Bob Manson <manson@charmed.cygnus.com>
+
+ * resource.c, resource.h: New files.
+
+ * haifa-sched.c (regno_use_in): Moved to rtlanal.c.
+ (split_block_insns): Moved to recog.c.
+ (update_flow_info): Make public.
+
+ * reorg.c: Moved the functions dealing with computing resource
+ usage to resource.c.
+
+ * sched.c (regno_use_in): Moved to rtlanal.c.
+ (update_flow_info): Make public.
+ (schedule_insns): Use split_block_insns.
+
+ * recog.c (split_block_insns): New function.
+
Tue Feb 2 22:03:26 1999 David Edelsohn <edelsohn@mhpcc.edu>
* rs6000/linux.h (LINK_START_DEFAULT_SPEC): Delete, unused.
diff --git a/gcc/haifa-sched.c b/gcc/haifa-sched.c
index 2bf2c5ea07f..b0d34b6f67d 100644
--- a/gcc/haifa-sched.c
+++ b/gcc/haifa-sched.c
@@ -448,11 +448,9 @@ static void attach_deaths_insn PROTO ((rtx));
static int new_sometimes_live PROTO ((struct sometimes *, int, int));
static void finish_sometimes_live PROTO ((struct sometimes *, int));
static int schedule_block PROTO ((int, int));
-static rtx regno_use_in PROTO ((int, rtx));
static void split_hard_reg_notes PROTO ((rtx, rtx, rtx));
static void new_insn_dead_notes PROTO ((rtx, rtx, rtx, rtx));
static void update_n_sets PROTO ((rtx, int));
-static void update_flow_info PROTO ((rtx, rtx, rtx, rtx));
static char *safe_concat PROTO ((char *, char *, char *));
static int insn_issue_delay PROTO ((rtx));
static int birthing_insn_p PROTO ((rtx));
@@ -765,7 +763,6 @@ static rtx group_leader PROTO ((rtx));
static int set_priorities PROTO ((int));
static void init_rtx_vector PROTO ((rtx **, rtx *, int, int));
static void schedule_region PROTO ((int));
-static void split_block_insns PROTO ((int));
#endif /* INSN_SCHEDULING */
@@ -7699,39 +7696,6 @@ schedule_region (rgn)
FREE_REG_SET (reg_pending_sets);
}
-/* Subroutine of split_hard_reg_notes. Searches X for any reference to
- REGNO, returning the rtx of the reference found if any. Otherwise,
- returns 0. */
-
-static rtx
-regno_use_in (regno, x)
- int regno;
- rtx x;
-{
- register char *fmt;
- int i, j;
- rtx tem;
-
- if (GET_CODE (x) == REG && REGNO (x) == regno)
- return x;
-
- fmt = GET_RTX_FORMAT (GET_CODE (x));
- for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
- {
- if (fmt[i] == 'e')
- {
- if ((tem = regno_use_in (regno, XEXP (x, i))))
- return tem;
- }
- else if (fmt[i] == 'E')
- for (j = XVECLEN (x, i) - 1; j >= 0; j--)
- if ((tem = regno_use_in (regno, XVECEXP (x, i, j))))
- return tem;
- }
-
- return 0;
-}
-
/* Subroutine of update_flow_info. Determines whether any new REG_NOTEs are
needed for the hard register mentioned in the note. This can happen
if the reference to the hard register in the original insn was split into
@@ -7918,7 +7882,7 @@ update_n_sets (x, inc)
the insns from FIRST to LAST inclusive that were created by splitting
ORIG_INSN. NOTES are the original REG_NOTES. */
-static void
+void
update_flow_info (notes, first, last, orig_insn)
rtx notes;
rtx first, last;
@@ -8409,79 +8373,6 @@ update_flow_info (notes, first, last, orig_insn)
}
}
-/* Do the splitting of insns in the block b. */
-
-static void
-split_block_insns (b)
- int b;
-{
- rtx insn, next;
-
- for (insn = BLOCK_HEAD (b);; insn = next)
- {
- rtx set, last, first, notes;
-
- /* Can't use `next_real_insn' because that
- might go across CODE_LABELS and short-out basic blocks. */
- next = NEXT_INSN (insn);
- if (GET_CODE (insn) != INSN)
- {
- if (insn == BLOCK_END (b))
- break;
-
- continue;
- }
-
- /* Don't split no-op move insns. These should silently disappear
- later in final. Splitting such insns would break the code
- that handles REG_NO_CONFLICT blocks. */
- set = single_set (insn);
- if (set && rtx_equal_p (SET_SRC (set), SET_DEST (set)))
- {
- if (insn == BLOCK_END (b))
- break;
-
- /* Nops get in the way while scheduling, so delete them now if
- register allocation has already been done. It is too risky
- to try to do this before register allocation, and there are
- unlikely to be very many nops then anyways. */
- if (reload_completed)
- {
- PUT_CODE (insn, NOTE);
- NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
- NOTE_SOURCE_FILE (insn) = 0;
- }
-
- continue;
- }
-
- /* Split insns here to get max fine-grain parallelism. */
- first = PREV_INSN (insn);
- notes = REG_NOTES (insn);
- last = try_split (PATTERN (insn), insn, 1);
- if (last != insn)
- {
- /* try_split returns the NOTE that INSN became. */
- first = NEXT_INSN (first);
- update_flow_info (notes, first, last, insn);
-
- PUT_CODE (insn, NOTE);
- NOTE_SOURCE_FILE (insn) = 0;
- NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
- if (insn == BLOCK_HEAD (b))
- BLOCK_HEAD (b) = first;
- if (insn == BLOCK_END (b))
- {
- BLOCK_END (b) = last;
- break;
- }
- }
-
- if (insn == BLOCK_END (b))
- break;
- }
-}
-
/* The one entry point in this file. DUMP_FILE is the dump file for
this pass. */
@@ -8535,7 +8426,7 @@ schedule_insns (dump_file)
/* do the splitting first for all blocks */
for (b = 0; b < n_basic_blocks; b++)
- split_block_insns (b);
+ split_block_insns (b, 1);
max_uid = (get_max_uid () + 1);
diff --git a/gcc/recog.c b/gcc/recog.c
index c62929c4938..a62027db0b7 100644
--- a/gcc/recog.c
+++ b/gcc/recog.c
@@ -32,6 +32,7 @@ Boston, MA 02111-1307, USA. */
#include "flags.h"
#include "real.h"
#include "toplev.h"
+#include "basic-block.h"
#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
@@ -2596,3 +2597,83 @@ reg_fits_class_p (operand, class, offset, mode)
}
#endif /* REGISTER_CONSTRAINTS */
+
+/* Do the splitting of insns in the block B. Only try to actually split if
+ DO_SPLIT is true; otherwise, just remove nops. */
+
+void
+split_block_insns (b, do_split)
+ int b;
+ int do_split;
+{
+ rtx insn, next;
+
+ for (insn = BLOCK_HEAD (b);; insn = next)
+ {
+ rtx set;
+
+ /* Can't use `next_real_insn' because that
+ might go across CODE_LABELS and short-out basic blocks. */
+ next = NEXT_INSN (insn);
+ if (GET_CODE (insn) != INSN)
+ {
+ if (insn == BLOCK_END (b))
+ break;
+
+ continue;
+ }
+
+ /* Don't split no-op move insns. These should silently disappear
+ later in final. Splitting such insns would break the code
+ that handles REG_NO_CONFLICT blocks. */
+ set = single_set (insn);
+ if (set && rtx_equal_p (SET_SRC (set), SET_DEST (set)))
+ {
+ if (insn == BLOCK_END (b))
+ break;
+
+ /* Nops get in the way while scheduling, so delete them now if
+ register allocation has already been done. It is too risky
+ to try to do this before register allocation, and there are
+ unlikely to be very many nops then anyways. */
+ if (reload_completed)
+ {
+
+ PUT_CODE (insn, NOTE);
+ NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
+ NOTE_SOURCE_FILE (insn) = 0;
+ }
+
+ continue;
+ }
+
+ if (do_split)
+ {
+ /* Split insns here to get max fine-grain parallelism. */
+ rtx first = PREV_INSN (insn);
+ rtx notes = REG_NOTES (insn);
+ rtx last = try_split (PATTERN (insn), insn, 1);
+
+ if (last != insn)
+ {
+ /* try_split returns the NOTE that INSN became. */
+ first = NEXT_INSN (first);
+ update_flow_info (notes, first, last, insn);
+
+ PUT_CODE (insn, NOTE);
+ NOTE_SOURCE_FILE (insn) = 0;
+ NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
+ if (insn == BLOCK_HEAD (b))
+ BLOCK_HEAD (b) = first;
+ if (insn == BLOCK_END (b))
+ {
+ BLOCK_END (b) = last;
+ break;
+ }
+ }
+ }
+
+ if (insn == BLOCK_END (b))
+ break;
+ }
+}
diff --git a/gcc/reorg.c b/gcc/reorg.c
index 2e41e71e492..a8f52f955c9 100644
--- a/gcc/reorg.c
+++ b/gcc/reorg.c
@@ -134,6 +134,7 @@ Boston, MA 02111-1307, USA. */
#include "output.h"
#include "obstack.h"
#include "insn-attr.h"
+#include "resource.h"
#ifdef DELAY_SLOTS
@@ -164,57 +165,10 @@ static rtx *unfilled_firstobj;
((rtx *) obstack_next_free (&unfilled_slots_obstack))
/* This structure is used to indicate which hardware resources are set or
- needed by insns so far. */
-
-struct resources
-{
- char memory; /* Insn sets or needs a memory location. */
- char unch_memory; /* Insn sets of needs a "unchanging" MEM. */
- char volatil; /* Insn sets or needs a volatile memory loc. */
- char cc; /* Insn sets or needs the condition codes. */
- HARD_REG_SET regs; /* Which registers are set or needed. */
-};
-
-/* Macro to clear all resources. */
-#define CLEAR_RESOURCE(RES) \
- do { (RES)->memory = (RES)->unch_memory = (RES)->volatil = (RES)->cc = 0; \
- CLEAR_HARD_REG_SET ((RES)->regs); } while (0)
-
-/* Indicates what resources are required at the beginning of the epilogue. */
-static struct resources start_of_epilogue_needs;
-
-/* Indicates what resources are required at function end. */
-static struct resources end_of_function_needs;
/* Points to the label before the end of the function. */
static rtx end_of_function_label;
-/* This structure is used to record liveness information at the targets or
- fallthrough insns of branches. We will most likely need the information
- at targets again, so save them in a hash table rather than recomputing them
- each time. */
-
-struct target_info
-{
- int uid; /* INSN_UID of target. */
- struct target_info *next; /* Next info for same hash bucket. */
- HARD_REG_SET live_regs; /* Registers live at target. */
- int block; /* Basic block number containing target. */
- int bb_tick; /* Generation count of basic block info. */
-};
-
-#define TARGET_HASH_PRIME 257
-
-/* Define the hash table itself. */
-static struct target_info **target_hash_table;
-
-/* For each basic block, we maintain a generation number of its basic
- block info, which is updated each time we move an insn from the
- target of a jump. This is the generation number indexed by block
- number. */
-
-static int *bb_ticks;
-
/* Mapping between INSN_UID's and position in the code since INSN_UID's do
not always monotonically increase. */
static int *uid_to_ruid;
@@ -222,9 +176,6 @@ static int *uid_to_ruid;
/* Highest valid index in `uid_to_ruid'. */
static int max_uid;
-static void mark_referenced_resources PROTO((rtx, struct resources *, int));
-static void mark_set_resources PROTO((rtx, struct resources *,
- int, int));
static int stop_search_p PROTO((rtx, int));
static int resource_conflicts_p PROTO((struct resources *,
struct resources *));
@@ -242,6 +193,9 @@ static int rare_destination PROTO((rtx));
static int mostly_true_jump PROTO((rtx, rtx));
static rtx get_branch_condition PROTO((rtx, rtx));
static int condition_dominates_p PROTO((rtx, rtx));
+static int redirect_with_delay_slots_safe_p PROTO ((rtx, rtx, rtx));
+static int redirect_with_delay_list_safe_p PROTO ((rtx, rtx, rtx));
+static int check_annul_list_true_false PROTO ((int, rtx));
static rtx steal_delay_list_from_target PROTO((rtx, rtx, rtx, rtx,
struct resources *,
struct resources *,
@@ -252,443 +206,20 @@ static rtx steal_delay_list_from_fallthrough PROTO((rtx, rtx, rtx, rtx,
struct resources *,
struct resources *,
int, int *, int *));
-static rtx find_dead_or_set_registers PROTO ((rtx, struct resources *, rtx *,
- int, struct resources,
- struct resources));
static void try_merge_delay_insns PROTO((rtx, rtx));
static rtx redundant_insn PROTO((rtx, rtx, rtx));
static int own_thread_p PROTO((rtx, rtx, int));
-static int find_basic_block PROTO((rtx));
static void update_block PROTO((rtx, rtx));
static int reorg_redirect_jump PROTO((rtx, rtx));
static void update_reg_dead_notes PROTO((rtx, rtx));
static void fix_reg_dead_note PROTO((rtx, rtx));
static void update_reg_unused_notes PROTO((rtx, rtx));
-static void update_live_status PROTO((rtx, rtx));
-static rtx next_insn_no_annul PROTO((rtx));
-static void mark_target_live_regs PROTO((rtx, struct resources *));
static void fill_simple_delay_slots PROTO((int));
static rtx fill_slots_from_thread PROTO((rtx, rtx, rtx, rtx, int, int,
int, int, int *, rtx));
static void fill_eager_delay_slots PROTO((void));
static void relax_delay_slots PROTO((rtx));
static void make_return_insns PROTO((rtx));
-static int redirect_with_delay_slots_safe_p PROTO ((rtx, rtx, rtx));
-static int redirect_with_delay_list_safe_p PROTO ((rtx, rtx, rtx));
-static int check_annul_list_true_false PROTO ((int, rtx));
-
-/* Given X, some rtl, and RES, a pointer to a `struct resource', mark
- which resources are references by the insn. If INCLUDE_DELAYED_EFFECTS
- is TRUE, resources used by the called routine will be included for
- CALL_INSNs. */
-
-static void
-mark_referenced_resources (x, res, include_delayed_effects)
- register rtx x;
- register struct resources *res;
- register int include_delayed_effects;
-{
- register enum rtx_code code = GET_CODE (x);
- register int i, j;
- register char *format_ptr;
-
- /* Handle leaf items for which we set resource flags. Also, special-case
- CALL, SET and CLOBBER operators. */
- switch (code)
- {
- case CONST:
- case CONST_INT:
- case CONST_DOUBLE:
- case PC:
- case SYMBOL_REF:
- case LABEL_REF:
- return;
-
- case SUBREG:
- if (GET_CODE (SUBREG_REG (x)) != REG)
- mark_referenced_resources (SUBREG_REG (x), res, 0);
- else
- {
- int regno = REGNO (SUBREG_REG (x)) + SUBREG_WORD (x);
- int last_regno = regno + HARD_REGNO_NREGS (regno, GET_MODE (x));
- for (i = regno; i < last_regno; i++)
- SET_HARD_REG_BIT (res->regs, i);
- }
- return;
-
- case REG:
- for (i = 0; i < HARD_REGNO_NREGS (REGNO (x), GET_MODE (x)); i++)
- SET_HARD_REG_BIT (res->regs, REGNO (x) + i);
- return;
-
- case MEM:
- /* If this memory shouldn't change, it really isn't referencing
- memory. */
- if (RTX_UNCHANGING_P (x))
- res->unch_memory = 1;
- else
- res->memory = 1;
- res->volatil = MEM_VOLATILE_P (x);
-
- /* Mark registers used to access memory. */
- mark_referenced_resources (XEXP (x, 0), res, 0);
- return;
-
- case CC0:
- res->cc = 1;
- return;
-
- case UNSPEC_VOLATILE:
- case ASM_INPUT:
- /* Traditional asm's are always volatile. */
- res->volatil = 1;
- return;
-
- case TRAP_IF:
- res->volatil = 1;
- break;
-
- case ASM_OPERANDS:
- res->volatil = MEM_VOLATILE_P (x);
-
- /* For all ASM_OPERANDS, we must traverse the vector of input operands.
- We can not just fall through here since then we would be confused
- by the ASM_INPUT rtx inside ASM_OPERANDS, which do not indicate
- traditional asms unlike their normal usage. */
-
- for (i = 0; i < ASM_OPERANDS_INPUT_LENGTH (x); i++)
- mark_referenced_resources (ASM_OPERANDS_INPUT (x, i), res, 0);
- return;
-
- case CALL:
- /* The first operand will be a (MEM (xxx)) but doesn't really reference
- memory. The second operand may be referenced, though. */
- mark_referenced_resources (XEXP (XEXP (x, 0), 0), res, 0);
- mark_referenced_resources (XEXP (x, 1), res, 0);
- return;
-
- case SET:
- /* Usually, the first operand of SET is set, not referenced. But
- registers used to access memory are referenced. SET_DEST is
- also referenced if it is a ZERO_EXTRACT or SIGN_EXTRACT. */
-
- mark_referenced_resources (SET_SRC (x), res, 0);
-
- x = SET_DEST (x);
- if (GET_CODE (x) == SIGN_EXTRACT || GET_CODE (x) == ZERO_EXTRACT)
- mark_referenced_resources (x, res, 0);
- else if (GET_CODE (x) == SUBREG)
- x = SUBREG_REG (x);
- if (GET_CODE (x) == MEM)
- mark_referenced_resources (XEXP (x, 0), res, 0);
- return;
-
- case CLOBBER:
- return;
-
- case CALL_INSN:
- if (include_delayed_effects)
- {
- /* A CALL references memory, the frame pointer if it exists, the
- stack pointer, any global registers and any registers given in
- USE insns immediately in front of the CALL.
-
- However, we may have moved some of the parameter loading insns
- into the delay slot of this CALL. If so, the USE's for them
- don't count and should be skipped. */
- rtx insn = PREV_INSN (x);
- rtx sequence = 0;
- int seq_size = 0;
- rtx next = NEXT_INSN (x);
- int i;
-
- /* If we are part of a delay slot sequence, point at the SEQUENCE. */
- if (NEXT_INSN (insn) != x)
- {
- next = NEXT_INSN (NEXT_INSN (insn));
- sequence = PATTERN (NEXT_INSN (insn));
- seq_size = XVECLEN (sequence, 0);
- if (GET_CODE (sequence) != SEQUENCE)
- abort ();
- }
-
- res->memory = 1;
- SET_HARD_REG_BIT (res->regs, STACK_POINTER_REGNUM);
- if (frame_pointer_needed)
- {
- SET_HARD_REG_BIT (res->regs, FRAME_POINTER_REGNUM);
-#if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
- SET_HARD_REG_BIT (res->regs, HARD_FRAME_POINTER_REGNUM);
-#endif
- }
-
- for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
- if (global_regs[i])
- SET_HARD_REG_BIT (res->regs, i);
-
- /* Check for a NOTE_INSN_SETJMP. If it exists, then we must
- assume that this call can need any register.
-
- This is done to be more conservative about how we handle setjmp.
- We assume that they both use and set all registers. Using all
- registers ensures that a register will not be considered dead
- just because it crosses a setjmp call. A register should be
- considered dead only if the setjmp call returns non-zero. */
- if (next && GET_CODE (next) == NOTE
- && NOTE_LINE_NUMBER (next) == NOTE_INSN_SETJMP)
- SET_HARD_REG_SET (res->regs);
-
- {
- rtx link;
-
- for (link = CALL_INSN_FUNCTION_USAGE (x);
- link;
- link = XEXP (link, 1))
- if (GET_CODE (XEXP (link, 0)) == USE)
- {
- for (i = 1; i < seq_size; i++)
- {
- rtx slot_pat = PATTERN (XVECEXP (sequence, 0, i));
- if (GET_CODE (slot_pat) == SET
- && rtx_equal_p (SET_DEST (slot_pat),
- SET_DEST (XEXP (link, 0))))
- break;
- }
- if (i >= seq_size)
- mark_referenced_resources (SET_DEST (XEXP (link, 0)),
- res, 0);
- }
- }
- }
-
- /* ... fall through to other INSN processing ... */
-
- case INSN:
- case JUMP_INSN:
-
-#ifdef INSN_REFERENCES_ARE_DELAYED
- if (! include_delayed_effects
- && INSN_REFERENCES_ARE_DELAYED (x))
- return;
-#endif
-
- /* No special processing, just speed up. */
- mark_referenced_resources (PATTERN (x), res, include_delayed_effects);
- return;
-
- default:
- break;
- }
-
- /* Process each sub-expression and flag what it needs. */
- format_ptr = GET_RTX_FORMAT (code);
- for (i = 0; i < GET_RTX_LENGTH (code); i++)
- switch (*format_ptr++)
- {
- case 'e':
- mark_referenced_resources (XEXP (x, i), res, include_delayed_effects);
- break;
-
- case 'E':
- for (j = 0; j < XVECLEN (x, i); j++)
- mark_referenced_resources (XVECEXP (x, i, j), res,
- include_delayed_effects);
- break;
- }
-}
-
-/* Given X, a part of an insn, and a pointer to a `struct resource',
- RES, indicate which resources are modified by the insn. If
- INCLUDE_DELAYED_EFFECTS is nonzero, also mark resources potentially
- set by the called routine.
-
- If IN_DEST is nonzero, it means we are inside a SET. Otherwise,
- objects are being referenced instead of set.
-
- We never mark the insn as modifying the condition code unless it explicitly
- SETs CC0 even though this is not totally correct. The reason for this is
- that we require a SET of CC0 to immediately precede the reference to CC0.
- So if some other insn sets CC0 as a side-effect, we know it cannot affect
- our computation and thus may be placed in a delay slot. */
-
-static void
-mark_set_resources (x, res, in_dest, include_delayed_effects)
- register rtx x;
- register struct resources *res;
- int in_dest;
- int include_delayed_effects;
-{
- register enum rtx_code code;
- register int i, j;
- register char *format_ptr;
-
- restart:
-
- code = GET_CODE (x);
-
- switch (code)
- {
- case NOTE:
- case BARRIER:
- case CODE_LABEL:
- case USE:
- case CONST_INT:
- case CONST_DOUBLE:
- case LABEL_REF:
- case SYMBOL_REF:
- case CONST:
- case PC:
- /* These don't set any resources. */
- return;
-
- case CC0:
- if (in_dest)
- res->cc = 1;
- return;
-
- case CALL_INSN:
- /* Called routine modifies the condition code, memory, any registers
- that aren't saved across calls, global registers and anything
- explicitly CLOBBERed immediately after the CALL_INSN. */
-
- if (include_delayed_effects)
- {
- rtx next = NEXT_INSN (x);
- rtx prev = PREV_INSN (x);
- rtx link;
-
- res->cc = res->memory = 1;
- for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
- if (call_used_regs[i] || global_regs[i])
- SET_HARD_REG_BIT (res->regs, i);
-
- /* If X is part of a delay slot sequence, then NEXT should be
- the first insn after the sequence. */
- if (NEXT_INSN (prev) != x)
- next = NEXT_INSN (NEXT_INSN (prev));
-
- for (link = CALL_INSN_FUNCTION_USAGE (x);
- link; link = XEXP (link, 1))
- if (GET_CODE (XEXP (link, 0)) == CLOBBER)
- mark_set_resources (SET_DEST (XEXP (link, 0)), res, 1, 0);
-
- /* Check for a NOTE_INSN_SETJMP. If it exists, then we must
- assume that this call can clobber any register. */
- if (next && GET_CODE (next) == NOTE
- && NOTE_LINE_NUMBER (next) == NOTE_INSN_SETJMP)
- SET_HARD_REG_SET (res->regs);
- }
-
- /* ... and also what its RTL says it modifies, if anything. */
-
- case JUMP_INSN:
- case INSN:
-
- /* An insn consisting of just a CLOBBER (or USE) is just for flow
- and doesn't actually do anything, so we ignore it. */
-
-#ifdef INSN_SETS_ARE_DELAYED
- if (! include_delayed_effects
- && INSN_SETS_ARE_DELAYED (x))
- return;
-#endif
-
- x = PATTERN (x);
- if (GET_CODE (x) != USE && GET_CODE (x) != CLOBBER)
- goto restart;
- return;
-
- case SET:
- /* If the source of a SET is a CALL, this is actually done by
- the called routine. So only include it if we are to include the
- effects of the calling routine. */
-
- mark_set_resources (SET_DEST (x), res,
- (include_delayed_effects
- || GET_CODE (SET_SRC (x)) != CALL),
- 0);
-
- mark_set_resources (SET_SRC (x), res, 0, 0);
- return;
-
- case CLOBBER:
- mark_set_resources (XEXP (x, 0), res, 1, 0);
- return;
-
- case SEQUENCE:
- for (i = 0; i < XVECLEN (x, 0); i++)
- if (! (INSN_ANNULLED_BRANCH_P (XVECEXP (x, 0, 0))
- && INSN_FROM_TARGET_P (XVECEXP (x, 0, i))))
- mark_set_resources (XVECEXP (x, 0, i), res, 0,
- include_delayed_effects);
- return;
-
- case POST_INC:
- case PRE_INC:
- case POST_DEC:
- case PRE_DEC:
- mark_set_resources (XEXP (x, 0), res, 1, 0);
- return;
-
- case ZERO_EXTRACT:
- mark_set_resources (XEXP (x, 0), res, in_dest, 0);
- mark_set_resources (XEXP (x, 1), res, 0, 0);
- mark_set_resources (XEXP (x, 2), res, 0, 0);
- return;
-
- case MEM:
- if (in_dest)
- {
- res->memory = 1;
- res->unch_memory = RTX_UNCHANGING_P (x);
- res->volatil = MEM_VOLATILE_P (x);
- }
-
- mark_set_resources (XEXP (x, 0), res, 0, 0);
- return;
-
- case SUBREG:
- if (in_dest)
- {
- if (GET_CODE (SUBREG_REG (x)) != REG)
- mark_set_resources (SUBREG_REG (x), res,
- in_dest, include_delayed_effects);
- else
- {
- int regno = REGNO (SUBREG_REG (x)) + SUBREG_WORD (x);
- int last_regno = regno + HARD_REGNO_NREGS (regno, GET_MODE (x));
- for (i = regno; i < last_regno; i++)
- SET_HARD_REG_BIT (res->regs, i);
- }
- }
- return;
-
- case REG:
- if (in_dest)
- for (i = 0; i < HARD_REGNO_NREGS (REGNO (x), GET_MODE (x)); i++)
- SET_HARD_REG_BIT (res->regs, REGNO (x) + i);
- return;
-
- default:
- break;
- }
-
- /* Process each sub-expression and flag what it needs. */
- format_ptr = GET_RTX_FORMAT (code);
- for (i = 0; i < GET_RTX_LENGTH (code); i++)
- switch (*format_ptr++)
- {
- case 'e':
- mark_set_resources (XEXP (x, i), res, in_dest, include_delayed_effects);
- break;
-
- case 'E':
- for (j = 0; j < XVECLEN (x, i); j++)
- mark_set_resources (XVECEXP (x, i, j), res, in_dest,
- include_delayed_effects);
- break;
- }
-}
/* Return TRUE if this insn should stop the search for insn to fill delay
slots. LABELS_P indicates that labels should terminate the search.
@@ -988,16 +519,7 @@ add_to_delay_list (insn, delay_list)
if (delay_list == 0)
{
- struct target_info *tinfo;
-
- for (tinfo = target_hash_table[INSN_UID (insn) % TARGET_HASH_PRIME];
- tinfo; tinfo = tinfo->next)
- if (tinfo->uid == INSN_UID (insn))
- break;
-
- if (tinfo)
- tinfo->block = -1;
-
+ clear_hashed_info_for_insn (insn);
return gen_rtx_INSN_LIST (VOIDmode, insn, NULL_RTX);
}
@@ -2324,41 +1846,6 @@ own_thread_p (thread, label, allow_fallthrough)
return 1;
}
-/* Find the number of the basic block that starts closest to INSN. Return -1
- if we couldn't find such a basic block. */
-
-static int
-find_basic_block (insn)
- rtx insn;
-{
- int i;
-
- /* Scan backwards to the previous BARRIER. Then see if we can find a
- label that starts a basic block. Return the basic block number. */
-
- for (insn = prev_nonnote_insn (insn);
- insn && GET_CODE (insn) != BARRIER;
- insn = prev_nonnote_insn (insn))
- ;
-
- /* The start of the function is basic block zero. */
- if (insn == 0)
- return 0;
-
- /* See if any of the upcoming CODE_LABELs start a basic block. If we reach
- anything other than a CODE_LABEL or note, we can't find this code. */
- for (insn = next_nonnote_insn (insn);
- insn && GET_CODE (insn) == CODE_LABEL;
- insn = next_nonnote_insn (insn))
- {
- for (i = 0; i < n_basic_blocks; i++)
- if (insn == BLOCK_HEAD (i))
- return i;
- }
-
- return -1;
-}
-
/* Called when INSN is being moved from a location near the target of a jump.
We leave a marker of the form (use (INSN)) immediately in front
of WHERE for mark_target_live_regs. These markers will be deleted when
@@ -2373,8 +1860,6 @@ update_block (insn, where)
rtx insn;
rtx where;
{
- int b;
-
/* Ignore if this was in a delay slot and it came from the target of
a branch. */
if (INSN_FROM_TARGET_P (insn))
@@ -2385,9 +1870,7 @@ update_block (insn, where)
/* INSN might be making a value live in a block where it didn't use to
be. So recompute liveness information for this block. */
- b = find_basic_block (insn);
- if (b != -1)
- bb_ticks[b]++;
+ incr_ticks_for_insn (insn);
}
/* Similar to REDIRECT_JUMP except that we update the BB_TICKS entry for
@@ -2398,11 +1881,7 @@ reorg_redirect_jump (jump, nlabel)
rtx jump;
rtx nlabel;
{
- int b = find_basic_block (jump);
-
- if (b != -1)
- bb_ticks[b]++;
-
+ incr_ticks_for_insn (jump);
return redirect_jump (jump, nlabel);
}
@@ -2502,549 +1981,6 @@ update_reg_unused_notes (insn, redundant_insn)
}
}
-/* Marks registers possibly live at the current place being scanned by
- mark_target_live_regs. Used only by next two function. */
-
-static HARD_REG_SET current_live_regs;
-
-/* Marks registers for which we have seen a REG_DEAD note but no assignment.
- Also only used by the next two functions. */
-
-static HARD_REG_SET pending_dead_regs;
-
-/* Utility function called from mark_target_live_regs via note_stores.
- It deadens any CLOBBERed registers and livens any SET registers. */
-
-static void
-update_live_status (dest, x)
- rtx dest;
- rtx x;
-{
- int first_regno, last_regno;
- int i;
-
- if (GET_CODE (dest) != REG
- && (GET_CODE (dest) != SUBREG || GET_CODE (SUBREG_REG (dest)) != REG))
- return;
-
- if (GET_CODE (dest) == SUBREG)
- first_regno = REGNO (SUBREG_REG (dest)) + SUBREG_WORD (dest);
- else
- first_regno = REGNO (dest);
-
- last_regno = first_regno + HARD_REGNO_NREGS (first_regno, GET_MODE (dest));
-
- if (GET_CODE (x) == CLOBBER)
- for (i = first_regno; i < last_regno; i++)
- CLEAR_HARD_REG_BIT (current_live_regs, i);
- else
- for (i = first_regno; i < last_regno; i++)
- {
- SET_HARD_REG_BIT (current_live_regs, i);
- CLEAR_HARD_REG_BIT (pending_dead_regs, i);
- }
-}
-
-/* Similar to next_insn, but ignores insns in the delay slots of
- an annulled branch. */
-
-static rtx
-next_insn_no_annul (insn)
- rtx insn;
-{
- if (insn)
- {
- /* If INSN is an annulled branch, skip any insns from the target
- of the branch. */
- if (INSN_ANNULLED_BRANCH_P (insn)
- && NEXT_INSN (PREV_INSN (insn)) != insn)
- while (INSN_FROM_TARGET_P (NEXT_INSN (insn)))
- insn = NEXT_INSN (insn);
-
- insn = NEXT_INSN (insn);
- if (insn && GET_CODE (insn) == INSN
- && GET_CODE (PATTERN (insn)) == SEQUENCE)
- insn = XVECEXP (PATTERN (insn), 0, 0);
- }
-
- return insn;
-}
-
-/* A subroutine of mark_target_live_regs. Search forward from TARGET
- looking for registers that are set before they are used. These are dead.
- Stop after passing a few conditional jumps, and/or a small
- number of unconditional branches. */
-
-static rtx
-find_dead_or_set_registers (target, res, jump_target, jump_count, set, needed)
- rtx target;
- struct resources *res;
- rtx *jump_target;
- int jump_count;
- struct resources set, needed;
-{
- HARD_REG_SET scratch;
- rtx insn, next;
- rtx jump_insn = 0;
- int i;
-
- for (insn = target; insn; insn = next)
- {
- rtx this_jump_insn = insn;
-
- next = NEXT_INSN (insn);
- switch (GET_CODE (insn))
- {
- case CODE_LABEL:
- /* After a label, any pending dead registers that weren't yet
- used can be made dead. */
- AND_COMPL_HARD_REG_SET (pending_dead_regs, needed.regs);
- AND_COMPL_HARD_REG_SET (res->regs, pending_dead_regs);
- CLEAR_HARD_REG_SET (pending_dead_regs);
-
- continue;
-
- case BARRIER:
- case NOTE:
- continue;
-
- case INSN:
- if (GET_CODE (PATTERN (insn)) == USE)
- {
- /* If INSN is a USE made by update_block, we care about the
- underlying insn. Any registers set by the underlying insn
- are live since the insn is being done somewhere else. */
- if (GET_RTX_CLASS (GET_CODE (XEXP (PATTERN (insn), 0))) == 'i')
- mark_set_resources (XEXP (PATTERN (insn), 0), res, 0, 1);
-
- /* All other USE insns are to be ignored. */
- continue;
- }
- else if (GET_CODE (PATTERN (insn)) == CLOBBER)
- continue;
- else if (GET_CODE (PATTERN (insn)) == SEQUENCE)
- {
- /* An unconditional jump can be used to fill the delay slot
- of a call, so search for a JUMP_INSN in any position. */
- for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
- {
- this_jump_insn = XVECEXP (PATTERN (insn), 0, i);
- if (GET_CODE (this_jump_insn) == JUMP_INSN)
- break;
- }
- }
-
- default:
- break;
- }
-
- if (GET_CODE (this_jump_insn) == JUMP_INSN)
- {
- if (jump_count++ < 10)
- {
- if (simplejump_p (this_jump_insn)
- || GET_CODE (PATTERN (this_jump_insn)) == RETURN)
- {
- next = JUMP_LABEL (this_jump_insn);
- if (jump_insn == 0)
- {
- jump_insn = insn;
- if (jump_target)
- *jump_target = JUMP_LABEL (this_jump_insn);
- }
- }
- else if (condjump_p (this_jump_insn)
- || condjump_in_parallel_p (this_jump_insn))
- {
- struct resources target_set, target_res;
- struct resources fallthrough_res;
-
- /* We can handle conditional branches here by following
- both paths, and then IOR the results of the two paths
- together, which will give us registers that are dead
- on both paths. Since this is expensive, we give it
- a much higher cost than unconditional branches. The
- cost was chosen so that we will follow at most 1
- conditional branch. */
-
- jump_count += 4;
- if (jump_count >= 10)
- break;
-
- mark_referenced_resources (insn, &needed, 1);
-
- /* For an annulled branch, mark_set_resources ignores slots
- filled by instructions from the target. This is correct
- if the branch is not taken. Since we are following both
- paths from the branch, we must also compute correct info
- if the branch is taken. We do this by inverting all of
- the INSN_FROM_TARGET_P bits, calling mark_set_resources,
- and then inverting the INSN_FROM_TARGET_P bits again. */
-
- if (GET_CODE (PATTERN (insn)) == SEQUENCE
- && INSN_ANNULLED_BRANCH_P (this_jump_insn))
- {
- for (i = 1; i < XVECLEN (PATTERN (insn), 0); i++)
- INSN_FROM_TARGET_P (XVECEXP (PATTERN (insn), 0, i))
- = ! INSN_FROM_TARGET_P (XVECEXP (PATTERN (insn), 0, i));
-
- target_set = set;
- mark_set_resources (insn, &target_set, 0, 1);
-
- for (i = 1; i < XVECLEN (PATTERN (insn), 0); i++)
- INSN_FROM_TARGET_P (XVECEXP (PATTERN (insn), 0, i))
- = ! INSN_FROM_TARGET_P (XVECEXP (PATTERN (insn), 0, i));
-
- mark_set_resources (insn, &set, 0, 1);
- }
- else
- {
- mark_set_resources (insn, &set, 0, 1);
- target_set = set;
- }
-
- target_res = *res;
- COPY_HARD_REG_SET (scratch, target_set.regs);
- AND_COMPL_HARD_REG_SET (scratch, needed.regs);
- AND_COMPL_HARD_REG_SET (target_res.regs, scratch);
-
- fallthrough_res = *res;
- COPY_HARD_REG_SET (scratch, set.regs);
- AND_COMPL_HARD_REG_SET (scratch, needed.regs);
- AND_COMPL_HARD_REG_SET (fallthrough_res.regs, scratch);
-
- find_dead_or_set_registers (JUMP_LABEL (this_jump_insn),
- &target_res, 0, jump_count,
- target_set, needed);
- find_dead_or_set_registers (next,
- &fallthrough_res, 0, jump_count,
- set, needed);
- IOR_HARD_REG_SET (fallthrough_res.regs, target_res.regs);
- AND_HARD_REG_SET (res->regs, fallthrough_res.regs);
- break;
- }
- else
- break;
- }
- else
- {
- /* Don't try this optimization if we expired our jump count
- above, since that would mean there may be an infinite loop
- in the function being compiled. */
- jump_insn = 0;
- break;
- }
- }
-
- mark_referenced_resources (insn, &needed, 1);
- mark_set_resources (insn, &set, 0, 1);
-
- COPY_HARD_REG_SET (scratch, set.regs);
- AND_COMPL_HARD_REG_SET (scratch, needed.regs);
- AND_COMPL_HARD_REG_SET (res->regs, scratch);
- }
-
- return jump_insn;
-}
-
-/* Set the resources that are live at TARGET.
-
- If TARGET is zero, we refer to the end of the current function and can
- return our precomputed value.
-
- Otherwise, we try to find out what is live by consulting the basic block
- information. This is tricky, because we must consider the actions of
- reload and jump optimization, which occur after the basic block information
- has been computed.
-
- Accordingly, we proceed as follows::
-
- We find the previous BARRIER and look at all immediately following labels
- (with no intervening active insns) to see if any of them start a basic
- block. If we hit the start of the function first, we use block 0.
-
- Once we have found a basic block and a corresponding first insns, we can
- accurately compute the live status from basic_block_live_regs and
- reg_renumber. (By starting at a label following a BARRIER, we are immune
- to actions taken by reload and jump.) Then we scan all insns between
- that point and our target. For each CLOBBER (or for call-clobbered regs
- when we pass a CALL_INSN), mark the appropriate registers are dead. For
- a SET, mark them as live.
-
- We have to be careful when using REG_DEAD notes because they are not
- updated by such things as find_equiv_reg. So keep track of registers
- marked as dead that haven't been assigned to, and mark them dead at the
- next CODE_LABEL since reload and jump won't propagate values across labels.
-
- If we cannot find the start of a basic block (should be a very rare
- case, if it can happen at all), mark everything as potentially live.
-
- Next, scan forward from TARGET looking for things set or clobbered
- before they are used. These are not live.
-
- Because we can be called many times on the same target, save our results
- in a hash table indexed by INSN_UID. */
-
-static void
-mark_target_live_regs (target, res)
- rtx target;
- struct resources *res;
-{
- int b = -1;
- int i;
- struct target_info *tinfo;
- rtx insn;
- rtx jump_insn = 0;
- rtx jump_target;
- HARD_REG_SET scratch;
- struct resources set, needed;
-
- /* Handle end of function. */
- if (target == 0)
- {
- *res = end_of_function_needs;
- return;
- }
-
- /* We have to assume memory is needed, but the CC isn't. */
- res->memory = 1;
- res->volatil = res->unch_memory = 0;
- res->cc = 0;
-
- /* See if we have computed this value already. */
- for (tinfo = target_hash_table[INSN_UID (target) % TARGET_HASH_PRIME];
- tinfo; tinfo = tinfo->next)
- if (tinfo->uid == INSN_UID (target))
- break;
-
- /* Start by getting the basic block number. If we have saved information,
- we can get it from there unless the insn at the start of the basic block
- has been deleted. */
- if (tinfo && tinfo->block != -1
- && ! INSN_DELETED_P (BLOCK_HEAD (tinfo->block)))
- b = tinfo->block;
-
- if (b == -1)
- b = find_basic_block (target);
-
- if (tinfo)
- {
- /* If the information is up-to-date, use it. Otherwise, we will
- update it below. */
- if (b == tinfo->block && b != -1 && tinfo->bb_tick == bb_ticks[b])
- {
- COPY_HARD_REG_SET (res->regs, tinfo->live_regs);
- return;
- }
- }
- else
- {
- /* Allocate a place to put our results and chain it into the
- hash table. */
- tinfo = (struct target_info *) oballoc (sizeof (struct target_info));
- tinfo->uid = INSN_UID (target);
- tinfo->block = b;
- tinfo->next = target_hash_table[INSN_UID (target) % TARGET_HASH_PRIME];
- target_hash_table[INSN_UID (target) % TARGET_HASH_PRIME] = tinfo;
- }
-
- CLEAR_HARD_REG_SET (pending_dead_regs);
-
- /* If we found a basic block, get the live registers from it and update
- them with anything set or killed between its start and the insn before
- TARGET. Otherwise, we must assume everything is live. */
- if (b != -1)
- {
- regset regs_live = basic_block_live_at_start[b];
- int j;
- int regno;
- rtx start_insn, stop_insn;
-
- /* Compute hard regs live at start of block -- this is the real hard regs
- marked live, plus live pseudo regs that have been renumbered to
- hard regs. */
-
- REG_SET_TO_HARD_REG_SET (current_live_regs, regs_live);
-
- EXECUTE_IF_SET_IN_REG_SET
- (regs_live, FIRST_PSEUDO_REGISTER, i,
- {
- if ((regno = reg_renumber[i]) >= 0)
- for (j = regno;
- j < regno + HARD_REGNO_NREGS (regno,
- PSEUDO_REGNO_MODE (i));
- j++)
- SET_HARD_REG_BIT (current_live_regs, j);
- });
-
- /* Get starting and ending insn, handling the case where each might
- be a SEQUENCE. */
- start_insn = (b == 0 ? get_insns () : BLOCK_HEAD (b));
- stop_insn = target;
-
- if (GET_CODE (start_insn) == INSN
- && GET_CODE (PATTERN (start_insn)) == SEQUENCE)
- start_insn = XVECEXP (PATTERN (start_insn), 0, 0);
-
- if (GET_CODE (stop_insn) == INSN
- && GET_CODE (PATTERN (stop_insn)) == SEQUENCE)
- stop_insn = next_insn (PREV_INSN (stop_insn));
-
- for (insn = start_insn; insn != stop_insn;
- insn = next_insn_no_annul (insn))
- {
- rtx link;
- rtx real_insn = insn;
-
- /* If this insn is from the target of a branch, it isn't going to
- be used in the sequel. If it is used in both cases, this
- test will not be true. */
- if (INSN_FROM_TARGET_P (insn))
- continue;
-
- /* If this insn is a USE made by update_block, we care about the
- underlying insn. */
- if (GET_CODE (insn) == INSN && GET_CODE (PATTERN (insn)) == USE
- && GET_RTX_CLASS (GET_CODE (XEXP (PATTERN (insn), 0))) == 'i')
- real_insn = XEXP (PATTERN (insn), 0);
-
- if (GET_CODE (real_insn) == CALL_INSN)
- {
- /* CALL clobbers all call-used regs that aren't fixed except
- sp, ap, and fp. Do this before setting the result of the
- call live. */
- for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
- if (call_used_regs[i]
- && i != STACK_POINTER_REGNUM && i != FRAME_POINTER_REGNUM
- && i != ARG_POINTER_REGNUM
-#if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
- && i != HARD_FRAME_POINTER_REGNUM
-#endif
-#if ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
- && ! (i == ARG_POINTER_REGNUM && fixed_regs[i])
-#endif
-#ifdef PIC_OFFSET_TABLE_REGNUM
- && ! (i == PIC_OFFSET_TABLE_REGNUM && flag_pic)
-#endif
- )
- CLEAR_HARD_REG_BIT (current_live_regs, i);
-
- /* A CALL_INSN sets any global register live, since it may
- have been modified by the call. */
- for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
- if (global_regs[i])
- SET_HARD_REG_BIT (current_live_regs, i);
- }
-
- /* Mark anything killed in an insn to be deadened at the next
- label. Ignore USE insns; the only REG_DEAD notes will be for
- parameters. But they might be early. A CALL_INSN will usually
- clobber registers used for parameters. It isn't worth bothering
- with the unlikely case when it won't. */
- if ((GET_CODE (real_insn) == INSN
- && GET_CODE (PATTERN (real_insn)) != USE
- && GET_CODE (PATTERN (real_insn)) != CLOBBER)
- || GET_CODE (real_insn) == JUMP_INSN
- || GET_CODE (real_insn) == CALL_INSN)
- {
- for (link = REG_NOTES (real_insn); link; link = XEXP (link, 1))
- if (REG_NOTE_KIND (link) == REG_DEAD
- && GET_CODE (XEXP (link, 0)) == REG
- && REGNO (XEXP (link, 0)) < FIRST_PSEUDO_REGISTER)
- {
- int first_regno = REGNO (XEXP (link, 0));
- int last_regno
- = (first_regno
- + HARD_REGNO_NREGS (first_regno,
- GET_MODE (XEXP (link, 0))));
-
- for (i = first_regno; i < last_regno; i++)
- SET_HARD_REG_BIT (pending_dead_regs, i);
- }
-
- note_stores (PATTERN (real_insn), update_live_status);
-
- /* If any registers were unused after this insn, kill them.
- These notes will always be accurate. */
- for (link = REG_NOTES (real_insn); link; link = XEXP (link, 1))
- if (REG_NOTE_KIND (link) == REG_UNUSED
- && GET_CODE (XEXP (link, 0)) == REG
- && REGNO (XEXP (link, 0)) < FIRST_PSEUDO_REGISTER)
- {
- int first_regno = REGNO (XEXP (link, 0));
- int last_regno
- = (first_regno
- + HARD_REGNO_NREGS (first_regno,
- GET_MODE (XEXP (link, 0))));
-
- for (i = first_regno; i < last_regno; i++)
- CLEAR_HARD_REG_BIT (current_live_regs, i);
- }
- }
-
- else if (GET_CODE (real_insn) == CODE_LABEL)
- {
- /* A label clobbers the pending dead registers since neither
- reload nor jump will propagate a value across a label. */
- AND_COMPL_HARD_REG_SET (current_live_regs, pending_dead_regs);
- CLEAR_HARD_REG_SET (pending_dead_regs);
- }
-
- /* The beginning of the epilogue corresponds to the end of the
- RTL chain when there are no epilogue insns. Certain resources
- are implicitly required at that point. */
- else if (GET_CODE (real_insn) == NOTE
- && NOTE_LINE_NUMBER (real_insn) == NOTE_INSN_EPILOGUE_BEG)
- IOR_HARD_REG_SET (current_live_regs, start_of_epilogue_needs.regs);
- }
-
- COPY_HARD_REG_SET (res->regs, current_live_regs);
- tinfo->block = b;
- tinfo->bb_tick = bb_ticks[b];
- }
- else
- /* We didn't find the start of a basic block. Assume everything
- in use. This should happen only extremely rarely. */
- SET_HARD_REG_SET (res->regs);
-
- CLEAR_RESOURCE (&set);
- CLEAR_RESOURCE (&needed);
-
- jump_insn = find_dead_or_set_registers (target, res, &jump_target, 0,
- set, needed);
-
- /* If we hit an unconditional branch, we have another way of finding out
- what is live: we can see what is live at the branch target and include
- anything used but not set before the branch. The only things that are
- live are those that are live using the above test and the test below. */
-
- if (jump_insn)
- {
- struct resources new_resources;
- rtx stop_insn = next_active_insn (jump_insn);
-
- mark_target_live_regs (next_active_insn (jump_target), &new_resources);
- CLEAR_RESOURCE (&set);
- CLEAR_RESOURCE (&needed);
-
- /* Include JUMP_INSN in the needed registers. */
- for (insn = target; insn != stop_insn; insn = next_active_insn (insn))
- {
- mark_referenced_resources (insn, &needed, 1);
-
- COPY_HARD_REG_SET (scratch, needed.regs);
- AND_COMPL_HARD_REG_SET (scratch, set.regs);
- IOR_HARD_REG_SET (new_resources.regs, scratch);
-
- mark_set_resources (insn, &set, 0, 1);
- }
-
- AND_HARD_REG_SET (res->regs, new_resources.regs);
- }
-
- COPY_HARD_REG_SET (tinfo->live_regs, res->regs);
-}
-
/* Scan a function looking for insns that need a delay slot and find insns to
put into the delay slot.
@@ -3317,9 +2253,11 @@ fill_simple_delay_slots (non_jumps_p)
break;
else if (JUMP_LABEL (trial_delay) != target)
{
- mark_target_live_regs
- (next_active_insn (JUMP_LABEL (trial_delay)),
- &needed_at_jump);
+ rtx ninsn =
+ next_active_insn (JUMP_LABEL (trial_delay));
+
+ mark_target_live_regs (get_insns (), ninsn,
+ &needed_at_jump);
needed.memory |= needed_at_jump.memory;
needed.unch_memory |= needed_at_jump.unch_memory;
IOR_HARD_REG_SET (needed.regs, needed_at_jump.regs);
@@ -3505,7 +2443,7 @@ fill_simple_delay_slots (non_jumps_p)
current_function_epilogue_delay_list
= gen_rtx_INSN_LIST (VOIDmode, trial,
current_function_epilogue_delay_list);
- mark_referenced_resources (trial, &end_of_function_needs, 1);
+ mark_end_of_function_resources (trial, 1);
update_block (trial, trial);
delete_insn (trial);
@@ -3587,7 +2525,7 @@ fill_slots_from_thread (insn, condition, thread, opposite_thread, likely,
if (condition == const_true_rtx)
CLEAR_RESOURCE (&opposite_needed);
else
- mark_target_live_regs (opposite_thread, &opposite_needed);
+ mark_target_live_regs (get_insns (), opposite_thread, &opposite_needed);
/* If the insn at THREAD can be split, do it here to avoid having to
update THREAD and NEW_THREAD if it is done in the loop below. Also
@@ -4623,80 +3561,11 @@ dbr_schedule (first, file)
redirect_jump (insn, target);
}
- /* Indicate what resources are required to be valid at the end of the current
- function. The condition code never is and memory always is. If the
- frame pointer is needed, it is and so is the stack pointer unless
- EXIT_IGNORE_STACK is non-zero. If the frame pointer is not needed, the
- stack pointer is. Registers used to return the function value are
- needed. Registers holding global variables are needed. */
-
- end_of_function_needs.cc = 0;
- end_of_function_needs.memory = 1;
- end_of_function_needs.unch_memory = 0;
- CLEAR_HARD_REG_SET (end_of_function_needs.regs);
-
- if (frame_pointer_needed)
- {
- SET_HARD_REG_BIT (end_of_function_needs.regs, FRAME_POINTER_REGNUM);
-#if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
- SET_HARD_REG_BIT (end_of_function_needs.regs, HARD_FRAME_POINTER_REGNUM);
-#endif
-#ifdef EXIT_IGNORE_STACK
- if (! EXIT_IGNORE_STACK
- || current_function_sp_is_unchanging)
-#endif
- SET_HARD_REG_BIT (end_of_function_needs.regs, STACK_POINTER_REGNUM);
- }
- else
- SET_HARD_REG_BIT (end_of_function_needs.regs, STACK_POINTER_REGNUM);
-
- if (current_function_return_rtx != 0)
- mark_referenced_resources (current_function_return_rtx,
- &end_of_function_needs, 1);
-
- for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
- if (global_regs[i]
-#ifdef EPILOGUE_USES
- || EPILOGUE_USES (i)
-#endif
- )
- SET_HARD_REG_BIT (end_of_function_needs.regs, i);
-
- /* The registers required to be live at the end of the function are
- represented in the flow information as being dead just prior to
- reaching the end of the function. For example, the return of a value
- might be represented by a USE of the return register immediately
- followed by an unconditional jump to the return label where the
- return label is the end of the RTL chain. The end of the RTL chain
- is then taken to mean that the return register is live.
-
- This sequence is no longer maintained when epilogue instructions are
- added to the RTL chain. To reconstruct the original meaning, the
- start of the epilogue (NOTE_INSN_EPILOGUE_BEG) is regarded as the
- point where these registers become live (start_of_epilogue_needs).
- If epilogue instructions are present, the registers set by those
- instructions won't have been processed by flow. Thus, those
- registers are additionally required at the end of the RTL chain
- (end_of_function_needs). */
-
- start_of_epilogue_needs = end_of_function_needs;
-
- while ((epilogue_insn = next_nonnote_insn (epilogue_insn)))
- mark_set_resources (epilogue_insn, &end_of_function_needs, 0, 1);
+ init_resource_info (epilogue_insn);
/* Show we haven't computed an end-of-function label yet. */
end_of_function_label = 0;
- /* Allocate and initialize the tables used by mark_target_live_regs. */
- target_hash_table
- = (struct target_info **) alloca ((TARGET_HASH_PRIME
- * sizeof (struct target_info *)));
- bzero ((char *) target_hash_table,
- TARGET_HASH_PRIME * sizeof (struct target_info *));
-
- bb_ticks = (int *) alloca (n_basic_blocks * sizeof (int));
- bzero ((char *) bb_ticks, n_basic_blocks * sizeof (int));
-
/* Initialize the statistics for this function. */
bzero ((char *) num_insns_needing_delays, sizeof num_insns_needing_delays);
bzero ((char *) num_filled_delays, sizeof num_filled_delays);
@@ -4800,5 +3669,6 @@ dbr_schedule (first, file)
GEN_INT (pred_flags),
REG_NOTES (insn));
}
+ free_resource_info ();
}
#endif /* DELAY_SLOTS */
diff --git a/gcc/resource.c b/gcc/resource.c
new file mode 100644
index 00000000000..020212c21fe
--- /dev/null
+++ b/gcc/resource.c
@@ -0,0 +1,1239 @@
+#include "config.h"
+#include "rtl.h"
+#include "hard-reg-set.h"
+#include "system.h"
+#include "basic-block.h"
+#include "regs.h"
+#include "flags.h"
+#include "output.h"
+#include "resource.h"
+
+/* This structure is used to record liveness information at the targets or
+ fallthrough insns of branches. We will most likely need the information
+ at targets again, so save them in a hash table rather than recomputing them
+ each time. */
+
+struct target_info
+{
+ int uid; /* INSN_UID of target. */
+ struct target_info *next; /* Next info for same hash bucket. */
+ HARD_REG_SET live_regs; /* Registers live at target. */
+ int block; /* Basic block number containing target. */
+ int bb_tick; /* Generation count of basic block info. */
+};
+
+#define TARGET_HASH_PRIME 257
+
+/* Indicates what resources are required at the beginning of the epilogue. */
+static struct resources start_of_epilogue_needs;
+
+/* Indicates what resources are required at function end. */
+static struct resources end_of_function_needs;
+
+/* Define the hash table itself. */
+static struct target_info **target_hash_table = NULL;
+
+/* For each basic block, we maintain a generation number of its basic
+ block info, which is updated each time we move an insn from the
+ target of a jump. This is the generation number indexed by block
+ number. */
+
+static int *bb_ticks;
+
+/* Marks registers possibly live at the current place being scanned by
+ mark_target_live_regs. Used only by the next two functions. */
+
+static HARD_REG_SET current_live_regs;
+
+/* Marks registers for which we have seen a REG_DEAD note but no assignment.
+ Also only used by the next two functions. */
+
+static HARD_REG_SET pending_dead_regs;
+
+/* Utility function called from mark_target_live_regs via note_stores.
+ It deadens any CLOBBERed registers and livens any SET registers. */
+
+static void
+update_live_status (dest, x)
+ rtx dest;
+ rtx x;
+{
+ int first_regno, last_regno;
+ int i;
+
+ if (GET_CODE (dest) != REG
+ && (GET_CODE (dest) != SUBREG || GET_CODE (SUBREG_REG (dest)) != REG))
+ return;
+
+ if (GET_CODE (dest) == SUBREG)
+ first_regno = REGNO (SUBREG_REG (dest)) + SUBREG_WORD (dest);
+ else
+ first_regno = REGNO (dest);
+
+ last_regno = first_regno + HARD_REGNO_NREGS (first_regno, GET_MODE (dest));
+
+ if (GET_CODE (x) == CLOBBER)
+ for (i = first_regno; i < last_regno; i++)
+ CLEAR_HARD_REG_BIT (current_live_regs, i);
+ else
+ for (i = first_regno; i < last_regno; i++)
+ {
+ SET_HARD_REG_BIT (current_live_regs, i);
+ CLEAR_HARD_REG_BIT (pending_dead_regs, i);
+ }
+}
+/* Find the number of the basic block that starts closest to INSN. Return -1
+ if we couldn't find such a basic block. */
+
+static int
+find_basic_block (insn)
+ rtx insn;
+{
+ int i;
+
+ /* Scan backwards to the previous BARRIER. Then see if we can find a
+ label that starts a basic block. Return the basic block number. */
+
+ for (insn = prev_nonnote_insn (insn);
+ insn && GET_CODE (insn) != BARRIER;
+ insn = prev_nonnote_insn (insn))
+ ;
+
+ /* The start of the function is basic block zero. */
+ if (insn == 0)
+ return 0;
+
+ /* See if any of the upcoming CODE_LABELs start a basic block. If we reach
+ anything other than a CODE_LABEL or note, we can't find this code. */
+ for (insn = next_nonnote_insn (insn);
+ insn && GET_CODE (insn) == CODE_LABEL;
+ insn = next_nonnote_insn (insn))
+ {
+ for (i = 0; i < n_basic_blocks; i++)
+ if (insn == BLOCK_HEAD (i))
+ return i;
+ }
+
+ return -1;
+}
+
+/* Similar to next_insn, but ignores insns in the delay slots of
+ an annulled branch. */
+
+static rtx
+next_insn_no_annul (insn)
+ rtx insn;
+{
+ if (insn)
+ {
+ /* If INSN is an annulled branch, skip any insns from the target
+ of the branch. */
+ if (INSN_ANNULLED_BRANCH_P (insn)
+ && NEXT_INSN (PREV_INSN (insn)) != insn)
+ while (INSN_FROM_TARGET_P (NEXT_INSN (insn)))
+ insn = NEXT_INSN (insn);
+
+ insn = NEXT_INSN (insn);
+ if (insn && GET_CODE (insn) == INSN
+ && GET_CODE (PATTERN (insn)) == SEQUENCE)
+ insn = XVECEXP (PATTERN (insn), 0, 0);
+ }
+
+ return insn;
+}
+
+/* Given X, some rtl, and RES, a pointer to a `struct resource', mark
+ which resources are referenced by the insn. If INCLUDE_DELAYED_EFFECTS
+ is TRUE, resources used by the called routine will be included for
+ CALL_INSNs. */
+
+void
+mark_referenced_resources (x, res, include_delayed_effects)
+ register rtx x;
+ register struct resources *res;
+ register int include_delayed_effects;
+{
+ register enum rtx_code code = GET_CODE (x);
+ register int i, j;
+ register char *format_ptr;
+
+ /* Handle leaf items for which we set resource flags. Also, special-case
+ CALL, SET and CLOBBER operators. */
+ switch (code)
+ {
+ case CONST:
+ case CONST_INT:
+ case CONST_DOUBLE:
+ case PC:
+ case SYMBOL_REF:
+ case LABEL_REF:
+ return;
+
+ case SUBREG:
+ if (GET_CODE (SUBREG_REG (x)) != REG)
+ mark_referenced_resources (SUBREG_REG (x), res, 0);
+ else
+ {
+ int regno = REGNO (SUBREG_REG (x)) + SUBREG_WORD (x);
+ int last_regno = regno + HARD_REGNO_NREGS (regno, GET_MODE (x));
+ for (i = regno; i < last_regno; i++)
+ SET_HARD_REG_BIT (res->regs, i);
+ }
+ return;
+
+ case REG:
+ for (i = 0; i < HARD_REGNO_NREGS (REGNO (x), GET_MODE (x)); i++)
+ SET_HARD_REG_BIT (res->regs, REGNO (x) + i);
+ return;
+
+ case MEM:
+ /* If this memory shouldn't change, it really isn't referencing
+ memory. */
+ if (RTX_UNCHANGING_P (x))
+ res->unch_memory = 1;
+ else
+ res->memory = 1;
+ res->volatil = MEM_VOLATILE_P (x);
+
+ /* Mark registers used to access memory. */
+ mark_referenced_resources (XEXP (x, 0), res, 0);
+ return;
+
+ case CC0:
+ res->cc = 1;
+ return;
+
+ case UNSPEC_VOLATILE:
+ case ASM_INPUT:
+ /* Traditional asm's are always volatile. */
+ res->volatil = 1;
+ return;
+
+ case TRAP_IF:
+ res->volatil = 1;
+ break;
+
+ case ASM_OPERANDS:
+ res->volatil = MEM_VOLATILE_P (x);
+
+ /* For all ASM_OPERANDS, we must traverse the vector of input operands.
+ We can not just fall through here since then we would be confused
+ by the ASM_INPUT rtx inside ASM_OPERANDS, which do not indicate
+ traditional asms unlike their normal usage. */
+
+ for (i = 0; i < ASM_OPERANDS_INPUT_LENGTH (x); i++)
+ mark_referenced_resources (ASM_OPERANDS_INPUT (x, i), res, 0);
+ return;
+
+ case CALL:
+ /* The first operand will be a (MEM (xxx)) but doesn't really reference
+ memory. The second operand may be referenced, though. */
+ mark_referenced_resources (XEXP (XEXP (x, 0), 0), res, 0);
+ mark_referenced_resources (XEXP (x, 1), res, 0);
+ return;
+
+ case SET:
+ /* Usually, the first operand of SET is set, not referenced. But
+ registers used to access memory are referenced. SET_DEST is
+ also referenced if it is a ZERO_EXTRACT or SIGN_EXTRACT. */
+
+ mark_referenced_resources (SET_SRC (x), res, 0);
+
+ x = SET_DEST (x);
+ if (GET_CODE (x) == SIGN_EXTRACT || GET_CODE (x) == ZERO_EXTRACT)
+ mark_referenced_resources (x, res, 0);
+ else if (GET_CODE (x) == SUBREG)
+ x = SUBREG_REG (x);
+ if (GET_CODE (x) == MEM)
+ mark_referenced_resources (XEXP (x, 0), res, 0);
+ return;
+
+ case CLOBBER:
+ return;
+
+ case CALL_INSN:
+ if (include_delayed_effects)
+ {
+ /* A CALL references memory, the frame pointer if it exists, the
+ stack pointer, any global registers and any registers given in
+ USE insns immediately in front of the CALL.
+
+ However, we may have moved some of the parameter loading insns
+ into the delay slot of this CALL. If so, the USE's for them
+ don't count and should be skipped. */
+ rtx insn = PREV_INSN (x);
+ rtx sequence = 0;
+ int seq_size = 0;
+ rtx next = NEXT_INSN (x);
+ int i;
+
+ /* If we are part of a delay slot sequence, point at the SEQUENCE. */
+ if (NEXT_INSN (insn) != x)
+ {
+ next = NEXT_INSN (NEXT_INSN (insn));
+ sequence = PATTERN (NEXT_INSN (insn));
+ seq_size = XVECLEN (sequence, 0);
+ if (GET_CODE (sequence) != SEQUENCE)
+ abort ();
+ }
+
+ res->memory = 1;
+ SET_HARD_REG_BIT (res->regs, STACK_POINTER_REGNUM);
+ if (frame_pointer_needed)
+ {
+ SET_HARD_REG_BIT (res->regs, FRAME_POINTER_REGNUM);
+#if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
+ SET_HARD_REG_BIT (res->regs, HARD_FRAME_POINTER_REGNUM);
+#endif
+ }
+
+ for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
+ if (global_regs[i])
+ SET_HARD_REG_BIT (res->regs, i);
+
+ /* Check for a NOTE_INSN_SETJMP. If it exists, then we must
+ assume that this call can need any register.
+
+ This is done to be more conservative about how we handle setjmp.
+ We assume that they both use and set all registers. Using all
+ registers ensures that a register will not be considered dead
+ just because it crosses a setjmp call. A register should be
+ considered dead only if the setjmp call returns non-zero. */
+ if (next && GET_CODE (next) == NOTE
+ && NOTE_LINE_NUMBER (next) == NOTE_INSN_SETJMP)
+ SET_HARD_REG_SET (res->regs);
+
+ {
+ rtx link;
+
+ for (link = CALL_INSN_FUNCTION_USAGE (x);
+ link;
+ link = XEXP (link, 1))
+ if (GET_CODE (XEXP (link, 0)) == USE)
+ {
+ for (i = 1; i < seq_size; i++)
+ {
+ rtx slot_pat = PATTERN (XVECEXP (sequence, 0, i));
+ if (GET_CODE (slot_pat) == SET
+ && rtx_equal_p (SET_DEST (slot_pat),
+ SET_DEST (XEXP (link, 0))))
+ break;
+ }
+ if (i >= seq_size)
+ mark_referenced_resources (SET_DEST (XEXP (link, 0)),
+ res, 0);
+ }
+ }
+ }
+
+ /* ... fall through to other INSN processing ... */
+
+ case INSN:
+ case JUMP_INSN:
+
+#ifdef INSN_REFERENCES_ARE_DELAYED
+ if (! include_delayed_effects
+ && INSN_REFERENCES_ARE_DELAYED (x))
+ return;
+#endif
+
+ /* No special processing, just speed up. */
+ mark_referenced_resources (PATTERN (x), res, include_delayed_effects);
+ return;
+
+ default:
+ break;
+ }
+
+ /* Process each sub-expression and flag what it needs. */
+ format_ptr = GET_RTX_FORMAT (code);
+ for (i = 0; i < GET_RTX_LENGTH (code); i++)
+ switch (*format_ptr++)
+ {
+ case 'e':
+ mark_referenced_resources (XEXP (x, i), res, include_delayed_effects);
+ break;
+
+ case 'E':
+ for (j = 0; j < XVECLEN (x, i); j++)
+ mark_referenced_resources (XVECEXP (x, i, j), res,
+ include_delayed_effects);
+ break;
+ }
+}
+
+/* A subroutine of mark_target_live_regs. Search forward from TARGET
+ looking for registers that are set before they are used. These are dead.
+ Stop after passing a few conditional jumps, and/or a small
+ number of unconditional branches. */
+
+static rtx
+find_dead_or_set_registers (target, res, jump_target, jump_count, set, needed)
+ rtx target;
+ struct resources *res;
+ rtx *jump_target;
+ int jump_count;
+ struct resources set, needed;
+{
+ HARD_REG_SET scratch;
+ rtx insn, next;
+ rtx jump_insn = 0;
+ int i;
+
+ for (insn = target; insn; insn = next)
+ {
+ rtx this_jump_insn = insn;
+
+ next = NEXT_INSN (insn);
+ switch (GET_CODE (insn))
+ {
+ case CODE_LABEL:
+ /* After a label, any pending dead registers that weren't yet
+ used can be made dead. */
+ AND_COMPL_HARD_REG_SET (pending_dead_regs, needed.regs);
+ AND_COMPL_HARD_REG_SET (res->regs, pending_dead_regs);
+ CLEAR_HARD_REG_SET (pending_dead_regs);
+
+ continue;
+
+ case BARRIER:
+ case NOTE:
+ continue;
+
+ case INSN:
+ if (GET_CODE (PATTERN (insn)) == USE)
+ {
+ /* If INSN is a USE made by update_block, we care about the
+ underlying insn. Any registers set by the underlying insn
+ are live since the insn is being done somewhere else. */
+ if (GET_RTX_CLASS (GET_CODE (XEXP (PATTERN (insn), 0))) == 'i')
+ mark_set_resources (XEXP (PATTERN (insn), 0), res, 0, 1);
+
+ /* All other USE insns are to be ignored. */
+ continue;
+ }
+ else if (GET_CODE (PATTERN (insn)) == CLOBBER)
+ continue;
+ else if (GET_CODE (PATTERN (insn)) == SEQUENCE)
+ {
+ /* An unconditional jump can be used to fill the delay slot
+ of a call, so search for a JUMP_INSN in any position. */
+ for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
+ {
+ this_jump_insn = XVECEXP (PATTERN (insn), 0, i);
+ if (GET_CODE (this_jump_insn) == JUMP_INSN)
+ break;
+ }
+ }
+
+ default:
+ break;
+ }
+
+ if (GET_CODE (this_jump_insn) == JUMP_INSN)
+ {
+ if (jump_count++ < 10)
+ {
+ if (simplejump_p (this_jump_insn)
+ || GET_CODE (PATTERN (this_jump_insn)) == RETURN)
+ {
+ next = JUMP_LABEL (this_jump_insn);
+ if (jump_insn == 0)
+ {
+ jump_insn = insn;
+ if (jump_target)
+ *jump_target = JUMP_LABEL (this_jump_insn);
+ }
+ }
+ else if (condjump_p (this_jump_insn)
+ || condjump_in_parallel_p (this_jump_insn))
+ {
+ struct resources target_set, target_res;
+ struct resources fallthrough_res;
+
+ /* We can handle conditional branches here by following
+ both paths, and then IOR the results of the two paths
+ together, which will give us registers that are dead
+ on both paths. Since this is expensive, we give it
+ a much higher cost than unconditional branches. The
+ cost was chosen so that we will follow at most 1
+ conditional branch. */
+
+ jump_count += 4;
+ if (jump_count >= 10)
+ break;
+
+ mark_referenced_resources (insn, &needed, 1);
+
+ /* For an annulled branch, mark_set_resources ignores slots
+ filled by instructions from the target. This is correct
+ if the branch is not taken. Since we are following both
+ paths from the branch, we must also compute correct info
+ if the branch is taken. We do this by inverting all of
+ the INSN_FROM_TARGET_P bits, calling mark_set_resources,
+ and then inverting the INSN_FROM_TARGET_P bits again. */
+
+ if (GET_CODE (PATTERN (insn)) == SEQUENCE
+ && INSN_ANNULLED_BRANCH_P (this_jump_insn))
+ {
+ for (i = 1; i < XVECLEN (PATTERN (insn), 0); i++)
+ INSN_FROM_TARGET_P (XVECEXP (PATTERN (insn), 0, i))
+ = ! INSN_FROM_TARGET_P (XVECEXP (PATTERN (insn), 0, i));
+
+ target_set = set;
+ mark_set_resources (insn, &target_set, 0, 1);
+
+ for (i = 1; i < XVECLEN (PATTERN (insn), 0); i++)
+ INSN_FROM_TARGET_P (XVECEXP (PATTERN (insn), 0, i))
+ = ! INSN_FROM_TARGET_P (XVECEXP (PATTERN (insn), 0, i));
+
+ mark_set_resources (insn, &set, 0, 1);
+ }
+ else
+ {
+ mark_set_resources (insn, &set, 0, 1);
+ target_set = set;
+ }
+
+ target_res = *res;
+ COPY_HARD_REG_SET (scratch, target_set.regs);
+ AND_COMPL_HARD_REG_SET (scratch, needed.regs);
+ AND_COMPL_HARD_REG_SET (target_res.regs, scratch);
+
+ fallthrough_res = *res;
+ COPY_HARD_REG_SET (scratch, set.regs);
+ AND_COMPL_HARD_REG_SET (scratch, needed.regs);
+ AND_COMPL_HARD_REG_SET (fallthrough_res.regs, scratch);
+
+ find_dead_or_set_registers (JUMP_LABEL (this_jump_insn),
+ &target_res, 0, jump_count,
+ target_set, needed);
+ find_dead_or_set_registers (next,
+ &fallthrough_res, 0, jump_count,
+ set, needed);
+ IOR_HARD_REG_SET (fallthrough_res.regs, target_res.regs);
+ AND_HARD_REG_SET (res->regs, fallthrough_res.regs);
+ break;
+ }
+ else
+ break;
+ }
+ else
+ {
+ /* Don't try this optimization if we expired our jump count
+ above, since that would mean there may be an infinite loop
+ in the function being compiled. */
+ jump_insn = 0;
+ break;
+ }
+ }
+
+ mark_referenced_resources (insn, &needed, 1);
+ mark_set_resources (insn, &set, 0, 1);
+
+ COPY_HARD_REG_SET (scratch, set.regs);
+ AND_COMPL_HARD_REG_SET (scratch, needed.regs);
+ AND_COMPL_HARD_REG_SET (res->regs, scratch);
+ }
+
+ return jump_insn;
+}
+
+/* Given X, a part of an insn, and a pointer to a `struct resource',
+ RES, indicate which resources are modified by the insn. If
+ INCLUDE_DELAYED_EFFECTS is nonzero, also mark resources potentially
+ set by the called routine.
+
+ If IN_DEST is nonzero, it means we are inside a SET. Otherwise,
+ objects are being referenced instead of set.
+
+ We never mark the insn as modifying the condition code unless it explicitly
+ SETs CC0 even though this is not totally correct. The reason for this is
+ that we require a SET of CC0 to immediately precede the reference to CC0.
+ So if some other insn sets CC0 as a side-effect, we know it cannot affect
+ our computation and thus may be placed in a delay slot. */
+
+void
+mark_set_resources (x, res, in_dest, include_delayed_effects)
+ register rtx x;
+ register struct resources *res;
+ int in_dest;
+ int include_delayed_effects;
+{
+ register enum rtx_code code;
+ register int i, j;
+ register char *format_ptr;
+
+ restart:
+
+ code = GET_CODE (x);
+
+ switch (code)
+ {
+ case NOTE:
+ case BARRIER:
+ case CODE_LABEL:
+ case USE:
+ case CONST_INT:
+ case CONST_DOUBLE:
+ case LABEL_REF:
+ case SYMBOL_REF:
+ case CONST:
+ case PC:
+ /* These don't set any resources. */
+ return;
+
+ case CC0:
+ if (in_dest)
+ res->cc = 1;
+ return;
+
+ case CALL_INSN:
+ /* Called routine modifies the condition code, memory, any registers
+ that aren't saved across calls, global registers and anything
+ explicitly CLOBBERed immediately after the CALL_INSN. */
+
+ if (include_delayed_effects)
+ {
+ rtx next = NEXT_INSN (x);
+ rtx prev = PREV_INSN (x);
+ rtx link;
+
+ res->cc = res->memory = 1;
+ for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
+ if (call_used_regs[i] || global_regs[i])
+ SET_HARD_REG_BIT (res->regs, i);
+
+ /* If X is part of a delay slot sequence, then NEXT should be
+ the first insn after the sequence. */
+ if (NEXT_INSN (prev) != x)
+ next = NEXT_INSN (NEXT_INSN (prev));
+
+ for (link = CALL_INSN_FUNCTION_USAGE (x);
+ link; link = XEXP (link, 1))
+ if (GET_CODE (XEXP (link, 0)) == CLOBBER)
+ mark_set_resources (SET_DEST (XEXP (link, 0)), res, 1, 0);
+
+ /* Check for a NOTE_INSN_SETJMP. If it exists, then we must
+ assume that this call can clobber any register. */
+ if (next && GET_CODE (next) == NOTE
+ && NOTE_LINE_NUMBER (next) == NOTE_INSN_SETJMP)
+ SET_HARD_REG_SET (res->regs);
+ }
+
+ /* ... and also what its RTL says it modifies, if anything. */
+
+ case JUMP_INSN:
+ case INSN:
+
+ /* An insn consisting of just a CLOBBER (or USE) is just for flow
+ and doesn't actually do anything, so we ignore it. */
+
+#ifdef INSN_SETS_ARE_DELAYED
+ if (! include_delayed_effects
+ && INSN_SETS_ARE_DELAYED (x))
+ return;
+#endif
+
+ x = PATTERN (x);
+ if (GET_CODE (x) != USE && GET_CODE (x) != CLOBBER)
+ goto restart;
+ return;
+
+ case SET:
+ /* If the source of a SET is a CALL, this is actually done by
+ the called routine. So only include it if we are to include the
+ effects of the calling routine. */
+
+ mark_set_resources (SET_DEST (x), res,
+ (include_delayed_effects
+ || GET_CODE (SET_SRC (x)) != CALL),
+ 0);
+
+ mark_set_resources (SET_SRC (x), res, 0, 0);
+ return;
+
+ case CLOBBER:
+ mark_set_resources (XEXP (x, 0), res, 1, 0);
+ return;
+
+ case SEQUENCE:
+ for (i = 0; i < XVECLEN (x, 0); i++)
+ if (! (INSN_ANNULLED_BRANCH_P (XVECEXP (x, 0, 0))
+ && INSN_FROM_TARGET_P (XVECEXP (x, 0, i))))
+ mark_set_resources (XVECEXP (x, 0, i), res, 0,
+ include_delayed_effects);
+ return;
+
+ case POST_INC:
+ case PRE_INC:
+ case POST_DEC:
+ case PRE_DEC:
+ mark_set_resources (XEXP (x, 0), res, 1, 0);
+ return;
+
+ case ZERO_EXTRACT:
+ mark_set_resources (XEXP (x, 0), res, in_dest, 0);
+ mark_set_resources (XEXP (x, 1), res, 0, 0);
+ mark_set_resources (XEXP (x, 2), res, 0, 0);
+ return;
+
+ case MEM:
+ if (in_dest)
+ {
+ res->memory = 1;
+ res->unch_memory = RTX_UNCHANGING_P (x);
+ res->volatil = MEM_VOLATILE_P (x);
+ }
+
+ mark_set_resources (XEXP (x, 0), res, 0, 0);
+ return;
+
+ case SUBREG:
+ if (in_dest)
+ {
+ if (GET_CODE (SUBREG_REG (x)) != REG)
+ mark_set_resources (SUBREG_REG (x), res,
+ in_dest, include_delayed_effects);
+ else
+ {
+ int regno = REGNO (SUBREG_REG (x)) + SUBREG_WORD (x);
+ int last_regno = regno + HARD_REGNO_NREGS (regno, GET_MODE (x));
+ for (i = regno; i < last_regno; i++)
+ SET_HARD_REG_BIT (res->regs, i);
+ }
+ }
+ return;
+
+ case REG:
+ if (in_dest)
+ for (i = 0; i < HARD_REGNO_NREGS (REGNO (x), GET_MODE (x)); i++)
+ SET_HARD_REG_BIT (res->regs, REGNO (x) + i);
+ return;
+
+ default:
+ break;
+ }
+
+ /* Process each sub-expression and flag what it needs. */
+ format_ptr = GET_RTX_FORMAT (code);
+ for (i = 0; i < GET_RTX_LENGTH (code); i++)
+ switch (*format_ptr++)
+ {
+ case 'e':
+ mark_set_resources (XEXP (x, i), res, in_dest, include_delayed_effects);
+ break;
+
+ case 'E':
+ for (j = 0; j < XVECLEN (x, i); j++)
+ mark_set_resources (XVECEXP (x, i, j), res, in_dest,
+ include_delayed_effects);
+ break;
+ }
+}
+
+/* Set the resources that are live at TARGET.
+
+ If TARGET is zero, we refer to the end of the current function and can
+ return our precomputed value.
+
+ Otherwise, we try to find out what is live by consulting the basic block
+ information. This is tricky, because we must consider the actions of
+ reload and jump optimization, which occur after the basic block information
+ has been computed.
+
+   Accordingly, we proceed as follows:
+
+ We find the previous BARRIER and look at all immediately following labels
+ (with no intervening active insns) to see if any of them start a basic
+ block. If we hit the start of the function first, we use block 0.
+
+   Once we have found a basic block and a corresponding first insn, we can
+ accurately compute the live status from basic_block_live_regs and
+ reg_renumber. (By starting at a label following a BARRIER, we are immune
+ to actions taken by reload and jump.) Then we scan all insns between
+ that point and our target. For each CLOBBER (or for call-clobbered regs
+   when we pass a CALL_INSN), mark the appropriate registers as dead.  For
+ a SET, mark them as live.
+
+ We have to be careful when using REG_DEAD notes because they are not
+ updated by such things as find_equiv_reg. So keep track of registers
+ marked as dead that haven't been assigned to, and mark them dead at the
+ next CODE_LABEL since reload and jump won't propagate values across labels.
+
+ If we cannot find the start of a basic block (should be a very rare
+ case, if it can happen at all), mark everything as potentially live.
+
+ Next, scan forward from TARGET looking for things set or clobbered
+ before they are used. These are not live.
+
+ Because we can be called many times on the same target, save our results
+ in a hash table indexed by INSN_UID. This is only done if the function
+ init_resource_info () was invoked before we are called. */
+
+void
+mark_target_live_regs (insns, target, res)
+ rtx insns;
+ rtx target;
+ struct resources *res;
+{
+ int b = -1;
+ int i;
+ struct target_info *tinfo = NULL;
+ rtx insn;
+ rtx jump_insn = 0;
+ rtx jump_target;
+ HARD_REG_SET scratch;
+ struct resources set, needed;
+
+ /* Handle end of function. */
+ if (target == 0)
+ {
+ *res = end_of_function_needs;
+ return;
+ }
+
+ /* We have to assume memory is needed, but the CC isn't. */
+ res->memory = 1;
+ res->volatil = res->unch_memory = 0;
+ res->cc = 0;
+
+ /* See if we have computed this value already. */
+ if (target_hash_table != NULL)
+ {
+ for (tinfo = target_hash_table[INSN_UID (target) % TARGET_HASH_PRIME];
+ tinfo; tinfo = tinfo->next)
+ if (tinfo->uid == INSN_UID (target))
+ break;
+
+ /* Start by getting the basic block number. If we have saved
+ information, we can get it from there unless the insn at the
+ start of the basic block has been deleted. */
+ if (tinfo && tinfo->block != -1
+ && ! INSN_DELETED_P (BLOCK_HEAD (tinfo->block)))
+ b = tinfo->block;
+ }
+
+ if (b == -1)
+ b = find_basic_block (target);
+
+ if (target_hash_table != NULL)
+ {
+ if (tinfo)
+ {
+ /* If the information is up-to-date, use it. Otherwise, we will
+ update it below. */
+ if (b == tinfo->block && b != -1 && tinfo->bb_tick == bb_ticks[b])
+ {
+ COPY_HARD_REG_SET (res->regs, tinfo->live_regs);
+ return;
+ }
+ }
+ else
+ {
+ /* Allocate a place to put our results and chain it into the
+ hash table. */
+ tinfo = (struct target_info *) oballoc (sizeof (struct target_info));
+ tinfo->uid = INSN_UID (target);
+ tinfo->block = b;
+ tinfo->next = target_hash_table[INSN_UID (target) % TARGET_HASH_PRIME];
+ target_hash_table[INSN_UID (target) % TARGET_HASH_PRIME] = tinfo;
+ }
+ }
+
+ CLEAR_HARD_REG_SET (pending_dead_regs);
+
+ /* If we found a basic block, get the live registers from it and update
+ them with anything set or killed between its start and the insn before
+ TARGET. Otherwise, we must assume everything is live. */
+ if (b != -1)
+ {
+ regset regs_live = basic_block_live_at_start[b];
+ int j;
+ int regno;
+ rtx start_insn, stop_insn;
+
+ /* Compute hard regs live at start of block -- this is the real hard regs
+ marked live, plus live pseudo regs that have been renumbered to
+ hard regs. */
+
+ REG_SET_TO_HARD_REG_SET (current_live_regs, regs_live);
+
+ EXECUTE_IF_SET_IN_REG_SET
+ (regs_live, FIRST_PSEUDO_REGISTER, i,
+ {
+ if ((regno = reg_renumber[i]) >= 0)
+ for (j = regno;
+ j < regno + HARD_REGNO_NREGS (regno,
+ PSEUDO_REGNO_MODE (i));
+ j++)
+ SET_HARD_REG_BIT (current_live_regs, j);
+ });
+
+ /* Get starting and ending insn, handling the case where each might
+ be a SEQUENCE. */
+ start_insn = (b == 0 ? insns : BLOCK_HEAD (b));
+ stop_insn = target;
+
+ if (GET_CODE (start_insn) == INSN
+ && GET_CODE (PATTERN (start_insn)) == SEQUENCE)
+ start_insn = XVECEXP (PATTERN (start_insn), 0, 0);
+
+ if (GET_CODE (stop_insn) == INSN
+ && GET_CODE (PATTERN (stop_insn)) == SEQUENCE)
+ stop_insn = next_insn (PREV_INSN (stop_insn));
+
+ for (insn = start_insn; insn != stop_insn;
+ insn = next_insn_no_annul (insn))
+ {
+ rtx link;
+ rtx real_insn = insn;
+
+ /* If this insn is from the target of a branch, it isn't going to
+ be used in the sequel. If it is used in both cases, this
+ test will not be true. */
+ if (INSN_FROM_TARGET_P (insn))
+ continue;
+
+ /* If this insn is a USE made by update_block, we care about the
+ underlying insn. */
+ if (GET_CODE (insn) == INSN && GET_CODE (PATTERN (insn)) == USE
+ && GET_RTX_CLASS (GET_CODE (XEXP (PATTERN (insn), 0))) == 'i')
+ real_insn = XEXP (PATTERN (insn), 0);
+
+ if (GET_CODE (real_insn) == CALL_INSN)
+ {
+ /* CALL clobbers all call-used regs that aren't fixed except
+ sp, ap, and fp. Do this before setting the result of the
+ call live. */
+ for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
+ if (call_used_regs[i]
+ && i != STACK_POINTER_REGNUM && i != FRAME_POINTER_REGNUM
+ && i != ARG_POINTER_REGNUM
+#if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
+ && i != HARD_FRAME_POINTER_REGNUM
+#endif
+#if ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
+ && ! (i == ARG_POINTER_REGNUM && fixed_regs[i])
+#endif
+#ifdef PIC_OFFSET_TABLE_REGNUM
+ && ! (i == PIC_OFFSET_TABLE_REGNUM && flag_pic)
+#endif
+ )
+ CLEAR_HARD_REG_BIT (current_live_regs, i);
+
+ /* A CALL_INSN sets any global register live, since it may
+ have been modified by the call. */
+ for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
+ if (global_regs[i])
+ SET_HARD_REG_BIT (current_live_regs, i);
+ }
+
+ /* Mark anything killed in an insn to be deadened at the next
+ label. Ignore USE insns; the only REG_DEAD notes will be for
+ parameters. But they might be early. A CALL_INSN will usually
+ clobber registers used for parameters. It isn't worth bothering
+ with the unlikely case when it won't. */
+ if ((GET_CODE (real_insn) == INSN
+ && GET_CODE (PATTERN (real_insn)) != USE
+ && GET_CODE (PATTERN (real_insn)) != CLOBBER)
+ || GET_CODE (real_insn) == JUMP_INSN
+ || GET_CODE (real_insn) == CALL_INSN)
+ {
+ for (link = REG_NOTES (real_insn); link; link = XEXP (link, 1))
+ if (REG_NOTE_KIND (link) == REG_DEAD
+ && GET_CODE (XEXP (link, 0)) == REG
+ && REGNO (XEXP (link, 0)) < FIRST_PSEUDO_REGISTER)
+ {
+ int first_regno = REGNO (XEXP (link, 0));
+ int last_regno
+ = (first_regno
+ + HARD_REGNO_NREGS (first_regno,
+ GET_MODE (XEXP (link, 0))));
+
+ for (i = first_regno; i < last_regno; i++)
+ SET_HARD_REG_BIT (pending_dead_regs, i);
+ }
+
+ note_stores (PATTERN (real_insn), update_live_status);
+
+ /* If any registers were unused after this insn, kill them.
+ These notes will always be accurate. */
+ for (link = REG_NOTES (real_insn); link; link = XEXP (link, 1))
+ if (REG_NOTE_KIND (link) == REG_UNUSED
+ && GET_CODE (XEXP (link, 0)) == REG
+ && REGNO (XEXP (link, 0)) < FIRST_PSEUDO_REGISTER)
+ {
+ int first_regno = REGNO (XEXP (link, 0));
+ int last_regno
+ = (first_regno
+ + HARD_REGNO_NREGS (first_regno,
+ GET_MODE (XEXP (link, 0))));
+
+ for (i = first_regno; i < last_regno; i++)
+ CLEAR_HARD_REG_BIT (current_live_regs, i);
+ }
+ }
+
+ else if (GET_CODE (real_insn) == CODE_LABEL)
+ {
+ /* A label clobbers the pending dead registers since neither
+ reload nor jump will propagate a value across a label. */
+ AND_COMPL_HARD_REG_SET (current_live_regs, pending_dead_regs);
+ CLEAR_HARD_REG_SET (pending_dead_regs);
+ }
+
+ /* The beginning of the epilogue corresponds to the end of the
+ RTL chain when there are no epilogue insns. Certain resources
+ are implicitly required at that point. */
+ else if (GET_CODE (real_insn) == NOTE
+ && NOTE_LINE_NUMBER (real_insn) == NOTE_INSN_EPILOGUE_BEG)
+ IOR_HARD_REG_SET (current_live_regs, start_of_epilogue_needs.regs);
+ }
+
+ COPY_HARD_REG_SET (res->regs, current_live_regs);
+ if (tinfo != NULL)
+ {
+ tinfo->block = b;
+ tinfo->bb_tick = bb_ticks[b];
+ }
+ }
+ else
+ /* We didn't find the start of a basic block. Assume everything
+ in use. This should happen only extremely rarely. */
+ SET_HARD_REG_SET (res->regs);
+
+ CLEAR_RESOURCE (&set);
+ CLEAR_RESOURCE (&needed);
+
+ jump_insn = find_dead_or_set_registers (target, res, &jump_target, 0,
+ set, needed);
+
+ /* If we hit an unconditional branch, we have another way of finding out
+ what is live: we can see what is live at the branch target and include
+ anything used but not set before the branch. The only things that are
+ live are those that are live using the above test and the test below. */
+
+ if (jump_insn)
+ {
+ struct resources new_resources;
+ rtx stop_insn = next_active_insn (jump_insn);
+
+ mark_target_live_regs (insns, next_active_insn (jump_target),
+ &new_resources);
+ CLEAR_RESOURCE (&set);
+ CLEAR_RESOURCE (&needed);
+
+ /* Include JUMP_INSN in the needed registers. */
+ for (insn = target; insn != stop_insn; insn = next_active_insn (insn))
+ {
+ mark_referenced_resources (insn, &needed, 1);
+
+ COPY_HARD_REG_SET (scratch, needed.regs);
+ AND_COMPL_HARD_REG_SET (scratch, set.regs);
+ IOR_HARD_REG_SET (new_resources.regs, scratch);
+
+ mark_set_resources (insn, &set, 0, 1);
+ }
+
+ AND_HARD_REG_SET (res->regs, new_resources.regs);
+ }
+
+ if (tinfo != NULL)
+ {
+ COPY_HARD_REG_SET (tinfo->live_regs, res->regs);
+ }
+}
+
+/* Initialize the resources required by mark_target_live_regs ().
+ This should be invoked before the first call to mark_target_live_regs. */
+
+void
+init_resource_info (epilogue_insn)
+ rtx epilogue_insn;
+{
+ int i;
+
+ /* Indicate what resources are required to be valid at the end of the current
+ function. The condition code never is and memory always is. If the
+ frame pointer is needed, it is and so is the stack pointer unless
+ EXIT_IGNORE_STACK is non-zero. If the frame pointer is not needed, the
+ stack pointer is. Registers used to return the function value are
+ needed. Registers holding global variables are needed. */
+
+ end_of_function_needs.cc = 0;
+ end_of_function_needs.memory = 1;
+ end_of_function_needs.unch_memory = 0;
+ CLEAR_HARD_REG_SET (end_of_function_needs.regs);
+
+ if (frame_pointer_needed)
+ {
+ SET_HARD_REG_BIT (end_of_function_needs.regs, FRAME_POINTER_REGNUM);
+#if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
+ SET_HARD_REG_BIT (end_of_function_needs.regs, HARD_FRAME_POINTER_REGNUM);
+#endif
+#ifdef EXIT_IGNORE_STACK
+ if (! EXIT_IGNORE_STACK
+ || current_function_sp_is_unchanging)
+#endif
+ SET_HARD_REG_BIT (end_of_function_needs.regs, STACK_POINTER_REGNUM);
+ }
+ else
+ SET_HARD_REG_BIT (end_of_function_needs.regs, STACK_POINTER_REGNUM);
+
+ if (current_function_return_rtx != 0)
+ mark_referenced_resources (current_function_return_rtx,
+ &end_of_function_needs, 1);
+
+ for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
+ if (global_regs[i]
+#ifdef EPILOGUE_USES
+ || EPILOGUE_USES (i)
+#endif
+ )
+ SET_HARD_REG_BIT (end_of_function_needs.regs, i);
+
+ /* The registers required to be live at the end of the function are
+ represented in the flow information as being dead just prior to
+ reaching the end of the function. For example, the return of a value
+ might be represented by a USE of the return register immediately
+ followed by an unconditional jump to the return label where the
+ return label is the end of the RTL chain. The end of the RTL chain
+ is then taken to mean that the return register is live.
+
+ This sequence is no longer maintained when epilogue instructions are
+ added to the RTL chain. To reconstruct the original meaning, the
+ start of the epilogue (NOTE_INSN_EPILOGUE_BEG) is regarded as the
+ point where these registers become live (start_of_epilogue_needs).
+ If epilogue instructions are present, the registers set by those
+ instructions won't have been processed by flow. Thus, those
+ registers are additionally required at the end of the RTL chain
+ (end_of_function_needs). */
+
+ start_of_epilogue_needs = end_of_function_needs;
+
+ while ((epilogue_insn = next_nonnote_insn (epilogue_insn)))
+ mark_set_resources (epilogue_insn, &end_of_function_needs, 0, 1);
+
+ /* Allocate and initialize the tables used by mark_target_live_regs. */
+ target_hash_table
+ = (struct target_info **) xmalloc ((TARGET_HASH_PRIME
+ * sizeof (struct target_info *)));
+ bzero ((char *) target_hash_table,
+ TARGET_HASH_PRIME * sizeof (struct target_info *));
+
+ bb_ticks = (int *) xmalloc (n_basic_blocks * sizeof (int));
+ bzero ((char *) bb_ticks, n_basic_blocks * sizeof (int));
+}
+
+/* Free up the resources allocated to mark_target_live_regs ().  This
+ should be invoked after the last call to mark_target_live_regs (). */
+
+void
+free_resource_info ()
+{
+ if (target_hash_table != NULL)
+ {
+ free (target_hash_table);
+ target_hash_table = NULL;
+ }
+
+ if (bb_ticks != NULL)
+ {
+ free (bb_ticks);
+ bb_ticks = NULL;
+ }
+}
+
+/* Clear any hashed information that we have stored for INSN. */
+
+void
+clear_hashed_info_for_insn (insn)
+ rtx insn;
+{
+ struct target_info *tinfo;
+
+ if (target_hash_table != NULL)
+ {
+ for (tinfo = target_hash_table[INSN_UID (insn) % TARGET_HASH_PRIME];
+ tinfo; tinfo = tinfo->next)
+ if (tinfo->uid == INSN_UID (insn))
+ break;
+
+ if (tinfo)
+ tinfo->block = -1;
+ }
+}
+
+/* Increment the tick count for the basic block that contains INSN. */
+
+void
+incr_ticks_for_insn (insn)
+ rtx insn;
+{
+ int b = find_basic_block (insn);
+
+ if (b != -1)
+ bb_ticks[b]++;
+}
+
+/* Add TRIAL to the set of resources used at the end of the current
+ function. */
+void
+mark_end_of_function_resources (trial, include_delayed_effects)
+ rtx trial;
+ int include_delayed_effects;
+{
+ mark_referenced_resources (trial, &end_of_function_needs,
+ include_delayed_effects);
+}
+
+/* Try to find an available hard register of mode MODE at
+ CURRENT_INSN, matching the register class in CLASS_STR. Registers
+ that already have bits set in REG_SET will not be considered.
+
+ If an appropriate register is available, it will be returned and the
+ corresponding bit(s) in REG_SET will be set; otherwise, NULL_RTX is
+ returned. */
+
+rtx
+find_free_register (current_insn, class_str, mode, reg_set)
+ rtx current_insn;
+ char *class_str;
+ int mode;
+ HARD_REG_SET *reg_set;
+{
+ int i, j;
+ struct resources used;
+ unsigned char clet = class_str[0];
+ enum reg_class class
+ = (clet == 'r' ? GENERAL_REGS : REG_CLASS_FROM_LETTER (clet));
+
+ mark_target_live_regs (get_insns (), current_insn, &used);
+
+ for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
+ {
+ int success = 1;
+
+ if (! TEST_HARD_REG_BIT (reg_class_contents[class], i))
+ continue;
+ for (j = HARD_REGNO_NREGS (i, mode) - 1; j >= 0; j--)
+ {
+ if (TEST_HARD_REG_BIT (*reg_set, i + j)
+ || TEST_HARD_REG_BIT (used.regs, i + j))
+ {
+ success = 0;
+ break;
+ }
+ }
+ if (success)
+ {
+ for (j = HARD_REGNO_NREGS (i, mode) - 1; j >= 0; j--)
+ {
+ SET_HARD_REG_BIT (*reg_set, i + j);
+ }
+ return gen_rtx_REG (mode, i);
+ }
+ }
+ return NULL_RTX;
+}
diff --git a/gcc/resource.h b/gcc/resource.h
new file mode 100644
index 00000000000..d3a8e2c7088
--- /dev/null
+++ b/gcc/resource.h
@@ -0,0 +1,46 @@
+/* Definitions for computing resource usage of specific insns.
+ Copyright (C) 1999 Free Software Foundation, Inc.
+
+This file is part of GNU CC.
+
+GNU CC is free software; you can redistribute it and/or modify
+it under the terms of the GNU General Public License as published by
+the Free Software Foundation; either version 2, or (at your option)
+any later version.
+
+GNU CC is distributed in the hope that it will be useful,
+but WITHOUT ANY WARRANTY; without even the implied warranty of
+MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+GNU General Public License for more details.
+
+You should have received a copy of the GNU General Public License
+along with GNU CC; see the file COPYING. If not, write to
+the Free Software Foundation, 59 Temple Place - Suite 330,
+Boston, MA 02111-1307, USA. */
+
+/* Macro to clear all resources. */
+#define CLEAR_RESOURCE(RES) \
+ do { (RES)->memory = (RES)->unch_memory = (RES)->volatil = (RES)->cc = 0; \
+ CLEAR_HARD_REG_SET ((RES)->regs); } while (0)
+
+/* The resources used by a given insn. */
+struct resources
+{
+ char memory; /* Insn sets or needs a memory location. */
+ char unch_memory; /* Insn sets of needs a "unchanging" MEM. */
+ char volatil; /* Insn sets or needs a volatile memory loc. */
+ char cc; /* Insn sets or needs the condition codes. */
+ HARD_REG_SET regs; /* Which registers are set or needed. */
+};
+
+extern void mark_target_live_regs PROTO((rtx, rtx, struct resources *));
+extern void mark_set_resources PROTO((rtx, struct resources *, int,
+ int));
+extern void mark_referenced_resources PROTO((rtx, struct resources *, int));
+extern void clear_hashed_info_for_insn PROTO((rtx));
+extern void incr_ticks_for_insn PROTO((rtx));
+extern void mark_end_of_function_resources PROTO ((rtx, int));
+extern void init_resource_info PROTO((rtx));
+extern void free_resource_info PROTO((void));
+extern rtx find_free_register PROTO((rtx, char *, int,
+ HARD_REG_SET *));
diff --git a/gcc/sched.c b/gcc/sched.c
index b3c3df4d628..a8bcaf7437c 100644
--- a/gcc/sched.c
+++ b/gcc/sched.c
@@ -342,11 +342,9 @@ static int new_sometimes_live PROTO((struct sometimes *, int, int));
static void finish_sometimes_live PROTO((struct sometimes *, int));
static rtx reemit_notes PROTO((rtx, rtx));
static void schedule_block PROTO((int, FILE *));
-static rtx regno_use_in PROTO((int, rtx));
static void split_hard_reg_notes PROTO((rtx, rtx, rtx));
static void new_insn_dead_notes PROTO((rtx, rtx, rtx, rtx));
static void update_n_sets PROTO((rtx, int));
-static void update_flow_info PROTO((rtx, rtx, rtx, rtx));
/* Main entry point of this file. */
void schedule_insns PROTO((FILE *));
@@ -3533,39 +3531,6 @@ ret:
return;
}
-/* Subroutine of split_hard_reg_notes. Searches X for any reference to
- REGNO, returning the rtx of the reference found if any. Otherwise,
- returns 0. */
-
-static rtx
-regno_use_in (regno, x)
- int regno;
- rtx x;
-{
- register char *fmt;
- int i, j;
- rtx tem;
-
- if (GET_CODE (x) == REG && REGNO (x) == regno)
- return x;
-
- fmt = GET_RTX_FORMAT (GET_CODE (x));
- for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
- {
- if (fmt[i] == 'e')
- {
- if ((tem = regno_use_in (regno, XEXP (x, i))))
- return tem;
- }
- else if (fmt[i] == 'E')
- for (j = XVECLEN (x, i) - 1; j >= 0; j--)
- if ((tem = regno_use_in (regno , XVECEXP (x, i, j))))
- return tem;
- }
-
- return 0;
-}
-
/* Subroutine of update_flow_info. Determines whether any new REG_NOTEs are
needed for the hard register mentioned in the note. This can happen
if the reference to the hard register in the original insn was split into
@@ -3760,7 +3725,7 @@ update_n_sets (x, inc)
the insns from FIRST to LAST inclusive that were created by splitting
ORIG_INSN. NOTES are the original REG_NOTES. */
-static void
+void
update_flow_info (notes, first, last, orig_insn)
rtx notes;
rtx first, last;
@@ -4366,78 +4331,7 @@ schedule_insns (dump_file)
note_list = 0;
- for (insn = BLOCK_HEAD (b); ; insn = next)
- {
- rtx prev;
- rtx set;
-
- /* Can't use `next_real_insn' because that
- might go across CODE_LABELS and short-out basic blocks. */
- next = NEXT_INSN (insn);
- if (GET_CODE (insn) != INSN)
- {
- if (insn == BLOCK_END (b))
- break;
-
- continue;
- }
-
- /* Don't split no-op move insns. These should silently disappear
- later in final. Splitting such insns would break the code
- that handles REG_NO_CONFLICT blocks. */
- set = single_set (insn);
- if (set && rtx_equal_p (SET_SRC (set), SET_DEST (set)))
- {
- if (insn == BLOCK_END (b))
- break;
-
- /* Nops get in the way while scheduling, so delete them now if
- register allocation has already been done. It is too risky
- to try to do this before register allocation, and there are
- unlikely to be very many nops then anyways. */
- if (reload_completed)
- {
- PUT_CODE (insn, NOTE);
- NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
- NOTE_SOURCE_FILE (insn) = 0;
- }
-
- continue;
- }
-
- /* Split insns here to get max fine-grain parallelism. */
- prev = PREV_INSN (insn);
- /* It is probably not worthwhile to try to split again in the
- second pass. However, if flag_schedule_insns is not set,
- the first and only (if any) scheduling pass is after reload. */
- if (reload_completed == 0 || ! flag_schedule_insns)
- {
- rtx last, first = PREV_INSN (insn);
- rtx notes = REG_NOTES (insn);
-
- last = try_split (PATTERN (insn), insn, 1);
- if (last != insn)
- {
- /* try_split returns the NOTE that INSN became. */
- first = NEXT_INSN (first);
- update_flow_info (notes, first, last, insn);
-
- PUT_CODE (insn, NOTE);
- NOTE_SOURCE_FILE (insn) = 0;
- NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
- if (insn == BLOCK_HEAD (b))
- BLOCK_HEAD (b) = first;
- if (insn == BLOCK_END (b))
- {
- BLOCK_END (b) = last;
- break;
- }
- }
- }
-
- if (insn == BLOCK_END (b))
- break;
- }
+ split_block_insns (b, reload_completed == 0 || ! flag_schedule_insns);
schedule_block (b, dump_file);
OpenPOWER on IntegriCloud