-rw-r--r--  gcc/ChangeLog                                        |  273
-rw-r--r--  gcc/Makefile.in                                      |    9
-rw-r--r--  gcc/basic-block.h                                    |    2
-rw-r--r--  gcc/common.opt                                       |   16
-rw-r--r--  gcc/doc/invoke.texi                                  |   39
-rw-r--r--  gcc/doc/tree-ssa.texi                                |  132
-rw-r--r--  gcc/dominance.c                                      |   21
-rw-r--r--  gcc/domwalk.c                                        |   12
-rw-r--r--  gcc/domwalk.h                                        |    8
-rw-r--r--  gcc/fold-const.c                                     |   15
-rw-r--r--  gcc/opts.c                                           |    4
-rw-r--r--  gcc/testsuite/ChangeLog                              |   17
-rw-r--r--  gcc/testsuite/g++.dg/tree-ssa/pr18178.C              |   46
-rw-r--r--  gcc/testsuite/gcc.c-torture/execute/20030216-1.x     |   12
-rw-r--r--  gcc/testsuite/gcc.c-torture/execute/20041019-1.c     |   52
-rw-r--r--  gcc/testsuite/gcc.dg/tree-ssa/20030731-2.c           |    6
-rw-r--r--  gcc/testsuite/gcc.dg/tree-ssa/20030917-1.c           |    6
-rw-r--r--  gcc/testsuite/gcc.dg/tree-ssa/20030917-3.c           |    6
-rw-r--r--  gcc/testsuite/gcc.dg/tree-ssa/20040721-1.c           |    6
-rw-r--r--  gcc/testsuite/gcc.dg/tree-ssa/20041008-1.c           |   38
-rw-r--r--  gcc/testsuite/gcc.dg/tree-ssa/ssa-ccp-1.c            |    6
-rw-r--r--  gcc/testsuite/gcc.dg/tree-ssa/ssa-ccp-12.c           |   32
-rw-r--r--  gcc/testsuite/gcc.dg/tree-ssa/ssa-ccp-2.c            |    6
-rw-r--r--  gcc/testsuite/gcc.dg/tree-ssa/ssa-ccp-3.c            |    6
-rw-r--r--  gcc/testsuite/gcc.dg/tree-ssa/ssa-ccp-7.c            |    6
-rw-r--r--  gcc/testsuite/gcc.dg/tree-ssa/ssa-ccp-9.c            |    6
-rw-r--r--  gcc/timevar.def                                      |    5
-rw-r--r--  gcc/tree-cfg.c                                       |   12
-rw-r--r--  gcc/tree-complex.c                                   |    2
-rw-r--r--  gcc/tree-dfa.c                                       |   42
-rw-r--r--  gcc/tree-flow-inline.h                               |   11
-rw-r--r--  gcc/tree-flow.h                                      |   61
-rw-r--r--  gcc/tree-gimple.c                                    |    1
-rw-r--r--  gcc/tree-if-conv.c                                   |   51
-rw-r--r--  gcc/tree-into-ssa.c                                  | 2110
-rw-r--r--  gcc/tree-loop-linear.c                               |    5
-rw-r--r--  gcc/tree-optimize.c                                  |   38
-rw-r--r--  gcc/tree-pass.h                                      |   65
-rw-r--r--  gcc/tree-phinodes.c                                  |   67
-rw-r--r--  gcc/tree-pretty-print.c                              |    8
-rw-r--r--  gcc/tree-scalar-evolution.c                          |   23
-rw-r--r--  gcc/tree-sra.c                                       |   47
-rw-r--r--  gcc/tree-ssa-alias.c                                 |  121
-rw-r--r--  gcc/tree-ssa-ccp.c                                   | 1178
-rw-r--r--  gcc/tree-ssa-copy.c                                  |  851
-rw-r--r--  gcc/tree-ssa-dce.c                                   |   17
-rw-r--r--  gcc/tree-ssa-dom.c                                   |   62
-rw-r--r--  gcc/tree-ssa-dse.c                                   |   21
-rw-r--r--  gcc/tree-ssa-loop-ch.c                               |    4
-rw-r--r--  gcc/tree-ssa-loop-im.c                               |   22
-rw-r--r--  gcc/tree-ssa-loop-ivopts.c                           |   53
-rw-r--r--  gcc/tree-ssa-loop-manip.c                            |    5
-rw-r--r--  gcc/tree-ssa-loop.c                                  |   33
-rw-r--r--  gcc/tree-ssa-operands.c                              |   48
-rw-r--r--  gcc/tree-ssa-phiopt.c                                |   10
-rw-r--r--  gcc/tree-ssa-propagate.c                             |  415
-rw-r--r--  gcc/tree-ssa-propagate.h                             |   34
-rw-r--r--  gcc/tree-ssa-sink.c                                  |    5
-rw-r--r--  gcc/tree-ssa.c                                       |  315
-rw-r--r--  gcc/tree-vect-transform.c                            |   13
-rw-r--r--  gcc/tree-vectorizer.c                                |    4
-rw-r--r--  gcc/tree-vectorizer.h                                |    6
-rw-r--r--  gcc/tree-vrp.c                                       | 2265
-rw-r--r--  gcc/tree.def                                         |   14
-rw-r--r--  gcc/tree.h                                           |   19
65 files changed, 7275 insertions, 1570 deletions
diff --git a/gcc/ChangeLog b/gcc/ChangeLog
index 269275c3ee5..6b7192f1b21 100644
--- a/gcc/ChangeLog
+++ b/gcc/ChangeLog
@@ -1,3 +1,276 @@
+2005-04-08 Diego Novillo <dnovillo@redhat.com>
+
+ Merge from tree-cleanup-branch: VRP, store CCP, store
+ copy-prop, incremental SSA updating of FUD chains and
+ newly exposed symbols.
+
+ * Makefile.in (tree-ssa-copy.o): Depend on tree-ssa-propagate.h.
+ (OBJS-common): Add tree-vrp.o.
+ (tree-vrp.o): New rule.
+ * basic-block.h (nearest_common_dominator_for_set): Declare.
+ * common.opt (ftree-store-ccp): New flag.
+ (ftree-copy-prop): New flag.
+ (ftree-vrp): New flag.
+ (ftree-store-copy-prop): New flag.
+ * dominance.c (nearest_common_dominator_for_set): New.
+ * domwalk.c (walk_dominator_tree): Only traverse
+ statements in blocks marked in walk_data->interesting_blocks.
+ * domwalk.h (struct dom_walk_data): Add field interesting_blocks.
+ * fold-const.c (fold): Handle ASSERT_EXPR.
+ * opts.c (decode_options): Set flag_tree_copy_prop at -O1.
+ Set flag_tree_store_ccp, flag_tree_store_copy_prop and
+ flag_tree_vrp at -O2.
+ * timevar.def (TV_TREE_VRP): Define.
+ (TV_TREE_COPY_PROP): Define.
+ (TV_TREE_STORE_COPY_PROP): Define.
+ (TV_TREE_SSA_INCREMENTAL): Define.
+ (TV_TREE_STORE_CCP): Define.
+ * tree-cfg.c (tree_can_merge_blocks_p): Remove reference
+ to kill_redundant_phi_nodes from comment.
+ (verify_expr): Handle ASSERT_EXPR.
+ * tree-dfa.c (mark_new_vars_to_rename): Remove second
+ argument. Update all users.
+ (mark_call_clobbered_vars_to_rename): Remove. Update all
+ users.
+ * tree-flow-inline.h (unmodifiable_var_p): New.
+ * tree-flow.h (enum value_range_type): Declare.
+ (struct value_range_def): Declare.
+ (value_range): Declare.
+ (remove_all_phi_nodes_for): Remove. Update all users.
+ (find_phi_node_for): Declare.
+ (add_type_alias): Declare.
+ (count_uses_and_derefs): Declare.
+ (kill_redundant_phi_nodes): Remove.
+ (rewrite_into_ssa): Remove.
+ (rewrite_def_def_chains): Remove.
+ (update_ssa, register_new_name_mapping, create_new_def_for,
+ need_ssa_update_p, name_registered_for_update_p,
+ release_ssa_name_after_update_ssa, dump_repl_tbl,
+ debug_repl_tbl, dump_names_replaced_by,
+ debug_names_replaced_by, mark_sym_for_renaming,
+ mark_set_for_renaming, get_current_def, set_current_def,
+ get_value_range, dump_value_range, debug_value_range,
+ dump_all_value_ranges, debug_all_value_ranges,
+ expr_computes_nonzero, loop_depth_of_name,
+ unmodifiable_var_p): Declare.
+ * tree-gimple.c (is_gimple_formal_tmp_rhs): Handle
+ ASSERT_EXPR.
+ * tree-into-ssa.c (block_defs_stack): Update comment.
+ (old_ssa_names, new_ssa_names, old_virtual_ssa_names,
+ syms_to_rename, names_to_release, repl_tbl,
+ need_to_initialize_update_ssa_p, need_to_update_vops_p,
+ need_to_replace_names_p): New locals.
+ (NAME_SETS_GROWTH_FACTOR): Define.
+ (struct repl_map_d): Declare.
+ (struct mark_def_sites_global_data): Add field
+ interesting_blocks.
+ (enum rewrite_mode): Declare.
+ (REGISTER_DEFS_IN_THIS_STMT): Define.
+ (compute_global_livein): Use last_basic_block instead of
+ n_basic_blocks.
+ (set_def_block): Remove last argument. Update all callers.
+ (prepare_use_operand_for_rename): Remove. Update all callers.
+ (prepare_def_operand_for_rename): Remove. Update all callers.
+ (symbol_marked_for_renaming): New.
+ (is_old_name): New.
+ (is_new_name): New.
+ (repl_map_hash): New.
+ (repl_map_eq): New.
+ (repl_map_free): New.
+ (names_replaced_by): New.
+ (add_to_repl_tbl): New.
+ (add_new_name_mapping): New.
+ (mark_def_sites): Assume that all the operands in the
+ statement are in normal form.
+ (find_idf): Assert that the block in the stack is valid.
+ (get_default_def_for): New.
+ (insert_phi_nodes_for): Add new argument 'update_p'.
+ Add documentation.
+ If update_p is true, add a new mapping between the LHS of
+ each new PHI and the name that it replaces.
+ (insert_phi_nodes_1): Only call find_idf if needed.
+ (get_reaching_def): Call get_default_def_for.
+ (rewrite_operand): Remove.
+ (rewrite_stmt): Do nothing if REGISTER_DEFS_IN_THIS_STMT
+ and REWRITE_THIS_STMT are false.
+ Assume that all the operands in the statement are in
+ normal form.
+ (rewrite_add_phi_arguments): Don't use PHI_REWRITTEN.
+ (rewrite_virtual_phi_arguments): Remove.
+ (invalidate_name_tags): Remove.
+ (register_new_update_single, register_new_update_set,
+ rewrite_update_init_block, replace_use,
+ rewrite_update_fini_block, rewrite_update_stmt,
+ rewrite_update_phi_arguments): New.
+ (rewrite_blocks): Remove argument 'fix_virtual_phis'.
+ Add arguments 'entry', 'what' and 'blocks'.
+ Initialize the dominator walker according to 'what' and
+ 'blocks'.
+ Start the dominator walk at 'entry'.
+ (mark_def_site_blocks): Add argument 'interesting_blocks'.
+ Use it to configure the dominator walker.
+ (rewrite_into_ssa): Remove argument 'all'.
+ Make internal.
+ (rewrite_all_into_ssa): Remove.
+ (rewrite_def_def_chains): Remove.
+ (mark_def_interesting, mark_use_interesting,
+ prepare_phi_args_for_update, prepare_block_for_update,
+ prepare_def_site_for, prepare_def_sites,
+ dump_names_replaced_by, debug_names_replaced_by,
+ dump_repl_tbl, debug_repl_tbl, init_update_ssa,
+ delete_update_ssa, create_new_def_for,
+ register_new_name_mapping, mark_sym_for_renaming,
+ mark_set_for_renaming, need_ssa_update_p,
+ name_registered_for_update_p, ssa_names_to_replace,
+ release_ssa_name_after_update_ssa,
+ insert_updated_phi_nodes_for, update_ssa): New.
+ * tree-loop-linear.c (linear_transform_loops): Call
+ update_ssa instead of rewrite_into_ssa.
+ * tree-optimize.c (vars_to_rename): Remove.
+ Update all users.
+ (init_tree_optimization_passes): Replace
+ pass_redundant_phi with pass_copy_prop.
+ Add pass_vrp.
+ Replace pass_ccp with pass_store_ccp.
+ Add pass_store_copy_prop after pass_store_ccp.
+ (execute_todo): If the TODO_ flags don't include updating
+ the SSA form, assert that it does not need to be updated.
+ Call update_ssa instead of rewrite_into_ssa and
+ rewrite_def_def_chains.
+ If TODO_verify_loops is set, call verify_loop_closed_ssa.
+ (tree_rest_of_compilation):
+ * tree-pass.h (TODO_dump_func, TODO_ggc_collect,
+ TODO_verify_ssa, TODO_verify_flow, TODO_verify_stmts,
+ TODO_cleanup_cfg): Renumber.
+ (TODO_verify_loops, TODO_update_ssa,
+ TODO_update_ssa_no_phi, TODO_update_ssa_full_phi,
+ TODO_update_ssa_only_virtuals): Define.
+ (pass_copy_prop, pass_store_ccp, pass_store_copy_prop, pass_vrp):
+ Declare.
+ * tree-phinodes.c (make_phi_node): Update documentation.
+ (remove_all_phi_nodes_for): Remove.
+ (find_phi_node_for): New.
+ * tree-pretty-print.c (dump_generic_node): Handle ASSERT_EXPR.
+ * tree-scalar-evolution.c (follow_ssa_edge_in_rhs): Likewise.
+ (interpret_rhs_modify_expr): Likewise.
+ * tree-sra.c (decide_instantiations): Mark all symbols in
+ SRA_CANDIDATES for renaming.
+ (mark_all_v_defs_1): Rename from mark_all_v_defs.
+ (mark_all_v_defs): New function. Update all users to call it
+ with the whole list of scalarized statements, not just the
+ first one.
+ * tree-ssa-alias.c (count_ptr_derefs): Make extern.
+ (compute_flow_insensitive_aliasing): If the tag is
+ unmodifiable and the variable isn't or vice-versa, don't
+ make them alias of each other.
+ (setup_pointers_and_addressables): If the type tag for
+ VAR is about to change, mark the old one for renaming.
+ (add_type_alias): New.
+ * tree-ssa-ccp.c: Document SSA-CCP and STORE-CCP.
+ (ccp_lattice_t): Rename from latticevalue.
+ (value): Remove. Update all users.
+ (const_val): New local variable.
+ (do_store_ccp): New local variable.
+ (dump_lattice_value): Handle UNINITIALIZED.
+ (debug_lattice_value): New.
+ (get_default_value): Re-write.
+ (set_lattice_value): Re-write.
+ (def_to_varying): Remove. Update all users.
+ (likely_value): Return VARYING for statements that make
+ stores when STORE_CCP is false.
+ Return VARYING for any statement other than MODIFY_EXPR,
+ COND_EXPR and SWITCH_EXPR.
+ (ccp_initialize): Re-write.
+ (replace_uses_in, replace_vuse_in, substitute_and_fold):
+ Move to tree-ssa-propagate.c.
+ (ccp_lattice_meet): Handle memory stores when
+ DO_STORE_CCP is true.
+ (ccp_visit_phi_node): Likewise.
+ (ccp_fold): Likewise.
+ (evaluate_stmt): Likewise.
+ (visit_assignment): Likewise.
+ (ccp_visit_stmt): Likewise.
+ (execute_ssa_ccp): Add argument 'store_ccp'. Copy it
+ into DO_STORE_CCP.
+ (do_ssa_ccp): New.
+ (pass_ccp): Use it.
+ (do_ssa_store_ccp): New.
+ (gate_store_ccp): New.
+ (pass_store_ccp): Declare.
+ * tree-ssa-copy.c: Include tree-ssa-propagate.h.
+ (may_propagate_copy): Reformat.
+ Don't abort if ORIG is a virtual and DEST isn't.
+ If NEW does not have alias information but DEST does,
+ copy it.
+ (copy_of, cached_last_copy_of, do_store_copy_prop, enum
+ copy_prop_kind, which_copy_prop): Declare.
+ (stmt_may_generate_copy, get_copy_of_val,
+ get_last_copy_of, set_copy_of_val, dump_copy_of,
+ copy_prop_visit_assignment, copy_prop_visit_cond_stmt,
+ copy_prop_visit_stmt, copy_prop_visit_phi_node,
+ init_copy_prop, fini_copy_prop, execute_copy_prop,
+ gate_copy_prop, do_copy_prop, gate_store_copy_prop,
+ store_copy_prop): New.
+ (pass_copy_prop, pass_store_copy_prop): Declare.
+ * tree-ssa-dom.c (struct opt_stats_d): Add fields
+ 'num_const_prop' and 'num_copy_prop'.
+ (cprop_operand): Update them.
+ (dump_dominator_optimization_stats): Dump them.
+ (tree_ssa_dominator_optimize): Call update_ssa instead of
+ rewrite_into_ssa.
+ (loop_depth_of_name): Declare extern.
+ (simplify_cond_and_lookup_avail_expr): Guard against NULL
+ values for LOW or HIGH.
+ (cprop_into_successor_phis): Only propagate if NEW != ORIG.
+ (record_equivalences_from_stmt): Call expr_computes_nonzero.
+ (cprop_operand): Only propagate if VAL != OP.
+ * tree-ssa-dse.c (dse_optimize_stmt): Mark symbols in removed
+ statement for renaming.
+ * tree-ssa-loop-im.c (move_computations): Call update_ssa.
+ * tree-ssa-loop-ivopts.c (rewrite_address_base): Call
+ add_type_alias if necessary.
+ Call mark_new_vars_to_rename.
+ (tree_ssa_iv_optimize): If new symbols need to be renamed,
+ mark every statement updated, call update_ssa and
+ rewrite_into_loop_closed_ssa.
+ * tree-ssa-loop-manip.c (add_exit_phis): Do not remove DEF_BB
+ from LIVEIN if VAR is a virtual.
+ * tree-ssa-loop.c (tree_loop_optimizer_init): Call update_ssa.
+ * tree-ssa-operands.c (get_expr_operands): Handle ASSERT_EXPR.
+ (get_call_expr_operands): Reformat statement.
+ (add_stmt_operand): Don't create V_MAY_DEFs for read-only
+ symbols.
+ * tree-ssa-propagate.c (ssa_prop_init): Initialize
+ SSA_NAME_VALUE for every name.
+ (first_vdef, stmt_makes_single_load, stmt_makes_single_store,
+ get_value_loaded_by): New.
+ (replace_uses_in, replace_vuses_in, replace_phi_args_in,
+ substitute_and_fold): Move from tree-ssa-ccp.c.
+ * tree-ssa-propagate.h (struct prop_value_d, prop_value_t,
+ first_vdef, stmt_makes_single_load, stmt_makes_single_store,
+ get_value_loaded_by, replace_uses_in, substitute_and_fold):
+ Declare.
+ * tree-ssa.c (verify_use): Fix error message.
+ (propagate_into_addr, replace_immediate_uses, get_eq_name,
+ check_phi_redundancy, kill_redundant_phi_nodes,
+ pass_redundant_phi): Remove. Update all users.
+ * tree-vect-transform.c (vect_create_data_ref_ptr): Call
+ add_type_alias, if necessary.
+ * tree-vectorizer.h (struct _stmt_vect_info): Update
+ documentation for field 'memtag'.
+ * tree-vrp.c: New file.
+ * tree.def (ASSERT_EXPR): Define.
+ * tree.h (ASSERT_EXPR_VAR): Define.
+ (ASSERT_EXPR_COND): Define.
+ (SSA_NAME_VALUE_RANGE): Define.
+ (struct tree_ssa_name): Add field 'value_range'.
+ (PHI_REWRITTEN): Remove.
+ (struct tree_phi_node): Remove field 'rewritten'.
+ * doc/invoke.texi (-fdump-tree-storeccp, -ftree-copy-prop,
+ -ftree-store-copy-prop): Document.
+ * doc/tree-ssa.texi: Remove broken link to McCAT's compiler.
+ Document usage of update_ssa.
+
2005-04-08 David Edelsohn <edelsohn@gnu.org>
PR target/20814
diff --git a/gcc/Makefile.in b/gcc/Makefile.in
index 2576739aa17..f115e5d97b2 100644
--- a/gcc/Makefile.in
+++ b/gcc/Makefile.in
@@ -958,7 +958,8 @@ OBJS-common = \
varasm.o varray.o vec.o version.o vmsdbgout.o xcoffout.o alloc-pool.o \
et-forest.o cfghooks.o bt-load.o pretty-print.o $(GGC) web.o passes.o \
rtl-profile.o tree-profile.o rtlhooks.o cfgexpand.o lambda-mat.o \
- lambda-trans.o lambda-code.o tree-loop-linear.o tree-ssa-sink.o
+ lambda-trans.o lambda-code.o tree-loop-linear.o tree-ssa-sink.o \
+ tree-vrp.o
OBJS-md = $(out_object_file)
OBJS-archive = $(EXTRA_OBJS) $(host_hook_obj) tree-inline.o \
@@ -1654,7 +1655,7 @@ tree-nrv.o : tree-nrv.c $(CONFIG_H) $(SYSTEM_H) coretypes.h \
tree-ssa-copy.o : tree-ssa-copy.c $(TREE_FLOW_H) $(CONFIG_H) $(SYSTEM_H) \
$(RTL_H) $(TREE_H) $(TM_P_H) $(EXPR_H) $(GGC_H) output.h diagnostic.h \
errors.h function.h $(TIMEVAR_H) $(TM_H) coretypes.h $(TREE_DUMP_H) \
- $(BASIC_BLOCK_H) tree-pass.h langhooks.h
+ $(BASIC_BLOCK_H) tree-pass.h langhooks.h tree-ssa-propagate.h
tree-ssa-propagate.o : tree-ssa-propagate.c $(TREE_FLOW_H) $(CONFIG_H) \
$(SYSTEM_H) $(RTL_H) $(TREE_H) $(TM_P_H) $(EXPR_H) $(GGC_H) output.h \
diagnostic.h errors.h function.h $(TIMEVAR_H) $(TM_H) coretypes.h \
@@ -1691,6 +1692,10 @@ tree-ssa-pre.o : tree-ssa-pre.c $(TREE_FLOW_H) $(CONFIG_H) \
tree-vn.o : tree-vn.c $(CONFIG_H) $(SYSTEM_H) coretypes.h $(TM_H) $(GGC_H) \
$(TREE_H) $(TREE_FLOW_H) $(HASHTAB_H) langhooks.h tree-pass.h \
$(TREE_DUMP_H) diagnostic.h
+tree-vrp.o : tree-vrp.c $(CONFIG_H) $(SYSTEM_H) coretypes.h $(TM_H) $(TREE_H) \
+ $(TREE_FLOW_H) tree-pass.h $(TREE_DUMP_H) diagnostic.h $(GGC_H) \
+ $(BASIC_BLOCK_H) tree-ssa-propagate.h $(FLAGS_H) $(TREE_DUMP_H) \
+ $(CFGLOOP_H) tree-scalar-evolution.h tree-chrec.h
tree-cfg.o : tree-cfg.c $(TREE_FLOW_H) $(CONFIG_H) $(SYSTEM_H) \
$(RTL_H) $(TREE_H) $(TM_P_H) $(EXPR_H) $(GGC_H) $(FLAGS_H) output.h \
diagnostic.h errors.h function.h $(TIMEVAR_H) $(TM_H) coretypes.h \
diff --git a/gcc/basic-block.h b/gcc/basic-block.h
index a767c6b7fa1..0fa8ce55e83 100644
--- a/gcc/basic-block.h
+++ b/gcc/basic-block.h
@@ -896,6 +896,8 @@ extern void calculate_dominance_info (enum cdi_direction);
extern void free_dominance_info (enum cdi_direction);
extern basic_block nearest_common_dominator (enum cdi_direction,
basic_block, basic_block);
+extern basic_block nearest_common_dominator_for_set (enum cdi_direction,
+ bitmap);
extern void set_immediate_dominator (enum cdi_direction, basic_block,
basic_block);
extern basic_block get_immediate_dominator (enum cdi_direction, basic_block);
diff --git a/gcc/common.opt b/gcc/common.opt
index ae1fe9c01d6..b75785c5087 100644
--- a/gcc/common.opt
+++ b/gcc/common.opt
@@ -828,6 +828,10 @@ ftree-ccp
Common Report Var(flag_tree_ccp)
Enable SSA-CCP optimization on trees
+ftree-store-ccp
+Common Report Var(flag_tree_store_ccp)
+Enable SSA-CCP optimization for stores and loads
+
ftree-ch
Common Report Var(flag_tree_ch)
Enable loop header copying on trees
@@ -840,6 +844,14 @@ ftree-copyrename
Common Report Var(flag_tree_copyrename)
Replace SSA temporaries with better names in copies.
+ftree-copy-prop
+Common Report Var(flag_tree_copy_prop)
+Enable copy propagation on trees
+
+ftree-store-copy-prop
+Common Report Var(flag_tree_store_copy_prop)
+Enable copy propagation for stores and loads
+
ftree-dce
Common Report Var(flag_tree_dce)
Enable SSA dead code elimination optimization on trees
@@ -896,6 +908,10 @@ ftree-lrs
Common Report Var(flag_tree_live_range_split)
Perform live range splitting during the SSA->normal pass.
+ftree-vrp
+Common Report Var(flag_tree_vrp) Init(0)
+Perform Value Range Propagation on trees
+
funit-at-a-time
Common Report Var(flag_unit_at_a_time)
Compile whole compilation unit at a time
diff --git a/gcc/doc/invoke.texi b/gcc/doc/invoke.texi
index e36aa3a2e04..8a1cfbb73ad 100644
--- a/gcc/doc/invoke.texi
+++ b/gcc/doc/invoke.texi
@@ -269,6 +269,7 @@ Objective-C and Objective-C++ Dialects}.
-fdump-tree-salias @gol
-fdump-tree-fre@r{[}-@var{n}@r{]} @gol
-ftree-vectorizer-verbose=@var{n} @gol
+-fdump-tree-storeccp@r{[}-@var{n}@r{]} @gol
-feliminate-dwarf2-dups -feliminate-unused-debug-types @gol
-feliminate-unused-debug-symbols -fmem-report -fprofile-arcs -ftree-based-profiling @gol
-frandom-seed=@var{string} -fsched-verbose=@var{n} @gol
@@ -324,6 +325,7 @@ Objective-C and Objective-C++ Dialects}.
-ftree-dominator-opts -ftree-dse -ftree-copyrename -ftree-sink @gol
-ftree-ch -ftree-sra -ftree-ter -ftree-lrs -ftree-fre -ftree-vectorize @gol
-ftree-salias -fweb @gol
+-ftree-copy-prop -ftree-store-ccp -ftree-store-copy-prop @gol
--param @var{name}=@var{value}
-O -O0 -O1 -O2 -O3 -Os}
@@ -3879,6 +3881,11 @@ appending @file{.alias} to the source file name.
Dump each function after CCP@. The file name is made by appending
@file{.ccp} to the source file name.
+@item storeccp
+@opindex fdump-tree-storeccp
+Dump each function after STORE-CCP. The file name is made by appending
+@file{.storeccp} to the source file name.
+
@item pre
@opindex fdump-tree-pre
Dump trees after partial redundancy elimination. The file name is made
@@ -3889,6 +3896,16 @@ by appending @file{.pre} to the source file name.
Dump trees after full redundancy elimination. The file name is made
by appending @file{.fre} to the source file name.
+@item copyprop
+@opindex fdump-tree-copyprop
+Dump trees after copy propagation. The file name is made
+by appending @file{.copyprop} to the source file name.
+
+@item store_copyprop
+@opindex fdump-tree-store_copyprop
+Dump trees after store copy-propagation. The file name is made
+by appending @file{.store_copyprop} to the source file name.
+
@item dce
@opindex fdump-tree-dce
Dump each function after dead code elimination. The file name is made by
@@ -4745,6 +4762,17 @@ that are computed on all paths leading to the redundant computation.
This analysis faster than PRE, though it exposes fewer redundancies.
This flag is enabled by default at @option{-O} and higher.
+@item -ftree-copy-prop
+Perform copy propagation on trees. This pass eliminates unnecessary
+copy operations. This flag is enabled by default at @option{-O} and
+higher.
+
+@item -ftree-store-copy-prop
+Perform copy propagation of memory loads and stores. This pass
+eliminates unnecessary copy operations in memory references
+(structures, global variables, arrays, etc). This flag is enabled by
+default at @option{-O2} and higher.
+
@item -ftree-salias
Perform structural alias analysis on trees. This flag
is enabled by default at @option{-O} and higher.
@@ -4754,8 +4782,15 @@ Perform forward store motion on trees. This flag is
enabled by default at @option{-O} and higher.
@item -ftree-ccp
-Perform sparse conditional constant propagation (CCP) on trees. This flag
-is enabled by default at @option{-O} and higher.
+Perform sparse conditional constant propagation (CCP) on trees. This
+pass only operates on local scalar variables and is enabled by default
+at @option{-O} and higher.
+
+@item -ftree-store-ccp
+Perform sparse conditional constant propagation (CCP) on trees. This
+pass operates on both local scalar variables and memory stores and
+loads (global variables, structures, arrays, etc). This flag is
+enabled by default at @option{-O2} and higher.
@item -ftree-dce
Perform dead code elimination (DCE) on trees. This flag is enabled by
diff --git a/gcc/doc/tree-ssa.texi b/gcc/doc/tree-ssa.texi
index 27e0d34f550..d4cf83825c5 100644
--- a/gcc/doc/tree-ssa.texi
+++ b/gcc/doc/tree-ssa.texi
@@ -83,8 +83,7 @@ perfectly happy to take it as input and spit out GIMPLE@.
GIMPLE is a simplified subset of GENERIC for use in optimization. The
particular subset chosen (and the name) was heavily influenced by the
-SIMPLE IL used by the McCAT compiler project at McGill University
-(@uref{http://www-acaps.cs.mcgill.ca/info/McCAT/McCAT.html}),
+SIMPLE IL used by the McCAT compiler project at McGill University,
though we have made some different choices. For one thing, SIMPLE
doesn't support @code{goto}; a production compiler can't afford that
kind of restriction.
@@ -1111,18 +1110,129 @@ Returns the @code{SSA_NAME} for the @var{i}th argument of @var{phi}.
@subsection Preserving the SSA form
-@findex vars_to_rename
+@findex update_ssa
@cindex preserving SSA form
Some optimization passes make changes to the function that
invalidate the SSA property. This can happen when a pass has
-added new variables or changed the program so that variables that
-were previously aliased aren't anymore.
-
-Whenever something like this happens, the affected variables must
-be renamed into SSA form again. To do this, you should mark the
-new variables in the global bitmap @code{vars_to_rename}. Once
-your pass has finished, the pass manager will invoke the SSA
-renamer to put the program into SSA once more.
+added new symbols or changed the program so that variables that
+were previously aliased aren't anymore. Whenever something like this
+happens, the affected symbols must be renamed into SSA form again.
+Transformations that emit new code or replicate existing statements
+will also need to update the SSA form@.
+
+Since GCC implements two different SSA forms for register and virtual
+variables, keeping the SSA form up to date depends on whether you are
+updating register or virtual names. In both cases, the general idea
+behind incremental SSA updates is similar: when new SSA names are
+created, they typically are meant to replace other existing names in
+the program@.
+
+For instance, given the following code:
+
+@smallexample
+ 1 L0:
+ 2 x_1 = PHI (0, x_5)
+ 3 if (x_1 < 10)
+ 4 if (x_1 > 7)
+ 5 y_2 = 0
+ 6 else
+ 7 y_3 = x_1 + x_7
+ 8 endif
+ 9 x_5 = x_1 + 1
+ 10 goto L0;
+ 11 endif
+@end smallexample
+
+Suppose that we insert new names @code{x_10} and @code{x_11} (lines
+@code{4} and @code{8})@.
+
+@smallexample
+ 1 L0:
+ 2 x_1 = PHI (0, x_5)
+ 3 if (x_1 < 10)
+ 4 x_10 = ...
+ 5 if (x_1 > 7)
+ 6 y_2 = 0
+ 7 else
+ 8 x_11 = ...
+ 9 y_3 = x_1 + x_7
+ 10 endif
+ 11 x_5 = x_1 + 1
+ 12 goto L0;
+ 13 endif
+@end smallexample
+
+We want to replace all the uses of @code{x_1} with the new definitions
+of @code{x_10} and @code{x_11}. Note that the only uses that should
+be replaced are those at lines @code{5}, @code{9} and @code{11}.
+Also, the use of @code{x_7} at line @code{9} should @emph{not} be
+replaced (this is why we cannot just mark symbol @code{x} for
+renaming)@.
+
+Additionally, we may need to insert a PHI node at line @code{11}
+because that is a merge point for @code{x_10} and @code{x_11}. So the
+use of @code{x_1} at line @code{11} will be replaced with the new PHI
+node. The insertion of PHI nodes is optional. They are not strictly
+necessary to preserve the SSA form, and depending on what the caller
+inserted, they may not even be useful for the optimizers@.
+
+Updating the SSA form is a two step process. First, the pass has to
+identify which names need to be updated and/or which symbols need to
+be renamed into SSA form for the first time. When new names are
+introduced to replace existing names in the program, the mapping
+between the old and the new names is registered by calling
+@code{register_new_name_mapping} (note that if your pass creates new
+code by duplicating basic blocks, the call to @code{tree_duplicate_bb}
+will set up the necessary mappings automatically). On the other hand,
+if your pass exposes a new symbol that should be put in SSA form for
+the first time, the new symbol should be registered with
+@code{mark_sym_for_renaming}.
+
+After the replacement mappings have been registered and new symbols
+marked for renaming, a call to @code{update_ssa} makes the registered
+changes. This can be done with an explicit call or by creating
+@code{TODO} flags in the @code{tree_opt_pass} structure for your pass.
+There are several @code{TODO} flags that control the behaviour of
+@code{update_ssa}:
+
+@itemize @bullet
+@item @code{TODO_update_ssa}. Update the SSA form inserting PHI nodes
+ for newly exposed symbols and virtual names marked for updating.
+ When updating real names, only insert PHI nodes for a real name
+ @code{O_j} in blocks reached by all the new and old definitions for
+ @code{O_j}. If the iterated dominance frontier for @code{O_j}
+ is not pruned, we may end up inserting PHI nodes in blocks that
+ have one or more edges with no incoming definition for
+ @code{O_j}. This would lead to uninitialized warnings for
+ @code{O_j}'s symbol@.
+
+@item @code{TODO_update_ssa_no_phi}. Update the SSA form without
+ inserting any new PHI nodes at all. This is used by passes that
+ have either inserted all the PHI nodes themselves or passes that
+ need only to patch use-def and def-def chains for virtuals
+ (e.g., DCE)@.
+
+
+@item @code{TODO_update_ssa_full_phi}. Insert PHI nodes everywhere
+ they are needed. No pruning of the IDF is done. This is used
+ by passes that need the PHI nodes for @code{O_j} even if it
+ means that some arguments will come from the default definition
+ of @code{O_j}'s symbol (e.g., @code{pass_linear_transform})@.
+
+ WARNING: If you need to use this flag, chances are that your
+ pass may be doing something wrong. Inserting PHI nodes for an
+ old name where not all edges carry a new replacement may lead to
+ silent codegen errors or spurious uninitialized warnings@.
+
+@item @code{TODO_update_ssa_only_virtuals}. Passes that update the
+ SSA form on their own may want to delegate the updating of
+ virtual names to the generic updater. Since FUD chains are
+ easier to maintain, this simplifies the work they need to do.
+ NOTE: If this flag is used, any OLD->NEW mappings for real names
+ are explicitly destroyed and only the symbols marked for
+ renaming are processed@.
+@end itemize
+
@subsection Examining @code{SSA_NAME} nodes
@cindex examining SSA_NAMEs
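
The texinfo text above describes the new incremental-update API. As a rough sketch of the calling pattern it implies (hedged: the function, statement and variable names below are hypothetical, and the argument order of register_new_name_mapping is assumed from the surrounding documentation; only mark_sym_for_renaming, register_new_name_mapping, need_ssa_update_p, update_ssa and TODO_update_ssa come from this patch):

    /* Hypothetical fragment of a pass that exposes a new symbol and
       creates a replacement definition for an existing SSA name.  */
    static void
    example_preserve_ssa (tree new_stmt, tree old_name, tree exposed_sym)
    {
      tree new_name;

      /* A symbol that must be put into SSA form for the first time.  */
      mark_sym_for_renaming (exposed_sym);

      /* NEW_STMT is assumed to define a replacement for OLD_NAME.
         Create the new name, store it into NEW_STMT's DEF operand
         (not shown), and record the mapping so that update_ssa
         rewrites the uses of OLD_NAME dominated by NEW_STMT.  */
      new_name = make_ssa_name (SSA_NAME_VAR (old_name), new_stmt);
      register_new_name_mapping (new_name, old_name);

      /* Passes normally request this via TODO_update_ssa in their
         todo_flags_finish; a direct call is shown for clarity.  */
      if (need_ssa_update_p ())
        update_ssa (TODO_update_ssa);
    }
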
diff --git a/gcc/dominance.c b/gcc/dominance.c
index d48701d96a5..b07f6c23da4 100644
--- a/gcc/dominance.c
+++ b/gcc/dominance.c
@@ -797,6 +797,27 @@ nearest_common_dominator (enum cdi_direction dir, basic_block bb1, basic_block b
return et_nca (bb1->dom[dir], bb2->dom[dir])->data;
}
+
+/* Find the nearest common dominator for the basic blocks in BLOCKS,
+ using dominance direction DIR. */
+
+basic_block
+nearest_common_dominator_for_set (enum cdi_direction dir, bitmap blocks)
+{
+ unsigned i, first;
+ bitmap_iterator bi;
+ basic_block dom;
+
+ first = bitmap_first_set_bit (blocks);
+ dom = BASIC_BLOCK (first);
+ EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i, bi)
+ if (dom != BASIC_BLOCK (i))
+ dom = nearest_common_dominator (dir, dom, BASIC_BLOCK (i));
+
+ return dom;
+}
+
+
/* Return TRUE in case BB1 is dominated by BB2. */
bool
dominated_by_p (enum cdi_direction dir, basic_block bb1, basic_block bb2)
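
One expected use of the new helper, per the ChangeLog entry for insert_updated_phi_nodes_for, is to pick a single starting point that dominates a set of definition blocks. A hedged sketch (the wrapper name and the DEF_BLOCKS bitmap are hypothetical):

    /* Return a block from which PHI insertion for a name defined in
       the blocks recorded in DEF_BLOCKS may start: the nearest common
       dominator of those blocks dominates every definition.  */
    static basic_block
    entry_for_phi_insertion (bitmap def_blocks)
    {
      return nearest_common_dominator_for_set (CDI_DOMINATORS, def_blocks);
    }
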
diff --git a/gcc/domwalk.c b/gcc/domwalk.c
index 15b1dff82db..87c841b9ffa 100644
--- a/gcc/domwalk.c
+++ b/gcc/domwalk.c
@@ -145,6 +145,14 @@ walk_dominator_tree (struct dom_walk_data *walk_data, basic_block bb)
void *bd = NULL;
basic_block dest;
block_stmt_iterator bsi;
+ bool is_interesting;
+
+ /* If block BB is not interesting to the caller, then none of the
+ callbacks that walk the statements in BB are going to be
+ executed. */
+ is_interesting = bb->index < 0
+ || walk_data->interesting_blocks == NULL
+ || TEST_BIT (walk_data->interesting_blocks, bb->index);
/* Callback to initialize the local data structure. */
if (walk_data->initialize_block_local_data)
@@ -179,7 +187,7 @@ walk_dominator_tree (struct dom_walk_data *walk_data, basic_block bb)
(*walk_data->before_dom_children_before_stmts) (walk_data, bb);
/* Statement walk before walking dominator children. */
- if (walk_data->before_dom_children_walk_stmts)
+ if (is_interesting && walk_data->before_dom_children_walk_stmts)
{
if (walk_data->walk_stmts_backward)
for (bsi = bsi_last (bb); !bsi_end_p (bsi); bsi_prev (&bsi))
@@ -211,7 +219,7 @@ walk_dominator_tree (struct dom_walk_data *walk_data, basic_block bb)
(*walk_data->after_dom_children_before_stmts) (walk_data, bb);
/* Statement walk after walking dominator children. */
- if (walk_data->after_dom_children_walk_stmts)
+ if (is_interesting && walk_data->after_dom_children_walk_stmts)
{
if (walk_data->walk_stmts_backward)
for (bsi = bsi_last (bb); !bsi_end_p (bsi); bsi_prev (&bsi))
diff --git a/gcc/domwalk.h b/gcc/domwalk.h
index fbf549bbd98..44ea39676e9 100644
--- a/gcc/domwalk.h
+++ b/gcc/domwalk.h
@@ -105,6 +105,14 @@ struct dom_walk_data
/* Stack of available block local structures. */
varray_type free_block_data;
+
+ /* Interesting blocks to process. If this field is not NULL, this
+ set is used to determine which blocks to walk. If we encounter
+ block I in the dominator traversal, but block I is not present in
+ INTERESTING_BLOCKS, then none of the callback functions are
+ invoked on it. This is useful when a particular traversal wants
+ to filter out non-interesting blocks from the dominator tree. */
+ sbitmap interesting_blocks;
};
void walk_dominator_tree (struct dom_walk_data *, basic_block);
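
Together with the domwalk.c change above, a caller can now restrict the statement callbacks to a chosen subset of blocks. A minimal hedged sketch, assuming the usual GCC system headers and the dom_walk_data fields declared in this header (the callback and wrapper names are hypothetical):

    /* Hypothetical per-statement callback.  */
    static void
    visit_stmt (struct dom_walk_data *walk_data ATTRIBUTE_UNUSED,
                basic_block bb ATTRIBUTE_UNUSED,
                block_stmt_iterator bsi ATTRIBUTE_UNUSED)
    {
      /* ... examine bsi_stmt (bsi) ...  */
    }

    static void
    walk_interesting_blocks (sbitmap interesting)
    {
      struct dom_walk_data walk_data;

      memset (&walk_data, 0, sizeof (walk_data));
      walk_data.dom_direction = CDI_DOMINATORS;
      walk_data.before_dom_children_walk_stmts = visit_stmt;

      /* Blocks not set in INTERESTING are still traversed, but their
         statement-walking callbacks are skipped.  */
      walk_data.interesting_blocks = interesting;

      init_walk_dominator_tree (&walk_data);
      walk_dominator_tree (&walk_data, ENTRY_BLOCK_PTR);
      fini_walk_dominator_tree (&walk_data);
    }
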
diff --git a/gcc/fold-const.c b/gcc/fold-const.c
index c59a981351c..6a84630bb60 100644
--- a/gcc/fold-const.c
+++ b/gcc/fold-const.c
@@ -10005,6 +10005,21 @@ fold (tree expr)
case CONST_DECL:
return fold (DECL_INITIAL (t));
+ case ASSERT_EXPR:
+ {
+ /* Given ASSERT_EXPR <Y, COND>, return Y if COND can be folded
+ to boolean_true_node. If COND folds to boolean_false_node,
+ return ASSERT_EXPR <Y, 0>. Otherwise, return the original
+ expression. */
+ tree c = fold (ASSERT_EXPR_COND (t));
+ if (c == boolean_true_node)
+ return ASSERT_EXPR_VAR (t);
+ else if (c == boolean_false_node)
+ return build (ASSERT_EXPR, TREE_TYPE (t), ASSERT_EXPR_VAR (t), c);
+ else
+ return t;
+ }
+
default:
return t;
} /* switch (code) */
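
ASSERT_EXPR is the new node defined later in this patch (tree.def, tree.h): ASSERT_EXPR <V, COND> evaluates to V while asserting that COND holds at that point, and tree-vrp.c inserts such assertions after predicates to carry range information. A hedged illustration of the intermediate-language shape and of the fold rule above (not taken verbatim from the patch; the SSA names are made up):

      if (x_3 > 10)
        /* Uses of x_3 dominated by the guard are rewritten to x_4,
           so VRP can record a range such as [11, +INF] for x_4.  */
        x_4 = ASSERT_EXPR <x_3, x_3 > 10>;

      /* fold collapses an assertion whose condition is known true,
         e.g. ASSERT_EXPR <y_1, 1> folds back to y_1, while a condition
         that folds to 0 is rejected by verify_expr (see the tree-cfg.c
         hunk further down).  */
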
diff --git a/gcc/opts.c b/gcc/opts.c
index bd971c8e3f9..70a9b22ac5b 100644
--- a/gcc/opts.c
+++ b/gcc/opts.c
@@ -524,6 +524,7 @@ decode_options (unsigned int argc, const char **argv)
flag_tree_sra = 1;
flag_tree_copyrename = 1;
flag_tree_fre = 1;
+ flag_tree_copy_prop = 1;
flag_tree_sink = 1;
flag_tree_salias = 1;
@@ -562,6 +563,9 @@ decode_options (unsigned int argc, const char **argv)
flag_reorder_blocks = 1;
flag_reorder_functions = 1;
flag_unit_at_a_time = 1;
+ flag_tree_store_ccp = 1;
+ flag_tree_store_copy_prop = 1;
+ flag_tree_vrp = 1;
if (!optimize_size)
{
diff --git a/gcc/testsuite/ChangeLog b/gcc/testsuite/ChangeLog
index c8ccbcde4fe..f6f8cefcdaf 100644
--- a/gcc/testsuite/ChangeLog
+++ b/gcc/testsuite/ChangeLog
@@ -1,3 +1,20 @@
+2005-04-08 Diego Novillo <dnovillo@redhat.com>
+
+ * g++.dg/tree-ssa/pr18178.C: New test.
+ * gcc.c-torture/execute/20030216-1.x: Ignore at -O1.
+ * gcc.c-torture/execute/20041019-1.c: New test.
+ * gcc.dg/tree-ssa/20041008-1.c: New test.
+ * gcc.dg/tree-ssa/ssa-ccp-12.c: New test.
+ * gcc.dg/tree-ssa/20030731-2.c: Update to use -fdump-tree-store_ccp.
+ * gcc.dg/tree-ssa/20030917-1.c: Likewise.
+ * gcc.dg/tree-ssa/20030917-3.c: Likewise.
+ * gcc.dg/tree-ssa/20040721-1.c: Likewise.
+ * gcc.dg/tree-ssa/ssa-ccp-1.c: Likewise.
+ * gcc.dg/tree-ssa/ssa-ccp-2.c: Likewise.
+ * gcc.dg/tree-ssa/ssa-ccp-3.c: Likewise.
+ * gcc.dg/tree-ssa/ssa-ccp-7.c: Likewise.
+ * gcc.dg/tree-ssa/ssa-ccp-9.c: Likewise.
+
2005-04-09 Hans-Peter Nilsson <hp@axis.com>
PR rtl-optimization/20466
diff --git a/gcc/testsuite/g++.dg/tree-ssa/pr18178.C b/gcc/testsuite/g++.dg/tree-ssa/pr18178.C
new file mode 100644
index 00000000000..fd1777ad013
--- /dev/null
+++ b/gcc/testsuite/g++.dg/tree-ssa/pr18178.C
@@ -0,0 +1,46 @@
+/* { dg-do compile } */
+/* { dg-options "-O2 -fdump-tree-vrp" } */
+
+// Define this to see it work.
+// #define WORK_WORK_WORK
+
+#define THIRD
+
+#ifdef THIRD
+#define FIRST i < 0 ||
+#define ORIG int
+#define CAST
+#else
+
+#define FIRST
+#ifdef WORK_WORK_WORK
+#define ORIG unsigned int
+#define CAST
+#else
+#define ORIG int
+#define CAST (unsigned)
+#endif // WORK_WORK_WORK
+
+#endif // THIRD
+
+struct array
+{
+ const ORIG len;
+ int *data;
+};
+
+extern void call (ORIG);
+
+void doit (array *a)
+{
+ for (ORIG i = 0; i < a->len; ++i)
+ {
+ if (FIRST CAST (i) >= CAST (a->len))
+ throw 5;
+ call (a->data[i]);
+ }
+}
+
+/* VRP should remove all but 1 if() in the loop. */
+
+/* { dg-final { scan-tree-dump-times "if " 1 "vrp"} } */
diff --git a/gcc/testsuite/gcc.c-torture/execute/20030216-1.x b/gcc/testsuite/gcc.c-torture/execute/20030216-1.x
new file mode 100644
index 00000000000..a0e03e379df
--- /dev/null
+++ b/gcc/testsuite/gcc.c-torture/execute/20030216-1.x
@@ -0,0 +1,12 @@
+# This test requires constant propagation of loads and stores to be
+# enabled. This is only guaranteed at -O2 and higher. Do not run
+# at -O1.
+
+set torture_eval_before_compile {
+ if {[string match {*-O1*} "$option"]} {
+ continue
+ }
+}
+
+return 0
+
diff --git a/gcc/testsuite/gcc.c-torture/execute/20041019-1.c b/gcc/testsuite/gcc.c-torture/execute/20041019-1.c
new file mode 100644
index 00000000000..3c56b31e9ce
--- /dev/null
+++ b/gcc/testsuite/gcc.c-torture/execute/20041019-1.c
@@ -0,0 +1,52 @@
+test_store_ccp (int i)
+{
+ int *p, a, b, c;
+
+ if (i < 5)
+ p = &a;
+ else if (i > 8)
+ p = &b;
+ else
+ p = &c;
+
+ *p = 10;
+ b = 3;
+
+ /* STORE-CCP was wrongfully propagating 10 into *p. */
+ return *p + 2;
+}
+
+
+test_store_copy_prop (int i)
+{
+ int *p, a, b, c;
+
+ if (i < 5)
+ p = &a;
+ else if (i > 8)
+ p = &b;
+ else
+ p = &c;
+
+ *p = i;
+ b = i + 1;
+
+ /* STORE-COPY-PROP was wrongfully propagating i into *p. */
+ return *p;
+}
+
+
+main()
+{
+ int x;
+
+ x = test_store_ccp (10);
+ if (x == 12)
+ abort ();
+
+ x = test_store_copy_prop (9);
+ if (x == 9)
+ abort ();
+
+ return 0;
+}
diff --git a/gcc/testsuite/gcc.dg/tree-ssa/20030731-2.c b/gcc/testsuite/gcc.dg/tree-ssa/20030731-2.c
index 885bca11771..9b205813caf 100644
--- a/gcc/testsuite/gcc.dg/tree-ssa/20030731-2.c
+++ b/gcc/testsuite/gcc.dg/tree-ssa/20030731-2.c
@@ -1,5 +1,5 @@
/* { dg-do compile } */
-/* { dg-options "-O1 -fdump-tree-ccp" } */
+/* { dg-options "-O1 -fdump-tree-store_ccp" } */
bar (int i, int partial, int args_addr)
@@ -13,5 +13,5 @@ bar (int i, int partial, int args_addr)
/* There should be only one IF conditional since the first does nothing
useful. */
-/* { dg-final { scan-tree-dump-times "if " 1 "ccp"} } */
-/* { dg-final { cleanup-tree-dump "ccp" } } */
+/* { dg-final { scan-tree-dump-times "if " 1 "store_ccp"} } */
+/* { dg-final { cleanup-tree-dump "store_ccp" } } */
diff --git a/gcc/testsuite/gcc.dg/tree-ssa/20030917-1.c b/gcc/testsuite/gcc.dg/tree-ssa/20030917-1.c
index 62f5b2497cc..b7a5450619c 100644
--- a/gcc/testsuite/gcc.dg/tree-ssa/20030917-1.c
+++ b/gcc/testsuite/gcc.dg/tree-ssa/20030917-1.c
@@ -1,5 +1,5 @@
/* { dg-do compile } */
-/* { dg-options "-O1 -fdump-tree-ccp" } */
+/* { dg-options "-O1 -fdump-tree-store_ccp" } */
extern int board[];
@@ -15,5 +15,5 @@ findbestextension (int blah, int blah2)
}
/* The argument to "foo" should be a variable, not a constant. */
-/* { dg-final { scan-tree-dump-times "foo .defval" 1 "ccp"} } */
-/* { dg-final { cleanup-tree-dump "ccp" } } */
+/* { dg-final { scan-tree-dump-times "foo .defval" 1 "store_ccp"} } */
+/* { dg-final { cleanup-tree-dump "store_ccp" } } */
diff --git a/gcc/testsuite/gcc.dg/tree-ssa/20030917-3.c b/gcc/testsuite/gcc.dg/tree-ssa/20030917-3.c
index bfa2f8a7379..24201469069 100644
--- a/gcc/testsuite/gcc.dg/tree-ssa/20030917-3.c
+++ b/gcc/testsuite/gcc.dg/tree-ssa/20030917-3.c
@@ -1,5 +1,5 @@
/* { dg-do compile } */
-/* { dg-options "-O1 -fno-tree-dominator-opts -fdump-tree-ccp" } */
+/* { dg-options "-O1 -fno-tree-dominator-opts -fdump-tree-store_ccp" } */
extern int printf (const char *, ...);
@@ -20,5 +20,5 @@ main ()
/* The argument to "printf" should be a constant, not a variable. */
-/* { dg-final { scan-tree-dump-times "printf.*, 0" 1 "ccp"} } */
-/* { dg-final { cleanup-tree-dump "ccp" } } */
+/* { dg-final { scan-tree-dump-times "printf.*, 0" 1 "store_ccp"} } */
+/* { dg-final { cleanup-tree-dump "store_ccp" } } */
diff --git a/gcc/testsuite/gcc.dg/tree-ssa/20040721-1.c b/gcc/testsuite/gcc.dg/tree-ssa/20040721-1.c
index 44dacef0d9c..4df313c7bc1 100644
--- a/gcc/testsuite/gcc.dg/tree-ssa/20040721-1.c
+++ b/gcc/testsuite/gcc.dg/tree-ssa/20040721-1.c
@@ -1,5 +1,5 @@
/* { dg-do compile } */
-/* { dg-options "-O1 -fdump-tree-ccp-vops" } */
+/* { dg-options "-O2 -fdump-tree-store_ccp-vops" } */
/* Test to check whether global variables are being
constant propagated. */
@@ -24,5 +24,5 @@ main ()
}
/* There should be no G on the RHS of an assignment. */
-/* { dg-final { scan-tree-dump-times "= G;" 0 "ccp"} } */
-/* { dg-final { cleanup-tree-dump "ccp" } } */
+/* { dg-final { scan-tree-dump-times "= G;" 0 "store_ccp"} } */
+/* { dg-final { cleanup-tree-dump "store_ccp" } } */
diff --git a/gcc/testsuite/gcc.dg/tree-ssa/20041008-1.c b/gcc/testsuite/gcc.dg/tree-ssa/20041008-1.c
new file mode 100644
index 00000000000..ca24427b65c
--- /dev/null
+++ b/gcc/testsuite/gcc.dg/tree-ssa/20041008-1.c
@@ -0,0 +1,38 @@
+/* { dg-do run } */
+/* { dg-options "-O2" } */
+
+struct A {
+ int x;
+ int y;
+};
+
+baz (struct A *a)
+{
+ a->x = 3;
+ a->y = 2;
+}
+
+foo (int i)
+{
+ struct A a;
+
+ /* Make sure we can't scalarize 'a'. */
+ baz (&a);
+
+ if (i > 10)
+ a.x = i;
+ else
+ a.x = i;
+
+ /* Copy propagation should prove that this predicate is always false. */
+ if (a.x != i)
+ link_error ();
+
+ return a.x;
+}
+
+main ()
+{
+ foo (30);
+ return 0;
+}
diff --git a/gcc/testsuite/gcc.dg/tree-ssa/ssa-ccp-1.c b/gcc/testsuite/gcc.dg/tree-ssa/ssa-ccp-1.c
index a7d5cd82a50..259d12ac683 100644
--- a/gcc/testsuite/gcc.dg/tree-ssa/ssa-ccp-1.c
+++ b/gcc/testsuite/gcc.dg/tree-ssa/ssa-ccp-1.c
@@ -1,5 +1,5 @@
/* { dg-do compile } */
-/* { dg-options "-O1 -fdump-tree-ccp" } */
+/* { dg-options "-O1 -fdump-tree-store_ccp" } */
extern void link_error (void);
@@ -71,5 +71,5 @@ void test11111 (int p, int q, int r)
/* There should be not link_error calls, if there is any the
optimization has failed */
-/* { dg-final { scan-tree-dump-times "link_error" 0 "ccp"} } */
-/* { dg-final { cleanup-tree-dump "ccp" } } */
+/* { dg-final { scan-tree-dump-times "link_error" 0 "store_ccp"} } */
+/* { dg-final { cleanup-tree-dump "store_ccp" } } */
diff --git a/gcc/testsuite/gcc.dg/tree-ssa/ssa-ccp-12.c b/gcc/testsuite/gcc.dg/tree-ssa/ssa-ccp-12.c
new file mode 100644
index 00000000000..8ee9eb8d0e4
--- /dev/null
+++ b/gcc/testsuite/gcc.dg/tree-ssa/ssa-ccp-12.c
@@ -0,0 +1,32 @@
+/* { dg-do run } */
+/* { dg-options "-O2" } */
+
+struct A
+{
+ int a;
+ int b;
+};
+
+struct A a;
+const int B = 42;
+
+void foo (int i)
+{
+ if (i > 10)
+ a.a = 42;
+ else
+ {
+ a.b = 21;
+ a.a = a.b + 21;
+ }
+
+ /* This should be folded to 'if (0)' as a.a and B are both 42. */
+ if (a.a != B)
+ link_error ();
+}
+
+main ()
+{
+ foo (3);
+ return 0;
+}
diff --git a/gcc/testsuite/gcc.dg/tree-ssa/ssa-ccp-2.c b/gcc/testsuite/gcc.dg/tree-ssa/ssa-ccp-2.c
index aad1a3f3cc4..5f614f08212 100644
--- a/gcc/testsuite/gcc.dg/tree-ssa/ssa-ccp-2.c
+++ b/gcc/testsuite/gcc.dg/tree-ssa/ssa-ccp-2.c
@@ -1,5 +1,5 @@
/* { dg-do compile } */
-/* { dg-options "-O1 -fdump-tree-ccp" } */
+/* { dg-options "-O1 -fdump-tree-store_ccp" } */
extern void link_error (void);
@@ -168,5 +168,5 @@ int test99999 (void)
/* There should be not link_error calls, if there is any the
optimization has failed */
-/* { dg-final { scan-tree-dump-times "link_error" 0 "ccp"} } */
-/* { dg-final { cleanup-tree-dump "ccp" } } */
+/* { dg-final { scan-tree-dump-times "link_error" 0 "store_ccp"} } */
+/* { dg-final { cleanup-tree-dump "store_ccp" } } */
diff --git a/gcc/testsuite/gcc.dg/tree-ssa/ssa-ccp-3.c b/gcc/testsuite/gcc.dg/tree-ssa/ssa-ccp-3.c
index f15ba1ee113..d978511c281 100644
--- a/gcc/testsuite/gcc.dg/tree-ssa/ssa-ccp-3.c
+++ b/gcc/testsuite/gcc.dg/tree-ssa/ssa-ccp-3.c
@@ -1,5 +1,5 @@
/* { dg-do compile } */
-/* { dg-options "-O1 -fdump-tree-ccp" } */
+/* { dg-options "-O1 -fdump-tree-store_ccp" } */
extern void link_error (void);
@@ -131,5 +131,5 @@ int* test666 (int * __restrict__ rp1, int * __restrict__ rp2, int *p1)
optimization has failed */
/* ??? While we indeed don't handle some of these, a couple of the
restrict tests are incorrect. */
-/* { dg-final { scan-tree-dump-times "link_error" 0 "ccp" { xfail *-*-* } } } */
-/* { dg-final { cleanup-tree-dump "ccp" } } */
+/* { dg-final { scan-tree-dump-times "link_error" 0 "store_ccp" { xfail *-*-* } } } */
+/* { dg-final { cleanup-tree-dump "store_ccp" } } */
diff --git a/gcc/testsuite/gcc.dg/tree-ssa/ssa-ccp-7.c b/gcc/testsuite/gcc.dg/tree-ssa/ssa-ccp-7.c
index bd97b897c3e..7f20f062ddb 100644
--- a/gcc/testsuite/gcc.dg/tree-ssa/ssa-ccp-7.c
+++ b/gcc/testsuite/gcc.dg/tree-ssa/ssa-ccp-7.c
@@ -1,5 +1,5 @@
/* { dg-do compile } */
-/* { dg-options "-O1 -fdump-tree-ccp" } */
+/* { dg-options "-O1 -fdump-tree-store_ccp" } */
extern void link_error (void);
@@ -23,5 +23,5 @@ int test7 (int a)
/* There should be not link_error calls, if there is any the
optimization has failed */
-/* { dg-final { scan-tree-dump-times "link_error" 0 "ccp"} } */
-/* { dg-final { cleanup-tree-dump "ccp" } } */
+/* { dg-final { scan-tree-dump-times "link_error" 0 "store_ccp"} } */
+/* { dg-final { cleanup-tree-dump "store_ccp" } } */
diff --git a/gcc/testsuite/gcc.dg/tree-ssa/ssa-ccp-9.c b/gcc/testsuite/gcc.dg/tree-ssa/ssa-ccp-9.c
index db19b00d145..1508ff06eec 100644
--- a/gcc/testsuite/gcc.dg/tree-ssa/ssa-ccp-9.c
+++ b/gcc/testsuite/gcc.dg/tree-ssa/ssa-ccp-9.c
@@ -1,5 +1,5 @@
/* { dg-do compile } */
-/* { dg-options "-O1 -fdump-tree-ccp" } */
+/* { dg-options "-O1 -fdump-tree-store_ccp" } */
/* Check that cprop works for assignments to array elements and structs. */
@@ -51,5 +51,5 @@ test99999 (int *arr, int j)
/* There should be no link_error calls, if there is any, the
optimization has failed */
-/* { dg-final { scan-tree-dump-times "link_error" 0 "ccp"} } */
-/* { dg-final { cleanup-tree-dump "ccp" } } */
+/* { dg-final { scan-tree-dump-times "link_error" 0 "store_ccp"} } */
+/* { dg-final { cleanup-tree-dump "store_ccp" } } */
diff --git a/gcc/timevar.def b/gcc/timevar.def
index 3df7c25ed51..b8d94083a08 100644
--- a/gcc/timevar.def
+++ b/gcc/timevar.def
@@ -65,15 +65,20 @@ DEFTIMEVAR (TV_TREE_GIMPLIFY , "tree gimplify")
DEFTIMEVAR (TV_TREE_EH , "tree eh")
DEFTIMEVAR (TV_TREE_CFG , "tree CFG construction")
DEFTIMEVAR (TV_TREE_CLEANUP_CFG , "tree CFG cleanup")
+DEFTIMEVAR (TV_TREE_VRP , "tree VRP")
+DEFTIMEVAR (TV_TREE_COPY_PROP , "tree copy propagation")
+DEFTIMEVAR (TV_TREE_STORE_COPY_PROP , "tree store copy propagation")
DEFTIMEVAR (TV_FIND_REFERENCED_VARS , "tree find referenced vars")
DEFTIMEVAR (TV_TREE_PTA , "tree PTA")
DEFTIMEVAR (TV_TREE_MAY_ALIAS , "tree alias analysis")
DEFTIMEVAR (TV_TREE_INSERT_PHI_NODES , "tree PHI insertion")
DEFTIMEVAR (TV_TREE_SSA_REWRITE_BLOCKS, "tree SSA rewrite")
DEFTIMEVAR (TV_TREE_SSA_OTHER , "tree SSA other")
+DEFTIMEVAR (TV_TREE_SSA_INCREMENTAL , "tree SSA incremental")
DEFTIMEVAR (TV_TREE_OPS , "tree operand scan")
DEFTIMEVAR (TV_TREE_SSA_DOMINATOR_OPTS , "dominator optimization")
DEFTIMEVAR (TV_TREE_SRA , "tree SRA")
+DEFTIMEVAR (TV_TREE_STORE_CCP , "tree STORE-CCP")
DEFTIMEVAR (TV_TREE_CCP , "tree CCP")
DEFTIMEVAR (TV_TREE_SPLIT_EDGES , "tree split crit edges")
DEFTIMEVAR (TV_TREE_PRE , "tree PRE")
diff --git a/gcc/tree-cfg.c b/gcc/tree-cfg.c
index bb8e256a50f..38d8215d03c 100644
--- a/gcc/tree-cfg.c
+++ b/gcc/tree-cfg.c
@@ -1282,8 +1282,7 @@ tree_can_merge_blocks_p (basic_block a, basic_block b)
&& DECL_NONLOCAL (LABEL_EXPR_LABEL (stmt)))
return false;
- /* There may be no phi nodes at the start of b. Most of these degenerate
- phi nodes should be cleaned up by kill_redundant_phi_nodes. */
+ /* There may be no PHI nodes at the start of B. */
if (phi_nodes (b))
return false;
@@ -3428,6 +3427,15 @@ verify_expr (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
}
break;
+ case ASSERT_EXPR:
+ x = fold (ASSERT_EXPR_COND (t));
+ if (x == boolean_false_node)
+ {
+ error ("ASSERT_EXPR with an always-false condition");
+ return *tp;
+ }
+ break;
+
case MODIFY_EXPR:
x = TREE_OPERAND (t, 0);
if (TREE_CODE (x) == BIT_FIELD_REF
diff --git a/gcc/tree-complex.c b/gcc/tree-complex.c
index e673aed4fce..74e746578cd 100644
--- a/gcc/tree-complex.c
+++ b/gcc/tree-complex.c
@@ -1023,7 +1023,7 @@ struct tree_opt_pass pass_lower_vector_ssa =
0, /* properties_provided */
0, /* properties_destroyed */
0, /* todo_flags_start */
- TODO_dump_func | TODO_rename_vars /* todo_flags_finish */
+ TODO_dump_func | TODO_update_ssa /* todo_flags_finish */
| TODO_ggc_collect | TODO_verify_ssa
| TODO_verify_stmts | TODO_verify_flow,
0 /* letter */
diff --git a/gcc/tree-dfa.c b/gcc/tree-dfa.c
index 38d60c0a136..c923cdad7fc 100644
--- a/gcc/tree-dfa.c
+++ b/gcc/tree-dfa.c
@@ -215,8 +215,9 @@ make_rename_temp (tree type, const char *prefix)
if (referenced_vars)
{
add_referenced_tmp_var (t);
- bitmap_set_bit (vars_to_rename, var_ann (t)->uid);
+ mark_sym_for_renaming (t);
}
+
return t;
}
@@ -617,11 +618,11 @@ add_referenced_tmp_var (tree var)
}
-/* Add all the non-SSA variables found in STMT's operands to the bitmap
- VARS_TO_RENAME. */
+/* Mark all the non-SSA variables found in STMT's operands to be
+ processed by update_ssa. */
void
-mark_new_vars_to_rename (tree stmt, bitmap vars_to_rename)
+mark_new_vars_to_rename (tree stmt)
{
ssa_op_iter iter;
tree val;
@@ -660,13 +661,11 @@ mark_new_vars_to_rename (tree stmt, bitmap vars_to_rename)
v_must_defs_after = NUM_V_MUST_DEFS (STMT_V_MUST_DEF_OPS (stmt));
FOR_EACH_SSA_TREE_OPERAND (val, stmt, iter, SSA_OP_ALL_OPERANDS)
- {
- if (DECL_P (val))
- {
- found_exposed_symbol = true;
- bitmap_set_bit (vars_to_rename, var_ann (val)->uid);
- }
- }
+ if (DECL_P (val))
+ {
+ found_exposed_symbol = true;
+ mark_sym_for_renaming (val);
+ }
/* If we found any newly exposed symbols, or if there are fewer VDEF
operands in the statement, add the variables we had set in
@@ -676,7 +675,7 @@ mark_new_vars_to_rename (tree stmt, bitmap vars_to_rename)
if (found_exposed_symbol
|| v_may_defs_before > v_may_defs_after
|| v_must_defs_before > v_must_defs_after)
- bitmap_ior_into (vars_to_rename, vars_in_vops_to_rename);
+ mark_set_for_renaming (vars_in_vops_to_rename);
BITMAP_FREE (vars_in_vops_to_rename);
}
@@ -691,7 +690,10 @@ find_new_referenced_vars_1 (tree *tp, int *walk_subtrees,
tree t = *tp;
if (TREE_CODE (t) == VAR_DECL && !var_ann (t))
- add_referenced_tmp_var (t);
+ {
+ add_referenced_tmp_var (t);
+ mark_sym_for_renaming (t);
+ }
if (IS_TYPE_OR_DECL_P (t))
*walk_subtrees = 0;
@@ -706,20 +708,6 @@ find_new_referenced_vars (tree *stmt_p)
}
-/* Mark all call-clobbered variables for renaming. */
-
-void
-mark_call_clobbered_vars_to_rename (void)
-{
- unsigned i;
- bitmap_iterator bi;
- EXECUTE_IF_SET_IN_BITMAP (call_clobbered_vars, 0, i, bi)
- {
- tree var = referenced_var (i);
- bitmap_set_bit (vars_to_rename, var_ann (var)->uid);
- }
-}
-
/* If REF is a COMPONENT_REF for a structure that can have sub-variables, and
we know where REF is accessing, return the variable in REF that has the
sub-variables. If the return value is not NULL, POFFSET will be the
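
With the bitmap argument gone, the calling convention for newly exposed operands becomes the following hedged one-line sketch (STMT stands for whatever statement the caller just modified):

      /* After modifying STMT so that it references a symbol directly,
         queue any exposed _DECL operands with the incremental updater
         instead of setting bits in the old global vars_to_rename.  */
      mark_new_vars_to_rename (stmt);
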
diff --git a/gcc/tree-flow-inline.h b/gcc/tree-flow-inline.h
index 169dce2f402..993c1ded320 100644
--- a/gcc/tree-flow-inline.h
+++ b/gcc/tree-flow-inline.h
@@ -1176,6 +1176,17 @@ op_iter_init_maydef (ssa_op_iter *ptr, tree stmt, use_operand_p *use,
op_iter_next_maydef (use, def, ptr);
}
+/* Return true if VAR cannot be modified by the program. */
+
+static inline bool
+unmodifiable_var_p (tree var)
+{
+ if (TREE_CODE (var) == SSA_NAME)
+ var = SSA_NAME_VAR (var);
+ return TREE_READONLY (var) && (TREE_STATIC (var) || DECL_EXTERNAL (var));
+}
+
+
/* Initialize iterator PTR to the operands in STMT. Return the first operands
in KILL and DEF. */
static inline void
diff --git a/gcc/tree-flow.h b/gcc/tree-flow.h
index 00a1af7d564..eaaa32dc426 100644
--- a/gcc/tree-flow.h
+++ b/gcc/tree-flow.h
@@ -80,6 +80,34 @@ struct ptr_info_def GTY(())
};
+/* Types of value ranges. */
+enum value_range_type { VR_UNDEFINED, VR_RANGE, VR_ANTI_RANGE, VR_VARYING };
+
+
+/* Ranges of values that can be associated with an SSA_NAME after VRP
+ has executed. */
+struct value_range_def GTY(())
+{
+ /* Lattice value represented by this range. */
+ enum value_range_type type;
+
+ /* Minimum and maximum values represented by this range. These
+ values are _CST nodes that should be interpreted as follows:
+
+ - If TYPE == VR_UNDEFINED then MIN and MAX must be NULL.
+
+ - If TYPE == VR_RANGE then MIN holds the minimum value and
+ MAX holds the maximum value of the range [MIN, MAX].
+
+ - If TYPE == VR_ANTI_RANGE the variable is known to NOT
+ take any values in the range [MIN, MAX]. */
+ tree min;
+ tree max;
+};
+
+typedef struct value_range_def value_range;
+
+
/*---------------------------------------------------------------------------
Tree annotations stored in tree_common.ann
---------------------------------------------------------------------------*/
@@ -534,7 +562,7 @@ extern tree create_phi_node (tree, basic_block);
extern void add_phi_arg (tree, tree, edge);
extern void remove_phi_args (edge);
extern void remove_phi_node (tree, tree);
-extern void remove_all_phi_nodes_for (bitmap);
+extern tree find_phi_node_for (basic_block, tree, tree *);
extern tree phi_reverse (tree);
extern void dump_dfa_stats (FILE *);
extern void debug_dfa_stats (void);
@@ -544,9 +572,8 @@ extern void dump_variable (FILE *, tree);
extern void debug_variable (tree);
extern tree get_virtual_var (tree);
extern void add_referenced_tmp_var (tree);
-extern void mark_new_vars_to_rename (tree, bitmap);
+extern void mark_new_vars_to_rename (tree);
extern void find_new_referenced_vars (tree *);
-void mark_call_clobbered_vars_to_rename (void);
extern tree make_rename_temp (tree, const char *);
@@ -568,6 +595,8 @@ extern void dump_points_to_info_for (FILE *, tree);
extern void debug_points_to_info_for (tree);
extern bool may_be_aliased (tree);
extern struct ptr_info_def *get_ptr_info (tree);
+extern void add_type_alias (tree, tree);
+extern void count_uses_and_derefs (tree, tree, unsigned *, unsigned *, bool *);
static inline subvar_t get_subvars_for_var (tree);
static inline bool ref_contains_array_ref (tree);
extern tree okay_component_ref_for_subvars (tree, HOST_WIDE_INT *,
@@ -596,24 +625,43 @@ extern void verify_ssa (bool);
extern void delete_tree_ssa (void);
extern void register_new_def (tree, VEC (tree_on_heap) **);
extern void walk_use_def_chains (tree, walk_use_def_chains_fn, void *, bool);
-extern void kill_redundant_phi_nodes (void);
extern bool stmt_references_memory_p (tree);
/* In tree-into-ssa.c */
-extern void rewrite_into_ssa (bool);
extern void rewrite_ssa_into_ssa (void);
-extern void rewrite_def_def_chains (void);
+void update_ssa (unsigned);
+void register_new_name_mapping (tree, tree);
+tree create_new_def_for (tree, tree, def_operand_p);
+bool need_ssa_update_p (void);
+bool name_registered_for_update_p (tree);
+bitmap ssa_names_to_replace (void);
+void release_ssa_name_after_update_ssa (tree name);
+void dump_repl_tbl (FILE *);
+void debug_repl_tbl (void);
+void dump_names_replaced_by (FILE *, tree);
+void debug_names_replaced_by (tree);
void compute_global_livein (bitmap, bitmap);
tree duplicate_ssa_name (tree, tree);
+void mark_sym_for_renaming (tree);
+void mark_set_for_renaming (bitmap);
/* In tree-ssa-ccp.c */
bool fold_stmt (tree *);
tree widen_bitfield (tree, tree, tree);
+/* In tree-vrp.c */
+value_range *get_value_range (tree);
+void dump_value_range (FILE *, value_range *);
+void debug_value_range (value_range *);
+void dump_all_value_ranges (FILE *);
+void debug_all_value_ranges (void);
+bool expr_computes_nonzero (tree);
+
/* In tree-ssa-dom.c */
extern void dump_dominator_optimization_stats (FILE *);
extern void debug_dominator_optimization_stats (void);
+int loop_depth_of_name (tree);
/* In tree-ssa-copy.c */
extern void propagate_value (use_operand_p, tree);
@@ -711,6 +759,7 @@ extern enum move_pos movement_possibility (tree);
static inline bool is_call_clobbered (tree);
static inline void mark_call_clobbered (tree);
static inline void set_is_used (tree);
+static inline bool unmodifiable_var_p (tree);
/* In tree-eh.c */
extern void make_eh_edges (tree);
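
To make the value_range encoding above concrete, a hedged sketch of a consumer (the helper is hypothetical; tree-vrp.c provides the real predicates such as expr_computes_nonzero, and integral INTEGER_CST bounds are assumed):

    /* Return true if VR proves the value can never be zero: either a
       range that excludes zero or an anti-range containing zero.  */
    static bool
    range_proves_nonzero_p (value_range *vr)
    {
      if (vr->type == VR_RANGE)
        /* [MIN, MAX] excludes 0 iff MIN > 0 or MAX < 0.  */
        return tree_int_cst_sgn (vr->min) > 0 || tree_int_cst_sgn (vr->max) < 0;
      else if (vr->type == VR_ANTI_RANGE)
        /* ~[MIN, MAX] excludes 0 iff 0 lies inside [MIN, MAX].  */
        return tree_int_cst_sgn (vr->min) <= 0 && tree_int_cst_sgn (vr->max) >= 0;

      /* VR_UNDEFINED and VR_VARYING carry no usable information.  */
      return false;
    }
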
diff --git a/gcc/tree-gimple.c b/gcc/tree-gimple.c
index 5d1edcee40a..e723b472177 100644
--- a/gcc/tree-gimple.c
+++ b/gcc/tree-gimple.c
@@ -73,6 +73,7 @@ is_gimple_formal_tmp_rhs (tree t)
case COMPLEX_CST:
case VECTOR_CST:
case OBJ_TYPE_REF:
+ case ASSERT_EXPR:
return true;
default:
diff --git a/gcc/tree-if-conv.c b/gcc/tree-if-conv.c
index fe446d9b0ab..e63dc669df2 100644
--- a/gcc/tree-if-conv.c
+++ b/gcc/tree-if-conv.c
@@ -117,7 +117,8 @@ static void add_to_predicate_list (basic_block, tree);
static tree add_to_dst_predicate_list (struct loop * loop, basic_block, tree, tree,
block_stmt_iterator *);
static void clean_predicate_lists (struct loop *loop);
-static basic_block find_phi_replacement_condition (basic_block, tree *,
+static basic_block find_phi_replacement_condition (struct loop *loop,
+ basic_block, tree *,
block_stmt_iterator *);
static void replace_phi_with_cond_modify_expr (tree, tree, basic_block,
block_stmt_iterator *);
@@ -677,7 +678,8 @@ clean_predicate_lists (struct loop *loop)
whose phi arguments are selected when cond is true. */
static basic_block
-find_phi_replacement_condition (basic_block bb, tree *cond,
+find_phi_replacement_condition (struct loop *loop,
+ basic_block bb, tree *cond,
block_stmt_iterator *bsi)
{
edge e;
@@ -702,12 +704,22 @@ find_phi_replacement_condition (basic_block bb, tree *cond,
tmp_cond = p1->aux;
if (TREE_CODE (tmp_cond) == TRUTH_NOT_EXPR)
{
- *cond = p2->aux;
+ /* If p2 is loop->header then its aux field does not have useful
+ info. Instead use !(cond) where cond is p1's aux field. */
+ if (p2 == loop->header)
+ *cond = invert_truthvalue (unshare_expr (p1->aux));
+ else
+ *cond = p2->aux;
true_bb = p2;
}
else
{
- *cond = p1->aux;
+ /* If p1 is loop->header then its aux field does not have useful
+ info. Instead use !(cond) where cond is p2's aux field. */
+ if (p1 == loop->header)
+ *cond = invert_truthvalue (unshare_expr (p2->aux));
+ else
+ *cond = p1->aux;
true_bb = p1;
}
@@ -828,7 +840,7 @@ process_phi_nodes (struct loop *loop)
/* BB has two predecessors. Using predecessor's aux field, set
appropriate condition for the PHI node replacement. */
if (phi)
- true_bb = find_phi_replacement_condition (bb, &cond, &bsi);
+ true_bb = find_phi_replacement_condition (loop, bb, &cond, &bsi);
while (phi)
{
@@ -1113,20 +1125,17 @@ gate_tree_if_conversion (void)
struct tree_opt_pass pass_if_conversion =
{
- "ifcvt", /* name */
- gate_tree_if_conversion, /* gate */
- main_tree_if_conversion, /* execute */
- NULL, /* sub */
- NULL, /* next */
- 0, /* static_pass_number */
- 0, /* tv_id */
- PROP_cfg | PROP_ssa | PROP_alias, /* properties_required */
- 0, /* properties_provided */
- 0, /* properties_destroyed */
- TODO_dump_func, /* todo_flags_start */
- TODO_dump_func
- | TODO_verify_ssa
- | TODO_verify_stmts
- | TODO_verify_flow, /* todo_flags_finish */
- 0 /* letter */
+ "ifcvt", /* name */
+ gate_tree_if_conversion, /* gate */
+ main_tree_if_conversion, /* execute */
+ NULL, /* sub */
+ NULL, /* next */
+ 0, /* static_pass_number */
+ 0, /* tv_id */
+ PROP_cfg | PROP_ssa | PROP_alias, /* properties_required */
+ 0, /* properties_provided */
+ 0, /* properties_destroyed */
+ 0, /* todo_flags_start */
+ TODO_dump_func | TODO_verify_loops, /* todo_flags_finish */
+ 0 /* letter */
};
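
To make the loop->header special case above concrete, here is a minimal standalone C sketch (hypothetical code, not taken from tree-if-conv.c) of the selection rule: when the predecessor that would supply the predicate is the loop header, its aux field carries no usable condition, so the negation of the other predecessor's predicate is used instead.

#include <stdio.h>
#include <stdlib.h>
#include <string.h>

/* Toy stand-in for a basic block with an if-conversion predicate
   stored in its aux field.  */
struct pred_block
{
  int is_loop_header;       /* Nonzero if this block is the loop header.  */
  const char *aux_cond;     /* Predicate guarding this block.  */
};

/* Model of the updated selection logic: take the predicate from P,
   but if P is the loop header its predicate is meaningless, so return
   the negation of OTHER's predicate instead (the invert_truthvalue
   analogue).  The caller owns the returned string.  */
static char *
choose_condition (const struct pred_block *p, const struct pred_block *other)
{
  const char *src = p->is_loop_header ? other->aux_cond : p->aux_cond;
  size_t len = strlen (src) + sizeof ("!()");
  char *cond = malloc (len);

  if (p->is_loop_header)
    snprintf (cond, len, "!(%s)", src);
  else
    snprintf (cond, len, "%s", src);

  return cond;
}

int
main (void)
{
  struct pred_block header = { 1, "<latch condition>" };
  struct pred_block body   = { 0, "a < b" };

  char *c1 = choose_condition (&body, &header);    /* prints "a < b"    */
  char *c2 = choose_condition (&header, &body);    /* prints "!(a < b)" */
  printf ("%s\n%s\n", c1, c2);
  free (c1);
  free (c2);
  return 0;
}
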
diff --git a/gcc/tree-into-ssa.c b/gcc/tree-into-ssa.c
index 09e6d1f51cc..f15b5ef48ad 100644
--- a/gcc/tree-into-ssa.c
+++ b/gcc/tree-into-ssa.c
@@ -84,50 +84,95 @@ struct def_blocks_d
static htab_t def_blocks;
/* Stack of trees used to restore the global currdefs to its original
- state after completing rewriting of a block and its dominator children.
+ state after completing rewriting of a block and its dominator
+ children. Its elements have the following properties:
- This vector is used in two contexts. The first is rewriting of _DECL
- nodes into SSA_NAMEs. In that context its elements have the
- following properties:
+ - An SSA_NAME indicates that the current definition of the
+ underlying variable should be set to the given SSA_NAME.
- An SSA_NAME indicates that the current definition of the underlying
- variable should be set to the given SSA_NAME.
+ - A _DECL node indicates that the underlying variable has no
+ current definition.
- A _DECL node indicates that the underlying variable has no current
- definition.
+ - A NULL node is used to mark the last node associated with the
+ current block.
- A NULL node is used to mark the last node associated with the
- current block.
-
- This vector is also used when rewriting an SSA_NAME which has multiple
- definition sites into multiple SSA_NAMEs. In that context entries come
- in pairs.
-
- The top entry is an SSA_NAME and the top-1 entry is the
- current value for that SSA_NAME.
-
- A NULL node at the top entry is used to mark the last node associated
- with the current block. */
+ - A NULL node at the top entry is used to mark the last node
+ associated with the current block. */
static VEC(tree_on_heap) *block_defs_stack;
/* Basic block vectors used in this file ought to be allocated in the heap. */
DEF_VEC_MALLOC_P(int);
+/* Set of existing SSA names being replaced by update_ssa. */
+static sbitmap old_ssa_names;
+
+/* Set of new SSA names being added by update_ssa. Note that both
+ NEW_SSA_NAMES and OLD_SSA_NAMES are dense bitmaps because most of
+ the operations done on them are presence tests. */
+static sbitmap new_ssa_names;
+
+/* Set of virtual SSA names to be updated. Since virtuals are always
+ in FUD chain form, these names are not used as a mapping mechanism
+ like OLD_SSA_NAMES and NEW_SSA_NAMES. Instead, the names in this
+ set are used by ssa_names_to_replace to inform its caller which
+ names are going to be updated. */
+static bitmap old_virtual_ssa_names;
+
+/* Symbols whose SSA form needs to be updated or created for the first
+ time. */
+static bitmap syms_to_rename;
+
+/* Set of SSA names that have been marked to be released after they
+ were registered in the replacement table. They will finally be
+ released after we finish updating the SSA web. */
+static bitmap names_to_release;
+
+/* Growth factor for NEW_SSA_NAMES and OLD_SSA_NAMES. These sets need
+ to grow as the callers to register_new_name_mapping will typically
+ create new names on the fly. FIXME. Currently set to 1/3 to avoid
+ frequent reallocations, but we still need to find a reasonable growth
+ strategy. */
+#define NAME_SETS_GROWTH_FACTOR (MAX (3, num_ssa_names / 3))
+
+/* Tuple used to represent replacement mappings. */
+struct repl_map_d
+{
+ tree name;
+ bitmap set;
+};
+
+/* NEW -> OLD_SET replacement table. If we are replacing several
+ existing SSA names O_1, O_2, ..., O_j with a new name N_i,
+ then REPL_TBL[N_i] = { O_1, O_2, ..., O_j }. */
+static htab_t repl_tbl;
+
+/* true if register_new_name_mapping needs to initialize the data
+ structures needed by update_ssa. */
+static bool need_to_initialize_update_ssa_p = true;
+
+/* true if update_ssa needs to update virtual operands. */
+static bool need_to_update_vops_p = false;
+
+/* true if update_ssa is replacing existing SSA names. */
+static bool need_to_replace_names_p = false;
+
/* Global data to attach to the main dominator walk structure. */
struct mark_def_sites_global_data
{
- /* This sbitmap contains the variables which are set before they
- are used in a basic block. We keep it as a global variable
- solely to avoid the overhead of allocating and deallocating
- the bitmap. */
+ /* This bitmap contains the variables which are set before they
+ are used in a basic block. */
bitmap kills;
/* Bitmap of names to rename. */
sbitmap names_to_rename;
+
+ /* Set of blocks that mark_def_sites deems interesting for the
+ renamer to process. */
+ sbitmap interesting_blocks;
};
-/* Information stored for ssa names. */
+/* Information stored for SSA names. */
struct ssa_name_info
{
/* This field indicates whether or not the variable may need PHI nodes.
@@ -140,11 +185,36 @@ struct ssa_name_info
};
+/* The main entry point to the SSA renamer (rewrite_blocks) may be
+ called several times to do different, but related, tasks.
+ Initially, we need it to rename the whole program into SSA form.
+ At other times, we may need it to only rename into SSA newly
+ exposed symbols. Finally, we can also call it to incrementally fix
+ an already built SSA web. */
+enum rewrite_mode {
+ /* Convert the whole function into SSA form. */
+ REWRITE_ALL,
+
+ /* Incrementally update the SSA web by replacing existing SSA
+ names with new ones. See update_ssa for details. */
+ REWRITE_UPDATE
+};
+
+
/* Use TREE_VISITED to keep track of which statements we want to
rename. When renaming a subset of the variables, not all
statements will be processed. This is decided in mark_def_sites. */
#define REWRITE_THIS_STMT(T) TREE_VISITED (T)
+/* Use the unsigned flag to keep track of which statements we want to
+ visit when marking new definition sites. This is slightly
+ different than REWRITE_THIS_STMT: it's used by update_ssa to
+ distinguish statements that need to have both uses and defs
+ processed from those that only need to have their defs processed.
+ Statements that define new SSA names only need to have their defs
+ registered, but they don't need to have their uses renamed. */
+#define REGISTER_DEFS_IN_THIS_STMT(T) (T)->common.unsigned_flag
+
/* Get the information associated with NAME. */
@@ -222,7 +292,7 @@ compute_global_livein (bitmap livein, bitmap def_blocks)
bitmap_iterator bi;
tos = worklist
- = (basic_block *) xmalloc (sizeof (basic_block) * (n_basic_blocks + 1));
+ = (basic_block *) xmalloc (sizeof (basic_block) * (last_basic_block + 1));
EXECUTE_IF_SET_IN_BITMAP (livein, 0, i, bi)
{
@@ -288,18 +358,14 @@ get_def_blocks_for (tree var)
/* Mark block BB as the definition site for variable VAR. PHI_P is true if
- VAR is defined by a PHI node. IS_UPDATE is true if the caller is
- updating an existing SSA form. */
+ VAR is defined by a PHI node. */
static void
-set_def_block (tree var, basic_block bb, bool phi_p, bool is_update)
+set_def_block (tree var, basic_block bb, bool phi_p)
{
struct def_blocks_d *db_p;
enum need_phi_state state;
- if (!is_update && TREE_CODE (var) == SSA_NAME)
- var = SSA_NAME_VAR (var);
-
state = get_phi_state (var);
db_p = get_def_blocks_for (var);
@@ -360,50 +426,170 @@ set_livein_block (tree var, basic_block bb)
}
-/* If the use operand pointed to by OP_P needs to be renamed, then strip away
- any SSA_NAME wrapping the operand, set *UID_P to the underlying variable's
- uid, and return true. Otherwise return false. If the operand was an
- SSA_NAME, change it to the stripped name. */
+/* Return true if symbol SYM is marked for renaming. */
-static bool
-prepare_use_operand_for_rename (use_operand_p op_p, size_t *uid_p)
+static inline bool
+symbol_marked_for_renaming (tree sym)
{
- tree use = USE_FROM_PTR (op_p);
- tree var = (TREE_CODE (use) != SSA_NAME) ? use : SSA_NAME_VAR (use);
- *uid_p = var_ann (var)->uid;
+ gcc_assert (DECL_P (sym));
+ return bitmap_bit_p (syms_to_rename, var_ann (sym)->uid);
+}
- /* Ignore variables that don't need to be renamed. */
- if (vars_to_rename && !bitmap_bit_p (vars_to_rename, *uid_p))
+
+/* Return true if NAME is in OLD_SSA_NAMES. */
+
+static inline bool
+is_old_name (tree name)
+{
+ if (!need_to_replace_names_p)
return false;
- /* The variable needs to be renamed. If this is a use which already
- has an SSA_NAME, then strip it off.
+ return TEST_BIT (old_ssa_names, SSA_NAME_VERSION (name));
+}
+
- By not throwing away SSA_NAMEs on assignments, we avoid a lot of
- useless churn of SSA_NAMEs without having to overly complicate the
- renamer. */
- if (TREE_CODE (use) == SSA_NAME)
- SET_USE (op_p, var);
+/* Return true if NAME is in NEW_SSA_NAMES. */
- return true;
+static inline bool
+is_new_name (tree name)
+{
+ if (!need_to_replace_names_p)
+ return false;
+
+ return TEST_BIT (new_ssa_names, SSA_NAME_VERSION (name));
}
-/* If the def variable DEF needs to be renamed, then strip away any SSA_NAME
- wrapping the operand, set *UID_P to the underlying variable's uid and return
- true. Otherwise return false. */
+/* Hashing and equality functions for REPL_TBL. */
-static bool
-prepare_def_operand_for_rename (tree def, size_t *uid_p)
+static hashval_t
+repl_map_hash (const void *p)
{
- tree var = (TREE_CODE (def) != SSA_NAME) ? def : SSA_NAME_VAR (def);
- *uid_p = var_ann (var)->uid;
+ return htab_hash_pointer ((const void *)((const struct repl_map_d *)p)->name);
+}
- /* Ignore variables that don't need to be renamed. */
- if (vars_to_rename && !bitmap_bit_p (vars_to_rename, *uid_p))
- return false;
+static int
+repl_map_eq (const void *p1, const void *p2)
+{
+ return ((const struct repl_map_d *)p1)->name
+ == ((const struct repl_map_d *)p2)->name;
+}
+
+static void
+repl_map_free (void *p)
+{
+ BITMAP_FREE (((struct repl_map_d *)p)->set);
+ free (p);
+}
+
+
+/* Return the names replaced by NEW (i.e., REPL_TBL[NEW].SET). */
+
+static inline bitmap
+names_replaced_by (tree new)
+{
+ struct repl_map_d m;
+ void **slot;
+
+ m.name = new;
+ slot = htab_find_slot (repl_tbl, (void *) &m, NO_INSERT);
+
+ /* If N was not registered in the replacement table, return NULL. */
+ if (slot == NULL || *slot == NULL)
+ return NULL;
+
+ return ((struct repl_map_d *) *slot)->set;
+}
+
+
+/* Add OLD to REPL_TBL[NEW].SET. */
- return true;
+static inline void
+add_to_repl_tbl (tree new, tree old)
+{
+ struct repl_map_d m, *mp;
+ void **slot;
+
+ m.name = new;
+ slot = htab_find_slot (repl_tbl, (void *) &m, INSERT);
+ if (*slot == NULL)
+ {
+ mp = xmalloc (sizeof (*mp));
+ mp->name = new;
+ mp->set = BITMAP_ALLOC (NULL);
+ *slot = (void *) mp;
+ }
+ else
+ mp = (struct repl_map_d *) *slot;
+
+ bitmap_set_bit (mp->set, SSA_NAME_VERSION (old));
+}
+
+
+/* Add a new mapping NEW -> OLD to REPL_TBL. Every entry N_i in REPL_TBL
+ represents the set of names O_1 ... O_j replaced by N_i. This is
+ used by update_ssa and its helpers to introduce new SSA names in an
+ already formed SSA web. */
+
+static void
+add_new_name_mapping (tree new, tree old)
+{
+ timevar_push (TV_TREE_SSA_INCREMENTAL);
+
+ /* We may need to grow NEW_SSA_NAMES and OLD_SSA_NAMES because our
+ caller may have created new names since the set was created. */
+ if (new_ssa_names->n_bits <= num_ssa_names - 1)
+ {
+ unsigned int new_sz = num_ssa_names + NAME_SETS_GROWTH_FACTOR;
+ new_ssa_names = sbitmap_resize (new_ssa_names, new_sz, 0);
+ old_ssa_names = sbitmap_resize (old_ssa_names, new_sz, 0);
+ }
+
+ /* We don't need to keep replacement mappings for virtual names.
+ Since these names are kept in FUD-chain form, we need to traverse
+ the CFG from ENTRY to repair FUD chains. */
+ if (!is_gimple_reg (new))
+ {
+ tree sym;
+
+ gcc_assert (!is_gimple_reg (old));
+
+ if (DECL_P (old))
+ sym = new;
+ else
+ {
+ sym = SSA_NAME_VAR (old);
+ bitmap_set_bit (old_virtual_ssa_names, SSA_NAME_VERSION (old));
+ }
+
+ mark_sym_for_renaming (sym);
+ need_to_update_vops_p = true;
+
+ timevar_pop (TV_TREE_SSA_INCREMENTAL);
+
+ return;
+ }
+
+ /* Assume that OLD and NEW are different GIMPLE register names. */
+ gcc_assert (new != old && is_gimple_reg (old));
+
+ /* Update the REPL_TBL table. */
+ add_to_repl_tbl (new, old);
+
+ /* If OLD had already been registered as a new name, then all the
+ names that OLD replaces should also be replaced by NEW. */
+ if (is_new_name (old))
+ bitmap_ior_into (names_replaced_by (new), names_replaced_by (old));
+
+ /* Register NEW and OLD in NEW_SSA_NAMES and OLD_SSA_NAMES,
+ respectively. */
+ SET_BIT (new_ssa_names, SSA_NAME_VERSION (new));
+ SET_BIT (old_ssa_names, SSA_NAME_VERSION (old));
+
+ /* Indicate that we are going to be replacing existing names. */
+ need_to_replace_names_p = true;
+
+ timevar_pop (TV_TREE_SSA_INCREMENTAL);
}
@@ -429,31 +615,27 @@ mark_def_sites (struct dom_walk_data *walk_data,
{
struct mark_def_sites_global_data *gd = walk_data->global_data;
bitmap kills = gd->kills;
- size_t uid;
tree stmt, def;
use_operand_p use_p;
def_operand_p def_p;
ssa_op_iter iter;
- /* Mark all the blocks that have definitions for each variable in the
- VARS_TO_RENAME bitmap. */
stmt = bsi_stmt (bsi);
update_stmt_if_modified (stmt);
+ REGISTER_DEFS_IN_THIS_STMT (stmt) = 0;
REWRITE_THIS_STMT (stmt) = 0;
/* If a variable is used before being set, then the variable is live
across a block boundary, so mark it live-on-entry to BB. */
-
FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter,
SSA_OP_USE | SSA_OP_VUSE | SSA_OP_VMUSTDEFKILL)
{
- if (prepare_use_operand_for_rename (use_p, &uid))
- {
- REWRITE_THIS_STMT (stmt) = 1;
- if (!bitmap_bit_p (kills, uid))
- set_livein_block (USE_FROM_PTR (use_p), bb);
- }
+ tree sym = USE_FROM_PTR (use_p);
+ gcc_assert (DECL_P (sym));
+ if (!bitmap_bit_p (kills, var_ann (sym)->uid))
+ set_livein_block (sym, bb);
+ REWRITE_THIS_STMT (stmt) = 1;
}
/* Note that virtual definitions are irrelevant for computing KILLS
@@ -463,29 +645,27 @@ mark_def_sites (struct dom_walk_data *walk_data,
live-on-entry. */
FOR_EACH_SSA_MAYDEF_OPERAND (def_p, use_p, stmt, iter)
{
- if (prepare_use_operand_for_rename (use_p, &uid))
- {
- /* If we do not already have an SSA_NAME for our destination,
- then set the destination to the source. */
- if (TREE_CODE (DEF_FROM_PTR (def_p)) != SSA_NAME)
- SET_DEF (def_p, USE_FROM_PTR (use_p));
-
- set_livein_block (USE_FROM_PTR (use_p), bb);
- set_def_block (DEF_FROM_PTR (def_p), bb, false, false);
- REWRITE_THIS_STMT (stmt) = 1;
- }
+ tree sym = USE_FROM_PTR (use_p);
+ gcc_assert (DECL_P (sym));
+ set_livein_block (sym, bb);
+ set_def_block (sym, bb, false);
+ REGISTER_DEFS_IN_THIS_STMT (stmt) = 1;
+ REWRITE_THIS_STMT (stmt) = 1;
}
/* Now process the defs and must-defs made by this statement. */
FOR_EACH_SSA_TREE_OPERAND (def, stmt, iter, SSA_OP_DEF | SSA_OP_VMUSTDEF)
{
- if (prepare_def_operand_for_rename (def, &uid))
- {
- set_def_block (def, bb, false, false);
- bitmap_set_bit (kills, uid);
- REWRITE_THIS_STMT (stmt) = 1;
- }
+ gcc_assert (DECL_P (def));
+ set_def_block (def, bb, false);
+ bitmap_set_bit (kills, var_ann (def)->uid);
+ REGISTER_DEFS_IN_THIS_STMT (stmt) = 1;
}
+
+ /* If we found the statement interesting then also mark the block BB
+ as interesting. */
+ if (REWRITE_THIS_STMT (stmt) || REGISTER_DEFS_IN_THIS_STMT (stmt))
+ SET_BIT (gd->interesting_blocks, bb->index);
}
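
A standalone sketch of the KILLS bitmap idea used by mark_def_sites above (hypothetical code, with variables reduced to small integer ids): a use whose kill bit is not yet set is live-on-entry to the block, and every definition sets the kill bit.

#include <stdio.h>

#define NVARS 8

/* Toy statement: each one uses one variable and defines one variable
   (-1 means "none").  */
struct stmt { int use; int def; };

/* Model of the KILLS logic: walk the statements of a single block in
   order; any use of a variable whose kill bit is not yet set is
   live-on-entry, and every definition sets the kill bit.  */
static void
scan_block (const struct stmt *stmts, int n, int live_on_entry[NVARS])
{
  unsigned kills = 0;

  for (int i = 0; i < n; i++)
    {
      if (stmts[i].use >= 0 && !(kills & (1u << stmts[i].use)))
        live_on_entry[stmts[i].use] = 1;
      if (stmts[i].def >= 0)
        kills |= 1u << stmts[i].def;
    }
}

int
main (void)
{
  /* a = b; c = a;  -> b is live-on-entry, a and c are not.  */
  struct stmt block[] = { { 1, 0 }, { 0, 2 } };   /* vars: 0=a 1=b 2=c */
  int live[NVARS] = { 0 };

  scan_block (block, 2, live);
  for (int v = 0; v < NVARS; v++)
    if (live[v])
      printf ("var %d is live-on-entry\n", v);    /* prints var 1 */
  return 0;
}
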
@@ -525,9 +705,16 @@ find_idf (bitmap def_blocks, bitmap *dfs)
while (VEC_length (int, work_stack) > 0)
{
bb_index = VEC_pop (int, work_stack);
-
+
+ /* Since the registration of NEW -> OLD name mappings is done
+ separately from the call to update_ssa, when updating the SSA
+ form, the basic blocks where new and/or old names are defined
+ may have disappeared by CFG cleanup calls. In this case,
+ we may pull a non-existing block from the work stack. */
+ gcc_assert (bb_index < (unsigned) last_basic_block);
+
EXECUTE_IF_AND_COMPL_IN_BITMAP (dfs[bb_index], phi_insertion_points,
- 0, bb_index, bi)
+ 0, bb_index, bi)
{
/* Use a safe push because if there is a definition of VAR
in every basic block, then WORK_STACK may eventually have
@@ -556,11 +743,36 @@ find_def_blocks_for (tree var)
}
+/* Retrieve or create a default definition for symbol SYM. */
+
+static inline tree
+get_default_def_for (tree sym)
+{
+ tree ddef = default_def (sym);
+
+ if (ddef == NULL_TREE)
+ {
+ ddef = make_ssa_name (sym, build_empty_stmt ());
+ set_default_def (sym, ddef);
+ }
+
+ return ddef;
+}
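
A minimal sketch of the lazy creation pattern used by get_default_def_for, with the SSA machinery replaced by a hypothetical integer cache:

#include <stdio.h>

#define NSYMS 4

/* Toy cache of default definitions, indexed by symbol id; 0 means
   "not created yet" (stand-in for default_def/set_default_def).  */
static int default_defs[NSYMS];
static int next_version = 1;

/* Model of get_default_def_for: create the default definition lazily
   on the first request and reuse it afterwards.  */
static int
get_default_def_for (int sym)
{
  if (default_defs[sym] == 0)
    default_defs[sym] = next_version++;
  return default_defs[sym];
}

int
main (void)
{
  printf ("%d\n", get_default_def_for (2));  /* creates version 1 */
  printf ("%d\n", get_default_def_for (2));  /* reuses version 1  */
  return 0;
}
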
+
+
/* Insert PHI nodes for variable VAR using the iterated dominance
- frontier given in PHI_INSERTION_POINTS. */
+ frontier given in PHI_INSERTION_POINTS. If UPDATE_P is true, this
+ function assumes that the caller is incrementally updating the SSA
+ form, in which case (1) VAR is assumed to be an SSA name, (2) a new
+ SSA name is created for VAR's symbol, and (3) all the arguments
+ for the newly created PHI node are set to VAR.
+
+ PHI_INSERTION_POINTS is updated to reflect nodes that already had a
+ PHI node for VAR. On exit, only the nodes that received a PHI node
+ for VAR will be present in PHI_INSERTION_POINTS. */
static void
-insert_phi_nodes_for (tree var, bitmap phi_insertion_points)
+insert_phi_nodes_for (tree var, bitmap phi_insertion_points, bool update_p)
{
unsigned bb_index;
edge e;
@@ -570,6 +782,7 @@ insert_phi_nodes_for (tree var, bitmap phi_insertion_points)
struct def_blocks_d *def_map;
def_map = find_def_blocks_for (var);
+ gcc_assert (def_map);
/* Remove the blocks where we already have PHI nodes for VAR. */
bitmap_and_compl_into (phi_insertion_points, def_map->phi_blocks);
@@ -585,13 +798,36 @@ insert_phi_nodes_for (tree var, bitmap phi_insertion_points)
bb = BASIC_BLOCK (bb_index);
phi = create_phi_node (var, bb);
- /* If we are rewriting SSA names, add also the PHI arguments. */
if (TREE_CODE (var) == SSA_NAME)
{
edge_iterator ei;
+
+ /* FIXME. After removing rewrite_ssa_into_ssa, change this
+ if() to gcc_assert(). */
+ if (update_p)
+ {
+ /* If we are rewriting SSA names, create the LHS of the
+ PHI node by duplicating VAR. This is useful in the
+ case of pointers, to also duplicate pointer
+ attributes (alias information, in particular). */
+ tree new_lhs = duplicate_ssa_name (var, phi);
+ SET_PHI_RESULT (phi, new_lhs);
+ add_new_name_mapping (new_lhs, var);
+ }
+
+ /* Add VAR to every argument slot of PHI. We need VAR in
+ every argument so that rewrite_update_phi_arguments knows
+ which name is this PHI node replacing. If VAR is a
+ symbol marked for renaming, this is not necessary, the
+ renamer will use the symbol on the LHS to get its
+ reaching definition. */
FOR_EACH_EDGE (e, ei, bb->preds)
add_phi_arg (phi, var, e);
}
+
+ /* Mark this PHI node as interesting for update_ssa. */
+ REGISTER_DEFS_IN_THIS_STMT (phi) = 1;
+ REWRITE_THIS_STMT (phi) = 1;
}
}
@@ -609,12 +845,12 @@ insert_phi_nodes_1 (tree var, bitmap *dfs)
if (def_map == NULL)
return;
- idf = find_idf (def_map->def_blocks, dfs);
-
if (get_phi_state (var) != NEED_PHI_STATE_NO)
- insert_phi_nodes_for (var, idf);
-
- BITMAP_FREE (idf);
+ {
+ idf = find_idf (def_map->def_blocks, dfs);
+ insert_phi_nodes_for (var, idf, false);
+ BITMAP_FREE (idf);
+ }
}
@@ -630,25 +866,17 @@ static void
insert_phi_nodes (bitmap *dfs, bitmap names_to_rename)
{
unsigned i;
- bitmap_iterator bi;
timevar_push (TV_TREE_INSERT_PHI_NODES);
- /* Iterate over all variables in VARS_TO_RENAME. For each variable, add
- to the work list all the blocks that have a definition for the
- variable. PHI nodes will be added to the dominance frontier blocks of
- each definition block. */
if (names_to_rename)
{
+ bitmap_iterator bi;
+
EXECUTE_IF_SET_IN_BITMAP (names_to_rename, 0, i, bi)
if (ssa_name (i))
insert_phi_nodes_1 (ssa_name (i), dfs);
}
- else if (vars_to_rename)
- {
- EXECUTE_IF_SET_IN_BITMAP (vars_to_rename, 0, i, bi)
- insert_phi_nodes_1 (referenced_var (i), dfs);
- }
else
{
for (i = 0; i < num_referenced_vars; i++)
@@ -757,57 +985,23 @@ rewrite_initialize_block (struct dom_walk_data *walk_data ATTRIBUTE_UNUSED,
static tree
get_reaching_def (tree var)
{
- tree default_d, currdef_var, avar;
+ tree currdef_var, avar;
/* Lookup the current reaching definition for VAR. */
- default_d = NULL_TREE;
currdef_var = get_current_def (var);
/* If there is no reaching definition for VAR, create and register a
default definition for it (if needed). */
if (currdef_var == NULL_TREE)
{
- if (TREE_CODE (var) == SSA_NAME)
- avar = SSA_NAME_VAR (var);
- else
- avar = var;
-
- default_d = default_def (avar);
- if (default_d == NULL_TREE)
- {
- default_d = make_ssa_name (avar, build_empty_stmt ());
- set_default_def (avar, default_d);
- }
- set_current_def (var, default_d);
+ avar = DECL_P (var) ? var : SSA_NAME_VAR (var);
+ currdef_var = get_default_def_for (avar);
+ set_current_def (var, currdef_var);
}
/* Return the current reaching definition for VAR, or the default
definition, if we had to create one. */
- return (currdef_var) ? currdef_var : default_d;
-}
-
-
-/* Replace the operand pointed by OP_P with its immediate reaching
- definition. */
-
-static inline void
-rewrite_operand (use_operand_p op_p)
-{
- tree var = USE_FROM_PTR (op_p);
- if (TREE_CODE (var) != SSA_NAME)
- SET_USE (op_p, get_reaching_def (var));
- else
- {
-#if defined ENABLE_CHECKING
- /* If we get to this point, VAR is an SSA_NAME. If VAR's symbol
- was marked for renaming, make sure that its reaching
- definition is VAR itself. Otherwise, something has gone
- wrong. */
- tree sym = SSA_NAME_VAR (var);
- if (bitmap_bit_p (vars_to_rename, var_ann (sym)->uid))
- gcc_assert (var == get_reaching_def (SSA_NAME_VAR (var)));
-#endif
- }
+ return currdef_var;
}
@@ -829,7 +1023,7 @@ rewrite_stmt (struct dom_walk_data *walk_data ATTRIBUTE_UNUSED,
/* If mark_def_sites decided that we don't need to rewrite this
statement, ignore it. */
- if (!REWRITE_THIS_STMT (stmt))
+ if (!REWRITE_THIS_STMT (stmt) && !REGISTER_DEFS_IN_THIS_STMT (stmt))
return;
if (dump_file && (dump_flags & TDF_DETAILS))
@@ -839,22 +1033,25 @@ rewrite_stmt (struct dom_walk_data *walk_data ATTRIBUTE_UNUSED,
fprintf (dump_file, "\n");
}
- get_stmt_operands (stmt);
-
/* Step 1. Rewrite USES and VUSES in the statement. */
- FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_ALL_USES | SSA_OP_ALL_KILLS)
- rewrite_operand (use_p);
+ if (REWRITE_THIS_STMT (stmt))
+ FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter,
+ SSA_OP_ALL_USES|SSA_OP_ALL_KILLS)
+ {
+ tree var = USE_FROM_PTR (use_p);
+ gcc_assert (DECL_P (var));
+ SET_USE (use_p, get_reaching_def (var));
+ }
/* Step 2. Register the statement's DEF and VDEF operands. */
- FOR_EACH_SSA_DEF_OPERAND (def_p, stmt, iter, SSA_OP_ALL_DEFS)
- {
- if (TREE_CODE (DEF_FROM_PTR (def_p)) != SSA_NAME)
- SET_DEF (def_p, make_ssa_name (DEF_FROM_PTR (def_p), stmt));
-
- /* FIXME: We shouldn't be registering new defs if the variable
- doesn't need to be renamed. */
- register_new_def (DEF_FROM_PTR (def_p), &block_defs_stack);
- }
+ if (REGISTER_DEFS_IN_THIS_STMT (stmt))
+ FOR_EACH_SSA_DEF_OPERAND (def_p, stmt, iter, SSA_OP_ALL_DEFS)
+ {
+ tree var = DEF_FROM_PTR (def_p);
+ gcc_assert (DECL_P (var));
+ SET_DEF (def_p, make_ssa_name (var, stmt));
+ register_new_def (DEF_FROM_PTR (def_p), &block_defs_stack);
+ }
}
@@ -877,13 +1074,6 @@ rewrite_add_phi_arguments (struct dom_walk_data *walk_data ATTRIBUTE_UNUSED,
for (phi = phi_nodes (e->dest); phi; phi = PHI_CHAIN (phi))
{
tree currdef;
-
- /* If this PHI node has already been rewritten, then there is
- nothing to do for this PHI or any following PHIs since we
- always add new PHI nodes at the start of the PHI chain. */
- if (PHI_REWRITTEN (phi))
- break;
-
currdef = get_reaching_def (SSA_NAME_VAR (PHI_RESULT (phi)));
add_phi_arg (phi, currdef, e);
}
@@ -891,43 +1081,6 @@ rewrite_add_phi_arguments (struct dom_walk_data *walk_data ATTRIBUTE_UNUSED,
}
-/* Rewrite existing virtual PHI arguments so that they have the correct
- reaching definitions. BB is the basic block whose successors contain the
- PHI nodes we want to add arguments for. */
-
-static void
-rewrite_virtual_phi_arguments (struct dom_walk_data *walk_data ATTRIBUTE_UNUSED,
- basic_block bb)
-{
- edge e;
- use_operand_p op;
- edge_iterator ei;
-
- FOR_EACH_EDGE (e, ei, bb->succs)
- {
- tree phi;
-
- if (e->dest == EXIT_BLOCK_PTR)
- continue;
-
- for (phi = phi_nodes (e->dest); phi; phi = PHI_CHAIN (phi))
- {
- tree result = PHI_RESULT (phi);
- op = PHI_ARG_DEF_PTR_FROM_EDGE (phi, e);
-
- if (is_gimple_reg (result)
- || !bitmap_bit_p (vars_to_rename,
- var_ann (SSA_NAME_VAR (result))->uid))
- continue;
-
- SET_USE (op, get_reaching_def (SSA_NAME_VAR (result)));
- if (e->flags & EDGE_ABNORMAL)
- SSA_NAME_OCCURS_IN_ABNORMAL_PHI (USE_FROM_PTR (op)) = 1;
- }
- }
-}
-
-
/* Called after visiting basic block BB. Restore CURRDEFS to its
original value. */
@@ -1084,65 +1237,334 @@ debug_def_blocks (void)
}
-/* If a variable V in VARS_TO_RENAME is a pointer, the renaming
- process will cause us to lose the name memory tags that may have
- been associated with the various SSA_NAMEs of V. This means that
- the variables aliased to those name tags also need to be renamed
- again.
+/* Register NEW_NAME to be the new reaching definition for OLD_NAME. */
- FIXME 1- We should either have a better scheme for renaming
- pointers that doesn't lose name tags or re-run alias
- analysis to recover points-to information.
+static inline void
+register_new_update_single (tree new_name, tree old_name)
+{
+ tree currdef = get_current_def (old_name);
- 2- Currently we just invalidate *all* the name tags. This
- should be more selective. */
+ /* Push the current reaching definition into BLOCK_DEFS_STACK.
+ This stack is later used by the dominator tree callbacks to
+ restore the reaching definitions for all the variables
+ defined in the block after a recursive visit to all its
+ immediately dominated blocks. */
+ VEC_safe_push (tree_on_heap, block_defs_stack, currdef);
+ VEC_safe_push (tree_on_heap, block_defs_stack, old_name);
-static void
-invalidate_name_tags (bitmap vars_to_rename)
+ /* Set the current reaching definition for OLD_NAME to be
+ NEW_NAME. */
+ set_current_def (old_name, new_name);
+}
+
+
+/* Register NEW_NAME to be the new reaching definition for all the
+ names in OLD_NAMES. Used by the incremental SSA update routines to
+ replace old SSA names with new ones. */
+
+static inline void
+register_new_update_set (tree new_name, bitmap old_names)
{
- unsigned i;
- bool rename_name_tags_p;
bitmap_iterator bi;
+ unsigned i;
+
+ EXECUTE_IF_SET_IN_BITMAP (old_names, 0, i, bi)
+ register_new_update_single (new_name, ssa_name (i));
+}
+
+
+/* Initialization of block data structures for the incremental SSA
+ update pass. Create a block local stack of reaching definitions
+ for new SSA names produced in this block (BLOCK_DEFS). Register
+ new definitions for every PHI node in the block. */
+
+static void
+rewrite_update_init_block (struct dom_walk_data *walk_data ATTRIBUTE_UNUSED,
+ basic_block bb)
+{
+ edge e;
+ edge_iterator ei;
+ tree phi;
+ bool is_abnormal_phi;
- rename_name_tags_p = false;
- EXECUTE_IF_SET_IN_BITMAP (vars_to_rename, 0, i, bi)
+ if (dump_file && (dump_flags & TDF_DETAILS))
+ fprintf (dump_file, "\n\nRegistering new PHI nodes in block #%d\n\n",
+ bb->index);
+
+ /* Mark the unwind point for this block. */
+ VEC_safe_push (tree_on_heap, block_defs_stack, NULL_TREE);
+
+ /* Mark the LHS if any of the arguments flows through an abnormal
+ edge. */
+ is_abnormal_phi = false;
+ FOR_EACH_EDGE (e, ei, bb->preds)
+ if (e->flags & EDGE_ABNORMAL)
+ {
+ is_abnormal_phi = true;
+ break;
+ }
+
+ /* If any of the PHI nodes is a replacement for a name in
+ OLD_SSA_NAMES or it's one of the names in NEW_SSA_NAMES, then
+ register it as a new definition for its corresponding name. Also
+ register definitions for names whose underlying symbols are
+ marked for renaming. */
+ for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
{
- if (POINTER_TYPE_P (TREE_TYPE (referenced_var (i))))
+ tree lhs, lhs_sym;
+
+ if (!REGISTER_DEFS_IN_THIS_STMT (phi))
+ continue;
+
+ lhs = PHI_RESULT (phi);
+ lhs_sym = SSA_NAME_VAR (lhs);
+
+ if (symbol_marked_for_renaming (lhs_sym))
+ register_new_update_single (lhs, lhs_sym);
+ else
{
- rename_name_tags_p = true;
- break;
+ /* If LHS is a new name, register a new definition for all
+ the names replaced by LHS. */
+ if (is_new_name (lhs))
+ register_new_update_set (lhs, names_replaced_by (lhs));
+
+ /* If LHS is an OLD name, register it as a new definition
+ for itself. */
+ if (is_old_name (lhs))
+ register_new_update_single (lhs, lhs);
}
+
+ if (is_abnormal_phi)
+ SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs) = 1;
}
+}
- if (rename_name_tags_p)
- for (i = 0; i < num_referenced_vars; i++)
- {
- var_ann_t ann = var_ann (referenced_var (i));
- if (ann->mem_tag_kind == NAME_TAG)
+/* Replace the operand pointed by USE_P with USE's current reaching
+ definition. */
+
+static inline void
+replace_use (use_operand_p use_p, tree use)
+{
+ tree rdef = get_reaching_def (use);
+ if (rdef != use)
+ SET_USE (use_p, rdef);
+}
+
+
+/* Called after visiting block BB. Unwind BLOCK_DEFS_STACK to restore
+ the current reaching definition of every name re-written in BB to
+ the original reaching definition before visiting BB. This
+ unwinding must be done in the opposite order to what is done in
+ register_new_update_set. */
+
+static void
+rewrite_update_fini_block (struct dom_walk_data *walk_data ATTRIBUTE_UNUSED,
+ basic_block bb ATTRIBUTE_UNUSED)
+{
+ while (VEC_length (tree_on_heap, block_defs_stack) > 0)
+ {
+ tree var = VEC_pop (tree_on_heap, block_defs_stack);
+ tree saved_def;
+
+ /* NULL indicates the unwind stop point for this block (see
+ rewrite_update_init_block). */
+ if (var == NULL)
+ return;
+
+ saved_def = VEC_pop (tree_on_heap, block_defs_stack);
+ set_current_def (var, saved_def);
+ }
+}
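
The unwind protocol shared by register_new_update_single and rewrite_update_fini_block (push a block marker, push <saved definition, name> pairs, pop back to the marker on exit) can be modeled in a few lines of standalone C; the -1 marker below is a hypothetical stand-in for NULL_TREE.

#include <stdio.h>

#define NNAMES 16
#define STACK_SZ 64

/* Toy current-definition table and unwind stack.  A -1 entry on the
   stack plays the role of the NULL_TREE block marker, so -1 must not
   be a valid name id.  */
static int currdef[NNAMES];
static int stack[STACK_SZ];
static int sp;

static void
enter_block (void)
{
  stack[sp++] = -1;                 /* unwind marker for this block */
}

/* Model of register_new_update_single: save the previous reaching
   definition of NAME on the stack, then install NEW_DEF.  */
static void
register_def (int name, int new_def)
{
  stack[sp++] = currdef[name];      /* saved definition */
  stack[sp++] = name;               /* which name it belongs to */
  currdef[name] = new_def;
}

/* Model of rewrite_update_fini_block: pop (name, saved def) pairs and
   restore them until the block marker is reached.  */
static void
leave_block (void)
{
  while (sp > 0)
    {
      int name = stack[--sp];
      if (name == -1)
        return;
      currdef[name] = stack[--sp];
    }
}

int
main (void)
{
  currdef[3] = 30;
  enter_block ();
  register_def (3, 31);
  printf ("in block: %d\n", currdef[3]);     /* 31 */
  leave_block ();
  printf ("after block: %d\n", currdef[3]);  /* restored to 30 */
  return 0;
}
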
+
+
+/* Update every variable used in the statement pointed-to by SI. The
+ statement is assumed to be in SSA form already. Names in
+ OLD_SSA_NAMES used by SI will be updated to their current reaching
+ definition. Names in OLD_SSA_NAMES or NEW_SSA_NAMES defined by SI
+ will be registered as a new definition for their corresponding name
+ in OLD_SSA_NAMES. */
+
+static void
+rewrite_update_stmt (struct dom_walk_data *walk_data ATTRIBUTE_UNUSED,
+ basic_block bb ATTRIBUTE_UNUSED,
+ block_stmt_iterator si)
+{
+ stmt_ann_t ann;
+ tree stmt;
+ use_operand_p use_p;
+ def_operand_p def_p;
+ ssa_op_iter iter;
+
+ stmt = bsi_stmt (si);
+ ann = stmt_ann (stmt);
+
+ /* Only update marked statements. */
+ if (!REWRITE_THIS_STMT (stmt) && !REGISTER_DEFS_IN_THIS_STMT (stmt))
+ return;
+
+ if (dump_file && (dump_flags & TDF_DETAILS))
+ {
+ fprintf (dump_file, "Updating SSA information for statement ");
+ print_generic_stmt (dump_file, stmt, TDF_SLIM);
+ fprintf (dump_file, "\n");
+ }
+
+ /* Rewrite USES included in OLD_SSA_NAMES and USES whose underlying
+ symbol is marked for renaming. */
+ if (REWRITE_THIS_STMT (stmt))
+ {
+ FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE)
+ {
+ tree use = USE_FROM_PTR (use_p);
+ tree sym = DECL_P (use) ? use : SSA_NAME_VAR (use);
+
+ if (symbol_marked_for_renaming (sym))
+ replace_use (use_p, sym);
+ else if (is_old_name (use))
+ replace_use (use_p, use);
+ }
+
+ if (need_to_update_vops_p)
+ FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter,
+ SSA_OP_VIRTUAL_USES | SSA_OP_VIRTUAL_KILLS)
{
- size_t j;
- varray_type may_aliases = ann->may_aliases;
+ tree use = USE_FROM_PTR (use_p);
+ tree sym = DECL_P (use) ? use : SSA_NAME_VAR (use);
- bitmap_set_bit (vars_to_rename, ann->uid);
- if (ann->may_aliases)
- for (j = 0; j < VARRAY_ACTIVE_SIZE (may_aliases); j++)
+ if (symbol_marked_for_renaming (sym))
+ replace_use (use_p, sym);
+ }
+ }
+
+ /* Register definitions of names in NEW_SSA_NAMES and OLD_SSA_NAMES.
+ Also register definitions for names whose underlying symbol is
+ marked for renaming. */
+ if (REGISTER_DEFS_IN_THIS_STMT (stmt))
+ {
+ FOR_EACH_SSA_DEF_OPERAND (def_p, stmt, iter, SSA_OP_DEF)
+ {
+ tree def = DEF_FROM_PTR (def_p);
+ tree sym = DECL_P (def) ? def : SSA_NAME_VAR (def);
+
+ /* If DEF is a naked symbol that needs renaming, create a
+ new name for it. */
+ if (symbol_marked_for_renaming (sym))
+ {
+ if (DECL_P (def))
{
- tree var = VARRAY_TREE (may_aliases, j);
- bitmap_set_bit (vars_to_rename, var_ann (var)->uid);
+ def = make_ssa_name (def, stmt);
+ SET_DEF (def_p, def);
}
+
+ register_new_update_single (def, sym);
+ }
+ else
+ {
+ /* If DEF is a new name, register it as a new definition
+ for all the names replaced by DEF. */
+ if (is_new_name (def))
+ register_new_update_set (def, names_replaced_by (def));
+
+ /* If DEF is an old name, register DEF as a new
+ definition for itself. */
+ if (is_old_name (def))
+ register_new_update_single (def, def);
+ }
+ }
+
+ if (need_to_update_vops_p)
+ FOR_EACH_SSA_DEF_OPERAND (def_p, stmt, iter, SSA_OP_VIRTUAL_DEFS)
+ {
+ tree def = DEF_FROM_PTR (def_p);
+ tree sym = DECL_P (def) ? def : SSA_NAME_VAR (def);
+
+ if (symbol_marked_for_renaming (sym))
+ {
+ if (DECL_P (def))
+ {
+ def = make_ssa_name (def, stmt);
+ SET_DEF (def_p, def);
+ }
+
+ register_new_update_single (def, sym);
+ }
}
- }
+ }
+}
+
+
+/* Visit all the successor blocks of BB looking for PHI nodes. For
+ every PHI node found, check if any of its arguments is in
+ OLD_SSA_NAMES. If so, and if the argument has a current reaching
+ definition, replace it. */
+
+static void
+rewrite_update_phi_arguments (struct dom_walk_data *walk_data ATTRIBUTE_UNUSED,
+ basic_block bb)
+{
+ edge e;
+ edge_iterator ei;
+
+ FOR_EACH_EDGE (e, ei, bb->succs)
+ {
+ tree phi;
+
+ for (phi = phi_nodes (e->dest); phi; phi = PHI_CHAIN (phi))
+ {
+ tree arg;
+ use_operand_p arg_p;
+
+ /* Skip PHI nodes that are not marked for rewrite. */
+ if (!REWRITE_THIS_STMT (phi))
+ continue;
+
+ arg_p = PHI_ARG_DEF_PTR_FROM_EDGE (phi, e);
+ arg = USE_FROM_PTR (arg_p);
+
+ if (arg && !DECL_P (arg) && TREE_CODE (arg) != SSA_NAME)
+ continue;
+
+ if (arg == NULL_TREE)
+ {
+ /* When updating a PHI node for a recently introduced
+ symbol we may find NULL arguments. That's why we
+ take the symbol from the LHS of the PHI node. */
+ replace_use (arg_p, SSA_NAME_VAR (PHI_RESULT (phi)));
+ }
+ else
+ {
+ tree sym = DECL_P (arg) ? arg : SSA_NAME_VAR (arg);
+
+ if (symbol_marked_for_renaming (sym))
+ replace_use (arg_p, sym);
+ else if (is_old_name (arg))
+ replace_use (arg_p, arg);
+ }
+
+ if (e->flags & EDGE_ABNORMAL)
+ SSA_NAME_OCCURS_IN_ABNORMAL_PHI (USE_FROM_PTR (arg_p)) = 1;
+ }
+ }
}
/* Rewrite the actual blocks, statements, and PHI arguments, to be in SSA
- form. FIX_VIRTUAL_PHIS is true if we should only be fixing up virtual
- PHI arguments, instead of adding new PHI arguments for just added PHI
- nodes. */
+ form.
+
+ ENTRY indicates the block where to start. Every block dominated by
+ ENTRY will be rewritten.
+
+ WHAT indicates what actions will be taken by the renamer (see enum
+ rewrite_mode).
+
+ BLOCKS are the set of interesting blocks for the dominator walker
+ to process. If this set is NULL, then all the nodes dominated
+ by ENTRY are walked. Otherwise, blocks dominated by ENTRY that
+ are not present in BLOCKS are ignored. */
static void
-rewrite_blocks (bool fix_virtual_phis)
+rewrite_blocks (basic_block entry, enum rewrite_mode what, sbitmap blocks)
{
struct dom_walk_data walk_data;
@@ -1150,22 +1572,36 @@ rewrite_blocks (bool fix_virtual_phis)
timevar_push (TV_TREE_SSA_REWRITE_BLOCKS);
/* Setup callbacks for the generic dominator tree walker. */
- walk_data.walk_stmts_backward = false;
+ memset (&walk_data, 0, sizeof (walk_data));
+
walk_data.dom_direction = CDI_DOMINATORS;
- walk_data.initialize_block_local_data = NULL;
- walk_data.before_dom_children_before_stmts = rewrite_initialize_block;
- walk_data.before_dom_children_walk_stmts = rewrite_stmt;
- walk_data.before_dom_children_after_stmts = NULL;
- if (!fix_virtual_phis)
+ walk_data.interesting_blocks = blocks;
+
+ if (what == REWRITE_UPDATE)
+ walk_data.before_dom_children_before_stmts = rewrite_update_init_block;
+ else
+ walk_data.before_dom_children_before_stmts = rewrite_initialize_block;
+
+ if (what == REWRITE_ALL)
+ walk_data.before_dom_children_walk_stmts = rewrite_stmt;
+ else if (what == REWRITE_UPDATE)
+ walk_data.before_dom_children_walk_stmts = rewrite_update_stmt;
+ else
+ gcc_unreachable ();
+
+ if (what == REWRITE_ALL)
walk_data.before_dom_children_after_stmts = rewrite_add_phi_arguments;
+ else if (what == REWRITE_UPDATE)
+ walk_data.before_dom_children_after_stmts = rewrite_update_phi_arguments;
else
- walk_data.before_dom_children_after_stmts = rewrite_virtual_phi_arguments;
+ gcc_unreachable ();
- walk_data.after_dom_children_before_stmts = NULL;
- walk_data.after_dom_children_walk_stmts = NULL;
- walk_data.after_dom_children_after_stmts = rewrite_finalize_block;
- walk_data.global_data = NULL;
- walk_data.block_local_data_size = 0;
+ if (what == REWRITE_ALL)
+ walk_data.after_dom_children_after_stmts = rewrite_finalize_block;
+ else if (what == REWRITE_UPDATE)
+ walk_data.after_dom_children_after_stmts = rewrite_update_fini_block;
+ else
+ gcc_unreachable ();
block_defs_stack = VEC_alloc (tree_on_heap, 10);
@@ -1174,7 +1610,7 @@ rewrite_blocks (bool fix_virtual_phis)
/* Recursively walk the dominator tree rewriting each statement in
each basic block. */
- walk_dominator_tree (&walk_data, ENTRY_BLOCK_PTR);
+ walk_dominator_tree (&walk_data, entry);
/* Finalize the dominator walker. */
fini_walk_dominator_tree (&walk_data);
@@ -1183,11 +1619,15 @@ rewrite_blocks (bool fix_virtual_phis)
if (dump_file && (dump_flags & TDF_STATS))
{
dump_dfa_stats (dump_file);
- dump_tree_ssa_stats (dump_file);
+ if (def_blocks)
+ dump_tree_ssa_stats (dump_file);
}
- htab_delete (def_blocks);
- def_blocks = NULL;
+ if (def_blocks)
+ {
+ htab_delete (def_blocks);
+ def_blocks = NULL;
+ }
VEC_free (tree_on_heap, block_defs_stack);
block_defs_stack = NULL;
@@ -1209,11 +1649,15 @@ mark_def_sites_initialize_block (struct dom_walk_data *walk_data,
}
-/* Mark the definition site blocks for each variable, so that we know where
- the variable is actually live. */
+/* Mark the definition site blocks for each variable, so that we know
+ where the variable is actually live.
-static void
-mark_def_site_blocks (void)
+ INTERESTING_BLOCKS will be filled in with all the blocks that
+ should be processed by the renamer. It is assumed to be
+ initialized and zeroed by the caller. */
+
+static void
+mark_def_site_blocks (sbitmap interesting_blocks)
{
size_t i;
struct dom_walk_data walk_data;
@@ -1226,9 +1670,6 @@ mark_def_site_blocks (void)
for (i = 0; i < num_referenced_vars; i++)
set_current_def (referenced_var (i), NULL_TREE);
- /* Ensure that the dominance information is OK. */
- calculate_dominance_info (CDI_DOMINATORS);
-
/* Setup callbacks for the generic dominator tree walker to find and
mark definition sites. */
walk_data.walk_stmts_backward = false;
@@ -1240,11 +1681,16 @@ mark_def_site_blocks (void)
walk_data.after_dom_children_before_stmts = NULL;
walk_data.after_dom_children_walk_stmts = NULL;
walk_data.after_dom_children_after_stmts = NULL;
+ walk_data.interesting_blocks = NULL;
/* Notice that this bitmap is indexed using variable UIDs, so it must be
large enough to accommodate all the variables referenced in the
function, not just the ones we are renaming. */
mark_def_sites_global_data.kills = BITMAP_ALLOC (NULL);
+
+ /* Create the set of interesting blocks that will be filled by
+ mark_def_sites. */
+ mark_def_sites_global_data.interesting_blocks = interesting_blocks;
walk_data.global_data = &mark_def_sites_global_data;
/* We do not have any local data. */
@@ -1265,96 +1711,73 @@ mark_def_site_blocks (void)
/* Main entry point into the SSA builder. The renaming process
- proceeds in five main phases:
+ proceeds in four main phases:
- 1- If VARS_TO_RENAME has any entries, any existing PHI nodes for
- those variables are removed from the flow graph so that they can
- be computed again.
+ 1- Compute dominance frontier and immediate dominators, needed to
+ insert PHI nodes and rename the function in dominator tree
+ order.
- 2- Compute dominance frontier, needed to insert PHI nodes and
- rename the function in dominator tree order.
-
- 3- Find and mark all the blocks that define variables
+ 2- Find and mark all the blocks that define variables
(mark_def_site_blocks).
- 4- Insert PHI nodes at dominance frontiers (insert_phi_nodes).
+ 3- Insert PHI nodes at dominance frontiers (insert_phi_nodes).
- 5- Rename all the blocks (rewrite_blocks) and statements in the program.
+ 4- Rename all the blocks (rewrite_blocks) and statements in the program.
Steps 3 and 5 are done using the dominator tree walker
- (walk_dominator_tree).
-
- ALL is true if all variables should be renamed (otherwise just those
- mentioned in vars_to_rename are taken into account). */
+ (walk_dominator_tree). */
-void
-rewrite_into_ssa (bool all)
+static void
+rewrite_into_ssa (void)
{
bitmap *dfs;
basic_block bb;
- bitmap old_vars_to_rename = vars_to_rename;
+ sbitmap interesting_blocks;
timevar_push (TV_TREE_SSA_OTHER);
- if (all)
- vars_to_rename = NULL;
- else
- {
- /* Initialize the array of variables to rename. */
- gcc_assert (vars_to_rename);
-
- if (bitmap_empty_p (vars_to_rename))
- {
- timevar_pop (TV_TREE_SSA_OTHER);
- return;
- }
-
- invalidate_name_tags (vars_to_rename);
-
- /* Now remove all the existing PHI nodes (if any) for the variables
- that we are about to rename into SSA. */
- remove_all_phi_nodes_for (vars_to_rename);
- }
+ /* Initialize operand data structures. */
+ init_ssa_operands ();
- mark_def_site_blocks ();
+ /* Initialize the set of interesting blocks. The callback
+ mark_def_sites will add to this set those blocks that the renamer
+ should process. */
+ interesting_blocks = sbitmap_alloc (last_basic_block);
+ sbitmap_zero (interesting_blocks);
/* Initialize dominance frontier. */
dfs = (bitmap *) xmalloc (last_basic_block * sizeof (bitmap *));
FOR_EACH_BB (bb)
dfs[bb->index] = BITMAP_ALLOC (NULL);
- /* Compute dominance frontiers. */
+ /* 1- Compute dominance frontiers. */
+ calculate_dominance_info (CDI_DOMINATORS);
compute_dominance_frontiers (dfs);
- /* Insert PHI nodes at dominance frontiers of definition blocks. */
+ /* 2- Find and mark definition sites. */
+ mark_def_site_blocks (interesting_blocks);
+
+ /* 3- Insert PHI nodes at dominance frontiers of definition blocks. */
insert_phi_nodes (dfs, NULL);
- rewrite_blocks (false);
+ /* 4- Rename all the blocks. */
+ rewrite_blocks (ENTRY_BLOCK_PTR, REWRITE_ALL, interesting_blocks);
/* Free allocated memory. */
FOR_EACH_BB (bb)
BITMAP_FREE (dfs[bb->index]);
free (dfs);
+ sbitmap_free (interesting_blocks);
- vars_to_rename = old_vars_to_rename;
timevar_pop (TV_TREE_SSA_OTHER);
}
-/* Rewrites all variables into SSA. */
-
-static void
-rewrite_all_into_ssa (void)
-{
- init_ssa_operands ();
- rewrite_into_ssa (true);
-}
-
struct tree_opt_pass pass_build_ssa =
{
"ssa", /* name */
NULL, /* gate */
- rewrite_all_into_ssa, /* execute */
+ rewrite_into_ssa, /* execute */
NULL, /* sub */
NULL, /* next */
0, /* static_pass_number */
@@ -1368,19 +1791,1054 @@ struct tree_opt_pass pass_build_ssa =
};
-/* Rewrite the def-def chains of virtual operands so that they have
- the correct reaching definitions. */
+/* Mark the definition of VAR at STMT and BB as interesting for the
+ renamer. BLOCKS is the set of blocks that need updating.
+ INSERT_PHI_P is true if we are going to insert new PHI nodes. */
+
+static void
+mark_def_interesting (tree var, tree stmt, basic_block bb, bitmap blocks,
+ bool insert_phi_p)
+{
+ REGISTER_DEFS_IN_THIS_STMT (stmt) = 1;
+ bitmap_set_bit (blocks, bb->index);
+
+ if (insert_phi_p)
+ {
+ bool is_phi_p = TREE_CODE (stmt) == PHI_NODE;
+
+#if defined ENABLE_CHECKING
+ /* If VAR is a virtual, then it had better be a symbol.
+ Virtuals are in FUD-chain form, so we are interested in the
+ definition and use sites of the symbol, not the individual
+ SSA names. */
+ if (!is_gimple_reg (var))
+ gcc_assert (DECL_P (var));
+#endif
+
+ set_def_block (var, bb, is_phi_p);
+
+ /* If VAR is an SSA name in NEW_SSA_NAMES, this is a definition
+ site for both itself and all the old names replaced by it. */
+ if (TREE_CODE (var) == SSA_NAME && is_new_name (var))
+ {
+ bitmap_iterator bi;
+ unsigned i;
+ bitmap set = names_replaced_by (var);
+ if (set)
+ EXECUTE_IF_SET_IN_BITMAP (set, 0, i, bi)
+ set_def_block (ssa_name (i), bb, is_phi_p);
+ }
+ }
+}
+
+
+/* Mark the use of VAR at STMT and BB as interesting for the
+ renamer. INSERT_PHI_P is true if we are going to insert new PHI
+ nodes. BLOCKS is the set of blocks that need updating. */
+
+static inline void
+mark_use_interesting (tree var, tree stmt, basic_block bb, bitmap blocks,
+ bool insert_phi_p)
+{
+ REWRITE_THIS_STMT (stmt) = 1;
+ bitmap_set_bit (blocks, bb->index);
+
+ /* If VAR has not been defined in BB, then it is live-on-entry
+ to BB. Note that we cannot just use the block holding VAR's
+ definition because if VAR is one of the names in OLD_SSA_NAMES,
+ it will have several definitions (itself and all the names that
+ replace it). */
+ if (insert_phi_p)
+ {
+ struct def_blocks_d *db_p;
+
+#if defined ENABLE_CHECKING
+ /* If VAR is a virtual, then it had better be a symbol.
+ Virtuals are in FUD-chain form, so we are interested in the
+ definition and use sites of the symbol, not the individual
+ SSA names. */
+ if (!is_gimple_reg (var))
+ gcc_assert (DECL_P (var));
+#endif
+
+ db_p = get_def_blocks_for (var);
+ if (!bitmap_bit_p (db_p->def_blocks, bb->index))
+ set_livein_block (var, bb);
+ }
+}
+
+
+/* If any of the arguments of PHI is in OLD_SSA_NAMES, mark PHI to
+ be rewritten. BB is the block where PHI resides, BLOCKS is the
+ region to be renamed and INSERT_PHI_P is true if the updating
+ process should insert new PHI nodes. */
+
+static void
+prepare_phi_args_for_update (tree phi, basic_block bb, bitmap blocks,
+ bool insert_phi_p)
+{
+ int i;
+
+ for (i = 0; i < PHI_NUM_ARGS (phi); i++)
+ {
+ tree arg = PHI_ARG_DEF (phi, i);
+
+ if (TREE_CODE (arg) == SSA_NAME && is_old_name (arg))
+ {
+ /* Mark this use of ARG interesting for the renamer. Notice
+ that we explicitly call mark_use_interesting with
+ INSERT_PHI_P == false.
+
+ This is to avoid marking ARG as live-in in this block BB.
+ If we were to mark ARG live-in to BB, then ARG would be
+ considered live-in through ALL incoming edges to BB which
+ is not what we want. Since we are updating the SSA form
+ for ARG, we don't really know what other names of ARG are
+ coming in through other edges into BB.
+
+ If we considered ARG live-in at BB, then the PHI
+ placement algorithm may try to insert PHI nodes in blocks
+ that are not only unnecessary but also the renamer would
+ not know how to fill in. */
+ mark_use_interesting (arg, phi, bb, blocks, false);
+
+ /* As discussed above, we only want to mark ARG live-in
+ through the edge corresponding to its slot inside the PHI
+ argument list. So, we look for the block BB1 where ARG is
+ flowing through. If BB1 does not contain a definition of
+ ARG, then consider ARG live-in at BB1. */
+ if (insert_phi_p)
+ {
+ edge e = PHI_ARG_EDGE (phi, i);
+ basic_block bb1 = e->src;
+ struct def_blocks_d *db = get_def_blocks_for (arg);
+
+ if (!bitmap_bit_p (db->def_blocks, bb1->index))
+ set_livein_block (arg, bb1);
+ }
+ }
+ }
+}
+
+
+/* Do a dominator walk starting at BB processing statements that
+ reference variables in OLD_SSA_NAMES and NEW_SSA_NAMES.
+
+ 1- Mark in BLOCKS the defining block of every name N in
+ NEW_SSA_NAMES.
+
+ 2- Mark in BLOCKS the defining block of every name O in
+ OLD_SSA_NAMES.
+
+ 3- For every statement or PHI node that uses a name O in
+ OLD_SSA_NAMES, if INSERT_PHI_P is true, mark that use as live-in
+ to the corresponding block. This is later used by the PHI
+ placement algorithm to make PHI pruning decisions.
+
+ If VISIT_DOM_P is true, all the dominator children of BB are also
+ visited.
+
+ FIXME. This process is slower than necessary. Once we have
+ immediate uses merged in, we should be able to just visit the
+ immediate uses of all the names that we are about to replace,
+ instead of visiting the whole block. */
+
+static void
+prepare_block_for_update (basic_block bb, bool insert_phi_p,
+ bitmap blocks, bool visit_dom_p)
+{
+ basic_block son;
+ block_stmt_iterator si;
+ tree phi;
+
+ /* Process PHI nodes marking interesting those that define or use
+ the names that we are interested in. */
+ for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
+ {
+ tree lhs_sym, lhs = PHI_RESULT (phi);
+
+ REWRITE_THIS_STMT (phi) = 0;
+ REGISTER_DEFS_IN_THIS_STMT (phi) = 0;
+
+ /* Ignore virtual PHIs if we are not updating virtual operands.
+ Note that even if NEED_TO_REPLACE_NAMES_P is false, we need
+ to process real PHIs because we may be rewriting GIMPLE regs
+ into SSA for the first time. Therefore, we cannot do a
+ similar shortcut for real PHIs. */
+ if (!need_to_update_vops_p && !is_gimple_reg (lhs))
+ continue;
+
+ lhs_sym = DECL_P (lhs) ? lhs : SSA_NAME_VAR (lhs);
+
+ if (symbol_marked_for_renaming (lhs_sym))
+ {
+ /* If the LHS is a virtual symbol marked for renaming, then
+ we don't need to scan the argument list. Since virtual
+ operands are in FUD-chain form, all the arguments of this
+ PHI must be the same symbol as the LHS. So, we just need
+ to mark this site as both an interesting use and an
+ interesting def for the symbol. */
+ mark_use_interesting (lhs_sym, phi, bb, blocks, insert_phi_p);
+ mark_def_interesting (lhs_sym, phi, bb, blocks, insert_phi_p);
+ }
+ else if (need_to_replace_names_p)
+ {
+ /* If the LHS is in OLD_SSA_NAMES or NEW_SSA_NAMES, this is
+ a definition site for it. */
+ if (is_old_name (lhs) || is_new_name (lhs))
+ mark_def_interesting (lhs, phi, bb, blocks, insert_phi_p);
+
+ prepare_phi_args_for_update (phi, bb, blocks, insert_phi_p);
+ }
+ }
+
+ /* Process the statements. */
+ for (si = bsi_start (bb); !bsi_end_p (si); bsi_next (&si))
+ {
+ tree stmt;
+ ssa_op_iter i;
+ use_operand_p use_p;
+ def_operand_p def_p;
+
+ stmt = bsi_stmt (si);
+
+ REWRITE_THIS_STMT (stmt) = 0;
+ REGISTER_DEFS_IN_THIS_STMT (stmt) = 0;
+
+ /* Note, even if NEED_TO_REPLACE_NAMES_P is false, we need to
+ scan real uses and defs, as we may be renaming a GIMPLE
+ register for the first time. */
+ FOR_EACH_SSA_USE_OPERAND (use_p, stmt, i, SSA_OP_USE)
+ {
+ tree use = USE_FROM_PTR (use_p);
+ tree sym = DECL_P (use) ? use : SSA_NAME_VAR (use);
+ if (symbol_marked_for_renaming (sym) || is_old_name (use))
+ mark_use_interesting (use, stmt, bb, blocks, insert_phi_p);
+ }
+
+ FOR_EACH_SSA_DEF_OPERAND (def_p, stmt, i, SSA_OP_DEF)
+ {
+ tree def = DEF_FROM_PTR (def_p);
+ tree sym = DECL_P (def) ? def : SSA_NAME_VAR (def);
+
+ if (symbol_marked_for_renaming (sym)
+ || is_new_name (def)
+ || is_old_name (def))
+ mark_def_interesting (def, stmt, bb, blocks, insert_phi_p);
+ }
+
+ /* If we don't need to update virtual operands, continue to the
+ next statement. */
+ if (!need_to_update_vops_p)
+ continue;
+
+ /* For every interesting N_i = V_MAY_DEF <N_j> and
+ N_i = V_MUST_DEF <N_j>, mark the statement as interesting.
+ Notice that N_j may in fact be a naked symbol (if this
+ statement is the result of basic block duplication). The
+ rename process will later fill in the appropriate reaching
+ definition for the symbol. */
+ FOR_EACH_SSA_DEF_OPERAND (def_p, stmt, i, SSA_OP_VIRTUAL_DEFS)
+ {
+ tree def = DEF_FROM_PTR (def_p);
+ tree sym = DECL_P (def) ? def : SSA_NAME_VAR (def);
+
+ if (symbol_marked_for_renaming (sym))
+ {
+ mark_use_interesting (sym, stmt, bb, blocks, insert_phi_p);
+ mark_def_interesting (sym, stmt, bb, blocks, insert_phi_p);
+ }
+ }
+
+ /* Similarly, for V_USE <N_i>. */
+ FOR_EACH_SSA_USE_OPERAND (use_p, stmt, i, SSA_OP_VUSE)
+ {
+ tree use = USE_FROM_PTR (use_p);
+ tree sym = DECL_P (use) ? use : SSA_NAME_VAR (use);
+
+ if (symbol_marked_for_renaming (sym))
+ mark_use_interesting (sym, stmt, bb, blocks, insert_phi_p);
+ }
+ }
+
+ /* Now visit all the blocks dominated by BB. */
+ if (visit_dom_p)
+ for (son = first_dom_son (CDI_DOMINATORS, bb);
+ son;
+ son = next_dom_son (CDI_DOMINATORS, son))
+ prepare_block_for_update (son, insert_phi_p, blocks, true);
+}
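
A standalone model of the traversal performed by prepare_block_for_update (hypothetical first-child/next-sibling dominator tree, with "references an interesting name" reduced to a flag):

#include <stdio.h>

#define NBLOCKS 8

/* Toy dominator tree as a first-child/next-sibling structure, plus a
   flag saying whether the block mentions a name we care about.  */
static int first_son[NBLOCKS];
static int next_sibling[NBLOCKS];
static int mentions_name[NBLOCKS];

/* Model of the traversal: visit BB, record it in BLOCKS if it
   references an interesting name, then recurse into all blocks BB
   dominates.  */
static void
prepare_block (int bb, unsigned *blocks)
{
  if (mentions_name[bb])
    *blocks |= 1u << bb;

  for (int son = first_son[bb]; son != -1; son = next_sibling[son])
    prepare_block (son, blocks);
}

int
main (void)
{
  /* Dominator tree: 0 dominates 1 and 2; 2 dominates 3.  */
  for (int i = 0; i < NBLOCKS; i++)
    first_son[i] = next_sibling[i] = -1;
  first_son[0] = 1;  next_sibling[1] = 2;  first_son[2] = 3;

  mentions_name[1] = mentions_name[3] = 1;

  unsigned blocks = 0;
  prepare_block (0, &blocks);
  printf ("interesting blocks: 0x%x\n", blocks);   /* 0xa: blocks 1 and 3 */
  return 0;
}
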
+
+
+/* Helper for prepare_def_sites. Mark the definition site for NAME as
+ interesting. BLOCKS and INSERT_PHI_P are as in prepare_def_sites. */
+
+static void
+prepare_def_site_for (tree name, bitmap blocks, bool insert_phi_p)
+{
+ tree stmt;
+ basic_block bb;
+
+ gcc_assert (name && is_gimple_reg (name));
+ gcc_assert (names_to_release == NULL
+ || !bitmap_bit_p (names_to_release, SSA_NAME_VERSION (name)));
+
+ stmt = SSA_NAME_DEF_STMT (name);
+ bb = bb_for_stmt (stmt);
+ if (bb)
+ {
+ gcc_assert (bb->index < last_basic_block);
+ mark_def_interesting (name, stmt, bb, blocks, insert_phi_p);
+ }
+}
+
+
+/* Mark definition sites of names in NEW_SSA_NAMES and OLD_SSA_NAMES.
+ Add each definition block to BLOCKS. INSERT_PHI_P is true if the
+ caller wants to insert PHI nodes for newly created names. */
+
+static void
+prepare_def_sites (bitmap blocks, bool insert_phi_p)
+{
+ unsigned i;
+ bitmap_iterator bi;
+
+ /* If a name N from NEW_SSA_NAMES is also marked to be released,
+ remove it from NEW_SSA_NAMES so that we don't try to visit its
+ defining basic block (which most likely doesn't exist). Notice
+ that we cannot do the same with names in OLD_SSA_NAMES because we
+ want to replace existing instances. */
+ if (names_to_release)
+ EXECUTE_IF_SET_IN_BITMAP (names_to_release, 0, i, bi)
+ RESET_BIT (new_ssa_names, i);
+
+ /* If an old name is in NAMES_TO_RELEASE, we cannot remove it from
+ OLD_SSA_NAMES, but we have to ignore its definition site. */
+ EXECUTE_IF_SET_IN_SBITMAP (old_ssa_names, 0, i,
+ if (names_to_release == NULL || !bitmap_bit_p (names_to_release, i))
+ prepare_def_site_for (ssa_name (i), blocks, insert_phi_p));
+
+ EXECUTE_IF_SET_IN_SBITMAP (new_ssa_names, 0, i,
+ prepare_def_site_for (ssa_name (i), blocks, insert_phi_p));
+}
+
+
+/* Dump all the names replaced by NAME to FILE. */
void
-rewrite_def_def_chains (void)
+dump_names_replaced_by (FILE *file, tree name)
{
- /* Ensure that the dominance information is OK. */
- calculate_dominance_info (CDI_DOMINATORS);
- mark_def_site_blocks ();
- rewrite_blocks (true);
+ unsigned i;
+ bitmap old_set;
+ bitmap_iterator bi;
+
+ print_generic_expr (file, name, 0);
+ fprintf (file, " -> { ");
+
+ old_set = names_replaced_by (name);
+ EXECUTE_IF_SET_IN_BITMAP (old_set, 0, i, bi)
+ {
+ print_generic_expr (file, ssa_name (i), 0);
+ fprintf (file, " ");
+ }
+
+ fprintf (file, "}\n");
+}
+
+
+/* Dump all the names replaced by NAME to stderr. */
+
+void
+debug_names_replaced_by (tree name)
+{
+ dump_names_replaced_by (stderr, name);
+}
+
+
+/* Dump the SSA name replacement table to FILE. */
+
+void
+dump_repl_tbl (FILE *file)
+{
+ unsigned i;
+ bitmap_iterator bi;
+
+ if (!need_ssa_update_p ())
+ return;
+
+ if (new_ssa_names && sbitmap_first_set_bit (new_ssa_names) >= 0)
+ {
+ fprintf (file, "\nSSA replacement table\n");
+ fprintf (file, "N_i -> { O_1 ... O_j } means that N_i replaces "
+ "O_1, ..., O_j\n\n");
+
+ EXECUTE_IF_SET_IN_SBITMAP (new_ssa_names, 0, i,
+ dump_names_replaced_by (file, ssa_name (i)));
+ }
+
+ if (syms_to_rename && !bitmap_empty_p (syms_to_rename))
+ {
+ fprintf (file, "\n\nSymbols to be put in SSA form\n\n");
+ EXECUTE_IF_SET_IN_BITMAP (syms_to_rename, 0, i, bi)
+ {
+ print_generic_expr (file, referenced_var (i), 0);
+ fprintf (file, " ");
+ }
+ }
+
+ if (old_virtual_ssa_names && !bitmap_empty_p (old_virtual_ssa_names))
+ {
+ fprintf (file, "\n\nVirtual SSA names to be updated\n\n");
+ EXECUTE_IF_SET_IN_BITMAP (old_virtual_ssa_names, 0, i, bi)
+ {
+ print_generic_expr (file, ssa_name (i), 0);
+ fprintf (file, " ");
+ }
+ }
+
+ if (names_to_release && !bitmap_empty_p (names_to_release))
+ {
+ fprintf (file, "\n\nSSA names to release after updating the SSA web\n\n");
+ EXECUTE_IF_SET_IN_BITMAP (names_to_release, 0, i, bi)
+ {
+ print_generic_expr (file, ssa_name (i), 0);
+ fprintf (file, " ");
+ }
+ }
+
+ fprintf (file, "\n\n");
+}
+
+
+/* Dump the SSA name replacement table to stderr. */
+
+void
+debug_repl_tbl (void)
+{
+ dump_repl_tbl (stderr);
+}
+
+
+/* Initialize data structures used for incremental SSA updates. */
+
+static void
+init_update_ssa (void)
+{
+ /* Reserve 1/3 more than the current number of names. The calls to
+ add_new_name_mapping are typically done after creating new SSA
+ names, so we'll need to reallocate these arrays. */
+ old_ssa_names = sbitmap_alloc (num_ssa_names + NAME_SETS_GROWTH_FACTOR);
+ sbitmap_zero (old_ssa_names);
+
+ new_ssa_names = sbitmap_alloc (num_ssa_names + NAME_SETS_GROWTH_FACTOR);
+ sbitmap_zero (new_ssa_names);
+
+ repl_tbl = htab_create (20, repl_map_hash, repl_map_eq, repl_map_free);
+ need_to_initialize_update_ssa_p = false;
+ need_to_update_vops_p = false;
+ need_to_replace_names_p = false;
+ syms_to_rename = BITMAP_ALLOC (NULL);
+ old_virtual_ssa_names = BITMAP_ALLOC (NULL);
+ names_to_release = NULL;
}
+
+
+/* Deallocate data structures used for incremental SSA updates.  */
+
+static void
+delete_update_ssa (void)
+{
+ unsigned i;
+ bitmap_iterator bi;
+
+ sbitmap_free (old_ssa_names);
+ old_ssa_names = NULL;
+
+ sbitmap_free (new_ssa_names);
+ new_ssa_names = NULL;
+
+ htab_delete (repl_tbl);
+ repl_tbl = NULL;
+
+ need_to_initialize_update_ssa_p = true;
+ need_to_update_vops_p = false;
+ need_to_replace_names_p = false;
+ BITMAP_FREE (syms_to_rename);
+ BITMAP_FREE (old_virtual_ssa_names);
+
+ if (names_to_release)
+ {
+ EXECUTE_IF_SET_IN_BITMAP (names_to_release, 0, i, bi)
+ release_ssa_name (ssa_name (i));
+ BITMAP_FREE (names_to_release);
+ }
+
+ for (i = 1; i < num_ssa_names; i++)
+ {
+ tree n = ssa_name (i);
+
+ if (n)
+ {
+ free (SSA_NAME_AUX (n));
+ SSA_NAME_AUX (n) = NULL;
+ }
+ }
+
+ /* Unmark all the names we may have protected from being released in
+ insert_updated_phi_nodes_for. */
+ unmark_all_for_rewrite ();
+}
+
+
+/* Create a new name for OLD_NAME in statement STMT and replace the
+   operand pointed to by DEF with the newly created name.  Return the
+   new name and register the replacement mapping <NEW, OLD> in
+   update_ssa's tables.  */
+
+tree
+create_new_def_for (tree old_name, tree stmt, def_operand_p def)
+{
+ tree new_name = duplicate_ssa_name (old_name, stmt);
+
+ SET_DEF (def, new_name);
+
+ if (TREE_CODE (stmt) == PHI_NODE)
+ {
+ edge e;
+ edge_iterator ei;
+ basic_block bb = bb_for_stmt (stmt);
+
+ /* If needed, mark NEW_NAME as occurring in an abnormal PHI node. */
+ FOR_EACH_EDGE (e, ei, bb->preds)
+ if (e->flags & EDGE_ABNORMAL)
+ {
+ SSA_NAME_OCCURS_IN_ABNORMAL_PHI (new_name) = 1;
+ break;
+ }
+ }
+
+ register_new_name_mapping (new_name, old_name);
+
+ /* For the benefit of passes that will be updating the SSA form on
+ their own, set the current reaching definition of OLD_NAME to be
+ NEW_NAME. */
+ set_current_def (old_name, new_name);
+
+ return new_name;
+}
+
+
+/* Register name NEW to be a replacement for name OLD. This function
+ must be called for every replacement that should be performed by
+ update_ssa. */
+
+void
+register_new_name_mapping (tree new, tree old)
+{
+ if (need_to_initialize_update_ssa_p)
+ init_update_ssa ();
+
+ add_new_name_mapping (new, old);
+}
+
+
+/* Register symbol SYM to be renamed by update_ssa. */
+
+void
+mark_sym_for_renaming (tree sym)
+{
+ if (need_to_initialize_update_ssa_p)
+ init_update_ssa ();
+
+ bitmap_set_bit (syms_to_rename, var_ann (sym)->uid);
+
+ if (!is_gimple_reg (sym))
+ need_to_update_vops_p = true;
+}
+
+
+/* Register all the symbols in SET to be renamed by update_ssa. */
+
+void
+mark_set_for_renaming (bitmap set)
+{
+ bitmap_iterator bi;
+ unsigned i;
+
+ if (need_to_initialize_update_ssa_p)
+ init_update_ssa ();
+
+ bitmap_ior_into (syms_to_rename, set);
+
+ EXECUTE_IF_SET_IN_BITMAP (set, 0, i, bi)
+ if (!is_gimple_reg (referenced_var (i)))
+ {
+ need_to_update_vops_p = true;
+ break;
+ }
+}
+
+
+/* Return true if there is any work to be done by update_ssa. */
+
+bool
+need_ssa_update_p (void)
+{
+ return syms_to_rename || old_ssa_names || new_ssa_names;
+}
+
+
+/* Return true if name N has been registered in the replacement table. */
+
+bool
+name_registered_for_update_p (tree n)
+{
+ if (!need_ssa_update_p ())
+ return false;
+
+ return is_new_name (n)
+ || is_old_name (n)
+ || symbol_marked_for_renaming (SSA_NAME_VAR (n));
+}
+
+
+/* Return the set of all the SSA names marked to be replaced. */
+
+bitmap
+ssa_names_to_replace (void)
+{
+ unsigned i;
+ bitmap ret;
+
+ ret = BITMAP_ALLOC (NULL);
+ EXECUTE_IF_SET_IN_SBITMAP (old_ssa_names, 0, i,
+ bitmap_set_bit (ret, i));
+
+ bitmap_ior_into (ret, old_virtual_ssa_names);
+
+ return ret;
+}
+
+
+/* Mark NAME to be released after update_ssa has finished. */
+
+void
+release_ssa_name_after_update_ssa (tree name)
+{
+ gcc_assert (!need_to_initialize_update_ssa_p);
+
+ if (names_to_release == NULL)
+ names_to_release = BITMAP_ALLOC (NULL);
+
+ bitmap_set_bit (names_to_release, SSA_NAME_VERSION (name));
+}
+
+
+/* Insert new PHI nodes to replace VAR.  DFS contains dominance
+   frontier information.  BLOCKS is the set of blocks to be updated.
+
+   This is slightly different from the regular PHI insertion
+ algorithm. The value of UPDATE_FLAGS controls how PHI nodes for
+ real names (i.e., GIMPLE registers) are inserted:
+
+ - If UPDATE_FLAGS == TODO_update_ssa, we are only interested in PHI
+ nodes inside the region affected by the block that defines VAR
+ and the blocks that define all its replacements. All these
+ definition blocks have been gathered by prepare_block_for_update
+ and they are stored in DEF_BLOCKS[VAR]->DEF_BLOCKS.
+
+ First, we compute the entry point to the region (ENTRY). This is
+ given by the nearest common dominator to all the definition
+ blocks. When computing the iterated dominance frontier (IDF), any
+ block not strictly dominated by ENTRY is ignored.
+
+ We then call the standard PHI insertion algorithm with the pruned
+ IDF.
+
+ - If UPDATE_FLAGS == TODO_update_ssa_full_phi, the IDF for real
+ names is not pruned. PHI nodes are inserted at every IDF block. */
+
+static void
+insert_updated_phi_nodes_for (tree var, bitmap *dfs, bitmap blocks,
+ unsigned update_flags)
+{
+ basic_block entry;
+ struct def_blocks_d *db;
+ bitmap idf, pruned_idf;
+ bitmap_iterator bi;
+ unsigned i;
+
+#if defined ENABLE_CHECKING
+ if (TREE_CODE (var) == SSA_NAME)
+ gcc_assert (is_old_name (var));
+ else
+ gcc_assert (symbol_marked_for_renaming (var));
+#endif
+
+ /* Get all the definition sites for VAR. */
+ db = find_def_blocks_for (var);
+
+ /* No need to do anything if there were no definitions to VAR. */
+ if (db == NULL || bitmap_empty_p (db->def_blocks))
+ return;
+
+ /* Compute the initial iterated dominance frontier. */
+ idf = find_idf (db->def_blocks, dfs);
+ pruned_idf = BITMAP_ALLOC (NULL);
+
+ if (TREE_CODE (var) == SSA_NAME)
+ {
+ if (update_flags == TODO_update_ssa)
+ {
+ /* If doing regular SSA updates for GIMPLE registers, we are
+ only interested in IDF blocks dominated by the nearest
+ common dominator of all the definition blocks. */
+ entry = nearest_common_dominator_for_set (CDI_DOMINATORS,
+ db->def_blocks);
+
+ if (entry != ENTRY_BLOCK_PTR)
+ EXECUTE_IF_SET_IN_BITMAP (idf, 0, i, bi)
+ if (BASIC_BLOCK (i) != entry
+ && dominated_by_p (CDI_DOMINATORS, BASIC_BLOCK (i), entry))
+ bitmap_set_bit (pruned_idf, i);
+ }
+ else
+ {
+ /* Otherwise, do not prune the IDF for VAR. */
+ gcc_assert (update_flags == TODO_update_ssa_full_phi);
+ bitmap_copy (pruned_idf, idf);
+ }
+ }
+ else
+ {
+ /* Otherwise, VAR is a symbol that needs to be put into SSA form
+ for the first time, so we need to compute the full IDF for
+ it. */
+ bitmap_copy (pruned_idf, idf);
+
+ /* There may already be PHI nodes for VAR in the flowgraph.
+ Some of them are no longer necessary. PRUNED_IDF is
+ the set of blocks that need PHI nodes for VAR and
+ DB.PHI_BLOCKS is the set of blocks that already contain a PHI
+ node for VAR. Therefore, the set DB.PHI_BLOCKS - PRUNED_IDF
+ gives us the set of blocks that contain PHI nodes which are
+ no longer needed. */
+ if (!bitmap_empty_p (db->phi_blocks) && !bitmap_empty_p (pruned_idf))
+ EXECUTE_IF_AND_COMPL_IN_BITMAP (db->phi_blocks, pruned_idf, 0, i, bi)
+ {
+ tree phi, prev;
+ unsigned ver;
+
+ phi = find_phi_node_for (BASIC_BLOCK (i), var, &prev);
+
+ /* Protect the name on PHI's LHS from being released into
+ the SSA name free list. Since we have still not
+ updated the SSA form of the program, there may be
+ instances of PHI's LHS in the IL. */
+ ver = SSA_NAME_VERSION (PHI_RESULT (phi));
+ mark_for_rewrite (PHI_RESULT (phi));
+ release_ssa_name_after_update_ssa (PHI_RESULT (phi));
+ remove_phi_node (phi, prev);
+ }
+ }
+
+ if (!bitmap_empty_p (pruned_idf))
+ {
+ /* Make sure that PRUNED_IDF blocks and all their feeding blocks
+ are included in the region to be updated. The feeding blocks
+ are important to guarantee that the PHI arguments are renamed
+ properly. */
+ bitmap_ior_into (blocks, pruned_idf);
+ EXECUTE_IF_SET_IN_BITMAP (pruned_idf, 0, i, bi)
+ {
+ edge e;
+ edge_iterator ei;
+ basic_block bb = BASIC_BLOCK (i);
+
+ FOR_EACH_EDGE (e, ei, bb->preds)
+ if (e->src->index >= 0)
+ bitmap_set_bit (blocks, e->src->index);
+ }
+
+ insert_phi_nodes_for (var, pruned_idf, true);
+ }
+
+ BITMAP_FREE (pruned_idf);
+ BITMAP_FREE (idf);
+}
+
+
+/* Given a set of newly created SSA names (NEW_SSA_NAMES) and a set of
+ existing SSA names (OLD_SSA_NAMES), update the SSA form so that:
+
+ 1- The names in OLD_SSA_NAMES dominated by the definitions of
+ NEW_SSA_NAMES are all re-written to be reached by the
+ appropriate definition from NEW_SSA_NAMES.
+
+ 2- If needed, new PHI nodes are added to the iterated dominance
+ frontier of the blocks where each of NEW_SSA_NAMES are defined.
+
+   The mapping between OLD_SSA_NAMES and NEW_SSA_NAMES is set up by
+   the caller, which calls register_new_name_mapping for every pair
+   <NEW, OLD> of names to be replaced.  Note that update_ssa assumes
+   that the new names have already been inserted in the IL.
+
+ For instance, given the following code:
+
+ 1 L0:
+ 2 x_1 = PHI (0, x_5)
+ 3 if (x_1 < 10)
+ 4 if (x_1 > 7)
+ 5 y_2 = 0
+ 6 else
+ 7 y_3 = x_1 + x_7
+ 8 endif
+ 9 x_5 = x_1 + 1
+ 10 goto L0;
+ 11 endif
+
+ Suppose that we insert new names x_10 and x_11 (lines 4 and 8).
+
+ 1 L0:
+ 2 x_1 = PHI (0, x_5)
+ 3 if (x_1 < 10)
+ 4 x_10 = ...
+ 5 if (x_1 > 7)
+ 6 y_2 = 0
+ 7 else
+ 8 x_11 = ...
+ 9 y_3 = x_1 + x_7
+ 10 endif
+ 11 x_5 = x_1 + 1
+ 12 goto L0;
+ 13 endif
+
+ We want to replace all the uses of x_1 with the new definitions of
+ x_10 and x_11. Note that the only uses that should be replaced are
+ those at lines 5, 9 and 11. Also, the use of x_7 at line 9 should
+ *not* be replaced (this is why we cannot just mark symbol 'x' for
+ renaming).
+
+ Additionally, we may need to insert a PHI node at line 11 because
+ that is a merge point for x_10 and x_11. So the use of x_1 at line
+ 11 will be replaced with the new PHI node. The insertion of PHI
+ nodes is optional. They are not strictly necessary to preserve the
+ SSA form, and depending on what the caller inserted, they may not
+ even be useful for the optimizers. UPDATE_FLAGS controls various
+ aspects of how update_ssa operates, see the documentation for
+ TODO_update_ssa*. */
+
+void
+update_ssa (unsigned update_flags)
+{
+ bitmap *dfs, blocks;
+ basic_block bb, start_bb;
+ bitmap_iterator bi;
+ unsigned i;
+ sbitmap tmp;
+ bool insert_phi_p;
+
+ if (!need_ssa_update_p ())
+ return;
+
+ timevar_push (TV_TREE_SSA_INCREMENTAL);
+
+ /* Ensure that the dominance information is up-to-date. */
+ calculate_dominance_info (CDI_DOMINATORS);
+
+ /* Only one update flag should be set. */
+ gcc_assert (update_flags == TODO_update_ssa
+ || update_flags == TODO_update_ssa_no_phi
+ || update_flags == TODO_update_ssa_full_phi
+ || update_flags == TODO_update_ssa_only_virtuals);
+
+ /* If we only need to update virtuals, remove all the mappings for
+ real names before proceeding. */
+ if (update_flags == TODO_update_ssa_only_virtuals)
+ {
+ sbitmap_zero (old_ssa_names);
+ sbitmap_zero (new_ssa_names);
+ htab_empty (repl_tbl);
+ need_to_replace_names_p = false;
+ }
+
+ if (update_flags == TODO_update_ssa
+ || update_flags == TODO_update_ssa_full_phi
+ || update_flags == TODO_update_ssa_only_virtuals)
+ insert_phi_p = true;
+ else
+ insert_phi_p = false;
+
+ if (insert_phi_p)
+ {
+ /* If the caller requested PHI nodes to be added, compute
+ dominance frontiers and initialize live-in information data
+ structures (DEF_BLOCKS). */
+ dfs = (bitmap *) xmalloc (last_basic_block * sizeof (bitmap *));
+ FOR_EACH_BB (bb)
+ dfs[bb->index] = BITMAP_ALLOC (NULL);
+ compute_dominance_frontiers (dfs);
+
+ /* For each SSA name N, the DEF_BLOCKS table describes where the
+ name is defined, which blocks have PHI nodes for N, and which
+ blocks have uses of N (i.e., N is live-on-entry in those
+ blocks). */
+ def_blocks = htab_create (num_ssa_names, def_blocks_hash,
+ def_blocks_eq, def_blocks_free);
+ }
+ else
+ {
+ dfs = NULL;
+ def_blocks = NULL;
+ }
+
+ blocks = BITMAP_ALLOC (NULL);
+
+ /* Determine the CFG region that we are going to update. First add
+ all the blocks that define each of the names in NEW_SSA_NAMES
+ and OLD_SSA_NAMES. */
+ prepare_def_sites (blocks, insert_phi_p);
+
+ /* Next, determine the nearest common dominator START_BB for all the
+ blocks in the region. */
+ if (!bitmap_empty_p (syms_to_rename) || bitmap_empty_p (blocks))
+ {
+ /* If the region to update is seemingly empty, or if we have to
+ rename some symbols from scratch, we need to start the
+ process at the root of the CFG.
+
+ FIXME, it should be possible to determine the nearest block
+ that had a definition for each of the symbols that are marked
+ for updating. For now this seems more work than it's worth. */
+ start_bb = ENTRY_BLOCK_PTR;
+ }
+ else
+ start_bb = nearest_common_dominator_for_set (CDI_DOMINATORS, blocks);
+
+ /* Traverse all the blocks dominated by START_BB. Mark interesting
+ blocks and statements and set local live-in information for the
+ PHI placement heuristics. */
+ prepare_block_for_update (start_bb, insert_phi_p, blocks, true);
+
+  /* If we are going to insert PHI nodes, blocks in the dominance
+ frontier of START_BB may be affected. Note that we don't need to
+ visit the dominator children of blocks in the dominance frontier
+ of START_BB. None of the changes inside this region can affect
+ blocks on the outside. */
+ if (insert_phi_p && start_bb->index >= 0)
+ EXECUTE_IF_SET_IN_BITMAP (dfs[start_bb->index], 0, i, bi)
+ prepare_block_for_update (BASIC_BLOCK (i), insert_phi_p,
+ blocks, false);
+
+ /* If requested, insert PHI nodes at the iterated dominance frontier
+ of every block making new definitions for names in OLD_SSA_NAMES
+ and for symbols in SYMS_TO_RENAME. */
+ if (insert_phi_p)
+ {
+ if (sbitmap_first_set_bit (old_ssa_names) >= 0)
+ {
+	  /* insert_updated_phi_nodes_for will call
+ add_new_name_mapping when inserting new PHI nodes, so the
+ set OLD_SSA_NAMES will grow while we are traversing it
+ (but it will not gain any new members). Copy
+ OLD_SSA_NAMES to a temporary for traversal. */
+ sbitmap tmp = sbitmap_alloc (old_ssa_names->n_bits);
+ sbitmap_copy (tmp, old_ssa_names);
+ EXECUTE_IF_SET_IN_SBITMAP (tmp, 0, i,
+ insert_updated_phi_nodes_for (ssa_name (i), dfs, blocks,
+ update_flags));
+ sbitmap_free (tmp);
+ }
+
+ EXECUTE_IF_SET_IN_BITMAP (syms_to_rename, 0, i, bi)
+ insert_updated_phi_nodes_for (referenced_var (i), dfs, blocks,
+ update_flags);
+
+ /* Insertion of PHI nodes may have added blocks to the region.
+ We need to re-compute START_BB to include the newly added
+ blocks. */
+ if (start_bb != ENTRY_BLOCK_PTR)
+ start_bb = nearest_common_dominator_for_set (CDI_DOMINATORS, blocks);
+ }
+
+ /* Reset the current definition for name and symbol before renaming
+ the sub-graph. */
+ if (update_flags == TODO_update_ssa_full_phi)
+ {
+      /* If we are not pruning the IDF for new PHI nodes, set the
+ current name of every GIMPLE register to NULL. This way, PHI
+ arguments coming from edges with uninitialized values will be
+ renamed to use the symbol's default definition. */
+ EXECUTE_IF_SET_IN_SBITMAP (old_ssa_names, 0, i,
+ set_current_def (ssa_name (i), NULL_TREE));
+ }
+ else
+ {
+      /* Otherwise, clear the current reaching definition of each old
+	 name so that the renaming walk below recomputes it.  */
+ EXECUTE_IF_SET_IN_SBITMAP (old_ssa_names, 0, i,
+ set_current_def (ssa_name (i), NULL_TREE));
+ }
+
+ EXECUTE_IF_SET_IN_BITMAP (syms_to_rename, 0, i, bi)
+ set_current_def (referenced_var (i), NULL_TREE);
+
+ /* Now start the renaming process at START_BB. */
+ tmp = sbitmap_alloc (last_basic_block);
+ sbitmap_zero (tmp);
+ EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i, bi)
+ SET_BIT (tmp, i);
+
+ rewrite_blocks (start_bb, REWRITE_UPDATE, tmp);
+
+ sbitmap_free (tmp);
+
+ /* Debugging dumps. */
+ if (dump_file)
+ {
+ int c;
+ unsigned i;
+
+ dump_repl_tbl (dump_file);
+
+ fprintf (dump_file, "Incremental SSA update started at block: %d\n\n",
+ start_bb->index);
+
+ c = 0;
+ EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i, bi)
+ c++;
+ fprintf (dump_file, "Number of blocks in CFG: %d\n", last_basic_block);
+ fprintf (dump_file, "Number of blocks to update: %d (%3.0f%%)\n\n",
+ c, PERCENT (c, last_basic_block));
+
+ if (dump_flags & TDF_DETAILS)
+ {
+ fprintf (dump_file, "Affected blocks: ");
+ EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i, bi)
+ fprintf (dump_file, "%u ", i);
+ fprintf (dump_file, "\n");
+ }
+
+ fprintf (dump_file, "\n\n");
+ }
+
+ /* Free allocated memory. */
+ if (insert_phi_p)
+ {
+ FOR_EACH_BB (bb)
+ BITMAP_FREE (dfs[bb->index]);
+ free (dfs);
+ }
+
+ BITMAP_FREE (blocks);
+ delete_update_ssa ();
+
+ timevar_pop (TV_TREE_SSA_INCREMENTAL);
+}
+
/*---------------------------------------------------------------------------
Functions to fix a program in invalid SSA form into valid SSA
@@ -1605,7 +3063,7 @@ ssa_mark_def_sites (struct dom_walk_data *walk_data,
if (TEST_BIT (gd->names_to_rename, def_uid))
{
- set_def_block (def, bb, false, true);
+ set_def_block (def, bb, false);
bitmap_set_bit (kills, def_uid);
}
}
@@ -1634,7 +3092,7 @@ ssa_mark_def_sites_initialize_block (struct dom_walk_data *walk_data,
if (!TEST_BIT (gd->names_to_rename, def_uid))
continue;
- set_def_block (def, bb, true, true);
+ set_def_block (def, bb, true);
bitmap_set_bit (kills, def_uid);
}
}
@@ -1714,6 +3172,7 @@ rewrite_ssa_into_ssa (void)
mark definition sites. */
walk_data.walk_stmts_backward = false;
walk_data.dom_direction = CDI_DOMINATORS;
+ walk_data.interesting_blocks = NULL;
walk_data.initialize_block_local_data = NULL;
walk_data.before_dom_children_before_stmts
= ssa_mark_def_sites_initialize_block;
@@ -1761,6 +3220,7 @@ rewrite_ssa_into_ssa (void)
/* Setup callbacks for the generic dominator tree walker. */
walk_data.walk_stmts_backward = false;
walk_data.dom_direction = CDI_DOMINATORS;
+ walk_data.interesting_blocks = NULL;
walk_data.initialize_block_local_data = NULL;
walk_data.before_dom_children_before_stmts = ssa_rewrite_initialize_block;
walk_data.before_dom_children_walk_stmts = ssa_rewrite_stmt;
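
As a rough illustration of how the new incremental-update API above is meant to be driven (this sketch is not part of the patch; create_new_def_for, mark_sym_for_renaming, update_ssa, TODO_update_ssa and the operand iterator macros are real, the wrapper function is hypothetical):

/* Illustrative only: give STMT brand new SSA names for everything it
   defines and let update_ssa patch the uses dominated by them.  */
static void
example_replace_defs (tree stmt)
{
  def_operand_p def_p;
  ssa_op_iter iter;

  /* Each call records the <new, old> mapping that update_ssa uses.  */
  FOR_EACH_SSA_DEF_OPERAND (def_p, stmt, iter, SSA_OP_DEF)
    create_new_def_for (DEF_FROM_PTR (def_p), stmt, def_p);

  /* Newly exposed symbols are registered wholesale instead:
     mark_sym_for_renaming (sym);  */

  /* Fix up the SSA web.  Passes normally request this by returning
     TODO_update_ssa in todo_flags_finish rather than calling it
     directly.  */
  update_ssa (TODO_update_ssa);
}
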
diff --git a/gcc/tree-loop-linear.c b/gcc/tree-loop-linear.c
index 0835a451dcb..f2bd75d3766 100644
--- a/gcc/tree-loop-linear.c
+++ b/gcc/tree-loop-linear.c
@@ -371,9 +371,6 @@ linear_transform_loops (struct loops *loops)
free_data_refs (datarefs);
}
scev_reset ();
- rewrite_into_ssa (false);
+ update_ssa (TODO_update_ssa);
rewrite_into_loop_closed_ssa (NULL);
-#ifdef ENABLE_CHECKING
- verify_loop_closed_ssa ();
-#endif
}
diff --git a/gcc/tree-optimize.c b/gcc/tree-optimize.c
index e6439151303..34ed9794367 100644
--- a/gcc/tree-optimize.c
+++ b/gcc/tree-optimize.c
@@ -52,7 +52,6 @@ Boston, MA 02111-1307, USA. */
/* Global variables used to communicate with passes. */
int dump_flags;
-bitmap vars_to_rename;
bool in_gimple_form;
/* The root of the compilation pass tree, once constructed. */
@@ -355,8 +354,9 @@ init_tree_optimization_passes (void)
NEXT_PASS (pass_early_warn_uninitialized);
NEXT_PASS (pass_dce);
NEXT_PASS (pass_dominator);
- NEXT_PASS (pass_redundant_phi);
+ NEXT_PASS (pass_copy_prop);
NEXT_PASS (pass_dce);
+ NEXT_PASS (pass_vrp);
NEXT_PASS (pass_merge_phi);
NEXT_PASS (pass_forwprop);
NEXT_PASS (pass_phiopt);
@@ -371,14 +371,14 @@ init_tree_optimization_passes (void)
NEXT_PASS (pass_may_alias);
NEXT_PASS (pass_rename_ssa_copies);
NEXT_PASS (pass_dominator);
- NEXT_PASS (pass_redundant_phi);
+ NEXT_PASS (pass_copy_prop);
NEXT_PASS (pass_dce);
NEXT_PASS (pass_dse);
NEXT_PASS (pass_may_alias);
NEXT_PASS (pass_forwprop);
NEXT_PASS (pass_phiopt);
- NEXT_PASS (pass_ccp);
- NEXT_PASS (pass_redundant_phi);
+ NEXT_PASS (pass_store_ccp);
+ NEXT_PASS (pass_store_copy_prop);
NEXT_PASS (pass_fold_builtins);
/* FIXME: May alias should a TODO but for 4.0.0,
we add may_alias right after fold builtins
@@ -389,7 +389,7 @@ init_tree_optimization_passes (void)
NEXT_PASS (pass_sink_code);
NEXT_PASS (pass_loop);
NEXT_PASS (pass_dominator);
- NEXT_PASS (pass_redundant_phi);
+ NEXT_PASS (pass_copy_prop);
/* FIXME: If DCE is not run before checking for uninitialized uses,
we may get false warnings (e.g., testsuite/gcc.dg/uninit-5.c).
However, this also causes us to misdiagnose cases that should be
@@ -415,6 +415,7 @@ init_tree_optimization_passes (void)
p = &pass_loop.sub;
NEXT_PASS (pass_loop_init);
+ NEXT_PASS (pass_copy_prop);
NEXT_PASS (pass_lim);
NEXT_PASS (pass_unswitch);
NEXT_PASS (pass_record_bounds);
@@ -443,15 +444,15 @@ execute_todo (struct tree_opt_pass *pass, unsigned int flags, bool use_required)
int properties
= use_required ? pass->properties_required : pass->properties_provided;
- if (flags & TODO_rename_vars)
- {
- rewrite_into_ssa (false);
- bitmap_clear (vars_to_rename);
- }
- if (flags & TODO_fix_def_def_chains)
+#if defined ENABLE_CHECKING
+ if (need_ssa_update_p ())
+ gcc_assert (flags & TODO_update_ssa_any);
+#endif
+
+ if (flags & TODO_update_ssa_any)
{
- rewrite_def_def_chains ();
- bitmap_clear (vars_to_rename);
+ unsigned update_flags = flags & TODO_update_ssa_any;
+ update_ssa (update_flags);
}
if (flags & TODO_cleanup_cfg)
@@ -482,15 +483,16 @@ execute_todo (struct tree_opt_pass *pass, unsigned int flags, bool use_required)
ggc_collect ();
}
-#ifdef ENABLE_CHECKING
+#if defined ENABLE_CHECKING
if ((pass->properties_required & PROP_ssa)
&& !(pass->properties_destroyed & PROP_ssa))
- verify_ssa (true);
-
+ verify_ssa (true);
if (flags & TODO_verify_flow)
verify_flow_info ();
if (flags & TODO_verify_stmts)
verify_stmts ();
+ if (flags & TODO_verify_loops)
+ verify_loop_closed_ssa ();
#endif
}
@@ -687,8 +689,6 @@ tree_rest_of_compilation (tree fndecl)
bitmap_obstack_initialize (NULL);
bitmap_obstack_initialize (&reg_obstack); /* FIXME, only at RTL generation*/
- vars_to_rename = BITMAP_ALLOC (NULL);
-
/* Perform all tree transforms and optimizations. */
execute_pass_list (all_passes);
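
The practical contract introduced here is that a pass which registers names or symbols must also request an update, otherwise the new ENABLE_CHECKING assertion in execute_todo fires.  A hypothetical fragment, just to spell that out (only mark_sym_for_renaming and the TODO_update_ssa* flags are real):

/* Somewhere inside a pass's execute function (illustrative only).  */
static void
example_expose_symbol (tree sym)
{
  /* SYM now needs to be put into (or back into) SSA form ...  */
  mark_sym_for_renaming (sym);

  /* ... so the pass descriptor must carry one of the TODO_update_ssa*
     flags in todo_flags_finish; execute_todo will then call update_ssa
     before the need_ssa_update_p () assertion runs.  */
}
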
diff --git a/gcc/tree-pass.h b/gcc/tree-pass.h
index da5b8994fcf..f2e4cb29530 100644
--- a/gcc/tree-pass.h
+++ b/gcc/tree-pass.h
@@ -28,8 +28,6 @@ extern FILE *dump_file;
extern int dump_flags;
extern const char *dump_file_name;
-extern struct bitmap_head_def *vars_to_rename;
-
/* Return the dump_file_info for the given phase. */
extern struct dump_file_info *get_dump_file_info (enum tree_dump_index);
@@ -101,19 +99,60 @@ struct dump_file_info
(PROP_gimple_any | PROP_gimple_lcf | PROP_gimple_leh)
/* To-do flags. */
-#define TODO_dump_func (1 << 0) /* pass doesn't dump itself */
-#define TODO_rename_vars (1 << 1) /* rewrite new vars to ssa */
-#define TODO_ggc_collect (1 << 2) /* run the collector */
-#define TODO_verify_ssa (1 << 3)
-#define TODO_verify_flow (1 << 4)
-#define TODO_verify_stmts (1 << 5)
-#define TODO_fix_def_def_chains (1 << 6) /* rewrite def-def chains */
-#define TODO_cleanup_cfg (1 << 7) /* cleanup the cfg. */
+#define TODO_dump_func (1 << 0)
+#define TODO_ggc_collect (1 << 1)
+#define TODO_verify_ssa (1 << 2)
+#define TODO_verify_flow (1 << 3)
+#define TODO_verify_stmts (1 << 4)
+#define TODO_cleanup_cfg (1 << 5)
+#define TODO_verify_loops (1 << 6)
+
+/* To-do flags for calls to update_ssa. */
+
+/* Update the SSA form inserting PHI nodes for newly exposed symbols
+ and virtual names marked for updating. When updating real names,
+ only insert PHI nodes for a real name O_j in blocks reached by all
+ the new and old definitions for O_j. If the iterated dominance
+ frontier for O_j is not pruned, we may end up inserting PHI nodes
+ in blocks that have one or more edges with no incoming definition
+ for O_j. This would lead to uninitialized warnings for O_j's
+ symbol. */
+#define TODO_update_ssa (1 << 7)
+
+/* Update the SSA form without inserting any new PHI nodes at all.
+   This is used by passes that have either inserted all the PHI nodes
+   themselves or that need only to patch use-def and def-def
+ chains for virtuals (e.g., DCE). */
+#define TODO_update_ssa_no_phi (1 << 8)
+
+/* Insert PHI nodes everywhere they are needed.  No pruning of the
+ IDF is done. This is used by passes that need the PHI nodes for
+ O_j even if it means that some arguments will come from the default
+ definition of O_j's symbol (e.g., pass_linear_transform).
+
+ WARNING: If you need to use this flag, chances are that your pass
+ may be doing something wrong. Inserting PHI nodes for an old name
+ where not all edges carry a new replacement may lead to silent
+ codegen errors or spurious uninitialized warnings. */
+#define TODO_update_ssa_full_phi (1 << 9)
+
+/* Passes that update the SSA form on their own may want to delegate
+ the updating of virtual names to the generic updater. Since FUD
+ chains are easier to maintain, this simplifies the work they need
+ to do. NOTE: If this flag is used, any OLD->NEW mappings for real
+ names are explicitly destroyed and only the symbols marked for
+ renaming are processed. */
+#define TODO_update_ssa_only_virtuals (1 << 10)
+
+#define TODO_update_ssa_any \
+ (TODO_update_ssa \
+ | TODO_update_ssa_no_phi \
+ | TODO_update_ssa_full_phi \
+ | TODO_update_ssa_only_virtuals)
#define TODO_verify_all \
(TODO_verify_ssa | TODO_verify_flow | TODO_verify_stmts)
-
extern struct tree_opt_pass pass_mudflap_1;
extern struct tree_opt_pass pass_mudflap_2;
extern struct tree_opt_pass pass_remove_useless_stmts;
@@ -167,6 +206,10 @@ extern struct tree_opt_pass pass_rest_of_compilation;
extern struct tree_opt_pass pass_sink_code;
extern struct tree_opt_pass pass_fre;
extern struct tree_opt_pass pass_linear_transform;
+extern struct tree_opt_pass pass_copy_prop;
+extern struct tree_opt_pass pass_store_ccp;
+extern struct tree_opt_pass pass_store_copy_prop;
+extern struct tree_opt_pass pass_vrp;
extern struct tree_opt_pass pass_create_structure_vars;
#endif /* GCC_TREE_PASS_H */
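
As a concrete illustration of the new flags, a pass that creates new symbols or SSA names would advertise the update in its descriptor roughly like this (the pass itself is hypothetical; the field layout follows existing tree_opt_pass descriptors such as the pass_sra change later in this patch):

static struct tree_opt_pass pass_example =
{
  "example",				/* name */
  NULL,					/* gate */
  execute_example,			/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  0,					/* tv_id */
  PROP_cfg | PROP_ssa,			/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_dump_func | TODO_update_ssa
    | TODO_verify_ssa,			/* todo_flags_finish */
  0					/* letter */
};
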
diff --git a/gcc/tree-phinodes.c b/gcc/tree-phinodes.c
index 963ef0a9578..929480f80c0 100644
--- a/gcc/tree-phinodes.c
+++ b/gcc/tree-phinodes.c
@@ -197,10 +197,8 @@ ideal_phi_node_len (int len)
return new_len;
}
-/* Return a PHI node for variable VAR defined in statement STMT.
- STMT may be an empty statement for artificial references (e.g., default
- definitions created when a variable is used without a preceding
- definition). */
+
+/* Return a PHI node with LEN argument slots for variable VAR. */
static tree
make_phi_node (tree var, int len)
@@ -468,58 +466,30 @@ remove_phi_node (tree phi, tree prev)
}
-/* Remove all the PHI nodes for variables in the VARS bitmap. */
+/* Find the first PHI node P in basic block BB for symbol SYM. If
+ PREV_P is given, the PHI node preceding P is stored in *PREV_P. */
-void
-remove_all_phi_nodes_for (bitmap vars)
+tree
+find_phi_node_for (basic_block bb, tree sym, tree *prev_p)
{
- basic_block bb;
-
- FOR_EACH_BB (bb)
- {
- /* Build a new PHI list for BB without variables in VARS. */
- tree phi, new_phi_list, next;
- tree *lastp = &new_phi_list;
+ tree phi;
- for (phi = phi_nodes (bb); phi; phi = next)
- {
- tree var = SSA_NAME_VAR (PHI_RESULT (phi));
-
- next = PHI_CHAIN (phi);
- /* Only add PHI nodes for variables not in VARS. */
- if (!bitmap_bit_p (vars, var_ann (var)->uid))
- {
- /* If we're not removing this PHI node, then it must have
- been rewritten by a previous call into the SSA rewriter.
- Note that fact in PHI_REWRITTEN. */
- PHI_REWRITTEN (phi) = 1;
-
- *lastp = phi;
- lastp = &PHI_CHAIN (phi);
- }
- else
- {
- /* If we are deleting the PHI node, then we should release the
- SSA_NAME node so that it can be reused. */
- release_phi_node (phi);
- release_ssa_name (PHI_RESULT (phi));
- }
- }
+ if (prev_p)
+ *prev_p = NULL_TREE;
- /* Make sure the last node in the new list has no successors. */
- *lastp = NULL;
- bb_ann (bb)->phi_nodes = new_phi_list;
+ for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
+ {
+ if (SSA_NAME_VAR (PHI_RESULT (phi)) == sym)
+ return phi;
-#if defined ENABLE_CHECKING
- for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
- {
- tree var = SSA_NAME_VAR (PHI_RESULT (phi));
- gcc_assert (!bitmap_bit_p (vars, var_ann (var)->uid));
- }
-#endif
+ if (prev_p)
+ *prev_p = phi;
}
+
+ return NULL_TREE;
}
+
/* Reverse the order of PHI nodes in the chain PHI.
Return the new head of the chain (old last PHI node). */
@@ -537,4 +507,3 @@ phi_reverse (tree phi)
}
#include "gt-tree-phinodes.h"
-
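
The typical consumer of find_phi_node_for is insert_updated_phi_nodes_for in tree-into-ssa.c; condensed to its essentials, the usage pattern looks like this (illustrative wrapper, not part of the patch; the called functions all appear earlier in this diff):

/* Remove the PHI node for SYM in BB, if any, keeping its result alive
   until update_ssa has finished with it.  */
static void
example_remove_phi_for (basic_block bb, tree sym)
{
  tree prev;
  tree phi = find_phi_node_for (bb, sym, &prev);

  if (phi)
    {
      mark_for_rewrite (PHI_RESULT (phi));
      release_ssa_name_after_update_ssa (PHI_RESULT (phi));
      remove_phi_node (phi, prev);
    }
}
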
diff --git a/gcc/tree-pretty-print.c b/gcc/tree-pretty-print.c
index 06c9f132024..598cc907819 100644
--- a/gcc/tree-pretty-print.c
+++ b/gcc/tree-pretty-print.c
@@ -1430,6 +1430,14 @@ dump_generic_node (pretty_printer *buffer, tree node, int spc, int flags,
pp_printf (buffer, "VH.%d", VALUE_HANDLE_ID (node));
break;
+ case ASSERT_EXPR:
+ pp_string (buffer, "ASSERT_EXPR <");
+ dump_generic_node (buffer, ASSERT_EXPR_VAR (node), spc, flags, false);
+ pp_string (buffer, ", ");
+ dump_generic_node (buffer, ASSERT_EXPR_COND (node), spc, flags, false);
+ pp_string (buffer, ">");
+ break;
+
case SCEV_KNOWN:
pp_string (buffer, "scev_known");
break;
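
With this case added, the assertions introduced by VRP show up in tree dumps roughly as follows (the SSA versions are made up for illustration):

  x_3 = ASSERT_EXPR <x_1, x_1 > 10>;

That is, x_3 carries the value of x_1 together with the predicate known to hold on this path; downstream consumers such as the scalar evolution analyzer treat it as a plain copy of x_1.
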
diff --git a/gcc/tree-scalar-evolution.c b/gcc/tree-scalar-evolution.c
index 576fce0ae11..ffead4903b7 100644
--- a/gcc/tree-scalar-evolution.c
+++ b/gcc/tree-scalar-evolution.c
@@ -1065,8 +1065,8 @@ follow_ssa_edge_in_rhs (struct loop *loop,
- an INTEGER_CST,
- a PLUS_EXPR,
- a MINUS_EXPR,
- - other cases are not yet handled.
- */
+ - an ASSERT_EXPR,
+ - other cases are not yet handled. */
switch (TREE_CODE (rhs))
{
case NOP_EXPR:
@@ -1247,6 +1247,20 @@ follow_ssa_edge_in_rhs (struct loop *loop,
break;
+ case ASSERT_EXPR:
+ {
+ /* This assignment is of the form: "a_1 = ASSERT_EXPR <a_2, ...>"
+ It must be handled as a copy assignment of the form a_1 = a_2. */
+ tree op0 = ASSERT_EXPR_VAR (rhs);
+ if (TREE_CODE (op0) == SSA_NAME)
+ res = follow_ssa_edge (loop, SSA_NAME_DEF_STMT (op0),
+ halting_phi, evolution_of_loop);
+ else
+ res = false;
+ break;
+ }
+
+
default:
res = false;
break;
@@ -1701,6 +1715,11 @@ interpret_rhs_modify_expr (struct loop *loop,
case SSA_NAME:
res = chrec_convert (type, analyze_scalar_evolution (loop, opnd1));
break;
+
+ case ASSERT_EXPR:
+ opnd10 = ASSERT_EXPR_VAR (opnd1);
+ res = chrec_convert (type, analyze_scalar_evolution (loop, opnd10));
+ break;
case NOP_EXPR:
case CONVERT_EXPR:
diff --git a/gcc/tree-sra.c b/gcc/tree-sra.c
index b933fbc32e0..1ca629fe5d0 100644
--- a/gcc/tree-sra.c
+++ b/gcc/tree-sra.c
@@ -1428,6 +1428,8 @@ decide_instantiations (void)
}
bitmap_clear (&done_head);
+ mark_set_for_renaming (sra_candidates);
+
if (dump_file)
fputc ('\n', dump_file);
}
@@ -1439,7 +1441,7 @@ decide_instantiations (void)
renaming. This becomes necessary when we modify all of a non-scalar. */
static void
-mark_all_v_defs (tree stmt)
+mark_all_v_defs_1 (tree stmt)
{
tree sym;
ssa_op_iter iter;
@@ -1450,10 +1452,28 @@ mark_all_v_defs (tree stmt)
{
if (TREE_CODE (sym) == SSA_NAME)
sym = SSA_NAME_VAR (sym);
- bitmap_set_bit (vars_to_rename, var_ann (sym)->uid);
+ mark_sym_for_renaming (sym);
+ }
+}
+
+
+/* Mark all the variables in virtual operands in all the statements in
+ LIST for renaming. */
+
+static void
+mark_all_v_defs (tree list)
+{
+ if (TREE_CODE (list) != STATEMENT_LIST)
+ mark_all_v_defs_1 (list);
+ else
+ {
+ tree_stmt_iterator i;
+ for (i = tsi_start (list); !tsi_end_p (i); tsi_next (&i))
+ mark_all_v_defs_1 (tsi_stmt (i));
}
}
+
/* Build a single level component reference to ELT rooted at BASE. */
static tree
@@ -1706,7 +1726,7 @@ generate_element_init (struct sra_elt *elt, tree init, tree *list_p)
new = num_referenced_vars;
for (j = old; j < new; ++j)
- bitmap_set_bit (vars_to_rename, j);
+ mark_sym_for_renaming (referenced_var (j));
}
return ret;
@@ -1820,7 +1840,7 @@ scalarize_use (struct sra_elt *elt, tree *expr_p, block_stmt_iterator *bsi,
generate_copy_inout (elt, is_output, generate_element_ref (elt), &list);
if (list == NULL)
return;
- mark_all_v_defs (expr_first (list));
+ mark_all_v_defs (list);
if (is_output)
sra_insert_after (bsi, list);
else
@@ -1865,7 +1885,7 @@ scalarize_copy (struct sra_elt *lhs_elt, struct sra_elt *rhs_elt,
generate_element_ref (rhs_elt), &list);
if (list)
{
- mark_all_v_defs (expr_first (list));
+ mark_all_v_defs (list);
sra_insert_before (bsi, list);
}
@@ -1873,7 +1893,10 @@ scalarize_copy (struct sra_elt *lhs_elt, struct sra_elt *rhs_elt,
generate_copy_inout (lhs_elt, true,
generate_element_ref (lhs_elt), &list);
if (list)
- sra_insert_after (bsi, list);
+ {
+ mark_all_v_defs (list);
+ sra_insert_after (bsi, list);
+ }
}
else
{
@@ -1887,6 +1910,7 @@ scalarize_copy (struct sra_elt *lhs_elt, struct sra_elt *rhs_elt,
list = NULL;
generate_element_copy (lhs_elt, rhs_elt, &list);
gcc_assert (list);
+ mark_all_v_defs (list);
sra_replace (bsi, list);
}
}
@@ -1936,7 +1960,7 @@ scalarize_init (struct sra_elt *lhs_elt, tree rhs, block_stmt_iterator *bsi)
exposes constants to later optimizations. */
if (list)
{
- mark_all_v_defs (expr_first (list));
+ mark_all_v_defs (list);
sra_insert_after (bsi, list);
}
}
@@ -1946,6 +1970,7 @@ scalarize_init (struct sra_elt *lhs_elt, tree rhs, block_stmt_iterator *bsi)
replaces the original structure assignment. */
gcc_assert (list);
mark_all_v_defs (bsi_stmt (*bsi));
+ mark_all_v_defs (list);
sra_replace (bsi, list);
}
}
@@ -1996,6 +2021,7 @@ scalarize_ldst (struct sra_elt *elt, tree other,
mark_all_v_defs (stmt);
generate_copy_inout (elt, is_output, other, &list);
+ mark_all_v_defs (list);
gcc_assert (list);
/* Preserve EH semantics. */
@@ -2051,7 +2077,10 @@ scalarize_parms (void)
}
if (list)
- insert_edge_copies (list, ENTRY_BLOCK_PTR);
+ {
+ insert_edge_copies (list, ENTRY_BLOCK_PTR);
+ mark_all_v_defs (list);
+ }
}
/* Entry point to phase 4. Update the function to match replacements. */
@@ -2154,7 +2183,7 @@ struct tree_opt_pass pass_sra =
0, /* properties_provided */
0, /* properties_destroyed */
0, /* todo_flags_start */
- TODO_dump_func | TODO_rename_vars
+ TODO_dump_func | TODO_update_ssa
| TODO_ggc_collect | TODO_verify_ssa, /* todo_flags_finish */
0 /* letter */
};
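
For context (not part of the patch), this is the kind of source-level rewrite SRA performs: it creates brand-new scalar variables, which is why the pass now registers them via mark_sym_for_renaming / mark_set_for_renaming and asks for TODO_update_ssa.  The names below are illustrative only:

/* Before SRA.  */
struct point { int x, y; };

int
before (struct point p)
{
  struct point q = p;
  return q.x + q.y;
}

/* Conceptually after SRA: the aggregate Q is replaced by scalars,
   which then need SSA versions from update_ssa.  */
int
after (struct point p)
{
  int q_x = p.x;
  int q_y = p.y;
  return q_x + q_y;
}
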
diff --git a/gcc/tree-ssa-alias.c b/gcc/tree-ssa-alias.c
index 17bf8c5cb76..de39ed128f4 100644
--- a/gcc/tree-ssa-alias.c
+++ b/gcc/tree-ssa-alias.c
@@ -371,7 +371,7 @@ struct tree_opt_pass pass_may_alias =
PROP_alias, /* properties_provided */
0, /* properties_destroyed */
0, /* todo_flags_start */
- TODO_dump_func | TODO_rename_vars
+ TODO_dump_func | TODO_update_ssa
| TODO_ggc_collect | TODO_verify_ssa
| TODO_verify_stmts, /* todo_flags_finish */
0 /* letter */
@@ -407,7 +407,7 @@ count_ptr_derefs (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED, void *data)
*NUM_DEREFS_P respectively. *IS_STORE_P is set to 'true' if at
least one of those dereferences is a store operation. */
-static void
+void
count_uses_and_derefs (tree ptr, tree stmt, unsigned *num_uses_p,
unsigned *num_derefs_p, bool *is_store)
{
@@ -770,7 +770,7 @@ compute_points_to_and_addr_escape (struct alias_info *ai)
/* Mark variables in V_MAY_DEF operands as being written to. */
FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_VIRTUAL_DEFS)
{
- tree var = SSA_NAME_VAR (op);
+ tree var = DECL_P (op) ? op : SSA_NAME_VAR (op);
var_ann_t ann = var_ann (var);
bitmap_set_bit (ai->written_vars, ann->uid);
}
@@ -855,7 +855,7 @@ create_name_tags (struct alias_info *ai)
needs to be removed from the IL, so we mark it for
renaming. */
if (old_name_tag && old_name_tag != pi->name_mem_tag)
- bitmap_set_bit (vars_to_rename, var_ann (old_name_tag)->uid);
+ mark_sym_for_renaming (old_name_tag);
}
else if (pi->pt_malloc)
{
@@ -875,7 +875,7 @@ create_name_tags (struct alias_info *ai)
|= TREE_THIS_VOLATILE (TREE_TYPE (TREE_TYPE (ptr)));
/* Mark the new name tag for renaming. */
- bitmap_set_bit (vars_to_rename, var_ann (pi->name_mem_tag)->uid);
+ mark_sym_for_renaming (pi->name_mem_tag);
}
}
@@ -1000,7 +1000,11 @@ compute_flow_insensitive_aliasing (struct alias_info *ai)
|| bitmap_bit_p (ai->written_vars, v_ann->uid);
if (!tag_stored_p && !var_stored_p)
continue;
-
+
+ if ((unmodifiable_var_p (tag) && !unmodifiable_var_p (var))
+ || (unmodifiable_var_p (var) && !unmodifiable_var_p (tag)))
+ continue;
+
if (may_alias_p (p_map->var, p_map->set, var, v_map->set))
{
subvar_t svars;
@@ -1449,9 +1453,10 @@ setup_pointers_and_addressables (struct alias_info *ai)
&& !is_global_var (var))
{
bool okay_to_mark = true;
+
/* Since VAR is now a regular GIMPLE register, we will need
to rename VAR into SSA afterwards. */
- bitmap_set_bit (vars_to_rename, v_ann->uid);
+ mark_sym_for_renaming (var);
if (var_can_have_subvars (var)
&& (svars = get_subvars_for_var (var)))
@@ -1463,15 +1468,15 @@ setup_pointers_and_addressables (struct alias_info *ai)
var_ann_t svann = var_ann (sv->var);
if (bitmap_bit_p (ai->addresses_needed, svann->uid))
okay_to_mark = false;
- bitmap_set_bit (vars_to_rename, svann->uid);
+ mark_sym_for_renaming (sv->var);
}
}
+
/* The address of VAR is not needed, remove the
addressable bit, so that it can be optimized as a
regular variable. */
if (okay_to_mark)
mark_non_addressable (var);
-
}
else
{
@@ -1496,7 +1501,7 @@ setup_pointers_and_addressables (struct alias_info *ai)
if (may_be_aliased (var))
{
create_alias_map_for (var, ai);
- bitmap_set_bit (vars_to_rename, var_ann (var)->uid);
+ mark_sym_for_renaming (var);
}
/* Add pointer variables that have been dereferenced to the POINTERS
@@ -1519,7 +1524,13 @@ setup_pointers_and_addressables (struct alias_info *ai)
afterwards. Note that we cannot do this inside
get_tmt_for because aliasing may run multiple times
and we only create type tags the first time. */
- bitmap_set_bit (vars_to_rename, t_ann->uid);
+ mark_sym_for_renaming (tag);
+
+ /* Similarly, if pointer VAR used to have another type
+ tag, we will need to process it in the renamer to
+ remove the stale virtual operands. */
+ if (v_ann->type_mem_tag)
+ mark_sym_for_renaming (v_ann->type_mem_tag);
/* Associate the tag with pointer VAR. */
v_ann->type_mem_tag = tag;
@@ -1555,7 +1566,7 @@ setup_pointers_and_addressables (struct alias_info *ai)
tree tag = ann->type_mem_tag;
if (tag)
{
- bitmap_set_bit (vars_to_rename, var_ann (tag)->uid);
+ mark_sym_for_renaming (tag);
ann->type_mem_tag = NULL_TREE;
}
}
@@ -1661,11 +1672,11 @@ maybe_create_global_var (struct alias_info *ai)
{
subvar_t sv;
for (sv = svars; sv; sv = sv->next)
- bitmap_set_bit (vars_to_rename, var_ann (sv->var)->uid);
+ mark_sym_for_renaming (sv->var);
}
}
- bitmap_set_bit (vars_to_rename, var_ann (var)->uid);
+ mark_sym_for_renaming (var);
}
}
@@ -1802,7 +1813,7 @@ set_pt_anything (tree ptr)
disassociated from PTR. */
if (pi->name_mem_tag)
{
- bitmap_set_bit (vars_to_rename, var_ann (pi->name_mem_tag)->uid);
+ mark_sym_for_renaming (pi->name_mem_tag);
pi->name_mem_tag = NULL_TREE;
}
}
@@ -2358,7 +2369,7 @@ create_global_var (void)
TREE_ADDRESSABLE (global_var) = 0;
add_referenced_tmp_var (global_var);
- bitmap_set_bit (vars_to_rename, var_ann (global_var)->uid);
+ mark_sym_for_renaming (global_var);
}
@@ -2673,6 +2684,83 @@ may_be_aliased (tree var)
return true;
}
+
+/* Add VAR to the list of may-aliases of PTR's type tag. If PTR
+ doesn't already have a type tag, create one. */
+
+void
+add_type_alias (tree ptr, tree var)
+{
+ varray_type aliases;
+ tree tag;
+ var_ann_t ann = var_ann (ptr);
+
+ if (ann->type_mem_tag == NULL_TREE)
+ {
+ size_t i;
+ tree q = NULL_TREE;
+ tree tag_type = TREE_TYPE (TREE_TYPE (ptr));
+ HOST_WIDE_INT tag_set = get_alias_set (tag_type);
+
+ /* PTR doesn't have a type tag, create a new one and add VAR to
+ the new tag's alias set.
+
+ FIXME, This is slower than necessary. We need to determine
+ whether there is another pointer Q with the same alias set as
+ PTR. This could be sped up by having type tags associated
+ with types. */
+ for (i = 0; i < num_referenced_vars; i++)
+ {
+ q = referenced_var (i);
+
+ if (POINTER_TYPE_P (TREE_TYPE (q))
+ && tag_set == get_alias_set (TREE_TYPE (TREE_TYPE (q))))
+ {
+ /* Found another pointer Q with the same alias set as
+ the PTR's pointed-to type. If Q has a type tag, use
+ it. Otherwise, create a new memory tag for PTR. */
+ var_ann_t ann1 = var_ann (q);
+ if (ann1->type_mem_tag)
+ ann->type_mem_tag = ann1->type_mem_tag;
+ else
+ ann->type_mem_tag = create_memory_tag (tag_type, true);
+ goto found_tag;
+ }
+ }
+
+ /* Couldn't find any other pointer with a type tag we could use.
+ Create a new memory tag for PTR. */
+ ann->type_mem_tag = create_memory_tag (tag_type, true);
+ }
+
+found_tag:
+ /* If VAR is not already PTR's type tag, add it to the may-alias set
+ for PTR's type tag. */
+ gcc_assert (var_ann (var)->type_mem_tag == NOT_A_TAG);
+ tag = ann->type_mem_tag;
+ add_may_alias (tag, var);
+
+ /* TAG and its set of aliases need to be marked for renaming. */
+ mark_sym_for_renaming (tag);
+ if ((aliases = var_ann (tag)->may_aliases) != NULL)
+ {
+ size_t i;
+ for (i = 0; i < VARRAY_ACTIVE_SIZE (aliases); i++)
+ mark_sym_for_renaming (VARRAY_TREE (aliases, i));
+ }
+
+ /* If we had grouped aliases, VAR may have aliases of its own. Mark
+ them for renaming as well. Other statements referencing the
+ aliases of VAR will need to be updated. */
+ if ((aliases = var_ann (var)->may_aliases) != NULL)
+ {
+ size_t i;
+ for (i = 0; i < VARRAY_ACTIVE_SIZE (aliases); i++)
+ mark_sym_for_renaming (VARRAY_TREE (aliases, i));
+ }
+}
+
+
/* This structure is simply used during pushing fields onto the fieldstack
to track the offset of the field, since bitpos_of_field gives it relative
to its immediate containing type, and we want it relative to the ultimate
@@ -3168,4 +3256,3 @@ struct tree_opt_pass pass_create_structure_vars =
TODO_dump_func, /* todo_flags_finish */
0 /* letter */
};
-
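
A rough sketch of how a caller is expected to use the new add_type_alias (the surrounding pass code is hypothetical; only add_type_alias and the tree predicates are real):

/* Illustrative only: when pass-local analysis discovers that PTR may
   point to VAR, record the alias and let the type tag machinery and
   the renamer pick it up.  */
static void
example_record_points_to (tree ptr, tree var)
{
  if (POINTER_TYPE_P (TREE_TYPE (ptr)) && DECL_P (var))
    /* Creates PTR's type tag if needed, adds VAR to its may-alias
       set and marks the affected symbols for renaming.  */
    add_type_alias (ptr, var);
}
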
diff --git a/gcc/tree-ssa-ccp.c b/gcc/tree-ssa-ccp.c
index 26e1a2ea1b8..443d8dca879 100644
--- a/gcc/tree-ssa-ccp.c
+++ b/gcc/tree-ssa-ccp.c
@@ -21,7 +21,161 @@ along with GCC; see the file COPYING. If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA. */
-/* Conditional constant propagation.
+/* Conditional constant propagation (CCP) is based on the SSA
+ propagation engine (tree-ssa-propagate.c). Constant assignments of
+ the form VAR = CST are propagated from the assignments into uses of
+ VAR, which in turn may generate new constants. The simulation uses
+ a four level lattice to keep track of constant values associated
+ with SSA names. Given an SSA name V_i, it may take one of the
+ following values:
+
+ UNINITIALIZED -> This is the default starting value. V_i
+ has not been processed yet.
+
+ UNDEFINED -> V_i is a local variable whose definition
+ has not been processed yet. Therefore we
+ don't yet know if its value is a constant
+ or not.
+
+ CONSTANT -> V_i has been found to hold a constant
+ value C.
+
+ VARYING -> V_i cannot take a constant value, or if it
+ does, it is not possible to determine it
+ at compile time.
+
+ The core of SSA-CCP is in ccp_visit_stmt and ccp_visit_phi_node:
+
+ 1- In ccp_visit_stmt, we are interested in assignments whose RHS
+ evaluates into a constant and conditional jumps whose predicate
+ evaluates into a boolean true or false. When an assignment of
+ the form V_i = CONST is found, V_i's lattice value is set to
+ CONSTANT and CONST is associated with it. This causes the
+     propagation engine to add all the SSA edges coming out of the
+ assignment into the worklists, so that statements that use V_i
+ can be visited.
+
+ If the statement is a conditional with a constant predicate, we
+ mark the outgoing edges as executable or not executable
+ depending on the predicate's value. This is then used when
+ visiting PHI nodes to know when a PHI argument can be ignored.
+
+
+ 2- In ccp_visit_phi_node, if all the PHI arguments evaluate to the
+ same constant C, then the LHS of the PHI is set to C. This
+ evaluation is known as the "meet operation". Since one of the
+ goals of this evaluation is to optimistically return constant
+ values as often as possible, it uses two main short cuts:
+
+ - If an argument is flowing in through a non-executable edge, it
+ is ignored. This is useful in cases like this:
+
+ if (PRED)
+ a_9 = 3;
+ else
+ a_10 = 100;
+ a_11 = PHI (a_9, a_10)
+
+ If PRED is known to always evaluate to false, then we can
+ assume that a_11 will always take its value from a_10, meaning
+     that instead of considering it VARYING (a_9 and a_10 have
+ different values), we can consider it CONSTANT 100.
+
+ - If an argument has an UNDEFINED value, then it does not affect
+ the outcome of the meet operation. If a variable V_i has an
+ UNDEFINED value, it means that either its defining statement
+ hasn't been visited yet or V_i has no defining statement, in
+ which case the original symbol 'V' is being used
+ uninitialized. Since 'V' is a local variable, the compiler
+ may assume any initial value for it.
+
+
+ After propagation, every variable V_i that ends up with a lattice
+ value of CONSTANT will have the associated constant value in the
+ array CONST_VAL[i].VALUE. That is fed into substitute_and_fold for
+ final substitution and folding.
+
+
+ Constant propagation in stores and loads (STORE-CCP)
+ ----------------------------------------------------
+
+ While CCP has all the logic to propagate constants in GIMPLE
+ registers, it is missing the ability to associate constants with
+ stores and loads (i.e., pointer dereferences, structures and
+ global/aliased variables). We don't keep loads and stores in
+ SSA, but we do build a factored use-def web for them (in the
+ virtual operands).
+
+ For instance, consider the following code fragment:
+
+ struct A a;
+ const int B = 42;
+
+ void foo (int i)
+ {
+ if (i > 10)
+ a.a = 42;
+ else
+ {
+ a.b = 21;
+ a.a = a.b + 21;
+ }
+
+ if (a.a != B)
+ never_executed ();
+ }
+
+ We should be able to deduce that the predicate 'a.a != B' is always
+ false. To achieve this, we associate constant values to the SSA
+ names in the V_MAY_DEF and V_MUST_DEF operands for each store.
+   Additionally, since partial loads/stores are lumped together with
+   the base symbol, we also keep track of the memory reference where the
+ constant value was stored (in the MEM_REF field of PROP_VALUE_T).
+ For instance,
+
+ # a_5 = V_MAY_DEF <a_4>
+ a.a = 2;
+
+ # VUSE <a_5>
+ x_3 = a.b;
+
+ In the example above, CCP will associate value '2' with 'a_5', but
+ it would be wrong to replace the load from 'a.b' with '2', because
+ '2' had been stored into a.a.
+
+ To support STORE-CCP, it is necessary to add a new value to the
+ constant propagation lattice. When evaluating a load for a memory
+ reference we can no longer assume a value of UNDEFINED if we
+ haven't seen a preceding store to the same memory location.
+ Consider, for instance global variables:
+
+ int A;
+
+ foo (int i)
+ {
+ if (i_3 > 10)
+ A_4 = 3;
+ # A_5 = PHI (A_4, A_2);
+
+ # VUSE <A_5>
+ A.0_6 = A;
+
+ return A.0_6;
+ }
+
+ The value of A_2 cannot be assumed to be UNDEFINED, as it may have
+ been defined outside of foo. If we were to assume it UNDEFINED, we
+ would erroneously optimize the above into 'return 3;'. Therefore,
+ when doing STORE-CCP, we introduce a fifth lattice value
+ (UNKNOWN_VAL), which overrides any other value when computing the
+ meet operation in PHI nodes.
+
+ Though STORE-CCP is not too expensive, it does have to do more work
+ than regular CCP, so it is only enabled at -O2. Both regular CCP
+ and STORE-CCP use the exact same algorithm. The only distinction
+ is that when doing STORE-CCP, the boolean variable DO_STORE_CCP is
+ set to true. This affects the evaluation of statements and PHI
+ nodes.
References:
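
The meet rules spelled out above can be summarized in a tiny self-contained sketch (plain ints stand in for tree constants; the names are local to the sketch and the authoritative implementation is ccp_lattice_meet further down in this file):

/* Standalone sketch of the CCP meet operation described above.  */
typedef enum { UNINITIALIZED, UNDEFINED, UNKNOWN_VAL, CONSTANT, VARYING } sketch_lattice_t;
typedef struct { sketch_lattice_t lattice_val; int value; } sketch_value_t;

static sketch_value_t
sketch_meet (sketch_value_t a, sketch_value_t b)
{
  sketch_value_t varying = { VARYING, 0 };
  sketch_value_t unknown = { UNKNOWN_VAL, 0 };

  /* UNDEFINED is the identity: it never degrades the other argument.  */
  if (a.lattice_val == UNDEFINED)
    return b;
  if (b.lattice_val == UNDEFINED)
    return a;

  /* UNKNOWN_VAL (store-CCP only) overrides any other value, defeating
     the short circuit above.  */
  if (a.lattice_val == UNKNOWN_VAL || b.lattice_val == UNKNOWN_VAL)
    return unknown;

  /* Two constants meet to a constant only if they agree.  */
  if (a.lattice_val == CONSTANT && b.lattice_val == CONSTANT
      && a.value == b.value)
    return a;

  return varying;
}
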
@@ -65,27 +219,29 @@ typedef enum
UNKNOWN_VAL,
CONSTANT,
VARYING
-} latticevalue;
+} ccp_lattice_t;
-/* Main structure for CCP. Contains the lattice value and, if it's a
- constant, the constant value. */
-typedef struct
-{
- latticevalue lattice_val;
- tree const_val;
-} value;
+/* Array of propagated constant values. After propagation,
+ CONST_VAL[I].VALUE holds the constant value for SSA_NAME(I). If
+ the constant is held in an SSA name representing a memory store
+ (i.e., a V_MAY_DEF or V_MUST_DEF), CONST_VAL[I].MEM_REF will
+ contain the actual memory reference used to store (i.e., the LHS of
+ the assignment doing the store). */
+prop_value_t *const_val;
-/* This is used to track the current value of each variable. */
-static value *value_vector;
+/* True if we are also propagating constants in stores and loads. */
+static bool do_store_ccp;
-
-/* Dump lattice value VAL to file OUTF prefixed by PREFIX. */
+/* Dump constant propagation value VAL to file OUTF prefixed by PREFIX. */
static void
-dump_lattice_value (FILE *outf, const char *prefix, value val)
+dump_lattice_value (FILE *outf, const char *prefix, prop_value_t val)
{
switch (val.lattice_val)
{
+ case UNINITIALIZED:
+ fprintf (outf, "%sUNINITIALIZED", prefix);
+ break;
case UNDEFINED:
fprintf (outf, "%sUNDEFINED", prefix);
break;
@@ -97,7 +253,7 @@ dump_lattice_value (FILE *outf, const char *prefix, value val)
break;
case CONSTANT:
fprintf (outf, "%sCONSTANT ", prefix);
- print_generic_expr (outf, val.const_val, dump_flags);
+ print_generic_expr (outf, val.value, dump_flags);
break;
default:
gcc_unreachable ();
@@ -105,169 +261,173 @@ dump_lattice_value (FILE *outf, const char *prefix, value val)
}
-/* Return a default value for variable VAR using the following rules:
+/* Print lattice value VAL to stderr. */
+
+void debug_lattice_value (prop_value_t val);
+
+void
+debug_lattice_value (prop_value_t val)
+{
+ dump_lattice_value (stderr, "", val);
+ fprintf (stderr, "\n");
+}
- 1- Function arguments are considered VARYING.
-
- 2- Global and static variables that are declared constant are
- considered CONSTANT.
- 3- Any other virtually defined variable is considered UNKNOWN_VAL.
+/* Compute a default value for variable VAR and store it in the
+ CONST_VAL array. The following rules are used to get default
+ values:
- 4- Any other value is considered UNDEFINED. This is useful when
+ 1- Global and static variables that are declared constant are
+ considered CONSTANT.
+
+ 2- Any other value is considered UNDEFINED. This is useful when
considering PHI nodes. PHI arguments that are undefined do not
change the constant value of the PHI node, which allows for more
- constants to be propagated. */
+ constants to be propagated.
-static value
-get_default_value (tree var)
-{
- value val;
- tree sym;
+ 3- If SSA_NAME_VALUE is set and it is a constant, its value is
+ used.
- if (TREE_CODE (var) == SSA_NAME)
- sym = SSA_NAME_VAR (var);
- else
- {
- gcc_assert (DECL_P (var));
- sym = var;
- }
+ 4- Variables defined by statements other than assignments and PHI
+ nodes are considered VARYING.
- val.lattice_val = UNDEFINED;
- val.const_val = NULL_TREE;
+ 5- Variables that are not GIMPLE registers are considered
+ UNKNOWN_VAL, which is really a stronger version of UNDEFINED.
+ It's used to avoid the short circuit evaluation implied by
+ UNDEFINED in ccp_lattice_meet. */
- if (TREE_CODE (var) == SSA_NAME
- && SSA_NAME_VALUE (var)
- && is_gimple_min_invariant (SSA_NAME_VALUE (var)))
- {
- val.lattice_val = CONSTANT;
- val.const_val = SSA_NAME_VALUE (var);
- }
- else if (TREE_CODE (sym) == PARM_DECL || TREE_THIS_VOLATILE (sym))
+static prop_value_t
+get_default_value (tree var)
+{
+ tree sym = SSA_NAME_VAR (var);
+ prop_value_t val = { UNINITIALIZED, NULL_TREE, NULL_TREE };
+
+ if (!do_store_ccp && !is_gimple_reg (var))
{
- /* Function arguments and volatile variables are considered VARYING. */
+ /* Short circuit for regular CCP. We are not interested in any
+ non-register when DO_STORE_CCP is false. */
val.lattice_val = VARYING;
}
- else if (TREE_STATIC (sym))
+ else if (SSA_NAME_VALUE (var)
+ && is_gimple_min_invariant (SSA_NAME_VALUE (var)))
{
- /* Globals and static variables are considered UNKNOWN_VAL,
- unless they are declared 'const'. */
- if (TREE_READONLY (sym)
- && DECL_INITIAL (sym)
- && is_gimple_min_invariant (DECL_INITIAL (sym)))
- {
- val.lattice_val = CONSTANT;
- val.const_val = DECL_INITIAL (sym);
- }
- else
- {
- val.const_val = NULL_TREE;
- val.lattice_val = UNKNOWN_VAL;
- }
+ val.lattice_val = CONSTANT;
+ val.value = SSA_NAME_VALUE (var);
}
- else if (!is_gimple_reg (sym))
+ else if (TREE_STATIC (sym)
+ && TREE_READONLY (sym)
+ && DECL_INITIAL (sym)
+ && is_gimple_min_invariant (DECL_INITIAL (sym)))
{
- val.const_val = NULL_TREE;
- val.lattice_val = UNKNOWN_VAL;
+ /* Globals and static variables declared 'const' take their
+ initial value. */
+ val.lattice_val = CONSTANT;
+ val.value = DECL_INITIAL (sym);
+ val.mem_ref = sym;
}
else
{
- enum tree_code code;
tree stmt = SSA_NAME_DEF_STMT (var);
- if (!IS_EMPTY_STMT (stmt))
- {
- code = TREE_CODE (stmt);
- if (code != MODIFY_EXPR && code != PHI_NODE)
+ if (IS_EMPTY_STMT (stmt))
+ {
+ /* Variables defined by an empty statement are those used
+ before being initialized. If VAR is a local variable, we
+ can assume initially that it is UNDEFINED. If we are
+ doing STORE-CCP, function arguments and non-register
+ variables are initially UNKNOWN_VAL, because we cannot
+ discard the value incoming from outside of this function
+ (see ccp_lattice_meet for details). */
+ if (is_gimple_reg (sym) && TREE_CODE (sym) != PARM_DECL)
+ val.lattice_val = UNDEFINED;
+ else if (do_store_ccp)
+ val.lattice_val = UNKNOWN_VAL;
+ else
val.lattice_val = VARYING;
}
+ else if (TREE_CODE (stmt) == MODIFY_EXPR
+ || TREE_CODE (stmt) == PHI_NODE)
+ {
+ /* Any other variable defined by an assignment or a PHI node
+ is considered UNDEFINED (or UNKNOWN_VAL if VAR is not a
+ GIMPLE register). */
+ val.lattice_val = is_gimple_reg (sym) ? UNDEFINED : UNKNOWN_VAL;
+ }
+ else
+ {
+ /* Otherwise, VAR will never take on a constant value. */
+ val.lattice_val = VARYING;
+ }
}
return val;
}
-/* Get the constant value associated with variable VAR. */
-
-static value *
-get_value (tree var)
-{
- value *val;
- gcc_assert (TREE_CODE (var) == SSA_NAME);
+/* Get the constant value associated with variable VAR. If
+ MAY_USE_DEFAULT_P is true, call get_default_value on variables that
+ have the lattice value UNINITIALIZED. */
- val = &value_vector[SSA_NAME_VERSION (var)];
- if (val->lattice_val == UNINITIALIZED)
+static prop_value_t *
+get_value (tree var, bool may_use_default_p)
+{
+ prop_value_t *val = &const_val[SSA_NAME_VERSION (var)];
+ if (may_use_default_p && val->lattice_val == UNINITIALIZED)
*val = get_default_value (var);
return val;
}
-/* Set the lattice value for variable VAR to VAL. Return true if VAL
- is different from VAR's previous value. */
+/* Set the value for variable VAR to NEW_VAL. Return true if the new
+ value is different from VAR's previous value. */
static bool
-set_lattice_value (tree var, value val)
+set_lattice_value (tree var, prop_value_t new_val)
{
- value *old = get_value (var);
-
- if (val.lattice_val == UNDEFINED)
- {
- /* CONSTANT->UNDEFINED is never a valid state transition. */
- gcc_assert (old->lattice_val != CONSTANT);
-
- /* UNKNOWN_VAL->UNDEFINED is never a valid state transition. */
- gcc_assert (old->lattice_val != UNKNOWN_VAL);
-
- /* VARYING->UNDEFINED is generally not a valid state transition,
- except for values which are initialized to VARYING. */
- gcc_assert (old->lattice_val != VARYING
- || get_default_value (var).lattice_val == VARYING);
- }
- else if (val.lattice_val == CONSTANT)
- /* VARYING -> CONSTANT is an invalid state transition, except
- for objects which start off in a VARYING state. */
- gcc_assert (old->lattice_val != VARYING
- || get_default_value (var).lattice_val == VARYING);
-
- /* If the constant for VAR has changed, then this VAR is really varying. */
- if (old->lattice_val == CONSTANT
- && val.lattice_val == CONSTANT
- && !simple_cst_equal (old->const_val, val.const_val))
- {
- val.lattice_val = VARYING;
- val.const_val = NULL_TREE;
- }
-
- if (old->lattice_val != val.lattice_val)
+ prop_value_t *old_val = get_value (var, false);
+
+ /* Lattice transitions must always be monotonically increasing in
+ value. We allow two exceptions:
+
+ 1- If *OLD_VAL and NEW_VAL are the same, return false to
+ inform the caller that this was a non-transition.
+
+     2- If we are doing store-ccp (i.e., DO_STORE_CCP is true),
+ allow CONSTANT->UNKNOWN_VAL. The UNKNOWN_VAL state is a
+ special type of UNDEFINED state which prevents the short
+ circuit evaluation of PHI arguments (see ccp_visit_phi_node
+ and ccp_lattice_meet). */
+ gcc_assert (old_val->lattice_val <= new_val.lattice_val
+ || (old_val->lattice_val == new_val.lattice_val
+ && old_val->value == new_val.value
+ && old_val->mem_ref == new_val.mem_ref)
+ || (do_store_ccp
+ && old_val->lattice_val == CONSTANT
+ && new_val.lattice_val == UNKNOWN_VAL));
+
+ if (old_val->lattice_val != new_val.lattice_val)
{
if (dump_file && (dump_flags & TDF_DETAILS))
{
- dump_lattice_value (dump_file, "Lattice value changed to ", val);
- fprintf (dump_file, ". Adding definition to SSA edges.\n");
+ dump_lattice_value (dump_file, "Lattice value changed to ", new_val);
+ fprintf (dump_file, ". %sdding SSA edges to worklist.\n",
+ new_val.lattice_val != UNDEFINED ? "A" : "Not a");
}
- *old = val;
- return true;
+ *old_val = new_val;
+
+ /* Transitions UNINITIALIZED -> UNDEFINED are never interesting
+ for propagation purposes. In these cases return false to
+ avoid doing useless work. */
+ return (new_val.lattice_val != UNDEFINED);
}
return false;
}
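
The assertion above encodes the rule that lattice values may only move up. Below is a stand-alone sketch of that check using mock types, not GCC's; the enum ordering is an assumption (the real ccp_lattice_t is defined earlier in this file), and legal_transition is an invented helper:

    #include <assert.h>
    #include <stdio.h>

    typedef enum { UNINITIALIZED, UNDEFINED, UNKNOWN_VAL, CONSTANT, VARYING } mock_lattice_t;

    /* Return 1 if moving from OLD to NEW_ is a legal transition.  */
    static int
    legal_transition (mock_lattice_t old, mock_lattice_t new_, int store_ccp)
    {
      return old <= new_                          /* monotonically increasing */
             || (store_ccp && old == CONSTANT && new_ == UNKNOWN_VAL);
    }

    int
    main (void)
    {
      assert (legal_transition (UNDEFINED, CONSTANT, 0));   /* gaining information */
      assert (!legal_transition (CONSTANT, UNDEFINED, 0));  /* losing information  */
      assert (legal_transition (CONSTANT, UNKNOWN_VAL, 1)); /* store-CCP exception */
      puts ("ok");
      return 0;
    }
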
-/* Set the lattice value for the variable VAR to VARYING. */
-
-static void
-def_to_varying (tree var)
-{
- value val;
- val.lattice_val = VARYING;
- val.const_val = NULL_TREE;
- set_lattice_value (var, val);
-}
-
-
-/* Return the likely latticevalue for STMT.
+/* Return the likely CCP lattice value for STMT.
If STMT has no operands, then return CONSTANT.
@@ -277,57 +437,71 @@ def_to_varying (tree var)
Else return VARYING. */
-static latticevalue
+static ccp_lattice_t
likely_value (tree stmt)
{
- vuse_optype vuses;
- int found_constant = 0;
+ bool found_constant;
stmt_ann_t ann;
tree use;
ssa_op_iter iter;
- /* If the statement makes aliased loads or has volatile operands, it
- won't fold to a constant value. */
ann = stmt_ann (stmt);
- if (ann->makes_aliased_loads || ann->has_volatile_ops)
+
+ /* If the statement has volatile operands, it won't fold to a
+ constant value. */
+ if (ann->has_volatile_ops)
+ return VARYING;
+
+ /* If we are not doing store-ccp, statements with loads
+ and/or stores will never fold into a constant. */
+ if (!do_store_ccp
+ && (ann->makes_aliased_stores
+ || ann->makes_aliased_loads
+ || NUM_VUSES (VUSE_OPS (ann)) > 0
+ || NUM_V_MAY_DEFS (V_MAY_DEF_OPS (ann)) > 0
+ || NUM_V_MUST_DEFS (V_MUST_DEF_OPS (ann)) > 0))
return VARYING;
- /* A CALL_EXPR is assumed to be varying. This may be overly conservative,
- in the presence of const and pure calls. */
+
+ /* A CALL_EXPR is assumed to be varying. NOTE: This may be overly
+ conservative in the presence of const and pure calls. */
if (get_call_expr_in (stmt) != NULL_TREE)
return VARYING;
+ /* Anything other than assignments and conditional jumps are not
+ interesting for CCP. */
+ if (TREE_CODE (stmt) != MODIFY_EXPR
+ && TREE_CODE (stmt) != COND_EXPR
+ && TREE_CODE (stmt) != SWITCH_EXPR)
+ return VARYING;
+
get_stmt_operands (stmt);
- FOR_EACH_SSA_TREE_OPERAND (use, stmt, iter, SSA_OP_USE)
+ found_constant = false;
+ FOR_EACH_SSA_TREE_OPERAND (use, stmt, iter, SSA_OP_USE|SSA_OP_VUSE)
{
- value *val = get_value (use);
+ prop_value_t *val = get_value (use, true);
- if (val->lattice_val == UNDEFINED)
- return UNDEFINED;
+ if (val->lattice_val == VARYING)
+ return VARYING;
- if (val->lattice_val == CONSTANT)
- found_constant = 1;
- }
-
- vuses = VUSE_OPS (ann);
-
- if (NUM_VUSES (vuses))
- {
- tree vuse = VUSE_OP (vuses, 0);
- value *val = get_value (vuse);
-
if (val->lattice_val == UNKNOWN_VAL)
- return UNKNOWN_VAL;
-
- /* There should be no VUSE operands that are UNDEFINED. */
- gcc_assert (val->lattice_val != UNDEFINED);
-
+ {
+ /* UNKNOWN_VAL is invalid when not doing STORE-CCP. */
+ gcc_assert (do_store_ccp);
+ return UNKNOWN_VAL;
+ }
+
if (val->lattice_val == CONSTANT)
- found_constant = 1;
+ found_constant = true;
}
- return ((found_constant || (!USE_OPS (ann) && !vuses)) ? CONSTANT : VARYING);
+ if (found_constant
+ || NUM_USES (USE_OPS (ann)) == 0
+ || NUM_VUSES (VUSE_OPS (ann)) == 0)
+ return CONSTANT;
+
+ return UNDEFINED;
}
@@ -337,53 +511,41 @@ static void
ccp_initialize (void)
{
basic_block bb;
- sbitmap is_may_def;
- value_vector = (value *) xmalloc (num_ssa_names * sizeof (value));
- memset (value_vector, 0, num_ssa_names * sizeof (value));
-
- /* Set of SSA_NAMEs that are defined by a V_MAY_DEF. */
- is_may_def = sbitmap_alloc (num_ssa_names);
- sbitmap_zero (is_may_def);
+ const_val = xmalloc (num_ssa_names * sizeof (*const_val));
+ memset (const_val, 0, num_ssa_names * sizeof (*const_val));
/* Initialize simulation flags for PHI nodes and statements. */
FOR_EACH_BB (bb)
{
block_stmt_iterator i;
- /* Mark all V_MAY_DEF operands VARYING. */
for (i = bsi_start (bb); !bsi_end_p (i); bsi_next (&i))
{
bool is_varying = false;
tree stmt = bsi_stmt (i);
- ssa_op_iter iter;
- tree def;
get_stmt_operands (stmt);
- /* Get the default value for each DEF and V_MUST_DEF. */
- FOR_EACH_SSA_TREE_OPERAND (def, stmt, iter,
- (SSA_OP_DEF | SSA_OP_VMUSTDEF))
- {
- if (get_value (def)->lattice_val == VARYING)
- is_varying = true;
- }
+ if (likely_value (stmt) == VARYING)
- /* Mark all V_MAY_DEF operands VARYING. */
- FOR_EACH_SSA_TREE_OPERAND (def, stmt, iter, SSA_OP_VMAYDEF)
{
- get_value (def)->lattice_val = VARYING;
- SET_BIT (is_may_def, SSA_NAME_VERSION (def));
+ tree def;
+ ssa_op_iter iter;
+
+ /* If the statement will not produce a constant, mark
+ all its outputs VARYING. */
+ FOR_EACH_SSA_TREE_OPERAND (def, stmt, iter, SSA_OP_ALL_DEFS)
+ get_value (def, false)->lattice_val = VARYING;
+
+ /* Never mark conditional jumps with DONT_SIMULATE_AGAIN,
+ otherwise the propagator will never add the outgoing
+ control edges. */
+ if (TREE_CODE (stmt) != COND_EXPR
+ && TREE_CODE (stmt) != SWITCH_EXPR)
+ is_varying = true;
}
- /* Statements other than MODIFY_EXPR, COND_EXPR and
- SWITCH_EXPR are not interesting for constant propagation.
- Mark them VARYING. */
- if (TREE_CODE (stmt) != MODIFY_EXPR
- && TREE_CODE (stmt) != COND_EXPR
- && TREE_CODE (stmt) != SWITCH_EXPR)
- is_varying = true;
-
DONT_SIMULATE_AGAIN (stmt) = is_varying;
}
}
@@ -391,301 +553,148 @@ ccp_initialize (void)
/* Now process PHI nodes. */
FOR_EACH_BB (bb)
{
- tree phi, var;
- int x;
+ tree phi;
for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
{
- value *val = get_value (PHI_RESULT (phi));
+ int i;
+ tree arg;
+ prop_value_t *val = get_value (PHI_RESULT (phi), false);
- for (x = 0; x < PHI_NUM_ARGS (phi); x++)
+ for (i = 0; i < PHI_NUM_ARGS (phi); i++)
{
- var = PHI_ARG_DEF (phi, x);
+ arg = PHI_ARG_DEF (phi, i);
- /* If one argument has a V_MAY_DEF, the result is
- VARYING. */
- if (TREE_CODE (var) == SSA_NAME)
+ if (TREE_CODE (arg) == SSA_NAME
+ && get_value (arg, false)->lattice_val == VARYING)
{
- if (TEST_BIT (is_may_def, SSA_NAME_VERSION (var)))
- {
- val->lattice_val = VARYING;
- SET_BIT (is_may_def, SSA_NAME_VERSION (PHI_RESULT (phi)));
- break;
- }
+ val->lattice_val = VARYING;
+ break;
}
}
DONT_SIMULATE_AGAIN (phi) = (val->lattice_val == VARYING);
}
}
-
- sbitmap_free (is_may_def);
}
-/* Replace USE references in statement STMT with their immediate reaching
- definition. Return true if at least one reference was replaced. If
- REPLACED_ADDRESSES_P is given, it will be set to true if an address
- constant was replaced. */
-
-static bool
-replace_uses_in (tree stmt, bool *replaced_addresses_p)
-{
- bool replaced = false;
- use_operand_p use;
- ssa_op_iter iter;
+/* Do final substitution of propagated values, cleanup the flowgraph and
+ free allocated storage. */
- if (replaced_addresses_p)
- *replaced_addresses_p = false;
-
- get_stmt_operands (stmt);
-
- FOR_EACH_SSA_USE_OPERAND (use, stmt, iter, SSA_OP_USE)
- {
- tree tuse = USE_FROM_PTR (use);
- value *val = get_value (tuse);
-
- if (val->lattice_val != CONSTANT)
- continue;
-
- if (TREE_CODE (stmt) == ASM_EXPR
- && !may_propagate_copy_into_asm (tuse))
- continue;
-
- SET_USE (use, val->const_val);
-
- replaced = true;
- if (POINTER_TYPE_P (TREE_TYPE (tuse)) && replaced_addresses_p)
- *replaced_addresses_p = true;
- }
-
- return replaced;
-}
-
-
-/* Replace the VUSE references in statement STMT with its immediate reaching
- definition. Return true if the reference was replaced. If
- REPLACED_ADDRESSES_P is given, it will be set to true if an address
- constant was replaced. */
-
-static bool
-replace_vuse_in (tree stmt, bool *replaced_addresses_p)
+static void
+ccp_finalize (void)
{
- bool replaced = false;
- vuse_optype vuses;
- use_operand_p vuse;
- value *val;
-
- if (replaced_addresses_p)
- *replaced_addresses_p = false;
-
- get_stmt_operands (stmt);
-
- vuses = STMT_VUSE_OPS (stmt);
-
- if (NUM_VUSES (vuses) != 1)
- return false;
-
- vuse = VUSE_OP_PTR (vuses, 0);
- val = get_value (USE_FROM_PTR (vuse));
-
- if (val->lattice_val == CONSTANT
- && TREE_CODE (stmt) == MODIFY_EXPR
- && DECL_P (TREE_OPERAND (stmt, 1))
- && TREE_OPERAND (stmt, 1) == SSA_NAME_VAR (USE_FROM_PTR (vuse)))
- {
- TREE_OPERAND (stmt, 1) = val->const_val;
- replaced = true;
- if (POINTER_TYPE_P (TREE_TYPE (USE_FROM_PTR (vuse)))
- && replaced_addresses_p)
- *replaced_addresses_p = true;
- }
+ /* Perform substitutions based on the known constant values. */
+ substitute_and_fold (const_val);
- return replaced;
+ free (const_val);
}
-/* Perform final substitution and folding. After this pass the program
- should still be in SSA form. */
+/* Compute the meet operator between *VAL1 and *VAL2. Store the result
+ in VAL1.
+
+ any M UNDEFINED = any
+ any M UNKNOWN_VAL = UNKNOWN_VAL
+ any M VARYING = VARYING
+ Ci M Cj = Ci if (i == j)
+ Ci M Cj = VARYING if (i != j)
+
+ Lattice values UNKNOWN_VAL and UNDEFINED are similar but have
+ different semantics at PHI nodes. Both values imply that we don't
+ know whether the variable is constant or not. However, UNKNOWN_VAL
+ values override all others. For instance, suppose that A is a
+ global variable:
+
+ +------+
+ | |
+ | / \
+ | / \
+ | | A_1 = 4
+ | \ /
+ | \ /
+ | A_3 = PHI (A_2, A_1)
+ | ... = A_3
+ | |
+ +----+
+
+ If the edge into A_2 is not executable, the first visit to A_3 will
+ yield the constant 4. But the second visit to A_3 will be with A_2
+ in state UNKNOWN_VAL. We can no longer conclude that A_3 is 4
+ because A_2 may have been set in another function. If we had used
+ the lattice value UNDEFINED, we would have wrongly concluded
+ that A_3 is 4. */
+
static void
-substitute_and_fold (void)
+ccp_lattice_meet (prop_value_t *val1, prop_value_t *val2)
{
- basic_block bb;
- unsigned int i;
-
- if (dump_file && (dump_flags & TDF_DETAILS))
- fprintf (dump_file,
- "\nSubstituing constants and folding statements\n\n");
-
- /* Substitute constants in every statement of every basic block. */
- FOR_EACH_BB (bb)
+ if (val1->lattice_val == UNDEFINED)
{
- block_stmt_iterator i;
- tree phi;
-
- /* Propagate our known constants into PHI nodes. */
- for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
- {
- int i;
-
- for (i = 0; i < PHI_NUM_ARGS (phi); i++)
- {
- value *new_val;
- use_operand_p orig_p = PHI_ARG_DEF_PTR (phi, i);
- tree orig = USE_FROM_PTR (orig_p);
-
- if (! SSA_VAR_P (orig))
- break;
-
- new_val = get_value (orig);
- if (new_val->lattice_val == CONSTANT
- && may_propagate_copy (orig, new_val->const_val))
- SET_USE (orig_p, new_val->const_val);
- }
- }
-
- for (i = bsi_start (bb); !bsi_end_p (i); bsi_next (&i))
- {
- bool replaced_address;
- tree stmt = bsi_stmt (i);
-
- /* Skip statements that have been folded already. */
- if (stmt_modified_p (stmt) || !is_exec_stmt (stmt))
- continue;
-
- /* Replace the statement with its folded version and mark it
- folded. */
- if (dump_file && (dump_flags & TDF_DETAILS))
- {
- fprintf (dump_file, "Line %d: replaced ", get_lineno (stmt));
- print_generic_stmt (dump_file, stmt, TDF_SLIM);
- }
-
- if (replace_uses_in (stmt, &replaced_address)
- || replace_vuse_in (stmt, &replaced_address))
- {
- bool changed = fold_stmt (bsi_stmt_ptr (i));
- stmt = bsi_stmt(i);
-
- /* If we folded a builtin function, we'll likely
- need to rename VDEFs. */
- if (replaced_address || changed)
- mark_new_vars_to_rename (stmt, vars_to_rename);
-
- /* If we cleaned up EH information from the statement,
- remove EH edges. */
- if (maybe_clean_eh_stmt (stmt))
- tree_purge_dead_eh_edges (bb);
-
- update_stmt (stmt);
- }
-
- if (dump_file && (dump_flags & TDF_DETAILS))
- {
- fprintf (dump_file, " with ");
- print_generic_stmt (dump_file, stmt, TDF_SLIM);
- fprintf (dump_file, "\n");
- }
- }
+ /* UNDEFINED M any = any */
+ *val1 = *val2;
}
-
- /* And transfer what we learned from VALUE_VECTOR into the
- SSA_NAMEs themselves. This probably isn't terribly important
- since we probably constant propagated the values to their
- use sites above. */
- for (i = 0; i < num_ssa_names; i++)
+ else if (val2->lattice_val == UNDEFINED)
{
- tree name = ssa_name (i);
- value *value;
-
- if (!name)
- continue;
-
- value = get_value (name);
- if (value->lattice_val == CONSTANT
- && is_gimple_reg (name)
- && is_gimple_min_invariant (value->const_val))
- SSA_NAME_VALUE (name) = value->const_val;
+ /* any M UNDEFINED = any
+ Nothing to do. VAL1 already contains the value we want. */
+ ;
}
-}
-
-
-/* Free allocated storage. */
-
-static void
-ccp_finalize (void)
-{
- /* Perform substitutions based on the known constant values. */
- substitute_and_fold ();
-
- free (value_vector);
-}
-
-
-
-/* Compute the meet operator between VAL1 and VAL2:
-
- any M UNDEFINED = any
- any M VARYING = VARYING
- any M UNKNOWN_VAL = UNKNOWN_VAL
- Ci M Cj = Ci if (i == j)
- Ci M Cj = VARYING if (i != j) */
-static value
-ccp_lattice_meet (value val1, value val2)
-{
- value result;
-
- /* any M UNDEFINED = any. */
- if (val1.lattice_val == UNDEFINED)
- return val2;
- else if (val2.lattice_val == UNDEFINED)
- return val1;
-
- /* any M VARYING = VARYING. */
- if (val1.lattice_val == VARYING || val2.lattice_val == VARYING)
+ else if (val1->lattice_val == UNKNOWN_VAL
+ || val2->lattice_val == UNKNOWN_VAL)
{
- result.lattice_val = VARYING;
- result.const_val = NULL_TREE;
- return result;
- }
+ /* UNKNOWN_VAL values are invalid if we are not doing STORE-CCP. */
+ gcc_assert (do_store_ccp);
- /* any M UNKNOWN_VAL = UNKNOWN_VAL. */
- if (val1.lattice_val == UNKNOWN_VAL
- || val2.lattice_val == UNKNOWN_VAL)
+ /* any M UNKNOWN_VAL = UNKNOWN_VAL. */
+ val1->lattice_val = UNKNOWN_VAL;
+ val1->value = NULL_TREE;
+ val1->mem_ref = NULL_TREE;
+ }
+ else if (val1->lattice_val == VARYING
+ || val2->lattice_val == VARYING)
{
- result.lattice_val = UNKNOWN_VAL;
- result.const_val = NULL_TREE;
- return result;
+ /* any M VARYING = VARYING. */
+ val1->lattice_val = VARYING;
+ val1->value = NULL_TREE;
+ val1->mem_ref = NULL_TREE;
}
-
- /* Ci M Cj = Ci if (i == j)
- Ci M Cj = VARYING if (i != j) */
- if (simple_cst_equal (val1.const_val, val2.const_val) == 1)
+ else if (val1->lattice_val == CONSTANT
+ && val2->lattice_val == CONSTANT
+ && simple_cst_equal (val1->value, val2->value) == 1
+ && (!do_store_ccp
+ || simple_cst_equal (val1->mem_ref, val2->mem_ref) == 1))
{
- result.lattice_val = CONSTANT;
- result.const_val = val1.const_val;
+ /* Ci M Cj = Ci if (i == j)
+ Ci M Cj = VARYING if (i != j)
+
+ If these two values come from memory stores, make sure that
+ they come from the same memory reference. */
+ val1->lattice_val = CONSTANT;
+ val1->value = val1->value;
+ val1->mem_ref = val1->mem_ref;
}
else
{
- result.lattice_val = VARYING;
- result.const_val = NULL_TREE;
+ /* Any other combination is VARYING. */
+ val1->lattice_val = VARYING;
+ val1->value = NULL_TREE;
+ val1->mem_ref = NULL_TREE;
}
-
- return result;
}
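
A stand-alone sketch of the meet table documented above, written against mock types rather than GCC's prop_value_t (an illustration, not the pass itself; mock_meet and the type names are invented):

    #include <stdio.h>

    typedef enum { UNDEFINED, UNKNOWN_VAL, CONSTANT, VARYING } mock_lattice_t;
    typedef struct { mock_lattice_t lat; long value; } mock_val_t;

    static mock_val_t
    mock_meet (mock_val_t a, mock_val_t b)
    {
      if (a.lat == UNDEFINED) return b;                /* any M UNDEFINED = any      */
      if (b.lat == UNDEFINED) return a;
      if (a.lat == UNKNOWN_VAL || b.lat == UNKNOWN_VAL)
        return (mock_val_t) { UNKNOWN_VAL, 0 };        /* any M UNKNOWN_VAL          */
      if (a.lat == VARYING || b.lat == VARYING)
        return (mock_val_t) { VARYING, 0 };            /* any M VARYING = VARYING    */
      if (a.value == b.value)
        return a;                                      /* Ci M Cj = Ci when i == j   */
      return (mock_val_t) { VARYING, 0 };              /* Ci M Cj = VARYING, i != j  */
    }

    int
    main (void)
    {
      mock_val_t four = { CONSTANT, 4 }, undef = { UNDEFINED, 0 };
      mock_val_t r = mock_meet (four, undef);
      printf ("4 M UNDEFINED -> lattice %d, value %ld\n", r.lat, r.value);
      return 0;
    }
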
/* Loop through the PHI_NODE's parameters for BLOCK and compare their
lattice values to determine PHI_NODE's lattice value. The value of a
- PHI node is determined calling ccp_lattice_meet() with all the arguments
+ PHI node is determined by calling ccp_lattice_meet with all the arguments
of the PHI node that are incoming via executable edges. */
static enum ssa_prop_result
ccp_visit_phi_node (tree phi)
{
- value new_val, *old_val;
int i;
+ prop_value_t *old_val, new_val;
if (dump_file && (dump_flags & TDF_DETAILS))
{
@@ -693,11 +702,11 @@ ccp_visit_phi_node (tree phi)
print_generic_expr (dump_file, phi, dump_flags);
}
- old_val = get_value (PHI_RESULT (phi));
+ old_val = get_value (PHI_RESULT (phi), false);
switch (old_val->lattice_val)
{
case VARYING:
- return SSA_PROP_NOT_INTERESTING;
+ return SSA_PROP_VARYING;
case CONSTANT:
new_val = *old_val;
@@ -710,14 +719,15 @@ ccp_visit_phi_node (tree phi)
UNDEFINED. If the PHI node's old value was UNKNOWN_VAL and
the new value is UNDEFINED, then we prevent the invalid
transition by not calling set_lattice_value. */
- new_val.lattice_val = UNDEFINED;
- new_val.const_val = NULL_TREE;
- break;
+ gcc_assert (do_store_ccp);
+
+ /* FALLTHRU */
case UNDEFINED:
case UNINITIALIZED:
new_val.lattice_val = UNDEFINED;
- new_val.const_val = NULL_TREE;
+ new_val.value = NULL_TREE;
+ new_val.mem_ref = NULL_TREE;
break;
default:
@@ -726,7 +736,8 @@ ccp_visit_phi_node (tree phi)
for (i = 0; i < PHI_NUM_ARGS (phi); i++)
{
- /* Compute the meet operator over all the PHI arguments. */
+ /* Compute the meet operator over all the PHI arguments flowing
+ through executable edges. */
edge e = PHI_ARG_EDGE (phi, i);
if (dump_file && (dump_flags & TDF_DETAILS))
@@ -741,25 +752,25 @@ ccp_visit_phi_node (tree phi)
the existing value of the PHI node and the current PHI argument. */
if (e->flags & EDGE_EXECUTABLE)
{
- tree rdef = PHI_ARG_DEF (phi, i);
- value *rdef_val, val;
+ tree arg = PHI_ARG_DEF (phi, i);
+ prop_value_t arg_val;
- if (is_gimple_min_invariant (rdef))
+ if (is_gimple_min_invariant (arg))
{
- val.lattice_val = CONSTANT;
- val.const_val = rdef;
- rdef_val = &val;
+ arg_val.lattice_val = CONSTANT;
+ arg_val.value = arg;
+ arg_val.mem_ref = NULL_TREE;
}
else
- rdef_val = get_value (rdef);
+ arg_val = *(get_value (arg, true));
- new_val = ccp_lattice_meet (new_val, *rdef_val);
+ ccp_lattice_meet (&new_val, &arg_val);
if (dump_file && (dump_flags & TDF_DETAILS))
{
fprintf (dump_file, "\t");
- print_generic_expr (dump_file, rdef, dump_flags);
- dump_lattice_value (dump_file, "\tValue: ", *rdef_val);
+ print_generic_expr (dump_file, arg, dump_flags);
+ dump_lattice_value (dump_file, "\tValue: ", arg_val);
fprintf (dump_file, "\n");
}
@@ -775,7 +786,8 @@ ccp_visit_phi_node (tree phi)
}
/* Check for an invalid change from UNKNOWN_VAL to UNDEFINED. */
- if (old_val->lattice_val == UNKNOWN_VAL
+ if (do_store_ccp
+ && old_val->lattice_val == UNKNOWN_VAL
&& new_val.lattice_val == UNDEFINED)
return SSA_PROP_NOT_INTERESTING;
@@ -808,18 +820,23 @@ ccp_fold (tree stmt)
enum tree_code code = TREE_CODE (rhs);
enum tree_code_class kind = TREE_CODE_CLASS (code);
tree retval = NULL_TREE;
- vuse_optype vuses;
-
- vuses = STMT_VUSE_OPS (stmt);
- /* If the RHS is just a variable, then that variable must now have
- a constant value that we can return directly. */
if (TREE_CODE (rhs) == SSA_NAME)
- return get_value (rhs)->const_val;
- else if (DECL_P (rhs)
- && NUM_VUSES (vuses) == 1
- && rhs == SSA_NAME_VAR (VUSE_OP (vuses, 0)))
- return get_value (VUSE_OP (vuses, 0))->const_val;
+ {
+ /* If the RHS is an SSA_NAME, return its known constant value,
+ if any. */
+ return get_value (rhs, true)->value;
+ }
+ else if (do_store_ccp && stmt_makes_single_load (stmt))
+ {
+ /* If the RHS is a memory load, see if the VUSEs associated with
+ it are a valid constant for that memory load. */
+ prop_value_t *val = get_value_loaded_by (stmt, const_val);
+ if (val && simple_cst_equal (val->mem_ref, rhs) == 1)
+ return val->value;
+ else
+ return NULL_TREE;
+ }
/* Unary operators. Note that we know the single operand must
be a constant. So this should almost always return a
@@ -832,9 +849,9 @@ ccp_fold (tree stmt)
/* Simplify the operand down to a constant. */
if (TREE_CODE (op0) == SSA_NAME)
{
- value *val = get_value (op0);
+ prop_value_t *val = get_value (op0, true);
if (val->lattice_val == CONSTANT)
- op0 = get_value (op0)->const_val;
+ op0 = get_value (op0, true)->value;
}
retval = fold_unary_to_constant (code, TREE_TYPE (rhs), op0);
@@ -876,16 +893,16 @@ ccp_fold (tree stmt)
/* Simplify the operands down to constants when appropriate. */
if (TREE_CODE (op0) == SSA_NAME)
{
- value *val = get_value (op0);
+ prop_value_t *val = get_value (op0, true);
if (val->lattice_val == CONSTANT)
- op0 = val->const_val;
+ op0 = val->value;
}
if (TREE_CODE (op1) == SSA_NAME)
{
- value *val = get_value (op1);
+ prop_value_t *val = get_value (op1, true);
if (val->lattice_val == CONSTANT)
- op1 = val->const_val;
+ op1 = val->value;
}
retval = fold_binary_to_constant (code, TREE_TYPE (rhs), op0, op1);
@@ -934,7 +951,7 @@ ccp_fold (tree stmt)
orig[i] = USE_OP (uses, i);
/* Substitute operands with their values and try to fold. */
- replace_uses_in (stmt, NULL);
+ replace_uses_in (stmt, NULL, const_val);
fndecl = get_callee_fndecl (rhs);
arglist = TREE_OPERAND (rhs, 1);
retval = fold_builtin (fndecl, arglist, false);
@@ -959,12 +976,14 @@ ccp_fold (tree stmt)
/* Evaluate statement STMT. */
-static value
+static prop_value_t
evaluate_stmt (tree stmt)
{
- value val;
+ prop_value_t val;
tree simplified;
- latticevalue likelyvalue = likely_value (stmt);
+ ccp_lattice_t likelyvalue = likely_value (stmt);
+
+ val.mem_ref = NULL_TREE;
/* If the statement is likely to have a CONSTANT result, then try
to fold the statement to determine the constant value. */
@@ -983,18 +1002,15 @@ evaluate_stmt (tree stmt)
{
/* The statement produced a constant value. */
val.lattice_val = CONSTANT;
- val.const_val = simplified;
+ val.value = simplified;
}
else
{
/* The statement produced a nonconstant value. If the statement
- had undefined or virtual operands, then the result of the
- statement should be undefined or virtual respectively.
- Else the result of the statement is VARYING. */
- val.lattice_val = (likelyvalue == UNDEFINED ? UNDEFINED : VARYING);
- val.lattice_val = (likelyvalue == UNKNOWN_VAL
- ? UNKNOWN_VAL : val.lattice_val);
- val.const_val = NULL_TREE;
+ had UNDEFINED operands, then the result of the statement
+ should be UNDEFINED. Otherwise, the statement is VARYING. */
+ val.lattice_val = (likelyvalue == UNDEFINED) ? UNDEFINED : VARYING;
+ val.value = NULL_TREE;
}
return val;
@@ -1002,48 +1018,38 @@ evaluate_stmt (tree stmt)
/* Visit the assignment statement STMT. Set the value of its LHS to the
- value computed by the RHS and store LHS in *OUTPUT_P. */
+ value computed by the RHS and store LHS in *OUTPUT_P. If STMT
+ creates virtual definitions, set the value of each new name to that
+ of the RHS (if we can derive a constant out of the RHS). */
static enum ssa_prop_result
visit_assignment (tree stmt, tree *output_p)
{
- value val;
+ prop_value_t val;
tree lhs, rhs;
- vuse_optype vuses;
- v_must_def_optype v_must_defs;
+ enum ssa_prop_result retval;
lhs = TREE_OPERAND (stmt, 0);
rhs = TREE_OPERAND (stmt, 1);
- vuses = STMT_VUSE_OPS (stmt);
- v_must_defs = STMT_V_MUST_DEF_OPS (stmt);
-
- gcc_assert (NUM_V_MAY_DEFS (STMT_V_MAY_DEF_OPS (stmt)) == 0);
- gcc_assert (NUM_V_MUST_DEFS (v_must_defs) == 1
- || TREE_CODE (lhs) == SSA_NAME);
-
- /* We require the SSA version number of the lhs for the value_vector.
- Make sure we have it. */
- if (TREE_CODE (lhs) != SSA_NAME)
- {
- /* If we make it here, then stmt only has one definition:
- a V_MUST_DEF. */
- lhs = V_MUST_DEF_RESULT (v_must_defs, 0);
- }
if (TREE_CODE (rhs) == SSA_NAME)
{
/* For a simple copy operation, we copy the lattice values. */
- value *nval = get_value (rhs);
+ prop_value_t *nval = get_value (rhs, true);
val = *nval;
}
- else if (DECL_P (rhs)
- && NUM_VUSES (vuses) == 1
- && rhs == SSA_NAME_VAR (VUSE_OP (vuses, 0)))
+ else if (do_store_ccp && stmt_makes_single_load (stmt))
{
- /* Same as above, but the rhs is not a gimple register and yet
- has a known VUSE. */
- value *nval = get_value (VUSE_OP (vuses, 0));
- val = *nval;
+ /* Same as above, but the RHS is not a gimple register and yet
+ has a known VUSE. If STMT is loading from the same memory
+ location that created the SSA_NAMEs for the virtual operands,
+ we can propagate the value on the RHS. */
+ prop_value_t *nval = get_value_loaded_by (stmt, const_val);
+
+ if (nval && simple_cst_equal (nval->mem_ref, rhs) == 1)
+ val = *nval;
+ else
+ val = evaluate_stmt (stmt);
}
else
/* Evaluate the statement. */
@@ -1063,15 +1069,15 @@ visit_assignment (tree stmt, tree *output_p)
{
tree w = fold (build1 (VIEW_CONVERT_EXPR,
TREE_TYPE (TREE_OPERAND (orig_lhs, 0)),
- val.const_val));
+ val.value));
orig_lhs = TREE_OPERAND (orig_lhs, 1);
if (w && is_gimple_min_invariant (w))
- val.const_val = w;
+ val.value = w;
else
{
val.lattice_val = VARYING;
- val.const_val = NULL;
+ val.value = NULL;
}
}
@@ -1079,36 +1085,73 @@ visit_assignment (tree stmt, tree *output_p)
&& TREE_CODE (orig_lhs) == COMPONENT_REF
&& DECL_BIT_FIELD (TREE_OPERAND (orig_lhs, 1)))
{
- tree w = widen_bitfield (val.const_val, TREE_OPERAND (orig_lhs, 1),
+ tree w = widen_bitfield (val.value, TREE_OPERAND (orig_lhs, 1),
orig_lhs);
if (w && is_gimple_min_invariant (w))
- val.const_val = w;
+ val.value = w;
else
{
val.lattice_val = VARYING;
- val.const_val = NULL;
+ val.value = NULL_TREE;
+ val.mem_ref = NULL_TREE;
}
}
}
- /* If LHS is not a gimple register, then it cannot take on an
- UNDEFINED value. */
- if (!is_gimple_reg (SSA_NAME_VAR (lhs))
- && val.lattice_val == UNDEFINED)
- val.lattice_val = UNKNOWN_VAL;
+ retval = SSA_PROP_NOT_INTERESTING;
/* Set the lattice value of the statement's output. */
- if (set_lattice_value (lhs, val))
+ if (TREE_CODE (lhs) == SSA_NAME)
{
- *output_p = lhs;
- if (val.lattice_val == VARYING)
- return SSA_PROP_VARYING;
- else
- return SSA_PROP_INTERESTING;
+ /* If STMT is an assignment to an SSA_NAME, we only have one
+ value to set. */
+ if (set_lattice_value (lhs, val))
+ {
+ *output_p = lhs;
+ if (val.lattice_val == VARYING)
+ retval = SSA_PROP_VARYING;
+ else
+ retval = SSA_PROP_INTERESTING;
+ }
}
- else
- return SSA_PROP_NOT_INTERESTING;
+ else if (do_store_ccp && stmt_makes_single_store (stmt))
+ {
+ /* Otherwise, set the names in V_MAY_DEF/V_MUST_DEF operands
+ to the new constant value and mark the LHS as the memory
+ reference associated with VAL. */
+ ssa_op_iter i;
+ tree vdef;
+ bool changed;
+
+ /* Stores cannot take on an UNDEFINED value. */
+ if (val.lattice_val == UNDEFINED)
+ val.lattice_val = UNKNOWN_VAL;
+
+ /* Mark VAL as stored in the LHS of this assignment. */
+ val.mem_ref = lhs;
+
+ /* Set the value of every VDEF to VAL. */
+ changed = false;
+ FOR_EACH_SSA_TREE_OPERAND (vdef, stmt, i, SSA_OP_VIRTUAL_DEFS)
+ changed |= set_lattice_value (vdef, val);
+
+ /* Note that for propagation purposes, we are only interested in
+ visiting statements that load the exact same memory reference
+ stored here. Those statements will have the exact same list
+ of virtual uses, so it is enough to set the output of this
+ statement to be its first virtual definition. */
+ *output_p = first_vdef (stmt);
+ if (changed)
+ {
+ if (val.lattice_val == VARYING)
+ retval = SSA_PROP_VARYING;
+ else
+ retval = SSA_PROP_INTERESTING;
+ }
+ }
+
+ return retval;
}
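
Illustration only: the kind of store/load pair the store-CCP paths in visit_assignment and ccp_fold are aimed at. Whether it is actually folded depends on the alias information and on -ftree-store-ccp being enabled; the fragment is invented, not taken from the testsuite:

    int a;                   /* memory (non-register) variable */

    int
    example (void)
    {
      a = 4;                 /* store: every VDEF of 'a' gets value 4, mem_ref = 'a' */
      return a + 1;          /* load from the same reference: may fold to 5 */
    }
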
@@ -1119,7 +1162,7 @@ visit_assignment (tree stmt, tree *output_p)
static enum ssa_prop_result
visit_cond_stmt (tree stmt, edge *taken_edge_p)
{
- value val;
+ prop_value_t val;
basic_block block;
block = bb_for_stmt (stmt);
@@ -1129,7 +1172,7 @@ visit_cond_stmt (tree stmt, edge *taken_edge_p)
to the worklist. If no single edge can be determined statically,
return SSA_PROP_VARYING to feed all the outgoing edges to the
propagation engine. */
- *taken_edge_p = val.const_val ? find_taken_edge (block, val.const_val) : 0;
+ *taken_edge_p = val.value ? find_taken_edge (block, val.value) : 0;
if (*taken_edge_p)
return SSA_PROP_INTERESTING;
else
@@ -1157,8 +1200,8 @@ ccp_visit_stmt (tree stmt, edge *taken_edge_p, tree *output_p)
if (dump_file && (dump_flags & TDF_DETAILS))
{
- fprintf (dump_file, "\nVisiting statement: ");
- print_generic_stmt (dump_file, stmt, TDF_SLIM);
+ fprintf (dump_file, "\nVisiting statement:\n");
+ print_generic_stmt (dump_file, stmt, dump_flags);
fprintf (dump_file, "\n");
}
@@ -1166,10 +1209,7 @@ ccp_visit_stmt (tree stmt, edge *taken_edge_p, tree *output_p)
v_must_defs = V_MUST_DEF_OPS (ann);
v_may_defs = V_MAY_DEF_OPS (ann);
- if (TREE_CODE (stmt) == MODIFY_EXPR
- && NUM_V_MAY_DEFS (v_may_defs) == 0
- && (NUM_V_MUST_DEFS (v_must_defs) == 1
- || TREE_CODE (TREE_OPERAND (stmt, 0)) == SSA_NAME))
+ if (TREE_CODE (stmt) == MODIFY_EXPR)
{
/* If the statement is an assignment that produces a single
output value, evaluate its RHS to see if the lattice value of
@@ -1191,30 +1231,35 @@ ccp_visit_stmt (tree stmt, edge *taken_edge_p, tree *output_p)
/* Definitions made by statements other than assignments to
SSA_NAMEs represent unknown modifications to their outputs.
Mark them VARYING. */
- FOR_EACH_SSA_TREE_OPERAND (def, stmt, iter, SSA_OP_DEF)
- def_to_varying (def);
-
- /* Mark all V_MAY_DEF operands VARYING. */
- FOR_EACH_SSA_TREE_OPERAND (def, stmt, iter, SSA_OP_VMAYDEF)
- def_to_varying (def);
+ FOR_EACH_SSA_TREE_OPERAND (def, stmt, iter, SSA_OP_ALL_DEFS)
+ {
+ prop_value_t v = { VARYING, NULL_TREE, NULL_TREE };
+ set_lattice_value (def, v);
+ }
return SSA_PROP_VARYING;
}
-/* Main entry point for SSA Conditional Constant Propagation.
-
- [ DESCRIBE MAIN ALGORITHM HERE ] */
+/* Main entry point for SSA Conditional Constant Propagation. */
static void
-execute_ssa_ccp (void)
+execute_ssa_ccp (bool store_ccp)
{
+ do_store_ccp = store_ccp;
ccp_initialize ();
ssa_propagate (ccp_visit_stmt, ccp_visit_phi_node);
ccp_finalize ();
}
+static void
+do_ssa_ccp (void)
+{
+ execute_ssa_ccp (false);
+}
+
+
static bool
gate_ccp (void)
{
@@ -1226,7 +1271,7 @@ struct tree_opt_pass pass_ccp =
{
"ccp", /* name */
gate_ccp, /* gate */
- execute_ssa_ccp, /* execute */
+ do_ssa_ccp, /* execute */
NULL, /* sub */
NULL, /* next */
0, /* static_pass_number */
@@ -1235,13 +1280,50 @@ struct tree_opt_pass pass_ccp =
0, /* properties_provided */
0, /* properties_destroyed */
0, /* todo_flags_start */
- TODO_cleanup_cfg | TODO_dump_func | TODO_rename_vars
+ TODO_cleanup_cfg | TODO_dump_func | TODO_update_ssa
| TODO_ggc_collect | TODO_verify_ssa
| TODO_verify_stmts, /* todo_flags_finish */
0 /* letter */
};
+static void
+do_ssa_store_ccp (void)
+{
+ /* If STORE-CCP is not enabled, we just run regular CCP. */
+ execute_ssa_ccp (flag_tree_store_ccp != 0);
+}
+
+static bool
+gate_store_ccp (void)
+{
+ /* STORE-CCP is enabled only with -ftree-store-ccp, but when
+ -fno-tree-store-ccp is specified, we should run regular CCP.
+ That's why the pass is enabled with either flag. */
+ return flag_tree_store_ccp != 0 || flag_tree_ccp != 0;
+}
+
+
+struct tree_opt_pass pass_store_ccp =
+{
+ "store_ccp", /* name */
+ gate_store_ccp, /* gate */
+ do_ssa_store_ccp, /* execute */
+ NULL, /* sub */
+ NULL, /* next */
+ 0, /* static_pass_number */
+ TV_TREE_STORE_CCP, /* tv_id */
+ PROP_cfg | PROP_ssa | PROP_alias, /* properties_required */
+ 0, /* properties_provided */
+ 0, /* properties_destroyed */
+ 0, /* todo_flags_start */
+ TODO_dump_func | TODO_update_ssa
+ | TODO_ggc_collect | TODO_verify_ssa
+ | TODO_cleanup_cfg
+ | TODO_verify_stmts, /* todo_flags_finish */
+ 0 /* letter */
+};
+
/* Given a constant value VAL for bitfield FIELD, and a destination
variable VAR, return VAL appropriately widened to fit into VAR. If
FIELD is wider than HOST_WIDE_INT, NULL is returned. */
@@ -2132,7 +2214,7 @@ convert_to_gimple_builtin (block_stmt_iterator *si_p, tree expr)
for (ti = tsi_start (stmts); !tsi_end_p (ti); tsi_next (&ti))
{
find_new_referenced_vars (tsi_stmt_ptr (ti));
- mark_new_vars_to_rename (tsi_stmt (ti), vars_to_rename);
+ mark_new_vars_to_rename (tsi_stmt (ti));
}
if (EXPR_HAS_LOCATION (stmt))
@@ -2233,6 +2315,6 @@ struct tree_opt_pass pass_fold_builtins =
0, /* todo_flags_start */
TODO_dump_func
| TODO_verify_ssa
- | TODO_rename_vars, /* todo_flags_finish */
+ | TODO_update_ssa, /* todo_flags_finish */
0 /* letter */
};
diff --git a/gcc/tree-ssa-copy.c b/gcc/tree-ssa-copy.c
index 83500471739..baca99c061c 100644
--- a/gcc/tree-ssa-copy.c
+++ b/gcc/tree-ssa-copy.c
@@ -1,4 +1,4 @@
-/* Const/copy propagation and SSA_NAME replacement support routines.
+/* Copy propagation and SSA_NAME replacement support routines.
Copyright (C) 2004, 2005 Free Software Foundation, Inc.
This file is part of GCC.
@@ -37,11 +37,13 @@ Boston, MA 02111-1307, USA. */
#include "tree-dump.h"
#include "tree-flow.h"
#include "tree-pass.h"
+#include "tree-ssa-propagate.h"
#include "langhooks.h"
-/* This file provides a handful of interfaces for performing const/copy
- propagation and simple expression replacement which keep variable
- annotations up-to-date.
+/* This file implements the copy propagation pass and provides a
+ handful of interfaces for performing const/copy propagation and
+ simple expression replacement which keep variable annotations
+ up-to-date.
We require that for any copy operation where the RHS and LHS have
a non-null memory tag the memory tag be the same. It is OK
@@ -54,7 +56,6 @@ Boston, MA 02111-1307, USA. */
replacements of one SSA_NAME with a different SSA_NAME to use the
APIs defined in this file. */
-
/* Return true if we may propagate ORIG into DEST, false otherwise. */
bool
@@ -103,8 +104,10 @@ may_propagate_copy (tree dest, tree orig)
I think that GIMPLE should emit the appropriate type-casts. For the
time being, blocking copy-propagation in these cases is the safe thing
to do. */
- if (TREE_CODE (dest) == SSA_NAME && TREE_CODE (orig) == SSA_NAME
- && POINTER_TYPE_P (type_d) && POINTER_TYPE_P (type_o))
+ if (TREE_CODE (dest) == SSA_NAME
+ && TREE_CODE (orig) == SSA_NAME
+ && POINTER_TYPE_P (type_d)
+ && POINTER_TYPE_P (type_o))
{
tree mt_dest = var_ann (SSA_NAME_VAR (dest))->type_mem_tag;
tree mt_orig = var_ann (SSA_NAME_VAR (orig))->type_mem_tag;
@@ -123,17 +126,9 @@ may_propagate_copy (tree dest, tree orig)
{
/* If both operands are SSA_NAMEs referring to virtual operands, then
we can always propagate. */
- if (TREE_CODE (orig) == SSA_NAME)
- {
- if (!is_gimple_reg (orig))
- return true;
-
-#ifdef ENABLE_CHECKING
- /* If we have one real and one virtual operand, then something has
- gone terribly wrong. */
- gcc_assert (!is_gimple_reg (orig));
-#endif
- }
+ if (TREE_CODE (orig) == SSA_NAME
+ && !is_gimple_reg (orig))
+ return true;
/* We have a "copy" from something like a constant into a virtual
operand. Reject these. */
@@ -202,30 +197,33 @@ merge_alias_info (tree orig, tree new)
else
gcc_assert (new_ann->type_mem_tag == orig_ann->type_mem_tag);
-#if defined ENABLE_CHECKING
- {
- struct ptr_info_def *orig_ptr_info = SSA_NAME_PTR_INFO (orig);
- struct ptr_info_def *new_ptr_info = SSA_NAME_PTR_INFO (new);
-
- if (orig_ptr_info
- && new_ptr_info
- && orig_ptr_info->name_mem_tag
- && new_ptr_info->name_mem_tag
- && orig_ptr_info->pt_vars
- && new_ptr_info->pt_vars)
+ /* Synchronize the name tags. If NEW did not have a name tag, get
+ it from ORIG. This happens when NEW is a compiler generated
+ temporary which still hasn't had its points-to information filled
+ in. */
+ if (SSA_NAME_PTR_INFO (orig))
{
- /* Note that pointer NEW may actually have a different set of
- pointed-to variables. However, since NEW is being
- copy-propagated into ORIG, it must always be true that the
- pointed-to set for pointer NEW is the same, or a subset, of
- the pointed-to set for pointer ORIG. If this isn't the case,
- we shouldn't have been able to do the propagation of NEW into
- ORIG. */
- gcc_assert (bitmap_intersect_p (new_ptr_info->pt_vars,
- orig_ptr_info->pt_vars));
+ struct ptr_info_def *orig_ptr_info = SSA_NAME_PTR_INFO (orig);
+ struct ptr_info_def *new_ptr_info = SSA_NAME_PTR_INFO (new);
+
+ if (new_ptr_info == NULL)
+ duplicate_ssa_name_ptr_info (new, orig_ptr_info);
+ else if (orig_ptr_info->name_mem_tag
+ && new_ptr_info->name_mem_tag
+ && orig_ptr_info->pt_vars
+ && new_ptr_info->pt_vars)
+ {
+ /* Note that pointer NEW may actually have a different set
+ of pointed-to variables. However, since NEW is being
+ copy-propagated into ORIG, it must always be true that
+ the pointed-to set for pointer NEW is the same, or a
+ subset, of the pointed-to set for pointer ORIG. If this
+ isn't the case, we shouldn't have been able to do the
+ propagation of NEW into ORIG. */
+ gcc_assert (bitmap_intersect_p (new_ptr_info->pt_vars,
+ orig_ptr_info->pt_vars));
+ }
}
- }
-#endif
}
@@ -310,3 +308,776 @@ replace_exp (use_operand_p op_p, tree val)
{
replace_exp_1 (op_p, val, false);
}
+
+
+/*---------------------------------------------------------------------------
+ Copy propagation
+---------------------------------------------------------------------------*/
+/* During propagation, we keep chains of variables that are copies of
+ one another. If variable X_i is a copy of X_j and X_j is a copy of
+ X_k, COPY_OF will contain:
+
+ COPY_OF[i].VALUE = X_j
+ COPY_OF[j].VALUE = X_k
+ COPY_OF[k].VALUE = X_k
+
+ After propagation, the copy-of value for each variable X_i is
+ converted into the final value by walking the copy-of chains and
+ updating COPY_OF[i].VALUE to be the last element of the chain. */
+static prop_value_t *copy_of;
+
+/* Used in set_copy_of_val to determine if the last link of a copy-of
+ chain has changed. */
+static tree *cached_last_copy_of;
+
+/* True if we are doing copy propagation on loads and stores. */
+static bool do_store_copy_prop;
+
+
+/* Return true if this statement may generate a useful copy. */
+
+static bool
+stmt_may_generate_copy (tree stmt)
+{
+ tree lhs, rhs;
+ stmt_ann_t ann;
+
+ if (TREE_CODE (stmt) == PHI_NODE)
+ return !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (stmt));
+
+ if (TREE_CODE (stmt) != MODIFY_EXPR)
+ return false;
+
+ lhs = TREE_OPERAND (stmt, 0);
+ rhs = TREE_OPERAND (stmt, 1);
+ ann = stmt_ann (stmt);
+
+ /* If the statement has volatile operands, it won't generate a
+ useful copy. */
+ if (ann->has_volatile_ops)
+ return false;
+
+ /* If we are not doing store copy-prop, statements with loads and/or
+ stores will never generate a useful copy. */
+ if (!do_store_copy_prop
+ && (NUM_VUSES (VUSE_OPS (ann)) > 0
+ || NUM_V_MAY_DEFS (V_MAY_DEF_OPS (ann)) > 0
+ || NUM_V_MUST_DEFS (V_MUST_DEF_OPS (ann)) > 0))
+ return false;
+
+ /* Otherwise, the only statements that generate useful copies are
+ assignments whose RHS is just an SSA name that doesn't flow
+ through abnormal edges. */
+ return TREE_CODE (rhs) == SSA_NAME && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs);
+}
+
+
+/* Return the copy-of value for VAR. */
+
+static inline prop_value_t *
+get_copy_of_val (tree var)
+{
+ prop_value_t *val = &copy_of[SSA_NAME_VERSION (var)];
+
+ if (val->value == NULL_TREE
+ && !stmt_may_generate_copy (SSA_NAME_DEF_STMT (var)))
+ {
+ /* If the variable will never generate a useful copy relation,
+ make it its own copy. */
+ val->value = var;
+ val->mem_ref = NULL_TREE;
+ }
+
+ return val;
+}
+
+
+/* Return last link in the copy-of chain for VAR. */
+
+static tree
+get_last_copy_of (tree var)
+{
+ tree last;
+ int i;
+
+ /* Traverse COPY_OF starting at VAR until we get to the last
+ link in the chain. Since it is possible to have cycles in PHI
+ nodes, the copy-of chain may also contain cycles.
+
+ To avoid infinite loops and to avoid traversing lengthy copy-of
+ chains, we artificially limit the maximum number of links we are
+ willing to traverse.
+
+ The value 5 was taken from a compiler and runtime library
+ bootstrap and a mixture of C and C++ code from various sources.
+ More than 82% of all copy-of chains were shorter than 5 links. */
+#define LIMIT 5
+
+ last = var;
+ for (i = 0; i < LIMIT; i++)
+ {
+ tree copy = copy_of[SSA_NAME_VERSION (last)].value;
+ if (copy == NULL_TREE || copy == last)
+ break;
+ last = copy;
+ }
+
+ /* If we have reached the limit, then we are either in a copy-of
+ cycle or the copy-of chain is too long. In this case, just
+ return VAR so that it is not considered a copy of anything. */
+ return (i < LIMIT ? last : var);
+}
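
A stand-alone sketch of the bounded chain walk above, using a plain array indexed by SSA version number instead of the prop_value_t table (mock code, not GCC's; a self-copy terminates the chain here instead of NULL_TREE):

    #include <stdio.h>

    #define LIMIT 5

    /* chain[v] is the version that v is a copy of; chain[v] == v ends the chain.  */
    static int
    last_copy_of (const int *chain, int var)
    {
      int last = var, i;

      for (i = 0; i < LIMIT; i++)
        {
          int copy = chain[last];
          if (copy == last)
            break;
          last = copy;
        }

      /* Too long or cyclic: treat VAR as a copy of nothing but itself.  */
      return i < LIMIT ? last : var;
    }

    int
    main (void)
    {
      /* 0 -> 1 -> 2 is a short chain; 3 -> 4 -> 3 is a cycle.  */
      int chain[5] = { 1, 2, 2, 4, 3 };
      printf ("last_copy_of (0) = %d\n", last_copy_of (chain, 0));  /* prints 2 */
      printf ("last_copy_of (3) = %d\n", last_copy_of (chain, 3));  /* prints 3 */
      return 0;
    }
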
+
+
+/* Set FIRST to be the first variable in the copy-of chain for DEST.
+ If DEST's copy-of value or its copy-of chain has changed, return
+ true.
+
+ MEM_REF is the memory reference where FIRST is stored. This is
+ used when DEST is a non-register and we are copy propagating loads
+ and stores. */
+
+static inline bool
+set_copy_of_val (tree dest, tree first, tree mem_ref)
+{
+ unsigned int dest_ver = SSA_NAME_VERSION (dest);
+ tree old_first, old_last, new_last;
+
+ /* Set FIRST to be the first link in COPY_OF[DEST]. If that
+ changed, return true. */
+ old_first = copy_of[dest_ver].value;
+ copy_of[dest_ver].value = first;
+ copy_of[dest_ver].mem_ref = mem_ref;
+
+ if (old_first != first)
+ return true;
+
+ /* If FIRST and OLD_FIRST are the same, we need to check whether the
+ copy-of chain starting at FIRST ends in a different variable. If
+ the copy-of chain starting at FIRST ends up in a different
+ variable than the last cached value we had for DEST, then return
+ true because DEST is now a copy of a different variable.
+
+ This test is necessary because even though the first link in the
+ copy-of chain may not have changed, if any of the variables in
+ the copy-of chain changed its final value, DEST will now be the
+ copy of a different variable, so we have to do another round of
+ propagation for everything that depends on DEST. */
+ old_last = cached_last_copy_of[dest_ver];
+ new_last = get_last_copy_of (dest);
+ cached_last_copy_of[dest_ver] = new_last;
+
+ return (old_last != new_last);
+}
+
+
+/* Dump the copy-of value for variable VAR to DUMP_FILE. */
+
+static void
+dump_copy_of (FILE *dump_file, tree var)
+{
+ tree val;
+
+ print_generic_expr (dump_file, var, dump_flags);
+
+ if (TREE_CODE (var) != SSA_NAME)
+ return;
+
+ fprintf (dump_file, " copy-of chain: ");
+
+ val = var;
+ print_generic_expr (dump_file, val, 0);
+ fprintf (dump_file, " ");
+ while (copy_of[SSA_NAME_VERSION (val)].value
+ && copy_of[SSA_NAME_VERSION (val)].value != val)
+ {
+ fprintf (dump_file, "-> ");
+ val = copy_of[SSA_NAME_VERSION (val)].value;
+ print_generic_expr (dump_file, val, 0);
+ fprintf (dump_file, " ");
+ }
+
+ val = get_copy_of_val (var)->value;
+ if (val == NULL_TREE)
+ fprintf (dump_file, "[UNDEFINED]");
+ else if (val != var)
+ fprintf (dump_file, "[COPY]");
+ else
+ fprintf (dump_file, "[NOT A COPY]");
+}
+
+
+/* Evaluate the RHS of STMT. If it produces a valid copy, set the LHS
+ value and store the LHS into *RESULT_P. If STMT generates more
+ than one name (i.e., STMT is an aliased store), it is enough to
+ store the first name in the V_MAY_DEF list into *RESULT_P. After
+ all, the names generated will be VUSEd in the same statements. */
+
+static enum ssa_prop_result
+copy_prop_visit_assignment (tree stmt, tree *result_p)
+{
+ tree lhs, rhs;
+ prop_value_t *rhs_val;
+
+ lhs = TREE_OPERAND (stmt, 0);
+ rhs = TREE_OPERAND (stmt, 1);
+
+ gcc_assert (TREE_CODE (rhs) == SSA_NAME);
+
+ rhs_val = get_copy_of_val (rhs);
+
+ if (TREE_CODE (lhs) == SSA_NAME)
+ {
+ /* Straight copy between two SSA names. First, make sure that
+ we can propagate the RHS into uses of LHS. */
+ if (!may_propagate_copy (lhs, rhs))
+ return SSA_PROP_VARYING;
+
+ /* Avoid copy propagation from an inner into an outer loop.
+ Otherwise, this may move loop variant variables outside of
+ their loops and prevent coalescing opportunities. If the
+ value was loop invariant, it will be hoisted by LICM and
+ exposed for copy propagation. */
+ if (loop_depth_of_name (rhs) > loop_depth_of_name (lhs))
+ return SSA_PROP_VARYING;
+
+ /* Notice that in the case of assignments, we make the LHS be a
+ copy of RHS's value, not of RHS itself. This avoids keeping
+ unnecessary copy-of chains (assignments cannot be in a cycle
+ like PHI nodes), speeding up the propagation process.
+ This is different from what we do in copy_prop_visit_phi_node.
+ In those cases, we are interested in the copy-of chains. */
+ *result_p = lhs;
+ if (set_copy_of_val (*result_p, rhs_val->value, rhs_val->mem_ref))
+ return SSA_PROP_INTERESTING;
+ else
+ return SSA_PROP_NOT_INTERESTING;
+ }
+ else if (stmt_makes_single_store (stmt))
+ {
+ /* Otherwise, set the names in V_MAY_DEF/V_MUST_DEF operands
+ to be a copy of RHS. */
+ ssa_op_iter i;
+ tree vdef;
+ bool changed;
+
+ /* This should only be executed when doing store copy-prop. */
+ gcc_assert (do_store_copy_prop);
+
+ /* Set the value of every VDEF to RHS_VAL. */
+ changed = false;
+ FOR_EACH_SSA_TREE_OPERAND (vdef, stmt, i, SSA_OP_VIRTUAL_DEFS)
+ changed |= set_copy_of_val (vdef, rhs_val->value, lhs);
+
+ /* Note that for propagation purposes, we are only interested in
+ visiting statements that load the exact same memory reference
+ stored here. Those statements will have the exact same list
+ of virtual uses, so it is enough to set the output of this
+ statement to be its first virtual definition. */
+ *result_p = first_vdef (stmt);
+
+ if (changed)
+ return SSA_PROP_INTERESTING;
+ else
+ return SSA_PROP_NOT_INTERESTING;
+ }
+
+
+ return SSA_PROP_VARYING;
+}
+
+
+/* Visit the COND_EXPR STMT. Return SSA_PROP_INTERESTING
+ if it can determine which edge will be taken. Otherwise, return
+ SSA_PROP_VARYING. */
+
+static enum ssa_prop_result
+copy_prop_visit_cond_stmt (tree stmt, edge *taken_edge_p)
+{
+ enum ssa_prop_result retval;
+ tree cond;
+ use_optype uses;
+
+ cond = COND_EXPR_COND (stmt);
+ uses = STMT_USE_OPS (stmt);
+ retval = SSA_PROP_VARYING;
+
+ /* The only conditionals that we may be able to compute statically
+ are predicates involving at least one SSA_NAME. */
+ if (TREE_CODE_CLASS (TREE_CODE (cond)) == tcc_comparison
+ && NUM_USES (uses) >= 1)
+ {
+ unsigned i;
+ tree *orig;
+
+ /* Save the original operands. */
+ orig = xmalloc (sizeof (tree) * NUM_USES (uses));
+ for (i = 0; i < NUM_USES (uses); i++)
+ {
+ orig[i] = USE_OP (uses, i);
+ SET_USE_OP (uses, i, get_last_copy_of (USE_OP (uses, i)));
+ }
+
+ /* See if we can determine the predicate's value. */
+ if (dump_file && (dump_flags & TDF_DETAILS))
+ {
+ fprintf (dump_file, "Trying to determine truth value of ");
+ fprintf (dump_file, "predicate ");
+ print_generic_stmt (dump_file, cond, 0);
+ }
+
+ *taken_edge_p = find_taken_edge (bb_for_stmt (stmt), cond);
+ if (*taken_edge_p)
+ retval = SSA_PROP_INTERESTING;
+
+ /* Restore the original operands. */
+ for (i = 0; i < NUM_USES (uses); i++)
+ SET_USE_OP (uses, i, orig[i]);
+ free (orig);
+ }
+
+ if (dump_file && (dump_flags & TDF_DETAILS) && *taken_edge_p)
+ fprintf (dump_file, "\nConditional will always take edge %d->%d\n",
+ (*taken_edge_p)->src->index, (*taken_edge_p)->dest->index);
+
+ return retval;
+}
+
+
+/* Evaluate statement STMT. If the statement produces a new output
+ value, return SSA_PROP_INTERESTING and store the SSA_NAME holding
+ the new value in *RESULT_P.
+
+ If STMT is a conditional branch and we can determine its truth
+ value, set *TAKEN_EDGE_P accordingly.
+
+ If the new value produced by STMT is varying, return
+ SSA_PROP_VARYING. */
+
+static enum ssa_prop_result
+copy_prop_visit_stmt (tree stmt, edge *taken_edge_p, tree *result_p)
+{
+ stmt_ann_t ann;
+ enum ssa_prop_result retval;
+
+ if (dump_file && (dump_flags & TDF_DETAILS))
+ {
+ fprintf (dump_file, "\nVisiting statement:\n");
+ print_generic_stmt (dump_file, stmt, dump_flags);
+ fprintf (dump_file, "\n");
+ }
+
+ ann = stmt_ann (stmt);
+
+ if (TREE_CODE (stmt) == MODIFY_EXPR
+ && TREE_CODE (TREE_OPERAND (stmt, 1)) == SSA_NAME
+ && (do_store_copy_prop
+ || TREE_CODE (TREE_OPERAND (stmt, 0)) == SSA_NAME))
+ {
+ /* If the statement is a copy assignment, evaluate its RHS to
+ see if the lattice value of its output has changed. */
+ retval = copy_prop_visit_assignment (stmt, result_p);
+ }
+ else if (TREE_CODE (stmt) == COND_EXPR)
+ {
+ /* See if we can determine which edge goes out of a conditional
+ jump. */
+ retval = copy_prop_visit_cond_stmt (stmt, taken_edge_p);
+ }
+ else
+ retval = SSA_PROP_VARYING;
+
+ if (retval == SSA_PROP_VARYING)
+ {
+ tree def;
+ ssa_op_iter i;
+
+ /* Any other kind of statement is not interesting for copy
+ propagation and, therefore, not worth simulating. */
+ if (dump_file && (dump_flags & TDF_DETAILS))
+ fprintf (dump_file, "No interesting values produced.\n");
+
+ /* The assignment is not a copy operation. Don't visit this
+ statement again and mark all the definitions in the statement
+ to be copies of nothing. */
+ FOR_EACH_SSA_TREE_OPERAND (def, stmt, i, SSA_OP_ALL_DEFS)
+ set_copy_of_val (def, def, NULL_TREE);
+ }
+
+ return retval;
+}
+
+
+/* Visit PHI node PHI. If all the arguments produce the same value,
+ set it to be the value of the LHS of PHI. */
+
+static enum ssa_prop_result
+copy_prop_visit_phi_node (tree phi)
+{
+ enum ssa_prop_result retval;
+ int i;
+ tree lhs;
+ prop_value_t phi_val = { 0, NULL_TREE, NULL_TREE };
+
+ lhs = PHI_RESULT (phi);
+
+ if (dump_file && (dump_flags & TDF_DETAILS))
+ {
+ fprintf (dump_file, "\nVisiting PHI node: ");
+ print_generic_expr (dump_file, phi, dump_flags);
+ fprintf (dump_file, "\n\n");
+ }
+
+ for (i = 0; i < PHI_NUM_ARGS (phi); i++)
+ {
+ prop_value_t *arg_val;
+ tree arg = PHI_ARG_DEF (phi, i);
+ edge e = PHI_ARG_EDGE (phi, i);
+
+ /* We don't care about values flowing through non-executable
+ edges. */
+ if (!(e->flags & EDGE_EXECUTABLE))
+ continue;
+
+ /* Constants in the argument list never generate a useful copy.
+ Similarly, names that flow through abnormal edges cannot be
+ used to derive copies. */
+ if (TREE_CODE (arg) != SSA_NAME || SSA_NAME_OCCURS_IN_ABNORMAL_PHI (arg))
+ {
+ phi_val.value = lhs;
+ break;
+ }
+
+ /* Avoid copy propagation from an inner into an outer loop.
+ Otherwise, this may move loop variant variables outside of
+ their loops and prevent coalescing opportunities. If the
+ value was loop invariant, it will be hoisted by LICM and
+ exposed for copy propagation. */
+ if (loop_depth_of_name (arg) > loop_depth_of_name (lhs))
+ {
+ phi_val.value = lhs;
+ break;
+ }
+
+ /* If the LHS appears in the argument list, ignore it. It is
+ irrelevant as a copy. */
+ if (arg == lhs || get_last_copy_of (arg) == lhs)
+ continue;
+
+ if (dump_file && (dump_flags & TDF_DETAILS))
+ {
+ fprintf (dump_file, "\tArgument #%d: ", i);
+ dump_copy_of (dump_file, arg);
+ fprintf (dump_file, "\n");
+ }
+
+ arg_val = get_copy_of_val (arg);
+
+ /* If the LHS didn't have a value yet, make it a copy of the
+ first argument we find. Notice that while we make the LHS be
+ a copy of the argument itself, we take the memory reference
+ from the argument's value so that we can compare it to the
+ memory reference of all the other arguments. */
+ if (phi_val.value == NULL_TREE)
+ {
+ phi_val.value = arg;
+ phi_val.mem_ref = arg_val->mem_ref;
+ continue;
+ }
+
+ /* If PHI_VAL and ARG don't have a common copy-of chain, then
+ this PHI node cannot be a copy operation. Also, if we are
+ copy propagating stores and these two arguments came from
+ different memory references, they cannot be considered
+ copies. */
+ if (get_last_copy_of (phi_val.value) != get_last_copy_of (arg)
+ || (do_store_copy_prop
+ && phi_val.mem_ref
+ && arg_val->mem_ref
+ && simple_cst_equal (phi_val.mem_ref, arg_val->mem_ref) != 1))
+ {
+ phi_val.value = lhs;
+ break;
+ }
+ }
+
+ if (phi_val.value && set_copy_of_val (lhs, phi_val.value, phi_val.mem_ref))
+ retval = (phi_val.value != lhs) ? SSA_PROP_INTERESTING : SSA_PROP_VARYING;
+ else
+ retval = SSA_PROP_NOT_INTERESTING;
+
+ if (dump_file && (dump_flags & TDF_DETAILS))
+ {
+ fprintf (dump_file, "\nPHI node ");
+ dump_copy_of (dump_file, lhs);
+ fprintf (dump_file, "\nTelling the propagator to ");
+ if (retval == SSA_PROP_INTERESTING)
+ fprintf (dump_file, "add SSA edges out of this PHI and continue.");
+ else if (retval == SSA_PROP_VARYING)
+ fprintf (dump_file, "add SSA edges out of this PHI and never visit again.");
+ else
+ fprintf (dump_file, "do nothing with SSA edges and keep iterating.");
+ fprintf (dump_file, "\n\n");
+ }
+
+ return retval;
+}
+
+
+/* Initialize structures used for copy propagation. */
+
+static void
+init_copy_prop (void)
+{
+ basic_block bb;
+
+ copy_of = xmalloc (num_ssa_names * sizeof (*copy_of));
+ memset (copy_of, 0, num_ssa_names * sizeof (*copy_of));
+
+ cached_last_copy_of = xmalloc (num_ssa_names * sizeof (*cached_last_copy_of));
+ memset (cached_last_copy_of, 0, num_ssa_names * sizeof (*cached_last_copy_of));
+
+ FOR_EACH_BB (bb)
+ {
+ block_stmt_iterator si;
+ tree phi;
+
+ for (si = bsi_start (bb); !bsi_end_p (si); bsi_next (&si))
+ {
+ tree stmt = bsi_stmt (si);
+
+ /* The only statements that we care about are those that may
+ generate useful copies. We also need to mark conditional
+ jumps so that their outgoing edges are added to the work
+ lists of the propagator. */
+ if (stmt_ends_bb_p (stmt))
+ DONT_SIMULATE_AGAIN (stmt) = false;
+ else if (stmt_may_generate_copy (stmt))
+ DONT_SIMULATE_AGAIN (stmt) = false;
+ else
+ {
+ tree def;
+ ssa_op_iter iter;
+
+ /* No need to simulate this statement anymore. */
+ DONT_SIMULATE_AGAIN (stmt) = true;
+
+ /* Mark all the outputs of this statement as not being
+ the copy of anything. */
+ FOR_EACH_SSA_TREE_OPERAND (def, stmt, iter, SSA_OP_ALL_DEFS)
+ set_copy_of_val (def, def, NULL_TREE);
+ }
+ }
+
+ for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
+ DONT_SIMULATE_AGAIN (phi) = false;
+ }
+}
+
+
+/* Deallocate memory used in copy propagation and do final
+ substitution. */
+
+static void
+fini_copy_prop (void)
+{
+ size_t i;
+
+ /* Set the final copy-of value for each variable by traversing the
+ copy-of chains. */
+ for (i = 1; i < num_ssa_names; i++)
+ {
+ tree var = ssa_name (i);
+ if (var && copy_of[i].value && copy_of[i].value != var)
+ copy_of[i].value = get_last_copy_of (var);
+ }
+
+ substitute_and_fold (copy_of);
+
+ free (copy_of);
+}
+
+
+/* Main entry point to the copy propagator. The algorithm propagates
+ the value COPY-OF using ssa_propagate. For every variable X_i,
+ COPY-OF(X_i) indicates which variable is X_i created from. The
+ following example shows how the algorithm proceeds at a high level:
+
+ 1 a_24 = x_1
+ 2 a_2 = PHI <a_24, x_1>
+ 3 a_5 = PHI <a_2>
+ 4 x_1 = PHI <x_298, a_5, a_2>
+
+ The end result should be that a_2, a_5, a_24 and x_1 are a copy of
+ x_298. Propagation proceeds as follows.
+
+ Visit #1: a_24 is copy-of x_1. Value changed.
+ Visit #2: a_2 is copy-of x_1. Value changed.
+ Visit #3: a_5 is copy-of x_1. Value changed.
+ Visit #4: x_1 is copy-of x_298. Value changed.
+ Visit #1: a_24 is copy-of x_298. Value changed.
+ Visit #2: a_2 is copy-of x_298. Value changed.
+ Visit #3: a_5 is copy-of x_298. Value changed.
+ Visit #4: x_1 is copy-of x_298. Stable state reached.
+
+ When visiting PHI nodes, we only consider arguments that flow
+ through edges marked executable by the propagation engine. So,
+ when visiting statement #2 for the first time, we will only look at
+ the first argument (a_24) and optimistically assume that a_2 is a
+ copy of a_24's copy-of value (x_1).
+
+ The problem with this approach is that it may fail to discover copy
+ relations in PHI cycles. To avoid this, instead of propagating
+ copy-of values, we actually propagate copy-of chains. For instance:
+
+ A_3 = B_1;
+ C_9 = A_3;
+ D_4 = C_9;
+ X_i = D_4;
+
+ In this code fragment, COPY-OF (X_i) = { D_4, C_9, A_3, B_1 }.
+ Obviously, we are only really interested in the last value of the
+ chain, however the propagator needs to access the copy-of chain
+ when visiting PHI nodes.
+
+ To represent the copy-of chain, we use the array COPY_OF, which
+ holds the first link in the copy-of chain for every variable.
+ If variable X_i is a copy of X_j, which in turn is a copy of X_k,
+ the array will contain:
+
+ COPY_OF[i].VALUE = X_j
+ COPY_OF[j].VALUE = X_k
+ COPY_OF[k].VALUE = X_k
+
+ Keeping copy-of chains instead of copy-of values directly becomes
+ important when visiting PHI nodes. Suppose that we had the
+ following PHI cycle, such that x_52 is already considered a copy of
+ x_53:
+
+ 1 x_54 = PHI <x_53, x_52>
+ 2 x_53 = PHI <x_898, x_54>
+
+ Visit #1: x_54 is copy-of x_53 (because x_52 is copy-of x_53)
+ Visit #2: x_53 is copy-of x_898 (because x_54 is a copy of x_53,
+ so it is considered irrelevant
+ as a copy).
+ Visit #1: x_54 is copy-of nothing (x_53 is a copy-of x_898 and
+ x_52 is a copy of x_53, so
+ they don't match)
+ Visit #2: x_53 is copy-of nothing
+
+ This problem is avoided by keeping a chain of copies, instead of
+ the final copy-of value. Propagation will now only keep the first
+ element of a variable's copy-of chain. When visiting PHI nodes,
+ arguments are considered equal if their copy-of chains end in the
+ same variable. So, as long as their copy-of chains overlap, we
+ know that they will be a copy of the same variable, regardless of
+ which variable that may be.
+
+ Propagation would then proceed as follows (the notation a -> b
+ means that a is a copy-of b):
+
+ Visit #1: x_54 = PHI <x_53, x_52>
+ x_53 -> x_53
+ x_52 -> x_53
+ Result: x_54 -> x_53. Value changed. Add SSA edges.
+
+ Visit #1: x_53 = PHI <x_898, x_54>
+ x_898 -> x_898
+ x_54 -> x_53
+ Result: x_53 -> x_898. Value changed. Add SSA edges.
+
+ Visit #2: x_54 = PHI <x_53, x_52>
+ x_53 -> x_898
+ x_52 -> x_53 -> x_898
+ Result: x_54 -> x_898. Value changed. Add SSA edges.
+
+ Visit #2: x_53 = PHI <x_898, x_54>
+ x_898 -> x_898
+ x_54 -> x_898
+ Result: x_53 -> x_898. Value didn't change. Stable state
+
+ Once the propagator stabilizes, we end up with the desired result:
+ x_53 and x_54 are both copies of x_898. */
+
+static void
+execute_copy_prop (bool store_copy_prop)
+{
+ do_store_copy_prop = store_copy_prop;
+ init_copy_prop ();
+ ssa_propagate (copy_prop_visit_stmt, copy_prop_visit_phi_node);
+ fini_copy_prop ();
+}
+
+
+static bool
+gate_copy_prop (void)
+{
+ return flag_tree_copy_prop != 0;
+}
+
+static void
+do_copy_prop (void)
+{
+ execute_copy_prop (false);
+}
+
+struct tree_opt_pass pass_copy_prop =
+{
+ "copyprop", /* name */
+ gate_copy_prop, /* gate */
+ do_copy_prop, /* execute */
+ NULL, /* sub */
+ NULL, /* next */
+ 0, /* static_pass_number */
+ TV_TREE_COPY_PROP, /* tv_id */
+ PROP_ssa | PROP_alias | PROP_cfg, /* properties_required */
+ 0, /* properties_provided */
+ 0, /* properties_destroyed */
+ 0, /* todo_flags_start */
+ TODO_cleanup_cfg
+ | TODO_dump_func
+ | TODO_ggc_collect
+ | TODO_verify_ssa
+ | TODO_update_ssa, /* todo_flags_finish */
+ 0 /* letter */
+};
+
+
+static bool
+gate_store_copy_prop (void)
+{
+ /* STORE-COPY-PROP is only performed with -ftree-store-copy-prop,
+ but when that flag is disabled this pass should still run regular
+ COPY-PROP. That's why the pass is gated on either flag. */
+ return flag_tree_store_copy_prop != 0 || flag_tree_copy_prop != 0;
+}
+
+static void
+store_copy_prop (void)
+{
+ /* If STORE-COPY-PROP is not enabled, we just run regular COPY-PROP. */
+ execute_copy_prop (flag_tree_store_copy_prop != 0);
+}
+
+struct tree_opt_pass pass_store_copy_prop =
+{
+ "store_copyprop", /* name */
+ gate_store_copy_prop, /* gate */
+ store_copy_prop, /* execute */
+ NULL, /* sub */
+ NULL, /* next */
+ 0, /* static_pass_number */
+ TV_TREE_STORE_COPY_PROP, /* tv_id */
+ PROP_ssa | PROP_alias | PROP_cfg, /* properties_required */
+ 0, /* properties_provided */
+ 0, /* properties_destroyed */
+ 0, /* todo_flags_start */
+ TODO_dump_func
+ | TODO_cleanup_cfg
+ | TODO_ggc_collect
+ | TODO_verify_ssa
+ | TODO_update_ssa, /* todo_flags_finish */
+ 0 /* letter */
+};
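As a standalone illustration of the copy-of chains described in the comment above (plain C, hypothetical names, not part of the patch), resolving a version's final copy-of value is just a walk of the chain until an entry refers to itself, which is what the comment describes get_last_copy_of doing over COPY_CHAINS:

#include <stdio.h>

#define NUM_VERSIONS 6

/* copy_chain[i] holds the version that version i is a direct copy of;
   an entry that refers to itself terminates the chain.  */
static int copy_chain[NUM_VERSIONS];

/* Walk the chain starting at version V and return its final
   copy-of value.  */
static int
last_copy_of (int v)
{
  while (copy_chain[v] != v)
    v = copy_chain[v];
  return v;
}

int
main (void)
{
  /* The chain from the comment: X_i -> X_j -> X_k -> X_k.  */
  copy_chain[0] = 0;
  copy_chain[1] = 2;            /* X_1 is a copy of X_2 ...         */
  copy_chain[2] = 3;            /* ... which is a copy of X_3 ...   */
  copy_chain[3] = 3;            /* ... which heads its own chain.   */
  copy_chain[4] = 1;            /* X_4 -> X_1 -> X_2 -> X_3.        */
  copy_chain[5] = 5;

  printf ("last copy of X_4 is X_%d\n", last_copy_of (4));   /* X_3 */
  return 0;
}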
diff --git a/gcc/tree-ssa-dce.c b/gcc/tree-ssa-dce.c
index dc388ee7a87..582de359dd7 100644
--- a/gcc/tree-ssa-dce.c
+++ b/gcc/tree-ssa-dce.c
@@ -779,8 +779,7 @@ remove_dead_stmt (block_stmt_iterator *i, basic_block bb)
SSA_OP_VIRTUAL_DEFS | SSA_OP_VIRTUAL_KILLS)
{
tree def = DEF_FROM_PTR (def_p);
- bitmap_set_bit (vars_to_rename,
- var_ann (SSA_NAME_VAR (def))->uid);
+ mark_sym_for_renaming (SSA_NAME_VAR (def));
}
bsi_remove (i);
release_defs (t);
@@ -942,7 +941,11 @@ struct tree_opt_pass pass_dce =
0, /* properties_provided */
0, /* properties_destroyed */
0, /* todo_flags_start */
- TODO_dump_func | TODO_fix_def_def_chains | TODO_cleanup_cfg | TODO_ggc_collect | TODO_verify_ssa, /* todo_flags_finish */
+ TODO_dump_func
+ | TODO_update_ssa_no_phi
+ | TODO_cleanup_cfg
+ | TODO_ggc_collect
+ | TODO_verify_ssa, /* todo_flags_finish */
0 /* letter */
};
@@ -959,8 +962,12 @@ struct tree_opt_pass pass_cd_dce =
0, /* properties_provided */
0, /* properties_destroyed */
0, /* todo_flags_start */
- TODO_dump_func | TODO_fix_def_def_chains | TODO_cleanup_cfg | TODO_ggc_collect | TODO_verify_ssa | TODO_verify_flow,
- /* todo_flags_finish */
+ TODO_dump_func
+ | TODO_update_ssa_no_phi
+ | TODO_cleanup_cfg
+ | TODO_ggc_collect
+ | TODO_verify_ssa
+ | TODO_verify_flow, /* todo_flags_finish */
0 /* letter */
};
diff --git a/gcc/tree-ssa-dom.c b/gcc/tree-ssa-dom.c
index ebb0aa3fbfa..1d4f9b0b1c2 100644
--- a/gcc/tree-ssa-dom.c
+++ b/gcc/tree-ssa-dom.c
@@ -174,6 +174,8 @@ struct opt_stats_d
long num_stmts;
long num_exprs_considered;
long num_re;
+ long num_const_prop;
+ long num_copy_prop;
};
static struct opt_stats_d opt_stats;
@@ -299,6 +301,7 @@ static edge single_incoming_edge_ignoring_loop_edges (basic_block);
static void restore_nonzero_vars_to_original_value (void);
static inline bool unsafe_associative_fp_binop (tree);
+
/* Local version of fold that doesn't introduce cruft. */
static tree
@@ -403,6 +406,7 @@ tree_ssa_dominator_optimize (void)
structure. */
walk_data.global_data = NULL;
walk_data.block_local_data_size = 0;
+ walk_data.interesting_blocks = NULL;
/* Now initialize the dominator walker. */
init_walk_dominator_tree (&walk_data);
@@ -442,11 +446,7 @@ tree_ssa_dominator_optimize (void)
interactions between rewriting of _DECL nodes into SSA form
and rewriting SSA_NAME nodes into SSA form after block
duplication and CFG manipulation. */
- if (!bitmap_empty_p (vars_to_rename))
- {
- rewrite_into_ssa (false);
- bitmap_clear (vars_to_rename);
- }
+ update_ssa (TODO_update_ssa);
free_all_edge_infos ();
@@ -572,7 +572,8 @@ struct tree_opt_pass pass_dominator =
0, /* properties_provided */
0, /* properties_destroyed */
0, /* todo_flags_start */
- TODO_dump_func | TODO_rename_vars
+ TODO_dump_func
+ | TODO_update_ssa
| TODO_verify_ssa, /* todo_flags_finish */
0 /* letter */
};
@@ -1200,7 +1201,7 @@ dom_opt_finalize_block (struct dom_walk_data *walk_data, basic_block bb)
break;
VEC_pop (tree_on_heap, stmts_to_rescan);
- mark_new_vars_to_rename (stmt, vars_to_rename);
+ mark_new_vars_to_rename (stmt);
}
}
@@ -1386,6 +1387,10 @@ dump_dominator_optimization_stats (FILE *file)
fprintf (file, " Redundant expressions eliminated: %6ld (%.0f%%)\n",
opt_stats.num_re, PERCENT (opt_stats.num_re,
n_exprs));
+ fprintf (file, " Constants propagated: %6ld\n",
+ opt_stats.num_const_prop);
+ fprintf (file, " Copies propagated: %6ld\n",
+ opt_stats.num_copy_prop);
fprintf (file, "\nHash table statistics:\n");
@@ -1600,7 +1605,7 @@ record_const_or_copy_1 (tree x, tree y, tree prev_x)
will be relatively correct, and as more passes are taught to keep loop info
up to date, the result will become more and more accurate. */
-static int
+int
loop_depth_of_name (tree x)
{
tree defstmt;
@@ -2229,9 +2234,9 @@ simplify_cond_and_lookup_avail_expr (tree stmt,
Similarly the high value for the merged range is the
minimum of the previous high value and the high value of
this record. */
- low = (tree_int_cst_compare (low, tmp_low) == 1
+ low = (low && tree_int_cst_compare (low, tmp_low) == 1
? low : tmp_low);
- high = (tree_int_cst_compare (high, tmp_high) == -1
+ high = (high && tree_int_cst_compare (high, tmp_high) == -1
? high : tmp_high);
}
@@ -2424,12 +2429,11 @@ cprop_into_successor_phis (basic_block bb, bitmap nonzero_vars)
ORIG_P with its value in our constant/copy table. */
new = SSA_NAME_VALUE (orig);
if (new
+ && new != orig
&& (TREE_CODE (new) == SSA_NAME
|| is_gimple_min_invariant (new))
&& may_propagate_copy (orig, new))
- {
- propagate_value (orig_p, new);
- }
+ propagate_value (orig_p, new);
}
}
}
@@ -2624,7 +2628,6 @@ static void
propagate_to_outgoing_edges (struct dom_walk_data *walk_data ATTRIBUTE_UNUSED,
basic_block bb)
{
-
record_edge_info (bb);
cprop_into_successor_phis (bb, nonzero_vars);
}
@@ -2756,24 +2759,7 @@ record_equivalences_from_stmt (tree stmt,
|| is_gimple_min_invariant (rhs)))
SSA_NAME_VALUE (lhs) = rhs;
- /* alloca never returns zero and the address of a non-weak symbol
- is never zero. NOP_EXPRs and CONVERT_EXPRs can be completely
- stripped as they do not affect this equivalence. */
- while (TREE_CODE (rhs) == NOP_EXPR
- || TREE_CODE (rhs) == CONVERT_EXPR)
- rhs = TREE_OPERAND (rhs, 0);
-
- if (alloca_call_p (rhs)
- || (TREE_CODE (rhs) == ADDR_EXPR
- && DECL_P (TREE_OPERAND (rhs, 0))
- && ! DECL_WEAK (TREE_OPERAND (rhs, 0))))
- record_var_is_nonzero (lhs);
-
- /* IOR of any value with a nonzero value will result in a nonzero
- value. Even if we do not know the exact result recording that
- the result is nonzero is worth the effort. */
- if (TREE_CODE (rhs) == BIT_IOR_EXPR
- && integer_nonzerop (TREE_OPERAND (rhs, 1)))
+ if (expr_computes_nonzero (rhs))
record_var_is_nonzero (lhs);
}
@@ -2875,7 +2861,7 @@ cprop_operand (tree stmt, use_operand_p op_p)
copy of some other variable, use the value or copy stored in
CONST_AND_COPIES. */
val = SSA_NAME_VALUE (op);
- if (val && TREE_CODE (val) != VALUE_HANDLE)
+ if (val && val != op && TREE_CODE (val) != VALUE_HANDLE)
{
tree op_type, val_type;
@@ -2885,8 +2871,9 @@ cprop_operand (tree stmt, use_operand_p op_p)
statement. Also only allow the new value to be an SSA_NAME
for propagation into virtual operands. */
if (!is_gimple_reg (op)
- && (get_virtual_var (val) != get_virtual_var (op)
- || TREE_CODE (val) != SSA_NAME))
+ && (TREE_CODE (val) != SSA_NAME
+ || is_gimple_reg (val)
+ || get_virtual_var (val) != get_virtual_var (op)))
return false;
/* Do not replace hard register operands in asm statements. */
@@ -2952,6 +2939,11 @@ cprop_operand (tree stmt, use_operand_p op_p)
&& is_gimple_min_invariant (val)))
may_have_exposed_new_symbols = true;
+ if (TREE_CODE (val) != SSA_NAME)
+ opt_stats.num_const_prop++;
+ else
+ opt_stats.num_copy_prop++;
+
propagate_value (op_p, val);
/* And note that we modified this statement. This is now
diff --git a/gcc/tree-ssa-dse.c b/gcc/tree-ssa-dse.c
index d31c4344f04..4d929e1331b 100644
--- a/gcc/tree-ssa-dse.c
+++ b/gcc/tree-ssa-dse.c
@@ -251,6 +251,9 @@ dse_optimize_stmt (struct dom_walk_data *walk_data,
&& operand_equal_p (TREE_OPERAND (stmt, 0),
TREE_OPERAND (use_stmt, 0), 0))
{
+ tree def;
+ ssa_op_iter iter;
+
/* Make sure we propagate the ABNORMAL bit setting. */
if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (USE_FROM_PTR (first_use_p)))
SSA_NAME_OCCURS_IN_ABNORMAL_PHI (usevar) = 1;
@@ -267,6 +270,12 @@ dse_optimize_stmt (struct dom_walk_data *walk_data,
/* Remove the dead store. */
bsi_remove (&bsi);
+ /* The virtual defs for the dead statement will need to be
+ updated. Since these names are going to disappear,
+ FUD chains for uses downstream need to be updated. */
+ FOR_EACH_SSA_TREE_OPERAND (def, stmt, iter, SSA_OP_VIRTUAL_DEFS)
+ mark_sym_for_renaming (SSA_NAME_VAR (def));
+
/* And release any SSA_NAMEs set in this statement back to the
SSA_NAME manager. */
release_defs (stmt);
@@ -347,6 +356,7 @@ tree_ssa_dse (void)
walk_data.after_dom_children_before_stmts = NULL;
walk_data.after_dom_children_walk_stmts = NULL;
walk_data.after_dom_children_after_stmts = dse_finalize_block;
+ walk_data.interesting_blocks = NULL;
walk_data.block_local_data_size = sizeof (struct dse_block_local_data);
@@ -384,12 +394,15 @@ struct tree_opt_pass pass_dse = {
NULL, /* next */
0, /* static_pass_number */
TV_TREE_DSE, /* tv_id */
- PROP_cfg | PROP_ssa
+ PROP_cfg
+ | PROP_ssa
| PROP_alias, /* properties_required */
0, /* properties_provided */
0, /* properties_destroyed */
0, /* todo_flags_start */
- TODO_dump_func | TODO_ggc_collect /* todo_flags_finish */
- | TODO_verify_ssa,
- 0 /* letter */
+ TODO_dump_func
+ | TODO_ggc_collect
+ | TODO_update_ssa
+ | TODO_verify_ssa, /* todo_flags_finish */
+ 0 /* letter */
};
diff --git a/gcc/tree-ssa-loop-ch.c b/gcc/tree-ssa-loop-ch.c
index fb5f7f68fb5..c465e2e15d4 100644
--- a/gcc/tree-ssa-loop-ch.c
+++ b/gcc/tree-ssa-loop-ch.c
@@ -237,10 +237,6 @@ copy_loop_headers (void)
free (bbs);
free (copied_bbs);
-#ifdef ENABLE_CHECKING
- verify_loop_closed_ssa ();
-#endif
-
loop_optimizer_finalize (loops, NULL);
}
diff --git a/gcc/tree-ssa-loop-im.c b/gcc/tree-ssa-loop-im.c
index 7823e1711e7..a0c45821cdb 100644
--- a/gcc/tree-ssa-loop-im.c
+++ b/gcc/tree-ssa-loop-im.c
@@ -720,15 +720,14 @@ move_computations (void)
fini_walk_dominator_tree (&walk_data);
loop_commit_inserts ();
- rewrite_into_ssa (false);
- if (!bitmap_empty_p (vars_to_rename))
- {
- /* The rewrite of ssa names may cause violation of loop closed ssa
- form invariants. TODO -- avoid these rewrites completely.
- Information in virtual phi nodes is sufficient for it. */
- rewrite_into_loop_closed_ssa (NULL);
- }
- bitmap_clear (vars_to_rename);
+
+ if (need_ssa_update_p ())
+ update_ssa (TODO_update_ssa);
+
+ /* The movement of LI code may cause violation of loop closed SSA
+ form invariants. TODO -- avoid these rewrites completely.
+ Information in virtual phi nodes is sufficient for it. */
+ rewrite_into_loop_closed_ssa (NULL);
}
/* Checks whether the statement defining variable *INDEX can be hoisted
@@ -1096,10 +1095,7 @@ rewrite_mem_refs (tree tmp_var, struct mem_ref *mem_refs)
for (; mem_refs; mem_refs = mem_refs->next)
{
FOR_EACH_SSA_TREE_OPERAND (var, mem_refs->stmt, iter, SSA_OP_ALL_VIRTUALS)
- {
- var = SSA_NAME_VAR (var);
- bitmap_set_bit (vars_to_rename, var_ann (var)->uid);
- }
+ mark_sym_for_renaming (SSA_NAME_VAR (var));
*mem_refs->ref = tmp_var;
update_stmt (mem_refs->stmt);
diff --git a/gcc/tree-ssa-loop-ivopts.c b/gcc/tree-ssa-loop-ivopts.c
index 508f7814f45..6dbb451fadf 100644
--- a/gcc/tree-ssa-loop-ivopts.c
+++ b/gcc/tree-ssa-loop-ivopts.c
@@ -4810,7 +4810,7 @@ unshare_and_remove_ssa_names (tree ref)
static void
rewrite_address_base (block_stmt_iterator *bsi, tree *op, tree with)
{
- tree bvar, var, new_var, new_name, copy, name;
+ tree bvar, var, new_name, copy, name;
tree orig;
var = bvar = get_base_address (*op);
@@ -4832,24 +4832,27 @@ rewrite_address_base (block_stmt_iterator *bsi, tree *op, tree with)
else
goto do_rewrite;
- if (var_ann (var)->type_mem_tag)
- var = var_ann (var)->type_mem_tag;
-
/* We need to add a memory tag for the variable. But we do not want
to add it to the temporary used for the computations, since this leads
to problems in redundancy elimination when there are common parts
in two computations referring to the different arrays. So we copy
the variable to a new temporary. */
copy = build2 (MODIFY_EXPR, void_type_node, NULL_TREE, with);
+
if (name)
new_name = duplicate_ssa_name (name, copy);
else
{
- new_var = create_tmp_var (TREE_TYPE (with), "ruatmp");
- add_referenced_tmp_var (new_var);
- var_ann (new_var)->type_mem_tag = var;
- new_name = make_ssa_name (new_var, copy);
+ tree tag = var_ann (var)->type_mem_tag;
+ tree new_ptr = create_tmp_var (TREE_TYPE (with), "ruatmp");
+ add_referenced_tmp_var (new_ptr);
+ if (tag)
+ var_ann (new_ptr)->type_mem_tag = tag;
+ else
+ add_type_alias (new_ptr, var);
+ new_name = make_ssa_name (new_ptr, copy);
}
+
TREE_OPERAND (copy, 0) = new_name;
update_stmt (copy);
bsi_insert_before (bsi, copy, BSI_SAME_STMT);
@@ -4870,6 +4873,10 @@ do_rewrite:
/* Record the original reference, for purposes of alias analysis. */
REF_ORIGINAL (*op) = orig;
+
+ /* Virtual operands in the original statement may have to be renamed
+ because of the replacement. */
+ mark_new_vars_to_rename (bsi_stmt (*bsi));
}
/* Rewrites USE (address that is an iv) using candidate CAND. */
@@ -5377,11 +5384,6 @@ tree_ssa_iv_optimize (struct loops *loops)
while (loop->inner)
loop = loop->inner;
-#ifdef ENABLE_CHECKING
- verify_loop_closed_ssa ();
- verify_stmts ();
-#endif
-
/* Scan the loops, inner ones first. */
while (loop != loops->tree_root)
{
@@ -5400,10 +5402,27 @@ tree_ssa_iv_optimize (struct loops *loops)
loop = loop->outer;
}
-#ifdef ENABLE_CHECKING
- verify_loop_closed_ssa ();
- verify_stmts ();
-#endif
+ /* FIXME. IV opts introduces new aliases and call-clobbered
+ variables, which need to be renamed. However, when we call the
+ renamer, not all statements will be scanned for operands. In
+ particular, the newly introduced aliases may appear in statements
+ that are considered "unmodified", so the renamer will not get a
+ chance to rename those operands.
+
+ Work around this problem by forcing an operand re-scan on every
+ statement. This will not be necessary once the new operand
+ scanner is implemented. */
+ if (need_ssa_update_p ())
+ {
+ basic_block bb;
+ block_stmt_iterator si;
+ FOR_EACH_BB (bb)
+ for (si = bsi_start (bb); !bsi_end_p (si); bsi_next (&si))
+ update_stmt (bsi_stmt (si));
+
+ update_ssa (TODO_update_ssa);
+ }
+ rewrite_into_loop_closed_ssa (NULL);
tree_ssa_iv_optimize_finalize (loops, &data);
}
diff --git a/gcc/tree-ssa-loop-manip.c b/gcc/tree-ssa-loop-manip.c
index cbc1e69d430..fbb45ad3b01 100644
--- a/gcc/tree-ssa-loop-manip.c
+++ b/gcc/tree-ssa-loop-manip.c
@@ -157,7 +157,10 @@ add_exit_phis_var (tree var, bitmap livein, bitmap exits)
basic_block def_bb = bb_for_stmt (SSA_NAME_DEF_STMT (var));
bitmap_iterator bi;
- bitmap_clear_bit (livein, def_bb->index);
+ if (is_gimple_reg (var))
+ bitmap_clear_bit (livein, def_bb->index);
+ else
+ bitmap_set_bit (livein, def_bb->index);
def = BITMAP_ALLOC (NULL);
bitmap_set_bit (def, def_bb->index);
diff --git a/gcc/tree-ssa-loop.c b/gcc/tree-ssa-loop.c
index 50124706ec1..03c3249fe2e 100644
--- a/gcc/tree-ssa-loop.c
+++ b/gcc/tree-ssa-loop.c
@@ -53,17 +53,8 @@ tree_loop_optimizer_init (FILE *dump)
if (!loops)
return NULL;
- /* Creation of preheaders may create redundant phi nodes if the loop is
- entered by more than one edge, but the initial value of the induction
- variable is the same on all of them. */
- kill_redundant_phi_nodes ();
- rewrite_into_ssa (false);
- bitmap_clear (vars_to_rename);
-
+ update_ssa (TODO_update_ssa);
rewrite_into_loop_closed_ssa (NULL);
-#ifdef ENABLE_CHECKING
- verify_loop_closed_ssa ();
-#endif
return loops;
}
@@ -121,7 +112,7 @@ struct tree_opt_pass pass_loop_init =
0, /* properties_provided */
0, /* properties_destroyed */
0, /* todo_flags_start */
- TODO_dump_func, /* todo_flags_finish */
+ TODO_dump_func | TODO_verify_loops, /* todo_flags_finish */
0 /* letter */
};
@@ -155,7 +146,7 @@ struct tree_opt_pass pass_lim =
0, /* properties_provided */
0, /* properties_destroyed */
0, /* todo_flags_start */
- TODO_dump_func, /* todo_flags_finish */
+ TODO_dump_func | TODO_verify_loops, /* todo_flags_finish */
0 /* letter */
};
@@ -189,7 +180,7 @@ struct tree_opt_pass pass_unswitch =
0, /* properties_provided */
0, /* properties_destroyed */
0, /* todo_flags_start */
- TODO_dump_func, /* todo_flags_finish */
+ TODO_dump_func | TODO_verify_loops, /* todo_flags_finish */
0 /* letter */
};
@@ -201,7 +192,6 @@ tree_vectorize (void)
if (!current_loops)
return;
- bitmap_clear (vars_to_rename);
vectorize_loops (current_loops);
}
@@ -224,7 +214,7 @@ struct tree_opt_pass pass_vectorize =
0, /* properties_provided */
0, /* properties_destroyed */
0, /* todo_flags_start */
- TODO_dump_func, /* todo_flags_finish */
+ TODO_dump_func | TODO_update_ssa, /* todo_flags_finish */
0 /* letter */
};
@@ -259,7 +249,7 @@ struct tree_opt_pass pass_linear_transform =
0, /* properties_provided */
0, /* properties_destroyed */
0, /* todo_flags_start */
- TODO_dump_func, /* todo_flags_finish */
+ TODO_dump_func | TODO_verify_loops, /* todo_flags_finish */
0 /* letter */
};
@@ -293,7 +283,7 @@ struct tree_opt_pass pass_iv_canon =
0, /* properties_provided */
0, /* properties_destroyed */
0, /* todo_flags_start */
- TODO_dump_func, /* todo_flags_finish */
+ TODO_dump_func | TODO_verify_loops, /* todo_flags_finish */
0 /* letter */
};
@@ -356,7 +346,7 @@ struct tree_opt_pass pass_complete_unroll =
0, /* properties_provided */
0, /* properties_destroyed */
0, /* todo_flags_start */
- TODO_dump_func, /* todo_flags_finish */
+ TODO_dump_func | TODO_verify_loops, /* todo_flags_finish */
0 /* letter */
};
@@ -390,7 +380,7 @@ struct tree_opt_pass pass_iv_optimize =
0, /* properties_provided */
0, /* properties_destroyed */
0, /* todo_flags_start */
- TODO_dump_func, /* todo_flags_finish */
+ TODO_dump_func | TODO_verify_loops, /* todo_flags_finish */
0 /* letter */
};
@@ -402,10 +392,6 @@ tree_ssa_loop_done (void)
if (!current_loops)
return;
-#ifdef ENABLE_CHECKING
- verify_loop_closed_ssa ();
-#endif
-
free_numbers_of_iterations_estimates (current_loops);
scev_finalize ();
loop_optimizer_finalize (current_loops,
@@ -429,4 +415,3 @@ struct tree_opt_pass pass_loop_done =
TODO_cleanup_cfg | TODO_dump_func, /* todo_flags_finish */
0 /* letter */
};
-
diff --git a/gcc/tree-ssa-operands.c b/gcc/tree-ssa-operands.c
index 5d5f6bac4e2..62bb8c9192a 100644
--- a/gcc/tree-ssa-operands.c
+++ b/gcc/tree-ssa-operands.c
@@ -1452,6 +1452,7 @@ get_expr_operands (tree stmt, tree *expr_p, int flags)
case TRUTH_XOR_EXPR:
case COMPOUND_EXPR:
case OBJ_TYPE_REF:
+ case ASSERT_EXPR:
do_binary:
{
tree op0 = TREE_OPERAND (expr, 0);
@@ -1735,7 +1736,7 @@ get_call_expr_operands (tree stmt, tree expr)
&& !bitmap_empty_p (call_clobbered_vars)
&& !(call_flags & ECF_NOVOPS))
{
- /* A 'pure' or a 'const' functions never call clobber anything.
+ /* A 'pure' or a 'const' function never call-clobbers anything.
A 'noreturn' function might, but since we don't return anyway
there is no point in recording that. */
if (TREE_SIDE_EFFECTS (expr)
@@ -1798,6 +1799,24 @@ add_stmt_operand (tree *var_p, stmt_ann_t s_ann, int flags)
if (TREE_THIS_VOLATILE (sym) && s_ann)
s_ann->has_volatile_ops = true;
+ /* If the variable cannot be modified and this is a V_MAY_DEF,
+ change it into a VUSE. This happens when read-only variables are
+ marked call-clobbered and/or aliased to writable variables. We
+ only make this change for stores, not for writes to GIMPLE
+ registers.
+
+ FIXME: The C++ FE is emitting assignments in the IL stream for
+ read-only globals. This is wrong, but for the time being disable
+ this transformation on V_MUST_DEF operands (otherwise, we
+ mis-optimize SPEC2000's eon). */
+ if ((flags & opf_is_def)
+ && !(flags & opf_kill_def)
+ && unmodifiable_var_p (var))
+ {
+ gcc_assert (!is_real_op);
+ flags &= ~opf_is_def;
+ }
+
if (is_real_op)
{
/* The variable is a GIMPLE register. Add it to real operands. */
@@ -1858,17 +1877,35 @@ add_stmt_operand (tree *var_p, stmt_ann_t s_ann, int flags)
if (flags & opf_is_def)
{
+ bool added_may_defs_p = false;
+
/* If the variable is also an alias tag, add a virtual
operand for it, otherwise we will miss representing
references to the members of the variable's alias set.
This fixes the bug in gcc.c-torture/execute/20020503-1.c. */
if (v_ann->is_alias_tag)
- append_v_may_def (var);
+ {
+ added_may_defs_p = true;
+ append_v_may_def (var);
+ }
for (i = 0; i < VARRAY_ACTIVE_SIZE (aliases); i++)
- append_v_may_def (VARRAY_TREE (aliases, i));
+ {
+ /* While VAR may be modifiable, some of its aliases
+ may not be. If that's the case, we don't really
+ need to add a V_MAY_DEF for them. */
+ tree alias = VARRAY_TREE (aliases, i);
+
+ if (unmodifiable_var_p (alias))
+ append_vuse (alias);
+ else
+ {
+ append_v_may_def (alias);
+ added_may_defs_p = true;
+ }
+ }
- if (s_ann)
+ if (s_ann && added_may_defs_p)
s_ann->makes_aliased_stores = 1;
}
else
@@ -2000,8 +2037,7 @@ add_call_clobber_ops (tree stmt)
EXECUTE_IF_SET_IN_BITMAP (call_clobbered_vars, 0, i, bi)
{
tree var = referenced_var (i);
- if (TREE_READONLY (var)
- && (TREE_STATIC (var) || DECL_EXTERNAL (var)))
+ if (unmodifiable_var_p (var))
add_stmt_operand (&var, &empty_ann, opf_none);
else
add_stmt_operand (&var, &empty_ann, opf_is_def);
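A rough sketch of the V_MAY_DEF-to-VUSE downgrade added above, with the operand flags and unmodifiable_var_p reduced to plain booleans; the names here are made up and this is not GCC code:

#include <stdio.h>
#include <stdbool.h>

/* Reduced model of the operand flags used above.  */
struct operand_flags
{
  bool is_def;       /* The statement may modify the variable.   */
  bool kill_def;     /* The statement definitely overwrites it.  */
};

/* Mirror of the decision added to add_stmt_operand: a may-def of a
   variable that cannot change is recorded as a plain use instead,
   while must-defs are left alone.  */
static const char *
classify_operand (struct operand_flags flags, bool unmodifiable)
{
  if (flags.is_def && !flags.kill_def && unmodifiable)
    flags.is_def = false;

  if (!flags.is_def)
    return "VUSE";
  return flags.kill_def ? "V_MUST_DEF" : "V_MAY_DEF";
}

int
main (void)
{
  struct operand_flags may_def = { true, false };
  struct operand_flags must_def = { true, true };

  /* A may-def of a read-only variable degenerates into a use.  */
  printf ("%s\n", classify_operand (may_def, true));    /* VUSE       */
  printf ("%s\n", classify_operand (may_def, false));   /* V_MAY_DEF  */
  printf ("%s\n", classify_operand (must_def, true));   /* V_MUST_DEF */
  return 0;
}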
diff --git a/gcc/tree-ssa-phiopt.c b/gcc/tree-ssa-phiopt.c
index 41370ed114a..e9db4c0ae2f 100644
--- a/gcc/tree-ssa-phiopt.c
+++ b/gcc/tree-ssa-phiopt.c
@@ -964,8 +964,12 @@ struct tree_opt_pass pass_phiopt =
0, /* properties_provided */
0, /* properties_destroyed */
0, /* todo_flags_start */
- TODO_cleanup_cfg | TODO_dump_func | TODO_ggc_collect /* todo_flags_finish */
- | TODO_verify_ssa | TODO_rename_vars
- | TODO_verify_flow | TODO_verify_stmts,
+ TODO_cleanup_cfg
+ | TODO_dump_func
+ | TODO_ggc_collect
+ | TODO_verify_ssa
+ | TODO_update_ssa
+ | TODO_verify_flow
+ | TODO_verify_stmts, /* todo_flags_finish */
0 /* letter */
};
diff --git a/gcc/tree-ssa-propagate.c b/gcc/tree-ssa-propagate.c
index c57c9f8695a..a4c764aacef 100644
--- a/gcc/tree-ssa-propagate.c
+++ b/gcc/tree-ssa-propagate.c
@@ -1,5 +1,5 @@
/* Generic SSA value propagation engine.
- Copyright (C) 2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.
+ Copyright (C) 2004, 2005 Free Software Foundation, Inc.
Contributed by Diego Novillo <dnovillo@redhat.com>
This file is part of GCC.
@@ -459,6 +459,7 @@ ssa_prop_init (void)
edge e;
edge_iterator ei;
basic_block bb;
+ size_t i;
/* Worklists of SSA edges. */
interesting_ssa_edges = VEC_alloc (tree, 20);
@@ -475,7 +476,12 @@ ssa_prop_init (void)
VARRAY_BB_INIT (cfg_blocks, 20, "cfg_blocks");
- /* Initially assume that every edge in the CFG is not executable
+ /* Initialize the values for every SSA_NAME. */
+ for (i = 1; i < num_ssa_names; i++)
+ if (ssa_name (i))
+ SSA_NAME_VALUE (ssa_name (i)) = NULL_TREE;
+
+ /* Initially assume that every edge in the CFG is not executable.
(including the edges coming out of ENTRY_BLOCK_PTR). */
FOR_ALL_BB (bb)
{
@@ -666,4 +672,409 @@ ssa_propagate (ssa_prop_visit_stmt_fn visit_stmt,
ssa_prop_fini ();
}
+
+/* Return the first V_MAY_DEF or V_MUST_DEF operand for STMT. */
+
+tree
+first_vdef (tree stmt)
+{
+ if (NUM_V_MAY_DEFS (STMT_V_MAY_DEF_OPS (stmt)) > 0)
+ return V_MAY_DEF_RESULT (STMT_V_MAY_DEF_OPS (stmt), 0);
+ else if (NUM_V_MUST_DEFS (STMT_V_MUST_DEF_OPS (stmt)) > 0)
+ return V_MUST_DEF_RESULT (STMT_V_MUST_DEF_OPS (stmt), 0);
+ else
+ gcc_unreachable ();
+}
+
+
+/* Return true if STMT is of the form 'LHS = mem_ref', where 'mem_ref'
+ is a non-volatile pointer dereference, a structure reference or a
+ reference to a single _DECL. Ignore volatile memory references
+ because they are not interesting for the optimizers. */
+
+bool
+stmt_makes_single_load (tree stmt)
+{
+ tree rhs;
+
+ if (TREE_CODE (stmt) != MODIFY_EXPR)
+ return false;
+
+ if (NUM_V_MAY_DEFS (STMT_V_MAY_DEF_OPS (stmt)) == 0
+ && NUM_VUSES (STMT_VUSE_OPS (stmt)) == 0)
+ return false;
+
+ rhs = TREE_OPERAND (stmt, 1);
+ STRIP_NOPS (rhs);
+
+ return (!TREE_THIS_VOLATILE (rhs)
+ && (DECL_P (rhs)
+ || TREE_CODE_CLASS (TREE_CODE (rhs)) == tcc_reference));
+}
+
+
+/* Return true if STMT is of the form 'mem_ref = RHS', where 'mem_ref'
+ is a non-volatile pointer dereference, a structure reference or a
+ reference to a single _DECL. Ignore volatile memory references
+ because they are not interesting for the optimizers. */
+
+bool
+stmt_makes_single_store (tree stmt)
+{
+ tree lhs;
+
+ if (TREE_CODE (stmt) != MODIFY_EXPR)
+ return false;
+
+ if (NUM_V_MAY_DEFS (STMT_V_MAY_DEF_OPS (stmt)) == 0
+ && NUM_V_MUST_DEFS (STMT_V_MUST_DEF_OPS (stmt)) == 0)
+ return false;
+
+ lhs = TREE_OPERAND (stmt, 0);
+ STRIP_NOPS (lhs);
+
+ return (!TREE_THIS_VOLATILE (lhs)
+ && (DECL_P (lhs)
+ || TREE_CODE_CLASS (TREE_CODE (lhs)) == tcc_reference));
+}
+
+
+/* If STMT makes a single memory load and all the virtual use operands
+ have the same value in array VALUES, return it. Otherwise, return
+ NULL. */
+
+prop_value_t *
+get_value_loaded_by (tree stmt, prop_value_t *values)
+{
+ ssa_op_iter i;
+ tree vuse;
+ prop_value_t *prev_val = NULL;
+ prop_value_t *val = NULL;
+
+ FOR_EACH_SSA_TREE_OPERAND (vuse, stmt, i, SSA_OP_VIRTUAL_USES)
+ {
+ val = &values[SSA_NAME_VERSION (vuse)];
+ if (prev_val && prev_val->value != val->value)
+ return NULL;
+ prev_val = val;
+ }
+
+ return val;
+}
+
+
+/* Propagation statistics. */
+struct prop_stats_d
+{
+ long num_const_prop;
+ long num_copy_prop;
+};
+
+static struct prop_stats_d prop_stats;
+
+/* Replace USE references in statement STMT with the values stored in
+ PROP_VALUE. Return true if at least one reference was replaced. If
+ REPLACED_ADDRESSES_P is given, it will be set to true if an address
+ constant was replaced. */
+
+bool
+replace_uses_in (tree stmt, bool *replaced_addresses_p,
+ prop_value_t *prop_value)
+{
+ bool replaced = false;
+ use_operand_p use;
+ ssa_op_iter iter;
+
+ FOR_EACH_SSA_USE_OPERAND (use, stmt, iter, SSA_OP_USE)
+ {
+ tree tuse = USE_FROM_PTR (use);
+ tree val = prop_value[SSA_NAME_VERSION (tuse)].value;
+
+ if (val == tuse || val == NULL_TREE)
+ continue;
+
+ if (TREE_CODE (stmt) == ASM_EXPR
+ && !may_propagate_copy_into_asm (tuse))
+ continue;
+
+ if (!may_propagate_copy (tuse, val))
+ continue;
+
+ if (TREE_CODE (val) != SSA_NAME)
+ prop_stats.num_const_prop++;
+ else
+ prop_stats.num_copy_prop++;
+
+ propagate_value (use, val);
+
+ replaced = true;
+ if (POINTER_TYPE_P (TREE_TYPE (tuse)) && replaced_addresses_p)
+ *replaced_addresses_p = true;
+ }
+
+ return replaced;
+}
+
+
+/* Replace the VUSE references in statement STMT with the values
+ stored in PROP_VALUE. Return true if a reference was replaced. If
+ REPLACED_ADDRESSES_P is given, it will be set to true if an address
+ constant was replaced.
+
+ Replacing VUSE operands is slightly more complex than replacing
+ regular USEs. We are only interested in two types of replacements
+ here:
+
+ 1- If the value to be replaced is a constant or an SSA name for a
+ GIMPLE register, then we are making a copy/constant propagation
+ from a memory store. For instance,
+
+ # a_3 = V_MAY_DEF <a_2>
+ a.b = x_1;
+ ...
+ # VUSE <a_3>
+ y_4 = a.b;
+
+ This replacement is only possible iff STMT is an assignment
+ whose RHS is identical to the LHS of the statement that created
+ the VUSE(s) that we are replacing. Otherwise, we may do the
+ wrong replacement:
+
+ # a_3 = V_MAY_DEF <a_2>
+ # b_5 = V_MAY_DEF <b_4>
+ *p = 10;
+ ...
+ # VUSE <b_5>
+ x_8 = b;
+
+ Even though 'b_5' acquires the value '10' during propagation,
+ there is no way for the propagator to tell whether the
+ replacement is correct in every reached use, because values are
+ computed at definition sites. Therefore, when doing final
+ substitution of propagated values, we have to check each use
+ site. Since the RHS of STMT ('b') is different from the LHS of
+ the originating statement ('*p'), we cannot replace 'b' with
+ '10'.
+
+ Similarly, when merging values from PHI node arguments,
+ propagators need to take care not to merge the same values
+ stored in different locations:
+
+ if (...)
+ # a_3 = V_MAY_DEF <a_2>
+ a.b = 3;
+ else
+ # a_4 = V_MAY_DEF <a_2>
+ a.c = 3;
+ # a_5 = PHI <a_3, a_4>
+
+ It would be wrong to propagate '3' into 'a_5' because that
+ operation merges two stores to different memory locations.
+
+
+ 2- If the value to be replaced is an SSA name for a virtual
+ register, then we simply replace each VUSE operand with its
+ value from PROP_VALUE. This is the same replacement done by
+ replace_uses_in. */
+
+static bool
+replace_vuses_in (tree stmt, bool *replaced_addresses_p,
+ prop_value_t *prop_value)
+{
+ bool replaced = false;
+ ssa_op_iter iter;
+ use_operand_p vuse;
+
+ if (stmt_makes_single_load (stmt))
+ {
+ /* If STMT is an assignment whose RHS is a single memory load,
+ see if we are trying to propagate a constant or a GIMPLE
+ register (case #1 above). */
+ prop_value_t *val = get_value_loaded_by (stmt, prop_value);
+ tree rhs = TREE_OPERAND (stmt, 1);
+
+ if (val
+ && val->value
+ && (is_gimple_reg (val->value)
+ || is_gimple_min_invariant (val->value))
+ && simple_cst_equal (rhs, val->mem_ref) == 1)
+ {
+ /* If we are replacing a constant address, inform our
+ caller. */
+ if (TREE_CODE (val->value) != SSA_NAME
+ && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (stmt, 1)))
+ && replaced_addresses_p)
+ *replaced_addresses_p = true;
+
+ /* We can only perform the substitution if the load is done
+ from the same memory location as the original store.
+ Since we already know that there are no intervening
+ stores between DEF_STMT and STMT, we only need to check
+ that the RHS of STMT is the same as the memory reference
+ propagated together with the value. */
+ TREE_OPERAND (stmt, 1) = val->value;
+
+ if (TREE_CODE (val->value) != SSA_NAME)
+ prop_stats.num_const_prop++;
+ else
+ prop_stats.num_copy_prop++;
+
+ /* Since we have replaced the whole RHS of STMT, there
+ is no point in checking the other VUSEs, as they will
+ all have the same value. */
+ return true;
+ }
+ }
+
+ /* Otherwise, the values for every VUSE operand must be other
+ SSA_NAMEs that can be propagated into STMT. */
+ FOR_EACH_SSA_USE_OPERAND (vuse, stmt, iter, SSA_OP_VIRTUAL_USES)
+ {
+ tree var = USE_FROM_PTR (vuse);
+ tree val = prop_value[SSA_NAME_VERSION (var)].value;
+
+ if (val == NULL_TREE || var == val)
+ continue;
+
+ /* Constants and copies propagated between real and virtual
+ operands are only possible in the cases handled above. They
+ should be ignored in any other context. */
+ if (is_gimple_min_invariant (val) || is_gimple_reg (val))
+ continue;
+
+ propagate_value (vuse, val);
+ prop_stats.num_copy_prop++;
+ replaced = true;
+ }
+
+ return replaced;
+}
+
+
+/* Replace propagated values into all the arguments for PHI using the
+ values from PROP_VALUE. */
+
+static void
+replace_phi_args_in (tree phi, prop_value_t *prop_value)
+{
+ int i;
+
+ for (i = 0; i < PHI_NUM_ARGS (phi); i++)
+ {
+ tree arg = PHI_ARG_DEF (phi, i);
+
+ if (TREE_CODE (arg) == SSA_NAME)
+ {
+ tree val = prop_value[SSA_NAME_VERSION (arg)].value;
+
+ if (val && val != arg && may_propagate_copy (arg, val))
+ {
+ if (TREE_CODE (val) != SSA_NAME)
+ prop_stats.num_const_prop++;
+ else
+ prop_stats.num_copy_prop++;
+
+ propagate_value (PHI_ARG_DEF_PTR (phi, i), val);
+
+ /* If we propagated a copy and this argument flows
+ through an abnormal edge, update the replacement
+ accordingly. */
+ if (TREE_CODE (val) == SSA_NAME
+ && PHI_ARG_EDGE (phi, i)->flags & EDGE_ABNORMAL)
+ SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val) = 1;
+ }
+ }
+ }
+}
+
+
+/* Perform final substitution and folding of propagated values. */
+
+void
+substitute_and_fold (prop_value_t *prop_value)
+{
+ basic_block bb;
+
+ if (dump_file && (dump_flags & TDF_DETAILS))
+ fprintf (dump_file,
+ "\nSubstituing values and folding statements\n\n");
+
+ memset (&prop_stats, 0, sizeof (prop_stats));
+
+ /* Substitute values in every statement of every basic block. */
+ FOR_EACH_BB (bb)
+ {
+ block_stmt_iterator i;
+ tree phi;
+
+ /* Propagate our known values into PHI nodes. */
+ for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
+ {
+ if (dump_file && (dump_flags & TDF_DETAILS))
+ {
+ fprintf (dump_file, "Replaced ");
+ print_generic_stmt (dump_file, phi, TDF_SLIM);
+ }
+
+ replace_phi_args_in (phi, prop_value);
+
+ if (dump_file && (dump_flags & TDF_DETAILS))
+ {
+ fprintf (dump_file, " with ");
+ print_generic_stmt (dump_file, phi, TDF_SLIM);
+ fprintf (dump_file, "\n");
+ }
+ }
+
+ for (i = bsi_start (bb); !bsi_end_p (i); bsi_next (&i))
+ {
+ bool replaced_address, did_replace;
+ tree stmt = bsi_stmt (i);
+
+ get_stmt_operands (stmt);
+
+ /* Replace the statement with its folded version and mark it
+ folded. */
+ if (dump_file && (dump_flags & TDF_DETAILS))
+ {
+ fprintf (dump_file, "Replaced ");
+ print_generic_stmt (dump_file, stmt, TDF_SLIM);
+ }
+
+ replaced_address = false;
+ did_replace = replace_uses_in (stmt, &replaced_address, prop_value);
+ did_replace |= replace_vuses_in (stmt, &replaced_address, prop_value);
+ if (did_replace)
+ {
+ fold_stmt (bsi_stmt_ptr (i));
+ stmt = bsi_stmt(i);
+
+ /* If we folded a builtin function, we'll likely
+ need to rename VDEFs. */
+ mark_new_vars_to_rename (stmt);
+
+ /* If we cleaned up EH information from the statement,
+ remove EH edges. */
+ if (maybe_clean_eh_stmt (stmt))
+ tree_purge_dead_eh_edges (bb);
+ }
+
+ if (dump_file && (dump_flags & TDF_DETAILS))
+ {
+ fprintf (dump_file, " with ");
+ print_generic_stmt (dump_file, stmt, TDF_SLIM);
+ fprintf (dump_file, "\n");
+ }
+ }
+ }
+
+ if (dump_file && (dump_flags & TDF_STATS))
+ {
+ fprintf (dump_file, "Constants propagated: %6ld\n",
+ prop_stats.num_const_prop);
+ fprintf (dump_file, "Copies propagated: %6ld\n",
+ prop_stats.num_copy_prop);
+ }
+}
#include "gt-tree-ssa-propagate.h"
diff --git a/gcc/tree-ssa-propagate.h b/gcc/tree-ssa-propagate.h
index 6375f5bd6c6..f0124f45204 100644
--- a/gcc/tree-ssa-propagate.h
+++ b/gcc/tree-ssa-propagate.h
@@ -1,6 +1,6 @@
/* Data structures and function declarations for the SSA value propagation
engine.
- Copyright (C) 2001, 2003, 2004 Free Software Foundation, Inc.
+ Copyright (C) 2004, 2005 Free Software Foundation, Inc.
Contributed by Diego Novillo <dnovillo@redhat.com>
This file is part of GCC.
@@ -30,7 +30,6 @@ Boston, MA 02111-1307, USA. */
/* Lattice values used for propagation purposes. Specific instances
of a propagation engine must return these values from the statement
and PHI visit functions to direct the engine. */
-
enum ssa_prop_result {
/* The statement produces nothing of interest. No edges will be
added to the work lists. */
@@ -51,12 +50,43 @@ enum ssa_prop_result {
};
+struct prop_value_d {
+ /* Lattice value. Each propagator is free to define its own
+ lattice and this field is only meaningful while propagating.
+ It will not be used by substitute_and_fold. */
+ unsigned lattice_val;
+
+ /* Propagated value. */
+ tree value;
+
+ /* If this value is held in an SSA name for a non-register
+ variable, this field holds the actual memory reference
+ associated with this value. This field is taken from
+ the LHS of the assignment that generated the associated SSA
+ name. However, in the case of PHI nodes, this field is copied
+ from the PHI arguments (assuming that all the arguments have
+ the same memory reference). See replace_vuses_in for a more
+ detailed description. */
+ tree mem_ref;
+};
+
+typedef struct prop_value_d prop_value_t;
+
+
/* Call-back functions used by the value propagation engine. */
typedef enum ssa_prop_result (*ssa_prop_visit_stmt_fn) (tree, edge *, tree *);
typedef enum ssa_prop_result (*ssa_prop_visit_phi_fn) (tree);
+
+/* In tree-ssa-propagate.c */
void ssa_propagate (ssa_prop_visit_stmt_fn, ssa_prop_visit_phi_fn);
tree get_rhs (tree);
bool set_rhs (tree *, tree);
+tree first_vdef (tree);
+bool stmt_makes_single_load (tree);
+bool stmt_makes_single_store (tree);
+prop_value_t *get_value_loaded_by (tree, prop_value_t *);
+bool replace_uses_in (tree, bool *, prop_value_t *);
+void substitute_and_fold (prop_value_t *);
#endif /* _TREE_SSA_PROPAGATE_H */
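The header only declares the engine, so as a hedged, standalone analogue of the visit/re-queue loop behind ssa_propagate, here is a miniature worklist propagator over a chain of copies. Nothing below is GCC's API; every name is hypothetical:

#include <stdio.h>

#define N 5
#define UNKNOWN (-1)

/* def_of[i] = j means version i is defined as a copy of version j;
   an entry equal to its own index means the version is not a copy
   (here, version 0 is seeded with a constant).  */
static int def_of[N] = { 0, 0, 1, 2, 3 };
static int lattice[N];

int
main (void)
{
  int worklist[64], top = 0;

  for (int i = 0; i < N; i++)
    lattice[i] = UNKNOWN;

  /* Seed: version 0 is the constant 42.  */
  lattice[0] = 42;
  worklist[top++] = 0;

  /* Propagate until no lattice cell changes (the fixed point).  */
  while (top > 0)
    {
      int v = worklist[--top];
      /* "Visit" every version defined in terms of V.  */
      for (int u = 0; u < N; u++)
        if (u != v && def_of[u] == v && lattice[u] != lattice[v])
          {
            /* The value of U changed, so re-queue it so that its
               own users get visited in turn.  */
            lattice[u] = lattice[v];
            worklist[top++] = u;
          }
    }

  for (int i = 0; i < N; i++)
    printf ("v_%d = %d\n", i, lattice[i]);
  return 0;
}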
diff --git a/gcc/tree-ssa-sink.c b/gcc/tree-ssa-sink.c
index 54f9fb961dd..a315d0d4773 100644
--- a/gcc/tree-ssa-sink.c
+++ b/gcc/tree-ssa-sink.c
@@ -584,6 +584,9 @@ struct tree_opt_pass pass_sink_code =
0, /* properties_provided */
0, /* properties_destroyed */
0, /* todo_flags_start */
- TODO_rename_vars | TODO_dump_func | TODO_ggc_collect | TODO_verify_ssa, /* todo_flags_finish */
+ TODO_update_ssa
+ | TODO_dump_func
+ | TODO_ggc_collect
+ | TODO_verify_ssa, /* todo_flags_finish */
0 /* letter */
};
diff --git a/gcc/tree-ssa.c b/gcc/tree-ssa.c
index e23e665d42a..93b54805763 100644
--- a/gcc/tree-ssa.c
+++ b/gcc/tree-ssa.c
@@ -285,7 +285,7 @@ verify_use (basic_block bb, basic_block def_bb, use_operand_p use_p,
{
fprintf (stderr, "for SSA_NAME: ");
print_generic_expr (stderr, ssa_name, TDF_VOPS);
- fprintf (stderr, "in statement:\n");
+ fprintf (stderr, " in statement:\n");
print_generic_stmt (stderr, stmt, TDF_VOPS);
}
@@ -1028,319 +1028,6 @@ walk_use_def_chains (tree var, walk_use_def_chains_fn fn, void *data,
}
}
-
-/* Replaces VAR with REPL in memory reference expression *X in
- statement STMT at use location USE_P. Return TRUE if Anything was done. */
-
-static bool
-propagate_into_addr (tree stmt, use_operand_p use_p, tree *x, tree repl)
-{
- tree new_var, ass_stmt, addr_var;
- basic_block bb;
- block_stmt_iterator bsi;
-
- /* There is nothing special to handle in the other cases. */
- if (TREE_CODE (repl) != ADDR_EXPR)
- return false;
- addr_var = TREE_OPERAND (repl, 0);
-
- while (handled_component_p (*x)
- || TREE_CODE (*x) == REALPART_EXPR
- || TREE_CODE (*x) == IMAGPART_EXPR)
- x = &TREE_OPERAND (*x, 0);
-
- /* Heres a hack but since KRPhinodes is going away soon, Im not going to
- sweat it. */
- if (TREE_CODE (*x) != INDIRECT_REF
- || &(TREE_OPERAND (*x, 0)) != use_p->use) /* HACK ALERT. */
- return false;
-
- if (TREE_TYPE (*x) == TREE_TYPE (addr_var))
- {
- *x = addr_var;
- mark_new_vars_to_rename (stmt, vars_to_rename);
- return true;
- }
-
-
- /* Frontends sometimes produce expressions like *&a instead of a[0].
- Create a temporary variable to handle this case. */
- ass_stmt = build2 (MODIFY_EXPR, void_type_node, NULL_TREE, repl);
- new_var = duplicate_ssa_name (USE_FROM_PTR (use_p), ass_stmt);
- TREE_OPERAND (*x, 0) = new_var;
- TREE_OPERAND (ass_stmt, 0) = new_var;
-
- bb = bb_for_stmt (stmt);
- tree_block_label (bb);
- bsi = bsi_after_labels (bb);
- bsi_insert_after (&bsi, ass_stmt, BSI_NEW_STMT);
-
- mark_new_vars_to_rename (stmt, vars_to_rename);
- return true;
-}
-
-/* Replaces immediate uses of VAR by REPL. */
-
-static void
-replace_immediate_uses (tree var, tree repl)
-{
- tree stmt;
- bool mark_new_vars;
- use_operand_p imm_use;
- imm_use_iterator imm_iter;
-
- FOR_EACH_IMM_USE_SAFE (imm_use, imm_iter, var)
- {
- stmt = USE_STMT (imm_use);
-
- if (TREE_CODE (stmt) == PHI_NODE)
- {
- int index = PHI_ARG_INDEX_FROM_USE (imm_use);
-#ifdef ENABLE_CHECKING
- gcc_assert (&(PHI_ARG_IMM_USE_NODE (stmt, index)) == imm_use);
-#endif
- SET_USE (imm_use, repl);
- if (TREE_CODE (repl) == SSA_NAME
- && PHI_ARG_EDGE (stmt, index)->flags & EDGE_ABNORMAL)
- SSA_NAME_OCCURS_IN_ABNORMAL_PHI (repl) = 1;
- continue;
- }
-
- gcc_assert (!stmt_modified_p (stmt));
-
- mark_new_vars = false;
- if (is_gimple_reg (SSA_NAME_VAR (var)))
- {
- bool propagated = false;
- if (TREE_CODE (stmt) == MODIFY_EXPR)
- {
- if (TREE_CODE (repl) == ADDR_EXPR)
- {
- propagated =
- propagate_into_addr (stmt, imm_use, &TREE_OPERAND (stmt, 0),
- repl);
- if (!propagated)
- propagated =
- propagate_into_addr (stmt, imm_use,
- &TREE_OPERAND (stmt, 1), repl);
- }
- }
- if (!propagated)
- propagate_value (imm_use, repl);
- mark_new_vars = POINTER_TYPE_P (TREE_TYPE (repl));
- }
- else
- propagate_value (imm_use, repl);
-
- /* FIXME. If REPL is a constant, we need to fold STMT.
- However, fold_stmt wants a pointer to the statement, because
- it may happen that it needs to replace the whole statement
- with a new expression. Since the current def-use machinery
- does not return pointers to statements, we call fold_stmt
- with the address of a local temporary, if that call changes
- the temporary then we fallback on looking for a proper
- pointer to STMT by scanning STMT's basic block.
-
- Note that all this will become unnecessary soon. This
- pass is being replaced with a proper copy propagation pass
- for 4.1 (dnovillo, 2004-09-17). */
- if (TREE_CODE (repl) != SSA_NAME)
- {
- tree tmp = stmt;
- fold_stmt (&tmp);
- mark_new_vars = true;
- if (tmp != stmt)
- {
- block_stmt_iterator si = bsi_for_stmt (stmt);
- mark_new_vars_to_rename (tmp, vars_to_rename);
- bsi_replace (&si, tmp, true);
- stmt = bsi_stmt (si);
- }
- }
-
- /* If REPL is a pointer, it may have different memory tags associated
- with it. For instance, VAR may have had a name tag while REPL
- only had a type tag. In these cases, the virtual operands (if
- any) in the statement will refer to different symbols which need
- to be renamed. */
- if (mark_new_vars)
- mark_new_vars_to_rename (stmt, vars_to_rename);
- else
- update_stmt (stmt);
- }
-
-}
-
-/* Gets the value VAR is equivalent to according to EQ_TO. */
-
-static tree
-get_eq_name (tree *eq_to, tree var)
-{
- unsigned ver;
- tree val = var;
-
- while (TREE_CODE (val) == SSA_NAME)
- {
- ver = SSA_NAME_VERSION (val);
- if (!eq_to[ver])
- break;
-
- val = eq_to[ver];
- }
-
- while (TREE_CODE (var) == SSA_NAME)
- {
- ver = SSA_NAME_VERSION (var);
- if (!eq_to[ver])
- break;
-
- var = eq_to[ver];
- eq_to[ver] = val;
- }
-
- return val;
-}
-
-/* Checks whether phi node PHI is redundant and if it is, records the ssa name
- its result is redundant to to EQ_TO array. */
-
-static void
-check_phi_redundancy (tree phi, tree *eq_to)
-{
- tree val = NULL_TREE, def, res = PHI_RESULT (phi), stmt;
- unsigned i, ver = SSA_NAME_VERSION (res);
- imm_use_iterator imm_iter;
- use_operand_p use_p;
-
- /* It is unlikely that such large phi node would be redundant. */
- if (PHI_NUM_ARGS (phi) > 16)
- return;
-
- for (i = 0; i < (unsigned) PHI_NUM_ARGS (phi); i++)
- {
- def = PHI_ARG_DEF (phi, i);
-
- if (TREE_CODE (def) == SSA_NAME)
- {
- def = get_eq_name (eq_to, def);
- if (def == res)
- continue;
- }
-
- if (val
- && !operand_equal_for_phi_arg_p (val, def))
- return;
-
- val = def;
- }
-
- /* At least one of the arguments should not be equal to the result, or
- something strange is happening. */
- gcc_assert (val);
-
- if (get_eq_name (eq_to, res) == val)
- return;
-
- if (!may_propagate_copy (res, val))
- return;
-
- eq_to[ver] = val;
-
- FOR_EACH_IMM_USE_FAST (use_p, imm_iter, res)
- {
- stmt = USE_STMT (use_p);
- if (TREE_CODE (stmt) == PHI_NODE)
- check_phi_redundancy (stmt, eq_to);
- }
-}
-
-/* Removes redundant phi nodes.
-
- A redundant PHI node is a PHI node where all of its PHI arguments
- are the same value, excluding any PHI arguments which are the same
- as the PHI result.
-
- A redundant PHI node is effectively a copy, so we forward copy propagate
- which removes all uses of the destination of the PHI node then
- finally we delete the redundant PHI node.
-
- Note that if we can not copy propagate the PHI node, then the PHI
- will not be removed. Thus we do not have to worry about dependencies
- between PHIs and the problems serializing PHIs into copies creates.
-
- The most important effect of this pass is to remove degenerate PHI
- nodes created by removing unreachable code. */
-
-void
-kill_redundant_phi_nodes (void)
-{
- tree *eq_to;
- unsigned i, old_num_ssa_names;
- basic_block bb;
- tree phi, repl, stmt;
-
- /* The EQ_TO[VER] holds the value by that the ssa name VER should be
- replaced. If EQ_TO[VER] is ssa name and it is decided to replace it by
- other value, it may be necessary to follow the chain till the final value.
- We perform path shortening (replacing the entries of the EQ_TO array with
- heads of these chains) whenever we access the field to prevent quadratic
- complexity (probably would not occur in practice anyway, but let us play
- it safe). */
- eq_to = xcalloc (num_ssa_names, sizeof (tree));
-
- old_num_ssa_names = num_ssa_names;
-
- FOR_EACH_BB (bb)
- {
- for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
- check_phi_redundancy (phi, eq_to);
- }
-
- /* Now propagate the values. */
- for (i = 0; i < old_num_ssa_names; i++)
- {
- if (!ssa_name (i))
- continue;
-
- repl = get_eq_name (eq_to, ssa_name (i));
- if (repl != ssa_name (i))
- replace_immediate_uses (ssa_name (i), repl);
- }
-
- /* And remove the dead phis. */
- for (i = 0; i < old_num_ssa_names; i++)
- {
- if (!ssa_name (i))
- continue;
-
- repl = get_eq_name (eq_to, ssa_name (i));
- if (repl != ssa_name (i))
- {
- stmt = SSA_NAME_DEF_STMT (ssa_name (i));
- remove_phi_node (stmt, NULL_TREE);
- }
- }
-
- free (eq_to);
-}
-
-struct tree_opt_pass pass_redundant_phi =
-{
- "redphi", /* name */
- NULL, /* gate */
- kill_redundant_phi_nodes, /* execute */
- NULL, /* sub */
- NULL, /* next */
- 0, /* static_pass_number */
- TV_TREE_REDPHI, /* tv_id */
- PROP_cfg | PROP_ssa | PROP_alias, /* properties_required */
- 0, /* properties_provided */
- 0, /* properties_destroyed */
- 0, /* todo_flags_start */
- TODO_dump_func | TODO_rename_vars
- | TODO_ggc_collect | TODO_verify_ssa, /* todo_flags_finish */
- 0 /* letter */
-};
/* Emit warnings for uninitialized variables. This is done in two passes.
diff --git a/gcc/tree-vect-transform.c b/gcc/tree-vect-transform.c
index 5032dedf2a0..0d14c13143b 100644
--- a/gcc/tree-vect-transform.c
+++ b/gcc/tree-vect-transform.c
@@ -349,9 +349,18 @@ vect_create_data_ref_ptr (tree stmt, block_stmt_iterator *bsi, tree offset,
tag = STMT_VINFO_MEMTAG (stmt_info);
gcc_assert (tag);
- get_var_ann (vect_ptr)->type_mem_tag = tag;
- get_var_ann (vect_ptr)->subvars = STMT_VINFO_SUBVARS (stmt_info);
+ /* If the memory tag of the original reference was not a type tag or
+ if the pointed-to type of VECT_PTR has an alias set number
+ different than TAG's, then we need to create a new type tag for
+ VECT_PTR and add TAG to its alias set. */
+ if (var_ann (tag)->mem_tag_kind == NOT_A_TAG
+ || get_alias_set (tag) != get_alias_set (TREE_TYPE (vect_ptr_type)))
+ add_type_alias (vect_ptr, tag);
+ else
+ var_ann (vect_ptr)->type_mem_tag = tag;
+
+ var_ann (vect_ptr)->subvars = STMT_VINFO_SUBVARS (stmt_info);
/** (3) Calculate the initial address the vector-pointer, and set
the vector-pointer to point to it before the loop: **/
diff --git a/gcc/tree-vectorizer.c b/gcc/tree-vectorizer.c
index f3551f8528d..63dd2d201d2 100644
--- a/gcc/tree-vectorizer.c
+++ b/gcc/tree-vectorizer.c
@@ -1836,10 +1836,6 @@ vectorize_loops (struct loops *loops)
return;
}
-#ifdef ENABLE_CHECKING
- verify_loop_closed_ssa ();
-#endif
-
/* ----------- Analyze loops. ----------- */
/* If some loop was duplicated, it gets bigger number
diff --git a/gcc/tree-vectorizer.h b/gcc/tree-vectorizer.h
index dee7a4ebc54..a38edd0785f 100644
--- a/gcc/tree-vectorizer.h
+++ b/gcc/tree-vectorizer.h
@@ -176,7 +176,11 @@ typedef struct _stmt_vec_info {
/* Information about the data-ref (access function, etc). */
struct data_reference *data_ref_info;
- /* Aliasing information. */
+ /* Aliasing information. This field represents the symbol that
+ should be aliased by a pointer holding the address of this data
+ reference. If the original data reference was a pointer
+ dereference, then this field contains the memory tag that should
+ be used by the new vector-pointer. */
tree memtag;
struct ptr_info_def *ptr_info;
subvar_t subvars;
diff --git a/gcc/tree-vrp.c b/gcc/tree-vrp.c
new file mode 100644
index 00000000000..e4adcfaf8d1
--- /dev/null
+++ b/gcc/tree-vrp.c
@@ -0,0 +1,2265 @@
+/* Support routines for Value Range Propagation (VRP).
+ Copyright (C) 2005 Free Software Foundation, Inc.
+ Contributed by Diego Novillo <dnovillo@redhat.com>.
+
+This file is part of GCC.
+
+GCC is free software; you can redistribute it and/or modify
+it under the terms of the GNU General Public License as published by
+the Free Software Foundation; either version 2, or (at your option)
+any later version.
+
+GCC is distributed in the hope that it will be useful,
+but WITHOUT ANY WARRANTY; without even the implied warranty of
+MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+GNU General Public License for more details.
+
+You should have received a copy of the GNU General Public License
+along with GCC; see the file COPYING. If not, write to
+the Free Software Foundation, 59 Temple Place - Suite 330,
+Boston, MA 02111-1307, USA. */
+
+#include "config.h"
+#include "system.h"
+#include "coretypes.h"
+#include "tm.h"
+#include "ggc.h"
+#include "flags.h"
+#include "tree.h"
+#include "basic-block.h"
+#include "tree-flow.h"
+#include "tree-pass.h"
+#include "tree-dump.h"
+#include "timevar.h"
+#include "diagnostic.h"
+#include "cfgloop.h"
+#include "tree-scalar-evolution.h"
+#include "tree-ssa-propagate.h"
+#include "tree-chrec.h"
+
+/* Set of SSA names found during the dominator traversal of a
+ sub-graph in maybe_add_assert_expr_on_edges. */
+static sbitmap found;
+
+/* Loop structure of the program. Used to analyze scalar evolutions
+ inside adjust_range_with_scev. */
+static struct loops *cfg_loops;
+
+/* Local functions. */
+static int compare_values (tree val1, tree val2);
+
+/* Given a conditional predicate COND that has WHICH as one of its
+ operands, return the other operand. No error checking is done;
+ this helper simply assumes that COND is a comparison. */
+
+static inline tree
+get_opposite_operand (tree cond, tree which)
+{
+ if (TREE_OPERAND (cond, 0) == which)
+ return TREE_OPERAND (cond, 1);
+ else
+ return TREE_OPERAND (cond, 0);
+}
+
+
+/* Given a comparison code, return its opposite. Note that this is *not*
+ the same as inverting its truth value (invert_tree_comparison). Here we
+ just want to literally flip the comparison around.
+
+ So, '<' gets '>', '<=' gets '>='. Both '==' and '!=' are returned
+ unchanged. */
+
+static enum tree_code
+opposite_comparison (enum tree_code code)
+{
+ switch (code)
+ {
+ case EQ_EXPR:
+ case NE_EXPR:
+ case ORDERED_EXPR:
+ case UNORDERED_EXPR:
+ case LTGT_EXPR:
+ case UNEQ_EXPR:
+ return code;
+ case GT_EXPR:
+ return LT_EXPR;
+ case GE_EXPR:
+ return LE_EXPR;
+ case LT_EXPR:
+ return GT_EXPR;
+ case LE_EXPR:
+ return GE_EXPR;
+ case UNGT_EXPR:
+ return UNLT_EXPR;
+ case UNGE_EXPR:
+ return UNLE_EXPR;
+ case UNLT_EXPR:
+ return UNGT_EXPR;
+ case UNLE_EXPR:
+ return UNGE_EXPR;
+ default:
+ gcc_unreachable ();
+ }
+}
+
+
+/* Set value range VR to {T, MIN, MAX}. */
+
+static inline void
+set_value_range (value_range *vr, enum value_range_type t, tree min, tree max)
+{
+#if defined ENABLE_CHECKING
+ if (t == VR_RANGE || t == VR_ANTI_RANGE)
+ {
+ int cmp;
+
+ gcc_assert (min && max);
+
+ if (INTEGRAL_TYPE_P (TREE_TYPE (min)) && t == VR_ANTI_RANGE)
+ gcc_assert (min != TYPE_MIN_VALUE (TREE_TYPE (min))
+ || max != TYPE_MAX_VALUE (TREE_TYPE (max)));
+
+ cmp = compare_values (min, max);
+ gcc_assert (cmp == 0 || cmp == -1 || cmp == -2);
+ }
+#endif
+
+ if (t == VR_RANGE
+ && INTEGRAL_TYPE_P (TREE_TYPE (min))
+ && min == TYPE_MIN_VALUE (TREE_TYPE (min))
+ && max == TYPE_MAX_VALUE (TREE_TYPE (max)))
+ {
+ /* Ranges that cover all the possible values for the type decay
+ to VARYING. */
+ vr->type = VR_VARYING;
+ vr->min = NULL_TREE;
+ vr->max = NULL_TREE;
+ return;
+ }
+
+ vr->type = t;
+ vr->min = min;
+ vr->max = max;
+}
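A quick numeric illustration of the decay rule above, using plain ints and <limits.h> rather than the tree-based check in set_value_range; a sketch only:

#include <limits.h>
#include <stdio.h>

/* A [min, max] range over 'int' that covers every representable
   value carries no information, so treat it as "varying".  */
static int
range_is_varying (long min, long max)
{
  return min <= INT_MIN && max >= INT_MAX;
}

int
main (void)
{
  printf ("%d\n", range_is_varying (INT_MIN, INT_MAX));   /* 1 */
  printf ("%d\n", range_is_varying (0, INT_MAX));         /* 0 */
  return 0;
}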
+
+
+/* Similar to set_value_range but return true if any field of VR
+ changed from its previous value. */
+
+static inline bool
+update_value_range (value_range *vr, enum value_range_type t, tree min,
+ tree max)
+{
+ bool is_new = vr->type != t || vr->min != min || vr->max != max;
+ if (is_new)
+ set_value_range (vr, t, min, max);
+
+ return is_new;
+}
+
+
+/* Return value range information for VAR. Create an empty range if
+ none existed. */
+
+value_range *
+get_value_range (tree var)
+{
+ value_range *vr;
+ tree sym;
+
+ vr = SSA_NAME_VALUE_RANGE (var);
+ if (vr)
+ return vr;
+
+ /* Create a default value range. */
+ vr = ggc_alloc (sizeof (*vr));
+ memset ((void *) vr, 0, sizeof (*vr));
+ SSA_NAME_VALUE_RANGE (var) = vr;
+
+ /* If VAR is a default definition for a PARM_DECL, then we have to
+ assume a VARYING range for it. */
+ sym = SSA_NAME_VAR (var);
+ if (TREE_CODE (sym) == PARM_DECL && var == var_ann (sym)->default_def)
+ set_value_range (vr, VR_VARYING, NULL_TREE, NULL_TREE);
+
+ return vr;
+}
+
+
+/* Return true if value range VR involves at least one symbol. */
+
+static inline bool
+symbolic_range_p (value_range *vr)
+{
+ return (!is_gimple_min_invariant (vr->min)
+ || !is_gimple_min_invariant (vr->max));
+}
+
+
+/* Return true if EXPR computes a non-zero value. */
+
+bool
+expr_computes_nonzero (tree expr)
+{
+ /* Type casts won't change anything, so just strip them. */
+ STRIP_NOPS (expr);
+
+ /* Calling alloca guarantees that the value is non-NULL. */
+ if (alloca_call_p (expr))
+ return true;
+
+ /* The address of a non-weak symbol is never NULL, unless the user
+ has requested not to remove NULL pointer checks. */
+ if (flag_delete_null_pointer_checks
+ && TREE_CODE (expr) == ADDR_EXPR
+ && DECL_P (TREE_OPERAND (expr, 0))
+ && !DECL_WEAK (TREE_OPERAND (expr, 0)))
+ return true;
+
+ /* IOR of any value with a nonzero value will result in a nonzero
+ value. */
+ if (TREE_CODE (expr) == BIT_IOR_EXPR
+ && integer_nonzerop (TREE_OPERAND (expr, 1)))
+ return true;
+
+ return false;
+}
+
+
+/* Return true if VR is ~[0, 0]. */
+
+static inline bool
+range_is_nonnull (value_range *vr)
+{
+ return vr->type == VR_ANTI_RANGE
+ && integer_zerop (vr->min)
+ && integer_zerop (vr->max);
+}
+
+
+/* Return true if VR is [0, 0]. */
+
+static inline bool
+range_is_null (value_range *vr)
+{
+ return vr->type == VR_RANGE
+ && integer_zerop (vr->min)
+ && integer_zerop (vr->max);
+}
+
+
+/* Set value range VR to a non-NULL range of type TYPE. */
+
+static void
+set_value_range_to_nonnull (value_range *vr, tree type)
+{
+ tree zero = build_int_cst (type, 0);
+ set_value_range (vr, VR_ANTI_RANGE, zero, zero);
+}
+
+
+/* Set value range VR to a NULL range of type TYPE. */
+
+static void
+set_value_range_to_null (value_range *vr, tree type)
+{
+ tree zero = build_int_cst (type, 0);
+ set_value_range (vr, VR_RANGE, zero, zero);
+}
+
+
+/* Compare two values VAL1 and VAL2. Return
+
+ -2 if VAL1 and VAL2 cannot be compared at compile-time,
+ -1 if VAL1 < VAL2,
+ 0 if VAL1 == VAL2,
+ +1 if VAL1 > VAL2, and
+ +2 if VAL1 != VAL2
+
+ This is similar to tree_int_cst_compare but supports pointer values
+ and values that cannot be compared at compile time. */
+
+static int
+compare_values (tree val1, tree val2)
+{
+ if (val1 == val2)
+ return 0;
+
+ /* Do some limited symbolic comparisons. */
+ if (!POINTER_TYPE_P (TREE_TYPE (val1)))
+ {
+ /* We can determine some comparisons against +INF and -INF even
+ if the other value is an expression. */
+ if (val1 == TYPE_MAX_VALUE (TREE_TYPE (val1))
+ && TREE_CODE (val2) == MINUS_EXPR)
+ {
+ /* +INF > NAME - CST. */
+ return 1;
+ }
+ else if (val1 == TYPE_MIN_VALUE (TREE_TYPE (val1))
+ && TREE_CODE (val2) == PLUS_EXPR)
+ {
+ /* -INF < NAME + CST. */
+ return -1;
+ }
+ else if (TREE_CODE (val1) == MINUS_EXPR
+ && val2 == TYPE_MAX_VALUE (TREE_TYPE (val2)))
+ {
+ /* NAME - CST < +INF. */
+ return -1;
+ }
+ else if (TREE_CODE (val1) == PLUS_EXPR
+ && val2 == TYPE_MIN_VALUE (TREE_TYPE (val2)))
+ {
+ /* NAME + CST > -INF. */
+ return 1;
+ }
+ }
+
+ if ((TREE_CODE (val1) == SSA_NAME
+ || TREE_CODE (val1) == PLUS_EXPR
+ || TREE_CODE (val1) == MINUS_EXPR)
+ && (TREE_CODE (val2) == SSA_NAME
+ || TREE_CODE (val2) == PLUS_EXPR
+ || TREE_CODE (val2) == MINUS_EXPR))
+ {
+ tree n1, c1, n2, c2;
+
+ /* If VAL1 and VAL2 are of the form 'NAME [+-] CST' or 'NAME',
+ return -1 or +1 accordingly. If VAL1 and VAL2 don't use the
+ same name, return -2. */
+ if (TREE_CODE (val1) == SSA_NAME)
+ {
+ n1 = val1;
+ c1 = NULL_TREE;
+ }
+ else
+ {
+ n1 = TREE_OPERAND (val1, 0);
+ c1 = TREE_OPERAND (val1, 1);
+ }
+
+ if (TREE_CODE (val2) == SSA_NAME)
+ {
+ n2 = val2;
+ c2 = NULL_TREE;
+ }
+ else
+ {
+ n2 = TREE_OPERAND (val2, 0);
+ c2 = TREE_OPERAND (val2, 1);
+ }
+
+ /* Both values must use the same name. */
+ if (n1 != n2)
+ return -2;
+
+ if (TREE_CODE (val1) == SSA_NAME)
+ {
+ if (TREE_CODE (val2) == SSA_NAME)
+ /* NAME == NAME */
+ return 0;
+ else if (TREE_CODE (val2) == PLUS_EXPR)
+ /* NAME < NAME + CST */
+ return -1;
+ else if (TREE_CODE (val2) == MINUS_EXPR)
+ /* NAME > NAME - CST */
+ return 1;
+ }
+ else if (TREE_CODE (val1) == PLUS_EXPR)
+ {
+ if (TREE_CODE (val2) == SSA_NAME)
+ /* NAME + CST > NAME */
+ return 1;
+ else if (TREE_CODE (val2) == PLUS_EXPR)
+ /* NAME + CST1 > NAME + CST2, if CST1 > CST2 */
+ return compare_values (c1, c2);
+ else if (TREE_CODE (val2) == MINUS_EXPR)
+ /* NAME + CST1 > NAME - CST2 */
+ return 1;
+ }
+ else if (TREE_CODE (val1) == MINUS_EXPR)
+ {
+ if (TREE_CODE (val2) == SSA_NAME)
+ /* NAME - CST < NAME */
+ return -1;
+ else if (TREE_CODE (val2) == PLUS_EXPR)
+ /* NAME - CST1 < NAME + CST2 */
+ return -1;
+ else if (TREE_CODE (val2) == MINUS_EXPR)
+ /* NAME - CST1 > NAME - CST2, if CST1 < CST2. Notice that
+ C1 and C2 are swapped in the call to compare_values. */
+ return compare_values (c2, c1);
+ }
+
+ gcc_unreachable ();
+ }
+
+ /* We cannot compare non-constants. */
+ if (!is_gimple_min_invariant (val1) || !is_gimple_min_invariant (val2))
+ return -2;
+
+ if (!POINTER_TYPE_P (TREE_TYPE (val1)))
+ return tree_int_cst_compare (val1, val2);
+ else
+ {
+ tree t;
+
+      /* First check whether VAL1 and VAL2 are the same.  */
+ if (val1 == val2 || operand_equal_p (val1, val2, 0))
+ return 0;
+
+ /* If VAL1 is a lower address than VAL2, return -1. */
+ t = fold (build2 (LT_EXPR, TREE_TYPE (val1), val1, val2));
+ if (t == boolean_true_node)
+ return -1;
+
+ /* If VAL1 is a higher address than VAL2, return +1. */
+ t = fold (build2 (GT_EXPR, TREE_TYPE (val1), val1, val2));
+ if (t == boolean_true_node)
+ return 1;
+
+      /* If VAL1 is different from VAL2, return +2.  */
+ t = fold (build2 (NE_EXPR, TREE_TYPE (val1), val1, val2));
+ if (t == boolean_true_node)
+ return 2;
+
+ return -2;
+ }
+}
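+
+/* For instance, compare_values (5, 7) returns -1 and
+   compare_values (7, 7) returns 0.  With symbolic operands that share
+   the same name, compare_values (i_1 + 4, i_1) returns +1, whereas
+   compare_values (i_1, j_2) returns -2 because the names differ.  */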
+
+
+/* Return 1 if VAL is inside value range VR (VR->MIN <= VAL <= VR->MAX),
+ 0 if VAL is not inside VR,
+ -2 if we cannot tell either way. */
+
+static inline int
+value_inside_range (tree val, value_range *vr)
+{
+ int cmp1, cmp2;
+
+ cmp1 = compare_values (val, vr->min);
+ if (cmp1 == -2 || cmp1 == 2)
+ return -2;
+
+ cmp2 = compare_values (val, vr->max);
+ if (cmp2 == -2 || cmp2 == 2)
+ return -2;
+
+ return (cmp1 == 0 || cmp1 == 1) && (cmp2 == -1 || cmp2 == 0);
+}
+
+
+/* Return true if value ranges VR0 and VR1 have a non-empty
+ intersection. */
+
+static inline bool
+value_ranges_intersect_p (value_range *vr0, value_range *vr1)
+{
+ return (value_inside_range (vr1->min, vr0) == 1
+ || value_inside_range (vr1->max, vr0) == 1
+ || value_inside_range (vr0->min, vr1) == 1
+ || value_inside_range (vr0->max, vr1) == 1);
+}
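+
+/* For instance, [3, 10] and [8, 20] intersect because 8 lies inside
+   [3, 10], whereas [3, 10] and [15, 20] do not intersect: none of the
+   four endpoints falls inside the other range.  */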
+
+
+/* Extract value range information from an ASSERT_EXPR EXPR and store
+ it in *VR_P. */
+
+static void
+extract_range_from_assert (value_range *vr_p, tree expr)
+{
+ tree var, cond, limit, type;
+ value_range *var_vr;
+
+ var = ASSERT_EXPR_VAR (expr);
+ cond = ASSERT_EXPR_COND (expr);
+
+ gcc_assert (TREE_CODE_CLASS (TREE_CODE (cond)) == tcc_comparison);
+
+ /* Find VAR in the ASSERT_EXPR conditional. */
+ limit = get_opposite_operand (cond, var);
+ type = TREE_TYPE (limit);
+
+ gcc_assert (limit != var);
+
+ /* For pointer arithmetic, we only keep track of anti-ranges
+ (NE_EXPR). Notice that we don't need to handle EQ_EXPR in these
+ cases because assertions with equalities are never generated.
+ The assert pass generates straight assignments in those cases. */
+ if (POINTER_TYPE_P (type) && TREE_CODE (cond) != NE_EXPR)
+ {
+ set_value_range (vr_p, VR_VARYING, NULL_TREE, NULL_TREE);
+ return;
+ }
+
+ if (TREE_CODE (cond) == NE_EXPR)
+ set_value_range (vr_p, VR_ANTI_RANGE, limit, limit);
+ else if (TREE_CODE (cond) == LE_EXPR)
+ set_value_range (vr_p, VR_RANGE, TYPE_MIN_VALUE (type), limit);
+ else if (TREE_CODE (cond) == LT_EXPR)
+ {
+ tree one = build_int_cst (type, 1);
+ set_value_range (vr_p, VR_RANGE, TYPE_MIN_VALUE (type),
+ fold (build (MINUS_EXPR, type, limit, one)));
+ }
+ else if (TREE_CODE (cond) == GE_EXPR)
+ set_value_range (vr_p, VR_RANGE, limit, TYPE_MAX_VALUE (type));
+ else if (TREE_CODE (cond) == GT_EXPR)
+ {
+ tree one = build_int_cst (type, 1);
+ set_value_range (vr_p, VR_RANGE,
+ fold (build (PLUS_EXPR, type, limit, one)),
+ TYPE_MAX_VALUE (type));
+ }
+ else
+ gcc_unreachable ();
+
+ /* If VAR already has a known range and the two ranges have a
+ non-empty intersection, we can refine the resulting range.
+ Since the assert expression creates an equivalency and at the
+ same time it asserts a predicate, we can take the intersection of
+ the two ranges to get better precision. */
+ var_vr = get_value_range (var);
+ if (var_vr->type == VR_RANGE
+ && vr_p->type == VR_RANGE
+ && value_ranges_intersect_p (var_vr, vr_p))
+ {
+ tree min, max;
+
+ /* Use the larger of the two minimums. */
+ if (compare_values (vr_p->min, var_vr->min) == -1)
+ min = var_vr->min;
+ else
+ min = vr_p->min;
+
+ /* Use the smaller of the two maximums. */
+ if (compare_values (vr_p->max, var_vr->max) == 1)
+ max = var_vr->max;
+ else
+ max = vr_p->max;
+
+ set_value_range (vr_p, vr_p->type, min, max);
+ }
+}
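+
+/* For instance, given x_4 = ASSERT_EXPR <x_3, x_3 < 10> with a signed
+   32-bit x_3, the LT_EXPR case above produces [-2147483648, 9]; if
+   x_3 already had the range [0, 100], the intersection step then
+   refines the result to [0, 9].  */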
+
+
+/* Extract range information from SSA name VAR and store it in VR. If
+   VAR has an interesting range, use it.  Otherwise, fall back to the
+   range [VAR, VAR].  This is useful in situations where
+ we may have conditionals testing values of VARYING names. For
+ instance,
+
+ x_3 = y_5;
+ if (x_3 > y_5)
+ ...
+
+ Even if y_5 is deemed VARYING, we can determine that x_3 > y_5 is
+ always false. */
+
+static void
+extract_range_from_ssa_name (value_range *vr, tree var)
+{
+ value_range *var_vr = get_value_range (var);
+
+ if (var_vr->type != VR_UNDEFINED && var_vr->type != VR_VARYING)
+ *vr = *var_vr;
+ else
+ set_value_range (vr, VR_RANGE, var, var);
+}
+
+
+/* Extract range information from a binary expression EXPR based on
+ the ranges of each of its operands and the expression code. */
+
+static void
+extract_range_from_binary_expr (value_range *vr, tree expr)
+{
+ enum tree_code code = TREE_CODE (expr);
+ tree op0, op1, min, max;
+ value_range vr0, vr1;
+ int cmp;
+
+ /* Not all binary expressions can be applied to ranges in a
+ meaningful way. Handle only arithmetic operations. */
+ if (code != PLUS_EXPR
+ && code != MINUS_EXPR
+ && code != MULT_EXPR
+ && code != TRUNC_DIV_EXPR
+ && code != FLOOR_DIV_EXPR
+ && code != CEIL_DIV_EXPR
+ && code != EXACT_DIV_EXPR
+ && code != ROUND_DIV_EXPR
+ && code != MIN_EXPR
+ && code != MAX_EXPR)
+ {
+ set_value_range (vr, VR_VARYING, NULL_TREE, NULL_TREE);
+ return;
+ }
+
+ /* Get value ranges for each operand. For constant operands, create
+ a new value range with the operand to simplify processing. */
+ op0 = TREE_OPERAND (expr, 0);
+ if (TREE_CODE (op0) == SSA_NAME)
+ vr0 = *(get_value_range (op0));
+ else
+ {
+ if (is_gimple_min_invariant (op0))
+ set_value_range (&vr0, VR_RANGE, op0, op0);
+ else
+ set_value_range (&vr0, VR_VARYING, NULL_TREE, NULL_TREE);
+ }
+
+ op1 = TREE_OPERAND (expr, 1);
+ if (TREE_CODE (op1) == SSA_NAME)
+ vr1 = *(get_value_range (op1));
+ else
+ {
+ if (is_gimple_min_invariant (op1))
+ set_value_range (&vr1, VR_RANGE, op1, op1);
+ else
+ set_value_range (&vr1, VR_VARYING, 0, 0);
+ }
+
+ /* If either range is UNDEFINED, so is the result. */
+ if (vr0.type == VR_UNDEFINED || vr1.type == VR_UNDEFINED)
+ {
+ set_value_range (vr, VR_UNDEFINED, NULL_TREE, NULL_TREE);
+ return;
+ }
+
+ /* If either range is VARYING, so is the result. */
+ if (vr0.type == VR_VARYING || vr1.type == VR_VARYING)
+ {
+ set_value_range (vr, VR_VARYING, NULL_TREE, NULL_TREE);
+ return;
+ }
+
+ /* If the ranges are of different types, the result is VARYING. */
+ if (vr0.type != vr1.type)
+ {
+ set_value_range (vr, VR_VARYING, NULL_TREE, NULL_TREE);
+ return;
+ }
+
+ /* TODO. Refuse to do any symbolic range operations for now. */
+ if (symbolic_range_p (&vr0) || symbolic_range_p (&vr1))
+ {
+ set_value_range (vr, VR_VARYING, NULL_TREE, NULL_TREE);
+ return;
+ }
+
+ /* Now evaluate the expression to determine the new range. */
+ if (POINTER_TYPE_P (TREE_TYPE (expr))
+ || POINTER_TYPE_P (TREE_TYPE (op0))
+ || POINTER_TYPE_P (TREE_TYPE (op1)))
+ {
+ /* For pointer types, we are really only interested in asserting
+ whether the expression evaluates to non-NULL. FIXME. We
+ used to gcc_assert (code == PLUS_EXPR || code == MINUS_EXPR),
+ but ivopts is generating expressions with pointer
+ multiplication in them. */
+ if (code == PLUS_EXPR)
+ {
+	  /* Assume that pointers can never wrap around.  FIXME.  Is
+ this always safe? */
+ tree zero = build_int_cst (TREE_TYPE (expr), 0);
+ set_value_range (vr, VR_ANTI_RANGE, zero, zero);
+ }
+ else
+ {
+	  /* Subtracting from a pointer may yield 0, so just drop the
+	     resulting range to VARYING.  */
+ set_value_range (vr, VR_VARYING, NULL_TREE, NULL_TREE);
+ }
+
+ return;
+ }
+
+ /* For integer ranges, apply the operation to each end of the
+ range and see what we end up with. */
+ if (code == PLUS_EXPR
+ || code == MULT_EXPR
+ || code == MIN_EXPR
+ || code == MAX_EXPR)
+ {
+ /* For operations that make the resulting range directly
+ proportional to the original ranges, apply the operation to
+ the same end of each range. */
+ min = int_const_binop (code, vr0.min, vr1.min, 0);
+ max = int_const_binop (code, vr0.max, vr1.max, 0);
+ }
+ else
+ {
+ /* For operations that make the resulting range inversely
+ proportional to the original ranges (-, /), apply the
+ operation to the opposite ends of each range. */
+ min = int_const_binop (code, vr0.min, vr1.max, 0);
+ max = int_const_binop (code, vr0.max, vr1.min, 0);
+ }
+
+ cmp = compare_values (min, max);
+ if (cmp == -2 || cmp == 1)
+ {
+ /* If the new range has its limits swapped around (MIN > MAX),
+ then the operation caused one of them to wrap around, mark
+ the new range VARYING. */
+ set_value_range (vr, VR_VARYING, NULL_TREE, NULL_TREE);
+ }
+ else
+ set_value_range (vr, vr0.type, min, max);
+}
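+
+/* For instance, with vr0 = [1, 5] and vr1 = [10, 20], PLUS_EXPR
+   combines the same ends and yields [1 + 10, 5 + 20] = [11, 25],
+   whereas MINUS_EXPR combines opposite ends and yields
+   [1 - 20, 5 - 10] = [-19, -5].  */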
+
+
+/* Extract range information from a unary expression EXPR based on
+ the range of its operand and the expression code. */
+
+static void
+extract_range_from_unary_expr (value_range *vr, tree expr)
+{
+ enum tree_code code = TREE_CODE (expr);
+ tree min, max, op0;
+ value_range vr0;
+ int cmp;
+
+  /* Get the value range for the operand.  For constant operands, create
+ a new value range with the operand to simplify processing. */
+ op0 = TREE_OPERAND (expr, 0);
+ if (TREE_CODE (op0) == SSA_NAME)
+ vr0 = *(get_value_range (op0));
+ else
+ {
+ if (is_gimple_min_invariant (op0))
+ set_value_range (&vr0, VR_RANGE, op0, op0);
+ else
+ set_value_range (&vr0, VR_VARYING, NULL_TREE, NULL_TREE);
+ }
+
+ /* If VR0 is UNDEFINED, so is the result. */
+ if (vr0.type == VR_UNDEFINED)
+ {
+ set_value_range (vr, VR_UNDEFINED, NULL_TREE, NULL_TREE);
+ return;
+ }
+
+ /* If VR0 is VARYING, so is the result. */
+ if (vr0.type == VR_VARYING)
+ {
+ set_value_range (vr, VR_VARYING, NULL_TREE, NULL_TREE);
+ return;
+ }
+
+ /* TODO. Refuse to do any symbolic range operations for now. */
+ if (symbolic_range_p (&vr0))
+ {
+ set_value_range (vr, VR_VARYING, NULL_TREE, NULL_TREE);
+ return;
+ }
+
+  /* If the type of the operand is neither an integral type nor a
+     pointer type, set the range to VARYING.  TODO.  We may be able to
+     set the range to non-zero.  */
+ if (!INTEGRAL_TYPE_P (TREE_TYPE (op0))
+ && !POINTER_TYPE_P (TREE_TYPE (op0)))
+ {
+ set_value_range (vr, VR_VARYING, NULL_TREE, NULL_TREE);
+ return;
+ }
+
+ /* If the expression involves pointers, we are only interested in
+ determining if it evaluates to NULL [0, 0] or non-NULL (~[0, 0]). */
+ if (POINTER_TYPE_P (TREE_TYPE (expr)) || POINTER_TYPE_P (TREE_TYPE (op0)))
+ {
+ if (range_is_nonnull (&vr0) || expr_computes_nonzero (expr))
+ set_value_range_to_nonnull (vr, TREE_TYPE (expr));
+ else if (range_is_null (&vr0))
+ set_value_range_to_null (vr, TREE_TYPE (expr));
+ else
+ set_value_range (vr, VR_VARYING, NULL_TREE, NULL_TREE);
+
+ return;
+ }
+
+ /* Handle unary expressions on integer ranges. */
+ if ((code == NOP_EXPR || code == CONVERT_EXPR)
+ && (TYPE_SIZE (TREE_TYPE (vr0.min)) != TYPE_SIZE (TREE_TYPE (expr))))
+ {
+ /* When converting types of different sizes, set the result to
+ VARYING. Things like sign extensions and precision loss may
+ change the range. For instance, if x_3 is of type 'long long
+ int' and 'y_5 = (unsigned short) x_3', if x_3 is ~[0, 0], it
+ is impossible to know at compile time whether y_5 will be
+ ~[0, 0]. */
+ set_value_range (vr, VR_VARYING, NULL_TREE, NULL_TREE);
+ return;
+ }
+
+ /* Apply the operation to each end of the range and see what we end
+ up with. */
+ min = fold_unary_to_constant (code, TREE_TYPE (expr), vr0.min);
+ max = fold_unary_to_constant (code, TREE_TYPE (expr), vr0.max);
+
+ cmp = compare_values (min, max);
+ if (cmp == -2 || cmp == 1)
+ {
+ /* If the new range has its limits swapped around (MIN > MAX),
+ then the operation caused one of them to wrap around, mark
+ the new range VARYING. */
+ set_value_range (vr, VR_VARYING, NULL_TREE, NULL_TREE);
+ }
+ else
+ set_value_range (vr, vr0.type, min, max);
+}
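+
+/* For instance, a cast between integer types of the same width simply
+   folds both bounds, so [10, 20] stays [10, 20].  Negating [3, 7]
+   folds the bounds to -3 and -7, which arrive swapped (MIN > MAX), so
+   the result is conservatively dropped to VARYING.  */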
+
+
+/* Try to compute a useful range out of expression EXPR and store it
+ in *VR_P. */
+
+static void
+extract_range_from_expr (value_range *vr, tree expr)
+{
+ enum tree_code code = TREE_CODE (expr);
+
+ if (code == ASSERT_EXPR)
+ extract_range_from_assert (vr, expr);
+ else if (code == SSA_NAME)
+ extract_range_from_ssa_name (vr, expr);
+ else if (TREE_CODE_CLASS (code) == tcc_binary)
+ extract_range_from_binary_expr (vr, expr);
+ else if (TREE_CODE_CLASS (code) == tcc_unary)
+ extract_range_from_unary_expr (vr, expr);
+ else if (expr_computes_nonzero (expr))
+ set_value_range_to_nonnull (vr, TREE_TYPE (expr));
+ else
+ set_value_range (vr, VR_VARYING, NULL_TREE, NULL_TREE);
+}
+
+
+/* Given a range VR, a loop L and a variable VAR, determine whether it
+ would be profitable to adjust VR using scalar evolution information
+ for VAR. If so, update VR with the new limits. */
+
+static void
+adjust_range_with_scev (value_range *vr, struct loop *l, tree var)
+{
+ tree init, step, chrec;
+ bool init_is_max;
+
+ /* TODO. Don't adjust anti-ranges. An anti-range may provide
+ better opportunities than a regular range, but I'm not sure. */
+ if (vr->type == VR_ANTI_RANGE)
+ return;
+
+ chrec = analyze_scalar_evolution (l, var);
+ if (TREE_CODE (chrec) != POLYNOMIAL_CHREC)
+ return;
+
+ init = CHREC_LEFT (chrec);
+ step = CHREC_RIGHT (chrec);
+
+ /* If STEP is symbolic, we can't know whether INIT will be the
+ minimum or maximum value in the range. */
+ if (!is_gimple_min_invariant (step))
+ return;
+
+ /* FIXME. When dealing with unsigned types,
+ analyze_scalar_evolution sets STEP to very large unsigned values
+ when the evolution goes backwards. This confuses this analysis
+ because we think that INIT is the smallest value that the range
+ can take, instead of the largest. Ignore these chrecs for now. */
+ if (INTEGRAL_TYPE_P (TREE_TYPE (step)) && TYPE_UNSIGNED (TREE_TYPE (step)))
+ return;
+
+ /* If STEP is negative, then INIT is the maximum value the range
+ will take. Otherwise, INIT is the minimum value. */
+ init_is_max = (tree_int_cst_sgn (step) < 0);
+
+ if (!POINTER_TYPE_P (TREE_TYPE (init))
+ && (vr->type == VR_VARYING || vr->type == VR_UNDEFINED))
+ {
+ /* For VARYING or UNDEFINED ranges, just about anything we get
+ from scalar evolutions should be better. */
+ if (init_is_max)
+ set_value_range (vr, VR_RANGE, TYPE_MIN_VALUE (TREE_TYPE (init)), init);
+ else
+ set_value_range (vr, VR_RANGE, init, TYPE_MAX_VALUE (TREE_TYPE (init)));
+ }
+ else if (vr->type == VR_RANGE)
+ {
+ if (init_is_max)
+ {
+ /* INIT is the maximum value. If INIT is lower than
+ VR->MAX, set VR->MAX to INIT. */
+ if (compare_values (init, vr->max) == -1)
+ set_value_range (vr, VR_RANGE, vr->min, init);
+ }
+ else
+ {
+ /* If INIT is bigger than VR->MIN, set VR->MIN to INIT. */
+ if (compare_values (init, vr->min) == 1)
+ set_value_range (vr, VR_RANGE, init, vr->max);
+ }
+ }
+}
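+
+/* For instance, for a signed induction variable i_1 with the scalar
+   evolution {0, +, 1}, INIT is 0 and STEP is 1, so INIT is the
+   minimum: a VARYING range for i_1 is narrowed to [0, TYPE_MAX], and
+   an existing range [-5, 50] is narrowed to [0, 50].  */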
+
+
+/* Given two numeric value ranges VR0, VR1 and a comparison code COMP:
+
+ - Return BOOLEAN_TRUE_NODE if VR0 COMP VR1 always returns true for all the
+ values in the ranges.
+
+ - Return BOOLEAN_FALSE_NODE if the comparison always returns false.
+
+ - Return NULL_TREE if it is not always possible to determine the value of
+ the comparison. */
+
+static tree
+compare_ranges (enum tree_code comp, value_range *vr0, value_range *vr1)
+{
+ /* VARYING or UNDEFINED ranges cannot be compared. */
+ if (vr0->type == VR_VARYING
+ || vr0->type == VR_UNDEFINED
+ || vr1->type == VR_VARYING
+ || vr1->type == VR_UNDEFINED)
+ return NULL_TREE;
+
+ /* Anti-ranges need to be handled separately. */
+ if (vr0->type == VR_ANTI_RANGE || vr1->type == VR_ANTI_RANGE)
+ {
+ /* If both are anti-ranges, then we cannot compute any
+ comparison. */
+ if (vr0->type == VR_ANTI_RANGE && vr1->type == VR_ANTI_RANGE)
+ return NULL_TREE;
+
+ /* These comparisons are never statically computable. */
+ if (comp == GT_EXPR
+ || comp == GE_EXPR
+ || comp == LT_EXPR
+ || comp == LE_EXPR)
+ return NULL_TREE;
+
+ /* Equality can be computed only between a range and an
+ anti-range. ~[VAL1, VAL2] == [VAL1, VAL2] is always false. */
+ if (vr0->type == VR_RANGE)
+ {
+ /* To simplify processing, make VR0 the anti-range. */
+ value_range *tmp = vr0;
+ vr0 = vr1;
+ vr1 = tmp;
+ }
+
+ gcc_assert (comp == NE_EXPR || comp == EQ_EXPR);
+
+ if (compare_values (vr0->min, vr1->min) == 0
+ && compare_values (vr0->max, vr1->max) == 0)
+ return (comp == NE_EXPR) ? boolean_true_node : boolean_false_node;
+
+ return NULL_TREE;
+ }
+
+ /* Simplify processing. If COMP is GT_EXPR or GE_EXPR, switch the
+ operands around and change the comparison code. */
+ if (comp == GT_EXPR || comp == GE_EXPR)
+ {
+ value_range *tmp;
+ comp = (comp == GT_EXPR) ? LT_EXPR : LE_EXPR;
+ tmp = vr0;
+ vr0 = vr1;
+ vr1 = tmp;
+ }
+
+ if (comp == EQ_EXPR)
+ {
+ /* Equality may only be computed if both ranges represent
+ exactly one value. */
+ if (compare_values (vr0->min, vr0->max) == 0
+ && compare_values (vr1->min, vr1->max) == 0)
+ {
+ int cmp_min = compare_values (vr0->min, vr1->min);
+ int cmp_max = compare_values (vr0->max, vr1->max);
+ if (cmp_min == 0 && cmp_max == 0)
+ return boolean_true_node;
+ else if (cmp_min != -2 && cmp_max != -2)
+ return boolean_false_node;
+ }
+
+ return NULL_TREE;
+ }
+ else if (comp == NE_EXPR)
+ {
+ int cmp1, cmp2;
+
+ /* If VR0 is completely to the left or completely to the right
+ of VR1, they are always different. Notice that we need to
+ make sure that both comparisons yield similar results to
+ avoid comparing values that cannot be compared at
+ compile-time. */
+ cmp1 = compare_values (vr0->max, vr1->min);
+ cmp2 = compare_values (vr0->min, vr1->max);
+ if ((cmp1 == -1 && cmp2 == -1) || (cmp1 == 1 && cmp2 == 1))
+ return boolean_true_node;
+
+ /* If VR0 and VR1 represent a single value and are identical,
+ return false. */
+ else if (compare_values (vr0->min, vr0->max) == 0
+ && compare_values (vr1->min, vr1->max) == 0
+ && compare_values (vr0->min, vr1->min) == 0
+ && compare_values (vr0->max, vr1->max) == 0)
+ return boolean_false_node;
+
+ /* Otherwise, they may or may not be different. */
+ else
+ return NULL_TREE;
+ }
+ else if (comp == LT_EXPR || comp == LE_EXPR)
+ {
+ int tst;
+
+ /* If VR0 is to the left of VR1, return true. */
+ tst = compare_values (vr0->max, vr1->min);
+ if ((comp == LT_EXPR && tst == -1)
+ || (comp == LE_EXPR && (tst == -1 || tst == 0)))
+ return boolean_true_node;
+
+ /* If VR0 is to the right of VR1, return false. */
+ tst = compare_values (vr0->min, vr1->max);
+ if ((comp == LT_EXPR && (tst == 0 || tst == 1))
+ || (comp == LE_EXPR && tst == 1))
+ return boolean_false_node;
+
+ /* Otherwise, we don't know. */
+ return NULL_TREE;
+ }
+
+ gcc_unreachable ();
+}
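+
+/* For instance, with vr0 = [1, 5] and vr1 = [10, 20], LT_EXPR
+   evaluates to boolean_true_node because vr0's maximum is below vr1's
+   minimum, while EQ_EXPR evaluates to NULL_TREE because neither range
+   represents a single value.  */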
+
+
+/* Given a value range VR, a value VAL and a comparison code COMP, return
+   BOOLEAN_TRUE_NODE if VR COMP VAL always returns true for all the
+ values in VR. Return BOOLEAN_FALSE_NODE if the comparison
+ always returns false. Return NULL_TREE if it is not always
+ possible to determine the value of the comparison. */
+
+static tree
+compare_range_with_value (enum tree_code comp, value_range *vr, tree val)
+{
+ if (vr->type == VR_VARYING || vr->type == VR_UNDEFINED)
+ return NULL_TREE;
+
+ /* Anti-ranges need to be handled separately. */
+ if (vr->type == VR_ANTI_RANGE)
+ {
+ /* For anti-ranges, the only predicates that we can compute at
+ compile time are equality and inequality. */
+ if (comp == GT_EXPR
+ || comp == GE_EXPR
+ || comp == LT_EXPR
+ || comp == LE_EXPR)
+ return NULL_TREE;
+
+ /* ~[VAL, VAL] == VAL is always false. */
+ if (compare_values (vr->min, val) == 0
+ && compare_values (vr->max, val) == 0)
+ return (comp == NE_EXPR) ? boolean_true_node : boolean_false_node;
+
+ return NULL_TREE;
+ }
+
+ if (comp == EQ_EXPR)
+ {
+ /* EQ_EXPR may only be computed if VR represents exactly
+ one value. */
+ if (compare_values (vr->min, vr->max) == 0)
+ {
+ int cmp = compare_values (vr->min, val);
+ if (cmp == 0)
+ return boolean_true_node;
+ else if (cmp == -1 || cmp == 1 || cmp == 2)
+ return boolean_false_node;
+ }
+
+ return NULL_TREE;
+ }
+ else if (comp == NE_EXPR)
+ {
+ /* If VAL is not inside VR, then they are always different. */
+ if (compare_values (vr->max, val) == -1
+ || compare_values (vr->min, val) == 1)
+ return boolean_true_node;
+
+ /* If VR represents exactly one value equal to VAL, then return
+ false. */
+ if (compare_values (vr->min, vr->max) == 0
+ && compare_values (vr->min, val) == 0)
+ return boolean_false_node;
+
+ /* Otherwise, they may or may not be different. */
+ return NULL_TREE;
+ }
+ else if (comp == LT_EXPR || comp == LE_EXPR)
+ {
+ int tst;
+
+ /* If VR is to the left of VAL, return true. */
+ tst = compare_values (vr->max, val);
+ if ((comp == LT_EXPR && tst == -1)
+ || (comp == LE_EXPR && (tst == -1 || tst == 0)))
+ return boolean_true_node;
+
+ /* If VR is to the right of VAL, return false. */
+ tst = compare_values (vr->min, val);
+ if ((comp == LT_EXPR && (tst == 0 || tst == 1))
+ || (comp == LE_EXPR && tst == 1))
+ return boolean_false_node;
+
+ /* Otherwise, we don't know. */
+ return NULL_TREE;
+ }
+ else if (comp == GT_EXPR || comp == GE_EXPR)
+ {
+ int tst;
+
+ /* If VR is to the right of VAL, return true. */
+ tst = compare_values (vr->min, val);
+ if ((comp == GT_EXPR && tst == 1)
+ || (comp == GE_EXPR && (tst == 0 || tst == 1)))
+ return boolean_true_node;
+
+ /* If VR is to the left of VAL, return false. */
+ tst = compare_values (vr->max, val);
+ if ((comp == GT_EXPR && (tst == -1 || tst == 0))
+ || (comp == GE_EXPR && tst == -1))
+ return boolean_false_node;
+
+ /* Otherwise, we don't know. */
+ return NULL_TREE;
+ }
+
+ gcc_unreachable ();
+}
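+
+/* For instance, with vr = [10, 20]: 'vr >= 5' evaluates to
+   boolean_true_node, 'vr < 5' evaluates to boolean_false_node, and
+   'vr == 15' evaluates to NULL_TREE because the range contains more
+   than one value.  */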
+
+
+/* Debugging dumps. */
+
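+/* Dump value range VR to FILE.  */
+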
+void
+dump_value_range (FILE *file, value_range *vr)
+{
+ if (vr == NULL)
+ fprintf (file, "[]");
+ else if (vr->type == VR_UNDEFINED)
+ fprintf (file, "UNDEFINED");
+ else if (vr->type == VR_RANGE || vr->type == VR_ANTI_RANGE)
+ {
+ fprintf (file, "%s[", (vr->type == VR_ANTI_RANGE) ? "~" : "");
+ print_generic_expr (file, vr->min, 0);
+ fprintf (file, ", ");
+ print_generic_expr (file, vr->max, 0);
+ fprintf (file, "]");
+ }
+ else if (vr->type == VR_VARYING)
+ fprintf (file, "VARYING");
+ else
+ fprintf (file, "INVALID RANGE");
+}
+
+
+/* Dump value range VR to stderr. */
+
+void
+debug_value_range (value_range *vr)
+{
+ dump_value_range (stderr, vr);
+}
+
+
+/* Dump value ranges of all SSA_NAMEs to FILE. */
+
+void
+dump_all_value_ranges (FILE *file)
+{
+ size_t i;
+
+ for (i = 0; i < num_ssa_names; i++)
+ {
+ tree var = ssa_name (i);
+ if (var && SSA_NAME_VALUE_RANGE (var))
+ {
+ print_generic_expr (file, var, 0);
+ fprintf (file, ": ");
+ dump_value_range (file, SSA_NAME_VALUE_RANGE (var));
+ fprintf (file, "\n");
+ }
+ }
+
+ fprintf (file, "\n");
+}
+
+
+/* Dump all value ranges to stderr. */
+
+void
+debug_all_value_ranges (void)
+{
+ dump_all_value_ranges (stderr);
+}
+
+
+/*---------------------------------------------------------------------------
+ Value Range Propagation
+---------------------------------------------------------------------------*/
+
+/* Given a COND_EXPR COND of the form 'V OP W', and an SSA name V,
+ create a new SSA name N and return the assertion assignment
+   'N = ASSERT_EXPR <V, V OP W>'. */
+
+static tree
+build_assert_expr_for (tree cond, tree v)
+{
+ tree n, assertion;
+
+ gcc_assert (TREE_CODE (v) == SSA_NAME);
+ n = duplicate_ssa_name (v, NULL_TREE);
+
+ if (TREE_CODE_CLASS (TREE_CODE (cond)) == tcc_comparison)
+ {
+ /* Build N = ASSERT_EXPR <V, COND>. As a special case, if the
+ conditional is an EQ_EXPR (V == Z), just build the assignment
+ N = Z. */
+ if (TREE_CODE (cond) == EQ_EXPR)
+ {
+ tree other = get_opposite_operand (cond, v);
+ assertion = build (MODIFY_EXPR, TREE_TYPE (v), n, other);
+ }
+ else
+ assertion = build (MODIFY_EXPR, TREE_TYPE (v), n,
+ build (ASSERT_EXPR, TREE_TYPE (v), v, cond));
+ }
+ else if (TREE_CODE (cond) == TRUTH_NOT_EXPR)
+ {
+ /* Given !V, build the assignment N = false. */
+ tree op0 = TREE_OPERAND (cond, 0);
+ gcc_assert (op0 == v);
+ assertion = build (MODIFY_EXPR, TREE_TYPE (v), n, boolean_false_node);
+ }
+ else if (TREE_CODE (cond) == SSA_NAME)
+ {
+ /* Given V, build the assignment N = true. */
+ gcc_assert (v == cond);
+ assertion = build (MODIFY_EXPR, TREE_TYPE (v), n, boolean_true_node);
+ }
+ else
+ gcc_unreachable ();
+
+ SSA_NAME_DEF_STMT (n) = assertion;
+
+  /* The new ASSERT_EXPR creates a new SSA name that replaces the
+ operand of the ASSERT_EXPR. Register the new name and the old one
+ in the replacement table so that we can fix the SSA web after
+ adding all the ASSERT_EXPRs. */
+ register_new_name_mapping (n, v);
+
+ return assertion;
+}
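+
+/* For instance, given the predicate 'x_3 > 4', the function above
+   builds 'x_4 = ASSERT_EXPR <x_3, x_3 > 4>', where x_4 is a fresh
+   version of x_3; for the equality 'x_3 == 9' it builds the plain
+   assignment 'x_4 = 9'.  */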
+
+
+/* Return true if EXPR is a comparison predicate involving floating
+   point values. */
+
+static inline bool
+fp_predicate (tree expr)
+{
+ return TREE_CODE_CLASS (TREE_CODE (expr)) == tcc_comparison
+ && FLOAT_TYPE_P (TREE_TYPE (TREE_OPERAND (expr, 0)));
+}
+
+
+/* Return an expression predicate that represents the range of values
+ that can be taken by operand OP after STMT executes. */
+
+static tree
+infer_value_range (tree stmt, tree op)
+{
+ if (POINTER_TYPE_P (TREE_TYPE (op)))
+ {
+ bool is_store;
+ unsigned num_uses, num_derefs;
+
+ count_uses_and_derefs (op, stmt, &num_uses, &num_derefs, &is_store);
+ if (num_derefs > 0 && flag_delete_null_pointer_checks)
+ {
+	  /* We can only assume that the dereferenced pointer is
+	     non-NULL when -fdelete-null-pointer-checks is enabled.  */
+ tree null = build_int_cst (TREE_TYPE (op), 0);
+ tree t = build (NE_EXPR, boolean_type_node, op, null);
+ return t;
+ }
+ }
+
+ return NULL_TREE;
+}
+
+
+/* Return true if OP is the result of an ASSERT_EXPR that tests the
+ same condition as COND. */
+
+static bool
+has_assert_expr (tree op, tree cond)
+{
+ tree def_stmt = SSA_NAME_DEF_STMT (op);
+ tree assert_expr, other_cond, other_op;
+
+ /* If OP was not generated by an ASSERT_EXPR, return false. */
+ if (TREE_CODE (def_stmt) != MODIFY_EXPR
+ || TREE_CODE (TREE_OPERAND (def_stmt, 1)) != ASSERT_EXPR)
+ return false;
+
+ assert_expr = TREE_OPERAND (def_stmt, 1);
+ other_cond = ASSERT_EXPR_COND (assert_expr);
+ other_op = ASSERT_EXPR_VAR (assert_expr);
+
+ if (TREE_CODE (cond) == TREE_CODE (other_cond))
+ {
+ tree t1, t2;
+
+ /* If COND is not a comparison predicate, something is wrong. */
+ gcc_assert (TREE_CODE_CLASS (TREE_CODE (cond)) == tcc_comparison);
+
+ /* Note that we only need to compare against one of the operands
+ of OTHER_COND.
+
+ Suppose that we are about to insert the assertion ASSERT_EXPR
+ <x_4, x_4 != 0> and the defining statement for x_4 is x_4 =
+ ASSERT_EXPR <x_3, x_3 != 0>.
+
+ In this case, we don't really want to insert a new
+ ASSERT_EXPR for x_4 because that would be redundant. We
+ already know that x_4 is not 0. So, when comparing the
+ conditionals 'x_3 != 0' and 'x_4 != 0', we don't want to
+ compare x_3 and x_4, we just want to compare the predicate's
+ code (!=) and the other operand (0). */
+ if (TREE_OPERAND (cond, 0) == op)
+ t1 = TREE_OPERAND (cond, 1);
+ else
+ t1 = TREE_OPERAND (cond, 0);
+
+ if (TREE_OPERAND (other_cond, 0) == other_op)
+ t2 = TREE_OPERAND (other_cond, 1);
+ else
+ t2 = TREE_OPERAND (other_cond, 0);
+
+ return (t1 == t2 || operand_equal_p (t1, t2, 0));
+ }
+
+ return false;
+}
+
+
+/* Traverse all the statements in block BB looking for used variables.
+ Variables used in BB are added to bitmap FOUND. The algorithm
+   works in four main parts:
+
+ 1- For every statement S in BB, all the variables used by S are
+ added to bitmap FOUND.
+
+ 2- If statement S uses an operand N in a way that exposes a known
+ value range for N, then if N was not already generated by an
+ ASSERT_EXPR, create a new ASSERT_EXPR for N. For instance, if N
+ is a pointer and the statement dereferences it, we can assume
+ that N is not NULL.
+
+ 3- COND_EXPRs are a special case of #2. We can derive range
+ information from the predicate but need to insert different
+ ASSERT_EXPRs for each of the sub-graphs rooted at the
+ conditional block. If the last statement of BB is a conditional
+ expression of the form 'X op Y', then
+
+ a) Remove X and Y from the set FOUND.
+
+ b) If the conditional dominates its THEN_CLAUSE sub-graph,
+ recurse into it. On return, if X and/or Y are marked in
+ FOUND, then an ASSERT_EXPR is added for the corresponding
+ variable.
+
+ c) Repeat step (b) on the ELSE_CLAUSE.
+
+ d) Mark X and Y in FOUND.
+
+   4- If BB does not end in a conditional expression, then we recurse
+ into BB's dominator children.
+
+ At the end of the recursive traversal, ASSERT_EXPRs will have been
+ added to the edges of COND_EXPR blocks that have sub-graphs using
+ one or both predicate operands. For instance,
+
+ if (a == 9)
+ b = a;
+ else
+ b = c + 1;
+
+ In this case, an assertion on the THEN clause is useful to
+ determine that 'a' is always 9 on that edge. However, an assertion
+ on the ELSE clause would be unnecessary.
+
+ On exit from this function, all the names created by the newly
+ inserted ASSERT_EXPRs need to be added to the SSA web by rewriting
+ the SSA names that they replace.
+
+ TODO. Handle SWITCH_EXPR. */
+
+static bool
+maybe_add_assert_expr (basic_block bb)
+{
+ block_stmt_iterator si;
+ tree last;
+ bool added;
+ use_optype uses;
+
+ /* Step 1. Mark all the SSA names used in BB in bitmap FOUND. */
+ added = false;
+ last = NULL_TREE;
+ for (si = bsi_start (bb); !bsi_end_p (si); bsi_next (&si))
+ {
+ tree stmt, op;
+ ssa_op_iter i;
+
+ stmt = bsi_stmt (si);
+ get_stmt_operands (stmt);
+
+ /* Mark all the SSA names used by STMT in bitmap FOUND. If STMT
+ is inside the sub-graph of a conditional block, when we
+ return from this recursive walk, our parent will use the
+ FOUND bitset to determine if one of the operands it was
+ looking for was present in the sub-graph. */
+ FOR_EACH_SSA_TREE_OPERAND (op, stmt, i, SSA_OP_USE)
+ {
+ tree cond;
+
+ SET_BIT (found, SSA_NAME_VERSION (op));
+
+ cond = infer_value_range (stmt, op);
+ if (!cond)
+ continue;
+
+	  /* Step 2.  If OP is used in such a way that we can infer a
+ value range for it, create a new ASSERT_EXPR for OP
+ (unless OP already has an ASSERT_EXPR). */
+ gcc_assert (!is_ctrl_stmt (stmt));
+
+ if (has_assert_expr (op, cond))
+ continue;
+
+ if (!stmt_ends_bb_p (stmt))
+ {
+ /* If STMT does not end the block, we can insert the new
+ assertion right after it. */
+ tree t = build_assert_expr_for (cond, op);
+ bsi_insert_after (&si, t, BSI_NEW_STMT);
+ added = true;
+ }
+ else
+ {
+ /* STMT must be the last statement in BB. We can only
+ insert new assertions on the non-abnormal edge out of
+ BB. Note that since STMT is not control flow, there
+ may only be one non-abnormal edge out of BB. */
+ edge_iterator ei;
+ edge e;
+
+ FOR_EACH_EDGE (e, ei, bb->succs)
+ if (!(e->flags & EDGE_ABNORMAL))
+ {
+ tree t = build_assert_expr_for (cond, op);
+ bsi_insert_on_edge (e, t);
+ added = true;
+ break;
+ }
+ }
+ }
+
+ /* Remember the last statement of the block. */
+ last = stmt;
+ }
+
+ /* Step 3. If BB's last statement is a conditional expression
+ involving integer operands, recurse into each of the sub-graphs
+ rooted at BB to determine if we need to add ASSERT_EXPRs.
+ Notice that we only care about the first operand of the
+ conditional. Adding assertions for both operands may actually
+ hinder VRP. FIXME, add example. */
+ if (last
+ && TREE_CODE (last) == COND_EXPR
+ && !fp_predicate (COND_EXPR_COND (last))
+ && NUM_USES (uses = STMT_USE_OPS (last)) > 0)
+ {
+ edge e;
+ edge_iterator ei;
+ tree op, cond;
+
+ cond = COND_EXPR_COND (last);
+
+ /* Remove the COND_EXPR operand from the FOUND bitmap.
+ Otherwise, when we finish traversing each of the sub-graphs,
+ we won't know whether the variables were found in the
+ sub-graphs or if they had been found in a block upstream from
+ BB. */
+ op = USE_OP (uses, 0);
+ RESET_BIT (found, SSA_NAME_VERSION (op));
+
+ /* Look for uses of the operands in each of the sub-graphs
+ rooted at BB. We need to check each of the outgoing edges
+ separately, so that we know what kind of ASSERT_EXPR to
+ insert. */
+ FOR_EACH_EDGE (e, ei, bb->succs)
+ {
+ /* If BB strictly dominates the sub-graph at E->DEST,
+ recurse into it. */
+ if (e->dest != bb
+ && dominated_by_p (CDI_DOMINATORS, e->dest, bb))
+ added |= maybe_add_assert_expr (e->dest);
+
+ /* Once we traversed the sub-graph, check if any block inside
+ used either of the predicate's operands. If so, add the
+ appropriate ASSERT_EXPR. */
+ if (TEST_BIT (found, SSA_NAME_VERSION (op)))
+ {
+ /* We found a use of OP in the sub-graph rooted at
+ E->DEST. Add an ASSERT_EXPR according to whether
+ E goes to THEN_CLAUSE or ELSE_CLAUSE. */
+ tree c, t;
+
+ if (e->flags & EDGE_TRUE_VALUE)
+ c = unshare_expr (cond);
+ else if (e->flags & EDGE_FALSE_VALUE)
+ c = invert_truthvalue (cond);
+ else
+ gcc_unreachable ();
+
+ t = build_assert_expr_for (c, op);
+ bsi_insert_on_edge (e, t);
+ added = true;
+ }
+ }
+
+ /* Finally, mark all the COND_EXPR operands as found. */
+ SET_BIT (found, SSA_NAME_VERSION (op));
+ }
+ else
+ {
+      /* Step 4.  Recurse into the dominator children of BB. */
+ basic_block son;
+
+ for (son = first_dom_son (CDI_DOMINATORS, bb);
+ son;
+ son = next_dom_son (CDI_DOMINATORS, son))
+ added |= maybe_add_assert_expr (son);
+ }
+
+ return added;
+}
+
+
+/* Traverse the flowgraph looking for conditional jumps to insert range
+ expressions. These range expressions are meant to provide information
+ to optimizations that need to reason in terms of value ranges. They
+ will not be expanded into RTL. For instance, given:
+
+ x = ...
+ y = ...
+ if (x < y)
+ y = x - 2;
+ else
+ x = y + 3;
+
+ this pass will transform the code into:
+
+ x = ...
+ y = ...
+ if (x < y)
+ {
+ x = ASSERT_EXPR <x, x < y>
+ y = x - 2
+ }
+ else
+ {
+ y = ASSERT_EXPR <y, x <= y>
+ x = y + 3
+ }
+
+ The idea is that once copy and constant propagation have run, other
+   optimizations will be able to determine what ranges of values 'x'
+   can take on different paths of the code, simply by checking the reaching
+ definition of 'x'. */
+
+static void
+insert_range_assertions (void)
+{
+ edge e;
+ edge_iterator ei;
+ bool update_ssa_p;
+
+ found = sbitmap_alloc (num_ssa_names);
+ sbitmap_zero (found);
+
+ calculate_dominance_info (CDI_DOMINATORS);
+
+ update_ssa_p = false;
+ FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR->succs)
+ if (maybe_add_assert_expr (e->dest))
+ update_ssa_p = true;
+
+ if (update_ssa_p)
+ {
+ bsi_commit_edge_inserts ();
+ update_ssa (TODO_update_ssa_no_phi);
+ }
+
+ if (dump_file && (dump_flags & TDF_DETAILS))
+ {
+ fprintf (dump_file, "\nSSA form after inserting ASSERT_EXPRs\n");
+ dump_function_to_file (current_function_decl, dump_file, dump_flags);
+ }
+
+ sbitmap_free (found);
+}
+
+
+/* Convert range assertion expressions into copies.  ASSERT_EXPRs are
+   only useful while ranges are being computed; they are never expanded
+   into RTL, so once VRP is done each 'X = ASSERT_EXPR <Y, COND>' is
+   turned back into the plain copy 'X = Y'.  */
+
+static void
+remove_range_assertions (void)
+{
+ basic_block bb;
+ block_stmt_iterator si;
+
+ FOR_EACH_BB (bb)
+ for (si = bsi_start (bb); !bsi_end_p (si); bsi_next (&si))
+ {
+ tree stmt = bsi_stmt (si);
+
+ if (TREE_CODE (stmt) == MODIFY_EXPR
+ && TREE_CODE (TREE_OPERAND (stmt, 1)) == ASSERT_EXPR)
+ {
+ tree rhs = TREE_OPERAND (stmt, 1);
+ tree cond = fold (ASSERT_EXPR_COND (rhs));
+ gcc_assert (cond != boolean_false_node);
+ TREE_OPERAND (stmt, 1) = ASSERT_EXPR_VAR (rhs);
+ update_stmt (stmt);
+ }
+ }
+}
+
+
+/* Return true if STMT is interesting for VRP. */
+
+static bool
+stmt_interesting_for_vrp (tree stmt)
+{
+ if (TREE_CODE (stmt) == PHI_NODE
+ && is_gimple_reg (PHI_RESULT (stmt))
+ && (INTEGRAL_TYPE_P (TREE_TYPE (PHI_RESULT (stmt)))
+ || POINTER_TYPE_P (TREE_TYPE (PHI_RESULT (stmt)))))
+ return true;
+ else if (TREE_CODE (stmt) == MODIFY_EXPR)
+ {
+ tree lhs = TREE_OPERAND (stmt, 0);
+ stmt_ann_t ann = stmt_ann (stmt);
+
+ if (TREE_CODE (lhs) == SSA_NAME
+ && (INTEGRAL_TYPE_P (TREE_TYPE (lhs))
+ || POINTER_TYPE_P (TREE_TYPE (lhs)))
+ && NUM_V_MAY_DEFS (V_MAY_DEF_OPS (ann)) == 0
+ && NUM_VUSES (VUSE_OPS (ann)) == 0
+ && NUM_V_MUST_DEFS (V_MUST_DEF_OPS (ann)) == 0)
+ return true;
+ }
+ else if (TREE_CODE (stmt) == COND_EXPR || TREE_CODE (stmt) == SWITCH_EXPR)
+ return true;
+
+ return false;
+}
+
+
+/* Initialize local data structures for VRP. Return true if VRP
+ is worth running (i.e. if we found any statements that could
+ benefit from range information). */
+
+static bool
+vrp_initialize (void)
+{
+ basic_block bb;
+ bool do_vrp;
+
+ /* If we don't find any ASSERT_EXPRs in the code, there's no point
+ running VRP. */
+ do_vrp = false;
+
+ FOR_EACH_BB (bb)
+ {
+ block_stmt_iterator si;
+ tree phi;
+
+ for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
+ {
+ if (!stmt_interesting_for_vrp (phi))
+ {
+ tree lhs = PHI_RESULT (phi);
+ set_value_range (get_value_range (lhs), VR_VARYING, 0, 0);
+ DONT_SIMULATE_AGAIN (phi) = true;
+ }
+ else
+ DONT_SIMULATE_AGAIN (phi) = false;
+ }
+
+ for (si = bsi_start (bb); !bsi_end_p (si); bsi_next (&si))
+ {
+ tree stmt = bsi_stmt (si);
+
+ if (!stmt_interesting_for_vrp (stmt))
+ {
+ ssa_op_iter i;
+ tree def;
+ FOR_EACH_SSA_TREE_OPERAND (def, stmt, i, SSA_OP_DEF)
+ set_value_range (get_value_range (def), VR_VARYING, 0, 0);
+ DONT_SIMULATE_AGAIN (stmt) = true;
+ }
+ else
+ {
+ if (TREE_CODE (stmt) == MODIFY_EXPR
+ && TREE_CODE (TREE_OPERAND (stmt, 1)) == ASSERT_EXPR)
+ do_vrp = true;
+
+ DONT_SIMULATE_AGAIN (stmt) = false;
+ }
+ }
+ }
+
+ return do_vrp;
+}
+
+
+/* Visit assignment STMT. If it produces an interesting range, record
+ the SSA name in *OUTPUT_P. */
+
+static enum ssa_prop_result
+vrp_visit_assignment (tree stmt, tree *output_p)
+{
+ tree lhs, rhs, def;
+ ssa_op_iter iter;
+
+ lhs = TREE_OPERAND (stmt, 0);
+ rhs = TREE_OPERAND (stmt, 1);
+
+ /* We only keep track of ranges in integral and pointer types. */
+ if (TREE_CODE (lhs) == SSA_NAME
+ && (INTEGRAL_TYPE_P (TREE_TYPE (lhs))
+ || POINTER_TYPE_P (TREE_TYPE (lhs))))
+ {
+ value_range *vr, new_vr;
+ struct loop *l;
+
+ vr = get_value_range (lhs);
+ extract_range_from_expr (&new_vr, rhs);
+
+ /* If STMT is inside a loop, we may be able to know something
+ else about the range of LHS by examining scalar evolution
+ information. */
+ if (cfg_loops && (l = loop_containing_stmt (stmt)))
+ adjust_range_with_scev (&new_vr, l, lhs);
+
+ if (update_value_range (vr, new_vr.type, new_vr.min, new_vr.max))
+ {
+ *output_p = lhs;
+
+ if (dump_file && (dump_flags & TDF_DETAILS))
+ {
+ fprintf (dump_file, "Found new range ");
+ dump_value_range (dump_file, &new_vr);
+ fprintf (dump_file, " for ");
+ print_generic_expr (dump_file, lhs, 0);
+ fprintf (dump_file, "\n\n");
+ }
+
+ if (new_vr.type == VR_VARYING)
+ return SSA_PROP_VARYING;
+
+ return SSA_PROP_INTERESTING;
+ }
+
+ return SSA_PROP_NOT_INTERESTING;
+ }
+
+  /* Every other statement produces no useful ranges.  */
+ FOR_EACH_SSA_TREE_OPERAND (def, stmt, iter, SSA_OP_DEF)
+ set_value_range (get_value_range (def), VR_VARYING, 0, 0);
+
+ return SSA_PROP_VARYING;
+}
+
+
+/* Given a conditional predicate COND, try to determine if COND yields
+ true or false based on the value ranges of its operands. */
+
+static tree
+vrp_evaluate_conditional (tree cond)
+{
+ gcc_assert (TREE_CODE (cond) == SSA_NAME
+ || TREE_CODE_CLASS (TREE_CODE (cond)) == tcc_comparison);
+
+ if (TREE_CODE (cond) == SSA_NAME)
+ {
+ /* For SSA names, only return a truth value if the range is
+ known and contains exactly one value. */
+ value_range *vr = SSA_NAME_VALUE_RANGE (cond);
+ if (vr && vr->type == VR_RANGE && vr->min == vr->max)
+ return vr->min;
+ }
+ else
+ {
+ /* For comparisons, evaluate each operand and compare their
+ ranges. */
+ tree op0, op1;
+ value_range *vr0, *vr1;
+
+ op0 = TREE_OPERAND (cond, 0);
+ vr0 = (TREE_CODE (op0) == SSA_NAME) ? get_value_range (op0) : NULL;
+
+ op1 = TREE_OPERAND (cond, 1);
+ vr1 = (TREE_CODE (op1) == SSA_NAME) ? get_value_range (op1) : NULL;
+
+ if (vr0 && vr1)
+ return compare_ranges (TREE_CODE (cond), vr0, vr1);
+ else if (vr0 && vr1 == NULL)
+ return compare_range_with_value (TREE_CODE (cond), vr0, op1);
+ else if (vr0 == NULL && vr1)
+ return compare_range_with_value (opposite_comparison (TREE_CODE (cond)),
+ vr1, op0);
+ }
+
+ /* Anything else cannot be computed statically. */
+ return NULL_TREE;
+}
+
+
+/* Visit conditional statement STMT. If we can determine which edge
+ will be taken out of STMT's basic block, record it in
+ *TAKEN_EDGE_P and return SSA_PROP_INTERESTING. Otherwise, return
+ SSA_PROP_VARYING. */
+
+static enum ssa_prop_result
+vrp_visit_cond_stmt (tree stmt, edge *taken_edge_p)
+{
+ tree cond, val;
+
+ *taken_edge_p = NULL;
+
+ /* FIXME. Handle SWITCH_EXPRs. But first, the assert pass needs to
+ add ASSERT_EXPRs for them. */
+ if (TREE_CODE (stmt) == SWITCH_EXPR)
+ return SSA_PROP_VARYING;
+
+ cond = COND_EXPR_COND (stmt);
+
+ if (dump_file && (dump_flags & TDF_DETAILS))
+ {
+ tree use;
+ ssa_op_iter i;
+
+ fprintf (dump_file, "\nVisiting conditional with predicate: ");
+ print_generic_expr (dump_file, cond, 0);
+ fprintf (dump_file, "\nWith known ranges\n");
+
+ FOR_EACH_SSA_TREE_OPERAND (use, stmt, i, SSA_OP_USE)
+ {
+ fprintf (dump_file, "\t");
+ print_generic_expr (dump_file, use, 0);
+ fprintf (dump_file, ": ");
+ dump_value_range (dump_file, SSA_NAME_VALUE_RANGE (use));
+ }
+
+ fprintf (dump_file, "\n");
+ }
+
+ /* Compute the value of the predicate COND by checking the known
+ ranges of each of its operands. */
+ val = vrp_evaluate_conditional (cond);
+ if (val)
+ *taken_edge_p = find_taken_edge (bb_for_stmt (stmt), val);
+
+ if (dump_file && (dump_flags & TDF_DETAILS))
+ {
+ fprintf (dump_file, "\nPredicate evaluates to: ");
+ if (val == NULL_TREE)
+ fprintf (dump_file, "DON'T KNOW\n");
+ else
+ print_generic_stmt (dump_file, val, 0);
+ }
+
+ return (*taken_edge_p) ? SSA_PROP_INTERESTING : SSA_PROP_VARYING;
+}
+
+
+/* Evaluate statement STMT. If the statement produces a useful range,
+ return SSA_PROP_INTERESTING and record the SSA name with the
+ interesting range into *OUTPUT_P.
+
+ If STMT is a conditional branch and we can determine its truth
+ value, the taken edge is recorded in *TAKEN_EDGE_P.
+
+ If STMT produces a varying value, return SSA_PROP_VARYING. */
+
+static enum ssa_prop_result
+vrp_visit_stmt (tree stmt, edge *taken_edge_p, tree *output_p)
+{
+ tree def;
+ ssa_op_iter iter;
+ stmt_ann_t ann;
+
+ if (dump_file && (dump_flags & TDF_DETAILS))
+ {
+ fprintf (dump_file, "\nVisiting statement:\n");
+ print_generic_stmt (dump_file, stmt, dump_flags);
+ fprintf (dump_file, "\n");
+ }
+
+ ann = stmt_ann (stmt);
+ if (TREE_CODE (stmt) == MODIFY_EXPR
+ && NUM_V_MAY_DEFS (V_MAY_DEF_OPS (ann)) == 0
+ && NUM_VUSES (VUSE_OPS (ann)) == 0
+ && NUM_V_MUST_DEFS (V_MUST_DEF_OPS (ann)) == 0)
+ return vrp_visit_assignment (stmt, output_p);
+ else if (TREE_CODE (stmt) == COND_EXPR || TREE_CODE (stmt) == SWITCH_EXPR)
+ return vrp_visit_cond_stmt (stmt, taken_edge_p);
+
+ /* All other statements produce nothing of interest for VRP, so mark
+ their outputs varying and prevent further simulation. */
+ FOR_EACH_SSA_TREE_OPERAND (def, stmt, iter, SSA_OP_DEF)
+ set_value_range (get_value_range (def), VR_VARYING, 0, 0);
+
+ return SSA_PROP_VARYING;
+}
+
+
+/* Meet operation for value ranges. Given two value ranges VR0 and
+ VR1, store in VR0 the result of meeting VR0 and VR1.
+
+ The meeting rules are as follows:
+
+ 1- If VR0 and VR1 have an empty intersection, set VR0 to VR_VARYING.
+
+ 2- If VR0 and VR1 have a non-empty intersection, set VR0 to the
+ union of VR0 and VR1. */
+
+static void
+vrp_meet (value_range *vr0, value_range *vr1)
+{
+ if (vr0->type == VR_UNDEFINED)
+ {
+ *vr0 = *vr1;
+ return;
+ }
+
+ if (vr1->type == VR_UNDEFINED)
+ {
+ /* Nothing to do. VR0 already has the resulting range. */
+ return;
+ }
+
+ if (vr0->type == VR_VARYING)
+ {
+ /* Nothing to do. VR0 already has the resulting range. */
+ return;
+ }
+
+ if (vr1->type == VR_VARYING)
+ {
+ *vr0 = *vr1;
+ return;
+ }
+
+ /* If either is a symbolic range, drop to VARYING. */
+ if (symbolic_range_p (vr0) || symbolic_range_p (vr1))
+ {
+ set_value_range (vr0, VR_VARYING, NULL_TREE, NULL_TREE);
+ return;
+ }
+
+ if (vr0->type == VR_RANGE && vr1->type == VR_RANGE)
+ {
+ /* If VR0 and VR1 have a non-empty intersection, compute the
+ union of both ranges. */
+ if (value_ranges_intersect_p (vr0, vr1))
+ {
+ tree min, max;
+
+ min = vr0->min;
+ max = vr0->max;
+
+ /* The lower limit of the new range is the minimum of the
+ two ranges. */
+ if (compare_values (vr0->min, vr1->min) == 1)
+ min = vr1->min;
+
+	  /* The upper limit of the new range is the maximum of the
+ two ranges. */
+ if (compare_values (vr0->max, vr1->max) == -1)
+ max = vr1->max;
+
+ set_value_range (vr0, vr0->type, min, max);
+ }
+ else
+ {
+ /* The two ranges don't intersect, set the result to VR_VARYING. */
+ set_value_range (vr0, VR_VARYING, NULL_TREE, NULL_TREE);
+ }
+ }
+ else if (vr0->type == VR_ANTI_RANGE && vr1->type == VR_ANTI_RANGE)
+ {
+      /* Two anti-ranges meet only if they are identical single-value
+	 anti-ranges (~[VAL, VAL]).  */
+ if (compare_values (vr0->min, vr1->min) == 0
+ && compare_values (vr0->max, vr1->max) == 0
+ && compare_values (vr0->min, vr0->max) == 0)
+ /* Nothing to do. */ ;
+ else
+ set_value_range (vr0, VR_VARYING, NULL_TREE, NULL_TREE);
+ }
+ else if (vr0->type == VR_ANTI_RANGE || vr1->type == VR_ANTI_RANGE)
+ {
+ /* A range [VAL1, VAL2] and an anti-range ~[VAL3, VAL4] meet
+ only if the ranges have an empty intersection. The result of
+ the meet operation is the anti-range. */
+ if (!value_ranges_intersect_p (vr0, vr1))
+ {
+ if (vr1->type == VR_ANTI_RANGE)
+ *vr0 = *vr1;
+ }
+ else
+ set_value_range (vr0, VR_VARYING, NULL_TREE, NULL_TREE);
+ }
+ else
+ gcc_unreachable ();
+}
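+
+/* For instance, meeting [1, 10] with [5, 20] yields their union
+   [1, 20]; meeting [1, 5] with [10, 20] yields VARYING because the
+   ranges do not intersect; meeting [1, 5] with ~[0, 0] keeps the
+   anti-range ~[0, 0]; and meeting [0, 5] with ~[0, 0] yields
+   VARYING.  */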
+
+
+/* Visit all arguments for PHI node PHI that flow through executable
+ edges. If a valid value range can be derived from all the incoming
+ value ranges, set a new range for the LHS of PHI. */
+
+static enum ssa_prop_result
+vrp_visit_phi_node (tree phi)
+{
+ int i;
+ tree lhs = PHI_RESULT (phi);
+ value_range *lhs_vr = get_value_range (lhs);
+ value_range vr_result = *lhs_vr;
+
+ if (dump_file && (dump_flags & TDF_DETAILS))
+ {
+ fprintf (dump_file, "\nVisiting PHI node: ");
+ print_generic_expr (dump_file, phi, dump_flags);
+ }
+
+ for (i = 0; i < PHI_NUM_ARGS (phi); i++)
+ {
+ edge e = PHI_ARG_EDGE (phi, i);
+
+ if (dump_file && (dump_flags & TDF_DETAILS))
+ {
+ fprintf (dump_file,
+ "\n Argument #%d (%d -> %d %sexecutable)\n",
+ i, e->src->index, e->dest->index,
+ (e->flags & EDGE_EXECUTABLE) ? "" : "not ");
+ }
+
+ if (e->flags & EDGE_EXECUTABLE)
+ {
+ tree arg = PHI_ARG_DEF (phi, i);
+ value_range vr_arg;
+
+ if (TREE_CODE (arg) == SSA_NAME)
+ vr_arg = *(get_value_range (arg));
+ else
+ {
+ vr_arg.type = VR_RANGE;
+ vr_arg.min = arg;
+ vr_arg.max = arg;
+ }
+
+ if (dump_file && (dump_flags & TDF_DETAILS))
+ {
+ fprintf (dump_file, "\t");
+ print_generic_expr (dump_file, arg, dump_flags);
+ fprintf (dump_file, "\n\tValue: ");
+ dump_value_range (dump_file, &vr_arg);
+ fprintf (dump_file, "\n");
+ }
+
+ vrp_meet (&vr_result, &vr_arg);
+
+ if (vr_result.type == VR_VARYING)
+ break;
+ }
+ }
+
+ if (vr_result.type == VR_VARYING)
+ {
+ set_value_range (lhs_vr, VR_VARYING, 0, 0);
+ return SSA_PROP_VARYING;
+ }
+
+  /* To prevent infinite iterations in the algorithm, widen the range
+     aggressively as soon as the new bounds differ from the previous
+     ones, instead of following every small change.  */
+ if (lhs_vr->type == VR_RANGE)
+ {
+ if (!POINTER_TYPE_P (TREE_TYPE (lhs)))
+ {
+ int cmp_min = compare_values (lhs_vr->min, vr_result.min);
+ int cmp_max = compare_values (lhs_vr->max, vr_result.max);
+
+	  /* If the new minimum differs from the previous one, go all
+	     the way to -INF: when it is smaller, this avoids iterating
+	     millions of times just to reach -INF, and when it is
+	     larger, it avoids infinite bouncing between different
+	     minimums.  */
+ if (cmp_min > 0 || cmp_min < 0)
+ vr_result.min = TYPE_MIN_VALUE (TREE_TYPE (vr_result.min));
+
+ /* Similarly, if the new maximum is smaller or larger than
+ the previous one, go all the way to +INF. */
+ if (cmp_max < 0 || cmp_max > 0)
+ vr_result.max = TYPE_MAX_VALUE (TREE_TYPE (vr_result.max));
+
+ /* If we ended up with a (-INF, +INF) range, set it to
+ VARYING. */
+ if (vr_result.min == TYPE_MIN_VALUE (TREE_TYPE (vr_result.min))
+ && vr_result.max == TYPE_MAX_VALUE (TREE_TYPE (vr_result.max)))
+ {
+ set_value_range (lhs_vr, VR_VARYING, 0, 0);
+ return SSA_PROP_VARYING;
+ }
+ }
+ }
+
+  /* If the new range is different from the previous one, keep
+ iterating. */
+ if (update_value_range (lhs_vr, vr_result.type, vr_result.min, vr_result.max))
+ return SSA_PROP_INTERESTING;
+
+ /* Nothing changed, don't add outgoing edges. */
+ return SSA_PROP_NOT_INTERESTING;
+}
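+
+/* For instance, if a previous visit gave the PHI result the range
+   [0, 10] and the current visit computes [0, 11] for a signed name,
+   the maximum has changed, so it is immediately widened to +INF and
+   the new range becomes [0, TYPE_MAX] instead of creeping up by one
+   element on every iteration.  */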
+
+
+/* Traverse all the blocks folding conditionals with known ranges. */
+
+static void
+vrp_finalize (void)
+{
+ basic_block bb;
+ int num_pred_folded = 0;
+
+ if (dump_file)
+ {
+ fprintf (dump_file, "\nValue ranges after VRP:\n\n");
+ dump_all_value_ranges (dump_file);
+ fprintf (dump_file, "\n");
+ }
+
+ FOR_EACH_BB (bb)
+ {
+ tree last = last_stmt (bb);
+ if (last && TREE_CODE (last) == COND_EXPR)
+ {
+ tree val = vrp_evaluate_conditional (COND_EXPR_COND (last));
+ if (val)
+ {
+ if (dump_file)
+ {
+ fprintf (dump_file, "Folding predicate ");
+ print_generic_expr (dump_file, COND_EXPR_COND (last), 0);
+ fprintf (dump_file, " to ");
+ print_generic_expr (dump_file, val, 0);
+ fprintf (dump_file, "\n");
+ }
+
+ num_pred_folded++;
+ COND_EXPR_COND (last) = val;
+ update_stmt (last);
+ }
+ }
+ }
+
+ if (dump_file && (dump_flags & TDF_STATS))
+ fprintf (dump_file, "\nNumber of predicates folded: %d\n\n",
+ num_pred_folded);
+}
+
+
+/* Main entry point to VRP (Value Range Propagation). This pass is
+ loosely based on J. R. C. Patterson, ``Accurate Static Branch
+ Prediction by Value Range Propagation,'' in SIGPLAN Conference on
+ Programming Language Design and Implementation, pp. 67-78, 1995.
+ Also available at http://citeseer.ist.psu.edu/patterson95accurate.html
+
+ This is essentially an SSA-CCP pass modified to deal with ranges
+ instead of constants.
+
+   TODO.  The main difference between this pass and Patterson's is that
+ we do not propagate edge probabilities. We only compute whether
+ edges can be taken or not. That is, instead of having a spectrum
+ of jump probabilities between 0 and 1, we only deal with 0, 1 and
+ DON'T KNOW. In the future, it may be worthwhile to propagate
+ probabilities to aid branch prediction. */
+
+static void
+execute_vrp (void)
+{
+ insert_range_assertions ();
+
+ cfg_loops = loop_optimizer_init (NULL);
+ if (cfg_loops)
+ scev_initialize (cfg_loops);
+
+ if (vrp_initialize ())
+ {
+ ssa_propagate (vrp_visit_stmt, vrp_visit_phi_node);
+ vrp_finalize ();
+ }
+
+ if (cfg_loops)
+ {
+ scev_finalize ();
+ loop_optimizer_finalize (cfg_loops, NULL);
+ current_loops = NULL;
+ }
+
+ remove_range_assertions ();
+}
+
+static bool
+gate_vrp (void)
+{
+ return flag_tree_vrp != 0;
+}
+
+struct tree_opt_pass pass_vrp =
+{
+ "vrp", /* name */
+ gate_vrp, /* gate */
+ execute_vrp, /* execute */
+ NULL, /* sub */
+ NULL, /* next */
+ 0, /* static_pass_number */
+ TV_TREE_VRP, /* tv_id */
+ PROP_ssa | PROP_alias, /* properties_required */
+ 0, /* properties_provided */
+ 0, /* properties_destroyed */
+ 0, /* todo_flags_start */
+ TODO_cleanup_cfg
+ | TODO_ggc_collect
+ | TODO_verify_ssa
+ | TODO_dump_func
+ | TODO_update_ssa, /* todo_flags_finish */
+ 0 /* letter */
+};
diff --git a/gcc/tree.def b/gcc/tree.def
index 08e28068167..3806f5b0043 100644
--- a/gcc/tree.def
+++ b/gcc/tree.def
@@ -899,6 +899,20 @@ DEFTREECODE (STATEMENT_LIST, "statement_list", tcc_exceptional, 0)
the same value, they will be assigned the same value handle. */
DEFTREECODE (VALUE_HANDLE, "value_handle", tcc_exceptional, 0)
+/* Predicate assertion. Artificial expression generated by the optimizers
+ to keep track of predicate values. This expression may only appear on
+ the RHS of assignments.
+
+ Given X = ASSERT_EXPR <Y, EXPR>, the optimizers can infer
+ two things:
+
+ 1- X is a copy of Y.
+ 2- EXPR is a GIMPLE conditional expression (as defined by
+ is_gimple_condexpr) and is known to be true.
+
+ The type of the expression is the same as Y. */
+DEFTREECODE (ASSERT_EXPR, "assert_expr", tcc_expression, 2)
+
/* Base class information. Holds information about a class as a
baseclass of itself or another class. */
DEFTREECODE (TREE_BINFO, "tree_binfo", tcc_exceptional, 0)
diff --git a/gcc/tree.h b/gcc/tree.h
index 0141d2b4456..40cf7397c82 100644
--- a/gcc/tree.h
+++ b/gcc/tree.h
@@ -1283,6 +1283,10 @@ struct tree_vec GTY(())
#define OBJ_TYPE_REF_OBJECT(NODE) TREE_OPERAND (OBJ_TYPE_REF_CHECK (NODE), 1)
#define OBJ_TYPE_REF_TOKEN(NODE) TREE_OPERAND (OBJ_TYPE_REF_CHECK (NODE), 2)
+/* ASSERT_EXPR accessors. */
+#define ASSERT_EXPR_VAR(NODE) TREE_OPERAND (ASSERT_EXPR_CHECK (NODE), 0)
+#define ASSERT_EXPR_COND(NODE) TREE_OPERAND (ASSERT_EXPR_CHECK (NODE), 1)
+
struct tree_exp GTY(())
{
struct tree_common common;
@@ -1329,12 +1333,17 @@ struct tree_exp GTY(())
#define SSA_NAME_VALUE(N) \
SSA_NAME_CHECK (N)->ssa_name.value_handle
+/* Range information for SSA_NAMEs. */
+#define SSA_NAME_VALUE_RANGE(N) \
+ SSA_NAME_CHECK (N)->ssa_name.value_range
+
/* Auxiliary pass-specific data. */
#define SSA_NAME_AUX(N) \
SSA_NAME_CHECK (N)->ssa_name.aux
#ifndef _TREE_FLOW_H
struct ptr_info_def;
+struct value_range_def;
#endif
@@ -1372,6 +1381,9 @@ struct tree_ssa_name GTY(())
as well. */
tree value_handle;
+ /* Value range information. */
+ struct value_range_def *value_range;
+
/* Auxiliary information stored with the ssa name. */
PTR GTY((skip)) aux;
@@ -1395,9 +1407,6 @@ struct tree_ssa_name GTY(())
the link to the next PHI is in PHI_CHAIN. */
#define PHI_CHAIN(NODE) TREE_CHAIN (PHI_NODE_CHECK (NODE))
-/* Nonzero if the PHI node was rewritten by a previous pass through the
- SSA renamer. */
-#define PHI_REWRITTEN(NODE) PHI_NODE_CHECK (NODE)->phi.rewritten
#define PHI_NUM_ARGS(NODE) PHI_NODE_CHECK (NODE)->phi.num_args
#define PHI_ARG_CAPACITY(NODE) PHI_NODE_CHECK (NODE)->phi.capacity
#define PHI_ARG_ELT(NODE, I) PHI_NODE_ELT_CHECK (NODE, I)
@@ -1422,10 +1431,6 @@ struct tree_phi_node GTY(())
int num_args;
int capacity;
- /* Nonzero if the PHI node was rewritten by a previous pass through the
- SSA renamer. */
- int rewritten;
-
   /* Basic block to which the phi node belongs. */
struct basic_block_def *bb;