| /* Control flow functions for trees. |
| Copyright (C) 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc. |
| Contributed by Diego Novillo <dnovillo@redhat.com> |
| |
| This file is part of GCC. |
| |
| GCC is free software; you can redistribute it and/or modify |
| it under the terms of the GNU General Public License as published by |
| the Free Software Foundation; either version 2, or (at your option) |
| any later version. |
| |
| GCC is distributed in the hope that it will be useful, |
| but WITHOUT ANY WARRANTY; without even the implied warranty of |
| MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
| GNU General Public License for more details. |
| |
| You should have received a copy of the GNU General Public License |
| along with GCC; see the file COPYING. If not, write to |
| the Free Software Foundation, 59 Temple Place - Suite 330, |
| Boston, MA 02111-1307, USA. */ |
| |
| #include "config.h" |
| #include "system.h" |
| #include "coretypes.h" |
| #include "tm.h" |
| #include "tree.h" |
| #include "rtl.h" |
| #include "tm_p.h" |
| #include "hard-reg-set.h" |
| #include "basic-block.h" |
| #include "output.h" |
| #include "errors.h" |
| #include "flags.h" |
| #include "function.h" |
| #include "expr.h" |
| #include "ggc.h" |
| #include "langhooks.h" |
| #include "diagnostic.h" |
| #include "tree-flow.h" |
| #include "timevar.h" |
| #include "tree-dump.h" |
| #include "tree-pass.h" |
| #include "toplev.h" |
| #include "except.h" |
| #include "cfgloop.h" |
| #include "cfglayout.h" |
| #include "hashtab.h" |
| |
| /* This file contains functions for building the Control Flow Graph (CFG) |
| for a function tree. */ |
| |
| /* Local declarations. */ |
| |
| /* Initial capacity for the basic block array. */ |
| static const int initial_cfg_capacity = 20; |
| |
| /* Mapping of labels to their associated blocks. This can greatly speed up |
| building of the CFG in code with lots of gotos. */ |
| static GTY(()) varray_type label_to_block_map; |
| |
| /* This hash table allows us to efficiently lookup all CASE_LABEL_EXPRs |
| which use a particular edge. The CASE_LABEL_EXPRs are chained together |
| via their TREE_CHAIN field, which we clear after we're done with the |
| hash table to prevent problems with duplication of SWITCH_EXPRs. |
| |
| Access to this list of CASE_LABEL_EXPRs allows us to efficiently |
| update the case vector in response to edge redirections. |
| |
| Right now this table is set up and torn down at key points in the |
| compilation process. It would be nice if we could make the table |
| more persistent. The key is getting notification of changes to |
| the CFG (particularly edge removal, creation and redirection). */ |
| |
| struct edge_to_cases_elt |
| { |
| /* The edge itself. Necessary for hashing and equality tests. */ |
| edge e; |
| |
| /* The case labels associated with this edge. We link these up via |
| their TREE_CHAIN field, then we wipe out the TREE_CHAIN fields |
| when we destroy the hash table. This prevents problems when copying |
| SWITCH_EXPRs. */ |
| tree case_labels; |
| }; |
| |
| static htab_t edge_to_cases; |
| |
| /* CFG statistics. */ |
| struct cfg_stats_d |
| { |
| long num_merged_labels; |
| }; |
| |
| static struct cfg_stats_d cfg_stats; |
| |
| /* Nonzero if we found a computed goto while building basic blocks. */ |
| static bool found_computed_goto; |
| |
| /* Basic blocks and flowgraphs. */ |
| static basic_block create_bb (void *, void *, basic_block); |
| static void create_block_annotation (basic_block); |
| static void free_blocks_annotations (void); |
| static void clear_blocks_annotations (void); |
| static void make_blocks (tree); |
| static void factor_computed_gotos (void); |
| |
| /* Edges. */ |
| static void make_edges (void); |
| static void make_ctrl_stmt_edges (basic_block); |
| static void make_exit_edges (basic_block); |
| static void make_cond_expr_edges (basic_block); |
| static void make_switch_expr_edges (basic_block); |
| static void make_goto_expr_edges (basic_block); |
| static edge tree_redirect_edge_and_branch (edge, basic_block); |
| static edge tree_try_redirect_by_replacing_jump (edge, basic_block); |
| static void split_critical_edges (void); |
| static bool remove_fallthru_edge (VEC(edge) *); |
| |
| /* Various helpers. */ |
| static inline bool stmt_starts_bb_p (tree, tree); |
| static int tree_verify_flow_info (void); |
| static void tree_make_forwarder_block (edge); |
| static bool tree_forwarder_block_p (basic_block, bool); |
| static void tree_cfg2vcg (FILE *); |
| |
| /* Flowgraph optimization and cleanup. */ |
| static void tree_merge_blocks (basic_block, basic_block); |
| static bool tree_can_merge_blocks_p (basic_block, basic_block); |
| static void remove_bb (basic_block); |
| static bool cleanup_control_flow (void); |
| static bool cleanup_control_expr_graph (basic_block, block_stmt_iterator); |
| static edge find_taken_edge_cond_expr (basic_block, tree); |
| static edge find_taken_edge_switch_expr (basic_block, tree); |
| static tree find_case_label_for_value (tree, tree); |
| static bool phi_alternatives_equal (basic_block, edge, edge); |
| static bool cleanup_forwarder_blocks (void); |
| |
| |
| /*--------------------------------------------------------------------------- |
| Create basic blocks |
| ---------------------------------------------------------------------------*/ |
| |
| /* Entry point to the CFG builder for trees. TP points to the list of |
| statements to be added to the flowgraph. */ |
| |
static void
build_tree_cfg (tree *tp)
{
  /* Register specific tree functions.  Must happen first so the generic
     CFG hooks dispatch to the tree-level implementations below.  */
  tree_register_cfg_hooks ();

  /* Initialize rbi_pool (reorder_block_info used by cfglayout).  */
  alloc_rbi_pool ();

  /* Initialize the basic block array.  */
  init_flow ();
  profile_status = PROFILE_ABSENT;
  n_basic_blocks = 0;
  last_basic_block = 0;
  VARRAY_BB_INIT (basic_block_info, initial_cfg_capacity, "basic_block_info");
  memset ((void *) &cfg_stats, 0, sizeof (cfg_stats));

  /* Build a mapping of labels to their associated blocks.  */
  VARRAY_BB_INIT (label_to_block_map, initial_cfg_capacity,
                  "label to block map");

  /* Until make_blocks runs, ENTRY and EXIT are adjacent.  */
  ENTRY_BLOCK_PTR->next_bb = EXIT_BLOCK_PTR;
  EXIT_BLOCK_PTR->prev_bb = ENTRY_BLOCK_PTR;

  /* make_blocks sets this flag as a side effect when it sees one.  */
  found_computed_goto = 0;
  make_blocks (*tp);

  /* Computed gotos are hell to deal with, especially if there are
     lots of them with a large number of destinations.  So we factor
     them to a common computed goto location before we build the
     edge list.  After we convert back to normal form, we will un-factor
     the computed gotos since factoring introduces an unwanted jump.  */
  if (found_computed_goto)
    factor_computed_gotos ();

  /* Make sure there is always at least one block, even if it's empty.  */
  if (n_basic_blocks == 0)
    create_empty_bb (ENTRY_BLOCK_PTR);

  create_block_annotation (ENTRY_BLOCK_PTR);
  create_block_annotation (EXIT_BLOCK_PTR);

  /* Adjust the size of the array.  */
  VARRAY_GROW (basic_block_info, n_basic_blocks);

  /* To speed up statement iterator walks, we first purge dead labels.  */
  cleanup_dead_labels ();

  /* Group case nodes to reduce the number of edges.
     We do this after cleaning up dead labels because otherwise we miss
     a lot of obvious case merging opportunities.  */
  group_case_labels ();

  /* Create the edges of the flowgraph.  */
  make_edges ();

  /* Debugging dumps.  */

  /* Write the flowgraph to a VCG file.  */
  {
    int local_dump_flags;
    FILE *dump_file = dump_begin (TDI_vcg, &local_dump_flags);
    if (dump_file)
      {
        tree_cfg2vcg (dump_file);
        dump_end (TDI_vcg, dump_file);
      }
  }

  /* Dump a textual representation of the flowgraph.  */
  if (dump_file)
    dump_tree_cfg (dump_file, dump_flags);
}
| |
/* Pass entry point: build the CFG for the current function's saved
   GENERIC/GIMPLE body.  */

static void
execute_build_cfg (void)
{
  build_tree_cfg (&DECL_SAVED_TREE (current_function_decl));
}
| |
/* Pass descriptor for CFG construction; requires lowered EH/GIMPLE
   (PROP_gimple_leh) and provides PROP_cfg to later passes.  */

struct tree_opt_pass pass_build_cfg =
{
  "cfg",				/* name */
  NULL,					/* gate */
  execute_build_cfg,			/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_TREE_CFG,				/* tv_id */
  PROP_gimple_leh,			/* properties_required */
  PROP_cfg,				/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_verify_stmts,			/* todo_flags_finish */
  0					/* letter */
};
| |
| /* Search the CFG for any computed gotos. If found, factor them to a |
| common computed goto site. Also record the location of that site so |
| that we can un-factor the gotos after we have converted back to |
| normal form. */ |
| |
static void
factor_computed_gotos (void)
{
  basic_block bb;
  tree factored_label_decl = NULL;
  /* Shared variable that every rewritten computed goto assigns its
     destination to before jumping to the factored block.  */
  tree var = NULL;
  tree factored_computed_goto_label = NULL;
  tree factored_computed_goto = NULL;

  /* We know there are one or more computed gotos in this function.
     Examine the last statement in each basic block to see if the block
     ends with a computed goto.  */

  FOR_EACH_BB (bb)
    {
      block_stmt_iterator bsi = bsi_last (bb);
      tree last;

      /* Skip empty blocks.  */
      if (bsi_end_p (bsi))
        continue;
      last = bsi_stmt (bsi);

      /* Ignore the computed goto we create when we factor the original
         computed gotos.  */
      if (last == factored_computed_goto)
        continue;

      /* If the last statement is a computed goto, factor it.  */
      if (computed_goto_p (last))
        {
          tree assignment;

          /* The first time we find a computed goto we need to create
             the factored goto block and the variable each original
             computed goto will use for their goto destination.  */
          if (! factored_computed_goto)
            {
              basic_block new_bb = create_empty_bb (bb);
              block_stmt_iterator new_bsi = bsi_start (new_bb);

              /* Create the destination of the factored goto.  Each original
                 computed goto will put its desired destination into this
                 variable and jump to the label we create immediately
                 below.  */
              var = create_tmp_var (ptr_type_node, "gotovar");

              /* Build a label for the new block which will contain the
                 factored computed goto.  */
              factored_label_decl = create_artificial_label ();
              factored_computed_goto_label
                = build1 (LABEL_EXPR, void_type_node, factored_label_decl);
              bsi_insert_after (&new_bsi, factored_computed_goto_label,
                                BSI_NEW_STMT);

              /* Build our new computed goto.  */
              factored_computed_goto = build1 (GOTO_EXPR, void_type_node, var);
              bsi_insert_after (&new_bsi, factored_computed_goto,
                                BSI_NEW_STMT);
            }

          /* Copy the original computed goto's destination into VAR.  */
          assignment = build (MODIFY_EXPR, ptr_type_node,
                              var, GOTO_DESTINATION (last));
          bsi_insert_before (&bsi, assignment, BSI_SAME_STMT);

          /* And re-vector the computed goto to the new destination.  */
          GOTO_DESTINATION (last) = factored_label_decl;
        }
    }
}
| |
| |
| /* Create annotations for a single basic block. */ |
| |
static void
create_block_annotation (basic_block bb)
{
  /* Verify that the tree_annotations field is clear.  Annotating a
     block twice would leak the first (GC'd) annotation and hide bugs.  */
  gcc_assert (!bb->tree_annotations);
  /* ggc_alloc_cleared returns zeroed memory, so all annotation fields
     start out NULL/0.  */
  bb->tree_annotations = ggc_alloc_cleared (sizeof (struct bb_ann_d));
}
| |
| |
| /* Free the annotations for all the basic blocks. */ |
| |
static void
free_blocks_annotations (void)
{
  /* Annotations are GC-allocated, so dropping the pointers suffices;
     the collector reclaims the storage.  */
  clear_blocks_annotations ();
}
| |
| |
| /* Clear the annotations for all the basic blocks. */ |
| |
static void
clear_blocks_annotations (void)
{
  basic_block bb;

  /* Walk every block including ENTRY and EXIT (FOR_EACH_BB would skip
     them), since both get annotations in build_tree_cfg.  */
  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
    bb->tree_annotations = NULL;
}
| |
| |
| /* Build a flowgraph for the statement_list STMT_LIST. */ |
| |
static void
make_blocks (tree stmt_list)
{
  tree_stmt_iterator i = tsi_start (stmt_list);
  tree stmt = NULL;
  bool start_new_block = true;
  bool first_stmt_of_list = true;
  basic_block bb = ENTRY_BLOCK_PTR;

  while (!tsi_end_p (i))
    {
      tree prev_stmt;

      /* Remember the previous statement; stmt_starts_bb_p needs it to
         decide label-merging cases.  */
      prev_stmt = stmt;
      stmt = tsi_stmt (i);

      /* If the statement starts a new basic block or if we have determined
         in a previous pass that we need to create a new block for STMT, do
         so now.  */
      if (start_new_block || stmt_starts_bb_p (stmt, prev_stmt))
        {
          /* Split the list before STMT so the new block owns the tail;
             the very first statement keeps the original list head.  */
          if (!first_stmt_of_list)
            stmt_list = tsi_split_statement_list_before (&i);
          bb = create_basic_block (stmt_list, NULL, bb);
          start_new_block = false;
        }

      /* Now add STMT to BB and create the subgraphs for special statement
         codes.  */
      set_bb_for_stmt (stmt, bb);

      /* Side effect consumed by build_tree_cfg: trigger goto factoring.  */
      if (computed_goto_p (stmt))
        found_computed_goto = true;

      /* If STMT is a basic block terminator, set START_NEW_BLOCK for the
         next iteration.  */
      if (stmt_ends_bb_p (stmt))
        start_new_block = true;

      tsi_next (&i);
      first_stmt_of_list = false;
    }
}
| |
| |
| /* Create and return a new empty basic block after bb AFTER. */ |
| |
static basic_block
create_bb (void *h, void *e, basic_block after)
{
  basic_block bb;

  /* The tree CFG has no notion of an initial edge argument; callers
     must pass NULL.  */
  gcc_assert (!e);

  /* Create and initialize a new basic block.  Since alloc_block uses
     ggc_alloc_cleared to allocate a basic block, we do not have to
     clear the newly allocated basic block here.  */
  bb = alloc_block ();

  bb->index = last_basic_block;
  bb->flags = BB_NEW;
  /* H, if given, is the statement list this block owns.  */
  bb->stmt_list = h ? h : alloc_stmt_list ();

  /* Add the new block to the linked list of blocks.  */
  link_block (bb, after);

  /* Grow the basic block array if needed, with ~25% headroom to
     amortize repeated growth.  */
  if ((size_t) last_basic_block == VARRAY_SIZE (basic_block_info))
    {
      size_t new_size = last_basic_block + (last_basic_block + 3) / 4;
      VARRAY_GROW (basic_block_info, new_size);
    }

  /* Add the newly created block to the array.  */
  BASIC_BLOCK (last_basic_block) = bb;

  create_block_annotation (bb);

  n_basic_blocks++;
  last_basic_block++;

  initialize_bb_rbi (bb);
  return bb;
}
| |
| |
| /*--------------------------------------------------------------------------- |
| Edge creation |
| ---------------------------------------------------------------------------*/ |
| |
| /* Fold COND_EXPR_COND of each COND_EXPR. */ |
| |
| /* APPLE LOCAL mainline 4506977 */ |
| void |
| fold_cond_expr_cond (void) |
| { |
| basic_block bb; |
| |
| FOR_EACH_BB (bb) |
| { |
| tree stmt = last_stmt (bb); |
| |
| if (stmt |
| && TREE_CODE (stmt) == COND_EXPR) |
| { |
| tree cond = fold (COND_EXPR_COND (stmt)); |
| if (integer_zerop (cond)) |
| COND_EXPR_COND (stmt) = integer_zero_node; |
| else if (integer_onep (cond)) |
| COND_EXPR_COND (stmt) = integer_one_node; |
| } |
| } |
| } |
| |
| /* Join all the blocks in the flowgraph. */ |
| |
static void
make_edges (void)
{
  basic_block bb;

  /* Create an edge from entry to the first block with executable
     statements in it.  */
  make_edge (ENTRY_BLOCK_PTR, BASIC_BLOCK (0), EDGE_FALLTHRU);

  /* Traverse the basic block array placing edges.  */
  FOR_EACH_BB (bb)
    {
      tree first = first_stmt (bb);
      tree last = last_stmt (bb);

      /* Empty blocks get only the fallthru edge below.  */
      if (first)
        {
          /* Edges for statements that always alter flow control.  */
          if (is_ctrl_stmt (last))
            make_ctrl_stmt_edges (bb);

          /* Edges for statements that sometimes alter flow control.  */
          if (is_ctrl_altering_stmt (last))
            make_exit_edges (bb);
        }

      /* Finally, if no edges were created above, this is a regular
         basic block that only needs a fallthru edge.  */
      if (EDGE_COUNT (bb->succs) == 0)
        make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
    }

  /* We do not care about fake edges, so remove any that the CFG
     builder inserted for completeness.  */
  remove_fake_exit_edges ();

  /* Fold COND_EXPR_COND of each COND_EXPR.  */
  fold_cond_expr_cond ();

  /* Clean up the graph and warn for unreachable code.  */
  cleanup_tree_cfg ();
}
| |
| |
| /* Create edges for control statement at basic block BB. */ |
| |
static void
make_ctrl_stmt_edges (basic_block bb)
{
  tree last = last_stmt (bb);

  /* Callers guarantee the block ends in a control statement.  */
  gcc_assert (last);
  switch (TREE_CODE (last))
    {
    case GOTO_EXPR:
      make_goto_expr_edges (bb);
      break;

    case RETURN_EXPR:
      make_edge (bb, EXIT_BLOCK_PTR, 0);
      break;

    case COND_EXPR:
      make_cond_expr_edges (bb);
      break;

    case SWITCH_EXPR:
      make_switch_expr_edges (bb);
      break;

    case RESX_EXPR:
      make_eh_edges (last);
      /* Yet another NORETURN hack: if no EH edge was created, give the
         block a fake edge to EXIT so it is not dangling.  */
      if (EDGE_COUNT (bb->succs) == 0)
        make_edge (bb, EXIT_BLOCK_PTR, EDGE_FAKE);
      break;

    default:
      gcc_unreachable ();
    }
}
| |
| |
| /* Create exit edges for statements in block BB that alter the flow of |
| control. Statements that alter the control flow are 'goto', 'return' |
| and calls to non-returning functions. */ |
| |
static void
make_exit_edges (basic_block bb)
{
  tree last = last_stmt (bb), op;

  gcc_assert (last);
  switch (TREE_CODE (last))
    {
    case CALL_EXPR:
      /* If this function receives a nonlocal goto, then we need to
         make edges from this call site to all the nonlocal goto
         handlers.  */
      if (TREE_SIDE_EFFECTS (last)
          && current_function_has_nonlocal_label)
        make_goto_expr_edges (bb);

      /* If this statement has reachable exception handlers, then
         create abnormal edges to them.  */
      make_eh_edges (last);

      /* Some calls are known not to return.  For such calls we create
         a fake edge.

         We really need to revamp how we build edges so that it's not
         such a bloody pain to avoid creating edges for this case since
         all we do is remove these edges when we're done building the
         CFG.  */
      if (call_expr_flags (last) & ECF_NORETURN)
        {
          /* NORETURN call: fake edge only, no fallthru.  */
          make_edge (bb, EXIT_BLOCK_PTR, EDGE_FAKE);
          return;
        }

      /* Don't forget the fall-thru edge.  */
      make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
      break;

    case MODIFY_EXPR:
      /* A MODIFY_EXPR may have a CALL_EXPR on its RHS and the CALL_EXPR
         may have an abnormal edge.  Search the RHS for this case and
         create any required edges.  */
      op = get_call_expr_in (last);
      if (op && TREE_SIDE_EFFECTS (op)
          && current_function_has_nonlocal_label)
        make_goto_expr_edges (bb);

      make_eh_edges (last);
      make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
      break;

    default:
      gcc_unreachable ();
    }
}
| |
| |
| /* Create the edges for a COND_EXPR starting at block BB. |
| At this point, both clauses must contain only simple gotos. */ |
| |
| static void |
| make_cond_expr_edges (basic_block bb) |
| { |
| tree entry = last_stmt (bb); |
| basic_block then_bb, else_bb; |
| tree then_label, else_label; |
| |
| gcc_assert (entry); |
| gcc_assert (TREE_CODE (entry) == COND_EXPR); |
| |
| /* Entry basic blocks for each component. */ |
| then_label = GOTO_DESTINATION (COND_EXPR_THEN (entry)); |
| else_label = GOTO_DESTINATION (COND_EXPR_ELSE (entry)); |
| then_bb = label_to_block (then_label); |
| else_bb = label_to_block (else_label); |
| |
| make_edge (bb, then_bb, EDGE_TRUE_VALUE); |
| make_edge (bb, else_bb, EDGE_FALSE_VALUE); |
| } |
| |
| /* Hashing routine for EDGE_TO_CASES. */ |
| |
| static hashval_t |
| edge_to_cases_hash (const void *p) |
| { |
| edge e = ((struct edge_to_cases_elt *)p)->e; |
| |
| /* Hash on the edge itself (which is a pointer). */ |
| return htab_hash_pointer (e); |
| } |
| |
| /* Equality routine for EDGE_TO_CASES, edges are unique, so testing |
| for equality is just a pointer comparison. */ |
| |
| static int |
| edge_to_cases_eq (const void *p1, const void *p2) |
| { |
| edge e1 = ((struct edge_to_cases_elt *)p1)->e; |
| edge e2 = ((struct edge_to_cases_elt *)p2)->e; |
| |
| return e1 == e2; |
| } |
| |
| /* Called for each element in the hash table (P) as we delete the |
| edge to cases hash table. |
| |
| Clear all the TREE_CHAINs to prevent problems with copying of |
| SWITCH_EXPRs and structure sharing rules, then free the hash table |
| element. */ |
| |
| static void |
| edge_to_cases_cleanup (void *p) |
| { |
| struct edge_to_cases_elt *elt = p; |
| tree t, next; |
| |
| for (t = elt->case_labels; t; t = next) |
| { |
| next = TREE_CHAIN (t); |
| TREE_CHAIN (t) = NULL; |
| } |
| free (p); |
| } |
| |
| /* Start recording information mapping edges to case labels. */ |
| |
static void
start_recording_case_labels (void)
{
  /* Nested recording sessions are not supported.  */
  gcc_assert (edge_to_cases == NULL);

  /* 37 is just a small prime for the initial table size; the table
     grows on demand.  edge_to_cases_cleanup runs per-element when the
     table is deleted.  */
  edge_to_cases = htab_create (37,
                               edge_to_cases_hash,
                               edge_to_cases_eq,
                               edge_to_cases_cleanup);
}
| |
| /* Return nonzero if we are recording information for case labels. */ |
| |
| static bool |
| recording_case_labels_p (void) |
| { |
| return (edge_to_cases != NULL); |
| } |
| |
| /* Stop recording information mapping edges to case labels and |
| remove any information we have recorded. */ |
static void
end_recording_case_labels (void)
{
  /* Deleting the table invokes edge_to_cases_cleanup on each element,
     which clears the CASE_LABEL_EXPR chains.  */
  htab_delete (edge_to_cases);
  edge_to_cases = NULL;
}
| |
| /* Record that CASE_LABEL (a CASE_LABEL_EXPR) references edge E. */ |
| |
static void
record_switch_edge (edge e, tree case_label)
{
  struct edge_to_cases_elt *elt;
  void **slot;

  /* Build a hash table element so we can see if E is already
     in the table.  */
  elt = xmalloc (sizeof (struct edge_to_cases_elt));
  elt->e = e;
  elt->case_labels = case_label;

  slot = htab_find_slot (edge_to_cases, elt, INSERT);

  if (*slot == NULL)
    {
      /* E was not in the hash table.  Install E into the hash table.
         The table now owns ELT; edge_to_cases_cleanup frees it.  */
      *slot = (void *)elt;
    }
  else
    {
      /* E was already in the hash table.  Free ELT as we do not need it
         anymore.  */
      free (elt);

      /* Get the entry stored in the hash table.  */
      elt = (struct edge_to_cases_elt *) *slot;

      /* Add it to the chain of CASE_LABEL_EXPRs referencing E.  */
      TREE_CHAIN (case_label) = elt->case_labels;
      elt->case_labels = case_label;
    }
}
| |
| /* If we are inside a {start,end}_recording_cases block, then return |
| a chain of CASE_LABEL_EXPRs from T which reference E. |
| |
| Otherwise return NULL. */ |
| |
static tree
get_cases_for_edge (edge e, tree t)
{
  struct edge_to_cases_elt elt, *elt_p;
  void **slot;
  size_t i, n;
  tree vec;

  /* If we are not recording cases, then we do not have CASE_LABEL_EXPR
     chains available.  Return NULL so the caller can detect this case.  */
  if (!recording_case_labels_p ())
    return NULL;

 restart:
  /* Probe with a stack element; only the edge matters for lookup.  */
  elt.e = e;
  elt.case_labels = NULL;
  slot = htab_find_slot (edge_to_cases, &elt, NO_INSERT);

  if (slot)
    {
      elt_p = (struct edge_to_cases_elt *)*slot;
      return elt_p->case_labels;
    }

  /* If we did not find E in the hash table, then this must be the first
     time we have been queried for information about E & T.  Add all the
     elements from T to the hash table then perform the query again.
     The second lookup is guaranteed to succeed since every outgoing
     edge of E->src, including E, was just recorded.  */

  vec = SWITCH_LABELS (t);
  n = TREE_VEC_LENGTH (vec);
  for (i = 0; i < n; i++)
    {
      tree lab = CASE_LABEL (TREE_VEC_ELT (vec, i));
      basic_block label_bb = label_to_block (lab);
      record_switch_edge (find_edge (e->src, label_bb), TREE_VEC_ELT (vec, i));
    }
  goto restart;
}
| |
| /* Create the edges for a SWITCH_EXPR starting at block BB. |
| At this point, the switch body has been lowered and the |
| SWITCH_LABELS filled in, so this is in effect a multi-way branch. */ |
| |
| static void |
| make_switch_expr_edges (basic_block bb) |
| { |
| tree entry = last_stmt (bb); |
| size_t i, n; |
| tree vec; |
| |
| vec = SWITCH_LABELS (entry); |
| n = TREE_VEC_LENGTH (vec); |
| |
| for (i = 0; i < n; ++i) |
| { |
| tree lab = CASE_LABEL (TREE_VEC_ELT (vec, i)); |
| basic_block label_bb = label_to_block (lab); |
| make_edge (bb, label_bb, 0); |
| } |
| } |
| |
| |
| /* Return the basic block holding label DEST. */ |
| |
basic_block
label_to_block (tree dest)
{
  int uid = LABEL_DECL_UID (dest);

  /* We would die hard when faced by an undefined label.  Emit a label to
     the very first basic block.  This will hopefully make even the dataflow
     and undefined variable warnings quite right.  Only do this while
     recovering from user errors (errorcount/sorrycount nonzero).  */
  if ((errorcount || sorrycount) && uid < 0)
    {
      block_stmt_iterator bsi = bsi_start (BASIC_BLOCK (0));
      tree stmt;

      stmt = build1 (LABEL_EXPR, void_type_node, dest);
      bsi_insert_before (&bsi, stmt, BSI_NEW_STMT);
      /* Inserting the LABEL_EXPR assigned DEST a UID; re-read it.  */
      uid = LABEL_DECL_UID (dest);
    }
  return VARRAY_BB (label_to_block_map, uid);
}
| |
| |
| /* Create edges for a goto statement at block BB. */ |
| |
static void
make_goto_expr_edges (basic_block bb)
{
  tree goto_t, dest;
  basic_block target_bb;
  int for_call;
  block_stmt_iterator last = bsi_last (bb);

  goto_t = bsi_stmt (last);

  /* If the last statement is not a GOTO (i.e., it is a RETURN_EXPR,
     CALL_EXPR or MODIFY_EXPR), then the edge is an abnormal edge resulting
     from a nonlocal goto.  */
  if (TREE_CODE (goto_t) != GOTO_EXPR)
    {
      dest = error_mark_node;
      for_call = 1;
    }
  else
    {
      dest = GOTO_DESTINATION (goto_t);
      for_call = 0;

      /* A GOTO to a local label creates normal edges.  */
      if (simple_goto_p (goto_t))
        {
          edge e = make_edge (bb, label_to_block (dest), EDGE_FALLTHRU);
#ifdef USE_MAPPED_LOCATION
          e->goto_locus = EXPR_LOCATION (goto_t);
#else
          e->goto_locus = EXPR_LOCUS (goto_t);
#endif
          /* The GOTO statement is redundant once the edge exists.  */
          bsi_remove (&last);
          return;
        }

      /* Nothing more to do for nonlocal gotos.  */
      if (TREE_CODE (dest) == LABEL_DECL)
        return;

      /* Computed gotos remain.  */
    }

  /* Look for the block starting with the destination label.  In the
     case of a computed goto, make an edge to any label block we find
     in the CFG.  */
  FOR_EACH_BB (target_bb)
    {
      block_stmt_iterator bsi;

      for (bsi = bsi_start (target_bb); !bsi_end_p (bsi); bsi_next (&bsi))
        {
          tree target = bsi_stmt (bsi);

          /* Labels only appear at the start of a block; stop scanning
             at the first non-label statement.  */
          if (TREE_CODE (target) != LABEL_EXPR)
            break;

          if (
              /* Computed GOTOs.  Make an edge to every label block that has
                 been marked as a potential target for a computed goto.  */
              (FORCED_LABEL (LABEL_EXPR_LABEL (target)) && for_call == 0)
              /* Nonlocal GOTO target.  Make an edge to every label block
                 that has been marked as a potential target for a nonlocal
                 goto.  */
              || (DECL_NONLOCAL (LABEL_EXPR_LABEL (target)) && for_call == 1))
            {
              make_edge (bb, target_bb, EDGE_ABNORMAL);
              break;
            }
        }
    }

  /* Degenerate case of computed goto with no labels.  */
  if (!for_call && EDGE_COUNT (bb->succs) == 0)
    make_edge (bb, EXIT_BLOCK_PTR, EDGE_FAKE);
}
| |
| |
| /*--------------------------------------------------------------------------- |
| Flowgraph analysis |
| ---------------------------------------------------------------------------*/ |
| |
| /* Remove unreachable blocks and other miscellaneous clean up work. */ |
| |
/* Returns true if anything was changed.  */

bool
cleanup_tree_cfg (void)
{
  bool retval = false;

  timevar_push (TV_TREE_CLEANUP_CFG);

  retval = cleanup_control_flow ();
  retval |= delete_unreachable_blocks ();

  /* cleanup_forwarder_blocks can redirect edges out of SWITCH_EXPRs,
     which can get expensive.  So we want to enable recording of edge
     to CASE_LABEL_EXPR mappings around the call to
     cleanup_forwarder_blocks.  */
  start_recording_case_labels ();
  retval |= cleanup_forwarder_blocks ();
  end_recording_case_labels ();

#ifdef ENABLE_CHECKING
  /* If anything changed above, the cleanups must have reached a fixed
     point; re-running them must report no further changes.  */
  if (retval)
    {
      gcc_assert (!cleanup_control_flow ());
      gcc_assert (!delete_unreachable_blocks ());
      gcc_assert (!cleanup_forwarder_blocks ());
    }
#endif

  /* Merging the blocks creates no new opportunities for the other
     optimizations, so do it here.  */
  retval |= merge_seq_blocks ();

  /* Renumber blocks to close the gaps left by deletions.  */
  compact_blocks ();

#ifdef ENABLE_CHECKING
  verify_flow_info ();
#endif
  timevar_pop (TV_TREE_CLEANUP_CFG);
  return retval;
}
| |
| |
| /* Cleanup useless labels in basic blocks. This is something we wish |
| to do early because it allows us to group case labels before creating |
| the edges for the CFG, and it speeds up block statement iterators in |
| all passes later on. |
| We only run this pass once, running it more than once is probably not |
| profitable. */ |
| |
| /* A map from basic block index to the leading label of that block. */ |
| static tree *label_for_bb; |
| |
| /* Callback for for_each_eh_region. Helper for cleanup_dead_labels. */ |
| static void |
| update_eh_label (struct eh_region *region) |
| { |
| tree old_label = get_eh_region_tree_label (region); |
| if (old_label) |
| { |
| tree new_label; |
| basic_block bb = label_to_block (old_label); |
| |
| /* ??? After optimizing, there may be EH regions with labels |
| that have already been removed from the function body, so |
| there is no basic block for them. */ |
| if (! bb) |
| return; |
| |
| new_label = label_for_bb[bb->index]; |
| set_eh_region_tree_label (region, new_label); |
| } |
| } |
| |
| /* Given LABEL return the first label in the same basic block. */ |
| static tree |
| main_block_label (tree label) |
| { |
| basic_block bb = label_to_block (label); |
| |
| /* label_to_block possibly inserted undefined label into the chain. */ |
| if (!label_for_bb[bb->index]) |
| label_for_bb[bb->index] = label; |
| return label_for_bb[bb->index]; |
| } |
| |
| /* Cleanup redundant labels. This is a three-step process: |
| 1) Find the leading label for each block. |
| 2) Redirect all references to labels to the leading labels. |
| 3) Cleanup all useless labels. */ |
| |
void
cleanup_dead_labels (void)
{
  basic_block bb;
  /* Indexed by block number; freed at the end of this function.  */
  label_for_bb = xcalloc (last_basic_block, sizeof (tree));

  /* Find a suitable label for each block.  We use the first user-defined
     label if there is one, or otherwise just the first label we see.  */
  FOR_EACH_BB (bb)
    {
      block_stmt_iterator i;

      for (i = bsi_start (bb); !bsi_end_p (i); bsi_next (&i))
        {
          tree label, stmt = bsi_stmt (i);

          /* Labels can only lead a block; stop at the first non-label.  */
          if (TREE_CODE (stmt) != LABEL_EXPR)
            break;

          label = LABEL_EXPR_LABEL (stmt);

          /* If we have not yet seen a label for the current block,
             remember this one and see if there are more labels.  */
          if (! label_for_bb[bb->index])
            {
              label_for_bb[bb->index] = label;
              continue;
            }

          /* If we did see a label for the current block already, but it
             is an artificially created label, replace it if the current
             label is a user defined label.  */
          if (! DECL_ARTIFICIAL (label)
              && DECL_ARTIFICIAL (label_for_bb[bb->index]))
            {
              label_for_bb[bb->index] = label;
              break;
            }
        }
    }

  /* Now redirect all jumps/branches to the selected label.
     First do so for each block ending in a control statement.  */
  FOR_EACH_BB (bb)
    {
      tree stmt = last_stmt (bb);
      if (!stmt)
        continue;

      switch (TREE_CODE (stmt))
        {
        case COND_EXPR:
          {
            tree true_branch, false_branch;

            /* Both arms are simple gotos after lowering; rewrite each
               destination to its block's leading label.  */
            true_branch = COND_EXPR_THEN (stmt);
            false_branch = COND_EXPR_ELSE (stmt);

            GOTO_DESTINATION (true_branch)
              = main_block_label (GOTO_DESTINATION (true_branch));
            GOTO_DESTINATION (false_branch)
              = main_block_label (GOTO_DESTINATION (false_branch));

            break;
          }

        case SWITCH_EXPR:
          {
            size_t i;
            tree vec = SWITCH_LABELS (stmt);
            size_t n = TREE_VEC_LENGTH (vec);

            /* Replace all destination labels.  */
            for (i = 0; i < n; ++i)
              {
                tree elt = TREE_VEC_ELT (vec, i);
                tree label = main_block_label (CASE_LABEL (elt));
                CASE_LABEL (elt) = label;
              }
            break;
          }

        /* We have to handle GOTO_EXPRs until they're removed, and we don't
           remove them until after we've created the CFG edges.  */
        case GOTO_EXPR:
          if (! computed_goto_p (stmt))
            {
              GOTO_DESTINATION (stmt)
                = main_block_label (GOTO_DESTINATION (stmt));
              break;
            }
          /* Computed gotos keep their destination; FALLTHRU to default.  */

        default:
          break;
        }
    }

  /* EH region labels must be redirected the same way.  */
  for_each_eh_region (update_eh_label);

  /* Finally, purge dead labels.  All user-defined labels and labels that
     can be the target of non-local gotos are preserved.  */
  FOR_EACH_BB (bb)
    {
      block_stmt_iterator i;
      tree label_for_this_bb = label_for_bb[bb->index];

      if (! label_for_this_bb)
        continue;

      for (i = bsi_start (bb); !bsi_end_p (i); )
        {
          tree label, stmt = bsi_stmt (i);

          if (TREE_CODE (stmt) != LABEL_EXPR)
            break;

          label = LABEL_EXPR_LABEL (stmt);

          /* Keep the block's representative label, all user labels,
             and nonlocal-goto targets; remove the rest.  */
          if (label == label_for_this_bb
              || ! DECL_ARTIFICIAL (label)
              || DECL_NONLOCAL (label))
            bsi_next (&i);
          else
            bsi_remove (&i);
        }
    }

  free (label_for_bb);
}
| |
| /* Look for blocks ending in a multiway branch (a SWITCH_EXPR in GIMPLE), |
| and scan the sorted vector of cases. Combine the ones jumping to the |
| same label. |
| Eg. three separate entries 1: 2: 3: become one entry 1..3: */ |
| |
| void |
| group_case_labels (void) |
| { |
| basic_block bb; |
| |
| FOR_EACH_BB (bb) |
| { |
| tree stmt = last_stmt (bb); |
| if (stmt && TREE_CODE (stmt) == SWITCH_EXPR) |
| { |
| tree labels = SWITCH_LABELS (stmt); |
| int old_size = TREE_VEC_LENGTH (labels); |
| int i, j, new_size = old_size; |
| tree default_case = TREE_VEC_ELT (labels, old_size - 1); |
| tree default_label; |
| |
| /* The default label is always the last case in a switch |
| statement after gimplification. */ |
| default_label = CASE_LABEL (default_case); |
| |
| /* Look for possible opportunities to merge cases. |
| Ignore the last element of the label vector because it |
| must be the default case. */ |
| i = 0; |
| while (i < old_size - 1) |
| { |
| tree base_case, base_label, base_high, type; |
| base_case = TREE_VEC_ELT (labels, i); |
| |
| gcc_assert (base_case); |
| base_label = CASE_LABEL (base_case); |
| |
| /* Discard cases that have the same destination as the |
| default case. */ |
| if (base_label == default_label) |
| { |
| TREE_VEC_ELT (labels, i) = NULL_TREE; |
| i++; |
| new_size--; |
| continue; |
| } |
| |
| type = TREE_TYPE (CASE_LOW (base_case)); |
| base_high = CASE_HIGH (base_case) ? |
| CASE_HIGH (base_case) : CASE_LOW (base_case); |
| i++; |
| /* Try to merge case labels. Break out when we reach the end |
| of the label vector or when we cannot merge the next case |
| label with the current one. */ |
| while (i < old_size - 1) |
| { |
| tree merge_case = TREE_VEC_ELT (labels, i); |
| tree merge_label = CASE_LABEL (merge_case); |
| tree t = int_const_binop (PLUS_EXPR, base_high, |
| integer_one_node, 1); |
| |
| /* Merge the cases if they jump to the same place, |
| and their ranges are consecutive. */ |
| if (merge_label == base_label |
| && tree_int_cst_equal (CASE_LOW (merge_case), t)) |
| { |
| base_high = CASE_HIGH (merge_case) ? |
| CASE_HIGH (merge_case) : CASE_LOW (merge_case); |
| CASE_HIGH (base_case) = base_high; |
| TREE_VEC_ELT (labels, i) = NULL_TREE; |
| new_size--; |
| i++; |
| } |
| else |
| break; |
| } |
| } |
| |
| /* Compress the case labels in the label vector, and adjust the |
| length of the vector. */ |
| for (i = 0, j = 0; i < new_size; i++) |
| { |
| while (! TREE_VEC_ELT (labels, j)) |
| j++; |
| TREE_VEC_ELT (labels, i) = TREE_VEC_ELT (labels, j++); |
| } |
| TREE_VEC_LENGTH (labels) = new_size; |
| } |
| } |
| } |
| |
| /* Checks whether we can merge block B into block A. */ |
| |
| static bool |
| tree_can_merge_blocks_p (basic_block a, basic_block b) |
| { |
| tree stmt; |
| block_stmt_iterator bsi; |
| |
| if (EDGE_COUNT (a->succs) != 1) |
| return false; |
| |
| if (EDGE_SUCC (a, 0)->flags & EDGE_ABNORMAL) |
| return false; |
| |
| if (EDGE_SUCC (a, 0)->dest != b) |
| return false; |
| |
| if (EDGE_COUNT (b->preds) > 1) |
| return false; |
| |
| if (b == EXIT_BLOCK_PTR) |
| return false; |
| |
| /* If A ends by a statement causing exceptions or something similar, we |
| cannot merge the blocks. */ |
| stmt = last_stmt (a); |
| if (stmt && stmt_ends_bb_p (stmt)) |
| return false; |
| |
| /* Do not allow a block with only a non-local label to be merged. */ |
| if (stmt && TREE_CODE (stmt) == LABEL_EXPR |
| && DECL_NONLOCAL (LABEL_EXPR_LABEL (stmt))) |
| return false; |
| |
| /* There may be no phi nodes at the start of b. Most of these degenerate |
| phi nodes should be cleaned up by kill_redundant_phi_nodes. */ |
| if (phi_nodes (b)) |
| return false; |
| |
| /* Do not remove user labels. */ |
| for (bsi = bsi_start (b); !bsi_end_p (bsi); bsi_next (&bsi)) |
| { |
| stmt = bsi_stmt (bsi); |
| if (TREE_CODE (stmt) != LABEL_EXPR) |
| break; |
| if (!DECL_ARTIFICIAL (LABEL_EXPR_LABEL (stmt))) |
| return false; |
| } |
| |
| return true; |
| } |
| |
| |
| /* Merge block B into block A. */ |
| |
| static void |
| tree_merge_blocks (basic_block a, basic_block b) |
| { |
| block_stmt_iterator bsi; |
| tree_stmt_iterator last; |
| |
| if (dump_file) |
| fprintf (dump_file, "Merging blocks %d and %d\n", a->index, b->index); |
| |
| /* Ensure that B follows A. */ |
| move_block_after (b, a); |
| |
| gcc_assert (EDGE_SUCC (a, 0)->flags & EDGE_FALLTHRU); |
| gcc_assert (!last_stmt (a) || !stmt_ends_bb_p (last_stmt (a))); |
| |
| /* Remove labels from B and set bb_for_stmt to A for other statements. */ |
| for (bsi = bsi_start (b); !bsi_end_p (bsi);) |
| { |
| if (TREE_CODE (bsi_stmt (bsi)) == LABEL_EXPR) |
| bsi_remove (&bsi); |
| else |
| { |
| set_bb_for_stmt (bsi_stmt (bsi), a); |
| bsi_next (&bsi); |
| } |
| } |
| |
| /* Merge the chains. */ |
| last = tsi_last (a->stmt_list); |
| tsi_link_after (&last, b->stmt_list, TSI_NEW_STMT); |
| b->stmt_list = NULL; |
| } |
| |
| |
| /* Walk the function tree removing unnecessary statements. |
| |
| * Empty statement nodes are removed |
| |
| * Unnecessary TRY_FINALLY and TRY_CATCH blocks are removed |
| |
| * Unnecessary COND_EXPRs are removed |
| |
| * Some unnecessary BIND_EXPRs are removed |
| |
| Clearly more work could be done. The trick is doing the analysis |
| and removal fast enough to be a net improvement in compile times. |
| |
| Note that when we remove a control structure such as a COND_EXPR |
| BIND_EXPR, or TRY block, we will need to repeat this optimization pass |
| to ensure we eliminate all the useless code. */ |
| |
| struct rus_data |
| { |
| tree *last_goto; |
| bool repeat; |
| bool may_throw; |
| bool may_branch; |
| bool has_label; |
| }; |
| |
| static void remove_useless_stmts_1 (tree *, struct rus_data *); |
| |
| static bool |
| remove_useless_stmts_warn_notreached (tree stmt) |
| { |
| if (EXPR_HAS_LOCATION (stmt)) |
| { |
| location_t loc = EXPR_LOCATION (stmt); |
| if (LOCATION_LINE (loc) > 0) |
| { |
| warning ("%Hwill never be executed", &loc); |
| return true; |
| } |
| } |
| |
| switch (TREE_CODE (stmt)) |
| { |
| case STATEMENT_LIST: |
| { |
| tree_stmt_iterator i; |
| for (i = tsi_start (stmt); !tsi_end_p (i); tsi_next (&i)) |
| if (remove_useless_stmts_warn_notreached (tsi_stmt (i))) |
| return true; |
| } |
| break; |
| |
| case COND_EXPR: |
| if (remove_useless_stmts_warn_notreached (COND_EXPR_COND (stmt))) |
| return true; |
| if (remove_useless_stmts_warn_notreached (COND_EXPR_THEN (stmt))) |
| return true; |
| if (remove_useless_stmts_warn_notreached (COND_EXPR_ELSE (stmt))) |
| return true; |
| break; |
| |
| case TRY_FINALLY_EXPR: |
| case TRY_CATCH_EXPR: |
| if (remove_useless_stmts_warn_notreached (TREE_OPERAND (stmt, 0))) |
| return true; |
| if (remove_useless_stmts_warn_notreached (TREE_OPERAND (stmt, 1))) |
| return true; |
| break; |
| |
| case CATCH_EXPR: |
| return remove_useless_stmts_warn_notreached (CATCH_BODY (stmt)); |
| case EH_FILTER_EXPR: |
| return remove_useless_stmts_warn_notreached (EH_FILTER_FAILURE (stmt)); |
| case BIND_EXPR: |
| return remove_useless_stmts_warn_notreached (BIND_EXPR_BLOCK (stmt)); |
| |
| default: |
| /* Not a live container. */ |
| break; |
| } |
| |
| return false; |
| } |
| |
/* Simplify the COND_EXPR at *STMT_P: fold the condition, drop the
   conditional entirely when both arms are empty, replace it with the
   live arm when the folded condition is a constant (provided the dead
   arm contains no label that might still be jumped to), and clean up
   a few simple redundant-assignment patterns in the arms.  DATA
   carries the walk state for remove_useless_stmts_1.  */

static void
remove_useless_stmts_cond (tree *stmt_p, struct rus_data *data)
{
  tree then_clause, else_clause, cond;
  bool save_has_label, then_has_label, else_has_label;

  /* Track per-arm whether it contains a label: an arm with a label
     may be entered by a jump even if the condition proves it dead.  */
  save_has_label = data->has_label;
  data->has_label = false;
  data->last_goto = NULL;

  remove_useless_stmts_1 (&COND_EXPR_THEN (*stmt_p), data);

  then_has_label = data->has_label;
  data->has_label = false;
  data->last_goto = NULL;

  remove_useless_stmts_1 (&COND_EXPR_ELSE (*stmt_p), data);

  else_has_label = data->has_label;
  data->has_label = save_has_label | then_has_label | else_has_label;

  /* Re-read the arms: the recursive walks above may have replaced
     them.  */
  then_clause = COND_EXPR_THEN (*stmt_p);
  else_clause = COND_EXPR_ELSE (*stmt_p);
  cond = fold (COND_EXPR_COND (*stmt_p));

  /* If neither arm does anything at all, we can remove the whole IF.  */
  if (!TREE_SIDE_EFFECTS (then_clause) && !TREE_SIDE_EFFECTS (else_clause))
    {
      *stmt_p = build_empty_stmt ();
      data->repeat = true;
    }

  /* If there are no reachable statements in an arm, then we can
     zap the entire conditional.  */
  else if (integer_nonzerop (cond) && !else_has_label)
    {
      if (warn_notreached)
	remove_useless_stmts_warn_notreached (else_clause);
      *stmt_p = then_clause;
      data->repeat = true;
    }
  else if (integer_zerop (cond) && !then_has_label)
    {
      if (warn_notreached)
	remove_useless_stmts_warn_notreached (then_clause);
      *stmt_p = else_clause;
      data->repeat = true;
    }

  /* Check a couple of simple things on then/else with single stmts.  */
  else
    {
      tree then_stmt = expr_only (then_clause);
      tree else_stmt = expr_only (else_clause);

      /* Notice branches to a common destination.  */
      if (then_stmt && else_stmt
	  && TREE_CODE (then_stmt) == GOTO_EXPR
	  && TREE_CODE (else_stmt) == GOTO_EXPR
	  && (GOTO_DESTINATION (then_stmt) == GOTO_DESTINATION (else_stmt)))
	{
	  *stmt_p = then_stmt;
	  data->repeat = true;
	}

      /* If the THEN/ELSE clause merely assigns a value to a variable or
	 parameter which is already known to contain that value, then
	 remove the useless THEN/ELSE clause.  */
      else if (TREE_CODE (cond) == VAR_DECL || TREE_CODE (cond) == PARM_DECL)
	{
	  /* "if (x) ...; else x = 0;" -- the else assignment is a
	     no-op, since x is known zero on that path.  */
	  if (else_stmt
	      && TREE_CODE (else_stmt) == MODIFY_EXPR
	      && TREE_OPERAND (else_stmt, 0) == cond
	      && integer_zerop (TREE_OPERAND (else_stmt, 1)))
	    COND_EXPR_ELSE (*stmt_p) = alloc_stmt_list ();
	}
      else if ((TREE_CODE (cond) == EQ_EXPR || TREE_CODE (cond) == NE_EXPR)
	       && (TREE_CODE (TREE_OPERAND (cond, 0)) == VAR_DECL
		   || TREE_CODE (TREE_OPERAND (cond, 0)) == PARM_DECL)
	       && TREE_CONSTANT (TREE_OPERAND (cond, 1)))
	{
	  /* In the arm where VAR == CONST is known to hold, an
	     assignment "VAR = CONST" is redundant and can be emptied.  */
	  tree stmt = (TREE_CODE (cond) == EQ_EXPR
		       ? then_stmt : else_stmt);
	  tree *location = (TREE_CODE (cond) == EQ_EXPR
			    ? &COND_EXPR_THEN (*stmt_p)
			    : &COND_EXPR_ELSE (*stmt_p));

	  if (stmt
	      && TREE_CODE (stmt) == MODIFY_EXPR
	      && TREE_OPERAND (stmt, 0) == TREE_OPERAND (cond, 0)
	      && TREE_OPERAND (stmt, 1) == TREE_OPERAND (cond, 1))
	    *location = alloc_stmt_list ();
	}
    }

  /* Protect GOTOs in the arm of COND_EXPRs from being removed.  They
     would be re-introduced during lowering.  */
  data->last_goto = NULL;
}
| |
| |
| static void |
| remove_useless_stmts_tf (tree *stmt_p, struct rus_data *data) |
| { |
| bool save_may_branch, save_may_throw; |
| bool this_may_branch, this_may_throw; |
| |
| /* Collect may_branch and may_throw information for the body only. */ |
| save_may_branch = data->may_branch; |
| save_may_throw = data->may_throw; |
| data->may_branch = false; |
| data->may_throw = false; |
| data->last_goto = NULL; |
| |
| remove_useless_stmts_1 (&TREE_OPERAND (*stmt_p, 0), data); |
| |
| this_may_branch = data->may_branch; |
| this_may_throw = data->may_throw; |
| data->may_branch |= save_may_branch; |
| data->may_throw |= save_may_throw; |
| data->last_goto = NULL; |
| |
| remove_useless_stmts_1 (&TREE_OPERAND (*stmt_p, 1), data); |
| |
| /* If the body is empty, then we can emit the FINALLY block without |
| the enclosing TRY_FINALLY_EXPR. */ |
| if (!TREE_SIDE_EFFECTS (TREE_OPERAND (*stmt_p, 0))) |
| { |
| *stmt_p = TREE_OPERAND (*stmt_p, 1); |
| data->repeat = true; |
| } |
| |
| /* If the handler is empty, then we can emit the TRY block without |
| the enclosing TRY_FINALLY_EXPR. */ |
| else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (*stmt_p, 1))) |
| { |
| *stmt_p = TREE_OPERAND (*stmt_p, 0); |
| data->repeat = true; |
| } |
| |
| /* If the body neither throws, nor branches, then we can safely |
| string the TRY and FINALLY blocks together. */ |
| else if (!this_may_branch && !this_may_throw) |
| { |
| tree stmt = *stmt_p; |
| *stmt_p = TREE_OPERAND (stmt, 0); |
| append_to_statement_list (TREE_OPERAND (stmt, 1), stmt_p); |
| data->repeat = true; |
| } |
| } |
| |
| |
/* Simplify the TRY_CATCH_EXPR at *STMT_P.  If the protected body
   cannot throw, the whole construct is replaced by the body alone.
   Otherwise the handler is inspected to decide whether exceptions can
   propagate past this point, and an empty cleanup lets the body stand
   alone.  DATA accumulates may_throw information for the enclosing
   context.  */

static void
remove_useless_stmts_tc (tree *stmt_p, struct rus_data *data)
{
  bool save_may_throw, this_may_throw;
  tree_stmt_iterator i;
  tree stmt;

  /* Collect may_throw information for the body only.  */
  save_may_throw = data->may_throw;
  data->may_throw = false;
  data->last_goto = NULL;

  remove_useless_stmts_1 (&TREE_OPERAND (*stmt_p, 0), data);

  this_may_throw = data->may_throw;
  data->may_throw = save_may_throw;

  /* If the body cannot throw, then we can drop the entire TRY_CATCH_EXPR.  */
  if (!this_may_throw)
    {
      if (warn_notreached)
	remove_useless_stmts_warn_notreached (TREE_OPERAND (*stmt_p, 1));
      *stmt_p = TREE_OPERAND (*stmt_p, 0);
      data->repeat = true;
      return;
    }

  /* Process the catch clause specially.  We may be able to tell that
     no exceptions propagate past this point.  */

  this_may_throw = true;
  i = tsi_start (TREE_OPERAND (*stmt_p, 1));
  stmt = tsi_stmt (i);
  data->last_goto = NULL;

  switch (TREE_CODE (stmt))
    {
    case CATCH_EXPR:
      /* Walk every handler; clean up each handler body.  */
      for (; !tsi_end_p (i); tsi_next (&i))
	{
	  stmt = tsi_stmt (i);
	  /* If we catch all exceptions, then the body does not
	     propagate exceptions past this point.  */
	  if (CATCH_TYPES (stmt) == NULL)
	    this_may_throw = false;
	  data->last_goto = NULL;
	  remove_useless_stmts_1 (&CATCH_BODY (stmt), data);
	}
      break;

    case EH_FILTER_EXPR:
      /* A must-not-throw filter, or one with an empty type list,
	 stops propagation here.  */
      if (EH_FILTER_MUST_NOT_THROW (stmt))
	this_may_throw = false;
      else if (EH_FILTER_TYPES (stmt) == NULL)
	this_may_throw = false;
      remove_useless_stmts_1 (&EH_FILTER_FAILURE (stmt), data);
      break;

    default:
      /* Otherwise this is a cleanup.  */
      remove_useless_stmts_1 (&TREE_OPERAND (*stmt_p, 1), data);

      /* If the cleanup is empty, then we can emit the TRY block without
	 the enclosing TRY_CATCH_EXPR.  */
      if (!TREE_SIDE_EFFECTS (TREE_OPERAND (*stmt_p, 1)))
	{
	  *stmt_p = TREE_OPERAND (*stmt_p, 0);
	  data->repeat = true;
	}
      break;
    }
  data->may_throw |= this_may_throw;
}
| |
| |
| static void |
| remove_useless_stmts_bind (tree *stmt_p, struct rus_data *data) |
| { |
| tree block; |
| |
| /* First remove anything underneath the BIND_EXPR. */ |
| remove_useless_stmts_1 (&BIND_EXPR_BODY (*stmt_p), data); |
| |
| /* If the BIND_EXPR has no variables, then we can pull everything |
| up one level and remove the BIND_EXPR, unless this is the toplevel |
| BIND_EXPR for the current function or an inlined function. |
| |
| When this situation occurs we will want to apply this |
| optimization again. */ |
| block = BIND_EXPR_BLOCK (*stmt_p); |
| if (BIND_EXPR_VARS (*stmt_p) == NULL_TREE |
| && *stmt_p != DECL_SAVED_TREE (current_function_decl) |
| && (! block |
| || ! BLOCK_ABSTRACT_ORIGIN (block) |
| || (TREE_CODE (BLOCK_ABSTRACT_ORIGIN (block)) |
| != FUNCTION_DECL))) |
| { |
| *stmt_p = BIND_EXPR_BODY (*stmt_p); |
| data->repeat = true; |
| } |
| } |
| |
| |
| static void |
| remove_useless_stmts_goto (tree *stmt_p, struct rus_data *data) |
| { |
| tree dest = GOTO_DESTINATION (*stmt_p); |
| |
| data->may_branch = true; |
| data->last_goto = NULL; |
| |
| /* Record the last goto expr, so that we can delete it if unnecessary. */ |
| if (TREE_CODE (dest) == LABEL_DECL) |
| data->last_goto = stmt_p; |
| } |
| |
| |
| static void |
| remove_useless_stmts_label (tree *stmt_p, struct rus_data *data) |
| { |
| tree label = LABEL_EXPR_LABEL (*stmt_p); |
| |
| data->has_label = true; |
| |
| /* We do want to jump across non-local label receiver code. */ |
| if (DECL_NONLOCAL (label)) |
| data->last_goto = NULL; |
| |
| else if (data->last_goto && GOTO_DESTINATION (*data->last_goto) == label) |
| { |
| *data->last_goto = build_empty_stmt (); |
| data->repeat = true; |
| } |
| |
| /* ??? Add something here to delete unused labels. */ |
| } |
| |
| |
| /* If the function is "const" or "pure", then clear TREE_SIDE_EFFECTS on its |
| decl. This allows us to eliminate redundant or useless |
| calls to "const" functions. |
| |
| Gimplifier already does the same operation, but we may notice functions |
| being const and pure once their calls has been gimplified, so we need |
| to update the flag. */ |
| |
| static void |
| update_call_expr_flags (tree call) |
| { |
| tree decl = get_callee_fndecl (call); |
| if (!decl) |
| return; |
| if (call_expr_flags (call) & (ECF_CONST | ECF_PURE)) |
| TREE_SIDE_EFFECTS (call) = 0; |
| if (TREE_NOTHROW (decl)) |
| TREE_NOTHROW (call) = 1; |
| } |
| |
| |
| /* T is CALL_EXPR. Set current_function_calls_* flags. */ |
| |
| void |
| notice_special_calls (tree t) |
| { |
| int flags = call_expr_flags (t); |
| |
| if (flags & ECF_MAY_BE_ALLOCA) |
| current_function_calls_alloca = true; |
| if (flags & ECF_RETURNS_TWICE) |
| current_function_calls_setjmp = true; |
| } |
| |
| |
| /* Clear flags set by notice_special_calls. Used by dead code removal |
| to update the flags. */ |
| |
| void |
| clear_special_calls (void) |
| { |
| current_function_calls_alloca = false; |
| current_function_calls_setjmp = false; |
| } |
| |
| |
/* Core walker for useless statement removal.  Dispatch the statement
   at *TP to the matching simplifier, and keep the bookkeeping in DATA
   (last goto seen, may_branch/may_throw/has_label flags) up to date.
   Called recursively for every statement container.  */

static void
remove_useless_stmts_1 (tree *tp, struct rus_data *data)
{
  tree t = *tp, op;

  switch (TREE_CODE (t))
    {
    case COND_EXPR:
      remove_useless_stmts_cond (tp, data);
      break;

    case TRY_FINALLY_EXPR:
      remove_useless_stmts_tf (tp, data);
      break;

    case TRY_CATCH_EXPR:
      remove_useless_stmts_tc (tp, data);
      break;

    case BIND_EXPR:
      remove_useless_stmts_bind (tp, data);
      break;

    case GOTO_EXPR:
      remove_useless_stmts_goto (tp, data);
      break;

    case LABEL_EXPR:
      remove_useless_stmts_label (tp, data);
      break;

    case RETURN_EXPR:
      /* A return is a branch out of the function.  */
      fold_stmt (tp);
      data->last_goto = NULL;
      data->may_branch = true;
      break;

    case CALL_EXPR:
      fold_stmt (tp);
      data->last_goto = NULL;
      notice_special_calls (t);
      update_call_expr_flags (t);
      if (tree_could_throw_p (t))
	data->may_throw = true;
      break;

    case MODIFY_EXPR:
      data->last_goto = NULL;
      fold_stmt (tp);
      /* The RHS may contain a call; propagate its flags as well.  */
      op = get_call_expr_in (t);
      if (op)
	{
	  update_call_expr_flags (op);
	  notice_special_calls (op);
	}
      if (tree_could_throw_p (t))
	data->may_throw = true;
      break;

    case STATEMENT_LIST:
      {
	tree_stmt_iterator i = tsi_start (t);
	while (!tsi_end_p (i))
	  {
	    t = tsi_stmt (i);
	    /* Drop empty statements outright.  */
	    if (IS_EMPTY_STMT (t))
	      {
		tsi_delink (&i);
		continue;
	      }

	    remove_useless_stmts_1 (tsi_stmt_ptr (i), data);

	    /* If the simplifiers replaced the statement with a nested
	       statement list, splice it inline rather than leaving a
	       list inside a list.  */
	    t = tsi_stmt (i);
	    if (TREE_CODE (t) == STATEMENT_LIST)
	      {
		tsi_link_before (&i, t, TSI_SAME_STMT);
		tsi_delink (&i);
	      }
	    else
	      tsi_next (&i);
	  }
      }
      break;
    case ASM_EXPR:
      fold_stmt (tp);
      data->last_goto = NULL;
      break;

    default:
      data->last_goto = NULL;
      break;
    }
}
| |
| static void |
| remove_useless_stmts (void) |
| { |
| struct rus_data data; |
| |
| clear_special_calls (); |
| |
| do |
| { |
| memset (&data, 0, sizeof (data)); |
| remove_useless_stmts_1 (&DECL_SAVED_TREE (current_function_decl), &data); |
| } |
| while (data.repeat); |
| } |
| |
| |
/* Pass descriptor for the useless statement removal pass; requires
   only PROP_gimple_any and dumps the function when requested.  */
struct tree_opt_pass pass_remove_useless_stmts =
{
  "useless",				/* name */
  NULL,					/* gate */
  remove_useless_stmts,			/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  0,					/* tv_id */
  PROP_gimple_any,			/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_dump_func,			/* todo_flags_finish */
  0					/* letter */
};
| |
| |
/* Remove obviously useless statements in basic block BB.  If BB is
   entered through exactly one conditional edge, derive a (VAR, VAL)
   equality known to hold on entry and delete assignments of VAL to
   VAR until something might invalidate that knowledge.  */

static void
cfg_remove_useless_stmts_bb (basic_block bb)
{
  block_stmt_iterator bsi;
  tree stmt = NULL_TREE;
  tree cond, var = NULL_TREE, val = NULL_TREE;
  struct var_ann_d *ann;

  /* Check whether we come here from a condition, and if so, get the
     condition.  */
  if (EDGE_COUNT (bb->preds) != 1
      || !(EDGE_PRED (bb, 0)->flags & (EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
    return;

  cond = COND_EXPR_COND (last_stmt (EDGE_PRED (bb, 0)->src));

  if (TREE_CODE (cond) == VAR_DECL || TREE_CODE (cond) == PARM_DECL)
    {
      /* "if (x)": on the true edge x is known true, on the false edge
	 known false.  */
      var = cond;
      val = (EDGE_PRED (bb, 0)->flags & EDGE_FALSE_VALUE
	     ? boolean_false_node : boolean_true_node);
    }
  else if (TREE_CODE (cond) == TRUTH_NOT_EXPR
	   && (TREE_CODE (TREE_OPERAND (cond, 0)) == VAR_DECL
	       || TREE_CODE (TREE_OPERAND (cond, 0)) == PARM_DECL))
    {
      /* "if (!x)": as above with the edge senses inverted.  */
      var = TREE_OPERAND (cond, 0);
      val = (EDGE_PRED (bb, 0)->flags & EDGE_FALSE_VALUE
	     ? boolean_true_node : boolean_false_node);
    }
  else
    {
      /* General comparison: normalize to the condition that holds on
	 this edge, then accept only "VAR == VAL" forms.  */
      if (EDGE_PRED (bb, 0)->flags & EDGE_FALSE_VALUE)
	cond = invert_truthvalue (cond);
      if (TREE_CODE (cond) == EQ_EXPR
	  && (TREE_CODE (TREE_OPERAND (cond, 0)) == VAR_DECL
	      || TREE_CODE (TREE_OPERAND (cond, 0)) == PARM_DECL)
	  && (TREE_CODE (TREE_OPERAND (cond, 1)) == VAR_DECL
	      || TREE_CODE (TREE_OPERAND (cond, 1)) == PARM_DECL
	      || TREE_CONSTANT (TREE_OPERAND (cond, 1))))
	{
	  var = TREE_OPERAND (cond, 0);
	  val = TREE_OPERAND (cond, 1);
	}
      else
	return;
    }

  /* Only work for normal local variables.  */
  ann = var_ann (var);
  if (!ann
      || ann->may_aliases
      || TREE_ADDRESSABLE (var))
    return;

  if (! TREE_CONSTANT (val))
    {
      /* When VAL is itself a variable, it must be a normal local
	 variable too, or it could change behind our back.  */
      ann = var_ann (val);
      if (!ann
	  || ann->may_aliases
	  || TREE_ADDRESSABLE (val))
	return;
    }

  /* Ignore floating point variables, since comparison behaves weird for
     them.  */
  if (FLOAT_TYPE_P (TREE_TYPE (var)))
    return;

  for (bsi = bsi_start (bb); !bsi_end_p (bsi);)
    {
      stmt = bsi_stmt (bsi);

      /* If the THEN/ELSE clause merely assigns a value to a variable/parameter
	 which is already known to contain that value, then remove the useless
	 THEN/ELSE clause.  */
      if (TREE_CODE (stmt) == MODIFY_EXPR
	  && TREE_OPERAND (stmt, 0) == var
	  && operand_equal_p (val, TREE_OPERAND (stmt, 1), 0))
	{
	  bsi_remove (&bsi);
	  continue;
	}

      /* Invalidate the var if we encounter something that could modify it.
	 Likewise for the value it was previously set to.  Note that we only
	 consider values that are either a VAR_DECL or PARM_DECL so we
	 can test for conflict very simply.  */
      if (TREE_CODE (stmt) == ASM_EXPR
	  || (TREE_CODE (stmt) == MODIFY_EXPR
	      && (TREE_OPERAND (stmt, 0) == var
		  || TREE_OPERAND (stmt, 0) == val)))
	return;

      bsi_next (&bsi);
    }
}
| |
| |
| /* A CFG-aware version of remove_useless_stmts. */ |
| |
| void |
| cfg_remove_useless_stmts (void) |
| { |
| basic_block bb; |
| |
| #ifdef ENABLE_CHECKING |
| verify_flow_info (); |
| #endif |
| |
| FOR_EACH_BB (bb) |
| { |
| cfg_remove_useless_stmts_bb (bb); |
| } |
| } |
| |
| |
| /* Remove PHI nodes associated with basic block BB and all edges out of BB. */ |
| |
| static void |
| remove_phi_nodes_and_edges_for_unreachable_block (basic_block bb) |
| { |
| tree phi; |
| |
| /* Since this block is no longer reachable, we can just delete all |
| of its PHI nodes. */ |
| phi = phi_nodes (bb); |
| while (phi) |
| { |
| tree next = PHI_CHAIN (phi); |
| remove_phi_node (phi, NULL_TREE, bb); |
| phi = next; |
| } |
| |
| /* Remove edges to BB's successors. */ |
| while (EDGE_COUNT (bb->succs) > 0) |
| remove_edge (EDGE_SUCC (bb, 0)); |
| } |
| |
| |
/* Remove the statements of basic block BB, then its PHI nodes and
   outgoing edges.  FORCED_LABELs are rescued into the preceding block
   since they may still be referenced; everything else is released.
   With -Wunreachable-code, warn once at the first located statement
   removed.  */

static void
remove_bb (basic_block bb)
{
  block_stmt_iterator i;
  source_locus loc = 0;

  if (dump_file)
    {
      fprintf (dump_file, "Removing basic block %d\n", bb->index);
      if (dump_flags & TDF_DETAILS)
	{
	  dump_bb (bb, dump_file, 0);
	  fprintf (dump_file, "\n");
	}
    }

  /* Remove all the instructions in the block.  */
  for (i = bsi_start (bb); !bsi_end_p (i);)
    {
      tree stmt = bsi_stmt (i);
      if (TREE_CODE (stmt) == LABEL_EXPR
          && FORCED_LABEL (LABEL_EXPR_LABEL (stmt)))
	{
	  /* A forced label may still be referenced (e.g. by computed
	     gotos), so move it into the previous block instead of
	     deleting it.  */
	  basic_block new_bb = bb->prev_bb;
	  block_stmt_iterator new_bsi = bsi_start (new_bb);

	  bsi_remove (&i);
	  bsi_insert_before (&new_bsi, stmt, BSI_NEW_STMT);
	}
      else
        {
	  release_defs (stmt);

	  set_bb_for_stmt (stmt, NULL);
	  bsi_remove (&i);
	}

      /* Don't warn for removed gotos.  Gotos are often removed due to
	 jump threading, thus resulting in bogus warnings.  Not great,
	 since this way we lose warnings for gotos in the original
	 program that are indeed unreachable.  */
      if (TREE_CODE (stmt) != GOTO_EXPR && EXPR_HAS_LOCATION (stmt) && !loc)
	{
	  source_locus t;

#ifdef USE_MAPPED_LOCATION
	  t = EXPR_LOCATION (stmt);
#else
	  t = EXPR_LOCUS (stmt);
#endif
	  /* NOTE(review): under USE_MAPPED_LOCATION, T would be a
	     location_t rather than a pointer, so the "*t" below looks
	     suspect -- confirm against a mapped-location build.  */
	  if (t && LOCATION_LINE (*t) > 0)
	    loc = t;
	}
    }

  /* If requested, give a warning that the first statement in the
     block is unreachable.  The loop above iterates forward and the
     "!loc" guard keeps only the first location found, so LOC refers
     to the earliest located statement in the block.  */
  if (warn_notreached && loc)
#ifdef USE_MAPPED_LOCATION
    warning ("%Hwill never be executed", &loc);
#else
    warning ("%Hwill never be executed", loc);
#endif

  remove_phi_nodes_and_edges_for_unreachable_block (bb);
}
| |
/* A list of all the noreturn calls passed to modify_stmt.
   cleanup_control_flow uses it to detect cases where a mid-block
   indirect call has been turned into a noreturn call.  When this
   happens, all the instructions after the call are no longer
   reachable and must be deleted as dead.  The vector is drained
   (popped empty) each time cleanup_control_flow runs.  */

VEC(tree) *modified_noreturn_calls;
| |
| /* Try to remove superfluous control structures. */ |
| |
| static bool |
| cleanup_control_flow (void) |
| { |
| basic_block bb; |
| block_stmt_iterator bsi; |
| bool retval = false; |
| tree stmt; |
| |
| /* Detect cases where a mid-block call is now known not to return. */ |
| while (VEC_length (tree, modified_noreturn_calls)) |
| { |
| stmt = VEC_pop (tree, modified_noreturn_calls); |
| bb = bb_for_stmt (stmt); |
| if (bb != NULL && last_stmt (bb) != stmt && noreturn_call_p (stmt)) |
| split_block (bb, stmt); |
| } |
| |
| FOR_EACH_BB (bb) |
| { |
| bsi = bsi_last (bb); |
| |
| if (bsi_end_p (bsi)) |
| continue; |
| |
| stmt = bsi_stmt (bsi); |
| if (TREE_CODE (stmt) == COND_EXPR |
| || TREE_CODE (stmt) == SWITCH_EXPR) |
| retval |= cleanup_control_expr_graph (bb, bsi); |
| |
| /* Check for indirect calls that have been turned into |
| noreturn calls. */ |
| if (noreturn_call_p (stmt) && remove_fallthru_edge (bb->succs)) |
| { |
| free_dominance_info (CDI_DOMINATORS); |
| retval = true; |
| } |
| } |
| return retval; |
| } |
| |
| |
/* Disconnect an unreachable block in the control expression starting
   at block BB.  The control statement at BSI is removed and the edge
   that is always taken becomes a fallthru edge.  Return true if any
   edges were removed.  */

static bool
cleanup_control_expr_graph (basic_block bb, block_stmt_iterator bsi)
{
  edge taken_edge;
  bool retval = false;
  tree expr = bsi_stmt (bsi), val;

  if (EDGE_COUNT (bb->succs) > 1)
    {
      edge e;
      edge_iterator ei;

      switch (TREE_CODE (expr))
	{
	case COND_EXPR:
	  /* Non-constant conditions are rejected below by
	     find_taken_edge returning NULL.  */
	  val = COND_EXPR_COND (expr);
	  break;

	case SWITCH_EXPR:
	  val = SWITCH_COND (expr);
	  if (TREE_CODE (val) != INTEGER_CST)
	    return false;
	  break;

	default:
	  gcc_unreachable ();
	}

      taken_edge = find_taken_edge (bb, val);
      if (!taken_edge)
	return false;

      /* Remove all the edges except the one that is always executed,
	 folding the profile counts of removed edges into it.  */
      for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
	{
	  if (e != taken_edge)
	    {
	      taken_edge->probability += e->probability;
	      taken_edge->count += e->count;
	      remove_edge (e);
	      retval = true;
	    }
	  else
	    ei_next (&ei);
	}
      if (taken_edge->probability > REG_BR_PROB_BASE)
	taken_edge->probability = REG_BR_PROB_BASE;
    }
  else
    taken_edge = EDGE_SUCC (bb, 0);

  /* The control statement is no longer needed; its sole remaining
     successor edge becomes a plain fallthru.  */
  bsi_remove (&bsi);
  taken_edge->flags = EDGE_FALLTHRU;

  /* We removed some paths from the cfg.  */
  free_dominance_info (CDI_DOMINATORS);

  return retval;
}
| |
| /* Remove any fallthru edge from EV. Return true if an edge was removed. */ |
| |
| static bool |
| remove_fallthru_edge (VEC(edge) *ev) |
| { |
| edge_iterator ei; |
| edge e; |
| |
| FOR_EACH_EDGE (e, ei, ev) |
| if ((e->flags & EDGE_FALLTHRU) != 0) |
| { |
| remove_edge (e); |
| return true; |
| } |
| return false; |
| } |
| |
| /* Given a basic block BB ending with COND_EXPR or SWITCH_EXPR, and a |
| predicate VAL, return the edge that will be taken out of the block. |
| If VAL does not match a unique edge, NULL is returned. */ |
| |
| edge |
| find_taken_edge (basic_block bb, tree val) |
| { |
| tree stmt; |
| |
| stmt = last_stmt (bb); |
| |
| gcc_assert (stmt); |
| gcc_assert (is_ctrl_stmt (stmt)); |
| gcc_assert (val); |
| |
| if (TREE_CODE (val) != INTEGER_CST) |
| return NULL; |
| |
| if (TREE_CODE (stmt) == COND_EXPR) |
| return find_taken_edge_cond_expr (bb, val); |
| |
| if (TREE_CODE (stmt) == SWITCH_EXPR) |
| return find_taken_edge_switch_expr (bb, val); |
| |
| gcc_unreachable (); |
| } |
| |
| |
| /* Given a constant value VAL and the entry block BB to a COND_EXPR |
| statement, determine which of the two edges will be taken out of the |
| block. Return NULL if either edge may be taken. */ |
| |
| static edge |
| find_taken_edge_cond_expr (basic_block bb, tree val) |
| { |
| edge true_edge, false_edge; |
| |
| extract_true_false_edges_from_block (bb, &true_edge, &false_edge); |
| |
| gcc_assert (TREE_CODE (val) == INTEGER_CST); |
| return (zero_p (val) ? false_edge : true_edge); |
| } |
| |
| /* Given an INTEGER_CST VAL and the entry block BB to a SWITCH_EXPR |
| statement, determine which edge will be taken out of the block. Return |
| NULL if any edge may be taken. */ |
| |
| static edge |
| find_taken_edge_switch_expr (basic_block bb, tree val) |
| { |
| tree switch_expr, taken_case; |
| basic_block dest_bb; |
| edge e; |
| |
| switch_expr = last_stmt (bb); |
| taken_case = find_case_label_for_value (switch_expr, val); |
| dest_bb = label_to_block (CASE_LABEL (taken_case)); |
| |
| e = find_edge (bb, dest_bb); |
| gcc_assert (e); |
| return e; |
| } |
| |
| |
/* Return the CASE_LABEL_EXPR that SWITCH_EXPR will take for VAL.
   We can make optimal use here of the fact that the case labels are
   sorted: We can do a binary search for a case matching VAL.  If no
   case matches, the default case (the last element of the label
   vector) is returned.  */

static tree
find_case_label_for_value (tree switch_expr, tree val)
{
  tree vec = SWITCH_LABELS (switch_expr);
  size_t low, high, n = TREE_VEC_LENGTH (vec);
  tree default_case = TREE_VEC_ELT (vec, n - 1);

  /* LOW starts one *before* slot 0: since size_t is unsigned,
     "low = -1" stores SIZE_MAX, and both "high - low" and
     "(high + low) / 2" rely on modular wrap-around to behave as if
     LOW were the signed value -1.  */
  for (low = -1, high = n - 1; high - low > 1; )
    {
      size_t i = (high + low) / 2;
      tree t = TREE_VEC_ELT (vec, i);
      int cmp;

      /* Cache the result of comparing CASE_LOW and val.  */
      cmp = tree_int_cst_compare (CASE_LOW (t), val);

      if (cmp > 0)
	high = i;
      else
	low = i;

      if (CASE_HIGH (t) == NULL)
	{
	  /* A single-valued case label.  */
	  if (cmp == 0)
	    return t;
	}
      else
	{
	  /* A case range.  We can only handle integer ranges.  */
	  if (cmp <= 0 && tree_int_cst_compare (CASE_HIGH (t), val) >= 0)
	    return t;
	}
    }

  return default_case;
}
| |
| |
| /* If all the PHI nodes in DEST have alternatives for E1 and E2 and |
| those alternatives are equal in each of the PHI nodes, then return |
| true, else return false. */ |
| |
| static bool |
| phi_alternatives_equal (basic_block dest, edge e1, edge e2) |
| { |
| int n1 = e1->dest_idx; |
| int n2 = e2->dest_idx; |
| tree phi; |
| |
| for (phi = phi_nodes (dest); phi; phi = PHI_CHAIN (phi)) |
| { |
| tree val1 = PHI_ARG_DEF (phi, n1); |
| tree val2 = PHI_ARG_DEF (phi, n2); |
| |
| gcc_assert (val1 != NULL_TREE); |
| gcc_assert (val2 != NULL_TREE); |
| |
| if (!operand_equal_for_phi_arg_p (val1, val2)) |
| return false; |
| } |
| |
| return true; |
| } |
| |
| |
| /*--------------------------------------------------------------------------- |
| Debugging functions |
| ---------------------------------------------------------------------------*/ |
| |
/* Dump tree-specific information of block BB to file OUTF, indented
   by INDENT columns.  Virtual operands (TDF_VOPS) are always shown.  */

void
tree_dump_bb (basic_block bb, FILE *outf, int indent)
{
  dump_generic_bb (outf, bb, indent, TDF_VOPS);
}
| |
| |
/* Dump a basic block on stderr.  Convenience wrapper meant to be
   called from a debugger.  */

void
debug_tree_bb (basic_block bb)
{
  dump_bb (bb, stderr, 0);
}
| |
| |
| /* Dump basic block with index N on stderr. */ |
| |
| basic_block |
| debug_tree_bb_n (int n) |
| { |
| debug_tree_bb (BASIC_BLOCK (n)); |
| return BASIC_BLOCK (n); |
| } |
| |
| |
/* Dump the CFG on stderr.  Convenience wrapper meant to be called
   from a debugger.

   FLAGS are the same used by the tree dumping functions
   (see TDF_* in tree.h).  */

void
debug_tree_cfg (int flags)
{
  dump_tree_cfg (stderr, flags);
}
| |
| |
/* Dump the program showing basic block boundaries on the given FILE.

   FLAGS are the same used by the tree dumping functions (see TDF_* in
   tree.h).  With TDF_DETAILS a summary header and a brief CFG dump are
   emitted first; with TDF_STATS memory statistics are added.  The
   function body itself is always dumped last.  */

void
dump_tree_cfg (FILE *file, int flags)
{
  if (flags & TDF_DETAILS)
    {
      const char *funcname
	= lang_hooks.decl_printable_name (current_function_decl, 2);

      fputc ('\n', file);
      fprintf (file, ";; Function %s\n\n", funcname);
      fprintf (file, ";; \n%d basic blocks, %d edges, last basic block %d.\n\n",
	      n_basic_blocks, n_edges, last_basic_block);

      brief_dump_cfg (file);
      fprintf (file, "\n");
    }

  if (flags & TDF_STATS)
    dump_cfg_stats (file);

  /* TDF_BLOCKS makes the function dump annotate block boundaries.  */
  dump_function_to_file (current_function_decl, file, flags | TDF_BLOCKS);
}
| |
| |
| /* Dump CFG statistics on FILE. */ |
| |
| void |
| dump_cfg_stats (FILE *file) |
| { |
| static long max_num_merged_labels = 0; |
| unsigned long size, total = 0; |
| int n_edges; |
| basic_block bb; |
| const char * const fmt_str = "%-30s%-13s%12s\n"; |
| const char * const fmt_str_1 = "%-30s%13d%11lu%c\n"; |
| const char * const fmt_str_3 = "%-43s%11lu%c\n"; |
| const char *funcname |
| = lang_hooks.decl_printable_name (current_function_decl, 2); |
| |
| |
| fprintf (file, "\nCFG Statistics for %s\n\n", funcname); |
| |
| fprintf (file, "---------------------------------------------------------\n"); |
| fprintf (file, fmt_str, "", " Number of ", "Memory"); |
| fprintf (file, fmt_str, "", " instances ", "used "); |
| fprintf (file, "---------------------------------------------------------\n"); |
| |
| size = n_basic_blocks * sizeof (struct basic_block_def); |
| total += size; |
| fprintf (file, fmt_str_1, "Basic blocks", n_basic_blocks, |
| SCALE (size), LABEL (size)); |
| |
| n_edges = 0; |
| FOR_EACH_BB (bb) |
| n_edges += EDGE_COUNT (bb->succs); |
| size = n_edges * sizeof (struct edge_def); |
| total += size; |
| fprintf (file, fmt_str_1, "Edges", n_edges, SCALE (size), LABEL (size)); |
| |
| size = n_basic_blocks * sizeof (struct bb_ann_d); |
| total += size; |
| fprintf (file, fmt_str_1, "Basic block annotations", n_basic_blocks, |
| SCALE (size), LABEL (size)); |
| |
| fprintf (file, "---------------------------------------------------------\n"); |
| fprintf (file, fmt_str_3, "Total memory used by CFG data", SCALE (total), |
| LABEL (total)); |
| fprintf (file, "---------------------------------------------------------\n"); |
| fprintf (file, "\n"); |
| |
| if (cfg_stats.num_merged_labels > max_num_merged_labels) |
| max_num_merged_labels = cfg_stats.num_merged_labels; |
| |
| fprintf (file, "Coalesced label blocks: %ld (Max so far: %ld)\n", |
| cfg_stats.num_merged_labels, max_num_merged_labels); |
| |
| fprintf (file, "\n"); |
| } |
| |
| |
/* Dump CFG statistics on stderr.  Keep extern so that it's always
   linked in the final executable (handy from a debugger).  */

void
debug_cfg_stats (void)
{
  dump_cfg_stats (stderr);
}
| |
| |
/* Dump the flowgraph of the current function to a .vcg format FILE.
   Each basic block becomes one node labeled with the tree codes and
   line numbers of its first and last statements; edges from ENTRY and
   between blocks are emitted with a dotted linestyle when fake.  */

static void
tree_cfg2vcg (FILE *file)
{
  edge e;
  edge_iterator ei;
  basic_block bb;
  const char *funcname
    = lang_hooks.decl_printable_name (current_function_decl, 2);

  /* Write the file header.  */
  fprintf (file, "graph: { title: \"%s\"\n", funcname);
  fprintf (file, "node: { title: \"ENTRY\" label: \"ENTRY\" }\n");
  fprintf (file, "node: { title: \"EXIT\" label: \"EXIT\" }\n");

  /* Write blocks and edges.  */
  FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR->succs)
    {
      fprintf (file, "edge: { sourcename: \"ENTRY\" targetname: \"%d\"",
	       e->dest->index);

      if (e->flags & EDGE_FAKE)
	fprintf (file, " linestyle: dotted priority: 10");
      else
	fprintf (file, " linestyle: solid priority: 100");

      fprintf (file, " }\n");
    }
  fputc ('\n', file);

  FOR_EACH_BB (bb)
    {
      enum tree_code head_code, end_code;
      const char *head_name, *end_name;
      int head_line = 0;
      int end_line = 0;
      tree first = first_stmt (bb);
      tree last = last_stmt (bb);

      /* Describe the block by its first statement...  */
      if (first)
	{
	  head_code = TREE_CODE (first);
	  head_name = tree_code_name[head_code];
	  head_line = get_lineno (first);
	}
      else
	head_name = "no-statement";

      /* ... and by its last statement (a block may be empty).  */
      if (last)
	{
	  end_code = TREE_CODE (last);
	  end_name = tree_code_name[end_code];
	  end_line = get_lineno (last);
	}
      else
	end_name = "no-statement";

      fprintf (file, "node: { title: \"%d\" label: \"#%d\\n%s (%d)\\n%s (%d)\"}\n",
	       bb->index, bb->index, head_name, head_line, end_name,
	       end_line);

      FOR_EACH_EDGE (e, ei, bb->succs)
	{
	  if (e->dest == EXIT_BLOCK_PTR)
	    fprintf (file, "edge: { sourcename: \"%d\" targetname: \"EXIT\"", bb->index);
	  else
	    fprintf (file, "edge: { sourcename: \"%d\" targetname: \"%d\"", bb->index, e->dest->index);

	  if (e->flags & EDGE_FAKE)
	    fprintf (file, " priority: 10 linestyle: dotted");
	  else
	    fprintf (file, " priority: 100 linestyle: solid");

	  fprintf (file, " }\n");
	}

      if (bb->next_bb != EXIT_BLOCK_PTR)
	fputc ('\n', file);
    }

  fputs ("}\n\n", file);
}
| |
| |
| |
| /*--------------------------------------------------------------------------- |
| Miscellaneous helpers |
| ---------------------------------------------------------------------------*/ |
| |
| /* Return true if T represents a stmt that always transfers control. */ |
| |
| bool |
| is_ctrl_stmt (tree t) |
| { |
| return (TREE_CODE (t) == COND_EXPR |
| || TREE_CODE (t) == SWITCH_EXPR |
| || TREE_CODE (t) == GOTO_EXPR |
| || TREE_CODE (t) == RETURN_EXPR |
| || TREE_CODE (t) == RESX_EXPR); |
| } |
| |
| |
| /* Return true if T is a statement that may alter the flow of control |
| (e.g., a call to a non-returning function). */ |
| |
| bool |
| is_ctrl_altering_stmt (tree t) |
| { |
| tree call; |
| |
| gcc_assert (t); |
| call = get_call_expr_in (t); |
| if (call) |
| { |
| /* A non-pure/const CALL_EXPR alters flow control if the current |
| function has nonlocal labels. */ |
| if (TREE_SIDE_EFFECTS (call) && current_function_has_nonlocal_label) |
| return true; |
| |
| /* A CALL_EXPR also alters control flow if it does not return. */ |
| if (call_expr_flags (call) & ECF_NORETURN) |
| return true; |
| } |
| |
| /* If a statement can throw, it alters control flow. */ |
| return tree_can_throw_internal (t); |
| } |
| |
| |
/* Return true if T is a computed goto, i.e. a GOTO_EXPR whose
   destination is something other than a LABEL_DECL.  */

bool
computed_goto_p (tree t)
{
  return (TREE_CODE (t) == GOTO_EXPR
	  && TREE_CODE (GOTO_DESTINATION (t)) != LABEL_DECL);
}
| |
| |
/* Checks whether EXPR is a simple local goto, i.e. a GOTO_EXPR whose
   destination is a LABEL_DECL (the complement of computed_goto_p).  */

bool
simple_goto_p (tree expr)
{
  return (TREE_CODE (expr) == GOTO_EXPR
	  && TREE_CODE (GOTO_DESTINATION (expr)) == LABEL_DECL);
}
| |
| |
| /* Return true if T should start a new basic block. PREV_T is the |
| statement preceding T. It is used when T is a label or a case label. |
| Labels should only start a new basic block if their previous statement |
| wasn't a label. Otherwise, sequence of labels would generate |
| unnecessary basic blocks that only contain a single label. */ |
| |
| static inline bool |
| stmt_starts_bb_p (tree t, tree prev_t) |
| { |
| enum tree_code code; |
| |
| if (t == NULL_TREE) |
| return false; |
| |
| /* LABEL_EXPRs start a new basic block only if the preceding |
| statement wasn't a label of the same type. This prevents the |
| creation of consecutive blocks that have nothing but a single |
| label. */ |
| code = TREE_CODE (t); |
| if (code == LABEL_EXPR) |
| { |
| /* Nonlocal and computed GOTO targets always start a new block. */ |
| if (code == LABEL_EXPR |
| && (DECL_NONLOCAL (LABEL_EXPR_LABEL (t)) |
| || FORCED_LABEL (LABEL_EXPR_LABEL (t)))) |
| return true; |
| |
| if (prev_t && TREE_CODE (prev_t) == code) |
| { |
| if (DECL_NONLOCAL (LABEL_EXPR_LABEL (prev_t))) |
| return true; |
| |
| cfg_stats.num_merged_labels++; |
| return false; |
| } |
| else |
| return true; |
| } |
| |
| return false; |
| } |
| |
| |
/* Return true if T should end a basic block: either a statement that
   always transfers control, or one that may alter control flow.  */

bool
stmt_ends_bb_p (tree t)
{
  return is_ctrl_stmt (t) || is_ctrl_altering_stmt (t);
}
| |
| |
/* Add gotos that used to be represented implicitly in the CFG.  After
   this pass, each block that does not fall through to its layout
   successor ends in an explicit GOTO_EXPR, and redundant COND_EXPR
   branch bodies and trailing empty RETURN_EXPRs are removed.  */

void
disband_implicit_edges (void)
{
  basic_block bb;
  block_stmt_iterator last;
  edge e;
  edge_iterator ei;
  tree stmt, label;

  FOR_EACH_BB (bb)
    {
      last = bsi_last (bb);
      stmt = last_stmt (bb);

      if (stmt && TREE_CODE (stmt) == COND_EXPR)
	{
	  /* Remove superfluous gotos from COND_EXPR branches.  Moved
	     from cfg_remove_useless_stmts here since it violates the
	     invariants for tree--cfg correspondence and thus fits better
	     here where we do it anyway.  */
	  e = find_edge (bb, bb->next_bb);
	  if (e)
	    {
	      /* The arm that falls through to the next block needs no
		 goto; replace its body with an empty statement.  */
	      if (e->flags & EDGE_TRUE_VALUE)
		COND_EXPR_THEN (stmt) = build_empty_stmt ();
	      else if (e->flags & EDGE_FALSE_VALUE)
		COND_EXPR_ELSE (stmt) = build_empty_stmt ();
	      else
		gcc_unreachable ();
	      e->flags |= EDGE_FALLTHRU;
	    }

	  continue;
	}

      if (stmt && TREE_CODE (stmt) == RETURN_EXPR)
	{
	  /* Remove the RETURN_EXPR if we may fall through to the exit
	     instead.  */
	  gcc_assert (EDGE_COUNT (bb->succs) == 1);
	  gcc_assert (EDGE_SUCC (bb, 0)->dest == EXIT_BLOCK_PTR);

	  /* Only a valueless return directly before the exit block can
	     be dropped.  */
	  if (bb->next_bb == EXIT_BLOCK_PTR
	      && !TREE_OPERAND (stmt, 0))
	    {
	      bsi_remove (&last);
	      EDGE_SUCC (bb, 0)->flags |= EDGE_FALLTHRU;
	    }
	  continue;
	}

      /* There can be no fallthru edge if the last statement is a control
	 one.  */
      if (stmt && is_ctrl_stmt (stmt))
	continue;

      /* Find a fallthru edge and emit the goto if necessary.  */
      FOR_EACH_EDGE (e, ei, bb->succs)
	if (e->flags & EDGE_FALLTHRU)
	  break;

      if (!e || e->dest == bb->next_bb)
	continue;

      /* The fallthru target is not the layout successor, so an explicit
	 goto to its label must be materialized.  */
      gcc_assert (e->dest != EXIT_BLOCK_PTR);
      label = tree_block_label (e->dest);

      stmt = build1 (GOTO_EXPR, void_type_node, label);
#ifdef USE_MAPPED_LOCATION
      SET_EXPR_LOCATION (stmt, e->goto_locus);
#else
      SET_EXPR_LOCUS (stmt, e->goto_locus);
#endif
      bsi_insert_after (&last, stmt, BSI_NEW_STMT);
      e->flags &= ~EDGE_FALLTHRU;
    }
}
| |
/* Remove block annotations and other datastructures: frees per-block
   annotations, drops the label-to-block map, and releases the rbi pool
   used by cfglayout mode.  */

void
delete_tree_cfg_annotations (void)
{
  basic_block bb;
  if (n_basic_blocks > 0)
    free_blocks_annotations ();

  label_to_block_map = NULL;
  free_rbi_pool ();
  /* Clear the dangling rbi pointers now that the pool is gone.  */
  FOR_EACH_BB (bb)
    bb->rbi = NULL;
}
| |
| |
| /* Return the first statement in basic block BB. */ |
| |
| tree |
| first_stmt (basic_block bb) |
| { |
| block_stmt_iterator i = bsi_start (bb); |
| return !bsi_end_p (i) ? bsi_stmt (i) : NULL_TREE; |
| } |
| |
| |
| /* Return the last statement in basic block BB. */ |
| |
| tree |
| last_stmt (basic_block bb) |
| { |
| block_stmt_iterator b = bsi_last (bb); |
| return !bsi_end_p (b) ? bsi_stmt (b) : NULL_TREE; |
| } |
| |
| |
/* Return a pointer to the last statement in block BB, or NULL if the
   block is empty.  */

tree *
last_stmt_ptr (basic_block bb)
{
  block_stmt_iterator last = bsi_last (bb);
  return !bsi_end_p (last) ? bsi_stmt_ptr (last) : NULL;
}
| |
| |
| /* Return the last statement of an otherwise empty block. Return NULL |
| if the block is totally empty, or if it contains more than one |
| statement. */ |
| |
| tree |
| last_and_only_stmt (basic_block bb) |
| { |
| block_stmt_iterator i = bsi_last (bb); |
| tree last, prev; |
| |
| if (bsi_end_p (i)) |
| return NULL_TREE; |
| |
| last = bsi_stmt (i); |
| bsi_prev (&i); |
| if (bsi_end_p (i)) |
| return last; |
| |
| /* Empty statements should no longer appear in the instruction stream. |
| Everything that might have appeared before should be deleted by |
| remove_useless_stmts, and the optimizers should just bsi_remove |
| instead of smashing with build_empty_stmt. |
| |
| Thus the only thing that should appear here in a block containing |
| one executable statement is a label. */ |
| prev = bsi_stmt (i); |
| if (TREE_CODE (prev) == LABEL_EXPR) |
| return last; |
| else |
| return NULL_TREE; |
| } |
| |
| |
/* Mark BB as the basic block holding statement T.  Recurses into
   STATEMENT_LISTs; for LABEL_EXPRs the label is also entered into (or
   checked against) the label-to-block map used for goto edges.  */

void
set_bb_for_stmt (tree t, basic_block bb)
{
  if (TREE_CODE (t) == PHI_NODE)
    PHI_BB (t) = bb;
  else if (TREE_CODE (t) == STATEMENT_LIST)
    {
      tree_stmt_iterator i;
      for (i = tsi_start (t); !tsi_end_p (i); tsi_next (&i))
	set_bb_for_stmt (tsi_stmt (i), bb);
    }
  else
    {
      stmt_ann_t ann = get_stmt_ann (t);
      ann->bb = bb;

      /* If the statement is a label, add the label to block-to-labels map
	 so that we can speed up edge creation for GOTO_EXPRs.  */
      if (TREE_CODE (t) == LABEL_EXPR)
	{
	  int uid;

	  t = LABEL_EXPR_LABEL (t);
	  uid = LABEL_DECL_UID (t);
	  if (uid == -1)
	    {
	      /* First time we see this label: assign it a UID and grow
		 the map if the new UID does not fit.  */
	      LABEL_DECL_UID (t) = uid = cfun->last_label_uid++;
	      if (VARRAY_SIZE (label_to_block_map) <= (unsigned) uid)
		VARRAY_GROW (label_to_block_map, 3 * uid / 2);
	    }
	  else
	    /* We're moving an existing label.  Make sure that we've
		removed it from the old block.  */
	    gcc_assert (!bb || !VARRAY_BB (label_to_block_map, uid));
	  VARRAY_BB (label_to_block_map, uid) = bb;
	}
    }
}
| |
| /* Finds iterator for STMT. */ |
| |
| extern block_stmt_iterator |
| bsi_for_stmt (tree stmt) |
| { |
| block_stmt_iterator bsi; |
| |
| for (bsi = bsi_start (bb_for_stmt (stmt)); !bsi_end_p (bsi); bsi_next (&bsi)) |
| if (bsi_stmt (bsi) == stmt) |
| return bsi; |
| |
| gcc_unreachable (); |
| } |
| |
/* Insert statement (or statement list) T before the statement
   pointed-to by iterator I.  M specifies how to update iterator I
   after insertion (see enum bsi_iterator_update).  T is assigned to
   I's block and marked modified for operand re-scanning.  */

void
bsi_insert_before (block_stmt_iterator *i, tree t, enum bsi_iterator_update m)
{
  set_bb_for_stmt (t, i->bb);
  tsi_link_before (&i->tsi, t, m);
  modify_stmt (t);
}
| |
| |
/* Insert statement (or statement list) T after the statement
   pointed-to by iterator I.  M specifies how to update iterator I
   after insertion (see enum bsi_iterator_update).  T is assigned to
   I's block and marked modified for operand re-scanning.  */

void
bsi_insert_after (block_stmt_iterator *i, tree t, enum bsi_iterator_update m)
{
  set_bb_for_stmt (t, i->bb);
  tsi_link_after (&i->tsi, t, m);
  modify_stmt (t);
}
| |
| |
/* Remove the statement pointed to by iterator I.  The iterator is updated
   to the next statement.  The statement's block association is cleared
   before it is unlinked.  */

void
bsi_remove (block_stmt_iterator *i)
{
  tree t = bsi_stmt (*i);
  set_bb_for_stmt (t, NULL);
  tsi_delink (&i->tsi);
}
| |
| |
/* Move the statement at FROM so it comes right after the statement at TO.
   Implemented as a remove followed by a re-insert; TO keeps pointing at
   the same statement (BSI_SAME_STMT).  */

void
bsi_move_after (block_stmt_iterator *from, block_stmt_iterator *to)
{
  tree stmt = bsi_stmt (*from);
  bsi_remove (from);
  bsi_insert_after (to, stmt, BSI_SAME_STMT);
}
| |
| |
/* Move the statement at FROM so it comes right before the statement at TO.
   Implemented as a remove followed by a re-insert; TO keeps pointing at
   the same statement (BSI_SAME_STMT).  */

void
bsi_move_before (block_stmt_iterator *from, block_stmt_iterator *to)
{
  tree stmt = bsi_stmt (*from);
  bsi_remove (from);
  bsi_insert_before (to, stmt, BSI_SAME_STMT);
}
| |
| |
/* Move the statement at FROM to the end of basic block BB.  If BB ends
   in a control statement, the moved statement is placed just before it
   so the control statement stays last.  */

void
bsi_move_to_bb_end (block_stmt_iterator *from, basic_block bb)
{
  block_stmt_iterator last = bsi_last (bb);

  /* Have to check bsi_end_p because it could be an empty block.  */
  if (!bsi_end_p (last) && is_ctrl_stmt (bsi_stmt (last)))
    bsi_move_before (from, &last);
  else
    bsi_move_after (from, &last);
}
| |
| |
/* Replace the contents of the statement pointed to by iterator BSI
   with STMT.  If PRESERVE_EH_INFO is true, the exception handling
   information of the original statement is preserved.  The original's
   source location is always copied onto STMT.  */

void
bsi_replace (const block_stmt_iterator *bsi, tree stmt, bool preserve_eh_info)
{
  int eh_region;
  tree orig_stmt = bsi_stmt (*bsi);

  /* Carry over the location and block of the replaced statement.  */
  SET_EXPR_LOCUS (stmt, EXPR_LOCUS (orig_stmt));
  set_bb_for_stmt (stmt, bsi->bb);

  /* Preserve EH region information from the original statement, if
     requested by the caller.  */
  if (preserve_eh_info)
    {
      eh_region = lookup_stmt_eh_region (orig_stmt);
      if (eh_region >= 0)
	add_stmt_to_eh_region (stmt, eh_region);
    }

  *bsi_stmt_ptr (*bsi) = stmt;
  modify_stmt (stmt);
}
| |
| |
/* Insert the statement pointed-to by BSI into edge E.  Every attempt
   is made to place the statement in an existing basic block, but
   sometimes that isn't possible.  When it isn't possible, the edge is
   split and the statement is added to the new block.

   In all cases, the returned *BSI points to the correct location.  The
   return value is true if insertion should be done after the location,
   or false if it should be done before the location.  If new basic block
   has to be created, it is stored in *NEW_BB.  */

static bool
tree_find_edge_insert_loc (edge e, block_stmt_iterator *bsi,
			   basic_block *new_bb)
{
  basic_block dest, src;
  tree tmp;

  dest = e->dest;
 restart:

  /* If the destination has one predecessor which has no PHI nodes,
     insert there.  Except for the exit block.

     The requirement for no PHI nodes could be relaxed.  Basically we
     would have to examine the PHIs to prove that none of them used
     the value set by the statement we want to insert on E.  That
     hardly seems worth the effort.  */
  if (EDGE_COUNT (dest->preds) == 1
      && ! phi_nodes (dest)
      && dest != EXIT_BLOCK_PTR)
    {
      *bsi = bsi_start (dest);
      if (bsi_end_p (*bsi))
	return true;

      /* Make sure we insert after any leading labels.  */
      tmp = bsi_stmt (*bsi);
      while (TREE_CODE (tmp) == LABEL_EXPR)
	{
	  bsi_next (bsi);
	  if (bsi_end_p (*bsi))
	    break;
	  tmp = bsi_stmt (*bsi);
	}

      /* Block holds nothing but labels: append after the last one.  */
      if (bsi_end_p (*bsi))
	{
	  *bsi = bsi_last (dest);
	  return true;
	}
      else
	return false;
    }

  /* If the source has one successor, the edge is not abnormal and
     the last statement does not end a basic block, insert there.
     Except for the entry block.  */
  src = e->src;
  if ((e->flags & EDGE_ABNORMAL) == 0
      && EDGE_COUNT (src->succs) == 1
      && src != ENTRY_BLOCK_PTR)
    {
      *bsi = bsi_last (src);
      if (bsi_end_p (*bsi))
	return true;

      tmp = bsi_stmt (*bsi);
      if (!stmt_ends_bb_p (tmp))
	return true;

      /* Insert code just before returning the value.  We may need to decompose
	 the return in the case it contains non-trivial operand.  */
      if (TREE_CODE (tmp) == RETURN_EXPR)
	{
	  tree op = TREE_OPERAND (tmp, 0);
	  if (!is_gimple_val (op))
	    {
	      gcc_assert (TREE_CODE (op) == MODIFY_EXPR);
	      /* Split the assignment out of the return; the return
		 then returns the assignment's LHS.  */
	      bsi_insert_before (bsi, op, BSI_NEW_STMT);
	      TREE_OPERAND (tmp, 0) = TREE_OPERAND (op, 0);
	    }
	  bsi_prev (bsi);
	  return true;
	}
    }

  /* Otherwise, create a new basic block, and split this edge.  */
  dest = split_edge (e);
  if (new_bb)
    *new_bb = dest;
  e = EDGE_PRED (dest, 0);
  goto restart;
}
| |
| |
/* This routine will commit all pending edge insertions, creating any new
   basic blocks which are necessary.  The entry block's single successor
   edge is handled first, then every successor edge of every block.  */

void
bsi_commit_edge_inserts (void)
{
  basic_block bb;
  edge e;
  edge_iterator ei;

  bsi_commit_one_edge_insert (EDGE_SUCC (ENTRY_BLOCK_PTR, 0), NULL);

  FOR_EACH_BB (bb)
    FOR_EACH_EDGE (e, ei, bb->succs)
      bsi_commit_one_edge_insert (e, NULL);
}
| |
| |
/* Commit insertions pending at edge E.  If a new block is created, set NEW_BB
   to this block, otherwise set it to NULL.  The pending statement list is
   detached from the edge before insertion.  */

void
bsi_commit_one_edge_insert (edge e, basic_block *new_bb)
{
  if (new_bb)
    *new_bb = NULL;
  if (PENDING_STMT (e))
    {
      block_stmt_iterator bsi;
      tree stmt = PENDING_STMT (e);

      PENDING_STMT (e) = NULL_TREE;

      /* tree_find_edge_insert_loc tells us whether to insert before or
	 after the location it picks.  */
      if (tree_find_edge_insert_loc (e, &bsi, new_bb))
	bsi_insert_after (&bsi, stmt, BSI_NEW_STMT);
      else
	bsi_insert_before (&bsi, stmt, BSI_NEW_STMT);
    }
}
| |
| |
/* Add STMT to the pending list of edge E.  No actual insertion is
   made until a call to bsi_commit_edge_inserts () is made.  */

void
bsi_insert_on_edge (edge e, tree stmt)
{
  append_to_statement_list (stmt, &PENDING_STMT (e));
}
| |
/* Similar to bsi_insert_on_edge+bsi_commit_edge_inserts.  If a new
   block has to be created, it is returned; otherwise NULL.  E must not
   already have pending statements queued on it.  */

basic_block
bsi_insert_on_edge_immediate (edge e, tree stmt)
{
  block_stmt_iterator bsi;
  basic_block new_bb = NULL;

  gcc_assert (!PENDING_STMT (e));

  if (tree_find_edge_insert_loc (e, &bsi, &new_bb))
    bsi_insert_after (&bsi, stmt, BSI_NEW_STMT);
  else
    bsi_insert_before (&bsi, stmt, BSI_NEW_STMT);

  return new_bb;
}
| |
| /*--------------------------------------------------------------------------- |
| Tree specific functions for CFG manipulation |
| ---------------------------------------------------------------------------*/ |
| |
/* Reinstall those PHI arguments queued in OLD_EDGE to NEW_EDGE.  The
   queue is the PENDING_STMT list of OLD_EDGE, a TREE_LIST pairing each
   PHI result (TREE_PURPOSE) with its argument (TREE_VALUE); it is
   walked in lock-step with the PHI nodes of NEW_EDGE's destination.  */

static void
reinstall_phi_args (edge new_edge, edge old_edge)
{
  tree var, phi;

  if (!PENDING_STMT (old_edge))
    return;

  for (var = PENDING_STMT (old_edge), phi = phi_nodes (new_edge->dest);
       var && phi;
       var = TREE_CHAIN (var), phi = PHI_CHAIN (phi))
    {
      tree result = TREE_PURPOSE (var);
      tree arg = TREE_VALUE (var);

      /* The queued list and the PHI chain must stay in sync.  */
      gcc_assert (result == PHI_RESULT (phi));

      add_phi_arg (phi, arg, new_edge);
    }

  PENDING_STMT (old_edge) = NULL;
}
| |
/* Split a (typically critical) edge EDGE_IN.  Return the new block.
   Abort on abnormal edges.  The new block inherits EDGE_IN's count and
   frequency, and PHI arguments queued on the redirected edge are moved
   onto the new fallthru edge.  */

static basic_block
tree_split_edge (edge edge_in)
{
  basic_block new_bb, after_bb, dest, src;
  edge new_edge, e;

  /* Abnormal edges cannot be split.  */
  gcc_assert (!(edge_in->flags & EDGE_ABNORMAL));

  src = edge_in->src;
  dest = edge_in->dest;

  /* Place the new block in the block list.  Try to keep the new block
     near its "logical" location.  This is of most help to humans looking
     at debugging dumps.  */
  if (dest->prev_bb && find_edge (dest->prev_bb, dest))
    after_bb = edge_in->src;
  else
    after_bb = dest->prev_bb;

  new_bb = create_empty_bb (after_bb);
  new_bb->frequency = EDGE_FREQUENCY (edge_in);
  new_bb->count = edge_in->count;
  new_edge = make_edge (new_bb, dest, EDGE_FALLTHRU);
  new_edge->probability = REG_BR_PROB_BASE;
  new_edge->count = edge_in->count;

  /* Redirect the original edge into the new block, then move any PHI
     arguments that were queued on it onto the new fallthru edge.  */
  e = redirect_edge_and_branch (edge_in, new_bb);
  gcc_assert (e);
  reinstall_phi_args (new_edge, e);

  return new_bb;
}
| |
| |
| /* Return true when BB has label LABEL in it. */ |
| |
| static bool |
| has_label_p (basic_block bb, tree label) |
| { |
| block_stmt_iterator bsi; |
| |
| for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi)) |
| { |
| tree stmt = bsi_stmt (bsi); |
| |
| if (TREE_CODE (stmt) != LABEL_EXPR) |
| return false; |
| if (LABEL_EXPR_LABEL (stmt) == label) |
| return true; |
| } |
| return false; |
| } |
| |
| |
| /* Callback for walk_tree, check that all elements with address taken are |
| properly noticed as such. The DATA is an int* that is 1 if TP was seen |
| inside a PHI node. */ |
| |
| static tree |
| verify_expr (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED) |
| { |
| tree t = *tp, x; |
| bool in_phi = (data != NULL); |
| |
| if (TYPE_P (t)) |
| *walk_subtrees = 0; |
| |
| /* Check operand N for being valid GIMPLE and give error MSG if not. |
| We check for constants explicitly since they are not considered |
| gimple invariants if they overflowed. */ |
| #define CHECK_OP(N, MSG) \ |
| do { if (!CONSTANT_CLASS_P (TREE_OPERAND (t, N)) \ |
| && !is_gimple_val (TREE_OPERAND (t, N))) \ |
| { error (MSG); return TREE_OPERAND (t, N); }} while (0) |
| |
| switch (TREE_CODE (t)) |
| { |
| case SSA_NAME: |
| if (SSA_NAME_IN_FREE_LIST (t)) |
| { |
| error ("SSA name in freelist but still referenced"); |
| return *tp; |
| } |
| break; |
| |
| case MODIFY_EXPR: |
| x = TREE_OPERAND (t, 0); |
| if (TREE_CODE (x) == BIT_FIELD_REF |
| && is_gimple_reg (TREE_OPERAND (x, 0))) |
| { |
| error ("GIMPLE register modified with BIT_FIELD_REF"); |
| return t; |
| } |
| break; |
| |
| case ADDR_EXPR: |
| /* ??? tree-ssa-alias.c may have overlooked dead PHI nodes, missing |
| dead PHIs that take the address of something. But if the PHI |
| result is dead, the fact that it takes the address of anything |
| is irrelevant. Because we can not tell from here if a PHI result |
| is dead, we just skip this check for PHIs altogether. This means |
| we may be missing "valid" checks, but what can you do? |
| This was PR19217. */ |
| if (in_phi) |
| break; |
| |
| /* Skip any references (they will be checked when we recurse down the |
| tree) and ensure that any variable used as a prefix is marked |
| addressable. */ |
| for (x = TREE_OPERAND (t, 0); |
| handled_component_p (x); |
| x = TREE_OPERAND (x, 0)) |
| ; |
| |
| if (TREE_CODE (x) != VAR_DECL && TREE_CODE (x) != PARM_DECL) |
| return NULL; |
| if (!TREE_ADDRESSABLE (x)) |
| { |
| error ("address taken, but ADDRESSABLE bit not set"); |
| return x; |
| } |
| break; |
| |
| case COND_EXPR: |
| x = COND_EXPR_COND (t); |
| if (TREE_CODE (TREE_TYPE (x)) != BOOLEAN_TYPE) |
| { |
| error ("non-boolean used in condition"); |
| return x; |
| } |
| break; |
| |
| case NOP_EXPR: |
| case CONVERT_EXPR: |
| case FIX_TRUNC_EXPR: |
| case FIX_CEIL_EXPR: |
| case FIX_FLOOR_EXPR: |
| case FIX_ROUND_EXPR: |
| case FLOAT_EXPR: |
| case NEGATE_EXPR: |
| case ABS_EXPR: |
| case BIT_NOT_EXPR: |
| case NON_LVALUE_EXPR: |
| case TRUTH_NOT_EXPR: |
| CHECK_OP (0, "Invalid operand to unary operator"); |
| break; |
| |
| case REALPART_EXPR: |
| case IMAGPART_EXPR: |
| case COMPONENT_REF: |
| case ARRAY_REF: |
| case ARRAY_RANGE_REF: |
| case BIT_FIELD_REF: |
| case VIEW_CONVERT_EXPR: |
| /* We have a nest of references. Verify that each of the operands |
| that determine where to reference is either a constant or a variable, |
| verify that the base is valid, and then show we've already checked |
| the subtrees. */ |
| while (handled_component_p (t)) |
| { |
| if (TREE_CODE (t) == COMPONENT_REF && TREE_OPERAND (t, 2)) |
| CHECK_OP (2, "Invalid COMPONENT_REF offset operator"); |
| else if (TREE_CODE (t) == ARRAY_REF |
| || TREE_CODE (t) == ARRAY_RANGE_REF) |
| { |
| CHECK_OP (1, "Invalid array index."); |
| if (TREE_OPERAND (t, 2)) |
| CHECK_OP (2, "Invalid array lower bound."); |
| if (TREE_OPERAND (t, 3)) |
| CHECK_OP (3, "Invalid array stride."); |
| } |
| else if (TREE_CODE (t) == BIT_FIELD_REF) |
| { |
| CHECK_OP (1, "Invalid operand to BIT_FIELD_REF"); |
| CHECK_OP (2, "Invalid operand to BIT_FIELD_REF"); |
| } |
| |
| t = TREE_OPERAND (t, 0); |
| } |
| |
| if (!CONSTANT_CLASS_P (t) && !is_gimple_lvalue (t)) |
| { |
| error ("Invalid reference prefix."); |
| return t; |
| } |
| *walk_subtrees = 0; |
| break; |
| |
| case LT_EXPR: |
| case LE_EXPR: |
| case GT_EXPR: |
| case GE_EXPR: |
| case EQ_EXPR: |
| case NE_EXPR: |
| case UNORDERED_EXPR: |
| case ORDERED_EXPR: |
| case UNLT_EXPR: |
| case UNLE_EXPR: |
| case UNGT_EXPR: |
| case UNGE_EXPR: |
| case UNEQ_EXPR: |
| case LTGT_EXPR: |
| case PLUS_EXPR: |
| |