This conversion (from GIMPLE to RTL) is carried out by a single GCC pass, defined in gcc/cfgexpand.cc:
class pass_expand : public rtl_opt_pass
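Before diving into the pass itself, it is easy to watch it work. The snippet below is only an illustration (the file name add.c and the function add are made up for the example); -fdump-tree-optimized and -fdump-rtl-expand are the standard GCC dump flags, and the numeric prefixes of the dump file names vary between GCC versions.

/* add.c -- a minimal function to push through the expander.  */
int
add (int a, int b)
{
  return a + b;
}

/* Compile with:
       gcc -O1 -S add.c -fdump-tree-optimized -fdump-rtl-expand
   add.c.*t.optimized contains the final GIMPLE form,
   add.c.*r.expand contains the RTL emitted by this pass.  */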
The actual work happens in its execute method:
unsigned int
pass_expand::execute (function *fun)
{
basic_block bb, init_block;
edge_iterator ei;
edge e;
rtx_insn *var_seq, *var_ret_seq;
unsigned i;
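/* === Annotation: leave SSA form.  rewrite_out_of_ssa coalesces SSA names
   into partitions and records the result in SA; each partition is later
   mapped to a single RTL object (a pseudo register or a stack slot)
   through the partition_to_pseudo array allocated just below. === */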
timevar_push (TV_OUT_OF_SSA);
rewrite_out_of_ssa (&SA);
timevar_pop (TV_OUT_OF_SSA);
SA.partition_to_pseudo = XCNEWVEC (rtx, SA.map->num_partitions);
if (MAY_HAVE_DEBUG_BIND_STMTS && flag_tree_ter)
{
gimple_stmt_iterator gsi;
FOR_EACH_BB_FN (bb, cfun)
for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
if (gimple_debug_bind_p (gsi_stmt (gsi)))
avoid_deep_ter_for_debug (gsi_stmt (gsi), 0);
}
/* Mark arrays indexed with non-constant indices with TREE_ADDRESSABLE. */
auto_bitmap forced_stack_vars;
discover_nonconstant_array_refs (forced_stack_vars);
/* Make sure all values used by the optimization passes have sane
defaults. */
reg_renumber = 0;
/* Some backends want to know that we are expanding to RTL. */
currently_expanding_to_rtl = 1;
/* Dominators are not kept up-to-date as we may create new basic-blocks. */
free_dominance_info (CDI_DOMINATORS);
rtl_profile_for_bb (ENTRY_BLOCK_PTR_FOR_FN (fun));
insn_locations_init ();
if (!DECL_IS_UNDECLARED_BUILTIN (current_function_decl))
{
/* Eventually, all FEs should explicitly set function_start_locus. */
if (LOCATION_LOCUS (fun->function_start_locus) == UNKNOWN_LOCATION)
set_curr_insn_location
(DECL_SOURCE_LOCATION (current_function_decl));
else
set_curr_insn_location (fun->function_start_locus);
}
else
set_curr_insn_location (UNKNOWN_LOCATION);
prologue_location = curr_insn_location ();
#ifdef INSN_SCHEDULING
init_sched_attrs ();
#endif
/* Make sure first insn is a note even if we don't want linenums.
This makes sure the first insn will never be deleted.
Also, final expects a note to appear there. */
emit_note (NOTE_INSN_DELETED);
targetm.expand_to_rtl_hook ();
crtl->init_stack_alignment ();
fun->cfg->max_jumptable_ents = 0;
/* Resolve the function section. Some targets, like ARM EABI rely on knowledge
of the function section at expansion time to predict distance of calls. */
resolve_unique_section (current_function_decl, 0, flag_function_sections);
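/* === Annotation: give every local variable an RTL home.  expand_used_vars
   lays the stack variables out in the frame (and sets up the
   stack-protector guard when one is requested); var_ret_seq collects any
   insns it wants emitted on the return path, which are placed after
   return_label near the end of this function. === */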
/* Expand the variables recorded during gimple lowering. */
timevar_push (TV_VAR_EXPAND);
start_sequence ();
var_ret_seq = expand_used_vars (forced_stack_vars);
var_seq = get_insns ();
end_sequence ();
timevar_pop (TV_VAR_EXPAND);
/* Honor stack protection warnings. */
if (warn_stack_protect)
{
if (fun->calls_alloca)
warning (OPT_Wstack_protector,
"stack protector not protecting local variables: "
"variable length buffer");
if (has_short_buffer && !crtl->stack_protect_guard)
warning (OPT_Wstack_protector,
"stack protector not protecting function: "
"all local arrays are less than %d bytes long",
(int) param_ssp_buffer_size);
}
/* Temporarily mark PARM_DECLs and RESULT_DECLs we need to expand to
memory addressable so expand_function_start can emit the required
copies. */
auto_vec<tree, 16> marked_parms;
for (tree parm = DECL_ARGUMENTS (current_function_decl); parm;
parm = DECL_CHAIN (parm))
if (!TREE_ADDRESSABLE (parm)
&& bitmap_bit_p (forced_stack_vars, DECL_UID (parm)))
{
TREE_ADDRESSABLE (parm) = 1;
marked_parms.safe_push (parm);
}
if (DECL_RESULT (current_function_decl)
&& !TREE_ADDRESSABLE (DECL_RESULT (current_function_decl))
&& bitmap_bit_p (forced_stack_vars,
DECL_UID (DECL_RESULT (current_function_decl))))
{
TREE_ADDRESSABLE (DECL_RESULT (current_function_decl)) = 1;
marked_parms.safe_push (DECL_RESULT (current_function_decl));
}
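/* === Annotation: emit the incoming-argument code.  expand_function_start
   asks the backend where each argument arrives (register or stack) and
   copies it into the pseudo or stack slot chosen for the corresponding
   PARM_DECL; it also sets up the RTL for the return value
   (DECL_RESULT). === */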
/* Set up parameters and prepare for return, for the function. */
expand_function_start (current_function_decl);
/* Clear TREE_ADDRESSABLE again. */
while (!marked_parms.is_empty ())
TREE_ADDRESSABLE (marked_parms.pop ()) = 0;
/* If we emitted any instructions for setting up the variables,
emit them before the FUNCTION_START note. */
if (var_seq)
{
emit_insn_before (var_seq, parm_birth_insn);
/* In expand_function_end we'll insert the alloca save/restore
before parm_birth_insn. We've just inserted an alloca call.
Adjust the pointer to match. */
parm_birth_insn = var_seq;
}
/* Now propagate the RTL assignment of each partition to the
underlying var of each SSA_NAME. */
tree name;
FOR_EACH_SSA_NAME (i, name, cfun)
{
/* We might have generated new SSA names in
update_alias_info_with_stack_vars. They will have NULL
defining statements, and won't be part of the partitioning,
so ignore those. */
if (!SSA_NAME_DEF_STMT (name))
continue;
adjust_one_expanded_partition_var (name);
}
/* Clean up RTL of variables that straddle across multiple
partitions, and check that the rtl of any PARM_DECLs that are not
cleaned up is that of their default defs. */
FOR_EACH_SSA_NAME (i, name, cfun)
{
int part;
/* We might have generated new SSA names in
update_alias_info_with_stack_vars. They will have NULL
defining statements, and won't be part of the partitioning,
so ignore those. */
if (!SSA_NAME_DEF_STMT (name))
continue;
part = var_to_partition (SA.map, name);
if (part == NO_PARTITION)
continue;
/* If this decl was marked as living in multiple places, reset
this now to NULL. */
tree var = SSA_NAME_VAR (name);
if (var && DECL_RTL_IF_SET (var) == pc_rtx)
SET_DECL_RTL (var, NULL);
/* Check that the pseudos chosen by assign_parms are those of
the corresponding default defs. */
else if (SSA_NAME_IS_DEFAULT_DEF (name)
&& (TREE_CODE (var) == PARM_DECL
|| TREE_CODE (var) == RESULT_DECL))
{
rtx in = DECL_RTL_IF_SET (var);
gcc_assert (in);
rtx out = SA.partition_to_pseudo[part];
gcc_assert (in == out);
/* Now reset VAR's RTL to IN, so that the _EXPR attrs match
those expected by debug backends for each parm and for
the result. This is particularly important for stabs,
whose register elimination from parm's DECL_RTL may cause
-fcompare-debug differences as SET_DECL_RTL changes reg's
attrs. So, make sure the RTL already has the parm as the
EXPR, so that it won't change. */
SET_DECL_RTL (var, NULL_RTX);
if (MEM_P (in))
set_mem_attributes (in, var, true);
SET_DECL_RTL (var, in);
}
}
/* If this function is `main', emit a call to `__main'
to run global initializers, etc. */
if (DECL_NAME (current_function_decl)
&& MAIN_NAME_P (DECL_NAME (current_function_decl))
&& DECL_FILE_SCOPE_P (current_function_decl))
expand_main_function ();
/* Initialize the stack_protect_guard field. This must happen after the
call to __main (if any) so that the external decl is initialized. */
if (crtl->stack_protect_guard && targetm.stack_protect_runtime_enabled_p ())
stack_protect_prologue ();
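/* === Annotation: eliminate the remaining PHI nodes.  expand_phi_nodes
   turns each PHI into copies between partitions; the copies are queued on
   the incoming CFG edges and are materialized further down by
   commit_edge_insertions. === */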
expand_phi_nodes (&SA);
/* Release any stale SSA redirection data. */
redirect_edge_var_map_empty ();
/* Register rtl specific functions for cfg. */
rtl_register_cfg_hooks ();
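/* === Annotation: construct_init_block wraps the insns emitted so far
   (parameter setup and the variable sequence) into the first RTL basic
   block; construct_exit_block, called later, does the same for the
   return sequence. === */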
init_block = construct_init_block ();
/* Clear EDGE_EXECUTABLE on the entry edge(s). It is cleaned from the
remaining edges later. */
FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR_FOR_FN (fun)->succs)
e->flags &= ~EDGE_EXECUTABLE;
/* If the function has too many markers, drop them while expanding. */
if (cfun->debug_marker_count
>= param_max_debug_marker_count)
cfun->debug_nonbind_markers = false;
lab_rtx_for_bb = new hash_map<basic_block, rtx_code_label *>;
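/* === Annotation: the heart of the pass.  Each GIMPLE basic block is
   rewritten in place: expand_gimple_basic_block walks its statements and
   hands each one to expand_gimple_stmt, which goes through expand_expr
   and the target's instruction patterns to produce RTL insns.
   lab_rtx_for_bb caches the code label created for every block so that
   jumps can be emitted before their target block has been expanded. === */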
FOR_BB_BETWEEN (bb, init_block->next_bb, EXIT_BLOCK_PTR_FOR_FN (fun),
next_bb)
bb = expand_gimple_basic_block (bb, var_ret_seq != NULL_RTX);
if (MAY_HAVE_DEBUG_BIND_INSNS)
expand_debug_locations ();
if (deep_ter_debug_map)
{
delete deep_ter_debug_map;
deep_ter_debug_map = NULL;
}
/* Free stuff we no longer need after GIMPLE optimizations. */
free_dominance_info (CDI_DOMINATORS);
free_dominance_info (CDI_POST_DOMINATORS);
delete_tree_cfg_annotations (fun);
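/* === Annotation: finish_out_of_ssa releases the partition map and the
   rest of the out-of-SSA bookkeeping; from this point on the function
   body is plain RTL. === */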
timevar_push (TV_OUT_OF_SSA);
finish_out_of_ssa (&SA);
timevar_pop (TV_OUT_OF_SSA);
timevar_push (TV_POST_EXPAND);
/* We are no longer in SSA form. */
fun->gimple_df->in_ssa_p = false;
loops_state_clear (LOOP_CLOSED_SSA);
/* Expansion is used by optimization passes too, set maybe_hot_insn_p
conservatively to true until they are all profile aware. */
delete lab_rtx_for_bb;
free_histograms (fun);
construct_exit_block ();
insn_locations_finalize ();
if (var_ret_seq)
{
rtx_insn *after = return_label;
rtx_insn *next = NEXT_INSN (after);
if (next && NOTE_INSN_BASIC_BLOCK_P (next))
after = next;
emit_insn_after (var_ret_seq, after);
}
if (hwasan_sanitize_stack_p ())
hwasan_maybe_emit_frame_base_init ();
/* Zap the tree EH table. */
set_eh_throw_stmt_table (fun, NULL);
/* We need JUMP_LABEL be set in order to redirect jumps, and hence
split edges which edge insertions might do. */
rebuild_jump_labels (get_insns ());
/* If we have a single successor to the entry block, put the pending insns
after parm birth, but before NOTE_INSNS_FUNCTION_BEG. */
if (single_succ_p (ENTRY_BLOCK_PTR_FOR_FN (fun)))
{
edge e = single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (fun));
if (e->insns.r)
{
rtx_insn *insns = e->insns.r;
e->insns.r = NULL;
rebuild_jump_labels_chain (insns);
if (NOTE_P (parm_birth_insn)
&& NOTE_KIND (parm_birth_insn) == NOTE_INSN_FUNCTION_BEG)
emit_insn_before_noloc (insns, parm_birth_insn, e->dest);
else
emit_insn_after_noloc (insns, parm_birth_insn, e->dest);
}
}
/* Otherwise, as well as for other edges, take the usual way. */
commit_edge_insertions ();
/* We're done expanding trees to RTL. */
currently_expanding_to_rtl = 0;
flush_mark_addressable_queue ();
FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (fun)->next_bb,
EXIT_BLOCK_PTR_FOR_FN (fun), next_bb)
{
edge e;
edge_iterator ei;
for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
{
/* Clear EDGE_EXECUTABLE. This flag is never used in the backend. */
e->flags &= ~EDGE_EXECUTABLE;
/* At the moment not all abnormal edges match the RTL
representation. It is safe to remove them here as
find_many_sub_basic_blocks will rediscover them.
In the future we should get this fixed properly. */
if ((e->flags & EDGE_ABNORMAL)
&& !(e->flags & EDGE_SIBCALL))
remove_edge (e);
else
ei_next (&ei);
}
}
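/* === Annotation: a single GIMPLE statement may have expanded into RTL
   with internal control flow, so the blocks produced above are no longer
   guaranteed to be basic.  find_many_sub_basic_blocks rescans all of them
   and splits them at the new jumps and labels, yielding a valid RTL
   CFG. === */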
auto_sbitmap blocks (last_basic_block_for_fn (fun));
bitmap_ones (blocks);
find_many_sub_basic_blocks (blocks);
purge_all_dead_edges ();
/* After initial rtl generation, call back to finish generating
exception support code. We need to do this before cleaning up
the CFG as the code does not expect dead landing pads. */
if (fun->eh->region_tree != NULL)
finish_eh_generation ();
/* Call expand_stack_alignment after finishing all
updates to crtl->preferred_stack_boundary. */
expand_stack_alignment ();
/* Fixup REG_EQUIV notes in the prologue if there are tailcalls in this
function. */
if (crtl->tail_call_emit)
fixup_tail_calls ();
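/* === Annotation: record the patchable-entry area.  The code below
   combines -fpatchable-function-entry with any per-function
   patchable_function_entry attribute and stores the resulting NOP area
   size and entry offset in crtl, for use when the function's assembly is
   finally written out. === */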
HOST_WIDE_INT patch_area_size, patch_area_entry;
parse_and_check_patch_area (flag_patchable_function_entry, false,
&patch_area_size, &patch_area_entry);
tree patchable_function_entry_attr
= lookup_attribute ("patchable_function_entry",
DECL_ATTRIBUTES (cfun->decl));
if (patchable_function_entry_attr)
{
tree pp_val = TREE_VALUE (patchable_function_entry_attr);
tree patchable_function_entry_value1 = TREE_VALUE (pp_val);
patch_area_size = tree_to_uhwi (patchable_function_entry_value1);
patch_area_entry = 0;
if (TREE_CHAIN (pp_val) != NULL_TREE)
{
tree patchable_function_entry_value2
= TREE_VALUE (TREE_CHAIN (pp_val));
patch_area_entry = tree_to_uhwi (patchable_function_entry_value2);
}
}
if (patch_area_entry > patch_area_size)
{
if (patch_area_size > 0)
warning (OPT_Wattributes,
"patchable function entry %wu exceeds size %wu",
patch_area_entry, patch_area_size);
patch_area_entry = 0;
}
crtl->patch_area_size = patch_area_size;
crtl->patch_area_entry = patch_area_entry;
/* BB subdivision may have created basic blocks that are only reachable
from unlikely bbs but not marked as such in the profile. */
if (optimize)