Diffstat (limited to 'toolchain-layer/recipes-devtools/gcc/gcc-4.5/linaro/gcc-4.5-linaro-r99456.patch')
-rw-r--r--  toolchain-layer/recipes-devtools/gcc/gcc-4.5/linaro/gcc-4.5-linaro-r99456.patch | 3163
1 files changed, 0 insertions, 3163 deletions
diff --git a/toolchain-layer/recipes-devtools/gcc/gcc-4.5/linaro/gcc-4.5-linaro-r99456.patch b/toolchain-layer/recipes-devtools/gcc/gcc-4.5/linaro/gcc-4.5-linaro-r99456.patch
deleted file mode 100644
index 35f98d24ab..0000000000
--- a/toolchain-layer/recipes-devtools/gcc/gcc-4.5/linaro/gcc-4.5-linaro-r99456.patch
+++ /dev/null
@@ -1,3163 +0,0 @@
2011-01-03  Bernd Schmidt  <bernds@codesourcery.com>

	gcc/
	* doc/tm.texi (RETURN_ADDR_REGNUM): Document.
	* doc/md.texi (simple_return): Document pattern.
	(return): Add a sentence to clarify.
	* doc/rtl.texi (simple_return): Document.
	* doc/invoke.texi (Optimize Options): Document -fshrink-wrap.
	* common.opt (fshrink-wrap): New.
	* opts.c (decode_options): Set it for -O2 and above.
	* gengenrtl.c (special_rtx): PC, CC0, RETURN and SIMPLE_RETURN
	are special.
	* rtl.h (ANY_RETURN_P): New macro.
	(global_rtl_index): Add GR_RETURN and GR_SIMPLE_RETURN.
	(ret_rtx, simple_return_rtx): New macros.
	* genemit.c (gen_exp): RETURN and SIMPLE_RETURN have unique rtxs.
	(gen_expand, gen_split): Use ANY_RETURN_P.
	* rtl.c (copy_rtx): RETURN and SIMPLE_RETURN are shared.
	* emit-rtl.c (verify_rtx_sharing): Likewise.
	(skip_consecutive_labels): Return the argument if it is a return rtx.
	(classify_insn): Handle both kinds of return.
	(init_emit_regs): Create global rtl for ret_rtx and simple_return_rtx.
	* df-scan.c (df_uses_record): Handle SIMPLE_RETURN.
	* rtl.def (SIMPLE_RETURN): New.
	* rtlanal.c (tablejump_p): Check JUMP_LABEL for returns.
	* final.c (final_scan_insn): Recognize both kinds of return.
	* reorg.c (function_return_label, function_simple_return_label): New
	static variables.
	(end_of_function_label): Remove.
	(simplejump_or_return_p): New static function.
	(find_end_label): Add a new arg, KIND. All callers changed.
	Depending on KIND, look for a label suitable for return or
	simple_return.
	(make_return_insns): Make corresponding changes.
	(get_jump_flags): Check JUMP_LABELs for returns.
	(follow_jumps): Likewise.
	(get_branch_condition): Check target for return patterns rather
	than NULL.
	(own_thread_p): Likewise for thread.
	(steal_delay_list_from_target): Check JUMP_LABELs for returns.
	Use simplejump_or_return_p.
	(fill_simple_delay_slots): Likewise.
	(optimize_skip): Likewise.
	(fill_slots_from_thread): Likewise.
	(relax_delay_slots): Likewise.
	(dbr_schedule): Adjust handling of end_of_function_label for the
	two new variables.
	* ifcvt.c (find_if_case_1): Take care when redirecting jumps to the
	exit block.
	(dead_or_predicable): Change NEW_DEST arg to DEST_EDGE. All callers
	changed. Ensure that the right label is passed to redirect_jump.
	* jump.c (condjump_p, condjump_in_parallel_p, any_condjump_p,
	returnjump_p): Handle SIMPLE_RETURNs.
	(delete_related_insns): Check JUMP_LABEL for returns.
	(redirect_target): New static function.
	(redirect_exp_1): Use it. Handle any kind of return rtx as a label
	rather than interpreting NULL as a return.
	(redirect_jump_1): Assert that nlabel is not NULL.
	(redirect_jump): Likewise.
	(redirect_jump_2): Handle any kind of return rtx as a label rather
	than interpreting NULL as a return.
	* dwarf2out.c (compute_barrier_args_size_1): Check JUMP_LABEL for
	returns.
	* function.c (emit_return_into_block): Remove useless declaration.
	(record_hard_reg_sets, frame_required_for_rtx, gen_return_pattern,
	requires_stack_frame_p): New static functions.
	(emit_return_into_block): New arg SIMPLE_P. All callers changed.
	Generate either kind of return pattern and update the JUMP_LABEL.
	(thread_prologue_and_epilogue_insns): Implement a form of
	shrink-wrapping. Ensure JUMP_LABELs for return insns are set.
	* print-rtl.c (print_rtx): Handle returns in JUMP_LABELs.
	* cfglayout.c (fixup_reorder_chain): Ensure JUMP_LABELs for returns
	remain correct.
	* resource.c (find_dead_or_set_registers): Check JUMP_LABELs for
	returns.
	(mark_target_live_regs): Don't pass a return rtx to next_active_insn.
	* basic-block.h (force_nonfallthru_and_redirect): Declare.
	* sched-vis.c (print_pattern): Add case for SIMPLE_RETURN.
	* cfgrtl.c (force_nonfallthru_and_redirect): No longer static. New arg
	JUMP_LABEL. All callers changed. Use the label when generating
	return insns.

	* config/i386/i386.md (returns, return_str, return_cond): New
	code_iterator and corresponding code_attrs.
	(<return_str>return): Renamed from return and adapted.
	(<return_str>return_internal): Likewise for return_internal.
	(<return_str>return_internal_long): Likewise for return_internal_long.
	(<return_str>return_pop_internal): Likewise for return_pop_internal.
	(<return_str>return_indirect_internal): Likewise for
	return_indirect_internal.
	* config/i386/i386.c (ix86_expand_epilogue): Expand a simple_return as
	the last insn.
	(ix86_pad_returns): Handle both kinds of return rtx.
	* config/arm/arm.c (use_simple_return_p): New function.
	(is_jump_table): Handle returns in JUMP_LABELs.
	(output_return_instruction): New arg SIMPLE. All callers changed.
	Use it to determine which kind of return to generate.
	(arm_final_prescan_insn): Handle both kinds of return.
	* config/arm/arm.md (returns, return_str, return_simple_p,
	return_cond): New code_iterator and corresponding code_attrs.
	(<return_str>return): Renamed from return and adapted.
	(arm_<return_str>return): Renamed from arm_return and adapted.
	(cond_<return_str>return): Renamed from cond_return and adapted.
	(cond_<return_str>return_inverted): Renamed from cond_return_inverted
	and adapted.
	(epilogue): Use ret_rtx instead of gen_rtx_RETURN.
	* config/arm/thumb2.md (thumb2_<return_str>return): Renamed from
	thumb2_return and adapted.
	* config/arm/arm.h (RETURN_ADDR_REGNUM): Define.
	* config/arm/arm-protos.h (use_simple_return_p): Declare.
	(output_return_instruction): Adjust declaration.
	* config/mips/mips.c (mips_expand_epilogue): Generate a simple_return
	as final insn.
	* config/mips/mips.md (simple_return): New expander.
	(*simple_return, simple_return_internal): New patterns.
	* config/sh/sh.c (barrier_align): Handle return in a JUMP_LABEL.
	(split_branches): Don't pass a null label to redirect_jump.

	From mainline:
	* vec.h (FOR_EACH_VEC_ELT, FOR_EACH_VEC_ELT_REVERSE): New macros.
	* haifa-sched.c (find_fallthru_edge_from): Rename from
	find_fallthru_edge. All callers changed.
	* sched-int.h (find_fallthru_edge_from): Rename declaration as well.
	* basic-block.h (find_fallthru_edge): New inline function.

=== modified file 'gcc/basic-block.h'
--- old/gcc/basic-block.h	2010-09-01 13:29:58 +0000
+++ new/gcc/basic-block.h	2011-01-05 12:12:18 +0000
@@ -884,6 +884,7 @@
 
 /* In cfgrtl.c */
 extern basic_block force_nonfallthru (edge);
+extern basic_block force_nonfallthru_and_redirect (edge, basic_block, rtx);
 extern rtx block_label (basic_block);
 extern bool purge_all_dead_edges (void);
 extern bool purge_dead_edges (basic_block);
@@ -1004,6 +1005,20 @@
   return false;
 }
 
+/* Return the fallthru edge in EDGES if it exists, NULL otherwise. */
+static inline edge
+find_fallthru_edge (VEC(edge,gc) *edges)
+{
+  edge e;
+  edge_iterator ei;
+
+  FOR_EACH_EDGE (e, ei, edges)
+    if (e->flags & EDGE_FALLTHRU)
+      break;
+
+  return e;
+}
+
 /* In cfgloopmanip.c. */
 extern edge mfb_kj_edge;
 extern bool mfb_keep_just (edge);
 
=== modified file 'gcc/cfganal.c'
--- old/gcc/cfganal.c	2009-11-25 10:55:54 +0000
+++ new/gcc/cfganal.c	2011-01-05 12:12:18 +0000
@@ -271,6 +271,37 @@
       EDGE_SUCC (bb, 0)->flags |= EDGE_CAN_FALLTHRU;
       EDGE_SUCC (bb, 1)->flags |= EDGE_CAN_FALLTHRU;
     }
+  /* dwarf2out expects that a NOTE_INSN_EPILOGUE_BEGIN is always paired
+     with a return or a sibcall. Ensure that this remains the case if
+     they are in different basic blocks. */
+  FOR_EACH_BB (bb)
+    {
+      edge e;
+      edge_iterator ei;
+      rtx insn, end;
+
+      end = BB_END (bb);
+      FOR_BB_INSNS (bb, insn)
+        if (GET_CODE (insn) == NOTE
+            && NOTE_KIND (insn) == NOTE_INSN_EPILOGUE_BEG
+            && !(CALL_P (end) && SIBLING_CALL_P (end))
+            && !returnjump_p (end))
+          {
+            basic_block other_bb = NULL;
+            FOR_EACH_EDGE (e, ei, bb->succs)
+              {
+                if (e->flags & EDGE_FALLTHRU)
+                  other_bb = e->dest;
+                else
+                  e->flags &= ~EDGE_CAN_FALLTHRU;
+              }
+            FOR_EACH_EDGE (e, ei, other_bb->preds)
+              {
+                if (!(e->flags & EDGE_FALLTHRU))
+                  e->flags &= ~EDGE_CAN_FALLTHRU;
+              }
+          }
+    }
 }
 
 /* Find unreachable blocks. An unreachable block will have 0 in
 
=== modified file 'gcc/cfglayout.c'
--- old/gcc/cfglayout.c	2010-05-17 16:30:54 +0000
+++ new/gcc/cfglayout.c	2011-01-05 12:12:18 +0000
@@ -766,6 +766,7 @@
 {
   edge e_fall, e_taken, e;
   rtx bb_end_insn;
+  rtx ret_label = NULL_RTX;
   basic_block nb;
   edge_iterator ei;
 
@@ -785,6 +786,7 @@
       bb_end_insn = BB_END (bb);
       if (JUMP_P (bb_end_insn))
         {
+          ret_label = JUMP_LABEL (bb_end_insn);
           if (any_condjump_p (bb_end_insn))
             {
               /* This might happen if the conditional jump has side
@@ -899,7 +901,7 @@
         }
 
       /* We got here if we need to add a new jump insn. */
-      nb = force_nonfallthru (e_fall);
+      nb = force_nonfallthru_and_redirect (e_fall, e_fall->dest, ret_label);
       if (nb)
         {
           nb->il.rtl->visited = 1;
@@ -1118,24 +1120,30 @@
 bool
 cfg_layout_can_duplicate_bb_p (const_basic_block bb)
 {
+  rtx insn;
+
   /* Do not attempt to duplicate tablejumps, as we need to unshare
      the dispatch table. This is difficult to do, as the instructions
      computing jump destination may be hoisted outside the basic block. */
   if (tablejump_p (BB_END (bb), NULL, NULL))
     return false;
 
-  /* Do not duplicate blocks containing insns that can't be copied. */
-  if (targetm.cannot_copy_insn_p)
+  insn = BB_HEAD (bb);
+  while (1)
     {
-      rtx insn = BB_HEAD (bb);
-      while (1)
-        {
-          if (INSN_P (insn) && targetm.cannot_copy_insn_p (insn))
-            return false;
-          if (insn == BB_END (bb))
-            break;
-          insn = NEXT_INSN (insn);
-        }
+      /* Do not duplicate blocks containing insns that can't be copied. */
+      if (INSN_P (insn) && targetm.cannot_copy_insn_p
+          && targetm.cannot_copy_insn_p (insn))
+        return false;
+      /* dwarf2out expects that these notes are always paired with a
+         returnjump or sibling call. */
+      if (NOTE_P (insn) && NOTE_KIND (insn) == NOTE_INSN_EPILOGUE_BEG
+          && !returnjump_p (BB_END (bb))
+          && (!CALL_P (BB_END (bb)) || !SIBLING_CALL_P (BB_END (bb))))
+        return false;
+      if (insn == BB_END (bb))
+        break;
+      insn = NEXT_INSN (insn);
     }
 
   return true;
@@ -1167,6 +1175,9 @@
           || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
         break;
       copy = emit_copy_of_insn_after (insn, get_last_insn ());
+      if (JUMP_P (insn) && JUMP_LABEL (insn) != NULL_RTX
+          && ANY_RETURN_P (JUMP_LABEL (insn)))
+        JUMP_LABEL (copy) = JUMP_LABEL (insn);
       maybe_copy_epilogue_insn (insn, copy);
       break;
 
 
=== modified file 'gcc/cfgrtl.c'
--- old/gcc/cfgrtl.c	2010-09-20 21:30:35 +0000
+++ new/gcc/cfgrtl.c	2011-01-05 12:12:18 +0000
@@ -1107,10 +1107,13 @@
 }
 
 /* Like force_nonfallthru below, but additionally performs redirection
-   Used by redirect_edge_and_branch_force. */
+   Used by redirect_edge_and_branch_force. JUMP_LABEL is used only
+   when redirecting to the EXIT_BLOCK, it is either a return or a
+   simple_return rtx indicating which kind of returnjump to create.
+   It should be NULL otherwise. */
 
-static basic_block
-force_nonfallthru_and_redirect (edge e, basic_block target)
+basic_block
+force_nonfallthru_and_redirect (edge e, basic_block target, rtx jump_label)
 {
   basic_block jump_block, new_bb = NULL, src = e->src;
   rtx note;
@@ -1242,11 +1245,25 @@
   e->flags &= ~EDGE_FALLTHRU;
   if (target == EXIT_BLOCK_PTR)
     {
+      if (jump_label == ret_rtx)
+        {
 #ifdef HAVE_return
-      emit_jump_insn_after_setloc (gen_return (), BB_END (jump_block), loc);
-#else
-      gcc_unreachable ();
-#endif
+          emit_jump_insn_after_setloc (gen_return (), BB_END (jump_block),
+                                       loc);
+#else
+          gcc_unreachable ();
+#endif
+        }
+      else
+        {
+          gcc_assert (jump_label == simple_return_rtx);
+#ifdef HAVE_simple_return
+          emit_jump_insn_after_setloc (gen_simple_return (),
+                                       BB_END (jump_block), loc);
+#else
+          gcc_unreachable ();
+#endif
+        }
     }
   else
     {
@@ -1273,7 +1290,7 @@
 basic_block
 force_nonfallthru (edge e)
 {
-  return force_nonfallthru_and_redirect (e, e->dest);
+  return force_nonfallthru_and_redirect (e, e->dest, NULL_RTX);
 }
 
 /* Redirect edge even at the expense of creating new jump insn or
@@ -1290,7 +1307,7 @@
   /* In case the edge redirection failed, try to force it to be non-fallthru
      and redirect newly created simplejump. */
   df_set_bb_dirty (e->src);
-  return force_nonfallthru_and_redirect (e, target);
+  return force_nonfallthru_and_redirect (e, target, NULL_RTX);
 }
 
 /* The given edge should potentially be a fallthru edge. If that is in
 
=== modified file 'gcc/common.opt'
--- old/gcc/common.opt	2010-12-10 15:33:37 +0000
+++ new/gcc/common.opt	2011-01-05 12:12:18 +0000
@@ -1147,6 +1147,11 @@
 Common C ObjC C++ ObjC++ Report Var(flag_show_column) Init(1)
 Show column numbers in diagnostics, when available. Default on
 
+fshrink-wrap
+Common Report Var(flag_shrink_wrap) Optimization
+Emit function prologues only before parts of the function that need it,
+rather than at the top of the function.
+
 fsignaling-nans
 Common Report Var(flag_signaling_nans) Optimization
 Disable optimizations observable by IEEE signaling NaNs
 
=== modified file 'gcc/config/arm/arm-protos.h'
--- old/gcc/config/arm/arm-protos.h	2010-11-04 10:45:05 +0000
+++ new/gcc/config/arm/arm-protos.h	2011-01-05 12:12:18 +0000
@@ -26,6 +26,7 @@
 extern void arm_override_options (void);
 extern void arm_optimization_options (int, int);
 extern int use_return_insn (int, rtx);
+extern bool use_simple_return_p (void);
 extern enum reg_class arm_regno_class (int);
 extern void arm_load_pic_register (unsigned long);
 extern int arm_volatile_func (void);
@@ -137,7 +138,7 @@
 extern const char *output_add_immediate (rtx *);
 extern const char *arithmetic_instr (rtx, int);
 extern void output_ascii_pseudo_op (FILE *, const unsigned char *, int);
-extern const char *output_return_instruction (rtx, int, int);
+extern const char *output_return_instruction (rtx, bool, bool, bool);
 extern void arm_poke_function_name (FILE *, const char *);
 extern void arm_print_operand (FILE *, rtx, int);
 extern void arm_print_operand_address (FILE *, rtx);
 
=== modified file 'gcc/config/arm/arm.c'
--- old/gcc/config/arm/arm.c	2011-01-05 11:32:50 +0000
+++ new/gcc/config/arm/arm.c	2011-01-05 12:12:18 +0000
@@ -2163,6 +2163,18 @@
   return addr;
 }
 
+/* Return true if we should try to use a simple_return insn, i.e. perform
+   shrink-wrapping if possible. This is the case if we need to emit a
+   prologue, which we can test by looking at the offsets. */
+bool
+use_simple_return_p (void)
+{
+  arm_stack_offsets *offsets;
+
+  offsets = arm_get_frame_offsets ();
+  return offsets->outgoing_args != 0;
+}
+
 /* Return 1 if it is possible to return using a single instruction.
    If SIBLING is non-null, this is a test for a return before a sibling
    call. SIBLING is the call insn, so we can examine its register usage. */
@@ -11284,6 +11296,7 @@
 
   if (GET_CODE (insn) == JUMP_INSN
       && JUMP_LABEL (insn) != NULL
+      && !ANY_RETURN_P (JUMP_LABEL (insn))
       && ((table = next_real_insn (JUMP_LABEL (insn)))
           == next_real_insn (insn))
       && table != NULL
@@ -14168,7 +14181,7 @@
 /* Generate a function exit sequence. If REALLY_RETURN is false, then do
    everything bar the final return instruction. */
 const char *
-output_return_instruction (rtx operand, int really_return, int reverse)
+output_return_instruction (rtx operand, bool really_return, bool reverse, bool simple)
 {
   char conditional[10];
   char instr[100];
@@ -14206,10 +14219,15 @@
 
   sprintf (conditional, "%%?%%%c0", reverse ? 'D' : 'd');
 
-  cfun->machine->return_used_this_function = 1;
+  if (simple)
+    live_regs_mask = 0;
+  else
+    {
+      cfun->machine->return_used_this_function = 1;
 
-  offsets = arm_get_frame_offsets ();
-  live_regs_mask = offsets->saved_regs_mask;
+      offsets = arm_get_frame_offsets ();
+      live_regs_mask = offsets->saved_regs_mask;
+    }
 
   if (live_regs_mask)
     {
@@ -17108,6 +17126,7 @@
 
   /* If we start with a return insn, we only succeed if we find another one. */
   int seeking_return = 0;
+  enum rtx_code return_code = UNKNOWN;
 
   /* START_INSN will hold the insn from where we start looking. This is the
      first insn after the following code_label if REVERSE is true. */
@@ -17146,7 +17165,7 @@
          else
            return;
        }
-      else if (GET_CODE (body) == RETURN)
+      else if (ANY_RETURN_P (body))
        {
          start_insn = next_nonnote_insn (start_insn);
          if (GET_CODE (start_insn) == BARRIER)
@@ -17157,6 +17176,7 @@
            {
              reverse = TRUE;
              seeking_return = 1;
+             return_code = GET_CODE (body);
            }
          else
            return;
@@ -17197,11 +17217,15 @@
          label = XEXP (XEXP (SET_SRC (body), 2), 0);
          then_not_else = FALSE;
        }
-      else if (GET_CODE (XEXP (SET_SRC (body), 1)) == RETURN)
-        seeking_return = 1;
-      else if (GET_CODE (XEXP (SET_SRC (body), 2)) == RETURN)
+      else if (ANY_RETURN_P (XEXP (SET_SRC (body), 1)))
+        {
+          seeking_return = 1;
+          return_code = GET_CODE (XEXP (SET_SRC (body), 1));
+        }
+      else if (ANY_RETURN_P (XEXP (SET_SRC (body), 2)))
        {
          seeking_return = 1;
+         return_code = GET_CODE (XEXP (SET_SRC (body), 2));
          then_not_else = FALSE;
        }
       else
@@ -17302,8 +17326,7 @@
              && !use_return_insn (TRUE, NULL)
              && !optimize_size)
            fail = TRUE;
-          else if (GET_CODE (scanbody) == RETURN
-                   && seeking_return)
+          else if (GET_CODE (scanbody) == return_code)
            {
              arm_ccfsm_state = 2;
              succeed = TRUE;
 
=== modified file 'gcc/config/arm/arm.h'
--- old/gcc/config/arm/arm.h	2010-11-11 11:12:14 +0000
+++ new/gcc/config/arm/arm.h	2011-01-05 12:12:18 +0000
@@ -2622,6 +2622,8 @@
 #define RETURN_ADDR_RTX(COUNT, FRAME) \
   arm_return_addr (COUNT, FRAME)
 
+#define RETURN_ADDR_REGNUM LR_REGNUM
+
 /* Mask of the bits in the PC that contain the real return address
    when running in 26-bit mode. */
 #define RETURN_ADDR_MASK26 (0x03fffffc)
 
=== modified file 'gcc/config/arm/arm.md'
--- old/gcc/config/arm/arm.md	2011-01-05 11:52:16 +0000
+++ new/gcc/config/arm/arm.md	2011-01-05 12:12:18 +0000
@@ -8882,66 +8882,72 @@
   [(set_attr "type" "call")]
 )
 
-(define_expand "return"
-  [(return)]
-  "TARGET_32BIT && USE_RETURN_INSN (FALSE)"
+;; Both kinds of return insn.
+(define_code_iterator returns [return simple_return])
+(define_code_attr return_str [(return "") (simple_return "simple_")])
+(define_code_attr return_simple_p [(return "false") (simple_return "true")])
+(define_code_attr return_cond [(return " && USE_RETURN_INSN (FALSE)")
+                               (simple_return " && use_simple_return_p ()")])
+
+(define_expand "<return_str>return"
+  [(returns)]
+  "TARGET_32BIT<return_cond>"
   "")
 
-;; Often the return insn will be the same as loading from memory, so set attr
-(define_insn "*arm_return"
-  [(return)]
-  "TARGET_ARM && USE_RETURN_INSN (FALSE)"
-  "*
-  {
-    if (arm_ccfsm_state == 2)
-      {
-        arm_ccfsm_state += 2;
-        return \"\";
-      }
-    return output_return_instruction (const_true_rtx, TRUE, FALSE);
-  }"
+(define_insn "*arm_<return_str>return"
+  [(returns)]
+  "TARGET_ARM<return_cond>"
+{
+  if (arm_ccfsm_state == 2)
+    {
+      arm_ccfsm_state += 2;
+      return "";
+    }
+  return output_return_instruction (const_true_rtx, true, false,
+                                    <return_simple_p>);
+}
   [(set_attr "type" "load1")
    (set_attr "length" "12")
    (set_attr "predicable" "yes")]
 )
 
-(define_insn "*cond_return"
+(define_insn "*cond_<return_str>return"
   [(set (pc)
         (if_then_else (match_operator 0 "arm_comparison_operator"
                        [(match_operand 1 "cc_register" "") (const_int 0)])
-                      (return)
+                      (returns)
                       (pc)))]
-  "TARGET_ARM && USE_RETURN_INSN (TRUE)"
-  "*
-  {
-    if (arm_ccfsm_state == 2)
-      {
-        arm_ccfsm_state += 2;
-        return \"\";
-      }
-    return output_return_instruction (operands[0], TRUE, FALSE);
-  }"
+  "TARGET_ARM<return_cond>"
+{
+  if (arm_ccfsm_state == 2)
+    {
+      arm_ccfsm_state += 2;
+      return "";
+    }
+  return output_return_instruction (operands[0], true, false,
+                                    <return_simple_p>);
+}
   [(set_attr "conds" "use")
    (set_attr "length" "12")
    (set_attr "type" "load1")]
 )
 
-(define_insn "*cond_return_inverted"
+(define_insn "*cond_<return_str>return_inverted"
   [(set (pc)
         (if_then_else (match_operator 0 "arm_comparison_operator"
                        [(match_operand 1 "cc_register" "") (const_int 0)])
                       (pc)
-                      (return)))]
-  "TARGET_ARM && USE_RETURN_INSN (TRUE)"
-  "*
-  {
-    if (arm_ccfsm_state == 2)
-      {
-        arm_ccfsm_state += 2;
-        return \"\";
-      }
-    return output_return_instruction (operands[0], TRUE, TRUE);
-  }"
+                      (returns)))]
+  "TARGET_ARM<return_cond>"
+{
+  if (arm_ccfsm_state == 2)
+    {
+      arm_ccfsm_state += 2;
+      return "";
+    }
+  return output_return_instruction (operands[0], true, true,
+                                    <return_simple_p>);
+}
   [(set_attr "conds" "use")
    (set_attr "length" "12")
    (set_attr "type" "load1")]
@@ -10809,8 +10815,7 @@
       DONE;
     }
   emit_jump_insn (gen_rtx_UNSPEC_VOLATILE (VOIDmode,
-                    gen_rtvec (1,
-                               gen_rtx_RETURN (VOIDmode)),
+                    gen_rtvec (1, ret_rtx),
                     VUNSPEC_EPILOGUE));
   DONE;
   "
@@ -10827,7 +10832,7 @@
   "TARGET_32BIT"
   "*
   if (use_return_insn (FALSE, next_nonnote_insn (insn)))
-    return output_return_instruction (const_true_rtx, FALSE, FALSE);
+    return output_return_instruction (const_true_rtx, false, false, false);
   return arm_output_epilogue (next_nonnote_insn (insn));
   "
 ;; Length is absolute worst case
 
=== modified file 'gcc/config/arm/thumb2.md'
--- old/gcc/config/arm/thumb2.md	2010-09-22 05:54:42 +0000
+++ new/gcc/config/arm/thumb2.md	2011-01-05 12:12:18 +0000
@@ -1020,16 +1020,15 @@
 
 ;; Note: this is not predicable, to avoid issues with linker-generated
 ;; interworking stubs.
-(define_insn "*thumb2_return"
-  [(return)]
-  "TARGET_THUMB2 && USE_RETURN_INSN (FALSE)"
-  "*
-  {
-    return output_return_instruction (const_true_rtx, TRUE, FALSE);
-  }"
+(define_insn "*thumb2_<return_str>return"
+  [(returns)]
+  "TARGET_THUMB2<return_cond>"
+{
+  return output_return_instruction (const_true_rtx, true, false,
+                                    <return_simple_p>);
+}
   [(set_attr "type" "load1")
-   (set_attr "length" "12")]
-)
+   (set_attr "length" "12")])
 
 (define_insn_and_split "thumb2_eh_return"
   [(unspec_volatile [(match_operand:SI 0 "s_register_operand" "r")]
 
=== modified file 'gcc/config/i386/i386.c'
--- old/gcc/config/i386/i386.c	2010-11-16 18:05:53 +0000
+++ new/gcc/config/i386/i386.c	2011-01-05 12:12:18 +0000
@@ -9308,13 +9308,13 @@
 
           pro_epilogue_adjust_stack (stack_pointer_rtx, stack_pointer_rtx,
                                      popc, -1, true);
-          emit_jump_insn (gen_return_indirect_internal (ecx));
+          emit_jump_insn (gen_simple_return_indirect_internal (ecx));
         }
       else
-        emit_jump_insn (gen_return_pop_internal (popc));
+        emit_jump_insn (gen_simple_return_pop_internal (popc));
     }
   else
-    emit_jump_insn (gen_return_internal ());
+    emit_jump_insn (gen_simple_return_internal ());
 
   /* Restore the state back to the state from the prologue,
      so that it's correct for the next epilogue. */
@@ -26596,7 +26596,7 @@
       rtx prev;
       bool replace = false;
 
-      if (!JUMP_P (ret) || GET_CODE (PATTERN (ret)) != RETURN
+      if (!JUMP_P (ret) || !ANY_RETURN_P (PATTERN (ret))
          || optimize_bb_for_size_p (bb))
        continue;
       for (prev = PREV_INSN (ret); prev; prev = PREV_INSN (prev))
@@ -26626,7 +26626,10 @@
        }
       if (replace)
        {
-         emit_jump_insn_before (gen_return_internal_long (), ret);
+         if (PATTERN (ret) == ret_rtx)
+           emit_jump_insn_before (gen_return_internal_long (), ret);
+         else
+           emit_jump_insn_before (gen_simple_return_internal_long (), ret);
          delete_insn (ret);
        }
     }
 
=== modified file 'gcc/config/i386/i386.md'
--- old/gcc/config/i386/i386.md	2010-11-27 15:24:12 +0000
+++ new/gcc/config/i386/i386.md	2011-01-05 12:12:18 +0000
@@ -13797,24 +13797,29 @@
   ""
   [(set_attr "length" "0")])
 
+(define_code_iterator returns [return simple_return])
+(define_code_attr return_str [(return "") (simple_return "simple_")])
+(define_code_attr return_cond [(return "ix86_can_use_return_insn_p ()")
+                               (simple_return "")])
+
 ;; Insn emitted into the body of a function to return from a function.
 ;; This is only done if the function's epilogue is known to be simple.
 ;; See comments for ix86_can_use_return_insn_p in i386.c.
 
-(define_expand "return"
-  [(return)]
-  "ix86_can_use_return_insn_p ()"
+(define_expand "<return_str>return"
+  [(returns)]
+  "<return_cond>"
 {
   if (crtl->args.pops_args)
     {
       rtx popc = GEN_INT (crtl->args.pops_args);
-      emit_jump_insn (gen_return_pop_internal (popc));
+      emit_jump_insn (gen_<return_str>return_pop_internal (popc));
       DONE;
     }
 })
 
-(define_insn "return_internal"
-  [(return)]
+(define_insn "<return_str>return_internal"
+  [(returns)]
   "reload_completed"
   "ret"
   [(set_attr "length" "1")
@@ -13825,8 +13830,8 @@
 ;; Used by x86_machine_dependent_reorg to avoid penalty on single byte RET
 ;; instruction Athlon and K8 have.
 
-(define_insn "return_internal_long"
-  [(return)
+(define_insn "<return_str>return_internal_long"
+  [(returns)
    (unspec [(const_int 0)] UNSPEC_REP)]
   "reload_completed"
   "rep\;ret"
@@ -13836,8 +13841,8 @@
    (set_attr "prefix_rep" "1")
    (set_attr "modrm" "0")])
 
-(define_insn "return_pop_internal"
-  [(return)
+(define_insn "<return_str>return_pop_internal"
+  [(returns)
    (use (match_operand:SI 0 "const_int_operand" ""))]
   "reload_completed"
   "ret\t%0"
@@ -13846,8 +13851,8 @@
    (set_attr "length_immediate" "2")
    (set_attr "modrm" "0")])
 
-(define_insn "return_indirect_internal"
-  [(return)
+(define_insn "<return_str>return_indirect_internal"
+  [(returns)
    (use (match_operand:SI 0 "register_operand" "r"))]
   "reload_completed"
   "jmp\t%A0"
 
=== modified file 'gcc/config/mips/mips.c'
--- old/gcc/config/mips/mips.c	2010-11-21 10:38:43 +0000
+++ new/gcc/config/mips/mips.c	2011-01-05 12:12:18 +0000
@@ -10497,7 +10497,8 @@
         regno = GP_REG_FIRST + 7;
       else
         regno = RETURN_ADDR_REGNUM;
-      emit_jump_insn (gen_return_internal (gen_rtx_REG (Pmode, regno)));
+      emit_jump_insn (gen_simple_return_internal (gen_rtx_REG (Pmode,
+                                                               regno)));
     }
 }
 
 
=== modified file 'gcc/config/mips/mips.md'
--- old/gcc/config/mips/mips.md	2010-04-02 18:54:46 +0000
+++ new/gcc/config/mips/mips.md	2011-01-05 12:12:18 +0000
@@ -5815,6 +5815,18 @@
   [(set_attr "type" "jump")
    (set_attr "mode" "none")])
 
+(define_expand "simple_return"
+  [(simple_return)]
+  "!mips_can_use_return_insn ()"
+  { mips_expand_before_return (); })
+
+(define_insn "*simple_return"
+  [(simple_return)]
+  "!mips_can_use_return_insn ()"
+  "%*j\t$31%/"
+  [(set_attr "type" "jump")
+   (set_attr "mode" "none")])
+
 ;; Normal return.
 
 (define_insn "return_internal"
@@ -5825,6 +5837,14 @@
   [(set_attr "type" "jump")
    (set_attr "mode" "none")])
 
+(define_insn "simple_return_internal"
+  [(simple_return)
+   (use (match_operand 0 "pmode_register_operand" ""))]
+  ""
+  "%*j\t%0%/"
+  [(set_attr "type" "jump")
+   (set_attr "mode" "none")])
+
 ;; Exception return.
 (define_insn "mips_eret"
   [(return)
 
=== modified file 'gcc/config/sh/sh.c'
--- old/gcc/config/sh/sh.c	2010-12-10 15:34:19 +0000
+++ new/gcc/config/sh/sh.c	2011-01-05 12:12:18 +0000
@@ -5252,7 +5252,8 @@
     }
   if (prev
       && JUMP_P (prev)
-      && JUMP_LABEL (prev))
+      && JUMP_LABEL (prev)
+      && !ANY_RETURN_P (JUMP_LABEL (prev)))
     {
       rtx x;
       if (jump_to_next
@@ -5951,7 +5952,7 @@
              JUMP_LABEL (insn) = far_label;
              LABEL_NUSES (far_label)++;
            }
-         redirect_jump (insn, NULL_RTX, 1);
+         redirect_jump (insn, ret_rtx, 1);
          far_label = 0;
        }
     }
 
=== modified file 'gcc/df-scan.c'
--- old/gcc/df-scan.c	2010-11-16 22:17:17 +0000
+++ new/gcc/df-scan.c	2011-01-05 12:12:18 +0000
@@ -3296,6 +3296,7 @@
       }
 
     case RETURN:
+    case SIMPLE_RETURN:
       break;
 
     case ASM_OPERANDS:
 
=== modified file 'gcc/doc/invoke.texi'
--- old/gcc/doc/invoke.texi	2010-11-04 14:29:09 +0000
+++ new/gcc/doc/invoke.texi	2011-01-05 12:12:18 +0000
@@ -5750,6 +5750,7 @@
 -fipa-pure-const @gol
 -fipa-reference @gol
 -fmerge-constants
+-fshrink-wrap @gol
 -fsplit-wide-types @gol
 -ftree-builtin-call-dce @gol
 -ftree-ccp @gol
@@ -6504,6 +6505,12 @@
 When pipelining loops during selective scheduling, also pipeline outer loops.
 This option has no effect until @option{-fsel-sched-pipelining} is turned on.
 
+@item -fshrink-wrap
+@opindex fshrink-wrap
+Emit function prologues only before parts of the function that need it,
+rather than at the top of the function. This flag is enabled by default at
+@option{-O} and higher.
+
 @item -fcaller-saves
 @opindex fcaller-saves
 Enable values to be allocated in registers that will be clobbered by
 
=== modified file 'gcc/doc/md.texi'
--- old/gcc/doc/md.texi	2009-12-15 18:36:44 +0000
+++ new/gcc/doc/md.texi	2011-01-05 12:12:18 +0000
@@ -4801,7 +4801,19 @@
 multiple instructions are usually needed to return from a function, but
 some class of functions only requires one instruction to implement a
 return. Normally, the applicable functions are those which do not need
-to save any registers or allocate stack space.
+to save any registers or allocate stack space, although some targets
+have instructions that can perform both the epilogue and function return
+in one instruction.
+
+@cindex @code{simple_return} instruction pattern
+@item @samp{simple_return}
+Subroutine return instruction. This instruction pattern name should be
+defined only if a single instruction can do all the work of returning
+from a function on a path where no epilogue is required. This pattern
+is very similar to the @code{return} instruction pattern, but it is emitted
+only by the shrink-wrapping optimization on paths where the function
+prologue has not been executed, and a function return should occur without
+any of the effects of the epilogue.
 
 @findex reload_completed
 @findex leaf_function_p
 
=== modified file 'gcc/doc/rtl.texi'
--- old/gcc/doc/rtl.texi	2010-07-06 19:23:53 +0000
+++ new/gcc/doc/rtl.texi	2011-01-05 12:12:18 +0000
@@ -2888,6 +2888,13 @@
 Note that an insn pattern of @code{(return)} is logically equivalent to
 @code{(set (pc) (return))}, but the latter form is never used.
 
+@findex simple_return
+@item (simple_return)
+Like @code{(return)}, but truly represents only a function return, while
+@code{(return)} may represent an insn that also performs other functions
+of the function epilogue. Like @code{(return)}, this may also occur in
+conditional jumps.
+
 @findex call
 @item (call @var{function} @var{nargs})
 Represents a function call. @var{function} is a @code{mem} expression
@@ -3017,7 +3024,7 @@
 brackets stand for a vector; the operand of @code{parallel} is a
 vector of expressions. @var{x0}, @var{x1} and so on are individual
 side effect expressions---expressions of code @code{set}, @code{call},
-@code{return}, @code{clobber} or @code{use}.
+@code{return}, @code{simple_return}, @code{clobber} or @code{use}.
 
 ``In parallel'' means that first all the values used in the individual
 side-effects are computed, and second all the actual side-effects are
@@ -3656,14 +3663,16 @@
 @table @code
 @findex PATTERN
 @item PATTERN (@var{i})
-An expression for the side effect performed by this insn. This must be
-one of the following codes: @code{set}, @code{call}, @code{use},
-@code{clobber}, @code{return}, @code{asm_input}, @code{asm_output},
-@code{addr_vec}, @code{addr_diff_vec}, @code{trap_if}, @code{unspec},
-@code{unspec_volatile}, @code{parallel}, @code{cond_exec}, or @code{sequence}. If it is a @code{parallel},
-each element of the @code{parallel} must be one these codes, except that
-@code{parallel} expressions cannot be nested and @code{addr_vec} and
-@code{addr_diff_vec} are not permitted inside a @code{parallel} expression.
+An expression for the side effect performed by this insn. This must
+be one of the following codes: @code{set}, @code{call}, @code{use},
+@code{clobber}, @code{return}, @code{simple_return}, @code{asm_input},
+@code{asm_output}, @code{addr_vec}, @code{addr_diff_vec},
+@code{trap_if}, @code{unspec}, @code{unspec_volatile},
+@code{parallel}, @code{cond_exec}, or @code{sequence}. If it is a
+@code{parallel}, each element of the @code{parallel} must be one these
+codes, except that @code{parallel} expressions cannot be nested and
+@code{addr_vec} and @code{addr_diff_vec} are not permitted inside a
+@code{parallel} expression.
 
 @findex INSN_CODE
 @item INSN_CODE (@var{i})
 
=== modified file 'gcc/doc/tm.texi'
--- old/gcc/doc/tm.texi	2010-09-01 13:29:58 +0000
+++ new/gcc/doc/tm.texi	2011-01-05 12:12:18 +0000
@@ -3287,6 +3287,12 @@
 from the frame pointer of the previous stack frame.
 @end defmac
 
+@defmac RETURN_ADDR_REGNUM
+If defined, a C expression whose value is the register number of the return
+address for the current function. Targets that pass the return address on
+the stack should not define this macro.
+@end defmac
+
 @defmac INCOMING_RETURN_ADDR_RTX
 A C expression whose value is RTL representing the location of the
 incoming return address at the beginning of any function, before the
 
=== modified file 'gcc/dwarf2out.c'
--- old/gcc/dwarf2out.c	2010-12-21 18:46:10 +0000
+++ new/gcc/dwarf2out.c	2011-01-05 12:12:18 +0000
@@ -1396,7 +1396,7 @@
        {
          rtx dest = JUMP_LABEL (insn);
 
-         if (dest)
+         if (dest && !ANY_RETURN_P (dest))
            {
              if (barrier_args_size [INSN_UID (dest)] < 0)
                {
 
=== modified file 'gcc/emit-rtl.c'
--- old/gcc/emit-rtl.c	2010-10-04 00:50:43 +0000
+++ new/gcc/emit-rtl.c	2011-01-05 12:12:18 +0000
@@ -2432,6 +2432,8 @@
     case CODE_LABEL:
     case PC:
     case CC0:
+    case RETURN:
+    case SIMPLE_RETURN:
     case SCRATCH:
       return;
       /* SCRATCH must be shared because they represent distinct values. */
@@ -3323,14 +3325,17 @@
   return insn;
 }
 
-/* Return the last label to mark the same position as LABEL. Return null
-   if LABEL itself is null. */
+/* Return the last label to mark the same position as LABEL. Return LABEL
+   itself if it is null or any return rtx. */
 
 rtx
 skip_consecutive_labels (rtx label)
 {
   rtx insn;
 
+  if (label && ANY_RETURN_P (label))
+    return label;
+
   for (insn = label; insn != 0 && !INSN_P (insn); insn = NEXT_INSN (insn))
     if (LABEL_P (insn))
       label = insn;
@@ -5209,7 +5214,7 @@
     return CODE_LABEL;
   if (GET_CODE (x) == CALL)
     return CALL_INSN;
-  if (GET_CODE (x) == RETURN)
+  if (GET_CODE (x) == RETURN || GET_CODE (x) == SIMPLE_RETURN)
     return JUMP_INSN;
   if (GET_CODE (x) == SET)
     {
@@ -5715,8 +5720,10 @@
   init_reg_modes_target ();
 
   /* Assign register numbers to the globally defined register rtx. */
-  pc_rtx = gen_rtx_PC (VOIDmode);
-  cc0_rtx = gen_rtx_CC0 (VOIDmode);
+  pc_rtx = gen_rtx_fmt_ (PC, VOIDmode);
+  ret_rtx = gen_rtx_fmt_ (RETURN, VOIDmode);
+  simple_return_rtx = gen_rtx_fmt_ (SIMPLE_RETURN, VOIDmode);
+  cc0_rtx = gen_rtx_fmt_ (CC0, VOIDmode);
   stack_pointer_rtx = gen_raw_REG (Pmode, STACK_POINTER_REGNUM);
   frame_pointer_rtx = gen_raw_REG (Pmode, FRAME_POINTER_REGNUM);
   hard_frame_pointer_rtx = gen_raw_REG (Pmode, HARD_FRAME_POINTER_REGNUM);
 
=== modified file 'gcc/final.c'
--- old/gcc/final.c	2010-03-26 16:18:51 +0000
+++ new/gcc/final.c	2011-01-05 12:12:18 +0000
@@ -2428,7 +2428,7 @@
              delete_insn (insn);
              break;
            }
-         else if (GET_CODE (SET_SRC (body)) == RETURN)
+         else if (ANY_RETURN_P (SET_SRC (body)))
            /* Replace (set (pc) (return)) with (return). */
            PATTERN (insn) = body = SET_SRC (body);
 
 
=== modified file 'gcc/function.c'
--- old/gcc/function.c	2010-08-16 19:18:08 +0000
+++ new/gcc/function.c	2011-01-05 12:12:18 +0000
@@ -147,9 +147,6 @@
    can always export `prologue_epilogue_contains'. */
 static void record_insns (rtx, rtx, htab_t *) ATTRIBUTE_UNUSED;
 static bool contains (const_rtx, htab_t);
-#ifdef HAVE_return
-static void emit_return_into_block (basic_block);
-#endif
 static void prepare_function_start (void);
 static void do_clobber_return_reg (rtx, void *);
 static void do_use_return_reg (rtx, void *);
@@ -4987,35 +4984,189 @@
   return 0;
 }
 
+#ifdef HAVE_simple_return
+/* This collects sets and clobbers of hard registers in a HARD_REG_SET,
+   which is pointed to by DATA. */
+static void
+record_hard_reg_sets (rtx x, const_rtx pat ATTRIBUTE_UNUSED, void *data)
+{
+  HARD_REG_SET *pset = (HARD_REG_SET *)data;
+  if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
+    {
+      int nregs = hard_regno_nregs[REGNO (x)][GET_MODE (x)];
+      while (nregs-- > 0)
+        SET_HARD_REG_BIT (*pset, REGNO (x) + nregs);
+    }
+}
+
+/* A subroutine of requires_stack_frame_p, called via for_each_rtx.
+   Return 1 if *LOC is a register whose use forces the function to
+   have a stack frame (the stack or frame pointer, the argument
+   pointer, the PIC register, or the return address register). */
+
+static int
+frame_required_for_rtx (rtx *loc, void *data ATTRIBUTE_UNUSED)
+{
+  rtx x = *loc;
+  if (x == stack_pointer_rtx || x == hard_frame_pointer_rtx
+      || x == arg_pointer_rtx || x == pic_offset_table_rtx
+#ifdef RETURN_ADDR_REGNUM
+      || (REG_P (x) && REGNO (x) == RETURN_ADDR_REGNUM)
+#endif
+      )
+    return 1;
+  return 0;
+}
+
+static bool
+requires_stack_frame_p (rtx insn)
+{
+  HARD_REG_SET hardregs;
+  unsigned regno;
+
+  if (!INSN_P (insn) || DEBUG_INSN_P (insn))
+    return false;
+  if (CALL_P (insn))
+    return !SIBLING_CALL_P (insn);
+  if (for_each_rtx (&PATTERN (insn), frame_required_for_rtx, NULL))
+    return true;
+  CLEAR_HARD_REG_SET (hardregs);
+  note_stores (PATTERN (insn), record_hard_reg_sets, &hardregs);
+  AND_COMPL_HARD_REG_SET (hardregs, call_used_reg_set);
+  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
+    if (TEST_HARD_REG_BIT (hardregs, regno)
+        && df_regs_ever_live_p (regno))
+      return true;
+  return false;
+}
+#endif
 
 #ifdef HAVE_return
-/* Insert gen_return at the end of block BB. This also means updating
-   block_for_insn appropriately. */
+
+static rtx
+gen_return_pattern (bool simple_p)
+{
+#ifdef HAVE_simple_return
+  return simple_p ? gen_simple_return () : gen_return ();
+#else
+  gcc_assert (!simple_p);
+  return gen_return ();
+#endif
+}
+
+/* Insert an appropriate return pattern at the end of block BB. This
+   also means updating block_for_insn appropriately. */
 
 static void
-emit_return_into_block (basic_block bb)
+emit_return_into_block (bool simple_p, basic_block bb)
 {
-  emit_jump_insn_after (gen_return (), BB_END (bb));
+  rtx jump;
+  jump = emit_jump_insn_after (gen_return_pattern (simple_p), BB_END (bb));
+  JUMP_LABEL (jump) = simple_p ? simple_return_rtx : ret_rtx;
 }
-#endif /* HAVE_return */
+#endif
 
 /* Generate the prologue and epilogue RTL if the machine supports it. Thread
    this into place with notes indicating where the prologue ends and where
-   the epilogue begins. Update the basic block information when possible. */
+   the epilogue begins. Update the basic block information when possible.
+
+   Notes on epilogue placement:
+   There are several kinds of edges to the exit block:
+     * a single fallthru edge from LAST_BB
+     * possibly, edges from blocks containing sibcalls
+     * possibly, fake edges from infinite loops
+
+   The epilogue is always emitted on the fallthru edge from the last basic
+   block in the function, LAST_BB, into the exit block.
+
+   If LAST_BB is empty except for a label, it is the target of every
+   other basic block in the function that ends in a return. If a
+   target has a return or simple_return pattern (possibly with
+   conditional variants), these basic blocks can be changed so that a
+   return insn is emitted into them, and their target is adjusted to
+   the real exit block.
+
+   Notes on shrink wrapping: We implement a fairly conservative
+   version of shrink-wrapping rather than the textbook one. We only
+   generate a single prologue and a single epilogue. This is
+   sufficient to catch a number of interesting cases involving early
+   exits.
+
+   First, we identify the blocks that require the prologue to occur before
+   them. These are the ones that modify a call-saved register, or reference
+   any of the stack or frame pointer registers. To simplify things, we then
+   mark everything reachable from these blocks as also requiring a prologue.
+   This takes care of loops automatically, and avoids the need to examine
+   whether MEMs reference the frame, since it is sufficient to check for
+   occurrences of the stack or frame pointer.
+
+   We then compute the set of blocks for which the need for a prologue
+   is anticipatable (borrowing terminology from the shrink-wrapping
+   description in Muchnick's book). These are the blocks which either
+   require a prologue themselves, or those that have only successors
+   where the prologue is anticipatable. The prologue needs to be
+   inserted on all edges from BB1->BB2 where BB2 is in ANTIC and BB1
+   is not. For the moment, we ensure that only one such edge exists.
+
+   The epilogue is placed as described above, but we make a
+   distinction between inserting return and simple_return patterns
+   when modifying other blocks that end in a return. Blocks that end
+   in a sibcall omit the sibcall_epilogue if the block is not in
+   ANTIC. */
 
 static void
 thread_prologue_and_epilogue_insns (void)
 {
   int inserted = 0;
+  basic_block last_bb;
+  bool last_bb_active;
+#ifdef HAVE_simple_return
+  bool unconverted_simple_returns = false;
+  basic_block simple_return_block = NULL;
+#endif
+  rtx returnjump ATTRIBUTE_UNUSED;
+  rtx seq ATTRIBUTE_UNUSED, epilogue_end ATTRIBUTE_UNUSED;
+  rtx prologue_seq ATTRIBUTE_UNUSED, split_prologue_seq ATTRIBUTE_UNUSED;
+  edge entry_edge, orig_entry_edge, exit_fallthru_edge;
   edge e;
-#if defined (HAVE_sibcall_epilogue) || defined (HAVE_epilogue) || defined (HAVE_return) || defined (HAVE_prologue)
-  rtx seq;
-#endif
-#if defined (HAVE_epilogue) || defined(HAVE_return)
-  rtx epilogue_end = NULL_RTX;
-#endif
   edge_iterator ei;
+  bitmap_head bb_flags;
+
+  df_analyze ();
 
   rtl_profile_for_bb (ENTRY_BLOCK_PTR);
+
+  epilogue_end = NULL_RTX;
+
+  /* Can't deal with multiple successors of the entry block at the
+     moment. Function should always have at least one entry
+     point. */
+  gcc_assert (single_succ_p (ENTRY_BLOCK_PTR));
+  entry_edge = single_succ_edge (ENTRY_BLOCK_PTR);
+  orig_entry_edge = entry_edge;
+
+  exit_fallthru_edge = find_fallthru_edge (EXIT_BLOCK_PTR->preds);
+  if (exit_fallthru_edge != NULL)
+    {
+      rtx label;
+
+      last_bb = exit_fallthru_edge->src;
+      /* Test whether there are active instructions in the last block. */
+      label = BB_END (last_bb);
+      while (label && !LABEL_P (label))
+        {
+          if (active_insn_p (label))
+            break;
+          label = PREV_INSN (label);
+        }
+
+      last_bb_active = BB_HEAD (last_bb) != label || !LABEL_P (label);
+    }
+  else
+    {
+      last_bb = NULL;
+      last_bb_active = false;
+    }
+
 #ifdef HAVE_prologue
   if (HAVE_prologue)
     {
@@ -5040,19 +5191,168 @@
       emit_insn (gen_blockage ());
 #endif
 
-      seq = get_insns ();
+      prologue_seq = get_insns ();
       end_sequence ();
       set_insn_locators (seq, prologue_locator);
-
-      /* Can't deal with multiple successors of the entry block
-         at the moment. Function should always have at least one
-         entry point. */
-      gcc_assert (single_succ_p (ENTRY_BLOCK_PTR));
-
-      insert_insn_on_edge (seq, single_succ_edge (ENTRY_BLOCK_PTR));
-      inserted = 1;
-    }
-#endif
+    }
+#endif
+
+  bitmap_initialize (&bb_flags, &bitmap_default_obstack);
+
+#ifdef HAVE_simple_return
+  /* Try to perform a kind of shrink-wrapping, making sure the
+     prologue/epilogue is emitted only around those parts of the
+     function that require it. */
+
+  if (flag_shrink_wrap && HAVE_simple_return && !flag_non_call_exceptions
+      && HAVE_prologue && !crtl->calls_eh_return)
+    {
+      HARD_REG_SET prologue_clobbered, live_on_edge;
+      rtx p_insn;
+      VEC(basic_block, heap) *vec;
+      basic_block bb;
+      bitmap_head bb_antic_flags;
+      bitmap_head bb_on_list;
+
+      bitmap_initialize (&bb_antic_flags, &bitmap_default_obstack);
+      bitmap_initialize (&bb_on_list, &bitmap_default_obstack);
+
+      vec = VEC_alloc (basic_block, heap, n_basic_blocks);
+
+      FOR_EACH_BB (bb)
+        {
+          rtx insn;
+          FOR_BB_INSNS (bb, insn)
+            {
+              if (requires_stack_frame_p (insn))
+                {
+                  bitmap_set_bit (&bb_flags, bb->index);
+                  VEC_quick_push (basic_block, vec, bb);
+                  break;
+                }
+            }
+        }
+
+      /* For every basic block that needs a prologue, mark all blocks
+         reachable from it, so as to ensure they are also seen as
+         requiring a prologue. */
+      while (!VEC_empty (basic_block, vec))
+        {
+          basic_block tmp_bb = VEC_pop (basic_block, vec);
+          edge e;
+          edge_iterator ei;
+          FOR_EACH_EDGE (e, ei, tmp_bb->succs)
+            {
+              if (e->dest == EXIT_BLOCK_PTR
+                  || bitmap_bit_p (&bb_flags, e->dest->index))
+                continue;
+              bitmap_set_bit (&bb_flags, e->dest->index);
+              VEC_quick_push (basic_block, vec, e->dest);
+            }
+        }
+      /* If the last basic block contains only a label, we'll be able
+         to convert jumps to it to (potentially conditional) return
+         insns later. This means we don't necessarily need a prologue
+         for paths reaching it. */
+      if (last_bb)
+        {
+          if (!last_bb_active)
+            bitmap_clear_bit (&bb_flags, last_bb->index);
+          else if (!bitmap_bit_p (&bb_flags, last_bb->index))
+            goto fail_shrinkwrap;
+        }
+
+      /* Now walk backwards from every block that is marked as needing
+         a prologue to compute the bb_antic_flags bitmap. */
+      bitmap_copy (&bb_antic_flags, &bb_flags);
+      FOR_EACH_BB (bb)
+        {
+          edge e;
+          edge_iterator ei;
+          if (!bitmap_bit_p (&bb_flags, bb->index))
+            continue;
+          FOR_EACH_EDGE (e, ei, bb->preds)
+            if (!bitmap_bit_p (&bb_antic_flags, e->src->index))
+              {
+                VEC_quick_push (basic_block, vec, e->src);
+                bitmap_set_bit (&bb_on_list, e->src->index);
+              }
+        }
+      while (!VEC_empty (basic_block, vec))
+        {
+          basic_block tmp_bb = VEC_pop (basic_block, vec);
+          edge e;
+          edge_iterator ei;
+          bool all_set = true;
+
+          bitmap_clear_bit (&bb_on_list, tmp_bb->index);
+          FOR_EACH_EDGE (e, ei, tmp_bb->succs)
+            {
+              if (!bitmap_bit_p (&bb_antic_flags, e->dest->index))
+                {
+                  all_set = false;
+                  break;
+                }
+            }
+          if (all_set)
+            {
+              bitmap_set_bit (&bb_antic_flags, tmp_bb->index);
1417 | + FOR_EACH_EDGE (e, ei, tmp_bb->preds) | ||
1418 | + if (!bitmap_bit_p (&bb_antic_flags, e->src->index)) | ||
1419 | + { | ||
1420 | + VEC_quick_push (basic_block, vec, e->src); | ||
1421 | + bitmap_set_bit (&bb_on_list, e->src->index); | ||
1422 | + } | ||
1423 | + } | ||
1424 | + } | ||
1425 | + /* Find exactly one edge that leads to a block in ANTIC from | ||
1426 | + a block that isn't. */ | ||
1427 | + if (!bitmap_bit_p (&bb_antic_flags, entry_edge->dest->index)) | ||
1428 | + FOR_EACH_BB (bb) | ||
1429 | + { | ||
1430 | + if (!bitmap_bit_p (&bb_antic_flags, bb->index)) | ||
1431 | + continue; | ||
1432 | + FOR_EACH_EDGE (e, ei, bb->preds) | ||
1433 | + if (!bitmap_bit_p (&bb_antic_flags, e->src->index)) | ||
1434 | + { | ||
1435 | + if (entry_edge != orig_entry_edge) | ||
1436 | + { | ||
1437 | + entry_edge = orig_entry_edge; | ||
1438 | + goto fail_shrinkwrap; | ||
1439 | + } | ||
1440 | + entry_edge = e; | ||
1441 | + } | ||
1442 | + } | ||
1443 | + | ||
1444 | + /* Test whether the prologue is known to clobber any register | ||
1445 | + (other than FP or SP) that is live on the edge. */ | ||
1446 | + CLEAR_HARD_REG_SET (prologue_clobbered); | ||
1447 | + for (p_insn = prologue_seq; p_insn; p_insn = NEXT_INSN (p_insn)) | ||
1448 | + if (NONDEBUG_INSN_P (p_insn)) | ||
1449 | + note_stores (PATTERN (p_insn), record_hard_reg_sets, | ||
1450 | + &prologue_clobbered); | ||
1451 | + CLEAR_HARD_REG_BIT (prologue_clobbered, STACK_POINTER_REGNUM); | ||
1452 | + if (frame_pointer_needed) | ||
1453 | + CLEAR_HARD_REG_BIT (prologue_clobbered, HARD_FRAME_POINTER_REGNUM); | ||
1454 | + | ||
1455 | + CLEAR_HARD_REG_SET (live_on_edge); | ||
1456 | + reg_set_to_hard_reg_set (&live_on_edge, | ||
1457 | + df_get_live_in (entry_edge->dest)); | ||
1458 | + if (hard_reg_set_intersect_p (live_on_edge, prologue_clobbered)) | ||
1459 | + entry_edge = orig_entry_edge; | ||
1460 | + | ||
1461 | + fail_shrinkwrap: | ||
1462 | + bitmap_clear (&bb_antic_flags); | ||
1463 | + bitmap_clear (&bb_on_list); | ||
1464 | + VEC_free (basic_block, heap, vec); | ||
1465 | + } | ||
1466 | +#endif | ||
1467 | + | ||
1468 | + if (prologue_seq != NULL_RTX) | ||
1469 | + { | ||
1470 | + insert_insn_on_edge (prologue_seq, entry_edge); | ||
1471 | + inserted = true; | ||
1472 | + } | ||
1473 | |||
1474 | /* If the exit block has no non-fake predecessors, we don't need | ||
1475 | an epilogue. */ | ||
1476 | @@ -5063,100 +5363,130 @@ | ||
1477 | goto epilogue_done; | ||
1478 | |||
1479 | rtl_profile_for_bb (EXIT_BLOCK_PTR); | ||
1480 | + | ||
1481 | #ifdef HAVE_return | ||
1482 | - if (optimize && HAVE_return) | ||
1483 | + /* If we're allowed to generate a simple return instruction, then by | ||
1484 | + definition we don't need a full epilogue. If the last basic | ||
1485 | + block before the exit block does not contain active instructions, | ||
1486 | + examine its predecessors and try to emit (conditional) return | ||
1487 | + instructions. */ | ||
1488 | + if (optimize && !last_bb_active | ||
1489 | + && (HAVE_return || entry_edge != orig_entry_edge)) | ||
1490 | { | ||
1491 | - /* If we're allowed to generate a simple return instruction, | ||
1492 | - then by definition we don't need a full epilogue. Examine | ||
1493 | - the block that falls through to EXIT. If it does not | ||
1494 | - contain any code, examine its predecessors and try to | ||
1495 | - emit (conditional) return instructions. */ | ||
1496 | - | ||
1497 | - basic_block last; | ||
1498 | + edge_iterator ei2; | ||
1499 | + int i; | ||
1500 | + basic_block bb; | ||
1501 | rtx label; | ||
1502 | + VEC(basic_block,heap) *src_bbs; | ||
1503 | |||
1504 | - FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds) | ||
1505 | - if (e->flags & EDGE_FALLTHRU) | ||
1506 | - break; | ||
1507 | - if (e == NULL) | ||
1508 | + if (exit_fallthru_edge == NULL) | ||
1509 | goto epilogue_done; | ||
1510 | - last = e->src; | ||
1511 | - | ||
1512 | - /* Verify that there are no active instructions in the last block. */ | ||
1513 | - label = BB_END (last); | ||
1514 | - while (label && !LABEL_P (label)) | ||
1515 | + label = BB_HEAD (last_bb); | ||
1516 | + | ||
1517 | + src_bbs = VEC_alloc (basic_block, heap, EDGE_COUNT (last_bb->preds)); | ||
1518 | + FOR_EACH_EDGE (e, ei2, last_bb->preds) | ||
1519 | + if (e->src != ENTRY_BLOCK_PTR) | ||
1520 | + VEC_quick_push (basic_block, src_bbs, e->src); | ||
1521 | + | ||
1522 | + FOR_EACH_VEC_ELT (basic_block, src_bbs, i, bb) | ||
1523 | { | ||
1524 | - if (active_insn_p (label)) | ||
1525 | - break; | ||
1526 | - label = PREV_INSN (label); | ||
1527 | + bool simple_p; | ||
1528 | + rtx jump; | ||
1529 | + e = find_edge (bb, last_bb); | ||
1530 | + | ||
1531 | + jump = BB_END (bb); | ||
1532 | + | ||
1533 | +#ifdef HAVE_simple_return | ||
1534 | + simple_p = (entry_edge != orig_entry_edge | ||
1535 | + ? !bitmap_bit_p (&bb_flags, bb->index) : false); | ||
1536 | +#else | ||
1537 | + simple_p = false; | ||
1538 | +#endif | ||
1539 | + | ||
1540 | + if (!simple_p | ||
1541 | + && (!HAVE_return || !JUMP_P (jump) | ||
1542 | + || JUMP_LABEL (jump) != label)) | ||
1543 | + continue; | ||
1544 | + | ||
1545 | + /* If the block falls through, or ends in an unconditional jump, | ||
1546 | + we can replace that with a (simple) return instruction. */ | ||
1547 | + if (!JUMP_P (jump)) | ||
1548 | + { | ||
1549 | + emit_barrier_after (BB_END (bb)); | ||
1550 | + emit_return_into_block (simple_p, bb); | ||
1551 | + } | ||
1552 | + else if (simplejump_p (jump)) | ||
1553 | + { | ||
1554 | + emit_return_into_block (simple_p, bb); | ||
1555 | + delete_insn (jump); | ||
1556 | + } | ||
1557 | + else if (condjump_p (jump) && JUMP_LABEL (jump) != label) | ||
1558 | + { | ||
1559 | + basic_block new_bb; | ||
1560 | + edge new_e; | ||
1561 | + | ||
1562 | + gcc_assert (simple_p); | ||
1563 | + new_bb = split_edge (e); | ||
1564 | + emit_barrier_after (BB_END (new_bb)); | ||
1565 | + emit_return_into_block (simple_p, new_bb); | ||
1566 | +#ifdef HAVE_simple_return | ||
1567 | + simple_return_block = new_bb; | ||
1568 | +#endif | ||
1569 | + new_e = single_succ_edge (new_bb); | ||
1570 | + redirect_edge_succ (new_e, EXIT_BLOCK_PTR); | ||
1571 | + | ||
1572 | + continue; | ||
1573 | + } | ||
1574 | + /* If we have a conditional jump branching to the last | ||
1575 | + block, we can try to replace that with a conditional | ||
1576 | + return instruction. */ | ||
1577 | + else if (condjump_p (jump)) | ||
1578 | + { | ||
1579 | + rtx dest; | ||
1580 | + if (simple_p) | ||
1581 | + dest = simple_return_rtx; | ||
1582 | + else | ||
1583 | + dest = ret_rtx; | ||
1584 | + if (! redirect_jump (jump, dest, 0)) | ||
1585 | + { | ||
1586 | +#ifdef HAVE_simple_return | ||
1587 | + if (simple_p) | ||
1588 | + unconverted_simple_returns = true; | ||
1589 | +#endif | ||
1590 | + continue; | ||
1591 | + } | ||
1592 | + | ||
1593 | + /* If this block has only one successor, it both jumps | ||
1594 | + and falls through to the fallthru block, so we can't | ||
1595 | + delete the edge. */ | ||
1596 | + if (single_succ_p (bb)) | ||
1597 | + continue; | ||
1598 | + } | ||
1599 | + else | ||
1600 | + { | ||
1601 | +#ifdef HAVE_simple_return | ||
1602 | + if (simple_p) | ||
1603 | + unconverted_simple_returns = true; | ||
1604 | +#endif | ||
1605 | + continue; | ||
1606 | + } | ||
1607 | + | ||
1608 | + /* Fix up the CFG for the successful change we just made. */ | ||
1609 | + redirect_edge_succ (e, EXIT_BLOCK_PTR); | ||
1610 | } | ||
1611 | + VEC_free (basic_block, heap, src_bbs); | ||
1612 | |||
1613 | - if (BB_HEAD (last) == label && LABEL_P (label)) | ||
1614 | + if (HAVE_return) | ||
1615 | { | ||
1616 | - edge_iterator ei2; | ||
1617 | - | ||
1618 | - for (ei2 = ei_start (last->preds); (e = ei_safe_edge (ei2)); ) | ||
1619 | - { | ||
1620 | - basic_block bb = e->src; | ||
1621 | - rtx jump; | ||
1622 | - | ||
1623 | - if (bb == ENTRY_BLOCK_PTR) | ||
1624 | - { | ||
1625 | - ei_next (&ei2); | ||
1626 | - continue; | ||
1627 | - } | ||
1628 | - | ||
1629 | - jump = BB_END (bb); | ||
1630 | - if (!JUMP_P (jump) || JUMP_LABEL (jump) != label) | ||
1631 | - { | ||
1632 | - ei_next (&ei2); | ||
1633 | - continue; | ||
1634 | - } | ||
1635 | - | ||
1636 | - /* If we have an unconditional jump, we can replace that | ||
1637 | - with a simple return instruction. */ | ||
1638 | - if (simplejump_p (jump)) | ||
1639 | - { | ||
1640 | - emit_return_into_block (bb); | ||
1641 | - delete_insn (jump); | ||
1642 | - } | ||
1643 | - | ||
1644 | - /* If we have a conditional jump, we can try to replace | ||
1645 | - that with a conditional return instruction. */ | ||
1646 | - else if (condjump_p (jump)) | ||
1647 | - { | ||
1648 | - if (! redirect_jump (jump, 0, 0)) | ||
1649 | - { | ||
1650 | - ei_next (&ei2); | ||
1651 | - continue; | ||
1652 | - } | ||
1653 | - | ||
1654 | - /* If this block has only one successor, it both jumps | ||
1655 | - and falls through to the fallthru block, so we can't | ||
1656 | - delete the edge. */ | ||
1657 | - if (single_succ_p (bb)) | ||
1658 | - { | ||
1659 | - ei_next (&ei2); | ||
1660 | - continue; | ||
1661 | - } | ||
1662 | - } | ||
1663 | - else | ||
1664 | - { | ||
1665 | - ei_next (&ei2); | ||
1666 | - continue; | ||
1667 | - } | ||
1668 | - | ||
1669 | - /* Fix up the CFG for the successful change we just made. */ | ||
1670 | - redirect_edge_succ (e, EXIT_BLOCK_PTR); | ||
1671 | - } | ||
1672 | - | ||
1673 | /* Emit a return insn for the exit fallthru block. Whether | ||
1674 | this is still reachable will be determined later. */ | ||
1675 | |||
1676 | - emit_barrier_after (BB_END (last)); | ||
1677 | - emit_return_into_block (last); | ||
1678 | - epilogue_end = BB_END (last); | ||
1679 | - single_succ_edge (last)->flags &= ~EDGE_FALLTHRU; | ||
1680 | + emit_barrier_after (BB_END (last_bb)); | ||
1681 | + emit_return_into_block (false, last_bb); | ||
1682 | + epilogue_end = BB_END (last_bb); | ||
1683 | + if (JUMP_P (epilogue_end)) | ||
1684 | + JUMP_LABEL (epilogue_end) = ret_rtx; | ||
1685 | + single_succ_edge (last_bb)->flags &= ~EDGE_FALLTHRU; | ||
1686 | goto epilogue_done; | ||
1687 | } | ||
1688 | } | ||
1689 | @@ -5193,15 +5523,10 @@ | ||
1690 | } | ||
1691 | #endif | ||
1692 | |||
1693 | - /* Find the edge that falls through to EXIT. Other edges may exist | ||
1694 | - due to RETURN instructions, but those don't need epilogues. | ||
1695 | - There really shouldn't be a mixture -- either all should have | ||
1696 | - been converted or none, however... */ | ||
1697 | + /* If nothing falls through into the exit block, we don't need an | ||
1698 | + epilogue. */ | ||
1699 | |||
1700 | - FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds) | ||
1701 | - if (e->flags & EDGE_FALLTHRU) | ||
1702 | - break; | ||
1703 | - if (e == NULL) | ||
1704 | + if (exit_fallthru_edge == NULL) | ||
1705 | goto epilogue_done; | ||
1706 | |||
1707 | #ifdef HAVE_epilogue | ||
1708 | @@ -5217,25 +5542,38 @@ | ||
1709 | set_insn_locators (seq, epilogue_locator); | ||
1710 | |||
1711 | seq = get_insns (); | ||
1712 | + returnjump = get_last_insn (); | ||
1713 | end_sequence (); | ||
1714 | |||
1715 | - insert_insn_on_edge (seq, e); | ||
1716 | + insert_insn_on_edge (seq, exit_fallthru_edge); | ||
1717 | inserted = 1; | ||
1718 | + if (JUMP_P (returnjump)) | ||
1719 | + { | ||
1720 | + rtx pat = PATTERN (returnjump); | ||
1721 | + if (GET_CODE (pat) == PARALLEL) | ||
1722 | + pat = XVECEXP (pat, 0, 0); | ||
1723 | + if (ANY_RETURN_P (pat)) | ||
1724 | + JUMP_LABEL (returnjump) = pat; | ||
1725 | + else | ||
1726 | + JUMP_LABEL (returnjump) = ret_rtx; | ||
1727 | + } | ||
1728 | + else | ||
1729 | + returnjump = NULL_RTX; | ||
1730 | } | ||
1731 | else | ||
1732 | #endif | ||
1733 | { | ||
1734 | basic_block cur_bb; | ||
1735 | |||
1736 | - if (! next_active_insn (BB_END (e->src))) | ||
1737 | + if (! next_active_insn (BB_END (exit_fallthru_edge->src))) | ||
1738 | goto epilogue_done; | ||
1739 | /* We have a fall-through edge to the exit block, the source is not | ||
1740 | - at the end of the function, and there will be an assembler epilogue | ||
1741 | - at the end of the function. | ||
1742 | - We can't use force_nonfallthru here, because that would try to | ||
1743 | - use return. Inserting a jump 'by hand' is extremely messy, so | ||
1744 | + at the end of the function, and there will be an assembler epilogue | ||
1745 | + at the end of the function. | ||
1746 | + We can't use force_nonfallthru here, because that would try to | ||
1747 | + use return. Inserting a jump 'by hand' is extremely messy, so | ||
1748 | we take advantage of cfg_layout_finalize using | ||
1749 | - fixup_fallthru_exit_predecessor. */ | ||
1750 | + fixup_fallthru_exit_predecessor. */ | ||
1751 | cfg_layout_initialize (0); | ||
1752 | FOR_EACH_BB (cur_bb) | ||
1753 | if (cur_bb->index >= NUM_FIXED_BLOCKS | ||
1754 | @@ -5244,6 +5582,7 @@ | ||
1755 | cfg_layout_finalize (); | ||
1756 | } | ||
1757 | epilogue_done: | ||
1758 | + | ||
1759 | default_rtl_profile (); | ||
1760 | |||
1761 | if (inserted) | ||
1762 | @@ -5260,33 +5599,93 @@ | ||
1763 | } | ||
1764 | } | ||
1765 | |||
1766 | +#ifdef HAVE_simple_return | ||
1767 | + /* If there were branches to an empty LAST_BB which we tried to | ||
1768 | + convert to conditional simple_returns, but couldn't for some | ||
1769 | + reason, create a block to hold a simple_return insn and redirect | ||
1770 | + those remaining edges. */ | ||
1771 | + if (unconverted_simple_returns) | ||
1772 | + { | ||
1773 | + edge_iterator ei2; | ||
1774 | + basic_block exit_pred = EXIT_BLOCK_PTR->prev_bb; | ||
1775 | + | ||
1776 | + gcc_assert (entry_edge != orig_entry_edge); | ||
1777 | + | ||
1778 | +#ifdef HAVE_epilogue | ||
1779 | + if (simple_return_block == NULL && returnjump != NULL_RTX | ||
1780 | + && JUMP_LABEL (returnjump) == simple_return_rtx) | ||
1781 | + { | ||
1782 | + edge e = split_block (exit_fallthru_edge->src, | ||
1783 | + PREV_INSN (returnjump)); | ||
1784 | + simple_return_block = e->dest; | ||
1785 | + } | ||
1786 | +#endif | ||
1787 | + if (simple_return_block == NULL) | ||
1788 | + { | ||
1789 | + basic_block bb; | ||
1790 | + rtx start; | ||
1791 | + | ||
1792 | + bb = create_basic_block (NULL, NULL, exit_pred); | ||
1793 | + start = emit_jump_insn_after (gen_simple_return (), | ||
1794 | + BB_END (bb)); | ||
1795 | + JUMP_LABEL (start) = simple_return_rtx; | ||
1796 | + emit_barrier_after (start); | ||
1797 | + | ||
1798 | + simple_return_block = bb; | ||
1799 | + make_edge (bb, EXIT_BLOCK_PTR, 0); | ||
1800 | + } | ||
1801 | + | ||
1802 | + restart_scan: | ||
1803 | + for (ei2 = ei_start (last_bb->preds); (e = ei_safe_edge (ei2)); ) | ||
1804 | + { | ||
1805 | + basic_block bb = e->src; | ||
1806 | + | ||
1807 | + if (bb != ENTRY_BLOCK_PTR | ||
1808 | + && !bitmap_bit_p (&bb_flags, bb->index)) | ||
1809 | + { | ||
1810 | + redirect_edge_and_branch_force (e, simple_return_block); | ||
1811 | + goto restart_scan; | ||
1812 | + } | ||
1813 | + ei_next (&ei2); | ||
1815 | + } | ||
1816 | + } | ||
1817 | +#endif | ||
1818 | + | ||
1819 | #ifdef HAVE_sibcall_epilogue | ||
1820 | /* Emit sibling epilogues before any sibling call sites. */ | ||
1821 | for (ei = ei_start (EXIT_BLOCK_PTR->preds); (e = ei_safe_edge (ei)); ) | ||
1822 | { | ||
1823 | basic_block bb = e->src; | ||
1824 | rtx insn = BB_END (bb); | ||
1825 | + rtx ep_seq; | ||
1826 | |||
1827 | if (!CALL_P (insn) | ||
1828 | - || ! SIBLING_CALL_P (insn)) | ||
1829 | + || ! SIBLING_CALL_P (insn) | ||
1830 | + || (entry_edge != orig_entry_edge | ||
1831 | + && !bitmap_bit_p (&bb_flags, bb->index))) | ||
1832 | { | ||
1833 | ei_next (&ei); | ||
1834 | continue; | ||
1835 | } | ||
1836 | |||
1837 | - start_sequence (); | ||
1838 | - emit_note (NOTE_INSN_EPILOGUE_BEG); | ||
1839 | - emit_insn (gen_sibcall_epilogue ()); | ||
1840 | - seq = get_insns (); | ||
1841 | - end_sequence (); | ||
1842 | - | ||
1843 | - /* Retain a map of the epilogue insns. Used in life analysis to | ||
1844 | - avoid getting rid of sibcall epilogue insns. Do this before we | ||
1845 | - actually emit the sequence. */ | ||
1846 | - record_insns (seq, NULL, &epilogue_insn_hash); | ||
1847 | - set_insn_locators (seq, epilogue_locator); | ||
1848 | - | ||
1849 | - emit_insn_before (seq, insn); | ||
1850 | + ep_seq = gen_sibcall_epilogue (); | ||
1851 | + if (ep_seq) | ||
1852 | + { | ||
1853 | + start_sequence (); | ||
1854 | + emit_note (NOTE_INSN_EPILOGUE_BEG); | ||
1855 | + emit_insn (ep_seq); | ||
1856 | + seq = get_insns (); | ||
1857 | + end_sequence (); | ||
1858 | + | ||
1859 | + /* Retain a map of the epilogue insns. Used in life analysis to | ||
1860 | + avoid getting rid of sibcall epilogue insns. Do this before we | ||
1861 | + actually emit the sequence. */ | ||
1862 | + record_insns (seq, NULL, &epilogue_insn_hash); | ||
1863 | + set_insn_locators (seq, epilogue_locator); | ||
1864 | + | ||
1865 | + emit_insn_before (seq, insn); | ||
1866 | + } | ||
1867 | ei_next (&ei); | ||
1868 | } | ||
1869 | #endif | ||
1870 | @@ -5311,6 +5710,8 @@ | ||
1871 | } | ||
1872 | #endif | ||
1873 | |||
1874 | + bitmap_clear (&bb_flags); | ||
1875 | + | ||
1876 | /* Threading the prologue and epilogue changes the artificial refs | ||
1877 | in the entry and exit blocks. */ | ||
1878 | epilogue_completed = 1; | ||
1879 | |||
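The net effect of the function.c changes above is easiest to see on a function with a cheap early-exit path. A minimal illustrative sketch (made-up source, not part of the patch):

	extern int g (int *);

	/* With -fshrink-wrap, requires_stack_frame_p flags only the block
	   containing the call to g; the forward walk and the backward ANTIC
	   walk keep the early-exit block out of bb_flags, so the prologue
	   sinks onto the edge entering the call-bearing block and the "!p"
	   path returns through a simple_return without building a frame.  */
	int
	f (int *p)
	{
	  if (!p)
	    return 0;            /* fast path: no prologue executed */
	  return g (p) + 1;      /* slow path: the call forces a frame */
	}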
1880 | === modified file 'gcc/genemit.c' | ||
1881 | --- old/gcc/genemit.c 2009-11-27 11:37:06 +0000 | ||
1882 | +++ new/gcc/genemit.c 2011-01-05 12:12:18 +0000 | ||
1883 | @@ -222,6 +222,12 @@ | ||
1884 | case PC: | ||
1885 | printf ("pc_rtx"); | ||
1886 | return; | ||
1887 | + case RETURN: | ||
1888 | + printf ("ret_rtx"); | ||
1889 | + return; | ||
1890 | + case SIMPLE_RETURN: | ||
1891 | + printf ("simple_return_rtx"); | ||
1892 | + return; | ||
1893 | case CLOBBER: | ||
1894 | if (REG_P (XEXP (x, 0))) | ||
1895 | { | ||
1896 | @@ -544,8 +550,8 @@ | ||
1897 | || (GET_CODE (next) == PARALLEL | ||
1898 | && ((GET_CODE (XVECEXP (next, 0, 0)) == SET | ||
1899 | && GET_CODE (SET_DEST (XVECEXP (next, 0, 0))) == PC) | ||
1900 | - || GET_CODE (XVECEXP (next, 0, 0)) == RETURN)) | ||
1901 | - || GET_CODE (next) == RETURN) | ||
1902 | + || ANY_RETURN_P (XVECEXP (next, 0, 0)))) | ||
1903 | + || ANY_RETURN_P (next)) | ||
1904 | printf (" emit_jump_insn ("); | ||
1905 | else if ((GET_CODE (next) == SET && GET_CODE (SET_SRC (next)) == CALL) | ||
1906 | || GET_CODE (next) == CALL | ||
1907 | @@ -660,7 +666,7 @@ | ||
1908 | || (GET_CODE (next) == PARALLEL | ||
1909 | && GET_CODE (XVECEXP (next, 0, 0)) == SET | ||
1910 | && GET_CODE (SET_DEST (XVECEXP (next, 0, 0))) == PC) | ||
1911 | - || GET_CODE (next) == RETURN) | ||
1912 | + || ANY_RETURN_P (next)) | ||
1913 | printf (" emit_jump_insn ("); | ||
1914 | else if ((GET_CODE (next) == SET && GET_CODE (SET_SRC (next)) == CALL) | ||
1915 | || GET_CODE (next) == CALL | ||
1916 | |||
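For illustration, a hedged sketch of what the generated insn-emit.c would now contain for a machine-description pattern such as (define_insn "return" [(return)]) — an assumed output shape, not copied from any real target:

	/* genemit now prints the shared singleton instead of constructing
	   a fresh RETURN rtx, so all return patterns share one object and
	   pointer comparisons against ret_rtx work.  */
	rtx
	gen_return (void)
	{
	  return ret_rtx;
	}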
1917 | === modified file 'gcc/gengenrtl.c' | ||
1918 | --- old/gcc/gengenrtl.c 2007-08-22 23:30:39 +0000 | ||
1919 | +++ new/gcc/gengenrtl.c 2011-01-05 12:12:18 +0000 | ||
1920 | @@ -146,6 +146,10 @@ | ||
1921 | || strcmp (defs[idx].enumname, "REG") == 0 | ||
1922 | || strcmp (defs[idx].enumname, "SUBREG") == 0 | ||
1923 | || strcmp (defs[idx].enumname, "MEM") == 0 | ||
1924 | + || strcmp (defs[idx].enumname, "PC") == 0 | ||
1925 | + || strcmp (defs[idx].enumname, "CC0") == 0 | ||
1926 | + || strcmp (defs[idx].enumname, "RETURN") == 0 | ||
1927 | + || strcmp (defs[idx].enumname, "SIMPLE_RETURN") == 0 | ||
1928 | || strcmp (defs[idx].enumname, "CONST_VECTOR") == 0); | ||
1929 | } | ||
1930 | |||
1931 | |||
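Marking these codes special stops gengenrtl from emitting gen_rtx_RETURN and friends; shared singletons are expected instead. A sketch of the assumed rtl.h accessors, following the existing pc_rtx/cc0_rtx convention in global_rtl:

	#define ret_rtx           (global_rtl[GR_RETURN])
	#define simple_return_rtx (global_rtl[GR_SIMPLE_RETURN])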
1932 | === modified file 'gcc/haifa-sched.c' | ||
1933 | --- old/gcc/haifa-sched.c 2010-08-12 08:14:47 +0000 | ||
1934 | +++ new/gcc/haifa-sched.c 2011-01-05 12:12:18 +0000 | ||
1935 | @@ -4231,7 +4231,7 @@ | ||
1936 | /* Helper function. | ||
1937 | Find fallthru edge from PRED. */ | ||
1938 | edge | ||
1939 | -find_fallthru_edge (basic_block pred) | ||
1940 | +find_fallthru_edge_from (basic_block pred) | ||
1941 | { | ||
1942 | edge e; | ||
1943 | edge_iterator ei; | ||
1944 | @@ -4298,7 +4298,7 @@ | ||
1945 | edge e; | ||
1946 | |||
1947 | last = EXIT_BLOCK_PTR->prev_bb; | ||
1948 | - e = find_fallthru_edge (last); | ||
1949 | + e = find_fallthru_edge_from (last); | ||
1950 | |||
1951 | if (e) | ||
1952 | { | ||
1953 | @@ -5234,6 +5234,11 @@ | ||
1954 | gcc_assert (/* Usual case. */ | ||
1955 | (EDGE_COUNT (bb->succs) > 1 | ||
1956 | && !BARRIER_P (NEXT_INSN (head))) | ||
1957 | + /* Special cases, see cfglayout.c: | ||
1958 | + fixup_reorder_chain. */ | ||
1959 | + || (EDGE_COUNT (bb->succs) == 1 | ||
1960 | + && (!onlyjump_p (head) | ||
1961 | + || returnjump_p (head))) | ||
1962 | /* Or jump to the next instruction. */ | ||
1963 | || (EDGE_COUNT (bb->succs) == 1 | ||
1964 | && (BB_HEAD (EDGE_I (bb->succs, 0)->dest) | ||
1965 | |||
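The rename frees the name find_fallthru_edge for a helper that scans an edge list, which is how the function.c code above calls it (find_fallthru_edge (EXIT_BLOCK_PTR->preds)). A sketch of its assumed shape:

	/* Return the fallthru edge in EDGES, or NULL if there is none.  */
	edge
	find_fallthru_edge (VEC(edge,gc) *edges)
	{
	  edge e;
	  edge_iterator ei;

	  FOR_EACH_EDGE (e, ei, edges)
	    if (e->flags & EDGE_FALLTHRU)
	      return e;
	  return NULL;
	}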
1966 | === modified file 'gcc/ifcvt.c' | ||
1967 | --- old/gcc/ifcvt.c 2010-11-26 12:03:32 +0000 | ||
1968 | +++ new/gcc/ifcvt.c 2011-01-05 12:12:18 +0000 | ||
1969 | @@ -105,7 +105,7 @@ | ||
1970 | static int find_if_case_2 (basic_block, edge, edge); | ||
1971 | static int find_memory (rtx *, void *); | ||
1972 | static int dead_or_predicable (basic_block, basic_block, basic_block, | ||
1973 | - basic_block, int); | ||
1974 | + edge, int); | ||
1975 | static void noce_emit_move_insn (rtx, rtx); | ||
1976 | static rtx block_has_only_trap (basic_block); | ||
1977 | |||
1978 | @@ -3791,6 +3791,7 @@ | ||
1979 | basic_block then_bb = then_edge->dest; | ||
1980 | basic_block else_bb = else_edge->dest; | ||
1981 | basic_block new_bb; | ||
1982 | + rtx else_target = NULL_RTX; | ||
1983 | int then_bb_index; | ||
1984 | |||
1985 | /* If we are partitioning hot/cold basic blocks, we don't want to | ||
1986 | @@ -3840,9 +3841,16 @@ | ||
1987 | predictable_edge_p (then_edge))))) | ||
1988 | return FALSE; | ||
1989 | |||
1990 | + if (else_bb == EXIT_BLOCK_PTR) | ||
1991 | + { | ||
1992 | + rtx jump = BB_END (else_edge->src); | ||
1993 | + gcc_assert (JUMP_P (jump)); | ||
1994 | + else_target = JUMP_LABEL (jump); | ||
1995 | + } | ||
1996 | + | ||
1997 | /* Registers set are dead, or are predicable. */ | ||
1998 | if (! dead_or_predicable (test_bb, then_bb, else_bb, | ||
1999 | - single_succ (then_bb), 1)) | ||
2000 | + single_succ_edge (then_bb), 1)) | ||
2001 | return FALSE; | ||
2002 | |||
2003 | /* Conversion went ok, including moving the insns and fixing up the | ||
2004 | @@ -3859,6 +3867,9 @@ | ||
2005 | redirect_edge_succ (FALLTHRU_EDGE (test_bb), else_bb); | ||
2006 | new_bb = 0; | ||
2007 | } | ||
2008 | + else if (else_bb == EXIT_BLOCK_PTR) | ||
2009 | + new_bb = force_nonfallthru_and_redirect (FALLTHRU_EDGE (test_bb), | ||
2010 | + else_bb, else_target); | ||
2011 | else | ||
2012 | new_bb = redirect_edge_and_branch_force (FALLTHRU_EDGE (test_bb), | ||
2013 | else_bb); | ||
2014 | @@ -3957,7 +3968,7 @@ | ||
2015 | return FALSE; | ||
2016 | |||
2017 | /* Registers set are dead, or are predicable. */ | ||
2018 | - if (! dead_or_predicable (test_bb, else_bb, then_bb, else_succ->dest, 0)) | ||
2019 | + if (! dead_or_predicable (test_bb, else_bb, then_bb, else_succ, 0)) | ||
2020 | return FALSE; | ||
2021 | |||
2022 | /* Conversion went ok, including moving the insns and fixing up the | ||
2023 | @@ -3995,12 +4006,34 @@ | ||
2024 | |||
2025 | static int | ||
2026 | dead_or_predicable (basic_block test_bb, basic_block merge_bb, | ||
2027 | - basic_block other_bb, basic_block new_dest, int reversep) | ||
2028 | + basic_block other_bb, edge dest_edge, int reversep) | ||
2029 | { | ||
2030 | - rtx head, end, jump, earliest = NULL_RTX, old_dest, new_label = NULL_RTX; | ||
2031 | + basic_block new_dest = dest_edge->dest; | ||
2032 | + rtx head, end, jump, earliest = NULL_RTX, old_dest; | ||
2033 | bitmap merge_set = NULL; | ||
2034 | /* Number of pending changes. */ | ||
2035 | int n_validated_changes = 0; | ||
2036 | + rtx new_dest_label; | ||
2037 | + | ||
2038 | + jump = BB_END (dest_edge->src); | ||
2039 | + if (JUMP_P (jump)) | ||
2040 | + { | ||
2041 | + new_dest_label = JUMP_LABEL (jump); | ||
2042 | + if (new_dest_label == NULL_RTX) | ||
2043 | + { | ||
2044 | + new_dest_label = PATTERN (jump); | ||
2045 | + gcc_assert (ANY_RETURN_P (new_dest_label)); | ||
2046 | + } | ||
2047 | + } | ||
2048 | + else if (other_bb != new_dest) | ||
2049 | + { | ||
2050 | + if (new_dest == EXIT_BLOCK_PTR) | ||
2051 | + new_dest_label = ret_rtx; | ||
2052 | + else | ||
2053 | + new_dest_label = block_label (new_dest); | ||
2054 | + } | ||
2055 | + else | ||
2056 | + new_dest_label = NULL_RTX; | ||
2057 | |||
2058 | jump = BB_END (test_bb); | ||
2059 | |||
2060 | @@ -4220,10 +4253,9 @@ | ||
2061 | old_dest = JUMP_LABEL (jump); | ||
2062 | if (other_bb != new_dest) | ||
2063 | { | ||
2064 | - new_label = block_label (new_dest); | ||
2065 | if (reversep | ||
2066 | - ? ! invert_jump_1 (jump, new_label) | ||
2067 | - : ! redirect_jump_1 (jump, new_label)) | ||
2068 | + ? ! invert_jump_1 (jump, new_dest_label) | ||
2069 | + : ! redirect_jump_1 (jump, new_dest_label)) | ||
2070 | goto cancel; | ||
2071 | } | ||
2072 | |||
2073 | @@ -4234,7 +4266,7 @@ | ||
2074 | |||
2075 | if (other_bb != new_dest) | ||
2076 | { | ||
2077 | - redirect_jump_2 (jump, old_dest, new_label, 0, reversep); | ||
2078 | + redirect_jump_2 (jump, old_dest, new_dest_label, 0, reversep); | ||
2079 | |||
2080 | redirect_edge_succ (BRANCH_EDGE (test_bb), new_dest); | ||
2081 | if (reversep) | ||
2082 | |||
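The payoff of threading the jump target through dead_or_predicable is that a branch whose destination simply returns can now be if-converted into a conditional return on targets that support one. An illustrative (hypothetical, target-dependent) source fragment:

	extern int g (int);

	int
	f (int x)
	{
	  if (x == 0)
	    return 0;   /* may become a predicated return rather than a
			   branch around the rest of the function */
	  return g (x);
	}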
2083 | === modified file 'gcc/jump.c' | ||
2084 | --- old/gcc/jump.c 2010-12-13 10:05:52 +0000 | ||
2085 | +++ new/gcc/jump.c 2011-01-05 12:12:18 +0000 | ||
2086 | @@ -29,7 +29,8 @@ | ||
2087 | JUMP_LABEL internal field. With this we can detect labels that | ||
2088 | become unused because of the deletion of all the jumps that | ||
2089 | formerly used them. The JUMP_LABEL info is sometimes looked | ||
2090 | - at by later passes. | ||
2091 | + at by later passes. For return insns, it contains either a | ||
2092 | + RETURN or a SIMPLE_RETURN rtx. | ||
2093 | |||
2094 | The subroutines redirect_jump and invert_jump are used | ||
2095 | from other passes as well. */ | ||
2096 | @@ -742,10 +743,10 @@ | ||
2097 | return (GET_CODE (x) == IF_THEN_ELSE | ||
2098 | && ((GET_CODE (XEXP (x, 2)) == PC | ||
2099 | && (GET_CODE (XEXP (x, 1)) == LABEL_REF | ||
2100 | - || GET_CODE (XEXP (x, 1)) == RETURN)) | ||
2101 | + || ANY_RETURN_P (XEXP (x, 1)))) | ||
2102 | || (GET_CODE (XEXP (x, 1)) == PC | ||
2103 | && (GET_CODE (XEXP (x, 2)) == LABEL_REF | ||
2104 | - || GET_CODE (XEXP (x, 2)) == RETURN)))); | ||
2105 | + || ANY_RETURN_P (XEXP (x, 2)))))); | ||
2106 | } | ||
2107 | |||
2108 | /* Return nonzero if INSN is a (possibly) conditional jump inside a | ||
2109 | @@ -774,11 +775,11 @@ | ||
2110 | return 0; | ||
2111 | if (XEXP (SET_SRC (x), 2) == pc_rtx | ||
2112 | && (GET_CODE (XEXP (SET_SRC (x), 1)) == LABEL_REF | ||
2113 | - || GET_CODE (XEXP (SET_SRC (x), 1)) == RETURN)) | ||
2114 | + || ANY_RETURN_P (XEXP (SET_SRC (x), 1)))) | ||
2115 | return 1; | ||
2116 | if (XEXP (SET_SRC (x), 1) == pc_rtx | ||
2117 | && (GET_CODE (XEXP (SET_SRC (x), 2)) == LABEL_REF | ||
2118 | - || GET_CODE (XEXP (SET_SRC (x), 2)) == RETURN)) | ||
2119 | + || ANY_RETURN_P (XEXP (SET_SRC (x), 2)))) | ||
2120 | return 1; | ||
2121 | return 0; | ||
2122 | } | ||
2123 | @@ -840,8 +841,9 @@ | ||
2124 | a = GET_CODE (XEXP (SET_SRC (x), 1)); | ||
2125 | b = GET_CODE (XEXP (SET_SRC (x), 2)); | ||
2126 | |||
2127 | - return ((b == PC && (a == LABEL_REF || a == RETURN)) | ||
2128 | - || (a == PC && (b == LABEL_REF || b == RETURN))); | ||
2129 | + return ((b == PC && (a == LABEL_REF || a == RETURN || a == SIMPLE_RETURN)) | ||
2130 | + || (a == PC | ||
2131 | + && (b == LABEL_REF || b == RETURN || b == SIMPLE_RETURN))); | ||
2132 | } | ||
2133 | |||
2134 | /* Return the label of a conditional jump. */ | ||
2135 | @@ -878,6 +880,7 @@ | ||
2136 | switch (GET_CODE (x)) | ||
2137 | { | ||
2138 | case RETURN: | ||
2139 | + case SIMPLE_RETURN: | ||
2140 | case EH_RETURN: | ||
2141 | return true; | ||
2142 | |||
2143 | @@ -1200,7 +1203,7 @@ | ||
2144 | /* If deleting a jump, decrement the count of the label, | ||
2145 | and delete the label if it is now unused. */ | ||
2146 | |||
2147 | - if (JUMP_P (insn) && JUMP_LABEL (insn)) | ||
2148 | + if (JUMP_P (insn) && JUMP_LABEL (insn) && !ANY_RETURN_P (JUMP_LABEL (insn))) | ||
2149 | { | ||
2150 | rtx lab = JUMP_LABEL (insn), lab_next; | ||
2151 | |||
2152 | @@ -1331,6 +1334,18 @@ | ||
2153 | is also an unconditional jump in that case. */ | ||
2154 | } | ||
2155 | |||
2156 | +/* A helper function for redirect_exp_1; examines its input X and returns | ||
2157 | + ret_rtx if X was NULL, X itself if X is a return rtx, and | ||
2157 | + a LABEL_REF wrapping X otherwise. */ | ||
2158 | +static rtx | ||
2159 | +redirect_target (rtx x) | ||
2160 | +{ | ||
2161 | + if (x == NULL_RTX) | ||
2162 | + return ret_rtx; | ||
2163 | + if (!ANY_RETURN_P (x)) | ||
2164 | + return gen_rtx_LABEL_REF (Pmode, x); | ||
2165 | + return x; | ||
2166 | +} | ||
2167 | + | ||
2168 | /* Throughout LOC, redirect OLABEL to NLABEL. Treat null OLABEL or | ||
2169 | NLABEL as a return. Accrue modifications into the change group. */ | ||
2170 | |||
2171 | @@ -1342,37 +1357,19 @@ | ||
2172 | int i; | ||
2173 | const char *fmt; | ||
2174 | |||
2175 | - if (code == LABEL_REF) | ||
2176 | - { | ||
2177 | - if (XEXP (x, 0) == olabel) | ||
2178 | - { | ||
2179 | - rtx n; | ||
2180 | - if (nlabel) | ||
2181 | - n = gen_rtx_LABEL_REF (Pmode, nlabel); | ||
2182 | - else | ||
2183 | - n = gen_rtx_RETURN (VOIDmode); | ||
2184 | - | ||
2185 | - validate_change (insn, loc, n, 1); | ||
2186 | - return; | ||
2187 | - } | ||
2188 | - } | ||
2189 | - else if (code == RETURN && olabel == 0) | ||
2190 | - { | ||
2191 | - if (nlabel) | ||
2192 | - x = gen_rtx_LABEL_REF (Pmode, nlabel); | ||
2193 | - else | ||
2194 | - x = gen_rtx_RETURN (VOIDmode); | ||
2195 | - if (loc == &PATTERN (insn)) | ||
2196 | - x = gen_rtx_SET (VOIDmode, pc_rtx, x); | ||
2197 | - validate_change (insn, loc, x, 1); | ||
2198 | + if ((code == LABEL_REF && XEXP (x, 0) == olabel) | ||
2199 | + || x == olabel) | ||
2200 | + { | ||
2201 | + validate_change (insn, loc, redirect_target (nlabel), 1); | ||
2202 | return; | ||
2203 | } | ||
2204 | |||
2205 | - if (code == SET && nlabel == 0 && SET_DEST (x) == pc_rtx | ||
2206 | + if (code == SET && SET_DEST (x) == pc_rtx | ||
2207 | + && ANY_RETURN_P (nlabel) | ||
2208 | && GET_CODE (SET_SRC (x)) == LABEL_REF | ||
2209 | && XEXP (SET_SRC (x), 0) == olabel) | ||
2210 | { | ||
2211 | - validate_change (insn, loc, gen_rtx_RETURN (VOIDmode), 1); | ||
2212 | + validate_change (insn, loc, nlabel, 1); | ||
2213 | return; | ||
2214 | } | ||
2215 | |||
2216 | @@ -1409,6 +1406,7 @@ | ||
2217 | int ochanges = num_validated_changes (); | ||
2218 | rtx *loc, asmop; | ||
2219 | |||
2220 | + gcc_assert (nlabel); | ||
2221 | asmop = extract_asm_operands (PATTERN (jump)); | ||
2222 | if (asmop) | ||
2223 | { | ||
2224 | @@ -1430,17 +1428,20 @@ | ||
2225 | jump target label is unused as a result, it and the code following | ||
2226 | it may be deleted. | ||
2227 | |||
2228 | - If NLABEL is zero, we are to turn the jump into a (possibly conditional) | ||
2229 | - RETURN insn. | ||
2230 | + Normally, NLABEL will be a label, but it may also be a RETURN or | ||
2231 | + SIMPLE_RETURN rtx; in that case we are to turn the jump into a | ||
2232 | + (possibly conditional) return insn. | ||
2233 | |||
2234 | The return value will be 1 if the change was made, 0 if it wasn't | ||
2235 | - (this can only occur for NLABEL == 0). */ | ||
2236 | + (this can only occur when trying to produce return insns). */ | ||
2237 | |||
2238 | int | ||
2239 | redirect_jump (rtx jump, rtx nlabel, int delete_unused) | ||
2240 | { | ||
2241 | rtx olabel = JUMP_LABEL (jump); | ||
2242 | |||
2243 | + gcc_assert (nlabel != NULL_RTX); | ||
2244 | + | ||
2245 | if (nlabel == olabel) | ||
2246 | return 1; | ||
2247 | |||
2248 | @@ -1452,7 +1453,7 @@ | ||
2249 | } | ||
2250 | |||
2251 | /* Fix up JUMP_LABEL and label ref counts after OLABEL has been replaced with | ||
2252 | - NLABEL in JUMP. | ||
2253 | + NEW_DEST in JUMP. | ||
2254 | If DELETE_UNUSED is positive, delete related insn to OLABEL if its ref | ||
2255 | count has dropped to zero. */ | ||
2256 | void | ||
2257 | @@ -1468,13 +1469,14 @@ | ||
2258 | about this. */ | ||
2259 | gcc_assert (delete_unused >= 0); | ||
2260 | JUMP_LABEL (jump) = nlabel; | ||
2261 | - if (nlabel) | ||
2262 | + if (nlabel && !ANY_RETURN_P (nlabel)) | ||
2263 | ++LABEL_NUSES (nlabel); | ||
2264 | |||
2265 | /* Update labels in any REG_EQUAL note. */ | ||
2266 | if ((note = find_reg_note (jump, REG_EQUAL, NULL_RTX)) != NULL_RTX) | ||
2267 | { | ||
2268 | - if (!nlabel || (invert && !invert_exp_1 (XEXP (note, 0), jump))) | ||
2269 | + if (ANY_RETURN_P (nlabel) | ||
2270 | + || (invert && !invert_exp_1 (XEXP (note, 0), jump))) | ||
2271 | remove_note (jump, note); | ||
2272 | else | ||
2273 | { | ||
2274 | @@ -1483,7 +1485,8 @@ | ||
2275 | } | ||
2276 | } | ||
2277 | |||
2278 | - if (olabel && --LABEL_NUSES (olabel) == 0 && delete_unused > 0 | ||
2279 | + if (olabel && !ANY_RETURN_P (olabel) | ||
2280 | + && --LABEL_NUSES (olabel) == 0 && delete_unused > 0 | ||
2281 | /* Undefined labels will remain outside the insn stream. */ | ||
2282 | && INSN_UID (olabel)) | ||
2283 | delete_related_insns (olabel); | ||
2284 | |||
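Under the new contract, callers of redirect_jump never pass a null NLABEL; to turn a jump into a return they pass one of the shared return rtxs, as the function.c code above does. A minimal usage sketch (JUMP assumed to be a conditional jump insn):

	/* Retarget JUMP at a plain return; redirect_jump returns 0 when
	   the jump cannot be converted, leaving it unchanged.  */
	if (!redirect_jump (jump, ret_rtx, 0))
	  ; /* conversion failed, fall back to the label-based path */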
2285 | === modified file 'gcc/opts.c' | ||
2286 | --- old/gcc/opts.c 2010-12-10 15:33:37 +0000 | ||
2287 | +++ new/gcc/opts.c 2011-01-05 12:12:18 +0000 | ||
2288 | @@ -908,6 +908,7 @@ | ||
2289 | flag_ipa_cp = opt2; | ||
2290 | flag_ipa_sra = opt2; | ||
2291 | flag_ee = opt2; | ||
2292 | + flag_shrink_wrap = opt2; | ||
2293 | |||
2294 | /* Track fields in field-sensitive alias analysis. */ | ||
2295 | set_param_value ("max-fields-for-field-sensitive", | ||
2296 | |||
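Since flag_shrink_wrap is tied to opt2 here, shrink-wrapping is on by default at -O2 and above; assuming the usual negative-option spelling, "gcc -O2 -fno-shrink-wrap" disables it for comparison.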
2297 | === modified file 'gcc/print-rtl.c' | ||
2298 | --- old/gcc/print-rtl.c 2010-03-26 16:18:51 +0000 | ||
2299 | +++ new/gcc/print-rtl.c 2011-01-05 12:12:18 +0000 | ||
2300 | @@ -308,9 +308,16 @@ | ||
2301 | } | ||
2302 | } | ||
2303 | else if (i == 8 && JUMP_P (in_rtx) && JUMP_LABEL (in_rtx) != NULL) | ||
2304 | - /* Output the JUMP_LABEL reference. */ | ||
2305 | - fprintf (outfile, "\n%s%*s -> %d", print_rtx_head, indent * 2, "", | ||
2306 | - INSN_UID (JUMP_LABEL (in_rtx))); | ||
2307 | + { | ||
2308 | + /* Output the JUMP_LABEL reference. */ | ||
2309 | + fprintf (outfile, "\n%s%*s -> ", print_rtx_head, indent * 2, ""); | ||
2310 | + if (GET_CODE (JUMP_LABEL (in_rtx)) == RETURN) | ||
2311 | + fprintf (outfile, "return"); | ||
2312 | + else if (GET_CODE (JUMP_LABEL (in_rtx)) == SIMPLE_RETURN) | ||
2313 | + fprintf (outfile, "simple_return"); | ||
2314 | + else | ||
2315 | + fprintf (outfile, "%d", INSN_UID (JUMP_LABEL (in_rtx))); | ||
2316 | + } | ||
2317 | else if (i == 0 && GET_CODE (in_rtx) == VALUE) | ||
2318 | { | ||
2319 | #ifndef GENERATOR_FILE | ||
2320 | |||
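In RTL dumps this shows up as a symbolic target after a return jump: where a label's INSN_UID used to follow the "->" marker, the dump now reads " -> return" or " -> simple_return".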
2321 | === modified file 'gcc/reorg.c' | ||
2322 | --- old/gcc/reorg.c 2010-09-15 22:51:44 +0000 | ||
2323 | +++ new/gcc/reorg.c 2011-01-05 12:12:18 +0000 | ||
2324 | @@ -161,8 +161,11 @@ | ||
2325 | #define unfilled_slots_next \ | ||
2326 | ((rtx *) obstack_next_free (&unfilled_slots_obstack)) | ||
2327 | |||
2328 | -/* Points to the label before the end of the function. */ | ||
2329 | -static rtx end_of_function_label; | ||
2330 | +/* Points to the label before the end of the function, or before a | ||
2331 | + return insn. */ | ||
2332 | +static rtx function_return_label; | ||
2333 | +/* Likewise for a simple_return. */ | ||
2334 | +static rtx function_simple_return_label; | ||
2335 | |||
2336 | /* Mapping between INSN_UID's and position in the code since INSN_UID's do | ||
2337 | not always monotonically increase. */ | ||
2338 | @@ -175,7 +178,7 @@ | ||
2339 | static int resource_conflicts_p (struct resources *, struct resources *); | ||
2340 | static int insn_references_resource_p (rtx, struct resources *, bool); | ||
2341 | static int insn_sets_resource_p (rtx, struct resources *, bool); | ||
2342 | -static rtx find_end_label (void); | ||
2343 | +static rtx find_end_label (rtx); | ||
2344 | static rtx emit_delay_sequence (rtx, rtx, int); | ||
2345 | static rtx add_to_delay_list (rtx, rtx); | ||
2346 | static rtx delete_from_delay_slot (rtx); | ||
2347 | @@ -220,6 +223,15 @@ | ||
2348 | static void make_return_insns (rtx); | ||
2349 | #endif | ||
2350 | |||
2351 | +/* Return true iff INSN is a simplejump, or any kind of return insn. */ | ||
2352 | + | ||
2353 | +static bool | ||
2354 | +simplejump_or_return_p (rtx insn) | ||
2355 | +{ | ||
2356 | + return (JUMP_P (insn) | ||
2357 | + && (simplejump_p (insn) || ANY_RETURN_P (PATTERN (insn)))); | ||
2358 | +} | ||
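Throughout the reorg.c changes below, the key invariant is that JUMP_LABEL may now hold ret_rtx or simple_return_rtx rather than a code label, so it must be tested before being used as an insn. A minimal sketch of the pattern the patch applies repeatedly:

	rtx lab = JUMP_LABEL (insn);
	if (lab != NULL_RTX && !ANY_RETURN_P (lab))
	  lab = skip_consecutive_labels (follow_jumps (lab));
	/* otherwise LAB is a shared return rtx, not an insn  */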
2359 | + | ||
2360 | /* Return TRUE if this insn should stop the search for insn to fill delay | ||
2361 | slots. LABELS_P indicates that labels should terminate the search. | ||
2362 | In all cases, jumps terminate the search. */ | ||
2363 | @@ -335,23 +347,29 @@ | ||
2364 | |||
2365 | ??? There may be a problem with the current implementation. Suppose | ||
2366 | we start with a bare RETURN insn and call find_end_label. It may set | ||
2367 | - end_of_function_label just before the RETURN. Suppose the machinery | ||
2368 | + function_return_label just before the RETURN. Suppose the machinery | ||
2369 | is able to fill the delay slot of the RETURN insn afterwards. Then | ||
2370 | - end_of_function_label is no longer valid according to the property | ||
2371 | + function_return_label is no longer valid according to the property | ||
2372 | described above and find_end_label will still return it unmodified. | ||
2373 | Note that this is probably mitigated by the following observation: | ||
2374 | - once end_of_function_label is made, it is very likely the target of | ||
2375 | + once function_return_label is made, it is very likely the target of | ||
2376 | a jump, so filling the delay slot of the RETURN will be much more | ||
2377 | difficult. */ | ||
2378 | |||
2379 | static rtx | ||
2380 | -find_end_label (void) | ||
2381 | +find_end_label (rtx kind) | ||
2382 | { | ||
2383 | rtx insn; | ||
2384 | + rtx *plabel; | ||
2385 | + | ||
2386 | + if (kind == ret_rtx) | ||
2387 | + plabel = &function_return_label; | ||
2388 | + else | ||
2389 | + plabel = &function_simple_return_label; | ||
2390 | |||
2391 | /* If we found one previously, return it. */ | ||
2392 | - if (end_of_function_label) | ||
2393 | - return end_of_function_label; | ||
2394 | + if (*plabel) | ||
2395 | + return *plabel; | ||
2396 | |||
2397 | /* Otherwise, see if there is a label at the end of the function. If there | ||
2398 | is, it must be that RETURN insns aren't needed, so that is our return | ||
2399 | @@ -366,44 +384,44 @@ | ||
2400 | |||
2401 | /* When a target threads its epilogue we might already have a | ||
2402 | suitable return insn. If so put a label before it for the | ||
2403 | - end_of_function_label. */ | ||
2404 | + function_return_label. */ | ||
2405 | if (BARRIER_P (insn) | ||
2406 | && JUMP_P (PREV_INSN (insn)) | ||
2407 | - && GET_CODE (PATTERN (PREV_INSN (insn))) == RETURN) | ||
2408 | + && PATTERN (PREV_INSN (insn)) == kind) | ||
2409 | { | ||
2410 | rtx temp = PREV_INSN (PREV_INSN (insn)); | ||
2411 | - end_of_function_label = gen_label_rtx (); | ||
2412 | - LABEL_NUSES (end_of_function_label) = 0; | ||
2413 | + rtx label = gen_label_rtx (); | ||
2414 | + LABEL_NUSES (label) = 0; | ||
2415 | |||
2416 | /* Put the label before an USE insns that may precede the RETURN insn. */ | ||
2417 | while (GET_CODE (temp) == USE) | ||
2418 | temp = PREV_INSN (temp); | ||
2419 | |||
2420 | - emit_label_after (end_of_function_label, temp); | ||
2421 | + emit_label_after (label, temp); | ||
2422 | + *plabel = label; | ||
2423 | } | ||
2424 | |||
2425 | else if (LABEL_P (insn)) | ||
2426 | - end_of_function_label = insn; | ||
2427 | + *plabel = insn; | ||
2428 | else | ||
2429 | { | ||
2430 | - end_of_function_label = gen_label_rtx (); | ||
2431 | - LABEL_NUSES (end_of_function_label) = 0; | ||
2432 | + rtx label = gen_label_rtx (); | ||
2433 | + LABEL_NUSES (label) = 0; | ||
2434 | /* If the basic block reorder pass moves the return insn to | ||
2435 | some other place try to locate it again and put our | ||
2436 | - end_of_function_label there. */ | ||
2437 | - while (insn && ! (JUMP_P (insn) | ||
2438 | - && (GET_CODE (PATTERN (insn)) == RETURN))) | ||
2439 | + function_return_label there. */ | ||
2440 | + while (insn && ! (JUMP_P (insn) && (PATTERN (insn) == kind))) | ||
2441 | insn = PREV_INSN (insn); | ||
2442 | if (insn) | ||
2443 | { | ||
2444 | insn = PREV_INSN (insn); | ||
2445 | |||
2446 | - /* Put the label before an USE insns that may proceed the | ||
2447 | + /* Put the label before any USE insns that may precede the | ||
2448 | RETURN insn. */ | ||
2449 | while (GET_CODE (insn) == USE) | ||
2450 | insn = PREV_INSN (insn); | ||
2451 | |||
2452 | - emit_label_after (end_of_function_label, insn); | ||
2453 | + emit_label_after (label, insn); | ||
2454 | } | ||
2455 | else | ||
2456 | { | ||
2457 | @@ -413,19 +431,16 @@ | ||
2458 | && ! HAVE_return | ||
2459 | #endif | ||
2460 | ) | ||
2461 | - { | ||
2462 | - /* The RETURN insn has its delay slot filled so we cannot | ||
2463 | - emit the label just before it. Since we already have | ||
2464 | - an epilogue and cannot emit a new RETURN, we cannot | ||
2465 | - emit the label at all. */ | ||
2466 | - end_of_function_label = NULL_RTX; | ||
2467 | - return end_of_function_label; | ||
2468 | - } | ||
2469 | + /* The RETURN insn has its delay slot filled so we cannot | ||
2470 | + emit the label just before it. Since we already have | ||
2471 | + an epilogue and cannot emit a new RETURN, we cannot | ||
2472 | + emit the label at all. */ | ||
2473 | + return NULL_RTX; | ||
2474 | #endif /* HAVE_epilogue */ | ||
2475 | |||
2476 | /* Otherwise, make a new label and emit a RETURN and BARRIER, | ||
2477 | if needed. */ | ||
2478 | - emit_label (end_of_function_label); | ||
2479 | + emit_label (label); | ||
2480 | #ifdef HAVE_return | ||
2481 | /* We don't bother trying to create a return insn if the | ||
2482 | epilogue has filled delay-slots; we would have to try and | ||
2483 | @@ -437,19 +452,21 @@ | ||
2484 | /* The return we make may have delay slots too. */ | ||
2485 | rtx insn = gen_return (); | ||
2486 | insn = emit_jump_insn (insn); | ||
2487 | + JUMP_LABEL (insn) = ret_rtx; | ||
2488 | emit_barrier (); | ||
2489 | if (num_delay_slots (insn) > 0) | ||
2490 | obstack_ptr_grow (&unfilled_slots_obstack, insn); | ||
2491 | } | ||
2492 | #endif | ||
2493 | } | ||
2494 | + *plabel = label; | ||
2495 | } | ||
2496 | |||
2497 | /* Show one additional use for this label so it won't go away until | ||
2498 | we are done. */ | ||
2499 | - ++LABEL_NUSES (end_of_function_label); | ||
2500 | + ++LABEL_NUSES (*plabel); | ||
2501 | |||
2502 | - return end_of_function_label; | ||
2503 | + return *plabel; | ||
2504 | } | ||
2505 | |||
2506 | /* Put INSN and LIST together in a SEQUENCE rtx of LENGTH, and replace | ||
2507 | @@ -797,10 +814,8 @@ | ||
2508 | if ((next_trial == next_active_insn (JUMP_LABEL (insn)) | ||
2509 | && ! (next_trial == 0 && crtl->epilogue_delay_list != 0)) | ||
2510 | || (next_trial != 0 | ||
2511 | - && JUMP_P (next_trial) | ||
2512 | - && JUMP_LABEL (insn) == JUMP_LABEL (next_trial) | ||
2513 | - && (simplejump_p (next_trial) | ||
2514 | - || GET_CODE (PATTERN (next_trial)) == RETURN))) | ||
2515 | + && simplejump_or_return_p (next_trial) | ||
2516 | + && JUMP_LABEL (insn) == JUMP_LABEL (next_trial))) | ||
2517 | { | ||
2518 | if (eligible_for_annul_false (insn, 0, trial, flags)) | ||
2519 | { | ||
2520 | @@ -819,13 +834,11 @@ | ||
2521 | branch, thread our jump to the target of that branch. Don't | ||
2522 | change this into a RETURN here, because it may not accept what | ||
2523 | we have in the delay slot. We'll fix this up later. */ | ||
2524 | - if (next_trial && JUMP_P (next_trial) | ||
2525 | - && (simplejump_p (next_trial) | ||
2526 | - || GET_CODE (PATTERN (next_trial)) == RETURN)) | ||
2527 | + if (next_trial && simplejump_or_return_p (next_trial)) | ||
2528 | { | ||
2529 | rtx target_label = JUMP_LABEL (next_trial); | ||
2530 | - if (target_label == 0) | ||
2531 | - target_label = find_end_label (); | ||
2532 | + if (ANY_RETURN_P (target_label)) | ||
2533 | + target_label = find_end_label (target_label); | ||
2534 | |||
2535 | if (target_label) | ||
2536 | { | ||
2537 | @@ -866,7 +879,7 @@ | ||
2538 | if (JUMP_P (insn) | ||
2539 | && (condjump_p (insn) || condjump_in_parallel_p (insn)) | ||
2540 | && INSN_UID (insn) <= max_uid | ||
2541 | - && label != 0 | ||
2542 | + && label != 0 && !ANY_RETURN_P (label) | ||
2543 | && INSN_UID (label) <= max_uid) | ||
2544 | flags | ||
2545 | = (uid_to_ruid[INSN_UID (label)] > uid_to_ruid[INSN_UID (insn)]) | ||
2546 | @@ -1038,7 +1051,7 @@ | ||
2547 | pat = XVECEXP (pat, 0, 0); | ||
2548 | |||
2549 | if (GET_CODE (pat) == RETURN) | ||
2550 | - return target == 0 ? const_true_rtx : 0; | ||
2551 | + return ANY_RETURN_P (target) ? const_true_rtx : 0; | ||
2552 | |||
2553 | else if (GET_CODE (pat) != SET || SET_DEST (pat) != pc_rtx) | ||
2554 | return 0; | ||
2555 | @@ -1318,7 +1331,11 @@ | ||
2556 | } | ||
2557 | |||
2558 | /* Show the place to which we will be branching. */ | ||
2559 | - *pnew_thread = next_active_insn (JUMP_LABEL (XVECEXP (seq, 0, 0))); | ||
2560 | + temp = JUMP_LABEL (XVECEXP (seq, 0, 0)); | ||
2561 | + if (ANY_RETURN_P (temp)) | ||
2562 | + *pnew_thread = temp; | ||
2563 | + else | ||
2564 | + *pnew_thread = next_active_insn (temp); | ||
2565 | |||
2566 | /* Add any new insns to the delay list and update the count of the | ||
2567 | number of slots filled. */ | ||
2568 | @@ -1358,8 +1375,7 @@ | ||
2569 | /* We can't do anything if SEQ's delay insn isn't an | ||
2570 | unconditional branch. */ | ||
2571 | |||
2572 | - if (! simplejump_p (XVECEXP (seq, 0, 0)) | ||
2573 | - && GET_CODE (PATTERN (XVECEXP (seq, 0, 0))) != RETURN) | ||
2574 | + if (! simplejump_or_return_p (XVECEXP (seq, 0, 0))) | ||
2575 | return delay_list; | ||
2576 | |||
2577 | for (i = 1; i < XVECLEN (seq, 0); i++) | ||
2578 | @@ -1827,7 +1843,7 @@ | ||
2579 | rtx insn; | ||
2580 | |||
2581 | /* We don't own the function end. */ | ||
2582 | - if (thread == 0) | ||
2583 | + if (ANY_RETURN_P (thread)) | ||
2584 | return 0; | ||
2585 | |||
2586 | /* Get the first active insn, or THREAD, if it is an active insn. */ | ||
2587 | @@ -2245,7 +2261,8 @@ | ||
2588 | && (!JUMP_P (insn) | ||
2589 | || ((condjump_p (insn) || condjump_in_parallel_p (insn)) | ||
2590 | && ! simplejump_p (insn) | ||
2591 | - && JUMP_LABEL (insn) != 0))) | ||
2592 | + && JUMP_LABEL (insn) != 0 | ||
2593 | + && !ANY_RETURN_P (JUMP_LABEL (insn))))) | ||
2594 | { | ||
2595 | /* Invariant: If insn is a JUMP_INSN, the insn's jump | ||
2596 | label. Otherwise, zero. */ | ||
2597 | @@ -2270,7 +2287,7 @@ | ||
2598 | target = JUMP_LABEL (insn); | ||
2599 | } | ||
2600 | |||
2601 | - if (target == 0) | ||
2602 | + if (target == 0 || ANY_RETURN_P (target)) | ||
2603 | for (trial = next_nonnote_insn (insn); trial; trial = next_trial) | ||
2604 | { | ||
2605 | next_trial = next_nonnote_insn (trial); | ||
2606 | @@ -2349,6 +2366,7 @@ | ||
2607 | && JUMP_P (trial) | ||
2608 | && simplejump_p (trial) | ||
2609 | && (target == 0 || JUMP_LABEL (trial) == target) | ||
2610 | + && !ANY_RETURN_P (JUMP_LABEL (trial)) | ||
2611 | && (next_trial = next_active_insn (JUMP_LABEL (trial))) != 0 | ||
2612 | && ! (NONJUMP_INSN_P (next_trial) | ||
2613 | && GET_CODE (PATTERN (next_trial)) == SEQUENCE) | ||
2614 | @@ -2371,7 +2389,7 @@ | ||
2615 | if (new_label != 0) | ||
2616 | new_label = get_label_before (new_label); | ||
2617 | else | ||
2618 | - new_label = find_end_label (); | ||
2619 | + new_label = find_end_label (simple_return_rtx); | ||
2620 | |||
2621 | if (new_label) | ||
2622 | { | ||
2623 | @@ -2503,7 +2521,8 @@ | ||
2624 | |||
2625 | /* Follow any unconditional jump at LABEL; | ||
2626 | return the ultimate label reached by any such chain of jumps. | ||
2627 | - Return null if the chain ultimately leads to a return instruction. | ||
2628 | + Return a suitable return rtx if the chain ultimately leads to a | ||
2629 | + return instruction. | ||
2630 | If LABEL is not followed by a jump, return LABEL. | ||
2631 | If the chain loops or we can't find end, return LABEL, | ||
2632 | since that tells caller to avoid changing the insn. */ | ||
2633 | @@ -2518,6 +2537,7 @@ | ||
2634 | |||
2635 | for (depth = 0; | ||
2636 | (depth < 10 | ||
2637 | + && !ANY_RETURN_P (value) | ||
2638 | && (insn = next_active_insn (value)) != 0 | ||
2639 | && JUMP_P (insn) | ||
2640 | && ((JUMP_LABEL (insn) != 0 && any_uncondjump_p (insn) | ||
2641 | @@ -2527,18 +2547,22 @@ | ||
2642 | && BARRIER_P (next)); | ||
2643 | depth++) | ||
2644 | { | ||
2645 | - rtx tem; | ||
2646 | + rtx this_label = JUMP_LABEL (insn); | ||
2647 | |||
2648 | /* If we have found a cycle, make the insn jump to itself. */ | ||
2649 | - if (JUMP_LABEL (insn) == label) | ||
2650 | + if (this_label == label) | ||
2651 | return label; | ||
2652 | |||
2653 | - tem = next_active_insn (JUMP_LABEL (insn)); | ||
2654 | - if (tem && (GET_CODE (PATTERN (tem)) == ADDR_VEC | ||
2655 | + if (!ANY_RETURN_P (this_label)) | ||
2656 | + { | ||
2657 | + rtx tem = next_active_insn (this_label); | ||
2658 | + if (tem | ||
2659 | + && (GET_CODE (PATTERN (tem)) == ADDR_VEC | ||
2660 | || GET_CODE (PATTERN (tem)) == ADDR_DIFF_VEC)) | ||
2661 | - break; | ||
2662 | + break; | ||
2663 | + } | ||
2664 | |||
2665 | - value = JUMP_LABEL (insn); | ||
2666 | + value = this_label; | ||
2667 | } | ||
   if (depth == 10)
     return label;
@@ -2901,6 +2925,7 @@
      arithmetic insn after the jump insn and put the arithmetic insn in the
      delay slot.  If we can't do this, return.  */
   if (delay_list == 0 && likely && new_thread
+      && !ANY_RETURN_P (new_thread)
       && NONJUMP_INSN_P (new_thread)
       && GET_CODE (PATTERN (new_thread)) != ASM_INPUT
       && asm_noperands (PATTERN (new_thread)) < 0)
@@ -2985,16 +3010,14 @@

       gcc_assert (thread_if_true);

-      if (new_thread && JUMP_P (new_thread)
-          && (simplejump_p (new_thread)
-              || GET_CODE (PATTERN (new_thread)) == RETURN)
+      if (new_thread && simplejump_or_return_p (new_thread)
          && redirect_with_delay_list_safe_p (insn,
                                              JUMP_LABEL (new_thread),
                                              delay_list))
        new_thread = follow_jumps (JUMP_LABEL (new_thread));

-      if (new_thread == 0)
-        label = find_end_label ();
+      if (ANY_RETURN_P (new_thread))
+        label = find_end_label (new_thread);
       else if (LABEL_P (new_thread))
        label = new_thread;
       else
@@ -3340,11 +3363,12 @@
         group of consecutive labels.  */
      if (JUMP_P (insn)
          && (condjump_p (insn) || condjump_in_parallel_p (insn))
-         && (target_label = JUMP_LABEL (insn)) != 0)
+         && (target_label = JUMP_LABEL (insn)) != 0
+         && !ANY_RETURN_P (target_label))
        {
          target_label = skip_consecutive_labels (follow_jumps (target_label));
-         if (target_label == 0)
-           target_label = find_end_label ();
+         if (ANY_RETURN_P (target_label))
+           target_label = find_end_label (target_label);

          if (target_label && next_active_insn (target_label) == next
              && ! condjump_in_parallel_p (insn))
@@ -3359,9 +3383,8 @@
      /* See if this jump conditionally branches around an unconditional
         jump.  If so, invert this jump and point it to the target of the
         second jump.  */
-      if (next && JUMP_P (next)
+      if (next && simplejump_or_return_p (next)
          && any_condjump_p (insn)
-         && (simplejump_p (next) || GET_CODE (PATTERN (next)) == RETURN)
          && target_label
          && next_active_insn (target_label) == next_active_insn (next)
          && no_labels_between_p (insn, next))
@@ -3403,8 +3426,7 @@
         Don't do this if we expect the conditional branch to be true, because
         we would then be making the more common case longer.  */

-      if (JUMP_P (insn)
-         && (simplejump_p (insn) || GET_CODE (PATTERN (insn)) == RETURN)
+      if (simplejump_or_return_p (insn)
          && (other = prev_active_insn (insn)) != 0
          && any_condjump_p (other)
          && no_labels_between_p (other, insn)
@@ -3445,10 +3467,10 @@
         Only do so if optimizing for size since this results in slower, but
         smaller code.  */
      if (optimize_function_for_size_p (cfun)
-         && GET_CODE (PATTERN (delay_insn)) == RETURN
+         && ANY_RETURN_P (PATTERN (delay_insn))
          && next
          && JUMP_P (next)
-         && GET_CODE (PATTERN (next)) == RETURN)
+         && PATTERN (next) == PATTERN (delay_insn))
        {
          rtx after;
          int i;
@@ -3487,14 +3509,16 @@
        continue;

      target_label = JUMP_LABEL (delay_insn);
+      if (target_label && ANY_RETURN_P (target_label))
+       continue;

      if (target_label)
        {
          /* If this jump goes to another unconditional jump, thread it, but
             don't convert a jump into a RETURN here.  */
          trial = skip_consecutive_labels (follow_jumps (target_label));
-         if (trial == 0)
-           trial = find_end_label ();
+         if (ANY_RETURN_P (trial))
+           trial = find_end_label (trial);

          if (trial && trial != target_label
              && redirect_with_delay_slots_safe_p (delay_insn, trial, insn))
@@ -3517,7 +3541,7 @@
                 later incorrectly compute register live/death info.  */
              rtx tmp = next_active_insn (trial);
              if (tmp == 0)
-               tmp = find_end_label ();
+               tmp = find_end_label (simple_return_rtx);

              if (tmp)
                {
@@ -3537,14 +3561,12 @@
             delay list and that insn is redundant, thread the jump.  */
          if (trial && GET_CODE (PATTERN (trial)) == SEQUENCE
              && XVECLEN (PATTERN (trial), 0) == 2
-             && JUMP_P (XVECEXP (PATTERN (trial), 0, 0))
-             && (simplejump_p (XVECEXP (PATTERN (trial), 0, 0))
-                 || GET_CODE (PATTERN (XVECEXP (PATTERN (trial), 0, 0))) == RETURN)
+             && simplejump_or_return_p (XVECEXP (PATTERN (trial), 0, 0))
              && redundant_insn (XVECEXP (PATTERN (trial), 0, 1), insn, 0))
            {
              target_label = JUMP_LABEL (XVECEXP (PATTERN (trial), 0, 0));
-             if (target_label == 0)
-               target_label = find_end_label ();
+             if (ANY_RETURN_P (target_label))
+               target_label = find_end_label (target_label);

              if (target_label
                  && redirect_with_delay_slots_safe_p (delay_insn, target_label,
@@ -3622,16 +3644,15 @@
             a RETURN here.  */
          if (! INSN_ANNULLED_BRANCH_P (delay_insn)
              && any_condjump_p (delay_insn)
-             && next && JUMP_P (next)
-             && (simplejump_p (next) || GET_CODE (PATTERN (next)) == RETURN)
+             && next && simplejump_or_return_p (next)
              && next_active_insn (target_label) == next_active_insn (next)
              && no_labels_between_p (insn, next))
            {
              rtx label = JUMP_LABEL (next);
              rtx old_label = JUMP_LABEL (delay_insn);

-             if (label == 0)
-               label = find_end_label ();
+             if (ANY_RETURN_P (label))
+               label = find_end_label (label);

              /* find_end_label can generate a new label.  Check this first.  */
              if (label
@@ -3692,7 +3713,8 @@
 make_return_insns (rtx first)
 {
   rtx insn, jump_insn, pat;
-  rtx real_return_label = end_of_function_label;
+  rtx real_return_label = function_return_label;
+  rtx real_simple_return_label = function_simple_return_label;
   int slots, i;

 #ifdef DELAY_SLOTS_FOR_EPILOGUE
@@ -3707,18 +3729,25 @@
 #endif

   /* See if there is a RETURN insn in the function other than the one we
-     made for END_OF_FUNCTION_LABEL.  If so, set up anything we can't change
+     made for FUNCTION_RETURN_LABEL.  If so, set up anything we can't change
      into a RETURN to jump to it.  */
   for (insn = first; insn; insn = NEXT_INSN (insn))
-    if (JUMP_P (insn) && GET_CODE (PATTERN (insn)) == RETURN)
+    if (JUMP_P (insn) && ANY_RETURN_P (PATTERN (insn)))
      {
-       real_return_label = get_label_before (insn);
+       rtx t = get_label_before (insn);
+       if (PATTERN (insn) == ret_rtx)
+         real_return_label = t;
+       else
+         real_simple_return_label = t;
        break;
      }

   /* Show an extra usage of REAL_RETURN_LABEL so it won't go away if it
-     was equal to END_OF_FUNCTION_LABEL.  */
-  LABEL_NUSES (real_return_label)++;
+     was equal to FUNCTION_RETURN_LABEL.  */
+  if (real_return_label)
+    LABEL_NUSES (real_return_label)++;
+  if (real_simple_return_label)
+    LABEL_NUSES (real_simple_return_label)++;

   /* Clear the list of insns to fill so we can use it.  */
   obstack_free (&unfilled_slots_obstack, unfilled_firstobj);
@@ -3726,13 +3755,27 @@
   for (insn = first; insn; insn = NEXT_INSN (insn))
    {
      int flags;
+      rtx kind, real_label;

      /* Only look at filled JUMP_INSNs that go to the end of function
         label.  */
      if (!NONJUMP_INSN_P (insn)
          || GET_CODE (PATTERN (insn)) != SEQUENCE
-         || !JUMP_P (XVECEXP (PATTERN (insn), 0, 0))
-         || JUMP_LABEL (XVECEXP (PATTERN (insn), 0, 0)) != end_of_function_label)
+         || !JUMP_P (XVECEXP (PATTERN (insn), 0, 0)))
+       continue;
+
+      if (JUMP_LABEL (XVECEXP (PATTERN (insn), 0, 0)) == function_return_label)
+       {
+         kind = ret_rtx;
+         real_label = real_return_label;
+       }
+      else if (JUMP_LABEL (XVECEXP (PATTERN (insn), 0, 0))
+              == function_simple_return_label)
+       {
+         kind = simple_return_rtx;
+         real_label = real_simple_return_label;
+       }
+      else
        continue;

      pat = PATTERN (insn);
@@ -3740,14 +3783,12 @@

      /* If we can't make the jump into a RETURN, try to redirect it to the best
         RETURN and go on to the next insn.  */
-      if (! reorg_redirect_jump (jump_insn, NULL_RTX))
+      if (! reorg_redirect_jump (jump_insn, kind))
        {
          /* Make sure redirecting the jump will not invalidate the delay
             slot insns.  */
-         if (redirect_with_delay_slots_safe_p (jump_insn,
-                                               real_return_label,
-                                               insn))
-           reorg_redirect_jump (jump_insn, real_return_label);
+         if (redirect_with_delay_slots_safe_p (jump_insn, real_label, insn))
+           reorg_redirect_jump (jump_insn, real_label);
          continue;
        }

@@ -3787,7 +3828,7 @@
         RETURN, delete the SEQUENCE and output the individual insns,
         followed by the RETURN.  Then set things up so we try to find
         insns for its delay slots, if it needs some.  */
-      if (GET_CODE (PATTERN (jump_insn)) == RETURN)
+      if (ANY_RETURN_P (PATTERN (jump_insn)))
        {
          rtx prev = PREV_INSN (insn);

@@ -3804,13 +3845,16 @@
      else
        /* It is probably more efficient to keep this with its current
           delay slot as a branch to a RETURN.  */
-       reorg_redirect_jump (jump_insn, real_return_label);
+       reorg_redirect_jump (jump_insn, real_label);
    }

   /* Now delete REAL_RETURN_LABEL if we never used it.  Then try to fill any
      new delay slots we have created.  */
-  if (--LABEL_NUSES (real_return_label) == 0)
+  if (real_return_label != NULL_RTX && --LABEL_NUSES (real_return_label) == 0)
     delete_related_insns (real_return_label);
+  if (real_simple_return_label != NULL_RTX
+      && --LABEL_NUSES (real_simple_return_label) == 0)
+    delete_related_insns (real_simple_return_label);

   fill_simple_delay_slots (1);
   fill_simple_delay_slots (0);
@@ -3878,7 +3922,7 @@
   init_resource_info (epilogue_insn);

   /* Show we haven't computed an end-of-function label yet.  */
-  end_of_function_label = 0;
+  function_return_label = function_simple_return_label = NULL_RTX;

   /* Initialize the statistics for this function.  */
   memset (num_insns_needing_delays, 0, sizeof num_insns_needing_delays);
@@ -3900,11 +3944,23 @@
   /* If we made an end of function label, indicate that it is now
      safe to delete it by undoing our prior adjustment to LABEL_NUSES.
      If it is now unused, delete it.  */
-  if (end_of_function_label && --LABEL_NUSES (end_of_function_label) == 0)
-    delete_related_insns (end_of_function_label);
+  if (function_return_label && --LABEL_NUSES (function_return_label) == 0)
+    delete_related_insns (function_return_label);
+  if (function_simple_return_label
+      && --LABEL_NUSES (function_simple_return_label) == 0)
+    delete_related_insns (function_simple_return_label);

+#if defined HAVE_return || defined HAVE_simple_return
+  if (
 #ifdef HAVE_return
-  if (HAVE_return && end_of_function_label != 0)
+      (HAVE_return && function_return_label != 0)
+#else
+      0
+#endif
+#ifdef HAVE_simple_return
+      || (HAVE_simple_return && function_simple_return_label != 0)
+#endif
+      )
     make_return_insns (first);
 #endif

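The reorg.c hunks above repeatedly collapse the open-coded test "JUMP_P (x) && (simplejump_p (x) || GET_CODE (PATTERN (x)) == RETURN)" into the new simplejump_or_return_p predicate. Its definition is not part of this excerpt; the following is only a minimal sketch of the shape such a helper can take, assuming the ANY_RETURN_P macro added to gcc/rtl.h below:

/* Sketch only: true if INSN is an unconditional simple jump or any
   kind of return insn (plain return or simple_return).  */
static bool
simplejump_or_return_p (rtx insn)
{
  return (JUMP_P (insn)
          && (simplejump_p (insn) || ANY_RETURN_P (PATTERN (insn))));
}
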
=== modified file 'gcc/resource.c'
--- old/gcc/resource.c	2009-11-25 10:55:54 +0000
+++ new/gcc/resource.c	2011-01-05 12:12:18 +0000
@@ -495,6 +495,8 @@
          || GET_CODE (PATTERN (this_jump_insn)) == RETURN)
        {
          next = JUMP_LABEL (this_jump_insn);
+         if (next && ANY_RETURN_P (next))
+           next = NULL_RTX;
          if (jump_insn == 0)
            {
              jump_insn = insn;
@@ -562,9 +564,10 @@
              AND_COMPL_HARD_REG_SET (scratch, needed.regs);
              AND_COMPL_HARD_REG_SET (fallthrough_res.regs, scratch);

-             find_dead_or_set_registers (JUMP_LABEL (this_jump_insn),
-                                         &target_res, 0, jump_count,
-                                         target_set, needed);
+             if (!ANY_RETURN_P (JUMP_LABEL (this_jump_insn)))
+               find_dead_or_set_registers (JUMP_LABEL (this_jump_insn),
+                                           &target_res, 0, jump_count,
+                                           target_set, needed);
              find_dead_or_set_registers (next,
                                          &fallthrough_res, 0, jump_count,
                                          set, needed);
@@ -1097,6 +1100,8 @@
   struct resources new_resources;
   rtx stop_insn = next_active_insn (jump_insn);

+  if (jump_target && ANY_RETURN_P (jump_target))
+    jump_target = NULL_RTX;
   mark_target_live_regs (insns, next_active_insn (jump_target),
                         &new_resources);
   CLEAR_RESOURCE (&set);

=== modified file 'gcc/rtl.c'
--- old/gcc/rtl.c	2010-12-13 10:05:52 +0000
+++ new/gcc/rtl.c	2011-01-05 12:12:18 +0000
@@ -256,6 +256,8 @@
     case CODE_LABEL:
     case PC:
     case CC0:
+    case RETURN:
+    case SIMPLE_RETURN:
     case SCRATCH:
       /* SCRATCH must be shared because they represent distinct values.  */
       return orig;

=== modified file 'gcc/rtl.def'
--- old/gcc/rtl.def	2010-04-02 18:54:46 +0000
+++ new/gcc/rtl.def	2011-01-05 12:12:18 +0000
@@ -296,6 +296,10 @@

 DEF_RTL_EXPR(RETURN, "return", "", RTX_EXTRA)

+/* A plain return, to be used on paths that are reached without going
+   through the function prologue.  */
+DEF_RTL_EXPR(SIMPLE_RETURN, "simple_return", "", RTX_EXTRA)
+
 /* Special for EH return from subroutine.  */

 DEF_RTL_EXPR(EH_RETURN, "eh_return", "", RTX_EXTRA)

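The rtl.def comment above is the crux of shrink-wrapping: a plain (return) is used where the prologue ran and the epilogue must undo it, while (simple_return) marks exits on paths that never executed the prologue. A hedged illustration of how a pass can tell the two shared rtxes apart; ret_rtx and simple_return_rtx come from the gcc/rtl.h hunk below, and the surrounding code is purely illustrative:

/* Illustrative only: classify a return-shaped jump insn.  */
if (JUMP_P (insn) && ANY_RETURN_P (PATTERN (insn)))
  {
    if (PATTERN (insn) == ret_rtx)
      ; /* Full return: the epilogue's restores are performed.  */
    else
      ; /* simple_return: bare return on a shrink-wrapped path.  */
  }
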
=== modified file 'gcc/rtl.h'
--- old/gcc/rtl.h	2010-11-16 22:17:17 +0000
+++ new/gcc/rtl.h	2011-01-05 12:12:18 +0000
@@ -411,6 +411,10 @@
   (JUMP_P (INSN) && (GET_CODE (PATTERN (INSN)) == ADDR_VEC || \
                      GET_CODE (PATTERN (INSN)) == ADDR_DIFF_VEC))

+/* Predicate yielding nonzero iff X is a return or simple_return.  */
+#define ANY_RETURN_P(X) \
+  (GET_CODE (X) == RETURN || GET_CODE (X) == SIMPLE_RETURN)
+
 /* 1 if X is a unary operator.  */

 #define UNARY_P(X) \
@@ -1998,6 +2002,8 @@
 {
   GR_PC,
   GR_CC0,
+  GR_RETURN,
+  GR_SIMPLE_RETURN,
   GR_STACK_POINTER,
   GR_FRAME_POINTER,
   /* For register elimination to work properly these hard_frame_pointer_rtx,
@@ -2032,6 +2038,8 @@

 /* Standard pieces of rtx, to be substituted directly into things.  */
 #define pc_rtx			(global_rtl[GR_PC])
+#define ret_rtx			(global_rtl[GR_RETURN])
+#define simple_return_rtx	(global_rtl[GR_SIMPLE_RETURN])
 #define cc0_rtx			(global_rtl[GR_CC0])

 /* All references to certain hard regs, except those created

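A consequence of these rtl.h changes runs through the reorg.c, resource.c, and rtlanal.c hunks: JUMP_LABEL may now hold ret_rtx or simple_return_rtx rather than a CODE_LABEL, so any code that chases a jump target must guard first. A minimal sketch of the idiom, reusing names from the hunks above:

/* Sketch of the guarding idiom: a return "label" has no insn stream
   behind it, so do not try to follow it as a real label.  */
rtx target_label = JUMP_LABEL (insn);
if (target_label != NULL_RTX && !ANY_RETURN_P (target_label))
  target_label = skip_consecutive_labels (follow_jumps (target_label));
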
=== modified file 'gcc/rtlanal.c'
--- old/gcc/rtlanal.c	2010-11-16 22:17:17 +0000
+++ new/gcc/rtlanal.c	2011-01-05 12:12:18 +0000
@@ -2673,6 +2673,7 @@

   if (JUMP_P (insn)
       && (label = JUMP_LABEL (insn)) != NULL_RTX
+      && !ANY_RETURN_P (label)
       && (table = next_active_insn (label)) != NULL_RTX
       && JUMP_TABLE_DATA_P (table))
    {

=== modified file 'gcc/sched-int.h'
--- old/gcc/sched-int.h	2010-06-02 16:31:39 +0000
+++ new/gcc/sched-int.h	2011-01-05 12:12:18 +0000
@@ -199,7 +199,7 @@

 extern void ebb_compute_jump_reg_dependencies (rtx, regset, regset, regset);

-extern edge find_fallthru_edge (basic_block);
+extern edge find_fallthru_edge_from (basic_block);

 extern void (* sched_init_only_bb) (basic_block, basic_block);
 extern basic_block (* sched_split_block) (basic_block, rtx);

=== modified file 'gcc/sched-vis.c'
--- old/gcc/sched-vis.c	2009-11-25 10:55:54 +0000
+++ new/gcc/sched-vis.c	2011-01-05 12:12:18 +0000
@@ -549,6 +549,9 @@
     case RETURN:
       sprintf (buf, "return");
       break;
+    case SIMPLE_RETURN:
+      sprintf (buf, "simple_return");
+      break;
     case CALL:
       print_exp (buf, x, verbose);
       break;

=== modified file 'gcc/sel-sched-ir.c'
--- old/gcc/sel-sched-ir.c	2010-08-31 11:52:01 +0000
+++ new/gcc/sel-sched-ir.c	2011-01-05 12:12:18 +0000
@@ -686,7 +686,7 @@

  /* Find fallthrough edge.  */
  gcc_assert (BLOCK_FOR_INSN (insn)->prev_bb);
-  candidate = find_fallthru_edge (BLOCK_FOR_INSN (insn)->prev_bb);
+  candidate = find_fallthru_edge_from (BLOCK_FOR_INSN (insn)->prev_bb);

  if (!candidate
      || (candidate->src != BLOCK_FOR_INSN (last_scheduled_insn)

=== modified file 'gcc/sel-sched.c'
--- old/gcc/sel-sched.c	2010-11-12 15:47:38 +0000
+++ new/gcc/sel-sched.c	2011-01-05 12:12:18 +0000
@@ -617,8 +617,8 @@
      if (bb == BLOCK_FOR_INSN (succ))
        return true;

-      if (find_fallthru_edge (bb))
-       bb = find_fallthru_edge (bb)->dest;
+      if (find_fallthru_edge_from (bb))
+       bb = find_fallthru_edge_from (bb)->dest;
      else
        return false;

@@ -4911,7 +4911,7 @@
      next = PREV_INSN (insn);
      BND_TO (bnd) = insn;

-      ft_edge = find_fallthru_edge (block_from);
+      ft_edge = find_fallthru_edge_from (block_from);
      block_next = ft_edge->dest;
      /* There must be a fallthrough block (or where should go
         control flow in case of false jump predicate otherwise?).  */

=== modified file 'gcc/vec.h'
--- old/gcc/vec.h	2010-01-09 14:46:25 +0000
+++ new/gcc/vec.h	2011-01-05 12:12:18 +0000
@@ -188,6 +188,18 @@

 #define VEC_iterate(T,V,I,P)	(VEC_OP(T,base,iterate)(VEC_BASE(V),I,&(P)))

+/* Convenience macro for forward iteration.  */
+
+#define FOR_EACH_VEC_ELT(T, V, I, P)			\
+  for (I = 0; VEC_iterate (T, (V), (I), (P)); ++(I))
+
+/* Convenience macro for reverse iteration.  */
+
+#define FOR_EACH_VEC_ELT_REVERSE(T,V,I,P)		\
+  for (I = VEC_length (T, (V)) - 1;			\
+       VEC_iterate (T, (V), (I), (P));			\
+       (I)--)
+
 /* Allocate new vector.
    VEC(T,A) *VEC_T_A_alloc(int reserve);

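As a usage note on the new vec.h macros: they declare nothing themselves, so the caller supplies the index and element variables. A minimal sketch; the insns vector and the process function are hypothetical:

/* Hypothetical usage of the new iteration macros.  */
int i;
rtx insn;

FOR_EACH_VEC_ELT (rtx, insns, i, insn)          /* i = 0 .. length-1 */
  process (insn);

FOR_EACH_VEC_ELT_REVERSE (rtx, insns, i, insn)  /* i = length-1 .. 0 */
  process (insn);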