This patch simply adds the remainder of the clobber high checks. I'm happy to split this into smaller patches if required (there didn't seem to be anything obvious to split it into).
Alan. 2017-11-16 Alan Hayward <alan.hayw...@arm.com> * alias.c (record_set): Check for clobber high. * cfgexpand.c (expand_gimple_stmt): Likewise. * combine-stack-adj.c (single_set_for_csa): Likewise. * combine.c (find_single_use_1): Likewise. (set_nonzero_bits_and_sign_copies): Likewise. (get_combine_src_dest): Likewise. (is_parallel_of_n_reg_sets): Likewise. (try_combine): Likewise. (record_dead_and_set_regs_1): Likewise. (reg_dead_at_p_1): Likewise. (reg_dead_at_p): Likewise. * dce.c (deletable_insn_p): Likewise. (mark_nonreg_stores_1): Likewise. (mark_nonreg_stores_2): Likewise. * df-scan.c (df_find_hard_reg_defs): Likewise. (df_uses_record): Likewise. (df_get_call_refs): Likewise. * dwarf2out.c (mem_loc_descriptor): Likewise. * haifa-sched.c (haifa_classify_rtx): Likewise. * ira-build.c (create_insn_allocnos): Likewise. * ira-costs.c (scan_one_insn): Likewise. * ira.c (equiv_init_movable_p): Likewise. (rtx_moveable_p): Likewise. (interesting_dest_for_shprep): Likewise. * jump.c (mark_jump_label_1): Likewise. * postreload-gcse.c (record_opr_changes): Likewise. * postreload.c (reload_cse_simplify): Likewise. (struct reg_use): Add source expr. (reload_combine): Check for clobber high. (reload_combine_note_use): Likewise. (reload_cse_move2add): Likewise. (move2add_note_store): Likewise. * print-rtl.c (print_pattern): Likewise. * recog.c (decode_asm_operands): Likewise. (store_data_bypass_p): Likewise. (if_test_bypass_p): Likewise. * regcprop.c (kill_clobbered_value): Likewise. (kill_set_value): Likewise. * reginfo.c (reg_scan_mark_refs): Likewise. * reload1.c (maybe_fix_stack_asms): Likewise. (eliminate_regs_1): Likewise. (elimination_effects): Likewise. (mark_not_eliminable): Likewise. (scan_paradoxical_subregs): Likewise. (forget_old_reloads_1): Likewise. * reorg.c (find_end_label): Likewise. (try_merge_delay_insns): Likewise. (redundant_insn): Likewise. (own_thread_p): Likewise. (fill_simple_delay_slots): Likewise. (fill_slots_from_thread): Likewise. 
(dbr_schedule): Likewise. * resource.c (update_live_status): Likewise. (mark_referenced_resources): Likewise. (mark_set_resources): Likewise. * rtl.c (copy_rtx): Likewise. * rtlanal.c (reg_referenced_p): Likewise. (single_set_2): Likewise. (noop_move_p): Likewise. (note_stores): Likewise. * sched-deps.c (sched_analyze_reg): Likewise. (sched_analyze_insn): Likewise. diff --git a/gcc/alias.c b/gcc/alias.c index c69ef410edac2ab0ab93e8ec9fe4c89a7078c001..6a6734bd7d5732c255c009be47e68aa073a9ebb1 100644 --- a/gcc/alias.c +++ b/gcc/alias.c @@ -1554,6 +1554,17 @@ record_set (rtx dest, const_rtx set, void *data ATTRIBUTE_UNUSED) new_reg_base_value[regno] = 0; return; } + /* A CLOBBER_HIGH only wipes out the old value if the mode of the old + value is greater than that of the clobber. */ + else if (GET_CODE (set) == CLOBBER_HIGH) + { + if (new_reg_base_value[regno] != 0 + && reg_is_clobbered_by_clobber_high ( + regno, GET_MODE (new_reg_base_value[regno]), XEXP (set, 0))) + new_reg_base_value[regno] = 0; + return; + } + src = SET_SRC (set); } else diff --git a/gcc/cfgexpand.c b/gcc/cfgexpand.c index 06a8af8a1663c9e518a8169650a0c9969990df1f..ea6fc265f757543cff635a805fd4045a10add23e 100644 --- a/gcc/cfgexpand.c +++ b/gcc/cfgexpand.c @@ -3803,6 +3803,7 @@ expand_gimple_stmt (gimple *stmt) /* If we want exceptions for non-call insns, any may_trap_p instruction may throw. 
*/ && GET_CODE (PATTERN (insn)) != CLOBBER + && GET_CODE (PATTERN (insn)) != CLOBBER_HIGH + && GET_CODE (PATTERN (insn)) != USE && insn_could_throw_p (insn)) make_reg_eh_region_note (insn, 0, lp_nr); diff --git a/gcc/combine-stack-adj.c b/gcc/combine-stack-adj.c index 09f0be814f98922b6926a929401894809a890f61..595e83c73760a97e0f8ebd99e12b1853d1d52b92 100644 --- a/gcc/combine-stack-adj.c +++ b/gcc/combine-stack-adj.c @@ -133,6 +133,7 @@ single_set_for_csa (rtx_insn *insn) && SET_SRC (this_rtx) == SET_DEST (this_rtx)) ; else if (GET_CODE (this_rtx) != CLOBBER + && GET_CODE (this_rtx) != CLOBBER_HIGH && GET_CODE (this_rtx) != USE) return NULL_RTX; } diff --git a/gcc/combine.c b/gcc/combine.c index 99cc343192ec4e2f8bdca0667858fbdf11baaffb..8b17349c240bea7a752cfd58f392f5a9dbfd53d6 100644 --- a/gcc/combine.c +++ b/gcc/combine.c @@ -573,6 +573,7 @@ find_single_use_1 (rtx dest, rtx *loc) case SYMBOL_REF: CASE_CONST_ANY: case CLOBBER: + case CLOBBER_HIGH: return 0; case SET: @@ -1755,6 +1756,9 @@ set_nonzero_bits_and_sign_copies (rtx x, const_rtx set, void *data) return; } + /* Should not happen as we are only using pseudo registers. */ + gcc_assert (GET_CODE (set) != CLOBBER_HIGH); + /* If this register is being initialized using itself, and the register is uninitialized in this basic block, and there are no LOG_LINKS which set the register, then part of the @@ -1853,6 +1857,7 @@ get_combine_src_dest (combine_insn *insnc, rtx *pdest, rtx *psrc) /* We can ignore CLOBBERs. 
*/ case CLOBBER: + case CLOBBER_HIGH: break; case SET: @@ -2715,10 +2720,17 @@ is_parallel_of_n_reg_sets (rtx pat, int n) || !REG_P (SET_DEST (XVECEXP (pat, 0, i)))) return false; for ( ; i < len; i++) - if (GET_CODE (XVECEXP (pat, 0, i)) != CLOBBER - || XEXP (XVECEXP (pat, 0, i), 0) == const0_rtx) - return false; - + switch (GET_CODE (XVECEXP (pat, 0, i))) + { + case CLOBBER: + if (XEXP (XVECEXP (pat, 0, i), 0) == const0_rtx) + return false; + break; + case CLOBBER_HIGH: + break; + default: + return false; + } return true; } @@ -3099,7 +3111,8 @@ try_combine (rtx_insn *i3, rtx_insn *i2, rtx_insn *i1, rtx_insn *i0, for (i = 0; ok && i < XVECLEN (p2, 0); i++) { if ((GET_CODE (XVECEXP (p2, 0, i)) == SET - || GET_CODE (XVECEXP (p2, 0, i)) == CLOBBER) + || GET_CODE (XVECEXP (p2, 0, i)) == CLOBBER + || GET_CODE (XVECEXP (p2, 0, i)) == CLOBBER_HIGH) && reg_overlap_mentioned_p (SET_DEST (PATTERN (i3)), SET_DEST (XVECEXP (p2, 0, i)))) ok = false; @@ -13553,6 +13566,15 @@ record_dead_and_set_regs_1 (rtx dest, const_rtx setter, void *data) record_value_for_reg (dest, record_dead_insn, gen_lowpart (GET_MODE (dest), SET_SRC (setter))); + else if (GET_CODE (setter) == CLOBBER_HIGH) + { + reg_stat_type *rsp = ®_stat[REGNO (dest)]; + if (rsp->last_set_value + && reg_is_clobbered_by_clobber_high + (REGNO (dest), GET_MODE (rsp->last_set_value), + XEXP (setter, 0))) + record_value_for_reg (dest, NULL, NULL_RTX); + } else record_value_for_reg (dest, record_dead_insn, NULL_RTX); } @@ -14030,6 +14052,7 @@ use_crosses_set_p (const_rtx x, rtx_insn *from, rtx_insn *to) static unsigned int reg_dead_regno, reg_dead_endregno; static int reg_dead_flag; +rtx reg_dead_reg; /* Function called via note_stores from reg_dead_at_p. 
@@ -14044,6 +14067,10 @@ reg_dead_at_p_1 (rtx dest, const_rtx x, void *data ATTRIBUTE_UNUSED) if (!REG_P (dest)) return; + if (GET_CODE (x) == CLOBBER_HIGH + && !reg_is_clobbered_by_clobber_high (reg_dead_reg, XEXP (x, 0))) + return; + regno = REGNO (dest); endregno = END_REGNO (dest); if (reg_dead_endregno > regno && reg_dead_regno < endregno) @@ -14067,6 +14094,7 @@ reg_dead_at_p (rtx reg, rtx_insn *insn) /* Set variables for reg_dead_at_p_1. */ reg_dead_regno = REGNO (reg); reg_dead_endregno = END_REGNO (reg); + reg_dead_reg = reg; reg_dead_flag = 0; diff --git a/gcc/dce.c b/gcc/dce.c index 9f3021f13fe5171e0eaa26918eaee6bb690a3eb0..b0518a7d23d677e7192c173375473e03e68812cd 100644 --- a/gcc/dce.c +++ b/gcc/dce.c @@ -139,6 +139,7 @@ deletable_insn_p (rtx_insn *insn, bool fast, bitmap arg_stores) return false; case CLOBBER: + case CLOBBER_HIGH: if (fast) { /* A CLOBBER of a dead pseudo register serves no purpose. @@ -207,7 +208,10 @@ static void mark_nonreg_stores_1 (rtx dest, const_rtx pattern, void *data) { if (GET_CODE (pattern) != CLOBBER && !REG_P (dest)) - mark_insn ((rtx_insn *) data, true); + { + gcc_checking_assert (GET_CODE (pattern) != CLOBBER_HIGH); + mark_insn ((rtx_insn *) data, true); + } } @@ -218,7 +222,10 @@ static void mark_nonreg_stores_2 (rtx dest, const_rtx pattern, void *data) { if (GET_CODE (pattern) != CLOBBER && !REG_P (dest)) - mark_insn ((rtx_insn *) data, false); + { + gcc_checking_assert (GET_CODE (pattern) != CLOBBER_HIGH); + mark_insn ((rtx_insn *) data, false); + } } diff --git a/gcc/df-scan.c b/gcc/df-scan.c index 8ab3d716ea2975ed687e1e3cf61ab3233e378f63..3962d5ab49cf321a4d8e866ec84f3867c0f34248 100644 --- a/gcc/df-scan.c +++ b/gcc/df-scan.c @@ -2777,6 +2777,7 @@ df_find_hard_reg_defs (rtx x, HARD_REG_SET *defs) break; case CLOBBER: + case CLOBBER_HIGH: df_find_hard_reg_defs_1 (XEXP (x, 0), defs); break; @@ -2836,6 +2837,10 @@ df_uses_record (struct df_collection_rec *collection_rec, /* If we're clobbering a REG then we have a def so ignore. 
*/ return; + case CLOBBER_HIGH: + gcc_assert (REG_P (XEXP (x, 0))); + return; + case MEM: df_uses_record (collection_rec, &XEXP (x, 0), DF_REF_REG_MEM_LOAD, @@ -3132,6 +3137,7 @@ df_get_call_refs (struct df_collection_rec *collection_rec, for (note = CALL_INSN_FUNCTION_USAGE (insn_info->insn); note; note = XEXP (note, 1)) { + gcc_assert (GET_CODE (XEXP (note, 0)) != CLOBBER_HIGH); if (GET_CODE (XEXP (note, 0)) == USE) df_uses_record (collection_rec, &XEXP (XEXP (note, 0), 0), DF_REF_REG_USE, bb, insn_info, flags); diff --git a/gcc/dwarf2out.c b/gcc/dwarf2out.c index c0f93d763f5163cff5db2241df383c48777803ac..78811a6861e6100ebaaeb7ed4b143da55657d6e1 100644 --- a/gcc/dwarf2out.c +++ b/gcc/dwarf2out.c @@ -15663,6 +15663,7 @@ mem_loc_descriptor (rtx rtl, machine_mode mode, case CONST_FIXED: case CLRSB: case CLOBBER: + case CLOBBER_HIGH: /* If delegitimize_address couldn't do anything with the UNSPEC, we can't express it in the debug info. This can happen e.g. with some TLS UNSPECs. */ diff --git a/gcc/haifa-sched.c b/gcc/haifa-sched.c index f5c06a95bb6742475bf7d1f2c5ea16456fa21efe..81c9ae378333079e99372153b64e5ffb1a088fc5 100644 --- a/gcc/haifa-sched.c +++ b/gcc/haifa-sched.c @@ -529,6 +529,9 @@ haifa_classify_rtx (const_rtx x) /* Test if it is a 'store'. */ tmp_class = may_trap_exp (XEXP (x, 0), 1); break; + case CLOBBER_HIGH: + gcc_assert (REG_P (XEXP (x, 0))); + break; case SET: /* Test if it is a store. 
*/ tmp_class = may_trap_exp (SET_DEST (x), 1); diff --git a/gcc/ira-build.c b/gcc/ira-build.c index 67c0305a1685d432904e93d2057b38daf7fac315..8a1f2bac04600bc18cc0e3d6b45fb75c0be2ad97 100644 --- a/gcc/ira-build.c +++ b/gcc/ira-build.c @@ -1876,6 +1876,11 @@ create_insn_allocnos (rtx x, rtx outer, bool output_p) create_insn_allocnos (XEXP (x, 0), NULL, true); return; } + else if (code == CLOBBER_HIGH) + { + gcc_assert (REG_P (XEXP (x, 0)) && HARD_REGISTER_P (XEXP (x, 0))); + return; + } else if (code == MEM) { create_insn_allocnos (XEXP (x, 0), NULL, false); diff --git a/gcc/ira-costs.c b/gcc/ira-costs.c index e24dbc2e01be3048dbec379c0823c90271d7e768..9f03036e18b1abc03dc043c8deee023fa7a82fd4 100644 --- a/gcc/ira-costs.c +++ b/gcc/ira-costs.c @@ -1444,6 +1444,13 @@ scan_one_insn (rtx_insn *insn) return insn; } + if (pat_code == CLOBBER_HIGH) + { + gcc_assert (REG_P (XEXP (PATTERN (insn), 0)) + && HARD_REGISTER_P (XEXP (PATTERN (insn), 0))); + return insn; + } + counted_mem = false; set = single_set (insn); extract_insn (insn); diff --git a/gcc/ira.c b/gcc/ira.c index 93d02093757c2db1e0133db2a2f388cea81e497a..bb6421853a7a7a049a79f0c6e82ef82f667f8321 100644 --- a/gcc/ira.c +++ b/gcc/ira.c @@ -3086,6 +3086,7 @@ equiv_init_movable_p (rtx x, int regno) case CC0: case CLOBBER: + case CLOBBER_HIGH: return 0; case PRE_INC: @@ -4411,6 +4412,7 @@ rtx_moveable_p (rtx *loc, enum op_type type) && rtx_moveable_p (&XEXP (x, 2), OP_IN)); case CLOBBER: + case CLOBBER_HIGH: return rtx_moveable_p (&SET_DEST (x), OP_OUT); case UNSPEC_VOLATILE: @@ -4863,7 +4865,9 @@ interesting_dest_for_shprep (rtx_insn *insn, basic_block call_dom) for (int i = 0; i < XVECLEN (pat, 0); i++) { rtx sub = XVECEXP (pat, 0, i); - if (GET_CODE (sub) == USE || GET_CODE (sub) == CLOBBER) + if (GET_CODE (sub) == USE + || GET_CODE (sub) == CLOBBER + || GET_CODE (sub) == CLOBBER_HIGH) continue; if (GET_CODE (sub) != SET || side_effects_p (sub)) diff --git a/gcc/jump.c b/gcc/jump.c index 
b5392011b533e83ae7094e0f5870b281e35c2bea..03fc1c20840832c65d65711151c47dbde7c2acef 100644 --- a/gcc/jump.c +++ b/gcc/jump.c @@ -1105,6 +1105,7 @@ mark_jump_label_1 (rtx x, rtx_insn *insn, bool in_mem, bool is_target) case CC0: case REG: case CLOBBER: + case CLOBBER_HIGH: case CALL: return; diff --git a/gcc/postreload-gcse.c b/gcc/postreload-gcse.c index 15fdb7e0cfe7767e559cd9ef6d5f66d3501ffd90..f4f6af75ea56d79a7579a971a3019c586fc9bfc1 100644 --- a/gcc/postreload-gcse.c +++ b/gcc/postreload-gcse.c @@ -791,15 +791,18 @@ record_opr_changes (rtx_insn *insn) record_last_reg_set_info_regno (insn, regno); for (link = CALL_INSN_FUNCTION_USAGE (insn); link; link = XEXP (link, 1)) - if (GET_CODE (XEXP (link, 0)) == CLOBBER) - { - x = XEXP (XEXP (link, 0), 0); - if (REG_P (x)) - { - gcc_assert (HARD_REGISTER_P (x)); - record_last_reg_set_info (insn, x); - } - } + { + gcc_assert (GET_CODE (XEXP (link, 0)) != CLOBBER_HIGH); + if (GET_CODE (XEXP (link, 0)) == CLOBBER) + { + x = XEXP (XEXP (link, 0), 0); + if (REG_P (x)) + { + gcc_assert (HARD_REGISTER_P (x)); + record_last_reg_set_info (insn, x); + } + } + } if (! RTL_CONST_OR_PURE_CALL_P (insn)) record_last_mem_set_info (insn); diff --git a/gcc/postreload.c b/gcc/postreload.c index a70d11a6c878871fea86a1afe0caec4507c0d394..6bdb4a4f3217b1d263f1cedf0234500f20b1516e 100644 --- a/gcc/postreload.c +++ b/gcc/postreload.c @@ -133,6 +133,8 @@ reload_cse_simplify (rtx_insn *insn, rtx testreg) for (i = XVECLEN (body, 0) - 1; i >= 0; --i) { rtx part = XVECEXP (body, 0, i); + /* asms can only have full clobbers, not clobber_highs. 
*/ + gcc_assert (GET_CODE (part) != CLOBBER_HIGH); if (GET_CODE (part) == CLOBBER && REG_P (XEXP (part, 0))) cselib_invalidate_rtx (XEXP (part, 0)); } @@ -156,6 +158,7 @@ reload_cse_simplify (rtx_insn *insn, rtx testreg) } } else if (GET_CODE (part) != CLOBBER + && GET_CODE (part) != CLOBBER_HIGH && GET_CODE (part) != USE) break; } @@ -667,7 +670,8 @@ struct reg_use STORE_RUID is always meaningful if we only want to use a value in a register in a different place: it denotes the next insn in the insn stream (i.e. the last encountered) that sets or clobbers the register. - REAL_STORE_RUID is similar, but clobbers are ignored when updating it. */ + REAL_STORE_RUID is similar, but clobbers are ignored when updating it. + EXPR is the expression used when storing the register. */ static struct { struct reg_use reg_use[RELOAD_COMBINE_MAX_USES]; @@ -677,6 +681,7 @@ static struct int real_store_ruid; int use_ruid; bool all_offsets_match; + rtx expr; } reg_state[FIRST_PSEUDO_REGISTER]; /* Reverse linear uid. This is increased in reload_combine while scanning @@ -1339,6 +1344,10 @@ reload_combine (void) { rtx setuse = XEXP (link, 0); rtx usage_rtx = XEXP (setuse, 0); + /* We could support CLOBBER_HIGH and treat it in the same way as + HARD_REGNO_CALL_PART_CLOBBERED, but no port needs that yet. */ + gcc_assert (GET_CODE (setuse) != CLOBBER_HIGH); + if ((GET_CODE (setuse) == USE || GET_CODE (setuse) == CLOBBER) && REG_P (usage_rtx)) { @@ -1514,6 +1523,10 @@ reload_combine_note_use (rtx *xp, rtx_insn *insn, int ruid, rtx containing_mem) } break; + case CLOBBER_HIGH: + gcc_assert (REG_P (SET_DEST (x))); + return; + case PLUS: /* We are interested in (plus (reg) (const_int)) . */ if (!REG_P (XEXP (x, 0)) @@ -2133,6 +2146,9 @@ reload_cse_move2add (rtx_insn *first) { rtx setuse = XEXP (link, 0); rtx usage_rtx = XEXP (setuse, 0); + /* CALL_INSN_FUNCTION_USAGEs can only have full clobbers, not + clobber_highs. 
*/ + gcc_assert (GET_CODE (setuse) != CLOBBER_HIGH); if (GET_CODE (setuse) == CLOBBER && REG_P (usage_rtx)) { @@ -2295,6 +2311,13 @@ move2add_note_store (rtx dst, const_rtx set, void *data) move2add_record_mode (dst); } + else if (GET_CODE (set) == CLOBBER_HIGH) + { + /* Only invalidate if actually clobbered. */ + if (reg_mode[regno] == BLKmode + || reg_is_clobbered_by_clobber_high (regno, reg_mode[regno], dst)) + goto invalidate; + } else { invalidate: diff --git a/gcc/print-rtl.c b/gcc/print-rtl.c index 2ecdbb4299e2c64e8eea76a4534fe264fce613c4..5920165ceb416e7eec370e6863ffe887161f4b92 100644 --- a/gcc/print-rtl.c +++ b/gcc/print-rtl.c @@ -1715,6 +1715,7 @@ print_pattern (pretty_printer *pp, const_rtx x, int verbose) print_exp (pp, x, verbose); break; case CLOBBER: + case CLOBBER_HIGH: case USE: pp_printf (pp, "%s ", GET_RTX_NAME (GET_CODE (x))); print_value (pp, XEXP (x, 0), verbose); diff --git a/gcc/recog.c b/gcc/recog.c index 05e69134236c2266b966744a54b7ca7e9ed26fa1..769f462d0407673781a2634218c5c62579fb4ac1 100644 --- a/gcc/recog.c +++ b/gcc/recog.c @@ -1600,6 +1600,7 @@ decode_asm_operands (rtx body, rtx *operands, rtx **operand_locs, { if (GET_CODE (XVECEXP (body, 0, i)) == CLOBBER) break; /* Past last SET */ + gcc_assert (GET_CODE (XVECEXP (body, 0, i)) == SET); if (operands) operands[i] = SET_DEST (XVECEXP (body, 0, i)); if (operand_locs) @@ -3690,7 +3691,8 @@ store_data_bypass_p (rtx_insn *out_insn, rtx_insn *in_insn) { out_exp = XVECEXP (out_pat, 0, i); - if (GET_CODE (out_exp) == CLOBBER) + if (GET_CODE (out_exp) == CLOBBER + || GET_CODE (out_exp) == CLOBBER_HIGH) continue; gcc_assert (GET_CODE (out_exp) == SET); @@ -3709,7 +3711,8 @@ store_data_bypass_p (rtx_insn *out_insn, rtx_insn *in_insn) { in_exp = XVECEXP (in_pat, 0, i); - if (GET_CODE (in_exp) == CLOBBER) + if (GET_CODE (in_exp) == CLOBBER + || GET_CODE (in_exp) == CLOBBER_HIGH) continue; gcc_assert (GET_CODE (in_exp) == SET); @@ -3732,7 +3735,8 @@ store_data_bypass_p (rtx_insn *out_insn, 
rtx_insn *in_insn) { out_exp = XVECEXP (out_pat, 0, j); - if (GET_CODE (out_exp) == CLOBBER) + if (GET_CODE (out_exp) == CLOBBER + || GET_CODE (out_exp) == CLOBBER_HIGH) continue; gcc_assert (GET_CODE (out_exp) == SET); @@ -3787,7 +3791,7 @@ if_test_bypass_p (rtx_insn *out_insn, rtx_insn *in_insn) { rtx exp = XVECEXP (out_pat, 0, i); - if (GET_CODE (exp) == CLOBBER) + if (GET_CODE (exp) == CLOBBER || GET_CODE (exp) == CLOBBER_HIGH) continue; gcc_assert (GET_CODE (exp) == SET); diff --git a/gcc/regcprop.c b/gcc/regcprop.c index 4ca10f58a58aa4c7605b001ec851c79a25fd6a8c..85b474544b22c915af9ea55230443186563dd624 100644 --- a/gcc/regcprop.c +++ b/gcc/regcprop.c @@ -237,7 +237,11 @@ static void kill_clobbered_value (rtx x, const_rtx set, void *data) { struct value_data *const vd = (struct value_data *) data; - if (GET_CODE (set) == CLOBBER) + gcc_assert (GET_CODE (set) != CLOBBER_HIGH || REG_P (x)); + + if (GET_CODE (set) == CLOBBER + || (GET_CODE (set) == CLOBBER_HIGH + && reg_is_clobbered_by_clobber_high (x, XEXP (set, 0)))) kill_value (x, vd); } @@ -257,7 +261,9 @@ kill_set_value (rtx x, const_rtx set, void *data) struct kill_set_value_data *ksvd = (struct kill_set_value_data *) data; if (rtx_equal_p (x, ksvd->ignore_set_reg)) return; - if (GET_CODE (set) != CLOBBER) + + gcc_assert (GET_CODE (set) != CLOBBER_HIGH || REG_P (x)); + if (GET_CODE (set) != CLOBBER && GET_CODE (set) != CLOBBER_HIGH) { kill_value (x, ksvd->vd); if (REG_P (x)) diff --git a/gcc/reginfo.c b/gcc/reginfo.c index 847305ebe61fd3cd0cf02c01a02c537ad8b22c4a..bd9dd05e53404490b795b86b473e3b252bde1b50 100644 --- a/gcc/reginfo.c +++ b/gcc/reginfo.c @@ -1100,6 +1100,10 @@ reg_scan_mark_refs (rtx x, rtx_insn *insn) reg_scan_mark_refs (XEXP (XEXP (x, 0), 0), insn); break; + case CLOBBER_HIGH: + gcc_assert (!(MEM_P (XEXP (x, 0)))); + break; + case SET: /* Count a set of the destination if it is a register. 
*/ for (dest = SET_DEST (x); diff --git a/gcc/reload1.c b/gcc/reload1.c index 902d940245dfcb73c10de8670adb22375fbfe098..68b16ca8bd747511589a74529c1ac922acf04215 100644 --- a/gcc/reload1.c +++ b/gcc/reload1.c @@ -1339,6 +1339,8 @@ maybe_fix_stack_asms (void) rtx t = XVECEXP (pat, 0, i); if (GET_CODE (t) == CLOBBER && STACK_REG_P (XEXP (t, 0))) SET_HARD_REG_BIT (clobbered, REGNO (XEXP (t, 0))); + /* CLOBBER_HIGH is only supported for LRA. */ + gcc_assert (GET_CODE (t) != CLOBBER_HIGH); } /* Get the operand values and constraints out of the insn. */ @@ -2879,6 +2881,7 @@ eliminate_regs_1 (rtx x, machine_mode mem_mode, rtx insn, return x; case CLOBBER: + case CLOBBER_HIGH: case ASM_OPERANDS: gcc_assert (insn && DEBUG_INSN_P (insn)); break; @@ -3089,6 +3092,10 @@ elimination_effects (rtx x, machine_mode mem_mode) elimination_effects (XEXP (x, 0), mem_mode); return; + case CLOBBER_HIGH: + /* CLOBBER_HIGH is only supported for LRA. */ + return; + case SET: /* Check for setting a register that we know about. */ if (REG_P (SET_DEST (x))) @@ -3810,6 +3817,9 @@ mark_not_eliminable (rtx dest, const_rtx x, void *data ATTRIBUTE_UNUSED) if (dest == hard_frame_pointer_rtx) return; + /* CLOBBER_HIGH is only supported for LRA. */ + gcc_assert (GET_CODE (x) != CLOBBER_HIGH); + for (i = 0; i < NUM_ELIMINABLE_REGS; i++) if (reg_eliminate[i].can_eliminate && dest == reg_eliminate[i].to_rtx && (GET_CODE (x) != SET @@ -4445,6 +4455,7 @@ scan_paradoxical_subregs (rtx x) case PC: case USE: case CLOBBER: + case CLOBBER_HIGH: return; case SUBREG: @@ -4899,7 +4910,7 @@ reload_as_needed (int live_known) to be forgotten later. */ static void -forget_old_reloads_1 (rtx x, const_rtx ignored ATTRIBUTE_UNUSED, +forget_old_reloads_1 (rtx x, const_rtx setter, void *data) { unsigned int regno; @@ -4919,6 +4930,9 @@ forget_old_reloads_1 (rtx x, const_rtx ignored ATTRIBUTE_UNUSED, if (!REG_P (x)) return; + /* CLOBBER_HIGH is only supported for LRA. 
*/ + gcc_assert (GET_CODE (setter) != CLOBBER_HIGH); + regno = REGNO (x); if (regno >= FIRST_PSEUDO_REGISTER) diff --git a/gcc/reorg.c b/gcc/reorg.c index 5914af655b6bbbddc98f23f53c0d1e6e496dc2eb..4f97e78b2a3c03db89a1bd2fa10bf8e83c374ae7 100644 --- a/gcc/reorg.c +++ b/gcc/reorg.c @@ -396,7 +396,8 @@ find_end_label (rtx kind) while (NOTE_P (insn) || (NONJUMP_INSN_P (insn) && (GET_CODE (PATTERN (insn)) == USE - || GET_CODE (PATTERN (insn)) == CLOBBER))) + || GET_CODE (PATTERN (insn)) == CLOBBER + || GET_CODE (PATTERN (insn)) == CLOBBER_HIGH))) insn = PREV_INSN (insn); /* When a target threads its epilogue we might already have a @@ -1290,7 +1291,8 @@ try_merge_delay_insns (rtx_insn *insn, rtx_insn *thread) /* TRIAL must be a CALL_INSN or INSN. Skip USE and CLOBBER. */ if (NONJUMP_INSN_P (trial) - && (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)) + && (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER + || GET_CODE (pat) == CLOBBER_HIGH)) continue; if (GET_CODE (next_to_match) == GET_CODE (trial) @@ -1484,7 +1486,8 @@ redundant_insn (rtx insn, rtx_insn *target, const vec<rtx_insn *> &delay_list) --insns_to_search; pat = PATTERN (trial); - if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER) + if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER + || GET_CODE (pat) == CLOBBER_HIGH) continue; if (rtx_sequence *seq = dyn_cast <rtx_sequence *> (pat)) @@ -1579,7 +1582,8 @@ redundant_insn (rtx insn, rtx_insn *target, const vec<rtx_insn *> &delay_list) --insns_to_search; pat = PATTERN (trial); - if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER) + if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER + || GET_CODE (pat) == CLOBBER_HIGH) continue; if (rtx_sequence *seq = dyn_cast <rtx_sequence *> (pat)) @@ -1688,7 +1692,8 @@ own_thread_p (rtx thread, rtx label, int allow_fallthrough) || LABEL_P (insn) || (NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) != USE - && GET_CODE (PATTERN (insn)) != CLOBBER)) + && GET_CODE (PATTERN (insn)) != CLOBBER + && 
GET_CODE (PATTERN (insn)) != CLOBBER_HIGH)) return 0; return 1; @@ -2013,7 +2018,8 @@ fill_simple_delay_slots (int non_jumps_p) pat = PATTERN (trial); /* Stand-alone USE and CLOBBER are just for flow. */ - if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER) + if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER + || GET_CODE (pat) == CLOBBER_HIGH) continue; /* Check for resource conflict first, to avoid unnecessary @@ -2135,7 +2141,8 @@ fill_simple_delay_slots (int non_jumps_p) pat = PATTERN (trial); /* Stand-alone USE and CLOBBER are just for flow. */ - if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER) + if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER + || GET_CODE (pat) == CLOBBER_HIGH) continue; /* If this already has filled delay slots, get the insn needing @@ -2400,7 +2407,8 @@ fill_slots_from_thread (rtx_jump_insn *insn, rtx condition, } pat = PATTERN (trial); - if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER) + if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER + || GET_CODE (pat) == CLOBBER_HIGH) continue; /* If TRIAL conflicts with the insns ahead of it, we lose. Also, @@ -3790,7 +3798,8 @@ dbr_schedule (rtx_insn *first) if (! insn->deleted () && NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) != USE - && GET_CODE (PATTERN (insn)) != CLOBBER) + && GET_CODE (PATTERN (insn)) != CLOBBER + && GET_CODE (PATTERN (insn)) != CLOBBER_HIGH) { if (GET_CODE (PATTERN (insn)) == SEQUENCE) { diff --git a/gcc/resource.c b/gcc/resource.c index 95911c5c718f8ae79a401c782624ec8a01a9e8ab..463e370b2d8610787ba21b4be3925aa1ae86e71e 100644 --- a/gcc/resource.c +++ b/gcc/resource.c @@ -108,6 +108,11 @@ update_live_status (rtx dest, const_rtx x, void *data ATTRIBUTE_UNUSED) if (GET_CODE (x) == CLOBBER) for (i = first_regno; i < last_regno; i++) CLEAR_HARD_REG_BIT (current_live_regs, i); + else if (GET_CODE (x) == CLOBBER_HIGH) + /* No current target supports both branch delay slots and CLOBBER_HIGH. 
+ We'd need more elaborate liveness tracking to handle that + combination. */ + gcc_unreachable (); else for (i = first_regno; i < last_regno; i++) { @@ -292,6 +297,7 @@ mark_referenced_resources (rtx x, struct resources *res, return; case CLOBBER: + case CLOBBER_HIGH: return; case CALL_INSN: @@ -665,9 +671,15 @@ mark_set_resources (rtx x, struct resources *res, int in_dest, for (link = CALL_INSN_FUNCTION_USAGE (call_insn); link; link = XEXP (link, 1)) - if (GET_CODE (XEXP (link, 0)) == CLOBBER) - mark_set_resources (SET_DEST (XEXP (link, 0)), res, 1, - MARK_SRC_DEST); + { + /* We could support CLOBBER_HIGH and treat it in the same way as + HARD_REGNO_CALL_PART_CLOBBERED, but no port needs that + yet. */ + gcc_assert (GET_CODE (XEXP (link, 0)) != CLOBBER_HIGH); + if (GET_CODE (XEXP (link, 0)) == CLOBBER) + mark_set_resources (SET_DEST (XEXP (link, 0)), res, 1, + MARK_SRC_DEST); + } /* Check for a REG_SETJMP. If it exists, then we must assume that this call can clobber any register. */ @@ -710,6 +722,12 @@ mark_set_resources (rtx x, struct resources *res, int in_dest, mark_set_resources (XEXP (x, 0), res, 1, MARK_SRC_DEST); return; + case CLOBBER_HIGH: + /* No current target supports both branch delay slots and CLOBBER_HIGH. + We'd need more elaborate liveness tracking to handle that + combination. 
*/ + gcc_unreachable (); + case SEQUENCE: { rtx_sequence *seq = as_a <rtx_sequence *> (x); diff --git a/gcc/rtl.c b/gcc/rtl.c index 6db84f99627bb8617c6e227892ca44076f4e729b..12a01e8ed1f9005e5c0dfeb0f86c762069520284 100644 --- a/gcc/rtl.c +++ b/gcc/rtl.c @@ -307,6 +307,10 @@ copy_rtx (rtx orig) return orig; break; + case CLOBBER_HIGH: + gcc_assert (REG_P (XEXP (orig, 0))); + return orig; + case CONST: if (shared_const_p (orig)) return orig; diff --git a/gcc/rtlanal.c b/gcc/rtlanal.c index 923e3314d25c05f9055907c61b4a24186701cc23..d907d636ed5f22d9cdc7237d66fdb05da9922f60 100644 --- a/gcc/rtlanal.c +++ b/gcc/rtlanal.c @@ -1196,6 +1196,10 @@ reg_referenced_p (const_rtx x, const_rtx body) return 1; return 0; + case CLOBBER_HIGH: + gcc_assert (REG_P (XEXP (body, 0))); + return 0; + case COND_EXEC: if (reg_overlap_mentioned_p (x, COND_EXEC_TEST (body))) return 1; @@ -1507,6 +1511,7 @@ single_set_2 (const rtx_insn *insn, const_rtx pat) { case USE: case CLOBBER: + case CLOBBER_HIGH: break; case SET: @@ -1660,7 +1665,8 @@ noop_move_p (const rtx_insn *insn) rtx tem = XVECEXP (pat, 0, i); if (GET_CODE (tem) == USE - || GET_CODE (tem) == CLOBBER) + || GET_CODE (tem) == CLOBBER + || GET_CODE (tem) == CLOBBER_HIGH) continue; if (GET_CODE (tem) != SET || ! 
set_noop_p (tem)) @@ -1892,7 +1898,9 @@ note_stores (const_rtx x, void (*fun) (rtx, const_rtx, void *), void *data) if (GET_CODE (x) == COND_EXEC) x = COND_EXEC_CODE (x); - if (GET_CODE (x) == SET || GET_CODE (x) == CLOBBER) + if (GET_CODE (x) == SET + || GET_CODE (x) == CLOBBER + || GET_CODE (x) == CLOBBER_HIGH) { rtx dest = SET_DEST (x); diff --git a/gcc/sched-deps.c b/gcc/sched-deps.c index a64e4e17bbc8115982d253d041032243e9506802..8eb0ed350557d3c0af87bbba674cefd4478d97aa 100644 --- a/gcc/sched-deps.c +++ b/gcc/sched-deps.c @@ -2319,6 +2319,13 @@ sched_analyze_reg (struct deps_desc *deps, int regno, machine_mode mode, while (--i >= 0) note_reg_use (regno + i); } + else if (ref == CLOBBER_HIGH) + { + gcc_assert (i == 1); + /* We don't know the current state of the register, so have to treat + the clobber high as a full clobber. */ + note_reg_clobber (regno); + } else { while (--i >= 0) @@ -2342,6 +2349,8 @@ sched_analyze_reg (struct deps_desc *deps, int regno, machine_mode mode, else if (ref == USE) note_reg_use (regno); else + /* For CLOBBER_HIGH, we don't know the current state of the register, + so have to treat it as a full clobber. */ note_reg_clobber (regno); /* Pseudos that are REG_EQUIV to something may be replaced @@ -2953,7 +2962,7 @@ sched_analyze_insn (struct deps_desc *deps, rtx x, rtx_insn *insn) sub = COND_EXEC_CODE (sub); code = GET_CODE (sub); } - if (code == SET || code == CLOBBER) + else if (code == SET || code == CLOBBER || code == CLOBBER_HIGH) sched_analyze_1 (deps, sub, insn); else sched_analyze_2 (deps, sub, insn); @@ -2969,6 +2978,10 @@ sched_analyze_insn (struct deps_desc *deps, rtx x, rtx_insn *insn) { if (GET_CODE (XEXP (link, 0)) == CLOBBER) sched_analyze_1 (deps, XEXP (link, 0), insn); + else if (GET_CODE (XEXP (link, 0)) == CLOBBER_HIGH) + /* We could support CLOBBER_HIGH and treat it in the same way as + HARD_REGNO_CALL_PART_CLOBBERED, but no port needs that yet. 
*/ + gcc_unreachable (); else if (GET_CODE (XEXP (link, 0)) != SET) sched_analyze_2 (deps, XEXP (link, 0), insn); }