author    | Alexander Kabaev <kan@FreeBSD.org> | 2007-05-19 01:19:51 +0000
committer | Alexander Kabaev <kan@FreeBSD.org> | 2007-05-19 01:19:51 +0000
commit    | 6b834ef156bcf24dcf0e281f57ee5bde03ca07cf (patch)
tree      | 0cb530c9c38af219e6dda2994c078b6b2b9ad853 /contrib/gcc/cselib.c
parent    | 9ba78bf6b1135ae200742b2a97ae5bc71c9fd265 (diff)
GCC 4.2.0 release.
Notes:
svn path=/vendor/gcc/dist/; revision=169689
Diffstat (limited to 'contrib/gcc/cselib.c')
-rw-r--r-- | contrib/gcc/cselib.c | 375
1 files changed, 210 insertions, 165 deletions
diff --git a/contrib/gcc/cselib.c b/contrib/gcc/cselib.c index 9c1d93649b59..4070da7c01f0 100644 --- a/contrib/gcc/cselib.c +++ b/contrib/gcc/cselib.c @@ -1,6 +1,6 @@ /* Common subexpression elimination library for GNU compiler. Copyright (C) 1987, 1988, 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998, - 1999, 2000, 2001, 2003, 2004 Free Software Foundation, Inc. + 1999, 2000, 2001, 2003, 2004, 2005 Free Software Foundation, Inc. This file is part of GCC. @@ -16,8 +16,8 @@ for more details. You should have received a copy of the GNU General Public License along with GCC; see the file COPYING. If not, write to the Free -Software Foundation, 59 Temple Place - Suite 330, Boston, MA -02111-1307, USA. */ +Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA +02110-1301, USA. */ #include "config.h" #include "system.h" @@ -33,7 +33,7 @@ Software Foundation, 59 Temple Place - Suite 330, Boston, MA #include "insn-config.h" #include "recog.h" #include "function.h" -#include "expr.h" +#include "emit-rtl.h" #include "toplev.h" #include "output.h" #include "ggc.h" @@ -41,7 +41,9 @@ Software Foundation, 59 Temple Place - Suite 330, Boston, MA #include "cselib.h" #include "params.h" #include "alloc-pool.h" +#include "target.h" +static bool cselib_record_memory; static int entry_and_rtx_equal_p (const void *, const void *); static hashval_t get_value_hash (const void *); static struct elt_list *new_elt_list (struct elt_list *, cselib_val *); @@ -49,12 +51,11 @@ static struct elt_loc_list *new_elt_loc_list (struct elt_loc_list *, rtx); static void unchain_one_value (cselib_val *); static void unchain_one_elt_list (struct elt_list **); static void unchain_one_elt_loc_list (struct elt_loc_list **); -static void clear_table (void); static int discard_useless_locs (void **, void *); static int discard_useless_values (void **, void *); static void remove_useless_values (void); static rtx wrap_constant (enum machine_mode, rtx); -static unsigned int hash_rtx (rtx, enum machine_mode, int); +static unsigned int cselib_hash_rtx (rtx, int); static cselib_val *new_cselib_val (unsigned int, enum machine_mode); static void add_mem_for_addr (cselib_val *, cselib_val *, rtx); static cselib_val *cselib_lookup_mem (rtx, int); @@ -73,7 +74,7 @@ static void cselib_record_sets (rtx); the locations of the entries with the rtx we are looking up. */ /* A table that enables us to look up elts by their value. */ -static GTY((param_is (cselib_val))) htab_t hash_table; +static htab_t cselib_hash_table; /* This is a global so we don't have to pass this through every function. It is used in new_elt_loc_list to set SETTING_INSN. */ @@ -100,18 +101,18 @@ static int n_useless_values; which the register was set; if the mode is unknown or the value is no longer valid in that mode, ELT will be NULL for the first element. */ -static GTY(()) varray_type reg_values; -static GTY((deletable (""))) varray_type reg_values_old; -#define REG_VALUES(I) VARRAY_ELT_LIST (reg_values, (I)) +static struct elt_list **reg_values; +static unsigned int reg_values_size; +#define REG_VALUES(i) reg_values[i] /* The largest number of hard regs used by any entry added to the - REG_VALUES table. Cleared on each clear_table() invocation. */ + REG_VALUES table. Cleared on each cselib_clear_table() invocation. */ static unsigned int max_value_regs; /* Here the set of indices I with REG_VALUES(I) != 0 is saved. This is used - in clear_table() for fast emptying. 
*/ -static GTY(()) varray_type used_regs; -static GTY((deletable (""))) varray_type used_regs_old; + in cselib_clear_table() for fast emptying. */ +static unsigned int *used_regs; +static unsigned int n_used_regs; /* We pass this to cselib_invalidate_mem to invalidate all of memory for a non-const call instruction. */ @@ -155,7 +156,6 @@ new_elt_loc_list (struct elt_loc_list *next, rtx loc) el = pool_alloc (elt_loc_list_pool); el->next = next; el->loc = loc; - el->canon_loc = NULL; el->setting_insn = cselib_current_insn; el->in_libcall = cselib_current_insn_in_libcall; return el; @@ -200,19 +200,19 @@ unchain_one_value (cselib_val *v) initialization. If CLEAR_ALL isn't set, then only clear the entries which are known to have been used. */ -static void -clear_table (void) +void +cselib_clear_table (void) { unsigned int i; - for (i = 0; i < VARRAY_ACTIVE_SIZE (used_regs); i++) - REG_VALUES (VARRAY_UINT (used_regs, i)) = 0; + for (i = 0; i < n_used_regs; i++) + REG_VALUES (used_regs[i]) = 0; max_value_regs = 0; - VARRAY_POP_ALL (used_regs); + n_used_regs = 0; - htab_empty (hash_table); + htab_empty (cselib_hash_table); n_useless_values = 0; @@ -234,9 +234,9 @@ entry_and_rtx_equal_p (const void *entry, const void *x_arg) rtx x = (rtx) x_arg; enum machine_mode mode = GET_MODE (x); - if (GET_CODE (x) == CONST_INT - || (mode == VOIDmode && GET_CODE (x) == CONST_DOUBLE)) - abort (); + gcc_assert (GET_CODE (x) != CONST_INT + && (mode != VOIDmode || GET_CODE (x) != CONST_DOUBLE)); + if (mode != GET_MODE (v->u.val_rtx)) return 0; @@ -256,8 +256,8 @@ entry_and_rtx_equal_p (const void *entry, const void *x_arg) } /* The hash function for our hash table. The value is always computed with - hash_rtx when adding an element; this function just extracts the hash - value from a cselib_val structure. */ + cselib_hash_rtx when adding an element; this function just extracts the + hash value from a cselib_val structure. */ static hashval_t get_value_hash (const void *entry) @@ -332,7 +332,7 @@ discard_useless_values (void **x, void *info ATTRIBUTE_UNUSED) if (v->locs == 0) { CSELIB_VAL_PTR (v->u.val_rtx) = NULL; - htab_clear_slot (hash_table, x); + htab_clear_slot (cselib_hash_table, x); unchain_one_value (v); n_useless_values--; } @@ -352,11 +352,12 @@ remove_useless_values (void) do { values_became_useless = 0; - htab_traverse (hash_table, discard_useless_locs, 0); + htab_traverse (cselib_hash_table, discard_useless_locs, 0); } while (values_became_useless); /* Second pass: actually remove the values. */ + p = &first_containing_mem; for (v = *p; v != &dummy_val; v = v->next_containing_mem) if (v->locs) @@ -366,10 +367,9 @@ remove_useless_values (void) } *p = &dummy_val; - htab_traverse (hash_table, discard_useless_values, 0); + htab_traverse (cselib_hash_table, discard_useless_values, 0); - if (n_useless_values != 0) - abort (); + gcc_assert (!n_useless_values); } /* Return the mode in which a register was last set. 
If X is not a @@ -380,7 +380,7 @@ remove_useless_values (void) enum machine_mode cselib_reg_set_mode (rtx x) { - if (GET_CODE (x) != REG) + if (!REG_P (x)) return GET_MODE (x); if (REG_VALUES (REGNO (x)) == NULL @@ -400,7 +400,7 @@ rtx_equal_for_cselib_p (rtx x, rtx y) const char *fmt; int i; - if (GET_CODE (x) == REG || GET_CODE (x) == MEM) + if (REG_P (x) || MEM_P (x)) { cselib_val *e = cselib_lookup (x, GET_MODE (x), 0); @@ -408,7 +408,7 @@ rtx_equal_for_cselib_p (rtx x, rtx y) x = e->u.val_rtx; } - if (GET_CODE (y) == REG || GET_CODE (y) == MEM) + if (REG_P (y) || MEM_P (y)) { cselib_val *e = cselib_lookup (y, GET_MODE (y), 0); @@ -432,7 +432,7 @@ rtx_equal_for_cselib_p (rtx x, rtx y) rtx t = l->loc; /* Avoid infinite recursion. */ - if (GET_CODE (t) == REG || GET_CODE (t) == MEM) + if (REG_P (t) || MEM_P (t)) continue; else if (rtx_equal_for_cselib_p (t, y)) return 1; @@ -450,7 +450,7 @@ rtx_equal_for_cselib_p (rtx x, rtx y) { rtx t = l->loc; - if (GET_CODE (t) == REG || GET_CODE (t) == MEM) + if (REG_P (t) || MEM_P (t)) continue; else if (rtx_equal_for_cselib_p (x, t)) return 1; @@ -462,9 +462,18 @@ rtx_equal_for_cselib_p (rtx x, rtx y) if (GET_CODE (x) != GET_CODE (y) || GET_MODE (x) != GET_MODE (y)) return 0; - /* This won't be handled correctly by the code below. */ - if (GET_CODE (x) == LABEL_REF) - return XEXP (x, 0) == XEXP (y, 0); + /* These won't be handled correctly by the code below. */ + switch (GET_CODE (x)) + { + case CONST_DOUBLE: + return 0; + + case LABEL_REF: + return XEXP (x, 0) == XEXP (y, 0); + + default: + break; + } code = GET_CODE (x); fmt = GET_RTX_FORMAT (code); @@ -500,6 +509,11 @@ rtx_equal_for_cselib_p (rtx x, rtx y) break; case 'e': + if (i == 1 + && targetm.commutative_p (x, UNKNOWN) + && rtx_equal_for_cselib_p (XEXP (x, 1), XEXP (y, 0)) + && rtx_equal_for_cselib_p (XEXP (x, 0), XEXP (y, 1))) + return 1; if (! rtx_equal_for_cselib_p (XEXP (x, i), XEXP (y, i))) return 0; break; @@ -522,7 +536,7 @@ rtx_equal_for_cselib_p (rtx x, rtx y) contain anything but integers and other rtx's, except for within LABEL_REFs and SYMBOL_REFs. */ default: - abort (); + gcc_unreachable (); } } return 1; @@ -537,8 +551,7 @@ wrap_constant (enum machine_mode mode, rtx x) if (GET_CODE (x) != CONST_INT && (GET_CODE (x) != CONST_DOUBLE || GET_MODE (x) != VOIDmode)) return x; - if (mode == VOIDmode) - abort (); + gcc_assert (mode != VOIDmode); return gen_rtx_CONST (mode, x); } @@ -548,11 +561,22 @@ wrap_constant (enum machine_mode mode, rtx x) Possible reasons for return 0 are: the object is volatile, or we couldn't find a register or memory location in the table and CREATE is zero. If CREATE is nonzero, table elts are created for regs and mem. - MODE is used in hashing for CONST_INTs only; - otherwise the mode of X is used. */ + N.B. this hash function returns the same hash value for RTXes that + differ only in the order of operands, thus it is suitable for comparisons + that take commutativity into account. + If we wanted to also support associative rules, we'd have to use a different + strategy to avoid returning spurious 0, e.g. return ~(~0U >> 1) . + We used to have a MODE argument for hashing for CONST_INTs, but that + didn't make sense, since it caused spurious hash differences between + (set (reg:SI 1) (const_int)) + (plus:SI (reg:SI 2) (reg:SI 1)) + and + (plus:SI (reg:SI 2) (const_int)) + If the mode is important in any context, it must be checked specifically + in a comparison anyway, since relying on hash differences is unsafe. 
*/ static unsigned int -hash_rtx (rtx x, enum machine_mode mode, int create) +cselib_hash_rtx (rtx x, int create) { cselib_val *e; int i, j; @@ -574,7 +598,7 @@ hash_rtx (rtx x, enum machine_mode mode, int create) return e->value; case CONST_INT: - hash += ((unsigned) CONST_INT << 7) + (unsigned) mode + INTVAL (x); + hash += ((unsigned) CONST_INT << 7) + INTVAL (x); return hash ? hash : (unsigned int) CONST_INT; case CONST_DOUBLE: @@ -598,7 +622,7 @@ hash_rtx (rtx x, enum machine_mode mode, int create) for (i = 0; i < units; ++i) { elt = CONST_VECTOR_ELT (x, i); - hash += hash_rtx (elt, GET_MODE (elt), 0); + hash += cselib_hash_rtx (elt, 0); } return hash; @@ -606,14 +630,28 @@ hash_rtx (rtx x, enum machine_mode mode, int create) /* Assume there is only one rtx object for any given label. */ case LABEL_REF: - hash - += ((unsigned) LABEL_REF << 7) + (unsigned long) XEXP (x, 0); + /* We don't hash on the address of the CODE_LABEL to avoid bootstrap + differences and differences between each stage's debugging dumps. */ + hash += (((unsigned int) LABEL_REF << 7) + + CODE_LABEL_NUMBER (XEXP (x, 0))); return hash ? hash : (unsigned int) LABEL_REF; case SYMBOL_REF: - hash - += ((unsigned) SYMBOL_REF << 7) + (unsigned long) XSTR (x, 0); - return hash ? hash : (unsigned int) SYMBOL_REF; + { + /* Don't hash on the symbol's address to avoid bootstrap differences. + Different hash values may cause expressions to be recorded in + different orders and thus different registers to be used in the + final assembler. This also avoids differences in the dump files + between various stages. */ + unsigned int h = 0; + const unsigned char *p = (const unsigned char *) XSTR (x, 0); + + while (*p) + h += (h << 7) + *p++; /* ??? revisit */ + + hash += ((unsigned int) SYMBOL_REF << 7) + h; + return hash ? hash : (unsigned int) SYMBOL_REF; + } case PRE_DEC: case PRE_INC: @@ -641,40 +679,54 @@ hash_rtx (rtx x, enum machine_mode mode, int create) fmt = GET_RTX_FORMAT (code); for (; i >= 0; i--) { - if (fmt[i] == 'e') + switch (fmt[i]) { - rtx tem = XEXP (x, i); - unsigned int tem_hash = hash_rtx (tem, 0, create); - - if (tem_hash == 0) - return 0; - - hash += tem_hash; - } - else if (fmt[i] == 'E') - for (j = 0; j < XVECLEN (x, i); j++) + case 'e': { - unsigned int tem_hash = hash_rtx (XVECEXP (x, i, j), 0, create); - + rtx tem = XEXP (x, i); + unsigned int tem_hash = cselib_hash_rtx (tem, create); + if (tem_hash == 0) return 0; - + hash += tem_hash; } - else if (fmt[i] == 's') - { - const unsigned char *p = (const unsigned char *) XSTR (x, i); + break; + case 'E': + for (j = 0; j < XVECLEN (x, i); j++) + { + unsigned int tem_hash + = cselib_hash_rtx (XVECEXP (x, i, j), create); + + if (tem_hash == 0) + return 0; + + hash += tem_hash; + } + break; - if (p) - while (*p) - hash += *p++; + case 's': + { + const unsigned char *p = (const unsigned char *) XSTR (x, i); + + if (p) + while (*p) + hash += *p++; + break; + } + + case 'i': + hash += XINT (x, i); + break; + + case '0': + case 't': + /* unused */ + break; + + default: + gcc_unreachable (); } - else if (fmt[i] == 'i') - hash += XINT (x, i); - else if (fmt[i] == '0' || fmt[i] == 't') - /* unused */; - else - abort (); } return hash ? 
hash : 1 + (unsigned int) GET_CODE (x); @@ -688,14 +740,14 @@ new_cselib_val (unsigned int value, enum machine_mode mode) { cselib_val *e = pool_alloc (cselib_val_pool); -#ifdef ENABLE_CHECKING - if (value == 0) - abort (); -#endif + gcc_assert (value); e->value = value; - /* We use custom method to allocate this RTL construct because it accounts - about 8% of overall memory usage. */ + /* We use an alloc pool to allocate this RTL construct because it + accounts for about 8% of the overall memory usage. We know + precisely when we can have VALUE RTXen (when cselib is active) + so we don't need to put them in garbage collected memory. + ??? Why should a VALUE be an RTX in the first place? */ e->u.val_rtx = pool_alloc (value_pool); memset (e->u.val_rtx, 0, RTX_HDR_SIZE); PUT_CODE (e->u.val_rtx, VALUE); @@ -718,7 +770,7 @@ add_mem_for_addr (cselib_val *addr_elt, cselib_val *mem_elt, rtx x) /* Avoid duplicates. */ for (l = mem_elt->locs; l; l = l->next) - if (GET_CODE (l->loc) == MEM + if (MEM_P (l->loc) && CSELIB_VAL_PTR (XEXP (l->loc, 0)) == addr_elt) return; @@ -746,6 +798,7 @@ cselib_lookup_mem (rtx x, int create) struct elt_list *l; if (MEM_VOLATILE_P (x) || mode == BLKmode + || !cselib_record_memory || (FLOAT_MODE_P (mode) && flag_float_store)) return 0; @@ -764,7 +817,7 @@ cselib_lookup_mem (rtx x, int create) mem_elt = new_cselib_val (++next_unknown_value, mode); add_mem_for_addr (addr, mem_elt, x); - slot = htab_find_slot_with_hash (hash_table, wrap_constant (mode, x), + slot = htab_find_slot_with_hash (cselib_hash_table, wrap_constant (mode, x), mem_elt->value, INSERT); *slot = mem_elt; return mem_elt; @@ -796,7 +849,7 @@ cselib_subst_to_values (rtx x) if (GET_MODE (l->elt->u.val_rtx) == GET_MODE (x)) return l->elt->u.val_rtx; - abort (); + gcc_unreachable (); case MEM: e = cselib_lookup_mem (x, 0); @@ -881,7 +934,7 @@ cselib_lookup (rtx x, enum machine_mode mode, int create) if (GET_CODE (x) == VALUE) return CSELIB_VAL_PTR (x); - if (GET_CODE (x) == REG) + if (REG_P (x)) { struct elt_list *l; unsigned int i = REGNO (x); @@ -898,7 +951,7 @@ cselib_lookup (rtx x, enum machine_mode mode, int create) if (i < FIRST_PSEUDO_REGISTER) { - unsigned int n = HARD_REGNO_NREGS (i, mode); + unsigned int n = hard_regno_nregs[i][mode]; if (n > max_value_regs) max_value_regs = n; @@ -911,24 +964,24 @@ cselib_lookup (rtx x, enum machine_mode mode, int create) /* Maintain the invariant that the first entry of REG_VALUES, if present, must be the value used to set the register, or NULL. */ - VARRAY_PUSH_UINT (used_regs, i); + used_regs[n_used_regs++] = i; REG_VALUES (i) = new_elt_list (REG_VALUES (i), NULL); } REG_VALUES (i)->next = new_elt_list (REG_VALUES (i)->next, e); - slot = htab_find_slot_with_hash (hash_table, x, e->value, INSERT); + slot = htab_find_slot_with_hash (cselib_hash_table, x, e->value, INSERT); *slot = e; return e; } - if (GET_CODE (x) == MEM) + if (MEM_P (x)) return cselib_lookup_mem (x, create); - hashval = hash_rtx (x, mode, create); + hashval = cselib_hash_rtx (x, create); /* Can't even create if hashing is not possible. */ if (! hashval) return 0; - slot = htab_find_slot_with_hash (hash_table, wrap_constant (mode, x), + slot = htab_find_slot_with_hash (cselib_hash_table, wrap_constant (mode, x), hashval, create ? INSERT : NO_INSERT); if (slot == 0) return 0; @@ -960,9 +1013,8 @@ cselib_invalidate_regno (unsigned int regno, enum machine_mode mode) unsigned int i; /* If we see pseudos after reload, something is _wrong_. 
*/ - if (reload_completed && regno >= FIRST_PSEUDO_REGISTER - && reg_renumber[regno] >= 0) - abort (); + gcc_assert (!reload_completed || regno < FIRST_PSEUDO_REGISTER + || reg_renumber[regno] < 0); /* Determine the range of registers that must be invalidated. For pseudos, only REGNO is affected. For hard regs, we must take MODE @@ -970,15 +1022,14 @@ cselib_invalidate_regno (unsigned int regno, enum machine_mode mode) if they contain values that overlap REGNO. */ if (regno < FIRST_PSEUDO_REGISTER) { - if (mode == VOIDmode) - abort (); + gcc_assert (mode != VOIDmode); if (regno < max_value_regs) i = 0; else i = regno - max_value_regs; - endregno = regno + HARD_REGNO_NREGS (regno, mode); + endregno = regno + hard_regno_nregs[regno][mode]; } else { @@ -999,7 +1050,7 @@ cselib_invalidate_regno (unsigned int regno, enum machine_mode mode) unsigned int this_last = i; if (i < FIRST_PSEUDO_REGISTER && v != NULL) - this_last += HARD_REGNO_NREGS (i, GET_MODE (v->u.val_rtx)) - 1; + this_last += hard_regno_nregs[i][GET_MODE (v->u.val_rtx)] - 1; if (this_last < regno || v == NULL) { @@ -1027,7 +1078,7 @@ cselib_invalidate_regno (unsigned int regno, enum machine_mode mode) { rtx x = (*p)->loc; - if (GET_CODE (x) == REG && REGNO (x) == i) + if (REG_P (x) && REGNO (x) == i) { unchain_one_elt_loc_list (p); break; @@ -1077,19 +1128,16 @@ cselib_invalidate_mem (rtx mem_rtx) while (*p) { rtx x = (*p)->loc; - rtx canon_x = (*p)->canon_loc; cselib_val *addr; struct elt_list **mem_chain; /* MEMs may occur in locations only at the top level; below that every MEM or REG is substituted by its VALUE. */ - if (GET_CODE (x) != MEM) + if (!MEM_P (x)) { p = &(*p)->next; continue; } - if (!canon_x) - canon_x = (*p)->canon_loc = canon_rtx (x); if (num_mems < PARAM_VALUE (PARAM_MAX_CSELIB_MEMORY_LOCATIONS) && ! canon_true_dependence (mem_rtx, GET_MODE (mem_rtx), mem_addr, x, cselib_rtx_varies_p)) @@ -1139,13 +1187,14 @@ cselib_invalidate_mem (rtx mem_rtx) void cselib_invalidate_rtx (rtx dest) { - while (GET_CODE (dest) == STRICT_LOW_PART || GET_CODE (dest) == SIGN_EXTRACT - || GET_CODE (dest) == ZERO_EXTRACT || GET_CODE (dest) == SUBREG) + while (GET_CODE (dest) == SUBREG + || GET_CODE (dest) == ZERO_EXTRACT + || GET_CODE (dest) == STRICT_LOW_PART) dest = XEXP (dest, 0); - if (GET_CODE (dest) == REG) + if (REG_P (dest)) cselib_invalidate_regno (REGNO (dest), GET_MODE (dest)); - else if (GET_CODE (dest) == MEM) + else if (MEM_P (dest)) cselib_invalidate_mem (dest); /* Some machines don't define AUTO_INC_DEC, but they still use push @@ -1172,7 +1221,7 @@ cselib_invalidate_rtx_note_stores (rtx dest, rtx ignore ATTRIBUTE_UNUSED, static void cselib_record_set (rtx dest, cselib_val *src_elt, cselib_val *dest_addr_elt) { - int dreg = GET_CODE (dest) == REG ? (int) REGNO (dest) : -1; + int dreg = REG_P (dest) ? 
(int) REGNO (dest) : -1; if (src_elt == 0 || side_effects_p (dest)) return; @@ -1181,7 +1230,7 @@ cselib_record_set (rtx dest, cselib_val *src_elt, cselib_val *dest_addr_elt) { if (dreg < FIRST_PSEUDO_REGISTER) { - unsigned int n = HARD_REGNO_NREGS (dreg, GET_MODE (dest)); + unsigned int n = hard_regno_nregs[dreg][GET_MODE (dest)]; if (n > max_value_regs) max_value_regs = n; @@ -1189,23 +1238,22 @@ cselib_record_set (rtx dest, cselib_val *src_elt, cselib_val *dest_addr_elt) if (REG_VALUES (dreg) == 0) { - VARRAY_PUSH_UINT (used_regs, dreg); + used_regs[n_used_regs++] = dreg; REG_VALUES (dreg) = new_elt_list (REG_VALUES (dreg), src_elt); } else { - if (REG_VALUES (dreg)->elt == 0) - REG_VALUES (dreg)->elt = src_elt; - else - /* The register should have been invalidated. */ - abort (); + /* The register should have been invalidated. */ + gcc_assert (REG_VALUES (dreg)->elt == 0); + REG_VALUES (dreg)->elt = src_elt; } if (src_elt->locs == 0) n_useless_values--; src_elt->locs = new_elt_loc_list (src_elt->locs, dest); } - else if (GET_CODE (dest) == MEM && dest_addr_elt != 0) + else if (MEM_P (dest) && dest_addr_elt != 0 + && cselib_record_memory) { if (src_elt->locs == 0) n_useless_values--; @@ -1279,13 +1327,14 @@ cselib_record_sets (rtx insn) sets[i].dest = dest = XEXP (dest, 0); /* We don't know how to record anything but REG or MEM. */ - if (GET_CODE (dest) == REG || GET_CODE (dest) == MEM) + if (REG_P (dest) + || (MEM_P (dest) && cselib_record_memory)) { rtx src = sets[i].src; if (cond) src = gen_rtx_IF_THEN_ELSE (GET_MODE (src), cond, src, dest); sets[i].src_elt = cselib_lookup (src, GET_MODE (dest), 1); - if (GET_CODE (dest) == MEM) + if (MEM_P (dest)) sets[i].dest_addr_elt = cselib_lookup (XEXP (dest, 0), Pmode, 1); else sets[i].dest_addr_elt = 0; @@ -1307,7 +1356,7 @@ cselib_record_sets (rtx insn) for (i = 0; i < n_sets; i++) { rtx dest = sets[i].dest; - if (GET_CODE (dest) == REG || GET_CODE (dest) == MEM) + if (REG_P (dest) || MEM_P (dest)) { int j; for (j = i + 1; j < n_sets; j++) @@ -1324,7 +1373,8 @@ cselib_record_sets (rtx insn) for (i = 0; i < n_sets; i++) { rtx dest = sets[i].dest; - if (GET_CODE (dest) == REG || GET_CODE (dest) == MEM) + if (REG_P (dest) + || (MEM_P (dest) && cselib_record_memory)) cselib_record_set (dest, sets[i].src_elt, sets[i].dest_addr_elt); } } @@ -1342,16 +1392,16 @@ cselib_process_insn (rtx insn) cselib_current_insn = insn; /* Forget everything at a CODE_LABEL, a volatile asm, or a setjmp. */ - if (GET_CODE (insn) == CODE_LABEL - || (GET_CODE (insn) == CALL_INSN + if (LABEL_P (insn) + || (CALL_P (insn) && find_reg_note (insn, REG_SETJMP, NULL)) - || (GET_CODE (insn) == INSN + || (NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == ASM_OPERANDS && MEM_VOLATILE_P (PATTERN (insn)))) { if (find_reg_note (insn, REG_RETVAL, NULL)) cselib_current_insn_in_libcall = false; - clear_table (); + cselib_clear_table (); return; } @@ -1366,10 +1416,13 @@ cselib_process_insn (rtx insn) /* If this is a call instruction, forget anything stored in a call clobbered register, or, if this is not a const call, in memory. */ - if (GET_CODE (insn) == CALL_INSN) + if (CALL_P (insn)) { for (i = 0; i < FIRST_PSEUDO_REGISTER; i++) - if (call_used_regs[i]) + if (call_used_regs[i] + || (REG_VALUES (i) && REG_VALUES (i)->elt + && HARD_REGNO_CALL_PART_CLOBBERED (i, + GET_MODE (REG_VALUES (i)->elt->u.val_rtx)))) cselib_invalidate_regno (i, reg_raw_mode[i]); if (! 
CONST_OR_PURE_CALL_P (insn)) @@ -1389,7 +1442,7 @@ cselib_process_insn (rtx insn) /* Look for any CLOBBERs in CALL_INSN_FUNCTION_USAGE, but only after we have processed the insn. */ - if (GET_CODE (insn) == CALL_INSN) + if (CALL_P (insn)) for (x = CALL_INSN_FUNCTION_USAGE (insn); x; x = XEXP (x, 1)) if (GET_CODE (XEXP (x, 0)) == CLOBBER) cselib_invalidate_rtx (XEXP (XEXP (x, 0), 0)); @@ -1398,31 +1451,19 @@ cselib_process_insn (rtx insn) cselib_current_insn_in_libcall = false; cselib_current_insn = 0; - if (n_useless_values > MAX_USELESS_VALUES) + if (n_useless_values > MAX_USELESS_VALUES + /* remove_useless_values is linear in the hash table size. Avoid + quadratic behaviour for very large hashtables with very few + useless elements. */ + && (unsigned int)n_useless_values > cselib_hash_table->n_elements / 4) remove_useless_values (); } -/* Make sure our varrays are big enough. Not called from any cselib routines; - it must be called by the user if it allocated new registers. */ - -void -cselib_update_varray_sizes (void) -{ - unsigned int nregs = max_reg_num (); - - if (nregs == cselib_nregs) - return; - - cselib_nregs = nregs; - VARRAY_GROW (reg_values, nregs); - VARRAY_GROW (used_regs, nregs); -} - /* Initialize cselib for one pass. The caller must also call init_alias_analysis. */ void -cselib_init (void) +cselib_init (bool record_memory) { elt_list_pool = create_alloc_pool ("elt_list", sizeof (struct elt_list), 10); @@ -1430,25 +1471,30 @@ cselib_init (void) sizeof (struct elt_loc_list), 10); cselib_val_pool = create_alloc_pool ("cselib_val_list", sizeof (cselib_val), 10); - value_pool = create_alloc_pool ("value", - RTX_SIZE (VALUE), 10); + value_pool = create_alloc_pool ("value", RTX_CODE_SIZE (VALUE), 100); + cselib_record_memory = record_memory; /* This is only created once. */ if (! callmem) callmem = gen_rtx_MEM (BLKmode, const0_rtx); cselib_nregs = max_reg_num (); - if (reg_values_old != NULL && VARRAY_SIZE (reg_values_old) >= cselib_nregs) - { - reg_values = reg_values_old; - used_regs = used_regs_old; - } - else + + /* We preserve reg_values to allow expensive clearing of the whole thing. + Reallocate it however if it happens to be too large. */ + if (!reg_values || reg_values_size < cselib_nregs + || (reg_values_size > 10 && reg_values_size > cselib_nregs * 4)) { - VARRAY_ELT_LIST_INIT (reg_values, cselib_nregs, "reg_values"); - VARRAY_UINT_INIT (used_regs, cselib_nregs, "used_regs"); + if (reg_values) + free (reg_values); + /* Some space for newly emit instructions so we don't end up + reallocating in between passes. */ + reg_values_size = cselib_nregs + (63 + cselib_nregs) / 16; + reg_values = XCNEWVEC (struct elt_list *, reg_values_size); } - hash_table = htab_create_ggc (31, get_value_hash, entry_and_rtx_equal_p, - NULL); + used_regs = XNEWVEC (unsigned int, cselib_nregs); + n_used_regs = 0; + cselib_hash_table = htab_create (31, get_value_hash, + entry_and_rtx_equal_p, NULL); cselib_current_insn_in_libcall = false; } @@ -1461,12 +1507,11 @@ cselib_finish (void) free_alloc_pool (elt_loc_list_pool); free_alloc_pool (cselib_val_pool); free_alloc_pool (value_pool); - clear_table (); - reg_values_old = reg_values; - reg_values = 0; - used_regs_old = used_regs; + cselib_clear_table (); + htab_delete (cselib_hash_table); + free (used_regs); used_regs = 0; - hash_table = 0; + cselib_hash_table = 0; n_useless_values = 0; next_unknown_value = 0; } |
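The commutativity-aware value numbering this patch introduces is easier to see outside of RTL. The following standalone C sketch is illustrative only: the expr type and the expr_hash, commutative_p and expr_equal_p names are invented, not GCC's. It mirrors the two halves of the scheme described in the cselib_hash_rtx comment above: a hash that sums operand hashes so operand order cannot change the value, and an equality test that, like the new targetm.commutative_p check in rtx_equal_for_cselib_p, retries with the operands swapped for commutative codes.

/*
 * Toy illustration (not GCC code): an order-insensitive hash paired
 * with an equality test that retries swapped operands for commutative
 * operators.
 */
#include <stdio.h>

enum op { OP_PLUS, OP_MINUS, OP_REG, OP_CONST };

struct expr
{
  enum op code;
  int value;                        /* register number or constant */
  const struct expr *op0, *op1;     /* NULL for leaves */
};

/* Combine operand hashes by addition, so (plus a b) and (plus b a)
   collide on purpose; the hash only has to be insensitive to operand
   order, it never decides equality by itself.  */
static unsigned int
expr_hash (const struct expr *e)
{
  unsigned int h = ((unsigned int) e->code << 7) + e->value;
  if (e->op0)
    h += expr_hash (e->op0);
  if (e->op1)
    h += expr_hash (e->op1);
  return h ? h : 1;
}

static int
commutative_p (enum op code)
{
  return code == OP_PLUS;
}

static int
expr_equal_p (const struct expr *x, const struct expr *y)
{
  if (x == y)
    return 1;
  if (x->code != y->code || x->value != y->value)
    return 0;
  if (!x->op0)
    return 1;
  if (expr_equal_p (x->op0, y->op0) && expr_equal_p (x->op1, y->op1))
    return 1;
  /* Second chance for commutative codes: compare with the operands
     swapped, exactly the case the shared hash value allows for.  */
  return commutative_p (x->code)
         && expr_equal_p (x->op0, y->op1)
         && expr_equal_p (x->op1, y->op0);
}

int
main (void)
{
  struct expr r1 = { OP_REG, 1, 0, 0 }, r2 = { OP_REG, 2, 0, 0 };
  struct expr a = { OP_PLUS, 0, &r1, &r2 };
  struct expr b = { OP_PLUS, 0, &r2, &r1 };

  printf ("hash equal: %d, equal: %d\n",
          expr_hash (&a) == expr_hash (&b), expr_equal_p (&a, &b));
  return 0;
}

Because the hash is order-insensitive, both spellings of the same commutative expression land in the same bucket, and the swapped-operand retry in the equality test is what actually proves them equal; associative rewrites would still need the different strategy the patch's comment mentions.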
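The new cselib_init also stops handing reg_values back to the garbage collector and instead keeps the array alive between passes, reallocating only when it is too small or wastefully large. Here is a rough standalone sketch of that reuse policy, with assumed names (reg_values_init is invented; only the growth and shrink thresholds follow the patch):

/*
 * Standalone sketch (assumed names, not the patch itself) of the
 * reg_values reuse policy: keep the per-register array across passes,
 * reallocate only when it is too small or more than 4x too large.
 */
#include <stdlib.h>
#include <string.h>

struct elt_list;                       /* opaque, as in cselib */

static struct elt_list **reg_values;
static unsigned int reg_values_size;

static void
reg_values_init (unsigned int nregs)
{
  if (!reg_values
      || reg_values_size < nregs
      || (reg_values_size > 10 && reg_values_size > nregs * 4))
    {
      free (reg_values);
      /* Leave slack for instructions emitted later so we do not
         reallocate mid-pass; same growth rule as the patch:
         nregs + (63 + nregs) / 16.  */
      reg_values_size = nregs + (63 + nregs) / 16;
      reg_values = calloc (reg_values_size, sizeof *reg_values);
    }
  else
    /* Reuse the old array.  The toy clears it all; the real code
       avoids even this by having cselib_clear_table() reset only the
       entries recorded in used_regs.  */
    memset (reg_values, 0, reg_values_size * sizeof *reg_values);
}

int
main (void)
{
  reg_values_init (100);   /* first pass: allocates 100 + 10 slots */
  reg_values_init (120);   /* second pass: outgrew the slack, reallocates */
  free (reg_values);
  return 0;
}

The 4x shrink threshold keeps a pass that briefly needed many pseudos from pinning an oversized array for the rest of the compilation, while the slack term absorbs registers created after cselib_init without another allocation.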