Submitted By: Alexander E. Patrakov
Date: 2006-12-11
Initial Package Version: 4.1.1
Upstream Status: backport
Origin: GCC SVN
Description: Various fixes backported from upstream. The hunks below
touch gcc/Makefile.in, gcc/cse.c and gcc/tree-ssa-alias.c, in that
order:
1) Remove the following error from the build log when building with
   GNU Make 3.81:
     sed: -e expression #1, char 88: unterminated address regex
   (Make 3.81 changed how backslash-newline sequences in recipes are
   passed to the shell, which broke the old multi-line quoted sed
   script; the macro_list rule now uses two separate -e expressions
   instead.)
2) Fix crashes in programs compiled with -Os -ffast-math.
3) Fix several cases of miscompilation; see the bugzilla entries
   below. Illustrative sketches of the two cse.c changes follow the
   URL list.
Upstream bugzilla URLs:
http://gcc.gnu.org/bugzilla/show_bug.cgi?id=13685
http://gcc.gnu.org/bugzilla/show_bug.cgi?id=27334
http://gcc.gnu.org/bugzilla/show_bug.cgi?id=27616
http://gcc.gnu.org/bugzilla/show_bug.cgi?id=27768
http://gcc.gnu.org/bugzilla/show_bug.cgi?id=28386
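For background on the cse.c changes: the largest hunk renames
fold_rtx_mem to fold_rtx_mem_1 and adds a depth-limited fold_rtx_mem
wrapper, so that fold_rtx and fold_rtx_mem cannot oscillate forever on
the cyclic address patterns described in its comment. The following
standalone C program is a minimal sketch of that guard pattern; all
names and the MAX_DEPTH cap are made up for illustration (the real
patch bounds the depth by 3 + table_size), and none of this is GCC
code:

  #include <stdio.h>

  /* Arbitrary recursion cap for this sketch; the real patch uses
     3 + table_size, i.e. the number of expressions in the CSE hash
     table plus some slack.  */
  #define MAX_DEPTH 8

  static int fold_expr (int x);

  /* Worker: folds its operand by reinjecting a derived value into
     fold_expr, which may bounce right back here; the two functions
     are mutually recursive.  */
  static int
  fold_mem_1 (int x)
  {
    return fold_expr (x / 2);
  }

  /* Guard wrapper: a static depth counter refuses to recurse past
     MAX_DEPTH levels and returns the operand unchanged instead, just
     as the new fold_rtx_mem returns X once depth exceeds its bound.  */
  static int
  fold_mem (int x)
  {
    static unsigned int depth;
    int ret;

    if (depth > MAX_DEPTH)
      return x;   /* give up rather than oscillate */

    depth++;
    ret = fold_mem_1 (x);
    depth--;

    return ret;
  }

  static int
  fold_expr (int x)
  {
    return x > 0 ? fold_mem (x) : x;
  }

  int
  main (void)
  {
    /* Folding 1024 would take 11 levels; the cap stops it early and
       the partially folded value 2 comes back.  */
    printf ("%d\n", fold_expr (1024));
    return 0;
  }

The static counter makes the wrapper non-reentrant, the same trade-off
the patch accepts; CSE runs serially, so a function-local static is
sufficient.

The smaller cse.c hunk only restructures an existing guard: the
comment about "if (x == (x & mask1))" refers to source of roughly the
following shape, where folding the second AND can find an equivalence
whose first operand is the argument being folded (illustrative C only;
f, mask1 and mask2 are arbitrary names):

  /* After CSE has recorded the equivalence implied by the comparison
     below, folding "x & mask2" can look up an AND expression whose
     first operand is the very argument being folded; the check
     "XEXP (y, 0) == folded_arg0" kept by the patch is what prevents
     the resulting infinite loop.  */
  unsigned
  f (unsigned x, unsigned mask1, unsigned mask2)
  {
    if (x == (x & mask1))
      return x & mask2;
    return 0;
  }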
@@ -3146,8 +3209,8 @@
 macro_list: s-macro_list; @true
 s-macro_list : $(GCC_PASSES)
 	echo | $(GCC_FOR_TARGET) -E -dM - | \
-	sed -n 's/^#define \([^_][a-zA-Z0-9_]*\).*/\1/p ; \
-	s/^#define \(_[^_A-Z][a-zA-Z0-9_]*\).*/\1/p' | \
+	sed -n -e 's/^#define \([^_][a-zA-Z0-9_]*\).*/\1/p' \
+	       -e 's/^#define \(_[^_A-Z][a-zA-Z0-9_]*\).*/\1/p' | \
 	sort -u > tmp-macro_list
 	$(SHELL) $(srcdir)/../move-if-change tmp-macro_list macro_list
 	$(STAMP) s-macro_list
@@ -3441,10 +3451,10 @@
   return x;
 }
 
-/* Fold MEM.  */
+/* Fold MEM.  Not to be called directly, see fold_rtx_mem instead.  */
 
 static rtx
-fold_rtx_mem (rtx x, rtx insn)
+fold_rtx_mem_1 (rtx x, rtx insn)
 {
   enum machine_mode mode = GET_MODE (x);
   rtx new;
@@ -3607,6 +3617,51 @@
     }
 }
 
+/* Fold MEM.  */
+
+static rtx
+fold_rtx_mem (rtx x, rtx insn)
+{
+  /* To avoid infinite oscillations between fold_rtx and fold_rtx_mem,
+     refuse to allow recursion of the latter past n levels.  This can
+     happen because fold_rtx_mem will try to fold the address of the
+     memory reference it is passed, i.e. conceptually throwing away
+     the MEM and reinjecting the bare address into fold_rtx.  As a
+     result, patterns like
+
+       set (reg1)
+           (plus (reg)
+                 (mem (plus (reg2) (const_int))))
+
+       set (reg2)
+           (plus (reg)
+                 (mem (plus (reg1) (const_int))))
+
+     will defeat any "first-order" short-circuit put in either
+     function to prevent these infinite oscillations.
+
+     The heuristics for determining n is as follows: since each time
+     it is invoked fold_rtx_mem throws away a MEM, and since MEMs
+     are generically not nested, we assume that each invocation of
+     fold_rtx_mem corresponds to a new "top-level" operand, i.e.
+     the source or the destination of a SET.  So fold_rtx_mem is
+     bound to stop or cycle before n recursions, n being the number
+     of expressions recorded in the hash table.  We also leave some
+     play to account for the initial steps.  */
+
+  static unsigned int depth;
+  rtx ret;
+
+  if (depth > 3 + table_size)
+    return x;
+
+  depth++;
+  ret = fold_rtx_mem_1 (x, insn);
+  depth--;
+
+  return ret;
+}
+
 /* If X is a nontrivial arithmetic operation on an argument
    for which a constant value can be determined, return
    the result of operating on that value, as a constant.
@@ -4220,21 +4275,23 @@
 {
   int is_shift
     = (code == ASHIFT || code == ASHIFTRT || code == LSHIFTRT);
-  rtx y = lookup_as_function (folded_arg0, code);
-  rtx inner_const;
+  rtx y, inner_const, new_const;
   enum rtx_code associate_code;
-  rtx new_const;
 
-  if (y == 0
-      || 0 == (inner_const
-               = equiv_constant (fold_rtx (XEXP (y, 1), 0)))
-      || GET_CODE (inner_const) != CONST_INT
-      /* If we have compiled a statement like
-         "if (x == (x & mask1))", and now are looking at
-         "x & mask2", we will have a case where the first operand
-         of Y is the same as our first operand.  Unless we detect
-         this case, an infinite loop will result.  */
-      || XEXP (y, 0) == folded_arg0)
+  y = lookup_as_function (folded_arg0, code);
+  if (y == 0)
+    break;
+
+  /* If we have compiled a statement like
+     "if (x == (x & mask1))", and now are looking at
+     "x & mask2", we will have a case where the first operand
+     of Y is the same as our first operand.  Unless we detect
+     this case, an infinite loop will result.  */
+  if (XEXP (y, 0) == folded_arg0)
+    break;
+
+  inner_const = equiv_constant (fold_rtx (XEXP (y, 1), 0));
+  if (!inner_const || GET_CODE (inner_const) != CONST_INT)
     break;
 
   /* Don't associate these operations if they are a PLUS with the
@@ -766,15 +766,22 @@
       struct alias_map_d *p_map = ai->pointers[i];
       tree tag = var_ann (p_map->var)->type_mem_tag;
       var_ann_t tag_ann = var_ann (tag);
+      tree var;
 
       p_map->total_alias_vops = 0;
       p_map->may_aliases = BITMAP_ALLOC (&alias_obstack);
 
+      /* Add any pre-existing may_aliases to the bitmap used to represent
+         TAG's alias set in case we need to group aliases.  */
+      if (tag_ann->may_aliases)
+        for (j = 0; j < VARRAY_ACTIVE_SIZE (tag_ann->may_aliases); ++j)
+          bitmap_set_bit (p_map->may_aliases,
+                          DECL_UID (VARRAY_TREE (tag_ann->may_aliases, j)));
+
       for (j = 0; j < ai->num_addressable_vars; j++)
         {
           struct alias_map_d *v_map;
           var_ann_t v_ann;
-          tree var;
           bool tag_stored_p, var_stored_p;
 
           v_map = ai->addressable_vars[j];