Ticket #1935: gcc-4.1.1-fixes-1.patch

File gcc-4.1.1-fixes-1.patch, 8.4 KB (added by alexander@…, 18 years ago)

Adds more backported bugfixes

  • gcc-4.1.1/gcc/Makefile.in

    Submitted By: Alexander E. Patrakov
    Date: 2006-12-11
    Initial Package Version: 4.1.1
    Upstream Status: backport
    Origin: GCC SVN
    Description: Various upstream fixes
    
    1) Remove this from build log with Make-3.81:
    sed: -e expression #1, char 88: unterminated address regex
    2) Fix crash of programs compiled with -Os -ffast-math
    3) Fix some cases of miscompilation
    
    Upstream bugzilla URLs:
    
    http://gcc.gnu.org/bugzilla/show_bug.cgi?id=13685
    http://gcc.gnu.org/bugzilla/show_bug.cgi?id=27334
    http://gcc.gnu.org/bugzilla/show_bug.cgi?id=27616
    http://gcc.gnu.org/bugzilla/show_bug.cgi?id=27768
    http://gcc.gnu.org/bugzilla/show_bug.cgi?id=28386
    
     
    31463209macro_list: s-macro_list; @true
    31473210s-macro_list : $(GCC_PASSES)
    31483211        echo | $(GCC_FOR_TARGET) -E -dM - | \
    3149           sed -n 's/^#define \([^_][a-zA-Z0-9_]*\).*/\1/p ; \
    3150                 s/^#define \(_[^_A-Z][a-zA-Z0-9_]*\).*/\1/p' | \
     3212          sed -n -e 's/^#define \([^_][a-zA-Z0-9_]*\).*/\1/p' \
     3213                 -e 's/^#define \(_[^_A-Z][a-zA-Z0-9_]*\).*/\1/p' | \
    31513214          sort -u > tmp-macro_list
    31523215        $(SHELL) $(srcdir)/../move-if-change tmp-macro_list macro_list
    31533216        $(STAMP) s-macro_list
  • gcc-4.1.1/gcc/config/i386/i386.c

     
    15021502    }
    15031503
    15041504  /* Validate -mpreferred-stack-boundary= value, or provide default.
    1505      The default of 128 bits is for Pentium III's SSE __m128, but we
    1506      don't want additional code to keep the stack aligned when
    1507      optimizing for code size.  */
    1508   ix86_preferred_stack_boundary = (optimize_size
    1509                                    ? TARGET_64BIT ? 128 : 32
    1510                                    : 128);
     1505     The default of 128 bits is for Pentium III's SSE __m128, We can't
     1506     change it because of optimize_size.  Otherwise, we can't mix object
     1507     files compiled with -Os and -On.  */
     1508  ix86_preferred_stack_boundary = 128;
    15111509  if (ix86_preferred_stack_boundary_string)
    15121510    {
    15131511      i = atoi (ix86_preferred_stack_boundary_string);
  • gcc-4.1.1/gcc/cse.c

     
    528528
    529529static struct table_elt *table[HASH_SIZE];
    530530
     531/* Number of elements in the hash table.  */
     532
     533static unsigned int table_size;
     534
    531535/* Chain of `struct table_elt's made so far for this function
    532536   but currently removed from the table.  */
    533537
     
    962966        }
    963967    }
    964968
     969  table_size = 0;
     970
    965971#ifdef HAVE_cc0
    966972  prev_insn = 0;
    967973  prev_insn_cc0 = 0;
     
    13721378  /* Now add it to the free element chain.  */
    13731379  elt->next_same_hash = free_element_chain;
    13741380  free_element_chain = elt;
     1381
     1382  table_size--;
    13751383}
    13761384
    13771385/* Look up X in the hash table and return its table element,
     
    16491657        }
    16501658    }
    16511659
     1660  table_size++;
     1661
    16521662  return elt;
    16531663}
    16541664
  • gcc-4.1.1/gcc/tree-ssa-alias.c

    @@ -3441,10 +3451,10 @@
       return x;
     }
     
    -/* Fold MEM.  */
    +/* Fold MEM.  Not to be called directly, see fold_rtx_mem instead.  */
     
     static rtx
    -fold_rtx_mem (rtx x, rtx insn)
    +fold_rtx_mem_1 (rtx x, rtx insn)
     {
       enum machine_mode mode = GET_MODE (x);
       rtx new;
    @@ -3607,6 +3617,51 @@
       }
     }
     
    +/* Fold MEM.  */
    +
    +static rtx
    +fold_rtx_mem (rtx x, rtx insn)
    +{
    +  /* To avoid infinite oscillations between fold_rtx and fold_rtx_mem,
    +     refuse to allow recursion of the latter past n levels.  This can
    +     happen because fold_rtx_mem will try to fold the address of the
    +     memory reference it is passed, i.e. conceptually throwing away
    +     the MEM and reinjecting the bare address into fold_rtx.  As a
    +     result, patterns like
    +
    +       set (reg1)
    +	   (plus (reg)
    +		 (mem (plus (reg2) (const_int))))
    +
    +       set (reg2)
    +	   (plus (reg)
    +		 (mem (plus (reg1) (const_int))))
    +
    +     will defeat any "first-order" short-circuit put in either
    +     function to prevent these infinite oscillations.
    +
    +     The heuristics for determining n is as follows: since each time
    +     it is invoked fold_rtx_mem throws away a MEM, and since MEMs
    +     are generically not nested, we assume that each invocation of
    +     fold_rtx_mem corresponds to a new "top-level" operand, i.e.
    +     the source or the destination of a SET.  So fold_rtx_mem is
    +     bound to stop or cycle before n recursions, n being the number
    +     of expressions recorded in the hash table.  We also leave some
    +     play to account for the initial steps.  */
    +
    +  static unsigned int depth;
    +  rtx ret;
    +
    +  if (depth > 3 + table_size)
    +    return x;
    +
    +  depth++;
    +  ret = fold_rtx_mem_1 (x, insn);
    +  depth--;
    +
    +  return ret;
    +}
    +
     /* If X is a nontrivial arithmetic operation on an argument
        for which a constant value can be determined, return
        the result of operating on that value, as a constant.
    @@ -4220,21 +4275,23 @@
     	    {
     	      int is_shift
     		= (code == ASHIFT || code == ASHIFTRT || code == LSHIFTRT);
    -	      rtx y = lookup_as_function (folded_arg0, code);
    -	      rtx inner_const;
    +	      rtx y, inner_const, new_const;
     	      enum rtx_code associate_code;
    -	      rtx new_const;
     
    -	      if (y == 0
    -		  || 0 == (inner_const
    -			   = equiv_constant (fold_rtx (XEXP (y, 1), 0)))
    -		  || GET_CODE (inner_const) != CONST_INT
    -		  /* If we have compiled a statement like
    -		     "if (x == (x & mask1))", and now are looking at
    -		     "x & mask2", we will have a case where the first operand
    -		     of Y is the same as our first operand.  Unless we detect
    -		     this case, an infinite loop will result.  */
    -		  || XEXP (y, 0) == folded_arg0)
    +	      y = lookup_as_function (folded_arg0, code);
    +	      if (y == 0)
    +		break;
    +
    +	      /* If we have compiled a statement like
    +		 "if (x == (x & mask1))", and now are looking at
    +		 "x & mask2", we will have a case where the first operand
    +		 of Y is the same as our first operand.  Unless we detect
    +		 this case, an infinite loop will result.  */
    +	      if (XEXP (y, 0) == folded_arg0)
    +		break;
    +
    +	      inner_const = equiv_constant (fold_rtx (XEXP (y, 1), 0));
    +	      if (!inner_const || GET_CODE (inner_const) != CONST_INT)
     		break;
     
     	      /* Don't associate these operations if they are a PLUS with the
     
    766766      struct alias_map_d *p_map = ai->pointers[i];
    767767      tree tag = var_ann (p_map->var)->type_mem_tag;
    768768      var_ann_t tag_ann = var_ann (tag);
     769      tree var;
    769770
    770771      p_map->total_alias_vops = 0;
    771772      p_map->may_aliases = BITMAP_ALLOC (&alias_obstack);
    772773
     774      /* Add any pre-existing may_aliases to the bitmap used to represent
     775         TAG's alias set in case we need to group aliases.  */
     776      if (tag_ann->may_aliases)
     777        for (j = 0; j < VARRAY_ACTIVE_SIZE (tag_ann->may_aliases); ++j)
     778          bitmap_set_bit (p_map->may_aliases,
     779                          DECL_UID (VARRAY_TREE (tag_ann->may_aliases, j)));
     780
    773781      for (j = 0; j < ai->num_addressable_vars; j++)
    774782        {
    775783          struct alias_map_d *v_map;
    776784          var_ann_t v_ann;
    777           tree var;
    778785          bool tag_stored_p, var_stored_p;
    779786         
    780787          v_map = ai->addressable_vars[j];
  • gcc-4.1.1/gcc/loop.c

     
    88248824}
    88258825
    88268826
    8827 /* Return false iff it is provable that biv BL plus BIAS will not wrap
    8828    at any point in its update sequence.  Note that at the rtl level we
    8829    may not have information about the signedness of BL; in that case,
    8830    check for both signed and unsigned overflow.  */
     8827/* Return false iff it is provable that biv BL will not wrap at any point
     8828   in its update sequence.  Note that at the RTL level we may not have
     8829   information about the signedness of BL; in that case, check for both
     8830   signed and unsigned overflow.  */
    88318831
    88328832static bool
    8833 biased_biv_may_wrap_p (const struct loop *loop, struct iv_class *bl,
    8834                        unsigned HOST_WIDE_INT bias)
     8833biv_may_wrap_p (const struct loop *loop, struct iv_class *bl)
    88358834{
    88368835  HOST_WIDE_INT incr;
    88378836  bool check_signed, check_unsigned;
     
    88678866  mode = GET_MODE (bl->biv->src_reg);
    88688867
    88698868  if (check_unsigned
    8870       && !biased_biv_fits_mode_p (loop, bl, incr, mode, bias))
     8869      && !biased_biv_fits_mode_p (loop, bl, incr, mode, 0))
    88718870    return true;
    88728871
    88738872  if (check_signed)
    88748873    {
    8875       bias += (GET_MODE_MASK (mode) >> 1) + 1;
     8874      unsigned HOST_WIDE_INT bias = (GET_MODE_MASK (mode) >> 1) + 1;
    88768875      if (!biased_biv_fits_mode_p (loop, bl, incr, mode, bias))
    88778876        return true;
    88788877    }
     
    1030610305         valid programs.  */
    1030710306      /* Without lifetime analysis, we don't know how COMPARE will be
    1030810307         used, so we must assume the worst.  */
    10309       if (code != EQ && code != NE
    10310           && biased_biv_may_wrap_p (loop, bl, INTVAL (arg)))
     10308      if (code != EQ && code != NE && biv_may_wrap_p (loop, bl))
    1031110309        return 0;
    1031210310
    1031310311      /* Try to replace with any giv that has constant positive mult_val