From 701245b3347bb4ac5433de15e4c69924c2d1a7e8 Mon Sep 17 00:00:00 2001
From: rguenth <rguenth@138bc75d-0d04-0410-961f-82ee72b054a4>
Date: Mon, 6 Jun 2011 10:13:23 +0000
Subject: [PATCH] 2011-06-06  Richard Guenther  <rguenther@suse.de>

	PR tree-optimization/48702
	* tree-ssa-address.c (create_mem_ref_raw): Create MEM_REFs
	only when we know the base address is within bounds.
	* tree-ssa-alias.c (indirect_ref_may_alias_decl_p): Do not
	assume the base address of TARGET_MEM_REFs is in bounds.
	(indirect_refs_may_alias_p): Fix TARGET_MEM_REF without
	index tests.

	* gcc.dg/torture/pr48702.c: New testcase.

	Backport from mainline
	2011-05-31  Jakub Jelinek  <jakub@redhat.com>

	PR rtl-optimization/49235
	* tree-ssa-address.c (gen_addr_rtx): Ignore base if it
	is const0_rtx.
	(create_mem_ref_raw): Create MEM_REF even if base is
	INTEGER_CST.

	* gcc.dg/pr49235.c: New test.

git-svn-id: svn+ssh://gcc.gnu.org/svn/gcc/branches/gcc-4_6-branch@174688 138bc75d-0d04-0410-961f-82ee72b054a4
---
diff --git a/gcc/testsuite/gcc.dg/pr49235.c b/gcc/testsuite/gcc.dg/pr49235.c
new file mode 100644
index 0000000..f1e589f
--- /dev/null
+++ b/gcc/testsuite/gcc.dg/pr49235.c
@@ -0,0 +1,25 @@
+/* PR rtl-optimization/49235 */
+/* { dg-do compile { target { int32plus } } } */
+/* { dg-options "-O -fno-delete-null-pointer-checks -fno-tree-scev-cprop -ftree-vectorize -fno-vect-cost-model -w" } */
+
+void
+foo (void)
+{
+  unsigned i;
+  unsigned *p = 0;
+  for (i = 0; i < 4; ++i)
+    *p++ = 0;
+  for (i = 0; i < 4; ++i)
+    *p++ = 0;
+}
+
+void
+bar (void)
+{
+  unsigned i;
+  unsigned *p = (unsigned *) (__UINTPTR_TYPE__) 0x12340000;
+  for (i = 0; i < 4; ++i)
+    *p++ = 0;
+  for (i = 0; i < 4; ++i)
+    *p++ = 0;
+}
diff --git a/gcc/testsuite/gcc.dg/torture/pr48702.c b/gcc/testsuite/gcc.dg/torture/pr48702.c
new file mode 100644
index 0000000..1ec371d
--- /dev/null
+++ b/gcc/testsuite/gcc.dg/torture/pr48702.c
@@ -0,0 +1,47 @@
+/* { dg-do run } */
+
+extern void abort (void);
+
+#define LEN 4
+
+static inline void unpack(int array[LEN])
+{
+  int ii, val;
+  val = 1;
+  for (ii = 0; ii < LEN; ii++) {
+    array[ii] = val % 2;
+    val = val / 2;
+  }
+}
+
+static inline int pack(int array[LEN])
+{
+  int ans, ii;
+  ans = 0;
+  for (ii = LEN-1; ii >= 0; ii--) {
+    ans = 2 * ans + array[ii];
+  }
+  return ans;
+}
+
+int __attribute__((noinline))
+foo()
+{
+  int temp, ans;
+  int array[LEN];
+  unpack(array);
+  temp = array[0];
+  array[0] = array[2];
+  array[2] = temp;
+  ans = pack(array);
+  return ans;
+}
+
+int main(void)
+{
+  int val;
+  val = foo();
+  if (val != 4)
+    abort ();
+  return 0;
+}
diff --git a/gcc/tree-ssa-address.c b/gcc/tree-ssa-address.c
index a9ca835..437460d 100644
--- a/gcc/tree-ssa-address.c
+++ b/gcc/tree-ssa-address.c
@@ -1,5 +1,5 @@
 /* Memory address lowering and addressing mode selection.
-   Copyright (C) 2004, 2006, 2007, 2008, 2009, 2010
+   Copyright (C) 2004, 2006, 2007, 2008, 2009, 2010, 2011
    Free Software Foundation, Inc.
 
 This file is part of GCC.
@@ -129,7 +129,7 @@ gen_addr_rtx (enum machine_mode address_mode,
       *addr = act_elem;
     }
 
-  if (base)
+  if (base && base != const0_rtx)
     {
       if (*addr)
         *addr = simplify_gen_binary (PLUS, address_mode, base, *addr);
@@ -361,8 +361,11 @@ create_mem_ref_raw (tree type, tree alias_ptr_type, struct mem_address *addr,
       index2 = addr->base;
     }
 
-  /* If possible use a plain MEM_REF instead of a TARGET_MEM_REF.  */
-  if (alias_ptr_type
+  /* If possible use a plain MEM_REF instead of a TARGET_MEM_REF.
+     ??? As IVOPTs does not follow restrictions to where the base
+     pointer may point to create a MEM_REF only if we know that
+     base is valid.  */
+  if ((TREE_CODE (base) == ADDR_EXPR || TREE_CODE (base) == INTEGER_CST)
       && (!index2 || integer_zerop (index2))
       && (!addr->index || integer_zerop (addr->index)))
     return fold_build2 (MEM_REF, type, base, addr->offset);
diff --git a/gcc/tree-ssa-alias.c b/gcc/tree-ssa-alias.c
index 8434179..1d213df 100644
--- a/gcc/tree-ssa-alias.c
+++ b/gcc/tree-ssa-alias.c
@@ -719,8 +719,9 @@ indirect_ref_may_alias_decl_p (tree ref1 ATTRIBUTE_UNUSED, tree base1,
                    alias_set_type base2_alias_set, bool tbaa_p)
 {
   tree ptr1;
-  tree ptrtype1;
+  tree ptrtype1, dbase2;
   HOST_WIDE_INT offset1p = offset1, offset2p = offset2;
+  HOST_WIDE_INT doffset1, doffset2;
 
   ptr1 = TREE_OPERAND (base1, 0);
 
@@ -744,11 +745,12 @@ indirect_ref_may_alias_decl_p (tree ref1 ATTRIBUTE_UNUSED, tree base1,
      the pointer access is beyond the extent of the variable access.
      (the pointer base cannot validly point to an offset less than zero
      of the variable).
-     They also cannot alias if the pointer may not point to the decl.  */
-  if ((TREE_CODE (base1) != TARGET_MEM_REF
-       || (!TMR_INDEX (base1) && !TMR_INDEX2 (base1)))
+     ??? IVOPTs creates bases that do not honor this restriction,
+     so do not apply this optimization for TARGET_MEM_REFs.  */
+  if (TREE_CODE (base1) != TARGET_MEM_REF
       && !ranges_overlap_p (MAX (0, offset1p), -1, offset2p, max_size2))
     return false;
+  /* They also cannot alias if the pointer may not point to the decl.  */
   if (!ptr_deref_may_alias_decl_p (ptr1, base2))
     return false;
 
@@ -771,20 +773,6 @@ indirect_ref_may_alias_decl_p (tree ref1 ATTRIBUTE_UNUSED, tree base1,
   if (base2_alias_set == -1)
     base2_alias_set = get_alias_set (base2);
 
-  /* If both references are through the same type, they do not alias
-     if the accesses do not overlap.  This does extra disambiguation
-     for mixed/pointer accesses but requires strict aliasing.
-     For MEM_REFs we require that the component-ref offset we computed
-     is relative to the start of the type which we ensure by
-     comparing rvalue and access type and disregarding the constant
-     pointer offset.  */
-  if ((TREE_CODE (base1) != TARGET_MEM_REF
-       || (!TMR_INDEX (base1) && !TMR_INDEX2 (base1)))
-      && (TREE_CODE (base1) != MEM_REF
-          || same_type_for_tbaa (TREE_TYPE (base1), TREE_TYPE (ptrtype1)) == 1)
-      && same_type_for_tbaa (TREE_TYPE (ptrtype1), TREE_TYPE (base2)) == 1)
-    return ranges_overlap_p (offset1, max_size1, offset2, max_size2);
-
   /* When we are trying to disambiguate an access with a pointer dereference
      as base versus one with a decl as base we can use both the size
      of the decl and its dynamic type for extra disambiguation.
@@ -814,6 +802,48 @@ indirect_ref_may_alias_decl_p (tree ref1 ATTRIBUTE_UNUSED, tree base1,
       && tree_int_cst_lt (DECL_SIZE (base2), TYPE_SIZE (TREE_TYPE (ptrtype1))))
     return false;
 
+  if (!ref2)
+    return true;
+
+  /* If the decl is accessed via a MEM_REF, reconstruct the base
+     we can use for TBAA and an appropriately adjusted offset.  */
+  dbase2 = ref2;
+  while (handled_component_p (dbase2))
+    dbase2 = TREE_OPERAND (dbase2, 0);
+  doffset1 = offset1;
+  doffset2 = offset2;
+  if (TREE_CODE (dbase2) == MEM_REF
+      || TREE_CODE (dbase2) == TARGET_MEM_REF)
+    {
+      double_int moff = mem_ref_offset (dbase2);
+      moff = double_int_lshift (moff,
+                                BITS_PER_UNIT == 8
+                                ? 3 : exact_log2 (BITS_PER_UNIT),
+                                HOST_BITS_PER_DOUBLE_INT, true);
+      if (double_int_negative_p (moff))
+        doffset1 -= double_int_neg (moff).low;
+      else
+        doffset2 -= moff.low;
+    }
+
+  /* If either reference is view-converted, give up now.  */
+  if (same_type_for_tbaa (TREE_TYPE (base1), TREE_TYPE (ptrtype1)) != 1
+      || same_type_for_tbaa (TREE_TYPE (dbase2),
+                             TREE_TYPE (reference_alias_ptr_type (dbase2))) != 1)
+    return true;
+
+  /* If both references are through the same type, they do not alias
+     if the accesses do not overlap.  This does extra disambiguation
+     for mixed/pointer accesses but requires strict aliasing.
+     For MEM_REFs we require that the component-ref offset we computed
+     is relative to the start of the type which we ensure by
+     comparing rvalue and access type and disregarding the constant
+     pointer offset.  */
+  if ((TREE_CODE (base1) != TARGET_MEM_REF
+       || (!TMR_INDEX (base1) && !TMR_INDEX2 (base1)))
+      && same_type_for_tbaa (TREE_TYPE (base1), TREE_TYPE (dbase2)) == 1)
+    return ranges_overlap_p (doffset1, max_size1, doffset2, max_size2);
+
   /* Do access-path based disambiguation.  */
   if (ref1 && ref2
       && handled_component_p (ref1)
@@ -942,12 +972,12 @@ indirect_refs_may_alias_p (tree ref1 ATTRIBUTE_UNUSED, tree base1,
   /* If both references are through the same type, they do not alias
      if the accesses do not overlap.  This does extra disambiguation
      for mixed/pointer accesses but requires strict aliasing.  */
-  if ((TREE_CODE (base1) != TARGET_MEM_REF || !TMR_INDEX (base1))
-      && (TREE_CODE (base2) != TARGET_MEM_REF || !TMR_INDEX (base2))
-      && (TREE_CODE (base1) != MEM_REF
-          || same_type_for_tbaa (TREE_TYPE (base1), TREE_TYPE (ptrtype1)) == 1)
-      && (TREE_CODE (base2) != MEM_REF
-          || same_type_for_tbaa (TREE_TYPE (base2), TREE_TYPE (ptrtype2)) == 1)
+  if ((TREE_CODE (base1) != TARGET_MEM_REF
+       || (!TMR_INDEX (base1) && !TMR_INDEX2 (base1)))
+      && (TREE_CODE (base2) != TARGET_MEM_REF
+          || (!TMR_INDEX (base2) && !TMR_INDEX2 (base2)))
+      && same_type_for_tbaa (TREE_TYPE (base1), TREE_TYPE (ptrtype1)) == 1
+      && same_type_for_tbaa (TREE_TYPE (base2), TREE_TYPE (ptrtype2)) == 1
       && same_type_for_tbaa (TREE_TYPE (ptrtype1), TREE_TYPE (ptrtype2)) == 1)
     return ranges_overlap_p (offset1, max_size1, offset2, max_size2);
 
-- 
1.7.0.4
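
For illustration (not part of the patch): the PR48702 miscompile hinges on IVOPTs
producing TARGET_MEM_REF bases that do not themselves point into the accessed
object; only base plus index lands back inside it, which is why the alias code
above may no longer assume such a base is in bounds.  The C sketch below is a
hand-written approximation of that shape for the pack() loop in pr48702.c; the
function and variable names are illustrative assumptions, not compiler output.

/* Illustrative sketch only, approximating an IVOPTs-style rewrite of the
   pr48702.c pack() loop; not part of the patch or of GCC.  */
#define LEN 4

int
pack_like (int array[LEN])
{
  /* "base" starts one element before array[].  At the C source level this
     pointer arithmetic is undefined, but IVOPTs may create an equivalent
     TARGET_MEM_REF base internally.  Assuming such a base points into
     array[] would let alias analysis wrongly disambiguate the loads below
     against direct stores to array[0] and array[2].  */
  int *base = array - 1;
  int ans = 0;
  for (int ii = LEN - 1; ii >= 0; ii--)
    ans = 2 * ans + base[ii + 1];  /* same element as array[ii] */
  return ans;
}

With the patch, create_mem_ref_raw only folds such an access back to a plain
MEM_REF when the base is an ADDR_EXPR or INTEGER_CST, and
indirect_ref_may_alias_decl_p stops using the in-bounds assumption for
TARGET_MEM_REF bases.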