Diffstat (limited to 'gcc-4.9/gcc/config/arm')
-rw-r--r--  gcc-4.9/gcc/config/arm/arm-protos.h    |  3
-rw-r--r--  gcc-4.9/gcc/config/arm/arm.c           | 88
-rw-r--r--  gcc-4.9/gcc/config/arm/arm.h           | 11
-rw-r--r--  gcc-4.9/gcc/config/arm/arm.md          | 64
-rw-r--r--  gcc-4.9/gcc/config/arm/constraints.md  | 11
-rw-r--r--  gcc-4.9/gcc/config/arm/linux-grte.h    | 27
-rw-r--r--  gcc-4.9/gcc/config/arm/t-aprofile      |  3
7 files changed, 107 insertions, 100 deletions
diff --git a/gcc-4.9/gcc/config/arm/arm-protos.h b/gcc-4.9/gcc/config/arm/arm-protos.h
index 13874ee6e..2ac3b3009 100644
--- a/gcc-4.9/gcc/config/arm/arm-protos.h
+++ b/gcc-4.9/gcc/config/arm/arm-protos.h
@@ -56,6 +56,7 @@ extern int arm_split_constant (RTX_CODE, enum machine_mode, rtx,
extern int legitimate_pic_operand_p (rtx);
extern rtx legitimize_pic_address (rtx, enum machine_mode, rtx);
extern rtx legitimize_tls_address (rtx, rtx);
+extern bool arm_legitimate_address_p (enum machine_mode, rtx, bool);
extern int arm_legitimate_address_outer_p (enum machine_mode, rtx, RTX_CODE, int);
extern int thumb_legitimate_offset_p (enum machine_mode, HOST_WIDE_INT);
extern bool arm_legitimize_reload_address (rtx *, enum machine_mode, int, int,
@@ -294,4 +295,6 @@ extern void arm_emit_eabi_attribute (const char *, int, int);
/* Defined in gcc/common/config/arm-common.c. */
extern const char *arm_rewrite_selected_cpu (const char *name);
+extern bool arm_is_constant_pool_ref (rtx);
+
#endif /* ! GCC_ARM_PROTOS_H */
diff --git a/gcc-4.9/gcc/config/arm/arm.c b/gcc-4.9/gcc/config/arm/arm.c
index 3c237cb6d..b79bb48b1 100644
--- a/gcc-4.9/gcc/config/arm/arm.c
+++ b/gcc-4.9/gcc/config/arm/arm.c
@@ -89,7 +89,6 @@ static rtx arm_legitimize_address (rtx, rtx, enum machine_mode);
static reg_class_t arm_preferred_reload_class (rtx, reg_class_t);
static rtx thumb_legitimize_address (rtx, rtx, enum machine_mode);
inline static int thumb1_index_register_rtx_p (rtx, int);
-static bool arm_legitimate_address_p (enum machine_mode, rtx, bool);
static int thumb_far_jump_used_p (void);
static bool thumb_force_lr_save (void);
static unsigned arm_size_return_regs (void);
@@ -13952,9 +13951,9 @@ arm_block_move_unaligned_straight (rtx dstbase, rtx srcbase,
HOST_WIDE_INT srcoffset, dstoffset;
HOST_WIDE_INT src_autoinc, dst_autoinc;
rtx mem, addr;
-
+
gcc_assert (1 <= interleave_factor && interleave_factor <= 4);
-
+
/* Use hard registers if we have aligned source or destination so we can use
load/store multiple with contiguous registers. */
if (dst_aligned || src_aligned)
@@ -13968,7 +13967,7 @@ arm_block_move_unaligned_straight (rtx dstbase, rtx srcbase,
src = copy_addr_to_reg (XEXP (srcbase, 0));
srcoffset = dstoffset = 0;
-
+
/* Calls to arm_gen_load_multiple and arm_gen_store_multiple update SRC/DST.
For copying the last bytes we want to subtract this offset again. */
src_autoinc = dst_autoinc = 0;
@@ -14022,14 +14021,14 @@ arm_block_move_unaligned_straight (rtx dstbase, rtx srcbase,
remaining -= block_size_bytes;
}
-
+
/* Copy any whole words left (note these aren't interleaved with any
subsequent halfword/byte load/stores in the interests of simplicity). */
-
+
words = remaining / UNITS_PER_WORD;
gcc_assert (words < interleave_factor);
-
+
if (src_aligned && words > 1)
{
emit_insn (arm_gen_load_multiple (regnos, words, src, TRUE, srcbase,
@@ -14069,11 +14068,11 @@ arm_block_move_unaligned_straight (rtx dstbase, rtx srcbase,
}
remaining -= words * UNITS_PER_WORD;
-
+
gcc_assert (remaining < 4);
-
+
/* Copy a halfword if necessary. */
-
+
if (remaining >= 2)
{
halfword_tmp = gen_reg_rtx (SImode);
@@ -14097,11 +14096,11 @@ arm_block_move_unaligned_straight (rtx dstbase, rtx srcbase,
remaining -= 2;
srcoffset += 2;
}
-
+
gcc_assert (remaining < 2);
-
+
/* Copy last byte. */
-
+
if ((remaining & 1) != 0)
{
byte_tmp = gen_reg_rtx (SImode);
@@ -14122,9 +14121,9 @@ arm_block_move_unaligned_straight (rtx dstbase, rtx srcbase,
remaining--;
srcoffset++;
}
-
+
/* Store last halfword if we haven't done so already. */
-
+
if (halfword_tmp)
{
addr = plus_constant (Pmode, dst, dstoffset - dst_autoinc);
@@ -14143,7 +14142,7 @@ arm_block_move_unaligned_straight (rtx dstbase, rtx srcbase,
emit_move_insn (mem, gen_lowpart (QImode, byte_tmp));
dstoffset++;
}
-
+
gcc_assert (remaining == 0 && srcoffset == dstoffset);
}
@@ -14162,7 +14161,7 @@ arm_adjust_block_mem (rtx mem, HOST_WIDE_INT length, rtx *loop_reg,
rtx *loop_mem)
{
*loop_reg = copy_addr_to_reg (XEXP (mem, 0));
-
+
/* Although the new mem does not refer to a known location,
it does keep up to LENGTH bytes of alignment. */
*loop_mem = change_address (mem, BLKmode, *loop_reg);
@@ -14182,14 +14181,14 @@ arm_block_move_unaligned_loop (rtx dest, rtx src, HOST_WIDE_INT length,
{
rtx label, src_reg, dest_reg, final_src, test;
HOST_WIDE_INT leftover;
-
+
leftover = length % bytes_per_iter;
length -= leftover;
-
+
/* Create registers and memory references for use within the loop. */
arm_adjust_block_mem (src, bytes_per_iter, &src_reg, &src);
arm_adjust_block_mem (dest, bytes_per_iter, &dest_reg, &dest);
-
+
/* Calculate the value that SRC_REG should have after the last iteration of
the loop. */
final_src = expand_simple_binop (Pmode, PLUS, src_reg, GEN_INT (length),
@@ -14198,7 +14197,7 @@ arm_block_move_unaligned_loop (rtx dest, rtx src, HOST_WIDE_INT length,
/* Emit the start of the loop. */
label = gen_label_rtx ();
emit_label (label);
-
+
/* Emit the loop body. */
arm_block_move_unaligned_straight (dest, src, bytes_per_iter,
interleave_factor);
@@ -14206,11 +14205,11 @@ arm_block_move_unaligned_loop (rtx dest, rtx src, HOST_WIDE_INT length,
/* Move on to the next block. */
emit_move_insn (src_reg, plus_constant (Pmode, src_reg, bytes_per_iter));
emit_move_insn (dest_reg, plus_constant (Pmode, dest_reg, bytes_per_iter));
-
+
/* Emit the loop condition. */
test = gen_rtx_NE (VOIDmode, src_reg, final_src);
emit_jump_insn (gen_cbranchsi4 (test, src_reg, final_src, label));
-
+
/* Mop up any left-over bytes. */
if (leftover)
arm_block_move_unaligned_straight (dest, src, leftover, interleave_factor);
@@ -14224,7 +14223,7 @@ static int
arm_movmemqi_unaligned (rtx *operands)
{
HOST_WIDE_INT length = INTVAL (operands[2]);
-
+
if (optimize_size)
{
bool src_aligned = MEM_ALIGN (operands[1]) >= BITS_PER_WORD;
@@ -14235,7 +14234,7 @@ arm_movmemqi_unaligned (rtx *operands)
resulting code can be smaller. */
unsigned int interleave_factor = (src_aligned || dst_aligned) ? 2 : 1;
HOST_WIDE_INT bytes_per_iter = (src_aligned || dst_aligned) ? 8 : 4;
-
+
if (length > 12)
arm_block_move_unaligned_loop (operands[0], operands[1], length,
interleave_factor, bytes_per_iter);
@@ -14253,7 +14252,7 @@ arm_movmemqi_unaligned (rtx *operands)
else
arm_block_move_unaligned_straight (operands[0], operands[1], length, 4);
}
-
+
return 1;
}
@@ -28520,7 +28519,11 @@ arm_set_return_address (rtx source, rtx scratch)
addr = plus_constant (Pmode, addr, delta);
}
- emit_move_insn (gen_frame_mem (Pmode, addr), source);
+ /* The store needs to be marked as frame related in order to prevent
+ DSE from deleting it as dead if it is based on fp. */
+ rtx insn = emit_move_insn (gen_frame_mem (Pmode, addr), source);
+ RTX_FRAME_RELATED_P (insn) = 1;
+ add_reg_note (insn, REG_CFA_RESTORE, gen_rtx_REG (Pmode, LR_REGNUM));
}
}
@@ -28572,7 +28575,11 @@ thumb_set_return_address (rtx source, rtx scratch)
else
addr = plus_constant (Pmode, addr, delta);
- emit_move_insn (gen_frame_mem (Pmode, addr), source);
+ /* The store needs to be marked as frame related in order to prevent
+ DSE from deleting it as dead if it is based on fp. */
+ rtx insn = emit_move_insn (gen_frame_mem (Pmode, addr), source);
+ RTX_FRAME_RELATED_P (insn) = 1;
+ add_reg_note (insn, REG_CFA_RESTORE, gen_rtx_REG (Pmode, LR_REGNUM));
}
else
emit_move_insn (gen_rtx_REG (Pmode, LR_REGNUM), source);
@@ -29828,10 +29835,10 @@ int
vfp3_const_double_for_fract_bits (rtx operand)
{
REAL_VALUE_TYPE r0;
-
+
if (!CONST_DOUBLE_P (operand))
return 0;
-
+
REAL_VALUE_FROM_CONST_DOUBLE (r0, operand);
if (exact_real_inverse (DFmode, &r0))
{
@@ -30825,7 +30832,7 @@ arm_autoinc_modes_ok_p (enum machine_mode mode, enum arm_auto_incmodes code)
else
return false;
}
-
+
return true;
case ARM_POST_DEC:
@@ -30842,10 +30849,10 @@ arm_autoinc_modes_ok_p (enum machine_mode mode, enum arm_auto_incmodes code)
return false;
return true;
-
+
default:
return false;
-
+
}
return false;
@@ -30856,7 +30863,7 @@ arm_autoinc_modes_ok_p (enum machine_mode mode, enum arm_auto_incmodes code)
Additionally, the default expansion code is not available or suitable
for post-reload insn splits (this can occur when the register allocator
chooses not to do a shift in NEON).
-
+
This function is used in both initial expand and post-reload splits, and
handles all kinds of 64-bit shifts.
@@ -31109,7 +31116,7 @@ arm_validize_comparison (rtx *comparison, rtx * op1, rtx * op2)
{
enum rtx_code code = GET_CODE (*comparison);
int code_int;
- enum machine_mode mode = (GET_MODE (*op1) == VOIDmode)
+ enum machine_mode mode = (GET_MODE (*op1) == VOIDmode)
? GET_MODE (*op2) : GET_MODE (*op1);
gcc_assert (GET_MODE (*op1) != VOIDmode || GET_MODE (*op2) != VOIDmode);
@@ -31163,7 +31170,7 @@ arm_asan_shadow_offset (void)
/* This is a temporary fix for PR60655. Ideally we need
to handle most of these cases in the generic part but
- currently we reject minus (..) (sym_ref). We try to
+ currently we reject minus (..) (sym_ref). We try to
ameliorate the case with minus (sym_ref1) (sym_ref2)
where they are in the same section. */
@@ -31393,4 +31400,13 @@ arm_load_global_address (rtx symbol, rtx offset_reg,
df_insn_rescan (load_insn);
}
+/* return TRUE if x is a reference to a value in a constant pool */
+extern bool
+arm_is_constant_pool_ref (rtx x)
+{
+ return (MEM_P (x)
+ && GET_CODE (XEXP (x, 0)) == SYMBOL_REF
+ && CONSTANT_POOL_ADDRESS_P (XEXP (x, 0)));
+}
+
#include "gt-arm.h"
diff --git a/gcc-4.9/gcc/config/arm/arm.h b/gcc-4.9/gcc/config/arm/arm.h
index ab5167a8b..433a3dd77 100644
--- a/gcc-4.9/gcc/config/arm/arm.h
+++ b/gcc-4.9/gcc/config/arm/arm.h
@@ -74,8 +74,8 @@ extern char arm_arch_name[];
builtin_define_with_int_value ( \
"__ARM_SIZEOF_MINIMAL_ENUM", \
flag_short_enums ? 1 : 4); \
- builtin_define_with_int_value ( \
- "__ARM_SIZEOF_WCHAR_T", WCHAR_TYPE_SIZE); \
+ builtin_define_type_sizeof ("__ARM_SIZEOF_WCHAR_T", \
+ wchar_type_node); \
if (TARGET_ARM_ARCH_PROFILE) \
builtin_define_with_int_value ( \
"__ARM_ARCH_PROFILE", TARGET_ARM_ARCH_PROFILE); \
@@ -2139,9 +2139,10 @@ extern int making_const_table;
? reverse_condition_maybe_unordered (code) \
: reverse_condition (code))
-/* The arm5 clz instruction returns 32. */
-#define CLZ_DEFINED_VALUE_AT_ZERO(MODE, VALUE) ((VALUE) = 32, 1)
-#define CTZ_DEFINED_VALUE_AT_ZERO(MODE, VALUE) ((VALUE) = 32, 1)
+#define CLZ_DEFINED_VALUE_AT_ZERO(MODE, VALUE) \
+ ((VALUE) = GET_MODE_UNIT_BITSIZE (MODE))
+#define CTZ_DEFINED_VALUE_AT_ZERO(MODE, VALUE) \
+ ((VALUE) = GET_MODE_UNIT_BITSIZE (MODE))
#define CC_STATUS_INIT \
do { cfun->machine->thumb1_cc_insn = NULL_RTX; } while (0)
diff --git a/gcc-4.9/gcc/config/arm/arm.md b/gcc-4.9/gcc/config/arm/arm.md
index 467f9ce4e..1153a1e34 100644
--- a/gcc-4.9/gcc/config/arm/arm.md
+++ b/gcc-4.9/gcc/config/arm/arm.md
@@ -127,9 +127,10 @@
; This can be "a" for ARM, "t" for either of the Thumbs, "32" for
; TARGET_32BIT, "t1" or "t2" to specify a specific Thumb mode. "v6"
; for ARM or Thumb-2 with arm_arch6, and nov6 for ARM without
-; arm_arch6. This attribute is used to compute attribute "enabled",
-; use type "any" to enable an alternative in all cases.
-(define_attr "arch" "any,a,t,32,t1,t2,v6,nov6,neon_for_64bits,avoid_neon_for_64bits,iwmmxt,iwmmxt2"
+; arm_arch6. "v6t2" for Thumb-2 with arm_arch6. This attribute is
+; used to compute attribute "enabled", use type "any" to enable an
+; alternative in all cases.
+(define_attr "arch" "any,a,t,32,t1,t2,v6,nov6,v6t2,neon_for_64bits,avoid_neon_for_64bits,iwmmxt,iwmmxt2"
(const_string "any"))
(define_attr "arch_enabled" "no,yes"
@@ -164,6 +165,10 @@
(match_test "TARGET_32BIT && !arm_arch6"))
(const_string "yes")
+ (and (eq_attr "arch" "v6t2")
+ (match_test "TARGET_32BIT && arm_arch6 && arm_arch_thumb2"))
+ (const_string "yes")
+
(and (eq_attr "arch" "avoid_neon_for_64bits")
(match_test "TARGET_NEON")
(not (match_test "TARGET_PREFER_NEON_64BITS")))
@@ -3631,7 +3636,7 @@
[(match_operand:SI 1 "s_register_operand" "r")
(match_operand:SI 2 "s_register_operand" "r")]))
(clobber (reg:CC CC_REGNUM))]
- "TARGET_32BIT && optimize_function_for_size_p (cfun)"
+ "TARGET_32BIT && optimize_function_for_size_p (cfun) && !arm_restrict_it"
"*
operands[3] = gen_rtx_fmt_ee (minmax_code (operands[3]), SImode,
operands[1], operands[2]);
@@ -4374,7 +4379,7 @@
(define_insn "unaligned_loadhis"
[(set (match_operand:SI 0 "s_register_operand" "=l,r")
(sign_extend:SI
- (unspec:HI [(match_operand:HI 1 "memory_operand" "Uw,m")]
+ (unspec:HI [(match_operand:HI 1 "memory_operand" "Uw,Uh")]
UNSPEC_UNALIGNED_LOAD)))]
"unaligned_access && TARGET_32BIT"
"ldr%(sh%)\t%0, %1\t@ unaligned"
@@ -5287,7 +5292,7 @@
(define_insn "*arm_zero_extendhisi2_v6"
[(set (match_operand:SI 0 "s_register_operand" "=r,r")
- (zero_extend:SI (match_operand:HI 1 "nonimmediate_operand" "r,m")))]
+ (zero_extend:SI (match_operand:HI 1 "nonimmediate_operand" "r,Uh")))]
"TARGET_ARM && arm_arch6"
"@
uxth%?\\t%0, %1
@@ -5381,7 +5386,7 @@
(define_insn "*arm_zero_extendqisi2_v6"
[(set (match_operand:SI 0 "s_register_operand" "=r,r")
- (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "r,m")))]
+ (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "r,Uh")))]
"TARGET_ARM && arm_arch6"
"@
uxtb%(%)\\t%0, %1
@@ -5615,31 +5620,27 @@
(define_insn "*arm_extendhisi2"
[(set (match_operand:SI 0 "s_register_operand" "=r,r")
- (sign_extend:SI (match_operand:HI 1 "nonimmediate_operand" "r,m")))]
+ (sign_extend:SI (match_operand:HI 1 "nonimmediate_operand" "r,Uh")))]
"TARGET_ARM && arm_arch4 && !arm_arch6"
"@
#
ldr%(sh%)\\t%0, %1"
[(set_attr "length" "8,4")
(set_attr "type" "alu_shift_reg,load_byte")
- (set_attr "predicable" "yes")
- (set_attr "pool_range" "*,256")
- (set_attr "neg_pool_range" "*,244")]
+ (set_attr "predicable" "yes")]
)
;; ??? Check Thumb-2 pool range
(define_insn "*arm_extendhisi2_v6"
[(set (match_operand:SI 0 "s_register_operand" "=r,r")
- (sign_extend:SI (match_operand:HI 1 "nonimmediate_operand" "r,m")))]
+ (sign_extend:SI (match_operand:HI 1 "nonimmediate_operand" "r,Uh")))]
"TARGET_32BIT && arm_arch6"
"@
sxth%?\\t%0, %1
ldr%(sh%)\\t%0, %1"
[(set_attr "type" "extend,load_byte")
(set_attr "predicable" "yes")
- (set_attr "predicable_short_it" "no")
- (set_attr "pool_range" "*,256")
- (set_attr "neg_pool_range" "*,244")]
+ (set_attr "predicable_short_it" "no")]
)
(define_insn "*arm_extendhisi2addsi"
@@ -5682,9 +5683,7 @@
"TARGET_ARM && arm_arch4"
"ldr%(sb%)\\t%0, %1"
[(set_attr "type" "load_byte")
- (set_attr "predicable" "yes")
- (set_attr "pool_range" "256")
- (set_attr "neg_pool_range" "244")]
+ (set_attr "predicable" "yes")]
)
(define_expand "extendqisi2"
@@ -5724,9 +5723,7 @@
ldr%(sb%)\\t%0, %1"
[(set_attr "length" "8,4")
(set_attr "type" "alu_shift_reg,load_byte")
- (set_attr "predicable" "yes")
- (set_attr "pool_range" "*,256")
- (set_attr "neg_pool_range" "*,244")]
+ (set_attr "predicable" "yes")]
)
(define_insn "*arm_extendqisi_v6"
@@ -5738,9 +5735,7 @@
sxtb%?\\t%0, %1
ldr%(sb%)\\t%0, %1"
[(set_attr "type" "extend,load_byte")
- (set_attr "predicable" "yes")
- (set_attr "pool_range" "*,256")
- (set_attr "neg_pool_range" "*,244")]
+ (set_attr "predicable" "yes")]
)
(define_insn "*arm_extendqisi2addsi"
@@ -6973,8 +6968,8 @@
;; Pattern to recognize insn generated default case above
(define_insn "*movhi_insn_arch4"
- [(set (match_operand:HI 0 "nonimmediate_operand" "=r,r,m,r")
- (match_operand:HI 1 "general_operand" "rI,K,r,mi"))]
+ [(set (match_operand:HI 0 "nonimmediate_operand" "=r,r,r,m,r")
+ (match_operand:HI 1 "general_operand" "rI,K,n,r,mi"))]
"TARGET_ARM
&& arm_arch4
&& (register_operand (operands[0], HImode)
@@ -6982,16 +6977,19 @@
"@
mov%?\\t%0, %1\\t%@ movhi
mvn%?\\t%0, #%B1\\t%@ movhi
+ movw%?\\t%0, %L1\\t%@ movhi
str%(h%)\\t%1, %0\\t%@ movhi
ldr%(h%)\\t%0, %1\\t%@ movhi"
[(set_attr "predicable" "yes")
- (set_attr "pool_range" "*,*,*,256")
- (set_attr "neg_pool_range" "*,*,*,244")
+ (set_attr "pool_range" "*,*,*,*,256")
+ (set_attr "neg_pool_range" "*,*,*,*,244")
+ (set_attr "arch" "*,*,v6t2,*,*")
(set_attr_alternative "type"
[(if_then_else (match_operand 1 "const_int_operand" "")
(const_string "mov_imm" )
(const_string "mov_reg"))
(const_string "mvn_imm")
+ (const_string "mov_imm")
(const_string "store1")
(const_string "load1")])]
)
@@ -10944,10 +10942,16 @@
enum machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[5]),
operands[3], operands[4]);
enum rtx_code rc = GET_CODE (operands[5]);
-
operands[6] = gen_rtx_REG (mode, CC_REGNUM);
gcc_assert (!(mode == CCFPmode || mode == CCFPEmode));
- rc = reverse_condition (rc);
+ if (REGNO (operands[2]) != REGNO (operands[0]))
+ rc = reverse_condition (rc);
+ else
+ {
+ rtx tmp = operands[1];
+ operands[1] = operands[2];
+ operands[2] = tmp;
+ }
operands[6] = gen_rtx_fmt_ee (rc, VOIDmode, operands[6], const0_rtx);
}
diff --git a/gcc-4.9/gcc/config/arm/constraints.md b/gcc-4.9/gcc/config/arm/constraints.md
index 85dd116ce..f848664d5 100644
--- a/gcc-4.9/gcc/config/arm/constraints.md
+++ b/gcc-4.9/gcc/config/arm/constraints.md
@@ -36,7 +36,7 @@
;; in Thumb-2 state: Pj, PJ, Ps, Pt, Pu, Pv, Pw, Px, Py
;; The following memory constraints have been used:
-;; in ARM/Thumb-2 state: Q, Ut, Uv, Uy, Un, Um, Us
+;; in ARM/Thumb-2 state: Q, Uh, Ut, Uv, Uy, Un, Um, Us
;; in ARM state: Uq
;; in Thumb state: Uu, Uw
@@ -348,6 +348,12 @@
An address valid for loading/storing register exclusive"
(match_operand 0 "mem_noofs_operand"))
+(define_memory_constraint "Uh"
+ "@internal
+ An address suitable for byte and half-word loads which does not point inside a constant pool"
+ (and (match_code "mem")
+ (match_test "arm_legitimate_address_p (GET_MODE (op), XEXP (op, 0), false) && !arm_is_constant_pool_ref (op)")))
+
(define_memory_constraint "Ut"
"@internal
In ARM/Thumb-2 state an address valid for loading/storing opaque structure
@@ -394,7 +400,8 @@
(and (match_code "mem")
(match_test "TARGET_ARM
&& arm_legitimate_address_outer_p (GET_MODE (op), XEXP (op, 0),
- SIGN_EXTEND, 0)")))
+ SIGN_EXTEND, 0)
+ && !arm_is_constant_pool_ref (op)")))
(define_memory_constraint "Q"
"@internal
diff --git a/gcc-4.9/gcc/config/arm/linux-grte.h b/gcc-4.9/gcc/config/arm/linux-grte.h
index 7ee5806b7..e69de29bb 100644
--- a/gcc-4.9/gcc/config/arm/linux-grte.h
+++ b/gcc-4.9/gcc/config/arm/linux-grte.h
@@ -1,27 +0,0 @@
-/* Definitions for ARM Linux-based GRTE (Google RunTime Environment).
- Copyright (C) 2011 Free Software Foundation, Inc.
- Contributed by Chris Demetriou.
-
-This file is part of GCC.
-
-GCC is free software; you can redistribute it and/or modify
-it under the terms of the GNU General Public License as published by
-the Free Software Foundation; either version 3, or (at your option)
-any later version.
-
-GCC is distributed in the hope that it will be useful,
-but WITHOUT ANY WARRANTY; without even the implied warranty of
-MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-GNU General Public License for more details.
-
-Under Section 7 of GPL version 3, you are granted additional
-permissions described in the GCC Runtime Library Exception, version
-3.1, as published by the Free Software Foundation.
-
-You should have received a copy of the GNU General Public License and
-a copy of the GCC Runtime Library Exception along with this program;
-see the files COPYING3 and COPYING.RUNTIME respectively. If not, see
-<http://www.gnu.org/licenses/>. */
-
-#undef SUBSUBTARGET_EXTRA_SPECS
-#define SUBSUBTARGET_EXTRA_SPECS LINUX_GRTE_EXTRA_SPECS
diff --git a/gcc-4.9/gcc/config/arm/t-aprofile b/gcc-4.9/gcc/config/arm/t-aprofile
index ff9e2e1b3..86741e6b0 100644
--- a/gcc-4.9/gcc/config/arm/t-aprofile
+++ b/gcc-4.9/gcc/config/arm/t-aprofile
@@ -88,6 +88,9 @@ MULTILIB_MATCHES += march?armv8-a=mcpu?cortex-a53
MULTILIB_MATCHES += march?armv8-a=mcpu?cortex-a57
MULTILIB_MATCHES += march?armv8-a=mcpu?cortex-a57.cortex-a53
+# Arch Matches
+MULTILIB_MATCHES += march?armv8-a=march?armv8-a+crc
+
# FPU matches
MULTILIB_MATCHES += mfpu?vfpv3-d16=mfpu?vfpv3
MULTILIB_MATCHES += mfpu?vfpv3-d16=mfpu?vfpv3-fp16