Diffstat (limited to 'gcc-4.9/gcc/config/rs6000/rs6000.c')
-rw-r--r--  gcc-4.9/gcc/config/rs6000/rs6000.c | 32
1 file changed, 24 insertions(+), 8 deletions(-)
diff --git a/gcc-4.9/gcc/config/rs6000/rs6000.c b/gcc-4.9/gcc/config/rs6000/rs6000.c
index fc837352c..494efc562 100644
--- a/gcc-4.9/gcc/config/rs6000/rs6000.c
+++ b/gcc-4.9/gcc/config/rs6000/rs6000.c
@@ -2310,6 +2310,10 @@ rs6000_debug_reg_global (void)
 	   (int)END_BUILTINS);
   fprintf (stderr, DEBUG_FMT_D, "Number of rs6000 builtins",
 	   (int)RS6000_BUILTIN_COUNT);
+
+  if (TARGET_VSX)
+    fprintf (stderr, DEBUG_FMT_D, "VSX easy 64-bit scalar element",
+	     (int)VECTOR_ELEMENT_SCALAR_64BIT);
 }
@@ -5632,11 +5636,15 @@ rs6000_expand_vector_set (rtx target, rtx val, int elt)
 			UNSPEC_VPERM);
   else
     {
-      /* Invert selector.  */
+      /* Invert selector.  We prefer to generate VNAND on P8 so
+	 that future fusion opportunities can kick in, but must
+	 generate VNOR elsewhere.  */
       rtx notx = gen_rtx_NOT (V16QImode, force_reg (V16QImode, x));
-      rtx andx = gen_rtx_AND (V16QImode, notx, notx);
+      rtx iorx = (TARGET_P8_VECTOR
+		  ? gen_rtx_IOR (V16QImode, notx, notx)
+		  : gen_rtx_AND (V16QImode, notx, notx));
       rtx tmp = gen_reg_rtx (V16QImode);
-      emit_move_insn (tmp, andx);
+      emit_insn (gen_rtx_SET (VOIDmode, tmp, iorx));
 
       /* Permute with operands reversed and adjusted selector.  */
       x = gen_rtx_UNSPEC (mode, gen_rtvec (3, reg, target, tmp),
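
The two RTL shapes in this hunk are chosen so that pattern matching picks the desired instruction: (and (not x) (not x)) is the canonical form of ~(x | x) and matches the vnor pattern, while (ior (not x) (not x)) is ~(x & x) and matches vnand. With both operands equal, either form reduces to a plain bitwise complement of the selector. A minimal scalar sketch of the identity (my own illustration, not code from the patch):

#include <assert.h>
#include <stdint.h>

int main (void)
{
  uint32_t x = 0x12345678;

  /* (and (not x) (not x)) == ~(x | x): the shape the vnor pattern matches.  */
  uint32_t nor_form = ~x & ~x;

  /* (ior (not x) (not x)) == ~(x & x): the shape the vnand pattern matches.  */
  uint32_t nand_form = ~x | ~x;

  /* With equal operands, both reduce to a plain complement, which is
     all the selector inversion needs.  */
  assert (nor_form == ~x);
  assert (nand_form == ~x);
  assert (nor_form == (uint32_t) ~(x | x));
  assert (nand_form == (uint32_t) ~(x & x));
  return 0;
}

Note also the switch from emit_move_insn to emit_insn with an explicit gen_rtx_SET: the source is now a logical expression rather than a simple move, so the insn is emitted directly instead of going through the move expander.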
@@ -30209,12 +30217,12 @@ altivec_expand_vec_perm_const_le (rtx operands[4])
 
 /* Similarly to altivec_expand_vec_perm_const_le, we must adjust the
    permute control vector.  But here it's not a constant, so we must
-   generate a vector NOR to do the adjustment.  */
+   generate a vector NAND or NOR to do the adjustment.  */
 
 void
 altivec_expand_vec_perm_le (rtx operands[4])
 {
-  rtx notx, andx, unspec;
+  rtx notx, iorx, unspec;
   rtx target = operands[0];
   rtx op0 = operands[1];
   rtx op1 = operands[2];
@@ -30233,10 +30241,13 @@ altivec_expand_vec_perm_le (rtx operands[4])
   if (!REG_P (target))
     tmp = gen_reg_rtx (mode);
 
-  /* Invert the selector with a VNOR.  */
+  /* Invert the selector with a VNAND if available, else a VNOR.
+     The VNAND is preferred for future fusion opportunities.  */
   notx = gen_rtx_NOT (V16QImode, sel);
-  andx = gen_rtx_AND (V16QImode, notx, notx);
-  emit_move_insn (norreg, andx);
+  iorx = (TARGET_P8_VECTOR
+	  ? gen_rtx_IOR (V16QImode, notx, notx)
+	  : gen_rtx_AND (V16QImode, notx, notx));
+  emit_insn (gen_rtx_SET (VOIDmode, norreg, iorx));
 
   /* Permute with operands reversed and adjusted selector.  */
   unspec = gen_rtx_UNSPEC (mode, gen_rtvec (3, op1, op0, norreg),
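
Both permute expanders pair the inverted selector with reversed operands. The underlying identity: byte j, in little-endian numbering, of the concatenation op0:op1 is byte 31 - j, i.e. ~j in the low five bits, of op1:op0 in the big-endian numbering that vperm uses to select bytes. A small self-checking simulation of that identity (hypothetical helper names; not GCC code):

#include <assert.h>
#include <stdint.h>

/* Each vector is 16 bytes stored in little-endian order, so LE byte k
   of a register is BE byte 15 - k.  */

/* Byte t of the 32-byte concatenation x:y in BE numbering, the way
   the vperm hardware selects bytes.  */
static uint8_t
be_concat_byte (const uint8_t x[16], const uint8_t y[16], int t)
{
  return t < 16 ? x[15 - t] : y[15 - (t - 16)];
}

/* Byte j of the concatenation a:b in LE numbering, which is what the
   vector model wants on a little-endian target.  */
static uint8_t
le_concat_byte (const uint8_t a[16], const uint8_t b[16], int j)
{
  return j < 16 ? a[j] : b[j - 16];
}

int main (void)
{
  uint8_t op0[16], op1[16];
  for (int i = 0; i < 16; i++)
    {
      op0[i] = i;
      op1[i] = 16 + i;
    }

  /* Swapping the operands and complementing the selector's low five
     bits makes the BE selection produce the LE result.  */
  for (int j = 0; j < 32; j++)
    assert (le_concat_byte (op0, op1, j)
	    == be_concat_byte (op1, op0, ~j & 31));
  return 0;
}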
@@ -32519,6 +32530,11 @@ fusion_gpr_load_p (rtx *operands, bool peep2_p)
       if (!peep2_reg_dead_p (2, addis_reg))
 	return false;
+
+      /* If the target register being loaded is the stack pointer, we must
+	 avoid loading any other value into it, even temporarily.  */
+      if (REG_P (target) && REGNO (target) == STACK_POINTER_REGNUM)
+	return false;
     }
 
   base_reg = XEXP (addr, 0);
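
The guard follows from how the fused sequence works: per the comment above, the peephole reuses the load's own target register to carry the addis high-part, so for one instruction the target holds an address fragment rather than a meaningful value. That transient state is harmless for an ordinary GPR, but r1 must hold a valid stack pointer at every instruction boundary, since an asynchronous interrupt may use it. A scalar sketch of the transient clobber (an illustration only; the real check operates on RTL):

#include <assert.h>
#include <stdint.h>

int main (void)
{
  int64_t arr[2] = { 0, 42 };
  intptr_t base = (intptr_t) arr;
  int64_t target;

  /* addis: the high part of the offset lands in the load's own
     target register (zero here, so target is just the base).  */
  target = (int64_t) (base + 0);

  /* Between the two instructions the target holds an address
     fragment, not a value; were the target r1, an interrupt in this
     window would see a bogus stack pointer.  */

  /* ld: the low-offset load overwrites the target with the value.  */
  target = *(int64_t *) (intptr_t) (target + 8);

  assert (target == 42);
  return 0;
}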