Index: vax.c
===================================================================
--- vax.c (revision 143198)
+++ vax.c (working copy)
@@ -39,6 +39,7 @@
#include "flags.h"
#include "debug.h"
#include "toplev.h"
+#include "tm-constrs.h"
#include "tm_p.h"
#include "target.h"
#include "target-def.h"
@@ -52,6 +53,7 @@
static int vax_address_cost (rtx, bool);
static bool vax_rtx_costs (rtx, int, int, int *, bool);
static rtx vax_struct_value_rtx (tree, int);
+static rtx vax_builtin_setjmp_frame_value (void);
/* Initialize the GCC target structure. */
#undef TARGET_ASM_ALIGNED_HI_OP
@@ -87,6 +89,9 @@
#undef TARGET_STRUCT_VALUE_RTX
#define TARGET_STRUCT_VALUE_RTX vax_struct_value_rtx
+#undef TARGET_BUILTIN_SETJMP_FRAME_VALUE
+#define TARGET_BUILTIN_SETJMP_FRAME_VALUE vax_builtin_setjmp_frame_value
+
struct gcc_target targetm = TARGET_INITIALIZER;
/* Set global variables as needed for the options enabled. */
@@ -167,28 +172,35 @@
/* This is like nonimmediate_operand with a restriction on the type of MEM. */
-void
-split_quadword_operands (rtx * operands, rtx * low, int n ATTRIBUTE_UNUSED)
+static void
+split_quadword_operands (rtx insn, enum rtx_code code, rtx * operands,
+ rtx * low, int n)
{
int i;
- /* Split operands. */
- low[0] = low[1] = low[2] = 0;
- for (i = 0; i < 3; i++)
+ for (i = 0; i < n; i++)
+ low[i] = 0;
+
+ for (i = 0; i < n; i++)
{
- if (low[i])
- /* it's already been figured out */;
- else if (MEM_P (operands[i])
- && (GET_CODE (XEXP (operands[i], 0)) == POST_INC))
+ if (MEM_P (operands[i])
+ && (GET_CODE (XEXP (operands[i], 0)) == PRE_DEC
+ || GET_CODE (XEXP (operands[i], 0)) == POST_INC))
{
rtx addr = XEXP (operands[i], 0);
operands[i] = low[i] = gen_rtx_MEM (SImode, addr);
- if (which_alternative == 0 && i == 0)
- {
- addr = XEXP (operands[i], 0);
- operands[i+1] = low[i+1] = gen_rtx_MEM (SImode, addr);
- }
}
+ else if (optimize_size && MEM_P (operands[i])
+ && REG_P (XEXP (operands[i], 0))
+ && (code != MINUS || operands[1] != const0_rtx)
+ && find_regno_note (insn, REG_DEAD,
+ REGNO (XEXP (operands[i], 0))))
+ {
+ low[i] = gen_rtx_MEM (SImode,
+ gen_rtx_POST_INC (Pmode,
+ XEXP (operands[i], 0)));
+ operands[i] = gen_rtx_MEM (SImode, XEXP (operands[i], 0));
+ }
else
{
low[i] = operand_subword (operands[i], 0, 0, DImode);
@@ -200,6 +212,7 @@
void
print_operand_address (FILE * file, rtx addr)
{
+ rtx orig = addr;
rtx reg1, breg, ireg;
rtx offset;
@@ -344,7 +357,11 @@
/* If REG1 is nonzero, figure out if it is a base or index register. */
if (reg1)
{
- if (breg != 0 || (offset && MEM_P (offset)))
+ if (breg
+ || (flag_pic && GET_CODE (addr) == SYMBOL_REF)
+ || (offset
+ && (MEM_P (offset)
+ || (flag_pic && symbolic_operand (offset, SImode)))))
{
gcc_assert (!ireg);
ireg = reg1;
@@ -354,8 +371,37 @@
}
if (offset != 0)
- output_address (offset);
+ {
+ if (flag_pic && symbolic_operand (offset, SImode))
+ {
+ if (breg && ireg)
+ {
+ debug_rtx (orig);
+ output_operand_lossage ("symbol used with both base and indexed registers");
+ }
+
+#ifdef NO_EXTERNAL_INDIRECT_ADDRESS
+ if (flag_pic > 1 && GET_CODE (offset) == CONST
+ && GET_CODE (XEXP (XEXP (offset, 0), 0)) == SYMBOL_REF
+ && !SYMBOL_REF_LOCAL_P (XEXP (XEXP (offset, 0), 0)))
+ {
+ debug_rtx (orig);
+ output_operand_lossage ("symbol with offset used in PIC mode");
+ }
+#endif
+ /* symbol(reg) isn't PIC, but symbol[reg] is. */
+ if (breg)
+ {
+ ireg = breg;
+ breg = 0;
+ }
+
+ }
+
+ output_address (offset);
+ }
+
if (breg != 0)
fprintf (file, "(%s)", reg_names[REGNO (breg)]);
@@ -372,6 +418,64 @@
output_addr_const (file, addr);
}
}
+
+void
+print_operand (FILE *file, rtx x, int code)
+{
+ if (code == '#')
+ fputc (ASM_DOUBLE_CHAR, file);
+ else if (code == '|')
+ fputs (REGISTER_PREFIX, file);
+ else if (code == 'C')
+ fputs (rev_cond_name (x), file);
+ else if (code == 'D' && CONST_INT_P (x) && INTVAL (x) < 0)
+ fprintf (file, "$" NEG_HWI_PRINT_HEX16, INTVAL (x));
+ else if (code == 'P' && CONST_INT_P (x))
+ fprintf (file, "$" HOST_WIDE_INT_PRINT_DEC, INTVAL (x) + 1);
+ else if (code == 'N' && CONST_INT_P (x))
+ fprintf (file, "$" HOST_WIDE_INT_PRINT_DEC, ~ INTVAL (x));
+ /* rotl instruction cannot deal with negative arguments. */
+ else if (code == 'R' && CONST_INT_P (x))
+ fprintf (file, "$" HOST_WIDE_INT_PRINT_DEC, 32 - INTVAL (x));
+ else if (code == 'H' && CONST_INT_P (x))
+ fprintf (file, "$%d", (int) (0xffff & ~ INTVAL (x)));
+ else if (code == 'h' && CONST_INT_P (x))
+ fprintf (file, "$%d", (short) - INTVAL (x));
+ else if (code == 'B' && CONST_INT_P (x))
+ fprintf (file, "$%d", (int) (0xff & ~ INTVAL (x)));
+ else if (code == 'b' && CONST_INT_P (x))
+ fprintf (file, "$%d", (int) (0xff & - INTVAL (x)));
+ else if (code == 'M' && CONST_INT_P (x))
+ fprintf (file, "$%d", ~((1 << INTVAL (x)) - 1));
+ else if (REG_P (x))
+ fprintf (file, "%s", reg_names[REGNO (x)]);
+ else if (MEM_P (x))
+ output_address (XEXP (x, 0));
+ else if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == SFmode)
+ {
+ char dstr[30];
+ real_to_decimal (dstr, CONST_DOUBLE_REAL_VALUE (x),
+ sizeof (dstr), 0, 1);
+ fprintf (file, "$0f%s", dstr);
+ }
+ else if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == DFmode)
+ {
+ char dstr[30];
+ real_to_decimal (dstr, CONST_DOUBLE_REAL_VALUE (x),
+ sizeof (dstr), 0, 1);
+ fprintf (file, "$0%c%s", ASM_DOUBLE_CHAR, dstr);
+ }
+ else
+ {
+ if (flag_pic > 1 && symbolic_operand (x, SImode))
+ {
+ debug_rtx (x);
+ output_operand_lossage ("symbol used as immediate operand");
+ }
+ putc ('$', file);
+ output_addr_const (file, x);
+ }
+}
const char *
rev_cond_name (rtx op)
@@ -404,7 +508,7 @@
}
}
-int
+static bool
vax_float_literal(rtx c)
{
enum machine_mode mode;
@@ -412,14 +516,14 @@
int i;
if (GET_CODE (c) != CONST_DOUBLE)
- return 0;
+ return false;
mode = GET_MODE (c);
if (c == const_tiny_rtx[(int) mode][0]
|| c == const_tiny_rtx[(int) mode][1]
|| c == const_tiny_rtx[(int) mode][2])
- return 1;
+ return true;
REAL_VALUE_FROM_CONST_DOUBLE (r, c);
@@ -430,13 +534,13 @@
REAL_VALUE_FROM_INT (s, x, 0, mode);
if (REAL_VALUES_EQUAL (r, s))
- return 1;
+ return true;
ok = exact_real_inverse (mode, &s);
gcc_assert (ok);
if (REAL_VALUES_EQUAL (r, s))
- return 1;
+ return true;
}
- return 0;
+ return false;
}
@@ -550,10 +654,13 @@
patterns will not match). */
case CONST_INT:
if (INTVAL (x) == 0)
- return true;
+ {
+ *total = 0;
+ return true;
+ }
if (outer_code == AND)
{
- *total = ((unsigned HOST_WIDE_INT) ~INTVAL (x) <= 077) ? 1 : 2;
+ *total = ((unsigned HOST_WIDE_INT) ~INTVAL (x) <= 077) ? 1 : 2;
return true;
}
if ((unsigned HOST_WIDE_INT) INTVAL (x) <= 077
@@ -577,7 +684,7 @@
if (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
*total = vax_float_literal (x) ? 5 : 8;
else
- *total = ((CONST_DOUBLE_HIGH (x) == 0
+ *total = ((CONST_DOUBLE_HIGH (x) == 0
&& (unsigned HOST_WIDE_INT) CONST_DOUBLE_LOW (x) < 64)
|| (outer_code == PLUS
&& CONST_DOUBLE_HIGH (x) == -1
@@ -783,7 +890,7 @@
else
{
if (CONST_DOUBLE_HIGH (op) != 0
- || (unsigned)CONST_DOUBLE_LOW (op) > 63)
+ || (unsigned HOST_WIDE_INT)CONST_DOUBLE_LOW (op) > 63)
*total += 2;
}
break;
@@ -812,10 +919,10 @@
static void
vax_output_mi_thunk (FILE * file,
- tree thunk ATTRIBUTE_UNUSED,
- HOST_WIDE_INT delta,
- HOST_WIDE_INT vcall_offset ATTRIBUTE_UNUSED,
- tree function)
+ tree thunk ATTRIBUTE_UNUSED,
+ HOST_WIDE_INT delta,
+ HOST_WIDE_INT vcall_offset ATTRIBUTE_UNUSED,
+ tree function)
{
fprintf (file, "\t.word 0x0ffc\n\taddl2 $" HOST_WIDE_INT_PRINT_DEC, delta);
asm_fprintf (file, ",4(%Rap)\n");
@@ -831,6 +938,12 @@
return gen_rtx_REG (Pmode, VAX_STRUCT_VALUE_REGNUM);
}
+static rtx
+vax_builtin_setjmp_frame_value (void)
+{
+ return hard_frame_pointer_rtx;
+}
+
/* Worker function for NOTICE_UPDATE_CC. */
void
@@ -906,28 +1019,161 @@
vax_output_int_move (rtx insn ATTRIBUTE_UNUSED, rtx *operands,
enum machine_mode mode)
{
+ rtx hi[3], lo[3];
+ const char *pattern_hi, *pattern_lo;
+
switch (mode)
{
+ case DImode:
+ if (operands[1] == const0_rtx)
+ return "clrq %0";
+ if (TARGET_QMATH && optimize_size
+ && (CONST_INT_P (operands[1])
+ || GET_CODE (operands[1]) == CONST_DOUBLE))
+ {
+ unsigned HOST_WIDE_INT hval, lval;
+ int n;
+
+ if (GET_CODE (operands[1]) == CONST_DOUBLE)
+ {
+ gcc_assert (HOST_BITS_PER_WIDE_INT != 64);
+
+ /* Make sure only the low 32 bits are valid. */
+ lval = CONST_DOUBLE_LOW (operands[1]) & 0xffffffff;
+ hval = CONST_DOUBLE_HIGH (operands[1]) & 0xffffffff;
+ }
+ else
+ {
+ lval = INTVAL (operands[1]);
+ hval = 0;
+ }
+
+	  /* See whether the 64-bit value is really a 6-bit literal shifted
+	     left by some arbitrary amount.  If so, we can use ashq to shift
+	     it into place, saving 7 bytes (1 addr-mode byte + 8 immediate
+	     bytes - 1 shift byte - 1 short-literal byte).  */
+ if (lval != 0
+ && (n = exact_log2 (lval & (- lval))) != -1
+ && (lval >> n) < 64)
+ {
+ lval >>= n;
+
+#if HOST_BITS_PER_WIDE_INT == 32
+	      /* On 32-bit hosts, if the 6-bit literal did not shift into
+		 the upper 32 bits, the upper value must be 0.  If it did,
+		 make sure nothing beyond the literal was shifted in.  */
+ if (hval != 0)
+ {
+ if (n <= 26 || hval >= ((unsigned)1 << (n - 26)))
+ n = 0; /* failure */
+ else
+ lval |= hval << (32 - n);
+ }
+#endif
+ /* If n is 0, then ashq is not the best way to emit this. */
+ if (n > 0)
+ {
+ operands[1] = GEN_INT (lval);
+ operands[2] = GEN_INT (n);
+ return "ashq %2,%1,%0";
+ }
+#if HOST_BITS_PER_WIDE_INT == 32
+ }
+	  /* On 32-bit hosts, if the low 32 bits are 0, check the upper
+	     32 bits instead.  */
+ else if (hval != 0
+ && (n = exact_log2 (hval & (- hval)) - 1) != -1
+ && (hval >> n) < 64)
+ {
+ operands[1] = GEN_INT (hval >> n);
+ operands[2] = GEN_INT (n + 32);
+ return "ashq %2,%1,%0";
+#endif
+ }
+ }
+
+ if (TARGET_QMATH
+ && (!MEM_P (operands[0])
+ || GET_CODE (XEXP (operands[0], 0)) == PRE_DEC
+ || GET_CODE (XEXP (operands[0], 0)) == POST_INC
+ || !illegal_addsub_di_memory_operand (operands[0], DImode))
+ && ((GET_CODE (operands[1]) == CONST_INT
+ && (unsigned HOST_WIDE_INT) INTVAL (operands[1]) >= 64)
+ || GET_CODE (operands[1]) == CONST_DOUBLE))
+ {
+ hi[0] = operands[0];
+ hi[1] = operands[1];
+
+ split_quadword_operands(insn, SET, hi, lo, 2);
+
+ pattern_lo = vax_output_int_move (NULL, lo, SImode);
+ pattern_hi = vax_output_int_move (NULL, hi, SImode);
+
+	  /* If both halves are plain movl's or plain pushl's, a single movq
+	     is shorter (1 opcode byte + 1 addr-mode byte + 8 immediate
+	     bytes vs. 2 opcode bytes + 2 addr-mode bytes + 8 immediate
+	     bytes).  */
+ if ((!strncmp (pattern_lo, "movl", 4)
+ && !strncmp (pattern_hi, "movl", 4))
+ || (!strncmp (pattern_lo, "pushl", 5)
+ && !strncmp (pattern_hi, "pushl", 5)))
+ return "movq %1,%0";
+
+ if (MEM_P (operands[0])
+ && GET_CODE (XEXP (operands[0], 0)) == PRE_DEC)
+ {
+ output_asm_insn (pattern_hi, hi);
+ operands[0] = lo[0];
+ operands[1] = lo[1];
+ operands[2] = lo[2];
+ return pattern_lo;
+ }
+ else
+ {
+ output_asm_insn (pattern_lo, lo);
+ operands[0] = hi[0];
+ operands[1] = hi[1];
+ operands[2] = hi[2];
+ return pattern_hi;
+ }
+ }
+ return "movq %1,%0";
+
case SImode:
- if (GET_CODE (operands[1]) == SYMBOL_REF || GET_CODE (operands[1]) == CONST)
+ if (symbolic_operand (operands[1], SImode))
{
if (push_operand (operands[0], SImode))
return "pushab %a1";
return "movab %a1,%0";
}
+
if (operands[1] == const0_rtx)
- return "clrl %0";
+ {
+	  if (push_operand (operands[0], SImode))
+ return "pushl %1";
+ return "clrl %0";
+ }
+
if (CONST_INT_P (operands[1])
- && (unsigned) INTVAL (operands[1]) >= 64)
+ && (unsigned HOST_WIDE_INT) INTVAL (operands[1]) >= 64)
{
- int i = INTVAL (operands[1]);
- if ((unsigned)(~i) < 64)
+ HOST_WIDE_INT i = INTVAL (operands[1]);
+ int n;
+ if ((unsigned HOST_WIDE_INT)(~i) < 64)
return "mcoml %N1,%0";
- if ((unsigned)i < 0x100)
+ if ((unsigned HOST_WIDE_INT)i < 0x100)
return "movzbl %1,%0";
if (i >= -0x80 && i < 0)
return "cvtbl %1,%0";
- if ((unsigned)i < 0x10000)
+ if (optimize_size
+ && (n = exact_log2 (i & (-i))) != -1
+ && ((unsigned HOST_WIDE_INT)i >> n) < 64)
+ {
+ operands[1] = GEN_INT ((unsigned HOST_WIDE_INT)i >> n);
+ operands[2] = GEN_INT (n);
+ return "ashl %2,%1,%0";
+ }
+ if ((unsigned HOST_WIDE_INT)i < 0x10000)
return "movzwl %1,%0";
if (i >= -0x8000 && i < 0)
return "cvtwl %1,%0";
@@ -939,25 +1185,27 @@
case HImode:
if (CONST_INT_P (operands[1]))
{
- int i = INTVAL (operands[1]);
+ HOST_WIDE_INT i = INTVAL (operands[1]);
if (i == 0)
return "clrw %0";
- else if ((unsigned int)i < 64)
+ else if ((unsigned HOST_WIDE_INT)i < 64)
return "movw %1,%0";
- else if ((unsigned int)~i < 64)
+ else if ((unsigned HOST_WIDE_INT)~i < 64)
return "mcomw %H1,%0";
- else if ((unsigned int)i < 256)
+ else if ((unsigned HOST_WIDE_INT)i < 256)
return "movzbw %1,%0";
+ else if (i >= -0x80 && i < 0)
+ return "cvtbw %1,%0";
}
return "movw %1,%0";
case QImode:
if (CONST_INT_P (operands[1]))
{
- int i = INTVAL (operands[1]);
+ HOST_WIDE_INT i = INTVAL (operands[1]);
if (i == 0)
return "clrb %0";
- else if ((unsigned int)~i < 64)
+ else if ((unsigned HOST_WIDE_INT)~i < 64)
return "mcomb %B1,%0";
}
return "movb %1,%0";
@@ -982,11 +1230,96 @@
which are not modified very often. */
const char *
-vax_output_int_add (rtx insn ATTRIBUTE_UNUSED, rtx *operands,
- enum machine_mode mode)
+vax_output_int_add (rtx insn, rtx *operands, enum machine_mode mode)
{
switch (mode)
{
+ case DImode:
+ {
+ rtx low[3];
+ const char *pattern;
+ int carry = 1;
+ bool sub;
+
+ if (TARGET_QMATH && 0)
+ debug_rtx (insn);
+
+ split_quadword_operands (insn, PLUS, operands, low, 3);
+
+ if (TARGET_QMATH)
+ {
+ gcc_assert (rtx_equal_p (operands[0], operands[1]));
+#ifdef NO_EXTERNAL_INDIRECT_ADDRESS
+ gcc_assert (!flag_pic || !external_memory_operand (low[2], SImode));
+ gcc_assert (!flag_pic || !external_memory_operand (low[0], SImode));
+#endif
+
+	  /* Adding 0 to the low part generates no carry, so only the high
+	     part needs an add/sub; emit the appropriate SImode instruction.  */
+ if (low[2] == const0_rtx)
+ return vax_output_int_add (NULL, operands, SImode);
+
+ /* Are we doing addition or subtraction? */
+ sub = CONST_INT_P (operands[2]) && INTVAL (operands[2]) < 0;
+
+	  /* We can't use vax_output_int_add, since some of the patterns it
+	     emits don't modify the carry bit.  */
+ if (sub)
+ {
+ if (low[2] == constm1_rtx)
+ pattern = "decl %0";
+ else
+ pattern = "subl2 $%n2,%0";
+ }
+ else
+ {
+ if (low[2] == const1_rtx)
+ pattern = "incl %0";
+ else
+ pattern = "addl2 %2,%0";
+ }
+ output_asm_insn (pattern, low);
+
+	  /* In 2's complement, -n = ~n + 1.  Since we are dealing with
+	     two 32-bit parts, we complement each and then add one to the
+	     low part.  We know the low part can't overflow, since its
+	     value can never be 0.  */
+ if (sub)
+ return "sbwc %N2,%0";
+ return "adwc %2,%0";
+ }
+
+ /* Add low parts. */
+ if (rtx_equal_p (operands[0], operands[1]))
+ {
+ if (low[2] == const0_rtx)
+ /* Should examine operand, punt if not POST_INC. */
+ pattern = "tstl %0", carry = 0;
+ else if (low[2] == const1_rtx)
+ pattern = "incl %0";
+ else
+ pattern = "addl2 %2,%0";
+ }
+ else
+ {
+ if (low[2] == const0_rtx)
+ pattern = "movl %1,%0", carry = 0;
+ else
+ pattern = "addl3 %2,%1,%0";
+ }
+ if (pattern)
+ output_asm_insn (pattern, low);
+ if (!carry)
+ /* If CARRY is 0, we don't have any carry value to worry about. */
+ return get_insn_template (CODE_FOR_addsi3, insn);
+ /* %0 = C + %1 + %2 */
+ if (!rtx_equal_p (operands[0], operands[1]))
+ output_asm_insn ((operands[1] == const0_rtx
+ ? "clrl %0"
+ : "movl %1,%0"), operands);
+ return "adwc %2,%0";
+ }
+
case SImode:
if (rtx_equal_p (operands[0], operands[1]))
{
@@ -995,19 +1328,25 @@
if (operands[2] == constm1_rtx)
return "decl %0";
if (CONST_INT_P (operands[2])
- && (unsigned) (- INTVAL (operands[2])) < 64)
+ && (unsigned HOST_WIDE_INT) (- INTVAL (operands[2])) < 64)
return "subl2 $%n2,%0";
if (CONST_INT_P (operands[2])
- && (unsigned) INTVAL (operands[2]) >= 64
+ && (unsigned HOST_WIDE_INT) INTVAL (operands[2]) >= 64
&& REG_P (operands[1])
&& ((INTVAL (operands[2]) < 32767 && INTVAL (operands[2]) > -32768)
|| REGNO (operands[1]) > 11))
return "movab %c2(%1),%0";
+ if (REG_P (operands[0]) && symbolic_operand (operands[2], SImode))
+ return "movab %a2[%0],%0";
return "addl2 %2,%0";
}
if (rtx_equal_p (operands[0], operands[2]))
- return "addl2 %1,%0";
+ {
+ if (REG_P (operands[0]) && symbolic_operand (operands[1], SImode))
+ return "movab %a1[%0],%0";
+ return "addl2 %1,%0";
+ }
if (CONST_INT_P (operands[2])
&& INTVAL (operands[2]) < 32767
@@ -1017,11 +1356,11 @@
return "pushab %c2(%1)";
if (CONST_INT_P (operands[2])
- && (unsigned) (- INTVAL (operands[2])) < 64)
+ && (unsigned HOST_WIDE_INT) (- INTVAL (operands[2])) < 64)
return "subl3 $%n2,%1,%0";
if (CONST_INT_P (operands[2])
- && (unsigned) INTVAL (operands[2]) >= 64
+ && (unsigned HOST_WIDE_INT) INTVAL (operands[2]) >= 64
&& REG_P (operands[1])
&& ((INTVAL (operands[2]) < 32767 && INTVAL (operands[2]) > -32768)
|| REGNO (operands[1]) > 11))
@@ -1031,6 +1370,30 @@
if (REG_P (operands[1]) && REG_P (operands[2]))
return "movab (%1)[%2],%0";
*/
+
+ if (REG_P (operands[1]) && symbolic_operand (operands[2], SImode))
+ {
+ if (push_operand (operands[0], SImode))
+ return "pushab %a2[%1]";
+ return "movab %a2[%1],%0";
+ }
+
+ if (REG_P (operands[2]) && symbolic_operand (operands[1], SImode))
+ {
+ if (push_operand (operands[0], SImode))
+ return "pushab %a1[%2]";
+ return "movab %a1[%2],%0";
+ }
+
+ if (flag_pic && REG_P (operands[0])
+ && symbolic_operand (operands[2], SImode))
+ return "movab %a2,%0;addl2 %1,%0";
+
+ if (flag_pic
+ && (symbolic_operand (operands[1], SImode)
+	  || symbolic_operand (operands[2], SImode)))
+ debug_rtx (insn);
+
return "addl3 %1,%2,%0";
case HImode:
@@ -1041,14 +1404,14 @@
if (operands[2] == constm1_rtx)
return "decw %0";
if (CONST_INT_P (operands[2])
- && (unsigned) (- INTVAL (operands[2])) < 64)
+ && (unsigned HOST_WIDE_INT) (- INTVAL (operands[2])) < 64)
return "subw2 $%n2,%0";
return "addw2 %2,%0";
}
if (rtx_equal_p (operands[0], operands[2]))
return "addw2 %1,%0";
if (CONST_INT_P (operands[2])
- && (unsigned) (- INTVAL (operands[2])) < 64)
+ && (unsigned HOST_WIDE_INT) (- INTVAL (operands[2])) < 64)
return "subw3 $%n2,%1,%0";
return "addw3 %1,%2,%0";
@@ -1060,14 +1423,14 @@
if (operands[2] == constm1_rtx)
return "decb %0";
if (CONST_INT_P (operands[2])
- && (unsigned) (- INTVAL (operands[2])) < 64)
+ && (unsigned HOST_WIDE_INT) (- INTVAL (operands[2])) < 64)
return "subb2 $%n2,%0";
return "addb2 %2,%0";
}
if (rtx_equal_p (operands[0], operands[2]))
return "addb2 %1,%0";
if (CONST_INT_P (operands[2])
- && (unsigned) (- INTVAL (operands[2])) < 64)
+ && (unsigned HOST_WIDE_INT) (- INTVAL (operands[2])) < 64)
return "subb3 $%n2,%1,%0";
return "addb3 %1,%2,%0";
@@ -1076,6 +1439,81 @@
}
}
+const char *
+vax_output_int_subtract (rtx insn, rtx *operands, enum machine_mode mode)
+{
+ switch (mode)
+ {
+ case DImode:
+ {
+ rtx low[3];
+ const char *pattern;
+ int carry = 1;
+
+ if (TARGET_QMATH && 0)
+ debug_rtx (insn);
+
+ split_quadword_operands (insn, MINUS, operands, low, 3);
+
+ if (TARGET_QMATH)
+ {
+ if (operands[1] == const0_rtx && low[1] == const0_rtx)
+ {
+ /* Negation is tricky. It's basically complement and increment.
+ Negate hi, then lo, and subtract the carry back. */
+ if ((MEM_P (low[0]) && GET_CODE (XEXP (low[0], 0)) == POST_INC)
+ || (MEM_P (operands[0])
+ && GET_CODE (XEXP (operands[0], 0)) == POST_INC))
+ fatal_insn ("illegal operand detected", insn);
+ output_asm_insn ("mnegl %2,%0", operands);
+ output_asm_insn ("mnegl %2,%0", low);
+ return "sbwc $0,%0";
+ }
+ gcc_assert (rtx_equal_p (operands[0], operands[1]));
+ gcc_assert (rtx_equal_p (low[0], low[1]));
+ if (low[2] == const1_rtx)
+ output_asm_insn ("decl %0", low);
+ else
+ output_asm_insn ("subl2 %2,%0", low);
+ return "sbwc %2,%0";
+ }
+
+ /* Subtract low parts. */
+ if (rtx_equal_p (operands[0], operands[1]))
+ {
+ if (low[2] == const0_rtx)
+ pattern = 0, carry = 0;
+ else if (low[2] == constm1_rtx)
+ pattern = "decl %0";
+ else
+ pattern = "subl2 %2,%0";
+ }
+ else
+ {
+ if (low[2] == constm1_rtx)
+ pattern = "decl %0";
+ else if (low[2] == const0_rtx)
+ pattern = get_insn_template (CODE_FOR_movsi, insn), carry = 0;
+ else
+ pattern = "subl3 %2,%1,%0";
+ }
+ if (pattern)
+ output_asm_insn (pattern, low);
+ if (carry)
+ {
+ if (!rtx_equal_p (operands[0], operands[1]))
+ return "movl %1,%0;sbwc %2,%0";
+ return "sbwc %2,%0";
+ /* %0 = %2 - %1 - C */
+ }
+ return get_insn_template (CODE_FOR_subsi3, insn);
+ }
+
+ default:
+ gcc_unreachable ();
+ }
+}
+
/* Output a conditional branch. */
const char *
vax_output_conditional_branch (enum rtx_code code)
@@ -1093,27 +1531,36 @@
case GEU: return "jgequ %l0";
case LEU: return "jlequ %l0";
default:
- gcc_unreachable ();
+ gcc_unreachable ();
}
}
/* 1 if X is an rtx for a constant that is a valid address. */
-int
+bool
legitimate_constant_address_p (rtx x)
{
- return (GET_CODE (x) == LABEL_REF || GET_CODE (x) == SYMBOL_REF
- || CONST_INT_P (x) || GET_CODE (x) == CONST
- || GET_CODE (x) == HIGH);
+ if (GET_CODE (x) == LABEL_REF || GET_CODE (x) == SYMBOL_REF
+ || CONST_INT_P (x) || GET_CODE (x) == HIGH)
+ return true;
+ if (GET_CODE (x) != CONST)
+ return false;
+#ifdef NO_EXTERNAL_INDIRECT_ADDRESS
+ if (flag_pic
+ && GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF
+ && !SYMBOL_REF_LOCAL_P (XEXP (XEXP (x, 0), 0)))
+ return false;
+#endif
+ return true;
}
/* Nonzero if the constant value X is a legitimate general operand.
It is given that X satisfies CONSTANT_P or is a CONST_DOUBLE. */
-int
+bool
legitimate_constant_p (rtx x ATTRIBUTE_UNUSED)
{
- return 1;
+ return true;
}
/* The other macros defined here are used only in legitimate_address_p (). */
@@ -1133,23 +1580,24 @@
/* Re-definition of CONSTANT_ADDRESS_P, which is true only when there
are no SYMBOL_REFs for external symbols present. */
-static int
-indirectable_constant_address_p (rtx x)
+static bool
+indirectable_constant_address_p (rtx x, bool indirect)
{
- if (!CONSTANT_ADDRESS_P (x))
- return 0;
- if (GET_CODE (x) == CONST && GET_CODE (XEXP ((x), 0)) == PLUS)
- x = XEXP (XEXP (x, 0), 0);
- if (GET_CODE (x) == SYMBOL_REF && !SYMBOL_REF_LOCAL_P (x))
- return 0;
+ if (GET_CODE (x) == SYMBOL_REF)
+ return !flag_pic || SYMBOL_REF_LOCAL_P (x) || !indirect;
- return 1;
+ if (GET_CODE (x) == CONST)
+ return !flag_pic
+ || GET_CODE (XEXP (XEXP (x, 0), 0)) != SYMBOL_REF
+ || SYMBOL_REF_LOCAL_P (XEXP (XEXP (x, 0), 0));
+
+ return CONSTANT_ADDRESS_P (x);
}
#else /* not NO_EXTERNAL_INDIRECT_ADDRESS */
-static int
-indirectable_constant_address_p (rtx x)
+static bool
+indirectable_constant_address_p (rtx x, bool indirect ATTRIBUTE_UNUSED)
{
return CONSTANT_ADDRESS_P (x);
}
@@ -1159,52 +1607,51 @@
/* Nonzero if X is an address which can be indirected. External symbols
could be in a sharable image library, so we disallow those. */
-static int
-indirectable_address_p(rtx x, int strict)
+static bool
+indirectable_address_p(rtx x, bool strict, bool indirect)
{
- if (indirectable_constant_address_p (x))
- return 1;
- if (BASE_REGISTER_P (x, strict))
- return 1;
- if (GET_CODE (x) == PLUS
- && BASE_REGISTER_P (XEXP (x, 0), strict)
- && indirectable_constant_address_p (XEXP (x, 1)))
- return 1;
- return 0;
+ if (indirectable_constant_address_p (x, indirect)
+ || BASE_REGISTER_P (x, strict))
+ return true;
+ if (GET_CODE (x) != PLUS
+ || !BASE_REGISTER_P (XEXP (x, 0), strict)
+ || (flag_pic && !CONST_INT_P (XEXP (x, 1))))
+ return false;
+ return indirectable_constant_address_p (XEXP (x, 1), indirect);
}
/* Return 1 if x is a valid address not using indexing.
(This much is the easy part.) */
-static int
-nonindexed_address_p (rtx x, int strict)
+static bool
+nonindexed_address_p (rtx x, bool strict)
{
rtx xfoo0;
if (REG_P (x))
{
extern rtx *reg_equiv_mem;
- if (!reload_in_progress
+ if (! reload_in_progress
|| reg_equiv_mem[REGNO (x)] == 0
- || indirectable_address_p (reg_equiv_mem[REGNO (x)], strict))
- return 1;
+ || indirectable_address_p (reg_equiv_mem[REGNO (x)], strict, false))
+ return true;
}
- if (indirectable_constant_address_p (x))
- return 1;
- if (indirectable_address_p (x, strict))
- return 1;
+ if (indirectable_constant_address_p (x, false))
+ return true;
+ if (indirectable_address_p (x, strict, false))
+ return true;
xfoo0 = XEXP (x, 0);
- if (MEM_P (x) && indirectable_address_p (xfoo0, strict))
- return 1;
+ if (MEM_P (x) && indirectable_address_p (xfoo0, strict, true))
+ return true;
if ((GET_CODE (x) == PRE_DEC || GET_CODE (x) == POST_INC)
&& BASE_REGISTER_P (xfoo0, strict))
- return 1;
- return 0;
+ return true;
+ return false;
}
/* 1 if PROD is either a reg times size of mode MODE and MODE is less
than or equal 8 bytes, or just a reg if MODE is one byte. */
-static int
-index_term_p (rtx prod, enum machine_mode mode, int strict)
+static bool
+index_term_p (rtx prod, enum machine_mode mode, bool strict)
{
rtx xfoo0, xfoo1;
@@ -1212,7 +1659,7 @@
return BASE_REGISTER_P (prod, strict);
if (GET_CODE (prod) != MULT || GET_MODE_SIZE (mode) > 8)
- return 0;
+ return false;
xfoo0 = XEXP (prod, 0);
xfoo1 = XEXP (prod, 1);
@@ -1220,52 +1667,65 @@
if (CONST_INT_P (xfoo0)
&& INTVAL (xfoo0) == (int)GET_MODE_SIZE (mode)
&& INDEX_REGISTER_P (xfoo1, strict))
- return 1;
+ return true;
if (CONST_INT_P (xfoo1)
&& INTVAL (xfoo1) == (int)GET_MODE_SIZE (mode)
&& INDEX_REGISTER_P (xfoo0, strict))
- return 1;
+ return true;
- return 0;
+ return false;
}
/* Return 1 if X is the sum of a register
and a valid index term for mode MODE. */
-static int
-reg_plus_index_p (rtx x, enum machine_mode mode, int strict)
+static bool
+reg_plus_index_p (rtx x, enum machine_mode mode, bool strict)
{
rtx xfoo0, xfoo1;
if (GET_CODE (x) != PLUS)
- return 0;
+ return false;
xfoo0 = XEXP (x, 0);
xfoo1 = XEXP (x, 1);
if (BASE_REGISTER_P (xfoo0, strict) && index_term_p (xfoo1, mode, strict))
- return 1;
+ return true;
if (BASE_REGISTER_P (xfoo1, strict) && index_term_p (xfoo0, mode, strict))
- return 1;
+ return true;
- return 0;
+ return false;
}
+/* Return true if xfoo0 and xfoo1 constitute a valid indexed address. */
+static bool
+indexable_address_p (rtx xfoo0, rtx xfoo1, enum machine_mode mode, bool strict)
+{
+ if (!CONSTANT_ADDRESS_P (xfoo0))
+ return false;
+ if (BASE_REGISTER_P (xfoo1, strict))
+ return !flag_pic || mode == QImode;
+ if (flag_pic && symbolic_operand (xfoo0, SImode))
+ return false;
+ return reg_plus_index_p (xfoo1, mode, strict);
+}
+
/* legitimate_address_p returns 1 if it recognizes an RTL expression "x"
that is a valid memory address for an instruction.
The MODE argument is the machine mode for the MEM expression
that wants to use this address. */
-int
-legitimate_address_p (enum machine_mode mode, rtx x, int strict)
+bool
+legitimate_address_p (enum machine_mode mode, rtx x, bool strict)
{
rtx xfoo0, xfoo1;
if (nonindexed_address_p (x, strict))
- return 1;
+ return true;
if (GET_CODE (x) != PLUS)
- return 0;
+ return false;
/* Handle <address>[index] represented with index-sum outermost */
@@ -1274,25 +1734,19 @@
if (index_term_p (xfoo0, mode, strict)
&& nonindexed_address_p (xfoo1, strict))
- return 1;
+ return true;
if (index_term_p (xfoo1, mode, strict)
&& nonindexed_address_p (xfoo0, strict))
- return 1;
+ return true;
/* Handle offset(reg)[index] with offset added outermost */
- if (indirectable_constant_address_p (xfoo0)
- && (BASE_REGISTER_P (xfoo1, strict)
- || reg_plus_index_p (xfoo1, mode, strict)))
- return 1;
+ if (indexable_address_p (xfoo0, xfoo1, mode, strict)
+ || indexable_address_p (xfoo1, xfoo0, mode, strict))
+ return true;
- if (indirectable_constant_address_p (xfoo1)
- && (BASE_REGISTER_P (xfoo0, strict)
- || reg_plus_index_p (xfoo0, mode, strict)))
- return 1;
-
- return 0;
+ return false;
}
/* Return 1 if x (a legitimate address expression) has an effect that
@@ -1301,23 +1755,244 @@
increment being the length of the operand) and all indexed address depend
thus (because the index scale factor is the length of the operand). */
-int
+bool
vax_mode_dependent_address_p (rtx x)
{
rtx xfoo0, xfoo1;
/* Auto-increment cases are now dealt with generically in recog.c. */
-
if (GET_CODE (x) != PLUS)
- return 0;
+ return false;
xfoo0 = XEXP (x, 0);
xfoo1 = XEXP (x, 1);
- if (CONSTANT_ADDRESS_P (xfoo0) && REG_P (xfoo1))
- return 0;
- if (CONSTANT_ADDRESS_P (xfoo1) && REG_P (xfoo0))
- return 0;
+ if (CONST_INT_P (xfoo0) && REG_P (xfoo1))
+ return false;
+ if (CONST_INT_P (xfoo1) && REG_P (xfoo0))
+ return false;
+ if (!flag_pic && CONSTANT_ADDRESS_P (xfoo0) && REG_P (xfoo1))
+ return false;
+ if (!flag_pic && CONSTANT_ADDRESS_P (xfoo1) && REG_P (xfoo0))
+ return false;
- return 1;
+ return true;
}
+
+static rtx
+fixup_mathdi_operand (rtx x, enum machine_mode mode)
+{
+ if (illegal_addsub_di_memory_operand (x, mode))
+ {
+ rtx addr = XEXP (x, 0);
+ rtx temp = gen_reg_rtx (Pmode);
+ rtx offset = 0;
+#ifdef NO_EXTERNAL_INDIRECT_ADDRESS
+ if (GET_CODE (addr) == CONST && flag_pic)
+ {
+ offset = XEXP (XEXP (addr, 0), 1);
+ addr = XEXP (XEXP (addr, 0), 0);
+ }
+#endif
+ emit_move_insn (temp, addr);
+ if (offset)
+ temp = gen_rtx_PLUS (Pmode, temp, offset);
+ x = gen_rtx_MEM (DImode, temp);
+ }
+ return x;
+}
+
+void
+vax_expand_addsub_di_operands (rtx * operands, enum rtx_code code)
+{
+ int hi_only = operand_subword (operands[2], 0, 0, DImode) == const0_rtx;
+ rtx temp;
+
+ rtx (*gen_old_insn)(rtx, rtx, rtx);
+ rtx (*gen_si_insn)(rtx, rtx, rtx);
+ rtx (*gen_insn)(rtx, rtx, rtx);
+
+ if (code == PLUS)
+ {
+ gen_old_insn = gen_adddi3_old;
+ gen_si_insn = gen_addsi3;
+ gen_insn = gen_adcdi3;
+ }
+ else if (code == MINUS)
+ {
+ gen_old_insn = gen_subdi3_old;
+ gen_si_insn = gen_subsi3;
+ gen_insn = gen_sbcdi3;
+ }
+ else
+ gcc_unreachable ();
+
+  /* If this is addition (and thus commutative) and one of the addends
+     duplicates the destination, we want that addend to be the first
+     addend.  */
+ if (code == PLUS
+ && rtx_equal_p (operands[0], operands[2])
+ && !rtx_equal_p (operands[1], operands[2]))
+ {
+ temp = operands[2];
+ operands[2] = operands[1];
+ operands[1] = temp;
+ }
+
+ if (!TARGET_QMATH)
+ {
+ emit_insn ((*gen_old_insn) (operands[0], operands[1], operands[2]));
+ }
+ else if (hi_only)
+ {
+ if (!rtx_equal_p (operands[0], operands[1])
+ && (REG_P (operands[0]) && MEM_P (operands[1])))
+ {
+ emit_move_insn (operands[0], operands[1]);
+ operands[1] = operands[0];
+ }
+
+ operands[0] = fixup_mathdi_operand (operands[0], DImode);
+ operands[1] = fixup_mathdi_operand (operands[1], DImode);
+ operands[2] = fixup_mathdi_operand (operands[2], DImode);
+
+ if (!rtx_equal_p (operands[0], operands[1]))
+ emit_move_insn (operand_subword (operands[0], 0, 0, DImode),
+ operand_subword (operands[1], 0, 0, DImode));
+
+ emit_insn ((*gen_si_insn) (operand_subword (operands[0], 1, 0, DImode),
+ operand_subword (operands[1], 1, 0, DImode),
+ operand_subword (operands[2], 1, 0, DImode)));
+ }
+ else
+ {
+      /* If we are adding the same value to itself, that's really a multiply
+	 by 2, which is just a left shift by 1.  */
+ if (rtx_equal_p (operands[1], operands[2]))
+ {
+ gcc_assert (code != MINUS);
+ emit_insn (gen_ashldi3 (operands[0], operands[1], const1_rtx));
+ return;
+ }
+
+ operands[0] = fixup_mathdi_operand (operands[0], DImode);
+
+      /* If an operand is the same as operands[0], use the operands[0] rtx,
+	 because fixup will return an equivalent rtx but not an equal one.  */
+
+ if (rtx_equal_p (operands[0], operands[1]))
+ operands[1] = operands[0];
+ else
+ operands[1] = fixup_mathdi_operand (operands[1], DImode);
+
+ if (rtx_equal_p (operands[0], operands[2]))
+ operands[2] = operands[0];
+ else
+ operands[2] = fixup_mathdi_operand (operands[2], DImode);
+
+      /* If we are subtracting and the destination is not also the minuend
+	 [d = a - b], then because the carry ops take only two operands we
+	 would need a move before the subtract.  And if d == b, we would
+	 also need a temp; otherwise [d = a, d -= d] ends up as 0.  Instead
+	 we rewrite d = a - b into d = -b, d += a.  Since -b can never
+	 overflow, no temp is needed even if b == d.
+
+	 If we are doing addition, since the carry ops take two operands, if
+	 we aren't adding to ourselves, move the first addend to the
+	 destination first.  */
+
+ gcc_assert (operands[1] != const0_rtx || code == MINUS);
+ if (!rtx_equal_p (operands[0], operands[1]) && operands[1] != const0_rtx)
+ {
+ if (code == MINUS && CONSTANT_P (operands[1]))
+ {
+ temp = gen_reg_rtx (DImode);
+ emit_insn (gen_sbcdi3 (operands[0], const0_rtx, operands[2]));
+ code = PLUS;
+ gen_insn = gen_adcdi3;
+ operands[2] = operands[1];
+ operands[1] = operands[0];
+ }
+ else
+ emit_move_insn (operands[0], operands[1]);
+ }
+
+ /* Subtracting a constant will have been rewritten to an addition of the
+ negative of that constant before we get here. */
+ gcc_assert (!CONSTANT_P (operands[2]) || code == PLUS);
+ emit_insn ((*gen_insn) (operands[0], operands[1], operands[2]));
+ }
+}
+
+bool
+adjacent_operands_p(rtx lo, rtx hi, enum machine_mode mode)
+{
+ HOST_WIDE_INT lo_offset;
+ HOST_WIDE_INT hi_offset;
+
+ if (GET_CODE (lo) != GET_CODE (hi))
+ return false;
+
+ if (REG_P (lo))
+ return mode == SImode && REGNO (lo) + 1 == REGNO (hi);
+ if (GET_CODE (lo) == CONST_INT)
+ return INTVAL (hi) == 0 && 0 <= INTVAL (lo) && INTVAL (lo) < 64;
+ if (GET_CODE (lo) == CONST_INT)
+ return mode != SImode;
+
+ if (!MEM_P (lo))
+ return false;
+
+ if (MEM_VOLATILE_P (lo) || MEM_VOLATILE_P (hi))
+ return false;
+
+ lo = XEXP (lo, 0);
+ hi = XEXP (hi, 0);
+
+ if (GET_CODE (lo) == POST_INC /* || GET_CODE (lo) == PRE_DEC */)
+ return rtx_equal_p (lo, hi);
+
+ switch (GET_CODE (lo))
+ {
+ case REG:
+ case SYMBOL_REF:
+ lo_offset = 0;
+ break;
+ case CONST:
+ lo = XEXP (lo, 0);
+ /* FALLTHROUGH */
+ case PLUS:
+ if (!CONST_INT_P (XEXP (lo, 1)))
+ return false;
+ lo_offset = INTVAL (XEXP (lo, 1));
+ lo = XEXP (lo, 0);
+ break;
+ default:
+ return false;
+ }
+
+ switch (GET_CODE (hi))
+ {
+ case REG:
+ case SYMBOL_REF:
+ hi_offset = 0;
+ break;
+ case CONST:
+ hi = XEXP (hi, 0);
+ /* FALLTHROUGH */
+ case PLUS:
+ if (!CONST_INT_P (XEXP (hi, 1)))
+ return false;
+ hi_offset = INTVAL (XEXP (hi, 1));
+ hi = XEXP (hi, 0);
+ break;
+ default:
+ return false;
+ }
+
+ if (GET_CODE (lo) == MULT || GET_CODE (lo) == PLUS)
+ return false;
+
+ return rtx_equal_p (lo, hi)
+ && hi_offset - lo_offset == GET_MODE_SIZE (mode);
+}
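
Note on the DImode constant path in vax_output_int_move above: the ashq
optimization fires when the 64-bit constant is a 6-bit short literal shifted
left by a nonzero amount.  A standalone sketch of that test (hypothetical
helper, not part of the patch; it assumes a 64-bit host integer type and uses
__builtin_ctzll where the patch uses exact_log2 (val & -val)):

#include <stdbool.h>
#include <stdint.h>

/* Return true if VAL equals a 6-bit literal (1..63) shifted left by a
   nonzero amount -- the case the patch emits as "ashq $n,$literal,dst"
   instead of an 8-byte immediate.  */
static bool
shifted_short_literal_p (uint64_t val, int *shift, uint64_t *literal)
{
  if (val == 0)
    return false;                    /* zero is handled by clrq */
  int n = __builtin_ctzll (val);     /* lowest set bit position */
  if (n == 0 || (val >> n) >= 64)
    return false;                    /* not a shifted short literal */
  *shift = n;
  *literal = val >> n;
  return true;                       /* val == *literal << *shift */
}

For example, 0x3f00000000 decomposes as 63 << 32, so the move can be emitted
as ashq $32,$63,dst.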
Index: vax.opt
===================================================================
--- vax.opt (revision 143198)
+++ vax.opt (working copy)
@@ -45,3 +45,7 @@
mvaxc-alignment
Target RejectNegative Mask(VAXC_ALIGNMENT)
Use VAXC structure conventions
+
+mqmath
+Target Mask(QMATH)
+Use new adddi3/subdi3 patterns
Index: predicates.md
===================================================================
--- predicates.md (revision 0)
+++ predicates.md (revision 0)
@@ -0,0 +1,112 @@
+;; Predicate definitions for DEC VAX.
+;; Copyright (C) 2007 Free Software Foundation, Inc.
+;;
+;; This file is part of GCC.
+;;
+;; GCC is free software; you can redistribute it and/or modify
+;; it under the terms of the GNU General Public License as published by
+;; the Free Software Foundation; either version 2, or (at your option)
+;; any later version.
+;;
+;; GCC is distributed in the hope that it will be useful,
+;; but WITHOUT ANY WARRANTY; without even the implied warranty of
+;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+;; GNU General Public License for more details.
+;;
+;; You should have received a copy of the GNU General Public License
+;; along with GCC; see the file COPYING. If not, write to
+;; the Free Software Foundation, 51 Franklin Street, Fifth Floor,
+;; Boston, MA 02110-1301, USA.
+
+;; True for any symbolic operand: a SYMBOL_REF, LABEL_REF, or a
+;; CONST wrapping one.
+
+(define_predicate "symbolic_operand"
+ (match_code "const,symbol_ref,label_ref"))
+
+(define_predicate "local_symbolic_operand"
+ (match_code "const,symbol_ref,label_ref")
+{
+ if (GET_CODE (op) == LABEL_REF)
+ return 1;
+ if (GET_CODE (op) == SYMBOL_REF)
+ return !flag_pic || SYMBOL_REF_LOCAL_P (op);
+ if (GET_CODE (XEXP (XEXP (op, 0), 0)) == LABEL_REF)
+ return 1;
+ return !flag_pic || SYMBOL_REF_LOCAL_P (XEXP (XEXP (op, 0), 0));
+})
+
+(define_predicate "external_symbolic_operand"
+ (and (match_code "symbol_ref")
+ (not (match_operand 0 "local_symbolic_operand" ""))))
+
+(define_predicate "external_const_operand"
+ (and (match_code "const")
+ (match_test "GET_CODE (XEXP (XEXP (op, 0), 0)) == SYMBOL_REF
+ && !SYMBOL_REF_LOCAL_P (XEXP (XEXP (op, 0), 0))")))
+
+(define_predicate "nonsymbolic_operand"
+ (and (ior (match_test "!flag_pic")
+ (not (match_operand 0 "symbolic_operand")))
+ (match_operand 0 "general_operand" "")))
+
+(define_predicate "external_memory_operand"
+ (match_code "mem")
+{
+ rtx addr = XEXP (op, 0);
+ if (MEM_P (addr))
+ addr = XEXP (addr, 0);
+ if (GET_CODE (addr) == PLUS)
+ addr = XEXP (addr, 1);
+ if (MEM_P (addr))
+ addr = XEXP (addr, 0);
+ if (GET_CODE (addr) == PLUS)
+ addr = XEXP (addr, 1);
+ return external_symbolic_operand (addr, SImode)
+ || external_const_operand (addr, SImode);
+})
+
+(define_predicate "indirect_memory_operand"
+ (match_code "mem")
+{
+ op = XEXP (op, 0);
+ if (MEM_P (op))
+ return 1;
+ if (GET_CODE (op) == PLUS)
+ op = XEXP (op, 1);
+ return MEM_P (op);
+})
+
+(define_predicate "indexed_memory_operand"
+ (match_code "mem")
+{
+ op = XEXP (op, 0);
+ return GET_CODE (op) != PRE_DEC && GET_CODE (op) != POST_INC
+ && mode_dependent_address_p (op);
+})
+
+(define_predicate "illegal_blk_memory_operand"
+ (and (match_code "mem")
+ (ior (and (match_test "flag_pic")
+ (match_operand 0 "external_memory_operand" ""))
+ (ior (match_operand 0 "indexed_memory_operand" "")
+ (ior (match_operand 0 "indirect_memory_operand" "")
+ (match_test "GET_CODE (XEXP (op, 0)) == PRE_DEC"))))))
+
+(define_predicate "illegal_addsub_di_memory_operand"
+ (and (match_code "mem")
+ (ior (and (match_test "flag_pic")
+ (match_operand 0 "external_memory_operand" ""))
+ (ior (match_operand 0 "indexed_memory_operand" "")
+ (ior (match_operand 0 "indirect_memory_operand" "")
+ (match_test "GET_CODE (XEXP (op, 0)) == PRE_DEC"))))))
+
+(define_predicate "nonimmediate_addsub_di_operand"
+ (and (match_code "subreg,reg,mem")
+ (and (match_operand:DI 0 "nonimmediate_operand" "")
+ (not (match_operand:DI 0 "illegal_addsub_di_memory_operand")))))
+
+(define_predicate "general_addsub_di_operand"
+ (and (match_code "const_int,const_double,subreg,reg,mem")
+ (and (match_operand:DI 0 "general_operand" "")
+ (not (match_operand:DI 0 "illegal_addsub_di_memory_operand")))))
Property changes on: predicates.md
___________________________________________________________________
Added: svn:special
+ *
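
Note on the predicates above: genpreds turns each define_predicate into an
ordinary C function.  As a rough sketch (not the generated code),
nonsymbolic_operand reduces to:

/* Accept any general operand, but reject SYMBOL_REF, LABEL_REF and
   CONST operands when PIC is enabled.  */
static int
nonsymbolic_operand_sketch (rtx op, enum machine_mode mode)
{
  if (flag_pic && symbolic_operand (op, mode))
    return 0;
  return general_operand (op, mode);
}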
Index: vax.h
===================================================================
--- vax.h (revision 143198)
+++ vax.h (working copy)
@@ -103,7 +103,7 @@
#define STRUCTURE_SIZE_BOUNDARY 8
/* A bit-field declared as `int' forces `int' alignment for the struct. */
-#define PCC_BITFIELD_TYPE_MATTERS (!TARGET_VAXC_ALIGNMENT)
+#define PCC_BITFIELD_TYPE_MATTERS (! TARGET_VAXC_ALIGNMENT)
/* No data type wants to be aligned rounder than this. */
#define BIGGEST_ALIGNMENT 32
@@ -180,6 +180,9 @@
This is computed in `reload', in reload1.c. */
#define FRAME_POINTER_REQUIRED 1
+/* Offset from the frame pointer register value to the top of stack. */
+#define FRAME_POINTER_CFA_OFFSET(FNDECL) 0
+
/* Base register for access to arguments of the function. */
#define ARG_POINTER_REGNUM VAX_AP_REGNUM
@@ -227,6 +230,20 @@
#define REG_CLASS_NAMES \
{ "NO_REGS", "ALL_REGS" }
+/* The following macro defines cover classes for the Integrated Register
+   Allocator.  Cover classes are a set of non-intersecting register
+   classes covering all hard registers used for register allocation.
+   Any move between two registers of a cover class should be cheaper
+   than a load or store of those registers.  The macro value is an
+   array of register classes with LIM_REG_CLASSES used as the end
+   marker.  */
+#define IRA_COVER_CLASSES { ALL_REGS, LIM_REG_CLASSES }
+
+/* Return the maximum number of consecutive registers
+ needed to represent mode MODE in a register of class CLASS. */
+#define CLASS_MAX_NREGS(CLASS, MODE) \
+ ((GET_MODE_SIZE (MODE) + UNITS_PER_WORD - 1) / UNITS_PER_WORD)
+
/* Define which registers fit in which classes.
This is an initializer for a vector of HARD_REG_SET
of length N_REG_CLASSES. */
@@ -245,54 +262,6 @@
#define INDEX_REG_CLASS ALL_REGS
#define BASE_REG_CLASS ALL_REGS
-/* Get reg_class from a letter such as appears in the machine description. */
-
-#define REG_CLASS_FROM_LETTER(C) NO_REGS
-
-/* The letters I, J, K, L, M, N, and O in a register constraint string
- can be used to stand for particular ranges of immediate operands.
- This macro defines what the ranges are.
- C is the letter, and VALUE is a constant value.
- Return 1 if VALUE is in the range specified by C.
-
- `I' is the constant zero.
- `J' is a value between 0 .. 63 (inclusive)
- `K' is a value between -128 and 127 (inclusive)
- 'L' is a value between -32768 and 32767 (inclusive)
- `M' is a value between 0 and 255 (inclusive)
- 'N' is a value between 0 and 65535 (inclusive)
- `O' is a value between -63 and -1 (inclusive) */
-
-#define CONST_OK_FOR_LETTER_P(VALUE, C) \
- ( (C) == 'I' ? (VALUE) == 0 \
- : (C) == 'J' ? 0 <= (VALUE) && (VALUE) < 64 \
- : (C) == 'O' ? -63 <= (VALUE) && (VALUE) < 0 \
- : (C) == 'K' ? -128 <= (VALUE) && (VALUE) < 128 \
- : (C) == 'M' ? 0 <= (VALUE) && (VALUE) < 256 \
- : (C) == 'L' ? -32768 <= (VALUE) && (VALUE) < 32768 \
- : (C) == 'N' ? 0 <= (VALUE) && (VALUE) < 65536 \
- : 0)
-
-/* Similar, but for floating constants, and defining letters G and H.
- Here VALUE is the CONST_DOUBLE rtx itself.
-
- `G' is a floating-point zero. */
-
-#define CONST_DOUBLE_OK_FOR_LETTER_P(VALUE, C) \
- ((C) == 'G' ? ((VALUE) == CONST0_RTX (DFmode) \
- || (VALUE) == CONST0_RTX (SFmode)) \
- : 0)
-
-/* Optional extra constraints for this machine.
-
- For the VAX, `Q' means that OP is a MEM that does not have a mode-dependent
- address. */
-
-#define EXTRA_CONSTRAINT(OP, C) \
- ((C) == 'Q' \
- ? MEM_P (OP) && !mode_dependent_address_p (XEXP (OP, 0)) \
- : 0)
-
/* Given an rtx X being reloaded into a reg required to be
in class CLASS, return the class of reg to actually use.
In general this is just CLASS; but on some machines
@@ -620,6 +589,11 @@
in one reasonably fast instruction. */
#define MOVE_MAX 8
+/* If a memory-to-memory move would take MOVE_RATIO or more simple
+ move-instruction pairs, we will do a movmem or libcall instead. */
+#define MOVE_RATIO(speed) ((speed) ? 6 : 3)
+#define CLEAR_RATIO(speed) ((speed) ? 6 : 2)
+
/* Nonzero if access to memory by bytes is slow and undesirable. */
#define SLOW_BYTE_ACCESS 0
@@ -850,49 +824,11 @@
# define NEG_HWI_PRINT_HEX16 "0xffffffff%08lx"
#endif
-#define PRINT_OPERAND_PUNCT_VALID_P(CODE) \
+#define PRINT_OPERAND_PUNCT_VALID_P(CODE) \
((CODE) == '#' || (CODE) == '|')
-#define PRINT_OPERAND(FILE, X, CODE) \
-{ if (CODE == '#') fputc (ASM_DOUBLE_CHAR, FILE); \
- else if (CODE == '|') \
- fputs (REGISTER_PREFIX, FILE); \
- else if (CODE == 'C') \
- fputs (rev_cond_name (X), FILE); \
- else if (CODE == 'D' && CONST_INT_P (X) && INTVAL (X) < 0) \
- fprintf (FILE, "$" NEG_HWI_PRINT_HEX16, INTVAL (X)); \
- else if (CODE == 'P' && CONST_INT_P (X)) \
- fprintf (FILE, "$" HOST_WIDE_INT_PRINT_DEC, INTVAL (X) + 1); \
- else if (CODE == 'N' && CONST_INT_P (X)) \
- fprintf (FILE, "$" HOST_WIDE_INT_PRINT_DEC, ~ INTVAL (X)); \
- /* rotl instruction cannot deal with negative arguments. */ \
- else if (CODE == 'R' && CONST_INT_P (X)) \
- fprintf (FILE, "$" HOST_WIDE_INT_PRINT_DEC, 32 - INTVAL (X)); \
- else if (CODE == 'H' && CONST_INT_P (X)) \
- fprintf (FILE, "$%d", (int) (0xffff & ~ INTVAL (X))); \
- else if (CODE == 'h' && CONST_INT_P (X)) \
- fprintf (FILE, "$%d", (short) - INTVAL (x)); \
- else if (CODE == 'B' && CONST_INT_P (X)) \
- fprintf (FILE, "$%d", (int) (0xff & ~ INTVAL (X))); \
- else if (CODE == 'b' && CONST_INT_P (X)) \
- fprintf (FILE, "$%d", (int) (0xff & - INTVAL (X))); \
- else if (CODE == 'M' && CONST_INT_P (X)) \
- fprintf (FILE, "$%d", ~((1 << INTVAL (x)) - 1)); \
- else if (REG_P (X)) \
- fprintf (FILE, "%s", reg_names[REGNO (X)]); \
- else if (MEM_P (X)) \
- output_address (XEXP (X, 0)); \
- else if (GET_CODE (X) == CONST_DOUBLE && GET_MODE (X) == SFmode) \
- { char dstr[30]; \
- real_to_decimal (dstr, CONST_DOUBLE_REAL_VALUE (X), \
- sizeof (dstr), 0, 1); \
- fprintf (FILE, "$0f%s", dstr); } \
- else if (GET_CODE (X) == CONST_DOUBLE && GET_MODE (X) == DFmode) \
- { char dstr[30]; \
- real_to_decimal (dstr, CONST_DOUBLE_REAL_VALUE (X), \
- sizeof (dstr), 0, 1); \
- fprintf (FILE, "$0%c%s", ASM_DOUBLE_CHAR, dstr); } \
- else { putc ('$', FILE); output_addr_const (FILE, X); }}
+#define PRINT_OPERAND(FILE, X, CODE) \
+ print_operand (FILE, X, CODE)
/* Print a memory operand whose address is X, on file FILE.
This uses a function in output-vax.c. */
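
A worked example for the new CLASS_MAX_NREGS definition above, assuming
UNITS_PER_WORD is 4 on VAX:

/* CLASS_MAX_NREGS (CLASS, MODE) == (GET_MODE_SIZE (MODE) + 4 - 1) / 4
   SImode (4 bytes): (4 + 3) / 4 = 1 register
   DImode (8 bytes): (8 + 3) / 4 = 2 registers
   DFmode (8 bytes): (8 + 3) / 4 = 2 registers  */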
Index: constraints.md
===================================================================
--- constraints.md (revision 0)
+++ constraints.md (revision 0)
@@ -0,0 +1,98 @@
+(define_constraint "Z0"
+ "Match a CONST_INT of 0"
+ (and (match_code "const_int")
+ (match_test "ival == 0")))
+
+(define_constraint "U06"
+ "unsigned 6 bit value (0..63)"
+ (and (match_code "const_int")
+ (match_test "0 <= ival && ival < 64")))
+
+(define_constraint "U08"
+ "Unsigned 8 bit value"
+ (and (match_code "const_int")
+ (match_test "0 <= ival && ival < 256")))
+
+(define_constraint "U16"
+ "Unsigned 16 bit value"
+ (and (match_code "const_int")
+ (match_test "0 <= ival && ival < 65536")))
+
+(define_constraint "CN6"
+ "negative 6 bit value (-63..-1)"
+ (and (match_code "const_int")
+ (match_test "-63 <= ival && ival < 0")))
+
+(define_constraint "S08"
+ "signed 8 bit value [old]"
+ (and (match_code "const_int")
+ (match_test "-128 <= ival && ival < 128")))
+
+(define_constraint "S16"
+ "signed 16 bit value [old]"
+ (and (match_code "const_int")
+ (match_test "-32768 <= ival && ival < 32768")))
+
+(define_constraint "I"
+ "Match a CONST_INT of 0 [old]"
+ (and (match_code "const_int")
+ (match_test "satisfies_constraint_Z0 (GEN_INT (ival))")))
+
+(define_constraint "J"
+ "unsigned 6 bit value [old]"
+ (and (match_code "const_int")
+ (match_test "satisfies_constraint_U06 (GEN_INT (ival))")))
+
+(define_constraint "K"
+ "signed 8 bit value [old]"
+ (and (match_code "const_int")
+ (match_test "satisfies_constraint_S08 (GEN_INT (ival))")))
+
+(define_constraint "L"
+ "signed 16 bit value [old]"
+ (and (match_code "const_int")
+ (match_test "satisfies_constraint_S16 (GEN_INT (ival))")))
+
+(define_constraint "M"
+ "Unsigned 8 bit value [old]"
+ (and (match_code "const_int")
+ (match_test "satisfies_constraint_U08 (GEN_INT (ival))")))
+
+(define_constraint "N"
+ "Unsigned 16 bit value [old]"
+ (and (match_code "const_int")
+ (match_test "satisfies_constraint_U16 (GEN_INT (ival))")))
+
+(define_constraint "O"
+ "Negative short literals (-63..-1) [old]"
+ (and (match_code "const_int")
+ (match_test "satisfies_constraint_CN6 (GEN_INT (ival))")))
+
+;; Similar, but for floating constants; only the letter G is defined here.
+
+(define_constraint "G"
+ "Match a floating-point zero"
+ (and (match_code "const_double")
+ (match_test "op == CONST0_RTX (DFmode) || op == CONST0_RTX (SFmode)")))
+
+;; Optional extra constraints for this machine.
+
+(define_memory_constraint "Q"
+ "operand is a MEM that does not have a mode-dependent address."
+ (and (match_code "mem")
+ (match_test "!mode_dependent_address_p (XEXP (op, 0))")))
+
+(define_memory_constraint "B"
+ ""
+ (and (match_operand:BLK 0 "memory_operand")
+ (not (match_operand:BLK 0 "illegal_blk_memory_operand" ""))))
+
+(define_memory_constraint "R"
+ ""
+ (and (match_operand:DI 0 "memory_operand")
+ (not (match_operand:DI 0 "illegal_addsub_di_memory_operand" ""))))
+
+(define_constraint "T"
+ "@internal satisfies CONSTANT_P and, if pic is enabled, is not a SYMBOL_REF, LABEL_REF, or CONST."
+ (ior (not (match_code "const,symbol_ref,label_ref"))
+ (match_test "!flag_pic")))
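
These constraint definitions pair with the new tm-constrs.h include in vax.c:
genpreds emits a satisfies_constraint_<name> tester for each constraint (the
[old] letter constraints above already call them).  A usage sketch, with a
hypothetical helper that is not part of the patch:

/* True when X is a CONST_INT that fits the VAX 6-bit short-literal
   form, tested through the U06 constraint instead of an open-coded
   range check.  */
static bool
short_literal_operand_p (rtx x)
{
  return CONST_INT_P (x) && satisfies_constraint_U06 (x);
}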
Index: vax-protos.h
===================================================================
--- vax-protos.h (revision 143198)
+++ vax-protos.h (working copy)
@@ -19,20 +19,24 @@
extern void override_options (void);
-extern int legitimate_constant_address_p (rtx);
-extern int legitimate_constant_p (rtx);
-extern int legitimate_address_p (enum machine_mode, rtx, int);
-extern int vax_mode_dependent_address_p (rtx);
+extern bool legitimate_constant_address_p (rtx);
+extern bool legitimate_constant_p (rtx);
+extern bool legitimate_pic_operand_p (rtx);
+extern bool legitimate_address_p (enum machine_mode, rtx, bool);
+extern bool vax_mode_dependent_address_p (rtx);
#ifdef RTX_CODE
+extern bool adjacent_operands_p(rtx, rtx, enum machine_mode);
extern const char *rev_cond_name (rtx);
-extern void split_quadword_operands (rtx *, rtx *, int);
extern void print_operand_address (FILE *, rtx);
-extern int vax_float_literal (rtx);
+extern void print_operand (FILE *, rtx, int);
extern void vax_notice_update_cc (rtx, rtx);
+extern void vax_expand_addsub_di_operands (rtx *, enum rtx_code);
extern const char * vax_output_int_move (rtx, rtx *, enum machine_mode);
extern const char * vax_output_int_add (rtx, rtx *, enum machine_mode);
+extern const char * vax_output_int_subtract (rtx, rtx *, enum machine_mode);
extern const char * vax_output_conditional_branch (enum rtx_code);
+extern const char * vax_output_movmemsi (rtx, rtx *);
#endif /* RTX_CODE */
#ifdef REAL_VALUE_TYPE
Index: builtins.md
===================================================================
--- builtins.md (revision 0)
+++ builtins.md (revision 0)
@@ -0,0 +1,193 @@
+;; builtin definitions for DEC VAX.
+;; Copyright (C) 2007 Free Software Foundation, Inc.
+;;
+;; This file is part of GCC.
+;;
+;; GCC is free software; you can redistribute it and/or modify
+;; it under the terms of the GNU General Public License as published by
+;; the Free Software Foundation; either version 2, or (at your option)
+;; any later version.
+;;
+;; GCC is distributed in the hope that it will be useful,
+;; but WITHOUT ANY WARRANTY; without even the implied warranty of
+;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+;; GNU General Public License for more details.
+;;
+;; You should have received a copy of the GNU General Public License
+;; along with GCC; see the file COPYING. If not, write to
+;; the Free Software Foundation, 51 Franklin Street, Fifth Floor,
+;; Boston, MA 02110-1301, USA.
+
+(define_constants
+ [
+ (VUNSPEC_LOCK 100) ; sync lock and test
+ (VUNSPEC_UNLOCK 101) ; sync lock release
+ ]
+)
+
+(define_expand "ffssi2"
+ [(set (match_operand:SI 0 "nonimmediate_operand" "")
+ (ffs:SI (match_operand:SI 1 "general_operand" "")))]
+ ""
+ "
+{
+ rtx label = gen_label_rtx ();
+ emit_insn (gen_ffssi2_internal (operands[0], operands[1]));
+ emit_jump_insn (gen_bne (label));
+ emit_insn (gen_negsi2 (operands[0], const1_rtx));
+ emit_label (label);
+ emit_insn (gen_addsi3 (operands[0], operands[0], const1_rtx));
+ DONE;
+}")
+
+(define_insn "ffssi2_internal"
+ [(set (match_operand:SI 0 "nonimmediate_operand" "=rQ")
+ (ffs:SI (match_operand:SI 1 "general_operand" "nrmT")))
+ (set (cc0) (match_dup 0))]
+ ""
+ "ffs $0,$32,%1,%0")
+
+(define_expand "sync_lock_test_and_set<mode>"
+ [(set (match_operand:VAXint 0 "nonimmediate_operand" "=&g")
+ (unspec:VAXint [(match_operand:VAXint 1 "memory_operand" "+m")
+ (match_operand:VAXint 2 "const_int_operand" "n")
+ ] VUNSPEC_LOCK))]
+ ""
+ "
+{
+ rtx label;
+
+ if (operands[2] != const1_rtx)
+ FAIL;
+
+ label = gen_label_rtx ();
+ emit_move_insn (operands[0], const1_rtx);
+ emit_jump_insn (gen_jbbssi<mode> (operands[1], const0_rtx, label, operands[1]));
+ emit_move_insn (operands[0], const0_rtx);
+ emit_label (label);
+ DONE;
+}")
+
+(define_insn "jbbssiqi"
+ [(parallel
+ [(set (pc)
+ (if_then_else
+ (ne (zero_extract:SI (match_operand:QI 0 "memory_operand" "g")
+ (const_int 1)
+ (match_operand:SI 1 "general_operand" "nrm"))
+ (const_int 0))
+ (label_ref (match_operand 2 "" ""))
+ (pc)))
+ (set (zero_extract:SI (match_operand:QI 3 "memory_operand" "+0")
+ (const_int 1)
+ (match_dup 1))
+ (const_int 1))])]
+ ""
+ "jbssi %1,%0,%l2")
+
+(define_insn "jbbssihi"
+ [(parallel
+ [(set (pc)
+ (if_then_else
+ (ne (zero_extract:SI (match_operand:HI 0 "memory_operand" "Q")
+ (const_int 1)
+ (match_operand:SI 1 "general_operand" "nrm"))
+ (const_int 0))
+ (label_ref (match_operand 2 "" ""))
+ (pc)))
+ (set (zero_extract:SI (match_operand:HI 3 "memory_operand" "+0")
+ (const_int 1)
+ (match_dup 1))
+ (const_int 1))])]
+ ""
+ "jbssi %1,%0,%l2")
+
+(define_insn "jbbssisi"
+ [(parallel
+ [(set (pc)
+ (if_then_else
+ (ne (zero_extract:SI (match_operand:SI 0 "memory_operand" "Q")
+ (const_int 1)
+ (match_operand:SI 1 "general_operand" "nrm"))
+ (const_int 0))
+ (label_ref (match_operand 2 "" ""))
+ (pc)))
+ (set (zero_extract:SI (match_operand:SI 3 "memory_operand" "+0")
+ (const_int 1)
+ (match_dup 1))
+ (const_int 1))])]
+ ""
+ "jbssi %1,%0,%l2")
+
+
+(define_expand "sync_lock_release<mode>"
+ [(set (match_operand:VAXint 0 "memory_operand" "+m")
+ (unspec:VAXint [(match_operand:VAXint 1 "const_int_operand" "n")
+ ] VUNSPEC_UNLOCK))]
+ ""
+ "
+{
+ rtx label;
+ if (operands[1] != const0_rtx)
+ FAIL;
+#if 1
+ label = gen_label_rtx ();
+ emit_jump_insn (gen_jbbcci<mode> (operands[0], const0_rtx, label, operands[0]));
+ emit_label (label);
+#else
+ emit_move_insn (operands[0], const0_rtx);
+#endif
+ DONE;
+}")
+
+(define_insn "jbbcciqi"
+ [(parallel
+ [(set (pc)
+ (if_then_else
+ (eq (zero_extract:SI (match_operand:QI 0 "memory_operand" "g")
+ (const_int 1)
+ (match_operand:SI 1 "general_operand" "nrm"))
+ (const_int 0))
+ (label_ref (match_operand 2 "" ""))
+ (pc)))
+ (set (zero_extract:SI (match_operand:QI 3 "memory_operand" "+0")
+ (const_int 1)
+ (match_dup 1))
+ (const_int 0))])]
+ ""
+ "jbcci %1,%0,%l2")
+
+(define_insn "jbbccihi"
+ [(parallel
+ [(set (pc)
+ (if_then_else
+ (eq (zero_extract:SI (match_operand:HI 0 "memory_operand" "Q")
+ (const_int 1)
+ (match_operand:SI 1 "general_operand" "nrm"))
+ (const_int 0))
+ (label_ref (match_operand 2 "" ""))
+ (pc)))
+ (set (zero_extract:SI (match_operand:HI 3 "memory_operand" "+0")
+ (const_int 1)
+ (match_dup 1))
+ (const_int 0))])]
+ ""
+ "jbcci %1,%0,%l2")
+
+(define_insn "jbbccisi"
+ [(parallel
+ [(set (pc)
+ (if_then_else
+ (eq (zero_extract:SI (match_operand:SI 0 "memory_operand" "Q")
+ (const_int 1)
+ (match_operand:SI 1 "general_operand" "nrm"))
+ (const_int 0))
+ (label_ref (match_operand 2 "" ""))
+ (pc)))
+ (set (zero_extract:SI (match_operand:SI 3 "memory_operand" "+0")
+ (const_int 1)
+ (match_dup 1))
+ (const_int 0))])]
+ ""
+ "jbcci %1,%0,%l2")
+
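
The sync_lock_test_and_set expansion above preloads the result with 1, runs an
interlocked branch-on-bit-set-and-set (jbssi) on bit 0 of the lock word, and
clears the result on the fall-through (bit-was-clear) path.  In plain,
non-atomic C the value it computes is:

/* Semantics sketch only -- the real sequence is a single interlocked
   VAX bit test-and-set plus a conditional branch.  */
static int
lock_test_and_set_sketch (int *lock)
{
  int was_set = *lock & 1;      /* old value of the lock bit */
  *lock |= 1;                   /* set it (interlocked in hardware) */
  return was_set;               /* 1: already held, 0: just acquired */
}

sync_lock_release is the mirror image: jbcci clears the bit with the same kind
of interlocked instruction.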
Index: netbsd-elf.h
===================================================================
--- netbsd-elf.h (revision 143198)
+++ netbsd-elf.h (working copy)
@@ -30,10 +30,16 @@
#undef CPP_SPEC
#define CPP_SPEC NETBSD_CPP_SPEC
+#undef CC1_SPEC
+#define CC1_SPEC NETBSD_CC1_AND_CC1PLUS_SPEC VAX_CC1_AND_CC1PLUS_SPEC
+
+#undef CC1PLUS_SPEC
+#define CC1PLUS_SPEC NETBSD_CC1_AND_CC1PLUS_SPEC VAX_CC1_AND_CC1PLUS_SPEC
+
#define NETBSD_ENTRY_POINT "__start"
#undef LINK_SPEC
-#if 1
+#if 0
/* FIXME: We must link all executables statically until PIC support
is added to the compiler. */
#define LINK_SPEC \
@@ -55,5 +61,4 @@
/* We use gas, not the UNIX assembler. */
#undef TARGET_DEFAULT
-#define TARGET_DEFAULT 0
-
+#define TARGET_DEFAULT MASK_QMATH
Index: elf.h
===================================================================
--- elf.h (revision 143198)
+++ elf.h (working copy)
@@ -85,8 +85,29 @@
} \
while (0)
+/* Don't allow indirection through *foo when foo is non-local.  */
+#define NO_EXTERNAL_INDIRECT_ADDRESS
+
+#undef VAX_CC1_AND_CC1PLUS_SPEC
+#define VAX_CC1_AND_CC1PLUS_SPEC \
+ "%{!fno-pic: \
+ %{!fpic: \
+ %{!fPIC:-fPIC}}}"
+
/* VAX ELF is always gas; override the generic VAX ASM_SPEC. */
#undef ASM_SPEC
-#define ASM_SPEC ""
+#define ASM_SPEC "%{!fno-pic: %{!mno-asm-pic:-k}}"
+/* We want PCREL dwarf output. */
+#define ASM_PREFERRED_EH_DATA_FORMAT(CODE,GLOBAL) \
+ ((GLOBAL ? DW_EH_PE_indirect : 0) | DW_EH_PE_pcrel | DW_EH_PE_sdata4)
+
+/* Emit a PC-relative relocation. */
+#define ASM_OUTPUT_DWARF_PCREL(FILE, SIZE, LABEL) \
+ do { \
+ fputs (integer_asm_op (SIZE, FALSE), FILE); \
+ fprintf (FILE, "%%pcrel%d(", SIZE * 8); \
+ assemble_name (FILE, LABEL); \
+ fputc (')', FILE); \
+ } while (0)
Index: vax.md
===================================================================
--- vax.md (revision 143198)
+++ vax.md (working copy)
@@ -43,7 +43,9 @@
;; Integer modes supported on VAX, with a mapping from machine mode
;; to mnemonic suffix. DImode is always a special case.
(define_mode_iterator VAXint [QI HI SI])
-(define_mode_attr isfx [(QI "b") (HI "w") (SI "l")])
+(define_mode_iterator VAXintQH [QI HI])
+(define_mode_iterator VAXintQHSD [QI HI SI DI])
+(define_mode_attr isfx [(QI "b") (HI "w") (SI "l") (DI "q")])
;; Similar for float modes supported on VAX.
(define_mode_iterator VAXfp [SF DF])
@@ -52,13 +54,17 @@
;; Some output patterns want integer immediates with a prefix...
(define_mode_attr iprefx [(QI "B") (HI "H") (SI "N")])
+;;
+(include "constraints.md")
+(include "predicates.md")
+
;; We don't want to allow a constant operand for test insns because
;; (set (cc0) (const_int foo)) has no mode information. Such insns will
;; be folded while optimizing anyway.
(define_insn "tst<mode>"
[(set (cc0)
- (match_operand:VAXint 0 "nonimmediate_operand" "g"))]
+ (match_operand:VAXint 0 "nonimmediate_operand" "nrmT"))]
""
"tst<VAXint:isfx> %0")
@@ -70,8 +76,8 @@
(define_insn "cmp<mode>"
[(set (cc0)
- (compare (match_operand:VAXint 0 "nonimmediate_operand" "g")
- (match_operand:VAXint 1 "general_operand" "g")))]
+ (compare (match_operand:VAXint 0 "nonimmediate_operand" "nrmT")
+ (match_operand:VAXint 1 "general_operand" "nrmT")))]
""
"cmp<VAXint:isfx> %0,%1")
@@ -86,8 +92,8 @@
(define_insn "*bit<mode>"
[(set (cc0)
- (and:VAXint (match_operand:VAXint 0 "general_operand" "g")
- (match_operand:VAXint 1 "general_operand" "g")))]
+ (and:VAXint (match_operand:VAXint 0 "general_operand" "nrmT")
+ (match_operand:VAXint 1 "general_operand" "nrmT")))]
""
"bit<VAXint:isfx> %0,%1")
@@ -115,12 +121,10 @@
;; "movh %1,%0")
(define_insn "movdi"
- [(set (match_operand:DI 0 "nonimmediate_operand" "=g,g")
- (match_operand:DI 1 "general_operand" "I,g"))]
+ [(set (match_operand:DI 0 "nonimmediate_operand" "=g")
+ (match_operand:DI 1 "general_operand" "g"))]
""
- "@
- clrq %0
- movq %D1,%0")
+ "* return vax_output_int_move (insn, operands, DImode);")
;; The VAX move instructions have space-time tradeoffs. On a MicroVAX
;; register-register mov instructions take 3 bytes and 2 CPU cycles. clrl
@@ -139,9 +143,37 @@
;; Loads of constants between 64 and 128 used to be done with
;; "addl3 $63,#,dst" but this is slower than movzbl and takes as much space.
+(define_expand "movsi"
+ [(set (match_operand:SI 0 "nonimmediate_operand" "")
+ (match_operand:SI 1 "general_operand" ""))]
+ ""
+ "
+{
+#ifdef NO_EXTERNAL_INDIRECT_ADDRESS
+ if (flag_pic
+ && GET_CODE (operands[1]) == CONST
+ && GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == SYMBOL_REF
+ && !SYMBOL_REF_LOCAL_P (XEXP (XEXP (operands[1], 0), 0)))
+ {
+ rtx symbol_ref = XEXP (XEXP (operands[1], 0), 0);
+ rtx const_int = XEXP (XEXP (operands[1], 0), 1);
+ rtx temp = reload_in_progress ? operands[0] : gen_reg_rtx (Pmode);
+ emit_move_insn (temp, symbol_ref);
+ emit_move_insn (operands[0], gen_rtx_PLUS (SImode, temp, const_int));
+ DONE;
+ }
+#endif
+}")
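A hypothetical source-level example (not from the patch) of the case this
expander handles: under -fPIC with NO_EXTERNAL_INDIRECT_ADDRESS defined, the
address of an element of an external array is a CONST of a SYMBOL_REF plus a
CONST_INT, which the expander splits into a move of the bare symbol followed
by an add of the offset.

/* Hypothetical test case.  With -fPIC, "&tab[10]" is
   (const (plus (symbol_ref "tab") (const_int 40))) on a 4-byte-int target,
   the shape rewritten by the movsi expander above.  */
extern int tab[100];

int *
addr_in_tab (void)
{
  return &tab[10];
}
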
+
+(define_insn "movsi_2"
+ [(set (match_operand:SI 0 "nonimmediate_operand" "=g")
+ (match_operand:SI 1 "nonsymbolic_operand" "nrmT"))]
+ ""
+ "* return vax_output_int_move (insn, operands, SImode);")
+
(define_insn "mov<mode>"
- [(set (match_operand:VAXint 0 "nonimmediate_operand" "=g")
- (match_operand:VAXint 1 "general_operand" "g"))]
+ [(set (match_operand:VAXintQH 0 "nonimmediate_operand" "=g")
+ (match_operand:VAXintQH 1 "general_operand" "g"))]
""
"* return vax_output_int_move (insn, operands, <MODE>mode);")
@@ -151,7 +183,7 @@
""
"*
{
- if (CONST_INT_P (operands[1]))
+ if (GET_CODE (operands[1]) == CONST_INT)
{
int i = INTVAL (operands[1]);
if (i == 0)
@@ -172,7 +204,7 @@
""
"*
{
- if (CONST_INT_P (operands[1]))
+ if (GET_CODE (operands[1]) == CONST_INT)
{
int i = INTVAL (operands[1]);
if (i == 0)
@@ -192,17 +224,19 @@
(match_operand 3 "" "")]
""
"
+{
emit_insn (gen_movmemhi1 (operands[0], operands[1], operands[2]));
DONE;
-")
+}")
;; The definition of this insn does not really explain what it does,
;; but it should suffice
;; that anything generated as this insn will be recognized as one
;; and that it won't successfully combine with anything.
+
(define_insn "movmemhi1"
- [(set (match_operand:BLK 0 "memory_operand" "=m")
- (match_operand:BLK 1 "memory_operand" "m"))
+ [(set (match_operand:BLK 0 "memory_operand" "=o")
+ (match_operand:BLK 1 "memory_operand" "o"))
(use (match_operand:HI 2 "general_operand" "g"))
(clobber (reg:SI 0))
(clobber (reg:SI 1))
@@ -217,13 +251,13 @@
(define_insn "truncsiqi2"
[(set (match_operand:QI 0 "nonimmediate_operand" "=g")
- (truncate:QI (match_operand:SI 1 "nonimmediate_operand" "g")))]
+ (truncate:QI (match_operand:SI 1 "nonimmediate_operand" "nrmT")))]
""
"cvtlb %1,%0")
(define_insn "truncsihi2"
[(set (match_operand:HI 0 "nonimmediate_operand" "=g")
- (truncate:HI (match_operand:SI 1 "nonimmediate_operand" "g")))]
+ (truncate:HI (match_operand:SI 1 "nonimmediate_operand" "nrmT")))]
""
"cvtlw %1,%0")
@@ -309,56 +343,62 @@
add<VAXfp:fsfx>2 %1,%0
add<VAXfp:fsfx>3 %1,%2,%0")
+(define_insn "pushlclsymreg"
+ [(set (match_operand:SI 0 "push_operand" "=g")
+ (plus:SI (match_operand:SI 1 "register_operand" "%r")
+ (match_operand:SI 2 "local_symbolic_operand" "i")))]
+ "flag_pic"
+ "pushab %a2[%1]")
+
+(define_insn "pushextsymreg"
+ [(set (match_operand:SI 0 "push_operand" "=g")
+ (plus:SI (match_operand:SI 1 "register_operand" "%r")
+ (match_operand:SI 2 "external_symbolic_operand" "i")))]
+ "flag_pic"
+ "pushab %a2[%1]")
+
+(define_insn "movlclsymreg"
+ [(set (match_operand:SI 0 "nonimmediate_operand" "=g")
+ (plus:SI (match_operand:SI 1 "register_operand" "%r")
+ (match_operand:SI 2 "local_symbolic_operand" "i")))]
+ "flag_pic"
+ "movab %a2[%1],%0")
+
+(define_insn "movextsymreg"
+ [(set (match_operand:SI 0 "nonimmediate_operand" "=g")
+ (plus:SI (match_operand:SI 1 "register_operand" "%r")
+ (match_operand:SI 2 "external_symbolic_operand" "i")))]
+ "flag_pic"
+ "movab %a2[%1],%0")
+
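A made-up illustration (not part of the patch) of where the register-plus-
symbol operands these four patterns match come from: indexing an extern array
by a variable leaves a (plus (reg) (symbol_ref ...)) SImode value, which
matches movextsymreg above and is emitted as movab %a2[%1],%0.

/* Hypothetical example: "&etab[i]" becomes (plus (reg) (symbol_ref "etab"))
   when etab is not local to the module.  */
extern char etab[];

char *
addr_of_elem (long i)
{
  return &etab[i];
}
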
(define_insn "add<mode>3"
[(set (match_operand:VAXint 0 "nonimmediate_operand" "=g")
- (plus:VAXint (match_operand:VAXint 1 "general_operand" "g")
- (match_operand:VAXint 2 "general_operand" "g")))]
+ (plus:VAXint (match_operand:VAXint 1 "general_operand" "nrmT")
+ (match_operand:VAXint 2 "general_operand" "nrmT")))]
""
"* return vax_output_int_add (insn, operands, <MODE>mode);")
+(define_expand "adddi3"
+ [(set (match_operand:DI 0 "nonimmediate_operand" "=g")
+ (plus:DI (match_operand:DI 1 "general_operand" "g")
+ (match_operand:DI 2 "general_operand" "g")))]
+ "!reload_in_progress"
+ "vax_expand_addsub_di_operands (operands, PLUS); DONE;")
+
+(define_insn "adcdi3"
+ [(set (match_operand:DI 0 "nonimmediate_addsub_di_operand" "=Rr")
+ (plus:DI (match_operand:DI 1 "general_addsub_di_operand" "%0")
+ (match_operand:DI 2 "general_addsub_di_operand" "nRr")))]
+ "TARGET_QMATH"
+ "* return vax_output_int_add (insn, operands, DImode);")
+
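As a rough sketch only (names, types and word order are simplified), the
two-instruction sequence adcdi3 emits (a 32-bit add of the low halves
followed by an add-with-carry, adwc, on the high halves) corresponds to:

/* Illustration of double-word addition via add-with-carry: add the low
   halves, then add the high halves plus the carry out of the low add.
   This models what vax_output_int_add emits for adcdi3, ignoring operand
   addressing details.  */
#include <stdint.h>

static void
add_di_model (uint32_t dst[2], const uint32_t a[2], const uint32_t b[2])
{
  uint32_t lo = a[0] + b[0];		/* addl3 */
  uint32_t carry = lo < a[0];		/* carry out of the low-half add */
  dst[0] = lo;
  dst[1] = a[1] + b[1] + carry;		/* adwc */
}
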
;; The add-with-carry (adwc) instruction only accepts two operands.
-(define_insn "adddi3"
+(define_insn "adddi3_old"
[(set (match_operand:DI 0 "nonimmediate_operand" "=ro>,ro>")
(plus:DI (match_operand:DI 1 "general_operand" "%0,ro>")
- (match_operand:DI 2 "general_operand" "Fro,F")))]
- ""
- "*
-{
- rtx low[3];
- const char *pattern;
- int carry = 1;
-
- split_quadword_operands (operands, low, 3);
- /* Add low parts. */
- if (rtx_equal_p (operands[0], operands[1]))
- {
- if (low[2] == const0_rtx)
- /* Should examine operand, punt if not POST_INC. */
- pattern = \"tstl %0\", carry = 0;
- else if (low[2] == const1_rtx)
- pattern = \"incl %0\";
- else
- pattern = \"addl2 %2,%0\";
- }
- else
- {
- if (low[2] == const0_rtx)
- pattern = \"movl %1,%0\", carry = 0;
- else
- pattern = \"addl3 %2,%1,%0\";
- }
- if (pattern)
- output_asm_insn (pattern, low);
- if (!carry)
- /* If CARRY is 0, we don't have any carry value to worry about. */
- return get_insn_template (CODE_FOR_addsi3, insn);
- /* %0 = C + %1 + %2 */
- if (!rtx_equal_p (operands[0], operands[1]))
- output_asm_insn ((operands[1] == const0_rtx
- ? \"clrl %0\"
- : \"movl %1,%0\"), operands);
- return \"adwc %2,%0\";
-}")
+ (match_operand:DI 2 "general_operand" "Fsro,Fs")))]
+ "!TARGET_QMATH"
+ "* return vax_output_int_add (insn, operands, DImode);")
;;- All kinds of subtract instructions.
@@ -373,56 +413,34 @@
(define_insn "sub<mode>3"
[(set (match_operand:VAXint 0 "nonimmediate_operand" "=g,g")
- (minus:VAXint (match_operand:VAXint 1 "general_operand" "0,g")
- (match_operand:VAXint 2 "general_operand" "g,g")))]
+ (minus:VAXint (match_operand:VAXint 1 "general_operand" "0,nrmT")
+ (match_operand:VAXint 2 "general_operand" "nrmT,nrmT")))]
""
"@
sub<VAXint:isfx>2 %2,%0
sub<VAXint:isfx>3 %2,%1,%0")
+(define_expand "subdi3"
+ [(set (match_operand:DI 0 "nonimmediate_operand" "=g")
+ (minus:DI (match_operand:DI 1 "general_operand" "g")
+ (match_operand:DI 2 "general_operand" "g")))]
+ "!reload_in_progress"
+ "vax_expand_addsub_di_operands (operands, MINUS); DONE;")
+
+(define_insn "sbcdi3"
+ [(set (match_operand:DI 0 "nonimmediate_addsub_di_operand" "=Rr,=Rr")
+ (minus:DI (match_operand:DI 1 "general_addsub_di_operand" "0,I")
+ (match_operand:DI 2 "general_addsub_di_operand" "nRr,Rr")))]
+ "TARGET_QMATH"
+ "* return vax_output_int_subtract (insn, operands, DImode);")
+
;; The subtract-with-carry (sbwc) instruction only takes two operands.
-(define_insn "subdi3"
+(define_insn "subdi3_old"
[(set (match_operand:DI 0 "nonimmediate_operand" "=or>,or>")
(minus:DI (match_operand:DI 1 "general_operand" "0,or>")
- (match_operand:DI 2 "general_operand" "For,F")))]
- ""
- "*
-{
- rtx low[3];
- const char *pattern;
- int carry = 1;
-
- split_quadword_operands (operands, low, 3);
- /* Subtract low parts. */
- if (rtx_equal_p (operands[0], operands[1]))
- {
- if (low[2] == const0_rtx)
- pattern = 0, carry = 0;
- else if (low[2] == constm1_rtx)
- pattern = \"decl %0\";
- else
- pattern = \"subl2 %2,%0\";
- }
- else
- {
- if (low[2] == constm1_rtx)
- pattern = \"decl %0\";
- else if (low[2] == const0_rtx)
- pattern = get_insn_template (CODE_FOR_movsi, insn), carry = 0;
- else
- pattern = \"subl3 %2,%1,%0\";
- }
- if (pattern)
- output_asm_insn (pattern, low);
- if (carry)
- {
- if (!rtx_equal_p (operands[0], operands[1]))
- return \"movl %1,%0\;sbwc %2,%0\";
- return \"sbwc %2,%0\";
- /* %0 = %2 - %1 - C */
- }
- return get_insn_template (CODE_FOR_subsi3, insn);
-}")
+ (match_operand:DI 2 "general_operand" "Fsor,Fs")))]
+ "!TARGET_QMATH"
+ "* return vax_output_int_subtract (insn, operands, DImode);")
;;- Multiply instructions.
@@ -438,8 +456,8 @@
(define_insn "mul<mode>3"
[(set (match_operand:VAXint 0 "nonimmediate_operand" "=g,g,g")
- (mult:VAXint (match_operand:VAXint 1 "general_operand" "0,g,g")
- (match_operand:VAXint 2 "general_operand" "g,0,g")))]
+ (mult:VAXint (match_operand:VAXint 1 "general_operand" "0,nrmT,nrmT")
+ (match_operand:VAXint 2 "general_operand" "nrmT,0,nrmT")))]
""
"@
mul<VAXint:isfx>2 %2,%0
@@ -449,9 +467,9 @@
(define_insn "mulsidi3"
[(set (match_operand:DI 0 "nonimmediate_operand" "=g")
(mult:DI (sign_extend:DI
- (match_operand:SI 1 "nonimmediate_operand" "g"))
+ (match_operand:SI 1 "nonimmediate_operand" "nrmT"))
(sign_extend:DI
- (match_operand:SI 2 "nonimmediate_operand" "g"))))]
+ (match_operand:SI 2 "nonimmediate_operand" "nrmT"))))]
""
"emul %1,%2,$0,%0")
@@ -459,9 +477,9 @@
[(set (match_operand:DI 0 "nonimmediate_operand" "=g")
(plus:DI
(mult:DI (sign_extend:DI
- (match_operand:SI 1 "nonimmediate_operand" "g"))
+ (match_operand:SI 1 "nonimmediate_operand" "nrmT"))
(sign_extend:DI
- (match_operand:SI 2 "nonimmediate_operand" "g")))
+ (match_operand:SI 2 "nonimmediate_operand" "nrmT")))
(sign_extend:DI (match_operand:SI 3 "nonimmediate_operand" "g"))))]
""
"emul %1,%2,%3,%0")
@@ -471,9 +489,9 @@
[(set (match_operand:DI 0 "nonimmediate_operand" "=g")
(plus:DI
(mult:DI (sign_extend:DI
- (match_operand:SI 1 "nonimmediate_operand" "g"))
+ (match_operand:SI 1 "nonimmediate_operand" "nrmT"))
(sign_extend:DI
- (match_operand:SI 2 "nonimmediate_operand" "g")))
+ (match_operand:SI 2 "nonimmediate_operand" "nrmT")))
(match_operand:DI 3 "immediate_operand" "F")))]
"GET_CODE (operands[3]) == CONST_DOUBLE
&& CONST_DOUBLE_HIGH (operands[3]) == (CONST_DOUBLE_LOW (operands[3]) >> 31)"
@@ -497,8 +515,8 @@
(define_insn "div<mode>3"
[(set (match_operand:VAXint 0 "nonimmediate_operand" "=g,g")
- (div:VAXint (match_operand:VAXint 1 "general_operand" "0,g")
- (match_operand:VAXint 2 "general_operand" "g,g")))]
+ (div:VAXint (match_operand:VAXint 1 "general_operand" "0,nrmT")
+ (match_operand:VAXint 2 "general_operand" "nrmT,nrmT")))]
""
"@
div<VAXint:isfx>2 %2,%0
@@ -527,14 +545,14 @@
rtx op1 = operands[1];
/* If there is a constant argument, complement that one. */
- if (CONST_INT_P (operands[2]) && !CONST_INT_P (op1))
+ if (GET_CODE (operands[2]) == CONST_INT && GET_CODE (op1) != CONST_INT)
{
operands[1] = operands[2];
operands[2] = op1;
op1 = operands[1];
}
- if (CONST_INT_P (op1))
+ if (GET_CODE (op1) == CONST_INT)
operands[1] = GEN_INT (~INTVAL (op1));
else
operands[1] = expand_unop (<MODE>mode, one_cmpl_optab, op1, 0, 1);
@@ -542,8 +560,8 @@
(define_insn "*and<mode>"
[(set (match_operand:VAXint 0 "nonimmediate_operand" "=g,g")
- (and:VAXint (not:VAXint (match_operand:VAXint 1 "general_operand" "g,g"))
- (match_operand:VAXint 2 "general_operand" "0,g")))]
+ (and:VAXint (not:VAXint (match_operand:VAXint 1 "general_operand" "nrmT,nrmT"))
+ (match_operand:VAXint 2 "general_operand" "0,nrmT")))]
""
"@
bic<VAXint:isfx>2 %1,%0
@@ -556,7 +574,7 @@
(define_insn "*and<mode>_const_int"
[(set (match_operand:VAXint 0 "nonimmediate_operand" "=g,g")
- (and:VAXint (match_operand:VAXint 1 "general_operand" "0,g")
+ (and:VAXint (match_operand:VAXint 1 "general_operand" "0,nrmT")
(match_operand:VAXint 2 "const_int_operand" "n,n")))]
""
"@
@@ -568,8 +586,8 @@
(define_insn "ior<mode>3"
[(set (match_operand:VAXint 0 "nonimmediate_operand" "=g,g,g")
- (ior:VAXint (match_operand:VAXint 1 "general_operand" "0,g,g")
- (match_operand:VAXint 2 "general_operand" "g,0,g")))]
+ (ior:VAXint (match_operand:VAXint 1 "general_operand" "0,nrmT,nrmT")
+ (match_operand:VAXint 2 "general_operand" "nrmT,0,nrmT")))]
""
"@
bis<VAXint:isfx>2 %2,%0
@@ -580,8 +598,8 @@
(define_insn "xor<mode>3"
[(set (match_operand:VAXint 0 "nonimmediate_operand" "=g,g,g")
- (xor:VAXint (match_operand:VAXint 1 "general_operand" "0,g,g")
- (match_operand:VAXint 2 "general_operand" "g,0,g")))]
+ (xor:VAXint (match_operand:VAXint 1 "general_operand" "0,nrmT,nrmT")
+ (match_operand:VAXint 2 "general_operand" "nrmT,0,nrmT")))]
""
"@
xor<VAXint:isfx>2 %2,%0
@@ -597,13 +615,13 @@
(define_insn "neg<mode>2"
[(set (match_operand:VAXint 0 "nonimmediate_operand" "=g")
- (neg:VAXint (match_operand:VAXint 1 "general_operand" "g")))]
+ (neg:VAXint (match_operand:VAXint 1 "general_operand" "nrmT")))]
""
"mneg<VAXint:isfx> %1,%0")
(define_insn "one_cmpl<mode>2"
[(set (match_operand:VAXint 0 "nonimmediate_operand" "=g")
- (not:VAXint (match_operand:VAXint 1 "general_operand" "g")))]
+ (not:VAXint (match_operand:VAXint 1 "general_operand" "nrmT")))]
""
"mcom<VAXint:isfx> %1,%0")
@@ -620,43 +638,51 @@
""
"
{
- if (!CONST_INT_P (operands[2]))
+ if (GET_CODE (operands[2]) != CONST_INT)
operands[2] = gen_rtx_NEG (QImode, negate_rtx (QImode, operands[2]));
}")
(define_insn ""
[(set (match_operand:SI 0 "nonimmediate_operand" "=g")
- (ashiftrt:SI (match_operand:SI 1 "general_operand" "g")
+ (ashiftrt:SI (match_operand:SI 1 "general_operand" "nrmT")
(match_operand:QI 2 "const_int_operand" "n")))]
""
"ashl $%n2,%1,%0")
(define_insn ""
[(set (match_operand:SI 0 "nonimmediate_operand" "=g")
- (ashiftrt:SI (match_operand:SI 1 "general_operand" "g")
+ (ashiftrt:SI (match_operand:SI 1 "general_operand" "nrmT")
(neg:QI (match_operand:QI 2 "general_operand" "g"))))]
""
"ashl %2,%1,%0")
(define_insn "ashlsi3"
[(set (match_operand:SI 0 "nonimmediate_operand" "=g")
- (ashift:SI (match_operand:SI 1 "general_operand" "g")
+ (ashift:SI (match_operand:SI 1 "general_operand" "nrmT")
(match_operand:QI 2 "general_operand" "g")))]
""
"*
{
if (operands[2] == const1_rtx && rtx_equal_p (operands[0], operands[1]))
return \"addl2 %0,%0\";
- if (REG_P (operands[1])
- && CONST_INT_P (operands[2]))
+ if (GET_CODE (operands[1]) == REG
+ && GET_CODE (operands[2]) == CONST_INT)
{
int i = INTVAL (operands[2]);
if (i == 1)
return \"addl3 %1,%1,%0\";
- if (i == 2)
- return \"moval 0[%1],%0\";
- if (i == 3)
- return \"movad 0[%1],%0\";
+ if (i == 2 && !optimize_size)
+ {
+ if (push_operand (operands[0], SImode))
+ return \"pushal 0[%1]\";
+ return \"moval 0[%1],%0\";
+ }
+ if (i == 3 && !optimize_size)
+ {
+ if (push_operand (operands[0], SImode))
+ return \"pushaq 0[%1]\";
+ return \"movaq 0[%1],%0\";
+ }
}
return \"ashl %2,%1,%0\";
}")
@@ -695,7 +721,7 @@
[(set (match_dup 3)
(minus:QI (const_int 32)
(match_dup 4)))
- (set (match_operand:SI 0 "general_operand" "=g")
+ (set (match_operand:SI 0 "nonimmediate_operand" "=g")
(zero_extract:SI (match_operand:SI 1 "register_operand" "r")
(match_dup 3)
(match_operand:SI 2 "register_operand" "g")))]
@@ -714,27 +740,27 @@
""
"
{
- if (!CONST_INT_P (operands[2]))
+ if (GET_CODE (operands[2]) != CONST_INT)
operands[2] = gen_rtx_NEG (QImode, negate_rtx (QImode, operands[2]));
}")
(define_insn "rotlsi3"
[(set (match_operand:SI 0 "nonimmediate_operand" "=g")
- (rotate:SI (match_operand:SI 1 "general_operand" "g")
+ (rotate:SI (match_operand:SI 1 "general_operand" "nrmT")
(match_operand:QI 2 "general_operand" "g")))]
""
"rotl %2,%1,%0")
(define_insn ""
[(set (match_operand:SI 0 "nonimmediate_operand" "=g")
- (rotatert:SI (match_operand:SI 1 "general_operand" "g")
+ (rotatert:SI (match_operand:SI 1 "general_operand" "nrmT")
(match_operand:QI 2 "const_int_operand" "n")))]
""
"rotl %R2,%1,%0")
(define_insn ""
[(set (match_operand:SI 0 "nonimmediate_operand" "=g")
- (rotatert:SI (match_operand:SI 1 "general_operand" "g")
+ (rotatert:SI (match_operand:SI 1 "general_operand" "nrmT")
(neg:QI (match_operand:QI 2 "general_operand" "g"))))]
""
"rotl %2,%1,%0")
@@ -760,8 +786,8 @@
(match_operand:SI 3 "general_operand" "g"))]
"(INTVAL (operands[1]) == 8 || INTVAL (operands[1]) == 16)
&& INTVAL (operands[2]) % INTVAL (operands[1]) == 0
- && (REG_P (operands[0])
- || !mode_dependent_address_p (XEXP (operands[0], 0)))"
+ && (GET_CODE (operands[0]) == REG
+ || ! mode_dependent_address_p (XEXP (operands[0], 0)))"
"*
{
if (REG_P (operands[0]))
@@ -788,8 +814,8 @@
(match_operand:SI 3 "const_int_operand" "n")))]
"(INTVAL (operands[2]) == 8 || INTVAL (operands[2]) == 16)
&& INTVAL (operands[3]) % INTVAL (operands[2]) == 0
- && (REG_P (operands[1])
- || !mode_dependent_address_p (XEXP (operands[1], 0)))"
+ && (GET_CODE (operands[1]) == REG
+ || ! mode_dependent_address_p (XEXP (operands[1], 0)))"
"*
{
if (REG_P (operands[1]))
@@ -815,8 +841,8 @@
(match_operand:SI 3 "const_int_operand" "n")))]
"(INTVAL (operands[2]) == 8 || INTVAL (operands[2]) == 16)
&& INTVAL (operands[3]) % INTVAL (operands[2]) == 0
- && (REG_P (operands[1])
- || !mode_dependent_address_p (XEXP (operands[1], 0)))"
+ && (GET_CODE (operands[1]) == REG
+ || ! mode_dependent_address_p (XEXP (operands[1], 0)))"
"*
{
if (REG_P (operands[1]))
@@ -842,8 +868,8 @@
(compare
(sign_extract:SI (match_operand:SI 0 "register_operand" "r")
(match_operand:QI 1 "general_operand" "g")
- (match_operand:SI 2 "general_operand" "g"))
- (match_operand:SI 3 "general_operand" "g")))]
+ (match_operand:SI 2 "general_operand" "nrmT"))
+ (match_operand:SI 3 "general_operand" "nrmT")))]
""
"cmpv %2,%1,%0,%3")
@@ -852,8 +878,8 @@
(compare
(zero_extract:SI (match_operand:SI 0 "register_operand" "r")
(match_operand:QI 1 "general_operand" "g")
- (match_operand:SI 2 "general_operand" "g"))
- (match_operand:SI 3 "general_operand" "g")))]
+ (match_operand:SI 2 "general_operand" "nrmT"))
+ (match_operand:SI 3 "general_operand" "nrmT")))]
""
"cmpzv %2,%1,%0,%3")
@@ -866,13 +892,12 @@
[(set (match_operand:SI 0 "nonimmediate_operand" "=g")
(sign_extract:SI (match_operand:SI 1 "register_operand" "ro")
(match_operand:QI 2 "general_operand" "g")
- (match_operand:SI 3 "general_operand" "g")))]
+ (match_operand:SI 3 "general_operand" "nrmT")))]
""
"*
{
- if (!CONST_INT_P (operands[3])
- || !CONST_INT_P (operands[2])
- || !REG_P (operands[0])
+ if (GET_CODE (operands[3]) != CONST_INT || GET_CODE (operands[2]) != CONST_INT
+ || GET_CODE (operands[0]) != REG
|| (INTVAL (operands[2]) != 8 && INTVAL (operands[2]) != 16))
return \"extv %3,%2,%1,%0\";
if (INTVAL (operands[2]) == 8)
@@ -884,13 +909,12 @@
[(set (match_operand:SI 0 "nonimmediate_operand" "=g")
(zero_extract:SI (match_operand:SI 1 "register_operand" "ro")
(match_operand:QI 2 "general_operand" "g")
- (match_operand:SI 3 "general_operand" "g")))]
+ (match_operand:SI 3 "general_operand" "nrmT")))]
""
"*
{
- if (!CONST_INT_P (operands[3])
- || !CONST_INT_P (operands[2])
- || !REG_P (operands[0]))
+ if (GET_CODE (operands[3]) != CONST_INT || GET_CODE (operands[2]) != CONST_INT
+ || GET_CODE (operands[0]) != REG)
return \"extzv %3,%2,%1,%0\";
if (INTVAL (operands[2]) == 8)
return \"rotl %R3,%1,%0\;movzbl %0,%0\";
@@ -912,8 +936,8 @@
(compare
(sign_extract:SI (match_operand:QI 0 "memory_operand" "m")
(match_operand:QI 1 "general_operand" "g")
- (match_operand:SI 2 "general_operand" "g"))
- (match_operand:SI 3 "general_operand" "g")))]
+ (match_operand:SI 2 "general_operand" "nrmT"))
+ (match_operand:SI 3 "general_operand" "nrmT")))]
""
"cmpv %2,%1,%0,%3")
@@ -922,8 +946,8 @@
(compare
(zero_extract:SI (match_operand:QI 0 "nonimmediate_operand" "rm")
(match_operand:QI 1 "general_operand" "g")
- (match_operand:SI 2 "general_operand" "g"))
- (match_operand:SI 3 "general_operand" "g")))]
+ (match_operand:SI 2 "general_operand" "nrmT"))
+ (match_operand:SI 3 "general_operand" "nrmT")))]
""
"cmpzv %2,%1,%0,%3")
@@ -931,12 +955,11 @@
[(set (match_operand:SI 0 "nonimmediate_operand" "=g")
(sign_extract:SI (match_operand:QI 1 "memory_operand" "m")
(match_operand:QI 2 "general_operand" "g")
- (match_operand:SI 3 "general_operand" "g")))]
+ (match_operand:SI 3 "general_operand" "nrmT")))]
""
"*
{
- if (!REG_P (operands[0])
- || !CONST_INT_P (operands[2])
+ if (!REG_P (operands[0]) || !CONST_INT_P (operands[2])
|| !CONST_INT_P (operands[3])
|| (INTVAL (operands[2]) != 8 && INTVAL (operands[2]) != 16)
|| INTVAL (operands[2]) + INTVAL (operands[3]) > 32
@@ -961,12 +984,11 @@
[(set (match_operand:SI 0 "nonimmediate_operand" "=g")
(zero_extract:SI (match_operand:QI 1 "memory_operand" "m")
(match_operand:QI 2 "general_operand" "g")
- (match_operand:SI 3 "general_operand" "g")))]
+ (match_operand:SI 3 "general_operand" "nrmT")))]
""
"*
{
- if (!REG_P (operands[0])
- || !CONST_INT_P (operands[2])
+ if (!REG_P (operands[0]) || !CONST_INT_P (operands[2])
|| !CONST_INT_P (operands[3])
|| INTVAL (operands[2]) + INTVAL (operands[3]) > 32
|| side_effects_p (operands[1])
@@ -977,6 +999,28 @@
return \"rotl %R3,%1,%0\;movzbl %0,%0\";
if (INTVAL (operands[2]) == 16)
return \"rotl %R3,%1,%0\;movzwl %0,%0\";
+ if (MEM_P (operands[1])
+ && GET_CODE (XEXP (operands[1], 0)) == PLUS
+ && REG_P (XEXP (XEXP (operands[1], 0), 0))
+ && CONST_INT_P (XEXP (XEXP (operands[1], 0), 1))
+ && CONST_INT_P (operands[2])
+ && CONST_INT_P (operands[3]))
+ {
+ HOST_WIDE_INT o = INTVAL (XEXP (XEXP (operands[1], 0), 1));
+ HOST_WIDE_INT l = INTVAL (operands[2]);
+ HOST_WIDE_INT v = INTVAL (operands[3]);
+ if ((o & 3) && (o & 3) * 8 + v + l <= 32)
+ {
+ rtx tmp;
+ tmp = XEXP (XEXP (operands[1], 0), 0);
+ if (o & ~3)
+ tmp = gen_rtx_PLUS (SImode, tmp, GEN_INT (o & ~3));
+ operands[1] = gen_rtx_MEM (QImode, tmp);
+ operands[3] = GEN_INT (v + (o & 3) * 8);
+ }
+ if (optimize_size)
+ return \"extzv %3,%2,%1,%0\";
+ }
return \"rotl %R3,%1,%0\;bicl2 %M2,%0\";
}")
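The block added above folds the sub-longword part of a byte offset into the
bit position; an illustrative stand-alone version of that arithmetic (not the
actual compiler code) is:

/* Illustration only: given a byte offset *O from a base register, a bit
   position *V and a field length LEN, fold the low two bits of the offset
   into the bit position whenever the field still lies within the first 32
   bits of the longword-aligned address, as the pattern above does.  */
static void
fold_bitfield_offset (long *o, long *v, long len)
{
  if ((*o & 3) && (*o & 3) * 8 + *v + len <= 32)
    {
      *v += (*o & 3) * 8;	/* operands[3] = GEN_INT (v + (o & 3) * 8) */
      *o &= ~3;			/* keep only the longword-aligned part */
    }
}
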
@@ -991,16 +1035,39 @@
(define_insn ""
[(set (zero_extract:SI (match_operand:QI 0 "memory_operand" "+g")
(match_operand:QI 1 "general_operand" "g")
- (match_operand:SI 2 "general_operand" "g"))
- (match_operand:SI 3 "general_operand" "g"))]
+ (match_operand:SI 2 "general_operand" "nrmT"))
+ (match_operand:SI 3 "general_operand" "nrmT"))]
""
- "insv %3,%2,%1,%0")
+ "*
+{
+ if (MEM_P (operands[0])
+ && GET_CODE (XEXP (operands[0], 0)) == PLUS
+ && REG_P (XEXP (XEXP (operands[0], 0), 0))
+ && CONST_INT_P (XEXP (XEXP (operands[0], 0), 1))
+ && CONST_INT_P (operands[1])
+ && CONST_INT_P (operands[2]))
+ {
+ HOST_WIDE_INT o = INTVAL (XEXP (XEXP (operands[0], 0), 1));
+ HOST_WIDE_INT v = INTVAL (operands[2]);
+ HOST_WIDE_INT l = INTVAL (operands[1]);
+ if ((o & 3) && (o & 3) * 8 + v + l <= 32)
+ {
+ rtx tmp;
+ tmp = XEXP (XEXP (operands[0], 0), 0);
+ if (o & ~3)
+ tmp = gen_rtx_PLUS (SImode, tmp, GEN_INT (o & ~3));
+ operands[0] = gen_rtx_MEM (QImode, tmp);
+ operands[2] = GEN_INT (v + (o & 3) * 8);
+ }
+ }
+ return \"insv %3,%2,%1,%0\";
+}")
(define_insn ""
[(set (zero_extract:SI (match_operand:SI 0 "register_operand" "+r")
(match_operand:QI 1 "general_operand" "g")
- (match_operand:SI 2 "general_operand" "g"))
- (match_operand:SI 3 "general_operand" "g"))]
+ (match_operand:SI 2 "general_operand" "nrmT"))
+ (match_operand:SI 3 "general_operand" "nrmT"))]
""
"insv %3,%2,%1,%0")
@@ -1044,7 +1111,7 @@
(if_then_else
(ne (zero_extract:SI (match_operand:QI 0 "memory_operand" "Q,g")
(const_int 1)
- (match_operand:SI 1 "general_operand" "I,g"))
+ (match_operand:SI 1 "general_operand" "I,nrmT"))
(const_int 0))
(label_ref (match_operand 2 "" ""))
(pc)))]
@@ -1058,7 +1125,7 @@
(if_then_else
(eq (zero_extract:SI (match_operand:QI 0 "memory_operand" "Q,g")
(const_int 1)
- (match_operand:SI 1 "general_operand" "I,g"))
+ (match_operand:SI 1 "general_operand" "I,nrmT"))
(const_int 0))
(label_ref (match_operand 2 "" ""))
(pc)))]
@@ -1072,7 +1139,7 @@
(if_then_else
(ne (zero_extract:SI (match_operand:SI 0 "register_operand" "r,r")
(const_int 1)
- (match_operand:SI 1 "general_operand" "I,g"))
+ (match_operand:SI 1 "general_operand" "I,nrmT"))
(const_int 0))
(label_ref (match_operand 2 "" ""))
(pc)))]
@@ -1086,7 +1153,7 @@
(if_then_else
(eq (zero_extract:SI (match_operand:SI 0 "register_operand" "r,r")
(const_int 1)
- (match_operand:SI 1 "general_operand" "I,g"))
+ (match_operand:SI 1 "general_operand" "I,nrmT"))
(const_int 0))
(label_ref (match_operand 2 "" ""))
(pc)))]
@@ -1135,7 +1202,7 @@
(if_then_else
(lt (plus:SI (match_operand:SI 0 "nonimmediate_operand" "+g")
(const_int 1))
- (match_operand:SI 1 "general_operand" "g"))
+ (match_operand:SI 1 "general_operand" "nrmT"))
(label_ref (match_operand 2 "" ""))
(pc)))
(set (match_dup 0)
@@ -1148,13 +1215,13 @@
[(set (pc)
(if_then_else
(lt (match_operand:SI 0 "nonimmediate_operand" "+g")
- (match_operand:SI 1 "general_operand" "g"))
+ (match_operand:SI 1 "general_operand" "nrmT"))
(label_ref (match_operand 2 "" ""))
(pc)))
(set (match_dup 0)
(plus:SI (match_dup 0)
(const_int 1)))]
- "!TARGET_UNIX_ASM && CONST_INT_P (operands[1])"
+ "!TARGET_UNIX_ASM && GET_CODE (operands[1]) == CONST_INT"
"jaoblss %P1,%0,%l2")
(define_insn ""
@@ -1162,7 +1229,7 @@
(if_then_else
(le (plus:SI (match_operand:SI 0 "nonimmediate_operand" "+g")
(const_int 1))
- (match_operand:SI 1 "general_operand" "g"))
+ (match_operand:SI 1 "general_operand" "nrmT"))
(label_ref (match_operand 2 "" ""))
(pc)))
(set (match_dup 0)
@@ -1175,13 +1242,13 @@
[(set (pc)
(if_then_else
(le (match_operand:SI 0 "nonimmediate_operand" "+g")
- (match_operand:SI 1 "general_operand" "g"))
+ (match_operand:SI 1 "general_operand" "nrmT"))
(label_ref (match_operand 2 "" ""))
(pc)))
(set (match_dup 0)
(plus:SI (match_dup 0)
(const_int 1)))]
- "!TARGET_UNIX_ASM && CONST_INT_P (operands[1])"
+ "!TARGET_UNIX_ASM && GET_CODE (operands[1]) == CONST_INT"
"jaobleq %P1,%0,%l2")
;; Something like a sob insn, but compares against -1.
@@ -1413,13 +1480,37 @@
[(match_operand:SI 1 "const_int_operand" "n")
(set (pc)
(plus:SI (sign_extend:SI
- (mem:HI (plus:SI (mult:SI (match_operand:SI 0 "general_operand" "g")
+ (mem:HI (plus:SI (mult:SI (match_operand:SI 0 "general_operand" "nrmT")
(const_int 2))
(pc))))
(label_ref:SI (match_operand 2 "" ""))))]
""
"casel %0,$0,%1")
+(define_insn "pushextsym"
+ [(set (match_operand:SI 0 "push_operand" "=g")
+ (match_operand:SI 1 "external_symbolic_operand" "i"))]
+ ""
+ "pushab %a1")
+
+(define_insn "movextsym"
+ [(set (match_operand:SI 0 "nonimmediate_operand" "=g")
+ (match_operand:SI 1 "external_symbolic_operand" "i"))]
+ ""
+ "movab %a1,%0")
+
+(define_insn "pushlclsym"
+ [(set (match_operand:SI 0 "push_operand" "=g")
+ (match_operand:SI 1 "local_symbolic_operand" "i"))]
+ ""
+ "pushab %a1")
+
+(define_insn "movlclsym"
+ [(set (match_operand:SI 0 "nonimmediate_operand" "=g")
+ (match_operand:SI 1 "local_symbolic_operand" "i"))]
+ ""
+ "movab %a1,%0")
+
;;- load or push effective address
;; These come after the move and add/sub patterns
;; because we don't want pushl $1 turned into pushad 1.
@@ -1428,77 +1519,29 @@
;; It does not work to use constraints to distinguish pushes from moves,
;; because < matches any autodecrement, not just a push.
-(define_insn ""
- [(set (match_operand:SI 0 "nonimmediate_operand" "=g")
- (match_operand:QI 1 "address_operand" "p"))]
+(define_insn "pushaddr<mode>"
+ [(set (match_operand:SI 0 "push_operand" "=g")
+ (match_operand:VAXintQHSD 1 "address_operand" "p"))]
""
- "*
-{
- if (push_operand (operands[0], SImode))
- return \"pushab %a1\";
- else
- return \"movab %a1,%0\";
-}")
+ "pusha<VAXintQHSD:isfx> %a1")
-(define_insn ""
+(define_insn "movaddr<mode>"
[(set (match_operand:SI 0 "nonimmediate_operand" "=g")
- (match_operand:HI 1 "address_operand" "p"))]
+ (match_operand:VAXintQHSD 1 "address_operand" "p"))]
""
- "*
-{
- if (push_operand (operands[0], SImode))
- return \"pushaw %a1\";
- else
- return \"movaw %a1,%0\";
-}")
+ "mova<VAXintQHSD:isfx> %a1,%0")
-(define_insn ""
- [(set (match_operand:SI 0 "nonimmediate_operand" "=g")
- (match_operand:SI 1 "address_operand" "p"))]
+(define_insn "pushaddr<mode>"
+ [(set (match_operand:SI 0 "push_operand" "=g")
+ (match_operand:VAXfp 1 "address_operand" "p"))]
""
- "*
-{
- if (push_operand (operands[0], SImode))
- return \"pushal %a1\";
- else
- return \"moval %a1,%0\";
-}")
+ "pusha<VAXfp:fsfx> %a1")
-(define_insn ""
+(define_insn "movaddr<mode>"
[(set (match_operand:SI 0 "nonimmediate_operand" "=g")
- (match_operand:DI 1 "address_operand" "p"))]
+ (match_operand:VAXfp 1 "address_operand" "p"))]
""
- "*
-{
- if (push_operand (operands[0], SImode))
- return \"pushaq %a1\";
- else
- return \"movaq %a1,%0\";
-}")
-
-(define_insn ""
- [(set (match_operand:SI 0 "nonimmediate_operand" "=g")
- (match_operand:SF 1 "address_operand" "p"))]
- ""
- "*
-{
- if (push_operand (operands[0], SImode))
- return \"pushaf %a1\";
- else
- return \"movaf %a1,%0\";
-}")
-
-(define_insn ""
- [(set (match_operand:SI 0 "nonimmediate_operand" "=g")
- (match_operand:DF 1 "address_operand" "p"))]
- ""
- "*
-{
- if (push_operand (operands[0], SImode))
- return \"pushad %a1\";
- else
- return \"movad %a1,%0\";
-}")
+ "mova<VAXfp:fsfx> %a1,%0")
;; These used to be peepholes, but it is more straightforward to do them
;; as single insns. However, we must force the output to be a register
@@ -1515,7 +1558,7 @@
(define_insn ""
[(set (match_operand:SI 0 "nonimmediate_operand" "=ro")
- (and:SI (ashiftrt:SI (match_operand:SI 1 "general_operand" "g")
+ (and:SI (ashiftrt:SI (match_operand:SI 1 "general_operand" "nrmT")
(match_operand:QI 2 "const_int_operand" "n"))
(match_operand:SI 3 "const_int_operand" "n")))]
"(INTVAL (operands[3]) & ~((1 << (32 - INTVAL (operands[2]))) - 1)) == 0"
@@ -1537,7 +1580,7 @@
(define_insn ""
[(set (match_operand:SI 0 "nonimmediate_operand" "=ro")
- (and:SI (ashift:SI (match_operand:SI 1 "general_operand" "g")
+ (and:SI (ashift:SI (match_operand:SI 1 "general_operand" "nrmT")
(match_operand:QI 2 "const_int_operand" "n"))
(match_operand:SI 3 "const_int_operand" "n")))]
""
@@ -1553,3 +1596,29 @@
[(unspec_volatile [(const_int 0)] VUNSPEC_SYNC_ISTREAM)]
""
"movpsl -(%|sp)\;pushal 1(%|pc)\;rei")
+
+(define_expand "nonlocal_goto"
+ [(use (match_operand 0 "general_operand" ""))
+ (use (match_operand 1 "general_operand" ""))
+ (use (match_operand 2 "general_operand" ""))
+ (use (match_operand 3 "general_operand" ""))]
+ ""
+{
+ rtx lab = operands[1];
+ rtx stack = operands[2];
+ rtx fp = operands[3];
+
+ emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
+ emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
+
+ emit_move_insn (hard_frame_pointer_rtx, fp);
+ emit_stack_restore (SAVE_NONLOCAL, stack, NULL_RTX);
+
+ emit_use (hard_frame_pointer_rtx);
+ emit_use (stack_pointer_rtx);
+
+ /* We'll convert this to a direct jump via a peephole optimization.  */
+ emit_indirect_jump (copy_to_reg (lab));
+ emit_barrier ();
+ DONE;
+})
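
Finally, a hypothetical test case (not included in the patch) of the kind that
exercises this expander: in GNU C, a goto from a nested function to a label in
its containing function is a non-local goto.

/* Hypothetical test case: the goto out of the nested function makes GCC
   expand the nonlocal_goto pattern defined above.  */
int
sum_until (int n)
{
  __label__ out;
  int r = 0;
  int i;

  void step (int j)
  {
    if (j == n)
      goto out;			/* non-local goto back into sum_until */
    r += j;
  }

  for (i = 0; i < 100; i++)
    step (i);
 out:
  return r;
}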