Patch zend_operators.h.patch for Compile Failure Bug #64780
Patch version 2013-05-07 04:02 UTC
Return to Bug #64780
Download this patch
Patch Revisions:
Developer: sixd@php.net
diff --git a/Zend/zend_operators.h b/Zend/zend_operators.h
index a82c14b..4cd38e1 100644
--- a/Zend/zend_operators.h
+++ b/Zend/zend_operators.h
@@ -502,7 +502,17 @@ ZEND_API void zend_update_current_locale(void);
static zend_always_inline int fast_increment_function(zval *op1)
{
if (EXPECTED(Z_TYPE_P(op1) == IS_LONG)) {
-#if defined(__GNUC__) && defined(__i386__)
+#if defined(__GNUC__) && __GNUC__ <= 3 && defined(__i386__)
+ __asm__(
+ "incl (%0)\n\t"
+ "jno 0f\n\t"
+ "movl $0x0, (%0)\n\t"
+ "movl $0x41e00000, 0x4(%0)\n\t"
+ "movb $0x2,0xc(%0)\n"
+ "0:"
+ :
+ : "r"(op1));
+#elif defined(__GNUC__) && __GNUC__ > 3 && defined(__i386__)
__asm__(
"incl (%0)\n\t"
"jno 0f\n\t"
@@ -515,7 +525,17 @@ static zend_always_inline int fast_increment_function(zval *op1)
"n"(IS_DOUBLE),
"n"(ZVAL_OFFSETOF_TYPE)
: "cc");
-#elif defined(__GNUC__) && defined(__x86_64__)
+#elif defined(__GNUC__) && __GNUC__ <= 3 && defined(__x86_64__)
+ __asm__(
+ "incq (%0)\n\t"
+ "jno 0f\n\t"
+ "movl $0x0, (%0)\n\t"
+ "movl $0x43e00000, 0x4(%0)\n\t"
+ "movb $0x2,0x14(%0)\n"
+ "0:"
+ :
+ : "r"(op1));
+#elif defined(__GNUC__) && __GNUC__ > 3 && defined(__x86_64__)
__asm__(
"incq (%0)\n\t"
"jno 0f\n\t"
@@ -545,7 +565,17 @@ static zend_always_inline int fast_increment_function(zval *op1)
static zend_always_inline int fast_decrement_function(zval *op1)
{
if (EXPECTED(Z_TYPE_P(op1) == IS_LONG)) {
-#if defined(__GNUC__) && defined(__i386__)
+#if defined(__GNUC__) && __GNUC__ <= 3 && defined(__i386__)
+ __asm__(
+ "decl (%0)\n\t"
+ "jno 0f\n\t"
+ "movl $0x00200000, (%0)\n\t"
+ "movl $0xc1e00000, 0x4(%0)\n\t"
+ "movb $0x2,0xc(%0)\n"
+ "0:"
+ :
+ : "r"(op1));
+#elif defined(__GNUC__) && __GNUC__ > 3 && defined(__i386__)
__asm__(
"decl (%0)\n\t"
"jno 0f\n\t"
@@ -558,7 +588,17 @@ static zend_always_inline int fast_decrement_function(zval *op1)
"n"(IS_DOUBLE),
"n"(ZVAL_OFFSETOF_TYPE)
: "cc");
-#elif defined(__GNUC__) && defined(__x86_64__)
+#elif defined(__GNUC__) && __GNUC__ <= 3 && defined(__x86_64__)
+ __asm__(
+ "decq (%0)\n\t"
+ "jno 0f\n\t"
+ "movl $0x00000000, (%0)\n\t"
+ "movl $0xc3e00000, 0x4(%0)\n\t"
+ "movb $0x2,0x14(%0)\n"
+ "0:"
+ :
+ : "r"(op1));
+#elif defined(__GNUC__) && __GNUC__ > 3 && defined(__x86_64__)
__asm__(
"decq (%0)\n\t"
"jno 0f\n\t"
@@ -589,7 +629,27 @@ static zend_always_inline int fast_add_function(zval *result, zval *op1, zval *o
{
if (EXPECTED(Z_TYPE_P(op1) == IS_LONG)) {
if (EXPECTED(Z_TYPE_P(op2) == IS_LONG)) {
-#if defined(__GNUC__) && defined(__i386__)
+#if defined(__GNUC__) && __GNUC__ <= 3 && defined(__i386__)
+ __asm__(
+ "movl (%1), %%eax\n\t"
+ "addl (%2), %%eax\n\t"
+ "jo 0f\n\t"
+ "movl %%eax, (%0)\n\t"
+ "movb $0x1,0xc(%0)\n\t"
+ "jmp 1f\n"
+ "0:\n\t"
+ "fildl (%1)\n\t"
+ "fildl (%2)\n\t"
+ "faddp %%st, %%st(1)\n\t"
+ "movb $0x2,0xc(%0)\n\t"
+ "fstpl (%0)\n"
+ "1:"
+ :
+ : "r"(result),
+ "r"(op1),
+ "r"(op2)
+ : "eax");
+#elif defined(__GNUC__) && __GNUC__ > 3 && defined(__i386__)
__asm__(
"movl (%1), %%eax\n\t"
"addl (%2), %%eax\n\t"
@@ -612,7 +672,27 @@ static zend_always_inline int fast_add_function(zval *result, zval *op1, zval *o
"n"(IS_DOUBLE),
"n"(ZVAL_OFFSETOF_TYPE)
: "eax","cc");
-#elif defined(__GNUC__) && defined(__x86_64__)
+#elif defined(__GNUC__) && __GNUC__ <= 3 && defined(__x86_64__)
+ __asm__(
+ "movq (%1), %%rax\n\t"
+ "addq (%2), %%rax\n\t"
+ "jo 0f\n\t"
+ "movq %%rax, (%0)\n\t"
+ "movb $0x1,0x14(%0)\n\t"
+ "jmp 1f\n"
+ "0:\n\t"
+ "fildq (%1)\n\t"
+ "fildq (%2)\n\t"
+ "faddp %%st, %%st(1)\n\t"
+ "movb $0x2,0x14(%0)\n\t"
+ "fstpl (%0)\n"
+ "1:"
+ :
+ : "r"(result),
+ "r"(op1),
+ "r"(op2)
+ : "rax");
+#elif defined(__GNUC__) && __GNUC__ > 3 && defined(__x86_64__)
__asm__(
"movq (%1), %%rax\n\t"
"addq (%2), %%rax\n\t"
@@ -670,7 +750,31 @@ static zend_always_inline int fast_sub_function(zval *result, zval *op1, zval *o
{
if (EXPECTED(Z_TYPE_P(op1) == IS_LONG)) {
if (EXPECTED(Z_TYPE_P(op2) == IS_LONG)) {
-#if defined(__GNUC__) && defined(__i386__)
+#if defined(__GNUC__) && __GNUC__ <= 3 && defined(__i386__)
+ __asm__(
+ "movl (%1), %%eax\n\t"
+ "subl (%2), %%eax\n\t"
+ "jo 0f\n\t"
+ "movl %%eax, (%0)\n\t"
+ "movb $0x1,0xc(%0)\n\t"
+ "jmp 1f\n"
+ "0:\n\t"
+ "fildl (%2)\n\t"
+ "fildl (%1)\n\t"
+#if defined(__clang__) && (__clang_major__ < 2 || (__clang_major__ == 2 && __clang_minor__ < 10))
+ "fsubp %%st(1), %%st\n\t" /* LLVM bug #9164 */
+#else
+ "fsubp %%st, %%st(1)\n\t"
+#endif
+ "movb $0x2,0xc(%0)\n\t"
+ "fstpl (%0)\n"
+ "1:"
+ :
+ : "r"(result),
+ "r"(op1),
+ "r"(op2)
+ : "eax");
+#elif defined(__GNUC__) && __GNUC__ > 3 && defined(__i386__)
__asm__(
"movl (%1), %%eax\n\t"
"subl (%2), %%eax\n\t"
@@ -697,7 +801,31 @@ static zend_always_inline int fast_sub_function(zval *result, zval *op1, zval *o
"n"(IS_DOUBLE),
"n"(ZVAL_OFFSETOF_TYPE)
: "eax","cc");
-#elif defined(__GNUC__) && defined(__x86_64__)
+#elif defined(__GNUC__) && __GNUC__ <= 3 && defined(__x86_64__)
+ __asm__(
+ "movq (%1), %%rax\n\t"
+ "subq (%2), %%rax\n\t"
+ "jo 0f\n\t"
+ "movq %%rax, (%0)\n\t"
+ "movb $0x1,0x14(%0)\n\t"
+ "jmp 1f\n"
+ "0:\n\t"
+ "fildq (%2)\n\t"
+ "fildq (%1)\n\t"
+#if defined(__clang__) && (__clang_major__ < 2 || (__clang_major__ == 2 && __clang_minor__ < 10))
+ "fsubp %%st(1), %%st\n\t" /* LLVM bug #9164 */
+#else
+ "fsubp %%st, %%st(1)\n\t"
+#endif
+ "movb $0x2,0x14(%0)\n\t"
+ "fstpl (%0)\n"
+ "1:"
+ :
+ : "r"(result),
+ "r"(op1),
+ "r"(op2)
+ : "rax");
+#elif defined(__GNUC__) && __GNUC__ > 3 && defined(__x86_64__)
__asm__(
"movq (%1), %%rax\n\t"
"subq (%2), %%rax\n\t"
|