diff --git a/mpi/longlong.h b/mpi/longlong.h
index 6cfb8c8..f2c806b 100644
--- a/mpi/longlong.h
+++ b/mpi/longlong.h
@@ -2,19 +2,19 @@
    Note: I added some stuff for use with gnupg
 
 Copyright (C) 1991, 1992, 1993, 1994, 1996, 1998,
-              2000, 2001 Free Software Foundation, Inc.
+              2000, 2001, 2002, 2003 Free Software Foundation, Inc.
 
 This file is free software; you can redistribute it and/or modify
-it under the terms of the GNU Library General Public License as published by
-the Free Software Foundation; either version 2 of the License, or (at your
+it under the terms of the GNU Lesser General Public License as published by
+the Free Software Foundation; either version 2.1 of the License, or (at your
 option) any later version.
 
 This file is distributed in the hope that it will be useful, but
 WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
-or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU Library General Public
+or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU Lesser General Public
 License for more details.
 
-You should have received a copy of the GNU Library General Public License
+You should have received a copy of the GNU Lesser General Public License
 along with this file; see the file COPYING.LIB.  If not, write to
 the Free Software Foundation, Inc., 59 Temple Place - Suite 330, Boston,
 MA 02111-1307, USA. */
@@ -94,9 +94,6 @@ MA 02111-1307, USA. */
    Please add support for more CPUs here, or improve the current support
    for the CPUs below! */
 
-#ifdef __riscos__
-#pragma continue_after_hash_error
-#else /* !__riscos__ */
 #if defined (__GNUC__) && !defined (NO_ASM)
 
 /* We sometimes need to clobber "cc" with gcc2, but that would not be
@@ -115,8 +112,8 @@ MA 02111-1307, USA. */
  ***************************************/
 #if (defined (__a29k__) || defined (_AM29K)) && W_TYPE_SIZE == 32
 #define add_ssaaaa(sh, sl, ah, al, bh, bl) \
-  __asm__ ("add %1,%4,%5
-       addc %0,%2,%3"                                                  \
+  __asm__ ("add %1,%4,%5\n"   \
+           "addc %0,%2,%3"                                              \
           : "=r" ((USItype)(sh)),                                      \
            "=&r" ((USItype)(sl))                                       \
           : "%r" ((USItype)(ah)),                                      \
@@ -124,8 +121,8 @@ MA 02111-1307, USA. */
             "%r" ((USItype)(al)),                                      \
             "rI" ((USItype)(bl)))
 #define sub_ddmmss(sh, sl, ah, al, bh, bl) \
-  __asm__ ("sub %1,%4,%5
-       subc %0,%2,%3"                                                  \
+  __asm__ ("sub %1,%4,%5\n"                                             \
+          "subc %0,%2,%3"                                              \
           : "=r" ((USItype)(sh)),                                      \
             "=&r" ((USItype)(sl))                                      \
           : "r" ((USItype)(ah)),                                       \
@@ -186,8 +183,8 @@ extern UDItype __udiv_qrnnd ();
  ***************************************/
 #if defined (__arm__) && W_TYPE_SIZE == 32
 #define add_ssaaaa(sh, sl, ah, al, bh, bl) \
-  __asm__ ("adds        %1, %4, %5
-       adc     %0, %2, %3"                                             \
+  __asm__ ("adds %1, %4, %5\n"                                          \
+          "adc  %0, %2, %3"                                            \
           : "=r" ((USItype)(sh)),                                      \
             "=&r" ((USItype)(sl))                                      \
           : "%r" ((USItype)(ah)),                                      \
@@ -195,8 +192,8 @@ extern UDItype __udiv_qrnnd ();
             "%r" ((USItype)(al)),                                      \
             "rI" ((USItype)(bl)))
 #define sub_ddmmss(sh, sl, ah, al, bh, bl) \
-  __asm__ ("subs        %1, %4, %5
-       sbc     %0, %2, %3"                                             \
+  __asm__ ("subs %1, %4, %5\n"                                          \
+          "sbc  %0, %2, %3"                                            \
           : "=r" ((USItype)(sh)),                                      \
             "=&r" ((USItype)(sl))                                      \
           : "r" ((USItype)(ah)),                                       \
@@ -205,32 +202,32 @@ extern UDItype __udiv_qrnnd ();
             "rI" ((USItype)(bl)))
 #if defined __ARM_ARCH_2__ || defined __ARM_ARCH_3__
 #define umul_ppmm(xh, xl, a, b) \
-  __asm__ ("%@ Inlined umul_ppmm
-       mov     %|r0, %2, lsr #16               @ AAAA
-       mov     %|r2, %3, lsr #16               @ BBBB
-       bic     %|r1, %2, %|r0, lsl #16         @ aaaa
-       bic     %0, %3, %|r2, lsl #16           @ bbbb
-       mul     %1, %|r1, %|r2                  @ aaaa * BBBB
-       mul     %|r2, %|r0, %|r2                @ AAAA * BBBB
-       mul     %|r1, %0, %|r1                  @ aaaa * bbbb
-       mul     %0, %|r0, %0                    @ AAAA * bbbb
-       adds    %|r0, %1, %0                    @ central sum
-       addcs   %|r2, %|r2, #65536
-       adds    %1, %|r1, %|r0, lsl #16
-       adc     %0, %|r2, %|r0, lsr #16"                                \
+  __asm__ ("%@ Inlined umul_ppmm\n"                                     \
+       "mov    %|r0, %2, lsr #16               @ AAAA\n"               \
+       "mov    %|r2, %3, lsr #16               @ BBBB\n"               \
+       "bic    %|r1, %2, %|r0, lsl #16         @ aaaa\n"               \
+       "bic    %0, %3, %|r2, lsl #16           @ bbbb\n"               \
+       "mul    %1, %|r1, %|r2                  @ aaaa * BBBB\n"        \
+       "mul    %|r2, %|r0, %|r2                @ AAAA * BBBB\n"        \
+       "mul    %|r1, %0, %|r1                  @ aaaa * bbbb\n"        \
+       "mul    %0, %|r0, %0                    @ AAAA * bbbb\n"        \
+       "adds   %|r0, %1, %0                    @ central sum\n"        \
+       "addcs  %|r2, %|r2, #65536\n"                                   \
+       "adds   %1, %|r1, %|r0, lsl #16\n"                              \
+       "adc    %0, %|r2, %|r0, lsr #16"                                \
           : "=&r" ((USItype)(xh)),                                     \
             "=r" ((USItype)(xl))                                       \
           : "r" ((USItype)(a)),                                        \
             "r" ((USItype)(b))                                         \
           : "r0", "r1", "r2")
 #else
-#define umul_ppmm(xh, xl, a, b) \
-  __asm__ ("%@ Inlined umul_ppmm
-       umull   %r1, %r0, %r2, %r3" \
-                  : "=&r" ((USItype)(xh)), \
-                    "=r" ((USItype)(xl)) \
-                  : "r" ((USItype)(a)), \
-                    "r" ((USItype)(b)) \
+#define umul_ppmm(xh, xl, a, b)                                         \
+  __asm__ ("%@ Inlined umul_ppmm\n"                                     \
+          "umull %r1, %r0, %r2, %r3"                                   \
+                  : "=&r" ((USItype)(xh)),                             \
+                    "=r" ((USItype)(xl))                               \
+                  : "r" ((USItype)(a)),                                \
+                    "r" ((USItype)(b))                                 \
                   : "r0", "r1")
 #endif
 #define UMUL_TIME 20
@@ -274,8 +271,8 @@ extern UDItype __udiv_qrnnd ();
  ***************************************/
 #if defined (__gmicro__) && W_TYPE_SIZE == 32
 #define add_ssaaaa(sh, sl, ah, al, bh, bl) \
-  __asm__ ("add.w %5,%1
-       addx %3,%0"                                                     \
+  __asm__ ("add.w %5,%1\n"                                              \
+          "addx %3,%0"                                                 \
           : "=g" ((USItype)(sh)),                                      \
             "=&g" ((USItype)(sl))                                      \
           : "%0" ((USItype)(ah)),                                      \
@@ -283,8 +280,8 @@ extern UDItype __udiv_qrnnd ();
             "%1" ((USItype)(al)),                                      \
             "g" ((USItype)(bl)))
 #define sub_ddmmss(sh, sl, ah, al, bh, bl) \
-  __asm__ ("sub.w %5,%1
-       subx %3,%0"                                                     \
+  __asm__ ("sub.w %5,%1\n"                                              \
+          "subx %3,%0"                                                 \
           : "=g" ((USItype)(sh)),                                      \
             "=&g" ((USItype)(sl))                                      \
           : "0" ((USItype)(ah)),                                       \
@@ -317,8 +314,8 @@ extern UDItype __udiv_qrnnd ();
  ***************************************/
 #if defined (__hppa) && W_TYPE_SIZE == 32
 #define add_ssaaaa(sh, sl, ah, al, bh, bl) \
-  __asm__ ("add %4,%5,%1
-       addc %2,%3,%0"                                                  \
+  __asm__ ("   add %4,%5,%1\n"                                             \
+          "    addc %2,%3,%0"                                              \
           : "=r" ((USItype)(sh)),                                      \
             "=&r" ((USItype)(sl))                                      \
           : "%rM" ((USItype)(ah)),                                     \
@@ -326,8 +323,8 @@ extern UDItype __udiv_qrnnd ();
             "%rM" ((USItype)(al)),                                     \
             "rM" ((USItype)(bl)))
 #define sub_ddmmss(sh, sl, ah, al, bh, bl) \
-  __asm__ ("sub %4,%5,%1
-       subb %2,%3,%0"                                                  \
+  __asm__ ("   sub %4,%5,%1\n"                                             \
+          "    subb %2,%3,%0"                                              \
           : "=r" ((USItype)(sh)),                                      \
             "=&r" ((USItype)(sl))                                      \
           : "rM" ((USItype)(ah)),                                      \
@@ -340,7 +337,7 @@ extern UDItype __udiv_qrnnd ();
     union {UDItype __ll;                                               \
           struct {USItype __h, __l;} __i;                              \
          } __xx;                                                       \
-    __asm__ ("xmpyu %1,%2,%0"                                           \
+    __asm__ (" xmpyu %1,%2,%0"                                           \
             : "=*f" (__xx.__ll)                                        \
             : "*f" ((USItype)(u)),                                     \
               "*f" ((USItype)(v)));                                    \
@@ -362,25 +359,25 @@ extern UDItype __udiv_qrnnd ();
 extern USItype __udiv_qrnnd ();
 #endif /* LONGLONG_STANDALONE */
 #define count_leading_zeros(count, x) \
-  do {                                                                 \
-    USItype __tmp;                                                     \
-    __asm__ (                                                          \
-       "ldi             1,%0
-       extru,=         %1,15,16,%%r0           ; Bits 31..16 zero?
-       extru,tr        %1,15,16,%1             ; No.  Shift down, skip add.
-       ldo             16(%0),%0               ; Yes.  Perform add.
-       extru,=         %1,23,8,%%r0            ; Bits 15..8 zero?
-       extru,tr        %1,23,8,%1              ; No.  Shift down, skip add.
-       ldo             8(%0),%0                ; Yes.  Perform add.
-       extru,=         %1,27,4,%%r0            ; Bits 7..4 zero?
-       extru,tr        %1,27,4,%1              ; No.  Shift down, skip add.
-       ldo             4(%0),%0                ; Yes.  Perform add.
-       extru,=         %1,29,2,%%r0            ; Bits 3..2 zero?
-       extru,tr        %1,29,2,%1              ; No.  Shift down, skip add.
-       ldo             2(%0),%0                ; Yes.  Perform add.
-       extru           %1,30,1,%1              ; Extract bit 1.
-       sub             %0,%1,%0                ; Subtract it.
-       " : "=r" (count), "=r" (__tmp) : "1" (x));                      \
+  do {                                                                \
+    USItype __tmp;                                                    \
+    __asm__ (                                                         \
+       "       ldi             1,%0                                       \n" \
+       "       extru,=         %1,15,16,%%r0  ; Bits 31..16 zero?         \n" \
+       "       extru,tr        %1,15,16,%1    ; No.  Shift down, skip add.\n" \
+       "       ldo             16(%0),%0      ; Yes.   Perform add.       \n" \
+       "       extru,=         %1,23,8,%%r0   ; Bits 15..8 zero?          \n" \
+       "       extru,tr        %1,23,8,%1     ; No.  Shift down, skip add.\n" \
+       "       ldo             8(%0),%0       ; Yes.   Perform add.       \n" \
+       "       extru,=         %1,27,4,%%r0   ; Bits 7..4 zero?           \n" \
+       "       extru,tr        %1,27,4,%1     ; No.  Shift down, skip add.\n" \
+       "       ldo             4(%0),%0       ; Yes.   Perform add.       \n" \
+       "       extru,=         %1,29,2,%%r0   ; Bits 3..2 zero?           \n" \
+       "       extru,tr        %1,29,2,%1     ; No.  Shift down, skip add.\n" \
+       "       ldo             2(%0),%0       ; Yes.   Perform add.       \n" \
+       "       extru           %1,30,1,%1     ; Extract bit 1.            \n" \
+       "       sub             %0,%1,%0       ; Subtract it.              "   \
+       : "=r" (count), "=r" (__tmp) : "1" (x));                        \
   } while (0)
 #endif /* hppa */
 
@@ -435,8 +432,8 @@ extern USItype __udiv_qrnnd ();
  ***************************************/
 #if (defined (__i386__) || defined (__i486__)) && W_TYPE_SIZE == 32
 #define add_ssaaaa(sh, sl, ah, al, bh, bl) \
-  __asm__ ("addl %5,%1
-       adcl %3,%0"                                                     \
+  __asm__ ("addl %5,%1\n"                                               \
+          "adcl %3,%0"                                                 \
           : "=r" ((USItype)(sh)),                                      \
             "=&r" ((USItype)(sl))                                      \
           : "%0" ((USItype)(ah)),                                      \
@@ -444,8 +441,8 @@ extern USItype __udiv_qrnnd ();
             "%1" ((USItype)(al)),                                      \
             "g" ((USItype)(bl)))
 #define sub_ddmmss(sh, sl, ah, al, bh, bl) \
-  __asm__ ("subl %5,%1
-       sbbl %3,%0"                                                     \
+  __asm__ ("subl %5,%1\n"                                               \
+          "sbbl %3,%0"                                                 \
           : "=r" ((USItype)(sh)),                                      \
             "=&r" ((USItype)(sl))                                      \
           : "0" ((USItype)(ah)),                                       \
@@ -488,7 +485,8 @@ extern USItype __udiv_qrnnd ();
  ***************************************/
 #if defined (__i860__) && W_TYPE_SIZE == 32
 #define rshift_rhlc(r,h,l,c) \
-  __asm__ ("shr %3,r0,r0\;shrd %1,%2,%0"                                \
+  __asm__ ("shr %3,r0,r0\n"  \
+           "shrd %1,%2,%0"   \
           "=r" (r) : "r" (h), "r" (l), "rn" (c))
 #endif /* i860 */
 
@@ -497,7 +495,9 @@ extern USItype __udiv_qrnnd ();
  ***************************************/
 #if defined (__i960__) && W_TYPE_SIZE == 32
 #define add_ssaaaa(sh, sl, ah, al, bh, bl) \
-  __asm__ ("cmpo 1,0\;addc %5,%4,%1\;addc %3,%2,%0"                     \
+  __asm__ ("cmpo 1,0\n"      \
+           "addc %5,%4,%1\n" \
+           "addc %3,%2,%0"   \
           : "=r" ((USItype)(sh)),                                      \
             "=&r" ((USItype)(sl))                                      \
           : "%dI" ((USItype)(ah)),                                     \
@@ -505,7 +505,9 @@ extern USItype __udiv_qrnnd ();
             "%dI" ((USItype)(al)),                                     \
             "dI" ((USItype)(bl)))
 #define sub_ddmmss(sh, sl, ah, al, bh, bl) \
-  __asm__ ("cmpo 0,0\;subc %5,%4,%1\;subc %3,%2,%0"                     \
+  __asm__ ("cmpo 0,0\n"      \
+           "subc %5,%4,%1\n" \
+           "subc %3,%2,%0"   \
           : "=r" ((USItype)(sh)),                                      \
             "=&r" ((USItype)(sl))                                      \
           : "dI" ((USItype)(ah)),                                      \
@@ -568,8 +570,8 @@ extern USItype __udiv_qrnnd ();
  ***************************************/
 #if (defined (__mc68000__) || defined (__mc68020__) || defined (__NeXT__) || defined(mc68020)) && W_TYPE_SIZE == 32
 #define add_ssaaaa(sh, sl, ah, al, bh, bl) \
-  __asm__ ("add%.l %5,%1
-       addx%.l %3,%0"                                                  \
+  __asm__ ("add%.l %5,%1\n"                                             \
+          "addx%.l %3,%0"                                              \
           : "=d" ((USItype)(sh)),                                      \
             "=&d" ((USItype)(sl))                                      \
           : "%0" ((USItype)(ah)),                                      \
@@ -577,8 +579,8 @@ extern USItype __udiv_qrnnd ();
             "%1" ((USItype)(al)),                                      \
             "g" ((USItype)(bl)))
 #define sub_ddmmss(sh, sl, ah, al, bh, bl) \
-  __asm__ ("sub%.l %5,%1
-       subx%.l %3,%0"                                                  \
+  __asm__ ("sub%.l %5,%1\n"                                             \
+          "subx%.l %3,%0"                                              \
           : "=d" ((USItype)(sh)),                                      \
             "=&d" ((USItype)(sl))                                      \
           : "0" ((USItype)(ah)),                                       \
@@ -615,32 +617,32 @@ extern USItype __udiv_qrnnd ();
 #define COUNT_LEADING_ZEROS_0 32
 #else /* not mc68020 */
 #define umul_ppmm(xh, xl, a, b) \
-  do { USItype __umul_tmp1, __umul_tmp2;                               \
-       __asm__ ("| Inlined umul_ppmm
-       move%.l %5,%3
-       move%.l %2,%0
-       move%.w %3,%1
-       swap    %3
-       swap    %0
-       mulu    %2,%1
-       mulu    %3,%0
-       mulu    %2,%3
-       swap    %2
-       mulu    %5,%2
-       add%.l  %3,%2
-       jcc     1f
-       add%.l  %#0x10000,%0
-1:     move%.l %2,%3
-       clr%.w  %2
-       swap    %2
-       swap    %3
-       clr%.w  %3
-       add%.l  %3,%1
-       addx%.l %2,%0
-       | End inlined umul_ppmm"                                        \
-             : "=&d" ((USItype)(xh)), "=&d" ((USItype)(xl)),           \
-               "=d" (__umul_tmp1), "=&d" (__umul_tmp2)                 \
-             : "%2" ((USItype)(a)), "d" ((USItype)(b)));               \
+  do { USItype __umul_tmp1, __umul_tmp2;                         \
+       __asm__ ("| Inlined umul_ppmm                         \n" \
+ "        move%.l %5,%3                                       \n" \
+ "        move%.l %2,%0                                       \n" \
+ "        move%.w %3,%1                                       \n" \
+ "        swap %3                                            \n" \
+ "        swap %0                                            \n" \
+ "        mulu %2,%1                                         \n" \
+ "        mulu %3,%0                                         \n" \
+ "        mulu %2,%3                                         \n" \
+ "        swap %2                                            \n" \
+ "        mulu %5,%2                                         \n" \
+ "        add%.l       %3,%2                                 \n" \
+ "        jcc  1f                                            \n" \
+ "        add%.l       %#0x10000,%0                          \n" \
+ "1:   move%.l %2,%3                                         \n" \
+ "        clr%.w       %2                                    \n" \
+ "        swap %2                                            \n" \
+ "        swap %3                                            \n" \
+ "        clr%.w       %3                                    \n" \
+ "        add%.l       %3,%1                                 \n" \
+ "        addx%.l %2,%0                                       \n" \
"        | End inlined umul_ppmm"                                \
+             : "=&d" ((USItype)(xh)), "=&d" ((USItype)(xl)),     \
+               "=d" (__umul_tmp1), "=&d" (__umul_tmp2)           \
+             : "%2" ((USItype)(a)), "d" ((USItype)(b)));         \
   } while (0)
 #define UMUL_TIME 100
 #define UDIV_TIME 400
@@ -653,8 +655,8 @@ extern USItype __udiv_qrnnd ();
  ***************************************/
 #if defined (__m88000__) && W_TYPE_SIZE == 32
 #define add_ssaaaa(sh, sl, ah, al, bh, bl) \
-  __asm__ ("addu.co %1,%r4,%r5
-       addu.ci %0,%r2,%r3"                                             \
+  __asm__ ("addu.co %1,%r4,%r5\n"                                       \
+          "addu.ci %0,%r2,%r3"                                         \
           : "=r" ((USItype)(sh)),                                      \
             "=&r" ((USItype)(sl))                                      \
           : "%rJ" ((USItype)(ah)),                                     \
@@ -662,8 +664,8 @@ extern USItype __udiv_qrnnd ();
             "%rJ" ((USItype)(al)),                                     \
             "rJ" ((USItype)(bl)))
 #define sub_ddmmss(sh, sl, ah, al, bh, bl) \
-  __asm__ ("subu.co %1,%r4,%r5
-       subu.ci %0,%r2,%r3"                                             \
+  __asm__ ("subu.co %1,%r4,%r5\n"                                       \
+          "subu.ci %0,%r2,%r3"                                         \
           : "=r" ((USItype)(sh)),                                      \
             "=&r" ((USItype)(sl))                                      \
           : "rJ" ((USItype)(ah)),                                      \
@@ -705,7 +707,6 @@ extern USItype __udiv_qrnnd ();
 #endif /* __m88110__ */
 #endif /* __m88000__ */
 
-
 /***************************************
  **************  MIPS  *****************
  ***************************************/
@@ -719,9 +720,9 @@ extern USItype __udiv_qrnnd ();
             "d" ((USItype)(v)))
 #else
 #define umul_ppmm(w1, w0, u, v) \
-  __asm__ ("multu %2,%3
-       mflo %0
-       mfhi %1"                                                        \
+  __asm__ ("multu %2,%3 \n" \
+          "mflo %0 \n"     \
+          "mfhi %1"                                                        \
           : "=d" ((USItype)(w0)),                                      \
             "=d" ((USItype)(w1))                                       \
           : "d" ((USItype)(u)),                                        \
@@ -744,9 +745,9 @@ extern USItype __udiv_qrnnd ();
             "d" ((UDItype)(v)))
 #else
 #define umul_ppmm(w1, w0, u, v) \
-  __asm__ ("dmultu %2,%3
-       mflo %0
-       mfhi %1"                                                        \
+  __asm__ ("dmultu %2,%3 \n"    \
+          "mflo %0 \n"         \
+          "mfhi %1"                                                        \
           : "=d" ((UDItype)(w0)),                                      \
             "=d" ((UDItype)(w1))                                       \
           : "d" ((UDItype)(u)),                                        \
@@ -920,14 +921,71 @@ extern USItype __udiv_qrnnd ();
 #endif
 #endif /* Power architecture variants. */
 
+/* Powerpc 64 bit support taken from gmp-4.1.2. */
+/* We should test _IBMR2 here when we add assembly support for the system
+   vendor compilers.  */
+#if 0 /* Not yet enabled because we don't have hardware for a test. */
+#if (defined (_ARCH_PPC) || defined (__powerpc__)) && W_TYPE_SIZE == 64
+#define add_ssaaaa(sh, sl, ah, al, bh, bl) \
+  do {                                                                 \
+    if (__builtin_constant_p (bh) && (bh) == 0)                                \
+      __asm__ ("{a%I4|add%I4c} %1,%3,%4\n\t{aze|addze} %0,%2"          \
+            : "=r" (sh), "=&r" (sl) : "r" (ah), "%r" (al), "rI" (bl));\
+    else if (__builtin_constant_p (bh) && (bh) == ~(UDItype) 0)                \
+      __asm__ ("{a%I4|add%I4c} %1,%3,%4\n\t{ame|addme} %0,%2"          \
+            : "=r" (sh), "=&r" (sl) : "r" (ah), "%r" (al), "rI" (bl));\
+    else                                                               \
+      __asm__ ("{a%I5|add%I5c} %1,%4,%5\n\t{ae|adde} %0,%2,%3"         \
+            : "=r" (sh), "=&r" (sl)                                    \
+            : "%r" (ah), "r" (bh), "%r" (al), "rI" (bl));              \
+  } while (0)
+#define sub_ddmmss(sh, sl, ah, al, bh, bl) \
+  do {                                                                 \
+    if (__builtin_constant_p (ah) && (ah) == 0)                                \
+      __asm__ ("{sf%I3|subf%I3c} %1,%4,%3\n\t{sfze|subfze} %0,%2"      \
+              : "=r" (sh), "=&r" (sl) : "r" (bh), "rI" (al), "r" (bl));\
+    else if (__builtin_constant_p (ah) && (ah) == ~(UDItype) 0)                \
+      __asm__ ("{sf%I3|subf%I3c} %1,%4,%3\n\t{sfme|subfme} %0,%2"      \
+              : "=r" (sh), "=&r" (sl) : "r" (bh), "rI" (al), "r" (bl));\
+    else if (__builtin_constant_p (bh) && (bh) == 0)                   \
+      __asm__ ("{sf%I3|subf%I3c} %1,%4,%3\n\t{ame|addme} %0,%2"                \
+              : "=r" (sh), "=&r" (sl) : "r" (ah), "rI" (al), "r" (bl));\
+    else if (__builtin_constant_p (bh) && (bh) == ~(UDItype) 0)                \
+      __asm__ ("{sf%I3|subf%I3c} %1,%4,%3\n\t{aze|addze} %0,%2"                \
+              : "=r" (sh), "=&r" (sl) : "r" (ah), "rI" (al), "r" (bl));\
+    else                                                               \
+      __asm__ ("{sf%I4|subf%I4c} %1,%5,%4\n\t{sfe|subfe} %0,%3,%2"     \
+              : "=r" (sh), "=&r" (sl)                                  \
+              : "r" (ah), "r" (bh), "rI" (al), "r" (bl));              \
+  } while (0)
+#define count_leading_zeros(count, x) \
+  __asm__ ("cntlzd %0,%1" : "=r" (count) : "r" (x))
+#define COUNT_LEADING_ZEROS_0 64
+#define umul_ppmm(ph, pl, m0, m1) \
+  do {                                                                 \
+    UDItype __m0 = (m0), __m1 = (m1);                                  \
+    __asm__ ("mulhdu %0,%1,%2" : "=r" (ph) : "%r" (m0), "r" (m1));     \
+    (pl) = __m0 * __m1;                                                        \
+  } while (0)
+#define UMUL_TIME 15
+#define smul_ppmm(ph, pl, m0, m1) \
+  do {                                                                 \
+    DItype __m0 = (m0), __m1 = (m1);                                   \
+    __asm__ ("mulhd %0,%1,%2" : "=r" (ph) : "%r" (m0), "r" (m1));      \
+    (pl) = __m0 * __m1;                                                        \
+  } while (0)
+#define SMUL_TIME 14  /* ??? */
+#define UDIV_TIME 120 /* ??? */
+#endif /* 64-bit PowerPC.  */
+#endif /* if 0 */
 
 /***************************************
  **************  PYR  ******************
  ***************************************/
 #if defined (__pyr__) && W_TYPE_SIZE == 32
 #define add_ssaaaa(sh, sl, ah, al, bh, bl) \
-  __asm__ ("addw        %5,%1
-       addwc   %3,%0"                                                  \
+  __asm__ ("addw        %5,%1 \n" \
+          "addwc       %3,%0"                                          \
           : "=r" ((USItype)(sh)),                                      \
             "=&r" ((USItype)(sl))                                      \
           : "%0" ((USItype)(ah)),                                      \
@@ -935,8 +993,8 @@ extern USItype __udiv_qrnnd ();
             "%1" ((USItype)(al)),                                      \
             "g" ((USItype)(bl)))
 #define sub_ddmmss(sh, sl, ah, al, bh, bl) \
-  __asm__ ("subw        %5,%1
-       subwb   %3,%0"                                                  \
+  __asm__ ("subw        %5,%1 \n" \
+          "subwb       %3,%0"                                          \
           : "=r" ((USItype)(sh)),                                      \
             "=&r" ((USItype)(sl))                                      \
           : "0" ((USItype)(ah)),                                       \
@@ -948,8 +1006,8 @@ extern USItype __udiv_qrnnd ();
   ({union {UDItype __ll;                                               \
           struct {USItype __h, __l;} __i;                              \
          } __xx;                                                       \
-  __asm__ ("movw %1,%R0
-       uemul %2,%0"                                                    \
+  __asm__ ("movw %1,%R0 \n" \
+          "uemul %2,%0"                                                \
           : "=&r" (__xx.__ll)                                          \
           : "g" ((USItype) (u)),                                       \
             "g" ((USItype)(v)));                                       \
@@ -962,8 +1020,8 @@ extern USItype __udiv_qrnnd ();
  ***************************************/
 #if defined (__ibm032__) /* RT/ROMP */ && W_TYPE_SIZE == 32
 #define add_ssaaaa(sh, sl, ah, al, bh, bl) \
-  __asm__ ("a %1,%5
-       ae %0,%3"                                                       \
+  __asm__ ("a %1,%5 \n" \
+          "ae %0,%3"                                                   \
           : "=r" ((USItype)(sh)),                                      \
             "=&r" ((USItype)(sl))                                      \
           : "%0" ((USItype)(ah)),                                      \
@@ -971,8 +1029,8 @@ extern USItype __udiv_qrnnd ();
             "%1" ((USItype)(al)),                                      \
             "r" ((USItype)(bl)))
 #define sub_ddmmss(sh, sl, ah, al, bh, bl) \
-  __asm__ ("s %1,%5
-       se %0,%3"                                                       \
+  __asm__ ("s %1,%5\n" \
+          "se %0,%3"                                                   \
           : "=r" ((USItype)(sh)),                                      \
             "=&r" ((USItype)(sl))                                      \
           : "0" ((USItype)(ah)),                                       \
@@ -983,26 +1041,26 @@ extern USItype __udiv_qrnnd ();
   do {                                                                 \
     USItype __m0 = (m0), __m1 = (m1);                                  \
     __asm__ (                                                          \
-       "s       r2,r2
-       mts     r10,%2
-       m       r2,%3
-       m       r2,%3
-       m       r2,%3
-       m       r2,%3
-       m       r2,%3
-       m       r2,%3
-       m       r2,%3
-       m       r2,%3
-       m       r2,%3
-       m       r2,%3
-       m       r2,%3
-       m       r2,%3
-       m       r2,%3
-       m       r2,%3
-       m       r2,%3
-       m       r2,%3
-       cas     %0,r2,r0
-       mfs     r10,%1"                                                 \
+       "s       r2,r2    \n" \
+       "mts    r10,%2   \n" \
+       "m      r2,%3    \n" \
+       "m      r2,%3    \n" \
+       "m      r2,%3    \n" \
+       "m      r2,%3    \n" \
+       "m      r2,%3    \n" \
+       "m      r2,%3    \n" \
+       "m      r2,%3    \n" \
+       "m      r2,%3    \n" \
+       "m      r2,%3    \n" \
+       "m      r2,%3    \n" \
+       "m      r2,%3    \n" \
+       "m      r2,%3    \n" \
+       "m      r2,%3    \n" \
+       "m      r2,%3    \n" \
+       "m      r2,%3    \n" \
+       "m      r2,%3    \n" \
+       "cas    %0,r2,r0 \n" \
+       "mfs    r10,%1"                                                 \
             : "=r" ((USItype)(ph)),                                    \
               "=r" ((USItype)(pl))                                     \
             : "%r" (__m0),                                             \
@@ -1033,12 +1091,13 @@ extern USItype __udiv_qrnnd ();
 /***************************************
  **************  SH2  ******************
  ***************************************/
-#if defined (__sh2__) && W_TYPE_SIZE == 32
+#if (defined (__sh2__) || defined(__sh3__) || defined(__SH4__) ) \
+    && W_TYPE_SIZE == 32
 #define umul_ppmm(w1, w0, u, v) \
   __asm__ (                                                            \
-       "dmulu.l %2,%3
-       sts     macl,%1
-       sts     mach,%0"                                                \
+        "dmulu.l %2,%3\n"  \
+       "sts    macl,%1\n" \
+       "sts    mach,%0"                                                \
           : "=r" ((USItype)(w1)),                                      \
             "=r" ((USItype)(w0))                                       \
           : "r" ((USItype)(u)),                                        \
@@ -1052,8 +1111,8 @@ extern USItype __udiv_qrnnd ();
  ***************************************/
 #if defined (__sparc__) && W_TYPE_SIZE == 32
 #define add_ssaaaa(sh, sl, ah, al, bh, bl) \
-  __asm__ ("addcc %r4,%5,%1
-       addx %r2,%3,%0"                                                 \
+  __asm__ ("addcc %r4,%5,%1\n" \
+          "addx %r2,%3,%0"                                             \
           : "=r" ((USItype)(sh)),                                      \
             "=&r" ((USItype)(sl))                                      \
           : "%rJ" ((USItype)(ah)),                                     \
@@ -1062,8 +1121,8 @@ extern USItype __udiv_qrnnd ();
             "rI" ((USItype)(bl))                                       \
           __CLOBBER_CC)
 #define sub_ddmmss(sh, sl, ah, al, bh, bl) \
-  __asm__ ("subcc %r4,%5,%1
-       subx %r2,%3,%0"                                                 \
+  __asm__ ("subcc %r4,%5,%1\n" \
+          "subx %r2,%3,%0"                                             \
           : "=r" ((USItype)(sh)),                                      \
             "=&r" ((USItype)(sl))                                      \
           : "rJ" ((USItype)(ah)),                                      \
@@ -1109,45 +1168,45 @@ extern USItype __udiv_qrnnd ();
             "r" ((USItype)(v)))
 #define UMUL_TIME 5
 #define udiv_qrnnd(q, r, n1, n0, d) \
-  __asm__ ("! Inlined udiv_qrnnd
-       wr      %%g0,%2,%%y     ! Not a delayed write for sparclite
-       tst     %%g0
-       divscc  %3,%4,%%g1
-       divscc  %%g1,%4,%%g1
-       divscc  %%g1,%4,%%g1
-       divscc  %%g1,%4,%%g1
-       divscc  %%g1,%4,%%g1
-       divscc  %%g1,%4,%%g1
-       divscc  %%g1,%4,%%g1
-       divscc  %%g1,%4,%%g1
-       divscc  %%g1,%4,%%g1
-       divscc  %%g1,%4,%%g1
-       divscc  %%g1,%4,%%g1
-       divscc  %%g1,%4,%%g1
-       divscc  %%g1,%4,%%g1
-       divscc  %%g1,%4,%%g1
-       divscc  %%g1,%4,%%g1
-       divscc  %%g1,%4,%%g1
-       divscc  %%g1,%4,%%g1
-       divscc  %%g1,%4,%%g1
-       divscc  %%g1,%4,%%g1
-       divscc  %%g1,%4,%%g1
-       divscc  %%g1,%4,%%g1
-       divscc  %%g1,%4,%%g1
-       divscc  %%g1,%4,%%g1
-       divscc  %%g1,%4,%%g1
-       divscc  %%g1,%4,%%g1
-       divscc  %%g1,%4,%%g1
-       divscc  %%g1,%4,%%g1
-       divscc  %%g1,%4,%%g1
-       divscc  %%g1,%4,%%g1
-       divscc  %%g1,%4,%%g1
-       divscc  %%g1,%4,%%g1
-       divscc  %%g1,%4,%0
-       rd      %%y,%1
-       bl,a 1f
-       add     %1,%4,%1
-1:     ! End of inline udiv_qrnnd"                                     \
+  __asm__ ("! Inlined udiv_qrnnd                                     \n" \
+ "        wr   %%g0,%2,%%y     ! Not a delayed write for sparclite  \n" \
+ "        tst  %%g0                                                 \n" \
+ "        divscc       %3,%4,%%g1                                   \n" \
+ "        divscc       %%g1,%4,%%g1                                 \n" \
+ "        divscc       %%g1,%4,%%g1                                 \n" \
+ "        divscc       %%g1,%4,%%g1                                 \n" \
+ "        divscc       %%g1,%4,%%g1                                 \n" \
+ "        divscc       %%g1,%4,%%g1                                 \n" \
+ "        divscc       %%g1,%4,%%g1                                 \n" \
+ "        divscc       %%g1,%4,%%g1                                 \n" \
+ "        divscc       %%g1,%4,%%g1                                 \n" \
+ "        divscc       %%g1,%4,%%g1                                 \n" \
+ "        divscc       %%g1,%4,%%g1                                 \n" \
+ "        divscc       %%g1,%4,%%g1                                 \n" \
+ "        divscc       %%g1,%4,%%g1                                 \n" \
+ "        divscc       %%g1,%4,%%g1                                 \n" \
+ "        divscc       %%g1,%4,%%g1                                 \n" \
+ "        divscc       %%g1,%4,%%g1                                 \n" \
+ "        divscc       %%g1,%4,%%g1                                 \n" \
+ "        divscc       %%g1,%4,%%g1                                 \n" \
+ "        divscc       %%g1,%4,%%g1                                 \n" \
+ "        divscc       %%g1,%4,%%g1                                 \n" \
+ "        divscc       %%g1,%4,%%g1                                 \n" \
+ "        divscc       %%g1,%4,%%g1                                 \n" \
+ "        divscc       %%g1,%4,%%g1                                 \n" \
+ "        divscc       %%g1,%4,%%g1                                 \n" \
+ "        divscc       %%g1,%4,%%g1                                 \n" \
+ "        divscc       %%g1,%4,%%g1                                 \n" \
+ "        divscc       %%g1,%4,%%g1                                 \n" \
+ "        divscc       %%g1,%4,%%g1                                 \n" \
+ "        divscc       %%g1,%4,%%g1                                 \n" \
+ "        divscc       %%g1,%4,%%g1                                 \n" \
+ "        divscc       %%g1,%4,%%g1                                 \n" \
+ "        divscc       %%g1,%4,%0                                   \n" \
+ "        rd   %%y,%1                                               \n" \
+ "        bl,a 1f                                                    \n" \
+ "        add  %1,%4,%1                                             \n" \
+ "1:   ! End of inline udiv_qrnnd"                                     \
           : "=r" ((USItype)(q)),                                       \
             "=r" ((USItype)(r))                                        \
           : "r" ((USItype)(n1)),                                       \
@@ -1167,46 +1226,46 @@ extern USItype __udiv_qrnnd ();
 /* Default to sparc v7 versions of umul_ppmm and udiv_qrnnd.  */
 #ifndef umul_ppmm
 #define umul_ppmm(w1, w0, u, v) \
-  __asm__ ("! Inlined umul_ppmm
-       wr      %%g0,%2,%%y     ! SPARC has 0-3 delay insn after a wr
-       sra     %3,31,%%g2      ! Don't move this insn
-       and     %2,%%g2,%%g2    ! Don't move this insn
-       andcc   %%g0,0,%%g1     ! Don't move this insn
-       mulscc  %%g1,%3,%%g1
-       mulscc  %%g1,%3,%%g1
-       mulscc  %%g1,%3,%%g1
-       mulscc  %%g1,%3,%%g1
-       mulscc  %%g1,%3,%%g1
-       mulscc  %%g1,%3,%%g1
-       mulscc  %%g1,%3,%%g1
-       mulscc  %%g1,%3,%%g1
-       mulscc  %%g1,%3,%%g1
-       mulscc  %%g1,%3,%%g1
-       mulscc  %%g1,%3,%%g1
-       mulscc  %%g1,%3,%%g1
-       mulscc  %%g1,%3,%%g1
-       mulscc  %%g1,%3,%%g1
-       mulscc  %%g1,%3,%%g1
-       mulscc  %%g1,%3,%%g1
-       mulscc  %%g1,%3,%%g1
-       mulscc  %%g1,%3,%%g1
-       mulscc  %%g1,%3,%%g1
-       mulscc  %%g1,%3,%%g1
-       mulscc  %%g1,%3,%%g1
-       mulscc  %%g1,%3,%%g1
-       mulscc  %%g1,%3,%%g1
-       mulscc  %%g1,%3,%%g1
-       mulscc  %%g1,%3,%%g1
-       mulscc  %%g1,%3,%%g1
-       mulscc  %%g1,%3,%%g1
-       mulscc  %%g1,%3,%%g1
-       mulscc  %%g1,%3,%%g1
-       mulscc  %%g1,%3,%%g1
-       mulscc  %%g1,%3,%%g1
-       mulscc  %%g1,%3,%%g1
-       mulscc  %%g1,0,%%g1
-       add     %%g1,%%g2,%0
-       rd      %%y,%1"                                                 \
+  __asm__ ("! Inlined umul_ppmm                                        \n" \
+ "        wr   %%g0,%2,%%y     ! SPARC has 0-3 delay insn after a wr  \n" \
+ "        sra  %3,31,%%g2      ! Don't move this insn                 \n" \
+ "        and  %2,%%g2,%%g2    ! Don't move this insn                 \n" \
+ "        andcc        %%g0,0,%%g1     ! Don't move this insn                 \n" \
+ "        mulscc       %%g1,%3,%%g1                                   \n" \
+ "        mulscc       %%g1,%3,%%g1                                   \n" \
+ "        mulscc       %%g1,%3,%%g1                                   \n" \
+ "        mulscc       %%g1,%3,%%g1                                   \n" \
+ "        mulscc       %%g1,%3,%%g1                                   \n" \
+ "        mulscc       %%g1,%3,%%g1                                   \n" \
+ "        mulscc       %%g1,%3,%%g1                                   \n" \
+ "        mulscc       %%g1,%3,%%g1                                   \n" \
+ "        mulscc       %%g1,%3,%%g1                                   \n" \
+ "        mulscc       %%g1,%3,%%g1                                   \n" \
+ "        mulscc       %%g1,%3,%%g1                                   \n" \
+ "        mulscc       %%g1,%3,%%g1                                   \n" \
+ "        mulscc       %%g1,%3,%%g1                                   \n" \
+ "        mulscc       %%g1,%3,%%g1                                   \n" \
+ "        mulscc       %%g1,%3,%%g1                                   \n" \
+ "        mulscc       %%g1,%3,%%g1                                   \n" \
+ "        mulscc       %%g1,%3,%%g1                                   \n" \
+ "        mulscc       %%g1,%3,%%g1                                   \n" \
+ "        mulscc       %%g1,%3,%%g1                                   \n" \
+ "        mulscc       %%g1,%3,%%g1                                   \n" \
+ "        mulscc       %%g1,%3,%%g1                                   \n" \
+ "        mulscc       %%g1,%3,%%g1                                   \n" \
+ "        mulscc       %%g1,%3,%%g1                                   \n" \
+ "        mulscc       %%g1,%3,%%g1                                   \n" \
+ "        mulscc       %%g1,%3,%%g1                                   \n" \
+ "        mulscc       %%g1,%3,%%g1                                   \n" \
+ "        mulscc       %%g1,%3,%%g1                                   \n" \
+ "        mulscc       %%g1,%3,%%g1                                   \n" \
+ "        mulscc       %%g1,%3,%%g1                                   \n" \
+ "        mulscc       %%g1,%3,%%g1                                   \n" \
+ "        mulscc       %%g1,%3,%%g1                                   \n" \
+ "        mulscc       %%g1,%3,%%g1                                   \n" \
+ "        mulscc       %%g1,0,%%g1                                    \n" \
+ "        add  %%g1,%%g2,%0                                           \n" \
"        rd   %%y,%1"                                                 \
           : "=r" ((USItype)(w1)),                                      \
             "=r" ((USItype)(w0))                                       \
           : "%rI" ((USItype)(u)),                                      \
@@ -1233,8 +1292,8 @@ extern USItype __udiv_qrnnd ();
  ***************************************/
 #if defined (__vax__) && W_TYPE_SIZE == 32
 #define add_ssaaaa(sh, sl, ah, al, bh, bl) \
-  __asm__ ("addl2 %5,%1
-       adwc %3,%0"                                                     \
+  __asm__ ("addl2 %5,%1\n" \
+          "adwc %3,%0"                                                 \
           : "=g" ((USItype)(sh)),                                      \
             "=&g" ((USItype)(sl))                                      \
           : "%0" ((USItype)(ah)),                                      \
@@ -1242,8 +1301,8 @@ extern USItype __udiv_qrnnd ();
             "%1" ((USItype)(al)),                                      \
             "g" ((USItype)(bl)))
 #define sub_ddmmss(sh, sl, ah, al, bh, bl) \
-  __asm__ ("subl2 %5,%1
-       sbwc %3,%0"                                                     \
+  __asm__ ("subl2 %5,%1\n" \
+          "sbwc %3,%0"                                                 \
           : "=g" ((USItype)(sh)),                                      \
             "=&g" ((USItype)(sl))                                      \
           : "0" ((USItype)(ah)),                                       \
@@ -1315,7 +1374,6 @@ extern USItype __udiv_qrnnd ();
 #endif /* __z8000__ */
 
 #endif /* __GNUC__ */
-#endif /* !__riscos__ */
 
 
 /***************************************
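
Note on the hunks above: nearly every change follows one mechanical pattern. Older GCC releases tolerated a raw newline inside an asm string literal, so the original macros spread an instruction sequence over several source lines within a single string; current compilers reject that, so each instruction is now its own string literal ending in an explicit "\n", and adjacent literals are concatenated by the compiler. The fragment below is a minimal sketch of that pattern and is not part of longlong.h; the macro name demo_add_ssaaaa and the 32-bit x86 target are illustrative assumptions, mirroring the i386 add_ssaaaa macro in the diff.

    /* Minimal sketch of the rewrite pattern used throughout this diff.
       demo_add_ssaaaa is a hypothetical name; it assumes an x86 target
       with 32-bit USItype limbs. */
    typedef unsigned int USItype;

    #define demo_add_ssaaaa(sh, sl, ah, al, bh, bl)                      \
      __asm__ ("addl %5,%1\n"   /* add low words, set the carry flag */  \
               "adcl %3,%0"     /* add high words plus the carry     */  \
               : "=r" (sh), "=&r" (sl)                                   \
               : "%0" ((USItype) (ah)), "g" ((USItype) (bh)),            \
                 "%1" ((USItype) (al)), "g" ((USItype) (bl)))

    /* Example use: propagate a carry through a two-limb (64-bit) value. */
    static void
    demo_carry (void)
    {
      USItype hi = 1, lo = 0xffffffffu;
      demo_add_ssaaaa (hi, lo, hi, lo, 0, 1);  /* hi:lo += 0:1 -> hi = 2, lo = 0 */
      (void) hi;
      (void) lo;
    }
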