asm fix for older gcc versions.
author Werner Koch <wk@gnupg.org>
Tue, 19 Apr 2016 18:05:07 +0000 (20:05 +0200)
committer Werner Koch <wk@gnupg.org>
Tue, 19 Apr 2016 18:05:37 +0000 (20:05 +0200)
* cipher/crc-intel-pclmul.c: Remove extra trailing colon from
asm statements.
--

gcc 4.2 is not able to grok a third colon without clobber
expressions.  Reported for FreeBSD 9.

GnuPG-bug-id: 2326
Signed-off-by: Werner Koch <wk@gnupg.org>
cipher/crc-intel-pclmul.c

index 5002f80..c034e2e 100644 (file)
@@ -143,7 +143,7 @@ crc32_reflected_bulk (u32 *pcrc, const byte *inbuf, size_t inlen,
                      [inbuf_2] "m" (inbuf[2 * 16]),
                      [inbuf_3] "m" (inbuf[3 * 16]),
                      [crc] "m" (*pcrc)
-                   );
+                   );
 
       inbuf += 4 * 16;
       inlen -= 4 * 16;
@@ -151,7 +151,7 @@ crc32_reflected_bulk (u32 *pcrc, const byte *inbuf, size_t inlen,
       asm volatile ("movdqa %[k1k2], %%xmm4\n\t"
                    :
                    : [k1k2] "m" (consts->k[1 - 1])
-                   );
+                   );
 
       /* Fold by 4. */
       while (inlen >= 4 * 16)
@@ -188,7 +188,7 @@ crc32_reflected_bulk (u32 *pcrc, const byte *inbuf, size_t inlen,
                          [inbuf_1] "m" (inbuf[1 * 16]),
                          [inbuf_2] "m" (inbuf[2 * 16]),
                          [inbuf_3] "m" (inbuf[3 * 16])
-                       );
+                       );
 
          inbuf += 4 * 16;
          inlen -= 4 * 16;
@@ -199,7 +199,7 @@ crc32_reflected_bulk (u32 *pcrc, const byte *inbuf, size_t inlen,
                    :
                    : [k3k4] "m" (consts->k[3 - 1]),
                      [my_p] "m" (consts->my_p[0])
-                   );
+                   );
 
       /* Fold 4 to 1. */
 
@@ -222,7 +222,7 @@ crc32_reflected_bulk (u32 *pcrc, const byte *inbuf, size_t inlen,
                    "pxor %%xmm4, %%xmm0\n\t"
                    :
                    :
-                   );
+                   );
     }
   else
     {
@@ -236,7 +236,7 @@ crc32_reflected_bulk (u32 *pcrc, const byte *inbuf, size_t inlen,
                      [crc] "m" (*pcrc),
                      [k3k4] "m" (consts->k[3 - 1]),
                      [my_p] "m" (consts->my_p[0])
-                   );
+                   );
 
       inbuf += 16;
       inlen -= 16;
@@ -256,7 +256,7 @@ crc32_reflected_bulk (u32 *pcrc, const byte *inbuf, size_t inlen,
                        "pxor %%xmm1, %%xmm0\n\t"
                        :
                        : [inbuf] "m" (*inbuf)
-                       );
+                       );
 
          inbuf += 16;
          inlen -= 16;
@@ -288,7 +288,7 @@ crc32_reflected_bulk (u32 *pcrc, const byte *inbuf, size_t inlen,
                      [mask] "m" (crc32_partial_fold_input_mask[inlen]),
                      [shl_shuf] "m" (crc32_refl_shuf_shift[inlen]),
                      [shr_shuf] "m" (crc32_refl_shuf_shift[inlen + 16])
-                   );
+                   );
 
       inbuf += inlen;
       inlen -= inlen;
@@ -318,7 +318,7 @@ crc32_reflected_bulk (u32 *pcrc, const byte *inbuf, size_t inlen,
                "pextrd $2, %%xmm0, %[out]\n\t"
                : [out] "=m" (*pcrc)
                : [k5] "m" (consts->k[5 - 1])
-               );
+               );
 }
 
 static inline void
@@ -333,7 +333,7 @@ crc32_reflected_less_than_16 (u32 *pcrc, const byte *inbuf, size_t inlen,
       asm volatile ("movdqa %[my_p], %%xmm5\n\t"
                    :
                    : [my_p] "m" (consts->my_p[0])
-                   );
+                   );
 
       if (inlen == 1)
        {
@@ -372,7 +372,7 @@ crc32_reflected_less_than_16 (u32 *pcrc, const byte *inbuf, size_t inlen,
                    : [out] "=m" (*pcrc)
                    : [in] "rm" (data),
                      [crc] "rm" (crc)
-                   );
+                   );
     }
   else if (inlen == 4)
     {
@@ -391,7 +391,7 @@ crc32_reflected_less_than_16 (u32 *pcrc, const byte *inbuf, size_t inlen,
                    : [in] "m" (*inbuf),
                      [crc] "m" (*pcrc),
                      [my_p] "m" (consts->my_p[0])
-                   );
+                   );
     }
   else
     {
@@ -404,14 +404,14 @@ crc32_reflected_less_than_16 (u32 *pcrc, const byte *inbuf, size_t inlen,
                      [crc] "m" (*pcrc),
                      [my_p] "m" (consts->my_p[0]),
                      [k3k4] "m" (consts->k[3 - 1])
-                   );
+                   );
 
       if (inlen >= 8)
        {
          asm volatile ("movq %[inbuf], %%xmm0\n\t"
                        :
                        : [inbuf] "m" (*inbuf)
-                       );
+                       );
          if (inlen > 8)
            {
              asm volatile (/*"pinsrq $1, %[inbuf_tail], %%xmm0\n\t"*/
@@ -422,7 +422,7 @@ crc32_reflected_less_than_16 (u32 *pcrc, const byte *inbuf, size_t inlen,
                            : [inbuf_tail] "m" (inbuf[inlen - 8]),
                              [merge_shuf] "m"
                                (*crc32_merge9to15_shuf[inlen - 9])
-                           );
+                           );
            }
        }
       else
@@ -435,7 +435,7 @@ crc32_reflected_less_than_16 (u32 *pcrc, const byte *inbuf, size_t inlen,
                          [inbuf_tail] "m" (inbuf[inlen - 4]),
                          [merge_shuf] "m"
                            (*crc32_merge5to7_shuf[inlen - 5])
-                       );
+                       );
        }
 
       /* Final fold. */
@@ -465,7 +465,7 @@ crc32_reflected_less_than_16 (u32 *pcrc, const byte *inbuf, size_t inlen,
                    "pextrd $2, %%xmm0, %[out]\n\t"
                    : [out] "=m" (*pcrc)
                    : [k5] "m" (consts->k[5 - 1])
-                   );
+                   );
     }
 }
 
@@ -477,7 +477,7 @@ crc32_bulk (u32 *pcrc, const byte *inbuf, size_t inlen,
   asm volatile ("movdqa %[bswap], %%xmm7\n\t"
                :
                : [bswap] "m" (*crc32_bswap_shuf)
-               );
+               );
 
   if (inlen >= 8 * 16)
     {
@@ -497,7 +497,7 @@ crc32_bulk (u32 *pcrc, const byte *inbuf, size_t inlen,
                      [inbuf_2] "m" (inbuf[2 * 16]),
                      [inbuf_3] "m" (inbuf[3 * 16]),
                      [crc] "m" (*pcrc)
-                   );
+                   );
 
       inbuf += 4 * 16;
       inlen -= 4 * 16;
@@ -505,7 +505,7 @@ crc32_bulk (u32 *pcrc, const byte *inbuf, size_t inlen,
       asm volatile ("movdqa %[k1k2], %%xmm4\n\t"
                    :
                    : [k1k2] "m" (consts->k[1 - 1])
-                   );
+                   );
 
       /* Fold by 4. */
       while (inlen >= 4 * 16)
@@ -546,7 +546,7 @@ crc32_bulk (u32 *pcrc, const byte *inbuf, size_t inlen,
                          [inbuf_1] "m" (inbuf[1 * 16]),
                          [inbuf_2] "m" (inbuf[2 * 16]),
                          [inbuf_3] "m" (inbuf[3 * 16])
-                       );
+                       );
 
          inbuf += 4 * 16;
          inlen -= 4 * 16;
@@ -557,7 +557,7 @@ crc32_bulk (u32 *pcrc, const byte *inbuf, size_t inlen,
                    :
                    : [k3k4] "m" (consts->k[3 - 1]),
                      [my_p] "m" (consts->my_p[0])
-                   );
+                   );
 
       /* Fold 4 to 1. */
 
@@ -580,7 +580,7 @@ crc32_bulk (u32 *pcrc, const byte *inbuf, size_t inlen,
                    "pxor %%xmm4, %%xmm0\n\t"
                    :
                    :
-                   );
+                   );
     }
   else
     {
@@ -595,7 +595,7 @@ crc32_bulk (u32 *pcrc, const byte *inbuf, size_t inlen,
                      [crc] "m" (*pcrc),
                      [k3k4] "m" (consts->k[3 - 1]),
                      [my_p] "m" (consts->my_p[0])
-                   );
+                   );
 
       inbuf += 16;
       inlen -= 16;
@@ -616,7 +616,7 @@ crc32_bulk (u32 *pcrc, const byte *inbuf, size_t inlen,
                        "pxor %%xmm1, %%xmm0\n\t"
                        :
                        : [inbuf] "m" (*inbuf)
-                       );
+                       );
 
          inbuf += 16;
          inlen -= 16;
@@ -650,7 +650,7 @@ crc32_bulk (u32 *pcrc, const byte *inbuf, size_t inlen,
                      [mask] "m" (crc32_partial_fold_input_mask[inlen]),
                      [shl_shuf] "m" (crc32_refl_shuf_shift[32 - inlen]),
                      [shr_shuf] "m" (crc32_shuf_shift[inlen + 16])
-                   );
+                   );
 
       inbuf += inlen;
       inlen -= inlen;
@@ -697,7 +697,7 @@ crc32_less_than_16 (u32 *pcrc, const byte *inbuf, size_t inlen,
       asm volatile ("movdqa %[my_p], %%xmm5\n\t"
                    :
                    : [my_p] "m" (consts->my_p[0])
-                   );
+                   );
 
       if (inlen == 1)
        {
@@ -774,14 +774,14 @@ crc32_less_than_16 (u32 *pcrc, const byte *inbuf, size_t inlen,
                      [crc] "m" (*pcrc),
                      [my_p] "m" (consts->my_p[0]),
                      [k3k4] "m" (consts->k[3 - 1])
-                   );
+                   );
 
       if (inlen >= 8)
        {
          asm volatile ("movq %[inbuf], %%xmm0\n\t"
                        :
                        : [inbuf] "m" (*inbuf)
-                       );
+                       );
          if (inlen > 8)
            {
              asm volatile (/*"pinsrq $1, %[inbuf_tail], %%xmm0\n\t"*/
@@ -792,7 +792,7 @@ crc32_less_than_16 (u32 *pcrc, const byte *inbuf, size_t inlen,
                            : [inbuf_tail] "m" (inbuf[inlen - 8]),
                              [merge_shuf] "m"
                                (*crc32_merge9to15_shuf[inlen - 9])
-                           );
+                           );
            }
        }
       else
@@ -805,7 +805,7 @@ crc32_less_than_16 (u32 *pcrc, const byte *inbuf, size_t inlen,
                          [inbuf_tail] "m" (inbuf[inlen - 4]),
                          [merge_shuf] "m"
                            (*crc32_merge5to7_shuf[inlen - 5])
-                       );
+                       );
        }
 
       /* Final fold. */