typedef long long i64;
typedef unsigned long long u64;
#define U64(C) C##ULL
#endif

typedef unsigned int u32;
typedef unsigned char u8;

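/*
 * STRICT_ALIGNMENT is cleared only on platforms known to tolerate
 * misaligned loads and stores (x86, x86_64, s390/s390x); everywhere
 * else memory must be accessed one byte at a time.
 */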
#define STRICT_ALIGNMENT 1
#if defined(__i386) || defined(__i386__) || \
    defined(__x86_64) || defined(__x86_64__) || \
    defined(_M_IX86) || defined(_M_AMD64) || defined(_M_X64) || \
    defined(__s390__) || defined(__s390x__)
# undef STRICT_ALIGNMENT
#endif

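/*
 * Compiler-specific byte-swap helpers: BSWAP4 reverses the byte order
 * of a 32-bit word, BSWAP8 that of a 64-bit word. They are defined only
 * when inline assembler or a compiler intrinsic is available and has
 * not been explicitly disabled.
 */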
#if !defined(PEDANTIC) && !defined(OPENSSL_NO_ASM) && !defined(OPENSSL_NO_INLINE_ASM)
#if defined(__GNUC__) && __GNUC__>=2
# if defined(__x86_64) || defined(__x86_64__)
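/* x86_64: a single bswap instruction reverses all eight (or four) bytes */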
# define BSWAP8(x) ({ u64 ret=(x); \
                       asm ("bswapq %0" \
                       : "+r"(ret)); ret; })
# define BSWAP4(x) ({ u32 ret=(x); \
                       asm ("bswapl %0" \
                       : "+r"(ret)); ret; })
# elif (defined(__i386) || defined(__i386__)) && !defined(I386_ONLY)
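/*
 * i386: no 64-bit registers, so BSWAP8 swaps the two 32-bit halves
 * individually and exchanges them. Note that "hi" is loaded with the
 * LOW word of x and "lo" with the HIGH word; after the two bswapl
 * instructions, (u64)hi<<32|lo is the fully byte-reversed value.
 */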
# define BSWAP8(x) ({ u32 lo=(u64)(x)>>32,hi=(x); \
                       asm ("bswapl %0; bswapl %1" \
                       : "+r"(hi),"+r"(lo)); \
                       (u64)hi<<32|lo; })
# define BSWAP4(x) ({ u32 ret=(x); \
                       asm ("bswapl %0" \
                       : "+r"(ret)); ret; })
# elif (defined(__arm__) || defined(__arm)) && !defined(STRICT_ALIGNMENT)
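/*
 * 32-bit ARM: the rev instruction (available from ARMv6 on) reverses
 * the bytes of a 32-bit word; BSWAP8 applies it to both halves.
 */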
# define BSWAP8(x) ({ u32 lo=(u64)(x)>>32,hi=(x); \
                       asm ("rev %0,%0; rev %1,%1" \
                       : "+r"(hi),"+r"(lo)); \
                       (u64)hi<<32|lo; })
# define BSWAP4(x) ({ u32 ret; \
                       asm ("rev %0,%1" \
                       : "=r"(ret) : "r"((u32)(x))); \
                       ret; })
# endif
#elif defined(_MSC_VER)
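/*
 * MSVC 7.0 (_MSC_VER 1300, Visual Studio .NET) and later provide
 * byte-swap compiler intrinsics; older 32-bit compilers fall back to
 * the inline assembler below.
 */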
# if _MSC_VER>=1300
# pragma intrinsic(_byteswap_uint64,_byteswap_ulong)
# define BSWAP8(x) _byteswap_uint64((u64)(x))
# define BSWAP4(x) _byteswap_ulong((u32)(x))
# elif defined(_M_IX86)
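/*
 * MSVC inline-asm functions may leave their return value in EAX, so no
 * explicit return statement is needed here (MSVC flags this as warning
 * C4035, which is benign in this idiom).
 */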
__inline u32 _bswap4(u32 val) {
    _asm mov eax,val
    _asm bswap eax
}
# define BSWAP4(x) _bswap4(x)
# endif
#endif
#endif

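/*
 * Minimal usage sketch (not part of the original header): when defined,
 * the macros above reverse byte order, e.g. BSWAP4(0x01020304) yields
 * 0x04030201. The function name below is hypothetical, and the block is
 * kept under "#if 0" so it never compiles as part of this header.
 */
#if 0
#include <assert.h>
static void bswap_selftest(void)
{
# if defined(BSWAP4)
    assert(BSWAP4((u32)0x01020304) == (u32)0x04030201);
# endif
# if defined(BSWAP8)
    assert(BSWAP8(U64(0x0102030405060708)) == U64(0x0807060504030201));
# endif
}
#endif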
#if defined(BSWAP4) && !defined(STRICT_ALIGNMENT)
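/*
 * Fast path: with BSWAP4 available and misaligned loads tolerated,
 * GETU32 reads four bytes at p as a big-endian 32-bit integer with a
 * single swapped word load instead of assembling it byte by byte.
 */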
#define GETU32(p) BSWAP4(*(const u32 *)(p))