/* CpuArch.h -- CPU specific code\r
-2016-06-09: Igor Pavlov : Public domain */\r
+2017-09-04 : Igor Pavlov : Public domain */\r
\r
#ifndef __CPU_ARCH_H\r
#define __CPU_ARCH_H\r
MY_CPU_LE_UNALIGN means that CPU is LITTLE ENDIAN and CPU supports unaligned memory accesses.\r
*/\r
\r
-#if defined(_M_X64) \\r
- || defined(_M_AMD64) \\r
- || defined(__x86_64__) \\r
- || defined(__AMD64__) \\r
- || defined(__amd64__)\r
+#if defined(_M_X64) \\r
+ || defined(_M_AMD64) \\r
+ || defined(__x86_64__) \\r
+ || defined(__AMD64__) \\r
+ || defined(__amd64__)\r
#define MY_CPU_AMD64\r
+ #ifdef __ILP32__\r
+ #define MY_CPU_NAME "x32"\r
+ #else\r
+ #define MY_CPU_NAME "x64"\r
+ #endif\r
+ #define MY_CPU_64BIT\r
#endif\r
\r
-#if defined(MY_CPU_AMD64) \\r
- || defined(_M_IA64) \\r
- || defined(__AARCH64EL__) \\r
- || defined(__AARCH64EB__)\r
+\r
+#if defined(_M_IX86) \\r
+ || defined(__i386__)\r
+ #define MY_CPU_X86\r
+ #define MY_CPU_NAME "x86"\r
+ #define MY_CPU_32BIT\r
+#endif\r
+\r
+\r
+#if defined(_M_ARM64) \\r
+ || defined(__AARCH64EL__) \\r
+ || defined(__AARCH64EB__) \\r
+ || defined(__aarch64__)\r
+ #define MY_CPU_ARM64\r
+ #define MY_CPU_NAME "arm64"\r
#define MY_CPU_64BIT\r
#endif\r
\r
-#if defined(_M_IX86) || defined(__i386__)\r
-#define MY_CPU_X86\r
+\r
+#if defined(_M_ARM) \\r
+ || defined(_M_ARM_NT) \\r
+ || defined(_M_ARMT) \\r
+ || defined(__arm__) \\r
+ || defined(__thumb__) \\r
+ || defined(__ARMEL__) \\r
+ || defined(__ARMEB__) \\r
+ || defined(__THUMBEL__) \\r
+ || defined(__THUMBEB__)\r
+ #define MY_CPU_ARM\r
+ #define MY_CPU_NAME "arm"\r
+ #define MY_CPU_32BIT\r
#endif\r
\r
-#if defined(MY_CPU_X86) || defined(MY_CPU_AMD64)\r
-#define MY_CPU_X86_OR_AMD64\r
+\r
+#if defined(_M_IA64) \\r
+ || defined(__ia64__)\r
+ #define MY_CPU_IA64\r
+ #define MY_CPU_NAME "ia64"\r
+ #define MY_CPU_64BIT\r
#endif\r
\r
-#if defined(MY_CPU_X86) \\r
- || defined(_M_ARM) \\r
- || defined(__ARMEL__) \\r
- || defined(__THUMBEL__) \\r
- || defined(__ARMEB__) \\r
- || defined(__THUMBEB__)\r
+\r
+#if defined(__mips64) \\r
+ || defined(__mips64__) \\r
+ || (defined(__mips) && (__mips == 64 || __mips == 4 || __mips == 3))\r
+ #define MY_CPU_NAME "mips64"\r
+ #define MY_CPU_64BIT\r
+#elif defined(__mips__)\r
+ #define MY_CPU_NAME "mips"\r
+ /* #define MY_CPU_32BIT */\r
+#endif\r
+\r
+\r
+#if defined(__ppc64__) \\r
+ || defined(__powerpc64__)\r
+ #ifdef __ILP32__\r
+ #define MY_CPU_NAME "ppc64-32"\r
+ #else\r
+ #define MY_CPU_NAME "ppc64"\r
+ #endif\r
+ #define MY_CPU_64BIT\r
+#elif defined(__ppc__) \\r
+ || defined(__powerpc__)\r
+ #define MY_CPU_NAME "ppc"\r
#define MY_CPU_32BIT\r
#endif\r
\r
-#if defined(_WIN32) && defined(_M_ARM)\r
-#define MY_CPU_ARM_LE\r
+\r
+#if defined(__sparc64__)\r
+ #define MY_CPU_NAME "sparc64"\r
+ #define MY_CPU_64BIT\r
+#elif defined(__sparc__)\r
+ #define MY_CPU_NAME "sparc"\r
+ /* #define MY_CPU_32BIT */\r
#endif\r
\r
-#if defined(_WIN32) && defined(_M_IA64)\r
-#define MY_CPU_IA64_LE\r
+\r
+#if defined(MY_CPU_X86) || defined(MY_CPU_AMD64)\r
+#define MY_CPU_X86_OR_AMD64\r
#endif\r
\r
+\r
+#ifdef _WIN32\r
+\r
+ #ifdef MY_CPU_ARM\r
+ #define MY_CPU_ARM_LE\r
+ #endif\r
+\r
+ #ifdef MY_CPU_ARM64\r
+ #define MY_CPU_ARM64_LE\r
+ #endif\r
+\r
+ #ifdef _M_IA64\r
+ #define MY_CPU_IA64_LE\r
+ #endif\r
+\r
+#endif\r
+\r
+\r
#if defined(MY_CPU_X86_OR_AMD64) \\r
|| defined(MY_CPU_ARM_LE) \\r
+ || defined(MY_CPU_ARM64_LE) \\r
|| defined(MY_CPU_IA64_LE) \\r
|| defined(__LITTLE_ENDIAN__) \\r
 || defined(__ARMEL__)
#define MY_CPU_LE
#endif\r
\r
+\r
#if defined(MY_CPU_LE) && defined(MY_CPU_BE)\r
-Stop_Compiling_Bad_Endian\r
+ #error Stop_Compiling_Bad_Endian\r
+#endif\r
+\r
+\r
+#if defined(MY_CPU_32BIT) && defined(MY_CPU_64BIT)\r
+ #error Stop_Compiling_Bad_32_64_BIT\r
#endif\r
\r
\r
+#ifndef MY_CPU_NAME\r
+ #ifdef MY_CPU_LE\r
+ #define MY_CPU_NAME "LE"\r
+ #elif defined(MY_CPU_BE)\r
+ #define MY_CPU_NAME "BE"\r
+ #else\r
+ /*\r
+ #define MY_CPU_NAME ""\r
+ */\r
+ #endif\r
+#endif\r
+\r
+\r
+\r
+\r
+\r
#ifdef MY_CPU_LE\r
#if defined(MY_CPU_X86_OR_AMD64) \\r
- /* || defined(__AARCH64EL__) */\r
+ || defined(MY_CPU_ARM64) \\r
+ || defined(__ARM_FEATURE_UNALIGNED)\r
#define MY_CPU_LE_UNALIGN\r
#endif\r
#endif\r
\r
#endif\r
\r
+#ifdef __has_builtin\r
+ #define MY__has_builtin(x) __has_builtin(x)\r
+#else\r
+ #define MY__has_builtin(x) 0\r
+#endif\r
\r
#if defined(MY_CPU_LE_UNALIGN) && /* defined(_WIN64) && */ (_MSC_VER >= 1300)\r
\r
\r
#include <stdlib.h>\r
\r
+#pragma intrinsic(_byteswap_ushort)\r
#pragma intrinsic(_byteswap_ulong)\r
#pragma intrinsic(_byteswap_uint64)\r
+\r
+/* #define GetBe16(p) _byteswap_ushort(*(const UInt16 *)(const Byte *)(p)) */\r
#define GetBe32(p) _byteswap_ulong(*(const UInt32 *)(const Byte *)(p))\r
#define GetBe64(p) _byteswap_uint64(*(const UInt64 *)(const Byte *)(p))\r
\r
#define SetBe32(p, v) (*(UInt32 *)(void *)(p)) = _byteswap_ulong(v)\r
\r
-#elif defined(MY_CPU_LE_UNALIGN) && defined (__GNUC__) && (__GNUC__ > 4 || (__GNUC__ == 4 && __GNUC_MINOR__ >= 3))\r
+#elif defined(MY_CPU_LE_UNALIGN) && ( \\r
+ (defined(__GNUC__) && (__GNUC__ > 4 || (__GNUC__ == 4 && __GNUC_MINOR__ >= 3))) \\r
+ || (defined(__clang__) && MY__has_builtin(__builtin_bswap16)) )\r
\r
+/* #define GetBe16(p) __builtin_bswap16(*(const UInt16 *)(const Byte *)(p)) */\r
#define GetBe32(p) __builtin_bswap32(*(const UInt32 *)(const Byte *)(p))\r
#define GetBe64(p) __builtin_bswap64(*(const UInt64 *)(const Byte *)(p))\r
\r
#endif\r
\r
\r
+#ifndef GetBe16\r
+\r
#define GetBe16(p) ( (UInt16) ( \\r
((UInt16)((const Byte *)(p))[0] << 8) | \\r
((const Byte *)(p))[1] ))\r
\r
+#endif\r
+\r
\r
\r
#ifdef MY_CPU_X86_OR_AMD64\r