MY_CPU_LE_UNALIGN means that CPU is LITTLE ENDIAN and CPU supports unaligned memory accesses.\r
*/\r
\r
-#if defined(_M_X64) \\r
- || defined(_M_AMD64) \\r
- || defined(__x86_64__) \\r
- || defined(__AMD64__) \\r
- || defined(__amd64__)\r
- #define MY_CPU_AMD64\r
+#if defined (_M_X64) \\r
+ || defined (_M_AMD64) \\r
+ || defined (__x86_64__) \\r
+ || defined (__AMD64__) \\r
+ || defined (__amd64__)\r
+#define MY_CPU_AMD64\r
#ifdef __ILP32__\r
- #define MY_CPU_NAME "x32"\r
+#define MY_CPU_NAME "x32"\r
#else\r
- #define MY_CPU_NAME "x64"\r
+#define MY_CPU_NAME "x64"\r
#endif\r
- #define MY_CPU_64BIT\r
+#define MY_CPU_64BIT\r
#endif\r
\r
-\r
-#if defined(_M_IX86) \\r
- || defined(__i386__)\r
- #define MY_CPU_X86\r
- #define MY_CPU_NAME "x86"\r
- #define MY_CPU_32BIT\r
+#if defined (_M_IX86) \\r
+ || defined (__i386__)\r
+#define MY_CPU_X86\r
+#define MY_CPU_NAME "x86"\r
+#define MY_CPU_32BIT\r
#endif\r
\r
-\r
-#if defined(_M_ARM64) \\r
- || defined(__AARCH64EL__) \\r
- || defined(__AARCH64EB__) \\r
- || defined(__aarch64__)\r
- #define MY_CPU_ARM64\r
- #define MY_CPU_NAME "arm64"\r
- #define MY_CPU_64BIT\r
+#if defined (_M_ARM64) \\r
+ || defined (__AARCH64EL__) \\r
+ || defined (__AARCH64EB__) \\r
+ || defined (__aarch64__)\r
+#define MY_CPU_ARM64\r
+#define MY_CPU_NAME "arm64"\r
+#define MY_CPU_64BIT\r
#endif\r
\r
-\r
-#if defined(_M_ARM) \\r
- || defined(_M_ARM_NT) \\r
- || defined(_M_ARMT) \\r
- || defined(__arm__) \\r
- || defined(__thumb__) \\r
- || defined(__ARMEL__) \\r
- || defined(__ARMEB__) \\r
- || defined(__THUMBEL__) \\r
- || defined(__THUMBEB__)\r
- #define MY_CPU_ARM\r
- #define MY_CPU_NAME "arm"\r
- #define MY_CPU_32BIT\r
+#if defined (_M_ARM) \\r
+ || defined (_M_ARM_NT) \\r
+ || defined (_M_ARMT) \\r
+ || defined (__arm__) \\r
+ || defined (__thumb__) \\r
+ || defined (__ARMEL__) \\r
+ || defined (__ARMEB__) \\r
+ || defined (__THUMBEL__) \\r
+ || defined (__THUMBEB__)\r
+#define MY_CPU_ARM\r
+#define MY_CPU_NAME "arm"\r
+#define MY_CPU_32BIT\r
#endif\r
\r
-\r
-#if defined(_M_IA64) \\r
- || defined(__ia64__)\r
- #define MY_CPU_IA64\r
- #define MY_CPU_NAME "ia64"\r
- #define MY_CPU_64BIT\r
+#if defined (_M_IA64) \\r
+ || defined (__ia64__)\r
+#define MY_CPU_IA64\r
+#define MY_CPU_NAME "ia64"\r
+#define MY_CPU_64BIT\r
#endif\r
\r
-\r
-#if defined(__mips64) \\r
- || defined(__mips64__) \\r
- || (defined(__mips) && (__mips == 64 || __mips == 4 || __mips == 3))\r
- #define MY_CPU_NAME "mips64"\r
- #define MY_CPU_64BIT\r
-#elif defined(__mips__)\r
- #define MY_CPU_NAME "mips"\r
- /* #define MY_CPU_32BIT */\r
+#if defined (__mips64) \\r
+ || defined (__mips64__) \\r
+ || (defined (__mips) && (__mips == 64 || __mips == 4 || __mips == 3))\r
+#define MY_CPU_NAME "mips64"\r
+#define MY_CPU_64BIT\r
+#elif defined (__mips__)\r
+#define MY_CPU_NAME "mips"\r
+/* #define MY_CPU_32BIT */\r
#endif\r
\r
-\r
-#if defined(__ppc64__) \\r
- || defined(__powerpc64__)\r
+#if defined (__ppc64__) \\r
+ || defined (__powerpc64__)\r
#ifdef __ILP32__\r
- #define MY_CPU_NAME "ppc64-32"\r
+#define MY_CPU_NAME "ppc64-32"\r
#else\r
- #define MY_CPU_NAME "ppc64"\r
+#define MY_CPU_NAME "ppc64"\r
#endif\r
- #define MY_CPU_64BIT\r
-#elif defined(__ppc__) \\r
- || defined(__powerpc__)\r
- #define MY_CPU_NAME "ppc"\r
- #define MY_CPU_32BIT\r
+#define MY_CPU_64BIT\r
+#elif defined (__ppc__) \\r
+ || defined (__powerpc__)\r
+#define MY_CPU_NAME "ppc"\r
+#define MY_CPU_32BIT\r
#endif\r
\r
-\r
-#if defined(__sparc64__)\r
- #define MY_CPU_NAME "sparc64"\r
- #define MY_CPU_64BIT\r
-#elif defined(__sparc__)\r
- #define MY_CPU_NAME "sparc"\r
- /* #define MY_CPU_32BIT */\r
+#if defined (__sparc64__)\r
+#define MY_CPU_NAME "sparc64"\r
+#define MY_CPU_64BIT\r
+#elif defined (__sparc__)\r
+#define MY_CPU_NAME "sparc"\r
+/* #define MY_CPU_32BIT */\r
#endif\r
\r
-\r
-#if defined(MY_CPU_X86) || defined(MY_CPU_AMD64)\r
+#if defined (MY_CPU_X86) || defined (MY_CPU_AMD64)\r
#define MY_CPU_X86_OR_AMD64\r
#endif\r
\r
-\r
#ifdef _WIN32\r
\r
#ifdef MY_CPU_ARM\r
- #define MY_CPU_ARM_LE\r
+#define MY_CPU_ARM_LE\r
#endif\r
\r
#ifdef MY_CPU_ARM64\r
- #define MY_CPU_ARM64_LE\r
+#define MY_CPU_ARM64_LE\r
#endif\r
\r
#ifdef _M_IA64\r
- #define MY_CPU_IA64_LE\r
+#define MY_CPU_IA64_LE\r
#endif\r
\r
#endif\r
\r
-\r
-#if defined(MY_CPU_X86_OR_AMD64) \\r
- || defined(MY_CPU_ARM_LE) \\r
- || defined(MY_CPU_ARM64_LE) \\r
- || defined(MY_CPU_IA64_LE) \\r
- || defined(__LITTLE_ENDIAN__) \\r
- || defined(__ARMEL__) \\r
- || defined(__THUMBEL__) \\r
- || defined(__AARCH64EL__) \\r
- || defined(__MIPSEL__) \\r
- || defined(__MIPSEL) \\r
- || defined(_MIPSEL) \\r
- || defined(__BFIN__) \\r
- || (defined(__BYTE_ORDER__) && (__BYTE_ORDER__ == __ORDER_LITTLE_ENDIAN__))\r
- #define MY_CPU_LE\r
+#if defined (MY_CPU_X86_OR_AMD64) \\r
+ || defined (MY_CPU_ARM_LE) \
+ || defined (MY_CPU_ARM64_LE) \
+ || defined (MY_CPU_IA64_LE) \
+ || defined (__LITTLE_ENDIAN__) \
+ || defined (__ARMEL__) \
+ || defined (__THUMBEL__) \
+ || defined (__AARCH64EL__) \
+ || defined (__MIPSEL__) \
+ || defined (__MIPSEL) \
+ || defined (_MIPSEL) \
+ || defined (__BFIN__) \
+ || (defined (__BYTE_ORDER__) && (__BYTE_ORDER__ == __ORDER_LITTLE_ENDIAN__))
+#define MY_CPU_LE\r
#endif\r
\r
-#if defined(__BIG_ENDIAN__) \\r
- || defined(__ARMEB__) \\r
- || defined(__THUMBEB__) \\r
- || defined(__AARCH64EB__) \\r
- || defined(__MIPSEB__) \\r
- || defined(__MIPSEB) \\r
- || defined(_MIPSEB) \\r
- || defined(__m68k__) \\r
- || defined(__s390__) \\r
- || defined(__s390x__) \\r
- || defined(__zarch__) \\r
- || (defined(__BYTE_ORDER__) && (__BYTE_ORDER__ == __ORDER_BIG_ENDIAN__))\r
- #define MY_CPU_BE\r
+#if defined (__BIG_ENDIAN__) \\r
+ || defined (__ARMEB__) \
+ || defined (__THUMBEB__) \
+ || defined (__AARCH64EB__) \
+ || defined (__MIPSEB__) \
+ || defined (__MIPSEB) \
+ || defined (_MIPSEB) \
+ || defined (__m68k__) \
+ || defined (__s390__) \
+ || defined (__s390x__) \
+ || defined (__zarch__) \
+ || (defined (__BYTE_ORDER__) && (__BYTE_ORDER__ == __ORDER_BIG_ENDIAN__))
+#define MY_CPU_BE\r
#endif\r
\r
-\r
-#if defined(MY_CPU_LE) && defined(MY_CPU_BE)\r
+#if defined (MY_CPU_LE) && defined (MY_CPU_BE)\r
#error Stop_Compiling_Bad_Endian\r
#endif\r
\r
-\r
-#if defined(MY_CPU_32BIT) && defined(MY_CPU_64BIT)\r
+#if defined (MY_CPU_32BIT) && defined (MY_CPU_64BIT)\r
#error Stop_Compiling_Bad_32_64_BIT\r
#endif\r
\r
-\r
#ifndef MY_CPU_NAME\r
#ifdef MY_CPU_LE\r
- #define MY_CPU_NAME "LE"\r
- #elif defined(MY_CPU_BE)\r
- #define MY_CPU_NAME "BE"\r
+#define MY_CPU_NAME "LE"\r
+ #elif defined (MY_CPU_BE)\r
+#define MY_CPU_NAME "BE"\r
#else\r
- /*\r
- #define MY_CPU_NAME ""\r
- */\r
+\r
+/*\r
+#define MY_CPU_NAME ""\r
+*/\r
#endif\r
#endif\r
\r
-\r
-\r
-\r
-\r
#ifdef MY_CPU_LE\r
- #if defined(MY_CPU_X86_OR_AMD64) \\r
- || defined(MY_CPU_ARM64) \\r
- || defined(__ARM_FEATURE_UNALIGNED)\r
- #define MY_CPU_LE_UNALIGN\r
+ #if defined (MY_CPU_X86_OR_AMD64) \
+ || defined (MY_CPU_ARM64) \
+ || defined (__ARM_FEATURE_UNALIGNED)
+#define MY_CPU_LE_UNALIGN\r
#endif\r
#endif\r
\r
-\r
#ifdef MY_CPU_LE_UNALIGN\r
\r
-#define GetUi16(p) (*(const UInt16 *)(const void *)(p))\r
-#define GetUi32(p) (*(const UInt32 *)(const void *)(p))\r
-#define GetUi64(p) (*(const UInt64 *)(const void *)(p))\r
+#define GetUi16(p) (*(const UInt16 *)(const void *)(p))\r
+#define GetUi32(p) (*(const UInt32 *)(const void *)(p))\r
+#define GetUi64(p) (*(const UInt64 *)(const void *)(p))\r
\r
-#define SetUi16(p, v) { *(UInt16 *)(p) = (v); }\r
-#define SetUi32(p, v) { *(UInt32 *)(p) = (v); }\r
-#define SetUi64(p, v) { *(UInt64 *)(p) = (v); }\r
+#define SetUi16(p, v) { *(UInt16 *)(p) = (v); }\r
+#define SetUi32(p, v) { *(UInt32 *)(p) = (v); }\r
+#define SetUi64(p, v) { *(UInt64 *)(p) = (v); }\r
\r
#else\r
\r
-#define GetUi16(p) ( (UInt16) ( \\r
+#define GetUi16(p) ( (UInt16) (\\r
((const Byte *)(p))[0] | \\r
((UInt16)((const Byte *)(p))[1] << 8) ))\r
\r
-#define GetUi32(p) ( \\r
+#define GetUi32(p) (\\r
((const Byte *)(p))[0] | \\r
((UInt32)((const Byte *)(p))[1] << 8) | \\r
((UInt32)((const Byte *)(p))[2] << 16) | \\r
((UInt32)((const Byte *)(p))[3] << 24))\r
\r
-#define GetUi64(p) (GetUi32(p) | ((UInt64)GetUi32(((const Byte *)(p)) + 4) << 32))\r
+#define GetUi64(p) (GetUi32(p) | ((UInt64)GetUi32(((const Byte *)(p)) + 4) << 32))\r
\r
-#define SetUi16(p, v) { Byte *_ppp_ = (Byte *)(p); UInt32 _vvv_ = (v); \\r
+#define SetUi16(p, v) { Byte *_ppp_ = (Byte *)(p); UInt32 _vvv_ = (v);\\r
_ppp_[0] = (Byte)_vvv_; \\r
_ppp_[1] = (Byte)(_vvv_ >> 8); }\r
\r
-#define SetUi32(p, v) { Byte *_ppp_ = (Byte *)(p); UInt32 _vvv_ = (v); \\r
+#define SetUi32(p, v) { Byte *_ppp_ = (Byte *)(p); UInt32 _vvv_ = (v);\\r
_ppp_[0] = (Byte)_vvv_; \\r
_ppp_[1] = (Byte)(_vvv_ >> 8); \\r
_ppp_[2] = (Byte)(_vvv_ >> 16); \\r
_ppp_[3] = (Byte)(_vvv_ >> 24); }\r
\r
-#define SetUi64(p, v) { Byte *_ppp2_ = (Byte *)(p); UInt64 _vvv2_ = (v); \\r
+#define SetUi64(p, v) { Byte *_ppp2_ = (Byte *)(p); UInt64 _vvv2_ = (v);\\r
SetUi32(_ppp2_ , (UInt32)_vvv2_); \\r
SetUi32(_ppp2_ + 4, (UInt32)(_vvv2_ >> 32)); }\r
\r
#endif\r
\r
#ifdef __has_builtin\r
- #define MY__has_builtin(x) __has_builtin(x)\r
+#define MY__has_builtin(x) __has_builtin(x)\r
#else\r
- #define MY__has_builtin(x) 0\r
+#define MY__has_builtin(x) 0\r
#endif\r
\r
-#if defined(MY_CPU_LE_UNALIGN) && /* defined(_WIN64) && */ (_MSC_VER >= 1300)\r
+#if defined (MY_CPU_LE_UNALIGN) && /* defined(_WIN64) && */ (_MSC_VER >= 1300)\r
\r
/* Note: we use bswap instruction, that is unsupported in 386 cpu */\r
\r
-#include <stdlib.h>\r
+ #include <stdlib.h>
\r
-#pragma intrinsic(_byteswap_ushort)\r
-#pragma intrinsic(_byteswap_ulong)\r
-#pragma intrinsic(_byteswap_uint64)\r
+ #pragma intrinsic(_byteswap_ushort)
+ #pragma intrinsic(_byteswap_ulong)
+ #pragma intrinsic(_byteswap_uint64)
\r
/* #define GetBe16(p) _byteswap_ushort(*(const UInt16 *)(const Byte *)(p)) */\r
-#define GetBe32(p) _byteswap_ulong(*(const UInt32 *)(const Byte *)(p))\r
-#define GetBe64(p) _byteswap_uint64(*(const UInt64 *)(const Byte *)(p))\r
+#define GetBe32(p) _byteswap_ulong(*(const UInt32 *)(const Byte *)(p))\r
+#define GetBe64(p) _byteswap_uint64(*(const UInt64 *)(const Byte *)(p))\r
\r
-#define SetBe32(p, v) (*(UInt32 *)(void *)(p)) = _byteswap_ulong(v)\r
+#define SetBe32(p, v) (*(UInt32 *)(void *)(p)) = _byteswap_ulong(v)\r
\r
-#elif defined(MY_CPU_LE_UNALIGN) && ( \\r
- (defined(__GNUC__) && (__GNUC__ > 4 || (__GNUC__ == 4 && __GNUC_MINOR__ >= 3))) \\r
- || (defined(__clang__) && MY__has_builtin(__builtin_bswap16)) )\r
+#elif defined (MY_CPU_LE_UNALIGN) && ( \\r
+ (defined (__GNUC__) && (__GNUC__ > 4 || (__GNUC__ == 4 && __GNUC_MINOR__ >= 3))) \\r
+ || (defined (__clang__) && MY__has_builtin (__builtin_bswap16)))\r
\r
/* #define GetBe16(p) __builtin_bswap16(*(const UInt16 *)(const Byte *)(p)) */\r
-#define GetBe32(p) __builtin_bswap32(*(const UInt32 *)(const Byte *)(p))\r
-#define GetBe64(p) __builtin_bswap64(*(const UInt64 *)(const Byte *)(p))\r
+#define GetBe32(p) __builtin_bswap32(*(const UInt32 *)(const Byte *)(p))\r
+#define GetBe64(p) __builtin_bswap64(*(const UInt64 *)(const Byte *)(p))\r
\r
-#define SetBe32(p, v) (*(UInt32 *)(void *)(p)) = __builtin_bswap32(v)\r
+#define SetBe32(p, v) (*(UInt32 *)(void *)(p)) = __builtin_bswap32(v)\r
\r
#else\r
\r
-#define GetBe32(p) ( \\r
+#define GetBe32(p) (\\r
((UInt32)((const Byte *)(p))[0] << 24) | \\r
((UInt32)((const Byte *)(p))[1] << 16) | \\r
((UInt32)((const Byte *)(p))[2] << 8) | \\r
((const Byte *)(p))[3] )\r
\r
-#define GetBe64(p) (((UInt64)GetBe32(p) << 32) | GetBe32(((const Byte *)(p)) + 4))\r
+#define GetBe64(p) (((UInt64)GetBe32(p) << 32) | GetBe32(((const Byte *)(p)) + 4))\r
\r
-#define SetBe32(p, v) { Byte *_ppp_ = (Byte *)(p); UInt32 _vvv_ = (v); \\r
+#define SetBe32(p, v) { Byte *_ppp_ = (Byte *)(p); UInt32 _vvv_ = (v);\\r
_ppp_[0] = (Byte)(_vvv_ >> 24); \\r
_ppp_[1] = (Byte)(_vvv_ >> 16); \\r
     _ppp_[2] = (Byte)(_vvv_ >> 8); \
     _ppp_[3] = (Byte)_vvv_; }

#endif\r
\r
-\r
#ifndef GetBe16\r
\r
-#define GetBe16(p) ( (UInt16) ( \\r
+#define GetBe16(p) ( (UInt16) (\\r
((UInt16)((const Byte *)(p))[0] << 8) | \\r
((const Byte *)(p))[1] ))\r
\r
#endif\r
\r
-\r
-\r
#ifdef MY_CPU_X86_OR_AMD64\r
\r
-typedef struct\r
-{\r
- UInt32 maxFunc;\r
- UInt32 vendor[3];\r
- UInt32 ver;\r
- UInt32 b;\r
- UInt32 c;\r
- UInt32 d;\r
+typedef struct {\r
+ UInt32 maxFunc;\r
+ UInt32 vendor[3];\r
+ UInt32 ver;\r
+ UInt32 b;\r
+ UInt32 c;\r
+ UInt32 d;\r
} Cx86cpuid;\r
\r
-enum\r
-{\r
+enum {\r
CPU_FIRM_INTEL,\r
CPU_FIRM_AMD,\r
CPU_FIRM_VIA\r
};\r
\r
-void MyCPUID(UInt32 function, UInt32 *a, UInt32 *b, UInt32 *c, UInt32 *d);\r
-\r
-BoolInt x86cpuid_CheckAndRead(Cx86cpuid *p);\r
-int x86cpuid_GetFirm(const Cx86cpuid *p);\r
-\r
-#define x86cpuid_GetFamily(ver) (((ver >> 16) & 0xFF0) | ((ver >> 8) & 0xF))\r
-#define x86cpuid_GetModel(ver) (((ver >> 12) & 0xF0) | ((ver >> 4) & 0xF))\r
-#define x86cpuid_GetStepping(ver) (ver & 0xF)\r
-\r
-BoolInt CPU_Is_InOrder();\r
-BoolInt CPU_Is_Aes_Supported();\r
-BoolInt CPU_IsSupported_PageGB();\r
+void\r
+MyCPUID (\r
+ UInt32 function,\r
+ UInt32 *a,\r
+ UInt32 *b,\r
+ UInt32 *c,\r
+ UInt32 *d\r
+ );\r
+\r
+BoolInt\r
+x86cpuid_CheckAndRead (\r
+ Cx86cpuid *p\r
+ );\r
+\r
+int\r
+x86cpuid_GetFirm (\r
+ const Cx86cpuid *p\r
+ );\r
+\r
+#define x86cpuid_GetFamily(ver) (((ver >> 16) & 0xFF0) | ((ver >> 8) & 0xF))\r
+#define x86cpuid_GetModel(ver) (((ver >> 12) & 0xF0) | ((ver >> 4) & 0xF))\r
+#define x86cpuid_GetStepping(ver) (ver & 0xF)\r
+\r
+BoolInt\r
+CPU_Is_InOrder (\r
+ );\r
+\r
+BoolInt\r
+CPU_Is_Aes_Supported (\r
+ );\r
+\r
+BoolInt\r
+CPU_IsSupported_PageGB (\r
+ );\r
\r
#endif\r
\r