/* CpuArch.h -- CPU specific code
2017-09-04 : Igor Pavlov : Public domain */

#ifndef __CPU_ARCH_H
#define __CPU_ARCH_H

#include "7zTypes.h"

EXTERN_C_BEGIN

/*
MY_CPU_LE means that the CPU is LITTLE ENDIAN.
MY_CPU_BE means that the CPU is BIG ENDIAN.
If neither MY_CPU_LE nor MY_CPU_BE is defined, the endianness of the platform is unknown.

MY_CPU_LE_UNALIGN means that the CPU is LITTLE ENDIAN and supports unaligned memory accesses.
*/
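
/*
  Illustrative sketch, not part of the original SDK header: code that consumes
  these macros typically branches on them at compile time. The strategy named
  in each branch is an assumption for this example, not a requirement of the
  header.

    #if defined(MY_CPU_LE)
      // little-endian host: memory layout already matches a little-endian
      // wire format, so direct loads are possible
    #elif defined(MY_CPU_BE)
      // big-endian host: byte-swap when reading or writing such fields
    #else
      // endianness unknown: rely on the byte-by-byte Get/Set macros below
    #endif
*/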

#if defined(_M_X64) \
   || defined(_M_AMD64) \
   || defined(__x86_64__) \
   || defined(__AMD64__) \
   || defined(__amd64__)
  #define MY_CPU_AMD64
  #ifdef __ILP32__
    #define MY_CPU_NAME "x32"
  #else
    #define MY_CPU_NAME "x64"
  #endif
  #define MY_CPU_64BIT
#endif


#if defined(_M_IX86) \
   || defined(__i386__)
  #define MY_CPU_X86
  #define MY_CPU_NAME "x86"
  #define MY_CPU_32BIT
#endif


#if defined(_M_ARM64) \
   || defined(__AARCH64EL__) \
   || defined(__AARCH64EB__) \
   || defined(__aarch64__)
  #define MY_CPU_ARM64
  #define MY_CPU_NAME "arm64"
  #define MY_CPU_64BIT
#endif


#if defined(_M_ARM) \
   || defined(_M_ARM_NT) \
   || defined(_M_ARMT) \
   || defined(__arm__) \
   || defined(__thumb__) \
   || defined(__ARMEL__) \
   || defined(__ARMEB__) \
   || defined(__THUMBEL__) \
   || defined(__THUMBEB__)
  #define MY_CPU_ARM
  #define MY_CPU_NAME "arm"
  #define MY_CPU_32BIT
#endif


#if defined(_M_IA64) \
   || defined(__ia64__)
  #define MY_CPU_IA64
  #define MY_CPU_NAME "ia64"
  #define MY_CPU_64BIT
#endif


#if defined(__mips64) \
   || defined(__mips64__) \
   || (defined(__mips) && (__mips == 64 || __mips == 4 || __mips == 3))
  #define MY_CPU_NAME "mips64"
  #define MY_CPU_64BIT
#elif defined(__mips__)
  #define MY_CPU_NAME "mips"
  /* #define MY_CPU_32BIT */
#endif


#if defined(__ppc64__) \
   || defined(__powerpc64__)
  #ifdef __ILP32__
    #define MY_CPU_NAME "ppc64-32"
  #else
    #define MY_CPU_NAME "ppc64"
  #endif
  #define MY_CPU_64BIT
#elif defined(__ppc__) \
   || defined(__powerpc__)
  #define MY_CPU_NAME "ppc"
  #define MY_CPU_32BIT
#endif


#if defined(__sparc64__)
  #define MY_CPU_NAME "sparc64"
  #define MY_CPU_64BIT
#elif defined(__sparc__)
  #define MY_CPU_NAME "sparc"
  /* #define MY_CPU_32BIT */
#endif


#if defined(MY_CPU_X86) || defined(MY_CPU_AMD64)
#define MY_CPU_X86_OR_AMD64
#endif


#ifdef _WIN32

  #ifdef MY_CPU_ARM
  #define MY_CPU_ARM_LE
  #endif

  #ifdef MY_CPU_ARM64
  #define MY_CPU_ARM64_LE
  #endif

  #ifdef _M_IA64
  #define MY_CPU_IA64_LE
  #endif

#endif


#if defined(MY_CPU_X86_OR_AMD64) \
    || defined(MY_CPU_ARM_LE) \
    || defined(MY_CPU_ARM64_LE) \
    || defined(MY_CPU_IA64_LE) \
    || defined(__LITTLE_ENDIAN__) \
    || defined(__ARMEL__) \
    || defined(__THUMBEL__) \
    || defined(__AARCH64EL__) \
    || defined(__MIPSEL__) \
    || defined(__MIPSEL) \
    || defined(_MIPSEL) \
    || defined(__BFIN__) \
    || (defined(__BYTE_ORDER__) && (__BYTE_ORDER__ == __ORDER_LITTLE_ENDIAN__))
  #define MY_CPU_LE
#endif

#if defined(__BIG_ENDIAN__) \
    || defined(__ARMEB__) \
    || defined(__THUMBEB__) \
    || defined(__AARCH64EB__) \
    || defined(__MIPSEB__) \
    || defined(__MIPSEB) \
    || defined(_MIPSEB) \
    || defined(__m68k__) \
    || defined(__s390__) \
    || defined(__s390x__) \
    || defined(__zarch__) \
    || (defined(__BYTE_ORDER__) && (__BYTE_ORDER__ == __ORDER_BIG_ENDIAN__))
  #define MY_CPU_BE
#endif


#if defined(MY_CPU_LE) && defined(MY_CPU_BE)
  #error Stop_Compiling_Bad_Endian
#endif


#if defined(MY_CPU_32BIT) && defined(MY_CPU_64BIT)
  #error Stop_Compiling_Bad_32_64_BIT
#endif


#ifndef MY_CPU_NAME
  #ifdef MY_CPU_LE
    #define MY_CPU_NAME "LE"
  #elif defined(MY_CPU_BE)
    #define MY_CPU_NAME "BE"
  #else
    /*
    #define MY_CPU_NAME ""
    */
  #endif
#endif
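
/*
  Illustrative sketch, not part of the original SDK header: MY_CPU_NAME, when
  defined, is a short string literal that can be used in diagnostic output.
  The function name PrintCpuName and the use of printf are assumptions made
  only for this example.

    #include <stdio.h>

    static void PrintCpuName(void)
    {
      #ifdef MY_CPU_NAME
        printf("CPU: %s\n", MY_CPU_NAME);   // e.g. "x64", "arm64", "LE", "BE"
      #else
        printf("CPU: unknown\n");           // MY_CPU_NAME can stay undefined
      #endif
    }
*/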



#ifdef MY_CPU_LE
  #if defined(MY_CPU_X86_OR_AMD64) \
      || defined(MY_CPU_ARM64) \
      || defined(__ARM_FEATURE_UNALIGNED)
    #define MY_CPU_LE_UNALIGN
  #endif
#endif


#ifdef MY_CPU_LE_UNALIGN

#define GetUi16(p) (*(const UInt16 *)(const void *)(p))
#define GetUi32(p) (*(const UInt32 *)(const void *)(p))
#define GetUi64(p) (*(const UInt64 *)(const void *)(p))

#define SetUi16(p, v) { *(UInt16 *)(p) = (v); }
#define SetUi32(p, v) { *(UInt32 *)(p) = (v); }
#define SetUi64(p, v) { *(UInt64 *)(p) = (v); }

#else

#define GetUi16(p) ( (UInt16) ( \
             ((const Byte *)(p))[0] | \
    ((UInt16)((const Byte *)(p))[1] << 8) ))

#define GetUi32(p) ( \
             ((const Byte *)(p))[0]        | \
    ((UInt32)((const Byte *)(p))[1] <<  8) | \
    ((UInt32)((const Byte *)(p))[2] << 16) | \
    ((UInt32)((const Byte *)(p))[3] << 24))

#define GetUi64(p) (GetUi32(p) | ((UInt64)GetUi32(((const Byte *)(p)) + 4) << 32))

#define SetUi16(p, v) { Byte *_ppp_ = (Byte *)(p); UInt32 _vvv_ = (v); \
    _ppp_[0] = (Byte)_vvv_; \
    _ppp_[1] = (Byte)(_vvv_ >> 8); }

#define SetUi32(p, v) { Byte *_ppp_ = (Byte *)(p); UInt32 _vvv_ = (v); \
    _ppp_[0] = (Byte)_vvv_; \
    _ppp_[1] = (Byte)(_vvv_ >> 8); \
    _ppp_[2] = (Byte)(_vvv_ >> 16); \
    _ppp_[3] = (Byte)(_vvv_ >> 24); }

#define SetUi64(p, v) { Byte *_ppp2_ = (Byte *)(p); UInt64 _vvv2_ = (v); \
    SetUi32(_ppp2_    , (UInt32)_vvv2_); \
    SetUi32(_ppp2_ + 4, (UInt32)(_vvv2_ >> 32)); }

#endif
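
/*
  Illustrative sketch, not part of the original SDK header: both definitions of
  GetUiNN / SetUiNN above implement the same contract -- read or write an
  NN-bit little-endian value at a possibly unaligned address. The direct-load
  path is only enabled on little-endian targets (MY_CPU_LE_UNALIGN), so it
  matches the portable byte-by-byte path. The buffer and constants below are
  made up for this example.

    Byte buf[4] = { 0x78, 0x56, 0x34, 0x12 };
    UInt32 v = GetUi32(buf);     // v == 0x12345678 on either path
    SetUi32(buf, 0xAABBCCDD);    // buf becomes { 0xDD, 0xCC, 0xBB, 0xAA }
*/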

#ifdef __has_builtin
  #define MY__has_builtin(x) __has_builtin(x)
#else
  #define MY__has_builtin(x) 0
#endif

#if defined(MY_CPU_LE_UNALIGN) && /* defined(_WIN64) && */ (_MSC_VER >= 1300)

/* Note: the _byteswap_* intrinsics use the bswap instruction, which is not available on the 80386 CPU. */

#include <stdlib.h>

#pragma intrinsic(_byteswap_ushort)
#pragma intrinsic(_byteswap_ulong)
#pragma intrinsic(_byteswap_uint64)

/* #define GetBe16(p) _byteswap_ushort(*(const UInt16 *)(const Byte *)(p)) */
#define GetBe32(p) _byteswap_ulong(*(const UInt32 *)(const Byte *)(p))
#define GetBe64(p) _byteswap_uint64(*(const UInt64 *)(const Byte *)(p))

#define SetBe32(p, v) (*(UInt32 *)(void *)(p)) = _byteswap_ulong(v)

#elif defined(MY_CPU_LE_UNALIGN) && ( \
       (defined(__GNUC__) && (__GNUC__ > 4 || (__GNUC__ == 4 && __GNUC_MINOR__ >= 3))) \
    || (defined(__clang__) && MY__has_builtin(__builtin_bswap16)) )

/* #define GetBe16(p) __builtin_bswap16(*(const UInt16 *)(const Byte *)(p)) */
#define GetBe32(p) __builtin_bswap32(*(const UInt32 *)(const Byte *)(p))
#define GetBe64(p) __builtin_bswap64(*(const UInt64 *)(const Byte *)(p))

#define SetBe32(p, v) (*(UInt32 *)(void *)(p)) = __builtin_bswap32(v)

#else

#define GetBe32(p) ( \
    ((UInt32)((const Byte *)(p))[0] << 24) | \
    ((UInt32)((const Byte *)(p))[1] << 16) | \
    ((UInt32)((const Byte *)(p))[2] <<  8) | \
             ((const Byte *)(p))[3] )

#define GetBe64(p) (((UInt64)GetBe32(p) << 32) | GetBe32(((const Byte *)(p)) + 4))

#define SetBe32(p, v) { Byte *_ppp_ = (Byte *)(p); UInt32 _vvv_ = (v); \
    _ppp_[0] = (Byte)(_vvv_ >> 24); \
    _ppp_[1] = (Byte)(_vvv_ >> 16); \
    _ppp_[2] = (Byte)(_vvv_ >> 8); \
    _ppp_[3] = (Byte)_vvv_; }

#endif


#ifndef GetBe16

#define GetBe16(p) ( (UInt16) ( \
    ((UInt16)((const Byte *)(p))[0] << 8) | \
             ((const Byte *)(p))[1] ))

#endif
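
/*
  Illustrative sketch, not part of the original SDK header: GetBeNN / SetBe32
  read and write big-endian values, using a byteswap intrinsic or builtin when
  one of the branches above applies, and byte-by-byte assembly otherwise. The
  header layout below is made up for this example.

    Byte hdr[6] = { 0x12, 0x34, 0xAA, 0xBB, 0xCC, 0xDD };
    UInt16 tag  = GetBe16(hdr);      // 0x1234
    UInt32 size = GetBe32(hdr + 2);  // 0xAABBCCDD
    SetBe32(hdr + 2, size + 1);      // stores bytes 0xAA 0xBB 0xCC 0xDE
*/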



#ifdef MY_CPU_X86_OR_AMD64

typedef struct
{
  UInt32 maxFunc;
  UInt32 vendor[3];
  UInt32 ver;
  UInt32 b;
  UInt32 c;
  UInt32 d;
} Cx86cpuid;

enum
{
  CPU_FIRM_INTEL,
  CPU_FIRM_AMD,
  CPU_FIRM_VIA
};

void MyCPUID(UInt32 function, UInt32 *a, UInt32 *b, UInt32 *c, UInt32 *d);

Bool x86cpuid_CheckAndRead(Cx86cpuid *p);
int x86cpuid_GetFirm(const Cx86cpuid *p);

#define x86cpuid_GetFamily(ver) (((ver >> 16) & 0xFF0) | ((ver >> 8) & 0xF))
#define x86cpuid_GetModel(ver)  (((ver >> 12) & 0xF0) | ((ver >> 4) & 0xF))
#define x86cpuid_GetStepping(ver) (ver & 0xF)

Bool CPU_Is_InOrder();
Bool CPU_Is_Aes_Supported();
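
/*
  Illustrative sketch, not part of the original SDK header: typical use of the
  CPUID helpers declared above. The local variable names are made up for this
  example.

    Cx86cpuid ci;
    if (x86cpuid_CheckAndRead(&ci))            // False if CPUID is not usable
    {
      int firm = x86cpuid_GetFirm(&ci);        // CPU_FIRM_INTEL / CPU_FIRM_AMD / CPU_FIRM_VIA
      unsigned family = x86cpuid_GetFamily(ci.ver);   // combines base and extended family fields
      unsigned model  = x86cpuid_GetModel(ci.ver);    // combines base and extended model fields
      unsigned step   = x86cpuid_GetStepping(ci.ver); // low 4 bits of the version dword
    }
*/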

#endif

EXTERN_C_END

#endif