/* mem.h — low-level memory access helpers (zstd common, vendored at ceph/src/zstd/lib/common/mem.h) */
/**
 * Copyright (c) 2016-present, Yann Collet, Facebook, Inc.
 * All rights reserved.
 *
 * This source code is licensed under the BSD-style license found in the
 * LICENSE file in the root directory of this source tree. An additional grant
 * of patent rights can be found in the PATENTS file in the same directory.
 */
13 #if defined (__cplusplus)
17 /*-****************************************
19 ******************************************/
20 #include <stddef.h> /* size_t, ptrdiff_t */
21 #include <string.h> /* memcpy */
24 /*-****************************************
26 ******************************************/
27 #if defined(_MSC_VER) /* Visual Studio */
28 # include <stdlib.h> /* _byteswap_ulong */
29 # include <intrin.h> /* _byteswap_* */
32 # define MEM_STATIC static __inline __attribute__((unused))
33 #elif defined (__cplusplus) || (defined (__STDC_VERSION__) && (__STDC_VERSION__ >= 199901L) /* C99 */)
34 # define MEM_STATIC static inline
35 #elif defined(_MSC_VER)
36 # define MEM_STATIC static __inline
38 # define MEM_STATIC static /* this version may generate warnings for unused static functions; disable the relevant warning */
41 /* code only tested on 32 and 64 bits systems */
42 #define MEM_STATIC_ASSERT(c) { enum { XXH_static_assert = 1/(int)(!!(c)) }; }
43 MEM_STATIC
void MEM_check(void) { MEM_STATIC_ASSERT((sizeof(size_t)==4) || (sizeof(size_t)==8)); }
/*-**************************************************************
*  Basic Types
*****************************************************************/
#if !defined (__VMS) && (defined (__cplusplus) || (defined (__STDC_VERSION__) && (__STDC_VERSION__ >= 199901L) /* C99 */) )
# include <stdint.h>
  typedef   uint8_t BYTE;
  typedef  uint16_t U16;
  typedef   int16_t S16;
  typedef  uint32_t U32;
  typedef   int32_t S32;
  typedef  uint64_t U64;
  typedef   int64_t S64;
  typedef  intptr_t iPtrDiff;
#else
  /* pre-C99 fallback : rely on conventional type widths */
  typedef unsigned char       BYTE;
  typedef unsigned short      U16;
  typedef   signed short      S16;
  typedef unsigned int        U32;
  typedef   signed int        S32;
  typedef unsigned long long  U64;
  typedef   signed long long  S64;
  typedef ptrdiff_t           iPtrDiff;
#endif
/*-**************************************************************
*  Memory I/O
*****************************************************************/
/* MEM_FORCE_MEMORY_ACCESS :
 * By default, access to unaligned memory is controlled by `memcpy()`, which is safe and portable.
 * Unfortunately, on some target/compiler combinations, the generated assembly is sub-optimal.
 * The below switch allow to select different access method for improved performance.
 * Method 0 (default) : use `memcpy()`. Safe and portable.
 * Method 1 : `__packed` statement. It depends on compiler extension (ie, not portable).
 *            This method is safe if your compiler supports it, and *generally* as fast or faster than `memcpy`.
 * Method 2 : direct access. This method is portable but violate C standard.
 *            It can generate buggy code on targets depending on alignment.
 *            In some circumstances, it's the only known way to get the most performance (ie GCC + ARMv6)
 * See http://fastcompression.blogspot.fr/2015/08/accessing-unaligned-memory.html for details.
 * Prefer these methods in priority order (0 > 1 > 2)
 */
#ifndef MEM_FORCE_MEMORY_ACCESS   /* can be defined externally, on command line for example */
#  if defined(__GNUC__) && ( defined(__ARM_ARCH_6__) || defined(__ARM_ARCH_6J__) || defined(__ARM_ARCH_6K__) || defined(__ARM_ARCH_6Z__) || defined(__ARM_ARCH_6ZK__) || defined(__ARM_ARCH_6T2__) )
#    define MEM_FORCE_MEMORY_ACCESS 2
#  elif defined(__INTEL_COMPILER) /*|| defined(_MSC_VER)*/ || \
  (defined(__GNUC__) && ( defined(__ARM_ARCH_7__) || defined(__ARM_ARCH_7A__) || defined(__ARM_ARCH_7R__) || defined(__ARM_ARCH_7M__) || defined(__ARM_ARCH_7S__) ))
#    define MEM_FORCE_MEMORY_ACCESS 1
#  endif
#endif
96 MEM_STATIC
unsigned MEM_32bits(void) { return sizeof(size_t)==4; }
97 MEM_STATIC
unsigned MEM_64bits(void) { return sizeof(size_t)==8; }
99 MEM_STATIC
unsigned MEM_isLittleEndian(void)
101 const union { U32 u
; BYTE c
[4]; } one
= { 1 }; /* don't use static : performance detrimental */
105 #if defined(MEM_FORCE_MEMORY_ACCESS) && (MEM_FORCE_MEMORY_ACCESS==2)
107 /* violates C standard, by lying on structure alignment.
108 Only use if no other choice to achieve best performance on target platform */
109 MEM_STATIC U16
MEM_read16(const void* memPtr
) { return *(const U16
*) memPtr
; }
110 MEM_STATIC U32
MEM_read32(const void* memPtr
) { return *(const U32
*) memPtr
; }
111 MEM_STATIC U64
MEM_read64(const void* memPtr
) { return *(const U64
*) memPtr
; }
112 MEM_STATIC U64
MEM_readST(const void* memPtr
) { return *(const size_t*) memPtr
; }
114 MEM_STATIC
void MEM_write16(void* memPtr
, U16 value
) { *(U16
*)memPtr
= value
; }
115 MEM_STATIC
void MEM_write32(void* memPtr
, U32 value
) { *(U32
*)memPtr
= value
; }
116 MEM_STATIC
void MEM_write64(void* memPtr
, U64 value
) { *(U64
*)memPtr
= value
; }
118 #elif defined(MEM_FORCE_MEMORY_ACCESS) && (MEM_FORCE_MEMORY_ACCESS==1)
120 /* __pack instructions are safer, but compiler specific, hence potentially problematic for some compilers */
121 /* currently only defined for gcc and icc */
122 #if defined(_MSC_VER) || (defined(__INTEL_COMPILER) && defined(WIN32))
123 __pragma( pack(push
, 1) )
124 typedef union { U16 u16
; U32 u32
; U64 u64
; size_t st
; } unalign
;
125 __pragma( pack(pop
) )
127 typedef union { U16 u16
; U32 u32
; U64 u64
; size_t st
; } __attribute__((packed
)) unalign
;
130 MEM_STATIC U16
MEM_read16(const void* ptr
) { return ((const unalign
*)ptr
)->u16
; }
131 MEM_STATIC U32
MEM_read32(const void* ptr
) { return ((const unalign
*)ptr
)->u32
; }
132 MEM_STATIC U64
MEM_read64(const void* ptr
) { return ((const unalign
*)ptr
)->u64
; }
133 MEM_STATIC U64
MEM_readST(const void* ptr
) { return ((const unalign
*)ptr
)->st
; }
135 MEM_STATIC
void MEM_write16(void* memPtr
, U16 value
) { ((unalign
*)memPtr
)->u16
= value
; }
136 MEM_STATIC
void MEM_write32(void* memPtr
, U32 value
) { ((unalign
*)memPtr
)->u32
= value
; }
137 MEM_STATIC
void MEM_write64(void* memPtr
, U64 value
) { ((unalign
*)memPtr
)->u64
= value
; }
141 /* default method, safe and standard.
142 can sometimes prove slower */
144 MEM_STATIC U16
MEM_read16(const void* memPtr
)
146 U16 val
; memcpy(&val
, memPtr
, sizeof(val
)); return val
;
149 MEM_STATIC U32
MEM_read32(const void* memPtr
)
151 U32 val
; memcpy(&val
, memPtr
, sizeof(val
)); return val
;
154 MEM_STATIC U64
MEM_read64(const void* memPtr
)
156 U64 val
; memcpy(&val
, memPtr
, sizeof(val
)); return val
;
159 MEM_STATIC
size_t MEM_readST(const void* memPtr
)
161 size_t val
; memcpy(&val
, memPtr
, sizeof(val
)); return val
;
164 MEM_STATIC
void MEM_write16(void* memPtr
, U16 value
)
166 memcpy(memPtr
, &value
, sizeof(value
));
169 MEM_STATIC
void MEM_write32(void* memPtr
, U32 value
)
171 memcpy(memPtr
, &value
, sizeof(value
));
174 MEM_STATIC
void MEM_write64(void* memPtr
, U64 value
)
176 memcpy(memPtr
, &value
, sizeof(value
));
179 #endif /* MEM_FORCE_MEMORY_ACCESS */
181 MEM_STATIC U32
MEM_swap32(U32 in
)
183 #if defined(_MSC_VER) /* Visual Studio */
184 return _byteswap_ulong(in
);
185 #elif defined (__GNUC__)
186 return __builtin_bswap32(in
);
188 return ((in
<< 24) & 0xff000000 ) |
189 ((in
<< 8) & 0x00ff0000 ) |
190 ((in
>> 8) & 0x0000ff00 ) |
191 ((in
>> 24) & 0x000000ff );
195 MEM_STATIC U64
MEM_swap64(U64 in
)
197 #if defined(_MSC_VER) /* Visual Studio */
198 return _byteswap_uint64(in
);
199 #elif defined (__GNUC__)
200 return __builtin_bswap64(in
);
202 return ((in
<< 56) & 0xff00000000000000ULL
) |
203 ((in
<< 40) & 0x00ff000000000000ULL
) |
204 ((in
<< 24) & 0x0000ff0000000000ULL
) |
205 ((in
<< 8) & 0x000000ff00000000ULL
) |
206 ((in
>> 8) & 0x00000000ff000000ULL
) |
207 ((in
>> 24) & 0x0000000000ff0000ULL
) |
208 ((in
>> 40) & 0x000000000000ff00ULL
) |
209 ((in
>> 56) & 0x00000000000000ffULL
);
213 MEM_STATIC
size_t MEM_swapST(size_t in
)
216 return (size_t)MEM_swap32((U32
)in
);
218 return (size_t)MEM_swap64((U64
)in
);
221 /*=== Little endian r/w ===*/
223 MEM_STATIC U16
MEM_readLE16(const void* memPtr
)
225 if (MEM_isLittleEndian())
226 return MEM_read16(memPtr
);
228 const BYTE
* p
= (const BYTE
*)memPtr
;
229 return (U16
)(p
[0] + (p
[1]<<8));
233 MEM_STATIC
void MEM_writeLE16(void* memPtr
, U16 val
)
235 if (MEM_isLittleEndian()) {
236 MEM_write16(memPtr
, val
);
238 BYTE
* p
= (BYTE
*)memPtr
;
240 p
[1] = (BYTE
)(val
>>8);
244 MEM_STATIC U32
MEM_readLE24(const void* memPtr
)
246 return MEM_readLE16(memPtr
) + (((const BYTE
*)memPtr
)[2] << 16);
249 MEM_STATIC
void MEM_writeLE24(void* memPtr
, U32 val
)
251 MEM_writeLE16(memPtr
, (U16
)val
);
252 ((BYTE
*)memPtr
)[2] = (BYTE
)(val
>>16);
255 MEM_STATIC U32
MEM_readLE32(const void* memPtr
)
257 if (MEM_isLittleEndian())
258 return MEM_read32(memPtr
);
260 return MEM_swap32(MEM_read32(memPtr
));
263 MEM_STATIC
void MEM_writeLE32(void* memPtr
, U32 val32
)
265 if (MEM_isLittleEndian())
266 MEM_write32(memPtr
, val32
);
268 MEM_write32(memPtr
, MEM_swap32(val32
));
271 MEM_STATIC U64
MEM_readLE64(const void* memPtr
)
273 if (MEM_isLittleEndian())
274 return MEM_read64(memPtr
);
276 return MEM_swap64(MEM_read64(memPtr
));
279 MEM_STATIC
void MEM_writeLE64(void* memPtr
, U64 val64
)
281 if (MEM_isLittleEndian())
282 MEM_write64(memPtr
, val64
);
284 MEM_write64(memPtr
, MEM_swap64(val64
));
287 MEM_STATIC
size_t MEM_readLEST(const void* memPtr
)
290 return (size_t)MEM_readLE32(memPtr
);
292 return (size_t)MEM_readLE64(memPtr
);
295 MEM_STATIC
void MEM_writeLEST(void* memPtr
, size_t val
)
298 MEM_writeLE32(memPtr
, (U32
)val
);
300 MEM_writeLE64(memPtr
, (U64
)val
);
303 /*=== Big endian r/w ===*/
305 MEM_STATIC U32
MEM_readBE32(const void* memPtr
)
307 if (MEM_isLittleEndian())
308 return MEM_swap32(MEM_read32(memPtr
));
310 return MEM_read32(memPtr
);
313 MEM_STATIC
void MEM_writeBE32(void* memPtr
, U32 val32
)
315 if (MEM_isLittleEndian())
316 MEM_write32(memPtr
, MEM_swap32(val32
));
318 MEM_write32(memPtr
, val32
);
321 MEM_STATIC U64
MEM_readBE64(const void* memPtr
)
323 if (MEM_isLittleEndian())
324 return MEM_swap64(MEM_read64(memPtr
));
326 return MEM_read64(memPtr
);
329 MEM_STATIC
void MEM_writeBE64(void* memPtr
, U64 val64
)
331 if (MEM_isLittleEndian())
332 MEM_write64(memPtr
, MEM_swap64(val64
));
334 MEM_write64(memPtr
, val64
);
337 MEM_STATIC
size_t MEM_readBEST(const void* memPtr
)
340 return (size_t)MEM_readBE32(memPtr
);
342 return (size_t)MEM_readBE64(memPtr
);
345 MEM_STATIC
void MEM_writeBEST(void* memPtr
, size_t val
)
348 MEM_writeBE32(memPtr
, (U32
)val
);
350 MEM_writeBE64(memPtr
, (U64
)val
);
354 /* function safe only for comparisons */
355 MEM_STATIC U32
MEM_readMINMATCH(const void* memPtr
, U32 length
)
360 case 4 : return MEM_read32(memPtr
);
361 case 3 : if (MEM_isLittleEndian())
362 return MEM_read32(memPtr
)<<8;
364 return MEM_read32(memPtr
)>>8;
368 #if defined (__cplusplus)
372 #endif /* MEM_H_MODULE */