/*
 * Copyright (c) 2016-2020, Yann Collet, Facebook, Inc.
 * All rights reserved.
 *
 * This source code is licensed under both the BSD-style license (found in the
 * LICENSE file in the root directory of this source tree) and the GPLv2 (found
 * in the COPYING file in the root directory of this source tree).
 * You may select, at your option, one of the above-listed licenses.
 */

#ifndef ZSTD_CCOMMON_H_MODULE
#define ZSTD_CCOMMON_H_MODULE

/* this module contains definitions which must be identical
 * across compression, decompression and dictBuilder.
 * It also contains a few functions useful to at least 2 of them
 * and which benefit from being inlined */

/*-*************************************
* Dependencies
***************************************/
#ifdef __aarch64__
#include <arm_neon.h>
#endif
#include "compiler.h"
#include "mem.h"
#include "debug.h"        /* assert, DEBUGLOG, RAWLOG, g_debuglevel */
#include "error_private.h"
#define ZSTD_STATIC_LINKING_ONLY
#include "../zstd.h"
#define FSE_STATIC_LINKING_ONLY
#include "fse.h"
#define HUF_STATIC_LINKING_ONLY
#include "huf.h"
#ifndef XXH_STATIC_LINKING_ONLY
#  define XXH_STATIC_LINKING_ONLY  /* XXH64_state_t */
#endif
#include "xxhash.h"       /* XXH_reset, update, digest */

#if defined (__cplusplus)
extern "C" {
#endif

/* ---- static assert (debug) --- */
#define ZSTD_STATIC_ASSERT(c) DEBUG_STATIC_ASSERT(c)
#define ZSTD_isError ERR_isError   /* for inlining */
#define FSE_isError  ERR_isError
#define HUF_isError  ERR_isError


/*-*************************************
* shared macros
***************************************/
#undef MIN
#undef MAX
#define MIN(a,b) ((a)<(b) ? (a) : (b))
#define MAX(a,b) ((a)>(b) ? (a) : (b))

/**
 * Ignore: this is an internal helper.
 *
 * This is a helper function to help force C99-correctness during compilation.
 * Under strict compilation modes, variadic macro arguments can't be empty.
 * However, variadic function arguments can be. Using a function therefore lets
 * us statically check that at least one (string) argument was passed,
 * independent of the compilation flags.
 */
static INLINE_KEYWORD UNUSED_ATTR
void _force_has_format_string(const char *format, ...) {
  (void)format;
}

/**
 * Ignore: this is an internal helper.
 *
 * We want to force this function invocation to be syntactically correct, but
 * we don't want to force runtime evaluation of its arguments.
 */
#define _FORCE_HAS_FORMAT_STRING(...) \
  if (0) { \
    _force_has_format_string(__VA_ARGS__); \
  }
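
/* Illustrative sketch (not part of the original source): the `if (0)` wrapper in
 * _FORCE_HAS_FORMAT_STRING means the call is type-checked but never executed, so
 * an invocation with no format string fails to compile while a correct invocation
 * costs nothing at runtime. The variable name below is hypothetical.
 *
 *     _FORCE_HAS_FORMAT_STRING("srcSize=%u", (unsigned)srcSize);  // type-checked, never runs
 *     _FORCE_HAS_FORMAT_STRING();   // rejected: at least one (string) argument is required
 */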

/**
 * Return the specified error if the condition evaluates to true.
 *
 * In debug modes, prints additional information.
 * In order to do that (particularly, printing the conditional that failed),
 * this can't just wrap RETURN_ERROR().
 */
#define RETURN_ERROR_IF(cond, err, ...) \
  if (cond) { \
    RAWLOG(3, "%s:%d: ERROR!: check %s failed, returning %s", \
           __FILE__, __LINE__, ZSTD_QUOTE(cond), ZSTD_QUOTE(ERROR(err))); \
    _FORCE_HAS_FORMAT_STRING(__VA_ARGS__); \
    RAWLOG(3, ": " __VA_ARGS__); \
    RAWLOG(3, "\n"); \
    return ERROR(err); \
  }
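
/* Usage sketch (illustrative, not from the original header): a bounds check at
 * the top of a hypothetical function. `dstSize_tooSmall` is a real zstd error
 * code; the function and variable names are made up for the example.
 *
 *     size_t ZSTD_exampleCopy(void* dst, size_t dstCapacity, const void* src, size_t srcSize)
 *     {
 *         RETURN_ERROR_IF(dstCapacity < srcSize, dstSize_tooSmall,
 *                         "need %u bytes, have %u", (unsigned)srcSize, (unsigned)dstCapacity);
 *         memcpy(dst, src, srcSize);
 *         return srcSize;
 *     }
 */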

/**
 * Unconditionally return the specified error.
 *
 * In debug modes, prints additional information.
 */
#define RETURN_ERROR(err, ...) \
  do { \
    RAWLOG(3, "%s:%d: ERROR!: unconditional check failed, returning %s", \
           __FILE__, __LINE__, ZSTD_QUOTE(ERROR(err))); \
    _FORCE_HAS_FORMAT_STRING(__VA_ARGS__); \
    RAWLOG(3, ": " __VA_ARGS__); \
    RAWLOG(3, "\n"); \
    return ERROR(err); \
  } while(0);
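
/* Usage sketch (illustrative): RETURN_ERROR is typically used on a path already
 * known to be invalid, e.g. an unsupported case in a switch. `corruption_detected`
 * is a real zstd error code; the surrounding code is hypothetical.
 *
 *     switch (blockType) {
 *     case bt_raw:
 *     case bt_rle:
 *     case bt_compressed:
 *         break;
 *     case bt_reserved:
 *         RETURN_ERROR(corruption_detected, "reserved block type");
 *     }
 */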

/**
 * If the provided expression evaluates to an error code, returns that error code.
 *
 * In debug modes, prints additional information.
 */
#define FORWARD_IF_ERROR(err, ...) \
  do { \
    size_t const err_code = (err); \
    if (ERR_isError(err_code)) { \
      RAWLOG(3, "%s:%d: ERROR!: forwarding error in %s: %s", \
             __FILE__, __LINE__, ZSTD_QUOTE(err), ERR_getErrorName(err_code)); \
      _FORCE_HAS_FORMAT_STRING(__VA_ARGS__); \
      RAWLOG(3, ": " __VA_ARGS__); \
      RAWLOG(3, "\n"); \
      return err_code; \
    } \
  } while(0);
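
/* Usage sketch (illustrative): FORWARD_IF_ERROR propagates a failure from a
 * helper without inspecting it further. The called function is hypothetical.
 *
 *     size_t ZSTD_exampleStage(ZSTD_CCtx* cctx, const void* src, size_t srcSize)
 *     {
 *         FORWARD_IF_ERROR(ZSTD_exampleSubStage(cctx, src, srcSize),
 *                          "sub-stage failed for %u input bytes", (unsigned)srcSize);
 *         return 0;
 *     }
 */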


/*-*************************************
* Common constants
***************************************/
#define ZSTD_OPT_NUM    (1<<12)

#define ZSTD_REP_NUM      3                 /* number of repcodes */
#define ZSTD_REP_MOVE     (ZSTD_REP_NUM-1)
static const U32 repStartValue[ZSTD_REP_NUM] = { 1, 4, 8 };

#define KB *(1 <<10)
#define MB *(1 <<20)
#define GB *(1U<<30)
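
/* Note (illustrative, not from the original source): KB/MB/GB are postfix
 * multipliers, e.g. `32 KB` expands to `32 *(1 <<10)` == 32768,
 * and `(3 GB)` to `(3 *(1U<<30))`. */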

#define BIT7 128
#define BIT6  64
#define BIT5  32
#define BIT4  16
#define BIT1   2
#define BIT0   1

#define ZSTD_WINDOWLOG_ABSOLUTEMIN 10
static const size_t ZSTD_fcs_fieldSize[4] = { 0, 2, 4, 8 };
static const size_t ZSTD_did_fieldSize[4] = { 0, 1, 2, 4 };

#define ZSTD_FRAMEIDSIZE 4   /* magic number size */

#define ZSTD_BLOCKHEADERSIZE 3   /* C standard doesn't allow a `static const` variable to be initialized from another `static const` variable */
static const size_t ZSTD_blockHeaderSize = ZSTD_BLOCKHEADERSIZE;
typedef enum { bt_raw, bt_rle, bt_compressed, bt_reserved } blockType_e;

#define ZSTD_FRAMECHECKSUMSIZE 4

#define MIN_SEQUENCES_SIZE 1 /* nbSeq==0 */
#define MIN_CBLOCK_SIZE (1 /*litCSize*/ + 1 /* RLE or RAW */ + MIN_SEQUENCES_SIZE /* nbSeq==0 */)   /* for a non-null block */

#define HufLog 12
typedef enum { set_basic, set_rle, set_compressed, set_repeat } symbolEncodingType_e;

#define LONGNBSEQ 0x7F00

#define MINMATCH 3

#define Litbits  8
#define MaxLit ((1<<Litbits) - 1)
#define MaxML   52
#define MaxLL   35
#define DefaultMaxOff 28
#define MaxOff  31
#define MaxSeq MAX(MaxLL, MaxML)   /* Assumption : MaxOff < MaxLL,MaxML */
#define MLFSELog    9
#define LLFSELog    9
#define OffFSELog   8
#define MaxFSELog  MAX(MAX(MLFSELog, LLFSELog), OffFSELog)

static const U32 LL_bits[MaxLL+1] = { 0, 0, 0, 0, 0, 0, 0, 0,
                                      0, 0, 0, 0, 0, 0, 0, 0,
                                      1, 1, 1, 1, 2, 2, 3, 3,
                                      4, 6, 7, 8, 9,10,11,12,
                                     13,14,15,16 };
static const S16 LL_defaultNorm[MaxLL+1] = { 4, 3, 2, 2, 2, 2, 2, 2,
                                             2, 2, 2, 2, 2, 1, 1, 1,
                                             2, 2, 2, 2, 2, 2, 2, 2,
                                             2, 3, 2, 1, 1, 1, 1, 1,
                                            -1,-1,-1,-1 };
#define LL_DEFAULTNORMLOG 6  /* for static allocation */
static const U32 LL_defaultNormLog = LL_DEFAULTNORMLOG;

static const U32 ML_bits[MaxML+1] = { 0, 0, 0, 0, 0, 0, 0, 0,
                                      0, 0, 0, 0, 0, 0, 0, 0,
                                      0, 0, 0, 0, 0, 0, 0, 0,
                                      0, 0, 0, 0, 0, 0, 0, 0,
                                      1, 1, 1, 1, 2, 2, 3, 3,
                                      4, 4, 5, 7, 8, 9,10,11,
                                     12,13,14,15,16 };
static const S16 ML_defaultNorm[MaxML+1] = { 1, 4, 3, 2, 2, 2, 2, 2,
                                             2, 1, 1, 1, 1, 1, 1, 1,
                                             1, 1, 1, 1, 1, 1, 1, 1,
                                             1, 1, 1, 1, 1, 1, 1, 1,
                                             1, 1, 1, 1, 1, 1, 1, 1,
                                             1, 1, 1, 1, 1, 1,-1,-1,
                                            -1,-1,-1,-1,-1 };
#define ML_DEFAULTNORMLOG 6  /* for static allocation */
static const U32 ML_defaultNormLog = ML_DEFAULTNORMLOG;

static const S16 OF_defaultNorm[DefaultMaxOff+1] = { 1, 1, 1, 1, 1, 1, 2, 2,
                                                     2, 1, 1, 1, 1, 1, 1, 1,
                                                     1, 1, 1, 1, 1, 1, 1, 1,
                                                    -1,-1,-1,-1,-1 };
#define OF_DEFAULTNORMLOG 5  /* for static allocation */
static const U32 OF_defaultNormLog = OF_DEFAULTNORMLOG;


/*-*******************************************
* Shared functions to include for inlining
*********************************************/
static void ZSTD_copy8(void* dst, const void* src) {
#ifdef __aarch64__
    vst1_u8((uint8_t*)dst, vld1_u8((const uint8_t*)src));
#else
    memcpy(dst, src, 8);
#endif
}

#define COPY8(d,s) { ZSTD_copy8(d,s); d+=8; s+=8; }
static void ZSTD_copy16(void* dst, const void* src) {
#ifdef __aarch64__
    vst1q_u8((uint8_t*)dst, vld1q_u8((const uint8_t*)src));
#else
    memcpy(dst, src, 16);
#endif
}
#define COPY16(d,s) { ZSTD_copy16(d,s); d+=16; s+=16; }

#define WILDCOPY_OVERLENGTH 32
#define WILDCOPY_VECLEN 16

typedef enum {
    ZSTD_no_overlap,
    ZSTD_overlap_src_before_dst
    /*  ZSTD_overlap_dst_before_src, */
} ZSTD_overlap_e;

/*! ZSTD_wildcopy() :
 *  Custom version of memcpy(), can over read/write up to WILDCOPY_OVERLENGTH bytes (if length==0)
 *  @param ovtype controls the overlap detection
 *         - ZSTD_no_overlap: The source and destination are guaranteed to be at least WILDCOPY_VECLEN bytes apart.
 *         - ZSTD_overlap_src_before_dst: The src and dst may overlap, but they MUST be at least 8 bytes apart.
 *           The src buffer must be before the dst buffer.
 */
MEM_STATIC FORCE_INLINE_ATTR
void ZSTD_wildcopy(void* dst, const void* src, ptrdiff_t length, ZSTD_overlap_e const ovtype)
{
    ptrdiff_t diff = (BYTE*)dst - (const BYTE*)src;
    const BYTE* ip = (const BYTE*)src;
    BYTE* op = (BYTE*)dst;
    BYTE* const oend = op + length;

    assert(diff >= 8 || (ovtype == ZSTD_no_overlap && diff <= -WILDCOPY_VECLEN));

    if (ovtype == ZSTD_overlap_src_before_dst && diff < WILDCOPY_VECLEN) {
        /* Handle short offset copies. */
        do {
            COPY8(op, ip)
        } while (op < oend);
    } else {
        assert(diff >= WILDCOPY_VECLEN || diff <= -WILDCOPY_VECLEN);
        /* Separate out the first COPY16() call because the copy length is
         * almost certain to be short, so the branches have different
         * probabilities. Since it is almost certain to be short, only do
         * one COPY16() in the first call. Then, do two calls per loop since
         * at that point it is more likely to have a high trip count.
         */
#ifndef __aarch64__
        do {
            COPY16(op, ip);
        }
        while (op < oend);
#else
        COPY16(op, ip);
        if (op >= oend) return;
        do {
            COPY16(op, ip);
            COPY16(op, ip);
        }
        while (op < oend);
#endif
    }
}
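
/* Usage sketch (illustrative, not part of the original header): callers keep
 * WILDCOPY_OVERLENGTH bytes of slack at the end of the destination, because
 * ZSTD_wildcopy may write past `dst + length`. Here `op`/`ip`/`oend` are
 * hypothetical output/input cursors assumed at least WILDCOPY_VECLEN bytes apart.
 *
 *     if ((size_t)(oend - op) >= copyLength + WILDCOPY_OVERLENGTH)
 *         ZSTD_wildcopy(op, ip, (ptrdiff_t)copyLength, ZSTD_no_overlap);
 *     else
 *         memcpy(op, ip, copyLength);   // too close to the end: use an exact copy instead
 */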

MEM_STATIC size_t ZSTD_limitCopy(void* dst, size_t dstCapacity, const void* src, size_t srcSize)
{
    size_t const length = MIN(dstCapacity, srcSize);
    if (length > 0) {
        memcpy(dst, src, length);
    }
    return length;
}

/* a workspace is considered "too large" when it is this many times larger than needed */
#define ZSTD_WORKSPACETOOLARGE_FACTOR 3

/* when a workspace remains too large for at least this many consecutive uses,
 * the context's memory usage is considered wasteful,
 * because it is sized for a worst-case scenario which rarely happens.
 * In that case, resize it down to free some memory */
#define ZSTD_WORKSPACETOOLARGE_MAXDURATION 128
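
/* Illustrative sketch (not the actual implementation): the compression context
 * can track how many consecutive jobs needed far less workspace than is
 * allocated, and shrink the workspace once the streak exceeds the threshold
 * above. Field and function names here are hypothetical.
 *
 *     if (currentWorkspaceSize > neededSize * ZSTD_WORKSPACETOOLARGE_FACTOR) {
 *         cctx->workspaceOversizedDuration++;
 *         if (cctx->workspaceOversizedDuration > ZSTD_WORKSPACETOOLARGE_MAXDURATION)
 *             ZSTD_exampleResizeWorkspace(cctx, neededSize);   // reallocate at the smaller size
 *     } else {
 *         cctx->workspaceOversizedDuration = 0;                // streak broken: keep current allocation
 *     }
 */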


/*-*******************************************
* Private declarations
*********************************************/
typedef struct seqDef_s {
    U32 offset;
    U16 litLength;
    U16 matchLength;
} seqDef;

typedef struct {
    seqDef* sequencesStart;
    seqDef* sequences;
    BYTE* litStart;
    BYTE* lit;
    BYTE* llCode;
    BYTE* mlCode;
    BYTE* ofCode;
    size_t maxNbSeq;
    size_t maxNbLit;
    U32   longLengthID;   /* 0 == no longLength; 1 == Lit.longLength; 2 == Match.longLength; */
    U32   longLengthPos;
} seqStore_t;

typedef struct {
    U32 litLength;
    U32 matchLength;
} ZSTD_sequenceLength;

/**
 * Returns the ZSTD_sequenceLength for the given sequence. It handles the decoding of long sequences
 * indicated by longLengthPos and longLengthID, and adds MINMATCH back to matchLength.
 */
MEM_STATIC ZSTD_sequenceLength ZSTD_getSequenceLength(seqStore_t const* seqStore, seqDef const* seq)
{
    ZSTD_sequenceLength seqLen;
    seqLen.litLength = seq->litLength;
    seqLen.matchLength = seq->matchLength + MINMATCH;
    if (seqStore->longLengthPos == (U32)(seq - seqStore->sequencesStart)) {
        if (seqStore->longLengthID == 1) {
            seqLen.litLength += 0xFFFF;
        }
        if (seqStore->longLengthID == 2) {
            seqLen.matchLength += 0xFFFF;
        }
    }
    return seqLen;
}
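
/* Usage sketch (illustrative): walking the sequences produced by the compressor
 * and recovering the full literal/match lengths. `seqStore` is assumed to come
 * from ZSTD_getSeqStore(); the loop variable names are made up.
 *
 *     size_t const nbSeq = (size_t)(seqStore->sequences - seqStore->sequencesStart);
 *     size_t i;
 *     for (i = 0; i < nbSeq; i++) {
 *         ZSTD_sequenceLength const sl = ZSTD_getSequenceLength(seqStore, seqStore->sequencesStart + i);
 *         totalSize += sl.litLength + sl.matchLength;
 *     }
 */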

/**
 * Contains the compressed frame size and an upper bound for the decompressed frame size.
 * Note: before using `compressedSize`, check for errors using ZSTD_isError().
 *       Similarly, before using `decompressedBound`, check for errors using:
 *          `decompressedBound != ZSTD_CONTENTSIZE_ERROR`
 */
typedef struct {
    size_t compressedSize;
    unsigned long long decompressedBound;
} ZSTD_frameSizeInfo;   /* decompress & legacy */
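
/* Usage sketch (illustrative): a hypothetical helper returning a ZSTD_frameSizeInfo
 * must have both fields validated before use, as the comment above describes.
 *
 *     ZSTD_frameSizeInfo const info = ZSTD_exampleFindFrameSizeInfo(src, srcSize);
 *     if (ZSTD_isError(info.compressedSize)) return info.compressedSize;
 *     if (info.decompressedBound == ZSTD_CONTENTSIZE_ERROR) return ERROR(corruption_detected);
 *     // both fields are now safe to use
 */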

const seqStore_t* ZSTD_getSeqStore(const ZSTD_CCtx* ctx);   /* compress & dictBuilder */
void ZSTD_seqToCodes(const seqStore_t* seqStorePtr);   /* compress, dictBuilder, decodeCorpus (shouldn't get its definition from here) */

/* custom memory allocation functions */
void* ZSTD_malloc(size_t size, ZSTD_customMem customMem);
void* ZSTD_calloc(size_t size, ZSTD_customMem customMem);
void ZSTD_free(void* ptr, ZSTD_customMem customMem);


MEM_STATIC U32 ZSTD_highbit32(U32 val)   /* compress, dictBuilder, decodeCorpus */
{
    assert(val != 0);
    {
#   if defined(_MSC_VER)   /* Visual */
        unsigned long r=0;
        return _BitScanReverse(&r, val) ? (unsigned)r : 0;
#   elif defined(__GNUC__) && (__GNUC__ >= 3)   /* GCC Intrinsic */
        return __builtin_clz (val) ^ 31;
#   elif defined(__ICCARM__)    /* IAR Intrinsic */
        return 31 - __CLZ(val);
#   else   /* Software version */
        static const U32 DeBruijnClz[32] = { 0, 9, 1, 10, 13, 21, 2, 29, 11, 14, 16, 18, 22, 25, 3, 30, 8, 12, 20, 28, 15, 17, 24, 7, 19, 27, 23, 6, 26, 5, 4, 31 };
        U32 v = val;
        v |= v >> 1;
        v |= v >> 2;
        v |= v >> 4;
        v |= v >> 8;
        v |= v >> 16;
        return DeBruijnClz[(v * 0x07C4ACDDU) >> 27];
#   endif
    }
}
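
/* Examples (illustrative): ZSTD_highbit32 returns the position of the highest
 * set bit, i.e. floor(log2(val)), and requires val != 0:
 *     ZSTD_highbit32(1)      == 0
 *     ZSTD_highbit32(32)     == 5
 *     ZSTD_highbit32(0xFFFF) == 15
 */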


/* ZSTD_invalidateRepCodes() :
 * ensures next compression will not use repcodes from previous block.
 * Note : only works with regular variant;
 *        do not use with extDict variant ! */
void ZSTD_invalidateRepCodes(ZSTD_CCtx* cctx);   /* zstdmt, adaptive_compression (shouldn't get this definition from here) */


typedef struct {
    blockType_e blockType;
    U32 lastBlock;
    U32 origSize;
} blockProperties_t;   /* declared here for decompress and fullbench */

/*! ZSTD_getcBlockSize() :
 *  Provides the size of compressed block from block header `src` */
/*  Used by: decompress, fullbench (does not get its definition from here) */
size_t ZSTD_getcBlockSize(const void* src, size_t srcSize,
                          blockProperties_t* bpPtr);
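
/* Usage sketch (illustrative): reading the 3-byte block header at the start of
 * each block, assuming `ip` points at it and `remainingSize` bytes are available.
 *
 *     blockProperties_t bp;
 *     size_t const cBlockSize = ZSTD_getcBlockSize(ip, remainingSize, &bp);
 *     if (ZSTD_isError(cBlockSize)) return cBlockSize;
 *     // bp.blockType is bt_raw, bt_rle or bt_compressed; bp.lastBlock flags the final block
 *     ip += ZSTD_blockHeaderSize;
 */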

/*! ZSTD_decodeSeqHeaders() :
 *  decode sequence header from src */
/*  Used by: decompress, fullbench (does not get its definition from here) */
size_t ZSTD_decodeSeqHeaders(ZSTD_DCtx* dctx, int* nbSeqPtr,
                             const void* src, size_t srcSize);


#if defined (__cplusplus)
}
#endif

#endif   /* ZSTD_CCOMMON_H_MODULE */