\r
#include <string.h> /* memcmp, memcpy, memset */\r
\r
-#include "../common/types.h"\r
+#include "../common/constants.h"\r
+#include "../common/platform.h"\r
+#include <brotli/types.h>\r
#include "./bit_cost.h"\r
#include "./brotli_bit_stream.h"\r
#include "./entropy_encode.h"\r
#include "./fast_log.h"\r
#include "./find_match_length.h"\r
#include "./memory.h"\r
-#include "./port.h"\r
#include "./write_bits.h"\r
\r
-\r
#if defined(__cplusplus) || defined(c_plusplus)\r
extern "C" {\r
#endif\r
\r
+#define MAX_DISTANCE (long)BROTLI_MAX_BACKWARD_LIMIT(18)\r
+\r
/* kHashMul32 multiplier has these properties:\r
* The multiplier must be odd. Otherwise we may lose the highest bit.\r
- * No long streaks of 1s or 0s.\r
+ * No long streaks of ones or zeros.\r
* There is no effort to ensure that it is a prime, the oddity is enough\r
for this use.\r
* The number has been tuned heuristically against compression benchmarks. */\r
-static const uint32_t kHashMul32 = 0x1e35a7bd;\r
+static const uint32_t kHashMul32 = 0x1E35A7BD;\r
\r
-static BROTLI_INLINE uint32_t Hash(const uint8_t* p, size_t shift) {\r
- const uint64_t h = (BROTLI_UNALIGNED_LOAD64(p) << 16) * kHashMul32;\r
+static BROTLI_INLINE uint32_t Hash(const uint8_t* p,\r
+ size_t shift, size_t length) {\r
+ const uint64_t h =\r
+ (BROTLI_UNALIGNED_LOAD64LE(p) << ((8 - length) * 8)) * kHashMul32;\r
return (uint32_t)(h >> shift);\r
}\r
\r
-static BROTLI_INLINE uint32_t HashBytesAtOffset(\r
- uint64_t v, int offset, size_t shift) {\r
- assert(offset >= 0);\r
- assert(offset <= 2);\r
+static BROTLI_INLINE uint32_t HashBytesAtOffset(uint64_t v, size_t offset,\r
+ size_t shift, size_t length) {\r
+ BROTLI_DCHECK(offset <= 8 - length);\r
{\r
- const uint64_t h = ((v >> (8 * offset)) << 16) * kHashMul32;\r
+ const uint64_t h = ((v >> (8 * offset)) << ((8 - length) * 8)) * kHashMul32;\r
return (uint32_t)(h >> shift);\r
}\r
}\r
\r
-static BROTLI_INLINE BROTLI_BOOL IsMatch(const uint8_t* p1, const uint8_t* p2) {\r
- return TO_BROTLI_BOOL(\r
- BROTLI_UNALIGNED_LOAD32(p1) == BROTLI_UNALIGNED_LOAD32(p2) &&\r
- p1[4] == p2[4] &&\r
- p1[5] == p2[5]);\r
+static BROTLI_INLINE BROTLI_BOOL IsMatch(const uint8_t* p1, const uint8_t* p2,\r
+ size_t length) {\r
+ if (BrotliUnalignedRead32(p1) == BrotliUnalignedRead32(p2)) {\r
+ if (length == 4) return BROTLI_TRUE;\r
+ return TO_BROTLI_BOOL(p1[4] == p2[4] && p1[5] == p2[5]);\r
+ }\r
+ return BROTLI_FALSE;\r
}\r
\r
/* Builds a command and distance prefix code (each 64 symbols) into "depth" and\r
uint16_t cmd_bits[64];\r
BrotliCreateHuffmanTree(histogram, 64, 15, tree, depth);\r
BrotliCreateHuffmanTree(&histogram[64], 64, 14, tree, &depth[64]);\r
- /* We have to jump through a few hoopes here in order to compute\r
+ /* We have to jump through a few hoops here in order to compute\r
the command bits because the symbols are in a different order than in\r
the full alphabet. This looks complicated, but having the symbols\r
in this order in the command bits saves a few branches in the Emit*\r
++(*commands);\r
}\r
\r
-/* REQUIRES: len <= 1 << 20. */\r
+/* REQUIRES: len <= 1 << 24. */\r
static void BrotliStoreMetaBlockHeader(\r
size_t len, BROTLI_BOOL is_uncompressed, size_t* storage_ix,\r
uint8_t* storage) {\r
+ size_t nibbles = 6;\r
/* ISLAST */\r
BrotliWriteBits(1, 0, storage_ix, storage);\r
if (len <= (1U << 16)) {\r
- /* MNIBBLES is 4 */\r
- BrotliWriteBits(2, 0, storage_ix, storage);\r
- BrotliWriteBits(16, len - 1, storage_ix, storage);\r
- } else {\r
- /* MNIBBLES is 5 */\r
- BrotliWriteBits(2, 1, storage_ix, storage);\r
- BrotliWriteBits(20, len - 1, storage_ix, storage);\r
+ nibbles = 4;\r
+ } else if (len <= (1U << 20)) {\r
+ nibbles = 5;\r
}\r
+ BrotliWriteBits(2, nibbles - 4, storage_ix, storage);\r
+ BrotliWriteBits(nibbles * 4, len - 1, storage_ix, storage);\r
/* ISUNCOMPRESSED */\r
BrotliWriteBits(1, (uint64_t)is_uncompressed, storage_ix, storage);\r
}\r
\r
-static void CreateCommands(const uint8_t* input, size_t block_size,\r
- size_t input_size, const uint8_t* base_ip, int* table, size_t table_size,\r
+static BROTLI_INLINE void CreateCommands(const uint8_t* input,\r
+ size_t block_size, size_t input_size, const uint8_t* base_ip, int* table,\r
+ size_t table_bits, size_t min_match,\r
uint8_t** literals, uint32_t** commands) {\r
/* "ip" is the input pointer. */\r
const uint8_t* ip = input;\r
- const size_t shift = 64u - Log2FloorNonZero(table_size);\r
+ const size_t shift = 64u - table_bits;\r
const uint8_t* ip_end = input + block_size;\r
/* "next_emit" is a pointer to the first byte that is not covered by a\r
previous copy. Bytes between "next_emit" and the start of the next copy or\r
const uint8_t* next_emit = input;\r
\r
int last_distance = -1;\r
- const size_t kInputMarginBytes = 16;\r
- const size_t kMinMatchLen = 6;\r
-\r
- assert(table_size);\r
- assert(table_size <= (1u << 31));\r
- /* table must be power of two */\r
- assert((table_size & (table_size - 1)) == 0);\r
- assert(table_size - 1 ==\r
- (size_t)(MAKE_UINT64_T(0xFFFFFFFF, 0xFFFFFF) >> shift));\r
+ const size_t kInputMarginBytes = BROTLI_WINDOW_GAP;\r
\r
- if (PREDICT_TRUE(block_size >= kInputMarginBytes)) {\r
+ if (BROTLI_PREDICT_TRUE(block_size >= kInputMarginBytes)) {\r
/* For the last block, we need to keep a 16 bytes margin so that we can be\r
sure that all distances are at most window size - 16.\r
For all other blocks, we only need to keep a margin of 5 bytes so that\r
we don't go over the block size with a copy. */\r
- const size_t len_limit = BROTLI_MIN(size_t, block_size - kMinMatchLen,\r
+ const size_t len_limit = BROTLI_MIN(size_t, block_size - min_match,\r
input_size - kInputMarginBytes);\r
const uint8_t* ip_limit = input + len_limit;\r
\r
uint32_t next_hash;\r
- for (next_hash = Hash(++ip, shift); ; ) {\r
+ for (next_hash = Hash(++ip, shift, min_match); ; ) {\r
/* Step 1: Scan forward in the input looking for a 6-byte-long match.\r
If we get close to exhausting the input then goto emit_remainder.\r
\r
const uint8_t* next_ip = ip;\r
const uint8_t* candidate;\r
\r
- assert(next_emit < ip);\r
-\r
+ BROTLI_DCHECK(next_emit < ip);\r
+trawl:\r
do {\r
uint32_t hash = next_hash;\r
uint32_t bytes_between_hash_lookups = skip++ >> 5;\r
ip = next_ip;\r
- assert(hash == Hash(ip, shift));\r
+ BROTLI_DCHECK(hash == Hash(ip, shift, min_match));\r
next_ip = ip + bytes_between_hash_lookups;\r
- if (PREDICT_FALSE(next_ip > ip_limit)) {\r
+ if (BROTLI_PREDICT_FALSE(next_ip > ip_limit)) {\r
goto emit_remainder;\r
}\r
- next_hash = Hash(next_ip, shift);\r
+ next_hash = Hash(next_ip, shift, min_match);\r
candidate = ip - last_distance;\r
- if (IsMatch(ip, candidate)) {\r
- if (PREDICT_TRUE(candidate < ip)) {\r
+ if (IsMatch(ip, candidate, min_match)) {\r
+ if (BROTLI_PREDICT_TRUE(candidate < ip)) {\r
table[hash] = (int)(ip - base_ip);\r
break;\r
}\r
}\r
candidate = base_ip + table[hash];\r
- assert(candidate >= base_ip);\r
- assert(candidate < ip);\r
+ BROTLI_DCHECK(candidate >= base_ip);\r
+ BROTLI_DCHECK(candidate < ip);\r
\r
table[hash] = (int)(ip - base_ip);\r
- } while (PREDICT_TRUE(!IsMatch(ip, candidate)));\r
+ } while (BROTLI_PREDICT_TRUE(!IsMatch(ip, candidate, min_match)));\r
+\r
+ /* Check copy distance. If candidate is not feasible, continue search.\r
+ Checking is done outside of hot loop to reduce overhead. */\r
+ if (ip - candidate > MAX_DISTANCE) goto trawl;\r
\r
/* Step 2: Emit the found match together with the literal bytes from\r
- "next_emit", and then see if we can find a next macth immediately\r
+ "next_emit", and then see if we can find a next match immediately\r
afterwards. Repeat until we find no match for the input\r
without emitting some literal bytes. */\r
\r
/* We have a 6-byte match at ip, and we need to emit bytes in\r
[next_emit, ip). */\r
const uint8_t* base = ip;\r
- size_t matched = 6 + FindMatchLengthWithLimit(\r
- candidate + 6, ip + 6, (size_t)(ip_end - ip) - 6);\r
+ size_t matched = min_match + FindMatchLengthWithLimit(\r
+ candidate + min_match, ip + min_match,\r
+ (size_t)(ip_end - ip) - min_match);\r
int distance = (int)(base - candidate); /* > 0 */\r
int insert = (int)(base - next_emit);\r
ip += matched;\r
- assert(0 == memcmp(base, candidate, matched));\r
+ BROTLI_DCHECK(0 == memcmp(base, candidate, matched));\r
EmitInsertLen((uint32_t)insert, commands);\r
memcpy(*literals, next_emit, (size_t)insert);\r
*literals += insert;\r
EmitCopyLenLastDistance(matched, commands);\r
\r
next_emit = ip;\r
- if (PREDICT_FALSE(ip >= ip_limit)) {\r
+ if (BROTLI_PREDICT_FALSE(ip >= ip_limit)) {\r
goto emit_remainder;\r
}\r
{\r
/* We could immediately start working at ip now, but to improve\r
compression we first update "table" with the hashes of some\r
positions within the last copy. */\r
- uint64_t input_bytes = BROTLI_UNALIGNED_LOAD64(ip - 5);\r
- uint32_t prev_hash = HashBytesAtOffset(input_bytes, 0, shift);\r
+ uint64_t input_bytes;\r
uint32_t cur_hash;\r
- table[prev_hash] = (int)(ip - base_ip - 5);\r
- prev_hash = HashBytesAtOffset(input_bytes, 1, shift);\r
- table[prev_hash] = (int)(ip - base_ip - 4);\r
- prev_hash = HashBytesAtOffset(input_bytes, 2, shift);\r
- table[prev_hash] = (int)(ip - base_ip - 3);\r
- input_bytes = BROTLI_UNALIGNED_LOAD64(ip - 2);\r
- cur_hash = HashBytesAtOffset(input_bytes, 2, shift);\r
- prev_hash = HashBytesAtOffset(input_bytes, 0, shift);\r
- table[prev_hash] = (int)(ip - base_ip - 2);\r
- prev_hash = HashBytesAtOffset(input_bytes, 1, shift);\r
- table[prev_hash] = (int)(ip - base_ip - 1);\r
+ uint32_t prev_hash;\r
+ if (min_match == 4) {\r
+ input_bytes = BROTLI_UNALIGNED_LOAD64LE(ip - 3);\r
+ cur_hash = HashBytesAtOffset(input_bytes, 3, shift, min_match);\r
+ prev_hash = HashBytesAtOffset(input_bytes, 0, shift, min_match);\r
+ table[prev_hash] = (int)(ip - base_ip - 3);\r
+ prev_hash = HashBytesAtOffset(input_bytes, 1, shift, min_match);\r
+ table[prev_hash] = (int)(ip - base_ip - 2);\r
+            prev_hash = HashBytesAtOffset(input_bytes, 2, shift, min_match);
+ table[prev_hash] = (int)(ip - base_ip - 1);\r
+ } else {\r
+ input_bytes = BROTLI_UNALIGNED_LOAD64LE(ip - 5);\r
+ prev_hash = HashBytesAtOffset(input_bytes, 0, shift, min_match);\r
+ table[prev_hash] = (int)(ip - base_ip - 5);\r
+ prev_hash = HashBytesAtOffset(input_bytes, 1, shift, min_match);\r
+ table[prev_hash] = (int)(ip - base_ip - 4);\r
+ prev_hash = HashBytesAtOffset(input_bytes, 2, shift, min_match);\r
+ table[prev_hash] = (int)(ip - base_ip - 3);\r
+ input_bytes = BROTLI_UNALIGNED_LOAD64LE(ip - 2);\r
+ cur_hash = HashBytesAtOffset(input_bytes, 2, shift, min_match);\r
+ prev_hash = HashBytesAtOffset(input_bytes, 0, shift, min_match);\r
+ table[prev_hash] = (int)(ip - base_ip - 2);\r
+ prev_hash = HashBytesAtOffset(input_bytes, 1, shift, min_match);\r
+ table[prev_hash] = (int)(ip - base_ip - 1);\r
+ }\r
\r
candidate = base_ip + table[cur_hash];\r
table[cur_hash] = (int)(ip - base_ip);\r
}\r
}\r
\r
- while (IsMatch(ip, candidate)) {\r
+ while (ip - candidate <= MAX_DISTANCE &&\r
+ IsMatch(ip, candidate, min_match)) {\r
/* We have a 6-byte match at ip, and no need to emit any\r
literal bytes prior to ip. */\r
const uint8_t* base = ip;\r
- size_t matched = 6 + FindMatchLengthWithLimit(\r
- candidate + 6, ip + 6, (size_t)(ip_end - ip) - 6);\r
+ size_t matched = min_match + FindMatchLengthWithLimit(\r
+ candidate + min_match, ip + min_match,\r
+ (size_t)(ip_end - ip) - min_match);\r
ip += matched;\r
last_distance = (int)(base - candidate); /* > 0 */\r
- assert(0 == memcmp(base, candidate, matched));\r
+ BROTLI_DCHECK(0 == memcmp(base, candidate, matched));\r
EmitCopyLen(matched, commands);\r
EmitDistance((uint32_t)last_distance, commands);\r
\r
next_emit = ip;\r
- if (PREDICT_FALSE(ip >= ip_limit)) {\r
+ if (BROTLI_PREDICT_FALSE(ip >= ip_limit)) {\r
goto emit_remainder;\r
}\r
{\r
/* We could immediately start working at ip now, but to improve\r
compression we first update "table" with the hashes of some\r
positions within the last copy. */\r
- uint64_t input_bytes = BROTLI_UNALIGNED_LOAD64(ip - 5);\r
- uint32_t prev_hash = HashBytesAtOffset(input_bytes, 0, shift);\r
+ uint64_t input_bytes;\r
uint32_t cur_hash;\r
- table[prev_hash] = (int)(ip - base_ip - 5);\r
- prev_hash = HashBytesAtOffset(input_bytes, 1, shift);\r
- table[prev_hash] = (int)(ip - base_ip - 4);\r
- prev_hash = HashBytesAtOffset(input_bytes, 2, shift);\r
- table[prev_hash] = (int)(ip - base_ip - 3);\r
- input_bytes = BROTLI_UNALIGNED_LOAD64(ip - 2);\r
- cur_hash = HashBytesAtOffset(input_bytes, 2, shift);\r
- prev_hash = HashBytesAtOffset(input_bytes, 0, shift);\r
- table[prev_hash] = (int)(ip - base_ip - 2);\r
- prev_hash = HashBytesAtOffset(input_bytes, 1, shift);\r
- table[prev_hash] = (int)(ip - base_ip - 1);\r
+ uint32_t prev_hash;\r
+ if (min_match == 4) {\r
+ input_bytes = BROTLI_UNALIGNED_LOAD64LE(ip - 3);\r
+ cur_hash = HashBytesAtOffset(input_bytes, 3, shift, min_match);\r
+ prev_hash = HashBytesAtOffset(input_bytes, 0, shift, min_match);\r
+ table[prev_hash] = (int)(ip - base_ip - 3);\r
+ prev_hash = HashBytesAtOffset(input_bytes, 1, shift, min_match);\r
+ table[prev_hash] = (int)(ip - base_ip - 2);\r
+ prev_hash = HashBytesAtOffset(input_bytes, 2, shift, min_match);\r
+ table[prev_hash] = (int)(ip - base_ip - 1);\r
+ } else {\r
+ input_bytes = BROTLI_UNALIGNED_LOAD64LE(ip - 5);\r
+ prev_hash = HashBytesAtOffset(input_bytes, 0, shift, min_match);\r
+ table[prev_hash] = (int)(ip - base_ip - 5);\r
+ prev_hash = HashBytesAtOffset(input_bytes, 1, shift, min_match);\r
+ table[prev_hash] = (int)(ip - base_ip - 4);\r
+ prev_hash = HashBytesAtOffset(input_bytes, 2, shift, min_match);\r
+ table[prev_hash] = (int)(ip - base_ip - 3);\r
+ input_bytes = BROTLI_UNALIGNED_LOAD64LE(ip - 2);\r
+ cur_hash = HashBytesAtOffset(input_bytes, 2, shift, min_match);\r
+ prev_hash = HashBytesAtOffset(input_bytes, 0, shift, min_match);\r
+ table[prev_hash] = (int)(ip - base_ip - 2);\r
+ prev_hash = HashBytesAtOffset(input_bytes, 1, shift, min_match);\r
+ table[prev_hash] = (int)(ip - base_ip - 1);\r
+ }\r
\r
candidate = base_ip + table[cur_hash];\r
table[cur_hash] = (int)(ip - base_ip);\r
}\r
}\r
\r
- next_hash = Hash(++ip, shift);\r
+ next_hash = Hash(++ip, shift, min_match);\r
}\r
}\r
\r
emit_remainder:\r
- assert(next_emit <= ip_end);\r
+ BROTLI_DCHECK(next_emit <= ip_end);\r
/* Emit the remaining bytes as literals. */\r
if (next_emit < ip_end) {\r
const uint32_t insert = (uint32_t)(ip_end - next_emit);\r
if (BROTLI_IS_OOM(m)) return;\r
\r
for (i = 0; i < num_commands; ++i) {\r
- ++cmd_histo[commands[i] & 0xff];\r
+ const uint32_t code = commands[i] & 0xFF;\r
+ BROTLI_DCHECK(code < 128);\r
+ ++cmd_histo[code];\r
}\r
cmd_histo[1] += 1;\r
cmd_histo[2] += 1;\r
\r
for (i = 0; i < num_commands; ++i) {\r
const uint32_t cmd = commands[i];\r
- const uint32_t code = cmd & 0xff;\r
+ const uint32_t code = cmd & 0xFF;\r
const uint32_t extra = cmd >> 8;\r
+ BROTLI_DCHECK(code < 128);\r
BrotliWriteBits(cmd_depths[code], cmd_bits[code], storage_ix, storage);\r
BrotliWriteBits(kNumExtraBits[code], extra, storage_ix, storage);\r
if (code < 24) {\r
}\r
}\r
\r
-void BrotliCompressFragmentTwoPass(MemoryManager* m,\r
- const uint8_t* input, size_t input_size,\r
- BROTLI_BOOL is_last,\r
- uint32_t* command_buf, uint8_t* literal_buf,\r
- int* table, size_t table_size,\r
- size_t* storage_ix, uint8_t* storage) {\r
+static void RewindBitPosition(const size_t new_storage_ix,\r
+ size_t* storage_ix, uint8_t* storage) {\r
+ const size_t bitpos = new_storage_ix & 7;\r
+ const size_t mask = (1u << bitpos) - 1;\r
+ storage[new_storage_ix >> 3] &= (uint8_t)mask;\r
+ *storage_ix = new_storage_ix;\r
+}\r
+\r
+static void EmitUncompressedMetaBlock(const uint8_t* input, size_t input_size,\r
+ size_t* storage_ix, uint8_t* storage) {\r
+  BrotliStoreMetaBlockHeader(input_size, BROTLI_TRUE, storage_ix, storage);
+ *storage_ix = (*storage_ix + 7u) & ~7u;\r
+ memcpy(&storage[*storage_ix >> 3], input, input_size);\r
+ *storage_ix += input_size << 3;\r
+ storage[*storage_ix >> 3] = 0;\r
+}\r
+\r
+static BROTLI_INLINE void BrotliCompressFragmentTwoPassImpl(\r
+ MemoryManager* m, const uint8_t* input, size_t input_size,\r
+ BROTLI_BOOL is_last, uint32_t* command_buf, uint8_t* literal_buf,\r
+ int* table, size_t table_bits, size_t min_match,\r
+ size_t* storage_ix, uint8_t* storage) {\r
/* Save the start of the first block for position and distance computations.\r
*/\r
const uint8_t* base_ip = input;\r
+ BROTLI_UNUSED(is_last);\r
\r
while (input_size > 0) {\r
size_t block_size =\r
uint32_t* commands = command_buf;\r
uint8_t* literals = literal_buf;\r
size_t num_literals;\r
- CreateCommands(input, block_size, input_size, base_ip, table, table_size,\r
- &literals, &commands);\r
+ CreateCommands(input, block_size, input_size, base_ip, table,\r
+ table_bits, min_match, &literals, &commands);\r
num_literals = (size_t)(literals - literal_buf);\r
if (ShouldCompress(input, block_size, num_literals)) {\r
const size_t num_commands = (size_t)(commands - command_buf);\r
/* Since we did not find many backward references and the entropy of\r
the data is close to 8 bits, we can simply emit an uncompressed block.\r
This makes compression speed of uncompressible data about 3x faster. */\r
- BrotliStoreMetaBlockHeader(block_size, 1, storage_ix, storage);\r
- *storage_ix = (*storage_ix + 7u) & ~7u;\r
- memcpy(&storage[*storage_ix >> 3], input, block_size);\r
- *storage_ix += block_size << 3;\r
- storage[*storage_ix >> 3] = 0;\r
+ EmitUncompressedMetaBlock(input, block_size, storage_ix, storage);\r
}\r
input += block_size;\r
input_size -= block_size;\r
}\r
+}\r
+\r
+#define FOR_TABLE_BITS_(X) \\r
+ X(8) X(9) X(10) X(11) X(12) X(13) X(14) X(15) X(16) X(17)\r
+\r
+#define BAKE_METHOD_PARAM_(B) \\r
+static BROTLI_NOINLINE void BrotliCompressFragmentTwoPassImpl ## B( \\r
+ MemoryManager* m, const uint8_t* input, size_t input_size, \\r
+ BROTLI_BOOL is_last, uint32_t* command_buf, uint8_t* literal_buf, \\r
+ int* table, size_t* storage_ix, uint8_t* storage) { \\r
+ size_t min_match = (B <= 15) ? 4 : 6; \\r
+ BrotliCompressFragmentTwoPassImpl(m, input, input_size, is_last, command_buf,\\r
+ literal_buf, table, B, min_match, storage_ix, storage); \\r
+}\r
+FOR_TABLE_BITS_(BAKE_METHOD_PARAM_)\r
+#undef BAKE_METHOD_PARAM_\r
+\r
+void BrotliCompressFragmentTwoPass(\r
+ MemoryManager* m, const uint8_t* input, size_t input_size,\r
+ BROTLI_BOOL is_last, uint32_t* command_buf, uint8_t* literal_buf,\r
+ int* table, size_t table_size, size_t* storage_ix, uint8_t* storage) {\r
+ const size_t initial_storage_ix = *storage_ix;\r
+ const size_t table_bits = Log2FloorNonZero(table_size);\r
+ switch (table_bits) {\r
+#define CASE_(B) \\r
+ case B: \\r
+ BrotliCompressFragmentTwoPassImpl ## B( \\r
+ m, input, input_size, is_last, command_buf, \\r
+ literal_buf, table, storage_ix, storage); \\r
+ break;\r
+ FOR_TABLE_BITS_(CASE_)\r
+#undef CASE_\r
+ default: BROTLI_DCHECK(0); break;\r
+ }\r
+\r
+ /* If output is larger than single uncompressed block, rewrite it. */\r
+ if (*storage_ix - initial_storage_ix > 31 + (input_size << 3)) {\r
+ RewindBitPosition(initial_storage_ix, storage_ix, storage);\r
+ EmitUncompressedMetaBlock(input, input_size, storage_ix, storage);\r
+ }\r
\r
if (is_last) {\r
BrotliWriteBits(1, 1, storage_ix, storage); /* islast */\r
}\r
}\r
\r
+#undef FOR_TABLE_BITS_\r
+\r
#if defined(__cplusplus) || defined(c_plusplus)\r
} /* extern "C" */\r
#endif\r