46 #define LZ4_HEAPMODE 0
53 #define LZ4_ACCELERATION_DEFAULT 1
59 #define LZ4_ACCELERATION_MAX 65537
79 #ifndef LZ4_FORCE_MEMORY_ACCESS
80 #if defined(__GNUC__) && \
81 (defined(__ARM_ARCH_6__) || defined(__ARM_ARCH_6J__) || \
82 defined(__ARM_ARCH_6K__) || defined(__ARM_ARCH_6Z__) || \
83 defined(__ARM_ARCH_6ZK__) || defined(__ARM_ARCH_6T2__))
84 #define LZ4_FORCE_MEMORY_ACCESS 2
85 #elif (defined(__INTEL_COMPILER) && !defined(_WIN32)) || defined(__GNUC__) || \
87 #define LZ4_FORCE_MEMORY_ACCESS 1
96 #if defined(_MSC_VER) && \
99 #undef LZ4_FORCE_SW_BITCOUNT
100 #define LZ4_FORCE_SW_BITCOUNT
110 #ifndef LZ4_SRC_INCLUDED
111 #define LZ4_SRC_INCLUDED 1
114 #ifndef LZ4_DISABLE_DEPRECATE_WARNINGS
115 #define LZ4_DISABLE_DEPRECATE_WARNINGS
119 #ifndef LZ4_STATIC_LINKING_ONLY
120 #define LZ4_STATIC_LINKING_ONLY
128 #if defined(_MSC_VER) && (_MSC_VER >= 1400)
140 #pragma warning(disable : 6326)
144 #ifndef LZ4_FORCE_INLINE
145 #if defined(_MSC_VER) && !defined(__clang__)
146 #define LZ4_FORCE_INLINE static __forceinline
148 #if defined(__cplusplus) || \
149 defined(__STDC_VERSION__) && __STDC_VERSION__ >= 199901L
150 #if defined(__GNUC__) || defined(__clang__)
151 #define LZ4_FORCE_INLINE static inline __attribute__((always_inline))
153 #define LZ4_FORCE_INLINE static inline
156 #define LZ4_FORCE_INLINE static
175 #if defined(__PPC64__) && defined(__LITTLE_ENDIAN__) && defined(__GNUC__) && \
177 #define LZ4_FORCE_O2 __attribute__((optimize("O2")))
178 #undef LZ4_FORCE_INLINE
179 #define LZ4_FORCE_INLINE \
180 static __inline __attribute__((optimize("O2"), always_inline))
185 #if (defined(__GNUC__) && (__GNUC__ >= 3)) || \
186 (defined(__INTEL_COMPILER) && (__INTEL_COMPILER >= 800)) || \
188 #define expect(expr, value) (__builtin_expect((expr), (value)))
190 #define expect(expr, value) (expr)
194 #define likely(expr) expect((expr) != 0, 1)
197 #define unlikely(expr) expect((expr) != 0, 0)
202 #ifndef LZ4_ALIGN_TEST
203 #define LZ4_ALIGN_TEST 1
226 #if defined(LZ4_STATIC_LINKING_ONLY_DISABLE_MEMORY_ALLOCATION)
227 #define ALLOC(s) lz4_error_memory_allocation_is_disabled
228 #define ALLOC_AND_ZERO(s) lz4_error_memory_allocation_is_disabled
229 #define FREEMEM(p) lz4_error_memory_allocation_is_disabled
230 #elif defined(LZ4_USER_MEMORY_FUNCTIONS)
234 void *LZ4_malloc(
size_t s);
235 void *LZ4_calloc(
size_t n,
size_t s);
236 void LZ4_free(
void *p);
237 #define ALLOC(s) LZ4_malloc(s)
238 #define ALLOC_AND_ZERO(s) LZ4_calloc(1, s)
239 #define FREEMEM(p) LZ4_free(p)
242 #define ALLOC(s) malloc(s)
243 #define ALLOC_AND_ZERO(s) calloc(1, s)
244 #define FREEMEM(p) free(p)
247 #if !LZ4_FREESTANDING
250 #if !defined(LZ4_memset)
251 #define LZ4_memset(p, v, s) memset((p), (v), (s))
253 #define MEM_INIT(p, v, s) LZ4_memset((p), (v), (s))
260 #define WILDCOPYLENGTH 8
261 #define LASTLITERALS \
265 #define MATCH_SAFEGUARD_DISTANCE \
266 ((2 * WILDCOPYLENGTH) - \
269 #define FASTLOOP_SAFE_DISTANCE 64
270 static const int LZ4_minLength = (
MFLIMIT + 1);
272 #define KB *(1 << 10)
273 #define MB *(1 << 20)
274 #define GB *(1U << 30)
276 #define LZ4_DISTANCE_ABSOLUTE_MAX 65535
277 #if (LZ4_DISTANCE_MAX > \
278 LZ4_DISTANCE_ABSOLUTE_MAX)
279 #error "LZ4_DISTANCE_MAX is too big : must be <= 65535"
283 #define ML_MASK ((1U << ML_BITS) - 1)
284 #define RUN_BITS (8 - ML_BITS)
285 #define RUN_MASK ((1U << RUN_BITS) - 1)
290 #if defined(LZ4_DEBUG) && (LZ4_DEBUG >= 1)
294 #define assert(condition) ((void)0)
298 #define LZ4_STATIC_ASSERT(c) \
300 enum { LZ4_static_assert = 1 / (int)(!!(c)) }; \
303 #if defined(LZ4_DEBUG) && (LZ4_DEBUG >= 2)
305 static int g_debuglog_enable = 1;
306 #define DEBUGLOG(l, ...) \
308 if ((g_debuglog_enable) && (l <= LZ4_DEBUG)) { \
309 fprintf(stderr, __FILE__ " %i: ", __LINE__); \
310 fprintf(stderr, __VA_ARGS__); \
311 fprintf(stderr, " \n"); \
315 #define DEBUGLOG(l, ...) \
/* LZ4_isAligned() :
 * Returns 1 when `ptr` is a multiple of `alignment`, 0 otherwise.
 * `alignment` must be a power of 2 for the mask trick to be valid. */
static int LZ4_isAligned(const void *ptr, size_t alignment)
{
    size_t const misalign = (size_t)ptr & (alignment - 1);
    return misalign == 0;
}
329 #if defined(__cplusplus) || \
330 (defined(__STDC_VERSION__) && (__STDC_VERSION__ >= 199901L) )
332 typedef uint8_t
BYTE;
333 typedef uint16_t
U16;
334 typedef uint32_t
U32;
336 typedef uint64_t
U64;
339 #if UINT_MAX != 4294967295UL
340 #error "LZ4 code (when not C++ or C99) assumes that sizeof(int) == 4"
343 typedef unsigned short U16;
345 typedef signed int S32;
346 typedef unsigned long long U64;
350 #if defined(__x86_64__)
375 #if !defined(LZ4_memcpy)
376 #if defined(__GNUC__) && (__GNUC__ >= 4)
377 #define LZ4_memcpy(dst, src, size) __builtin_memcpy(dst, src, size)
379 #define LZ4_memcpy(dst, src, size) memcpy(dst, src, size)
383 #if !defined(LZ4_memmove)
384 #if defined(__GNUC__) && (__GNUC__ >= 4)
385 #define LZ4_memmove __builtin_memmove
387 #define LZ4_memmove memmove
391 static unsigned LZ4_isLittleEndian(
void)
400 #if defined(__GNUC__) || defined(__INTEL_COMPILER)
401 #define LZ4_PACK(__Declaration__) __Declaration__ __attribute__((__packed__))
402 #elif defined(_MSC_VER)
403 #define LZ4_PACK(__Declaration__) \
404 __pragma(pack(push, 1)) __Declaration__ __pragma(pack(pop))
407 #if defined(LZ4_FORCE_MEMORY_ACCESS) && (LZ4_FORCE_MEMORY_ACCESS == 2)
410 static U16 LZ4_read16(
const void *memPtr)
412 return *(
const U16 *)memPtr;
414 static U32 LZ4_read32(
const void *memPtr)
416 return *(
const U32 *)memPtr;
418 static reg_t LZ4_read_ARCH(
const void *memPtr)
420 return *(
const reg_t *)memPtr;
423 static void LZ4_write16(
void *memPtr,
U16 value)
425 *(
U16 *)memPtr = value;
427 static void LZ4_write32(
void *memPtr,
U32 value)
429 *(
U32 *)memPtr = value;
432 #elif defined(LZ4_FORCE_MEMORY_ACCESS) && (LZ4_FORCE_MEMORY_ACCESS == 1)
/* Packed wrapper types used to express unaligned loads/stores through normal
 * member access (LZ4_FORCE_MEMORY_ACCESS==1 path).  The packed attribute
 * (via LZ4_PACK) tells the compiler the member may sit at any address, so it
 * emits code that is safe for unaligned pointers. */
437 LZ4_PACK(
typedef struct {
U16 u16; }) LZ4_unalign16;
/* 32-bit unaligned accessor type */
438 LZ4_PACK(
typedef struct {
U32 u32; }) LZ4_unalign32;
/* register-width (reg_t) unaligned accessor type */
439 LZ4_PACK(
typedef struct {
reg_t uArch; }) LZ4_unalignST;
441 static U16 LZ4_read16(
const void *ptr)
443 return ((
const LZ4_unalign16 *)ptr)->u16;
445 static U32 LZ4_read32(
const void *ptr)
447 return ((
const LZ4_unalign32 *)ptr)->u32;
449 static reg_t LZ4_read_ARCH(
const void *ptr)
451 return ((
const LZ4_unalignST *)ptr)->uArch;
454 static void LZ4_write16(
void *memPtr,
U16 value)
456 ((LZ4_unalign16 *)memPtr)->u16 = value;
458 static void LZ4_write32(
void *memPtr,
U32 value)
460 ((LZ4_unalign32 *)memPtr)->u32 = value;
465 static U16 LZ4_read16(
const void *memPtr)
472 static U32 LZ4_read32(
const void *memPtr)
479 static reg_t LZ4_read_ARCH(
const void *memPtr)
486 static void LZ4_write16(
void *memPtr,
U16 value)
491 static void LZ4_write32(
void *memPtr,
U32 value)
498 static U16 LZ4_readLE16(
const void *memPtr)
500 if (LZ4_isLittleEndian()) {
501 return LZ4_read16(memPtr);
504 const BYTE *p = (
const BYTE *)memPtr;
505 return (
U16)((
U16)p[0] | (p[1] << 8));
509 #ifdef LZ4_STATIC_LINKING_ONLY_ENDIANNESS_INDEPENDENT_OUTPUT
510 static U32 LZ4_readLE32(
const void *memPtr)
512 if (LZ4_isLittleEndian()) {
513 return LZ4_read32(memPtr);
516 const BYTE *p = (
const BYTE *)memPtr;
517 return (
U32)p[0] | (p[1] << 8) | (p[2] << 16) | (p[3] << 24);
522 static void LZ4_writeLE16(
void *memPtr,
U16 value)
524 if (LZ4_isLittleEndian()) {
525 LZ4_write16(memPtr, value);
530 p[1] = (
BYTE)(value >> 8);
537 void LZ4_wildCopy8(
void *dstPtr,
const void *srcPtr,
void *dstEnd)
540 const BYTE *s = (
const BYTE *)srcPtr;
/* Lookup tables for copying matches whose offset is < 8 (overlapping copy).
 * Indexed by offset in [0..7]: after the first 4 bytes are copied one at a
 * time, inc32table[offset] advances the source pointer for the next 4-byte
 * copy, and dec64table[offset] rewinds it before the subsequent 8-byte copy
 * (see the byte-wise copies and pointer adjustments in the overlap path). */
550 static const unsigned inc32table[8] = {0, 1, 2, 1, 0, 4, 4, 4};
551 static const int dec64table[8] = {0, 0, 0, -1, -4, 1, 2, 3};
553 #ifndef LZ4_FAST_DEC_LOOP
554 #if defined __i386__ || defined _M_IX86 || defined __x86_64__ || defined _M_X64
555 #define LZ4_FAST_DEC_LOOP 1
556 #elif defined(__aarch64__) && defined(__APPLE__)
557 #define LZ4_FAST_DEC_LOOP 1
558 #elif defined(__aarch64__) && !defined(__clang__)
562 #define LZ4_FAST_DEC_LOOP 1
564 #define LZ4_FAST_DEC_LOOP 0
568 #if LZ4_FAST_DEC_LOOP
575 assert(srcPtr + offset == dstPtr);
577 LZ4_write32(dstPtr, 0);
578 dstPtr[0] = srcPtr[0];
579 dstPtr[1] = srcPtr[1];
580 dstPtr[2] = srcPtr[2];
581 dstPtr[3] = srcPtr[3];
582 srcPtr += inc32table[offset];
584 srcPtr -= dec64table[offset];
593 LZ4_wildCopy8(dstPtr, srcPtr, dstEnd);
603 const BYTE *s = (
const BYTE *)srcPtr;
618 BYTE *dstEnd,
const size_t offset)
631 #if defined(_MSC_VER) && (_MSC_VER <= 1937)
633 #pragma warning(push)
638 #if defined(_MSC_VER) && (_MSC_VER <= 1937)
648 LZ4_memcpy_using_offset_base(dstPtr, srcPtr, dstEnd, offset);
654 while (dstPtr < dstEnd) {
664 static unsigned LZ4_NbCommonBytes(
reg_t val)
667 if (LZ4_isLittleEndian()) {
668 if (
sizeof(val) == 8) {
669 #if defined(_MSC_VER) && (_MSC_VER >= 1800) && \
670 (defined(_M_AMD64) && !defined(_M_ARM64EC)) && \
671 !defined(LZ4_FORCE_SW_BITCOUNT)
678 #if defined(__clang__) && (__clang_major__ < 10)
681 return (
unsigned)__builtin_ia32_tzcnt_u64(val) >> 3;
684 return (
unsigned)_tzcnt_u64(val) >> 3;
686 #elif defined(_MSC_VER) && defined(_WIN64) && !defined(LZ4_FORCE_SW_BITCOUNT)
688 _BitScanForward64(&
r, (
U64)val);
689 return (
unsigned)
r >> 3;
690 #elif (defined(__clang__) || \
691 (defined(__GNUC__) && \
692 ((__GNUC__ > 3) || ((__GNUC__ == 3) && (__GNUC_MINOR__ >= 4))))) && \
693 !defined(LZ4_FORCE_SW_BITCOUNT)
694 return (
unsigned)__builtin_ctzll((
U64)val) >> 3;
696 const U64 m = 0x0101010101010101ULL;
698 return (
unsigned)(((
U64)((val & (m - 1)) * m)) >> 56);
702 #if defined(_MSC_VER) && (_MSC_VER >= 1400) && !defined(LZ4_FORCE_SW_BITCOUNT)
704 _BitScanForward(&
r, (
U32)val);
705 return (
unsigned)
r >> 3;
706 #elif (defined(__clang__) || \
707 (defined(__GNUC__) && \
708 ((__GNUC__ > 3) || ((__GNUC__ == 3) && (__GNUC_MINOR__ >= 4))))) && \
709 !defined(__TINYC__) && !defined(LZ4_FORCE_SW_BITCOUNT)
710 return (
unsigned)__builtin_ctz((
U32)val) >> 3;
712 const U32 m = 0x01010101;
713 return (
unsigned)((((val - 1) ^ val) & (m - 1)) * m) >> 24;
718 if (
sizeof(val) == 8) {
719 #if (defined(__clang__) || \
720 (defined(__GNUC__) && \
721 ((__GNUC__ > 3) || ((__GNUC__ == 3) && (__GNUC_MINOR__ >= 4))))) && \
722 !defined(__TINYC__) && !defined(LZ4_FORCE_SW_BITCOUNT)
723 return (
unsigned)__builtin_clzll((
U64)val) >> 3;
728 static const unsigned char ctz7_tab[128] = {
729 7, 0, 1, 0, 2, 0, 1, 0, 3, 0, 1, 0, 2, 0, 1, 0, 4, 0, 1,
730 0, 2, 0, 1, 0, 3, 0, 1, 0, 2, 0, 1, 0, 5, 0, 1, 0, 2, 0,
731 1, 0, 3, 0, 1, 0, 2, 0, 1, 0, 4, 0, 1, 0, 2, 0, 1, 0, 3,
732 0, 1, 0, 2, 0, 1, 0, 6, 0, 1, 0, 2, 0, 1, 0, 3, 0, 1, 0,
733 2, 0, 1, 0, 4, 0, 1, 0, 2, 0, 1, 0, 3, 0, 1, 0, 2, 0, 1,
734 0, 5, 0, 1, 0, 2, 0, 1, 0, 3, 0, 1, 0, 2, 0, 1, 0, 4, 0,
735 1, 0, 2, 0, 1, 0, 3, 0, 1, 0, 2, 0, 1, 0,
737 U64 const mask = 0x0101010101010101ULL;
738 U64 const t = (((val >> 8) - mask) | val) & mask;
739 return ctz7_tab[(
t * 0x0080402010080402ULL) >> 57];
744 static const U32 by32 =
749 if (!(val >> by32)) {
769 #if (defined(__clang__) || \
770 (defined(__GNUC__) && \
771 ((__GNUC__ > 3) || ((__GNUC__ == 3) && (__GNUC_MINOR__ >= 4))))) && \
772 !defined(LZ4_FORCE_SW_BITCOUNT)
773 return (
unsigned)__builtin_clz((
U32)val) >> 3;
776 val = ((((val + 0x00FFFF00) | 0x00FFFFFF) + val) |
777 (val + 0x00FF0000)) >>
779 return (
unsigned)val ^ 3;
785 #define STEPSIZE sizeof(reg_t)
787 unsigned LZ4_count(
const BYTE *pIn,
const BYTE *pMatch,
const BYTE *pInLimit)
789 const BYTE *
const pStart = pIn;
792 reg_t const diff = LZ4_read_ARCH(pMatch) ^ LZ4_read_ARCH(pIn);
798 return LZ4_NbCommonBytes(diff);
803 reg_t const diff = LZ4_read_ARCH(pMatch) ^ LZ4_read_ARCH(pIn);
809 pIn += LZ4_NbCommonBytes(diff);
810 return (
unsigned)(pIn - pStart);
813 if ((
STEPSIZE == 8) && (pIn < (pInLimit - 3)) &&
814 (LZ4_read32(pMatch) == LZ4_read32(pIn))) {
818 if ((pIn < (pInLimit - 1)) && (LZ4_read16(pMatch) == LZ4_read16(pIn))) {
822 if ((pIn < pInLimit) && (*pMatch == *pIn))
824 return (
unsigned)(pIn - pStart);
827 #ifndef LZ4_COMMONDEFS_ONLY
/* Inputs strictly below LZ4_64Klimit can index positions with 16-bit (byU16)
 * hash-table entries — NOTE(review): threshold is 64 KB plus a small margin
 * (MFLIMIT - 1); confirm against the byU16 selection sites below. */
831 static const int LZ4_64Klimit = ((64
KB) + (
MFLIMIT - 1));
/* Search-skip acceleration: the forward step grows by one every
 * (1 << LZ4_skipTrigger) consecutive failed match attempts (see the
 * `searchMatchNb++ >> LZ4_skipTrigger` step computation in the match loop). */
832 static const U32 LZ4_skipTrigger = 6;
894 #if defined(__cplusplus)
899 char *dest,
int srcSize);
902 int compressedSize,
int maxOutputSize,
903 const void *dictStart,
size_t dictSize);
905 const char *source,
char *dest,
int compressedSize,
int targetOutputSize,
906 int dstCapacity,
const void *dictStart,
size_t dictSize);
907 #if defined(__cplusplus)
916 if (tableType ==
byU16)
917 return ((sequence * 2654435761U) >>
926 if (LZ4_isLittleEndian()) {
927 const U64 prime5bytes = 889523592379ULL;
928 return (
U32)(((sequence << 24) * prime5bytes) >> (64 - hashLog));
931 const U64 prime8bytes = 11400714785074694791ULL;
932 return (
U32)(((sequence >> 24) * prime8bytes) >> (64 - hashLog));
939 if ((
sizeof(
reg_t) == 8) && (tableType !=
byU16))
940 return LZ4_hash5(LZ4_read_ARCH(p), tableType);
942 #ifdef LZ4_STATIC_LINKING_ONLY_ENDIANNESS_INDEPENDENT_OUTPUT
943 return LZ4_hash4(LZ4_readLE32(p), tableType);
945 return LZ4_hash4(LZ4_read32(p), tableType);
959 const BYTE **hashTable = (
const BYTE **)tableBase;
964 U32 *hashTable = (
U32 *)tableBase;
969 U16 *hashTable = (
U16 *)tableBase;
987 U32 *hashTable = (
U32 *)tableBase;
992 U16 *hashTable = (
U16 *)tableBase;
994 hashTable[h] = (
U16)idx;
1005 const BYTE **
const hashTable = (
const BYTE **)tableBase;
1014 U32 const h = LZ4_hashPosition(p, tableType);
1015 LZ4_putPositionOnHash(p, h, tableBase, tableType);
1028 if (tableType ==
byU32) {
1029 const U32 *
const hashTable = (
const U32 *)tableBase;
1031 return hashTable[h];
1033 if (tableType ==
byU16) {
1034 const U16 *
const hashTable = (
const U16 *)tableBase;
1036 return hashTable[h];
1042 static const BYTE *LZ4_getPositionOnHash(
U32 h,
const void *tableBase,
1048 const BYTE *
const *hashTable = (
const BYTE *
const *)tableBase;
1049 return hashTable[h];
1054 LZ4_getPosition(
const BYTE *p,
const void *tableBase,
tableType_t tableType)
1056 U32 const h = LZ4_hashPosition(p, tableType);
1057 return LZ4_getPositionOnHash(h, tableBase, tableType);
1061 const int inputSize,
1071 ((tableType ==
byU16) &&
1074 tableType ==
byPtr || inputSize >= 4
KB) {
1075 DEBUGLOG(4,
"LZ4_prepareTable: Resetting table in %p", cctx);
1081 DEBUGLOG(4,
"LZ4_prepareTable: Re-use hash table (no reset)");
1091 DEBUGLOG(5,
"LZ4_prepareTable: adding 64KB to currentOffset");
1109 char *
const dest,
const int inputSize,
1116 const BYTE *ip = (
const BYTE *)source;
1119 const BYTE *base = (
const BYTE *)source - startIndex;
1120 const BYTE *lowLimit;
1124 const BYTE *
const dictionary =
1126 const U32 dictSize =
1133 int const maybe_extMem =
1135 U32 const prefixIdxLimit =
1136 startIndex - dictSize;
1137 const BYTE *
const dictEnd = dictionary ? dictionary + dictSize : dictionary;
1138 const BYTE *anchor = (
const BYTE *)source;
1139 const BYTE *
const iend = ip + inputSize;
1140 const BYTE *
const mflimitPlusOne = iend -
MFLIMIT + 1;
1148 : dictionary + dictSize - startIndex;
1151 BYTE *
const olimit = op + maxOutputSize;
1156 DEBUGLOG(5,
"LZ4_compress_generic_validated: srcSize=%i, tableType=%u",
1157 inputSize, tableType);
1159 if (tableType ==
byU16)
1162 if (tableType ==
byPtr)
1167 if (outputDirective ==
fillOutput && maxOutputSize < 1) {
1170 assert(acceleration >= 1);
1188 if (inputSize < LZ4_minLength)
1189 goto _last_literals;
1194 U32 const h = LZ4_hashPosition(ip, tableType);
1195 if (tableType ==
byPtr) {
1199 LZ4_putIndexOnHash(startIndex, h, cctx->
hashTable, tableType);
1203 forwardH = LZ4_hashPosition(ip, tableType);
1209 const BYTE *filledIp;
1212 if (tableType ==
byPtr) {
1213 const BYTE *forwardIp = ip;
1215 int searchMatchNb = acceleration << LZ4_skipTrigger;
1217 U32 const h = forwardH;
1220 step = (searchMatchNb++ >> LZ4_skipTrigger);
1222 if (
unlikely(forwardIp > mflimitPlusOne))
1223 goto _last_literals;
1224 assert(ip < mflimitPlusOne);
1226 match = LZ4_getPositionOnHash(h, cctx->
hashTable, tableType);
1227 forwardH = LZ4_hashPosition(forwardIp, tableType);
1228 LZ4_putPositionOnHash(ip, h, cctx->
hashTable, tableType);
1230 }
while ((match + LZ4_DISTANCE_MAX < ip) ||
1231 (LZ4_read32(match) != LZ4_read32(ip)));
1235 const BYTE *forwardIp = ip;
1237 int searchMatchNb = acceleration << LZ4_skipTrigger;
1239 U32 const h = forwardH;
1240 U32 const current = (
U32)(forwardIp - base);
1242 LZ4_getIndexOnHash(h, cctx->
hashTable, tableType);
1243 assert(matchIndex <= current);
1244 assert(forwardIp - base < (ptrdiff_t)(2
GB - 1));
1247 step = (searchMatchNb++ >> LZ4_skipTrigger);
1249 if (
unlikely(forwardIp > mflimitPlusOne))
1250 goto _last_literals;
1251 assert(ip < mflimitPlusOne);
1254 if (matchIndex < startIndex) {
1259 match = dictBase + matchIndex;
1263 lowLimit = dictionary;
1266 match = base + matchIndex;
1267 lowLimit = (
const BYTE *)source;
1271 if (matchIndex < startIndex) {
1273 "extDict candidate: matchIndex=%5u < "
1275 matchIndex, startIndex);
1278 match = dictBase + matchIndex;
1279 lowLimit = dictionary;
1282 match = base + matchIndex;
1283 lowLimit = (
const BYTE *)source;
1287 match = base + matchIndex;
1289 forwardH = LZ4_hashPosition(forwardIp, tableType);
1290 LZ4_putIndexOnHash(current, h, cctx->
hashTable, tableType);
1292 DEBUGLOG(7,
"candidate at pos=%u (offset=%u \n", matchIndex,
1293 current - matchIndex);
1294 if ((dictIssue ==
dictSmall) && (matchIndex < prefixIdxLimit)) {
1297 assert(matchIndex < current);
1298 if (((tableType !=
byU16) ||
1300 (matchIndex + LZ4_DISTANCE_MAX < current)) {
1304 (current - matchIndex) <=
1307 if (LZ4_read32(match) == LZ4_read32(ip)) {
1309 offset = current - matchIndex;
1320 if ((match > lowLimit) &&
unlikely(ip[-1] == match[-1])) {
1324 }
while (((ip > anchor) & (match > lowLimit)) &&
1330 unsigned const litLength = (unsigned)(ip - anchor);
1332 if ((outputDirective ==
1341 (
unlikely(op + (litLength + 240) / 255 +
1348 goto _last_literals;
1351 unsigned len = litLength -
RUN_MASK;
1353 for (; len >= 255; len -= 255)
1361 LZ4_wildCopy8(op, anchor, op + litLength);
1363 DEBUGLOG(6,
"seq.start:%i, literals=%u, match.start:%i",
1364 (
int)(anchor - (
const BYTE *)source), litLength,
1365 (
int)(ip - (
const BYTE *)source));
1388 goto _last_literals;
1393 DEBUGLOG(6,
" with offset=%u (ext if > %i)", offset,
1394 (
int)(ip - (
const BYTE *)source));
1395 assert(offset <= LZ4_DISTANCE_MAX && offset > 0);
1396 LZ4_writeLE16(op, (
U16)offset);
1400 DEBUGLOG(6,
" with offset=%u (same segment)",
1402 assert(ip - match <= LZ4_DISTANCE_MAX);
1403 LZ4_writeLE16(op, (
U16)(ip - match));
1413 (lowLimit == dictionary) ) {
1414 const BYTE *limit = ip + (dictEnd - match);
1416 if (limit > matchlimit)
1419 ip += (size_t)matchCode +
MINMATCH;
1421 unsigned const more =
1422 LZ4_count(limit, (
const BYTE *)source, matchlimit);
1427 " with matchLength=%u starting in extDict",
1433 ip += (size_t)matchCode +
MINMATCH;
1434 DEBUGLOG(6,
" with matchLength=%u",
1438 if ((outputDirective) &&
1447 ip -= matchCode - newMatchCode;
1448 assert(newMatchCode < matchCode);
1449 matchCode = newMatchCode;
1458 DEBUGLOG(5,
"Clearing %u positions",
1459 (
U32)(filledIp - ip));
1460 for (ptr = ip; ptr <= filledIp; ++ptr) {
1461 U32 const h = LZ4_hashPosition(ptr, tableType);
1462 LZ4_clearHash(h, cctx->
hashTable, tableType);
1475 LZ4_write32(op, 0xFFFFFFFF);
1476 while (matchCode >= 4 * 255) {
1478 LZ4_write32(op, 0xFFFFFFFF);
1479 matchCode -= 4 * 255;
1481 op += matchCode / 255;
1482 *op++ = (
BYTE)(matchCode % 255);
1485 *token += (
BYTE)(matchCode);
1494 if (ip >= mflimitPlusOne)
1499 U32 const h = LZ4_hashPosition(ip - 2, tableType);
1500 if (tableType ==
byPtr) {
1504 U32 const idx = (
U32)((ip - 2) - base);
1505 LZ4_putIndexOnHash(idx, h, cctx->
hashTable, tableType);
1510 if (tableType ==
byPtr) {
1512 match = LZ4_getPosition(ip, cctx->
hashTable, tableType);
1513 LZ4_putPosition(ip, cctx->
hashTable, tableType);
1514 if ((match + LZ4_DISTANCE_MAX >= ip) &&
1515 (LZ4_read32(match) == LZ4_read32(ip))) {
1523 U32 const h = LZ4_hashPosition(ip, tableType);
1524 U32 const current = (
U32)(ip - base);
1525 U32 matchIndex = LZ4_getIndexOnHash(h, cctx->
hashTable, tableType);
1526 assert(matchIndex < current);
1528 if (matchIndex < startIndex) {
1533 match = dictBase + matchIndex;
1536 matchIndex += dictDelta;
1539 match = base + matchIndex;
1540 lowLimit = (
const BYTE *)
1545 if (matchIndex < startIndex) {
1547 match = dictBase + matchIndex;
1552 match = base + matchIndex;
1553 lowLimit = (
const BYTE *)
1558 match = base + matchIndex;
1560 LZ4_putIndexOnHash(current, h, cctx->
hashTable, tableType);
1561 assert(matchIndex < current);
1562 if (((dictIssue ==
dictSmall) ? (matchIndex >= prefixIdxLimit)
1564 (((tableType ==
byU16) &&
1567 : (matchIndex + LZ4_DISTANCE_MAX >= current)) &&
1568 (LZ4_read32(match) == LZ4_read32(ip))) {
1572 offset = current - matchIndex;
1573 DEBUGLOG(6,
"seq.start:%i, literals=%u, match.start:%i",
1574 (
int)(anchor - (
const BYTE *)source), 0,
1575 (
int)(ip - (
const BYTE *)source));
1581 forwardH = LZ4_hashPosition(++ip, tableType);
1587 size_t lastRun = (size_t)(iend - anchor);
1588 if ((outputDirective) &&
1589 (op + lastRun + 1 + ((lastRun + 255 -
RUN_MASK) / 255) > olimit)) {
1593 lastRun = (size_t)(olimit - op) - 1 ;
1594 lastRun -= (lastRun + 256 -
RUN_MASK) /
1603 DEBUGLOG(6,
"Final literal run : %i literals", (
int)lastRun);
1605 size_t accumulator = lastRun -
RUN_MASK;
1607 for (; accumulator >= 255; accumulator -= 255)
1609 *op++ = (
BYTE)accumulator;
1615 ip = anchor + lastRun;
1620 *inputConsumed = (int)(((
const char *)ip) - source);
1622 result = (int)(((
char *)op) - dest);
1624 DEBUGLOG(5,
"LZ4_compress_generic: compressed %i bytes into %i bytes",
1641 DEBUGLOG(5,
"LZ4_compress_generic: srcSize=%i, dstCapacity=%i", srcSize,
1648 if (outputDirective !=
notLimited && dstCapacity <= 0)
1650 DEBUGLOG(5,
"Generating an empty block");
1662 return LZ4_compress_generic_validated(
1663 cctx, src, dst, srcSize,
1665 dstCapacity, outputDirective, tableType, dictDirective, dictIssue,
1670 int inputSize,
int maxOutputSize,
1676 if (acceleration < 1)
1681 if (inputSize < LZ4_64Klimit) {
1682 return LZ4_compress_generic(ctx, source, dest, inputSize,
NULL, 0,
1688 ((
sizeof(
void *) == 4) && ((
uptrval)source > LZ4_DISTANCE_MAX))
1691 return LZ4_compress_generic(ctx, source, dest, inputSize,
NULL, 0,
1697 if (inputSize < LZ4_64Klimit) {
1698 return LZ4_compress_generic(ctx, source, dest, inputSize,
NULL,
1704 ((
sizeof(
void *) == 4) && ((
uptrval)source > LZ4_DISTANCE_MAX))
1707 return LZ4_compress_generic(ctx, source, dest, inputSize,
NULL,
1724 char *dst,
int srcSize,
1725 int dstCapacity,
int acceleration)
1729 if (acceleration < 1)
1736 if (srcSize < LZ4_64Klimit) {
1738 LZ4_prepareTable(ctx, srcSize, tableType);
1740 return LZ4_compress_generic(ctx, src, dst, srcSize,
NULL, 0,
1745 return LZ4_compress_generic(ctx, src, dst, srcSize,
NULL, 0,
1752 ((
sizeof(
void *) == 4) && ((
uptrval)src > LZ4_DISTANCE_MAX))
1755 LZ4_prepareTable(ctx, srcSize, tableType);
1756 return LZ4_compress_generic(ctx, src, dst, srcSize,
NULL, 0,
1762 if (srcSize < LZ4_64Klimit) {
1764 LZ4_prepareTable(ctx, srcSize, tableType);
1766 return LZ4_compress_generic(
1771 return LZ4_compress_generic(
1778 ((
sizeof(
void *) == 4) && ((
uptrval)src > LZ4_DISTANCE_MAX))
1781 LZ4_prepareTable(ctx, srcSize, tableType);
1782 return LZ4_compress_generic(ctx, src, dst, srcSize,
NULL,
1789 int LZ4_compress_fast(
const char *src,
char *dest,
int srcSize,
int dstCapacity,
1821 const char *src,
char *dst,
1830 if (targetDstSize >=
1834 targetDstSize, acceleration);
1837 if (*srcSizePtr < LZ4_64Klimit) {
1838 return LZ4_compress_generic(&
state->internal_donotuse, src, dst,
1839 *srcSizePtr, srcSizePtr, targetDstSize,
1845 ((
sizeof(
void *) == 4) && ((
uptrval)src > LZ4_DISTANCE_MAX))
1848 return LZ4_compress_generic(&
state->internal_donotuse, src, dst,
1849 *srcSizePtr, srcSizePtr, targetDstSize,
1857 int *srcSizePtr,
int targetDstSize,
1860 int const r = LZ4_compress_destSize_extState_internal(
1881 int result = LZ4_compress_destSize_extState_internal(
1882 ctx, src, dst, srcSizePtr, targetDstSize, 1);
1894 #if !defined(LZ4_STATIC_LINKING_ONLY_DISABLE_MEMORY_ALLOCATION)
1899 DEBUGLOG(4,
"LZ4_createStream %p", lz4s);
1907 static size_t LZ4_stream_t_alignment(
void)
1923 if (buffer ==
NULL) {
1929 if (!LZ4_isAligned(buffer, LZ4_stream_t_alignment()))
1939 DEBUGLOG(5,
"LZ4_resetStream (ctx:%p)", LZ4_stream);
1948 #if !defined(LZ4_STATIC_LINKING_ONLY_DISABLE_MEMORY_ALLOCATION)
1953 DEBUGLOG(5,
"LZ4_freeStream %p", LZ4_stream);
1960 #define HASH_UNIT sizeof(reg_t)
1966 const BYTE *p = (
const BYTE *)dictionary;
1967 const BYTE *
const dictEnd = p + dictSize;
1970 DEBUGLOG(4,
"LZ4_loadDict (%i bytes from %p into %p)", dictSize, dictionary,
1991 if ((dictEnd - p) > 64
KB)
1992 p = dictEnd - 64
KB;
1999 U32 const h = LZ4_hashPosition(p, tableType);
2001 LZ4_putIndexOnHash(idx32, h, dict->
hashTable, tableType);
2012 U32 const h = LZ4_hashPosition(p, tableType);
2014 if (LZ4_getIndexOnHash(h, dict->
hashTable, tableType) <= limit) {
2017 LZ4_putIndexOnHash(idx32, h, dict->
hashTable, tableType);
2045 DEBUGLOG(4,
"LZ4_attach_dictionary (%p, %p, size %u)", workingStream,
2046 dictionaryStream, dictCtx !=
NULL ? dictCtx->
dictSize : 0);
2048 if (dictCtx !=
NULL) {
2091 char *dest,
int inputSize,
int maxOutputSize,
2096 const char *dictEnd =
2101 DEBUGLOG(5,
"LZ4_compress_fast_continue (inputSize=%i, dictSize=%u)",
2104 LZ4_renormDictT(streamPtr, inputSize);
2105 if (acceleration < 1)
2112 && (dictEnd != source)
2119 "LZ4_compress_fast_continue: dictSize(%u) at addr:%p is too small",
2130 const char *
const sourceEnd = source + inputSize;
2131 if ((sourceEnd > (
const char *)streamPtr->
dictionary) &&
2132 (sourceEnd < dictEnd)) {
2143 if (dictEnd == source) {
2146 return LZ4_compress_generic(streamPtr, source, dest, inputSize,
2151 return LZ4_compress_generic(streamPtr, source, dest, inputSize,
2167 if (inputSize > 4
KB) {
2173 result = LZ4_compress_generic(
2174 streamPtr, source, dest, inputSize,
NULL, maxOutputSize,
2179 result = LZ4_compress_generic(
2180 streamPtr, source, dest, inputSize,
NULL, maxOutputSize,
2188 result = LZ4_compress_generic(
2189 streamPtr, source, dest, inputSize,
NULL, maxOutputSize,
2194 result = LZ4_compress_generic(
2195 streamPtr, source, dest, inputSize,
NULL, maxOutputSize,
2208 char *dest,
int srcSize)
2213 LZ4_renormDictT(streamPtr, srcSize);
2218 LZ4_compress_generic(streamPtr, source, dest, srcSize,
NULL, 0,
2222 result = LZ4_compress_generic(streamPtr, source, dest, srcSize,
NULL, 0,
2245 DEBUGLOG(5,
"LZ4_saveDict : dictSize=%i, safeBuffer=%p", dictSize,
2248 if ((
U32)dictSize > 64
KB) {
2255 if (safeBuffer ==
NULL)
2260 LZ4_memmove(safeBuffer, previousDictEnd - dictSize, (
size_t)dictSize);
/* Classic MIN macro. Each argument may be evaluated twice — only pass
 * side-effect-free expressions. */
2276 #define MIN(a, b) ((a) < (b) ? (a) : (b))
2282 static size_t read_long_length_no_check(
const BYTE **pp)
2290 DEBUGLOG(6,
"read_long_length_no_check: +length=%zu using %zu input bytes",
2305 const
BYTE *const istart,
BYTE *const ostart,
int decompressedSize,
2308 const
BYTE *const dictStart,
2309 const
size_t dictSize
2312 const BYTE *ip = istart;
2314 BYTE *
const oend = ostart + decompressedSize;
2315 const BYTE *
const prefixStart = ostart - prefixSize;
2317 DEBUGLOG(5,
"LZ4_decompress_unsafe_generic");
2318 if (dictStart ==
NULL)
2323 unsigned token = *ip++;
2330 ll += read_long_length_no_check(&ip);
2332 if ((
size_t)(oend - op) < ll)
2337 if ((
size_t)(oend - op) <
MFLIMIT) {
2342 "invalid: literals end at distance %zi from end of block",
2353 size_t ml = token & 15;
2354 size_t const offset = LZ4_readLE16(ip);
2359 ml += read_long_length_no_check(&ip);
2363 if ((
size_t)(oend - op) < ml)
2367 const BYTE *match = op - offset;
2370 if (offset > (
size_t)(op - prefixStart) + dictSize) {
2371 DEBUGLOG(6,
"offset out of range");
2376 if (offset > (
size_t)(op - prefixStart)) {
2378 const BYTE *
const dictEnd = dictStart + dictSize;
2379 const BYTE *extMatch =
2380 dictEnd - (offset - (size_t)(op - prefixStart));
2381 size_t const extml = (size_t)(dictEnd - extMatch);
2394 match = prefixStart;
2400 for (u = 0; u < ml; u++) {
2408 5,
"invalid: match ends at distance %zi from end of block",
2417 return (
int)(ip - istart);
/* Rvl_t : accumulator type for read_variable_length(); platform-sized so the
 * overflow guards below (`sizeof(length) < 8`) adapt to 32-bit targets. */
2429 typedef size_t Rvl_t;
2434 Rvl_t s, length = 0;
2438 if (initial_check &&
unlikely((*ip) >= ilimit)) {
2448 if ((
sizeof(length) < 8) &&
unlikely(length > ((
Rvl_t)(-1) / 2))) {
2461 if ((
sizeof(length) < 8) &&
unlikely(length > ((
Rvl_t)(-1) / 2))) {
2476 const char *
const src,
char *
const dst,
int srcSize,
2482 const BYTE *
const lowPrefix,
2483 const BYTE *
const dictStart,
2484 const size_t dictSize
2487 if ((src ==
NULL) || (outputSize < 0)) {
2492 const BYTE *ip = (
const BYTE *)src;
2493 const BYTE *
const iend = ip + srcSize;
2496 BYTE *
const oend = op + outputSize;
2499 const BYTE *
const dictEnd =
2500 (dictStart ==
NULL) ?
NULL : dictStart + dictSize;
2502 const int checkOffset = (dictSize < (int)(64
KB));
2505 const BYTE *
const shortiend = iend - 14 - 2 ;
2506 const BYTE *
const shortoend = oend - 14 - 18 ;
2513 DEBUGLOG(5,
"LZ4_decompress_generic (srcSize:%i, dstSize:%i)", srcSize,
2520 if (partialDecoding)
2522 return ((srcSize == 1) && (*ip == 0)) ? 0 : -1;
2533 #if LZ4_FAST_DEC_LOOP
2535 DEBUGLOG(6,
"move to safe decode loop");
2541 DEBUGLOG(6,
"using fast decode loop");
2549 DEBUGLOG(7,
"blockPos%6u: litLength token = %u",
2550 (
unsigned)(op - (
BYTE *)dst), (
unsigned)length);
2555 read_variable_length(&ip, iend -
RUN_MASK, 1);
2556 if (addl == rvl_error) {
2557 DEBUGLOG(6,
"error reading long literal length");
2570 if ((op + length > oend - 32) || (ip + length > iend - 32)) {
2571 goto safe_literal_copy;
2573 LZ4_wildCopy32(op, ip, op + length);
2577 else if (ip <= iend - (16 + 1 )) {
2580 DEBUGLOG(7,
"copy %u bytes in a 16-bytes stripe",
2589 goto safe_literal_copy;
2593 offset = LZ4_readLE16(ip);
2595 DEBUGLOG(6,
"blockPos%6u: offset = %u",
2596 (
unsigned)(op - (
BYTE *)dst), (
unsigned)offset);
2597 match = op - offset;
2602 DEBUGLOG(7,
" match length token = %u (len==%u)", (
unsigned)length,
2608 if (addl == rvl_error) {
2609 DEBUGLOG(5,
"error reading long match length");
2614 DEBUGLOG(7,
" long match length == %u", (
unsigned)length);
2619 goto safe_match_copy;
2625 DEBUGLOG(7,
"moving to safe_match_copy (ml==%u)",
2627 goto safe_match_copy;
2633 assert(match >= lowPrefix);
2646 if (checkOffset && (
unlikely(match + dictSize < lowPrefix))) {
2647 DEBUGLOG(5,
"Error : pos=%zi, offset=%zi => outside buffers",
2648 op - lowPrefix, op - match);
2655 if (partialDecoding) {
2656 DEBUGLOG(7,
"partialDecoding: dictionary match, close "
2658 length =
MIN(length, (
size_t)(oend - op));
2661 DEBUGLOG(6,
"end-of-block condition violated")
2666 if (length <= (
size_t)(lowPrefix - match)) {
2669 LZ4_memmove(op, dictEnd - (lowPrefix - match), length);
2675 size_t const copySize = (size_t)(lowPrefix - match);
2676 size_t const restSize = length - copySize;
2677 LZ4_memcpy(op, dictEnd - copySize, copySize);
2680 (
size_t)(op - lowPrefix)) {
2681 BYTE *
const endOfMatch = op + restSize;
2682 const BYTE *copyFrom = lowPrefix;
2683 while (op < endOfMatch) {
2684 *op++ = *copyFrom++;
2698 assert((op <= oend) && (oend - op >= 32));
2700 LZ4_memcpy_using_offset(op, match, cpy, offset);
2703 LZ4_wildCopy32(op, match, cpy);
2713 DEBUGLOG(6,
"using safe decode loop");
2718 DEBUGLOG(7,
"blockPos%6u: litLength token = %u",
2719 (
unsigned)(op - (
BYTE *)dst), (
unsigned)length);
2733 &&
likely((ip < shortiend) & (op <= shortoend))) {
2742 DEBUGLOG(7,
"blockPos%6u: matchLength token = %u (len=%u)",
2743 (
unsigned)(op - (
BYTE *)dst), (
unsigned)length,
2744 (
unsigned)length + 4);
2745 offset = LZ4_readLE16(ip);
2747 match = op - offset;
2751 if ((length !=
ML_MASK) && (offset >= 8) &&
2770 read_variable_length(&ip, iend -
RUN_MASK, 1);
2771 if (addl == rvl_error) {
2783 #if LZ4_FAST_DEC_LOOP
2798 if (partialDecoding) {
2804 DEBUGLOG(7,
"partialDecoding: copying literals, close to "
2805 "input or output end")
2806 DEBUGLOG(7, "partialDecoding: literal length = %u",
2809 7, "partialDecoding: remaining space in dstBuffer : %i",
2812 7, "partialDecoding: remaining space in srcBuffer : %i",
2817 if (ip + length > iend) {
2818 length = (size_t)(iend - ip);
2827 length = (size_t)(oend - op);
2835 if ((ip + length != iend) || (cpy > oend)) {
2836 DEBUGLOG(5,
"should have been last run of literals")
2837 DEBUGLOG(5, "ip(%p) + length(%i) = %p != iend (%p)", ip,
2838 (
int)length, ip + length, iend);
2842 "after writing %u bytes / %i bytes available",
2843 (
unsigned)(op - (
BYTE *)dst), outputSize);
2857 if (!partialDecoding || (cpy == oend) || (ip >= (iend - 2))) {
2862 LZ4_wildCopy8(op, ip,
2869 offset = LZ4_readLE16(ip);
2871 match = op - offset;
2875 DEBUGLOG(7,
"blockPos%6u: matchLength token = %u",
2876 (
unsigned)(op - (
BYTE *)dst), (
unsigned)length);
2882 if (addl == rvl_error) {
2891 #if LZ4_FAST_DEC_LOOP
2894 if ((checkOffset) && (
unlikely(match + dictSize < lowPrefix)))
2900 if (partialDecoding)
2901 length =
MIN(length, (
size_t)(oend - op));
2907 if (length <= (
size_t)(lowPrefix - match)) {
2910 LZ4_memmove(op, dictEnd - (lowPrefix - match), length);
2916 size_t const copySize = (size_t)(lowPrefix - match);
2917 size_t const restSize = length - copySize;
2918 LZ4_memcpy(op, dictEnd - copySize, copySize);
2921 (
size_t)(op - lowPrefix)) {
2922 BYTE *
const endOfMatch = op + restSize;
2923 const BYTE *copyFrom = lowPrefix;
2924 while (op < endOfMatch)
2925 *op++ = *copyFrom++;
2934 assert(match >= lowPrefix);
2942 size_t const mlen =
MIN(length, (
size_t)(oend - op));
2943 const BYTE *
const matchEnd = match + mlen;
2944 BYTE *
const copyEnd = op + mlen;
2945 if (matchEnd > op) {
2946 while (op < copyEnd) {
2966 match += inc32table[offset];
2968 match -= dec64table[offset];
2982 if (op < oCopyLimit) {
2983 LZ4_wildCopy8(op, match, oCopyLimit);
2984 match += oCopyLimit - op;
2994 LZ4_wildCopy8(op + 8, match + 8, cpy);
3001 DEBUGLOG(5,
"decoded %i bytes", (
int)(((
char *)op) - dst));
3002 return (
int)(((
char *)op) - dst);
3006 return (
int)(-(((
const char *)ip) - src)) - 1;
3014 int maxDecompressedSize)
3016 return LZ4_decompress_generic(source, dest, compressedSize,
3023 int targetOutputSize,
int dstCapacity)
3025 dstCapacity =
MIN(targetOutputSize, dstCapacity);
3026 return LZ4_decompress_generic(src, dst, compressedSize, dstCapacity,
3033 DEBUGLOG(5,
"LZ4_decompress_fast");
3034 return LZ4_decompress_unsafe_generic((
const BYTE *)source, (
BYTE *)dest,
3035 originalSize, 0,
NULL, 0);
3043 int compressedSize,
int maxOutputSize)
3045 return LZ4_decompress_generic(source, dest, compressedSize, maxOutputSize,
3051 static int LZ4_decompress_safe_partial_withPrefix64k(
const char *source,
3054 int targetOutputSize,
3057 dstCapacity =
MIN(targetOutputSize, dstCapacity);
3058 return LZ4_decompress_generic(source, dest, compressedSize, dstCapacity,
3067 return LZ4_decompress_unsafe_generic((
const BYTE *)source, (
BYTE *)dest,
3068 originalSize, 64
KB,
NULL, 0);
3072 static int LZ4_decompress_safe_withSmallPrefix(
const char *source,
char *dest,
3077 return LZ4_decompress_generic(source, dest, compressedSize, maxOutputSize,
3079 (
BYTE *)dest - prefixSize,
NULL, 0);
3083 static int LZ4_decompress_safe_partial_withSmallPrefix(
3084 const char *source,
char *dest,
int compressedSize,
int targetOutputSize,
3085 int dstCapacity,
size_t prefixSize)
3087 dstCapacity =
MIN(targetOutputSize, dstCapacity);
3088 return LZ4_decompress_generic(source, dest, compressedSize, dstCapacity,
3090 (
BYTE *)dest - prefixSize,
NULL, 0);
3095 int compressedSize,
int maxOutputSize,
3096 const void *dictStart,
size_t dictSize)
3098 DEBUGLOG(5,
"LZ4_decompress_safe_forceExtDict");
3099 return LZ4_decompress_generic(source, dest, compressedSize, maxOutputSize,
3101 (
const BYTE *)dictStart, dictSize);
3106 const char *source,
char *dest,
int compressedSize,
int targetOutputSize,
3107 int dstCapacity,
const void *dictStart,
size_t dictSize)
3109 dstCapacity =
MIN(targetOutputSize, dstCapacity);
3110 return LZ4_decompress_generic(source, dest, compressedSize, dstCapacity,
3112 (
const BYTE *)dictStart, dictSize);
3116 static int LZ4_decompress_fast_extDict(
const char *source,
char *dest,
3117 int originalSize,
const void *dictStart,
3120 return LZ4_decompress_unsafe_generic((
const BYTE *)source, (
BYTE *)dest,
3122 (
const BYTE *)dictStart, dictSize);
3130 int LZ4_decompress_safe_doubleDict(
const char *source,
char *dest,
3131 int compressedSize,
int maxOutputSize,
3132 size_t prefixSize,
const void *dictStart,
3135 return LZ4_decompress_generic(source, dest, compressedSize, maxOutputSize,
3137 (
BYTE *)dest - prefixSize,
3138 (
const BYTE *)dictStart, dictSize);
3143 #if !defined(LZ4_STATIC_LINKING_ONLY_DISABLE_MEMORY_ALLOCATION)
3153 if (LZ4_stream ==
NULL) {
3168 const char *dictionary,
int dictSize)
3197 if (maxBlockSize < 0)
3201 if (maxBlockSize < 16)
3216 const char *source,
char *dest,
3217 int compressedSize,
int maxOutputSize)
3236 source, dest, compressedSize, maxOutputSize);
3238 result = LZ4_decompress_safe_withSmallPrefix(
3239 source, dest, compressedSize, maxOutputSize, lz4sd->
prefixSize);
3241 result = LZ4_decompress_safe_doubleDict(
3242 source, dest, compressedSize, maxOutputSize, lz4sd->
prefixSize,
3254 source, dest, compressedSize, maxOutputSize, lz4sd->
externalDict,
3267 const char *source,
char *dest,
int originalSize)
3274 DEBUGLOG(5,
"LZ4_decompress_fast_continue (toDecodeSize=%i)", originalSize);
3275 assert(originalSize >= 0);
3278 DEBUGLOG(5,
"first invocation : no prefix nor extDict");
3287 DEBUGLOG(5,
"continue using existing prefix");
3288 result = LZ4_decompress_unsafe_generic(
3297 DEBUGLOG(5,
"prefix becomes extDict");
3300 result = LZ4_decompress_fast_extDict(source, dest, originalSize,
3320 int compressedSize,
int maxOutputSize,
3321 const char *dictStart,
int dictSize)
3325 if (dictStart + dictSize == dest) {
3326 if (dictSize >= 64
KB - 1) {
3328 source, dest, compressedSize, maxOutputSize);
3331 return LZ4_decompress_safe_withSmallPrefix(
3332 source, dest, compressedSize, maxOutputSize, (
size_t)dictSize);
3336 maxOutputSize, dictStart,
3342 int targetOutputSize,
int dstCapacity,
3343 const char *dictStart,
int dictSize)
3347 targetOutputSize, dstCapacity);
3348 if (dictStart + dictSize == dest) {
3349 if (dictSize >= 64
KB - 1) {
3350 return LZ4_decompress_safe_partial_withPrefix64k(
3351 source, dest, compressedSize, targetOutputSize, dstCapacity);
3354 return LZ4_decompress_safe_partial_withSmallPrefix(
3355 source, dest, compressedSize, targetOutputSize, dstCapacity,
3360 source, dest, compressedSize, targetOutputSize, dstCapacity, dictStart,
3365 int originalSize,
const char *dictStart,
3368 if (dictSize == 0 || dictStart + dictSize == dest)
3369 return LZ4_decompress_unsafe_generic((
const BYTE *)source, (
BYTE *)dest,
3370 originalSize, (
size_t)dictSize,
3373 return LZ4_decompress_fast_extDict(source, dest, originalSize, dictStart,
3386 int LZ4_compress(
const char *src,
char *dest,
int srcSize)
3391 char *dst,
int srcSize,
int dstSize)
3401 const char *src,
char *dst,
int srcSize,
3408 char *dest,
int inputSize)
3420 int LZ4_uncompress(
const char *source,
char *dest,
int outputSize)
3444 #if !defined(LZ4_STATIC_LINKING_ONLY_DISABLE_MEMORY_ALLOCATION)
3456 ->internal_donotuse.dictionary;
int LZ4_decompress_safe_forceExtDict(const char *source, char *dest, int compressedSize, int maxOutputSize, const void *dictStart, size_t dictSize)
int LZ4_compress_fast_extState(void *state, const char *source, char *dest, int inputSize, int maxOutputSize, int acceleration)
const char * LZ4_versionString(void)
#define LZ4_STATIC_ASSERT(c)
int LZ4_compressBound(int isize)
int LZ4_loadDict(LZ4_stream_t *LZ4_dict, const char *dictionary, int dictSize)
LZ4_stream_t * LZ4_createStream(void)
int LZ4_decompress_safe_partial_forceExtDict(const char *source, char *dest, int compressedSize, int targetOutputSize, int dstCapacity, const void *dictStart, size_t dictSize)
LZ4_stream_t * LZ4_initStream(void *buffer, size_t size)
int LZ4_loadDict_internal(LZ4_stream_t *LZ4_dict, const char *dictionary, int dictSize, LoadDict_mode_e _ld)
int LZ4_compress_destSize(const char *src, char *dst, int *srcSizePtr, int targetDstSize)
int LZ4_sizeofState(void)
#define LZ4_memcpy(dst, src, size)
void LZ4_resetStream(LZ4_stream_t *LZ4_stream)
#define LZ4_DISTANCE_ABSOLUTE_MAX
void LZ4_resetStream_fast(LZ4_stream_t *ctx)
int LZ4_freeStream(LZ4_stream_t *LZ4_stream)
int LZ4_loadDictSlow(LZ4_stream_t *LZ4_dict, const char *dictionary, int dictSize)
int LZ4_versionNumber(void)
int LZ4_compress_forceExtDict(LZ4_stream_t *LZ4_dict, const char *source, char *dest, int srcSize)
#define FASTLOOP_SAFE_DISTANCE
#define ALLOC_AND_ZERO(s)
#define MEM_INIT(p, v, s)
int LZ4_compress_default(const char *src, char *dst, int srcSize, int dstCapacity)
int LZ4_saveDict(LZ4_stream_t *LZ4_dict, char *safeBuffer, int dictSize)
int LZ4_compress_fast_continue(LZ4_stream_t *LZ4_stream, const char *source, char *dest, int inputSize, int maxOutputSize, int acceleration)
#define LZ4_ACCELERATION_MAX
#define MATCH_SAFEGUARD_DISTANCE
#define assert(condition)
int LZ4_compress_fast_extState_fastReset(void *state, const char *src, char *dst, int srcSize, int dstCapacity, int acceleration)
#define LZ4_ACCELERATION_DEFAULT
void LZ4_attach_dictionary(LZ4_stream_t *workingStream, const LZ4_stream_t *dictionaryStream)
int LZ4_compress_fast(const char *src, char *dest, int srcSize, int dstCapacity, int acceleration)
int LZ4_compress_destSize_extState(void *state, const char *src, char *dst, int *srcSizePtr, int targetDstSize, int acceleration)
int LZ4_decompress_fast_withPrefix64k(const char *src, char *dst, int originalSize)
int LZ4_decompress_fast_usingDict(const char *src, char *dst, int originalSize, const char *dictStart, int dictSize)
#define LZ4_HASHTABLESIZE
int LZ4_compress_limitedOutput_continue(LZ4_stream_t *LZ4_streamPtr, const char *source, char *dest, int inputSize, int maxOutputSize)
#define LZ4_COMPRESSBOUND(isize)
#define LZ4_VERSION_STRING
LZ4_streamDecode_t * LZ4_createStreamDecode(void)
int LZ4_decompress_safe_partial(const char *src, char *dst, int srcSize, int targetOutputSize, int dstCapacity)
void * LZ4_create(char *inputBuffer)
int LZ4_compress_withState(void *state, const char *source, char *dest, int inputSize)
int LZ4_decoderRingBufferSize(int maxBlockSize)
union LZ4_stream_u LZ4_stream_t
int LZ4_compress(const char *src, char *dest, int srcSize)
int LZ4_decompress_safe_partial_usingDict(const char *src, char *dst, int compressedSize, int targetOutputSize, int maxOutputSize, const char *dictStart, int dictSize)
#define LZ4_HASH_SIZE_U32
int LZ4_uncompress(const char *source, char *dest, int outputSize)
int LZ4_decompress_safe_withPrefix64k(const char *src, char *dst, int compressedSize, int maxDstSize)
int LZ4_decompress_fast(const char *src, char *dst, int originalSize)
#define LZ4_DECODER_RING_BUFFER_SIZE(maxBlockSize)
int LZ4_uncompress_unknownOutputSize(const char *source, char *dest, int isize, int maxOutputSize)
int LZ4_compress_limitedOutput(const char *src, char *dest, int srcSize, int maxOutputSize)
#define LZ4_MAX_INPUT_SIZE
char * LZ4_slideInputBuffer(void *state)
int LZ4_sizeofStreamState(void)
int LZ4_setStreamDecode(LZ4_streamDecode_t *LZ4_streamDecode, const char *dictionary, int dictSize)
#define LZ4_VERSION_NUMBER
int LZ4_compress_limitedOutput_withState(void *state, const char *source, char *dest, int inputSize, int maxOutputSize)
int LZ4_decompress_safe_continue(LZ4_streamDecode_t *LZ4_streamDecode, const char *src, char *dst, int srcSize, int dstCapacity)
int LZ4_resetStreamState(void *state, char *inputBuffer)
int LZ4_decompress_safe(const char *src, char *dst, int compressedSize, int dstCapacity)
int LZ4_compress_continue(LZ4_stream_t *LZ4_streamPtr, const char *source, char *dest, int inputSize)
int LZ4_decompress_safe_usingDict(const char *src, char *dst, int srcSize, int dstCapacity, const char *dictStart, int dictSize)
int LZ4_decompress_fast_continue(LZ4_streamDecode_t *LZ4_streamDecode, const char *src, char *dst, int originalSize)
int LZ4_freeStreamDecode(LZ4_streamDecode_t *LZ4_stream)
const LZ4_byte * prefixEnd
const LZ4_byte * externalDict
const LZ4_stream_t_internal * dictCtx
LZ4_u32 hashTable[(1<<(14 - 2))]
const LZ4_byte * dictionary
LZ4_streamDecode_t_internal internal_donotuse
LZ4_stream_t_internal internal_donotuse