#define ACCELERATION_DEFAULT 1
#if defined(_MSC_VER) && defined(_WIN32_WCE)
#  define LZ4_FORCE_SW_BITCOUNT
#  define FORCE_INLINE static __forceinline
#  pragma warning(disable : 4127)
#  pragma warning(disable : 4293)
#  if defined(__STDC_VERSION__) && (__STDC_VERSION__ >= 199901L)
#    if defined(__GNUC__) || defined(__clang__)
#      define FORCE_INLINE static inline __attribute__((always_inline))
#      define FORCE_INLINE static inline
#    define FORCE_INLINE static
#if (LZ4_GCC_VERSION >= 302) || (__INTEL_COMPILER >= 800) || defined(__clang__)
#  define expect(expr,value)    (__builtin_expect ((expr),(value)) )
#  define expect(expr,value)    (expr)
#define likely(expr)     expect((expr) != 0, 1)
#define unlikely(expr)   expect((expr) != 0, 0)
#define ALLOCATOR(n,s)   calloc(n,s)
#define MEM_INIT         memset
#if defined (__STDC_VERSION__) && (__STDC_VERSION__ >= 199901L)
typedef uint8_t  BYTE;
typedef uint16_t U16;
typedef uint32_t U32;
typedef uint64_t U64;
typedef unsigned short      U16;
typedef unsigned long long  U64;
#define STEPSIZE sizeof(size_t)

static unsigned LZ4_64bits(void) { return sizeof(void*)==8; }

static unsigned LZ4_isLittleEndian(void)
    const union { U32 i; BYTE c[4]; } one = { 1 };

static U16 LZ4_read16(const void* memPtr)
    memcpy(&val16, memPtr, 2);

static U16 LZ4_readLE16(const void* memPtr)
    if (LZ4_isLittleEndian())
        return LZ4_read16(memPtr);
    const BYTE* p = (const BYTE*)memPtr;
    return (U16)((U16)p[0] + (p[1]<<8));

static void LZ4_writeLE16(void* memPtr, U16 value)
    if (LZ4_isLittleEndian())
        memcpy(memPtr, &value, 2);
    p[1] = (BYTE)(value>>8);

static U32 LZ4_read32(const void* memPtr)
    memcpy(&val32, memPtr, 4);

static U64 LZ4_read64(const void* memPtr)
    memcpy(&val64, memPtr, 8);

static size_t LZ4_read_ARCH(const void* p)
    return (size_t)LZ4_read64(p);
    return (size_t)LZ4_read32(p);

static void LZ4_copy4(void* dstPtr, const void* srcPtr) { memcpy(dstPtr, srcPtr, 4); }

static void LZ4_copy8(void* dstPtr, const void* srcPtr) { memcpy(dstPtr, srcPtr, 8); }

static void LZ4_wildCopy(void* dstPtr, const void* srcPtr, void* dstEnd)
    const BYTE* s = (const BYTE*)srcPtr;
    do { LZ4_copy8(d,s); d+=8; s+=8; } while (d<e);
#define LASTLITERALS 5
#define MFLIMIT (COPYLENGTH+MINMATCH)
static const int LZ4_minLength = (MFLIMIT+1);

#define MAX_DISTANCE ((1 << MAXD_LOG) - 1)
#define ML_MASK  ((1U<<ML_BITS)-1)
#define RUN_BITS (8-ML_BITS)
#define RUN_MASK ((1U<<RUN_BITS)-1)

#define LZ4_STATIC_ASSERT(c)    { enum { LZ4_static_assert = 1/(int)(!!(c)) }; }

static unsigned LZ4_NbCommonBytes (register size_t val)
    if (LZ4_isLittleEndian())
#   if defined(_MSC_VER) && defined(_WIN64) && !defined(LZ4_FORCE_SW_BITCOUNT)
        _BitScanForward64( &r, (U64)val );
#   elif (defined(__clang__) || (LZ4_GCC_VERSION >= 304)) && !defined(LZ4_FORCE_SW_BITCOUNT)
        return (__builtin_ctzll((U64)val) >> 3);
        static const int DeBruijnBytePos[64] = { 0, 0, 0, 0, 0, 1, 1, 2, 0, 3, 1, 3, 1, 4, 2, 7, 0, 2, 3, 6, 1, 5, 3, 5, 1, 3, 4, 4, 2, 5, 6, 7, 7, 0, 1, 2, 3, 3, 4, 6, 2, 6, 5, 5, 3, 4, 5, 6, 7, 1, 2, 4, 6, 4, 4, 5, 7, 2, 6, 5, 7, 6, 7, 7 };
        return DeBruijnBytePos[((U64)((val & -(long long)val) * 0x0218A392CDABBD3FULL)) >> 58];
#   if defined(_MSC_VER) && !defined(LZ4_FORCE_SW_BITCOUNT)
        _BitScanForward( &r, (U32)val );
#   elif (defined(__clang__) || (LZ4_GCC_VERSION >= 304)) && !defined(LZ4_FORCE_SW_BITCOUNT)
        return (__builtin_ctz((U32)val) >> 3);
        static const int DeBruijnBytePos[32] = { 0, 0, 3, 0, 3, 1, 3, 0, 3, 2, 2, 1, 3, 2, 0, 1, 3, 3, 1, 2, 2, 2, 2, 0, 3, 1, 2, 0, 1, 0, 1, 1 };
        return DeBruijnBytePos[((U32)((val & -(S32)val) * 0x077CB531U)) >> 27];
#   if defined(_MSC_VER) && defined(_WIN64) && !defined(LZ4_FORCE_SW_BITCOUNT)
        _BitScanReverse64( &r, val );
        return (unsigned)(r>>3);
#   elif (defined(__clang__) || (LZ4_GCC_VERSION >= 304)) && !defined(LZ4_FORCE_SW_BITCOUNT)
        return (__builtin_clzll((U64)val) >> 3);
        if (!(val>>32)) { r=4; } else { r=0; val>>=32; }
        if (!(val>>16)) { r+=2; val>>=8; } else { val>>=24; }
#   if defined(_MSC_VER) && !defined(LZ4_FORCE_SW_BITCOUNT)
        _BitScanReverse( &r, (unsigned long)val );
        return (unsigned)(r>>3);
#   elif (defined(__clang__) || (LZ4_GCC_VERSION >= 304)) && !defined(LZ4_FORCE_SW_BITCOUNT)
        return (__builtin_clz((U32)val) >> 3);
        if (!(val>>16)) { r=2; val>>=8; } else { r=0; val>>=24; }
static unsigned LZ4_count(const BYTE* pIn, const BYTE* pMatch, const BYTE* pInLimit)
    const BYTE* const pStart = pIn;
        size_t diff = LZ4_read_ARCH(pMatch) ^ LZ4_read_ARCH(pIn);
        pIn += LZ4_NbCommonBytes(diff);
        return (unsigned)(pIn - pStart);
    if (LZ4_64bits()) if ((pIn<(pInLimit-3)) && (LZ4_read32(pMatch) == LZ4_read32(pIn))) { pIn+=4; pMatch+=4; }
    if ((pIn<(pInLimit-1)) && (LZ4_read16(pMatch) == LZ4_read16(pIn))) { pIn+=2; pMatch+=2; }
    if ((pIn<pInLimit) && (*pMatch == *pIn)) pIn++;
    return (unsigned)(pIn - pStart);
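LZ4_count relies on LZ4_NbCommonBytes to turn the XOR of two machine words into a byte count in one step. Below is a minimal stand-alone sketch of the same idea on a little-endian target, assuming a GCC/Clang builtin; the helper name and test values are illustrative and not part of lz4.c.

#include <assert.h>
#include <stdint.h>
#include <string.h>

/* Number of equal low-order bytes before the first mismatch, i.e. ctz(diff)/8.
 * Mirrors the little-endian branch of LZ4_NbCommonBytes; diff must be non-zero. */
static unsigned nb_common_bytes_le(uint64_t diff)
{
    return (unsigned)(__builtin_ctzll(diff) >> 3);
}

int main(void)
{
    const char a[8] = { 'A','B','C','D','E','F','G','H' };
    const char b[8] = { 'A','B','C','D','x','F','G','H' };  /* first difference at byte 4 */
    uint64_t va, vb;
    memcpy(&va, a, 8);
    memcpy(&vb, b, 8);
    assert(nb_common_bytes_le(va ^ vb) == 4);   /* 4 matching bytes, as LZ4_count would report */
    return 0;
}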
#ifndef LZ4_COMMONDEFS_ONLY

#define LZ4_HASHLOG   (LZ4_MEMORY_USAGE-2)
#define HASHTABLESIZE (1 << LZ4_MEMORY_USAGE)
#define HASH_SIZE_U32 (1 << LZ4_HASHLOG)

static const int LZ4_64Klimit = ((64 KB) + (MFLIMIT-1));
static const U32 LZ4_skipTrigger = 6;

    const BYTE* dictionary;
} LZ4_stream_t_internal;

static U32 LZ4_hashSequence(U32 sequence, tableType_t const tableType)
    if (tableType == byU16)

static const U64 prime5bytes = 889523592379ULL;
static U32 LZ4_hashSequence64(size_t sequence, tableType_t const tableType)
    const U32 hashMask = (1<<hashLog) - 1;
    return ((sequence * prime5bytes) >> (40 - hashLog)) & hashMask;

static U32 LZ4_hashSequenceT(size_t sequence, tableType_t const tableType)
        return LZ4_hashSequence64(sequence, tableType);
    return LZ4_hashSequence((U32)sequence, tableType);

static U32 LZ4_hashPosition(const void* p, tableType_t tableType) { return LZ4_hashSequenceT(LZ4_read_ARCH(p), tableType); }

static void LZ4_putPositionOnHash(const BYTE* p, U32 h, void* tableBase, tableType_t const tableType, const BYTE* srcBase)
    case byPtr: { const BYTE** hashTable = (const BYTE**)tableBase; hashTable[h] = p; return; }
    case byU32: { U32* hashTable = (U32*) tableBase; hashTable[h] = (U32)(p-srcBase); return; }
    case byU16: { U16* hashTable = (U16*) tableBase; hashTable[h] = (U16)(p-srcBase); return; }

static void LZ4_putPosition(const BYTE* p, void* tableBase, tableType_t tableType, const BYTE* srcBase)
    U32 h = LZ4_hashPosition(p, tableType);
    LZ4_putPositionOnHash(p, h, tableBase, tableType, srcBase);

static const BYTE* LZ4_getPositionOnHash(U32 h, void* tableBase, tableType_t tableType, const BYTE* srcBase)
    if (tableType == byPtr) { const BYTE** hashTable = (const BYTE**) tableBase; return hashTable[h]; }
    if (tableType == byU32) { U32* hashTable = (U32*) tableBase; return hashTable[h] + srcBase; }
    { U16* hashTable = (U16*) tableBase; return hashTable[h] + srcBase; }

static const BYTE* LZ4_getPosition(const BYTE* p, void* tableBase, tableType_t tableType, const BYTE* srcBase)
    U32 h = LZ4_hashPosition(p, tableType);
    return LZ4_getPositionOnHash(h, tableBase, tableType, srcBase);
                 const char* const source,
                 const int maxOutputSize,
                 const tableType_t tableType,
                 const dict_directive dict,
                 const dictIssue_directive dictIssue,
                 const U32 acceleration)
    LZ4_stream_t_internal* const dictPtr = (LZ4_stream_t_internal*)ctx;

    const BYTE* ip = (const BYTE*) source;
    const BYTE* lowLimit;
    const BYTE* const lowRefLimit = ip - dictPtr->dictSize;
    const BYTE* const dictionary = dictPtr->dictionary;
    const BYTE* const dictEnd = dictionary + dictPtr->dictSize;
    const size_t dictDelta = dictEnd - (const BYTE*)source;
    const BYTE* anchor = (const BYTE*) source;
    const BYTE* const iend = ip + inputSize;
    BYTE* const olimit = op + maxOutputSize;

        base = (const BYTE*)source;
        lowLimit = (const BYTE*)source;
        base = (const BYTE*)source - dictPtr->currentOffset;
        lowLimit = (const BYTE*)source - dictPtr->dictSize;
        base = (const BYTE*)source - dictPtr->currentOffset;
        lowLimit = (const BYTE*)source;

    if ((tableType == byU16) && (inputSize>=LZ4_64Klimit)) return 0;
    if (inputSize<LZ4_minLength) goto _last_literals;

    LZ4_putPosition(ip, ctx, tableType, base);
    ip++; forwardH = LZ4_hashPosition(ip, tableType);

        const BYTE* forwardIp = ip;
        unsigned searchMatchNb = acceleration << LZ4_skipTrigger;
            step = (searchMatchNb++ >> LZ4_skipTrigger);

            if (unlikely(forwardIp > mflimit)) goto _last_literals;

            match = LZ4_getPositionOnHash(h, ctx, tableType, base);
            if (match<(const BYTE*)source)
                refDelta = dictDelta;
                lowLimit = dictionary;
                lowLimit = (const BYTE*)source;
            forwardH = LZ4_hashPosition(forwardIp, tableType);
            LZ4_putPositionOnHash(ip, h, ctx, tableType, base);
        } while ( ((dictIssue==dictSmall) ? (match < lowRefLimit) : 0)
                || (LZ4_read32(match+refDelta) != LZ4_read32(ip)) );

        while ((ip>anchor) && (match+refDelta > lowLimit) && (unlikely(ip[-1]==match[refDelta-1]))) { ip--; match--; }

            unsigned litLength = (unsigned)(ip - anchor);
            if ((outputLimited) && (unlikely(op + litLength + (2 + 1 + LASTLITERALS) + (litLength/255) > olimit)))
                for(; len >= 255 ; len-=255) *op++ = 255;
            LZ4_wildCopy(op, anchor, op+litLength);

        LZ4_writeLE16(op, (U16)(ip-match)); op+=2;

            unsigned matchLength;
                limit = ip + (dictEnd-match);
                if (limit > matchlimit) limit = matchlimit;
                    unsigned more = LZ4_count(ip, (const BYTE*)source, matchlimit);

            if ((outputLimited) && (unlikely(op + (1 + LASTLITERALS) + (matchLength>>8) > olimit)))
                for (; matchLength >= 510 ; matchLength-=510) { *op++ = 255; *op++ = 255; }
                if (matchLength >= 255) { matchLength-=255; *op++ = 255; }
                *op++ = (BYTE)matchLength;
            else *token += (BYTE)(matchLength);

        if (ip > mflimit) break;

        LZ4_putPosition(ip-2, ctx, tableType, base);

        match = LZ4_getPosition(ip, ctx, tableType, base);
        if (match<(const BYTE*)source)
            refDelta = dictDelta;
            lowLimit = dictionary;
            lowLimit = (const BYTE*)source;
        LZ4_putPosition(ip, ctx, tableType, base);
        if ( ((dictIssue==dictSmall) ? (match>=lowRefLimit) : 1)
            && (LZ4_read32(match+refDelta)==LZ4_read32(ip)) )
        { token=op++; *token=0; goto _next_match; }

        forwardH = LZ4_hashPosition(++ip, tableType);

        const size_t lastRun = (size_t)(iend - anchor);
        if ((outputLimited) && ((op - (BYTE*)dest) + lastRun + 1 + ((lastRun+255-RUN_MASK)/255) > (U32)maxOutputSize))
            size_t accumulator = lastRun - RUN_MASK;
            for(; accumulator >= 255 ; accumulator-=255) *op++ = 255;
            *op++ = (BYTE) accumulator;
        memcpy(op, anchor, lastRun);

    return (int) (((char*)op)-dest);

    if (inputSize < LZ4_64Klimit)
    if (inputSize < LZ4_64Klimit)

int LZ4_compress_fast(const char* source, char* dest, int inputSize, int maxOutputSize, int acceleration)
    if (inputSize < LZ4_64Klimit)
static int LZ4_compress_destSize_generic(
                       const char* const src,
                       int* const srcSizePtr,
                       const int targetDstSize,
                       const tableType_t tableType)
    const BYTE* base = (const BYTE*) src;
    const BYTE* lowLimit = (const BYTE*) src;
    const BYTE* anchor = ip;
    const BYTE* const iend = ip + *srcSizePtr;
    BYTE* const oend = op + targetDstSize;
    BYTE* const oMaxLit = op + targetDstSize - 2 - 8 - 1;
    BYTE* const oMaxMatch = op + targetDstSize - (LASTLITERALS + 1);
    BYTE* const oMaxSeq = oMaxLit - 1;

    if (targetDstSize < 1) return 0;
    if ((tableType == byU16) && (*srcSizePtr>=LZ4_64Klimit)) return 0;
    if (*srcSizePtr<LZ4_minLength) goto _last_literals;

    LZ4_putPosition(ip, ctx, tableType, base);
    ip++; forwardH = LZ4_hashPosition(ip, tableType);

        const BYTE* forwardIp = ip;
        unsigned searchMatchNb = 1 << LZ4_skipTrigger;
            step = (searchMatchNb++ >> LZ4_skipTrigger);
            match = LZ4_getPositionOnHash(h, ctx, tableType, base);
            forwardH = LZ4_hashPosition(forwardIp, tableType);
            LZ4_putPositionOnHash(ip, h, ctx, tableType, base);
                || (LZ4_read32(match) != LZ4_read32(ip)) );

        while ((ip>anchor) && (match > lowLimit) && (unlikely(ip[-1]==match[-1]))) { ip--; match--; }

            unsigned litLength = (unsigned)(ip - anchor);
            if (op + ((litLength+240)/255) + litLength > oMaxLit)
                unsigned len = litLength - RUN_MASK;
                for(; len >= 255 ; len-=255) *op++ = 255;
            LZ4_wildCopy(op, anchor, op+litLength);

        LZ4_writeLE16(op, (U16)(ip-match)); op+=2;

            if (op + ((matchLength+240)/255) > oMaxMatch)
                matchLength = (15-1) + (oMaxMatch-op) * 255;
                while (matchLength >= 255) { matchLength-=255; *op++ = 255; }
                *op++ = (BYTE)matchLength;
            else *token += (BYTE)(matchLength);

        if (ip > mflimit) break;
        if (op > oMaxSeq) break;

        LZ4_putPosition(ip-2, ctx, tableType, base);

        match = LZ4_getPosition(ip, ctx, tableType, base);
        LZ4_putPosition(ip, ctx, tableType, base);
            && (LZ4_read32(match)==LZ4_read32(ip)) )
        { token=op++; *token=0; goto _next_match; }

        forwardH = LZ4_hashPosition(++ip, tableType);

        size_t lastRunSize = (size_t)(iend - anchor);
        if (op + 1 + ((lastRunSize+240)/255) + lastRunSize > oend)
            lastRunSize = (oend-op) - 1;
            lastRunSize -= (lastRunSize+240)/255;
        ip = anchor + lastRunSize;

            size_t accumulator = lastRunSize - RUN_MASK;
            for(; accumulator >= 255 ; accumulator-=255) *op++ = 255;
            *op++ = (BYTE) accumulator;
        memcpy(op, anchor, lastRunSize);

    *srcSizePtr = (int) (((const char*)ip)-src);
    return (int) (((char*)op)-dst);

static int LZ4_compress_destSize_extState (void* state, const char* src, char* dst, int* srcSizePtr, int targetDstSize)
    if (*srcSizePtr < LZ4_64Klimit)
        return LZ4_compress_destSize_generic(state, src, dst, srcSizePtr, targetDstSize, byU16);
    return LZ4_compress_destSize_generic(state, src, dst, srcSizePtr, targetDstSize, LZ4_64bits() ? byU32 : byPtr);

    void* ctx = &ctxBody;
    int result = LZ4_compress_destSize_extState(ctx, src, dst, srcSizePtr, targetDstSize);
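For reference, a minimal usage sketch of the public entry point LZ4_compress_destSize declared in lz4.h (buffer sizes and input content here are arbitrary illustrations): the function fills dst with at most targetDstSize bytes and rewrites *srcSizePtr with the number of source bytes it actually consumed.

#include <stdio.h>
#include <string.h>
#include "lz4.h"

int main(void)
{
    char src[4096];
    char dst[512];                                  /* fixed-size destination slot */
    memset(src, 'A', sizeof(src));                  /* highly compressible input */

    int srcSize = (int)sizeof(src);                 /* in: bytes available, out: bytes consumed */
    int cSize = LZ4_compress_destSize(src, dst, &srcSize, (int)sizeof(dst));
    if (cSize <= 0) return 1;                       /* 0 means compression failed */

    printf("consumed %d of %d source bytes into %d compressed bytes\n",
           srcSize, (int)sizeof(src), cSize);
    return 0;
}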
#define HASH_UNIT sizeof(size_t)

    LZ4_stream_t_internal* dict = (LZ4_stream_t_internal*) LZ4_dict;
    const BYTE* p = (const BYTE*)dictionary;
    const BYTE* const dictEnd = p + dictSize;

    if ((dict->initCheck) || (dict->currentOffset > 1 GB))
    dict->dictionary = NULL;

    if ((dictEnd - p) > 64 KB) p = dictEnd - 64 KB;
    dict->currentOffset += 64 KB;
    base = p - dict->currentOffset;
    dict->dictionary = p;
    dict->dictSize = (U32)(dictEnd - p);
    dict->currentOffset += dict->dictSize;

    while (p <= dictEnd-HASH_UNIT)
        LZ4_putPosition(p, dict->hashTable, byU32, base);

    return dict->dictSize;

static void LZ4_renormDictT(LZ4_stream_t_internal* LZ4_dict, const BYTE* src)
    if ((LZ4_dict->currentOffset > 0x80000000) ||
        ((size_t)LZ4_dict->currentOffset > (size_t)src))
        U32 delta = LZ4_dict->currentOffset - 64 KB;
        const BYTE* dictEnd = LZ4_dict->dictionary + LZ4_dict->dictSize;
            if (LZ4_dict->hashTable[i] < delta) LZ4_dict->hashTable[i]=0;
            else LZ4_dict->hashTable[i] -= delta;
        LZ4_dict->currentOffset = 64 KB;
        if (LZ4_dict->dictSize > 64 KB) LZ4_dict->dictSize = 64 KB;
        LZ4_dict->dictionary = dictEnd - LZ4_dict->dictSize;
    LZ4_stream_t_internal* streamPtr = (LZ4_stream_t_internal*)LZ4_stream;
    const BYTE* const dictEnd = streamPtr->dictionary + streamPtr->dictSize;

    const BYTE* smallest = (const BYTE*) source;
    if (streamPtr->initCheck) return 0;
    if ((streamPtr->dictSize>0) && (smallest>dictEnd)) smallest = dictEnd;
    LZ4_renormDictT(streamPtr, smallest);

        const BYTE* sourceEnd = (const BYTE*) source + inputSize;
        if ((sourceEnd > streamPtr->dictionary) && (sourceEnd < dictEnd))
            streamPtr->dictSize = (U32)(dictEnd - sourceEnd);
            if (streamPtr->dictSize > 64 KB) streamPtr->dictSize = 64 KB;
            if (streamPtr->dictSize < 4) streamPtr->dictSize = 0;
            streamPtr->dictionary = dictEnd - streamPtr->dictSize;

    if (dictEnd == (const BYTE*)source)
        if ((streamPtr->dictSize < 64 KB) && (streamPtr->dictSize < streamPtr->currentOffset))
        streamPtr->dictSize += (U32)inputSize;
        streamPtr->currentOffset += (U32)inputSize;

        if ((streamPtr->dictSize < 64 KB) && (streamPtr->dictSize < streamPtr->currentOffset))
        streamPtr->dictionary = (const BYTE*)source;
        streamPtr->dictSize = (U32)inputSize;
        streamPtr->currentOffset += (U32)inputSize;

    LZ4_stream_t_internal* streamPtr = (LZ4_stream_t_internal*)LZ4_dict;
    const BYTE* const dictEnd = streamPtr->dictionary + streamPtr->dictSize;

    const BYTE* smallest = dictEnd;
    if (smallest > (const BYTE*) source) smallest = (const BYTE*) source;
    LZ4_renormDictT((LZ4_stream_t_internal*)LZ4_dict, smallest);

    streamPtr->dictionary = (const BYTE*)source;
    streamPtr->dictSize = (U32)inputSize;
    streamPtr->currentOffset += (U32)inputSize;

    LZ4_stream_t_internal* dict = (LZ4_stream_t_internal*) LZ4_dict;
    const BYTE* previousDictEnd = dict->dictionary + dict->dictSize;

    if ((U32)dictSize > 64 KB) dictSize = 64 KB;
    if ((U32)dictSize > dict->dictSize) dictSize = dict->dictSize;

    memmove(safeBuffer, previousDictEnd - dictSize, dictSize);

    dict->dictionary = (const BYTE*)safeBuffer;
    dict->dictSize = (U32)dictSize;
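A hedged sketch of the streaming compression API that the dictionary bookkeeping above serves, using the public functions LZ4_createStream, LZ4_compress_fast_continue and LZ4_freeStream from lz4.h; the chunk size and acceleration value are arbitrary. Consecutive chunks of one contiguous buffer are compressed with a shared stream state, so each call may reference the previous chunk as a dictionary; the earlier data must stay in place for this to be valid.

#include <string.h>
#include "lz4.h"

/* Compress 'srcSize' bytes of 'src' in fixed-size chunks with a shared stream state.
 * Returns total compressed bytes written to 'dst', or -1 on error. */
static int compress_chunked(const char* src, int srcSize, char* dst, int dstCapacity)
{
    const int chunkSize = 16 * 1024;                 /* arbitrary chunk size */
    LZ4_stream_t* const stream = LZ4_createStream();
    int read = 0, written = 0;

    while (read < srcSize)
    {
        int const thisChunk = (srcSize - read < chunkSize) ? srcSize - read : chunkSize;
        int const cSize = LZ4_compress_fast_continue(stream, src + read, dst + written,
                                                     thisChunk, dstCapacity - written, 1);
        if (cSize <= 0) { LZ4_freeStream(stream); return -1; }
        read    += thisChunk;
        written += cSize;
    }
    LZ4_freeStream(stream);
    return written;
}

In a real container format each chunk's compressed size would also be recorded, since the decoder needs the block boundaries to feed LZ4_decompress_safe_continue.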
                 const char* const source,
                 int partialDecoding,
                 int targetOutputSize,
                 const BYTE* const lowPrefix,
                 const BYTE* const dictStart,
                 const size_t dictSize
    const BYTE* ip = (const BYTE*) source;
    const BYTE* const iend = ip + inputSize;
    BYTE* const oend = op + outputSize;
    BYTE* oexit = op + targetOutputSize;
    const BYTE* const lowLimit = lowPrefix - dictSize;

    const BYTE* const dictEnd = (const BYTE*)dictStart + dictSize;
    const size_t dec32table[] = {4, 1, 2, 1, 4, 4, 4, 4};
    const size_t dec64table[] = {0, 0, 0, (size_t)-1, 0, 1, 2, 3};

    const int checkOffset = ((safeDecode) && (dictSize < (int)(64 KB)));

    if ((partialDecoding) && (oexit > oend-MFLIMIT)) oexit = oend-MFLIMIT;
    if ((endOnInput) && (unlikely(outputSize==0))) return ((inputSize==1) && (*ip==0)) ? 0 : -1;
    if ((!endOnInput) && (unlikely(outputSize==0))) return (*ip==0 ? 1 : -1);

        if ((safeDecode) && unlikely((size_t)(op+length)<(size_t)(op))) goto _output_error;
        if ((safeDecode) && unlikely((size_t)(ip+length)<(size_t)(ip))) goto _output_error;

        if (((endOnInput) && ((cpy>(partialDecoding?oexit:oend-MFLIMIT)) || (ip+length>iend-(2+1+LASTLITERALS))) )
            if (partialDecoding)
                if (cpy > oend) goto _output_error;
                if ((endOnInput) && (ip+length > iend)) goto _output_error;
                if ((!endOnInput) && (cpy != oend)) goto _output_error;
                if ((endOnInput) && ((ip+length != iend) || (cpy > oend))) goto _output_error;
            memcpy(op, ip, length);
        LZ4_wildCopy(op, ip, cpy);
        ip += length; op = cpy;

        match = cpy - LZ4_readLE16(ip); ip+=2;
        if ((checkOffset) && (unlikely(match < lowLimit))) goto _output_error;

        if (length == ML_MASK)
            if ((endOnInput) && (ip > iend-LASTLITERALS)) goto _output_error;
            if ((safeDecode) && unlikely((size_t)(op+length)<(size_t)op)) goto _output_error;

            if (length <= (size_t)(lowPrefix-match))
                match = dictEnd - (lowPrefix-match);
                memmove(op, match, length); op += length;
                size_t copySize = (size_t)(lowPrefix-match);
                memcpy(op, dictEnd - copySize, copySize);
                copySize = length - copySize;
                if (copySize > (size_t)(op-lowPrefix))
                    BYTE* const endOfMatch = op + copySize;
                    const BYTE* copyFrom = lowPrefix;
                    while (op < endOfMatch) *op++ = *copyFrom++;
                    memcpy(op, lowPrefix, copySize);

            const size_t dec64 = dec64table[op-match];
            match += dec32table[op-match];
            LZ4_copy4(op+4, match);
            op += 8; match -= dec64;
        } else { LZ4_copy8(op, match); op+=8; match+=8; }

            LZ4_wildCopy(op, match, oend-8);
            match += (oend-8) - op;
            while (op<cpy) *op++ = *match++;
        LZ4_wildCopy(op, match, cpy);

    return (int) (((char*)op)-dest);
    return (int) (((const char*)ip)-source);
    return (int) (-(((const char*)ip)-source))-1;
    return LZ4_decompress_generic(source, dest, compressedSize, maxDecompressedSize, endOnInputSize, full, 0, noDict, (BYTE*)dest, NULL, 0);

    return LZ4_decompress_generic(source, dest, compressedSize, maxDecompressedSize, endOnInputSize, partial, targetOutputSize, noDict, (BYTE*)dest, NULL, 0);

    return LZ4_decompress_generic(source, dest, 0, originalSize, endOnOutputSize, full, 0, withPrefix64k, (BYTE*)(dest - 64 KB), NULL, 64 KB);
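The wrappers above funnel into the documented one-shot API. A minimal round-trip sketch using LZ4_compressBound, LZ4_compress_default and LZ4_decompress_safe from lz4.h (the sample string is arbitrary):

#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include "lz4.h"

int main(void)
{
    const char src[] = "LZ4 is a fast lossless compression algorithm. LZ4 is fast.";
    const int  srcSize = (int)sizeof(src);

    int   bound      = LZ4_compressBound(srcSize);      /* worst-case compressed size */
    char* compressed = malloc((size_t)bound);
    char* restored   = malloc((size_t)srcSize);
    if (!compressed || !restored) return 1;

    int cSize = LZ4_compress_default(src, compressed, srcSize, bound);
    if (cSize <= 0) return 1;                            /* 0 means compression failed */

    int dSize = LZ4_decompress_safe(compressed, restored, cSize, srcSize);
    if (dSize != srcSize || memcmp(src, restored, (size_t)srcSize) != 0) return 1;

    printf("%d -> %d -> %d bytes\n", srcSize, cSize, dSize);
    free(compressed);
    free(restored);
    return 0;
}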
    const BYTE* externalDict;
    const BYTE* prefixEnd;
} LZ4_streamDecode_t_internal;

    LZ4_streamDecode_t_internal* lz4sd = (LZ4_streamDecode_t_internal*) LZ4_streamDecode;
    lz4sd->prefixSize = (size_t) dictSize;
    lz4sd->prefixEnd = (const BYTE*) dictionary + dictSize;
    lz4sd->externalDict = NULL;
    lz4sd->extDictSize = 0;

    LZ4_streamDecode_t_internal* lz4sd = (LZ4_streamDecode_t_internal*) LZ4_streamDecode;

    if (lz4sd->prefixEnd == (BYTE*)dest)
                                        usingExtDict, lz4sd->prefixEnd - lz4sd->prefixSize, lz4sd->externalDict, lz4sd->extDictSize);
        if (result <= 0) return result;
        lz4sd->prefixSize += result;
        lz4sd->prefixEnd  += result;

        lz4sd->extDictSize = lz4sd->prefixSize;
        lz4sd->externalDict = lz4sd->prefixEnd - lz4sd->extDictSize;
        if (result <= 0) return result;
        lz4sd->prefixSize = result;
        lz4sd->prefixEnd  = (BYTE*)dest + result;

    LZ4_streamDecode_t_internal* lz4sd = (LZ4_streamDecode_t_internal*) LZ4_streamDecode;

    if (lz4sd->prefixEnd == (BYTE*)dest)
                                        usingExtDict, lz4sd->prefixEnd - lz4sd->prefixSize, lz4sd->externalDict, lz4sd->extDictSize);
        if (result <= 0) return result;

        lz4sd->extDictSize = lz4sd->prefixSize;
        lz4sd->externalDict = (BYTE*)dest - lz4sd->extDictSize;
        if (result <= 0) return result;
        lz4sd->prefixEnd = (BYTE*)dest + originalSize;
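The continue variants above pair with LZ4_setStreamDecode on the decoding side. A hedged sketch, assuming blocks were produced by a streaming compressor and their compressed sizes are known; cSizes[], nbBlocks and blockMaxSize are illustrative placeholders, not part of lz4.c. Decoding into one contiguous buffer keeps previously decoded data reachable, which is one of the layouts the prefix/extDict tracking above supports.

#include "lz4.h"

/* Decode 'nbBlocks' dependent blocks laid out back-to-back in 'src' into 'dst'.
 * cSizes[i] holds the compressed size of block i; returns total decoded bytes or -1. */
static int decompress_chunked(const char* src, const int* cSizes, int nbBlocks,
                              char* dst, int dstCapacity, int blockMaxSize)
{
    LZ4_streamDecode_t lz4sd;
    int readPos = 0, writePos = 0;

    LZ4_setStreamDecode(&lz4sd, NULL, 0);            /* no starting dictionary */

    for (int i = 0; i < nbBlocks; i++)
    {
        int room = dstCapacity - writePos;
        int cap  = (room < blockMaxSize) ? room : blockMaxSize;
        int dSize = LZ4_decompress_safe_continue(&lz4sd, src + readPos, dst + writePos,
                                                 cSizes[i], cap);
        if (dSize < 0) return -1;
        readPos  += cSizes[i];
        writePos += dSize;
    }
    return writePos;
}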
        return LZ4_decompress_generic(source, dest, compressedSize, maxOutputSize, safe, full, 0, noDict, (BYTE*)dest, NULL, 0);
    if (dictStart+dictSize == dest)
        if (dictSize >= (int)(64 KB - 1))
            return LZ4_decompress_generic(source, dest, compressedSize, maxOutputSize, safe, full, 0, withPrefix64k, (BYTE*)dest-64 KB, NULL, 0);
        return LZ4_decompress_generic(source, dest, compressedSize, maxOutputSize, safe, full, 0, noDict, (BYTE*)dest-dictSize, NULL, 0);
    return LZ4_decompress_generic(source, dest, compressedSize, maxOutputSize, safe, full, 0, usingExtDict, (BYTE*)dest, (const BYTE*)dictStart, dictSize);

    return LZ4_decompress_generic(source, dest, compressedSize, maxOutputSize, endOnInputSize, full, 0, usingExtDict, (BYTE*)dest, (const BYTE*)dictStart, dictSize);
static void LZ4_init(LZ4_stream_t_internal* lz4ds, BYTE* base)
    lz4ds->bufferStart = base;

    if ((((size_t)state) & 3) != 0) return 1;
    LZ4_init((LZ4_stream_t_internal*)state, (BYTE*)inputBuffer);

    LZ4_init ((LZ4_stream_t_internal*)lz4ds, (BYTE*)inputBuffer);

    LZ4_stream_t_internal* ctx = (LZ4_stream_t_internal*)LZ4_Data;
    return (char*)(ctx->bufferStart + dictSize);

    return LZ4_decompress_generic(source, dest, compressedSize, maxOutputSize, endOnInputSize, full, 0, withPrefix64k, (BYTE*)dest - 64 KB, NULL, 64 KB);

    return LZ4_decompress_generic(source, dest, 0, originalSize, endOnOutputSize, full, 0, withPrefix64k, (BYTE*)dest - 64 KB, NULL, 64 KB);
void * LZ4_create(char *inputBuffer)
int LZ4_compress_destSize(const char *src, char *dst, int *srcSizePtr, int targetDstSize)
LZ4_stream_t * LZ4_createStream(void)
char * LZ4_slideInputBuffer(void *LZ4_Data)
int LZ4_compress_fast_force(const char *source, char *dest, int inputSize, int maxOutputSize, int acceleration)
int LZ4_loadDict(LZ4_stream_t *LZ4_dict, const char *dictionary, int dictSize)
int LZ4_compress_limitedOutput(const char *source, char *dest, int inputSize, int maxOutputSize)
int LZ4_decompress_fast(const char *source, char *dest, int originalSize)
int LZ4_setStreamDecode(LZ4_streamDecode_t *LZ4_streamDecode, const char *dictionary, int dictSize)
int LZ4_decompress_safe_partial(const char *source, char *dest, int compressedSize, int targetOutputSize, int maxDecompressedSize)
int LZ4_resetStreamState(void *state, char *inputBuffer)
int LZ4_uncompress_unknownOutputSize(const char *source, char *dest, int isize, int maxOutputSize)
#define LZ4_STREAMSIZE_U64
#define ACCELERATION_DEFAULT
int LZ4_freeStreamDecode(LZ4_streamDecode_t *LZ4_stream)
int LZ4_compress_forceExtDict(LZ4_stream_t *LZ4_dict, const char *source, char *dest, int inputSize)
LZ4_streamDecode_t * LZ4_createStreamDecode(void)
int LZ4_decompress_safe_withPrefix64k(const char *source, char *dest, int compressedSize, int maxOutputSize)
int LZ4_decompress_safe(const char *source, char *dest, int compressedSize, int maxDecompressedSize)
int LZ4_compress_fast_extState(void *state, const char *source, char *dest, int inputSize, int maxOutputSize, int acceleration)
int LZ4_compress_default(const char *source, char *dest, int inputSize, int maxOutputSize)
int LZ4_uncompress(const char *source, char *dest, int outputSize)
FORCE_INLINE int LZ4_compress_generic(void *const ctx, const char *const source, char *const dest, const int inputSize, const int maxOutputSize, const limitedOutput_directive outputLimited, const tableType_t tableType, const dict_directive dict, const dictIssue_directive dictIssue, const U32 acceleration)
int LZ4_decompress_safe_continue(LZ4_streamDecode_t *LZ4_streamDecode, const char *source, char *dest, int compressedSize, int maxOutputSize)
int LZ4_decompress_fast_withPrefix64k(const char *source, char *dest, int originalSize)
int LZ4_decompress_safe_forceExtDict(const char *source, char *dest, int compressedSize, int maxOutputSize, const char *dictStart, int dictSize)
int LZ4_decompress_safe_usingDict(const char *source, char *dest, int compressedSize, int maxOutputSize, const char *dictStart, int dictSize)
int LZ4_compress_withState(void *state, const char *src, char *dst, int srcSize)
int LZ4_compress_fast(const char *source, char *dest, int inputSize, int maxOutputSize, int acceleration)
int LZ4_freeStream(LZ4_stream_t *LZ4_stream)
int LZ4_versionNumber(void)
void LZ4_resetStream(LZ4_stream_t *LZ4_stream)
int LZ4_compress_continue(LZ4_stream_t *LZ4_stream, const char *source, char *dest, int inputSize)
int LZ4_decompress_fast_usingDict(const char *source, char *dest, int originalSize, const char *dictStart, int dictSize)
#define LZ4_STATIC_ASSERT(c)
FORCE_INLINE int LZ4_decompress_generic(const char *const source, char *const dest, int inputSize, int outputSize, int endOnInput, int partialDecoding, int targetOutputSize, int dict, const BYTE *const lowPrefix, const BYTE *const dictStart, const size_t dictSize)
#define LZ4_MAX_INPUT_SIZE
int LZ4_compress_limitedOutput_withState(void *state, const char *src, char *dst, int srcSize, int dstSize)
FORCE_INLINE int LZ4_decompress_usingDict_generic(const char *source, char *dest, int compressedSize, int maxOutputSize, int safe, const char *dictStart, int dictSize)
#define LZ4_VERSION_NUMBER
int LZ4_decompress_fast_continue(LZ4_streamDecode_t *LZ4_streamDecode, const char *source, char *dest, int originalSize)
int LZ4_sizeofStreamState()
int LZ4_compress_limitedOutput_continue(LZ4_stream_t *LZ4_stream, const char *src, char *dst, int srcSize, int maxDstSize)
int LZ4_compress(const char *source, char *dest, int inputSize)
int LZ4_compressBound(int isize)
#define LZ4_COMPRESSBOUND(isize)
int LZ4_saveDict(LZ4_stream_t *LZ4_dict, char *safeBuffer, int dictSize)
int LZ4_compress_fast_continue(LZ4_stream_t *LZ4_stream, const char *source, char *dest, int inputSize, int maxOutputSize, int acceleration)