Lines Matching +full:64 +full:- +full:byte

2  * LZ4 - Fast LZ compression algorithm
3 * Copyright (C) 2011 - 2016, Yann Collet.
4 * BSD 2-Clause License (http://www.opensource.org/licenses/bsd-license.php)
26 * - LZ4 homepage : http://www.lz4.org
27 * - LZ4 source repository : https://github.com/lz4/lz4
30 * Sven Schmidt <[email protected]-hamburg.de>
33 /*-************************************
42 static const int LZ4_64Klimit = ((64 * KB) + (MFLIMIT - 1));
44 /*-******************************
53 >> ((MINMATCH * 8) - (LZ4_HASHLOG + 1))); in LZ4_hash4()
56 >> ((MINMATCH * 8) - LZ4_HASHLOG)); in LZ4_hash4()
70 return (U32)(((sequence << 24) * prime5bytes) >> (64 - hashLog)); in LZ4_hash5()
74 return (U32)(((sequence >> 24) * prime8bytes) >> (64 - hashLog)); in LZ4_hash5()
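The LZ4_hash4()/LZ4_hash5() fragments above turn a 4- or 5-byte input sequence into a hash-table index by multiplying with a large odd constant and keeping only the top LZ4_HASHLOG bits (one extra bit when the 16-bit table is in use). A minimal standalone sketch of the 32-bit case follows; the 2654435761U multiplier comes from the upstream LZ4 sources, and LZ4_HASHLOG = 12 is an assumption here:

    #include <stdint.h>

    #define MINMATCH    4
    #define LZ4_HASHLOG 12                  /* assumed: 2^12-entry hash table */

    /* Multiplicative hash: keep the top LZ4_HASHLOG bits of the product. */
    static uint32_t hash4(uint32_t sequence)
    {
            return (sequence * 2654435761U) >> ((MINMATCH * 8) - LZ4_HASHLOG);
    }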
91 const BYTE *p, in LZ4_putPositionOnHash()
95 const BYTE *srcBase) in LZ4_putPositionOnHash()
100 const BYTE **hashTable = (const BYTE **)tableBase; in LZ4_putPositionOnHash()
109 hashTable[h] = (U32)(p - srcBase); in LZ4_putPositionOnHash()
116 hashTable[h] = (U16)(p - srcBase); in LZ4_putPositionOnHash()
123 const BYTE *p, in LZ4_putPosition()
126 const BYTE *srcBase) in LZ4_putPosition()
133 static const BYTE *LZ4_getPositionOnHash( in LZ4_getPositionOnHash()
137 const BYTE *srcBase) in LZ4_getPositionOnHash()
140 const BYTE **hashTable = (const BYTE **) tableBase; in LZ4_getPositionOnHash()
159 static FORCE_INLINE const BYTE *LZ4_getPosition( in LZ4_getPosition()
160 const BYTE *p, in LZ4_getPosition()
163 const BYTE *srcBase) in LZ4_getPosition()
187 const BYTE *ip = (const BYTE *) source; in LZ4_compress_generic()
188 const BYTE *base; in LZ4_compress_generic()
189 const BYTE *lowLimit; in LZ4_compress_generic()
190 const BYTE * const lowRefLimit = ip - dictPtr->dictSize; in LZ4_compress_generic()
191 const BYTE * const dictionary = dictPtr->dictionary; in LZ4_compress_generic()
192 const BYTE * const dictEnd = dictionary + dictPtr->dictSize; in LZ4_compress_generic()
193 const size_t dictDelta = dictEnd - (const BYTE *)source; in LZ4_compress_generic()
194 const BYTE *anchor = (const BYTE *) source; in LZ4_compress_generic()
195 const BYTE * const iend = ip + inputSize; in LZ4_compress_generic()
196 const BYTE * const mflimit = iend - MFLIMIT; in LZ4_compress_generic()
197 const BYTE * const matchlimit = iend - LASTLITERALS; in LZ4_compress_generic()
199 BYTE *op = (BYTE *) dest; in LZ4_compress_generic()
200 BYTE * const olimit = op + maxOutputSize; in LZ4_compress_generic()
214 base = (const BYTE *)source; in LZ4_compress_generic()
215 lowLimit = (const BYTE *)source; in LZ4_compress_generic()
218 base = (const BYTE *)source - dictPtr->currentOffset; in LZ4_compress_generic()
219 lowLimit = (const BYTE *)source - dictPtr->dictSize; in LZ4_compress_generic()
222 base = (const BYTE *)source - dictPtr->currentOffset; in LZ4_compress_generic()
223 lowLimit = (const BYTE *)source; in LZ4_compress_generic()
229 /* Size too large (not within 64K limit) */ in LZ4_compress_generic()
238 /* First Byte */ in LZ4_compress_generic()
239 LZ4_putPosition(ip, dictPtr->hashTable, tableType, base); in LZ4_compress_generic()
245 const BYTE *match; in LZ4_compress_generic()
246 BYTE *token; in LZ4_compress_generic()
250 const BYTE *forwardIp = ip; in LZ4_compress_generic()
265 dictPtr->hashTable, in LZ4_compress_generic()
269 if (match < (const BYTE *)source) { in LZ4_compress_generic()
274 lowLimit = (const BYTE *)source; in LZ4_compress_generic()
280 LZ4_putPositionOnHash(ip, h, dictPtr->hashTable, in LZ4_compress_generic()
294 && (unlikely(ip[-1] == match[refDelta - 1]))) { in LZ4_compress_generic()
295 ip--; in LZ4_compress_generic()
296 match--; in LZ4_compress_generic()
301 unsigned const int litLength = (unsigned int)(ip - anchor); in LZ4_compress_generic()
313 int len = (int)litLength - RUN_MASK; in LZ4_compress_generic()
317 for (; len >= 255; len -= 255) in LZ4_compress_generic()
319 *op++ = (BYTE)len; in LZ4_compress_generic()
321 *token = (BYTE)(litLength << ML_BITS); in LZ4_compress_generic()
330 LZ4_writeLE16(op, (U16)(ip - match)); in LZ4_compress_generic()
339 const BYTE *limit; in LZ4_compress_generic()
342 limit = ip + (dictEnd - match); in LZ4_compress_generic()
354 (const BYTE *)source, in LZ4_compress_generic()
375 matchCode -= ML_MASK; in LZ4_compress_generic()
381 matchCode -= 4 * 255; in LZ4_compress_generic()
385 *op++ = (BYTE)(matchCode % 255); in LZ4_compress_generic()
387 *token += (BYTE)(matchCode); in LZ4_compress_generic()
397 LZ4_putPosition(ip - 2, dictPtr->hashTable, tableType, base); in LZ4_compress_generic()
400 match = LZ4_getPosition(ip, dictPtr->hashTable, in LZ4_compress_generic()
404 if (match < (const BYTE *)source) { in LZ4_compress_generic()
409 lowLimit = (const BYTE *)source; in LZ4_compress_generic()
413 LZ4_putPosition(ip, dictPtr->hashTable, tableType, base); in LZ4_compress_generic()
430 size_t const lastRun = (size_t)(iend - anchor); in LZ4_compress_generic()
434 ((op - (BYTE *)dest) + lastRun + 1 + in LZ4_compress_generic()
435 ((lastRun + 255 - RUN_MASK) / 255) > (U32)maxOutputSize)) in LZ4_compress_generic()
439 size_t accumulator = lastRun - RUN_MASK; in LZ4_compress_generic()
441 for (; accumulator >= 255; accumulator -= 255) in LZ4_compress_generic()
443 *op++ = (BYTE) accumulator; in LZ4_compress_generic()
445 *op++ = (BYTE)(lastRun << ML_BITS); in LZ4_compress_generic()
454 return (int) (((char *)op) - dest); in LZ4_compress_generic()
465 LZ4_stream_t_internal *ctx = &((LZ4_stream_t *)state)->internal_donotuse; in LZ4_compress_fast_extState()
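LZ4_compress_generic() above does the real work; LZ4_compress_fast_extState() only resets the caller-supplied state and picks the byU16 or byU32 table type depending on whether the input is under LZ4_64Klimit. In the kernel, one-shot callers reach this path through LZ4_compress_default()/LZ4_compress_fast() with a scratch area of LZ4_MEM_COMPRESS bytes. A hedged usage sketch, with the prototypes taken on trust from include/linux/lz4.h:

    #include <linux/lz4.h>
    #include <linux/vmalloc.h>
    #include <linux/errno.h>

    /* One-shot compression through the exported wrapper (sketch only). */
    static int one_shot_compress(const char *src, int src_len,
                                 char *dst, int dst_cap)
    {
            void *wrkmem = vmalloc(LZ4_MEM_COMPRESS);   /* hash-table scratch */
            int clen;

            if (!wrkmem)
                    return -ENOMEM;

            clen = LZ4_compress_default(src, dst, src_len, dst_cap, wrkmem);

            vfree(wrkmem);
            return clen;    /* 0 means the output did not fit in dst_cap */
    }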
518 /*-******************************
529 const BYTE *ip = (const BYTE *) src; in LZ4_compress_destSize_generic()
530 const BYTE *base = (const BYTE *) src; in LZ4_compress_destSize_generic()
531 const BYTE *lowLimit = (const BYTE *) src; in LZ4_compress_destSize_generic()
532 const BYTE *anchor = ip; in LZ4_compress_destSize_generic()
533 const BYTE * const iend = ip + *srcSizePtr; in LZ4_compress_destSize_generic()
534 const BYTE * const mflimit = iend - MFLIMIT; in LZ4_compress_destSize_generic()
535 const BYTE * const matchlimit = iend - LASTLITERALS; in LZ4_compress_destSize_generic()
537 BYTE *op = (BYTE *) dst; in LZ4_compress_destSize_generic()
538 BYTE * const oend = op + targetDstSize; in LZ4_compress_destSize_generic()
539 BYTE * const oMaxLit = op + targetDstSize - 2 /* offset */ in LZ4_compress_destSize_generic()
540 - 8 /* because 8 + MINMATCH == MFLIMIT */ - 1 /* token */; in LZ4_compress_destSize_generic()
541 BYTE * const oMaxMatch = op + targetDstSize in LZ4_compress_destSize_generic()
542 - (LASTLITERALS + 1 /* token */); in LZ4_compress_destSize_generic()
543 BYTE * const oMaxSeq = oMaxLit - 1 /* token */; in LZ4_compress_destSize_generic()
554 /* Size too large (not within 64K limit) */ in LZ4_compress_destSize_generic()
561 /* First Byte */ in LZ4_compress_destSize_generic()
563 LZ4_putPosition(ip, ctx->hashTable, tableType, base); in LZ4_compress_destSize_generic()
568 const BYTE *match; in LZ4_compress_destSize_generic()
569 BYTE *token; in LZ4_compress_destSize_generic()
573 const BYTE *forwardIp = ip; in LZ4_compress_destSize_generic()
587 match = LZ4_getPositionOnHash(h, ctx->hashTable, in LZ4_compress_destSize_generic()
592 ctx->hashTable, tableType, in LZ4_compress_destSize_generic()
604 && (unlikely(ip[-1] == match[-1]))) { in LZ4_compress_destSize_generic()
605 ip--; in LZ4_compress_destSize_generic()
606 match--; in LZ4_compress_destSize_generic()
611 unsigned int litLength = (unsigned int)(ip - anchor); in LZ4_compress_destSize_generic()
617 op--; in LZ4_compress_destSize_generic()
621 unsigned int len = litLength - RUN_MASK; in LZ4_compress_destSize_generic()
623 for (; len >= 255; len -= 255) in LZ4_compress_destSize_generic()
625 *op++ = (BYTE)len; in LZ4_compress_destSize_generic()
627 *token = (BYTE)(litLength << ML_BITS); in LZ4_compress_destSize_generic()
636 LZ4_writeLE16(op, (U16)(ip - match)); op += 2; in LZ4_compress_destSize_generic()
645 matchLength = (15 - 1) + (oMaxMatch - op) * 255; in LZ4_compress_destSize_generic()
651 matchLength -= ML_MASK; in LZ4_compress_destSize_generic()
653 matchLength -= 255; in LZ4_compress_destSize_generic()
656 *op++ = (BYTE)matchLength; in LZ4_compress_destSize_generic()
658 *token += (BYTE)(matchLength); in LZ4_compress_destSize_generic()
670 LZ4_putPosition(ip - 2, ctx->hashTable, tableType, base); in LZ4_compress_destSize_generic()
673 match = LZ4_getPosition(ip, ctx->hashTable, tableType, base); in LZ4_compress_destSize_generic()
674 LZ4_putPosition(ip, ctx->hashTable, tableType, base); in LZ4_compress_destSize_generic()
689 size_t lastRunSize = (size_t)(iend - anchor); in LZ4_compress_destSize_generic()
695 lastRunSize = (oend - op) - 1; in LZ4_compress_destSize_generic()
696 lastRunSize -= (lastRunSize + 240) / 255; in LZ4_compress_destSize_generic()
701 size_t accumulator = lastRunSize - RUN_MASK; in LZ4_compress_destSize_generic()
704 for (; accumulator >= 255; accumulator -= 255) in LZ4_compress_destSize_generic()
706 *op++ = (BYTE) accumulator; in LZ4_compress_destSize_generic()
708 *op++ = (BYTE)(lastRunSize<<ML_BITS); in LZ4_compress_destSize_generic()
715 *srcSizePtr = (int) (((const char *)ip) - src); in LZ4_compress_destSize_generic()
716 return (int) (((char *)op) - dst); in LZ4_compress_destSize_generic()
742 &state->internal_donotuse, in LZ4_compress_destSize_extState()
747 &state->internal_donotuse, in LZ4_compress_destSize_extState()
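LZ4_compress_destSize_generic() inverts the usual contract: rather than sizing the destination for the worst case, it fills a fixed-size destination and reports through *srcSizePtr how many input bytes it managed to consume. A sketch of calling the exported LZ4_compress_destSize() wrapper (prototype assumed from include/linux/lz4.h; the 4 KiB target is only an example):

    #include <linux/lz4.h>
    #include <linux/vmalloc.h>
    #include <linux/errno.h>

    /* Fill one 4 KiB page with as much compressed data as fits (sketch). */
    static int compress_into_page(const char *src, int src_len, char *dst_page)
    {
            void *wrkmem = vmalloc(LZ4_MEM_COMPRESS);
            int consumed = src_len;    /* in: bytes available, out: bytes consumed */
            int written;

            if (!wrkmem)
                    return -ENOMEM;

            written = LZ4_compress_destSize(src, dst_page, &consumed, 4096, wrkmem);

            vfree(wrkmem);
            return written;            /* compressed size, 0 on failure */
    }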
766 /*-******************************
777 LZ4_stream_t_internal *dict = &LZ4_dict->internal_donotuse; in LZ4_loadDict()
778 const BYTE *p = (const BYTE *)dictionary; in LZ4_loadDict()
779 const BYTE * const dictEnd = p + dictSize; in LZ4_loadDict()
780 const BYTE *base; in LZ4_loadDict()
782 if ((dict->initCheck) in LZ4_loadDict()
783 || (dict->currentOffset > 1 * GB)) { in LZ4_loadDict()
789 dict->dictionary = NULL; in LZ4_loadDict()
790 dict->dictSize = 0; in LZ4_loadDict()
794 if ((dictEnd - p) > 64 * KB) in LZ4_loadDict()
795 p = dictEnd - 64 * KB; in LZ4_loadDict()
796 dict->currentOffset += 64 * KB; in LZ4_loadDict()
797 base = p - dict->currentOffset; in LZ4_loadDict()
798 dict->dictionary = p; in LZ4_loadDict()
799 dict->dictSize = (U32)(dictEnd - p); in LZ4_loadDict()
800 dict->currentOffset += dict->dictSize; in LZ4_loadDict()
802 while (p <= dictEnd - HASH_UNIT) { in LZ4_loadDict()
803 LZ4_putPosition(p, dict->hashTable, byU32, base); in LZ4_loadDict()
807 return dict->dictSize; in LZ4_loadDict()
812 const BYTE *src) in LZ4_renormDictT()
814 if ((LZ4_dict->currentOffset > 0x80000000) || in LZ4_renormDictT()
815 ((uptrval)LZ4_dict->currentOffset > (uptrval)src)) { in LZ4_renormDictT()
818 U32 const delta = LZ4_dict->currentOffset - 64 * KB; in LZ4_renormDictT()
819 const BYTE *dictEnd = LZ4_dict->dictionary + LZ4_dict->dictSize; in LZ4_renormDictT()
823 if (LZ4_dict->hashTable[i] < delta) in LZ4_renormDictT()
824 LZ4_dict->hashTable[i] = 0; in LZ4_renormDictT()
826 LZ4_dict->hashTable[i] -= delta; in LZ4_renormDictT()
828 LZ4_dict->currentOffset = 64 * KB; in LZ4_renormDictT()
829 if (LZ4_dict->dictSize > 64 * KB) in LZ4_renormDictT()
830 LZ4_dict->dictSize = 64 * KB; in LZ4_renormDictT()
831 LZ4_dict->dictionary = dictEnd - LZ4_dict->dictSize; in LZ4_renormDictT()
837 LZ4_stream_t_internal * const dict = &LZ4_dict->internal_donotuse; in LZ4_saveDict()
838 const BYTE * const previousDictEnd = dict->dictionary + dict->dictSize; in LZ4_saveDict()
840 if ((U32)dictSize > 64 * KB) { in LZ4_saveDict()
841 /* useless to define a dictionary > 64 * KB */ in LZ4_saveDict()
842 dictSize = 64 * KB; in LZ4_saveDict()
844 if ((U32)dictSize > dict->dictSize) in LZ4_saveDict()
845 dictSize = dict->dictSize; in LZ4_saveDict()
847 memmove(safeBuffer, previousDictEnd - dictSize, dictSize); in LZ4_saveDict()
849 dict->dictionary = (const BYTE *)safeBuffer; in LZ4_saveDict()
850 dict->dictSize = (U32)dictSize; in LZ4_saveDict()
859 LZ4_stream_t_internal *streamPtr = &LZ4_stream->internal_donotuse; in LZ4_compress_fast_continue()
860 const BYTE * const dictEnd = streamPtr->dictionary in LZ4_compress_fast_continue()
861 + streamPtr->dictSize; in LZ4_compress_fast_continue()
863 const BYTE *smallest = (const BYTE *) source; in LZ4_compress_fast_continue()
865 if (streamPtr->initCheck) { in LZ4_compress_fast_continue()
870 if ((streamPtr->dictSize > 0) && (smallest > dictEnd)) in LZ4_compress_fast_continue()
880 const BYTE *sourceEnd = (const BYTE *) source + inputSize; in LZ4_compress_fast_continue()
882 if ((sourceEnd > streamPtr->dictionary) in LZ4_compress_fast_continue()
884 streamPtr->dictSize = (U32)(dictEnd - sourceEnd); in LZ4_compress_fast_continue()
885 if (streamPtr->dictSize > 64 * KB) in LZ4_compress_fast_continue()
886 streamPtr->dictSize = 64 * KB; in LZ4_compress_fast_continue()
887 if (streamPtr->dictSize < 4) in LZ4_compress_fast_continue()
888 streamPtr->dictSize = 0; in LZ4_compress_fast_continue()
889 streamPtr->dictionary = dictEnd - streamPtr->dictSize; in LZ4_compress_fast_continue()
894 if (dictEnd == (const BYTE *)source) { in LZ4_compress_fast_continue()
897 if ((streamPtr->dictSize < 64 * KB) && in LZ4_compress_fast_continue()
898 (streamPtr->dictSize < streamPtr->currentOffset)) { in LZ4_compress_fast_continue()
909 streamPtr->dictSize += (U32)inputSize; in LZ4_compress_fast_continue()
910 streamPtr->currentOffset += (U32)inputSize; in LZ4_compress_fast_continue()
918 if ((streamPtr->dictSize < 64 * KB) && in LZ4_compress_fast_continue()
919 (streamPtr->dictSize < streamPtr->currentOffset)) { in LZ4_compress_fast_continue()
930 streamPtr->dictionary = (const BYTE *)source; in LZ4_compress_fast_continue()
931 streamPtr->dictSize = (U32)inputSize; in LZ4_compress_fast_continue()
932 streamPtr->currentOffset += (U32)inputSize; in LZ4_compress_fast_continue()
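The streaming fragments (LZ4_loadDict(), LZ4_renormDictT(), LZ4_saveDict(), LZ4_compress_fast_continue()) keep up to 64 KB of history so that later blocks can match against earlier ones. A sketch of the simplest continuation case, where the previous block stays resident in memory (kernel prototypes assumed from include/linux/lz4.h; block sizes are arbitrary):

    #include <linux/lz4.h>
    #include <linux/vmalloc.h>
    #include <linux/errno.h>

    /* Compress blk2 with blk1 as its implicit dictionary (sketch only).
     * blk1 must stay valid in memory until blk2 has been compressed;
     * otherwise copy the history out with LZ4_saveDict() first. */
    static int compress_two_blocks(const char *blk1, int len1,
                                   const char *blk2, int len2,
                                   char *out, int out_cap)
    {
            LZ4_stream_t *stream = vmalloc(sizeof(*stream));
            int n1, n2 = 0;

            if (!stream)
                    return -ENOMEM;

            LZ4_resetStream(stream);

            n1 = LZ4_compress_fast_continue(stream, blk1, out, len1, out_cap, 1);
            if (n1)
                    n2 = LZ4_compress_fast_continue(stream, blk2, out + n1,
                                                    len2, out_cap - n1, 1);

            vfree(stream);
            return (n1 && n2) ? n1 + n2 : 0;    /* 0 if either block did not fit */
    }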