Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. * Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
/*! HEAPMODE :
 *  Select how stateless HC compression functions like `LZ4_compress_HC()`
 *  allocate memory for their workspace:
 *  in stack (0:fastest), or in heap (1:default, requires malloc()).
 *  Since workspace is rather large, heap mode is recommended.
 **/
#ifndef LZ4HC_HEAPMODE
#  define LZ4HC_HEAPMODE 1
#endif
#ifdefined(LZ4_FORCE_MEMORY_ACCESS) && (LZ4_FORCE_MEMORY_ACCESS==2) /* lie to the compiler about data alignment; use with caution */ static U64 LZ4_read64(constvoid* memPtr) { return *(const U64*) memPtr; }
#elifdefined(LZ4_FORCE_MEMORY_ACCESS) && (LZ4_FORCE_MEMORY_ACCESS==1) /* __pack instructions are safer, but compiler specific */
LZ4_PACK(typedefstruct { U64 u64; }) LZ4_unalign64; static U64 LZ4_read64(constvoid* ptr) { return ((const LZ4_unalign64*)ptr)->u64; }
/* input sanitization */
DEBUGLOG(5, "LZ4MID_compress (%i bytes)", *srcSizePtr); if (dict == usingDictCtxHc) DEBUGLOG(5, "usingDictCtxHc");
assert(*srcSizePtr >= 0); if (*srcSizePtr) assert(src != NULL); if (maxOutputSize) assert(dst != NULL); if (*srcSizePtr < 0) return 0; /* invalid */ if (maxOutputSize < 0) return 0; /* invalid */ if (*srcSizePtr > LZ4_MAX_INPUT_SIZE) { /* forbidden: no input is allowed to be that large */ return 0;
} if (limit == fillOutput) oend -= LASTLITERALS; /* Hack for support LZ4 format restriction */ if (*srcSizePtr < LZ4_minLength) goto _lz4mid_last_literals; /* Input too small, no compression (all literals) */
/* main loop */ while (ip <= mflimit) { const U32 ipIndex = (U32)(ip - prefixPtr) + prefixIdx; /* search long match */
{ U32 const h8 = LZ4MID_hash8Ptr(ip);
U32 const pos8 = hash8Table[h8];
assert(h8 < LZ4MID_HASHTABLESIZE);
assert(pos8 < ipIndex);
LZ4MID_addPosition(hash8Table, h8, ipIndex); if (ipIndex - pos8 <= LZ4_DISTANCE_MAX) { /* match candidate found */ if (pos8 >= prefixIdx) { const BYTE* const matchPtr = prefixPtr + pos8 - prefixIdx;
assert(matchPtr < ip);
matchLength = LZ4_count(ip, matchPtr, matchlimit); if (matchLength >= MINMATCH) {
DEBUGLOG(7, "found long match at pos %u (len=%u)", pos8, matchLength);
matchDistance = ipIndex - pos8; goto _lz4mid_encode_sequence;
}
} else { if (pos8 >= dictIdx) { /* extDict match candidate */ const BYTE* const matchPtr = dictStart + (pos8 - dictIdx); const size_t safeLen = MIN(prefixIdx - pos8, (size_t)(matchlimit - ip));
matchLength = LZ4_count(ip, matchPtr, ip + safeLen); if (matchLength >= MINMATCH) {
DEBUGLOG(7, "found long match at ExtDict pos %u (len=%u)", pos8, matchLength);
matchDistance = ipIndex - pos8; goto _lz4mid_encode_sequence;
}
}
}
} } /* search short match */
{ U32 const h4 = LZ4MID_hash4Ptr(ip);
U32 const pos4 = hash4Table[h4];
assert(h4 < LZ4MID_HASHTABLESIZE);
assert(pos4 < ipIndex);
LZ4MID_addPosition(hash4Table, h4, ipIndex); if (ipIndex - pos4 <= LZ4_DISTANCE_MAX) { /* match candidate found */ if (pos4 >= prefixIdx) { /* only search within prefix */ const BYTE* const matchPtr = prefixPtr + (pos4 - prefixIdx);
assert(matchPtr < ip);
assert(matchPtr >= prefixPtr);
matchLength = LZ4_count(ip, matchPtr, matchlimit); if (matchLength >= MINMATCH) { /* short match found, let's just check ip+1 for longer */
U32 const h8 = LZ4MID_hash8Ptr(ip+1);
U32 const pos8 = hash8Table[h8];
U32 const m2Distance = ipIndex + 1 - pos8;
matchDistance = ipIndex - pos4; if ( m2Distance <= LZ4_DISTANCE_MAX
&& pos8 >= prefixIdx /* only search within prefix */
&& likely(ip < mflimit)
) { const BYTE* const m2Ptr = prefixPtr + (pos8 - prefixIdx); unsigned ml2 = LZ4_count(ip+1, m2Ptr, matchlimit); if (ml2 > matchLength) {
LZ4MID_addPosition(hash8Table, h8, ipIndex+1);
ip++;
matchLength = ml2;
matchDistance = m2Distance;
} } goto _lz4mid_encode_sequence;
}
} else { if (pos4 >= dictIdx) { /* extDict match candidate */ const BYTE* const matchPtr = dictStart + (pos4 - dictIdx); const size_t safeLen = MIN(prefixIdx - pos4, (size_t)(matchlimit - ip));
matchLength = LZ4_count(ip, matchPtr, ip + safeLen); if (matchLength >= MINMATCH) {
DEBUGLOG(7, "found match at ExtDict pos %u (len=%u)", pos4, matchLength);
matchDistance = ipIndex - pos4; goto _lz4mid_encode_sequence;
}
}
}
} } /* no match found in prefix */ if ( (dict == usingDictCtxHc)
&& (ipIndex - gDictEndIndex < LZ4_DISTANCE_MAX - 8) ) { /* search a match into external dictionary */
LZ4HC_match_t dMatch = searchIntoDict(ip, ipIndex,
matchlimit,
ctx->dictCtx, gDictEndIndex); if (dMatch.len >= MINMATCH) {
DEBUGLOG(7, "found Dictionary match (offset=%i)", dMatch.off);
assert(dMatch.back == 0);
matchLength = (unsigned)dMatch.len;
matchDistance = (unsigned)dMatch.off; goto _lz4mid_encode_sequence;
}
} /* no match found */
ip += 1 + ((ip-anchor) >> 9); /* skip faster over incompressible data */ continue;
/* LZ4HC_countPattern() :
 * pattern32 must be a sample of repetitive pattern of length 1, 2 or 4 (but not 3!)
 * @return : nb of bytes, starting at ip, which match the repeated pattern */
static unsigned
LZ4HC_countPattern(const BYTE* ip, const BYTE* const iEnd, U32 const pattern32)
{
    const BYTE* const iStart = ip;
    /* widen the 32-bit sample to full register width :
     * on 64-bit targets, duplicate the pattern into both halves */
    reg_t const pattern = (sizeof(pattern)==8) ?
        (reg_t)pattern32 + (((reg_t)pattern32) << (sizeof(pattern)*4)) : pattern32;

    /* fast path : compare one register at a time */
    while (likely(ip < iEnd-(sizeof(pattern)-1))) {
        reg_t const diff = LZ4_read_ARCH(ip) ^ pattern;
        if (!diff) { ip += sizeof(pattern); continue; }
        ip += LZ4_NbCommonBytes(diff);
        return (unsigned)(ip - iStart);
    }

    /* tail : fewer than sizeof(pattern) bytes remain, compare byte by byte */
    if (LZ4_isLittleEndian()) {
        reg_t patternByte = pattern;
        while ((ip<iEnd) && (*ip == (BYTE)patternByte)) {
            ip++; patternByte >>= 8;
        }
    } else {  /* big endian : bytes arrive from the most significant end */
        U32 bitOffset = (sizeof(pattern)*8) - 8;
        while (ip < iEnd) {
            BYTE const byte = (BYTE)(pattern >> bitOffset);
            if (*ip != byte) break;
            ip++; bitOffset -= 8;
    }   }

    return (unsigned)(ip - iStart);
}
/* LZ4HC_reverseCountPattern() :
 * pattern must be a sample of repetitive pattern of length 1, 2 or 4 (but not 3!)
 * read using natural platform endianness.
 * @return : nb of bytes, ending right before ip, which match the repeated pattern,
 *           without reading below iLow */
static unsigned
LZ4HC_reverseCountPattern(const BYTE* ip, const BYTE* const iLow, U32 pattern)
{
    const BYTE* const iStart = ip;

    /* fast path : step backwards 4 bytes at a time */
    while (likely(ip >= iLow+4)) {
        if (LZ4_read32(ip-4) != pattern) break;
        ip -= 4;
    }
    {   /* tail : compare the remaining (<4) bytes one by one;
         * scanning the pattern's in-memory bytes works for any endianness */
        const BYTE* bytePtr = (const BYTE*)(&pattern) + 3;
        while (likely(ip>iLow)) {
            if (ip[-1] != *bytePtr) break;
            ip--; bytePtr--;
    }   }
    return (unsigned)(iStart - ip);
}
/* LZ4HC_protectDictEnd() :
 * Checks if the match is in the last 3 bytes of the dictionary, so reading the
 * 4 byte MINMATCH would overflow.
 * @returns true if the match index is okay.
 * note : relies on unsigned wraparound, so matchIndex >= dictLimit also passes. */
static int LZ4HC_protectDictEnd(U32 const dictLimit, U32 const matchIndex)
{
    return ((U32)((dictLimit - 1) - matchIndex) >= 3);
}
DEBUGLOG(7, "LZ4HC_InsertAndGetWiderMatch"); /* First Match */
LZ4HC_Insert(hc4, ip); /* insert all prior positions up to ip (excluded) */
matchIndex = hashTable[LZ4HC_hashPtr(ip)];
DEBUGLOG(7, "First candidate match for pos %u found at index %u / %u (lowestMatchIndex)",
ipIndex, matchIndex, lowestMatchIndex);
while ((matchIndex>=lowestMatchIndex) && (nbAttempts>0)) { int matchLength=0;
nbAttempts--;
assert(matchIndex < ipIndex); if (favorDecSpeed && (ipIndex - matchIndex < 8)) { /* do nothing:
* favorDecSpeed intentionally skips matches with offset < 8 */
} elseif (matchIndex >= prefixIdx) { /* within current Prefix */ const BYTE* const matchPtr = prefixPtr + (matchIndex - prefixIdx);
assert(matchPtr < ip);
assert(longest >= 1); if (LZ4_read16(iLowLimit + longest - 1) == LZ4_read16(matchPtr - lookBackLength + longest - 1)) { if (LZ4_read32(matchPtr) == pattern) { intconst back = lookBackLength ? LZ4HC_countBack(ip, matchPtr, iLowLimit, prefixPtr) : 0;
matchLength = MINMATCH + (int)LZ4_count(ip+MINMATCH, matchPtr+MINMATCH, iHighLimit);
matchLength -= back; if (matchLength > longest) {
longest = matchLength;
offset = (int)(ipIndex - matchIndex);
sBack = back;
DEBUGLOG(7, "Found match of len=%i within prefix, offset=%i, back=%i", longest, offset, -back);
} } }
} else { /* lowestMatchIndex <= matchIndex < dictLimit : within Ext Dict */ const BYTE* const matchPtr = dictStart + (matchIndex - dictIdx);
assert(matchIndex >= dictIdx); if ( likely(matchIndex <= prefixIdx - 4)
&& (LZ4_read32(matchPtr) == pattern) ) { int back = 0; const BYTE* vLimit = ip + (prefixIdx - matchIndex); if (vLimit > iHighLimit) vLimit = iHighLimit;
matchLength = (int)LZ4_count(ip+MINMATCH, matchPtr+MINMATCH, vLimit) + MINMATCH; if ((ip+matchLength == vLimit) && (vLimit < iHighLimit))
matchLength += LZ4_count(ip+matchLength, prefixPtr, iHighLimit);
back = lookBackLength ? LZ4HC_countBack(ip, matchPtr, iLowLimit, dictStart) : 0;
matchLength -= back; if (matchLength > longest) {
longest = matchLength;
offset = (int)(ipIndex - matchIndex);
sBack = back;
DEBUGLOG(7, "Found match of len=%i within dict, offset=%i, back=%i", longest, offset, -back);
} } }
if (chainSwap && matchLength==longest) { /* better match => select a better chain */
assert(lookBackLength==0); /* search forward only */ if (matchIndex + (U32)longest <= ipIndex) { intconst kTrigger = 4;
U32 distanceToNextMatch = 1; intconst end = longest - MINMATCH + 1; int step = 1; int accel = 1 << kTrigger; int pos; for (pos = 0; pos < end; pos += step) {
U32 const candidateDist = DELTANEXTU16(chainTable, matchIndex + (U32)pos);
step = (accel++ >> kTrigger); if (candidateDist > distanceToNextMatch) {
distanceToNextMatch = candidateDist;
matchChainPos = (U32)pos;
accel = 1 << kTrigger;
} } if (distanceToNextMatch > 1) { if (distanceToNextMatch > matchIndex) break; /* avoid overflow */
matchIndex -= distanceToNextMatch; continue;
} } }
{ U32 const distNextMatch = DELTANEXTU16(chainTable, matchIndex); if (patternAnalysis && distNextMatch==1 && matchChainPos==0) {
U32 const matchCandidateIdx = matchIndex-1; /* may be a repeated pattern */ if (repeat == rep_untested) { if ( ((pattern & 0xFFFF) == (pattern >> 16))
& ((pattern & 0xFF) == (pattern >> 24)) ) {
DEBUGLOG(7, "Repeat pattern detected, char %02X", pattern >> 24);
repeat = rep_confirmed;
srcPatternLength = LZ4HC_countPattern(ip+sizeof(pattern), iHighLimit, pattern) + sizeof(pattern);
} else {
repeat = rep_not;
} } if ( (repeat == rep_confirmed) && (matchCandidateIdx >= lowestMatchIndex)
&& LZ4HC_protectDictEnd(prefixIdx, matchCandidateIdx) ) { constint extDict = matchCandidateIdx < prefixIdx; const BYTE* const matchPtr = extDict ? dictStart + (matchCandidateIdx - dictIdx) : prefixPtr + (matchCandidateIdx - prefixIdx); if (LZ4_read32(matchPtr) == pattern) { /* good candidate */ const BYTE* const iLimit = extDict ? dictEnd : iHighLimit;
size_t forwardPatternLength = LZ4HC_countPattern(matchPtr+sizeof(pattern), iLimit, pattern) + sizeof(pattern); if (extDict && matchPtr + forwardPatternLength == iLimit) {
U32 const rotatedPattern = LZ4HC_rotatePattern(forwardPatternLength, pattern);
forwardPatternLength += LZ4HC_countPattern(prefixPtr, iHighLimit, rotatedPattern);
}
{ const BYTE* const lowestMatchPtr = extDict ? dictStart : prefixPtr;
size_t backLength = LZ4HC_reverseCountPattern(matchPtr, lowestMatchPtr, pattern);
size_t currentSegmentLength; if (!extDict
&& matchPtr - backLength == prefixPtr
&& dictIdx < prefixIdx) {
U32 const rotatedPattern = LZ4HC_rotatePattern((U32)(-(int)backLength), pattern);
backLength += LZ4HC_reverseCountPattern(dictEnd, dictStart, rotatedPattern);
} /* Limit backLength not go further than lowestMatchIndex */
backLength = matchCandidateIdx - MAX(matchCandidateIdx - (U32)backLength, lowestMatchIndex);
assert(matchCandidateIdx - backLength >= lowestMatchIndex);
currentSegmentLength = backLength + forwardPatternLength; /* Adjust to end of pattern if the source pattern fits, otherwise the beginning of the pattern */ if ( (currentSegmentLength >= srcPatternLength) /* current pattern segment large enough to contain full srcPatternLength */
&& (forwardPatternLength <= srcPatternLength) ) { /* haven't reached this position yet */
U32 const newMatchIndex = matchCandidateIdx + (U32)forwardPatternLength - (U32)srcPatternLength; /* best position, full pattern, might be followed by more match */ if (LZ4HC_protectDictEnd(prefixIdx, newMatchIndex))
matchIndex = newMatchIndex; else { /* Can only happen if started in the prefix */
assert(newMatchIndex >= prefixIdx - 3 && newMatchIndex < prefixIdx && !extDict);
matchIndex = prefixIdx;
}
} else {
U32 const newMatchIndex = matchCandidateIdx - (U32)backLength; /* farthest position in current segment, will find a match of length currentSegmentLength + maybe some back */ if (!LZ4HC_protectDictEnd(prefixIdx, newMatchIndex)) {
assert(newMatchIndex >= prefixIdx - 3 && newMatchIndex < prefixIdx && !extDict);
matchIndex = prefixIdx;
} else {
matchIndex = newMatchIndex; if (lookBackLength==0) { /* no back possible */
size_t const maxML = MIN(currentSegmentLength, srcPatternLength); if ((size_t)longest < maxML) {
assert(prefixPtr - prefixIdx + matchIndex != ip); if ((size_t)(ip - prefixPtr) + prefixIdx - matchIndex > LZ4_DISTANCE_MAX) break;
assert(maxML < 2 GB);
longest = (int)maxML;
offset = (int)(ipIndex - matchIndex);
assert(sBack == 0);
DEBUGLOG(7, "Found repeat pattern match of len=%i, offset=%i", longest, offset);
}
{ U32 const distToNextPattern = DELTANEXTU16(chainTable, matchIndex); if (distToNextPattern > matchIndex) break; /* avoid overflow */
matchIndex -= distToNextPattern;
} } } } } continue;
} }
} } /* PA optimization */
/* LZ4HC_InsertAndFindBestMatch() :
 * Inserts all positions up to ip, then searches the best match at ip.
 * Convenience wrapper : forward-only search, no chain swap, ratio-favoring. */
LZ4_FORCE_INLINE LZ4HC_match_t
LZ4HC_InsertAndFindBestMatch(LZ4HC_CCtx_internal* const hc4,   /* Index table will be updated */
                             const BYTE* const ip, const BYTE* const iLimit,
                             const int maxNbAttempts,
                             const int patternAnalysis,
                             const dictCtx_directive dict)
{
    DEBUGLOG(7, "LZ4HC_InsertAndFindBestMatch");
    /* note : LZ4HC_InsertAndGetWiderMatch() is able to modify the starting position of a match (*startpos),
     * but this won't be the case here, as we define iLowLimit==ip,
     * so LZ4HC_InsertAndGetWiderMatch() won't be allowed to search past ip */
    return LZ4HC_InsertAndGetWiderMatch(hc4, ip, ip, iLimit, MINMATCH-1,
                                        maxNbAttempts, patternAnalysis,
                                        0 /*chainSwap*/, dict, favorCompressionRatio);
}
/* init */
DEBUGLOG(5, "LZ4HC_compress_hashChain (dict?=>%i)", dict);
*srcSizePtr = 0; if (limit == fillOutput) oend -= LASTLITERALS; /* Hack for support LZ4 format restriction */ if (inputSize < LZ4_minLength) goto _last_literals; /* Input too small, no compression (all literals) */
/* Main Loop */ while (ip <= mflimit) {
m1 = LZ4HC_InsertAndFindBestMatch(ctx, ip, matchlimit, maxNbAttempts, patternAnalysis, dict); if (m1.len<MINMATCH) { ip++; continue; }
/* saved, in case we would skip too much */
start0 = ip; m0 = m1;
_Search2:
DEBUGLOG(7, "_Search2 (currently found match of size %i)", m1.len); if (ip+m1.len <= mflimit) {
start2 = ip + m1.len - 2;
m2 = LZ4HC_InsertAndGetWiderMatch(ctx,
start2, ip + 0, matchlimit, m1.len,
maxNbAttempts, patternAnalysis, 0, dict, favorCompressionRatio);
start2 += m2.back;
} else {
m2 = nomatch; /* do not search further */
}
if (m2.len <= m1.len) { /* No better match => encode ML1 immediately */
optr = op; if (LZ4HC_encodeSequence(UPDATABLE(ip, op, anchor),
m1.len, m1.off,
limit, oend) ) goto _dest_overflow; continue;
}
if (start0 < ip) { /* first match was skipped at least once */ if (start2 < ip + m0.len) { /* squeezing ML1 between ML0(original ML1) and ML2 */
ip = start0; m1 = m0; /* restore initial Match1 */
} }
/* Here, start0==ip */ if ((start2 - ip) < 3) { /* First Match too small : removed */
ip = start2;
m1 = m2; goto _Search2;
}
_Search3: if ((start2 - ip) < OPTIMAL_ML) { int correction; int new_ml = m1.len; if (new_ml > OPTIMAL_ML) new_ml = OPTIMAL_ML; if (ip+new_ml > start2 + m2.len - MINMATCH)
new_ml = (int)(start2 - ip) + m2.len - MINMATCH;
correction = new_ml - (int)(start2 - ip); if (correction > 0) {
start2 += correction;
m2.len -= correction;
}
}
if (m3.len <= m2.len) { /* No better match => encode ML1 and ML2 */ /* ip & ref are known; Now for ml */ if (start2 < ip+m1.len) m1.len = (int)(start2 - ip); /* Now, encode 2 sequences */
optr = op; if (LZ4HC_encodeSequence(UPDATABLE(ip, op, anchor),
m1.len, m1.off,
limit, oend) ) goto _dest_overflow;
ip = start2;
optr = op; if (LZ4HC_encodeSequence(UPDATABLE(ip, op, anchor),
m2.len, m2.off,
limit, oend) ) {
m1 = m2; goto _dest_overflow;
} continue;
}
if (start3 < ip+m1.len+3) { /* Not enough space for match 2 : remove it */ if (start3 >= (ip+m1.len)) { /* can write Seq1 immediately ==> Seq2 is removed, so Seq3 becomes Seq1 */ if (start2 < ip+m1.len) { int correction = (int)(ip+m1.len - start2);
start2 += correction;
m2.len -= correction; if (m2.len < MINMATCH) {
start2 = start3;
m2 = m3;
}
}
optr = op; if (LZ4HC_encodeSequence(UPDATABLE(ip, op, anchor),
m1.len, m1.off,
limit, oend) ) goto _dest_overflow;
ip = start3;
m1 = m3;
start0 = start2;
m0 = m2; goto _Search2;
}
start2 = start3;
m2 = m3; goto _Search3;
}
/* * OK, now we have 3 ascending matches; * let's write the first one ML1. * ip & ref are known; Now decide ml.
*/ if (start2 < ip+m1.len) { if ((start2 - ip) < OPTIMAL_ML) { int correction; if (m1.len > OPTIMAL_ML) m1.len = OPTIMAL_ML; if (ip + m1.len > start2 + m2.len - MINMATCH)
m1.len = (int)(start2 - ip) + m2.len - MINMATCH;
correction = m1.len - (int)(start2 - ip); if (correction > 0) {
start2 += correction;
m2.len -= correction;
}
} else {
m1.len = (int)(start2 - ip);
}
}
optr = op; if ( LZ4HC_encodeSequence(UPDATABLE(ip, op, anchor),
m1.len, m1.off,
limit, oend) ) goto _dest_overflow;
/* ML2 becomes ML1 */
ip = start2; m1 = m2;
/* ML3 becomes ML2 */
start2 = start3; m2 = m3;
/* let's find a new ML3 */ goto _Search3;
}
_last_literals: /* Encode Last Literals */
{ size_t lastRunSize = (size_t)(iend - anchor); /* literals */
size_t llAdd = (lastRunSize + 255 - RUN_MASK) / 255;
size_t const totalSize = 1 + llAdd + lastRunSize; if (limit == fillOutput) oend += LASTLITERALS; /* restore correct value */ if (limit && (op + totalSize > oend)) { if (limit == limitedOutput) return 0; /* adapt lastRunSize to fill 'dest' */
lastRunSize = (size_t)(oend - op) - 1 /*token*/;
llAdd = (lastRunSize + 256 - RUN_MASK) / 256;
lastRunSize -= llAdd;
}
DEBUGLOG(6, "Final literal run : %i literals", (int)lastRunSize);
ip = anchor + lastRunSize; /* can be != iend if limit==fillOutput */
/* state is presumed correctly initialized,
* in which case its size and alignment have already been validate */ int LZ4_compress_HC_extStateHC_fastReset (void* state, constchar* src, char* dst, int srcSize, int dstCapacity, int compressionLevel)
{
LZ4HC_CCtx_internal* const ctx = &((LZ4_streamHC_t*)state)->internal_donotuse; if (!LZ4_isAligned(state, LZ4_streamHC_t_alignment())) return 0;
LZ4_resetStreamHC_fast((LZ4_streamHC_t*)state, compressionLevel);
LZ4HC_init_internal (ctx, (const BYTE*)src); if (dstCapacity < LZ4_compressBound(srcSize)) return LZ4HC_compress_generic (ctx, src, dst, &srcSize, dstCapacity, compressionLevel, limitedOutput); else return LZ4HC_compress_generic (ctx, src, dst, &srcSize, dstCapacity, compressionLevel, notLimited);
}
int LZ4_compress_HC_extStateHC (void* state, constchar* src, char* dst, int srcSize, int dstCapacity, int compressionLevel)
{
LZ4_streamHC_t* const ctx = LZ4_initStreamHC(state, sizeof(*ctx)); if (ctx==NULL) return 0; /* init failure */ return LZ4_compress_HC_extStateHC_fastReset(state, src, dst, srcSize, dstCapacity, compressionLevel);
}
int LZ4_compress_HC(constchar* src, char* dst, int srcSize, int dstCapacity, int compressionLevel)
{ int cSize; #ifdefined(LZ4HC_HEAPMODE) && LZ4HC_HEAPMODE==1
LZ4_streamHC_t* const statePtr = (LZ4_streamHC_t*)ALLOC(sizeof(LZ4_streamHC_t)); if (statePtr==NULL) return 0; #else
LZ4_streamHC_t state;
LZ4_streamHC_t* const statePtr = &state; #endif
DEBUGLOG(5, "LZ4_compress_HC")
cSize = LZ4_compress_HC_extStateHC(statePtr, src, dst, srcSize, dstCapacity, compressionLevel); #ifdefined(LZ4HC_HEAPMODE) && LZ4HC_HEAPMODE==1
FREEMEM(statePtr); #endif return cSize;
}
/* state is presumed sized correctly (>= sizeof(LZ4_streamHC_t)) */ int LZ4_compress_HC_destSize(void* state, constchar* source, char* dest, int* sourceSizePtr, int targetDestSize, int cLevel)
{
LZ4_streamHC_t* const ctx = LZ4_initStreamHC(state, sizeof(*ctx)); if (ctx==NULL) return 0; /* init failure */
LZ4HC_init_internal(&ctx->internal_donotuse, (const BYTE*) source);
LZ4_setCompressionLevel(ctx, cLevel); return LZ4HC_compress_generic(&ctx->internal_donotuse, source, dest, sourceSizePtr, targetDestSize, cLevel, fillOutput);
}
Die Informationen auf dieser Webseite wurden
nach bestem Wissen sorgfältig zusammengestellt. Es wird jedoch weder Vollständigkeit, noch Richtigkeit,
noch Qualität der bereitgestellten Informationen zugesichert.
Bemerkung:
Die farbliche Syntaxdarstellung und die Messung sind noch experimentell.