/* ******************************************************************
 * FSE : Finite State Entropy encoder
 * Copyright (c) Meta Platforms, Inc. and affiliates.
 *
 * You can contact the author at :
 * - FSE source repository : https://github.com/Cyan4973/FiniteStateEntropy
 * - Public forum : https://groups.google.com/forum/#!forum/lz4c
 *
 * This source code is licensed under both the BSD-style license (found in the
 * LICENSE file in the root directory of this source tree) and the GPLv2 (found
 * in the COPYING file in the root directory of this source tree).
 * You may select, at your option, one of the above-listed licenses.
****************************************************************** */

/* **************************************************************
*  Includes
****************************************************************/
#include "../common/compiler.h"
#include "../common/mem.h"        /* U32, U16, etc. */
#include "../common/debug.h"      /* assert, DEBUGLOG */
#include "hist.h"                 /* HIST_count_wksp */
#include "../common/bitstream.h"
#define FSE_STATIC_LINKING_ONLY
#include "../common/fse.h"
#include "../common/error_private.h"
#define ZSTD_DEPS_NEED_MALLOC
#define ZSTD_DEPS_NEED_MATH64
#include "../common/zstd_deps.h"  /* ZSTD_malloc, ZSTD_free, ZSTD_memcpy, ZSTD_memset */
#include "../common/bits.h"       /* ZSTD_highbit32 */

/* **************************************************************
*  Error Management
****************************************************************/
#define FSE_isError ERR_isError


/* **************************************************************
*  Templates
****************************************************************/
/*
  designed to be included
  for type-specific functions (template emulation in C)
  Objective is to write these functions only once, for improved maintenance
*/

/* safety checks */
#ifndef FSE_FUNCTION_EXTENSION
#  error "FSE_FUNCTION_EXTENSION must be defined"
#endif
#ifndef FSE_FUNCTION_TYPE
#  error "FSE_FUNCTION_TYPE must be defined"
#endif

/* Function names */
#define FSE_CAT(X,Y) X##Y
#define FSE_FUNCTION_NAME(X,Y) FSE_CAT(X,Y)
#define FSE_TYPE_NAME(X,Y) FSE_CAT(X,Y)


/* Function templates */
/* FSE_buildCTable_wksp() :
 * Same as FSE_buildCTable(), but using an externally allocated scratch buffer (`workSpace`).
 * wkspSize should be sized to handle worst case situation, which is `1<<max_tableLog * sizeof(FSE_FUNCTION_TYPE)`
 * workSpace must also be properly aligned with FSE_FUNCTION_TYPE requirements
 */
size_t FSE_buildCTable_wksp(FSE_CTable* ct,
                      const short* normalizedCounter, unsigned maxSymbolValue, unsigned tableLog,
                            void* workSpace, size_t wkspSize)
{
    U32 const tableSize = 1 << tableLog;
    U32 const tableMask = tableSize - 1;
    void* const ptr = ct;
    U16* const tableU16 = ( (U16*) ptr) + 2;
    void* const FSCT = ((U32*)ptr) + 1 /* header */ + (tableLog ? tableSize>>1 : 1) ;
    FSE_symbolCompressionTransform* const symbolTT = (FSE_symbolCompressionTransform*) (FSCT);
    U32 const step = FSE_TABLESTEP(tableSize);
    U32 const maxSV1 = maxSymbolValue+1;

    U16* cumul = (U16*)workSpace;   /* size = maxSV1 */
    FSE_FUNCTION_TYPE* const tableSymbol = (FSE_FUNCTION_TYPE*)(cumul + (maxSV1+1));  /* size = tableSize */

    U32 highThreshold = tableSize-1;

    assert(((size_t)workSpace & 1) == 0);  /* Must be 2 bytes-aligned */
    if (FSE_BUILD_CTABLE_WORKSPACE_SIZE(maxSymbolValue, tableLog) > wkspSize) return ERROR(tableLog_tooLarge);
    /* CTable header */
    tableU16[-2] = (U16) tableLog;
    tableU16[-1] = (U16) maxSymbolValue;
    assert(tableLog < 16);   /* required for threshold strategy to work */

    /* For explanations on how to distribute symbol values over the table :
     * https://fastcompression.blogspot.fr/2014/02/fse-distributing-symbol-values.html */

#ifdef __clang_analyzer__
    ZSTD_memset(tableSymbol, 0, sizeof(*tableSymbol) * tableSize);   /* useless initialization, just to keep scan-build happy */
#endif

    /* symbol start positions */
    {   U32 u;
        cumul[0] = 0;
        for (u=1; u <= maxSV1; u++) {
            if (normalizedCounter[u-1]==-1) {  /* Low proba symbol */
                cumul[u] = cumul[u-1] + 1;
                tableSymbol[highThreshold--] = (FSE_FUNCTION_TYPE)(u-1);
            } else {
                assert(normalizedCounter[u-1] >= 0);
                cumul[u] = cumul[u-1] + (U16)normalizedCounter[u-1];
                assert(cumul[u] >= cumul[u-1]);  /* no overflow */
        }   }
        cumul[maxSV1] = (U16)(tableSize+1);
    }

    /* Spread symbols */
    if (highThreshold == tableSize - 1) {
        /* Case for no low prob count symbols. Lay down 8 bytes at a time
         * to reduce branch misses since we are operating on a small block
         */
        BYTE* const spread = tableSymbol + tableSize; /* size = tableSize + 8 (may write beyond tableSize) */
        {   U64 const add = 0x0101010101010101ull;
            size_t pos = 0;
            U64 sv = 0;
            U32 s;
            for (s=0; s<maxSV1; ++s, sv += add) {
                int i;
                int const n = normalizedCounter[s];
                MEM_write64(spread + pos, sv);
                for (i = 8; i < n; i += 8) {
                    MEM_write64(spread + pos + i, sv);
                }
                assert(n>=0);
                pos += (size_t)n;
            }
        }
        /* Spread symbols across the table. Lack of lowprob symbols means that
         * we don't need variable sized inner loop, so we can unroll the loop and
         * reduce branch misses.
         */
        {   size_t position = 0;
            size_t s;
            size_t const unroll = 2; /* Experimentally determined optimal unroll */
            assert(tableSize % unroll == 0); /* FSE_MIN_TABLELOG is 5 */
            for (s = 0; s < (size_t)tableSize; s += unroll) {
                size_t u;
                for (u = 0; u < unroll; ++u) {
                    size_t const uPosition = (position + (u * step)) & tableMask;
                    tableSymbol[uPosition] = spread[s + u];
                }
                position = (position + (unroll * step)) & tableMask;
            }
            assert(position == 0);   /* Must have initialized all positions */
        }
    } else {
        U32 position = 0;
        U32 symbol;
        for (symbol=0; symbol<maxSV1; symbol++) {
            int nbOccurrences;
            int const freq = normalizedCounter[symbol];
            for (nbOccurrences=0; nbOccurrences<freq; nbOccurrences++) {
                tableSymbol[position] = (FSE_FUNCTION_TYPE)symbol;
                position = (position + step) & tableMask;
                while (position > highThreshold)
                    position = (position + step) & tableMask;   /* Low proba area */
        }   }
        assert(position==0);  /* Must have initialized all positions */
    }

    /* Build table */
    {   U32 u; for (u=0; u<tableSize; u++) {
        FSE_FUNCTION_TYPE s = tableSymbol[u];   /* note : static analyzer may not understand tableSymbol is properly initialized */
        tableU16[cumul[s]++] = (U16) (tableSize+u);   /* TableU16 : sorted by symbol order; gives next state value */
    }   }

    /* Build Symbol Transformation Table */
    {   unsigned total = 0;
        unsigned s;
        for (s=0; s<=maxSymbolValue; s++) {
            switch (normalizedCounter[s])
            {
            case  0:
                /* filling nonetheless, for compatibility with FSE_getMaxNbBits() */
                symbolTT[s].deltaNbBits = ((tableLog+1) << 16) - (1<<tableLog);
                break;

            case -1:
            case  1:
                symbolTT[s].deltaNbBits = (tableLog << 16) - (1<<tableLog);
                assert(total <= INT_MAX);
                symbolTT[s].deltaFindState = (int)(total - 1);
                total ++;
                break;
            default :
                assert(normalizedCounter[s] > 1);
                {   U32 const maxBitsOut = tableLog - ZSTD_highbit32 ((U32)normalizedCounter[s]-1);
                    U32 const minStatePlus = (U32)normalizedCounter[s] << maxBitsOut;
                    symbolTT[s].deltaNbBits = (maxBitsOut << 16) - minStatePlus;
                    symbolTT[s].deltaFindState = (int)(total - (unsigned)normalizedCounter[s]);
                    total += (unsigned)normalizedCounter[s];
    }   }   }   }

#if 0  /* debug : symbol costs */
    DEBUGLOG(5, "\n --- table statistics : ");
    {   U32 symbol;
        for (symbol=0; symbol<=maxSymbolValue; symbol++) {
            DEBUGLOG(5, "%3u: w=%3i, maxBits=%u, fracBits=%.2f",
                symbol, normalizedCounter[symbol],
                FSE_getMaxNbBits(symbolTT, symbol),
                (double)FSE_bitCost(symbolTT, tableLog, symbol, 8) / 256);
    }   }
#endif

    return 0;
}
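
#if 0
/* Illustrative sketch (not part of upstream zstd) : how a caller can size the
 * scratch buffer expected by FSE_buildCTable_wksp(). The normalized counter
 * `norm`, `maxSymbolValue` and `tableLog` are assumed to come from a prior
 * FSE_normalizeCount() / FSE_optimalTableLog() step. */
static size_t example_buildCTable(FSE_CTable* ct,
                                  const short* norm, unsigned maxSymbolValue, unsigned tableLog)
{
    /* FSE_BUILD_CTABLE_WORKSPACE_SIZE_U32 (from fse.h) gives a worst-case size in
     * U32 units, so the workspace is both large enough and suitably aligned. */
    U32 wksp[FSE_BUILD_CTABLE_WORKSPACE_SIZE_U32(FSE_MAX_SYMBOL_VALUE, FSE_MAX_TABLELOG)];
    return FSE_buildCTable_wksp(ct, norm, maxSymbolValue, tableLog, wksp, sizeof(wksp));
}
#endif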


#ifndef FSE_COMMONDEFS_ONLY

/*-**************************************************************
*  FSE NCount encoding
****************************************************************/
size_t FSE_NCountWriteBound(unsigned maxSymbolValue, unsigned tableLog)
{
    size_t const maxHeaderSize = (((maxSymbolValue+1) * tableLog
                                   + 4 /* bitCount initialized at 4 */
                                   + 2 /* first two symbols may use one additional bit each */) / 8)
                                    + 1 /* round up to whole nb bytes */
                                    + 2 /* additional two bytes for bitstream flush */;
    return maxSymbolValue ? maxHeaderSize : FSE_NCOUNTBOUND;  /* maxSymbolValue==0 ? use default */
}
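
/* Worked example (illustrative) : for the largest supported alphabet,
 * maxSymbolValue==255 and tableLog==FSE_MAX_TABLELOG==12, the bound above
 * evaluates to ((256*12 + 4 + 2) / 8) + 1 + 2 = 384 + 3 = 387 bytes. */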

static size_t
FSE_writeNCount_generic (void* header, size_t headerBufferSize,
                   const short* normalizedCounter, unsigned maxSymbolValue, unsigned tableLog,
                   unsigned writeIsSafe)
{
    BYTE* const ostart = (BYTE*) header;
    BYTE* out = ostart;
    BYTE* const oend = ostart + headerBufferSize;
    int nbBits;
    const int tableSize = 1 << tableLog;
    int remaining;
    int threshold;
    U32 bitStream = 0;
    int bitCount = 0;
    unsigned symbol = 0;
    unsigned const alphabetSize = maxSymbolValue + 1;
    int previousIs0 = 0;

    /* Table Size */
    bitStream += (tableLog-FSE_MIN_TABLELOG) << bitCount;
    bitCount  += 4;

    /* Init */
    remaining = tableSize+1;   /* +1 for extra accuracy */
    threshold = tableSize;
    nbBits = tableLog+1;

    while ((symbol < alphabetSize) && (remaining>1)) {  /* stops at 1 */
        if (previousIs0) {
            unsigned start = symbol;
            while ((symbol < alphabetSize) && !normalizedCounter[symbol]) symbol++;
            if (symbol == alphabetSize) break;   /* incorrect distribution */
            while (symbol >= start+24) {
                start+=24;
                bitStream += 0xFFFFU << bitCount;
                if ((!writeIsSafe) && (out > oend-2))
                    return ERROR(dstSize_tooSmall);   /* Buffer overflow */
                out[0] = (BYTE) bitStream;
                out[1] = (BYTE)(bitStream>>8);
                out+=2;
                bitStream>>=16;
            }
            while (symbol >= start+3) {
                start+=3;
                bitStream += 3 << bitCount;
                bitCount += 2;
            }
            bitStream += (symbol-start) << bitCount;
            bitCount  += 2;
            if (bitCount>16) {
                if ((!writeIsSafe) && (out > oend - 2))
                    return ERROR(dstSize_tooSmall);   /* Buffer overflow */
                out[0] = (BYTE)bitStream;
                out[1] = (BYTE)(bitStream>>8);
                out += 2;
                bitStream >>= 16;
                bitCount -= 16;
        }   }
        {   int count = normalizedCounter[symbol++];
            int const max = (2*threshold-1) - remaining;
            remaining -= count < 0 ? -count : count;
            count++;   /* +1 for extra accuracy */
            if (count>=threshold)
                count += max;   /* [0..max[ [max..threshold[ (...) [threshold+max 2*threshold[ */
            bitStream += count << bitCount;
            bitCount  += nbBits;
            bitCount  -= (count<max);
            previousIs0  = (count==1);
            if (remaining<1) return ERROR(GENERIC);
            while (remaining<threshold) { nbBits--; threshold>>=1; }
        }
        if (bitCount>16) {
            if ((!writeIsSafe) && (out > oend - 2))
                return ERROR(dstSize_tooSmall);   /* Buffer overflow */
            out[0] = (BYTE)bitStream;
            out[1] = (BYTE)(bitStream>>8);
            out += 2;
            bitStream >>= 16;
            bitCount -= 16;
    }   }

    if (remaining != 1)
        return ERROR(GENERIC);  /* incorrect normalized distribution */
    assert(symbol <= alphabetSize);

    /* flush remaining bitStream */
    if ((!writeIsSafe) && (out > oend - 2))
        return ERROR(dstSize_tooSmall);   /* Buffer overflow */
    out[0] = (BYTE)bitStream;
    out[1] = (BYTE)(bitStream>>8);
    out+= (bitCount+7) /8;

    return (out-ostart);
}


size_t FSE_writeNCount (void* buffer, size_t bufferSize,
                  const short* normalizedCounter, unsigned maxSymbolValue, unsigned tableLog)
{
    if (tableLog > FSE_MAX_TABLELOG) return ERROR(tableLog_tooLarge);   /* Unsupported */
    if (tableLog < FSE_MIN_TABLELOG) return ERROR(GENERIC);   /* Unsupported */

    if (bufferSize < FSE_NCountWriteBound(maxSymbolValue, tableLog))
        return FSE_writeNCount_generic(buffer, bufferSize, normalizedCounter, maxSymbolValue, tableLog, 0);

    return FSE_writeNCount_generic(buffer, bufferSize, normalizedCounter, maxSymbolValue, tableLog, 1 /* write in buffer is safe */);
}
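
#if 0
/* Illustrative sketch (not part of upstream zstd) : serializing a normalized
 * counter into a header buffer. `norm`, `maxSymbolValue` and `tableLog` are
 * assumed to come from FSE_normalizeCount(). Sizing the destination with
 * FSE_NCountWriteBound() lets FSE_writeNCount() take its unchecked fast path. */
static size_t example_writeNCount(void* dst, size_t dstCapacity,
                                  const short* norm, unsigned maxSymbolValue, unsigned tableLog)
{
    assert(dstCapacity >= FSE_NCountWriteBound(maxSymbolValue, tableLog));
    return FSE_writeNCount(dst, dstCapacity, norm, maxSymbolValue, tableLog);  /* returns nb of bytes written */
}
#endif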


/*-**************************************************************
*  FSE Compression Code
****************************************************************/

/* provides the minimum logSize to safely represent a distribution */
static unsigned FSE_minTableLog(size_t srcSize, unsigned maxSymbolValue)
{
    U32 minBitsSrc = ZSTD_highbit32((U32)(srcSize)) + 1;
    U32 minBitsSymbols = ZSTD_highbit32(maxSymbolValue) + 2;
    U32 minBits = minBitsSrc < minBitsSymbols ? minBitsSrc : minBitsSymbols;
    assert(srcSize > 1); /* Not supported, RLE should be used instead */
    return minBits;
}

unsigned FSE_optimalTableLog_internal(unsigned maxTableLog, size_t srcSize, unsigned maxSymbolValue, unsigned minus)
{
    U32 maxBitsSrc = ZSTD_highbit32((U32)(srcSize - 1)) - minus;
    U32 tableLog = maxTableLog;
    U32 minBits = FSE_minTableLog(srcSize, maxSymbolValue);
    assert(srcSize > 1); /* Not supported, RLE should be used instead */
    if (tableLog==0) tableLog = FSE_DEFAULT_TABLELOG;
    if (maxBitsSrc < tableLog) tableLog = maxBitsSrc;   /* Accuracy can be reduced */
    if (minBits > tableLog) tableLog = minBits;   /* Need a minimum to safely represent all symbol values */
    if (tableLog < FSE_MIN_TABLELOG) tableLog = FSE_MIN_TABLELOG;
    if (tableLog > FSE_MAX_TABLELOG) tableLog = FSE_MAX_TABLELOG;
    return tableLog;
}

unsigned FSE_optimalTableLog(unsigned maxTableLog, size_t srcSize, unsigned maxSymbolValue)
{
    return FSE_optimalTableLog_internal(maxTableLog, srcSize, maxSymbolValue, 2);
}
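
/* Worked example (illustrative) : with srcSize==1000 and maxSymbolValue==255,
 * FSE_optimalTableLog(0, 1000, 255) starts from FSE_DEFAULT_TABLELOG, is first
 * reduced to ZSTD_highbit32(999) - 2 == 7 for accuracy, then raised back to
 * FSE_minTableLog(1000, 255) == min(10, 9) == 9, which already lies within
 * [FSE_MIN_TABLELOG, FSE_MAX_TABLELOG], so the chosen tableLog is 9. */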

/* Secondary normalization method.
   To be used when primary method fails. */

static size_t FSE_normalizeM2(short* norm, U32 tableLog, const unsigned* count, size_t total, U32 maxSymbolValue, short lowProbCount)
{
    short const NOT_YET_ASSIGNED = -2;
    U32 s;
    U32 distributed = 0;
    U32 ToDistribute;

    /* Init */
    U32 const lowThreshold = (U32)(total >> tableLog);
    U32 lowOne = (U32)((total * 3) >> (tableLog + 1));

    for (s=0; s<=maxSymbolValue; s++) {
        if (count[s] == 0) {
            norm[s]=0;
            continue;
        }
        if (count[s] <= lowThreshold) {
            norm[s] = lowProbCount;
            distributed++;
            total -= count[s];
            continue;
        }
        if (count[s] <= lowOne) {
            norm[s] = 1;
            distributed++;
            total -= count[s];
            continue;
        }

        norm[s]=NOT_YET_ASSIGNED;
    }
    ToDistribute = (1 << tableLog) - distributed;

    if (ToDistribute == 0)
        return 0;

    if ((total / ToDistribute) > lowOne) {
        /* risk of rounding to zero */
        lowOne = (U32)((total * 3) / (ToDistribute * 2));
        for (s=0; s<=maxSymbolValue; s++) {
            if ((norm[s] == NOT_YET_ASSIGNED) && (count[s] <= lowOne)) {
                norm[s] = 1;
                distributed++;
                total -= count[s];
                continue;
        }   }
        ToDistribute = (1 << tableLog) - distributed;
    }

    if (distributed == maxSymbolValue+1) {
        /* all values are pretty poor;
           probably incompressible data (should have already been detected);
           find max, then give all remaining points to max */
        U32 maxV = 0, maxC = 0;
        for (s=0; s<=maxSymbolValue; s++)
            if (count[s] > maxC) { maxV=s; maxC=count[s]; }
        norm[maxV] += (short)ToDistribute;
        return 0;
    }

    if (total == 0) {
        /* all of the symbols were low enough for the lowOne or lowThreshold */
        for (s=0; ToDistribute > 0; s = (s+1)%(maxSymbolValue+1))
            if (norm[s] > 0) { ToDistribute--; norm[s]++; }
        return 0;
    }

    {   U64 const vStepLog = 62 - tableLog;
        U64 const mid = (1ULL << (vStepLog-1)) - 1;
        U64 const rStep = ZSTD_div64((((U64)1<<vStepLog) * ToDistribute) + mid, (U32)total);   /* scale on remaining */
        U64 tmpTotal = mid;
        for (s=0; s<=maxSymbolValue; s++) {
            if (norm[s]==NOT_YET_ASSIGNED) {
                U64 const end = tmpTotal + (count[s] * rStep);
                U32 const sStart = (U32)(tmpTotal >> vStepLog);
                U32 const sEnd = (U32)(end >> vStepLog);
                U32 const weight = sEnd - sStart;
                if (weight < 1)
                    return ERROR(GENERIC);
                norm[s] = (short)weight;
                tmpTotal = end;
    }   }   }

    return 0;
}

size_t FSE_normalizeCount (short* normalizedCounter, unsigned tableLog,
                           const unsigned* count, size_t total,
                           unsigned maxSymbolValue, unsigned useLowProbCount)
{
    /* Sanity checks */
    if (tableLog==0) tableLog = FSE_DEFAULT_TABLELOG;
    if (tableLog < FSE_MIN_TABLELOG) return ERROR(GENERIC);   /* Unsupported size */
    if (tableLog > FSE_MAX_TABLELOG) return ERROR(tableLog_tooLarge);   /* Unsupported size */
    if (tableLog < FSE_minTableLog(total, maxSymbolValue)) return ERROR(GENERIC);   /* Too small tableLog, compression potentially impossible */

    {   static U32 const rtbTable[] = { 0, 473195, 504333, 520860, 550000, 700000, 750000, 830000 };
        short const lowProbCount = useLowProbCount ? -1 : 1;
        U64 const scale = 62 - tableLog;
        U64 const step = ZSTD_div64((U64)1<<62, (U32)total);   /* <== here, one division ! */
        U64 const vStep = 1ULL<<(scale-20);
        int stillToDistribute = 1<<tableLog;
        unsigned s;
        unsigned largest=0;
        short largestP=0;
        U32 lowThreshold = (U32)(total >> tableLog);

        for (s=0; s<=maxSymbolValue; s++) {
            if (count[s] == total) return 0;   /* rle special case */
            if (count[s] == 0) { normalizedCounter[s]=0; continue; }
            if (count[s] <= lowThreshold) {
                normalizedCounter[s] = lowProbCount;
                stillToDistribute--;
            } else {
                short proba = (short)((count[s]*step) >> scale);
                if (proba<8) {
                    U64 restToBeat = vStep * rtbTable[proba];
                    proba += (count[s]*step) - ((U64)proba<<scale) > restToBeat;
                }
                if (proba > largestP) { largestP=proba; largest=s; }
                normalizedCounter[s] = proba;
                stillToDistribute -= proba;
        }   }
        if (-stillToDistribute >= (normalizedCounter[largest] >> 1)) {
            /* corner case, need another normalization method */
            size_t const errorCode = FSE_normalizeM2(normalizedCounter, tableLog, count, total, maxSymbolValue, lowProbCount);
            if (FSE_isError(errorCode)) return errorCode;
        }
        else normalizedCounter[largest] += (short)stillToDistribute;
    }

#if 0
    {   /* Print Table (debug) */
        U32 s;
        U32 nTotal = 0;
        for (s=0; s<=maxSymbolValue; s++)
            RAWLOG(2, "%3i: %4i \n", s, normalizedCounter[s]);
        for (s=0; s<=maxSymbolValue; s++)
            nTotal += abs(normalizedCounter[s]);
        if (nTotal != (1U<<tableLog))
            RAWLOG(2, "Warning !!! Total == %u != %u !!!", nTotal, 1U<<tableLog);
        getchar();
    }
#endif

    return tableLog;
}
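
#if 0
/* Illustrative sketch (not part of upstream zstd) : normalizing a histogram
 * before building a CTable. `count` is assumed to be a histogram of the input
 * (summing to `srcSize`), for instance produced by HIST_count_wksp();
 * `useLowProbCount` selects whether -1 probabilities are allowed. */
static size_t example_normalize(short* norm, const unsigned* count,
                                size_t srcSize, unsigned maxSymbolValue, unsigned useLowProbCount)
{
    unsigned const tableLog = FSE_optimalTableLog(0 /* default */, srcSize, maxSymbolValue);
    /* on success, FSE_normalizeCount() returns the tableLog actually used */
    return FSE_normalizeCount(norm, tableLog, count, srcSize, maxSymbolValue, useLowProbCount);
}
#endif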

/* fake FSE_CTable, for rle input (always same symbol) */
size_t FSE_buildCTable_rle (FSE_CTable* ct, BYTE symbolValue)
{
    void* ptr = ct;
    U16* tableU16 = ( (U16*) ptr) + 2;
    void* FSCTptr = (U32*)ptr + 2;
    FSE_symbolCompressionTransform* symbolTT = (FSE_symbolCompressionTransform*) FSCTptr;

    /* header */
    tableU16[-2] = (U16) 0;
    tableU16[-1] = (U16) symbolValue;

    /* Build table */
    tableU16[0] = 0;
    tableU16[1] = 0;   /* just in case */

    /* Build Symbol Transformation Table */
    symbolTT[symbolValue].deltaNbBits = 0;
    symbolTT[symbolValue].deltaFindState = 0;

    return 0;
}


static size_t FSE_compress_usingCTable_generic (void* dst, size_t dstSize,
                           const void* src, size_t srcSize,
                           const FSE_CTable* ct, const unsigned fast)
{
    const BYTE* const istart = (const BYTE*) src;
    const BYTE* const iend = istart + srcSize;
    const BYTE* ip=iend;

    BIT_CStream_t bitC;
    FSE_CState_t CState1, CState2;

    /* init */
    if (srcSize <= 2) return 0;
    { size_t const initError = BIT_initCStream(&bitC, dst, dstSize);
      if (FSE_isError(initError)) return 0; /* not enough space available to write a bitstream */ }

#define FSE_FLUSHBITS(s)  (fast ? BIT_flushBitsFast(s) : BIT_flushBits(s))

    if (srcSize & 1) {
        FSE_initCState2(&CState1, ct, *--ip);
        FSE_initCState2(&CState2, ct, *--ip);
        FSE_encodeSymbol(&bitC, &CState1, *--ip);
        FSE_FLUSHBITS(&bitC);
    } else {
        FSE_initCState2(&CState2, ct, *--ip);
        FSE_initCState2(&CState1, ct, *--ip);
    }

    /* join to mod 4 */
    srcSize -= 2;
    if ((sizeof(bitC.bitContainer)*8 > FSE_MAX_TABLELOG*4+7 ) && (srcSize & 2)) {  /* test bit 2 */
        FSE_encodeSymbol(&bitC, &CState2, *--ip);
        FSE_encodeSymbol(&bitC, &CState1, *--ip);
        FSE_FLUSHBITS(&bitC);
    }

    /* 2 or 4 encoding per loop */
    while ( ip>istart ) {

        FSE_encodeSymbol(&bitC, &CState2, *--ip);

        if (sizeof(bitC.bitContainer)*8 < FSE_MAX_TABLELOG*2+7 )   /* this test must be static */
            FSE_FLUSHBITS(&bitC);

        FSE_encodeSymbol(&bitC, &CState1, *--ip);

        if (sizeof(bitC.bitContainer)*8 > FSE_MAX_TABLELOG*4+7 ) {  /* this test must be static */
            FSE_encodeSymbol(&bitC, &CState2, *--ip);
            FSE_encodeSymbol(&bitC, &CState1, *--ip);
        }

        FSE_FLUSHBITS(&bitC);
    }

    FSE_flushCState(&bitC, &CState2);
    FSE_flushCState(&bitC, &CState1);
    return BIT_closeCStream(&bitC);
}

size_t FSE_compress_usingCTable (void* dst, size_t dstSize,
                           const void* src, size_t srcSize,
                           const FSE_CTable* ct)
{
    unsigned const fast = (dstSize >= FSE_BLOCKBOUND(srcSize));

    if (fast)
        return FSE_compress_usingCTable_generic(dst, dstSize, src, srcSize, ct, 1);
    else
        return FSE_compress_usingCTable_generic(dst, dstSize, src, srcSize, ct, 0);
}


size_t FSE_compressBound(size_t size) { return FSE_COMPRESSBOUND(size); }
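
#if 0
/* Illustrative sketch (not part of upstream zstd) : the full single-table FSE
 * compression pipeline as exposed by this file. `count` is assumed to be a
 * histogram of `src` summing to `srcSize`; buffers use worst-case sizes and
 * error handling is reduced to early returns. */
static size_t example_fse_compress(void* dst, size_t dstCapacity,
                                   const void* src, size_t srcSize,
                                   const unsigned* count, unsigned maxSymbolValue)
{
    short norm[FSE_MAX_SYMBOL_VALUE+1];
    FSE_CTable ct[FSE_CTABLE_SIZE_U32(FSE_MAX_TABLELOG, FSE_MAX_SYMBOL_VALUE)];
    U32 wksp[FSE_BUILD_CTABLE_WORKSPACE_SIZE_U32(FSE_MAX_SYMBOL_VALUE, FSE_MAX_TABLELOG)];
    BYTE* const ostart = (BYTE*)dst;
    BYTE* op = ostart;
    BYTE* const oend = ostart + dstCapacity;

    /* 1. choose a table size, then normalize the histogram to sum to 2^tableLog */
    unsigned const tableLog = FSE_optimalTableLog(0, srcSize, maxSymbolValue);
    {   size_t const normResult = FSE_normalizeCount(norm, tableLog, count, srcSize, maxSymbolValue, /* useLowProbCount */ 1);
        if (FSE_isError(normResult)) return normResult;
    }

    /* 2. serialize the normalized counter (the "NCount" header) */
    {   size_t const headerSize = FSE_writeNCount(op, (size_t)(oend-op), norm, maxSymbolValue, tableLog);
        if (FSE_isError(headerSize)) return headerSize;
        op += headerSize;
    }

    /* 3. build the compression table, then encode the payload */
    {   size_t const buildResult = FSE_buildCTable_wksp(ct, norm, maxSymbolValue, tableLog, wksp, sizeof(wksp));
        if (FSE_isError(buildResult)) return buildResult;
    }
    {   size_t const cSize = FSE_compress_usingCTable(op, (size_t)(oend-op), src, srcSize, ct);
        if (FSE_isError(cSize) || cSize == 0) return 0;   /* 0 means not compressible into dst */
        op += cSize;
    }
    return (size_t)(op - ostart);
}
#endif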

#endif   /* FSE_COMMONDEFS_ONLY */