TUN-7585: Remove h2mux compression

h2mux is already deprecated and will eventually be removed; in the
meantime, the compression tests cause flaky failures. Removing them and
the brotli code slims down our binaries and drops the dependency on CGO.
Devin Carr 2023-07-14 15:40:20 -07:00
parent 2084a123c2
commit 2ee90483bf
90 changed files with 1 addition and 33436 deletions
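
For context, the functional core of this change is the Handshake hunk further below: the muxer stops probing CompressionIsSupported() and always advertises the compression setting as 0. The following is a minimal, standalone Go sketch of that post-change behaviour; the setting IDs and magic value are illustrative stand-ins, not the real h2mux constants.

package main

import (
	"fmt"

	"golang.org/x/net/http2"
)

// Illustrative stand-ins for the h2mux identifiers seen in the diff below;
// the numeric values are assumptions, not the real constants.
const (
	SettingMuxerMagic  http2.SettingID = 0x42
	SettingCompression http2.SettingID = 0x43
	MuxerMagicEdge     uint32          = 0x0
)

func main() {
	// Before this commit, Handshake called CompressionIsSupported() (a cgo-only
	// brotli probe) and advertised config.CompressionQuality when available.
	// After it, the compression setting is unconditionally sent as 0 (disabled).
	handshakeSetting := http2.Setting{ID: SettingMuxerMagic, Val: MuxerMagicEdge}
	compressionSetting := http2.Setting{ID: SettingCompression, Val: 0}
	fmt.Println(handshakeSetting, compressionSetting)
}

With compression permanently advertised as disabled, the cgo-gated brotli wrappers, the vendored brotli sources, and the compression/dictionary tests deleted below become dead code.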

1
go.mod
View File

@@ -3,7 +3,6 @@ module github.com/cloudflare/cloudflared
go 1.19
require (
github.com/cloudflare/brotli-go v0.0.0-20191101163834-d34379f7ff93
github.com/cloudflare/golibs v0.0.0-20170913112048-333127dbecfc
github.com/coredns/coredns v1.10.0
github.com/coreos/go-oidc/v3 v3.6.0

2
go.sum
View File

@@ -59,8 +59,6 @@ github.com/chzyer/logex v1.1.10/go.mod h1:+Ywpsq7O8HXn0nuIou7OrIPyXbp3wmkHB+jjWR
github.com/chzyer/readline v0.0.0-20180603132655-2972be24d48e/go.mod h1:nSuG5e5PlCu98SY8svDHJxuZscDgtXS6KTTbou5AhLI=
github.com/chzyer/test v0.0.0-20180213035817-a1ea475d72b1/go.mod h1:Q3SI9o4m/ZMnBNeIyt5eFwwo7qiLfzFZmjNmxjkiQlU=
github.com/client9/misspell v0.3.4/go.mod h1:qj6jICC3Q7zFZvVWo7KLAzC3yx5G7kyvSDkc90ppPyw=
github.com/cloudflare/brotli-go v0.0.0-20191101163834-d34379f7ff93 h1:QrGfkZDnMxcWHaYDdB7CmqS9i26OAnUj/xcus/abYkY=
github.com/cloudflare/brotli-go v0.0.0-20191101163834-d34379f7ff93/go.mod h1:QiTe66jFdP7cUKMCCf/WrvDyYdtdmdZfVcdoLbzaKVY=
github.com/cloudflare/circl v1.2.1-0.20220809205628-0a9554f37a47 h1:YzpECHxZ9TzO7LpnKmPxItSd79lLgrR5heIlnqU4dTU=
github.com/cloudflare/circl v1.2.1-0.20220809205628-0a9554f37a47/go.mod h1:qhx8gBILsYlbam7h09SvHDSkjpe3TfLA7b/z4rxJvkE=
github.com/cloudflare/golibs v0.0.0-20170913112048-333127dbecfc h1:Dvk3ySBsOm5EviLx6VCyILnafPcQinXGP5jbTdHUJgE=

View File

@@ -1,6 +1,3 @@
//go:build !cgo
// +build !cgo
package h2mux
import (

View File

@@ -1,22 +0,0 @@
//go:build cgo
// +build cgo
package h2mux
import (
"io"
"github.com/cloudflare/brotli-go"
)
func CompressionIsSupported() bool {
return true
}
func newDecompressor(src io.Reader) *brotli.Reader {
return brotli.NewReader(src)
}
func newCompressor(dst io.Writer, quality, lgwin int) *brotli.Writer {
return brotli.NewWriter(dst, brotli.WriterOptions{Quality: quality, LGWin: lgwin})
}

View File

@@ -135,14 +135,7 @@ func Handshake(
m.f.ReadMetaHeaders = hpack.NewDecoder(4096, func(hpack.HeaderField) {})
// Initialise the settings to identify this connection and confirm the other end is sane.
handshakeSetting := http2.Setting{ID: SettingMuxerMagic, Val: MuxerMagicEdge}
compressionSetting := http2.Setting{ID: SettingCompression, Val: config.CompressionQuality.toH2Setting()}
if CompressionIsSupported() {
config.Log.Debug().Msg("muxer: Compression is supported")
m.compressionQuality = config.CompressionQuality.getPreset()
} else {
config.Log.Debug().Msg("muxer: Compression is not supported")
compressionSetting = http2.Setting{ID: SettingCompression, Val: 0}
}
compressionSetting := http2.Setting{ID: SettingCompression, Val: 0}
expectedMagic := MuxerMagicOrigin
if config.IsClient {

View File

@@ -677,157 +677,6 @@ func AssertIfPipeReadable(t *testing.T, pipe io.ReadCloser) {
}
}
func TestMultipleStreamsWithDictionaries(t *testing.T) {
l := zerolog.Nop()
for q := CompressionNone; q <= CompressionMax; q++ {
htmlBody := `<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.1//EN"` +
`"http://www.w3.org/TR/xhtml11/DTD/xhtml11.dtd">` +
`<html xmlns="http://www.w3.org/1999/xhtml" xml:lang="en">` +
`<head>` +
` <title>Your page title here</title>` +
`</head>` +
`<body>` +
`<h1>Your major heading here</h1>` +
`<p>` +
`This is a regular text paragraph.` +
`</p>` +
`<ul>` +
` <li>` +
` First bullet of a bullet list.` +
` </li>` +
` <li>` +
` This is the <em>second</em> bullet.` +
` </li>` +
`</ul>` +
`</body>` +
`</html>`
f := MuxedStreamFunc(func(stream *MuxedStream) error {
var contentType string
var pathHeader Header
for _, h := range stream.Headers {
if h.Name == ":path" {
pathHeader = h
break
}
}
if pathHeader.Name != ":path" {
panic("Couldn't find :path header in test")
}
if strings.Contains(pathHeader.Value, "html") {
contentType = "text/html; charset=utf-8"
} else if strings.Contains(pathHeader.Value, "js") {
contentType = "application/javascript"
} else if strings.Contains(pathHeader.Value, "css") {
contentType = "text/css"
} else {
contentType = "img/gif"
}
_ = stream.WriteHeaders([]Header{
{Name: "content-type", Value: contentType},
})
_, _ = stream.Write([]byte(strings.Replace(htmlBody, "paragraph", pathHeader.Value, 1) + stream.Headers[5].Value))
return nil
})
muxPair := NewCompressedMuxerPair(t, fmt.Sprintf("%s_%d", t.Name(), q), q, f)
muxPair.Serve(t)
var wg sync.WaitGroup
paths := []string{
"/html1",
"/html2?sa:ds",
"/html3",
"/css1",
"/html1",
"/html2?sa:ds",
"/html3",
"/css1",
"/css2",
"/css3",
"/js",
"/js",
"/js",
"/js2",
"/img2",
"/html1",
"/html2?sa:ds",
"/html3",
"/css1",
"/css2",
"/css3",
"/js",
"/js",
"/js",
"/js2",
"/img1",
}
wg.Add(len(paths))
errorsC := make(chan error, len(paths))
for i, s := range paths {
go func(index int, path string) {
defer wg.Done()
stream, err := muxPair.OpenEdgeMuxStream(
[]Header{
{Name: ":method", Value: "GET"},
{Name: ":scheme", Value: "https"},
{Name: ":authority", Value: "tunnel.otterlyadorable.co.uk"},
{Name: ":path", Value: path},
{Name: "cf-ray", Value: "378948953f044408-SFO-DOG"},
{Name: "idx", Value: strconv.Itoa(index)},
{Name: "accept-encoding", Value: "gzip, br"},
},
nil,
)
if err != nil {
errorsC <- fmt.Errorf("error in OpenStream: %v", err)
return
}
expectBody := strings.Replace(htmlBody, "paragraph", path, 1) + strconv.Itoa(index)
responseBody := make([]byte, len(expectBody)*2)
n, err := stream.Read(responseBody)
if err != nil {
errorsC <- fmt.Errorf("stream %d error from (*MuxedStream).Read: %s", stream.streamID, err)
return
}
if n != len(expectBody) {
errorsC <- fmt.Errorf("stream %d expected response body to have %d bytes, got %d", stream.streamID, len(expectBody), n)
return
}
if string(responseBody[:n]) != expectBody {
errorsC <- fmt.Errorf("stream %d expected response body %s, got %s", stream.streamID, expectBody, responseBody[:n])
return
}
}(i, s)
}
wg.Wait()
close(errorsC)
testFail := false
for err := range errorsC {
testFail = true
l.Error().Msgf("%s", err)
}
if testFail {
t.Fatalf("TestMultipleStreams failed")
}
originMuxMetrics := muxPair.OriginMux.Metrics()
if q > CompressionNone && originMuxMetrics.CompBytesBefore.Value() <= 10*originMuxMetrics.CompBytesAfter.Value() {
t.Fatalf("Cross-stream compression is expected to give a better compression ratio")
}
}
}
func sampleSiteHandler(files map[string][]byte) MuxedStreamFunc {
return func(stream *MuxedStream) error {
var contentType string
@@ -914,82 +763,6 @@ func loadSampleFiles(paths []string) (map[string][]byte, error) {
return files, nil
}
func TestSampleSiteWithDictionaries(t *testing.T) {
paths := []string{
"./sample/index.html",
"./sample/index2.html",
"./sample/index1.html",
"./sample/ghost-url.min.js",
"./sample/jquery.fitvids.js",
"./sample/index1.html",
"./sample/index2.html",
"./sample/index.html",
}
files, err := loadSampleFiles(paths)
assert.NoError(t, err)
for q := CompressionNone; q <= CompressionMax; q++ {
muxPair := NewCompressedMuxerPair(t, fmt.Sprintf("%s_%d", t.Name(), q), q, sampleSiteHandler(files))
muxPair.Serve(t)
var wg sync.WaitGroup
errC := make(chan error, len(paths))
wg.Add(len(paths))
for _, s := range paths {
go func(path string) {
defer wg.Done()
errC <- sampleSiteTest(muxPair, path, files)
}(s)
}
wg.Wait()
close(errC)
for err := range errC {
assert.NoError(t, err)
}
originMuxMetrics := muxPair.OriginMux.Metrics()
if q > CompressionNone && originMuxMetrics.CompBytesBefore.Value() <= 10*originMuxMetrics.CompBytesAfter.Value() {
t.Fatalf("Cross-stream compression is expected to give a better compression ratio")
}
}
}
func TestLongSiteWithDictionaries(t *testing.T) {
paths := []string{
"./sample/index.html",
"./sample/index1.html",
"./sample/index2.html",
"./sample/ghost-url.min.js",
"./sample/jquery.fitvids.js",
}
files, err := loadSampleFiles(paths)
assert.NoError(t, err)
for q := CompressionNone; q <= CompressionMedium; q++ {
muxPair := NewCompressedMuxerPair(t, fmt.Sprintf("%s_%d", t.Name(), q), q, sampleSiteHandler(files))
muxPair.Serve(t)
rand.Seed(time.Now().Unix())
tstLen := 500
errGroup, _ := errgroup.WithContext(context.Background())
for i := 0; i < tstLen; i++ {
errGroup.Go(func() error {
path := paths[rand.Int()%len(paths)]
return sampleSiteTest(muxPair, path, files)
})
}
assert.NoError(t, errGroup.Wait())
originMuxMetrics := muxPair.OriginMux.Metrics()
if q > CompressionNone && originMuxMetrics.CompBytesBefore.Value() <= 10*originMuxMetrics.CompBytesAfter.Value() {
t.Fatalf("Cross-stream compression is expected to give a better compression ratio")
}
}
}
func BenchmarkOpenStream(b *testing.B) {
const streams = 5000
for i := 0; i < b.N; i++ {

View File

@@ -1,130 +0,0 @@
/* Copyright 2013 Google Inc. All Rights Reserved.
Distributed under MIT license.
See file LICENSE for detail or copy at https://opensource.org/licenses/MIT
*/
/* Function to find backward reference copies. */
#include "./enc/backward_references.h"
#include "./common/constants.h"
#include "./common/dictionary.h"
#include <brotli/types.h>
#include "./enc/command.h"
#include "./enc/dictionary_hash.h"
#include "./enc/memory.h"
#include "./enc/port.h"
#include "./enc/quality.h"
#if defined(__cplusplus) || defined(c_plusplus)
extern "C" {
#endif
static BROTLI_INLINE size_t ComputeDistanceCode(size_t distance,
size_t max_distance,
const int* dist_cache) {
if (distance <= max_distance) {
size_t distance_plus_3 = distance + 3;
size_t offset0 = distance_plus_3 - (size_t)dist_cache[0];
size_t offset1 = distance_plus_3 - (size_t)dist_cache[1];
if (distance == (size_t)dist_cache[0]) {
return 0;
} else if (distance == (size_t)dist_cache[1]) {
return 1;
} else if (offset0 < 7) {
return (0x9750468 >> (4 * offset0)) & 0xF;
} else if (offset1 < 7) {
return (0xFDB1ACE >> (4 * offset1)) & 0xF;
} else if (distance == (size_t)dist_cache[2]) {
return 2;
} else if (distance == (size_t)dist_cache[3]) {
return 3;
}
}
return distance + BROTLI_NUM_DISTANCE_SHORT_CODES - 1;
}
#define EXPAND_CAT(a, b) CAT(a, b)
#define CAT(a, b) a ## b
#define FN(X) EXPAND_CAT(X, HASHER())
#define HASHER() H2
/* NOLINTNEXTLINE(build/include) */
#include "./enc/backward_references_inc.h"
#undef HASHER
#define HASHER() H3
/* NOLINTNEXTLINE(build/include) */
#include "./enc/backward_references_inc.h"
#undef HASHER
#define HASHER() H4
/* NOLINTNEXTLINE(build/include) */
#include "./enc/backward_references_inc.h"
#undef HASHER
#define HASHER() H5
/* NOLINTNEXTLINE(build/include) */
#include "./enc/backward_references_inc.h"
#undef HASHER
#define HASHER() H6
/* NOLINTNEXTLINE(build/include) */
#include "./enc/backward_references_inc.h"
#undef HASHER
#define HASHER() H40
/* NOLINTNEXTLINE(build/include) */
#include "./enc/backward_references_inc.h"
#undef HASHER
#define HASHER() H41
/* NOLINTNEXTLINE(build/include) */
#include "./enc/backward_references_inc.h"
#undef HASHER
#define HASHER() H42
/* NOLINTNEXTLINE(build/include) */
#include "./enc/backward_references_inc.h"
#undef HASHER
#define HASHER() H54
/* NOLINTNEXTLINE(build/include) */
#include "./enc/backward_references_inc.h"
#undef HASHER
#undef FN
#undef CAT
#undef EXPAND_CAT
void BrotliCreateBackwardReferences(const BrotliDictionary* dictionary,
size_t num_bytes,
size_t position,
const uint8_t* ringbuffer,
size_t ringbuffer_mask,
const BrotliEncoderParams* params,
HasherHandle hasher,
int* dist_cache,
size_t* last_insert_len,
Command* commands,
size_t* num_commands,
size_t* num_literals) {
switch (params->hasher.type) {
#define CASE_(N) \
case N: \
CreateBackwardReferencesH ## N(dictionary, \
kStaticDictionaryHash, num_bytes, position, ringbuffer, \
ringbuffer_mask, params, hasher, dist_cache, \
last_insert_len, commands, num_commands, num_literals); \
break;
FOR_GENERIC_HASHERS(CASE_)
#undef CASE_
default:
break;
}
}
#if defined(__cplusplus) || defined(c_plusplus)
} /* extern "C" */
#endif

View File

@@ -1,790 +0,0 @@
/* Copyright 2013 Google Inc. All Rights Reserved.
Distributed under MIT license.
See file LICENSE for detail or copy at https://opensource.org/licenses/MIT
*/
/* Function to find backward reference copies. */
#include "./enc/backward_references_hq.h"
#include <string.h> /* memcpy, memset */
#include "./common/constants.h"
#include <brotli/types.h>
#include "./enc/command.h"
#include "./enc/fast_log.h"
#include "./enc/find_match_length.h"
#include "./enc/literal_cost.h"
#include "./enc/memory.h"
#include "./enc/port.h"
#include "./enc/prefix.h"
#include "./enc/quality.h"
#if defined(__cplusplus) || defined(c_plusplus)
extern "C" {
#endif
static const float kInfinity = 1.7e38f; /* ~= 2 ^ 127 */
static const uint32_t kDistanceCacheIndex[] = {
0, 1, 2, 3, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1,
};
static const int kDistanceCacheOffset[] = {
0, 0, 0, 0, -1, 1, -2, 2, -3, 3, -1, 1, -2, 2, -3, 3
};
void BrotliInitZopfliNodes(ZopfliNode* array, size_t length) {
ZopfliNode stub;
size_t i;
stub.length = 1;
stub.distance = 0;
stub.insert_length = 0;
stub.u.cost = kInfinity;
for (i = 0; i < length; ++i) array[i] = stub;
}
static BROTLI_INLINE uint32_t ZopfliNodeCopyLength(const ZopfliNode* self) {
return self->length & 0xffffff;
}
static BROTLI_INLINE uint32_t ZopfliNodeLengthCode(const ZopfliNode* self) {
const uint32_t modifier = self->length >> 24;
return ZopfliNodeCopyLength(self) + 9u - modifier;
}
static BROTLI_INLINE uint32_t ZopfliNodeCopyDistance(const ZopfliNode* self) {
return self->distance & 0x1ffffff;
}
static BROTLI_INLINE uint32_t ZopfliNodeDistanceCode(const ZopfliNode* self) {
const uint32_t short_code = self->distance >> 25;
return short_code == 0 ?
ZopfliNodeCopyDistance(self) + BROTLI_NUM_DISTANCE_SHORT_CODES - 1 :
short_code - 1;
}
static BROTLI_INLINE uint32_t ZopfliNodeCommandLength(const ZopfliNode* self) {
return ZopfliNodeCopyLength(self) + self->insert_length;
}
/* Histogram based cost model for zopflification. */
typedef struct ZopfliCostModel {
/* The insert and copy length symbols. */
float cost_cmd_[BROTLI_NUM_COMMAND_SYMBOLS];
float cost_dist_[BROTLI_NUM_DISTANCE_SYMBOLS];
/* Cumulative costs of literals per position in the stream. */
float* literal_costs_;
float min_cost_cmd_;
size_t num_bytes_;
} ZopfliCostModel;
static void InitZopfliCostModel(
MemoryManager* m, ZopfliCostModel* self, size_t num_bytes) {
self->num_bytes_ = num_bytes;
self->literal_costs_ = BROTLI_ALLOC(m, float, num_bytes + 2);
if (BROTLI_IS_OOM(m)) return;
}
static void CleanupZopfliCostModel(MemoryManager* m, ZopfliCostModel* self) {
BROTLI_FREE(m, self->literal_costs_);
}
static void SetCost(const uint32_t* histogram, size_t histogram_size,
float* cost) {
size_t sum = 0;
float log2sum;
size_t i;
for (i = 0; i < histogram_size; i++) {
sum += histogram[i];
}
log2sum = (float)FastLog2(sum);
for (i = 0; i < histogram_size; i++) {
if (histogram[i] == 0) {
cost[i] = log2sum + 2;
continue;
}
/* Shannon bits for this symbol. */
cost[i] = log2sum - (float)FastLog2(histogram[i]);
/* Cannot be coded with less than 1 bit */
if (cost[i] < 1) cost[i] = 1;
}
}
static void ZopfliCostModelSetFromCommands(ZopfliCostModel* self,
size_t position,
const uint8_t* ringbuffer,
size_t ringbuffer_mask,
const Command* commands,
size_t num_commands,
size_t last_insert_len) {
uint32_t histogram_literal[BROTLI_NUM_LITERAL_SYMBOLS];
uint32_t histogram_cmd[BROTLI_NUM_COMMAND_SYMBOLS];
uint32_t histogram_dist[BROTLI_NUM_DISTANCE_SYMBOLS];
float cost_literal[BROTLI_NUM_LITERAL_SYMBOLS];
size_t pos = position - last_insert_len;
float min_cost_cmd = kInfinity;
size_t i;
float* cost_cmd = self->cost_cmd_;
memset(histogram_literal, 0, sizeof(histogram_literal));
memset(histogram_cmd, 0, sizeof(histogram_cmd));
memset(histogram_dist, 0, sizeof(histogram_dist));
for (i = 0; i < num_commands; i++) {
size_t inslength = commands[i].insert_len_;
size_t copylength = CommandCopyLen(&commands[i]);
size_t distcode = commands[i].dist_prefix_;
size_t cmdcode = commands[i].cmd_prefix_;
size_t j;
histogram_cmd[cmdcode]++;
if (cmdcode >= 128) histogram_dist[distcode]++;
for (j = 0; j < inslength; j++) {
histogram_literal[ringbuffer[(pos + j) & ringbuffer_mask]]++;
}
pos += inslength + copylength;
}
SetCost(histogram_literal, BROTLI_NUM_LITERAL_SYMBOLS, cost_literal);
SetCost(histogram_cmd, BROTLI_NUM_COMMAND_SYMBOLS, cost_cmd);
SetCost(histogram_dist, BROTLI_NUM_DISTANCE_SYMBOLS, self->cost_dist_);
for (i = 0; i < BROTLI_NUM_COMMAND_SYMBOLS; ++i) {
min_cost_cmd = BROTLI_MIN(float, min_cost_cmd, cost_cmd[i]);
}
self->min_cost_cmd_ = min_cost_cmd;
{
float* literal_costs = self->literal_costs_;
size_t num_bytes = self->num_bytes_;
literal_costs[0] = 0.0;
for (i = 0; i < num_bytes; ++i) {
literal_costs[i + 1] = literal_costs[i] +
cost_literal[ringbuffer[(position + i) & ringbuffer_mask]];
}
}
}
static void ZopfliCostModelSetFromLiteralCosts(ZopfliCostModel* self,
size_t position,
const uint8_t* ringbuffer,
size_t ringbuffer_mask) {
float* literal_costs = self->literal_costs_;
float* cost_dist = self->cost_dist_;
float* cost_cmd = self->cost_cmd_;
size_t num_bytes = self->num_bytes_;
size_t i;
BrotliEstimateBitCostsForLiterals(position, num_bytes, ringbuffer_mask,
ringbuffer, &literal_costs[1]);
literal_costs[0] = 0.0;
for (i = 0; i < num_bytes; ++i) {
literal_costs[i + 1] += literal_costs[i];
}
for (i = 0; i < BROTLI_NUM_COMMAND_SYMBOLS; ++i) {
cost_cmd[i] = (float)FastLog2(11 + (uint32_t)i);
}
for (i = 0; i < BROTLI_NUM_DISTANCE_SYMBOLS; ++i) {
cost_dist[i] = (float)FastLog2(20 + (uint32_t)i);
}
self->min_cost_cmd_ = (float)FastLog2(11);
}
static BROTLI_INLINE float ZopfliCostModelGetCommandCost(
const ZopfliCostModel* self, uint16_t cmdcode) {
return self->cost_cmd_[cmdcode];
}
static BROTLI_INLINE float ZopfliCostModelGetDistanceCost(
const ZopfliCostModel* self, size_t distcode) {
return self->cost_dist_[distcode];
}
static BROTLI_INLINE float ZopfliCostModelGetLiteralCosts(
const ZopfliCostModel* self, size_t from, size_t to) {
return self->literal_costs_[to] - self->literal_costs_[from];
}
static BROTLI_INLINE float ZopfliCostModelGetMinCostCmd(
const ZopfliCostModel* self) {
return self->min_cost_cmd_;
}
/* REQUIRES: len >= 2, start_pos <= pos */
/* REQUIRES: cost < kInfinity, nodes[start_pos].cost < kInfinity */
/* Maintains the "ZopfliNode array invariant". */
static BROTLI_INLINE void UpdateZopfliNode(ZopfliNode* nodes, size_t pos,
size_t start_pos, size_t len, size_t len_code, size_t dist,
size_t short_code, float cost) {
ZopfliNode* next = &nodes[pos + len];
next->length = (uint32_t)(len | ((len + 9u - len_code) << 24));
next->distance = (uint32_t)(dist | (short_code << 25));
next->insert_length = (uint32_t)(pos - start_pos);
next->u.cost = cost;
}
typedef struct PosData {
size_t pos;
int distance_cache[4];
float costdiff;
float cost;
} PosData;
/* Maintains the smallest 8 cost difference together with their positions */
typedef struct StartPosQueue {
PosData q_[8];
size_t idx_;
} StartPosQueue;
static BROTLI_INLINE void InitStartPosQueue(StartPosQueue* self) {
self->idx_ = 0;
}
static size_t StartPosQueueSize(const StartPosQueue* self) {
return BROTLI_MIN(size_t, self->idx_, 8);
}
static void StartPosQueuePush(StartPosQueue* self, const PosData* posdata) {
size_t offset = ~(self->idx_++) & 7;
size_t len = StartPosQueueSize(self);
size_t i;
PosData* q = self->q_;
q[offset] = *posdata;
/* Restore the sorted order. In the list of |len| items at most |len - 1|
adjacent element comparisons / swaps are required. */
for (i = 1; i < len; ++i) {
if (q[offset & 7].costdiff > q[(offset + 1) & 7].costdiff) {
BROTLI_SWAP(PosData, q, offset & 7, (offset + 1) & 7);
}
++offset;
}
}
static const PosData* StartPosQueueAt(const StartPosQueue* self, size_t k) {
return &self->q_[(k - self->idx_) & 7];
}
/* Returns the minimum possible copy length that can improve the cost of any */
/* future position. */
static size_t ComputeMinimumCopyLength(const float start_cost,
const ZopfliNode* nodes,
const size_t num_bytes,
const size_t pos) {
/* Compute the minimum possible cost of reaching any future position. */
float min_cost = start_cost;
size_t len = 2;
size_t next_len_bucket = 4;
size_t next_len_offset = 10;
while (pos + len <= num_bytes && nodes[pos + len].u.cost <= min_cost) {
/* We already reached (pos + len) with no more cost than the minimum
possible cost of reaching anything from this pos, so there is no point in
looking for lengths <= len. */
++len;
if (len == next_len_offset) {
/* We reached the next copy length code bucket, so we add one more
extra bit to the minimum cost. */
min_cost += 1.0f;
next_len_offset += next_len_bucket;
next_len_bucket *= 2;
}
}
return len;
}
/* REQUIRES: nodes[pos].cost < kInfinity
REQUIRES: nodes[0..pos] satisfies that "ZopfliNode array invariant". */
static uint32_t ComputeDistanceShortcut(const size_t block_start,
const size_t pos,
const size_t max_backward,
const ZopfliNode* nodes) {
const size_t clen = ZopfliNodeCopyLength(&nodes[pos]);
const size_t ilen = nodes[pos].insert_length;
const size_t dist = ZopfliNodeCopyDistance(&nodes[pos]);
/* Since |block_start + pos| is the end position of the command, the copy part
starts from |block_start + pos - clen|. Distances that are greater than
this or greater than |max_backward| are static dictionary references, and
do not update the last distances. Also distance code 0 (last distance)
does not update the last distances. */
if (pos == 0) {
return 0;
} else if (dist + clen <= block_start + pos &&
dist <= max_backward &&
ZopfliNodeDistanceCode(&nodes[pos]) > 0) {
return (uint32_t)pos;
} else {
return nodes[pos - clen - ilen].u.shortcut;
}
}
/* Fills in dist_cache[0..3] with the last four distances (as defined by
Section 4. of the Spec) that would be used at (block_start + pos) if we
used the shortest path of commands from block_start, computed from
nodes[0..pos]. The last four distances at block_start are in
starting_dist_cache[0..3].
REQUIRES: nodes[pos].cost < kInfinity
REQUIRES: nodes[0..pos] satisfies that "ZopfliNode array invariant". */
static void ComputeDistanceCache(const size_t pos,
const int* starting_dist_cache,
const ZopfliNode* nodes,
int* dist_cache) {
int idx = 0;
size_t p = nodes[pos].u.shortcut;
while (idx < 4 && p > 0) {
const size_t ilen = nodes[p].insert_length;
const size_t clen = ZopfliNodeCopyLength(&nodes[p]);
const size_t dist = ZopfliNodeCopyDistance(&nodes[p]);
dist_cache[idx++] = (int)dist;
/* Because of prerequisite, p >= clen + ilen >= 2. */
p = nodes[p - clen - ilen].u.shortcut;
}
for (; idx < 4; ++idx) {
dist_cache[idx] = *starting_dist_cache++;
}
}
/* Maintains "ZopfliNode array invariant" and pushes node to the queue, if it
is eligible. */
static void EvaluateNode(
const size_t block_start, const size_t pos, const size_t max_backward_limit,
const int* starting_dist_cache, const ZopfliCostModel* model,
StartPosQueue* queue, ZopfliNode* nodes) {
/* Save cost, because ComputeDistanceCache invalidates it. */
float node_cost = nodes[pos].u.cost;
nodes[pos].u.shortcut = ComputeDistanceShortcut(
block_start, pos, max_backward_limit, nodes);
if (node_cost <= ZopfliCostModelGetLiteralCosts(model, 0, pos)) {
PosData posdata;
posdata.pos = pos;
posdata.cost = node_cost;
posdata.costdiff = node_cost -
ZopfliCostModelGetLiteralCosts(model, 0, pos);
ComputeDistanceCache(
pos, starting_dist_cache, nodes, posdata.distance_cache);
StartPosQueuePush(queue, &posdata);
}
}
/* Returns longest copy length. */
static size_t UpdateNodes(
const size_t num_bytes, const size_t block_start, const size_t pos,
const uint8_t* ringbuffer, const size_t ringbuffer_mask,
const BrotliEncoderParams* params, const size_t max_backward_limit,
const int* starting_dist_cache, const size_t num_matches,
const BackwardMatch* matches, const ZopfliCostModel* model,
StartPosQueue* queue, ZopfliNode* nodes) {
const size_t cur_ix = block_start + pos;
const size_t cur_ix_masked = cur_ix & ringbuffer_mask;
const size_t max_distance = BROTLI_MIN(size_t, cur_ix, max_backward_limit);
const size_t max_len = num_bytes - pos;
const size_t max_zopfli_len = MaxZopfliLen(params);
const size_t max_iters = MaxZopfliCandidates(params);
size_t min_len;
size_t result = 0;
size_t k;
EvaluateNode(block_start, pos, max_backward_limit, starting_dist_cache, model,
queue, nodes);
{
const PosData* posdata = StartPosQueueAt(queue, 0);
float min_cost = (posdata->cost + ZopfliCostModelGetMinCostCmd(model) +
ZopfliCostModelGetLiteralCosts(model, posdata->pos, pos));
min_len = ComputeMinimumCopyLength(min_cost, nodes, num_bytes, pos);
}
/* Go over the command starting positions in order of increasing cost
difference. */
for (k = 0; k < max_iters && k < StartPosQueueSize(queue); ++k) {
const PosData* posdata = StartPosQueueAt(queue, k);
const size_t start = posdata->pos;
const uint16_t inscode = GetInsertLengthCode(pos - start);
const float start_costdiff = posdata->costdiff;
const float base_cost = start_costdiff + (float)GetInsertExtra(inscode) +
ZopfliCostModelGetLiteralCosts(model, 0, pos);
/* Look for last distance matches using the distance cache from this
starting position. */
size_t best_len = min_len - 1;
size_t j = 0;
for (; j < BROTLI_NUM_DISTANCE_SHORT_CODES && best_len < max_len; ++j) {
const size_t idx = kDistanceCacheIndex[j];
const size_t backward =
(size_t)(posdata->distance_cache[idx] + kDistanceCacheOffset[j]);
size_t prev_ix = cur_ix - backward;
if (prev_ix >= cur_ix) {
continue;
}
if (BROTLI_PREDICT_FALSE(backward > max_distance)) {
continue;
}
prev_ix &= ringbuffer_mask;
if (cur_ix_masked + best_len > ringbuffer_mask ||
prev_ix + best_len > ringbuffer_mask ||
ringbuffer[cur_ix_masked + best_len] !=
ringbuffer[prev_ix + best_len]) {
continue;
}
{
const size_t len =
FindMatchLengthWithLimit(&ringbuffer[prev_ix],
&ringbuffer[cur_ix_masked],
max_len);
const float dist_cost = base_cost +
ZopfliCostModelGetDistanceCost(model, j);
size_t l;
for (l = best_len + 1; l <= len; ++l) {
const uint16_t copycode = GetCopyLengthCode(l);
const uint16_t cmdcode =
CombineLengthCodes(inscode, copycode, j == 0);
const float cost = (cmdcode < 128 ? base_cost : dist_cost) +
(float)GetCopyExtra(copycode) +
ZopfliCostModelGetCommandCost(model, cmdcode);
if (cost < nodes[pos + l].u.cost) {
UpdateZopfliNode(nodes, pos, start, l, l, backward, j + 1, cost);
result = BROTLI_MAX(size_t, result, l);
}
best_len = l;
}
}
}
/* At higher iterations look only for new last distance matches, since
looking only for new command start positions with the same distances
does not help much. */
if (k >= 2) continue;
{
/* Loop through all possible copy lengths at this position. */
size_t len = min_len;
for (j = 0; j < num_matches; ++j) {
BackwardMatch match = matches[j];
size_t dist = match.distance;
BROTLI_BOOL is_dictionary_match = TO_BROTLI_BOOL(dist > max_distance);
/* We already tried all possible last distance matches, so we can use
normal distance code here. */
size_t dist_code = dist + BROTLI_NUM_DISTANCE_SHORT_CODES - 1;
uint16_t dist_symbol;
uint32_t distextra;
uint32_t distnumextra;
float dist_cost;
size_t max_match_len;
PrefixEncodeCopyDistance(dist_code, 0, 0, &dist_symbol, &distextra);
distnumextra = distextra >> 24;
dist_cost = base_cost + (float)distnumextra +
ZopfliCostModelGetDistanceCost(model, dist_symbol);
/* Try all copy lengths up until the maximum copy length corresponding
to this distance. If the distance refers to the static dictionary, or
the maximum length is long enough, try only one maximum length. */
max_match_len = BackwardMatchLength(&match);
if (len < max_match_len &&
(is_dictionary_match || max_match_len > max_zopfli_len)) {
len = max_match_len;
}
for (; len <= max_match_len; ++len) {
const size_t len_code =
is_dictionary_match ? BackwardMatchLengthCode(&match) : len;
const uint16_t copycode = GetCopyLengthCode(len_code);
const uint16_t cmdcode = CombineLengthCodes(inscode, copycode, 0);
const float cost = dist_cost + (float)GetCopyExtra(copycode) +
ZopfliCostModelGetCommandCost(model, cmdcode);
if (cost < nodes[pos + len].u.cost) {
UpdateZopfliNode(nodes, pos, start, len, len_code, dist, 0, cost);
result = BROTLI_MAX(size_t, result, len);
}
}
}
}
}
return result;
}
static size_t ComputeShortestPathFromNodes(size_t num_bytes,
ZopfliNode* nodes) {
size_t index = num_bytes;
size_t num_commands = 0;
while (nodes[index].insert_length == 0 && nodes[index].length == 1) --index;
nodes[index].u.next = BROTLI_UINT32_MAX;
while (index != 0) {
size_t len = ZopfliNodeCommandLength(&nodes[index]);
index -= len;
nodes[index].u.next = (uint32_t)len;
num_commands++;
}
return num_commands;
}
/* REQUIRES: nodes != NULL and len(nodes) >= num_bytes + 1 */
void BrotliZopfliCreateCommands(const size_t num_bytes,
const size_t block_start,
const size_t max_backward_limit,
const ZopfliNode* nodes,
int* dist_cache,
size_t* last_insert_len,
Command* commands,
size_t* num_literals) {
size_t pos = 0;
uint32_t offset = nodes[0].u.next;
size_t i;
for (i = 0; offset != BROTLI_UINT32_MAX; i++) {
const ZopfliNode* next = &nodes[pos + offset];
size_t copy_length = ZopfliNodeCopyLength(next);
size_t insert_length = next->insert_length;
pos += insert_length;
offset = next->u.next;
if (i == 0) {
insert_length += *last_insert_len;
*last_insert_len = 0;
}
{
size_t distance = ZopfliNodeCopyDistance(next);
size_t len_code = ZopfliNodeLengthCode(next);
size_t max_distance =
BROTLI_MIN(size_t, block_start + pos, max_backward_limit);
BROTLI_BOOL is_dictionary = TO_BROTLI_BOOL(distance > max_distance);
size_t dist_code = ZopfliNodeDistanceCode(next);
InitCommand(&commands[i], insert_length,
copy_length, (int)len_code - (int)copy_length, dist_code);
if (!is_dictionary && dist_code > 0) {
dist_cache[3] = dist_cache[2];
dist_cache[2] = dist_cache[1];
dist_cache[1] = dist_cache[0];
dist_cache[0] = (int)distance;
}
}
*num_literals += insert_length;
pos += copy_length;
}
*last_insert_len += num_bytes - pos;
}
static size_t ZopfliIterate(size_t num_bytes,
size_t position,
const uint8_t* ringbuffer,
size_t ringbuffer_mask,
const BrotliEncoderParams* params,
const size_t max_backward_limit,
const int* dist_cache,
const ZopfliCostModel* model,
const uint32_t* num_matches,
const BackwardMatch* matches,
ZopfliNode* nodes) {
const size_t max_zopfli_len = MaxZopfliLen(params);
StartPosQueue queue;
size_t cur_match_pos = 0;
size_t i;
nodes[0].length = 0;
nodes[0].u.cost = 0;
InitStartPosQueue(&queue);
for (i = 0; i + 3 < num_bytes; i++) {
size_t skip = UpdateNodes(num_bytes, position, i, ringbuffer,
ringbuffer_mask, params, max_backward_limit, dist_cache,
num_matches[i], &matches[cur_match_pos], model, &queue, nodes);
if (skip < BROTLI_LONG_COPY_QUICK_STEP) skip = 0;
cur_match_pos += num_matches[i];
if (num_matches[i] == 1 &&
BackwardMatchLength(&matches[cur_match_pos - 1]) > max_zopfli_len) {
skip = BROTLI_MAX(size_t,
BackwardMatchLength(&matches[cur_match_pos - 1]), skip);
}
if (skip > 1) {
skip--;
while (skip) {
i++;
if (i + 3 >= num_bytes) break;
EvaluateNode(
position, i, max_backward_limit, dist_cache, model, &queue, nodes);
cur_match_pos += num_matches[i];
skip--;
}
}
}
return ComputeShortestPathFromNodes(num_bytes, nodes);
}
/* REQUIRES: nodes != NULL and len(nodes) >= num_bytes + 1 */
size_t BrotliZopfliComputeShortestPath(MemoryManager* m,
const BrotliDictionary* dictionary,
size_t num_bytes,
size_t position,
const uint8_t* ringbuffer,
size_t ringbuffer_mask,
const BrotliEncoderParams* params,
const size_t max_backward_limit,
const int* dist_cache,
HasherHandle hasher,
ZopfliNode* nodes) {
const size_t max_zopfli_len = MaxZopfliLen(params);
ZopfliCostModel model;
StartPosQueue queue;
BackwardMatch matches[MAX_NUM_MATCHES_H10];
const size_t store_end = num_bytes >= StoreLookaheadH10() ?
position + num_bytes - StoreLookaheadH10() + 1 : position;
size_t i;
nodes[0].length = 0;
nodes[0].u.cost = 0;
InitZopfliCostModel(m, &model, num_bytes);
if (BROTLI_IS_OOM(m)) return 0;
ZopfliCostModelSetFromLiteralCosts(
&model, position, ringbuffer, ringbuffer_mask);
InitStartPosQueue(&queue);
for (i = 0; i + HashTypeLengthH10() - 1 < num_bytes; i++) {
const size_t pos = position + i;
const size_t max_distance = BROTLI_MIN(size_t, pos, max_backward_limit);
size_t num_matches = FindAllMatchesH10(hasher, dictionary, ringbuffer,
ringbuffer_mask, pos, num_bytes - i, max_distance, params, matches);
size_t skip;
if (num_matches > 0 &&
BackwardMatchLength(&matches[num_matches - 1]) > max_zopfli_len) {
matches[0] = matches[num_matches - 1];
num_matches = 1;
}
skip = UpdateNodes(num_bytes, position, i, ringbuffer, ringbuffer_mask,
params, max_backward_limit, dist_cache, num_matches, matches, &model,
&queue, nodes);
if (skip < BROTLI_LONG_COPY_QUICK_STEP) skip = 0;
if (num_matches == 1 && BackwardMatchLength(&matches[0]) > max_zopfli_len) {
skip = BROTLI_MAX(size_t, BackwardMatchLength(&matches[0]), skip);
}
if (skip > 1) {
/* Add the tail of the copy to the hasher. */
StoreRangeH10(hasher, ringbuffer, ringbuffer_mask, pos + 1, BROTLI_MIN(
size_t, pos + skip, store_end));
skip--;
while (skip) {
i++;
if (i + HashTypeLengthH10() - 1 >= num_bytes) break;
EvaluateNode(
position, i, max_backward_limit, dist_cache, &model, &queue, nodes);
skip--;
}
}
}
CleanupZopfliCostModel(m, &model);
return ComputeShortestPathFromNodes(num_bytes, nodes);
}
void BrotliCreateZopfliBackwardReferences(
MemoryManager* m, const BrotliDictionary* dictionary, size_t num_bytes,
size_t position, const uint8_t* ringbuffer, size_t ringbuffer_mask,
const BrotliEncoderParams* params, HasherHandle hasher, int* dist_cache,
size_t* last_insert_len, Command* commands, size_t* num_commands,
size_t* num_literals) {
const size_t max_backward_limit = BROTLI_MAX_BACKWARD_LIMIT(params->lgwin);
ZopfliNode* nodes;
nodes = BROTLI_ALLOC(m, ZopfliNode, num_bytes + 1);
if (BROTLI_IS_OOM(m)) return;
BrotliInitZopfliNodes(nodes, num_bytes + 1);
*num_commands += BrotliZopfliComputeShortestPath(m, dictionary, num_bytes,
position, ringbuffer, ringbuffer_mask, params, max_backward_limit,
dist_cache, hasher, nodes);
if (BROTLI_IS_OOM(m)) return;
BrotliZopfliCreateCommands(num_bytes, position, max_backward_limit, nodes,
dist_cache, last_insert_len, commands, num_literals);
BROTLI_FREE(m, nodes);
}
void BrotliCreateHqZopfliBackwardReferences(
MemoryManager* m, const BrotliDictionary* dictionary, size_t num_bytes,
size_t position, const uint8_t* ringbuffer, size_t ringbuffer_mask,
const BrotliEncoderParams* params, HasherHandle hasher, int* dist_cache,
size_t* last_insert_len, Command* commands, size_t* num_commands,
size_t* num_literals) {
const size_t max_backward_limit = BROTLI_MAX_BACKWARD_LIMIT(params->lgwin);
uint32_t* num_matches = BROTLI_ALLOC(m, uint32_t, num_bytes);
size_t matches_size = 4 * num_bytes;
const size_t store_end = num_bytes >= StoreLookaheadH10() ?
position + num_bytes - StoreLookaheadH10() + 1 : position;
size_t cur_match_pos = 0;
size_t i;
size_t orig_num_literals;
size_t orig_last_insert_len;
int orig_dist_cache[4];
size_t orig_num_commands;
ZopfliCostModel model;
ZopfliNode* nodes;
BackwardMatch* matches = BROTLI_ALLOC(m, BackwardMatch, matches_size);
if (BROTLI_IS_OOM(m)) return;
for (i = 0; i + HashTypeLengthH10() - 1 < num_bytes; ++i) {
const size_t pos = position + i;
size_t max_distance = BROTLI_MIN(size_t, pos, max_backward_limit);
size_t max_length = num_bytes - i;
size_t num_found_matches;
size_t cur_match_end;
size_t j;
/* Ensure that we have enough free slots. */
BROTLI_ENSURE_CAPACITY(m, BackwardMatch, matches, matches_size,
cur_match_pos + MAX_NUM_MATCHES_H10);
if (BROTLI_IS_OOM(m)) return;
num_found_matches = FindAllMatchesH10(hasher, dictionary, ringbuffer,
ringbuffer_mask, pos, max_length, max_distance, params,
&matches[cur_match_pos]);
cur_match_end = cur_match_pos + num_found_matches;
for (j = cur_match_pos; j + 1 < cur_match_end; ++j) {
assert(BackwardMatchLength(&matches[j]) <
BackwardMatchLength(&matches[j + 1]));
assert(matches[j].distance > max_distance ||
matches[j].distance <= matches[j + 1].distance);
}
num_matches[i] = (uint32_t)num_found_matches;
if (num_found_matches > 0) {
const size_t match_len = BackwardMatchLength(&matches[cur_match_end - 1]);
if (match_len > MAX_ZOPFLI_LEN_QUALITY_11) {
const size_t skip = match_len - 1;
matches[cur_match_pos++] = matches[cur_match_end - 1];
num_matches[i] = 1;
/* Add the tail of the copy to the hasher. */
StoreRangeH10(hasher, ringbuffer, ringbuffer_mask, pos + 1,
BROTLI_MIN(size_t, pos + match_len, store_end));
memset(&num_matches[i + 1], 0, skip * sizeof(num_matches[0]));
i += skip;
} else {
cur_match_pos = cur_match_end;
}
}
}
orig_num_literals = *num_literals;
orig_last_insert_len = *last_insert_len;
memcpy(orig_dist_cache, dist_cache, 4 * sizeof(dist_cache[0]));
orig_num_commands = *num_commands;
nodes = BROTLI_ALLOC(m, ZopfliNode, num_bytes + 1);
if (BROTLI_IS_OOM(m)) return;
InitZopfliCostModel(m, &model, num_bytes);
if (BROTLI_IS_OOM(m)) return;
for (i = 0; i < 2; i++) {
BrotliInitZopfliNodes(nodes, num_bytes + 1);
if (i == 0) {
ZopfliCostModelSetFromLiteralCosts(
&model, position, ringbuffer, ringbuffer_mask);
} else {
ZopfliCostModelSetFromCommands(&model, position, ringbuffer,
ringbuffer_mask, commands, *num_commands - orig_num_commands,
orig_last_insert_len);
}
*num_commands = orig_num_commands;
*num_literals = orig_num_literals;
*last_insert_len = orig_last_insert_len;
memcpy(dist_cache, orig_dist_cache, 4 * sizeof(dist_cache[0]));
*num_commands += ZopfliIterate(num_bytes, position, ringbuffer,
ringbuffer_mask, params, max_backward_limit, dist_cache,
&model, num_matches, matches, nodes);
BrotliZopfliCreateCommands(num_bytes, position, max_backward_limit,
nodes, dist_cache, last_insert_len, commands, num_literals);
}
CleanupZopfliCostModel(m, &model);
BROTLI_FREE(m, nodes);
BROTLI_FREE(m, matches);
BROTLI_FREE(m, num_matches);
}
#if defined(__cplusplus) || defined(c_plusplus)
} /* extern "C" */
#endif

View File

@@ -1,35 +0,0 @@
/* Copyright 2013 Google Inc. All Rights Reserved.
Distributed under MIT license.
See file LICENSE for detail or copy at https://opensource.org/licenses/MIT
*/
/* Functions to estimate the bit cost of Huffman trees. */
#include "./enc/bit_cost.h"
#include "./common/constants.h"
#include <brotli/types.h>
#include "./enc/fast_log.h"
#include "./enc/histogram.h"
#include "./enc/port.h"
#if defined(__cplusplus) || defined(c_plusplus)
extern "C" {
#endif
#define FN(X) X ## Literal
#include "./enc/bit_cost_inc.h" /* NOLINT(build/include) */
#undef FN
#define FN(X) X ## Command
#include "./enc/bit_cost_inc.h" /* NOLINT(build/include) */
#undef FN
#define FN(X) X ## Distance
#include "./enc/bit_cost_inc.h" /* NOLINT(build/include) */
#undef FN
#if defined(__cplusplus) || defined(c_plusplus)
} /* extern "C" */
#endif

View File

@@ -1,48 +0,0 @@
/* Copyright 2013 Google Inc. All Rights Reserved.
Distributed under MIT license.
See file LICENSE for detail or copy at https://opensource.org/licenses/MIT
*/
/* Bit reading helpers */
#include "./dec/bit_reader.h"
#include <brotli/types.h>
#include "./dec/port.h"
#if defined(__cplusplus) || defined(c_plusplus)
extern "C" {
#endif
void BrotliInitBitReader(BrotliBitReader* const br) {
br->val_ = 0;
br->bit_pos_ = sizeof(br->val_) << 3;
}
BROTLI_BOOL BrotliWarmupBitReader(BrotliBitReader* const br) {
size_t aligned_read_mask = (sizeof(br->val_) >> 1) - 1;
/* Fixing alignment after unaligned BrotliFillWindow would result accumulator
overflow. If unalignment is caused by BrotliSafeReadBits, then there is
enough space in accumulator to fix alignment. */
if (!BROTLI_ALIGNED_READ) {
aligned_read_mask = 0;
}
if (BrotliGetAvailableBits(br) == 0) {
if (!BrotliPullByte(br)) {
return BROTLI_FALSE;
}
}
while ((((size_t)br->next_in) & aligned_read_mask) != 0) {
if (!BrotliPullByte(br)) {
/* If we consumed all the input, we don't care about the alignment. */
return BROTLI_TRUE;
}
}
return BROTLI_TRUE;
}
#if defined(__cplusplus) || defined(c_plusplus)
} /* extern "C" */
#endif

View File

@@ -1,197 +0,0 @@
/* Copyright 2013 Google Inc. All Rights Reserved.
Distributed under MIT license.
See file LICENSE for detail or copy at https://opensource.org/licenses/MIT
*/
/* Block split point selection utilities. */
#include "./enc/block_splitter.h"
#include <assert.h>
#include <string.h> /* memcpy, memset */
#include "./enc/bit_cost.h"
#include "./enc/cluster.h"
#include "./enc/command.h"
#include "./enc/fast_log.h"
#include "./enc/histogram.h"
#include "./enc/memory.h"
#include "./enc/port.h"
#include "./enc/quality.h"
#if defined(__cplusplus) || defined(c_plusplus)
extern "C" {
#endif
static const size_t kMaxLiteralHistograms = 100;
static const size_t kMaxCommandHistograms = 50;
static const double kLiteralBlockSwitchCost = 28.1;
static const double kCommandBlockSwitchCost = 13.5;
static const double kDistanceBlockSwitchCost = 14.6;
static const size_t kLiteralStrideLength = 70;
static const size_t kCommandStrideLength = 40;
static const size_t kSymbolsPerLiteralHistogram = 544;
static const size_t kSymbolsPerCommandHistogram = 530;
static const size_t kSymbolsPerDistanceHistogram = 544;
static const size_t kMinLengthForBlockSplitting = 128;
static const size_t kIterMulForRefining = 2;
static const size_t kMinItersForRefining = 100;
static size_t CountLiterals(const Command* cmds, const size_t num_commands) {
/* Count how many we have. */
size_t total_length = 0;
size_t i;
for (i = 0; i < num_commands; ++i) {
total_length += cmds[i].insert_len_;
}
return total_length;
}
static void CopyLiteralsToByteArray(const Command* cmds,
const size_t num_commands,
const uint8_t* data,
const size_t offset,
const size_t mask,
uint8_t* literals) {
size_t pos = 0;
size_t from_pos = offset & mask;
size_t i;
for (i = 0; i < num_commands; ++i) {
size_t insert_len = cmds[i].insert_len_;
if (from_pos + insert_len > mask) {
size_t head_size = mask + 1 - from_pos;
memcpy(literals + pos, data + from_pos, head_size);
from_pos = 0;
pos += head_size;
insert_len -= head_size;
}
if (insert_len > 0) {
memcpy(literals + pos, data + from_pos, insert_len);
pos += insert_len;
}
from_pos = (from_pos + insert_len + CommandCopyLen(&cmds[i])) & mask;
}
}
static BROTLI_INLINE unsigned int MyRand(unsigned int* seed) {
*seed *= 16807U;
if (*seed == 0) {
*seed = 1;
}
return *seed;
}
static BROTLI_INLINE double BitCost(size_t count) {
return count == 0 ? -2.0 : FastLog2(count);
}
#define HISTOGRAMS_PER_BATCH 64
#define CLUSTERS_PER_BATCH 16
#define FN(X) X ## Literal
#define DataType uint8_t
/* NOLINTNEXTLINE(build/include) */
#include "./enc/block_splitter_inc.h"
#undef DataType
#undef FN
#define FN(X) X ## Command
#define DataType uint16_t
/* NOLINTNEXTLINE(build/include) */
#include "./enc/block_splitter_inc.h"
#undef FN
#define FN(X) X ## Distance
/* NOLINTNEXTLINE(build/include) */
#include "./enc/block_splitter_inc.h"
#undef DataType
#undef FN
void BrotliInitBlockSplit(BlockSplit* self) {
self->num_types = 0;
self->num_blocks = 0;
self->types = 0;
self->lengths = 0;
self->types_alloc_size = 0;
self->lengths_alloc_size = 0;
}
void BrotliDestroyBlockSplit(MemoryManager* m, BlockSplit* self) {
BROTLI_FREE(m, self->types);
BROTLI_FREE(m, self->lengths);
}
void BrotliSplitBlock(MemoryManager* m,
const Command* cmds,
const size_t num_commands,
const uint8_t* data,
const size_t pos,
const size_t mask,
const BrotliEncoderParams* params,
BlockSplit* literal_split,
BlockSplit* insert_and_copy_split,
BlockSplit* dist_split) {
{
size_t literals_count = CountLiterals(cmds, num_commands);
uint8_t* literals = BROTLI_ALLOC(m, uint8_t, literals_count);
if (BROTLI_IS_OOM(m)) return;
/* Create a continuous array of literals. */
CopyLiteralsToByteArray(cmds, num_commands, data, pos, mask, literals);
/* Create the block split on the array of literals.
Literal histograms have alphabet size 256. */
SplitByteVectorLiteral(
m, literals, literals_count,
kSymbolsPerLiteralHistogram, kMaxLiteralHistograms,
kLiteralStrideLength, kLiteralBlockSwitchCost, params,
literal_split);
if (BROTLI_IS_OOM(m)) return;
BROTLI_FREE(m, literals);
}
{
/* Compute prefix codes for commands. */
uint16_t* insert_and_copy_codes = BROTLI_ALLOC(m, uint16_t, num_commands);
size_t i;
if (BROTLI_IS_OOM(m)) return;
for (i = 0; i < num_commands; ++i) {
insert_and_copy_codes[i] = cmds[i].cmd_prefix_;
}
/* Create the block split on the array of command prefixes. */
SplitByteVectorCommand(
m, insert_and_copy_codes, num_commands,
kSymbolsPerCommandHistogram, kMaxCommandHistograms,
kCommandStrideLength, kCommandBlockSwitchCost, params,
insert_and_copy_split);
if (BROTLI_IS_OOM(m)) return;
/* TODO: reuse for distances? */
BROTLI_FREE(m, insert_and_copy_codes);
}
{
/* Create a continuous array of distance prefixes. */
uint16_t* distance_prefixes = BROTLI_ALLOC(m, uint16_t, num_commands);
size_t j = 0;
size_t i;
if (BROTLI_IS_OOM(m)) return;
for (i = 0; i < num_commands; ++i) {
const Command* cmd = &cmds[i];
if (CommandCopyLen(cmd) && cmd->cmd_prefix_ >= 128) {
distance_prefixes[j++] = cmd->dist_prefix_;
}
}
/* Create the block split on the array of distance prefixes. */
SplitByteVectorDistance(
m, distance_prefixes, j,
kSymbolsPerDistanceHistogram, kMaxCommandHistograms,
kCommandStrideLength, kDistanceBlockSwitchCost, params,
dist_split);
if (BROTLI_IS_OOM(m)) return;
BROTLI_FREE(m, distance_prefixes);
}
}
#if defined(__cplusplus) || defined(c_plusplus)
} /* extern "C" */
#endif

View File

@@ -1,5 +0,0 @@
package brotli
import (
"C"
)

View File

@@ -1,362 +0,0 @@
/* Copyright 2013 Google Inc. All Rights Reserved.
Distributed under MIT license.
See file LICENSE for detail or copy at https://opensource.org/licenses/MIT
*/
/**
* @file
* API for Brotli decompression.
*/
#ifndef BROTLI_DEC_DECODE_H_
#define BROTLI_DEC_DECODE_H_
#include <brotli/port.h>
#include <brotli/types.h>
#if defined(__cplusplus) || defined(c_plusplus)
extern "C" {
#endif
/**
* Opaque structure that holds decoder state.
*
* Allocated and initialized with ::BrotliDecoderCreateInstance.
* Cleaned up and deallocated with ::BrotliDecoderDestroyInstance.
*/
typedef struct BrotliDecoderStateStruct BrotliDecoderState;
/**
* Result type for ::BrotliDecoderDecompress and
* ::BrotliDecoderDecompressStream functions.
*/
typedef enum {
/** Decoding error, e.g. corrupted input or memory allocation problem. */
BROTLI_DECODER_RESULT_ERROR = 0,
/** Decoding successfully completed */
BROTLI_DECODER_RESULT_SUCCESS = 1,
/** Partially done; should be called again with more input */
BROTLI_DECODER_RESULT_NEEDS_MORE_INPUT = 2,
/** Partially done; should be called again with more output */
BROTLI_DECODER_RESULT_NEEDS_MORE_OUTPUT = 3
} BrotliDecoderResult;
/**
* Template that evaluates items of ::BrotliDecoderErrorCode.
*
* Example: @code {.cpp}
* // Log Brotli error code.
* switch (brotliDecoderErrorCode) {
* #define CASE_(PREFIX, NAME, CODE) \
* case BROTLI_DECODER ## PREFIX ## NAME: \
* LOG(INFO) << "error code:" << #NAME; \
* break;
* #define NEWLINE_
* BROTLI_DECODER_ERROR_CODES_LIST(CASE_, NEWLINE_)
* #undef CASE_
* #undef NEWLINE_
* default: LOG(FATAL) << "unknown brotli error code";
* }
* @endcode
*/
#define BROTLI_DECODER_ERROR_CODES_LIST(BROTLI_ERROR_CODE, SEPARATOR) \
BROTLI_ERROR_CODE(_, NO_ERROR, 0) SEPARATOR \
/* Same as BrotliDecoderResult values */ \
BROTLI_ERROR_CODE(_, SUCCESS, 1) SEPARATOR \
BROTLI_ERROR_CODE(_, NEEDS_MORE_INPUT, 2) SEPARATOR \
BROTLI_ERROR_CODE(_, NEEDS_MORE_OUTPUT, 3) SEPARATOR \
\
/* Errors caused by invalid input */ \
BROTLI_ERROR_CODE(_ERROR_FORMAT_, EXUBERANT_NIBBLE, -1) SEPARATOR \
BROTLI_ERROR_CODE(_ERROR_FORMAT_, RESERVED, -2) SEPARATOR \
BROTLI_ERROR_CODE(_ERROR_FORMAT_, EXUBERANT_META_NIBBLE, -3) SEPARATOR \
BROTLI_ERROR_CODE(_ERROR_FORMAT_, SIMPLE_HUFFMAN_ALPHABET, -4) SEPARATOR \
BROTLI_ERROR_CODE(_ERROR_FORMAT_, SIMPLE_HUFFMAN_SAME, -5) SEPARATOR \
BROTLI_ERROR_CODE(_ERROR_FORMAT_, CL_SPACE, -6) SEPARATOR \
BROTLI_ERROR_CODE(_ERROR_FORMAT_, HUFFMAN_SPACE, -7) SEPARATOR \
BROTLI_ERROR_CODE(_ERROR_FORMAT_, CONTEXT_MAP_REPEAT, -8) SEPARATOR \
BROTLI_ERROR_CODE(_ERROR_FORMAT_, BLOCK_LENGTH_1, -9) SEPARATOR \
BROTLI_ERROR_CODE(_ERROR_FORMAT_, BLOCK_LENGTH_2, -10) SEPARATOR \
BROTLI_ERROR_CODE(_ERROR_FORMAT_, TRANSFORM, -11) SEPARATOR \
BROTLI_ERROR_CODE(_ERROR_FORMAT_, DICTIONARY, -12) SEPARATOR \
BROTLI_ERROR_CODE(_ERROR_FORMAT_, WINDOW_BITS, -13) SEPARATOR \
BROTLI_ERROR_CODE(_ERROR_FORMAT_, PADDING_1, -14) SEPARATOR \
BROTLI_ERROR_CODE(_ERROR_FORMAT_, PADDING_2, -15) SEPARATOR \
\
/* -16..-18 codes are reserved */ \
\
BROTLI_ERROR_CODE(_ERROR_, DICTIONARY_NOT_SET, -19) SEPARATOR \
BROTLI_ERROR_CODE(_ERROR_, INVALID_ARGUMENTS, -20) SEPARATOR \
\
/* Memory allocation problems */ \
BROTLI_ERROR_CODE(_ERROR_ALLOC_, CONTEXT_MODES, -21) SEPARATOR \
/* Literal, insert and distance trees together */ \
BROTLI_ERROR_CODE(_ERROR_ALLOC_, TREE_GROUPS, -22) SEPARATOR \
/* -23..-24 codes are reserved for distinct tree groups */ \
BROTLI_ERROR_CODE(_ERROR_ALLOC_, CONTEXT_MAP, -25) SEPARATOR \
BROTLI_ERROR_CODE(_ERROR_ALLOC_, RING_BUFFER_1, -26) SEPARATOR \
BROTLI_ERROR_CODE(_ERROR_ALLOC_, RING_BUFFER_2, -27) SEPARATOR \
/* -28..-29 codes are reserved for dynamic ring-buffer allocation */ \
BROTLI_ERROR_CODE(_ERROR_ALLOC_, BLOCK_TYPE_TREES, -30) SEPARATOR \
\
/* "Impossible" states */ \
BROTLI_ERROR_CODE(_ERROR_, UNREACHABLE, -31)
/**
* Error code for detailed logging / production debugging.
*
* See ::BrotliDecoderGetErrorCode and ::BROTLI_LAST_ERROR_CODE.
*/
typedef enum {
#define BROTLI_COMMA_ ,
#define BROTLI_ERROR_CODE_ENUM_ITEM_(PREFIX, NAME, CODE) \
BROTLI_DECODER ## PREFIX ## NAME = CODE
BROTLI_DECODER_ERROR_CODES_LIST(BROTLI_ERROR_CODE_ENUM_ITEM_, BROTLI_COMMA_)
} BrotliDecoderErrorCode;
#undef BROTLI_ERROR_CODE_ENUM_ITEM_
#undef BROTLI_COMMA_
/**
* The value of the last error code, negative integer.
*
* All other error code values are in the range from ::BROTLI_LAST_ERROR_CODE
* to @c -1. There are also 4 other possible non-error codes @c 0 .. @c 3 in
* ::BrotliDecoderErrorCode enumeration.
*/
#define BROTLI_LAST_ERROR_CODE BROTLI_DECODER_ERROR_UNREACHABLE
/** Options to be used with ::BrotliDecoderSetParameter. */
typedef enum BrotliDecoderParameter {
/**
* Disable "canny" ring buffer allocation strategy.
*
* Ring buffer is allocated according to window size, despite the real size of
* the content.
*/
BROTLI_DECODER_PARAM_DISABLE_RING_BUFFER_REALLOCATION = 0
} BrotliDecoderParameter;
/**
* Sets the specified parameter to the given decoder instance.
*
* @param state decoder instance
* @param param parameter to set
* @param value new parameter value
* @returns ::BROTLI_FALSE if parameter is unrecognized, or value is invalid
* @returns ::BROTLI_TRUE if value is accepted
*/
BROTLI_DEC_API BROTLI_BOOL BrotliDecoderSetParameter(
BrotliDecoderState* state, BrotliDecoderParameter param, uint32_t value);
/**
* Creates an instance of ::BrotliDecoderState and initializes it.
*
* The instance can be used once for decoding and should then be destroyed with
* ::BrotliDecoderDestroyInstance, it cannot be reused for a new decoding
* session.
*
* @p alloc_func and @p free_func @b MUST be both zero or both non-zero. In the
* case they are both zero, default memory allocators are used. @p opaque is
* passed to @p alloc_func and @p free_func when they are called.
*
* @param alloc_func custom memory allocation function
* @param free_func custom memory fee function
* @param opaque custom memory manager handle
* @returns @c 0 if instance can not be allocated or initialized
* @returns pointer to initialized ::BrotliDecoderState otherwise
*/
BROTLI_DEC_API BrotliDecoderState* BrotliDecoderCreateInstance(
brotli_alloc_func alloc_func, brotli_free_func free_func, void* opaque);
/**
* Deinitializes and frees ::BrotliDecoderState instance.
*
* @param state decoder instance to be cleaned up and deallocated
*/
BROTLI_DEC_API void BrotliDecoderDestroyInstance(BrotliDecoderState* state);
/**
* Performs one-shot memory-to-memory decompression.
*
* Decompresses the data in @p encoded_buffer into @p decoded_buffer, and sets
* @p *decoded_size to the decompressed length.
*
* @param encoded_size size of @p encoded_buffer
* @param encoded_buffer compressed data buffer with at least @p encoded_size
* addressable bytes
* @param[in, out] decoded_size @b in: size of @p decoded_buffer; \n
* @b out: length of decompressed data written to
* @p decoded_buffer
* @param decoded_buffer decompressed data destination buffer
* @returns ::BROTLI_DECODER_RESULT_ERROR if input is corrupted, memory
* allocation failed, or @p decoded_buffer is not large enough;
* @returns ::BROTLI_DECODER_RESULT_SUCCESS otherwise
*/
BROTLI_DEC_API BrotliDecoderResult BrotliDecoderDecompress(
size_t encoded_size,
const uint8_t encoded_buffer[BROTLI_ARRAY_PARAM(encoded_size)],
size_t* decoded_size,
uint8_t decoded_buffer[BROTLI_ARRAY_PARAM(*decoded_size)]);
/**
* Decompresses the input stream to the output stream.
*
* The values @p *available_in and @p *available_out must specify the number of
* bytes addressable at @p *next_in and @p *next_out respectively.
* When @p *available_out is @c 0, @p next_out is allowed to be @c NULL.
*
* After each call, @p *available_in will be decremented by the amount of input
* bytes consumed, and the @p *next_in pointer will be incremented by that
* amount. Similarly, @p *available_out will be decremented by the amount of
* output bytes written, and the @p *next_out pointer will be incremented by
* that amount.
*
* @p total_out, if it is not a null-pointer, will be set to the number
* of bytes decompressed since the last @p state initialization.
*
* @note Input is never overconsumed, so @p next_in and @p available_in could be
* passed to the next consumer after decoding is complete.
*
* @param state decoder instance
* @param[in, out] available_in @b in: amount of available input; \n
* @b out: amount of unused input
* @param[in, out] next_in pointer to the next compressed byte
* @param[in, out] available_out @b in: length of output buffer; \n
* @b out: remaining size of output buffer
* @param[in, out] next_out output buffer cursor;
* can be @c NULL if @p available_out is @c 0
* @param[out] total_out number of bytes decompressed so far; can be @c NULL
* @returns ::BROTLI_DECODER_RESULT_ERROR if input is corrupted, memory
* allocation failed, arguments were invalid, etc.;
* use ::BrotliDecoderGetErrorCode to get detailed error code
* @returns ::BROTLI_DECODER_RESULT_NEEDS_MORE_INPUT decoding is blocked until
* more input data is provided
* @returns ::BROTLI_DECODER_RESULT_NEEDS_MORE_OUTPUT decoding is blocked until
* more output space is provided
* @returns ::BROTLI_DECODER_RESULT_SUCCESS decoding is finished, no more
* input might be consumed and no more output will be produced
*/
BROTLI_DEC_API BrotliDecoderResult BrotliDecoderDecompressStream(
BrotliDecoderState* state, size_t* available_in, const uint8_t** next_in,
size_t* available_out, uint8_t** next_out, size_t* total_out);
/**
* Prepends LZ77 dictionary.
*
* Fills the fresh ::BrotliDecoderState with additional data corpus for LZ77
* backward references.
*
* @note Not to be confused with the static dictionary (see RFC7932 section 8).
* @warning The dictionary must exist in memory until decoding is done and
* is owned by the caller.
*
* Workflow:
* -# Allocate and initialize state with ::BrotliDecoderCreateInstance
* -# Invoke ::BrotliDecoderSetCustomDictionary
* -# Use ::BrotliDecoderDecompressStream
* -# Clean up and free state with ::BrotliDecoderDestroyInstance
*
* @param state decoder instance
* @param size length of @p dict; should be less or equal to 2^24 (16MiB),
* otherwise the dictionary will be ignored
* @param dict "dictionary"; @b MUST be the same as used during compression
*/
BROTLI_DEC_API void BrotliDecoderSetCustomDictionary(
BrotliDecoderState* state, size_t size,
const uint8_t dict[BROTLI_ARRAY_PARAM(size)]);
/**
* Checks if decoder has more output.
*
* @param state decoder instance
* @returns ::BROTLI_TRUE, if decoder has some unconsumed output
* @returns ::BROTLI_FALSE otherwise
*/
BROTLI_DEC_API BROTLI_BOOL BrotliDecoderHasMoreOutput(
const BrotliDecoderState* state);
/**
* Acquires pointer to internal output buffer.
*
* This method is used to make language bindings easier and more efficient:
* -# push data to ::BrotliDecoderDecompressStream,
* until ::BROTLI_DECODER_RESULT_NEEDS_MORE_OUTPUT is reported
* -# use ::BrotliDecoderTakeOutput to peek bytes and copy to language-specific
* entity
*
* Also this could be useful if there is an output stream that is able to
* consume all the provided data (e.g. when data is saved to file system).
*
* @attention After every call to ::BrotliDecoderTakeOutput @p *size bytes of
* output are considered consumed for all consecutive calls to the
* instance methods; returned pointer becomes invalidated as well.
*
* @note Decoder output is not guaranteed to be contiguous. This means that
* after the size-unrestricted call to ::BrotliDecoderTakeOutput,
* immediate next call to ::BrotliDecoderTakeOutput may return more data.
*
* @param state decoder instance
* @param[in, out] size @b in: number of bytes caller is ready to take, @c 0 if
* any amount could be handled; \n
* @b out: amount of data pointed by returned pointer and
* considered consumed; \n
* out value is never greater than in value, unless it is @c 0
* @returns pointer to output data
*/
BROTLI_DEC_API const uint8_t* BrotliDecoderTakeOutput(
BrotliDecoderState* state, size_t* size);
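/*
 * Sketch of the "peek" pattern above for language bindings: once the stream
 * call reports NEEDS_MORE_OUTPUT, drain the decoder's internal buffer through
 * a hypothetical sink callback instead of supplying an output buffer.
 */
#include <brotli/decode.h>

void drain_decoder(BrotliDecoderState* s,
                   void (*sink)(const uint8_t* data, size_t len)) {
  while (BrotliDecoderHasMoreOutput(s)) {
    size_t len = 0;    /* 0: take whatever amount is currently available */
    const uint8_t* chunk = BrotliDecoderTakeOutput(s, &len);
    sink(chunk, len);  /* bytes count as consumed once TakeOutput returns */
  }
}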
/**
* Checks if instance has already consumed input.
*
* Instance that returns ::BROTLI_FALSE is considered "fresh" and could be
* reused.
*
* @param state decoder instance
* @returns ::BROTLI_TRUE if decoder has already used some input bytes
* @returns ::BROTLI_FALSE otherwise
*/
BROTLI_DEC_API BROTLI_BOOL BrotliDecoderIsUsed(const BrotliDecoderState* state);
/**
* Checks if decoder instance reached the final state.
*
* @param state decoder instance
* @returns ::BROTLI_TRUE if decoder is in a state where it reached the end of
* the input and produced all of the output
* @returns ::BROTLI_FALSE otherwise
*/
BROTLI_DEC_API BROTLI_BOOL BrotliDecoderIsFinished(const BrotliDecoderState* state);
/**
* Acquires a detailed error code.
*
* Should be used only after ::BrotliDecoderDecompressStream returns
* ::BROTLI_DECODER_RESULT_ERROR.
*
* See also ::BrotliDecoderErrorString
*
* @param state decoder instance
* @returns last saved error code
*/
BROTLI_DEC_API BrotliDecoderErrorCode BrotliDecoderGetErrorCode(
const BrotliDecoderState* state);
/**
* Converts error code to a c-string.
*/
BROTLI_DEC_API const char* BrotliDecoderErrorString(BrotliDecoderErrorCode c);
/**
* Gets a decoder library version.
*
* Look at BROTLI_VERSION for more information.
*/
BROTLI_DEC_API uint32_t BrotliDecoderVersion(void);
#if defined(__cplusplus) || defined(c_plusplus)
} /* extern "C" */
#endif
#endif /* BROTLI_DEC_DECODE_H_ */

View File

@ -1,421 +0,0 @@
/* Copyright 2013 Google Inc. All Rights Reserved.
Distributed under MIT license.
See file LICENSE for detail or copy at https://opensource.org/licenses/MIT
*/
/**
* @file
* API for Brotli compression.
*/
#ifndef BROTLI_ENC_ENCODE_H_
#define BROTLI_ENC_ENCODE_H_
#include <brotli/port.h>
#include <brotli/types.h>
#if defined(__cplusplus) || defined(c_plusplus)
extern "C" {
#endif
/** Minimal value for ::BROTLI_PARAM_LGWIN parameter. */
#define BROTLI_MIN_WINDOW_BITS 10
/**
* Maximal value for ::BROTLI_PARAM_LGWIN parameter.
*
* @note equal to @c BROTLI_MAX_DISTANCE_BITS constant.
*/
#define BROTLI_MAX_WINDOW_BITS 24
/** Minimal value for ::BROTLI_PARAM_LGBLOCK parameter. */
#define BROTLI_MIN_INPUT_BLOCK_BITS 16
/** Maximal value for ::BROTLI_PARAM_LGBLOCK parameter. */
#define BROTLI_MAX_INPUT_BLOCK_BITS 24
/** Minimal value for ::BROTLI_PARAM_QUALITY parameter. */
#define BROTLI_MIN_QUALITY 0
/** Maximal value for ::BROTLI_PARAM_QUALITY parameter. */
#define BROTLI_MAX_QUALITY 11
/** Options for ::BROTLI_PARAM_MODE parameter. */
typedef enum BrotliEncoderMode {
/**
* Default compression mode.
*
* In this mode compressor does not know anything in advance about the
* properties of the input.
*/
BROTLI_MODE_GENERIC = 0,
/** Compression mode for UTF-8 formatted text input. */
BROTLI_MODE_TEXT = 1,
/** Compression mode used in WOFF 2.0. */
BROTLI_MODE_FONT = 2
} BrotliEncoderMode;
/** Default value for ::BROTLI_PARAM_QUALITY parameter. */
#define BROTLI_DEFAULT_QUALITY 11
/** Default value for ::BROTLI_PARAM_LGWIN parameter. */
#define BROTLI_DEFAULT_WINDOW 22
/** Default value for ::BROTLI_PARAM_MODE parameter. */
#define BROTLI_DEFAULT_MODE BROTLI_MODE_GENERIC
/** Operations that can be performed by streaming encoder. */
typedef enum BrotliEncoderOperation {
/**
* Process input.
*
* Encoder may postpone producing output, until it has processed enough input.
*/
BROTLI_OPERATION_PROCESS = 0,
/**
* Produce output for all processed input.
*
* Actual flush is performed when input stream is depleted and there is enough
* space in output stream. This means that client should repeat
* ::BROTLI_OPERATION_FLUSH operation until @p available_in becomes @c 0, and
* ::BrotliEncoderHasMoreOutput returns ::BROTLI_FALSE.
*
* @warning Until flush is complete, client @b SHOULD @b NOT swap,
* reduce or extend input stream.
*
* When flush is complete, output data will be sufficient for decoder to
* reproduce all the given input.
*/
BROTLI_OPERATION_FLUSH = 1,
/**
* Finalize the stream.
*
* Actual finalization is performed when input stream is depleted and there is
* enough space in output stream. This means that client should repeat
* ::BROTLI_OPERATION_FINISH operation until @p available_in becomes @c 0, and
* ::BrotliEncoderHasMoreOutput returns ::BROTLI_FALSE.
*
* @warning Until finalization is complete, client @b SHOULD @b NOT swap,
* reduce or extend input stream.
*
* Helper function ::BrotliEncoderIsFinished checks if stream is finalized and
* output fully dumped.
*
* Adding more input data to finalized stream is impossible.
*/
BROTLI_OPERATION_FINISH = 2,
/**
* Emit metadata block to stream.
*
* Metadata is opaque to Brotli: neither encoder, nor decoder processes this
* data or relies on it. It may be used to pass some extra information from
* encoder client to decoder client without interfering with main data stream.
*
* @note Encoder may emit empty metadata blocks internally, to pad encoded
* stream to byte boundary.
*
* @warning Until emitting metadata is complete client @b SHOULD @b NOT swap,
* reduce or extend input stream.
*
* @warning The whole content of input buffer is considered to be the content
* of metadata block. Do @b NOT @e append metadata to input stream,
* before it is depleted with other operations.
*
* Stream is soft-flushed before metadata block is emitted. Metadata block
* @b MUST be no longer than 16MiB.
*/
BROTLI_OPERATION_EMIT_METADATA = 3
} BrotliEncoderOperation;
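/*
 * Sketch of the FLUSH contract above: keep issuing BROTLI_OPERATION_FLUSH
 * until all input has been consumed and no pending output remains. The emit
 * callback is hypothetical; it receives each produced chunk of output.
 */
#include <brotli/encode.h>

BROTLI_BOOL flush_all(BrotliEncoderState* s, const uint8_t* data, size_t len,
                      uint8_t* obuf, size_t obuf_size,
                      void (*emit)(const uint8_t* chunk, size_t n)) {
  size_t available_in = len;
  const uint8_t* next_in = data;
  do {
    size_t available_out = obuf_size;
    uint8_t* next_out = obuf;
    if (!BrotliEncoderCompressStream(s, BROTLI_OPERATION_FLUSH,
                                     &available_in, &next_in,
                                     &available_out, &next_out, NULL)) {
      return BROTLI_FALSE;
    }
    emit(obuf, obuf_size - available_out);
  } while (available_in != 0 || BrotliEncoderHasMoreOutput(s));
  return BROTLI_TRUE;
}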
/** Options to be used with ::BrotliEncoderSetParameter. */
typedef enum BrotliEncoderParameter {
/**
* Tune encoder for specific input.
*
* ::BrotliEncoderMode enumerates all available values.
*/
BROTLI_PARAM_MODE = 0,
/**
* The main compression speed-density lever.
*
* The higher the quality, the slower the compression. Range is
* from ::BROTLI_MIN_QUALITY to ::BROTLI_MAX_QUALITY.
*/
BROTLI_PARAM_QUALITY = 1,
/**
* Recommended sliding LZ77 window size.
*
* Encoder may reduce this value, e.g. if input is much smaller than
* window size.
*
* Window size is `(1 << value) - 16`.
*
* Range is from ::BROTLI_MIN_WINDOW_BITS to ::BROTLI_MAX_WINDOW_BITS.
*/
BROTLI_PARAM_LGWIN = 2,
/**
* Recommended input block size.
*
* Encoder may reduce this value, e.g. if input is much smaller than input
* block size.
*
* Range is from ::BROTLI_MIN_INPUT_BLOCK_BITS to
* ::BROTLI_MAX_INPUT_BLOCK_BITS.
*
* @note Bigger input block size allows better compression, but consumes more
* memory. \n The rough formula of memory used for temporary input
* storage is `3 << lgBlock`.
*/
BROTLI_PARAM_LGBLOCK = 3,
/**
* Flag that affects usage of "literal context modeling" format feature.
*
* This flag is a "decoding-speed vs compression ratio" trade-off.
*/
BROTLI_PARAM_DISABLE_LITERAL_CONTEXT_MODELING = 4,
/**
* Estimated total input size for all ::BrotliEncoderCompressStream calls.
*
* The default value is 0, which means that the total input size is unknown.
*/
BROTLI_PARAM_SIZE_HINT = 5
} BrotliEncoderParameter;
/**
* Opaque structure that holds encoder state.
*
* Allocated and initialized with ::BrotliEncoderCreateInstance.
* Cleaned up and deallocated with ::BrotliEncoderDestroyInstance.
*/
typedef struct BrotliEncoderStateStruct BrotliEncoderState;
/**
* Sets the specified parameter to the given encoder instance.
*
* @param state encoder instance
* @param param parameter to set
* @param value new parameter value
* @returns ::BROTLI_FALSE if parameter is unrecognized, or value is invalid
* @returns ::BROTLI_FALSE if value of parameter can not be changed at current
* encoder state (e.g. when encoding is started, window size might be
* already encoded and therefore it is impossible to change it)
* @returns ::BROTLI_TRUE if value is accepted
* @warning invalid values might be accepted in case they would not break
* encoding process.
*/
BROTLI_ENC_API BROTLI_BOOL BrotliEncoderSetParameter(
BrotliEncoderState* state, BrotliEncoderParameter param, uint32_t value);
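/*
 * Sketch: configuring a fresh encoder before streaming begins. The quality
 * value 5 is an arbitrary mid-range choice for illustration; SIZE_HINT is
 * optional and may be left at its default of 0 when the size is unknown.
 */
#include <brotli/encode.h>

BrotliEncoderState* tuned_encoder(size_t expected_input_size) {
  BrotliEncoderState* s = BrotliEncoderCreateInstance(NULL, NULL, NULL);
  if (s == NULL) return NULL;
  BrotliEncoderSetParameter(s, BROTLI_PARAM_QUALITY, 5);
  BrotliEncoderSetParameter(s, BROTLI_PARAM_LGWIN, BROTLI_DEFAULT_WINDOW);
  BrotliEncoderSetParameter(s, BROTLI_PARAM_SIZE_HINT,
                            (uint32_t)expected_input_size);
  return s;
}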
/**
* Creates an instance of ::BrotliEncoderState and initializes it.
*
* @p alloc_func and @p free_func @b MUST be both zero or both non-zero. In the
* case they are both zero, default memory allocators are used. @p opaque is
* passed to @p alloc_func and @p free_func when they are called.
*
* @param alloc_func custom memory allocation function
* @param free_func custom memory free function
* @param opaque custom memory manager handle
* @returns @c 0 if instance can not be allocated or initialized
* @returns pointer to initialized ::BrotliEncoderState otherwise
*/
BROTLI_ENC_API BrotliEncoderState* BrotliEncoderCreateInstance(
brotli_alloc_func alloc_func, brotli_free_func free_func, void* opaque);
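/*
 * Sketch of supplying the custom allocator pair described above. The
 * counting_* helpers are hypothetical: they forward to malloc/free while
 * tallying how many bytes the encoder requested via the opaque handle.
 */
#include <stdlib.h>
#include <brotli/encode.h>

static void* counting_alloc(void* opaque, size_t size) {
  *(size_t*)opaque += size;   /* track how much the encoder asked for */
  return malloc(size);
}

static void counting_free(void* opaque, void* address) {
  (void)opaque;
  free(address);              /* free(NULL) is a no-op, as required */
}

BrotliEncoderState* counting_encoder(size_t* total_requested) {
  /* alloc_func and free_func must both be set, or both be NULL. */
  return BrotliEncoderCreateInstance(counting_alloc, counting_free,
                                     total_requested);
}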
/**
* Deinitializes and frees ::BrotliEncoderState instance.
*
* @param state decoder instance to be cleaned up and deallocated
*/
BROTLI_ENC_API void BrotliEncoderDestroyInstance(BrotliEncoderState* state);
/**
* Prepends imaginary LZ77 dictionary.
*
* Fills the fresh ::BrotliEncoderState with additional data corpus for LZ77
* backward references.
*
* @note Not to be confused with the static dictionary (see RFC7932 section 8).
*
* Workflow:
* -# Allocate and initialize state with ::BrotliEncoderCreateInstance
* -# Set ::BROTLI_PARAM_LGWIN parameter
* -# Invoke ::BrotliEncoderSetCustomDictionary
* -# Use ::BrotliEncoderCompressStream
* -# Clean up and free state with ::BrotliEncoderDestroyInstance
*
* @param state encoder instance
* @param size length of @p dict; at most "window size" bytes are used
* @param dict "dictionary"; @b MUST use same dictionary during decompression
*/
BROTLI_ENC_API void BrotliEncoderSetCustomDictionary(
BrotliEncoderState* state, size_t size,
const uint8_t dict[BROTLI_ARRAY_PARAM(size)]);
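/*
 * Encoder-side counterpart of the workflow above (hypothetical dict/dict_len):
 * set the window first, then prepend the same dictionary the decoder will use.
 */
#include <brotli/encode.h>

BrotliEncoderState* encoder_with_dictionary(const uint8_t* dict,
                                            size_t dict_len, uint32_t lgwin) {
  BrotliEncoderState* s = BrotliEncoderCreateInstance(NULL, NULL, NULL);
  if (s == NULL) return NULL;
  BrotliEncoderSetParameter(s, BROTLI_PARAM_LGWIN, lgwin);
  BrotliEncoderSetCustomDictionary(s, dict_len, dict);
  return s;
}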
/**
* Calculates the output size bound for the given @p input_size.
*
* @warning Result is not applicable to ::BrotliEncoderCompressStream output,
* because every "flush" adds extra overhead bytes, and some encoder
* settings (e.g. quality @c 0 and @c 1) might imply a "soft flush"
* after every chunk of input.
*
* @param input_size size of projected input
* @returns @c 0 if result does not fit @c size_t
*/
BROTLI_ENC_API size_t BrotliEncoderMaxCompressedSize(size_t input_size);
/**
* Performs one-shot memory-to-memory compression.
*
* Compresses the data in @p input_buffer into @p encoded_buffer, and sets
* @p *encoded_size to the compressed length.
*
* @note If ::BrotliEncoderMaxCompressedSize(@p input_size) returns non-zero
* value, then output is guaranteed to be no longer than that.
*
* @param quality quality parameter value, e.g. ::BROTLI_DEFAULT_QUALITY
* @param lgwin lgwin parameter value, e.g. ::BROTLI_DEFAULT_WINDOW
* @param mode mode parameter value, e.g. ::BROTLI_DEFAULT_MODE
* @param input_size size of @p input_buffer
* @param input_buffer input data buffer with at least @p input_size
* addressable bytes
* @param[in, out] encoded_size @b in: size of @p encoded_buffer; \n
* @b out: length of compressed data written to
* @p encoded_buffer, or @c 0 if compression fails
* @param encoded_buffer compressed data destination buffer
* @returns ::BROTLI_FALSE in case of compression error
* @returns ::BROTLI_FALSE if output buffer is too small
* @returns ::BROTLI_TRUE otherwise
*/
BROTLI_ENC_API BROTLI_BOOL BrotliEncoderCompress(
int quality, int lgwin, BrotliEncoderMode mode, size_t input_size,
const uint8_t input_buffer[BROTLI_ARRAY_PARAM(input_size)],
size_t* encoded_size,
uint8_t encoded_buffer[BROTLI_ARRAY_PARAM(*encoded_size)]);
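/*
 * One-shot sketch combining the two calls above: size the destination with
 * BrotliEncoderMaxCompressedSize, then compress with the default quality,
 * window and mode. Returns a malloc'd buffer (caller frees) or NULL.
 */
#include <stdlib.h>
#include <brotli/encode.h>

uint8_t* compress_oneshot(const uint8_t* input, size_t input_size,
                          size_t* encoded_size) {
  size_t bound = BrotliEncoderMaxCompressedSize(input_size);
  uint8_t* out;
  if (bound == 0) return NULL;      /* bound does not fit in size_t */
  out = (uint8_t*)malloc(bound);
  if (out == NULL) return NULL;
  *encoded_size = bound;            /* in: capacity; out: compressed length */
  if (!BrotliEncoderCompress(BROTLI_DEFAULT_QUALITY, BROTLI_DEFAULT_WINDOW,
                             BROTLI_DEFAULT_MODE, input_size, input,
                             encoded_size, out)) {
    free(out);
    return NULL;
  }
  return out;
}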
/**
* Compresses input stream to output stream.
*
* The values @p *available_in and @p *available_out must specify the number of
* bytes addressable at @p *next_in and @p *next_out respectively.
* When @p *available_out is @c 0, @p next_out is allowed to be @c NULL.
*
* After each call, @p *available_in will be decremented by the amount of input
* bytes consumed, and the @p *next_in pointer will be incremented by that
* amount. Similarly, @p *available_out will be decremented by the amount of
* output bytes written, and the @p *next_out pointer will be incremented by
* that amount.
*
* @p total_out, if it is not a null-pointer, will be set to the number
* of compressed bytes produced since the last @p state initialization.
*
* Internally, the workflow consists of 3 tasks:
* -# (optionally) copy input data to internal buffer
* -# actually compress data and (optionally) store it to internal buffer
* -# (optionally) copy compressed bytes from internal buffer to output stream
*
* Whenever all 3 tasks can't move forward anymore, or error occurs, this
* method returns the control flow to caller.
*
* @p op is used to perform flush, finish the stream, or inject metadata block.
* See ::BrotliEncoderOperation for more information.
*
* Flushing the stream means forcing encoding of all input passed to encoder and
* completing the current output block, so it could be fully decoded by stream
* decoder. To perform flush set @p op to ::BROTLI_OPERATION_FLUSH.
* Under some circumstances (e.g. lack of output stream capacity) this operation
* would require several calls to ::BrotliEncoderCompressStream. The method must
* be called again until both input stream is depleted and encoder has no more
* output (see ::BrotliEncoderHasMoreOutput) after the method is called.
*
* Finishing the stream means encoding of all input passed to encoder and
* adding specific "final" marks, so stream decoder could determine that stream
* is complete. To perform finish set @p op to ::BROTLI_OPERATION_FINISH.
* Under some circumstances (e.g. lack of output stream capacity) this operation
* would require several calls to ::BrotliEncoderCompressStream. The method must
* be called again until both input stream is depleted and encoder has no more
* output (see ::BrotliEncoderHasMoreOutput) after the method is called.
*
* @warning When flushing and finishing, @p op should not change until operation
* is complete; input stream should not be swapped, reduced or
* extended as well.
*
* @param state encoder instance
* @param op requested operation
* @param[in, out] available_in @b in: amount of available input; \n
* @b out: amount of unused input
* @param[in, out] next_in pointer to the next input byte
* @param[in, out] available_out @b in: length of output buffer; \n
* @b out: remaining size of output buffer
* @param[in, out] next_out compressed output buffer cursor;
* can be @c NULL if @p available_out is @c 0
* @param[out] total_out number of bytes produced so far; can be @c NULL
* @returns ::BROTLI_FALSE if there was an error
* @returns ::BROTLI_TRUE otherwise
*/
BROTLI_ENC_API BROTLI_BOOL BrotliEncoderCompressStream(
BrotliEncoderState* state, BrotliEncoderOperation op, size_t* available_in,
const uint8_t** next_in, size_t* available_out, uint8_t** next_out,
size_t* total_out);
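/*
 * Sketch of the finish loop described above: offer a fresh output buffer and
 * repeat BROTLI_OPERATION_FINISH until BrotliEncoderIsFinished reports that
 * all input was consumed and all output produced. write_all is a hypothetical
 * callback that persists each produced chunk.
 */
#include <brotli/encode.h>

BROTLI_BOOL finish_stream(BrotliEncoderState* s,
                          const uint8_t* input, size_t input_size,
                          uint8_t* obuf, size_t obuf_size,
                          BROTLI_BOOL (*write_all)(const uint8_t* d, size_t n)) {
  size_t available_in = input_size;
  const uint8_t* next_in = input;
  while (!BrotliEncoderIsFinished(s)) {
    size_t available_out = obuf_size;
    uint8_t* next_out = obuf;
    if (!BrotliEncoderCompressStream(s, BROTLI_OPERATION_FINISH,
                                     &available_in, &next_in,
                                     &available_out, &next_out, NULL)) {
      return BROTLI_FALSE;          /* encoder error */
    }
    if (!write_all(obuf, obuf_size - available_out)) return BROTLI_FALSE;
  }
  return BROTLI_TRUE;
}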
/**
* Checks if encoder instance reached the final state.
*
* @param state encoder instance
* @returns ::BROTLI_TRUE if encoder is in a state where it reached the end of
* the input and produced all of the output
* @returns ::BROTLI_FALSE otherwise
*/
BROTLI_ENC_API BROTLI_BOOL BrotliEncoderIsFinished(BrotliEncoderState* state);
/**
* Checks if encoder has more output.
*
* @param state encoder instance
* @returns ::BROTLI_TRUE, if encoder has some unconsumed output
* @returns ::BROTLI_FALSE otherwise
*/
BROTLI_ENC_API BROTLI_BOOL BrotliEncoderHasMoreOutput(
BrotliEncoderState* state);
/**
* Acquires pointer to internal output buffer.
*
* This method is used to make language bindings easier and more efficient:
* -# push data to ::BrotliEncoderCompressStream,
* until ::BrotliEncoderHasMoreOutput returns ::BROTLI_TRUE
* -# use ::BrotliEncoderTakeOutput to peek bytes and copy to language-specific
* entity
*
* Also this could be useful if there is an output stream that is able to
* consume all the provided data (e.g. when data is saved to file system).
*
* @attention After every call to ::BrotliEncoderTakeOutput @p *size bytes of
* output are considered consumed for all consecutive calls to the
* instance methods; returned pointer becomes invalidated as well.
*
* @note Encoder output is not guaranteed to be contiguous. This means that
* after the size-unrestricted call to ::BrotliEncoderTakeOutput,
* immediate next call to ::BrotliEncoderTakeOutput may return more data.
*
* @param state encoder instance
* @param[in, out] size @b in: number of bytes caller is ready to take, @c 0 if
* any amount could be handled; \n
* @b out: amount of data pointed by returned pointer and
* considered consumed; \n
* out value is never greater than in value, unless it is @c 0
* @returns pointer to output data
*/
BROTLI_ENC_API const uint8_t* BrotliEncoderTakeOutput(
BrotliEncoderState* state, size_t* size);
/**
* Gets an encoder library version.
*
* Look at BROTLI_VERSION for more information.
*/
BROTLI_ENC_API uint32_t BrotliEncoderVersion(void);
#if defined(__cplusplus) || defined(c_plusplus)
} /* extern "C" */
#endif
#endif /* BROTLI_ENC_ENCODE_H_ */

View File

@ -1,146 +0,0 @@
/* Copyright 2016 Google Inc. All Rights Reserved.
Distributed under MIT license.
See file LICENSE for detail or copy at https://opensource.org/licenses/MIT
*/
/* Macros for compiler / platform specific features and build options. */
#ifndef BROTLI_COMMON_PORT_H_
#define BROTLI_COMMON_PORT_H_
/* Compatibility with non-clang compilers. */
#ifndef __has_builtin
#define __has_builtin(x) 0
#endif
#ifndef __has_attribute
#define __has_attribute(x) 0
#endif
#ifndef __has_feature
#define __has_feature(x) 0
#endif
#if defined(__GNUC__) && defined(__GNUC_MINOR__)
#define BROTLI_GCC_VERSION (__GNUC__ * 100 + __GNUC_MINOR__)
#else
#define BROTLI_GCC_VERSION 0
#endif
#if defined(__ICC)
#define BROTLI_ICC_VERSION __ICC
#else
#define BROTLI_ICC_VERSION 0
#endif
#if defined(BROTLI_BUILD_MODERN_COMPILER)
#define BROTLI_MODERN_COMPILER 1
#elif BROTLI_GCC_VERSION >= 304 || BROTLI_ICC_VERSION >= 1600
#define BROTLI_MODERN_COMPILER 1
#else
#define BROTLI_MODERN_COMPILER 0
#endif
/* Define "BROTLI_PREDICT_TRUE" and "BROTLI_PREDICT_FALSE" macros for capable
compilers.
To apply compiler hint, enclose the branching condition into macros, like this:
if (BROTLI_PREDICT_TRUE(zero == 0)) {
// main execution path
} else {
// compiler should place this code outside of main execution path
}
OR:
if (BROTLI_PREDICT_FALSE(something_rare_or_unexpected_happens)) {
// compiler should place this code outside of main execution path
}
*/
#if BROTLI_MODERN_COMPILER || __has_builtin(__builtin_expect)
#define BROTLI_PREDICT_TRUE(x) (__builtin_expect(!!(x), 1))
#define BROTLI_PREDICT_FALSE(x) (__builtin_expect(x, 0))
#else
#define BROTLI_PREDICT_FALSE(x) (x)
#define BROTLI_PREDICT_TRUE(x) (x)
#endif
#if BROTLI_MODERN_COMPILER || __has_attribute(always_inline)
#define BROTLI_ATTRIBUTE_ALWAYS_INLINE __attribute__ ((always_inline))
#else
#define BROTLI_ATTRIBUTE_ALWAYS_INLINE
#endif
#if defined(_WIN32) || defined(__CYGWIN__)
#define BROTLI_ATTRIBUTE_VISIBILITY_HIDDEN
#elif BROTLI_MODERN_COMPILER || __has_attribute(visibility)
#define BROTLI_ATTRIBUTE_VISIBILITY_HIDDEN \
__attribute__ ((visibility ("hidden")))
#else
#define BROTLI_ATTRIBUTE_VISIBILITY_HIDDEN
#endif
#ifndef BROTLI_INTERNAL
#define BROTLI_INTERNAL BROTLI_ATTRIBUTE_VISIBILITY_HIDDEN
#endif
#if defined(BROTLI_SHARED_COMPILATION) && defined(_WIN32)
#if defined(BROTLICOMMON_SHARED_COMPILATION)
#define BROTLI_COMMON_API __declspec(dllexport)
#else
#define BROTLI_COMMON_API __declspec(dllimport)
#endif /* BROTLICOMMON_SHARED_COMPILATION */
#if defined(BROTLIDEC_SHARED_COMPILATION)
#define BROTLI_DEC_API __declspec(dllexport)
#else
#define BROTLI_DEC_API __declspec(dllimport)
#endif /* BROTLIDEC_SHARED_COMPILATION */
#if defined(BROTLIENC_SHARED_COMPILATION)
#define BROTLI_ENC_API __declspec(dllexport)
#else
#define BROTLI_ENC_API __declspec(dllimport)
#endif /* BROTLIENC_SHARED_COMPILATION */
#else /* BROTLI_SHARED_COMPILATION && _WIN32 */
#define BROTLI_COMMON_API
#define BROTLI_DEC_API
#define BROTLI_ENC_API
#endif
#ifndef _MSC_VER
#if defined(__cplusplus) || !defined(__STRICT_ANSI__) || \
(defined(__STDC_VERSION__) && __STDC_VERSION__ >= 199901L)
#define BROTLI_INLINE inline BROTLI_ATTRIBUTE_ALWAYS_INLINE
#else
#define BROTLI_INLINE
#endif
#else /* _MSC_VER */
#define BROTLI_INLINE __forceinline
#endif /* _MSC_VER */
#if !defined(__cplusplus) && !defined(c_plusplus) && \
(defined(__STDC_VERSION__) && __STDC_VERSION__ >= 199901L)
#define BROTLI_RESTRICT restrict
#elif BROTLI_GCC_VERSION > 295 || defined(__llvm__)
#define BROTLI_RESTRICT __restrict
#else
#define BROTLI_RESTRICT
#endif
#if BROTLI_MODERN_COMPILER || __has_attribute(noinline)
#define BROTLI_NOINLINE __attribute__((noinline))
#else
#define BROTLI_NOINLINE
#endif
#if BROTLI_MODERN_COMPILER || __has_attribute(deprecated)
#define BROTLI_DEPRECATED __attribute__((deprecated))
#else
#define BROTLI_DEPRECATED
#endif
#define BROTLI_UNUSED(X) (void)(X)
#endif /* BROTLI_COMMON_PORT_H_ */

View File

@ -1,90 +0,0 @@
/* Copyright 2013 Google Inc. All Rights Reserved.
Distributed under MIT license.
See file LICENSE for detail or copy at https://opensource.org/licenses/MIT
*/
/**
* @file
* Common types used in decoder and encoder API.
*/
#ifndef BROTLI_COMMON_TYPES_H_
#define BROTLI_COMMON_TYPES_H_
#include <stddef.h> /* for size_t */
#if defined(_MSC_VER) && (_MSC_VER < 1600)
typedef __int8 int8_t;
typedef unsigned __int8 uint8_t;
typedef __int16 int16_t;
typedef unsigned __int16 uint16_t;
typedef __int32 int32_t;
typedef unsigned __int32 uint32_t;
typedef unsigned __int64 uint64_t;
typedef __int64 int64_t;
#else
#include <stdint.h>
#endif /* defined(_MSC_VER) && (_MSC_VER < 1600) */
/**
* A portable @c bool replacement.
*
* ::BROTLI_BOOL is a "documentation" type: actually it is @c int, but in API it
* denotes a type, whose only values are ::BROTLI_TRUE and ::BROTLI_FALSE.
*
* ::BROTLI_BOOL values passed to Brotli should either be ::BROTLI_TRUE or
* ::BROTLI_FALSE, or be a result of ::TO_BROTLI_BOOL macros.
*
* ::BROTLI_BOOL values returned by Brotli should not be tested for equality
* with @c true, @c false, ::BROTLI_TRUE, ::BROTLI_FALSE, but rather should be
* evaluated, for example: @code{.cpp}
* if (SomeBrotliFunction(encoder, BROTLI_TRUE) &&
* !OtherBrotliFunction(decoder, BROTLI_FALSE)) {
* bool x = !!YetAnotherBrotliFunction(encoder, TO_BROTLI_BOOL(2 * 2 == 4));
* DoSomething(x);
* }
* @endcode
*/
#define BROTLI_BOOL int
/** Portable @c true replacement. */
#define BROTLI_TRUE 1
/** Portable @c false replacement. */
#define BROTLI_FALSE 0
/** @c bool to ::BROTLI_BOOL conversion macros. */
#define TO_BROTLI_BOOL(X) (!!(X) ? BROTLI_TRUE : BROTLI_FALSE)
#define BROTLI_MAKE_UINT64_T(high, low) ((((uint64_t)(high)) << 32) | low)
#define BROTLI_UINT32_MAX (~((uint32_t)0))
#define BROTLI_SIZE_MAX (~((size_t)0))
/**
* Allocating function pointer type.
*
* @param opaque custom memory manager handle provided by client
* @param size requested memory region size; can not be @c 0
* @returns @c 0 in the case of failure
* @returns a valid pointer to a memory region of at least @p size bytes
* long otherwise
*/
typedef void* (*brotli_alloc_func)(void* opaque, size_t size);
/**
* Deallocating function pointer type.
*
* This function @b SHOULD do nothing if @p address is @c 0.
*
* @param opaque custom memory manager handle provided by client
* @param address memory region pointer returned by ::brotli_alloc_func, or @c 0
*/
typedef void (*brotli_free_func)(void* opaque, void* address);
#if defined(__STDC_VERSION__) && (__STDC_VERSION__ >= 199901L) && \
!defined(__cplusplus) && !defined(__PGI)
#define BROTLI_ARRAY_PARAM(L) L
#else
#define BROTLI_ARRAY_PARAM(L)
#endif
#endif /* BROTLI_COMMON_TYPES_H_ */

File diff suppressed because it is too large

View File

@ -1,19 +0,0 @@
// Copyright 2017 Google Inc. All Rights Reserved.
//
// Distributed under MIT license.
// See file LICENSE for detail or copy at https://opensource.org/licenses/MIT
package brotli
// Inform golang build system that it should link brotli libraries.
// #cgo CFLAGS: -O3
// #cgo LDFLAGS: -lm
import "C"
import (
_ "github.com/cloudflare/brotli-go/brotli"
_ "github.com/cloudflare/brotli-go/common"
_ "github.com/cloudflare/brotli-go/dec"
_ "github.com/cloudflare/brotli-go/enc"
)

View File

@ -1,56 +0,0 @@
/* Copyright 2013 Google Inc. All Rights Reserved.
Distributed under MIT license.
See file LICENSE for detail or copy at https://opensource.org/licenses/MIT
*/
/* Functions for clustering similar histograms together. */
#include "./enc/cluster.h"
#include <brotli/types.h>
#include "./enc/bit_cost.h" /* BrotliPopulationCost */
#include "./enc/fast_log.h"
#include "./enc/histogram.h"
#include "./enc/memory.h"
#include "./enc/port.h"
#if defined(__cplusplus) || defined(c_plusplus)
extern "C" {
#endif
static BROTLI_INLINE BROTLI_BOOL HistogramPairIsLess(
const HistogramPair* p1, const HistogramPair* p2) {
if (p1->cost_diff != p2->cost_diff) {
return TO_BROTLI_BOOL(p1->cost_diff > p2->cost_diff);
}
return TO_BROTLI_BOOL((p1->idx2 - p1->idx1) > (p2->idx2 - p2->idx1));
}
/* Returns entropy reduction of the context map when we combine two clusters. */
static BROTLI_INLINE double ClusterCostDiff(size_t size_a, size_t size_b) {
size_t size_c = size_a + size_b;
return (double)size_a * FastLog2(size_a) +
(double)size_b * FastLog2(size_b) -
(double)size_c * FastLog2(size_c);
}
#define CODE(X) X
#define FN(X) X ## Literal
#include "./enc/cluster_inc.h" /* NOLINT(build/include) */
#undef FN
#define FN(X) X ## Command
#include "./enc/cluster_inc.h" /* NOLINT(build/include) */
#undef FN
#define FN(X) X ## Distance
#include "./enc/cluster_inc.h" /* NOLINT(build/include) */
#undef FN
#undef CODE
#if defined(__cplusplus) || defined(c_plusplus)
} /* extern "C" */
#endif

View File

@ -1,5 +0,0 @@
package common
import (
"C"
)

View File

@ -1,55 +0,0 @@
/* Copyright 2016 Google Inc. All Rights Reserved.
Distributed under MIT license.
See file LICENSE for detail or copy at https://opensource.org/licenses/MIT
*/
#ifndef BROTLI_COMMON_CONSTANTS_H_
#define BROTLI_COMMON_CONSTANTS_H_
/* Specification: 7.3. Encoding of the context map */
#define BROTLI_CONTEXT_MAP_MAX_RLE 16
/* Specification: 2. Compressed representation overview */
#define BROTLI_MAX_NUMBER_OF_BLOCK_TYPES 256
/* Specification: 3.3. Alphabet sizes: insert-and-copy length */
#define BROTLI_NUM_LITERAL_SYMBOLS 256
#define BROTLI_NUM_COMMAND_SYMBOLS 704
#define BROTLI_NUM_BLOCK_LEN_SYMBOLS 26
#define BROTLI_MAX_CONTEXT_MAP_SYMBOLS (BROTLI_MAX_NUMBER_OF_BLOCK_TYPES + \
BROTLI_CONTEXT_MAP_MAX_RLE)
#define BROTLI_MAX_BLOCK_TYPE_SYMBOLS (BROTLI_MAX_NUMBER_OF_BLOCK_TYPES + 2)
/* Specification: 3.5. Complex prefix codes */
#define BROTLI_REPEAT_PREVIOUS_CODE_LENGTH 16
#define BROTLI_REPEAT_ZERO_CODE_LENGTH 17
#define BROTLI_CODE_LENGTH_CODES (BROTLI_REPEAT_ZERO_CODE_LENGTH + 1)
/* "code length of 8 is repeated" */
#define BROTLI_INITIAL_REPEATED_CODE_LENGTH 8
/* Specification: 4. Encoding of distances */
#define BROTLI_NUM_DISTANCE_SHORT_CODES 16
#define BROTLI_MAX_NPOSTFIX 3
#define BROTLI_MAX_NDIRECT 120
#define BROTLI_MAX_DISTANCE_BITS 24U
/* BROTLI_NUM_DISTANCE_SYMBOLS == 520 */
#define BROTLI_NUM_DISTANCE_SYMBOLS (BROTLI_NUM_DISTANCE_SHORT_CODES + \
BROTLI_MAX_NDIRECT + \
(BROTLI_MAX_DISTANCE_BITS << \
(BROTLI_MAX_NPOSTFIX + 1)))
/* 7.1. Context modes and context ID lookup for literals */
/* "context IDs for literals are in the range of 0..63" */
#define BROTLI_LITERAL_CONTEXT_BITS 6
/* 7.2. Context ID for distances */
#define BROTLI_DISTANCE_CONTEXT_BITS 2
/* 9.1. Format of the Stream Header */
/* Number of slack bytes for window size. Don't confuse
with BROTLI_NUM_DISTANCE_SHORT_CODES. */
#define BROTLI_WINDOW_GAP 16
#define BROTLI_MAX_BACKWARD_LIMIT(W) (((size_t)1 << (W)) - BROTLI_WINDOW_GAP)
#endif /* BROTLI_COMMON_CONSTANTS_H_ */

View File

@ -1,64 +0,0 @@
/* Copyright 2013 Google Inc. All Rights Reserved.
Distributed under MIT license.
See file LICENSE for detail or copy at https://opensource.org/licenses/MIT
*/
/* Collection of static dictionary words. */
#ifndef BROTLI_COMMON_DICTIONARY_H_
#define BROTLI_COMMON_DICTIONARY_H_
#include <brotli/port.h>
#include <brotli/types.h>
#if defined(__cplusplus) || defined(c_plusplus)
extern "C" {
#endif
typedef struct BrotliDictionary {
/**
* Number of bits to encode index of dictionary word in a bucket.
*
* Specification: Appendix A. Static Dictionary Data
*
* Words in a dictionary are bucketed by length.
* @c 0 means that there are no words of a given length.
* Dictionary consists of words with length of [4..24] bytes.
* Values at [0..3] and [25..31] indices should not be addressed.
*/
const uint8_t size_bits_by_length[32];
/* assert(offset[i + 1] == offset[i] + (bits[i] ? (i << bits[i]) : 0)) */
const uint32_t offsets_by_length[32];
/* assert(data_size == offsets_by_length[31]) */
const size_t data_size;
/* Data array is not bound, and should obey the size_bits_by_length values.
Specified size matches default (RFC 7932) dictionary. Its size is
defined by data_size */
const uint8_t* data;
} BrotliDictionary;
BROTLI_COMMON_API extern const BrotliDictionary* BrotliGetDictionary(void);
/**
* Sets dictionary data.
*
* When dictionary data is already set / present, this method is no-op.
*
* Dictionary data MUST be provided before BrotliGetDictionary is invoked.
* This method is used ONLY in multi-client environment (e.g. C + Java),
* to reduce storage by sharing single dictionary between implementations.
*/
BROTLI_COMMON_API void BrotliSetDictionaryData(const uint8_t* data);
#define BROTLI_MIN_DICTIONARY_WORD_LENGTH 4
#define BROTLI_MAX_DICTIONARY_WORD_LENGTH 24
#if defined(__cplusplus) || defined(c_plusplus)
} /* extern "C" */
#endif
#endif /* BROTLI_COMMON_DICTIONARY_H_ */

View File

@ -1,19 +0,0 @@
/* Copyright 2016 Google Inc. All Rights Reserved.
Distributed under MIT license.
See file LICENSE for detail or copy at https://opensource.org/licenses/MIT
*/
/* Version definition. */
#ifndef BROTLI_COMMON_VERSION_H_
#define BROTLI_COMMON_VERSION_H_
/* This macro should only be used when library is compiled together with client.
If library is dynamically linked, use BrotliDecoderVersion and
BrotliEncoderVersion methods. */
/* Semantic version, calculated as (MAJOR << 24) | (MINOR << 12) | PATCH */
#define BROTLI_VERSION 0x1000000
#endif /* BROTLI_COMMON_VERSION_H_ */
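/*
 * Small sketch of unpacking the packed semantic version defined above:
 * 0x1000000 == (1 << 24) | (0 << 12) | 0, i.e. library version 1.0.0.
 */
#include <stdio.h>
#include <stdint.h>

static void print_brotli_version(uint32_t v) {
  printf("%u.%u.%u\n",
         (unsigned)(v >> 24),           /* MAJOR */
         (unsigned)((v >> 12) & 0xFFF), /* MINOR */
         (unsigned)(v & 0xFFF));        /* PATCH */
}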

View File

@ -1,791 +0,0 @@
/* Copyright 2015 Google Inc. All Rights Reserved.
Distributed under MIT license.
See file LICENSE for detail or copy at https://opensource.org/licenses/MIT
*/
/* Function for fast encoding of an input fragment, independently from the input
history. This function uses one-pass processing: when we find a backward
match, we immediately emit the corresponding command and literal codes to
the bit stream.
Adapted from the CompressFragment() function in
https://github.com/google/snappy/blob/master/snappy.cc */
#include "./enc/compress_fragment.h"
#include <string.h> /* memcmp, memcpy, memset */
#include "./common/constants.h"
#include <brotli/types.h>
#include "./enc/brotli_bit_stream.h"
#include "./enc/entropy_encode.h"
#include "./enc/fast_log.h"
#include "./enc/find_match_length.h"
#include "./enc/memory.h"
#include "./enc/port.h"
#include "./enc/write_bits.h"
#if defined(__cplusplus) || defined(c_plusplus)
extern "C" {
#endif
#define MAX_DISTANCE (long)BROTLI_MAX_BACKWARD_LIMIT(18)
/* kHashMul32 multiplier has these properties:
* The multiplier must be odd. Otherwise we may lose the highest bit.
* No long streaks of ones or zeros.
* There is no effort to ensure that it is a prime, the oddity is enough
for this use.
* The number has been tuned heuristically against compression benchmarks. */
static const uint32_t kHashMul32 = 0x1e35a7bd;
static BROTLI_INLINE uint32_t Hash(const uint8_t* p, size_t shift) {
const uint64_t h = (BROTLI_UNALIGNED_LOAD64(p) << 24) * kHashMul32;
return (uint32_t)(h >> shift);
}
static BROTLI_INLINE uint32_t HashBytesAtOffset(
uint64_t v, int offset, size_t shift) {
assert(offset >= 0);
assert(offset <= 3);
{
const uint64_t h = ((v >> (8 * offset)) << 24) * kHashMul32;
return (uint32_t)(h >> shift);
}
}
static BROTLI_INLINE BROTLI_BOOL IsMatch(const uint8_t* p1, const uint8_t* p2) {
return TO_BROTLI_BOOL(
BROTLI_UNALIGNED_LOAD32(p1) == BROTLI_UNALIGNED_LOAD32(p2) &&
p1[4] == p2[4]);
}
/* Builds a literal prefix code into "depths" and "bits" based on the statistics
of the "input" string and stores it into the bit stream.
Note that the prefix code here is built from the pre-LZ77 input, therefore
we can only approximate the statistics of the actual literal stream.
Moreover, for long inputs we build a histogram from a sample of the input
and thus have to assign a non-zero depth for each literal.
Returns estimated compression ratio millibytes/char for encoding given input
with generated code. */
static size_t BuildAndStoreLiteralPrefixCode(MemoryManager* m,
const uint8_t* input,
const size_t input_size,
uint8_t depths[256],
uint16_t bits[256],
size_t* storage_ix,
uint8_t* storage) {
uint32_t histogram[256] = { 0 };
size_t histogram_total;
size_t i;
if (input_size < (1 << 15)) {
for (i = 0; i < input_size; ++i) {
++histogram[input[i]];
}
histogram_total = input_size;
for (i = 0; i < 256; ++i) {
/* We weigh the first 11 samples with weight 3 to account for the
balancing effect of the LZ77 phase on the histogram. */
const uint32_t adjust = 2 * BROTLI_MIN(uint32_t, histogram[i], 11u);
histogram[i] += adjust;
histogram_total += adjust;
}
} else {
static const size_t kSampleRate = 29;
for (i = 0; i < input_size; i += kSampleRate) {
++histogram[input[i]];
}
histogram_total = (input_size + kSampleRate - 1) / kSampleRate;
for (i = 0; i < 256; ++i) {
/* We add 1 to each population count to avoid 0 bit depths (since this is
only a sample and we don't know if the symbol appears or not), and we
weigh the first 11 samples with weight 3 to account for the balancing
effect of the LZ77 phase on the histogram (more frequent symbols are
more likely to be in backward references instead as literals). */
const uint32_t adjust = 1 + 2 * BROTLI_MIN(uint32_t, histogram[i], 11u);
histogram[i] += adjust;
histogram_total += adjust;
}
}
BrotliBuildAndStoreHuffmanTreeFast(m, histogram, histogram_total,
/* max_bits = */ 8,
depths, bits, storage_ix, storage);
if (BROTLI_IS_OOM(m)) return 0;
{
size_t literal_ratio = 0;
for (i = 0; i < 256; ++i) {
if (histogram[i]) literal_ratio += histogram[i] * depths[i];
}
/* Estimated encoding ratio, millibytes per symbol. */
return (literal_ratio * 125) / histogram_total;
}
}
/* Builds a command and distance prefix code (each 64 symbols) into "depth" and
"bits" based on "histogram" and stores it into the bit stream. */
static void BuildAndStoreCommandPrefixCode(const uint32_t histogram[128],
uint8_t depth[128], uint16_t bits[128], size_t* storage_ix,
uint8_t* storage) {
/* Tree size for building a tree over 64 symbols is 2 * 64 + 1. */
HuffmanTree tree[129];
uint8_t cmd_depth[BROTLI_NUM_COMMAND_SYMBOLS] = { 0 };
uint16_t cmd_bits[64];
BrotliCreateHuffmanTree(histogram, 64, 15, tree, depth);
BrotliCreateHuffmanTree(&histogram[64], 64, 14, tree, &depth[64]);
/* We have to jump through a few hoops here in order to compute
the command bits because the symbols are in a different order than in
the full alphabet. This looks complicated, but having the symbols
in this order in the command bits saves a few branches in the Emit*
functions. */
memcpy(cmd_depth, depth, 24);
memcpy(cmd_depth + 24, depth + 40, 8);
memcpy(cmd_depth + 32, depth + 24, 8);
memcpy(cmd_depth + 40, depth + 48, 8);
memcpy(cmd_depth + 48, depth + 32, 8);
memcpy(cmd_depth + 56, depth + 56, 8);
BrotliConvertBitDepthsToSymbols(cmd_depth, 64, cmd_bits);
memcpy(bits, cmd_bits, 48);
memcpy(bits + 24, cmd_bits + 32, 16);
memcpy(bits + 32, cmd_bits + 48, 16);
memcpy(bits + 40, cmd_bits + 24, 16);
memcpy(bits + 48, cmd_bits + 40, 16);
memcpy(bits + 56, cmd_bits + 56, 16);
BrotliConvertBitDepthsToSymbols(&depth[64], 64, &bits[64]);
{
/* Create the bit length array for the full command alphabet. */
size_t i;
memset(cmd_depth, 0, 64); /* only 64 first values were used */
memcpy(cmd_depth, depth, 8);
memcpy(cmd_depth + 64, depth + 8, 8);
memcpy(cmd_depth + 128, depth + 16, 8);
memcpy(cmd_depth + 192, depth + 24, 8);
memcpy(cmd_depth + 384, depth + 32, 8);
for (i = 0; i < 8; ++i) {
cmd_depth[128 + 8 * i] = depth[40 + i];
cmd_depth[256 + 8 * i] = depth[48 + i];
cmd_depth[448 + 8 * i] = depth[56 + i];
}
BrotliStoreHuffmanTree(
cmd_depth, BROTLI_NUM_COMMAND_SYMBOLS, tree, storage_ix, storage);
}
BrotliStoreHuffmanTree(&depth[64], 64, tree, storage_ix, storage);
}
/* REQUIRES: insertlen < 6210 */
static BROTLI_INLINE void EmitInsertLen(size_t insertlen,
const uint8_t depth[128],
const uint16_t bits[128],
uint32_t histo[128],
size_t* storage_ix,
uint8_t* storage) {
if (insertlen < 6) {
const size_t code = insertlen + 40;
BrotliWriteBits(depth[code], bits[code], storage_ix, storage);
++histo[code];
} else if (insertlen < 130) {
const size_t tail = insertlen - 2;
const uint32_t nbits = Log2FloorNonZero(tail) - 1u;
const size_t prefix = tail >> nbits;
const size_t inscode = (nbits << 1) + prefix + 42;
BrotliWriteBits(depth[inscode], bits[inscode], storage_ix, storage);
BrotliWriteBits(nbits, tail - (prefix << nbits), storage_ix, storage);
++histo[inscode];
} else if (insertlen < 2114) {
const size_t tail = insertlen - 66;
const uint32_t nbits = Log2FloorNonZero(tail);
const size_t code = nbits + 50;
BrotliWriteBits(depth[code], bits[code], storage_ix, storage);
BrotliWriteBits(nbits, tail - ((size_t)1 << nbits), storage_ix, storage);
++histo[code];
} else {
BrotliWriteBits(depth[61], bits[61], storage_ix, storage);
BrotliWriteBits(12, insertlen - 2114, storage_ix, storage);
++histo[21];
}
}
static BROTLI_INLINE void EmitLongInsertLen(size_t insertlen,
const uint8_t depth[128],
const uint16_t bits[128],
uint32_t histo[128],
size_t* storage_ix,
uint8_t* storage) {
if (insertlen < 22594) {
BrotliWriteBits(depth[62], bits[62], storage_ix, storage);
BrotliWriteBits(14, insertlen - 6210, storage_ix, storage);
++histo[22];
} else {
BrotliWriteBits(depth[63], bits[63], storage_ix, storage);
BrotliWriteBits(24, insertlen - 22594, storage_ix, storage);
++histo[23];
}
}
static BROTLI_INLINE void EmitCopyLen(size_t copylen,
const uint8_t depth[128],
const uint16_t bits[128],
uint32_t histo[128],
size_t* storage_ix,
uint8_t* storage) {
if (copylen < 10) {
BrotliWriteBits(
depth[copylen + 14], bits[copylen + 14], storage_ix, storage);
++histo[copylen + 14];
} else if (copylen < 134) {
const size_t tail = copylen - 6;
const uint32_t nbits = Log2FloorNonZero(tail) - 1u;
const size_t prefix = tail >> nbits;
const size_t code = (nbits << 1) + prefix + 20;
BrotliWriteBits(depth[code], bits[code], storage_ix, storage);
BrotliWriteBits(nbits, tail - (prefix << nbits), storage_ix, storage);
++histo[code];
} else if (copylen < 2118) {
const size_t tail = copylen - 70;
const uint32_t nbits = Log2FloorNonZero(tail);
const size_t code = nbits + 28;
BrotliWriteBits(depth[code], bits[code], storage_ix, storage);
BrotliWriteBits(nbits, tail - ((size_t)1 << nbits), storage_ix, storage);
++histo[code];
} else {
BrotliWriteBits(depth[39], bits[39], storage_ix, storage);
BrotliWriteBits(24, copylen - 2118, storage_ix, storage);
++histo[47];
}
}
static BROTLI_INLINE void EmitCopyLenLastDistance(size_t copylen,
const uint8_t depth[128],
const uint16_t bits[128],
uint32_t histo[128],
size_t* storage_ix,
uint8_t* storage) {
if (copylen < 12) {
BrotliWriteBits(depth[copylen - 4], bits[copylen - 4], storage_ix, storage);
++histo[copylen - 4];
} else if (copylen < 72) {
const size_t tail = copylen - 8;
const uint32_t nbits = Log2FloorNonZero(tail) - 1;
const size_t prefix = tail >> nbits;
const size_t code = (nbits << 1) + prefix + 4;
BrotliWriteBits(depth[code], bits[code], storage_ix, storage);
BrotliWriteBits(nbits, tail - (prefix << nbits), storage_ix, storage);
++histo[code];
} else if (copylen < 136) {
const size_t tail = copylen - 8;
const size_t code = (tail >> 5) + 30;
BrotliWriteBits(depth[code], bits[code], storage_ix, storage);
BrotliWriteBits(5, tail & 31, storage_ix, storage);
BrotliWriteBits(depth[64], bits[64], storage_ix, storage);
++histo[code];
++histo[64];
} else if (copylen < 2120) {
const size_t tail = copylen - 72;
const uint32_t nbits = Log2FloorNonZero(tail);
const size_t code = nbits + 28;
BrotliWriteBits(depth[code], bits[code], storage_ix, storage);
BrotliWriteBits(nbits, tail - ((size_t)1 << nbits), storage_ix, storage);
BrotliWriteBits(depth[64], bits[64], storage_ix, storage);
++histo[code];
++histo[64];
} else {
BrotliWriteBits(depth[39], bits[39], storage_ix, storage);
BrotliWriteBits(24, copylen - 2120, storage_ix, storage);
BrotliWriteBits(depth[64], bits[64], storage_ix, storage);
++histo[47];
++histo[64];
}
}
static BROTLI_INLINE void EmitDistance(size_t distance,
const uint8_t depth[128],
const uint16_t bits[128],
uint32_t histo[128],
size_t* storage_ix, uint8_t* storage) {
const size_t d = distance + 3;
const uint32_t nbits = Log2FloorNonZero(d) - 1u;
const size_t prefix = (d >> nbits) & 1;
const size_t offset = (2 + prefix) << nbits;
const size_t distcode = 2 * (nbits - 1) + prefix + 80;
BrotliWriteBits(depth[distcode], bits[distcode], storage_ix, storage);
BrotliWriteBits(nbits, d - offset, storage_ix, storage);
++histo[distcode];
}
static BROTLI_INLINE void EmitLiterals(const uint8_t* input, const size_t len,
const uint8_t depth[256],
const uint16_t bits[256],
size_t* storage_ix, uint8_t* storage) {
size_t j;
for (j = 0; j < len; j++) {
const uint8_t lit = input[j];
BrotliWriteBits(depth[lit], bits[lit], storage_ix, storage);
}
}
/* REQUIRES: len <= 1 << 24. */
static void BrotliStoreMetaBlockHeader(
size_t len, BROTLI_BOOL is_uncompressed, size_t* storage_ix,
uint8_t* storage) {
size_t nibbles = 6;
/* ISLAST */
BrotliWriteBits(1, 0, storage_ix, storage);
if (len <= (1U << 16)) {
nibbles = 4;
} else if (len <= (1U << 20)) {
nibbles = 5;
}
BrotliWriteBits(2, nibbles - 4, storage_ix, storage);
BrotliWriteBits(nibbles * 4, len - 1, storage_ix, storage);
/* ISUNCOMPRESSED */
BrotliWriteBits(1, (uint64_t)is_uncompressed, storage_ix, storage);
}
static void UpdateBits(size_t n_bits, uint32_t bits, size_t pos,
uint8_t *array) {
while (n_bits > 0) {
size_t byte_pos = pos >> 3;
size_t n_unchanged_bits = pos & 7;
size_t n_changed_bits = BROTLI_MIN(size_t, n_bits, 8 - n_unchanged_bits);
size_t total_bits = n_unchanged_bits + n_changed_bits;
uint32_t mask =
(~((1u << total_bits) - 1u)) | ((1u << n_unchanged_bits) - 1u);
uint32_t unchanged_bits = array[byte_pos] & mask;
uint32_t changed_bits = bits & ((1u << n_changed_bits) - 1u);
array[byte_pos] =
(uint8_t)((changed_bits << n_unchanged_bits) | unchanged_bits);
n_bits -= n_changed_bits;
bits >>= n_changed_bits;
pos += n_changed_bits;
}
}
static void RewindBitPosition(const size_t new_storage_ix,
size_t* storage_ix, uint8_t* storage) {
const size_t bitpos = new_storage_ix & 7;
const size_t mask = (1u << bitpos) - 1;
storage[new_storage_ix >> 3] &= (uint8_t)mask;
*storage_ix = new_storage_ix;
}
static BROTLI_BOOL ShouldMergeBlock(
const uint8_t* data, size_t len, const uint8_t* depths) {
size_t histo[256] = { 0 };
static const size_t kSampleRate = 43;
size_t i;
for (i = 0; i < len; i += kSampleRate) {
++histo[data[i]];
}
{
const size_t total = (len + kSampleRate - 1) / kSampleRate;
double r = (FastLog2(total) + 0.5) * (double)total + 200;
for (i = 0; i < 256; ++i) {
r -= (double)histo[i] * (depths[i] + FastLog2(histo[i]));
}
return TO_BROTLI_BOOL(r >= 0.0);
}
}
/* Acceptable loss for uncompressible speedup is 2% */
#define MIN_RATIO 980
static BROTLI_INLINE BROTLI_BOOL ShouldUseUncompressedMode(
const uint8_t* metablock_start, const uint8_t* next_emit,
const size_t insertlen, const size_t literal_ratio) {
const size_t compressed = (size_t)(next_emit - metablock_start);
if (compressed * 50 > insertlen) {
return BROTLI_FALSE;
} else {
return TO_BROTLI_BOOL(literal_ratio > MIN_RATIO);
}
}
static void EmitUncompressedMetaBlock(const uint8_t* begin, const uint8_t* end,
const size_t storage_ix_start,
size_t* storage_ix, uint8_t* storage) {
const size_t len = (size_t)(end - begin);
RewindBitPosition(storage_ix_start, storage_ix, storage);
BrotliStoreMetaBlockHeader(len, 1, storage_ix, storage);
*storage_ix = (*storage_ix + 7u) & ~7u;
memcpy(&storage[*storage_ix >> 3], begin, len);
*storage_ix += len << 3;
storage[*storage_ix >> 3] = 0;
}
static uint32_t kCmdHistoSeed[128] = {
0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 0, 0, 0, 0,
};
static BROTLI_INLINE void BrotliCompressFragmentFastImpl(
MemoryManager* m, const uint8_t* input, size_t input_size,
BROTLI_BOOL is_last, int* table, size_t table_bits, uint8_t cmd_depth[128],
uint16_t cmd_bits[128], size_t* cmd_code_numbits, uint8_t* cmd_code,
size_t* storage_ix, uint8_t* storage) {
uint32_t cmd_histo[128];
const uint8_t* ip_end;
/* "next_emit" is a pointer to the first byte that is not covered by a
previous copy. Bytes between "next_emit" and the start of the next copy or
the end of the input will be emitted as literal bytes. */
const uint8_t* next_emit = input;
/* Save the start of the first block for position and distance computations.
*/
const uint8_t* base_ip = input;
static const size_t kFirstBlockSize = 3 << 15;
static const size_t kMergeBlockSize = 1 << 16;
const size_t kInputMarginBytes = BROTLI_WINDOW_GAP;
const size_t kMinMatchLen = 5;
const uint8_t* metablock_start = input;
size_t block_size = BROTLI_MIN(size_t, input_size, kFirstBlockSize);
size_t total_block_size = block_size;
/* Save the bit position of the MLEN field of the meta-block header, so that
we can update it later if we decide to extend this meta-block. */
size_t mlen_storage_ix = *storage_ix + 3;
uint8_t lit_depth[256];
uint16_t lit_bits[256];
size_t literal_ratio;
const uint8_t* ip;
int last_distance;
const size_t shift = 64u - table_bits;
BrotliStoreMetaBlockHeader(block_size, 0, storage_ix, storage);
/* No block splits, no contexts. */
BrotliWriteBits(13, 0, storage_ix, storage);
literal_ratio = BuildAndStoreLiteralPrefixCode(
m, input, block_size, lit_depth, lit_bits, storage_ix, storage);
if (BROTLI_IS_OOM(m)) return;
{
/* Store the pre-compressed command and distance prefix codes. */
size_t i;
for (i = 0; i + 7 < *cmd_code_numbits; i += 8) {
BrotliWriteBits(8, cmd_code[i >> 3], storage_ix, storage);
}
}
BrotliWriteBits(*cmd_code_numbits & 7, cmd_code[*cmd_code_numbits >> 3],
storage_ix, storage);
emit_commands:
/* Initialize the command and distance histograms. We will gather
statistics of command and distance codes during the processing
of this block and use it to update the command and distance
prefix codes for the next block. */
memcpy(cmd_histo, kCmdHistoSeed, sizeof(kCmdHistoSeed));
/* "ip" is the input pointer. */
ip = input;
last_distance = -1;
ip_end = input + block_size;
if (BROTLI_PREDICT_TRUE(block_size >= kInputMarginBytes)) {
/* For the last block, we need to keep a 16 bytes margin so that we can be
sure that all distances are at most window size - 16.
For all other blocks, we only need to keep a margin of 5 bytes so that
we don't go over the block size with a copy. */
const size_t len_limit = BROTLI_MIN(size_t, block_size - kMinMatchLen,
input_size - kInputMarginBytes);
const uint8_t* ip_limit = input + len_limit;
uint32_t next_hash;
for (next_hash = Hash(++ip, shift); ; ) {
/* Step 1: Scan forward in the input looking for a 5-byte-long match.
If we get close to exhausting the input then goto emit_remainder.
Heuristic match skipping: If 32 bytes are scanned with no matches
found, start looking only at every other byte. If 32 more bytes are
scanned, look at every third byte, etc.. When a match is found,
immediately go back to looking at every byte. This is a small loss
(~5% performance, ~0.1% density) for compressible data due to more
bookkeeping, but for non-compressible data (such as JPEG) it's a huge
win since the compressor quickly "realizes" the data is incompressible
and doesn't bother looking for matches everywhere.
The "skip" variable keeps track of how many bytes there are since the
last match; dividing it by 32 (i.e. right-shifting by five) gives the
number of bytes to move ahead for each iteration. */
uint32_t skip = 32;
const uint8_t* next_ip = ip;
const uint8_t* candidate;
assert(next_emit < ip);
trawl:
do {
uint32_t hash = next_hash;
uint32_t bytes_between_hash_lookups = skip++ >> 5;
assert(hash == Hash(next_ip, shift));
ip = next_ip;
next_ip = ip + bytes_between_hash_lookups;
if (BROTLI_PREDICT_FALSE(next_ip > ip_limit)) {
goto emit_remainder;
}
next_hash = Hash(next_ip, shift);
candidate = ip - last_distance;
if (IsMatch(ip, candidate)) {
if (BROTLI_PREDICT_TRUE(candidate < ip)) {
table[hash] = (int)(ip - base_ip);
break;
}
}
candidate = base_ip + table[hash];
assert(candidate >= base_ip);
assert(candidate < ip);
table[hash] = (int)(ip - base_ip);
} while (BROTLI_PREDICT_TRUE(!IsMatch(ip, candidate)));
/* Check copy distance. If candidate is not feasible, continue search.
Checking is done outside of hot loop to reduce overhead. */
if (ip - candidate > MAX_DISTANCE) goto trawl;
/* Step 2: Emit the found match together with the literal bytes from
"next_emit" to the bit stream, and then see if we can find a next match
immediately afterwards. Repeat until we find no match for the input
without emitting some literal bytes. */
{
/* We have a 5-byte match at ip, and we need to emit bytes in
[next_emit, ip). */
const uint8_t* base = ip;
size_t matched = 5 + FindMatchLengthWithLimit(
candidate + 5, ip + 5, (size_t)(ip_end - ip) - 5);
int distance = (int)(base - candidate); /* > 0 */
size_t insert = (size_t)(base - next_emit);
ip += matched;
assert(0 == memcmp(base, candidate, matched));
if (BROTLI_PREDICT_TRUE(insert < 6210)) {
EmitInsertLen(insert, cmd_depth, cmd_bits, cmd_histo,
storage_ix, storage);
} else if (ShouldUseUncompressedMode(metablock_start, next_emit, insert,
literal_ratio)) {
EmitUncompressedMetaBlock(metablock_start, base, mlen_storage_ix - 3,
storage_ix, storage);
input_size -= (size_t)(base - input);
input = base;
next_emit = input;
goto next_block;
} else {
EmitLongInsertLen(insert, cmd_depth, cmd_bits, cmd_histo,
storage_ix, storage);
}
EmitLiterals(next_emit, insert, lit_depth, lit_bits,
storage_ix, storage);
if (distance == last_distance) {
BrotliWriteBits(cmd_depth[64], cmd_bits[64], storage_ix, storage);
++cmd_histo[64];
} else {
EmitDistance((size_t)distance, cmd_depth, cmd_bits,
cmd_histo, storage_ix, storage);
last_distance = distance;
}
EmitCopyLenLastDistance(matched, cmd_depth, cmd_bits, cmd_histo,
storage_ix, storage);
next_emit = ip;
if (BROTLI_PREDICT_FALSE(ip >= ip_limit)) {
goto emit_remainder;
}
/* We could immediately start working at ip now, but to improve
compression we first update "table" with the hashes of some positions
within the last copy. */
{
uint64_t input_bytes = BROTLI_UNALIGNED_LOAD64(ip - 3);
uint32_t prev_hash = HashBytesAtOffset(input_bytes, 0, shift);
uint32_t cur_hash = HashBytesAtOffset(input_bytes, 3, shift);
table[prev_hash] = (int)(ip - base_ip - 3);
prev_hash = HashBytesAtOffset(input_bytes, 1, shift);
table[prev_hash] = (int)(ip - base_ip - 2);
prev_hash = HashBytesAtOffset(input_bytes, 2, shift);
table[prev_hash] = (int)(ip - base_ip - 1);
candidate = base_ip + table[cur_hash];
table[cur_hash] = (int)(ip - base_ip);
}
}
while (IsMatch(ip, candidate)) {
/* We have a 5-byte match at ip, and no need to emit any literal bytes
prior to ip. */
const uint8_t* base = ip;
size_t matched = 5 + FindMatchLengthWithLimit(
candidate + 5, ip + 5, (size_t)(ip_end - ip) - 5);
if (ip - candidate > MAX_DISTANCE) break;
ip += matched;
last_distance = (int)(base - candidate); /* > 0 */
assert(0 == memcmp(base, candidate, matched));
EmitCopyLen(matched, cmd_depth, cmd_bits, cmd_histo,
storage_ix, storage);
EmitDistance((size_t)last_distance, cmd_depth, cmd_bits,
cmd_histo, storage_ix, storage);
next_emit = ip;
if (BROTLI_PREDICT_FALSE(ip >= ip_limit)) {
goto emit_remainder;
}
/* We could immediately start working at ip now, but to improve
compression we first update "table" with the hashes of some positions
within the last copy. */
{
uint64_t input_bytes = BROTLI_UNALIGNED_LOAD64(ip - 3);
uint32_t prev_hash = HashBytesAtOffset(input_bytes, 0, shift);
uint32_t cur_hash = HashBytesAtOffset(input_bytes, 3, shift);
table[prev_hash] = (int)(ip - base_ip - 3);
prev_hash = HashBytesAtOffset(input_bytes, 1, shift);
table[prev_hash] = (int)(ip - base_ip - 2);
prev_hash = HashBytesAtOffset(input_bytes, 2, shift);
table[prev_hash] = (int)(ip - base_ip - 1);
candidate = base_ip + table[cur_hash];
table[cur_hash] = (int)(ip - base_ip);
}
}
next_hash = Hash(++ip, shift);
}
}
emit_remainder:
assert(next_emit <= ip_end);
input += block_size;
input_size -= block_size;
block_size = BROTLI_MIN(size_t, input_size, kMergeBlockSize);
/* Decide if we want to continue this meta-block instead of emitting the
last insert-only command. */
if (input_size > 0 &&
total_block_size + block_size <= (1 << 20) &&
ShouldMergeBlock(input, block_size, lit_depth)) {
assert(total_block_size > (1 << 16));
/* Update the size of the current meta-block and continue emitting commands.
We can do this because the current size and the new size both have 5
nibbles. */
total_block_size += block_size;
UpdateBits(20, (uint32_t)(total_block_size - 1), mlen_storage_ix, storage);
goto emit_commands;
}
/* Emit the remaining bytes as literals. */
if (next_emit < ip_end) {
const size_t insert = (size_t)(ip_end - next_emit);
if (BROTLI_PREDICT_TRUE(insert < 6210)) {
EmitInsertLen(insert, cmd_depth, cmd_bits, cmd_histo,
storage_ix, storage);
EmitLiterals(next_emit, insert, lit_depth, lit_bits, storage_ix, storage);
} else if (ShouldUseUncompressedMode(metablock_start, next_emit, insert,
literal_ratio)) {
EmitUncompressedMetaBlock(metablock_start, ip_end, mlen_storage_ix - 3,
storage_ix, storage);
} else {
EmitLongInsertLen(insert, cmd_depth, cmd_bits, cmd_histo,
storage_ix, storage);
EmitLiterals(next_emit, insert, lit_depth, lit_bits,
storage_ix, storage);
}
}
next_emit = ip_end;
next_block:
/* If we have more data, write a new meta-block header and prefix codes and
then continue emitting commands. */
if (input_size > 0) {
metablock_start = input;
block_size = BROTLI_MIN(size_t, input_size, kFirstBlockSize);
total_block_size = block_size;
/* Save the bit position of the MLEN field of the meta-block header, so that
we can update it later if we decide to extend this meta-block. */
mlen_storage_ix = *storage_ix + 3;
BrotliStoreMetaBlockHeader(block_size, 0, storage_ix, storage);
/* No block splits, no contexts. */
BrotliWriteBits(13, 0, storage_ix, storage);
literal_ratio = BuildAndStoreLiteralPrefixCode(
m, input, block_size, lit_depth, lit_bits, storage_ix, storage);
if (BROTLI_IS_OOM(m)) return;
BuildAndStoreCommandPrefixCode(cmd_histo, cmd_depth, cmd_bits,
storage_ix, storage);
goto emit_commands;
}
if (!is_last) {
/* If this is not the last block, update the command and distance prefix
codes for the next block and store the compressed forms. */
cmd_code[0] = 0;
*cmd_code_numbits = 0;
BuildAndStoreCommandPrefixCode(cmd_histo, cmd_depth, cmd_bits,
cmd_code_numbits, cmd_code);
}
}
#define FOR_TABLE_BITS_(X) X(9) X(11) X(13) X(15)
#define BAKE_METHOD_PARAM_(B) \
static BROTLI_NOINLINE void BrotliCompressFragmentFastImpl ## B( \
MemoryManager* m, const uint8_t* input, size_t input_size, \
BROTLI_BOOL is_last, int* table, uint8_t cmd_depth[128], \
uint16_t cmd_bits[128], size_t* cmd_code_numbits, uint8_t* cmd_code, \
size_t* storage_ix, uint8_t* storage) { \
BrotliCompressFragmentFastImpl(m, input, input_size, is_last, table, B, \
cmd_depth, cmd_bits, cmd_code_numbits, cmd_code, storage_ix, storage); \
}
FOR_TABLE_BITS_(BAKE_METHOD_PARAM_)
#undef BAKE_METHOD_PARAM_
void BrotliCompressFragmentFast(
MemoryManager* m, const uint8_t* input, size_t input_size,
BROTLI_BOOL is_last, int* table, size_t table_size, uint8_t cmd_depth[128],
uint16_t cmd_bits[128], size_t* cmd_code_numbits, uint8_t* cmd_code,
size_t* storage_ix, uint8_t* storage) {
const size_t initial_storage_ix = *storage_ix;
const size_t table_bits = Log2FloorNonZero(table_size);
if (input_size == 0) {
assert(is_last);
BrotliWriteBits(1, 1, storage_ix, storage); /* islast */
BrotliWriteBits(1, 1, storage_ix, storage); /* isempty */
*storage_ix = (*storage_ix + 7u) & ~7u;
return;
}
switch (table_bits) {
#define CASE_(B) \
case B: \
BrotliCompressFragmentFastImpl ## B( \
m, input, input_size, is_last, table, cmd_depth, cmd_bits, \
cmd_code_numbits, cmd_code, storage_ix, storage); \
break;
FOR_TABLE_BITS_(CASE_)
#undef CASE_
default: assert(0); break;
}
/* If output is larger than single uncompressed block, rewrite it. */
if (*storage_ix - initial_storage_ix > 31 + (input_size << 3)) {
EmitUncompressedMetaBlock(input, input + input_size, initial_storage_ix,
storage_ix, storage);
}
if (is_last) {
BrotliWriteBits(1, 1, storage_ix, storage); /* islast */
BrotliWriteBits(1, 1, storage_ix, storage); /* isempty */
*storage_ix = (*storage_ix + 7u) & ~7u;
}
}
#undef FOR_TABLE_BITS_
#if defined(__cplusplus) || defined(c_plusplus)
} /* extern "C" */
#endif
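
The FOR_TABLE_BITS_ / BAKE_METHOD_PARAM_ block above bakes each supported hash-table size into its own specialized function, so the bit count is a compile-time constant inside the hot path, and a switch dispatches to the matching specialization at runtime. A minimal standalone sketch of that X-macro pattern; the names and the hash itself are illustrative, not brotli's:

/* cc -O2 bake.c && ./a.out */
#include <stdio.h>

static inline unsigned hash_impl(unsigned x, unsigned bits) {
  return (x * 2654435761u) >> (32 - bits);  /* multiplicative hash */
}

#define FOR_BITS_(X) X(9) X(11) X(13) X(15)
#define BAKE_(B) \
  static unsigned hash_##B(unsigned x) { return hash_impl(x, B); }
FOR_BITS_(BAKE_)
#undef BAKE_

static unsigned hash_dispatch(unsigned x, unsigned bits) {
  switch (bits) {
#define CASE_(B) case B: return hash_##B(x);
    FOR_BITS_(CASE_)
#undef CASE_
    default: return hash_impl(x, bits);
  }
}

int main(void) {
  printf("%u\n", hash_dispatch(123456789u, 13));
  return 0;
}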


@ -1,612 +0,0 @@
/* Copyright 2015 Google Inc. All Rights Reserved.
Distributed under MIT license.
See file LICENSE for detail or copy at https://opensource.org/licenses/MIT
*/
/* Function for fast encoding of an input fragment, independently from the input
history. This function uses two-pass processing: in the first pass we save
the found backward matches and literal bytes into a buffer, and in the
second pass we emit them into the bit stream using prefix codes built based
on the actual command and literal byte histograms. */
#include "./enc/compress_fragment_two_pass.h"
#include <string.h> /* memcmp, memcpy, memset */
#include "./common/constants.h"
#include <brotli/types.h>
#include "./enc/bit_cost.h"
#include "./enc/brotli_bit_stream.h"
#include "./enc/entropy_encode.h"
#include "./enc/fast_log.h"
#include "./enc/find_match_length.h"
#include "./enc/memory.h"
#include "./enc/port.h"
#include "./enc/write_bits.h"
#if defined(__cplusplus) || defined(c_plusplus)
extern "C" {
#endif
#define MAX_DISTANCE (long)BROTLI_MAX_BACKWARD_LIMIT(18)
/* kHashMul32 multiplier has these properties:
* The multiplier must be odd. Otherwise we may lose the highest bit.
* No long streaks of ones or zeros.
* There is no effort to ensure that it is a prime, the oddity is enough
for this use.
* The number has been tuned heuristically against compression benchmarks. */
static const uint32_t kHashMul32 = 0x1e35a7bd;
static BROTLI_INLINE uint32_t Hash(const uint8_t* p, size_t shift) {
const uint64_t h = (BROTLI_UNALIGNED_LOAD64(p) << 16) * kHashMul32;
return (uint32_t)(h >> shift);
}
static BROTLI_INLINE uint32_t HashBytesAtOffset(
uint64_t v, int offset, size_t shift) {
assert(offset >= 0);
assert(offset <= 2);
{
const uint64_t h = ((v >> (8 * offset)) << 16) * kHashMul32;
return (uint32_t)(h >> shift);
}
}
static BROTLI_INLINE BROTLI_BOOL IsMatch(const uint8_t* p1, const uint8_t* p2) {
return TO_BROTLI_BOOL(
BROTLI_UNALIGNED_LOAD32(p1) == BROTLI_UNALIGNED_LOAD32(p2) &&
p1[4] == p2[4] &&
p1[5] == p2[5]);
}
/* Builds a command and distance prefix code (each 64 symbols) into "depth" and
"bits" based on "histogram" and stores it into the bit stream. */
static void BuildAndStoreCommandPrefixCode(
const uint32_t histogram[128],
uint8_t depth[128], uint16_t bits[128],
size_t* storage_ix, uint8_t* storage) {
/* Tree size for building a tree over 64 symbols is 2 * 64 + 1. */
HuffmanTree tree[129];
uint8_t cmd_depth[BROTLI_NUM_COMMAND_SYMBOLS] = { 0 };
uint16_t cmd_bits[64];
BrotliCreateHuffmanTree(histogram, 64, 15, tree, depth);
BrotliCreateHuffmanTree(&histogram[64], 64, 14, tree, &depth[64]);
/* We have to jump through a few hoops here in order to compute
the command bits because the symbols are in a different order than in
the full alphabet. This looks complicated, but having the symbols
in this order in the command bits saves a few branches in the Emit*
functions. */
memcpy(cmd_depth, depth + 24, 24);
memcpy(cmd_depth + 24, depth, 8);
memcpy(cmd_depth + 32, depth + 48, 8);
memcpy(cmd_depth + 40, depth + 8, 8);
memcpy(cmd_depth + 48, depth + 56, 8);
memcpy(cmd_depth + 56, depth + 16, 8);
BrotliConvertBitDepthsToSymbols(cmd_depth, 64, cmd_bits);
memcpy(bits, cmd_bits + 24, 16);
memcpy(bits + 8, cmd_bits + 40, 16);
memcpy(bits + 16, cmd_bits + 56, 16);
memcpy(bits + 24, cmd_bits, 48);
memcpy(bits + 48, cmd_bits + 32, 16);
memcpy(bits + 56, cmd_bits + 48, 16);
BrotliConvertBitDepthsToSymbols(&depth[64], 64, &bits[64]);
{
/* Create the bit length array for the full command alphabet. */
size_t i;
memset(cmd_depth, 0, 64); /* only 64 first values were used */
memcpy(cmd_depth, depth + 24, 8);
memcpy(cmd_depth + 64, depth + 32, 8);
memcpy(cmd_depth + 128, depth + 40, 8);
memcpy(cmd_depth + 192, depth + 48, 8);
memcpy(cmd_depth + 384, depth + 56, 8);
for (i = 0; i < 8; ++i) {
cmd_depth[128 + 8 * i] = depth[i];
cmd_depth[256 + 8 * i] = depth[8 + i];
cmd_depth[448 + 8 * i] = depth[16 + i];
}
BrotliStoreHuffmanTree(
cmd_depth, BROTLI_NUM_COMMAND_SYMBOLS, tree, storage_ix, storage);
}
BrotliStoreHuffmanTree(&depth[64], 64, tree, storage_ix, storage);
}
static BROTLI_INLINE void EmitInsertLen(
uint32_t insertlen, uint32_t** commands) {
if (insertlen < 6) {
**commands = insertlen;
} else if (insertlen < 130) {
const uint32_t tail = insertlen - 2;
const uint32_t nbits = Log2FloorNonZero(tail) - 1u;
const uint32_t prefix = tail >> nbits;
const uint32_t inscode = (nbits << 1) + prefix + 2;
const uint32_t extra = tail - (prefix << nbits);
**commands = inscode | (extra << 8);
} else if (insertlen < 2114) {
const uint32_t tail = insertlen - 66;
const uint32_t nbits = Log2FloorNonZero(tail);
const uint32_t code = nbits + 10;
const uint32_t extra = tail - (1u << nbits);
**commands = code | (extra << 8);
} else if (insertlen < 6210) {
const uint32_t extra = insertlen - 2114;
**commands = 21 | (extra << 8);
} else if (insertlen < 22594) {
const uint32_t extra = insertlen - 6210;
**commands = 22 | (extra << 8);
} else {
const uint32_t extra = insertlen - 22594;
**commands = 23 | (extra << 8);
}
++(*commands);
}
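/* Worked example of the encoding above: insertlen = 1000 falls in the
   [130, 2114) branch, so tail = 934, nbits = Log2FloorNonZero(934) = 9,
   code = 19 and extra = 934 - 512 = 422. The command word is therefore
   19 | (422 << 8), and StoreCommands() later recovers the length as
   kInsertOffset[19] + extra = 578 + 422 = 1000. */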
static BROTLI_INLINE void EmitCopyLen(size_t copylen, uint32_t** commands) {
if (copylen < 10) {
**commands = (uint32_t)(copylen + 38);
} else if (copylen < 134) {
const size_t tail = copylen - 6;
const size_t nbits = Log2FloorNonZero(tail) - 1;
const size_t prefix = tail >> nbits;
const size_t code = (nbits << 1) + prefix + 44;
const size_t extra = tail - (prefix << nbits);
**commands = (uint32_t)(code | (extra << 8));
} else if (copylen < 2118) {
const size_t tail = copylen - 70;
const size_t nbits = Log2FloorNonZero(tail);
const size_t code = nbits + 52;
const size_t extra = tail - ((size_t)1 << nbits);
**commands = (uint32_t)(code | (extra << 8));
} else {
const size_t extra = copylen - 2118;
**commands = (uint32_t)(63 | (extra << 8));
}
++(*commands);
}
static BROTLI_INLINE void EmitCopyLenLastDistance(
size_t copylen, uint32_t** commands) {
if (copylen < 12) {
**commands = (uint32_t)(copylen + 20);
++(*commands);
} else if (copylen < 72) {
const size_t tail = copylen - 8;
const size_t nbits = Log2FloorNonZero(tail) - 1;
const size_t prefix = tail >> nbits;
const size_t code = (nbits << 1) + prefix + 28;
const size_t extra = tail - (prefix << nbits);
**commands = (uint32_t)(code | (extra << 8));
++(*commands);
} else if (copylen < 136) {
const size_t tail = copylen - 8;
const size_t code = (tail >> 5) + 54;
const size_t extra = tail & 31;
**commands = (uint32_t)(code | (extra << 8));
++(*commands);
**commands = 64;
++(*commands);
} else if (copylen < 2120) {
const size_t tail = copylen - 72;
const size_t nbits = Log2FloorNonZero(tail);
const size_t code = nbits + 52;
const size_t extra = tail - ((size_t)1 << nbits);
**commands = (uint32_t)(code | (extra << 8));
++(*commands);
**commands = 64;
++(*commands);
} else {
const size_t extra = copylen - 2120;
**commands = (uint32_t)(63 | (extra << 8));
++(*commands);
**commands = 64;
++(*commands);
}
}
static BROTLI_INLINE void EmitDistance(uint32_t distance, uint32_t** commands) {
uint32_t d = distance + 3;
uint32_t nbits = Log2FloorNonZero(d) - 1;
const uint32_t prefix = (d >> nbits) & 1;
const uint32_t offset = (2 + prefix) << nbits;
const uint32_t distcode = 2 * (nbits - 1) + prefix + 80;
uint32_t extra = d - offset;
**commands = distcode | (extra << 8);
++(*commands);
}
/* REQUIRES: len <= 1 << 24. */
static void BrotliStoreMetaBlockHeader(
size_t len, BROTLI_BOOL is_uncompressed, size_t* storage_ix,
uint8_t* storage) {
size_t nibbles = 6;
/* ISLAST */
BrotliWriteBits(1, 0, storage_ix, storage);
if (len <= (1U << 16)) {
nibbles = 4;
} else if (len <= (1U << 20)) {
nibbles = 5;
}
BrotliWriteBits(2, nibbles - 4, storage_ix, storage);
BrotliWriteBits(nibbles * 4, len - 1, storage_ix, storage);
/* ISUNCOMPRESSED */
BrotliWriteBits(1, (uint64_t)is_uncompressed, storage_ix, storage);
}
static BROTLI_INLINE void CreateCommands(const uint8_t* input,
size_t block_size, size_t input_size, const uint8_t* base_ip, int* table,
size_t table_bits, uint8_t** literals, uint32_t** commands) {
/* "ip" is the input pointer. */
const uint8_t* ip = input;
const size_t shift = 64u - table_bits;
const uint8_t* ip_end = input + block_size;
/* "next_emit" is a pointer to the first byte that is not covered by a
previous copy. Bytes between "next_emit" and the start of the next copy or
the end of the input will be emitted as literal bytes. */
const uint8_t* next_emit = input;
int last_distance = -1;
const size_t kInputMarginBytes = BROTLI_WINDOW_GAP;
const size_t kMinMatchLen = 6;
if (BROTLI_PREDICT_TRUE(block_size >= kInputMarginBytes)) {
/* For the last block, we need to keep a 16 bytes margin so that we can be
sure that all distances are at most window size - 16.
For all other blocks, we only need to keep a margin of 5 bytes so that
we don't go over the block size with a copy. */
const size_t len_limit = BROTLI_MIN(size_t, block_size - kMinMatchLen,
input_size - kInputMarginBytes);
const uint8_t* ip_limit = input + len_limit;
uint32_t next_hash;
for (next_hash = Hash(++ip, shift); ; ) {
/* Step 1: Scan forward in the input looking for a 6-byte-long match.
If we get close to exhausting the input then goto emit_remainder.
Heuristic match skipping: If 32 bytes are scanned with no matches
found, start looking only at every other byte. If 32 more bytes are
scanned, look at every third byte, etc.. When a match is found,
immediately go back to looking at every byte. This is a small loss
(~5% performance, ~0.1% density) for compressible data due to more
bookkeeping, but for non-compressible data (such as JPEG) it's a huge
win since the compressor quickly "realizes" the data is incompressible
and doesn't bother looking for matches everywhere.
The "skip" variable keeps track of how many bytes there are since the
last match; dividing it by 32 (i.e. right-shifting by five) gives the
number of bytes to move ahead for each iteration. */
uint32_t skip = 32;
const uint8_t* next_ip = ip;
const uint8_t* candidate;
assert(next_emit < ip);
trawl:
do {
uint32_t hash = next_hash;
uint32_t bytes_between_hash_lookups = skip++ >> 5;
ip = next_ip;
assert(hash == Hash(ip, shift));
next_ip = ip + bytes_between_hash_lookups;
if (BROTLI_PREDICT_FALSE(next_ip > ip_limit)) {
goto emit_remainder;
}
next_hash = Hash(next_ip, shift);
candidate = ip - last_distance;
if (IsMatch(ip, candidate)) {
if (BROTLI_PREDICT_TRUE(candidate < ip)) {
table[hash] = (int)(ip - base_ip);
break;
}
}
candidate = base_ip + table[hash];
assert(candidate >= base_ip);
assert(candidate < ip);
table[hash] = (int)(ip - base_ip);
} while (BROTLI_PREDICT_TRUE(!IsMatch(ip, candidate)));
/* Check copy distance. If candidate is not feasible, continue search.
Checking is done outside of hot loop to reduce overhead. */
if (ip - candidate > MAX_DISTANCE) goto trawl;
/* Step 2: Emit the found match together with the literal bytes from
"next_emit", and then see if we can find a next match immediately
afterwards. Repeat until we find no match for the input
without emitting some literal bytes. */
{
/* We have a 6-byte match at ip, and we need to emit bytes in
[next_emit, ip). */
const uint8_t* base = ip;
size_t matched = 6 + FindMatchLengthWithLimit(
candidate + 6, ip + 6, (size_t)(ip_end - ip) - 6);
int distance = (int)(base - candidate); /* > 0 */
int insert = (int)(base - next_emit);
ip += matched;
assert(0 == memcmp(base, candidate, matched));
EmitInsertLen((uint32_t)insert, commands);
memcpy(*literals, next_emit, (size_t)insert);
*literals += insert;
if (distance == last_distance) {
**commands = 64;
++(*commands);
} else {
EmitDistance((uint32_t)distance, commands);
last_distance = distance;
}
EmitCopyLenLastDistance(matched, commands);
next_emit = ip;
if (BROTLI_PREDICT_FALSE(ip >= ip_limit)) {
goto emit_remainder;
}
{
/* We could immediately start working at ip now, but to improve
compression we first update "table" with the hashes of some
positions within the last copy. */
uint64_t input_bytes = BROTLI_UNALIGNED_LOAD64(ip - 5);
uint32_t prev_hash = HashBytesAtOffset(input_bytes, 0, shift);
uint32_t cur_hash;
table[prev_hash] = (int)(ip - base_ip - 5);
prev_hash = HashBytesAtOffset(input_bytes, 1, shift);
table[prev_hash] = (int)(ip - base_ip - 4);
prev_hash = HashBytesAtOffset(input_bytes, 2, shift);
table[prev_hash] = (int)(ip - base_ip - 3);
input_bytes = BROTLI_UNALIGNED_LOAD64(ip - 2);
cur_hash = HashBytesAtOffset(input_bytes, 2, shift);
prev_hash = HashBytesAtOffset(input_bytes, 0, shift);
table[prev_hash] = (int)(ip - base_ip - 2);
prev_hash = HashBytesAtOffset(input_bytes, 1, shift);
table[prev_hash] = (int)(ip - base_ip - 1);
candidate = base_ip + table[cur_hash];
table[cur_hash] = (int)(ip - base_ip);
}
}
while (ip - candidate <= MAX_DISTANCE && IsMatch(ip, candidate)) {
/* We have a 6-byte match at ip, and no need to emit any
literal bytes prior to ip. */
const uint8_t* base = ip;
size_t matched = 6 + FindMatchLengthWithLimit(
candidate + 6, ip + 6, (size_t)(ip_end - ip) - 6);
ip += matched;
last_distance = (int)(base - candidate); /* > 0 */
assert(0 == memcmp(base, candidate, matched));
EmitCopyLen(matched, commands);
EmitDistance((uint32_t)last_distance, commands);
next_emit = ip;
if (BROTLI_PREDICT_FALSE(ip >= ip_limit)) {
goto emit_remainder;
}
{
/* We could immediately start working at ip now, but to improve
compression we first update "table" with the hashes of some
positions within the last copy. */
uint64_t input_bytes = BROTLI_UNALIGNED_LOAD64(ip - 5);
uint32_t prev_hash = HashBytesAtOffset(input_bytes, 0, shift);
uint32_t cur_hash;
table[prev_hash] = (int)(ip - base_ip - 5);
prev_hash = HashBytesAtOffset(input_bytes, 1, shift);
table[prev_hash] = (int)(ip - base_ip - 4);
prev_hash = HashBytesAtOffset(input_bytes, 2, shift);
table[prev_hash] = (int)(ip - base_ip - 3);
input_bytes = BROTLI_UNALIGNED_LOAD64(ip - 2);
cur_hash = HashBytesAtOffset(input_bytes, 2, shift);
prev_hash = HashBytesAtOffset(input_bytes, 0, shift);
table[prev_hash] = (int)(ip - base_ip - 2);
prev_hash = HashBytesAtOffset(input_bytes, 1, shift);
table[prev_hash] = (int)(ip - base_ip - 1);
candidate = base_ip + table[cur_hash];
table[cur_hash] = (int)(ip - base_ip);
}
}
next_hash = Hash(++ip, shift);
}
}
emit_remainder:
assert(next_emit <= ip_end);
/* Emit the remaining bytes as literals. */
if (next_emit < ip_end) {
const uint32_t insert = (uint32_t)(ip_end - next_emit);
EmitInsertLen(insert, commands);
memcpy(*literals, next_emit, insert);
*literals += insert;
}
}
static void StoreCommands(MemoryManager* m,
const uint8_t* literals, const size_t num_literals,
const uint32_t* commands, const size_t num_commands,
size_t* storage_ix, uint8_t* storage) {
static const uint32_t kNumExtraBits[128] = {
0, 0, 0, 0, 0, 0, 1, 1, 2, 2, 3, 3, 4, 4, 5, 5, 6, 7, 8, 9, 10, 12, 14, 24,
0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 2, 2, 3, 3, 4, 4,
0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 2, 2, 3, 3, 4, 4, 5, 5, 6, 7, 8, 9, 10, 24,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
1, 1, 2, 2, 3, 3, 4, 4, 5, 5, 6, 6, 7, 7, 8, 8,
9, 9, 10, 10, 11, 11, 12, 12, 13, 13, 14, 14, 15, 15, 16, 16,
17, 17, 18, 18, 19, 19, 20, 20, 21, 21, 22, 22, 23, 23, 24, 24,
};
static const uint32_t kInsertOffset[24] = {
0, 1, 2, 3, 4, 5, 6, 8, 10, 14, 18, 26, 34, 50, 66, 98, 130, 194, 322, 578,
1090, 2114, 6210, 22594,
};
uint8_t lit_depths[256];
uint16_t lit_bits[256];
uint32_t lit_histo[256] = { 0 };
uint8_t cmd_depths[128] = { 0 };
uint16_t cmd_bits[128] = { 0 };
uint32_t cmd_histo[128] = { 0 };
size_t i;
for (i = 0; i < num_literals; ++i) {
++lit_histo[literals[i]];
}
BrotliBuildAndStoreHuffmanTreeFast(m, lit_histo, num_literals,
/* max_bits = */ 8,
lit_depths, lit_bits,
storage_ix, storage);
if (BROTLI_IS_OOM(m)) return;
for (i = 0; i < num_commands; ++i) {
const uint32_t code = commands[i] & 0xFF;
assert(code < 128);
++cmd_histo[code];
}
cmd_histo[1] += 1;
cmd_histo[2] += 1;
cmd_histo[64] += 1;
cmd_histo[84] += 1;
BuildAndStoreCommandPrefixCode(cmd_histo, cmd_depths, cmd_bits,
storage_ix, storage);
for (i = 0; i < num_commands; ++i) {
const uint32_t cmd = commands[i];
const uint32_t code = cmd & 0xFF;
const uint32_t extra = cmd >> 8;
assert(code < 128);
BrotliWriteBits(cmd_depths[code], cmd_bits[code], storage_ix, storage);
BrotliWriteBits(kNumExtraBits[code], extra, storage_ix, storage);
if (code < 24) {
const uint32_t insert = kInsertOffset[code] + extra;
uint32_t j;
for (j = 0; j < insert; ++j) {
const uint8_t lit = *literals;
BrotliWriteBits(lit_depths[lit], lit_bits[lit], storage_ix, storage);
++literals;
}
}
}
}
/* Acceptable loss for uncompressible speedup is 2% */
#define MIN_RATIO 0.98
#define SAMPLE_RATE 43
static BROTLI_BOOL ShouldCompress(
const uint8_t* input, size_t input_size, size_t num_literals) {
double corpus_size = (double)input_size;
if (num_literals < MIN_RATIO * corpus_size) {
return BROTLI_TRUE;
} else {
uint32_t literal_histo[256] = { 0 };
const double max_total_bit_cost = corpus_size * 8 * MIN_RATIO / SAMPLE_RATE;
size_t i;
for (i = 0; i < input_size; i += SAMPLE_RATE) {
++literal_histo[input[i]];
}
return TO_BROTLI_BOOL(BitsEntropy(literal_histo, 256) < max_total_bit_cost);
}
}
static void RewindBitPosition(const size_t new_storage_ix,
size_t* storage_ix, uint8_t* storage) {
const size_t bitpos = new_storage_ix & 7;
const size_t mask = (1u << bitpos) - 1;
storage[new_storage_ix >> 3] &= (uint8_t)mask;
*storage_ix = new_storage_ix;
}
static void EmitUncompressedMetaBlock(const uint8_t* input, size_t input_size,
size_t* storage_ix, uint8_t* storage) {
BrotliStoreMetaBlockHeader(input_size, 1, storage_ix, storage);
*storage_ix = (*storage_ix + 7u) & ~7u;
memcpy(&storage[*storage_ix >> 3], input, input_size);
*storage_ix += input_size << 3;
storage[*storage_ix >> 3] = 0;
}
static BROTLI_INLINE void BrotliCompressFragmentTwoPassImpl(
MemoryManager* m, const uint8_t* input, size_t input_size,
BROTLI_BOOL is_last, uint32_t* command_buf, uint8_t* literal_buf,
int* table, size_t table_bits, size_t* storage_ix, uint8_t* storage) {
/* Save the start of the first block for position and distance computations.
*/
const uint8_t* base_ip = input;
BROTLI_UNUSED(is_last);
while (input_size > 0) {
size_t block_size =
BROTLI_MIN(size_t, input_size, kCompressFragmentTwoPassBlockSize);
uint32_t* commands = command_buf;
uint8_t* literals = literal_buf;
size_t num_literals;
CreateCommands(input, block_size, input_size, base_ip, table, table_bits,
&literals, &commands);
num_literals = (size_t)(literals - literal_buf);
if (ShouldCompress(input, block_size, num_literals)) {
const size_t num_commands = (size_t)(commands - command_buf);
BrotliStoreMetaBlockHeader(block_size, 0, storage_ix, storage);
/* No block splits, no contexts. */
BrotliWriteBits(13, 0, storage_ix, storage);
StoreCommands(m, literal_buf, num_literals, command_buf, num_commands,
storage_ix, storage);
if (BROTLI_IS_OOM(m)) return;
} else {
/* Since we did not find many backward references and the entropy of
the data is close to 8 bits, we can simply emit an uncompressed block.
This makes compression speed of uncompressible data about 3x faster. */
EmitUncompressedMetaBlock(input, block_size, storage_ix, storage);
}
input += block_size;
input_size -= block_size;
}
}
#define FOR_TABLE_BITS_(X) \
X(8) X(9) X(10) X(11) X(12) X(13) X(14) X(15) X(16) X(17)
#define BAKE_METHOD_PARAM_(B) \
static BROTLI_NOINLINE void BrotliCompressFragmentTwoPassImpl ## B( \
MemoryManager* m, const uint8_t* input, size_t input_size, \
BROTLI_BOOL is_last, uint32_t* command_buf, uint8_t* literal_buf, \
int* table, size_t* storage_ix, uint8_t* storage) { \
BrotliCompressFragmentTwoPassImpl(m, input, input_size, is_last, command_buf,\
literal_buf, table, B, storage_ix, storage); \
}
FOR_TABLE_BITS_(BAKE_METHOD_PARAM_)
#undef BAKE_METHOD_PARAM_
void BrotliCompressFragmentTwoPass(
MemoryManager* m, const uint8_t* input, size_t input_size,
BROTLI_BOOL is_last, uint32_t* command_buf, uint8_t* literal_buf,
int* table, size_t table_size, size_t* storage_ix, uint8_t* storage) {
const size_t initial_storage_ix = *storage_ix;
const size_t table_bits = Log2FloorNonZero(table_size);
switch (table_bits) {
#define CASE_(B) \
case B: \
BrotliCompressFragmentTwoPassImpl ## B( \
m, input, input_size, is_last, command_buf, \
literal_buf, table, storage_ix, storage); \
break;
FOR_TABLE_BITS_(CASE_)
#undef CASE_
default: assert(0); break;
}
/* If output is larger than single uncompressed block, rewrite it. */
if (*storage_ix - initial_storage_ix > 31 + (input_size << 3)) {
RewindBitPosition(initial_storage_ix, storage_ix, storage);
EmitUncompressedMetaBlock(input, input_size, storage_ix, storage);
}
if (is_last) {
BrotliWriteBits(1, 1, storage_ix, storage); /* islast */
BrotliWriteBits(1, 1, storage_ix, storage); /* isempty */
*storage_ix = (*storage_ix + 7u) & ~7u;
}
}
#undef FOR_TABLE_BITS_
#if defined(__cplusplus) || defined(c_plusplus)
} /* extern "C" */
#endif
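
ShouldCompress() above chooses between real compression and an uncompressed meta-block by sampling one literal out of every SAMPLE_RATE bytes and comparing an entropy estimate against a bit budget. A self-contained sketch of that heuristic follows; BitsEntropySketch is a plain Shannon estimate standing in for brotli's BitsEntropy, and the buffers and rand() use in main are illustrative only:

/* cc -O2 sketch.c -lm && ./a.out */
#include <math.h>
#include <stddef.h>
#include <stdint.h>
#include <stdio.h>
#include <stdlib.h>

#define MIN_RATIO 0.98
#define SAMPLE_RATE 43

/* Plain Shannon entropy, in bits, of the sampled histogram. */
static double BitsEntropySketch(const uint32_t histo[256]) {
  double total = 0.0, bits = 0.0;
  int i;
  for (i = 0; i < 256; ++i) total += histo[i];
  if (total == 0.0) return 0.0;
  for (i = 0; i < 256; ++i) {
    if (histo[i]) bits -= histo[i] * log2(histo[i] / total);
  }
  return bits;
}

/* Compress only if the sampled entropy stays under MIN_RATIO of the raw
   size, scaled down by the sampling rate. */
static int should_compress(const uint8_t* input, size_t input_size) {
  uint32_t histo[256] = { 0 };
  const double budget = (double)input_size * 8 * MIN_RATIO / SAMPLE_RATE;
  size_t i;
  for (i = 0; i < input_size; i += SAMPLE_RATE) ++histo[input[i]];
  return BitsEntropySketch(histo) < budget;
}

int main(void) {
  enum { N = 1 << 20 };
  static uint8_t text[N], noise[N];
  size_t i;
  for (i = 0; i < N; ++i) {
    text[i] = (uint8_t)"the quick brown fox "[i % 20];
    noise[i] = (uint8_t)(rand() & 0xFF);
  }
  /* Repetitive text is worth compressing; uniform noise usually is not. */
  printf("text: %d  noise: %d\n",
         should_compress(text, N), should_compress(noise, N));
  return 0;
}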


@ -1,360 +0,0 @@
/* Copyright 2013 Google Inc. All Rights Reserved.
Distributed under MIT license.
See file LICENSE for detail or copy at https://opensource.org/licenses/MIT
*/
/* Bit reading helpers */
#ifndef BROTLI_DEC_BIT_READER_H_
#define BROTLI_DEC_BIT_READER_H_
#include <string.h> /* memcpy */
#include <brotli/types.h>
#include "./port.h"
#if defined(__cplusplus) || defined(c_plusplus)
extern "C" {
#endif
#define BROTLI_SHORT_FILL_BIT_WINDOW_READ (sizeof(reg_t) >> 1)
static const uint32_t kBitMask[33] = { 0x0000,
0x00000001, 0x00000003, 0x00000007, 0x0000000F,
0x0000001F, 0x0000003F, 0x0000007F, 0x000000FF,
0x000001FF, 0x000003FF, 0x000007FF, 0x00000FFF,
0x00001FFF, 0x00003FFF, 0x00007FFF, 0x0000FFFF,
0x0001FFFF, 0x0003FFFF, 0x0007FFFF, 0x000FFFFF,
0x001FFFFF, 0x003FFFFF, 0x007FFFFF, 0x00FFFFFF,
0x01FFFFFF, 0x03FFFFFF, 0x07FFFFFF, 0x0FFFFFFF,
0x1FFFFFFF, 0x3FFFFFFF, 0x7FFFFFFF, 0xFFFFFFFF
};
static BROTLI_INLINE uint32_t BitMask(uint32_t n) {
if (IS_CONSTANT(n) || BROTLI_HAS_UBFX) {
/* Masking with this expression turns to a single
"Unsigned Bit Field Extract" UBFX instruction on ARM. */
return ~((0xffffffffU) << n);
} else {
return kBitMask[n];
}
}
typedef struct {
reg_t val_; /* pre-fetched bits */
uint32_t bit_pos_; /* current bit-reading position in val_ */
const uint8_t* next_in; /* the byte we're reading from */
size_t avail_in;
} BrotliBitReader;
typedef struct {
reg_t val_;
uint32_t bit_pos_;
const uint8_t* next_in;
size_t avail_in;
} BrotliBitReaderState;
/* Initializes the BrotliBitReader fields. */
BROTLI_INTERNAL void BrotliInitBitReader(BrotliBitReader* const br);
/* Ensures that accumulator is not empty. May consume one byte of input.
Returns 0 if data is required but there is no input available.
For BROTLI_ALIGNED_READ this function also prepares bit reader for aligned
reading. */
BROTLI_INTERNAL BROTLI_BOOL BrotliWarmupBitReader(BrotliBitReader* const br);
static BROTLI_INLINE void BrotliBitReaderSaveState(
BrotliBitReader* const from, BrotliBitReaderState* to) {
to->val_ = from->val_;
to->bit_pos_ = from->bit_pos_;
to->next_in = from->next_in;
to->avail_in = from->avail_in;
}
static BROTLI_INLINE void BrotliBitReaderRestoreState(
BrotliBitReader* const to, BrotliBitReaderState* from) {
to->val_ = from->val_;
to->bit_pos_ = from->bit_pos_;
to->next_in = from->next_in;
to->avail_in = from->avail_in;
}
static BROTLI_INLINE uint32_t BrotliGetAvailableBits(
const BrotliBitReader* br) {
return (BROTLI_64_BITS ? 64 : 32) - br->bit_pos_;
}
/* Returns amount of unread bytes the bit reader still has buffered from the
BrotliInput, including whole bytes in br->val_. */
static BROTLI_INLINE size_t BrotliGetRemainingBytes(BrotliBitReader* br) {
return br->avail_in + (BrotliGetAvailableBits(br) >> 3);
}
/* Checks if there is at least |num| bytes left in the input ring-buffer
(excluding the bits remaining in br->val_). */
static BROTLI_INLINE BROTLI_BOOL BrotliCheckInputAmount(
BrotliBitReader* const br, size_t num) {
return TO_BROTLI_BOOL(br->avail_in >= num);
}
static BROTLI_INLINE uint16_t BrotliLoad16LE(const uint8_t* in) {
if (BROTLI_LITTLE_ENDIAN) {
return *((const uint16_t*)in);
} else if (BROTLI_BIG_ENDIAN) {
uint16_t value = *((const uint16_t*)in);
return (uint16_t)(((value & 0xFFU) << 8) | ((value & 0xFF00U) >> 8));
} else {
return (uint16_t)(in[0] | (in[1] << 8));
}
}
static BROTLI_INLINE uint32_t BrotliLoad32LE(const uint8_t* in) {
if (BROTLI_LITTLE_ENDIAN) {
return *((const uint32_t*)in);
} else if (BROTLI_BIG_ENDIAN) {
uint32_t value = *((const uint32_t*)in);
return ((value & 0xFFU) << 24) | ((value & 0xFF00U) << 8) |
((value & 0xFF0000U) >> 8) | ((value & 0xFF000000U) >> 24);
} else {
uint32_t value = (uint32_t)(*(in++));
value |= (uint32_t)(*(in++)) << 8;
value |= (uint32_t)(*(in++)) << 16;
value |= (uint32_t)(*(in++)) << 24;
return value;
}
}
#if (BROTLI_64_BITS)
static BROTLI_INLINE uint64_t BrotliLoad64LE(const uint8_t* in) {
if (BROTLI_LITTLE_ENDIAN) {
return *((const uint64_t*)in);
} else if (BROTLI_BIG_ENDIAN) {
uint64_t value = *((const uint64_t*)in);
return
((value & 0xFFU) << 56) |
((value & 0xFF00U) << 40) |
((value & 0xFF0000U) << 24) |
((value & 0xFF000000U) << 8) |
((value & 0xFF00000000U) >> 8) |
((value & 0xFF0000000000U) >> 24) |
((value & 0xFF000000000000U) >> 40) |
((value & 0xFF00000000000000U) >> 56);
} else {
uint64_t value = (uint64_t)(*(in++));
value |= (uint64_t)(*(in++)) << 8;
value |= (uint64_t)(*(in++)) << 16;
value |= (uint64_t)(*(in++)) << 24;
value |= (uint64_t)(*(in++)) << 32;
value |= (uint64_t)(*(in++)) << 40;
value |= (uint64_t)(*(in++)) << 48;
value |= (uint64_t)(*(in++)) << 56;
return value;
}
}
#endif
/* Guarantees that there are at least n_bits + 1 bits in accumulator.
Precondition: accumulator contains at least 1 bit.
n_bits should be in the range [1..24] for regular build. For portable
non-64-bit little-endian build only 16 bits are safe to request. */
static BROTLI_INLINE void BrotliFillBitWindow(
BrotliBitReader* const br, uint32_t n_bits) {
#if (BROTLI_64_BITS)
if (!BROTLI_ALIGNED_READ && IS_CONSTANT(n_bits) && (n_bits <= 8)) {
if (br->bit_pos_ >= 56) {
br->val_ >>= 56;
br->bit_pos_ ^= 56; /* here same as -= 56 because of the if condition */
br->val_ |= BrotliLoad64LE(br->next_in) << 8;
br->avail_in -= 7;
br->next_in += 7;
}
} else if (!BROTLI_ALIGNED_READ && IS_CONSTANT(n_bits) && (n_bits <= 16)) {
if (br->bit_pos_ >= 48) {
br->val_ >>= 48;
br->bit_pos_ ^= 48; /* here same as -= 48 because of the if condition */
br->val_ |= BrotliLoad64LE(br->next_in) << 16;
br->avail_in -= 6;
br->next_in += 6;
}
} else {
if (br->bit_pos_ >= 32) {
br->val_ >>= 32;
br->bit_pos_ ^= 32; /* here same as -= 32 because of the if condition */
br->val_ |= ((uint64_t)BrotliLoad32LE(br->next_in)) << 32;
br->avail_in -= BROTLI_SHORT_FILL_BIT_WINDOW_READ;
br->next_in += BROTLI_SHORT_FILL_BIT_WINDOW_READ;
}
}
#else
if (!BROTLI_ALIGNED_READ && IS_CONSTANT(n_bits) && (n_bits <= 8)) {
if (br->bit_pos_ >= 24) {
br->val_ >>= 24;
br->bit_pos_ ^= 24; /* here same as -= 24 because of the if condition */
br->val_ |= BrotliLoad32LE(br->next_in) << 8;
br->avail_in -= 3;
br->next_in += 3;
}
} else {
if (br->bit_pos_ >= 16) {
br->val_ >>= 16;
br->bit_pos_ ^= 16; /* here same as -= 16 because of the if condition */
br->val_ |= ((uint32_t)BrotliLoad16LE(br->next_in)) << 16;
br->avail_in -= BROTLI_SHORT_FILL_BIT_WINDOW_READ;
br->next_in += BROTLI_SHORT_FILL_BIT_WINDOW_READ;
}
}
#endif
}
/* Mostly like BrotliFillBitWindow, but guarantees only 16 bits and reads no
more than BROTLI_SHORT_FILL_BIT_WINDOW_READ bytes of input. */
static BROTLI_INLINE void BrotliFillBitWindow16(BrotliBitReader* const br) {
BrotliFillBitWindow(br, 17);
}
/* Pulls one byte of input to accumulator. */
static BROTLI_INLINE BROTLI_BOOL BrotliPullByte(BrotliBitReader* const br) {
if (br->avail_in == 0) {
return BROTLI_FALSE;
}
br->val_ >>= 8;
#if (BROTLI_64_BITS)
br->val_ |= ((uint64_t)*br->next_in) << 56;
#else
br->val_ |= ((uint32_t)*br->next_in) << 24;
#endif
br->bit_pos_ -= 8;
--br->avail_in;
++br->next_in;
return BROTLI_TRUE;
}
/* Returns currently available bits.
The number of valid bits could be calculated by BrotliGetAvailableBits. */
static BROTLI_INLINE reg_t BrotliGetBitsUnmasked(BrotliBitReader* const br) {
return br->val_ >> br->bit_pos_;
}
/* Like BrotliGetBits, but does not mask the result.
The result contains at least 16 valid bits. */
static BROTLI_INLINE uint32_t BrotliGet16BitsUnmasked(
BrotliBitReader* const br) {
BrotliFillBitWindow(br, 16);
return (uint32_t)BrotliGetBitsUnmasked(br);
}
/* Returns the specified number of bits from |br| without advancing bit pos. */
static BROTLI_INLINE uint32_t BrotliGetBits(
BrotliBitReader* const br, uint32_t n_bits) {
BrotliFillBitWindow(br, n_bits);
return (uint32_t)BrotliGetBitsUnmasked(br) & BitMask(n_bits);
}
/* Tries to peek the specified amount of bits. Returns 0, if there is not
enough input. */
static BROTLI_INLINE BROTLI_BOOL BrotliSafeGetBits(
BrotliBitReader* const br, uint32_t n_bits, uint32_t* val) {
while (BrotliGetAvailableBits(br) < n_bits) {
if (!BrotliPullByte(br)) {
return BROTLI_FALSE;
}
}
*val = (uint32_t)BrotliGetBitsUnmasked(br) & BitMask(n_bits);
return BROTLI_TRUE;
}
/* Advances the bit pos by n_bits. */
static BROTLI_INLINE void BrotliDropBits(
BrotliBitReader* const br, uint32_t n_bits) {
br->bit_pos_ += n_bits;
}
static BROTLI_INLINE void BrotliBitReaderUnload(BrotliBitReader* br) {
uint32_t unused_bytes = BrotliGetAvailableBits(br) >> 3;
uint32_t unused_bits = unused_bytes << 3;
br->avail_in += unused_bytes;
br->next_in -= unused_bytes;
if (unused_bits == sizeof(br->val_) << 3) {
br->val_ = 0;
} else {
br->val_ <<= unused_bits;
}
br->bit_pos_ += unused_bits;
}
/* Reads the specified number of bits from |br| and advances the bit pos.
Precondition: accumulator MUST contain at least n_bits. */
static BROTLI_INLINE void BrotliTakeBits(
BrotliBitReader* const br, uint32_t n_bits, uint32_t* val) {
*val = (uint32_t)BrotliGetBitsUnmasked(br) & BitMask(n_bits);
BROTLI_LOG(("[BrotliReadBits] %d %d %d val: %6x\n",
(int)br->avail_in, (int)br->bit_pos_, n_bits, (int)*val));
BrotliDropBits(br, n_bits);
}
/* Reads the specified number of bits from |br| and advances the bit pos.
Assumes that there is enough input to perform BrotliFillBitWindow. */
static BROTLI_INLINE uint32_t BrotliReadBits(
BrotliBitReader* const br, uint32_t n_bits) {
if (BROTLI_64_BITS || (n_bits <= 16)) {
uint32_t val;
BrotliFillBitWindow(br, n_bits);
BrotliTakeBits(br, n_bits, &val);
return val;
} else {
uint32_t low_val;
uint32_t high_val;
BrotliFillBitWindow(br, 16);
BrotliTakeBits(br, 16, &low_val);
BrotliFillBitWindow(br, 8);
BrotliTakeBits(br, n_bits - 16, &high_val);
return low_val | (high_val << 16);
}
}
/* Tries to read the specified amount of bits. Returns 0, if there is not
enough input. n_bits MUST be positive. */
static BROTLI_INLINE BROTLI_BOOL BrotliSafeReadBits(
BrotliBitReader* const br, uint32_t n_bits, uint32_t* val) {
while (BrotliGetAvailableBits(br) < n_bits) {
if (!BrotliPullByte(br)) {
return BROTLI_FALSE;
}
}
BrotliTakeBits(br, n_bits, val);
return BROTLI_TRUE;
}
/* Advances the bit reader position to the next byte boundary and verifies
that any skipped bits are set to zero. */
static BROTLI_INLINE BROTLI_BOOL BrotliJumpToByteBoundary(BrotliBitReader* br) {
uint32_t pad_bits_count = BrotliGetAvailableBits(br) & 0x7;
uint32_t pad_bits = 0;
if (pad_bits_count != 0) {
BrotliTakeBits(br, pad_bits_count, &pad_bits);
}
return TO_BROTLI_BOOL(pad_bits == 0);
}
/* Copies remaining input bytes stored in the bit reader to the output. Value
num may not be larger than BrotliGetRemainingBytes. The bit reader must be
warmed up again after this. */
static BROTLI_INLINE void BrotliCopyBytes(uint8_t* dest,
BrotliBitReader* br, size_t num) {
while (BrotliGetAvailableBits(br) >= 8 && num > 0) {
*dest = (uint8_t)BrotliGetBitsUnmasked(br);
BrotliDropBits(br, 8);
++dest;
--num;
}
memcpy(dest, br->next_in, num);
br->avail_in -= num;
br->next_in += num;
}
#if defined(__cplusplus) || defined(c_plusplus)
} /* extern "C" */
#endif
#endif /* BROTLI_DEC_BIT_READER_H_ */
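
The reader above keeps up to a register's worth of bits prefetched in val_ and serves reads LSB-first. A deliberately simplified model of that scheme, without the refill and alignment optimizations and shifting consumed bits out of the accumulator instead of tracking bit_pos_ (names are illustrative):

#include <stddef.h>
#include <stdint.h>
#include <stdio.h>

typedef struct {
  const uint8_t* next_in;  /* unread input bytes */
  size_t avail_in;
  uint64_t val;            /* accumulator; lowest bit is the next bit */
  uint32_t bit_count;      /* number of valid bits in val */
} MiniBitReader;

/* Read n_bits (1..24), refilling the accumulator a byte at a time. */
static uint32_t MiniReadBits(MiniBitReader* br, uint32_t n_bits) {
  uint32_t result;
  while (br->bit_count < n_bits && br->avail_in > 0) {
    br->val |= (uint64_t)(*br->next_in++) << br->bit_count;
    br->bit_count += 8;
    --br->avail_in;
  }
  result = (uint32_t)br->val & ((1u << n_bits) - 1);
  br->val >>= n_bits;
  br->bit_count -= n_bits;
  return result;
}

int main(void) {
  /* 0xB5 = 1011 0101b: read LSB-first, the first 3 bits are 101b = 5,
     the next 5 bits are 10110b = 22, and the second byte then yields 1. */
  const uint8_t data[2] = { 0xB5, 0x01 };
  MiniBitReader br = { data, sizeof(data), 0, 0 };
  uint32_t a = MiniReadBits(&br, 3);
  uint32_t b = MiniReadBits(&br, 5);
  uint32_t c = MiniReadBits(&br, 8);
  printf("%u %u %u\n", a, b, c);  /* 5 22 1 */
  return 0;
}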


@ -1,251 +0,0 @@
/* Copyright 2013 Google Inc. All Rights Reserved.
Distributed under MIT license.
See file LICENSE for detail or copy at https://opensource.org/licenses/MIT
*/
/* Lookup table to map the previous two bytes to a context id.
There are four different context modeling modes defined here:
CONTEXT_LSB6: context id is the least significant 6 bits of the last byte,
CONTEXT_MSB6: context id is the most significant 6 bits of the last byte,
CONTEXT_UTF8: second-order context model tuned for UTF8-encoded text,
CONTEXT_SIGNED: second-order context model tuned for signed integers.
The context id for the UTF8 context model is calculated as follows. If p1
and p2 are the previous two bytes, we calculate the context as
context = kContextLookup[p1] | kContextLookup[p2 + 256].
If the previous two bytes are ASCII characters (i.e. < 128), this will be
equivalent to
context = 4 * context1(p1) + context2(p2),
where context1 is based on the previous byte in the following way:
0 : non-ASCII control
1 : \t, \n, \r
2 : space
3 : other punctuation
4 : " '
5 : %
6 : ( < [ {
7 : ) > ] }
8 : , ; :
9 : .
10 : =
11 : number
12 : upper-case vowel
13 : upper-case consonant
14 : lower-case vowel
15 : lower-case consonant
and context2 is based on the second last byte:
0 : control, space
1 : punctuation
2 : upper-case letter, number
3 : lower-case letter
If the last byte is ASCII, and the second last byte is not (in a valid UTF8
stream it will be a continuation byte, value between 128 and 191), the
context is the same as if the second last byte was an ASCII control or space.
If the last byte is a UTF8 lead byte (value >= 192), then the next byte will
be a continuation byte and the context id is 2 or 3 depending on the LSB of
the last byte and to a lesser extent on the second last byte if it is ASCII.
If the last byte is a UTF8 continuation byte, the second last byte can be:
- continuation byte: the next byte is probably ASCII or lead byte (assuming
4-byte UTF8 characters are rare) and the context id is 0 or 1.
- lead byte (192 - 207): next byte is ASCII or lead byte, context is 0 or 1
- lead byte (208 - 255): next byte is continuation byte, context is 2 or 3
The possible value combinations of the previous two bytes, the range of
context ids and the type of the next byte is summarized in the table below:
|--------\-----------------------------------------------------------------|
| \ Last byte |
| Second \---------------------------------------------------------------|
| last byte \ ASCII | cont. byte | lead byte |
| \ (0-127) | (128-191) | (192-) |
|=============|===================|=====================|==================|
| ASCII | next: ASCII/lead | not valid | next: cont. |
| (0-127) | context: 4 - 63 | | context: 2 - 3 |
|-------------|-------------------|---------------------|------------------|
| cont. byte | next: ASCII/lead | next: ASCII/lead | next: cont. |
| (128-191) | context: 4 - 63 | context: 0 - 1 | context: 2 - 3 |
|-------------|-------------------|---------------------|------------------|
| lead byte | not valid | next: ASCII/lead | not valid |
| (192-207) | | context: 0 - 1 | |
|-------------|-------------------|---------------------|------------------|
| lead byte | not valid | next: cont. | not valid |
| (208-) | | context: 2 - 3 | |
|-------------|-------------------|---------------------|------------------|
The context id for the signed context mode is calculated as:
context = (kContextLookup[512 + p1] << 3) | kContextLookup[512 + p2].
For any context modeling modes, the context ids can be calculated by |-ing
together two lookups from one table using context model dependent offsets:
context = kContextLookup[offset1 + p1] | kContextLookup[offset2 + p2].
where offset1 and offset2 are dependent on the context mode.
*/
#ifndef BROTLI_DEC_CONTEXT_H_
#define BROTLI_DEC_CONTEXT_H_
#include <brotli/types.h>
enum ContextType {
CONTEXT_LSB6 = 0,
CONTEXT_MSB6 = 1,
CONTEXT_UTF8 = 2,
CONTEXT_SIGNED = 3
};
/* Common context lookup table for all context modes. */
static const uint8_t kContextLookup[1792] = {
/* CONTEXT_UTF8, last byte. */
/* ASCII range. */
0, 0, 0, 0, 0, 0, 0, 0, 0, 4, 4, 0, 0, 4, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
8, 12, 16, 12, 12, 20, 12, 16, 24, 28, 12, 12, 32, 12, 36, 12,
44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 32, 32, 24, 40, 28, 12,
12, 48, 52, 52, 52, 48, 52, 52, 52, 48, 52, 52, 52, 52, 52, 48,
52, 52, 52, 52, 52, 48, 52, 52, 52, 52, 52, 24, 12, 28, 12, 12,
12, 56, 60, 60, 60, 56, 60, 60, 60, 56, 60, 60, 60, 60, 60, 56,
60, 60, 60, 60, 60, 56, 60, 60, 60, 60, 60, 24, 12, 28, 12, 0,
/* UTF8 continuation byte range. */
0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1,
0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1,
0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1,
0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1,
/* UTF8 lead byte range. */
2, 3, 2, 3, 2, 3, 2, 3, 2, 3, 2, 3, 2, 3, 2, 3,
2, 3, 2, 3, 2, 3, 2, 3, 2, 3, 2, 3, 2, 3, 2, 3,
2, 3, 2, 3, 2, 3, 2, 3, 2, 3, 2, 3, 2, 3, 2, 3,
2, 3, 2, 3, 2, 3, 2, 3, 2, 3, 2, 3, 2, 3, 2, 3,
/* CONTEXT_UTF8 second last byte. */
/* ASCII range. */
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 1, 1, 1, 1, 1, 1,
1, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 1, 1, 1, 1, 1,
1, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3,
3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 1, 1, 1, 1, 0,
/* UTF8 continuation byte range. */
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
/* UTF8 lead byte range. */
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
/* CONTEXT_SIGNED, second last byte. */
0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3,
3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3,
3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3,
3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3,
4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5,
5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5,
5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5,
6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 7,
/* CONTEXT_SIGNED, last byte, same as the above values shifted by 3 bits. */
0, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8,
16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16,
16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16,
16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16,
24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24,
24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24,
24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24,
24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24,
32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32,
32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32,
32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32,
32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32,
40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40,
40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40,
40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40,
48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 56,
/* CONTEXT_LSB6, last byte. */
0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15,
16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31,
32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47,
48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63,
0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15,
16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31,
32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47,
48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63,
0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15,
16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31,
32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47,
48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63,
0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15,
16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31,
32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47,
48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63,
/* CONTEXT_MSB6, last byte. */
0, 0, 0, 0, 1, 1, 1, 1, 2, 2, 2, 2, 3, 3, 3, 3,
4, 4, 4, 4, 5, 5, 5, 5, 6, 6, 6, 6, 7, 7, 7, 7,
8, 8, 8, 8, 9, 9, 9, 9, 10, 10, 10, 10, 11, 11, 11, 11,
12, 12, 12, 12, 13, 13, 13, 13, 14, 14, 14, 14, 15, 15, 15, 15,
16, 16, 16, 16, 17, 17, 17, 17, 18, 18, 18, 18, 19, 19, 19, 19,
20, 20, 20, 20, 21, 21, 21, 21, 22, 22, 22, 22, 23, 23, 23, 23,
24, 24, 24, 24, 25, 25, 25, 25, 26, 26, 26, 26, 27, 27, 27, 27,
28, 28, 28, 28, 29, 29, 29, 29, 30, 30, 30, 30, 31, 31, 31, 31,
32, 32, 32, 32, 33, 33, 33, 33, 34, 34, 34, 34, 35, 35, 35, 35,
36, 36, 36, 36, 37, 37, 37, 37, 38, 38, 38, 38, 39, 39, 39, 39,
40, 40, 40, 40, 41, 41, 41, 41, 42, 42, 42, 42, 43, 43, 43, 43,
44, 44, 44, 44, 45, 45, 45, 45, 46, 46, 46, 46, 47, 47, 47, 47,
48, 48, 48, 48, 49, 49, 49, 49, 50, 50, 50, 50, 51, 51, 51, 51,
52, 52, 52, 52, 53, 53, 53, 53, 54, 54, 54, 54, 55, 55, 55, 55,
56, 56, 56, 56, 57, 57, 57, 57, 58, 58, 58, 58, 59, 59, 59, 59,
60, 60, 60, 60, 61, 61, 61, 61, 62, 62, 62, 62, 63, 63, 63, 63,
/* CONTEXT_{M,L}SB6, second last byte. */
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
};
static const int kContextLookupOffsets[8] = {
/* CONTEXT_LSB6 */
1024, 1536,
/* CONTEXT_MSB6 */
1280, 1536,
/* CONTEXT_UTF8 */
0, 256,
/* CONTEXT_SIGNED */
768, 512,
};
#endif /* BROTLI_DEC_CONTEXT_H_ */
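
A small driver for the lookup scheme documented above: for every mode the context id is the OR of two table lookups keyed on the previous two bytes. It assumes the header is saved locally as context.h and that its <brotli/types.h> include resolves; both are assumptions about the build setup, not facts from this commit:

#include <stdint.h>
#include <stdio.h>
#include "context.h"

static uint8_t ContextId(enum ContextType mode, uint8_t p1, uint8_t p2) {
  const int off1 = kContextLookupOffsets[2 * mode];
  const int off2 = kContextLookupOffsets[2 * mode + 1];
  return (uint8_t)(kContextLookup[off1 + p1] | kContextLookup[off2 + p2]);
}

int main(void) {
  /* UTF8 mode, previous bytes "e ": context1(space) = 2, context2('e') = 3,
     so the id is 4 * 2 + 3 = 11. */
  printf("UTF8 id after \"e \": %u\n", ContextId(CONTEXT_UTF8, ' ', 'e'));
  /* LSB6 mode only looks at the low six bits of the last byte. */
  printf("LSB6 id of 0xC3:     %u\n",
         ContextId(CONTEXT_LSB6, 0xC3, 0x00));  /* 0xC3 & 0x3F = 3 */
  return 0;
}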


@ -1,5 +0,0 @@
package dec

// Importing the pseudo-package "C" marks this file as cgo, which is what
// allows the package's bundled C sources to be compiled as part of the build.
import (
	"C"
)


@ -1,68 +0,0 @@
/* Copyright 2013 Google Inc. All Rights Reserved.
Distributed under MIT license.
See file LICENSE for detail or copy at https://opensource.org/licenses/MIT
*/
/* Utilities for building Huffman decoding tables. */
#ifndef BROTLI_DEC_HUFFMAN_H_
#define BROTLI_DEC_HUFFMAN_H_
#include <brotli/types.h>
#include "./port.h"
#if defined(__cplusplus) || defined(c_plusplus)
extern "C" {
#endif
#define BROTLI_HUFFMAN_MAX_CODE_LENGTH 15
/* Maximum possible Huffman table size for an alphabet size of (index * 32),
* max code length 15 and root table bits 8. */
static const uint16_t kMaxHuffmanTableSize[] = {
256, 402, 436, 468, 500, 534, 566, 598, 630, 662, 694, 726, 758, 790, 822,
854, 886, 920, 952, 984, 1016, 1048, 1080};
/* BROTLI_NUM_BLOCK_LEN_SYMBOLS == 26 */
#define BROTLI_HUFFMAN_MAX_SIZE_26 396
/* BROTLI_MAX_BLOCK_TYPE_SYMBOLS == 258 */
#define BROTLI_HUFFMAN_MAX_SIZE_258 632
/* BROTLI_MAX_CONTEXT_MAP_SYMBOLS == 272 */
#define BROTLI_HUFFMAN_MAX_SIZE_272 646
#define BROTLI_HUFFMAN_MAX_CODE_LENGTH_CODE_LENGTH 5
typedef struct {
uint8_t bits; /* number of bits used for this symbol */
uint16_t value; /* symbol value or table offset */
} HuffmanCode;
/* Builds Huffman lookup table assuming code lengths are in symbol order. */
BROTLI_INTERNAL void BrotliBuildCodeLengthsHuffmanTable(HuffmanCode* root_table,
const uint8_t* const code_lengths, uint16_t* count);
/* Builds Huffman lookup table assuming code lengths are in symbol order. */
/* Returns size of resulting table. */
BROTLI_INTERNAL uint32_t BrotliBuildHuffmanTable(HuffmanCode* root_table,
int root_bits, const uint16_t* const symbol_lists, uint16_t* count_arg);
/* Builds a simple Huffman table. The num_symbols parameter is to be */
/* interpreted as follows: 0 means 1 symbol, 1 means 2 symbols, 2 means 3 */
/* symbols, 3 means 4 symbols with lengths 2,2,2,2, 4 means 4 symbols with */
/* lengths 1,2,3,3. */
BROTLI_INTERNAL uint32_t BrotliBuildSimpleHuffmanTable(HuffmanCode* table,
int root_bits, uint16_t* symbols, uint32_t num_symbols);
/* Contains a collection of Huffman trees with the same alphabet size. */
typedef struct {
HuffmanCode** htrees;
HuffmanCode* codes;
uint16_t alphabet_size;
uint16_t num_htrees;
} HuffmanTreeGroup;
#if defined(__cplusplus) || defined(c_plusplus)
} /* extern "C" */
#endif
#endif /* BROTLI_DEC_HUFFMAN_H_ */
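
Each HuffmanCode slot above is all a decoder needs: peek the next root bits of input, use them as an index, emit value and drop bits bits. A hand-built single-level table for a three-symbol code (root bits = 2, longest code 2 bits) illustrates how short codes are replicated; the real builders above additionally chain second-level tables for longer codes, which is what the "table offset" meaning of value is for. The code below is a sketch, not brotli's table layout algorithm:

#include <stdint.h>
#include <stdio.h>

typedef struct {
  uint8_t bits;    /* length of the matched codeword */
  uint16_t value;  /* decoded symbol */
} MiniHuffmanCode;

int main(void) {
  /* Code read LSB-first: 'a' -> 0, 'b' -> 1 then 0, 'c' -> 1 then 1.
     With root bits = 2, the one-bit code for 'a' occupies two slots. */
  const MiniHuffmanCode table[4] = {
    { 1, 'a' }, { 2, 'b' }, { 1, 'a' }, { 2, 'c' }
  };
  /* "abca" encodes to the bit sequence 0, 1,0, 1,1, 0, which packs
     LSB-first into the single byte 0x1A. */
  uint32_t bitbuf = 0x1A;
  int nbits = 6;
  while (nbits > 0) {
    const MiniHuffmanCode* e = &table[bitbuf & 3];  /* peek 2 bits */
    putchar((int)e->value);
    bitbuf >>= e->bits;                             /* drop only e->bits */
    nbits -= e->bits;
  }
  putchar('\n');  /* prints "abca" */
  return 0;
}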


@ -1,168 +0,0 @@
/* Copyright 2015 Google Inc. All Rights Reserved.
Distributed under MIT license.
See file LICENSE for detail or copy at https://opensource.org/licenses/MIT
*/
/* Macros for compiler / platform specific features and build options.
Build options are:
* BROTLI_BUILD_32_BIT disables 64-bit optimizations
* BROTLI_BUILD_64_BIT forces to use 64-bit optimizations
* BROTLI_BUILD_BIG_ENDIAN forces to use big-endian optimizations
* BROTLI_BUILD_ENDIAN_NEUTRAL disables endian-aware optimizations
* BROTLI_BUILD_LITTLE_ENDIAN forces to use little-endian optimizations
* BROTLI_BUILD_MODERN_COMPILER forces to use modern compilers built-ins,
features and attributes
* BROTLI_BUILD_PORTABLE disables dangerous optimizations, like unaligned
read and overlapping memcpy; this reduces decompression speed by 5%
* BROTLI_BUILD_NO_RBIT disables "rbit" optimization for ARM CPUs
* BROTLI_DEBUG dumps file name and line number when decoder detects stream
or memory error
* BROTLI_ENABLE_LOG enables asserts and dumps various state information
*/
#ifndef BROTLI_DEC_PORT_H_
#define BROTLI_DEC_PORT_H_
#if defined(BROTLI_ENABLE_LOG) || defined(BROTLI_DEBUG)
#include <assert.h>
#include <stdio.h>
#endif
#include <brotli/port.h>
#if defined(__arm__) || defined(__thumb__) || \
defined(_M_ARM) || defined(_M_ARMT) || defined(__ARM64_ARCH_8__)
#define BROTLI_TARGET_ARM
#if (defined(__ARM_ARCH) && (__ARM_ARCH == 7)) || \
(defined(M_ARM) && (M_ARM == 7))
#define BROTLI_TARGET_ARMV7
#endif /* ARMv7 */
#if defined(__aarch64__) || defined(__ARM64_ARCH_8__)
#define BROTLI_TARGET_ARMV8
#endif /* ARMv8 */
#endif /* ARM */
#if defined(__i386) || defined(_M_IX86)
#define BROTLI_TARGET_X86
#endif
#if defined(__x86_64__) || defined(_M_X64)
#define BROTLI_TARGET_X64
#endif
#if defined(__PPC64__)
#define BROTLI_TARGET_POWERPC64
#endif
#ifdef BROTLI_BUILD_PORTABLE
#define BROTLI_ALIGNED_READ (!!1)
#elif defined(BROTLI_TARGET_X86) || defined(BROTLI_TARGET_X64) || \
defined(BROTLI_TARGET_ARMV7) || defined(BROTLI_TARGET_ARMV8)
/* Allow unaligned read only for white-listed CPUs. */
#define BROTLI_ALIGNED_READ (!!0)
#else
#define BROTLI_ALIGNED_READ (!!1)
#endif
/* The IS_CONSTANT macro returns true for compile-time constant expressions. */
#if BROTLI_MODERN_COMPILER || __has_builtin(__builtin_constant_p)
#define IS_CONSTANT(x) (!!__builtin_constant_p(x))
#else
#define IS_CONSTANT(x) (!!0)
#endif
#ifdef BROTLI_ENABLE_LOG
#define BROTLI_DCHECK(x) assert(x)
#define BROTLI_LOG(x) printf x
#else
#define BROTLI_DCHECK(x)
#define BROTLI_LOG(x)
#endif
#if defined(BROTLI_DEBUG) || defined(BROTLI_ENABLE_LOG)
static BROTLI_INLINE void BrotliDump(const char* f, int l, const char* fn) {
fprintf(stderr, "%s:%d (%s)\n", f, l, fn);
fflush(stderr);
}
#define BROTLI_DUMP() BrotliDump(__FILE__, __LINE__, __FUNCTION__)
#else
#define BROTLI_DUMP() (void)(0)
#endif
#if defined(BROTLI_BUILD_64_BIT)
#define BROTLI_64_BITS 1
#elif defined(BROTLI_BUILD_32_BIT)
#define BROTLI_64_BITS 0
#elif defined(BROTLI_TARGET_X64) || defined(BROTLI_TARGET_ARMV8) || \
defined(BROTLI_TARGET_POWERPC64)
#define BROTLI_64_BITS 1
#else
#define BROTLI_64_BITS 0
#endif
#if (BROTLI_64_BITS)
#define reg_t uint64_t
#else
#define reg_t uint32_t
#endif
#if defined(BROTLI_BUILD_BIG_ENDIAN)
#define BROTLI_LITTLE_ENDIAN 0
#define BROTLI_BIG_ENDIAN 1
#elif defined(BROTLI_BUILD_LITTLE_ENDIAN)
#define BROTLI_LITTLE_ENDIAN 1
#define BROTLI_BIG_ENDIAN 0
#elif defined(BROTLI_BUILD_ENDIAN_NEUTRAL)
#define BROTLI_LITTLE_ENDIAN 0
#define BROTLI_BIG_ENDIAN 0
#elif defined(__BYTE_ORDER__) && (__BYTE_ORDER__ == __ORDER_LITTLE_ENDIAN__)
#define BROTLI_LITTLE_ENDIAN 1
#define BROTLI_BIG_ENDIAN 0
#elif defined(_WIN32)
/* Win32 can currently always be assumed to be little endian */
#define BROTLI_LITTLE_ENDIAN 1
#define BROTLI_BIG_ENDIAN 0
#else
#if (defined(__BYTE_ORDER__) && (__BYTE_ORDER__ == __ORDER_BIG_ENDIAN__))
#define BROTLI_BIG_ENDIAN 1
#else
#define BROTLI_BIG_ENDIAN 0
#endif
#define BROTLI_LITTLE_ENDIAN 0
#endif
#define BROTLI_REPEAT(N, X) { \
if ((N & 1) != 0) {X;} \
if ((N & 2) != 0) {X; X;} \
if ((N & 4) != 0) {X; X; X; X;} \
}
#if (BROTLI_MODERN_COMPILER || defined(__llvm__)) && \
!defined(BROTLI_BUILD_NO_RBIT)
#if defined(BROTLI_TARGET_ARMV7) || defined(BROTLI_TARGET_ARMV8)
/* TODO: detect ARMv6T2 and enable this code for it. */
static BROTLI_INLINE reg_t BrotliRBit(reg_t input) {
reg_t output;
__asm__("rbit %0, %1\n" : "=r"(output) : "r"(input));
return output;
}
#define BROTLI_RBIT(x) BrotliRBit(x)
#endif /* armv7 */
#endif /* gcc || clang */
#if defined(BROTLI_TARGET_ARM)
#define BROTLI_HAS_UBFX (!!1)
#else
#define BROTLI_HAS_UBFX (!!0)
#endif
#define BROTLI_ALLOC(S, L) S->alloc_func(S->memory_manager_opaque, L)
#define BROTLI_FREE(S, X) { \
S->free_func(S->memory_manager_opaque, X); \
X = NULL; \
}
#endif /* BROTLI_DEC_PORT_H_ */
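
BROTLI_REPEAT above expands a statement between one and seven times by testing the binary digits of the constant repeat count, so no runtime loop is needed. A standalone illustration of the same trick; the macro body is copied from above, the rest is illustrative:

#include <stdio.h>

#define REPEAT(N, X) { \
  if ((N & 1) != 0) {X;} \
  if ((N & 2) != 0) {X; X;} \
  if ((N & 4) != 0) {X; X; X; X;} \
}

int main(void) {
  int count = 0;
  REPEAT(6, ++count)              /* (6 & 2) gives 2 copies, (6 & 4) gives 4 */
  printf("count = %d\n", count);  /* prints 6 */
  return 0;
}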


@ -1,751 +0,0 @@
/* Copyright 2013 Google Inc. All Rights Reserved.
Distributed under MIT license.
See file LICENSE for detail or copy at https://opensource.org/licenses/MIT
*/
/* Lookup tables to map prefix codes to value ranges. This is used during
decoding of the block lengths, literal insertion lengths and copy lengths.
*/
#ifndef BROTLI_DEC_PREFIX_H_
#define BROTLI_DEC_PREFIX_H_
#include "../common/constants.h"
#include <brotli/types.h>
/* Represents the range of values belonging to a prefix code: */
/* [offset, offset + 2^nbits) */
struct PrefixCodeRange {
uint16_t offset;
uint8_t nbits;
};
static const struct PrefixCodeRange
kBlockLengthPrefixCode[BROTLI_NUM_BLOCK_LEN_SYMBOLS] = {
{ 1, 2}, { 5, 2}, { 9, 2}, { 13, 2},
{ 17, 3}, { 25, 3}, { 33, 3}, { 41, 3},
{ 49, 4}, { 65, 4}, { 81, 4}, { 97, 4},
{ 113, 5}, { 145, 5}, { 177, 5}, { 209, 5},
{ 241, 6}, { 305, 6}, { 369, 7}, { 497, 8},
{ 753, 9}, { 1265, 10}, {2289, 11}, {4337, 12},
{8433, 13}, {16625, 24}
};
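/* Worked example: block length symbol 19 maps to { 497, 8 }, so the decoder
   reads 8 extra bits and the length is 497 + extra; with extra = 42 that is
   539. The CmdLutElement table below packs the same offset/extra-bits idea
   for the combined insert-length, copy-length and distance-context alphabet. */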
typedef struct CmdLutElement {
uint8_t insert_len_extra_bits;
uint8_t copy_len_extra_bits;
int8_t distance_code;
uint8_t context;
uint16_t insert_len_offset;
uint16_t copy_len_offset;
} CmdLutElement;
static const CmdLutElement kCmdLut[BROTLI_NUM_COMMAND_SYMBOLS] = {
{ 0x00, 0x00, 0, 0x00, 0x0000, 0x0002 },
{ 0x00, 0x00, 0, 0x01, 0x0000, 0x0003 },
{ 0x00, 0x00, 0, 0x02, 0x0000, 0x0004 },
{ 0x00, 0x00, 0, 0x03, 0x0000, 0x0005 },
{ 0x00, 0x00, 0, 0x03, 0x0000, 0x0006 },
{ 0x00, 0x00, 0, 0x03, 0x0000, 0x0007 },
{ 0x00, 0x00, 0, 0x03, 0x0000, 0x0008 },
{ 0x00, 0x00, 0, 0x03, 0x0000, 0x0009 },
{ 0x00, 0x00, 0, 0x00, 0x0001, 0x0002 },
{ 0x00, 0x00, 0, 0x01, 0x0001, 0x0003 },
{ 0x00, 0x00, 0, 0x02, 0x0001, 0x0004 },
{ 0x00, 0x00, 0, 0x03, 0x0001, 0x0005 },
{ 0x00, 0x00, 0, 0x03, 0x0001, 0x0006 },
{ 0x00, 0x00, 0, 0x03, 0x0001, 0x0007 },
{ 0x00, 0x00, 0, 0x03, 0x0001, 0x0008 },
{ 0x00, 0x00, 0, 0x03, 0x0001, 0x0009 },
{ 0x00, 0x00, 0, 0x00, 0x0002, 0x0002 },
{ 0x00, 0x00, 0, 0x01, 0x0002, 0x0003 },
{ 0x00, 0x00, 0, 0x02, 0x0002, 0x0004 },
{ 0x00, 0x00, 0, 0x03, 0x0002, 0x0005 },
{ 0x00, 0x00, 0, 0x03, 0x0002, 0x0006 },
{ 0x00, 0x00, 0, 0x03, 0x0002, 0x0007 },
{ 0x00, 0x00, 0, 0x03, 0x0002, 0x0008 },
{ 0x00, 0x00, 0, 0x03, 0x0002, 0x0009 },
{ 0x00, 0x00, 0, 0x00, 0x0003, 0x0002 },
{ 0x00, 0x00, 0, 0x01, 0x0003, 0x0003 },
{ 0x00, 0x00, 0, 0x02, 0x0003, 0x0004 },
{ 0x00, 0x00, 0, 0x03, 0x0003, 0x0005 },
{ 0x00, 0x00, 0, 0x03, 0x0003, 0x0006 },
{ 0x00, 0x00, 0, 0x03, 0x0003, 0x0007 },
{ 0x00, 0x00, 0, 0x03, 0x0003, 0x0008 },
{ 0x00, 0x00, 0, 0x03, 0x0003, 0x0009 },
{ 0x00, 0x00, 0, 0x00, 0x0004, 0x0002 },
{ 0x00, 0x00, 0, 0x01, 0x0004, 0x0003 },
{ 0x00, 0x00, 0, 0x02, 0x0004, 0x0004 },
{ 0x00, 0x00, 0, 0x03, 0x0004, 0x0005 },
{ 0x00, 0x00, 0, 0x03, 0x0004, 0x0006 },
{ 0x00, 0x00, 0, 0x03, 0x0004, 0x0007 },
{ 0x00, 0x00, 0, 0x03, 0x0004, 0x0008 },
{ 0x00, 0x00, 0, 0x03, 0x0004, 0x0009 },
{ 0x00, 0x00, 0, 0x00, 0x0005, 0x0002 },
{ 0x00, 0x00, 0, 0x01, 0x0005, 0x0003 },
{ 0x00, 0x00, 0, 0x02, 0x0005, 0x0004 },
{ 0x00, 0x00, 0, 0x03, 0x0005, 0x0005 },
{ 0x00, 0x00, 0, 0x03, 0x0005, 0x0006 },
{ 0x00, 0x00, 0, 0x03, 0x0005, 0x0007 },
{ 0x00, 0x00, 0, 0x03, 0x0005, 0x0008 },
{ 0x00, 0x00, 0, 0x03, 0x0005, 0x0009 },
{ 0x01, 0x00, 0, 0x00, 0x0006, 0x0002 },
{ 0x01, 0x00, 0, 0x01, 0x0006, 0x0003 },
{ 0x01, 0x00, 0, 0x02, 0x0006, 0x0004 },
{ 0x01, 0x00, 0, 0x03, 0x0006, 0x0005 },
{ 0x01, 0x00, 0, 0x03, 0x0006, 0x0006 },
{ 0x01, 0x00, 0, 0x03, 0x0006, 0x0007 },
{ 0x01, 0x00, 0, 0x03, 0x0006, 0x0008 },
{ 0x01, 0x00, 0, 0x03, 0x0006, 0x0009 },
{ 0x01, 0x00, 0, 0x00, 0x0008, 0x0002 },
{ 0x01, 0x00, 0, 0x01, 0x0008, 0x0003 },
{ 0x01, 0x00, 0, 0x02, 0x0008, 0x0004 },
{ 0x01, 0x00, 0, 0x03, 0x0008, 0x0005 },
{ 0x01, 0x00, 0, 0x03, 0x0008, 0x0006 },
{ 0x01, 0x00, 0, 0x03, 0x0008, 0x0007 },
{ 0x01, 0x00, 0, 0x03, 0x0008, 0x0008 },
{ 0x01, 0x00, 0, 0x03, 0x0008, 0x0009 },
{ 0x00, 0x01, 0, 0x03, 0x0000, 0x000a },
{ 0x00, 0x01, 0, 0x03, 0x0000, 0x000c },
{ 0x00, 0x02, 0, 0x03, 0x0000, 0x000e },
{ 0x00, 0x02, 0, 0x03, 0x0000, 0x0012 },
{ 0x00, 0x03, 0, 0x03, 0x0000, 0x0016 },
{ 0x00, 0x03, 0, 0x03, 0x0000, 0x001e },
{ 0x00, 0x04, 0, 0x03, 0x0000, 0x0026 },
{ 0x00, 0x04, 0, 0x03, 0x0000, 0x0036 },
{ 0x00, 0x01, 0, 0x03, 0x0001, 0x000a },
{ 0x00, 0x01, 0, 0x03, 0x0001, 0x000c },
{ 0x00, 0x02, 0, 0x03, 0x0001, 0x000e },
{ 0x00, 0x02, 0, 0x03, 0x0001, 0x0012 },
{ 0x00, 0x03, 0, 0x03, 0x0001, 0x0016 },
{ 0x00, 0x03, 0, 0x03, 0x0001, 0x001e },
{ 0x00, 0x04, 0, 0x03, 0x0001, 0x0026 },
{ 0x00, 0x04, 0, 0x03, 0x0001, 0x0036 },
{ 0x00, 0x01, 0, 0x03, 0x0002, 0x000a },
{ 0x00, 0x01, 0, 0x03, 0x0002, 0x000c },
{ 0x00, 0x02, 0, 0x03, 0x0002, 0x000e },
{ 0x00, 0x02, 0, 0x03, 0x0002, 0x0012 },
{ 0x00, 0x03, 0, 0x03, 0x0002, 0x0016 },
{ 0x00, 0x03, 0, 0x03, 0x0002, 0x001e },
{ 0x00, 0x04, 0, 0x03, 0x0002, 0x0026 },
{ 0x00, 0x04, 0, 0x03, 0x0002, 0x0036 },
{ 0x00, 0x01, 0, 0x03, 0x0003, 0x000a },
{ 0x00, 0x01, 0, 0x03, 0x0003, 0x000c },
{ 0x00, 0x02, 0, 0x03, 0x0003, 0x000e },
{ 0x00, 0x02, 0, 0x03, 0x0003, 0x0012 },
{ 0x00, 0x03, 0, 0x03, 0x0003, 0x0016 },
{ 0x00, 0x03, 0, 0x03, 0x0003, 0x001e },
{ 0x00, 0x04, 0, 0x03, 0x0003, 0x0026 },
{ 0x00, 0x04, 0, 0x03, 0x0003, 0x0036 },
{ 0x00, 0x01, 0, 0x03, 0x0004, 0x000a },
{ 0x00, 0x01, 0, 0x03, 0x0004, 0x000c },
{ 0x00, 0x02, 0, 0x03, 0x0004, 0x000e },
{ 0x00, 0x02, 0, 0x03, 0x0004, 0x0012 },
{ 0x00, 0x03, 0, 0x03, 0x0004, 0x0016 },
{ 0x00, 0x03, 0, 0x03, 0x0004, 0x001e },
{ 0x00, 0x04, 0, 0x03, 0x0004, 0x0026 },
{ 0x00, 0x04, 0, 0x03, 0x0004, 0x0036 },
{ 0x00, 0x01, 0, 0x03, 0x0005, 0x000a },
{ 0x00, 0x01, 0, 0x03, 0x0005, 0x000c },
{ 0x00, 0x02, 0, 0x03, 0x0005, 0x000e },
{ 0x00, 0x02, 0, 0x03, 0x0005, 0x0012 },
{ 0x00, 0x03, 0, 0x03, 0x0005, 0x0016 },
{ 0x00, 0x03, 0, 0x03, 0x0005, 0x001e },
{ 0x00, 0x04, 0, 0x03, 0x0005, 0x0026 },
{ 0x00, 0x04, 0, 0x03, 0x0005, 0x0036 },
{ 0x01, 0x01, 0, 0x03, 0x0006, 0x000a },
{ 0x01, 0x01, 0, 0x03, 0x0006, 0x000c },
{ 0x01, 0x02, 0, 0x03, 0x0006, 0x000e },
{ 0x01, 0x02, 0, 0x03, 0x0006, 0x0012 },
{ 0x01, 0x03, 0, 0x03, 0x0006, 0x0016 },
{ 0x01, 0x03, 0, 0x03, 0x0006, 0x001e },
{ 0x01, 0x04, 0, 0x03, 0x0006, 0x0026 },
{ 0x01, 0x04, 0, 0x03, 0x0006, 0x0036 },
{ 0x01, 0x01, 0, 0x03, 0x0008, 0x000a },
{ 0x01, 0x01, 0, 0x03, 0x0008, 0x000c },
{ 0x01, 0x02, 0, 0x03, 0x0008, 0x000e },
{ 0x01, 0x02, 0, 0x03, 0x0008, 0x0012 },
{ 0x01, 0x03, 0, 0x03, 0x0008, 0x0016 },
{ 0x01, 0x03, 0, 0x03, 0x0008, 0x001e },
{ 0x01, 0x04, 0, 0x03, 0x0008, 0x0026 },
{ 0x01, 0x04, 0, 0x03, 0x0008, 0x0036 },
{ 0x00, 0x00, -1, 0x00, 0x0000, 0x0002 },
{ 0x00, 0x00, -1, 0x01, 0x0000, 0x0003 },
{ 0x00, 0x00, -1, 0x02, 0x0000, 0x0004 },
{ 0x00, 0x00, -1, 0x03, 0x0000, 0x0005 },
{ 0x00, 0x00, -1, 0x03, 0x0000, 0x0006 },
{ 0x00, 0x00, -1, 0x03, 0x0000, 0x0007 },
{ 0x00, 0x00, -1, 0x03, 0x0000, 0x0008 },
{ 0x00, 0x00, -1, 0x03, 0x0000, 0x0009 },
{ 0x00, 0x00, -1, 0x00, 0x0001, 0x0002 },
{ 0x00, 0x00, -1, 0x01, 0x0001, 0x0003 },
{ 0x00, 0x00, -1, 0x02, 0x0001, 0x0004 },
{ 0x00, 0x00, -1, 0x03, 0x0001, 0x0005 },
{ 0x00, 0x00, -1, 0x03, 0x0001, 0x0006 },
{ 0x00, 0x00, -1, 0x03, 0x0001, 0x0007 },
{ 0x00, 0x00, -1, 0x03, 0x0001, 0x0008 },
{ 0x00, 0x00, -1, 0x03, 0x0001, 0x0009 },
{ 0x00, 0x00, -1, 0x00, 0x0002, 0x0002 },
{ 0x00, 0x00, -1, 0x01, 0x0002, 0x0003 },
{ 0x00, 0x00, -1, 0x02, 0x0002, 0x0004 },
{ 0x00, 0x00, -1, 0x03, 0x0002, 0x0005 },
{ 0x00, 0x00, -1, 0x03, 0x0002, 0x0006 },
{ 0x00, 0x00, -1, 0x03, 0x0002, 0x0007 },
{ 0x00, 0x00, -1, 0x03, 0x0002, 0x0008 },
{ 0x00, 0x00, -1, 0x03, 0x0002, 0x0009 },
{ 0x00, 0x00, -1, 0x00, 0x0003, 0x0002 },
{ 0x00, 0x00, -1, 0x01, 0x0003, 0x0003 },
{ 0x00, 0x00, -1, 0x02, 0x0003, 0x0004 },
{ 0x00, 0x00, -1, 0x03, 0x0003, 0x0005 },
{ 0x00, 0x00, -1, 0x03, 0x0003, 0x0006 },
{ 0x00, 0x00, -1, 0x03, 0x0003, 0x0007 },
{ 0x00, 0x00, -1, 0x03, 0x0003, 0x0008 },
{ 0x00, 0x00, -1, 0x03, 0x0003, 0x0009 },
{ 0x00, 0x00, -1, 0x00, 0x0004, 0x0002 },
{ 0x00, 0x00, -1, 0x01, 0x0004, 0x0003 },
{ 0x00, 0x00, -1, 0x02, 0x0004, 0x0004 },
{ 0x00, 0x00, -1, 0x03, 0x0004, 0x0005 },
{ 0x00, 0x00, -1, 0x03, 0x0004, 0x0006 },
{ 0x00, 0x00, -1, 0x03, 0x0004, 0x0007 },
{ 0x00, 0x00, -1, 0x03, 0x0004, 0x0008 },
{ 0x00, 0x00, -1, 0x03, 0x0004, 0x0009 },
{ 0x00, 0x00, -1, 0x00, 0x0005, 0x0002 },
{ 0x00, 0x00, -1, 0x01, 0x0005, 0x0003 },
{ 0x00, 0x00, -1, 0x02, 0x0005, 0x0004 },
{ 0x00, 0x00, -1, 0x03, 0x0005, 0x0005 },
{ 0x00, 0x00, -1, 0x03, 0x0005, 0x0006 },
{ 0x00, 0x00, -1, 0x03, 0x0005, 0x0007 },
{ 0x00, 0x00, -1, 0x03, 0x0005, 0x0008 },
{ 0x00, 0x00, -1, 0x03, 0x0005, 0x0009 },
{ 0x01, 0x00, -1, 0x00, 0x0006, 0x0002 },
{ 0x01, 0x00, -1, 0x01, 0x0006, 0x0003 },
{ 0x01, 0x00, -1, 0x02, 0x0006, 0x0004 },
{ 0x01, 0x00, -1, 0x03, 0x0006, 0x0005 },
{ 0x01, 0x00, -1, 0x03, 0x0006, 0x0006 },
{ 0x01, 0x00, -1, 0x03, 0x0006, 0x0007 },
{ 0x01, 0x00, -1, 0x03, 0x0006, 0x0008 },
{ 0x01, 0x00, -1, 0x03, 0x0006, 0x0009 },
{ 0x01, 0x00, -1, 0x00, 0x0008, 0x0002 },
{ 0x01, 0x00, -1, 0x01, 0x0008, 0x0003 },
{ 0x01, 0x00, -1, 0x02, 0x0008, 0x0004 },
{ 0x01, 0x00, -1, 0x03, 0x0008, 0x0005 },
{ 0x01, 0x00, -1, 0x03, 0x0008, 0x0006 },
{ 0x01, 0x00, -1, 0x03, 0x0008, 0x0007 },
{ 0x01, 0x00, -1, 0x03, 0x0008, 0x0008 },
{ 0x01, 0x00, -1, 0x03, 0x0008, 0x0009 },
{ 0x00, 0x01, -1, 0x03, 0x0000, 0x000a },
{ 0x00, 0x01, -1, 0x03, 0x0000, 0x000c },
{ 0x00, 0x02, -1, 0x03, 0x0000, 0x000e },
{ 0x00, 0x02, -1, 0x03, 0x0000, 0x0012 },
{ 0x00, 0x03, -1, 0x03, 0x0000, 0x0016 },
{ 0x00, 0x03, -1, 0x03, 0x0000, 0x001e },
{ 0x00, 0x04, -1, 0x03, 0x0000, 0x0026 },
{ 0x00, 0x04, -1, 0x03, 0x0000, 0x0036 },
{ 0x00, 0x01, -1, 0x03, 0x0001, 0x000a },
{ 0x00, 0x01, -1, 0x03, 0x0001, 0x000c },
{ 0x00, 0x02, -1, 0x03, 0x0001, 0x000e },
{ 0x00, 0x02, -1, 0x03, 0x0001, 0x0012 },
{ 0x00, 0x03, -1, 0x03, 0x0001, 0x0016 },
{ 0x00, 0x03, -1, 0x03, 0x0001, 0x001e },
{ 0x00, 0x04, -1, 0x03, 0x0001, 0x0026 },
{ 0x00, 0x04, -1, 0x03, 0x0001, 0x0036 },
{ 0x00, 0x01, -1, 0x03, 0x0002, 0x000a },
{ 0x00, 0x01, -1, 0x03, 0x0002, 0x000c },
{ 0x00, 0x02, -1, 0x03, 0x0002, 0x000e },
{ 0x00, 0x02, -1, 0x03, 0x0002, 0x0012 },
{ 0x00, 0x03, -1, 0x03, 0x0002, 0x0016 },
{ 0x00, 0x03, -1, 0x03, 0x0002, 0x001e },
{ 0x00, 0x04, -1, 0x03, 0x0002, 0x0026 },
{ 0x00, 0x04, -1, 0x03, 0x0002, 0x0036 },
{ 0x00, 0x01, -1, 0x03, 0x0003, 0x000a },
{ 0x00, 0x01, -1, 0x03, 0x0003, 0x000c },
{ 0x00, 0x02, -1, 0x03, 0x0003, 0x000e },
{ 0x00, 0x02, -1, 0x03, 0x0003, 0x0012 },
{ 0x00, 0x03, -1, 0x03, 0x0003, 0x0016 },
{ 0x00, 0x03, -1, 0x03, 0x0003, 0x001e },
{ 0x00, 0x04, -1, 0x03, 0x0003, 0x0026 },
{ 0x00, 0x04, -1, 0x03, 0x0003, 0x0036 },
{ 0x00, 0x01, -1, 0x03, 0x0004, 0x000a },
{ 0x00, 0x01, -1, 0x03, 0x0004, 0x000c },
{ 0x00, 0x02, -1, 0x03, 0x0004, 0x000e },
{ 0x00, 0x02, -1, 0x03, 0x0004, 0x0012 },
{ 0x00, 0x03, -1, 0x03, 0x0004, 0x0016 },
{ 0x00, 0x03, -1, 0x03, 0x0004, 0x001e },
{ 0x00, 0x04, -1, 0x03, 0x0004, 0x0026 },
{ 0x00, 0x04, -1, 0x03, 0x0004, 0x0036 },
{ 0x00, 0x01, -1, 0x03, 0x0005, 0x000a },
{ 0x00, 0x01, -1, 0x03, 0x0005, 0x000c },
{ 0x00, 0x02, -1, 0x03, 0x0005, 0x000e },
{ 0x00, 0x02, -1, 0x03, 0x0005, 0x0012 },
{ 0x00, 0x03, -1, 0x03, 0x0005, 0x0016 },
{ 0x00, 0x03, -1, 0x03, 0x0005, 0x001e },
{ 0x00, 0x04, -1, 0x03, 0x0005, 0x0026 },
{ 0x00, 0x04, -1, 0x03, 0x0005, 0x0036 },
{ 0x01, 0x01, -1, 0x03, 0x0006, 0x000a },
{ 0x01, 0x01, -1, 0x03, 0x0006, 0x000c },
{ 0x01, 0x02, -1, 0x03, 0x0006, 0x000e },
{ 0x01, 0x02, -1, 0x03, 0x0006, 0x0012 },
{ 0x01, 0x03, -1, 0x03, 0x0006, 0x0016 },
{ 0x01, 0x03, -1, 0x03, 0x0006, 0x001e },
{ 0x01, 0x04, -1, 0x03, 0x0006, 0x0026 },
{ 0x01, 0x04, -1, 0x03, 0x0006, 0x0036 },
{ 0x01, 0x01, -1, 0x03, 0x0008, 0x000a },
{ 0x01, 0x01, -1, 0x03, 0x0008, 0x000c },
{ 0x01, 0x02, -1, 0x03, 0x0008, 0x000e },
{ 0x01, 0x02, -1, 0x03, 0x0008, 0x0012 },
{ 0x01, 0x03, -1, 0x03, 0x0008, 0x0016 },
{ 0x01, 0x03, -1, 0x03, 0x0008, 0x001e },
{ 0x01, 0x04, -1, 0x03, 0x0008, 0x0026 },
{ 0x01, 0x04, -1, 0x03, 0x0008, 0x0036 },
{ 0x02, 0x00, -1, 0x00, 0x000a, 0x0002 },
{ 0x02, 0x00, -1, 0x01, 0x000a, 0x0003 },
{ 0x02, 0x00, -1, 0x02, 0x000a, 0x0004 },
{ 0x02, 0x00, -1, 0x03, 0x000a, 0x0005 },
{ 0x02, 0x00, -1, 0x03, 0x000a, 0x0006 },
{ 0x02, 0x00, -1, 0x03, 0x000a, 0x0007 },
{ 0x02, 0x00, -1, 0x03, 0x000a, 0x0008 },
{ 0x02, 0x00, -1, 0x03, 0x000a, 0x0009 },
{ 0x02, 0x00, -1, 0x00, 0x000e, 0x0002 },
{ 0x02, 0x00, -1, 0x01, 0x000e, 0x0003 },
{ 0x02, 0x00, -1, 0x02, 0x000e, 0x0004 },
{ 0x02, 0x00, -1, 0x03, 0x000e, 0x0005 },
{ 0x02, 0x00, -1, 0x03, 0x000e, 0x0006 },
{ 0x02, 0x00, -1, 0x03, 0x000e, 0x0007 },
{ 0x02, 0x00, -1, 0x03, 0x000e, 0x0008 },
{ 0x02, 0x00, -1, 0x03, 0x000e, 0x0009 },
{ 0x03, 0x00, -1, 0x00, 0x0012, 0x0002 },
{ 0x03, 0x00, -1, 0x01, 0x0012, 0x0003 },
{ 0x03, 0x00, -1, 0x02, 0x0012, 0x0004 },
{ 0x03, 0x00, -1, 0x03, 0x0012, 0x0005 },
{ 0x03, 0x00, -1, 0x03, 0x0012, 0x0006 },
{ 0x03, 0x00, -1, 0x03, 0x0012, 0x0007 },
{ 0x03, 0x00, -1, 0x03, 0x0012, 0x0008 },
{ 0x03, 0x00, -1, 0x03, 0x0012, 0x0009 },
{ 0x03, 0x00, -1, 0x00, 0x001a, 0x0002 },
{ 0x03, 0x00, -1, 0x01, 0x001a, 0x0003 },
{ 0x03, 0x00, -1, 0x02, 0x001a, 0x0004 },
{ 0x03, 0x00, -1, 0x03, 0x001a, 0x0005 },
{ 0x03, 0x00, -1, 0x03, 0x001a, 0x0006 },
{ 0x03, 0x00, -1, 0x03, 0x001a, 0x0007 },
{ 0x03, 0x00, -1, 0x03, 0x001a, 0x0008 },
{ 0x03, 0x00, -1, 0x03, 0x001a, 0x0009 },
{ 0x04, 0x00, -1, 0x00, 0x0022, 0x0002 },
{ 0x04, 0x00, -1, 0x01, 0x0022, 0x0003 },
{ 0x04, 0x00, -1, 0x02, 0x0022, 0x0004 },
{ 0x04, 0x00, -1, 0x03, 0x0022, 0x0005 },
{ 0x04, 0x00, -1, 0x03, 0x0022, 0x0006 },
{ 0x04, 0x00, -1, 0x03, 0x0022, 0x0007 },
{ 0x04, 0x00, -1, 0x03, 0x0022, 0x0008 },
{ 0x04, 0x00, -1, 0x03, 0x0022, 0x0009 },
{ 0x04, 0x00, -1, 0x00, 0x0032, 0x0002 },
{ 0x04, 0x00, -1, 0x01, 0x0032, 0x0003 },
{ 0x04, 0x00, -1, 0x02, 0x0032, 0x0004 },
{ 0x04, 0x00, -1, 0x03, 0x0032, 0x0005 },
{ 0x04, 0x00, -1, 0x03, 0x0032, 0x0006 },
{ 0x04, 0x00, -1, 0x03, 0x0032, 0x0007 },
{ 0x04, 0x00, -1, 0x03, 0x0032, 0x0008 },
{ 0x04, 0x00, -1, 0x03, 0x0032, 0x0009 },
{ 0x05, 0x00, -1, 0x00, 0x0042, 0x0002 },
{ 0x05, 0x00, -1, 0x01, 0x0042, 0x0003 },
{ 0x05, 0x00, -1, 0x02, 0x0042, 0x0004 },
{ 0x05, 0x00, -1, 0x03, 0x0042, 0x0005 },
{ 0x05, 0x00, -1, 0x03, 0x0042, 0x0006 },
{ 0x05, 0x00, -1, 0x03, 0x0042, 0x0007 },
{ 0x05, 0x00, -1, 0x03, 0x0042, 0x0008 },
{ 0x05, 0x00, -1, 0x03, 0x0042, 0x0009 },
{ 0x05, 0x00, -1, 0x00, 0x0062, 0x0002 },
{ 0x05, 0x00, -1, 0x01, 0x0062, 0x0003 },
{ 0x05, 0x00, -1, 0x02, 0x0062, 0x0004 },
{ 0x05, 0x00, -1, 0x03, 0x0062, 0x0005 },
{ 0x05, 0x00, -1, 0x03, 0x0062, 0x0006 },
{ 0x05, 0x00, -1, 0x03, 0x0062, 0x0007 },
{ 0x05, 0x00, -1, 0x03, 0x0062, 0x0008 },
{ 0x05, 0x00, -1, 0x03, 0x0062, 0x0009 },
{ 0x02, 0x01, -1, 0x03, 0x000a, 0x000a },
{ 0x02, 0x01, -1, 0x03, 0x000a, 0x000c },
{ 0x02, 0x02, -1, 0x03, 0x000a, 0x000e },
{ 0x02, 0x02, -1, 0x03, 0x000a, 0x0012 },
{ 0x02, 0x03, -1, 0x03, 0x000a, 0x0016 },
{ 0x02, 0x03, -1, 0x03, 0x000a, 0x001e },
{ 0x02, 0x04, -1, 0x03, 0x000a, 0x0026 },
{ 0x02, 0x04, -1, 0x03, 0x000a, 0x0036 },
{ 0x02, 0x01, -1, 0x03, 0x000e, 0x000a },
{ 0x02, 0x01, -1, 0x03, 0x000e, 0x000c },
{ 0x02, 0x02, -1, 0x03, 0x000e, 0x000e },
{ 0x02, 0x02, -1, 0x03, 0x000e, 0x0012 },
{ 0x02, 0x03, -1, 0x03, 0x000e, 0x0016 },
{ 0x02, 0x03, -1, 0x03, 0x000e, 0x001e },
{ 0x02, 0x04, -1, 0x03, 0x000e, 0x0026 },
{ 0x02, 0x04, -1, 0x03, 0x000e, 0x0036 },
{ 0x03, 0x01, -1, 0x03, 0x0012, 0x000a },
{ 0x03, 0x01, -1, 0x03, 0x0012, 0x000c },
{ 0x03, 0x02, -1, 0x03, 0x0012, 0x000e },
{ 0x03, 0x02, -1, 0x03, 0x0012, 0x0012 },
{ 0x03, 0x03, -1, 0x03, 0x0012, 0x0016 },
{ 0x03, 0x03, -1, 0x03, 0x0012, 0x001e },
{ 0x03, 0x04, -1, 0x03, 0x0012, 0x0026 },
{ 0x03, 0x04, -1, 0x03, 0x0012, 0x0036 },
{ 0x03, 0x01, -1, 0x03, 0x001a, 0x000a },
{ 0x03, 0x01, -1, 0x03, 0x001a, 0x000c },
{ 0x03, 0x02, -1, 0x03, 0x001a, 0x000e },
{ 0x03, 0x02, -1, 0x03, 0x001a, 0x0012 },
{ 0x03, 0x03, -1, 0x03, 0x001a, 0x0016 },
{ 0x03, 0x03, -1, 0x03, 0x001a, 0x001e },
{ 0x03, 0x04, -1, 0x03, 0x001a, 0x0026 },
{ 0x03, 0x04, -1, 0x03, 0x001a, 0x0036 },
{ 0x04, 0x01, -1, 0x03, 0x0022, 0x000a },
{ 0x04, 0x01, -1, 0x03, 0x0022, 0x000c },
{ 0x04, 0x02, -1, 0x03, 0x0022, 0x000e },
{ 0x04, 0x02, -1, 0x03, 0x0022, 0x0012 },
{ 0x04, 0x03, -1, 0x03, 0x0022, 0x0016 },
{ 0x04, 0x03, -1, 0x03, 0x0022, 0x001e },
{ 0x04, 0x04, -1, 0x03, 0x0022, 0x0026 },
{ 0x04, 0x04, -1, 0x03, 0x0022, 0x0036 },
{ 0x04, 0x01, -1, 0x03, 0x0032, 0x000a },
{ 0x04, 0x01, -1, 0x03, 0x0032, 0x000c },
{ 0x04, 0x02, -1, 0x03, 0x0032, 0x000e },
{ 0x04, 0x02, -1, 0x03, 0x0032, 0x0012 },
{ 0x04, 0x03, -1, 0x03, 0x0032, 0x0016 },
{ 0x04, 0x03, -1, 0x03, 0x0032, 0x001e },
{ 0x04, 0x04, -1, 0x03, 0x0032, 0x0026 },
{ 0x04, 0x04, -1, 0x03, 0x0032, 0x0036 },
{ 0x05, 0x01, -1, 0x03, 0x0042, 0x000a },
{ 0x05, 0x01, -1, 0x03, 0x0042, 0x000c },
{ 0x05, 0x02, -1, 0x03, 0x0042, 0x000e },
{ 0x05, 0x02, -1, 0x03, 0x0042, 0x0012 },
{ 0x05, 0x03, -1, 0x03, 0x0042, 0x0016 },
{ 0x05, 0x03, -1, 0x03, 0x0042, 0x001e },
{ 0x05, 0x04, -1, 0x03, 0x0042, 0x0026 },
{ 0x05, 0x04, -1, 0x03, 0x0042, 0x0036 },
{ 0x05, 0x01, -1, 0x03, 0x0062, 0x000a },
{ 0x05, 0x01, -1, 0x03, 0x0062, 0x000c },
{ 0x05, 0x02, -1, 0x03, 0x0062, 0x000e },
{ 0x05, 0x02, -1, 0x03, 0x0062, 0x0012 },
{ 0x05, 0x03, -1, 0x03, 0x0062, 0x0016 },
{ 0x05, 0x03, -1, 0x03, 0x0062, 0x001e },
{ 0x05, 0x04, -1, 0x03, 0x0062, 0x0026 },
{ 0x05, 0x04, -1, 0x03, 0x0062, 0x0036 },
{ 0x00, 0x05, -1, 0x03, 0x0000, 0x0046 },
{ 0x00, 0x05, -1, 0x03, 0x0000, 0x0066 },
{ 0x00, 0x06, -1, 0x03, 0x0000, 0x0086 },
{ 0x00, 0x07, -1, 0x03, 0x0000, 0x00c6 },
{ 0x00, 0x08, -1, 0x03, 0x0000, 0x0146 },
{ 0x00, 0x09, -1, 0x03, 0x0000, 0x0246 },
{ 0x00, 0x0a, -1, 0x03, 0x0000, 0x0446 },
{ 0x00, 0x18, -1, 0x03, 0x0000, 0x0846 },
{ 0x00, 0x05, -1, 0x03, 0x0001, 0x0046 },
{ 0x00, 0x05, -1, 0x03, 0x0001, 0x0066 },
{ 0x00, 0x06, -1, 0x03, 0x0001, 0x0086 },
{ 0x00, 0x07, -1, 0x03, 0x0001, 0x00c6 },
{ 0x00, 0x08, -1, 0x03, 0x0001, 0x0146 },
{ 0x00, 0x09, -1, 0x03, 0x0001, 0x0246 },
{ 0x00, 0x0a, -1, 0x03, 0x0001, 0x0446 },
{ 0x00, 0x18, -1, 0x03, 0x0001, 0x0846 },
{ 0x00, 0x05, -1, 0x03, 0x0002, 0x0046 },
{ 0x00, 0x05, -1, 0x03, 0x0002, 0x0066 },
{ 0x00, 0x06, -1, 0x03, 0x0002, 0x0086 },
{ 0x00, 0x07, -1, 0x03, 0x0002, 0x00c6 },
{ 0x00, 0x08, -1, 0x03, 0x0002, 0x0146 },
{ 0x00, 0x09, -1, 0x03, 0x0002, 0x0246 },
{ 0x00, 0x0a, -1, 0x03, 0x0002, 0x0446 },
{ 0x00, 0x18, -1, 0x03, 0x0002, 0x0846 },
{ 0x00, 0x05, -1, 0x03, 0x0003, 0x0046 },
{ 0x00, 0x05, -1, 0x03, 0x0003, 0x0066 },
{ 0x00, 0x06, -1, 0x03, 0x0003, 0x0086 },
{ 0x00, 0x07, -1, 0x03, 0x0003, 0x00c6 },
{ 0x00, 0x08, -1, 0x03, 0x0003, 0x0146 },
{ 0x00, 0x09, -1, 0x03, 0x0003, 0x0246 },
{ 0x00, 0x0a, -1, 0x03, 0x0003, 0x0446 },
{ 0x00, 0x18, -1, 0x03, 0x0003, 0x0846 },
{ 0x00, 0x05, -1, 0x03, 0x0004, 0x0046 },
{ 0x00, 0x05, -1, 0x03, 0x0004, 0x0066 },
{ 0x00, 0x06, -1, 0x03, 0x0004, 0x0086 },
{ 0x00, 0x07, -1, 0x03, 0x0004, 0x00c6 },
{ 0x00, 0x08, -1, 0x03, 0x0004, 0x0146 },
{ 0x00, 0x09, -1, 0x03, 0x0004, 0x0246 },
{ 0x00, 0x0a, -1, 0x03, 0x0004, 0x0446 },
{ 0x00, 0x18, -1, 0x03, 0x0004, 0x0846 },
{ 0x00, 0x05, -1, 0x03, 0x0005, 0x0046 },
{ 0x00, 0x05, -1, 0x03, 0x0005, 0x0066 },
{ 0x00, 0x06, -1, 0x03, 0x0005, 0x0086 },
{ 0x00, 0x07, -1, 0x03, 0x0005, 0x00c6 },
{ 0x00, 0x08, -1, 0x03, 0x0005, 0x0146 },
{ 0x00, 0x09, -1, 0x03, 0x0005, 0x0246 },
{ 0x00, 0x0a, -1, 0x03, 0x0005, 0x0446 },
{ 0x00, 0x18, -1, 0x03, 0x0005, 0x0846 },
{ 0x01, 0x05, -1, 0x03, 0x0006, 0x0046 },
{ 0x01, 0x05, -1, 0x03, 0x0006, 0x0066 },
{ 0x01, 0x06, -1, 0x03, 0x0006, 0x0086 },
{ 0x01, 0x07, -1, 0x03, 0x0006, 0x00c6 },
{ 0x01, 0x08, -1, 0x03, 0x0006, 0x0146 },
{ 0x01, 0x09, -1, 0x03, 0x0006, 0x0246 },
{ 0x01, 0x0a, -1, 0x03, 0x0006, 0x0446 },
{ 0x01, 0x18, -1, 0x03, 0x0006, 0x0846 },
{ 0x01, 0x05, -1, 0x03, 0x0008, 0x0046 },
{ 0x01, 0x05, -1, 0x03, 0x0008, 0x0066 },
{ 0x01, 0x06, -1, 0x03, 0x0008, 0x0086 },
{ 0x01, 0x07, -1, 0x03, 0x0008, 0x00c6 },
{ 0x01, 0x08, -1, 0x03, 0x0008, 0x0146 },
{ 0x01, 0x09, -1, 0x03, 0x0008, 0x0246 },
{ 0x01, 0x0a, -1, 0x03, 0x0008, 0x0446 },
{ 0x01, 0x18, -1, 0x03, 0x0008, 0x0846 },
{ 0x06, 0x00, -1, 0x00, 0x0082, 0x0002 },
{ 0x06, 0x00, -1, 0x01, 0x0082, 0x0003 },
{ 0x06, 0x00, -1, 0x02, 0x0082, 0x0004 },
{ 0x06, 0x00, -1, 0x03, 0x0082, 0x0005 },
{ 0x06, 0x00, -1, 0x03, 0x0082, 0x0006 },
{ 0x06, 0x00, -1, 0x03, 0x0082, 0x0007 },
{ 0x06, 0x00, -1, 0x03, 0x0082, 0x0008 },
{ 0x06, 0x00, -1, 0x03, 0x0082, 0x0009 },
{ 0x07, 0x00, -1, 0x00, 0x00c2, 0x0002 },
{ 0x07, 0x00, -1, 0x01, 0x00c2, 0x0003 },
{ 0x07, 0x00, -1, 0x02, 0x00c2, 0x0004 },
{ 0x07, 0x00, -1, 0x03, 0x00c2, 0x0005 },
{ 0x07, 0x00, -1, 0x03, 0x00c2, 0x0006 },
{ 0x07, 0x00, -1, 0x03, 0x00c2, 0x0007 },
{ 0x07, 0x00, -1, 0x03, 0x00c2, 0x0008 },
{ 0x07, 0x00, -1, 0x03, 0x00c2, 0x0009 },
{ 0x08, 0x00, -1, 0x00, 0x0142, 0x0002 },
{ 0x08, 0x00, -1, 0x01, 0x0142, 0x0003 },
{ 0x08, 0x00, -1, 0x02, 0x0142, 0x0004 },
{ 0x08, 0x00, -1, 0x03, 0x0142, 0x0005 },
{ 0x08, 0x00, -1, 0x03, 0x0142, 0x0006 },
{ 0x08, 0x00, -1, 0x03, 0x0142, 0x0007 },
{ 0x08, 0x00, -1, 0x03, 0x0142, 0x0008 },
{ 0x08, 0x00, -1, 0x03, 0x0142, 0x0009 },
{ 0x09, 0x00, -1, 0x00, 0x0242, 0x0002 },
{ 0x09, 0x00, -1, 0x01, 0x0242, 0x0003 },
{ 0x09, 0x00, -1, 0x02, 0x0242, 0x0004 },
{ 0x09, 0x00, -1, 0x03, 0x0242, 0x0005 },
{ 0x09, 0x00, -1, 0x03, 0x0242, 0x0006 },
{ 0x09, 0x00, -1, 0x03, 0x0242, 0x0007 },
{ 0x09, 0x00, -1, 0x03, 0x0242, 0x0008 },
{ 0x09, 0x00, -1, 0x03, 0x0242, 0x0009 },
{ 0x0a, 0x00, -1, 0x00, 0x0442, 0x0002 },
{ 0x0a, 0x00, -1, 0x01, 0x0442, 0x0003 },
{ 0x0a, 0x00, -1, 0x02, 0x0442, 0x0004 },
{ 0x0a, 0x00, -1, 0x03, 0x0442, 0x0005 },
{ 0x0a, 0x00, -1, 0x03, 0x0442, 0x0006 },
{ 0x0a, 0x00, -1, 0x03, 0x0442, 0x0007 },
{ 0x0a, 0x00, -1, 0x03, 0x0442, 0x0008 },
{ 0x0a, 0x00, -1, 0x03, 0x0442, 0x0009 },
{ 0x0c, 0x00, -1, 0x00, 0x0842, 0x0002 },
{ 0x0c, 0x00, -1, 0x01, 0x0842, 0x0003 },
{ 0x0c, 0x00, -1, 0x02, 0x0842, 0x0004 },
{ 0x0c, 0x00, -1, 0x03, 0x0842, 0x0005 },
{ 0x0c, 0x00, -1, 0x03, 0x0842, 0x0006 },
{ 0x0c, 0x00, -1, 0x03, 0x0842, 0x0007 },
{ 0x0c, 0x00, -1, 0x03, 0x0842, 0x0008 },
{ 0x0c, 0x00, -1, 0x03, 0x0842, 0x0009 },
{ 0x0e, 0x00, -1, 0x00, 0x1842, 0x0002 },
{ 0x0e, 0x00, -1, 0x01, 0x1842, 0x0003 },
{ 0x0e, 0x00, -1, 0x02, 0x1842, 0x0004 },
{ 0x0e, 0x00, -1, 0x03, 0x1842, 0x0005 },
{ 0x0e, 0x00, -1, 0x03, 0x1842, 0x0006 },
{ 0x0e, 0x00, -1, 0x03, 0x1842, 0x0007 },
{ 0x0e, 0x00, -1, 0x03, 0x1842, 0x0008 },
{ 0x0e, 0x00, -1, 0x03, 0x1842, 0x0009 },
{ 0x18, 0x00, -1, 0x00, 0x5842, 0x0002 },
{ 0x18, 0x00, -1, 0x01, 0x5842, 0x0003 },
{ 0x18, 0x00, -1, 0x02, 0x5842, 0x0004 },
{ 0x18, 0x00, -1, 0x03, 0x5842, 0x0005 },
{ 0x18, 0x00, -1, 0x03, 0x5842, 0x0006 },
{ 0x18, 0x00, -1, 0x03, 0x5842, 0x0007 },
{ 0x18, 0x00, -1, 0x03, 0x5842, 0x0008 },
{ 0x18, 0x00, -1, 0x03, 0x5842, 0x0009 },
{ 0x02, 0x05, -1, 0x03, 0x000a, 0x0046 },
{ 0x02, 0x05, -1, 0x03, 0x000a, 0x0066 },
{ 0x02, 0x06, -1, 0x03, 0x000a, 0x0086 },
{ 0x02, 0x07, -1, 0x03, 0x000a, 0x00c6 },
{ 0x02, 0x08, -1, 0x03, 0x000a, 0x0146 },
{ 0x02, 0x09, -1, 0x03, 0x000a, 0x0246 },
{ 0x02, 0x0a, -1, 0x03, 0x000a, 0x0446 },
{ 0x02, 0x18, -1, 0x03, 0x000a, 0x0846 },
{ 0x02, 0x05, -1, 0x03, 0x000e, 0x0046 },
{ 0x02, 0x05, -1, 0x03, 0x000e, 0x0066 },
{ 0x02, 0x06, -1, 0x03, 0x000e, 0x0086 },
{ 0x02, 0x07, -1, 0x03, 0x000e, 0x00c6 },
{ 0x02, 0x08, -1, 0x03, 0x000e, 0x0146 },
{ 0x02, 0x09, -1, 0x03, 0x000e, 0x0246 },
{ 0x02, 0x0a, -1, 0x03, 0x000e, 0x0446 },
{ 0x02, 0x18, -1, 0x03, 0x000e, 0x0846 },
{ 0x03, 0x05, -1, 0x03, 0x0012, 0x0046 },
{ 0x03, 0x05, -1, 0x03, 0x0012, 0x0066 },
{ 0x03, 0x06, -1, 0x03, 0x0012, 0x0086 },
{ 0x03, 0x07, -1, 0x03, 0x0012, 0x00c6 },
{ 0x03, 0x08, -1, 0x03, 0x0012, 0x0146 },
{ 0x03, 0x09, -1, 0x03, 0x0012, 0x0246 },
{ 0x03, 0x0a, -1, 0x03, 0x0012, 0x0446 },
{ 0x03, 0x18, -1, 0x03, 0x0012, 0x0846 },
{ 0x03, 0x05, -1, 0x03, 0x001a, 0x0046 },
{ 0x03, 0x05, -1, 0x03, 0x001a, 0x0066 },
{ 0x03, 0x06, -1, 0x03, 0x001a, 0x0086 },
{ 0x03, 0x07, -1, 0x03, 0x001a, 0x00c6 },
{ 0x03, 0x08, -1, 0x03, 0x001a, 0x0146 },
{ 0x03, 0x09, -1, 0x03, 0x001a, 0x0246 },
{ 0x03, 0x0a, -1, 0x03, 0x001a, 0x0446 },
{ 0x03, 0x18, -1, 0x03, 0x001a, 0x0846 },
{ 0x04, 0x05, -1, 0x03, 0x0022, 0x0046 },
{ 0x04, 0x05, -1, 0x03, 0x0022, 0x0066 },
{ 0x04, 0x06, -1, 0x03, 0x0022, 0x0086 },
{ 0x04, 0x07, -1, 0x03, 0x0022, 0x00c6 },
{ 0x04, 0x08, -1, 0x03, 0x0022, 0x0146 },
{ 0x04, 0x09, -1, 0x03, 0x0022, 0x0246 },
{ 0x04, 0x0a, -1, 0x03, 0x0022, 0x0446 },
{ 0x04, 0x18, -1, 0x03, 0x0022, 0x0846 },
{ 0x04, 0x05, -1, 0x03, 0x0032, 0x0046 },
{ 0x04, 0x05, -1, 0x03, 0x0032, 0x0066 },
{ 0x04, 0x06, -1, 0x03, 0x0032, 0x0086 },
{ 0x04, 0x07, -1, 0x03, 0x0032, 0x00c6 },
{ 0x04, 0x08, -1, 0x03, 0x0032, 0x0146 },
{ 0x04, 0x09, -1, 0x03, 0x0032, 0x0246 },
{ 0x04, 0x0a, -1, 0x03, 0x0032, 0x0446 },
{ 0x04, 0x18, -1, 0x03, 0x0032, 0x0846 },
{ 0x05, 0x05, -1, 0x03, 0x0042, 0x0046 },
{ 0x05, 0x05, -1, 0x03, 0x0042, 0x0066 },
{ 0x05, 0x06, -1, 0x03, 0x0042, 0x0086 },
{ 0x05, 0x07, -1, 0x03, 0x0042, 0x00c6 },
{ 0x05, 0x08, -1, 0x03, 0x0042, 0x0146 },
{ 0x05, 0x09, -1, 0x03, 0x0042, 0x0246 },
{ 0x05, 0x0a, -1, 0x03, 0x0042, 0x0446 },
{ 0x05, 0x18, -1, 0x03, 0x0042, 0x0846 },
{ 0x05, 0x05, -1, 0x03, 0x0062, 0x0046 },
{ 0x05, 0x05, -1, 0x03, 0x0062, 0x0066 },
{ 0x05, 0x06, -1, 0x03, 0x0062, 0x0086 },
{ 0x05, 0x07, -1, 0x03, 0x0062, 0x00c6 },
{ 0x05, 0x08, -1, 0x03, 0x0062, 0x0146 },
{ 0x05, 0x09, -1, 0x03, 0x0062, 0x0246 },
{ 0x05, 0x0a, -1, 0x03, 0x0062, 0x0446 },
{ 0x05, 0x18, -1, 0x03, 0x0062, 0x0846 },
{ 0x06, 0x01, -1, 0x03, 0x0082, 0x000a },
{ 0x06, 0x01, -1, 0x03, 0x0082, 0x000c },
{ 0x06, 0x02, -1, 0x03, 0x0082, 0x000e },
{ 0x06, 0x02, -1, 0x03, 0x0082, 0x0012 },
{ 0x06, 0x03, -1, 0x03, 0x0082, 0x0016 },
{ 0x06, 0x03, -1, 0x03, 0x0082, 0x001e },
{ 0x06, 0x04, -1, 0x03, 0x0082, 0x0026 },
{ 0x06, 0x04, -1, 0x03, 0x0082, 0x0036 },
{ 0x07, 0x01, -1, 0x03, 0x00c2, 0x000a },
{ 0x07, 0x01, -1, 0x03, 0x00c2, 0x000c },
{ 0x07, 0x02, -1, 0x03, 0x00c2, 0x000e },
{ 0x07, 0x02, -1, 0x03, 0x00c2, 0x0012 },
{ 0x07, 0x03, -1, 0x03, 0x00c2, 0x0016 },
{ 0x07, 0x03, -1, 0x03, 0x00c2, 0x001e },
{ 0x07, 0x04, -1, 0x03, 0x00c2, 0x0026 },
{ 0x07, 0x04, -1, 0x03, 0x00c2, 0x0036 },
{ 0x08, 0x01, -1, 0x03, 0x0142, 0x000a },
{ 0x08, 0x01, -1, 0x03, 0x0142, 0x000c },
{ 0x08, 0x02, -1, 0x03, 0x0142, 0x000e },
{ 0x08, 0x02, -1, 0x03, 0x0142, 0x0012 },
{ 0x08, 0x03, -1, 0x03, 0x0142, 0x0016 },
{ 0x08, 0x03, -1, 0x03, 0x0142, 0x001e },
{ 0x08, 0x04, -1, 0x03, 0x0142, 0x0026 },
{ 0x08, 0x04, -1, 0x03, 0x0142, 0x0036 },
{ 0x09, 0x01, -1, 0x03, 0x0242, 0x000a },
{ 0x09, 0x01, -1, 0x03, 0x0242, 0x000c },
{ 0x09, 0x02, -1, 0x03, 0x0242, 0x000e },
{ 0x09, 0x02, -1, 0x03, 0x0242, 0x0012 },
{ 0x09, 0x03, -1, 0x03, 0x0242, 0x0016 },
{ 0x09, 0x03, -1, 0x03, 0x0242, 0x001e },
{ 0x09, 0x04, -1, 0x03, 0x0242, 0x0026 },
{ 0x09, 0x04, -1, 0x03, 0x0242, 0x0036 },
{ 0x0a, 0x01, -1, 0x03, 0x0442, 0x000a },
{ 0x0a, 0x01, -1, 0x03, 0x0442, 0x000c },
{ 0x0a, 0x02, -1, 0x03, 0x0442, 0x000e },
{ 0x0a, 0x02, -1, 0x03, 0x0442, 0x0012 },
{ 0x0a, 0x03, -1, 0x03, 0x0442, 0x0016 },
{ 0x0a, 0x03, -1, 0x03, 0x0442, 0x001e },
{ 0x0a, 0x04, -1, 0x03, 0x0442, 0x0026 },
{ 0x0a, 0x04, -1, 0x03, 0x0442, 0x0036 },
{ 0x0c, 0x01, -1, 0x03, 0x0842, 0x000a },
{ 0x0c, 0x01, -1, 0x03, 0x0842, 0x000c },
{ 0x0c, 0x02, -1, 0x03, 0x0842, 0x000e },
{ 0x0c, 0x02, -1, 0x03, 0x0842, 0x0012 },
{ 0x0c, 0x03, -1, 0x03, 0x0842, 0x0016 },
{ 0x0c, 0x03, -1, 0x03, 0x0842, 0x001e },
{ 0x0c, 0x04, -1, 0x03, 0x0842, 0x0026 },
{ 0x0c, 0x04, -1, 0x03, 0x0842, 0x0036 },
{ 0x0e, 0x01, -1, 0x03, 0x1842, 0x000a },
{ 0x0e, 0x01, -1, 0x03, 0x1842, 0x000c },
{ 0x0e, 0x02, -1, 0x03, 0x1842, 0x000e },
{ 0x0e, 0x02, -1, 0x03, 0x1842, 0x0012 },
{ 0x0e, 0x03, -1, 0x03, 0x1842, 0x0016 },
{ 0x0e, 0x03, -1, 0x03, 0x1842, 0x001e },
{ 0x0e, 0x04, -1, 0x03, 0x1842, 0x0026 },
{ 0x0e, 0x04, -1, 0x03, 0x1842, 0x0036 },
{ 0x18, 0x01, -1, 0x03, 0x5842, 0x000a },
{ 0x18, 0x01, -1, 0x03, 0x5842, 0x000c },
{ 0x18, 0x02, -1, 0x03, 0x5842, 0x000e },
{ 0x18, 0x02, -1, 0x03, 0x5842, 0x0012 },
{ 0x18, 0x03, -1, 0x03, 0x5842, 0x0016 },
{ 0x18, 0x03, -1, 0x03, 0x5842, 0x001e },
{ 0x18, 0x04, -1, 0x03, 0x5842, 0x0026 },
{ 0x18, 0x04, -1, 0x03, 0x5842, 0x0036 },
{ 0x06, 0x05, -1, 0x03, 0x0082, 0x0046 },
{ 0x06, 0x05, -1, 0x03, 0x0082, 0x0066 },
{ 0x06, 0x06, -1, 0x03, 0x0082, 0x0086 },
{ 0x06, 0x07, -1, 0x03, 0x0082, 0x00c6 },
{ 0x06, 0x08, -1, 0x03, 0x0082, 0x0146 },
{ 0x06, 0x09, -1, 0x03, 0x0082, 0x0246 },
{ 0x06, 0x0a, -1, 0x03, 0x0082, 0x0446 },
{ 0x06, 0x18, -1, 0x03, 0x0082, 0x0846 },
{ 0x07, 0x05, -1, 0x03, 0x00c2, 0x0046 },
{ 0x07, 0x05, -1, 0x03, 0x00c2, 0x0066 },
{ 0x07, 0x06, -1, 0x03, 0x00c2, 0x0086 },
{ 0x07, 0x07, -1, 0x03, 0x00c2, 0x00c6 },
{ 0x07, 0x08, -1, 0x03, 0x00c2, 0x0146 },
{ 0x07, 0x09, -1, 0x03, 0x00c2, 0x0246 },
{ 0x07, 0x0a, -1, 0x03, 0x00c2, 0x0446 },
{ 0x07, 0x18, -1, 0x03, 0x00c2, 0x0846 },
{ 0x08, 0x05, -1, 0x03, 0x0142, 0x0046 },
{ 0x08, 0x05, -1, 0x03, 0x0142, 0x0066 },
{ 0x08, 0x06, -1, 0x03, 0x0142, 0x0086 },
{ 0x08, 0x07, -1, 0x03, 0x0142, 0x00c6 },
{ 0x08, 0x08, -1, 0x03, 0x0142, 0x0146 },
{ 0x08, 0x09, -1, 0x03, 0x0142, 0x0246 },
{ 0x08, 0x0a, -1, 0x03, 0x0142, 0x0446 },
{ 0x08, 0x18, -1, 0x03, 0x0142, 0x0846 },
{ 0x09, 0x05, -1, 0x03, 0x0242, 0x0046 },
{ 0x09, 0x05, -1, 0x03, 0x0242, 0x0066 },
{ 0x09, 0x06, -1, 0x03, 0x0242, 0x0086 },
{ 0x09, 0x07, -1, 0x03, 0x0242, 0x00c6 },
{ 0x09, 0x08, -1, 0x03, 0x0242, 0x0146 },
{ 0x09, 0x09, -1, 0x03, 0x0242, 0x0246 },
{ 0x09, 0x0a, -1, 0x03, 0x0242, 0x0446 },
{ 0x09, 0x18, -1, 0x03, 0x0242, 0x0846 },
{ 0x0a, 0x05, -1, 0x03, 0x0442, 0x0046 },
{ 0x0a, 0x05, -1, 0x03, 0x0442, 0x0066 },
{ 0x0a, 0x06, -1, 0x03, 0x0442, 0x0086 },
{ 0x0a, 0x07, -1, 0x03, 0x0442, 0x00c6 },
{ 0x0a, 0x08, -1, 0x03, 0x0442, 0x0146 },
{ 0x0a, 0x09, -1, 0x03, 0x0442, 0x0246 },
{ 0x0a, 0x0a, -1, 0x03, 0x0442, 0x0446 },
{ 0x0a, 0x18, -1, 0x03, 0x0442, 0x0846 },
{ 0x0c, 0x05, -1, 0x03, 0x0842, 0x0046 },
{ 0x0c, 0x05, -1, 0x03, 0x0842, 0x0066 },
{ 0x0c, 0x06, -1, 0x03, 0x0842, 0x0086 },
{ 0x0c, 0x07, -1, 0x03, 0x0842, 0x00c6 },
{ 0x0c, 0x08, -1, 0x03, 0x0842, 0x0146 },
{ 0x0c, 0x09, -1, 0x03, 0x0842, 0x0246 },
{ 0x0c, 0x0a, -1, 0x03, 0x0842, 0x0446 },
{ 0x0c, 0x18, -1, 0x03, 0x0842, 0x0846 },
{ 0x0e, 0x05, -1, 0x03, 0x1842, 0x0046 },
{ 0x0e, 0x05, -1, 0x03, 0x1842, 0x0066 },
{ 0x0e, 0x06, -1, 0x03, 0x1842, 0x0086 },
{ 0x0e, 0x07, -1, 0x03, 0x1842, 0x00c6 },
{ 0x0e, 0x08, -1, 0x03, 0x1842, 0x0146 },
{ 0x0e, 0x09, -1, 0x03, 0x1842, 0x0246 },
{ 0x0e, 0x0a, -1, 0x03, 0x1842, 0x0446 },
{ 0x0e, 0x18, -1, 0x03, 0x1842, 0x0846 },
{ 0x18, 0x05, -1, 0x03, 0x5842, 0x0046 },
{ 0x18, 0x05, -1, 0x03, 0x5842, 0x0066 },
{ 0x18, 0x06, -1, 0x03, 0x5842, 0x0086 },
{ 0x18, 0x07, -1, 0x03, 0x5842, 0x00c6 },
{ 0x18, 0x08, -1, 0x03, 0x5842, 0x0146 },
{ 0x18, 0x09, -1, 0x03, 0x5842, 0x0246 },
{ 0x18, 0x0a, -1, 0x03, 0x5842, 0x0446 },
{ 0x18, 0x18, -1, 0x03, 0x5842, 0x0846 },
};
#endif /* BROTLI_DEC_PREFIX_H_ */
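
The table above is the decoder's command lookup table. Assuming its rows follow the layout used elsewhere in the brotli decoder — {insert-length extra bits, copy-length extra bits, distance code, context, insert-length offset, copy-length offset} — decoding a command length is just the table offset plus that many extra bits read from the stream. A minimal sketch under that assumption (the struct, field names and read_bits helper are illustrative, not the decoder's actual API):

#include <stdint.h>

/* Illustrative row layout inferred from the table above; not the decoder's
   real type names. */
typedef struct {
  uint8_t  insert_len_extra_bits;
  uint8_t  copy_len_extra_bits;
  int8_t   distance_code;
  uint8_t  context;
  uint16_t insert_len_offset;
  uint16_t copy_len_offset;
} CmdRow;

/* Stand-in for the decoder's bit reader. */
extern uint32_t read_bits(int n);

/* A command symbol expands into lengths by adding the extra bits to the
   table offsets; e.g. the last row {0x18, 0x18, ..., 0x5842, 0x0846}
   covers insert lengths from 22594 upward. */
static void decode_command(const CmdRow* row,
                           uint32_t* insert_len, uint32_t* copy_len) {
  *insert_len = row->insert_len_offset + read_bits(row->insert_len_extra_bits);
  *copy_len   = row->copy_len_offset   + read_bits(row->copy_len_extra_bits);
}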

View File

@ -1,251 +0,0 @@
/* Copyright 2015 Google Inc. All Rights Reserved.
Distributed under MIT license.
See file LICENSE for detail or copy at https://opensource.org/licenses/MIT
*/
/* Brotli state for partial streaming decoding. */
#ifndef BROTLI_DEC_STATE_H_
#define BROTLI_DEC_STATE_H_
#include "../common/constants.h"
#include "../common/dictionary.h"
#include <brotli/types.h>
#include "./bit_reader.h"
#include "./huffman.h"
#include "./port.h"
#if defined(__cplusplus) || defined(c_plusplus)
extern "C" {
#endif
typedef enum {
BROTLI_STATE_UNINITED,
BROTLI_STATE_METABLOCK_BEGIN,
BROTLI_STATE_METABLOCK_HEADER,
BROTLI_STATE_METABLOCK_HEADER_2,
BROTLI_STATE_CONTEXT_MODES,
BROTLI_STATE_COMMAND_BEGIN,
BROTLI_STATE_COMMAND_INNER,
BROTLI_STATE_COMMAND_POST_DECODE_LITERALS,
BROTLI_STATE_COMMAND_POST_WRAP_COPY,
BROTLI_STATE_UNCOMPRESSED,
BROTLI_STATE_METADATA,
BROTLI_STATE_COMMAND_INNER_WRITE,
BROTLI_STATE_METABLOCK_DONE,
BROTLI_STATE_COMMAND_POST_WRITE_1,
BROTLI_STATE_COMMAND_POST_WRITE_2,
BROTLI_STATE_HUFFMAN_CODE_0,
BROTLI_STATE_HUFFMAN_CODE_1,
BROTLI_STATE_HUFFMAN_CODE_2,
BROTLI_STATE_HUFFMAN_CODE_3,
BROTLI_STATE_CONTEXT_MAP_1,
BROTLI_STATE_CONTEXT_MAP_2,
BROTLI_STATE_TREE_GROUP,
BROTLI_STATE_DONE
} BrotliRunningState;
typedef enum {
BROTLI_STATE_METABLOCK_HEADER_NONE,
BROTLI_STATE_METABLOCK_HEADER_EMPTY,
BROTLI_STATE_METABLOCK_HEADER_NIBBLES,
BROTLI_STATE_METABLOCK_HEADER_SIZE,
BROTLI_STATE_METABLOCK_HEADER_UNCOMPRESSED,
BROTLI_STATE_METABLOCK_HEADER_RESERVED,
BROTLI_STATE_METABLOCK_HEADER_BYTES,
BROTLI_STATE_METABLOCK_HEADER_METADATA
} BrotliRunningMetablockHeaderState;
typedef enum {
BROTLI_STATE_UNCOMPRESSED_NONE,
BROTLI_STATE_UNCOMPRESSED_WRITE
} BrotliRunningUncompressedState;
typedef enum {
BROTLI_STATE_TREE_GROUP_NONE,
BROTLI_STATE_TREE_GROUP_LOOP
} BrotliRunningTreeGroupState;
typedef enum {
BROTLI_STATE_CONTEXT_MAP_NONE,
BROTLI_STATE_CONTEXT_MAP_READ_PREFIX,
BROTLI_STATE_CONTEXT_MAP_HUFFMAN,
BROTLI_STATE_CONTEXT_MAP_DECODE,
BROTLI_STATE_CONTEXT_MAP_TRANSFORM
} BrotliRunningContextMapState;
typedef enum {
BROTLI_STATE_HUFFMAN_NONE,
BROTLI_STATE_HUFFMAN_SIMPLE_SIZE,
BROTLI_STATE_HUFFMAN_SIMPLE_READ,
BROTLI_STATE_HUFFMAN_SIMPLE_BUILD,
BROTLI_STATE_HUFFMAN_COMPLEX,
BROTLI_STATE_HUFFMAN_LENGTH_SYMBOLS
} BrotliRunningHuffmanState;
typedef enum {
BROTLI_STATE_DECODE_UINT8_NONE,
BROTLI_STATE_DECODE_UINT8_SHORT,
BROTLI_STATE_DECODE_UINT8_LONG
} BrotliRunningDecodeUint8State;
typedef enum {
BROTLI_STATE_READ_BLOCK_LENGTH_NONE,
BROTLI_STATE_READ_BLOCK_LENGTH_SUFFIX
} BrotliRunningReadBlockLengthState;
struct BrotliDecoderStateStruct {
BrotliRunningState state;
/* This counter is reused for several disjoint loops. */
int loop_counter;
BrotliBitReader br;
brotli_alloc_func alloc_func;
brotli_free_func free_func;
void* memory_manager_opaque;
/* Temporary storage for remaining input. */
union {
uint64_t u64;
uint8_t u8[8];
} buffer;
uint32_t buffer_length;
int pos;
int max_backward_distance;
int max_distance;
int ringbuffer_size;
int ringbuffer_mask;
int dist_rb_idx;
int dist_rb[4];
int error_code;
uint32_t sub_loop_counter;
uint8_t* ringbuffer;
uint8_t* ringbuffer_end;
HuffmanCode* htree_command;
const uint8_t* context_lookup1;
const uint8_t* context_lookup2;
uint8_t* context_map_slice;
uint8_t* dist_context_map_slice;
/* This ring buffer holds a few past copy distances that will be used by */
/* some special distance codes. */
HuffmanTreeGroup literal_hgroup;
HuffmanTreeGroup insert_copy_hgroup;
HuffmanTreeGroup distance_hgroup;
HuffmanCode* block_type_trees;
HuffmanCode* block_len_trees;
/* This is true if the literal context map histogram type always matches the
block type. It is then not needed to keep the context (faster decoding). */
int trivial_literal_context;
/* Distance context is actual after command is decoded and before distance
is computed. After distance computation it is used as a temporary variable. */
int distance_context;
int meta_block_remaining_len;
uint32_t block_length_index;
uint32_t block_length[3];
uint32_t num_block_types[3];
uint32_t block_type_rb[6];
uint32_t distance_postfix_bits;
uint32_t num_direct_distance_codes;
int distance_postfix_mask;
uint32_t num_dist_htrees;
uint8_t* dist_context_map;
HuffmanCode* literal_htree;
uint8_t dist_htree_index;
uint32_t repeat_code_len;
uint32_t prev_code_len;
int copy_length;
int distance_code;
/* For partial write operations */
size_t rb_roundtrips; /* How many times we went around the ring-buffer */
size_t partial_pos_out; /* How much output to the user in total */
/* For ReadHuffmanCode */
uint32_t symbol;
uint32_t repeat;
uint32_t space;
HuffmanCode table[32];
/* List of heads of symbol chains. */
uint16_t* symbol_lists;
/* Storage from symbol_lists. */
uint16_t symbols_lists_array[BROTLI_HUFFMAN_MAX_CODE_LENGTH + 1 +
BROTLI_NUM_COMMAND_SYMBOLS];
/* Tails of symbol chains. */
int next_symbol[32];
uint8_t code_length_code_lengths[BROTLI_CODE_LENGTH_CODES];
/* Population counts for the code lengths */
uint16_t code_length_histo[16];
/* For HuffmanTreeGroupDecode */
int htree_index;
HuffmanCode* next;
/* For DecodeContextMap */
uint32_t context_index;
uint32_t max_run_length_prefix;
uint32_t code;
HuffmanCode context_map_table[BROTLI_HUFFMAN_MAX_SIZE_272];
/* For InverseMoveToFrontTransform */
uint32_t mtf_upper_bound;
uint32_t mtf[64 + 1];
/* For custom dictionaries */
const uint8_t* custom_dict;
int custom_dict_size;
/* less used attributes are in the end of this struct */
/* States inside function calls */
BrotliRunningMetablockHeaderState substate_metablock_header;
BrotliRunningTreeGroupState substate_tree_group;
BrotliRunningContextMapState substate_context_map;
BrotliRunningUncompressedState substate_uncompressed;
BrotliRunningHuffmanState substate_huffman;
BrotliRunningDecodeUint8State substate_decode_uint8;
BrotliRunningReadBlockLengthState substate_read_block_length;
unsigned int is_last_metablock : 1;
unsigned int is_uncompressed : 1;
unsigned int is_metadata : 1;
unsigned int should_wrap_ringbuffer : 1;
unsigned int canny_ringbuffer_allocation : 1;
unsigned int size_nibbles : 8;
uint32_t window_bits;
int new_ringbuffer_size;
uint32_t num_literal_htrees;
uint8_t* context_map;
uint8_t* context_modes;
const BrotliDictionary* dictionary;
uint32_t trivial_literal_contexts[8]; /* 256 bits */
};
typedef struct BrotliDecoderStateStruct BrotliDecoderStateInternal;
#define BrotliDecoderState BrotliDecoderStateInternal
BROTLI_INTERNAL void BrotliDecoderStateInit(BrotliDecoderState* s);
BROTLI_INTERNAL void BrotliDecoderStateInitWithCustomAllocators(
BrotliDecoderState* s, brotli_alloc_func alloc_func,
brotli_free_func free_func, void* opaque);
BROTLI_INTERNAL void BrotliDecoderStateCleanup(BrotliDecoderState* s);
BROTLI_INTERNAL void BrotliDecoderStateMetablockBegin(BrotliDecoderState* s);
BROTLI_INTERNAL void BrotliDecoderStateCleanupAfterMetablock(
BrotliDecoderState* s);
BROTLI_INTERNAL BROTLI_BOOL BrotliDecoderHuffmanTreeGroupInit(
BrotliDecoderState* s, HuffmanTreeGroup* group, uint32_t alphabet_size,
uint32_t ntrees);
#if defined(__cplusplus) || defined(c_plusplus)
} /* extern "C" */
#endif
#endif /* BROTLI_DEC_STATE_H_ */
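
BrotliDecoderStateStruct above carries everything the decoder needs to suspend and resume at arbitrary input boundaries, which is what makes partial streaming decoding possible. Applications normally drive that state through the public streaming API rather than touching the struct directly; a minimal sketch of such a loop, assuming the public <brotli/decode.h> interface (BrotliDecoderCreateInstance, BrotliDecoderDecompressStream, BrotliDecoderDestroyInstance):

#include <stddef.h>
#include <stdint.h>
#include <stdio.h>
#include <brotli/decode.h>

/* Decompress stdin to stdout in fixed-size chunks; the decoder state keeps
   track of progress between calls. */
int main(void) {
  uint8_t in[4096], out[4096];
  BrotliDecoderState* s = BrotliDecoderCreateInstance(NULL, NULL, NULL);
  BrotliDecoderResult r = BROTLI_DECODER_RESULT_NEEDS_MORE_INPUT;
  const uint8_t* next_in = in;
  size_t avail_in = 0;

  while (r != BROTLI_DECODER_RESULT_SUCCESS) {
    if (r == BROTLI_DECODER_RESULT_NEEDS_MORE_INPUT) {
      avail_in = fread(in, 1, sizeof(in), stdin);
      if (avail_in == 0) break;                /* truncated stream */
      next_in = in;
    }
    size_t avail_out = sizeof(out);
    uint8_t* next_out = out;
    r = BrotliDecoderDecompressStream(s, &avail_in, &next_in,
                                      &avail_out, &next_out, NULL);
    if (r == BROTLI_DECODER_RESULT_ERROR) break;
    fwrite(out, 1, sizeof(out) - avail_out, stdout);
  }
  BrotliDecoderDestroyInstance(s);
  return r == BROTLI_DECODER_RESULT_SUCCESS ? 0 : 1;
}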

View File

@ -1,300 +0,0 @@
/* Copyright 2013 Google Inc. All Rights Reserved.
Distributed under MIT license.
See file LICENSE for detail or copy at https://opensource.org/licenses/MIT
*/
/* Transformations on dictionary words. */
#ifndef BROTLI_DEC_TRANSFORM_H_
#define BROTLI_DEC_TRANSFORM_H_
#include <brotli/types.h>
#include "./port.h"
#if defined(__cplusplus) || defined(c_plusplus)
extern "C" {
#endif
enum WordTransformType {
kIdentity = 0,
kOmitLast1 = 1,
kOmitLast2 = 2,
kOmitLast3 = 3,
kOmitLast4 = 4,
kOmitLast5 = 5,
kOmitLast6 = 6,
kOmitLast7 = 7,
kOmitLast8 = 8,
kOmitLast9 = 9,
kUppercaseFirst = 10,
kUppercaseAll = 11,
kOmitFirst1 = 12,
kOmitFirst2 = 13,
kOmitFirst3 = 14,
kOmitFirst4 = 15,
kOmitFirst5 = 16,
kOmitFirst6 = 17,
kOmitFirst7 = 18,
kOmitFirst8 = 19,
kOmitFirst9 = 20
};
typedef struct {
const uint8_t prefix_id;
const uint8_t transform;
const uint8_t suffix_id;
} Transform;
static const char kPrefixSuffix[208] =
"\0 \0, \0 of the \0 of \0s \0.\0 and \0 in \0\"\0 to \0\">\0\n\0. \0]\0"
" for \0 a \0 that \0\'\0 with \0 from \0 by \0(\0. The \0 on \0 as \0"
" is \0ing \0\n\t\0:\0ed \0=\"\0 at \0ly \0,\0=\'\0.com/\0. This \0"
" not \0er \0al \0ful \0ive \0less \0est \0ize \0\xc2\xa0\0ous ";
enum {
/* EMPTY = ""
SP = " "
DQUOT = "\""
SQUOT = "'"
CLOSEBR = "]"
OPEN = "("
SLASH = "/"
NBSP = non-breaking space "\0xc2\xa0"
*/
kPFix_EMPTY = 0,
kPFix_SP = 1,
kPFix_COMMASP = 3,
kPFix_SPofSPtheSP = 6,
kPFix_SPtheSP = 9,
kPFix_eSP = 12,
kPFix_SPofSP = 15,
kPFix_sSP = 20,
kPFix_DOT = 23,
kPFix_SPandSP = 25,
kPFix_SPinSP = 31,
kPFix_DQUOT = 36,
kPFix_SPtoSP = 38,
kPFix_DQUOTGT = 43,
kPFix_NEWLINE = 46,
kPFix_DOTSP = 48,
kPFix_CLOSEBR = 51,
kPFix_SPforSP = 53,
kPFix_SPaSP = 59,
kPFix_SPthatSP = 63,
kPFix_SQUOT = 70,
kPFix_SPwithSP = 72,
kPFix_SPfromSP = 79,
kPFix_SPbySP = 86,
kPFix_OPEN = 91,
kPFix_DOTSPTheSP = 93,
kPFix_SPonSP = 100,
kPFix_SPasSP = 105,
kPFix_SPisSP = 110,
kPFix_ingSP = 115,
kPFix_NEWLINETAB = 120,
kPFix_COLON = 123,
kPFix_edSP = 125,
kPFix_EQDQUOT = 129,
kPFix_SPatSP = 132,
kPFix_lySP = 137,
kPFix_COMMA = 141,
kPFix_EQSQUOT = 143,
kPFix_DOTcomSLASH = 146,
kPFix_DOTSPThisSP = 152,
kPFix_SPnotSP = 160,
kPFix_erSP = 166,
kPFix_alSP = 170,
kPFix_fulSP = 174,
kPFix_iveSP = 179,
kPFix_lessSP = 184,
kPFix_estSP = 190,
kPFix_izeSP = 195,
kPFix_NBSP = 200,
kPFix_ousSP = 203
};
static const Transform kTransforms[] = {
{ kPFix_EMPTY, kIdentity, kPFix_EMPTY },
{ kPFix_EMPTY, kIdentity, kPFix_SP },
{ kPFix_SP, kIdentity, kPFix_SP },
{ kPFix_EMPTY, kOmitFirst1, kPFix_EMPTY },
{ kPFix_EMPTY, kUppercaseFirst, kPFix_SP },
{ kPFix_EMPTY, kIdentity, kPFix_SPtheSP },
{ kPFix_SP, kIdentity, kPFix_EMPTY },
{ kPFix_sSP, kIdentity, kPFix_SP },
{ kPFix_EMPTY, kIdentity, kPFix_SPofSP },
{ kPFix_EMPTY, kUppercaseFirst, kPFix_EMPTY },
{ kPFix_EMPTY, kIdentity, kPFix_SPandSP },
{ kPFix_EMPTY, kOmitFirst2, kPFix_EMPTY },
{ kPFix_EMPTY, kOmitLast1, kPFix_EMPTY },
{ kPFix_COMMASP, kIdentity, kPFix_SP },
{ kPFix_EMPTY, kIdentity, kPFix_COMMASP },
{ kPFix_SP, kUppercaseFirst, kPFix_SP },
{ kPFix_EMPTY, kIdentity, kPFix_SPinSP },
{ kPFix_EMPTY, kIdentity, kPFix_SPtoSP },
{ kPFix_eSP, kIdentity, kPFix_SP },
{ kPFix_EMPTY, kIdentity, kPFix_DQUOT },
{ kPFix_EMPTY, kIdentity, kPFix_DOT },
{ kPFix_EMPTY, kIdentity, kPFix_DQUOTGT },
{ kPFix_EMPTY, kIdentity, kPFix_NEWLINE },
{ kPFix_EMPTY, kOmitLast3, kPFix_EMPTY },
{ kPFix_EMPTY, kIdentity, kPFix_CLOSEBR },
{ kPFix_EMPTY, kIdentity, kPFix_SPforSP },
{ kPFix_EMPTY, kOmitFirst3, kPFix_EMPTY },
{ kPFix_EMPTY, kOmitLast2, kPFix_EMPTY },
{ kPFix_EMPTY, kIdentity, kPFix_SPaSP },
{ kPFix_EMPTY, kIdentity, kPFix_SPthatSP },
{ kPFix_SP, kUppercaseFirst, kPFix_EMPTY },
{ kPFix_EMPTY, kIdentity, kPFix_DOTSP },
{ kPFix_DOT, kIdentity, kPFix_EMPTY },
{ kPFix_SP, kIdentity, kPFix_COMMASP },
{ kPFix_EMPTY, kOmitFirst4, kPFix_EMPTY },
{ kPFix_EMPTY, kIdentity, kPFix_SPwithSP },
{ kPFix_EMPTY, kIdentity, kPFix_SQUOT },
{ kPFix_EMPTY, kIdentity, kPFix_SPfromSP },
{ kPFix_EMPTY, kIdentity, kPFix_SPbySP },
{ kPFix_EMPTY, kOmitFirst5, kPFix_EMPTY },
{ kPFix_EMPTY, kOmitFirst6, kPFix_EMPTY },
{ kPFix_SPtheSP, kIdentity, kPFix_EMPTY },
{ kPFix_EMPTY, kOmitLast4, kPFix_EMPTY },
{ kPFix_EMPTY, kIdentity, kPFix_DOTSPTheSP },
{ kPFix_EMPTY, kUppercaseAll, kPFix_EMPTY },
{ kPFix_EMPTY, kIdentity, kPFix_SPonSP },
{ kPFix_EMPTY, kIdentity, kPFix_SPasSP },
{ kPFix_EMPTY, kIdentity, kPFix_SPisSP },
{ kPFix_EMPTY, kOmitLast7, kPFix_EMPTY },
{ kPFix_EMPTY, kOmitLast1, kPFix_ingSP },
{ kPFix_EMPTY, kIdentity, kPFix_NEWLINETAB },
{ kPFix_EMPTY, kIdentity, kPFix_COLON },
{ kPFix_SP, kIdentity, kPFix_DOTSP },
{ kPFix_EMPTY, kIdentity, kPFix_edSP },
{ kPFix_EMPTY, kOmitFirst9, kPFix_EMPTY },
{ kPFix_EMPTY, kOmitFirst7, kPFix_EMPTY },
{ kPFix_EMPTY, kOmitLast6, kPFix_EMPTY },
{ kPFix_EMPTY, kIdentity, kPFix_OPEN },
{ kPFix_EMPTY, kUppercaseFirst, kPFix_COMMASP },
{ kPFix_EMPTY, kOmitLast8, kPFix_EMPTY },
{ kPFix_EMPTY, kIdentity, kPFix_SPatSP },
{ kPFix_EMPTY, kIdentity, kPFix_lySP },
{ kPFix_SPtheSP, kIdentity, kPFix_SPofSP },
{ kPFix_EMPTY, kOmitLast5, kPFix_EMPTY },
{ kPFix_EMPTY, kOmitLast9, kPFix_EMPTY },
{ kPFix_SP, kUppercaseFirst, kPFix_COMMASP },
{ kPFix_EMPTY, kUppercaseFirst, kPFix_DQUOT },
{ kPFix_DOT, kIdentity, kPFix_OPEN },
{ kPFix_EMPTY, kUppercaseAll, kPFix_SP },
{ kPFix_EMPTY, kUppercaseFirst, kPFix_DQUOTGT },
{ kPFix_EMPTY, kIdentity, kPFix_EQDQUOT },
{ kPFix_SP, kIdentity, kPFix_DOT },
{ kPFix_DOTcomSLASH, kIdentity, kPFix_EMPTY },
{ kPFix_SPtheSP, kIdentity, kPFix_SPofSPtheSP },
{ kPFix_EMPTY, kUppercaseFirst, kPFix_SQUOT },
{ kPFix_EMPTY, kIdentity, kPFix_DOTSPThisSP },
{ kPFix_EMPTY, kIdentity, kPFix_COMMA },
{ kPFix_DOT, kIdentity, kPFix_SP },
{ kPFix_EMPTY, kUppercaseFirst, kPFix_OPEN },
{ kPFix_EMPTY, kUppercaseFirst, kPFix_DOT },
{ kPFix_EMPTY, kIdentity, kPFix_SPnotSP },
{ kPFix_SP, kIdentity, kPFix_EQDQUOT },
{ kPFix_EMPTY, kIdentity, kPFix_erSP },
{ kPFix_SP, kUppercaseAll, kPFix_SP },
{ kPFix_EMPTY, kIdentity, kPFix_alSP },
{ kPFix_SP, kUppercaseAll, kPFix_EMPTY },
{ kPFix_EMPTY, kIdentity, kPFix_EQSQUOT },
{ kPFix_EMPTY, kUppercaseAll, kPFix_DQUOT },
{ kPFix_EMPTY, kUppercaseFirst, kPFix_DOTSP },
{ kPFix_SP, kIdentity, kPFix_OPEN },
{ kPFix_EMPTY, kIdentity, kPFix_fulSP },
{ kPFix_SP, kUppercaseFirst, kPFix_DOTSP },
{ kPFix_EMPTY, kIdentity, kPFix_iveSP },
{ kPFix_EMPTY, kIdentity, kPFix_lessSP },
{ kPFix_EMPTY, kUppercaseAll, kPFix_SQUOT },
{ kPFix_EMPTY, kIdentity, kPFix_estSP },
{ kPFix_SP, kUppercaseFirst, kPFix_DOT },
{ kPFix_EMPTY, kUppercaseAll, kPFix_DQUOTGT },
{ kPFix_SP, kIdentity, kPFix_EQSQUOT },
{ kPFix_EMPTY, kUppercaseFirst, kPFix_COMMA },
{ kPFix_EMPTY, kIdentity, kPFix_izeSP },
{ kPFix_EMPTY, kUppercaseAll, kPFix_DOT },
{ kPFix_NBSP, kIdentity, kPFix_EMPTY },
{ kPFix_SP, kIdentity, kPFix_COMMA },
{ kPFix_EMPTY, kUppercaseFirst, kPFix_EQDQUOT },
{ kPFix_EMPTY, kUppercaseAll, kPFix_EQDQUOT },
{ kPFix_EMPTY, kIdentity, kPFix_ousSP },
{ kPFix_EMPTY, kUppercaseAll, kPFix_COMMASP },
{ kPFix_EMPTY, kUppercaseFirst, kPFix_EQSQUOT },
{ kPFix_SP, kUppercaseFirst, kPFix_COMMA },
{ kPFix_SP, kUppercaseAll, kPFix_EQDQUOT },
{ kPFix_SP, kUppercaseAll, kPFix_COMMASP },
{ kPFix_EMPTY, kUppercaseAll, kPFix_COMMA },
{ kPFix_EMPTY, kUppercaseAll, kPFix_OPEN },
{ kPFix_EMPTY, kUppercaseAll, kPFix_DOTSP },
{ kPFix_SP, kUppercaseAll, kPFix_DOT },
{ kPFix_EMPTY, kUppercaseAll, kPFix_EQSQUOT },
{ kPFix_SP, kUppercaseAll, kPFix_DOTSP },
{ kPFix_SP, kUppercaseFirst, kPFix_EQDQUOT },
{ kPFix_SP, kUppercaseAll, kPFix_EQSQUOT },
{ kPFix_SP, kUppercaseFirst, kPFix_EQSQUOT },
};
static const int kNumTransforms = sizeof(kTransforms) / sizeof(kTransforms[0]);
static int ToUpperCase(uint8_t* p) {
if (p[0] < 0xc0) {
if (p[0] >= 'a' && p[0] <= 'z') {
p[0] ^= 32;
}
return 1;
}
/* An overly simplified uppercasing model for UTF-8. */
if (p[0] < 0xe0) {
p[1] ^= 32;
return 2;
}
/* An arbitrary transform for three byte characters. */
p[2] ^= 5;
return 3;
}
static BROTLI_NOINLINE int TransformDictionaryWord(
uint8_t* dst, const uint8_t* word, int len, int transform) {
int idx = 0;
{
const char* prefix = &kPrefixSuffix[kTransforms[transform].prefix_id];
while (*prefix) { dst[idx++] = (uint8_t)*prefix++; }
}
{
const int t = kTransforms[transform].transform;
int i = 0;
int skip = t - (kOmitFirst1 - 1);
if (skip > 0) {
word += skip;
len -= skip;
} else if (t <= kOmitLast9) {
len -= t;
}
while (i < len) { dst[idx++] = word[i++]; }
if (t == kUppercaseFirst) {
ToUpperCase(&dst[idx - len]);
} else if (t == kUppercaseAll) {
uint8_t* uppercase = &dst[idx - len];
while (len > 0) {
int step = ToUpperCase(uppercase);
uppercase += step;
len -= step;
}
}
}
{
const char* suffix = &kPrefixSuffix[kTransforms[transform].suffix_id];
while (*suffix) { dst[idx++] = (uint8_t)*suffix++; }
return idx;
}
}
#if defined(__cplusplus) || defined(c_plusplus)
} /* extern "C" */
#endif
#endif /* BROTLI_DEC_TRANSFORM_H_ */
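
Each entry in kTransforms is a (prefix, word transform, suffix) triple applied to a static-dictionary word; for example transform 4, { kPFix_EMPTY, kUppercaseFirst, kPFix_SP }, turns "time" into "Time ". A small stand-alone sketch of the same prefix + transform + suffix mechanics, simplified to ASCII and the identity / uppercase-first cases only (it is not the function above, which also handles the omit-first/omit-last transforms and multi-byte UTF-8):

#include <ctype.h>
#include <stddef.h>

/* Copy prefix, copy the word (optionally uppercasing its first letter),
   copy suffix. Returns the output length, like TransformDictionaryWord. */
static size_t transform_word(char* dst, const char* word,
                             const char* prefix, const char* suffix,
                             int uppercase_first) {
  size_t idx = 0;
  size_t word_start;
  while (*prefix) dst[idx++] = *prefix++;
  word_start = idx;
  while (*word) dst[idx++] = *word++;
  if (uppercase_first && idx > word_start)
    dst[word_start] = (char)toupper((unsigned char)dst[word_start]);
  while (*suffix) dst[idx++] = *suffix++;
  dst[idx] = '\0';
  return idx;
}

/* transform_word(buf, "time", "", " ", 1) writes "Time " and returns 5,
   matching transform 4 in the table above. */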

File diff suppressed because it is too large

File diff suppressed because it is too large

File diff suppressed because it is too large

View File

@ -1,39 +0,0 @@
/* Copyright 2013 Google Inc. All Rights Reserved.
Distributed under MIT license.
See file LICENSE for detail or copy at https://opensource.org/licenses/MIT
*/
/* Function to find backward reference copies. */
#ifndef BROTLI_ENC_BACKWARD_REFERENCES_H_
#define BROTLI_ENC_BACKWARD_REFERENCES_H_
#include "../common/constants.h"
#include "../common/dictionary.h"
#include <brotli/types.h>
#include "./command.h"
#include "./hash.h"
#include "./port.h"
#include "./quality.h"
#if defined(__cplusplus) || defined(c_plusplus)
extern "C" {
#endif
/* "commands" points to the next output command to write to, "*num_commands" is
initially the total amount of commands output by previous
CreateBackwardReferences calls, and must be incremented by the amount written
by this call. */
BROTLI_INTERNAL void BrotliCreateBackwardReferences(
const BrotliDictionary* dictionary, size_t num_bytes, size_t position,
const uint8_t* ringbuffer, size_t ringbuffer_mask,
const BrotliEncoderParams* params, HasherHandle hasher, int* dist_cache,
size_t* last_insert_len, Command* commands, size_t* num_commands,
size_t* num_literals);
#if defined(__cplusplus) || defined(c_plusplus)
} /* extern "C" */
#endif
#endif /* BROTLI_ENC_BACKWARD_REFERENCES_H_ */

View File

@ -1,99 +0,0 @@
/* Copyright 2013 Google Inc. All Rights Reserved.
Distributed under MIT license.
See file LICENSE for detail or copy at https://opensource.org/licenses/MIT
*/
/* Function to find backward reference copies. */
#ifndef BROTLI_ENC_BACKWARD_REFERENCES_HQ_H_
#define BROTLI_ENC_BACKWARD_REFERENCES_HQ_H_
#include "../common/constants.h"
#include "../common/dictionary.h"
#include <brotli/types.h>
#include "./command.h"
#include "./hash.h"
#include "./memory.h"
#include "./port.h"
#include "./quality.h"
#if defined(__cplusplus) || defined(c_plusplus)
extern "C" {
#endif
BROTLI_INTERNAL void BrotliCreateZopfliBackwardReferences(
MemoryManager* m, const BrotliDictionary* dictionary, size_t num_bytes,
size_t position, const uint8_t* ringbuffer, size_t ringbuffer_mask,
const BrotliEncoderParams* params, HasherHandle hasher, int* dist_cache,
size_t* last_insert_len, Command* commands, size_t* num_commands,
size_t* num_literals);
BROTLI_INTERNAL void BrotliCreateHqZopfliBackwardReferences(
MemoryManager* m, const BrotliDictionary* dictionary, size_t num_bytes,
size_t position, const uint8_t* ringbuffer, size_t ringbuffer_mask,
const BrotliEncoderParams* params, HasherHandle hasher, int* dist_cache,
size_t* last_insert_len, Command* commands, size_t* num_commands,
size_t* num_literals);
typedef struct ZopfliNode {
/* best length to get up to this byte (not including this byte itself)
highest 8 bit is used to reconstruct the length code */
uint32_t length;
/* distance associated with the length
highest 7 bit contains distance short code + 1 (or zero if no short code)
*/
uint32_t distance;
/* number of literal inserts before this copy */
uint32_t insert_length;
/* This union holds information used by dynamic-programming. During forward
pass |cost| it used to store the goal function. When node is processed its
|cost| is invalidated in favor of |shortcut|. On path back-tracing pass
|next| is assigned the offset to next node on the path. */
union {
/* Smallest cost to get to this byte from the beginning, as found so far. */
float cost;
/* Offset to the next node on the path. Equals to command_length() of the
next node on the path. For last node equals to BROTLI_UINT32_MAX */
uint32_t next;
/* Node position that provides next distance for distance cache. */
uint32_t shortcut;
} u;
} ZopfliNode;
BROTLI_INTERNAL void BrotliInitZopfliNodes(ZopfliNode* array, size_t length);
/* Computes the shortest path of commands from position to at most
position + num_bytes.
On return, path->size() is the number of commands found and path[i] is the
length of the i-th command (copy length plus insert length).
Note that the sum of the lengths of all commands can be less than num_bytes.
On return, the nodes[0..num_bytes] array will have the following
"ZopfliNode array invariant":
For each i in [1..num_bytes], if nodes[i].cost < kInfinity, then
(1) nodes[i].copy_length() >= 2
(2) nodes[i].command_length() <= i and
(3) nodes[i - nodes[i].command_length()].cost < kInfinity */
BROTLI_INTERNAL size_t BrotliZopfliComputeShortestPath(
MemoryManager* m, const BrotliDictionary* dictionary, size_t num_bytes,
size_t position, const uint8_t* ringbuffer, size_t ringbuffer_mask,
const BrotliEncoderParams* params, const size_t max_backward_limit,
const int* dist_cache, HasherHandle hasher, ZopfliNode* nodes);
BROTLI_INTERNAL void BrotliZopfliCreateCommands(const size_t num_bytes,
const size_t block_start,
const size_t max_backward_limit,
const ZopfliNode* nodes,
int* dist_cache,
size_t* last_insert_len,
Command* commands,
size_t* num_literals);
#if defined(__cplusplus) || defined(c_plusplus)
} /* extern "C" */
#endif
#endif /* BROTLI_ENC_BACKWARD_REFERENCES_HQ_H_ */

View File

@ -1,143 +0,0 @@
/* NOLINT(build/header_guard) */
/* Copyright 2013 Google Inc. All Rights Reserved.
Distributed under MIT license.
See file LICENSE for detail or copy at https://opensource.org/licenses/MIT
*/
/* template parameters: FN */
static BROTLI_NOINLINE void FN(CreateBackwardReferences)(
const BrotliDictionary* dictionary, const uint16_t* dictionary_hash,
size_t num_bytes, size_t position,
const uint8_t* ringbuffer, size_t ringbuffer_mask,
const BrotliEncoderParams* params, HasherHandle hasher, int* dist_cache,
size_t* last_insert_len, Command* commands, size_t* num_commands,
size_t* num_literals) {
/* Set maximum distance, see section 9.1. of the spec. */
const size_t max_backward_limit = BROTLI_MAX_BACKWARD_LIMIT(params->lgwin);
const Command* const orig_commands = commands;
size_t insert_length = *last_insert_len;
const size_t pos_end = position + num_bytes;
const size_t store_end = num_bytes >= FN(StoreLookahead)() ?
position + num_bytes - FN(StoreLookahead)() + 1 : position;
/* For speed up heuristics for random data. */
const size_t random_heuristics_window_size =
LiteralSpreeLengthForSparseSearch(params);
size_t apply_random_heuristics = position + random_heuristics_window_size;
/* Minimum score to accept a backward reference. */
const score_t kMinScore = BROTLI_SCORE_BASE + 100;
FN(PrepareDistanceCache)(hasher, dist_cache);
while (position + FN(HashTypeLength)() < pos_end) {
size_t max_length = pos_end - position;
size_t max_distance = BROTLI_MIN(size_t, position, max_backward_limit);
HasherSearchResult sr;
sr.len = 0;
sr.len_code_delta = 0;
sr.distance = 0;
sr.score = kMinScore;
FN(FindLongestMatch)(hasher, dictionary, dictionary_hash, ringbuffer,
ringbuffer_mask, dist_cache, position,
max_length, max_distance, &sr);
if (sr.score > kMinScore) {
/* Found a match. Let's look for something even better ahead. */
int delayed_backward_references_in_row = 0;
--max_length;
for (;; --max_length) {
const score_t cost_diff_lazy = 175;
HasherSearchResult sr2;
sr2.len = params->quality < MIN_QUALITY_FOR_EXTENSIVE_REFERENCE_SEARCH ?
BROTLI_MIN(size_t, sr.len - 1, max_length) : 0;
sr2.len_code_delta = 0;
sr2.distance = 0;
sr2.score = kMinScore;
max_distance = BROTLI_MIN(size_t, position + 1, max_backward_limit);
FN(FindLongestMatch)(hasher, dictionary, dictionary_hash, ringbuffer,
ringbuffer_mask, dist_cache, position + 1,
max_length, max_distance, &sr2);
if (sr2.score >= sr.score + cost_diff_lazy) {
/* Ok, let's just write one byte for now and start a match from the
next byte. */
++position;
++insert_length;
sr = sr2;
if (++delayed_backward_references_in_row < 4 &&
position + FN(HashTypeLength)() < pos_end) {
continue;
}
}
break;
}
apply_random_heuristics =
position + 2 * sr.len + random_heuristics_window_size;
max_distance = BROTLI_MIN(size_t, position, max_backward_limit);
{
/* The first 16 codes are special short-codes,
and the minimum offset is 1. */
size_t distance_code =
ComputeDistanceCode(sr.distance, max_distance, dist_cache);
if (sr.distance <= max_distance && distance_code > 0) {
dist_cache[3] = dist_cache[2];
dist_cache[2] = dist_cache[1];
dist_cache[1] = dist_cache[0];
dist_cache[0] = (int)sr.distance;
FN(PrepareDistanceCache)(hasher, dist_cache);
}
InitCommand(commands++, insert_length, sr.len, sr.len_code_delta,
distance_code);
}
*num_literals += insert_length;
insert_length = 0;
/* Put the hash keys into the table, if there are enough bytes left.
Depending on the hasher implementation, it can push all positions
in the given range or only a subset of them. */
FN(StoreRange)(hasher, ringbuffer, ringbuffer_mask, position + 2,
BROTLI_MIN(size_t, position + sr.len, store_end));
position += sr.len;
} else {
++insert_length;
++position;
/* If we have not seen matches for a long time, we can skip some
match lookups. Unsuccessful match lookups are very very expensive
and this kind of a heuristic speeds up compression quite
a lot. */
if (position > apply_random_heuristics) {
/* Going through uncompressible data, jump. */
if (position >
apply_random_heuristics + 4 * random_heuristics_window_size) {
/* It is quite a long time since we saw a copy, so we assume
that this data is not compressible, and store hashes less
often. Hashes of non compressible data are less likely to
turn out to be useful in the future, too, so we store less of
them to not to flood out the hash table of good compressible
data. */
const size_t kMargin =
BROTLI_MAX(size_t, FN(StoreLookahead)() - 1, 4);
size_t pos_jump =
BROTLI_MIN(size_t, position + 16, pos_end - kMargin);
for (; position < pos_jump; position += 4) {
FN(Store)(hasher, ringbuffer, ringbuffer_mask, position);
insert_length += 4;
}
} else {
const size_t kMargin =
BROTLI_MAX(size_t, FN(StoreLookahead)() - 1, 2);
size_t pos_jump =
BROTLI_MIN(size_t, position + 8, pos_end - kMargin);
for (; position < pos_jump; position += 2) {
FN(Store)(hasher, ringbuffer, ringbuffer_mask, position);
insert_length += 2;
}
}
}
}
}
insert_length += pos_end - position;
*last_insert_len = insert_length;
*num_commands += (size_t)(commands - orig_commands);
}
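
The loop above is the greedy matcher with lazy evaluation: after finding a match at the current position it also probes position + 1, and only commits to the later match when its score beats the current one by cost_diff_lazy (175). A stripped-down sketch of just that decision rule, with hypothetical types standing in for the hasher's search result (a sketch, not the encoder's API):

#include <stddef.h>

/* Illustration of the lazy-match rule in CreateBackwardReferences above:
   emit one more literal and restart from the later match only when it is
   clearly better. */
typedef struct {
  size_t len;
  size_t distance;
  long   score;
} MatchResult;

static int prefer_later_match(const MatchResult* at_pos,
                              const MatchResult* at_pos_plus_1) {
  const long cost_diff_lazy = 175;
  return at_pos_plus_1->score >= at_pos->score + cost_diff_lazy;
}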

View File

@ -1,63 +0,0 @@
/* Copyright 2013 Google Inc. All Rights Reserved.
Distributed under MIT license.
See file LICENSE for detail or copy at https://opensource.org/licenses/MIT
*/
/* Functions to estimate the bit cost of Huffman trees. */
#ifndef BROTLI_ENC_BIT_COST_H_
#define BROTLI_ENC_BIT_COST_H_
#include <brotli/types.h>
#include "./fast_log.h"
#include "./histogram.h"
#include "./port.h"
#if defined(__cplusplus) || defined(c_plusplus)
extern "C" {
#endif
static BROTLI_INLINE double ShannonEntropy(const uint32_t *population,
size_t size, size_t *total) {
size_t sum = 0;
double retval = 0;
const uint32_t *population_end = population + size;
size_t p;
if (size & 1) {
goto odd_number_of_elements_left;
}
while (population < population_end) {
p = *population++;
sum += p;
retval -= (double)p * FastLog2(p);
odd_number_of_elements_left:
p = *population++;
sum += p;
retval -= (double)p * FastLog2(p);
}
if (sum) retval += (double)sum * FastLog2(sum);
*total = sum;
return retval;
}
static BROTLI_INLINE double BitsEntropy(
const uint32_t *population, size_t size) {
size_t sum;
double retval = ShannonEntropy(population, size, &sum);
if (retval < sum) {
/* At least one bit per literal is needed. */
retval = (double)sum;
}
return retval;
}
BROTLI_INTERNAL double BrotliPopulationCostLiteral(const HistogramLiteral*);
BROTLI_INTERNAL double BrotliPopulationCostCommand(const HistogramCommand*);
BROTLI_INTERNAL double BrotliPopulationCostDistance(const HistogramDistance*);
#if defined(__cplusplus) || defined(c_plusplus)
} /* extern "C" */
#endif
#endif /* BROTLI_ENC_BIT_COST_H_ */
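
ShannonEntropy above computes total*log2(total) - sum_i count_i*log2(count_i), i.e. the histogram's total count multiplied by its per-symbol entropy, and BitsEntropy clamps the result up to one bit per symbol occurrence. A small worked check, standalone rather than using the header:

#include <math.h>
#include <stdio.h>

/* For the histogram {3, 1}: total = 4,
   cost = 4*log2(4) - 3*log2(3) - 1*log2(1) ≈ 3.245 bits,
   which BitsEntropy raises to 4.0 (at least one bit per literal). */
int main(void) {
  const unsigned counts[2] = { 3, 1 };
  double total = 0.0, cost = 0.0;
  for (int i = 0; i < 2; ++i) total += counts[i];
  cost = total * log2(total);
  for (int i = 0; i < 2; ++i)
    if (counts[i]) cost -= counts[i] * log2((double)counts[i]);
  if (cost < total) cost = total;              /* the BitsEntropy floor */
  printf("entropy cost = %.3f bits\n", cost);  /* prints 4.000 */
  return 0;
}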

View File

@ -1,127 +0,0 @@
/* NOLINT(build/header_guard) */
/* Copyright 2013 Google Inc. All Rights Reserved.
Distributed under MIT license.
See file LICENSE for detail or copy at https://opensource.org/licenses/MIT
*/
/* template parameters: FN */
#define HistogramType FN(Histogram)
double FN(BrotliPopulationCost)(const HistogramType* histogram) {
static const double kOneSymbolHistogramCost = 12;
static const double kTwoSymbolHistogramCost = 20;
static const double kThreeSymbolHistogramCost = 28;
static const double kFourSymbolHistogramCost = 37;
const size_t data_size = FN(HistogramDataSize)();
int count = 0;
size_t s[5];
double bits = 0.0;
size_t i;
if (histogram->total_count_ == 0) {
return kOneSymbolHistogramCost;
}
for (i = 0; i < data_size; ++i) {
if (histogram->data_[i] > 0) {
s[count] = i;
++count;
if (count > 4) break;
}
}
if (count == 1) {
return kOneSymbolHistogramCost;
}
if (count == 2) {
return (kTwoSymbolHistogramCost + (double)histogram->total_count_);
}
if (count == 3) {
const uint32_t histo0 = histogram->data_[s[0]];
const uint32_t histo1 = histogram->data_[s[1]];
const uint32_t histo2 = histogram->data_[s[2]];
const uint32_t histomax =
BROTLI_MAX(uint32_t, histo0, BROTLI_MAX(uint32_t, histo1, histo2));
return (kThreeSymbolHistogramCost +
2 * (histo0 + histo1 + histo2) - histomax);
}
if (count == 4) {
uint32_t histo[4];
uint32_t h23;
uint32_t histomax;
for (i = 0; i < 4; ++i) {
histo[i] = histogram->data_[s[i]];
}
/* Sort */
for (i = 0; i < 4; ++i) {
size_t j;
for (j = i + 1; j < 4; ++j) {
if (histo[j] > histo[i]) {
BROTLI_SWAP(uint32_t, histo, j, i);
}
}
}
h23 = histo[2] + histo[3];
histomax = BROTLI_MAX(uint32_t, h23, histo[0]);
return (kFourSymbolHistogramCost +
3 * h23 + 2 * (histo[0] + histo[1]) - histomax);
}
{
/* In this loop we compute the entropy of the histogram and simultaneously
build a simplified histogram of the code length codes where we use the
zero repeat code 17, but we don't use the non-zero repeat code 16. */
size_t max_depth = 1;
uint32_t depth_histo[BROTLI_CODE_LENGTH_CODES] = { 0 };
const double log2total = FastLog2(histogram->total_count_);
for (i = 0; i < data_size;) {
if (histogram->data_[i] > 0) {
/* Compute -log2(P(symbol)) = -log2(count(symbol)/total_count) =
= log2(total_count) - log2(count(symbol)) */
double log2p = log2total - FastLog2(histogram->data_[i]);
/* Approximate the bit depth by round(-log2(P(symbol))) */
size_t depth = (size_t)(log2p + 0.5);
bits += histogram->data_[i] * log2p;
if (depth > 15) {
depth = 15;
}
if (depth > max_depth) {
max_depth = depth;
}
++depth_histo[depth];
++i;
} else {
/* Compute the run length of zeros and add the appropriate number of 0
and 17 code length codes to the code length code histogram. */
uint32_t reps = 1;
size_t k;
for (k = i + 1; k < data_size && histogram->data_[k] == 0; ++k) {
++reps;
}
i += reps;
if (i == data_size) {
/* Don't add any cost for the last zero run, since these are encoded
only implicitly. */
break;
}
if (reps < 3) {
depth_histo[0] += reps;
} else {
reps -= 2;
while (reps > 0) {
++depth_histo[BROTLI_REPEAT_ZERO_CODE_LENGTH];
/* Add the 3 extra bits for the 17 code length code. */
bits += 3;
reps >>= 3;
}
}
}
}
/* Add the estimated encoding cost of the code length code histogram. */
bits += (double)(18 + 2 * max_depth);
/* Add the entropy of the code length code histogram. */
bits += BitsEntropy(depth_histo, BROTLI_CODE_LENGTH_CODES);
}
return bits;
}
#undef HistogramType

View File

@ -1,33 +0,0 @@
/* NOLINT(build/header_guard) */
/* Copyright 2014 Google Inc. All Rights Reserved.
Distributed under MIT license.
See file LICENSE for detail or copy at https://opensource.org/licenses/MIT
*/
/* template parameters: FN */
#define HistogramType FN(Histogram)
/* Creates entropy codes for all block types and stores them to the bit
stream. */
static void FN(BuildAndStoreEntropyCodes)(MemoryManager* m, BlockEncoder* self,
const HistogramType* histograms, const size_t histograms_size,
HuffmanTree* tree, size_t* storage_ix, uint8_t* storage) {
const size_t alphabet_size = self->alphabet_size_;
const size_t table_size = histograms_size * alphabet_size;
self->depths_ = BROTLI_ALLOC(m, uint8_t, table_size);
self->bits_ = BROTLI_ALLOC(m, uint16_t, table_size);
if (BROTLI_IS_OOM(m)) return;
{
size_t i;
for (i = 0; i < histograms_size; ++i) {
size_t ix = i * alphabet_size;
BuildAndStoreHuffmanTree(&histograms[i].data_[0], alphabet_size, tree,
&self->depths_[ix], &self->bits_[ix], storage_ix, storage);
}
}
}
#undef HistogramType

View File

@ -1,51 +0,0 @@
/* Copyright 2013 Google Inc. All Rights Reserved.
Distributed under MIT license.
See file LICENSE for detail or copy at https://opensource.org/licenses/MIT
*/
/* Block split point selection utilities. */
#ifndef BROTLI_ENC_BLOCK_SPLITTER_H_
#define BROTLI_ENC_BLOCK_SPLITTER_H_
#include <brotli/types.h>
#include "./command.h"
#include "./memory.h"
#include "./port.h"
#include "./quality.h"
#if defined(__cplusplus) || defined(c_plusplus)
extern "C" {
#endif
typedef struct BlockSplit {
size_t num_types; /* Number of distinct types */
size_t num_blocks; /* Number of values in types and lengths */
uint8_t* types;
uint32_t* lengths;
size_t types_alloc_size;
size_t lengths_alloc_size;
} BlockSplit;
BROTLI_INTERNAL void BrotliInitBlockSplit(BlockSplit* self);
BROTLI_INTERNAL void BrotliDestroyBlockSplit(MemoryManager* m,
BlockSplit* self);
BROTLI_INTERNAL void BrotliSplitBlock(MemoryManager* m,
const Command* cmds,
const size_t num_commands,
const uint8_t* data,
const size_t offset,
const size_t mask,
const BrotliEncoderParams* params,
BlockSplit* literal_split,
BlockSplit* insert_and_copy_split,
BlockSplit* dist_split);
#if defined(__cplusplus) || defined(c_plusplus)
} /* extern "C" */
#endif
#endif /* BROTLI_ENC_BLOCK_SPLITTER_H_ */

View File

@ -1,432 +0,0 @@
/* NOLINT(build/header_guard) */
/* Copyright 2013 Google Inc. All Rights Reserved.
Distributed under MIT license.
See file LICENSE for detail or copy at https://opensource.org/licenses/MIT
*/
/* template parameters: FN, DataType */
#define HistogramType FN(Histogram)
static void FN(InitialEntropyCodes)(const DataType* data, size_t length,
size_t stride,
size_t num_histograms,
HistogramType* histograms) {
unsigned int seed = 7;
size_t block_length = length / num_histograms;
size_t i;
FN(ClearHistograms)(histograms, num_histograms);
for (i = 0; i < num_histograms; ++i) {
size_t pos = length * i / num_histograms;
if (i != 0) {
pos += MyRand(&seed) % block_length;
}
if (pos + stride >= length) {
pos = length - stride - 1;
}
FN(HistogramAddVector)(&histograms[i], data + pos, stride);
}
}
static void FN(RandomSample)(unsigned int* seed,
const DataType* data,
size_t length,
size_t stride,
HistogramType* sample) {
size_t pos = 0;
if (stride >= length) {
pos = 0;
stride = length;
} else {
pos = MyRand(seed) % (length - stride + 1);
}
FN(HistogramAddVector)(sample, data + pos, stride);
}
static void FN(RefineEntropyCodes)(const DataType* data, size_t length,
size_t stride,
size_t num_histograms,
HistogramType* histograms) {
size_t iters =
kIterMulForRefining * length / stride + kMinItersForRefining;
unsigned int seed = 7;
size_t iter;
iters = ((iters + num_histograms - 1) / num_histograms) * num_histograms;
for (iter = 0; iter < iters; ++iter) {
HistogramType sample;
FN(HistogramClear)(&sample);
FN(RandomSample)(&seed, data, length, stride, &sample);
FN(HistogramAddHistogram)(&histograms[iter % num_histograms], &sample);
}
}
/* Assigns a block id from the range [0, num_histograms) to each data element
in data[0..length) and fills in block_id[0..length) with the assigned values.
Returns the number of blocks, i.e. one plus the number of block switches. */
static size_t FN(FindBlocks)(const DataType* data, const size_t length,
const double block_switch_bitcost,
const size_t num_histograms,
const HistogramType* histograms,
double* insert_cost,
double* cost,
uint8_t* switch_signal,
uint8_t *block_id) {
const size_t data_size = FN(HistogramDataSize)();
const size_t bitmaplen = (num_histograms + 7) >> 3;
size_t num_blocks = 1;
size_t i;
size_t j;
assert(num_histograms <= 256);
if (num_histograms <= 1) {
for (i = 0; i < length; ++i) {
block_id[i] = 0;
}
return 1;
}
memset(insert_cost, 0, sizeof(insert_cost[0]) * data_size * num_histograms);
for (i = 0; i < num_histograms; ++i) {
insert_cost[i] = FastLog2((uint32_t)histograms[i].total_count_);
}
for (i = data_size; i != 0;) {
--i;
for (j = 0; j < num_histograms; ++j) {
insert_cost[i * num_histograms + j] =
insert_cost[j] - BitCost(histograms[j].data_[i]);
}
}
memset(cost, 0, sizeof(cost[0]) * num_histograms);
memset(switch_signal, 0, sizeof(switch_signal[0]) * length * bitmaplen);
/* After each iteration of this loop, cost[k] will contain the difference
between the minimum cost of arriving at the current byte position using
entropy code k, and the minimum cost of arriving at the current byte
position. This difference is capped at the block switch cost, and if it
reaches block switch cost, it means that when we trace back from the last
position, we need to switch here. */
for (i = 0; i < length; ++i) {
const size_t byte_ix = i;
size_t ix = byte_ix * bitmaplen;
size_t insert_cost_ix = data[byte_ix] * num_histograms;
double min_cost = 1e99;
double block_switch_cost = block_switch_bitcost;
size_t k;
for (k = 0; k < num_histograms; ++k) {
/* We are coding the symbol in data[byte_ix] with entropy code k. */
cost[k] += insert_cost[insert_cost_ix + k];
if (cost[k] < min_cost) {
min_cost = cost[k];
block_id[byte_ix] = (uint8_t)k;
}
}
/* More blocks for the beginning. */
if (byte_ix < 2000) {
block_switch_cost *= 0.77 + 0.07 * (double)byte_ix / 2000;
}
for (k = 0; k < num_histograms; ++k) {
cost[k] -= min_cost;
if (cost[k] >= block_switch_cost) {
const uint8_t mask = (uint8_t)(1u << (k & 7));
cost[k] = block_switch_cost;
assert((k >> 3) < bitmaplen);
switch_signal[ix + (k >> 3)] |= mask;
}
}
}
{ /* Trace back from the last position and switch at the marked places. */
size_t byte_ix = length - 1;
size_t ix = byte_ix * bitmaplen;
uint8_t cur_id = block_id[byte_ix];
while (byte_ix > 0) {
const uint8_t mask = (uint8_t)(1u << (cur_id & 7));
assert(((size_t)cur_id >> 3) < bitmaplen);
--byte_ix;
ix -= bitmaplen;
if (switch_signal[ix + (cur_id >> 3)] & mask) {
if (cur_id != block_id[byte_ix]) {
cur_id = block_id[byte_ix];
++num_blocks;
}
}
block_id[byte_ix] = cur_id;
}
}
return num_blocks;
}
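/* Standalone sketch (not in the original source): the return value of
   FN(FindBlocks) above is "one plus the number of block switches", i.e. the
   number of runs in block_id[]. A minimal version of that count: */
#include <stddef.h>
#include <stdint.h>

static size_t CountBlocksSketch(const uint8_t* block_id, size_t length) {
  size_t num_blocks;
  size_t i;
  if (length == 0) return 0;
  num_blocks = 1;
  for (i = 1; i < length; ++i) {
    if (block_id[i] != block_id[i - 1]) ++num_blocks;
  }
  return num_blocks;
}
/* Example: block_id = {0, 0, 1, 1, 1, 0} gives 3 blocks. */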
static size_t FN(RemapBlockIds)(uint8_t* block_ids, const size_t length,
uint16_t* new_id, const size_t num_histograms) {
static const uint16_t kInvalidId = 256;
uint16_t next_id = 0;
size_t i;
for (i = 0; i < num_histograms; ++i) {
new_id[i] = kInvalidId;
}
for (i = 0; i < length; ++i) {
assert(block_ids[i] < num_histograms);
if (new_id[block_ids[i]] == kInvalidId) {
new_id[block_ids[i]] = next_id++;
}
}
for (i = 0; i < length; ++i) {
block_ids[i] = (uint8_t)new_id[block_ids[i]];
assert(block_ids[i] < num_histograms);
}
assert(next_id <= num_histograms);
return next_id;
}
static void FN(BuildBlockHistograms)(const DataType* data, const size_t length,
const uint8_t* block_ids,
const size_t num_histograms,
HistogramType* histograms) {
size_t i;
FN(ClearHistograms)(histograms, num_histograms);
for (i = 0; i < length; ++i) {
FN(HistogramAdd)(&histograms[block_ids[i]], data[i]);
}
}
static void FN(ClusterBlocks)(MemoryManager* m,
const DataType* data, const size_t length,
const size_t num_blocks,
uint8_t* block_ids,
BlockSplit* split) {
uint32_t* histogram_symbols = BROTLI_ALLOC(m, uint32_t, num_blocks);
uint32_t* block_lengths = BROTLI_ALLOC(m, uint32_t, num_blocks);
const size_t expected_num_clusters = CLUSTERS_PER_BATCH *
(num_blocks + HISTOGRAMS_PER_BATCH - 1) / HISTOGRAMS_PER_BATCH;
size_t all_histograms_size = 0;
size_t all_histograms_capacity = expected_num_clusters;
HistogramType* all_histograms =
BROTLI_ALLOC(m, HistogramType, all_histograms_capacity);
size_t cluster_size_size = 0;
size_t cluster_size_capacity = expected_num_clusters;
uint32_t* cluster_size = BROTLI_ALLOC(m, uint32_t, cluster_size_capacity);
size_t num_clusters = 0;
HistogramType* histograms = BROTLI_ALLOC(m, HistogramType,
BROTLI_MIN(size_t, num_blocks, HISTOGRAMS_PER_BATCH));
size_t max_num_pairs =
HISTOGRAMS_PER_BATCH * HISTOGRAMS_PER_BATCH / 2;
size_t pairs_capacity = max_num_pairs + 1;
HistogramPair* pairs = BROTLI_ALLOC(m, HistogramPair, pairs_capacity);
size_t pos = 0;
uint32_t* clusters;
size_t num_final_clusters;
static const uint32_t kInvalidIndex = BROTLI_UINT32_MAX;
uint32_t* new_index;
size_t i;
uint32_t sizes[HISTOGRAMS_PER_BATCH] = { 0 };
uint32_t new_clusters[HISTOGRAMS_PER_BATCH] = { 0 };
uint32_t symbols[HISTOGRAMS_PER_BATCH] = { 0 };
uint32_t remap[HISTOGRAMS_PER_BATCH] = { 0 };
if (BROTLI_IS_OOM(m)) return;
memset(block_lengths, 0, num_blocks * sizeof(uint32_t));
{
size_t block_idx = 0;
for (i = 0; i < length; ++i) {
assert(block_idx < num_blocks);
++block_lengths[block_idx];
if (i + 1 == length || block_ids[i] != block_ids[i + 1]) {
++block_idx;
}
}
assert(block_idx == num_blocks);
}
for (i = 0; i < num_blocks; i += HISTOGRAMS_PER_BATCH) {
const size_t num_to_combine =
BROTLI_MIN(size_t, num_blocks - i, HISTOGRAMS_PER_BATCH);
size_t num_new_clusters;
size_t j;
for (j = 0; j < num_to_combine; ++j) {
size_t k;
FN(HistogramClear)(&histograms[j]);
for (k = 0; k < block_lengths[i + j]; ++k) {
FN(HistogramAdd)(&histograms[j], data[pos++]);
}
histograms[j].bit_cost_ = FN(BrotliPopulationCost)(&histograms[j]);
new_clusters[j] = (uint32_t)j;
symbols[j] = (uint32_t)j;
sizes[j] = 1;
}
num_new_clusters = FN(BrotliHistogramCombine)(
histograms, sizes, symbols, new_clusters, pairs, num_to_combine,
num_to_combine, HISTOGRAMS_PER_BATCH, max_num_pairs);
BROTLI_ENSURE_CAPACITY(m, HistogramType, all_histograms,
all_histograms_capacity, all_histograms_size + num_new_clusters);
BROTLI_ENSURE_CAPACITY(m, uint32_t, cluster_size,
cluster_size_capacity, cluster_size_size + num_new_clusters);
if (BROTLI_IS_OOM(m)) return;
for (j = 0; j < num_new_clusters; ++j) {
all_histograms[all_histograms_size++] = histograms[new_clusters[j]];
cluster_size[cluster_size_size++] = sizes[new_clusters[j]];
remap[new_clusters[j]] = (uint32_t)j;
}
for (j = 0; j < num_to_combine; ++j) {
histogram_symbols[i + j] = (uint32_t)num_clusters + remap[symbols[j]];
}
num_clusters += num_new_clusters;
assert(num_clusters == cluster_size_size);
assert(num_clusters == all_histograms_size);
}
BROTLI_FREE(m, histograms);
max_num_pairs =
BROTLI_MIN(size_t, 64 * num_clusters, (num_clusters / 2) * num_clusters);
if (pairs_capacity < max_num_pairs + 1) {
BROTLI_FREE(m, pairs);
pairs = BROTLI_ALLOC(m, HistogramPair, max_num_pairs + 1);
if (BROTLI_IS_OOM(m)) return;
}
clusters = BROTLI_ALLOC(m, uint32_t, num_clusters);
if (BROTLI_IS_OOM(m)) return;
for (i = 0; i < num_clusters; ++i) {
clusters[i] = (uint32_t)i;
}
num_final_clusters = FN(BrotliHistogramCombine)(
all_histograms, cluster_size, histogram_symbols, clusters, pairs,
num_clusters, num_blocks, BROTLI_MAX_NUMBER_OF_BLOCK_TYPES,
max_num_pairs);
BROTLI_FREE(m, pairs);
BROTLI_FREE(m, cluster_size);
new_index = BROTLI_ALLOC(m, uint32_t, num_clusters);
if (BROTLI_IS_OOM(m)) return;
for (i = 0; i < num_clusters; ++i) new_index[i] = kInvalidIndex;
pos = 0;
{
uint32_t next_index = 0;
for (i = 0; i < num_blocks; ++i) {
HistogramType histo;
size_t j;
uint32_t best_out;
double best_bits;
FN(HistogramClear)(&histo);
for (j = 0; j < block_lengths[i]; ++j) {
FN(HistogramAdd)(&histo, data[pos++]);
}
best_out = (i == 0) ? histogram_symbols[0] : histogram_symbols[i - 1];
best_bits =
FN(BrotliHistogramBitCostDistance)(&histo, &all_histograms[best_out]);
for (j = 0; j < num_final_clusters; ++j) {
const double cur_bits = FN(BrotliHistogramBitCostDistance)(
&histo, &all_histograms[clusters[j]]);
if (cur_bits < best_bits) {
best_bits = cur_bits;
best_out = clusters[j];
}
}
histogram_symbols[i] = best_out;
if (new_index[best_out] == kInvalidIndex) {
new_index[best_out] = next_index++;
}
}
}
BROTLI_FREE(m, clusters);
BROTLI_FREE(m, all_histograms);
BROTLI_ENSURE_CAPACITY(
m, uint8_t, split->types, split->types_alloc_size, num_blocks);
BROTLI_ENSURE_CAPACITY(
m, uint32_t, split->lengths, split->lengths_alloc_size, num_blocks);
if (BROTLI_IS_OOM(m)) return;
{
uint32_t cur_length = 0;
size_t block_idx = 0;
uint8_t max_type = 0;
for (i = 0; i < num_blocks; ++i) {
cur_length += block_lengths[i];
if (i + 1 == num_blocks ||
histogram_symbols[i] != histogram_symbols[i + 1]) {
const uint8_t id = (uint8_t)new_index[histogram_symbols[i]];
split->types[block_idx] = id;
split->lengths[block_idx] = cur_length;
max_type = BROTLI_MAX(uint8_t, max_type, id);
cur_length = 0;
++block_idx;
}
}
split->num_blocks = block_idx;
split->num_types = (size_t)max_type + 1;
}
BROTLI_FREE(m, new_index);
BROTLI_FREE(m, block_lengths);
BROTLI_FREE(m, histogram_symbols);
}
static void FN(SplitByteVector)(MemoryManager* m,
const DataType* data, const size_t length,
const size_t literals_per_histogram,
const size_t max_histograms,
const size_t sampling_stride_length,
const double block_switch_cost,
const BrotliEncoderParams* params,
BlockSplit* split) {
const size_t data_size = FN(HistogramDataSize)();
size_t num_histograms = length / literals_per_histogram + 1;
HistogramType* histograms;
if (num_histograms > max_histograms) {
num_histograms = max_histograms;
}
if (length == 0) {
split->num_types = 1;
return;
} else if (length < kMinLengthForBlockSplitting) {
BROTLI_ENSURE_CAPACITY(m, uint8_t,
split->types, split->types_alloc_size, split->num_blocks + 1);
BROTLI_ENSURE_CAPACITY(m, uint32_t,
split->lengths, split->lengths_alloc_size, split->num_blocks + 1);
if (BROTLI_IS_OOM(m)) return;
split->num_types = 1;
split->types[split->num_blocks] = 0;
split->lengths[split->num_blocks] = (uint32_t)length;
split->num_blocks++;
return;
}
histograms = BROTLI_ALLOC(m, HistogramType, num_histograms);
if (BROTLI_IS_OOM(m)) return;
/* Find good entropy codes. */
FN(InitialEntropyCodes)(data, length,
sampling_stride_length,
num_histograms, histograms);
FN(RefineEntropyCodes)(data, length,
sampling_stride_length,
num_histograms, histograms);
{
/* Find a good path through literals with the good entropy codes. */
uint8_t* block_ids = BROTLI_ALLOC(m, uint8_t, length);
size_t num_blocks = 0;
const size_t bitmaplen = (num_histograms + 7) >> 3;
double* insert_cost = BROTLI_ALLOC(m, double, data_size * num_histograms);
double* cost = BROTLI_ALLOC(m, double, num_histograms);
uint8_t* switch_signal = BROTLI_ALLOC(m, uint8_t, length * bitmaplen);
uint16_t* new_id = BROTLI_ALLOC(m, uint16_t, num_histograms);
const size_t iters = params->quality < HQ_ZOPFLIFICATION_QUALITY ? 3 : 10;
size_t i;
if (BROTLI_IS_OOM(m)) return;
for (i = 0; i < iters; ++i) {
num_blocks = FN(FindBlocks)(data, length,
block_switch_cost,
num_histograms, histograms,
insert_cost, cost, switch_signal,
block_ids);
num_histograms = FN(RemapBlockIds)(block_ids, length,
new_id, num_histograms);
FN(BuildBlockHistograms)(data, length, block_ids,
num_histograms, histograms);
}
BROTLI_FREE(m, insert_cost);
BROTLI_FREE(m, cost);
BROTLI_FREE(m, switch_signal);
BROTLI_FREE(m, new_id);
BROTLI_FREE(m, histograms);
FN(ClusterBlocks)(m, data, length, num_blocks, block_ids, split);
if (BROTLI_IS_OOM(m)) return;
BROTLI_FREE(m, block_ids);
}
}
#undef HistogramType

View File

@ -1,103 +0,0 @@
/* Copyright 2014 Google Inc. All Rights Reserved.
Distributed under MIT license.
See file LICENSE for detail or copy at https://opensource.org/licenses/MIT
*/
/* Functions to convert brotli-related data structures into the
brotli bit stream. The functions here operate under the
assumption that there is enough space in the storage, i.e., there are
no out-of-range checks anywhere.
These functions do bit addressing into a byte array. The byte array
is called "storage" and the index to the bit is called storage_ix
in function arguments. */
#ifndef BROTLI_ENC_BROTLI_BIT_STREAM_H_
#define BROTLI_ENC_BROTLI_BIT_STREAM_H_
#include <brotli/types.h>
#include "./command.h"
#include "./context.h"
#include "./entropy_encode.h"
#include "./memory.h"
#include "./metablock.h"
#include "./port.h"
#if defined(__cplusplus) || defined(c_plusplus)
extern "C" {
#endif
/* All Store functions here will use a storage_ix, which is always the bit
position for the current storage. */
BROTLI_INTERNAL void BrotliStoreHuffmanTree(const uint8_t* depths, size_t num,
HuffmanTree* tree, size_t *storage_ix, uint8_t *storage);
BROTLI_INTERNAL void BrotliBuildAndStoreHuffmanTreeFast(
MemoryManager* m, const uint32_t* histogram, const size_t histogram_total,
const size_t max_bits, uint8_t* depth, uint16_t* bits, size_t* storage_ix,
uint8_t* storage);
/* REQUIRES: length > 0 */
/* REQUIRES: length <= (1 << 24) */
BROTLI_INTERNAL void BrotliStoreMetaBlock(MemoryManager* m,
const uint8_t* input,
size_t start_pos,
size_t length,
size_t mask,
uint8_t prev_byte,
uint8_t prev_byte2,
BROTLI_BOOL is_final_block,
uint32_t num_direct_distance_codes,
uint32_t distance_postfix_bits,
ContextType literal_context_mode,
const Command* commands,
size_t n_commands,
const MetaBlockSplit* mb,
size_t* storage_ix,
uint8_t* storage);
/* Stores the meta-block without doing any block splitting, just collects
one histogram per block category and uses that for entropy coding.
REQUIRES: length > 0
REQUIRES: length <= (1 << 24) */
BROTLI_INTERNAL void BrotliStoreMetaBlockTrivial(MemoryManager* m,
const uint8_t* input,
size_t start_pos,
size_t length,
size_t mask,
BROTLI_BOOL is_last,
const Command *commands,
size_t n_commands,
size_t* storage_ix,
uint8_t* storage);
/* Same as above, but uses static prefix codes for histograms with only a few
symbols, and uses static code length prefix codes for all other histograms.
REQUIRES: length > 0
REQUIRES: length <= (1 << 24) */
BROTLI_INTERNAL void BrotliStoreMetaBlockFast(MemoryManager* m,
const uint8_t* input,
size_t start_pos,
size_t length,
size_t mask,
BROTLI_BOOL is_last,
const Command *commands,
size_t n_commands,
size_t* storage_ix,
uint8_t* storage);
/* This is for storing uncompressed blocks (simple raw storage of
bytes-as-bytes).
REQUIRES: length > 0
REQUIRES: length <= (1 << 24) */
BROTLI_INTERNAL void BrotliStoreUncompressedMetaBlock(
BROTLI_BOOL is_final_block, const uint8_t* input, size_t position,
size_t mask, size_t len, size_t* storage_ix, uint8_t* storage);
#if defined(__cplusplus) || defined(c_plusplus)
} /* extern "C" */
#endif
#endif /* BROTLI_ENC_BROTLI_BIT_STREAM_H_ */
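/* Minimal sketch (an assumption, not the original write_bits.h
   implementation) of the "storage"/"storage_ix" convention described in the
   header comment above: bits are appended LSB-first at bit offset
   *storage_ix inside the storage byte array, and the offset is advanced. */
#include <stddef.h>
#include <stdint.h>

static void WriteBitsSketch(size_t n_bits, uint64_t bits,
                            size_t* storage_ix, uint8_t* storage) {
  size_t i;
  for (i = 0; i < n_bits; ++i) {
    const size_t byte_ix = (*storage_ix + i) >> 3;
    const size_t bit_ix = (*storage_ix + i) & 7;
    storage[byte_ix] = (uint8_t)(storage[byte_ix] |
                                 (((bits >> i) & 1u) << bit_ix));
  }
  *storage_ix += n_bits;
}
/* Example: writing 3 bits of value 0x5 at *storage_ix == 0 sets storage[0]
   to 0x05 and leaves *storage_ix == 3. */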

View File

@ -1,48 +0,0 @@
/* Copyright 2013 Google Inc. All Rights Reserved.
Distributed under MIT license.
See file LICENSE for detail or copy at https://opensource.org/licenses/MIT
*/
/* Functions for clustering similar histograms together. */
#ifndef BROTLI_ENC_CLUSTER_H_
#define BROTLI_ENC_CLUSTER_H_
#include <brotli/types.h>
#include "./histogram.h"
#include "./memory.h"
#include "./port.h"
#if defined(__cplusplus) || defined(c_plusplus)
extern "C" {
#endif
typedef struct HistogramPair {
uint32_t idx1;
uint32_t idx2;
double cost_combo;
double cost_diff;
} HistogramPair;
#define CODE(X) /* Declaration */;
#define FN(X) X ## Literal
#include "./cluster_inc.h" /* NOLINT(build/include) */
#undef FN
#define FN(X) X ## Command
#include "./cluster_inc.h" /* NOLINT(build/include) */
#undef FN
#define FN(X) X ## Distance
#include "./cluster_inc.h" /* NOLINT(build/include) */
#undef FN
#undef CODE
#if defined(__cplusplus) || defined(c_plusplus)
} /* extern "C" */
#endif
#endif /* BROTLI_ENC_CLUSTER_H_ */
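/* Illustration (hypothetical Describe name, not from the library) of the
   macro-template pattern used above: defining FN before including a *_inc.h
   body stamps out one copy of every function per histogram type, so
   FN(BrotliHistogramCombine) becomes BrotliHistogramCombineLiteral,
   ...Command and ...Distance. */
#include <stdio.h>
#define FN(X) X ## Literal
static void FN(Describe)(void) { puts("DescribeLiteral"); }
#undef FN
int main(void) {
  DescribeLiteral();  /* the token-pasted name produced by FN(Describe) */
  return 0;
}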

View File

@ -1,317 +0,0 @@
/* NOLINT(build/header_guard) */
/* Copyright 2013 Google Inc. All Rights Reserved.
Distributed under MIT license.
See file LICENSE for detail or copy at https://opensource.org/licenses/MIT
*/
/* template parameters: FN, CODE */
#define HistogramType FN(Histogram)
/* Computes the bit cost reduction by combining out[idx1] and out[idx2] and if
it is below a threshold, stores the pair (idx1, idx2) in the *pairs queue. */
BROTLI_INTERNAL void FN(BrotliCompareAndPushToQueue)(
const HistogramType* out, const uint32_t* cluster_size, uint32_t idx1,
uint32_t idx2, size_t max_num_pairs, HistogramPair* pairs,
size_t* num_pairs) CODE({
BROTLI_BOOL is_good_pair = BROTLI_FALSE;
HistogramPair p;
p.idx1 = p.idx2 = 0;
p.cost_diff = p.cost_combo = 0;
if (idx1 == idx2) {
return;
}
if (idx2 < idx1) {
uint32_t t = idx2;
idx2 = idx1;
idx1 = t;
}
p.idx1 = idx1;
p.idx2 = idx2;
p.cost_diff = 0.5 * ClusterCostDiff(cluster_size[idx1], cluster_size[idx2]);
p.cost_diff -= out[idx1].bit_cost_;
p.cost_diff -= out[idx2].bit_cost_;
if (out[idx1].total_count_ == 0) {
p.cost_combo = out[idx2].bit_cost_;
is_good_pair = BROTLI_TRUE;
} else if (out[idx2].total_count_ == 0) {
p.cost_combo = out[idx1].bit_cost_;
is_good_pair = BROTLI_TRUE;
} else {
double threshold = *num_pairs == 0 ? 1e99 :
BROTLI_MAX(double, 0.0, pairs[0].cost_diff);
HistogramType combo = out[idx1];
double cost_combo;
FN(HistogramAddHistogram)(&combo, &out[idx2]);
cost_combo = FN(BrotliPopulationCost)(&combo);
if (cost_combo < threshold - p.cost_diff) {
p.cost_combo = cost_combo;
is_good_pair = BROTLI_TRUE;
}
}
if (is_good_pair) {
p.cost_diff += p.cost_combo;
if (*num_pairs > 0 && HistogramPairIsLess(&pairs[0], &p)) {
/* Replace the top of the queue if needed. */
if (*num_pairs < max_num_pairs) {
pairs[*num_pairs] = pairs[0];
++(*num_pairs);
}
pairs[0] = p;
} else if (*num_pairs < max_num_pairs) {
pairs[*num_pairs] = p;
++(*num_pairs);
}
}
})
BROTLI_INTERNAL size_t FN(BrotliHistogramCombine)(HistogramType* out,
uint32_t* cluster_size,
uint32_t* symbols,
uint32_t* clusters,
HistogramPair* pairs,
size_t num_clusters,
size_t symbols_size,
size_t max_clusters,
size_t max_num_pairs) CODE({
double cost_diff_threshold = 0.0;
size_t min_cluster_size = 1;
size_t num_pairs = 0;
{
/* We maintain a vector of histogram pairs, with the property that the pair
with the maximum bit cost reduction is the first. */
size_t idx1;
for (idx1 = 0; idx1 < num_clusters; ++idx1) {
size_t idx2;
for (idx2 = idx1 + 1; idx2 < num_clusters; ++idx2) {
FN(BrotliCompareAndPushToQueue)(out, cluster_size, clusters[idx1],
clusters[idx2], max_num_pairs, &pairs[0], &num_pairs);
}
}
}
while (num_clusters > min_cluster_size) {
uint32_t best_idx1;
uint32_t best_idx2;
size_t i;
if (pairs[0].cost_diff >= cost_diff_threshold) {
cost_diff_threshold = 1e99;
min_cluster_size = max_clusters;
continue;
}
/* Take the best pair from the top of heap. */
best_idx1 = pairs[0].idx1;
best_idx2 = pairs[0].idx2;
FN(HistogramAddHistogram)(&out[best_idx1], &out[best_idx2]);
out[best_idx1].bit_cost_ = pairs[0].cost_combo;
cluster_size[best_idx1] += cluster_size[best_idx2];
for (i = 0; i < symbols_size; ++i) {
if (symbols[i] == best_idx2) {
symbols[i] = best_idx1;
}
}
for (i = 0; i < num_clusters; ++i) {
if (clusters[i] == best_idx2) {
memmove(&clusters[i], &clusters[i + 1],
(num_clusters - i - 1) * sizeof(clusters[0]));
break;
}
}
--num_clusters;
{
/* Remove pairs intersecting the just combined best pair. */
size_t copy_to_idx = 0;
for (i = 0; i < num_pairs; ++i) {
HistogramPair* p = &pairs[i];
if (p->idx1 == best_idx1 || p->idx2 == best_idx1 ||
p->idx1 == best_idx2 || p->idx2 == best_idx2) {
/* Remove invalid pair from the queue. */
continue;
}
if (HistogramPairIsLess(&pairs[0], p)) {
/* Replace the top of the queue if needed. */
HistogramPair front = pairs[0];
pairs[0] = *p;
pairs[copy_to_idx] = front;
} else {
pairs[copy_to_idx] = *p;
}
++copy_to_idx;
}
num_pairs = copy_to_idx;
}
/* Push new pairs formed with the combined histogram to the heap. */
for (i = 0; i < num_clusters; ++i) {
FN(BrotliCompareAndPushToQueue)(out, cluster_size, best_idx1, clusters[i],
max_num_pairs, &pairs[0], &num_pairs);
}
}
return num_clusters;
})
/* Returns the bit cost of moving a histogram from cur_symbol to candidate. */
BROTLI_INTERNAL double FN(BrotliHistogramBitCostDistance)(
const HistogramType* histogram, const HistogramType* candidate) CODE({
if (histogram->total_count_ == 0) {
return 0.0;
} else {
HistogramType tmp = *histogram;
FN(HistogramAddHistogram)(&tmp, candidate);
return FN(BrotliPopulationCost)(&tmp) - candidate->bit_cost_;
}
})
/* Find the best 'out' histogram for each of the 'in' histograms.
When called, clusters[0..num_clusters) contains the unique values from
symbols[0..in_size), but this property is not preserved in this function.
Note: we assume that out[]->bit_cost_ is already up-to-date. */
BROTLI_INTERNAL void FN(BrotliHistogramRemap)(const HistogramType* in,
size_t in_size, const uint32_t* clusters, size_t num_clusters,
HistogramType* out, uint32_t* symbols) CODE({
size_t i;
for (i = 0; i < in_size; ++i) {
uint32_t best_out = i == 0 ? symbols[0] : symbols[i - 1];
double best_bits =
FN(BrotliHistogramBitCostDistance)(&in[i], &out[best_out]);
size_t j;
for (j = 0; j < num_clusters; ++j) {
const double cur_bits =
FN(BrotliHistogramBitCostDistance)(&in[i], &out[clusters[j]]);
if (cur_bits < best_bits) {
best_bits = cur_bits;
best_out = clusters[j];
}
}
symbols[i] = best_out;
}
/* Recompute each out based on raw and symbols. */
for (i = 0; i < num_clusters; ++i) {
FN(HistogramClear)(&out[clusters[i]]);
}
for (i = 0; i < in_size; ++i) {
FN(HistogramAddHistogram)(&out[symbols[i]], &in[i]);
}
})
/* Reorders elements of the out[0..length) array and changes values in
symbols[0..length) array in the following way:
* when called, symbols[] contains indexes into out[], and has N unique
values (possibly N < length)
* on return, symbols'[i] = f(symbols[i]) and
out'[symbols'[i]] = out[symbols[i]], for each 0 <= i < length,
where f is a bijection between the range of symbols[] and [0..N), and
the first occurrences of values in symbols'[i] come in consecutive
increasing order.
Returns N, the number of unique values in symbols[]. */
BROTLI_INTERNAL size_t FN(BrotliHistogramReindex)(MemoryManager* m,
HistogramType* out, uint32_t* symbols, size_t length) CODE({
static const uint32_t kInvalidIndex = BROTLI_UINT32_MAX;
uint32_t* new_index = BROTLI_ALLOC(m, uint32_t, length);
uint32_t next_index;
HistogramType* tmp;
size_t i;
if (BROTLI_IS_OOM(m)) return 0;
for (i = 0; i < length; ++i) {
new_index[i] = kInvalidIndex;
}
next_index = 0;
for (i = 0; i < length; ++i) {
if (new_index[symbols[i]] == kInvalidIndex) {
new_index[symbols[i]] = next_index;
++next_index;
}
}
/* TODO: by using the idea of "cycle-sort" we can avoid the allocation of
tmp and reduce the amount of copying by a factor of 2. */
tmp = BROTLI_ALLOC(m, HistogramType, next_index);
if (BROTLI_IS_OOM(m)) return 0;
next_index = 0;
for (i = 0; i < length; ++i) {
if (new_index[symbols[i]] == next_index) {
tmp[next_index] = out[symbols[i]];
++next_index;
}
symbols[i] = new_index[symbols[i]];
}
BROTLI_FREE(m, new_index);
for (i = 0; i < next_index; ++i) {
out[i] = tmp[i];
}
BROTLI_FREE(m, tmp);
return next_index;
})
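/* Standalone sketch of the reindexing contract documented above: first
   occurrences in symbols[] are renumbered 0, 1, 2, ... in order of
   appearance and the number of unique values is returned (the real function
   also permutes the out[] histograms to match). Assumes symbol values fit
   in [0, 256). */
#include <stddef.h>
#include <stdint.h>
#include <string.h>

static size_t ReindexSymbolsSketch(uint32_t* symbols, size_t length) {
  uint32_t new_index[256];
  uint32_t next_index = 0;
  size_t i;
  memset(new_index, 0xff, sizeof(new_index));  /* all entries invalid */
  for (i = 0; i < length; ++i) {
    if (new_index[symbols[i]] == 0xffffffffu) {
      new_index[symbols[i]] = next_index++;
    }
    symbols[i] = new_index[symbols[i]];
  }
  return next_index;
}
/* Example: {5, 2, 5, 7} becomes {0, 1, 0, 2} and the function returns 3. */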
BROTLI_INTERNAL void FN(BrotliClusterHistograms)(
MemoryManager* m, const HistogramType* in, const size_t in_size,
size_t max_histograms, HistogramType* out, size_t* out_size,
uint32_t* histogram_symbols) CODE({
uint32_t* cluster_size = BROTLI_ALLOC(m, uint32_t, in_size);
uint32_t* clusters = BROTLI_ALLOC(m, uint32_t, in_size);
size_t num_clusters = 0;
const size_t max_input_histograms = 64;
size_t pairs_capacity = max_input_histograms * max_input_histograms / 2;
/* For the first pass of clustering, we allow all pairs. */
HistogramPair* pairs = BROTLI_ALLOC(m, HistogramPair, pairs_capacity + 1);
size_t i;
if (BROTLI_IS_OOM(m)) return;
for (i = 0; i < in_size; ++i) {
cluster_size[i] = 1;
}
for (i = 0; i < in_size; ++i) {
out[i] = in[i];
out[i].bit_cost_ = FN(BrotliPopulationCost)(&in[i]);
histogram_symbols[i] = (uint32_t)i;
}
for (i = 0; i < in_size; i += max_input_histograms) {
size_t num_to_combine =
BROTLI_MIN(size_t, in_size - i, max_input_histograms);
size_t num_new_clusters;
size_t j;
for (j = 0; j < num_to_combine; ++j) {
clusters[num_clusters + j] = (uint32_t)(i + j);
}
num_new_clusters =
FN(BrotliHistogramCombine)(out, cluster_size,
&histogram_symbols[i],
&clusters[num_clusters], pairs,
num_to_combine, num_to_combine,
max_histograms, pairs_capacity);
num_clusters += num_new_clusters;
}
{
/* For the second pass, we limit the total number of histogram pairs.
After this limit is reached, we only keep searching for the best pair. */
size_t max_num_pairs = BROTLI_MIN(size_t,
64 * num_clusters, (num_clusters / 2) * num_clusters);
BROTLI_ENSURE_CAPACITY(
m, HistogramPair, pairs, pairs_capacity, max_num_pairs + 1);
if (BROTLI_IS_OOM(m)) return;
/* Collapse similar histograms. */
num_clusters = FN(BrotliHistogramCombine)(out, cluster_size,
histogram_symbols, clusters,
pairs, num_clusters, in_size,
max_histograms, max_num_pairs);
}
BROTLI_FREE(m, pairs);
BROTLI_FREE(m, cluster_size);
/* Find the optimal map from original histograms to the final ones. */
FN(BrotliHistogramRemap)(in, in_size, clusters, num_clusters,
out, histogram_symbols);
BROTLI_FREE(m, clusters);
/* Convert the context map to a canonical form. */
*out_size = FN(BrotliHistogramReindex)(m, out, histogram_symbols, in_size);
if (BROTLI_IS_OOM(m)) return;
})
#undef HistogramType

View File

@ -1,180 +0,0 @@
/* Copyright 2013 Google Inc. All Rights Reserved.
Distributed under MIT license.
See file LICENSE for detail or copy at https://opensource.org/licenses/MIT
*/
/* This class models a sequence of literals and a backward reference copy. */
#ifndef BROTLI_ENC_COMMAND_H_
#define BROTLI_ENC_COMMAND_H_
#include "../common/constants.h"
#include <brotli/port.h>
#include <brotli/types.h>
#include "./fast_log.h"
#include "./prefix.h"
#if defined(__cplusplus) || defined(c_plusplus)
extern "C" {
#endif
static uint32_t kInsBase[] = { 0, 1, 2, 3, 4, 5, 6, 8, 10, 14, 18, 26, 34, 50,
66, 98, 130, 194, 322, 578, 1090, 2114, 6210, 22594 };
static uint32_t kInsExtra[] = { 0, 0, 0, 0, 0, 0, 1, 1, 2, 2, 3, 3, 4, 4,
5, 5, 6, 7, 8, 9, 10, 12, 14, 24 };
static uint32_t kCopyBase[] = { 2, 3, 4, 5, 6, 7, 8, 9, 10, 12, 14, 18, 22, 30,
38, 54, 70, 102, 134, 198, 326, 582, 1094, 2118 };
static uint32_t kCopyExtra[] = { 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 2, 2, 3, 3,
4, 4, 5, 5, 6, 7, 8, 9, 10, 24 };
static BROTLI_INLINE uint16_t GetInsertLengthCode(size_t insertlen) {
if (insertlen < 6) {
return (uint16_t)insertlen;
} else if (insertlen < 130) {
uint32_t nbits = Log2FloorNonZero(insertlen - 2) - 1u;
return (uint16_t)((nbits << 1) + ((insertlen - 2) >> nbits) + 2);
} else if (insertlen < 2114) {
return (uint16_t)(Log2FloorNonZero(insertlen - 66) + 10);
} else if (insertlen < 6210) {
return 21u;
} else if (insertlen < 22594) {
return 22u;
} else {
return 23u;
}
}
static BROTLI_INLINE uint16_t GetCopyLengthCode(size_t copylen) {
if (copylen < 10) {
return (uint16_t)(copylen - 2);
} else if (copylen < 134) {
uint32_t nbits = Log2FloorNonZero(copylen - 6) - 1u;
return (uint16_t)((nbits << 1) + ((copylen - 6) >> nbits) + 4);
} else if (copylen < 2118) {
return (uint16_t)(Log2FloorNonZero(copylen - 70) + 12);
} else {
return 23u;
}
}
static BROTLI_INLINE uint16_t CombineLengthCodes(
uint16_t inscode, uint16_t copycode, BROTLI_BOOL use_last_distance) {
uint16_t bits64 =
(uint16_t)((copycode & 0x7u) | ((inscode & 0x7u) << 3));
if (use_last_distance && inscode < 8 && copycode < 16) {
return (copycode < 8) ? bits64 : (bits64 | 64);
} else {
/* Specification: 5 Encoding of ... (last table) */
/* offset = 2 * index, where index is in range [0..8] */
int offset = 2 * ((copycode >> 3) + 3 * (inscode >> 3));
/* All values in specification are K * 64,
where K = [2, 3, 6, 4, 5, 8, 7, 9, 10],
i + 1 = [1, 2, 3, 4, 5, 6, 7, 8, 9],
K - i - 1 = [1, 1, 3, 0, 0, 2, 0, 1, 2] = D.
All values in D require only 2 bits to encode.
Magic constant is shifted 6 bits left, to avoid final multiplication. */
offset = (offset << 5) + 0x40 + ((0x520D40 >> offset) & 0xC0);
return (uint16_t)offset | bits64;
}
}
static BROTLI_INLINE void GetLengthCode(size_t insertlen, size_t copylen,
BROTLI_BOOL use_last_distance,
uint16_t* code) {
uint16_t inscode = GetInsertLengthCode(insertlen);
uint16_t copycode = GetCopyLengthCode(copylen);
*code = CombineLengthCodes(inscode, copycode, use_last_distance);
}
static BROTLI_INLINE uint32_t GetInsertBase(uint16_t inscode) {
return kInsBase[inscode];
}
static BROTLI_INLINE uint32_t GetInsertExtra(uint16_t inscode) {
return kInsExtra[inscode];
}
static BROTLI_INLINE uint32_t GetCopyBase(uint16_t copycode) {
return kCopyBase[copycode];
}
static BROTLI_INLINE uint32_t GetCopyExtra(uint16_t copycode) {
return kCopyExtra[copycode];
}
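/* Standalone restatement (not part of the original header) of the
   insert-length code formula above, with a portable log2-floor helper so it
   can be checked in isolation. Against the kInsBase/kInsExtra tables above,
   a length L maps to code c with
   kInsBase[c] <= L < kInsBase[c] + (1 << kInsExtra[c]).
   Examples: 5 -> 5, 7 -> 6, 66 -> 14, 130 -> 16. */
#include <stddef.h>
#include <stdint.h>

static uint32_t Log2FloorSketch(size_t n) {  /* requires n > 0 */
  uint32_t r = 0;
  while (n >>= 1) ++r;
  return r;
}

static uint16_t InsertLengthCodeSketch(size_t insertlen) {
  if (insertlen < 6) return (uint16_t)insertlen;
  if (insertlen < 130) {
    uint32_t nbits = Log2FloorSketch(insertlen - 2) - 1u;
    return (uint16_t)((nbits << 1) + ((insertlen - 2) >> nbits) + 2);
  }
  if (insertlen < 2114) {
    return (uint16_t)(Log2FloorSketch(insertlen - 66) + 10);
  }
  return (uint16_t)(insertlen < 6210 ? 21 : (insertlen < 22594 ? 22 : 23));
}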
typedef struct Command {
uint32_t insert_len_;
/* Stores copy_len in low 24 bits and copy_len XOR copy_code in high 8 bits. */
uint32_t copy_len_;
uint32_t dist_extra_;
uint16_t cmd_prefix_;
uint16_t dist_prefix_;
} Command;
/* distance_code is e.g. 0 for same-as-last short code, or 16 for offset 1. */
static BROTLI_INLINE void InitCommand(Command* self, size_t insertlen,
size_t copylen, int copylen_code_delta, size_t distance_code) {
/* Don't rely on signed int representation, use honest casts. */
uint32_t delta = (uint8_t)((int8_t)copylen_code_delta);
self->insert_len_ = (uint32_t)insertlen;
self->copy_len_ = (uint32_t)(copylen | (delta << 24));
/* The distance prefix and extra bits are stored in this Command as if
npostfix and ndirect were 0, they are only recomputed later after the
clustering if needed. */
PrefixEncodeCopyDistance(
distance_code, 0, 0, &self->dist_prefix_, &self->dist_extra_);
GetLengthCode(
insertlen, (size_t)((int)copylen + copylen_code_delta),
TO_BROTLI_BOOL(self->dist_prefix_ == 0), &self->cmd_prefix_);
}
static BROTLI_INLINE void InitInsertCommand(Command* self, size_t insertlen) {
self->insert_len_ = (uint32_t)insertlen;
self->copy_len_ = 4 << 24;
self->dist_extra_ = 0;
self->dist_prefix_ = BROTLI_NUM_DISTANCE_SHORT_CODES;
GetLengthCode(insertlen, 4, BROTLI_FALSE, &self->cmd_prefix_);
}
static BROTLI_INLINE uint32_t CommandRestoreDistanceCode(const Command* self) {
if (self->dist_prefix_ < BROTLI_NUM_DISTANCE_SHORT_CODES) {
return self->dist_prefix_;
} else {
uint32_t nbits = self->dist_extra_ >> 24;
uint32_t extra = self->dist_extra_ & 0xffffff;
/* It is assumed that the distance was first encoded with NPOSTFIX = 0 and
NDIRECT = 0, so the code itself is of this form:
BROTLI_NUM_DISTANCE_SHORT_CODES + 2 * (nbits - 1) + prefix_bit
Therefore, the following expression results in (2 + prefix_bit). */
uint32_t prefix =
self->dist_prefix_ + 4u - BROTLI_NUM_DISTANCE_SHORT_CODES - 2u * nbits;
/* Subtract 4 for offset (Chapter 4.) and
increase by BROTLI_NUM_DISTANCE_SHORT_CODES - 1 */
return (prefix << nbits) + extra + BROTLI_NUM_DISTANCE_SHORT_CODES - 4u;
}
}
static BROTLI_INLINE uint32_t CommandDistanceContext(const Command* self) {
uint32_t r = self->cmd_prefix_ >> 6;
uint32_t c = self->cmd_prefix_ & 7;
if ((r == 0 || r == 2 || r == 4 || r == 7) && (c <= 2)) {
return c;
}
return 3;
}
static BROTLI_INLINE uint32_t CommandCopyLen(const Command* self) {
return self->copy_len_ & 0xFFFFFF;
}
static BROTLI_INLINE uint32_t CommandCopyLenCode(const Command* self) {
int32_t delta = (int8_t)((uint8_t)(self->copy_len_ >> 24));
return (uint32_t)((int32_t)(self->copy_len_ & 0xFFFFFF) + delta);
}
#if defined(__cplusplus) || defined(c_plusplus)
} /* extern "C" */
#endif
#endif /* BROTLI_ENC_COMMAND_H_ */

View File

@ -1,61 +0,0 @@
/* Copyright 2015 Google Inc. All Rights Reserved.
Distributed under MIT license.
See file LICENSE for detail or copy at https://opensource.org/licenses/MIT
*/
/* Function for fast encoding of an input fragment, independently of the input
history. This function uses one-pass processing: when we find a backward
match, we immediately emit the corresponding command and literal codes to
the bit stream. */
#ifndef BROTLI_ENC_COMPRESS_FRAGMENT_H_
#define BROTLI_ENC_COMPRESS_FRAGMENT_H_
#include <brotli/types.h>
#include "./memory.h"
#include "./port.h"
#if defined(__cplusplus) || defined(c_plusplus)
extern "C" {
#endif
/* Compresses "input" string to the "*storage" buffer as one or more complete
meta-blocks, and updates the "*storage_ix" bit position.
If "is_last" is 1, emits an additional empty last meta-block.
"cmd_depth" and "cmd_bits" contain the command and distance prefix codes
(see comment in encode.h) used for the encoding of this input fragment.
If "is_last" is 0, they are updated to reflect the statistics
of this input fragment, to be used for the encoding of the next fragment.
"*cmd_code_numbits" is the number of bits of the compressed representation
of the command and distance prefix codes, and "cmd_code" is an array of
at least "(*cmd_code_numbits + 7) >> 3" size that contains the compressed
command and distance prefix codes. If "is_last" is 0, these are also
updated to represent the updated "cmd_depth" and "cmd_bits".
REQUIRES: "input_size" is greater than zero, or "is_last" is 1.
REQUIRES: "input_size" is less or equal to maximal metablock size (1 << 24).
REQUIRES: All elements in "table[0..table_size-1]" are initialized to zero.
REQUIRES: "table_size" is an odd (9, 11, 13, 15) power of two
OUTPUT: maximal copy distance <= |input_size|
OUTPUT: maximal copy distance <= BROTLI_MAX_BACKWARD_LIMIT(18) */
BROTLI_INTERNAL void BrotliCompressFragmentFast(MemoryManager* m,
const uint8_t* input,
size_t input_size,
BROTLI_BOOL is_last,
int* table, size_t table_size,
uint8_t cmd_depth[128],
uint16_t cmd_bits[128],
size_t* cmd_code_numbits,
uint8_t* cmd_code,
size_t* storage_ix,
uint8_t* storage);
#if defined(__cplusplus) || defined(c_plusplus)
} /* extern "C" */
#endif
#endif /* BROTLI_ENC_COMPRESS_FRAGMENT_H_ */

View File

@ -1,54 +0,0 @@
/* Copyright 2015 Google Inc. All Rights Reserved.
Distributed under MIT license.
See file LICENSE for detail or copy at https://opensource.org/licenses/MIT
*/
/* Function for fast encoding of an input fragment, independently of the input
history. This function uses two-pass processing: in the first pass we save
the found backward matches and literal bytes into a buffer, and in the
second pass we emit them into the bit stream using prefix codes built based
on the actual command and literal byte histograms. */
#ifndef BROTLI_ENC_COMPRESS_FRAGMENT_TWO_PASS_H_
#define BROTLI_ENC_COMPRESS_FRAGMENT_TWO_PASS_H_
#include <brotli/types.h>
#include "./memory.h"
#include "./port.h"
#if defined(__cplusplus) || defined(c_plusplus)
extern "C" {
#endif
static const size_t kCompressFragmentTwoPassBlockSize = 1 << 17;
/* Compresses "input" string to the "*storage" buffer as one or more complete
meta-blocks, and updates the "*storage_ix" bit position.
If "is_last" is 1, emits an additional empty last meta-block.
REQUIRES: "input_size" is greater than zero, or "is_last" is 1.
REQUIRES: "input_size" is less or equal to maximal metablock size (1 << 24).
REQUIRES: "command_buf" and "literal_buf" point to at least
kCompressFragmentTwoPassBlockSize long arrays.
REQUIRES: All elements in "table[0..table_size-1]" are initialized to zero.
REQUIRES: "table_size" is a power of two
OUTPUT: maximal copy distance <= |input_size|
OUTPUT: maximal copy distance <= BROTLI_MAX_BACKWARD_LIMIT(18) */
BROTLI_INTERNAL void BrotliCompressFragmentTwoPass(MemoryManager* m,
const uint8_t* input,
size_t input_size,
BROTLI_BOOL is_last,
uint32_t* command_buf,
uint8_t* literal_buf,
int* table,
size_t table_size,
size_t* storage_ix,
uint8_t* storage);
#if defined(__cplusplus) || defined(c_plusplus)
} /* extern "C" */
#endif
#endif /* BROTLI_ENC_COMPRESS_FRAGMENT_TWO_PASS_H_ */

View File

@ -1,184 +0,0 @@
/* Copyright 2013 Google Inc. All Rights Reserved.
Distributed under MIT license.
See file LICENSE for detail or copy at https://opensource.org/licenses/MIT
*/
/* Functions to map previous bytes into a context id. */
#ifndef BROTLI_ENC_CONTEXT_H_
#define BROTLI_ENC_CONTEXT_H_
#include <brotli/port.h>
#include <brotli/types.h>
#if defined(__cplusplus) || defined(c_plusplus)
extern "C" {
#endif
/* Second-order context lookup table for UTF8 byte streams.
If p1 and p2 are the previous two bytes, we calculate the context as
context = kUTF8ContextLookup[p1] | kUTF8ContextLookup[p2 + 256].
If the previous two bytes are ASCII characters (i.e. < 128), this will be
equivalent to
context = 4 * context1(p1) + context2(p2),
where context1 is based on the previous byte in the following way:
0 : non-ASCII control
1 : \t, \n, \r
2 : space
3 : other punctuation
4 : " '
5 : %
6 : ( < [ {
7 : ) > ] }
8 : , ; :
9 : .
10 : =
11 : number
12 : upper-case vowel
13 : upper-case consonant
14 : lower-case vowel
15 : lower-case consonant
and context2 is based on the second last byte:
0 : control, space
1 : punctuation
2 : upper-case letter, number
3 : lower-case letter
If the last byte is ASCII, and the second last byte is not (in a valid UTF8
stream it will be a continuation byte, value between 128 and 191), the
context is the same as if the second last byte was an ASCII control or space.
If the last byte is a UTF8 lead byte (value >= 192), then the next byte will
be a continuation byte and the context id is 2 or 3 depending on the LSB of
the last byte and to a lesser extent on the second last byte if it is ASCII.
If the last byte is a UTF8 continuation byte, the second last byte can be:
- continuation byte: the next byte is probably ASCII or lead byte (assuming
4-byte UTF8 characters are rare) and the context id is 0 or 1.
- lead byte (192 - 207): next byte is ASCII or lead byte, context is 0 or 1
- lead byte (208 - 255): next byte is continuation byte, context is 2 or 3
The possible value combinations of the previous two bytes, the range of
context ids and the type of the next byte is summarized in the table below:
|--------\-----------------------------------------------------------------|
| \ Last byte |
| Second \---------------------------------------------------------------|
| last byte \ ASCII | cont. byte | lead byte |
| \ (0-127) | (128-191) | (192-) |
|=============|===================|=====================|==================|
| ASCII | next: ASCII/lead | not valid | next: cont. |
| (0-127) | context: 4 - 63 | | context: 2 - 3 |
|-------------|-------------------|---------------------|------------------|
| cont. byte | next: ASCII/lead | next: ASCII/lead | next: cont. |
| (128-191) | context: 4 - 63 | context: 0 - 1 | context: 2 - 3 |
|-------------|-------------------|---------------------|------------------|
| lead byte | not valid | next: ASCII/lead | not valid |
| (192-207) | | context: 0 - 1 | |
|-------------|-------------------|---------------------|------------------|
| lead byte | not valid | next: cont. | not valid |
| (208-) | | context: 2 - 3 | |
|-------------|-------------------|---------------------|------------------|
*/
static const uint8_t kUTF8ContextLookup[512] = {
/* Last byte. */
/* */
/* ASCII range. */
0, 0, 0, 0, 0, 0, 0, 0, 0, 4, 4, 0, 0, 4, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
8, 12, 16, 12, 12, 20, 12, 16, 24, 28, 12, 12, 32, 12, 36, 12,
44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 32, 32, 24, 40, 28, 12,
12, 48, 52, 52, 52, 48, 52, 52, 52, 48, 52, 52, 52, 52, 52, 48,
52, 52, 52, 52, 52, 48, 52, 52, 52, 52, 52, 24, 12, 28, 12, 12,
12, 56, 60, 60, 60, 56, 60, 60, 60, 56, 60, 60, 60, 60, 60, 56,
60, 60, 60, 60, 60, 56, 60, 60, 60, 60, 60, 24, 12, 28, 12, 0,
/* UTF8 continuation byte range. */
0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1,
0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1,
0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1,
0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1,
/* UTF8 lead byte range. */
2, 3, 2, 3, 2, 3, 2, 3, 2, 3, 2, 3, 2, 3, 2, 3,
2, 3, 2, 3, 2, 3, 2, 3, 2, 3, 2, 3, 2, 3, 2, 3,
2, 3, 2, 3, 2, 3, 2, 3, 2, 3, 2, 3, 2, 3, 2, 3,
2, 3, 2, 3, 2, 3, 2, 3, 2, 3, 2, 3, 2, 3, 2, 3,
/* Second last byte. */
/* */
/* ASCII range. */
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 1, 1, 1, 1, 1, 1,
1, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 1, 1, 1, 1, 1,
1, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3,
3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 1, 1, 1, 1, 0,
/* UTF8 continuation byte range. */
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
/* UTF8 lead byte range. */
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
};
/* Context lookup table for small signed integers. */
static const uint8_t kSigned3BitContextLookup[] = {
0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3,
3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3,
3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3,
3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3,
4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5,
5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5,
5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5,
6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 7,
};
typedef enum ContextType {
CONTEXT_LSB6 = 0,
CONTEXT_MSB6 = 1,
CONTEXT_UTF8 = 2,
CONTEXT_SIGNED = 3
} ContextType;
static BROTLI_INLINE uint8_t Context(uint8_t p1, uint8_t p2, ContextType mode) {
switch (mode) {
case CONTEXT_LSB6:
return p1 & 0x3f;
case CONTEXT_MSB6:
return (uint8_t)(p1 >> 2);
case CONTEXT_UTF8:
return kUTF8ContextLookup[p1] | kUTF8ContextLookup[p2 + 256];
case CONTEXT_SIGNED:
return (uint8_t)((kSigned3BitContextLookup[p1] << 3) +
kSigned3BitContextLookup[p2]);
default:
return 0;
}
}
#if defined(__cplusplus) || defined(c_plusplus)
} /* extern "C" */
#endif
#endif /* BROTLI_ENC_CONTEXT_H_ */
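/* Worked check (standalone; the two constants are copied from the
   kUTF8ContextLookup table above). For previous bytes p1 = 'e' and p2 = ' ',
   the UTF8 context is
   kUTF8ContextLookup['e'] | kUTF8ContextLookup[' ' + 256] = 56 | 0 = 56,
   which matches 4 * context1(p1) + context2(p2) from the explanatory
   comment: 'e' is a lower-case vowel (context1 = 14) and ' ' is a space
   (context2 = 0). */
#include <assert.h>
int main(void) {
  const unsigned last_byte_lookup = 56;    /* kUTF8ContextLookup[0x65] */
  const unsigned second_last_lookup = 0;   /* kUTF8ContextLookup[0x20 + 256] */
  assert((last_byte_lookup | second_last_lookup) == 4 * 14 + 0);
  return 0;
}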

View File

@ -1,24 +0,0 @@
/* Copyright 2015 Google Inc. All Rights Reserved.
Distributed under MIT license.
See file LICENSE for detail or copy at https://opensource.org/licenses/MIT
*/
/* Hash table on the 4-byte prefixes of static dictionary words. */
#ifndef BROTLI_ENC_DICTIONARY_HASH_H_
#define BROTLI_ENC_DICTIONARY_HASH_H_
#include <brotli/types.h>
#if defined(__cplusplus) || defined(c_plusplus)
extern "C" {
#endif
extern const uint16_t kStaticDictionaryHash[32768];
#if defined(__cplusplus) || defined(c_plusplus)
} /* extern "C" */
#endif
#endif /* BROTLI_ENC_DICTIONARY_HASH_H_ */

View File

@ -1,5 +0,0 @@
package enc
import (
"C"
)

View File

@ -1,122 +0,0 @@
/* Copyright 2010 Google Inc. All Rights Reserved.
Distributed under MIT license.
See file LICENSE for detail or copy at https://opensource.org/licenses/MIT
*/
/* Entropy encoding (Huffman) utilities. */
#ifndef BROTLI_ENC_ENTROPY_ENCODE_H_
#define BROTLI_ENC_ENTROPY_ENCODE_H_
#include <brotli/types.h>
#include "./port.h"
#if defined(__cplusplus) || defined(c_plusplus)
extern "C" {
#endif
/* A node of a Huffman tree. */
typedef struct HuffmanTree {
uint32_t total_count_;
int16_t index_left_;
int16_t index_right_or_value_;
} HuffmanTree;
static BROTLI_INLINE void InitHuffmanTree(HuffmanTree* self, uint32_t count,
int16_t left, int16_t right) {
self->total_count_ = count;
self->index_left_ = left;
self->index_right_or_value_ = right;
}
/* Returns 1 if assignment of depths succeeded, otherwise 0. */
BROTLI_INTERNAL BROTLI_BOOL BrotliSetDepth(
int p, HuffmanTree* pool, uint8_t* depth, int max_depth);
/* This function will create a Huffman tree.
The (data,length) contains the population counts.
The tree_limit is the maximum bit depth of the Huffman codes.
The depth array receives the resulting code lengths, i.e., how many bits
are used for each symbol.
The actual Huffman tree is constructed in the tree[] array, which has to
be at least 2 * length + 1 long.
See http://en.wikipedia.org/wiki/Huffman_coding */
BROTLI_INTERNAL void BrotliCreateHuffmanTree(const uint32_t *data,
const size_t length,
const int tree_limit,
HuffmanTree* tree,
uint8_t *depth);
/* Changes the population counts so that the subsequent
Huffman tree compression, especially its RLE part, is more
likely to compress this data efficiently.
length contains the size of the histogram.
counts contains the population counts.
good_for_rle is a buffer of at least length size */
BROTLI_INTERNAL void BrotliOptimizeHuffmanCountsForRle(
size_t length, uint32_t* counts, uint8_t* good_for_rle);
/* Write a Huffman tree from bit depths into the bit-stream representation
of a Huffman tree. The generated Huffman tree is to be compressed once
more using a Huffman tree. */
BROTLI_INTERNAL void BrotliWriteHuffmanTree(const uint8_t* depth,
size_t num,
size_t* tree_size,
uint8_t* tree,
uint8_t* extra_bits_data);
/* Get the actual bit values for a tree of bit depths. */
BROTLI_INTERNAL void BrotliConvertBitDepthsToSymbols(const uint8_t *depth,
size_t len,
uint16_t *bits);
/* Input size optimized Shell sort. */
typedef BROTLI_BOOL (*HuffmanTreeComparator)(
const HuffmanTree*, const HuffmanTree*);
static BROTLI_INLINE void SortHuffmanTreeItems(HuffmanTree* items,
const size_t n, HuffmanTreeComparator comparator) {
static const size_t gaps[] = {132, 57, 23, 10, 4, 1};
if (n < 13) {
/* Insertion sort. */
size_t i;
for (i = 1; i < n; ++i) {
HuffmanTree tmp = items[i];
size_t k = i;
size_t j = i - 1;
while (comparator(&tmp, &items[j])) {
items[k] = items[j];
k = j;
if (!j--) break;
}
items[k] = tmp;
}
return;
} else {
/* Shell sort. */
int g = n < 57 ? 2 : 0;
for (; g < 6; ++g) {
size_t gap = gaps[g];
size_t i;
for (i = gap; i < n; ++i) {
size_t j = i;
HuffmanTree tmp = items[i];
for (; j >= gap && comparator(&tmp, &items[j - gap]); j -= gap) {
items[j] = items[j - gap];
}
items[j] = tmp;
}
}
}
}
#if defined(__cplusplus) || defined(c_plusplus)
} /* extern "C" */
#endif
#endif /* BROTLI_ENC_ENTROPY_ENCODE_H_ */
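/* Minimal sketch (not the library implementation) of turning bit depths into
   canonical code values, the job BrotliConvertBitDepthsToSymbols is declared
   for above: codes of equal length get consecutive values. The real helper is
   believed to also bit-reverse each code for the LSB-first bit stream; this
   sketch omits that step. */
#include <stddef.h>
#include <stdint.h>

static void DepthsToCanonicalCodesSketch(const uint8_t* depth, size_t len,
                                         uint16_t* bits) {
  uint16_t bl_count[16] = { 0 };
  uint16_t next_code[16];
  uint16_t code = 0;
  size_t i;
  for (i = 0; i < len; ++i) ++bl_count[depth[i]];
  bl_count[0] = 0;
  next_code[0] = 0;
  for (i = 1; i < 16; ++i) {
    code = (uint16_t)((code + bl_count[i - 1]) << 1);
    next_code[i] = code;
  }
  for (i = 0; i < len; ++i) {
    bits[i] = depth[i] ? next_code[depth[i]]++ : 0;
  }
}
/* Example: depths {1, 2, 3, 3} yield codes {0b0, 0b10, 0b110, 0b111}. */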

View File

@ -1,539 +0,0 @@
/* Copyright 2015 Google Inc. All Rights Reserved.
Distributed under MIT license.
See file LICENSE for detail or copy at https://opensource.org/licenses/MIT
*/
/* Static entropy codes used for faster meta-block encoding. */
#ifndef BROTLI_ENC_ENTROPY_ENCODE_STATIC_H_
#define BROTLI_ENC_ENTROPY_ENCODE_STATIC_H_
#include "../common/constants.h"
#include <brotli/port.h>
#include <brotli/types.h>
#include "./write_bits.h"
#if defined(__cplusplus) || defined(c_plusplus)
extern "C" {
#endif
static const uint8_t kCodeLengthDepth[18] = {
4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 5, 5, 0, 4, 4,
};
static const uint8_t kStaticCommandCodeDepth[BROTLI_NUM_COMMAND_SYMBOLS] = {
9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9,
9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9,
9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9,
9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9,
9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9,
9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9,
9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9,
9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9,
9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9,
9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9,
9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9,
9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9,
9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9,
9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9,
9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9,
9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9,
9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9,
9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9,
9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9,
9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9,
9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9,
9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9,
9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9,
9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9,
9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9,
9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9,
9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9,
9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9,
11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11,
11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11,
11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11,
11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11,
11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11,
11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11,
11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11,
11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11,
11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11,
11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11,
11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11,
11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11,
11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11,
11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11,
11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11,
11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11,
};
static const uint8_t kStaticDistanceCodeDepth[64] = {
6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6,
6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6,
6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6,
6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6,
};
static const uint32_t kCodeLengthBits[18] = {
0, 8, 4, 12, 2, 10, 6, 14, 1, 9, 5, 13, 3, 15, 31, 0, 11, 7,
};
static BROTLI_INLINE void StoreStaticCodeLengthCode(
size_t* storage_ix, uint8_t* storage) {
BrotliWriteBits(
40, BROTLI_MAKE_UINT64_T(0x0000ffU, 0x55555554U), storage_ix, storage);
}
static const uint64_t kZeroRepsBits[BROTLI_NUM_COMMAND_SYMBOLS] = {
0x00000000, 0x00000000, 0x00000000, 0x00000007, 0x00000017, 0x00000027,
0x00000037, 0x00000047, 0x00000057, 0x00000067, 0x00000077, 0x00000770,
0x00000b87, 0x00001387, 0x00001b87, 0x00002387, 0x00002b87, 0x00003387,
0x00003b87, 0x00000397, 0x00000b97, 0x00001397, 0x00001b97, 0x00002397,
0x00002b97, 0x00003397, 0x00003b97, 0x000003a7, 0x00000ba7, 0x000013a7,
0x00001ba7, 0x000023a7, 0x00002ba7, 0x000033a7, 0x00003ba7, 0x000003b7,
0x00000bb7, 0x000013b7, 0x00001bb7, 0x000023b7, 0x00002bb7, 0x000033b7,
0x00003bb7, 0x000003c7, 0x00000bc7, 0x000013c7, 0x00001bc7, 0x000023c7,
0x00002bc7, 0x000033c7, 0x00003bc7, 0x000003d7, 0x00000bd7, 0x000013d7,
0x00001bd7, 0x000023d7, 0x00002bd7, 0x000033d7, 0x00003bd7, 0x000003e7,
0x00000be7, 0x000013e7, 0x00001be7, 0x000023e7, 0x00002be7, 0x000033e7,
0x00003be7, 0x000003f7, 0x00000bf7, 0x000013f7, 0x00001bf7, 0x000023f7,
0x00002bf7, 0x000033f7, 0x00003bf7, 0x0001c387, 0x0005c387, 0x0009c387,
0x000dc387, 0x0011c387, 0x0015c387, 0x0019c387, 0x001dc387, 0x0001cb87,
0x0005cb87, 0x0009cb87, 0x000dcb87, 0x0011cb87, 0x0015cb87, 0x0019cb87,
0x001dcb87, 0x0001d387, 0x0005d387, 0x0009d387, 0x000dd387, 0x0011d387,
0x0015d387, 0x0019d387, 0x001dd387, 0x0001db87, 0x0005db87, 0x0009db87,
0x000ddb87, 0x0011db87, 0x0015db87, 0x0019db87, 0x001ddb87, 0x0001e387,
0x0005e387, 0x0009e387, 0x000de387, 0x0011e387, 0x0015e387, 0x0019e387,
0x001de387, 0x0001eb87, 0x0005eb87, 0x0009eb87, 0x000deb87, 0x0011eb87,
0x0015eb87, 0x0019eb87, 0x001deb87, 0x0001f387, 0x0005f387, 0x0009f387,
0x000df387, 0x0011f387, 0x0015f387, 0x0019f387, 0x001df387, 0x0001fb87,
0x0005fb87, 0x0009fb87, 0x000dfb87, 0x0011fb87, 0x0015fb87, 0x0019fb87,
0x001dfb87, 0x0001c397, 0x0005c397, 0x0009c397, 0x000dc397, 0x0011c397,
0x0015c397, 0x0019c397, 0x001dc397, 0x0001cb97, 0x0005cb97, 0x0009cb97,
0x000dcb97, 0x0011cb97, 0x0015cb97, 0x0019cb97, 0x001dcb97, 0x0001d397,
0x0005d397, 0x0009d397, 0x000dd397, 0x0011d397, 0x0015d397, 0x0019d397,
0x001dd397, 0x0001db97, 0x0005db97, 0x0009db97, 0x000ddb97, 0x0011db97,
0x0015db97, 0x0019db97, 0x001ddb97, 0x0001e397, 0x0005e397, 0x0009e397,
0x000de397, 0x0011e397, 0x0015e397, 0x0019e397, 0x001de397, 0x0001eb97,
0x0005eb97, 0x0009eb97, 0x000deb97, 0x0011eb97, 0x0015eb97, 0x0019eb97,
0x001deb97, 0x0001f397, 0x0005f397, 0x0009f397, 0x000df397, 0x0011f397,
0x0015f397, 0x0019f397, 0x001df397, 0x0001fb97, 0x0005fb97, 0x0009fb97,
0x000dfb97, 0x0011fb97, 0x0015fb97, 0x0019fb97, 0x001dfb97, 0x0001c3a7,
0x0005c3a7, 0x0009c3a7, 0x000dc3a7, 0x0011c3a7, 0x0015c3a7, 0x0019c3a7,
0x001dc3a7, 0x0001cba7, 0x0005cba7, 0x0009cba7, 0x000dcba7, 0x0011cba7,
0x0015cba7, 0x0019cba7, 0x001dcba7, 0x0001d3a7, 0x0005d3a7, 0x0009d3a7,
0x000dd3a7, 0x0011d3a7, 0x0015d3a7, 0x0019d3a7, 0x001dd3a7, 0x0001dba7,
0x0005dba7, 0x0009dba7, 0x000ddba7, 0x0011dba7, 0x0015dba7, 0x0019dba7,
0x001ddba7, 0x0001e3a7, 0x0005e3a7, 0x0009e3a7, 0x000de3a7, 0x0011e3a7,
0x0015e3a7, 0x0019e3a7, 0x001de3a7, 0x0001eba7, 0x0005eba7, 0x0009eba7,
0x000deba7, 0x0011eba7, 0x0015eba7, 0x0019eba7, 0x001deba7, 0x0001f3a7,
0x0005f3a7, 0x0009f3a7, 0x000df3a7, 0x0011f3a7, 0x0015f3a7, 0x0019f3a7,
0x001df3a7, 0x0001fba7, 0x0005fba7, 0x0009fba7, 0x000dfba7, 0x0011fba7,
0x0015fba7, 0x0019fba7, 0x001dfba7, 0x0001c3b7, 0x0005c3b7, 0x0009c3b7,
0x000dc3b7, 0x0011c3b7, 0x0015c3b7, 0x0019c3b7, 0x001dc3b7, 0x0001cbb7,
0x0005cbb7, 0x0009cbb7, 0x000dcbb7, 0x0011cbb7, 0x0015cbb7, 0x0019cbb7,
0x001dcbb7, 0x0001d3b7, 0x0005d3b7, 0x0009d3b7, 0x000dd3b7, 0x0011d3b7,
0x0015d3b7, 0x0019d3b7, 0x001dd3b7, 0x0001dbb7, 0x0005dbb7, 0x0009dbb7,
0x000ddbb7, 0x0011dbb7, 0x0015dbb7, 0x0019dbb7, 0x001ddbb7, 0x0001e3b7,
0x0005e3b7, 0x0009e3b7, 0x000de3b7, 0x0011e3b7, 0x0015e3b7, 0x0019e3b7,
0x001de3b7, 0x0001ebb7, 0x0005ebb7, 0x0009ebb7, 0x000debb7, 0x0011ebb7,
0x0015ebb7, 0x0019ebb7, 0x001debb7, 0x0001f3b7, 0x0005f3b7, 0x0009f3b7,
0x000df3b7, 0x0011f3b7, 0x0015f3b7, 0x0019f3b7, 0x001df3b7, 0x0001fbb7,
0x0005fbb7, 0x0009fbb7, 0x000dfbb7, 0x0011fbb7, 0x0015fbb7, 0x0019fbb7,
0x001dfbb7, 0x0001c3c7, 0x0005c3c7, 0x0009c3c7, 0x000dc3c7, 0x0011c3c7,
0x0015c3c7, 0x0019c3c7, 0x001dc3c7, 0x0001cbc7, 0x0005cbc7, 0x0009cbc7,
0x000dcbc7, 0x0011cbc7, 0x0015cbc7, 0x0019cbc7, 0x001dcbc7, 0x0001d3c7,
0x0005d3c7, 0x0009d3c7, 0x000dd3c7, 0x0011d3c7, 0x0015d3c7, 0x0019d3c7,
0x001dd3c7, 0x0001dbc7, 0x0005dbc7, 0x0009dbc7, 0x000ddbc7, 0x0011dbc7,
0x0015dbc7, 0x0019dbc7, 0x001ddbc7, 0x0001e3c7, 0x0005e3c7, 0x0009e3c7,
0x000de3c7, 0x0011e3c7, 0x0015e3c7, 0x0019e3c7, 0x001de3c7, 0x0001ebc7,
0x0005ebc7, 0x0009ebc7, 0x000debc7, 0x0011ebc7, 0x0015ebc7, 0x0019ebc7,
0x001debc7, 0x0001f3c7, 0x0005f3c7, 0x0009f3c7, 0x000df3c7, 0x0011f3c7,
0x0015f3c7, 0x0019f3c7, 0x001df3c7, 0x0001fbc7, 0x0005fbc7, 0x0009fbc7,
0x000dfbc7, 0x0011fbc7, 0x0015fbc7, 0x0019fbc7, 0x001dfbc7, 0x0001c3d7,
0x0005c3d7, 0x0009c3d7, 0x000dc3d7, 0x0011c3d7, 0x0015c3d7, 0x0019c3d7,
0x001dc3d7, 0x0001cbd7, 0x0005cbd7, 0x0009cbd7, 0x000dcbd7, 0x0011cbd7,
0x0015cbd7, 0x0019cbd7, 0x001dcbd7, 0x0001d3d7, 0x0005d3d7, 0x0009d3d7,
0x000dd3d7, 0x0011d3d7, 0x0015d3d7, 0x0019d3d7, 0x001dd3d7, 0x0001dbd7,
0x0005dbd7, 0x0009dbd7, 0x000ddbd7, 0x0011dbd7, 0x0015dbd7, 0x0019dbd7,
0x001ddbd7, 0x0001e3d7, 0x0005e3d7, 0x0009e3d7, 0x000de3d7, 0x0011e3d7,
0x0015e3d7, 0x0019e3d7, 0x001de3d7, 0x0001ebd7, 0x0005ebd7, 0x0009ebd7,
0x000debd7, 0x0011ebd7, 0x0015ebd7, 0x0019ebd7, 0x001debd7, 0x0001f3d7,
0x0005f3d7, 0x0009f3d7, 0x000df3d7, 0x0011f3d7, 0x0015f3d7, 0x0019f3d7,
0x001df3d7, 0x0001fbd7, 0x0005fbd7, 0x0009fbd7, 0x000dfbd7, 0x0011fbd7,
0x0015fbd7, 0x0019fbd7, 0x001dfbd7, 0x0001c3e7, 0x0005c3e7, 0x0009c3e7,
0x000dc3e7, 0x0011c3e7, 0x0015c3e7, 0x0019c3e7, 0x001dc3e7, 0x0001cbe7,
0x0005cbe7, 0x0009cbe7, 0x000dcbe7, 0x0011cbe7, 0x0015cbe7, 0x0019cbe7,
0x001dcbe7, 0x0001d3e7, 0x0005d3e7, 0x0009d3e7, 0x000dd3e7, 0x0011d3e7,
0x0015d3e7, 0x0019d3e7, 0x001dd3e7, 0x0001dbe7, 0x0005dbe7, 0x0009dbe7,
0x000ddbe7, 0x0011dbe7, 0x0015dbe7, 0x0019dbe7, 0x001ddbe7, 0x0001e3e7,
0x0005e3e7, 0x0009e3e7, 0x000de3e7, 0x0011e3e7, 0x0015e3e7, 0x0019e3e7,
0x001de3e7, 0x0001ebe7, 0x0005ebe7, 0x0009ebe7, 0x000debe7, 0x0011ebe7,
0x0015ebe7, 0x0019ebe7, 0x001debe7, 0x0001f3e7, 0x0005f3e7, 0x0009f3e7,
0x000df3e7, 0x0011f3e7, 0x0015f3e7, 0x0019f3e7, 0x001df3e7, 0x0001fbe7,
0x0005fbe7, 0x0009fbe7, 0x000dfbe7, 0x0011fbe7, 0x0015fbe7, 0x0019fbe7,
0x001dfbe7, 0x0001c3f7, 0x0005c3f7, 0x0009c3f7, 0x000dc3f7, 0x0011c3f7,
0x0015c3f7, 0x0019c3f7, 0x001dc3f7, 0x0001cbf7, 0x0005cbf7, 0x0009cbf7,
0x000dcbf7, 0x0011cbf7, 0x0015cbf7, 0x0019cbf7, 0x001dcbf7, 0x0001d3f7,
0x0005d3f7, 0x0009d3f7, 0x000dd3f7, 0x0011d3f7, 0x0015d3f7, 0x0019d3f7,
0x001dd3f7, 0x0001dbf7, 0x0005dbf7, 0x0009dbf7, 0x000ddbf7, 0x0011dbf7,
0x0015dbf7, 0x0019dbf7, 0x001ddbf7, 0x0001e3f7, 0x0005e3f7, 0x0009e3f7,
0x000de3f7, 0x0011e3f7, 0x0015e3f7, 0x0019e3f7, 0x001de3f7, 0x0001ebf7,
0x0005ebf7, 0x0009ebf7, 0x000debf7, 0x0011ebf7, 0x0015ebf7, 0x0019ebf7,
0x001debf7, 0x0001f3f7, 0x0005f3f7, 0x0009f3f7, 0x000df3f7, 0x0011f3f7,
0x0015f3f7, 0x0019f3f7, 0x001df3f7, 0x0001fbf7, 0x0005fbf7, 0x0009fbf7,
0x000dfbf7, 0x0011fbf7, 0x0015fbf7, 0x0019fbf7, 0x001dfbf7, 0x00e1c387,
0x02e1c387, 0x04e1c387, 0x06e1c387, 0x08e1c387, 0x0ae1c387, 0x0ce1c387,
0x0ee1c387, 0x00e5c387, 0x02e5c387, 0x04e5c387, 0x06e5c387, 0x08e5c387,
0x0ae5c387, 0x0ce5c387, 0x0ee5c387, 0x00e9c387, 0x02e9c387, 0x04e9c387,
0x06e9c387, 0x08e9c387, 0x0ae9c387, 0x0ce9c387, 0x0ee9c387, 0x00edc387,
0x02edc387, 0x04edc387, 0x06edc387, 0x08edc387, 0x0aedc387, 0x0cedc387,
0x0eedc387, 0x00f1c387, 0x02f1c387, 0x04f1c387, 0x06f1c387, 0x08f1c387,
0x0af1c387, 0x0cf1c387, 0x0ef1c387, 0x00f5c387, 0x02f5c387, 0x04f5c387,
0x06f5c387, 0x08f5c387, 0x0af5c387, 0x0cf5c387, 0x0ef5c387, 0x00f9c387,
0x02f9c387, 0x04f9c387, 0x06f9c387, 0x08f9c387, 0x0af9c387, 0x0cf9c387,
0x0ef9c387, 0x00fdc387, 0x02fdc387, 0x04fdc387, 0x06fdc387, 0x08fdc387,
0x0afdc387, 0x0cfdc387, 0x0efdc387, 0x00e1cb87, 0x02e1cb87, 0x04e1cb87,
0x06e1cb87, 0x08e1cb87, 0x0ae1cb87, 0x0ce1cb87, 0x0ee1cb87, 0x00e5cb87,
0x02e5cb87, 0x04e5cb87, 0x06e5cb87, 0x08e5cb87, 0x0ae5cb87, 0x0ce5cb87,
0x0ee5cb87, 0x00e9cb87, 0x02e9cb87, 0x04e9cb87, 0x06e9cb87, 0x08e9cb87,
0x0ae9cb87, 0x0ce9cb87, 0x0ee9cb87, 0x00edcb87, 0x02edcb87, 0x04edcb87,
0x06edcb87, 0x08edcb87, 0x0aedcb87, 0x0cedcb87, 0x0eedcb87, 0x00f1cb87,
0x02f1cb87, 0x04f1cb87, 0x06f1cb87, 0x08f1cb87, 0x0af1cb87, 0x0cf1cb87,
0x0ef1cb87, 0x00f5cb87, 0x02f5cb87, 0x04f5cb87, 0x06f5cb87, 0x08f5cb87,
0x0af5cb87, 0x0cf5cb87, 0x0ef5cb87, 0x00f9cb87, 0x02f9cb87, 0x04f9cb87,
0x06f9cb87, 0x08f9cb87,
};
static const uint32_t kZeroRepsDepth[BROTLI_NUM_COMMAND_SYMBOLS] = {
0, 4, 8, 7, 7, 7, 7, 7, 7, 7, 7, 11, 14, 14, 14, 14,
14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14,
14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14,
14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14,
14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 21, 21, 21, 21, 21,
21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21,
21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21,
21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21,
21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21,
21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21,
21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21,
21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21,
21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21,
21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21,
21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21,
21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21,
21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21,
21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21,
21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21,
21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21,
21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21,
21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21,
21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21,
21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21,
21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21,
21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21,
21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21,
21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21,
21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21,
21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21,
21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21,
21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21,
21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21,
21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21,
21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21,
21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21,
21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 28, 28, 28, 28, 28,
28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28,
28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28,
28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28,
28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28,
28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28,
28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28,
28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28,
};
static const uint64_t kNonZeroRepsBits[BROTLI_NUM_COMMAND_SYMBOLS] = {
0x0000000b, 0x0000001b, 0x0000002b, 0x0000003b, 0x000002cb, 0x000006cb,
0x00000acb, 0x00000ecb, 0x000002db, 0x000006db, 0x00000adb, 0x00000edb,
0x000002eb, 0x000006eb, 0x00000aeb, 0x00000eeb, 0x000002fb, 0x000006fb,
0x00000afb, 0x00000efb, 0x0000b2cb, 0x0001b2cb, 0x0002b2cb, 0x0003b2cb,
0x0000b6cb, 0x0001b6cb, 0x0002b6cb, 0x0003b6cb, 0x0000bacb, 0x0001bacb,
0x0002bacb, 0x0003bacb, 0x0000becb, 0x0001becb, 0x0002becb, 0x0003becb,
0x0000b2db, 0x0001b2db, 0x0002b2db, 0x0003b2db, 0x0000b6db, 0x0001b6db,
0x0002b6db, 0x0003b6db, 0x0000badb, 0x0001badb, 0x0002badb, 0x0003badb,
0x0000bedb, 0x0001bedb, 0x0002bedb, 0x0003bedb, 0x0000b2eb, 0x0001b2eb,
0x0002b2eb, 0x0003b2eb, 0x0000b6eb, 0x0001b6eb, 0x0002b6eb, 0x0003b6eb,
0x0000baeb, 0x0001baeb, 0x0002baeb, 0x0003baeb, 0x0000beeb, 0x0001beeb,
0x0002beeb, 0x0003beeb, 0x0000b2fb, 0x0001b2fb, 0x0002b2fb, 0x0003b2fb,
0x0000b6fb, 0x0001b6fb, 0x0002b6fb, 0x0003b6fb, 0x0000bafb, 0x0001bafb,
0x0002bafb, 0x0003bafb, 0x0000befb, 0x0001befb, 0x0002befb, 0x0003befb,
0x002cb2cb, 0x006cb2cb, 0x00acb2cb, 0x00ecb2cb, 0x002db2cb, 0x006db2cb,
0x00adb2cb, 0x00edb2cb, 0x002eb2cb, 0x006eb2cb, 0x00aeb2cb, 0x00eeb2cb,
0x002fb2cb, 0x006fb2cb, 0x00afb2cb, 0x00efb2cb, 0x002cb6cb, 0x006cb6cb,
0x00acb6cb, 0x00ecb6cb, 0x002db6cb, 0x006db6cb, 0x00adb6cb, 0x00edb6cb,
0x002eb6cb, 0x006eb6cb, 0x00aeb6cb, 0x00eeb6cb, 0x002fb6cb, 0x006fb6cb,
0x00afb6cb, 0x00efb6cb, 0x002cbacb, 0x006cbacb, 0x00acbacb, 0x00ecbacb,
0x002dbacb, 0x006dbacb, 0x00adbacb, 0x00edbacb, 0x002ebacb, 0x006ebacb,
0x00aebacb, 0x00eebacb, 0x002fbacb, 0x006fbacb, 0x00afbacb, 0x00efbacb,
0x002cbecb, 0x006cbecb, 0x00acbecb, 0x00ecbecb, 0x002dbecb, 0x006dbecb,
0x00adbecb, 0x00edbecb, 0x002ebecb, 0x006ebecb, 0x00aebecb, 0x00eebecb,
0x002fbecb, 0x006fbecb, 0x00afbecb, 0x00efbecb, 0x002cb2db, 0x006cb2db,
0x00acb2db, 0x00ecb2db, 0x002db2db, 0x006db2db, 0x00adb2db, 0x00edb2db,
0x002eb2db, 0x006eb2db, 0x00aeb2db, 0x00eeb2db, 0x002fb2db, 0x006fb2db,
0x00afb2db, 0x00efb2db, 0x002cb6db, 0x006cb6db, 0x00acb6db, 0x00ecb6db,
0x002db6db, 0x006db6db, 0x00adb6db, 0x00edb6db, 0x002eb6db, 0x006eb6db,
0x00aeb6db, 0x00eeb6db, 0x002fb6db, 0x006fb6db, 0x00afb6db, 0x00efb6db,
0x002cbadb, 0x006cbadb, 0x00acbadb, 0x00ecbadb, 0x002dbadb, 0x006dbadb,
0x00adbadb, 0x00edbadb, 0x002ebadb, 0x006ebadb, 0x00aebadb, 0x00eebadb,
0x002fbadb, 0x006fbadb, 0x00afbadb, 0x00efbadb, 0x002cbedb, 0x006cbedb,
0x00acbedb, 0x00ecbedb, 0x002dbedb, 0x006dbedb, 0x00adbedb, 0x00edbedb,
0x002ebedb, 0x006ebedb, 0x00aebedb, 0x00eebedb, 0x002fbedb, 0x006fbedb,
0x00afbedb, 0x00efbedb, 0x002cb2eb, 0x006cb2eb, 0x00acb2eb, 0x00ecb2eb,
0x002db2eb, 0x006db2eb, 0x00adb2eb, 0x00edb2eb, 0x002eb2eb, 0x006eb2eb,
0x00aeb2eb, 0x00eeb2eb, 0x002fb2eb, 0x006fb2eb, 0x00afb2eb, 0x00efb2eb,
0x002cb6eb, 0x006cb6eb, 0x00acb6eb, 0x00ecb6eb, 0x002db6eb, 0x006db6eb,
0x00adb6eb, 0x00edb6eb, 0x002eb6eb, 0x006eb6eb, 0x00aeb6eb, 0x00eeb6eb,
0x002fb6eb, 0x006fb6eb, 0x00afb6eb, 0x00efb6eb, 0x002cbaeb, 0x006cbaeb,
0x00acbaeb, 0x00ecbaeb, 0x002dbaeb, 0x006dbaeb, 0x00adbaeb, 0x00edbaeb,
0x002ebaeb, 0x006ebaeb, 0x00aebaeb, 0x00eebaeb, 0x002fbaeb, 0x006fbaeb,
0x00afbaeb, 0x00efbaeb, 0x002cbeeb, 0x006cbeeb, 0x00acbeeb, 0x00ecbeeb,
0x002dbeeb, 0x006dbeeb, 0x00adbeeb, 0x00edbeeb, 0x002ebeeb, 0x006ebeeb,
0x00aebeeb, 0x00eebeeb, 0x002fbeeb, 0x006fbeeb, 0x00afbeeb, 0x00efbeeb,
0x002cb2fb, 0x006cb2fb, 0x00acb2fb, 0x00ecb2fb, 0x002db2fb, 0x006db2fb,
0x00adb2fb, 0x00edb2fb, 0x002eb2fb, 0x006eb2fb, 0x00aeb2fb, 0x00eeb2fb,
0x002fb2fb, 0x006fb2fb, 0x00afb2fb, 0x00efb2fb, 0x002cb6fb, 0x006cb6fb,
0x00acb6fb, 0x00ecb6fb, 0x002db6fb, 0x006db6fb, 0x00adb6fb, 0x00edb6fb,
0x002eb6fb, 0x006eb6fb, 0x00aeb6fb, 0x00eeb6fb, 0x002fb6fb, 0x006fb6fb,
0x00afb6fb, 0x00efb6fb, 0x002cbafb, 0x006cbafb, 0x00acbafb, 0x00ecbafb,
0x002dbafb, 0x006dbafb, 0x00adbafb, 0x00edbafb, 0x002ebafb, 0x006ebafb,
0x00aebafb, 0x00eebafb, 0x002fbafb, 0x006fbafb, 0x00afbafb, 0x00efbafb,
0x002cbefb, 0x006cbefb, 0x00acbefb, 0x00ecbefb, 0x002dbefb, 0x006dbefb,
0x00adbefb, 0x00edbefb, 0x002ebefb, 0x006ebefb, 0x00aebefb, 0x00eebefb,
0x002fbefb, 0x006fbefb, 0x00afbefb, 0x00efbefb, 0x0b2cb2cb, 0x1b2cb2cb,
0x2b2cb2cb, 0x3b2cb2cb, 0x0b6cb2cb, 0x1b6cb2cb, 0x2b6cb2cb, 0x3b6cb2cb,
0x0bacb2cb, 0x1bacb2cb, 0x2bacb2cb, 0x3bacb2cb, 0x0becb2cb, 0x1becb2cb,
0x2becb2cb, 0x3becb2cb, 0x0b2db2cb, 0x1b2db2cb, 0x2b2db2cb, 0x3b2db2cb,
0x0b6db2cb, 0x1b6db2cb, 0x2b6db2cb, 0x3b6db2cb, 0x0badb2cb, 0x1badb2cb,
0x2badb2cb, 0x3badb2cb, 0x0bedb2cb, 0x1bedb2cb, 0x2bedb2cb, 0x3bedb2cb,
0x0b2eb2cb, 0x1b2eb2cb, 0x2b2eb2cb, 0x3b2eb2cb, 0x0b6eb2cb, 0x1b6eb2cb,
0x2b6eb2cb, 0x3b6eb2cb, 0x0baeb2cb, 0x1baeb2cb, 0x2baeb2cb, 0x3baeb2cb,
0x0beeb2cb, 0x1beeb2cb, 0x2beeb2cb, 0x3beeb2cb, 0x0b2fb2cb, 0x1b2fb2cb,
0x2b2fb2cb, 0x3b2fb2cb, 0x0b6fb2cb, 0x1b6fb2cb, 0x2b6fb2cb, 0x3b6fb2cb,
0x0bafb2cb, 0x1bafb2cb, 0x2bafb2cb, 0x3bafb2cb, 0x0befb2cb, 0x1befb2cb,
0x2befb2cb, 0x3befb2cb, 0x0b2cb6cb, 0x1b2cb6cb, 0x2b2cb6cb, 0x3b2cb6cb,
0x0b6cb6cb, 0x1b6cb6cb, 0x2b6cb6cb, 0x3b6cb6cb, 0x0bacb6cb, 0x1bacb6cb,
0x2bacb6cb, 0x3bacb6cb, 0x0becb6cb, 0x1becb6cb, 0x2becb6cb, 0x3becb6cb,
0x0b2db6cb, 0x1b2db6cb, 0x2b2db6cb, 0x3b2db6cb, 0x0b6db6cb, 0x1b6db6cb,
0x2b6db6cb, 0x3b6db6cb, 0x0badb6cb, 0x1badb6cb, 0x2badb6cb, 0x3badb6cb,
0x0bedb6cb, 0x1bedb6cb, 0x2bedb6cb, 0x3bedb6cb, 0x0b2eb6cb, 0x1b2eb6cb,
0x2b2eb6cb, 0x3b2eb6cb, 0x0b6eb6cb, 0x1b6eb6cb, 0x2b6eb6cb, 0x3b6eb6cb,
0x0baeb6cb, 0x1baeb6cb, 0x2baeb6cb, 0x3baeb6cb, 0x0beeb6cb, 0x1beeb6cb,
0x2beeb6cb, 0x3beeb6cb, 0x0b2fb6cb, 0x1b2fb6cb, 0x2b2fb6cb, 0x3b2fb6cb,
0x0b6fb6cb, 0x1b6fb6cb, 0x2b6fb6cb, 0x3b6fb6cb, 0x0bafb6cb, 0x1bafb6cb,
0x2bafb6cb, 0x3bafb6cb, 0x0befb6cb, 0x1befb6cb, 0x2befb6cb, 0x3befb6cb,
0x0b2cbacb, 0x1b2cbacb, 0x2b2cbacb, 0x3b2cbacb, 0x0b6cbacb, 0x1b6cbacb,
0x2b6cbacb, 0x3b6cbacb, 0x0bacbacb, 0x1bacbacb, 0x2bacbacb, 0x3bacbacb,
0x0becbacb, 0x1becbacb, 0x2becbacb, 0x3becbacb, 0x0b2dbacb, 0x1b2dbacb,
0x2b2dbacb, 0x3b2dbacb, 0x0b6dbacb, 0x1b6dbacb, 0x2b6dbacb, 0x3b6dbacb,
0x0badbacb, 0x1badbacb, 0x2badbacb, 0x3badbacb, 0x0bedbacb, 0x1bedbacb,
0x2bedbacb, 0x3bedbacb, 0x0b2ebacb, 0x1b2ebacb, 0x2b2ebacb, 0x3b2ebacb,
0x0b6ebacb, 0x1b6ebacb, 0x2b6ebacb, 0x3b6ebacb, 0x0baebacb, 0x1baebacb,
0x2baebacb, 0x3baebacb, 0x0beebacb, 0x1beebacb, 0x2beebacb, 0x3beebacb,
0x0b2fbacb, 0x1b2fbacb, 0x2b2fbacb, 0x3b2fbacb, 0x0b6fbacb, 0x1b6fbacb,
0x2b6fbacb, 0x3b6fbacb, 0x0bafbacb, 0x1bafbacb, 0x2bafbacb, 0x3bafbacb,
0x0befbacb, 0x1befbacb, 0x2befbacb, 0x3befbacb, 0x0b2cbecb, 0x1b2cbecb,
0x2b2cbecb, 0x3b2cbecb, 0x0b6cbecb, 0x1b6cbecb, 0x2b6cbecb, 0x3b6cbecb,
0x0bacbecb, 0x1bacbecb, 0x2bacbecb, 0x3bacbecb, 0x0becbecb, 0x1becbecb,
0x2becbecb, 0x3becbecb, 0x0b2dbecb, 0x1b2dbecb, 0x2b2dbecb, 0x3b2dbecb,
0x0b6dbecb, 0x1b6dbecb, 0x2b6dbecb, 0x3b6dbecb, 0x0badbecb, 0x1badbecb,
0x2badbecb, 0x3badbecb, 0x0bedbecb, 0x1bedbecb, 0x2bedbecb, 0x3bedbecb,
0x0b2ebecb, 0x1b2ebecb, 0x2b2ebecb, 0x3b2ebecb, 0x0b6ebecb, 0x1b6ebecb,
0x2b6ebecb, 0x3b6ebecb, 0x0baebecb, 0x1baebecb, 0x2baebecb, 0x3baebecb,
0x0beebecb, 0x1beebecb, 0x2beebecb, 0x3beebecb, 0x0b2fbecb, 0x1b2fbecb,
0x2b2fbecb, 0x3b2fbecb, 0x0b6fbecb, 0x1b6fbecb, 0x2b6fbecb, 0x3b6fbecb,
0x0bafbecb, 0x1bafbecb, 0x2bafbecb, 0x3bafbecb, 0x0befbecb, 0x1befbecb,
0x2befbecb, 0x3befbecb, 0x0b2cb2db, 0x1b2cb2db, 0x2b2cb2db, 0x3b2cb2db,
0x0b6cb2db, 0x1b6cb2db, 0x2b6cb2db, 0x3b6cb2db, 0x0bacb2db, 0x1bacb2db,
0x2bacb2db, 0x3bacb2db, 0x0becb2db, 0x1becb2db, 0x2becb2db, 0x3becb2db,
0x0b2db2db, 0x1b2db2db, 0x2b2db2db, 0x3b2db2db, 0x0b6db2db, 0x1b6db2db,
0x2b6db2db, 0x3b6db2db, 0x0badb2db, 0x1badb2db, 0x2badb2db, 0x3badb2db,
0x0bedb2db, 0x1bedb2db, 0x2bedb2db, 0x3bedb2db, 0x0b2eb2db, 0x1b2eb2db,
0x2b2eb2db, 0x3b2eb2db, 0x0b6eb2db, 0x1b6eb2db, 0x2b6eb2db, 0x3b6eb2db,
0x0baeb2db, 0x1baeb2db, 0x2baeb2db, 0x3baeb2db, 0x0beeb2db, 0x1beeb2db,
0x2beeb2db, 0x3beeb2db, 0x0b2fb2db, 0x1b2fb2db, 0x2b2fb2db, 0x3b2fb2db,
0x0b6fb2db, 0x1b6fb2db, 0x2b6fb2db, 0x3b6fb2db, 0x0bafb2db, 0x1bafb2db,
0x2bafb2db, 0x3bafb2db, 0x0befb2db, 0x1befb2db, 0x2befb2db, 0x3befb2db,
0x0b2cb6db, 0x1b2cb6db, 0x2b2cb6db, 0x3b2cb6db, 0x0b6cb6db, 0x1b6cb6db,
0x2b6cb6db, 0x3b6cb6db, 0x0bacb6db, 0x1bacb6db, 0x2bacb6db, 0x3bacb6db,
0x0becb6db, 0x1becb6db, 0x2becb6db, 0x3becb6db, 0x0b2db6db, 0x1b2db6db,
0x2b2db6db, 0x3b2db6db, 0x0b6db6db, 0x1b6db6db, 0x2b6db6db, 0x3b6db6db,
0x0badb6db, 0x1badb6db, 0x2badb6db, 0x3badb6db, 0x0bedb6db, 0x1bedb6db,
0x2bedb6db, 0x3bedb6db, 0x0b2eb6db, 0x1b2eb6db, 0x2b2eb6db, 0x3b2eb6db,
0x0b6eb6db, 0x1b6eb6db, 0x2b6eb6db, 0x3b6eb6db, 0x0baeb6db, 0x1baeb6db,
0x2baeb6db, 0x3baeb6db,
};
static const uint32_t kNonZeroRepsDepth[BROTLI_NUM_COMMAND_SYMBOLS] = {
6, 6, 6, 6, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12,
12, 12, 12, 12, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18,
18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18,
18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18,
18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18,
18, 18, 18, 18, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24,
24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24,
24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24,
24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24,
24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24,
24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24,
24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24,
24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24,
24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24,
24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24,
24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24,
24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24,
24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24,
24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24,
24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24,
24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24,
24, 24, 24, 24, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30,
30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30,
30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30,
30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30,
30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30,
30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30,
30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30,
30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30,
30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30,
30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30,
30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30,
30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30,
30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30,
30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30,
30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30,
30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30,
30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30,
30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30,
30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30,
30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30,
30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30,
30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30,
30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30,
};
static const uint16_t kStaticCommandCodeBits[BROTLI_NUM_COMMAND_SYMBOLS] = {
0, 256, 128, 384, 64, 320, 192, 448,
32, 288, 160, 416, 96, 352, 224, 480,
16, 272, 144, 400, 80, 336, 208, 464,
48, 304, 176, 432, 112, 368, 240, 496,
8, 264, 136, 392, 72, 328, 200, 456,
40, 296, 168, 424, 104, 360, 232, 488,
24, 280, 152, 408, 88, 344, 216, 472,
56, 312, 184, 440, 120, 376, 248, 504,
4, 260, 132, 388, 68, 324, 196, 452,
36, 292, 164, 420, 100, 356, 228, 484,
20, 276, 148, 404, 84, 340, 212, 468,
52, 308, 180, 436, 116, 372, 244, 500,
12, 268, 140, 396, 76, 332, 204, 460,
44, 300, 172, 428, 108, 364, 236, 492,
28, 284, 156, 412, 92, 348, 220, 476,
60, 316, 188, 444, 124, 380, 252, 508,
2, 258, 130, 386, 66, 322, 194, 450,
34, 290, 162, 418, 98, 354, 226, 482,
18, 274, 146, 402, 82, 338, 210, 466,
50, 306, 178, 434, 114, 370, 242, 498,
10, 266, 138, 394, 74, 330, 202, 458,
42, 298, 170, 426, 106, 362, 234, 490,
26, 282, 154, 410, 90, 346, 218, 474,
58, 314, 186, 442, 122, 378, 250, 506,
6, 262, 134, 390, 70, 326, 198, 454,
38, 294, 166, 422, 102, 358, 230, 486,
22, 278, 150, 406, 86, 342, 214, 470,
54, 310, 182, 438, 118, 374, 246, 502,
14, 270, 142, 398, 78, 334, 206, 462,
46, 302, 174, 430, 110, 366, 238, 494,
30, 286, 158, 414, 94, 350, 222, 478,
62, 318, 190, 446, 126, 382, 254, 510,
1, 257, 129, 385, 65, 321, 193, 449,
33, 289, 161, 417, 97, 353, 225, 481,
17, 273, 145, 401, 81, 337, 209, 465,
49, 305, 177, 433, 113, 369, 241, 497,
9, 265, 137, 393, 73, 329, 201, 457,
41, 297, 169, 425, 105, 361, 233, 489,
25, 281, 153, 409, 89, 345, 217, 473,
57, 313, 185, 441, 121, 377, 249, 505,
5, 261, 133, 389, 69, 325, 197, 453,
37, 293, 165, 421, 101, 357, 229, 485,
21, 277, 149, 405, 85, 341, 213, 469,
53, 309, 181, 437, 117, 373, 245, 501,
13, 269, 141, 397, 77, 333, 205, 461,
45, 301, 173, 429, 109, 365, 237, 493,
29, 285, 157, 413, 93, 349, 221, 477,
61, 317, 189, 445, 125, 381, 253, 509,
3, 259, 131, 387, 67, 323, 195, 451,
35, 291, 163, 419, 99, 355, 227, 483,
19, 275, 147, 403, 83, 339, 211, 467,
51, 307, 179, 435, 115, 371, 243, 499,
11, 267, 139, 395, 75, 331, 203, 459,
43, 299, 171, 427, 107, 363, 235, 491,
27, 283, 155, 411, 91, 347, 219, 475,
59, 315, 187, 443, 123, 379, 251, 507,
7, 1031, 519, 1543, 263, 1287, 775, 1799,
135, 1159, 647, 1671, 391, 1415, 903, 1927,
71, 1095, 583, 1607, 327, 1351, 839, 1863,
199, 1223, 711, 1735, 455, 1479, 967, 1991,
39, 1063, 551, 1575, 295, 1319, 807, 1831,
167, 1191, 679, 1703, 423, 1447, 935, 1959,
103, 1127, 615, 1639, 359, 1383, 871, 1895,
231, 1255, 743, 1767, 487, 1511, 999, 2023,
23, 1047, 535, 1559, 279, 1303, 791, 1815,
151, 1175, 663, 1687, 407, 1431, 919, 1943,
87, 1111, 599, 1623, 343, 1367, 855, 1879,
215, 1239, 727, 1751, 471, 1495, 983, 2007,
55, 1079, 567, 1591, 311, 1335, 823, 1847,
183, 1207, 695, 1719, 439, 1463, 951, 1975,
119, 1143, 631, 1655, 375, 1399, 887, 1911,
247, 1271, 759, 1783, 503, 1527, 1015, 2039,
15, 1039, 527, 1551, 271, 1295, 783, 1807,
143, 1167, 655, 1679, 399, 1423, 911, 1935,
79, 1103, 591, 1615, 335, 1359, 847, 1871,
207, 1231, 719, 1743, 463, 1487, 975, 1999,
47, 1071, 559, 1583, 303, 1327, 815, 1839,
175, 1199, 687, 1711, 431, 1455, 943, 1967,
111, 1135, 623, 1647, 367, 1391, 879, 1903,
239, 1263, 751, 1775, 495, 1519, 1007, 2031,
31, 1055, 543, 1567, 287, 1311, 799, 1823,
159, 1183, 671, 1695, 415, 1439, 927, 1951,
95, 1119, 607, 1631, 351, 1375, 863, 1887,
223, 1247, 735, 1759, 479, 1503, 991, 2015,
63, 1087, 575, 1599, 319, 1343, 831, 1855,
191, 1215, 703, 1727, 447, 1471, 959, 1983,
127, 1151, 639, 1663, 383, 1407, 895, 1919,
255, 1279, 767, 1791, 511, 1535, 1023, 2047,
};
static BROTLI_INLINE void StoreStaticCommandHuffmanTree(
size_t* storage_ix, uint8_t* storage) {
BrotliWriteBits(
56, BROTLI_MAKE_UINT64_T(0x926244U, 0x16307003U), storage_ix, storage);
BrotliWriteBits(3, 0x00000000U, storage_ix, storage);
}
static const uint16_t kStaticDistanceCodeBits[64] = {
0, 32, 16, 48, 8, 40, 24, 56, 4, 36, 20, 52, 12, 44, 28, 60,
2, 34, 18, 50, 10, 42, 26, 58, 6, 38, 22, 54, 14, 46, 30, 62,
1, 33, 17, 49, 9, 41, 25, 57, 5, 37, 21, 53, 13, 45, 29, 61,
3, 35, 19, 51, 11, 43, 27, 59, 7, 39, 23, 55, 15, 47, 31, 63,
};
static BROTLI_INLINE void StoreStaticDistanceHuffmanTree(
size_t* storage_ix, uint8_t* storage) {
BrotliWriteBits(28, 0x0369dc03U, storage_ix, storage);
}
#if defined(__cplusplus) || defined(c_plusplus)
} /* extern "C" */
#endif
#endif /* BROTLI_ENC_ENTROPY_ENCODE_STATIC_H_ */

View File

@ -1,145 +0,0 @@
/* Copyright 2013 Google Inc. All Rights Reserved.
Distributed under MIT license.
See file LICENSE for detail or copy at https://opensource.org/licenses/MIT
*/
/* Utilities for fast computation of logarithms. */
#ifndef BROTLI_ENC_FAST_LOG_H_
#define BROTLI_ENC_FAST_LOG_H_
#include <math.h>
#include <brotli/types.h>
#include <brotli/port.h>
#if defined(__cplusplus) || defined(c_plusplus)
extern "C" {
#endif
static BROTLI_INLINE uint32_t Log2FloorNonZero(size_t n) {
#if BROTLI_MODERN_COMPILER || __has_builtin(__builtin_clz)
return 31u ^ (uint32_t)__builtin_clz((uint32_t)n);
#else
uint32_t result = 0;
while (n >>= 1) result++;
return result;
#endif
}
/* A lookup table for small values of log2(int) to be used in entropy
computation.
", ".join(["%.16ff" % x for x in [0.0]+[log2(x) for x in range(1, 256)]]) */
static const float kLog2Table[] = {
0.0000000000000000f, 0.0000000000000000f, 1.0000000000000000f,
1.5849625007211563f, 2.0000000000000000f, 2.3219280948873622f,
2.5849625007211561f, 2.8073549220576042f, 3.0000000000000000f,
3.1699250014423126f, 3.3219280948873626f, 3.4594316186372978f,
3.5849625007211565f, 3.7004397181410922f, 3.8073549220576037f,
3.9068905956085187f, 4.0000000000000000f, 4.0874628412503400f,
4.1699250014423122f, 4.2479275134435852f, 4.3219280948873626f,
4.3923174227787607f, 4.4594316186372973f, 4.5235619560570131f,
4.5849625007211570f, 4.6438561897747244f, 4.7004397181410926f,
4.7548875021634691f, 4.8073549220576037f, 4.8579809951275728f,
4.9068905956085187f, 4.9541963103868758f, 5.0000000000000000f,
5.0443941193584534f, 5.0874628412503400f, 5.1292830169449664f,
5.1699250014423122f, 5.2094533656289501f, 5.2479275134435852f,
5.2854022188622487f, 5.3219280948873626f, 5.3575520046180838f,
5.3923174227787607f, 5.4262647547020979f, 5.4594316186372973f,
5.4918530963296748f, 5.5235619560570131f, 5.5545888516776376f,
5.5849625007211570f, 5.6147098441152083f, 5.6438561897747244f,
5.6724253419714961f, 5.7004397181410926f, 5.7279204545631996f,
5.7548875021634691f, 5.7813597135246599f, 5.8073549220576046f,
5.8328900141647422f, 5.8579809951275719f, 5.8826430493618416f,
5.9068905956085187f, 5.9307373375628867f, 5.9541963103868758f,
5.9772799234999168f, 6.0000000000000000f, 6.0223678130284544f,
6.0443941193584534f, 6.0660891904577721f, 6.0874628412503400f,
6.1085244567781700f, 6.1292830169449672f, 6.1497471195046822f,
6.1699250014423122f, 6.1898245588800176f, 6.2094533656289510f,
6.2288186904958804f, 6.2479275134435861f, 6.2667865406949019f,
6.2854022188622487f, 6.3037807481771031f, 6.3219280948873617f,
6.3398500028846252f, 6.3575520046180847f, 6.3750394313469254f,
6.3923174227787598f, 6.4093909361377026f, 6.4262647547020979f,
6.4429434958487288f, 6.4594316186372982f, 6.4757334309663976f,
6.4918530963296748f, 6.5077946401986964f, 6.5235619560570131f,
6.5391588111080319f, 6.5545888516776376f, 6.5698556083309478f,
6.5849625007211561f, 6.5999128421871278f, 6.6147098441152092f,
6.6293566200796095f, 6.6438561897747253f, 6.6582114827517955f,
6.6724253419714952f, 6.6865005271832185f, 6.7004397181410917f,
6.7142455176661224f, 6.7279204545631988f, 6.7414669864011465f,
6.7548875021634691f, 6.7681843247769260f, 6.7813597135246599f,
6.7944158663501062f, 6.8073549220576037f, 6.8201789624151887f,
6.8328900141647422f, 6.8454900509443757f, 6.8579809951275719f,
6.8703647195834048f, 6.8826430493618416f, 6.8948177633079437f,
6.9068905956085187f, 6.9188632372745955f, 6.9307373375628867f,
6.9425145053392399f, 6.9541963103868758f, 6.9657842846620879f,
6.9772799234999168f, 6.9886846867721664f, 7.0000000000000000f,
7.0112272554232540f, 7.0223678130284544f, 7.0334230015374501f,
7.0443941193584534f, 7.0552824355011898f, 7.0660891904577721f,
7.0768155970508317f, 7.0874628412503400f, 7.0980320829605272f,
7.1085244567781700f, 7.1189410727235076f, 7.1292830169449664f,
7.1395513523987937f, 7.1497471195046822f, 7.1598713367783891f,
7.1699250014423130f, 7.1799090900149345f, 7.1898245588800176f,
7.1996723448363644f, 7.2094533656289492f, 7.2191685204621621f,
7.2288186904958804f, 7.2384047393250794f, 7.2479275134435861f,
7.2573878426926521f, 7.2667865406949019f, 7.2761244052742384f,
7.2854022188622487f, 7.2946207488916270f, 7.3037807481771031f,
7.3128829552843557f, 7.3219280948873617f, 7.3309168781146177f,
7.3398500028846243f, 7.3487281542310781f, 7.3575520046180847f,
7.3663222142458151f, 7.3750394313469254f, 7.3837042924740528f,
7.3923174227787607f, 7.4008794362821844f, 7.4093909361377026f,
7.4178525148858991f, 7.4262647547020979f, 7.4346282276367255f,
7.4429434958487288f, 7.4512111118323299f, 7.4594316186372973f,
7.4676055500829976f, 7.4757334309663976f, 7.4838157772642564f,
7.4918530963296748f, 7.4998458870832057f, 7.5077946401986964f,
7.5156998382840436f, 7.5235619560570131f, 7.5313814605163119f,
7.5391588111080319f, 7.5468944598876373f, 7.5545888516776376f,
7.5622424242210728f, 7.5698556083309478f, 7.5774288280357487f,
7.5849625007211561f, 7.5924570372680806f, 7.5999128421871278f,
7.6073303137496113f, 7.6147098441152075f, 7.6220518194563764f,
7.6293566200796095f, 7.6366246205436488f, 7.6438561897747244f,
7.6510516911789290f, 7.6582114827517955f, 7.6653359171851765f,
7.6724253419714952f, 7.6794800995054464f, 7.6865005271832185f,
7.6934869574993252f, 7.7004397181410926f, 7.7073591320808825f,
7.7142455176661224f, 7.7210991887071856f, 7.7279204545631996f,
7.7347096202258392f, 7.7414669864011465f, 7.7481928495894596f,
7.7548875021634691f, 7.7615512324444795f, 7.7681843247769260f,
7.7747870596011737f, 7.7813597135246608f, 7.7879025593914317f,
7.7944158663501062f, 7.8008998999203047f, 7.8073549220576037f,
7.8137811912170374f, 7.8201789624151887f, 7.8265484872909159f,
7.8328900141647422f, 7.8392037880969445f, 7.8454900509443757f,
7.8517490414160571f, 7.8579809951275719f, 7.8641861446542798f,
7.8703647195834048f, 7.8765169465650002f, 7.8826430493618425f,
7.8887432488982601f, 7.8948177633079446f, 7.9008668079807496f,
7.9068905956085187f, 7.9128893362299619f, 7.9188632372745955f,
7.9248125036057813f, 7.9307373375628867f, 7.9366379390025719f,
7.9425145053392399f, 7.9483672315846778f, 7.9541963103868758f,
7.9600019320680806f, 7.9657842846620870f, 7.9715435539507720f,
7.9772799234999168f, 7.9829935746943104f, 7.9886846867721664f,
7.9943534368588578f
};
#define LOG_2_INV 1.4426950408889634
/* Faster logarithm for small integers, with the property of log2(0) == 0. */
static BROTLI_INLINE double FastLog2(size_t v) {
if (v < sizeof(kLog2Table) / sizeof(kLog2Table[0])) {
return kLog2Table[v];
}
#if (defined(_MSC_VER) && _MSC_VER <= 1700) || \
(defined(__ANDROID_API__) && __ANDROID_API__ < 18)
/* Visual Studio 2012 and Android API levels < 18 do not have the log2()
* function defined, so we use log() and a multiplication instead. */
return log((double)v) * LOG_2_INV;
#else
return log2((double)v);
#endif
}
#if defined(__cplusplus) || defined(c_plusplus)
} /* extern "C" */
#endif
#endif /* BROTLI_ENC_FAST_LOG_H_ */
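The removed fast_log.h above boils down to an exact lookup table for log2(n) with n < 256, a log2() fallback for larger values, and the convention that log2(0) is 0. A minimal standalone sketch of that scheme (an editorial illustration, not code from the removed file; it builds the table at run time instead of baking it in at compile time):

/* Illustrative sketch only: table lookup for small n, log2() beyond it,
   and fast_log2(0) == 0 by convention, as described in the header above. */
#include <math.h>
#include <stddef.h>
#include <stdio.h>

static double fast_log2(size_t v) {
  static double table[256];
  static int ready = 0;
  if (!ready) {  /* the deleted header bakes these values in as kLog2Table */
    table[0] = 0.0;
    for (int i = 1; i < 256; ++i) table[i] = log2((double)i);
    ready = 1;
  }
  if (v < 256) return table[v];
  return log2((double)v);
}

int main(void) {
  printf("fast_log2(0)     = %f\n", fast_log2(0));               /* 0, by convention */
  printf("fast_log2(8)     = %f\n", fast_log2(8));               /* 3, from the table */
  printf("fast_log2(1<<20) = %f\n", fast_log2((size_t)1 << 20)); /* 20, via log2() */
  return 0;
}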

View File

@ -1,80 +0,0 @@
/* Copyright 2010 Google Inc. All Rights Reserved.
Distributed under MIT license.
See file LICENSE for detail or copy at https://opensource.org/licenses/MIT
*/
/* Function to find maximal matching prefixes of strings. */
#ifndef BROTLI_ENC_FIND_MATCH_LENGTH_H_
#define BROTLI_ENC_FIND_MATCH_LENGTH_H_
#include <brotli/types.h>
#include "./port.h"
#if defined(__cplusplus) || defined(c_plusplus)
extern "C" {
#endif
/* Separate implementation for little-endian 64-bit targets, for speed. */
#if defined(__GNUC__) && defined(_LP64) && defined(IS_LITTLE_ENDIAN)
static BROTLI_INLINE size_t FindMatchLengthWithLimit(const uint8_t* s1,
const uint8_t* s2,
size_t limit) {
size_t matched = 0;
size_t limit2 = (limit >> 3) + 1; /* + 1 is for pre-decrement in while */
while (BROTLI_PREDICT_TRUE(--limit2)) {
if (BROTLI_PREDICT_FALSE(BROTLI_UNALIGNED_LOAD64(s2) ==
BROTLI_UNALIGNED_LOAD64(s1 + matched))) {
s2 += 8;
matched += 8;
} else {
uint64_t x =
BROTLI_UNALIGNED_LOAD64(s2) ^ BROTLI_UNALIGNED_LOAD64(s1 + matched);
size_t matching_bits = (size_t)__builtin_ctzll(x);
matched += matching_bits >> 3;
return matched;
}
}
limit = (limit & 7) + 1; /* + 1 is for pre-decrement in while */
while (--limit) {
if (BROTLI_PREDICT_TRUE(s1[matched] == *s2)) {
++s2;
++matched;
} else {
return matched;
}
}
return matched;
}
#else
static BROTLI_INLINE size_t FindMatchLengthWithLimit(const uint8_t* s1,
const uint8_t* s2,
size_t limit) {
size_t matched = 0;
const uint8_t* s2_limit = s2 + limit;
const uint8_t* s2_ptr = s2;
/* Find out how long the match is. We loop over the data 32 bits at a
time until we find a 32-bit block that doesn't match; then we find
the first non-matching bit and use that to calculate the total
length of the match. */
while (s2_ptr <= s2_limit - 4 &&
BROTLI_UNALIGNED_LOAD32(s2_ptr) ==
BROTLI_UNALIGNED_LOAD32(s1 + matched)) {
s2_ptr += 4;
matched += 4;
}
while ((s2_ptr < s2_limit) && (s1[matched] == *s2_ptr)) {
++s2_ptr;
++matched;
}
return matched;
}
#endif
#if defined(__cplusplus) || defined(c_plusplus)
} /* extern "C" */
#endif
#endif /* BROTLI_ENC_FIND_MATCH_LENGTH_H_ */
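FindMatchLengthWithLimit reports how many leading bytes two buffers share, with the 64-bit little-endian variant comparing eight bytes at a time and using __builtin_ctzll to locate the first differing bit. A simplified byte-at-a-time equivalent, shown here purely to illustrate the contract (not taken from the removed header):

/* Illustrative usage sketch only: same result as the portable variant above,
   just without the 32-bit fast path. Never reads past `limit` bytes. */
#include <stddef.h>
#include <stdio.h>

static size_t find_match_length(const char* s1, const char* s2, size_t limit) {
  size_t matched = 0;
  while (matched < limit && s1[matched] == s2[matched]) ++matched;
  return matched;
}

int main(void) {
  /* "compression" vs "compressor": the shared prefix "compress" is 8 bytes. */
  printf("matched %zu bytes\n", find_match_length("compression", "compressor", 10));
  return 0;
}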

View File

@ -1,467 +0,0 @@
/* Copyright 2010 Google Inc. All Rights Reserved.
Distributed under MIT license.
See file LICENSE for detail or copy at https://opensource.org/licenses/MIT
*/
/* A (forgetful) hash table to the data seen by the compressor, to
help create backward references to previous data. */
#ifndef BROTLI_ENC_HASH_H_
#define BROTLI_ENC_HASH_H_
#include <string.h> /* memcmp, memset */
#include "../common/constants.h"
#include "../common/dictionary.h"
#include <brotli/types.h>
#include "./fast_log.h"
#include "./find_match_length.h"
#include "./memory.h"
#include "./port.h"
#include "./quality.h"
#include "./static_dict.h"
#if defined(__cplusplus) || defined(c_plusplus)
extern "C" {
#endif
/* Pointer to hasher data.
*
* Excluding initialization and destruction, hasher can be passed as
* HasherHandle by value.
*
* Typically hasher data consists of 3 sections:
* * HasherCommon structure
* * private structured hasher data, depending on hasher type
* * private dynamic hasher data, depending on hasher type and parameters
*/
typedef uint8_t* HasherHandle;
typedef struct {
BrotliHasherParams params;
/* False if hasher needs to be "prepared" before use. */
BROTLI_BOOL is_prepared_;
size_t dict_num_lookups;
size_t dict_num_matches;
} HasherCommon;
static BROTLI_INLINE HasherCommon* GetHasherCommon(HasherHandle handle) {
return (HasherCommon*)handle;
}
#define score_t size_t
static const uint32_t kCutoffTransformsCount = 10;
/* 0, 12, 27, 23, 42, 63, 56, 48, 59, 64 */
/* 0+0, 4+8, 8+19, 12+11, 16+26, 20+43, 24+32, 28+20, 32+27, 36+28 */
static const uint64_t kCutoffTransforms =
BROTLI_MAKE_UINT64_T(0x071B520A, 0xDA2D3200);
typedef struct HasherSearchResult {
size_t len;
size_t distance;
score_t score;
int len_code_delta; /* == len_code - len */
} HasherSearchResult;
/* kHashMul32 multiplier has these properties:
* The multiplier must be odd. Otherwise we may lose the highest bit.
* No long streaks of ones or zeros.
* There is no effort to ensure that it is a prime, the oddity is enough
for this use.
* The number has been tuned heuristically against compression benchmarks. */
static const uint32_t kHashMul32 = 0x1e35a7bd;
static const uint64_t kHashMul64 = BROTLI_MAKE_UINT64_T(0x1e35a7bd, 0x1e35a7bd);
static const uint64_t kHashMul64Long =
BROTLI_MAKE_UINT64_T(0x1fe35a7bU, 0xd3579bd3U);
static BROTLI_INLINE uint32_t Hash14(const uint8_t* data) {
uint32_t h = BROTLI_UNALIGNED_LOAD32(data) * kHashMul32;
/* The higher bits contain more mixture from the multiplication,
so we take our results from there. */
return h >> (32 - 14);
}
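Hash14 is plain multiplicative hashing: multiply the 32-bit window by the odd constant kHashMul32 and keep the top 14 bits, where the product is best mixed. A tiny standalone sketch of the same idea (editorial illustration; memcpy stands in for BROTLI_UNALIGNED_LOAD32):

/* Illustrative sketch only: mirrors Hash14 with the constant and shift
   shown above, so the result is always a bucket index below 2^14. */
#include <stdint.h>
#include <stdio.h>
#include <string.h>

static uint32_t hash14(const uint8_t* data) {
  uint32_t w;
  memcpy(&w, data, sizeof(w));          /* unaligned 32-bit load */
  return (w * 0x1e35a7bdu) >> (32 - 14);
}

int main(void) {
  const uint8_t window[4] = { 'a', 'b', 'c', 'd' };
  printf("bucket = %u (always < %u)\n", hash14(window), 1u << 14);
  return 0;
}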
static BROTLI_INLINE void PrepareDistanceCache(
int* BROTLI_RESTRICT distance_cache, const int num_distances) {
if (num_distances > 4) {
int last_distance = distance_cache[0];
distance_cache[4] = last_distance - 1;
distance_cache[5] = last_distance + 1;
distance_cache[6] = last_distance - 2;
distance_cache[7] = last_distance + 2;
distance_cache[8] = last_distance - 3;
distance_cache[9] = last_distance + 3;
if (num_distances > 10) {
int next_last_distance = distance_cache[1];
distance_cache[10] = next_last_distance - 1;
distance_cache[11] = next_last_distance + 1;
distance_cache[12] = next_last_distance - 2;
distance_cache[13] = next_last_distance + 2;
distance_cache[14] = next_last_distance - 3;
distance_cache[15] = next_last_distance + 3;
}
}
}
#define BROTLI_LITERAL_BYTE_SCORE 135
#define BROTLI_DISTANCE_BIT_PENALTY 30
/* Score must be positive after applying maximal penalty. */
#define BROTLI_SCORE_BASE (BROTLI_DISTANCE_BIT_PENALTY * 8 * sizeof(size_t))
/* Usually, we always choose the longest backward reference. This function
allows for the exception of that rule.
If we choose a backward reference that is further away, it will
usually be coded with more bits. We approximate this by assuming
log2(distance). If the distance can be expressed in terms of the
last four distances, we use some heuristic constants to estimate
the bits cost. For the first up to four literals we use the bit
cost of the literals from the literal cost model, after that we
use the average bit cost of the cost model.
This function is used to sometimes discard a longer backward reference
when it is not much longer and the bit cost for encoding it is more
than the saved literals.
backward_reference_offset MUST be positive. */
static BROTLI_INLINE score_t BackwardReferenceScore(
size_t copy_length, size_t backward_reference_offset) {
return BROTLI_SCORE_BASE + BROTLI_LITERAL_BYTE_SCORE * (score_t)copy_length -
BROTLI_DISTANCE_BIT_PENALTY * Log2FloorNonZero(backward_reference_offset);
}
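Plugging numbers into the score above makes the heuristic concrete: on a 64-bit build BROTLI_SCORE_BASE is 30 * 8 * 8 = 1920, each extra copied byte is worth 135 points, and every doubling of the backward distance costs 30. The worked example below (an illustration, not part of the removed sources) shows a 10-byte copy from 64 bytes back outscoring an 11-byte copy from 1 MiB back:

/* Worked example of BackwardReferenceScore (illustration only):
   score = 1920 + 135 * copy_length - 30 * floor(log2(distance)). */
#include <stddef.h>
#include <stdio.h>

static unsigned log2_floor_nonzero(size_t n) {
  unsigned r = 0;
  while (n >>= 1) r++;
  return r;
}

static size_t backward_reference_score(size_t copy_length, size_t distance) {
  const size_t score_base = 30 * 8 * sizeof(size_t);  /* 1920 on 64-bit */
  return score_base + 135 * copy_length - 30 * log2_floor_nonzero(distance);
}

int main(void) {
  /* 1920 + 1350 - 30*6  = 3090 vs 1920 + 1485 - 30*20 = 2805:
     the shorter but much closer copy wins. */
  printf("len=10, dist=64    -> %zu\n", backward_reference_score(10, 64));
  printf("len=11, dist=1<<20 -> %zu\n", backward_reference_score(11, (size_t)1 << 20));
  return 0;
}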
static BROTLI_INLINE score_t BackwardReferenceScoreUsingLastDistance(
size_t copy_length) {
return BROTLI_LITERAL_BYTE_SCORE * (score_t)copy_length +
BROTLI_SCORE_BASE + 15;
}
static BROTLI_INLINE score_t BackwardReferencePenaltyUsingLastDistance(
size_t distance_short_code) {
return (score_t)39 + ((0x1CA10 >> (distance_short_code & 0xE)) & 0xE);
}
static BROTLI_INLINE BROTLI_BOOL TestStaticDictionaryItem(
const BrotliDictionary* dictionary, size_t item, const uint8_t* data,
size_t max_length, size_t max_backward, HasherSearchResult* out) {
size_t len;
size_t dist;
size_t offset;
size_t matchlen;
size_t backward;
score_t score;
len = item & 0x1F;
dist = item >> 5;
offset = dictionary->offsets_by_length[len] + len * dist;
if (len > max_length) {
return BROTLI_FALSE;
}
matchlen =
FindMatchLengthWithLimit(data, &dictionary->data[offset], len);
if (matchlen + kCutoffTransformsCount <= len || matchlen == 0) {
return BROTLI_FALSE;
}
{
size_t cut = len - matchlen;
size_t transform_id =
(cut << 2) + (size_t)((kCutoffTransforms >> (cut * 6)) & 0x3F);
backward = max_backward + dist + 1 +
(transform_id << dictionary->size_bits_by_length[len]);
}
score = BackwardReferenceScore(matchlen, backward);
if (score < out->score) {
return BROTLI_FALSE;
}
out->len = matchlen;
out->len_code_delta = (int)len - (int)matchlen;
out->distance = backward;
out->score = score;
return BROTLI_TRUE;
}
static BROTLI_INLINE void SearchInStaticDictionary(
const BrotliDictionary* dictionary, const uint16_t* dictionary_hash,
HasherHandle handle, const uint8_t* data, size_t max_length,
size_t max_backward, HasherSearchResult* out, BROTLI_BOOL shallow) {
size_t key;
size_t i;
HasherCommon* self = GetHasherCommon(handle);
if (self->dict_num_matches < (self->dict_num_lookups >> 7)) {
return;
}
key = Hash14(data) << 1;
for (i = 0; i < (shallow ? 1u : 2u); ++i, ++key) {
size_t item = dictionary_hash[key];
self->dict_num_lookups++;
if (item != 0) {
BROTLI_BOOL item_matches = TestStaticDictionaryItem(
dictionary, item, data, max_length, max_backward, out);
if (item_matches) {
self->dict_num_matches++;
}
}
}
}
typedef struct BackwardMatch {
uint32_t distance;
uint32_t length_and_code;
} BackwardMatch;
static BROTLI_INLINE void InitBackwardMatch(BackwardMatch* self,
size_t dist, size_t len) {
self->distance = (uint32_t)dist;
self->length_and_code = (uint32_t)(len << 5);
}
static BROTLI_INLINE void InitDictionaryBackwardMatch(BackwardMatch* self,
size_t dist, size_t len, size_t len_code) {
self->distance = (uint32_t)dist;
self->length_and_code =
(uint32_t)((len << 5) | (len == len_code ? 0 : len_code));
}
static BROTLI_INLINE size_t BackwardMatchLength(const BackwardMatch* self) {
return self->length_and_code >> 5;
}
static BROTLI_INLINE size_t BackwardMatchLengthCode(const BackwardMatch* self) {
size_t code = self->length_and_code & 31;
return code ? code : BackwardMatchLength(self);
}
#define EXPAND_CAT(a, b) CAT(a, b)
#define CAT(a, b) a ## b
#define FN(X) EXPAND_CAT(X, HASHER())
#define HASHER() H10
#define BUCKET_BITS 17
#define MAX_TREE_SEARCH_DEPTH 64
#define MAX_TREE_COMP_LENGTH 128
#include "./hash_to_binary_tree_inc.h" /* NOLINT(build/include) */
#undef MAX_TREE_SEARCH_DEPTH
#undef MAX_TREE_COMP_LENGTH
#undef BUCKET_BITS
#undef HASHER
/* MAX_NUM_MATCHES == 64 + MAX_TREE_SEARCH_DEPTH */
#define MAX_NUM_MATCHES_H10 128
/* For BUCKET_SWEEP == 1, enabling the dictionary lookup makes compression
a little faster (0.5% - 1%) and it compresses 0.15% better on small text
and HTML inputs. */
#define HASHER() H2
#define BUCKET_BITS 16
#define BUCKET_SWEEP 1
#define HASH_LEN 5
#define USE_DICTIONARY 1
#include "./hash_longest_match_quickly_inc.h" /* NOLINT(build/include) */
#undef BUCKET_SWEEP
#undef USE_DICTIONARY
#undef HASHER
#define HASHER() H3
#define BUCKET_SWEEP 2
#define USE_DICTIONARY 0
#include "./hash_longest_match_quickly_inc.h" /* NOLINT(build/include) */
#undef USE_DICTIONARY
#undef BUCKET_SWEEP
#undef BUCKET_BITS
#undef HASHER
#define HASHER() H4
#define BUCKET_BITS 17
#define BUCKET_SWEEP 4
#define USE_DICTIONARY 1
#include "./hash_longest_match_quickly_inc.h" /* NOLINT(build/include) */
#undef USE_DICTIONARY
#undef HASH_LEN
#undef BUCKET_SWEEP
#undef BUCKET_BITS
#undef HASHER
#define HASHER() H5
#include "./hash_longest_match_inc.h" /* NOLINT(build/include) */
#undef HASHER
#define HASHER() H6
#include "./hash_longest_match64_inc.h" /* NOLINT(build/include) */
#undef HASHER
#define BUCKET_BITS 15
#define NUM_LAST_DISTANCES_TO_CHECK 4
#define NUM_BANKS 1
#define BANK_BITS 16
#define HASHER() H40
#include "./hash_forgetful_chain_inc.h" /* NOLINT(build/include) */
#undef HASHER
#undef NUM_LAST_DISTANCES_TO_CHECK
#define NUM_LAST_DISTANCES_TO_CHECK 10
#define HASHER() H41
#include "./hash_forgetful_chain_inc.h" /* NOLINT(build/include) */
#undef HASHER
#undef NUM_LAST_DISTANCES_TO_CHECK
#undef NUM_BANKS
#undef BANK_BITS
#define NUM_LAST_DISTANCES_TO_CHECK 16
#define NUM_BANKS 512
#define BANK_BITS 9
#define HASHER() H42
#include "./hash_forgetful_chain_inc.h" /* NOLINT(build/include) */
#undef HASHER
#undef NUM_LAST_DISTANCES_TO_CHECK
#undef NUM_BANKS
#undef BANK_BITS
#undef BUCKET_BITS
#define HASHER() H54
#define BUCKET_BITS 20
#define BUCKET_SWEEP 4
#define HASH_LEN 7
#define USE_DICTIONARY 0
#include "./hash_longest_match_quickly_inc.h" /* NOLINT(build/include) */
#undef USE_DICTIONARY
#undef HASH_LEN
#undef BUCKET_SWEEP
#undef BUCKET_BITS
#undef HASHER
#undef FN
#undef CAT
#undef EXPAND_CAT
#define FOR_GENERIC_HASHERS(H) H(2) H(3) H(4) H(5) H(6) H(40) H(41) H(42) H(54)
#define FOR_ALL_HASHERS(H) FOR_GENERIC_HASHERS(H) H(10)
static BROTLI_INLINE void DestroyHasher(
MemoryManager* m, HasherHandle* handle) {
if (*handle == NULL) return;
BROTLI_FREE(m, *handle);
}
static BROTLI_INLINE void HasherReset(HasherHandle handle) {
if (handle == NULL) return;
GetHasherCommon(handle)->is_prepared_ = BROTLI_FALSE;
}
static BROTLI_INLINE size_t HasherSize(const BrotliEncoderParams* params,
BROTLI_BOOL one_shot, const size_t input_size) {
size_t result = sizeof(HasherCommon);
switch (params->hasher.type) {
#define SIZE_(N) \
case N: \
result += HashMemAllocInBytesH ## N(params, one_shot, input_size); \
break;
FOR_ALL_HASHERS(SIZE_)
#undef SIZE_
default:
break;
}
return result;
}
static BROTLI_INLINE void HasherSetup(MemoryManager* m, HasherHandle* handle,
BrotliEncoderParams* params, const uint8_t* data, size_t position,
size_t input_size, BROTLI_BOOL is_last) {
HasherHandle self = NULL;
HasherCommon* common = NULL;
BROTLI_BOOL one_shot = (position == 0 && is_last);
if (*handle == NULL) {
size_t alloc_size;
ChooseHasher(params, &params->hasher);
alloc_size = HasherSize(params, one_shot, input_size);
self = BROTLI_ALLOC(m, uint8_t, alloc_size);
if (BROTLI_IS_OOM(m)) return;
*handle = self;
common = GetHasherCommon(self);
common->params = params->hasher;
switch (common->params.type) {
#define INITIALIZE_(N) \
case N: \
InitializeH ## N(*handle, params); \
break;
FOR_ALL_HASHERS(INITIALIZE_);
#undef INITIALIZE_
default:
break;
}
HasherReset(*handle);
}
self = *handle;
common = GetHasherCommon(self);
if (!common->is_prepared_) {
switch (common->params.type) {
#define PREPARE_(N) \
case N: \
PrepareH ## N(self, one_shot, input_size, data); \
break;
FOR_ALL_HASHERS(PREPARE_)
#undef PREPARE_
default: break;
}
if (position == 0) {
common->dict_num_lookups = 0;
common->dict_num_matches = 0;
}
common->is_prepared_ = BROTLI_TRUE;
}
}
/* Custom LZ77 window. */
static BROTLI_INLINE void HasherPrependCustomDictionary(
MemoryManager* m, HasherHandle* handle, BrotliEncoderParams* params,
const size_t size, const uint8_t* dict) {
size_t overlap;
size_t i;
HasherHandle self;
HasherSetup(m, handle, params, dict, 0, size, BROTLI_FALSE);
if (BROTLI_IS_OOM(m)) return;
self = *handle;
switch (GetHasherCommon(self)->params.type) {
#define PREPEND_(N) \
case N: \
overlap = (StoreLookaheadH ## N()) - 1; \
for (i = 0; i + overlap < size; i++) { \
StoreH ## N(self, dict, ~(size_t)0, i); \
} \
break;
FOR_ALL_HASHERS(PREPEND_)
#undef PREPEND_
default: break;
}
}
static BROTLI_INLINE void InitOrStitchToPreviousBlock(
MemoryManager* m, HasherHandle* handle, const uint8_t* data, size_t mask,
BrotliEncoderParams* params, size_t position, size_t input_size,
BROTLI_BOOL is_last) {
HasherHandle self;
HasherSetup(m, handle, params, data, position, input_size, is_last);
if (BROTLI_IS_OOM(m)) return;
self = *handle;
switch (GetHasherCommon(self)->params.type) {
#define INIT_(N) \
case N: \
StitchToPreviousBlockH ## N(self, input_size, position, data, mask); \
break;
FOR_ALL_HASHERS(INIT_)
#undef INIT_
default: break;
}
}
#if defined(__cplusplus) || defined(c_plusplus)
} /* extern "C" */
#endif
#endif /* BROTLI_ENC_HASH_H_ */

View File

@ -1,253 +0,0 @@
/* NOLINT(build/header_guard) */
/* Copyright 2016 Google Inc. All Rights Reserved.
Distributed under MIT license.
See file LICENSE for detail or copy at https://opensource.org/licenses/MIT
*/
/* template parameters: FN, BUCKET_BITS, NUM_BANKS, BANK_BITS,
NUM_LAST_DISTANCES_TO_CHECK */
/* A (forgetful) hash table to the data seen by the compressor, to
help create backward references to previous data.
Hashes are stored in chains which are bucketed to groups. Group of chains
share a storage "bank". When more than "bank size" chain nodes are added,
oldest nodes are replaced; this way several chains may share a tail. */
#define HashForgetfulChain HASHER()
#define BANK_SIZE (1 << BANK_BITS)
/* Number of hash buckets. */
#define BUCKET_SIZE (1 << BUCKET_BITS)
#define CAPPED_CHAINS 0
static BROTLI_INLINE size_t FN(HashTypeLength)(void) { return 4; }
static BROTLI_INLINE size_t FN(StoreLookahead)(void) { return 4; }
/* HashBytes is the function that chooses the bucket to place the address in.*/
static BROTLI_INLINE size_t FN(HashBytes)(const uint8_t *data) {
const uint32_t h = BROTLI_UNALIGNED_LOAD32(data) * kHashMul32;
/* The higher bits contain more mixture from the multiplication,
so we take our results from there. */
return h >> (32 - BUCKET_BITS);
}
typedef struct FN(Slot) {
uint16_t delta;
uint16_t next;
} FN(Slot);
typedef struct FN(Bank) {
FN(Slot) slots[BANK_SIZE];
} FN(Bank);
typedef struct HashForgetfulChain {
uint32_t addr[BUCKET_SIZE];
uint16_t head[BUCKET_SIZE];
/* Truncated hash used for quick rejection of "distance cache" candidates. */
uint8_t tiny_hash[65536];
FN(Bank) banks[NUM_BANKS];
uint16_t free_slot_idx[NUM_BANKS];
size_t max_hops;
} HashForgetfulChain;
static BROTLI_INLINE HashForgetfulChain* FN(Self)(HasherHandle handle) {
return (HashForgetfulChain*)&(GetHasherCommon(handle)[1]);
}
static void FN(Initialize)(
HasherHandle handle, const BrotliEncoderParams* params) {
FN(Self)(handle)->max_hops =
(params->quality > 6 ? 7u : 8u) << (params->quality - 4);
}
static void FN(Prepare)(HasherHandle handle, BROTLI_BOOL one_shot,
size_t input_size, const uint8_t* data) {
HashForgetfulChain* self = FN(Self)(handle);
/* Partial preparation is 100 times slower (per socket). */
size_t partial_prepare_threshold = BUCKET_SIZE >> 6;
if (one_shot && input_size <= partial_prepare_threshold) {
size_t i;
for (i = 0; i < input_size; ++i) {
size_t bucket = FN(HashBytes)(&data[i]);
/* See InitEmpty comment. */
self->addr[bucket] = 0xCCCCCCCC;
self->head[bucket] = 0xCCCC;
}
} else {
/* Fill |addr| array with 0xCCCCCCCC value. Because of wrapping, position
processed by hasher never reaches 3GB + 64M; this makes all new chains
to be terminated after the first node. */
memset(self->addr, 0xCC, sizeof(self->addr));
memset(self->head, 0, sizeof(self->head));
}
memset(self->tiny_hash, 0, sizeof(self->tiny_hash));
memset(self->free_slot_idx, 0, sizeof(self->free_slot_idx));
}
static BROTLI_INLINE size_t FN(HashMemAllocInBytes)(
const BrotliEncoderParams* params, BROTLI_BOOL one_shot,
size_t input_size) {
BROTLI_UNUSED(params);
BROTLI_UNUSED(one_shot);
BROTLI_UNUSED(input_size);
return sizeof(HashForgetfulChain);
}
/* Look at 4 bytes at &data[ix & mask]. Compute a hash from these, and prepend
node to corresponding chain; also update tiny_hash for current position. */
static BROTLI_INLINE void FN(Store)(HasherHandle BROTLI_RESTRICT handle,
const uint8_t* BROTLI_RESTRICT data, const size_t mask, const size_t ix) {
HashForgetfulChain* self = FN(Self)(handle);
const size_t key = FN(HashBytes)(&data[ix & mask]);
const size_t bank = key & (NUM_BANKS - 1);
const size_t idx = self->free_slot_idx[bank]++ & (BANK_SIZE - 1);
size_t delta = ix - self->addr[key];
self->tiny_hash[(uint16_t)ix] = (uint8_t)key;
if (delta > 0xFFFF) delta = CAPPED_CHAINS ? 0 : 0xFFFF;
self->banks[bank].slots[idx].delta = (uint16_t)delta;
self->banks[bank].slots[idx].next = self->head[key];
self->addr[key] = (uint32_t)ix;
self->head[key] = (uint16_t)idx;
}
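FN(Store) above is the heart of the forgetful chain: the newest position for a bucket lives in addr[], and each new node records only a 16-bit delta to the previous one, so old entries silently fall off once a bank wraps. A small self-contained sketch of that insertion and chain walk (editorial illustration with toy sizes; the real walk stops once the accumulated backward distance exceeds max_backward, whereas this demo treats a saturated delta as end of chain):

/* Illustrative sketch only: delta-encoded chains per bucket, as described
   in the header comment of this file. */
#include <stdint.h>
#include <stdio.h>

#define DEMO_BUCKETS 8
#define DEMO_SLOTS   16

static uint32_t addr[DEMO_BUCKETS];
static uint16_t head[DEMO_BUCKETS];
static struct { uint16_t delta; uint16_t next; } slots[DEMO_SLOTS];
static uint16_t free_slot;

static void demo_store(uint32_t key, uint32_t pos) {
  uint16_t idx = (uint16_t)(free_slot++ % DEMO_SLOTS);
  uint32_t delta = pos - addr[key];
  if (delta > 0xFFFF) delta = 0xFFFF;   /* saturate, as when CAPPED_CHAINS == 0 */
  slots[idx].delta = (uint16_t)delta;
  slots[idx].next  = head[key];
  addr[key] = pos;
  head[key] = idx;
}

int main(void) {
  uint32_t key = 3;
  addr[key] = 0xCCCCCCCCu;              /* sentinel value, as in FN(Prepare) */
  demo_store(key, 100);
  demo_store(key, 140);
  demo_store(key, 200);

  /* Walk from the newest candidate backwards through the delta chain:
     prints 200, 140, 100 and then stops at the saturated delta. */
  uint32_t pos = addr[key];
  uint16_t slot = head[key];
  for (int hops = 0; hops < 4; ++hops) {
    printf("candidate position: %u\n", pos);
    uint16_t d = slots[slot].delta;
    if (d == 0xFFFF) break;
    pos -= d;
    slot = slots[slot].next;
  }
  return 0;
}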
static BROTLI_INLINE void FN(StoreRange)(HasherHandle handle,
const uint8_t *data, const size_t mask, const size_t ix_start,
const size_t ix_end) {
size_t i;
for (i = ix_start; i < ix_end; ++i) {
FN(Store)(handle, data, mask, i);
}
}
static BROTLI_INLINE void FN(StitchToPreviousBlock)(HasherHandle handle,
size_t num_bytes, size_t position, const uint8_t* ringbuffer,
size_t ring_buffer_mask) {
if (num_bytes >= FN(HashTypeLength)() - 1 && position >= 3) {
/* Prepare the hashes for three last bytes of the last write.
These could not be calculated before, since they require knowledge
of both the previous and the current block. */
FN(Store)(handle, ringbuffer, ring_buffer_mask, position - 3);
FN(Store)(handle, ringbuffer, ring_buffer_mask, position - 2);
FN(Store)(handle, ringbuffer, ring_buffer_mask, position - 1);
}
}
static BROTLI_INLINE void FN(PrepareDistanceCache)(
HasherHandle handle, int* BROTLI_RESTRICT distance_cache) {
BROTLI_UNUSED(handle);
PrepareDistanceCache(distance_cache, NUM_LAST_DISTANCES_TO_CHECK);
}
/* Find a longest backward match of &data[cur_ix] up to the length of
max_length and stores the position cur_ix in the hash table.
REQUIRES: FN(PrepareDistanceCache) must be invoked for current distance cache
values; if this method is invoked repeatedly with the same distance
cache values, it is enough to invoke FN(PrepareDistanceCache) once.
Does not look for matches longer than max_length.
Does not look for matches further away than max_backward.
Writes the best match into |out|.
|out|->score is updated only if a better match is found. */
static BROTLI_INLINE void FN(FindLongestMatch)(HasherHandle handle,
const BrotliDictionary* dictionary, const uint16_t* dictionary_hash,
const uint8_t* BROTLI_RESTRICT data, const size_t ring_buffer_mask,
const int* BROTLI_RESTRICT distance_cache,
const size_t cur_ix, const size_t max_length, const size_t max_backward,
HasherSearchResult* BROTLI_RESTRICT out) {
HashForgetfulChain* self = FN(Self)(handle);
const size_t cur_ix_masked = cur_ix & ring_buffer_mask;
/* Don't accept a short copy from far away. */
score_t min_score = out->score;
score_t best_score = out->score;
size_t best_len = out->len;
size_t i;
const size_t key = FN(HashBytes)(&data[cur_ix_masked]);
const uint8_t tiny_hash = (uint8_t)(key);
out->len = 0;
out->len_code_delta = 0;
/* Try last distance first. */
for (i = 0; i < NUM_LAST_DISTANCES_TO_CHECK; ++i) {
const size_t backward = (size_t)distance_cache[i];
size_t prev_ix = (cur_ix - backward);
/* For distance code 0 we want to consider 2-byte matches. */
if (i > 0 && self->tiny_hash[(uint16_t)prev_ix] != tiny_hash) continue;
if (prev_ix >= cur_ix || backward > max_backward) {
continue;
}
prev_ix &= ring_buffer_mask;
{
const size_t len = FindMatchLengthWithLimit(&data[prev_ix],
&data[cur_ix_masked],
max_length);
if (len >= 2) {
score_t score = BackwardReferenceScoreUsingLastDistance(len);
if (best_score < score) {
if (i != 0) score -= BackwardReferencePenaltyUsingLastDistance(i);
if (best_score < score) {
best_score = score;
best_len = len;
out->len = best_len;
out->distance = backward;
out->score = best_score;
}
}
}
}
}
{
const size_t bank = key & (NUM_BANKS - 1);
size_t backward = 0;
size_t hops = self->max_hops;
size_t delta = cur_ix - self->addr[key];
size_t slot = self->head[key];
while (hops--) {
size_t prev_ix;
size_t last = slot;
backward += delta;
if (backward > max_backward || (CAPPED_CHAINS && !delta)) break;
prev_ix = (cur_ix - backward) & ring_buffer_mask;
slot = self->banks[bank].slots[last].next;
delta = self->banks[bank].slots[last].delta;
if (cur_ix_masked + best_len > ring_buffer_mask ||
prev_ix + best_len > ring_buffer_mask ||
data[cur_ix_masked + best_len] != data[prev_ix + best_len]) {
continue;
}
{
const size_t len = FindMatchLengthWithLimit(&data[prev_ix],
&data[cur_ix_masked],
max_length);
if (len >= 4) {
/* Comparing for >= 3 does not change the semantics, but just saves
for a few unnecessary binary logarithms in backward reference
score, since we are not interested in such short matches. */
score_t score = BackwardReferenceScore(len, backward);
if (best_score < score) {
best_score = score;
best_len = len;
out->len = best_len;
out->distance = backward;
out->score = best_score;
}
}
}
}
FN(Store)(handle, data, ring_buffer_mask, cur_ix);
}
if (out->score == min_score) {
SearchInStaticDictionary(dictionary, dictionary_hash,
handle, &data[cur_ix_masked], max_length, max_backward, out,
BROTLI_FALSE);
}
}
#undef BANK_SIZE
#undef BUCKET_SIZE
#undef CAPPED_CHAINS
#undef HashForgetfulChain

View File

@ -1,266 +0,0 @@
/* NOLINT(build/header_guard) */
/* Copyright 2010 Google Inc. All Rights Reserved.
Distributed under MIT license.
See file LICENSE for detail or copy at https://opensource.org/licenses/MIT
*/
/* template parameters: FN */
/* A (forgetful) hash table to the data seen by the compressor, to
help create backward references to previous data.
This is a hash map of fixed size (bucket_size_) to a ring buffer of
fixed size (block_size_). The ring buffer contains the last block_size_
index positions of the given hash key in the compressed data. */
#define HashLongestMatch HASHER()
static BROTLI_INLINE size_t FN(HashTypeLength)(void) { return 8; }
static BROTLI_INLINE size_t FN(StoreLookahead)(void) { return 8; }
/* HashBytes is the function that chooses the bucket to place the address in. */
static BROTLI_INLINE uint32_t FN(HashBytes)(const uint8_t *data,
const uint64_t mask,
const int shift) {
const uint64_t h = (BROTLI_UNALIGNED_LOAD64(data) & mask) * kHashMul64Long;
/* The higher bits contain more mixture from the multiplication,
so we take our results from there. */
return (uint32_t)(h >> shift);
}
typedef struct HashLongestMatch {
/* Number of hash buckets. */
size_t bucket_size_;
/* Only block_size_ newest backward references are kept,
and the older are forgotten. */
size_t block_size_;
/* Left-shift for computing hash bucket index from hash value. */
int hash_shift_;
/* Mask for selecting the next 4-8 bytes of input */
uint64_t hash_mask_;
/* Mask for accessing entries in a block (in a ring-buffer manner). */
uint32_t block_mask_;
/* --- Dynamic size members --- */
/* Number of entries in a particular bucket. */
/* uint16_t num[bucket_size]; */
/* Buckets containing block_size_ of backward references. */
/* uint32_t* buckets[bucket_size * block_size]; */
} HashLongestMatch;
static BROTLI_INLINE HashLongestMatch* FN(Self)(HasherHandle handle) {
return (HashLongestMatch*)&(GetHasherCommon(handle)[1]);
}
static BROTLI_INLINE uint16_t* FN(Num)(HashLongestMatch* self) {
return (uint16_t*)(&self[1]);
}
static BROTLI_INLINE uint32_t* FN(Buckets)(HashLongestMatch* self) {
return (uint32_t*)(&FN(Num)(self)[self->bucket_size_]);
}
static void FN(Initialize)(
HasherHandle handle, const BrotliEncoderParams* params) {
HasherCommon* common = GetHasherCommon(handle);
HashLongestMatch* self = FN(Self)(handle);
BROTLI_UNUSED(params);
self->hash_shift_ = 64 - common->params.bucket_bits;
self->hash_mask_ = (~((uint64_t)0U)) >> (64 - 8 * common->params.hash_len);
self->bucket_size_ = (size_t)1 << common->params.bucket_bits;
self->block_size_ = (size_t)1 << common->params.block_bits;
self->block_mask_ = (uint32_t)(self->block_size_ - 1);
}
static void FN(Prepare)(HasherHandle handle, BROTLI_BOOL one_shot,
size_t input_size, const uint8_t* data) {
HashLongestMatch* self = FN(Self)(handle);
uint16_t* num = FN(Num)(self);
/* Partial preparation is 100 times slower (per socket). */
size_t partial_prepare_threshold = self->bucket_size_ >> 6;
if (one_shot && input_size <= partial_prepare_threshold) {
size_t i;
for (i = 0; i < input_size; ++i) {
const uint32_t key = FN(HashBytes)(&data[i], self->hash_mask_,
self->hash_shift_);
num[key] = 0;
}
} else {
memset(num, 0, self->bucket_size_ * sizeof(num[0]));
}
}
static BROTLI_INLINE size_t FN(HashMemAllocInBytes)(
const BrotliEncoderParams* params, BROTLI_BOOL one_shot,
size_t input_size) {
size_t bucket_size = (size_t)1 << params->hasher.bucket_bits;
size_t block_size = (size_t)1 << params->hasher.block_bits;
BROTLI_UNUSED(one_shot);
BROTLI_UNUSED(input_size);
return sizeof(HashLongestMatch) + bucket_size * (2 + 4 * block_size);
}
/* Look at 4 bytes at &data[ix & mask].
Compute a hash from these, and store the value of ix at that position. */
static BROTLI_INLINE void FN(Store)(HasherHandle handle, const uint8_t *data,
const size_t mask, const size_t ix) {
HashLongestMatch* self = FN(Self)(handle);
uint16_t* num = FN(Num)(self);
const uint32_t key = FN(HashBytes)(&data[ix & mask], self->hash_mask_,
self->hash_shift_);
const size_t minor_ix = num[key] & self->block_mask_;
const size_t offset =
minor_ix + (key << GetHasherCommon(handle)->params.block_bits);
FN(Buckets)(self)[offset] = (uint32_t)ix;
++num[key];
}
static BROTLI_INLINE void FN(StoreRange)(HasherHandle handle,
const uint8_t *data, const size_t mask, const size_t ix_start,
const size_t ix_end) {
size_t i;
for (i = ix_start; i < ix_end; ++i) {
FN(Store)(handle, data, mask, i);
}
}
static BROTLI_INLINE void FN(StitchToPreviousBlock)(HasherHandle handle,
size_t num_bytes, size_t position, const uint8_t* ringbuffer,
size_t ringbuffer_mask) {
if (num_bytes >= FN(HashTypeLength)() - 1 && position >= 3) {
/* Prepare the hashes for three last bytes of the last write.
These could not be calculated before, since they require knowledge
of both the previous and the current block. */
FN(Store)(handle, ringbuffer, ringbuffer_mask, position - 3);
FN(Store)(handle, ringbuffer, ringbuffer_mask, position - 2);
FN(Store)(handle, ringbuffer, ringbuffer_mask, position - 1);
}
}
static BROTLI_INLINE void FN(PrepareDistanceCache)(
HasherHandle handle, int* BROTLI_RESTRICT distance_cache) {
PrepareDistanceCache(distance_cache,
GetHasherCommon(handle)->params.num_last_distances_to_check);
}
/* Find a longest backward match of &data[cur_ix] up to the length of
max_length and stores the position cur_ix in the hash table.
REQUIRES: FN(PrepareDistanceCache) must be invoked for current distance cache
values; if this method is invoked repeatedly with the same distance
cache values, it is enough to invoke FN(PrepareDistanceCache) once.
Does not look for matches longer than max_length.
Does not look for matches further away than max_backward.
Writes the best match into |out|.
|out|->score is updated only if a better match is found. */
static BROTLI_INLINE void FN(FindLongestMatch)(HasherHandle handle,
const BrotliDictionary* dictionary, const uint16_t* dictionary_hash,
const uint8_t* BROTLI_RESTRICT data, const size_t ring_buffer_mask,
const int* BROTLI_RESTRICT distance_cache, const size_t cur_ix,
const size_t max_length, const size_t max_backward,
HasherSearchResult* BROTLI_RESTRICT out) {
HasherCommon* common = GetHasherCommon(handle);
HashLongestMatch* self = FN(Self)(handle);
uint16_t* num = FN(Num)(self);
uint32_t* buckets = FN(Buckets)(self);
const size_t cur_ix_masked = cur_ix & ring_buffer_mask;
/* Don't accept a short copy from far away. */
score_t min_score = out->score;
score_t best_score = out->score;
size_t best_len = out->len;
size_t i;
out->len = 0;
out->len_code_delta = 0;
/* Try last distance first. */
for (i = 0; i < (size_t)common->params.num_last_distances_to_check; ++i) {
const size_t backward = (size_t)distance_cache[i];
size_t prev_ix = (size_t)(cur_ix - backward);
if (prev_ix >= cur_ix) {
continue;
}
if (BROTLI_PREDICT_FALSE(backward > max_backward)) {
continue;
}
prev_ix &= ring_buffer_mask;
if (cur_ix_masked + best_len > ring_buffer_mask ||
prev_ix + best_len > ring_buffer_mask ||
data[cur_ix_masked + best_len] != data[prev_ix + best_len]) {
continue;
}
{
const size_t len = FindMatchLengthWithLimit(&data[prev_ix],
&data[cur_ix_masked],
max_length);
if (len >= 3 || (len == 2 && i < 2)) {
/* Comparing for >= 2 does not change the semantics, but just saves for
a few unnecessary binary logarithms in backward reference score,
since we are not interested in such short matches. */
score_t score = BackwardReferenceScoreUsingLastDistance(len);
if (best_score < score) {
if (i != 0) score -= BackwardReferencePenaltyUsingLastDistance(i);
if (best_score < score) {
best_score = score;
best_len = len;
out->len = best_len;
out->distance = backward;
out->score = best_score;
}
}
}
}
}
{
const uint32_t key = FN(HashBytes)(
&data[cur_ix_masked], self->hash_mask_, self->hash_shift_);
uint32_t* BROTLI_RESTRICT bucket =
&buckets[key << common->params.block_bits];
const size_t down =
(num[key] > self->block_size_) ?
(num[key] - self->block_size_) : 0u;
for (i = num[key]; i > down;) {
size_t prev_ix = bucket[--i & self->block_mask_];
const size_t backward = cur_ix - prev_ix;
if (BROTLI_PREDICT_FALSE(backward > max_backward)) {
break;
}
prev_ix &= ring_buffer_mask;
if (cur_ix_masked + best_len > ring_buffer_mask ||
prev_ix + best_len > ring_buffer_mask ||
data[cur_ix_masked + best_len] != data[prev_ix + best_len]) {
continue;
}
{
const size_t len = FindMatchLengthWithLimit(&data[prev_ix],
&data[cur_ix_masked],
max_length);
if (len >= 4) {
/* Comparing for >= 3 does not change the semantics, but just saves
for a few unnecessary binary logarithms in backward reference
score, since we are not interested in such short matches. */
score_t score = BackwardReferenceScore(len, backward);
if (best_score < score) {
best_score = score;
best_len = len;
out->len = best_len;
out->distance = backward;
out->score = best_score;
}
}
}
}
bucket[num[key] & self->block_mask_] = (uint32_t)cur_ix;
++num[key];
}
if (min_score == out->score) {
SearchInStaticDictionary(dictionary, dictionary_hash,
handle, &data[cur_ix_masked], max_length, max_backward, out,
BROTLI_FALSE);
}
}
#undef HashLongestMatch
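
The macro templating makes the data layout above harder to see than it is: each hash key owns a counter and a short ring buffer of the most recent positions that hashed to it, and older positions simply fall off the ring. A minimal standalone analogue with hard-coded, made-up sizes (not the brotli code itself):

#include <stdint.h>
#include <stdio.h>
#include <string.h>

#define BUCKET_BITS 10
#define BUCKET_SIZE (1u << BUCKET_BITS)
#define BLOCK_SIZE 8u               /* keep only the 8 newest positions per key */
#define BLOCK_MASK (BLOCK_SIZE - 1u)

static uint16_t num[BUCKET_SIZE];                   /* positions seen per bucket */
static uint32_t buckets[BUCKET_SIZE * BLOCK_SIZE];  /* ring buffers of positions */

static uint32_t HashKey(const uint8_t* data) {
  uint32_t v;
  memcpy(&v, data, sizeof(v));        /* hash 4 bytes; the top bits mix best */
  return (v * 0x1e35a7bdu) >> (32 - BUCKET_BITS);
}

static void Store(const uint8_t* data, size_t ix) {
  uint32_t key = HashKey(&data[ix]);
  buckets[key * BLOCK_SIZE + (num[key] & BLOCK_MASK)] = (uint32_t)ix;
  ++num[key];                         /* the ring silently forgets old entries */
}

static void PrintCandidates(const uint8_t* data, size_t ix) {
  uint32_t key = HashKey(&data[ix]);
  uint32_t down = num[key] > BLOCK_SIZE ? num[key] - BLOCK_SIZE : 0u;
  for (uint32_t i = num[key]; i > down;) {
    printf("candidate position %u\n",
           buckets[key * BLOCK_SIZE + (--i & BLOCK_MASK)]);
  }
}

int main(void) {
  const uint8_t text[] = "abcd....abcd....abcd";
  for (size_t i = 0; i + 4 <= sizeof(text) - 1; ++i) Store(text, i);
  PrintCandidates(text, 16);          /* lists 16, 8 and 0, newest first */
  return 0;
}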


@ -1,258 +0,0 @@
/* NOLINT(build/header_guard) */
/* Copyright 2010 Google Inc. All Rights Reserved.
Distributed under MIT license.
See file LICENSE for detail or copy at https://opensource.org/licenses/MIT
*/
/* template parameters: FN */
/* A (forgetful) hash table to the data seen by the compressor, to
help create backward references to previous data.
This is a hash map of fixed size (bucket_size_) to a ring buffer of
fixed size (block_size_). The ring buffer contains the last block_size_
index positions of the given hash key in the compressed data. */
#define HashLongestMatch HASHER()
static BROTLI_INLINE size_t FN(HashTypeLength)(void) { return 4; }
static BROTLI_INLINE size_t FN(StoreLookahead)(void) { return 4; }
/* HashBytes is the function that chooses the bucket to place the address in. */
static uint32_t FN(HashBytes)(const uint8_t *data, const int shift) {
uint32_t h = BROTLI_UNALIGNED_LOAD32(data) * kHashMul32;
/* The higher bits contain more mixture from the multiplication,
so we take our results from there. */
return (uint32_t)(h >> shift);
}
typedef struct HashLongestMatch {
/* Number of hash buckets. */
size_t bucket_size_;
/* Only block_size_ newest backward references are kept,
and the older are forgotten. */
size_t block_size_;
/* Left-shift for computing hash bucket index from hash value. */
int hash_shift_;
/* Mask for accessing entries in a block (in a ring-buffer manner). */
uint32_t block_mask_;
/* --- Dynamic size members --- */
/* Number of entries in a particular bucket. */
/* uint16_t num[bucket_size]; */
/* Buckets containing block_size_ of backward references. */
/* uint32_t* buckets[bucket_size * block_size]; */
} HashLongestMatch;
static BROTLI_INLINE HashLongestMatch* FN(Self)(HasherHandle handle) {
return (HashLongestMatch*)&(GetHasherCommon(handle)[1]);
}
static BROTLI_INLINE uint16_t* FN(Num)(HashLongestMatch* self) {
return (uint16_t*)(&self[1]);
}
static BROTLI_INLINE uint32_t* FN(Buckets)(HashLongestMatch* self) {
return (uint32_t*)(&FN(Num)(self)[self->bucket_size_]);
}
static void FN(Initialize)(
HasherHandle handle, const BrotliEncoderParams* params) {
HasherCommon* common = GetHasherCommon(handle);
HashLongestMatch* self = FN(Self)(handle);
BROTLI_UNUSED(params);
self->hash_shift_ = 32 - common->params.bucket_bits;
self->bucket_size_ = (size_t)1 << common->params.bucket_bits;
self->block_size_ = (size_t)1 << common->params.block_bits;
self->block_mask_ = (uint32_t)(self->block_size_ - 1);
}
static void FN(Prepare)(HasherHandle handle, BROTLI_BOOL one_shot,
size_t input_size, const uint8_t* data) {
HashLongestMatch* self = FN(Self)(handle);
uint16_t* num = FN(Num)(self);
/* Partial preparation is 100 times slower (per socket). */
size_t partial_prepare_threshold = self->bucket_size_ >> 6;
if (one_shot && input_size <= partial_prepare_threshold) {
size_t i;
for (i = 0; i < input_size; ++i) {
const uint32_t key = FN(HashBytes)(&data[i], self->hash_shift_);
num[key] = 0;
}
} else {
memset(num, 0, self->bucket_size_ * sizeof(num[0]));
}
}
static BROTLI_INLINE size_t FN(HashMemAllocInBytes)(
const BrotliEncoderParams* params, BROTLI_BOOL one_shot,
size_t input_size) {
size_t bucket_size = (size_t)1 << params->hasher.bucket_bits;
size_t block_size = (size_t)1 << params->hasher.block_bits;
BROTLI_UNUSED(one_shot);
BROTLI_UNUSED(input_size);
return sizeof(HashLongestMatch) + bucket_size * (2 + 4 * block_size);
}
/* Look at 4 bytes at &data[ix & mask].
Compute a hash from these, and store the value of ix at that position. */
static BROTLI_INLINE void FN(Store)(HasherHandle handle, const uint8_t* data,
const size_t mask, const size_t ix) {
HashLongestMatch* self = FN(Self)(handle);
uint16_t* num = FN(Num)(self);
const uint32_t key = FN(HashBytes)(&data[ix & mask], self->hash_shift_);
const size_t minor_ix = num[key] & self->block_mask_;
const size_t offset =
minor_ix + (key << GetHasherCommon(handle)->params.block_bits);
FN(Buckets)(self)[offset] = (uint32_t)ix;
++num[key];
}
static BROTLI_INLINE void FN(StoreRange)(HasherHandle handle,
const uint8_t *data, const size_t mask, const size_t ix_start,
const size_t ix_end) {
size_t i;
for (i = ix_start; i < ix_end; ++i) {
FN(Store)(handle, data, mask, i);
}
}
static BROTLI_INLINE void FN(StitchToPreviousBlock)(HasherHandle handle,
size_t num_bytes, size_t position, const uint8_t* ringbuffer,
size_t ringbuffer_mask) {
if (num_bytes >= FN(HashTypeLength)() - 1 && position >= 3) {
/* Prepare the hashes for three last bytes of the last write.
These could not be calculated before, since they require knowledge
of both the previous and the current block. */
FN(Store)(handle, ringbuffer, ringbuffer_mask, position - 3);
FN(Store)(handle, ringbuffer, ringbuffer_mask, position - 2);
FN(Store)(handle, ringbuffer, ringbuffer_mask, position - 1);
}
}
static BROTLI_INLINE void FN(PrepareDistanceCache)(
HasherHandle handle, int* BROTLI_RESTRICT distance_cache) {
PrepareDistanceCache(distance_cache,
GetHasherCommon(handle)->params.num_last_distances_to_check);
}
/* Find a longest backward match of &data[cur_ix] up to the length of
max_length and stores the position cur_ix in the hash table.
REQUIRES: FN(PrepareDistanceCache) must be invoked for current distance cache
values; if this method is invoked repeatedly with the same distance
cache values, it is enough to invoke FN(PrepareDistanceCache) once.
Does not look for matches longer than max_length.
Does not look for matches further away than max_backward.
Writes the best match into |out|.
|out|->score is updated only if a better match is found. */
static BROTLI_INLINE void FN(FindLongestMatch)(HasherHandle handle,
const BrotliDictionary* dictionary, const uint16_t* dictionary_hash,
const uint8_t* BROTLI_RESTRICT data, const size_t ring_buffer_mask,
const int* BROTLI_RESTRICT distance_cache, const size_t cur_ix,
const size_t max_length, const size_t max_backward,
HasherSearchResult* BROTLI_RESTRICT out) {
HasherCommon* common = GetHasherCommon(handle);
HashLongestMatch* self = FN(Self)(handle);
uint16_t* num = FN(Num)(self);
uint32_t* buckets = FN(Buckets)(self);
const size_t cur_ix_masked = cur_ix & ring_buffer_mask;
/* Don't accept a short copy from far away. */
score_t min_score = out->score;
score_t best_score = out->score;
size_t best_len = out->len;
size_t i;
out->len = 0;
out->len_code_delta = 0;
/* Try last distance first. */
for (i = 0; i < (size_t)common->params.num_last_distances_to_check; ++i) {
const size_t backward = (size_t)distance_cache[i];
size_t prev_ix = (size_t)(cur_ix - backward);
if (prev_ix >= cur_ix) {
continue;
}
if (BROTLI_PREDICT_FALSE(backward > max_backward)) {
continue;
}
prev_ix &= ring_buffer_mask;
if (cur_ix_masked + best_len > ring_buffer_mask ||
prev_ix + best_len > ring_buffer_mask ||
data[cur_ix_masked + best_len] != data[prev_ix + best_len]) {
continue;
}
{
const size_t len = FindMatchLengthWithLimit(&data[prev_ix],
&data[cur_ix_masked],
max_length);
if (len >= 3 || (len == 2 && i < 2)) {
/* Comparing for >= 2 does not change the semantics, but just saves for
a few unnecessary binary logarithms in backward reference score,
since we are not interested in such short matches. */
score_t score = BackwardReferenceScoreUsingLastDistance(len);
if (best_score < score) {
if (i != 0) score -= BackwardReferencePenaltyUsingLastDistance(i);
if (best_score < score) {
best_score = score;
best_len = len;
out->len = best_len;
out->distance = backward;
out->score = best_score;
}
}
}
}
}
{
const uint32_t key =
FN(HashBytes)(&data[cur_ix_masked], self->hash_shift_);
uint32_t* BROTLI_RESTRICT bucket =
&buckets[key << common->params.block_bits];
const size_t down =
(num[key] > self->block_size_) ? (num[key] - self->block_size_) : 0u;
for (i = num[key]; i > down;) {
size_t prev_ix = bucket[--i & self->block_mask_];
const size_t backward = cur_ix - prev_ix;
if (BROTLI_PREDICT_FALSE(backward > max_backward)) {
break;
}
prev_ix &= ring_buffer_mask;
if (cur_ix_masked + best_len > ring_buffer_mask ||
prev_ix + best_len > ring_buffer_mask ||
data[cur_ix_masked + best_len] != data[prev_ix + best_len]) {
continue;
}
{
const size_t len = FindMatchLengthWithLimit(&data[prev_ix],
&data[cur_ix_masked],
max_length);
if (len >= 4) {
/* Comparing for >= 3 does not change the semantics, but just saves
for a few unnecessary binary logarithms in backward reference
score, since we are not interested in such short matches. */
score_t score = BackwardReferenceScore(len, backward);
if (best_score < score) {
best_score = score;
best_len = len;
out->len = best_len;
out->distance = backward;
out->score = best_score;
}
}
}
}
bucket[num[key] & self->block_mask_] = (uint32_t)cur_ix;
++num[key];
}
if (min_score == out->score) {
SearchInStaticDictionary(dictionary, dictionary_hash,
handle, &data[cur_ix_masked], max_length, max_backward, out,
BROTLI_FALSE);
}
}
#undef HashLongestMatch
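
Before either longest-match hasher walks its bucket it probes distance_cache, on the theory that structured data keeps reusing a few copy distances. A stripped-down illustration of that "last distances first" check (hypothetical helper, not the brotli API; bounds are kept trivially safe by the caller here):

#include <stddef.h>
#include <stdint.h>
#include <stdio.h>

/* Return the first cached distance whose back-reference matches at least
   min_len bytes at `pos`, or 0 if none does and the hash table must be used. */
static size_t TryLastDistances(const uint8_t* data, size_t pos,
                               const size_t* distance_cache, size_t num_cached,
                               size_t min_len) {
  for (size_t i = 0; i < num_cached; ++i) {
    size_t backward = distance_cache[i];
    if (backward == 0 || backward > pos) continue;  /* would point before start */
    size_t matched = 0;
    while (matched < min_len &&
           data[pos + matched] == data[pos - backward + matched]) {
      ++matched;
    }
    if (matched == min_len) return backward;
  }
  return 0;
}

int main(void) {
  const uint8_t data[] = "ratatat-ratatat";
  const size_t cache[4] = {8, 4, 2, 1};   /* most recently used distances */
  /* Position 8 repeats the prefix at distance 8, so the cache hits first. */
  printf("matched distance: %zu\n", TryLastDistances(data, 8, cache, 4, 4));
  return 0;
}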


@ -1,234 +0,0 @@
/* NOLINT(build/header_guard) */
/* Copyright 2010 Google Inc. All Rights Reserved.
Distributed under MIT license.
See file LICENSE for detail or copy at https://opensource.org/licenses/MIT
*/
/* template parameters: FN, BUCKET_BITS, BUCKET_SWEEP, HASH_LEN,
USE_DICTIONARY
*/
#define HashLongestMatchQuickly HASHER()
#define BUCKET_SIZE (1 << BUCKET_BITS)
#define HASH_MAP_SIZE (4 << BUCKET_BITS)
static BROTLI_INLINE size_t FN(HashTypeLength)(void) { return 8; }
static BROTLI_INLINE size_t FN(StoreLookahead)(void) { return 8; }
/* HashBytes is the function that chooses the bucket to place
the address in. The HashLongestMatch and HashLongestMatchQuickly
classes have separate, different implementations of hashing. */
static uint32_t FN(HashBytes)(const uint8_t* data) {
const uint64_t h = ((BROTLI_UNALIGNED_LOAD64(data) << (64 - 8 * HASH_LEN)) *
kHashMul64);
/* The higher bits contain more mixture from the multiplication,
so we take our results from there. */
return (uint32_t)(h >> (64 - BUCKET_BITS));
}
/* A (forgetful) hash table to the data seen by the compressor, to
help create backward references to previous data.
This is a hash map of fixed size (BUCKET_SIZE). Starting from the
given index, BUCKET_SWEEP buckets are used to store values of a key. */
typedef struct HashLongestMatchQuickly {
uint32_t buckets_[BUCKET_SIZE + BUCKET_SWEEP];
} HashLongestMatchQuickly;
static BROTLI_INLINE HashLongestMatchQuickly* FN(Self)(HasherHandle handle) {
return (HashLongestMatchQuickly*)&(GetHasherCommon(handle)[1]);
}
static void FN(Initialize)(
HasherHandle handle, const BrotliEncoderParams* params) {
BROTLI_UNUSED(handle);
BROTLI_UNUSED(params);
}
static void FN(Prepare)(HasherHandle handle, BROTLI_BOOL one_shot,
size_t input_size, const uint8_t* data) {
HashLongestMatchQuickly* self = FN(Self)(handle);
/* Partial preparation is 100 times slower (per socket). */
size_t partial_prepare_threshold = HASH_MAP_SIZE >> 7;
if (one_shot && input_size <= partial_prepare_threshold) {
size_t i;
for (i = 0; i < input_size; ++i) {
const uint32_t key = FN(HashBytes)(&data[i]);
memset(&self->buckets_[key], 0, BUCKET_SWEEP * sizeof(self->buckets_[0]));
}
} else {
/* It is not strictly necessary to fill this buffer here, but
not filling will make the results of the compression stochastic
(but correct). This is because random data would cause the
system to find accidentally good backward references here and there. */
memset(&self->buckets_[0], 0, sizeof(self->buckets_));
}
}
static BROTLI_INLINE size_t FN(HashMemAllocInBytes)(
const BrotliEncoderParams* params, BROTLI_BOOL one_shot,
size_t input_size) {
BROTLI_UNUSED(params);
BROTLI_UNUSED(one_shot);
BROTLI_UNUSED(input_size);
return sizeof(HashLongestMatchQuickly);
}
/* Look at 5 bytes at &data[ix & mask].
Compute a hash from these, and store the value somewhere within
[ix .. ix+3]. */
static BROTLI_INLINE void FN(Store)(HasherHandle handle,
const uint8_t *data, const size_t mask, const size_t ix) {
const uint32_t key = FN(HashBytes)(&data[ix & mask]);
/* Wiggle the value with the bucket sweep range. */
const uint32_t off = (ix >> 3) % BUCKET_SWEEP;
FN(Self)(handle)->buckets_[key + off] = (uint32_t)ix;
}
static BROTLI_INLINE void FN(StoreRange)(HasherHandle handle,
const uint8_t *data, const size_t mask, const size_t ix_start,
const size_t ix_end) {
size_t i;
for (i = ix_start; i < ix_end; ++i) {
FN(Store)(handle, data, mask, i);
}
}
static BROTLI_INLINE void FN(StitchToPreviousBlock)(
HasherHandle handle, size_t num_bytes, size_t position,
const uint8_t* ringbuffer, size_t ringbuffer_mask) {
if (num_bytes >= FN(HashTypeLength)() - 1 && position >= 3) {
/* Prepare the hashes for three last bytes of the last write.
These could not be calculated before, since they require knowledge
of both the previous and the current block. */
FN(Store)(handle, ringbuffer, ringbuffer_mask, position - 3);
FN(Store)(handle, ringbuffer, ringbuffer_mask, position - 2);
FN(Store)(handle, ringbuffer, ringbuffer_mask, position - 1);
}
}
static BROTLI_INLINE void FN(PrepareDistanceCache)(
HasherHandle handle, int* BROTLI_RESTRICT distance_cache) {
BROTLI_UNUSED(handle);
BROTLI_UNUSED(distance_cache);
}
/* Find a longest backward match of &data[cur_ix & ring_buffer_mask]
up to the length of max_length and stores the position cur_ix in the
hash table.
Does not look for matches longer than max_length.
Does not look for matches further away than max_backward.
Writes the best match into |out|.
|out|->score is updated only if a better match is found. */
static BROTLI_INLINE void FN(FindLongestMatch)(
HasherHandle handle, const BrotliDictionary* dictionary,
const uint16_t* dictionary_hash, const uint8_t* BROTLI_RESTRICT data,
const size_t ring_buffer_mask, const int* BROTLI_RESTRICT distance_cache,
const size_t cur_ix, const size_t max_length, const size_t max_backward,
HasherSearchResult* BROTLI_RESTRICT out) {
HashLongestMatchQuickly* self = FN(Self)(handle);
const size_t best_len_in = out->len;
const size_t cur_ix_masked = cur_ix & ring_buffer_mask;
const uint32_t key = FN(HashBytes)(&data[cur_ix_masked]);
int compare_char = data[cur_ix_masked + best_len_in];
score_t min_score = out->score;
score_t best_score = out->score;
size_t best_len = best_len_in;
size_t cached_backward = (size_t)distance_cache[0];
size_t prev_ix = cur_ix - cached_backward;
out->len_code_delta = 0;
if (prev_ix < cur_ix) {
prev_ix &= (uint32_t)ring_buffer_mask;
if (compare_char == data[prev_ix + best_len]) {
size_t len = FindMatchLengthWithLimit(&data[prev_ix],
&data[cur_ix_masked],
max_length);
if (len >= 4) {
const score_t score = BackwardReferenceScoreUsingLastDistance(len);
if (best_score < score) {
best_score = score;
best_len = len;
out->len = len;
out->distance = cached_backward;
out->score = best_score;
compare_char = data[cur_ix_masked + best_len];
if (BUCKET_SWEEP == 1) {
self->buckets_[key] = (uint32_t)cur_ix;
return;
}
}
}
}
}
if (BUCKET_SWEEP == 1) {
size_t backward;
size_t len;
/* Only one to look for, don't bother to prepare for a loop. */
prev_ix = self->buckets_[key];
self->buckets_[key] = (uint32_t)cur_ix;
backward = cur_ix - prev_ix;
prev_ix &= (uint32_t)ring_buffer_mask;
if (compare_char != data[prev_ix + best_len_in]) {
return;
}
if (BROTLI_PREDICT_FALSE(backward == 0 || backward > max_backward)) {
return;
}
len = FindMatchLengthWithLimit(&data[prev_ix],
&data[cur_ix_masked],
max_length);
if (len >= 4) {
const score_t score = BackwardReferenceScore(len, backward);
if (best_score < score) {
out->len = len;
out->distance = backward;
out->score = score;
return;
}
}
} else {
uint32_t *bucket = self->buckets_ + key;
int i;
prev_ix = *bucket++;
for (i = 0; i < BUCKET_SWEEP; ++i, prev_ix = *bucket++) {
const size_t backward = cur_ix - prev_ix;
size_t len;
prev_ix &= (uint32_t)ring_buffer_mask;
if (compare_char != data[prev_ix + best_len]) {
continue;
}
if (BROTLI_PREDICT_FALSE(backward == 0 || backward > max_backward)) {
continue;
}
len = FindMatchLengthWithLimit(&data[prev_ix],
&data[cur_ix_masked],
max_length);
if (len >= 4) {
const score_t score = BackwardReferenceScore(len, backward);
if (best_score < score) {
best_score = score;
best_len = len;
out->len = best_len;
out->distance = backward;
out->score = score;
compare_char = data[cur_ix_masked + best_len];
}
}
}
}
if (USE_DICTIONARY && min_score == out->score) {
SearchInStaticDictionary(dictionary, dictionary_hash,
handle, &data[cur_ix_masked], max_length, max_backward, out,
BROTLI_TRUE);
}
self->buckets_[key + ((cur_ix >> 3) % BUCKET_SWEEP)] = (uint32_t)cur_ix;
}
#undef HASH_MAP_SIZE
#undef BUCKET_SIZE
#undef HashLongestMatchQuickly
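
HashLongestMatchQuickly is the cheap hasher behind the fast quality levels: each key keeps only BUCKET_SWEEP recent positions and new stores overwrite old ones without any chain to walk. A toy version of the BUCKET_SWEEP == 1 case with made-up sizes shows the trade-off; it finds obvious repeats but forgets colliding ones:

#include <stdint.h>
#include <stdio.h>
#include <string.h>

#define BUCKET_BITS 12
#define BUCKET_SIZE (1u << BUCKET_BITS)

static uint32_t bucket[BUCKET_SIZE];  /* slot holds position + 1; 0 means empty */

static uint32_t Hash5(const uint8_t* p) {
  uint64_t v = 0;
  memcpy(&v, p, 5);                   /* hash 5 bytes, keep the top bits */
  return (uint32_t)((v * 0x1fe35a7bd3579bd3ull) >> (64 - BUCKET_BITS));
}

int main(void) {
  const uint8_t data[] = "the cat sat on the mat; the cat sat again";
  size_t n = sizeof(data) - 1;
  for (size_t ix = 0; ix + 5 <= n; ++ix) {
    uint32_t key = Hash5(&data[ix]);
    uint32_t prev = bucket[key];
    if (prev != 0 && memcmp(&data[prev - 1], &data[ix], 5) == 0) {
      printf("pos %zu copies pos %u (distance %zu)\n",
             ix, prev - 1, ix - (prev - 1));
    }
    bucket[key] = (uint32_t)ix + 1;   /* newer position simply replaces the old */
  }
  return 0;
}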


@ -1,322 +0,0 @@
/* NOLINT(build/header_guard) */
/* Copyright 2016 Google Inc. All Rights Reserved.
Distributed under MIT license.
See file LICENSE for detail or copy at https://opensource.org/licenses/MIT
*/
/* template parameters: FN, BUCKET_BITS, MAX_TREE_COMP_LENGTH,
MAX_TREE_SEARCH_DEPTH */
/* A (forgetful) hash table where each hash bucket contains a binary tree of
sequences whose first 4 bytes share the same hash code.
Each sequence is MAX_TREE_COMP_LENGTH long and is identified by its starting
position in the input data. The binary tree is sorted by the lexicographic
order of the sequences, and it is also a max-heap with respect to the
starting positions. */
#define HashToBinaryTree HASHER()
#define BUCKET_SIZE (1 << BUCKET_BITS)
static size_t FN(HashTypeLength)(void) { return 4; }
static size_t FN(StoreLookahead)(void) { return MAX_TREE_COMP_LENGTH; }
static uint32_t FN(HashBytes)(const uint8_t *data) {
uint32_t h = BROTLI_UNALIGNED_LOAD32(data) * kHashMul32;
/* The higher bits contain more mixture from the multiplication,
so we take our results from there. */
return h >> (32 - BUCKET_BITS);
}
typedef struct HashToBinaryTree {
/* The window size minus 1 */
size_t window_mask_;
/* Hash table that maps the 4-byte hashes of the sequence to the last
position where this hash was found, which is the root of the binary
tree of sequences that share this hash bucket. */
uint32_t buckets_[BUCKET_SIZE];
/* A position used to mark a non-existent sequence, i.e. a tree is empty if
its root is at invalid_pos_ and a node is a leaf if both its children
are at invalid_pos_. */
uint32_t invalid_pos_;
/* --- Dynamic size members --- */
/* The union of the binary trees of each hash bucket. The root of the tree
corresponding to a hash is a sequence starting at buckets_[hash] and
the left and right children of a sequence starting at pos are
forest_[2 * pos] and forest_[2 * pos + 1]. */
/* uint32_t forest[2 * num_nodes] */
} HashToBinaryTree;
static BROTLI_INLINE HashToBinaryTree* FN(Self)(HasherHandle handle) {
return (HashToBinaryTree*)&(GetHasherCommon(handle)[1]);
}
static BROTLI_INLINE uint32_t* FN(Forest)(HashToBinaryTree* self) {
return (uint32_t*)(&self[1]);
}
static void FN(Initialize)(
HasherHandle handle, const BrotliEncoderParams* params) {
HashToBinaryTree* self = FN(Self)(handle);
self->window_mask_ = (1u << params->lgwin) - 1u;
self->invalid_pos_ = (uint32_t)(0 - self->window_mask_);
}
static void FN(Prepare)(HasherHandle handle, BROTLI_BOOL one_shot,
size_t input_size, const uint8_t* data) {
HashToBinaryTree* self = FN(Self)(handle);
uint32_t invalid_pos = self->invalid_pos_;
uint32_t i;
BROTLI_UNUSED(data);
BROTLI_UNUSED(one_shot);
BROTLI_UNUSED(input_size);
for (i = 0; i < BUCKET_SIZE; i++) {
self->buckets_[i] = invalid_pos;
}
}
static BROTLI_INLINE size_t FN(HashMemAllocInBytes)(
const BrotliEncoderParams* params, BROTLI_BOOL one_shot,
size_t input_size) {
size_t num_nodes = (size_t)1 << params->lgwin;
if (one_shot && input_size < num_nodes) {
num_nodes = input_size;
}
return sizeof(HashToBinaryTree) + 2 * sizeof(uint32_t) * num_nodes;
}
static BROTLI_INLINE size_t FN(LeftChildIndex)(HashToBinaryTree* self,
const size_t pos) {
return 2 * (pos & self->window_mask_);
}
static BROTLI_INLINE size_t FN(RightChildIndex)(HashToBinaryTree* self,
const size_t pos) {
return 2 * (pos & self->window_mask_) + 1;
}
/* Stores the hash of the next 4 bytes and in a single tree-traversal, the
hash bucket's binary tree is searched for matches and is re-rooted at the
current position.
If less than MAX_TREE_COMP_LENGTH data is available, the hash bucket of the
current position is searched for matches, but the state of the hash table
is not changed, since we can not know the final sorting order of the
current (incomplete) sequence.
This function must be called with increasing cur_ix positions. */
static BROTLI_INLINE BackwardMatch* FN(StoreAndFindMatches)(
HashToBinaryTree* self, const uint8_t* const BROTLI_RESTRICT data,
const size_t cur_ix, const size_t ring_buffer_mask, const size_t max_length,
const size_t max_backward, size_t* const BROTLI_RESTRICT best_len,
BackwardMatch* BROTLI_RESTRICT matches) {
const size_t cur_ix_masked = cur_ix & ring_buffer_mask;
const size_t max_comp_len =
BROTLI_MIN(size_t, max_length, MAX_TREE_COMP_LENGTH);
const BROTLI_BOOL should_reroot_tree =
TO_BROTLI_BOOL(max_length >= MAX_TREE_COMP_LENGTH);
const uint32_t key = FN(HashBytes)(&data[cur_ix_masked]);
uint32_t* forest = FN(Forest)(self);
size_t prev_ix = self->buckets_[key];
/* The forest index of the rightmost node of the left subtree of the new
root, updated as we traverse and re-root the tree of the hash bucket. */
size_t node_left = FN(LeftChildIndex)(self, cur_ix);
/* The forest index of the leftmost node of the right subtree of the new
root, updated as we traverse and re-root the tree of the hash bucket. */
size_t node_right = FN(RightChildIndex)(self, cur_ix);
/* The match length of the rightmost node of the left subtree of the new
root, updated as we traverse and re-root the tree of the hash bucket. */
size_t best_len_left = 0;
/* The match length of the leftmost node of the right subtree of the new
root, updated as we traverse and re-root the tree of the hash bucket. */
size_t best_len_right = 0;
size_t depth_remaining;
if (should_reroot_tree) {
self->buckets_[key] = (uint32_t)cur_ix;
}
for (depth_remaining = MAX_TREE_SEARCH_DEPTH; ; --depth_remaining) {
const size_t backward = cur_ix - prev_ix;
const size_t prev_ix_masked = prev_ix & ring_buffer_mask;
if (backward == 0 || backward > max_backward || depth_remaining == 0) {
if (should_reroot_tree) {
forest[node_left] = self->invalid_pos_;
forest[node_right] = self->invalid_pos_;
}
break;
}
{
const size_t cur_len = BROTLI_MIN(size_t, best_len_left, best_len_right);
size_t len;
assert(cur_len <= MAX_TREE_COMP_LENGTH);
len = cur_len +
FindMatchLengthWithLimit(&data[cur_ix_masked + cur_len],
&data[prev_ix_masked + cur_len],
max_length - cur_len);
assert(0 == memcmp(&data[cur_ix_masked], &data[prev_ix_masked], len));
if (matches && len > *best_len) {
*best_len = len;
InitBackwardMatch(matches++, backward, len);
}
if (len >= max_comp_len) {
if (should_reroot_tree) {
forest[node_left] = forest[FN(LeftChildIndex)(self, prev_ix)];
forest[node_right] = forest[FN(RightChildIndex)(self, prev_ix)];
}
break;
}
if (data[cur_ix_masked + len] > data[prev_ix_masked + len]) {
best_len_left = len;
if (should_reroot_tree) {
forest[node_left] = (uint32_t)prev_ix;
}
node_left = FN(RightChildIndex)(self, prev_ix);
prev_ix = forest[node_left];
} else {
best_len_right = len;
if (should_reroot_tree) {
forest[node_right] = (uint32_t)prev_ix;
}
node_right = FN(LeftChildIndex)(self, prev_ix);
prev_ix = forest[node_right];
}
}
}
return matches;
}
/* Finds all backward matches of &data[cur_ix & ring_buffer_mask] up to the
length of max_length and stores the position cur_ix in the hash table.
Sets *num_matches to the number of matches found, and stores the found
matches in matches[0] to matches[*num_matches - 1]. The matches will be
sorted by strictly increasing length and (non-strictly) increasing
distance. */
static BROTLI_INLINE size_t FN(FindAllMatches)(HasherHandle handle,
const BrotliDictionary* dictionary, const uint8_t* data,
const size_t ring_buffer_mask, const size_t cur_ix,
const size_t max_length, const size_t max_backward,
const BrotliEncoderParams* params, BackwardMatch* matches) {
BackwardMatch* const orig_matches = matches;
const size_t cur_ix_masked = cur_ix & ring_buffer_mask;
size_t best_len = 1;
const size_t short_match_max_backward =
params->quality != HQ_ZOPFLIFICATION_QUALITY ? 16 : 64;
size_t stop = cur_ix - short_match_max_backward;
uint32_t dict_matches[BROTLI_MAX_STATIC_DICTIONARY_MATCH_LEN + 1];
size_t i;
if (cur_ix < short_match_max_backward) { stop = 0; }
for (i = cur_ix - 1; i > stop && best_len <= 2; --i) {
size_t prev_ix = i;
const size_t backward = cur_ix - prev_ix;
if (BROTLI_PREDICT_FALSE(backward > max_backward)) {
break;
}
prev_ix &= ring_buffer_mask;
if (data[cur_ix_masked] != data[prev_ix] ||
data[cur_ix_masked + 1] != data[prev_ix + 1]) {
continue;
}
{
const size_t len =
FindMatchLengthWithLimit(&data[prev_ix], &data[cur_ix_masked],
max_length);
if (len > best_len) {
best_len = len;
InitBackwardMatch(matches++, backward, len);
}
}
}
if (best_len < max_length) {
matches = FN(StoreAndFindMatches)(FN(Self)(handle), data, cur_ix,
ring_buffer_mask, max_length, max_backward, &best_len, matches);
}
for (i = 0; i <= BROTLI_MAX_STATIC_DICTIONARY_MATCH_LEN; ++i) {
dict_matches[i] = kInvalidMatch;
}
{
size_t minlen = BROTLI_MAX(size_t, 4, best_len + 1);
if (BrotliFindAllStaticDictionaryMatches(dictionary,
&data[cur_ix_masked], minlen, max_length, &dict_matches[0])) {
size_t maxlen = BROTLI_MIN(
size_t, BROTLI_MAX_STATIC_DICTIONARY_MATCH_LEN, max_length);
size_t l;
for (l = minlen; l <= maxlen; ++l) {
uint32_t dict_id = dict_matches[l];
if (dict_id < kInvalidMatch) {
InitDictionaryBackwardMatch(matches++,
max_backward + (dict_id >> 5) + 1, l, dict_id & 31);
}
}
}
}
return (size_t)(matches - orig_matches);
}
/* Stores the hash of the next 4 bytes and re-roots the binary tree at the
current sequence, without returning any matches.
REQUIRES: ix + MAX_TREE_COMP_LENGTH <= end-of-current-block */
static BROTLI_INLINE void FN(Store)(HasherHandle handle, const uint8_t *data,
const size_t mask, const size_t ix) {
HashToBinaryTree* self = FN(Self)(handle);
/* Maximum distance is window size - 16, see section 9.1. of the spec. */
const size_t max_backward = self->window_mask_ - BROTLI_WINDOW_GAP + 1;
FN(StoreAndFindMatches)(self, data, ix, mask, MAX_TREE_COMP_LENGTH,
max_backward, NULL, NULL);
}
static BROTLI_INLINE void FN(StoreRange)(HasherHandle handle,
const uint8_t *data, const size_t mask, const size_t ix_start,
const size_t ix_end) {
size_t i = ix_start;
size_t j = ix_start;
if (ix_start + 63 <= ix_end) {
i = ix_end - 63;
}
if (ix_start + 512 <= i) {
for (; j < i; j += 8) {
FN(Store)(handle, data, mask, j);
}
}
for (; i < ix_end; ++i) {
FN(Store)(handle, data, mask, i);
}
}
static BROTLI_INLINE void FN(StitchToPreviousBlock)(HasherHandle handle,
size_t num_bytes, size_t position, const uint8_t* ringbuffer,
size_t ringbuffer_mask) {
HashToBinaryTree* self = FN(Self)(handle);
if (num_bytes >= FN(HashTypeLength)() - 1 &&
position >= MAX_TREE_COMP_LENGTH) {
/* Store the last `MAX_TREE_COMP_LENGTH - 1` positions in the hasher.
These could not be calculated before, since they require knowledge
of both the previous and the current block. */
const size_t i_start = position - MAX_TREE_COMP_LENGTH + 1;
const size_t i_end = BROTLI_MIN(size_t, position, i_start + num_bytes);
size_t i;
for (i = i_start; i < i_end; ++i) {
/* Maximum distance is window size - 16, see section 9.1. of the spec.
Furthermore, we have to make sure that we don't look further back
from the start of the next block than the window size, otherwise we
could access already overwritten areas of the ring-buffer. */
const size_t max_backward =
self->window_mask_ - BROTLI_MAX(size_t,
BROTLI_WINDOW_GAP - 1,
position - i);
/* We know that i + MAX_TREE_COMP_LENGTH <= position + num_bytes, i.e. the
end of the current block and that we have at least
MAX_TREE_COMP_LENGTH tail in the ring-buffer. */
FN(StoreAndFindMatches)(self, ringbuffer, i, ringbuffer_mask,
MAX_TREE_COMP_LENGTH, max_backward, NULL, NULL);
}
}
}
#undef BUCKET_SIZE
#undef HashToBinaryTree
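
The binary trees here are never allocated node by node: the left and right children of the sequence starting at position pos always occupy two fixed slots of the flat forest array, indexed exactly as LeftChildIndex and RightChildIndex do above. A small sketch of just that slot arithmetic, with a made-up window size and no matching logic:

#include <stdint.h>
#include <stdio.h>

#define LGWIN 6                         /* toy window: 64 positions */
#define WINDOW_MASK ((1u << LGWIN) - 1u)
#define INVALID_POS ((uint32_t)-1)

static uint32_t forest[2u << LGWIN];    /* two slots (left, right) per position */

static size_t LeftChild(size_t pos)  { return 2 * (pos & WINDOW_MASK); }
static size_t RightChild(size_t pos) { return 2 * (pos & WINDOW_MASK) + 1; }

int main(void) {
  for (size_t i = 0; i < sizeof(forest) / sizeof(forest[0]); ++i) {
    forest[i] = INVALID_POS;            /* empty trees: every child is invalid */
  }
  /* Pretend position 40 is a bucket root and 12 / 51 compare below / above it. */
  forest[LeftChild(40)]  = 12;
  forest[RightChild(40)] = 51;
  printf("children of 40: left=%u right=%u\n",
         forest[LeftChild(40)], forest[RightChild(40)]);
  /* Position 40 + 64 maps to the same slots, so old nodes are forgotten
     as the window wraps around. */
  printf("LeftChild(104)=%zu equals LeftChild(40)=%zu\n",
         LeftChild(104), LeftChild(40));
  return 0;
}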


@ -1,60 +0,0 @@
/* Copyright 2013 Google Inc. All Rights Reserved.
Distributed under MIT license.
See file LICENSE for detail or copy at https://opensource.org/licenses/MIT
*/
/* Models the histograms of literals, commands and distance codes. */
#ifndef BROTLI_ENC_HISTOGRAM_H_
#define BROTLI_ENC_HISTOGRAM_H_
#include <string.h> /* memset */
#include "../common/constants.h"
#include <brotli/types.h>
#include "./block_splitter.h"
#include "./command.h"
#include "./context.h"
#include "./port.h"
#if defined(__cplusplus) || defined(c_plusplus)
extern "C" {
#endif
#define FN(X) X ## Literal
#define DATA_SIZE BROTLI_NUM_LITERAL_SYMBOLS
#define DataType uint8_t
#include "./histogram_inc.h" /* NOLINT(build/include) */
#undef DataType
#undef DATA_SIZE
#undef FN
#define FN(X) X ## Command
#define DataType uint16_t
#define DATA_SIZE BROTLI_NUM_COMMAND_SYMBOLS
#include "./histogram_inc.h" /* NOLINT(build/include) */
#undef DATA_SIZE
#undef FN
#define FN(X) X ## Distance
#define DATA_SIZE BROTLI_NUM_DISTANCE_SYMBOLS
#include "./histogram_inc.h" /* NOLINT(build/include) */
#undef DataType
#undef DATA_SIZE
#undef FN
BROTLI_INTERNAL void BrotliBuildHistogramsWithContext(
const Command* cmds, const size_t num_commands,
const BlockSplit* literal_split, const BlockSplit* insert_and_copy_split,
const BlockSplit* dist_split, const uint8_t* ringbuffer, size_t pos,
size_t mask, uint8_t prev_byte, uint8_t prev_byte2,
const ContextType* context_modes, HistogramLiteral* literal_histograms,
HistogramCommand* insert_and_copy_histograms,
HistogramDistance* copy_dist_histograms);
#if defined(__cplusplus) || defined(c_plusplus)
} /* extern "C" */
#endif
#endif /* BROTLI_ENC_HISTOGRAM_H_ */


@ -1,51 +0,0 @@
/* NOLINT(build/header_guard) */
/* Copyright 2013 Google Inc. All Rights Reserved.
Distributed under MIT license.
See file LICENSE for detail or copy at https://opensource.org/licenses/MIT
*/
/* template parameters: Histogram, DATA_SIZE, DataType */
/* A simple container for histograms of data in blocks. */
typedef struct FN(Histogram) {
uint32_t data_[DATA_SIZE];
size_t total_count_;
double bit_cost_;
} FN(Histogram);
static BROTLI_INLINE void FN(HistogramClear)(FN(Histogram)* self) {
memset(self->data_, 0, sizeof(self->data_));
self->total_count_ = 0;
self->bit_cost_ = HUGE_VAL;
}
static BROTLI_INLINE void FN(ClearHistograms)(
FN(Histogram)* array, size_t length) {
size_t i;
for (i = 0; i < length; ++i) FN(HistogramClear)(array + i);
}
static BROTLI_INLINE void FN(HistogramAdd)(FN(Histogram)* self, size_t val) {
++self->data_[val];
++self->total_count_;
}
static BROTLI_INLINE void FN(HistogramAddVector)(FN(Histogram)* self,
const DataType *p, size_t n) {
self->total_count_ += n;
n += 1;
while (--n) ++self->data_[*p++];
}
static BROTLI_INLINE void FN(HistogramAddHistogram)(FN(Histogram)* self,
const FN(Histogram)* v) {
size_t i;
self->total_count_ += v->total_count_;
for (i = 0; i < DATA_SIZE; ++i) {
self->data_[i] += v->data_[i];
}
}
static BROTLI_INLINE size_t FN(HistogramDataSize)(void) { return DATA_SIZE; }
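
histogram.h instantiates the template above three times by redefining FN and DATA_SIZE before each include, so FN(Histogram) becomes HistogramLiteral, HistogramCommand and HistogramDistance. The miniature below reproduces the token-pasting idea with a single macro and hypothetical names; brotli itself does it by re-including the header rather than with one macro:

#include <stdint.h>
#include <stdio.h>
#include <string.h>

/* What the "_inc.h" template provides, boiled down to two operations. */
#define DEFINE_HISTOGRAM(NAME, DATA_SIZE, DataType)            \
  typedef struct NAME {                                        \
    uint32_t data_[DATA_SIZE];                                 \
    size_t total_count_;                                       \
  } NAME;                                                      \
  static void NAME##Clear(NAME* self) {                        \
    memset(self->data_, 0, sizeof(self->data_));               \
    self->total_count_ = 0;                                    \
  }                                                            \
  static void NAME##Add(NAME* self, DataType val) {            \
    ++self->data_[val];                                        \
    ++self->total_count_;                                      \
  }

/* What histogram.h does once per symbol class. */
DEFINE_HISTOGRAM(HistogramLiteral, 256, uint8_t)

int main(void) {
  HistogramLiteral h;
  HistogramLiteralClear(&h);
  const uint8_t text[] = "histogram";
  for (size_t i = 0; i < sizeof(text) - 1; ++i) HistogramLiteralAdd(&h, text[i]);
  printf("count('g') = %u, total = %zu\n", h.data_['g'], h.total_count_);
  return 0;
}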


@ -1,30 +0,0 @@
/* Copyright 2013 Google Inc. All Rights Reserved.
Distributed under MIT license.
See file LICENSE for detail or copy at https://opensource.org/licenses/MIT
*/
/* Literal cost model to allow backward reference replacement to be efficient.
*/
#ifndef BROTLI_ENC_LITERAL_COST_H_
#define BROTLI_ENC_LITERAL_COST_H_
#include <brotli/types.h>
#include "./port.h"
#if defined(__cplusplus) || defined(c_plusplus)
extern "C" {
#endif
/* Estimates how many bits the literals in the interval [pos, pos + len) in the
ring-buffer (data, mask) will take entropy coded and writes these estimates
to the cost[0..len) array. */
BROTLI_INTERNAL void BrotliEstimateBitCostsForLiterals(
size_t pos, size_t len, size_t mask, const uint8_t *data, float *cost);
#if defined(__cplusplus) || defined(c_plusplus)
} /* extern "C" */
#endif
#endif /* BROTLI_ENC_LITERAL_COST_H_ */


@ -1,63 +0,0 @@
/* Copyright 2016 Google Inc. All Rights Reserved.
Distributed under MIT license.
See file LICENSE for detail or copy at https://opensource.org/licenses/MIT
*/
/* Macros for memory management. */
#ifndef BROTLI_ENC_MEMORY_H_
#define BROTLI_ENC_MEMORY_H_
#include <brotli/types.h>
#include "./port.h"
#if defined(__cplusplus) || defined(c_plusplus)
extern "C" {
#endif
#if !defined(BROTLI_ENCODER_CLEANUP_ON_OOM) && \
!defined(BROTLI_ENCODER_EXIT_ON_OOM)
#define BROTLI_ENCODER_EXIT_ON_OOM
#endif
typedef struct MemoryManager {
brotli_alloc_func alloc_func;
brotli_free_func free_func;
void* opaque;
#if !defined(BROTLI_ENCODER_EXIT_ON_OOM)
BROTLI_BOOL is_oom;
size_t perm_allocated;
size_t new_allocated;
size_t new_freed;
void* pointers[256];
#endif /* BROTLI_ENCODER_EXIT_ON_OOM */
} MemoryManager;
BROTLI_INTERNAL void BrotliInitMemoryManager(
MemoryManager* m, brotli_alloc_func alloc_func, brotli_free_func free_func,
void* opaque);
BROTLI_INTERNAL void* BrotliAllocate(MemoryManager* m, size_t n);
#define BROTLI_ALLOC(M, T, N) \
((N) > 0 ? ((T*)BrotliAllocate((M), (N) * sizeof(T))) : NULL)
BROTLI_INTERNAL void BrotliFree(MemoryManager* m, void* p);
#define BROTLI_FREE(M, P) { \
BrotliFree((M), (P)); \
P = NULL; \
}
#if defined(BROTLI_ENCODER_EXIT_ON_OOM)
#define BROTLI_IS_OOM(M) (!!0)
#else /* BROTLI_ENCODER_EXIT_ON_OOM */
#define BROTLI_IS_OOM(M) (!!(M)->is_oom)
#endif /* BROTLI_ENCODER_EXIT_ON_OOM */
BROTLI_INTERNAL void BrotliWipeOutMemoryManager(MemoryManager* m);
#if defined(__cplusplus) || defined(c_plusplus)
} /* extern "C" */
#endif
#endif /* BROTLI_ENC_MEMORY_H_ */
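
The allocator above compiles in one of two modes: with BROTLI_ENCODER_EXIT_ON_OOM the process simply terminates on allocation failure and BROTLI_IS_OOM is a constant false, otherwise the manager records the failure so callers can unwind and free what it tracked. BrotliAllocate itself lives in a .c file that is not part of this diff; the sketch below is a standalone analogue of the exit-on-OOM flavour only:

#include <stdio.h>
#include <stdlib.h>

/* Allocate or terminate, so callers never check the return value. This mirrors
   the BROTLI_ENCODER_EXIT_ON_OOM configuration, not the tracking one. */
static void* XAllocate(size_t n) {
  void* p = malloc(n);
  if (p == NULL) {
    fprintf(stderr, "out of memory (%zu bytes)\n", n);
    exit(EXIT_FAILURE);
  }
  return p;
}

#define XALLOC(T, N) ((N) > 0 ? (T*)XAllocate((N) * sizeof(T)) : NULL)
#define XFREE(P) do { free(P); (P) = NULL; } while (0)

int main(void) {
  double* cost = XALLOC(double, 1024);  /* no error check needed by design */
  cost[0] = 1.5;
  printf("cost[0] = %g\n", cost[0]);
  XFREE(cost);
  return 0;
}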


@ -1,100 +0,0 @@
/* Copyright 2015 Google Inc. All Rights Reserved.
Distributed under MIT license.
See file LICENSE for detail or copy at https://opensource.org/licenses/MIT
*/
/* Algorithms for distributing the literals and commands of a metablock between
block types and contexts. */
#ifndef BROTLI_ENC_METABLOCK_H_
#define BROTLI_ENC_METABLOCK_H_
#include <brotli/types.h>
#include "./block_splitter.h"
#include "./command.h"
#include "./context.h"
#include "./histogram.h"
#include "./memory.h"
#include "./port.h"
#include "./quality.h"
#if defined(__cplusplus) || defined(c_plusplus)
extern "C" {
#endif
typedef struct MetaBlockSplit {
BlockSplit literal_split;
BlockSplit command_split;
BlockSplit distance_split;
uint32_t* literal_context_map;
size_t literal_context_map_size;
uint32_t* distance_context_map;
size_t distance_context_map_size;
HistogramLiteral* literal_histograms;
size_t literal_histograms_size;
HistogramCommand* command_histograms;
size_t command_histograms_size;
HistogramDistance* distance_histograms;
size_t distance_histograms_size;
} MetaBlockSplit;
static BROTLI_INLINE void InitMetaBlockSplit(MetaBlockSplit* mb) {
BrotliInitBlockSplit(&mb->literal_split);
BrotliInitBlockSplit(&mb->command_split);
BrotliInitBlockSplit(&mb->distance_split);
mb->literal_context_map = 0;
mb->literal_context_map_size = 0;
mb->distance_context_map = 0;
mb->distance_context_map_size = 0;
mb->literal_histograms = 0;
mb->literal_histograms_size = 0;
mb->command_histograms = 0;
mb->command_histograms_size = 0;
mb->distance_histograms = 0;
mb->distance_histograms_size = 0;
}
static BROTLI_INLINE void DestroyMetaBlockSplit(
MemoryManager* m, MetaBlockSplit* mb) {
BrotliDestroyBlockSplit(m, &mb->literal_split);
BrotliDestroyBlockSplit(m, &mb->command_split);
BrotliDestroyBlockSplit(m, &mb->distance_split);
BROTLI_FREE(m, mb->literal_context_map);
BROTLI_FREE(m, mb->distance_context_map);
BROTLI_FREE(m, mb->literal_histograms);
BROTLI_FREE(m, mb->command_histograms);
BROTLI_FREE(m, mb->distance_histograms);
}
/* Uses the slow shortest-path block splitter and does context clustering. */
BROTLI_INTERNAL void BrotliBuildMetaBlock(MemoryManager* m,
const uint8_t* ringbuffer,
const size_t pos,
const size_t mask,
const BrotliEncoderParams* params,
uint8_t prev_byte,
uint8_t prev_byte2,
const Command* cmds,
size_t num_commands,
ContextType literal_context_mode,
MetaBlockSplit* mb);
/* Uses a fast greedy block splitter that tries to merge current block with the
last or the second last block and uses a static context clustering which
is the same for all block types. */
BROTLI_INTERNAL void BrotliBuildMetaBlockGreedy(
MemoryManager* m, const uint8_t* ringbuffer, size_t pos, size_t mask,
uint8_t prev_byte, uint8_t prev_byte2, ContextType literal_context_mode,
size_t num_contexts, const uint32_t* static_context_map,
const Command* commands, size_t n_commands, MetaBlockSplit* mb);
BROTLI_INTERNAL void BrotliOptimizeHistograms(size_t num_direct_distance_codes,
size_t distance_postfix_bits,
MetaBlockSplit* mb);
#if defined(__cplusplus) || defined(c_plusplus)
} /* extern "C" */
#endif
#endif /* BROTLI_ENC_METABLOCK_H_ */


@ -1,183 +0,0 @@
/* NOLINT(build/header_guard) */
/* Copyright 2015 Google Inc. All Rights Reserved.
Distributed under MIT license.
See file LICENSE for detail or copy at https://opensource.org/licenses/MIT
*/
/* template parameters: FN */
#define HistogramType FN(Histogram)
/* Greedy block splitter for one block category (literal, command or distance).
*/
typedef struct FN(BlockSplitter) {
/* Alphabet size of particular block category. */
size_t alphabet_size_;
/* We collect at least this many symbols for each block. */
size_t min_block_size_;
/* We merge histograms A and B if
entropy(A+B) < entropy(A) + entropy(B) + split_threshold_,
where A is the current histogram and B is the histogram of the last or the
second last block type. */
double split_threshold_;
size_t num_blocks_;
BlockSplit* split_; /* not owned */
HistogramType* histograms_; /* not owned */
size_t* histograms_size_; /* not owned */
/* The number of symbols that we want to collect before deciding on whether
or not to merge the block with a previous one or emit a new block. */
size_t target_block_size_;
/* The number of symbols in the current histogram. */
size_t block_size_;
/* Offset of the current histogram. */
size_t curr_histogram_ix_;
/* Offset of the histograms of the previous two block types. */
size_t last_histogram_ix_[2];
/* Entropy of the previous two block types. */
double last_entropy_[2];
/* The number of times we merged the current block with the last one. */
size_t merge_last_count_;
} FN(BlockSplitter);
static void FN(InitBlockSplitter)(
MemoryManager* m, FN(BlockSplitter)* self, size_t alphabet_size,
size_t min_block_size, double split_threshold, size_t num_symbols,
BlockSplit* split, HistogramType** histograms, size_t* histograms_size) {
size_t max_num_blocks = num_symbols / min_block_size + 1;
/* We have to allocate one more histogram than the maximum number of block
types for the current histogram when the meta-block is too big. */
size_t max_num_types =
BROTLI_MIN(size_t, max_num_blocks, BROTLI_MAX_NUMBER_OF_BLOCK_TYPES + 1);
self->alphabet_size_ = alphabet_size;
self->min_block_size_ = min_block_size;
self->split_threshold_ = split_threshold;
self->num_blocks_ = 0;
self->split_ = split;
self->histograms_size_ = histograms_size;
self->target_block_size_ = min_block_size;
self->block_size_ = 0;
self->curr_histogram_ix_ = 0;
self->merge_last_count_ = 0;
BROTLI_ENSURE_CAPACITY(m, uint8_t,
split->types, split->types_alloc_size, max_num_blocks);
BROTLI_ENSURE_CAPACITY(m, uint32_t,
split->lengths, split->lengths_alloc_size, max_num_blocks);
if (BROTLI_IS_OOM(m)) return;
self->split_->num_blocks = max_num_blocks;
assert(*histograms == 0);
*histograms_size = max_num_types;
*histograms = BROTLI_ALLOC(m, HistogramType, *histograms_size);
self->histograms_ = *histograms;
if (BROTLI_IS_OOM(m)) return;
/* Clear only current histogram. */
FN(HistogramClear)(&self->histograms_[0]);
self->last_histogram_ix_[0] = self->last_histogram_ix_[1] = 0;
}
/* Does either of three things:
(1) emits the current block with a new block type;
(2) emits the current block with the type of the second last block;
(3) merges the current block with the last block. */
static void FN(BlockSplitterFinishBlock)(
FN(BlockSplitter)* self, BROTLI_BOOL is_final) {
BlockSplit* split = self->split_;
double* last_entropy = self->last_entropy_;
HistogramType* histograms = self->histograms_;
self->block_size_ =
BROTLI_MAX(size_t, self->block_size_, self->min_block_size_);
if (self->num_blocks_ == 0) {
/* Create first block. */
split->lengths[0] = (uint32_t)self->block_size_;
split->types[0] = 0;
last_entropy[0] =
BitsEntropy(histograms[0].data_, self->alphabet_size_);
last_entropy[1] = last_entropy[0];
++self->num_blocks_;
++split->num_types;
++self->curr_histogram_ix_;
if (self->curr_histogram_ix_ < *self->histograms_size_)
FN(HistogramClear)(&histograms[self->curr_histogram_ix_]);
self->block_size_ = 0;
} else if (self->block_size_ > 0) {
double entropy = BitsEntropy(histograms[self->curr_histogram_ix_].data_,
self->alphabet_size_);
HistogramType combined_histo[2];
double combined_entropy[2];
double diff[2];
size_t j;
for (j = 0; j < 2; ++j) {
size_t last_histogram_ix = self->last_histogram_ix_[j];
combined_histo[j] = histograms[self->curr_histogram_ix_];
FN(HistogramAddHistogram)(&combined_histo[j],
&histograms[last_histogram_ix]);
combined_entropy[j] = BitsEntropy(
&combined_histo[j].data_[0], self->alphabet_size_);
diff[j] = combined_entropy[j] - entropy - last_entropy[j];
}
if (split->num_types < BROTLI_MAX_NUMBER_OF_BLOCK_TYPES &&
diff[0] > self->split_threshold_ &&
diff[1] > self->split_threshold_) {
/* Create new block. */
split->lengths[self->num_blocks_] = (uint32_t)self->block_size_;
split->types[self->num_blocks_] = (uint8_t)split->num_types;
self->last_histogram_ix_[1] = self->last_histogram_ix_[0];
self->last_histogram_ix_[0] = (uint8_t)split->num_types;
last_entropy[1] = last_entropy[0];
last_entropy[0] = entropy;
++self->num_blocks_;
++split->num_types;
++self->curr_histogram_ix_;
if (self->curr_histogram_ix_ < *self->histograms_size_)
FN(HistogramClear)(&histograms[self->curr_histogram_ix_]);
self->block_size_ = 0;
self->merge_last_count_ = 0;
self->target_block_size_ = self->min_block_size_;
} else if (diff[1] < diff[0] - 20.0) {
/* Combine this block with second last block. */
split->lengths[self->num_blocks_] = (uint32_t)self->block_size_;
split->types[self->num_blocks_] = split->types[self->num_blocks_ - 2];
BROTLI_SWAP(size_t, self->last_histogram_ix_, 0, 1);
histograms[self->last_histogram_ix_[0]] = combined_histo[1];
last_entropy[1] = last_entropy[0];
last_entropy[0] = combined_entropy[1];
++self->num_blocks_;
self->block_size_ = 0;
FN(HistogramClear)(&histograms[self->curr_histogram_ix_]);
self->merge_last_count_ = 0;
self->target_block_size_ = self->min_block_size_;
} else {
/* Combine this block with last block. */
split->lengths[self->num_blocks_ - 1] += (uint32_t)self->block_size_;
histograms[self->last_histogram_ix_[0]] = combined_histo[0];
last_entropy[0] = combined_entropy[0];
if (split->num_types == 1) {
last_entropy[1] = last_entropy[0];
}
self->block_size_ = 0;
FN(HistogramClear)(&histograms[self->curr_histogram_ix_]);
if (++self->merge_last_count_ > 1) {
self->target_block_size_ += self->min_block_size_;
}
}
}
if (is_final) {
*self->histograms_size_ = split->num_types;
split->num_blocks = self->num_blocks_;
}
}
/* Adds the next symbol to the current histogram. When the current histogram
reaches the target size, decides on merging the block. */
static void FN(BlockSplitterAddSymbol)(FN(BlockSplitter)* self, size_t symbol) {
FN(HistogramAdd)(&self->histograms_[self->curr_histogram_ix_], symbol);
++self->block_size_;
if (self->block_size_ == self->target_block_size_) {
FN(BlockSplitterFinishBlock)(self, /* is_final = */ BROTLI_FALSE);
}
}
#undef HistogramType
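
The whole splitter turns on the inequality quoted in the struct comment: merge two blocks when the combined histogram would not cost much more to encode than the two kept apart. BitsEntropy is defined elsewhere in the removed sources, so the sketch below substitutes a plain Shannon estimate (each count times -log2 of its symbol's frequency, summed) and a made-up threshold just to show the decision:

#include <math.h>
#include <stdio.h>

/* Rough bits needed to entropy-code a histogram; a stand-in for BitsEntropy. */
static double BitsCost(const unsigned* histo, int n) {
  double total = 0.0, bits = 0.0;
  for (int i = 0; i < n; ++i) total += histo[i];
  if (total == 0.0) return 0.0;
  for (int i = 0; i < n; ++i) {
    if (histo[i]) bits += histo[i] * -log2(histo[i] / total);
  }
  return bits;
}

int main(void) {
  enum { N = 4 };
  unsigned a[N]  = {90, 5, 3, 2};   /* current block's histogram */
  unsigned b[N]  = {80, 10, 6, 4};  /* last block's histogram, similar shape */
  unsigned ab[N];
  for (int i = 0; i < N; ++i) ab[i] = a[i] + b[i];

  double split_threshold = 100.0;   /* arbitrary demo value, in bits */
  double cost_a = BitsCost(a, N), cost_b = BitsCost(b, N), cost_ab = BitsCost(ab, N);
  printf("A=%.1f bits  B=%.1f bits  A+B=%.1f bits\n", cost_a, cost_b, cost_ab);
  puts(cost_ab < cost_a + cost_b + split_threshold ? "merge the blocks"
                                                   : "keep them split");
  return 0;
}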


@ -1,160 +0,0 @@
/* Copyright 2013 Google Inc. All Rights Reserved.
Distributed under MIT license.
See file LICENSE for detail or copy at https://opensource.org/licenses/MIT
*/
/* Macros for endianness, branch prediction and unaligned loads and stores. */
#ifndef BROTLI_ENC_PORT_H_
#define BROTLI_ENC_PORT_H_
#include <assert.h>
#include <string.h> /* memcpy */
#include <brotli/port.h>
#include <brotli/types.h>
#if defined OS_LINUX || defined OS_CYGWIN
#include <endian.h>
#elif defined OS_FREEBSD
#include <machine/endian.h>
#elif defined OS_MACOSX
#include <machine/endian.h>
/* Let's try and follow the Linux convention */
#define __BYTE_ORDER BYTE_ORDER
#define __LITTLE_ENDIAN LITTLE_ENDIAN
#endif
/* define the macro IS_LITTLE_ENDIAN
using the above endian definitions from endian.h if
endian.h was included */
#ifdef __BYTE_ORDER
#if __BYTE_ORDER == __LITTLE_ENDIAN
#define IS_LITTLE_ENDIAN
#endif
#else
#if defined(__LITTLE_ENDIAN__)
#define IS_LITTLE_ENDIAN
#endif
#endif /* __BYTE_ORDER */
#if defined(__BYTE_ORDER__) && (__BYTE_ORDER__ == __ORDER_LITTLE_ENDIAN__)
#define IS_LITTLE_ENDIAN
#endif
/* Enable little-endian optimization for x64 architecture on Windows. */
#if (defined(_WIN32) || defined(_WIN64)) && defined(_M_X64)
#define IS_LITTLE_ENDIAN
#endif
/* Portable handling of unaligned loads, stores, and copies.
On some platforms, like ARM, the copy functions can be more efficient
    than a load and a store. */
#if defined(ARCH_PIII) || \
defined(ARCH_ATHLON) || defined(ARCH_K8) || defined(_ARCH_PPC)
/* x86 and x86-64 can perform unaligned loads/stores directly;
modern PowerPC hardware can also do unaligned integer loads and stores;
but note: the FPU still sends unaligned loads and stores to a trap handler!
*/
#define BROTLI_UNALIGNED_LOAD32(_p) (*(const uint32_t *)(_p))
#define BROTLI_UNALIGNED_LOAD64(_p) (*(const uint64_t *)(_p))
#define BROTLI_UNALIGNED_STORE32(_p, _val) \
(*(uint32_t *)(_p) = (_val))
#define BROTLI_UNALIGNED_STORE64(_p, _val) \
(*(uint64_t *)(_p) = (_val))
#elif defined(__arm__) && \
!defined(__ARM_ARCH_5__) && \
!defined(__ARM_ARCH_5T__) && \
!defined(__ARM_ARCH_5TE__) && \
!defined(__ARM_ARCH_5TEJ__) && \
!defined(__ARM_ARCH_6__) && \
!defined(__ARM_ARCH_6J__) && \
!defined(__ARM_ARCH_6K__) && \
!defined(__ARM_ARCH_6Z__) && \
!defined(__ARM_ARCH_6ZK__) && \
!defined(__ARM_ARCH_6T2__)
/* ARMv7 and newer support native unaligned accesses, but only of 16-bit
and 32-bit values (not 64-bit); older versions either raise a fatal signal,
do an unaligned read and rotate the words around a bit, or do the reads very
slowly (trip through kernel mode). */
#define BROTLI_UNALIGNED_LOAD32(_p) (*(const uint32_t *)(_p))
#define BROTLI_UNALIGNED_STORE32(_p, _val) \
(*(uint32_t *)(_p) = (_val))
static BROTLI_INLINE uint64_t BROTLI_UNALIGNED_LOAD64(const void *p) {
uint64_t t;
memcpy(&t, p, sizeof t);
return t;
}
static BROTLI_INLINE void BROTLI_UNALIGNED_STORE64(void *p, uint64_t v) {
memcpy(p, &v, sizeof v);
}
#else
/* These functions are provided for architectures that don't support */
/* unaligned loads and stores. */
static BROTLI_INLINE uint32_t BROTLI_UNALIGNED_LOAD32(const void *p) {
uint32_t t;
memcpy(&t, p, sizeof t);
return t;
}
static BROTLI_INLINE uint64_t BROTLI_UNALIGNED_LOAD64(const void *p) {
uint64_t t;
memcpy(&t, p, sizeof t);
return t;
}
static BROTLI_INLINE void BROTLI_UNALIGNED_STORE32(void *p, uint32_t v) {
memcpy(p, &v, sizeof v);
}
static BROTLI_INLINE void BROTLI_UNALIGNED_STORE64(void *p, uint64_t v) {
memcpy(p, &v, sizeof v);
}
#endif
#define TEMPLATE_(T) \
static BROTLI_INLINE T brotli_min_ ## T (T a, T b) { return a < b ? a : b; } \
static BROTLI_INLINE T brotli_max_ ## T (T a, T b) { return a > b ? a : b; }
TEMPLATE_(double) TEMPLATE_(float) TEMPLATE_(int)
TEMPLATE_(size_t) TEMPLATE_(uint32_t) TEMPLATE_(uint8_t)
#undef TEMPLATE_
#define BROTLI_MIN(T, A, B) (brotli_min_ ## T((A), (B)))
#define BROTLI_MAX(T, A, B) (brotli_max_ ## T((A), (B)))
#define BROTLI_SWAP(T, A, I, J) { \
T __brotli_swap_tmp = (A)[(I)]; \
(A)[(I)] = (A)[(J)]; \
(A)[(J)] = __brotli_swap_tmp; \
}
#define BROTLI_ENSURE_CAPACITY(M, T, A, C, R) { \
if (C < (R)) { \
size_t _new_size = (C == 0) ? (R) : C; \
T* new_array; \
while (_new_size < (R)) _new_size *= 2; \
new_array = BROTLI_ALLOC((M), T, _new_size); \
if (!BROTLI_IS_OOM(m) && C != 0) \
memcpy(new_array, A, C * sizeof(T)); \
BROTLI_FREE((M), A); \
A = new_array; \
C = _new_size; \
} \
}
#endif /* BROTLI_ENC_PORT_H_ */
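
The fallback branch above is the part worth keeping in mind now that the file is gone: a memcpy into a local variable is the portable way to express an unaligned load, and optimizing compilers lower it to a single move on targets that support one. A tiny self-contained demonstration:

#include <stdint.h>
#include <stdio.h>
#include <string.h>

static uint32_t Load32(const void* p) {
  uint32_t v;
  memcpy(&v, p, sizeof(v));   /* well-defined at any alignment */
  return v;
}

int main(void) {
  uint8_t buf[8] = {0x00, 0x11, 0x22, 0x33, 0x44, 0x55, 0x66, 0x77};
  /* &buf[1] is almost certainly misaligned for uint32_t, and that is fine. */
  printf("0x%08x\n", Load32(&buf[1]));   /* 0x44332211 on little-endian hosts */
  return 0;
}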


@ -1,54 +0,0 @@
/* Copyright 2013 Google Inc. All Rights Reserved.
Distributed under MIT license.
See file LICENSE for detail or copy at https://opensource.org/licenses/MIT
*/
/* Functions for encoding of integers into prefix codes the amount of extra
bits, and the actual values of the extra bits. */
#ifndef BROTLI_ENC_PREFIX_H_
#define BROTLI_ENC_PREFIX_H_
#include "../common/constants.h"
#include <brotli/port.h>
#include <brotli/types.h>
#include "./fast_log.h"
#if defined(__cplusplus) || defined(c_plusplus)
extern "C" {
#endif
/* Here distance_code is an intermediate code, i.e. one of the special codes or
the actual distance increased by BROTLI_NUM_DISTANCE_SHORT_CODES - 1. */
static BROTLI_INLINE void PrefixEncodeCopyDistance(size_t distance_code,
size_t num_direct_codes,
size_t postfix_bits,
uint16_t* code,
uint32_t* extra_bits) {
if (distance_code < BROTLI_NUM_DISTANCE_SHORT_CODES + num_direct_codes) {
*code = (uint16_t)distance_code;
*extra_bits = 0;
return;
} else {
size_t dist = ((size_t)1 << (postfix_bits + 2u)) +
(distance_code - BROTLI_NUM_DISTANCE_SHORT_CODES - num_direct_codes);
size_t bucket = Log2FloorNonZero(dist) - 1;
size_t postfix_mask = (1u << postfix_bits) - 1;
size_t postfix = dist & postfix_mask;
size_t prefix = (dist >> bucket) & 1;
size_t offset = (2 + prefix) << bucket;
size_t nbits = bucket - postfix_bits;
*code = (uint16_t)(
(BROTLI_NUM_DISTANCE_SHORT_CODES + num_direct_codes +
((2 * (nbits - 1) + prefix) << postfix_bits) + postfix));
*extra_bits = (uint32_t)(
(nbits << 24) | ((dist - offset) >> postfix_bits));
}
}
#if defined(__cplusplus) || defined(c_plusplus)
} /* extern "C" */
#endif
#endif /* BROTLI_ENC_PREFIX_H_ */
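
A worked example helps with the bucketing arithmetic in PrefixEncodeCopyDistance. The sketch below mirrors its non-short-code branch for the simplest parameters (postfix_bits = 0, num_direct_codes = 0, where the intermediate distance_code is the copy distance plus 15 and BROTLI_NUM_DISTANCE_SHORT_CODES is 16); the original packs the bit count into the top byte of extra_bits, here it is returned separately for readability:

#include <stdint.h>
#include <stdio.h>

static uint32_t Log2Floor(uint32_t v) {
  uint32_t r = 0;
  while (v >>= 1) ++r;
  return r;
}

/* Split a copy distance into (prefix code, number of extra bits, extra value),
   following the else-branch above with postfix_bits = 0, num_direct_codes = 0. */
static void EncodeDistance(uint32_t distance, uint16_t* code,
                           uint32_t* nbits, uint32_t* extra) {
  uint32_t dist = distance + 3;                 /* 4 + (distance_code - 16) */
  uint32_t bucket = Log2Floor(dist) - 1;
  uint32_t prefix = (dist >> bucket) & 1;
  uint32_t offset = (2 + prefix) << bucket;
  *nbits = bucket;
  *code = (uint16_t)(16 + 2 * (bucket - 1) + prefix);
  *extra = dist - offset;
}

int main(void) {
  const uint32_t distances[] = {1, 10, 100, 1000};
  for (int i = 0; i < 4; ++i) {
    uint16_t code; uint32_t nbits, extra;
    EncodeDistance(distances[i], &code, &nbits, &extra);
    printf("distance %4u -> code %2u, %u extra bit(s), extra value %u\n",
           distances[i], (unsigned)code, nbits, extra);
  }
  return 0;
}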


@ -1,160 +0,0 @@
/* Copyright 2016 Google Inc. All Rights Reserved.
Distributed under MIT license.
See file LICENSE for detail or copy at https://opensource.org/licenses/MIT
*/
/* Constants and formulas that affect speed-ratio trade-offs and thus define
quality levels. */
#ifndef BROTLI_ENC_QUALITY_H_
#define BROTLI_ENC_QUALITY_H_
#include <brotli/encode.h>
#define FAST_ONE_PASS_COMPRESSION_QUALITY 0
#define FAST_TWO_PASS_COMPRESSION_QUALITY 1
#define ZOPFLIFICATION_QUALITY 10
#define HQ_ZOPFLIFICATION_QUALITY 11
#define MAX_QUALITY_FOR_STATIC_ENTROPY_CODES 2
#define MIN_QUALITY_FOR_BLOCK_SPLIT 4
#define MIN_QUALITY_FOR_OPTIMIZE_HISTOGRAMS 4
#define MIN_QUALITY_FOR_EXTENSIVE_REFERENCE_SEARCH 5
#define MIN_QUALITY_FOR_CONTEXT_MODELING 5
#define MIN_QUALITY_FOR_HQ_CONTEXT_MODELING 7
#define MIN_QUALITY_FOR_HQ_BLOCK_SPLITTING 10
/* Only for "font" mode. */
#define MIN_QUALITY_FOR_RECOMPUTE_DISTANCE_PREFIXES 10
/* For quality below MIN_QUALITY_FOR_BLOCK_SPLIT there is no block splitting,
so we buffer at most this much literals and commands. */
#define MAX_NUM_DELAYED_SYMBOLS 0x2fff
typedef struct BrotliHasherParams {
int type;
int bucket_bits;
int block_bits;
int hash_len;
int num_last_distances_to_check;
} BrotliHasherParams;
/* Encoding parameters */
typedef struct BrotliEncoderParams {
BrotliEncoderMode mode;
int quality;
int lgwin;
int lgblock;
size_t size_hint;
BROTLI_BOOL disable_literal_context_modeling;
BrotliHasherParams hasher;
} BrotliEncoderParams;
/* Returns hash-table size for quality levels 0 and 1. */
static BROTLI_INLINE size_t MaxHashTableSize(int quality) {
return quality == FAST_ONE_PASS_COMPRESSION_QUALITY ? 1 << 15 : 1 << 17;
}
/* The maximum length for which the zopflification uses distinct distances. */
#define MAX_ZOPFLI_LEN_QUALITY_10 150
#define MAX_ZOPFLI_LEN_QUALITY_11 325
/* Do not thoroughly search when a long copy is found. */
#define BROTLI_LONG_COPY_QUICK_STEP 16384
static BROTLI_INLINE size_t MaxZopfliLen(const BrotliEncoderParams* params) {
return params->quality <= 10 ?
MAX_ZOPFLI_LEN_QUALITY_10 :
MAX_ZOPFLI_LEN_QUALITY_11;
}
/* Number of best candidates to evaluate to expand Zopfli chain. */
static BROTLI_INLINE size_t MaxZopfliCandidates(
const BrotliEncoderParams* params) {
return params->quality <= 10 ? 1 : 5;
}
static BROTLI_INLINE void SanitizeParams(BrotliEncoderParams* params) {
params->quality = BROTLI_MIN(int, BROTLI_MAX_QUALITY,
BROTLI_MAX(int, BROTLI_MIN_QUALITY, params->quality));
if (params->lgwin < BROTLI_MIN_WINDOW_BITS) {
params->lgwin = BROTLI_MIN_WINDOW_BITS;
} else if (params->lgwin > BROTLI_MAX_WINDOW_BITS) {
params->lgwin = BROTLI_MAX_WINDOW_BITS;
}
}
/* Returns optimized lg_block value. */
static BROTLI_INLINE int ComputeLgBlock(const BrotliEncoderParams* params) {
int lgblock = params->lgblock;
if (params->quality == FAST_ONE_PASS_COMPRESSION_QUALITY ||
params->quality == FAST_TWO_PASS_COMPRESSION_QUALITY) {
lgblock = params->lgwin;
} else if (params->quality < MIN_QUALITY_FOR_BLOCK_SPLIT) {
lgblock = 14;
} else if (lgblock == 0) {
lgblock = 16;
if (params->quality >= 9 && params->lgwin > lgblock) {
lgblock = BROTLI_MIN(int, 18, params->lgwin);
}
} else {
lgblock = BROTLI_MIN(int, BROTLI_MAX_INPUT_BLOCK_BITS,
BROTLI_MAX(int, BROTLI_MIN_INPUT_BLOCK_BITS, lgblock));
}
return lgblock;
}
/* Returns log2 of the size of main ring buffer area.
Allocate at least lgwin + 1 bits for the ring buffer so that the newly
added block fits there completely and we still get lgwin bits and at least
read_block_size_bits + 1 bits because the copy tail length needs to be
smaller than ring-buffer size. */
static BROTLI_INLINE int ComputeRbBits(const BrotliEncoderParams* params) {
return 1 + BROTLI_MAX(int, params->lgwin, params->lgblock);
}
static BROTLI_INLINE size_t MaxMetablockSize(
const BrotliEncoderParams* params) {
int bits =
BROTLI_MIN(int, ComputeRbBits(params), BROTLI_MAX_INPUT_BLOCK_BITS);
return (size_t)1 << bits;
}
/* When searching for backward references and have not seen matches for a long
time, we can skip some match lookups. Unsuccessful match lookups are very
expensive and this kind of a heuristic speeds up compression quite a lot.
At first 8 byte strides are taken and every second byte is put to hasher.
After 4x more literals stride by 16 bytes, every put 4-th byte to hasher.
Applied only to qualities 2 to 9. */
static BROTLI_INLINE size_t LiteralSpreeLengthForSparseSearch(
const BrotliEncoderParams* params) {
return params->quality < 9 ? 64 : 512;
}
static BROTLI_INLINE void ChooseHasher(const BrotliEncoderParams* params,
BrotliHasherParams* hparams) {
if (params->quality > 9) {
hparams->type = 10;
} else if (params->quality == 4 && params->size_hint >= (1 << 20)) {
hparams->type = 54;
} else if (params->quality < 5) {
hparams->type = params->quality;
} else if (params->lgwin <= 16) {
hparams->type = params->quality < 7 ? 40 : params->quality < 9 ? 41 : 42;
} else if (params->size_hint >= (1 << 20) && params->lgwin >= 19) {
hparams->type = 6;
hparams->block_bits = params->quality - 1;
hparams->bucket_bits = 15;
hparams->hash_len = 5;
hparams->num_last_distances_to_check =
params->quality < 7 ? 4 : params->quality < 9 ? 10 : 16;
} else {
hparams->type = 5;
hparams->block_bits = params->quality - 1;
hparams->bucket_bits = params->quality < 7 ? 14 : 15;
hparams->num_last_distances_to_check =
params->quality < 7 ? 4 : params->quality < 9 ? 10 : 16;
}
}
#endif /* BROTLI_ENC_QUALITY_H_ */
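
As a concrete illustration of how lgblock falls out of the quality and window settings, here is ComputeLgBlock transcribed into Go. The clamping bounds (block bits in [16, 24]) and quality range come from brotli's public headers and are assumptions here, not defined in this file.

// computeLgBlock mirrors ComputeLgBlock above; quality is assumed to be
// already sanitized into [0, 11] and lgwin into [10, 24].
func computeLgBlock(quality, lgwin, lgblock int) int {
    switch {
    case quality <= 1: // FAST_ONE_PASS / FAST_TWO_PASS reuse the window size
        return lgwin
    case quality < 4: // below MIN_QUALITY_FOR_BLOCK_SPLIT
        return 14
    case lgblock == 0:
        lgblock = 16
        if quality >= 9 && lgwin > lgblock {
            lgblock = 18
            if lgwin < 18 {
                lgblock = lgwin
            }
        }
        return lgblock
    case lgblock < 16: // assumed BROTLI_MIN_INPUT_BLOCK_BITS
        return 16
    case lgblock > 24: // assumed BROTLI_MAX_INPUT_BLOCK_BITS
        return 24
    default:
        return lgblock
    }
}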

View File

@@ -1,160 +0,0 @@
/* Copyright 2013 Google Inc. All Rights Reserved.
Distributed under MIT license.
See file LICENSE for detail or copy at https://opensource.org/licenses/MIT
*/
/* Sliding window over the input data. */
#ifndef BROTLI_ENC_RINGBUFFER_H_
#define BROTLI_ENC_RINGBUFFER_H_
#include <string.h> /* memcpy */
#include <brotli/types.h>
#include "./memory.h"
#include "./port.h"
#include "./quality.h"
#if defined(__cplusplus) || defined(c_plusplus)
extern "C" {
#endif
/* A RingBuffer(window_bits, tail_bits) contains `1 << window_bits' bytes of
data in a circular manner: writing a byte writes it to:
`position() % (1 << window_bits)'.
For convenience, the RingBuffer array contains another copy of the
first `1 << tail_bits' bytes:
buffer_[i] == buffer_[i + (1 << window_bits)], if i < (1 << tail_bits),
and another copy of the last two bytes:
buffer_[-1] == buffer_[(1 << window_bits) - 1] and
buffer_[-2] == buffer_[(1 << window_bits) - 2]. */
typedef struct RingBuffer {
/* Size of the ring-buffer is (1 << window_bits) + tail_size_. */
const uint32_t size_;
const uint32_t mask_;
const uint32_t tail_size_;
const uint32_t total_size_;
uint32_t cur_size_;
/* Position to write in the ring buffer. */
uint32_t pos_;
/* The actual ring buffer containing the copy of the last two bytes, the data,
and the copy of the beginning as a tail. */
uint8_t *data_;
/* The start of the ring-buffer. */
uint8_t *buffer_;
} RingBuffer;
static BROTLI_INLINE void RingBufferInit(RingBuffer* rb) {
rb->cur_size_ = 0;
rb->pos_ = 0;
rb->data_ = 0;
rb->buffer_ = 0;
}
static BROTLI_INLINE void RingBufferSetup(
const BrotliEncoderParams* params, RingBuffer* rb) {
int window_bits = ComputeRbBits(params);
int tail_bits = params->lgblock;
*(uint32_t*)&rb->size_ = 1u << window_bits;
*(uint32_t*)&rb->mask_ = (1u << window_bits) - 1;
*(uint32_t*)&rb->tail_size_ = 1u << tail_bits;
*(uint32_t*)&rb->total_size_ = rb->size_ + rb->tail_size_;
}
static BROTLI_INLINE void RingBufferFree(MemoryManager* m, RingBuffer* rb) {
BROTLI_FREE(m, rb->data_);
}
/* Allocates or re-allocates data_ to the given length + plus some slack
region before and after. Fills the slack regions with zeros. */
static BROTLI_INLINE void RingBufferInitBuffer(
MemoryManager* m, const uint32_t buflen, RingBuffer* rb) {
static const size_t kSlackForEightByteHashingEverywhere = 7;
uint8_t* new_data = BROTLI_ALLOC(
m, uint8_t, 2 + buflen + kSlackForEightByteHashingEverywhere);
size_t i;
if (BROTLI_IS_OOM(m)) return;
if (rb->data_) {
memcpy(new_data, rb->data_,
2 + rb->cur_size_ + kSlackForEightByteHashingEverywhere);
BROTLI_FREE(m, rb->data_);
}
rb->data_ = new_data;
rb->cur_size_ = buflen;
rb->buffer_ = rb->data_ + 2;
rb->buffer_[-2] = rb->buffer_[-1] = 0;
for (i = 0; i < kSlackForEightByteHashingEverywhere; ++i) {
rb->buffer_[rb->cur_size_ + i] = 0;
}
}
static BROTLI_INLINE void RingBufferWriteTail(
const uint8_t *bytes, size_t n, RingBuffer* rb) {
const size_t masked_pos = rb->pos_ & rb->mask_;
if (BROTLI_PREDICT_FALSE(masked_pos < rb->tail_size_)) {
/* Just fill the tail buffer with the beginning data. */
const size_t p = rb->size_ + masked_pos;
memcpy(&rb->buffer_[p], bytes,
BROTLI_MIN(size_t, n, rb->tail_size_ - masked_pos));
}
}
/* Push bytes into the ring buffer. */
static BROTLI_INLINE void RingBufferWrite(
MemoryManager* m, const uint8_t *bytes, size_t n, RingBuffer* rb) {
if (rb->pos_ == 0 && n < rb->tail_size_) {
/* Special case for the first write: to process the first block, we don't
need to allocate the whole ring-buffer and we don't need the tail
either. However, we do this memory usage optimization only if the
first write is less than the tail size, which is also the input block
size, otherwise it is likely that other blocks will follow and we
will need to reallocate to the full size anyway. */
rb->pos_ = (uint32_t)n;
RingBufferInitBuffer(m, rb->pos_, rb);
if (BROTLI_IS_OOM(m)) return;
memcpy(rb->buffer_, bytes, n);
return;
}
if (rb->cur_size_ < rb->total_size_) {
/* Lazily allocate the full buffer. */
RingBufferInitBuffer(m, rb->total_size_, rb);
if (BROTLI_IS_OOM(m)) return;
/* Initialize the last two bytes to zero, so that we don't have to worry
later when we copy the last two bytes to the first two positions. */
rb->buffer_[rb->size_ - 2] = 0;
rb->buffer_[rb->size_ - 1] = 0;
}
{
const size_t masked_pos = rb->pos_ & rb->mask_;
/* The length of the writes is limited so that we do not need to worry
about a write wrapping around the ring buffer more than once. */
RingBufferWriteTail(bytes, n, rb);
if (BROTLI_PREDICT_TRUE(masked_pos + n <= rb->size_)) {
/* A single write fits. */
memcpy(&rb->buffer_[masked_pos], bytes, n);
} else {
/* Split into two writes.
Copy into the end of the buffer, including the tail buffer. */
memcpy(&rb->buffer_[masked_pos], bytes,
BROTLI_MIN(size_t, n, rb->total_size_ - masked_pos));
/* Copy into the beginning of the buffer */
memcpy(&rb->buffer_[0], bytes + (rb->size_ - masked_pos),
n - (rb->size_ - masked_pos));
}
}
rb->buffer_[-2] = rb->buffer_[rb->size_ - 2];
rb->buffer_[-1] = rb->buffer_[rb->size_ - 1];
rb->pos_ += (uint32_t)n;
if (rb->pos_ > (1u << 30)) {
/* Wrap, but preserve not-a-first-lap feature. */
rb->pos_ = (rb->pos_ & ((1u << 30) - 1)) | (1u << 30);
}
}
#if defined(__cplusplus) || defined(c_plusplus)
} /* extern "C" */
#endif
#endif /* BROTLI_ENC_RINGBUFFER_H_ */
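
The essential trick of this ring buffer is the mirrored tail: the first (1 << tail_bits) bytes are duplicated past the end of the window so that reads spanning the wrap point stay contiguous. A stripped-down Go model of just that invariant follows; it omits the two-byte prefix copy and the lazy allocation of the real implementation.

package main

import "fmt"

type miniRing struct {
    size, mask, tail uint32
    pos              uint32
    buf              []byte // window plus mirrored tail
}

func newMiniRing(windowBits, tailBits uint) *miniRing {
    size := uint32(1) << windowBits
    tail := uint32(1) << tailBits
    return &miniRing{size: size, mask: size - 1, tail: tail, buf: make([]byte, size+tail)}
}

func (r *miniRing) write(b []byte) {
    for _, c := range b {
        p := r.pos & r.mask
        r.buf[p] = c
        if p < r.tail {
            r.buf[r.size+p] = c // keep the tail copy in sync
        }
        r.pos++
    }
}

func main() {
    r := newMiniRing(3, 2) // 8-byte window with a 4-byte mirrored tail
    r.write([]byte("abcdefghij"))
    fmt.Printf("%q\n", r.buf) // "ij" wrapped to the front and shows up in the tail copy too
}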

View File

@@ -1,39 +0,0 @@
/* Copyright 2013 Google Inc. All Rights Reserved.
Distributed under MIT license.
See file LICENSE for detail or copy at https://opensource.org/licenses/MIT
*/
/* Class to model the static dictionary. */
#ifndef BROTLI_ENC_STATIC_DICT_H_
#define BROTLI_ENC_STATIC_DICT_H_
#include "../common/dictionary.h"
#include <brotli/types.h>
#include "./port.h"
#if defined(__cplusplus) || defined(c_plusplus)
extern "C" {
#endif
#define BROTLI_MAX_STATIC_DICTIONARY_MATCH_LEN 37
static const uint32_t kInvalidMatch = 0xfffffff;
/* Matches data against static dictionary words, and for each length l,
for which a match is found, updates matches[l] to be the minimum possible
(distance << 5) + len_code.
Returns 1 if matches have been found, otherwise 0.
Prerequisites:
matches array is at least BROTLI_MAX_STATIC_DICTIONARY_MATCH_LEN + 1 long
all elements are initialized to kInvalidMatch */
BROTLI_INTERNAL BROTLI_BOOL BrotliFindAllStaticDictionaryMatches(
const BrotliDictionary* dictionary,
const uint8_t* data, size_t min_length, size_t max_length,
uint32_t* matches);
#if defined(__cplusplus) || defined(c_plusplus)
} /* extern "C" */
#endif
#endif /* BROTLI_ENC_STATIC_DICT_H_ */

File diff suppressed because it is too large.

View File

@@ -1,32 +0,0 @@
/* Copyright 2013 Google Inc. All Rights Reserved.
Distributed under MIT license.
See file LICENSE for detail or copy at https://opensource.org/licenses/MIT
*/
/* Heuristics for deciding about the UTF8-ness of strings. */
#ifndef BROTLI_ENC_UTF8_UTIL_H_
#define BROTLI_ENC_UTF8_UTIL_H_
#include <brotli/types.h>
#include "./port.h"
#if defined(__cplusplus) || defined(c_plusplus)
extern "C" {
#endif
static const double kMinUTF8Ratio = 0.75;
/* Returns 1 if at least min_fraction of the bytes between pos and
pos + length in the (data, mask) ring-buffer is UTF8-encoded, otherwise
returns 0. */
BROTLI_INTERNAL BROTLI_BOOL BrotliIsMostlyUTF8(
const uint8_t* data, const size_t pos, const size_t mask,
const size_t length, const double min_fraction);
#if defined(__cplusplus) || defined(c_plusplus)
} /* extern "C" */
#endif
#endif /* BROTLI_ENC_UTF8_UTIL_H_ */

View File

@@ -1,90 +0,0 @@
/* Copyright 2010 Google Inc. All Rights Reserved.
Distributed under MIT license.
See file LICENSE for detail or copy at https://opensource.org/licenses/MIT
*/
/* Write bits into a byte array. */
#ifndef BROTLI_ENC_WRITE_BITS_H_
#define BROTLI_ENC_WRITE_BITS_H_
#include <assert.h>
#include <stdio.h> /* printf */
#include <brotli/types.h>
#include "./port.h"
#if defined(__cplusplus) || defined(c_plusplus)
extern "C" {
#endif
/*#define BIT_WRITER_DEBUG */
/* This function writes bits into bytes in increasing addresses, and within
a byte least-significant-bit first.
The function can write up to 56 bits in one go with WriteBits
Example: let's assume that 3 bits (Rs below) have been written already:
BYTE-0 BYTE+1 BYTE+2
0000 0RRR 0000 0000 0000 0000
Now, we could write 5 or fewer bits in MSB by just shifting by 3
and OR'ing to BYTE-0.
For n bits, we take the last 5 bits, OR that with high bits in BYTE-0,
and locate the rest in BYTE+1, BYTE+2, etc. */
static BROTLI_INLINE void BrotliWriteBits(size_t n_bits,
uint64_t bits,
size_t * BROTLI_RESTRICT pos,
uint8_t * BROTLI_RESTRICT array) {
#ifdef IS_LITTLE_ENDIAN
/* This branch of the code can write up to 56 bits at a time,
7 bits are lost by being perhaps already in *p and at least
1 bit is needed to initialize the bit-stream ahead (i.e. if 7
bits are in *p and we write 57 bits, then the next write will
access a byte that was never initialized). */
uint8_t *p = &array[*pos >> 3];
uint64_t v = *p;
#ifdef BIT_WRITER_DEBUG
printf("WriteBits %2d 0x%016llx %10d\n", n_bits, bits, *pos);
#endif
assert((bits >> n_bits) == 0);
assert(n_bits <= 56);
v |= bits << (*pos & 7);
BROTLI_UNALIGNED_STORE64(p, v); /* Set some bits. */
*pos += n_bits;
#else
/* implicit & 0xff is assumed for uint8_t arithmetics */
uint8_t *array_pos = &array[*pos >> 3];
const size_t bits_reserved_in_first_byte = (*pos & 7);
size_t bits_left_to_write;
bits <<= bits_reserved_in_first_byte;
*array_pos++ |= (uint8_t)bits;
for (bits_left_to_write = n_bits + bits_reserved_in_first_byte;
bits_left_to_write >= 9;
bits_left_to_write -= 8) {
bits >>= 8;
*array_pos++ = (uint8_t)bits;
}
*array_pos = 0;
*pos += n_bits;
#endif
}
static BROTLI_INLINE void BrotliWriteBitsPrepareStorage(
size_t pos, uint8_t *array) {
#ifdef BIT_WRITER_DEBUG
printf("WriteBitsPrepareStorage %10d\n", pos);
#endif
assert((pos & 7) == 0);
array[pos >> 3] = 0;
}
#if defined(__cplusplus) || defined(c_plusplus)
} /* extern "C" */
#endif
#endif /* BROTLI_ENC_WRITE_BITS_H_ */
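
The little-endian fast path above packs up to 56 bits with a single unaligned 64-bit store. The bit layout it produces, least-significant bit first within each byte and bytes in increasing addresses, can be reproduced one bit at a time, which is easier to follow. A Go sketch:

package main

import "fmt"

// writeBits packs values into a byte slice LSB-first, one bit at a time for
// clarity; BrotliWriteBits above does the same job with one 64-bit store.
func writeBits(nBits uint, val uint64, pos *uint, out []byte) {
    for i := uint(0); i < nBits; i++ {
        if val&(1<<i) != 0 {
            out[(*pos+i)>>3] |= 1 << ((*pos + i) & 7)
        }
    }
    *pos += nBits
}

func main() {
    out := make([]byte, 2)
    pos := uint(0)
    writeBits(3, 0b101, &pos, out)  // the "RRR" bits from the comment above
    writeBits(5, 0b11010, &pos, out) // next value lands in the remaining high bits
    fmt.Printf("%08b %08b pos=%d\n", out[0], out[1], pos) // 11010101 00000000 pos=8
}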

File diff suppressed because it is too large.

View File

@@ -1,501 +0,0 @@
/* Copyright 2010 Google Inc. All Rights Reserved.
Distributed under MIT license.
See file LICENSE for detail or copy at https://opensource.org/licenses/MIT
*/
/* Entropy encoding (Huffman) utilities. */
#include "./enc/entropy_encode.h"
#include <string.h> /* memset */
#include "./common/constants.h"
#include <brotli/types.h>
#include "./enc/port.h"
#if defined(__cplusplus) || defined(c_plusplus)
extern "C" {
#endif
BROTLI_BOOL BrotliSetDepth(
int p0, HuffmanTree* pool, uint8_t* depth, int max_depth) {
int stack[16];
int level = 0;
int p = p0;
assert(max_depth <= 15);
stack[0] = -1;
while (BROTLI_TRUE) {
if (pool[p].index_left_ >= 0) {
level++;
if (level > max_depth) return BROTLI_FALSE;
stack[level] = pool[p].index_right_or_value_;
p = pool[p].index_left_;
continue;
} else {
depth[pool[p].index_right_or_value_] = (uint8_t)level;
}
while (level >= 0 && stack[level] == -1) level--;
if (level < 0) return BROTLI_TRUE;
p = stack[level];
stack[level] = -1;
}
}
/* Sort the root nodes, least popular first. */
static BROTLI_INLINE BROTLI_BOOL SortHuffmanTree(
const HuffmanTree* v0, const HuffmanTree* v1) {
if (v0->total_count_ != v1->total_count_) {
return TO_BROTLI_BOOL(v0->total_count_ < v1->total_count_);
}
return TO_BROTLI_BOOL(v0->index_right_or_value_ > v1->index_right_or_value_);
}
/* This function will create a Huffman tree.
The catch here is that the tree cannot be arbitrarily deep.
Brotli specifies a maximum depth of 15 bits for "code trees"
and 7 bits for "code length code trees."
count_limit is the value that is to be faked as the minimum value
and this minimum value is raised until the tree matches the
maximum length requirement.
This algorithm does not perform well for very long data blocks,
especially when population counts are larger than 2**tree_limit, but
we are not planning to use this with extremely long blocks.
See http://en.wikipedia.org/wiki/Huffman_coding */
void BrotliCreateHuffmanTree(const uint32_t *data,
const size_t length,
const int tree_limit,
HuffmanTree* tree,
uint8_t *depth) {
uint32_t count_limit;
HuffmanTree sentinel;
InitHuffmanTree(&sentinel, BROTLI_UINT32_MAX, -1, -1);
/* For block sizes below 64 kB, we never need to do a second iteration
of this loop. Probably all of our block sizes will be smaller than
that, so this loop is mostly of academic interest. If we actually
would need this, we would be better off with the Katajainen algorithm. */
for (count_limit = 1; ; count_limit *= 2) {
size_t n = 0;
size_t i;
size_t j;
size_t k;
for (i = length; i != 0;) {
--i;
if (data[i]) {
const uint32_t count = BROTLI_MAX(uint32_t, data[i], count_limit);
InitHuffmanTree(&tree[n++], count, -1, (int16_t)i);
}
}
if (n == 1) {
depth[tree[0].index_right_or_value_] = 1; /* Only one element. */
break;
}
SortHuffmanTreeItems(tree, n, SortHuffmanTree);
/* The nodes are:
[0, n): the sorted leaf nodes that we start with.
[n]: we add a sentinel here.
[n + 1, 2n): new parent nodes are added here, starting from
(n+1). These are naturally in ascending order.
[2n]: we add a sentinel at the end as well.
There will be (2n+1) elements at the end. */
tree[n] = sentinel;
tree[n + 1] = sentinel;
i = 0; /* Points to the next leaf node. */
j = n + 1; /* Points to the next non-leaf node. */
for (k = n - 1; k != 0; --k) {
size_t left, right;
if (tree[i].total_count_ <= tree[j].total_count_) {
left = i;
++i;
} else {
left = j;
++j;
}
if (tree[i].total_count_ <= tree[j].total_count_) {
right = i;
++i;
} else {
right = j;
++j;
}
{
/* The sentinel node becomes the parent node. */
size_t j_end = 2 * n - k;
tree[j_end].total_count_ =
tree[left].total_count_ + tree[right].total_count_;
tree[j_end].index_left_ = (int16_t)left;
tree[j_end].index_right_or_value_ = (int16_t)right;
/* Add back the last sentinel node. */
tree[j_end + 1] = sentinel;
}
}
if (BrotliSetDepth((int)(2 * n - 1), &tree[0], depth, tree_limit)) {
/* We need to pack the Huffman tree in tree_limit bits. If this was not
successful, add fake entities to the lowest values and retry. */
break;
}
}
}
static void Reverse(uint8_t* v, size_t start, size_t end) {
--end;
while (start < end) {
uint8_t tmp = v[start];
v[start] = v[end];
v[end] = tmp;
++start;
--end;
}
}
static void BrotliWriteHuffmanTreeRepetitions(
const uint8_t previous_value,
const uint8_t value,
size_t repetitions,
size_t* tree_size,
uint8_t* tree,
uint8_t* extra_bits_data) {
assert(repetitions > 0);
if (previous_value != value) {
tree[*tree_size] = value;
extra_bits_data[*tree_size] = 0;
++(*tree_size);
--repetitions;
}
if (repetitions == 7) {
tree[*tree_size] = value;
extra_bits_data[*tree_size] = 0;
++(*tree_size);
--repetitions;
}
if (repetitions < 3) {
size_t i;
for (i = 0; i < repetitions; ++i) {
tree[*tree_size] = value;
extra_bits_data[*tree_size] = 0;
++(*tree_size);
}
} else {
size_t start = *tree_size;
repetitions -= 3;
while (BROTLI_TRUE) {
tree[*tree_size] = BROTLI_REPEAT_PREVIOUS_CODE_LENGTH;
extra_bits_data[*tree_size] = repetitions & 0x3;
++(*tree_size);
repetitions >>= 2;
if (repetitions == 0) {
break;
}
--repetitions;
}
Reverse(tree, start, *tree_size);
Reverse(extra_bits_data, start, *tree_size);
}
}
static void BrotliWriteHuffmanTreeRepetitionsZeros(
size_t repetitions,
size_t* tree_size,
uint8_t* tree,
uint8_t* extra_bits_data) {
if (repetitions == 11) {
tree[*tree_size] = 0;
extra_bits_data[*tree_size] = 0;
++(*tree_size);
--repetitions;
}
if (repetitions < 3) {
size_t i;
for (i = 0; i < repetitions; ++i) {
tree[*tree_size] = 0;
extra_bits_data[*tree_size] = 0;
++(*tree_size);
}
} else {
size_t start = *tree_size;
repetitions -= 3;
while (BROTLI_TRUE) {
tree[*tree_size] = BROTLI_REPEAT_ZERO_CODE_LENGTH;
extra_bits_data[*tree_size] = repetitions & 0x7;
++(*tree_size);
repetitions >>= 3;
if (repetitions == 0) {
break;
}
--repetitions;
}
Reverse(tree, start, *tree_size);
Reverse(extra_bits_data, start, *tree_size);
}
}
void BrotliOptimizeHuffmanCountsForRle(size_t length, uint32_t* counts,
uint8_t* good_for_rle) {
size_t nonzero_count = 0;
size_t stride;
size_t limit;
size_t sum;
const size_t streak_limit = 1240;
/* Let's make the Huffman code more compatible with RLE encoding. */
size_t i;
for (i = 0; i < length; i++) {
if (counts[i]) {
++nonzero_count;
}
}
if (nonzero_count < 16) {
return;
}
while (length != 0 && counts[length - 1] == 0) {
--length;
}
if (length == 0) {
return; /* All zeros. */
}
/* Now counts[0..length - 1] does not have trailing zeros. */
{
size_t nonzeros = 0;
uint32_t smallest_nonzero = 1 << 30;
for (i = 0; i < length; ++i) {
if (counts[i] != 0) {
++nonzeros;
if (smallest_nonzero > counts[i]) {
smallest_nonzero = counts[i];
}
}
}
if (nonzeros < 5) {
/* Small histogram will model it well. */
return;
}
if (smallest_nonzero < 4) {
size_t zeros = length - nonzeros;
if (zeros < 6) {
for (i = 1; i < length - 1; ++i) {
if (counts[i - 1] != 0 && counts[i] == 0 && counts[i + 1] != 0) {
counts[i] = 1;
}
}
}
}
if (nonzeros < 28) {
return;
}
}
/* 2) Let's mark all population counts that already can be encoded
with an RLE code. */
memset(good_for_rle, 0, length);
{
/* Let's not spoil any of the existing good RLE codes.
Mark any seq of 0's that is longer than 5 as a good_for_rle.
Mark any seq of non-0's that is longer than 7 as a good_for_rle. */
uint32_t symbol = counts[0];
size_t step = 0;
for (i = 0; i <= length; ++i) {
if (i == length || counts[i] != symbol) {
if ((symbol == 0 && step >= 5) ||
(symbol != 0 && step >= 7)) {
size_t k;
for (k = 0; k < step; ++k) {
good_for_rle[i - k - 1] = 1;
}
}
step = 1;
if (i != length) {
symbol = counts[i];
}
} else {
++step;
}
}
}
/* 3) Let's replace those population counts that lead to more RLE codes.
Math here is in 24.8 fixed point representation. */
stride = 0;
limit = 256 * (counts[0] + counts[1] + counts[2]) / 3 + 420;
sum = 0;
for (i = 0; i <= length; ++i) {
if (i == length || good_for_rle[i] ||
(i != 0 && good_for_rle[i - 1]) ||
(256 * counts[i] - limit + streak_limit) >= 2 * streak_limit) {
if (stride >= 4 || (stride >= 3 && sum == 0)) {
size_t k;
/* The stride must end, collapse what we have, if we have enough (4). */
size_t count = (sum + stride / 2) / stride;
if (count == 0) {
count = 1;
}
if (sum == 0) {
/* Don't make an all zeros stride to be upgraded to ones. */
count = 0;
}
for (k = 0; k < stride; ++k) {
/* We don't want to change value at counts[i],
that is already belonging to the next stride. Thus - 1. */
counts[i - k - 1] = (uint32_t)count;
}
}
stride = 0;
sum = 0;
if (i < length - 2) {
/* All interesting strides have a count of at least 4, */
/* at least when non-zeros. */
limit = 256 * (counts[i] + counts[i + 1] + counts[i + 2]) / 3 + 420;
} else if (i < length) {
limit = 256 * counts[i];
} else {
limit = 0;
}
}
++stride;
if (i != length) {
sum += counts[i];
if (stride >= 4) {
limit = (256 * sum + stride / 2) / stride;
}
if (stride == 4) {
limit += 120;
}
}
}
}
static void DecideOverRleUse(const uint8_t* depth, const size_t length,
BROTLI_BOOL *use_rle_for_non_zero,
BROTLI_BOOL *use_rle_for_zero) {
size_t total_reps_zero = 0;
size_t total_reps_non_zero = 0;
size_t count_reps_zero = 1;
size_t count_reps_non_zero = 1;
size_t i;
for (i = 0; i < length;) {
const uint8_t value = depth[i];
size_t reps = 1;
size_t k;
for (k = i + 1; k < length && depth[k] == value; ++k) {
++reps;
}
if (reps >= 3 && value == 0) {
total_reps_zero += reps;
++count_reps_zero;
}
if (reps >= 4 && value != 0) {
total_reps_non_zero += reps;
++count_reps_non_zero;
}
i += reps;
}
*use_rle_for_non_zero =
TO_BROTLI_BOOL(total_reps_non_zero > count_reps_non_zero * 2);
*use_rle_for_zero = TO_BROTLI_BOOL(total_reps_zero > count_reps_zero * 2);
}
void BrotliWriteHuffmanTree(const uint8_t* depth,
size_t length,
size_t* tree_size,
uint8_t* tree,
uint8_t* extra_bits_data) {
uint8_t previous_value = BROTLI_INITIAL_REPEATED_CODE_LENGTH;
size_t i;
BROTLI_BOOL use_rle_for_non_zero = BROTLI_FALSE;
BROTLI_BOOL use_rle_for_zero = BROTLI_FALSE;
/* Throw away trailing zeros. */
size_t new_length = length;
for (i = 0; i < length; ++i) {
if (depth[length - i - 1] == 0) {
--new_length;
} else {
break;
}
}
/* First gather statistics on whether it is a good idea to do RLE. */
if (length > 50) {
/* Find RLE coding for longer codes.
Shorter codes seem not to benefit from RLE. */
DecideOverRleUse(depth, new_length,
&use_rle_for_non_zero, &use_rle_for_zero);
}
/* Actual RLE coding. */
for (i = 0; i < new_length;) {
const uint8_t value = depth[i];
size_t reps = 1;
if ((value != 0 && use_rle_for_non_zero) ||
(value == 0 && use_rle_for_zero)) {
size_t k;
for (k = i + 1; k < new_length && depth[k] == value; ++k) {
++reps;
}
}
if (value == 0) {
BrotliWriteHuffmanTreeRepetitionsZeros(
reps, tree_size, tree, extra_bits_data);
} else {
BrotliWriteHuffmanTreeRepetitions(previous_value,
value, reps, tree_size,
tree, extra_bits_data);
previous_value = value;
}
i += reps;
}
}
static uint16_t BrotliReverseBits(size_t num_bits, uint16_t bits) {
static const size_t kLut[16] = { /* Pre-reversed 4-bit values. */
0x0, 0x8, 0x4, 0xc, 0x2, 0xa, 0x6, 0xe,
0x1, 0x9, 0x5, 0xd, 0x3, 0xb, 0x7, 0xf
};
size_t retval = kLut[bits & 0xf];
size_t i;
for (i = 4; i < num_bits; i += 4) {
retval <<= 4;
bits = (uint16_t)(bits >> 4);
retval |= kLut[bits & 0xf];
}
retval >>= ((0 - num_bits) & 0x3);
return (uint16_t)retval;
}
/* 0..15 are values for bits */
#define MAX_HUFFMAN_BITS 16
void BrotliConvertBitDepthsToSymbols(const uint8_t *depth,
size_t len,
uint16_t *bits) {
/* In Brotli, all bit depths are [1..15]
0 bit depth means that the symbol does not exist. */
uint16_t bl_count[MAX_HUFFMAN_BITS] = { 0 };
uint16_t next_code[MAX_HUFFMAN_BITS];
size_t i;
int code = 0;
for (i = 0; i < len; ++i) {
++bl_count[depth[i]];
}
bl_count[0] = 0;
next_code[0] = 0;
for (i = 1; i < MAX_HUFFMAN_BITS; ++i) {
code = (code + bl_count[i - 1]) << 1;
next_code[i] = (uint16_t)code;
}
for (i = 0; i < len; ++i) {
if (depth[i]) {
bits[i] = BrotliReverseBits(depth[i], next_code[depth[i]]++);
}
}
}
#if defined(__cplusplus) || defined(c_plusplus)
} /* extern "C" */
#endif
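
BrotliConvertBitDepthsToSymbols is the standard canonical-Huffman assignment (the same scheme DEFLATE uses) followed by bit reversal, because brotli streams read codes LSB-first. A compact Go rendering of that step with a small example:

package main

import "fmt"

func reverseBits(n uint, v uint16) uint16 {
    var r uint16
    for i := uint(0); i < n; i++ {
        r = r<<1 | v&1
        v >>= 1
    }
    return r
}

// canonicalCodes assigns codes from code lengths exactly as the C above:
// count the lengths, derive the first code of each length, then hand codes
// out in symbol order and bit-reverse them.
func canonicalCodes(depth []uint8) []uint16 {
    const maxBits = 16
    var blCount [maxBits]uint16
    for _, d := range depth {
        blCount[d]++
    }
    blCount[0] = 0
    var nextCode [maxBits]uint16
    code := 0
    for i := 1; i < maxBits; i++ {
        code = (code + int(blCount[i-1])) << 1
        nextCode[i] = uint16(code)
    }
    out := make([]uint16, len(depth))
    for i, d := range depth {
        if d != 0 {
            out[i] = reverseBits(uint(d), nextCode[d])
            nextCode[d]++
        }
    }
    return out
}

func main() {
    // Depths 2,1,3,3 give canonical codes 10, 0, 110, 111, reversed to 1, 0, 3, 7.
    fmt.Println(canonicalCodes([]uint8{2, 1, 3, 3}))
}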

View File

@@ -1,97 +0,0 @@
/* Copyright 2013 Google Inc. All Rights Reserved.
Distributed under MIT license.
See file LICENSE for detail or copy at https://opensource.org/licenses/MIT
*/
/* Build per-context histograms of literals, commands and distance codes. */
#include "./enc/histogram.h"
#include "./enc/block_splitter.h"
#include "./enc/command.h"
#include "./enc/context.h"
#if defined(__cplusplus) || defined(c_plusplus)
extern "C" {
#endif
typedef struct BlockSplitIterator {
const BlockSplit* split_; /* Not owned. */
size_t idx_;
size_t type_;
size_t length_;
} BlockSplitIterator;
static void InitBlockSplitIterator(BlockSplitIterator* self,
const BlockSplit* split) {
self->split_ = split;
self->idx_ = 0;
self->type_ = 0;
self->length_ = split->lengths ? split->lengths[0] : 0;
}
static void BlockSplitIteratorNext(BlockSplitIterator* self) {
if (self->length_ == 0) {
++self->idx_;
self->type_ = self->split_->types[self->idx_];
self->length_ = self->split_->lengths[self->idx_];
}
--self->length_;
}
void BrotliBuildHistogramsWithContext(
const Command* cmds, const size_t num_commands,
const BlockSplit* literal_split, const BlockSplit* insert_and_copy_split,
const BlockSplit* dist_split, const uint8_t* ringbuffer, size_t start_pos,
size_t mask, uint8_t prev_byte, uint8_t prev_byte2,
const ContextType* context_modes, HistogramLiteral* literal_histograms,
HistogramCommand* insert_and_copy_histograms,
HistogramDistance* copy_dist_histograms) {
size_t pos = start_pos;
BlockSplitIterator literal_it;
BlockSplitIterator insert_and_copy_it;
BlockSplitIterator dist_it;
size_t i;
InitBlockSplitIterator(&literal_it, literal_split);
InitBlockSplitIterator(&insert_and_copy_it, insert_and_copy_split);
InitBlockSplitIterator(&dist_it, dist_split);
for (i = 0; i < num_commands; ++i) {
const Command* cmd = &cmds[i];
size_t j;
BlockSplitIteratorNext(&insert_and_copy_it);
HistogramAddCommand(&insert_and_copy_histograms[insert_and_copy_it.type_],
cmd->cmd_prefix_);
for (j = cmd->insert_len_; j != 0; --j) {
size_t context;
BlockSplitIteratorNext(&literal_it);
context = context_modes ?
((literal_it.type_ << BROTLI_LITERAL_CONTEXT_BITS) +
Context(prev_byte, prev_byte2, context_modes[literal_it.type_])) :
literal_it.type_;
HistogramAddLiteral(&literal_histograms[context],
ringbuffer[pos & mask]);
prev_byte2 = prev_byte;
prev_byte = ringbuffer[pos & mask];
++pos;
}
pos += CommandCopyLen(cmd);
if (CommandCopyLen(cmd)) {
prev_byte2 = ringbuffer[(pos - 2) & mask];
prev_byte = ringbuffer[(pos - 1) & mask];
if (cmd->cmd_prefix_ >= 128) {
size_t context;
BlockSplitIteratorNext(&dist_it);
context = (dist_it.type_ << BROTLI_DISTANCE_CONTEXT_BITS) +
CommandDistanceContext(cmd);
HistogramAddDistance(&copy_dist_histograms[context],
cmd->dist_prefix_);
}
}
}
}
#if defined(__cplusplus) || defined(c_plusplus)
} /* extern "C" */
#endif
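
The histogram index arithmetic above packs the block type and the per-byte context into a single array index. The same shifts in Go, with brotli's context-bit widths (6 for literals, 2 for distances) assumed here since they come from the constants header:

// Assumed widths: BROTLI_LITERAL_CONTEXT_BITS = 6, BROTLI_DISTANCE_CONTEXT_BITS = 2.
const (
    literalContextBits  = 6
    distanceContextBits = 2
)

// literalHistogramIndex and distanceHistogramIndex reproduce the indexing
// used when filling literal_histograms and copy_dist_histograms above; the
// context id itself comes from Context(), which is not shown in this file.
func literalHistogramIndex(blockType, context uint) uint {
    return blockType<<literalContextBits + context
}

func distanceHistogramIndex(blockType, context uint) uint {
    return blockType<<distanceContextBits + context
}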

View File

@@ -1,358 +0,0 @@
/* Copyright 2013 Google Inc. All Rights Reserved.
Distributed under MIT license.
See file LICENSE for detail or copy at https://opensource.org/licenses/MIT
*/
/* Utilities for building Huffman decoding tables. */
#include "./dec/huffman.h"
#include <string.h> /* memcpy, memset */
#include "./common/constants.h"
#include <brotli/types.h>
#include "./dec/port.h"
#if defined(__cplusplus) || defined(c_plusplus)
extern "C" {
#endif
#define BROTLI_REVERSE_BITS_MAX 8
#ifdef BROTLI_RBIT
#define BROTLI_REVERSE_BITS_BASE \
((sizeof(reg_t) << 3) - BROTLI_REVERSE_BITS_MAX)
#else
#define BROTLI_REVERSE_BITS_BASE 0
static uint8_t kReverseBits[1 << BROTLI_REVERSE_BITS_MAX] = {
0x00, 0x80, 0x40, 0xC0, 0x20, 0xA0, 0x60, 0xE0,
0x10, 0x90, 0x50, 0xD0, 0x30, 0xB0, 0x70, 0xF0,
0x08, 0x88, 0x48, 0xC8, 0x28, 0xA8, 0x68, 0xE8,
0x18, 0x98, 0x58, 0xD8, 0x38, 0xB8, 0x78, 0xF8,
0x04, 0x84, 0x44, 0xC4, 0x24, 0xA4, 0x64, 0xE4,
0x14, 0x94, 0x54, 0xD4, 0x34, 0xB4, 0x74, 0xF4,
0x0C, 0x8C, 0x4C, 0xCC, 0x2C, 0xAC, 0x6C, 0xEC,
0x1C, 0x9C, 0x5C, 0xDC, 0x3C, 0xBC, 0x7C, 0xFC,
0x02, 0x82, 0x42, 0xC2, 0x22, 0xA2, 0x62, 0xE2,
0x12, 0x92, 0x52, 0xD2, 0x32, 0xB2, 0x72, 0xF2,
0x0A, 0x8A, 0x4A, 0xCA, 0x2A, 0xAA, 0x6A, 0xEA,
0x1A, 0x9A, 0x5A, 0xDA, 0x3A, 0xBA, 0x7A, 0xFA,
0x06, 0x86, 0x46, 0xC6, 0x26, 0xA6, 0x66, 0xE6,
0x16, 0x96, 0x56, 0xD6, 0x36, 0xB6, 0x76, 0xF6,
0x0E, 0x8E, 0x4E, 0xCE, 0x2E, 0xAE, 0x6E, 0xEE,
0x1E, 0x9E, 0x5E, 0xDE, 0x3E, 0xBE, 0x7E, 0xFE,
0x01, 0x81, 0x41, 0xC1, 0x21, 0xA1, 0x61, 0xE1,
0x11, 0x91, 0x51, 0xD1, 0x31, 0xB1, 0x71, 0xF1,
0x09, 0x89, 0x49, 0xC9, 0x29, 0xA9, 0x69, 0xE9,
0x19, 0x99, 0x59, 0xD9, 0x39, 0xB9, 0x79, 0xF9,
0x05, 0x85, 0x45, 0xC5, 0x25, 0xA5, 0x65, 0xE5,
0x15, 0x95, 0x55, 0xD5, 0x35, 0xB5, 0x75, 0xF5,
0x0D, 0x8D, 0x4D, 0xCD, 0x2D, 0xAD, 0x6D, 0xED,
0x1D, 0x9D, 0x5D, 0xDD, 0x3D, 0xBD, 0x7D, 0xFD,
0x03, 0x83, 0x43, 0xC3, 0x23, 0xA3, 0x63, 0xE3,
0x13, 0x93, 0x53, 0xD3, 0x33, 0xB3, 0x73, 0xF3,
0x0B, 0x8B, 0x4B, 0xCB, 0x2B, 0xAB, 0x6B, 0xEB,
0x1B, 0x9B, 0x5B, 0xDB, 0x3B, 0xBB, 0x7B, 0xFB,
0x07, 0x87, 0x47, 0xC7, 0x27, 0xA7, 0x67, 0xE7,
0x17, 0x97, 0x57, 0xD7, 0x37, 0xB7, 0x77, 0xF7,
0x0F, 0x8F, 0x4F, 0xCF, 0x2F, 0xAF, 0x6F, 0xEF,
0x1F, 0x9F, 0x5F, 0xDF, 0x3F, 0xBF, 0x7F, 0xFF
};
#endif /* BROTLI_RBIT */
#define BROTLI_REVERSE_BITS_LOWEST \
((reg_t)1 << (BROTLI_REVERSE_BITS_MAX - 1 + BROTLI_REVERSE_BITS_BASE))
/* Returns reverse(num >> BROTLI_REVERSE_BITS_BASE, BROTLI_REVERSE_BITS_MAX),
where reverse(value, len) is the bit-wise reversal of the len least
significant bits of value. */
static BROTLI_INLINE reg_t BrotliReverseBits(reg_t num) {
#ifdef BROTLI_RBIT
return BROTLI_RBIT(num);
#else
return kReverseBits[num];
#endif
}
/* Stores code in table[0], table[step], table[2*step], ..., table[end] */
/* Assumes that end is an integer multiple of step */
static BROTLI_INLINE void ReplicateValue(HuffmanCode* table,
int step, int end,
HuffmanCode code) {
do {
end -= step;
table[end] = code;
} while (end > 0);
}
/* Returns the table width of the next 2nd level table. count is the histogram
of bit lengths for the remaining symbols, len is the code length of the next
processed symbol */
static BROTLI_INLINE int NextTableBitSize(const uint16_t* const count,
int len, int root_bits) {
int left = 1 << (len - root_bits);
while (len < BROTLI_HUFFMAN_MAX_CODE_LENGTH) {
left -= count[len];
if (left <= 0) break;
++len;
left <<= 1;
}
return len - root_bits;
}
void BrotliBuildCodeLengthsHuffmanTable(HuffmanCode* table,
const uint8_t* const code_lengths,
uint16_t* count) {
HuffmanCode code; /* current table entry */
int symbol; /* symbol index in original or sorted table */
reg_t key; /* prefix code */
reg_t key_step; /* prefix code addend */
int step; /* step size to replicate values in current table */
int table_size; /* size of current table */
int sorted[BROTLI_CODE_LENGTH_CODES]; /* symbols sorted by code length */
/* offsets in sorted table for each length */
int offset[BROTLI_HUFFMAN_MAX_CODE_LENGTH_CODE_LENGTH + 1];
int bits;
int bits_count;
BROTLI_DCHECK(BROTLI_HUFFMAN_MAX_CODE_LENGTH_CODE_LENGTH <=
BROTLI_REVERSE_BITS_MAX);
/* generate offsets into sorted symbol table by code length */
symbol = -1;
bits = 1;
BROTLI_REPEAT(BROTLI_HUFFMAN_MAX_CODE_LENGTH_CODE_LENGTH, {
symbol += count[bits];
offset[bits] = symbol;
bits++;
});
/* Symbols with code length 0 are placed after all other symbols. */
offset[0] = BROTLI_CODE_LENGTH_CODES - 1;
/* sort symbols by length, by symbol order within each length */
symbol = BROTLI_CODE_LENGTH_CODES;
do {
BROTLI_REPEAT(6, {
symbol--;
sorted[offset[code_lengths[symbol]]--] = symbol;
});
} while (symbol != 0);
table_size = 1 << BROTLI_HUFFMAN_MAX_CODE_LENGTH_CODE_LENGTH;
/* Special case: all symbols but one have 0 code length. */
if (offset[0] == 0) {
code.bits = 0;
code.value = (uint16_t)sorted[0];
for (key = 0; key < (reg_t)table_size; ++key) {
table[key] = code;
}
return;
}
/* fill in table */
key = 0;
key_step = BROTLI_REVERSE_BITS_LOWEST;
symbol = 0;
bits = 1;
step = 2;
do {
code.bits = (uint8_t)bits;
for (bits_count = count[bits]; bits_count != 0; --bits_count) {
code.value = (uint16_t)sorted[symbol++];
ReplicateValue(&table[BrotliReverseBits(key)], step, table_size, code);
key += key_step;
}
step <<= 1;
key_step >>= 1;
} while (++bits <= BROTLI_HUFFMAN_MAX_CODE_LENGTH_CODE_LENGTH);
}
uint32_t BrotliBuildHuffmanTable(HuffmanCode* root_table,
int root_bits,
const uint16_t* const symbol_lists,
uint16_t* count) {
HuffmanCode code; /* current table entry */
HuffmanCode* table; /* next available space in table */
int len; /* current code length */
int symbol; /* symbol index in original or sorted table */
reg_t key; /* prefix code */
reg_t key_step; /* prefix code addend */
reg_t sub_key; /* 2nd level table prefix code */
reg_t sub_key_step; /* 2nd level table prefix code addend */
int step; /* step size to replicate values in current table */
int table_bits; /* key length of current table */
int table_size; /* size of current table */
int total_size; /* sum of root table size and 2nd level table sizes */
int max_length = -1;
int bits;
int bits_count;
BROTLI_DCHECK(root_bits <= BROTLI_REVERSE_BITS_MAX);
BROTLI_DCHECK(BROTLI_HUFFMAN_MAX_CODE_LENGTH - root_bits <=
BROTLI_REVERSE_BITS_MAX);
while (symbol_lists[max_length] == 0xFFFF) max_length--;
max_length += BROTLI_HUFFMAN_MAX_CODE_LENGTH + 1;
table = root_table;
table_bits = root_bits;
table_size = 1 << table_bits;
total_size = table_size;
/* fill in root table */
/* let's reduce the table size to a smaller size if possible, and */
/* create the repetitions by memcpy if possible in the coming loop */
if (table_bits > max_length) {
table_bits = max_length;
table_size = 1 << table_bits;
}
key = 0;
key_step = BROTLI_REVERSE_BITS_LOWEST;
bits = 1;
step = 2;
do {
code.bits = (uint8_t)bits;
symbol = bits - (BROTLI_HUFFMAN_MAX_CODE_LENGTH + 1);
for (bits_count = count[bits]; bits_count != 0; --bits_count) {
symbol = symbol_lists[symbol];
code.value = (uint16_t)symbol;
ReplicateValue(&table[BrotliReverseBits(key)], step, table_size, code);
key += key_step;
}
step <<= 1;
key_step >>= 1;
} while (++bits <= table_bits);
/* if root_bits != table_bits we only created one fraction of the */
/* table, and we need to replicate it now. */
while (total_size != table_size) {
memcpy(&table[table_size], &table[0],
(size_t)table_size * sizeof(table[0]));
table_size <<= 1;
}
/* fill in 2nd level tables and add pointers to root table */
key_step = BROTLI_REVERSE_BITS_LOWEST >> (root_bits - 1);
sub_key = (BROTLI_REVERSE_BITS_LOWEST << 1);
sub_key_step = BROTLI_REVERSE_BITS_LOWEST;
for (len = root_bits + 1, step = 2; len <= max_length; ++len) {
symbol = len - (BROTLI_HUFFMAN_MAX_CODE_LENGTH + 1);
for (; count[len] != 0; --count[len]) {
if (sub_key == (BROTLI_REVERSE_BITS_LOWEST << 1U)) {
table += table_size;
table_bits = NextTableBitSize(count, len, root_bits);
table_size = 1 << table_bits;
total_size += table_size;
sub_key = BrotliReverseBits(key);
key += key_step;
root_table[sub_key].bits = (uint8_t)(table_bits + root_bits);
root_table[sub_key].value =
(uint16_t)(((size_t)(table - root_table)) - sub_key);
sub_key = 0;
}
code.bits = (uint8_t)(len - root_bits);
symbol = symbol_lists[symbol];
code.value = (uint16_t)symbol;
ReplicateValue(
&table[BrotliReverseBits(sub_key)], step, table_size, code);
sub_key += sub_key_step;
}
step <<= 1;
sub_key_step >>= 1;
}
return (uint32_t)total_size;
}
uint32_t BrotliBuildSimpleHuffmanTable(HuffmanCode* table,
int root_bits,
uint16_t* val,
uint32_t num_symbols) {
uint32_t table_size = 1;
const uint32_t goal_size = 1U << root_bits;
switch (num_symbols) {
case 0:
table[0].bits = 0;
table[0].value = val[0];
break;
case 1:
table[0].bits = 1;
table[1].bits = 1;
if (val[1] > val[0]) {
table[0].value = val[0];
table[1].value = val[1];
} else {
table[0].value = val[1];
table[1].value = val[0];
}
table_size = 2;
break;
case 2:
table[0].bits = 1;
table[0].value = val[0];
table[2].bits = 1;
table[2].value = val[0];
if (val[2] > val[1]) {
table[1].value = val[1];
table[3].value = val[2];
} else {
table[1].value = val[2];
table[3].value = val[1];
}
table[1].bits = 2;
table[3].bits = 2;
table_size = 4;
break;
case 3: {
int i, k;
for (i = 0; i < 3; ++i) {
for (k = i + 1; k < 4; ++k) {
if (val[k] < val[i]) {
uint16_t t = val[k];
val[k] = val[i];
val[i] = t;
}
}
}
for (i = 0; i < 4; ++i) {
table[i].bits = 2;
}
table[0].value = val[0];
table[2].value = val[1];
table[1].value = val[2];
table[3].value = val[3];
table_size = 4;
break;
}
case 4: {
int i;
if (val[3] < val[2]) {
uint16_t t = val[3];
val[3] = val[2];
val[2] = t;
}
for (i = 0; i < 7; ++i) {
table[i].value = val[0];
table[i].bits = (uint8_t)(1 + (i & 1));
}
table[1].value = val[1];
table[3].value = val[2];
table[5].value = val[1];
table[7].value = val[3];
table[3].bits = 3;
table[7].bits = 3;
table_size = 8;
break;
}
}
while (table_size != goal_size) {
memcpy(&table[table_size], &table[0],
(size_t)table_size * sizeof(table[0]));
table_size <<= 1;
}
return goal_size;
}
#if defined(__cplusplus) || defined(c_plusplus)
} /* extern "C" */
#endif
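
ReplicateValue is the key trick of this table builder: a code shorter than the table's index width is duplicated into every slot whose low bits match it, so a lookup that reads extra bits still lands on the right entry. A direct Go transcription, assuming (as the C does) that the table size is a multiple of step:

type huffmanCode struct {
    bits  uint8  // code length, or root_bits plus sub-table bits for pointers
    value uint16 // decoded symbol, or offset of the second-level table
}

// replicateValue stores code at table[end-step], table[end-2*step], ..., table[0],
// exactly like the C helper above.
func replicateValue(table []huffmanCode, step int, code huffmanCode) {
    end := len(table)
    for {
        end -= step
        table[end] = code
        if end <= 0 {
            break
        }
    }
}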

View File

@@ -1,175 +0,0 @@
/* Copyright 2013 Google Inc. All Rights Reserved.
Distributed under MIT license.
See file LICENSE for detail or copy at https://opensource.org/licenses/MIT
*/
/* Literal cost model to allow backward reference replacement to be efficient.
*/
#include "./enc/literal_cost.h"
#include <brotli/types.h>
#include "./enc/fast_log.h"
#include "./enc/port.h"
#include "./enc/utf8_util.h"
#if defined(__cplusplus) || defined(c_plusplus)
extern "C" {
#endif
static size_t UTF8Position(size_t last, size_t c, size_t clamp) {
if (c < 128) {
return 0; /* Next one is the 'Byte 1' again. */
} else if (c >= 192) { /* Next one is the 'Byte 2' of utf-8 encoding. */
return BROTLI_MIN(size_t, 1, clamp);
} else {
/* Let's decide over the last byte if this ends the sequence. */
if (last < 0xe0) {
return 0; /* Completed two or three byte coding. */
} else { /* Next one is the 'Byte 3' of utf-8 encoding. */
return BROTLI_MIN(size_t, 2, clamp);
}
}
}
static size_t DecideMultiByteStatsLevel(size_t pos, size_t len, size_t mask,
const uint8_t *data) {
size_t counts[3] = { 0 };
size_t max_utf8 = 1; /* should be 2, but 1 compresses better. */
size_t last_c = 0;
size_t i;
for (i = 0; i < len; ++i) {
size_t c = data[(pos + i) & mask];
++counts[UTF8Position(last_c, c, 2)];
last_c = c;
}
if (counts[2] < 500) {
max_utf8 = 1;
}
if (counts[1] + counts[2] < 25) {
max_utf8 = 0;
}
return max_utf8;
}
static void EstimateBitCostsForLiteralsUTF8(size_t pos, size_t len, size_t mask,
const uint8_t *data, float *cost) {
/* max_utf8 is 0 (normal ASCII single byte modeling),
1 (for 2-byte UTF-8 modeling), or 2 (for 3-byte UTF-8 modeling). */
const size_t max_utf8 = DecideMultiByteStatsLevel(pos, len, mask, data);
size_t histogram[3][256] = { { 0 } };
size_t window_half = 495;
size_t in_window = BROTLI_MIN(size_t, window_half, len);
size_t in_window_utf8[3] = { 0 };
size_t i;
{ /* Bootstrap histograms. */
size_t last_c = 0;
size_t utf8_pos = 0;
for (i = 0; i < in_window; ++i) {
size_t c = data[(pos + i) & mask];
++histogram[utf8_pos][c];
++in_window_utf8[utf8_pos];
utf8_pos = UTF8Position(last_c, c, max_utf8);
last_c = c;
}
}
/* Compute bit costs with sliding window. */
for (i = 0; i < len; ++i) {
if (i >= window_half) {
/* Remove a byte in the past. */
size_t c =
i < window_half + 1 ? 0 : data[(pos + i - window_half - 1) & mask];
size_t last_c =
i < window_half + 2 ? 0 : data[(pos + i - window_half - 2) & mask];
size_t utf8_pos2 = UTF8Position(last_c, c, max_utf8);
--histogram[utf8_pos2][data[(pos + i - window_half) & mask]];
--in_window_utf8[utf8_pos2];
}
if (i + window_half < len) {
/* Add a byte in the future. */
size_t c = data[(pos + i + window_half - 1) & mask];
size_t last_c = data[(pos + i + window_half - 2) & mask];
size_t utf8_pos2 = UTF8Position(last_c, c, max_utf8);
++histogram[utf8_pos2][data[(pos + i + window_half) & mask]];
++in_window_utf8[utf8_pos2];
}
{
size_t c = i < 1 ? 0 : data[(pos + i - 1) & mask];
size_t last_c = i < 2 ? 0 : data[(pos + i - 2) & mask];
size_t utf8_pos = UTF8Position(last_c, c, max_utf8);
size_t masked_pos = (pos + i) & mask;
size_t histo = histogram[utf8_pos][data[masked_pos]];
double lit_cost;
if (histo == 0) {
histo = 1;
}
lit_cost = FastLog2(in_window_utf8[utf8_pos]) - FastLog2(histo);
lit_cost += 0.02905;
if (lit_cost < 1.0) {
lit_cost *= 0.5;
lit_cost += 0.5;
}
/* Make the first bytes more expensive -- seems to help, not sure why.
Perhaps because the entropy source is changing its properties
rapidly in the beginning of the file, perhaps because the beginning
of the data is a statistical "anomaly". */
if (i < 2000) {
lit_cost += 0.7 - ((double)(2000 - i) / 2000.0 * 0.35);
}
cost[i] = (float)lit_cost;
}
}
}
void BrotliEstimateBitCostsForLiterals(size_t pos, size_t len, size_t mask,
const uint8_t *data, float *cost) {
if (BrotliIsMostlyUTF8(data, pos, mask, len, kMinUTF8Ratio)) {
EstimateBitCostsForLiteralsUTF8(pos, len, mask, data, cost);
return;
} else {
size_t histogram[256] = { 0 };
size_t window_half = 2000;
size_t in_window = BROTLI_MIN(size_t, window_half, len);
/* Bootstrap histogram. */
size_t i;
for (i = 0; i < in_window; ++i) {
++histogram[data[(pos + i) & mask]];
}
/* Compute bit costs with sliding window. */
for (i = 0; i < len; ++i) {
size_t histo;
if (i >= window_half) {
/* Remove a byte in the past. */
--histogram[data[(pos + i - window_half) & mask]];
--in_window;
}
if (i + window_half < len) {
/* Add a byte in the future. */
++histogram[data[(pos + i + window_half) & mask]];
++in_window;
}
histo = histogram[data[(pos + i) & mask]];
if (histo == 0) {
histo = 1;
}
{
double lit_cost = FastLog2(in_window) - FastLog2(histo);
lit_cost += 0.029;
if (lit_cost < 1.0) {
lit_cost *= 0.5;
lit_cost += 0.5;
}
cost[i] = (float)lit_cost;
}
}
}
}
#if defined(__cplusplus) || defined(c_plusplus)
} /* extern "C" */
#endif
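
Both branches above boil down to the same per-byte estimate: the cost of a literal is log2(bytes in the window) minus log2(occurrences of that byte), with empty bins treated as a single occurrence. A minimal Go version of that core, leaving out the UTF-8 bucketing and the small correction constants:

import "math"

// literalBitCost is the heart of the estimator above: -log2 of the byte's
// empirical probability inside the sliding window, clamped so the cost
// stays finite when the byte has not been seen yet.
func literalBitCost(histogram *[256]uint32, inWindow uint32, b byte) float64 {
    count := histogram[b]
    if count == 0 {
        count = 1
    }
    return math.Log2(float64(inWindow)) - math.Log2(float64(count))
}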

View File

@@ -1,181 +0,0 @@
/* Copyright 2015 Google Inc. All Rights Reserved.
Distributed under MIT license.
See file LICENSE for detail or copy at https://opensource.org/licenses/MIT
*/
/* Memory management for the encoder: allocations are tracked so that they
can all be freed (or wiped out) after an out-of-memory condition. */
#include "./enc/memory.h"
#include <assert.h>
#include <stdlib.h> /* exit, free, malloc */
#include <string.h> /* memcpy */
#include <brotli/types.h>
#include "./enc/port.h"
#if defined(__cplusplus) || defined(c_plusplus)
extern "C" {
#endif
#define MAX_PERM_ALLOCATED 128
#define MAX_NEW_ALLOCATED 64
#define MAX_NEW_FREED 64
#define PERM_ALLOCATED_OFFSET 0
#define NEW_ALLOCATED_OFFSET MAX_PERM_ALLOCATED
#define NEW_FREED_OFFSET (MAX_PERM_ALLOCATED + MAX_NEW_ALLOCATED)
static void* DefaultAllocFunc(void* opaque, size_t size) {
BROTLI_UNUSED(opaque);
return malloc(size);
}
static void DefaultFreeFunc(void* opaque, void* address) {
BROTLI_UNUSED(opaque);
free(address);
}
void BrotliInitMemoryManager(
MemoryManager* m, brotli_alloc_func alloc_func, brotli_free_func free_func,
void* opaque) {
if (!alloc_func) {
m->alloc_func = DefaultAllocFunc;
m->free_func = DefaultFreeFunc;
m->opaque = 0;
} else {
m->alloc_func = alloc_func;
m->free_func = free_func;
m->opaque = opaque;
}
#if !defined(BROTLI_ENCODER_EXIT_ON_OOM)
m->is_oom = BROTLI_FALSE;
m->perm_allocated = 0;
m->new_allocated = 0;
m->new_freed = 0;
#endif /* BROTLI_ENCODER_EXIT_ON_OOM */
}
#if defined(BROTLI_ENCODER_EXIT_ON_OOM)
void* BrotliAllocate(MemoryManager* m, size_t n) {
void* result = m->alloc_func(m->opaque, n);
if (!result) exit(EXIT_FAILURE);
return result;
}
void BrotliFree(MemoryManager* m, void* p) {
m->free_func(m->opaque, p);
}
void BrotliWipeOutMemoryManager(MemoryManager* m) {
BROTLI_UNUSED(m);
}
#else /* BROTLI_ENCODER_EXIT_ON_OOM */
static void SortPointers(void** items, const size_t n) {
/* Shell sort. */
static const size_t gaps[] = {23, 10, 4, 1};
int g = 0;
for (; g < 4; ++g) {
size_t gap = gaps[g];
size_t i;
for (i = gap; i < n; ++i) {
size_t j = i;
void* tmp = items[i];
for (; j >= gap && tmp < items[j - gap]; j -= gap) {
items[j] = items[j - gap];
}
items[j] = tmp;
}
}
}
static size_t Annihilate(void** a, size_t a_len, void** b, size_t b_len) {
size_t a_read_index = 0;
size_t b_read_index = 0;
size_t a_write_index = 0;
size_t b_write_index = 0;
size_t annihilated = 0;
while (a_read_index < a_len && b_read_index < b_len) {
if (a[a_read_index] == b[b_read_index]) {
a_read_index++;
b_read_index++;
annihilated++;
} else if (a[a_read_index] < b[b_read_index]) {
a[a_write_index++] = a[a_read_index++];
} else {
b[b_write_index++] = b[b_read_index++];
}
}
while (a_read_index < a_len) a[a_write_index++] = a[a_read_index++];
while (b_read_index < b_len) b[b_write_index++] = b[b_read_index++];
return annihilated;
}
static void CollectGarbagePointers(MemoryManager* m) {
size_t annihilated;
SortPointers(m->pointers + NEW_ALLOCATED_OFFSET, m->new_allocated);
SortPointers(m->pointers + NEW_FREED_OFFSET, m->new_freed);
annihilated = Annihilate(
m->pointers + NEW_ALLOCATED_OFFSET, m->new_allocated,
m->pointers + NEW_FREED_OFFSET, m->new_freed);
m->new_allocated -= annihilated;
m->new_freed -= annihilated;
if (m->new_freed != 0) {
annihilated = Annihilate(
m->pointers + PERM_ALLOCATED_OFFSET, m->perm_allocated,
m->pointers + NEW_FREED_OFFSET, m->new_freed);
m->perm_allocated -= annihilated;
m->new_freed -= annihilated;
assert(m->new_freed == 0);
}
if (m->new_allocated != 0) {
assert(m->perm_allocated + m->new_allocated <= MAX_PERM_ALLOCATED);
memcpy(m->pointers + PERM_ALLOCATED_OFFSET + m->perm_allocated,
m->pointers + NEW_ALLOCATED_OFFSET,
sizeof(void*) * m->new_allocated);
m->perm_allocated += m->new_allocated;
m->new_allocated = 0;
SortPointers(m->pointers + PERM_ALLOCATED_OFFSET, m->perm_allocated);
}
}
void* BrotliAllocate(MemoryManager* m, size_t n) {
void* result = m->alloc_func(m->opaque, n);
if (!result) {
m->is_oom = BROTLI_TRUE;
return NULL;
}
if (m->new_allocated == MAX_NEW_ALLOCATED) CollectGarbagePointers(m);
m->pointers[NEW_ALLOCATED_OFFSET + (m->new_allocated++)] = result;
return result;
}
void BrotliFree(MemoryManager* m, void* p) {
if (!p) return;
m->free_func(m->opaque, p);
if (m->new_freed == MAX_NEW_FREED) CollectGarbagePointers(m);
m->pointers[NEW_FREED_OFFSET + (m->new_freed++)] = p;
}
void BrotliWipeOutMemoryManager(MemoryManager* m) {
size_t i;
CollectGarbagePointers(m);
/* Now all unfreed pointers are in perm-allocated list. */
for (i = 0; i < m->perm_allocated; ++i) {
m->free_func(m->opaque, m->pointers[PERM_ALLOCATED_OFFSET + i]);
}
m->perm_allocated = 0;
}
#endif /* BROTLI_ENCODER_EXIT_ON_OOM */
#if defined(__cplusplus) || defined(c_plusplus)
} /* extern "C" */
#endif
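
Annihilate is a single merge pass over two sorted pointer lists that drops every pointer present in both and compacts the survivors in place; that is how freed allocations cancel out of the tracked set. A Go sketch of the same merge over sorted slices:

// annihilate mirrors the C helper: it walks both sorted slices once, cancels
// equal elements, compacts the rest in place, and reports the new lengths
// together with how many pairs cancelled.
func annihilate(a, b []uintptr) (aLen, bLen, cancelled int) {
    var ar, br, aw, bw int
    for ar < len(a) && br < len(b) {
        switch {
        case a[ar] == b[br]:
            ar++
            br++
            cancelled++
        case a[ar] < b[br]:
            a[aw] = a[ar]
            aw++
            ar++
        default:
            b[bw] = b[br]
            bw++
            br++
        }
    }
    for ar < len(a) {
        a[aw] = a[ar]
        aw++
        ar++
    }
    for br < len(b) {
        b[bw] = b[br]
        bw++
        br++
    }
    return aw, bw, cancelled
}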

View File

@@ -1,528 +0,0 @@
/* Copyright 2015 Google Inc. All Rights Reserved.
Distributed under MIT license.
See file LICENSE for detail or copy at https://opensource.org/licenses/MIT
*/
/* Algorithms for distributing the literals and commands of a metablock between
block types and contexts. */
#include "./enc/metablock.h"
#include "./common/constants.h"
#include <brotli/types.h>
#include "./enc/bit_cost.h"
#include "./enc/block_splitter.h"
#include "./enc/cluster.h"
#include "./enc/context.h"
#include "./enc/entropy_encode.h"
#include "./enc/histogram.h"
#include "./enc/memory.h"
#include "./enc/port.h"
#include "./enc/quality.h"
#if defined(__cplusplus) || defined(c_plusplus)
extern "C" {
#endif
void BrotliBuildMetaBlock(MemoryManager* m,
const uint8_t* ringbuffer,
const size_t pos,
const size_t mask,
const BrotliEncoderParams* params,
uint8_t prev_byte,
uint8_t prev_byte2,
const Command* cmds,
size_t num_commands,
ContextType literal_context_mode,
MetaBlockSplit* mb) {
/* Histogram ids need to fit in one byte. */
static const size_t kMaxNumberOfHistograms = 256;
HistogramDistance* distance_histograms;
HistogramLiteral* literal_histograms;
ContextType* literal_context_modes = NULL;
size_t literal_histograms_size;
size_t distance_histograms_size;
size_t i;
size_t literal_context_multiplier = 1;
BrotliSplitBlock(m, cmds, num_commands,
ringbuffer, pos, mask, params,
&mb->literal_split,
&mb->command_split,
&mb->distance_split);
if (BROTLI_IS_OOM(m)) return;
if (!params->disable_literal_context_modeling) {
literal_context_multiplier = 1 << BROTLI_LITERAL_CONTEXT_BITS;
literal_context_modes =
BROTLI_ALLOC(m, ContextType, mb->literal_split.num_types);
if (BROTLI_IS_OOM(m)) return;
for (i = 0; i < mb->literal_split.num_types; ++i) {
literal_context_modes[i] = literal_context_mode;
}
}
literal_histograms_size =
mb->literal_split.num_types * literal_context_multiplier;
literal_histograms =
BROTLI_ALLOC(m, HistogramLiteral, literal_histograms_size);
if (BROTLI_IS_OOM(m)) return;
ClearHistogramsLiteral(literal_histograms, literal_histograms_size);
distance_histograms_size =
mb->distance_split.num_types << BROTLI_DISTANCE_CONTEXT_BITS;
distance_histograms =
BROTLI_ALLOC(m, HistogramDistance, distance_histograms_size);
if (BROTLI_IS_OOM(m)) return;
ClearHistogramsDistance(distance_histograms, distance_histograms_size);
assert(mb->command_histograms == 0);
mb->command_histograms_size = mb->command_split.num_types;
mb->command_histograms =
BROTLI_ALLOC(m, HistogramCommand, mb->command_histograms_size);
if (BROTLI_IS_OOM(m)) return;
ClearHistogramsCommand(mb->command_histograms, mb->command_histograms_size);
BrotliBuildHistogramsWithContext(cmds, num_commands,
&mb->literal_split, &mb->command_split, &mb->distance_split,
ringbuffer, pos, mask, prev_byte, prev_byte2, literal_context_modes,
literal_histograms, mb->command_histograms, distance_histograms);
BROTLI_FREE(m, literal_context_modes);
assert(mb->literal_context_map == 0);
mb->literal_context_map_size =
mb->literal_split.num_types << BROTLI_LITERAL_CONTEXT_BITS;
mb->literal_context_map =
BROTLI_ALLOC(m, uint32_t, mb->literal_context_map_size);
if (BROTLI_IS_OOM(m)) return;
assert(mb->literal_histograms == 0);
mb->literal_histograms_size = mb->literal_context_map_size;
mb->literal_histograms =
BROTLI_ALLOC(m, HistogramLiteral, mb->literal_histograms_size);
if (BROTLI_IS_OOM(m)) return;
BrotliClusterHistogramsLiteral(m, literal_histograms, literal_histograms_size,
kMaxNumberOfHistograms, mb->literal_histograms,
&mb->literal_histograms_size, mb->literal_context_map);
if (BROTLI_IS_OOM(m)) return;
BROTLI_FREE(m, literal_histograms);
if (params->disable_literal_context_modeling) {
/* Distribute assignment to all contexts. */
for (i = mb->literal_split.num_types; i != 0;) {
size_t j = 0;
i--;
for (; j < (1 << BROTLI_LITERAL_CONTEXT_BITS); j++) {
mb->literal_context_map[(i << BROTLI_LITERAL_CONTEXT_BITS) + j] =
mb->literal_context_map[i];
}
}
}
assert(mb->distance_context_map == 0);
mb->distance_context_map_size =
mb->distance_split.num_types << BROTLI_DISTANCE_CONTEXT_BITS;
mb->distance_context_map =
BROTLI_ALLOC(m, uint32_t, mb->distance_context_map_size);
if (BROTLI_IS_OOM(m)) return;
assert(mb->distance_histograms == 0);
mb->distance_histograms_size = mb->distance_context_map_size;
mb->distance_histograms =
BROTLI_ALLOC(m, HistogramDistance, mb->distance_histograms_size);
if (BROTLI_IS_OOM(m)) return;
BrotliClusterHistogramsDistance(m, distance_histograms,
mb->distance_context_map_size,
kMaxNumberOfHistograms,
mb->distance_histograms,
&mb->distance_histograms_size,
mb->distance_context_map);
if (BROTLI_IS_OOM(m)) return;
BROTLI_FREE(m, distance_histograms);
}
#define FN(X) X ## Literal
#include "./enc/metablock_inc.h" /* NOLINT(build/include) */
#undef FN
#define FN(X) X ## Command
#include "./enc/metablock_inc.h" /* NOLINT(build/include) */
#undef FN
#define FN(X) X ## Distance
#include "./enc/metablock_inc.h" /* NOLINT(build/include) */
#undef FN
#define BROTLI_MAX_STATIC_CONTEXTS 13
/* Greedy block splitter for one block category (literal, command or distance).
Gathers histograms for all context buckets. */
typedef struct ContextBlockSplitter {
/* Alphabet size of particular block category. */
size_t alphabet_size_;
size_t num_contexts_;
size_t max_block_types_;
/* We collect at least this many symbols for each block. */
size_t min_block_size_;
/* We merge histograms A and B if
entropy(A+B) < entropy(A) + entropy(B) + split_threshold_,
where A is the current histogram and B is the histogram of the last or the
second last block type. */
double split_threshold_;
size_t num_blocks_;
BlockSplit* split_; /* not owned */
HistogramLiteral* histograms_; /* not owned */
size_t* histograms_size_; /* not owned */
/* The number of symbols that we want to collect before deciding on whether
or not to merge the block with a previous one or emit a new block. */
size_t target_block_size_;
/* The number of symbols in the current histogram. */
size_t block_size_;
/* Offset of the current histogram. */
size_t curr_histogram_ix_;
/* Offset of the histograms of the previous two block types. */
size_t last_histogram_ix_[2];
/* Entropy of the previous two block types. */
double last_entropy_[2 * BROTLI_MAX_STATIC_CONTEXTS];
/* The number of times we merged the current block with the last one. */
size_t merge_last_count_;
} ContextBlockSplitter;
static void InitContextBlockSplitter(
MemoryManager* m, ContextBlockSplitter* self, size_t alphabet_size,
size_t num_contexts, size_t min_block_size, double split_threshold,
size_t num_symbols, BlockSplit* split, HistogramLiteral** histograms,
size_t* histograms_size) {
size_t max_num_blocks = num_symbols / min_block_size + 1;
size_t max_num_types;
assert(num_contexts <= BROTLI_MAX_STATIC_CONTEXTS);
self->alphabet_size_ = alphabet_size;
self->num_contexts_ = num_contexts;
self->max_block_types_ = BROTLI_MAX_NUMBER_OF_BLOCK_TYPES / num_contexts;
self->min_block_size_ = min_block_size;
self->split_threshold_ = split_threshold;
self->num_blocks_ = 0;
self->split_ = split;
self->histograms_size_ = histograms_size;
self->target_block_size_ = min_block_size;
self->block_size_ = 0;
self->curr_histogram_ix_ = 0;
self->merge_last_count_ = 0;
/* We have to allocate one more histogram than the maximum number of block
types for the current histogram when the meta-block is too big. */
max_num_types =
BROTLI_MIN(size_t, max_num_blocks, self->max_block_types_ + 1);
BROTLI_ENSURE_CAPACITY(m, uint8_t,
split->types, split->types_alloc_size, max_num_blocks);
BROTLI_ENSURE_CAPACITY(m, uint32_t,
split->lengths, split->lengths_alloc_size, max_num_blocks);
if (BROTLI_IS_OOM(m)) return;
split->num_blocks = max_num_blocks;
if (BROTLI_IS_OOM(m)) return;
assert(*histograms == 0);
*histograms_size = max_num_types * num_contexts;
*histograms = BROTLI_ALLOC(m, HistogramLiteral, *histograms_size);
self->histograms_ = *histograms;
if (BROTLI_IS_OOM(m)) return;
/* Clear only current histogram. */
ClearHistogramsLiteral(&self->histograms_[0], num_contexts);
self->last_histogram_ix_[0] = self->last_histogram_ix_[1] = 0;
}
/* Does one of three things:
(1) emits the current block with a new block type;
(2) emits the current block with the type of the second last block;
(3) merges the current block with the last block. */
static void ContextBlockSplitterFinishBlock(
ContextBlockSplitter* self, MemoryManager* m, BROTLI_BOOL is_final) {
BlockSplit* split = self->split_;
const size_t num_contexts = self->num_contexts_;
double* last_entropy = self->last_entropy_;
HistogramLiteral* histograms = self->histograms_;
if (self->block_size_ < self->min_block_size_) {
self->block_size_ = self->min_block_size_;
}
if (self->num_blocks_ == 0) {
size_t i;
/* Create first block. */
split->lengths[0] = (uint32_t)self->block_size_;
split->types[0] = 0;
for (i = 0; i < num_contexts; ++i) {
last_entropy[i] =
BitsEntropy(histograms[i].data_, self->alphabet_size_);
last_entropy[num_contexts + i] = last_entropy[i];
}
++self->num_blocks_;
++split->num_types;
self->curr_histogram_ix_ += num_contexts;
if (self->curr_histogram_ix_ < *self->histograms_size_) {
ClearHistogramsLiteral(
&self->histograms_[self->curr_histogram_ix_], self->num_contexts_);
}
self->block_size_ = 0;
} else if (self->block_size_ > 0) {
/* Try merging the set of histograms for the current block type with the
respective set of histograms for the last and second last block types.
Decide over the split based on the total reduction of entropy across
all contexts. */
double entropy[BROTLI_MAX_STATIC_CONTEXTS];
HistogramLiteral* combined_histo =
BROTLI_ALLOC(m, HistogramLiteral, 2 * num_contexts);
double combined_entropy[2 * BROTLI_MAX_STATIC_CONTEXTS];
double diff[2] = { 0.0 };
size_t i;
if (BROTLI_IS_OOM(m)) return;
for (i = 0; i < num_contexts; ++i) {
size_t curr_histo_ix = self->curr_histogram_ix_ + i;
size_t j;
entropy[i] = BitsEntropy(histograms[curr_histo_ix].data_,
self->alphabet_size_);
for (j = 0; j < 2; ++j) {
size_t jx = j * num_contexts + i;
size_t last_histogram_ix = self->last_histogram_ix_[j] + i;
combined_histo[jx] = histograms[curr_histo_ix];
HistogramAddHistogramLiteral(&combined_histo[jx],
&histograms[last_histogram_ix]);
combined_entropy[jx] = BitsEntropy(
&combined_histo[jx].data_[0], self->alphabet_size_);
diff[j] += combined_entropy[jx] - entropy[i] - last_entropy[jx];
}
}
if (split->num_types < self->max_block_types_ &&
diff[0] > self->split_threshold_ &&
diff[1] > self->split_threshold_) {
/* Create new block. */
split->lengths[self->num_blocks_] = (uint32_t)self->block_size_;
split->types[self->num_blocks_] = (uint8_t)split->num_types;
self->last_histogram_ix_[1] = self->last_histogram_ix_[0];
self->last_histogram_ix_[0] = split->num_types * num_contexts;
for (i = 0; i < num_contexts; ++i) {
last_entropy[num_contexts + i] = last_entropy[i];
last_entropy[i] = entropy[i];
}
++self->num_blocks_;
++split->num_types;
self->curr_histogram_ix_ += num_contexts;
if (self->curr_histogram_ix_ < *self->histograms_size_) {
ClearHistogramsLiteral(
&self->histograms_[self->curr_histogram_ix_], self->num_contexts_);
}
self->block_size_ = 0;
self->merge_last_count_ = 0;
self->target_block_size_ = self->min_block_size_;
} else if (diff[1] < diff[0] - 20.0) {
/* Combine this block with second last block. */
split->lengths[self->num_blocks_] = (uint32_t)self->block_size_;
split->types[self->num_blocks_] = split->types[self->num_blocks_ - 2];
BROTLI_SWAP(size_t, self->last_histogram_ix_, 0, 1);
for (i = 0; i < num_contexts; ++i) {
histograms[self->last_histogram_ix_[0] + i] =
combined_histo[num_contexts + i];
last_entropy[num_contexts + i] = last_entropy[i];
last_entropy[i] = combined_entropy[num_contexts + i];
HistogramClearLiteral(&histograms[self->curr_histogram_ix_ + i]);
}
++self->num_blocks_;
self->block_size_ = 0;
self->merge_last_count_ = 0;
self->target_block_size_ = self->min_block_size_;
} else {
/* Combine this block with last block. */
split->lengths[self->num_blocks_ - 1] += (uint32_t)self->block_size_;
for (i = 0; i < num_contexts; ++i) {
histograms[self->last_histogram_ix_[0] + i] = combined_histo[i];
last_entropy[i] = combined_entropy[i];
if (split->num_types == 1) {
last_entropy[num_contexts + i] = last_entropy[i];
}
HistogramClearLiteral(&histograms[self->curr_histogram_ix_ + i]);
}
self->block_size_ = 0;
if (++self->merge_last_count_ > 1) {
self->target_block_size_ += self->min_block_size_;
}
}
BROTLI_FREE(m, combined_histo);
}
if (is_final) {
*self->histograms_size_ = split->num_types * num_contexts;
split->num_blocks = self->num_blocks_;
}
}
/* Adds the next symbol to the current block type and context. When the
current block reaches the target size, decides on merging the block. */
static void ContextBlockSplitterAddSymbol(
ContextBlockSplitter* self, MemoryManager* m,
size_t symbol, size_t context) {
HistogramAddLiteral(&self->histograms_[self->curr_histogram_ix_ + context],
symbol);
++self->block_size_;
if (self->block_size_ == self->target_block_size_) {
ContextBlockSplitterFinishBlock(self, m, /* is_final = */ BROTLI_FALSE);
if (BROTLI_IS_OOM(m)) return;
}
}
static void MapStaticContexts(MemoryManager* m,
size_t num_contexts,
const uint32_t* static_context_map,
MetaBlockSplit* mb) {
size_t i;
assert(mb->literal_context_map == 0);
mb->literal_context_map_size =
mb->literal_split.num_types << BROTLI_LITERAL_CONTEXT_BITS;
mb->literal_context_map =
BROTLI_ALLOC(m, uint32_t, mb->literal_context_map_size);
if (BROTLI_IS_OOM(m)) return;
for (i = 0; i < mb->literal_split.num_types; ++i) {
uint32_t offset = (uint32_t)(i * num_contexts);
size_t j;
for (j = 0; j < (1u << BROTLI_LITERAL_CONTEXT_BITS); ++j) {
mb->literal_context_map[(i << BROTLI_LITERAL_CONTEXT_BITS) + j] =
offset + static_context_map[j];
}
}
}
static BROTLI_INLINE void BrotliBuildMetaBlockGreedyInternal(
MemoryManager* m, const uint8_t* ringbuffer, size_t pos, size_t mask,
uint8_t prev_byte, uint8_t prev_byte2, ContextType literal_context_mode,
const size_t num_contexts, const uint32_t* static_context_map,
const Command *commands, size_t n_commands, MetaBlockSplit* mb) {
union {
BlockSplitterLiteral plain;
ContextBlockSplitter ctx;
} lit_blocks;
BlockSplitterCommand cmd_blocks;
BlockSplitterDistance dist_blocks;
size_t num_literals = 0;
size_t i;
for (i = 0; i < n_commands; ++i) {
num_literals += commands[i].insert_len_;
}
if (num_contexts == 1) {
InitBlockSplitterLiteral(m, &lit_blocks.plain, 256, 512, 400.0,
num_literals, &mb->literal_split, &mb->literal_histograms,
&mb->literal_histograms_size);
} else {
InitContextBlockSplitter(m, &lit_blocks.ctx, 256, num_contexts, 512, 400.0,
num_literals, &mb->literal_split, &mb->literal_histograms,
&mb->literal_histograms_size);
}
if (BROTLI_IS_OOM(m)) return;
InitBlockSplitterCommand(m, &cmd_blocks, BROTLI_NUM_COMMAND_SYMBOLS, 1024,
500.0, n_commands, &mb->command_split, &mb->command_histograms,
&mb->command_histograms_size);
if (BROTLI_IS_OOM(m)) return;
InitBlockSplitterDistance(m, &dist_blocks, 64, 512, 100.0, n_commands,
&mb->distance_split, &mb->distance_histograms,
&mb->distance_histograms_size);
if (BROTLI_IS_OOM(m)) return;
for (i = 0; i < n_commands; ++i) {
const Command cmd = commands[i];
size_t j;
BlockSplitterAddSymbolCommand(&cmd_blocks, cmd.cmd_prefix_);
for (j = cmd.insert_len_; j != 0; --j) {
uint8_t literal = ringbuffer[pos & mask];
if (num_contexts == 1) {
BlockSplitterAddSymbolLiteral(&lit_blocks.plain, literal);
} else {
size_t context = Context(prev_byte, prev_byte2, literal_context_mode);
ContextBlockSplitterAddSymbol(&lit_blocks.ctx, m, literal,
static_context_map[context]);
if (BROTLI_IS_OOM(m)) return;
}
prev_byte2 = prev_byte;
prev_byte = literal;
++pos;
}
pos += CommandCopyLen(&cmd);
if (CommandCopyLen(&cmd)) {
prev_byte2 = ringbuffer[(pos - 2) & mask];
prev_byte = ringbuffer[(pos - 1) & mask];
if (cmd.cmd_prefix_ >= 128) {
BlockSplitterAddSymbolDistance(&dist_blocks, cmd.dist_prefix_);
}
}
}
if (num_contexts == 1) {
BlockSplitterFinishBlockLiteral(
&lit_blocks.plain, /* is_final = */ BROTLI_TRUE);
} else {
ContextBlockSplitterFinishBlock(
&lit_blocks.ctx, m, /* is_final = */ BROTLI_TRUE);
if (BROTLI_IS_OOM(m)) return;
}
BlockSplitterFinishBlockCommand(&cmd_blocks, /* is_final = */ BROTLI_TRUE);
BlockSplitterFinishBlockDistance(&dist_blocks, /* is_final = */ BROTLI_TRUE);
if (num_contexts > 1) {
MapStaticContexts(m, num_contexts, static_context_map, mb);
}
}
void BrotliBuildMetaBlockGreedy(MemoryManager* m,
const uint8_t* ringbuffer,
size_t pos,
size_t mask,
uint8_t prev_byte,
uint8_t prev_byte2,
ContextType literal_context_mode,
size_t num_contexts,
const uint32_t* static_context_map,
const Command* commands,
size_t n_commands,
MetaBlockSplit* mb) {
if (num_contexts == 1) {
BrotliBuildMetaBlockGreedyInternal(m, ringbuffer, pos, mask, prev_byte,
prev_byte2, literal_context_mode, 1, NULL, commands, n_commands, mb);
} else {
BrotliBuildMetaBlockGreedyInternal(m, ringbuffer, pos, mask, prev_byte,
prev_byte2, literal_context_mode, num_contexts, static_context_map,
commands, n_commands, mb);
}
}
void BrotliOptimizeHistograms(size_t num_direct_distance_codes,
size_t distance_postfix_bits,
MetaBlockSplit* mb) {
uint8_t good_for_rle[BROTLI_NUM_COMMAND_SYMBOLS];
size_t num_distance_codes;
size_t i;
for (i = 0; i < mb->literal_histograms_size; ++i) {
BrotliOptimizeHuffmanCountsForRle(256, mb->literal_histograms[i].data_,
good_for_rle);
}
for (i = 0; i < mb->command_histograms_size; ++i) {
BrotliOptimizeHuffmanCountsForRle(BROTLI_NUM_COMMAND_SYMBOLS,
mb->command_histograms[i].data_,
good_for_rle);
}
num_distance_codes = BROTLI_NUM_DISTANCE_SHORT_CODES +
num_direct_distance_codes +
((2 * BROTLI_MAX_DISTANCE_BITS) << distance_postfix_bits);
for (i = 0; i < mb->distance_histograms_size; ++i) {
BrotliOptimizeHuffmanCountsForRle(num_distance_codes,
mb->distance_histograms[i].data_,
good_for_rle);
}
}
#if defined(__cplusplus) || defined(c_plusplus)
} /* extern "C" */
#endif
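
Not part of the diff: a short Go sketch of the merge heuristic described in the ContextBlockSplitter comment above, where the current block is merged into a previous one only when combining their histograms raises total entropy by no more than split_threshold_. The histogram representation and entropy function below are simplified stand-ins, not the brotli implementations.

```go
package main

import (
	"fmt"
	"math"
)

// bitsEntropy is a simplified stand-in for brotli's BitsEntropy: the Shannon
// entropy of a symbol histogram in bits, scaled by the total symbol count.
func bitsEntropy(counts []uint32) float64 {
	var total, h float64
	for _, c := range counts {
		total += float64(c)
	}
	if total == 0 {
		return 0
	}
	for _, c := range counts {
		if c == 0 {
			continue
		}
		h -= float64(c) * math.Log2(float64(c)/total)
	}
	return h
}

// shouldMerge applies the rule quoted in the splitter comment: merge block A
// into a previous block B when entropy(A+B) < entropy(A) + entropy(B) + threshold.
func shouldMerge(a, b []uint32, threshold float64) bool {
	combined := make([]uint32, len(a))
	for i := range a {
		combined[i] = a[i] + b[i]
	}
	return bitsEntropy(combined) < bitsEntropy(a)+bitsEntropy(b)+threshold
}

func main() {
	a := []uint32{10, 0, 2}
	b := []uint32{9, 1, 3}
	fmt.Println(shouldMerge(a, b, 400.0))
}
```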

View File

@ -1,168 +0,0 @@
// Copyright 2016 Google Inc. All Rights Reserved.
//
// Distributed under MIT license.
// See file LICENSE for detail or copy at https://opensource.org/licenses/MIT
// Package brotli compresses and decompresses data with the C Brotli library.
package brotli
/*
#include <stddef.h>
#include <stdint.h>
#include "brotli/decode.h"
static BrotliDecoderResult DecompressStream(BrotliDecoderState* s,
uint8_t* out, size_t out_len,
const uint8_t* in, size_t in_len,
size_t* bytes_written,
size_t* bytes_consumed) {
size_t in_remaining = in_len;
size_t out_remaining = out_len;
BrotliDecoderResult result = BrotliDecoderDecompressStream(
s, &in_remaining, &in, &out_remaining, &out, NULL);
*bytes_written = out_len - out_remaining;
*bytes_consumed = in_len - in_remaining;
return result;
}
*/
import "C"
import (
"bytes"
"errors"
"io"
"io/ioutil"
)
type decodeError C.BrotliDecoderErrorCode
func (err decodeError) Error() string {
return "cbrotli: " +
C.GoString(C.BrotliDecoderErrorString(C.BrotliDecoderErrorCode(err)))
}
var errExcessiveInput = errors.New("cbrotli: excessive input")
var errInvalidState = errors.New("cbrotli: invalid state")
var errReaderClosed = errors.New("cbrotli: Reader is closed")
// Reader implements io.ReadCloser by reading Brotli-encoded data from an
// underlying Reader.
type Reader struct {
src io.Reader
state *C.BrotliDecoderState
buf []byte // scratch space for reading from src
in []byte // current chunk to decode; usually aliases buf
}
// readBufSize is a "good" buffer size that avoids excessive round-trips
// between C and Go but doesn't waste too much memory on buffering.
// It is arbitrarily chosen to be equal to the constant used in io.Copy.
const readBufSize = 32 * 1024
// NewReader initializes new Reader instance.
// Close MUST be called to free resources.
func NewReader(src io.Reader) *Reader {
return &Reader{
src: src,
state: C.BrotliDecoderCreateInstance(nil, nil, nil),
buf: make([]byte, readBufSize),
}
}
func (r *Reader) SetDictionary(p []byte) {
var data *C.uint8_t
if len(p) != 0 {
data = (*C.uint8_t)(&p[0])
}
C.BrotliDecoderSetCustomDictionary(r.state, C.size_t(len(p)), data)
}
// Close implements io.Closer. Close MUST be invoked to free native resources.
func (r *Reader) Close() error {
if r.state == nil {
return errReaderClosed
}
// Close despite the state; i.e. there might be some unread decoded data.
C.BrotliDecoderDestroyInstance(r.state)
r.state = nil
return nil
}
func (r *Reader) Read(p []byte) (n int, err error) {
if int(C.BrotliDecoderHasMoreOutput(r.state)) == 0 && len(r.in) == 0 {
m, readErr := r.src.Read(r.buf)
if m == 0 {
// If readErr is `nil`, we just proxy underlying stream behavior.
return 0, readErr
}
r.in = r.buf[:m]
}
if len(p) == 0 {
return 0, nil
}
for {
var written, consumed C.size_t
var data *C.uint8_t
if len(r.in) != 0 {
data = (*C.uint8_t)(&r.in[0])
}
result := C.DecompressStream(r.state,
(*C.uint8_t)(&p[0]), C.size_t(len(p)),
data, C.size_t(len(r.in)),
&written, &consumed)
r.in = r.in[int(consumed):]
n = int(written)
switch result {
case C.BROTLI_DECODER_RESULT_SUCCESS:
if len(r.in) > 0 {
return n, errExcessiveInput
}
return n, nil
case C.BROTLI_DECODER_RESULT_ERROR:
return n, decodeError(C.BrotliDecoderGetErrorCode(r.state))
case C.BROTLI_DECODER_RESULT_NEEDS_MORE_OUTPUT:
if n == 0 {
return 0, io.ErrShortBuffer
}
return n, nil
case C.BROTLI_DECODER_NEEDS_MORE_INPUT:
}
if len(r.in) != 0 {
return 0, errInvalidState
}
// Calling r.src.Read may block. Don't block if we have data to return.
if n > 0 {
return n, nil
}
// Top off the buffer.
encN, err := r.src.Read(r.buf)
if encN == 0 {
// Not enough data to complete decoding.
if err == io.EOF {
return 0, io.ErrUnexpectedEOF
}
return 0, err
}
r.in = r.buf[:encN]
}
}
// Decode decodes Brotli encoded data.
func Decode(encodedData []byte) ([]byte, error) {
r := &Reader{
src: bytes.NewReader(nil),
state: C.BrotliDecoderCreateInstance(nil, nil, nil),
buf: make([]byte, 4), // arbitrarily small but nonzero so that r.src.Read returns io.EOF
in: encodedData,
}
defer r.Close()
return ioutil.ReadAll(r)
}
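
Not part of the diff: a minimal sketch of how the streaming Reader above was consumed, assuming the vendored import path github.com/cloudflare/brotli-go that this commit removes. The input file name is hypothetical and used only for illustration.

```go
package main

import (
	"io"
	"os"

	brotli "github.com/cloudflare/brotli-go"
)

// copyDecoded decompresses a Brotli stream from src into dst using the
// cgo-backed Reader shown above.
func copyDecoded(dst io.Writer, src io.Reader) (int64, error) {
	r := brotli.NewReader(src)
	// Close must always be called so the native decoder state is freed.
	defer r.Close()
	return io.Copy(dst, r)
}

func main() {
	// "payload.br" is a hypothetical pre-compressed file.
	f, err := os.Open("payload.br")
	if err != nil {
		panic(err)
	}
	defer f.Close()
	if _, err := copyDecoded(os.Stdout, f); err != nil {
		panic(err)
	}
}
```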

View File

@ -1,175 +0,0 @@
/* Copyright 2015 Google Inc. All Rights Reserved.
Distributed under MIT license.
See file LICENSE for detail or copy at https://opensource.org/licenses/MIT
*/
#include "./dec/state.h"
#include <stdlib.h> /* free, malloc */
#include <brotli/types.h>
#include "./dec/huffman.h"
#if defined(__cplusplus) || defined(c_plusplus)
extern "C" {
#endif
static void* DefaultAllocFunc(void* opaque, size_t size) {
BROTLI_UNUSED(opaque);
return malloc(size);
}
static void DefaultFreeFunc(void* opaque, void* address) {
BROTLI_UNUSED(opaque);
free(address);
}
void BrotliDecoderStateInit(BrotliDecoderState* s) {
BrotliDecoderStateInitWithCustomAllocators(s, 0, 0, 0);
}
void BrotliDecoderStateInitWithCustomAllocators(BrotliDecoderState* s,
brotli_alloc_func alloc_func, brotli_free_func free_func, void* opaque) {
if (!alloc_func) {
s->alloc_func = DefaultAllocFunc;
s->free_func = DefaultFreeFunc;
s->memory_manager_opaque = 0;
} else {
s->alloc_func = alloc_func;
s->free_func = free_func;
s->memory_manager_opaque = opaque;
}
s->error_code = 0; /* BROTLI_DECODER_NO_ERROR */
BrotliInitBitReader(&s->br);
s->state = BROTLI_STATE_UNINITED;
s->substate_metablock_header = BROTLI_STATE_METABLOCK_HEADER_NONE;
s->substate_tree_group = BROTLI_STATE_TREE_GROUP_NONE;
s->substate_context_map = BROTLI_STATE_CONTEXT_MAP_NONE;
s->substate_uncompressed = BROTLI_STATE_UNCOMPRESSED_NONE;
s->substate_huffman = BROTLI_STATE_HUFFMAN_NONE;
s->substate_decode_uint8 = BROTLI_STATE_DECODE_UINT8_NONE;
s->substate_read_block_length = BROTLI_STATE_READ_BLOCK_LENGTH_NONE;
s->dictionary = BrotliGetDictionary();
s->buffer_length = 0;
s->loop_counter = 0;
s->pos = 0;
s->rb_roundtrips = 0;
s->partial_pos_out = 0;
s->block_type_trees = NULL;
s->block_len_trees = NULL;
s->ringbuffer = NULL;
s->ringbuffer_size = 0;
s->new_ringbuffer_size = 0;
s->ringbuffer_mask = 0;
s->context_map = NULL;
s->context_modes = NULL;
s->dist_context_map = NULL;
s->context_map_slice = NULL;
s->dist_context_map_slice = NULL;
s->sub_loop_counter = 0;
s->literal_hgroup.codes = NULL;
s->literal_hgroup.htrees = NULL;
s->insert_copy_hgroup.codes = NULL;
s->insert_copy_hgroup.htrees = NULL;
s->distance_hgroup.codes = NULL;
s->distance_hgroup.htrees = NULL;
s->custom_dict = NULL;
s->custom_dict_size = 0;
s->is_last_metablock = 0;
s->is_uncompressed = 0;
s->is_metadata = 0;
s->should_wrap_ringbuffer = 0;
s->canny_ringbuffer_allocation = 1;
s->window_bits = 0;
s->max_distance = 0;
s->dist_rb[0] = 16;
s->dist_rb[1] = 15;
s->dist_rb[2] = 11;
s->dist_rb[3] = 4;
s->dist_rb_idx = 0;
s->block_type_trees = NULL;
s->block_len_trees = NULL;
/* Make small negative indexes addressable. */
s->symbol_lists = &s->symbols_lists_array[BROTLI_HUFFMAN_MAX_CODE_LENGTH + 1];
s->mtf_upper_bound = 63;
}
void BrotliDecoderStateMetablockBegin(BrotliDecoderState* s) {
s->meta_block_remaining_len = 0;
s->block_length[0] = 1U << 28;
s->block_length[1] = 1U << 28;
s->block_length[2] = 1U << 28;
s->num_block_types[0] = 1;
s->num_block_types[1] = 1;
s->num_block_types[2] = 1;
s->block_type_rb[0] = 1;
s->block_type_rb[1] = 0;
s->block_type_rb[2] = 1;
s->block_type_rb[3] = 0;
s->block_type_rb[4] = 1;
s->block_type_rb[5] = 0;
s->context_map = NULL;
s->context_modes = NULL;
s->dist_context_map = NULL;
s->context_map_slice = NULL;
s->literal_htree = NULL;
s->dist_context_map_slice = NULL;
s->dist_htree_index = 0;
s->context_lookup1 = NULL;
s->context_lookup2 = NULL;
s->literal_hgroup.codes = NULL;
s->literal_hgroup.htrees = NULL;
s->insert_copy_hgroup.codes = NULL;
s->insert_copy_hgroup.htrees = NULL;
s->distance_hgroup.codes = NULL;
s->distance_hgroup.htrees = NULL;
}
void BrotliDecoderStateCleanupAfterMetablock(BrotliDecoderState* s) {
BROTLI_FREE(s, s->context_modes);
BROTLI_FREE(s, s->context_map);
BROTLI_FREE(s, s->dist_context_map);
BROTLI_FREE(s, s->literal_hgroup.htrees);
BROTLI_FREE(s, s->insert_copy_hgroup.htrees);
BROTLI_FREE(s, s->distance_hgroup.htrees);
}
void BrotliDecoderStateCleanup(BrotliDecoderState* s) {
BrotliDecoderStateCleanupAfterMetablock(s);
BROTLI_FREE(s, s->ringbuffer);
BROTLI_FREE(s, s->block_type_trees);
}
BROTLI_BOOL BrotliDecoderHuffmanTreeGroupInit(BrotliDecoderState* s,
HuffmanTreeGroup* group, uint32_t alphabet_size, uint32_t ntrees) {
/* Pack two allocations into one */
const size_t max_table_size = kMaxHuffmanTableSize[(alphabet_size + 31) >> 5];
const size_t code_size = sizeof(HuffmanCode) * ntrees * max_table_size;
const size_t htree_size = sizeof(HuffmanCode*) * ntrees;
/* Pointer alignment is, hopefully, wider than sizeof(HuffmanCode). */
HuffmanCode** p = (HuffmanCode**)BROTLI_ALLOC(s, code_size + htree_size);
group->alphabet_size = (uint16_t)alphabet_size;
group->num_htrees = (uint16_t)ntrees;
group->htrees = p;
group->codes = (HuffmanCode*)(&p[ntrees]);
return !!p;
}
#if defined(__cplusplus) || defined(c_plusplus)
} /* extern "C" */
#endif

View File

@ -1,482 +0,0 @@
/* Copyright 2013 Google Inc. All Rights Reserved.
Distributed under MIT license.
See file LICENSE for detail or copy at https://opensource.org/licenses/MIT
*/
#include "./enc/static_dict.h"
#include "./common/dictionary.h"
#include "./enc/find_match_length.h"
#include "./enc/port.h"
#include "./enc/static_dict_lut.h"
#if defined(__cplusplus) || defined(c_plusplus)
extern "C" {
#endif
static const uint8_t kUppercaseFirst = 10;
static const uint8_t kOmitLastNTransforms[10] = {
0, 12, 27, 23, 42, 63, 56, 48, 59, 64,
};
static BROTLI_INLINE uint32_t Hash(const uint8_t *data) {
uint32_t h = BROTLI_UNALIGNED_LOAD32(data) * kDictHashMul32;
/* The higher bits contain more mixture from the multiplication,
so we take our results from there. */
return h >> (32 - kDictNumBits);
}
static BROTLI_INLINE void AddMatch(size_t distance, size_t len, size_t len_code,
uint32_t* matches) {
uint32_t match = (uint32_t)((distance << 5) + len_code);
matches[len] = BROTLI_MIN(uint32_t, matches[len], match);
}
static BROTLI_INLINE size_t DictMatchLength(const BrotliDictionary* dictionary,
const uint8_t* data,
size_t id,
size_t len,
size_t maxlen) {
const size_t offset = dictionary->offsets_by_length[len] + len * id;
return FindMatchLengthWithLimit(&dictionary->data[offset], data,
BROTLI_MIN(size_t, len, maxlen));
}
static BROTLI_INLINE BROTLI_BOOL IsMatch(const BrotliDictionary* dictionary,
DictWord w, const uint8_t* data, size_t max_length) {
if (w.len > max_length) {
return BROTLI_FALSE;
} else {
const size_t offset = dictionary->offsets_by_length[w.len] +
(size_t)w.len * (size_t)w.idx;
const uint8_t* dict = &dictionary->data[offset];
if (w.transform == 0) {
/* Match against base dictionary word. */
return
TO_BROTLI_BOOL(FindMatchLengthWithLimit(dict, data, w.len) == w.len);
} else if (w.transform == 10) {
/* Match against uppercase first transform.
Note that there are only ASCII uppercase words in the lookup table. */
return TO_BROTLI_BOOL(dict[0] >= 'a' && dict[0] <= 'z' &&
(dict[0] ^ 32) == data[0] &&
FindMatchLengthWithLimit(&dict[1], &data[1], w.len - 1u) ==
w.len - 1u);
} else {
/* Match against uppercase all transform.
Note that there are only ASCII uppercase words in the lookup table. */
size_t i;
for (i = 0; i < w.len; ++i) {
if (dict[i] >= 'a' && dict[i] <= 'z') {
if ((dict[i] ^ 32) != data[i]) return BROTLI_FALSE;
} else {
if (dict[i] != data[i]) return BROTLI_FALSE;
}
}
return BROTLI_TRUE;
}
}
}
BROTLI_BOOL BrotliFindAllStaticDictionaryMatches(
const BrotliDictionary* dictionary, const uint8_t* data, size_t min_length,
size_t max_length, uint32_t* matches) {
BROTLI_BOOL has_found_match = BROTLI_FALSE;
{
size_t offset = kStaticDictionaryBuckets[Hash(data)];
BROTLI_BOOL end = !offset;
while (!end) {
DictWord w = kStaticDictionaryWords[offset++];
const size_t l = w.len & 0x1F;
const size_t n = (size_t)1 << dictionary->size_bits_by_length[l];
const size_t id = w.idx;
end = !!(w.len & 0x80);
w.len = (uint8_t)l;
if (w.transform == 0) {
const size_t matchlen =
DictMatchLength(dictionary, data, id, l, max_length);
const uint8_t* s;
size_t minlen;
size_t maxlen;
size_t len;
/* Transform "" + kIdentity + "" */
if (matchlen == l) {
AddMatch(id, l, l, matches);
has_found_match = BROTLI_TRUE;
}
/* Transforms "" + kOmitLast1 + "" and "" + kOmitLast1 + "ing " */
if (matchlen >= l - 1) {
AddMatch(id + 12 * n, l - 1, l, matches);
if (l + 2 < max_length &&
data[l - 1] == 'i' && data[l] == 'n' && data[l + 1] == 'g' &&
data[l + 2] == ' ') {
AddMatch(id + 49 * n, l + 3, l, matches);
}
has_found_match = BROTLI_TRUE;
}
/* Transform "" + kOmitLastN + "" (N = 2 .. 9) */
minlen = min_length;
if (l > 9) minlen = BROTLI_MAX(size_t, minlen, l - 9);
maxlen = BROTLI_MIN(size_t, matchlen, l - 2);
for (len = minlen; len <= maxlen; ++len) {
AddMatch(id + kOmitLastNTransforms[l - len] * n, len, l, matches);
has_found_match = BROTLI_TRUE;
}
if (matchlen < l || l + 6 >= max_length) {
continue;
}
s = &data[l];
/* Transforms "" + kIdentity + <suffix> */
if (s[0] == ' ') {
AddMatch(id + n, l + 1, l, matches);
if (s[1] == 'a') {
if (s[2] == ' ') {
AddMatch(id + 28 * n, l + 3, l, matches);
} else if (s[2] == 's') {
if (s[3] == ' ') AddMatch(id + 46 * n, l + 4, l, matches);
} else if (s[2] == 't') {
if (s[3] == ' ') AddMatch(id + 60 * n, l + 4, l, matches);
} else if (s[2] == 'n') {
if (s[3] == 'd' && s[4] == ' ') {
AddMatch(id + 10 * n, l + 5, l, matches);
}
}
} else if (s[1] == 'b') {
if (s[2] == 'y' && s[3] == ' ') {
AddMatch(id + 38 * n, l + 4, l, matches);
}
} else if (s[1] == 'i') {
if (s[2] == 'n') {
if (s[3] == ' ') AddMatch(id + 16 * n, l + 4, l, matches);
} else if (s[2] == 's') {
if (s[3] == ' ') AddMatch(id + 47 * n, l + 4, l, matches);
}
} else if (s[1] == 'f') {
if (s[2] == 'o') {
if (s[3] == 'r' && s[4] == ' ') {
AddMatch(id + 25 * n, l + 5, l, matches);
}
} else if (s[2] == 'r') {
if (s[3] == 'o' && s[4] == 'm' && s[5] == ' ') {
AddMatch(id + 37 * n, l + 6, l, matches);
}
}
} else if (s[1] == 'o') {
if (s[2] == 'f') {
if (s[3] == ' ') AddMatch(id + 8 * n, l + 4, l, matches);
} else if (s[2] == 'n') {
if (s[3] == ' ') AddMatch(id + 45 * n, l + 4, l, matches);
}
} else if (s[1] == 'n') {
if (s[2] == 'o' && s[3] == 't' && s[4] == ' ') {
AddMatch(id + 80 * n, l + 5, l, matches);
}
} else if (s[1] == 't') {
if (s[2] == 'h') {
if (s[3] == 'e') {
if (s[4] == ' ') AddMatch(id + 5 * n, l + 5, l, matches);
} else if (s[3] == 'a') {
if (s[4] == 't' && s[5] == ' ') {
AddMatch(id + 29 * n, l + 6, l, matches);
}
}
} else if (s[2] == 'o') {
if (s[3] == ' ') AddMatch(id + 17 * n, l + 4, l, matches);
}
} else if (s[1] == 'w') {
if (s[2] == 'i' && s[3] == 't' && s[4] == 'h' && s[5] == ' ') {
AddMatch(id + 35 * n, l + 6, l, matches);
}
}
} else if (s[0] == '"') {
AddMatch(id + 19 * n, l + 1, l, matches);
if (s[1] == '>') {
AddMatch(id + 21 * n, l + 2, l, matches);
}
} else if (s[0] == '.') {
AddMatch(id + 20 * n, l + 1, l, matches);
if (s[1] == ' ') {
AddMatch(id + 31 * n, l + 2, l, matches);
if (s[2] == 'T' && s[3] == 'h') {
if (s[4] == 'e') {
if (s[5] == ' ') AddMatch(id + 43 * n, l + 6, l, matches);
} else if (s[4] == 'i') {
if (s[5] == 's' && s[6] == ' ') {
AddMatch(id + 75 * n, l + 7, l, matches);
}
}
}
}
} else if (s[0] == ',') {
AddMatch(id + 76 * n, l + 1, l, matches);
if (s[1] == ' ') {
AddMatch(id + 14 * n, l + 2, l, matches);
}
} else if (s[0] == '\n') {
AddMatch(id + 22 * n, l + 1, l, matches);
if (s[1] == '\t') {
AddMatch(id + 50 * n, l + 2, l, matches);
}
} else if (s[0] == ']') {
AddMatch(id + 24 * n, l + 1, l, matches);
} else if (s[0] == '\'') {
AddMatch(id + 36 * n, l + 1, l, matches);
} else if (s[0] == ':') {
AddMatch(id + 51 * n, l + 1, l, matches);
} else if (s[0] == '(') {
AddMatch(id + 57 * n, l + 1, l, matches);
} else if (s[0] == '=') {
if (s[1] == '"') {
AddMatch(id + 70 * n, l + 2, l, matches);
} else if (s[1] == '\'') {
AddMatch(id + 86 * n, l + 2, l, matches);
}
} else if (s[0] == 'a') {
if (s[1] == 'l' && s[2] == ' ') {
AddMatch(id + 84 * n, l + 3, l, matches);
}
} else if (s[0] == 'e') {
if (s[1] == 'd') {
if (s[2] == ' ') AddMatch(id + 53 * n, l + 3, l, matches);
} else if (s[1] == 'r') {
if (s[2] == ' ') AddMatch(id + 82 * n, l + 3, l, matches);
} else if (s[1] == 's') {
if (s[2] == 't' && s[3] == ' ') {
AddMatch(id + 95 * n, l + 4, l, matches);
}
}
} else if (s[0] == 'f') {
if (s[1] == 'u' && s[2] == 'l' && s[3] == ' ') {
AddMatch(id + 90 * n, l + 4, l, matches);
}
} else if (s[0] == 'i') {
if (s[1] == 'v') {
if (s[2] == 'e' && s[3] == ' ') {
AddMatch(id + 92 * n, l + 4, l, matches);
}
} else if (s[1] == 'z') {
if (s[2] == 'e' && s[3] == ' ') {
AddMatch(id + 100 * n, l + 4, l, matches);
}
}
} else if (s[0] == 'l') {
if (s[1] == 'e') {
if (s[2] == 's' && s[3] == 's' && s[4] == ' ') {
AddMatch(id + 93 * n, l + 5, l, matches);
}
} else if (s[1] == 'y') {
if (s[2] == ' ') AddMatch(id + 61 * n, l + 3, l, matches);
}
} else if (s[0] == 'o') {
if (s[1] == 'u' && s[2] == 's' && s[3] == ' ') {
AddMatch(id + 106 * n, l + 4, l, matches);
}
}
} else {
/* Set is_all_caps=0 for kUppercaseFirst and
is_all_caps=1 otherwise (kUppercaseAll) transform. */
const BROTLI_BOOL is_all_caps =
TO_BROTLI_BOOL(w.transform != kUppercaseFirst);
const uint8_t* s;
if (!IsMatch(dictionary, w, data, max_length)) {
continue;
}
/* Transform "" + kUppercase{First,All} + "" */
AddMatch(id + (is_all_caps ? 44 : 9) * n, l, l, matches);
has_found_match = BROTLI_TRUE;
if (l + 1 >= max_length) {
continue;
}
/* Transforms "" + kUppercase{First,All} + <suffix> */
s = &data[l];
if (s[0] == ' ') {
AddMatch(id + (is_all_caps ? 68 : 4) * n, l + 1, l, matches);
} else if (s[0] == '"') {
AddMatch(id + (is_all_caps ? 87 : 66) * n, l + 1, l, matches);
if (s[1] == '>') {
AddMatch(id + (is_all_caps ? 97 : 69) * n, l + 2, l, matches);
}
} else if (s[0] == '.') {
AddMatch(id + (is_all_caps ? 101 : 79) * n, l + 1, l, matches);
if (s[1] == ' ') {
AddMatch(id + (is_all_caps ? 114 : 88) * n, l + 2, l, matches);
}
} else if (s[0] == ',') {
AddMatch(id + (is_all_caps ? 112 : 99) * n, l + 1, l, matches);
if (s[1] == ' ') {
AddMatch(id + (is_all_caps ? 107 : 58) * n, l + 2, l, matches);
}
} else if (s[0] == '\'') {
AddMatch(id + (is_all_caps ? 94 : 74) * n, l + 1, l, matches);
} else if (s[0] == '(') {
AddMatch(id + (is_all_caps ? 113 : 78) * n, l + 1, l, matches);
} else if (s[0] == '=') {
if (s[1] == '"') {
AddMatch(id + (is_all_caps ? 105 : 104) * n, l + 2, l, matches);
} else if (s[1] == '\'') {
AddMatch(id + (is_all_caps ? 116 : 108) * n, l + 2, l, matches);
}
}
}
}
}
/* Transforms with prefixes " " and "." */
if (max_length >= 5 && (data[0] == ' ' || data[0] == '.')) {
BROTLI_BOOL is_space = TO_BROTLI_BOOL(data[0] == ' ');
size_t offset = kStaticDictionaryBuckets[Hash(&data[1])];
BROTLI_BOOL end = !offset;
while (!end) {
DictWord w = kStaticDictionaryWords[offset++];
const size_t l = w.len & 0x1F;
const size_t n = (size_t)1 << dictionary->size_bits_by_length[l];
const size_t id = w.idx;
end = !!(w.len & 0x80);
w.len = (uint8_t)l;
if (w.transform == 0) {
const uint8_t* s;
if (!IsMatch(dictionary, w, &data[1], max_length - 1)) {
continue;
}
/* Transforms " " + kIdentity + "" and "." + kIdentity + "" */
AddMatch(id + (is_space ? 6 : 32) * n, l + 1, l, matches);
has_found_match = BROTLI_TRUE;
if (l + 2 >= max_length) {
continue;
}
/* Transforms " " + kIdentity + <suffix> and "." + kIdentity + <suffix>
*/
s = &data[l + 1];
if (s[0] == ' ') {
AddMatch(id + (is_space ? 2 : 77) * n, l + 2, l, matches);
} else if (s[0] == '(') {
AddMatch(id + (is_space ? 89 : 67) * n, l + 2, l, matches);
} else if (is_space) {
if (s[0] == ',') {
AddMatch(id + 103 * n, l + 2, l, matches);
if (s[1] == ' ') {
AddMatch(id + 33 * n, l + 3, l, matches);
}
} else if (s[0] == '.') {
AddMatch(id + 71 * n, l + 2, l, matches);
if (s[1] == ' ') {
AddMatch(id + 52 * n, l + 3, l, matches);
}
} else if (s[0] == '=') {
if (s[1] == '"') {
AddMatch(id + 81 * n, l + 3, l, matches);
} else if (s[1] == '\'') {
AddMatch(id + 98 * n, l + 3, l, matches);
}
}
}
} else if (is_space) {
/* Set is_all_caps=0 for kUppercaseFirst and
is_all_caps=1 otherwise (kUppercaseAll) transform. */
const BROTLI_BOOL is_all_caps =
TO_BROTLI_BOOL(w.transform != kUppercaseFirst);
const uint8_t* s;
if (!IsMatch(dictionary, w, &data[1], max_length - 1)) {
continue;
}
/* Transforms " " + kUppercase{First,All} + "" */
AddMatch(id + (is_all_caps ? 85 : 30) * n, l + 1, l, matches);
has_found_match = BROTLI_TRUE;
if (l + 2 >= max_length) {
continue;
}
/* Transforms " " + kUppercase{First,All} + <suffix> */
s = &data[l + 1];
if (s[0] == ' ') {
AddMatch(id + (is_all_caps ? 83 : 15) * n, l + 2, l, matches);
} else if (s[0] == ',') {
if (!is_all_caps) {
AddMatch(id + 109 * n, l + 2, l, matches);
}
if (s[1] == ' ') {
AddMatch(id + (is_all_caps ? 111 : 65) * n, l + 3, l, matches);
}
} else if (s[0] == '.') {
AddMatch(id + (is_all_caps ? 115 : 96) * n, l + 2, l, matches);
if (s[1] == ' ') {
AddMatch(id + (is_all_caps ? 117 : 91) * n, l + 3, l, matches);
}
} else if (s[0] == '=') {
if (s[1] == '"') {
AddMatch(id + (is_all_caps ? 110 : 118) * n, l + 3, l, matches);
} else if (s[1] == '\'') {
AddMatch(id + (is_all_caps ? 119 : 120) * n, l + 3, l, matches);
}
}
}
}
}
if (max_length >= 6) {
/* Transforms with prefixes "e ", "s ", ", " and "\xc2\xa0" */
if ((data[1] == ' ' &&
(data[0] == 'e' || data[0] == 's' || data[0] == ',')) ||
(data[0] == 0xc2 && data[1] == 0xa0)) {
size_t offset = kStaticDictionaryBuckets[Hash(&data[2])];
BROTLI_BOOL end = !offset;
while (!end) {
DictWord w = kStaticDictionaryWords[offset++];
const size_t l = w.len & 0x1F;
const size_t n = (size_t)1 << dictionary->size_bits_by_length[l];
const size_t id = w.idx;
end = !!(w.len & 0x80);
w.len = (uint8_t)l;
if (w.transform == 0 &&
IsMatch(dictionary, w, &data[2], max_length - 2)) {
if (data[0] == 0xc2) {
AddMatch(id + 102 * n, l + 2, l, matches);
has_found_match = BROTLI_TRUE;
} else if (l + 2 < max_length && data[l + 2] == ' ') {
size_t t = data[0] == 'e' ? 18 : (data[0] == 's' ? 7 : 13);
AddMatch(id + t * n, l + 3, l, matches);
has_found_match = BROTLI_TRUE;
}
}
}
}
}
if (max_length >= 9) {
/* Transforms with prefixes " the " and ".com/" */
if ((data[0] == ' ' && data[1] == 't' && data[2] == 'h' &&
data[3] == 'e' && data[4] == ' ') ||
(data[0] == '.' && data[1] == 'c' && data[2] == 'o' &&
data[3] == 'm' && data[4] == '/')) {
size_t offset = kStaticDictionaryBuckets[Hash(&data[5])];
BROTLI_BOOL end = !offset;
while (!end) {
DictWord w = kStaticDictionaryWords[offset++];
const size_t l = w.len & 0x1F;
const size_t n = (size_t)1 << dictionary->size_bits_by_length[l];
const size_t id = w.idx;
end = !!(w.len & 0x80);
w.len = (uint8_t)l;
if (w.transform == 0 &&
IsMatch(dictionary, w, &data[5], max_length - 5)) {
AddMatch(id + (data[0] == ' ' ? 41 : 72) * n, l + 5, l, matches);
has_found_match = BROTLI_TRUE;
if (l + 5 < max_length) {
const uint8_t* s = &data[l + 5];
if (data[0] == ' ') {
if (l + 8 < max_length &&
s[0] == ' ' && s[1] == 'o' && s[2] == 'f' && s[3] == ' ') {
AddMatch(id + 62 * n, l + 9, l, matches);
if (l + 12 < max_length &&
s[4] == 't' && s[5] == 'h' && s[6] == 'e' && s[7] == ' ') {
AddMatch(id + 73 * n, l + 13, l, matches);
}
}
}
}
}
}
}
}
return has_found_match;
}
#if defined(__cplusplus) || defined(c_plusplus)
} /* extern "C" */
#endif

View File

@ -1,85 +0,0 @@
/* Copyright 2013 Google Inc. All Rights Reserved.
Distributed under MIT license.
See file LICENSE for detail or copy at https://opensource.org/licenses/MIT
*/
/* Heuristics for deciding about the UTF8-ness of strings. */
#include "./enc/utf8_util.h"
#include <brotli/types.h>
#if defined(__cplusplus) || defined(c_plusplus)
extern "C" {
#endif
static size_t BrotliParseAsUTF8(
int* symbol, const uint8_t* input, size_t size) {
/* ASCII */
if ((input[0] & 0x80) == 0) {
*symbol = input[0];
if (*symbol > 0) {
return 1;
}
}
/* 2-byte UTF8 */
if (size > 1u &&
(input[0] & 0xe0) == 0xc0 &&
(input[1] & 0xc0) == 0x80) {
*symbol = (((input[0] & 0x1f) << 6) |
(input[1] & 0x3f));
if (*symbol > 0x7f) {
return 2;
}
}
/* 3-byte UTF8 */
if (size > 2u &&
(input[0] & 0xf0) == 0xe0 &&
(input[1] & 0xc0) == 0x80 &&
(input[2] & 0xc0) == 0x80) {
*symbol = (((input[0] & 0x0f) << 12) |
((input[1] & 0x3f) << 6) |
(input[2] & 0x3f));
if (*symbol > 0x7ff) {
return 3;
}
}
/* 4-byte UTF8 */
if (size > 3u &&
(input[0] & 0xf8) == 0xf0 &&
(input[1] & 0xc0) == 0x80 &&
(input[2] & 0xc0) == 0x80 &&
(input[3] & 0xc0) == 0x80) {
*symbol = (((input[0] & 0x07) << 18) |
((input[1] & 0x3f) << 12) |
((input[2] & 0x3f) << 6) |
(input[3] & 0x3f));
if (*symbol > 0xffff && *symbol <= 0x10ffff) {
return 4;
}
}
/* Not UTF8, emit a special symbol above the UTF8-code space */
*symbol = 0x110000 | input[0];
return 1;
}
/* Returns 1 if at least min_fraction of the data is UTF8-encoded.*/
BROTLI_BOOL BrotliIsMostlyUTF8(
const uint8_t* data, const size_t pos, const size_t mask,
const size_t length, const double min_fraction) {
size_t size_utf8 = 0;
size_t i = 0;
while (i < length) {
int symbol;
size_t bytes_read =
BrotliParseAsUTF8(&symbol, &data[(pos + i) & mask], length - i);
i += bytes_read;
if (symbol < 0x110000) size_utf8 += bytes_read;
}
return TO_BROTLI_BOOL(size_utf8 > min_fraction * (double)length);
}
#if defined(__cplusplus) || defined(c_plusplus)
} /* extern "C" */
#endif
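
Not part of the diff: a rough Go analogue of the BrotliIsMostlyUTF8 heuristic above, counting the bytes covered by well-formed UTF-8 sequences and comparing that fraction with a threshold. It uses the standard unicode/utf8 package rather than brotli's hand-rolled parser, so edge-case handling is not bit-for-bit identical.

```go
package main

import (
	"fmt"
	"unicode/utf8"
)

// isMostlyUTF8 reports whether more than minFraction of data is covered by
// well-formed UTF-8 sequences; malformed bytes are skipped one at a time.
func isMostlyUTF8(data []byte, minFraction float64) bool {
	utf8Bytes := 0
	for i := 0; i < len(data); {
		r, size := utf8.DecodeRune(data[i:])
		if r != utf8.RuneError || size > 1 {
			utf8Bytes += size
		}
		i += size
	}
	return float64(utf8Bytes) > minFraction*float64(len(data))
}

func main() {
	fmt.Println(isMostlyUTF8([]byte("héllo wörld"), 0.75))        // true
	fmt.Println(isMostlyUTF8([]byte{0xff, 0xfe, 0xfd, 'a'}, 0.75)) // false
}
```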

View File

@ -1,169 +0,0 @@
// Copyright 2016 Google Inc. All Rights Reserved.
//
// Distributed under MIT license.
// See file LICENSE for detail or copy at https://opensource.org/licenses/MIT
package brotli
/*
#include <stdbool.h>
#include <stddef.h>
#include <stdint.h>
#include <brotli/encode.h>
struct CompressStreamResult {
size_t bytes_consumed;
const uint8_t* output_data;
size_t output_data_size;
int success;
int has_more;
};
static struct CompressStreamResult CompressStream(
BrotliEncoderState* s, BrotliEncoderOperation op,
const uint8_t* data, size_t data_size) {
struct CompressStreamResult result;
size_t available_in = data_size;
const uint8_t* next_in = data;
size_t available_out = 0;
result.success = BrotliEncoderCompressStream(s, op,
&available_in, &next_in, &available_out, 0, 0) ? 1 : 0;
result.bytes_consumed = data_size - available_in;
result.output_data = 0;
result.output_data_size = 0;
if (result.success) {
result.output_data = BrotliEncoderTakeOutput(s, &result.output_data_size);
}
result.has_more = BrotliEncoderHasMoreOutput(s) ? 1 : 0;
return result;
}
*/
import "C"
import (
"bytes"
"errors"
"io"
"unsafe"
)
// WriterOptions configures Writer.
type WriterOptions struct {
// Quality controls the compression-speed vs compression-density trade-offs.
// The higher the quality, the slower the compression. Range is 0 to 11.
Quality int
// LGWin is the base 2 logarithm of the sliding window size.
// Range is 10 to 24. 0 indicates automatic configuration based on Quality.
LGWin int
}
// Writer implements io.WriteCloser by writing Brotli-encoded data to an
// underlying Writer.
type Writer struct {
dst io.Writer
state *C.BrotliEncoderState
buf, encoded []byte
}
var (
errEncode = errors.New("cbrotli: encode error")
errWriterClosed = errors.New("cbrotli: Writer is closed")
)
// NewWriter initializes new Writer instance.
// Close MUST be called to free resources.
func NewWriter(dst io.Writer, options WriterOptions) *Writer {
state := C.BrotliEncoderCreateInstance(nil, nil, nil)
C.BrotliEncoderSetParameter(
state, C.BROTLI_PARAM_QUALITY, (C.uint32_t)(options.Quality))
if options.LGWin > 0 {
C.BrotliEncoderSetParameter(
state, C.BROTLI_PARAM_LGWIN, (C.uint32_t)(options.LGWin))
}
return &Writer{
dst: dst,
state: state,
}
}
func (w *Writer) SetDictionary(p []byte) {
var data *C.uint8_t
if len(p) != 0 {
data = (*C.uint8_t)(&p[0])
}
C.BrotliEncoderSetCustomDictionary(w.state, C.size_t(len(p)), data)
}
func (w *Writer) writeChunk(p []byte, op C.BrotliEncoderOperation) (n int, err error) {
if w.state == nil {
return 0, errWriterClosed
}
for {
var data *C.uint8_t
if len(p) != 0 {
data = (*C.uint8_t)(&p[0])
}
result := C.CompressStream(w.state, op, data, C.size_t(len(p)))
if result.success == 0 {
return n, errEncode
}
p = p[int(result.bytes_consumed):]
n += int(result.bytes_consumed)
length := int(result.output_data_size)
if length != 0 {
// It is a workaround for non-copying-wrapping of native memory.
// C-encoder never pushes output block longer than ((2 << 25) + 502).
// TODO: use natural wrapper, when it becomes available, see
// https://golang.org/issue/13656.
output := (*[1 << 30]byte)(unsafe.Pointer(result.output_data))[:length:length]
_, err = w.dst.Write(output)
if err != nil {
return n, err
}
}
if len(p) == 0 && result.has_more == 0 {
return n, nil
}
}
}
// Flush outputs encoded data for all input provided to Write. The resulting
// output can be decoded to match all input before Flush, but the stream is
// not yet complete until after Close.
// Flush has a negative impact on compression.
func (w *Writer) Flush() error {
_, err := w.writeChunk(nil, C.BROTLI_OPERATION_FLUSH)
return err
}
// Close flushes remaining data to the decorated writer and frees C resources.
func (w *Writer) Close() error {
// If stream is already closed, it is reported by `writeChunk`.
_, err := w.writeChunk(nil, C.BROTLI_OPERATION_FINISH)
// C-Brotli tolerates `nil` pointer here.
C.BrotliEncoderDestroyInstance(w.state)
w.state = nil
return err
}
// Write implements io.Writer. Flush or Close must be called to ensure that the
// encoded bytes are actually flushed to the underlying Writer.
func (w *Writer) Write(p []byte) (n int, err error) {
return w.writeChunk(p, C.BROTLI_OPERATION_PROCESS)
}
// Encode returns content encoded with Brotli.
func Encode(content []byte, options WriterOptions) ([]byte, error) {
var buf bytes.Buffer
writer := NewWriter(&buf, options)
_, err := writer.Write(content)
if closeErr := writer.Close(); err == nil {
err = closeErr
}
return buf.Bytes(), err
}
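
Not part of the diff: a hedged round-trip sketch tying together the Encode helper above and the Decode helper from the reader file, again assuming the vendored import path github.com/cloudflare/brotli-go. Quality 5 and the default window (LGWin 0, derived from Quality) are arbitrary illustration values, not what cloudflared shipped.

```go
package main

import (
	"bytes"
	"fmt"

	brotli "github.com/cloudflare/brotli-go"
)

func main() {
	payload := []byte("an HTTP response body that benefits from compression")

	// Encode with a mid-range quality; LGWin 0 lets the encoder pick a
	// window size from the quality setting (see WriterOptions above).
	encoded, err := brotli.Encode(payload, brotli.WriterOptions{Quality: 5})
	if err != nil {
		panic(err)
	}

	decoded, err := brotli.Decode(encoded)
	if err != nil {
		panic(err)
	}

	fmt.Printf("%d -> %d -> %d bytes, roundtrip ok: %v\n",
		len(payload), len(encoded), len(decoded), bytes.Equal(payload, decoded))
}
```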

7
vendor/modules.txt vendored
View File

@ -11,13 +11,6 @@ github.com/beorn7/perks/quantile
# github.com/cespare/xxhash/v2 v2.1.2
## explicit; go 1.11
github.com/cespare/xxhash/v2
# github.com/cloudflare/brotli-go v0.0.0-20191101163834-d34379f7ff93
## explicit; go 1.12
github.com/cloudflare/brotli-go
github.com/cloudflare/brotli-go/brotli
github.com/cloudflare/brotli-go/common
github.com/cloudflare/brotli-go/dec
github.com/cloudflare/brotli-go/enc
# github.com/cloudflare/circl v1.2.1-0.20220809205628-0a9554f37a47
## explicit; go 1.16
github.com/cloudflare/circl/dh/x25519