Change more hasher functions to methods.

Andy Balholm 2019-03-08 16:43:15 -08:00
parent bbbdedf380
commit 7ed41e6356
31 changed files with 458 additions and 2163 deletions
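The dispatch on params.hasher.type_ (removed in the hunk below) becomes unnecessary because the per-hasher operations are now invoked as methods on the HasherHandle value. The interface declaration itself is not part of this excerpt; the sketch below only collects the method set implied by the call sites in the new BrotliCreateBackwardReferences body, and the *BrotliEncoderDictionary parameter type is an assumption.

/* Sketch inferred from the call sites in this commit, not the actual
   declaration; HasherHandle very likely has further methods (creation,
   preparation, etc.) that this excerpt does not exercise. */
type HasherHandle interface {
	HashTypeLength() uint
	StoreLookahead() uint
	PrepareDistanceCache(distance_cache []int)
	FindLongestMatch(dictionary *BrotliEncoderDictionary, data []byte, ring_buffer_mask uint, distance_cache []int, cur_ix uint, max_length uint, max_backward uint, gap uint, max_distance uint, out *HasherSearchResult)
	Store(data []byte, mask uint, ix uint)
	StoreRange(data []byte, mask uint, ix_start uint, ix_end uint)
}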

View File

@@ -43,45 +43,142 @@ func ComputeDistanceCode(distance uint, max_distance uint, dist_cache []int) uint
 }
 
 func BrotliCreateBackwardReferences(num_bytes uint, position uint, ringbuffer []byte, ringbuffer_mask uint, params *BrotliEncoderParams, hasher HasherHandle, dist_cache []int, last_insert_len *uint, commands []Command, num_commands *uint, num_literals *uint) {
-	switch params.hasher.type_ {
-	case 2:
-		CreateBackwardReferencesNH2(num_bytes, position, ringbuffer, ringbuffer_mask, params, hasher, dist_cache, last_insert_len, commands, num_commands, num_literals)
-		return
-	case 3:
-		CreateBackwardReferencesNH3(num_bytes, position, ringbuffer, ringbuffer_mask, params, hasher, dist_cache, last_insert_len, commands, num_commands, num_literals)
-		return
-	case 4:
-		CreateBackwardReferencesNH4(num_bytes, position, ringbuffer, ringbuffer_mask, params, hasher, dist_cache, last_insert_len, commands, num_commands, num_literals)
-		return
-	case 5:
-		CreateBackwardReferencesNH5(num_bytes, position, ringbuffer, ringbuffer_mask, params, hasher, dist_cache, last_insert_len, commands, num_commands, num_literals)
-		return
-	case 6:
-		CreateBackwardReferencesNH6(num_bytes, position, ringbuffer, ringbuffer_mask, params, hasher, dist_cache, last_insert_len, commands, num_commands, num_literals)
-		return
-	case 40:
-		CreateBackwardReferencesNH40(num_bytes, position, ringbuffer, ringbuffer_mask, params, hasher, dist_cache, last_insert_len, commands, num_commands, num_literals)
-		return
-	case 41:
-		CreateBackwardReferencesNH41(num_bytes, position, ringbuffer, ringbuffer_mask, params, hasher, dist_cache, last_insert_len, commands, num_commands, num_literals)
-		return
-	case 42:
-		CreateBackwardReferencesNH42(num_bytes, position, ringbuffer, ringbuffer_mask, params, hasher, dist_cache, last_insert_len, commands, num_commands, num_literals)
-		return
-	case 54:
-		CreateBackwardReferencesNH54(num_bytes, position, ringbuffer, ringbuffer_mask, params, hasher, dist_cache, last_insert_len, commands, num_commands, num_literals)
-		return
-	case 35:
-		CreateBackwardReferencesNH35(num_bytes, position, ringbuffer, ringbuffer_mask, params, hasher, dist_cache, last_insert_len, commands, num_commands, num_literals)
-		return
-	case 55:
-		CreateBackwardReferencesNH55(num_bytes, position, ringbuffer, ringbuffer_mask, params, hasher, dist_cache, last_insert_len, commands, num_commands, num_literals)
-		return
-	case 65:
-		CreateBackwardReferencesNH65(num_bytes, position, ringbuffer, ringbuffer_mask, params, hasher, dist_cache, last_insert_len, commands, num_commands, num_literals)
-		return
-	default:
-		break
-	}
+	var max_backward_limit uint = BROTLI_MAX_BACKWARD_LIMIT(params.lgwin)
+	var orig_commands []Command = commands
+	var insert_length uint = *last_insert_len
+	var pos_end uint = position + num_bytes
+	var store_end uint
+	if num_bytes >= hasher.StoreLookahead() {
+		store_end = position + num_bytes - hasher.StoreLookahead() + 1
+	} else {
+		store_end = position
+	}
+	var random_heuristics_window_size uint = LiteralSpreeLengthForSparseSearch(params)
+	var apply_random_heuristics uint = position + random_heuristics_window_size
+	var gap uint = 0
+	/* Set maximum distance, see section 9.1. of the spec. */
+	var kMinScore uint = BROTLI_SCORE_BASE + 100
+	/* For speed up heuristics for random data. */
+	/* Minimum score to accept a backward reference. */
+	hasher.PrepareDistanceCache(dist_cache)
+	for position+hasher.HashTypeLength() < pos_end {
+		var max_length uint = pos_end - position
+		var max_distance uint = brotli_min_size_t(position, max_backward_limit)
+		var sr HasherSearchResult
+		sr.len = 0
+		sr.len_code_delta = 0
+		sr.distance = 0
+		sr.score = kMinScore
+		hasher.FindLongestMatch(&params.dictionary, ringbuffer, ringbuffer_mask, dist_cache, position, max_length, max_distance, gap, params.dist.max_distance, &sr)
+		if sr.score > kMinScore {
+			/* Found a match. Let's look for something even better ahead. */
+			var delayed_backward_references_in_row int = 0
+			max_length--
+			for ; ; max_length-- {
+				var cost_diff_lazy uint = 175
+				var sr2 HasherSearchResult
+				if params.quality < MIN_QUALITY_FOR_EXTENSIVE_REFERENCE_SEARCH {
+					sr2.len = brotli_min_size_t(sr.len-1, max_length)
+				} else {
+					sr2.len = 0
+				}
+				sr2.len_code_delta = 0
+				sr2.distance = 0
+				sr2.score = kMinScore
+				max_distance = brotli_min_size_t(position+1, max_backward_limit)
+				hasher.FindLongestMatch(&params.dictionary, ringbuffer, ringbuffer_mask, dist_cache, position+1, max_length, max_distance, gap, params.dist.max_distance, &sr2)
+				if sr2.score >= sr.score+cost_diff_lazy {
+					/* Ok, let's just write one byte for now and start a match from the
+					   next byte. */
+					position++
+					insert_length++
+					sr = sr2
+					delayed_backward_references_in_row++
+					if delayed_backward_references_in_row < 4 && position+hasher.HashTypeLength() < pos_end {
+						continue
+					}
+				}
+				break
+			}
+			apply_random_heuristics = position + 2*sr.len + random_heuristics_window_size
+			max_distance = brotli_min_size_t(position, max_backward_limit)
+			{
+				/* The first 16 codes are special short-codes,
+				   and the minimum offset is 1. */
+				var distance_code uint = ComputeDistanceCode(sr.distance, max_distance+gap, dist_cache)
+				if (sr.distance <= (max_distance + gap)) && distance_code > 0 {
+					dist_cache[3] = dist_cache[2]
+					dist_cache[2] = dist_cache[1]
+					dist_cache[1] = dist_cache[0]
+					dist_cache[0] = int(sr.distance)
+					hasher.PrepareDistanceCache(dist_cache)
+				}
+				InitCommand(&commands[0], &params.dist, insert_length, sr.len, sr.len_code_delta, distance_code)
+				commands = commands[1:]
+			}
+			*num_literals += insert_length
+			insert_length = 0
+			/* Put the hash keys into the table, if there are enough bytes left.
+			   Depending on the hasher implementation, it can push all positions
+			   in the given range or only a subset of them.
+			   Avoid hash poisoning with RLE data. */
+			{
+				var range_start uint = position + 2
+				var range_end uint = brotli_min_size_t(position+sr.len, store_end)
+				if sr.distance < sr.len>>2 {
+					range_start = brotli_min_size_t(range_end, brotli_max_size_t(range_start, position+sr.len-(sr.distance<<2)))
+				}
+				hasher.StoreRange(ringbuffer, ringbuffer_mask, range_start, range_end)
+			}
+			position += sr.len
+		} else {
+			insert_length++
+			position++
+			/* If we have not seen matches for a long time, we can skip some
+			   match lookups. Unsuccessful match lookups are very very expensive
+			   and this kind of a heuristic speeds up compression quite
+			   a lot. */
+			if position > apply_random_heuristics {
+				/* Going through uncompressible data, jump. */
+				if position > apply_random_heuristics+4*random_heuristics_window_size {
+					var kMargin uint = brotli_max_size_t(hasher.StoreLookahead()-1, 4)
+					/* It is quite a long time since we saw a copy, so we assume
+					   that this data is not compressible, and store hashes less
+					   often. Hashes of non compressible data are less likely to
+					   turn out to be useful in the future, too, so we store less of
+					   them to not to flood out the hash table of good compressible
+					   data. */
+					var pos_jump uint = brotli_min_size_t(position+16, pos_end-kMargin)
+					for ; position < pos_jump; position += 4 {
+						hasher.Store(ringbuffer, ringbuffer_mask, position)
+						insert_length += 4
+					}
+				} else {
+					var kMargin uint = brotli_max_size_t(hasher.StoreLookahead()-1, 2)
+					var pos_jump uint = brotli_min_size_t(position+8, pos_end-kMargin)
+					for ; position < pos_jump; position += 2 {
+						hasher.Store(ringbuffer, ringbuffer_mask, position)
+						insert_length += 2
+					}
+				}
+			}
+		}
+	}
+	insert_length += pos_end - position
+	*last_insert_len = insert_length
+	*num_commands += uint(-cap(commands) + cap(orig_commands))
 }
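With these methods in place, the twelve specialized copies of the reference-search loop (the CreateBackwardReferencesNH* files deleted below) become redundant. As a purely hypothetical illustration of how a concrete hasher could satisfy the sketched interface by delegating to the old free functions, using the H2 variant's names from the deleted code; the real method bodies in this commit live in hasher files not shown in this excerpt, and the receiver type *H2 is an assumption:

/* Hypothetical adapter for illustration only; the receiver type *H2 and the
   continued existence of the H2 free functions are assumptions. */
func (h *H2) HashTypeLength() uint { return HashTypeLengthH2() }
func (h *H2) StoreLookahead() uint { return StoreLookaheadH2() }
func (h *H2) PrepareDistanceCache(distance_cache []int) {
	PrepareDistanceCacheH2(h, distance_cache)
}
func (h *H2) FindLongestMatch(dictionary *BrotliEncoderDictionary, data []byte, ring_buffer_mask uint, distance_cache []int, cur_ix uint, max_length uint, max_backward uint, gap uint, max_distance uint, out *HasherSearchResult) {
	FindLongestMatchH2(h, dictionary, data, ring_buffer_mask, distance_cache, cur_ix, max_length, max_backward, gap, max_distance, out)
}
func (h *H2) Store(data []byte, mask uint, ix uint) {
	StoreH2(h, data, mask, ix)
}
func (h *H2) StoreRange(data []byte, mask uint, ix_start uint, ix_end uint) {
	StoreRangeH2(h, data, mask, ix_start, ix_end)
}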

View File

@@ -1,148 +0,0 @@
package brotli
/* NOLINT(build/header_guard) */
/* Copyright 2013 Google Inc. All Rights Reserved.
Distributed under MIT license.
See file LICENSE for detail or copy at https://opensource.org/licenses/MIT
*/
func CreateBackwardReferencesNH2(num_bytes uint, position uint, ringbuffer []byte, ringbuffer_mask uint, params *BrotliEncoderParams, hasher HasherHandle, dist_cache []int, last_insert_len *uint, commands []Command, num_commands *uint, num_literals *uint) {
var max_backward_limit uint = BROTLI_MAX_BACKWARD_LIMIT(params.lgwin)
var orig_commands []Command = commands
var insert_length uint = *last_insert_len
var pos_end uint = position + num_bytes
var store_end uint
if num_bytes >= StoreLookaheadH2() {
store_end = position + num_bytes - StoreLookaheadH2() + 1
} else {
store_end = position
}
var random_heuristics_window_size uint = LiteralSpreeLengthForSparseSearch(params)
var apply_random_heuristics uint = position + random_heuristics_window_size
var gap uint = 0
/* Set maximum distance, see section 9.1. of the spec. */
var kMinScore uint = BROTLI_SCORE_BASE + 100
/* For speed up heuristics for random data. */
/* Minimum score to accept a backward reference. */
PrepareDistanceCacheH2(hasher, dist_cache)
for position+HashTypeLengthH2() < pos_end {
var max_length uint = pos_end - position
var max_distance uint = brotli_min_size_t(position, max_backward_limit)
var sr HasherSearchResult
sr.len = 0
sr.len_code_delta = 0
sr.distance = 0
sr.score = kMinScore
FindLongestMatchH2(hasher, &params.dictionary, ringbuffer, ringbuffer_mask, dist_cache, position, max_length, max_distance, gap, params.dist.max_distance, &sr)
if sr.score > kMinScore {
/* Found a match. Let's look for something even better ahead. */
var delayed_backward_references_in_row int = 0
max_length--
for ; ; max_length-- {
var cost_diff_lazy uint = 175
var sr2 HasherSearchResult
if params.quality < MIN_QUALITY_FOR_EXTENSIVE_REFERENCE_SEARCH {
sr2.len = brotli_min_size_t(sr.len-1, max_length)
} else {
sr2.len = 0
}
sr2.len_code_delta = 0
sr2.distance = 0
sr2.score = kMinScore
max_distance = brotli_min_size_t(position+1, max_backward_limit)
FindLongestMatchH2(hasher, &params.dictionary, ringbuffer, ringbuffer_mask, dist_cache, position+1, max_length, max_distance, gap, params.dist.max_distance, &sr2)
if sr2.score >= sr.score+cost_diff_lazy {
/* Ok, let's just write one byte for now and start a match from the
next byte. */
position++
insert_length++
sr = sr2
delayed_backward_references_in_row++
if delayed_backward_references_in_row < 4 && position+HashTypeLengthH2() < pos_end {
continue
}
}
break
}
apply_random_heuristics = position + 2*sr.len + random_heuristics_window_size
max_distance = brotli_min_size_t(position, max_backward_limit)
{
/* The first 16 codes are special short-codes,
and the minimum offset is 1. */
var distance_code uint = ComputeDistanceCode(sr.distance, max_distance+gap, dist_cache)
if (sr.distance <= (max_distance + gap)) && distance_code > 0 {
dist_cache[3] = dist_cache[2]
dist_cache[2] = dist_cache[1]
dist_cache[1] = dist_cache[0]
dist_cache[0] = int(sr.distance)
PrepareDistanceCacheH2(hasher, dist_cache)
}
InitCommand(&commands[0], &params.dist, insert_length, sr.len, sr.len_code_delta, distance_code)
commands = commands[1:]
}
*num_literals += insert_length
insert_length = 0
/* Put the hash keys into the table, if there are enough bytes left.
Depending on the hasher implementation, it can push all positions
in the given range or only a subset of them.
Avoid hash poisoning with RLE data. */
{
var range_start uint = position + 2
var range_end uint = brotli_min_size_t(position+sr.len, store_end)
if sr.distance < sr.len>>2 {
range_start = brotli_min_size_t(range_end, brotli_max_size_t(range_start, position+sr.len-(sr.distance<<2)))
}
StoreRangeH2(hasher, ringbuffer, ringbuffer_mask, range_start, range_end)
}
position += sr.len
} else {
insert_length++
position++
/* If we have not seen matches for a long time, we can skip some
match lookups. Unsuccessful match lookups are very very expensive
and this kind of a heuristic speeds up compression quite
a lot. */
if position > apply_random_heuristics {
/* Going through uncompressible data, jump. */
if position > apply_random_heuristics+4*random_heuristics_window_size {
var kMargin uint = brotli_max_size_t(StoreLookaheadH2()-1, 4)
/* It is quite a long time since we saw a copy, so we assume
that this data is not compressible, and store hashes less
often. Hashes of non compressible data are less likely to
turn out to be useful in the future, too, so we store less of
them to not to flood out the hash table of good compressible
data. */
var pos_jump uint = brotli_min_size_t(position+16, pos_end-kMargin)
for ; position < pos_jump; position += 4 {
StoreH2(hasher, ringbuffer, ringbuffer_mask, position)
insert_length += 4
}
} else {
var kMargin uint = brotli_max_size_t(StoreLookaheadH2()-1, 2)
var pos_jump uint = brotli_min_size_t(position+8, pos_end-kMargin)
for ; position < pos_jump; position += 2 {
StoreH2(hasher, ringbuffer, ringbuffer_mask, position)
insert_length += 2
}
}
}
}
}
insert_length += pos_end - position
*last_insert_len = insert_length
*num_commands += uint(-cap(commands) + cap(orig_commands))
}

View File

@@ -1,148 +0,0 @@
package brotli
/* NOLINT(build/header_guard) */
/* Copyright 2013 Google Inc. All Rights Reserved.
Distributed under MIT license.
See file LICENSE for detail or copy at https://opensource.org/licenses/MIT
*/
func CreateBackwardReferencesNH3(num_bytes uint, position uint, ringbuffer []byte, ringbuffer_mask uint, params *BrotliEncoderParams, hasher HasherHandle, dist_cache []int, last_insert_len *uint, commands []Command, num_commands *uint, num_literals *uint) {
var max_backward_limit uint = BROTLI_MAX_BACKWARD_LIMIT(params.lgwin)
var orig_commands []Command = commands
var insert_length uint = *last_insert_len
var pos_end uint = position + num_bytes
var store_end uint
if num_bytes >= StoreLookaheadH3() {
store_end = position + num_bytes - StoreLookaheadH3() + 1
} else {
store_end = position
}
var random_heuristics_window_size uint = LiteralSpreeLengthForSparseSearch(params)
var apply_random_heuristics uint = position + random_heuristics_window_size
var gap uint = 0
/* Set maximum distance, see section 9.1. of the spec. */
var kMinScore uint = BROTLI_SCORE_BASE + 100
/* For speed up heuristics for random data. */
/* Minimum score to accept a backward reference. */
PrepareDistanceCacheH3(hasher, dist_cache)
for position+HashTypeLengthH3() < pos_end {
var max_length uint = pos_end - position
var max_distance uint = brotli_min_size_t(position, max_backward_limit)
var sr HasherSearchResult
sr.len = 0
sr.len_code_delta = 0
sr.distance = 0
sr.score = kMinScore
FindLongestMatchH3(hasher, &params.dictionary, ringbuffer, ringbuffer_mask, dist_cache, position, max_length, max_distance, gap, params.dist.max_distance, &sr)
if sr.score > kMinScore {
/* Found a match. Let's look for something even better ahead. */
var delayed_backward_references_in_row int = 0
max_length--
for ; ; max_length-- {
var cost_diff_lazy uint = 175
var sr2 HasherSearchResult
if params.quality < MIN_QUALITY_FOR_EXTENSIVE_REFERENCE_SEARCH {
sr2.len = brotli_min_size_t(sr.len-1, max_length)
} else {
sr2.len = 0
}
sr2.len_code_delta = 0
sr2.distance = 0
sr2.score = kMinScore
max_distance = brotli_min_size_t(position+1, max_backward_limit)
FindLongestMatchH3(hasher, &params.dictionary, ringbuffer, ringbuffer_mask, dist_cache, position+1, max_length, max_distance, gap, params.dist.max_distance, &sr2)
if sr2.score >= sr.score+cost_diff_lazy {
/* Ok, let's just write one byte for now and start a match from the
next byte. */
position++
insert_length++
sr = sr2
delayed_backward_references_in_row++
if delayed_backward_references_in_row < 4 && position+HashTypeLengthH3() < pos_end {
continue
}
}
break
}
apply_random_heuristics = position + 2*sr.len + random_heuristics_window_size
max_distance = brotli_min_size_t(position, max_backward_limit)
{
/* The first 16 codes are special short-codes,
and the minimum offset is 1. */
var distance_code uint = ComputeDistanceCode(sr.distance, max_distance+gap, dist_cache)
if (sr.distance <= (max_distance + gap)) && distance_code > 0 {
dist_cache[3] = dist_cache[2]
dist_cache[2] = dist_cache[1]
dist_cache[1] = dist_cache[0]
dist_cache[0] = int(sr.distance)
PrepareDistanceCacheH3(hasher, dist_cache)
}
InitCommand(&commands[0], &params.dist, insert_length, sr.len, sr.len_code_delta, distance_code)
commands = commands[1:]
}
*num_literals += insert_length
insert_length = 0
/* Put the hash keys into the table, if there are enough bytes left.
Depending on the hasher implementation, it can push all positions
in the given range or only a subset of them.
Avoid hash poisoning with RLE data. */
{
var range_start uint = position + 2
var range_end uint = brotli_min_size_t(position+sr.len, store_end)
if sr.distance < sr.len>>2 {
range_start = brotli_min_size_t(range_end, brotli_max_size_t(range_start, position+sr.len-(sr.distance<<2)))
}
StoreRangeH3(hasher, ringbuffer, ringbuffer_mask, range_start, range_end)
}
position += sr.len
} else {
insert_length++
position++
/* If we have not seen matches for a long time, we can skip some
match lookups. Unsuccessful match lookups are very very expensive
and this kind of a heuristic speeds up compression quite
a lot. */
if position > apply_random_heuristics {
/* Going through uncompressible data, jump. */
if position > apply_random_heuristics+4*random_heuristics_window_size {
var kMargin uint = brotli_max_size_t(StoreLookaheadH3()-1, 4)
/* It is quite a long time since we saw a copy, so we assume
that this data is not compressible, and store hashes less
often. Hashes of non compressible data are less likely to
turn out to be useful in the future, too, so we store less of
them to not to flood out the hash table of good compressible
data. */
var pos_jump uint = brotli_min_size_t(position+16, pos_end-kMargin)
for ; position < pos_jump; position += 4 {
StoreH3(hasher, ringbuffer, ringbuffer_mask, position)
insert_length += 4
}
} else {
var kMargin uint = brotli_max_size_t(StoreLookaheadH3()-1, 2)
var pos_jump uint = brotli_min_size_t(position+8, pos_end-kMargin)
for ; position < pos_jump; position += 2 {
StoreH3(hasher, ringbuffer, ringbuffer_mask, position)
insert_length += 2
}
}
}
}
}
insert_length += pos_end - position
*last_insert_len = insert_length
*num_commands += uint(-cap(commands) + cap(orig_commands))
}

View File

@@ -1,148 +0,0 @@
package brotli
/* NOLINT(build/header_guard) */
/* Copyright 2013 Google Inc. All Rights Reserved.
Distributed under MIT license.
See file LICENSE for detail or copy at https://opensource.org/licenses/MIT
*/
func CreateBackwardReferencesNH35(num_bytes uint, position uint, ringbuffer []byte, ringbuffer_mask uint, params *BrotliEncoderParams, hasher HasherHandle, dist_cache []int, last_insert_len *uint, commands []Command, num_commands *uint, num_literals *uint) {
var max_backward_limit uint = BROTLI_MAX_BACKWARD_LIMIT(params.lgwin)
var orig_commands []Command = commands
var insert_length uint = *last_insert_len
var pos_end uint = position + num_bytes
var store_end uint
if num_bytes >= StoreLookaheadH35() {
store_end = position + num_bytes - StoreLookaheadH35() + 1
} else {
store_end = position
}
var random_heuristics_window_size uint = LiteralSpreeLengthForSparseSearch(params)
var apply_random_heuristics uint = position + random_heuristics_window_size
var gap uint = 0
/* Set maximum distance, see section 9.1. of the spec. */
var kMinScore uint = BROTLI_SCORE_BASE + 100
/* For speed up heuristics for random data. */
/* Minimum score to accept a backward reference. */
PrepareDistanceCacheH35(hasher, dist_cache)
for position+HashTypeLengthH35() < pos_end {
var max_length uint = pos_end - position
var max_distance uint = brotli_min_size_t(position, max_backward_limit)
var sr HasherSearchResult
sr.len = 0
sr.len_code_delta = 0
sr.distance = 0
sr.score = kMinScore
FindLongestMatchH35(hasher, &params.dictionary, ringbuffer, ringbuffer_mask, dist_cache, position, max_length, max_distance, gap, params.dist.max_distance, &sr)
if sr.score > kMinScore {
/* Found a match. Let's look for something even better ahead. */
var delayed_backward_references_in_row int = 0
max_length--
for ; ; max_length-- {
var cost_diff_lazy uint = 175
var sr2 HasherSearchResult
if params.quality < MIN_QUALITY_FOR_EXTENSIVE_REFERENCE_SEARCH {
sr2.len = brotli_min_size_t(sr.len-1, max_length)
} else {
sr2.len = 0
}
sr2.len_code_delta = 0
sr2.distance = 0
sr2.score = kMinScore
max_distance = brotli_min_size_t(position+1, max_backward_limit)
FindLongestMatchH35(hasher, &params.dictionary, ringbuffer, ringbuffer_mask, dist_cache, position+1, max_length, max_distance, gap, params.dist.max_distance, &sr2)
if sr2.score >= sr.score+cost_diff_lazy {
/* Ok, let's just write one byte for now and start a match from the
next byte. */
position++
insert_length++
sr = sr2
delayed_backward_references_in_row++
if delayed_backward_references_in_row < 4 && position+HashTypeLengthH35() < pos_end {
continue
}
}
break
}
apply_random_heuristics = position + 2*sr.len + random_heuristics_window_size
max_distance = brotli_min_size_t(position, max_backward_limit)
{
/* The first 16 codes are special short-codes,
and the minimum offset is 1. */
var distance_code uint = ComputeDistanceCode(sr.distance, max_distance+gap, dist_cache)
if (sr.distance <= (max_distance + gap)) && distance_code > 0 {
dist_cache[3] = dist_cache[2]
dist_cache[2] = dist_cache[1]
dist_cache[1] = dist_cache[0]
dist_cache[0] = int(sr.distance)
PrepareDistanceCacheH35(hasher, dist_cache)
}
InitCommand(&commands[0], &params.dist, insert_length, sr.len, sr.len_code_delta, distance_code)
commands = commands[1:]
}
*num_literals += insert_length
insert_length = 0
/* Put the hash keys into the table, if there are enough bytes left.
Depending on the hasher implementation, it can push all positions
in the given range or only a subset of them.
Avoid hash poisoning with RLE data. */
{
var range_start uint = position + 2
var range_end uint = brotli_min_size_t(position+sr.len, store_end)
if sr.distance < sr.len>>2 {
range_start = brotli_min_size_t(range_end, brotli_max_size_t(range_start, position+sr.len-(sr.distance<<2)))
}
StoreRangeH35(hasher, ringbuffer, ringbuffer_mask, range_start, range_end)
}
position += sr.len
} else {
insert_length++
position++
/* If we have not seen matches for a long time, we can skip some
match lookups. Unsuccessful match lookups are very very expensive
and this kind of a heuristic speeds up compression quite
a lot. */
if position > apply_random_heuristics {
/* Going through uncompressible data, jump. */
if position > apply_random_heuristics+4*random_heuristics_window_size {
var kMargin uint = brotli_max_size_t(StoreLookaheadH35()-1, 4)
/* It is quite a long time since we saw a copy, so we assume
that this data is not compressible, and store hashes less
often. Hashes of non compressible data are less likely to
turn out to be useful in the future, too, so we store less of
them to not to flood out the hash table of good compressible
data. */
var pos_jump uint = brotli_min_size_t(position+16, pos_end-kMargin)
for ; position < pos_jump; position += 4 {
StoreH35(hasher, ringbuffer, ringbuffer_mask, position)
insert_length += 4
}
} else {
var kMargin uint = brotli_max_size_t(StoreLookaheadH35()-1, 2)
var pos_jump uint = brotli_min_size_t(position+8, pos_end-kMargin)
for ; position < pos_jump; position += 2 {
StoreH35(hasher, ringbuffer, ringbuffer_mask, position)
insert_length += 2
}
}
}
}
}
insert_length += pos_end - position
*last_insert_len = insert_length
*num_commands += uint(-cap(commands) + cap(orig_commands))
}

View File

@@ -1,148 +0,0 @@
package brotli
/* NOLINT(build/header_guard) */
/* Copyright 2013 Google Inc. All Rights Reserved.
Distributed under MIT license.
See file LICENSE for detail or copy at https://opensource.org/licenses/MIT
*/
func CreateBackwardReferencesNH4(num_bytes uint, position uint, ringbuffer []byte, ringbuffer_mask uint, params *BrotliEncoderParams, hasher HasherHandle, dist_cache []int, last_insert_len *uint, commands []Command, num_commands *uint, num_literals *uint) {
var max_backward_limit uint = BROTLI_MAX_BACKWARD_LIMIT(params.lgwin)
var orig_commands []Command = commands
var insert_length uint = *last_insert_len
var pos_end uint = position + num_bytes
var store_end uint
if num_bytes >= StoreLookaheadH4() {
store_end = position + num_bytes - StoreLookaheadH4() + 1
} else {
store_end = position
}
var random_heuristics_window_size uint = LiteralSpreeLengthForSparseSearch(params)
var apply_random_heuristics uint = position + random_heuristics_window_size
var gap uint = 0
/* Set maximum distance, see section 9.1. of the spec. */
var kMinScore uint = BROTLI_SCORE_BASE + 100
/* For speed up heuristics for random data. */
/* Minimum score to accept a backward reference. */
PrepareDistanceCacheH4(hasher, dist_cache)
for position+HashTypeLengthH4() < pos_end {
var max_length uint = pos_end - position
var max_distance uint = brotli_min_size_t(position, max_backward_limit)
var sr HasherSearchResult
sr.len = 0
sr.len_code_delta = 0
sr.distance = 0
sr.score = kMinScore
FindLongestMatchH4(hasher, &params.dictionary, ringbuffer, ringbuffer_mask, dist_cache, position, max_length, max_distance, gap, params.dist.max_distance, &sr)
if sr.score > kMinScore {
/* Found a match. Let's look for something even better ahead. */
var delayed_backward_references_in_row int = 0
max_length--
for ; ; max_length-- {
var cost_diff_lazy uint = 175
var sr2 HasherSearchResult
if params.quality < MIN_QUALITY_FOR_EXTENSIVE_REFERENCE_SEARCH {
sr2.len = brotli_min_size_t(sr.len-1, max_length)
} else {
sr2.len = 0
}
sr2.len_code_delta = 0
sr2.distance = 0
sr2.score = kMinScore
max_distance = brotli_min_size_t(position+1, max_backward_limit)
FindLongestMatchH4(hasher, &params.dictionary, ringbuffer, ringbuffer_mask, dist_cache, position+1, max_length, max_distance, gap, params.dist.max_distance, &sr2)
if sr2.score >= sr.score+cost_diff_lazy {
/* Ok, let's just write one byte for now and start a match from the
next byte. */
position++
insert_length++
sr = sr2
delayed_backward_references_in_row++
if delayed_backward_references_in_row < 4 && position+HashTypeLengthH4() < pos_end {
continue
}
}
break
}
apply_random_heuristics = position + 2*sr.len + random_heuristics_window_size
max_distance = brotli_min_size_t(position, max_backward_limit)
{
/* The first 16 codes are special short-codes,
and the minimum offset is 1. */
var distance_code uint = ComputeDistanceCode(sr.distance, max_distance+gap, dist_cache)
if (sr.distance <= (max_distance + gap)) && distance_code > 0 {
dist_cache[3] = dist_cache[2]
dist_cache[2] = dist_cache[1]
dist_cache[1] = dist_cache[0]
dist_cache[0] = int(sr.distance)
PrepareDistanceCacheH4(hasher, dist_cache)
}
InitCommand(&commands[0], &params.dist, insert_length, sr.len, sr.len_code_delta, distance_code)
commands = commands[1:]
}
*num_literals += insert_length
insert_length = 0
/* Put the hash keys into the table, if there are enough bytes left.
Depending on the hasher implementation, it can push all positions
in the given range or only a subset of them.
Avoid hash poisoning with RLE data. */
{
var range_start uint = position + 2
var range_end uint = brotli_min_size_t(position+sr.len, store_end)
if sr.distance < sr.len>>2 {
range_start = brotli_min_size_t(range_end, brotli_max_size_t(range_start, position+sr.len-(sr.distance<<2)))
}
StoreRangeH4(hasher, ringbuffer, ringbuffer_mask, range_start, range_end)
}
position += sr.len
} else {
insert_length++
position++
/* If we have not seen matches for a long time, we can skip some
match lookups. Unsuccessful match lookups are very very expensive
and this kind of a heuristic speeds up compression quite
a lot. */
if position > apply_random_heuristics {
/* Going through uncompressible data, jump. */
if position > apply_random_heuristics+4*random_heuristics_window_size {
var kMargin uint = brotli_max_size_t(StoreLookaheadH4()-1, 4)
/* It is quite a long time since we saw a copy, so we assume
that this data is not compressible, and store hashes less
often. Hashes of non compressible data are less likely to
turn out to be useful in the future, too, so we store less of
them to not to flood out the hash table of good compressible
data. */
var pos_jump uint = brotli_min_size_t(position+16, pos_end-kMargin)
for ; position < pos_jump; position += 4 {
StoreH4(hasher, ringbuffer, ringbuffer_mask, position)
insert_length += 4
}
} else {
var kMargin uint = brotli_max_size_t(StoreLookaheadH4()-1, 2)
var pos_jump uint = brotli_min_size_t(position+8, pos_end-kMargin)
for ; position < pos_jump; position += 2 {
StoreH4(hasher, ringbuffer, ringbuffer_mask, position)
insert_length += 2
}
}
}
}
}
insert_length += pos_end - position
*last_insert_len = insert_length
*num_commands += uint(-cap(commands) + cap(orig_commands))
}

View File

@@ -1,148 +0,0 @@
package brotli
/* NOLINT(build/header_guard) */
/* Copyright 2013 Google Inc. All Rights Reserved.
Distributed under MIT license.
See file LICENSE for detail or copy at https://opensource.org/licenses/MIT
*/
func CreateBackwardReferencesNH40(num_bytes uint, position uint, ringbuffer []byte, ringbuffer_mask uint, params *BrotliEncoderParams, hasher HasherHandle, dist_cache []int, last_insert_len *uint, commands []Command, num_commands *uint, num_literals *uint) {
var max_backward_limit uint = BROTLI_MAX_BACKWARD_LIMIT(params.lgwin)
var orig_commands []Command = commands
var insert_length uint = *last_insert_len
var pos_end uint = position + num_bytes
var store_end uint
if num_bytes >= StoreLookaheadH40() {
store_end = position + num_bytes - StoreLookaheadH40() + 1
} else {
store_end = position
}
var random_heuristics_window_size uint = LiteralSpreeLengthForSparseSearch(params)
var apply_random_heuristics uint = position + random_heuristics_window_size
var gap uint = 0
/* Set maximum distance, see section 9.1. of the spec. */
var kMinScore uint = BROTLI_SCORE_BASE + 100
/* For speed up heuristics for random data. */
/* Minimum score to accept a backward reference. */
PrepareDistanceCacheH40(hasher, dist_cache)
for position+HashTypeLengthH40() < pos_end {
var max_length uint = pos_end - position
var max_distance uint = brotli_min_size_t(position, max_backward_limit)
var sr HasherSearchResult
sr.len = 0
sr.len_code_delta = 0
sr.distance = 0
sr.score = kMinScore
FindLongestMatchH40(hasher, &params.dictionary, ringbuffer, ringbuffer_mask, dist_cache, position, max_length, max_distance, gap, params.dist.max_distance, &sr)
if sr.score > kMinScore {
/* Found a match. Let's look for something even better ahead. */
var delayed_backward_references_in_row int = 0
max_length--
for ; ; max_length-- {
var cost_diff_lazy uint = 175
var sr2 HasherSearchResult
if params.quality < MIN_QUALITY_FOR_EXTENSIVE_REFERENCE_SEARCH {
sr2.len = brotli_min_size_t(sr.len-1, max_length)
} else {
sr2.len = 0
}
sr2.len_code_delta = 0
sr2.distance = 0
sr2.score = kMinScore
max_distance = brotli_min_size_t(position+1, max_backward_limit)
FindLongestMatchH40(hasher, &params.dictionary, ringbuffer, ringbuffer_mask, dist_cache, position+1, max_length, max_distance, gap, params.dist.max_distance, &sr2)
if sr2.score >= sr.score+cost_diff_lazy {
/* Ok, let's just write one byte for now and start a match from the
next byte. */
position++
insert_length++
sr = sr2
delayed_backward_references_in_row++
if delayed_backward_references_in_row < 4 && position+HashTypeLengthH40() < pos_end {
continue
}
}
break
}
apply_random_heuristics = position + 2*sr.len + random_heuristics_window_size
max_distance = brotli_min_size_t(position, max_backward_limit)
{
/* The first 16 codes are special short-codes,
and the minimum offset is 1. */
var distance_code uint = ComputeDistanceCode(sr.distance, max_distance+gap, dist_cache)
if (sr.distance <= (max_distance + gap)) && distance_code > 0 {
dist_cache[3] = dist_cache[2]
dist_cache[2] = dist_cache[1]
dist_cache[1] = dist_cache[0]
dist_cache[0] = int(sr.distance)
PrepareDistanceCacheH40(hasher, dist_cache)
}
InitCommand(&commands[0], &params.dist, insert_length, sr.len, sr.len_code_delta, distance_code)
commands = commands[1:]
}
*num_literals += insert_length
insert_length = 0
/* Put the hash keys into the table, if there are enough bytes left.
Depending on the hasher implementation, it can push all positions
in the given range or only a subset of them.
Avoid hash poisoning with RLE data. */
{
var range_start uint = position + 2
var range_end uint = brotli_min_size_t(position+sr.len, store_end)
if sr.distance < sr.len>>2 {
range_start = brotli_min_size_t(range_end, brotli_max_size_t(range_start, position+sr.len-(sr.distance<<2)))
}
StoreRangeH40(hasher, ringbuffer, ringbuffer_mask, range_start, range_end)
}
position += sr.len
} else {
insert_length++
position++
/* If we have not seen matches for a long time, we can skip some
match lookups. Unsuccessful match lookups are very very expensive
and this kind of a heuristic speeds up compression quite
a lot. */
if position > apply_random_heuristics {
/* Going through uncompressible data, jump. */
if position > apply_random_heuristics+4*random_heuristics_window_size {
var kMargin uint = brotli_max_size_t(StoreLookaheadH40()-1, 4)
/* It is quite a long time since we saw a copy, so we assume
that this data is not compressible, and store hashes less
often. Hashes of non compressible data are less likely to
turn out to be useful in the future, too, so we store less of
them to not to flood out the hash table of good compressible
data. */
var pos_jump uint = brotli_min_size_t(position+16, pos_end-kMargin)
for ; position < pos_jump; position += 4 {
StoreH40(hasher, ringbuffer, ringbuffer_mask, position)
insert_length += 4
}
} else {
var kMargin uint = brotli_max_size_t(StoreLookaheadH40()-1, 2)
var pos_jump uint = brotli_min_size_t(position+8, pos_end-kMargin)
for ; position < pos_jump; position += 2 {
StoreH40(hasher, ringbuffer, ringbuffer_mask, position)
insert_length += 2
}
}
}
}
}
insert_length += pos_end - position
*last_insert_len = insert_length
*num_commands += uint(-cap(commands) + cap(orig_commands))
}

View File

@@ -1,148 +0,0 @@
package brotli
/* NOLINT(build/header_guard) */
/* Copyright 2013 Google Inc. All Rights Reserved.
Distributed under MIT license.
See file LICENSE for detail or copy at https://opensource.org/licenses/MIT
*/
func CreateBackwardReferencesNH41(num_bytes uint, position uint, ringbuffer []byte, ringbuffer_mask uint, params *BrotliEncoderParams, hasher HasherHandle, dist_cache []int, last_insert_len *uint, commands []Command, num_commands *uint, num_literals *uint) {
var max_backward_limit uint = BROTLI_MAX_BACKWARD_LIMIT(params.lgwin)
var orig_commands []Command = commands
var insert_length uint = *last_insert_len
var pos_end uint = position + num_bytes
var store_end uint
if num_bytes >= StoreLookaheadH41() {
store_end = position + num_bytes - StoreLookaheadH41() + 1
} else {
store_end = position
}
var random_heuristics_window_size uint = LiteralSpreeLengthForSparseSearch(params)
var apply_random_heuristics uint = position + random_heuristics_window_size
var gap uint = 0
/* Set maximum distance, see section 9.1. of the spec. */
var kMinScore uint = BROTLI_SCORE_BASE + 100
/* For speed up heuristics for random data. */
/* Minimum score to accept a backward reference. */
PrepareDistanceCacheH41(hasher, dist_cache)
for position+HashTypeLengthH41() < pos_end {
var max_length uint = pos_end - position
var max_distance uint = brotli_min_size_t(position, max_backward_limit)
var sr HasherSearchResult
sr.len = 0
sr.len_code_delta = 0
sr.distance = 0
sr.score = kMinScore
FindLongestMatchH41(hasher, &params.dictionary, ringbuffer, ringbuffer_mask, dist_cache, position, max_length, max_distance, gap, params.dist.max_distance, &sr)
if sr.score > kMinScore {
/* Found a match. Let's look for something even better ahead. */
var delayed_backward_references_in_row int = 0
max_length--
for ; ; max_length-- {
var cost_diff_lazy uint = 175
var sr2 HasherSearchResult
if params.quality < MIN_QUALITY_FOR_EXTENSIVE_REFERENCE_SEARCH {
sr2.len = brotli_min_size_t(sr.len-1, max_length)
} else {
sr2.len = 0
}
sr2.len_code_delta = 0
sr2.distance = 0
sr2.score = kMinScore
max_distance = brotli_min_size_t(position+1, max_backward_limit)
FindLongestMatchH41(hasher, &params.dictionary, ringbuffer, ringbuffer_mask, dist_cache, position+1, max_length, max_distance, gap, params.dist.max_distance, &sr2)
if sr2.score >= sr.score+cost_diff_lazy {
/* Ok, let's just write one byte for now and start a match from the
next byte. */
position++
insert_length++
sr = sr2
delayed_backward_references_in_row++
if delayed_backward_references_in_row < 4 && position+HashTypeLengthH41() < pos_end {
continue
}
}
break
}
apply_random_heuristics = position + 2*sr.len + random_heuristics_window_size
max_distance = brotli_min_size_t(position, max_backward_limit)
{
/* The first 16 codes are special short-codes,
and the minimum offset is 1. */
var distance_code uint = ComputeDistanceCode(sr.distance, max_distance+gap, dist_cache)
if (sr.distance <= (max_distance + gap)) && distance_code > 0 {
dist_cache[3] = dist_cache[2]
dist_cache[2] = dist_cache[1]
dist_cache[1] = dist_cache[0]
dist_cache[0] = int(sr.distance)
PrepareDistanceCacheH41(hasher, dist_cache)
}
InitCommand(&commands[0], &params.dist, insert_length, sr.len, sr.len_code_delta, distance_code)
commands = commands[1:]
}
*num_literals += insert_length
insert_length = 0
/* Put the hash keys into the table, if there are enough bytes left.
Depending on the hasher implementation, it can push all positions
in the given range or only a subset of them.
Avoid hash poisoning with RLE data. */
{
var range_start uint = position + 2
var range_end uint = brotli_min_size_t(position+sr.len, store_end)
if sr.distance < sr.len>>2 {
range_start = brotli_min_size_t(range_end, brotli_max_size_t(range_start, position+sr.len-(sr.distance<<2)))
}
StoreRangeH41(hasher, ringbuffer, ringbuffer_mask, range_start, range_end)
}
position += sr.len
} else {
insert_length++
position++
/* If we have not seen matches for a long time, we can skip some
match lookups. Unsuccessful match lookups are very very expensive
and this kind of a heuristic speeds up compression quite
a lot. */
if position > apply_random_heuristics {
/* Going through uncompressible data, jump. */
if position > apply_random_heuristics+4*random_heuristics_window_size {
var kMargin uint = brotli_max_size_t(StoreLookaheadH41()-1, 4)
/* It is quite a long time since we saw a copy, so we assume
that this data is not compressible, and store hashes less
often. Hashes of non compressible data are less likely to
turn out to be useful in the future, too, so we store less of
them to not to flood out the hash table of good compressible
data. */
var pos_jump uint = brotli_min_size_t(position+16, pos_end-kMargin)
for ; position < pos_jump; position += 4 {
StoreH41(hasher, ringbuffer, ringbuffer_mask, position)
insert_length += 4
}
} else {
var kMargin uint = brotli_max_size_t(StoreLookaheadH41()-1, 2)
var pos_jump uint = brotli_min_size_t(position+8, pos_end-kMargin)
for ; position < pos_jump; position += 2 {
StoreH41(hasher, ringbuffer, ringbuffer_mask, position)
insert_length += 2
}
}
}
}
}
insert_length += pos_end - position
*last_insert_len = insert_length
*num_commands += uint(-cap(commands) + cap(orig_commands))
}

View File

@@ -1,148 +0,0 @@
package brotli
/* NOLINT(build/header_guard) */
/* Copyright 2013 Google Inc. All Rights Reserved.
Distributed under MIT license.
See file LICENSE for detail or copy at https://opensource.org/licenses/MIT
*/
func CreateBackwardReferencesNH42(num_bytes uint, position uint, ringbuffer []byte, ringbuffer_mask uint, params *BrotliEncoderParams, hasher HasherHandle, dist_cache []int, last_insert_len *uint, commands []Command, num_commands *uint, num_literals *uint) {
var max_backward_limit uint = BROTLI_MAX_BACKWARD_LIMIT(params.lgwin)
var orig_commands []Command = commands
var insert_length uint = *last_insert_len
var pos_end uint = position + num_bytes
var store_end uint
if num_bytes >= StoreLookaheadH42() {
store_end = position + num_bytes - StoreLookaheadH42() + 1
} else {
store_end = position
}
var random_heuristics_window_size uint = LiteralSpreeLengthForSparseSearch(params)
var apply_random_heuristics uint = position + random_heuristics_window_size
var gap uint = 0
/* Set maximum distance, see section 9.1. of the spec. */
var kMinScore uint = BROTLI_SCORE_BASE + 100
/* For speed up heuristics for random data. */
/* Minimum score to accept a backward reference. */
PrepareDistanceCacheH42(hasher, dist_cache)
for position+HashTypeLengthH42() < pos_end {
var max_length uint = pos_end - position
var max_distance uint = brotli_min_size_t(position, max_backward_limit)
var sr HasherSearchResult
sr.len = 0
sr.len_code_delta = 0
sr.distance = 0
sr.score = kMinScore
FindLongestMatchH42(hasher, &params.dictionary, ringbuffer, ringbuffer_mask, dist_cache, position, max_length, max_distance, gap, params.dist.max_distance, &sr)
if sr.score > kMinScore {
/* Found a match. Let's look for something even better ahead. */
var delayed_backward_references_in_row int = 0
max_length--
for ; ; max_length-- {
var cost_diff_lazy uint = 175
var sr2 HasherSearchResult
if params.quality < MIN_QUALITY_FOR_EXTENSIVE_REFERENCE_SEARCH {
sr2.len = brotli_min_size_t(sr.len-1, max_length)
} else {
sr2.len = 0
}
sr2.len_code_delta = 0
sr2.distance = 0
sr2.score = kMinScore
max_distance = brotli_min_size_t(position+1, max_backward_limit)
FindLongestMatchH42(hasher, &params.dictionary, ringbuffer, ringbuffer_mask, dist_cache, position+1, max_length, max_distance, gap, params.dist.max_distance, &sr2)
if sr2.score >= sr.score+cost_diff_lazy {
/* Ok, let's just write one byte for now and start a match from the
next byte. */
position++
insert_length++
sr = sr2
delayed_backward_references_in_row++
if delayed_backward_references_in_row < 4 && position+HashTypeLengthH42() < pos_end {
continue
}
}
break
}
apply_random_heuristics = position + 2*sr.len + random_heuristics_window_size
max_distance = brotli_min_size_t(position, max_backward_limit)
{
/* The first 16 codes are special short-codes,
and the minimum offset is 1. */
var distance_code uint = ComputeDistanceCode(sr.distance, max_distance+gap, dist_cache)
if (sr.distance <= (max_distance + gap)) && distance_code > 0 {
dist_cache[3] = dist_cache[2]
dist_cache[2] = dist_cache[1]
dist_cache[1] = dist_cache[0]
dist_cache[0] = int(sr.distance)
PrepareDistanceCacheH42(hasher, dist_cache)
}
InitCommand(&commands[0], &params.dist, insert_length, sr.len, sr.len_code_delta, distance_code)
commands = commands[1:]
}
*num_literals += insert_length
insert_length = 0
/* Put the hash keys into the table, if there are enough bytes left.
Depending on the hasher implementation, it can push all positions
in the given range or only a subset of them.
Avoid hash poisoning with RLE data. */
{
var range_start uint = position + 2
var range_end uint = brotli_min_size_t(position+sr.len, store_end)
if sr.distance < sr.len>>2 {
range_start = brotli_min_size_t(range_end, brotli_max_size_t(range_start, position+sr.len-(sr.distance<<2)))
}
StoreRangeH42(hasher, ringbuffer, ringbuffer_mask, range_start, range_end)
}
position += sr.len
} else {
insert_length++
position++
/* If we have not seen matches for a long time, we can skip some
match lookups. Unsuccessful match lookups are very very expensive
and this kind of a heuristic speeds up compression quite
a lot. */
if position > apply_random_heuristics {
/* Going through uncompressible data, jump. */
if position > apply_random_heuristics+4*random_heuristics_window_size {
var kMargin uint = brotli_max_size_t(StoreLookaheadH42()-1, 4)
/* It is quite a long time since we saw a copy, so we assume
that this data is not compressible, and store hashes less
often. Hashes of non compressible data are less likely to
turn out to be useful in the future, too, so we store less of
them to not to flood out the hash table of good compressible
data. */
var pos_jump uint = brotli_min_size_t(position+16, pos_end-kMargin)
for ; position < pos_jump; position += 4 {
StoreH42(hasher, ringbuffer, ringbuffer_mask, position)
insert_length += 4
}
} else {
var kMargin uint = brotli_max_size_t(StoreLookaheadH42()-1, 2)
var pos_jump uint = brotli_min_size_t(position+8, pos_end-kMargin)
for ; position < pos_jump; position += 2 {
StoreH42(hasher, ringbuffer, ringbuffer_mask, position)
insert_length += 2
}
}
}
}
}
insert_length += pos_end - position
*last_insert_len = insert_length
*num_commands += uint(-cap(commands) + cap(orig_commands))
}

View File

@@ -1,148 +0,0 @@
package brotli
/* NOLINT(build/header_guard) */
/* Copyright 2013 Google Inc. All Rights Reserved.
Distributed under MIT license.
See file LICENSE for detail or copy at https://opensource.org/licenses/MIT
*/
func CreateBackwardReferencesNH5(num_bytes uint, position uint, ringbuffer []byte, ringbuffer_mask uint, params *BrotliEncoderParams, hasher HasherHandle, dist_cache []int, last_insert_len *uint, commands []Command, num_commands *uint, num_literals *uint) {
var max_backward_limit uint = BROTLI_MAX_BACKWARD_LIMIT(params.lgwin)
var orig_commands []Command = commands
var insert_length uint = *last_insert_len
var pos_end uint = position + num_bytes
var store_end uint
if num_bytes >= StoreLookaheadH5() {
store_end = position + num_bytes - StoreLookaheadH5() + 1
} else {
store_end = position
}
var random_heuristics_window_size uint = LiteralSpreeLengthForSparseSearch(params)
var apply_random_heuristics uint = position + random_heuristics_window_size
var gap uint = 0
/* Set maximum distance, see section 9.1. of the spec. */
var kMinScore uint = BROTLI_SCORE_BASE + 100
/* For speed up heuristics for random data. */
/* Minimum score to accept a backward reference. */
PrepareDistanceCacheH5(hasher, dist_cache)
for position+HashTypeLengthH5() < pos_end {
var max_length uint = pos_end - position
var max_distance uint = brotli_min_size_t(position, max_backward_limit)
var sr HasherSearchResult
sr.len = 0
sr.len_code_delta = 0
sr.distance = 0
sr.score = kMinScore
FindLongestMatchH5(hasher, &params.dictionary, ringbuffer, ringbuffer_mask, dist_cache, position, max_length, max_distance, gap, params.dist.max_distance, &sr)
if sr.score > kMinScore {
/* Found a match. Let's look for something even better ahead. */
var delayed_backward_references_in_row int = 0
max_length--
for ; ; max_length-- {
var cost_diff_lazy uint = 175
var sr2 HasherSearchResult
if params.quality < MIN_QUALITY_FOR_EXTENSIVE_REFERENCE_SEARCH {
sr2.len = brotli_min_size_t(sr.len-1, max_length)
} else {
sr2.len = 0
}
sr2.len_code_delta = 0
sr2.distance = 0
sr2.score = kMinScore
max_distance = brotli_min_size_t(position+1, max_backward_limit)
FindLongestMatchH5(hasher, &params.dictionary, ringbuffer, ringbuffer_mask, dist_cache, position+1, max_length, max_distance, gap, params.dist.max_distance, &sr2)
if sr2.score >= sr.score+cost_diff_lazy {
/* Ok, let's just write one byte for now and start a match from the
next byte. */
position++
insert_length++
sr = sr2
delayed_backward_references_in_row++
if delayed_backward_references_in_row < 4 && position+HashTypeLengthH5() < pos_end {
continue
}
}
break
}
apply_random_heuristics = position + 2*sr.len + random_heuristics_window_size
max_distance = brotli_min_size_t(position, max_backward_limit)
{
/* The first 16 codes are special short-codes,
and the minimum offset is 1. */
var distance_code uint = ComputeDistanceCode(sr.distance, max_distance+gap, dist_cache)
if (sr.distance <= (max_distance + gap)) && distance_code > 0 {
dist_cache[3] = dist_cache[2]
dist_cache[2] = dist_cache[1]
dist_cache[1] = dist_cache[0]
dist_cache[0] = int(sr.distance)
PrepareDistanceCacheH5(hasher, dist_cache)
}
InitCommand(&commands[0], &params.dist, insert_length, sr.len, sr.len_code_delta, distance_code)
commands = commands[1:]
}
*num_literals += insert_length
insert_length = 0
/* Put the hash keys into the table, if there are enough bytes left.
Depending on the hasher implementation, it can push all positions
in the given range or only a subset of them.
Avoid hash poisoning with RLE data. */
{
var range_start uint = position + 2
var range_end uint = brotli_min_size_t(position+sr.len, store_end)
if sr.distance < sr.len>>2 {
range_start = brotli_min_size_t(range_end, brotli_max_size_t(range_start, position+sr.len-(sr.distance<<2)))
}
StoreRangeH5(hasher, ringbuffer, ringbuffer_mask, range_start, range_end)
}
position += sr.len
} else {
insert_length++
position++
/* If we have not seen matches for a long time, we can skip some
match lookups. Unsuccessful match lookups are very very expensive
and this kind of a heuristic speeds up compression quite
a lot. */
if position > apply_random_heuristics {
/* Going through uncompressible data, jump. */
if position > apply_random_heuristics+4*random_heuristics_window_size {
var kMargin uint = brotli_max_size_t(StoreLookaheadH5()-1, 4)
/* It is quite a long time since we saw a copy, so we assume
that this data is not compressible, and store hashes less
often. Hashes of non compressible data are less likely to
turn out to be useful in the future, too, so we store less of
them to not to flood out the hash table of good compressible
data. */
var pos_jump uint = brotli_min_size_t(position+16, pos_end-kMargin)
for ; position < pos_jump; position += 4 {
StoreH5(hasher, ringbuffer, ringbuffer_mask, position)
insert_length += 4
}
} else {
var kMargin uint = brotli_max_size_t(StoreLookaheadH5()-1, 2)
var pos_jump uint = brotli_min_size_t(position+8, pos_end-kMargin)
for ; position < pos_jump; position += 2 {
StoreH5(hasher, ringbuffer, ringbuffer_mask, position)
insert_length += 2
}
}
}
}
}
insert_length += pos_end - position
*last_insert_len = insert_length
*num_commands += uint(-cap(commands) + cap(orig_commands))
}

View File

@@ -1,148 +0,0 @@
package brotli
/* NOLINT(build/header_guard) */
/* Copyright 2013 Google Inc. All Rights Reserved.
Distributed under MIT license.
See file LICENSE for detail or copy at https://opensource.org/licenses/MIT
*/
func CreateBackwardReferencesNH54(num_bytes uint, position uint, ringbuffer []byte, ringbuffer_mask uint, params *BrotliEncoderParams, hasher HasherHandle, dist_cache []int, last_insert_len *uint, commands []Command, num_commands *uint, num_literals *uint) {
var max_backward_limit uint = BROTLI_MAX_BACKWARD_LIMIT(params.lgwin)
var orig_commands []Command = commands
var insert_length uint = *last_insert_len
var pos_end uint = position + num_bytes
var store_end uint
if num_bytes >= StoreLookaheadH54() {
store_end = position + num_bytes - StoreLookaheadH54() + 1
} else {
store_end = position
}
var random_heuristics_window_size uint = LiteralSpreeLengthForSparseSearch(params)
var apply_random_heuristics uint = position + random_heuristics_window_size
var gap uint = 0
/* Set maximum distance, see section 9.1. of the spec. */
var kMinScore uint = BROTLI_SCORE_BASE + 100
/* For speed up heuristics for random data. */
/* Minimum score to accept a backward reference. */
PrepareDistanceCacheH54(hasher, dist_cache)
for position+HashTypeLengthH54() < pos_end {
var max_length uint = pos_end - position
var max_distance uint = brotli_min_size_t(position, max_backward_limit)
var sr HasherSearchResult
sr.len = 0
sr.len_code_delta = 0
sr.distance = 0
sr.score = kMinScore
FindLongestMatchH54(hasher, &params.dictionary, ringbuffer, ringbuffer_mask, dist_cache, position, max_length, max_distance, gap, params.dist.max_distance, &sr)
if sr.score > kMinScore {
/* Found a match. Let's look for something even better ahead. */
var delayed_backward_references_in_row int = 0
max_length--
for ; ; max_length-- {
var cost_diff_lazy uint = 175
var sr2 HasherSearchResult
if params.quality < MIN_QUALITY_FOR_EXTENSIVE_REFERENCE_SEARCH {
sr2.len = brotli_min_size_t(sr.len-1, max_length)
} else {
sr2.len = 0
}
sr2.len_code_delta = 0
sr2.distance = 0
sr2.score = kMinScore
max_distance = brotli_min_size_t(position+1, max_backward_limit)
FindLongestMatchH54(hasher, &params.dictionary, ringbuffer, ringbuffer_mask, dist_cache, position+1, max_length, max_distance, gap, params.dist.max_distance, &sr2)
if sr2.score >= sr.score+cost_diff_lazy {
/* Ok, let's just write one byte for now and start a match from the
next byte. */
position++
insert_length++
sr = sr2
delayed_backward_references_in_row++
if delayed_backward_references_in_row < 4 && position+HashTypeLengthH54() < pos_end {
continue
}
}
break
}
apply_random_heuristics = position + 2*sr.len + random_heuristics_window_size
max_distance = brotli_min_size_t(position, max_backward_limit)
{
/* The first 16 codes are special short-codes,
and the minimum offset is 1. */
var distance_code uint = ComputeDistanceCode(sr.distance, max_distance+gap, dist_cache)
if (sr.distance <= (max_distance + gap)) && distance_code > 0 {
dist_cache[3] = dist_cache[2]
dist_cache[2] = dist_cache[1]
dist_cache[1] = dist_cache[0]
dist_cache[0] = int(sr.distance)
PrepareDistanceCacheH54(hasher, dist_cache)
}
InitCommand(&commands[0], &params.dist, insert_length, sr.len, sr.len_code_delta, distance_code)
commands = commands[1:]
}
*num_literals += insert_length
insert_length = 0
/* Put the hash keys into the table, if there are enough bytes left.
Depending on the hasher implementation, it can push all positions
in the given range or only a subset of them.
Avoid hash poisoning with RLE data. */
{
var range_start uint = position + 2
var range_end uint = brotli_min_size_t(position+sr.len, store_end)
if sr.distance < sr.len>>2 {
range_start = brotli_min_size_t(range_end, brotli_max_size_t(range_start, position+sr.len-(sr.distance<<2)))
}
StoreRangeH54(hasher, ringbuffer, ringbuffer_mask, range_start, range_end)
}
position += sr.len
} else {
insert_length++
position++
/* If we have not seen matches for a long time, we can skip some
match lookups. Unsuccessful match lookups are very very expensive
and this kind of a heuristic speeds up compression quite
a lot. */
if position > apply_random_heuristics {
/* Going through uncompressible data, jump. */
if position > apply_random_heuristics+4*random_heuristics_window_size {
var kMargin uint = brotli_max_size_t(StoreLookaheadH54()-1, 4)
/* It is quite a long time since we saw a copy, so we assume
that this data is not compressible, and store hashes less
often. Hashes of non compressible data are less likely to
turn out to be useful in the future, too, so we store less of
them to not to flood out the hash table of good compressible
data. */
var pos_jump uint = brotli_min_size_t(position+16, pos_end-kMargin)
for ; position < pos_jump; position += 4 {
StoreH54(hasher, ringbuffer, ringbuffer_mask, position)
insert_length += 4
}
} else {
var kMargin uint = brotli_max_size_t(StoreLookaheadH54()-1, 2)
var pos_jump uint = brotli_min_size_t(position+8, pos_end-kMargin)
for ; position < pos_jump; position += 2 {
StoreH54(hasher, ringbuffer, ringbuffer_mask, position)
insert_length += 2
}
}
}
}
}
insert_length += pos_end - position
*last_insert_len = insert_length
*num_commands += uint(-cap(commands) + cap(orig_commands))
}

View File

@@ -1,148 +0,0 @@
package brotli
/* NOLINT(build/header_guard) */
/* Copyright 2013 Google Inc. All Rights Reserved.
Distributed under MIT license.
See file LICENSE for detail or copy at https://opensource.org/licenses/MIT
*/
func CreateBackwardReferencesNH55(num_bytes uint, position uint, ringbuffer []byte, ringbuffer_mask uint, params *BrotliEncoderParams, hasher HasherHandle, dist_cache []int, last_insert_len *uint, commands []Command, num_commands *uint, num_literals *uint) {
var max_backward_limit uint = BROTLI_MAX_BACKWARD_LIMIT(params.lgwin)
var orig_commands []Command = commands
var insert_length uint = *last_insert_len
var pos_end uint = position + num_bytes
var store_end uint
if num_bytes >= StoreLookaheadH55() {
store_end = position + num_bytes - StoreLookaheadH55() + 1
} else {
store_end = position
}
var random_heuristics_window_size uint = LiteralSpreeLengthForSparseSearch(params)
var apply_random_heuristics uint = position + random_heuristics_window_size
var gap uint = 0
/* Set maximum distance, see section 9.1. of the spec. */
var kMinScore uint = BROTLI_SCORE_BASE + 100
/* For speed up heuristics for random data. */
/* Minimum score to accept a backward reference. */
PrepareDistanceCacheH55(hasher, dist_cache)
for position+HashTypeLengthH55() < pos_end {
var max_length uint = pos_end - position
var max_distance uint = brotli_min_size_t(position, max_backward_limit)
var sr HasherSearchResult
sr.len = 0
sr.len_code_delta = 0
sr.distance = 0
sr.score = kMinScore
FindLongestMatchH55(hasher, &params.dictionary, ringbuffer, ringbuffer_mask, dist_cache, position, max_length, max_distance, gap, params.dist.max_distance, &sr)
if sr.score > kMinScore {
/* Found a match. Let's look for something even better ahead. */
var delayed_backward_references_in_row int = 0
max_length--
for ; ; max_length-- {
var cost_diff_lazy uint = 175
var sr2 HasherSearchResult
if params.quality < MIN_QUALITY_FOR_EXTENSIVE_REFERENCE_SEARCH {
sr2.len = brotli_min_size_t(sr.len-1, max_length)
} else {
sr2.len = 0
}
sr2.len_code_delta = 0
sr2.distance = 0
sr2.score = kMinScore
max_distance = brotli_min_size_t(position+1, max_backward_limit)
FindLongestMatchH55(hasher, &params.dictionary, ringbuffer, ringbuffer_mask, dist_cache, position+1, max_length, max_distance, gap, params.dist.max_distance, &sr2)
if sr2.score >= sr.score+cost_diff_lazy {
/* Ok, let's just write one byte for now and start a match from the
next byte. */
position++
insert_length++
sr = sr2
delayed_backward_references_in_row++
if delayed_backward_references_in_row < 4 && position+HashTypeLengthH55() < pos_end {
continue
}
}
break
}
apply_random_heuristics = position + 2*sr.len + random_heuristics_window_size
max_distance = brotli_min_size_t(position, max_backward_limit)
{
/* The first 16 codes are special short-codes,
and the minimum offset is 1. */
var distance_code uint = ComputeDistanceCode(sr.distance, max_distance+gap, dist_cache)
if (sr.distance <= (max_distance + gap)) && distance_code > 0 {
dist_cache[3] = dist_cache[2]
dist_cache[2] = dist_cache[1]
dist_cache[1] = dist_cache[0]
dist_cache[0] = int(sr.distance)
PrepareDistanceCacheH55(hasher, dist_cache)
}
InitCommand(&commands[0], &params.dist, insert_length, sr.len, sr.len_code_delta, distance_code)
commands = commands[1:]
}
*num_literals += insert_length
insert_length = 0
/* Put the hash keys into the table, if there are enough bytes left.
Depending on the hasher implementation, it can push all positions
in the given range or only a subset of them.
Avoid hash poisoning with RLE data. */
{
var range_start uint = position + 2
var range_end uint = brotli_min_size_t(position+sr.len, store_end)
if sr.distance < sr.len>>2 {
range_start = brotli_min_size_t(range_end, brotli_max_size_t(range_start, position+sr.len-(sr.distance<<2)))
}
StoreRangeH55(hasher, ringbuffer, ringbuffer_mask, range_start, range_end)
}
position += sr.len
} else {
insert_length++
position++
/* If we have not seen matches for a long time, we can skip some
match lookups. Unsuccessful match lookups are very expensive
and this kind of heuristic speeds up compression quite a lot. */
if position > apply_random_heuristics {
/* Going through incompressible data, jump. */
if position > apply_random_heuristics+4*random_heuristics_window_size {
var kMargin uint = brotli_max_size_t(StoreLookaheadH55()-1, 4)
/* It has been quite a long time since we saw a copy, so we assume
that this data is not compressible, and store hashes less
often. Hashes of incompressible data are less likely to
turn out to be useful in the future, too, so we store fewer of
them, so as not to flood out the hash table entries of good,
compressible data. */
var pos_jump uint = brotli_min_size_t(position+16, pos_end-kMargin)
for ; position < pos_jump; position += 4 {
StoreH55(hasher, ringbuffer, ringbuffer_mask, position)
insert_length += 4
}
} else {
var kMargin uint = brotli_max_size_t(StoreLookaheadH55()-1, 2)
var pos_jump uint = brotli_min_size_t(position+8, pos_end-kMargin)
for ; position < pos_jump; position += 2 {
StoreH55(hasher, ringbuffer, ringbuffer_mask, position)
insert_length += 2
}
}
}
}
}
insert_length += pos_end - position
*last_insert_len = insert_length
*num_commands += uint(-cap(commands) + cap(orig_commands))
}


@ -1,148 +0,0 @@
package brotli
/* NOLINT(build/header_guard) */
/* Copyright 2013 Google Inc. All Rights Reserved.
Distributed under MIT license.
See file LICENSE for detail or copy at https://opensource.org/licenses/MIT
*/
func CreateBackwardReferencesNH6(num_bytes uint, position uint, ringbuffer []byte, ringbuffer_mask uint, params *BrotliEncoderParams, hasher HasherHandle, dist_cache []int, last_insert_len *uint, commands []Command, num_commands *uint, num_literals *uint) {
var max_backward_limit uint = BROTLI_MAX_BACKWARD_LIMIT(params.lgwin)
var orig_commands []Command = commands
var insert_length uint = *last_insert_len
var pos_end uint = position + num_bytes
var store_end uint
if num_bytes >= StoreLookaheadH6() {
store_end = position + num_bytes - StoreLookaheadH6() + 1
} else {
store_end = position
}
var random_heuristics_window_size uint = LiteralSpreeLengthForSparseSearch(params)
var apply_random_heuristics uint = position + random_heuristics_window_size
var gap uint = 0
/* Set maximum distance, see section 9.1. of the spec. */
var kMinScore uint = BROTLI_SCORE_BASE + 100
/* For speeding up heuristics on random data. */
/* Minimum score to accept a backward reference. */
PrepareDistanceCacheH6(hasher, dist_cache)
for position+HashTypeLengthH6() < pos_end {
var max_length uint = pos_end - position
var max_distance uint = brotli_min_size_t(position, max_backward_limit)
var sr HasherSearchResult
sr.len = 0
sr.len_code_delta = 0
sr.distance = 0
sr.score = kMinScore
FindLongestMatchH6(hasher, &params.dictionary, ringbuffer, ringbuffer_mask, dist_cache, position, max_length, max_distance, gap, params.dist.max_distance, &sr)
if sr.score > kMinScore {
/* Found a match. Let's look for something even better ahead. */
var delayed_backward_references_in_row int = 0
max_length--
for ; ; max_length-- {
var cost_diff_lazy uint = 175
var sr2 HasherSearchResult
if params.quality < MIN_QUALITY_FOR_EXTENSIVE_REFERENCE_SEARCH {
sr2.len = brotli_min_size_t(sr.len-1, max_length)
} else {
sr2.len = 0
}
sr2.len_code_delta = 0
sr2.distance = 0
sr2.score = kMinScore
max_distance = brotli_min_size_t(position+1, max_backward_limit)
FindLongestMatchH6(hasher, &params.dictionary, ringbuffer, ringbuffer_mask, dist_cache, position+1, max_length, max_distance, gap, params.dist.max_distance, &sr2)
if sr2.score >= sr.score+cost_diff_lazy {
/* Ok, let's just write one byte for now and start a match from the
next byte. */
position++
insert_length++
sr = sr2
delayed_backward_references_in_row++
if delayed_backward_references_in_row < 4 && position+HashTypeLengthH6() < pos_end {
continue
}
}
break
}
apply_random_heuristics = position + 2*sr.len + random_heuristics_window_size
max_distance = brotli_min_size_t(position, max_backward_limit)
{
/* The first 16 codes are special short-codes,
and the minimum offset is 1. */
var distance_code uint = ComputeDistanceCode(sr.distance, max_distance+gap, dist_cache)
if (sr.distance <= (max_distance + gap)) && distance_code > 0 {
dist_cache[3] = dist_cache[2]
dist_cache[2] = dist_cache[1]
dist_cache[1] = dist_cache[0]
dist_cache[0] = int(sr.distance)
PrepareDistanceCacheH6(hasher, dist_cache)
}
InitCommand(&commands[0], &params.dist, insert_length, sr.len, sr.len_code_delta, distance_code)
commands = commands[1:]
}
*num_literals += insert_length
insert_length = 0
/* Put the hash keys into the table, if there are enough bytes left.
Depending on the hasher implementation, it can push all positions
in the given range or only a subset of them.
Avoid hash poisoning with RLE data. */
{
var range_start uint = position + 2
var range_end uint = brotli_min_size_t(position+sr.len, store_end)
if sr.distance < sr.len>>2 {
range_start = brotli_min_size_t(range_end, brotli_max_size_t(range_start, position+sr.len-(sr.distance<<2)))
}
StoreRangeH6(hasher, ringbuffer, ringbuffer_mask, range_start, range_end)
}
position += sr.len
} else {
insert_length++
position++
/* If we have not seen matches for a long time, we can skip some
match lookups. Unsuccessful match lookups are very expensive
and this kind of heuristic speeds up compression quite a lot. */
if position > apply_random_heuristics {
/* Going through incompressible data, jump. */
if position > apply_random_heuristics+4*random_heuristics_window_size {
var kMargin uint = brotli_max_size_t(StoreLookaheadH6()-1, 4)
/* It has been quite a long time since we saw a copy, so we assume
that this data is not compressible, and store hashes less
often. Hashes of incompressible data are less likely to
turn out to be useful in the future, too, so we store fewer of
them, so as not to flood out the hash table entries of good,
compressible data. */
var pos_jump uint = brotli_min_size_t(position+16, pos_end-kMargin)
for ; position < pos_jump; position += 4 {
StoreH6(hasher, ringbuffer, ringbuffer_mask, position)
insert_length += 4
}
} else {
var kMargin uint = brotli_max_size_t(StoreLookaheadH6()-1, 2)
var pos_jump uint = brotli_min_size_t(position+8, pos_end-kMargin)
for ; position < pos_jump; position += 2 {
StoreH6(hasher, ringbuffer, ringbuffer_mask, position)
insert_length += 2
}
}
}
}
}
insert_length += pos_end - position
*last_insert_len = insert_length
*num_commands += uint(-cap(commands) + cap(orig_commands))
}


@ -1,148 +0,0 @@
package brotli
/* NOLINT(build/header_guard) */
/* Copyright 2013 Google Inc. All Rights Reserved.
Distributed under MIT license.
See file LICENSE for detail or copy at https://opensource.org/licenses/MIT
*/
func CreateBackwardReferencesNH65(num_bytes uint, position uint, ringbuffer []byte, ringbuffer_mask uint, params *BrotliEncoderParams, hasher HasherHandle, dist_cache []int, last_insert_len *uint, commands []Command, num_commands *uint, num_literals *uint) {
var max_backward_limit uint = BROTLI_MAX_BACKWARD_LIMIT(params.lgwin)
var orig_commands []Command = commands
var insert_length uint = *last_insert_len
var pos_end uint = position + num_bytes
var store_end uint
if num_bytes >= StoreLookaheadH65() {
store_end = position + num_bytes - StoreLookaheadH65() + 1
} else {
store_end = position
}
var random_heuristics_window_size uint = LiteralSpreeLengthForSparseSearch(params)
var apply_random_heuristics uint = position + random_heuristics_window_size
var gap uint = 0
/* Set maximum distance, see section 9.1. of the spec. */
var kMinScore uint = BROTLI_SCORE_BASE + 100
/* For speeding up heuristics on random data. */
/* Minimum score to accept a backward reference. */
PrepareDistanceCacheH65(hasher, dist_cache)
for position+HashTypeLengthH65() < pos_end {
var max_length uint = pos_end - position
var max_distance uint = brotli_min_size_t(position, max_backward_limit)
var sr HasherSearchResult
sr.len = 0
sr.len_code_delta = 0
sr.distance = 0
sr.score = kMinScore
FindLongestMatchH65(hasher, &params.dictionary, ringbuffer, ringbuffer_mask, dist_cache, position, max_length, max_distance, gap, params.dist.max_distance, &sr)
if sr.score > kMinScore {
/* Found a match. Let's look for something even better ahead. */
var delayed_backward_references_in_row int = 0
max_length--
for ; ; max_length-- {
var cost_diff_lazy uint = 175
var sr2 HasherSearchResult
if params.quality < MIN_QUALITY_FOR_EXTENSIVE_REFERENCE_SEARCH {
sr2.len = brotli_min_size_t(sr.len-1, max_length)
} else {
sr2.len = 0
}
sr2.len_code_delta = 0
sr2.distance = 0
sr2.score = kMinScore
max_distance = brotli_min_size_t(position+1, max_backward_limit)
FindLongestMatchH65(hasher, &params.dictionary, ringbuffer, ringbuffer_mask, dist_cache, position+1, max_length, max_distance, gap, params.dist.max_distance, &sr2)
if sr2.score >= sr.score+cost_diff_lazy {
/* Ok, let's just write one byte for now and start a match from the
next byte. */
position++
insert_length++
sr = sr2
delayed_backward_references_in_row++
if delayed_backward_references_in_row < 4 && position+HashTypeLengthH65() < pos_end {
continue
}
}
break
}
apply_random_heuristics = position + 2*sr.len + random_heuristics_window_size
max_distance = brotli_min_size_t(position, max_backward_limit)
{
/* The first 16 codes are special short-codes,
and the minimum offset is 1. */
var distance_code uint = ComputeDistanceCode(sr.distance, max_distance+gap, dist_cache)
if (sr.distance <= (max_distance + gap)) && distance_code > 0 {
dist_cache[3] = dist_cache[2]
dist_cache[2] = dist_cache[1]
dist_cache[1] = dist_cache[0]
dist_cache[0] = int(sr.distance)
PrepareDistanceCacheH65(hasher, dist_cache)
}
InitCommand(&commands[0], &params.dist, insert_length, sr.len, sr.len_code_delta, distance_code)
commands = commands[1:]
}
*num_literals += insert_length
insert_length = 0
/* Put the hash keys into the table, if there are enough bytes left.
Depending on the hasher implementation, it can push all positions
in the given range or only a subset of them.
Avoid hash poisoning with RLE data. */
{
var range_start uint = position + 2
var range_end uint = brotli_min_size_t(position+sr.len, store_end)
if sr.distance < sr.len>>2 {
range_start = brotli_min_size_t(range_end, brotli_max_size_t(range_start, position+sr.len-(sr.distance<<2)))
}
StoreRangeH65(hasher, ringbuffer, ringbuffer_mask, range_start, range_end)
}
position += sr.len
} else {
insert_length++
position++
/* If we have not seen matches for a long time, we can skip some
match lookups. Unsuccessful match lookups are very expensive
and this kind of heuristic speeds up compression quite a lot. */
if position > apply_random_heuristics {
/* Going through incompressible data, jump. */
if position > apply_random_heuristics+4*random_heuristics_window_size {
var kMargin uint = brotli_max_size_t(StoreLookaheadH65()-1, 4)
/* It has been quite a long time since we saw a copy, so we assume
that this data is not compressible, and store hashes less
often. Hashes of incompressible data are less likely to
turn out to be useful in the future, too, so we store fewer of
them, so as not to flood out the hash table entries of good,
compressible data. */
var pos_jump uint = brotli_min_size_t(position+16, pos_end-kMargin)
for ; position < pos_jump; position += 4 {
StoreH65(hasher, ringbuffer, ringbuffer_mask, position)
insert_length += 4
}
} else {
var kMargin uint = brotli_max_size_t(StoreLookaheadH65()-1, 2)
var pos_jump uint = brotli_min_size_t(position+8, pos_end-kMargin)
for ; position < pos_jump; position += 2 {
StoreH65(hasher, ringbuffer, ringbuffer_mask, position)
insert_length += 2
}
}
}
}
}
insert_length += pos_end - position
*last_insert_len = insert_length
*num_commands += uint(-cap(commands) + cap(orig_commands))
}


@ -617,15 +617,15 @@ func ZopfliIterate(num_bytes uint, position uint, ringbuffer []byte, ringbuffer_
}
/* REQUIRES: nodes != NULL and len(nodes) >= num_bytes + 1 */
-func BrotliZopfliComputeShortestPath(num_bytes uint, position uint, ringbuffer []byte, ringbuffer_mask uint, params *BrotliEncoderParams, dist_cache []int, hasher HasherHandle, nodes []ZopfliNode) uint {
+func BrotliZopfliComputeShortestPath(num_bytes uint, position uint, ringbuffer []byte, ringbuffer_mask uint, params *BrotliEncoderParams, dist_cache []int, hasher *H10, nodes []ZopfliNode) uint {
var max_backward_limit uint = BROTLI_MAX_BACKWARD_LIMIT(params.lgwin)
var max_zopfli_len uint = MaxZopfliLen(params)
var model ZopfliCostModel
var queue StartPosQueue
var matches [2 * (MAX_NUM_MATCHES_H10 + 64)]BackwardMatch
var store_end uint
-if num_bytes >= StoreLookaheadH10() {
-store_end = position + num_bytes - StoreLookaheadH10() + 1
+if num_bytes >= hasher.StoreLookahead() {
+store_end = position + num_bytes - hasher.StoreLookahead() + 1
} else {
store_end = position
}
@ -637,7 +637,7 @@ func BrotliZopfliComputeShortestPath(num_bytes uint, position uint, ringbuffer [
InitZopfliCostModel(&model, &params.dist, num_bytes)
ZopfliCostModelSetFromLiteralCosts(&model, position, ringbuffer, ringbuffer_mask)
InitStartPosQueue(&queue)
-for i = 0; i+HashTypeLengthH10()-1 < num_bytes; i++ {
+for i = 0; i+hasher.HashTypeLength()-1 < num_bytes; i++ {
var pos uint = position + i
var max_distance uint = brotli_min_size_t(pos, max_backward_limit)
var skip uint
@ -658,12 +658,12 @@ func BrotliZopfliComputeShortestPath(num_bytes uint, position uint, ringbuffer [
if skip > 1 {
/* Add the tail of the copy to the hasher. */
-StoreRangeH10(hasher, ringbuffer, ringbuffer_mask, pos+1, brotli_min_size_t(pos+skip, store_end))
+hasher.StoreRange(ringbuffer, ringbuffer_mask, pos+1, brotli_min_size_t(pos+skip, store_end))
skip--
for skip != 0 {
i++
-if i+HashTypeLengthH10()-1 >= num_bytes {
+if i+hasher.HashTypeLength()-1 >= num_bytes {
break
}
EvaluateNode(position, i, max_backward_limit, gap, dist_cache, &model, &queue, nodes)
@ -676,7 +676,7 @@ func BrotliZopfliComputeShortestPath(num_bytes uint, position uint, ringbuffer [
return ComputeShortestPathFromNodes(num_bytes, nodes)
}
-func BrotliCreateZopfliBackwardReferences(num_bytes uint, position uint, ringbuffer []byte, ringbuffer_mask uint, params *BrotliEncoderParams, hasher HasherHandle, dist_cache []int, last_insert_len *uint, commands []Command, num_commands *uint, num_literals *uint) {
+func BrotliCreateZopfliBackwardReferences(num_bytes uint, position uint, ringbuffer []byte, ringbuffer_mask uint, params *BrotliEncoderParams, hasher *H10, dist_cache []int, last_insert_len *uint, commands []Command, num_commands *uint, num_literals *uint) {
var nodes []ZopfliNode
nodes = make([]ZopfliNode, (num_bytes + 1))
BrotliInitZopfliNodes(nodes, num_bytes+1)
@ -690,8 +690,8 @@ func BrotliCreateHqZopfliBackwardReferences(num_bytes uint, position uint, ringb
var num_matches []uint32 = make([]uint32, num_bytes)
var matches_size uint = 4 * num_bytes
var store_end uint
-if num_bytes >= StoreLookaheadH10() {
-store_end = position + num_bytes - StoreLookaheadH10() + 1
+if num_bytes >= hasher.StoreLookahead() {
+store_end = position + num_bytes - hasher.StoreLookahead() + 1
} else {
store_end = position
}
@ -707,7 +707,7 @@ func BrotliCreateHqZopfliBackwardReferences(num_bytes uint, position uint, ringb
var gap uint = 0
var shadow_matches uint = 0
var new_array []BackwardMatch
-for i = 0; i+HashTypeLengthH10()-1 < num_bytes; i++ {
+for i = 0; i+hasher.HashTypeLength()-1 < num_bytes; i++ {
var pos uint = position + i
var max_distance uint = brotli_min_size_t(pos, max_backward_limit)
var max_length uint = num_bytes - i
@ -751,7 +751,7 @@ func BrotliCreateHqZopfliBackwardReferences(num_bytes uint, position uint, ringb
num_matches[i] = 1
/* Add the tail of the copy to the hasher. */
-StoreRangeH10(hasher, ringbuffer, ringbuffer_mask, pos+1, brotli_min_size_t(pos+match_len, store_end))
+hasher.StoreRange(ringbuffer, ringbuffer_mask, pos+1, brotli_min_size_t(pos+match_len, store_end))
var pos uint = i
for i := 0; i < int(skip); i++ {
num_matches[pos+1:][i] = 0
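
The calls introduced above (hasher.StoreLookahead(), hasher.HashTypeLength(), hasher.StoreRange(...)) imply a common method set shared by the hasher types. The HasherHandle definition itself is not part of this diff, so the following is only a sketch of the assumed shape, inferred from the method names and signatures that appear in this commit; the interface name is hypothetical.

// hasherMethods is a hypothetical name; it only collects the methods that the
// call sites in this commit rely on. Parameter and result types are taken from
// the signatures shown in the per-file diffs below.
type hasherMethods interface {
	Prepare(one_shot bool, input_size uint, data []byte)
	HashTypeLength() uint
	StoreLookahead() uint
	Store(data []byte, mask uint, ix uint)
	StoreRange(data []byte, mask uint, ix_start uint, ix_end uint)
	StitchToPreviousBlock(num_bytes uint, position uint, ringbuffer []byte, ringbuffer_mask uint)
	PrepareDistanceCache(distance_cache []int)
	FindLongestMatch(dictionary *BrotliEncoderDictionary, data []byte, ring_buffer_mask uint, distance_cache []int, cur_ix uint, max_length uint, max_backward uint, gap uint, max_distance uint, out *HasherSearchResult)
}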


@ -1425,7 +1425,7 @@ func EncodeData(s *Writer, is_last bool, force_flush bool, out_size *uint, outpu
if s.params.quality == ZOPFLIFICATION_QUALITY {
assert(s.params.hasher.type_ == 10)
-BrotliCreateZopfliBackwardReferences(uint(bytes), uint(wrapped_last_processed_pos), data, uint(mask), &s.params, s.hasher_, s.dist_cache_[:], &s.last_insert_len_, s.commands_[s.num_commands_:], &s.num_commands_, &s.num_literals_)
+BrotliCreateZopfliBackwardReferences(uint(bytes), uint(wrapped_last_processed_pos), data, uint(mask), &s.params, s.hasher_.(*H10), s.dist_cache_[:], &s.last_insert_len_, s.commands_[s.num_commands_:], &s.num_commands_, &s.num_literals_)
} else if s.params.quality == HQ_ZOPFLIFICATION_QUALITY {
assert(s.params.hasher.type_ == 10)
BrotliCreateHqZopfliBackwardReferences(uint(bytes), uint(wrapped_last_processed_pos), data, uint(mask), &s.params, s.hasher_, s.dist_cache_[:], &s.last_insert_len_, s.commands_[s.num_commands_:], &s.num_commands_, &s.num_literals_)
@ -1599,7 +1599,7 @@ func BrotliCompressBufferQuality10(lgwin int, input_size uint, input_buffer []by
var new_cmd_alloc_size uint
BrotliInitZopfliNodes(nodes, block_size+1)
hasher.StitchToPreviousBlock(block_size, block_start, input_buffer, mask)
-path_size = BrotliZopfliComputeShortestPath(block_size, block_start, input_buffer, mask, &params, dist_cache[:], hasher, nodes)
+path_size = BrotliZopfliComputeShortestPath(block_size, block_start, input_buffer, mask, &params, dist_cache[:], hasher.(*H10), nodes)
/* We allocate a command buffer in the first iteration of this loop that
will be likely big enough for the whole metablock, so that for most
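
Because the Zopfli code paths need H10-specific matching (FindAllMatchesH10, MAX_NUM_MATCHES_H10), the call sites above now down-cast the stored handle with s.hasher_.(*H10); the preceding assert(s.params.hasher.type_ == 10) is what makes the bare type assertion safe. Purely as an illustration, and not code from this commit, a defensive variant inside EncodeData would use the comma-ok form instead:

// Hypothetical variant for illustration only; the commit itself relies on the
// assert above and uses a bare type assertion.
if h10, ok := s.hasher_.(*H10); ok {
	BrotliCreateZopfliBackwardReferences(uint(bytes), uint(wrapped_last_processed_pos), data, uint(mask), &s.params, h10, s.dist_cache_[:], &s.last_insert_len_, s.commands_[s.num_commands_:], &s.num_commands_, &s.num_literals_)
} else {
	panic("zopfli quality paths require the H10 hasher")
}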

h10.go (27 changed lines)

@ -13,11 +13,11 @@ package brotli
position in the input data. The binary tree is sorted by the lexicographic
order of the sequences, and it is also a max-heap with respect to the
starting positions. */
-func HashTypeLengthH10() uint {
+func (*H10) HashTypeLength() uint {
return 4
}
-func StoreLookaheadH10() uint {
+func (*H10) StoreLookahead() uint {
return 128
}
@ -234,14 +234,13 @@ func FindAllMatchesH10(handle HasherHandle, dictionary *BrotliEncoderDictionary,
/* Stores the hash of the next 4 bytes and re-roots the binary tree at the
current sequence, without returning any matches.
REQUIRES: ix + 128 <= end-of-current-block */
-func StoreH10(handle HasherHandle, data []byte, mask uint, ix uint) {
-var self *H10 = SelfH10(handle)
-var max_backward uint = self.window_mask_ - BROTLI_WINDOW_GAP + 1
+func (h *H10) Store(data []byte, mask uint, ix uint) {
+var max_backward uint = h.window_mask_ - BROTLI_WINDOW_GAP + 1
/* Maximum distance is window size - 16, see section 9.1. of the spec. */
-StoreAndFindMatchesH10(self, data, ix, mask, 128, max_backward, nil, nil)
+StoreAndFindMatchesH10(h, data, ix, mask, 128, max_backward, nil, nil)
}
-func StoreRangeH10(handle HasherHandle, data []byte, mask uint, ix_start uint, ix_end uint) {
+func (h *H10) StoreRange(data []byte, mask uint, ix_start uint, ix_end uint) {
var i uint = ix_start
var j uint = ix_start
if ix_start+63 <= ix_end {
@ -250,17 +249,17 @@ func StoreRangeH10(handle HasherHandle, data []byte, mask uint, ix_start uint, i
if ix_start+512 <= i {
for ; j < i; j += 8 {
-StoreH10(handle, data, mask, j)
+h.Store(data, mask, j)
}
}
for ; i < ix_end; i++ {
-StoreH10(handle, data, mask, i)
+h.Store(data, mask, i)
}
}
func (h *H10) StitchToPreviousBlock(num_bytes uint, position uint, ringbuffer []byte, ringbuffer_mask uint) {
-if num_bytes >= HashTypeLengthH10()-1 && position >= 128 {
+if num_bytes >= h.HashTypeLength()-1 && position >= 128 {
var i_start uint = position - 128 + 1
var i_end uint = brotli_min_size_t(position, i_start+num_bytes)
/* Store the last `128 - 1` positions in the hasher.
@ -285,3 +284,11 @@ func (h *H10) StitchToPreviousBlock(num_bytes uint, position uint, ringbuffer []
/* MAX_NUM_MATCHES == 64 + MAX_TREE_SEARCH_DEPTH */
const MAX_NUM_MATCHES_H10 = 128
+func (*H10) FindLongestMatch(dictionary *BrotliEncoderDictionary, data []byte, ring_buffer_mask uint, distance_cache []int, cur_ix uint, max_length uint, max_backward uint, gap uint, max_distance uint, out *HasherSearchResult) {
+panic("unimplemented")
+}
+func (*H10) PrepareDistanceCache(distance_cache []int) {
+panic("unimplemented")
+}
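
The two panic("unimplemented") stubs added at the end of h10.go exist so that *H10 still exposes the full hasher method set even though the Zopfli paths never call FindLongestMatch or PrepareDistanceCache through it. A hypothetical compile-time check, not part of this commit, that expresses the same requirement:

// Hypothetical assertion for illustration; the anonymous interface lists only
// the two methods stubbed out above, and the declaration compiles exactly
// because *H10 now has them.
var _ interface {
	FindLongestMatch(dictionary *BrotliEncoderDictionary, data []byte, ring_buffer_mask uint, distance_cache []int, cur_ix uint, max_length uint, max_backward uint, gap uint, max_distance uint, out *HasherSearchResult)
	PrepareDistanceCache(distance_cache []int)
} = (*H10)(nil)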

h2.go (38 changed lines)

@ -10,11 +10,11 @@ package brotli
/* For BUCKET_SWEEP == 1, enabling the dictionary lookup makes compression
a little faster (0.5% - 1%) and it compresses 0.15% better on small text
and HTML inputs. */
-func HashTypeLengthH2() uint {
+func (*H2) HashTypeLength() uint {
return 8
}
-func StoreLookaheadH2() uint {
+func (*H2) StoreLookahead() uint {
return 8
}
@ -70,33 +70,32 @@ func (h *H2) Prepare(one_shot bool, input_size uint, data []byte) {
/* Look at 5 bytes at &data[ix & mask].
Compute a hash from these, and store the value somewhere within
[ix .. ix+3]. */
-func StoreH2(handle HasherHandle, data []byte, mask uint, ix uint) {
+func (h *H2) Store(data []byte, mask uint, ix uint) {
var key uint32 = HashBytesH2(data[ix&mask:])
var off uint32 = uint32(ix>>3) % 1
/* Wiggle the value with the bucket sweep range. */
-SelfH2(handle).buckets_[key+off] = uint32(ix)
+h.buckets_[key+off] = uint32(ix)
}
-func StoreRangeH2(handle HasherHandle, data []byte, mask uint, ix_start uint, ix_end uint) {
+func (h *H2) StoreRange(data []byte, mask uint, ix_start uint, ix_end uint) {
var i uint
for i = ix_start; i < ix_end; i++ {
-StoreH2(handle, data, mask, i)
+h.Store(data, mask, i)
}
}
func (h *H2) StitchToPreviousBlock(num_bytes uint, position uint, ringbuffer []byte, ringbuffer_mask uint) {
-if num_bytes >= HashTypeLengthH2()-1 && position >= 3 {
+if num_bytes >= h.HashTypeLength()-1 && position >= 3 {
/* Prepare the hashes for three last bytes of the last write.
These could not be calculated before, since they require knowledge
of both the previous and the current block. */
-StoreH2(h, ringbuffer, ringbuffer_mask, position-3)
-
-StoreH2(h, ringbuffer, ringbuffer_mask, position-2)
-StoreH2(h, ringbuffer, ringbuffer_mask, position-1)
+h.Store(ringbuffer, ringbuffer_mask, position-3)
+h.Store(ringbuffer, ringbuffer_mask, position-2)
+h.Store(ringbuffer, ringbuffer_mask, position-1)
}
}
-func PrepareDistanceCacheH2(handle HasherHandle, distance_cache []int) {
+func (*H2) PrepareDistanceCache(distance_cache []int) {
}
/* Find a longest backward match of &data[cur_ix & ring_buffer_mask]
@ -107,8 +106,7 @@ func PrepareDistanceCacheH2(handle HasherHandle, distance_cache []int) {
Does not look for matches further away than max_backward.
Writes the best match into |out|.
|out|->score is updated only if a better match is found. */
-func FindLongestMatchH2(handle HasherHandle, dictionary *BrotliEncoderDictionary, data []byte, ring_buffer_mask uint, distance_cache []int, cur_ix uint, max_length uint, max_backward uint, gap uint, max_distance uint, out *HasherSearchResult) {
-var self *H2 = SelfH2(handle)
+func (h *H2) FindLongestMatch(dictionary *BrotliEncoderDictionary, data []byte, ring_buffer_mask uint, distance_cache []int, cur_ix uint, max_length uint, max_backward uint, gap uint, max_distance uint, out *HasherSearchResult) {
var best_len_in uint = out.len
var cur_ix_masked uint = cur_ix & ring_buffer_mask
var key uint32 = HashBytesH2(data[cur_ix_masked:])
@ -134,7 +132,7 @@ func FindLongestMatchH2(handle HasherHandle, dictionary *BrotliEncoderDictionary
out.score = best_score
compare_char = int(data[cur_ix_masked+best_len])
if 1 == 1 {
-self.buckets_[key] = uint32(cur_ix)
+h.buckets_[key] = uint32(cur_ix)
return
}
}
@ -147,9 +145,9 @@ func FindLongestMatchH2(handle HasherHandle, dictionary *BrotliEncoderDictionary
var len uint
/* Only one to look for, don't bother to prepare for a loop. */
-prev_ix = uint(self.buckets_[key])
-self.buckets_[key] = uint32(cur_ix)
+prev_ix = uint(h.buckets_[key])
+h.buckets_[key] = uint32(cur_ix)
backward = cur_ix - prev_ix
prev_ix &= uint(uint32(ring_buffer_mask))
if compare_char != int(data[prev_ix+best_len_in]) {
@ -171,7 +169,7 @@ func FindLongestMatchH2(handle HasherHandle, dictionary *BrotliEncoderDictionary
}
}
} else {
-bucket = self.buckets_[key:]
+bucket = h.buckets_[key:]
var i int
prev_ix = uint(bucket[0])
bucket = bucket[1:]
@ -203,8 +201,8 @@ func FindLongestMatchH2(handle HasherHandle, dictionary *BrotliEncoderDictionary
}
if min_score == out.score {
-SearchInStaticDictionary(dictionary, handle, data[cur_ix_masked:], max_length, max_backward+gap, max_distance, out, true)
+SearchInStaticDictionary(dictionary, h, data[cur_ix_masked:], max_length, max_backward+gap, max_distance, out, true)
}
-self.buckets_[key+uint32((cur_ix>>3)%1)] = uint32(cur_ix)
+h.buckets_[key+uint32((cur_ix>>3)%1)] = uint32(cur_ix)
}
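
The % 1, % 2 and % 4 expressions seen in H2, H3 and H4 are the per-hasher BUCKET_SWEEP constant folded into each copy of Store: the slot chosen for a position is wiggled within a small sweep range so that nearby positions do not all overwrite the same bucket entry. A generic sketch of that store step, with the sweep width as a parameter (illustrative only; the helper below is hypothetical and not part of the package):

// storeWithSweep is a hypothetical generalization of the Store methods above.
// buckets, hashBytes and bucketSweep stand in for the per-hasher versions
// (bucketSweep is 1 for H2, 2 for H3 and 4 for H4).
func storeWithSweep(buckets []uint32, hashBytes func([]byte) uint32, bucketSweep uint32, data []byte, mask uint, ix uint) {
	key := hashBytes(data[ix&mask:])
	// Wiggle the value with the bucket sweep range, as the original comment puts it.
	off := uint32(ix>>3) % bucketSweep
	buckets[key+off] = uint32(ix)
}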

h3.go (36 changed lines)

@ -6,11 +6,11 @@ package brotli
Distributed under MIT license.
See file LICENSE for detail or copy at https://opensource.org/licenses/MIT
*/
-func HashTypeLengthH3() uint {
+func (*H3) HashTypeLength() uint {
return 8
}
-func StoreLookaheadH3() uint {
+func (*H3) StoreLookahead() uint {
return 8
}
@ -68,33 +68,32 @@ func (h *H3) Prepare(one_shot bool, input_size uint, data []byte) {
/* Look at 5 bytes at &data[ix & mask].
Compute a hash from these, and store the value somewhere within
[ix .. ix+3]. */
-func StoreH3(handle HasherHandle, data []byte, mask uint, ix uint) {
+func (h *H3) Store(data []byte, mask uint, ix uint) {
var key uint32 = HashBytesH3(data[ix&mask:])
var off uint32 = uint32(ix>>3) % 2
/* Wiggle the value with the bucket sweep range. */
-SelfH3(handle).buckets_[key+off] = uint32(ix)
+h.buckets_[key+off] = uint32(ix)
}
-func StoreRangeH3(handle HasherHandle, data []byte, mask uint, ix_start uint, ix_end uint) {
+func (h *H3) StoreRange(data []byte, mask uint, ix_start uint, ix_end uint) {
var i uint
for i = ix_start; i < ix_end; i++ {
-StoreH3(handle, data, mask, i)
+h.Store(data, mask, i)
}
}
func (h *H3) StitchToPreviousBlock(num_bytes uint, position uint, ringbuffer []byte, ringbuffer_mask uint) {
-if num_bytes >= HashTypeLengthH3()-1 && position >= 3 {
+if num_bytes >= h.HashTypeLength()-1 && position >= 3 {
/* Prepare the hashes for three last bytes of the last write.
These could not be calculated before, since they require knowledge
of both the previous and the current block. */
-StoreH3(h, ringbuffer, ringbuffer_mask, position-3)
-
-StoreH3(h, ringbuffer, ringbuffer_mask, position-2)
-StoreH3(h, ringbuffer, ringbuffer_mask, position-1)
+h.Store(ringbuffer, ringbuffer_mask, position-3)
+h.Store(ringbuffer, ringbuffer_mask, position-2)
+h.Store(ringbuffer, ringbuffer_mask, position-1)
}
}
-func PrepareDistanceCacheH3(handle HasherHandle, distance_cache []int) {
+func (*H3) PrepareDistanceCache(distance_cache []int) {
}
/* Find a longest backward match of &data[cur_ix & ring_buffer_mask]
@ -105,8 +104,7 @@ func PrepareDistanceCacheH3(handle HasherHandle, distance_cache []int) {
Does not look for matches further away than max_backward.
Writes the best match into |out|.
|out|->score is updated only if a better match is found. */
-func FindLongestMatchH3(handle HasherHandle, dictionary *BrotliEncoderDictionary, data []byte, ring_buffer_mask uint, distance_cache []int, cur_ix uint, max_length uint, max_backward uint, gap uint, max_distance uint, out *HasherSearchResult) {
-var self *H3 = SelfH3(handle)
+func (h *H3) FindLongestMatch(dictionary *BrotliEncoderDictionary, data []byte, ring_buffer_mask uint, distance_cache []int, cur_ix uint, max_length uint, max_backward uint, gap uint, max_distance uint, out *HasherSearchResult) {
var best_len_in uint = out.len
var cur_ix_masked uint = cur_ix & ring_buffer_mask
var key uint32 = HashBytesH3(data[cur_ix_masked:])
@ -131,7 +129,7 @@ func FindLongestMatchH3(handle HasherHandle, dictionary *BrotliEncoderDictionary
out.score = best_score
compare_char = int(data[cur_ix_masked+best_len])
if 2 == 1 {
-self.buckets_[key] = uint32(cur_ix)
+h.buckets_[key] = uint32(cur_ix)
return
}
}
@ -144,9 +142,9 @@ func FindLongestMatchH3(handle HasherHandle, dictionary *BrotliEncoderDictionary
var len uint
/* Only one to look for, don't bother to prepare for a loop. */
-prev_ix = uint(self.buckets_[key])
-self.buckets_[key] = uint32(cur_ix)
+prev_ix = uint(h.buckets_[key])
+h.buckets_[key] = uint32(cur_ix)
backward = cur_ix - prev_ix
prev_ix &= uint(uint32(ring_buffer_mask))
if compare_char != int(data[prev_ix+best_len_in]) {
@ -168,7 +166,7 @@ func FindLongestMatchH3(handle HasherHandle, dictionary *BrotliEncoderDictionary
}
}
} else {
-bucket = self.buckets_[key:]
+bucket = h.buckets_[key:]
var i int
prev_ix = uint(bucket[0])
bucket = bucket[1:]
@ -199,5 +197,5 @@ func FindLongestMatchH3(handle HasherHandle, dictionary *BrotliEncoderDictionary
}
}
-self.buckets_[key+uint32((cur_ix>>3)%2)] = uint32(cur_ix)
+h.buckets_[key+uint32((cur_ix>>3)%2)] = uint32(cur_ix)
}

h35.go (40 changed lines)

@ -11,9 +11,9 @@ package brotli
/* Composite hasher: This hasher allows to combine two other hashers, HASHER_A
and HASHER_B. */
-func HashTypeLengthH35() uint {
-var a uint = HashTypeLengthH3()
-var b uint = HashTypeLengthHROLLING_FAST()
+func (h *H35) HashTypeLength() uint {
+var a uint = h.ha.HashTypeLength()
+var b uint = h.hb.HashTypeLength()
if a > b {
return a
} else {
@ -21,9 +21,9 @@ func HashTypeLengthH35() uint {
}
}
-func StoreLookaheadH35() uint {
-var a uint = StoreLookaheadH3()
-var b uint = StoreLookaheadHROLLING_FAST()
+func (h *H35) StoreLookahead() uint {
+var a uint = h.ha.StoreLookahead()
+var b uint = h.hb.StoreLookahead()
if a > b {
return a
} else {
@ -78,16 +78,14 @@ func (h *H35) Prepare(one_shot bool, input_size uint, data []byte) {
h.hb.Prepare(one_shot, input_size, data)
}
-func StoreH35(handle HasherHandle, data []byte, mask uint, ix uint) {
-var self *H35 = SelfH35(handle)
-StoreH3(self.ha, data, mask, ix)
-StoreHROLLING_FAST(self.hb, data, mask, ix)
+func (h *H35) Store(data []byte, mask uint, ix uint) {
+h.ha.Store(data, mask, ix)
+h.hb.Store(data, mask, ix)
}
-func StoreRangeH35(handle HasherHandle, data []byte, mask uint, ix_start uint, ix_end uint) {
-var self *H35 = SelfH35(handle)
-StoreRangeH3(self.ha, data, mask, ix_start, ix_end)
-StoreRangeHROLLING_FAST(self.hb, data, mask, ix_start, ix_end)
+func (h *H35) StoreRange(data []byte, mask uint, ix_start uint, ix_end uint) {
+h.ha.StoreRange(data, mask, ix_start, ix_end)
+h.hb.StoreRange(data, mask, ix_start, ix_end)
}
func (h *H35) StitchToPreviousBlock(num_bytes uint, position uint, ringbuffer []byte, ring_buffer_mask uint) {
@ -95,14 +93,12 @@ func (h *H35) StitchToPreviousBlock(num_bytes uint, position uint, ringbuffer []
h.hb.StitchToPreviousBlock(num_bytes, position, ringbuffer, ring_buffer_mask)
}
-func PrepareDistanceCacheH35(handle HasherHandle, distance_cache []int) {
-var self *H35 = SelfH35(handle)
-PrepareDistanceCacheH3(self.ha, distance_cache)
-PrepareDistanceCacheHROLLING_FAST(self.hb, &distance_cache[0])
+func (h *H35) PrepareDistanceCache(distance_cache []int) {
+h.ha.PrepareDistanceCache(distance_cache)
+h.hb.PrepareDistanceCache(distance_cache)
}
-func FindLongestMatchH35(handle HasherHandle, dictionary *BrotliEncoderDictionary, data []byte, ring_buffer_mask uint, distance_cache []int, cur_ix uint, max_length uint, max_backward uint, gap uint, max_distance uint, out *HasherSearchResult) {
-var self *H35 = SelfH35(handle)
-FindLongestMatchH3(self.ha, dictionary, data, ring_buffer_mask, distance_cache, cur_ix, max_length, max_backward, gap, max_distance, out)
-FindLongestMatchHROLLING_FAST(self.hb, dictionary, data, ring_buffer_mask, &distance_cache[0], cur_ix, max_length, max_backward, gap, max_distance, out)
+func (h *H35) FindLongestMatch(dictionary *BrotliEncoderDictionary, data []byte, ring_buffer_mask uint, distance_cache []int, cur_ix uint, max_length uint, max_backward uint, gap uint, max_distance uint, out *HasherSearchResult) {
+h.ha.FindLongestMatch(dictionary, data, ring_buffer_mask, distance_cache, cur_ix, max_length, max_backward, gap, max_distance, out)
+h.hb.FindLongestMatch(dictionary, data, ring_buffer_mask, distance_cache, cur_ix, max_length, max_backward, gap, max_distance, out)
}
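
After this change the composite hasher no longer names its components' concrete types in these methods; all it needs is that ha and hb expose the shared hasher methods. The struct definition is outside this diff, so the following is an assumed sketch of the relevant fields only, under a hypothetical type name:

// Assumed sketch, not the actual declaration from the package: ha and hb are
// the two wrapped hashers (an H3-style hasher and a rolling hasher in the H35
// configuration), typed by the package's common hasher handle.
type compositeHasher struct {
	ha HasherHandle
	hb HasherHandle
}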

h4.go (38 changed lines)

@ -6,11 +6,11 @@ package brotli
Distributed under MIT license.
See file LICENSE for detail or copy at https://opensource.org/licenses/MIT
*/
-func HashTypeLengthH4() uint {
+func (*H4) HashTypeLength() uint {
return 8
}
-func StoreLookaheadH4() uint {
+func (*H4) StoreLookahead() uint {
return 8
}
@ -68,33 +68,32 @@ func (h *H4) Prepare(one_shot bool, input_size uint, data []byte) {
/* Look at 5 bytes at &data[ix & mask].
Compute a hash from these, and store the value somewhere within
[ix .. ix+3]. */
-func StoreH4(handle HasherHandle, data []byte, mask uint, ix uint) {
+func (h *H4) Store(data []byte, mask uint, ix uint) {
var key uint32 = HashBytesH4(data[ix&mask:])
var off uint32 = uint32(ix>>3) % 4
/* Wiggle the value with the bucket sweep range. */
-SelfH4(handle).buckets_[key+off] = uint32(ix)
+h.buckets_[key+off] = uint32(ix)
}
-func StoreRangeH4(handle HasherHandle, data []byte, mask uint, ix_start uint, ix_end uint) {
+func (h *H4) StoreRange(data []byte, mask uint, ix_start uint, ix_end uint) {
var i uint
for i = ix_start; i < ix_end; i++ {
-StoreH4(handle, data, mask, i)
+h.Store(data, mask, i)
}
}
func (h *H4) StitchToPreviousBlock(num_bytes uint, position uint, ringbuffer []byte, ringbuffer_mask uint) {
-if num_bytes >= HashTypeLengthH4()-1 && position >= 3 {
+if num_bytes >= h.HashTypeLength()-1 && position >= 3 {
/* Prepare the hashes for three last bytes of the last write.
These could not be calculated before, since they require knowledge
of both the previous and the current block. */
-StoreH4(h, ringbuffer, ringbuffer_mask, position-3)
-
-StoreH4(h, ringbuffer, ringbuffer_mask, position-2)
-StoreH4(h, ringbuffer, ringbuffer_mask, position-1)
+h.Store(ringbuffer, ringbuffer_mask, position-3)
+h.Store(ringbuffer, ringbuffer_mask, position-2)
+h.Store(ringbuffer, ringbuffer_mask, position-1)
}
}
-func PrepareDistanceCacheH4(handle HasherHandle, distance_cache []int) {
+func (*H4) PrepareDistanceCache(distance_cache []int) {
}
/* Find a longest backward match of &data[cur_ix & ring_buffer_mask]
@ -105,8 +104,7 @@ func PrepareDistanceCacheH4(handle HasherHandle, distance_cache []int) {
Does not look for matches further away than max_backward.
Writes the best match into |out|.
|out|->score is updated only if a better match is found. */
-func FindLongestMatchH4(handle HasherHandle, dictionary *BrotliEncoderDictionary, data []byte, ring_buffer_mask uint, distance_cache []int, cur_ix uint, max_length uint, max_backward uint, gap uint, max_distance uint, out *HasherSearchResult) {
-var self *H4 = SelfH4(handle)
+func (h *H4) FindLongestMatch(dictionary *BrotliEncoderDictionary, data []byte, ring_buffer_mask uint, distance_cache []int, cur_ix uint, max_length uint, max_backward uint, gap uint, max_distance uint, out *HasherSearchResult) {
var best_len_in uint = out.len
var cur_ix_masked uint = cur_ix & ring_buffer_mask
var key uint32 = HashBytesH4(data[cur_ix_masked:])
@ -132,7 +130,7 @@ func FindLongestMatchH4(handle HasherHandle, dictionary *BrotliEncoderDictionary
out.score = best_score
compare_char = int(data[cur_ix_masked+best_len])
if 4 == 1 {
-self.buckets_[key] = uint32(cur_ix)
+h.buckets_[key] = uint32(cur_ix)
return
}
}
@ -145,9 +143,9 @@ func FindLongestMatchH4(handle HasherHandle, dictionary *BrotliEncoderDictionary
var len uint
/* Only one to look for, don't bother to prepare for a loop. */
-prev_ix = uint(self.buckets_[key])
-self.buckets_[key] = uint32(cur_ix)
+prev_ix = uint(h.buckets_[key])
+h.buckets_[key] = uint32(cur_ix)
backward = cur_ix - prev_ix
prev_ix &= uint(uint32(ring_buffer_mask))
if compare_char != int(data[prev_ix+best_len_in]) {
@ -169,7 +167,7 @@ func FindLongestMatchH4(handle HasherHandle, dictionary *BrotliEncoderDictionary
}
}
} else {
-bucket = self.buckets_[key:]
+bucket = h.buckets_[key:]
var i int
prev_ix = uint(bucket[0])
bucket = bucket[1:]
@ -201,8 +199,8 @@ func FindLongestMatchH4(handle HasherHandle, dictionary *BrotliEncoderDictionary
}
if min_score == out.score {
-SearchInStaticDictionary(dictionary, handle, data[cur_ix_masked:], max_length, max_backward+gap, max_distance, out, true)
+SearchInStaticDictionary(dictionary, h, data[cur_ix_masked:], max_length, max_backward+gap, max_distance, out, true)
}
-self.buckets_[key+uint32((cur_ix>>3)%4)] = uint32(cur_ix)
+h.buckets_[key+uint32((cur_ix>>3)%4)] = uint32(cur_ix)
}

h40.go (57 changed lines)

@ -13,11 +13,11 @@ package brotli
Hashes are stored in chains which are bucketed to groups. Group of chains
share a storage "bank". When more than "bank size" chain nodes are added,
oldest nodes are replaced; this way several chains may share a tail. */
-func HashTypeLengthH40() uint {
+func (*H40) HashTypeLength() uint {
return 4
}
-func StoreLookaheadH40() uint {
+func (*H40) StoreLookahead() uint {
return 4
}
@ -94,44 +94,42 @@ func (h *H40) Prepare(one_shot bool, input_size uint, data []byte) {
/* Look at 4 bytes at &data[ix & mask]. Compute a hash from these, and prepend
node to corresponding chain; also update tiny_hash for current position. */
-func StoreH40(handle HasherHandle, data []byte, mask uint, ix uint) {
-var self *H40 = SelfH40(handle)
+func (h *H40) Store(data []byte, mask uint, ix uint) {
var key uint = HashBytesH40(data[ix&mask:])
var bank uint = key & (1 - 1)
var idx uint
-idx = uint(self.free_slot_idx[bank]) & ((1 << 16) - 1)
-self.free_slot_idx[bank]++
-var delta uint = ix - uint(self.addr[key])
-self.tiny_hash[uint16(ix)] = byte(key)
+idx = uint(h.free_slot_idx[bank]) & ((1 << 16) - 1)
+h.free_slot_idx[bank]++
+var delta uint = ix - uint(h.addr[key])
+h.tiny_hash[uint16(ix)] = byte(key)
if delta > 0xFFFF {
delta = 0xFFFF
}
-self.banks[bank].slots[idx].delta = uint16(delta)
-self.banks[bank].slots[idx].next = self.head[key]
-self.addr[key] = uint32(ix)
-self.head[key] = uint16(idx)
+h.banks[bank].slots[idx].delta = uint16(delta)
+h.banks[bank].slots[idx].next = h.head[key]
+h.addr[key] = uint32(ix)
+h.head[key] = uint16(idx)
}
-func StoreRangeH40(handle HasherHandle, data []byte, mask uint, ix_start uint, ix_end uint) {
+func (h *H40) StoreRange(data []byte, mask uint, ix_start uint, ix_end uint) {
var i uint
for i = ix_start; i < ix_end; i++ {
-StoreH40(handle, data, mask, i)
+h.Store(data, mask, i)
}
}
func (h *H40) StitchToPreviousBlock(num_bytes uint, position uint, ringbuffer []byte, ring_buffer_mask uint) {
-if num_bytes >= HashTypeLengthH40()-1 && position >= 3 {
+if num_bytes >= h.HashTypeLength()-1 && position >= 3 {
/* Prepare the hashes for three last bytes of the last write.
These could not be calculated before, since they require knowledge
of both the previous and the current block. */
-StoreH40(h, ringbuffer, ring_buffer_mask, position-3)
-
-StoreH40(h, ringbuffer, ring_buffer_mask, position-2)
-StoreH40(h, ringbuffer, ring_buffer_mask, position-1)
+h.Store(ringbuffer, ring_buffer_mask, position-3)
+h.Store(ringbuffer, ring_buffer_mask, position-2)
+h.Store(ringbuffer, ring_buffer_mask, position-1)
}
}
-func PrepareDistanceCacheH40(handle HasherHandle, distance_cache []int) {
+func (*H40) PrepareDistanceCache(distance_cache []int) {
PrepareDistanceCache(distance_cache, 4)
}
@ -146,8 +144,7 @@ func PrepareDistanceCacheH40(handle HasherHandle, distance_cache []int) {
Does not look for matches further away than max_backward.
Writes the best match into |out|.
|out|->score is updated only if a better match is found. */
-func FindLongestMatchH40(handle HasherHandle, dictionary *BrotliEncoderDictionary, data []byte, ring_buffer_mask uint, distance_cache []int, cur_ix uint, max_length uint, max_backward uint, gap uint, max_distance uint, out *HasherSearchResult) {
-var self *H40 = SelfH40(handle)
+func (h *H40) FindLongestMatch(dictionary *BrotliEncoderDictionary, data []byte, ring_buffer_mask uint, distance_cache []int, cur_ix uint, max_length uint, max_backward uint, gap uint, max_distance uint, out *HasherSearchResult) {
var cur_ix_masked uint = cur_ix & ring_buffer_mask
var min_score uint = out.score
var best_score uint = out.score
@ -166,7 +163,7 @@ func FindLongestMatchH40(handle HasherHandle, dictionary *BrotliEncoderDictionar
var prev_ix uint = (cur_ix - backward)
/* For distance code 0 we want to consider 2-byte matches. */
-if i > 0 && self.tiny_hash[uint16(prev_ix)] != tiny_hash {
+if i > 0 && h.tiny_hash[uint16(prev_ix)] != tiny_hash {
continue
}
if prev_ix >= cur_ix || backward > max_backward {
@ -196,9 +193,9 @@ func FindLongestMatchH40(handle HasherHandle, dictionary *BrotliEncoderDictionar
{
var bank uint = key & (1 - 1)
var backward uint = 0
-var hops uint = self.max_hops
-var delta uint = cur_ix - uint(self.addr[key])
-var slot uint = uint(self.head[key])
+var hops uint = h.max_hops
+var delta uint = cur_ix - uint(h.addr[key])
+var slot uint = uint(h.head[key])
for {
tmp6 := hops
hops--
@ -212,8 +209,8 @@ func FindLongestMatchH40(handle HasherHandle, dictionary *BrotliEncoderDictionar
break
}
prev_ix = (cur_ix - backward) & ring_buffer_mask
-slot = uint(self.banks[bank].slots[last].next)
-delta = uint(self.banks[bank].slots[last].delta)
+slot = uint(h.banks[bank].slots[last].next)
+delta = uint(h.banks[bank].slots[last].delta)
if cur_ix_masked+best_len > ring_buffer_mask || prev_ix+best_len > ring_buffer_mask || data[cur_ix_masked+best_len] != data[prev_ix+best_len] {
continue
}
@ -235,10 +232,10 @@ func FindLongestMatchH40(handle HasherHandle, dictionary *BrotliEncoderDictionar
}
}
-StoreH40(handle, data, ring_buffer_mask, cur_ix)
+h.Store(data, ring_buffer_mask, cur_ix)
}
if out.score == min_score {
-SearchInStaticDictionary(dictionary, handle, data[cur_ix_masked:], max_length, max_backward+gap, max_distance, out, false)
+SearchInStaticDictionary(dictionary, h, data[cur_ix_masked:], max_length, max_backward+gap, max_distance, out, false)
}
}
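
The bank/chain layout described at the top of h40.go can be read off the fields used above: addr[key] holds the most recent absolute position seen for a hash key, head[key] the slot index of that entry, and each slot stores a 16-bit delta back to the previous occurrence plus the index of the next slot. A simplified, illustrative walk over such a chain (not code from this commit; it omits the tiny_hash filtering and scoring done in FindLongestMatch):

// chainPositions is hypothetical: it collects up to maxHops earlier positions
// recorded for key, loosely mirroring the traversal in FindLongestMatch above.
func chainPositions(h *H40, key uint, cur_ix uint, maxHops uint) []uint {
	var positions []uint
	var bank uint = key & (1 - 1) // a single bank here, so this is always bank 0
	var backward uint = 0
	var delta uint = cur_ix - uint(h.addr[key])
	var slot uint = uint(h.head[key])
	for hops := maxHops; hops > 0; hops-- {
		if delta == 0 {
			break // no earlier occurrence recorded for this key
		}
		backward += delta
		if backward > cur_ix {
			break // would point before the start of the data
		}
		positions = append(positions, cur_ix-backward)
		// Follow the chain: the current slot records how far back the
		// next-older entry is and which slot it lives in.
		delta = uint(h.banks[bank].slots[slot].delta)
		slot = uint(h.banks[bank].slots[slot].next)
	}
	return positions
}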

h41.go (57 changed lines)

@ -13,11 +13,11 @@ package brotli
Hashes are stored in chains which are bucketed to groups. Group of chains Hashes are stored in chains which are bucketed to groups. Group of chains
share a storage "bank". When more than "bank size" chain nodes are added, share a storage "bank". When more than "bank size" chain nodes are added,
oldest nodes are replaced; this way several chains may share a tail. */ oldest nodes are replaced; this way several chains may share a tail. */
func HashTypeLengthH41() uint { func (*H41) HashTypeLength() uint {
return 4 return 4
} }
func StoreLookaheadH41() uint { func (*H41) StoreLookahead() uint {
return 4 return 4
} }
@ -94,44 +94,42 @@ func (h *H41) Prepare(one_shot bool, input_size uint, data []byte) {
/* Look at 4 bytes at &data[ix & mask]. Compute a hash from these, and prepend /* Look at 4 bytes at &data[ix & mask]. Compute a hash from these, and prepend
node to corresponding chain; also update tiny_hash for current position. */ node to corresponding chain; also update tiny_hash for current position. */
func StoreH41(handle HasherHandle, data []byte, mask uint, ix uint) { func (h *H41) Store(data []byte, mask uint, ix uint) {
var self *H41 = SelfH41(handle)
var key uint = HashBytesH41(data[ix&mask:]) var key uint = HashBytesH41(data[ix&mask:])
var bank uint = key & (1 - 1) var bank uint = key & (1 - 1)
var idx uint var idx uint
idx = uint(self.free_slot_idx[bank]) & ((1 << 16) - 1) idx = uint(h.free_slot_idx[bank]) & ((1 << 16) - 1)
self.free_slot_idx[bank]++ h.free_slot_idx[bank]++
var delta uint = ix - uint(self.addr[key]) var delta uint = ix - uint(h.addr[key])
self.tiny_hash[uint16(ix)] = byte(key) h.tiny_hash[uint16(ix)] = byte(key)
if delta > 0xFFFF { if delta > 0xFFFF {
delta = 0xFFFF delta = 0xFFFF
} }
self.banks[bank].slots[idx].delta = uint16(delta) h.banks[bank].slots[idx].delta = uint16(delta)
self.banks[bank].slots[idx].next = self.head[key] h.banks[bank].slots[idx].next = h.head[key]
self.addr[key] = uint32(ix) h.addr[key] = uint32(ix)
self.head[key] = uint16(idx) h.head[key] = uint16(idx)
} }
func StoreRangeH41(handle HasherHandle, data []byte, mask uint, ix_start uint, ix_end uint) { func (h *H41) StoreRange(data []byte, mask uint, ix_start uint, ix_end uint) {
var i uint var i uint
for i = ix_start; i < ix_end; i++ { for i = ix_start; i < ix_end; i++ {
StoreH41(handle, data, mask, i) h.Store(data, mask, i)
} }
} }
func (h *H41) StitchToPreviousBlock(num_bytes uint, position uint, ringbuffer []byte, ring_buffer_mask uint) { func (h *H41) StitchToPreviousBlock(num_bytes uint, position uint, ringbuffer []byte, ring_buffer_mask uint) {
if num_bytes >= HashTypeLengthH41()-1 && position >= 3 { if num_bytes >= h.HashTypeLength()-1 && position >= 3 {
/* Prepare the hashes for three last bytes of the last write. /* Prepare the hashes for three last bytes of the last write.
These could not be calculated before, since they require knowledge These could not be calculated before, since they require knowledge
of both the previous and the current block. */ of both the previous and the current block. */
StoreH41(h, ringbuffer, ring_buffer_mask, position-3) h.Store(ringbuffer, ring_buffer_mask, position-3)
h.Store(ringbuffer, ring_buffer_mask, position-2)
StoreH41(h, ringbuffer, ring_buffer_mask, position-2) h.Store(ringbuffer, ring_buffer_mask, position-1)
StoreH41(h, ringbuffer, ring_buffer_mask, position-1)
} }
} }
func PrepareDistanceCacheH41(handle HasherHandle, distance_cache []int) { func (*H41) PrepareDistanceCache(distance_cache []int) {
PrepareDistanceCache(distance_cache, 10) PrepareDistanceCache(distance_cache, 10)
} }
@ -146,8 +144,7 @@ func PrepareDistanceCacheH41(handle HasherHandle, distance_cache []int) {
Does not look for matches further away than max_backward. Does not look for matches further away than max_backward.
Writes the best match into |out|. Writes the best match into |out|.
|out|->score is updated only if a better match is found. */ |out|->score is updated only if a better match is found. */
func FindLongestMatchH41(handle HasherHandle, dictionary *BrotliEncoderDictionary, data []byte, ring_buffer_mask uint, distance_cache []int, cur_ix uint, max_length uint, max_backward uint, gap uint, max_distance uint, out *HasherSearchResult) { func (h *H41) FindLongestMatch(dictionary *BrotliEncoderDictionary, data []byte, ring_buffer_mask uint, distance_cache []int, cur_ix uint, max_length uint, max_backward uint, gap uint, max_distance uint, out *HasherSearchResult) {
var self *H41 = SelfH41(handle)
var cur_ix_masked uint = cur_ix & ring_buffer_mask var cur_ix_masked uint = cur_ix & ring_buffer_mask
var min_score uint = out.score var min_score uint = out.score
var best_score uint = out.score var best_score uint = out.score
@ -166,7 +163,7 @@ func FindLongestMatchH41(handle HasherHandle, dictionary *BrotliEncoderDictionar
var prev_ix uint = (cur_ix - backward) var prev_ix uint = (cur_ix - backward)
/* For distance code 0 we want to consider 2-byte matches. */ /* For distance code 0 we want to consider 2-byte matches. */
if i > 0 && self.tiny_hash[uint16(prev_ix)] != tiny_hash { if i > 0 && h.tiny_hash[uint16(prev_ix)] != tiny_hash {
continue continue
} }
if prev_ix >= cur_ix || backward > max_backward { if prev_ix >= cur_ix || backward > max_backward {
@ -196,9 +193,9 @@ func FindLongestMatchH41(handle HasherHandle, dictionary *BrotliEncoderDictionar
{ {
var bank uint = key & (1 - 1) var bank uint = key & (1 - 1)
var backward uint = 0 var backward uint = 0
var hops uint = self.max_hops var hops uint = h.max_hops
var delta uint = cur_ix - uint(self.addr[key]) var delta uint = cur_ix - uint(h.addr[key])
var slot uint = uint(self.head[key]) var slot uint = uint(h.head[key])
for { for {
tmp7 := hops tmp7 := hops
hops-- hops--
@ -212,8 +209,8 @@ func FindLongestMatchH41(handle HasherHandle, dictionary *BrotliEncoderDictionar
break break
} }
prev_ix = (cur_ix - backward) & ring_buffer_mask prev_ix = (cur_ix - backward) & ring_buffer_mask
slot = uint(self.banks[bank].slots[last].next) slot = uint(h.banks[bank].slots[last].next)
delta = uint(self.banks[bank].slots[last].delta) delta = uint(h.banks[bank].slots[last].delta)
if cur_ix_masked+best_len > ring_buffer_mask || prev_ix+best_len > ring_buffer_mask || data[cur_ix_masked+best_len] != data[prev_ix+best_len] { if cur_ix_masked+best_len > ring_buffer_mask || prev_ix+best_len > ring_buffer_mask || data[cur_ix_masked+best_len] != data[prev_ix+best_len] {
continue continue
} }
@ -235,10 +232,10 @@ func FindLongestMatchH41(handle HasherHandle, dictionary *BrotliEncoderDictionar
} }
} }
StoreH41(handle, data, ring_buffer_mask, cur_ix) h.Store(data, ring_buffer_mask, cur_ix)
} }
if out.score == min_score { if out.score == min_score {
SearchInStaticDictionary(dictionary, handle, data[cur_ix_masked:], max_length, max_backward+gap, max_distance, out, false) SearchInStaticDictionary(dictionary, h, data[cur_ix_masked:], max_length, max_backward+gap, max_distance, out, false)
} }
} }
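The H41 hunks above follow the commit's general pattern: free functions that took a HasherHandle and recovered the concrete type through SelfH41 become methods on *H41 that read the fields directly. To make the chain-and-bank bookkeeping those methods manipulate easier to follow, here is a small self-contained Go sketch of the same idea with made-up sizes and names (chainHasher, numKeys and bankSize are illustrations, not types from this package):

package main

import "fmt"

// Hypothetical sizes for the sketch only.
const (
    numKeys  = 1 << 8 // buckets
    bankSize = 1 << 4 // shared slots per bank
)

type slot struct {
    delta uint16 // distance to the previous position with the same key
    next  uint16 // index of the next slot in the shared bank
}

type chainHasher struct {
    addr        [numKeys]uint32 // most recent position for each key
    head        [numKeys]uint16 // bank slot holding that position's link
    bank        [bankSize]slot  // shared storage for all chains
    freeSlotIdx uint16          // next slot to overwrite (ring behaviour)
}

// store prepends position ix to the chain for key, in the spirit of H41.Store.
func (h *chainHasher) store(key uint, ix uint) {
    idx := uint(h.freeSlotIdx) % bankSize
    h.freeSlotIdx++
    delta := ix - uint(h.addr[key])
    if delta > 0xFFFF {
        delta = 0xFFFF
    }
    h.bank[idx] = slot{delta: uint16(delta), next: h.head[key]}
    h.addr[key] = uint32(ix)
    h.head[key] = uint16(idx)
}

func main() {
    var h chainHasher
    for ix := uint(1); ix <= 3; ix++ {
        h.store(42, ix)
    }
    // Walk the chain for key 42, newest first: prints positions 3, 2, 1.
    pos := uint(h.addr[42])
    slotIdx := h.head[42]
    for i := 0; i < 3; i++ {
        fmt.Println("match candidate at position", pos)
        pos -= uint(h.bank[slotIdx].delta)
        slotIdx = h.bank[slotIdx].next
    }
}

Because the bank slots are recycled in a ring, several chains can end up sharing a tail, which is the trade-off described in the comment at the top of this file.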

h42.go

@ -13,11 +13,11 @@ package brotli
Hashes are stored in chains which are bucketed to groups. Group of chains Hashes are stored in chains which are bucketed to groups. Group of chains
share a storage "bank". When more than "bank size" chain nodes are added, share a storage "bank". When more than "bank size" chain nodes are added,
oldest nodes are replaced; this way several chains may share a tail. */ oldest nodes are replaced; this way several chains may share a tail. */
func HashTypeLengthH42() uint { func (*H42) HashTypeLength() uint {
return 4 return 4
} }
func StoreLookaheadH42() uint { func (*H42) StoreLookahead() uint {
return 4 return 4
} }
@ -94,44 +94,42 @@ func (h *H42) Prepare(one_shot bool, input_size uint, data []byte) {
/* Look at 4 bytes at &data[ix & mask]. Compute a hash from these, and prepend /* Look at 4 bytes at &data[ix & mask]. Compute a hash from these, and prepend
node to corresponding chain; also update tiny_hash for current position. */ node to corresponding chain; also update tiny_hash for current position. */
func StoreH42(handle HasherHandle, data []byte, mask uint, ix uint) { func (h *H42) Store(data []byte, mask uint, ix uint) {
var self *H42 = SelfH42(handle)
var key uint = HashBytesH42(data[ix&mask:]) var key uint = HashBytesH42(data[ix&mask:])
var bank uint = key & (512 - 1) var bank uint = key & (512 - 1)
var idx uint var idx uint
idx = uint(self.free_slot_idx[bank]) & ((1 << 9) - 1) idx = uint(h.free_slot_idx[bank]) & ((1 << 9) - 1)
self.free_slot_idx[bank]++ h.free_slot_idx[bank]++
var delta uint = ix - uint(self.addr[key]) var delta uint = ix - uint(h.addr[key])
self.tiny_hash[uint16(ix)] = byte(key) h.tiny_hash[uint16(ix)] = byte(key)
if delta > 0xFFFF { if delta > 0xFFFF {
delta = 0xFFFF delta = 0xFFFF
} }
self.banks[bank].slots[idx].delta = uint16(delta) h.banks[bank].slots[idx].delta = uint16(delta)
self.banks[bank].slots[idx].next = self.head[key] h.banks[bank].slots[idx].next = h.head[key]
self.addr[key] = uint32(ix) h.addr[key] = uint32(ix)
self.head[key] = uint16(idx) h.head[key] = uint16(idx)
} }
func StoreRangeH42(handle HasherHandle, data []byte, mask uint, ix_start uint, ix_end uint) { func (h *H42) StoreRange(data []byte, mask uint, ix_start uint, ix_end uint) {
var i uint var i uint
for i = ix_start; i < ix_end; i++ { for i = ix_start; i < ix_end; i++ {
StoreH42(handle, data, mask, i) h.Store(data, mask, i)
} }
} }
func (h *H42) StitchToPreviousBlock(num_bytes uint, position uint, ringbuffer []byte, ring_buffer_mask uint) { func (h *H42) StitchToPreviousBlock(num_bytes uint, position uint, ringbuffer []byte, ring_buffer_mask uint) {
if num_bytes >= HashTypeLengthH42()-1 && position >= 3 { if num_bytes >= h.HashTypeLength()-1 && position >= 3 {
/* Prepare the hashes for three last bytes of the last write. /* Prepare the hashes for three last bytes of the last write.
These could not be calculated before, since they require knowledge These could not be calculated before, since they require knowledge
of both the previous and the current block. */ of both the previous and the current block. */
StoreH42(h, ringbuffer, ring_buffer_mask, position-3) h.Store(ringbuffer, ring_buffer_mask, position-3)
StoreH42(h, ringbuffer, ring_buffer_mask, position-2) h.Store(ringbuffer, ring_buffer_mask, position-2)
StoreH42(h, ringbuffer, ring_buffer_mask, position-1) h.Store(ringbuffer, ring_buffer_mask, position-1)
} }
} }
func PrepareDistanceCacheH42(handle HasherHandle, distance_cache []int) { func (*H42) PrepareDistanceCache(distance_cache []int) {
PrepareDistanceCache(distance_cache, 16) PrepareDistanceCache(distance_cache, 16)
} }
@ -146,8 +144,7 @@ func PrepareDistanceCacheH42(handle HasherHandle, distance_cache []int) {
Does not look for matches further away than max_backward. Does not look for matches further away than max_backward.
Writes the best match into |out|. Writes the best match into |out|.
|out|->score is updated only if a better match is found. */ |out|->score is updated only if a better match is found. */
func FindLongestMatchH42(handle HasherHandle, dictionary *BrotliEncoderDictionary, data []byte, ring_buffer_mask uint, distance_cache []int, cur_ix uint, max_length uint, max_backward uint, gap uint, max_distance uint, out *HasherSearchResult) { func (h *H42) FindLongestMatch(dictionary *BrotliEncoderDictionary, data []byte, ring_buffer_mask uint, distance_cache []int, cur_ix uint, max_length uint, max_backward uint, gap uint, max_distance uint, out *HasherSearchResult) {
var self *H42 = SelfH42(handle)
var cur_ix_masked uint = cur_ix & ring_buffer_mask var cur_ix_masked uint = cur_ix & ring_buffer_mask
var min_score uint = out.score var min_score uint = out.score
var best_score uint = out.score var best_score uint = out.score
@ -166,7 +163,7 @@ func FindLongestMatchH42(handle HasherHandle, dictionary *BrotliEncoderDictionar
var prev_ix uint = (cur_ix - backward) var prev_ix uint = (cur_ix - backward)
/* For distance code 0 we want to consider 2-byte matches. */ /* For distance code 0 we want to consider 2-byte matches. */
if i > 0 && self.tiny_hash[uint16(prev_ix)] != tiny_hash { if i > 0 && h.tiny_hash[uint16(prev_ix)] != tiny_hash {
continue continue
} }
if prev_ix >= cur_ix || backward > max_backward { if prev_ix >= cur_ix || backward > max_backward {
@ -196,9 +193,9 @@ func FindLongestMatchH42(handle HasherHandle, dictionary *BrotliEncoderDictionar
{ {
var bank uint = key & (512 - 1) var bank uint = key & (512 - 1)
var backward uint = 0 var backward uint = 0
var hops uint = self.max_hops var hops uint = h.max_hops
var delta uint = cur_ix - uint(self.addr[key]) var delta uint = cur_ix - uint(h.addr[key])
var slot uint = uint(self.head[key]) var slot uint = uint(h.head[key])
for { for {
tmp8 := hops tmp8 := hops
hops-- hops--
@ -212,8 +209,8 @@ func FindLongestMatchH42(handle HasherHandle, dictionary *BrotliEncoderDictionar
break break
} }
prev_ix = (cur_ix - backward) & ring_buffer_mask prev_ix = (cur_ix - backward) & ring_buffer_mask
slot = uint(self.banks[bank].slots[last].next) slot = uint(h.banks[bank].slots[last].next)
delta = uint(self.banks[bank].slots[last].delta) delta = uint(h.banks[bank].slots[last].delta)
if cur_ix_masked+best_len > ring_buffer_mask || prev_ix+best_len > ring_buffer_mask || data[cur_ix_masked+best_len] != data[prev_ix+best_len] { if cur_ix_masked+best_len > ring_buffer_mask || prev_ix+best_len > ring_buffer_mask || data[cur_ix_masked+best_len] != data[prev_ix+best_len] {
continue continue
} }
@ -235,10 +232,10 @@ func FindLongestMatchH42(handle HasherHandle, dictionary *BrotliEncoderDictionar
} }
} }
StoreH42(handle, data, ring_buffer_mask, cur_ix) h.Store(data, ring_buffer_mask, cur_ix)
} }
if out.score == min_score { if out.score == min_score {
SearchInStaticDictionary(dictionary, handle, data[cur_ix_masked:], max_length, max_backward+gap, max_distance, out, false) SearchInStaticDictionary(dictionary, h, data[cur_ix_masked:], max_length, max_backward+gap, max_distance, out, false)
} }
} }

h5.go

@ -13,11 +13,11 @@ package brotli
This is a hash map of fixed size (bucket_size_) to a ring buffer of This is a hash map of fixed size (bucket_size_) to a ring buffer of
fixed size (block_size_). The ring buffer contains the last block_size_ fixed size (block_size_). The ring buffer contains the last block_size_
index positions of the given hash key in the compressed data. */ index positions of the given hash key in the compressed data. */
func HashTypeLengthH5() uint { func (*H5) HashTypeLength() uint {
return 4 return 4
} }
func StoreLookaheadH5() uint { func (*H5) StoreLookahead() uint {
return 4 return 4
} }
@ -80,37 +80,35 @@ func (h *H5) Prepare(one_shot bool, input_size uint, data []byte) {
/* Look at 4 bytes at &data[ix & mask]. /* Look at 4 bytes at &data[ix & mask].
Compute a hash from these, and store the value of ix at that position. */ Compute a hash from these, and store the value of ix at that position. */
func StoreH5(handle HasherHandle, data []byte, mask uint, ix uint) { func (h *H5) Store(data []byte, mask uint, ix uint) {
var self *H5 = SelfH5(handle) var num []uint16 = h.num
var num []uint16 = NumH5(self) var key uint32 = HashBytesH5(data[ix&mask:], h.hash_shift_)
var key uint32 = HashBytesH5(data[ix&mask:], self.hash_shift_) var minor_ix uint = uint(num[key]) & uint(h.block_mask_)
var minor_ix uint = uint(num[key]) & uint(self.block_mask_) var offset uint = minor_ix + uint(key<<uint(h.params.block_bits))
var offset uint = minor_ix + uint(key<<uint(handle.Common().params.block_bits)) h.buckets[offset] = uint32(ix)
BucketsH5(self)[offset] = uint32(ix)
num[key]++ num[key]++
} }
func StoreRangeH5(handle HasherHandle, data []byte, mask uint, ix_start uint, ix_end uint) { func (h *H5) StoreRange(data []byte, mask uint, ix_start uint, ix_end uint) {
var i uint var i uint
for i = ix_start; i < ix_end; i++ { for i = ix_start; i < ix_end; i++ {
StoreH5(handle, data, mask, i) h.Store(data, mask, i)
} }
} }
func (h *H5) StitchToPreviousBlock(num_bytes uint, position uint, ringbuffer []byte, ringbuffer_mask uint) { func (h *H5) StitchToPreviousBlock(num_bytes uint, position uint, ringbuffer []byte, ringbuffer_mask uint) {
if num_bytes >= HashTypeLengthH5()-1 && position >= 3 { if num_bytes >= h.HashTypeLength()-1 && position >= 3 {
/* Prepare the hashes for three last bytes of the last write. /* Prepare the hashes for three last bytes of the last write.
These could not be calculated before, since they require knowledge These could not be calculated before, since they require knowledge
of both the previous and the current block. */ of both the previous and the current block. */
StoreH5(h, ringbuffer, ringbuffer_mask, position-3) h.Store(ringbuffer, ringbuffer_mask, position-3)
StoreH5(h, ringbuffer, ringbuffer_mask, position-2) h.Store(ringbuffer, ringbuffer_mask, position-2)
StoreH5(h, ringbuffer, ringbuffer_mask, position-1) h.Store(ringbuffer, ringbuffer_mask, position-1)
} }
} }
func PrepareDistanceCacheH5(handle HasherHandle, distance_cache []int) { func (h *H5) PrepareDistanceCache(distance_cache []int) {
PrepareDistanceCache(distance_cache, handle.Common().params.num_last_distances_to_check) PrepareDistanceCache(distance_cache, h.params.num_last_distances_to_check)
} }
/* Find a longest backward match of &data[cur_ix] up to the length of /* Find a longest backward match of &data[cur_ix] up to the length of
@ -124,11 +122,9 @@ func PrepareDistanceCacheH5(handle HasherHandle, distance_cache []int) {
Does not look for matches further away than max_backward. Does not look for matches further away than max_backward.
Writes the best match into |out|. Writes the best match into |out|.
|out|->score is updated only if a better match is found. */ |out|->score is updated only if a better match is found. */
func FindLongestMatchH5(handle HasherHandle, dictionary *BrotliEncoderDictionary, data []byte, ring_buffer_mask uint, distance_cache []int, cur_ix uint, max_length uint, max_backward uint, gap uint, max_distance uint, out *HasherSearchResult) { func (h *H5) FindLongestMatch(dictionary *BrotliEncoderDictionary, data []byte, ring_buffer_mask uint, distance_cache []int, cur_ix uint, max_length uint, max_backward uint, gap uint, max_distance uint, out *HasherSearchResult) {
var common *HasherCommon = handle.Common() var num []uint16 = h.num
var self *H5 = SelfH5(handle) var buckets []uint32 = h.buckets
var num []uint16 = NumH5(self)
var buckets []uint32 = BucketsH5(self)
var cur_ix_masked uint = cur_ix & ring_buffer_mask var cur_ix_masked uint = cur_ix & ring_buffer_mask
var min_score uint = out.score var min_score uint = out.score
var best_score uint = out.score var best_score uint = out.score
@ -141,7 +137,7 @@ func FindLongestMatchH5(handle HasherHandle, dictionary *BrotliEncoderDictionary
out.len_code_delta = 0 out.len_code_delta = 0
/* Try last distance first. */ /* Try last distance first. */
for i = 0; i < uint(common.params.num_last_distances_to_check); i++ { for i = 0; i < uint(h.params.num_last_distances_to_check); i++ {
var backward uint = uint(distance_cache[i]) var backward uint = uint(distance_cache[i])
var prev_ix uint = uint(cur_ix - backward) var prev_ix uint = uint(cur_ix - backward)
if prev_ix >= cur_ix { if prev_ix >= cur_ix {
@ -180,18 +176,18 @@ func FindLongestMatchH5(handle HasherHandle, dictionary *BrotliEncoderDictionary
} }
} }
{ {
var key uint32 = HashBytesH5(data[cur_ix_masked:], self.hash_shift_) var key uint32 = HashBytesH5(data[cur_ix_masked:], h.hash_shift_)
bucket = buckets[key<<uint(common.params.block_bits):] bucket = buckets[key<<uint(h.params.block_bits):]
var down uint var down uint
if uint(num[key]) > self.block_size_ { if uint(num[key]) > h.block_size_ {
down = uint(num[key]) - self.block_size_ down = uint(num[key]) - h.block_size_
} else { } else {
down = 0 down = 0
} }
for i = uint(num[key]); i > down; { for i = uint(num[key]); i > down; {
var prev_ix uint var prev_ix uint
i-- i--
prev_ix = uint(bucket[uint32(i)&self.block_mask_]) prev_ix = uint(bucket[uint32(i)&h.block_mask_])
var backward uint = cur_ix - prev_ix var backward uint = cur_ix - prev_ix
if backward > max_backward { if backward > max_backward {
break break
@ -219,11 +215,11 @@ func FindLongestMatchH5(handle HasherHandle, dictionary *BrotliEncoderDictionary
} }
} }
bucket[uint32(num[key])&self.block_mask_] = uint32(cur_ix) bucket[uint32(num[key])&h.block_mask_] = uint32(cur_ix)
num[key]++ num[key]++
} }
if min_score == out.score { if min_score == out.score {
SearchInStaticDictionary(dictionary, handle, data[cur_ix_masked:], max_length, max_backward+gap, max_distance, out, false) SearchInStaticDictionary(dictionary, h, data[cur_ix_masked:], max_length, max_backward+gap, max_distance, out, false)
} }
} }
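H5 keeps, for each hash key, a ring buffer of the last block_size_ positions: num[key] counts how many positions were ever stored, and its low bits pick the slot to overwrite, which is what the new (*H5).Store and the bucket walk in (*H5).FindLongestMatch above do. A standalone sketch with made-up sizes (ringHasher, bucketCount and blockBits are illustrative names, not the encoder's types):

package main

import "fmt"

// Hypothetical sizes for the sketch only.
const (
    bucketCount = 1 << 10
    blockBits   = 2
    blockSize   = 1 << blockBits
    blockMask   = blockSize - 1
)

type ringHasher struct {
    num     [bucketCount]uint16               // insertions per key
    buckets [bucketCount * blockSize]uint32   // per-key ring of positions
}

// store records position ix for key, overwriting the oldest entry once more
// than blockSize positions have been stored.
func (h *ringHasher) store(key uint32, ix uint) {
    minor := uint(h.num[key]) & blockMask
    h.buckets[minor+uint(key<<blockBits)] = uint32(ix)
    h.num[key]++
}

// candidates returns the stored positions for key, newest first.
func (h *ringHasher) candidates(key uint32) []uint32 {
    var out []uint32
    down := uint(0)
    if uint(h.num[key]) > blockSize {
        down = uint(h.num[key]) - blockSize
    }
    bucket := h.buckets[key<<blockBits:]
    for i := uint(h.num[key]); i > down; i-- {
        out = append(out, bucket[(i-1)&blockMask])
    }
    return out
}

func main() {
    var h ringHasher
    for ix := uint(100); ix < 106; ix++ { // 6 stores, ring keeps the last 4
        h.store(7, ix)
    }
    fmt.Println(h.candidates(7)) // [105 104 103 102]
}

H6, later in this commit, is the same structure with a configurable hash mask, so its hunks read almost identically.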

h54.go

@ -6,11 +6,11 @@ package brotli
Distributed under MIT license. Distributed under MIT license.
See file LICENSE for detail or copy at https://opensource.org/licenses/MIT See file LICENSE for detail or copy at https://opensource.org/licenses/MIT
*/ */
func HashTypeLengthH54() uint { func (*H54) HashTypeLength() uint {
return 8 return 8
} }
func StoreLookaheadH54() uint { func (*H54) StoreLookahead() uint {
return 8 return 8
} }
@ -65,33 +65,32 @@ func (h *H54) Prepare(one_shot bool, input_size uint, data []byte) {
/* Look at 5 bytes at &data[ix & mask]. /* Look at 5 bytes at &data[ix & mask].
Compute a hash from these, and store the value somewhere within Compute a hash from these, and store the value somewhere within
[ix .. ix+3]. */ [ix .. ix+3]. */
func StoreH54(handle HasherHandle, data []byte, mask uint, ix uint) { func (h *H54) Store(data []byte, mask uint, ix uint) {
var key uint32 = HashBytesH54(data[ix&mask:]) var key uint32 = HashBytesH54(data[ix&mask:])
var off uint32 = uint32(ix>>3) % 4 var off uint32 = uint32(ix>>3) % 4
/* Wiggle the value with the bucket sweep range. */ /* Wiggle the value with the bucket sweep range. */
SelfH54(handle).buckets_[key+off] = uint32(ix) h.buckets_[key+off] = uint32(ix)
} }
func StoreRangeH54(handle HasherHandle, data []byte, mask uint, ix_start uint, ix_end uint) { func (h *H54) StoreRange(data []byte, mask uint, ix_start uint, ix_end uint) {
var i uint var i uint
for i = ix_start; i < ix_end; i++ { for i = ix_start; i < ix_end; i++ {
StoreH54(handle, data, mask, i) h.Store(data, mask, i)
} }
} }
func (h *H54) StitchToPreviousBlock(num_bytes uint, position uint, ringbuffer []byte, ringbuffer_mask uint) { func (h *H54) StitchToPreviousBlock(num_bytes uint, position uint, ringbuffer []byte, ringbuffer_mask uint) {
if num_bytes >= HashTypeLengthH54()-1 && position >= 3 { if num_bytes >= h.HashTypeLength()-1 && position >= 3 {
/* Prepare the hashes for three last bytes of the last write. /* Prepare the hashes for three last bytes of the last write.
These could not be calculated before, since they require knowledge These could not be calculated before, since they require knowledge
of both the previous and the current block. */ of both the previous and the current block. */
StoreH54(h, ringbuffer, ringbuffer_mask, position-3) h.Store(ringbuffer, ringbuffer_mask, position-3)
StoreH54(h, ringbuffer, ringbuffer_mask, position-2) h.Store(ringbuffer, ringbuffer_mask, position-2)
StoreH54(h, ringbuffer, ringbuffer_mask, position-1) h.Store(ringbuffer, ringbuffer_mask, position-1)
} }
} }
func PrepareDistanceCacheH54(handle HasherHandle, distance_cache []int) { func (*H54) PrepareDistanceCache(distance_cache []int) {
} }
/* Find a longest backward match of &data[cur_ix & ring_buffer_mask] /* Find a longest backward match of &data[cur_ix & ring_buffer_mask]
@ -102,8 +101,7 @@ func PrepareDistanceCacheH54(handle HasherHandle, distance_cache []int) {
Does not look for matches further away than max_backward. Does not look for matches further away than max_backward.
Writes the best match into |out|. Writes the best match into |out|.
|out|->score is updated only if a better match is found. */ |out|->score is updated only if a better match is found. */
func FindLongestMatchH54(handle HasherHandle, dictionary *BrotliEncoderDictionary, data []byte, ring_buffer_mask uint, distance_cache []int, cur_ix uint, max_length uint, max_backward uint, gap uint, max_distance uint, out *HasherSearchResult) { func (h *H54) FindLongestMatch(dictionary *BrotliEncoderDictionary, data []byte, ring_buffer_mask uint, distance_cache []int, cur_ix uint, max_length uint, max_backward uint, gap uint, max_distance uint, out *HasherSearchResult) {
var self *H54 = SelfH54(handle)
var best_len_in uint = out.len var best_len_in uint = out.len
var cur_ix_masked uint = cur_ix & ring_buffer_mask var cur_ix_masked uint = cur_ix & ring_buffer_mask
var key uint32 = HashBytesH54(data[cur_ix_masked:]) var key uint32 = HashBytesH54(data[cur_ix_masked:])
@ -128,7 +126,7 @@ func FindLongestMatchH54(handle HasherHandle, dictionary *BrotliEncoderDictionar
out.score = best_score out.score = best_score
compare_char = int(data[cur_ix_masked+best_len]) compare_char = int(data[cur_ix_masked+best_len])
if 4 == 1 { if 4 == 1 {
self.buckets_[key] = uint32(cur_ix) h.buckets_[key] = uint32(cur_ix)
return return
} }
} }
@ -141,9 +139,9 @@ func FindLongestMatchH54(handle HasherHandle, dictionary *BrotliEncoderDictionar
var len uint var len uint
/* Only one to look for, don't bother to prepare for a loop. */ /* Only one to look for, don't bother to prepare for a loop. */
prev_ix = uint(self.buckets_[key]) prev_ix = uint(h.buckets_[key])
self.buckets_[key] = uint32(cur_ix) h.buckets_[key] = uint32(cur_ix)
backward = cur_ix - prev_ix backward = cur_ix - prev_ix
prev_ix &= uint(uint32(ring_buffer_mask)) prev_ix &= uint(uint32(ring_buffer_mask))
if compare_char != int(data[prev_ix+best_len_in]) { if compare_char != int(data[prev_ix+best_len_in]) {
@ -165,7 +163,7 @@ func FindLongestMatchH54(handle HasherHandle, dictionary *BrotliEncoderDictionar
} }
} }
} else { } else {
bucket = self.buckets_[key:] bucket = h.buckets_[key:]
var i int var i int
prev_ix = uint(bucket[0]) prev_ix = uint(bucket[0])
bucket = bucket[1:] bucket = bucket[1:]
@ -196,5 +194,5 @@ func FindLongestMatchH54(handle HasherHandle, dictionary *BrotliEncoderDictionar
} }
} }
self.buckets_[key+uint32((cur_ix>>3)%4)] = uint32(cur_ix) h.buckets_[key+uint32((cur_ix>>3)%4)] = uint32(cur_ix)
} }
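H54 hashes 5 bytes but "wiggles" the slot it writes within a small bucket sweep: the slot is picked from bits of the position, so nearby positions spread over 4 adjacent entries instead of repeatedly overwriting one. A toy version of that store/lookup with an arbitrary table size (sweepHasher and sweep are illustrative names only, not the real type):

package main

import "fmt"

const sweep = 4 // hypothetical sweep range

type sweepHasher struct {
    buckets [1<<8 + sweep]uint32 // hypothetical bucket count plus sweep slack
}

// store writes position ix into one of the sweep slots that belong to key.
func (h *sweepHasher) store(key uint32, ix uint) {
    off := uint32(ix>>3) % sweep // wiggle within the sweep range
    h.buckets[key+off] = uint32(ix)
}

// lookup returns every position remembered in the sweep window for key.
func (h *sweepHasher) lookup(key uint32) []uint32 {
    out := make([]uint32, sweep)
    copy(out, h.buckets[key:key+sweep])
    return out
}

func main() {
    var h sweepHasher
    for _, ix := range []uint{8, 16, 24, 32, 40} {
        h.store(100, ix)
    }
    fmt.Println(h.lookup(100)) // [32 40 16 24]: recent positions spread over the sweep
}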

h55.go

@ -9,9 +9,9 @@ package brotli
/* Composite hasher: This hasher allows to combine two other hashers, HASHER_A /* Composite hasher: This hasher allows to combine two other hashers, HASHER_A
and HASHER_B. */ and HASHER_B. */
func HashTypeLengthH55() uint { func (h *H55) HashTypeLength() uint {
var a uint = HashTypeLengthH54() var a uint = h.ha.HashTypeLength()
var b uint = HashTypeLengthHROLLING_FAST() var b uint = h.hb.HashTypeLength()
if a > b { if a > b {
return a return a
} else { } else {
@ -19,9 +19,9 @@ func HashTypeLengthH55() uint {
} }
} }
func StoreLookaheadH55() uint { func (h *H55) StoreLookahead() uint {
var a uint = StoreLookaheadH54() var a uint = h.ha.StoreLookahead()
var b uint = StoreLookaheadHROLLING_FAST() var b uint = h.hb.StoreLookahead()
if a > b { if a > b {
return a return a
} else { } else {
@ -76,16 +76,14 @@ func (h *H55) Prepare(one_shot bool, input_size uint, data []byte) {
h.hb.Prepare(one_shot, input_size, data) h.hb.Prepare(one_shot, input_size, data)
} }
func StoreH55(handle HasherHandle, data []byte, mask uint, ix uint) { func (h *H55) Store(data []byte, mask uint, ix uint) {
var self *H55 = SelfH55(handle) h.ha.Store(data, mask, ix)
StoreH54(self.ha, data, mask, ix) h.hb.Store(data, mask, ix)
StoreHROLLING_FAST(self.hb, data, mask, ix)
} }
func StoreRangeH55(handle HasherHandle, data []byte, mask uint, ix_start uint, ix_end uint) { func (h *H55) StoreRange(data []byte, mask uint, ix_start uint, ix_end uint) {
var self *H55 = SelfH55(handle) h.ha.StoreRange(data, mask, ix_start, ix_end)
StoreRangeH54(self.ha, data, mask, ix_start, ix_end) h.hb.StoreRange(data, mask, ix_start, ix_end)
StoreRangeHROLLING_FAST(self.hb, data, mask, ix_start, ix_end)
} }
func (h *H55) StitchToPreviousBlock(num_bytes uint, position uint, ringbuffer []byte, ring_buffer_mask uint) { func (h *H55) StitchToPreviousBlock(num_bytes uint, position uint, ringbuffer []byte, ring_buffer_mask uint) {
@ -93,14 +91,12 @@ func (h *H55) StitchToPreviousBlock(num_bytes uint, position uint, ringbuffer []
h.hb.StitchToPreviousBlock(num_bytes, position, ringbuffer, ring_buffer_mask) h.hb.StitchToPreviousBlock(num_bytes, position, ringbuffer, ring_buffer_mask)
} }
func PrepareDistanceCacheH55(handle HasherHandle, distance_cache []int) { func (h *H55) PrepareDistanceCache(distance_cache []int) {
var self *H55 = SelfH55(handle) h.ha.PrepareDistanceCache(distance_cache)
PrepareDistanceCacheH54(self.ha, distance_cache) h.hb.PrepareDistanceCache(distance_cache)
PrepareDistanceCacheHROLLING_FAST(self.hb, &distance_cache[0])
} }
func FindLongestMatchH55(handle HasherHandle, dictionary *BrotliEncoderDictionary, data []byte, ring_buffer_mask uint, distance_cache []int, cur_ix uint, max_length uint, max_backward uint, gap uint, max_distance uint, out *HasherSearchResult) { func (h *H55) FindLongestMatch(dictionary *BrotliEncoderDictionary, data []byte, ring_buffer_mask uint, distance_cache []int, cur_ix uint, max_length uint, max_backward uint, gap uint, max_distance uint, out *HasherSearchResult) {
var self *H55 = SelfH55(handle) h.ha.FindLongestMatch(dictionary, data, ring_buffer_mask, distance_cache, cur_ix, max_length, max_backward, gap, max_distance, out)
FindLongestMatchH54(self.ha, dictionary, data, ring_buffer_mask, distance_cache, cur_ix, max_length, max_backward, gap, max_distance, out) h.hb.FindLongestMatch(dictionary, data, ring_buffer_mask, distance_cache, cur_ix, max_length, max_backward, gap, max_distance, out)
FindLongestMatchHROLLING_FAST(self.hb, dictionary, data, ring_buffer_mask, &distance_cache[0], cur_ix, max_length, max_backward, gap, max_distance, out)
} }
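With every sub-hasher now exposing the same method set, the composite hashers no longer need per-type helper functions: (*H55) above (and (*H65) later) simply forward each call to both members. A minimal sketch of that delegation pattern using stand-in types (subHasher, namedHasher and composite are hypothetical, not this package's identifiers):

package main

import "fmt"

// subHasher is the slice of the method set this sketch needs.
type subHasher interface {
    Store(data []byte, mask uint, ix uint)
    StoreRange(data []byte, mask uint, ixStart, ixEnd uint)
}

type namedHasher struct{ name string }

func (n *namedHasher) Store(data []byte, mask uint, ix uint) {
    fmt.Printf("%s stored byte %q at position %d\n", n.name, data[ix&mask], ix)
}

func (n *namedHasher) StoreRange(data []byte, mask uint, ixStart, ixEnd uint) {
    for i := ixStart; i < ixEnd; i++ {
        n.Store(data, mask, i)
    }
}

// composite fans every call out to both sub-hashers, the same shape as the
// new (*H55) methods above.
type composite struct{ ha, hb subHasher }

func (c *composite) Store(data []byte, mask uint, ix uint) {
    c.ha.Store(data, mask, ix)
    c.hb.Store(data, mask, ix)
}

func (c *composite) StoreRange(data []byte, mask uint, ixStart, ixEnd uint) {
    c.ha.StoreRange(data, mask, ixStart, ixEnd)
    c.hb.StoreRange(data, mask, ixStart, ixEnd)
}

func main() {
    c := &composite{ha: &namedHasher{name: "ha"}, hb: &namedHasher{name: "hb"}}
    c.StoreRange([]byte("brotli!!"), 7, 0, 2)
}

The real composite keeps its two sub-hashers in its ha and hb fields and forwards to them the same way; the interface here only keeps the example self-contained.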

h6.go

@ -13,11 +13,11 @@ package brotli
This is a hash map of fixed size (bucket_size_) to a ring buffer of This is a hash map of fixed size (bucket_size_) to a ring buffer of
fixed size (block_size_). The ring buffer contains the last block_size_ fixed size (block_size_). The ring buffer contains the last block_size_
index positions of the given hash key in the compressed data. */ index positions of the given hash key in the compressed data. */
func HashTypeLengthH6() uint { func (*H6) HashTypeLength() uint {
return 8 return 8
} }
func StoreLookaheadH6() uint { func (*H6) StoreLookahead() uint {
return 8 return 8
} }
@ -82,37 +82,35 @@ func (h *H6) Prepare(one_shot bool, input_size uint, data []byte) {
/* Look at 4 bytes at &data[ix & mask]. /* Look at 4 bytes at &data[ix & mask].
Compute a hash from these, and store the value of ix at that position. */ Compute a hash from these, and store the value of ix at that position. */
func StoreH6(handle HasherHandle, data []byte, mask uint, ix uint) { func (h *H6) Store(data []byte, mask uint, ix uint) {
var self *H6 = SelfH6(handle) var num []uint16 = h.num
var num []uint16 = NumH6(self) var key uint32 = HashBytesH6(data[ix&mask:], h.hash_mask_, h.hash_shift_)
var key uint32 = HashBytesH6(data[ix&mask:], self.hash_mask_, self.hash_shift_) var minor_ix uint = uint(num[key]) & uint(h.block_mask_)
var minor_ix uint = uint(num[key]) & uint(self.block_mask_) var offset uint = minor_ix + uint(key<<uint(h.params.block_bits))
var offset uint = minor_ix + uint(key<<uint(handle.Common().params.block_bits)) h.buckets[offset] = uint32(ix)
BucketsH6(self)[offset] = uint32(ix)
num[key]++ num[key]++
} }
func StoreRangeH6(handle HasherHandle, data []byte, mask uint, ix_start uint, ix_end uint) { func (h *H6) StoreRange(data []byte, mask uint, ix_start uint, ix_end uint) {
var i uint var i uint
for i = ix_start; i < ix_end; i++ { for i = ix_start; i < ix_end; i++ {
StoreH6(handle, data, mask, i) h.Store(data, mask, i)
} }
} }
func (h *H6) StitchToPreviousBlock(num_bytes uint, position uint, ringbuffer []byte, ringbuffer_mask uint) { func (h *H6) StitchToPreviousBlock(num_bytes uint, position uint, ringbuffer []byte, ringbuffer_mask uint) {
if num_bytes >= HashTypeLengthH6()-1 && position >= 3 { if num_bytes >= h.HashTypeLength()-1 && position >= 3 {
/* Prepare the hashes for three last bytes of the last write. /* Prepare the hashes for three last bytes of the last write.
These could not be calculated before, since they require knowledge These could not be calculated before, since they require knowledge
of both the previous and the current block. */ of both the previous and the current block. */
StoreH6(h, ringbuffer, ringbuffer_mask, position-3) h.Store(ringbuffer, ringbuffer_mask, position-3)
StoreH6(h, ringbuffer, ringbuffer_mask, position-2) h.Store(ringbuffer, ringbuffer_mask, position-2)
StoreH6(h, ringbuffer, ringbuffer_mask, position-1) h.Store(ringbuffer, ringbuffer_mask, position-1)
} }
} }
func PrepareDistanceCacheH6(handle HasherHandle, distance_cache []int) { func (h *H6) PrepareDistanceCache(distance_cache []int) {
PrepareDistanceCache(distance_cache, handle.Common().params.num_last_distances_to_check) PrepareDistanceCache(distance_cache, h.params.num_last_distances_to_check)
} }
/* Find a longest backward match of &data[cur_ix] up to the length of /* Find a longest backward match of &data[cur_ix] up to the length of
@ -126,11 +124,9 @@ func PrepareDistanceCacheH6(handle HasherHandle, distance_cache []int) {
Does not look for matches further away than max_backward. Does not look for matches further away than max_backward.
Writes the best match into |out|. Writes the best match into |out|.
|out|->score is updated only if a better match is found. */ |out|->score is updated only if a better match is found. */
func FindLongestMatchH6(handle HasherHandle, dictionary *BrotliEncoderDictionary, data []byte, ring_buffer_mask uint, distance_cache []int, cur_ix uint, max_length uint, max_backward uint, gap uint, max_distance uint, out *HasherSearchResult) { func (h *H6) FindLongestMatch(dictionary *BrotliEncoderDictionary, data []byte, ring_buffer_mask uint, distance_cache []int, cur_ix uint, max_length uint, max_backward uint, gap uint, max_distance uint, out *HasherSearchResult) {
var common *HasherCommon = handle.Common() var num []uint16 = h.num
var self *H6 = SelfH6(handle) var buckets []uint32 = h.buckets
var num []uint16 = NumH6(self)
var buckets []uint32 = BucketsH6(self)
var cur_ix_masked uint = cur_ix & ring_buffer_mask var cur_ix_masked uint = cur_ix & ring_buffer_mask
var min_score uint = out.score var min_score uint = out.score
var best_score uint = out.score var best_score uint = out.score
@ -143,7 +139,7 @@ func FindLongestMatchH6(handle HasherHandle, dictionary *BrotliEncoderDictionary
out.len_code_delta = 0 out.len_code_delta = 0
/* Try last distance first. */ /* Try last distance first. */
for i = 0; i < uint(common.params.num_last_distances_to_check); i++ { for i = 0; i < uint(h.params.num_last_distances_to_check); i++ {
var backward uint = uint(distance_cache[i]) var backward uint = uint(distance_cache[i])
var prev_ix uint = uint(cur_ix - backward) var prev_ix uint = uint(cur_ix - backward)
if prev_ix >= cur_ix { if prev_ix >= cur_ix {
@ -182,18 +178,18 @@ func FindLongestMatchH6(handle HasherHandle, dictionary *BrotliEncoderDictionary
} }
} }
{ {
var key uint32 = HashBytesH6(data[cur_ix_masked:], self.hash_mask_, self.hash_shift_) var key uint32 = HashBytesH6(data[cur_ix_masked:], h.hash_mask_, h.hash_shift_)
bucket = buckets[key<<uint(common.params.block_bits):] bucket = buckets[key<<uint(h.params.block_bits):]
var down uint var down uint
if uint(num[key]) > self.block_size_ { if uint(num[key]) > h.block_size_ {
down = uint(num[key]) - self.block_size_ down = uint(num[key]) - h.block_size_
} else { } else {
down = 0 down = 0
} }
for i = uint(num[key]); i > down; { for i = uint(num[key]); i > down; {
var prev_ix uint var prev_ix uint
i-- i--
prev_ix = uint(bucket[uint32(i)&self.block_mask_]) prev_ix = uint(bucket[uint32(i)&h.block_mask_])
var backward uint = cur_ix - prev_ix var backward uint = cur_ix - prev_ix
if backward > max_backward { if backward > max_backward {
break break
@ -221,11 +217,11 @@ func FindLongestMatchH6(handle HasherHandle, dictionary *BrotliEncoderDictionary
} }
} }
bucket[uint32(num[key])&self.block_mask_] = uint32(cur_ix) bucket[uint32(num[key])&h.block_mask_] = uint32(cur_ix)
num[key]++ num[key]++
} }
if min_score == out.score { if min_score == out.score {
SearchInStaticDictionary(dictionary, handle, data[cur_ix_masked:], max_length, max_backward+gap, max_distance, out, false) SearchInStaticDictionary(dictionary, h, data[cur_ix_masked:], max_length, max_backward+gap, max_distance, out, false)
} }
} }

h65.go

@ -9,9 +9,9 @@ package brotli
/* Composite hasher: This hasher allows to combine two other hashers, HASHER_A /* Composite hasher: This hasher allows to combine two other hashers, HASHER_A
and HASHER_B. */ and HASHER_B. */
func HashTypeLengthH65() uint { func (h *H65) HashTypeLength() uint {
var a uint = HashTypeLengthH6() var a uint = h.ha.HashTypeLength()
var b uint = HashTypeLengthHROLLING() var b uint = h.hb.HashTypeLength()
if a > b { if a > b {
return a return a
} else { } else {
@ -19,9 +19,9 @@ func HashTypeLengthH65() uint {
} }
} }
func StoreLookaheadH65() uint { func (h *H65) StoreLookahead() uint {
var a uint = StoreLookaheadH6() var a uint = h.ha.StoreLookahead()
var b uint = StoreLookaheadHROLLING() var b uint = h.hb.StoreLookahead()
if a > b { if a > b {
return a return a
} else { } else {
@ -76,16 +76,14 @@ func (h *H65) Prepare(one_shot bool, input_size uint, data []byte) {
h.hb.Prepare(one_shot, input_size, data) h.hb.Prepare(one_shot, input_size, data)
} }
func StoreH65(handle HasherHandle, data []byte, mask uint, ix uint) { func (h *H65) Store(data []byte, mask uint, ix uint) {
var self *H65 = SelfH65(handle) h.ha.Store(data, mask, ix)
StoreH6(self.ha, data, mask, ix) h.hb.Store(data, mask, ix)
StoreHROLLING(self.hb, data, mask, ix)
} }
func StoreRangeH65(handle HasherHandle, data []byte, mask uint, ix_start uint, ix_end uint) { func (h *H65) StoreRange(data []byte, mask uint, ix_start uint, ix_end uint) {
var self *H65 = SelfH65(handle) h.ha.StoreRange(data, mask, ix_start, ix_end)
StoreRangeH6(self.ha, data, mask, ix_start, ix_end) h.hb.StoreRange(data, mask, ix_start, ix_end)
StoreRangeHROLLING(self.hb, data, mask, ix_start, ix_end)
} }
func (h *H65) StitchToPreviousBlock(num_bytes uint, position uint, ringbuffer []byte, ring_buffer_mask uint) { func (h *H65) StitchToPreviousBlock(num_bytes uint, position uint, ringbuffer []byte, ring_buffer_mask uint) {
@ -93,14 +91,12 @@ func (h *H65) StitchToPreviousBlock(num_bytes uint, position uint, ringbuffer []
h.hb.StitchToPreviousBlock(num_bytes, position, ringbuffer, ring_buffer_mask) h.hb.StitchToPreviousBlock(num_bytes, position, ringbuffer, ring_buffer_mask)
} }
func PrepareDistanceCacheH65(handle HasherHandle, distance_cache []int) { func (h *H65) PrepareDistanceCache(distance_cache []int) {
var self *H65 = SelfH65(handle) h.ha.PrepareDistanceCache(distance_cache)
PrepareDistanceCacheH6(self.ha, distance_cache) h.hb.PrepareDistanceCache(distance_cache)
PrepareDistanceCacheHROLLING(self.hb, &distance_cache[0])
} }
func FindLongestMatchH65(handle HasherHandle, dictionary *BrotliEncoderDictionary, data []byte, ring_buffer_mask uint, distance_cache []int, cur_ix uint, max_length uint, max_backward uint, gap uint, max_distance uint, out *HasherSearchResult) { func (h *H65) FindLongestMatch(dictionary *BrotliEncoderDictionary, data []byte, ring_buffer_mask uint, distance_cache []int, cur_ix uint, max_length uint, max_backward uint, gap uint, max_distance uint, out *HasherSearchResult) {
var self *H65 = SelfH65(handle) h.ha.FindLongestMatch(dictionary, data, ring_buffer_mask, distance_cache, cur_ix, max_length, max_backward, gap, max_distance, out)
FindLongestMatchH6(self.ha, dictionary, data, ring_buffer_mask, distance_cache, cur_ix, max_length, max_backward, gap, max_distance, out) h.hb.FindLongestMatch(dictionary, data, ring_buffer_mask, distance_cache, cur_ix, max_length, max_backward, gap, max_distance, out)
FindLongestMatchHROLLING(self.hb, dictionary, data, ring_buffer_mask, &distance_cache[0], cur_ix, max_length, max_backward, gap, max_distance, out)
} }


@ -34,6 +34,12 @@ type HasherHandle interface {
Initialize(params *BrotliEncoderParams) Initialize(params *BrotliEncoderParams)
Prepare(one_shot bool, input_size uint, data []byte) Prepare(one_shot bool, input_size uint, data []byte)
StitchToPreviousBlock(num_bytes uint, position uint, ringbuffer []byte, ringbuffer_mask uint) StitchToPreviousBlock(num_bytes uint, position uint, ringbuffer []byte, ringbuffer_mask uint)
HashTypeLength() uint
StoreLookahead() uint
PrepareDistanceCache(distance_cache []int)
FindLongestMatch(dictionary *BrotliEncoderDictionary, data []byte, ring_buffer_mask uint, distance_cache []int, cur_ix uint, max_length uint, max_backward uint, gap uint, max_distance uint, out *HasherSearchResult)
StoreRange(data []byte, mask uint, ix_start uint, ix_end uint)
Store(data []byte, mask uint, ix uint)
} }
type score_t uint type score_t uint
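This interface change is the point of the commit: once Store, StoreRange, StoreLookahead, HashTypeLength, PrepareDistanceCache and FindLongestMatch are part of HasherHandle, generic encoder code can drive whichever hasher it was handed through the interface value instead of dispatching on a numeric hasher type. A self-contained sketch of that shape, using hypothetical stand-ins (hasher, toyHasher and storeTail are not the encoder's real identifiers):

package main

import "fmt"

// hasher is a cut-down stand-in for the enlarged handle interface.
type hasher interface {
    StoreLookahead() uint
    Store(data []byte, mask uint, ix uint)
}

// toyHasher exists only to make the example runnable.
type toyHasher struct{ stored []uint }

func (*toyHasher) StoreLookahead() uint { return 4 }
func (h *toyHasher) Store(data []byte, mask uint, ix uint) {
    h.stored = append(h.stored, ix)
}

// storeTail stores every position that still has a full lookahead window,
// using only interface methods, with no knowledge of the concrete hasher.
func storeTail(h hasher, data []byte) {
    if uint(len(data)) < h.StoreLookahead() {
        return
    }
    end := uint(len(data)) - h.StoreLookahead() + 1
    for ix := uint(0); ix < end; ix++ {
        // The data length is a power of two here, so len-1 works as the mask.
        h.Store(data, uint(len(data))-1, ix)
    }
}

func main() {
    h := &toyHasher{}
    storeTail(h, []byte("abcdefgh"))
    fmt.Println(h.stored) // [0 1 2 3 4]
}

Each concrete hasher converted in the files above plays the role of toyHasher here.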


@ -18,11 +18,11 @@ var kInvalidPosHROLLING uint32 = 0xffffffff
/* This hasher uses a longer forward length, but returning a higher value here /* This hasher uses a longer forward length, but returning a higher value here
will hurt compression by the main hasher when combined with a composite will hurt compression by the main hasher when combined with a composite
hasher. The hasher tests for forward itself instead. */ hasher. The hasher tests for forward itself instead. */
func HashTypeLengthHROLLING() uint { func (*HROLLING) HashTypeLength() uint {
return 4 return 4
} }
func StoreLookaheadHROLLING() uint { func (*HROLLING) StoreLookahead() uint {
return 4 return 4
} }
@ -88,10 +88,10 @@ func (h *HROLLING) Prepare(one_shot bool, input_size uint, data []byte) {
} }
} }
func StoreHROLLING(handle HasherHandle, data []byte, mask uint, ix uint) { func (*HROLLING) Store(data []byte, mask uint, ix uint) {
} }
func StoreRangeHROLLING(handle HasherHandle, data []byte, mask uint, ix_start uint, ix_end uint) { func (*HROLLING) StoreRange(data []byte, mask uint, ix_start uint, ix_end uint) {
} }
func (h *HROLLING) StitchToPreviousBlock(num_bytes uint, position uint, ringbuffer []byte, ring_buffer_mask uint) { func (h *HROLLING) StitchToPreviousBlock(num_bytes uint, position uint, ringbuffer []byte, ring_buffer_mask uint) {
@ -121,13 +121,12 @@ func (h *HROLLING) StitchToPreviousBlock(num_bytes uint, position uint, ringbuff
h.next_ix = position h.next_ix = position
} }
func PrepareDistanceCacheHROLLING(handle HasherHandle, distance_cache *int) { func (*HROLLING) PrepareDistanceCache(distance_cache []int) {
} }
func FindLongestMatchHROLLING(handle HasherHandle, dictionary *BrotliEncoderDictionary, data []byte, ring_buffer_mask uint, distance_cache *int, cur_ix uint, max_length uint, max_backward uint, gap uint, max_distance uint, out *HasherSearchResult) { func (h *HROLLING) FindLongestMatch(dictionary *BrotliEncoderDictionary, data []byte, ring_buffer_mask uint, distance_cache []int, cur_ix uint, max_length uint, max_backward uint, gap uint, max_distance uint, out *HasherSearchResult) {
var self *HROLLING = SelfHROLLING(handle)
var cur_ix_masked uint = cur_ix & ring_buffer_mask var cur_ix_masked uint = cur_ix & ring_buffer_mask
var pos uint = self.next_ix var pos uint = h.next_ix
if cur_ix&(1-1) != 0 { if cur_ix&(1-1) != 0 {
return return
@ -138,17 +137,17 @@ func FindLongestMatchHROLLING(handle HasherHandle, dictionary *BrotliEncoderDict
return return
} }
for pos = self.next_ix; pos <= cur_ix; pos += 1 { for pos = h.next_ix; pos <= cur_ix; pos += 1 {
var code uint32 = self.state & ((16777216 * 64) - 1) var code uint32 = h.state & ((16777216 * 64) - 1)
var rem byte = data[pos&ring_buffer_mask] var rem byte = data[pos&ring_buffer_mask]
var add byte = data[(pos+32)&ring_buffer_mask] var add byte = data[(pos+32)&ring_buffer_mask]
var found_ix uint = uint(kInvalidPosHROLLING) var found_ix uint = uint(kInvalidPosHROLLING)
self.state = HashRollingFunctionHROLLING(self.state, add, rem, self.factor, self.factor_remove) h.state = HashRollingFunctionHROLLING(h.state, add, rem, h.factor, h.factor_remove)
if code < 16777216 { if code < 16777216 {
found_ix = uint(self.table[code]) found_ix = uint(h.table[code])
self.table[code] = uint32(pos) h.table[code] = uint32(pos)
if pos == cur_ix && uint32(found_ix) != kInvalidPosHROLLING { if pos == cur_ix && uint32(found_ix) != kInvalidPosHROLLING {
/* The cast to 32-bit makes backward distances up to 4GB work even /* The cast to 32-bit makes backward distances up to 4GB work even
if cur_ix is above 4GB, despite using 32-bit values in the table. */ if cur_ix is above 4GB, despite using 32-bit values in the table. */
@ -170,5 +169,5 @@ func FindLongestMatchHROLLING(handle HasherHandle, dictionary *BrotliEncoderDict
} }
} }
self.next_ix = cur_ix + 1 h.next_ix = cur_ix + 1
} }
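HROLLING updates its hash incrementally as the window slides: at each step one byte enters, one leaves, and the state is adjusted in constant time rather than rehashing the whole window. The sketch below is a generic polynomial rolling hash of the same shape; the multiplier, window size and names (roll, hashWindow, factorRemove) are arbitrary illustrations and not the encoder's actual parameters:

package main

import "fmt"

const (
    factor     = uint32(16807) // arbitrary multiplier for the sketch
    windowSize = 8             // arbitrary window length
)

// factorRemove is factor^windowSize, the weight the outgoing byte carries.
func factorRemove() uint32 {
    f := uint32(1)
    for i := 0; i < windowSize; i++ {
        f *= factor
    }
    return f
}

// roll slides the window one byte to the right: add enters, rem leaves.
func roll(state uint32, add, rem byte, fRemove uint32) uint32 {
    return state*factor + uint32(add) - fRemove*uint32(rem)
}

// hashWindow computes the hash of data[pos:pos+windowSize] from scratch.
func hashWindow(data []byte, pos int) uint32 {
    var state uint32
    for i := 0; i < windowSize; i++ {
        state = state*factor + uint32(data[pos+i])
    }
    return state
}

func main() {
    data := []byte("the quick brown fox jumps")
    fRemove := factorRemove()
    state := hashWindow(data, 0)
    for pos := 1; pos+windowSize <= len(data); pos++ {
        state = roll(state, data[pos+windowSize-1], data[pos-1], fRemove)
    }
    // The incrementally rolled state matches a from-scratch hash of the
    // final window.
    fmt.Println(state == hashWindow(data, len(data)-windowSize)) // true
}

HROLLING_FAST, in the next file, is the same scheme evaluated only at every fourth position.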


@ -16,11 +16,11 @@ var kInvalidPosHROLLING_FAST uint32 = 0xffffffff
/* This hasher uses a longer forward length, but returning a higher value here /* This hasher uses a longer forward length, but returning a higher value here
will hurt compression by the main hasher when combined with a composite will hurt compression by the main hasher when combined with a composite
hasher. The hasher tests for forward itself instead. */ hasher. The hasher tests for forward itself instead. */
func HashTypeLengthHROLLING_FAST() uint { func (*HROLLING_FAST) HashTypeLength() uint {
return 4 return 4
} }
func StoreLookaheadHROLLING_FAST() uint { func (*HROLLING_FAST) StoreLookahead() uint {
return 4 return 4
} }
@ -86,10 +86,10 @@ func (h *HROLLING_FAST) Prepare(one_shot bool, input_size uint, data []byte) {
} }
} }
func StoreHROLLING_FAST(handle HasherHandle, data []byte, mask uint, ix uint) { func (*HROLLING_FAST) Store(data []byte, mask uint, ix uint) {
} }
func StoreRangeHROLLING_FAST(handle HasherHandle, data []byte, mask uint, ix_start uint, ix_end uint) { func (*HROLLING_FAST) StoreRange(data []byte, mask uint, ix_start uint, ix_end uint) {
} }
func (h *HROLLING_FAST) StitchToPreviousBlock(num_bytes uint, position uint, ringbuffer []byte, ring_buffer_mask uint) { func (h *HROLLING_FAST) StitchToPreviousBlock(num_bytes uint, position uint, ringbuffer []byte, ring_buffer_mask uint) {
@ -119,13 +119,12 @@ func (h *HROLLING_FAST) StitchToPreviousBlock(num_bytes uint, position uint, rin
h.next_ix = position h.next_ix = position
} }
func PrepareDistanceCacheHROLLING_FAST(handle HasherHandle, distance_cache *int) { func (*HROLLING_FAST) PrepareDistanceCache(distance_cache []int) {
} }
func FindLongestMatchHROLLING_FAST(handle HasherHandle, dictionary *BrotliEncoderDictionary, data []byte, ring_buffer_mask uint, distance_cache *int, cur_ix uint, max_length uint, max_backward uint, gap uint, max_distance uint, out *HasherSearchResult) { func (h *HROLLING_FAST) FindLongestMatch(dictionary *BrotliEncoderDictionary, data []byte, ring_buffer_mask uint, distance_cache []int, cur_ix uint, max_length uint, max_backward uint, gap uint, max_distance uint, out *HasherSearchResult) {
var self *HROLLING_FAST = SelfHROLLING_FAST(handle)
var cur_ix_masked uint = cur_ix & ring_buffer_mask var cur_ix_masked uint = cur_ix & ring_buffer_mask
var pos uint = self.next_ix var pos uint = h.next_ix
if cur_ix&(4-1) != 0 { if cur_ix&(4-1) != 0 {
return return
@ -136,17 +135,17 @@ func FindLongestMatchHROLLING_FAST(handle HasherHandle, dictionary *BrotliEncode
return return
} }
for pos = self.next_ix; pos <= cur_ix; pos += 4 { for pos = h.next_ix; pos <= cur_ix; pos += 4 {
var code uint32 = self.state & ((16777216 * 64) - 1) var code uint32 = h.state & ((16777216 * 64) - 1)
var rem byte = data[pos&ring_buffer_mask] var rem byte = data[pos&ring_buffer_mask]
var add byte = data[(pos+32)&ring_buffer_mask] var add byte = data[(pos+32)&ring_buffer_mask]
var found_ix uint = uint(kInvalidPosHROLLING_FAST) var found_ix uint = uint(kInvalidPosHROLLING_FAST)
self.state = HashRollingFunctionHROLLING_FAST(self.state, add, rem, self.factor, self.factor_remove) h.state = HashRollingFunctionHROLLING_FAST(h.state, add, rem, h.factor, h.factor_remove)
if code < 16777216 { if code < 16777216 {
found_ix = uint(self.table[code]) found_ix = uint(h.table[code])
self.table[code] = uint32(pos) h.table[code] = uint32(pos)
if pos == cur_ix && uint32(found_ix) != kInvalidPosHROLLING_FAST { if pos == cur_ix && uint32(found_ix) != kInvalidPosHROLLING_FAST {
/* The cast to 32-bit makes backward distances up to 4GB work even /* The cast to 32-bit makes backward distances up to 4GB work even
if cur_ix is above 4GB, despite using 32-bit values in the table. */ if cur_ix is above 4GB, despite using 32-bit values in the table. */
@ -168,5 +167,5 @@ func FindLongestMatchHROLLING_FAST(handle HasherHandle, dictionary *BrotliEncode
} }
} }
self.next_ix = cur_ix + 4 h.next_ix = cur_ix + 4
} }