Skip to content

Commit

Permalink
Merge pull request #198 from nyurik/simplify-compress-frg
Browse files Browse the repository at this point in the history
Minor compress_fragment cleanup
  • Loading branch information
danielrh authored May 19, 2024
2 parents 789bdd6 + 02c192a commit ef5d7be
Showing 1 changed file with 137 additions and 148 deletions.
285 changes: 137 additions & 148 deletions src/enc/compress_fragment.rs
Original file line number Diff line number Diff line change
Expand Up @@ -708,7 +708,7 @@ fn BrotliCompressFragmentFastImpl<AllocHT: alloc::Allocator<HuffmanTree>>(
storage_ix,
storage,
);
let mut code_block_selection: CodeBlockState = CodeBlockState::EMIT_COMMANDS;
let mut code_block_selection = CodeBlockState::EMIT_COMMANDS;
'continue_to_next_block: loop {
let mut ip_index: usize;
if code_block_selection == CodeBlockState::EMIT_COMMANDS {
Expand All @@ -734,155 +734,146 @@ fn BrotliCompressFragmentFastImpl<AllocHT: alloc::Allocator<HuffmanTree>>(
let mut next_ip = ip_index;
let mut candidate = 0usize;
loop {
{
'break15: loop {
{
let hash = next_hash;
let bytes_between_hash_lookups: u32 = skip >> 5;
skip = skip.wrapping_add(1);
ip_index = next_ip;
next_ip =
ip_index.wrapping_add(bytes_between_hash_lookups as usize);
if next_ip > ip_limit {
code_block_selection = CodeBlockState::EMIT_REMAINDER;
break 'break15;
}
next_hash = Hash(&input_ptr[next_ip..], shift);
candidate = ip_index.wrapping_sub(last_distance as usize);
if IsMatch(&input_ptr[ip_index..], &input_ptr[candidate..])
&& candidate < ip_index
{
table[hash as usize] =
ip_index.wrapping_sub(base_ip) as i32;
break 'break15;
}
candidate = base_ip.wrapping_add(table[hash as usize] as usize);
table[hash as usize] = ip_index.wrapping_sub(base_ip) as i32;
}
if IsMatch(&input_ptr[ip_index..], &input_ptr[candidate..]) {
break;
}
loop {
let hash = next_hash;
let bytes_between_hash_lookups: u32 = skip >> 5;
skip = skip.wrapping_add(1);
ip_index = next_ip;
next_ip = ip_index.wrapping_add(bytes_between_hash_lookups as usize);
if next_ip > ip_limit {
code_block_selection = CodeBlockState::EMIT_REMAINDER;
break;
}
next_hash = Hash(&input_ptr[next_ip..], shift);
candidate = ip_index.wrapping_sub(last_distance as usize);
if IsMatch(&input_ptr[ip_index..], &input_ptr[candidate..])
&& candidate < ip_index
{
table[hash as usize] = ip_index.wrapping_sub(base_ip) as i32;
break;
}
candidate = base_ip.wrapping_add(table[hash as usize] as usize);
table[hash as usize] = ip_index.wrapping_sub(base_ip) as i32;
if IsMatch(&input_ptr[ip_index..], &input_ptr[candidate..]) {
break;
}
}
if !(ip_index.wrapping_sub(candidate)
> (1usize << 18).wrapping_sub(16) as isize as usize
&& (code_block_selection as i32
== CodeBlockState::EMIT_COMMANDS as i32))
&& code_block_selection == CodeBlockState::EMIT_COMMANDS)
{
break;
}
}
if code_block_selection as i32 != CodeBlockState::EMIT_COMMANDS as i32 {
if code_block_selection != CodeBlockState::EMIT_COMMANDS {
break;
}
{
let base: usize = ip_index;
let matched = (5usize).wrapping_add(FindMatchLengthWithLimit(
&input_ptr[candidate 5..],
&input_ptr[ip_index 5..],
ip_end.wrapping_sub(ip_index).wrapping_sub(5),
));
let distance = base.wrapping_sub(candidate) as i32;
let insert = base.wrapping_sub(next_emit);
ip_index = ip_index.wrapping_add(matched);
if insert < 6210 {
EmitInsertLen(
insert,
cmd_depth,
cmd_bits,
&mut cmd_histo[..],
storage_ix,
storage,
);
} else if ShouldUseUncompressedMode(
(next_emit as isize) - (metablock_start as isize),
insert,
literal_ratio,
) {
EmitUncompressedMetaBlock(
&input_ptr[metablock_start..],
base.wrapping_sub(metablock_start),
mlen_storage_ix.wrapping_sub(3),
storage_ix,
storage,
);
input_size = input_size.wrapping_sub(base.wrapping_sub(input_index));
input_index = base;
next_emit = input_index;
code_block_selection = CodeBlockState::NEXT_BLOCK;
continue 'continue_to_next_block;
} else {
EmitLongInsertLen(
insert,
cmd_depth,
cmd_bits,
&mut cmd_histo[..],
storage_ix,
storage,
);
}
EmitLiterals(
&input_ptr[next_emit..],

let base: usize = ip_index;
let matched = (5usize).wrapping_add(FindMatchLengthWithLimit(
&input_ptr[candidate 5..],
&input_ptr[ip_index 5..],
ip_end.wrapping_sub(ip_index).wrapping_sub(5),
));
let distance = base.wrapping_sub(candidate) as i32;
let insert = base.wrapping_sub(next_emit);
ip_index = ip_index.wrapping_add(matched);
if insert < 6210 {
EmitInsertLen(
insert,
&mut lit_depth[..],
&mut lit_bits[..],
cmd_depth,
cmd_bits,
&mut cmd_histo[..],
storage_ix,
storage,
);
if distance == last_distance {
BrotliWriteBits(
cmd_depth[64] as usize,
cmd_bits[64] as u64,
storage_ix,
storage,
);
{
let _rhs = 1u32;
let _lhs = &mut cmd_histo[64];
*_lhs = (*_lhs).wrapping_add(_rhs);
}
} else {
EmitDistance(
distance as usize,
cmd_depth,
cmd_bits,
&mut cmd_histo[..],
storage_ix,
storage,
);
last_distance = distance;
}
EmitCopyLenLastDistance(
matched,
} else if ShouldUseUncompressedMode(
(next_emit as isize) - (metablock_start as isize),
insert,
literal_ratio,
) {
EmitUncompressedMetaBlock(
&input_ptr[metablock_start..],
base.wrapping_sub(metablock_start),
mlen_storage_ix.wrapping_sub(3),
storage_ix,
storage,
);
input_size = input_size.wrapping_sub(base.wrapping_sub(input_index));
input_index = base;
next_emit = input_index;
code_block_selection = CodeBlockState::NEXT_BLOCK;
continue 'continue_to_next_block;
} else {
EmitLongInsertLen(
insert,
cmd_depth,
cmd_bits,
&mut cmd_histo[..],
storage_ix,
storage,
);
next_emit = ip_index;
if ip_index >= ip_limit {
code_block_selection = CodeBlockState::EMIT_REMAINDER;
continue 'continue_to_next_block;
}
}
EmitLiterals(
&input_ptr[next_emit..],
insert,
&mut lit_depth[..],
&mut lit_bits[..],
storage_ix,
storage,
);
if distance == last_distance {
BrotliWriteBits(
cmd_depth[64] as usize,
cmd_bits[64] as u64,
storage_ix,
storage,
);
{
assert!(ip_index >= 3);
let input_bytes: u64 =
BROTLI_UNALIGNED_LOAD64(&input_ptr[ip_index - 3..]);
let mut prev_hash: u32 = HashBytesAtOffset(input_bytes, 0i32, shift);
let cur_hash: u32 = HashBytesAtOffset(input_bytes, 3i32, shift);
table[prev_hash as usize] =
ip_index.wrapping_sub(base_ip).wrapping_sub(3) as i32;
prev_hash = HashBytesAtOffset(input_bytes, 1i32, shift);
table[prev_hash as usize] =
ip_index.wrapping_sub(base_ip).wrapping_sub(2) as i32;
prev_hash = HashBytesAtOffset(input_bytes, 2i32, shift);
table[prev_hash as usize] =
ip_index.wrapping_sub(base_ip).wrapping_sub(1) as i32;
candidate = base_ip.wrapping_add(table[cur_hash as usize] as usize);
table[cur_hash as usize] = ip_index.wrapping_sub(base_ip) as i32;
let _rhs = 1u32;
let _lhs = &mut cmd_histo[64];
*_lhs = (*_lhs).wrapping_add(_rhs);
}
} else {
EmitDistance(
distance as usize,
cmd_depth,
cmd_bits,
&mut cmd_histo[..],
storage_ix,
storage,
);
last_distance = distance;
}
EmitCopyLenLastDistance(
matched,
cmd_depth,
cmd_bits,
&mut cmd_histo[..],
storage_ix,
storage,
);
next_emit = ip_index;
if ip_index >= ip_limit {
code_block_selection = CodeBlockState::EMIT_REMAINDER;
continue 'continue_to_next_block;
}

assert!(ip_index >= 3);
let input_bytes: u64 = BROTLI_UNALIGNED_LOAD64(&input_ptr[ip_index - 3..]);
let mut prev_hash: u32 = HashBytesAtOffset(input_bytes, 0, shift);
let cur_hash: u32 = HashBytesAtOffset(input_bytes, 3, shift);
table[prev_hash as usize] =
ip_index.wrapping_sub(base_ip).wrapping_sub(3) as i32;
prev_hash = HashBytesAtOffset(input_bytes, 1, shift);
table[prev_hash as usize] =
ip_index.wrapping_sub(base_ip).wrapping_sub(2) as i32;
prev_hash = HashBytesAtOffset(input_bytes, 2, shift);
table[prev_hash as usize] =
ip_index.wrapping_sub(base_ip).wrapping_sub(1) as i32;
candidate = base_ip.wrapping_add(table[cur_hash as usize] as usize);
table[cur_hash as usize] = ip_index.wrapping_sub(base_ip) as i32;

while IsMatch(&input_ptr[ip_index..], &input_ptr[candidate..]) {
let base: usize = ip_index;
let matched: usize = (5usize).wrapping_add(FindMatchLengthWithLimit(
Expand Down Expand Up @@ -916,28 +907,26 @@ fn BrotliCompressFragmentFastImpl<AllocHT: alloc::Allocator<HuffmanTree>>(
code_block_selection = CodeBlockState::EMIT_REMAINDER;
continue 'continue_to_next_block;
}
{
assert!(ip_index >= 3);
let input_bytes: u64 =
BROTLI_UNALIGNED_LOAD64(&input_ptr[ip_index - 3..]);
let mut prev_hash: u32 = HashBytesAtOffset(input_bytes, 0i32, shift);
let cur_hash: u32 = HashBytesAtOffset(input_bytes, 3i32, shift);
table[prev_hash as usize] =
ip_index.wrapping_sub(base_ip).wrapping_sub(3) as i32;
prev_hash = HashBytesAtOffset(input_bytes, 1i32, shift);
table[prev_hash as usize] =
ip_index.wrapping_sub(base_ip).wrapping_sub(2) as i32;
prev_hash = HashBytesAtOffset(input_bytes, 2i32, shift);
table[prev_hash as usize] =
ip_index.wrapping_sub(base_ip).wrapping_sub(1) as i32;
candidate = base_ip.wrapping_add(table[cur_hash as usize] as usize);
table[cur_hash as usize] = ip_index.wrapping_sub(base_ip) as i32;
}

assert!(ip_index >= 3);
let input_bytes: u64 = BROTLI_UNALIGNED_LOAD64(&input_ptr[ip_index - 3..]);
let mut prev_hash: u32 = HashBytesAtOffset(input_bytes, 0, shift);
let cur_hash: u32 = HashBytesAtOffset(input_bytes, 3, shift);
table[prev_hash as usize] =
ip_index.wrapping_sub(base_ip).wrapping_sub(3) as i32;
prev_hash = HashBytesAtOffset(input_bytes, 1, shift);
table[prev_hash as usize] =
ip_index.wrapping_sub(base_ip).wrapping_sub(2) as i32;
prev_hash = HashBytesAtOffset(input_bytes, 2, shift);
table[prev_hash as usize] =
ip_index.wrapping_sub(base_ip).wrapping_sub(1) as i32;
candidate = base_ip.wrapping_add(table[cur_hash as usize] as usize);
table[cur_hash as usize] = ip_index.wrapping_sub(base_ip) as i32;
}
if code_block_selection as i32 == CodeBlockState::EMIT_REMAINDER as i32 {
if code_block_selection == CodeBlockState::EMIT_REMAINDER {
break;
}
if code_block_selection as i32 == CodeBlockState::EMIT_COMMANDS as i32 {
if code_block_selection == CodeBlockState::EMIT_COMMANDS {
next_hash = Hash(
&input_ptr[{
ip_index = ip_index.wrapping_add(1);
Expand All @@ -950,7 +939,7 @@ fn BrotliCompressFragmentFastImpl<AllocHT: alloc::Allocator<HuffmanTree>>(
}
code_block_selection = CodeBlockState::EMIT_REMAINDER;
continue 'continue_to_next_block;
} else if code_block_selection as i32 == CodeBlockState::EMIT_REMAINDER as i32 {
} else if code_block_selection == CodeBlockState::EMIT_REMAINDER {
input_index = input_index.wrapping_add(block_size);
input_size = input_size.wrapping_sub(block_size);
block_size = min(input_size, kMergeBlockSize);
Expand Down Expand Up @@ -1021,7 +1010,7 @@ fn BrotliCompressFragmentFastImpl<AllocHT: alloc::Allocator<HuffmanTree>>(
next_emit = ip_end;
code_block_selection = CodeBlockState::NEXT_BLOCK;
continue 'continue_to_next_block;
} else if code_block_selection as i32 == CodeBlockState::NEXT_BLOCK as i32 {
} else if code_block_selection == CodeBlockState::NEXT_BLOCK {
if input_size > 0 {
metablock_start = input_index;
block_size = min(input_size, kFirstBlockSize);
Expand Down

0 comments on commit ef5d7be

Please sign in to comment.