Skip to content

Commit

Permalink
Anvil: Added LV4 Compression
Browse files Browse the repository at this point in the history
  • Loading branch information
Snowiiii committed Oct 14, 2024
1 parent df80ac2 commit 3eacf1e
Show file tree
Hide file tree
Showing 6 changed files with 62 additions and 24 deletions.
20 changes: 20 additions & 0 deletions Cargo.lock

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

5 changes: 5 additions & 0 deletions pumpkin-world/Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -14,7 +14,12 @@ derive_more.workspace = true
itertools.workspace = true
thiserror = "1.0"
futures = "0.3"


# Compression
flate2 = "1.0"
lz4 = "1.11.1"

serde.workspace = true
serde_json = "1.0"
log.workspace = true
Expand Down
45 changes: 32 additions & 13 deletions pumpkin-world/src/chunk/anvil.rs
Original file line number Diff line number Diff line change
Expand Up @@ -26,41 +26,60 @@ impl AnvilChunkReader {

/// Compression scheme used for a chunk payload in an Anvil region file.
///
/// Each variant corresponds to the compression-type byte stored in the
/// chunk header of a region (`.mca`) file (see `Compression::from_byte`).
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum Compression {
    /// GZip compression (compression type 1).
    GZip,
    /// ZLib compression (compression type 2).
    ZLib,
    /// Uncompressed (since a version before 1.15.1; compression type 3).
    None,
    /// LZ4 compression (since 24w04a; compression type 4).
    LZ4,
    /// Custom compression algorithm (since 24w05a; compression type 127).
    Custom,
}

impl Compression {
pub fn from_byte(byte: u8) -> Option<Self> {
match byte {
1 => Some(Self::Gzip),
2 => Some(Self::Zlib),
1 => Some(Self::GZip),
2 => Some(Self::ZLib),
3 => Some(Self::None),
4 => Some(Self::LZ4),
// Creative i guess?
127 => Some(Self::Custom),
_ => None,
}
}

fn decompress_data(&self, compressed_data: Vec<u8>) -> Result<Vec<u8>, CompressionError> {
match self {
Compression::Gzip => {
let mut z = GzDecoder::new(&compressed_data[..]);
let mut chunk_data = Vec::with_capacity(compressed_data.len());
z.read_to_end(&mut chunk_data)
Compression::GZip => {
let mut decoder = GzDecoder::new(&compressed_data[..]);
let mut chunk_data = Vec::new();
decoder
.read_to_end(&mut chunk_data)
.map_err(CompressionError::GZipError)?;
Ok(chunk_data)
}
Compression::Zlib => {
let mut z = ZlibDecoder::new(&compressed_data[..]);
let mut chunk_data = Vec::with_capacity(compressed_data.len());
z.read_to_end(&mut chunk_data)
Compression::ZLib => {
let mut decoder = ZlibDecoder::new(&compressed_data[..]);
let mut chunk_data = Vec::new();
decoder
.read_to_end(&mut chunk_data)
.map_err(CompressionError::ZlibError)?;
Ok(chunk_data)
}
Compression::None => Ok(compressed_data),
Compression::LZ4 => todo!(),
Compression::LZ4 => {
let mut decoder = lz4::Decoder::new(compressed_data.as_slice())
.map_err(CompressionError::LZ4Error)?;
let mut decompressed_data = Vec::new();
decoder
.read_to_end(&mut decompressed_data)
.map_err(CompressionError::LZ4Error)?;
Ok(decompressed_data)
}
Compression::Custom => todo!(),
}
}
}
Expand Down
2 changes: 2 additions & 0 deletions pumpkin-world/src/chunk/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -50,6 +50,8 @@ pub enum CompressionError {
ZlibError(std::io::Error),
#[error("Error while working with Gzip compression: {0}")]
GZipError(std::io::Error),
#[error("Error while working with LZ4 compression: {0}")]
LZ4Error(std::io::Error),
}

pub struct ChunkData {
Expand Down
10 changes: 3 additions & 7 deletions pumpkin-world/src/level.rs
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,6 @@ use std::{collections::HashMap, path::PathBuf, sync::Arc};
use parking_lot::Mutex;
use pumpkin_core::math::vector2::Vector2;
use rayon::prelude::*;
use thiserror::Error;
use tokio::sync::mpsc;

use crate::{
Expand Down Expand Up @@ -33,9 +32,6 @@ pub struct SaveFile {
pub region_folder: PathBuf,
}

#[derive(Error, Debug)]
pub enum WorldError {}

impl Level {
pub fn from_root_folder(root_folder: PathBuf) -> Self {
let world_gen = get_world_gen(Seed(0)); // TODO Read Seed from config.
Expand Down Expand Up @@ -79,7 +75,7 @@ impl Level {
pub fn fetch_chunks(
&self,
chunks: &[Vector2<i32>],
channel: mpsc::Sender<Result<Arc<ChunkData>, WorldError>>,
channel: mpsc::Sender<Arc<ChunkData>>,
is_alive: bool,
) {
chunks.into_par_iter().for_each(|at| {
Expand All @@ -92,7 +88,7 @@ impl Level {
// Check if chunks is already loaded
if loaded_chunks.contains_key(at) {
channel
.blocking_send(Ok(loaded_chunks.get(at).unwrap().clone()))
.blocking_send(loaded_chunks.get(at).unwrap().clone())
.expect("Failed sending ChunkData.");
return;
}
Expand All @@ -116,7 +112,7 @@ impl Level {
.unwrap();
let data = Arc::new(data);
channel
.blocking_send(Ok(data.clone()))
.blocking_send(data.clone())
.expect("Failed sending ChunkData.");
loaded_chunks.insert(at, data);
})
Expand Down
4 changes: 0 additions & 4 deletions pumpkin/src/world/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -235,10 +235,6 @@ impl World {

while let Some(chunk_data) = chunk_receiver.recv().await {
// dbg!(chunk_pos);
let chunk_data = match chunk_data {
Ok(d) => d,
Err(_) => continue,
};
#[cfg(debug_assertions)]
if chunk_data.position == (0, 0).into() {
use pumpkin_protocol::bytebuf::ByteBuffer;
Expand Down

0 comments on commit 3eacf1e

Please sign in to comment.