From 60c1c2983c41609de58e10ad1742884c18c7c9e8 Mon Sep 17 00:00:00 2001
From: Alexander Medvedev
Date: Wed, 18 Dec 2024 23:23:04 +0100
Subject: [PATCH 1/6] anvil saving foundation

---
 pumpkin-world/src/block/block_registry.rs |   8 +
 pumpkin-world/src/chunk/anvil.rs          | 387 +++++++++++++++++++---
 pumpkin-world/src/chunk/mod.rs            |  53 ++-
 pumpkin-world/src/level.rs                | 125 +++----
 pumpkin/src/entity/player.rs              |  10 +-
 pumpkin/src/world/mod.rs                  |  24 +-
 pumpkin/src/world/player_chunker.rs       |   7 +-
 7 files changed, 467 insertions(+), 147 deletions(-)

diff --git a/pumpkin-world/src/block/block_registry.rs b/pumpkin-world/src/block/block_registry.rs
index d043c21f..20a424d9 100644
--- a/pumpkin-world/src/block/block_registry.rs
+++ b/pumpkin-world/src/block/block_registry.rs
@@ -24,6 +24,14 @@ pub static BLOCK_ID_BY_REGISTRY_ID: LazyLock<HashMap<String, u16>> = LazyLock::n
     map
 });
 
+pub static BLOCK_ID_TO_REGISTRY_ID: LazyLock<HashMap<u16, String>> = LazyLock::new(|| {
+    let mut map = HashMap::new();
+    for block in &*BLOCKS.blocks {
+        map.insert(block.default_state_id, block.name.clone());
+    }
+    map
+});
+
 pub static BLOCK_ID_BY_STATE_ID: LazyLock<HashMap<u16, u16>> = LazyLock::new(|| {
     let mut map = HashMap::new();
     for block in &BLOCKS.blocks {
diff --git a/pumpkin-world/src/chunk/anvil.rs b/pumpkin-world/src/chunk/anvil.rs
index 86389be6..b34ca80b 100644
--- a/pumpkin-world/src/chunk/anvil.rs
+++ b/pumpkin-world/src/chunk/anvil.rs
@@ -1,27 +1,30 @@
 use std::{
+    collections::HashMap,
     fs::OpenOptions,
-    io::{Read, Seek},
+    io::{Read, Seek, SeekFrom, Write},
 };
 
-use flate2::bufread::{GzDecoder, ZlibDecoder};
+use fastnbt::LongArray;
+use flate2::bufread::{GzDecoder, GzEncoder, ZlibDecoder, ZlibEncoder};
 
-use crate::level::SaveFile;
+use crate::{
+    block::block_registry::BLOCK_ID_TO_REGISTRY_ID, chunk::ChunkWritingError, level::LevelFolder,
+    WORLD_LOWEST_Y,
+};
 
-use super::{ChunkData, ChunkReader, ChunkReadingError, CompressionError};
+use super::{
+    ChunkData, ChunkNbt, ChunkReader, ChunkReadingError, ChunkSection, ChunkSectionBlockStates,
+    ChunkSerializingError, ChunkWriter, CompressionError, PaletteEntry,
+};
 
-#[derive(Clone)]
-pub struct AnvilChunkReader {}
+// 1.21.4
+const WORLD_DATA_VERSION: i32 = 4189;
 
-impl Default for AnvilChunkReader {
-    fn default() -> Self {
-        Self::new()
-    }
-}
+#[derive(Clone, Default)]
+pub struct AnvilChunkFormat;
 
-impl AnvilChunkReader {
-    pub fn new() -> Self {
-        Self {}
-    }
+fn modulus(a: i32, b: i32) -> i32 {
+    ((a % b) + b) % b
 }
 
 #[derive(Debug, Clone, Copy, PartialEq, Eq)]
@@ -30,8 +33,6 @@ pub enum Compression {
     /// GZip Compression
     GZip,
     /// ZLib Compression
     ZLib,
-    /// Uncompressed (since a version before 1.15.1)
-    None,
     /// LZ4 Compression (since 24w04a)
     LZ4,
     /// Custom compression algorithm (since 24w05a)
@@ -39,22 +40,25 @@ impl Compression {
-    pub fn from_byte(byte: u8) -> Option<Self> {
+    /// Returns Ok when a compression is found, otherwise an Err
+    #[allow(clippy::result_unit_err)]
+    pub fn from_byte(byte: u8) -> Result<Option<Self>, ()> {
         match byte {
-            1 => Some(Self::GZip),
-            2 => Some(Self::ZLib),
-            3 => Some(Self::None),
-            4 => Some(Self::LZ4),
-            // Creative i guess?
-            127 => Some(Self::Custom),
-            _ => None,
+            1 => Ok(Some(Self::GZip)),
+            2 => Ok(Some(Self::ZLib)),
+            // Uncompressed (since a version before 1.15.1)
+            3 => Ok(None),
+            4 => Ok(Some(Self::LZ4)),
+            127 => Ok(Some(Self::Custom)),
+            // Unknown format
+            _ => Err(()),
         }
     }
 
-    fn decompress_data(&self, compressed_data: Vec<u8>) -> Result<Vec<u8>, CompressionError> {
+    fn decompress_data(&self, compressed_data: &[u8]) -> Result<Vec<u8>, CompressionError> {
         match self {
             Compression::GZip => {
-                let mut decoder = GzDecoder::new(&compressed_data[..]);
+                let mut decoder = GzDecoder::new(compressed_data);
                 let mut chunk_data = Vec::new();
                 decoder
                     .read_to_end(&mut chunk_data)
                     .map_err(CompressionError::GZipError)?;
                 Ok(chunk_data)
             }
             Compression::ZLib => {
-                let mut decoder = ZlibDecoder::new(&compressed_data[..]);
+                let mut decoder = ZlibDecoder::new(compressed_data);
                 let mut chunk_data = Vec::new();
                 decoder
                     .read_to_end(&mut chunk_data)
                     .map_err(CompressionError::ZlibError)?;
                 Ok(chunk_data)
             }
-            Compression::None => Ok(compressed_data),
             Compression::LZ4 => {
-                let mut decoder = lz4::Decoder::new(compressed_data.as_slice())
-                    .map_err(CompressionError::LZ4Error)?;
+                let mut decoder =
+                    lz4::Decoder::new(compressed_data).map_err(CompressionError::LZ4Error)?;
                 let mut decompressed_data = Vec::new();
                 decoder
                     .read_to_end(&mut decompressed_data)
@@ -82,12 +85,57 @@ impl Compression {
                 Ok(decompressed_data)
             }
             Compression::Custom => todo!(),
         }
     }
+    fn compress_data(
+        &self,
+        uncompressed_data: &[u8],
+        compression_level: u32,
+    ) -> Result<Vec<u8>, CompressionError> {
+        match self {
+            Compression::GZip => {
+                let mut decoder = GzEncoder::new(
+                    uncompressed_data,
+                    flate2::Compression::new(compression_level),
+                );
+                let mut chunk_data = Vec::new();
+                decoder
+                    .read_to_end(&mut chunk_data)
+                    .map_err(CompressionError::GZipError)?;
+                Ok(chunk_data)
+            }
+            Compression::ZLib => {
+                let mut decoder = ZlibEncoder::new(
+                    uncompressed_data,
+                    flate2::Compression::new(compression_level),
+                );
+                let mut chunk_data = Vec::new();
+                decoder
+                    .read_to_end(&mut chunk_data)
+                    .map_err(CompressionError::ZlibError)?;
+                Ok(chunk_data)
+            }
+            Compression::LZ4 => {
+                let mut compressed_data = Vec::new();
+                let mut encoder = lz4::EncoderBuilder::new()
+                    .level(compression_level)
+                    .build(&mut compressed_data)
+                    .map_err(CompressionError::LZ4Error)?;
+                if let Err(err) = encoder.write_all(uncompressed_data) {
+                    return Err(CompressionError::LZ4Error(err));
+                }
+                if let (_output, Err(err)) = encoder.finish() {
+                    return Err(CompressionError::LZ4Error(err));
+                }
+                Ok(compressed_data)
+            }
+            Compression::Custom => todo!(),
+        }
+    }
 }
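Note that the new compress_data mirrors decompress_data: both drive flate2's bufread adapters, so compressing is just reading from an encoder that wraps the input. A minimal round-trip sketch of that pattern (a standalone illustration, not code from this patch; only the flate2 crate is assumed):

use std::io::Read;

use flate2::bufread::{ZlibDecoder, ZlibEncoder};

/// Compress then decompress a payload through flate2's bufread adapters,
/// the same way compress_data/decompress_data above drive them.
fn zlib_round_trip(payload: &[u8], level: u32) -> std::io::Result<Vec<u8>> {
    // The encoder wraps the input reader; reading from it yields compressed bytes.
    let mut compressed = Vec::new();
    ZlibEncoder::new(payload, flate2::Compression::new(level)).read_to_end(&mut compressed)?;

    // The decoder wraps the compressed bytes; reading yields the original payload.
    let mut decompressed = Vec::new();
    ZlibDecoder::new(&compressed[..]).read_to_end(&mut decompressed)?;
    Ok(decompressed)
}

fn main() -> std::io::Result<()> {
    let payload = b"chunk nbt bytes".repeat(100);
    assert_eq!(zlib_round_trip(&payload, 6)?, payload);
    Ok(())
}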
 
-impl ChunkReader for AnvilChunkReader {
+impl ChunkReader for AnvilChunkFormat {
     fn read_chunk(
         &self,
-        save_file: &SaveFile,
+        save_file: &LevelFolder,
         at: &pumpkin_core::math::vector2::Vector2<i32>,
     ) -> Result<ChunkData, ChunkReadingError> {
         let region = (at.x >> 5, at.z >> 5);
@@ -141,25 +189,270 @@ impl ChunkReader for AnvilChunkReader {
             out
         };
 
-        // TODO: check checksum to make sure chunk is not corrupted
         let header: Vec<u8> = file_buf.drain(0..5).collect();
-
-        let compression = Compression::from_byte(header[4]).ok_or(
-            ChunkReadingError::Compression(CompressionError::UnknownCompression),
-        )?;
-
+        if header.len() != 5 {
+            return Err(ChunkReadingError::InvalidHeader);
+        }
         let size = u32::from_be_bytes(header[..4].try_into().unwrap());
+        let compression = header[4];
+
+        let compression = Compression::from_byte(compression)
+            .map_err(|_| ChunkReadingError::Compression(CompressionError::UnknownCompression))?;
 
         // size includes the compression scheme byte, so we need to subtract 1
-        let chunk_data = file_buf.drain(0..size as usize - 1).collect();
-        let decompressed_chunk = compression
-            .decompress_data(chunk_data)
-            .map_err(ChunkReadingError::Compression)?;
+        let chunk_data: Vec<u8> = file_buf.drain(0..size as usize - 1).collect();
+
+        let decompressed_chunk = if let Some(compression) = compression {
+            compression
+                .decompress_data(&chunk_data)
+                .map_err(ChunkReadingError::Compression)?
+        } else {
+            chunk_data
+        };
 
         ChunkData::from_bytes(&decompressed_chunk, *at).map_err(ChunkReadingError::ParsingError)
     }
 }
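For reference, the location table consulted above is the first 4 KiB of a region file: one four-byte entry per chunk of the 32x32 region, holding a 3-byte big-endian sector offset plus a 1-byte sector count, both in 4 KiB sectors. A self-contained sketch of that lookup (ChunkLocation and chunk_location are illustrative names, not part of the patch):

/// Where a chunk's payload lives inside a region file, in bytes.
struct ChunkLocation {
    offset: u64,
    size: usize,
}

/// Decode one entry of the 4 KiB location table at the start of a region file.
/// chunk_x/chunk_z are world chunk coordinates; a region holds 32x32 chunks.
fn chunk_location(location_table: &[u8; 4096], chunk_x: i32, chunk_z: i32) -> Option<ChunkLocation> {
    // Same as `modulus(coord, 32)` in the patch: wrap into 0..32 within the region.
    let local_x = chunk_x.rem_euclid(32) as usize;
    let local_z = chunk_z.rem_euclid(32) as usize;
    let entry = (local_x + local_z * 32) * 4;

    // 3-byte big-endian sector offset, then a 1-byte sector count.
    let sectors = u32::from_be_bytes([
        0,
        location_table[entry],
        location_table[entry + 1],
        location_table[entry + 2],
    ]);
    let size = location_table[entry + 3] as usize * 4096;

    // An all-zero entry means the chunk has never been written.
    if sectors == 0 && size == 0 {
        return None;
    }
    Some(ChunkLocation {
        offset: sectors as u64 * 4096,
        size,
    })
}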
+
+impl ChunkWriter for AnvilChunkFormat {
+    fn write_chunk(
+        &self,
+        chunk_data: &ChunkData,
+        level_folder: &LevelFolder,
+        at: &pumpkin_core::math::vector2::Vector2<i32>,
+    ) -> Result<(), super::ChunkWritingError> {
+        // TODO: update timestamp
+
+        let bytes = self
+            .to_bytes(chunk_data)
+            .map_err(|err| ChunkWritingError::ChunkSerializingError(err.to_string()))?;
+        // TODO: config
+        let compression = Compression::ZLib;
+        let bytes = compression
+            // TODO: config
+            .compress_data(&bytes, 6)
+            .map_err(ChunkWritingError::Compression)?;
+
+        let region = (at.x >> 5, at.z >> 5);
+
+        let mut region_file = OpenOptions::new()
+            .read(true)
+            .write(true)
+            .create(true)
+            .truncate(false)
+            .open(
+                level_folder
+                    .region_folder
+                    .join(format!("./r.{}.{}.mca", region.0, region.1)),
+            )
+            .map_err(|err| ChunkWritingError::IoError(err.kind()))?;
+
+        let mut location_table: [u8; 4096] = [0; 4096];
+        let mut timestamp_table: [u8; 4096] = [0; 4096];
+
+        let file_meta = region_file
+            .metadata()
+            .map_err(|err| ChunkWritingError::IoError(err.kind()))?;
+
+        // fill the location and timestamp tables if they exist
+        if file_meta.len() >= 4096 * 2 {
+            region_file
+                .read_exact(&mut location_table)
+                .map_err(|err| ChunkWritingError::IoError(err.kind()))?;
+            region_file
+                .read_exact(&mut timestamp_table)
+                .map_err(|err| ChunkWritingError::IoError(err.kind()))?;
+        }
+
+        let chunk_x = modulus(at.x, 32) as u32;
+        let chunk_z = modulus(at.z, 32) as u32;
+
+        let table_entry = (chunk_x + chunk_z * 32) * 4;
+
+        let mut offset = vec![0u8];
+        offset.extend_from_slice(&location_table[table_entry as usize..table_entry as usize + 3]);
+        let at_offset = u32::from_be_bytes(offset.try_into().unwrap()) as u64 * 4096;
+        let at_size = location_table[table_entry as usize + 3] as usize * 4096;
+
+        let mut end_index = 4096 * 2;
+        if at_offset != 0 || at_size != 0 {
+            // move other chunks earlier, if there is a hole
+            for (other_offset, other_size, other_table_entry) in location_table
+                .chunks(4)
+                .enumerate()
+                .filter_map(|(index, v)| {
+                    if table_entry / 4 == index as u32 {
+                        return None;
+                    }
+                    let mut offset = vec![0u8];
+                    offset.extend_from_slice(&v[0..3]);
+                    let offset = u32::from_be_bytes(offset.try_into().unwrap()) as u64 * 4096;
+                    let size = v[3] as usize * 4096;
+                    if offset == 0 && size == 0 {
+                        return None;
+                    }
+                    Some((offset, size, index * 4))
+                })
+                .collect::<Vec<_>>()
+            {
+                if at_offset > other_offset {
+                    continue;
+                }
+
+                fn read_at_most(file: &mut std::fs::File, size: usize) -> std::io::Result<Vec<u8>> {
+                    let mut buf = vec![0u8; size];
+
+                    let mut cursor = 0;
+                    loop {
+                        match file.read(&mut buf[cursor..])? {
+                            0 => break,
+                            bytes_read => {
+                                cursor += bytes_read;
+                            }
+                        }
+                    }
+
+                    Ok(buf)
+                }
+
+                region_file.seek(SeekFrom::Start(other_offset)).unwrap(); // TODO
+                let buf = match read_at_most(&mut region_file, other_size) {
+                    Ok(v) => v,
+                    Err(_) => panic!(
+                        "Region file r.{}.{}.mca got corrupted, sorry",
+                        region.0, region.1
+                    ),
+                };
+
+                region_file
+                    .seek(SeekFrom::Start(other_offset - at_size as u64))
+                    .unwrap(); // TODO
+                region_file.write_all(&buf).unwrap_or_else(|_| {
+                    panic!(
+                        "Region file r.{}.{}.mca got corrupted, sorry",
+                        region.0, region.1
+                    )
+                });
+                let location_bytes =
+                    &(((other_offset - at_size as u64) / 4096) as u32).to_be_bytes()[1..4];
+                let size_bytes = [(other_size.div_ceil(4096)) as u8];
+                let location_entry = [location_bytes, &size_bytes].concat();
+                location_table[other_table_entry..other_table_entry + 4]
+                    .as_mut()
+                    .copy_from_slice(&location_entry);
+
+                end_index = (other_offset as isize + other_size as isize - at_size as isize) as u64;
+            }
+        } else {
+            for (offset, size) in location_table.chunks(4).filter_map(|v| {
+                let mut offset = vec![0u8];
+                offset.extend_from_slice(&v[0..3]);
+                let offset = u32::from_be_bytes(offset.try_into().unwrap()) as u64 * 4096;
+                let size = v[3] as usize * 4096;
+                if offset == 0 && size == 0 {
+                    return None;
+                }
+                Some((offset, size))
+            }) {
+                end_index = u64::max(offset + size as u64, end_index);
+            }
+        }
+
+        let location_bytes = &(end_index as u32 / 4096).to_be_bytes()[1..4];
+        let size_bytes = [(bytes.len().div_ceil(4096)) as u8];
+        dbg!(end_index, bytes.len());
+        dbg!(&location_bytes, &size_bytes);
+        location_table[table_entry as usize..table_entry as usize + 4]
+            .as_mut()
+            .copy_from_slice(&[location_bytes, &size_bytes].concat());
+
+        // write new location and timestamp table
+
+        region_file.seek(SeekFrom::Start(0)).unwrap(); // TODO
+        if let Err(err) = region_file.write_all(&[location_table, timestamp_table].concat()) {
+            return Err(ChunkWritingError::IoError(err.kind()));
+        }
+        // dbg!(&location_table.iter().map(|v| v.to_string()).join(","));
+
+        region_file.seek(SeekFrom::Start(end_index)).unwrap(); // TODO
+        region_file.write_all(&bytes).unwrap_or_else(|_| {
+            panic!(
+                "Region file r.{}.{}.mca got corrupted, sorry",
+                region.0, region.1
+            )
+        });
+
+        // region_file.write_all(&vec![0u8; 4096]);
+
+        Ok(())
+    }
+}
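The writer performs the inverse of the lookup when it updates the table: a byte offset and payload length are packed back into a four-byte entry. Because the size field is a single byte of 4 KiB sectors, one chunk payload tops out at 255 sectors, roughly 1 MiB. A sketch under those assumptions (location_entry is a hypothetical helper, not part of the patch):

/// Encode a location-table entry from a byte offset (a multiple of 4096)
/// and a payload length in bytes, as done inline in write_chunk above.
fn location_entry(offset: u64, len: usize) -> [u8; 4] {
    let sector_offset = (offset / 4096) as u32;
    let sector_count = len.div_ceil(4096);
    // One u8 of sector count; the on-disk format cannot address more.
    assert!(sector_count <= u8::MAX as usize, "chunk payload too large");

    let be = sector_offset.to_be_bytes();
    // Drop the high byte: the table stores only 3 bytes of offset.
    [be[1], be[2], be[3], sector_count as u8]
}

fn main() {
    // Sector 3, 5000 bytes -> two 4 KiB sectors.
    assert_eq!(location_entry(3 * 4096, 5000), [0, 0, 3, 2]);
}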
+
+impl AnvilChunkFormat {
+    pub fn to_bytes(&self, chunk_data: &ChunkData) -> Result<Vec<u8>, ChunkSerializingError> {
+        let mut sections = Vec::new();
+
+        for (i, blocks) in chunk_data.blocks.blocks.chunks(16 * 16 * 16).enumerate() {
+            // get unique blocks
+            let palette = HashMap::<u16, (&String, usize)>::from_iter(
+                blocks.iter().enumerate().map(|(i, v)| {
+                    (
+                        *v,
+                        (
+                            BLOCK_ID_TO_REGISTRY_ID
+                                .get(&v)
+                                .expect("Tried saving a block which does not exist."),
+                            i,
+                        ),
+                    )
+                }),
+            );
+
+            let block_bit_size = {
+                let size = 64 - (palette.len() as i64 - 1).leading_zeros();
+                std::cmp::max(4, size)
+            } as usize;
+            let blocks_in_pack = 64 / block_bit_size;
+            let mut section_longs = Vec::new();
+
+            let mut current_pack_long = 0i64;
+
+            for block_pack in blocks.chunks(blocks_in_pack) {
+                for block in block_pack {
+                    let index = palette.get(block).expect("Just added all unique").1;
+                    current_pack_long = current_pack_long << block_bit_size | index as i64;
+                }
+                section_longs.push(current_pack_long);
+                current_pack_long = 0;
+            }
+
+            sections.push(ChunkSection {
+                y: i as i8,
+                block_states: Some(ChunkSectionBlockStates {
+                    data: Some(LongArray::new(section_longs)),
+                    palette: palette
+                        .into_iter()
+                        .map(|entry| PaletteEntry {
+                            name: entry.1 .0.clone(),
+                            _properties: None,
+                        })
+                        .collect(),
+                }),
+            });
+        }
+
+        let nbt = ChunkNbt {
+            data_version: WORLD_DATA_VERSION,
+            heightmaps: chunk_data.blocks.heightmap.clone(),
+            sections,
+        };
+
+        let bytes = fastnbt::to_bytes(&nbt);
+
+        bytes.map_err(ChunkSerializingError::ErrorSerializingChunk)
+    }
+}
+
 #[cfg(test)]
 mod tests {
     use std::path::PathBuf;
@@ -167,15 +460,15 @@ mod tests {
     use pumpkin_core::math::vector2::Vector2;
 
     use crate::{
-        chunk::{anvil::AnvilChunkReader, ChunkReader, ChunkReadingError},
-        level::SaveFile,
+        chunk::{anvil::AnvilChunkFormat, ChunkReader, ChunkReadingError},
+        level::LevelFolder,
     };
 
     #[test]
     fn not_existing() {
         let region_path = PathBuf::from("not_existing");
-        let result = AnvilChunkReader::new().read_chunk(
-            &SaveFile {
+        let result = AnvilChunkFormat.read_chunk(
+            &LevelFolder {
                 root_folder: PathBuf::from(""),
                 region_folder: region_path,
             },
diff --git a/pumpkin-world/src/chunk/mod.rs b/pumpkin-world/src/chunk/mod.rs
index fcbb53f1..6295375e 100644
--- a/pumpkin-world/src/chunk/mod.rs
+++ b/pumpkin-world/src/chunk/mod.rs
@@ -9,7 +9,7 @@ use thiserror::Error;
 use crate::{
     block::BlockState,
     coordinates::{ChunkRelativeBlockCoordinates, Height},
-    level::SaveFile,
+    level::LevelFolder,
     WORLD_HEIGHT,
 };
 
@@ -22,15 +22,26 @@ const CHUNK_VOLUME: usize = CHUNK_AREA * WORLD_HEIGHT;
 pub trait ChunkReader: Sync + Send {
     fn read_chunk(
         &self,
-        save_file: &SaveFile,
+        save_file: &LevelFolder,
         at: &Vector2<i32>,
     ) -> Result<ChunkData, ChunkReadingError>;
 }
 
+pub trait ChunkWriter: Send + Sync {
+    fn write_chunk(
+        &self,
+        chunk: &ChunkData,
+        level_folder: &LevelFolder,
+        at: &Vector2<i32>,
+    ) -> Result<(), ChunkWritingError>;
+}
+
 #[derive(Error, Debug)]
 pub enum ChunkReadingError {
     #[error("Io error: {0}")]
     IoError(std::io::ErrorKind),
+    #[error("Invalid header")]
+    InvalidHeader,
     #[error("Region is invalid")]
     RegionIsInvalid,
     #[error("Compression error {0}")]
@@ -41,6 +52,16 @@ pub enum ChunkReadingError {
     ParsingError(ChunkParsingError),
 }
 
+#[derive(Error, Debug)]
+pub enum ChunkWritingError {
+    #[error("Io error: {0}")]
+    IoError(std::io::ErrorKind),
+    #[error("Compression error {0}")]
+    Compression(CompressionError),
+    #[error("Chunk serializing error: {0}")]
+    ChunkSerializingError(String),
+}
+
 #[derive(Error, Debug)]
 pub enum CompressionError {
     #[error("Compression scheme not recognised")]
@@ -68,14 +89,14 @@ pub struct ChunkBlocks {
     pub heightmap: ChunkHeightmaps,
 }
 
-#[derive(Deserialize, Debug, Clone)]
+#[derive(Serialize, Deserialize, Debug, Clone)]
 #[serde(rename_all = "PascalCase")]
 struct PaletteEntry {
     name: String,
     _properties: Option<HashMap<String, String>>,
 }
 
-#[derive(Deserialize, Debug, Clone)]
+#[derive(Serialize, Deserialize, Debug, Clone)]
 struct ChunkSectionBlockStates {
     // #[serde(with = "LongArray")]
     data: Option<LongArray>,
     palette: Vec<PaletteEntry>,
@@ -91,23 +112,25 @@ pub struct ChunkHeightmaps {
     world_surface: LongArray,
 }
 
-#[derive(Deserialize, Debug)]
-#[expect(dead_code)]
+#[derive(Serialize, Deserialize, Debug)]
 struct ChunkSection {
     #[serde(rename = "Y")]
-    y: i32,
+    y: i8,
     block_states: Option<ChunkSectionBlockStates>,
 }
 
-#[derive(Deserialize, Debug)]
+#[derive(Serialize, Deserialize, Debug)]
 #[serde(rename_all = "PascalCase")]
 struct ChunkNbt {
-    #[expect(dead_code)]
-    data_version: usize,
-
+    data_version: i32,
+    // #[serde(rename = "xPos")]
+    // x_pos: i32,
+    // #[serde(rename = "yPos")]
+    // y_pos: i32,
+    // #[serde(rename = "zPos")]
+    // z_pos: i32,
     #[serde(rename = "sections")]
     sections: Vec<ChunkSection>,
-
     heightmaps: ChunkHeightmaps,
 }
@@ -326,3 +349,9 @@ pub enum ChunkParsingError {
     #[error("Error deserializing chunk: {0}")]
     ErrorDeserializingChunk(String),
 }
+
+#[derive(Error, Debug)]
+pub enum ChunkSerializingError {
+    #[error("Error serializing chunk: {0}")]
+    ErrorSerializingChunk(fastnbt::error::Error),
+}
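Since Level (below) holds the writer behind Arc<dyn ChunkWriter>, alternative sinks can be swapped in without touching the level code. A minimal sketch of an implementor against the trait above; NoopChunkWriter is hypothetical and assumes it lives inside the pumpkin-world crate:

use pumpkin_core::math::vector2::Vector2;

use crate::{
    chunk::{ChunkData, ChunkWriter, ChunkWritingError},
    level::LevelFolder,
};

/// A writer that discards every chunk; useful for benchmarks or read-only worlds.
struct NoopChunkWriter;

impl ChunkWriter for NoopChunkWriter {
    fn write_chunk(
        &self,
        _chunk: &ChunkData,
        _level_folder: &LevelFolder,
        _at: &Vector2<i32>,
    ) -> Result<(), ChunkWritingError> {
        // Pretend the write succeeded without touching the disk.
        Ok(())
    }
}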
diff --git a/pumpkin-world/src/level.rs b/pumpkin-world/src/level.rs
index 2a6a9571..60883150 100644
--- a/pumpkin-world/src/level.rs
+++ b/pumpkin-world/src/level.rs
@@ -12,7 +12,8 @@ use tokio::{
 
 use crate::{
     chunk::{
-        anvil::AnvilChunkReader, ChunkData, ChunkParsingError, ChunkReader, ChunkReadingError,
+        anvil::AnvilChunkFormat, ChunkData, ChunkParsingError, ChunkReader, ChunkReadingError,
+        ChunkWriter,
     },
     world_gen::{get_world_gen, Seed, WorldGenerator},
 };
@@ -28,15 +29,16 @@ use crate::{
 /// For more details on world generation, refer to the `WorldGenerator` module.
 pub struct Level {
     pub seed: Seed,
-    save_file: Option<SaveFile>,
+    level_folder: LevelFolder,
     loaded_chunks: Arc<DashMap<Vector2<i32>, Arc<RwLock<ChunkData>>>>,
     chunk_watchers: Arc<DashMap<Vector2<i32>, usize>>,
     chunk_reader: Arc<dyn ChunkReader>,
+    chunk_writer: Arc<dyn ChunkWriter>,
     world_gen: Arc<dyn WorldGenerator>,
 }
 
 #[derive(Clone)]
-pub struct SaveFile {
+pub struct LevelFolder {
     pub root_folder: PathBuf,
     pub region_folder: PathBuf,
 }
@@ -48,39 +50,24 @@ fn get_or_create_seed() -> Seed {
 
 impl Level {
     pub fn from_root_folder(root_folder: PathBuf) -> Self {
-        // If we are using an already existing world we want to read the seed from the level.dat, If not we want to check if there is a seed in the config, if not lets create a random one
-        if root_folder.exists() {
-            let region_folder = root_folder.join("region");
-            assert!(
-                region_folder.exists(),
-                "World region folder does not exist, despite there being a root folder."
-            );
-            // TODO: read seed from level.dat
-            let seed = get_or_create_seed();
-            let world_gen = get_world_gen(seed).into(); // TODO Read Seed from config.
-
-            Self {
-                seed,
-                world_gen,
-                save_file: Some(SaveFile {
-                    root_folder,
-                    region_folder,
-                }),
-                chunk_reader: Arc::new(AnvilChunkReader::new()),
-                loaded_chunks: Arc::new(DashMap::new()),
-                chunk_watchers: Arc::new(DashMap::new()),
-            }
-        } else {
-            let seed = get_or_create_seed();
-            let world_gen = get_world_gen(seed).into(); // TODO Read Seed from config.
-            Self {
-                seed,
-                world_gen,
-                save_file: None,
-                chunk_reader: Arc::new(AnvilChunkReader::new()),
-                loaded_chunks: Arc::new(DashMap::new()),
-                chunk_watchers: Arc::new(DashMap::new()),
-            }
+        let seed = get_or_create_seed();
+        let world_gen = get_world_gen(seed).into();
+        // Check if region folder exists, if not lets make one
+        let region_folder = root_folder.join("region");
+        if !region_folder.exists() {
+            std::fs::create_dir_all(&region_folder).expect("Failed to create Region folder");
+        }
+        Self {
+            world_gen,
+            level_folder: LevelFolder {
+                root_folder,
+                region_folder,
+            },
+            seed,
+            chunk_reader: Arc::new(AnvilChunkFormat::default()),
+            chunk_writer: Arc::new(AnvilChunkFormat::default()),
+            loaded_chunks: Arc::new(DashMap::new()),
+            chunk_watchers: Arc::new(DashMap::new()),
         }
     }
@@ -156,16 +143,16 @@ impl Level {
         }
     }
 
-    pub fn clean_chunks(&self, chunks: &[Vector2<i32>]) {
-        chunks.iter().for_each(|chunk_pos| {
+    pub async fn clean_chunks(&self, chunks: &[Vector2<i32>]) {
+        for chunk_pos in chunks {
             //log::debug!("Unloading {:?}", chunk_pos);
-            self.clean_chunk(chunk_pos);
-        });
+            self.clean_chunk(chunk_pos).await;
+        }
     }
 
-    pub fn clean_chunk(&self, chunk: &Vector2<i32>) {
+    pub async fn clean_chunk(&self, chunk: &Vector2<i32>) {
         if let Some(data) = self.loaded_chunks.remove(chunk) {
-            self.write_chunk(data);
+            self.write_chunk(data).await;
         }
     }
 
@@ -189,16 +176,22 @@ impl Level {
         self.chunk_watchers.shrink_to_fit();
     }
 
-    pub fn write_chunk(&self, _chunk_to_write: (Vector2<i32>, Arc<RwLock<ChunkData>>)) {
-        //TODO
+    pub async fn write_chunk(&self, chunk_to_write: (Vector2<i32>, Arc<RwLock<ChunkData>>)) {
+        let data = chunk_to_write.1.read().await;
+        if let Err(error) =
+            self.chunk_writer
+                .write_chunk(&data, &self.level_folder, &chunk_to_write.0)
+        {
+            log::error!("Failed writing Chunk to disk {}", error.to_string());
+        }
     }
 
     fn load_chunk_from_save(
         chunk_reader: Arc<dyn ChunkReader>,
-        save_file: SaveFile,
+        save_file: &LevelFolder,
         chunk_pos: Vector2<i32>,
     ) -> Result<Option<Arc<RwLock<ChunkData>>>, ChunkReadingError> {
-        match chunk_reader.read_chunk(&save_file, &chunk_pos) {
+        match chunk_reader.read_chunk(save_file, &chunk_pos) {
             Ok(data) => Ok(Some(Arc::new(RwLock::new(data)))),
             Err(
                 ChunkReadingError::ChunkNotExist
@@ -223,7 +216,8 @@ impl Level {
         let channel = channel.clone();
         let loaded_chunks = self.loaded_chunks.clone();
         let chunk_reader = self.chunk_reader.clone();
-        let save_file = self.save_file.clone();
+        let chunk_writer = self.chunk_writer.clone();
+        let level_folder = self.level_folder.clone();
         let world_gen = self.world_gen.clone();
         let chunk_pos = *at;
 
@@ -231,20 +225,33 @@ impl Level {
             .get(&chunk_pos)
             .map(|entry| entry.value().clone())
             .unwrap_or_else(|| {
-                let loaded_chunk = save_file
-                    .and_then(|save_file| {
-                        match Self::load_chunk_from_save(chunk_reader, save_file, chunk_pos) {
-                            Ok(chunk) => chunk,
-                            Err(err) => {
-                                log::error!(
-                                    "Failed to read chunk (regenerating) {:?}: {:?}",
-                                    chunk_pos,
-                                    err
-                                );
-                                None
+                let loaded_chunk =
+                    match Self::load_chunk_from_save(chunk_reader, &level_folder, chunk_pos) {
+                        Ok(chunk) => {
+                            // Save new Chunk
+                            if let Some(chunk) = &chunk {
+                                if let Err(error) = chunk_writer.write_chunk(
+                                    &chunk.blocking_read(),
+                                    &level_folder,
+                                    &chunk_pos,
+                                ) {
+                                    log::error!(
+                                        "Failed writing Chunk to disk {}",
+                                        error.to_string()
+                                    );
+                                };
                             }
+                            chunk
+                        }
+                        Err(err) => {
+                            log::error!(
+                                "Failed to read chunk (regenerating) {:?}: {:?}",
+                                chunk_pos,
+                                err
+                            );
+                            None
                         }
-                    })
+                    }
                     .unwrap_or_else(|| {
                         Arc::new(RwLock::new(world_gen.generate_chunk(chunk_pos)))
                     });
diff --git a/pumpkin/src/entity/player.rs b/pumpkin/src/entity/player.rs
index cc8af323..29e4d66a 100644
--- a/pumpkin/src/entity/player.rs
+++ b/pumpkin/src/entity/player.rs
@@ -216,19 +216,21 @@ impl Player {
             radial_chunks.len()
         );
 
+        let level = &world.level;
+
         // Decrement value of watched chunks
-        let chunks_to_clean = world.mark_chunks_as_not_watched(&radial_chunks);
+        let chunks_to_clean = level.mark_chunks_as_not_watched(&radial_chunks);
         // Remove chunks with no watchers from the cache
-        world.clean_chunks(&chunks_to_clean);
+        level.clean_chunks(&chunks_to_clean).await;
         // Remove left over entries from all possibly loaded chunks
-        world.clean_memory(&radial_chunks);
+        level.clean_memory(&radial_chunks);
 
         log::debug!(
             "Removed player id {} ({}) ({} chunks remain cached)",
             self.gameprofile.name,
             self.client.id,
-            self.world().get_cached_chunk_len()
+            level.loaded_chunk_count()
         );
 
         //self.world().level.list_cached();
diff --git a/pumpkin/src/world/mod.rs b/pumpkin/src/world/mod.rs
index 54c5d105..d3b23606 100644
--- a/pumpkin/src/world/mod.rs
+++ b/pumpkin/src/world/mod.rs
@@ -519,26 +519,6 @@ impl World {
         player.set_health(20.0, 20, 20.0).await;
     }
 
-    pub fn mark_chunks_as_not_watched(&self, chunks: &[Vector2<i32>]) -> Vec<Vector2<i32>> {
-        self.level.mark_chunks_as_not_watched(chunks)
-    }
-
-    pub fn mark_chunks_as_watched(&self, chunks: &[Vector2<i32>]) {
-        self.level.mark_chunks_as_newly_watched(chunks);
-    }
-
-    pub fn clean_chunks(&self, chunks: &[Vector2<i32>]) {
-        self.level.clean_chunks(chunks);
-    }
-
-    pub fn clean_memory(&self, chunks_to_check: &[Vector2<i32>]) {
-        self.level.clean_memory(chunks_to_check);
-    }
-
-    pub fn get_cached_chunk_len(&self) -> usize {
-        self.level.loaded_chunk_count()
-    }
-
     /// IMPORTANT: Chunks have to be non-empty
     fn spawn_world_chunks(
         &self,
@@ -590,7 +570,7 @@ impl World {
                         "Received chunk {:?}, but it is no longer watched... cleaning",
                         &chunk_data.position
                     );
-                    level.clean_chunk(&chunk_data.position);
+                    level.clean_chunk(&chunk_data.position).await;
                     continue;
                 }
 
@@ -765,7 +745,7 @@ impl World {
                 "Received chunk {:?}, but it is not watched... cleaning",
                 chunk_pos
             );
-            self.level.clean_chunk(&chunk_pos);
+            self.level.clean_chunk(&chunk_pos).await;
         }
 
         chunk
diff --git a/pumpkin/src/world/player_chunker.rs b/pumpkin/src/world/player_chunker.rs
index a7c6f8a5..ba29aa11 100644
--- a/pumpkin/src/world/player_chunker.rs
+++ b/pumpkin/src/world/player_chunker.rs
@@ -81,12 +81,13 @@ pub async fn update_position(player: &Arc<Player>) {
 
         // Make sure the watched section and the chunk watcher updates are async atomic. We want to
        // ensure what we unload when the player disconnects is correct
-        entity.world.mark_chunks_as_watched(&loading_chunks);
-        let chunks_to_clean = entity.world.mark_chunks_as_not_watched(&unloading_chunks);
+        let level = &entity.world.level;
+        level.mark_chunks_as_newly_watched(&loading_chunks);
+        let chunks_to_clean = level.mark_chunks_as_not_watched(&unloading_chunks);
         player.watched_section.store(new_cylindrical);
 
         if !chunks_to_clean.is_empty() {
-            entity.world.clean_chunks(&chunks_to_clean);
+            level.clean_chunks(&chunks_to_clean).await;
 
             // This can take a little if we are sending a bunch of packets, queue it up :p
             let client = player.client.clone();

From 2f089a0770848f6bb95079541f0c1342295598d4 Mon Sep 17 00:00:00 2001
From: Alexander Medvedev
Date: Wed, 18 Dec 2024 23:27:17 +0100
Subject: [PATCH 2/6] fix: clippy warns

---
 pumpkin-world/src/chunk/anvil.rs | 3 +--
 pumpkin-world/src/level.rs       | 4 ++--
 2 files changed, 3 insertions(+), 4 deletions(-)

diff --git a/pumpkin-world/src/chunk/anvil.rs b/pumpkin-world/src/chunk/anvil.rs
index b34ca80b..775b6910 100644
--- a/pumpkin-world/src/chunk/anvil.rs
+++ b/pumpkin-world/src/chunk/anvil.rs
@@ -9,7 +9,6 @@ use flate2::bufread::{GzDecoder, GzEncoder, ZlibDecoder, ZlibEncoder};
 
 use crate::{
     block::block_registry::BLOCK_ID_TO_REGISTRY_ID, chunk::ChunkWritingError, level::LevelFolder,
-    WORLD_LOWEST_Y,
 };
 
 use super::{
@@ -400,7 +399,7 @@ impl AnvilChunkFormat {
                         *v,
                         (
                             BLOCK_ID_TO_REGISTRY_ID
-                                .get(&v)
+                                .get(v)
                                 .expect("Tried saving a block which does not exist."),
                             i,
                         ),
diff --git a/pumpkin-world/src/level.rs b/pumpkin-world/src/level.rs
index 60883150..7ea1d1a9 100644
--- a/pumpkin-world/src/level.rs
+++ b/pumpkin-world/src/level.rs
@@ -64,8 +64,8 @@ impl Level {
                 region_folder,
             },
             seed,
-            chunk_reader: Arc::new(AnvilChunkFormat::default()),
-            chunk_writer: Arc::new(AnvilChunkFormat::default()),
+            chunk_reader: Arc::new(AnvilChunkFormat),
+            chunk_writer: Arc::new(AnvilChunkFormat),
             loaded_chunks: Arc::new(DashMap::new()),
             chunk_watchers: Arc::new(DashMap::new()),
         }

From 9b184f73b99c6b7b97cf4ee0fe08be84d497b33a Mon Sep 17 00:00:00 2001
From: Alexander Medvedev
Date: Thu, 19 Dec 2024 23:58:20 +0100
Subject: [PATCH 3/6] write chunk header

---
 pumpkin-world/Cargo.toml         |  3 ++
 pumpkin-world/src/chunk/anvil.rs | 70 +++++++++++++++++++-------------
 pumpkin-world/src/chunk/mod.rs   | 14 +++++--
 3 files changed, 55 insertions(+), 32 deletions(-)

diff --git a/pumpkin-world/Cargo.toml b/pumpkin-world/Cargo.toml
index fcbc14a5..1d2ceeac 100644
--- a/pumpkin-world/Cargo.toml
+++ b/pumpkin-world/Cargo.toml
@@ -8,6 +8,9 @@ pumpkin-core = { path = "../pumpkin-core" }
 pumpkin-config = { path = "../pumpkin-config" }
 pumpkin-macros = { path = "../pumpkin-macros" }
 
+bytes.workspace = true
+
+
 tokio.workspace = true
 rayon.workspace = true
 derive_more.workspace = true
diff --git a/pumpkin-world/src/chunk/anvil.rs b/pumpkin-world/src/chunk/anvil.rs
index 775b6910..769e4344 100644
--- a/pumpkin-world/src/chunk/anvil.rs
+++ b/pumpkin-world/src/chunk/anvil.rs
@@ -4,6 +4,7 @@ use std::{
     io::{Read, Seek, SeekFrom, Write},
 };
 
+use bytes::*;
 use fastnbt::LongArray;
 use flate2::bufread::{GzDecoder, GzEncoder, ZlibDecoder, ZlibEncoder};
 
@@ -22,20 +23,17 @@ const WORLD_DATA_VERSION: i32 = 4189;
 #[derive(Clone, Default)]
 pub struct AnvilChunkFormat;
 
-fn modulus(a: i32, b: i32) -> i32 {
-    ((a % b) + b) % b
-}
-
 #[derive(Debug, Clone, Copy, PartialEq, Eq)]
+#[repr(u8)]
 pub enum Compression {
     /// GZip Compression
-    GZip,
+    GZip = 1,
     /// ZLib Compression
-    ZLib,
+    ZLib = 2,
     /// LZ4 Compression (since 24w04a)
-    LZ4,
+    LZ4 = 4,
     /// Custom compression algorithm (since 24w05a)
-    Custom,
+    Custom = 127,
 }
 
 impl Compression {
@@ -162,14 +160,14 @@ impl ChunkReader for AnvilChunkFormat {
             .read_exact(&mut timestamp_table)
             .map_err(|err| ChunkReadingError::IoError(err.kind()))?;
 
-        let modulus = |a: i32, b: i32| ((a % b) + b) % b;
-        let chunk_x = modulus(at.x, 32) as u32;
-        let chunk_z = modulus(at.z, 32) as u32;
+        let chunk_x = at.x & 0x1F;
+        let chunk_z = at.z & 0x1F;
         let table_entry = (chunk_x + chunk_z * 32) * 4;
 
-        let mut offset = vec![0u8];
-        offset.extend_from_slice(&location_table[table_entry as usize..table_entry as usize + 3]);
-        let offset = u32::from_be_bytes(offset.try_into().unwrap()) as u64 * 4096;
+        let mut offset = BytesMut::new();
+        offset.put_u8(0);
+        offset.extend_from_slice(&location_table[table_entry as usize..table_entry as usize + 3]);
+        let offset = offset.get_u32() as u64 * 4096;
         let size = location_table[table_entry as usize + 3] as usize * 4096;
 
         if offset == 0 && size == 0 {
@@ -188,12 +186,12 @@ impl ChunkReader for AnvilChunkFormat {
             out
         };
 
-        let header: Vec<u8> = file_buf.drain(0..5).collect();
-        if header.len() != 5 {
+        let mut header: Bytes = file_buf.drain(0..5).collect();
+        if header.remaining() != 5 {
             return Err(ChunkReadingError::InvalidHeader);
         }
-        let size = u32::from_be_bytes(header[..4].try_into().unwrap());
-        let compression = header[4];
+        let size = header.get_u32();
+        let compression = header.get_u8();
 
         let compression = Compression::from_byte(compression)
             .map_err(|_| ChunkReadingError::Compression(CompressionError::UnknownCompression))?;
@@ -263,14 +261,15 @@ impl ChunkWriter for AnvilChunkFormat {
                 .map_err(|err| ChunkWritingError::IoError(err.kind()))?;
         }
 
-        let chunk_x = modulus(at.x, 32) as u32;
-        let chunk_z = modulus(at.z, 32) as u32;
+        let chunk_x = at.x & 0x1F;
+        let chunk_z = at.z & 0x1F;
 
         let table_entry = (chunk_x + chunk_z * 32) * 4;
 
-        let mut offset = vec![0u8];
+        let mut offset = BytesMut::new();
+        offset.put_u8(0);
         offset.extend_from_slice(&location_table[table_entry as usize..table_entry as usize + 3]);
-        let at_offset = u32::from_be_bytes(offset.try_into().unwrap()) as u64 * 4096;
+        let at_offset = offset.get_u32() as u64 * 4096;
         let at_size = location_table[table_entry as usize + 3] as usize * 4096;
 
         let mut end_index = 4096 * 2;
@@ -280,12 +279,13 @@ impl ChunkWriter for AnvilChunkFormat {
                 .chunks(4)
                 .enumerate()
                 .filter_map(|(index, v)| {
-                    if table_entry / 4 == index as u32 {
+                    if table_entry / 4 == index as i32 {
                         return None;
                     }
-                    let mut offset = vec![0u8];
+                    let mut offset = BytesMut::new();
+                    offset.put_u8(0);
                     offset.extend_from_slice(&v[0..3]);
-                    let offset = u32::from_be_bytes(offset.try_into().unwrap()) as u64 * 4096;
+                    let offset = offset.get_u32() as u64 * 4096;
                     let size = v[3] as usize * 4096;
                     if offset == 0 && size == 0 {
                         return None;
@@ -344,9 +344,10 @@ impl ChunkWriter for AnvilChunkFormat {
             }
         } else {
             for (offset, size) in location_table.chunks(4).filter_map(|v| {
-                let mut offset = vec![0u8];
+                let mut offset = BytesMut::new();
+                offset.put_u8(0);
                 offset.extend_from_slice(&v[0..3]);
-                let offset = u32::from_be_bytes(offset.try_into().unwrap()) as u64 * 4096;
+                let offset = offset.get_u32() as u64 * 4096;
                 let size = v[3] as usize * 4096;
                 if offset == 0 && size == 0 {
                     return None;
@@ -359,8 +360,6 @@ impl ChunkWriter for AnvilChunkFormat {
 
         let location_bytes = &(end_index as u32 / 4096).to_be_bytes()[1..4];
         let size_bytes = [(bytes.len().div_ceil(4096)) as u8];
-        dbg!(end_index, bytes.len());
-        dbg!(&location_bytes, &size_bytes);
         location_table[table_entry as usize..table_entry as usize + 4]
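Each chunk payload now begins with a five-byte header: a big-endian u32 size (which, per the comment above, also counts the compression-scheme byte, hence the + 1) followed by the scheme byte itself. A round-trip sketch of just that header using the bytes crate (standalone illustration, not code from the patch):

use bytes::{Buf, BufMut, BytesMut};

/// Build the 5-byte payload header: size (u32, big-endian), then scheme byte.
fn encode_header(stored_size: u32, scheme: u8) -> BytesMut {
    let mut header = BytesMut::with_capacity(5);
    header.put_u32(stored_size);
    header.put_u8(scheme);
    header
}

fn main() {
    // 4096 compressed bytes plus the scheme byte; ZLib is scheme 2.
    let mut header = encode_header(4096 + 1, 2);
    // get_* reads consume from the front, matching the parser in read_chunk.
    assert_eq!(header.get_u32(), 4097);
    assert_eq!(header.get_u8(), 2);
}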
             .as_mut()
             .copy_from_slice(&[location_bytes, &size_bytes].concat());
@@ -374,6 +373,16 @@ impl ChunkWriter for AnvilChunkFormat {
         // dbg!(&location_table.iter().map(|v| v.to_string()).join(","));
 
         region_file.seek(SeekFrom::Start(end_index)).unwrap(); // TODO
+
+        let mut header: BytesMut = BytesMut::with_capacity(5);
+        // total chunk size includes the byte representing the compression
+        // scheme, so +1.
+        header.put_u32(bytes.len() as u32 + 1);
+        // compression scheme
+        header.put_u8(compression as u8);
+        region_file
+            .write_all(&header)
+            .expect("Failed to write header");
         region_file.write_all(&bytes).unwrap_or_else(|_| {
             panic!(
                 "Region file r.{}.{}.mca got corrupted, sorry",
@@ -381,7 +390,9 @@ impl ChunkWriter for AnvilChunkFormat {
             )
         });
 
-        // region_file.write_all(&vec![0u8; 4096]);
+        region_file
+            .write_all(&vec![0u8; 4096])
+            .expect("Failed to add padding");
 
         Ok(())
     }
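The wrapper type introduced in the next hunk exists so the reader can probe the Status field cheaply before paying for a full ChunkNbt deserialization. The same two-phase idea, sketched standalone against fastnbt (StatusProbe is an illustrative name, and the status is read here as a plain string rather than an enum):

use serde::Deserialize;

/// Probe type: deserialize only the `Status` field, ignore the rest of the chunk.
#[derive(Deserialize)]
#[serde(rename_all = "PascalCase")]
struct StatusProbe {
    status: String,
}

/// Cheap pre-check before paying for a full chunk deserialization.
fn is_fully_generated(nbt_bytes: &[u8]) -> bool {
    fastnbt::from_bytes::<StatusProbe>(nbt_bytes)
        .map(|probe| probe.status == "minecraft:full")
        .unwrap_or(false)
}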
diff --git a/pumpkin-world/src/chunk/mod.rs b/pumpkin-world/src/chunk/mod.rs
index 6295375e..3abcf0c3 100644
--- a/pumpkin-world/src/chunk/mod.rs
+++ b/pumpkin-world/src/chunk/mod.rs
@@ -123,6 +123,7 @@ struct ChunkSection {
 #[serde(rename_all = "PascalCase")]
 struct ChunkNbt {
     data_version: i32,
+    status: ChunkStatus,
     // #[serde(rename = "xPos")]
     // x_pos: i32,
     // #[serde(rename = "yPos")]
@@ -134,8 +135,7 @@ struct ChunkNbt {
     heightmaps: ChunkHeightmaps,
 }
 
-#[derive(Deserialize, Debug, PartialEq, Eq)]
-#[serde(tag = "Status")]
+#[derive(Serialize, Deserialize, Debug, PartialEq, Eq)]
 enum ChunkStatus {
     #[serde(rename = "minecraft:empty")]
     Empty,
@@ -254,10 +254,18 @@ impl Index<ChunkRelativeBlockCoordinates> for ChunkBlocks {
     }
 }
 
+// We can't use a serde tag here because it would break ChunkNbt, and the field
+// needs a capital S on disk, hence the "Status" rename.
+#[derive(Serialize, Deserialize, Debug)]
+#[serde(rename_all = "PascalCase")]
+pub struct ChunkStatusWrapper {
+    status: ChunkStatus,
+}
+
 impl ChunkData {
     pub fn from_bytes(chunk_data: &[u8], at: Vector2<i32>) -> Result<Self, ChunkParsingError> {
-        if fastnbt::from_bytes::<ChunkStatus>(chunk_data)
+        if fastnbt::from_bytes::<ChunkStatusWrapper>(chunk_data)
             .map_err(|_| ChunkParsingError::FailedReadStatus)?
+            .status
             != ChunkStatus::Full
         {
             return Err(ChunkParsingError::ChunkNotGenerated);
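The rewritten packing loop in this patch stores palette indices least-significant bits first and never lets an index straddle two longs, which is why a partially filled long is flushed at the end. A worked round-trip of that packing scheme (standalone sketch; assumes every index already fits in `bits`):

/// Pack indices LSB-first, `bits` per entry, never straddling a u64 boundary.
fn pack(indices: &[u64], bits: u32) -> Vec<u64> {
    let per_long = 64 / bits;
    indices
        .chunks(per_long as usize)
        .map(|chunk| {
            chunk
                .iter()
                .enumerate()
                .fold(0u64, |acc, (i, &idx)| acc | (idx << (i as u32 * bits)))
        })
        .collect()
}

/// Inverse of `pack`, reading back `count` indices.
fn unpack(longs: &[u64], bits: u32, count: usize) -> Vec<u64> {
    let per_long = 64 / bits;
    let mask = (1u64 << bits) - 1;
    (0..count)
        .map(|i| {
            let long = longs[i / per_long as usize];
            (long >> ((i as u32 % per_long) * bits)) & mask
        })
        .collect()
}

fn main() {
    let indices = vec![1, 0, 15, 7, 2, 2, 9, 3];
    let packed = pack(&indices, 4); // 16 four-bit indices fit per long
    assert_eq!(unpack(&packed, 4, indices.len()), indices);
}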
From 6d49ba8bb150055d1637541d58f9aa28fb27ed94 Mon Sep 17 00:00:00 2001
From: Alexander Medvedev
Date: Mon, 23 Dec 2024 12:38:22 +0100
Subject: [PATCH 4/6] write x_pos and z_pos

---
 pumpkin-world/src/chunk/anvil.rs | 70 +++++++++++++++++++-------------
 pumpkin-world/src/chunk/mod.rs   | 60 ++++++++++++++++-----------
 2 files changed, 78 insertions(+), 52 deletions(-)

diff --git a/pumpkin-world/src/chunk/anvil.rs b/pumpkin-world/src/chunk/anvil.rs
index 769e4344..6d1f943e 100644
--- a/pumpkin-world/src/chunk/anvil.rs
+++ b/pumpkin-world/src/chunk/anvil.rs
@@ -7,6 +7,7 @@ use std::{
 use bytes::*;
 use fastnbt::LongArray;
 use flate2::bufread::{GzDecoder, GzEncoder, ZlibDecoder, ZlibEncoder};
+use pumpkin_core::math::ceil_log2;
 
 use crate::{
     block::block_registry::BLOCK_ID_TO_REGISTRY_ID, chunk::ChunkWritingError, level::LevelFolder,
@@ -219,19 +220,7 @@ impl ChunkWriter for AnvilChunkFormat {
         at: &pumpkin_core::math::vector2::Vector2<i32>,
     ) -> Result<(), super::ChunkWritingError> {
         // TODO: update timestamp
-
-        let bytes = self
-            .to_bytes(chunk_data)
-            .map_err(|err| ChunkWritingError::ChunkSerializingError(err.to_string()))?;
-        // TODO: config
-        let compression = Compression::ZLib;
-        let bytes = compression
-            // TODO: config
-            .compress_data(&bytes, 6)
-            .map_err(ChunkWritingError::Compression)?;
-
         let region = (at.x >> 5, at.z >> 5);
-
         let mut region_file = OpenOptions::new()
             .read(true)
             .write(true)
@@ -244,6 +233,16 @@ impl ChunkWriter for AnvilChunkFormat {
             )
             .map_err(|err| ChunkWritingError::IoError(err.kind()))?;
 
+        let bytes = self
+            .to_bytes(chunk_data)
+            .map_err(|err| ChunkWritingError::ChunkSerializingError(err.to_string()))?;
+        // TODO: config
+        let compression = Compression::ZLib;
+        let bytes = compression
+            // TODO: config
+            .compress_data(&bytes, 6)
+            .map_err(ChunkWritingError::Compression)?;
+
         let mut location_table: [u8; 4096] = [0; 4096];
         let mut timestamp_table: [u8; 4096] = [0; 4096];
 
@@ -251,8 +250,9 @@ impl ChunkWriter for AnvilChunkFormat {
             .metadata()
             .map_err(|err| ChunkWritingError::IoError(err.kind()))?;
 
+        // The header consists of 8 KiB of data
         // fill the location and timestamp tables if they exist
-        if file_meta.len() >= 4096 * 2 {
+        if file_meta.len() >= 8192 {
             region_file
                 .read_exact(&mut location_table)
                 .map_err(|err| ChunkWritingError::IoError(err.kind()))?;
@@ -390,8 +390,11 @@ impl ChunkWriter for AnvilChunkFormat {
             )
         });
 
+        let sector_count = (bytes.len() + 4).div_ceil(4096) * 4096;
+
+        // padding
         region_file
-            .write_all(&vec![0u8; 4096])
+            .write_all(&vec![0u8; sector_count])
             .expect("Failed to add padding");
 
         Ok(())
@@ -418,22 +421,31 @@ impl AnvilChunkFormat {
                 }),
             );
 
-            let block_bit_size = {
-                let size = 64 - (palette.len() as i64 - 1).leading_zeros();
-                std::cmp::max(4, size)
-            } as usize;
-            let blocks_in_pack = 64 / block_bit_size;
-            let mut section_longs = Vec::new();
-
-            let mut current_pack_long = 0i64;
+            let block_bit_size = if palette.len() < 16 {
+                4
+            } else {
+                ceil_log2(palette.len() as u32).max(4)
+            };
+            let _blocks_in_pack = 64 / block_bit_size;
 
-            for block_pack in blocks.chunks(blocks_in_pack) {
-                for block in block_pack {
-                    let index = palette.get(block).expect("Just added all unique").1;
-                    current_pack_long = current_pack_long << block_bit_size | index as i64;
+            let mut section_longs = Vec::new();
+            let mut current_pack_long: i64 = 0;
+            let mut bits_used_in_pack: u32 = 0;
+
+            for block in blocks {
+                let index = palette.get(block).expect("Just added all unique").1;
+                current_pack_long |= (index as i64) << bits_used_in_pack;
+                bits_used_in_pack += block_bit_size as u32;
+
+                if bits_used_in_pack >= 64 {
+                    section_longs.push(current_pack_long);
+                    current_pack_long = 0;
+                    bits_used_in_pack = 0;
                 }
+            }
+
+            if bits_used_in_pack > 0 {
                 section_longs.push(current_pack_long);
-                current_pack_long = 0;
             }
 
             sections.push(ChunkSection {
@@ -444,7 +456,7 @@ impl AnvilChunkFormat {
                     palette: palette
                         .into_iter()
                         .map(|entry| PaletteEntry {
                             name: entry.1 .0.clone(),
-                            _properties: None,
+                            properties: None,
                         })
                         .collect(),
                 }),
@@ -453,6 +465,8 @@ impl AnvilChunkFormat {
 
         let nbt = ChunkNbt {
             data_version: WORLD_DATA_VERSION,
+            x_pos: chunk_data.position.x,
+            z_pos: chunk_data.position.z,
             status: super::ChunkStatus::Full,
             heightmaps: chunk_data.blocks.heightmap.clone(),
             sections,
diff --git a/pumpkin-world/src/chunk/mod.rs b/pumpkin-world/src/chunk/mod.rs
index 3abcf0c3..ff721c2d 100644
--- a/pumpkin-world/src/chunk/mod.rs
+++ b/pumpkin-world/src/chunk/mod.rs
@@ -1,7 +1,6 @@
 use fastnbt::LongArray;
-use pumpkin_core::math::vector2::Vector2;
+use pumpkin_core::math::{ceil_log2, vector2::Vector2};
 use serde::{Deserialize, Serialize};
-use std::cmp::max;
 use std::collections::HashMap;
 use std::ops::Index;
 use thiserror::Error;
@@ -92,15 +91,9 @@ pub struct ChunkBlocks {
 #[derive(Serialize, Deserialize, Debug, Clone)]
 #[serde(rename_all = "PascalCase")]
 struct PaletteEntry {
+    // bloc name
     name: String,
-    _properties: Option<HashMap<String, String>>,
-}
-
-#[derive(Serialize, Deserialize, Debug, Clone)]
-struct ChunkSectionBlockStates {
-    // #[serde(with = "LongArray")]
-    data: Option<LongArray>,
-    palette: Vec<PaletteEntry>,
+    properties: Option<HashMap<String, String>>,
 }
 
 #[derive(Deserialize, Serialize, Debug, Clone)]
@@ -119,17 +112,24 @@ struct ChunkSection {
     block_states: Option<ChunkSectionBlockStates>,
 }
 
+#[derive(Serialize, Deserialize, Debug, Clone)]
+struct ChunkSectionBlockStates {
+    // #[serde(with = "LongArray")]
+    data: Option<LongArray>,
+    palette: Vec<PaletteEntry>,
+}
+
 #[derive(Serialize, Deserialize, Debug)]
 #[serde(rename_all = "PascalCase")]
 struct ChunkNbt {
     data_version: i32,
-    status: ChunkStatus,
-    // #[serde(rename = "xPos")]
-    // x_pos: i32,
+    #[serde(rename = "xPos")]
+    x_pos: i32,
     // #[serde(rename = "yPos")]
-    // y_pos: i32,
-    // #[serde(rename = "zPos")]
-    // z_pos: i32,
+    //y_pos: i32,
+    #[serde(rename = "zPos")]
+    z_pos: i32,
+    status: ChunkStatus,
     #[serde(rename = "sections")]
     sections: Vec<ChunkSection>,
     heightmaps: ChunkHeightmaps,
@@ -262,7 +262,10 @@ pub struct ChunkStatusWrapper {
 }
 
 impl ChunkData {
-    pub fn from_bytes(chunk_data: &[u8], at: Vector2<i32>) -> Result<Self, ChunkParsingError> {
+    pub fn from_bytes(
+        chunk_data: &[u8],
+        position: Vector2<i32>,
+    ) -> Result<Self, ChunkParsingError> {
         if fastnbt::from_bytes::<ChunkStatusWrapper>(chunk_data)
             .map_err(|_| ChunkParsingError::FailedReadStatus)?
             .status
@@ -274,6 +277,17 @@ impl ChunkData {
         let chunk_data = fastnbt::from_bytes::<ChunkNbt>(chunk_data)
             .map_err(|e| ChunkParsingError::ErrorDeserializingChunk(e.to_string()))?;
 
+        if chunk_data.x_pos != position.x || chunk_data.z_pos != position.z {
+            log::error!(
+                "Expected chunk at {}:{}, but got {}:{}",
+                position.x,
+                position.z,
+                chunk_data.x_pos,
+                chunk_data.z_pos
+            );
+            // lets still continue
+        }
+
         // this needs to be boxed, otherwise it will cause a stack-overflow
         let mut blocks = ChunkBlocks::empty_with_heightmap(chunk_data.heightmaps);
         let mut block_index = 0; // which block we're currently at
@@ -305,9 +319,10 @@ impl ChunkData {
         };
 
         // How many bits each block has in one of the palette u64s
-        let block_bit_size = {
-            let size = 64 - (palette.len() as i64 - 1).leading_zeros();
-            max(4, size)
+        let block_bit_size = if palette.len() < 16 {
+            4
+        } else {
+            ceil_log2(palette.len() as u32).max(4)
         };
         // How many blocks there are in one of the palettes u64s
         let blocks_in_palette = 64 / block_bit_size;
@@ -341,10 +356,7 @@ impl ChunkData {
             }
         }
 
-        Ok(ChunkData {
-            blocks,
-            position: at,
-        })
+        Ok(ChunkData { blocks, position })
     }
 }

From 627db9461807d971de589fe84614fddafe9c94f2 Mon Sep 17 00:00:00 2001
From: Alexander Medvedev
Date: Mon, 30 Dec 2024 11:55:48 +0100
Subject: [PATCH 5/6] cargo: fmt

---
 pumpkin/src/entity/player.rs | 1 -
 1 file changed, 1 deletion(-)

diff --git a/pumpkin/src/entity/player.rs b/pumpkin/src/entity/player.rs
index 8c61772c..d293df0b 100644
--- a/pumpkin/src/entity/player.rs
+++ b/pumpkin/src/entity/player.rs
@@ -245,7 +245,6 @@ impl Player {
 
         // Remove left over entries from all possibly loaded chunks
         level.clean_memory(&radial_chunks);
-
         log::debug!(
             "Removed player id {} ({}) ({} chunks remain cached)",
             self.gameprofile.name,

From fef17de54f9bae12d305d0b73478c60d33ed7cf2 Mon Sep 17 00:00:00 2001
From: Alexander Medvedev
Date: Tue, 31 Dec 2024 12:55:00 +0100
Subject: [PATCH 6/6] maybe improve something, maybe made something worse

---
 pumpkin-world/src/chunk/anvil.rs | 52 +++++++++++++++++---------------
 pumpkin-world/src/chunk/mod.rs   |  2 +-
 2 files changed, 28 insertions(+), 26 deletions(-)

diff --git a/pumpkin-world/src/chunk/anvil.rs b/pumpkin-world/src/chunk/anvil.rs
index 6d1f943e..5b151783 100644
--- a/pumpkin-world/src/chunk/anvil.rs
+++ b/pumpkin-world/src/chunk/anvil.rs
@@ -233,15 +233,19 @@ impl ChunkWriter for AnvilChunkFormat {
             )
             .map_err(|err| ChunkWritingError::IoError(err.kind()))?;
 
-        let bytes = self
+        let raw_bytes = self
            .to_bytes(chunk_data)
             .map_err(|err| ChunkWritingError::ChunkSerializingError(err.to_string()))?;
+        let mut buf = Vec::with_capacity(4096);
         // TODO: config
         let compression = Compression::ZLib;
-        let bytes = compression
-            // TODO: config
-            .compress_data(&bytes, 6)
-            .map_err(ChunkWritingError::Compression)?;
+        buf.put_u8(compression as u8);
+        buf.put_slice(
+            &compression
+                // TODO: config
+                .compress_data(&raw_bytes, 6)
+                .map_err(ChunkWritingError::Compression)?,
+        );
 
         let mut location_table: [u8; 4096] = [0; 4096];
         let mut timestamp_table: [u8; 4096] = [0; 4096];
@@ -359,7 +363,7 @@ impl ChunkWriter for AnvilChunkFormat {
         }
 
         let location_bytes = &(end_index as u32 / 4096).to_be_bytes()[1..4];
-        let size_bytes = [(bytes.len().div_ceil(4096)) as u8];
+        let size_bytes = [(buf.len().div_ceil(4096)) as u8];
         location_table[table_entry as usize..table_entry as usize + 4]
             .as_mut()
             .copy_from_slice(&[location_bytes, &size_bytes].concat());
@@ -374,23 +378,20 @@ impl ChunkWriter for AnvilChunkFormat {
         region_file.seek(SeekFrom::Start(end_index)).unwrap(); // TODO
 
-        let mut header: BytesMut = BytesMut::with_capacity(5);
-        // total chunk size includes the byte representing the compression
-        // scheme, so +1.
-        header.put_u32(bytes.len() as u32 + 1);
-        // compression scheme
-        header.put_u8(compression as u8);
+        let mut header: BytesMut = BytesMut::new();
+        // length
+        header.put_u32(buf.len() as u32);
         region_file
             .write_all(&header)
             .expect("Failed to write header");
-        region_file.write_all(&bytes).unwrap_or_else(|_| {
+        region_file.write_all(&buf).unwrap_or_else(|_| {
             panic!(
                 "Region file r.{}.{}.mca got corrupted, sorry",
                 region.0, region.1
             )
         });
 
-        let sector_count = (bytes.len() + 4).div_ceil(4096) * 4096;
+        let sector_count = (buf.len() + 4).div_ceil(4096) * 4096;
 
         // padding
         region_file
@@ -407,18 +408,19 @@ impl AnvilChunkFormat {
     pub fn to_bytes(&self, chunk_data: &ChunkData) -> Result<Vec<u8>, ChunkSerializingError> {
         let mut sections = Vec::new();
 
         for (i, blocks) in chunk_data.blocks.blocks.chunks(16 * 16 * 16).enumerate() {
             // get unique blocks
+            let palette = HashMap::<u16, &String>::from_iter(blocks.iter().map(|v| {
+                (
+                    *v,
+                    BLOCK_ID_TO_REGISTRY_ID
+                        .get(v)
+                        .expect("Tried saving a block which does not exist."),
+                )
+            }));
             let palette = HashMap::<u16, (&String, usize)>::from_iter(
-                blocks.iter().enumerate().map(|(i, v)| {
-                    (
-                        *v,
-                        (
-                            BLOCK_ID_TO_REGISTRY_ID
-                                .get(v)
-                                .expect("Tried saving a block which does not exist."),
-                            i,
-                        ),
-                    )
-                }),
+                palette
+                    .into_iter()
+                    .enumerate()
+                    .map(|(index, (block_id, registry_str))| (block_id, (registry_str, index))),
             );
diff --git a/pumpkin-world/src/chunk/mod.rs b/pumpkin-world/src/chunk/mod.rs
index ff721c2d..9c555b39 100644
--- a/pumpkin-world/src/chunk/mod.rs
+++ b/pumpkin-world/src/chunk/mod.rs
@@ -91,7 +91,7 @@ pub struct ChunkBlocks {
 #[derive(Serialize, Deserialize, Debug, Clone)]
 #[serde(rename_all = "PascalCase")]
 struct PaletteEntry {
-    // bloc name
+    // block name
     name: String,
     properties: Option<HashMap<String, String>>,
 }
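With the series applied, AnvilChunkFormat implements both ChunkReader and ChunkWriter, so a save/load cycle can be exercised directly. A sketch of how such a smoke test might look from inside the pumpkin-world crate (the setup, the Vector2::new constructor, and the assertion target are assumptions, not code from these patches):

use pumpkin_core::math::vector2::Vector2;

use crate::{
    chunk::{anvil::AnvilChunkFormat, ChunkData, ChunkReader, ChunkWriter},
    level::LevelFolder,
};

/// Write a chunk through the ChunkWriter half, then read it back
/// through the ChunkReader half and check the position survives.
fn save_and_reload_smoke_test(folder: &LevelFolder, chunk: &ChunkData) {
    let pos = Vector2::new(0, 0);
    AnvilChunkFormat
        .write_chunk(chunk, folder, &pos)
        .expect("write failed");
    let reloaded = AnvilChunkFormat
        .read_chunk(folder, &pos)
        .expect("read failed");
    assert_eq!(reloaded.position, pos);
}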