Commit

now we got sinkholes

Alvsch committed Jan 5, 2025
1 parent 25fc75f commit 714ff0c
Showing 3 changed files with 31 additions and 55 deletions.
1 change: 1 addition & 0 deletions pumpkin-world/Cargo.toml
@@ -30,6 +30,7 @@
 lz4 = "1.28.0"
 
 file-guard = "0.2.0"
 fs2 = "0.4.3"
+indexmap = "2.7.0"
 
 enum_dispatch = "0.3.13"
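
The new indexmap dependency is load-bearing for the palette rewrite in anvil.rs below: an IndexMap iterates in insertion order, so palette index i in the packed block data always refers to the i-th palette entry that gets serialized. A minimal sketch of that property; the block ids and registry names here are made up for illustration:

use indexmap::IndexMap;

fn main() {
    // IndexMap preserves insertion order on iteration, unlike HashMap.
    let mut palette: IndexMap<u16, &str> = IndexMap::new();
    palette.insert(9, "minecraft:grass_block");
    palette.insert(1, "minecraft:stone");
    palette.insert(0, "minecraft:air");

    // Iterates grass_block, stone, air -- the order entries were inserted,
    // so enumerate() yields stable indices for the packed data to reference.
    for (i, (id, name)) in palette.iter().enumerate() {
        println!("index {i} -> block id {id} ({name})");
    }
}
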
83 changes: 29 additions & 54 deletions pumpkin-world/src/chunk/anvil.rs
@@ -1,13 +1,14 @@
 use std::{
-    collections::HashMap,
+    collections::HashSet,
     fs::OpenOptions,
-    io::{Read, Seek, SeekFrom, Write}, os::linux::raw,
+    io::{Read, Seek, SeekFrom, Write},
 };
 
 use bytes::*;
 use fastnbt::LongArray;
 use flate2::bufread::{GzDecoder, GzEncoder, ZlibDecoder, ZlibEncoder};
 use fs2::FileExt;
+use indexmap::IndexMap;
 use pumpkin_core::math::ceil_log2;
 
 use crate::{
@@ -139,8 +140,6 @@ impl ChunkReader for AnvilChunkFormat
     ) -> Result<super::ChunkData, ChunkReadingError> {
         let region = (at.x >> 5, at.z >> 5);
 
-        let log = at.x == 1 && at.z == 0;
-
         let mut region_file = OpenOptions::new()
             .read(true)
             .open(
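
For context, at.x >> 5 maps a chunk coordinate to its region file: a region holds 32x32 chunks, and an arithmetic right shift floors toward negative infinity, which plain integer division would get wrong for negative chunks. A quick standalone check with illustrative coordinates:

fn region_of(chunk_x: i32, chunk_z: i32) -> (i32, i32) {
    // 32 chunks per region axis; >> 5 divides by 32 with floor semantics.
    (chunk_x >> 5, chunk_z >> 5)
}

fn main() {
    assert_eq!(region_of(0, 0), (0, 0));
    assert_eq!(region_of(31, 31), (0, 0));
    // Chunk -1 belongs to region -1: -1 >> 5 == -1, while -1 / 32 == 0.
    assert_eq!(region_of(32, -1), (1, -1));
}
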
@@ -182,50 +181,24 @@ impl ChunkReader for AnvilChunkFormat
 
         // Read the file using the offset and size
         let mut file_buf = {
-            let v = region_file
+            region_file
                 .seek(std::io::SeekFrom::Start(offset_at))
                 .map_err(|_| ChunkReadingError::RegionIsInvalid)?;
-            if log {
-                dbg!(v);
-                dbg!(&region_file.stream_position());
-            }
             let mut out = vec![0; size_at];
             region_file
                 .read_exact(&mut out)
                 .map_err(|_| ChunkReadingError::RegionIsInvalid)?;
             out
         };
 
-        if log {
-            println!("{:?}", file_buf.to_vec());
-        }
         let mut header: Bytes = file_buf.drain(0..5).collect();
-        if log {
-            dbg!(header.to_vec());
-        }
-
         if header.remaining() != 5 {
             return Err(ChunkReadingError::InvalidHeader);
         }
 
         let size = header.get_u32();
         let compression = header.get_u8();
 
-        if log {
-            dbg!(
-                "[...",
-                compression,
-                offset_at,
-                size,
-                size_at,
-                table_entry,
-                chunk_x,
-                chunk_z,
-                file_buf.len() + 5,
-                "..]"
-            );
-        }
-
         let compression = Compression::from_byte(compression)
             .map_err(|_| ChunkReadingError::Compression(CompressionError::UnknownCompression))?;
 
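
The 5-byte header handled above follows the Anvil chunk layout: a big-endian u32 payload length followed by a single compression-scheme byte (1 = GZip, 2 = Zlib, 3 = uncompressed). A self-contained sketch of the same parse, with a plain &str error standing in for ChunkReadingError:

use bytes::{Buf, Bytes};

/// Parse an Anvil chunk header: 4-byte big-endian length, then compression id.
fn parse_chunk_header(mut header: Bytes) -> Result<(u32, u8), &'static str> {
    if header.remaining() < 5 {
        return Err("header shorter than 5 bytes");
    }
    let size = header.get_u32(); // payload length in bytes, big-endian
    let compression = header.get_u8(); // 1 = GZip, 2 = Zlib, 3 = uncompressed
    Ok((size, compression))
}

fn main() {
    // A 42-byte, Zlib-compressed payload.
    let raw = Bytes::from_static(&[0, 0, 0, 42, 2]);
    assert_eq!(parse_chunk_header(raw), Ok((42, 2)));
}
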
@@ -254,7 +227,6 @@ impl ChunkWriter for AnvilChunkFormat
         // return Ok(()); // REMOVE
         // TODO: update timestamp
         let region = (at.x >> 5, at.z >> 5);
-        let _log = region == (0, 0);
 
         let mut region_file = OpenOptions::new()
             .read(true)
@@ -324,11 +296,13 @@ impl ChunkWriter for AnvilChunkFormat
             u32::from_be_bytes([0, chunk_location[0], chunk_location[1], chunk_location[2]]) as u64
                 * 4096;
         let sector_size = chunk_location[3] as usize * 4096;
-        if length as usize > sector_size {
-            panic!("AAAAAAAAAAAAAHHHHHHHHHHHHH!!");
-        }
 
         // TODO: move shit
 
+        assert!(
+            length as usize <= sector_size,
+            "AAAAAAAAAAAAAHHHHHHHHHHHHH!!"
+        );
 
         // Write new location and timestamp table
         region_file.seek(SeekFrom::Start(0)).unwrap();
         region_file
@@ -337,7 +311,7 @@ impl ChunkWriter for AnvilChunkFormat
 
         // Seek to where the chunk is located
         region_file.seek(SeekFrom::Start(location_offset)).unwrap();
-
+        // Write header and payload
         region_file
             .write_all(&chunk_payload)
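
Both paths above rely on the region header layout: the location table stores one 4-byte entry per chunk, a 3-byte big-endian offset counted in 4096-byte sectors plus a 1-byte sector count, which is exactly what the from_be_bytes line and the sector_size computation unpack. A standalone decoding sketch; the entry value is hypothetical:

/// Decode a 4-byte region location entry into (byte offset, allocated bytes).
fn decode_location(entry: [u8; 4]) -> (u64, usize) {
    // First three bytes: start of the chunk, in 4 KiB sectors, big-endian.
    let offset = u32::from_be_bytes([0, entry[0], entry[1], entry[2]]) as u64 * 4096;
    // Fourth byte: how many 4 KiB sectors are allocated to the chunk.
    let allocated = entry[3] as usize * 4096;
    (offset, allocated)
}

fn main() {
    // Hypothetical entry: chunk starts at sector 2 and spans one sector,
    // so its payload (header included) must fit in 4096 bytes.
    assert_eq!(decode_location([0, 0, 2, 1]), (8192, 4096));
}
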
@@ -361,25 +335,26 @@ impl AnvilChunkFormat
     pub fn to_bytes(&self, chunk_data: &ChunkData) -> Result<Vec<u8>, ChunkSerializingError> {
         let mut sections = Vec::new();
 
-        for (i, blocks) in chunk_data.blocks.blocks.chunks(16 * 16 * 16).enumerate() {
+        for (i, blocks) in chunk_data.blocks.iter_subchunks().enumerate() {
             // get unique blocks
-            let mut palette = HashMap::new();
-            for block in blocks {
-                let len = palette.len();
-                palette.entry(*block).or_insert_with(|| {
-                    let registry_str = BLOCK_ID_TO_REGISTRY_ID
-                        .get(block)
-                        .expect("Tried saving a block which does not exist.")
-                        .as_str();
-                    (registry_str, len)
-                });
-            }
-
+            let unique_blocks: HashSet<_> = blocks.iter().collect();
+
+            let palette: IndexMap<_, _> = unique_blocks
+                .into_iter()
+                .enumerate()
+                .map(|(i, block)| {
+                    let name = BLOCK_ID_TO_REGISTRY_ID.get(block).unwrap().as_str();
+                    (block, (name, i))
+                })
+                .collect();
+
+            // Determine the number of bits needed to represent the largest index in the palette
             let block_bit_size = if palette.len() < 16 {
                 4
             } else {
                 ceil_log2(palette.len() as u32).max(4)
             };
+            // Calculate how many blocks can be packed into a single 64-bit integer
             let _blocks_in_pack = 64 / block_bit_size;
 
             let mut section_longs = Vec::new();
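
The block_bit_size rule above matches the packed-palette encoding: indices are at least 4 bits wide, and grow to ceil(log2(palette size)) once the palette no longer fits in 4 bits. A 20-entry palette, for instance, needs 5-bit indices. A standalone sketch, with a local ceil_log2 standing in for the pumpkin_core helper:

/// Bits per palette index: at least 4, otherwise enough to address every entry.
fn block_bit_size(palette_len: usize) -> u32 {
    // ceil(log2(n)) for n >= 2, mirroring pumpkin_core::math::ceil_log2.
    let ceil_log2 = |n: u32| 32 - (n - 1).leading_zeros();
    if palette_len < 16 {
        4
    } else {
        ceil_log2(palette_len as u32).max(4)
    }
}

fn main() {
    assert_eq!(block_bit_size(5), 4); // small palettes stay at the 4-bit floor
    assert_eq!(block_bit_size(16), 4); // 16 entries still fit in 4 bits
    assert_eq!(block_bit_size(20), 5); // 12 five-bit indices per 64-bit long
    assert_eq!(block_bit_size(300), 9);
}
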
@@ -391,19 +366,21 @@ impl AnvilChunkFormat
                 current_pack_long |= (index as i64) << bits_used_in_pack;
                 bits_used_in_pack += block_bit_size as u32;
 
+                // If the current 64-bit integer is full, push it to the section_longs and start a new one
                 if bits_used_in_pack >= 64 {
                     section_longs.push(current_pack_long);
                     current_pack_long = 0;
                     bits_used_in_pack = 0;
                 }
             }
 
+            // Push the last 64-bit integer if it contains any data
             if bits_used_in_pack > 0 {
                 section_longs.push(current_pack_long);
             }
 
             sections.push(ChunkSection {
-                y: i as i8,
+                y: i as i8 - 1,
                 block_states: Some(ChunkSectionBlockStates {
                     data: Some(LongArray::new(section_longs)),
                     palette: palette
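
The loop above packs palette indices least-significant bits first and flushes each i64 once 64 bits are used, so reading blocks back is just shifts and masks. A round-trip sketch for the common 4-bit case, where indices divide a long evenly; the index values are illustrative:

/// Unpack `bits`-wide palette indices from longs, LSB first. Sketch for bit
/// widths that divide 64 evenly (e.g. the 4-bit floor used by small palettes).
fn unpack(longs: &[i64], bits: u32, count: usize) -> Vec<u16> {
    let per_long = (64 / bits) as usize;
    let mask = (1u64 << bits) - 1;
    let mut out = Vec::with_capacity(count);
    for &long in longs {
        for i in 0..per_long {
            if out.len() == count {
                break;
            }
            out.push(((long as u64 >> (i as u32 * bits)) & mask) as u16);
        }
    }
    out
}

fn main() {
    let indices: Vec<u16> = (0..20).map(|i| i % 16).collect();

    // Pack exactly like the loop above: OR each index in, flush on 64 bits.
    let (mut longs, mut current, mut used) = (Vec::new(), 0i64, 0u32);
    for &idx in &indices {
        current |= (idx as i64) << used;
        used += 4;
        if used >= 64 {
            longs.push(current);
            current = 0;
            used = 0;
        }
    }
    if used > 0 {
        longs.push(current);
    }

    assert_eq!(unpack(&longs, 4, indices.len()), indices);
}
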
@@ -426,9 +403,7 @@ impl AnvilChunkFormat
             sections,
         };
 
-        let bytes = fastnbt::to_bytes(&nbt);
-
-        bytes.map_err(ChunkSerializingError::ErrorSerializingChunk)
+        fastnbt::to_bytes(&nbt).map_err(ChunkSerializingError::ErrorSerializingChunk)
     }
 }

2 changes: 1 addition & 1 deletion pumpkin-world/src/level.rs
@@ -246,7 +246,7 @@ impl Level
         let chunk_reader = self.chunk_reader.clone();
         let chunk_writer = self.chunk_writer.clone();
         let level_folder = self.level_folder.clone();
-        let world_gen = self.world_gen.clone();
+        let _world_gen = self.world_gen.clone();
         let chunk_pos = *at;
 
         let chunk = loaded_chunks
