Skip to content

Commit

Permalink
Fix: Server panics when loading certain chunks
Browse files Browse the repository at this point in the history
Cause:
The size given in the file for the length of the chunk includes the compression byte, so when reading a chunk located at the end of the file, it would read 1 byte more than should be possible, causing a panic.
  • Loading branch information
lukas0008 committed Aug 13, 2024
1 parent 59749af commit 85785c9
Showing 1 changed file with 8 additions and 6 deletions.
14 changes: 8 additions & 6 deletions pumpkin-world/src/world.rs
Original file line number Diff line number Diff line change
Expand Up @@ -81,9 +81,10 @@ impl Level {
.into_iter()
.map(|(region, chunk_vec)| {
let mut path = self.root_folder.clone();
let chunk_vec = chunk_vec.collect_vec();
path.push("region");
path.push(format!("r.{}.{}.mca", region.0, region.1));
self.read_region_chunks(path, chunk_vec.collect_vec())
self.read_region_chunks(path, chunk_vec)
}),
)
.await
Expand All @@ -93,12 +94,12 @@ impl Level {
}
async fn read_region_chunks(
&self,
region_file: PathBuf,
region_file_path: PathBuf,
chunks: Vec<(i32, i32)>,
) -> Vec<((i32, i32), Result<ChunkData, WorldError>)> {
// dbg!(at);
// return different error when file is not found (because that means that the chunks have just not been generated yet)
let mut region_file = match File::open(region_file).await {
let mut region_file = match File::open(&region_file_path).await {
Ok(f) => f,
Err(err) => match err.kind() {
std::io::ErrorKind::NotFound => {
Expand Down Expand Up @@ -190,12 +191,13 @@ impl Level {

match compression {
Compression::Zlib => {}
_ => panic!(), // TODO: support other compression types
_ => panic!("Compression type is not supported"), // TODO: support other compression types
}

let size = u32::from_be_bytes(header[0..4].try_into().unwrap());

let chunk_data = file_buf.drain(0..size as usize).collect_vec();

// size includes the compression scheme byte, so we need to subtract 1
let chunk_data = file_buf.drain(0..size as usize - 1).collect_vec();

((old_chunk_x, old_chunk_z), Ok(chunk_data))
}
Expand Down

0 comments on commit 85785c9

Please sign in to comment.