Skip to content

Commit

Permalink
Add abstraction to support multiple world formats
Browse files Browse the repository at this point in the history
  • Loading branch information
Snowiiii committed Oct 14, 2024
1 parent 41e5355 commit df80ac2
Show file tree
Hide file tree
Showing 8 changed files with 219 additions and 198 deletions.
7 changes: 0 additions & 7 deletions Cargo.lock

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

1 change: 0 additions & 1 deletion Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -18,7 +18,6 @@ version = "0.1.0"
edition = "2021"

[profile.release]
debug = 1
lto = true
codegen-units = 1

Expand Down
1 change: 0 additions & 1 deletion pumpkin-world/Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -17,7 +17,6 @@ futures = "0.3"
flate2 = "1.0"
serde.workspace = true
serde_json = "1.0"
static_assertions = "1.1.0"
log.workspace = true

parking_lot.workspace = true
Expand Down
16 changes: 12 additions & 4 deletions pumpkin-world/src/block/block_state.rs
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
use std::collections::HashMap;

use crate::level::WorldError;
use thiserror::Error;

use super::block_registry::{Block, BlockCategory, BLOCKS};

Expand All @@ -21,16 +21,16 @@ impl BlockState {
pub fn new(
registry_id: &str,
properties: Option<&HashMap<String, String>>,
) -> Result<Self, WorldError> {
) -> Result<Self, BlockStateError> {
let block_registry = BLOCKS
.get(registry_id)
.ok_or(WorldError::BlockIdentifierNotFound)?;
.ok_or(BlockStateError::BlockIdentifierNotFound)?;
let mut block_states = block_registry.states.iter();

let block_state = match properties {
Some(properties) => block_states
.find(|state| &state.properties == properties)
.ok_or_else(|| WorldError::BlockStateIdNotFound)?,
.ok_or(BlockStateError::BlockStateIdNotFound)?,
None => block_states
.find(|state| state.is_default)
.expect("Every Block should have at least 1 default state"),
Expand Down Expand Up @@ -71,3 +71,11 @@ impl BlockState {
self.category == category
}
}

/// Errors produced when resolving a `BlockState` from a registry
/// identifier and an optional property map (see `BlockState::new`).
#[derive(Error, Debug)]
pub enum BlockStateError {
    /// No block with the given registry identifier exists in `BLOCKS`.
    #[error("The requested block identifier does not exist")]
    BlockIdentifierNotFound,
    /// The block exists, but none of its states match the requested properties.
    #[error("The requested block state id does not exist")]
    BlockStateIdNotFound,
}
144 changes: 144 additions & 0 deletions pumpkin-world/src/chunk/anvil.rs
Original file line number Diff line number Diff line change
@@ -0,0 +1,144 @@
use std::{
fs::OpenOptions,
io::{Read, Seek},
};

use flate2::bufread::{GzDecoder, ZlibDecoder};
use itertools::Itertools;

use crate::level::SaveFile;

use super::{ChunkData, ChunkReader, ChunkReadingError, CompressionError};

/// Reads chunks stored in the Anvil region format (`r.<x>.<z>.mca` files).
///
/// Stateless: every call to [`ChunkReader::read_chunk`] opens and parses the
/// region file on demand, so a single instance can be shared freely.
// The unit struct has no fields, so deriving `Default` is equivalent to the
// previous hand-written impl that forwarded to `new()`.
#[derive(Default)]
pub struct AnvilChunkReader {}

impl AnvilChunkReader {
    /// Creates a new reader. Equivalent to `AnvilChunkReader::default()`.
    pub fn new() -> Self {
        Self {}
    }
}

/// Compression scheme of a chunk payload inside a region file.
///
/// The on-disk scheme byte is decoded by [`Compression::from_byte`]:
/// 1 = Gzip, 2 = Zlib, 3 = uncompressed, 4 = LZ4.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum Compression {
    /// Gzip-compressed payload (scheme byte 1).
    Gzip,
    /// Zlib-compressed payload (scheme byte 2).
    Zlib,
    /// Uncompressed payload (scheme byte 3).
    None,
    /// LZ4-compressed payload (scheme byte 4); decompression is not
    /// implemented yet (`decompress_data` hits `todo!()`).
    LZ4,
}

impl Compression {
    /// Maps the scheme byte from a chunk's payload header to a
    /// [`Compression`] variant; returns `None` for unknown bytes.
    pub fn from_byte(byte: u8) -> Option<Self> {
        match byte {
            1 => Some(Self::Gzip),
            2 => Some(Self::Zlib),
            3 => Some(Self::None),
            4 => Some(Self::LZ4),
            _ => None,
        }
    }

    /// Decompresses a chunk payload according to `self`.
    ///
    /// Takes the buffer by value so the `None` scheme can return it without
    /// copying. Errors wrap the underlying I/O error from the decoder.
    ///
    /// NOTE(review): `with_capacity(compressed_data.len())` is only a lower
    /// bound — decompressed data is usually larger, so the vec may still grow.
    fn decompress_data(&self, compressed_data: Vec<u8>) -> Result<Vec<u8>, CompressionError> {
        match self {
            Compression::Gzip => {
                let mut z = GzDecoder::new(&compressed_data[..]);
                let mut chunk_data = Vec::with_capacity(compressed_data.len());
                z.read_to_end(&mut chunk_data)
                    .map_err(CompressionError::GZipError)?;
                Ok(chunk_data)
            }
            Compression::Zlib => {
                let mut z = ZlibDecoder::new(&compressed_data[..]);
                let mut chunk_data = Vec::with_capacity(compressed_data.len());
                z.read_to_end(&mut chunk_data)
                    .map_err(CompressionError::ZlibError)?;
                Ok(chunk_data)
            }
            // Payload stored uncompressed — hand the buffer back as-is.
            Compression::None => Ok(compressed_data),
            // LZ4 support is not implemented yet.
            Compression::LZ4 => todo!(),
        }
    }
}

impl ChunkReader for AnvilChunkReader {
    /// Reads the chunk at absolute chunk position `at` from the Anvil region
    /// file `r.<x>.<z>.mca` inside `save_file`'s region folder. Each region
    /// file holds a 32x32 grid of chunks.
    ///
    /// # Errors
    /// - `ChunkNotExist` — the region file is missing or the chunk's
    ///   location-table slot is empty (never generated/saved).
    /// - `RegionIsInvalid` — the header tables or the chunk payload are
    ///   truncated or carry an inconsistent length.
    /// - `IoError` — any other I/O failure.
    /// - `Compression` / `ParsingError` — the payload could not be decoded.
    fn read_chunk(
        &self,
        save_file: &SaveFile,
        at: pumpkin_core::math::vector2::Vector2<i32>,
    ) -> Result<super::ChunkData, ChunkReadingError> {
        // Region coordinates are the chunk coordinates floor-divided by 32.
        // `div_euclid` is exact for all i32 values, unlike the previous
        // `as f32 / 32.0).floor()` which loses precision for |coord| > 2^24.
        let region = (at.x.div_euclid(32), at.z.div_euclid(32));

        let mut region_file = OpenOptions::new()
            .read(true)
            .open(
                save_file
                    .region_folder
                    .join(format!("r.{}.{}.mca", region.0, region.1)),
            )
            .map_err(|err| match err.kind() {
                // A missing region file simply means no chunk was ever saved here.
                std::io::ErrorKind::NotFound => ChunkReadingError::ChunkNotExist,
                kind => ChunkReadingError::IoError(kind),
            })?;

        // The region header is two 4 KiB tables: chunk locations, then
        // last-modified timestamps (read to advance, currently unused).
        let mut location_table: [u8; 4096] = [0; 4096];
        let mut timestamp_table: [u8; 4096] = [0; 4096];

        region_file
            .read_exact(&mut location_table)
            .map_err(|err| ChunkReadingError::IoError(err.kind()))?;
        region_file
            .read_exact(&mut timestamp_table)
            .map_err(|err| ChunkReadingError::IoError(err.kind()))?;

        // Chunk position inside its region; `rem_euclid` yields 0..32 for
        // negative coordinates too (replaces the hand-rolled modulus closure).
        let chunk_x = at.x.rem_euclid(32) as u32;
        let chunk_z = at.z.rem_euclid(32) as u32;
        let table_entry = ((chunk_x + chunk_z * 32) * 4) as usize;

        // Each location entry: 3-byte big-endian sector offset + 1-byte
        // sector count; sectors are 4096 bytes.
        let entry = &location_table[table_entry..table_entry + 4];
        let offset = u32::from_be_bytes([0, entry[0], entry[1], entry[2]]) as u64 * 4096;
        let size = entry[3] as usize * 4096;

        // An all-zero entry marks a chunk that was never generated/saved.
        if offset == 0 && size == 0 {
            return Err(ChunkReadingError::ChunkNotExist);
        }

        // Read the chunk's sectors from the file.
        let mut file_buf = {
            region_file
                .seek(std::io::SeekFrom::Start(offset))
                .map_err(|_| ChunkReadingError::RegionIsInvalid)?;
            let mut out = vec![0; size];
            region_file
                .read_exact(&mut out)
                .map_err(|_| ChunkReadingError::RegionIsInvalid)?;
            out
        };

        // Payload header: 4-byte big-endian length + 1-byte compression scheme.
        // Guard so a zero-sector (corrupt) entry errors instead of panicking
        // in `drain` below.
        if file_buf.len() < 5 {
            return Err(ChunkReadingError::RegionIsInvalid);
        }
        // TODO: check checksum to make sure chunk is not corrupted
        let header = file_buf.drain(0..5).collect_vec();

        let compression = Compression::from_byte(header[4])
            .ok_or(ChunkReadingError::Compression(CompressionError::UnknownCompression))?;

        let size = u32::from_be_bytes(header[..4].try_into().unwrap()) as usize;

        // The declared length includes the compression-scheme byte, so the
        // payload is `size - 1` bytes. Reject lengths that don't fit the
        // sectors we read — previously these panicked (underflow / drain
        // out of range) on corrupt files.
        if size == 0 || size - 1 > file_buf.len() {
            return Err(ChunkReadingError::RegionIsInvalid);
        }
        let chunk_data = file_buf.drain(0..size - 1).collect_vec();
        let decompressed_chunk = compression
            .decompress_data(chunk_data)
            .map_err(ChunkReadingError::Compression)?;

        ChunkData::from_bytes(decompressed_chunk, at).map_err(ChunkReadingError::ParsingError)
    }
}
60 changes: 52 additions & 8 deletions pumpkin-world/src/chunk.rs → pumpkin-world/src/chunk/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -5,18 +5,53 @@ use std::ops::Index;
use fastnbt::LongArray;
use pumpkin_core::math::vector2::Vector2;
use serde::{Deserialize, Serialize};
use thiserror::Error;

use crate::{
block::{BlockId, BlockState},
block::{block_state::BlockStateError, BlockId, BlockState},
coordinates::{ChunkRelativeBlockCoordinates, Height},
level::{ChunkNotGeneratedError, WorldError},
level::SaveFile,
WORLD_HEIGHT,
};

pub mod anvil;

const CHUNK_AREA: usize = 16 * 16;
const SUBCHUNK_VOLUME: usize = CHUNK_AREA * 16;
const CHUNK_VOLUME: usize = CHUNK_AREA * WORLD_HEIGHT;

/// Abstraction over on-disk world storage formats (e.g. the Anvil format
/// implemented in [`anvil`]).
///
/// Bounded by `Sync + Send` so one reader instance can be shared and used
/// across threads.
pub trait ChunkReader: Sync + Send {
    /// Loads the chunk at absolute chunk position `at` from `save_file`.
    fn read_chunk(
        &self,
        save_file: &SaveFile,
        at: Vector2<i32>,
    ) -> Result<ChunkData, ChunkReadingError>;
}

/// Errors that can occur while locating and reading a chunk from disk.
#[derive(Error, Debug)]
pub enum ChunkReadingError {
    /// An I/O operation on the region file failed; carries the error kind.
    #[error("Io error: {0}")]
    IoError(std::io::ErrorKind),
    /// The region file's header or chunk payload is truncated or corrupt.
    #[error("Region is invalid")]
    RegionIsInvalid,
    /// The chunk payload could not be decompressed.
    #[error("Compression error {0}")]
    Compression(CompressionError),
    /// No chunk has ever been generated/saved at the requested position.
    #[error("Tried to read chunk which does not exist")]
    ChunkNotExist,
    /// The decompressed bytes could not be parsed into a [`ChunkData`].
    #[error("Failed to parse Chunk from bytes: {0}")]
    ParsingError(ChunkParsingError),
}

/// Errors that can occur while decompressing a chunk payload.
#[derive(Error, Debug)]
pub enum CompressionError {
    /// The payload header's compression-scheme byte is not a known value.
    #[error("Compression scheme not recognised")]
    UnknownCompression,
    /// The zlib decoder failed; wraps the underlying I/O error.
    #[error("Error while working with zlib compression: {0}")]
    ZlibError(std::io::Error),
    /// The gzip decoder failed; wraps the underlying I/O error.
    #[error("Error while working with Gzip compression: {0}")]
    GZipError(std::io::Error),
}

pub struct ChunkData {
pub blocks: ChunkBlocks,
pub position: Vector2<i32>,
Expand Down Expand Up @@ -188,18 +223,16 @@ impl Index<ChunkRelativeBlockCoordinates> for ChunkBlocks {
}

impl ChunkData {
pub fn from_bytes(chunk_data: Vec<u8>, at: Vector2<i32>) -> Result<Self, WorldError> {
pub fn from_bytes(chunk_data: Vec<u8>, at: Vector2<i32>) -> Result<Self, ChunkParsingError> {
if fastnbt::from_bytes::<ChunkStatus>(&chunk_data).expect("Failed reading chunk status.")
!= ChunkStatus::Full
{
return Err(WorldError::ChunkNotGenerated(
ChunkNotGeneratedError::IncompleteGeneration,
));
return Err(ChunkParsingError::ChunkNotGenerated);
}

let chunk_data = match fastnbt::from_bytes::<ChunkNbt>(chunk_data.as_slice()) {
Ok(v) => v,
Err(err) => return Err(WorldError::ErrorDeserializingChunk(err.to_string())),
Err(err) => return Err(ChunkParsingError::ErrorDeserializingChunk(err.to_string())),
};

// this needs to be boxed, otherwise it will cause a stack-overflow
Expand All @@ -221,7 +254,8 @@ impl ChunkData {
Ok(state) => Ok(state.into()),
},
)
.collect::<Result<Vec<_>, _>>()?;
.collect::<Result<Vec<_>, _>>()
.map_err(ChunkParsingError::BlockStateError)?;

let block_data = match block_states.data {
None => {
Expand Down Expand Up @@ -277,3 +311,13 @@ impl ChunkData {
})
}
}

/// Errors produced while parsing a decompressed chunk NBT payload
/// (see `ChunkData::from_bytes`).
#[derive(Error, Debug)]
pub enum ChunkParsingError {
    /// A block state referenced by the chunk could not be resolved.
    #[error("BlockState error: {0}")]
    BlockStateError(BlockStateError),
    /// The chunk's status is not `Full`, i.e. world generation for it
    /// never completed.
    #[error("The chunk isn't generated yet")]
    ChunkNotGenerated,
    /// fastnbt failed to deserialize the payload; the message is kept as text.
    #[error("Error deserializing chunk: {0}")]
    ErrorDeserializingChunk(String),
}
Loading

0 comments on commit df80ac2

Please sign in to comment.