converting usize to u64 #1

Open
wants to merge 2 commits into master
1 change: 1 addition & 0 deletions .gitignore
@@ -1,3 +1,4 @@
target/
**/*.rs.bk
Cargo.lock
.idea
4 changes: 2 additions & 2 deletions Cargo.toml
@@ -1,9 +1,9 @@
[package]
name = "serde_sqlite_jsonb"
name = "fifthtry-serde_sqlite_jsonb"
version = "0.1.0"
authors = ["Ophir Lojkine"]
categories = ["encoding"]
description = "A Serializer and Deserializer for SQLite jsonb binary format"
description = "A Serializer and Deserializer for SQLite jsonb binary format (FifthTry fork, till PR is merged)"
edition = "2021"
keywords = ["serde"]
license = "MIT OR Apache-2.0"
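Because only the package name changes while the library keeps its serde_sqlite_jsonb module paths (see the extern crate self line added in src/lib.rs below), a downstream project can keep using the original name by renaming the dependency in its own Cargo.toml. A minimal sketch, assuming the fork is published under this name; a git or path source works the same way:

[dependencies]
serde_sqlite_jsonb = { package = "fifthtry-serde_sqlite_jsonb", version = "0.1.0" }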
3 changes: 3 additions & 0 deletions rust-toolchain.toml
@@ -0,0 +1,3 @@
[toolchain]
channel = "1.77.2"

21 changes: 11 additions & 10 deletions src/de.rs
@@ -9,6 +9,7 @@
use crate::error::{Error, Result};
use crate::header::{ElementType, Header};
use serde::de::{self, Deserialize, IntoDeserializer, SeqAccess, Visitor};
use std::convert::Infallible;
use std::io::Read;

/// A structure that deserializes SQLite JSONB data into Rust values.
@@ -94,13 +95,13 @@ impl<R: Read> Deserializer<R> {
15 => 8,
n => unreachable!("{n} does not fit in four bits"),
};
let payload_size: usize = if bytes_to_read == 0 {
usize::from(upper_four_bits)
let payload_size: u64 = if bytes_to_read == 0 {
u64::from(upper_four_bits)
} else {
let mut buf = [0u8; 8];
let start = 8 - bytes_to_read;
self.reader.read_exact(&mut buf[start..8])?;
usize::from_be_bytes(buf)
u64::from_be_bytes(buf)
};
Ok(Header {
element_type: ElementType::from(first_byte),
@@ -109,19 +110,19 @@
}

fn read_payload_string(&mut self, header: Header) -> Result<String> {
let mut str = String::with_capacity(header.payload_size);
let mut str = String::with_capacity(header.payload_size as usize);
let read = self.reader_with_limit(header)?.read_to_string(&mut str)?;
assert_eq!(read, header.payload_size);
assert_eq!(read, header.payload_size as usize);
Ok(str)
}

fn drop_payload(&mut self, header: Header) -> Result<ElementType> {
let mut remaining = header.payload_size;
while remaining > 0 {
let mut buf = [0u8; 256];
let len = buf.len().min(remaining);
let len = buf.len().min(remaining as usize);
self.reader.read_exact(&mut buf[..len])?;
remaining -= len;
remaining -= len as u64;
}
Ok(header.element_type)
}
@@ -145,7 +146,7 @@

fn reader_with_limit(&mut self, header: Header) -> Result<impl Read + '_> {
let limit =
u64::try_from(header.payload_size).map_err(usize_conversion)?;
u64::try_from(header.payload_size).map_err(u64_conversion)?;
Ok((&mut self.reader).take(limit))
}

Expand All @@ -156,7 +157,7 @@ impl<R: Read> Deserializer<R> {
if header.payload_size <= 8 {
// micro-optimization: read small payloads into a stack buffer
let mut buf = [0u8; 8];
let smallbuf = &mut buf[..header.payload_size];
let smallbuf = &mut buf[..header.payload_size as usize];
self.reader.read_exact(smallbuf)?;
Ok(crate::json::parse_json_slice(smallbuf)?)
} else {
@@ -284,7 +285,7 @@ fn read_with_quotes(r: impl Read) -> impl Read {
b"\"".chain(r).chain(&b"\""[..])
}

fn usize_conversion(e: std::num::TryFromIntError) -> Error {
fn u64_conversion(e: Infallible) -> Error {
Error::Io(std::io::Error::new(std::io::ErrorKind::InvalidData, e))
}

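Reviewer sketch of the size decoding that the read_header hunk above switches to u64 (a standalone approximation, not the crate's exact code): the upper nibble of the header byte either holds the payload size directly (0..=11) or selects 1, 2, 4 or 8 big-endian length bytes, which are right-aligned into an 8-byte buffer. u64::from_be_bytes accepts [u8; 8] on every target, whereas usize::from_be_bytes only does so on 64-bit platforms, which is the portability issue this change addresses. Side note: with payload_size now a u64, the u64::try_from in reader_with_limit is infallible (hence u64_conversion taking Infallible), so the map_err could simply be dropped in a follow-up.

use std::io::{Cursor, Read};

fn decode_payload_size(size_nibble: u8, reader: &mut impl Read) -> std::io::Result<u64> {
    // Nibble values 12..=15 announce 1, 2, 4 or 8 trailing big-endian length bytes.
    let bytes_to_read: usize = match size_nibble {
        0..=11 => 0,
        12 => 1,
        13 => 2,
        14 => 4,
        15 => 8,
        _ => unreachable!("a nibble never exceeds 15"),
    };
    if bytes_to_read == 0 {
        // Small payloads: the size is the nibble itself.
        return Ok(u64::from(size_nibble));
    }
    // Right-align the length bytes so the leading bytes stay zero.
    let mut buf = [0u8; 8];
    reader.read_exact(&mut buf[8 - bytes_to_read..])?;
    // u64::from_be_bytes takes [u8; 8] on every target; usize::from_be_bytes
    // would only accept [u8; 8] on 64-bit platforms.
    Ok(u64::from_be_bytes(buf))
}

fn main() -> std::io::Result<()> {
    // Nibble 13 => two length bytes follow: 0x01 0x00 encodes a payload of 256 bytes.
    let mut data = Cursor::new(vec![0x01u8, 0x00]);
    assert_eq!(decode_payload_size(13, &mut data)?, 256);
    Ok(())
}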
2 changes: 1 addition & 1 deletion src/header.rs
@@ -40,7 +40,7 @@ pub enum ElementType {
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub struct Header {
pub element_type: ElementType,
pub payload_size: usize,
pub payload_size: u64,
}

impl Header {
2 changes: 2 additions & 0 deletions src/lib.rs
@@ -4,6 +4,8 @@ mod header;
mod json;
mod ser;

extern crate self as serde_sqlite_jsonb;

pub use crate::de::{from_reader, from_slice, Deserializer};
pub use crate::error::{Error, Result};
pub use crate::ser::{to_vec, Serializer};
21 changes: 11 additions & 10 deletions src/ser.rs
@@ -23,23 +23,24 @@
/// Helper struct to write JSONB data, then finalize the header to its minimal size
pub struct JsonbWriter<'a> {
buffer: &'a mut Vec<u8>,
header_start: usize,
header_start: u64,
}

impl<'a> JsonbWriter<'a> {
fn new(buffer: &'a mut Vec<u8>, element_type: ElementType) -> Self {
let header_start = buffer.len();
let header_start = buffer.len() as u64;
buffer.extend_from_slice(&[u8::from(element_type); 9]);
Self {
buffer,
header_start,
}
}
fn finalize(self) {
let data_start = self.header_start + 9;
let data_start = self.header_start as usize + 9;
let data_end = self.buffer.len();
let payload_size = data_end - data_start;
let header = &mut self.buffer[self.header_start..self.header_start + 9];
let header = &mut self.buffer
[(self.header_start as usize)..(self.header_start as usize) + 9];
let head_len = if payload_size <= 11 {
header[0] |= (payload_size as u8) << 4;
1
@@ -63,10 +64,10 @@ impl<'a> JsonbWriter<'a> {
if head_len < 9 {
self.buffer.copy_within(
data_start..data_end,
self.header_start + head_len,
self.header_start as usize + head_len,
);
self.buffer
.truncate(self.header_start + head_len + payload_size);
.truncate(self.header_start as usize + head_len + payload_size);
}
}
}
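The finalize() hunk above reserves a worst-case 9-byte header, writes the payload, then shrinks the header to its minimal encoding and shifts the payload down. A reviewer sketch of the size classes involved, based on the visible code and SQLite's JSONB header layout (not the crate's exact function). Side note: header_start indexes into a Vec<u8>, so it arguably could stay usize (only Header::payload_size in src/header.rs needs to be u64), which would avoid the repeated as usize casts in this impl.

// Header length (in bytes) for a given payload size: one byte when the size
// fits in the upper nibble (<= 11), otherwise the nibble selects 1, 2, 4 or 8
// trailing big-endian length bytes.
fn header_len(payload_size: u64) -> usize {
    match payload_size {
        0..=11 => 1,
        12..=0xFF => 2,
        0x100..=0xFFFF => 3,
        0x1_0000..=0xFFFF_FFFF => 5,
        _ => 9,
    }
}

fn main() {
    assert_eq!(header_len(5), 1);      // "hello" fits in the nibble
    assert_eq!(header_len(300), 3);    // needs two length bytes
    assert_eq!(header_len(70_000), 5); // needs four length bytes
}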
@@ -341,7 +342,7 @@ impl<'a> ser::SerializeTupleStruct for JsonbWriter<'a> {
/// MyEnum::Variant { field1: 1, field2: 2 } -> {"Variant": {"field1": 1, "field2": 2}}
/// We need to keep track of two jsonb headers, one for the inner array or map, and one for the object.
pub struct EnumVariantSerializer<'a> {
map_header_start: usize,
map_header_start: u64,
inner_jsonb_writer: JsonbWriter<'a>,
}

@@ -480,8 +481,8 @@ mod tests {
assert_eq!(to_vec(&"hello").unwrap(), b"\x5ahello");
}

fn assert_long_str(repeats: usize, expected_header: &[u8]) {
let long_str = "x".repeat(repeats);
fn assert_long_str(repeats: u64, expected_header: &[u8]) {
let long_str = "x".repeat(repeats as usize);
assert_eq!(
to_vec(&long_str).unwrap(),
[&expected_header[..], &long_str.as_bytes()].concat()
@@ -533,7 +534,7 @@ mod tests {
#[derive(serde_derive::Serialize)]
struct TestStruct {
smol: char,
long_long_long_long: usize,
long_long_long_long: u64,
}
let test_struct = TestStruct {
smol: 'X',
2 changes: 2 additions & 0 deletions tests/test_real_sqlite.rs
@@ -1,3 +1,5 @@
extern crate fifthtry_serde_sqlite_jsonb as serde_sqlite_jsonb;

use std::collections::HashMap;

use rusqlite::{Connection, DatabaseName};
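The alias above pairs with the extern crate self as serde_sqlite_jsonb; line added in src/lib.rs: the package gets a new name, but both the crate's own code and its tests keep addressing it by the original one. A sketch of how a consumer of the fork can do the same and round-trip a value through the jsonb format (assumes the fork as a dependency plus the standard serde derives; only a round-trip is shown, not the exact byte layout):

// Same trick as the test above: alias the fork back to the upstream name.
// With the Cargo `package = "fifthtry-serde_sqlite_jsonb"` rename, this line
// would be unnecessary.
extern crate fifthtry_serde_sqlite_jsonb as serde_sqlite_jsonb;

use serde_sqlite_jsonb::{from_slice, to_vec};

#[derive(serde_derive::Serialize, serde_derive::Deserialize, Debug, PartialEq)]
struct Doc {
    smol: char,
    long_long_long_long: u64, // matches the u64 field in the ser.rs test above
}

fn main() {
    let original = Doc { smol: 'X', long_long_long_long: 42 };
    // Serialize to SQLite's JSONB binary format, then parse it back.
    let bytes = to_vec(&original).expect("serialization should succeed");
    let parsed: Doc = from_slice(&bytes).expect("deserialization should succeed");
    assert_eq!(parsed, original);
}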