
Commit

Formatting
laurmaedje committed Mar 19, 2023
1 parent fe6e237 commit 6a7d440
Showing 10 changed files with 47 additions and 55 deletions.
13 changes: 4 additions & 9 deletions rustfmt.toml
@@ -1,11 +1,6 @@
unstable_features = true

overflow_delimited_expr = true
spaces_around_ranges = true
use_field_init_shorthand = true
merge_derives = false

use_small_heuristics = "Max"
max_width = 90
struct_lit_width = 40
chain_width = 70
single_line_if_else_max_width = 60
struct_lit_width = 50
use_field_init_shorthand = true
merge_derives = false
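
The dropped options drive every hunk below (this note paraphrases the rustfmt documentation and is not part of the commit itself): removing `spaces_around_ranges` makes rustfmt print `a..b` instead of `a .. b`, removing `overflow_delimited_expr` formats a trailing `vec![...]` argument as an ordinary argument instead of letting it hang open on the call line, and raising `struct_lit_width` from 40 to 50 lets slightly wider struct literals stay on one line. A minimal before/after sketch of the range-spacing change (hypothetical function, not from the repository; assumes `mid <= data.len()`):

    // Old output, with `spaces_around_ranges = true`:
    //     for i in 0 .. mid { ... }
    //     let tail = &data[mid ..];
    // New output, with the option removed (the rustfmt default is `false`):
    fn sketch(data: &[u8], mid: usize) -> usize {
        let mut total = 0;
        for i in 0..mid {
            total += usize::from(data[i]);
        }
        total + data[mid..].len()
    }

Because `unstable_features = true` remains in the config, reproducing this formatting presumably still requires the nightly formatter, e.g. `cargo +nightly fmt`.
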
22 changes: 11 additions & 11 deletions src/cff/dict.rs
@@ -27,7 +27,7 @@ impl<'a> Dict<'a> {
&[Operand::Int(len), Operand::Int(offset)] if offset > 0 => {
let offset = usize::try_from(offset).ok()?;
let len = usize::try_from(len).ok()?;
Some(offset .. offset + len)
Some(offset..offset + len)
}
_ => None,
}
@@ -50,10 +50,10 @@ impl<'a> Dict<'a> {
}

pub fn set_range(&mut self, op: Op, range: &Range<usize>) {
self.set(op, vec![
Operand::Offset(range.end - range.start),
Operand::Offset(range.start),
]);
self.set(
op,
vec![Operand::Offset(range.end - range.start), Operand::Offset(range.start)],
);
}
}

@@ -91,8 +91,8 @@ impl<'a> Structure<'a> for Pair<'a> {
let mut operands = vec![];
loop {
match r.data().first().ok_or(Error::MissingData)? {
0 ..= 21 => break,
28 ..= 30 | 32 ..= 254 => operands.push(r.read::<Operand>()?),
0..=21 => break,
28..=30 | 32..=254 => operands.push(r.read::<Operand>()?),
_ => r.skip(1)?,
}
}
@@ -122,7 +122,7 @@ impl Structure<'_> for Op {
let b0 = r.read::<u8>()?;
match b0 {
12 => Ok(Self(b0, r.read::<u8>()?)),
0 ..= 21 => Ok(Self(b0, 0)),
0..=21 => Ok(Self(b0, 0)),
_ => panic!("cannot read operator here"),
}
}
@@ -159,12 +159,12 @@ impl<'a> Structure<'a> for Operand<'a> {
}
Self::Real(r.take(len)?)
}
32 ..= 246 => Self::Int(b0 - 139),
247 ..= 250 => {
32..=246 => Self::Int(b0 - 139),
247..=250 => {
let b1 = i32::from(r.read::<u8>()?);
Self::Int((b0 - 247) * 256 + b1 + 108)
}
251 ..= 254 => {
251..=254 => {
let b1 = i32::from(r.read::<u8>()?);
Self::Int(-(b0 - 251) * 256 - b1 - 108)
}
8 changes: 4 additions & 4 deletions src/cff/index.rs
@@ -34,16 +34,16 @@
let base = 3 + offsize * (count + 1) - 1;
let mut read_offset = || {
let mut bytes: [u8; 4] = [0; 4];
bytes[4 - offsize .. 4].copy_from_slice(r.take(offsize)?);
bytes[4 - offsize..4].copy_from_slice(r.take(offsize)?);
Ok(base + u32::from_be_bytes(bytes) as usize)
};

let mut objects = Vec::with_capacity(count);
let mut last = read_offset()?;
let mut skip = 0;
for _ in 0 .. count {
for _ in 0..count {
let offset = read_offset()?;
let slice = data.get(last .. offset).ok_or(Error::InvalidOffset)?;
let slice = data.get(last..offset).ok_or(Error::InvalidOffset)?;
objects.push(T::read_at(slice, 0)?);
skip += slice.len();
last = offset;
@@ -75,7 +75,7 @@
let offsize = offsize as usize;
for offset in offsets {
let bytes = u32::to_be_bytes(offset);
w.give(&bytes[4 - offsize .. 4]);
w.give(&bytes[4 - offsize..4]);
}

w.give(&buffer.finish());
18 changes: 9 additions & 9 deletions src/cff/mod.rs
@@ -111,7 +111,7 @@ pub(crate) fn subset(ctx: &mut Context) -> Result<()> {

// Write twice because we first need to find out the offsets of various data
// structures.
for _ in 0 .. 2 {
for _ in 0..2 {
let mut w = Writer::new();
insert_offsets(&mut table, &offsets);
write_cff_table(&mut w, &table, &mut offsets);
@@ -125,7 +125,7 @@ pub(crate) fn subset(ctx: &mut Context) -> Result<()> {

/// Subset the glyph descriptions.
fn subset_char_strings<'a>(ctx: &Context, strings: &mut Index<Opaque<'a>>) -> Result<()> {
for glyph in 0 .. ctx.num_glyphs {
for glyph in 0..ctx.num_glyphs {
if !ctx.subset.contains(&glyph) {
// The byte sequence [14] is the minimal valid charstring consisting
// of just a single `endchar` operator.
@@ -162,7 +162,7 @@ fn read_cff_table<'a>(ctx: &Context, cff: &'a [u8]) -> Result<Table<'a>> {
r.read::<u8>()?;
r.read::<u8>()?;
let header_size = r.read::<u8>()? as usize;
r = Reader::new(cff.get(header_size ..).ok_or(Error::InvalidOffset)?);
r = Reader::new(cff.get(header_size..).ok_or(Error::InvalidOffset)?);

// Read four indices at fixed positions.
let name = r.read::<Index<Opaque>>()?;
@@ -188,8 +188,8 @@ fn read_cff_table<'a>(ctx: &Context, cff: &'a [u8]) -> Result<Table<'a>> {

// Read the charset.
let mut charset = None;
if let Some(offset @ 1 ..) = top.get_offset(top::CHARSET) {
let sub = cff.get(offset ..).ok_or(Error::InvalidOffset)?;
if let Some(offset @ 1..) = top.get_offset(top::CHARSET) {
let sub = cff.get(offset..).ok_or(Error::InvalidOffset)?;
charset = Some(read_charset(sub, ctx.num_glyphs)?);
}

@@ -285,7 +285,7 @@ fn read_cid_data<'a>(
// Read FD Select data structure.
let select = {
let offset = top.get_offset(top::FD_SELECT).ok_or(Error::MissingData)?;
let sub = cff.get(offset ..).ok_or(Error::InvalidOffset)?;
let sub = cff.get(offset..).ok_or(Error::InvalidOffset)?;
read_fd_select(sub, ctx.num_glyphs)?
};

@@ -425,10 +425,10 @@ fn read_fd_select(data: &[u8], num_glyphs: u16) -> Result<FdSelect<'_>> {
let count = r.read::<u16>()?;
let mut fds = vec![];
let mut first = r.read::<u16>()?;
for _ in 0 .. count {
for _ in 0..count {
let fd = r.read::<u8>()?;
let end = r.read::<u16>()?;
for _ in first .. end {
for _ in first..end {
fds.push(fd);
}
first = end;
@@ -468,7 +468,7 @@ fn create_cid_offsets(cid: &CidData) -> CidOffsets {
/// Create initial zero offsets for a Private DICT.
fn create_private_offsets(private: &PrivateData) -> PrivateOffsets {
PrivateOffsets {
dict: 0 .. 0,
dict: 0..0,
subrs: private.subrs.as_ref().map(|_| 0),
}
}
4 changes: 2 additions & 2 deletions src/glyf.rs
@@ -27,7 +27,7 @@ impl<'a> Table<'a> {

let from = read_offset(id as usize)?;
let to = read_offset(id as usize + 1)?;
self.glyf.get(from .. to).ok_or(Error::InvalidOffset)
self.glyf.get(from..to).ok_or(Error::InvalidOffset)
}
}

@@ -130,7 +130,7 @@ pub(crate) fn subset(ctx: &mut Context) -> Result<()> {
}
};

for id in 0 .. ctx.num_glyphs {
for id in 0..ctx.num_glyphs {
// If the glyph shouldn't be contained in the subset, it will
// still get a loca entry, but the glyf data is simply empty.
write_offset(sub_glyf.len());
2 changes: 1 addition & 1 deletion src/head.rs
@@ -5,7 +5,7 @@ use super::*;
/// Updates the loca format.
pub(crate) fn subset(ctx: &mut Context) -> Result<()> {
let mut head = ctx.expect_table(Tag::HEAD)?.to_vec();
let index_to_loc = head.get_mut(50 .. 52).ok_or(Error::InvalidOffset)?;
let index_to_loc = head.get_mut(50..52).ok_or(Error::InvalidOffset)?;
index_to_loc[0] = 0;
index_to_loc[1] = ctx.long_loca as u8;
ctx.push(Tag::HEAD, head);
8 changes: 4 additions & 4 deletions src/hmtx.rs
@@ -15,16 +15,16 @@ pub(crate) fn subset(ctx: &mut Context) -> Result<()> {
let mut hmtx = ctx.expect_table(Tag::HMTX)?.to_vec();

let mut offset = 0;
for i in 0 .. num_h_metrics {
for i in 0..num_h_metrics {
if !ctx.subset.contains(&i) {
hmtx.get_mut(offset .. offset + 4).ok_or(Error::MissingData)?.fill(0);
hmtx.get_mut(offset..offset + 4).ok_or(Error::MissingData)?.fill(0);
}
offset += 4;
}

for i in num_h_metrics .. ctx.num_glyphs {
for i in num_h_metrics..ctx.num_glyphs {
if !ctx.subset.contains(&i) {
hmtx.get_mut(offset .. offset + 2).ok_or(Error::MissingData)?.fill(0);
hmtx.get_mut(offset..offset + 2).ok_or(Error::MissingData)?.fill(0);
}
offset += 2;
}
17 changes: 7 additions & 10 deletions src/lib.rs
@@ -147,7 +147,7 @@ fn parse(data: &[u8], index: u32) -> Result<Face<'_>> {
// Parse font collection header if necessary.
if kind == FontKind::Collection {
let offset = u32::read_at(data, 12 + 4 * (index as usize))?;
let subdata = data.get(offset as usize ..).ok_or(Error::InvalidOffset)?;
let subdata = data.get(offset as usize..).ok_or(Error::InvalidOffset)?;
r = Reader::new(subdata);
kind = r.read::<FontKind>()?;
if kind == FontKind::Collection {
@@ -163,7 +163,7 @@ fn parse(data: &[u8], index: u32) -> Result<Face<'_>> {

// Read table records.
let mut records = vec![];
for _ in 0 .. count {
for _ in 0..count {
records.push(r.read::<TableRecord>()?);
}

@@ -198,7 +198,7 @@ fn construct(mut ctx: Context) -> Vec<u8> {
for (tag, data) in &mut ctx.tables {
if *tag == Tag::HEAD {
// Zero out checksum field in head table.
data.to_mut()[8 .. 12].fill(0);
data.to_mut()[8..12].fill(0);
checksum_adjustment_offset = Some(offset + 8);
}

@@ -232,7 +232,7 @@ fn construct(mut ctx: Context) -> Vec<u8> {
if let Some(i) = checksum_adjustment_offset {
let sum = checksum(&data);
let val = 0xB1B0AFBA_u32.wrapping_sub(sum);
data[i .. i + 4].copy_from_slice(&val.to_be_bytes());
data[i..i + 4].copy_from_slice(&val.to_be_bytes());
}

data
@@ -245,7 +245,7 @@ fn checksum(data: &[u8]) -> u32 {
let mut sum = 0u32;
for chunk in data.chunks(4) {
let mut bytes = [0; 4];
bytes[.. chunk.len()].copy_from_slice(chunk);
bytes[..chunk.len()].copy_from_slice(chunk);
sum = sum.wrapping_add(u32::from_be_bytes(bytes));
}
sum
@@ -319,7 +319,7 @@ impl<'a> Face<'a> {
let record = self.records.get(i)?;
let start = record.offset as usize;
let end = start + (record.length as usize);
self.data.get(start .. end)
self.data.get(start..end)
}
}

@@ -575,10 +575,7 @@ mod tests {
assert_eq!(sink1, sink2);
assert_eq!(ttf.glyph_hor_advance(id), ttfs.glyph_hor_advance(id));
assert_eq!(ttf.glyph_name(id), ttfs.glyph_name(id));
assert_eq!(
ttf.glyph_hor_side_bearing(id),
ttfs.glyph_hor_side_bearing(id)
);
assert_eq!(ttf.glyph_hor_side_bearing(id), ttfs.glyph_hor_side_bearing(id));
}
}

2 changes: 1 addition & 1 deletion src/post.rs
@@ -18,7 +18,7 @@ pub(crate) fn subset(ctx: &mut Context) -> Result<()> {
// Read glyph name table.
let num_glyphs = r.read::<u16>()?;
let mut indices = vec![];
for _ in 0 .. num_glyphs {
for _ in 0..num_glyphs {
indices.push(r.read::<u16>()?);
}

8 changes: 4 additions & 4 deletions src/stream.rs
@@ -27,8 +27,8 @@ impl<'a> Reader<'a> {
/// Take the first `n` bytes from the stream.
pub fn take(&mut self, n: usize) -> Result<&'a [u8]> {
if n <= self.0.len() {
let head = &self.0[.. n];
self.0 = &self.0[n ..];
let head = &self.0[..n];
self.0 = &self.0[n..];
Ok(head)
} else {
Err(Error::MissingData)
@@ -38,7 +38,7 @@ impl<'a> Reader<'a> {
/// Skip the first `n` bytes from the stream.
pub fn skip(&mut self, n: usize) -> Result<()> {
if n <= self.0.len() {
self.0 = &self.0[n ..];
self.0 = &self.0[n..];
Ok(())
} else {
Err(Error::MissingData)
@@ -111,7 +111,7 @@ pub trait Structure<'a>: Sized {

/// Read self at the given offset in the binary data.
fn read_at(data: &'a [u8], offset: usize) -> Result<Self> {
if let Some(sub) = data.get(offset ..) {
if let Some(sub) = data.get(offset..) {
Self::read(&mut Reader::new(sub))
} else {
Err(Error::InvalidOffset)
