fix derive, par iter and rename from meta
stelzo committed May 15, 2024
1 parent 73e05f5 commit 56c29ac
Showing 3 changed files with 92 additions and 56 deletions.
64 changes: 31 additions & 33 deletions src/iterator.rs
@@ -28,7 +28,7 @@ where
iteration: usize,
iteration_back: usize,
data: ByteBufferView<N>,
phantom_c: std::marker::PhantomData<C>, // internally used for meta names array
phantom_c: std::marker::PhantomData<C>, // internally used for pdata names array
}

#[cfg(feature = "rayon")]
@@ -133,7 +133,6 @@
}
}

/// Implementation of the iterator trait.
impl<const N: usize, C> Iterator for PointCloudIterator<N, C>
where
C: PointConvertible<N>,
@@ -162,7 +161,7 @@ struct ByteBufferView<const N: usize> {
end_point_idx: usize,
point_step_size: usize,
offsets: [usize; N],
meta: Vec<(String, FieldDatatype)>,
pdata: Vec<(String, FieldDatatype)>,
endian: Endian,
}

@@ -173,7 +172,7 @@ impl<const N: usize> ByteBufferView<N> {
start_point_idx: usize,
end_point_idx: usize,
offsets: [usize; N],
meta: Vec<(String, FieldDatatype)>,
pdata: Vec<(String, FieldDatatype)>,
endian: Endian,
) -> Self {
Self {
@@ -182,7 +181,7 @@ impl<const N: usize> ByteBufferView<N> {
end_point_idx,
point_step_size,
offsets,
meta,
pdata,
endian,
}
}
@@ -195,22 +194,21 @@ impl<const N: usize> ByteBufferView<N> {
#[inline(always)]
fn point_at(&self, idx: usize) -> RPCL2Point<N> {
let offset = (self.start_point_idx + idx) * self.point_step_size;

// TODO memcpy entire point at once, then extract fields?
let mut meta = [PointData::default(); N];
meta.iter_mut()
let mut pdata = [PointData::default(); N];
pdata
.iter_mut()
.zip(self.offsets.iter())
.zip(self.meta.iter())
.for_each(|((p_meta, in_point_offset), (_, meta_type))| {
*p_meta = PointData::from_buffer(
.zip(self.pdata.iter())
.for_each(|((pdata_entry, in_point_offset), (_, pdata_type))| {
*pdata_entry = PointData::from_buffer(
&self.data,
offset + in_point_offset,
*meta_type,
*pdata_type,
self.endian,
);
});

RPCL2Point { fields: meta }
pdata.into()
}

#[inline]
@@ -221,7 +219,7 @@ impl<const N: usize> ByteBufferView<N> {
end_point_idx: start + size - 1,
point_step_size: self.point_step_size,
offsets: self.offsets,
meta: self.meta.clone(),
pdata: self.pdata.clone(),
endian: self.endian,
}
}
@@ -231,9 +229,8 @@ impl<const N: usize> ByteBufferView<N> {
let left_start = self.start_point_idx;
let left_size = point_index;

let right_start = point_index;
let right_start = self.start_point_idx + point_index;
let right_size = self.len() - point_index;

(
self.clone_with_bounds(left_start, left_size),
self.clone_with_bounds(right_start, right_size),
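
For intuition on the `split_at` fix above (the change from `right_start = point_index` to `right_start = self.start_point_idx + point_index`), here is a standalone sketch, not part of the committed code: the right half has to carry the view's own starting offset, otherwise splitting a view that does not begin at index 0 jumps back to the front of the buffer.

```rust
// Illustrative, simplified stand-in for ByteBufferView::split_at.
struct View {
    start: usize, // start_point_idx
    end: usize,   // end_point_idx (inclusive)
}

impl View {
    fn len(&self) -> usize {
        self.end - self.start + 1
    }

    fn split_at(&self, point_index: usize) -> (View, View) {
        let left = View { start: self.start, end: self.start + point_index - 1 };
        // Fixed version: offset by self.start. The previous code used
        // `point_index` alone, which is only correct when self.start == 0.
        let right = View { start: self.start + point_index, end: self.end };
        (left, right)
    }
}

fn main() {
    // A view over points 10..=19, e.g. the right half of an earlier split.
    let v = View { start: 10, end: 19 };
    let (l, r) = v.split_at(4);
    assert_eq!((l.start, l.end), (10, 13));
    assert_eq!((r.start, r.end), (14, 19)); // the old code would yield (4, 9)
    assert_eq!(l.len() + r.len(), v.len());
}
```
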
@@ -248,12 +245,12 @@
type Error = MsgConversionError;

/// Convert a PointCloud2Msg into an iterator.
/// Converting a PointCloud2Msg into an iterator is a fallible operation since the message can contain only a subset of the required fields.
/// Converting a PointCloud2Msg into an iterator is a fallible operation since the message could contain a subset of the required fields.
///
/// The theoretical time complexity is O(n) where n is the number of fields defined in the message for a single point which is typically small.
/// It therefore has a constant time complexity O(1) for practical purposes.
fn try_from(cloud: PointCloud2Msg) -> Result<Self, Self::Error> {
let mut meta_with_offsets = vec![(String::default(), FieldDatatype::default(), 0); N];
let mut pdata_with_offsets = vec![(String::default(), FieldDatatype::default(), 0); N];

let not_found_fieldnames = C::field_names_ordered()
.into_iter()
@@ -273,7 +270,7 @@
}

let ordered_fieldnames = C::field_names_ordered();
for (field, with_offset) in cloud.fields.iter().zip(meta_with_offsets.iter_mut()) {
for (field, with_offset) in cloud.fields.iter().zip(pdata_with_offsets.iter_mut()) {
if ordered_fieldnames.contains(&field.name.as_str()) {
*with_offset = (
field.name.clone(),
Expand All @@ -283,25 +280,26 @@ where
}
}

meta_with_offsets.sort_unstable_by(|(_, _, offset1), (_, _, offset2)| offset1.cmp(offset2));
pdata_with_offsets
.sort_unstable_by(|(_, _, offset1), (_, _, offset2)| offset1.cmp(offset2));

debug_assert!(
meta_with_offsets.len() == N,
pdata_with_offsets.len() == N,
"Not all fields were found in the message. Expected {} but found {}.",
N,
meta_with_offsets.len()
pdata_with_offsets.len()
);

let mut offsets = [usize::default(); N];
let mut meta = vec![(String::default(), FieldDatatype::default()); N];
let mut pdata = vec![(String::default(), FieldDatatype::default()); N];

meta_with_offsets
pdata_with_offsets
.into_iter()
.zip(meta.iter_mut())
.zip(pdata.iter_mut())
.zip(offsets.iter_mut())
.for_each(|(((name, datatype, offset), meta), meta_offset)| {
*meta = (name, datatype);
*meta_offset = offset;
.for_each(|(((name, datatype, offset), pdata), pdata_offset)| {
*pdata = (name, datatype);
*pdata_offset = offset;
});

let point_step_size = cloud.point_step as usize;
Expand All @@ -312,9 +310,9 @@ where

let last_offset = offsets.last().expect("Dimensionality is 0.");

let last_meta = meta.last().expect("Dimensionality is 0.");
let size_with_last_meta = last_offset + last_meta.1.size();
if size_with_last_meta > point_step_size {
let last_pdata = pdata.last().expect("Dimensionality is 0.");
let size_with_last_pdata = last_offset + last_pdata.1.size();
if size_with_last_pdata > point_step_size {
return Err(MsgConversionError::DataLengthMismatch);
}

Expand All @@ -326,7 +324,7 @@ where
0,
cloud_length - 1,
offsets,
meta,
pdata,
cloud.endian,
);

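For context on the fallible `TryFrom<PointCloud2Msg>` conversion documented in the hunk above, a minimal round-trip sketch (not part of this commit). The `prelude` import and the sequential `try_into_iter()` accessor are assumptions mirroring the `try_from_vec()`/`try_into_par_iter()` calls added to the tests further down; exact names may differ.

```rust
// Sketch only; assumes ros_pointcloud2's prelude and a sequential
// `try_into_iter()` counterpart to the `try_into_par_iter()` used in the tests.
use ros_pointcloud2::prelude::*;

fn roundtrip() -> Result<Vec<PointXYZ>, MsgConversionError> {
    let cloud = vec![
        PointXYZ::new(0.0, 1.0, 5.0),
        PointXYZ::new(1.0, 1.5, 5.0),
    ];

    // Vec -> PointCloud2Msg. Fails if the point layout cannot be described.
    let msg = PointCloud2Msg::try_from_vec(cloud)?;

    // PointCloud2Msg -> typed points. Fails when a required field is missing,
    // which is why the conversion is a TryFrom rather than a From.
    Ok(msg.try_into_iter()?.collect())
}
```
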
40 changes: 20 additions & 20 deletions src/lib.rs
@@ -72,7 +72,7 @@
//! pub x: f32,
//! pub y: f32,
//! pub z: f32,
//! #[cfg_attr(feature = "derive", rpcl2(name = "i"))]
//! #[cfg_attr(feature = "derive", rpcl2(rename("i")))]
//! pub intensity: f32,
//! }
//!
@@ -121,7 +121,7 @@
//! pub x: f32,
//! pub y: f32,
//! pub z: f32,
//! #[rpcl2(name = "i")]
//! #[rpcl2(rename("i"))]
//! pub intensity: f32,
//! }
//! ```
@@ -457,43 +457,43 @@ impl PointCloud2Msg {
let field_names = C::field_names_ordered();
debug_assert!(field_names.len() == N);

let mut meta_offsets_acc: u32 = 0;
let mut pdata_offsets_acc: u32 = 0;
let mut fields = vec![PointFieldMsg::default(); N];
let field_count: u32 = 1;
for ((meta_value, field_name), field_val) in point
for ((pdata_entry, field_name), field_val) in point
.fields
.into_iter()
.zip(field_names.into_iter())
.zip(fields.iter_mut())
{
let datatype_code = meta_value.datatype.into();
let datatype_code = pdata_entry.datatype.into();
let _ = FieldDatatype::try_from(datatype_code)?;

*field_val = PointFieldMsg {
name: field_name.into(),
offset: meta_offsets_acc,
offset: pdata_offsets_acc,
datatype: datatype_code,
count: 1,
};

meta_offsets_acc += field_count * meta_value.datatype.size() as u32;
pdata_offsets_acc += field_count * pdata_entry.datatype.size() as u32;
}

(
PointCloud2MsgBuilder::new()
.fields(fields)
.point_step(meta_offsets_acc),
meta_offsets_acc,
.point_step(pdata_offsets_acc),
pdata_offsets_acc,
)
};
let mut cloud_width = 0;

iterable.into_iter().for_each(|pointdata| {
let point: RPCL2Point<N> = pointdata.into();

point.fields.iter().for_each(|meta| {
point.fields.iter().for_each(|pdata| {
let truncated_bytes = unsafe {
std::slice::from_raw_parts(meta.bytes.as_ptr(), meta.datatype.size())
std::slice::from_raw_parts(pdata.bytes.as_ptr(), pdata.datatype.size())
};
cloud.data.extend_from_slice(truncated_bytes);
});
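
To make the offset bookkeeping above concrete: each field's `offset` is the running total of the preceding field sizes, and the final total becomes `point_step`. A tiny standalone sketch (illustrative, not from the repository) for a point with three `f32` fields:

```rust
// Worked example of the accumulation above for an XYZ point (three f32 fields).
fn main() {
    let field_sizes = [4usize, 4, 4]; // size of f32 for x, y, z
    let mut offsets = Vec::new();
    let mut acc = 0usize;
    for size in field_sizes {
        offsets.push(acc); // this field's byte offset inside one point record
        acc += size;       // mirrors `pdata_offsets_acc += field_count * size`
    }
    assert_eq!(offsets, [0, 4, 8]);
    assert_eq!(acc, 12); // ends up as the message's point_step
}
```
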
@@ -706,7 +706,7 @@ impl PointCloud2Msg {
}
}

/// Internal point representation. It is used to store the coordinates and meta data of a point.
/// Internal point representation. It is used to store the point data entries.
///
/// In each iteration, an internal point representation is converted to the desired point type.
/// Implement the `From` traits for your point type to use the conversion.
@@ -779,7 +779,7 @@ impl<const N: usize> From<[PointData; N]> for RPCL2Point<N> {
/// ```
#[cfg(not(feature = "derive"))]
pub trait PointConvertible<const N: usize>:
From<RPCL2Point<N>> + Into<RPCL2Point<N>> + Fields<N> + Clone + 'static + Default
From<RPCL2Point<N>> + Into<RPCL2Point<N>> + Fields<N> + Clone + Default
{
}

@@ -840,7 +840,7 @@ pub trait PointConvertible<const N: usize>:
/// ```
#[cfg(feature = "derive")]
pub trait PointConvertible<const N: usize>:
type_layout::TypeLayout + From<RPCL2Point<N>> + Into<RPCL2Point<N>> + Fields<N> + 'static + Default
type_layout::TypeLayout + From<RPCL2Point<N>> + Into<RPCL2Point<N>> + Fields<N> + Default
{
}

@@ -926,8 +926,8 @@ impl TryFrom<type_layout::TypeLayoutInfo> for TypeLayoutInfo {
/// use ros_pointcloud2::PointData;
///
/// let original_data: f64 = 1.0;
/// let meta = PointData::new(original_data);
/// let my_data: f64 = meta.get();
/// let pdata = PointData::new(original_data);
/// let my_data: f64 = pdata.get();
/// ```
#[derive(Debug, Clone, Copy)]
pub struct PointData {
Expand All @@ -951,7 +951,7 @@ impl PointData {
///
/// # Example
/// ```
/// let meta = ros_pointcloud2::PointData::new(1.0);
/// let pdata = ros_pointcloud2::PointData::new(1.0);
/// ```
#[inline(always)]
pub fn new<T: FromBytes>(value: T) -> Self {
@@ -984,8 +984,8 @@ impl PointData {
/// # Example
/// ```
/// let original_data: f64 = 1.0;
/// let meta = ros_pointcloud2::PointData::new(original_data);
/// let my_data: f64 = meta.get();
/// let pdata = ros_pointcloud2::PointData::new(original_data);
/// let my_data: f64 = pdata.get();
/// ```
pub fn get<T: FromBytes>(&self) -> T {
match self.endian {
@@ -1408,7 +1408,7 @@ mod tests {
#[derive(Fields)]
struct TestStruct {
field1: String,
#[rpcl2(name = "renamed_field")]
#[rpcl2(rename("renamed_field"))]
field2: i32,
field3: f64,
field4: bool,
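The derive fix in this commit switches the field attribute from `name = "..."` to `rename("...")`, as seen in the doc comments and in `TestStruct` above. A minimal sketch of the new syntax (illustrative only; the derive import path and the exact shape of `field_names_ordered()`'s return value are assumptions):

```rust
// Illustrative sketch, not part of this commit. The import path for the Fields
// derive is assumed; it may be re-exported differently.
use ros_pointcloud2::Fields;

#[derive(Fields)]
struct MyPointXYZI {
    x: f32,
    y: f32,
    z: f32,
    #[rpcl2(rename("i"))] // the message field is called "i", not "intensity"
    intensity: f32,
}

fn main() {
    // field_names_ordered() is the associated function the conversions in
    // src/iterator.rs and src/lib.rs above rely on (assumed to return the
    // ordered field names as &str).
    assert_eq!(MyPointXYZI::field_names_ordered(), ["x", "y", "z", "i"]);
}
```
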
44 changes: 41 additions & 3 deletions tests/e2e_test.rs
@@ -70,9 +70,6 @@ fn write_cloud_from_vec() {

let msg = PointCloud2Msg::try_from_vec(cloud);
assert!(msg.is_ok());

let msg = msg.unwrap();
println!("{:?}", msg);
}

#[test]
@@ -92,6 +89,47 @@ fn write_empty_cloud_iter() {
assert!(msg.unwrap().data.is_empty());
}

#[test]
#[cfg(all(feature = "derive", feature = "rayon"))]
fn conv_cloud_par_iter() {
let cloud = vec![
PointXYZ::new(0.0, 1.0, 5.0),
PointXYZ::new(1.0, 1.5, 5.0),
PointXYZ::new(1.3, 1.6, 5.7),
];
let copy = cloud.clone();

let msg: Result<PointCloud2Msg, MsgConversionError> = PointCloud2Msg::try_from_vec(cloud);
assert!(msg.is_ok());
let msg = msg.unwrap();
let to_p_type = msg.try_into_par_iter();
assert!(to_p_type.is_ok());
let to_p_type = to_p_type.unwrap();
let back_to_type = to_p_type.collect::<Vec<PointXYZ>>();
assert_eq!(copy, back_to_type);
}

#[test]
#[cfg(all(feature = "derive", feature = "rayon"))]
fn conv_cloud_par_par_iter() {
let cloud = vec![
PointXYZ::new(0.0, 1.0, 5.0),
PointXYZ::new(1.0, 1.5, 5.0),
PointXYZ::new(1.3, 1.6, 5.7),
PointXYZ::new(f32::MAX, f32::MIN, f32::MAX),
];
let copy = cloud.clone();

let msg = PointCloud2Msg::try_from_par_iter(cloud.into_par_iter());
assert!(msg.is_ok());
let msg = msg.unwrap();
let to_p_type = msg.try_into_par_iter();
assert!(to_p_type.is_ok());
let to_p_type = to_p_type.unwrap();
let back_to_type = to_p_type.collect::<Vec<PointXYZ>>();
assert_eq!(copy, back_to_type);
}

#[test]
#[cfg(feature = "derive")]
fn custom_xyz_f32() {
