Merge pull request #57 from zivid/add_support_for_normals
Add support for normals
apartridge authored Nov 1, 2021
2 parents d858f26 + ddd94be commit 873cd39
Showing 9 changed files with 147 additions and 16 deletions.
3 changes: 2 additions & 1 deletion .github/workflows/ROS-commit.yml
@@ -89,10 +89,11 @@ jobs:
build-driver-and-run-tests-for-older-sdk:
name: Build driver and run tests for older Zivid SDK
runs-on: ubuntu-latest
if: ${{ false }} # Remove this line when adding zivid-version below
strategy:
fail-fast: true
matrix:
zivid-version: ['2.4.2+1a2e8cfb-1']
zivid-version: ['none'] # Remove if: ${{ false }} above, when adding a SDK version here
ros-distro: ['ros:noetic-ros-base-focal', 'ros:kinetic-ros-base-xenial', 'ros:melodic-ros-base-bionic']
steps:
- name: Check out code
5 changes: 5 additions & 0 deletions CHANGELOG.md
@@ -2,6 +2,11 @@

This project adheres to [Semantic Versioning](https://semver.org).

## 2.3.0

* Add support for normals. Normals are exposed via topic "normals/xyz".
* The minimum supported Zivid SDK version is now SDK 2.5.

## 2.2.0

* Add support for version 2.2.0 of the Zivid SDK.
10 changes: 9 additions & 1 deletion README.md
@@ -43,7 +43,7 @@ sudo apt-get install -y python3-catkin-tools python3-osrf-pycommon git

### Zivid SDK

To use the ROS driver you need to download and install the "Zivid Core" package. Zivid SDK version 2.0 to 2.5
To use the ROS driver you need to download and install the "Zivid Core" package. Zivid SDK version 2.5
is supported. See [releases](https://github.com/zivid/zivid-ros/releases) for older ROS driver releases that
support older SDK versions.

@@ -340,6 +340,14 @@ Camera calibration and metadata.
Each pixel contains the SNR (signal-to-noise ratio) value. The image is encoded as 32-bit
float. Published as a part of the [capture](#capture) service.

### normals/xyz
[sensor_msgs/PointCloud2](http://docs.ros.org/api/sensor_msgs/html/msg/PointCloud2.html)

Normals for the point cloud. The included fields are the normal x, y and z coordinates.
Each coordinate is a 32-bit float. There are no additional padding floats, so the point step is
12 bytes (3*4 bytes). The normals are unit vectors. Note that subscribing to this topic incurs
some additional processing time, since the normals have to be computed.
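
As a rough sketch (not part of this commit), a subscriber can read these fields with
`sensor_msgs::PointCloud2ConstIterator`; the node name, callback name and logging below are
illustrative assumptions, not code from this repository.

```cpp
// Minimal sketch of a node that subscribes to normals/xyz and counts valid normals.
// Assumes the default zivid_camera namespace; adjust the topic name to your setup.
#include <cmath>
#include <cstddef>

#include <ros/ros.h>
#include <sensor_msgs/PointCloud2.h>
#include <sensor_msgs/point_cloud2_iterator.h>

void onNormals(const sensor_msgs::PointCloud2ConstPtr& msg)
{
  // Iterate over the three float fields described above (offsets 0, 4 and 8).
  sensor_msgs::PointCloud2ConstIterator<float> it_x(*msg, "normal_x");
  sensor_msgs::PointCloud2ConstIterator<float> it_y(*msg, "normal_y");
  sensor_msgs::PointCloud2ConstIterator<float> it_z(*msg, "normal_z");
  std::size_t valid = 0;
  for (; it_x != it_x.end(); ++it_x, ++it_y, ++it_z)
  {
    // Normals are expected to be NaN where the point cloud has no data.
    if (!std::isnan(*it_x) && !std::isnan(*it_y) && !std::isnan(*it_z))
    {
      valid++;
    }
  }
  ROS_INFO("Received %u x %u normals, %zu valid", msg->width, msg->height, valid);
}

int main(int argc, char** argv)
{
  ros::init(argc, argv, "normals_xyz_listener");
  ros::NodeHandle nh;
  ros::Subscriber sub = nh.subscribe("/zivid_camera/normals/xyz", 1, onNormals);
  ros::spin();
  return 0;
}
```

To trigger a message, call the capture service while such a node is subscribed; the driver only
computes and publishes normals when the topic has subscribers or the latched publisher is enabled.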

## Configuration

The `zivid_camera` node supports both single-acquisition (2D and 3D) and multi-acquisition
2 changes: 1 addition & 1 deletion zivid_camera/CMakeLists.txt
@@ -32,7 +32,7 @@ find_package(catkin REQUIRED COMPONENTS

find_package(Boost REQUIRED COMPONENTS filesystem)

find_package(Zivid 2.0.0 COMPONENTS Core REQUIRED)
find_package(Zivid 2.5.0 COMPONENTS Core REQUIRED)
message(STATUS "Found Zivid version ${Zivid_VERSION}")

find_package(OpenMP REQUIRED)
4 changes: 4 additions & 0 deletions zivid_camera/include/zivid_camera.h
@@ -54,6 +54,7 @@ class ZividCamera
bool shouldPublishColorImg() const;
bool shouldPublishDepthImg() const;
bool shouldPublishSnrImg() const;
bool shouldPublishNormalsXYZ() const;
std_msgs::Header makeHeader();
void publishPointCloudXYZ(const std_msgs::Header& header, const Zivid::PointCloud& point_cloud);
void publishPointCloudXYZRGBA(const std_msgs::Header& header, const Zivid::PointCloud& point_cloud);
@@ -65,6 +66,7 @@ class ZividCamera
const Zivid::PointCloud& point_cloud);
void publishSnrImage(const std_msgs::Header& header, const sensor_msgs::CameraInfoConstPtr& camera_info,
const Zivid::PointCloud& point_cloud);
void publishNormalsXYZ(const std_msgs::Header& header, const Zivid::PointCloud& point_cloud);
sensor_msgs::CameraInfoConstPtr makeCameraInfo(const std_msgs::Header& header, std::size_t width, std::size_t height,
const Zivid::CameraIntrinsics& intrinsics);

@@ -82,12 +84,14 @@ class ZividCamera
bool use_latched_publisher_for_color_image_;
bool use_latched_publisher_for_depth_image_;
bool use_latched_publisher_for_snr_image_;
bool use_latched_publisher_for_normals_xyz_;
ros::Publisher points_xyz_publisher_;
ros::Publisher points_xyzrgba_publisher_;
image_transport::ImageTransport image_transport_;
image_transport::CameraPublisher color_image_publisher_;
image_transport::CameraPublisher depth_image_publisher_;
image_transport::CameraPublisher snr_image_publisher_;
ros::Publisher normals_xyz_publisher_;
ros::ServiceServer camera_info_serial_number_service_;
ros::ServiceServer camera_info_model_name_service_;
ros::ServiceServer capture_service_;
2 changes: 1 addition & 1 deletion zivid_camera/package.xml
@@ -1,7 +1,7 @@
<?xml version="1.0"?>
<package format="2">
<name>zivid_camera</name>
<version>2.2.0</version>
<version>2.3.0</version>
<description>Driver for using the Zivid 3D cameras in ROS.</description>
<maintainer email="[email protected]">Zivid</maintainer>
<license>BSD3</license>
49 changes: 41 additions & 8 deletions zivid_camera/src/zivid_camera.cpp
@@ -94,6 +94,7 @@ ZividCamera::ZividCamera(ros::NodeHandle& nh, ros::NodeHandle& priv)
, use_latched_publisher_for_color_image_(false)
, use_latched_publisher_for_depth_image_(false)
, use_latched_publisher_for_snr_image_(false)
, use_latched_publisher_for_normals_xyz_(false)
, image_transport_(nh_)
, header_seq_(0)
{
@@ -134,6 +135,7 @@ ZividCamera::ZividCamera(ros::NodeHandle& nh, ros::NodeHandle& priv)
priv_.param<bool>("use_latched_publisher_for_color_image", use_latched_publisher_for_color_image_, false);
priv_.param<bool>("use_latched_publisher_for_depth_image", use_latched_publisher_for_depth_image_, false);
priv_.param<bool>("use_latched_publisher_for_snr_image", use_latched_publisher_for_snr_image_, false);
priv_.param<bool>("use_latched_publisher_for_normals_xyz", use_latched_publisher_for_normals_xyz_, false);

if (file_camera_mode)
{
@@ -215,6 +217,8 @@ ZividCamera::ZividCamera(ros::NodeHandle& nh, ros::NodeHandle& priv)
image_transport_.advertiseCamera("color/image_color", 1, use_latched_publisher_for_color_image_);
depth_image_publisher_ = image_transport_.advertiseCamera("depth/image", 1, use_latched_publisher_for_depth_image_);
snr_image_publisher_ = image_transport_.advertiseCamera("snr/image", 1, use_latched_publisher_for_snr_image_);
normals_xyz_publisher_ =
nh_.advertise<sensor_msgs::PointCloud2>("normals/xyz", 1, use_latched_publisher_for_normals_xyz_);

ROS_INFO("Advertising services");
camera_info_model_name_service_ =
@@ -424,8 +428,10 @@ void ZividCamera::publishFrame(Zivid::Frame&& frame)
const bool publish_color_img = shouldPublishColorImg();
const bool publish_depth_img = shouldPublishDepthImg();
const bool publish_snr_img = shouldPublishSnrImg();
const bool publish_normals_xyz = shouldPublishNormalsXYZ();

if (publish_points_xyz || publish_points_xyzrgba || publish_color_img || publish_depth_img || publish_snr_img)
if (publish_points_xyz || publish_points_xyzrgba || publish_color_img || publish_depth_img || publish_snr_img ||
publish_normals_xyz)
{
const auto header = makeHeader();
auto point_cloud = frame.pointCloud();
@@ -461,6 +467,10 @@ void ZividCamera::publishFrame(Zivid::Frame&& frame)
publishSnrImage(header, camera_info, point_cloud);
}
}
if (publish_normals_xyz)
{
publishNormalsXYZ(header, point_cloud);
}
}
}

@@ -489,6 +499,11 @@ bool ZividCamera::shouldPublishSnrImg() const
return snr_image_publisher_.getNumSubscribers() > 0 || use_latched_publisher_for_snr_image_;
}

bool ZividCamera::shouldPublishNormalsXYZ() const
{
return normals_xyz_publisher_.getNumSubscribers() > 0 || use_latched_publisher_for_normals_xyz_;
}

std_msgs::Header ZividCamera::makeHeader()
{
std_msgs::Header header;
@@ -500,7 +515,7 @@ std_msgs::Header ZividCamera::makeHeader()

void ZividCamera::publishPointCloudXYZ(const std_msgs::Header& header, const Zivid::PointCloud& point_cloud)
{
ROS_DEBUG("Publishing points/xyz");
ROS_DEBUG_STREAM("Publishing " << points_xyz_publisher_.getTopic());

// We are using the Zivid::XYZW type here for compatibility with the pcl::PointXYZ type, which contains a
// padding float for performance reasons. We could use the "pcl_conversion" utility functions to construct
@@ -523,7 +538,7 @@

void ZividCamera::publishPointCloudXYZRGBA(const std_msgs::Header& header, const Zivid::PointCloud& point_cloud)
{
ROS_DEBUG("Publishing points/xyzrgba");
ROS_DEBUG_STREAM("Publishing " << points_xyzrgba_publisher_.getTopic());

auto msg = boost::make_shared<sensor_msgs::PointCloud2>();
fillCommonMsgFields(*msg, header, point_cloud.width(), point_cloud.height());
@@ -547,16 +562,15 @@ void ZividCamera::publishPointCloudXYZRGBA(const std_msgs::Header& header, const
void ZividCamera::publishColorImage(const std_msgs::Header& header, const sensor_msgs::CameraInfoConstPtr& camera_info,
const Zivid::PointCloud& point_cloud)
{
ROS_DEBUG("Publishing color image");
ROS_DEBUG_STREAM("Publishing " << color_image_publisher_.getTopic() << " from point cloud");
auto image = makePointCloudImage<Zivid::ColorRGBA>(point_cloud, header, sensor_msgs::image_encodings::RGBA8);
color_image_publisher_.publish(image, camera_info);
}

void ZividCamera::publishColorImage(const std_msgs::Header& header, const sensor_msgs::CameraInfoConstPtr& camera_info,
const Zivid::Image<Zivid::ColorRGBA>& image)
{
ROS_DEBUG("Publishing color image");

ROS_DEBUG_STREAM("Publishing " << color_image_publisher_.getTopic() << " from Image");
auto msg = makeImage(header, sensor_msgs::image_encodings::RGBA8, image.width(), image.height());
const auto uint8_ptr_begin = reinterpret_cast<const uint8_t*>(image.data());
const auto uint8_ptr_end = reinterpret_cast<const uint8_t*>(image.data() + image.size());
@@ -567,19 +581,38 @@ void ZividCamera::publishColorImage(const std_msgs::Header& header, const sensor
void ZividCamera::publishDepthImage(const std_msgs::Header& header, const sensor_msgs::CameraInfoConstPtr& camera_info,
const Zivid::PointCloud& point_cloud)
{
ROS_DEBUG("Publishing depth image");
ROS_DEBUG_STREAM("Publishing " << depth_image_publisher_.getTopic());
auto image = makePointCloudImage<Zivid::PointZ>(point_cloud, header, sensor_msgs::image_encodings::TYPE_32FC1);
depth_image_publisher_.publish(image, camera_info);
}

void ZividCamera::publishSnrImage(const std_msgs::Header& header, const sensor_msgs::CameraInfoConstPtr& camera_info,
const Zivid::PointCloud& point_cloud)
{
ROS_DEBUG("Publishing SNR image");
ROS_DEBUG_STREAM("Publishing " << snr_image_publisher_.getTopic());
auto image = makePointCloudImage<Zivid::SNR>(point_cloud, header, sensor_msgs::image_encodings::TYPE_32FC1);
snr_image_publisher_.publish(image, camera_info);
}

void ZividCamera::publishNormalsXYZ(const std_msgs::Header& header, const Zivid::PointCloud& point_cloud)
{
ROS_DEBUG_STREAM("Publishing " << normals_xyz_publisher_.getTopic());

using ZividDataType = Zivid::NormalXYZ;
auto msg = boost::make_shared<sensor_msgs::PointCloud2>();
fillCommonMsgFields(*msg, header, point_cloud.width(), point_cloud.height());
msg->fields.reserve(3);
msg->fields.push_back(createPointField("normal_x", 0, sensor_msgs::PointField::FLOAT32, 1));
msg->fields.push_back(createPointField("normal_y", 4, sensor_msgs::PointField::FLOAT32, 1));
msg->fields.push_back(createPointField("normal_z", 8, sensor_msgs::PointField::FLOAT32, 1));
msg->is_dense = false;
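// Zivid::NormalXYZ holds three packed 32-bit floats (normal x, y, z) with no padding, so point_step is 12 bytes.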
msg->point_step = sizeof(ZividDataType);
msg->row_step = msg->point_step * msg->width;
msg->data.resize(msg->row_step * msg->height);
point_cloud.copyData<ZividDataType>(reinterpret_cast<ZividDataType*>(msg->data.data()));
normals_xyz_publisher_.publish(msg);
}

sensor_msgs::CameraInfoConstPtr ZividCamera::makeCameraInfo(const std_msgs::Header& header, std::size_t width,
std::size_t height,
const Zivid::CameraIntrinsics& intrinsics)
86 changes: 83 additions & 3 deletions zivid_camera/test/test_zivid_camera.cpp
@@ -72,6 +72,7 @@ class ZividNodeTest : public ZividNodeTestBase
static constexpr auto snr_image_topic_name = "/zivid_camera/snr/image";
static constexpr auto points_xyz_topic_name = "/zivid_camera/points/xyz";
static constexpr auto points_xyzrgba_topic_name = "/zivid_camera/points/xyzrgba";
static constexpr auto normals_xyz_topic_name = "/zivid_camera/normals/xyz";
static constexpr size_t num_dr_capture_servers = 10;
static constexpr auto file_camera_path = "/usr/share/Zivid/data/FileCameraZividOne.zfc";

@@ -205,6 +206,16 @@ class ZividNodeTest : public ZividNodeTestBase
}
}

template <typename FieldType>
void assertPointCloud2Field(const FieldType& field, const std::string& name, uint32_t offset, uint32_t datatype,
uint32_t count)
{
ASSERT_EQ(field.name, name);
ASSERT_EQ(field.offset, offset);
ASSERT_EQ(field.datatype, datatype);
ASSERT_EQ(field.count, count);
}

void assertCameraInfoForFileCamera(const sensor_msgs::CameraInfo& ci) const
{
ASSERT_EQ(ci.width, 1920U);
@@ -275,6 +286,7 @@ TEST_F(ZividNodeTest, testCapturePublishesTopics)
auto snr_image_sub = subscribe<sensor_msgs::Image>(snr_image_topic_name);
auto points_xyz_sub = subscribe<sensor_msgs::PointCloud2>(points_xyz_topic_name);
auto points_xyzrgba_sub = subscribe<sensor_msgs::PointCloud2>(points_xyzrgba_topic_name);
auto normals_xyz_sub = subscribe<sensor_msgs::PointCloud2>(normals_xyz_topic_name);

auto assert_num_topics_received = [&](std::size_t numTopics) {
ASSERT_EQ(color_camera_info_sub.numMessages(), numTopics);
Expand All @@ -285,6 +297,7 @@ TEST_F(ZividNodeTest, testCapturePublishesTopics)
ASSERT_EQ(snr_image_sub.numMessages(), numTopics);
ASSERT_EQ(points_xyz_sub.numMessages(), numTopics);
ASSERT_EQ(points_xyzrgba_sub.numMessages(), numTopics);
ASSERT_EQ(normals_xyz_sub.numMessages(), numTopics);
};

short_wait_duration.sleep();
@@ -328,6 +341,18 @@ class CaptureOutputTest : public TestWithFileCamera
return camera_.capture(Zivid::Settings2D{ Zivid::Settings2D::Acquisitions{ Zivid::Settings2D::Acquisition{} } });
}

void compareFloat(float a, float b, float delta = 1e-6f) const
{
if (std::isnan(a))
{
ASSERT_TRUE(std::isnan(b));
}
else
{
ASSERT_NEAR(a, b, delta);
}
}

void comparePointCoordinate(float ros_coordinate, float sdk_coordinate) const
{
if (std::isnan(ros_coordinate))
@@ -353,6 +378,11 @@ TEST_F(CaptureOutputTest, testCapturePointsXYZGBA)
const auto& last_pc2 = points_sub.lastMessage();
ASSERT_TRUE(last_pc2.has_value());
assertSensorMsgsPointCloud2Meta(*last_pc2, 1920U, 1200U, 16U);
ASSERT_EQ(last_pc2->fields.size(), 4U);
assertPointCloud2Field(last_pc2->fields[0], "x", 0, sensor_msgs::PointField::FLOAT32, 1);
assertPointCloud2Field(last_pc2->fields[1], "y", 4, sensor_msgs::PointField::FLOAT32, 1);
assertPointCloud2Field(last_pc2->fields[2], "z", 8, sensor_msgs::PointField::FLOAT32, 1);
assertPointCloud2Field(last_pc2->fields[3], "rgba", 12, sensor_msgs::PointField::FLOAT32, 1);

const auto point_cloud = captureViaSDKDefaultSettings();
const auto expected_xyzrgba = point_cloud.copyData<Zivid::PointXYZColorRGBA>();
@@ -387,6 +417,10 @@ TEST_F(CaptureOutputTest, testCapturePointsXYZ)
ASSERT_TRUE(point_cloud.has_value());
assertSensorMsgsPointCloud2Meta(*point_cloud, 1920U, 1200U,
16U); // 3x4 bytes for xyz + 4 bytes padding (w) = 16 bytes total
ASSERT_EQ(point_cloud->fields.size(), 3U);
assertPointCloud2Field(point_cloud->fields[0], "x", 0, sensor_msgs::PointField::FLOAT32, 1);
assertPointCloud2Field(point_cloud->fields[1], "y", 4, sensor_msgs::PointField::FLOAT32, 1);
assertPointCloud2Field(point_cloud->fields[2], "z", 8, sensor_msgs::PointField::FLOAT32, 1);

auto point_cloud_sdk = captureViaSDKDefaultSettings();
auto expected_xyz = point_cloud_sdk.copyData<Zivid::PointXYZ>();
@@ -467,8 +501,55 @@ TEST_F(CaptureOutputTest, testCaptureSNRImage)
}
}

#if ZIVID_CORE_VERSION_MAJOR > 2 || (ZIVID_CORE_VERSION_MAJOR == 2 && ZIVID_CORE_VERSION_MINOR >= 2)
// The stripe engine setting was added in SDK 2.2
TEST_F(CaptureOutputTest, testCaptureNormals)
{
auto normals_sub = subscribe<sensor_msgs::PointCloud2>(normals_xyz_topic_name);

enableFirst3DAcquisitionAndCapture();

const auto& point_cloud = normals_sub.lastMessage();
ASSERT_TRUE(point_cloud.has_value());
assertSensorMsgsPointCloud2Meta(*point_cloud, 1920U, 1200U,
3U * sizeof(float)); // 12 bytes total
ASSERT_EQ(point_cloud->fields.size(), 3U);
assertPointCloud2Field(point_cloud->fields[0], "normal_x", 0, sensor_msgs::PointField::FLOAT32, 1);
assertPointCloud2Field(point_cloud->fields[1], "normal_y", 4, sensor_msgs::PointField::FLOAT32, 1);
assertPointCloud2Field(point_cloud->fields[2], "normal_z", 8, sensor_msgs::PointField::FLOAT32, 1);

auto point_cloud_sdk = captureViaSDKDefaultSettings();
auto expected_normal_xyz_before_transform = point_cloud_sdk.copyData<Zivid::NormalXYZ>();
ASSERT_EQ(point_cloud->width, expected_normal_xyz_before_transform.width());
ASSERT_EQ(point_cloud->height, expected_normal_xyz_before_transform.height());
ASSERT_EQ(point_cloud->width * point_cloud->height, expected_normal_xyz_before_transform.size());

// Transform from mm to m (as is done internally in the Zivid ROS driver)
const float scale = 0.001f;
point_cloud_sdk.transform(Zivid::Matrix4x4{ scale, 0, 0, 0, 0, scale, 0, 0, 0, 0, scale, 0, 0, 0, 0, 1 });
auto expected_normal_xyz_after_transform = point_cloud_sdk.copyData<Zivid::NormalXYZ>();
ASSERT_EQ(expected_normal_xyz_after_transform.size(), expected_normal_xyz_before_transform.size());

for (std::size_t i = 0; i < expected_normal_xyz_before_transform.size(); i++)
{
const uint8_t* cloud_ptr = &point_cloud->data[i * point_cloud->point_step];
const float normal_x = *reinterpret_cast<const float*>(&cloud_ptr[0]);
const float normal_y = *reinterpret_cast<const float*>(&cloud_ptr[4]);
const float normal_z = *reinterpret_cast<const float*>(&cloud_ptr[8]);

const auto& expected_sdk_before_transform = expected_normal_xyz_before_transform(i);
// We do a transform in the ROS driver to scale from mm to meters. However, `expected_normal_xyz_before_transform`
// is calculated without that transform, so we need a slightly larger delta for this comparison.
constexpr float delta = 0.001f;
ASSERT_NO_FATAL_FAILURE(compareFloat(normal_x, expected_sdk_before_transform.x, delta));
ASSERT_NO_FATAL_FAILURE(compareFloat(normal_y, expected_sdk_before_transform.y, delta));
ASSERT_NO_FATAL_FAILURE(compareFloat(normal_z, expected_sdk_before_transform.z, delta));

const auto& expected_sdk_after_transform = expected_normal_xyz_after_transform(i);
ASSERT_NO_FATAL_FAILURE(compareFloat(normal_x, expected_sdk_after_transform.x));
ASSERT_NO_FATAL_FAILURE(compareFloat(normal_y, expected_sdk_after_transform.y));
ASSERT_NO_FATAL_FAILURE(compareFloat(normal_z, expected_sdk_after_transform.z));
}
}

TEST_F(TestWithFileCamera, testSettingsEngine)
{
waitForReady();
@@ -495,7 +576,6 @@ TEST_F(TestWithFileCamera, testSettingsEngine)
short_wait_duration.sleep();
ASSERT_EQ(points_sub.numMessages(), 1U);
}
#endif

TEST_F(ZividNodeTest, testCaptureCameraInfo)
{
2 changes: 1 addition & 1 deletion zivid_samples/package.xml
@@ -1,7 +1,7 @@
<?xml version="1.0"?>
<package format="2">
<name>zivid_samples</name>
<version>2.2.0</version>
<version>2.3.0</version>
<description>Contains C++ and Python samples demonstrating
use of the zivid_camera package.</description>
<maintainer email="[email protected]">Zivid</maintainer>