diff --git a/perception/autoware_lidar_transfusion/include/autoware/lidar_transfusion/utils.hpp b/perception/autoware_lidar_transfusion/include/autoware/lidar_transfusion/utils.hpp
index feea2216c5f78..f3a7cecf9ce00 100644
--- a/perception/autoware_lidar_transfusion/include/autoware/lidar_transfusion/utils.hpp
+++ b/perception/autoware_lidar_transfusion/include/autoware/lidar_transfusion/utils.hpp
@@ -36,7 +36,7 @@ struct Box3D
   float yaw;
 };
 
-enum NetworkIO { voxels = 0, num_points, coors, cls_score, dir_pred, bbox_pred, ENUM_SIZE };
+enum NetworkIO { voxels = 0, num_points, coors, cls_score, bbox_pred, dir_pred, ENUM_SIZE };
 
 // cspell: ignore divup
 template <typename T1, typename T2>
diff --git a/perception/autoware_lidar_transfusion/lib/network/network_trt.cpp b/perception/autoware_lidar_transfusion/lib/network/network_trt.cpp
index 3eacf005cff4e..48e8c1014199c 100644
--- a/perception/autoware_lidar_transfusion/lib/network/network_trt.cpp
+++ b/perception/autoware_lidar_transfusion/lib/network/network_trt.cpp
@@ -23,6 +23,20 @@
 namespace autoware::lidar_transfusion
 {
 
+inline NetworkIO nameToNetworkIO(const char * name)
+{
+  static const std::unordered_map<std::string, NetworkIO> name_to_enum = {
+    {"voxels", NetworkIO::voxels},        {"num_points", NetworkIO::num_points},
+    {"coors", NetworkIO::coors},          {"cls_score0", NetworkIO::cls_score},
+    {"bbox_pred0", NetworkIO::bbox_pred}, {"dir_cls_pred0", NetworkIO::dir_pred}};
+
+  auto it = name_to_enum.find(name);
+  if (it != name_to_enum.end()) {
+    return it->second;
+  }
+  throw std::runtime_error("Invalid input name: " + std::string(name));
+}
+
 std::ostream & operator<<(std::ostream & os, const ProfileDimension & profile)
 {
   std::string delim = "";
@@ -253,8 +267,14 @@ bool NetworkTRT::validateNetworkIO()
               << ". Actual size: " << engine->getNbIOTensors() << "." << std::endl;
     throw std::runtime_error("Failed to initialize TRT network.");
   }
+
+  // Initialize tensors_names_ with null values
+  tensors_names_.resize(NetworkIO::ENUM_SIZE, nullptr);
+
+  // Loop over the tensor names and place them in the correct positions
   for (int i = 0; i < NetworkIO::ENUM_SIZE; ++i) {
-    tensors_names_.push_back(engine->getIOTensorName(i));
+    const char * name = engine->getIOTensorName(i);
+    tensors_names_[nameToNetworkIO(name)] = name;
   }
 
   // Log the network IO
diff --git a/perception/autoware_lidar_transfusion/lib/postprocess/postprocess_kernel.cu b/perception/autoware_lidar_transfusion/lib/postprocess/postprocess_kernel.cu
index 905b4ea6f39f4..cfd3bffcc18c9 100644
--- a/perception/autoware_lidar_transfusion/lib/postprocess/postprocess_kernel.cu
+++ b/perception/autoware_lidar_transfusion/lib/postprocess/postprocess_kernel.cu
@@ -83,8 +83,8 @@ __global__ void generateBoxes3D_kernel(
     det_boxes3d[point_idx].y =
      box_output[point_idx + num_proposals] * num_point_values * voxel_size_y + min_y_range;
     det_boxes3d[point_idx].z = box_output[point_idx + 2 * num_proposals];
-    det_boxes3d[point_idx].width = expf(box_output[point_idx + 3 * num_proposals]);
-    det_boxes3d[point_idx].length = expf(box_output[point_idx + 4 * num_proposals]);
+    det_boxes3d[point_idx].length = expf(box_output[point_idx + 3 * num_proposals]);
+    det_boxes3d[point_idx].width = expf(box_output[point_idx + 4 * num_proposals]);
     det_boxes3d[point_idx].height = expf(box_output[point_idx + 5 * num_proposals]);
     det_boxes3d[point_idx].yaw =
       atan2f(dir_cls_output[point_idx], dir_cls_output[point_idx + num_proposals]);
diff --git a/perception/autoware_lidar_transfusion/lib/ros_utils.cpp b/perception/autoware_lidar_transfusion/lib/ros_utils.cpp
index dce4028759de5..ce2a8d1911941 100644
--- a/perception/autoware_lidar_transfusion/lib/ros_utils.cpp
+++ b/perception/autoware_lidar_transfusion/lib/ros_utils.cpp
@@ -48,12 +48,10 @@ void box3DToDetectedObject(
   obj.classification.emplace_back(classification);
 
   // pose and shape
-  // mmdet3d yaw format to ros yaw format
-  float yaw = box3d.yaw + autoware::universe_utils::pi / 2;
   obj.kinematics.pose_with_covariance.pose.position =
     autoware::universe_utils::createPoint(box3d.x, box3d.y, box3d.z);
   obj.kinematics.pose_with_covariance.pose.orientation =
-    autoware::universe_utils::createQuaternionFromYaw(yaw);
+    autoware::universe_utils::createQuaternionFromYaw(box3d.yaw);
   obj.shape.type = autoware_perception_msgs::msg::Shape::BOUNDING_BOX;
   obj.shape.dimensions =
     autoware::universe_utils::createTranslation(box3d.length, box3d.width, box3d.height);
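
Note (not part of the patch): the patched loop in NetworkTRT::validateNetworkIO() fills tensors_names_ by tensor name instead of by engine index, so each name always lands in its fixed NetworkIO slot even if the engine enumerates IO tensors in a different order. Below is a minimal standalone C++ sketch of that idea; the enum and nameToNetworkIO body mirror the diff, while engine_order is a hypothetical enumeration order used only for illustration.

// Standalone sketch, compiles on its own; not part of the patch.
#include <iostream>
#include <stdexcept>
#include <string>
#include <unordered_map>
#include <vector>

enum NetworkIO { voxels = 0, num_points, coors, cls_score, bbox_pred, dir_pred, ENUM_SIZE };

inline NetworkIO nameToNetworkIO(const char * name)
{
  static const std::unordered_map<std::string, NetworkIO> name_to_enum = {
    {"voxels", NetworkIO::voxels},        {"num_points", NetworkIO::num_points},
    {"coors", NetworkIO::coors},          {"cls_score0", NetworkIO::cls_score},
    {"bbox_pred0", NetworkIO::bbox_pred}, {"dir_cls_pred0", NetworkIO::dir_pred}};

  auto it = name_to_enum.find(name);
  if (it != name_to_enum.end()) {
    return it->second;
  }
  throw std::runtime_error("Invalid input name: " + std::string(name));
}

int main()
{
  // Hypothetical order in which an engine could report its IO tensors.
  const std::vector<const char *> engine_order = {
    "voxels", "num_points", "coors", "bbox_pred0", "dir_cls_pred0", "cls_score0"};

  // Same logic as the patched loop: each name is placed at its fixed NetworkIO
  // position, so downstream code can index by enum regardless of engine order.
  std::vector<const char *> tensors_names(NetworkIO::ENUM_SIZE, nullptr);
  for (const char * name : engine_order) {
    tensors_names[nameToNetworkIO(name)] = name;
  }

  for (int i = 0; i < NetworkIO::ENUM_SIZE; ++i) {
    std::cout << i << ": " << tensors_names[i] << '\n';
  }
  return 0;
}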