This repository has been archived by the owner on Jan 23, 2024. It is now read-only.

Feature/revised time stamps #8

Open: wants to merge 3 commits into master.
@@ -0,0 +1,85 @@
<launch>
  <arg name="mav_name" default="falcon"/>
  <arg name="namespace" default="$(arg mav_name)" />
  <rosparam file="$(find dji_sdk)/resources/capabilities.yaml"/>
  <group ns="$(arg namespace)" >

    <node name="dji_sdk" pkg="dji_sdk" type="dji_sdk_node" output="screen">
      <param name="serial_name" type="string" value="/dev/ttyUSB0"/>
      <param name="baud_rate" type="int" value="921600"/>
      <param name="app_id" type="int" value="1027253"/>
      <param name="app_version" type="int" value="1"/>
      <param name="app_bundle_id" type="string" value="Welcome to use dji-sdk"/>
      <param name="enc_key" type="string" value="2267d8cc7874b641a2d16798afb659ba4876b5dfd9b0a0e471c00e188b5c9ed3"/>
      <param name="groundstation_enable" type="int" value="1"/>
    </node>

    <node name="pose_sensor_rovio" pkg="msf_updates" type="pose_sensor" clear_params="true" output="screen">
      <remap from="msf_core/imu_state_input" to="dji_ros/imu" />
      <remap from="msf_updates/transform_input" to="rovio/transform" />
      <rosparam file="$(find dji_sdk)/resources/msf_parameters_vision_realsense-2017-02-26.yaml"/>
      <param name="capability_group" value="Core" />
    </node>

    <node name="mav_linear_mpc" pkg="mav_linear_mpc" type="mav_linear_mpc_node" respawn="true" clear_params="true" output="screen">
      <remap from="odometry" to="msf_core/odometry" />
      <param name="use_rc_teleop" value="false" />
      <remap from="command/roll_pitch_yawrate_thrust" to="fcu/command/roll_pitch_yawrate_thrust" />
      <rosparam file="$(find mav_linear_mpc)/resources/linear_mpc_rovio_$(arg mav_name).yaml"/>
      <rosparam file="$(find mav_disturbance_observer)/resources/disturbance_observer_$(arg mav_name).yaml"/>
      <param name="capability_group" value="Core" />
    </node>

    <node pkg="realsense_ros" type="zr300node" name="realsense_zr300">
      <param name="imu/intrinsics_yaml" value="$(find dji_sdk)/resources/sensors/intel2/realsense.yaml"/>
      <param name="imu/publish_sensors_inidividually" value="false"/>

      <param name="depth/width" value="640"/>
      <param name="depth/height" value="480"/>
      <param name="depth/fps" value="30"/>
      <param name="depth/enabled" value="false"/>
      <param name="depth/subsample_factor" value="3"/>

      <param name="infrared/enabled" value="false"/>

      <param name="color/width" value="640"/>
      <param name="color/height" value="480"/>
      <param name="color/fps" value="30"/>
      <param name="color/enabled" value="false"/>
      <param name="color/subsample_factor" value="3"/>

      <param name="fisheye/width" value="640"/>
      <param name="fisheye/height" value="480"/>
      <param name="fisheye/fps" value="30"/>
      <param name="fisheye/enabled" value="true"/>
      <param name="fisheye/subsample_factor" value="1"/>
      <param name="pointcloud/enabled" value="false"/>
    </node>

    <node name="rovio" pkg="rovio" type="rovio_node" output="screen">
      <param name="filter_config" value="$(find dji_sdk)/resources/sensors/intel2/rovio_filter.info" />
      <param name="camera0_config" value="$(find dji_sdk)/resources/sensors/intel2/rovio_cam.yaml" />
      <remap from="cam0/image_raw" to="fisheye/fisheye"/>
      <remap from="imu0" to="imu/compensated"/>
      <param name="world_frame" value="odom"/>
      <param name="capability_group" value="Rovio" />
    </node>

    <!-- Init Rovio yaw -->
    <node pkg="rovio" type="init_rovio_enu.py" name="init_rovio_enu" output="screen">
      <remap from="mag_imu" to="dji_ros/imu" />
    </node>

    <node name="waypoint_navigator_node" pkg="waypoint_navigator" type="waypoint_navigator_node" respawn="true" clear_params="true" output="screen">
      <rosparam file="$(find dji_sdk)/resources/trajectory_exp_m100_rect_1m.yaml"/>
      <param name="mav_name" type="string" value="$(arg namespace)" />
      <!-- Real -->
      <!-- remap from="odometry" to="msf_core/odometry" / -->
      <!-- Simulation -->
      <!-- <remap from="odometry" to="ground_truth/odometry" /> -->
      <remap from="odometry" to="msf_core/odometry" />
    </node>
    <node name="trajectory_sampler" pkg="waypoint_navigator" type="trajectory_sampler_node" output="screen" />

  </group>
</launch>
@@ -0,0 +1,86 @@
<launch>
  <arg name="mav_name" default="falcon"/>
  <arg name="namespace" default="$(arg mav_name)" />
  <rosparam file="$(find dji_sdk)/resources/capabilities.yaml"/>
  <group ns="$(arg namespace)" >

    <node name="dji_sdk" pkg="dji_sdk" type="dji_sdk_node" output="screen">
      <param name="serial_name" type="string" value="/dev/ttyUSB0"/>
      <param name="baud_rate" type="int" value="921600"/>
      <param name="app_id" type="int" value="1027253"/>
      <param name="app_version" type="int" value="1"/>
      <param name="app_bundle_id" type="string" value="Welcome to use dji-sdk"/>
      <param name="enc_key" type="string" value="2267d8cc7874b641a2d16798afb659ba4876b5dfd9b0a0e471c00e188b5c9ed3"/>
      <param name="groundstation_enable" type="int" value="1"/>
    </node>

    <node name="pose_sensor_rovio" pkg="msf_updates" type="pose_sensor" clear_params="true" output="screen">
      <remap from="msf_core/imu_state_input" to="dji_ros/imu" />
      <remap from="msf_updates/transform_input" to="rovio/transform" />
      <rosparam file="$(find dji_sdk)/resources/msf_parameters_vision_realsense-2017-02-26.yaml"/>
      <param name="capability_group" value="Core" />
    </node>

    <node name="mav_linear_mpc" pkg="mav_linear_mpc" type="mav_linear_mpc_node" respawn="true" clear_params="true" output="screen">
      <remap from="odometry" to="msf_core/odometry" />
      <param name="use_rc_teleop" value="false" />
      <remap from="command/roll_pitch_yawrate_thrust" to="fcu/command/roll_pitch_yawrate_thrust" />
      <rosparam file="$(find mav_linear_mpc)/resources/linear_mpc_rovio_$(arg mav_name).yaml"/>
      <rosparam file="$(find mav_disturbance_observer)/resources/disturbance_observer_$(arg mav_name).yaml"/>
      <param name="capability_group" value="Core" />
    </node>

    <node pkg="realsense_ros" type="zr300node" name="realsense_zr300">
      <param name="imu/intrinsics_yaml" value="$(find dji_sdk)/resources/sensors/intel2/realsense.yaml"/>
      <param name="imu/publish_sensors_inidividually" value="false"/>

      <param name="depth/width" value="640"/>
      <param name="depth/height" value="480"/>
      <param name="depth/fps" value="30"/>
      <param name="depth/enabled" value="false"/>
      <param name="depth/subsample_factor" value="3"/>

      <param name="infrared/enabled" value="false"/>

      <param name="color/width" value="640"/>
      <param name="color/height" value="480"/>
      <param name="color/fps" value="30"/>
      <param name="color/enabled" value="false"/>
      <param name="color/subsample_factor" value="3"/>

      <param name="fisheye/width" value="640"/>
      <param name="fisheye/height" value="480"/>
      <param name="fisheye/fps" value="30"/>
      <param name="fisheye/enabled" value="true"/>
      <param name="fisheye/subsample_factor" value="1"/>
      <param name="pointcloud/enabled" value="false"/>
    </node>

    <node name="rovio" pkg="rovio" type="rovio_node" output="screen">
      <param name="filter_config" value="$(find dji_sdk)/resources/sensors/intel2/rovio_filter.info" />
      <param name="camera0_config" value="$(find dji_sdk)/resources/sensors/intel2/rovio_cam.yaml" />
      <remap from="cam0/image_raw" to="fisheye/fisheye"/>
      <remap from="imu0" to="imu/compensated"/>
      <param name="world_frame" value="odom"/>
      <param name="capability_group" value="Rovio" />
    </node>

    <!-- Init Rovio yaw -->
    <node pkg="rovio" type="init_rovio_enu.py" name="init_rovio_enu" output="screen">
      <remap from="mag_imu" to="dji_ros/imu" />
    </node>

    <node name="waypoint_navigator_node" pkg="waypoint_navigator" type="waypoint_navigator_node" respawn="true" clear_params="true" output="screen">
      <rosparam file="$(find dji_sdk)/resources/trajectory_exp_m100_rect_1m.yaml"/>
      <param name="mav_name" type="string" value="$(arg namespace)" />
      <!-- Real -->
      <!-- remap from="odometry" to="msf_core/odometry" / -->
      <!-- Simulation -->
      <!-- <remap from="odometry" to="ground_truth/odometry" /> -->
      <remap from="odometry" to="msf_core/odometry" />
    </node>

    <node name="trajectory_sampler" pkg="mav_trajectory_generation_ros" type="trajectory_sampler_node" output="screen" />

  </group>
</launch>
157 changes: 82 additions & 75 deletions dji_sdk/src/modules/dji_sdk_node_main.cpp
@@ -25,13 +25,7 @@
//----------------------------------------------------------
using namespace std;

// TODO: Fix temporary hack for getting hardware timestamps using nanoTime
// should not be defined globally
auto g_counter = 0;
const auto g_WRAP_TIME = 4.295; //seconds
uint32_t g_prevNanoTime = 0;
long double g_hardwareStamp;


void DJISDKNode::transparent_transmission_callback(uint8_t *buf, uint8_t len)
{
dji_sdk::TransparentTransmissionData transparent_transmission_data;
@@ -43,39 +37,93 @@ void DJISDKNode::transparent_transmission_callback(uint8_t *buf, uint8_t len)
void DJISDKNode::broadcast_callback()
{
DJI::onboardSDK::BroadcastData bc_data = rosAdapter->coreAPI->getBroadcastData();

DJI::onboardSDK::Version version = rosAdapter->coreAPI->getSDKVersion();
unsigned short msg_flags = bc_data.dataFlag;

static int frame_id = 0;
frame_id ++;

auto current_time = ros::Time::now();


auto current_time = ros::Time::now();

if(msg_flags & HAS_TIME){
if (msg_flags & HAS_TIME) {

// Only this message contains the received time in the header and the
// hardware time within the message
// All following messages should have translated hardware times in the
// header.

time_stamp.header.frame_id = "/time";
time_stamp.header.stamp = current_time;
time_stamp.time = bc_data.timeStamp.time; // This is now in units of 1/400
// s but resolution is only of
// the order of 1s!!!
time_stamp.time_ns =
bc_data.timeStamp.nanoTime; // This is perhaps more accurate??? But
// requires handling the wrapping after
// every 4.295 secs. DO NOT SUM both as it
// leads to double counting.

time_stamp.sync_flag = bc_data.timeStamp.syncFlag;
time_stamp_publisher.publish(time_stamp);


// Use translated hardware time
if (device_time_translator_) {
current_time = device_time_translator_->update(bc_data.timeStamp.nanoTime, current_time,0.0);
}
/*
if (device_time_translator_->isReadyToTranslate()) {
current_time =
device_time_translator_->translate(bc_data.timeStamp.nanoTime);
}*/ else {
ROS_WARN(
"device_time_translator is not ready yet, using ros::Time::now()");
}

}

// Only this message contains the received time in the header and the hardware time within the message
// All following messages should have translated hardware times in the header.
time_stamp.header.frame_id = "/time";
time_stamp.header.stamp = current_time;
// update Imu message
if ((msg_flags & HAS_Q) && (msg_flags & HAS_W) && (msg_flags & HAS_A)) {

ROS_DEBUG_STREAM("Frame " << frame_id << " " << "Hardware timeStamp.nanoTime "<< std::setprecision(15) << double(bc_data.timeStamp.nanoTime)*1e-9);
imu_msg.header.frame_id = "/world";

imu_msg.header.stamp = current_time;
//TODO: Raghav - Update with nearest neighbour messages to increase frequency
// Conversion from NED to NWU (XYZ--robot body frame)
/*
tf::Quaternion qNED2ENU();
qNED2ENU.setEulerZYX(pi/2,0, -pi/2);
tf::Quaternion qIMU(bc_data.q.q1, bc_data.q.q2, bc_data.q.q3,
bc_data.q.q0);
tf::Quaternion qIMUENU;
qIMUENU = qNED2ENU*qIMU;
imu_msg.orientation = quaternionTFToMsg(qIMUENU);
*/
// Convert from NED to NWU (note: this is NWU, not ENU): negate y and z
imu_msg.orientation.w = bc_data.q.q0;
imu_msg.orientation.x = bc_data.q.q1;
imu_msg.orientation.y = -bc_data.q.q2;
imu_msg.orientation.z = -bc_data.q.q3;
imu_msg.angular_velocity.x = bc_data.w.x;
imu_msg.angular_velocity.y = -bc_data.w.y;
imu_msg.angular_velocity.z = -bc_data.w.z;

imu_msg.linear_acceleration.x = bc_data.a.x * 9.807;
imu_msg.linear_acceleration.y = -bc_data.a.y * 9.807;
imu_msg.linear_acceleration.z = -bc_data.a.z * 9.807;

imu_msg.orientation_covariance = {-1.0, 0.0, 0.0, 0.0, 0.0,
0.0, 0.0, 0.0, 0.0};
imu_msg.angular_velocity_covariance = {0, 0.0, 0.0, 0.0, 0,
0.0, 0.0, 0.0, 0};
imu_msg.linear_acceleration_covariance = {0, 0.0, 0.0, 0.0, 0,
0.0, 0.0, 0.0, 0};
imu_msg_publisher.publish(imu_msg);
}


if(device_time_translator_){
current_time = device_time_translator_->update(bc_data.timeStamp.time, current_time, 0.0); // estimated offset is 0.0 for now; observed to usually be < 2 ms but can be up to ~20 ms!!!
//cout << setprecision(15) <<setw(30) << "Translated HW Time Offset: " << current_time.toSec() - ros::Time::now().toSec() << endl;
}
time_stamp.time = bc_data.timeStamp.time; // This is now in units of 1/400 s
time_stamp.time_ns = bc_data.timeStamp.nanoTime; // This is perhaps more accurate??? But requires handling the wrapping after every 4.295 secs. DO NOT SUM both as it leads to double counting.
if(bc_data.timeStamp.nanoTime < ::g_prevNanoTime){
::g_counter++;
}
g_hardwareStamp = ::g_counter*::g_WRAP_TIME + double(bc_data.timeStamp.nanoTime)*(1e-9);
::g_prevNanoTime = bc_data.timeStamp.nanoTime;
//cout << setprecision(15) <<setw(30) << "HW Time: " << g_hardwareStamp << endl; //TODO: Publish instead of printing, seems to drift by about 1ms every ~ 50s
time_stamp.sync_flag = bc_data.timeStamp.syncFlag;
time_stamp_publisher.publish(time_stamp);
}
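// ---------------------------------------------------------------------------
// [Illustrative sketch, not part of this PR] The 4.295 s figure above comes
// from the 32-bit nanoTime counter: it overflows every 2^32 ns ~= 4.295 s,
// while the coarse `time` field counts ticks of 1/400 s. The snippet below
// shows, in isolation, the manual unwrapping that the removed g_counter /
// g_prevNanoTime globals performed; in this PR that job is delegated to
// cuckoo_time_translator. All names here are made up for the example.
#include <cstdint>
#include <cstdio>

class NanoTimeUnwrapper {
 public:
  // Returns the elapsed hardware time in seconds, accounting for rollovers
  // of the 32-bit nanosecond counter.
  double unwrap(uint32_t nano_time) {
    if (nano_time < prev_nano_time_) {
      ++wrap_count_;  // the counter rolled over since the last sample
    }
    prev_nano_time_ = nano_time;
    constexpr double kWrapPeriodSec = 4294967296.0 * 1e-9;  // 2^32 ns ~= 4.295 s
    return wrap_count_ * kWrapPeriodSec + nano_time * 1e-9;
  }

 private:
  uint32_t prev_nano_time_ = 0;
  uint64_t wrap_count_ = 0;
};

int main() {
  NanoTimeUnwrapper unwrapper;
  std::printf("coarse time: %.3f s\n", 1200 / 400.0);                   // 1200 ticks of 1/400 s -> 3.0 s
  std::printf("before wrap: %.6f s\n", unwrapper.unwrap(4200000000u));
  std::printf("after wrap:  %.6f s\n", unwrapper.unwrap(100000000u));   // counter wrapped in between
  return 0;
}
// ---------------------------------------------------------------------------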

//update attitude msg
if ( (msg_flags & HAS_Q) && (msg_flags & HAS_W) ) {
@@ -92,45 +140,7 @@ void DJISDKNode::broadcast_callback()
attitude_quaternion_publisher.publish(attitude_quaternion);
}

//update Imu message
if ( (msg_flags & HAS_Q) && (msg_flags & HAS_W) && (msg_flags & HAS_A) ) {
imu_msg.header.frame_id = "/world";
// Use translated hardware time
imu_msg.header.stamp = current_time;

// Conversion from NED to NWU (XYZ--robot body frame)
/*
tf::Quaternion qNED2ENU();
qNED2ENU.setEulerZYX(pi/2,0, -pi/2);
tf::Quaternion qIMU(bc_data.q.q1, bc_data.q.q2, bc_data.q.q3, bc_data.q.q0);
tf::Quaternion qIMUENU;
qIMUENU = qNED2ENU*qIMU;
imu_msg.orientation = quaternionTFToMsg(qIMUENU);
*/
// Convert from NED to NWU (note: this is NWU, not ENU): negate y and z
imu_msg.orientation.w = bc_data.q.q0;
imu_msg.orientation.x = bc_data.q.q1;
imu_msg.orientation.y = -bc_data.q.q2;
imu_msg.orientation.z = -bc_data.q.q3;
imu_msg.angular_velocity.x = bc_data.w.x;
imu_msg.angular_velocity.y = -bc_data.w.y;
imu_msg.angular_velocity.z = -bc_data.w.z;

imu_msg.linear_acceleration.x = bc_data.a.x*9.807;
imu_msg.linear_acceleration.y = -bc_data.a.y*9.807;
imu_msg.linear_acceleration.z = -bc_data.a.z*9.807;

imu_msg.orientation_covariance={-1.0,0.0,0.0,\
0.0,0.0,0.0,\
0.0,0.0,0.0};
imu_msg.angular_velocity_covariance={0,0.0,0.0,\
0.0,0,0.0,\
0.0,0.0,0};
imu_msg.linear_acceleration_covariance={0,0.0,0.0,\
0.0,0,0.0,\
0.0,0.0,0};
imu_msg_publisher.publish(imu_msg);
}
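// ---------------------------------------------------------------------------
// [Illustrative sketch, not part of this PR] The sign flips used for the IMU
// message above amount to a 180 degree rotation about the body x-axis, which
// takes NED / front-right-down onto NWU / front-left-up: vectors keep x and
// negate y and z, and when both the world and the body frame are flipped this
// way the attitude quaternion transforms as (w, x, y, z) -> (w, x, -y, -z).
// The helpers below are hypothetical names used only for this example.
#include <cstdio>

struct Vector3 { double x, y, z; };
struct Quaternion { double w, x, y, z; };

// 180 deg rotation about x: keep the x component, negate y and z.
inline Vector3 nedToNwu(const Vector3& v) { return {v.x, -v.y, -v.z}; }

// Same negation pattern for the orientation quaternion (the quaternion double
// cover makes the overall sign irrelevant), matching the assignments above.
inline Quaternion nedToNwu(const Quaternion& q) { return {q.w, q.x, -q.y, -q.z}; }

int main() {
  // A vector along +z (down) in NED points along -z in NWU, where +z is up.
  Vector3 v_ned{0.0, 0.0, 9.807};
  Vector3 v_nwu = nedToNwu(v_ned);
  std::printf("NWU vector: (%.3f, %.3f, %.3f)\n", v_nwu.x, v_nwu.y, v_nwu.z);
  return 0;
}
// ---------------------------------------------------------------------------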


//update global_position, gps and geopose msg
if (msg_flags & HAS_POS) {
@@ -575,11 +585,8 @@ DJISDKNode::DJISDKNode(ros::NodeHandle& nh, ros::NodeHandle& nh_private) : dji_s
init_subscribers(nh);
// Init time stamp translator
device_time_translator_.reset(new cuckoo_time_translator::DefaultDeviceTimeUnwrapperAndTranslator(
cuckoo_time_translator::WrappingClockParameters(
UINT32_MAX, // offset at every overflow of containing uint32_t
400 // at 400Hz
),
nh.getNamespace()
cuckoo_time_translator::WrappingClockParameters(UINT32_MAX, 1e9),
nh.getNamespace(), cuckoo_time_translator::Defaults().setFilterAlgorithm(cuckoo_time_translator::FilterAlgorithm::ConvexHull)
)
);
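// ---------------------------------------------------------------------------
// [Illustrative sketch, not part of this PR] With the revised parameters the
// unwrapper is told that the device clock wraps at UINT32_MAX ticks and runs
// at 1e9 ticks per second (the broadcast nanoTime field), instead of the
// previous 1/400 s counter at 400 Hz, and the ConvexHull filter is selected
// for the offset estimation. The sketch below condenses the setup and the
// per-message call as they appear in this diff; the include path and exact
// upstream signatures are assumptions, not verified against the library.
#include <cstdint>
#include <memory>

#include <cuckoo_time_translator/DeviceTimeTranslator.h>  // assumed include path
#include <ros/ros.h>

using cuckoo_time_translator::DefaultDeviceTimeUnwrapperAndTranslator;

std::unique_ptr<DefaultDeviceTimeUnwrapperAndTranslator> makeTranslator(ros::NodeHandle& nh) {
  using namespace cuckoo_time_translator;
  return std::make_unique<DefaultDeviceTimeUnwrapperAndTranslator>(
      WrappingClockParameters(UINT32_MAX, 1e9),  // wrap value, device clock frequency [Hz]
      nh.getNamespace(),
      Defaults().setFilterAlgorithm(FilterAlgorithm::ConvexHull));
}

// Called once per broadcast packet: feed the raw 32-bit device stamp together
// with the local receive time; the translator returns that event re-expressed
// on the unwrapped, filtered device clock (offset estimate passed as 0.0 here,
// as in the code above).
ros::Time translateStamp(DefaultDeviceTimeUnwrapperAndTranslator& translator,
                         uint32_t device_nano_time, const ros::Time& receive_time) {
  return translator.update(device_nano_time, receive_time, 0.0);
}
// ---------------------------------------------------------------------------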

2 changes: 1 addition & 1 deletion dji_sdk_demo/script/client.py
@@ -190,7 +190,7 @@ def main():
drone.stop_video()
elif main_operate_code == 'm':
# Local Navi Test
drone.local_position_navigation_send_request(-30, -30, 15)
drone.local_position_navigation_send_request(2, 2, 2)
elif main_operate_code == 'n':
# GPS Navi Test
drone.global_position_navigation_send_request(47.453184, 8.68028, 20)