diff --git a/README.md b/README.md
index de914eb..2d73638 100644
--- a/README.md
+++ b/README.md
@@ -1,2 +1,13 @@
# HARMONI-PC
PC Hardware wrapper for HARMONI low-level interfaces
+
+Currently based on:
+
+* https://github.com/robotpt/qt-face-tracking (original source)
+* https://github.com/shinselrobots/body_tracker_msgs
+* https://github.com/shinselrobots/nuitrack_body_tracker
+* https://github.com/ros/robot_state_publisher
+
+Note that the use of git submodules is presently being avoided to encourage modification of any dependencies on an as-needed basis. We may later transition to a more proper repository structure as our codebase becomes more mature.
+
+See simulator instructions [here](qt_simulator/README.md)
\ No newline at end of file
diff --git a/qt_simulator/.vscode/c_cpp_properties.json b/qt_simulator/.vscode/c_cpp_properties.json
new file mode 100644
index 0000000..0b0eed0
--- /dev/null
+++ b/qt_simulator/.vscode/c_cpp_properties.json
@@ -0,0 +1,16 @@
+{
+ "configurations": [
+ {
+ "name": "Linux",
+ "includePath": [
+ "${workspaceFolder}/**"
+ ],
+ "defines": [],
+ "compilerPath": "/usr/bin/gcc",
+ "cStandard": "c11",
+ "cppStandard": "c++17",
+ "intelliSenseMode": "clang-x64"
+ }
+ ],
+ "version": 4
+}
\ No newline at end of file
diff --git a/qt_simulator/README.md b/qt_simulator/README.md
new file mode 100644
index 0000000..7d6d476
--- /dev/null
+++ b/qt_simulator/README.md
@@ -0,0 +1,48 @@
+# Instructions
+
+## Robot state
+
+### Setup RVIZ on QT
+The commands are:
+
+* Run the following command for converting the QT joints from degrees to radians:
+```
+rosrun qt_robot_state qt_joint_radians.py
+```
+* Run the following command for running the RViz simulator of QTRobot:
+```
+roslaunch qt_robot_state qt_robot_state.launch
+```
+
+
+
+### Setup RVIZ simulator of QT model
+* Run the following command for running the RViz simulator of QTRobot:
+```
+roslaunch qt_robot_state simulator_robot_state.launch
+```
+
+## Face recognition
+
+### Setup QT for face recognition
+
+* Run the following command for running the nuitrack:
+```
+roslaunch nuitrack_body_tracker nuitrack_body_tracker.launch
+```
+
+### Run the face recognition
+* Run the following command for running the tf listener:
+```
+roslaunch qt_robot_state qt_robot_state.launch
+```
+* Run the following command for running the face recognition:
+```
+roslaunch qt_face_recognition qt_face_recognition.launch
+```
+
+* Run the following command for getting the depth information from camera:
+```
+rosrun qt_face_recognition depht_rgb_coordinator.py
+```
+
diff --git a/qt_simulator/body_tracker_msgs/.local_test.sh b/qt_simulator/body_tracker_msgs/.local_test.sh
new file mode 100644
index 0000000..7181c48
--- /dev/null
+++ b/qt_simulator/body_tracker_msgs/.local_test.sh
@@ -0,0 +1,4 @@
+#!/bin/bash
+docker run --rm -i -v `pwd`:`pwd` -e "CI_SOURCE_PATH=`pwd`" -e "ROS_DISTRO=indigo" -t ubuntu:trusty sh -c "cd `pwd`; ./.travis.sh"
+docker run --rm -i -v `pwd`:`pwd` -e "CI_SOURCE_PATH=`pwd`" -e "ROS_DISTRO=kinetic" -t ubuntu:xenial sh -c "cd `pwd`; ./.travis.sh"
+docker run --rm -i -v `pwd`:`pwd` -e "CI_SOURCE_PATH=`pwd`" -e "ROS_DISTRO=melodic" -t ubuntu:bionic sh -c "cd `pwd`; ./.travis.sh"
diff --git a/qt_simulator/body_tracker_msgs/.travis.sh b/qt_simulator/body_tracker_msgs/.travis.sh
new file mode 100644
index 0000000..a17fb96
--- /dev/null
+++ b/qt_simulator/body_tracker_msgs/.travis.sh
@@ -0,0 +1,30 @@
+#!/bin/bash
+
+set -e
+export DEBIAN_FRONTEND=noninteractive
+export TERM=xterm
+
+apt-get update && apt-get install -y -q wget sudo lsb-release gnupg
+
+#before_install:
+sh -c "echo \"deb http://packages.ros.org/ros/ubuntu `lsb_release -sc` main\" > /etc/apt/sources.list.d/ros-latest.list"
+sudo apt-key adv --keyserver hkp://ha.pool.sks-keyservers.net:80 --recv-key 421C365BD9FF1F717815A3895523BAEEB01FA116
+apt-get update && apt-get install -y -q python-catkin-pkg python-rosdep python-wstool ros-$ROS_DISTRO-catkin build-essential
+source /opt/ros/$ROS_DISTRO/setup.bash
+rosdep init
+rosdep update
+
+#install:
+mkdir -p ~/catkin_ws/src
+cd ~/catkin_ws/src
+catkin_init_workspace
+ln -s $CI_SOURCE_PATH .
+
+#before_script:
+cd ~/catkin_ws
+rosdep install -q -y -r --from-paths src --ignore-src --rosdistro $ROS_DISTRO
+
+#script:
+source /opt/ros/$ROS_DISTRO/setup.bash
+cd ~/catkin_ws
+catkin_make
diff --git a/qt_simulator/body_tracker_msgs/.travis.yml b/qt_simulator/body_tracker_msgs/.travis.yml
new file mode 100644
index 0000000..d092dd9
--- /dev/null
+++ b/qt_simulator/body_tracker_msgs/.travis.yml
@@ -0,0 +1,14 @@
+sudo: required
+dist: trusty
+language: generic
+
+env:
+ - ROS_DISTRO=kinetic
+
+before_install:
+ - export CI_SOURCE_PATH=$(pwd)
+ - export REPOSITORY_NAME=${PWD##*/}
+
+script:
+ - echo "Testing branch $TRAVIS_BRANCH of $REPOSITORY_NAME"
+ - docker run --rm -i -v $CI_SOURCE_PATH:$CI_SOURCE_PATH -e "CI_SOURCE_PATH=$CI_SOURCE_PATH" -e "HOME=$HOME" -e "ROS_DISTRO=$ROS_DISTRO" -t ubuntu:xenial sh -c "cd $CI_SOURCE_PATH; ./.travis.sh"
diff --git a/qt_simulator/body_tracker_msgs/CMakeLists.txt b/qt_simulator/body_tracker_msgs/CMakeLists.txt
new file mode 100644
index 0000000..70e8716
--- /dev/null
+++ b/qt_simulator/body_tracker_msgs/CMakeLists.txt
@@ -0,0 +1,207 @@
+cmake_minimum_required(VERSION 2.8.3)
+project(body_tracker_msgs)
+
+## Compile as C++11, supported in ROS Kinetic and newer
+# add_compile_options(-std=c++11)
+
+## Find catkin macros and libraries
+## if COMPONENTS list like find_package(catkin REQUIRED COMPONENTS xyz)
+## is used, also find other catkin packages
+find_package(catkin REQUIRED COMPONENTS
+ roscpp
+ rospy
+ std_msgs
+ geometry_msgs
+ message_generation
+)
+
+## System dependencies are found with CMake's conventions
+# find_package(Boost REQUIRED COMPONENTS system)
+#set_target_properties(${_projname} PROPERTIES FOLDER "${SAMPLE_DIR_FOLDER}c-api")
+
+
+## Uncomment this if the package has a setup.py. This macro ensures
+## modules and global scripts declared therein get installed
+## See http://ros.org/doc/api/catkin/html/user_guide/setup_dot_py.html
+# catkin_python_setup()
+
+################################################
+## Declare ROS messages, services and actions ##
+################################################
+
+## To declare and build messages, services or actions from within this
+## package, follow these steps:
+## * Let MSG_DEP_SET be the set of packages whose message types you use in
+## your messages/services/actions (e.g. std_msgs, actionlib_msgs, ...).
+## * In the file package.xml:
+## * add a build_depend tag for "message_generation"
+## * add a build_depend and a run_depend tag for each package in MSG_DEP_SET
+## * If MSG_DEP_SET isn't empty the following dependency has been pulled in
+## but can be declared for certainty nonetheless:
+## * add a run_depend tag for "message_runtime"
+## * In this file (CMakeLists.txt):
+## * add "message_generation" and every package in MSG_DEP_SET to
+## find_package(catkin REQUIRED COMPONENTS ...)
+## * add "message_runtime" and every package in MSG_DEP_SET to
+## catkin_package(CATKIN_DEPENDS ...)
+## * uncomment the add_*_files sections below as needed
+## and list every .msg/.srv/.action file to be processed
+## * uncomment the generate_messages entry below
+## * add every package in MSG_DEP_SET to generate_messages(DEPENDENCIES ...)
+
+## Generate messages in the 'msg' folder
+add_message_files(
+ DIRECTORY
+ msg
+ FILES
+ BodyTracker.msg
+ BodyTrackerArray.msg
+ Skeleton.msg
+)
+
+## Generate services in the 'srv' folder
+# add_service_files(
+# FILES
+# Service1.srv
+# Service2.srv
+# )
+
+## Generate actions in the 'action' folder
+# add_action_files(
+# FILES
+# Action1.action
+# Action2.action
+# )
+
+## Generate added messages and services with any dependencies listed here
+generate_messages(
+ DEPENDENCIES
+ std_msgs
+ geometry_msgs
+)
+
+################################################
+## Declare ROS dynamic reconfigure parameters ##
+################################################
+
+## To declare and build dynamic reconfigure parameters within this
+## package, follow these steps:
+## * In the file package.xml:
+## * add a build_depend and a run_depend tag for "dynamic_reconfigure"
+## * In this file (CMakeLists.txt):
+## * add "dynamic_reconfigure" to
+## find_package(catkin REQUIRED COMPONENTS ...)
+## * uncomment the "generate_dynamic_reconfigure_options" section below
+## and list every .cfg file to be processed
+
+## Generate dynamic reconfigure parameters in the 'cfg' folder
+# generate_dynamic_reconfigure_options(
+# cfg/DynReconf1.cfg
+# cfg/DynReconf2.cfg
+# )
+
+###################################
+## catkin specific configuration ##
+###################################
+## The catkin_package macro generates cmake config files for your package
+## Declare things to be passed to dependent projects
+## INCLUDE_DIRS: uncomment this if your package contains header files
+## LIBRARIES: libraries you create in this project that dependent projects also need
+## CATKIN_DEPENDS: catkin_packages dependent projects also need
+## DEPENDS: system dependencies of this project that dependent projects also need
+
+catkin_package(
+ CATKIN_DEPENDS
+ std_msgs
+ geometry_msgs
+ message_runtime
+)
+
+###########
+## Build ##
+###########
+
+## Specify additional locations of header files
+## Your package locations should be listed before other locations
+#include_directories(
+# include
+# ${catkin_INCLUDE_DIRS}
+#)
+
+## Declare a C++ library
+# add_library(${PROJECT_NAME}
+# src/${PROJECT_NAME}/astra_body_tracker.cpp
+# )
+
+## Add cmake target dependencies of the library
+## as an example, code may need to be generated before libraries
+## either from message generation or dynamic reconfigure
+# add_dependencies(${PROJECT_NAME} ${${PROJECT_NAME}_EXPORTED_TARGETS} ${catkin_EXPORTED_TARGETS})
+
+## Declare a C++ executable
+## With catkin_make all packages are built within a single CMake context
+## The recommended prefix ensures that target names across packages don't collide
+## add_executable(${PROJECT_NAME}_node src/body_tracker_msgs.cpp)
+
+## Rename C++ executable without prefix
+## The above recommended prefix causes long target names, the following renames the
+## target back to the shorter version for ease of user use
+## e.g. "rosrun someones_pkg node" instead of "rosrun someones_pkg someones_pkg_node"
+# set_target_properties(${PROJECT_NAME}_node PROPERTIES OUTPUT_NAME node PREFIX "")
+
+## Add cmake target dependencies of the executable
+## same as for the library above
+##add_dependencies(${PROJECT_NAME}_node ${${PROJECT_NAME}_EXPORTED_TARGETS} ${catkin_EXPORTED_TARGETS})
+
+## Specify libraries to link a library or executable target against
+#target_link_libraries(${PROJECT_NAME}_node
+# ${catkin_LIBRARIES}
+#)
+
+#############
+## Install ##
+#############
+
+# all install targets should use catkin DESTINATION variables
+# See http://ros.org/doc/api/catkin/html/adv_user_guide/variables.html
+
+## Mark executable scripts (Python etc.) for installation
+## in contrast to setup.py, you can choose the destination
+# install(PROGRAMS
+# scripts/my_python_script
+# DESTINATION ${CATKIN_PACKAGE_BIN_DESTINATION}
+# )
+
+## Mark executables and/or libraries for installation
+# install(TARGETS ${PROJECT_NAME} ${PROJECT_NAME}_node
+# ARCHIVE DESTINATION ${CATKIN_PACKAGE_LIB_DESTINATION}
+# LIBRARY DESTINATION ${CATKIN_PACKAGE_LIB_DESTINATION}
+# RUNTIME DESTINATION ${CATKIN_PACKAGE_BIN_DESTINATION}
+# )
+
+## Mark cpp header files for installation
+# install(DIRECTORY include/${PROJECT_NAME}/
+# DESTINATION ${CATKIN_PACKAGE_INCLUDE_DESTINATION}
+# FILES_MATCHING PATTERN "*.h"
+# PATTERN ".svn" EXCLUDE
+# )
+
+## Mark other files for installation (e.g. launch and bag files, etc.)
+# install(FILES
+# # myfile1
+# # myfile2
+# DESTINATION ${CATKIN_PACKAGE_SHARE_DESTINATION}
+# )
+
+#############
+## Testing ##
+#############
+
+## Add gtest based cpp test target and link libraries
+# catkin_add_gtest(${PROJECT_NAME}-test test/test_astra_body_tracker.cpp)
+# if(TARGET ${PROJECT_NAME}-test)
+# target_link_libraries(${PROJECT_NAME}-test ${PROJECT_NAME})
+# endif()
+
+## Add folders to be run by python nosetests
+# catkin_add_nosetests(test)
diff --git a/qt_simulator/body_tracker_msgs/LICENSE b/qt_simulator/body_tracker_msgs/LICENSE
new file mode 100644
index 0000000..d9a10c0
--- /dev/null
+++ b/qt_simulator/body_tracker_msgs/LICENSE
@@ -0,0 +1,176 @@
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+ 1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+ 2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+ 3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+ 4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+ 6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+ 7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+ 8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+ 9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+ END OF TERMS AND CONDITIONS
diff --git a/qt_simulator/body_tracker_msgs/README.md b/qt_simulator/body_tracker_msgs/README.md
new file mode 100644
index 0000000..12ae125
--- /dev/null
+++ b/qt_simulator/body_tracker_msgs/README.md
@@ -0,0 +1,11 @@
+# body_tracker_msgs
+
+[![License](https://img.shields.io/badge/License-Apache%202.0-blue.svg)](https://opensource.org/licenses/Apache-2.0)
+[![Build Status](https://travis-ci.org/shinselrobots/body_tracker_msgs.svg?branch=master)](https://travis-ci.org/shinselrobots/body_tracker_msgs)
+
+## Description:
+ - Custom messages used for body tracker nodes.
+ - BodyTracker.msg: Provides 2D and 3D position of people detected
+ - Skeleton.msg: Provides skeleton tracking info
+
+
diff --git a/qt_simulator/body_tracker_msgs/msg/BodyTracker.msg b/qt_simulator/body_tracker_msgs/msg/BodyTracker.msg
new file mode 100644
index 0000000..dd9615c
--- /dev/null
+++ b/qt_simulator/body_tracker_msgs/msg/BodyTracker.msg
@@ -0,0 +1,18 @@
+int32 body_id
+int32 tracking_status
+int32 gesture
+bool face_found
+
+# 2d face bounding Box position in pixels from 0,0 (top left of image)
+int32 face_left
+int32 face_top
+int32 face_width
+int32 face_height
+int32 age # rough estimate of person's age
+int32 gender # 0 = unknown, 1 = male, 2 = female
+string name # if match for person's face found in database
+
+geometry_msgs/Point32 position2d # body x,y in camera frame, z = range from camera
+geometry_msgs/Point32 position3d # body x,y,z in world coordinates
+geometry_msgs/Point32 face_center # face x,y in camera frame, z = range from camera
+
diff --git a/qt_simulator/body_tracker_msgs/msg/BodyTrackerArray.msg b/qt_simulator/body_tracker_msgs/msg/BodyTrackerArray.msg
new file mode 100644
index 0000000..5b9a8b7
--- /dev/null
+++ b/qt_simulator/body_tracker_msgs/msg/BodyTrackerArray.msg
@@ -0,0 +1,2 @@
+Header header
+BodyTracker[] detected_list
diff --git a/qt_simulator/body_tracker_msgs/msg/Skeleton.msg b/qt_simulator/body_tracker_msgs/msg/Skeleton.msg
new file mode 100644
index 0000000..7c170a2
--- /dev/null
+++ b/qt_simulator/body_tracker_msgs/msg/Skeleton.msg
@@ -0,0 +1,29 @@
+#Header header # I CANT GET THIS TO COMPILE!
+
+int32 body_id
+int32 tracking_status
+int32 gesture
+
+geometry_msgs/Point32 position2D # x,y in camera frame, z distance from camera
+
+geometry_msgs/Point32 centerOfMass
+
+#Position of interesting joints
+geometry_msgs/Point32 joint_position_head
+geometry_msgs/Point32 joint_position_neck
+geometry_msgs/Point32 joint_position_shoulder
+geometry_msgs/Point32 joint_position_spine_top
+geometry_msgs/Point32 joint_position_spine_mid
+geometry_msgs/Point32 joint_position_spine_bottom
+
+geometry_msgs/Point32 joint_position_left_shoulder
+geometry_msgs/Point32 joint_position_left_elbow
+geometry_msgs/Point32 joint_position_left_hand
+
+geometry_msgs/Point32 joint_position_right_shoulder
+geometry_msgs/Point32 joint_position_right_elbow
+geometry_msgs/Point32 joint_position_right_hand
+
+# Robot is usually too close to see legs, and not very interesting
+
+
diff --git a/qt_simulator/body_tracker_msgs/package.xml b/qt_simulator/body_tracker_msgs/package.xml
new file mode 100644
index 0000000..9082c30
--- /dev/null
+++ b/qt_simulator/body_tracker_msgs/package.xml
@@ -0,0 +1,26 @@
+
+
+ body_tracker_msgs
+ 0.1.0
+ body tracker custom message for skeleton tracking
+ Dave Shinsel
+ https://github.com/shinselrobots
+ Apache-2
+
+ catkin
+
+ roscpp
+ rospy
+ std_msgs
+ geometry_msgs
+ message_generation
+
+ std_msgs
+ geometry_msgs
+ message_runtime
+
+ std_msgs
+ geometry_msgs
+
+
+
diff --git a/qt_simulator/nuitrack_body_tracker/CMakeLists.txt b/qt_simulator/nuitrack_body_tracker/CMakeLists.txt
new file mode 100644
index 0000000..4a39e2a
--- /dev/null
+++ b/qt_simulator/nuitrack_body_tracker/CMakeLists.txt
@@ -0,0 +1,218 @@
+cmake_minimum_required(VERSION 2.8.3)
+project(nuitrack_body_tracker)
+
+## Compile as C++11, supported in ROS Kinetic and newer
+add_compile_options(-std=c++11)
+
+# For nuitrack
+# add_compile_options(-std=c++11 -D_GLIBCXX_USE_CXX11_ABI=0)
+link_directories("/usr/local/lib/nuitrack")
+
+## Find catkin macros and libraries
+## if COMPONENTS list like find_package(catkin REQUIRED COMPONENTS xyz)
+## is used, also find other catkin packages
+find_package(catkin REQUIRED COMPONENTS
+ roscpp
+ geometry_msgs
+ visualization_msgs
+ body_tracker_msgs
+ std_msgs
+)
+
+## System dependencies are found with CMake's conventions
+# find_package(Boost REQUIRED COMPONENTS system)
+
+
+## Uncomment this if the package has a setup.py. This macro ensures
+## modules and global scripts declared therein get installed
+## See http://ros.org/doc/api/catkin/html/user_guide/setup_dot_py.html
+# catkin_python_setup()
+
+################################################
+## Declare ROS messages, services and actions ##
+################################################
+
+## To declare and build messages, services or actions from within this
+## package, follow these steps:
+## * Let MSG_DEP_SET be the set of packages whose message types you use in
+## your messages/services/actions (e.g. std_msgs, actionlib_msgs, ...).
+## * In the file package.xml:
+## * add a build_depend tag for "message_generation"
+## * add a build_depend and a run_depend tag for each package in MSG_DEP_SET
+## * If MSG_DEP_SET isn't empty the following dependency has been pulled in
+## but can be declared for certainty nonetheless:
+## * add a run_depend tag for "message_runtime"
+## * In this file (CMakeLists.txt):
+## * add "message_generation" and every package in MSG_DEP_SET to
+## find_package(catkin REQUIRED COMPONENTS ...)
+## * add "message_runtime" and every package in MSG_DEP_SET to
+## catkin_package(CATKIN_DEPENDS ...)
+## * uncomment the add_*_files sections below as needed
+## and list every .msg/.srv/.action file to be processed
+## * uncomment the generate_messages entry below
+## * add every package in MSG_DEP_SET to generate_messages(DEPENDENCIES ...)
+
+## Generate messages in the 'msg' folder
+# add_message_files(
+# FILES
+# Message1.msg
+# Message2.msg
+# )
+
+## Generate services in the 'srv' folder
+# add_service_files(
+# FILES
+# Service1.srv
+# Service2.srv
+# )
+
+## Generate actions in the 'action' folder
+# add_action_files(
+# FILES
+# Action1.action
+# Action2.action
+# )
+
+## Generate added messages and services with any dependencies listed here
+# generate_messages(
+# DEPENDENCIES
+# std_msgs
+# )
+
+################################################
+## Declare ROS dynamic reconfigure parameters ##
+################################################
+
+## To declare and build dynamic reconfigure parameters within this
+## package, follow these steps:
+## * In the file package.xml:
+## * add a build_depend and a run_depend tag for "dynamic_reconfigure"
+## * In this file (CMakeLists.txt):
+## * add "dynamic_reconfigure" to
+## find_package(catkin REQUIRED COMPONENTS ...)
+## * uncomment the "generate_dynamic_reconfigure_options" section below
+## and list every .cfg file to be processed
+
+## Generate dynamic reconfigure parameters in the 'cfg' folder
+# generate_dynamic_reconfigure_options(
+# cfg/DynReconf1.cfg
+# cfg/DynReconf2.cfg
+# )
+
+###################################
+## catkin specific configuration ##
+###################################
+## The catkin_package macro generates cmake config files for your package
+## Declare things to be passed to dependent projects
+## INCLUDE_DIRS: uncomment this if your package contains header files
+## LIBRARIES: libraries you create in this project that dependent projects also need
+## CATKIN_DEPENDS: catkin_packages dependent projects also need
+## DEPENDS: system dependencies of this project that dependent projects also need
+catkin_package(
+# INCLUDE_DIRS include
+# LIBRARIES nuitrack_body_tracker
+ CATKIN_DEPENDS
+ roscpp
+ geometry_msgs
+ visualization_msgs
+ std_msgs
+ body_tracker_msgs
+# DEPENDS system_lib
+)
+
+###########
+## Build ##
+###########
+
+## Specify additional locations of header files
+## Your package locations should be listed before other locations
+include_directories(
+# include
+ ~/robot/Nuitrack/include
+ ~/robot/Nuitrack/include/middleware
+ ${catkin_INCLUDE_DIRS}
+)
+
+## Declare a C++ library
+# add_library(${PROJECT_NAME}
+# src/${PROJECT_NAME}/nuitrack_body_tracker.cpp
+# )
+
+## Add cmake target dependencies of the library
+## as an example, code may need to be generated before libraries
+## either from message generation or dynamic reconfigure
+# add_dependencies(${PROJECT_NAME} ${${PROJECT_NAME}_EXPORTED_TARGETS} ${catkin_EXPORTED_TARGETS})
+
+## Declare a C++ executable
+## With catkin_make all packages are built within a single CMake context
+## The recommended prefix ensures that target names across packages don't collide
+add_executable(${PROJECT_NAME}_node src/nuitrack_body_tracker_node.cpp)
+
+## Rename C++ executable without prefix
+## The above recommended prefix causes long target names, the following renames the
+## target back to the shorter version for ease of user use
+## e.g. "rosrun someones_pkg node" instead of "rosrun someones_pkg someones_pkg_node"
+# set_target_properties(${PROJECT_NAME}_node PROPERTIES OUTPUT_NAME node PREFIX "")
+
+## Add cmake target dependencies of the executable
+## same as for the library above
+add_dependencies(${PROJECT_NAME}_node ${${PROJECT_NAME}_EXPORTED_TARGETS} ${catkin_EXPORTED_TARGETS})
+
+## Specify libraries to link a library or executable target against
+
+# FIND_LIBRARY(NUITRACK_LIBRARY libnuitrack.so /home/system/NuitrackSDK/Nuitrack/lib/linux64/) ## Does not seem to work!
+
+
+link_directories("/usr/local/lib/nuitrack")
+target_link_libraries(
+ ${PROJECT_NAME}_node
+ ${catkin_LIBRARIES} nuitrack
+)
+
+#############
+## Install ##
+#############
+
+# all install targets should use catkin DESTINATION variables
+# See http://ros.org/doc/api/catkin/html/adv_user_guide/variables.html
+
+## Mark executable scripts (Python etc.) for installation
+## in contrast to setup.py, you can choose the destination
+# install(PROGRAMS
+# scripts/my_python_script
+# DESTINATION ${CATKIN_PACKAGE_BIN_DESTINATION}
+# )
+
+## Mark executables and/or libraries for installation
+# install(TARGETS ${PROJECT_NAME} ${PROJECT_NAME}_node
+# ARCHIVE DESTINATION ${CATKIN_PACKAGE_LIB_DESTINATION}
+# LIBRARY DESTINATION ${CATKIN_PACKAGE_LIB_DESTINATION}
+# RUNTIME DESTINATION ${CATKIN_PACKAGE_BIN_DESTINATION}
+# )
+
+## Mark cpp header files for installation
+# install(DIRECTORY include/${PROJECT_NAME}/
+# DESTINATION ${CATKIN_PACKAGE_INCLUDE_DESTINATION}
+# FILES_MATCHING PATTERN "*.h"
+# PATTERN ".svn" EXCLUDE
+# )
+
+## Mark other files for installation (e.g. launch and bag files, etc.)
+# install(FILES
+# # myfile1
+# # myfile2
+# DESTINATION ${CATKIN_PACKAGE_SHARE_DESTINATION}
+# )
+
+#############
+## Testing ##
+#############
+
+## Add gtest based cpp test target and link libraries
+# catkin_add_gtest(${PROJECT_NAME}-test test/test_nuitrack_body_tracker.cpp)
+# if(TARGET ${PROJECT_NAME}-test)
+# target_link_libraries(${PROJECT_NAME}-test ${PROJECT_NAME})
+# endif()
+
+## Add folders to be run by python nosetests
+# catkin_add_nosetests(test)
diff --git a/qt_simulator/nuitrack_body_tracker/README.md b/qt_simulator/nuitrack_body_tracker/README.md
new file mode 100644
index 0000000..ba905a2
--- /dev/null
+++ b/qt_simulator/nuitrack_body_tracker/README.md
@@ -0,0 +1,88 @@
+# Nuitrack Body Tracker
+
+# Info
+ This is a ROS Node to provide functionality of the NuiTrack SDK (https://nuitrack.com)
+
+ - NuiTrack is NOT FREE, but reasonably inexpensive, and works with a number of cameras (see their webpage)
+ - Also see Orbbec Astra SDK as a possible alternative (but only for Orbbec cameras)
+
+ Publishes 3 messages:
+
+ 1. body_tracking_position_pub_ custom message:
+ Includes:
+ 2D position of person relative to head camera; allows for fast, smooth tracking
+ Joint.proj: position in normalized projective coordinates
+ (x, y from 0.0 to 1.0, z is real)
+ Astra Mini FOV: 60 horz, 49.5 vert (degrees)
+
+ 3D position of the shoulder joint in relation to robot (using TF)
+ Joint.real: position in real world coordinates
+ Useful for tracking person in 3D
+
+ 2. body_tracking_skeleton_pub_ custom message:
+ Includes:
+ Everything in BodyTracker message above, plus 3D position of upper body
+ joints in relation to robot (using TF)
+
+ 3. marker_pub_ message:
+ Publishes 3d markers for selected joints. Visible as markers in RVIZ.
+
+
+# Installation Instructions / Prerequisites
+
+## Follow instructions on Nuitrack website
+ - http://download.3divi.com/Nuitrack/doc/Installation_page.html
+
+### Summary instructions below, but bits might not be up to date! Go to the website to get the latest SDK.
+
+
+ - Clone body_tracker_msgs into your Catkin workspace
+ - catkin_make to confirm compiles OK
+
+ - Clone this project into your Catkin workspace
+
+ - Remove OpenNI - it conflicts with the version supplied by Nuitrack!
+ - sudo apt-get purge --auto-remove openni-utils
+
+ - Download BOTH the nuitrack linux drivers and the Nuitrack SDK
+
+ - Install Nuitrack Linux drivers:
+ - sudo dpkg -i nuitrack-ubuntu-amd64.deb
+ - sudo reboot
+ - confirm environment variables set correctly:
+ - echo $NUITRACK_HOME (should be /usr/etc/nuitrack)
+ - echo $LD_LIBRARY_PATH (should include /usr/local/lib/nuitrack)
+
+ - Install Nuitrack SDK (NuitrackSDK.zip)
+ - Download from: http://download.3divi.com/Nuitrack/
+ - mkdir ~/sdk/NuitrackSDK
+ - cp NuitrackSDK.zip ~/sdk/NuitrackSDK
+ - extract ZIP archive with ubuntu Archive Manager (double click the zip file)
+ - delete the zip file
+
+ - Edit CMakeLists.txt if you installed the SDK to a different location:
+ set(NUITRACK_SDK_PATH /home/system/sdk/NuitrackSDK)
+
+# NOTE: Nuitrack install may break other RealSense applications!
+ - See this discussion:
+ - https://community.nuitrack.com/t/nuitrack-prevents-other-realsense-apps-from-working/893
+ - if needed: export LD_LIBRARY_PATH="${LD_LIBRARY_PATH}:/usr/local/lib/nuitrack"
+
+
+# Option: Edit Nuitrack Config parameters (now done in the node code)
+ - sudo vi $NUITRACK_HOME/data/nuitrack.config
+ - set Faces.ToUse and DepthProvider.Depth2ColorRegistration to true
+
+# Test NuiTrack SDK
+ - If you run into errors, it is probably because you did not reboot after driver install
+
+ - Try the license tool, it will test the system:
+ - sudo -E nuitrack_license_tool
+ - click on "compatibility test"
+ - if you have a license, enter it after the test completes.
+
+ - Follow instructions at: ~/sdk/NuitrackSDK/Examples/nuitrack_gl_sample/README.txt
+
+
+
+
diff --git a/qt_simulator/nuitrack_body_tracker/launch/nuitrack_body_tracker.launch b/qt_simulator/nuitrack_body_tracker/launch/nuitrack_body_tracker.launch
new file mode 100644
index 0000000..691d9b9
--- /dev/null
+++ b/qt_simulator/nuitrack_body_tracker/launch/nuitrack_body_tracker.launch
@@ -0,0 +1,10 @@
+
+
+
+
+
+
+
+
+
+
diff --git a/qt_simulator/nuitrack_body_tracker/package.xml b/qt_simulator/nuitrack_body_tracker/package.xml
new file mode 100644
index 0000000..c44f84e
--- /dev/null
+++ b/qt_simulator/nuitrack_body_tracker/package.xml
@@ -0,0 +1,62 @@
+
+
+ nuitrack_body_tracker
+ 0.0.0
+ a body_tracker package for nuitrack SDK
+ Dave Shinsel
+ TODO
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ catkin
+
+ roscpp
+ geometry_msgs
+ std_msgs
+ visualization_msgs
+ body_tracker_msgs
+
+ roscpp
+ geometry_msgs
+ std_msgs
+ visualization_msgs
+ body_tracker_msgs
+
+ roscpp
+ geometry_msgs
+ std_msgs
+ visualization_msgs
+ body_tracker_msgs
+
+
+
+
+
+
+
+
diff --git a/qt_simulator/nuitrack_body_tracker/src/nuitrack_body_tracker_node.cpp b/qt_simulator/nuitrack_body_tracker/src/nuitrack_body_tracker_node.cpp
new file mode 100644
index 0000000..8cfa8cc
--- /dev/null
+++ b/qt_simulator/nuitrack_body_tracker/src/nuitrack_body_tracker_node.cpp
@@ -0,0 +1,992 @@
+/* Body Tracker Node using Nuitrack library
+
+ Publish data messages:
+
+ 1. body_tracking_position_pub_ custom message:
+ Includes:
+ 2D position of person relative to head camera; allows for fast, smooth tracking
+ Joint.proj: position in normalized projective coordinates
+ (x, y from 0.0 to 1.0, z is real)
+ Astra Mini FOV: 60 horz, 49.5 vert (degrees)
+
+ body_tracking_array_pub_ : multiple person detections in one message
+
+ 3D position of the person's neck joint in relation to robot (using TF)
+ Joint.real: position in real world coordinates
+ Useful for tracking person in 3D
+
+ 2. body_tracking_skeleton_pub_ custom message:
+ Includes:
+ Everything in BodyTracker message above, plus 3D position of upper body
+ joints in relation to robot (using TF)
+ Joint.real: position in real world coordinates
+
+ 3. marker_pub_ message:
+ Publishes 3d markers for selected joints. Visible as markers in RVIZ
+
+*/
+
+#include "ros/ros.h"
+#include "std_msgs/String.h"
+#include "std_msgs/Int32.h"
+#include
+#include "ros/console.h"
+
+#include
+#include
+#include
+#include
+#include // setprecision
+
+#include
+#include
+#include
+#include
+#include // Publish custom message
+#include // Custom message, multiple people
+#include // Publish custom message
+
+// If Camera mounted on Pan/Tilt head
+//#include "sensor_msgs/JointState.h"
+#include "dynamixel_msgs/JointState.h"
+
+//For Nuitrack SDK
+#include "nuitrack/Nuitrack.h"
+#define KEY_JOINT_TO_TRACK JOINT_LEFT_COLLAR // JOINT_TORSO // JOINT_NECK
+
+// For Face JSON parsing
+#include
+#include
+#include
+
+// For Point Cloud publishing
+#include
+#include
+// #include
+
+const bool ENABLE_PUBLISHING_FRAMES = true;
+
+namespace nuitrack_body_tracker
+{
+ using namespace tdv::nuitrack;
+ //namespace pt = boost::property_tree;
+
+ class nuitrack_body_tracker_node
+ {
+ public:
+
+ nuitrack_body_tracker_node(std::string name) :
+ _name(name)
+ {
+ ROS_INFO("%s: Starting...", _name.c_str());
+
+ ros::NodeHandle nodeHandle("~");
+ nodeHandle.param("camera_depth_frame",camera_depth_frame_,"camera_depth_frame");
+ nodeHandle.param("camera_color_frame",camera_color_frame_,"camera_color_frame");
+
+ // Publishers and Subscribers
+
+ // Publish tracked person in 2D and 3D
+ // 2D: x,y in camera frame. 3D: x,y,z in world coordinates
+ body_tracking_position_pub_ = nh_.advertise
+ ("body_tracker/position", 1);
+
+ body_tracking_array_pub_ =
+ nh_.advertise
+ ("body_tracker_array/position", 1);
+
+ // Publish tracked person upper body skeleton for advanced uses
+ body_tracking_skeleton_pub_ = nh_.advertise
+ ("body_tracker/skeleton", 1);
+
+ // Publish markers to show where robot thinks person is in RViz
+ marker_pub_ = nh_.advertise
+ ("body_tracker/marker", 1);
+
+ // Publish the depth frame for other nodes
+ depth_image_pub_ = nh_.advertise
+ ("camera/depth/image", 1);
+ color_image_pub_ = nh_.advertise
+ ("camera/color/image", 1);
+ depth_cloud_pub_ = nh_.advertise
+ ("camera/depth_cloud", 1);
+
+ }
+
+ ~nuitrack_body_tracker_node()
+ {
+ ROS_INFO("nuitrack_body_tracker_node shutting down");
+ }
+
+ ///////////////////////////////////////////////////////////////////////////
+ // Nuitrack callbacks
+ // WARNING! THIS CODE ASSUMES COLOR AND DEPTH ARE SAME RESOLUTION!
+ // TO FIX THIS, SEE NUITRACK GL SAMPLE
+
+ void onNewColorFrame(RGBFrame::Ptr frame)
+ {
+ //ROS_INFO("DBG: Nuitrack::onNewColorFrame(), Frame = %d", ++color_frame_number_);
+
+ if(!ENABLE_PUBLISHING_FRAMES)
+ {
+ return;
+ }
+
+ int _width = frame->getCols();
+ int _height = frame->getRows();
+ //std::cout << "DBG COLOR: Width = " << _width << " Height = " << _height << std::endl;
+
+
+ // Point Cloud message for colorized depth cloud
+ int numpoints = _width * _height;
+ //cloud_msg_ = new(sensor_msgs::PointCloud2);
+
+ sensor_msgs::PointCloud2Iterator out_r(cloud_msg_, "r");
+ sensor_msgs::PointCloud2Iterator out_g(cloud_msg_, "g");
+ sensor_msgs::PointCloud2Iterator out_b(cloud_msg_, "b");
+
+ sensor_msgs::Image color_msg;
+
+ const tdv::nuitrack::Color3* colorPtr = frame->getData();
+
+ color_msg.header.stamp = ros::Time::now();
+ color_msg.header.frame_id = camera_color_frame_;
+ color_msg.height = _height;
+ color_msg.width = _width;
+ color_msg.encoding = "rgb8"; //sensor_msgs::image_encodings::TYPE_16UC1;
+ color_msg.is_bigendian = false;
+
+ color_msg.step = 3 * _width; // sensor_msgs::ImagePtr row step size
+
+ for (size_t row = 0; row < _height; ++row)
+ {
+ for (size_t col = 0; col < _width; ++col )
+ {
+ color_msg.data.push_back((colorPtr + col)->red);
+ color_msg.data.push_back((colorPtr + col)->green);
+ color_msg.data.push_back((colorPtr + col)->blue);
+
+ *out_r = (colorPtr + col)->red; // pointcloud
+ *out_g = (colorPtr + col)->green;
+ *out_b = (colorPtr + col)->blue;
+ ++out_r;
+ ++out_g;
+ ++out_b;
+
+ }
+ colorPtr += _width; // Next row
+ }
+
+ // Publish color frame
+ color_image_pub_.publish(color_msg);
+
+ }
+
+
+ void onNewDepthFrame(DepthFrame::Ptr frame)
+ {
+ //ROS_INFO("DBG: Nuitrack::onNewDepthFrame(), Frame = %d", ++depth_frame_number_);
+
+ if(!ENABLE_PUBLISHING_FRAMES)
+ {
+ return;
+ }
+
+ int _width = frame->getCols();
+ int _height = frame->getRows();
+ const uint16_t* depthPtr = frame->getData();
+
+ //std::cout << "DBG DEPTH: Width = " << _width << " Height = " << _height << std::endl;
+
+
+ // Depth image message
+ sensor_msgs::Image depth_msg;
+ depth_msg.header.stamp = ros::Time::now();
+ depth_msg.header.frame_id = camera_depth_frame_;
+ depth_msg.height = _height;
+ depth_msg.width = _width;
+ depth_msg.encoding = "rgb8"; // see sensor_msgs::image_encodings
+ depth_msg.is_bigendian = false;
+ depth_msg.step = 3 * _width; // sensor_msgs::ImagePtr row step size
+
+
+ // Point Cloud message
+ sensor_msgs::PointCloud2Iterator out_x(cloud_msg_, "x");
+ sensor_msgs::PointCloud2Iterator out_y(cloud_msg_, "y");
+ sensor_msgs::PointCloud2Iterator out_z(cloud_msg_, "z");
+
+ // std::cout << "=========================================================" << std::endl;
+ //std::cout << "DEBUG: cloud x, y, z : world x, y, z " << std::endl;
+
+ for (size_t row = 0; row < _height; ++row)
+ {
+ for (size_t col = 0; col < _width; ++col )
+ {
+ uint16_t fulldepthValue = *(depthPtr+ col);
+ uint16_t depthValue = *(depthPtr+ col) >> 5;
+
+
+ // RGB are all the same for depth (monochrome)
+ depth_msg.data.push_back(depthValue);
+ depth_msg.data.push_back(depthValue);
+ depth_msg.data.push_back(depthValue);
+
+
+ //store xyz in point cloud, transforming from image coordinates, (Z Forward to X Forward)
+ Vector3 cloud_point = depthSensor_->convertProjToRealCoords(col, row, fulldepthValue );
+
+ float X_World = cloud_point.x / 1000.0; // mm to meters
+ float Y_World = cloud_point.y / 1000.0;
+ float Z_World = cloud_point.z / 1000.0;
+
+ *out_x = Z_World;
+ *out_y = -X_World;
+ *out_z = Y_World;
+ ++out_x;
+ ++out_y;
+ ++out_z;
+
+ }
+ depthPtr += _width; // Next row
+ }
+
+ // Publish depth frame
+ depth_image_pub_.publish(depth_msg);
+
+ // Publish colorized depth cloud
+ cloud_msg_.header.stamp = ros::Time::now();
+ depth_cloud_pub_.publish(cloud_msg_);
+
+ }
+
+ void onUserUpdate(tdv::nuitrack::UserFrame::Ptr frame)
+ {
+ // std::cout << "Nuitrack: onUserUpdate callback" << std::endl;
+ }
+
+
+ void onSkeletonUpdate(SkeletonData::Ptr userSkeletons)
+ {
+ // std::cout << "Nuitrack: onSkeletonUpdate callback" << std::endl;
+
+ // Message for array of body detections
+ body_tracker_msgs::BodyTrackerArray body_tracker_array_msg;
+ body_tracker_array_msg.header.frame_id = camera_depth_frame_;
+ ros::Time frame_time_stamp = ros::Time::now();
+ body_tracker_array_msg.header.stamp = frame_time_stamp;
+
+// body_tracker_msgs::BodyTrackerArray_
+// foo; // body_tracker_array_msg;
+
+
+ // process skeletons for each user found
+ auto skeletons = userSkeletons->getSkeletons();
+ for (auto skeleton: skeletons)
+ {
+ // std::cout << "Nuitrack: Skeleton.id = " << skeleton.id << std::endl;
+
+ // Use KEY_JOINT_TO_TRACK to determine if we have a good lock on the person
+ float tracking_confidence = skeleton.joints[KEY_JOINT_TO_TRACK].confidence;
+ if (tracking_confidence < 0.15)
+ {
+ std::cout << "Nuitrack: ID " << skeleton.id << " Low Confidence ("
+ << tracking_confidence << "), skipping" << std::endl;
+ continue; // assume none of the joints are valid
+ }
+
+
+ // Fill in message data from Nuitracker SDK data
+ // camera z,x,y coordinates are mapped to ROS x,y,z coordinates
+ // All values are relative to camera position in meters (ie, in camera's TF frame)
+ // ROS x = camera z - distance to person
+ // ROS y = camera x - side to side
+ // ROS z = camera y - vertical height, *relative to camera position*
+
+ ///////////////////////////////////////////////////////////////
+ // Position data in 2D and 3D for tracking people
+ body_tracker_msgs::BodyTracker person_data;
+// body_tracker_msgs::BodyTracker_ person_data;
+
+ person_data.body_id = skeleton.id;
+ person_data.tracking_status = 0; // TODO
+ person_data.gesture = -1; // No gesture
+ person_data.face_found = false;
+ person_data.face_left = 0;
+ person_data.face_top = 0;
+ person_data.face_width = 0;
+ person_data.face_height = 0;
+ person_data.age = 0;
+ person_data.gender = 0;
+ person_data.name = "";
+
+ //if(skeleton.id != last_id_)
+ {
+ ROS_INFO("%s: detected person ID %d", _name.c_str(), skeleton.id);
+ last_id_ = skeleton.id;
+ }
+
+ ///////////////////////////////////////////////////////////////
+ // 2D position for camera servo tracking
+ const float ASTRA_MINI_FOV_X = -1.047200; // (60 degrees horizontal)
+ const float ASTRA_MINI_FOV_Y = -0.863938; // (49.5 degrees vertical)
+
+ // Convert projection to radians
+ // proj is 0.0 (left) --> 1.0 (right)
+ geometry_msgs::Pose2D track2d;
+ track2d.x = (skeleton.joints[KEY_JOINT_TO_TRACK].proj.x - 0.5) * ASTRA_MINI_FOV_X;
+ track2d.y = (skeleton.joints[KEY_JOINT_TO_TRACK].proj.y - 0.5) * ASTRA_MINI_FOV_Y;
+ track2d.theta = (float)skeleton.id;
+
+ person_data.position2d.x =
+ (skeleton.joints[KEY_JOINT_TO_TRACK].proj.x - 0.5) * ASTRA_MINI_FOV_X;
+ person_data.position2d.y =
+ (skeleton.joints[KEY_JOINT_TO_TRACK].proj.y - 0.5) * ASTRA_MINI_FOV_Y;
+ person_data.position2d.z = skeleton.joints[KEY_JOINT_TO_TRACK].proj.z / 1000.0;
+
+
+ std::cout << std::setprecision(4) << std::setw(7)
+ << "Nuitrack: " << "2D Tracking"
+ << " x: " << track2d.x
+ << " y: " << track2d.y
+ << " ID: " << track2d.theta
+ << std::endl;
+
+
+
+ ///////////////////////////////////////////////////////////////
+ // Face Data
+ // if the same ID as skeleton id, publish face data too
+
+ std::string face_info = tdv::nuitrack::Nuitrack::getInstancesJson();
+ //std::cout << face_info; //This will print the entire json object.
+ // Good examples at: http://zenol.fr/blog/boost-property-tree/en.html
+
+ try
+ {
+ std::stringstream ss;
+ ss << face_info;
+ boost::property_tree::ptree root;
+ boost::property_tree::read_json(ss, root);
+
+ // Find all instances of objects (usually people)
+ for(boost::property_tree::ptree::value_type &instance : root.get_child("Instances"))
+ {
+ std::string json_id_str = "";
+ std::string json_class_str = "";
+ int json_id = -1;
+
+ for (boost::property_tree::ptree::value_type &found_object : instance.second)
+ {
+
+ if( "id" == found_object.first)
+ {
+ json_id_str = found_object.second.data();
+ json_id = found_object.second.get_value();
+ std::cout << "FIELD: id = " << json_id_str << " = " << json_id << std::endl;
+
+ }
+ else if( "class" == found_object.first)
+ {
+ std::cout << "FIELD: class = " << found_object.second.data() << std::endl;
+ json_class_str = found_object.second.data();
+
+ }
+ else if( "face" == found_object.first)
+ {
+
+ // See if we found a face ID that matches current skeleton
+ //if( (json_class_str == "human") && (json_id_str != "") )
+ if( !( (json_class_str == "human") && (json_id == skeleton.id) ))
+ {
+ std::cout << "FACE ID (" << json_id << ") DOES NOT MATCH SKELETON (" <<
+ skeleton.id << ")... SKIPPING (or object != Human?)" << std::endl;
+ }
+ else
+ {
+
+ boost::property_tree::ptree face = found_object.second; // subtree
+ if(face.empty())
+ {
+ std::cout << "Face tree is empty!" << std::endl;
+ }
+ else
+ {
+ // this is a face subtree
+ std::cout << "FACE FOUND " << std::endl;
+ person_data.face_found = true;
+ float face_left, face_top, face_width, face_height;
+ face_left = face_top = face_width = face_height = 0.0;
+
+ for(boost::property_tree::ptree::value_type &rectangle : face.get_child("rectangle"))
+ {
+ // Face bounding box from 0.0 -> 1.0 (from top left of image)
+ // convert to pixel position before publishing
+ std::string rec_name = rectangle.first;
+ std::string rec_val = rectangle.second.data();
+ //std::cout << "FACE RECTANGLE: " << rec_name << " : " << rec_val << std::endl;
+
+ if( rectangle.first == "left")
+ {
+ face_left = rectangle.second.get_value();
+ person_data.face_left = (int)((float)frame_width_ * face_left);
+ }
+ if( rectangle.first == "top")
+ {
+ face_top = rectangle.second.get_value();
+ person_data.face_top = (int)((float)frame_height_ * face_top);
+ }
+ if( rectangle.first == "width")
+ {
+ face_width = rectangle.second.get_value();
+ person_data.face_width = (int)((float)frame_width_ * face_width);
+ }
+ if( rectangle.first == "height")
+ {
+ face_height = rectangle.second.get_value();
+ person_data.face_height = (int)((float)frame_height_ * face_height);
+ }
+ }
+
+ // Get center of the face bounding box and convert projection to radians
+ // proj is 0.0 (left) --> 1.0 (right)
+
+ float face_center_proj_x = face_left + (face_width / 2.0);
+ float face_center_proj_y = face_top + (face_height / 2.0);
+ person_data.face_center.x = (face_center_proj_x - 0.5) * ASTRA_MINI_FOV_X;
+ person_data.face_center.y = (face_center_proj_y - 0.5) * ASTRA_MINI_FOV_Y;
+ // just use the skeleton location
+ person_data.face_center.z = skeleton.joints[JOINT_HEAD].real.z / 1000.0;
+
+ //std::cout << "DBG face_center_proj = " << face_center_proj_x << ", " <<
+ // face_center_proj_y << std::endl;
+
+ //std::cout << "DBG face_center_ROS = " << person_data.face_center.x << ", " <<
+ // person_data.face_center.y << std::endl;
+
+
+
+ for(boost::property_tree::ptree::value_type &angles : face.get_child("angles"))
+ {
+ // Face Angle (where the face is pointing)
+ std::string angles_key = angles.first;
+ std::string angles_val = angles.second.data();
+ //std::cout << "FACE ANGLES: " << angles_key << " : " << angles_val << std::endl;
+ // Not currently published for ROS (future)
+
+ }
+ for(boost::property_tree::ptree::value_type &age : face.get_child("age"))
+ {
+ // rectangle is set of std::pair
+ std::string age_key = age.first;
+ std::string age_val = age.second.data();
+ std::cout << "FACE AGE: " << age_key << " : " << age_val << std::endl;
+
+ if( age.first == "years")
+ {
+ float float_age = age.second.get_value();
+ person_data.age = (int)float_age;
+ }
+
+ }
+
+ std::string gender_val = face.get("gender");
+ std::cout << "GENDER: " << gender_val << std::endl;
+ if("male" == gender_val)
+ {
+ person_data.gender = 1;
+ }
+ else if("female" == gender_val)
+ {
+ person_data.gender = 2;
+ }
+
+ }
+ }
+ }
+ }
+ }
+ }
+ catch (std::exception const& e)
+ {
+ std::cerr << e.what() << std::endl;
+ }
+
+
+ ///////////////////////////////////////////////////////////////
+ // Skeleton Data for publishing more detail
+ body_tracker_msgs::Skeleton_ skeleton_data;
+
+ // skeleton_data.frame_id = camera_depth_frame_;
+ skeleton_data.body_id = skeleton.id;
+ skeleton_data.tracking_status = 0; // TODO
+
+ //skeleton_data.centerOfMass.x = 0.0;
+ //skeleton_data.centerOfMass.y = 0.0;
+ //skeleton_data.centerOfMass.z = 0.0;
+
+ // *** POSITION 3D ***
+ person_data.position3d.x = skeleton.joints[KEY_JOINT_TO_TRACK].real.z / 1000.0;
+ person_data.position3d.y = skeleton.joints[KEY_JOINT_TO_TRACK].real.x / 1000.0;
+ person_data.position3d.z = skeleton.joints[KEY_JOINT_TO_TRACK].real.y / 1000.0;
+
+
+ skeleton_data.joint_position_head.x = skeleton.joints[JOINT_HEAD].real.z / 1000.0;
+ skeleton_data.joint_position_head.y = skeleton.joints[JOINT_HEAD].real.x / 1000.0;
+ skeleton_data.joint_position_head.z = skeleton.joints[JOINT_HEAD].real.y / 1000.0;
+
+ skeleton_data.joint_position_neck.x = skeleton.joints[JOINT_NECK].real.z / 1000.0;
+ skeleton_data.joint_position_neck.y = skeleton.joints[JOINT_NECK].real.x / 1000.0;
+ skeleton_data.joint_position_neck.z = skeleton.joints[JOINT_NECK].real.y / 1000.0;
+
+ skeleton_data.joint_position_spine_top.x = skeleton.joints[JOINT_TORSO].real.z / 1000.0;
+ skeleton_data.joint_position_spine_top.y = skeleton.joints[JOINT_TORSO].real.x / 1000.0;
+ skeleton_data.joint_position_spine_top.z = skeleton.joints[JOINT_TORSO].real.y / 1000.0;
+
+ skeleton_data.joint_position_spine_mid.x = skeleton.joints[JOINT_WAIST].real.z / 1000.0;
+ skeleton_data.joint_position_spine_mid.y = skeleton.joints[JOINT_WAIST].real.x / 1000.0;
+ skeleton_data.joint_position_spine_mid.z = skeleton.joints[JOINT_WAIST].real.y / 1000.0;
+
+ skeleton_data.joint_position_spine_bottom.x = 0.0;
+ skeleton_data.joint_position_spine_bottom.y = 0.0;
+ skeleton_data.joint_position_spine_bottom.z = 0.0;
+
+ skeleton_data.joint_position_left_shoulder.x = skeleton.joints[JOINT_LEFT_SHOULDER].real.z / 1000.0;
+ skeleton_data.joint_position_left_shoulder.y = skeleton.joints[JOINT_LEFT_SHOULDER].real.x / 1000.0;
+ skeleton_data.joint_position_left_shoulder.z = skeleton.joints[JOINT_LEFT_SHOULDER].real.y / 1000.0;
+
+ skeleton_data.joint_position_left_elbow.x = skeleton.joints[JOINT_LEFT_ELBOW].real.z / 1000.0;
+ skeleton_data.joint_position_left_elbow.y = skeleton.joints[JOINT_LEFT_ELBOW].real.x / 1000.0;
+ skeleton_data.joint_position_left_elbow.z = skeleton.joints[JOINT_LEFT_ELBOW].real.y / 1000.0;
+
+ skeleton_data.joint_position_left_hand.x = skeleton.joints[JOINT_LEFT_HAND].real.z / 1000.0;
+ skeleton_data.joint_position_left_hand.y = skeleton.joints[JOINT_LEFT_HAND].real.x / 1000.0;
+ skeleton_data.joint_position_left_hand.z = skeleton.joints[JOINT_LEFT_HAND].real.y / 1000.0;
+
+ skeleton_data.joint_position_right_shoulder.x = skeleton.joints[JOINT_RIGHT_SHOULDER].real.z / 1000.0;
+ skeleton_data.joint_position_right_shoulder.y = skeleton.joints[JOINT_RIGHT_SHOULDER].real.x / 1000.0;
+ skeleton_data.joint_position_right_shoulder.z = skeleton.joints[JOINT_RIGHT_SHOULDER].real.y / 1000.0;
+
+ skeleton_data.joint_position_right_elbow.x = skeleton.joints[JOINT_RIGHT_ELBOW].real.z / 1000.0;
+ skeleton_data.joint_position_right_elbow.y = skeleton.joints[JOINT_RIGHT_ELBOW].real.x / 1000.0;
+ skeleton_data.joint_position_right_elbow.z = skeleton.joints[JOINT_RIGHT_ELBOW].real.y / 1000.0;
+
+ skeleton_data.joint_position_right_hand.x = skeleton.joints[JOINT_RIGHT_HAND].real.z / 1000.0;
+ skeleton_data.joint_position_right_hand.y = skeleton.joints[JOINT_RIGHT_HAND].real.x / 1000.0;
+ skeleton_data.joint_position_right_hand.z = skeleton.joints[JOINT_RIGHT_HAND].real.y / 1000.0;
+
+ // Hand: open (0), grasping (1), waving (2)
+ /* TODO - see which of these actually work
+ GESTURE_WAVING = 0,
+ GESTURE_SWIPE_LEFT = 1,
+ GESTURE_SWIPE_RIGHT = 2,
+ GESTURE_SWIPE_UP = 3,
+ GESTURE_SWIPE_DOWN = 4,
+ GESTURE_PUSH = 5,
+ in MSG: -1 = none, 0 = waving, 1 = right fist, 2 = left fist
+ By experimentation:
+ Gesture 0: Wave
+ Gesture 3: Hand down (go)
+ Gesture 4: Hand up, in stopping motion (stop)
+
+ */
+
+ skeleton_data.gesture = -1; // No gesture
+ person_data.gesture = -1;
+ for (int i = 0; i < userGestures_.size(); ++i)
+ {
+ if( (userGestures_[i].userId == skeleton.id) && // match the person being reported
+ (userGestures_[i].type > -1) ) // Not already reported
+ {
+ skeleton_data.gesture = userGestures_[i].type; // TODO - map Nuitrack to my MSG enum
+ person_data.gesture = userGestures_[i].type; // TODO - map Nuitrack to my MSG enum
+ printf("Reporting Gesture %d for User %d\n",
+ userGestures_[i].type, userGestures_[i].userId);
+ userGestures_[i].type = (tdv::nuitrack::GestureType)(-1); // clear so we don't report old gestures
+ }
+
+ }
+
+ ////////////////////////////////////////////////////
+ // Publish custom position and skeleton messages for each person found
+
+
+ body_tracking_position_pub_.publish(person_data); // position data
+ body_tracking_skeleton_pub_.publish(skeleton_data); // full skeleton data
+
+ // Msg with array of position data for each person detected
+ body_tracker_array_msg.detected_list.push_back(person_data);
+
+
+ // Publish skeleton markers
+
+ PublishMarker( // show marker at KEY_JOINT_TO_TRACK location
+ 1, // ID
+ person_data.position3d.x,
+ person_data.position3d.y,
+ person_data.position3d.z,
+ 1.0, 0.0, 0.0 ); // r,g,b
+
+ PublishMarker(
+ 3, // ID
+ skeleton_data.joint_position_head.x,
+ skeleton_data.joint_position_head.y,
+ skeleton_data.joint_position_head.z,
+ 0.7, 0.0, 0.7 ); // r,g,b
+
+ PublishMarker(
+ 4, // ID
+ skeleton_data.joint_position_spine_top.x,
+ skeleton_data.joint_position_spine_top.y,
+ skeleton_data.joint_position_spine_top.z,
+ 0.0, 0.0, 1.0 ); // r,g,b
+
+ PublishMarker(
+ 5, // ID
+ skeleton_data.joint_position_spine_mid.x,
+ skeleton_data.joint_position_spine_mid.y,
+ skeleton_data.joint_position_spine_mid.z,
+ 0.0, 1.0, 0.0 ); // r,g,b
+
+ }
+
+ ////////////////////////////////////////////////////
+ // Publish custom array message with position info for all people found
+ body_tracking_array_pub_.publish(body_tracker_array_msg);
+ }
+
+
+ void onHandUpdate(HandTrackerData::Ptr handData)
+ {
+ // std::cout << "Nuitrack: onHandUpdate callback" << std::endl;
+ }
+
+
+ void onNewGesture(GestureData::Ptr gestureData)
+ {
+ //std::cout << "Nuitrack: onNewGesture callback" << std::endl;
+
+ userGestures_ = gestureData->getGestures(); // Save for use in next skeleton frame
+ for (int i = 0; i < userGestures_.size(); ++i)
+ {
+ printf("onNewGesture: Gesture Recognized %d for User %d\n",
+ userGestures_[i].type, userGestures_[i].userId);
+
+ }
+
+ }
+
+
+ void PublishMarker(int id, float x, float y, float z,
+ float color_r, float color_g, float color_b)
+ {
+ // Display marker for RVIZ to show where robot thinks person is
+ // For Markers info, see http://wiki.ros.org/rviz/Tutorials/Markers%3A%20Basic%20Shapes
+
+ // ROS_INFO("DBG: PublishMarker called");
+ //if( id != 1)
+ // printf ("DBG PublishMarker called for %f, %f, %f\n", x,y,z);
+
+ visualization_msgs::Marker marker;
+ marker.header.frame_id = camera_depth_frame_;
+ marker.header.stamp = ros::Time::now();
+ marker.lifetime = ros::Duration(3.0); // seconds
+ // Any marker sent with the same namespace and id will overwrite the old one
+ marker.ns = _name;
+ marker.id = id; // This must be id unique for each marker
+
+ uint32_t shape = visualization_msgs::Marker::SPHERE;
+ marker.type = shape;
+
+ // Set the marker action. Options are ADD, DELETE, and DELETEALL
+ marker.action = visualization_msgs::Marker::ADD;
+ marker.color.r = color_r;
+ marker.color.g = color_g;
+ marker.color.b = color_b;
+ marker.color.a = 1.0;
+ marker.pose.orientation.x = 0.0;
+ marker.pose.orientation.y = 0.0;
+ marker.pose.orientation.z = 0.0;
+ marker.pose.orientation.w = 1.0;
+
+ marker.scale.x = 0.1; // size of marker in meters
+ marker.scale.y = 0.1;
+ marker.scale.z = 0.1;
+
+ marker.pose.position.x = x;
+ marker.pose.position.y = y;
+ marker.pose.position.z = z;
+
+ // ROS_INFO("DBG: Publishing Marker");
+ marker_pub_.publish(marker);
+
+ }
+
+ // Publish 2D position of person, relative to camera
+ // useful for direct control of servo pan/tilt for tracking
+ // Example: publishJoint2D("JOINT_NECK", joints[JOINT_NECK]);
+
+ void publishJoint2D(const char *name, const tdv::nuitrack::Joint& joint)
+ {
+ const float ASTRA_MINI_FOV_X = 1.047200; // (60 degrees horizontal)
+ const float ASTRA_MINI_FOV_Y = -0.863938; // (49.5 degrees vertical)
+ if (joint.confidence < 0.15)
+ {
+ return; // ignore low confidence joints
+ }
+
+ // Convert projection to radians
+ // proj is 0.0 (left) --> 1.0 (right)
+ float radians_x = (joint.proj.x - 0.5) * ASTRA_MINI_FOV_X;
+ float radians_y = (joint.proj.y - 0.5) * ASTRA_MINI_FOV_Y;
+ std::cout << std::setprecision(4) << std::setw(7)
+ << "Nuitrack: " << name
+ << " x: " << joint.proj.x << " (" << radians_x << ") y: "
+ << joint.proj.y << " (" << radians_y << ")"
+ // << " Confidence: " << joint.confidence
+ << std::endl;
+ }
+
+
+ ///////////////////////////////////////////////////////////////////////////
+ void Init(const std::string& config)
+ {
+ // Initialize Nuitrack first, then create Nuitrack modules
+ ROS_INFO("%s: Initializing...", _name.c_str());
+ // std::cout << "Nuitrack: Initializing..." << std::endl;
+
+ std::cout <<
+ "\n============ IGNORE ERRORS THAT SAY 'Couldnt open device...' ===========\n"
+ << std::endl;
+
+ try
+ {
+ tdv::nuitrack::Nuitrack::init("");
+ //tdv::nuitrack::Nuitrack::init(config);
+ }
+ catch (const tdv::nuitrack::Exception& e)
+ {
+ std::cerr <<
+ "Can not initialize Nuitrack (ExceptionType: " << e.type() << ")" << std::endl;
+ exit(EXIT_FAILURE);
+ }
+
+ // Set config values. Overrides $NUITRACK_HOME/data/nuitrack.config
+
+ // Align depth and color
+ Nuitrack::setConfigValue("DepthProvider.Depth2ColorRegistration", "true");
+
+ // Realsense Depth Module - force to 848x480 @ 60 FPS
+ Nuitrack::setConfigValue("Realsense2Module.Depth.Preset", "5");
+ Nuitrack::setConfigValue("Realsense2Module.Depth.RawWidth", "848");
+ Nuitrack::setConfigValue("Realsense2Module.Depth.RawHeight", "480");
+ Nuitrack::setConfigValue("Realsense2Module.Depth.ProcessWidth", "848");
+ Nuitrack::setConfigValue("Realsense2Module.Depth.ProcessHeight", "480");
+ Nuitrack::setConfigValue("Realsense2Module.Depth.FPS", "60");
+
+ // Realsense RGB Module - force to 848x480 @ 60 FPS
+ Nuitrack::setConfigValue("Realsense2Module.RGB.RawWidth", "848");
+ Nuitrack::setConfigValue("Realsense2Module.RGB.RawHeight", "480");
+ Nuitrack::setConfigValue("Realsense2Module.RGB.ProcessWidth", "848");
+ Nuitrack::setConfigValue("Realsense2Module.RGB.ProcessHeight", "480");
+ Nuitrack::setConfigValue("Realsense2Module.RGB.FPS", "60");
+
+ // Enable face tracking
+ Nuitrack::setConfigValue("Faces.ToUse", "true");
+
+ //Options for debug
+ //Nuitrack::setConfigValue("Skeletonization.ActiveUsers", "1");
+ //Nuitrack::setConfigValue("DepthProvider.Mirror", "true");
+ depth_frame_number_ = 0;
+ color_frame_number_ = 0;
+
+ // Create all required Nuitrack modules
+
+ std::cout << "Nuitrack: DepthSensor::create()" << std::endl;
+ depthSensor_ = tdv::nuitrack::DepthSensor::create();
+ // Bind to event new frame
+ depthSensor_->connectOnNewFrame(std::bind(
+ &nuitrack_body_tracker_node::onNewDepthFrame, this, std::placeholders::_1));
+
+ std::cout << "Nuitrack: ColorSensor::create()" << std::endl;
+ colorSensor_ = tdv::nuitrack::ColorSensor::create();
+ // Bind to event new frame
+ colorSensor_->connectOnNewFrame(std::bind(
+ &nuitrack_body_tracker_node::onNewColorFrame, this, std::placeholders::_1));
+
+ outputMode_ = depthSensor_->getOutputMode();
+ OutputMode colorOutputMode = colorSensor_->getOutputMode();
+ if ((colorOutputMode.xres != outputMode_.xres) || (colorOutputMode.yres != outputMode_.yres))
+ {
+ ROS_WARN("%s: WARNING! DEPTH AND COLOR SIZE NOT THE SAME!", _name.c_str() );
+ }
+
+ // Use depth as the frame size
+ frame_width_ = outputMode_.xres;
+ frame_height_ = outputMode_.yres;
+ last_id_ = -1;
+ std::cout << "========= Nuitrack: GOT DEPTH SENSOR =========" << std::endl;
+ std::cout << "Nuitrack: Depth: width = " << frame_width_ <<
+ " height = " << frame_height_ << std::endl;
+
+ // Point Cloud message (includes depth and color)
+ int numpoints = frame_width_ * frame_height_;
+ cloud_msg_.header.frame_id = camera_depth_frame_;
+ //cloud_msg_.header.stamp = ros::Time::now();
+ cloud_msg_.width = numpoints;
+ cloud_msg_.height = 1;
+ cloud_msg_.is_bigendian = false;
+ cloud_msg_.is_dense = false; // there may be invalid points
+
+ sensor_msgs::PointCloud2Modifier modifier(cloud_msg_);
+ modifier.setPointCloud2FieldsByString(2,"xyz","rgb");
+ modifier.resize(numpoints);
+
+
+ std::cout << "Nuitrack: UserTracker::create()" << std::endl;
+ userTracker_ = tdv::nuitrack::UserTracker::create();
+ // Bind to event update user tracker
+ userTracker_->connectOnUpdate(std::bind(
+ &nuitrack_body_tracker_node::onUserUpdate, this, std::placeholders::_1));
+
+ std::cout << "Nuitrack: SkeletonTracker::create()" << std::endl;
+ skeletonTracker_ = tdv::nuitrack::SkeletonTracker::create();
+ // Bind to event update skeleton tracker
+ skeletonTracker_->connectOnUpdate(std::bind(
+ &nuitrack_body_tracker_node::onSkeletonUpdate, this, std::placeholders::_1));
+
+ std::cout << "Nuitrack: HandTracker::create()" << std::endl;
+ handTracker_ = tdv::nuitrack::HandTracker::create();
+ // Bind to event update Hand tracker
+ handTracker_->connectOnUpdate(std::bind(
+ &nuitrack_body_tracker_node::onHandUpdate, this, std::placeholders::_1));
+
+ std::cout << "Nuitrack: GestureRecognizer::create()" << std::endl;
+ gestureRecognizer_ = tdv::nuitrack::GestureRecognizer::create();
+ gestureRecognizer_->connectOnNewGestures(std::bind(
+ &nuitrack_body_tracker_node::onNewGesture, this, std::placeholders::_1));
+
+ ROS_INFO("%s: Init complete. Waiting for frames...", _name.c_str());
+
+ }
+
+ void Run()
+ {
+ // Initialize Nuitrack first, then create Nuitrack modules
+ ROS_INFO("%s: Running...", _name.c_str());
+ // std::cout << "Nuitrack: Running..." << std::endl;
+ // Start Nuitrack
+ try
+ {
+ tdv::nuitrack::Nuitrack::run();
+ }
+ catch (const tdv::nuitrack::Exception& e)
+ {
+ std::cerr << "Can not start Nuitrack (ExceptionType: "
+ << e.type() << ")" << std::endl;
+ return;
+ }
+
+ ROS_INFO("%s: Waiting for person to be detected...", _name.c_str());
+
+ // Run Loop
+ ros::Rate r(30); // hz
+ while (ros::ok())
+ {
+ // std::cout << "Nuitrack: Looping..." << std::endl;
+
+ try
+ {
+ // Wait for new person tracking data
+ tdv::nuitrack::Nuitrack::waitUpdate(skeletonTracker_);
+ }
+ catch (tdv::nuitrack::LicenseNotAcquiredException& e)
+ {
+ std::cerr << "LicenseNotAcquired exception (ExceptionType: "
+ << e.type() << ")" << std::endl;
+ break;
+ }
+ catch (const tdv::nuitrack::Exception& e)
+ {
+ std::cerr << "Nuitrack update failed (ExceptionType: "
+ << e.type() << ")" << std::endl;
+ }
+
+ // std::cout << "Nuitrack: Sleeping..." << std::endl;
+ ros::spinOnce();
+ r.sleep();
+ }
+
+ // Release Nuitrack
+ try
+ {
+ tdv::nuitrack::Nuitrack::release();
+ }
+ catch (const tdv::nuitrack::Exception& e)
+ {
+ std::cerr << "Nuitrack release failed (ExceptionType: "
+ << e.type() << ")" << std::endl;
+ }
+
+ } // Run()
+
+
+ private:
+ /////////////// DATA MEMBERS /////////////////////
+
+ std::string _name;
+ ros::NodeHandle nh_;
+ std::string camera_depth_frame_;
+ std::string camera_color_frame_;
+ int frame_width_, frame_height_;
+ int last_id_;
+ sensor_msgs::PointCloud2 cloud_msg_; // color and depth point cloud
+ int depth_frame_number_;
+ int color_frame_number_;
+
+ ros::Publisher body_tracking_position_pub_;
+ ros::Publisher body_tracking_array_pub_;
+ ros::Publisher body_tracking_skeleton_pub_;
+ ros::Publisher marker_pub_;
+ ros::Publisher depth_image_pub_;
+ ros::Publisher color_image_pub_;
+ ros::Publisher depth_cloud_pub_;
+
+ //ros::Publisher body_tracking_pose2d_pub_;
+ //ros::Publisher body_tracking_pose3d_pub_;
+ //ros::Publisher body_tracking_gesture_pub_;
+ //ros::Subscriber servo_pan_sub_;
+
+ tdv::nuitrack::OutputMode outputMode_;
+    std::vector<tdv::nuitrack::Gesture> userGestures_;  // NOTE(review): template argument reconstructed — it was stripped by markup mangling; confirm element type against upstream source
+ tdv::nuitrack::DepthSensor::Ptr depthSensor_;
+ tdv::nuitrack::ColorSensor::Ptr colorSensor_;
+ tdv::nuitrack::UserTracker::Ptr userTracker_;
+ tdv::nuitrack::SkeletonTracker::Ptr skeletonTracker_;
+ tdv::nuitrack::HandTracker::Ptr handTracker_;
+ tdv::nuitrack::GestureRecognizer::Ptr gestureRecognizer_;
+ //tdv::nuitrack::getInstancesJson::Ptr getInstancesJson;
+
+
+ /* Note from http://download.3divi.com/Nuitrack/doc/Instance_based_API.html
+ Face modules are by default disabled. To enable face modules, open nuitrack.config file and set Faces.ToUse and DepthProvider.Depth2ColorRegistration to true.
+ */
+
+ };
+}; // namespace nuitrack_body_tracker
+
+
+ // The main entry point for this node.
+ int main( int argc, char *argv[] )
+ {
+ using namespace nuitrack_body_tracker;
+ ros::init( argc, argv, "nuitrack_body_tracker" );
+ nuitrack_body_tracker_node node(ros::this_node::getName());
+ node.Init("");
+ node.Run();
+
+ return 0;
+ }
+
+
+
+
diff --git a/qt_simulator/qt_face_recognition/CMakeLists.txt b/qt_simulator/qt_face_recognition/CMakeLists.txt
new file mode 100644
index 0000000..bef1a5c
--- /dev/null
+++ b/qt_simulator/qt_face_recognition/CMakeLists.txt
@@ -0,0 +1,197 @@
+cmake_minimum_required(VERSION 2.8.3)
+project(qt_face_recognition)
+
+## Compile as C++11, supported in ROS Kinetic and newer
+# add_compile_options(-std=c++11)
+
+## Find catkin macros and libraries
+## if COMPONENTS list like find_package(catkin REQUIRED COMPONENTS xyz)
+## is used, also find other catkin packages
+find_package(catkin REQUIRED COMPONENTS
+ rospy
+)
+
+## System dependencies are found with CMake's conventions
+# find_package(Boost REQUIRED COMPONENTS system)
+
+
+## Uncomment this if the package has a setup.py. This macro ensures
+## modules and global scripts declared therein get installed
+## See http://ros.org/doc/api/catkin/html/user_guide/setup_dot_py.html
+# catkin_python_setup()
+
+################################################
+## Declare ROS messages, services and actions ##
+################################################
+
+## To declare and build messages, services or actions from within this
+## package, follow these steps:
+## * Let MSG_DEP_SET be the set of packages whose message types you use in
+## your messages/services/actions (e.g. std_msgs, actionlib_msgs, ...).
+## * In the file package.xml:
+## * add a build_depend tag for "message_generation"
+## * add a build_depend and a exec_depend tag for each package in MSG_DEP_SET
+## * If MSG_DEP_SET isn't empty the following dependency has been pulled in
+## but can be declared for certainty nonetheless:
+## * add a exec_depend tag for "message_runtime"
+## * In this file (CMakeLists.txt):
+## * add "message_generation" and every package in MSG_DEP_SET to
+## find_package(catkin REQUIRED COMPONENTS ...)
+## * add "message_runtime" and every package in MSG_DEP_SET to
+## catkin_package(CATKIN_DEPENDS ...)
+## * uncomment the add_*_files sections below as needed
+## and list every .msg/.srv/.action file to be processed
+## * uncomment the generate_messages entry below
+## * add every package in MSG_DEP_SET to generate_messages(DEPENDENCIES ...)
+
+## Generate messages in the 'msg' folder
+# add_message_files(
+# FILES
+# Message1.msg
+# Message2.msg
+# )
+
+## Generate services in the 'srv' folder
+# add_service_files(
+# FILES
+# Service1.srv
+# Service2.srv
+# )
+
+## Generate actions in the 'action' folder
+# add_action_files(
+# FILES
+# Action1.action
+# Action2.action
+# )
+
+## Generate added messages and services with any dependencies listed here
+# generate_messages(
+# DEPENDENCIES
+# std_msgs # Or other packages containing msgs
+# )
+
+################################################
+## Declare ROS dynamic reconfigure parameters ##
+################################################
+
+## To declare and build dynamic reconfigure parameters within this
+## package, follow these steps:
+## * In the file package.xml:
+## * add a build_depend and a exec_depend tag for "dynamic_reconfigure"
+## * In this file (CMakeLists.txt):
+## * add "dynamic_reconfigure" to
+## find_package(catkin REQUIRED COMPONENTS ...)
+## * uncomment the "generate_dynamic_reconfigure_options" section below
+## and list every .cfg file to be processed
+
+## Generate dynamic reconfigure parameters in the 'cfg' folder
+# generate_dynamic_reconfigure_options(
+# cfg/DynReconf1.cfg
+# cfg/DynReconf2.cfg
+# )
+
+###################################
+## catkin specific configuration ##
+###################################
+## The catkin_package macro generates cmake config files for your package
+## Declare things to be passed to dependent projects
+## INCLUDE_DIRS: uncomment this if your package contains header files
+## LIBRARIES: libraries you create in this project that dependent projects also need
+## CATKIN_DEPENDS: catkin_packages dependent projects also need
+## DEPENDS: system dependencies of this project that dependent projects also need
+catkin_package(
+# INCLUDE_DIRS include
+#  LIBRARIES qt_face_recognition
+# CATKIN_DEPENDS rospy
+# DEPENDS system_lib
+)
+
+###########
+## Build ##
+###########
+
+## Specify additional locations of header files
+## Your package locations should be listed before other locations
+include_directories(
+# include
+ ${catkin_INCLUDE_DIRS}
+)
+
+## Declare a C++ library
+# add_library(${PROJECT_NAME}
+#   src/${PROJECT_NAME}/qt_face_recognition.cpp
+# )
+
+## Add cmake target dependencies of the library
+## as an example, code may need to be generated before libraries
+## either from message generation or dynamic reconfigure
+# add_dependencies(${PROJECT_NAME} ${${PROJECT_NAME}_EXPORTED_TARGETS} ${catkin_EXPORTED_TARGETS})
+
+## Declare a C++ executable
+## With catkin_make all packages are built within a single CMake context
+## The recommended prefix ensures that target names across packages don't collide
+# add_executable(${PROJECT_NAME}_node src/qt_face_recognition_node.cpp)
+
+## Rename C++ executable without prefix
+## The above recommended prefix causes long target names, the following renames the
+## target back to the shorter version for ease of user use
+## e.g. "rosrun someones_pkg node" instead of "rosrun someones_pkg someones_pkg_node"
+# set_target_properties(${PROJECT_NAME}_node PROPERTIES OUTPUT_NAME node PREFIX "")
+
+## Add cmake target dependencies of the executable
+## same as for the library above
+# add_dependencies(${PROJECT_NAME}_node ${${PROJECT_NAME}_EXPORTED_TARGETS} ${catkin_EXPORTED_TARGETS})
+
+## Specify libraries to link a library or executable target against
+# target_link_libraries(${PROJECT_NAME}_node
+# ${catkin_LIBRARIES}
+# )
+
+#############
+## Install ##
+#############
+
+# all install targets should use catkin DESTINATION variables
+# See http://ros.org/doc/api/catkin/html/adv_user_guide/variables.html
+
+## Mark executable scripts (Python etc.) for installation
+## in contrast to setup.py, you can choose the destination
+# install(PROGRAMS
+# scripts/my_python_script
+# DESTINATION ${CATKIN_PACKAGE_BIN_DESTINATION}
+# )
+
+## Mark executables and/or libraries for installation
+# install(TARGETS ${PROJECT_NAME} ${PROJECT_NAME}_node
+# ARCHIVE DESTINATION ${CATKIN_PACKAGE_LIB_DESTINATION}
+# LIBRARY DESTINATION ${CATKIN_PACKAGE_LIB_DESTINATION}
+# RUNTIME DESTINATION ${CATKIN_PACKAGE_BIN_DESTINATION}
+# )
+
+## Mark cpp header files for installation
+# install(DIRECTORY include/${PROJECT_NAME}/
+# DESTINATION ${CATKIN_PACKAGE_INCLUDE_DESTINATION}
+# FILES_MATCHING PATTERN "*.h"
+# PATTERN ".svn" EXCLUDE
+# )
+
+## Mark other files for installation (e.g. launch and bag files, etc.)
+# install(FILES
+# # myfile1
+# # myfile2
+# DESTINATION ${CATKIN_PACKAGE_SHARE_DESTINATION}
+# )
+
+#############
+## Testing ##
+#############
+
+## Add gtest based cpp test target and link libraries
+# catkin_add_gtest(${PROJECT_NAME}-test test/test_qt_face_recognition.cpp)
+# if(TARGET ${PROJECT_NAME}-test)
+# target_link_libraries(${PROJECT_NAME}-test ${PROJECT_NAME})
+# endif()
+
+## Add folders to be run by python nosetests
+# catkin_add_nosetests(test)
diff --git a/qt_simulator/qt_face_recognition/README.md b/qt_simulator/qt_face_recognition/README.md
new file mode 100644
index 0000000..3897a46
--- /dev/null
+++ b/qt_simulator/qt_face_recognition/README.md
@@ -0,0 +1,6 @@
+# Face recognition, Age & Gender
+This repository contains a set of code samples and tutorial for programming [QTrobot by LuxAI S.A.](http://luxai.com/qtrobot-for-research/#hardware)
+
+Check it in this video:
+---
+[![Face recognition, Age & Gender](http://img.youtube.com/vi/30wr7nXF6v4/0.jpg)](http://www.youtube.com/watch?v=30wr7nXF6v4 "3D Camera - Face recognition, Age & Gender")
diff --git a/qt_simulator/qt_face_recognition/launch/qt_face_recognition.launch b/qt_simulator/qt_face_recognition/launch/qt_face_recognition.launch
new file mode 100644
index 0000000..4ed4454
--- /dev/null
+++ b/qt_simulator/qt_face_recognition/launch/qt_face_recognition.launch
@@ -0,0 +1,9 @@
+
+
+
+
+
+
+
+
+
diff --git a/qt_simulator/qt_face_recognition/package.xml b/qt_simulator/qt_face_recognition/package.xml
new file mode 100644
index 0000000..71e3ad4
--- /dev/null
+++ b/qt_simulator/qt_face_recognition/package.xml
@@ -0,0 +1,62 @@
+<?xml version="1.0"?>
+<package format="2">
+  <name>qt_face_recognition</name>
+  <version>1.0.0</version>
+  <description>The qt_face_recognition package</description>
+
+  <!-- One maintainer tag required, multiple allowed, one person per tag -->
+  <!-- Example:  -->
+  <!-- <maintainer email="jane.doe@example.com">Jane Doe</maintainer> -->
+  <maintainer email="ali.paikan@luxai.com">Ali Paikan</maintainer>
+
+
+  <!-- One license tag required, multiple allowed, one license per tag -->
+  <!-- Commonly used license strings: -->
+  <!--   BSD, MIT, Boost Software License, GPLv2, GPLv3, LGPLv2.1, LGPLv3 -->
+  <license>LuxAI S.A.</license>
+
+
+  <!-- Url tags are optional, but multiple are allowed, one per tag -->
+  <!-- Optional attribute type can be: website, bugtracker, or repository -->
+  <!-- Example: -->
+  <!-- <url type="website">http://wiki.ros.org/qt_face_recognition</url> -->
+
+
+  <!-- Author tags are optional, multiple are allowed, one per tag -->
+  <!-- Authors do not have to be maintainers, but could be -->
+  <!-- Example: -->
+  <!-- <author email="jane.doe@example.com">Jane Doe</author> -->
+
+
+  <!-- The *depend tags are used to specify dependencies -->
+  <!-- Dependencies can be catkin packages or system dependencies -->
+  <!-- Examples: -->
+  <!-- Use depend as a shortcut for packages that are both build and exec dependencies -->
+  <!--   <depend>roscpp</depend> -->
+  <!--   Note that this is equivalent to the following: -->
+  <!--   <build_depend>roscpp</build_depend> -->
+  <!--   <exec_depend>roscpp</exec_depend> -->
+  <!-- Use build_depend for packages you need at compile time: -->
+  <!--   <build_depend>message_generation</build_depend> -->
+  <!-- Use build_export_depend for packages you need in order to build against this package: -->
+  <!--   <build_export_depend>message_generation</build_export_depend> -->
+  <!-- Use buildtool_depend for build tool packages: -->
+  <!--   <buildtool_depend>catkin</buildtool_depend> -->
+  <!-- Use exec_depend for packages you need at runtime: -->
+  <!--   <exec_depend>message_runtime</exec_depend> -->
+  <!-- Use test_depend for packages you need only for testing: -->
+  <!--   <test_depend>gtest</test_depend> -->
+  <!-- Use doc_depend for packages you need only for building documentation: -->
+  <!--   <doc_depend>doxygen</doc_depend> -->
+  <buildtool_depend>catkin</buildtool_depend>
+  <build_depend>rospy</build_depend>
+  <build_export_depend>rospy</build_export_depend>
+  <exec_depend>rospy</exec_depend>
+
+
+  <!-- The export tag contains other, unspecified, tags -->
+  <export>
+    <!-- Other tools can request additional information be placed here -->
+
+  </export>
+</package>
diff --git a/qt_simulator/qt_face_recognition/src/depth_rgb_coordinator.py b/qt_simulator/qt_face_recognition/src/depth_rgb_coordinator.py
new file mode 100644
index 0000000..e3e170d
--- /dev/null
+++ b/qt_simulator/qt_face_recognition/src/depth_rgb_coordinator.py
@@ -0,0 +1,72 @@
+import numpy as np
+import rospy
+import tf
+from sensor_msgs import point_cloud2 as pc2
+from sensor_msgs.msg import PointCloud2, PointField
+from geometry_msgs.msg import Pose2D
+from geometry_msgs.msg import Point
+from geometry_msgs.msg import PointStamped
+
+n_cols = 848
+
+class DepthRgbCoordinator:
+ def __init__(self, height=480, width=848):
+ self._height = height
+ self._width = width
+ rospy.init_node('image_coordinator', anonymous=True)
+ rospy.Subscriber('/camera/depth_cloud', PointCloud2, self.handle_depth_image)
+ rospy.Subscriber("qtpc/face_frame", Pose2D, self.handle_coordinate)
+ rospy.Timer(rospy.Duration(1), self.talker)
+ self.listener = tf.TransformListener()
+ self.rate = rospy.Rate(10.0)
+ self._depth_image = None
+
+
+
+
+ def handle_depth_image(self, data):
+ self._depth_image = list(pc2.read_points(data, field_names=("x", "y", "z"), skip_nans=True))
+
+ def handle_coordinate(self, data):
+ point = self.get_point(int(data.x), int(data.y))
+ self.rotation_matrix(point)
+ return point
+
+ def rotation_matrix(self, camera_point):
+ br = tf.TransformBroadcaster()
+ target_frame = "base_link"
+ point = PointStamped()
+ point.header.frame_id = "Camera"
+ point.header.stamp = rospy.Time(0)
+ point.point.x = camera_point[0]
+ point.point.y = camera_point[1]
+ point.point.z = camera_point[2]
+ point_target_frame = self.listener.transformPoint(target_frame, point)
+ print(str(point_target_frame))
+
+ br.sendTransform((point_target_frame.point.x, point_target_frame.point.y, point_target_frame.point.z),
+ (0.0, 0.0, 0.0, 1.0),
+ rospy.Time.now(),
+ "TargetFrame",
+ "base_link")
+ self.rate.sleep()
+
+ def get_point(self,
+ pixel_col,
+ pixel_row,
+ ):
+ idx = pixel_row * self._width + pixel_col
+ if idx > self._width*self._height:
+ raise IndexError("Outside of image index: row '%d', col '%d'" % (pixel_row, pixel_col))
+ return self._depth_image[idx]
+
+ def talker(self, timer):
+ pass
+
+
+if __name__ == '__main__':
+ try:
+ DepthRgbCoordinator()
+ rospy.spin()
+ except rospy.ROSInterruptException:
+ pass
diff --git a/qt_simulator/qt_face_recognition/src/frames.gv b/qt_simulator/qt_face_recognition/src/frames.gv
new file mode 100644
index 0000000..b03300d
--- /dev/null
+++ b/qt_simulator/qt_face_recognition/src/frames.gv
@@ -0,0 +1,19 @@
+digraph G {
+"HeadPitch" -> "Camera"[label="Broadcaster: /robot_state_publisher\nAverage rate: 10000.000 Hz\nMost recent transform: 0.000 ( 1576194940.455 sec old)\nBuffer length: 0.000 sec\n"];
+"HeadYaw" -> "HeadPitch"[label="Broadcaster: /robot_state_publisher\nAverage rate: 2.222 Hz\nMost recent transform: 1576146079.017 ( 48861.438 sec old)\nBuffer length: 4.500 sec\n"];
+"HeadPitch" -> "Eyes"[label="Broadcaster: /robot_state_publisher\nAverage rate: 10000.000 Hz\nMost recent transform: 0.000 ( 1576194940.455 sec old)\nBuffer length: 0.000 sec\n"];
+"LeftElbowRoll" -> "LeftHand"[label="Broadcaster: /robot_state_publisher\nAverage rate: 10000.000 Hz\nMost recent transform: 0.000 ( 1576194940.455 sec old)\nBuffer length: 0.000 sec\n"];
+"LeftShoulderRoll" -> "LeftElbowRoll"[label="Broadcaster: /robot_state_publisher\nAverage rate: 2.222 Hz\nMost recent transform: 1576146079.017 ( 48861.438 sec old)\nBuffer length: 4.500 sec\n"];
+"base_link" -> "NeckFrame"[label="Broadcaster: /robot_state_publisher\nAverage rate: 10000.000 Hz\nMost recent transform: 0.000 ( 1576194940.455 sec old)\nBuffer length: 0.000 sec\n"];
+"RightElbowRoll" -> "RightHand"[label="Broadcaster: /robot_state_publisher\nAverage rate: 10000.000 Hz\nMost recent transform: 0.000 ( 1576194940.455 sec old)\nBuffer length: 0.000 sec\n"];
+"RightShoulderRoll" -> "RightElbowRoll"[label="Broadcaster: /robot_state_publisher\nAverage rate: 2.222 Hz\nMost recent transform: 1576146079.017 ( 48861.438 sec old)\nBuffer length: 4.500 sec\n"];
+"base_link" -> "HeadYaw"[label="Broadcaster: /robot_state_publisher\nAverage rate: 2.222 Hz\nMost recent transform: 1576146079.017 ( 48861.438 sec old)\nBuffer length: 4.500 sec\n"];
+"LeftShoulderPitch" -> "LeftShoulderRoll"[label="Broadcaster: /robot_state_publisher\nAverage rate: 2.222 Hz\nMost recent transform: 1576146079.017 ( 48861.438 sec old)\nBuffer length: 4.500 sec\n"];
+"base_link" -> "LeftShoulderPitch"[label="Broadcaster: /robot_state_publisher\nAverage rate: 2.222 Hz\nMost recent transform: 1576146079.017 ( 48861.438 sec old)\nBuffer length: 4.500 sec\n"];
+"RightShoulderPitch" -> "RightShoulderRoll"[label="Broadcaster: /robot_state_publisher\nAverage rate: 2.222 Hz\nMost recent transform: 1576146079.017 ( 48861.438 sec old)\nBuffer length: 4.500 sec\n"];
+"base_link" -> "RightShoulderPitch"[label="Broadcaster: /robot_state_publisher\nAverage rate: 2.222 Hz\nMost recent transform: 1576146079.017 ( 48861.438 sec old)\nBuffer length: 4.500 sec\n"];
+edge [style=invis];
+ subgraph cluster_legend { style=bold; color=black; label ="view_frames Result";
+"Recorded at time: 1576194940.455"[ shape=plaintext ] ;
+ }->"base_link";
+}
\ No newline at end of file
diff --git a/qt_simulator/qt_face_recognition/src/frames.pdf b/qt_simulator/qt_face_recognition/src/frames.pdf
new file mode 100644
index 0000000..81c7e26
Binary files /dev/null and b/qt_simulator/qt_face_recognition/src/frames.pdf differ
diff --git a/qt_simulator/qt_face_recognition/src/qt_face_recognition.py b/qt_simulator/qt_face_recognition/src/qt_face_recognition.py
new file mode 100644
index 0000000..f3d90a7
--- /dev/null
+++ b/qt_simulator/qt_face_recognition/src/qt_face_recognition.py
@@ -0,0 +1,169 @@
+#!/usr/bin/env python
+from __future__ import print_function
+
+# import sys
+import rospy
+import cv2
+import threading
+import math
+import numpy as np
+from geometry_msgs.msg import Pose2D
+from sensor_msgs.msg import Image
+from cv_bridge import CvBridge, CvBridgeError
+from qt_nuitrack_app.msg import Faces, FaceInfo
+import face_recognition
+
+
+width_frame = 848
+height_frame = 480
+
+
+
+class image_converter:
+ #faces = None
+ #faces_time = None
+
+
+ def __init__(self):
+ self.lock = threading.Lock()
+ self.bridge = CvBridge()
+ self.count = 0
+ self.face_locations = []
+ self.face_encodings = []
+ self.face_names = []
+ self.face_frame_pub = rospy.Publisher("qtpc/face_frame", Pose2D, queue_size=10)
+ self.image_pub = rospy.Publisher("/face_recognition/out", Image, queue_size=1)
+ self.image_sub = rospy.Subscriber("/camera/color/image",Image, self.image_callback)
+ #self.face_sub = rospy.Subscriber("/qt_nuitrack_app/faces", Faces, self.face_callback)
+
+
+
+ def face_track(self, frame, small_frame):
+ self.face_locations = face_recognition.face_locations(small_frame)
+ self.face_encodings = face_recognition.face_encodings(small_frame, self.face_locations)
+ if self.face_locations == []:
+ print("Not in the frame")
+ # face_names = []
+ for face_encoding in self.face_encodings:
+ name = "Unknown"
+ self.face_names.append(name)
+ for (top, right, bottom, left), name in zip(self.face_locations, self.face_names):
+ # Scale back up face locations since the frame we detected in was scaled to 1/4 size
+ top *= 4
+ right *= 4
+ bottom *= 4
+ left *= 4
+ # Draw a box around the face
+ cv2.rectangle(frame, (left, top),(right, bottom), (0, 0, 255), 2)
+ width_rec = bottom - top
+ top_rec = right - left
+ Z_top_frame = top_rec/2 + top
+ X_left_frame = width_rec/2 + left
+ Z_origin_frame = height_frame/2
+ X_origin_frame = width_frame/2
+ x_vec = X_left_frame - X_origin_frame
+ z_vec = Z_origin_frame - Z_top_frame
+ theta_deg = 0
+ '''
+ theta = math.atan(z_vec/x_vec)
+ if (x_vec > 0 and z_vec > 0) or (x_vec < 0 and z_vec < 0):
+ theta = np.pi/2 - theta
+ theta_deg = math.degrees(theta)
+ if x_vec < 0 and z_vec < 0:
+ theta_deg = -(theta_deg)
+ elif (x_vec < 0 and z_vec > 0) or (x_vec > 0 and z_vec < 0):
+ theta = np.pi/2 + theta
+ theta_deg = math.degrees(theta)
+ if x_vec < 0 and z_vec > 0:
+ theta_deg = - theta_deg
+ '''
+ pixel_column = X_left_frame
+ pixel_row = Z_top_frame
+ # Draw the frames origin of the image (green) and of the target (blue)
+ cv2.rectangle(frame, (int(X_origin_frame), int(Z_origin_frame)), (int(X_origin_frame) + 1, int(Z_origin_frame) + 1), (255, 0, 0), 2)
+ cv2.rectangle(frame, (int(X_left_frame), int(Z_top_frame)), (int(X_left_frame) + 1, int(Z_top_frame) + 1), (0, 255, 0), 2)
+ self.face_frame_pub.publish(float(pixel_column), float(pixel_row),float(theta_deg))
+ try:
+ self.image_pub.publish(self.bridge.cv2_to_imgmsg(frame, "bgr8"))
+ #pass
+ except CvBridgeError as e:
+ print(e)
+
+
+ def face_callback(self, data):
+ # print("face_callback")
+ self.lock.acquire()
+ self.faces = data.faces
+ self.faces_time = rospy.Time.now()
+ self.lock.release()
+
+ def image_callback(self,data):
+ self.count += 1
+ if (self.count % 10) == 0:
+ try:
+ cv_image = self.bridge.imgmsg_to_cv2(data, "bgr8")
+ except CvBridgeError as e:
+ print(e)
+
+ (rows, cols, channels) = cv_image.shape
+ #self.lock.acquire()
+ small_frame = cv2.resize(cv_image, (0, 0), fx=0.25, fy=0.25)
+ # Convert the image from BGR color (which OpenCV uses) to RGB color (which face_recognition uses)
+ rgb_small_frame = small_frame[:, :, ::-1]
+ self.face_track(cv_image, rgb_small_frame)
+ #self.lock.release()
+
+ """
+ new_faces = self.faces
+ new_faces_time = self.faces_time
+ self.lock.release()
+
+ if new_faces and (rospy.Time.now()-new_faces_time) < rospy.Duration(5.0):
+ for face in new_faces:
+ rect = face.rectangle
+ cv2.rectangle(cv_image, (int(rect[0]*cols),int(rect[1]*rows)),
+ (int(rect[0]*cols+rect[2]*cols), int(rect[1]*rows+rect[3]*rows)), (0,255,0), 2)
+ x = int(rect[0]*cols)
+ y = int(rect[1]*rows)
+ w = int(rect[2]*cols)
+ h = int(rect[3]*rows)
+ #cv2.putText(cv_image, "Gender:", (x, y+h+10), cv2.FONT_HERSHEY_SIMPLEX, 1, (0,255,255), lineType=cv2.LINE_AA)
+ cv2.putText(cv_image, "Gender: %s" % face.gender, (x, y+h+20), cv2.FONT_HERSHEY_SIMPLEX, 0.5, (0,255,0), 1, lineType=cv2.LINE_AA)
+ cv2.putText(cv_image, "Age: %d" % face.age_years, (x, y+h+40), cv2.FONT_HERSHEY_SIMPLEX, 0.5, (0,255,0), 1, lineType=cv2.LINE_AA)
+
+ # Neutral
+ cv2.putText(cv_image, "Neutral:", (x, y+h+60), cv2.FONT_HERSHEY_SIMPLEX, 0.5, (0,255,0), 1, lineType=cv2.LINE_AA)
+ cv2.rectangle(cv_image, (x+80,y+h+50),
+ (x+80+int(face.emotion_neutral*100), y+h+10+50), (0,255,0), cv2.FILLED)
+ cv2.rectangle(cv_image, (x+80,y+h+50),
+ (x+80+100, y+h+10+50), (255,255,255), 1)
+ # Angry
+ cv2.putText(cv_image, "Angry:", (x, y+h+80), cv2.FONT_HERSHEY_SIMPLEX, 0.5, (0,255,0), 1, lineType=cv2.LINE_AA)
+ cv2.rectangle(cv_image, (x+80,y+h+70),
+ (x+80+int(face.emotion_angry*100), y+h+10+70), (0,255,0), cv2.FILLED)
+ cv2.rectangle(cv_image, (x+80,y+h+70),
+ (x+80+100, y+h+10+70), (255,255,255), 1)
+
+ # Happy
+ cv2.putText(cv_image, "Happy:", (x, y+h+100), cv2.FONT_HERSHEY_SIMPLEX, 0.5, (0,255,0), 1, lineType=cv2.LINE_AA)
+ cv2.rectangle(cv_image, (x+80,y+h+90),
+ (x+80+int(face.emotion_happy*100), y+h+10+90), (0,255,0), cv2.FILLED)
+ cv2.rectangle(cv_image, (x+80,y+h+90),
+ (x+80+100, y+h+10+90), (255,255,255), 1)
+
+ cv2.putText(cv_image, "Surprise:", (x, y+h+120), cv2.FONT_HERSHEY_SIMPLEX, 0.5, (0,255,0), 1, lineType=cv2.LINE_AA)
+ cv2.rectangle(cv_image, (x+80,y+h+110),
+ (x+80+int(face.emotion_surprise*100), y+h+10+110), (0,255,0), cv2.FILLED)
+ cv2.rectangle(cv_image, (x+80,y+h+110),
+ (x+80+100, y+h+10+110), (255,255,255), 1)
+ """
+
+
+
+if __name__ == '__main__':
+ rospy.init_node('qt_face_recognition', anonymous=True)
+ ic = image_converter()
+ try:
+ rospy.spin()
+ except KeyboardInterrupt:
+ print("Shutting down")
diff --git a/qt_simulator/qt_face_recognition/src/qt_face_recognition.py.orig b/qt_simulator/qt_face_recognition/src/qt_face_recognition.py.orig
new file mode 100644
index 0000000..d23b94f
--- /dev/null
+++ b/qt_simulator/qt_face_recognition/src/qt_face_recognition.py.orig
@@ -0,0 +1,94 @@
+#!/usr/bin/env python
+from __future__ import print_function
+
+# import sys
+import rospy
+import cv2
+import threading
+
+from sensor_msgs.msg import Image
+from cv_bridge import CvBridge, CvBridgeError
+from qt_nuitrack_app.msg import Faces, FaceInfo
+
+
+class image_converter:
+ faces = None
+ faces_time = None
+
+ def __init__(self):
+ self.lock = threading.Lock()
+ self.bridge = CvBridge()
+ self.image_pub = rospy.Publisher("/face_recognition/out", Image, queue_size=1)
+ self.image_sub = rospy.Subscriber("/camera/color/image_raw",Image,self.image_callback)
+ self.face_sub = rospy.Subscriber("/qt_nuitrack_app/faces", Faces, self.face_callback)
+
+ def face_callback(self, data):
+ # print("face_callback")
+ self.lock.acquire()
+ self.faces = data.faces
+ self.faces_time = rospy.Time.now()
+ self.lock.release()
+
+ def image_callback(self,data):
+ try:
+ cv_image = self.bridge.imgmsg_to_cv2(data, "bgr8")
+ except CvBridgeError as e:
+ print(e)
+
+ (rows, cols, channels) = cv_image.shape
+ self.lock.acquire()
+ new_faces = self.faces
+ new_faces_time = self.faces_time
+ self.lock.release()
+
+ if new_faces and (rospy.Time.now()-new_faces_time) < rospy.Duration(5.0):
+ for face in new_faces:
+ rect = face.rectangle
+ cv2.rectangle(cv_image, (int(rect[0]*cols),int(rect[1]*rows)),
+ (int(rect[0]*cols+rect[2]*cols), int(rect[1]*rows+rect[3]*rows)), (0,255,0), 2)
+ x = int(rect[0]*cols)
+ y = int(rect[1]*rows)
+ w = int(rect[2]*cols)
+ h = int(rect[3]*rows)
+ #cv2.putText(cv_image, "Gender:", (x, y+h+10), cv2.FONT_HERSHEY_SIMPLEX, 1, (0,255,255), lineType=cv2.LINE_AA)
+ cv2.putText(cv_image, "Gender: %s" % face.gender, (x, y+h+20), cv2.FONT_HERSHEY_SIMPLEX, 0.5, (0,255,0), 1, lineType=cv2.LINE_AA)
+ cv2.putText(cv_image, "Age: %d" % face.age_years, (x, y+h+40), cv2.FONT_HERSHEY_SIMPLEX, 0.5, (0,255,0), 1, lineType=cv2.LINE_AA)
+
+ # Neutral
+ cv2.putText(cv_image, "Neutral:", (x, y+h+60), cv2.FONT_HERSHEY_SIMPLEX, 0.5, (0,255,0), 1, lineType=cv2.LINE_AA)
+ cv2.rectangle(cv_image, (x+80,y+h+50),
+ (x+80+int(face.emotion_neutral*100), y+h+10+50), (0,255,0), cv2.FILLED)
+ cv2.rectangle(cv_image, (x+80,y+h+50),
+ (x+80+100, y+h+10+50), (255,255,255), 1)
+ # Angry
+ cv2.putText(cv_image, "Angry:", (x, y+h+80), cv2.FONT_HERSHEY_SIMPLEX, 0.5, (0,255,0), 1, lineType=cv2.LINE_AA)
+ cv2.rectangle(cv_image, (x+80,y+h+70),
+ (x+80+int(face.emotion_angry*100), y+h+10+70), (0,255,0), cv2.FILLED)
+ cv2.rectangle(cv_image, (x+80,y+h+70),
+ (x+80+100, y+h+10+70), (255,255,255), 1)
+
+ # Happy
+ cv2.putText(cv_image, "Happy:", (x, y+h+100), cv2.FONT_HERSHEY_SIMPLEX, 0.5, (0,255,0), 1, lineType=cv2.LINE_AA)
+ cv2.rectangle(cv_image, (x+80,y+h+90),
+ (x+80+int(face.emotion_happy*100), y+h+10+90), (0,255,0), cv2.FILLED)
+ cv2.rectangle(cv_image, (x+80,y+h+90),
+ (x+80+100, y+h+10+90), (255,255,255), 1)
+
+ cv2.putText(cv_image, "Surprise:", (x, y+h+120), cv2.FONT_HERSHEY_SIMPLEX, 0.5, (0,255,0), 1, lineType=cv2.LINE_AA)
+ cv2.rectangle(cv_image, (x+80,y+h+110),
+ (x+80+int(face.emotion_surprise*100), y+h+10+110), (0,255,0), cv2.FILLED)
+ cv2.rectangle(cv_image, (x+80,y+h+110),
+ (x+80+100, y+h+10+110), (255,255,255), 1)
+ try:
+ self.image_pub.publish(self.bridge.cv2_to_imgmsg(cv_image, "bgr8"))
+ except CvBridgeError as e:
+ print(e)
+
+
+if __name__ == '__main__':
+ rospy.init_node('qt_face_recognition', anonymous=True)
+ ic = image_converter()
+ try:
+ rospy.spin()
+ except KeyboardInterrupt:
+ print("Shutting down")
diff --git a/qt_simulator/qt_robot_state/CMakeLists.txt b/qt_simulator/qt_robot_state/CMakeLists.txt
new file mode 100644
index 0000000..1c5ba60
--- /dev/null
+++ b/qt_simulator/qt_robot_state/CMakeLists.txt
@@ -0,0 +1,208 @@
+cmake_minimum_required(VERSION 2.8.3)
+project(qt_robot_state)
+
+## Compile as C++11, supported in ROS Kinetic and newer
+# add_compile_options(-std=c++11)
+
+## Find catkin macros and libraries
+## if COMPONENTS list like find_package(catkin REQUIRED COMPONENTS xyz)
+## is used, also find other catkin packages
+find_package(catkin REQUIRED COMPONENTS
+ roscpp
+ rospy
+ sensor_msgs
+ std_msgs
+ tf
+)
+
+## System dependencies are found with CMake's conventions
+# find_package(Boost REQUIRED COMPONENTS system)
+
+
+## Uncomment this if the package has a setup.py. This macro ensures
+## modules and global scripts declared therein get installed
+## See http://ros.org/doc/api/catkin/html/user_guide/setup_dot_py.html
+# catkin_python_setup()
+
+################################################
+## Declare ROS messages, services and actions ##
+################################################
+
+## To declare and build messages, services or actions from within this
+## package, follow these steps:
+## * Let MSG_DEP_SET be the set of packages whose message types you use in
+## your messages/services/actions (e.g. std_msgs, actionlib_msgs, ...).
+## * In the file package.xml:
+## * add a build_depend tag for "message_generation"
+## * add a build_depend and an exec_depend tag for each package in MSG_DEP_SET
+## * If MSG_DEP_SET isn't empty the following dependency has been pulled in
+## but can be declared for certainty nonetheless:
+## * add an exec_depend tag for "message_runtime"
+## * In this file (CMakeLists.txt):
+## * add "message_generation" and every package in MSG_DEP_SET to
+## find_package(catkin REQUIRED COMPONENTS ...)
+## * add "message_runtime" and every package in MSG_DEP_SET to
+## catkin_package(CATKIN_DEPENDS ...)
+## * uncomment the add_*_files sections below as needed
+## and list every .msg/.srv/.action file to be processed
+## * uncomment the generate_messages entry below
+## * add every package in MSG_DEP_SET to generate_messages(DEPENDENCIES ...)
+
+## Generate messages in the 'msg' folder
+# add_message_files(
+# FILES
+# Message1.msg
+# Message2.msg
+# )
+
+## Generate services in the 'srv' folder
+# add_service_files(
+# FILES
+# Service1.srv
+# Service2.srv
+# )
+
+## Generate actions in the 'action' folder
+# add_action_files(
+# FILES
+# Action1.action
+# Action2.action
+# )
+
+## Generate added messages and services with any dependencies listed here
+# generate_messages(
+# DEPENDENCIES
+# sensor_msgs# std_msgs
+# )
+
+################################################
+## Declare ROS dynamic reconfigure parameters ##
+################################################
+
+## To declare and build dynamic reconfigure parameters within this
+## package, follow these steps:
+## * In the file package.xml:
+## * add a build_depend and an exec_depend tag for "dynamic_reconfigure"
+## * In this file (CMakeLists.txt):
+## * add "dynamic_reconfigure" to
+## find_package(catkin REQUIRED COMPONENTS ...)
+## * uncomment the "generate_dynamic_reconfigure_options" section below
+## and list every .cfg file to be processed
+
+## Generate dynamic reconfigure parameters in the 'cfg' folder
+# generate_dynamic_reconfigure_options(
+# cfg/DynReconf1.cfg
+# cfg/DynReconf2.cfg
+# )
+
+###################################
+## catkin specific configuration ##
+###################################
+## The catkin_package macro generates cmake config files for your package
+## Declare things to be passed to dependent projects
+## INCLUDE_DIRS: uncomment this if your package contains header files
+## LIBRARIES: libraries you create in this project that dependent projects also need
+## CATKIN_DEPENDS: catkin_packages dependent projects also need
+## DEPENDS: system dependencies of this project that dependent projects also need
+catkin_package(
+# INCLUDE_DIRS include
+# LIBRARIES qt_robot_state
+# CATKIN_DEPENDS roscpp rospy sensor_msgs std_msgs tf
+# DEPENDS system_lib
+)
+
+###########
+## Build ##
+###########
+
+## Specify additional locations of header files
+## Your package locations should be listed before other locations
+include_directories(
+# include
+ ${catkin_INCLUDE_DIRS}
+)
+
+## Declare a C++ library
+# add_library(${PROJECT_NAME}
+# src/${PROJECT_NAME}/qt_robot_state.cpp
+# )
+
+## Add cmake target dependencies of the library
+## as an example, code may need to be generated before libraries
+## either from message generation or dynamic reconfigure
+# add_dependencies(${PROJECT_NAME} ${${PROJECT_NAME}_EXPORTED_TARGETS} ${catkin_EXPORTED_TARGETS})
+
+## Declare a C++ executable
+## With catkin_make all packages are built within a single CMake context
+## The recommended prefix ensures that target names across packages don't collide
+# add_executable(${PROJECT_NAME}_node src/qt_robot_state_node.cpp)
+
+## Rename C++ executable without prefix
+## The above recommended prefix causes long target names, the following renames the
+## target back to the shorter version for ease of user use
+## e.g. "rosrun someones_pkg node" instead of "rosrun someones_pkg someones_pkg_node"
+# set_target_properties(${PROJECT_NAME}_node PROPERTIES OUTPUT_NAME node PREFIX "")
+
+## Add cmake target dependencies of the executable
+## same as for the library above
+# add_dependencies(${PROJECT_NAME}_node ${${PROJECT_NAME}_EXPORTED_TARGETS} ${catkin_EXPORTED_TARGETS})
+
+## Specify libraries to link a library or executable target against
+# target_link_libraries(${PROJECT_NAME}_node
+# ${catkin_LIBRARIES}
+# )
+
+#############
+## Install ##
+#############
+
+# all install targets should use catkin DESTINATION variables
+# See http://ros.org/doc/api/catkin/html/adv_user_guide/variables.html
+
+## Mark executable scripts (Python etc.) for installation
+## in contrast to setup.py, you can choose the destination
+# install(PROGRAMS
+# scripts/my_python_script
+# DESTINATION ${CATKIN_PACKAGE_BIN_DESTINATION}
+# )
+
+## Mark executables for installation
+## See http://docs.ros.org/melodic/api/catkin/html/howto/format1/building_executables.html
+# install(TARGETS ${PROJECT_NAME}_node
+# RUNTIME DESTINATION ${CATKIN_PACKAGE_BIN_DESTINATION}
+# )
+
+## Mark libraries for installation
+## See http://docs.ros.org/melodic/api/catkin/html/howto/format1/building_libraries.html
+# install(TARGETS ${PROJECT_NAME}
+# ARCHIVE DESTINATION ${CATKIN_PACKAGE_LIB_DESTINATION}
+# LIBRARY DESTINATION ${CATKIN_PACKAGE_LIB_DESTINATION}
+# RUNTIME DESTINATION ${CATKIN_GLOBAL_BIN_DESTINATION}
+# )
+
+## Mark cpp header files for installation
+# install(DIRECTORY include/${PROJECT_NAME}/
+# DESTINATION ${CATKIN_PACKAGE_INCLUDE_DESTINATION}
+# FILES_MATCHING PATTERN "*.h"
+# PATTERN ".svn" EXCLUDE
+# )
+
+## Mark other files for installation (e.g. launch and bag files, etc.)
+# install(FILES
+# # myfile1
+# # myfile2
+# DESTINATION ${CATKIN_PACKAGE_SHARE_DESTINATION}
+# )
+
+#############
+## Testing ##
+#############
+
+## Add gtest based cpp test target and link libraries
+# catkin_add_gtest(${PROJECT_NAME}-test test/test_qt_robot_state.cpp)
+# if(TARGET ${PROJECT_NAME}-test)
+# target_link_libraries(${PROJECT_NAME}-test ${PROJECT_NAME})
+# endif()
+
+## Add folders to be run by python nosetests
+# catkin_add_nosetests(test)
diff --git a/qt_simulator/qt_robot_state/launch/qt_robot_state.launch b/qt_simulator/qt_robot_state/launch/qt_robot_state.launch
new file mode 100644
index 0000000..1704490
--- /dev/null
+++ b/qt_simulator/qt_robot_state/launch/qt_robot_state.launch
@@ -0,0 +1,16 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/qt_simulator/qt_robot_state/launch/simulator_robot_state.launch b/qt_simulator/qt_robot_state/launch/simulator_robot_state.launch
new file mode 100644
index 0000000..c93c5d6
--- /dev/null
+++ b/qt_simulator/qt_robot_state/launch/simulator_robot_state.launch
@@ -0,0 +1,12 @@
+
+
+
+
+
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/qt_simulator/qt_robot_state/package.xml b/qt_simulator/qt_robot_state/package.xml
new file mode 100644
index 0000000..7615598
--- /dev/null
+++ b/qt_simulator/qt_robot_state/package.xml
@@ -0,0 +1,74 @@
+
+
+ qt_robot_state
+ 0.0.0
+ The qt_robot_state package
+
+
+
+
+ micolspitale
+
+
+
+
+
+ TODO
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ catkin
+ roscpp
+ rospy
+ sensor_msgs
+ std_msgs
+ tf
+ roscpp
+ rospy
+ sensor_msgs
+ std_msgs
+ tf
+ roscpp
+ rospy
+ sensor_msgs
+ std_msgs
+ tf
+
+
+
+
+
+
+
+
diff --git a/qt_simulator/qt_robot_state/src/qt_joint_radians.py b/qt_simulator/qt_robot_state/src/qt_joint_radians.py
new file mode 100644
index 0000000..d8d3d3f
--- /dev/null
+++ b/qt_simulator/qt_robot_state/src/qt_joint_radians.py
@@ -0,0 +1,30 @@
+#!/usr/bin/env python
+import numpy as np
+import rospy
+import tf
+import math
+from sensor_msgs.msg import JointState
+
+class DegreeToRadians:
+ def __init__(self):
+ rospy.init_node("radians_joint_node", anonymous = True)
+ rospy.Subscriber("qt_robot/joints/state", JointState, self.handle_degree)
+ self.joint_pub = rospy.Publisher("qt_robot/joints/state_rad", JointState, queue_size = 1)
+
+ def handle_degree(self, data):
+ degrees = data.position
+ radians = [math.radians(d) for d in degrees]
+ joint_rad = JointState()
+ joint_rad.position = radians
+ joint_rad.name = data.name
+ joint_rad.velocity = data.velocity
+ joint_rad.effort = data.effort
+ joint_rad.header = data.header
+ self.joint_pub.publish(joint_rad)
+
+if __name__ == '__main__':
+    try:
+        # Node initialisation happens inside DegreeToRadians.__init__;
+        # spin() then keeps the callback alive until shutdown.
+        DegreeToRadians()
+        rospy.spin()
+    except rospy.ROSInterruptException:
+        pass
diff --git a/qt_simulator/qt_robot_state/src/qt_robot_tf_listener.py b/qt_simulator/qt_robot_state/src/qt_robot_tf_listener.py
new file mode 100644
index 0000000..f2a9868
--- /dev/null
+++ b/qt_simulator/qt_robot_state/src/qt_robot_tf_listener.py
@@ -0,0 +1,87 @@
+#!/usr/bin/env python
+import roslib
+import rospy
+import math
+import tf
+from std_msgs.msg import Float64MultiArray
+from geometry_msgs.msg import Point
+from geometry_msgs.msg import PointStamped
+
+
+class QTtflistener:
+
+ def __init__(self):
+ self.head_publisher = rospy.Publisher("/qt_robot/head_position/command", Float64MultiArray, queue_size=5)
+ self.listener = tf.TransformListener()
+ self.rate = rospy.Rate(1)
+ target_frame = "TargetFrame"
+ source_frame = "NeckFrame"
+ self.transformationFrame(source_frame, target_frame)
+ self.rate.sleep()
+
+ def orientationMatrix(self, point):
+ yaw = math.atan2(point[1], point[0])
+ pitch = math.atan2(point[2], point[0])
+ yaw_deg = math.degrees(yaw)
+ pitch_deg = math.degrees(pitch)
+ if yaw_deg > 90 and yaw_deg < 180:
+ yaw_deg = - (180 - yaw_deg)
+ elif yaw_deg < 90:
+ yaw_deg = (180 + yaw_deg)
+ elif yaw_deg == 0 or yaw_deg == 180 or yaw_deg == -180:
+ yaw_deg = 0
+ else:
+ yaw_deg = "Out of range"
+
+ if pitch_deg > 90 and pitch_deg < 180:
+ pitch_deg = 180 - pitch_deg
+ elif pitch_deg < 90:
+ pitch_deg = - (180 + pitch_deg)
+ elif pitch_deg == 0 or pitch_deg == 180 or pitch_deg == -180:
+ pitch_deg = 0
+ else:
+ pitch_deg = "Out of range"
+ # Head commands for QT (HeadYaw, HeadPitch)
+ return(pitch_deg, yaw_deg)
+
+ def transformationPoint(self, target_frame, point):
+ while not rospy.is_shutdown():
+ try:
+ point_target_frame = self.listener.transformPoint(target_frame, point)
+ point_target_frame = point_target_frame.point
+ self.orientationMatrix(point_target_frame)
+ except (tf.LookupException, tf.ConnectivityException, tf.ExtrapolationException):
+ continue
+ rate.sleep()
+
+ def transformationFrame(self, source_frame, target_frame):
+ while not rospy.is_shutdown():
+ try:
+ # we want to trasform from par[1] frame to par[0] frame
+ # timing at which we want that transformation: this instant (time(0))
+ (trans,rot) = self.listener.lookupTransform(target_frame, source_frame, rospy.Time(0))
+ euler = tf.transformations.euler_from_quaternion(rot)
+ [pitch, yaw] = self.orientationMatrix(trans)
+ self.head_command(pitch, yaw)
+ except (tf.LookupException, tf.ConnectivityException, tf.ExtrapolationException):
+ continue
+ self.rate.sleep()
+
+
+ def head_command(self, pitch, yaw):
+ head_commands = Float64MultiArray()
+ head_commands.data = [yaw, pitch]
+ print(str(head_commands.data))
+ self.head_publisher.publish(head_commands)
+
+
+
+
+
+if __name__ == '__main__':
+    rospy.init_node('qt_robot_tf', anonymous=True)
+    # NOTE(review): QTtflistener.__init__ loops until shutdown, so the
+    # try/spin below is only reached once the node is already shutting down.
+    QTtflistener()
+    try:
+        rospy.spin()
+    except KeyboardInterrupt:
+        print("Shutting down")
diff --git a/qt_simulator/qt_robot_state/urdf/qtrobot.urdf b/qt_simulator/qt_robot_state/urdf/qtrobot.urdf
new file mode 100644
index 0000000..9746b08
--- /dev/null
+++ b/qt_simulator/qt_robot_state/urdf/qtrobot.urdf
@@ -0,0 +1,251 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ -->
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ -->
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/qt_simulator/robot_state_publisher/CHANGELOG.rst b/qt_simulator/robot_state_publisher/CHANGELOG.rst
new file mode 100644
index 0000000..86fca81
--- /dev/null
+++ b/qt_simulator/robot_state_publisher/CHANGELOG.rst
@@ -0,0 +1,126 @@
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+Changelog for package robot_state_publisher
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+1.13.7 (2019-08-27)
+-------------------
+* Remove treefksolver completely from the repository. (`#100 `_) (`#101 `_)
+* Add Ian as a maintainer for robot_state_publisher (`#95 `_)
+* Fixed problem when building static library version (`#92 `_)
+* Contributors: Chris Lalancette, Shane Loretz, ivanpauno
+
+1.13.6 (2018-04-05)
+-------------------
+* added warning when joint is found in joint message but not in the urdf (`#83 `_)
+* added ros_warn if JointStateMessage is older than 30 seconds (`#84 `_)
+* Add tcp_no_delay to joint_states subscriber (`#80 `_)
+* make rostest in CMakeLists optional (`ros/rosdistro#3010 `_) (`#75 `_)
+* Added c++11 target_compile_options (`#78 `_)
+* Contributors: Lukas Bulwahn, Shane Loretz, Victor Lopez, jgueldenstein
+
+1.13.5 (2017-04-11)
+-------------------
+* Style cleanup throughout the tree.
+* add Chris and Shane as maintainers (`#70 `_)
+* Contributors: Chris Lalancette, William Woodall
+
+1.13.4 (2017-01-05)
+-------------------
+* Use ``urdf::*ShredPtr`` instead of ``boost::shared_ptr`` (`#60 `_)
+* Error log for empty JointState.position was downgraded to a throttled warning (`#64 `_)
+* Contributors: Jochen Sprickerhof, Sébastien BARTHÉLÉMY
+
+1.13.3 (2016-10-20)
+-------------------
+* Added a new parameter "ignore_timestamp" (`#65 `_)
+* Fixed joints are not published over tf_static by default (`#56 `_)
+* Fixed segfault on undefined robot_description (`#61 `_)
+* Fixed cmake eigen3 warning (`#62 `_)
+* Contributors: Davide Faconti, Ioan A Sucan, Johannes Meyer, Robert Haschke
+
+1.13.2 (2016-06-10)
+-------------------
+* Add target_link_libraries for joint_state_listener library + install it (`#54 `_)
+* Contributors: Kartik Mohta
+
+1.13.1 (2016-05-20)
+-------------------
+* Add back future dating for robot_state_publisher (`#49 `_) (`#51 `_)
+* Fix subclassing test (`#48 `_)
+* Support for subclassing (`#45 `_)
+ * Add joint_state_listener as a library
+* Contributors: Jackie Kay
+
+1.13.0 (2016-04-12)
+-------------------
+* fix bad rebase
+* Contributors: Jackie Kay, Paul Bovbel
+
+1.12.1 (2016-02-22)
+-------------------
+* Merge pull request `#42 `_ from ros/fix_tests_jade
+ Fix tests for Jade
+* Correct failing tests
+* Re-enabling rostests
+* Merge pull request `#39 `_ from scpeters/issue_38
+* Fix API break in publishFixedTransforms
+ A bool argument was added to
+ RobotStatePublisher::publishFixedTransforms
+ which broke API.
+ I've added a default value of false, to match
+ the default specified in the JointStateListener
+ constructor.
+* Contributors: Jackie Kay, Jonathan Bohren, Steven Peters
+
+1.12.0 (2015-10-21)
+-------------------
+* Merge pull request `#37 `_ from clearpathrobotics/static-default
+ Publish fixed joints over tf_static by default
+* Merge pull request `#34 `_ from ros/tf2-static-jade
+ Port to tf2 and enable using static broadcaster
+* Merge pull request `#32 `_ from `shadow-robot/fix_issue#19 `_
+ Check URDF to distinguish fixed joints from floating joints. Floating joint are ignored by the publisher.
+* Merge pull request `#26 `_ from xqms/remove-debug
+ get rid of argv[0] debug output on startup
+* Contributors: David Lu!!, Ioan A Sucan, Jackie Kay, Max Schwarz, Paul Bovbel, Toni Oliver
+
+1.11.1 (2016-02-22)
+-------------------
+* Merge pull request `#41 `_ from ros/fix_tests_indigo
+ Re-enable and clean up rostests
+* Correct failing tests
+* Re-enabling rostests
+* Fix API break in publishFixedTransforms
+ A bool argument was added to
+ RobotStatePublisher::publishFixedTransforms
+ which broke API.
+ I've added a default value of false, to match
+ the default specified in the JointStateListener
+ constructor.
+* Contributors: Jackie Kay, Jonathan Bohren, Steven Peters
+
+1.11.0 (2015-10-21)
+-------------------
+* Merge pull request `#28 `_ from clearpathrobotics/tf2-static
+
+1.10.4 (2014-11-30)
+-------------------
+* Merge pull request `#21 `_ from rcodddow/patch-1
+* Fix for joint transforms not being published anymore after a clock reset (e.g. when playing a bagfile and looping)
+* Contributors: Ioan A Sucan, Robert Codd-Downey, Timm Linder
+
+1.10.3 (2014-07-24)
+-------------------
+* add version depend on orocos_kdl >= 1.3.0
+ Conflicts:
+ package.xml
+* Update KDL SegmentMap interface to optionally use shared pointers
+ The KDL Tree API optionally uses shared pointers on platforms where
+ the STL containers don't support incomplete types.
+* Contributors: Brian Jensen, William Woodall
+
+1.10.0 (2014-03-03)
+-------------------
+* minor style fixes
+* Add support for mimic tag.
+* Contributors: Ioan Sucan, Konrad Banachowicz
diff --git a/qt_simulator/robot_state_publisher/CMakeLists.txt b/qt_simulator/robot_state_publisher/CMakeLists.txt
new file mode 100644
index 0000000..465c69c
--- /dev/null
+++ b/qt_simulator/robot_state_publisher/CMakeLists.txt
@@ -0,0 +1,78 @@
+cmake_minimum_required(VERSION 2.8)
+project(robot_state_publisher)
+
+set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -std=c++11 -Wall -Wextra")
+
+find_package(orocos_kdl REQUIRED)
+find_package(catkin REQUIRED
+ COMPONENTS roscpp rosconsole rostime tf tf2_ros tf2_kdl kdl_parser
+)
+find_package(Eigen3 REQUIRED)
+
+find_package(urdfdom_headers REQUIRED)
+
+catkin_package(
+ LIBRARIES ${PROJECT_NAME}_solver
+ INCLUDE_DIRS include
+ DEPENDS roscpp rosconsole rostime tf2_ros tf2_kdl kdl_parser orocos_kdl urdfdom_headers
+)
+
+include_directories(SYSTEM ${EIGEN3_INCLUDE_DIRS})
+include_directories(include ${catkin_INCLUDE_DIRS} ${orocos_kdl_INCLUDE_DIRS} ${urdfdom_headers_INCLUDE_DIRS})
+link_directories(${orocos_kdl_LIBRARY_DIRS})
+
+add_library(${PROJECT_NAME}_solver
+ src/robot_state_publisher.cpp
+)
+target_link_libraries(${PROJECT_NAME}_solver ${catkin_LIBRARIES} ${orocos_kdl_LIBRARIES})
+
+add_library(joint_state_listener src/joint_state_listener.cpp)
+target_link_libraries(joint_state_listener ${PROJECT_NAME}_solver ${orocos_kdl_LIBRARIES})
+
+add_executable(${PROJECT_NAME} src/joint_state_listener.cpp)
+target_link_libraries(${PROJECT_NAME} ${PROJECT_NAME}_solver ${orocos_kdl_LIBRARIES})
+
+# compile the same executable using the old name as well
+add_executable(state_publisher src/joint_state_listener.cpp)
+target_link_libraries(state_publisher ${PROJECT_NAME}_solver ${orocos_kdl_LIBRARIES})
+
+# Tests
+
+if (CATKIN_ENABLE_TESTING)
+
+ find_package(rostest REQUIRED)
+
+ add_rostest_gtest(test_one_link ${CMAKE_CURRENT_SOURCE_DIR}/test/test_one_link.launch test/test_one_link.cpp)
+ target_link_libraries(test_one_link ${catkin_LIBRARIES} ${PROJECT_NAME}_solver)
+
+ add_rostest_gtest(test_two_links_fixed_joint ${CMAKE_CURRENT_SOURCE_DIR}/test/test_two_links_fixed_joint.launch test/test_two_links_fixed_joint.cpp)
+ target_link_libraries(test_two_links_fixed_joint ${catkin_LIBRARIES} ${PROJECT_NAME}_solver)
+
+ add_rostest_gtest(test_two_links_moving_joint ${CMAKE_CURRENT_SOURCE_DIR}/test/test_two_links_moving_joint.launch test/test_two_links_moving_joint.cpp)
+ target_link_libraries(test_two_links_moving_joint ${catkin_LIBRARIES} ${PROJECT_NAME}_solver)
+
+ # Download needed data file
+ catkin_download_test_data(
+ joint_states_indexed_bag
+ http://wiki.ros.org/robot_state_publisher/data?action=AttachFile&do=get&target=joint_states_indexed.bag
+ DESTINATION ${CATKIN_DEVEL_PREFIX}/${CATKIN_PACKAGE_SHARE_DESTINATION}/test
+ FILENAME joint_states_indexed.bag
+ MD5 793e0b566ebe4698265a936b92fa2bba)
+
+ add_rostest_gtest(test_joint_states_bag ${CMAKE_CURRENT_SOURCE_DIR}/test/test_joint_states_bag.launch test/test_joint_states_bag.cpp)
+ target_link_libraries(test_joint_states_bag ${catkin_LIBRARIES} ${PROJECT_NAME}_solver)
+
+ add_rostest_gtest(test_subclass ${CMAKE_CURRENT_SOURCE_DIR}/test/test_subclass.launch test/test_subclass.cpp)
+ target_link_libraries(test_subclass ${catkin_LIBRARIES} ${PROJECT_NAME}_solver joint_state_listener)
+
+ install(FILES test/one_link.urdf test/pr2.urdf test/two_links_fixed_joint.urdf test/two_links_moving_joint.urdf DESTINATION ${CATKIN_PACKAGE_SHARE_DESTINATION}/test)
+
+endif()
+
+install(TARGETS ${PROJECT_NAME}_solver joint_state_listener ${PROJECT_NAME} state_publisher
+ ARCHIVE DESTINATION ${CATKIN_PACKAGE_LIB_DESTINATION}
+ LIBRARY DESTINATION ${CATKIN_PACKAGE_LIB_DESTINATION}
+ RUNTIME DESTINATION ${CATKIN_PACKAGE_BIN_DESTINATION})
+
+install(DIRECTORY include/${PROJECT_NAME}/
+ DESTINATION ${CATKIN_PACKAGE_INCLUDE_DESTINATION})
diff --git a/qt_simulator/robot_state_publisher/include/robot_state_publisher/joint_state_listener.h b/qt_simulator/robot_state_publisher/include/robot_state_publisher/joint_state_listener.h
new file mode 100644
index 0000000..604dc04
--- /dev/null
+++ b/qt_simulator/robot_state_publisher/include/robot_state_publisher/joint_state_listener.h
@@ -0,0 +1,84 @@
+/*********************************************************************
+* Software License Agreement (BSD License)
+*
+* Copyright (c) 2008, Willow Garage, Inc.
+* All rights reserved.
+*
+* Redistribution and use in source and binary forms, with or without
+* modification, are permitted provided that the following conditions
+* are met:
+*
+* * Redistributions of source code must retain the above copyright
+* notice, this list of conditions and the following disclaimer.
+* * Redistributions in binary form must reproduce the above
+* copyright notice, this list of conditions and the following
+* disclaimer in the documentation and/or other materials provided
+* with the distribution.
+* * Neither the name of the Willow Garage nor the names of its
+* contributors may be used to endorse or promote products derived
+* from this software without specific prior written permission.
+*
+* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
+* FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
+* COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
+* INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
+* BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+* CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
+* LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
+* ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
+* POSSIBILITY OF SUCH DAMAGE.
+*********************************************************************/
+
+/* Author: Wim Meeussen */
+
+#ifndef JOINT_STATE_LISTENER_H
+#define JOINT_STATE_LISTENER_H
+
+#include
+#include
+#include
+#include
+
+#include "robot_state_publisher/robot_state_publisher.h"
+
+using namespace std;
+using namespace ros;
+using namespace KDL;
+
+typedef boost::shared_ptr JointStateConstPtr;
+typedef std::map MimicMap;
+
+namespace robot_state_publisher {
+
+// Subscribes to sensor_msgs/JointState and forwards joint positions to a
+// RobotStatePublisher; a timer republishes the fixed-joint transforms.
+class JointStateListener {
+public:
+  /** Constructor
+   * \param tree The kinematic model of a robot, represented by a KDL Tree
+   * \param m Mimic-joint map (joint name -> mimic information)
+   * \param model URDF model, used to distinguish joint types
+   */
+  JointStateListener(const KDL::Tree& tree, const MimicMap& m, const urdf::Model& model = urdf::Model());
+
+  /// Destructor
+  ~JointStateListener();
+
+protected:
+  // Callback invoked for each incoming sensor_msgs/JointState message.
+  virtual void callbackJointState(const JointStateConstPtr& state);
+  // Timer callback republishing transforms of fixed joints.
+  virtual void callbackFixedJoint(const ros::TimerEvent& e);
+
+  std::string tf_prefix_;
+  Duration publish_interval_;
+  robot_state_publisher::RobotStatePublisher state_publisher_;
+  Subscriber joint_state_sub_;
+  ros::Timer timer_;
+  ros::Time last_callback_time_;
+  // NOTE(review): the template arguments of this map (and the #include
+  // targets / typedef arguments above) appear to have been stripped when
+  // this patch was generated -- upstream this is
+  // std::map<std::string, ros::Time>. Confirm against the original
+  // robot_state_publisher sources before applying.
+  std::map last_publish_time_;
+  MimicMap mimic_;
+  bool use_tf_static_;
+  bool ignore_timestamp_;
+
+};
+}
+
+#endif
diff --git a/qt_simulator/robot_state_publisher/include/robot_state_publisher/robot_state_publisher.h b/qt_simulator/robot_state_publisher/include/robot_state_publisher/robot_state_publisher.h
new file mode 100644
index 0000000..2bd0a10
--- /dev/null
+++ b/qt_simulator/robot_state_publisher/include/robot_state_publisher/robot_state_publisher.h
@@ -0,0 +1,92 @@
+/*********************************************************************
+* Software License Agreement (BSD License)
+*
+* Copyright (c) 2008, Willow Garage, Inc.
+* All rights reserved.
+*
+* Redistribution and use in source and binary forms, with or without
+* modification, are permitted provided that the following conditions
+* are met:
+*
+* * Redistributions of source code must retain the above copyright
+* notice, this list of conditions and the following disclaimer.
+* * Redistributions in binary form must reproduce the above
+* copyright notice, this list of conditions and the following
+* disclaimer in the documentation and/or other materials provided
+* with the distribution.
+* * Neither the name of the Willow Garage nor the names of its
+* contributors may be used to endorse or promote products derived
+* from this software without specific prior written permission.
+*
+* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
+* FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
+* COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
+* INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
+* BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+* CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
+* LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
+* ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
+* POSSIBILITY OF SUCH DAMAGE.
+*********************************************************************/
+
+/* Author: Wim Meeussen */
+
+#ifndef ROBOT_STATE_PUBLISHER_H
+#define ROBOT_STATE_PUBLISHER_H
+
+#include
+#include
+#include
+#include
+#include
+#include
+#include
+#include
+#include
+
+namespace robot_state_publisher {
+
+// Pairs a KDL segment with the names of its root and tip frames.
+class SegmentPair
+{
+public:
+  SegmentPair(const KDL::Segment& p_segment, const std::string& p_root, const std::string& p_tip):
+    segment(p_segment), root(p_root), tip(p_tip){}
+
+  KDL::Segment segment;
+  std::string root, tip;
+};
+
+
+// Publishes the 3D poses of robot links to tf/tf2, given joint positions
+// and a kinematic tree model.
+class RobotStatePublisher
+{
+public:
+  /** Constructor
+   * \param tree The kinematic model of a robot, represented by a KDL Tree
+   * \param model URDF model, used to distinguish joint types
+   */
+  RobotStatePublisher(const KDL::Tree& tree, const urdf::Model& model = urdf::Model());
+
+  /// Destructor
+  ~RobotStatePublisher(){};
+
+  /** Publish transforms to tf
+   * \param joint_positions A map of joint names and joint positions.
+   * \param time The time at which the joint positions were recorded
+   * \param tf_prefix Prefix prepended to every published frame name
+   */
+  virtual void publishTransforms(const std::map& joint_positions, const ros::Time& time, const std::string& tf_prefix);
+  // Publish the transforms of fixed joints, optionally over tf_static.
+  virtual void publishFixedTransforms(const std::string& tf_prefix, bool use_tf_static = false);
+
+protected:
+  // Recursively walk the KDL tree from `segment`, filling segments_ and
+  // segments_fixed_.
+  virtual void addChildren(const KDL::SegmentMap::const_iterator segment);
+
+  // NOTE(review): the template arguments of these maps (and the #include
+  // targets above) appear to have been stripped when this patch was
+  // generated -- upstream these are std::map<std::string, SegmentPair>.
+  // Confirm against the original robot_state_publisher sources.
+  std::map segments_, segments_fixed_;
+  const urdf::Model& model_;
+  tf2_ros::TransformBroadcaster tf_broadcaster_;
+  tf2_ros::StaticTransformBroadcaster static_tf_broadcaster_;
+};
+
+}
+
+#endif
diff --git a/qt_simulator/robot_state_publisher/package.xml b/qt_simulator/robot_state_publisher/package.xml
new file mode 100644
index 0000000..63bac88
--- /dev/null
+++ b/qt_simulator/robot_state_publisher/package.xml
@@ -0,0 +1,53 @@
+
+ robot_state_publisher
+ 1.13.7
+
+ This package allows you to publish the state of a robot to
+ tf. Once the state gets published, it is
+ available to all components in the system that also use tf.
+ The package takes the joint angles of the robot as input
+ and publishes the 3D poses of the robot links, using a kinematic
+ tree model of the robot. The package can both be used as a library
+ and as a ROS node. This package has been well tested and the code
+ is stable. No major changes are planned in the near future.
+
+
+ Ioan Sucan
+ Jackie Kay
+ Wim Meeussen
+
+ Chris Lalancette
+ Ian McMahon
+ Shane Loretz
+
+ BSD
+
+ http://wiki.ros.org/robot_state_publisher
+
+ catkin
+
+ eigen
+ kdl_parser
+ orocos_kdl
+ rosconsole
+ roscpp
+ rostime
+ sensor_msgs
+ tf
+ tf2_ros
+ tf2_kdl
+ liburdfdom-headers-dev
+ catkin
+ eigen
+ kdl_parser
+ orocos_kdl
+ rosconsole
+ roscpp
+ rostime
+ sensor_msgs
+ tf
+ tf2_ros
+ tf2_kdl
+
+ rostest
+
diff --git a/qt_simulator/robot_state_publisher/src/joint_state_listener.cpp b/qt_simulator/robot_state_publisher/src/joint_state_listener.cpp
new file mode 100644
index 0000000..1428668
--- /dev/null
+++ b/qt_simulator/robot_state_publisher/src/joint_state_listener.cpp
@@ -0,0 +1,198 @@
+/*********************************************************************
+* Software License Agreement (BSD License)
+*
+* Copyright (c) 2008, Willow Garage, Inc.
+* All rights reserved.
+*
+* Redistribution and use in source and binary forms, with or without
+* modification, are permitted provided that the following conditions
+* are met:
+*
+* * Redistributions of source code must retain the above copyright
+* notice, this list of conditions and the following disclaimer.
+* * Redistributions in binary form must reproduce the above
+* copyright notice, this list of conditions and the following
+* disclaimer in the documentation and/or other materials provided
+* with the distribution.
+* * Neither the name of the Willow Garage nor the names of its
+* contributors may be used to endorse or promote products derived
+* from this software without specific prior written permission.
+*
+* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
+* FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
+* COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
+* INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
+* BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+* CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
+* LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
+* ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
+* POSSIBILITY OF SUCH DAMAGE.
+*********************************************************************/
+
+/* Author: Wim Meeussen */
+
+#include <kdl/tree.hpp>
+#include <ros/ros.h>
+#include <urdf/model.h>
+#include <kdl_parser/kdl_parser.hpp>
+
+#include "robot_state_publisher/robot_state_publisher.h"
+#include "robot_state_publisher/joint_state_listener.h"
+
+using namespace std;
+using namespace ros;
+using namespace KDL;
+using namespace robot_state_publisher;
+
+/// Constructor: reads the node's ROS parameters, subscribes to the
+/// joint_states topic, and starts the timer that publishes fixed-joint
+/// transforms.
+/// \param tree  KDL kinematic tree parsed from the URDF
+/// \param m     mimic-joint map (mimicking joint name -> mimic info)
+/// \param model the parsed URDF model (kept by the state publisher)
+JointStateListener::JointStateListener(const KDL::Tree& tree, const MimicMap& m, const urdf::Model& model)
+  : state_publisher_(tree, model), mimic_(m)
+{
+  ros::NodeHandle n_tilde("~");
+  ros::NodeHandle n;
+
+  // set publish frequency
+  double publish_freq;
+  n_tilde.param("publish_frequency", publish_freq, 50.0);
+  // set whether to use the /tf_static latched static transform broadcaster
+  n_tilde.param("use_tf_static", use_tf_static_, true);
+  // ignore_timestamp_ == true, joins_states messages are accepted, no matter their timestamp
+  n_tilde.param("ignore_timestamp", ignore_timestamp_, false);
+  // get the tf_prefix parameter from the closest namespace
+  std::string tf_prefix_key;
+  n_tilde.searchParam("tf_prefix", tf_prefix_key);
+  n_tilde.param(tf_prefix_key, tf_prefix_, std::string(""));
+  // clamp the frequency to >= 1 Hz so the interval is always finite and positive
+  publish_interval_ = ros::Duration(1.0/max(publish_freq, 1.0));
+
+  // Setting tcpNoNelay tells the subscriber to ask publishers that connect
+  // to set TCP_NODELAY on their side. This prevents some joint_state messages
+  // from being bundled together, increasing the latency of one of the messages.
+  ros::TransportHints transport_hints;
+  transport_hints.tcpNoDelay(true);
+  // subscribe to joint state
+  joint_state_sub_ = n.subscribe("joint_states", 1, &JointStateListener::callbackJointState, this, transport_hints);
+
+  // trigger to publish fixed joints
+  // if using static transform broadcaster, this will be a oneshot trigger and only run once
+  timer_ = n_tilde.createTimer(publish_interval_, &JointStateListener::callbackFixedJoint, this, use_tf_static_);
+}
+
+
+/// Destructor: nothing explicit to clean up — the subscriber, timer and
+/// broadcasters release their resources in their own destructors (RAII).
+JointStateListener::~JointStateListener()
+{}
+
+
+/// Timer callback: republishes the transforms of all fixed joints.
+/// When use_tf_static_ is true the timer was created as one-shot, so this
+/// runs once and the transforms go out latched on /tf_static.
+void JointStateListener::callbackFixedJoint(const ros::TimerEvent& e)
+{
+  // the timer event metadata is not needed here
+  (void)e;
+  state_publisher_.publishFixedTransforms(tf_prefix_, use_tf_static_);
+}
+
+/// Callback for incoming sensor_msgs/JointState messages.
+/// Validates the message, detects ROS-clock resets (bag playback),
+/// rate-limits publishing to publish_interval_, applies mimic-joint
+/// multipliers/offsets, and forwards the joint positions to the
+/// RobotStatePublisher.
+/// NOTE(review): angle-bracket tokens (template arguments and `<`
+/// comparisons) had been stripped from this hunk by a sanitizer; they are
+/// restored here to match the upstream ros/robot_state_publisher code.
+void JointStateListener::callbackJointState(const JointStateConstPtr& state)
+{
+  if (state->name.size() != state->position.size()){
+    if (state->position.empty()){
+      // Some publishers send name-only JointState messages; warn (throttled)
+      // rather than spamming errors.
+      const int throttleSeconds = 300;
+      ROS_WARN_THROTTLE(throttleSeconds,
+          "Robot state publisher ignored a JointState message about joint(s) "
+          "\"%s\"(,...) whose position member was empty. This message will "
+          "not reappear for %d seconds.", state->name[0].c_str(),
+          throttleSeconds);
+    } else {
+      ROS_ERROR("Robot state publisher ignored an invalid JointState message");
+    }
+    return;
+  }
+
+  // check if we moved backwards in time (e.g. when playing a bag file)
+  ros::Time now = ros::Time::now();
+  if (last_callback_time_ > now) {
+    // force re-publish of joint transforms
+    ROS_WARN("Moved backwards in time (probably because ROS clock was reset), re-publishing joint transforms!");
+    last_publish_time_.clear();
+  }
+  ros::Duration warning_threshold(30.0);
+  if ((state->header.stamp + warning_threshold) < now) {
+    ROS_WARN_THROTTLE(10, "Received JointState is %f seconds old.", (now - state->header.stamp).toSec());
+  }
+  last_callback_time_ = now;
+
+  // determine least recently published joint
+  ros::Time last_published = now;
+  for (unsigned int i = 0; i < state->name.size(); i++) {
+    ros::Time t = last_publish_time_[state->name[i]];
+    last_published = (t < last_published) ? t : last_published;
+  }
+  // note: if a joint was seen for the first time,
+  // then last_published is zero.
+
+  // check if we need to publish
+  if (ignore_timestamp_ || state->header.stamp >= last_published + publish_interval_) {
+    // get joint positions from state message
+    map<string, double> joint_positions;
+    for (unsigned int i = 0; i < state->name.size(); i++) {
+      joint_positions.insert(make_pair(state->name[i], state->position[i]));
+    }
+
+    // mimic joints track another joint: pos = source * multiplier + offset
+    for (MimicMap::iterator i = mimic_.begin(); i != mimic_.end(); i++) {
+      if (joint_positions.find(i->second->joint_name) != joint_positions.end()) {
+        double pos = joint_positions[i->second->joint_name] * i->second->multiplier + i->second->offset;
+        joint_positions.insert(make_pair(i->first, pos));
+      }
+    }
+
+    state_publisher_.publishTransforms(joint_positions, state->header.stamp, tf_prefix_);
+
+    // store publish time in joint map
+    for (unsigned int i = 0; i < state->name.size(); i++) {
+      last_publish_time_[state->name[i]] = state->header.stamp;
+    }
+  }
+}
+
+// ----------------------------------
+// ----- MAIN -----------------------
+// ----------------------------------
+// Entry point of the robot_state_publisher node: loads the URDF from the
+// robot_description parameter, builds the KDL tree, collects mimic joints,
+// and spins a JointStateListener until shutdown.
+int main(int argc, char** argv)
+{
+  // Initialize ros
+  ros::init(argc, argv, "robot_state_publisher");
+  NodeHandle node;
+
+  ///////////////////////////////////////// begin deprecation warning
+  // Warn users who still launch this binary under its pre-rename name.
+  std::string exe_name = argv[0];
+  std::size_t slash = exe_name.find_last_of("/");
+  if (slash != std::string::npos) {
+    exe_name = exe_name.substr(slash + 1);
+  }
+  if (exe_name == "state_publisher") {
+    ROS_WARN("The 'state_publisher' executable is deprecated. Please use 'robot_state_publisher' instead");
+  }
+  ///////////////////////////////////////// end deprecation warning
+
+  // gets the location of the robot description on the parameter server
+  urdf::Model model;
+  if (!model.initParam("robot_description"))
+    return -1;
+
+  // convert the URDF into a KDL kinematic tree
+  KDL::Tree tree;
+  if (!kdl_parser::treeFromUrdfModel(model, tree)) {
+    ROS_ERROR("Failed to extract kdl tree from xml robot description");
+    return -1;
+  }
+
+  // collect all mimic joints so their positions can be derived later
+  MimicMap mimic;
+
+  for(std::map< std::string, urdf::JointSharedPtr >::iterator i = model.joints_.begin(); i != model.joints_.end(); i++) {
+    if(i->second->mimic) {
+      mimic.insert(make_pair(i->first, i->second->mimic));
+    }
+  }
+
+  JointStateListener state_publisher(tree, mimic, model);
+  ros::spin();
+
+  return 0;
+}
diff --git a/qt_simulator/robot_state_publisher/src/robot_state_publisher.cpp b/qt_simulator/robot_state_publisher/src/robot_state_publisher.cpp
new file mode 100644
index 0000000..054b33c
--- /dev/null
+++ b/qt_simulator/robot_state_publisher/src/robot_state_publisher.cpp
@@ -0,0 +1,131 @@
+/*********************************************************************
+ * Software License Agreement (BSD License)
+ *
+ * Copyright (c) 2008, Willow Garage, Inc.
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ *
+ * * Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * * Redistributions in binary form must reproduce the above
+ * copyright notice, this list of conditions and the following
+ * disclaimer in the documentation and/or other materials provided
+ * with the distribution.
+ * * Neither the name of the Willow Garage nor the names of its
+ * contributors may be used to endorse or promote products derived
+ * from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+ * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+ * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
+ * FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
+ * COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
+ * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
+ * BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+ * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+ * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
+ * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
+ * ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
+ * POSSIBILITY OF SUCH DAMAGE.
+ *********************************************************************/
+
+/* Author: Wim Meeussen */
+
+#include
+#include
+#include
+
+#include "robot_state_publisher/robot_state_publisher.h"
+
+using namespace std;
+using namespace ros;
+
+namespace robot_state_publisher {
+
+/// Constructor: stores a reference to the URDF model and recursively walks
+/// the KDL tree, sorting every segment into the fixed or moving joint map.
+/// \param tree  KDL kinematic tree parsed from the URDF
+/// \param model the parsed URDF model (held by reference — must outlive this object)
+RobotStatePublisher::RobotStatePublisher(const KDL::Tree& tree, const urdf::Model& model)
+  : model_(model)
+{
+  // walk the tree and add segments to segments_
+  addChildren(tree.getRootSegment());
+}
+
+// add children to correct maps
+// add children to correct maps
+// Recursively walks the KDL tree below 'segment': each child segment is
+// stored in segments_fixed_ (fixed joints) or segments_ (moving joints),
+// keyed by joint name. URDF FLOATING joints are skipped because their
+// transform cannot be derived from joint_states.
+// NOTE(review): the vector template argument and the for-loop header
+// (including the `const KDL::Segment& child = ...` line) had been stripped
+// from this hunk; restored to match upstream ros/robot_state_publisher.
+void RobotStatePublisher::addChildren(const KDL::SegmentMap::const_iterator segment)
+{
+  const std::string& root = GetTreeElementSegment(segment->second).getName();
+
+  const std::vector<KDL::SegmentMap::const_iterator>& children = GetTreeElementChildren(segment->second);
+  for (unsigned int i = 0; i < children.size(); i++) {
+    const KDL::Segment& child = GetTreeElementSegment(children[i]->second);
+    SegmentPair s(GetTreeElementSegment(children[i]->second), root, child.getName());
+    if (child.getJoint().getType() == KDL::Joint::None) {
+      if (model_.getJoint(child.getJoint().getName()) && model_.getJoint(child.getJoint().getName())->type == urdf::Joint::FLOATING) {
+        ROS_INFO("Floating joint. Not adding segment from %s to %s. This TF can not be published based on joint_states info", root.c_str(), child.getName().c_str());
+      }
+      else {
+        segments_fixed_.insert(make_pair(child.getJoint().getName(), s));
+        ROS_DEBUG("Adding fixed segment from %s to %s", root.c_str(), child.getName().c_str());
+      }
+    }
+    else {
+      segments_.insert(make_pair(child.getJoint().getName(), s));
+      ROS_DEBUG("Adding moving segment from %s to %s", root.c_str(), child.getName().c_str());
+    }
+    // recurse into the subtree rooted at this child
+    addChildren(children[i]);
+  }
+}
+
+
+// publish moving transforms
+// publish moving transforms
+// Publishes one TF transform per moving joint found in joint_positions,
+// stamped with 'time', with frame ids resolved against tf_prefix. Joints
+// present in the message but absent from the URDF get a throttled warning.
+// \param joint_positions map of joint name -> joint position (radians/meters)
+// \param time            timestamp to stamp the transforms with
+// \param tf_prefix       prefix prepended to all frame ids via tf::resolve
+// NOTE(review): template arguments had been stripped from this hunk;
+// restored to match upstream ros/robot_state_publisher.
+void RobotStatePublisher::publishTransforms(const map<string, double>& joint_positions, const Time& time, const std::string& tf_prefix)
+{
+  ROS_DEBUG("Publishing transforms for moving joints");
+  std::vector<geometry_msgs::TransformStamped> tf_transforms;
+
+  // loop over all joints
+  for (map<string, double>::const_iterator jnt = joint_positions.begin(); jnt != joint_positions.end(); jnt++) {
+    std::map<std::string, SegmentPair>::const_iterator seg = segments_.find(jnt->first);
+    if (seg != segments_.end()) {
+      geometry_msgs::TransformStamped tf_transform = tf2::kdlToTransform(seg->second.segment.pose(jnt->second));
+      tf_transform.header.stamp = time;
+      tf_transform.header.frame_id = tf::resolve(tf_prefix, seg->second.root);
+      tf_transform.child_frame_id = tf::resolve(tf_prefix, seg->second.tip);
+      tf_transforms.push_back(tf_transform);
+    }
+    else {
+      ROS_WARN_THROTTLE(10, "Joint state with name: \"%s\" was received but not found in URDF", jnt->first.c_str());
+    }
+  }
+  // send all transforms in a single batched call
+  tf_broadcaster_.sendTransform(tf_transforms);
+}
+
+// publish fixed transforms
+// publish fixed transforms
+// Publishes the transform of every fixed segment. With use_tf_static the
+// transforms go out once on the latched /tf_static topic; otherwise they are
+// sent on /tf with a timestamp pushed 0.5 s into the future so listeners do
+// not see them expire between periodic republications.
+// NOTE(review): template arguments had been stripped from this hunk
+// (restored to match upstream ros/robot_state_publisher); the unused,
+// shadowed outer `tf_transform` declaration from upstream was also removed.
+void RobotStatePublisher::publishFixedTransforms(const std::string& tf_prefix, bool use_tf_static)
+{
+  ROS_DEBUG("Publishing transforms for fixed joints");
+  std::vector<geometry_msgs::TransformStamped> tf_transforms;
+
+  // loop over all fixed segments
+  for (map<string, SegmentPair>::const_iterator seg = segments_fixed_.begin(); seg != segments_fixed_.end(); seg++) {
+    geometry_msgs::TransformStamped tf_transform = tf2::kdlToTransform(seg->second.segment.pose(0));
+    tf_transform.header.stamp = ros::Time::now();
+    if (!use_tf_static) {
+      // future-date dynamic fixed transforms so they stay valid until the
+      // next periodic republish
+      tf_transform.header.stamp += ros::Duration(0.5);
+    }
+    tf_transform.header.frame_id = tf::resolve(tf_prefix, seg->second.root);
+    tf_transform.child_frame_id = tf::resolve(tf_prefix, seg->second.tip);
+    tf_transforms.push_back(tf_transform);
+  }
+  if (use_tf_static) {
+    static_tf_broadcaster_.sendTransform(tf_transforms);
+  }
+  else {
+    tf_broadcaster_.sendTransform(tf_transforms);
+  }
+}
+
+}
diff --git a/qt_simulator/robot_state_publisher/test/one_link.urdf b/qt_simulator/robot_state_publisher/test/one_link.urdf
new file mode 100644
index 0000000..45d48f5
--- /dev/null
+++ b/qt_simulator/robot_state_publisher/test/one_link.urdf
@@ -0,0 +1,3 @@
+
+
+
diff --git a/qt_simulator/robot_state_publisher/test/pr2.urdf b/qt_simulator/robot_state_publisher/test/pr2.urdf
new file mode 100644
index 0000000..ae3ee20
--- /dev/null
+++ b/qt_simulator/robot_state_publisher/test/pr2.urdf
@@ -0,0 +1,3132 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ true
+ 1000.0
+
+
+
+ true
+ 1.0
+ 5
+
+ power_state
+ 10.0
+ 87.78
+ -474
+ 525
+ 15.52
+ 16.41
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ 640
+ 640
+ 1
+ 0.0 0.0 0.0
+ false
+ -129.998394137
+ 129.998394137
+ 0.08
+ 10.0
+ 0.01
+ 20
+
+ 0.005
+ true
+ 20
+ base_scan
+ base_laser_link
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ -79.2380952381
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ 79.2380952381
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ -79.2380952381
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ -79.2380952381
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ 79.2380952381
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ -79.2380952381
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ -79.2380952381
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ 79.2380952381
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ -79.2380952381
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ -79.2380952381
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ 79.2380952381
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ -79.2380952381
+
+
+
+
+
+ false
+
+ base_link_geom
+ 100.0
+
+ true
+ 100.0
+ base_bumper
+
+
+
+
+
+
+
+
+
+ true
+ 100.0
+ base_link
+ base_pose_ground_truth
+ 0.01
+ map
+ 25.7 25.7 0
+
+ 0 0 0
+
+
+ base_footprint
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ torso_lift_link_geom
+ 100.0
+
+ true
+ 100.0
+ torso_lift_bumper
+
+
+
+
+
+
+
+ -52143.33
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ true
+ 100.0
+ imu_link
+ torso_lift_imu/data
+ 2.89e-08
+ 0 0 0
+ 0 0 0
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ 6.0
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ 6.0
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ R8G8B8
+ 2448 2050
+ 45
+ 0.1
+ 100
+ 20.0
+
+ true
+ 20.0
+ /prosilica/image_raw
+ /prosilica/camera_info
+ /prosilica/request_image
+ high_def_frame
+ 1224.5
+ 1224.5
+ 1025.5
+ 2955
+
+ 0.00000001
+ 0.00000001
+ 0.00000001
+ 0.00000001
+ 0.00000001
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ 640 480
+ BAYER_BGGR8
+ 90
+ 0.1
+ 100
+ 25.0
+
+ true
+ 25.0
+ wide_stereo/left/image_raw
+ wide_stereo/left/camera_info
+ wide_stereo_optical_frame
+ 0
+ 320.5
+ 320.5
+ 240.5
+
+
+ 320
+ 0.00000001
+ 0.00000001
+ 0.00000001
+ 0.00000001
+ 0.00000001
+
+
+
+ true
+ PR2/Blue
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ 640 480
+ BAYER_BGGR8
+ 90
+ 0.1
+ 100
+ 25.0
+
+ true
+ 25.0
+ wide_stereo/right/image_raw
+ wide_stereo/right/camera_info
+ wide_stereo_optical_frame
+ 0.09
+ 320.5
+ 320.5
+ 240.5
+
+
+ 320
+ 0.00000001
+ 0.00000001
+ 0.00000001
+ 0.00000001
+ 0.00000001
+
+
+
+ true
+ PR2/Blue
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ 640 480
+ L8
+ 45
+ 0.1
+ 100
+ 25.0
+
+ true
+ 25.0
+ narrow_stereo/left/image_raw
+ narrow_stereo/left/camera_info
+ narrow_stereo_optical_frame
+ 0
+ 320.5
+ 320.5
+ 240.5
+
+
+ 772.55
+ 0.00000001
+ 0.00000001
+ 0.00000001
+ 0.00000001
+ 0.00000001
+
+
+
+ true
+ PR2/Blue
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ 640 480
+ L8
+ 45
+ 0.1
+ 100
+ 25.0
+
+ true
+ 25.0
+ narrow_stereo/right/image_raw
+ narrow_stereo/right/camera_info
+ narrow_stereo_optical_frame
+ 0.09
+ 320.5
+ 320.5
+ 240.5
+
+
+ 772.55
+ 0.00000001
+ 0.00000001
+ 0.00000001
+ 0.00000001
+ 0.00000001
+
+
+
+ true
+ PR2/Blue
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ true
+ 15.0
+ stereo_projection_pattern_high_res_red.png
+ projector_wg6802418_child_frame
+ stereo_projection_pattern_filter.png
+ projector_wg6802418_controller/image
+ projector_wg6802418_controller/projector
+ 0.785398163397
+ 0.4
+ 10
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ 640
+ 640
+ 1
+ 0.0 0.0 0.0
+ false
+ -79.9999999086
+ 79.9999999086
+ 0.08
+ 10.0
+ 0.01
+ 40
+
+ 0.005
+ true
+ 40
+ tilt_scan
+ laser_tilt_link
+
+
+
+
+
+
+
+
+
+ -6.05
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ true
+
+
+
+
+
+
+
+
+
+ 32.6525111499
+
+
+ true
+
+
+
+
+
+
+ true
+
+
+
+
+
+
+
+
+ 63.1552452977
+
+
+
+
+
+ 61.8948225713
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ true
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ true
+
+
+
+
+
+
+
+ -90.5142857143
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ -1.0
+
+
+ true
+
+
+
+
+
+
+
+
+ -36.167452007
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ true
+
+
+ true
+
+
+
+
+
+
+ true
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ true
+
+ r_gripper_l_finger_link_geom
+ 100.0
+
+ true
+ 100.0
+ r_gripper_l_finger_link
+ r_gripper_l_finger_bumper
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ true
+
+ r_gripper_r_finger_link_geom
+ 100.0
+
+ true
+ r_gripper_r_finger_link
+ 100.0
+ r_gripper_r_finger_bumper
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ true
+ false
+
+ r_gripper_l_finger_tip_link_geom
+ 100.0
+
+ true
+ r_gripper_l_finger_tip_link
+ 100.0
+ r_gripper_l_finger_tip_bumper
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ true
+ false
+
+ r_gripper_r_finger_tip_link_geom
+ 100.0
+
+ true
+ r_gripper_r_finger_tip_link
+ 100.0
+ r_gripper_r_finger_tip_bumper
+
+
+
+
+
+
+
+
+
+
+ true
+ 100.0
+ r_gripper_l_finger_link
+ r_gripper_l_finger_pose_ground_truth
+ 0.0
+ base_link
+
+
+
+ true
+ 100.0
+ r_gripper_l_finger_link
+ r_gripper_l_finger_force_ground_truth
+ r_gripper_l_finger_link
+
+
+
+
+
+
+
+
+
+
+
+ true
+ 0.17126
+ 7.7562e-05
+ 1.49095e-06
+ -9.83385e-06
+ 0.000197083
+ -3.06125e-06
+ 0.000181054
+ 0.03598
+ 0.0173
+ -0.00164
+ 0.82991 -0.157 0.790675
+ 0 -0 0
+ true
+ false
+
+
+ true
+ 0.17389
+ 7.73841e-05
+ -2.09309e-06
+ -8.36228e-06
+ 0.000198474
+ 2.4611e-06
+ 0.00018107
+ 0.03576
+ -0.01736
+ -0.00095
+ 0.82991 -0.219 0.790675
+ 0 -0 0
+ true
+ false
+
+
+
+
+ r_gripper_r_parallel_link
+ r_gripper_palm_link
+ r_gripper_palm_link
+ 0 0 -1
+ 0.2
+ 0.05891 -0.031 0
+
+
+ r_gripper_l_parallel_link
+ r_gripper_palm_link
+ r_gripper_palm_link
+ 0 0 1
+ 0.2
+ 0.05891 0.031 0
+
+
+ r_gripper_r_parallel_link
+ r_gripper_r_finger_tip_link
+ r_gripper_r_finger_tip_link
+ 0 0 1
+ -0.018 -0.021 0
+
+
+ r_gripper_l_parallel_link
+ r_gripper_l_finger_tip_link
+ r_gripper_l_finger_tip_link
+ 0 0 1
+ -0.018 0.021 0
+
+
+ r_gripper_l_finger_tip_link
+ r_gripper_r_finger_tip_link
+ r_gripper_r_finger_tip_link
+ 0 1 0
+
+
+
+ true
+
+
+
+ true
+
+
+
+
+
+
+
+
+
+
+
+
+ true
+
+ r_gripper_palm_link_geom
+ 100.0
+
+ true
+ 100.0
+ r_gripper_palm_link
+ r_gripper_palm_bumper
+
+
+
+
+
+
+ true
+ 100.0
+ r_gripper_palm_link
+ r_gripper_palm_pose_ground_truth
+ 0 0 0
+ 0 0 0
+ 0.0
+ map
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ true
+
+
+
+
+
+
+
+
+
+ 32.6525111499
+
+
+ true
+
+
+
+
+
+
+ true
+
+
+
+
+
+
+
+
+ 63.1552452977
+
+
+
+
+
+ 61.8948225713
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ true
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ true
+
+
+
+
+
+
+
+ -90.5142857143
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ -1.0
+
+
+ true
+
+
+
+
+
+
+
+
+ -36.167452007
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ true
+
+
+ true
+
+
+
+
+
+
+ true
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ true
+
+ l_gripper_l_finger_link_geom
+ 100.0
+
+ true
+ 100.0
+ l_gripper_l_finger_link
+ l_gripper_l_finger_bumper
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ true
+
+ l_gripper_r_finger_link_geom
+ 100.0
+
+ true
+ l_gripper_r_finger_link
+ 100.0
+ l_gripper_r_finger_bumper
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ true
+ false
+
+ l_gripper_l_finger_tip_link_geom
+ 100.0
+
+ true
+ l_gripper_l_finger_tip_link
+ 100.0
+ l_gripper_l_finger_tip_bumper
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ true
+ false
+
+ l_gripper_r_finger_tip_link_geom
+ 100.0
+
+ true
+ l_gripper_r_finger_tip_link
+ 100.0
+ l_gripper_r_finger_tip_bumper
+
+
+
+
+
+
+
+
+
+
+ true
+ 100.0
+ l_gripper_l_finger_link
+ l_gripper_l_finger_pose_ground_truth
+ 0.0
+ base_link
+
+
+
+ true
+ 100.0
+ l_gripper_l_finger_link
+ l_gripper_l_finger_force_ground_truth
+ l_gripper_l_finger_link
+
+
+
+
+
+
+
+
+
+
+
+ true
+ 0.17126
+ 7.7562e-05
+ 1.49095e-06
+ -9.83385e-06
+ 0.000197083
+ -3.06125e-06
+ 0.000181054
+ 0.03598
+ 0.0173
+ -0.00164
+ 0.82991 0.219 0.790675
+ 0 -0 0
+ true
+ false
+
+
+ true
+ 0.17389
+ 7.73841e-05
+ -2.09309e-06
+ -8.36228e-06
+ 0.000198474
+ 2.4611e-06
+ 0.00018107
+ 0.03576
+ -0.01736
+ -0.00095
+ 0.82991 0.157 0.790675
+ 0 -0 0
+ true
+ false
+
+
+
+
+ l_gripper_r_parallel_link
+ l_gripper_palm_link
+ l_gripper_palm_link
+ 0 0 -1
+ 0.2
+ 0.05891 -0.031 0
+
+
+ l_gripper_l_parallel_link
+ l_gripper_palm_link
+ l_gripper_palm_link
+ 0 0 1
+ 0.2
+ 0.05891 0.031 0
+
+
+ l_gripper_r_parallel_link
+ l_gripper_r_finger_tip_link
+ l_gripper_r_finger_tip_link
+ 0 0 1
+ -0.018 -0.021 0
+
+
+ l_gripper_l_parallel_link
+ l_gripper_l_finger_tip_link
+ l_gripper_l_finger_tip_link
+ 0 0 1
+ -0.018 0.021 0
+
+
+ l_gripper_l_finger_tip_link
+ l_gripper_r_finger_tip_link
+ l_gripper_r_finger_tip_link
+ 0 1 0
+
+
+
+ true
+
+
+
+ true
+
+
+
+
+
+
+
+
+
+
+
+
+ true
+
+ l_gripper_palm_link_geom
+ 100.0
+
+ true
+ 100.0
+ l_gripper_palm_link
+ l_gripper_palm_bumper
+
+
+
+
+
+
+ true
+ 100.0
+ l_gripper_palm_link
+ l_gripper_palm_pose_ground_truth
+ 0 0 0
+ 0 0 0
+ 0.0
+ map
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ 640 480
+ L8
+ 90
+ 0.1
+ 100
+ 25.0
+
+ true
+ 25.0
+ l_forearm_cam/image_raw
+ l_forearm_cam/camera_info
+ l_forearm_cam_optical_frame
+ 0
+ 320.5
+ 320.5
+ 240.5
+
+
+ 320
+ 0.00000001
+ 0.00000001
+ 0.00000001
+ 0.00000001
+ 0.00000001
+
+
+
+ true
+ PR2/Blue
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ 640 480
+ L8
+ 90
+ 0.1
+ 100
+ 25.0
+
+ true
+ 25.0
+ r_forearm_cam/image_raw
+ r_forearm_cam/camera_info
+ r_forearm_cam_optical_frame
+ 0
+ 320.5
+ 320.5
+ 240.5
+
+
+ 320
+ 0.00000001
+ 0.00000001
+ 0.00000001
+ 0.00000001
+ 0.00000001
+
+
+
+ true
+ PR2/Blue
+
+
+
diff --git a/qt_simulator/robot_state_publisher/test/test_joint_states_bag.cpp b/qt_simulator/robot_state_publisher/test/test_joint_states_bag.cpp
new file mode 100644
index 0000000..848b4bc
--- /dev/null
+++ b/qt_simulator/robot_state_publisher/test/test_joint_states_bag.cpp
@@ -0,0 +1,113 @@
+/*********************************************************************
+* Software License Agreement (BSD License)
+*
+* Copyright (c) 2008, Willow Garage, Inc.
+* All rights reserved.
+*
+* Redistribution and use in source and binary forms, with or without
+* modification, are permitted provided that the following conditions
+* are met:
+*
+* * Redistributions of source code must retain the above copyright
+* notice, this list of conditions and the following disclaimer.
+* * Redistributions in binary form must reproduce the above
+* copyright notice, this list of conditions and the following
+* disclaimer in the documentation and/or other materials provided
+* with the distribution.
+* * Neither the name of the Willow Garage nor the names of its
+* contributors may be used to endorse or promote products derived
+* from this software without specific prior written permission.
+*
+* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
+* FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
+* COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
+* INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
+* BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+* CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
+* LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
+* ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
+* POSSIBILITY OF SUCH DAMAGE.
+*********************************************************************/
+
+/* Author: Wim Meeussen */
+
+#include
+
+#include
+#include
+#include
+#include
+#include
+#include
+#include
+
+#include "robot_state_publisher/joint_state_listener.h"
+
+using namespace ros;
+using namespace tf2_ros;
+using namespace robot_state_publisher;
+
+
+int g_argc;
+char** g_argv;
+
+#define EPS 0.01
+
+/// Minimal gtest fixture for the bag-driven robot_state_publisher test.
+/// The publisher pointer is unused by the test body itself; the node under
+/// test is started externally by the accompanying roslaunch file.
+class TestPublisher : public testing::Test
+{
+public:
+  JointStateListener* publisher;
+
+protected:
+  /// constructor
+  TestPublisher()
+  {}
+
+  /// Destructor
+  ~TestPublisher()
+  {}
+};
+
+
+// Integration test: waits for an externally played joint_states bag, then
+// checks that the expected TF chains exist and spot-checks two transforms
+// against known-good translations (tolerance EPS).
+TEST_F(TestPublisher, test)
+{
+  ROS_INFO("Creating tf listener");
+  Buffer buffer;
+  TransformListener tf(buffer);
+
+  ROS_INFO("Waiting for bag to complete");
+  // give the bag playback time to finish before querying TF
+  Duration(15.0).sleep();
+
+  // frames published by the node under test must be connected...
+  ASSERT_TRUE(buffer.canTransform("base_link", "torso_lift_link", Time()));
+  ASSERT_TRUE(buffer.canTransform("base_link", "r_gripper_palm_link", Time()));
+  ASSERT_TRUE(buffer.canTransform("base_link", "r_gripper_palm_link", Time()));
+  ASSERT_TRUE(buffer.canTransform("l_gripper_palm_link", "r_gripper_palm_link", Time()));
+  ASSERT_TRUE(buffer.canTransform("l_gripper_palm_link", "fl_caster_r_wheel_link", Time()));
+  // ...and a frame absent from the URDF must not be
+  ASSERT_FALSE(buffer.canTransform("base_link", "wim_link", Time()));
+
+  // spot-check the gripper pose relative to the base
+  geometry_msgs::TransformStamped t = buffer.lookupTransform("base_link", "r_gripper_palm_link", Time());
+  EXPECT_NEAR(t.transform.translation.x, 0.769198, EPS);
+  EXPECT_NEAR(t.transform.translation.y, -0.188800, EPS);
+  EXPECT_NEAR(t.transform.translation.z, 0.764914, EPS);
+
+  // and the relative pose between the two grippers
+  t = buffer.lookupTransform("l_gripper_palm_link", "r_gripper_palm_link", Time());
+  EXPECT_NEAR(t.transform.translation.x, 0.000384222, EPS);
+  EXPECT_NEAR(t.transform.translation.y, -0.376021, EPS);
+  EXPECT_NEAR(t.transform.translation.z, -1.0858e-05, EPS); SUCCEED();
+}
+
+
+// Test entry point: initializes gtest and a ROS node (required for the TF
+// listener inside the fixture), then runs all registered tests.
+int main(int argc, char** argv)
+{
+  testing::InitGoogleTest(&argc, argv);
+  ros::init(argc, argv, "test_robot_state_publisher");
+
+  // keep the original command line around for tests that may need it
+  g_argc = argc;
+  g_argv = argv;
+  int res = RUN_ALL_TESTS();
+
+  return res;
+}
diff --git a/qt_simulator/robot_state_publisher/test/test_joint_states_bag.launch b/qt_simulator/robot_state_publisher/test/test_joint_states_bag.launch
new file mode 100644
index 0000000..ee27a94
--- /dev/null
+++ b/qt_simulator/robot_state_publisher/test/test_joint_states_bag.launch
@@ -0,0 +1,10 @@
+
+
+
+
+
+
+
+
+
+
diff --git a/qt_simulator/robot_state_publisher/test/test_one_link.cpp b/qt_simulator/robot_state_publisher/test/test_one_link.cpp
new file mode 100644
index 0000000..509c9d6
--- /dev/null
+++ b/qt_simulator/robot_state_publisher/test/test_one_link.cpp
@@ -0,0 +1,107 @@
+/*********************************************************************
+* Software License Agreement (BSD License)
+*
+* Copyright (c) 2008, Willow Garage, Inc.
+* All rights reserved.
+*
+* Redistribution and use in source and binary forms, with or without
+* modification, are permitted provided that the following conditions
+* are met:
+*
+* * Redistributions of source code must retain the above copyright
+* notice, this list of conditions and the following disclaimer.
+* * Redistributions in binary form must reproduce the above
+* copyright notice, this list of conditions and the following
+* disclaimer in the documentation and/or other materials provided
+* with the distribution.
+* * Neither the name of the Willow Garage nor the names of its
+* contributors may be used to endorse or promote products derived
+* from this software without specific prior written permission.
+*
+* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
+* FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
+* COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
+* INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
+* BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+* CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
+* LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
+* ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
+* POSSIBILITY OF SUCH DAMAGE.
+*********************************************************************/
+
+/* Author: Wim Meeussen */
+
+#include
+
+#include
+#include
+#include
+#include
+#include
+#include
+
+#include "robot_state_publisher/joint_state_listener.h"
+
+using namespace ros;
+using namespace tf2_ros;
+using namespace robot_state_publisher;
+
+#define EPS 0.01
+
+// Fixture for the one-link publisher test.
+class TestPublisher : public testing::Test
+{
+public:
+  // Listener under test. The original left this raw pointer uninitialized;
+  // default it to nullptr so any accidental read is well-defined.
+  JointStateListener* publisher{nullptr};
+
+protected:
+  /// constructor
+  TestPublisher()
+  {}
+
+  /// Destructor
+  ~TestPublisher()
+  {}
+};
+
+
+// With a single-link URDF there is no joint to publish; the only transform
+// that must resolve is the identity transform from the link to itself.
+// (Dead commented-out joint-state publishing code from the original removed.)
+TEST_F(TestPublisher, test)
+{
+  {
+    // The robot model must be on the parameter server for the publisher node.
+    ros::NodeHandle n_tilde;
+    std::string robot_description;
+    ASSERT_TRUE(n_tilde.getParam("robot_description", robot_description));
+  }
+
+  ROS_INFO("Creating tf listener");
+  Buffer buffer;
+  TransformListener tf(buffer);
+
+  // A frame can always be transformed to itself, even before any data arrives.
+  ASSERT_TRUE(buffer.canTransform("link1", "link1", Time()));
+
+  SUCCEED();
+}
+
+// Test entry point: bring up ROS first, then hand control to Google Test.
+int main(int argc, char** argv)
+{
+  ros::init(argc, argv, "test_one_link");
+  testing::InitGoogleTest(&argc, argv);
+  return RUN_ALL_TESTS();
+}
diff --git a/qt_simulator/robot_state_publisher/test/test_one_link.launch b/qt_simulator/robot_state_publisher/test/test_one_link.launch
new file mode 100644
index 0000000..615b265
--- /dev/null
+++ b/qt_simulator/robot_state_publisher/test/test_one_link.launch
@@ -0,0 +1,8 @@
+
+
+
+
+
+
+
+
diff --git a/qt_simulator/robot_state_publisher/test/test_subclass.cpp b/qt_simulator/robot_state_publisher/test/test_subclass.cpp
new file mode 100644
index 0000000..0a16121
--- /dev/null
+++ b/qt_simulator/robot_state_publisher/test/test_subclass.cpp
@@ -0,0 +1,110 @@
+/*
+ * Software License Agreement (BSD License)
+ *
+ * Copyright (c) 2016, Open Source Robotics Foundation, Inc.
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ *
+ * * Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * * Redistributions in binary form must reproduce the above
+ * copyright notice, this list of conditions and the following
+ * disclaimer in the documentation and/or other materials provided
+ * with the distribution.
+ * * Neither the name of Willow Garage, Inc. nor the names of its
+ * contributors may be used to endorse or promote products derived
+ * from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+ * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+ * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
+ * FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
+ * COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
+ * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
+ * BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+ * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+ * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
+ * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
+ * ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
+ * POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#include
+
+#include
+
+#include
+
+#include "robot_state_publisher/joint_state_listener.h"
+#include "robot_state_publisher/robot_state_publisher.h"
+
+namespace robot_state_publisher_test
+{
+
+// Subclass that exposes the protected model_ member of RobotStatePublisher
+// so the test can verify the model stored at construction time.
+class AccessibleRobotStatePublisher : public robot_state_publisher::RobotStatePublisher
+{
+public:
+  AccessibleRobotStatePublisher(const KDL::Tree& tree, const urdf::Model& model)
+    : robot_state_publisher::RobotStatePublisher(tree, model)
+  {
+  }
+
+  const urdf::Model& getModel() const
+  {
+    return model_;
+  }
+};
+
+// Subclass that exposes the protected use_tf_static_ flag of
+// JointStateListener so the test can inspect the transport choice.
+class AccessibleJointStateListener : public robot_state_publisher::JointStateListener
+{
+public:
+  AccessibleJointStateListener(
+    const KDL::Tree& tree, const MimicMap& m, const urdf::Model& model)
+    : robot_state_publisher::JointStateListener(tree, m, model)
+  {
+  }
+
+  bool usingTfStatic() const
+  {
+    return use_tf_static_;
+  }
+};
+
+}  // namespace robot_state_publisher_test
+
+// Verify that RobotStatePublisher and JointStateListener can be subclassed
+// and that their protected state matches what they were constructed with.
+TEST(TestRobotStatePubSubclass, robot_state_pub_subclass)
+{
+  urdf::Model model;
+  model.initParam("robot_description");
+  KDL::Tree tree;
+  if (!kdl_parser::treeFromUrdfModel(model, tree)) {
+    ROS_ERROR("Failed to extract kdl tree from xml robot description");
+    FAIL();
+  }
+
+  // Collect every mimic joint, as the JointStateListener ctor expects.
+  MimicMap mimic;
+  for (const auto& joint : model.joints_) {
+    if (joint.second->mimic) {
+      mimic.insert(std::make_pair(joint.first, joint.second->mimic));
+    }
+  }
+
+  robot_state_publisher_test::AccessibleRobotStatePublisher state_pub(tree, model);
+
+  // The publisher must retain the model it was constructed from.
+  EXPECT_EQ(model.name_, state_pub.getModel().name_);
+  EXPECT_EQ(model.root_link_, state_pub.getModel().root_link_);
+
+  robot_state_publisher_test::AccessibleJointStateListener state_listener(tree, mimic, model);
+  EXPECT_TRUE(state_listener.usingTfStatic());
+}
+
+// Test entry point: bring up ROS first, then hand control to Google Test.
+int main(int argc, char** argv)
+{
+  ros::init(argc, argv, "test_subclass");
+  testing::InitGoogleTest(&argc, argv);
+  return RUN_ALL_TESTS();
+}
diff --git a/qt_simulator/robot_state_publisher/test/test_subclass.launch b/qt_simulator/robot_state_publisher/test/test_subclass.launch
new file mode 100644
index 0000000..731a049
--- /dev/null
+++ b/qt_simulator/robot_state_publisher/test/test_subclass.launch
@@ -0,0 +1,6 @@
+
+
+
+
+
+
diff --git a/qt_simulator/robot_state_publisher/test/test_two_links_fixed_joint.cpp b/qt_simulator/robot_state_publisher/test/test_two_links_fixed_joint.cpp
new file mode 100644
index 0000000..4545b76
--- /dev/null
+++ b/qt_simulator/robot_state_publisher/test/test_two_links_fixed_joint.cpp
@@ -0,0 +1,108 @@
+/*********************************************************************
+* Software License Agreement (BSD License)
+*
+* Copyright (c) 2008, Willow Garage, Inc.
+* All rights reserved.
+*
+* Redistribution and use in source and binary forms, with or without
+* modification, are permitted provided that the following conditions
+* are met:
+*
+* * Redistributions of source code must retain the above copyright
+* notice, this list of conditions and the following disclaimer.
+* * Redistributions in binary form must reproduce the above
+* copyright notice, this list of conditions and the following
+* disclaimer in the documentation and/or other materials provided
+* with the distribution.
+* * Neither the name of the Willow Garage nor the names of its
+* contributors may be used to endorse or promote products derived
+* from this software without specific prior written permission.
+*
+* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
+* FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
+* COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
+* INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
+* BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+* CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
+* LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
+* ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
+* POSSIBILITY OF SUCH DAMAGE.
+*********************************************************************/
+
+/* Author: Wim Meeussen */
+
+#include
+
+#include
+#include
+#include
+#include
+#include
+#include
+#include
+
+#include "robot_state_publisher/joint_state_listener.h"
+
+using namespace ros;
+using namespace tf2_ros;
+using namespace robot_state_publisher;
+
+#define EPS 0.01
+
+// Fixture for the two-links-fixed-joint publisher test.
+class TestPublisher : public testing::Test
+{
+public:
+  // Listener under test. The original left this raw pointer uninitialized;
+  // default it to nullptr so any accidental read is well-defined.
+  JointStateListener* publisher{nullptr};
+
+protected:
+  /// constructor
+  TestPublisher()
+  {}
+
+  /// Destructor
+  ~TestPublisher()
+  {}
+};
+
+// A fixed joint needs no joint states: the state publisher should broadcast
+// the static link1->link2 transform on its own.
+TEST_F(TestPublisher, test)
+{
+  {
+    ros::NodeHandle n_tilde;
+    std::string robot_description;
+    ASSERT_TRUE(n_tilde.getParam("robot_description", robot_description));
+  }
+
+  ROS_INFO("Creating tf listener");
+  Buffer buffer;
+  TransformListener listener(buffer);
+
+  // Poll for up to ~10 seconds while the publisher comes up.
+  unsigned int attempt = 0;
+  while (attempt < 100 && !buffer.canTransform("link1", "link2", Time())) {
+    ros::Duration(0.1).sleep();
+    ros::spinOnce();
+    ++attempt;
+  }
+
+  ASSERT_TRUE(buffer.canTransform("link1", "link2", Time()));
+  ASSERT_FALSE(buffer.canTransform("base_link", "wim_link", Time()));
+
+  geometry_msgs::TransformStamped tform = buffer.lookupTransform("link1", "link2", Time());
+  EXPECT_NEAR(tform.transform.translation.x, 5.0, EPS);
+  EXPECT_NEAR(tform.transform.translation.y, 0.0, EPS);
+  EXPECT_NEAR(tform.transform.translation.z, 0.0, EPS);
+
+  SUCCEED();
+}
+
+// Test entry point: initialize Google Test and ROS, then run all tests.
+int main(int argc, char** argv)
+{
+  testing::InitGoogleTest(&argc, argv);
+  ros::init(argc, argv, "test_two_links_fixed_joint");
+  return RUN_ALL_TESTS();
+}
diff --git a/qt_simulator/robot_state_publisher/test/test_two_links_fixed_joint.launch b/qt_simulator/robot_state_publisher/test/test_two_links_fixed_joint.launch
new file mode 100644
index 0000000..b843268
--- /dev/null
+++ b/qt_simulator/robot_state_publisher/test/test_two_links_fixed_joint.launch
@@ -0,0 +1,10 @@
+
+
+
+
+
+
+
+
+
diff --git a/qt_simulator/robot_state_publisher/test/test_two_links_moving_joint.cpp b/qt_simulator/robot_state_publisher/test/test_two_links_moving_joint.cpp
new file mode 100644
index 0000000..081a5a1
--- /dev/null
+++ b/qt_simulator/robot_state_publisher/test/test_two_links_moving_joint.cpp
@@ -0,0 +1,119 @@
+/*********************************************************************
+* Software License Agreement (BSD License)
+*
+* Copyright (c) 2008, Willow Garage, Inc.
+* All rights reserved.
+*
+* Redistribution and use in source and binary forms, with or without
+* modification, are permitted provided that the following conditions
+* are met:
+*
+* * Redistributions of source code must retain the above copyright
+* notice, this list of conditions and the following disclaimer.
+* * Redistributions in binary form must reproduce the above
+* copyright notice, this list of conditions and the following
+* disclaimer in the documentation and/or other materials provided
+* with the distribution.
+* * Neither the name of the Willow Garage nor the names of its
+* contributors may be used to endorse or promote products derived
+* from this software without specific prior written permission.
+*
+* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
+* FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
+* COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
+* INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
+* BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+* CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
+* LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
+* ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
+* POSSIBILITY OF SUCH DAMAGE.
+*********************************************************************/
+
+/* Author: Wim Meeussen */
+
+#include
+
+#include
+#include
+#include
+#include
+#include
+#include
+#include
+
+#include "robot_state_publisher/joint_state_listener.h"
+
+using namespace ros;
+using namespace tf2_ros;
+using namespace robot_state_publisher;
+
+#define EPS 0.01
+
+// Fixture for the two-links-moving-joint publisher test.
+class TestPublisher : public testing::Test
+{
+public:
+  // Listener under test. The original left this raw pointer uninitialized;
+  // default it to nullptr so any accidental read is well-defined.
+  JointStateListener* publisher{nullptr};
+
+protected:
+  /// constructor
+  TestPublisher()
+  {}
+
+  /// Destructor
+  ~TestPublisher()
+  {}
+};
+
+// A moving joint only gets a transform once joint states are published:
+// publish joint1 = pi, then expect link1->link2 to resolve at x = 5.
+TEST_F(TestPublisher, test)
+{
+  {
+    ros::NodeHandle n_tilde;
+    std::string robot_description;
+    ASSERT_TRUE(n_tilde.getParam("robot_description", robot_description));
+  }
+
+  ROS_INFO("Creating tf listener");
+  Buffer buffer;
+  TransformListener listener(buffer);
+
+  ROS_INFO("Publishing joint state to robot state publisher");
+  ros::NodeHandle n;
+  // Message type restored: it must match the sensor_msgs::JointState we send.
+  ros::Publisher js_pub = n.advertise<sensor_msgs::JointState>("joint_states", 10);
+  sensor_msgs::JointState js_msg;
+  js_msg.name.push_back("joint1");
+  js_msg.position.push_back(M_PI);
+  ros::Duration(1).sleep();
+  // Publish for up to ~10 seconds, but stop as soon as the transform is
+  // available so the test does not always burn the full publishing window.
+  for (unsigned int i = 0; i < 100; i++) {
+    js_msg.header.stamp = ros::Time::now();
+    js_pub.publish(js_msg);
+    ros::Duration(0.1).sleep();
+    if (buffer.canTransform("link1", "link2", Time())) {
+      break;
+    }
+  }
+
+  // Extra grace period in case the transform is still propagating.
+  for (unsigned int i = 0; i < 100 && !buffer.canTransform("link1", "link2", Time()); i++) {
+    ros::Duration(0.1).sleep();
+    ros::spinOnce();
+  }
+  EXPECT_FALSE(buffer.canTransform("base_link", "wim_link", Time()));
+  ASSERT_TRUE(buffer.canTransform("link1", "link2", Time()));
+
+  geometry_msgs::TransformStamped t = buffer.lookupTransform("link1", "link2", Time());
+  EXPECT_NEAR(t.transform.translation.x, 5.0, EPS);
+  EXPECT_NEAR(t.transform.translation.y, 0.0, EPS);
+  EXPECT_NEAR(t.transform.translation.z, 0.0, EPS);
+
+  SUCCEED();
+}
+
+// Test entry point: initialize Google Test and ROS, then run all tests.
+int main(int argc, char** argv)
+{
+  testing::InitGoogleTest(&argc, argv);
+  ros::init(argc, argv, "test_two_links_moving_joint");
+  // Keep a node handle alive for the lifetime of the tests.
+  ros::NodeHandle node;
+  return RUN_ALL_TESTS();
+}
diff --git a/qt_simulator/robot_state_publisher/test/test_two_links_moving_joint.launch b/qt_simulator/robot_state_publisher/test/test_two_links_moving_joint.launch
new file mode 100644
index 0000000..9998743
--- /dev/null
+++ b/qt_simulator/robot_state_publisher/test/test_two_links_moving_joint.launch
@@ -0,0 +1,9 @@
+
+
+
+
+
+
+
+
diff --git a/qt_simulator/robot_state_publisher/test/two_links_fixed_joint.urdf b/qt_simulator/robot_state_publisher/test/two_links_fixed_joint.urdf
new file mode 100644
index 0000000..a59ae88
--- /dev/null
+++ b/qt_simulator/robot_state_publisher/test/two_links_fixed_joint.urdf
@@ -0,0 +1,10 @@
+
+
+
+
+
+
+
+
+
+
diff --git a/qt_simulator/robot_state_publisher/test/two_links_moving_joint.urdf b/qt_simulator/robot_state_publisher/test/two_links_moving_joint.urdf
new file mode 100644
index 0000000..20e2460
--- /dev/null
+++ b/qt_simulator/robot_state_publisher/test/two_links_moving_joint.urdf
@@ -0,0 +1,10 @@
+
+
+
+
+
+
+
+
+
+