本文主要介紹在AR Drone 2四旋翼飛行器上,基於ROS,使用cv_bridge將ROS Image和OpenCV Image相互轉化,編寫簡單的Publisher和Subscriber程序,把結果圖像顯示出來。
開發平臺:AR drone2 ubuntu14.04 ROS indigo
ROS Image messages 和OpenCV Mat相互轉化可參考
http://wiki.ros.org/cv_bridge/Tutorials/UsingCvBridgeToConvertBetweenROSImagesAndOpenCVImages
ardrone_autonomy使用手冊
http://ardrone-autonomy.readthedocs.io/en/latest/index.html
image_transport example
http://wiki.ros.org/image_transport/Tutorials
Step 1:創建一個工作空間dronework,然後利用catkin_create_pkg創建dronevideo package,dronevideo package開發包依賴於cv_bridge image_transport sensor_msgs roscpp std_msgs
mkdir -p /root/dronework/src
cd /root/dronework/src
source /opt/ros/indigo/setup.bash
catkin_create_pkg dronevideo cv_bridge image_transport sensor_msgs roscpp std_msgs
cd /root/dronework
catkin_make
在root目錄下.bashrc文件中添加
source /opt/ros/indigo/setup.bash
source /root/dronework/devel/setup.bash
這樣可以避免每次打開一個新的終端,需要source對應的setup.bash
Step 2:在dronevideo package的src目錄下添加dronevideo_pub.cpp
#include <ros/ros.h>
#include <image_transport/image_transport.h>
#include <cv_bridge/cv_bridge.h>
#include <sensor_msgs/image_encodings.h>
#include <opencv2/imgproc/imgproc.hpp>
#include <opencv2/highgui/highgui.hpp>
using namespace std;
using namespace cv;
// Name of the OpenCV display window, shared by main() and imageCb().
static const string OPENCV_WINDOW = "Image window";
// File-scope subscriber/publisher handles: initialized in main(), and
// image_pub_ is used by imageCb() to republish the converted frames.
image_transport::Subscriber image_sub_;
image_transport::Publisher image_pub_;
/**
 * Image callback: copies the incoming ROS image into an OpenCV BGR image,
 * converts it to grayscale, shows it in the GUI window, and republishes
 * the grayscale frame as a "mono8" ROS image on image_pub_.
 *
 * @param msg  incoming ROS image message (e.g. from /ardrone/image_raw)
 */
void imageCb(const sensor_msgs::ImageConstPtr& msg)
{
  cv_bridge::CvImagePtr cv_ptr;
  try
  {
    // Request OpenCV's native BGR channel order for the copy.
    cv_ptr = cv_bridge::toCvCopy(msg, "bgr8");
  }
  catch (cv_bridge::Exception& e)
  {
    ROS_ERROR("cv_bridge exception: %s", e.what());
    return;
  }
  Mat img_bgr, img_gray;
  img_bgr = cv_ptr->image;
  // BUG FIX: the image was requested as "bgr8", so the correct conversion
  // code is CV_BGR2GRAY; CV_RGB2GRAY would swap the red/blue luminance
  // weights and produce a slightly wrong grayscale image.
  cvtColor(img_bgr, img_gray, CV_BGR2GRAY);
  // Update GUI window (waitKey lets HighGUI process window events).
  imshow(OPENCV_WINDOW, img_gray);
  waitKey(3);
  // Output modified video stream as a single-channel ("mono8") message.
  sensor_msgs::ImagePtr msg_pub;
  msg_pub = cv_bridge::CvImage(std_msgs::Header(), "mono8", img_gray).toImageMsg();
  image_pub_.publish(msg_pub);
}
int main(int argc, char** argv)
{
ros::init(argc, argv, "dronevideo_pub");
ros::NodeHandle nh_;
image_transport::ImageTransport it_(nh_);
// Subscrive to input video feed and publish output video feed
image_sub_ = it_.subscribe("/ardrone/image_raw", 1, imageCb);
image_pub_ = it_.advertise("/image_converter/output_video", 1);
namedWindow(OPENCV_WINDOW);
ros::spin();
destroyWindow(OPENCV_WINDOW);
return 0;
}
運行ardrone_autonomy ardrone_driver可以產生/ardrone/image_raw,通過訂閱該話題可以獲取ar drone2攝像頭 ROS Image message
/image_converter/output_video話題是爲了把轉換後的灰度圖像message發佈出去。
toCvCopy toCvShare toImageMsg關鍵函數
Step 3:在dronevideo package的src目錄下添加dronevideo_sub.cpp
#include <ros/ros.h>
#include <cv_bridge/cv_bridge.h>
#include <sensor_msgs/image_encodings.h>
#include <image_transport/image_transport.h>
#include <opencv2/imgproc/imgproc.hpp>
#include <opencv2/highgui/highgui.hpp>
using namespace std;
using namespace cv;
// Displays each received "mono8" frame in the "view" window; logs an
// error (and skips the frame) when the encoding cannot be converted.
void imageCallback(const sensor_msgs::ImageConstPtr& msg)
{
  try
  {
    const cv_bridge::CvImageConstPtr frame = cv_bridge::toCvShare(msg, "mono8");
    imshow("view", frame->image);
    waitKey(30); // give HighGUI ~30ms to process window events
  }
  catch (cv_bridge::Exception&)
  {
    ROS_ERROR("Could not convert from '%s' to 'mono8'.", msg->encoding.c_str());
  }
}
// Node entry point: subscribes to the converted grayscale stream and
// displays it in an OpenCV window until the node is shut down.
int main(int argc, char **argv)
{
  ros::init(argc, argv, "dronevideo_sub");
  ros::NodeHandle node;
  cv::namedWindow("view");
  cv::startWindowThread();
  image_transport::ImageTransport transport(node);
  image_transport::Subscriber sub =
      transport.subscribe("/image_converter/output_video", 1, imageCallback);
  ros::spin();
  cv::destroyWindow("view");
  return 0;
}
<?xml version="1.0"?>
<package>
  <name>dronevideo</name>
  <version>0.0.0</version>
  <description>The dronevideo package</description>
  <maintainer email="[email protected]">root</maintainer>
  <license>TODO</license>
  <buildtool_depend>catkin</buildtool_depend>
  <!-- roscpp and std_msgs were missing: the nodes are C++ and
       CMakeLists.txt lists both as required catkin components. -->
  <build_depend>roscpp</build_depend>
  <build_depend>std_msgs</build_depend>
  <build_depend>cv_bridge</build_depend>
  <build_depend>image_transport</build_depend>
  <build_depend>sensor_msgs</build_depend>
  <build_depend>message_generation</build_depend>
  <build_depend>opencv2</build_depend>
  <run_depend>roscpp</run_depend>
  <run_depend>std_msgs</run_depend>
  <run_depend>cv_bridge</run_depend>
  <run_depend>image_transport</run_depend>
  <run_depend>sensor_msgs</run_depend>
  <run_depend>message_runtime</run_depend>
  <run_depend>opencv2</run_depend>
</package>
Step 4:修改package.xml,添加 opencv2、message_generation、message_runtime 依賴項
Step 5:修改CMakeLists.txt
cmake_minimum_required(VERSION 2.8.3)
project(dronevideo)

find_package(catkin REQUIRED COMPONENTS
  roscpp
  std_msgs
  cv_bridge
  image_transport
  sensor_msgs
  genmsg
)
#generate_messages(DEPENDENCIES sensor_msgs)
catkin_package()

# REQUIRED makes configuration fail early if OpenCV is missing, instead of
# leaving OpenCV_* variables empty and causing a confusing link error later.
find_package(OpenCV REQUIRED)
include_directories(include ${catkin_INCLUDE_DIRS} ${OpenCV_INCLUDE_DIRS})

# Build targets: one executable per node.
add_executable(dronevideo_pub src/dronevideo_pub.cpp)
target_link_libraries(dronevideo_pub ${catkin_LIBRARIES} ${OpenCV_LIBRARIES})

add_executable(dronevideo_sub src/dronevideo_sub.cpp)
target_link_libraries(dronevideo_sub ${catkin_LIBRARIES} ${OpenCV_LIBRARIES})

# NOTE: the former add_dependencies(... dronevideo_generate_messages_cpp)
# lines were removed — this package generates no messages (generate_messages
# is commented out), so that target never exists.
主要注意要包含OpenCV依賴項,然後Build Targets部分分別創建dronevideo_pub和dronevideo_sub節點。
Step 6:cmake & run
cd /root/dronework
catkin_make
First one terminal : roscore
Next another terminal: rosrun ardrone_autonomy ardrone_driver
And then a terminal: rosrun dronevideo dronevideo_pub
Finally the last one terminal: rosrun dronevideo dronevideo_sub
最後效果圖
出現問題:
當分別運行
# 200Hz real-time update
$ rosrun ardrone_autonomy ardrone_driver _realtime_navdata:=True _navdata_demo:=0
# 15Hz real-time update
$ rosrun ardrone_autonomy ardrone_driver _realtime_navdata:=True _navdata_demo:=1
pub和sub節點實現圖像偶爾會出現卡頓,難道navdata 更新頻率會對Image message 有影響,後面再詳細研究ardrone_autonomy Parameter。