Retrieve simulation data
Note
Currently, all image data are retrieved as CV_8UC3. We are aware of the information loss this causes and are working to change it as soon as possible. You can also contribute by fixing this on both the server side and the client side.
Setup and spawn the camera
// Initialization
std::shared_ptr<Quadrotor> quad_ptr_ = std::make_unique<Quadrotor>();
std::shared_ptr<RGBCamera> rgb_camera_ = std::make_unique<RGBCamera>();
// Setup Camera
Vector<3> B_r_BC(0.0, 0.0, 0.3);
Matrix<3, 3> R_BC = Quaternion(1.0, 0.0, 0.0, 0.0).toRotationMatrix();
rgb_camera_->setFOV(90);
rgb_camera_->setWidth(720);
rgb_camera_->setHeight(480);
rgb_camera_->setRelPose(B_r_BC, R_BC);
rgb_camera_->setPostProcesscing(
std::vector<bool>{false, false, false}); // depth, segmentation, optical flow
quad_ptr_->addRGBCamera(rgb_camera_);
// Setup Quad
QuadState quad_state_;
quad_state_.setZero();
quad_ptr_->reset(quad_state_);
// Spawn
std::shared_ptr<UnityBridge> unity_bridge_ptr_ = UnityBridge::getInstance();
unity_bridge_ptr_->addQuadrotor(quad_ptr_);
bool unity_ready_ = unity_bridge_ptr_->connectUnity(UnityScene::WAREHOUSE);
Position the camera
// Define new quadrotor state.
// NOTE(review): `position` / `orientation` come from the surrounding context
// (not shown here); `orientation` is presumably a unit quaternion — an
// unnormalized quaternion would distort the rendered pose. Verify upstream.
quad_state_.x[QS::POSX] = (Scalar)position.x;
quad_state_.x[QS::POSY] = (Scalar)position.y;
quad_state_.x[QS::POSZ] = (Scalar)position.z;
quad_state_.x[QS::ATTW] = (Scalar)orientation.w;
quad_state_.x[QS::ATTX] = (Scalar)orientation.x;
quad_state_.x[QS::ATTY] = (Scalar)orientation.y;
quad_state_.x[QS::ATTZ] = (Scalar)orientation.z;
// Set new state — the camera is attached to the quad, so moving the quad
// moves the camera viewpoint.
quad_ptr_->setState(quad_state_);
Retrieve data
// Render next frame: request a render for frame id 0 and block on the
// bridge output to receive the rendered images.
unity_bridge_ptr_->getRender(0);
unity_bridge_ptr_->handleOutput();
cv::Mat img;
rgb_camera_->getRGBImage(img);
// Save image (CV_8UC3 — see the note above about information loss).
cv::imwrite("some.jpg", img);
[Optional] Publishing data
// initialize ROS
ros::init(argc, argv, "flightmare_rviz");
ros::NodeHandle pnh("~");
// FIX: `ros::Rate(50.0);` constructed an unnamed temporary that was destroyed
// immediately — dead code. Name it so it can pace a publishing loop via
// rate.sleep().
ros::Rate rate(50.0);
// initialize publishers
image_transport::ImageTransport it(pnh);
image_transport::Publisher rgb_pub_ = it.advertise("/rgb", 1);
// Wrap the OpenCV image in a sensor_msgs/Image (bgr8 matches CV_8UC3).
sensor_msgs::ImagePtr rgb_msg =
cv_bridge::CvImage(std_msgs::Header(), "bgr8", img).toImageMsg();
rgb_msg->header.stamp.fromNSec(0);
rgb_pub_.publish(rgb_msg);
Run example
roslaunch flightros camera.launch
Here is the full code example.
Header
#pragma once
// ros
#include <cv_bridge/cv_bridge.h>
#include <image_transport/image_transport.h>
#include <ros/ros.h>
// standard libraries
#include <assert.h>
#include <Eigen/Dense>
#include <cmath>
#include <cstring>
#include <experimental/filesystem>
#include <fstream>
#include <iostream>
#include <iterator>
#include <sstream>
#include <thread>
#include <vector>
// flightlib
#include "flightlib/bridges/unity_bridge.hpp"
#include "flightlib/bridges/unity_message_types.hpp"
#include "flightlib/common/quad_state.hpp"
#include "flightlib/common/types.hpp"
#include "flightlib/objects/quadrotor.hpp"
#include "flightlib/sensors/rgb_camera.hpp"
using namespace flightlib;
// Example state shared between main() and the helpers below.
// NOTE(review): these are mutable globals declared in a header — fine for a
// single-TU example, but including this header from more than one .cpp would
// cause multiple-definition link errors (consider `inline` or moving them to
// the .cpp).
namespace camera {
// publisher — one topic per image modality produced by the RGB camera.
image_transport::Publisher rgb_pub_;
image_transport::Publisher depth_pub_;
image_transport::Publisher segmentation_pub_;
image_transport::Publisher opticalflow_pub_;
// frame counter, reused as the (fake) nanosecond timestamp of each message
int counter = 0;
// void setupQuad();
// Create the Unity bridge (if rendering is enabled) and register the quad.
bool setUnity(const bool render);
// Connect the bridge to the Unity scene; returns true on success.
bool connectUnity(void);
// unity quadrotor
std::shared_ptr<Quadrotor> quad_ptr_;
std::shared_ptr<RGBCamera> rgb_camera_;
QuadState quad_state_;
// Flightmare(Unity3D)
std::shared_ptr<UnityBridge> unity_bridge_ptr_;
SceneID scene_id_{UnityScene::WAREHOUSE};
bool unity_ready_{false};
bool unity_render_{true};
RenderMessage_t unity_output_;
uint16_t receive_id_{0};
} // namespace camera
Main
#include "flightros/camera/camera.hpp"
// Lazily create the Unity bridge when rendering is requested and register
// the quadrotor with it. Always reports success.
bool camera::setUnity(const bool render) {
  unity_render_ = render;
  // Nothing to do when rendering is off or the bridge already exists.
  if (!unity_render_ || unity_bridge_ptr_ != nullptr) return true;
  unity_bridge_ptr_ = UnityBridge::getInstance();
  unity_bridge_ptr_->addQuadrotor(quad_ptr_);
  std::cout << "Unity Bridge is created." << std::endl;
  return true;
}
// Attempt to connect the existing bridge to the configured scene.
// Returns false when rendering is disabled or no bridge was created.
bool camera::connectUnity() {
  if (unity_render_ && unity_bridge_ptr_ != nullptr) {
    unity_ready_ = unity_bridge_ptr_->connectUnity(scene_id_);
    return unity_ready_;
  }
  return false;
}
int main(int argc, char *argv[]) {
// initialize ROS
ros::init(argc, argv, "flightmare_rviz");
ros::NodeHandle nh("");
ros::NodeHandle pnh("~");
ros::Rate(50.0);
// initialize publishers
image_transport::ImageTransport it(pnh);
camera::rgb_pub_ = it.advertise("/rgb", 1);
camera::depth_pub_ = it.advertise("/depth", 1);
camera::segmentation_pub_ = it.advertise("/segmentation", 1);
camera::opticalflow_pub_ = it.advertise("/opticalflow", 1);
// quad initialization
camera::quad_ptr_ = std::make_unique<Quadrotor>();
// add mono camera
camera::rgb_camera_ = std::make_unique<RGBCamera>();
// Flightmare
Vector<3> B_r_BC(0.0, 0.0, 0.3);
Matrix<3, 3> R_BC = Quaternion(1.0, 0.0, 0.0, 0.0).toRotationMatrix();
std::cout << R_BC << std::endl;
camera::rgb_camera_->setFOV(90);
camera::rgb_camera_->setWidth(720);
camera::rgb_camera_->setHeight(480);
camera::rgb_camera_->setRelPose(B_r_BC, R_BC);
camera::rgb_camera_->setPostProcesscing(
std::vector<bool>{true, true, true}); // depth, segmentation, optical flow
camera::quad_ptr_->addRGBCamera(camera::rgb_camera_);
// // initialization
camera::quad_state_.setZero();
camera::quad_ptr_->reset(camera::quad_state_);
// connect unity
camera::setUnity(camera::unity_render_);
camera::connectUnity();
while (ros::ok() && camera::unity_render_ && camera::unity_ready_) {
camera::quad_state_.x[QS::POSX] = (Scalar)0;
camera::quad_state_.x[QS::POSY] = (Scalar)0;
camera::quad_state_.x[QS::POSZ] = (Scalar)0;
camera::quad_state_.x[QS::ATTW] = (Scalar)0;
camera::quad_state_.x[QS::ATTX] = (Scalar)0;
camera::quad_state_.x[QS::ATTY] = (Scalar)0;
camera::quad_state_.x[QS::ATTZ] = (Scalar)0;
camera::quad_ptr_->setState(camera::quad_state_);
camera::unity_bridge_ptr_->getRender(0);
camera::unity_bridge_ptr_->handleOutput();
cv::Mat img;
camera::rgb_camera_->getRGBImage(img);
sensor_msgs::ImagePtr rgb_msg =
cv_bridge::CvImage(std_msgs::Header(), "bgr8", img).toImageMsg();
rgb_msg->header.stamp.fromNSec(camera::counter);
camera::rgb_pub_.publish(rgb_msg);
camera::rgb_camera_->getDepthMap(img);
sensor_msgs::ImagePtr depth_msg =
cv_bridge::CvImage(std_msgs::Header(), "bgr8", img).toImageMsg();
depth_msg->header.stamp.fromNSec(camera::counter);
camera::depth_pub_.publish(depth_msg);
camera::rgb_camera_->getSegmentation(img);
sensor_msgs::ImagePtr segmentation_msg =
cv_bridge::CvImage(std_msgs::Header(), "bgr8", img).toImageMsg();
segmentation_msg->header.stamp.fromNSec(camera::counter);
camera::segmentation_pub_.publish(segmentation_msg);
camera::rgb_camera_->getOpticalFlow(img);
sensor_msgs::ImagePtr opticflow_msg =
cv_bridge::CvImage(std_msgs::Header(), "bgr8", img).toImageMsg();
opticflow_msg->header.stamp.fromNSec(camera::counter);
camera::opticalflow_pub_.publish(opticflow_msg);
camera::counter++;
}
return 0;
}