added image capture settings
sytelus committed Jul 11, 2017
1 parent 1adbe54 commit b553698
Showing 13 changed files with 292 additions and 82 deletions.
10 changes: 10 additions & 0 deletions AirLib/include/controllers/Settings.hpp
@@ -105,6 +105,16 @@ namespace msr {
}
}

double getFloat(std::string name, float defaultValue) const
{
if (doc_.count(name) == 1) {
return doc_[name].get<float>();
}
else {
return defaultValue;
}
}

bool getBool(std::string name, bool defaultValue) const
{
if (doc_.count(name) == 1) {
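For context, here is a minimal, hypothetical sketch of how the new `getFloat` accessor can be used to read an optional value with a fallback default; the `FOV_Degrees` key mirrors the capture-settings keys read later in this commit, and the helper function itself is illustrative only.

```
// Illustrative only (not part of the commit): read an optional float-valued
// setting from the loaded settings.json, falling back to a default.
#include "controllers/Settings.hpp"

double readFovDegrees()
{
    msr::airlib::Settings& settings = msr::airlib::Settings::singleton();

    // getFloat returns the stored value when "FOV_Degrees" is present in the
    // settings document, otherwise the supplied default (90 degrees here).
    return settings.getFloat("FOV_Degrees", 90.0f);
}
```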
2 changes: 1 addition & 1 deletion Examples/main.cpp
@@ -61,6 +61,6 @@ void runSteroImageGenerator(int num_samples, std::string storage_path)

int main(int argc, const char *argv[])
{
runSteroImageGenerator(argc < 2 ? 20000 : std::stoi(argv[1]), argc < 3 ? "c:\\temp\\stig2" : std::string(argv[2]));
runSteroImageGenerator(argc < 2 ? 20000 : std::stoi(argv[1]), argc < 3 ? "c:\\temp\\stig_res" : std::string(argv[2]));
}

2 changes: 2 additions & 0 deletions Unreal/Plugins/AirSim/Source/FlyingPawn.cpp
@@ -21,6 +21,8 @@ void AFlyingPawn::initializeForBeginPlay()
setStencilIDs();

setupInputBindings();

setupCamerasFromSettings();
}

void AFlyingPawn::setStencilIDs()
64 changes: 59 additions & 5 deletions Unreal/Plugins/AirSim/Source/PIPCamera.cpp
@@ -1,8 +1,9 @@
#include "AirSim.h"
#include <string>
#include <exception>
#include "PIPCamera.h"
#include "AirBlueprintLib.h"
#include "ImageUtils.h"
#include <string>

void APIPCamera::PostInitializeComponents()
{
@@ -24,17 +25,20 @@ void APIPCamera::BeginPlay()
seg_capture_->CaptureSource = ESceneCaptureSource::SCS_FinalColorLDR;

scene_render_target_ = NewObject<UTextureRenderTarget2D>();
scene_render_target_->InitAutoFormat(960, 540); //256 X 144, X 480
//scene_render_target_->bHDR = false;
setCaptureSettings(ImageType_::Scene, scene_capture_settings_);
scene_render_target_->TargetGamma = 1.0f; // GEngine->GetDisplayGamma();
//scene_render_target_->bHDR = false;
//scene_render_target_->InitAutoFormat(960, 540); //256 X 144, X 480

depth_render_target_ = NewObject<UTextureRenderTarget2D>();
depth_render_target_->InitAutoFormat(960, 540);
setCaptureSettings(ImageType_::Depth, depth_capture_settings_);
depth_render_target_->TargetGamma = 1.0f;
//depth_render_target_->InitAutoFormat(960, 540);

seg_render_target_ = NewObject<UTextureRenderTarget2D>();
seg_render_target_->InitAutoFormat(960, 540);
setCaptureSettings(ImageType_::Segmentation, seg_capture_settings_);
seg_render_target_->TargetGamma = 1.0f;
//seg_render_target_->InitAutoFormat(960, 540);
}

void APIPCamera::EndPlay(const EEndPlayReason::Type EndPlayReason)
@@ -76,6 +80,56 @@ void APIPCamera::setEnableCameraTypes(APIPCamera::ImageType types)
enableCaptureComponent(ImageType_::Segmentation, (types & ImageType_::Segmentation));
}

APIPCamera::CaptureSettings APIPCamera::getCaptureSettings(ImageType_ type)
{
switch (type)
{
case ImageType_::Scene: return scene_capture_settings_;
case ImageType_::Depth: return depth_capture_settings_;
case ImageType_::Segmentation: return seg_capture_settings_;
default:
throw std::invalid_argument("the ImageType specified for getCaptureSettings is not recognized");
}
}

void APIPCamera::setCaptureSettings(APIPCamera::ImageType_ type, const APIPCamera::CaptureSettings& settings)
{
switch (type)
{
case ImageType_::Scene: {
scene_capture_settings_ = settings;
updateCaptureComponentSettings(screen_capture_, scene_render_target_, scene_capture_settings_);
break;
}
case ImageType_::Depth: {
depth_capture_settings_ = settings;
updateCaptureComponentSettings(depth_capture_, depth_render_target_, depth_capture_settings_);
break;
}
case ImageType_::Segmentation: {
seg_capture_settings_ = settings;
updateCaptureComponentSettings(seg_capture_, seg_render_target_, seg_capture_settings_);
break;
}
default:
throw std::invalid_argument("the ImageType specified for setCaptureSettings is not recognized");
}
}

void APIPCamera::updateCaptureComponentSettings(USceneCaptureComponent2D* capture, UTextureRenderTarget2D* render_target, const CaptureSettings& settings)
{
if (render_target)
render_target->InitAutoFormat(settings.width, settings.height); //256 X 144, X 480
//else we will set this after these components get created

if (capture) {
capture->FOVAngle = settings.fov_degrees;
capture->PostProcessSettings.AutoExposureSpeedDown = capture->PostProcessSettings.AutoExposureSpeedUp = settings.auto_exposure_speed;
capture->PostProcessSettings.MotionBlurAmount = settings.motion_blur_amount;
}
//else we will set this after these components get created
}

void APIPCamera::enableCaptureComponent(const APIPCamera::ImageType type, bool is_enabled)
{
USceneCaptureComponent2D* capture = getCaptureComponent(type, false);
15 changes: 14 additions & 1 deletion Unreal/Plugins/AirSim/Source/PIPCamera.h
@@ -10,7 +10,14 @@ class AIRSIM_API APIPCamera : public ACameraActor
{
GENERATED_BODY()


public:
struct CaptureSettings {
unsigned int width = 960, height = 540;
float fov_degrees = 90;
float auto_exposure_speed = 100.0f;
float motion_blur_amount = 0.0f;
};

public:
typedef msr::airlib::VehicleCameraBase::ImageType ImageType;
typedef msr::airlib::VehicleCameraBase::ImageType_ ImageType_;
@@ -34,6 +41,9 @@ class AIRSIM_API APIPCamera : public ACameraActor
USceneCaptureComponent2D* getCaptureComponent(const ImageType type, bool if_active);
UTextureRenderTarget2D* getRenderTarget(const ImageType type, bool if_active);

CaptureSettings getCaptureSettings(ImageType_ type);
void setCaptureSettings(ImageType_ type, const CaptureSettings& settings);

private:
UPROPERTY() USceneCaptureComponent2D* screen_capture_;
UPROPERTY() USceneCaptureComponent2D* depth_capture_;
@@ -45,6 +55,9 @@ class AIRSIM_API APIPCamera : public ACameraActor

ImageType enabled_camera_types_ = DefaultEnabledCameras;

CaptureSettings scene_capture_settings_, seg_capture_settings_, depth_capture_settings_;

private:
void enableCaptureComponent(const ImageType type, bool is_enabled);
static void updateCaptureComponentSettings(USceneCaptureComponent2D* capture, UTextureRenderTarget2D* render_target, const CaptureSettings& settings);
};
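As a hedged illustration of the header API above, the sketch below shows how a caller holding an `APIPCamera*` might lower the scene capture resolution at runtime; the function, the pointer source and the chosen values are hypothetical.

```
// Illustrative only: read, modify and re-apply capture settings for the
// scene image stream using the accessors declared in PIPCamera.h.
void lowerSceneCaptureResolution(APIPCamera* camera)
{
    APIPCamera::CaptureSettings settings =
        camera->getCaptureSettings(APIPCamera::ImageType_::Scene);

    settings.width = 640;          // down from the 960x540 default
    settings.height = 360;
    settings.fov_degrees = 120;    // widen the field of view

    camera->setCaptureSettings(APIPCamera::ImageType_::Scene, settings);
}
```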
5 changes: 5 additions & 0 deletions Unreal/Plugins/AirSim/Source/SimMode/SimModeBase.cpp
@@ -29,6 +29,11 @@ void ASimModeBase::BeginPlay()

UAirBlueprintLib::LogMessage(TEXT("Press F1 to see help"), TEXT(""), LogDebugLevel::Informational);

readSettings();
}

void ASimModeBase::readSettings()
{
typedef msr::airlib::Settings Settings;

Settings& settings = Settings::singleton();
3 changes: 3 additions & 0 deletions Unreal/Plugins/AirSim/Source/SimMode/SimModeBase.h
@@ -55,4 +55,7 @@ class AIRSIM_API ASimModeBase : public AActor
std::string fpv_vehicle_name;
std::string physics_engine_name;
std::string usage_scenario;

private:
void readSettings();
};
36 changes: 36 additions & 0 deletions Unreal/Plugins/AirSim/Source/VehiclePawnBase.cpp
@@ -4,6 +4,7 @@
#include "common/ClockFactory.hpp"
#include "NedTransform.h"


AVehiclePawnBase::AVehiclePawnBase()
{
static ConstructorHelpers::FObjectFinder<UParticleSystem> collison_display(TEXT("ParticleSystem'/AirSim/StarterContent/Particles/P_Explosion.P_Explosion'"));
@@ -23,6 +24,41 @@ void AVehiclePawnBase::BeginPlay()
Super::BeginPlay();
}

void AVehiclePawnBase::setupCamerasFromSettings()
{
typedef msr::airlib::Settings Settings;
typedef msr::airlib::VehicleCameraBase::ImageType_ ImageType_;

Settings& settings = Settings::singleton();
Settings scene_settings_child, depth_settings_child, seg_settings_child;
APIPCamera::CaptureSettings scene_settings, depth_settings, seg_settings;
if (settings.getChild("SceneCaptureSettings", scene_settings_child))
createCaptureSettings(scene_settings_child, scene_settings);
if (settings.getChild("DepthCaptureSettings", depth_settings_child))
createCaptureSettings(depth_settings_child, depth_settings);
if (settings.getChild("SegCaptureSettings", seg_settings_child))
createCaptureSettings(seg_settings_child, seg_settings);


for (int camera_index = 0; camera_index < getCameraCount(); ++camera_index) {
APIPCamera* camera = getCamera(camera_index);
camera->setCaptureSettings(ImageType_::Scene, scene_settings);
camera->setCaptureSettings(ImageType_::Depth, depth_settings);
camera->setCaptureSettings(ImageType_::Segmentation, seg_settings);
}
}

void AVehiclePawnBase::createCaptureSettings(const msr::airlib::Settings& settings, APIPCamera::CaptureSettings& capture_settings)
{
typedef msr::airlib::Settings Settings;

capture_settings.width = settings.getInt("Width", capture_settings.width);
capture_settings.height = settings.getInt("Height", capture_settings.height);
capture_settings.fov_degrees = settings.getFloat("FOV_Degrees", capture_settings.fov_degrees);
capture_settings.auto_exposure_speed = settings.getFloat("AutoExposureSpeed", capture_settings.auto_exposure_speed);
capture_settings.motion_blur_amount = settings.getFloat("MotionBlurAmount", capture_settings.motion_blur_amount);
}

void AVehiclePawnBase::EndPlay(const EEndPlayReason::Type EndPlayReason)
{
state_ = initial_state_ = State();
4 changes: 4 additions & 0 deletions Unreal/Plugins/AirSim/Source/VehiclePawnBase.h
@@ -4,6 +4,7 @@
#include "common/Common.hpp"
#include "common/CommonStructs.hpp"
#include "PIPCamera.h"
#include "controllers/Settings.hpp"
#include "GameFramework/Pawn.h"
#include "VehiclePawnBase.generated.h"

@@ -81,12 +82,15 @@ class AIRSIM_API AVehiclePawnBase : public APawn
UPROPERTY(VisibleAnywhere)
UParticleSystem* collison_display_template;

void setupCamerasFromSettings();

private: //methods
bool canTeleportWhileMove() const;
void allowPassthroughToggleInput();

//these methods are for future usage
void plot(std::istream& s, FColor color, const Vector3r& offset);
void createCaptureSettings(const msr::airlib::Settings& settings, APIPCamera::CaptureSettings& capture_settings);


private: //vars
89 changes: 19 additions & 70 deletions docs/apis.md
@@ -1,11 +1,9 @@
## Introduction
This project includes a self-contained cross-platform library to retrieve data from the quadrotor and send the control commands.
You can use this library for a simulated drone in Unreal engine or on a real quadrotor such as a MavLink based vehicle platform
(and very soon DJI quadrotors such as Matrice).
AirSim offers APIs to interact with vehicles. You can use these APIs to retrieve images, get state, command the vehicle and so on. The APIs use the [msgpack-rpc protocol](https://github.com/msgpack-rpc/msgpack-rpc), which has bindings available in a variety of languages including C++, C#, Python and Java.

## Hello Drone
Here's the taste of how you can use our APIs in C++:
See also [Python API](python.md) if you prefer that language.
Here's a very quick overview of how to use the AirSim APIs from C++ (see also the [Python doc](python.md)):

```
#include <iostream>
Expand Down Expand Up @@ -37,78 +35,29 @@ int main()
```

You can find a ready to run project in HelloDrone folder in the repository.
You can find a ready-to-run project in the HelloDrone folder of the repository. Read more about [Hello Drone](hello_drone.md).

## How does Hello Drone work?
Hello Drone uses the RPC client to connect to the RPC server that is automatically started by the AirSim.
The RPC server routes all the commands to a class that implements [DroneControlBase](https://github.com/Microsoft/AirSim/blob/master/AirLib/include/controllers/DroneControllerBase.hpp).
In essence, DroneControlBase defines our abstract interface for getting data from the quadrotor and sending back commands.
We currently have concrete implementation for DroneControlBase for MavLink based vehicles. The implementation for DJI drone
platforms, specifically Matrice, is in works.
## Image / Computer Vision and Collision APIs
AirSim offers comprehensive image APIs to retrieve synchronized images from multiple cameras along with ground truth such as depth and segmentation. You can set the resolution, FOV, motion blur and other parameters in [settings.json](settings.md). There is also an API for detecting collision state. In addition, AirSim includes complete examples of how to generate stereo images along with ground-truth depth images.

## Timing
More on [image APIs](image_apis.md).
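For example, based on the capture-settings keys this commit reads in `createCaptureSettings` (`Width`, `Height`, `FOV_Degrees`, `AutoExposureSpeed`, `MotionBlurAmount` under `SceneCaptureSettings`, `DepthCaptureSettings` and `SegCaptureSettings`), the relevant portion of settings.json might look roughly like this; the values shown are placeholders and any omitted key falls back to the built-in default:

```
{
  "SceneCaptureSettings": {
    "Width": 1280,
    "Height": 720,
    "FOV_Degrees": 90,
    "AutoExposureSpeed": 100,
    "MotionBlurAmount": 0
  },
  "DepthCaptureSettings": {
    "Width": 640,
    "Height": 360
  }
}
```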

Notice each method of DroneControlBase API takes one of two possible parameters: `float duration` or: `float max_wait_seconds`.
## Note on Timing-Related Parameters

Methods that take `float duration`, like moveByVelocity return control immediately. So you can therefore choose to sleep for this duration, or you can change their mind and call something else which will automatically cancel the moveByVelocity.
Many API methods have a parameter named `float duration` or `float max_wait_seconds`.

Methods that take `float max_wait_seconds`, like takeoff, land, moveOnPath, moveToPosition, moveToZ, and so will block this amount of time waiting for command to be successfully completed. If the command
completes before the max_wait_seconds they will return True, otherwise
if the max_wait_seconds times out they will return False.
Methods that take `float duration`, like `moveByVelocity`, return control immediately. You can therefore choose to sleep for this duration, or change your mind and call something else, which will automatically cancel the `moveByVelocity`.

If you want to wait for ever pass a big number. But if you want to be able to interrupt even these commands pass 0 and you can do something else or sleep in a loop while checking the drone position, etc.
Methods that take `float max_wait_seconds`, like `takeoff`, `land`, `moveOnPath`, `moveToPosition`, `moveToZ` and so on, will block for up to this amount of time waiting for the command to complete successfully. If the command completes before `max_wait_seconds` elapses they return `true`; if `max_wait_seconds` times out they return `false`. If you want to wait forever, pass a big number. But if you want to be able to interrupt even these commands, pass 0; you can then do something else or sleep in a loop while checking the drone position, etc. We would not recommend interrupting takeoff/land on a real drone, of course, as the results may be unpredictable.

Note: We would not recommend interrupting takeoff/land on a real drone, of course, as the results may be unpredictable.
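A hedged sketch of the two calling styles follows; the parameter lists of `moveByVelocity` and `moveToPosition` are simplified here (the real client methods take additional drivetrain and yaw-mode arguments), so treat this as an illustration of the timing behavior rather than the exact API:

```
// Illustration only: simplified argument lists, not the exact client signatures.
#include <chrono>
#include <thread>
#include "rpc/RpcLibClient.hpp"  // AirLib RPC client header; adjust path to your setup

void timingStyles()
{
    msr::airlib::RpcLibClient client;

    // duration-style call: returns immediately; sleep to let it run its course,
    // or issue another command to cancel it.
    client.moveByVelocity(1.0f, 0.0f, 0.0f, 5.0f /* duration in seconds */);
    std::this_thread::sleep_for(std::chrono::seconds(5));

    // max_wait_seconds-style call: blocks until the command completes or the
    // wait times out; pass 0 to return immediately and poll on your own.
    bool reached = client.moveToPosition(-10.0f, 10.0f, -10.0f, 5.0f /* velocity */,
                                         60.0f /* max_wait_seconds */);
    (void)reached;  // true if the position was reached before the timeout
}
```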


## How to get images from drone?
Here's a sample code to get a single image:

```
int playWithImages()
{
using namespace std;
using namespace msr::airlib;
msr::airlib::RpcLibClient client;
vector<uint8_t> image = client.simGetImage(0, DroneControlBase::ImageType::Depth);
//do something with images
}
```

You can also get multiple images using API `simGetImages` which is slighly more complex to use than `simGetImage`. For example, you can get left camera view, right camera view and depth image from left camera - all at once! For sample code please see [sample code in HelloDrone project](https://github.com/Microsoft/AirSim/blob/master/HelloDrone/main.cpp). We also have [complete code](https://github.com/Microsoft/AirSim/blob/master/Examples/StereoImageGenerator.hpp) that generates specified number of stereo images and ground truth depth with normalization to camera plan, computation of disparity image and saving it to pfm format.

Unlike `simGetImage`, the `simGetImages` API also allows you to get uncompressed images as well as floating point single channel images (instead of 3 channel (RGB), each 8 bit).

You can also use Python to get images. For sample code please see [PythonClient project](https://github.com/Microsoft/AirSim/tree/master/PythonClient) and [Python example doc](python.md).

Furthermore, if your work involves computer vision experiments and if you don't care about drone dynamics then you can use our so called "ComputerVision" mode. Please see next section for the details.

## Can I use AirSim just for computer vision? I don't care about drones, physics etc.
Yes, now you can! Simply go to settings.json that you can find in your Documents\AirSim folder (or ~/Documents/AirSim on Linux). Add following setting at root level:

```
{
"FpvVehicleName": "SimpleFlight",
"UsageScenario": "ComputerVision"
}
```

Now when you start AirSim, you won't be able to move drone using remote control, there is no drone dynamics and physics engine is disabled in this mode. Think of this mode as that justs you move around cameras, not drone. You can use keyboard to move around (use F1 to see help on keys) and call APIs to get images. You can also use two additional APIs `simSetPose` to set position and orientation of drone programatically (use nan to specify no change). Then use can image APIs as described in above section to get images for your desired pose. Please see [complete code](https://github.com/Microsoft/AirSim/blob/master/Examples/StereoImageGenerator.hpp) that generates specified number of stereo images and ground truth depth with normalization to camera plan, computation of disparity image and saving it to pfm format in this mode.

## Can I run above code on real quadrotors as well?
Absolutely! The AirLib is self-contained library that you can put on an offboard computing module such as the Gigabyte barebone Mini PC.
This module then can talk to the flight controllers such as Pixhawk using exact same code and MavLink protocol (or DJI protocol).
The code you write for testing in the simulator remains unchanged!
## Using APIs on Real Vehicles
We want to be able to run the *same code* in simulation and on a real vehicle. AirLib is a self-contained library that you can put on an offboard computing module such as the Gigabyte barebone Mini PC. This module can then talk to flight controllers such as the Pixhawk using the exact same code and the MavLink protocol (or DJI protocol). The code you write for testing in the simulator remains unchanged!
See [AirLib on custom drones](https://github.com/Microsoft/AirSim/blob/master/docs/custom_drone.md).

## What else can I do ?

You can also program AirSim using [Python](python.md).

See [move on path](https://github.com/Microsoft/AirSim/wiki/moveOnPath-demo) demo showing video of fast flight through Modular Neighborhood environment.

See [building a hexacopter](https://github.com/Microsoft/AirSim/wiki/hexacopter).
## References and Examples

See [building point clouds](https://github.com/Microsoft/AirSim/wiki/Point-Clouds).
* AirSim APIs using [Python](python.md)
* [move on path](https://github.com/Microsoft/AirSim/wiki/moveOnPath-demo) demo showing video of fast flight through Modular Neighborhood environment
* [building a hexacopter](https://github.com/Microsoft/AirSim/wiki/hexacopter)
* [building point clouds](https://github.com/Microsoft/AirSim/wiki/Point-Clouds)
7 changes: 7 additions & 0 deletions docs/hello_drone.md
@@ -0,0 +1,7 @@
## How does Hello Drone work?

Hello Drone uses the RPC client to connect to the RPC server that is automatically started by AirSim.
The RPC server routes all commands to a class that implements [DroneControlBase](https://github.com/Microsoft/AirSim/blob/master/AirLib/include/controllers/DroneControllerBase.hpp).
In essence, DroneControlBase defines our abstract interface for getting data from the quadrotor and sending back commands.
We currently have a concrete implementation of DroneControlBase for MavLink-based vehicles. An implementation for DJI drone
platforms, specifically the Matrice, is in the works.
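
For reference, a stripped-down, hypothetical sketch of the client side is shown below; error handling and the full Hello Drone flow are omitted, and exact method signatures may differ slightly from this simplification (see the HelloDrone project for the real code):

```
// Minimal illustration: connect to the RPC server started by AirSim and issue
// a few commands, which the server routes to the DroneControlBase implementation.
#include "rpc/RpcLibClient.hpp"  // AirLib RPC client header; adjust path to your setup

int main()
{
    msr::airlib::RpcLibClient client;  // connects to the local RPC server by default

    client.armDisarm(true);
    client.takeoff(15 /* max_wait_seconds */);
    client.land(60 /* max_wait_seconds */);

    return 0;
}
```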