diff --git a/beamng_control/config/rviz/example.rviz b/beamng_control/config/rviz/example.rviz new file mode 100644 index 0000000..c3c2a8e --- /dev/null +++ b/beamng_control/config/rviz/example.rviz @@ -0,0 +1,279 @@ +Panels: + - Class: rviz/Displays + Help Height: 78 + Name: Displays + Property Tree Widget: + Expanded: + - /Global Options1 + - /Status1 + - /TF1 + - /TF1/Frames1 + - /TF1/Tree1 + - /MarkerArray1 + Splitter Ratio: 0.5 + Tree Height: 304 + - Class: rviz/Selection + Name: Selection + - Class: rviz/Tool Properties + Expanded: + - /2D Pose Estimate1 + - /2D Nav Goal1 + - /Publish Point1 + Name: Tool Properties + Splitter Ratio: 0.5886790156364441 + - Class: rviz/Views + Expanded: + - /Current View1 + Name: Views + Splitter Ratio: 0.5 + - Class: rviz/Time + Name: Time + SyncMode: 0 + SyncSource: PointCloud2 +Preferences: + PromptSaveOnExit: true +Toolbars: + toolButtonStyle: 2 +Visualization Manager: + Class: "" + Displays: + - Alpha: 0.5 + Cell Size: 1 + Class: rviz/Grid + Color: 160; 160; 164 + Enabled: true + Line Style: + Line Width: 0.029999999329447746 + Value: Lines + Name: Grid + Normal Cell Count: 0 + Offset: + X: 0 + Y: 0 + Z: 0 + Plane: XY + Plane Cell Count: 10 + Reference Frame: + Value: true + - Class: rviz/TF + Enabled: true + Filter (blacklist): "" + Filter (whitelist): "" + Frame Timeout: 15 + Frames: + All Enabled: false + ego_vehicle: + Value: true + ego_vehicle_front_cam_left: + Value: true + ego_vehicle_front_cam_right: + Value: true + ego_vehicle_lidar0: + Value: true + map: + Value: true + Marker Alpha: 1 + Marker Scale: 5 + Name: TF + Show Arrows: true + Show Axes: true + Show Names: true + Tree: + map: + ego_vehicle: + ego_vehicle_front_cam_left: + {} + ego_vehicle_front_cam_right: + {} + ego_vehicle_lidar0: + {} + Update Interval: 0 + Value: true + - Alpha: 1 + Autocompute Intensity Bounds: false + Autocompute Value Bounds: + Max Value: 10 + Min Value: -10 + Value: true + Axis: Z + Channel Name: intensity + Class: rviz/PointCloud2 + Color: 255; 255; 255 + Color Transformer: Intensity + Decay Time: 0 + Enabled: true + Invert Rainbow: true + Max Color: 255; 255; 255 + Max Intensity: 255 + Min Color: 0; 0; 0 + Min Intensity: 0 + Name: PointCloud2 + Position Transformer: XYZ + Queue Size: 10 + Selectable: true + Size (Pixels): 3 + Size (m): 0.05000000074505806 + Style: Flat Squares + Topic: /beamng_control/ego_vehicle/lidar0 + Unreliable: false + Use Fixed Frame: true + Use rainbow: true + Value: true + - Class: rviz/MarkerArray + Enabled: true + Marker Topic: /beamng_control/road_network + Name: MarkerArray + Namespaces: + beamng_control: true + Queue Size: 1 + Value: true + - Class: rviz/Image + Enabled: true + Image Topic: /beamng_control/ego_vehicle/front_cam_left/colour + Max Value: 1 + Median window: 5 + Min Value: 0 + Name: colour_left + Normalize Range: true + Queue Size: 2 + Transport Hint: raw + Unreliable: false + Value: true + - Class: rviz/Image + Enabled: true + Image Topic: /beamng_control/ego_vehicle/front_cam_right/colour + Max Value: 1 + Median window: 5 + Min Value: 0 + Name: colour_right + Normalize Range: true + Queue Size: 2 + Transport Hint: raw + Unreliable: false + Value: true + - Class: rviz/Image + Enabled: false + Image Topic: /beamng_control/ego_vehicle/front_cam_left/depth + Max Value: 1 + Median window: 5 + Min Value: 0 + Name: depth_left + Normalize Range: true + Queue Size: 2 + Transport Hint: raw + Unreliable: false + Value: false + - Class: rviz/Image + Enabled: false + Image Topic: 
/beamng_control/ego_vehicle/front_cam_left/annotation + Max Value: 1 + Median window: 5 + Min Value: 0 + Name: annotation_left + Normalize Range: true + Queue Size: 2 + Transport Hint: raw + Unreliable: false + Value: false + - Class: rviz/Image + Enabled: false + Image Topic: /beamng_control/ego_vehicle/front_cam_left/instance + Max Value: 1 + Median window: 5 + Min Value: 0 + Name: instance_left + Normalize Range: true + Queue Size: 2 + Transport Hint: raw + Unreliable: false + Value: false + - Class: rviz/Image + Enabled: false + Image Topic: /beamng_control/ego_vehicle/front_cam_left/bounding_box + Max Value: 1 + Median window: 5 + Min Value: 0 + Name: bbox_left + Normalize Range: true + Queue Size: 2 + Transport Hint: raw + Unreliable: false + Value: false + Enabled: true + Global Options: + Background Color: 48; 48; 48 + Default Light: true + Fixed Frame: ego_vehicle + Frame Rate: 30 + Name: root + Tools: + - Class: rviz/Interact + Hide Inactive Objects: true + - Class: rviz/MoveCamera + - Class: rviz/Select + - Class: rviz/FocusCamera + - Class: rviz/Measure + - Class: rviz/SetInitialPose + Theta std deviation: 0.2617993950843811 + Topic: /initialpose + X std deviation: 0.5 + Y std deviation: 0.5 + - Class: rviz/SetGoal + Topic: /move_base_simple/goal + - Class: rviz/PublishPoint + Single click: true + Topic: /clicked_point + Value: true + Views: + Current: + Class: rviz/Orbit + Distance: 69.4802017211914 + Enable Stereo Rendering: + Stereo Eye Separation: 0.05999999865889549 + Stereo Focal Distance: 1 + Swap Stereo Eyes: false + Value: false + Field of View: 0.7853981852531433 + Focal Point: + X: 0 + Y: 0 + Z: 0 + Focal Shape Fixed Size: true + Focal Shape Size: 0.05000000074505806 + Invert Z Axis: true + Name: Current View + Near Clip Distance: 0.009999999776482582 + Pitch: 0.6547950506210327 + Target Frame: + Yaw: 4.299472332000732 + Saved: ~ +Window Geometry: + Displays: + collapsed: false + Height: 1016 + Hide Left Dock: false + Hide Right Dock: false + QMainWindow State: 
000000ff00000000fd00000004000000000000018d000001d8fc0200000008fb0000001200530065006c0065006300740069006f006e00000001e10000009b0000005c00fffffffb0000001e0054006f006f006c002000500072006f007000650072007400690065007302000001ed000001df00000185000000a3fb000000120056006900650077007300200054006f006f02000001df000002110000018500000122fb000000200054006f006f006c002000500072006f0070006500720074006900650073003203000002880000011d000002210000017afc000001bf000001d8000000e60100001cfa000000040100000005fb0000001400640065007000740068005f006c0065006600740100000000ffffffff0000007d00fffffffb0000001e0061006e006e006f0074006100740069006f006e005f006c0065006600740100000000ffffffff0000009f00fffffffb0000001200620062006f0078005f006c0065006600740100000000ffffffff0000007600fffffffb0000001a0069006e007300740061006e00630065005f006c0065006600740100000000ffffffff0000008d00fffffffb000000100044006900730070006c0061007900730100000000000001fd0000015600fffffffb0000002000730065006c0065006300740069006f006e00200062007500660066006500720200000138000000aa0000023a00000294fb00000014005700690064006500530074006500720065006f02000000e6000000d2000003ee0000030bfb0000000c004b0069006e0065006300740200000186000001060000030c0000026100000001000001a8000001d8fc0200000003fb0000001e0054006f006f006c002000500072006f00700065007200740069006500730100000041000000780000000000000000fb0000000a0056006900650077007301000001bf000001d8000000a400fffffffb0000001200530065006c0065006300740069006f006e010000025a000000b2000000000000000000000002000007380000017cfc0100000003fb000000160063006f006c006f00750072005f006c0065006600740100000000000003e00000007f00fffffffb000000180063006f006c006f00750072005f0072006900670068007401000003e6000003520000008800fffffffb0000000a00560069006500770073030000004e00000080000002e10000019700000003000007380000003efc0100000002fb0000000800540069006d0065010000000000000738000003bc00fffffffb0000000800540069006d00650100000000000004500000000000000000000003f7000001d800000004000000040000000800000008fc0000000100000002000000010000000a0054006f006f006c00730100000000ffffffff0000000000000000 + Selection: + collapsed: false + Time: + collapsed: false + Tool Properties: + collapsed: false + Views: + collapsed: false + Width: 1848 + X: 72 + Y: 27 + annotation_left: + collapsed: false + bbox_left: + collapsed: false + colour_left: + collapsed: false + colour_right: + collapsed: false + depth_left: + collapsed: false + instance_left: + collapsed: false diff --git a/beamng_control/config/scenarios/.json b/beamng_control/config/scenarios/.json index b5ccc0d..8a2a2da 100644 --- a/beamng_control/config/scenarios/.json +++ b/beamng_control/config/scenarios/.json @@ -10,7 +10,7 @@ "model": "etk800", "position": [568.908, 13.422, 148.565], "rotation": [0, 0, 0, 1], - "sensors": [ + "sensors_classical": [ { "name":"damage0", "type":"Damage" @@ -22,28 +22,11 @@ { "name":"time0", "type":"Timer" - }, - { - "name": "front_cam", - "type": "Camera.default", - "position": [-0.3, 1, 1], - "rotation": [0, 1, 0] - }, - { - "name":"parking_sensor", - "type":"Ultrasonic.smallrange", - "position":[0, 1, 2], - "rotation":[0, 1, 0] }, { "name": "electrics0", "type": "Electrics" }, - { - "name": "lidar0", - "type": "Lidar.default", - "position": [0, 0, 1.7] - }, { "name": "node_imu", "type": "IMU", @@ -54,6 +37,26 @@ "type": "IMU", "position": [0.73, 0.51, 0.8] } + ], + "sensors_automation": [ + { + "name":"parking_sensor", + "type":"Ultrasonic.smallrange", + "position":[0, 1, 2], + "rotation":[0, 0] + }, + { + "name": "front_cam", + "type": "Camera.default", + "position": [-0.3, 1, 1], + "rotation": 
[0, 0] + }, + { + "name": "lidar0", + "type": "Lidar.default", + "position": [0, 0, 1.7], + "rotation": [0, 0] + } ] } ] diff --git a/beamng_control/config/scenarios/GridMap.json b/beamng_control/config/scenarios/GridMap.json index 3c1e476..32694d9 100644 --- a/beamng_control/config/scenarios/GridMap.json +++ b/beamng_control/config/scenarios/GridMap.json @@ -10,12 +10,12 @@ "model": "etk800", "position": [568.908, 13.422, 148.565], "rotation": [0, 0, 0, 1], - "sensors": [ + "sensors_automation": [ { "name": "front_cam", "type": "Camera.default", "position": [-0.3, 1, 1], - "rotation": [0, 1, 0] + "rotation": [0, 0] } ] } diff --git a/beamng_control/config/scenarios/simple_scenario.json b/beamng_control/config/scenarios/simple_scenario.json index 26d959f..65350c1 100644 --- a/beamng_control/config/scenarios/simple_scenario.json +++ b/beamng_control/config/scenarios/simple_scenario.json @@ -9,7 +9,7 @@ "model": "etk800", "position": [0, 0, 0], "rotation": [0, 0, 0, 1], - "sensors": [ + "sensors_classical": [ { "name":"damage0", "type":"Damage" @@ -22,21 +22,33 @@ "name":"gforce0", "type":"GForces" }, - { - "name":"parking_sensor", - "type":"Ultrasonic.smallrange", - "position":[0, 1, 2], - "rotation":[0, 1, 0] - }, { "name": "electrics0", "type": "Electrics" }, { + "name": "node_imu", + "type": "IMU", + "node": 0 + }, + { + "name": "position_imu", + "type": "IMU", + "position": [0.73, 0.51, 0.8] + } + ], + "sensors_automation": [ + { + "name":"parking_sensor", + "type":"Ultrasonic.smallrange", + "position":[0, 1, 2], + "rotation": [0, 0] + }, + { "name": "front_cam", "type": "Camera.default", "position": [-0.3, 1, 1], - "rotation": [0, 1, 0] + "rotation": [0, 0] }, { "name": "noise_cam", @@ -46,22 +58,13 @@ { "name": "lidar0", "type": "Lidar.default", - "position": [0, 0, 1.7] + "position": [0, 0, 1.7], + "rotation": [0, 0] }, { "name": "lidar_noise", "type" : "LidarNoise", "base sensor": "lidar0" - }, - { - "name": "node_imu", - "type": "IMU", - "node": 0 - }, - { - "name": "position_imu", - "type": "IMU", - "position": [0.73, 0.51, 0.8] } ] } diff --git a/beamng_control/config/scenarios/simple_scenario_cam.json b/beamng_control/config/scenarios/simple_scenario_cam.json index 148de86..8e3428a 100644 --- a/beamng_control/config/scenarios/simple_scenario_cam.json +++ b/beamng_control/config/scenarios/simple_scenario_cam.json @@ -9,13 +9,12 @@ "model": "etk800", "position": [0, 0, 0], "rotation": [0, 0, 0, 1], - "sensors": [ - + "sensors_automation": [ { "name": "front_cam", "type": "Camera.default", - "position": [-0.3, 1, 1], - "rotation": [0, 1, 0] + "position": [0.3, -2.1, 1], + "rotation": [0, 0] } ] } diff --git a/beamng_control/config/scenarios/simple_scenario_pickup.json b/beamng_control/config/scenarios/simple_scenario_pickup.json index 681dc07..1efb97d 100644 --- a/beamng_control/config/scenarios/simple_scenario_pickup.json +++ b/beamng_control/config/scenarios/simple_scenario_pickup.json @@ -9,13 +9,7 @@ "model": "pickup", "position": [0, 0, 0], "rotation": [0, 0, 0, 1], - "sensors": [ - { - "name": "front_camera", - "type": "Camera.default", - "position": [-0.3,1,1], - "rotation": [0,0,1,0] - }, + "sensors_classical": [ { "name": "imu0", "type": "IMU", @@ -33,26 +27,39 @@ "name":"gforce0", "type":"GForces" }, + { + "name": "electrics0", + "type": "Electrics" + }, + { + "name": "node_imu", + "type": "IMU", + "node": 0 + }, + { + "name": "position_imu", + "type": "IMU", + "position": [0.73, 0.51, 0.8] + } + ], + "sensors_automation": [ { "name":"parking_sensor", 
"type":"Ultrasonic.smallrange", "position":[0, 1, 2], - "rotation":[0, 1, 0] - }, - { - "name": "electrics0", - "type": "Electrics" + "rotation": [0, 0] }, { "name": "front_cam", "type": "Camera.default", - "position": [-0.3, 1, 1], - "rotation": [0, 1, 0] + "position": [-0.3, 2, 1], + "rotation": [0, 0] }, { "name": "lidar0", "type": "Lidar.default", - "position": [0, 0, 1.7] + "position": [0, 0, 1.7], + "rotation": [0, 0] }, { "name": "lidar_noise", @@ -63,16 +70,6 @@ "name": "noise_cam", "type" : "CameraNoise", "base sensor": "front_cam" - }, - { - "name": "node_imu", - "type": "IMU", - "node": 0 - }, - { - "name": "position_imu", - "type": "IMU", - "position": [0.73, 0.51, 0.8] } ] } diff --git a/beamng_control/config/scenarios/simple_scenario_with_lidar.json b/beamng_control/config/scenarios/simple_scenario_with_lidar.json index ba7f746..7f49a93 100644 --- a/beamng_control/config/scenarios/simple_scenario_with_lidar.json +++ b/beamng_control/config/scenarios/simple_scenario_with_lidar.json @@ -9,11 +9,12 @@ "model": "etk800", "position": [0, 0, 0], "rotation": [0, 0, 0, 1], - "sensors": [ + "sensors_automation": [ { "name": "lidar0", "type": "Lidar.default", - "position": [0, 0, 1.7] + "position": [0, 0, 1.7], + "rotation": [0, 0] } ] } diff --git a/beamng_control/config/scenarios/utah_scenario_with_cam.json b/beamng_control/config/scenarios/utah_scenario_with_cam.json index c0b7539..b1ca86b 100644 --- a/beamng_control/config/scenarios/utah_scenario_with_cam.json +++ b/beamng_control/config/scenarios/utah_scenario_with_cam.json @@ -7,18 +7,17 @@ "vehicles": [ { "name": "ego_vehicle", - "model": "etk800", - "position": [568.908, 13.422, 139.565], + "model": "pickup", + "position": [537.24, 25.95, 142.16], "rotation": [0, 0, 0, 1], - "sensors": [ + "sensors_automation": [ { "name": "front_cam", "type": "Camera.default", - "position": [-0.3, 1, 1], - "rotation": [0, 1, 0] + "position": [0.3, -2.5, 1], + "rotation": [0, 0] } ] } ] } - diff --git a/beamng_control/config/scenarios/utah_scenario_with_cam_and_lidar.json b/beamng_control/config/scenarios/utah_scenario_with_cam_and_lidar.json new file mode 100644 index 0000000..9e8af5c --- /dev/null +++ b/beamng_control/config/scenarios/utah_scenario_with_cam_and_lidar.json @@ -0,0 +1,35 @@ +{ + "version": 0.1, + "level": "Utah", + "name": "road_network", + "mode": "None", + "network_vizualization": "on", + "vehicles": [ + { + "name": "ego_vehicle", + "model": "pickup", + "position": [537.24, 25.95, 142.16], + "rotation": [0, 0, 0, 1], + "sensors_automation": [ + { + "name": "front_cam_left", + "type": "Camera.default", + "position": [0.5, -2.3, 1], + "rotation": [0, 0] + }, + { + "name": "front_cam_right", + "type": "Camera.default", + "position": [-0.5, -2.3, 1], + "rotation": [0, 0] + }, + { + "name": "lidar0", + "type": "Lidar.default", + "position": [0, 0, 2.0], + "rotation": [0, 0] + } + ] + } + ] +} diff --git a/beamng_control/config/scenarios/utah_scenario_with_lidar.json b/beamng_control/config/scenarios/utah_scenario_with_lidar.json index d1e17d8..9b7c2b7 100644 --- a/beamng_control/config/scenarios/utah_scenario_with_lidar.json +++ b/beamng_control/config/scenarios/utah_scenario_with_lidar.json @@ -10,14 +10,14 @@ "model": "etk800", "position": [568.908, 13.422, 139.565], "rotation": [0, 0, 0, 1], - "sensors": [ + "sensors_automation": [ { "name": "lidar0", "type": "Lidar.default", - "position": [0, 0, 1.7] + "position": [0, 0, 1.7], + "rotation": [0, 0] } ] } ] -} - +} \ No newline at end of file diff --git 
a/beamng_control/config/scenarios/utah_with_all_sensors.json b/beamng_control/config/scenarios/utah_with_all_sensors.json index 290189b..7ed4044 100644 --- a/beamng_control/config/scenarios/utah_with_all_sensors.json +++ b/beamng_control/config/scenarios/utah_with_all_sensors.json @@ -10,7 +10,7 @@ "model": "etk800", "position": [568.908, 13.422, 139.565], "rotation": [0, 0, 0, 1], - "sensors": [ + "sensors_classical": [ { "name":"damage0", "type":"Damage" @@ -22,28 +22,11 @@ { "name":"time0", "type":"Timer" - }, - { - "name": "front_cam", - "type": "Camera.default", - "position": [-0.3, 1, 1], - "rotation": [0, 1, 0] - }, - { - "name":"parking_sensor", - "type":"Ultrasonic.smallrange", - "position":[0, 1, 2], - "rotation":[0, 1, 0] }, { "name": "electrics0", "type": "Electrics" }, - { - "name": "lidar0", - "type": "Lidar.default", - "position": [0, 0, 1.7] - }, { "name": "node_imu", "type": "IMU", @@ -54,6 +37,26 @@ "type": "IMU", "position": [0.73, 0.51, 0.8] } + ], + "sensors_automation": [ + { + "name":"parking_sensor", + "type":"Ultrasonic.smallrange", + "position":[0, 1, 2], + "rotation": [0, 0] + }, + { + "name": "front_cam", + "type": "Camera.default", + "position": [-0.3, 1, 1], + "rotation": [0, 0] + }, + { + "name": "lidar0", + "type": "Lidar.default", + "position": [0, 0, 1.7], + "rotation": [0, 0] + } ] } ] diff --git a/beamng_control/config/scenarios/weather.json b/beamng_control/config/scenarios/weather.json index 601aabc..5912589 100644 --- a/beamng_control/config/scenarios/weather.json +++ b/beamng_control/config/scenarios/weather.json @@ -11,7 +11,6 @@ "model": "etk800", "position": [-124, 142, 465.3], "rotation": [0, 0, 0, 1] - } ] } \ No newline at end of file diff --git a/beamng_control/config/scenarios/weather_with_sensors.json b/beamng_control/config/scenarios/weather_with_sensors.json index 6fbf86d..f684727 100644 --- a/beamng_control/config/scenarios/weather_with_sensors.json +++ b/beamng_control/config/scenarios/weather_with_sensors.json @@ -11,7 +11,7 @@ "model": "etk800", "position": [-124, 142, 465.3], "rotation": [0, 0, 0, 1], - "sensors": [ + "sensors_classical": [ { "name":"damage0", "type":"Damage" @@ -24,21 +24,33 @@ "name":"gforce0", "type":"GForces" }, - { - "name":"parking_sensor", - "type":"Ultrasonic.smallrange", - "position":[0, 1, 2], - "rotation":[0, 1, 0] - }, { "name": "electrics0", "type": "Electrics" }, { + "name": "node_imu", + "type": "IMU", + "node": 0 + }, + { + "name": "position_imu", + "type": "IMU", + "position": [0.73, 0.51, 0.8] + } + ], + "sensors_automation": [ + { + "name":"parking_sensor", + "type":"Ultrasonic.smallrange", + "position":[0, 1, 2], + "rotation": [0, 0] + }, + { "name": "front_cam", "type": "Camera.default", - "position": [-0.3, 1, 1], - "rotation": [0, 1, 0] + "position": [0.3, -2.1, 1], + "rotation": [0, 0] }, { "name": "noise_cam", @@ -48,24 +60,15 @@ { "name": "lidar0", "type": "Lidar.default", - "position": [0, 0, 1.7] + "position": [0, 0, 1.7], + "rotation": [0, 0] }, { "name": "lidar_noise", "type" : "LidarNoise", "base sensor": "lidar0" - }, - { - "name": "node_imu", - "type": "IMU", - "node": 0 - }, - { - "name": "position_imu", - "type": "IMU", - "position": [0.73, 0.51, 0.8] } - ] + ] } ] } \ No newline at end of file diff --git a/beamng_control/config/scenarios/west_coast_with_all_sensors.json b/beamng_control/config/scenarios/west_coast_with_all_sensors.json index acc142c..6c27b27 100644 --- a/beamng_control/config/scenarios/west_coast_with_all_sensors.json +++ 
b/beamng_control/config/scenarios/west_coast_with_all_sensors.json @@ -10,7 +10,7 @@ "model": "etk800", "position": [568.908, 13.422, 148.565], "rotation": [0, 0, 0, 1], - "sensors": [ + "sensors_classical": [ { "name":"damage0", "type":"Damage" @@ -22,33 +22,35 @@ { "name":"time0", "type":"Timer" - }, + }, { - "name": "front_cam", - "type": "Camera.default", - "position": [-0.3, 1, 1], - "rotation": [0, 1, 0] + "name": "electrics0", + "type": "Electrics" }, + { + "name": "position_imu", + "type": "IMU", + "position": [0.73, 0.51, 0.8] + } + ], + "sensors_automation": [ { "name":"parking_sensor", "type":"Ultrasonic.smallrange", "position":[0, 1, 2], - "rotation":[0, 1, 0] + "rotation": [0, 0] }, { - "name": "electrics0", - "type": "Electrics" + "name": "front_cam", + "type": "Camera.default", + "position": [0.3, -2.1, 1], + "rotation": [0, 0] }, - { + { "name": "lidar0", "type": "Lidar.default", - "position": [0, 0, 1.7] - }, - - { - "name": "position_imu", - "type": "IMU", - "position": [0.73, 0.51, 0.8] + "position": [0, 0, 1.7], + "rotation": [0, 0] } ] } diff --git a/beamng_control/config/scenarios/west_coast_with_damage.json b/beamng_control/config/scenarios/west_coast_with_damage.json index b14f75e..3221bab 100644 --- a/beamng_control/config/scenarios/west_coast_with_damage.json +++ b/beamng_control/config/scenarios/west_coast_with_damage.json @@ -10,7 +10,7 @@ "model": "etk800", "position": [568.908, 13.422, 148.565], "rotation": [0, 0, 0, 1], - "sensors": [ + "sensors_classical": [ { "name":"damage0", "type":"Damage" diff --git a/beamng_control/config/scenarios/west_coast_with_electrics.json b/beamng_control/config/scenarios/west_coast_with_electrics.json index dd985db..bc0452e 100644 --- a/beamng_control/config/scenarios/west_coast_with_electrics.json +++ b/beamng_control/config/scenarios/west_coast_with_electrics.json @@ -10,7 +10,7 @@ "model": "etk800", "position": [568.908, 13.422, 148.565], "rotation": [0, 0, 0, 1], - "sensors": [ + "sensors_classical": [ { "name": "electrics0", "type": "Electrics" diff --git a/beamng_control/config/scenarios/west_coast_with_electrics_and_lidar.json b/beamng_control/config/scenarios/west_coast_with_electrics_and_lidar.json index ca6698d..d55ae01 100644 --- a/beamng_control/config/scenarios/west_coast_with_electrics_and_lidar.json +++ b/beamng_control/config/scenarios/west_coast_with_electrics_and_lidar.json @@ -10,16 +10,19 @@ "model": "etk800", "position": [568.908, 13.422, 148.565], "rotation": [0, 0, 0, 1], - "sensors": [ + "sensors_classical": [ { "name": "electrics0", "type": "Electrics" - }, + } + ], + "sensors_automation": [ { "name": "lidar0", "type": "Lidar.default", - "position": [0, 0, 1.7] + "position": [0, 0, 1.7], + "rotation": [0, 0] } ] } diff --git a/beamng_control/config/scenarios/west_coast_with_gforce.json b/beamng_control/config/scenarios/west_coast_with_gforce.json index 98ada4f..36009d0 100644 --- a/beamng_control/config/scenarios/west_coast_with_gforce.json +++ b/beamng_control/config/scenarios/west_coast_with_gforce.json @@ -10,7 +10,7 @@ "model": "etk800", "position": [568.908, 13.422, 148.565], "rotation": [0, 0, 0, 1], - "sensors": [ + "sensors_classical": [ { "name":"gforce0", "type":"GForces" diff --git a/beamng_control/config/scenarios/west_coast_with_imu.json b/beamng_control/config/scenarios/west_coast_with_imu.json index ed58913..370994d 100644 --- a/beamng_control/config/scenarios/west_coast_with_imu.json +++ b/beamng_control/config/scenarios/west_coast_with_imu.json @@ -10,7 +10,7 @@ "model": "etk800", "position": 
[568.908, 13.422, 148.565], "rotation": [0, 0, 0, 1], - "sensors": [ + "sensors_classical": [ { "name": "position_imu", "type": "IMU", diff --git a/beamng_control/config/scenarios/west_coast_with_imu_node.json b/beamng_control/config/scenarios/west_coast_with_imu_node.json index ee18f2f..f63e00b 100644 --- a/beamng_control/config/scenarios/west_coast_with_imu_node.json +++ b/beamng_control/config/scenarios/west_coast_with_imu_node.json @@ -10,11 +10,11 @@ "model": "etk800", "position": [568.908, 13.422, 148.565], "rotation": [0, 0, 0, 1], - "sensors": [ + "sensors_classical": [ { "name": "node_imu", "type": "IMU", - "node": 0, + "node": 0 } ] } diff --git a/beamng_control/config/scenarios/west_coast_with_lidar.json b/beamng_control/config/scenarios/west_coast_with_lidar.json index 445e90c..f93b151 100644 --- a/beamng_control/config/scenarios/west_coast_with_lidar.json +++ b/beamng_control/config/scenarios/west_coast_with_lidar.json @@ -7,14 +7,15 @@ "vehicles": [ { "name": "ego_vehicle", - "model": "etk800", + "model": "pickup", "position": [568.908, 13.422, 148.565], "rotation": [0, 0, 0, 1], - "sensors": [ + "sensors_automation": [ { "name": "lidar0", "type": "Lidar.default", - "position": [0, 0, 1.7] + "position": [0, 0, 2.0], + "rotation": [0, 0] } ] } diff --git a/beamng_control/config/scenarios/west_coast_with_sensors.json b/beamng_control/config/scenarios/west_coast_with_sensors.json index 53ebbc8..511dd31 100644 --- a/beamng_control/config/scenarios/west_coast_with_sensors.json +++ b/beamng_control/config/scenarios/west_coast_with_sensors.json @@ -10,7 +10,7 @@ "model": "etk800", "position": [568.908, 13.422, 148.565], "rotation": [0, 0, 0, 1], - "sensors": [ + "sensors_classical": [ { "name":"damage0", diff --git a/beamng_control/config/scenarios/west_coast_with_time.json b/beamng_control/config/scenarios/west_coast_with_time.json index ed73456..c346762 100644 --- a/beamng_control/config/scenarios/west_coast_with_time.json +++ b/beamng_control/config/scenarios/west_coast_with_time.json @@ -10,7 +10,7 @@ "model": "etk800", "position": [568.908, 13.422, 148.565], "rotation": [0, 0, 0, 1], - "sensors": [ + "sensors_classical": [ { "name":"time0", diff --git a/beamng_control/config/scenarios/west_coast_with_ultrasonic.json b/beamng_control/config/scenarios/west_coast_with_ultrasonic.json index ddf02e2..434bc2b 100644 --- a/beamng_control/config/scenarios/west_coast_with_ultrasonic.json +++ b/beamng_control/config/scenarios/west_coast_with_ultrasonic.json @@ -10,12 +10,12 @@ "model": "etk800", "position": [568.908, 13.422, 148.565], "rotation": [0, 0, 0, 1], - "sensors": [ + "sensors_automation": [ { "name":"parking_sensor", "type":"Ultrasonic.smallrange", "position":[0, 1, 2], - "rotation":[0, 1, 0] + "rotation": [0, 0] } ] } diff --git a/beamng_control/config/scenarios/west_scenario_with_cam.json b/beamng_control/config/scenarios/west_scenario_with_cam.json index 7d6ec5f..8379508 100644 --- a/beamng_control/config/scenarios/west_scenario_with_cam.json +++ b/beamng_control/config/scenarios/west_scenario_with_cam.json @@ -10,12 +10,12 @@ "model": "etk800", "position": [568.908, 13.422, 148.565], "rotation": [0, 0, 0, 1], - "sensors": [ + "sensors_automation": [ { "name": "front_cam", "type": "Camera.default", - "position": [-0.3, 1, 1], - "rotation": [0, 1, 0] + "position": [0.3, -2.1, 1], + "rotation": [0, 0] } ] } diff --git a/beamng_control/config/sensors.json b/beamng_control/config/sensors.json index f1e27f3..cd04d61 100644 --- a/beamng_control/config/sensors.json +++ 
b/beamng_control/config/sensors.json @@ -5,23 +5,24 @@ 256, 256 ], - "fov":120, - "colour": true, - "depth": true, - "annotation": true, - "instance": true, - "bounding_box":false + "field_of_view_y": 90, + "is_render_colours": true, + "is_render_depth": false, + "is_render_annotations": false, + "is_render_instance": false, + "bounding_box": false, + "requested_update_time": 0.1 } }, "Lidar": { "default": { - "vertical_resolution": 32, + "is_visualised": false, + "vertical_resolution": 64, "vertical_angle": 26.9, - "rps": 2200000, - "hz": 20, - "angle": 360, - "max_distance": 10, - "direction": [0, -1, 0] + "rays_per_second": 1310720, + "frequency": 10, + "horizontal_angle": 360, + "max_distance": 85 } }, "Ultrasonic": { diff --git a/beamng_control/config/vehicles/etk800.json b/beamng_control/config/vehicles/etk800.json index 39632f3..dd80cf5 100644 --- a/beamng_control/config/vehicles/etk800.json +++ b/beamng_control/config/vehicles/etk800.json @@ -23,7 +23,8 @@ 0.73, 0.51, 0.8 - ] + ], + "rotation": [0, 0] }, { "name": "lidar", @@ -32,7 +33,8 @@ 0, 0, 1.7 - ] + ], + "rotation": [0, 0] } ] } \ No newline at end of file diff --git a/beamng_control/config/vehicles/pickup.json b/beamng_control/config/vehicles/pickup.json index b203cac..fad769e 100644 --- a/beamng_control/config/vehicles/pickup.json +++ b/beamng_control/config/vehicles/pickup.json @@ -4,17 +4,8 @@ { "name": "front_camera", "type": "Camera.default", - "position": [ - -0.3, - 1, - 1 - ], - "rotation": [ - 0, - 0, - 1, - 0 - ] + "position": [-0.3, 2, 1], + "rotation": [0, 0] }, { "name": "imu0", @@ -23,7 +14,8 @@ 0.73, 0.51, 0.8 - ] + ], + "rotation": [0, 0] }, { "name": "lidar", @@ -32,7 +24,8 @@ 0, 0, 1.7 - ] + ], + "rotation": [0, 0] } ] } \ No newline at end of file diff --git a/beamng_control/launch/example.launch b/beamng_control/launch/example.launch index 69e4327..6d150a1 100644 --- a/beamng_control/launch/example.launch +++ b/beamng_control/launch/example.launch @@ -1,9 +1,10 @@ - + - + + diff --git a/beamng_control/scripts/beamng_control/bridge.py b/beamng_control/scripts/beamng_control/bridge.py index 020162f..d70e868 100644 --- a/beamng_control/scripts/beamng_control/bridge.py +++ b/beamng_control/scripts/beamng_control/bridge.py @@ -3,18 +3,24 @@ import sys import json +import copy from pathlib import Path from distutils.version import LooseVersion +import numpy as np import rospy import rospkg import actionlib +import tf +import tf2_ros +from tf.transformations import quaternion_from_euler, quaternion_multiply +import geometry_msgs.msg import beamngpy as bngpy import beamng_msgs.msg as bng_msgs import beamng_msgs.srv as bng_srv -from beamng_control.publishers import VehiclePublisher, NetworkPublisher +from beamng_control.publishers import VehiclePublisher, NetworkPublisher, get_sensor_publisher from beamng_control.sensorHelper import get_sensor MIN_BNG_VERSION_REQUIRED = '0.18.0' @@ -24,7 +30,7 @@ def load_json(file_name): pkg_path = rospkg.RosPack().get_path(NODE_NAME) file_path = Path(file_name).resolve() - relative_fp = Path(str(pkg_path)+'/'+str(file_path)) + relative_fp = Path(str(pkg_path) + '/' + str(file_path)) if not file_path.is_file() and relative_fp.is_file(): file_path = relative_fp with file_path.open('r') as fh: @@ -48,6 +54,9 @@ def __init__(self, host, port, sensor_paths=None): self._setup_services() self._publishers = list() + self._vehicle_publisher = None + self._static_tf_frames: list = [] + self._static_tf_broadcaster = tf2_ros.StaticTransformBroadcaster() 
self._setup_sensor_defs(sensor_paths) self._stepAS = actionlib.SimpleActionServer(f'{NODE_NAME}/step', @@ -58,13 +67,14 @@ def __init__(self, host, port, sensor_paths=None): self._stepAS.start() self._marker_idx = 0 + self.network_publisher = None def _setup_sensor_defs(self, sensor_paths): default_path = ['/config/sensors.json'] sensor_paths = default_path if not sensor_paths else sensor_paths print("------------") print("------------") - print ("sensor_paths",sensor_paths) + print("sensor_paths", sensor_paths) print("------------") print("------------") sensor_defs = dict() @@ -106,23 +116,24 @@ def get_marker_idx(self): self._marker_idx += 1 return m - def get_vehicle_from_dict(self, v_spec): - vehicle = bngpy.Vehicle(v_spec['name'], v_spec['model']) + def get_sensor_classical_from_dict(self, v_spec, vehicle): sensor_collection = list() noise_sensors = list() - if 'sensors' in v_spec: - for spec in v_spec['sensors']: + if 'sensors_classical' in v_spec: + for spec in v_spec['sensors_classical']: if 'base sensor' in spec: noise_sensors.append(spec) else: sensor_collection.append(spec) - rospy.logdebug(f'sensors: {sensor_collection}') - rospy.logdebug(f'noise: {noise_sensors}') + rospy.logdebug(f'sensors_classical: {sensor_collection}') + rospy.logdebug(f'noise_classical: {noise_sensors}') for s_spec in sensor_collection: s_name = s_spec.pop('name') s_type = s_spec.pop('type') rospy.logdebug(f'Attempting to set up {s_type} sensor.') - sensor = get_sensor(s_type, + sensor, _ = get_sensor(s_type, self._sensor_defs, + bng=self.game_client, + vehicle=vehicle, dyn_sensor_properties=s_spec) vehicle.attach_sensor(s_name, sensor) @@ -136,13 +147,92 @@ rospy.logerr(f'Could not find sensor with id {sensor} to ' f'generate noise sensor of type {n_type}') n_spec['sensor'] = sensor - noise = get_sensor(n_type, + noise, _ = get_sensor(n_type, self._sensor_defs, + bng=self.game_client, dyn_sensor_properties=n_spec) vehicle.attach_sensor(n_name, noise) return vehicle - def _scenario_from_json(self, file_name): + @staticmethod + def get_stamped_static_tf_frame(translation, rotation, vehicle_name: str, sensor_name: str): + static_transform_stamped = geometry_msgs.msg.TransformStamped() + static_transform_stamped.header.frame_id = vehicle_name + static_transform_stamped.child_frame_id = f"{vehicle_name}_{sensor_name}" + + static_transform_stamped.transform.translation.x = float(translation[0]) + static_transform_stamped.transform.translation.y = float(translation[1]) + static_transform_stamped.transform.translation.z = float(translation[2]) + + quat = tf.transformations.quaternion_from_euler(float(0), + float(rotation[0]), + float(rotation[1])) # RPY to convert + + alignment_quat = np.array([0, 0, 0, 1]) # identity, keeps the sensor frame aligned with the vehicle frame + # alignment_quat = np.array([0, 1, 0, 0]) # sets the forward direction as -y + quat = quaternion_multiply(alignment_quat, quat) + quat /= np.linalg.norm(quat) + static_transform_stamped.transform.rotation.x = quat[0] + static_transform_stamped.transform.rotation.y = quat[1] + static_transform_stamped.transform.rotation.z = quat[2] + static_transform_stamped.transform.rotation.w = quat[3] + return static_transform_stamped + + def set_sensor_automation_from_dict(self, scenario_spec, vehicle_list): + for v_spec, vehicle in zip(scenario_spec['vehicles'], vehicle_list): + sensor_collection = list() + noise_sensors = list() + if 'sensors_automation' in v_spec: + for spec in v_spec['sensors_automation']: + if 'base sensor' in spec: + noise_sensors.append(spec)
+ else: + sensor_collection.append(spec) + rospy.logdebug(f'sensors_automation: {sensor_collection}') + rospy.logdebug(f'noise_automation: {noise_sensors}') + for s_spec in sensor_collection: + dyn_spec = copy.deepcopy(s_spec) + dyn_spec.pop("name") + dyn_spec.pop("type") + s_type = s_spec["type"] + name = s_spec["name"] + + rospy.logdebug(f'Attempting to set up {s_type} sensor.') + sensor, sensor_publisher = get_sensor(s_type, + self._sensor_defs, + bng=self.game_client, + vehicle=vehicle, + name=name, + dyn_sensor_properties=dyn_spec) + if sensor_publisher is not None: + static_sensor_frame = self.get_stamped_static_tf_frame(translation=s_spec['position'], + rotation=s_spec['rotation'], + vehicle_name=vehicle.vid, + sensor_name=name) + self._static_tf_frames.append(static_sensor_frame) + self._publishers.append(sensor_publisher(sensor, f"{NODE_NAME}/{vehicle.vid}/{name}", vehicle)) + # for n_spec in noise_sensors: + # n_name = n_spec.pop('name') + # n_type = n_spec.pop('type') + # sensor = n_spec.pop('base sensor') + # if sensor in vehicle.sensors: + # sensor = vehicle.sensors[sensor] + # else: + # rospy.logerr(f'Could not find sensor with id {sensor} to ' + # f'generate noise sensor of type {n_type}') + # n_spec['sensor'] = sensor + # noise = get_sensor(n_type, + # self._sensor_defs, + # dyn_sensor_properties=n_spec) + # vehicle.attach_sensor(n_name, noise) + + @staticmethod + def get_vehicle_from_dict(v_spec): + vehicle = bngpy.Vehicle(v_spec['name'], v_spec['model']) + return vehicle + + @staticmethod + def _scenario_from_json(file_name): try: scenario_spec = load_json(file_name) except FileNotFoundError: @@ -152,41 +242,49 @@ def _scenario_from_json(self, file_name): return scenario_spec def decode_scenario(self, scenario_spec): + vehicle_list = list() scenario = bngpy.Scenario(scenario_spec.pop('level'), scenario_spec.pop('name')) for v_spec in scenario_spec['vehicles']: vehicle = self.get_vehicle_from_dict(v_spec) - self._publishers.append(VehiclePublisher(vehicle, NODE_NAME)) # todo markers need to be added somwhere else - scenario.add_vehicle(vehicle, pos=v_spec['position'], - rot_quat=v_spec['rotation']) + # set up classical sensors + vehicle = self.get_sensor_classical_from_dict(v_spec, vehicle) + self._vehicle_publisher = VehiclePublisher(vehicle, NODE_NAME) # we need this to be published first for tf + scenario.add_vehicle(vehicle, + pos=v_spec['position'], + rot_quat=v_spec['rotation'] + ) + vehicle_list.append(vehicle) on_scenario_start = list() wp_key = 'weather_presets' if wp_key in scenario_spec.keys(): def weather_presets(): self.game_client.set_weather_preset(scenario_spec[wp_key]) + on_scenario_start.append(weather_presets) if 'time_of_day' in scenario_spec.keys(): def tod(): self.game_client.set_tod(scenario_spec['time_of_day']) + on_scenario_start.append(tod) net_viz_key = 'network_vizualization' if net_viz_key in scenario_spec and scenario_spec[net_viz_key] == 'on': - self._publishers.append(NetworkPublisher(self.game_client, - NODE_NAME)) - return scenario, on_scenario_start + self._publishers.append(NetworkPublisher(self.game_client, NODE_NAME)) + return scenario, on_scenario_start, vehicle_list def start_scenario(self, file_name): self._publishers = list() scenario_spec = self._scenario_from_json(file_name) if not scenario_spec: return - scenario, on_scenario_start = self.decode_scenario(scenario_spec) + scenario, on_scenario_start, vehicle_list = self.decode_scenario(scenario_spec) scenario.make(self.game_client) self.game_client.load_scenario(scenario) 
self.game_client.start_scenario() - + # Automation sensors need to be set up after the scenario starts + self.set_sensor_automation_from_dict(scenario_spec, vehicle_list) for hook in on_scenario_start: hook() @@ -311,9 +409,9 @@ def step(self, goal): feedback = bng_msgs.StepFeedback() step_counter = 0 - imax = goal.total_number_of_steps//goal.feedback_cycle_size + imax = goal.total_number_of_steps // goal.feedback_cycle_size rest = goal.total_number_of_steps % goal.feedback_cycle_size - imax = imax+1 if rest else imax + imax = imax + 1 if rest else imax for i in range(0, imax): if self._stepAS.is_preempt_requested(): @@ -323,7 +421,7 @@ def step(self, goal): f"{step_counter}/{goal.total_number_of_steps} " "steps") break - steps_to_finish = goal.total_number_of_steps-step_counter + steps_to_finish = goal.total_number_of_steps - step_counter step_size = min(steps_to_finish, goal.feedback_cycle_size) self.game_client.step(step_size) step_counter += step_size @@ -359,12 +457,20 @@ def get_roads(self): return network def work(self): - rate = rospy.Rate(10) # todo increase - while not rospy.is_shutdown(): - if self.running: + ros_rate = 10 + rate = rospy.Rate(ros_rate) # todo add threading + if self.running: + + while not rospy.is_shutdown(): + current_time = rospy.Time.now() + for static_tf in self._static_tf_frames: + static_tf.header.stamp = current_time + self._static_tf_broadcaster.sendTransform(static_tf) + if self._vehicle_publisher is not None: + self._vehicle_publisher.publish(current_time) for pub in self._publishers: - pub.publish() - rate.sleep() + pub.publish(current_time) + rate.sleep() def on_shutdown(self): rospy.loginfo("Shutting down beamng_control/bridge.py node") @@ -383,10 +489,10 @@ def main(): f' version is {available_version}, aborting process.') sys.exit(1) - # bngpy.setup_logging() + # bngpy.setup_logging() argv = rospy.myargv(argv=sys.argv) - rospy.loginfo("cmd args"+str(argv)) + rospy.loginfo("cmd args" + str(argv)) params = rospy.get_param("beamng") if not ('host' in params.keys() and 'port' in params.keys()): diff --git a/beamng_control/scripts/beamng_control/publishers.py b/beamng_control/scripts/beamng_control/publishers.py index 59368ca..820ebbe 100644 --- a/beamng_control/scripts/beamng_control/publishers.py +++ b/beamng_control/scripts/beamng_control/publishers.py @@ -1,19 +1,30 @@ from abc import ABC, abstractmethod import rospy +import numpy as np + +np.float = np.float64 # temp fix for following import +import ros_numpy +from sensor_msgs.point_cloud2 import PointCloud2 +import tf +import tf2_ros from cv_bridge import CvBridge, CvBridgeError import numpy as np +from cv_bridge import CvBridge + +import threading import sensor_msgs -import sensor_msgs.point_cloud2 as pc2 import geometry_msgs.msg as geom_msgs import std_msgs.msg +from tf.transformations import quaternion_from_euler, quaternion_multiply import beamngpy.sensors as bng_sensors -#from beamngpy.noise import RandomImageNoise, RandomLIDARNoise +# from beamngpy.noise import RandomImageNoise, RandomLIDARNoise from visualization_msgs.msg import Marker, MarkerArray import beamng_msgs.msg as bng_msgs +from beamngpy.sensors import Camera def get_sensor_publisher(sensor): @@ -27,8 +38,8 @@ def get_sensor_publisher(sensor): bng_sensors.Electrics: ElectricsPublisher, bng_sensors.Camera: CameraPublisher, bng_sensors.Lidar: LidarPublisher, - # RandomImageNoise: CameraPublisher, - #s RandomLIDARNoise: LidarPublisher + # RandomImageNoise: CameraPublisher, + # s RandomLIDARNoise: LidarPublisher } for k, v in 
sensor_mapping.items(): if isinstance(sensor, k): @@ -39,7 +50,7 @@ def get_sensor_publisher(sensor): class BNGPublisher(ABC): @abstractmethod - def publish(self): + def publish(self, current_time): pass @@ -51,12 +62,15 @@ def __init__(self, sensor, topic_id, msg_type): self._pub = rospy.Publisher(topic_id, msg_type, queue_size=1) + self.current_time = rospy.get_rostime() + self.frame_map = 'map' @abstractmethod def _make_msg(self): pass - def publish(self): + def publish(self, current_time): + self.current_time = current_time msg = self._make_msg() self._pub.publish(msg) @@ -149,11 +163,11 @@ def _make_msg(self): data = self._sensor.data msg = sensor_msgs.msg.Imu() msg.orientation = geom_msgs.Quaternion(0, 0, 0, 0) - msg.orientation_covariance = [-1, ]*9 + msg.orientation_covariance = [-1, ] * 9 msg.angular_velocity = geom_msgs.Vector3(*[data[x] for x in ['aX', 'aY', 'aZ']]) - msg.angular_velocity_covariance = [-1, ]*9 + msg.angular_velocity_covariance = [-1, ] * 9 msg.linear_acceleration = geom_msgs.Vector3(*[data[x] for x in ['gX', 'gY', 'gZ']]) - msg.linear_acceleration_covariance = [-1, ]*9 + msg.linear_acceleration_covariance = [-1, ] * 9 return msg @@ -259,7 +273,28 @@ def _make_msg(self): return msg -class ColorImgPublisher(SensorDataPublisher): +class CameraDataPublisher: + + def __init__(self, sensor, topic_id, msg_type): + rospy.logdebug(f'publishing to {topic_id}') + self._sensor = sensor + self._pub = rospy.Publisher(topic_id, + msg_type, + queue_size=1) + self.current_time = rospy.get_rostime() + self.frame_map = 'map' + + @abstractmethod + def _make_msg(self, data): + pass + + def publish(self, current_time, data): + self.current_time = current_time + msg = self._make_msg(data) + self._pub.publish(msg) + + +class ColorImgPublisher(CameraDataPublisher): def __init__(self, sensor, topic_id, cv_helper, data_descriptor): super().__init__(sensor, @@ -268,11 +303,13 @@ def __init__(self, sensor, topic_id, cv_helper, data_descriptor): self._cv_helper = cv_helper self._data_descriptor = data_descriptor - def _make_msg(self): - data = self._sensor.data - img = data[self._data_descriptor].convert('RGB') - img = np.array(img) - img = img[:, :, ::-1].copy() + def _make_msg(self, data): + img = data[self._data_descriptor] + if img is not None: + img = np.array(img.convert('RGB')) + img = img[:, :, ::-1].copy() + else: + img = np.zeros_like(data['colour'].convert('RGB')) try: img = self._cv_helper.cv2_to_imgmsg(img, 'bgr8') except CvBridgeError as e: @@ -280,7 +317,7 @@ def _make_msg(self): return img -class DepthImgPublisher(SensorDataPublisher): +class DepthImgPublisher(CameraDataPublisher): def __init__(self, sensor, topic_id, cv_helper): super().__init__(sensor, @@ -288,11 +325,10 @@ def __init__(self, sensor, topic_id, cv_helper): sensor_msgs.msg.Image) self._cv_helper = cv_helper - def _make_msg(self): - data = self._sensor.data + def _make_msg(self, data): img = data['depth'] - near, far = self._sensor.near_far - img = (np.array(img)-near)/far*255 + near, far = self._sensor.near_far_planes + img = (np.array(img) - near) / far * 255 img = img.astype(np.uint8) try: img = self._cv_helper.cv2_to_imgmsg(img, 'mono8') @@ -301,7 +337,7 @@ def _make_msg(self): return img -class BBoxImgPublisher(SensorDataPublisher): +class BBoxImgPublisher(CameraDataPublisher): def __init__(self, sensor, topic_id, cv_helper, vehicle): super().__init__(sensor, @@ -311,23 +347,22 @@ def __init__(self, sensor, topic_id, cv_helper, vehicle): self._vehicle = vehicle self._classes = None - def 
_update_data_with_bbox(self): + def _update_data_with_bbox(self, data): if self._classes is None: annotations = self._vehicle.bng.get_annotations() self._classes = self._vehicle.bng.get_annotation_classes(annotations) - data = self._sensor.data - bboxes = bng_sensors.Camera.extract_bboxes(data['annotation'], - data['instance'], - self._classes) + bboxes = Camera.extract_bounding_boxes(data['annotation'], + data['instance'], + self._classes) bboxes = [b for b in bboxes if b['class'] == 'CAR'] rospy.logdebug(f'bboxes: {bboxes}') - bbox_img = bng_sensors.Camera.draw_bboxes(bboxes, - data['colour'], - width=3) + bbox_img = Camera.draw_bounding_boxes(bboxes, + data['colour'], + width=3) return bbox_img - def _make_msg(self): - img = self._update_data_with_bbox() + def _make_msg(self, data): + img = self._update_data_with_bbox(data) img = img.convert('RGB') img = np.array(img) img = img[:, :, ::-1].copy() @@ -344,27 +379,27 @@ def __init__(self, sensor, topic_id, vehicle): self._sensor = sensor self._cv_helper = CvBridge() self._publishers = list() - if self._sensor.colour: - color_topic = '/'.join([topic_id, 'color']) + if self._sensor.is_render_colours: + color_topic = '/'.join([topic_id, 'colour']) pub = ColorImgPublisher(sensor, color_topic, self._cv_helper, 'colour') self._publishers.append(pub) - if self._sensor.depth: + if self._sensor.is_render_depth: depth_topic = '/'.join([topic_id, 'depth']) pub = DepthImgPublisher(sensor, depth_topic, self._cv_helper) self._publishers.append(pub) - if self._sensor.annotation: + if self._sensor.is_render_annotations: annotation_topic = '/'.join([topic_id, 'annotation']) pub = ColorImgPublisher(sensor, annotation_topic, self._cv_helper, 'annotation') self._publishers.append(pub) - if self._sensor.instance: + if self._sensor.is_render_instance: inst_ann_topic = '/'.join([topic_id, 'instance']) pub = ColorImgPublisher(sensor, inst_ann_topic, @@ -379,25 +414,54 @@ def __init__(self, sensor, topic_id, vehicle): vehicle) self._publishers.append(pub) - def publish(self): + def publish(self, current_time): + if self._sensor.is_render_instance: + data = self._sensor.get_full_poll_request() + else: + data = self._sensor.poll() for pub in self._publishers: - pub.publish() + pub.current_time = current_time + pub.publish(current_time, data) class LidarPublisher(SensorDataPublisher): - def __init__(self, sensor, topic_id): + def __init__(self, sensor, topic_id, vehicle): super().__init__(sensor, topic_id, sensor_msgs.msg.PointCloud2) - rospy.logdebug(f'sensor_msgs.msg.PointCloud2: {sensor_msgs.msg.PointCloud2}') + self.listener = tf.TransformListener() + sensor_name = topic_id.split("/")[-1] + self.frame_lidar_sensor = f'{vehicle.vid}_{sensor_name}' def _make_msg(self): - points = self._sensor.data['points'] - point_num = points.shape[0]//3 - points = points.reshape(point_num, 3) header = std_msgs.msg.Header() - header.frame_id = 'map' - header.stamp = rospy.get_rostime() - msg = pc2.create_cloud_xyz32(header, points) + header.frame_id = self.frame_lidar_sensor + header.stamp = self.current_time + + readings_data = self._sensor.poll() + points = np.array(readings_data['pointCloud']) + colours = readings_data['colours'] + + pointcloud_fields = [('x', np.float32), + ('y', np.float32), + ('z', np.float32), + ('intensity', np.float32)] + + try: + (trans_map, _) = self.listener.lookupTransform(self.frame_map, self.frame_lidar_sensor, header.stamp) + except (tf.LookupException, tf.ConnectivityException, tf.ExtrapolationException) as e: + rospy.logwarn(f'No transform 
between {self.frame_map} and ' + f'{self.frame_lidar_sensor} available with exception: {e}') + points = np.zeros((0, 3)) + colours = np.zeros((0,)) + trans_map = np.zeros(3) + + pointcloud_data = np.zeros(points.shape[0], dtype=pointcloud_fields) + pointcloud_data['x'] = points[:, 0] - trans_map[0] + pointcloud_data['y'] = points[:, 1] - trans_map[1] + pointcloud_data['z'] = points[:, 2] - trans_map[2] + pointcloud_data['intensity'] = np.array(colours) + msg = ros_numpy.msgify(PointCloud2, pointcloud_data) + msg.header = header return msg @@ -407,6 +471,18 @@ def __init__(self, vehicle, node_name, visualize=True): self._vehicle = vehicle + self.node_name = node_name + self._broadcaster_pose = tf2_ros.TransformBroadcaster() + self.tf_msg = tf2_ros.TransformStamped() + self.frame_map = 'map' + self.tf_msg.header.frame_id = self.frame_map + self.tf_msg.child_frame_id = self._vehicle.vid + self.alignment_quat = np.array([0, 1, 0, 0]) # sets the forward direction as -y + # self.alignment_quat = np.array([0, 0, 0, 1]) # sets the forward direction as -y + # self.alignment_quat = np.array([1, 0, 0, 0]) + self.current_time = rospy.get_rostime() + + self.node_name = node_name self._sensor_publishers = list() for sensor_name, sensor in vehicle.sensors.items(): topic_id = [node_name, vehicle.vid, sensor_name] @@ -418,6 +494,7 @@ def __init__(self, vehicle, else: pub = pub(sensor, topic_id) self._sensor_publishers.append(pub) + self.visualizer = None if visualize: topic_id = [node_name, vehicle.vid, 'marker'] @@ -426,10 +503,30 @@ def __init__(self, vehicle, Marker, queue_size=1) + def broadcast_vehicle_pose(self, data): + self.tf_msg.header.stamp = self.current_time + self.tf_msg.transform.translation.x = data['pos'][0] + self.tf_msg.transform.translation.y = data['pos'][1] + self.tf_msg.transform.translation.z = data['pos'][2] + + quat_orientation = np.array([data['rotation'][0], + data['rotation'][1], + data['rotation'][2], + data['rotation'][3]]) + + quat_orientation = quaternion_multiply(self.alignment_quat, quat_orientation) + quat_orientation /= np.linalg.norm(quat_orientation) + + self.tf_msg.transform.rotation.x = quat_orientation[0] # data['rotation'][0] + self.tf_msg.transform.rotation.y = quat_orientation[1] # data['rotation'][1] + self.tf_msg.transform.rotation.z = quat_orientation[2] # data['rotation'][2] + self.tf_msg.transform.rotation.w = quat_orientation[3] # data['rotation'][3] + self._broadcaster_pose.sendTransform(self.tf_msg) + def state_to_marker(self, data, marker_ns): mark = Marker() - mark.header.frame_id = 'map' - mark.header.stamp = rospy.Time.now() + mark.header.frame_id = self.frame_map + mark.header.stamp = self.current_time mark.type = Marker.CUBE mark.ns = marker_ns mark.action = Marker.ADD @@ -455,10 +552,12 @@ def state_to_marker(self, data, marker_ns): return mark - def publish(self): + def publish(self, current_time): + self.current_time = current_time self._vehicle.poll_sensors() - for pub in self._sensor_publishers: - pub.publish() + self.broadcast_vehicle_pose(self._vehicle.sensors['state'].data) + for pub in self._sensor_publishers: # this got us 1fps more + threading.Thread(target=pub.publish, args=(current_time,), daemon=True).start() if self.visualizer is not None: mark = self.state_to_marker(self._vehicle.sensors['state'].data, self._vehicle.vid) @@ -468,11 +567,13 @@ def publish(self): class NetworkPublisher(BNGPublisher): def __init__(self, game_client, node_name): + self.frame_map = 'map' self._game_client = game_client self._road_network = None 
self._node_name = node_name topic_id = '/'.join([node_name, 'road_network']) self._pub = rospy.Publisher(topic_id, MarkerArray, queue_size=1) + self.current_time = rospy.get_rostime() def set_up_road_network_viz(self): roads = self._game_client.get_roads() @@ -486,13 +587,14 @@ def set_up_road_network_viz(self): rospy.logdebug(f'++++++++++\nroad: {road}') mark = Marker() mark.header = std_msgs.msg.Header() - mark.header.frame_id = 'map' + mark.header.frame_id = self.frame_map + mark.header.stamp = self.current_time mark.type = Marker.LINE_STRIP ns = self._node_name mark.ns = ns mark.action = Marker.ADD mark.id = r_id - mark.lifetime = rospy.Duration() + mark.lifetime = rospy.Duration(0) # leave them up forever mark.pose.position.x = 0 mark.pose.position.y = 0 @@ -517,9 +619,10 @@ def set_up_road_network_viz(self): marker_num = len(self._road_network.markers) rospy.logdebug(f'the road network contains {marker_num} markers') - def publish(self): + def publish(self, current_time): + self.current_time = current_time if self._road_network is None: self.set_up_road_network_viz() - for r in self._road_network.markers: - r.header.stamp = rospy.get_rostime() + for marker in self._road_network.markers: + marker.header.stamp = self.current_time self._pub.publish(self._road_network.markers) diff --git a/beamng_control/scripts/beamng_control/sensorHelper.py b/beamng_control/scripts/beamng_control/sensorHelper.py index bcb6366..c7465a9 100644 --- a/beamng_control/scripts/beamng_control/sensorHelper.py +++ b/beamng_control/scripts/beamng_control/sensorHelper.py @@ -1,8 +1,12 @@ import rospy import copy +import numpy as np import beamngpy.sensors as bng_sensors -#from beamngpy.noise import RandomImageNoise, RandomLIDARNoise +from beamng_control.publishers import LidarPublisher, CameraPublisher + + +# from beamngpy.noise import RandomImageNoise, RandomLIDARNoise class SensorSpecificationError(TypeError): @@ -12,27 +16,40 @@ class SensorSpecificationError(TypeError): pass -def get_lidar(position, - direction, +def rotate_direction_vector(vec, pitch, yaw): + Rx = np.array([[1, 0, 0], + [0, np.cos(pitch), np.sin(pitch)], + [0, -np.sin(pitch), np.cos(pitch)]]) + Rz = np.array([[np.cos(yaw), -np.sin(yaw), 0], + [np.sin(yaw), np.cos(yaw), 0], + [0, 0, 1]]) + return Rx @ Rz @ vec + + +def get_lidar(bng, + vehicle, + position, + rotation, vertical_resolution, vertical_angle, max_distance, **spec): - rospy.logdebug('Lidar visualization cannot ' - 'be enabled through the beamng ros integration') - spec['visualized'] = False if 'shmem' in spec and spec['shmem']: spec.pop('shmem') rospy.logwarn('The Lidar sensor provides no shared memory support, ' 'sensor data is always shared through sockets.') try: - lidar = bng_sensors.Lidar(offset=position, - direction=direction, - vres=vertical_resolution, - vangle=vertical_angle, - max_dist=max_distance, - shmem=False, + lidar = bng_sensors.Lidar(bng=bng, + vehicle=vehicle, + pos=position, + dir=(0, -1, 0), + up=(0, 0, 1), + vertical_resolution=vertical_resolution, + vertical_angle=vertical_angle, + max_distance=max_distance, + is_using_shared_memory=False, **spec) + except TypeError as e: raise SensorSpecificationError('Could not get Lidar instance, the ' 'json specification provided an' @@ -63,9 +80,15 @@ def get_ultrasonic(position, rotation, **spec): spec['near_far'] = (spec.pop('min_distance'), spec.pop('max_distance')) try: - us = bng_sensors.Ultrasonic(position, - rotation, + us = bng_sensors.Ultrasonic(pos=position, + dir=(0, -1, 0), + up=(0, 0, 1), **spec) + rotation = 
np.radians(rotation) + new_position = rotate_direction_vector(vec=position, + pitch=rotation[0], + yaw=rotation[1]) + us.set_direction((new_position[0], new_position[1], new_position[2])) except TypeError as e: raise SensorSpecificationError('Could not get ultrasonic sensor ' 'instance, the json specification ' @@ -76,26 +99,31 @@ return us -def get_camera(position, rotation, fov, resolution, **spec): - if 'bounding_box' in spec: - if spec['bounding_box']: - rospy.logerr('Bounding boxes are not supported ' - 'for the camera sensor.') - bbox = spec.pop('bounding_box') - bbox = False # remove when bboxes are working - - if 'shmem' in spec: +def get_camera(name, bng, vehicle, position, rotation, field_of_view_y, resolution, **spec): + bbox = spec.pop('bounding_box') + if 'is_using_shared_memory' in spec: rospy.logerr('Shared memory is automatically disabled ' 'for the camera sensor.') - spec.pop('shmem') + spec.pop('is_using_shared_memory') try: - cam = bng_sensors.Camera(position, - rotation, - fov, - resolution, - shmem=False, + # we yaw then pitch + cam = bng_sensors.Camera(name=name, + bng=bng, + vehicle=vehicle, + pos=position, + dir=(0, -1, 0), + up=(0, 0, 1), + field_of_view_y=field_of_view_y, + resolution=resolution, + is_using_shared_memory=False, **spec) + rotation = np.radians(rotation) + new_position = rotate_direction_vector(vec=position, + pitch=rotation[0], + yaw=rotation[1]) + cam.set_direction((new_position[0], new_position[1], new_position[2])) + except TypeError as e: raise SensorSpecificationError('Could not get Camera instance, the ' 'json specification provided an' @@ -103,7 +131,7 @@ f'unexpected inputs:\n{spec}\n' '\nOriginal error ' f'message:\n{e}') - if bbox and not(cam.instance and cam.annotation): + if bbox and not (cam.is_render_instance and cam.is_render_annotations): rospy.logerr('Enabled annotations and instance annotations' 'are required to generate images with bounding box.') else: @@ -155,7 +183,7 @@ return sensor_type[0], sensor_spec -def get_sensor(sensor_type, all_sensor_defs, dyn_sensor_properties=None): +def get_sensor(sensor_type, all_sensor_defs, bng=None, vehicle=None, name=None, dyn_sensor_properties=None): """ Args: sensor_type(string): used to look up static part of sensor definition @@ -175,15 +203,38 @@ sensor_def.update(dyn_sensor_properties) rospy.logdebug(f'sensor_def: {sensor_def}') try: - sensor = _sensor_getters[sensor_class_name](**sensor_def) + if sensor_class_name in _automation_sensors: + sensor = _sensor_getters[sensor_class_name](bng=bng, + name=name, + vehicle=vehicle, + **sensor_def) + sensor_publisher = _sensor_automation_type_publisher_getter.get(sensor_class_name) + else: + sensor = _sensor_getters[sensor_class_name](**sensor_def) + sensor_publisher = None except TypeError as err: raise SensorSpecificationError(f'The {sensor_class_name} sensor ' 'definition is missing one or more ' 'fields.
These fields ' f'were defined:\n{sensor_def}\n' f'Original error message:\n{err}') - return sensor - + return sensor, sensor_publisher + + +_automation_sensors = ['Camera', + 'Lidar', + 'CameraNoise', + 'LidarNoise', + 'Ultrasonic', + 'AdvancedIMU', # unsure about this one + 'PowerTrain', # unsure about this one + 'Radar', # unsure about this one + 'meshsensor', # unsure about this one + ] +_sensor_automation_type_publisher_getter = { 'Lidar': LidarPublisher, 'Camera': CameraPublisher, } _sensor_getters = { 'IMU': get_imu,